Lines Matching refs:SPIRV

27 namespace SPIRV { namespace
172 namespace SPIRV { namespace
220 static std::unique_ptr<const SPIRV::IncomingCall>
222 SPIRV::InstructionSet::InstructionSet Set, in lookupBuiltin()
225 std::string BuiltinName = SPIRV::lookupBuiltinNameHelper(DemangledCall); in lookupBuiltin()
234 const SPIRV::DemangledBuiltin *Builtin; in lookupBuiltin()
235 if ((Builtin = SPIRV::lookupBuiltin(BuiltinName, Set))) in lookupBuiltin()
236 return std::make_unique<SPIRV::IncomingCall>( in lookupBuiltin()
251 if (Set == SPIRV::InstructionSet::OpenCL_std) in lookupBuiltin()
253 else if (Set == SPIRV::InstructionSet::GLSL_std_450) in lookupBuiltin()
261 if (Set == SPIRV::InstructionSet::OpenCL_std) in lookupBuiltin()
263 else if (Set == SPIRV::InstructionSet::GLSL_std_450) in lookupBuiltin()
270 if (Set == SPIRV::InstructionSet::OpenCL_std || in lookupBuiltin()
271 Set == SPIRV::InstructionSet::GLSL_std_450) in lookupBuiltin()
278 (Builtin = SPIRV::lookupBuiltin(Prefix + BuiltinName, Set))) in lookupBuiltin()
279 return std::make_unique<SPIRV::IncomingCall>( in lookupBuiltin()
309 (Builtin = SPIRV::lookupBuiltin(BuiltinName + Suffix, Set))) in lookupBuiltin()
310 return std::make_unique<SPIRV::IncomingCall>( in lookupBuiltin()
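The three lookups above (source lines 235, 278 and 309) show lookupBuiltin() trying the demangled name verbatim, then retrying with a set-specific prefix, and finally with a suffix. A minimal self-contained sketch of that retry shape, with a made-up table and hypothetical names standing in for the TableGen-generated API:

    #include <optional>
    #include <string>
    #include <unordered_map>

    struct Builtin { int Opcode; };

    // Toy lookup table; the real tables are TableGen-generated.
    static const std::unordered_map<std::string, Builtin> Table = {
        {"acos", {10}}, {"native_acos", {11}}, {"acos_f", {12}}};

    // Try the name as-is, then with a prefix, then with a suffix,
    // mirroring the three lookups in the fragments above.
    std::optional<Builtin> lookupWithRetry(const std::string &Name,
                                           const std::string &Prefix,
                                           const std::string &Suffix) {
      for (const std::string &Candidate : {Name, Prefix + Name, Name + Suffix}) {
        auto It = Table.find(Candidate);
        if (It != Table.end())
          return It->second;
      }
      return std::nullopt; // No builtin record found after all retries.
    }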
398 if (ResultType->getOpcode() == SPIRV::OpTypeVector) { in buildBoolRegister()
411 MIRBuilder.getMRI()->setRegClass(ResultRegister, &SPIRV::IDRegClass); in buildBoolRegister()
424 if (ReturnType->getOpcode() == SPIRV::OpTypeVector) { in buildSelectInst()
445 DestinationReg = MRI->createVirtualRegister(&SPIRV::IDRegClass); in buildLoadInst()
460 SPIRVGlobalRegistry *GR, SPIRV::BuiltIn::BuiltIn BuiltinValue, LLT LLType, in buildBuiltinVariableLoad()
463 MIRBuilder.getMRI()->createVirtualRegister(&SPIRV::IDRegClass); in buildBuiltinVariableLoad()
467 VariableType, MIRBuilder, SPIRV::StorageClass::Input); in buildBuiltinVariableLoad()
473 SPIRV::StorageClass::Input, nullptr, /* isConst= */ isConst, in buildBuiltinVariableLoad()
474 /* HasLinkageTy */ hasLinkageTy, SPIRV::LinkageType::Import, MIRBuilder, in buildBuiltinVariableLoad()
495 static SPIRV::MemorySemantics::MemorySemantics
499 return SPIRV::MemorySemantics::None; in getSPIRVMemSemantics()
501 return SPIRV::MemorySemantics::Acquire; in getSPIRVMemSemantics()
503 return SPIRV::MemorySemantics::Release; in getSPIRVMemSemantics()
505 return SPIRV::MemorySemantics::AcquireRelease; in getSPIRVMemSemantics()
507 return SPIRV::MemorySemantics::SequentiallyConsistent; in getSPIRVMemSemantics()
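The returns at source lines 499-507 suggest getSPIRVMemSemantics() maps a C++/OpenCL memory order onto SPIR-V memory semantics. A self-contained sketch of that mapping, with a local enum standing in for SPIRV::MemorySemantics:

    #include <atomic>

    // Illustrative stand-in for SPIRV::MemorySemantics::MemorySemantics.
    enum class MemSemantics {
      None, Acquire, Release, AcquireRelease, SequentiallyConsistent
    };

    MemSemantics toSPIRVMemSemantics(std::memory_order Order) {
      switch (Order) {
      case std::memory_order_acquire:
        return MemSemantics::Acquire;
      case std::memory_order_release:
        return MemSemantics::Release;
      case std::memory_order_acq_rel:
        return MemSemantics::AcquireRelease;
      case std::memory_order_seq_cst:
        return MemSemantics::SequentiallyConsistent;
      default: // relaxed (and, in this sketch, consume) map to None.
        return MemSemantics::None;
      }
    }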
513 static SPIRV::Scope::Scope getSPIRVScope(SPIRV::CLMemoryScope ClScope) { in getSPIRVScope()
515 case SPIRV::CLMemoryScope::memory_scope_work_item: in getSPIRVScope()
516 return SPIRV::Scope::Invocation; in getSPIRVScope()
517 case SPIRV::CLMemoryScope::memory_scope_work_group: in getSPIRVScope()
518 return SPIRV::Scope::Workgroup; in getSPIRVScope()
519 case SPIRV::CLMemoryScope::memory_scope_device: in getSPIRVScope()
520 return SPIRV::Scope::Device; in getSPIRVScope()
521 case SPIRV::CLMemoryScope::memory_scope_all_svm_devices: in getSPIRVScope()
522 return SPIRV::Scope::CrossDevice; in getSPIRVScope()
523 case SPIRV::CLMemoryScope::memory_scope_sub_group: in getSPIRVScope()
524 return SPIRV::Scope::Subgroup; in getSPIRVScope()
537 SPIRV::Scope::Scope Scope, in buildScopeReg()
543 static_cast<SPIRV::CLMemoryScope>(getIConstVal(CLScopeRegister, MRI)); in buildScopeReg()
547 MRI->setRegClass(CLScopeRegister, &SPIRV::IDRegClass); in buildScopeReg()
567 MRI->setRegClass(SemanticsRegister, &SPIRV::IDRegClass); in buildMemSemanticsReg()
575 const SPIRV::IncomingCall *Call, in buildOpFromWrapper()
586 MRI->setRegClass(ArgReg, &SPIRV::IDRegClass); in buildOpFromWrapper()
595 static bool buildAtomicInitInst(const SPIRV::IncomingCall *Call, in buildAtomicInitInst()
598 return buildOpFromWrapper(MIRBuilder, SPIRV::OpStore, Call, Register(0)); in buildAtomicInitInst()
602 MIRBuilder.getMRI()->setRegClass(Call->Arguments[0], &SPIRV::IDRegClass); in buildAtomicInitInst()
603 MIRBuilder.getMRI()->setRegClass(Call->Arguments[1], &SPIRV::IDRegClass); in buildAtomicInitInst()
604 MIRBuilder.buildInstr(SPIRV::OpStore) in buildAtomicInitInst()
611 static bool buildAtomicLoadInst(const SPIRV::IncomingCall *Call, in buildAtomicLoadInst()
616 return buildOpFromWrapper(MIRBuilder, SPIRV::OpAtomicLoad, Call, TypeReg); in buildAtomicLoadInst()
619 MIRBuilder.getMRI()->setRegClass(PtrRegister, &SPIRV::IDRegClass); in buildAtomicLoadInst()
626 MIRBuilder.getMRI()->setRegClass(ScopeRegister, &SPIRV::IDRegClass); in buildAtomicLoadInst()
628 ScopeRegister = buildConstantIntReg(SPIRV::Scope::Device, MIRBuilder, GR); in buildAtomicLoadInst()
634 MIRBuilder.getMRI()->setRegClass(MemSemanticsReg, &SPIRV::IDRegClass); in buildAtomicLoadInst()
637 SPIRV::MemorySemantics::SequentiallyConsistent | in buildAtomicLoadInst()
642 MIRBuilder.buildInstr(SPIRV::OpAtomicLoad) in buildAtomicLoadInst()
652 static bool buildAtomicStoreInst(const SPIRV::IncomingCall *Call, in buildAtomicStoreInst()
656 return buildOpFromWrapper(MIRBuilder, SPIRV::OpAtomicStore, Call, Register(0)); in buildAtomicStoreInst()
659 buildConstantIntReg(SPIRV::Scope::Device, MIRBuilder, GR); in buildAtomicStoreInst()
661 MIRBuilder.getMRI()->setRegClass(PtrRegister, &SPIRV::IDRegClass); in buildAtomicStoreInst()
663 SPIRV::MemorySemantics::SequentiallyConsistent | in buildAtomicStoreInst()
666 MIRBuilder.getMRI()->setRegClass(Call->Arguments[1], &SPIRV::IDRegClass); in buildAtomicStoreInst()
667 MIRBuilder.buildInstr(SPIRV::OpAtomicStore) in buildAtomicStoreInst()
677 const SPIRV::IncomingCall *Call, const SPIRV::DemangledBuiltin *Builtin, in buildAtomicCompareExchangeInst()
689 MRI->setRegClass(ObjectPtr, &SPIRV::IDRegClass); in buildAtomicCompareExchangeInst()
690 MRI->setRegClass(ExpectedArg, &SPIRV::IDRegClass); in buildAtomicCompareExchangeInst()
691 MRI->setRegClass(Desired, &SPIRV::IDRegClass); in buildAtomicCompareExchangeInst()
696 SPIRV::OpTypePointer); in buildAtomicCompareExchangeInst()
699 assert(IsCmpxchg ? ExpectedType == SPIRV::OpTypeInt in buildAtomicCompareExchangeInst()
700 : ExpectedType == SPIRV::OpTypePointer); in buildAtomicCompareExchangeInst()
701 assert(GR->isScalarOfType(Desired, SPIRV::OpTypeInt)); in buildAtomicCompareExchangeInst()
705 auto StorageClass = static_cast<SPIRV::StorageClass::StorageClass>( in buildAtomicCompareExchangeInst()
713 ? SPIRV::MemorySemantics::None in buildAtomicCompareExchangeInst()
714 : SPIRV::MemorySemantics::SequentiallyConsistent | MemSemStorage; in buildAtomicCompareExchangeInst()
717 ? SPIRV::MemorySemantics::None in buildAtomicCompareExchangeInst()
718 : SPIRV::MemorySemantics::SequentiallyConsistent | MemSemStorage; in buildAtomicCompareExchangeInst()
732 MRI->setRegClass(Call->Arguments[3], &SPIRV::IDRegClass); in buildAtomicCompareExchangeInst()
733 MRI->setRegClass(Call->Arguments[4], &SPIRV::IDRegClass); in buildAtomicCompareExchangeInst()
741 auto Scope = IsCmpxchg ? SPIRV::Scope::Workgroup : SPIRV::Scope::Device; in buildAtomicCompareExchangeInst()
745 auto ClScope = static_cast<SPIRV::CLMemoryScope>( in buildAtomicCompareExchangeInst()
750 MRI->setRegClass(Call->Arguments[5], &SPIRV::IDRegClass); in buildAtomicCompareExchangeInst()
763 MRI->setRegClass(Tmp, &SPIRV::IDRegClass); in buildAtomicCompareExchangeInst()
777 MIRBuilder.buildInstr(SPIRV::OpStore).addUse(ExpectedArg).addUse(Tmp); in buildAtomicCompareExchangeInst()
784 static bool buildAtomicRMWInst(const SPIRV::IncomingCall *Call, unsigned Opcode, in buildAtomicRMWInst()
797 ScopeRegister = buildScopeReg(ScopeRegister, SPIRV::Scope::Workgroup, in buildAtomicRMWInst()
801 unsigned Semantics = SPIRV::MemorySemantics::None; in buildAtomicRMWInst()
802 MRI->setRegClass(PtrRegister, &SPIRV::IDRegClass); in buildAtomicRMWInst()
807 MRI->setRegClass(Call->Arguments[1], &SPIRV::IDRegClass); in buildAtomicRMWInst()
811 if (Call->ReturnType->getOpcode() == SPIRV::OpTypeFloat) { in buildAtomicRMWInst()
812 if (Opcode == SPIRV::OpAtomicIAdd) { in buildAtomicRMWInst()
813 Opcode = SPIRV::OpAtomicFAddEXT; in buildAtomicRMWInst()
814 } else if (Opcode == SPIRV::OpAtomicISub) { in buildAtomicRMWInst()
817 Opcode = SPIRV::OpAtomicFAddEXT; in buildAtomicRMWInst()
820 MRI->setRegClass(NegValueReg, &SPIRV::IDRegClass); in buildAtomicRMWInst()
842 static bool buildAtomicFloatingRMWInst(const SPIRV::IncomingCall *Call, in buildAtomicFloatingRMWInst()
852 MRI->setRegClass(PtrReg, &SPIRV::IDRegClass); in buildAtomicFloatingRMWInst()
855 MRI->setRegClass(ScopeReg, &SPIRV::IDRegClass); in buildAtomicFloatingRMWInst()
858 MRI->setRegClass(MemSemanticsReg, &SPIRV::IDRegClass); in buildAtomicFloatingRMWInst()
861 MRI->setRegClass(ValueReg, &SPIRV::IDRegClass); in buildAtomicFloatingRMWInst()
875 static bool buildAtomicFlagInst(const SPIRV::IncomingCall *Call, in buildAtomicFlagInst()
878 bool IsSet = Opcode == SPIRV::OpAtomicFlagTestAndSet; in buildAtomicFlagInst()
886 unsigned Semantics = SPIRV::MemorySemantics::SequentiallyConsistent; in buildAtomicFlagInst()
892 assert((Opcode != SPIRV::OpAtomicFlagClear || in buildAtomicFlagInst()
893 (Semantics != SPIRV::MemorySemantics::Acquire && in buildAtomicFlagInst()
894 Semantics != SPIRV::MemorySemantics::AcquireRelease)) && in buildAtomicFlagInst()
900 buildScopeReg(ScopeRegister, SPIRV::Scope::Device, MIRBuilder, GR, MRI); in buildAtomicFlagInst()
912 static bool buildBarrierInst(const SPIRV::IncomingCall *Call, unsigned Opcode, in buildBarrierInst()
920 unsigned MemSemantics = SPIRV::MemorySemantics::None; in buildBarrierInst()
922 if (MemFlags & SPIRV::CLK_LOCAL_MEM_FENCE) in buildBarrierInst()
923 MemSemantics |= SPIRV::MemorySemantics::WorkgroupMemory; in buildBarrierInst()
925 if (MemFlags & SPIRV::CLK_GLOBAL_MEM_FENCE) in buildBarrierInst()
926 MemSemantics |= SPIRV::MemorySemantics::CrossWorkgroupMemory; in buildBarrierInst()
928 if (MemFlags & SPIRV::CLK_IMAGE_MEM_FENCE) in buildBarrierInst()
929 MemSemantics |= SPIRV::MemorySemantics::ImageMemory; in buildBarrierInst()
931 if (Opcode == SPIRV::OpMemoryBarrier) { in buildBarrierInst()
936 MemSemantics |= SPIRV::MemorySemantics::SequentiallyConsistent; in buildBarrierInst()
942 MRI->setRegClass(MemSemanticsReg, &SPIRV::IDRegClass); in buildBarrierInst()
947 SPIRV::Scope::Scope Scope = SPIRV::Scope::Workgroup; in buildBarrierInst()
948 SPIRV::Scope::Scope MemScope = Scope; in buildBarrierInst()
951 ((Opcode != SPIRV::OpMemoryBarrier && Call->Arguments.size() == 2) || in buildBarrierInst()
952 (Opcode == SPIRV::OpMemoryBarrier && Call->Arguments.size() == 3)) && in buildBarrierInst()
954 Register ScopeArg = (Opcode == SPIRV::OpMemoryBarrier) ? Call->Arguments[2] in buildBarrierInst()
956 SPIRV::CLMemoryScope CLScope = in buildBarrierInst()
957 static_cast<SPIRV::CLMemoryScope>(getIConstVal(ScopeArg, MRI)); in buildBarrierInst()
959 if (!(MemFlags & SPIRV::CLK_LOCAL_MEM_FENCE) || in buildBarrierInst()
960 (Opcode == SPIRV::OpMemoryBarrier)) in buildBarrierInst()
965 MRI->setRegClass(ScopeReg, &SPIRV::IDRegClass); in buildBarrierInst()
973 if (Opcode != SPIRV::OpMemoryBarrier) in buildBarrierInst()
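The fence-flag translation in buildBarrierInst() (source lines 922-929 above) ORs one SPIR-V memory-semantics bit per requested OpenCL fence. A self-contained sketch of that bitmask translation; the CLK_* and semantics constants below are illustrative stand-ins for the real backend definitions:

    #include <cstdint>

    constexpr uint32_t CLK_LOCAL_MEM_FENCE = 0x1;
    constexpr uint32_t CLK_GLOBAL_MEM_FENCE = 0x2;
    constexpr uint32_t CLK_IMAGE_MEM_FENCE = 0x4;
    constexpr uint32_t SemWorkgroupMemory = 0x100;
    constexpr uint32_t SemCrossWorkgroupMemory = 0x200;
    constexpr uint32_t SemImageMemory = 0x800;

    // OR together one semantics bit per requested fence.
    uint32_t fenceFlagsToSemantics(uint32_t MemFlags) {
      uint32_t Sem = 0; // MemorySemantics::None
      if (MemFlags & CLK_LOCAL_MEM_FENCE)
        Sem |= SemWorkgroupMemory;
      if (MemFlags & CLK_GLOBAL_MEM_FENCE)
        Sem |= SemCrossWorkgroupMemory;
      if (MemFlags & CLK_IMAGE_MEM_FENCE)
        Sem |= SemImageMemory;
      return Sem;
    }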
979 static unsigned getNumComponentsForDim(SPIRV::Dim::Dim dim) { in getNumComponentsForDim()
981 case SPIRV::Dim::DIM_1D: in getNumComponentsForDim()
982 case SPIRV::Dim::DIM_Buffer: in getNumComponentsForDim()
984 case SPIRV::Dim::DIM_2D: in getNumComponentsForDim()
985 case SPIRV::Dim::DIM_Cube: in getNumComponentsForDim()
986 case SPIRV::Dim::DIM_Rect: in getNumComponentsForDim()
988 case SPIRV::Dim::DIM_3D: in getNumComponentsForDim()
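The case groupings at source lines 981-988 indicate getNumComponentsForDim() returns the number of size components per image dimensionality: one for 1D/Buffer, two for 2D/Cube/Rect, three for 3D. A self-contained sketch, with a local enum standing in for SPIRV::Dim::Dim:

    enum class Dim { D1, Buffer, D2, Cube, Rect, D3 };

    unsigned numComponentsForDim(Dim D) {
      switch (D) {
      case Dim::D1:
      case Dim::Buffer:
        return 1;
      case Dim::D2:
      case Dim::Cube:
      case Dim::Rect:
        return 2;
      case Dim::D3:
        return 3;
      }
      return 0; // Unreachable for the dims modeled here.
    }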
997 assert(imgType->getOpcode() == SPIRV::OpTypeImage); in getNumSizeComponents()
998 auto dim = static_cast<SPIRV::Dim::Dim>(imgType->getOperand(2).getImm()); in getNumSizeComponents()
1008 static bool generateExtInst(const SPIRV::IncomingCall *Call, in generateExtInst()
1012 const SPIRV::DemangledBuiltin *Builtin = Call->Builtin; in generateExtInst()
1014 SPIRV::lookupExtendedBuiltin(Builtin->Name, Builtin->Set)->Number; in generateExtInst()
1018 MIRBuilder.buildInstr(SPIRV::OpExtInst) in generateExtInst()
1021 .addImm(static_cast<uint32_t>(SPIRV::InstructionSet::OpenCL_std)) in generateExtInst()
1029 static bool generateRelationalInst(const SPIRV::IncomingCall *Call, in generateRelationalInst()
1033 const SPIRV::DemangledBuiltin *Builtin = Call->Builtin; in generateRelationalInst()
1035 SPIRV::lookupNativeBuiltin(Builtin->Name, Builtin->Set)->Opcode; in generateRelationalInst()
1055 static bool generateGroupInst(const SPIRV::IncomingCall *Call, in generateGroupInst()
1058 const SPIRV::DemangledBuiltin *Builtin = Call->Builtin; in generateGroupInst()
1059 const SPIRV::GroupBuiltin *GroupBuiltin = in generateGroupInst()
1060 SPIRV::lookupGroupBuiltin(Builtin->Name); in generateGroupInst()
1077 MRI->setRegClass(ScopeReg, &SPIRV::IDRegClass); in generateGroupInst()
1086 MRI->setRegClass(ArgReg, &SPIRV::IDRegClass); in generateGroupInst()
1101 SPIRV::OpTypeBool) in generateGroupInst()
1120 auto Scope = Builtin->Name.starts_with("sub_group") ? SPIRV::Scope::Subgroup in generateGroupInst()
1121 : SPIRV::Scope::Workgroup; in generateGroupInst()
1134 MRI->setRegClass(Call->Arguments[0], &SPIRV::IDRegClass); in generateGroupInst()
1137 MRI->setRegClass(Call->Arguments[i], &SPIRV::IDRegClass); in generateGroupInst()
1148 static bool generateIntelSubgroupsInst(const SPIRV::IncomingCall *Call, in generateIntelSubgroupsInst()
1151 const SPIRV::DemangledBuiltin *Builtin = Call->Builtin; in generateIntelSubgroupsInst()
1154 if (!ST->canUseExtension(SPIRV::Extension::SPV_INTEL_subgroups)) { in generateIntelSubgroupsInst()
1160 const SPIRV::IntelSubgroupsBuiltin *IntelSubgroups = in generateIntelSubgroupsInst()
1161 SPIRV::lookupIntelSubgroupsBuiltin(Builtin->Name); in generateIntelSubgroupsInst()
1165 bool IsSet = OpCode != SPIRV::OpSubgroupBlockWriteINTEL && in generateIntelSubgroupsInst()
1166 OpCode != SPIRV::OpSubgroupImageBlockWriteINTEL; in generateIntelSubgroupsInst()
1176 if (Arg0Type->getOpcode() == SPIRV::OpTypeImage) { in generateIntelSubgroupsInst()
1182 case SPIRV::OpSubgroupBlockReadINTEL: in generateIntelSubgroupsInst()
1183 OpCode = SPIRV::OpSubgroupImageBlockReadINTEL; in generateIntelSubgroupsInst()
1185 case SPIRV::OpSubgroupBlockWriteINTEL: in generateIntelSubgroupsInst()
1186 OpCode = SPIRV::OpSubgroupImageBlockWriteINTEL; in generateIntelSubgroupsInst()
1211 MRI->setRegClass(Call->Arguments[i], &SPIRV::IDRegClass); in generateIntelSubgroupsInst()
1217 static bool generateGroupUniformInst(const SPIRV::IncomingCall *Call, in generateGroupUniformInst()
1220 const SPIRV::DemangledBuiltin *Builtin = Call->Builtin; in generateGroupUniformInst()
1224 SPIRV::Extension::SPV_KHR_uniform_group_instructions)) { in generateGroupUniformInst()
1230 const SPIRV::GroupUniformBuiltin *GroupUniform = in generateGroupUniformInst()
1231 SPIRV::lookupGroupUniformBuiltin(Builtin->Name); in generateGroupUniformInst()
1235 MRI->setRegClass(GroupResultReg, &SPIRV::IDRegClass); in generateGroupUniformInst()
1239 MRI->setRegClass(ScopeReg, &SPIRV::IDRegClass); in generateGroupUniformInst()
1256 MRI->setRegClass(ValueReg, &SPIRV::IDRegClass); in generateGroupUniformInst()
1268 static bool generateKernelClockInst(const SPIRV::IncomingCall *Call, in generateKernelClockInst()
1271 const SPIRV::DemangledBuiltin *Builtin = Call->Builtin; in generateKernelClockInst()
1274 if (!ST->canUseExtension(SPIRV::Extension::SPV_KHR_shader_clock)) { in generateKernelClockInst()
1283 MRI->setRegClass(ResultReg, &SPIRV::IDRegClass); in generateKernelClockInst()
1286 SPIRV::Scope::Scope ScopeArg = in generateKernelClockInst()
1287 StringSwitch<SPIRV::Scope::Scope>(Builtin->Name) in generateKernelClockInst()
1288 .EndsWith("device", SPIRV::Scope::Scope::Device) in generateKernelClockInst()
1289 .EndsWith("work_group", SPIRV::Scope::Scope::Workgroup) in generateKernelClockInst()
1290 .EndsWith("sub_group", SPIRV::Scope::Scope::Subgroup); in generateKernelClockInst()
1293 MIRBuilder.buildInstr(SPIRV::OpReadClockKHR) in generateKernelClockInst()
1327 static bool genWorkgroupQuery(const SPIRV::IncomingCall *Call, in genWorkgroupQuery()
1330 SPIRV::BuiltIn::BuiltIn BuiltinValue, in genWorkgroupQuery()
1353 MRI->setRegClass(DefaultReg, &SPIRV::IDRegClass); in genWorkgroupQuery()
1371 MRI->setRegClass(Extracted, &SPIRV::IDRegClass); in genWorkgroupQuery()
1390 MRI->setRegClass(CompareRegister, &SPIRV::IDRegClass); in genWorkgroupQuery()
1407 MRI->setRegClass(SelectionResult, &SPIRV::IDRegClass); in genWorkgroupQuery()
1425 static bool generateBuiltinVar(const SPIRV::IncomingCall *Call, in generateBuiltinVar()
1429 const SPIRV::DemangledBuiltin *Builtin = Call->Builtin; in generateBuiltinVar()
1430 SPIRV::BuiltIn::BuiltIn Value = in generateBuiltinVar()
1431 SPIRV::lookupGetBuiltin(Builtin->Name, Builtin->Set)->Value; in generateBuiltinVar()
1433 if (Value == SPIRV::BuiltIn::GlobalInvocationId) in generateBuiltinVar()
1439 if (Call->ReturnType->getOpcode() == SPIRV::OpTypeVector) in generateBuiltinVar()
1449 static bool generateAtomicInst(const SPIRV::IncomingCall *Call, in generateAtomicInst()
1453 const SPIRV::DemangledBuiltin *Builtin = Call->Builtin; in generateAtomicInst()
1455 SPIRV::lookupNativeBuiltin(Builtin->Name, Builtin->Set)->Opcode; in generateAtomicInst()
1458 case SPIRV::OpStore: in generateAtomicInst()
1460 case SPIRV::OpAtomicLoad: in generateAtomicInst()
1462 case SPIRV::OpAtomicStore: in generateAtomicInst()
1464 case SPIRV::OpAtomicCompareExchange: in generateAtomicInst()
1465 case SPIRV::OpAtomicCompareExchangeWeak: in generateAtomicInst()
1468 case SPIRV::OpAtomicIAdd: in generateAtomicInst()
1469 case SPIRV::OpAtomicISub: in generateAtomicInst()
1470 case SPIRV::OpAtomicOr: in generateAtomicInst()
1471 case SPIRV::OpAtomicXor: in generateAtomicInst()
1472 case SPIRV::OpAtomicAnd: in generateAtomicInst()
1473 case SPIRV::OpAtomicExchange: in generateAtomicInst()
1475 case SPIRV::OpMemoryBarrier: in generateAtomicInst()
1476 return buildBarrierInst(Call, SPIRV::OpMemoryBarrier, MIRBuilder, GR); in generateAtomicInst()
1477 case SPIRV::OpAtomicFlagTestAndSet: in generateAtomicInst()
1478 case SPIRV::OpAtomicFlagClear: in generateAtomicInst()
1488 static bool generateAtomicFloatingInst(const SPIRV::IncomingCall *Call, in generateAtomicFloatingInst()
1492 const SPIRV::DemangledBuiltin *Builtin = Call->Builtin; in generateAtomicFloatingInst()
1493 unsigned Opcode = SPIRV::lookupAtomicFloatingBuiltin(Builtin->Name)->Opcode; in generateAtomicFloatingInst()
1496 case SPIRV::OpAtomicFAddEXT: in generateAtomicFloatingInst()
1497 case SPIRV::OpAtomicFMinEXT: in generateAtomicFloatingInst()
1498 case SPIRV::OpAtomicFMaxEXT: in generateAtomicFloatingInst()
1505 static bool generateBarrierInst(const SPIRV::IncomingCall *Call, in generateBarrierInst()
1509 const SPIRV::DemangledBuiltin *Builtin = Call->Builtin; in generateBarrierInst()
1511 SPIRV::lookupNativeBuiltin(Builtin->Name, Builtin->Set)->Opcode; in generateBarrierInst()
1516 static bool generateCastToPtrInst(const SPIRV::IncomingCall *Call, in generateCastToPtrInst()
1524 static bool generateDotOrFMulInst(const SPIRV::IncomingCall *Call, in generateDotOrFMulInst()
1528 return buildOpFromWrapper(MIRBuilder, SPIRV::OpDot, Call, in generateDotOrFMulInst()
1531 bool IsVec = Opcode == SPIRV::OpTypeVector; in generateDotOrFMulInst()
1533 MIRBuilder.buildInstr(IsVec ? SPIRV::OpDot : SPIRV::OpFMulS) in generateDotOrFMulInst()
1541 static bool generateWaveInst(const SPIRV::IncomingCall *Call, in generateWaveInst()
1544 const SPIRV::DemangledBuiltin *Builtin = Call->Builtin; in generateWaveInst()
1545 SPIRV::BuiltIn::BuiltIn Value = in generateWaveInst()
1546 SPIRV::lookupGetBuiltin(Builtin->Name, Builtin->Set)->Value; in generateWaveInst()
1549 assert(Call->ReturnType->getOpcode() == SPIRV::OpTypeInt); in generateWaveInst()
1557 static bool generateGetQueryInst(const SPIRV::IncomingCall *Call, in generateGetQueryInst()
1561 SPIRV::BuiltIn::BuiltIn Value = in generateGetQueryInst()
1562 SPIRV::lookupGetBuiltin(Call->Builtin->Name, Call->Builtin->Set)->Value; in generateGetQueryInst()
1563 uint64_t IsDefault = (Value == SPIRV::BuiltIn::GlobalSize || in generateGetQueryInst()
1564 Value == SPIRV::BuiltIn::WorkgroupSize || in generateGetQueryInst()
1565 Value == SPIRV::BuiltIn::EnqueuedWorkgroupSize); in generateGetQueryInst()
1569 static bool generateImageSizeQueryInst(const SPIRV::IncomingCall *Call, in generateImageSizeQueryInst()
1573 const SPIRV::DemangledBuiltin *Builtin = Call->Builtin; in generateImageSizeQueryInst()
1575 SPIRV::lookupImageQueryBuiltin(Builtin->Name, Builtin->Set)->Component; in generateImageSizeQueryInst()
1580 unsigned NumExpectedRetComponents = RetTy->getOpcode() == SPIRV::OpTypeVector in generateImageSizeQueryInst()
1591 MIRBuilder.getMRI()->setRegClass(QueryResult, &SPIRV::IDRegClass); in generateImageSizeQueryInst()
1597 bool IsDimBuf = ImgType->getOperand(2).getImm() == SPIRV::Dim::DIM_Buffer; in generateImageSizeQueryInst()
1599 IsDimBuf ? SPIRV::OpImageQuerySize : SPIRV::OpImageQuerySizeLod; in generateImageSizeQueryInst()
1600 MIRBuilder.getMRI()->setRegClass(Call->Arguments[0], &SPIRV::IDRegClass); in generateImageSizeQueryInst()
1617 if (QueryResultType->getOpcode() == SPIRV::OpTypeVector) { in generateImageSizeQueryInst()
1623 MIRBuilder.buildInstr(SPIRV::OpCompositeExtract) in generateImageSizeQueryInst()
1633 auto MIB = MIRBuilder.buildInstr(SPIRV::OpVectorShuffle) in generateImageSizeQueryInst()
1644 static bool generateImageMiscQueryInst(const SPIRV::IncomingCall *Call, in generateImageMiscQueryInst()
1647 assert(Call->ReturnType->getOpcode() == SPIRV::OpTypeInt && in generateImageMiscQueryInst()
1651 const SPIRV::DemangledBuiltin *Builtin = Call->Builtin; in generateImageMiscQueryInst()
1653 SPIRV::lookupNativeBuiltin(Builtin->Name, Builtin->Set)->Opcode; in generateImageMiscQueryInst()
1656 MIRBuilder.getMRI()->setRegClass(Image, &SPIRV::IDRegClass); in generateImageMiscQueryInst()
1657 SPIRV::Dim::Dim ImageDimensionality = static_cast<SPIRV::Dim::Dim>( in generateImageMiscQueryInst()
1662 case SPIRV::OpImageQuerySamples: in generateImageMiscQueryInst()
1663 assert(ImageDimensionality == SPIRV::Dim::DIM_2D && in generateImageMiscQueryInst()
1666 case SPIRV::OpImageQueryLevels: in generateImageMiscQueryInst()
1667 assert((ImageDimensionality == SPIRV::Dim::DIM_1D || in generateImageMiscQueryInst()
1668 ImageDimensionality == SPIRV::Dim::DIM_2D || in generateImageMiscQueryInst()
1669 ImageDimensionality == SPIRV::Dim::DIM_3D || in generateImageMiscQueryInst()
1670 ImageDimensionality == SPIRV::Dim::DIM_Cube) && in generateImageMiscQueryInst()
1683 static SPIRV::SamplerAddressingMode::SamplerAddressingMode
1685 switch (Bitmask & SPIRV::CLK_ADDRESS_MODE_MASK) { in getSamplerAddressingModeFromBitmask()
1686 case SPIRV::CLK_ADDRESS_CLAMP: in getSamplerAddressingModeFromBitmask()
1687 return SPIRV::SamplerAddressingMode::Clamp; in getSamplerAddressingModeFromBitmask()
1688 case SPIRV::CLK_ADDRESS_CLAMP_TO_EDGE: in getSamplerAddressingModeFromBitmask()
1689 return SPIRV::SamplerAddressingMode::ClampToEdge; in getSamplerAddressingModeFromBitmask()
1690 case SPIRV::CLK_ADDRESS_REPEAT: in getSamplerAddressingModeFromBitmask()
1691 return SPIRV::SamplerAddressingMode::Repeat; in getSamplerAddressingModeFromBitmask()
1692 case SPIRV::CLK_ADDRESS_MIRRORED_REPEAT: in getSamplerAddressingModeFromBitmask()
1693 return SPIRV::SamplerAddressingMode::RepeatMirrored; in getSamplerAddressingModeFromBitmask()
1694 case SPIRV::CLK_ADDRESS_NONE: in getSamplerAddressingModeFromBitmask()
1695 return SPIRV::SamplerAddressingMode::None; in getSamplerAddressingModeFromBitmask()
1702 return (Bitmask & SPIRV::CLK_NORMALIZED_COORDS_TRUE) ? 1 : 0; in getSamplerParamFromBitmask()
1705 static SPIRV::SamplerFilterMode::SamplerFilterMode
1707 if (Bitmask & SPIRV::CLK_FILTER_LINEAR) in getSamplerFilterModeFromBitmask()
1708 return SPIRV::SamplerFilterMode::Linear; in getSamplerFilterModeFromBitmask()
1709 if (Bitmask & SPIRV::CLK_FILTER_NEAREST) in getSamplerFilterModeFromBitmask()
1710 return SPIRV::SamplerFilterMode::Nearest; in getSamplerFilterModeFromBitmask()
1711 return SPIRV::SamplerFilterMode::Nearest; in getSamplerFilterModeFromBitmask()
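Taken together, the three helpers above (source lines 1683-1711) decode one OpenCL sampler bitmask into the addressing-mode, normalized-coordinates, and filter-mode operands of an OpConstantSampler. A self-contained sketch combining them; the CLK_* values follow the usual OpenCL definitions but are illustrative here:

    #include <cstdint>
    #include <tuple>

    constexpr uint32_t CLK_ADDRESS_MODE_MASK = 0xE;
    constexpr uint32_t CLK_ADDRESS_CLAMP_TO_EDGE = 0x2, CLK_ADDRESS_CLAMP = 0x4,
                       CLK_ADDRESS_REPEAT = 0x6, CLK_ADDRESS_MIRRORED_REPEAT = 0x8;
    constexpr uint32_t CLK_NORMALIZED_COORDS_TRUE = 0x1;
    constexpr uint32_t CLK_FILTER_LINEAR = 0x20;

    enum class Addressing { None, ClampToEdge, Clamp, Repeat, RepeatMirrored };
    enum class Filter { Nearest, Linear };

    // Decode a sampler literal into the OpConstantSampler operand triple.
    std::tuple<Addressing, unsigned, Filter> decodeSampler(uint32_t Bitmask) {
      Addressing Addr = Addressing::None;
      switch (Bitmask & CLK_ADDRESS_MODE_MASK) {
      case CLK_ADDRESS_CLAMP:           Addr = Addressing::Clamp; break;
      case CLK_ADDRESS_CLAMP_TO_EDGE:   Addr = Addressing::ClampToEdge; break;
      case CLK_ADDRESS_REPEAT:          Addr = Addressing::Repeat; break;
      case CLK_ADDRESS_MIRRORED_REPEAT: Addr = Addressing::RepeatMirrored; break;
      }
      unsigned Normalized = (Bitmask & CLK_NORMALIZED_COORDS_TRUE) ? 1 : 0;
      Filter F = (Bitmask & CLK_FILTER_LINEAR) ? Filter::Linear : Filter::Nearest;
      return {Addr, Normalized, F};
    }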
1715 const SPIRV::IncomingCall *Call, in generateReadImageInst()
1720 MRI->setRegClass(Image, &SPIRV::IDRegClass); in generateReadImageInst()
1721 MRI->setRegClass(Call->Arguments[1], &SPIRV::IDRegClass); in generateReadImageInst()
1725 MRI->setRegClass(Call->Arguments[2], &SPIRV::IDRegClass); in generateReadImageInst()
1729 if (!GR->isScalarOfType(Sampler, SPIRV::OpTypeSampler) && in generateReadImageInst()
1741 Register SampledImage = MRI->createVirtualRegister(&SPIRV::IDRegClass); in generateReadImageInst()
1743 MIRBuilder.buildInstr(SPIRV::OpSampledImage) in generateReadImageInst()
1753 if (TempType->getOpcode() != SPIRV::OpTypeVector) { in generateReadImageInst()
1760 MRI->setRegClass(TempRegister, &SPIRV::IDRegClass); in generateReadImageInst()
1763 MIRBuilder.buildInstr(SPIRV::OpImageSampleExplicitLod) in generateReadImageInst()
1768 .addImm(SPIRV::ImageOperand::Lod) in generateReadImageInst()
1772 MIRBuilder.buildInstr(SPIRV::OpCompositeExtract) in generateReadImageInst()
1778 MIRBuilder.buildInstr(SPIRV::OpImageRead) in generateReadImageInst()
1783 .addImm(SPIRV::ImageOperand::Sample) in generateReadImageInst()
1786 MIRBuilder.buildInstr(SPIRV::OpImageRead) in generateReadImageInst()
1795 static bool generateWriteImageInst(const SPIRV::IncomingCall *Call, in generateWriteImageInst()
1798 MIRBuilder.getMRI()->setRegClass(Call->Arguments[0], &SPIRV::IDRegClass); in generateWriteImageInst()
1799 MIRBuilder.getMRI()->setRegClass(Call->Arguments[1], &SPIRV::IDRegClass); in generateWriteImageInst()
1800 MIRBuilder.getMRI()->setRegClass(Call->Arguments[2], &SPIRV::IDRegClass); in generateWriteImageInst()
1801 MIRBuilder.buildInstr(SPIRV::OpImageWrite) in generateWriteImageInst()
1809 const SPIRV::IncomingCall *Call, in generateSampleImageInst()
1831 : MRI->createVirtualRegister(&SPIRV::IDRegClass); in generateSampleImageInst()
1832 MIRBuilder.buildInstr(SPIRV::OpSampledImage) in generateSampleImageInst()
1855 MRI->setRegClass(Call->Arguments[0], &SPIRV::IDRegClass); in generateSampleImageInst()
1856 MRI->setRegClass(Call->Arguments[1], &SPIRV::IDRegClass); in generateSampleImageInst()
1857 MRI->setRegClass(Call->Arguments[3], &SPIRV::IDRegClass); in generateSampleImageInst()
1859 MIRBuilder.buildInstr(SPIRV::OpImageSampleExplicitLod) in generateSampleImageInst()
1864 .addImm(SPIRV::ImageOperand::Lod) in generateSampleImageInst()
1871 static bool generateSelectInst(const SPIRV::IncomingCall *Call, in generateSelectInst()
1878 static bool generateConstructInst(const SPIRV::IncomingCall *Call, in generateConstructInst()
1881 return buildOpFromWrapper(MIRBuilder, SPIRV::OpCompositeConstruct, Call, in generateConstructInst()
1885 static bool generateCoopMatrInst(const SPIRV::IncomingCall *Call, in generateCoopMatrInst()
1888 const SPIRV::DemangledBuiltin *Builtin = Call->Builtin; in generateCoopMatrInst()
1890 SPIRV::lookupNativeBuiltin(Builtin->Name, Builtin->Set)->Opcode; in generateCoopMatrInst()
1891 bool IsSet = Opcode != SPIRV::OpCooperativeMatrixStoreKHR; in generateCoopMatrInst()
1894 if (Opcode == SPIRV::OpCooperativeMatrixLoadKHR && ArgSz > 3) in generateCoopMatrInst()
1896 else if (Opcode == SPIRV::OpCooperativeMatrixStoreKHR && ArgSz > 4) in generateCoopMatrInst()
1903 if (Opcode == SPIRV::OpCooperativeMatrixLengthKHR) { in generateCoopMatrInst()
1917 static bool generateSpecConstantInst(const SPIRV::IncomingCall *Call, in generateSpecConstantInst()
1921 const SPIRV::DemangledBuiltin *Builtin = Call->Builtin; in generateSpecConstantInst()
1923 SPIRV::lookupNativeBuiltin(Builtin->Name, Builtin->Set)->Opcode; in generateSpecConstantInst()
1927 case SPIRV::OpSpecConstant: { in generateSpecConstantInst()
1931 buildOpDecorate(Call->ReturnRegister, MIRBuilder, SPIRV::Decoration::SpecId, in generateSpecConstantInst()
1942 if (Call->ReturnType->getOpcode() == SPIRV::OpTypeBool) { in generateSpecConstantInst()
1945 ? SPIRV::OpSpecConstantTrue in generateSpecConstantInst()
1946 : SPIRV::OpSpecConstantFalse; in generateSpecConstantInst()
1952 if (Call->ReturnType->getOpcode() != SPIRV::OpTypeBool) { in generateSpecConstantInst()
1960 case SPIRV::OpSpecConstantComposite: { in generateSpecConstantInst()
1973 static bool buildNDRange(const SPIRV::IncomingCall *Call, in buildNDRange()
1977 MRI->setRegClass(Call->Arguments[0], &SPIRV::IDRegClass); in buildNDRange()
1979 assert(PtrType->getOpcode() == SPIRV::OpTypePointer && in buildNDRange()
1984 Register TmpReg = MRI->createVirtualRegister(&SPIRV::IDRegClass); in buildNDRange()
1991 MRI->setRegClass(GlobalWorkSize, &SPIRV::IDRegClass); in buildNDRange()
1995 MRI->setRegClass(LocalWorkSize, &SPIRV::IDRegClass); in buildNDRange()
1998 MRI->setRegClass(GlobalWorkOffset, &SPIRV::IDRegClass); in buildNDRange()
2002 if (SpvTy->getOpcode() == SPIRV::OpTypePointer) { in buildNDRange()
2008 MRI->setRegClass(GWSPtr, &SPIRV::IDRegClass); in buildNDRange()
2015 GlobalWorkSize = MRI->createVirtualRegister(&SPIRV::IDRegClass); in buildNDRange()
2017 MIRBuilder.buildInstr(SPIRV::OpLoad) in buildNDRange()
2034 MIRBuilder.buildInstr(SPIRV::OpBuildNDRange) in buildNDRange()
2040 return MIRBuilder.buildInstr(SPIRV::OpStore) in buildNDRange()
2055 unsigned SC0 = storageClassToAddressSpace(SPIRV::StorageClass::Function); in getOrCreateSPIRVDeviceEventPointer()
2056 unsigned SC1 = storageClassToAddressSpace(SPIRV::StorageClass::Generic); in getOrCreateSPIRVDeviceEventPointer()
2061 static bool buildEnqueueKernel(const SPIRV::IncomingCall *Call, in buildEnqueueKernel()
2086 unsigned SC = storageClassToAddressSpace(SPIRV::StorageClass::Generic); in buildEnqueueKernel()
2089 Int32Ty, MIRBuilder, SPIRV::StorageClass::Function); in buildEnqueueKernel()
2091 Register Reg = MRI->createVirtualRegister(&SPIRV::IDRegClass); in buildEnqueueKernel()
2106 auto MIB = MIRBuilder.buildInstr(SPIRV::OpEnqueueKernel) in buildEnqueueKernel()
2146 static bool generateEnqueueInst(const SPIRV::IncomingCall *Call, in generateEnqueueInst()
2150 const SPIRV::DemangledBuiltin *Builtin = Call->Builtin; in generateEnqueueInst()
2152 SPIRV::lookupNativeBuiltin(Builtin->Name, Builtin->Set)->Opcode; in generateEnqueueInst()
2155 case SPIRV::OpRetainEvent: in generateEnqueueInst()
2156 case SPIRV::OpReleaseEvent: in generateEnqueueInst()
2157 MIRBuilder.getMRI()->setRegClass(Call->Arguments[0], &SPIRV::IDRegClass); in generateEnqueueInst()
2159 case SPIRV::OpCreateUserEvent: in generateEnqueueInst()
2160 case SPIRV::OpGetDefaultQueue: in generateEnqueueInst()
2164 case SPIRV::OpIsValidEvent: in generateEnqueueInst()
2165 MIRBuilder.getMRI()->setRegClass(Call->Arguments[0], &SPIRV::IDRegClass); in generateEnqueueInst()
2170 case SPIRV::OpSetUserEventStatus: in generateEnqueueInst()
2171 MIRBuilder.getMRI()->setRegClass(Call->Arguments[0], &SPIRV::IDRegClass); in generateEnqueueInst()
2172 MIRBuilder.getMRI()->setRegClass(Call->Arguments[1], &SPIRV::IDRegClass); in generateEnqueueInst()
2176 case SPIRV::OpCaptureEventProfilingInfo: in generateEnqueueInst()
2177 MIRBuilder.getMRI()->setRegClass(Call->Arguments[0], &SPIRV::IDRegClass); in generateEnqueueInst()
2178 MIRBuilder.getMRI()->setRegClass(Call->Arguments[1], &SPIRV::IDRegClass); in generateEnqueueInst()
2179 MIRBuilder.getMRI()->setRegClass(Call->Arguments[2], &SPIRV::IDRegClass); in generateEnqueueInst()
2184 case SPIRV::OpBuildNDRange: in generateEnqueueInst()
2186 case SPIRV::OpEnqueueKernel: in generateEnqueueInst()
2193 static bool generateAsyncCopy(const SPIRV::IncomingCall *Call, in generateAsyncCopy()
2197 const SPIRV::DemangledBuiltin *Builtin = Call->Builtin; in generateAsyncCopy()
2199 SPIRV::lookupNativeBuiltin(Builtin->Name, Builtin->Set)->Opcode; in generateAsyncCopy()
2201 bool IsSet = Opcode == SPIRV::OpGroupAsyncCopy; in generateAsyncCopy()
2207 auto Scope = buildConstantIntReg(SPIRV::Scope::Workgroup, MIRBuilder, GR); in generateAsyncCopy()
2210 case SPIRV::OpGroupAsyncCopy: { in generateAsyncCopy()
2212 Call->ReturnType->getOpcode() == SPIRV::OpTypeEvent in generateAsyncCopy()
2234 case SPIRV::OpGroupWaitEvents: in generateAsyncCopy()
2245 const SPIRV::IncomingCall *Call, in generateConvertInst()
2249 const SPIRV::ConvertBuiltin *Builtin = in generateConvertInst()
2250 SPIRV::lookupConvertBuiltin(Call->Builtin->Name, Call->Builtin->Set); in generateConvertInst()
2253 const SPIRV::DemangledBuiltin *Builtin = Call->Builtin; in generateConvertInst()
2255 SPIRV::lookupNativeBuiltin(Builtin->Name, Builtin->Set)->Opcode; in generateConvertInst()
2262 SPIRV::Decoration::SaturatedConversion, {}); in generateConvertInst()
2265 SPIRV::Decoration::FPRoundingMode, in generateConvertInst()
2270 unsigned Opcode = SPIRV::OpNop; in generateConvertInst()
2271 if (GR->isScalarOrVectorOfType(Call->Arguments[0], SPIRV::OpTypeInt)) { in generateConvertInst()
2273 if (GR->isScalarOrVectorOfType(Call->ReturnRegister, SPIRV::OpTypeInt)) { in generateConvertInst()
2276 Opcode = Builtin->IsDestinationSigned ? SPIRV::OpSatConvertUToS in generateConvertInst()
2277 : SPIRV::OpSatConvertSToU; in generateConvertInst()
2279 Opcode = Builtin->IsDestinationSigned ? SPIRV::OpUConvert in generateConvertInst()
2280 : SPIRV::OpSConvert; in generateConvertInst()
2282 SPIRV::OpTypeFloat)) { in generateConvertInst()
2288 SPIRV::Extension::SPV_INTEL_bfloat16_conversion)) in generateConvertInst()
2293 Opcode = SPIRV::OpConvertBF16ToFINTEL; in generateConvertInst()
2297 Opcode = IsSourceSigned ? SPIRV::OpConvertSToF : SPIRV::OpConvertUToF; in generateConvertInst()
2301 SPIRV::OpTypeFloat)) { in generateConvertInst()
2303 if (GR->isScalarOrVectorOfType(Call->ReturnRegister, SPIRV::OpTypeInt)) { in generateConvertInst()
2309 SPIRV::Extension::SPV_INTEL_bfloat16_conversion)) in generateConvertInst()
2314 Opcode = SPIRV::OpConvertFToBF16INTEL; in generateConvertInst()
2316 Opcode = Builtin->IsDestinationSigned ? SPIRV::OpConvertFToS in generateConvertInst()
2317 : SPIRV::OpConvertFToU; in generateConvertInst()
2320 SPIRV::OpTypeFloat)) { in generateConvertInst()
2322 Opcode = SPIRV::OpFConvert; in generateConvertInst()
2339 assert(Opcode != SPIRV::OpNop && in generateConvertInst()
2349 static bool generateVectorLoadStoreInst(const SPIRV::IncomingCall *Call, in generateVectorLoadStoreInst()
2353 const SPIRV::VectorLoadStoreBuiltin *Builtin = in generateVectorLoadStoreInst()
2354 SPIRV::lookupVectorLoadStoreBuiltin(Call->Builtin->Name, in generateVectorLoadStoreInst()
2358 MIRBuilder.buildInstr(SPIRV::OpExtInst) in generateVectorLoadStoreInst()
2361 .addImm(static_cast<uint32_t>(SPIRV::InstructionSet::OpenCL_std)) in generateVectorLoadStoreInst()
2375 static bool generateLoadStoreInst(const SPIRV::IncomingCall *Call, in generateLoadStoreInst()
2379 const SPIRV::DemangledBuiltin *Builtin = Call->Builtin; in generateLoadStoreInst()
2381 SPIRV::lookupNativeBuiltin(Builtin->Name, Builtin->Set)->Opcode; in generateLoadStoreInst()
2382 bool IsLoad = Opcode == SPIRV::OpLoad; in generateLoadStoreInst()
2392 MRI->setRegClass(Call->Arguments[0], &SPIRV::IDRegClass); in generateLoadStoreInst()
2396 MRI->setRegClass(Call->Arguments[1], &SPIRV::IDRegClass); in generateLoadStoreInst()
2402 MRI->setRegClass(Call->Arguments[IsLoad ? 1 : 2], &SPIRV::IDRegClass); in generateLoadStoreInst()
2406 MRI->setRegClass(Call->Arguments[IsLoad ? 2 : 3], &SPIRV::IDRegClass); in generateLoadStoreInst()
2411 namespace SPIRV { namespace
2421 SPIRV::InstructionSet::InstructionSet Set) { in mapBuiltinToOpcode()
2430 case SPIRV::Relational: in mapBuiltinToOpcode()
2431 case SPIRV::Atomic: in mapBuiltinToOpcode()
2432 case SPIRV::Barrier: in mapBuiltinToOpcode()
2433 case SPIRV::CastToPtr: in mapBuiltinToOpcode()
2434 case SPIRV::ImageMiscQuery: in mapBuiltinToOpcode()
2435 case SPIRV::SpecConstant: in mapBuiltinToOpcode()
2436 case SPIRV::Enqueue: in mapBuiltinToOpcode()
2437 case SPIRV::AsyncCopy: in mapBuiltinToOpcode()
2438 case SPIRV::LoadStore: in mapBuiltinToOpcode()
2439 case SPIRV::CoopMatr: in mapBuiltinToOpcode()
2441 SPIRV::lookupNativeBuiltin(Call->Builtin->Name, Call->Builtin->Set)) in mapBuiltinToOpcode()
2444 case SPIRV::Extended: in mapBuiltinToOpcode()
2445 if (const auto *R = SPIRV::lookupExtendedBuiltin(Call->Builtin->Name, in mapBuiltinToOpcode()
2449 case SPIRV::VectorLoadStore: in mapBuiltinToOpcode()
2450 if (const auto *R = SPIRV::lookupVectorLoadStoreBuiltin(Call->Builtin->Name, in mapBuiltinToOpcode()
2452 return std::make_tuple(SPIRV::Extended, 0, R->Number); in mapBuiltinToOpcode()
2454 case SPIRV::Group: in mapBuiltinToOpcode()
2455 if (const auto *R = SPIRV::lookupGroupBuiltin(Call->Builtin->Name)) in mapBuiltinToOpcode()
2458 case SPIRV::AtomicFloating: in mapBuiltinToOpcode()
2459 if (const auto *R = SPIRV::lookupAtomicFloatingBuiltin(Call->Builtin->Name)) in mapBuiltinToOpcode()
2462 case SPIRV::IntelSubgroups: in mapBuiltinToOpcode()
2463 if (const auto *R = SPIRV::lookupIntelSubgroupsBuiltin(Call->Builtin->Name)) in mapBuiltinToOpcode()
2466 case SPIRV::GroupUniform: in mapBuiltinToOpcode()
2467 if (const auto *R = SPIRV::lookupGroupUniformBuiltin(Call->Builtin->Name)) in mapBuiltinToOpcode()
2470 case SPIRV::WriteImage: in mapBuiltinToOpcode()
2471 return std::make_tuple(Call->Builtin->Group, SPIRV::OpImageWrite, 0); in mapBuiltinToOpcode()
2472 case SPIRV::Select: in mapBuiltinToOpcode()
2474 case SPIRV::Construct: in mapBuiltinToOpcode()
2475 return std::make_tuple(Call->Builtin->Group, SPIRV::OpCompositeConstruct, in mapBuiltinToOpcode()
2477 case SPIRV::KernelClock: in mapBuiltinToOpcode()
2478 return std::make_tuple(Call->Builtin->Group, SPIRV::OpReadClockKHR, 0); in mapBuiltinToOpcode()
2486 SPIRV::InstructionSet::InstructionSet Set, in lowerBuiltin()
2499 MIRBuilder.getMRI()->setRegClass(ReturnRegister, &SPIRV::IDRegClass); in lowerBuiltin()
2523 case SPIRV::Extended: in lowerBuiltin()
2525 case SPIRV::Relational: in lowerBuiltin()
2527 case SPIRV::Group: in lowerBuiltin()
2529 case SPIRV::Variable: in lowerBuiltin()
2531 case SPIRV::Atomic: in lowerBuiltin()
2533 case SPIRV::AtomicFloating: in lowerBuiltin()
2535 case SPIRV::Barrier: in lowerBuiltin()
2537 case SPIRV::CastToPtr: in lowerBuiltin()
2539 case SPIRV::Dot: in lowerBuiltin()
2541 case SPIRV::Wave: in lowerBuiltin()
2543 case SPIRV::GetQuery: in lowerBuiltin()
2545 case SPIRV::ImageSizeQuery: in lowerBuiltin()
2547 case SPIRV::ImageMiscQuery: in lowerBuiltin()
2549 case SPIRV::ReadImage: in lowerBuiltin()
2551 case SPIRV::WriteImage: in lowerBuiltin()
2553 case SPIRV::SampleImage: in lowerBuiltin()
2555 case SPIRV::Select: in lowerBuiltin()
2557 case SPIRV::Construct: in lowerBuiltin()
2559 case SPIRV::SpecConstant: in lowerBuiltin()
2561 case SPIRV::Enqueue: in lowerBuiltin()
2563 case SPIRV::AsyncCopy: in lowerBuiltin()
2565 case SPIRV::Convert: in lowerBuiltin()
2567 case SPIRV::VectorLoadStore: in lowerBuiltin()
2569 case SPIRV::LoadStore: in lowerBuiltin()
2571 case SPIRV::IntelSubgroups: in lowerBuiltin()
2573 case SPIRV::GroupUniform: in lowerBuiltin()
2575 case SPIRV::KernelClock: in lowerBuiltin()
2577 case SPIRV::CoopMatr: in lowerBuiltin()
2676 const SPIRV::BuiltinType *TypeRecord, in getNonParameterizedType()
2697 SPIRV::AccessQualifier::AccessQualifier( in getPipeType()
2719 const SPIRV::AccessQualifier::AccessQualifier Qualifier, in getImageType()
2730 SPIRV::Dim::Dim(ExtensionType->getIntParameter(0)), in getImageType()
2733 SPIRV::ImageFormat::ImageFormat(ExtensionType->getIntParameter(5)), in getImageType()
2734 Qualifier == SPIRV::AccessQualifier::WriteOnly in getImageType()
2735 ? SPIRV::AccessQualifier::WriteOnly in getImageType()
2736 : SPIRV::AccessQualifier::AccessQualifier( in getImageType()
2744 OpaqueType, SPIRV::AccessQualifier::ReadOnly, MIRBuilder, GR); in getSampledImageType()
2749 namespace SPIRV { namespace
2758 const SPIRV::OpenCLType *OCLTypeRecord = in parseBuiltinTypeNameToTargetExtType()
2759 SPIRV::lookupOpenCLType(NameWithParameters); in parseBuiltinTypeNameToTargetExtType()
2800 SPIRV::AccessQualifier::AccessQualifier AccessQual, in lowerBuiltinType()
2822 const SPIRV::BuiltinType *TypeRecord = SPIRV::lookupBuiltinType(Name); in lowerBuiltinType()
2832 case SPIRV::OpTypeImage: in lowerBuiltinType()
2835 case SPIRV::OpTypePipe: in lowerBuiltinType()
2838 case SPIRV::OpTypeDeviceEvent: in lowerBuiltinType()
2841 case SPIRV::OpTypeSampler: in lowerBuiltinType()
2844 case SPIRV::OpTypeSampledImage: in lowerBuiltinType()
2847 case SPIRV::OpTypeCooperativeMatrixKHR: in lowerBuiltinType()