
Searched refs:TensorSpec (Results 1 – 23 of 23) sorted by relevance

/freebsd/contrib/llvm-project/llvm/include/llvm/Analysis/
TensorSpec.h
62 class TensorSpec final {
65 static TensorSpec createSpec(const std::string &Name,
68 return TensorSpec(Name, Port, getDataType<T>(), sizeof(T), Shape);
76 bool operator==(const TensorSpec &Other) const {
81 bool operator!=(const TensorSpec &Other) const { return !(*this == Other); }
94 TensorSpec(const std::string &NewName, const TensorSpec &Other) in TensorSpec() function
95 : TensorSpec(NewName, Other.Port, Other.Type, Other.ElementSize, in TensorSpec()
101 TensorSpec(const std::string &Name, int Port, TensorType Type,
115 std::string tensorValueToString(const char *Buffer, const TensorSpec &Spec);
124 std::optional<TensorSpec> getTensorSpecFromJSON(LLVMContext &Ctx,
[all …]
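
For orientation, a minimal C++ sketch (not part of the indexed sources) of how the TensorSpec API declared above is typically used; the tensor name "some_feature" and its shape are invented for illustration:

#include "llvm/Analysis/TensorSpec.h"

#include <cassert>

using namespace llvm;

void tensorSpecSketch() {
  // Describe a 1-D tensor of 10 int64_t elements; "some_feature" is an
  // invented name, and the trailing Port argument defaults to 0.
  TensorSpec Feature = TensorSpec::createSpec<int64_t>("some_feature", {10});

  // Equality compares name, port, element type and shape (operator== above).
  assert(Feature == TensorSpec::createSpec<int64_t>("some_feature", {10}));

  // Element-type checks like these gate loadOutputSpecs() in
  // ModelUnderTrainingRunner.cpp further down in these results.
  assert(Feature.isElementType<int64_t>());
  assert(!Feature.isElementType<float>());
}
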
ModelUnderTrainingRunner.h
15 #include "llvm/Analysis/TensorSpec.h"
38 const std::vector<TensorSpec> &extraOutputsForLoggingSpecs() const { in extraOutputsForLoggingSpecs()
57 const std::vector<TensorSpec> &InputSpecs,
62 const std::vector<TensorSpec> &InputSpecs,
63 const std::vector<TensorSpec> &OutputSpecs,
64 const std::vector<TensorSpec> &ExtraOutputsForLogging = {});
70 const std::vector<TensorSpec> OutputSpecs;
71 const std::vector<TensorSpec> ExtraOutputsForLogging;
InteractiveModelRunner.h
44 const std::vector<TensorSpec> &Inputs,
45 const TensorSpec &Advice, StringRef OutboundName,
63 const std::vector<TensorSpec> InputSpecs;
64 const TensorSpec OutputSpec;
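
A hedged sketch of constructing the InteractiveModelRunner declared above, following the constructor shown here and in InteractiveModelRunner.cpp below; the feature names and the two pipe file names are invented:

#include "llvm/Analysis/InteractiveModelRunner.h"
#include "llvm/Analysis/MLModelRunner.h"
#include "llvm/Analysis/TensorSpec.h"
#include "llvm/IR/LLVMContext.h"

#include <memory>
#include <vector>

using namespace llvm;

std::unique_ptr<MLModelRunner> makeRunner(LLVMContext &Ctx) {
  // Input features the compiler sends to the external model, plus the single
  // advice tensor it reads back (names here are illustrative only).
  std::vector<TensorSpec> Inputs{
      TensorSpec::createSpec<int64_t>("caller_size", {1}),
      TensorSpec::createSpec<int64_t>("callee_size", {1})};
  TensorSpec Advice = TensorSpec::createSpec<int64_t>("inlining_decision", {1});

  // OutboundName / InboundName are the pipe files used to talk to the model
  // host process; both names are made up and the pipes must already exist.
  return std::make_unique<InteractiveModelRunner>(
      Ctx, Inputs, Advice, "compiler_to_model", "model_to_compiler");
}
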
InlineModelFeatureMaps.h
156 extern const std::vector<TensorSpec> FeatureMap;
159 extern const TensorSpec InlineDecisionSpec;
161 extern const TensorSpec DefaultDecisionSpec;
ReleaseModeModelRunner.h
74 TensorSpec::createSpec<uint64_t>("model_selector", {2}),
119 void populateTensor(size_t Pos, const TensorSpec &Spec, StringRef Prefix, in populateTensor()
NoInferenceModelRunner.h
23 const std::vector<TensorSpec> &Inputs);
MLModelRunner.h
13 #include "llvm/Analysis/TensorSpec.h"
63 void setUpBufferForTensor(size_t Index, const TensorSpec &Spec, in setUpBufferForTensor()
/freebsd/contrib/llvm-project/llvm/lib/Analysis/
ModelUnderTrainingRunner.cpp
25 TensorSpec Spec;
60 if (auto TensorSpec = getTensorSpecFromJSON(Ctx, *SpecPart)) in loadOutputSpecs() local
62 if (!TensorSpec->isElementType<int64_t>() && in loadOutputSpecs()
63 !TensorSpec->isElementType<int32_t>() && in loadOutputSpecs()
64 !TensorSpec->isElementType<float>()) { in loadOutputSpecs()
68 TensorSpec->name()); in loadOutputSpecs()
71 Ret.push_back({*TensorSpec, LoggingName->str()}); in loadOutputSpecs()
95 const std::vector<TensorSpec> &InputSpecs, in ModelUnderTrainingRunner()
96 const std::vector<TensorSpec> &OutputSpecs, in ModelUnderTrainingRunner()
97 const std::vector<TensorSpec> &ExtraOutputsForLogging) in ModelUnderTrainingRunner()
[all …]
TensorSpec.cpp
33 template <> TensorType TensorSpec::getDataType<T>() { return TensorType::E; }
50 void TensorSpec::toJSON(json::OStream &OS) const { in toJSON()
62 TensorSpec::TensorSpec(const std::string &Name, int Port, TensorType Type, in TensorSpec() function in llvm::TensorSpec
69 std::optional<TensorSpec> getTensorSpecFromJSON(LLVMContext &Ctx, in getTensorSpecFromJSON()
72 [&](const llvm::Twine &Message) -> std::optional<TensorSpec> { in getTensorSpecFromJSON()
101 return TensorSpec::createSpec<T>(TensorName, TensorShape, TensorPort); in getTensorSpecFromJSON()
107 std::string tensorValueToString(const char *Buffer, const TensorSpec &Spec) { in tensorValueToString()
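
A sketch of feeding getTensorSpecFromJSON, assuming the name/port/type/shape JSON layout that TensorSpec.cpp parses; the "reward" spec and its shape are an invented example:

#include "llvm/Analysis/TensorSpec.h"
#include "llvm/IR/LLVMContext.h"
#include "llvm/Support/Error.h"
#include "llvm/Support/JSON.h"

#include <optional>

using namespace llvm;

std::optional<TensorSpec> parseSpecSketch(LLVMContext &Ctx) {
  // JSON field names assumed from the parser in TensorSpec.cpp.
  Expected<json::Value> Parsed = json::parse(
      R"({"name": "reward", "port": 0, "type": "float", "shape": [1]})");
  if (!Parsed) {
    consumeError(Parsed.takeError());
    return std::nullopt;
  }
  // Returns std::nullopt (and reports through the context) on malformed input.
  return getTensorSpecFromJSON(Ctx, *Parsed);
}
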
TFLiteUtils.cpp
56 const std::vector<TensorSpec> &InputSpecs,
57 const std::vector<TensorSpec> &OutputSpecs,
95 const TensorSpec &Spec);
101 StringRef SavedModelPath, const std::vector<TensorSpec> &InputSpecs, in TFModelEvaluatorImpl()
102 const std::vector<TensorSpec> &OutputSpecs, const char *Tags = "serve") in TFModelEvaluatorImpl()
185 const std::vector<TensorSpec> &InputSpecs, in TFModelEvaluator()
186 const std::vector<TensorSpec> &OutputSpecs, in TFModelEvaluator()
197 const TensorSpec &Spec) { in checkReportAndInvalidate()
TrainingLogger.cpp
13 #include "llvm/Analysis/TensorSpec.h"
30 void Logger::writeHeader(std::optional<TensorSpec> AdviceSpec) {
83 const std::vector<TensorSpec> &FeatureSpecs, in logRewardImpl()
84 const TensorSpec &RewardSpec, bool IncludeReward, in logRewardImpl()
85 std::optional<TensorSpec> AdviceSpec) in logRewardImpl()
InlineSizeEstimatorAnalysis.cpp
221 std::vector<TensorSpec> InputSpecs{TensorSpec::createSpec<int32_t>( in InlineSizeEstimatorAnalysis()
225 std::vector<TensorSpec> OutputSpecs{ in InlineSizeEstimatorAnalysis()
226 TensorSpec::createSpec<float>("StatefulPartitionedCall", {1})}; in InlineSizeEstimatorAnalysis()
InteractiveModelRunner.cpp
27 LLVMContext &Ctx, const std::vector<TensorSpec> &Inputs, in InteractiveModelRunner()
28 const TensorSpec &Advice, StringRef OutboundName, StringRef InboundName) in InteractiveModelRunner()
MLInlineAdvisor.cpp
110 const std::vector<TensorSpec> llvm::FeatureMap{
111 #define POPULATE_NAMES(DTYPE, SHAPE, NAME, __) TensorSpec::createSpec<DTYPE>(#NAME, SHAPE),
122 const TensorSpec llvm::InlineDecisionSpec =
123 TensorSpec::createSpec<int64_t>(DecisionName, {1});
125 const TensorSpec llvm::DefaultDecisionSpec =
126 TensorSpec::createSpec<int64_t>(DefaultDecisionName, {1});
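
The FeatureMap definition above is built by expanding a feature-list macro with POPULATE_NAMES. A simplified stand-in showing the expansion pattern (EXAMPLE_FEATURE_ITERATOR and both feature names are invented, not LLVM's actual feature list):

#include "llvm/Analysis/TensorSpec.h"

#include <vector>

using namespace llvm;

// Hypothetical feature list; each entry is (type, shape, name, description).
#define EXAMPLE_FEATURE_ITERATOR(M)                                            \
  M(int64_t, {1}, callee_basic_block_count, "callee size proxy")               \
  M(int64_t, {1}, callsite_height, "distance from call graph roots")

// Same expansion shape as the POPULATE_NAMES macro shown above.
#define POPULATE_NAMES(DTYPE, SHAPE, NAME, __)                                 \
  TensorSpec::createSpec<DTYPE>(#NAME, SHAPE),

static const std::vector<TensorSpec> ExampleFeatureMap{
    EXAMPLE_FEATURE_ITERATOR(POPULATE_NAMES)};
#undef POPULATE_NAMES
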
NoInferenceModelRunner.cpp
18 LLVMContext &Ctx, const std::vector<TensorSpec> &Inputs) in NoInferenceModelRunner()
DevelopmentModeInlineAdvisor.cpp
48 - "tensor_spec, followed by the TensorSpec description of the
/freebsd/contrib/llvm-project/llvm/include/llvm/Analysis/Utils/
TrainingLogger.h
93 const std::vector<TensorSpec> FeatureSpecs;
94 const TensorSpec RewardSpec;
99 void writeHeader(std::optional<TensorSpec> AdviceSpec);
100 void writeTensor(const TensorSpec &Spec, const char *RawData) { in writeTensor()
113 const std::vector<TensorSpec> &FeatureSpecs,
114 const TensorSpec &RewardSpec, bool IncludeReward,
115 std::optional<TensorSpec> AdviceSpec = std::nullopt);
TFUtils.h
16 #include "llvm/Analysis/TensorSpec.h"
77 const std::vector<TensorSpec> &InputSpecs,
78 const std::vector<TensorSpec> &OutputSpecs,
/freebsd/contrib/llvm-project/llvm/lib/CodeGen/
MLRegallocPriorityAdvisor.cpp
MLRegAllocPriorityAdvisor.cpp
80 static const TensorSpec DecisionSpec =
81 TensorSpec::createSpec<float>(DecisionName, {1});
114 TensorSpec::createSpec<type>(#name, shape),
116 static const std::vector<TensorSpec> InputFeatures{
165 static const TensorSpec Reward = TensorSpec::createSpec<float>("reward", {1});
168 TensorSpec::createSpec<type>(std::string("action_") + #name, shape),
170 static const std::vector<TensorSpec> TrainingInputFeatures{
172 TensorSpec::createSpec<float>("action_discount", {1}),
173 TensorSpec::createSpec<int32_t>("action_step_type", {1}),
174 TensorSpec::createSpec<float>("action_reward", {1})}};
[all …]
MLRegallocEvictAdvisor.cpp
MLRegAllocEvictAdvisor.cpp
227 static const TensorSpec DecisionSpec =
228 TensorSpec::createSpec<int64_t>(DecisionName, {1});
364 TensorSpec::createSpec<type>(#name, shape),
388 std::vector<TensorSpec> InputFeatures;
422 static const TensorSpec Reward = TensorSpec::createSpec<float>("reward", {1});
429 TensorSpec::createSpec<type>(std::string("action_") + #name, shape),
461 TensorSpec::createSpec<float>("action_discount", {1}), in DevelopmentModeEvictionAdvisorAnalysis()
462 TensorSpec::createSpec<int32_t>("action_step_type", {1}), in DevelopmentModeEvictionAdvisorAnalysis()
463 TensorSpec::createSpec<float>("action_reward", {1})}; in DevelopmentModeEvictionAdvisorAnalysis()
468 TensorSpec::createSpec<float>("action_discount", {1}), in DevelopmentModeEvictionAdvisorAnalysis()
[all …]
/freebsd/lib/clang/libllvm/
Makefile
153 SRCS_MIN+= Analysis/TensorSpec.cpp