Lines matching refs:Runner (every reference to the MLModelRunner handle in the ML register-allocation priority advisor source):
95 SlotIndexes *const Indexes, MLModelRunner *Runner);
102 // The assumption is that if the Runner could not be constructed, we emitted
104 const MLModelRunner &getRunner() const { return *Runner; }
110 MLModelRunner *const Runner;
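Read together, lines 95, 102-104, and 110 outline the advisor class itself: it is constructed with a borrowed MLModelRunner pointer, exposes it through getRunner(), and stores it as a const, non-owning member. A minimal sketch of that declaration, reconstructed from these matches (the base class and any omitted members are assumptions, not verbatim source):

// Sketch reconstructed from the matches above; the base class and omitted
// members are assumptions.
class MLPriorityAdvisor : public RegAllocPriorityAdvisor {
public:
  MLPriorityAdvisor(const MachineFunction &MF, const RAGreedy &RA,
                    SlotIndexes *const Indexes, MLModelRunner *Runner);

protected:
  // If the Runner could not be constructed, an error was already emitted,
  // so this accessor is never reached with a null pointer.
  const MLModelRunner &getRunner() const { return *Runner; }

private:
  MLModelRunner *const Runner; // borrowed; the advisor never owns it
};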
143 if (!Runner) {
145 Runner = std::make_unique<ReleaseModeModelRunner<CompiledModelType>>(
148 Runner = std::make_unique<InteractiveModelRunner>(
154 MF, RA, &getAnalysis<SlotIndexesWrapperPass>().getSI(), Runner.get());
156 std::unique_ptr<MLModelRunner> Runner;
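Lines 143-156 show the release-mode analysis lazily creating its runner on first use: an AOT-compiled ReleaseModeModelRunner by default, or an InteractiveModelRunner when an interactive channel is configured, with ownership held in the unique_ptr member on line 156 and only a raw pointer handed to each advisor. A rough sketch of that control flow, assuming names such as InteractiveChannelBaseName, InputFeatures, DecisionName, and DecisionSpec for the pieces the matches do not show:

// Sketch of the lazy construction on lines 143-156; the argument lists and
// option names are illustrative assumptions, not the exact upstream code.
std::unique_ptr<RegAllocPriorityAdvisor>
getAdvisor(const MachineFunction &MF, const RAGreedy &RA) /*override*/ {
  if (!Runner) {
    if (InteractiveChannelBaseName.empty())
      Runner = std::make_unique<ReleaseModeModelRunner<CompiledModelType>>(
          MF.getFunction().getContext(), InputFeatures, DecisionName);
    else
      Runner = std::make_unique<InteractiveModelRunner>(
          MF.getFunction().getContext(), InputFeatures, DecisionSpec,
          InteractiveChannelBaseName + ".out",
          InteractiveChannelBaseName + ".in");
  }
  // The advisor gets Runner.get(); the analysis keeps ownership (line 156).
  return std::make_unique<MLPriorityAdvisor>(
      MF, RA, &getAnalysis<SlotIndexesWrapperPass>().getSI(), Runner.get());
}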
181 MLModelRunner *Runner, Logger *Log)
182 : MLPriorityAdvisor(MF, RA, Indexes, Runner), Log(Log) {}
231 Runner = std::make_unique<NoInferenceModelRunner>(Ctx, InputFeatures);
233 Runner = ModelUnderTrainingRunner::createAndEnsureValid(
235 if (!Runner) {
248 if (auto *MUTR = dyn_cast<ModelUnderTrainingRunner>(Runner.get()))
262 if (!Runner)
269 MF, RA, &getAnalysis<SlotIndexesWrapperPass>().getSI(), Runner.get(),
273 std::unique_ptr<MLModelRunner> Runner;
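Lines 181-273 are the development-mode counterpart: the advisor subclass forwards the same Runner to MLPriorityAdvisor and additionally takes a Logger, while the analysis chooses between a NoInferenceModelRunner (feature extraction and logging only) and a ModelUnderTrainingRunner, giving up if neither could be built. A sketch of that selection, assuming names such as ModelUnderTraining, TrainingInputFeatures, and Log for parts the matches do not show:

// Sketch of the selection on lines 231-273; option names, the error text,
// and the logging hookup are assumptions for illustration.
if (!Runner) {
  if (ModelUnderTraining.empty())
    Runner = std::make_unique<NoInferenceModelRunner>(Ctx, InputFeatures);
  else
    Runner = ModelUnderTrainingRunner::createAndEnsureValid(
        Ctx, ModelUnderTraining, DecisionName, TrainingInputFeatures);
  if (!Runner) {
    Ctx.emitError("could not set up the model runner");
    return nullptr;
  }
}
if (auto *MUTR = dyn_cast<ModelUnderTrainingRunner>(Runner.get())) {
  // Register MUTR's extra output specs with the training Logger here
  // (the exact logging call is not visible in the matches above).
  (void)MUTR;
}
return std::make_unique<DevelopmentModePriorityAdvisor>(
    MF, RA, &getAnalysis<SlotIndexesWrapperPass>().getSI(), Runner.get(),
    Log.get());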
290 MLModelRunner *Runner)
292 Runner(std::move(Runner)) {
293 assert(this->Runner);
294 Runner->switchContext(MF.getName());
301 *Runner->getTensor<int64_t>(0) = static_cast<int64_t>(Size);
302 *Runner->getTensor<int64_t>(1) = static_cast<int64_t>(Stage);
303 *Runner->getTensor<float>(2) = static_cast<float>(LI.weight());
305 return Runner->evaluate<float>();
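Lines 290-305 are the advisor implementation itself: the constructor stores the borrowed runner, asserts it is non-null, and switches the runner's context to the current function; each priority query then fills three input tensors (live-interval size, allocation stage, spill weight) at indices 0, 1, and 2 and returns the model's single float output. A condensed sketch, in which the base-class initializer list and the way Size and Stage are computed are assumptions:

// Sketch of lines 290-305; the initializer list and the feature
// computations are simplified/assumed.
MLPriorityAdvisor::MLPriorityAdvisor(const MachineFunction &MF,
                                     const RAGreedy &RA,
                                     SlotIndexes *const Indexes,
                                     MLModelRunner *Runner)
    : RegAllocPriorityAdvisor(MF, RA, Indexes), Runner(Runner) {
  assert(this->Runner);
  // Tag subsequent queries/logs with the function being allocated.
  this->Runner->switchContext(MF.getName());
}

float MLPriorityAdvisor::getPriorityImpl(const LiveInterval &LI) const {
  const unsigned Size = LI.getSize();                // feature 0
  const auto Stage = RA.getExtraInfo().getStage(LI); // feature 1
  *Runner->getTensor<int64_t>(0) = static_cast<int64_t>(Size);
  *Runner->getTensor<int64_t>(1) = static_cast<int64_t>(Stage);
  *Runner->getTensor<float>(2) = static_cast<float>(LI.weight()); // feature 2
  return Runner->evaluate<float>();
}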