llvm/Analysis/NoInferenceModelRunner.h (LLVM 22.0.0git)
//===- NoInferenceModelRunner.h ---- noop ML model runner ------*- C++ -*-===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//

#ifndef LLVM_ANALYSIS_NOINFERENCEMODELRUNNER_H
#define LLVM_ANALYSIS_NOINFERENCEMODELRUNNER_H

#include "llvm/Analysis/MLModelRunner.h"
#include "llvm/Support/Compiler.h"
namespace llvm {
class TensorSpec;

/// A pseudo model runner. We use it to store feature values when collecting
/// logs for the default policy, in 'development' mode, but never ask it to
/// 'run'.
class NoInferenceModelRunner : public MLModelRunner {
public:
  LLVM_ABI NoInferenceModelRunner(LLVMContext &Ctx,
                                  const std::vector<TensorSpec> &Inputs);

  static bool classof(const MLModelRunner *R) {
    return R->getKind() == MLModelRunner::Kind::NoOp;
  }

private:
  void *evaluateUntyped() override {
    llvm_unreachable("We shouldn't call run on this model runner.");
  }
};
} // namespace llvm
#endif // LLVM_ANALYSIS_NOINFERENCEMODELRUNNER_H
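
For context, a minimal usage sketch of the class declared above: the runner is constructed with the input TensorSpecs, feature values are written through the base class's getTensor<>() accessor so they can be logged, and evaluate() is never invoked. The feature name, shape, value, and function name below are hypothetical, chosen only for illustration.

#include "llvm/Analysis/NoInferenceModelRunner.h"
#include "llvm/Analysis/TensorSpec.h"
#include "llvm/IR/LLVMContext.h"
#include <cstdint>
#include <vector>

// Sketch: stash a feature value for log collection. evaluate() is never
// called; evaluateUntyped() would hit llvm_unreachable.
void recordFeatureForLogging(llvm::LLVMContext &Ctx) {
  // "dummy_feature" and its scalar shape are illustrative, not real LLVM
  // feature names.
  std::vector<llvm::TensorSpec> Inputs{
      llvm::TensorSpec::createSpec<int64_t>("dummy_feature", {1})};
  llvm::NoInferenceModelRunner Runner(Ctx, Inputs);
  *Runner.getTensor<int64_t>(0) = 42; // store the value for feature index 0
}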