12#include "llvm/Config/config.h"
13#if defined(LLVM_HAVE_TFLITE)
25#include "tensorflow/lite/interpreter.h"
26#include "tensorflow/lite/kernels/register.h"
27#include "tensorflow/lite/model.h"
28#include "tensorflow/lite/model_builder.h"
29#include "tensorflow/lite/op_resolver.h"
30#include "tensorflow/lite/logger.h"
// Holds the result of a single model evaluation: a borrowed, read-only view
// of the interpreter's output tensors. Copy and move are deleted, so a
// result cannot accidentally alias or outlive its tensors' owner.
// NOTE(review): this class body is only partially visible in this
// extraction (access specifiers and closing brace are missing).
39class EvaluationResultImpl {
// Stores pointers to interpreter-owned output tensors.
// NOTE(review): tensor lifetime appears to be owned by the interpreter —
// this object must not outlive it; confirm against the full file.
41 EvaluationResultImpl(
const std::vector<const TfLiteTensor *> &Outputs)
// Accessor for the I-th output tensor; no bounds checking is performed.
44 const TfLiteTensor *getOutput(
size_t I) {
return Outputs[
I]; }
// Copying is disallowed.
46 EvaluationResultImpl(
const EvaluationResultImpl &) =
delete;
// Moving is disallowed as well.
47 EvaluationResultImpl(EvaluationResultImpl &&
Other) =
delete;
// Borrowed pointers into the TfLite interpreter's output tensors.
50 const std::vector<const TfLiteTensor *> Outputs;
// Pimpl for TFModelEvaluator: owns the TFLite flatbuffer model and tracks
// the input/output tensors matched against the caller-supplied TensorSpecs.
// NOTE(review): only partially visible in this extraction — access
// specifiers, some members (e.g. the interpreter, IsValid), and the closing
// brace are missing.
53class TFModelEvaluatorImpl {
// Builds the evaluator from a saved-model path plus input/output specs
// (the full parameter list is not visible here).
55 TFModelEvaluatorImpl(
StringRef SavedModelPath,
56 const std::vector<TensorSpec> &InputSpecs,
57 const std::vector<TensorSpec> &OutputSpecs,
// True while the model loaded successfully and all specs validated.
60 bool isValid()
const {
return IsValid; }
// Number of output tensors wired up.
61 size_t outputSize()
const {
return Output.size(); }
// Produces a result object that borrows the current output tensors.
63 std::unique_ptr<EvaluationResultImpl> evaluate() {
65 return std::make_unique<EvaluationResultImpl>(Output);
// Access to the input tensors so callers can fill in feature data
// (the pointees are non-const, so the tensors are writable).
68 const std::vector<TfLiteTensor *> &getInput()
const {
return Input; }
70 ~TFModelEvaluatorImpl();
// Owning handle to the flatbuffer model backing the interpreter.
73 std::unique_ptr<tflite::FlatBufferModel>
Model;
// Interpreter-owned tensors, indexed to match InputSpecs/OutputSpecs order.
83 std::vector<TfLiteTensor *> Input;
86 std::vector<const TfLiteTensor *> Output;
// Marks the evaluator unusable after a load/validation failure.
88 void invalidate() { IsValid =
false; }
// Verifies a resolved tensor against its spec; reports to errs() and
// invalidates on mismatch (definition appears later in this file).
94 bool checkReportAndInvalidate(
const TfLiteTensor *Tensor,
// Loads a TFLite saved model and wires up input/output tensors according to
// the given specs. NOTE(review): this function is fragmentary in this
// extraction — several original lines (error paths, InputsMap/OutputsMap
// construction, TFLitePath derivation) are not visible, so the comments
// below describe only what is shown.
100TFModelEvaluatorImpl::TFModelEvaluatorImpl(
101 StringRef SavedModelPath,
const std::vector<TensorSpec> &InputSpecs,
102 const std::vector<TensorSpec> &OutputSpecs,
const char *Tags =
"serve")
// Pre-size the tensor pointer tables: one slot per declared spec.
103 : Input(InputSpecs.
size()), Output(OutputSpecs.
size()) {
// Silence TFLite info-level logging; only warnings and above get through.
105 tflite::LoggerOptions::SetMinimumLogSeverity(tflite::TFLITE_LOG_WARNING);
109 tflite::StderrReporter ErrorReporter;
// Load the flatbuffer model from disk. NOTE(review): TFLitePath is derived
// from SavedModelPath on lines not visible in this extraction.
113 Model = tflite::FlatBufferModel::BuildFromFile(TFLitePath.str().c_str(),
// Build an interpreter over the model using the builtin op resolver.
120 tflite::ops::builtin::BuiltinOpResolver
Resolver;
121 tflite::InterpreterBuilder Builder(*Model,
Resolver);
// Persistent arena allocation keeps tensor buffers stable across calls.
136 TfLiteAllocationType::kTfLiteArenaRwPersistent;
138 if (
Interpreter->AllocateTensors() != TfLiteStatus::kTfLiteOk) {
// Match each declared input spec against the interpreter's named inputs;
// keys have the form "<name>:<port>".
150 size_t NumberFeaturesPassed = 0;
151 for (
size_t I = 0;
I < InputSpecs.size(); ++
I) {
152 auto &InputSpec = InputSpecs[
I];
153 auto MapI = InputsMap.
find(InputSpec.name() +
":" +
154 std::to_string(InputSpec.port()));
155 if (MapI == InputsMap.
end()) {
// Validate the matched tensor's size against the spec; on mismatch the
// evaluator is invalidated (see checkReportAndInvalidate).
160 if (!checkReportAndInvalidate(Input[
I], InputSpec))
// Zero-fill the input buffer so features not explicitly set read as 0.
162 std::memset(Input[
I]->data.data, 0,
163 InputSpecs[
I].getTotalTensorBufferSize());
164 ++NumberFeaturesPassed;
// Every interpreter input must be covered by a spec; otherwise report.
167 if (NumberFeaturesPassed < Interpreter->inputs().
size()) {
169 errs() <<
"Required feature(s) have not been passed to the ML model";
// Resolve and validate each declared output tensor the same way.
174 for (
size_t I = 0;
I < OutputSpecs.size(); ++
I) {
175 const auto &OutputSpec = OutputSpecs[
I];
177 OutputsMap[OutputSpec.name() +
":" +
178 std::to_string(OutputSpec.port())]);
179 if (!checkReportAndInvalidate(Output[
I], OutputSpec))
// Public evaluator constructor: forwards to the pimpl implementation.
// If the impl failed validation, it is presumably released so isValid()
// reflects the failure — NOTE(review): the statement following the check is
// not visible in this extraction; confirm against the full file.
184TFModelEvaluator::TFModelEvaluator(
StringRef SavedModelPath,
185 const std::vector<TensorSpec> &InputSpecs,
186 const std::vector<TensorSpec> &OutputSpecs,
188 : Impl(new TFModelEvaluatorImpl(SavedModelPath, InputSpecs, OutputSpecs,
190 if (!Impl->isValid())
194TFModelEvaluatorImpl::~TFModelEvaluatorImpl() {}
// Validates a tensor resolved for Spec: reports via errs() when the tensor
// could not be found or when its byte size differs from the spec's expected
// buffer size. NOTE(review): parts of the signature and body (including the
// invalidate() call and return paths) are missing from this extraction.
196bool TFModelEvaluatorImpl::checkReportAndInvalidate(
const TfLiteTensor *Tensor,
// A missing tensor means the named output could not be located.
199 errs() <<
"Could not find TF_Output named: " +
Spec.name();
// Size mismatch between the spec and the actual tensor buffer.
202 if (
Spec.getTotalTensorBufferSize() != Tensor->bytes)
// Runs the model and wraps the pimpl's result. NOTE(review): the lines
// between the signature and the return (presumably an isValid() guard
// returning std::nullopt, and the interpreter invocation) are not visible
// in this extraction — confirm against the full file.
211std::optional<TFModelEvaluator::EvaluationResult> TFModelEvaluator::evaluate() {
214 return EvaluationResult(Impl->evaluate());
// Returns a raw pointer into the Index-th input tensor's buffer so callers
// can write feature values. NOTE(review): the return statement is not
// visible in this extraction.
217void *TFModelEvaluator::getUntypedInput(
size_t Index) {
218 TfLiteTensor *
T = Impl->getInput()[
Index];
// EvaluationResult special members: construction from the pimpl, plus move
// construction/assignment that transfer ownership of Impl. NOTE(review):
// member-initializer lists and bodies are partly missing from this
// extraction.
224TFModelEvaluator::EvaluationResult::EvaluationResult(
225 std::unique_ptr<EvaluationResultImpl> Impl)
// Move constructor.
228TFModelEvaluator::EvaluationResult::EvaluationResult(EvaluationResult &&
Other)
// Move assignment: steals the other result's implementation pointer.
231TFModelEvaluator::EvaluationResult &
232TFModelEvaluator::EvaluationResult::operator=(EvaluationResult &&
Other) {
233 Impl = std::move(
Other.Impl);
// Raw, untyped access to the Index-th output tensor's buffer; const and
// non-const overloads return the same underlying data pointer.
237void *TFModelEvaluator::EvaluationResult::getUntypedTensorValue(
size_t Index) {
238 return Impl->getOutput(
Index)->data.data;
// Const overload. NOTE(review): its return type is on an earlier line that
// is not visible in this extraction.
242TFModelEvaluator::EvaluationResult::getUntypedTensorValue(
size_t Index)
const {
243 return Impl->getOutput(
Index)->data.data;
246TFModelEvaluator::EvaluationResult::~EvaluationResult() {}
247TFModelEvaluator::~TFModelEvaluator() {}
std::optional< std::vector< StOtherPiece > > Other
This file supports working with JSON data.
static bool isValid(const char C)
Returns true if C is a valid mangled character: <0-9a-zA-Z_>.
Interface for looking up the initializer for a variable name, used by Init::resolveReferences.
pointer data()
Return a pointer to the vector's buffer, even if empty().
This is a 'vector' (really, a variable-sized array), optimized for the case when the array is small.
StringMap - This is an unconventional map that is specialized for handling keys that are "strings", which are variable length.
iterator find(StringRef Key)
StringRef - Represent a constant reference to a string, i.e.
void append(SmallVectorImpl< char > &path, const Twine &a, const Twine &b="", const Twine &c="", const Twine &d="")
Append to path.
This is an optimization pass for GlobalISel generic memory operations.
auto size(R &&Range, std::enable_if_t< std::is_base_of< std::random_access_iterator_tag, typename std::iterator_traits< decltype(Range.begin())>::iterator_category >::value, void > *=nullptr)
Get the size of a range.
raw_fd_ostream & errs()
This returns a reference to a raw_ostream for standard error.
OutputIt move(R &&Range, OutputIt Out)
Provide wrappers to std::move which take ranges instead of having to pass begin/end explicitly.
Implement std::hash so that hash_code can be used in STL containers.