LLVM  15.0.0git
Namespaces | Macros | Functions
TensorSpec.cpp File Reference
#include "llvm/Config/config.h"
#include "llvm/ADT/Twine.h"
#include "llvm/Analysis/TensorSpec.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/JSON.h"
#include "llvm/Support/ManagedStatic.h"
#include "llvm/Support/MemoryBuffer.h"
#include "llvm/Support/Path.h"
#include "llvm/Support/raw_ostream.h"
#include <cassert>
#include <numeric>
Include dependency graph for TensorSpec.cpp:

Go to the source code of this file.

Namespaces

 llvm
 This is an optimization pass for GlobalISel generic memory operations.
 

Macros

#define TFUTILS_GETDATATYPE_IMPL(T, E)   template <> TensorType TensorSpec::getDataType<T>() { return TensorType::E; }
 
#define PARSE_TYPE(T, E)
 

Functions

Optional< TensorSpec > llvm::getTensorSpecFromJSON (LLVMContext &Ctx, const json::Value &Value)
 Construct a TensorSpec from a JSON dictionary of the form: { "name": <string>, "port": <int>, "type": <string>. More...
 
Optional< std::vector< LoggedFeatureSpec > > llvm::loadOutputSpecs (LLVMContext &Ctx, StringRef ExpectedDecisionName, StringRef ModelPath, StringRef SpecFileOverride=StringRef())
 Load the output specs. More...
 

Macro Definition Documentation

◆ PARSE_TYPE

#define PARSE_TYPE (   T,
  E 
)
Value:
if (TensorType == #T) \
return TensorSpec::createSpec<T>(TensorName, TensorShape, TensorPort);

◆ TFUTILS_GETDATATYPE_IMPL

#define TFUTILS_GETDATATYPE_IMPL (   T,
  E 
)    template <> TensorType TensorSpec::getDataType<T>() { return TensorType::E; }

Definition at line 31 of file TensorSpec.cpp.

T
llvm::TensorType
TensorType
Definition: TensorSpec.h:45