Class OnnxInferenceEngine#
Defined in File onnx_inference_engine.ixx
Nested Relationships#
Nested Types#
Class Documentation#
-
class OnnxInferenceEngine#
Public Functions#
-
OnnxInferenceEngine()#
-
~OnnxInferenceEngine()#
-
OnnxInferenceEngine(const OnnxInferenceEngine&) = delete#
-
auto operator=(const OnnxInferenceEngine&) -> OnnxInferenceEngine& = delete#
-
OnnxInferenceEngine(OnnxInferenceEngine&&) noexcept#
-
auto operator=(OnnxInferenceEngine&&) noexcept -> OnnxInferenceEngine&#
-
auto initialize(const SessionConfig &config) -> std::expected<void, OnnxError>#
-
auto is_initialized() const -> bool#
-
auto run_inference(std::span<const float> input_data, const TensorShape &input_shape, const std::string &input_name = "input") -> std::expected<InferenceResult, OnnxError>#
-
auto run_inference_multi_input(const std::vector<std::pair<std::string, TensorData>> &inputs) -> std::expected<InferenceResult, OnnxError>#
-
auto get_input_names() const -> std::vector<std::string>#
-
auto get_output_names() const -> std::vector<std::string>#
-
auto get_input_shape(const std::string &name) const -> std::expected<TensorShape, OnnxError>#
-
auto get_output_shape(const std::string &name) const -> std::expected<TensorShape, OnnxError>#
-
struct Impl#
Public Members#
-
std::unique_ptr<Ort::Env> env#
-
std::unique_ptr<Ort::Session> session#
-
std::unique_ptr<Ort::SessionOptions> session_options#
-
Ort::AllocatorWithDefaultOptions allocator#
-
SessionConfig config#
-
bool initialized = {false}#
-
std::vector<std::string> input_names_cache#
-
std::vector<std::string> output_names_cache#
-
std::vector<const char*> input_name_ptrs#
-
std::vector<const char*> output_name_ptrs#