/* Copyright 2021 The TensorFlow Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
#ifndef TENSORFLOW_LITE_CORE_SIGNATURE_RUNNER_H_
#define TENSORFLOW_LITE_CORE_SIGNATURE_RUNNER_H_

/// \file
///
/// An abstraction for invoking the TF Lite interpreter.
/// Provides support for named parameters, and for including multiple
/// named computations in a single model, each with its own inputs/outputs.
///
/// Do NOT include this file directly,
/// instead include third_party/tensorflow/lite/signature_runner.h
/// See third_party/tensorflow/lite/c/common.h for the API for defining
/// operations (TfLiteRegistration).

#include <cstddef>
#include <cstdint>
#include <string>
#include <vector>

#include "tensorflow/lite/core/c/common.h"
#include "tensorflow/lite/core/subgraph.h"
#include "tensorflow/lite/internal/signature_def.h"

namespace tflite {
namespace impl {
class Interpreter;  // Class for friend declarations.
}

class SignatureRunnerHelper;     // Class for friend declarations.
class SignatureRunnerJNIHelper;  // Class for friend declarations.
class TensorHandle;              // Class for friend declarations.

namespace impl {
/// SignatureRunner class for running TFLite models using SignatureDef.
///
/// Usage:
///
/// <pre><code>
/// // Create model from file. Note that the model instance must outlive the
/// // interpreter instance.
/// auto model = tflite::FlatBufferModel::BuildFromFile(...);
/// if (model == nullptr) {
///   // Return error.
/// }
///
/// // Create an Interpreter with an InterpreterBuilder.
/// std::unique_ptr<tflite::Interpreter> interpreter;
/// tflite::ops::builtin::BuiltinOpResolver resolver;
/// if (InterpreterBuilder(*model, resolver)(&interpreter) != kTfLiteOk) {
///   // Return failure.
/// }
///
/// // Get the list of signatures and check it.
/// auto signature_defs = interpreter->signature_keys();
/// if (signature_defs.empty()) {
///   // Return error.
/// }
///
/// // Get a pointer to the SignatureRunner instance corresponding to a
/// // signature. Note that the SignatureRunner instance has the same lifetime
/// // as the Interpreter instance.
/// tflite::SignatureRunner* runner =
///     interpreter->GetSignatureRunner(signature_defs[0]->c_str());
/// if (runner == nullptr) {
///   // Return error.
/// }
/// if (runner->AllocateTensors() != kTfLiteOk) {
///   // Return failure.
/// }
///
/// // Set input data. In this example, the input tensor has float type.
/// float* input = runner->input_tensor(runner->input_names()[0])->data.f;
/// for (int i = 0; i < input_size; i++) {
///   input[i] = ...;
/// }
/// runner->Invoke();
/// </code></pre>
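///
/// A minimal sketch of reading results after Invoke() returns kTfLiteOk;
/// the assumption that the first output tensor holds float data is
/// illustrative only, not something the API guarantees:
///
/// <pre><code>
/// // Read output data. output_names() lists the output names declared by
/// // the signature; this sketch assumes the first output has float type.
/// const TfLiteTensor* output =
///     runner->output_tensor(runner->output_names()[0]);
/// const float* output_data = output->data.f;
/// for (int i = 0; i < output_size; i++) {
///   ... = output_data[i];
/// }
/// </code></pre>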
///
/// WARNING: This class is *not* thread-safe. The client is responsible for
/// ensuring serialized interaction to avoid data races and undefined
/// behavior.
///
/// SignatureRunner and Interpreter share the same underlying data. Calling
/// methods on an Interpreter object will affect the state in corresponding
/// SignatureRunner objects. Therefore, it is recommended not to call other
/// Interpreter methods after calling GetSignatureRunner to create
/// SignatureRunner instances.
class SignatureRunner { … };

}  // namespace impl
}  // namespace tflite

#endif  // TENSORFLOW_LITE_CORE_SIGNATURE_RUNNER_H_