#include "mediapipe/calculators/tensor/inference_feedback_manager.h"
#include <cstring>
#include <string>
#include <utility>
#include <vector>
#include "absl/container/flat_hash_map.h"
#include "absl/container/flat_hash_set.h"
#include "absl/log/absl_log.h"
#include "absl/status/status.h"
#include "absl/status/statusor.h"
#include "absl/strings/str_format.h"
#include "absl/strings/str_join.h"
#include "mediapipe/calculators/tensor/inference_calculator.pb.h"
#include "mediapipe/calculators/tensor/inference_io_mapper.h"
#include "mediapipe/framework/port/ret_check.h"
#include "mediapipe/framework/port/status_macros.h"
#include "mediapipe/util/tflite/tflite_signature_reader.h"
#include "mediapipe/util/tflite/utils.h"
#include "tensorflow/lite/c/common.h"
#include "tensorflow/lite/interpreter.h"
namespace mediapipe {
namespace {
bool TfLiteTensorSpecEqual(const TfLiteTensor& a, const TfLiteTensor& b) { … }
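
// Builds a map from each tensor name in `names` to its index in that vector.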
absl::flat_hash_map<std::string, int> CreateNameToIndexMap(
    const std::vector<std::string>& names) { … }

}  // namespace
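
// Initializes the feedback manager for `interpreter`: converts the feedback
// tensor links declared in `io_config` from signature tensor names into model
// tensor indices and records which input and output tensors participate in
// the feedback loop. A sketch of the expected usage: call Init() once after
// the interpreter is set up, then SwapFeedbackTensors() between inference
// invocations.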
absl::Status InferenceFeedbackManager::Init(
    const InferenceCalculatorOptions::InputOutputConfig& io_config,
    const InputOutputTensorNames& input_output_tensor_names,
    tflite::Interpreter* interpreter) { … }
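
// Transfers each feedback output tensor into its linked input tensor so that
// the next inference run consumes the state produced by the previous one.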
void InferenceFeedbackManager::SwapFeedbackTensors() { … }
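
// Translates the feedback tensor links in `io_config`, which reference
// signature tensor names, into pairs of model tensor indices using the
// signature's input and output tensor name listings.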
absl::StatusOr<std::vector<InferenceFeedbackManager::TensorFeedbackIndicesLink>>
InferenceFeedbackManager::ConvertSignatureTensorNamesToModelIndices(
    const InferenceCalculatorOptions::InputOutputConfig& io_config,
    const InputOutputTensorNames& input_output_tensor_names_map) { … }
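
// Returns true if the model input tensor at `idx` participates in a feedback
// link; such tensors are populated internally by the manager rather than from
// the calculator's input streams.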
bool InferenceFeedbackManager::IsFeedbackInputTensorAtIndex(int idx) const { … }
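
// Returns true if the model output tensor at `idx` participates in a feedback
// link.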
bool InferenceFeedbackManager::IsFeedbackOutputTensorAtIndex(int idx) const { … }
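
// Maps `input_idx`, an index into the calculator's non-feedback input
// tensors, to the corresponding model input tensor index, skipping over
// feedback inputs.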
absl::StatusOr<int> InferenceFeedbackManager::MapInputTensorToModelIndex(
    int input_idx) const { … }
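
// Returns the number of model input tensors that are not managed as feedback
// tensors.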
int InferenceFeedbackManager::GetNumberOfNonFeedbackInputTensors() const { … }
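
// Returns the number of configured feedback tensor links.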
int InferenceFeedbackManager::GetNumberOfFeedbackTensors() const { … }

}  // namespace mediapipe