#ifndef MEDIAPIPE_CALCULATORS_TENSOR_INFERENCE_CALCULATOR_UTILS_H_
#define MEDIAPIPE_CALCULATORS_TENSOR_INFERENCE_CALCULATOR_UTILS_H_

#include <cstddef>
#include <cstdint>

#include "absl/status/status.h"
#include "absl/status/statusor.h"
#include "mediapipe/calculators/tensor/inference_calculator.pb.h"
#include "mediapipe/framework/formats/tensor.h"
#include "mediapipe/framework/memory_manager.h"
#include "mediapipe/framework/port/ret_check.h"
#include "tensorflow/lite/interpreter.h"
#include "tensorflow/lite/util.h"
namespace mediapipe {
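
// Returns the number of threads to use with the XNNPACK delegate, derived
// from the delegate settings in the calculator options (if a delegate is
// configured).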
int GetXnnpackNumThreads(
    const bool opts_has_delegate,
    const mediapipe::InferenceCalculatorOptions::Delegate& opts_delegate);
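
// Copies a CPU `input_tensor` into the interpreter's input tensor at
// `input_tensor_index`.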
absl::Status CopyCpuInputIntoInterpreterTensor(const Tensor& input_tensor,
                                               tflite::Interpreter& interpreter,
                                               int input_tensor_index);
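
// Copies a CPU `input_tensor` directly into the given TfLiteTensor.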
absl::Status CopyCpuInputIntoTfLiteTensor(const Tensor& input_tensor,
                                          TfLiteTensor& tflite_tensor);
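
// Copies the interpreter's output tensor at `output_tensor_index` into a CPU
// `output_tensor`.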
absl::Status CopyInterpreterTensorIntoCpuOutput(
    const tflite::Interpreter& interpreter, int output_tensor_index,
    Tensor& output_tensor);
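
// Copies the given TfLiteTensor into a CPU `output_tensor`.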
absl::Status CopyTfLiteTensorIntoCpuOutput(const TfLiteTensor& tflite_tensor,
                                           Tensor& output_tensor);
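
// Converts a TfLiteTensor into an equivalent mediapipe::Tensor with the same
// element type and dimensions.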
absl::StatusOr<Tensor> ConvertTfLiteTensorToTensor(
    const TfLiteTensor& tflite_tensor);
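
// Returns true if `data_ptr` satisfies TFLite's default tensor alignment
// (tflite::kDefaultTensorAlignment) and can therefore back a TfLite custom
// allocation in place.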
template <typename T>
bool IsAlignedWithTFLiteDefaultAlignment(T* data_ptr) {
  return reinterpret_cast<uintptr_t>(data_ptr) %
             tflite::kDefaultTensorAlignment == 0;
}
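
// Registers `data_ptr` (pointing to `size_bytes` bytes) as a TfLite custom
// allocation for the tensor at `tensor_index`, so the interpreter uses that
// buffer directly instead of copying into its own allocation. The buffer is
// expected to satisfy TFLite's default alignment; check with
// IsAlignedWithTFLiteDefaultAlignment first. A minimal usage sketch, where
// the buffer, size, interpreter, and tensor index are assumed to come from
// the caller:
//
//   if (IsAlignedWithTFLiteDefaultAlignment(data)) {
//     MP_RETURN_IF_ERROR(SetTfLiteCustomAllocation(*interpreter, data,
//                                                  size_bytes,
//                                                  /*tensor_index=*/0));
//   } else {
//     MP_RETURN_IF_ERROR(CopyCpuInputIntoInterpreterTensor(
//         input_tensor, *interpreter, /*input_tensor_index=*/0));
//   }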
template <typename T>
absl::Status SetTfLiteCustomAllocation(tflite::Interpreter& interpreter,
                                       T* data_ptr, size_t size_bytes,
                                       int tensor_index) {
  // Wrap the caller-provided buffer as the tensor's backing storage.
  TfLiteCustomAllocation allocation = {.data = static_cast<void*>(data_ptr),
                                       .bytes = size_bytes};
  RET_CHECK_EQ(
      interpreter.SetCustomAllocationForTensor(tensor_index, allocation),
      kTfLiteOk);
  return absl::OkStatus();
}
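
// Creates a mediapipe::Tensor whose element type and dimensions match
// `reference_tflite_tensor`. The optional `memory_manager` and `alignment`
// control how the tensor's buffer is allocated.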
absl::StatusOr<Tensor> CreateTensorWithTfLiteTensorSpecs(
    const TfLiteTensor& reference_tflite_tensor,
    MemoryManager* memory_manager = nullptr, int alignment = 0);

}  // namespace mediapipe

#endif  // MEDIAPIPE_CALCULATORS_TENSOR_INFERENCE_CALCULATOR_UTILS_H_