#include <algorithm>
#include <memory>
#include <string>
#include <utility>
#include <vector>
#include "absl/status/status.h"
#include "absl/status/statusor.h"
#include "absl/time/time.h"
#include "mediapipe/calculators/tensor/inference_calculator.h"
#include "mediapipe/calculators/tensor/inference_calculator_utils.h"
#include "mediapipe/calculators/tensor/inference_interpreter_delegate_runner.h"
#include "mediapipe/calculators/tensor/inference_runner.h"
#include "mediapipe/calculators/tensor/tensor_span.h"
#include "mediapipe/framework/calculator_framework.h"
#include "mediapipe/framework/formats/tensor.h"
#include "mediapipe/framework/port/ret_check.h"
#include "mediapipe/framework/port/status_macros.h"
#if defined(MEDIAPIPE_ANDROID)
#include "tensorflow/lite/delegates/nnapi/nnapi_delegate.h"
#endif

namespace mediapipe {
namespace api2 {
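
// CPU backend for InferenceCalculator: runs TFLite model inference through
// the stock interpreter. Lifecycle plumbing is shared with the other
// backends via the InferenceCalculatorNodeImpl CRTP base.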
class InferenceCalculatorCpuImpl
: public InferenceCalculatorNodeImpl<InferenceCalculatorCpu,
InferenceCalculatorCpuImpl> { … };
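
// Static contract check, run before graph initialization; expected to verify
// that a usable model source and CPU-compatible delegate options are given.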
absl::Status InferenceCalculatorCpuImpl::UpdateContract(
CalculatorContract* cc) { … }
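
// Called once before the first Process(); presumably builds the inference
// runner via CreateInferenceRunner() below.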
absl::Status InferenceCalculatorCpuImpl::Open(CalculatorContext* cc) { … }
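
// Runs inference on the input tensors for one packet set and returns the
// output tensors; the per-packet hook declared by the CRTP base.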
absl::StatusOr<std::vector<Tensor>> InferenceCalculatorCpuImpl::Process(
CalculatorContext* cc, const TensorSpan& tensor_span) { … }
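
// Called after the final Process() or on graph teardown; presumably releases
// the inference runner and any delegate state.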
absl::Status InferenceCalculatorCpuImpl::Close(CalculatorContext* cc) { … }
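
// Builds the CPU inference runner, presumably an interpreter-delegate runner
// (cf. inference_interpreter_delegate_runner.h above) wired to the model and
// the delegate from MaybeCreateDelegate().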
absl::StatusOr<std::unique_ptr<InferenceRunner>>
InferenceCalculatorCpuImpl::CreateInferenceRunner(CalculatorContext* cc) { … }
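
// Optionally creates a TFLite delegate from the calculator options (e.g.
// NNAPI on Android, per the guarded include above); a null TfLiteDelegatePtr
// presumably selects the plain interpreter path.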
absl::StatusOr<TfLiteDelegatePtr>
InferenceCalculatorCpuImpl::MaybeCreateDelegate(CalculatorContext* cc) { … }
}  // namespace api2
}  // namespace mediapipe
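
// Illustrative usage (a minimal sketch, not part of this file): a
// CalculatorGraphConfig node that routes through the InferenceCalculator
// subgraph with a CPU-friendly delegate option. The model path is a
// placeholder assumption.
//
//   node {
//     calculator: "InferenceCalculator"
//     input_stream: "TENSORS:input_tensors"
//     output_stream: "TENSORS:output_tensors"
//     options {
//       [mediapipe.InferenceCalculatorOptions.ext] {
//         model_path: "model.tflite"  // Placeholder path.
//         delegate { tflite {} }      // Plain CPU/TFLite interpreter path.
//       }
//     }
//   }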