#include <memory>
#include <string>
#include <utility>
#include <vector>
#include "absl/status/status.h"
#include "absl/status/statusor.h"
#include "absl/time/time.h"
#include "mediapipe/calculators/tensor/inference_calculator.h"
#include "mediapipe/calculators/tensor/inference_calculator_utils.h"
#include "mediapipe/calculators/tensor/inference_interpreter_delegate_runner.h"
#include "mediapipe/calculators/tensor/inference_runner.h"
#include "mediapipe/calculators/tensor/tensor_span.h"
#include "mediapipe/framework/calculator_framework.h"
#include "mediapipe/framework/formats/tensor.h"
#include "mediapipe/framework/port/ret_check.h"
#include "mediapipe/framework/port/status_macros.h"
#include "tensorflow/lite/delegates/xnnpack/xnnpack_delegate.h"

namespace mediapipe {
namespace api2 {
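// XNNPACK variant of the InferenceCalculator: runs TFLite inference on CPU
// through the XNNPACK delegate created in CreateDelegate() below.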
class InferenceCalculatorXnnpackImpl
    : public InferenceCalculatorNodeImpl<InferenceCalculatorXnnpack,
                                         InferenceCalculatorXnnpackImpl> { … };
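
// Performs XNNPACK-specific checks on the calculator contract (streams, side
// packets, and options) before the graph is initialized.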
absl::Status InferenceCalculatorXnnpackImpl::UpdateContract(
    CalculatorContract* cc) { … }
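
// Builds the XNNPACK-backed inference runner once when the calculator opens
// (see CreateInferenceRunner() below).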
absl::Status InferenceCalculatorXnnpackImpl::Open(CalculatorContext* cc) { … }
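
// Runs inference on the input tensors and returns the resulting output
// tensors for the current input timestamp.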
absl::StatusOr<std::vector<Tensor>> InferenceCalculatorXnnpackImpl::Process(
    CalculatorContext* cc, const TensorSpan& tensor_span) { … }
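
// Releases the inference runner and, with it, the underlying interpreter and
// delegate resources.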
absl::Status InferenceCalculatorXnnpackImpl::Close(CalculatorContext* cc) { … }
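
// Constructs an interpreter-based inference runner that wraps the TFLite
// model together with the delegate returned by CreateDelegate().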
absl::StatusOr<std::unique_ptr<InferenceRunner>>
InferenceCalculatorXnnpackImpl::CreateInferenceRunner(CalculatorContext* cc) { … }
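
// Creates the XNNPACK delegate handed to the interpreter runner. A minimal
// sketch of the usual construction, assuming the thread count is taken from
// the calculator's delegate options (the exact option plumbing is an
// assumption of this sketch):
//
//   auto xnnpack_opts = TfLiteXNNPackDelegateOptionsDefault();
//   xnnpack_opts.num_threads = /* from InferenceCalculatorOptions */;
//   return TfLiteDelegatePtr(TfLiteXNNPackDelegateCreate(&xnnpack_opts),
//                            &TfLiteXNNPackDelegateDelete);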
absl::StatusOr<TfLiteDelegatePtr>
InferenceCalculatorXnnpackImpl::CreateDelegate(CalculatorContext* cc) { … }
}  // namespace api2
}  // namespace mediapipe