#include "tensorflow/lite/tools/benchmark/benchmark_model.h"
#include <cstdint>
#ifdef __linux__
#include <unistd.h>
#endif
#include <iostream>
#include <memory>
#include <sstream>
#include <string>
#include "tensorflow/lite/profiling/memory_info.h"
#include "tensorflow/lite/profiling/time.h"
#include "tensorflow/lite/tools/benchmark/benchmark_utils.h"
#include "tensorflow/lite/tools/logging.h"
namespace tflite {
namespace benchmark {
using tensorflow::Stat;
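
// Default interval, in milliseconds, between memory-usage samples when peak
// memory reporting is enabled (see MayCreateMemoryUsageMonitor() below).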
constexpr int kMemoryCheckIntervalMs = …;
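
// Fills in per-process memory statistics (virtual size, resident set size,
// and the shared and code segment sizes) for the current process; Linux-only.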
#ifdef __linux__
void GetRssStats(size_t* vsize, size_t* rss, size_t* shared, size_t* code) { … }
#endif
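
// Assembles the default benchmark parameters (run counts, duration bounds,
// warmup settings, and so on); subclasses typically extend these with
// model-specific parameters.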
BenchmarkParams BenchmarkModel::DefaultParams() { … }
BenchmarkModel::BenchmarkModel() : … { … }
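
// Logs a human-readable summary of the completed benchmark: initialization
// time, warmup and regular inference latencies, and memory usage when it was
// collected.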
void BenchmarkLoggingListener::OnBenchmarkEnd(const BenchmarkResults& results) { … }
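
// Builds the command-line flag list from the registered parameters so that
// ParseFlags() can populate them from argv.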
std::vector<Flag> BenchmarkModel::GetFlags() { … }
void BenchmarkModel::LogParams() { … }
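
// Input-preparation hooks; the base-class versions simply succeed, and
// model-specific subclasses override them.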
TfLiteStatus BenchmarkModel::PrepareInputData() { … }
TfLiteStatus BenchmarkModel::ResetInputsAndOutputs() { … }
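
// Core measurement loop: invokes the model repeatedly until at least
// min_num_times runs and min_secs have elapsed (or max_secs is exceeded),
// accumulating per-invocation latencies in microseconds into the returned
// Stat<int64_t> and surfacing any failed invocation through invoke_status.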
Stat<int64_t> BenchmarkModel::Run(int min_num_times, float min_secs,
float max_secs, RunType run_type,
TfLiteStatus* invoke_status) { … }
TfLiteStatus BenchmarkModel::ValidateParams() { … }
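
// Convenience entry point: parses command-line flags, then runs the benchmark
// end to end.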
TfLiteStatus BenchmarkModel::Run(int argc, char** argv) { … }
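
// Full benchmark flow: validate and log the parameters, initialize the model,
// optionally start memory monitoring, perform the warmup runs, perform the
// timed runs, and notify listeners of the results.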
TfLiteStatus BenchmarkModel::Run() { … }
TfLiteStatus BenchmarkModel::ParseFlags(int* argc, char** argv) { … }
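
// Returns a memory-usage monitor when peak-memory reporting has been requested
// via the parameters, and nullptr otherwise.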
std::unique_ptr<profiling::memory::MemoryUsageMonitor>
BenchmarkModel::MayCreateMemoryUsageMonitor() const { … }
}  // namespace benchmark
}  // namespace tflite