folly/folly/Benchmark.cpp

/*
 * Copyright (c) Meta Platforms, Inc. and affiliates.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include <folly/Benchmark.h>

#include <algorithm>
#include <cmath>
#include <cstring>
#include <iostream>
#include <limits>
#include <map>
#include <memory>
#include <numeric>
#include <utility>
#include <vector>

#include <folly/FileUtil.h>
#include <folly/MapUtil.h>
#include <folly/String.h>
#include <folly/detail/PerfScoped.h>
#include <folly/json/json.h>

// This needs to be at the end because some versions of Boost end up including
// Windows.h without defining NOMINMAX, which breaks uses of
// `std::numeric_limits<T>::max()`. We define NOMINMAX here explicitly instead.
#define NOMINMAX
#include <boost/regex.hpp>
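
// For illustration only (not compiled as part of this file): when Windows.h
// is included without NOMINMAX, its function-like `min`/`max` macros make a
// call such as
//
//   auto lim = std::numeric_limits<size_t>::max();
//
// fail to parse, because `max(` is expanded as a macro before overload
// resolution ever happens. Defining NOMINMAX first, as above, suppresses the
// macros; the per-call-site workaround is to parenthesize the name so the
// function-like macro cannot trigger:
//
//   auto lim = (std::numeric_limits<size_t>::max)();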

using namespace std;

FOLLY_GFLAGS_DEFINE_bool();

FOLLY_GFLAGS_DEFINE_bool();

FOLLY_GFLAGS_DEFINE_bool();

#if FOLLY_PERF_IS_SUPPORTED
FOLLY_GFLAGS_DEFINE_string(
    bm_perf_args,
    "",
    "Run selected benchmarks while attaching `perf` profiling tool."
    "Advantage over attaching perf externally is that this skips "
    "initialization. The first iteration of the benchmark is also "
    "skipped to allow for all statics to be set up. This requires perf "
    " to be available on the system. Example: --bm_perf_args=\"record -g\"");
#endif
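
// Example invocation (a sketch; the binary name and the --bm_regex selection
// flag are illustrative assumptions, not spelled out in this file):
//
//   ./my_benchmark_bin --bm_regex='.*Map.*' --bm_perf_args="record -g"
//   perf report
//
// Per the flag description above, the quoted string is handed to `perf` as
// its arguments, so any `perf record` options may be used.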

FOLLY_GFLAGS_DEFINE_bool();

FOLLY_GFLAGS_DEFINE_int64();

FOLLY_GFLAGS_DEFINE_string();

FOLLY_GFLAGS_DEFINE_bool();

FOLLY_GFLAGS_DEFINE_string();

FOLLY_GFLAGS_DEFINE_string();

FOLLY_GFLAGS_DEFINE_int64();

FOLLY_GFLAGS_DEFINE_int32();

FOLLY_GFLAGS_DEFINE_int64();

FOLLY_GFLAGS_DEFINE_int32();

FOLLY_GFLAGS_DEFINE_uint32();

FOLLY_GFLAGS_DEFINE_uint32();

FOLLY_GFLAGS_DEFINE_bool();

namespace folly {
namespace detail {

BenchmarkingState<std::chrono::high_resolution_clock>& globalBenchmarkState() {}

} // namespace detail

BenchmarkFun;

#define FB_FOLLY_GLOBAL_BENCHMARK_BASELINE
#define FB_STRINGIZE_X2

constexpr const char kGlobalBenchmarkBaseline[] =;

// Add the global baseline
BENCHMARK(FB_FOLLY_GLOBAL_BENCHMARK_BASELINE) {}

#undef FB_STRINGIZE_X2
#undef FB_FOLLY_GLOBAL_BENCHMARK_BASELINE
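
// The global baseline measures the empty-loop overhead of the benchmarking
// harness itself. A minimal sketch of how it is applied, assumed from the
// `globalBaseline` parameters of the run functions below rather than copied
// from their bodies:
//
//   double adjustedNsPerIter = std::max(0.0, nsPerIter - globalBaseline);
//
// so that loop bookkeeping does not count against the code being measured.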

static std::pair<double, UserCounters> runBenchmarkGetNSPerIteration(
    const BenchmarkFun& fun, const double globalBaseline) {}

static std::pair<double, UserCounters> runBenchmarkGetNSPerIterationEstimate(
    const BenchmarkFun& fun, const double globalBaseline) {}

static std::pair<double, UserCounters> runProfilingGetNSPerIteration(
    const BenchmarkFun& fun, const double globalBaseline) {}

struct ScaleInfo {};

static const ScaleInfo kTimeSuffixes[]{};

static const ScaleInfo kMetricSuffixes[]{};

static string humanReadable(
    double n, unsigned int decimals, const ScaleInfo* scales) {}

static string readableTime(double n, unsigned int decimals) {}

static string metricReadable(double n, unsigned int decimals) {}

namespace {

constexpr std::string_view kUnitHeaders =;
constexpr std::string_view kUnitHeadersPadding =;
void printHeaderContents(std::string_view file) {}

void printDefaultHeaderContents(std::string_view file, size_t columns) {}

void printSeparator(char pad, unsigned int columns) {}

class BenchmarkResultsPrinter {};
} // namespace

static void printBenchmarkResultsAsJson(
    const vector<detail::BenchmarkResult>& data) {}

void benchmarkResultsToDynamic(
    const vector<detail::BenchmarkResult>& data, dynamic& out) {}

void benchmarkResultsFromDynamic(
    const dynamic& d, vector<detail::BenchmarkResult>& results) {}

static pair<StringPiece, StringPiece> resultKey(
    const detail::BenchmarkResult& result) {}

void printResultComparison(
    const vector<detail::BenchmarkResult>& base,
    const vector<detail::BenchmarkResult>& test) {}

void checkRunMode() {}

namespace {

struct BenchmarksToRun {};

void addSeparator(BenchmarksToRun& res) {}

BenchmarksToRun selectBenchmarksToRun(
    const std::vector<detail::BenchmarkRegistration>& benchmarks) {}

void maybeRunWarmUpIteration(const BenchmarksToRun& toRun) {}

class ShouldDrawLineTracker {};

std::pair<std::set<std::string>, std::vector<detail::BenchmarkResult>>
runBenchmarksWithPrinterImpl(
    BenchmarkResultsPrinter* FOLLY_NULLABLE printer,
    const BenchmarksToRun& toRun) {}

std::vector<detail::BenchmarkResult> resultsFromFile(
    const std::string& filename) {}

bool writeResultsToFile(
    const std::vector<detail::BenchmarkResult>& results,
    const std::string& filename) {}

} // namespace

namespace detail {

std::ostream& operator<<(std::ostream& os, const BenchmarkResult& x) {}

bool operator==(const BenchmarkResult& x, const BenchmarkResult& y) {}

std::chrono::high_resolution_clock::duration BenchmarkSuspenderBase::timeSpent;

void BenchmarkingStateBase::addBenchmarkImpl(
    const char* file, StringPiece name, BenchmarkFun fun, bool useCounter) {}

bool BenchmarkingStateBase::useCounters() const {}

std::vector<std::string> BenchmarkingStateBase::getBenchmarkList() {}

// static
folly::StringPiece BenchmarkingStateBase::getGlobalBaselineNameForTests() {}

PerfScoped BenchmarkingStateBase::doSetUpPerfScoped(
    const std::vector<std::string>& args) const {}

PerfScoped BenchmarkingStateBase::setUpPerfScoped() const {}

template <typename Printer>
std::pair<std::set<std::string>, std::vector<BenchmarkResult>>
BenchmarkingStateBase::runBenchmarksWithPrinter(Printer* printer) const {}

std::vector<BenchmarkResult> BenchmarkingStateBase::runBenchmarksWithResults()
    const {}

std::vector<BenchmarkResult> runBenchmarksWithResults() {}

} // namespace detail

void runBenchmarks() {}

} // namespace folly
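
// Typical client usage (a sketch, not part of this translation unit; the
// benchmark name and body are made up for illustration):
//
//   #include <vector>
//
//   #include <folly/Benchmark.h>
//   #include <folly/init/Init.h>
//
//   BENCHMARK(insertBackVector, iters) {
//     std::vector<int> v;
//     for (unsigned int i = 0; i < iters; ++i) {
//       v.push_back(i);
//     }
//   }
//
//   int main(int argc, char** argv) {
//     folly::Init init(&argc, &argv); // initializes folly and parses gflags
//     folly::runBenchmarks(); // runs every BENCHMARK registered above
//     return 0;
//   }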