chromium/media/gpu/gpu_video_encode_accelerator_helpers.cc

// Copyright 2018 The Chromium Authors
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifdef UNSAFE_BUFFERS_BUILD
// TODO(crbug.com/40285824): Remove this and convert code to safer constructs.
#pragma allow_unsafe_buffers
#endif

#include "media/gpu/gpu_video_encode_accelerator_helpers.h"

#include <algorithm>
#include <ostream>

#include "base/check_op.h"
#include "base/notreached.h"
#include "base/numerics/safe_conversions.h"
#include "media/base/bitrate.h"

namespace media {
namespace {
// The maximum number of supported spatial layers and temporal layers. These
// come from the maximum number of layers currently supported by the
// VideoEncodeAccelerator implementations.
constexpr size_t kMaxSpatialLayers = 3;
constexpr size_t kMaxTemporalLayers = 3;

// The maximum size of the output buffer, chosen empirically for 1080p video.
constexpr size_t kMaxBitstreamBufferSizeInBytes = 2 * 1024 * 1024;  // 2MB

// The frame size for 1080p (FHD) video in pixels.
constexpr int k1080PSizeInPixels = 1920 * 1080;
// The frame size for 1440p (QHD) video in pixels.
constexpr int k1440PSizeInPixels = 2560 * 1440;

// The mapping from resolution, bitrate, framerate to the bitstream buffer size.
struct BitstreamBufferSizeInfo {
  int coded_size_area;
  uint32_t bitrate_in_bps;
  uint32_t framerate;
  uint32_t buffer_size_in_bytes;
};

// The bitstream buffer size for each resolution. The table must be sorted in
// increasing order by resolution. Each value was decided by measuring the
// biggest output buffer size seen at that resolution and then doubling it as
// a margin. (crbug.com/889739)
constexpr BitstreamBufferSizeInfo kBitstreamBufferSizeTable[] = {
    {320 * 180, 100000, 30, 15000},
    {640 * 360, 500000, 30, 52000},
    {1280 * 720, 1200000, 30, 110000},
    {1920 * 1080, 4000000, 30, 380000},
    {3840 * 2160, 20000000, 30, 970000},
};

// Use four times kMaxBitstreamBufferSizeInBytes when the input frame size is
// larger than 1440p, and twice when it is larger than 1080p. This is chosen
// empirically for some 4k encoding use cases and the Android CTS
// VideoEncoderTest (crbug.com/927284).
size_t GetMaxEncodeBitstreamBufferSize(const gfx::Size& size) {
  if (size.GetArea() > k1440PSizeInPixels) {
    return kMaxBitstreamBufferSizeInBytes * 4;
  }
  if (size.GetArea() > k1080PSizeInPixels) {
    return kMaxBitstreamBufferSizeInBytes * 2;
  }
  return kMaxBitstreamBufferSizeInBytes;
}
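
// Worked example of the rule above, following directly from the thresholds:
// a 1920x1080 input gets the base 2 MB; a 2560x1440 input is not larger than
// 1440p but is larger than 1080p, so it gets 4 MB; a 3840x2160 input gets
// 8 MB.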
}  // namespace

// This function sets the peak equal to the target. The peak can then be
// updated by callers.
VideoBitrateAllocation AllocateBitrateForDefaultEncodingWithBitrates(
    const std::vector<uint32_t>& sl_bitrates,
    const size_t num_temporal_layers,
    const bool uses_vbr) {
  CHECK(!sl_bitrates.empty());
  CHECK_LE(sl_bitrates.size(), kMaxSpatialLayers);
  CHECK_GT(num_temporal_layers, 0u);
  CHECK_LE(num_temporal_layers, kMaxTemporalLayers);

  // The same temporal layer bitrate factors as the WebRTC software encoder
  // (modules/video_coding/codecs/vp9/svc_config.cc).
  constexpr double kTemporalLayersBitrateScaleFactors[][kMaxTemporalLayers] = {
      {1.00, 0.00, 0.00},  // For one temporal layer.
      {0.60, 0.40, 0.00},  // For two temporal layers.
      {0.50, 0.20, 0.30},  // For three temporal layers.
  };

  const Bitrate::Mode mode =
      uses_vbr ? Bitrate::Mode::kVariable : Bitrate::Mode::kConstant;
  VideoBitrateAllocation allocation(mode);
  for (size_t sid = 0; sid < sl_bitrates.size(); ++sid) {
    for (size_t tid = 0; tid < num_temporal_layers; ++tid) {
      const double factor =
          kTemporalLayersBitrateScaleFactors[num_temporal_layers - 1][tid];
      allocation.SetBitrate(
          sid, tid, base::saturated_cast<uint32_t>(sl_bitrates[sid] * factor));
    }
  }
  // Set the peak equal to the target; callers may raise it afterwards.
  if (uses_vbr) {
    allocation.SetPeakBps(allocation.GetSumBps());
  }
  return allocation;
}
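
// Illustrative example, assuming the WebRTC-style temporal factors above
// ({0.50, 0.20, 0.30} for three layers): a single spatial layer of
// 1,000,000 bps with three temporal layers is split into 500/200/300 kbps,
// so decoding T0 alone yields 500 kbps, T0+T1 yields 700 kbps, and all
// three layers yield the full 1 Mbps.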

size_t GetEncodeBitstreamBufferSize(const gfx::Size& size,
                                    uint32_t bitrate,
                                    uint32_t framerate) {
  DCHECK_NE(framerate, 0u);
  for (const auto& info : kBitstreamBufferSizeTable) {
    if (size.GetArea() <= info.coded_size_area) {
      // The buffer size is proportional to the bitrate and inversely
      // proportional to the framerate.
      const uint64_t buffer_size =
          base::strict_cast<uint64_t>(info.buffer_size_in_bytes) * bitrate /
          info.bitrate_in_bps * info.framerate / framerate;
      return std::min(base::checked_cast<size_t>(buffer_size),
                      GetMaxEncodeBitstreamBufferSize(size));
    }
  }
  return GetMaxEncodeBitstreamBufferSize(size);
}
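
// The scaling above is linear: doubling the bitrate doubles the buffer size,
// and halving the framerate doubles it as well, with the result capped by
// GetMaxEncodeBitstreamBufferSize(). For example, with the (assumed) 720p
// table row of 110000 bytes at 1.2 Mbps / 30 fps:
//   GetEncodeBitstreamBufferSize(gfx::Size(1280, 720), 2400000u, 30u)
// returns 220000 bytes.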

// Get the maximum output bitstream buffer size. Since we don't change the
// buffer size when we update bitrate and framerate, we have to calculate the
// buffer size for the maximum bitrate.
// However, the maximum bitrate for Intel chipsets is 40 Mbps. The buffer size
// calculated with this bitrate is always larger than 2 MB, so we simply
// return the capped maximum instead.
// TODO(crbug.com/889739): Deprecate this function after we can update the
// buffer size while requesting new bitrate and framerate.
size_t GetEncodeBitstreamBufferSize(const gfx::Size& size) {
  return GetMaxEncodeBitstreamBufferSize(size);
}

std::vector<uint8_t> GetFpsAllocation(size_t num_temporal_layers) {
  DCHECK_LT(num_temporal_layers, 4u);
  // Each frame rate fraction is encoded as an 8-bit unsigned fraction,
  // i.e. allocation / 255.
  constexpr uint8_t kFullAllocation = 255;
  switch (num_temporal_layers) {
    case 1:
      // A single layer receives the full frame rate.
      return {kFullAllocation};
    case 2:
      // The base layer runs at half of the full frame rate.
      return {kFullAllocation / 2, kFullAllocation};
    case 3:
      // Each layer doubles the frame rate of the layer below it.
      return {kFullAllocation / 4, kFullAllocation / 2, kFullAllocation};
    default:
      NOTREACHED() << "Unsupported temporal layers";
  }
}
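
// Example outputs: one layer -> {255}; two layers -> {127, 255}; three
// layers -> {63, 127, 255}, i.e. the base layer runs at roughly a quarter of
// the full frame rate and each layer above it doubles that.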

VideoBitrateAllocation AllocateBitrateForDefaultEncoding(
    const VideoEncodeAccelerator::Config& config) {
  const bool uses_vbr = config.bitrate.mode() == Bitrate::Mode::kVariable;
  std::vector<uint32_t> bitrates;
  size_t num_temporal_layers = 1;
  if (config.spatial_layers.empty()) {
    // Simple stream: one spatial layer carrying the whole target bitrate.
    bitrates.push_back(config.bitrate.target_bps());
  } else {
    bitrates.reserve(config.spatial_layers.size());
    for (const auto& spatial_layer : config.spatial_layers) {
      bitrates.push_back(spatial_layer.bitrate_bps);
    }
    num_temporal_layers = config.spatial_layers[0].num_of_temporal_layers;
  }

  VideoBitrateAllocation allocation =
      AllocateBitrateForDefaultEncodingWithBitrates(
          bitrates, num_temporal_layers, uses_vbr);
  // The helper set the peak equal to the target; for VBR, replace it with
  // the peak requested in the config.
  if (uses_vbr) {
    allocation.SetPeakBps(config.bitrate.peak_bps());
  }
  return allocation;
}
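
// Usage sketch (hypothetical values): a Config with no spatial layers and a
// 2 Mbps constant bitrate produces a single-layer allocation of 2 Mbps,
// while a three-spatial-layer SVC Config reuses each layer's bitrate_bps and
// takes the temporal layer count from the first spatial layer.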

VideoBitrateAllocation AllocateDefaultBitrateForTesting(
    const size_t num_spatial_layers,
    const size_t num_temporal_layers,
    const Bitrate& bitrate) {
  // Higher spatial layers (larger indices) get the larger share of the
  // bitrate, mirroring the ratios used by the software SVC encoder.
  constexpr double kSpatialLayersBitrateScaleFactors[][kMaxSpatialLayers] = {
      {1.00, 0.00, 0.00},  // For one spatial layer.
      {0.30, 0.70, 0.00},  // For two spatial layers.
      {0.07, 0.23, 0.70},  // For three spatial layers.
  };

  CHECK_GT(num_spatial_layers, 0u);
  CHECK_LE(num_spatial_layers, kMaxSpatialLayers);
  std::vector<uint32_t> bitrates(num_spatial_layers);
  for (size_t sid = 0; sid < num_spatial_layers; ++sid) {
    const double factor =
        kSpatialLayersBitrateScaleFactors[num_spatial_layers - 1][sid];
    bitrates[sid] =
        base::saturated_cast<uint32_t>(bitrate.target_bps() * factor);
  }

  const bool uses_vbr = bitrate.mode() == Bitrate::Mode::kVariable;
  VideoBitrateAllocation allocation =
      AllocateBitrateForDefaultEncodingWithBitrates(
          bitrates, num_temporal_layers, uses_vbr);
  if (uses_vbr) {
    allocation.SetPeakBps(bitrate.peak_bps());
  }
  return allocation;
}
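
// Illustrative example, assuming the spatial factors above ({0.07, 0.23,
// 0.70} for three layers):
//   AllocateDefaultBitrateForTesting(3u, 1u,
//                                    Bitrate::ConstantBitrate(1000000u))
// allocates 70/230/700 kbps to the three spatial layers.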

}  // namespace media