chromium/remoting/codec/webrtc_video_encoder_gpu.cc

// Copyright 2017 The Chromium Authors
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "remoting/codec/webrtc_video_encoder_gpu.h"

#include <algorithm>
#include <limits>
#include <memory>
#include <utility>
#include <vector>

#include "base/containers/flat_map.h"
#include "base/functional/bind.h"
#include "base/functional/callback_helpers.h"
#include "base/logging.h"
#include "base/memory/ptr_util.h"
#include "base/memory/shared_memory_mapping.h"
#include "base/memory/unsafe_shared_memory_region.h"
#include "base/numerics/checked_math.h"
#include "base/task/bind_post_task.h"
#include "base/task/sequenced_task_runner.h"
#include "base/task/task_traits.h"
#include "base/task/thread_pool.h"
#include "base/threading/thread_checker.h"
#include "base/time/time.h"
#include "base/trace_event/trace_event.h"
#include "build/build_config.h"
#include "gpu/config/gpu_driver_bug_workarounds.h"
#include "gpu/config/gpu_preferences.h"
#include "media/base/bitstream_buffer.h"
#include "media/base/media_log.h"
#include "media/base/video_frame.h"
#include "media/gpu/gpu_video_encode_accelerator_factory.h"
#include "media/video/video_encode_accelerator.h"
#include "remoting/base/constants.h"
#include "remoting/codec/encoder_bitrate_filter.h"
#include "third_party/libyuv/include/libyuv/convert_from_argb.h"
#include "third_party/webrtc/modules/desktop_capture/desktop_frame.h"
#include "third_party/webrtc/modules/desktop_capture/desktop_geometry.h"

#if BUILDFLAG(IS_WIN)
#include "base/win/scoped_com_initializer.h"
#endif

namespace {

using media::VideoCodecProfile;
using media::VideoFrame;
using media::VideoPixelFormat;

// Currently, the WebrtcVideoEncoderWrapper only encodes a single frame at a
// time. Thus, there's no reason to have this set to anything greater than one.
const int kWebrtcVideoEncoderGpuOutputBufferCount = 1;

constexpr VideoCodecProfile kH264Profile = media::H264PROFILE_MAIN;

constexpr int kH264MinimumTargetBitrateKbpsPerMegapixel = 1800;

gpu::GpuDriverBugWorkarounds CreateGpuWorkarounds() {
  gpu::GpuDriverBugWorkarounds workarounds;
  return workarounds;
}

gpu::GPUInfo::GPUDevice CreateGpuDevice() {
  gpu::GPUInfo::GPUDevice device;
  return device;
}

struct OutputBuffer {
  base::UnsafeSharedMemoryRegion region;
  base::WritableSharedMemoryMapping mapping;

  bool IsValid();
};

bool OutputBuffer::IsValid() {
  return region.IsValid() && mapping.IsValid();
}

}  // namespace

namespace remoting {

// WebrtcVideoEncoderGpu::Core handles the initialization, usage, and teardown
// of a VideoEncodeAccelerator object which is used to encode desktop frames for
// presentation on the client.
//
// A brief explanation of how this class is initialized:
// 1. An instance of WebrtcVideoEncoderGpu is created using the static
//      CreateForH264() function. At this point its |core_| member (an instance
//      of this class) is created with a state of UNINITIALIZED. After this
//      point, WebrtcVideoEncoderGpu will forward all Encode calls to its
//      |core_| member.
// 2. On the first encode call, the incoming DesktopFrame's dimensions are
//      stored and the Encode params are saved in |pending_encode_|. Before
//      returning, BeginInitialization() is called.
// 3. In BeginInitialization(), the Core instance constructs the
//      VideoEncodeAccelerator using the saved dimensions from the DesktopFrame.
//      If the VideoEncodeAccelerator is constructed successfully, the state is
//      set to INITIALIZING. If not, the state is set to INITIALIZATION_ERROR.
// 4. Some time later, the VideoEncodeAccelerator sets itself up and is ready
//      to encode. At this point, it calls the Core instance's
//      RequireBitstreamBuffers() method. Once bitstream buffers are allocated,
//      the state is set to INITIALIZED. (A commented sketch of this flow
//      appears after the class declaration below.)
class WebrtcVideoEncoderGpu::Core
    : public WebrtcVideoEncoder,
      public media::VideoEncodeAccelerator::Client {
 public:
  explicit Core(media::VideoCodecProfile codec_profile);
  Core(const Core&) = delete;
  Core& operator=(const Core&) = delete;
  ~Core() override;

  // WebrtcVideoEncoder interface.
  void Encode(std::unique_ptr<webrtc::DesktopFrame> frame,
              const FrameParams& params,
              WebrtcVideoEncoder::EncodeCallback done) override;

  // media::VideoEncodeAccelerator::Client interface.
  void RequireBitstreamBuffers(unsigned int input_count,
                               const gfx::Size& input_coded_size,
                               size_t output_buffer_size) override;
  void BitstreamBufferReady(
      int32_t bitstream_buffer_id,
      const media::BitstreamBufferMetadata& metadata) override;
  void NotifyErrorStatus(const media::EncoderStatus& status) override;

 private:
  enum State { UNINITIALIZED, INITIALIZING, INITIALIZED, INITIALIZATION_ERROR };

  void BeginInitialization();
  void UseOutputBitstreamBufferId(int32_t bitstream_buffer_id);
  void RunAnyPendingEncode();
};
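
// The numbered steps above reduce to the following control flow. This is a
// commented sketch for orientation only and is not part of the build:
// |state_| and the pseudo-code are illustrative, while |pending_encode_|,
// BeginInitialization(), RequireBitstreamBuffers(), RunAnyPendingEncode() and
// kWebrtcVideoEncoderGpuOutputBufferCount are the names actually used in this
// file.
//
//   Core::Encode(frame, params, done):
//     if (state_ == UNINITIALIZED) {
//       // Step 2: remember the frame dimensions and park this request.
//       pending_encode_ = <this Encode call>;
//       BeginInitialization();  // Step 3: state_ becomes INITIALIZING on
//                               // success, INITIALIZATION_ERROR on failure.
//       return;
//     }
//     // Once INITIALIZED, convert the DesktopFrame and feed it to the
//     // VideoEncodeAccelerator.
//
//   Core::RequireBitstreamBuffers(input_count, input_coded_size, output_size):
//     // Step 4: allocate kWebrtcVideoEncoderGpuOutputBufferCount output
//     // buffers, set state_ to INITIALIZED, then RunAnyPendingEncode()
//     // replays the request saved in |pending_encode_|.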

WebrtcVideoEncoderGpu::WebrtcVideoEncoderGpu(
    VideoCodecProfile codec_profile) {}

WebrtcVideoEncoderGpu::~WebrtcVideoEncoderGpu() {}

void WebrtcVideoEncoderGpu::Encode(std::unique_ptr<webrtc::DesktopFrame> frame,
                                   const FrameParams& params,
                                   WebrtcVideoEncoder::EncodeCallback done) {}

WebrtcVideoEncoderGpu::Core::Core(media::VideoCodecProfile codec_profile) {}

WebrtcVideoEncoderGpu::Core::~Core() {}

void WebrtcVideoEncoderGpu::Core::Encode(
    std::unique_ptr<webrtc::DesktopFrame> frame,
    const FrameParams& params,
    WebrtcVideoEncoder::EncodeCallback done) {}

void WebrtcVideoEncoderGpu::Core::RequireBitstreamBuffers(
    unsigned int input_count,
    const gfx::Size& input_coded_size,
    size_t output_buffer_size) {}

void WebrtcVideoEncoderGpu::Core::BitstreamBufferReady(
    int32_t bitstream_buffer_id,
    const media::BitstreamBufferMetadata& metadata) {}

void WebrtcVideoEncoderGpu::Core::NotifyErrorStatus(
    const media::EncoderStatus& status) {}

void WebrtcVideoEncoderGpu::Core::BeginInitialization() {}

void WebrtcVideoEncoderGpu::Core::UseOutputBitstreamBufferId(
    int32_t bitstream_buffer_id) {}

void WebrtcVideoEncoderGpu::Core::RunAnyPendingEncode() {}

// static
std::unique_ptr<WebrtcVideoEncoder> WebrtcVideoEncoderGpu::CreateForH264() {}

// static
bool WebrtcVideoEncoderGpu::IsSupportedByH264(const Profile& profile) {}

}  // namespace remoting