chromium/media/gpu/chromeos/video_decoder_pipeline.cc

// Copyright 2019 The Chromium Authors
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "media/gpu/chromeos/video_decoder_pipeline.h"

#include <memory>
#include <optional>
#include <vector>

#include "base/containers/contains.h"
#include "base/functional/bind.h"
#include "base/memory/ptr_util.h"
#include "base/metrics/histogram_functions.h"
#include "base/strings/string_number_conversions.h"
#include "base/task/bind_post_task.h"
#include "base/task/sequenced_task_runner.h"
#include "base/task/single_thread_task_runner.h"
#include "base/task/task_traits.h"
#include "base/task/thread_pool.h"
#include "base/trace_event/trace_event.h"
#include "build/build_config.h"
#include "media/base/async_destroy_video_decoder.h"
#include "media/base/media_log.h"
#include "media/base/media_switches.h"
#include "media/base/media_util.h"
#include "media/gpu/chromeos/dmabuf_video_frame_pool.h"
#include "media/gpu/chromeos/image_processor.h"
#include "media/gpu/chromeos/image_processor_factory.h"
#include "media/gpu/chromeos/native_pixmap_frame_resource.h"
#include "media/gpu/chromeos/oop_video_decoder.h"
#include "media/gpu/chromeos/platform_video_frame_pool.h"
#include "media/gpu/chromeos/video_frame_resource.h"
#include "media/gpu/macros.h"
#include "media/media_buildflags.h"

#if BUILDFLAG(USE_VAAPI)
#include <drm_fourcc.h>
#include "media/gpu/vaapi/vaapi_video_decoder.h"
#elif BUILDFLAG(USE_V4L2_CODEC)
#include "media/gpu/v4l2/v4l2_stateful_video_decoder.h"
#include "media/gpu/v4l2/v4l2_video_decoder.h"
#else
#error Either VA-API or V4L2 must be used for decode acceleration on Chrome OS.
#endif

namespace media {
namespace {

using PixelLayoutCandidate = ImageProcessor::PixelLayoutCandidate;

// Picks the preferred compositor renderable format from |candidates|, if any.
// If |preferred_fourcc| is provided, contained in |candidates|, and considered
// renderable, it returns that. Otherwise, it goes through
// |renderable_fourccs| until it finds one that's in |candidates|. If
// it can't find a renderable format in |candidates|, it returns std::nullopt.
std::optional<Fourcc> PickRenderableFourcc(
    const std::vector<Fourcc>& renderable_fourccs,
    const std::vector<Fourcc>& candidates,
    std::optional<Fourcc> preferred_fourcc) {}
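
// A minimal sketch of the selection logic documented above (illustrative only;
// the real body is elided in this skeleton). It only assumes base::Contains()
// and Fourcc equality:
//
//   if (preferred_fourcc && base::Contains(candidates, *preferred_fourcc) &&
//       base::Contains(renderable_fourccs, *preferred_fourcc)) {
//     return preferred_fourcc;
//   }
//   for (const auto& renderable : renderable_fourccs) {
//     if (base::Contains(candidates, renderable))
//       return renderable;
//   }
//   return std::nullopt;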

// Estimates the number of buffers needed in the output frame pool to fill the
// Renderer pipeline. This pool may provide buffers directly to the
// VideoDecoder, or to the ImageProcessor when one is instantiated.
size_t EstimateRequiredRendererPipelineBuffers(bool low_delay,
                                               bool use_protected) {}
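
// The shape of the estimate, sketched with made-up numbers purely for
// illustration (the real constants are elided in this skeleton):
//
//   constexpr size_t kAssumedRendererQueueDepth = 4;  // Hypothetical value.
//   size_t buffers = kAssumedRendererQueueDepth;
//   if (!low_delay)
//     buffers += 2;  // Extra slack when the renderer is allowed to buffer.
//   if (use_protected)
//     buffers += 1;  // Keep one more buffer in flight for transcryption.
//   return buffers;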

scoped_refptr<base::SequencedTaskRunner> GetDecoderTaskRunner(
    bool in_video_decoder_process) {}
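
// A plausible sketch, not the verified logic: inside the out-of-process video
// decoder the current sequence can be reused, while in-process decoding gets a
// dedicated sequence from the thread pool.
//
//   if (in_video_decoder_process)
//     return base::SequencedTaskRunner::GetCurrentDefault();
//   return base::ThreadPool::CreateSingleThreadTaskRunner(
//       {base::TaskPriority::USER_VISIBLE},
//       base::SingleThreadTaskRunnerThreadMode::DEDICATED);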

// DefaultFrameConverter uses FrameResource's built-in conversion to produce
// VideoFrame objects. VideoDecoderPipeline uses it when the client doesn't
// supply its own FrameResourceConverter.
class DefaultFrameConverter : public FrameResourceConverter {};
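
// Conceptually (method names in this sketch are assumptions rather than the
// verified FrameResourceConverter/FrameResource API):
//
//   void DefaultFrameConverter::ConvertFrameImpl(
//       scoped_refptr<FrameResource> frame) {
//     // Delegate to the frame's own VideoFrame conversion and hand the result
//     // back to the pipeline.
//     Output(frame->CreateVideoFrame());
//   }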
}  // namespace

VideoDecoderMixin::VideoDecoderMixin(
    std::unique_ptr<MediaLog> media_log,
    scoped_refptr<base::SequencedTaskRunner> decoder_task_runner,
    base::WeakPtr<VideoDecoderMixin::Client> client) {}

VideoDecoderMixin::~VideoDecoderMixin() = default;

bool VideoDecoderMixin::NeedsTranscryption() {}

CroStatus VideoDecoderMixin::AttachSecureBuffer(
    scoped_refptr<DecoderBuffer>& buffer) {}

void VideoDecoderMixin::Initialize(const VideoDecoderConfig& config,
                                   bool low_delay,
                                   CdmContext* cdm_context,
                                   InitCB init_cb,
                                   const OutputCB& output_cb,
                                   const WaitingCB& waiting_cb) {}

void VideoDecoderMixin::ReleaseSecureBuffer(uint64_t secure_handle) {}

size_t VideoDecoderMixin::GetMaxOutputFramePoolSize() const {}

VideoDecoderPipeline::ClientFlushCBState::ClientFlushCBState(
    DecodeCB flush_cb,
    DecoderStatus decoder_decode_status) {}

VideoDecoderPipeline::ClientFlushCBState::~ClientFlushCBState() = default;

// static
std::unique_ptr<VideoDecoder> VideoDecoderPipeline::Create(
    const gpu::GpuDriverBugWorkarounds& workarounds,
    scoped_refptr<base::SequencedTaskRunner> client_task_runner,
    std::unique_ptr<DmabufVideoFramePool> frame_pool,
    std::unique_ptr<FrameResourceConverter> frame_converter,
    std::vector<Fourcc> renderable_fourccs,
    std::unique_ptr<MediaLog> media_log,
    mojo::PendingRemote<stable::mojom::StableVideoDecoder> oop_video_decoder,
    bool in_video_decoder_process) {}
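
// The expected shape of this factory, as a sketch; argument validation and the
// exact callback bindings are elided, and the constructor arguments simply
// mirror the parameters above:
//
//   const bool uses_oop_video_decoder = oop_video_decoder.is_valid();
//   CreateDecoderFunctionCB create_decoder_cb;
//   if (uses_oop_video_decoder) {
//     create_decoder_cb = /* bind OOPVideoDecoder::Create with the remote */;
//   } else {
//   #if BUILDFLAG(USE_VAAPI)
//     create_decoder_cb = /* bind VaapiVideoDecoder::Create */;
//   #elif BUILDFLAG(USE_V4L2_CODEC)
//     create_decoder_cb = /* bind V4L2VideoDecoder::Create or the stateful
//                            variant, depending on the driver */;
//   #endif
//   }
//   // Destruction must happen on the decoder sequence, hence the wrapper.
//   return std::make_unique<AsyncDestroyVideoDecoder<VideoDecoderPipeline>>(
//       base::WrapUnique(new VideoDecoderPipeline(
//           workarounds, std::move(client_task_runner), std::move(frame_pool),
//           std::move(frame_converter), std::move(renderable_fourccs),
//           std::move(media_log), std::move(create_decoder_cb),
//           uses_oop_video_decoder, in_video_decoder_process)));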

// static
std::unique_ptr<VideoDecoder> VideoDecoderPipeline::CreateForVDAAdapterForARC(
    const gpu::GpuDriverBugWorkarounds& workarounds,
    scoped_refptr<base::SequencedTaskRunner> client_task_runner,
    std::unique_ptr<DmabufVideoFramePool> frame_pool,
    std::vector<Fourcc> renderable_fourccs) {}

// static
std::unique_ptr<VideoDecoder> VideoDecoderPipeline::CreateForTesting(
    scoped_refptr<base::SequencedTaskRunner> client_task_runner,
    std::unique_ptr<MediaLog> media_log,
    bool ignore_resolution_changes_to_smaller_for_testing) {}

// static
std::vector<Fourcc> VideoDecoderPipeline::DefaultPreferredRenderableFourccs() {}
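
// Illustrative sketch: NV12 is typically listed first since it is the format
// most widely composited on ChromeOS; additional entries (e.g. for 10-bit
// content) may follow. The exact list is elided in this skeleton.
//
//   return {Fourcc(Fourcc::NV12), /* ... */};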

// static
void VideoDecoderPipeline::NotifySupportKnown(
    mojo::PendingRemote<stable::mojom::StableVideoDecoder> oop_video_decoder,
    base::OnceCallback<
        void(mojo::PendingRemote<stable::mojom::StableVideoDecoder>)> cb) {}

// static
std::optional<SupportedVideoDecoderConfigs>
VideoDecoderPipeline::GetSupportedConfigs(
    VideoDecoderType decoder_type,
    const gpu::GpuDriverBugWorkarounds& workarounds) {}
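
// A sketch of the expected dispatch (the |workarounds| filtering, e.g.
// honoring disable_accelerated_vp8_decode, is elided):
//
//   switch (decoder_type) {
//   #if BUILDFLAG(USE_VAAPI)
//     case VideoDecoderType::kVaapi:
//       return VaapiVideoDecoder::GetSupportedConfigs();
//   #elif BUILDFLAG(USE_V4L2_CODEC)
//     case VideoDecoderType::kV4L2:
//       return V4L2VideoDecoder::GetSupportedConfigs();
//   #endif
//     default:
//       return std::nullopt;
//   }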

VideoDecoderPipeline::VideoDecoderPipeline(
    const gpu::GpuDriverBugWorkarounds& gpu_workarounds,
    scoped_refptr<base::SequencedTaskRunner> client_task_runner,
    std::unique_ptr<DmabufVideoFramePool> frame_pool,
    std::unique_ptr<FrameResourceConverter> frame_converter,
    std::vector<Fourcc> renderable_fourccs,
    std::unique_ptr<MediaLog> media_log,
    CreateDecoderFunctionCB create_decoder_function_cb,
    bool uses_oop_video_decoder,
    bool in_video_decoder_process) {}

VideoDecoderPipeline::~VideoDecoderPipeline() {}

// static
void VideoDecoderPipeline::DestroyAsync(
    std::unique_ptr<VideoDecoderPipeline> pipeline) {}
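
// Illustrative sketch (the |decoder_task_runner_| member name is assumed):
// destruction is trampolined onto the decoder sequence so that the decoder and
// frame pool are torn down on the sequence that uses them, which is the
// contract AsyncDestroyVideoDecoder relies on.
//
//   auto* task_runner = pipeline->decoder_task_runner_.get();
//   task_runner->DeleteSoon(FROM_HERE, std::move(pipeline));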

VideoDecoderType VideoDecoderPipeline::GetDecoderType() const {}

bool VideoDecoderPipeline::IsPlatformDecoder() const {}

int VideoDecoderPipeline::GetMaxDecodeRequests() const {}

bool VideoDecoderPipeline::FramesHoldExternalResources() const {}

bool VideoDecoderPipeline::NeedsBitstreamConversion() const {}

bool VideoDecoderPipeline::CanReadWithoutStalling() const {}

size_t VideoDecoderPipeline::GetDecoderMaxOutputFramePoolSize() const {}

void VideoDecoderPipeline::Initialize(const VideoDecoderConfig& config,
                                      bool low_delay,
                                      CdmContext* cdm_context,
                                      InitCB init_cb,
                                      const OutputCB& output_cb,
                                      const WaitingCB& waiting_cb) {}
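
// Initialize(), Decode() and Reset() are thin client-sequence trampolines:
// each wraps its callbacks with base::BindPostTaskToCurrentDefault() so that
// replies land back on the client sequence, then hops to the decoder sequence.
// A sketch for Initialize() (the others follow the same pattern):
//
//   decoder_task_runner_->PostTask(
//       FROM_HERE,
//       base::BindOnce(&VideoDecoderPipeline::InitializeTask,
//                      base::Unretained(this), config, low_delay, cdm_context,
//                      base::BindPostTaskToCurrentDefault(std::move(init_cb)),
//                      base::BindPostTaskToCurrentDefault(output_cb),
//                      base::BindPostTaskToCurrentDefault(waiting_cb)));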

void VideoDecoderPipeline::InitializeTask(const VideoDecoderConfig& config,
                                          bool low_delay,
                                          CdmContext* cdm_context,
                                          InitCB init_cb,
                                          const OutputCB& output_cb,
                                          const WaitingCB& waiting_cb) {}

void VideoDecoderPipeline::OnInitializeDone(InitCB init_cb,
                                            CdmContext* cdm_context,
                                            DecoderStatus status) {}

void VideoDecoderPipeline::Reset(base::OnceClosure reset_cb) {}

void VideoDecoderPipeline::ResetTask(base::OnceClosure reset_cb) {}

void VideoDecoderPipeline::OnResetDone(base::OnceClosure reset_cb) {}

void VideoDecoderPipeline::Decode(scoped_refptr<DecoderBuffer> buffer,
                                  DecodeCB decode_cb) {}

void VideoDecoderPipeline::DecodeTask(scoped_refptr<DecoderBuffer> buffer,
                                      DecodeCB decode_cb) {}

void VideoDecoderPipeline::OnDecodeDone(bool is_flush,
                                        DecodeCB decode_cb,
                                        DecoderStatus status) {}

void VideoDecoderPipeline::OnFrameDecoded(scoped_refptr<FrameResource> frame) {}

void VideoDecoderPipeline::OnFrameProcessed(
    scoped_refptr<FrameResource> frame) {}

void VideoDecoderPipeline::OnFrameConverted(
    scoped_refptr<VideoFrame> video_frame) {}
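
// The three callbacks above form the output path of the pipeline: decoded
// FrameResources arrive in OnFrameDecoded(), pass through the ImageProcessor
// (when one exists) and reach OnFrameProcessed(), and are finally turned into
// VideoFrames by the frame converter before OnFrameConverted() hands them to
// the client's output callback.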

void VideoDecoderPipeline::OnDecoderWaiting(WaitingReason reason) {}

bool VideoDecoderPipeline::HasPendingFrames() const {}

void VideoDecoderPipeline::OnError(const std::string& msg) {}

void VideoDecoderPipeline::CallFlushCbIfNeeded(
    std::optional<DecoderStatus> override_status) {}

void VideoDecoderPipeline::PrepareChangeResolution() {}

void VideoDecoderPipeline::CallApplyResolutionChangeIfNeeded() {}

DmabufVideoFramePool* VideoDecoderPipeline::GetVideoFramePool() const {}

void VideoDecoderPipeline::NotifyEstimatedMaxDecodeRequests(int num) {}

CroStatus::Or<PixelLayoutCandidate>
VideoDecoderPipeline::PickDecoderOutputFormat(
    const std::vector<PixelLayoutCandidate>& candidates,
    const gfx::Rect& decoder_visible_rect,
    const gfx::Size& decoder_natural_size,
    std::optional<gfx::Size> output_size,
    size_t num_codec_reference_frames,
    bool use_protected,
    bool need_aux_frame_pool,
    std::optional<DmabufVideoFramePool::CreateFrameCB> allocator) {}
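
// The decision this method makes, in outline (sketch only; frame-pool sizing,
// protected-content and secure-buffer details are elided):
//
//   1. Use PickRenderableFourcc() to check whether any of |candidates| is
//      directly renderable. If so, initialize the main frame pool for that
//      layout (at least |num_codec_reference_frames| plus the renderer
//      pipeline estimate) and return the candidate unchanged.
//   2. Otherwise, ask ImageProcessorFactory for an ImageProcessor that reads
//      one of |candidates| and writes a renderable format; the decoder then
//      gets an auxiliary pool if |need_aux_frame_pool| is set, and the main
//      pool backs the ImageProcessor's output.
//   3. If no renderable format can be produced either way, return a CroStatus
//      error.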

#if BUILDFLAG(IS_CHROMEOS)
void VideoDecoderPipeline::OnBufferTranscrypted(
    scoped_refptr<DecoderBuffer> transcrypted_buffer,
    DecodeCB decode_callback) {
  DCHECK_CALLED_ON_VALID_SEQUENCE(decoder_sequence_checker_);
  DCHECK(!has_error_);
  if (!transcrypted_buffer) {
    OnError("Error in buffer transcryption");
    std::move(decode_callback).Run(DecoderStatus::Codes::kFailed);
    return;
  }

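  // If the pipeline has been told to drop in-flight transcrypted buffers
  // (typically because a reset is underway), report them as aborted instead of
  // decoding them.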
  if (drop_transcrypted_buffers_) {
    std::move(decode_callback).Run(DecoderStatus::Codes::kAborted);
    return;
  }

  decoder_->Decode(std::move(transcrypted_buffer), std::move(decode_callback));
}
#endif  // BUILDFLAG(IS_CHROMEOS)

}  // namespace media