// Copyright 2012 The Chromium Authors
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include <stddef.h>
#include <stdint.h>

#include <memory>
#include <string>
#include <utility>

#include "base/command_line.h"
#include "base/functional/bind.h"
#include "base/functional/callback_helpers.h"
#include "base/memory/ref_counted.h"
#include "base/run_loop.h"
#include "base/strings/strcat.h"
#include "base/strings/string_split.h"
#include "base/strings/string_util.h"
#include "base/task/single_thread_task_runner.h"
#include "base/test/bind.h"
#include "base/test/scoped_feature_list.h"
#include "base/time/time.h"
#include "build/build_config.h"
#include "media/base/cdm_callback_promise.h"
#include "media/base/cdm_key_information.h"
#include "media/base/decoder_buffer.h"
#include "media/base/media.h"
#include "media/base/media_switches.h"
#include "media/base/media_tracks.h"
#include "media/base/mock_media_log.h"
#include "media/base/supported_types.h"
#include "media/base/test_data_util.h"
#include "media/base/timestamp_constants.h"
#include "media/cdm/aes_decryptor.h"
#include "media/cdm/json_web_key.h"
#include "media/media_buildflags.h"
#include "media/renderers/renderer_impl.h"
#include "media/test/fake_encrypted_media.h"
#include "media/test/pipeline_integration_test_base.h"
#include "media/test/test_media_source.h"
#include "testing/gmock/include/gmock/gmock.h"
#include "third_party/abseil-cpp/absl/strings/ascii.h"
#include "url/gurl.h"

#if BUILDFLAG(IS_ANDROID)
#include "media/filters/android/media_codec_audio_decoder.h"
#endif

#if BUILDFLAG(IS_MAC)
#include "media/filters/mac/audio_toolbox_audio_decoder.h"
#endif

#if BUILDFLAG(IS_WIN)
#include "media/filters/win/media_foundation_audio_decoder.h"
#endif

#define EXPECT_AUDIO_HASH(expected)

using ::testing::_;
using ::testing::AnyNumber;
using ::testing::AtLeast;
using ::testing::AtMost;
using ::testing::HasSubstr;
using ::testing::SaveArg;

namespace media {

#if BUILDFLAG(ENABLE_AV1_DECODER)
constexpr int kAV110bitMp4FileDurationMs =;
constexpr int kAV1640WebMFileDurationMs =;
#endif  // BUILDFLAG(ENABLE_AV1_DECODER)

// Constants for the Media Source config change tests.
constexpr int kAppendTimeSec =;
constexpr int kAppendTimeMs =;
constexpr int k320WebMFileDurationMs =;
constexpr int k640WebMFileDurationMs =;
constexpr int kVP9WebMFileDurationMs =;
constexpr int kVP8AWebMFileDurationMs =;

constexpr char kSfxLosslessHash[] =;

// Hash for a full playthrough of "opus-trimming-test.(webm|ogg)".
constexpr char kOpusEndTrimmingHash_1[] =;

// The above hash, plus an additional playthrough starting from T=1s.
constexpr char kOpusEndTrimmingHash_2[] =;

// The above hash, plus an additional playthrough starting from T=6.36s.
constexpr char kOpusEndTrimmingHash_3[] =;

// Hash for a full playthrough of "bear-opus.webm".
constexpr char kOpusSmallCodecDelayHash_1[] =;

// The above hash, plus an additional playthrough starting from T=1.414s.
constexpr char kOpusSmallCodecDelayHash_2[] =;

#if BUILDFLAG(USE_PROPRIETARY_CODECS) && BUILDFLAG(ENABLE_FFMPEG_VIDEO_DECODERS)
constexpr int k1280IsoFileDurationMs = 2736;

constexpr int k1280IsoAVC3FileDurationMs = 2736;
#endif  // BUILDFLAG(USE_PROPRIETARY_CODECS) && BUILDFLAG(ENABLE_FFMPEG_VIDEO_DECODERS)

// Return a timeline offset for bear-320x240-live.webm.
static base::Time kLiveTimelineOffset() {}
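// A minimal sketch of how kLiveTimelineOffset() is typically implemented (the
// date below is hypothetical; the real value must match the timeline offset
// stored in the test file):
//   base::Time offset;
//   EXPECT_TRUE(base::Time::FromUTCString("2012-01-01 00:00:00", &offset));
//   return offset;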

#if BUILDFLAG(IS_MAC)
class ScopedVerboseLogEnabler {
 public:
  ScopedVerboseLogEnabler() : old_level_(logging::GetMinLogLevel()) {
    logging::SetMinLogLevel(-1);
  }

  ScopedVerboseLogEnabler(const ScopedVerboseLogEnabler&) = delete;
  ScopedVerboseLogEnabler& operator=(const ScopedVerboseLogEnabler&) = delete;

  ~ScopedVerboseLogEnabler() { logging::SetMinLogLevel(old_level_); }

 private:
  const int old_level_;
};
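
// Usage sketch (an assumption, not taken from the tests below): instantiate on
// the stack at the top of a flaky Mac test to get verbose logging for the
// duration of that test, e.g.
//   ScopedVerboseLogEnabler verbose_logs;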
#endif

enum PromiseResult {};

// Provides the test key in response to the encrypted event.
class KeyProvidingApp : public FakeEncryptedMedia::AppBase {};

class RotatingKeyProvidingApp : public KeyProvidingApp {};

// Ignores the encrypted event and does not perform a license request.
class NoResponseApp : public FakeEncryptedMedia::AppBase {};

// A rough simulation of GpuVideoDecoder that fails every Decode() request. This
// is used to test post-Initialize() fallback paths.
class FailingVideoDecoder : public VideoDecoder {};
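// A minimal sketch of the behavior described above, assuming the current
// media::VideoDecoder interface (Initialize() succeeds, every Decode() reports
// failure; in practice the callback may need to be posted rather than run
// re-entrantly):
//   void Decode(scoped_refptr<DecoderBuffer> buffer,
//               DecodeCB decode_cb) override {
//     std::move(decode_cb).Run(DecoderStatus::Codes::kFailed);
//   }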

class PipelineIntegrationTest : public testing::Test,
                                public PipelineIntegrationTestBase {};

struct PlaybackTestData {};

struct MSEPlaybackTestData {};

// Tells gtest how to print our PlaybackTestData structure.
std::ostream& operator<<(std::ostream& os, const PlaybackTestData& data) {}

std::ostream& operator<<(std::ostream& os, const MSEPlaybackTestData& data) {}

class BasicPlaybackTest : public PipelineIntegrationTest,
                          public testing::WithParamInterface<PlaybackTestData> {};

TEST_P(BasicPlaybackTest, PlayToEnd) {}

const PlaybackTestData kOpenCodecsTests[] =;

INSTANTIATE_TEST_SUITE_P();

#if BUILDFLAG(USE_PROPRIETARY_CODECS)

class BasicMSEPlaybackTest
    : public ::testing::WithParamInterface<MSEPlaybackTestData>,
      public PipelineIntegrationTest {
 protected:
  void PlayToEnd() {
    MSEPlaybackTestData data = GetParam();

    TestMediaSource source(data.filename, data.append_bytes);
    ASSERT_EQ(PIPELINE_OK,
              StartPipelineWithMediaSource(&source, kNormal, nullptr));
    source.EndOfStream();

    EXPECT_EQ(1u, pipeline_->GetBufferedTimeRanges().size());
    EXPECT_EQ(0, pipeline_->GetBufferedTimeRanges().start(0).InMilliseconds());
    EXPECT_EQ(data.duration_ms,
              pipeline_->GetBufferedTimeRanges().end(0).InMilliseconds());

    Play();

    ASSERT_TRUE(WaitUntilOnEnded());

    EXPECT_TRUE(demuxer_->GetTimelineOffset().is_null());
    source.Shutdown();
    Stop();
  }
};

TEST_P(BasicMSEPlaybackTest, PlayToEnd) {
  PlayToEnd();
}

const PlaybackTestData kADTSTests[] = {
    {"bear-audio-main-aac.aac", 0, 2708},
    {"bear-audio-lc-aac.aac", 0, 2791},
    {"bear-audio-implicit-he-aac-v1.aac", 0, 2829},
    {"bear-audio-implicit-he-aac-v2.aac", 0, 2900},
};

// TODO(chcunningham): Migrate other basic playback tests to TEST_P.
INSTANTIATE_TEST_SUITE_P(ProprietaryCodecs,
                         BasicPlaybackTest,
                         testing::ValuesIn(kADTSTests));

const MSEPlaybackTestData kMediaSourceADTSTests[] = {
    {"bear-audio-main-aac.aac", kAppendWholeFile, 2773},
    {"bear-audio-lc-aac.aac", kAppendWholeFile, 2794},
    {"bear-audio-implicit-he-aac-v1.aac", kAppendWholeFile, 2858},
    {"bear-audio-implicit-he-aac-v2.aac", kAppendWholeFile, 2901},
};

// TODO(chcunningham): Migrate other basic MSE playback tests to TEST_P.
INSTANTIATE_TEST_SUITE_P(ProprietaryCodecs,
                         BasicMSEPlaybackTest,
                         testing::ValuesIn(kMediaSourceADTSTests));

#endif  // BUILDFLAG(USE_PROPRIETARY_CODECS)

struct MSEChangeTypeTestData {};

class MSEChangeTypeTest
    : public ::testing::WithParamInterface<
          std::tuple<MSEPlaybackTestData, MSEPlaybackTestData>>,
      public PipelineIntegrationTest {};

TEST_P(MSEChangeTypeTest, PlayBackToBack) {}

const MSEPlaybackTestData kMediaSourceAudioFiles[] =;

const MSEPlaybackTestData kMediaSourceVideoFiles[] =;

INSTANTIATE_TEST_SUITE_P();

INSTANTIATE_TEST_SUITE_P();

TEST_F(PipelineIntegrationTest, BasicPlayback) {}

TEST_F(PipelineIntegrationTest, BasicPlaybackOpusOgg) {}

TEST_F(PipelineIntegrationTest, BasicPlaybackOpusOgg_4ch_ChannelMapping2) {}

TEST_F(PipelineIntegrationTest, BasicPlaybackOpusOgg_11ch_ChannelMapping2) {}

TEST_F(PipelineIntegrationTest, BasicPlaybackHashed) {}

base::TimeDelta TimestampMs(int milliseconds) {}

TEST_F(PipelineIntegrationTest, WaveLayoutChange) {}

// TODO(crbug.com/40235621): At most one of Playback9Channels48000hz and
// Playback9Channels44100hz will pass, because for 9+ channel files the hardware
// sample rate has to match the file's sample rate. They are both disabled
// because different CI configurations have different hardware sample rates. To
// run the tests, enable them both and expect at most one of them to pass.
TEST_F(PipelineIntegrationTest, DISABLED_Playback9Channels48000hz) {}

TEST_F(PipelineIntegrationTest, DISABLED_Playback9Channels44100hz) {}

TEST_F(PipelineIntegrationTest, PlaybackStereo48000hz) {}

TEST_F(PipelineIntegrationTest, PlaybackWithAudioTrackDisabledThenEnabled) {}

TEST_F(PipelineIntegrationTest, PlaybackWithVideoTrackDisabledThenEnabled) {}

TEST_F(PipelineIntegrationTest, TrackStatusChangesBeforePipelineStarted) {}

TEST_F(PipelineIntegrationTest, TrackStatusChangesAfterPipelineEnded) {}

// TODO(crbug.com/40101269): Enable test when MacOS flake is fixed.
#if BUILDFLAG(IS_MAC)
#define MAYBE_TrackStatusChangesWhileSuspended \
  DISABLED_TrackStatusChangesWhileSuspended
#else
#define MAYBE_TrackStatusChangesWhileSuspended TrackStatusChangesWhileSuspended
#endif

TEST_F(PipelineIntegrationTest, MAYBE_TrackStatusChangesWhileSuspended) {}

TEST_F(PipelineIntegrationTest, ReinitRenderersWhileAudioTrackIsDisabled) {}

TEST_F(PipelineIntegrationTest, ReinitRenderersWhileVideoTrackIsDisabled) {}

TEST_F(PipelineIntegrationTest, PipelineStoppedWhileAudioRestartPending) {}

TEST_F(PipelineIntegrationTest, PipelineStoppedWhileVideoRestartPending) {}

TEST_F(PipelineIntegrationTest, SwitchAudioTrackDuringPlayback) {}

TEST_F(PipelineIntegrationTest, SwitchVideoTrackDuringPlayback) {}

TEST_F(PipelineIntegrationTest, BasicPlaybackOpusOggTrimmingHashed) {}

TEST_F(PipelineIntegrationTest, BasicPlaybackOpusWebmTrimmingHashed) {}

TEST_F(PipelineIntegrationTest, BasicPlaybackOpusMp4TrimmingHashed) {}

TEST_F(PipelineIntegrationTest, MSE_BasicPlaybackOpusWebmTrimmingHashed) {}

TEST_F(PipelineIntegrationTest, MSE_BasicPlaybackOpusMp4TrimmingHashed) {}

TEST_F(PipelineIntegrationTest, BasicPlaybackOpusWebmHashed_MonoOutput) {}

TEST_F(PipelineIntegrationTest, BasicPlaybackOpusPrerollExceedsCodecDelay) {}

TEST_F(PipelineIntegrationTest, BasicPlaybackOpusMp4PrerollExceedsCodecDelay) {}

TEST_F(PipelineIntegrationTest, MSE_BasicPlaybackOpusPrerollExceedsCodecDelay) {}

TEST_F(PipelineIntegrationTest,
       MSE_BasicPlaybackOpusMp4PrerollExceedsCodecDelay) {}

TEST_F(PipelineIntegrationTest, BasicPlaybackLive) {}

TEST_F(PipelineIntegrationTest, S32PlaybackHashed) {}

TEST_F(PipelineIntegrationTest, F32PlaybackHashed) {}

TEST_F(PipelineIntegrationTest, BasicPlaybackEncrypted) {}

TEST_F(PipelineIntegrationTest, FlacPlaybackHashed) {}

TEST_F(PipelineIntegrationTest, MSE_BasicPlayback) {}

TEST_F(PipelineIntegrationTest, MSE_EosBeforeDemuxerOpened) {}

TEST_F(PipelineIntegrationTest, MSE_CorruptedFirstMediaSegment) {}

TEST_F(PipelineIntegrationTest, MSE_BasicPlayback_Live) {}

#if BUILDFLAG(ENABLE_AV1_DECODER)
TEST_F(PipelineIntegrationTest, MSE_BasicPlayback_AV1_WebM) {}

TEST_F(PipelineIntegrationTest, MSE_BasicPlayback_AV1_10bit_WebM) {}

#endif

TEST_F(PipelineIntegrationTest, MSE_BasicPlayback_VP9_WebM) {}

TEST_F(PipelineIntegrationTest, MSE_BasicPlayback_VP9_BlockGroup_WebM) {}

TEST_F(PipelineIntegrationTest, MSE_BasicPlayback_VP8A_WebM) {}

#if BUILDFLAG(ENABLE_AV1_DECODER)
TEST_F(PipelineIntegrationTest, MSE_ConfigChange_AV1_WebM) {}
#endif  // BUILDFLAG(ENABLE_AV1_DECODER)

TEST_F(PipelineIntegrationTest, MSE_ConfigChange_WebM) {}

TEST_F(PipelineIntegrationTest, MSE_AudioConfigChange_WebM) {}

TEST_F(PipelineIntegrationTest, MSE_RemoveUpdatesBufferedRanges) {}

// This test case imitates media playback with an advancing media time and
// continuously appended data. At some point the buffering limit should be
// reached; MediaSource should then evict some buffered data, and the eviction
// should be reflected in the media::Pipeline buffered ranges (returned by
// GetBufferedTimeRanges), which will no longer start at 0.
TEST_F(PipelineIntegrationTest, MSE_FillUpBuffer) {}
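// A sketch (an assumption, reusing helpers visible elsewhere in this file) of
// the kind of post-eviction check described above:
//   EXPECT_EQ(1u, pipeline_->GetBufferedTimeRanges().size());
//   EXPECT_GT(pipeline_->GetBufferedTimeRanges().start(0).InMilliseconds(),
//             0);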

TEST_F(PipelineIntegrationTest, MSE_GCWithDisabledVideoStream) {}

TEST_F(PipelineIntegrationTest, MSE_ConfigChange_Encrypted_WebM) {}

TEST_F(PipelineIntegrationTest, MSE_ConfigChange_ClearThenEncrypted_WebM) {}

// Config change from encrypted to clear is allowed by the demuxer, and is
// supported by the Renderer.
TEST_F(PipelineIntegrationTest, MSE_ConfigChange_EncryptedThenClear_WebM) {}

#if defined(ARCH_CPU_X86_FAMILY) && !BUILDFLAG(IS_ANDROID)
TEST_F(PipelineIntegrationTest, BasicPlaybackHi10PVP9) {}

TEST_F(PipelineIntegrationTest, BasicPlaybackHi12PVP9) {}
#endif

#if BUILDFLAG(ENABLE_AV1_DECODER)
TEST_F(PipelineIntegrationTest, MSE_BasicPlayback_AV1_MP4) {}

TEST_F(PipelineIntegrationTest, MSE_BasicPlayback_AV1_Audio_OPUS_MP4) {}

TEST_F(PipelineIntegrationTest, MSE_BasicPlayback_AV1_10bit_MP4) {}
#endif

TEST_F(PipelineIntegrationTest, MSE_FlacInMp4_Hashed) {}

TEST_F(PipelineIntegrationTest, MSE_fLaCInMp4_Hashed) {}

TEST_F(PipelineIntegrationTest, BasicPlaybackHashed_MP3) {}

TEST_F(PipelineIntegrationTest, BasicPlaybackHashed_FlacInMp4) {}

#if BUILDFLAG(ENABLE_AV1_DECODER)
TEST_F(PipelineIntegrationTest, BasicPlayback_VideoOnly_AV1_Mp4) {}

TEST_F(PipelineIntegrationTest, BasicPlayback_VideoOnly_MonoAV1_Mp4) {}

TEST_F(PipelineIntegrationTest, BasicPlayback_Video_AV1_Audio_Opus_Mp4) {}
#endif

class Mp3FastSeekParams {};

class Mp3FastSeekIntegrationTest
    : public PipelineIntegrationTest,
      public testing::WithParamInterface<Mp3FastSeekParams> {};

TEST_P(Mp3FastSeekIntegrationTest, FastSeekAccuracy_MP3) {}

// CBR seeks should always be fast and accurate.
INSTANTIATE_TEST_SUITE_P();

INSTANTIATE_TEST_SUITE_P();

// VBR seeks can be fast *OR* accurate, but not both. We chose fast.
INSTANTIATE_TEST_SUITE_P();

INSTANTIATE_TEST_SUITE_P();

TEST_F(PipelineIntegrationTest, MSE_MP3) {}

TEST_F(PipelineIntegrationTest, MSE_MP3_TimestampOffset) {}

TEST_F(PipelineIntegrationTest, MSE_MP3_Icecast) {}

#if BUILDFLAG(USE_PROPRIETARY_CODECS)

TEST_F(PipelineIntegrationTest, MSE_ADTS) {
  TestMediaSource source("sfx.adts", kAppendWholeFile);
  EXPECT_EQ(PIPELINE_OK,
            StartPipelineWithMediaSource(&source, kHashed, nullptr));
  source.EndOfStream();

  EXPECT_EQ(1u, pipeline_->GetBufferedTimeRanges().size());
  EXPECT_EQ(0, pipeline_->GetBufferedTimeRanges().start(0).InMilliseconds());
  EXPECT_EQ(325, pipeline_->GetBufferedTimeRanges().end(0).InMilliseconds());

  Play();

  EXPECT_TRUE(WaitUntilOnEnded());

  // Verify that nothing was stripped.
  EXPECT_AUDIO_HASH("0.46,1.72,4.26,4.57,3.39,1.53,");
}

TEST_F(PipelineIntegrationTest, MSE_ADTS_TimestampOffset) {
  TestMediaSource source("sfx.adts", kAppendWholeFile);
  EXPECT_EQ(PIPELINE_OK,
            StartPipelineWithMediaSource(&source, kHashed, nullptr));
  EXPECT_EQ(325, source.last_timestamp_offset().InMilliseconds());

  // Trim multiple frames off the beginning of the segment, which will cause
  // the first decoded frame to be incorrect if preroll isn't implemented.
  // (2.5 AAC frames of 1024 samples each at 44.1 kHz is roughly 58 ms.)
  const base::TimeDelta adts_preroll_duration =
      base::Seconds(2.5 * 1024 / 44100);
  const base::TimeDelta append_time =
      source.last_timestamp_offset() - adts_preroll_duration;

  scoped_refptr<DecoderBuffer> second_file = ReadTestDataFile("sfx.adts");
  source.AppendAtTimeWithWindow(append_time,
                                append_time + adts_preroll_duration,
                                kInfiniteDuration, second_file->AsSpan());
  source.EndOfStream();

  Play();
  EXPECT_TRUE(WaitUntilOnEnded());

  EXPECT_EQ(592, source.last_timestamp_offset().InMilliseconds());
  EXPECT_EQ(1u, pipeline_->GetBufferedTimeRanges().size());
  EXPECT_EQ(0, pipeline_->GetBufferedTimeRanges().start(0).InMilliseconds());
  EXPECT_EQ(592, pipeline_->GetBufferedTimeRanges().end(0).InMilliseconds());

  // Verify preroll is stripped.
  EXPECT_AUDIO_HASH("-1.76,-1.35,-0.72,0.70,1.24,0.52,");
}

TEST_F(PipelineIntegrationTest, BasicPlaybackHashed_ADTS) {
  ASSERT_EQ(PIPELINE_OK, Start("sfx.adts", kHashed));

  Play();

  ASSERT_TRUE(WaitUntilOnEnded());

  // Verify codec delay and preroll are stripped.
  EXPECT_AUDIO_HASH("1.80,1.66,2.31,3.26,4.46,3.36,");
}

TEST_F(PipelineIntegrationTest, BasicPlaybackHashed_M4A) {
  ASSERT_EQ(PIPELINE_OK,
            Start("440hz-10ms.m4a", kHashed | kUnreliableDuration));
  Play();
  ASSERT_TRUE(WaitUntilOnEnded());

  // Verify preroll is stripped. This file uses a preroll of 2112 frames, which
  // spans all three packets in the file. Postroll is not correctly stripped at
  // present; see the note below.
  EXPECT_AUDIO_HASH("3.84,4.25,4.33,3.58,3.27,3.16,");

  // Note the above hash is incorrect since the <audio> path doesn't properly
  // trim trailing silence at end of stream for AAC decodes. This isn't a huge
  // deal since plain src= tags can't splice streams and MSE requires an
  // explicit append window for correctness.
  //
  // The WebAudio path via AudioFileReader computes this correctly, so the hash
  // below is taken from that test.
  //
  // EXPECT_AUDIO_HASH("3.77,4.53,4.75,3.48,3.67,3.76,");
}

#if BUILDFLAG(IS_MAC) || BUILDFLAG(IS_ANDROID) || BUILDFLAG(IS_WIN)
std::unique_ptr<AudioDecoder> CreateXheAacDecoder(
    scoped_refptr<base::SequencedTaskRunner> task_runner,
    MediaLog& media_log) {
#if BUILDFLAG(IS_MAC)
  return std::make_unique<AudioToolboxAudioDecoder>(media_log.Clone());
#elif BUILDFLAG(IS_ANDROID)
  return std::make_unique<MediaCodecAudioDecoder>(task_runner);
#elif BUILDFLAG(IS_WIN)
  return MediaFoundationAudioDecoder::Create();
#else
#error "xHE-AAC decoding is not supported on this platform."
#endif
}

TEST_F(PipelineIntegrationTest, BasicPlaybackXHE_AAC) {
  if (!IsSupportedAudioType(
          {AudioCodec::kAAC, AudioCodecProfile::kXHE_AAC, false})) {
    GTEST_SKIP() << "Unsupported platform.";
  }

  auto prepend_audio_decoders_cb = base::BindLambdaForTesting([this]() {
    std::vector<std::unique_ptr<AudioDecoder>> audio_decoders;
    audio_decoders.push_back(CreateXheAacDecoder(
        task_environment_.GetMainThreadTaskRunner(), media_log_));
    return audio_decoders;
  });

  ASSERT_EQ(PIPELINE_OK,
            Start("noise-xhe-aac.mp4", kNormal, CreateVideoDecodersCB(),
                  prepend_audio_decoders_cb));
  Play();
  ASSERT_TRUE(WaitUntilOnEnded());

  // Note: We don't test hashes for xHE-AAC content since the decoder is
  // provided by the operating system and will apply DRC based on device
  // specific params.

  // TODO(crbug.com/40817722): Seeking doesn't always work properly when using
  // ffmpeg since it doesn't handle non-keyframe xHE-AAC samples properly.
}

TEST_F(PipelineIntegrationTest, MSE_BasicPlaybackXHE_AAC) {
  if (!IsSupportedAudioType(
          {AudioCodec::kAAC, AudioCodecProfile::kXHE_AAC, false})) {
    GTEST_SKIP() << "Unsupported platform.";
  }

  auto prepend_audio_decoders_cb = base::BindLambdaForTesting([this]() {
    std::vector<std::unique_ptr<AudioDecoder>> audio_decoders;
    audio_decoders.push_back(CreateXheAacDecoder(
        task_environment_.GetMainThreadTaskRunner(), media_log_));
    return audio_decoders;
  });

  TestMediaSource source("noise-xhe-aac.mp4", kAppendWholeFile);
  EXPECT_EQ(PIPELINE_OK, StartPipelineWithMediaSource(
                             &source, kNormal, prepend_audio_decoders_cb));
  source.EndOfStream();
  Play();
  ASSERT_TRUE(WaitUntilOnEnded());
  Pause();

  // Note: We don't test hashes for xHE-AAC content since the decoder is
  // provided by the operating system and will apply DRC based on device
  // specific params.

  // Seek to ensure a flushing and playback resumption works properly.
  auto seek_time = pipeline_->GetMediaDuration() / 2;
  ASSERT_TRUE(Seek(seek_time));
  EXPECT_EQ(seek_time, pipeline_->GetMediaTime());

  Play();
  ASSERT_TRUE(WaitUntilOnEnded());
}
#endif  // BUILDFLAG(IS_MAC) || BUILDFLAG(IS_ANDROID) || BUILDFLAG(IS_WIN)

std::vector<std::unique_ptr<VideoDecoder>> CreateFailingVideoDecoder() {
  std::vector<std::unique_ptr<VideoDecoder>> failing_video_decoder;
  failing_video_decoder.push_back(std::make_unique<FailingVideoDecoder>());
  return failing_video_decoder;
}

TEST_F(PipelineIntegrationTest, BasicFallback) {
  ASSERT_EQ(PIPELINE_OK,
            Start("bear.webm", kNormal,
                  base::BindRepeating(&CreateFailingVideoDecoder)));

  Play();

  ASSERT_TRUE(WaitUntilOnEnded());
}

#if BUILDFLAG(ENABLE_FFMPEG_VIDEO_DECODERS)
TEST_F(PipelineIntegrationTest, MSE_ConfigChange_MP4) {
  TestMediaSource source("bear-640x360-av_frag.mp4", kAppendWholeFile);
  EXPECT_EQ(PIPELINE_OK, StartPipelineWithMediaSource(&source));

  const gfx::Size kNewSize(1280, 720);
  EXPECT_CALL(*this, OnVideoConfigChange(::testing::Property(
                         &VideoDecoderConfig::natural_size, kNewSize)))
      .Times(1);
  EXPECT_CALL(*this, OnVideoNaturalSizeChange(kNewSize)).Times(1);
  scoped_refptr<DecoderBuffer> second_file =
      ReadTestDataFile("bear-1280x720-av_frag.mp4");
  source.AppendAtTime(base::Seconds(kAppendTimeSec), second_file->AsSpan());
  source.EndOfStream();

  Play();
  EXPECT_TRUE(WaitUntilOnEnded());

  EXPECT_EQ(1u, pipeline_->GetBufferedTimeRanges().size());
  EXPECT_EQ(0, pipeline_->GetBufferedTimeRanges().start(0).InMilliseconds());

  // TODO(wolenetz): Update to 2769 once the MSE endOfStream implementation no
  // longer truncates the duration to the highest end time of the intersected
  // buffered ranges, and instead compliantly uses the largest track buffer
  // range end time across all tracks and SourceBuffers. See
  // https://crbug.com/639144.
  constexpr int k1280IsoFileDurationMsAV = 2763;

  EXPECT_EQ(kAppendTimeMs + k1280IsoFileDurationMsAV,
            pipeline_->GetBufferedTimeRanges().end(0).InMilliseconds());

  source.Shutdown();
  Stop();
}

TEST_F(PipelineIntegrationTest, MSE_ConfigChange_Encrypted_MP4_CENC_VideoOnly) {
  TestMediaSource source("bear-640x360-v_frag-cenc-mdat.mp4", kAppendWholeFile);
  FakeEncryptedMedia encrypted_media(new KeyProvidingApp());
  EXPECT_EQ(PIPELINE_OK,
            StartPipelineWithEncryptedMedia(&source, &encrypted_media));

  const gfx::Size kNewSize(1280, 720);
  EXPECT_CALL(*this, OnVideoConfigChange(::testing::Property(
                         &VideoDecoderConfig::natural_size, kNewSize)))
      .Times(1);
  EXPECT_CALL(*this, OnVideoNaturalSizeChange(kNewSize)).Times(1);
  scoped_refptr<DecoderBuffer> second_file =
      ReadTestDataFile("bear-1280x720-v_frag-cenc.mp4");
  source.AppendAtTime(base::Seconds(kAppendTimeSec), second_file->AsSpan());
  source.EndOfStream();

  Play();
  EXPECT_TRUE(WaitUntilOnEnded());

  EXPECT_EQ(1u, pipeline_->GetBufferedTimeRanges().size());
  EXPECT_EQ(33, pipeline_->GetBufferedTimeRanges().start(0).InMilliseconds());
  EXPECT_EQ(kAppendTimeMs + k1280IsoFileDurationMs,
            pipeline_->GetBufferedTimeRanges().end(0).InMilliseconds());

  source.Shutdown();
  Stop();
}

TEST_F(PipelineIntegrationTest,
       MSE_ConfigChange_Encrypted_MP4_CENC_KeyRotation_VideoOnly) {
  TestMediaSource source("bear-640x360-v_frag-cenc-key_rotation.mp4",
                         kAppendWholeFile);
  FakeEncryptedMedia encrypted_media(new RotatingKeyProvidingApp());
  EXPECT_EQ(PIPELINE_OK,
            StartPipelineWithEncryptedMedia(&source, &encrypted_media));

  EXPECT_CALL(*this, OnVideoNaturalSizeChange(gfx::Size(1280, 720))).Times(1);
  scoped_refptr<DecoderBuffer> second_file =
      ReadTestDataFile("bear-1280x720-v_frag-cenc-key_rotation.mp4");
  source.AppendAtTime(base::Seconds(kAppendTimeSec), second_file->AsSpan());
  source.EndOfStream();

  Play();
  EXPECT_TRUE(WaitUntilOnEnded());

  EXPECT_EQ(1u, pipeline_->GetBufferedTimeRanges().size());
  EXPECT_EQ(33, pipeline_->GetBufferedTimeRanges().start(0).InMilliseconds());
  EXPECT_EQ(kAppendTimeMs + k1280IsoFileDurationMs,
            pipeline_->GetBufferedTimeRanges().end(0).InMilliseconds());

  source.Shutdown();
  Stop();
}

TEST_F(PipelineIntegrationTest, MSE_ConfigChange_ClearThenEncrypted_MP4_CENC) {
  TestMediaSource source("bear-640x360-v_frag.mp4", kAppendWholeFile);
  FakeEncryptedMedia encrypted_media(new KeyProvidingApp());
  EXPECT_EQ(PIPELINE_OK,
            StartPipelineWithEncryptedMedia(&source, &encrypted_media));

  EXPECT_CALL(*this, OnVideoNaturalSizeChange(gfx::Size(1280, 720))).Times(1);
  scoped_refptr<DecoderBuffer> second_file =
      ReadTestDataFile("bear-1280x720-v_frag-cenc.mp4");
  source.set_expected_append_result(
      TestMediaSource::ExpectedAppendResult::kFailure);
  source.AppendAtTime(base::Seconds(kAppendTimeSec), second_file->AsSpan());

  source.EndOfStream();

  EXPECT_EQ(33, pipeline_->GetBufferedTimeRanges().start(0).InMilliseconds());
  EXPECT_EQ(kAppendTimeMs + k1280IsoFileDurationMs,
            pipeline_->GetBufferedTimeRanges().end(0).InMilliseconds());

  Play();

  ASSERT_TRUE(WaitUntilOnEnded());
  source.Shutdown();
  Stop();
}

// Config changes from encrypted to clear are not currently supported.
TEST_F(PipelineIntegrationTest, MSE_ConfigChange_EncryptedThenClear_MP4_CENC) {
  TestMediaSource source("bear-640x360-v_frag-cenc-mdat.mp4", kAppendWholeFile);
  FakeEncryptedMedia encrypted_media(new KeyProvidingApp());
  EXPECT_EQ(PIPELINE_OK,
            StartPipelineWithEncryptedMedia(&source, &encrypted_media));

  scoped_refptr<DecoderBuffer> second_file =
      ReadTestDataFile("bear-1280x720-av_frag.mp4");

  source.set_expected_append_result(
      TestMediaSource::ExpectedAppendResult::kFailure);
  source.AppendAtTime(base::Seconds(kAppendTimeSec), second_file->AsSpan());

  source.EndOfStream();

  EXPECT_EQ(1u, pipeline_->GetBufferedTimeRanges().size());
  EXPECT_EQ(33, pipeline_->GetBufferedTimeRanges().start(0).InMilliseconds());

  // The second video was not appended, so its duration does not contribute to
  // the buffered range.
  constexpr int k640IsoCencFileDurationMs = 2769;
  EXPECT_EQ(k640IsoCencFileDurationMs,
            pipeline_->GetBufferedTimeRanges().end(0).InMilliseconds());

  Play();

  EXPECT_EQ(CHUNK_DEMUXER_ERROR_APPEND_FAILED, WaitUntilEndedOrError());
  source.Shutdown();
}
#endif  // BUILDFLAG(ENABLE_FFMPEG_VIDEO_DECODERS)

TEST_F(PipelineIntegrationTest, StereoAACMarkedAsMono) {
  ASSERT_EQ(PIPELINE_OK, Start("mono_cpe.adts"));
  Play();
  ASSERT_TRUE(WaitUntilOnEnded());
}
#endif  // BUILDFLAG(USE_PROPRIETARY_CODECS)

// Verify files which change configuration midstream fail gracefully.
TEST_F(PipelineIntegrationTest, MidStreamConfigChangesFail) {}

TEST_F(PipelineIntegrationTest, BasicPlayback_16x9AspectRatio) {}

TEST_F(PipelineIntegrationTest, MSE_EncryptedPlayback_WebM) {}

TEST_F(PipelineIntegrationTest, MSE_EncryptedPlayback_ClearStart_WebM) {}

TEST_F(PipelineIntegrationTest, MSE_EncryptedPlayback_NoEncryptedFrames_WebM) {}

TEST_F(PipelineIntegrationTest, MSE_EncryptedPlayback_MP4_VP9_CENC_VideoOnly) {}

TEST_F(PipelineIntegrationTest, MSE_BasicPlayback_VideoOnly_MP4_VP9) {}

#if BUILDFLAG(USE_PROPRIETARY_CODECS)
#if BUILDFLAG(ENABLE_FFMPEG_VIDEO_DECODERS)
TEST_F(PipelineIntegrationTest, MSE_EncryptedPlayback_MP4_CENC_VideoOnly) {
  TestMediaSource source("bear-1280x720-v_frag-cenc.mp4", kAppendWholeFile);
  FakeEncryptedMedia encrypted_media(new KeyProvidingApp());
  EXPECT_EQ(PIPELINE_OK,
            StartPipelineWithEncryptedMedia(&source, &encrypted_media));

  source.EndOfStream();
  ASSERT_EQ(PIPELINE_OK, pipeline_status_);

  Play();

  ASSERT_TRUE(WaitUntilOnEnded());
  source.Shutdown();
  Stop();
}

TEST_F(PipelineIntegrationTest,
       MSE_EncryptedPlayback_NoEncryptedFrames_MP4_CENC_VideoOnly) {
  TestMediaSource source("bear-1280x720-v_frag-cenc_clear-all.mp4",
                         kAppendWholeFile);
  FakeEncryptedMedia encrypted_media(new NoResponseApp());
  EXPECT_EQ(PIPELINE_OK,
            StartPipelineWithEncryptedMedia(&source, &encrypted_media));

  source.EndOfStream();

  Play();

  ASSERT_TRUE(WaitUntilOnEnded());
  source.Shutdown();
  Stop();
}

TEST_F(PipelineIntegrationTest, MSE_Mp2ts_AAC_HE_SBR_Audio) {
  TestMediaSource source("bear-1280x720-aac_he.ts", kAppendWholeFile);
#if BUILDFLAG(ENABLE_MSE_MPEG2TS_STREAM_PARSER)
  EXPECT_EQ(PIPELINE_OK, StartPipelineWithMediaSource(&source));
  source.EndOfStream();
  ASSERT_EQ(PIPELINE_OK, pipeline_status_);

  // Check that SBR is taken into account correctly by the mpeg2ts parser.
  // When an SBR stream is parsed as a non-SBR stream, audio frame durations
  // are calculated incorrectly, which leads to gaps in the buffered ranges (so
  // this check would fail) and eventually to stalled playback.
  EXPECT_EQ(1u, pipeline_->GetBufferedTimeRanges().size());
#else
  EXPECT_EQ(
      DEMUXER_ERROR_COULD_NOT_OPEN,
      StartPipelineWithMediaSource(&source, kExpectDemuxerFailure, nullptr));
#endif
}

#endif  // BUILDFLAG(ENABLE_FFMPEG_VIDEO_DECODERS)

TEST_F(PipelineIntegrationTest, MSE_EncryptedPlayback_MP4_CENC_AudioOnly) {
  TestMediaSource source("bear-1280x720-a_frag-cenc.mp4", kAppendWholeFile);
  FakeEncryptedMedia encrypted_media(new KeyProvidingApp());
  EXPECT_EQ(PIPELINE_OK,
            StartPipelineWithEncryptedMedia(&source, &encrypted_media));

  source.EndOfStream();
  ASSERT_EQ(PIPELINE_OK, pipeline_status_);

  Play();

  ASSERT_TRUE(WaitUntilOnEnded());
  source.Shutdown();
  Stop();
}

TEST_F(PipelineIntegrationTest, MSE_Mpeg2ts_MP3Audio_Mp4a_6B) {
  TestMediaSource source("bear-audio-mp4a.6B.ts",
                         "video/mp2t; codecs=\"mp4a.6B\"", kAppendWholeFile);
#if BUILDFLAG(ENABLE_MSE_MPEG2TS_STREAM_PARSER)
  EXPECT_EQ(PIPELINE_OK, StartPipelineWithMediaSource(&source));
  source.EndOfStream();
  ASSERT_EQ(PIPELINE_OK, pipeline_status_);
#else
  EXPECT_EQ(
      DEMUXER_ERROR_COULD_NOT_OPEN,
      StartPipelineWithMediaSource(&source, kExpectDemuxerFailure, nullptr));
#endif
}

TEST_F(PipelineIntegrationTest, MSE_Mpeg2ts_MP3Audio_Mp4a_69) {
  TestMediaSource source("bear-audio-mp4a.69.ts",
                         "video/mp2t; codecs=\"mp4a.69\"", kAppendWholeFile);
#if BUILDFLAG(ENABLE_MSE_MPEG2TS_STREAM_PARSER)
  EXPECT_EQ(PIPELINE_OK, StartPipelineWithMediaSource(&source));
  source.EndOfStream();
  ASSERT_EQ(PIPELINE_OK, pipeline_status_);
#else
  EXPECT_EQ(
      DEMUXER_ERROR_COULD_NOT_OPEN,
      StartPipelineWithMediaSource(&source, kExpectDemuxerFailure, nullptr));
#endif
}

TEST_F(PipelineIntegrationTest,
       MSE_EncryptedPlayback_NoEncryptedFrames_MP4_CENC_AudioOnly) {
  TestMediaSource source("bear-1280x720-a_frag-cenc_clear-all.mp4",
                         kAppendWholeFile);
  FakeEncryptedMedia encrypted_media(new NoResponseApp());
  EXPECT_EQ(PIPELINE_OK,
            StartPipelineWithEncryptedMedia(&source, &encrypted_media));

  source.EndOfStream();

  Play();

  ASSERT_TRUE(WaitUntilOnEnded());
  source.Shutdown();
  Stop();
}

#if BUILDFLAG(ENABLE_FFMPEG_VIDEO_DECODERS)

// Older packagers saved sample encryption auxiliary information at the
// beginning of the mdat box.
TEST_F(PipelineIntegrationTest, MSE_EncryptedPlayback_MP4_CENC_MDAT_Video) {
  TestMediaSource source("bear-640x360-v_frag-cenc-mdat.mp4", kAppendWholeFile);
  FakeEncryptedMedia encrypted_media(new KeyProvidingApp());
  EXPECT_EQ(PIPELINE_OK,
            StartPipelineWithEncryptedMedia(&source, &encrypted_media));

  source.EndOfStream();

  Play();

  ASSERT_TRUE(WaitUntilOnEnded());
  source.Shutdown();
  Stop();
}

TEST_F(PipelineIntegrationTest, MSE_EncryptedPlayback_MP4_CENC_SENC_Video) {
  TestMediaSource source("bear-640x360-v_frag-cenc-senc.mp4", kAppendWholeFile);
  FakeEncryptedMedia encrypted_media(new KeyProvidingApp());
  EXPECT_EQ(PIPELINE_OK,
            StartPipelineWithEncryptedMedia(&source, &encrypted_media));

  source.EndOfStream();

  Play();

  ASSERT_TRUE(WaitUntilOnEnded());
  source.Shutdown();
  Stop();
}

// The 'SAIZ' and 'SAIO' boxes contain information that is redundant with the
// 'SENC' box. Although the CENC spec requires 'SAIZ' and 'SAIO' boxes for
// backward compatibility, we do not use them when a 'SENC' box is present, so
// the code should work even if the two boxes are missing.
TEST_F(PipelineIntegrationTest,
       MSE_EncryptedPlayback_MP4_CENC_SENC_NO_SAIZ_SAIO_Video) {
  TestMediaSource source("bear-640x360-v_frag-cenc-senc-no-saiz-saio.mp4",
                         kAppendWholeFile);
  FakeEncryptedMedia encrypted_media(new KeyProvidingApp());
  EXPECT_EQ(PIPELINE_OK,
            StartPipelineWithEncryptedMedia(&source, &encrypted_media));

  source.EndOfStream();

  Play();

  ASSERT_TRUE(WaitUntilOnEnded());
  source.Shutdown();
  Stop();
}

TEST_F(PipelineIntegrationTest,
       MSE_EncryptedPlayback_MP4_CENC_KeyRotation_Video) {
  TestMediaSource source("bear-1280x720-v_frag-cenc-key_rotation.mp4",
                         kAppendWholeFile);
  FakeEncryptedMedia encrypted_media(new RotatingKeyProvidingApp());
  EXPECT_EQ(PIPELINE_OK,
            StartPipelineWithEncryptedMedia(&source, &encrypted_media));

  source.EndOfStream();

  Play();

  ASSERT_TRUE(WaitUntilOnEnded());
  source.Shutdown();
  Stop();
}

TEST_F(PipelineIntegrationTest, MSE_BasicPlayback_VideoOnly_MP4_AVC3) {
  TestMediaSource source("bear-1280x720-v_frag-avc3.mp4", kAppendWholeFile);
  EXPECT_EQ(PIPELINE_OK, StartPipelineWithMediaSource(&source));
  source.EndOfStream();

  EXPECT_EQ(1u, pipeline_->GetBufferedTimeRanges().size());
  EXPECT_EQ(0, pipeline_->GetBufferedTimeRanges().start(0).InMilliseconds());
  EXPECT_EQ(k1280IsoAVC3FileDurationMs,
            pipeline_->GetBufferedTimeRanges().end(0).InMilliseconds());

  Play();

  ASSERT_TRUE(WaitUntilOnEnded());
  source.Shutdown();
  Stop();
}
#endif  // BUILDFLAG(ENABLE_FFMPEG_VIDEO_DECODERS)

TEST_F(PipelineIntegrationTest,
       MSE_EncryptedPlayback_MP4_CENC_KeyRotation_Audio) {
  TestMediaSource source("bear-1280x720-a_frag-cenc-key_rotation.mp4",
                         kAppendWholeFile);
  FakeEncryptedMedia encrypted_media(new RotatingKeyProvidingApp());
  EXPECT_EQ(PIPELINE_OK,
            StartPipelineWithEncryptedMedia(&source, &encrypted_media));

  source.EndOfStream();

  Play();

  ASSERT_TRUE(WaitUntilOnEnded());
  source.Shutdown();
  Stop();
}

TEST_F(PipelineIntegrationTest, MSE_BasicPlayback_VideoOnly_MP4_HEVC) {
  // HEVC demuxing might be enabled even on platforms that don't support HEVC
  // decoding. In those cases we'll get DECODER_ERROR_NOT_SUPPORTED, which
  // indicates that we did pass the media mime type checks and attempted to
  // actually demux and decode the stream. On platforms that support both
  // demuxing and decoding we'll get PIPELINE_OK.
  const char kMp4HevcVideoOnly[] = "video/mp4; codecs=\"hvc1.1.6.L93.B0\"";
  TestMediaSource source("bear-320x240-v_frag-hevc.mp4", kMp4HevcVideoOnly,
                         kAppendWholeFile);
#if BUILDFLAG(ENABLE_PLATFORM_HEVC)
  PipelineStatus status = StartPipelineWithMediaSource(&source);
  EXPECT_TRUE(status == PIPELINE_OK || status == DECODER_ERROR_NOT_SUPPORTED);
#else
  EXPECT_EQ(
      DEMUXER_ERROR_COULD_NOT_OPEN,
      StartPipelineWithMediaSource(&source, kExpectDemuxerFailure, nullptr));
#endif  // BUILDFLAG(ENABLE_PLATFORM_HEVC)
}

// Same test as above but using a different mime type.
TEST_F(PipelineIntegrationTest, MSE_BasicPlayback_VideoOnly_MP4_HEV1) {
  const char kMp4Hev1VideoOnly[] = "video/mp4; codecs=\"hev1.1.6.L93.B0\"";
  TestMediaSource source("bear-320x240-v_frag-hevc.mp4", kMp4Hev1VideoOnly,
                         kAppendWholeFile);
#if BUILDFLAG(ENABLE_PLATFORM_HEVC)
  PipelineStatus status = StartPipelineWithMediaSource(&source);
  EXPECT_TRUE(status == PIPELINE_OK || status == DECODER_ERROR_NOT_SUPPORTED);
#else
  EXPECT_EQ(
      DEMUXER_ERROR_COULD_NOT_OPEN,
      StartPipelineWithMediaSource(&source, kExpectDemuxerFailure, nullptr));
#endif  // BUILDFLAG(ENABLE_PLATFORM_HEVC)
}

#endif  // BUILDFLAG(USE_PROPRIETARY_CODECS)

TEST_F(PipelineIntegrationTest, SeekWhilePaused) {}

TEST_F(PipelineIntegrationTest, SeekWhilePlaying) {}

TEST_F(PipelineIntegrationTest, SuspendWhilePaused) {}

TEST_F(PipelineIntegrationTest, SuspendWhilePlaying) {}

// Verify audio decoder & renderer can handle aborted demuxer reads.
TEST_F(PipelineIntegrationTest, MSE_ChunkDemuxerAbortRead_AudioOnly) {}

// Verify video decoder & renderer can handle aborted demuxer reads.
TEST_F(PipelineIntegrationTest, MSE_ChunkDemuxerAbortRead_VideoOnly) {}

TEST_F(PipelineIntegrationTest,
       BasicPlayback_AudioOnly_Opus_4ch_ChannelMapping2_WebM) {}

TEST_F(PipelineIntegrationTest,
       BasicPlayback_AudioOnly_Opus_11ch_ChannelMapping2_WebM) {}

// Verify that VP9 video in WebM containers can be played back.
TEST_F(PipelineIntegrationTest, BasicPlayback_VideoOnly_VP9_WebM) {}

#if BUILDFLAG(ENABLE_AV1_DECODER)
TEST_F(PipelineIntegrationTest, BasicPlayback_VideoOnly_AV1_WebM) {}
#endif

// Verify that VP9 video and Opus audio in the same WebM container can be played
// back.
TEST_F(PipelineIntegrationTest, BasicPlayback_VP9_Opus_WebM) {}

// Verify that VP8 video with alpha channel can be played back.
TEST_F(PipelineIntegrationTest, BasicPlayback_VP8A_WebM) {}

// Verify that VP8A video with odd width/height can be played back.
TEST_F(PipelineIntegrationTest, BasicPlayback_VP8A_Odd_WebM) {}

// Verify that VP9 video with odd width/height can be played back.
TEST_F(PipelineIntegrationTest, BasicPlayback_VP9_Odd_WebM) {}

// Verify that VP9 video with alpha channel can be played back.
TEST_F(PipelineIntegrationTest, BasicPlayback_VP9A_WebM) {}

// Verify that VP9A video with odd width/height can be played back.
TEST_F(PipelineIntegrationTest, BasicPlayback_VP9A_Odd_WebM) {}

// Verify that VP9 video with 4:4:4 subsampling can be played back.
TEST_F(PipelineIntegrationTest, P444_VP9_WebM) {}

// Verify that frames of VP9 video in the BT.709 color space have the YV12HD
// format.
TEST_F(PipelineIntegrationTest, BT709_VP9_WebM) {}

TEST_F(PipelineIntegrationTest, HD_VP9_WebM) {}

// Verify that videos with an odd frame size playback successfully.
TEST_F(PipelineIntegrationTest, BasicPlayback_OddVideoSize) {}

// Verify that Opus audio in a WebM file which reports a 44.1kHz sample rate
// plays back correctly at 48kHz.
TEST_F(PipelineIntegrationTest, BasicPlayback_Opus441kHz) {}

// Same as above but using MediaSource.
TEST_F(PipelineIntegrationTest, MSE_BasicPlayback_Opus441kHz) {}

// Ensures audio-only playback with missing or negative timestamps works.  Tests
// the common live-streaming case for chained ogg.  See http://crbug.com/396864.
TEST_F(PipelineIntegrationTest, BasicPlaybackChainedOgg) {}

TEST_F(PipelineIntegrationTest, TrailingGarbage) {}

TEST_F(PipelineIntegrationTest, BasicPlaybackPositiveStartTime) {}

#if BUILDFLAG(ENABLE_FFMPEG_VIDEO_DECODERS)

// Tests that we signal ended even when audio runs longer than video track.
TEST_F(PipelineIntegrationTest, BasicPlaybackAudioLongerThanVideo) {}

// Tests that we signal ended even when audio runs shorter than video track.
TEST_F(PipelineIntegrationTest, BasicPlaybackAudioShorterThanVideo) {}

#if BUILDFLAG(USE_PROPRIETARY_CODECS)
TEST_F(PipelineIntegrationTest, NegativeVideoTimestamps) {
  ASSERT_EQ(PIPELINE_OK,
            Start("sync2-trimmed.mp4", kHashed | kUnreliableDuration));
  Play();
  ASSERT_TRUE(WaitUntilOnEnded());
  EXPECT_EQ("aa56bcbc674d2e7a60bbecb77c55bb1e", GetVideoHash());
  EXPECT_AUDIO_HASH("89.10,30.04,90.81,29.89,89.55,29.20,");
}

TEST_F(PipelineIntegrationTest, Rotated_Metadata_0) {
  ASSERT_EQ(PIPELINE_OK, Start("bear_rotate_0.mp4"));
  ASSERT_EQ(VIDEO_ROTATION_0,
            metadata_.video_decoder_config.video_transformation().rotation);
}

TEST_F(PipelineIntegrationTest, Rotated_Metadata_90) {
  ASSERT_EQ(PIPELINE_OK, Start("bear_rotate_90.mp4"));
  ASSERT_EQ(VIDEO_ROTATION_90,
            metadata_.video_decoder_config.video_transformation().rotation);
}

TEST_F(PipelineIntegrationTest, Rotated_Metadata_180) {
  ASSERT_EQ(PIPELINE_OK, Start("bear_rotate_180.mp4"));
  ASSERT_EQ(VIDEO_ROTATION_180,
            metadata_.video_decoder_config.video_transformation().rotation);
}

TEST_F(PipelineIntegrationTest, Rotated_Metadata_270) {
  ASSERT_EQ(PIPELINE_OK, Start("bear_rotate_270.mp4"));
  ASSERT_EQ(VIDEO_ROTATION_270,
            metadata_.video_decoder_config.video_transformation().rotation);
}

TEST_F(PipelineIntegrationTest, Spherical) {
  ASSERT_EQ(PIPELINE_OK, Start("spherical.mp4", kHashed));
  Play();
  ASSERT_TRUE(WaitUntilOnEnded());
  EXPECT_EQ("1cb7f980020d99ea852e22dd6bd8d9de", GetVideoHash());
}

TEST_F(PipelineIntegrationTest, BasicPlaybackHi10P) {
  ASSERT_EQ(PIPELINE_OK, Start("bear-320x180-hi10p.mp4"));

  Play();

  ASSERT_TRUE(WaitUntilOnEnded());
}

#if BUILDFLAG(ENABLE_HLS_DEMUXER)
TEST_F(PipelineIntegrationTest, HLSMediaPlaylistTSavc1) {
  base::test::ScopedFeatureList enable_hls{kBuiltInHlsPlayer};
  ASSERT_EQ(PIPELINE_OK, StartPipelineWithHlsManifest("hls/mp_ts_avc1.m3u8"));
  Play();
  ASSERT_TRUE(WaitUntilOnEnded());
  EXPECT_EQ("6bc0ecac3fea91d9591cb3197d28b196", GetVideoHash());
}
#endif

// Verify that full-range H264 video has the right color space.
TEST_F(PipelineIntegrationTest, Fullrange_H264) {
  ASSERT_EQ(PIPELINE_OK, Start("blackwhite_yuvj420p.mp4"));
  Play();
  ASSERT_TRUE(WaitUntilOnEnded());
  EXPECT_EQ(last_video_frame_color_space_, gfx::ColorSpace::CreateJpeg());
}
#endif  // BUILDFLAG(USE_PROPRIETARY_CODECS)
#endif  // BUILDFLAG(ENABLE_FFMPEG_VIDEO_DECODERS)

}  // namespace media