[Chromecast] Introduce CastAudioOutputDevice.
For non-passthrough audio streams on Android, we will use CastAudioOutputDevice,
which uses the audio output service to render audio.

Bug: b:199219433
Test: Cast YT on atv
Change-Id: I39eba3731a6411d83f2be9c08c3a15e41432c682
Reviewed-on: https://chromium-review.googlesource.com/c/chromium/src/+/3230395
Commit-Queue: Junbo Ke <juke@chromium.org>
Reviewed-by: Daniel Cheng <dcheng@chromium.org>
Reviewed-by: Kenneth MacKay <kmackay@chromium.org>
Reviewed-by: Yuchen Liu <yucliu@chromium.org>
Reviewed-by: Avi Drissman <avi@chromium.org>
Cr-Commit-Position: refs/heads/main@{#936774}
Junbo Ke authored and Chromium LUCI CQ committed Oct 31, 2021
1 parent 0a140c5 commit 9048be4
Showing 18 changed files with 638 additions and 48 deletions.
10 changes: 8 additions & 2 deletions chromecast/base/cast_features.cc
@@ -160,6 +160,11 @@ const base::Feature kEnableSideGesturePassThrough{
const base::Feature kEnableChromeAudioManagerAndroid{
"enable_chrome_audio_manager_android", base::FEATURE_DISABLED_BY_DEFAULT};

// Enables CastAudioOutputDevice for audio output on Android. When disabled,
// CastAudioManagerAndroid will be used.
const base::Feature kEnableCastAudioOutputDevice{
"enable_cast_audio_output_device", base::FEATURE_DISABLED_BY_DEFAULT};

// End Chromecast Feature definitions.
const base::Feature* kFeatures[] = {
&kAllowUserMediaAccess,
@@ -170,15 +175,16 @@ const base::Feature* kFeatures[] = {
&kEnableGeneralAudienceBrowsing,
&kEnableSideGesturePassThrough,
&kEnableChromeAudioManagerAndroid,
&kEnableCastAudioOutputDevice,
};

std::vector<const base::Feature*> GetInternalFeatures();

const std::vector<const base::Feature*>& GetFeatures() {
static const base::NoDestructor<std::vector<const base::Feature*>> features(
[] {
auto features = std::vector<const base::Feature*>(
kFeatures, kFeatures + sizeof(kFeatures) / sizeof(base::Feature*));
std::vector<const base::Feature*> features(std::begin(kFeatures),
std::end(kFeatures));
auto internal_features = GetInternalFeatures();
features.insert(features.end(), internal_features.begin(),
internal_features.end());
1 change: 1 addition & 0 deletions chromecast/base/cast_features.h
@@ -29,6 +29,7 @@ extern const base::Feature kDisableIdleSocketsCloseOnMemoryPressure;
extern const base::Feature kEnableGeneralAudienceBrowsing;
extern const base::Feature kEnableSideGesturePassThrough;
extern const base::Feature kEnableChromeAudioManagerAndroid;
extern const base::Feature kEnableCastAudioOutputDevice;

// Get an iterable list of all of the cast features for checking all features as
// a collection.
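
For reference, a minimal sketch of how the new flag might be queried from Cast code. ShouldUseCastAudioOutputDevice() is a hypothetical helper, not part of this change, and checking the flag through base::FeatureList::IsEnabled() is an assumption about how callers would consume it.

#include "base/feature_list.h"
#include "chromecast/base/cast_features.h"

namespace chromecast {

// Hypothetical helper (not in this CL): choose the Android audio path based
// on the new flag. When the flag is off, audio keeps going through
// CastAudioManagerAndroid, as the comment on the feature states.
bool ShouldUseCastAudioOutputDevice() {
  return base::FeatureList::IsEnabled(kEnableCastAudioOutputDevice);
}

}  // namespace chromecast
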
2 changes: 2 additions & 0 deletions chromecast/media/DEPS
@@ -2,6 +2,7 @@ include_rules = [
"+chromecast/common/mojom",
"+chromecast/external_mojo",
"+chromecast/net/socket_util.h",
"+content/public/renderer",
"+media/audio",
"+media/base",
"+media/cdm",
@@ -22,6 +23,7 @@ include_rules = [
"+third_party/blink/public/platform/audio/web_audio_device_source_type.h",
"+third_party/blink/public/web/modules/media/audio/web_audio_device_factory.h",
"+third_party/blink/public/web/modules/media/audio/web_audio_output_ipc_factory.h",
"+third_party/blink/public/web/web_local_frame.h",
"+third_party/widevine/cdm/buildflags.h",
"+third_party/widevine/cdm/widevine_cdm_common.h",
]
25 changes: 25 additions & 0 deletions chromecast/media/audio/BUILD.gn
@@ -185,6 +185,26 @@ cast_source_set("audio") {
configs += [ "//media/audio:platform_config" ]
}

cast_source_set("cast_audio_output_device") {
sources = [
"cast_audio_output_device.cc",
"cast_audio_output_device.h",
]

deps = [
":audio_io_thread",
"//base",
"//chromecast/common/mojom",
"//chromecast/media/audio/audio_output_service:output_connection",
"//chromecast/media/audio/audio_output_service:proto",
"//chromecast/media/base:monotonic_clock",
"//content/renderer:renderer",
"//media",
"//media/mojo/mojom",
"//net",
]
}

if (is_android) {
cast_source_set("cast_audio_device_factory") {
sources = [
@@ -193,9 +213,14 @@
]

deps = [
":cast_audio_output_device",
"//base",
"//chromecast/base",
"//content/public/renderer",
"//content/renderer:renderer",
"//media",
"//third_party/blink/public:blink",
"//third_party/blink/public/common",
]
}
}
@@ -52,10 +52,16 @@ message StreamVolume {
// Indicates that EOS for an audio output stream has been played out.
message EosPlayedOut {}

// Current media pts and the corresponding reference timestamp.
// Current media pts, the corresponding reference timestamp, rendering delay and
// the timestamp when the delay was measured.
// TODO(b/173250111): Rename this message to RenderingDelay when all users of
// |media_timestamp_microseconds| and |reference_timestamp_microseconds| have
// migrated to using delay information.
message CurrentMediaTimestamp {
optional int64 media_timestamp_microseconds = 1;
optional int64 reference_timestamp_microseconds = 2;
optional int64 delay_microseconds = 3;
optional int64 delay_timestamp_microseconds = 4;
}

// Informs the backend to stop the playback.
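
As a usage note, here is a minimal sketch, not taken from this CL, of writing and reading the two new delay fields with the generated proto accessors this change already uses elsewhere; the function names are illustrative only, and the generated header for the audio_output_service proto (path assumed) must also be included.

#include <cstdint>

// Illustrative only: pack the rendering delay on the sender side and unpack
// it on the receiver side of the audio output service protocol.
void FillRenderingDelay(int64_t delay_microseconds,
                        int64_t delay_timestamp_microseconds,
                        audio_output_service::Generic* generic) {
  audio_output_service::CurrentMediaTimestamp* timestamp =
      generic->mutable_current_media_timestamp();
  timestamp->set_delay_microseconds(delay_microseconds);
  timestamp->set_delay_timestamp_microseconds(delay_timestamp_microseconds);
}

bool ReadRenderingDelay(const audio_output_service::Generic& generic,
                        int64_t* delay_microseconds,
                        int64_t* delay_timestamp_microseconds) {
  if (!generic.has_current_media_timestamp())
    return false;
  const audio_output_service::CurrentMediaTimestamp& timestamp =
      generic.current_media_timestamp();
  *delay_microseconds = timestamp.delay_microseconds();
  *delay_timestamp_microseconds = timestamp.delay_timestamp_microseconds();
  return true;
}
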
@@ -185,9 +185,11 @@ bool OutputStreamConnection::HandleMetadata(const Generic& message) {
}

if (message.has_current_media_timestamp()) {
delegate_->UpdateMediaTime(
delegate_->OnNextBuffer(
message.current_media_timestamp().media_timestamp_microseconds(),
message.current_media_timestamp().reference_timestamp_microseconds());
message.current_media_timestamp().reference_timestamp_microseconds(),
message.current_media_timestamp().delay_microseconds(),
message.current_media_timestamp().delay_timestamp_microseconds());
}
return true;
}
@@ -39,11 +39,15 @@ class OutputStreamConnection : public OutputConnection,
virtual void OnBackendInitialized(
const BackendInitializationStatus& status) = 0;

// Called when the current media pts potentially changes because:
// 1. A new buffer is pushed or
// 2. Playback state has changed (start/pause/resume).
virtual void UpdateMediaTime(int64_t media_timestamp_microseconds,
int64_t reference_timestamp_microseconds) = 0;
// Called when the audio pipeline backend is ready to receive the next
// buffer.
// TODO(b/173250111): Remove `media_timestamp_microseconds` and
// `reference_timestamp_microseconds` once all the implementations switched
// to using delay information.
virtual void OnNextBuffer(int64_t media_timestamp_microseconds,
int64_t reference_timestamp_microseconds,
int64_t delay_microseconds,
int64_t delay_timestamp_microseconds) = 0;

protected:
virtual ~Delegate() = default;
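
A minimal sketch, not taken from this CL, of how an OnNextBuffer() implementation might use the two new arguments; it assumes "now" is sampled on the same clock that produced delay_timestamp_microseconds.

#include <algorithm>
#include <cstdint>

// The delay was measured at |delay_timestamp_microseconds|, so the portion of
// it still ahead of the output at |now_microseconds| shrinks as time passes.
int64_t RemainingDelayMicroseconds(int64_t delay_microseconds,
                                   int64_t delay_timestamp_microseconds,
                                   int64_t now_microseconds) {
  // Age the measured delay to "now"; clamp at zero in case the measurement is
  // already stale.
  return std::max<int64_t>(
      0,
      delay_microseconds - (now_microseconds - delay_timestamp_microseconds));
}
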
@@ -171,12 +171,17 @@ class AudioOutputServiceReceiver::Stream
socket_->ReceiveMoreMessages();
}

void UpdateMediaTime(int64_t media_timestamp_microseconds,
int64_t reference_timestamp_microseconds) override {
void UpdateMediaTimeAndRenderingDelay(
int64_t media_timestamp_microseconds,
int64_t reference_timestamp_microseconds,
int64_t delay_microseconds,
int64_t delay_timestamp_microseconds) override {
audio_output_service::CurrentMediaTimestamp message;
message.set_media_timestamp_microseconds(media_timestamp_microseconds);
message.set_reference_timestamp_microseconds(
reference_timestamp_microseconds);
message.set_delay_microseconds(delay_microseconds);
message.set_delay_timestamp_microseconds(delay_timestamp_microseconds);
audio_output_service::Generic generic;
*(generic.mutable_current_media_timestamp()) = message;
socket_->SendProto(kUpdateMediaTime, generic);
@@ -233,9 +233,7 @@ void CmaBackendShim::SetPlaybackRateOnMediaThread(float playback_rate) {
}

if (backend_state_ != BackendState::kStopped) {
POST_DELEGATE_TASK(&Delegate::UpdateMediaTime,
cma_backend_->GetCurrentPts(),
base::TimeTicks::Now().ToInternalValue());
UpdateMediaTimeAndRenderingDelay();
}

if (playback_rate_ == 0.0f) {
@@ -324,6 +322,18 @@ bool CmaBackendShim::SetAudioConfig() {
return audio_decoder_->SetConfig(audio_config);
}

void CmaBackendShim::UpdateMediaTimeAndRenderingDelay() {
if (!cma_backend_ || !audio_decoder_) {
return;
}
auto rendering_delay = audio_decoder_->GetRenderingDelay();
POST_DELEGATE_TASK(&Delegate::UpdateMediaTimeAndRenderingDelay,
cma_backend_->GetCurrentPts(),
base::TimeTicks::Now().since_origin().InMicroseconds(),
rendering_delay.delay_microseconds,
rendering_delay.timestamp_microseconds);
}

void CmaBackendShim::OnPushBufferComplete(BufferStatus status) {
DCHECK(media_task_runner_->RunsTasksInCurrentSequence());

@@ -333,8 +343,7 @@ void CmaBackendShim::OnPushBufferComplete(BufferStatus status) {
return;
}
POST_DELEGATE_TASK(&Delegate::OnBufferPushed);
POST_DELEGATE_TASK(&Delegate::UpdateMediaTime, cma_backend_->GetCurrentPts(),
base::TimeTicks::Now().ToInternalValue());
UpdateMediaTimeAndRenderingDelay();
}

void CmaBackendShim::OnEndOfStream() {
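
On the delegate side, the microsecond pair posted by UpdateMediaTimeAndRenderingDelay() would typically be turned into a base::TimeDelta plus a base::TimeTicks before being handed to media code. A hedged sketch of that conversion follows; whether CastAudioOutputDevice forwards the values in exactly this form is an assumption, not something this diff shows.

#include <cstdint>

#include "base/time/time.h"

// Illustrative only: convert the posted microsecond values into base types.
struct RenderingDelaySnapshot {
  base::TimeDelta delay;
  base::TimeTicks delay_timestamp;
};

RenderingDelaySnapshot ToRenderingDelaySnapshot(
    int64_t delay_microseconds,
    int64_t delay_timestamp_microseconds) {
  RenderingDelaySnapshot snapshot;
  snapshot.delay = base::Microseconds(delay_microseconds);
  snapshot.delay_timestamp =
      base::TimeTicks() + base::Microseconds(delay_timestamp_microseconds);
  return snapshot;
}
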
@@ -52,8 +52,11 @@ class CmaBackendShim : public CmaBackend::AudioDecoder::Delegate {
virtual void OnBufferPushed() = 0;

// Called when the audio pts changed.
virtual void UpdateMediaTime(int64_t media_timestamp_microseconds,
int64_t reference_timestamp_microseconds) = 0;
virtual void UpdateMediaTimeAndRenderingDelay(
int64_t media_timestamp_microseconds,
int64_t reference_timestamp_microseconds,
int64_t delay_microseconds,
int64_t delay_timestamp_microseconds) = 0;

// Called if an error occurs in audio playback. No more delegate calls will
// be made.
@@ -131,6 +134,7 @@ class CmaBackendShim : public CmaBackend::AudioDecoder::Delegate {
void StopOnMediaThread();
void UpdateAudioConfigOnMediaThread(const CmaBackendParams& params);
bool SetAudioConfig();
void UpdateMediaTimeAndRenderingDelay();

const base::WeakPtr<Delegate> delegate_;
const scoped_refptr<base::SequencedTaskRunner> delegate_task_runner_;