Bug 1628792 - p1: merge exoplayer r2.11.4 codebase. r=agi,geckoview-reviewers

Reference bugs:
- bug 1341990 (original import)
- bug 1523544 (package renaming)
- bug 1457255 (warning fixes)
- bug 1459420 (Firefox ProxySelector support)

Differential Revision: https://phabricator.services.mozilla.com/D78389
This commit is contained in:
John Lin 2020-06-04 21:18:43 +00:00
Родитель 0cc2ef2ac9
Коммит 68b7382249
550 изменённых файлов: 90410 добавлений и 20731 удалений

Просмотреть файл

@ -212,6 +212,13 @@ tasks.withType(org.jetbrains.kotlin.gradle.tasks.KotlinCompile) {
}
dependencies {
// For exoplayer.
implementation "androidx.annotation:annotation:1.1.0"
compileOnly "com.google.code.findbugs:jsr305:3.0.2"
compileOnly "org.checkerframework:checker-compat-qual:2.5.0"
compileOnly "org.checkerframework:checker-qual:2.5.0"
compileOnly "org.jetbrains.kotlin:kotlin-annotations-jvm:1.3.70"
implementation "com.android.support:support-v4:$support_library_version"
implementation "com.android.support:palette-v7:$support_library_version"
implementation "org.yaml:snakeyaml:1.24:android"

Просмотреть файл

@ -0,0 +1,81 @@
/*
* Copyright (C) 2019 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.mozilla.thirdparty.com.google.android.exoplayer2;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;
import android.media.AudioManager;
import android.os.Handler;
/* package */ final class AudioBecomingNoisyManager {

  private final Context context;
  private final AudioBecomingNoisyReceiver receiver;
  private boolean receiverRegistered;

  /** Listener notified when audio output is about to become "noisy" (e.g. headphones unplugged). */
  public interface EventListener {
    void onAudioBecomingNoisy();
  }

  public AudioBecomingNoisyManager(Context context, Handler eventHandler, EventListener listener) {
    // Hold the application context so we never leak an Activity through the receiver.
    this.context = context.getApplicationContext();
    this.receiver = new AudioBecomingNoisyReceiver(eventHandler, listener);
  }

  /**
   * Enables the {@link AudioBecomingNoisyManager} which calls {@link
   * EventListener#onAudioBecomingNoisy()} upon receiving an intent of {@link
   * AudioManager#ACTION_AUDIO_BECOMING_NOISY}.
   *
   * @param enabled True if the listener should be notified when audio is becoming noisy.
   */
  public void setEnabled(boolean enabled) {
    if (enabled == receiverRegistered) {
      // Already in the requested state; registering twice or unregistering an
      // unregistered receiver would throw.
      return;
    }
    if (enabled) {
      IntentFilter filter = new IntentFilter(AudioManager.ACTION_AUDIO_BECOMING_NOISY);
      context.registerReceiver(receiver, filter);
      receiverRegistered = true;
    } else {
      context.unregisterReceiver(receiver);
      receiverRegistered = false;
    }
  }

  /**
   * Receiver that forwards ACTION_AUDIO_BECOMING_NOISY broadcasts to the listener on the event
   * handler's thread. Implements {@link Runnable} so the instance itself can be posted.
   */
  private final class AudioBecomingNoisyReceiver extends BroadcastReceiver implements Runnable {
    private final EventListener listener;
    private final Handler eventHandler;

    public AudioBecomingNoisyReceiver(Handler eventHandler, EventListener listener) {
      this.eventHandler = eventHandler;
      this.listener = listener;
    }

    @Override
    public void onReceive(Context context, Intent intent) {
      String action = intent.getAction();
      if (AudioManager.ACTION_AUDIO_BECOMING_NOISY.equals(action)) {
        // Hop to the player's thread before notifying.
        eventHandler.post(this);
      }
    }

    @Override
    public void run() {
      // The manager may have been disabled between the broadcast and this post;
      // only notify while still registered.
      if (receiverRegistered) {
        listener.onAudioBecomingNoisy();
      }
    }
  }
}

Просмотреть файл

@ -0,0 +1,397 @@
/*
* Copyright (C) 2018 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.mozilla.thirdparty.com.google.android.exoplayer2;
import android.content.Context;
import android.media.AudioFocusRequest;
import android.media.AudioManager;
import android.os.Handler;
import androidx.annotation.IntDef;
import androidx.annotation.Nullable;
import androidx.annotation.RequiresApi;
import androidx.annotation.VisibleForTesting;
import org.mozilla.thirdparty.com.google.android.exoplayer2.audio.AudioAttributes;
import org.mozilla.thirdparty.com.google.android.exoplayer2.util.Assertions;
import org.mozilla.thirdparty.com.google.android.exoplayer2.util.Log;
import org.mozilla.thirdparty.com.google.android.exoplayer2.util.Util;
import java.lang.annotation.Documented;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
/** Manages requesting and responding to changes in audio focus. */
/* package */ final class AudioFocusManager {

  /** Interface to allow AudioFocusManager to give commands to a player. */
  public interface PlayerControl {
    /**
     * Called when the volume multiplier on the player should be changed.
     *
     * @param volumeMultiplier The new volume multiplier.
     */
    void setVolumeMultiplier(float volumeMultiplier);

    /**
     * Called when a command must be executed on the player.
     *
     * @param playerCommand The command that must be executed.
     */
    void executePlayerCommand(@PlayerCommand int playerCommand);
  }

  /**
   * Player commands. One of {@link #PLAYER_COMMAND_DO_NOT_PLAY}, {@link
   * #PLAYER_COMMAND_WAIT_FOR_CALLBACK} or {@link #PLAYER_COMMAND_PLAY_WHEN_READY}.
   */
  @Documented
  @Retention(RetentionPolicy.SOURCE)
  @IntDef({
    PLAYER_COMMAND_DO_NOT_PLAY,
    PLAYER_COMMAND_WAIT_FOR_CALLBACK,
    PLAYER_COMMAND_PLAY_WHEN_READY,
  })
  public @interface PlayerCommand {}

  /** Do not play. */
  public static final int PLAYER_COMMAND_DO_NOT_PLAY = -1;
  /** Do not play now. Wait for callback to play. */
  public static final int PLAYER_COMMAND_WAIT_FOR_CALLBACK = 0;
  /** Play freely. */
  public static final int PLAYER_COMMAND_PLAY_WHEN_READY = 1;

  /** Audio focus state. */
  @Documented
  @Retention(RetentionPolicy.SOURCE)
  @IntDef({
    AUDIO_FOCUS_STATE_NO_FOCUS,
    AUDIO_FOCUS_STATE_HAVE_FOCUS,
    AUDIO_FOCUS_STATE_LOSS_TRANSIENT,
    AUDIO_FOCUS_STATE_LOSS_TRANSIENT_DUCK
  })
  private @interface AudioFocusState {}

  /** No audio focus is currently being held. */
  private static final int AUDIO_FOCUS_STATE_NO_FOCUS = 0;
  /** The requested audio focus is currently held. */
  private static final int AUDIO_FOCUS_STATE_HAVE_FOCUS = 1;
  /** Audio focus has been temporarily lost. */
  private static final int AUDIO_FOCUS_STATE_LOSS_TRANSIENT = 2;
  /** Audio focus has been temporarily lost, but playback may continue with reduced volume. */
  private static final int AUDIO_FOCUS_STATE_LOSS_TRANSIENT_DUCK = 3;

  private static final String TAG = "AudioFocusManager";

  // Volume multiplier applied while ducking; restored to the default when focus is regained.
  private static final float VOLUME_MULTIPLIER_DUCK = 0.2f;
  private static final float VOLUME_MULTIPLIER_DEFAULT = 1.0f;

  private final AudioManager audioManager;
  // Receives platform focus-change callbacks and re-posts them to the player's event handler.
  private final AudioFocusListener focusListener;
  // Null after release(); checked before every callback into the player.
  @Nullable private PlayerControl playerControl;
  // Attributes used to derive the focus gain; null means focus is not managed automatically.
  @Nullable private AudioAttributes audioAttributes;
  @AudioFocusState private int audioFocusState;
  // Focus gain derived from audioAttributes via convertAudioAttributesToFocusGain.
  @C.AudioFocusGain private int focusGain;
  private float volumeMultiplier = VOLUME_MULTIPLIER_DEFAULT;
  // Cached focus request for API 26+; rebuilt lazily when rebuildAudioFocusRequest is set.
  private @MonotonicNonNull AudioFocusRequest audioFocusRequest;
  private boolean rebuildAudioFocusRequest;

  /**
   * Constructs an AudioFocusManager to automatically handle audio focus for a player.
   *
   * @param context The current context.
   * @param eventHandler A {@link Handler} for the thread on which the player is used.
   * @param playerControl A {@link PlayerControl} to handle commands from this instance.
   */
  public AudioFocusManager(Context context, Handler eventHandler, PlayerControl playerControl) {
    this.audioManager =
        (AudioManager) context.getApplicationContext().getSystemService(Context.AUDIO_SERVICE);
    this.playerControl = playerControl;
    this.focusListener = new AudioFocusListener(eventHandler);
    this.audioFocusState = AUDIO_FOCUS_STATE_NO_FOCUS;
  }

  /** Gets the current player volume multiplier. */
  public float getVolumeMultiplier() {
    return volumeMultiplier;
  }

  /**
   * Sets audio attributes that should be used to manage audio focus.
   *
   * <p>Call {@link #updateAudioFocus(boolean, int)} to update the audio focus based on these
   * attributes.
   *
   * @param audioAttributes The audio attributes or {@code null} if audio focus should not be
   *     managed automatically.
   */
  public void setAudioAttributes(@Nullable AudioAttributes audioAttributes) {
    if (!Util.areEqual(this.audioAttributes, audioAttributes)) {
      this.audioAttributes = audioAttributes;
      focusGain = convertAudioAttributesToFocusGain(audioAttributes);
      // Only plain AUDIOFOCUS_GAIN (or no focus handling at all) is supported here; other gains
      // would require transient-focus bookkeeping this class does not implement.
      Assertions.checkArgument(
          focusGain == C.AUDIOFOCUS_GAIN || focusGain == C.AUDIOFOCUS_NONE,
          "Automatic handling of audio focus is only available for USAGE_MEDIA and USAGE_GAME.");
    }
  }

  /**
   * Called by the player to abandon or request audio focus based on the desired player state.
   *
   * @param playWhenReady The desired value of playWhenReady.
   * @param playbackState The desired playback state.
   * @return A {@link PlayerCommand} to execute on the player.
   */
  @PlayerCommand
  public int updateAudioFocus(boolean playWhenReady, @Player.State int playbackState) {
    if (shouldAbandonAudioFocus(playbackState)) {
      abandonAudioFocus();
      return playWhenReady ? PLAYER_COMMAND_PLAY_WHEN_READY : PLAYER_COMMAND_DO_NOT_PLAY;
    }
    return playWhenReady ? requestAudioFocus() : PLAYER_COMMAND_DO_NOT_PLAY;
  }

  /**
   * Called when the manager is no longer required. Audio focus will be released without making any
   * calls to the {@link PlayerControl}.
   */
  public void release() {
    // Clearing playerControl first guarantees no further callbacks reach the player.
    playerControl = null;
    abandonAudioFocus();
  }

  // Internal methods.

  /** Returns the platform focus-change listener, exposed for tests only. */
  @VisibleForTesting
  /* package */ AudioManager.OnAudioFocusChangeListener getFocusListener() {
    return focusListener;
  }

  // Focus is abandoned when idle, or when the attributes don't call for automatic handling.
  private boolean shouldAbandonAudioFocus(@Player.State int playbackState) {
    return playbackState == Player.STATE_IDLE || focusGain != C.AUDIOFOCUS_GAIN;
  }

  /**
   * Requests audio focus from the platform if not already held, and maps the result to a player
   * command.
   */
  @PlayerCommand
  private int requestAudioFocus() {
    if (audioFocusState == AUDIO_FOCUS_STATE_HAVE_FOCUS) {
      return PLAYER_COMMAND_PLAY_WHEN_READY;
    }
    int requestResult = Util.SDK_INT >= 26 ? requestAudioFocusV26() : requestAudioFocusDefault();
    if (requestResult == AudioManager.AUDIOFOCUS_REQUEST_GRANTED) {
      setAudioFocusState(AUDIO_FOCUS_STATE_HAVE_FOCUS);
      return PLAYER_COMMAND_PLAY_WHEN_READY;
    } else {
      setAudioFocusState(AUDIO_FOCUS_STATE_NO_FOCUS);
      return PLAYER_COMMAND_DO_NOT_PLAY;
    }
  }

  /** Abandons audio focus with the platform, if any is currently held. */
  private void abandonAudioFocus() {
    if (audioFocusState == AUDIO_FOCUS_STATE_NO_FOCUS) {
      return;
    }
    if (Util.SDK_INT >= 26) {
      abandonAudioFocusV26();
    } else {
      abandonAudioFocusDefault();
    }
    setAudioFocusState(AUDIO_FOCUS_STATE_NO_FOCUS);
  }

  // Pre-API-26 focus request using the deprecated stream-type based API.
  private int requestAudioFocusDefault() {
    return audioManager.requestAudioFocus(
        focusListener,
        Util.getStreamTypeForAudioUsage(Assertions.checkNotNull(audioAttributes).usage),
        focusGain);
  }

  // API-26+ focus request. The AudioFocusRequest is cached and only rebuilt when the
  // attributes (and hence willPauseWhenDucked) may have changed.
  @RequiresApi(26)
  private int requestAudioFocusV26() {
    if (audioFocusRequest == null || rebuildAudioFocusRequest) {
      AudioFocusRequest.Builder builder =
          audioFocusRequest == null
              ? new AudioFocusRequest.Builder(focusGain)
              : new AudioFocusRequest.Builder(audioFocusRequest);
      boolean willPauseWhenDucked = willPauseWhenDucked();
      audioFocusRequest =
          builder
              .setAudioAttributes(Assertions.checkNotNull(audioAttributes).getAudioAttributesV21())
              .setWillPauseWhenDucked(willPauseWhenDucked)
              .setOnAudioFocusChangeListener(focusListener)
              .build();
      rebuildAudioFocusRequest = false;
    }
    return audioManager.requestAudioFocus(audioFocusRequest);
  }

  private void abandonAudioFocusDefault() {
    audioManager.abandonAudioFocus(focusListener);
  }

  @RequiresApi(26)
  private void abandonAudioFocusV26() {
    if (audioFocusRequest != null) {
      audioManager.abandonAudioFocusRequest(audioFocusRequest);
    }
  }

  // Speech content would be unintelligible at duck volume, so pause instead of ducking.
  private boolean willPauseWhenDucked() {
    return audioAttributes != null && audioAttributes.contentType == C.CONTENT_TYPE_SPEECH;
  }

  /**
   * Converts {@link AudioAttributes} to one of the audio focus request.
   *
   * <p>This follows the class Javadoc of {@link AudioFocusRequest}.
   *
   * @param audioAttributes The audio attributes associated with this focus request.
   * @return The type of audio focus gain that should be requested.
   */
  @C.AudioFocusGain
  private static int convertAudioAttributesToFocusGain(@Nullable AudioAttributes audioAttributes) {
    if (audioAttributes == null) {
      // Don't handle audio focus. It may be either video only contents or developers
      // want to have more finer grained control. (e.g. adding audio focus listener)
      return C.AUDIOFOCUS_NONE;
    }
    switch (audioAttributes.usage) {
        // USAGE_VOICE_COMMUNICATION_SIGNALLING is for DTMF that may happen multiple times
        // during the phone call when AUDIOFOCUS_GAIN_TRANSIENT is requested for that.
        // Don't request audio focus here.
      case C.USAGE_VOICE_COMMUNICATION_SIGNALLING:
        return C.AUDIOFOCUS_NONE;
        // Javadoc says 'AUDIOFOCUS_GAIN: Examples of uses of this focus gain are for music
        // playback, for a game or a video player'
      case C.USAGE_GAME:
      case C.USAGE_MEDIA:
        return C.AUDIOFOCUS_GAIN;
        // Special usages: USAGE_UNKNOWN shouldn't be used. Request audio focus to prevent
        // multiple media playback happen at the same time.
      case C.USAGE_UNKNOWN:
        Log.w(
            TAG,
            "Specify a proper usage in the audio attributes for audio focus"
                + " handling. Using AUDIOFOCUS_GAIN by default.");
        return C.AUDIOFOCUS_GAIN;
        // Javadoc says 'AUDIOFOCUS_GAIN_TRANSIENT: An example is for playing an alarm, or
        // during a VoIP call'
      case C.USAGE_ALARM:
      case C.USAGE_VOICE_COMMUNICATION:
        return C.AUDIOFOCUS_GAIN_TRANSIENT;
        // Javadoc says 'AUDIOFOCUS_GAIN_TRANSIENT_MAY_DUCK: Examples are when playing
        // driving directions or notifications'
      case C.USAGE_ASSISTANCE_NAVIGATION_GUIDANCE:
      case C.USAGE_ASSISTANCE_SONIFICATION:
      case C.USAGE_NOTIFICATION:
      case C.USAGE_NOTIFICATION_COMMUNICATION_DELAYED:
      case C.USAGE_NOTIFICATION_COMMUNICATION_INSTANT:
      case C.USAGE_NOTIFICATION_COMMUNICATION_REQUEST:
      case C.USAGE_NOTIFICATION_EVENT:
      case C.USAGE_NOTIFICATION_RINGTONE:
        return C.AUDIOFOCUS_GAIN_TRANSIENT_MAY_DUCK;
        // Javadoc says 'AUDIOFOCUS_GAIN_EXCLUSIVE: This is typically used if you are doing
        // audio recording or speech recognition'.
        // Assistant is considered as both recording and notifying developer
      case C.USAGE_ASSISTANT:
        if (Util.SDK_INT >= 19) {
          return C.AUDIOFOCUS_GAIN_TRANSIENT_EXCLUSIVE;
        } else {
          return C.AUDIOFOCUS_GAIN_TRANSIENT;
        }
        // Special usages:
      case C.USAGE_ASSISTANCE_ACCESSIBILITY:
        if (audioAttributes.contentType == C.CONTENT_TYPE_SPEECH) {
          // Voice shouldn't be interrupted by other playback.
          return C.AUDIOFOCUS_GAIN_TRANSIENT;
        }
        return C.AUDIOFOCUS_GAIN_TRANSIENT_MAY_DUCK;
      default:
        Log.w(TAG, "Unidentified audio usage: " + audioAttributes.usage);
        return C.AUDIOFOCUS_NONE;
    }
  }

  /**
   * Transitions to a new focus state and, if the implied volume multiplier changed, pushes the new
   * multiplier to the player.
   */
  private void setAudioFocusState(@AudioFocusState int audioFocusState) {
    if (this.audioFocusState == audioFocusState) {
      return;
    }
    this.audioFocusState = audioFocusState;
    // Duck only while in the LOSS_TRANSIENT_DUCK state; full volume otherwise.
    float volumeMultiplier =
        (audioFocusState == AUDIO_FOCUS_STATE_LOSS_TRANSIENT_DUCK)
            ? AudioFocusManager.VOLUME_MULTIPLIER_DUCK
            : AudioFocusManager.VOLUME_MULTIPLIER_DEFAULT;
    if (this.volumeMultiplier == volumeMultiplier) {
      return;
    }
    this.volumeMultiplier = volumeMultiplier;
    if (playerControl != null) {
      playerControl.setVolumeMultiplier(volumeMultiplier);
    }
  }

  /**
   * Maps a platform focus-change callback to state transitions and player commands. Runs on the
   * player's event handler thread (posted by {@link AudioFocusListener}).
   *
   * <p>NOTE(review): the order of executePlayerCommand vs. setAudioFocusState differs per branch
   * in the original and is preserved here — e.g. for LOSS the command is issued before focus is
   * abandoned; do not reorder.
   */
  private void handlePlatformAudioFocusChange(int focusChange) {
    switch (focusChange) {
      case AudioManager.AUDIOFOCUS_GAIN:
        setAudioFocusState(AUDIO_FOCUS_STATE_HAVE_FOCUS);
        executePlayerCommand(PLAYER_COMMAND_PLAY_WHEN_READY);
        return;
      case AudioManager.AUDIOFOCUS_LOSS:
        executePlayerCommand(PLAYER_COMMAND_DO_NOT_PLAY);
        abandonAudioFocus();
        return;
      case AudioManager.AUDIOFOCUS_LOSS_TRANSIENT:
      case AudioManager.AUDIOFOCUS_LOSS_TRANSIENT_CAN_DUCK:
        if (focusChange == AudioManager.AUDIOFOCUS_LOSS_TRANSIENT || willPauseWhenDucked()) {
          // Either a hard transient loss, or a duck we must treat as a pause (speech content).
          executePlayerCommand(PLAYER_COMMAND_WAIT_FOR_CALLBACK);
          setAudioFocusState(AUDIO_FOCUS_STATE_LOSS_TRANSIENT);
        } else {
          setAudioFocusState(AUDIO_FOCUS_STATE_LOSS_TRANSIENT_DUCK);
        }
        return;
      default:
        Log.w(TAG, "Unknown focus change type: " + focusChange);
    }
  }

  // Forwards a command to the player, unless release() has already been called.
  private void executePlayerCommand(@PlayerCommand int playerCommand) {
    if (playerControl != null) {
      playerControl.executePlayerCommand(playerCommand);
    }
  }

  // Internal audio focus listener.
  private class AudioFocusListener implements AudioManager.OnAudioFocusChangeListener {
    private final Handler eventHandler;

    public AudioFocusListener(Handler eventHandler) {
      this.eventHandler = eventHandler;
    }

    @Override
    public void onAudioFocusChange(int focusChange) {
      // Platform callbacks may arrive on any thread; re-post to the player's thread.
      eventHandler.post(() -> handlePlatformAudioFocusChange(focusChange));
    }
  }
}

Просмотреть файл

@ -0,0 +1,209 @@
/*
* Copyright (C) 2018 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.mozilla.thirdparty.com.google.android.exoplayer2;
import androidx.annotation.Nullable;
import org.mozilla.thirdparty.com.google.android.exoplayer2.util.Util;
/** Abstract base {@link Player} which implements common, implementation-independent methods. */
public abstract class BasePlayer implements Player {

  /** Reusable window to avoid allocating a new {@link Timeline.Window} per timeline query. */
  protected final Timeline.Window window;

  public BasePlayer() {
    window = new Timeline.Window();
  }

  @Override
  public final boolean isPlaying() {
    // Playing requires the player to be ready, playWhenReady set, and playback not suppressed.
    return getPlaybackState() == Player.STATE_READY
        && getPlayWhenReady()
        && getPlaybackSuppressionReason() == PLAYBACK_SUPPRESSION_REASON_NONE;
  }

  @Override
  public final void seekToDefaultPosition() {
    seekToDefaultPosition(getCurrentWindowIndex());
  }

  @Override
  public final void seekToDefaultPosition(int windowIndex) {
    seekTo(windowIndex, /* positionMs= */ C.TIME_UNSET);
  }

  @Override
  public final void seekTo(long positionMs) {
    seekTo(getCurrentWindowIndex(), positionMs);
  }

  @Override
  public final boolean hasPrevious() {
    return getPreviousWindowIndex() != C.INDEX_UNSET;
  }

  @Override
  public final void previous() {
    int previousWindowIndex = getPreviousWindowIndex();
    if (previousWindowIndex != C.INDEX_UNSET) {
      seekToDefaultPosition(previousWindowIndex);
    }
  }

  @Override
  public final boolean hasNext() {
    return getNextWindowIndex() != C.INDEX_UNSET;
  }

  @Override
  public final void next() {
    int nextWindowIndex = getNextWindowIndex();
    if (nextWindowIndex != C.INDEX_UNSET) {
      seekToDefaultPosition(nextWindowIndex);
    }
  }

  @Override
  public final void stop() {
    stop(/* reset= */ false);
  }

  @Override
  public final int getNextWindowIndex() {
    Timeline timeline = getCurrentTimeline();
    return timeline.isEmpty()
        ? C.INDEX_UNSET
        : timeline.getNextWindowIndex(
            getCurrentWindowIndex(), getRepeatModeForNavigation(), getShuffleModeEnabled());
  }

  @Override
  public final int getPreviousWindowIndex() {
    Timeline timeline = getCurrentTimeline();
    return timeline.isEmpty()
        ? C.INDEX_UNSET
        : timeline.getPreviousWindowIndex(
            getCurrentWindowIndex(), getRepeatModeForNavigation(), getShuffleModeEnabled());
  }

  @Override
  @Nullable
  public final Object getCurrentTag() {
    Timeline timeline = getCurrentTimeline();
    return timeline.isEmpty() ? null : timeline.getWindow(getCurrentWindowIndex(), window).tag;
  }

  @Override
  @Nullable
  public final Object getCurrentManifest() {
    Timeline timeline = getCurrentTimeline();
    return timeline.isEmpty() ? null : timeline.getWindow(getCurrentWindowIndex(), window).manifest;
  }

  @Override
  public final int getBufferedPercentage() {
    long position = getBufferedPosition();
    long duration = getDuration();
    // Unknown position/duration maps to 0%; the result is clamped to [0, 100].
    return position == C.TIME_UNSET || duration == C.TIME_UNSET
        ? 0
        : duration == 0 ? 100 : Util.constrainValue((int) ((position * 100) / duration), 0, 100);
  }

  @Override
  public final boolean isCurrentWindowDynamic() {
    Timeline timeline = getCurrentTimeline();
    return !timeline.isEmpty() && timeline.getWindow(getCurrentWindowIndex(), window).isDynamic;
  }

  @Override
  public final boolean isCurrentWindowLive() {
    Timeline timeline = getCurrentTimeline();
    return !timeline.isEmpty() && timeline.getWindow(getCurrentWindowIndex(), window).isLive;
  }

  @Override
  public final boolean isCurrentWindowSeekable() {
    Timeline timeline = getCurrentTimeline();
    return !timeline.isEmpty() && timeline.getWindow(getCurrentWindowIndex(), window).isSeekable;
  }

  @Override
  public final long getContentDuration() {
    Timeline timeline = getCurrentTimeline();
    return timeline.isEmpty()
        ? C.TIME_UNSET
        : timeline.getWindow(getCurrentWindowIndex(), window).getDurationMs();
  }

  /**
   * Returns the repeat mode to use for previous/next navigation. {@link #REPEAT_MODE_ONE} is
   * treated as {@link #REPEAT_MODE_OFF} so that next/previous move between windows rather than
   * staying on the current one.
   */
  @RepeatMode
  private int getRepeatModeForNavigation() {
    @RepeatMode int repeatMode = getRepeatMode();
    return repeatMode == REPEAT_MODE_ONE ? REPEAT_MODE_OFF : repeatMode;
  }

  /** Holds a listener reference. */
  protected static final class ListenerHolder {

    /**
     * The listener on which {@link #invoke} will execute {@link ListenerInvocation listener
     * invocations}.
     */
    public final Player.EventListener listener;

    private boolean released;

    public ListenerHolder(Player.EventListener listener) {
      this.listener = listener;
    }

    /** Prevents any further {@link ListenerInvocation} to be executed on {@link #listener}. */
    public void release() {
      released = true;
    }

    /**
     * Executes the given {@link ListenerInvocation} on {@link #listener}. Does nothing if {@link
     * #release} has been called on this instance.
     */
    public void invoke(ListenerInvocation listenerInvocation) {
      if (!released) {
        listenerInvocation.invokeListener(listener);
      }
    }

    @Override
    public boolean equals(@Nullable Object other) {
      if (this == other) {
        return true;
      }
      if (other == null || getClass() != other.getClass()) {
        return false;
      }
      // Holders compare (and hash) by the wrapped listener, so a listener can be
      // added/removed by identity of its holder set entry.
      return listener.equals(((ListenerHolder) other).listener);
    }

    @Override
    public int hashCode() {
      return listener.hashCode();
    }
  }

  /** Parameterized invocation of a {@link Player.EventListener} method. */
  protected interface ListenerInvocation {

    /** Executes the invocation on the given {@link Player.EventListener}. */
    void invokeListener(Player.EventListener listener);
  }
}

Просмотреть файл

@ -15,10 +15,17 @@
*/
package org.mozilla.thirdparty.com.google.android.exoplayer2;
import android.os.Looper;
import androidx.annotation.Nullable;
import org.mozilla.thirdparty.com.google.android.exoplayer2.decoder.DecoderInputBuffer;
import org.mozilla.thirdparty.com.google.android.exoplayer2.drm.DrmInitData;
import org.mozilla.thirdparty.com.google.android.exoplayer2.drm.DrmSession;
import org.mozilla.thirdparty.com.google.android.exoplayer2.drm.DrmSessionManager;
import org.mozilla.thirdparty.com.google.android.exoplayer2.drm.ExoMediaCrypto;
import org.mozilla.thirdparty.com.google.android.exoplayer2.source.SampleStream;
import org.mozilla.thirdparty.com.google.android.exoplayer2.util.Assertions;
import org.mozilla.thirdparty.com.google.android.exoplayer2.util.MediaClock;
import org.mozilla.thirdparty.com.google.android.exoplayer2.util.Util;
import java.io.IOException;
/**
@ -27,14 +34,17 @@ import java.io.IOException;
public abstract class BaseRenderer implements Renderer, RendererCapabilities {
private final int trackType;
private final FormatHolder formatHolder;
private RendererConfiguration configuration;
private int index;
private int state;
private SampleStream stream;
private Format[] streamFormats;
private long streamOffsetUs;
private boolean readEndOfStream;
private long readingPositionUs;
private boolean streamIsFinal;
private boolean throwRendererExceptionIsExecuting;
/**
* @param trackType The track type that the renderer handles. One of the {@link C}
@ -42,7 +52,8 @@ public abstract class BaseRenderer implements Renderer, RendererCapabilities {
*/
public BaseRenderer(int trackType) {
this.trackType = trackType;
readEndOfStream = true;
formatHolder = new FormatHolder();
readingPositionUs = C.TIME_END_OF_SOURCE;
}
@Override
@ -61,6 +72,7 @@ public abstract class BaseRenderer implements Renderer, RendererCapabilities {
}
@Override
@Nullable
public MediaClock getMediaClock() {
return null;
}
@ -94,19 +106,26 @@ public abstract class BaseRenderer implements Renderer, RendererCapabilities {
throws ExoPlaybackException {
Assertions.checkState(!streamIsFinal);
this.stream = stream;
readEndOfStream = false;
readingPositionUs = offsetUs;
streamFormats = formats;
streamOffsetUs = offsetUs;
onStreamChanged(formats);
onStreamChanged(formats, offsetUs);
}
@Override
@Nullable
public final SampleStream getStream() {
return stream;
}
@Override
public final boolean hasReadStreamToEnd() {
return readEndOfStream;
return readingPositionUs == C.TIME_END_OF_SOURCE;
}
@Override
public final long getReadingPositionUs() {
return readingPositionUs;
}
@Override
@ -127,7 +146,7 @@ public abstract class BaseRenderer implements Renderer, RendererCapabilities {
@Override
public final void resetPosition(long positionUs) throws ExoPlaybackException {
streamIsFinal = false;
readEndOfStream = false;
readingPositionUs = positionUs;
onPositionReset(positionUs, false);
}
@ -141,23 +160,33 @@ public abstract class BaseRenderer implements Renderer, RendererCapabilities {
@Override
public final void disable() {
Assertions.checkState(state == STATE_ENABLED);
formatHolder.clear();
state = STATE_DISABLED;
onDisabled();
stream = null;
streamFormats = null;
streamIsFinal = false;
onDisabled();
}
@Override
public final void reset() {
Assertions.checkState(state == STATE_DISABLED);
formatHolder.clear();
onReset();
}
// RendererCapabilities implementation.
@Override
@AdaptiveSupport
public int supportsMixedMimeTypeAdaptation() throws ExoPlaybackException {
return ADAPTIVE_NOT_SUPPORTED;
}
// ExoPlayerComponent implementation.
// PlayerMessage.Target implementation.
@Override
public void handleMessage(int what, Object object) throws ExoPlaybackException {
public void handleMessage(int what, @Nullable Object object) throws ExoPlaybackException {
// Do nothing.
}
@ -183,16 +212,19 @@ public abstract class BaseRenderer implements Renderer, RendererCapabilities {
* The default implementation is a no-op.
*
* @param formats The enabled formats.
* @param offsetUs The offset that will be added to the timestamps of buffers read via
* {@link #readSource(FormatHolder, DecoderInputBuffer, boolean)} so that decoder input
* buffers have monotonically increasing timestamps.
* @throws ExoPlaybackException If an error occurs.
*/
protected void onStreamChanged(Format[] formats) throws ExoPlaybackException {
protected void onStreamChanged(Format[] formats, long offsetUs) throws ExoPlaybackException {
// Do nothing.
}
/**
* Called when the position is reset. This occurs when the renderer is enabled after
* {@link #onStreamChanged(Format[])} has been called, and also when a position discontinuity
* is encountered.
* {@link #onStreamChanged(Format[], long)} has been called, and also when a position
* discontinuity is encountered.
* <p>
* After a position reset, the renderer's {@link SampleStream} is guaranteed to provide samples
* starting from a key frame.
@ -238,8 +270,28 @@ public abstract class BaseRenderer implements Renderer, RendererCapabilities {
// Do nothing.
}
/**
* Called when the renderer is reset.
*
* <p>The default implementation is a no-op.
*/
protected void onReset() {
// Do nothing.
}
// Methods to be called by subclasses.
/** Returns a clear {@link FormatHolder}. */
protected final FormatHolder getFormatHolder() {
formatHolder.clear();
return formatHolder;
}
/** Returns the formats of the currently enabled stream. */
protected final Format[] getStreamFormats() {
return streamFormats;
}
/**
* Returns the configuration set when the renderer was most recently enabled.
*/
@ -247,6 +299,35 @@ public abstract class BaseRenderer implements Renderer, RendererCapabilities {
return configuration;
}
/** Returns a {@link DrmSession} ready for assignment, handling resource management. */
@Nullable
protected final <T extends ExoMediaCrypto> DrmSession<T> getUpdatedSourceDrmSession(
@Nullable Format oldFormat,
Format newFormat,
@Nullable DrmSessionManager<T> drmSessionManager,
@Nullable DrmSession<T> existingSourceSession)
throws ExoPlaybackException {
boolean drmInitDataChanged =
!Util.areEqual(newFormat.drmInitData, oldFormat == null ? null : oldFormat.drmInitData);
if (!drmInitDataChanged) {
return existingSourceSession;
}
@Nullable DrmSession<T> newSourceDrmSession = null;
if (newFormat.drmInitData != null) {
if (drmSessionManager == null) {
throw createRendererException(
new IllegalStateException("Media requires a DrmSessionManager"), newFormat);
}
newSourceDrmSession =
drmSessionManager.acquireSession(
Assertions.checkNotNull(Looper.myLooper()), newFormat.drmInitData);
}
if (existingSourceSession != null) {
existingSourceSession.release();
}
return newSourceDrmSession;
}
/**
* Returns the index of the renderer within the player.
*/
@ -254,6 +335,30 @@ public abstract class BaseRenderer implements Renderer, RendererCapabilities {
return index;
}
/**
* Creates an {@link ExoPlaybackException} of type {@link ExoPlaybackException#TYPE_RENDERER} for
* this renderer.
*
* @param cause The cause of the exception.
* @param format The current format used by the renderer. May be null.
*/
protected final ExoPlaybackException createRendererException(
Exception cause, @Nullable Format format) {
@FormatSupport int formatSupport = RendererCapabilities.FORMAT_HANDLED;
if (format != null && !throwRendererExceptionIsExecuting) {
// Prevent recursive re-entry from subclass supportsFormat implementations.
throwRendererExceptionIsExecuting = true;
try {
formatSupport = RendererCapabilities.getFormatSupport(supportsFormat(format));
} catch (ExoPlaybackException e) {
// Ignore, we are already failing.
} finally {
throwRendererExceptionIsExecuting = false;
}
}
return ExoPlaybackException.createForRenderer(cause, getIndex(), format, formatSupport);
}
/**
* Reads from the enabled upstream source. If the upstream source has been read to the end then
* {@link C#RESULT_BUFFER_READ} is only returned if {@link #setCurrentStreamFinal()} has been
@ -261,23 +366,24 @@ public abstract class BaseRenderer implements Renderer, RendererCapabilities {
*
* @param formatHolder A {@link FormatHolder} to populate in the case of reading a format.
* @param buffer A {@link DecoderInputBuffer} to populate in the case of reading a sample or the
* end of the stream. If the end of the stream has been reached, the
* {@link C#BUFFER_FLAG_END_OF_STREAM} flag will be set on the buffer.
* end of the stream. If the end of the stream has been reached, the {@link
* C#BUFFER_FLAG_END_OF_STREAM} flag will be set on the buffer.
* @param formatRequired Whether the caller requires that the format of the stream be read even if
* it's not changing. A sample will never be read if set to true, however it is still possible
* for the end of stream or nothing to be read.
* @return The result, which can be {@link C#RESULT_NOTHING_READ}, {@link C#RESULT_FORMAT_READ} or
* {@link C#RESULT_BUFFER_READ}.
*/
protected final int readSource(FormatHolder formatHolder, DecoderInputBuffer buffer,
boolean formatRequired) {
protected final int readSource(
FormatHolder formatHolder, DecoderInputBuffer buffer, boolean formatRequired) {
int result = stream.readData(formatHolder, buffer, formatRequired);
if (result == C.RESULT_BUFFER_READ) {
if (buffer.isEndOfStream()) {
readEndOfStream = true;
readingPositionUs = C.TIME_END_OF_SOURCE;
return streamIsFinal ? C.RESULT_BUFFER_READ : C.RESULT_NOTHING_READ;
}
buffer.timeUs += streamOffsetUs;
readingPositionUs = Math.max(readingPositionUs, buffer.timeUs);
} else if (result == C.RESULT_FORMAT_READ) {
Format format = formatHolder.format;
if (format.subsampleOffsetUs != Format.OFFSET_SAMPLE_RELATIVE) {
@ -293,18 +399,38 @@ public abstract class BaseRenderer implements Renderer, RendererCapabilities {
* {@code positionUs} is beyond it.
*
* @param positionUs The position in microseconds.
* @return The number of samples that were skipped.
*/
protected void skipSource(long positionUs) {
stream.skipData(positionUs - streamOffsetUs);
protected int skipSource(long positionUs) {
return stream.skipData(positionUs - streamOffsetUs);
}
/**
* Returns whether the upstream source is ready.
*
* @return Whether the source is ready.
*/
protected final boolean isSourceReady() {
return readEndOfStream ? streamIsFinal : stream.isReady();
return hasReadStreamToEnd() ? streamIsFinal : stream.isReady();
}
/**
 * Returns whether the {@link DrmSessionManager} supports the {@link DrmInitData} of a format, or
 * {@code true} if the format carries no DRM scheme data at all.
 *
 * @param drmSessionManager The DRM session manager, or null if none is available.
 * @param drmInitData The {@link DrmInitData} of the format to check, or null for clear content.
 * @return Whether content described by {@code drmInitData} can be played.
 */
protected static boolean supportsFormatDrm(@Nullable DrmSessionManager<?> drmSessionManager,
    @Nullable DrmInitData drmInitData) {
  if (drmInitData == null) {
    // No scheme data: the content is clear and always supported.
    return true;
  }
  // Encrypted content is supported only when a session manager exists and reports that it can
  // acquire a session for this scheme data.
  return drmSessionManager != null && drmSessionManager.canAcquireSession(drmInitData);
}
}

Разница между файлами не показана из-за своего большого размера Загрузить разницу

Просмотреть файл

@ -0,0 +1,75 @@
/*
* Copyright (C) 2017 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.mozilla.thirdparty.com.google.android.exoplayer2;
import org.mozilla.thirdparty.com.google.android.exoplayer2.Player.RepeatMode;
/**
 * Dispatches operations to the {@link Player}.
 *
 * <p>Implementations may choose to suppress an operation (e.g. prevent playback from resuming if
 * audio focus is denied) or to modify it (e.g. change the seek position to prevent a user from
 * seeking past a non-skippable advert) before it reaches the player. Each method reports via its
 * return value whether the operation was actually dispatched.
 */
public interface ControlDispatcher {
  /**
   * Dispatches a {@link Player#setPlayWhenReady(boolean)} operation.
   *
   * @param player The {@link Player} to which the operation should be dispatched.
   * @param playWhenReady Whether playback should proceed when ready.
   * @return True if the operation was dispatched. False if suppressed.
   */
  boolean dispatchSetPlayWhenReady(Player player, boolean playWhenReady);
  /**
   * Dispatches a {@link Player#seekTo(int, long)} operation.
   *
   * @param player The {@link Player} to which the operation should be dispatched.
   * @param windowIndex The index of the window.
   * @param positionMs The seek position in the specified window, or {@link C#TIME_UNSET} to seek to
   *     the window's default position.
   * @return True if the operation was dispatched. False if suppressed.
   */
  boolean dispatchSeekTo(Player player, int windowIndex, long positionMs);
  /**
   * Dispatches a {@link Player#setRepeatMode(int)} operation.
   *
   * @param player The {@link Player} to which the operation should be dispatched.
   * @param repeatMode The repeat mode.
   * @return True if the operation was dispatched. False if suppressed.
   */
  boolean dispatchSetRepeatMode(Player player, @RepeatMode int repeatMode);
  /**
   * Dispatches a {@link Player#setShuffleModeEnabled(boolean)} operation.
   *
   * @param player The {@link Player} to which the operation should be dispatched.
   * @param shuffleModeEnabled Whether shuffling is enabled.
   * @return True if the operation was dispatched. False if suppressed.
   */
  boolean dispatchSetShuffleModeEnabled(Player player, boolean shuffleModeEnabled);
  /**
   * Dispatches a {@link Player#stop()} operation.
   *
   * @param player The {@link Player} to which the operation should be dispatched.
   * @param reset Whether the player should be reset.
   * @return True if the operation was dispatched. False if suppressed.
   */
  boolean dispatchStop(Player player, boolean reset);
}

Просмотреть файл

@ -0,0 +1,55 @@
/*
* Copyright (C) 2017 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.mozilla.thirdparty.com.google.android.exoplayer2;
import org.mozilla.thirdparty.com.google.android.exoplayer2.Player.RepeatMode;
/**
 * Default {@link ControlDispatcher} that dispatches all operations to the player without
 * modification. Because nothing is ever suppressed, every dispatch method forwards the call
 * verbatim and returns {@code true}.
 */
public class DefaultControlDispatcher implements ControlDispatcher {
  @Override
  public boolean dispatchSetPlayWhenReady(Player player, boolean playWhenReady) {
    player.setPlayWhenReady(playWhenReady);
    return true;
  }
  @Override
  public boolean dispatchSeekTo(Player player, int windowIndex, long positionMs) {
    player.seekTo(windowIndex, positionMs);
    return true;
  }
  @Override
  public boolean dispatchSetRepeatMode(Player player, @RepeatMode int repeatMode) {
    player.setRepeatMode(repeatMode);
    return true;
  }
  @Override
  public boolean dispatchSetShuffleModeEnabled(Player player, boolean shuffleModeEnabled) {
    player.setShuffleModeEnabled(shuffleModeEnabled);
    return true;
  }
  @Override
  public boolean dispatchStop(Player player, boolean reset) {
    player.stop(reset);
    return true;
  }
}

Просмотреть файл

@ -19,24 +19,26 @@ import org.mozilla.thirdparty.com.google.android.exoplayer2.source.TrackGroupArr
import org.mozilla.thirdparty.com.google.android.exoplayer2.trackselection.TrackSelectionArray;
import org.mozilla.thirdparty.com.google.android.exoplayer2.upstream.Allocator;
import org.mozilla.thirdparty.com.google.android.exoplayer2.upstream.DefaultAllocator;
import org.mozilla.thirdparty.com.google.android.exoplayer2.util.PriorityTaskManager;
import org.mozilla.thirdparty.com.google.android.exoplayer2.util.Assertions;
import org.mozilla.thirdparty.com.google.android.exoplayer2.util.Util;
/**
* The default {@link LoadControl} implementation.
*/
public final class DefaultLoadControl implements LoadControl {
public class DefaultLoadControl implements LoadControl {
/**
* The default minimum duration of media that the player will attempt to ensure is buffered at all
* times, in milliseconds.
* times, in milliseconds. This value is only applied to playbacks without video.
*/
public static final int DEFAULT_MIN_BUFFER_MS = 15000;
/**
* The default maximum duration of media that the player will attempt to buffer, in milliseconds.
* For playbacks with video, this is also the default minimum duration of media that the player
* will attempt to ensure is buffered.
*/
public static final int DEFAULT_MAX_BUFFER_MS = 30000;
public static final int DEFAULT_MAX_BUFFER_MS = 50000;
/**
* The default duration of media that must be buffered for playback to start or resume following a
@ -45,90 +47,296 @@ public final class DefaultLoadControl implements LoadControl {
public static final int DEFAULT_BUFFER_FOR_PLAYBACK_MS = 2500;
/**
* The default duration of media that must be buffered for playback to resume after a rebuffer,
* in milliseconds. A rebuffer is defined to be caused by buffer depletion rather than a user
* action.
* The default duration of media that must be buffered for playback to resume after a rebuffer, in
* milliseconds. A rebuffer is defined to be caused by buffer depletion rather than a user action.
*/
public static final int DEFAULT_BUFFER_FOR_PLAYBACK_AFTER_REBUFFER_MS = 5000;
public static final int DEFAULT_BUFFER_FOR_PLAYBACK_AFTER_REBUFFER_MS = 5000;
private static final int ABOVE_HIGH_WATERMARK = 0;
private static final int BETWEEN_WATERMARKS = 1;
private static final int BELOW_LOW_WATERMARK = 2;
/**
* The default target buffer size in bytes. The value ({@link C#LENGTH_UNSET}) means that the load
* control will calculate the target buffer size based on the selected tracks.
*/
public static final int DEFAULT_TARGET_BUFFER_BYTES = C.LENGTH_UNSET;
/** The default prioritization of buffer time constraints over size constraints. */
public static final boolean DEFAULT_PRIORITIZE_TIME_OVER_SIZE_THRESHOLDS = true;
/** The default back buffer duration in milliseconds. */
public static final int DEFAULT_BACK_BUFFER_DURATION_MS = 0;
/** The default for whether the back buffer is retained from the previous keyframe. */
public static final boolean DEFAULT_RETAIN_BACK_BUFFER_FROM_KEYFRAME = false;
/** A default size in bytes for a video buffer. */
public static final int DEFAULT_VIDEO_BUFFER_SIZE = 500 * C.DEFAULT_BUFFER_SEGMENT_SIZE;
/** A default size in bytes for an audio buffer. */
public static final int DEFAULT_AUDIO_BUFFER_SIZE = 54 * C.DEFAULT_BUFFER_SEGMENT_SIZE;
/** A default size in bytes for a text buffer. */
public static final int DEFAULT_TEXT_BUFFER_SIZE = 2 * C.DEFAULT_BUFFER_SEGMENT_SIZE;
/** A default size in bytes for a metadata buffer. */
public static final int DEFAULT_METADATA_BUFFER_SIZE = 2 * C.DEFAULT_BUFFER_SEGMENT_SIZE;
/** A default size in bytes for a camera motion buffer. */
public static final int DEFAULT_CAMERA_MOTION_BUFFER_SIZE = 2 * C.DEFAULT_BUFFER_SEGMENT_SIZE;
/** A default size in bytes for a muxed buffer (e.g. containing video, audio and text). */
public static final int DEFAULT_MUXED_BUFFER_SIZE =
DEFAULT_VIDEO_BUFFER_SIZE + DEFAULT_AUDIO_BUFFER_SIZE + DEFAULT_TEXT_BUFFER_SIZE;
/** Builder for {@link DefaultLoadControl}. */
public static final class Builder {
private DefaultAllocator allocator;
private int minBufferAudioMs;
private int minBufferVideoMs;
private int maxBufferMs;
private int bufferForPlaybackMs;
private int bufferForPlaybackAfterRebufferMs;
private int targetBufferBytes;
private boolean prioritizeTimeOverSizeThresholds;
private int backBufferDurationMs;
private boolean retainBackBufferFromKeyframe;
private boolean createDefaultLoadControlCalled;
/** Constructs a new instance. */
public Builder() {
minBufferAudioMs = DEFAULT_MIN_BUFFER_MS;
minBufferVideoMs = DEFAULT_MAX_BUFFER_MS;
maxBufferMs = DEFAULT_MAX_BUFFER_MS;
bufferForPlaybackMs = DEFAULT_BUFFER_FOR_PLAYBACK_MS;
bufferForPlaybackAfterRebufferMs = DEFAULT_BUFFER_FOR_PLAYBACK_AFTER_REBUFFER_MS;
targetBufferBytes = DEFAULT_TARGET_BUFFER_BYTES;
prioritizeTimeOverSizeThresholds = DEFAULT_PRIORITIZE_TIME_OVER_SIZE_THRESHOLDS;
backBufferDurationMs = DEFAULT_BACK_BUFFER_DURATION_MS;
retainBackBufferFromKeyframe = DEFAULT_RETAIN_BACK_BUFFER_FROM_KEYFRAME;
}
/**
* Sets the {@link DefaultAllocator} used by the loader.
*
* @param allocator The {@link DefaultAllocator}.
* @return This builder, for convenience.
* @throws IllegalStateException If {@link #createDefaultLoadControl()} has already been called.
*/
public Builder setAllocator(DefaultAllocator allocator) {
Assertions.checkState(!createDefaultLoadControlCalled);
this.allocator = allocator;
return this;
}
/**
* Sets the buffer duration parameters.
*
* @param minBufferMs The minimum duration of media that the player will attempt to ensure is
* buffered at all times, in milliseconds.
* @param maxBufferMs The maximum duration of media that the player will attempt to buffer, in
* milliseconds.
* @param bufferForPlaybackMs The duration of media that must be buffered for playback to start
* or resume following a user action such as a seek, in milliseconds.
* @param bufferForPlaybackAfterRebufferMs The default duration of media that must be buffered
* for playback to resume after a rebuffer, in milliseconds. A rebuffer is defined to be
* caused by buffer depletion rather than a user action.
* @return This builder, for convenience.
* @throws IllegalStateException If {@link #createDefaultLoadControl()} has already been called.
*/
public Builder setBufferDurationsMs(
int minBufferMs,
int maxBufferMs,
int bufferForPlaybackMs,
int bufferForPlaybackAfterRebufferMs) {
Assertions.checkState(!createDefaultLoadControlCalled);
assertGreaterOrEqual(bufferForPlaybackMs, 0, "bufferForPlaybackMs", "0");
assertGreaterOrEqual(
bufferForPlaybackAfterRebufferMs, 0, "bufferForPlaybackAfterRebufferMs", "0");
assertGreaterOrEqual(minBufferMs, bufferForPlaybackMs, "minBufferMs", "bufferForPlaybackMs");
assertGreaterOrEqual(
minBufferMs,
bufferForPlaybackAfterRebufferMs,
"minBufferMs",
"bufferForPlaybackAfterRebufferMs");
assertGreaterOrEqual(maxBufferMs, minBufferMs, "maxBufferMs", "minBufferMs");
this.minBufferAudioMs = minBufferMs;
this.minBufferVideoMs = minBufferMs;
this.maxBufferMs = maxBufferMs;
this.bufferForPlaybackMs = bufferForPlaybackMs;
this.bufferForPlaybackAfterRebufferMs = bufferForPlaybackAfterRebufferMs;
return this;
}
/**
* Sets the target buffer size in bytes. If set to {@link C#LENGTH_UNSET}, the target buffer
* size will be calculated based on the selected tracks.
*
* @param targetBufferBytes The target buffer size in bytes.
* @return This builder, for convenience.
* @throws IllegalStateException If {@link #createDefaultLoadControl()} has already been called.
*/
public Builder setTargetBufferBytes(int targetBufferBytes) {
Assertions.checkState(!createDefaultLoadControlCalled);
this.targetBufferBytes = targetBufferBytes;
return this;
}
/**
* Sets whether the load control prioritizes buffer time constraints over buffer size
* constraints.
*
* @param prioritizeTimeOverSizeThresholds Whether the load control prioritizes buffer time
* constraints over buffer size constraints.
* @return This builder, for convenience.
* @throws IllegalStateException If {@link #createDefaultLoadControl()} has already been called.
*/
public Builder setPrioritizeTimeOverSizeThresholds(boolean prioritizeTimeOverSizeThresholds) {
Assertions.checkState(!createDefaultLoadControlCalled);
this.prioritizeTimeOverSizeThresholds = prioritizeTimeOverSizeThresholds;
return this;
}
/**
* Sets the back buffer duration, and whether the back buffer is retained from the previous
* keyframe.
*
* @param backBufferDurationMs The back buffer duration in milliseconds.
* @param retainBackBufferFromKeyframe Whether the back buffer is retained from the previous
* keyframe.
* @return This builder, for convenience.
* @throws IllegalStateException If {@link #createDefaultLoadControl()} has already been called.
*/
public Builder setBackBuffer(int backBufferDurationMs, boolean retainBackBufferFromKeyframe) {
Assertions.checkState(!createDefaultLoadControlCalled);
assertGreaterOrEqual(backBufferDurationMs, 0, "backBufferDurationMs", "0");
this.backBufferDurationMs = backBufferDurationMs;
this.retainBackBufferFromKeyframe = retainBackBufferFromKeyframe;
return this;
}
/** Creates a {@link DefaultLoadControl}. */
public DefaultLoadControl createDefaultLoadControl() {
Assertions.checkState(!createDefaultLoadControlCalled);
createDefaultLoadControlCalled = true;
if (allocator == null) {
allocator = new DefaultAllocator(/* trimOnReset= */ true, C.DEFAULT_BUFFER_SEGMENT_SIZE);
}
return new DefaultLoadControl(
allocator,
minBufferAudioMs,
minBufferVideoMs,
maxBufferMs,
bufferForPlaybackMs,
bufferForPlaybackAfterRebufferMs,
targetBufferBytes,
prioritizeTimeOverSizeThresholds,
backBufferDurationMs,
retainBackBufferFromKeyframe);
}
}
private final DefaultAllocator allocator;
private final long minBufferUs;
private final long minBufferAudioUs;
private final long minBufferVideoUs;
private final long maxBufferUs;
private final long bufferForPlaybackUs;
private final long bufferForPlaybackAfterRebufferUs;
private final PriorityTaskManager priorityTaskManager;
private final int targetBufferBytesOverwrite;
private final boolean prioritizeTimeOverSizeThresholds;
private final long backBufferDurationUs;
private final boolean retainBackBufferFromKeyframe;
private int targetBufferSize;
private boolean isBuffering;
private boolean hasVideo;
/**
* Constructs a new instance, using the {@code DEFAULT_*} constants defined in this class.
*/
/** Constructs a new instance, using the {@code DEFAULT_*} constants defined in this class. */
@SuppressWarnings("deprecation")
public DefaultLoadControl() {
this(new DefaultAllocator(true, C.DEFAULT_BUFFER_SEGMENT_SIZE));
}
/**
* Constructs a new instance, using the {@code DEFAULT_*} constants defined in this class.
*
* @param allocator The {@link DefaultAllocator} used by the loader.
*/
/** @deprecated Use {@link Builder} instead. */
@Deprecated
public DefaultLoadControl(DefaultAllocator allocator) {
this(allocator, DEFAULT_MIN_BUFFER_MS, DEFAULT_MAX_BUFFER_MS, DEFAULT_BUFFER_FOR_PLAYBACK_MS,
DEFAULT_BUFFER_FOR_PLAYBACK_AFTER_REBUFFER_MS);
this(
allocator,
/* minBufferAudioMs= */ DEFAULT_MIN_BUFFER_MS,
/* minBufferVideoMs= */ DEFAULT_MAX_BUFFER_MS,
DEFAULT_MAX_BUFFER_MS,
DEFAULT_BUFFER_FOR_PLAYBACK_MS,
DEFAULT_BUFFER_FOR_PLAYBACK_AFTER_REBUFFER_MS,
DEFAULT_TARGET_BUFFER_BYTES,
DEFAULT_PRIORITIZE_TIME_OVER_SIZE_THRESHOLDS,
DEFAULT_BACK_BUFFER_DURATION_MS,
DEFAULT_RETAIN_BACK_BUFFER_FROM_KEYFRAME);
}
/**
* Constructs a new instance.
*
* @param allocator The {@link DefaultAllocator} used by the loader.
* @param minBufferMs The minimum duration of media that the player will attempt to ensure is
* buffered at all times, in milliseconds.
* @param maxBufferMs The maximum duration of media that the player will attempt buffer, in
* milliseconds.
* @param bufferForPlaybackMs The duration of media that must be buffered for playback to start or
* resume following a user action such as a seek, in milliseconds.
* @param bufferForPlaybackAfterRebufferMs The default duration of media that must be buffered for
* playback to resume after a rebuffer, in milliseconds. A rebuffer is defined to be caused by
* buffer depletion rather than a user action.
*/
public DefaultLoadControl(DefaultAllocator allocator, int minBufferMs, int maxBufferMs,
long bufferForPlaybackMs, long bufferForPlaybackAfterRebufferMs) {
this(allocator, minBufferMs, maxBufferMs, bufferForPlaybackMs, bufferForPlaybackAfterRebufferMs,
null);
/** @deprecated Use {@link Builder} instead. */
@Deprecated
public DefaultLoadControl(
DefaultAllocator allocator,
int minBufferMs,
int maxBufferMs,
int bufferForPlaybackMs,
int bufferForPlaybackAfterRebufferMs,
int targetBufferBytes,
boolean prioritizeTimeOverSizeThresholds) {
this(
allocator,
/* minBufferAudioMs= */ minBufferMs,
/* minBufferVideoMs= */ minBufferMs,
maxBufferMs,
bufferForPlaybackMs,
bufferForPlaybackAfterRebufferMs,
targetBufferBytes,
prioritizeTimeOverSizeThresholds,
DEFAULT_BACK_BUFFER_DURATION_MS,
DEFAULT_RETAIN_BACK_BUFFER_FROM_KEYFRAME);
}
/**
* Constructs a new instance.
*
* @param allocator The {@link DefaultAllocator} used by the loader.
* @param minBufferMs The minimum duration of media that the player will attempt to ensure is
* buffered at all times, in milliseconds.
* @param maxBufferMs The maximum duration of media that the player will attempt buffer, in
* milliseconds.
* @param bufferForPlaybackMs The duration of media that must be buffered for playback to start or
* resume following a user action such as a seek, in milliseconds.
* @param bufferForPlaybackAfterRebufferMs The default duration of media that must be buffered for
* playback to resume after a rebuffer, in milliseconds. A rebuffer is defined to be caused by
* buffer depletion rather than a user action.
* @param priorityTaskManager If not null, registers itself as a task with priority
* {@link C#PRIORITY_PLAYBACK} during loading periods, and unregisters itself during draining
* periods.
*/
public DefaultLoadControl(DefaultAllocator allocator, int minBufferMs, int maxBufferMs,
long bufferForPlaybackMs, long bufferForPlaybackAfterRebufferMs,
PriorityTaskManager priorityTaskManager) {
protected DefaultLoadControl(
DefaultAllocator allocator,
int minBufferAudioMs,
int minBufferVideoMs,
int maxBufferMs,
int bufferForPlaybackMs,
int bufferForPlaybackAfterRebufferMs,
int targetBufferBytes,
boolean prioritizeTimeOverSizeThresholds,
int backBufferDurationMs,
boolean retainBackBufferFromKeyframe) {
assertGreaterOrEqual(bufferForPlaybackMs, 0, "bufferForPlaybackMs", "0");
assertGreaterOrEqual(
bufferForPlaybackAfterRebufferMs, 0, "bufferForPlaybackAfterRebufferMs", "0");
assertGreaterOrEqual(
minBufferAudioMs, bufferForPlaybackMs, "minBufferAudioMs", "bufferForPlaybackMs");
assertGreaterOrEqual(
minBufferVideoMs, bufferForPlaybackMs, "minBufferVideoMs", "bufferForPlaybackMs");
assertGreaterOrEqual(
minBufferAudioMs,
bufferForPlaybackAfterRebufferMs,
"minBufferAudioMs",
"bufferForPlaybackAfterRebufferMs");
assertGreaterOrEqual(
minBufferVideoMs,
bufferForPlaybackAfterRebufferMs,
"minBufferVideoMs",
"bufferForPlaybackAfterRebufferMs");
assertGreaterOrEqual(maxBufferMs, minBufferAudioMs, "maxBufferMs", "minBufferAudioMs");
assertGreaterOrEqual(maxBufferMs, minBufferVideoMs, "maxBufferMs", "minBufferVideoMs");
assertGreaterOrEqual(backBufferDurationMs, 0, "backBufferDurationMs", "0");
this.allocator = allocator;
minBufferUs = minBufferMs * 1000L;
maxBufferUs = maxBufferMs * 1000L;
bufferForPlaybackUs = bufferForPlaybackMs * 1000L;
bufferForPlaybackAfterRebufferUs = bufferForPlaybackAfterRebufferMs * 1000L;
this.priorityTaskManager = priorityTaskManager;
this.minBufferAudioUs = C.msToUs(minBufferAudioMs);
this.minBufferVideoUs = C.msToUs(minBufferVideoMs);
this.maxBufferUs = C.msToUs(maxBufferMs);
this.bufferForPlaybackUs = C.msToUs(bufferForPlaybackMs);
this.bufferForPlaybackAfterRebufferUs = C.msToUs(bufferForPlaybackAfterRebufferMs);
this.targetBufferBytesOverwrite = targetBufferBytes;
this.prioritizeTimeOverSizeThresholds = prioritizeTimeOverSizeThresholds;
this.backBufferDurationUs = C.msToUs(backBufferDurationMs);
this.retainBackBufferFromKeyframe = retainBackBufferFromKeyframe;
}
@Override
@ -139,12 +347,11 @@ public final class DefaultLoadControl implements LoadControl {
@Override
public void onTracksSelected(Renderer[] renderers, TrackGroupArray trackGroups,
TrackSelectionArray trackSelections) {
targetBufferSize = 0;
for (int i = 0; i < renderers.length; i++) {
if (trackSelections.get(i) != null) {
targetBufferSize += Util.getDefaultBufferSize(renderers[i].getTrackType());
}
}
hasVideo = hasVideo(renderers, trackSelections);
targetBufferSize =
targetBufferBytesOverwrite == C.LENGTH_UNSET
? calculateTargetBufferSize(renderers, trackSelections)
: targetBufferBytesOverwrite;
allocator.setTargetBufferSize(targetBufferSize);
}
@ -164,42 +371,103 @@ public final class DefaultLoadControl implements LoadControl {
}
@Override
public boolean shouldStartPlayback(long bufferedDurationUs, boolean rebuffering) {
long minBufferDurationUs = rebuffering ? bufferForPlaybackAfterRebufferUs : bufferForPlaybackUs;
return minBufferDurationUs <= 0 || bufferedDurationUs >= minBufferDurationUs;
public long getBackBufferDurationUs() {
return backBufferDurationUs;
}
@Override
public boolean shouldContinueLoading(long bufferedDurationUs) {
int bufferTimeState = getBufferTimeState(bufferedDurationUs);
public boolean retainBackBufferFromKeyframe() {
return retainBackBufferFromKeyframe;
}
@Override
public boolean shouldContinueLoading(long bufferedDurationUs, float playbackSpeed) {
boolean targetBufferSizeReached = allocator.getTotalBytesAllocated() >= targetBufferSize;
boolean wasBuffering = isBuffering;
isBuffering = bufferTimeState == BELOW_LOW_WATERMARK
|| (bufferTimeState == BETWEEN_WATERMARKS && isBuffering && !targetBufferSizeReached);
if (priorityTaskManager != null && isBuffering != wasBuffering) {
if (isBuffering) {
priorityTaskManager.add(C.PRIORITY_PLAYBACK);
} else {
priorityTaskManager.remove(C.PRIORITY_PLAYBACK);
}
long minBufferUs = hasVideo ? minBufferVideoUs : minBufferAudioUs;
if (playbackSpeed > 1) {
// The playback speed is faster than real time, so scale up the minimum required media
// duration to keep enough media buffered for a playout duration of minBufferUs.
long mediaDurationMinBufferUs =
Util.getMediaDurationForPlayoutDuration(minBufferUs, playbackSpeed);
minBufferUs = Math.min(mediaDurationMinBufferUs, maxBufferUs);
}
if (bufferedDurationUs < minBufferUs) {
isBuffering = prioritizeTimeOverSizeThresholds || !targetBufferSizeReached;
} else if (bufferedDurationUs >= maxBufferUs || targetBufferSizeReached) {
isBuffering = false;
} // Else don't change the buffering state
return isBuffering;
}
private int getBufferTimeState(long bufferedDurationUs) {
return bufferedDurationUs > maxBufferUs ? ABOVE_HIGH_WATERMARK
: (bufferedDurationUs < minBufferUs ? BELOW_LOW_WATERMARK : BETWEEN_WATERMARKS);
/**
 * Returns whether playback should start or resume given the current buffer level.
 *
 * <p>The buffered media duration is first converted into the time it will take to play out at the
 * current speed, so that it compares like-for-like against the time thresholds. Playback may start
 * once that playout duration reaches the applicable threshold (the after-rebuffer threshold when
 * rebuffering, otherwise the initial-playback threshold), or — when size thresholds take priority
 * over time thresholds — once the allocator has already reached the target buffer size.
 */
@Override
public boolean shouldStartPlayback(
    long bufferedDurationUs, float playbackSpeed, boolean rebuffering) {
  // Rescale buffered media time into playout time at the current playback speed.
  bufferedDurationUs = Util.getPlayoutDurationForMediaDuration(bufferedDurationUs, playbackSpeed);
  long minBufferDurationUs = rebuffering ? bufferForPlaybackAfterRebufferUs : bufferForPlaybackUs;
  return minBufferDurationUs <= 0
      || bufferedDurationUs >= minBufferDurationUs
      || (!prioritizeTimeOverSizeThresholds
          && allocator.getTotalBytesAllocated() >= targetBufferSize);
}
/**
 * Calculates the target buffer size in bytes based on the selected tracks: each renderer that has
 * a track selected contributes the default buffer size for its track type. The player will try
 * not to exceed this target buffer. Only used when {@code targetBufferBytes} is
 * {@link C#LENGTH_UNSET}.
 *
 * @param renderers The renderers for which the tracks were selected.
 * @param trackSelectionArray The selected tracks.
 * @return The target buffer size in bytes.
 */
protected int calculateTargetBufferSize(
    Renderer[] renderers, TrackSelectionArray trackSelectionArray) {
  int totalBufferBytes = 0;
  for (int rendererIndex = 0; rendererIndex < renderers.length; rendererIndex++) {
    // Renderers without a selected track contribute nothing to the target size.
    if (trackSelectionArray.get(rendererIndex) != null) {
      totalBufferBytes += getDefaultBufferSize(renderers[rendererIndex].getTrackType());
    }
  }
  return totalBufferBytes;
}
private void reset(boolean resetAllocator) {
targetBufferSize = 0;
if (priorityTaskManager != null && isBuffering) {
priorityTaskManager.remove(C.PRIORITY_PLAYBACK);
}
isBuffering = false;
if (resetAllocator) {
allocator.reset();
}
}
/**
 * Returns the default buffer-size contribution, in bytes, for a renderer of the given track type.
 *
 * @param trackType One of the {@code C.TRACK_TYPE_*} constants.
 * @return The default buffer size in bytes for the track type ({@code 0} for
 *     {@link C#TRACK_TYPE_NONE}).
 * @throws IllegalArgumentException If the track type is not recognized.
 */
private static int getDefaultBufferSize(int trackType) {
  if (trackType == C.TRACK_TYPE_DEFAULT) {
    return DEFAULT_MUXED_BUFFER_SIZE;
  }
  if (trackType == C.TRACK_TYPE_AUDIO) {
    return DEFAULT_AUDIO_BUFFER_SIZE;
  }
  if (trackType == C.TRACK_TYPE_VIDEO) {
    return DEFAULT_VIDEO_BUFFER_SIZE;
  }
  if (trackType == C.TRACK_TYPE_TEXT) {
    return DEFAULT_TEXT_BUFFER_SIZE;
  }
  if (trackType == C.TRACK_TYPE_METADATA) {
    return DEFAULT_METADATA_BUFFER_SIZE;
  }
  if (trackType == C.TRACK_TYPE_CAMERA_MOTION) {
    return DEFAULT_CAMERA_MOTION_BUFFER_SIZE;
  }
  if (trackType == C.TRACK_TYPE_NONE) {
    return 0;
  }
  throw new IllegalArgumentException();
}
/**
 * Returns whether any renderer of track type {@link C#TRACK_TYPE_VIDEO} has a track selected,
 * i.e. whether the current playback includes video.
 */
private static boolean hasVideo(Renderer[] renderers, TrackSelectionArray trackSelectionArray) {
  for (int i = 0; i < renderers.length; i++) {
    if (renderers[i].getTrackType() == C.TRACK_TYPE_VIDEO && trackSelectionArray.get(i) != null) {
      return true;
    }
  }
  return false;
}
/**
 * Fails (via {@link Assertions#checkArgument}) unless {@code value1 >= value2}.
 *
 * @param value1 The value being validated.
 * @param value2 The lower bound.
 * @param name1 Name of {@code value1}, used in the failure message.
 * @param name2 Name of {@code value2}, used in the failure message.
 */
private static void assertGreaterOrEqual(int value1, int value2, String name1, String name2) {
  Assertions.checkArgument(value1 >= value2, name1 + " cannot be less than " + name2);
}
}

Просмотреть файл

@ -0,0 +1,197 @@
/*
* Copyright (C) 2017 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.mozilla.thirdparty.com.google.android.exoplayer2;
import androidx.annotation.Nullable;
import org.mozilla.thirdparty.com.google.android.exoplayer2.util.Clock;
import org.mozilla.thirdparty.com.google.android.exoplayer2.util.MediaClock;
import org.mozilla.thirdparty.com.google.android.exoplayer2.util.StandaloneMediaClock;
/**
* Default {@link MediaClock} which uses a renderer media clock and falls back to a
* {@link StandaloneMediaClock} if necessary.
*/
/* package */ final class DefaultMediaClock implements MediaClock {
/**
* Listener interface to be notified of changes to the active playback parameters.
*/
public interface PlaybackParameterListener {
/**
* Called when the active playback parameters changed. Will not be called for {@link
* #setPlaybackParameters(PlaybackParameters)}.
*
* @param newPlaybackParameters The newly active {@link PlaybackParameters}.
*/
void onPlaybackParametersChanged(PlaybackParameters newPlaybackParameters);
}
private final StandaloneMediaClock standaloneClock;
private final PlaybackParameterListener listener;
@Nullable private Renderer rendererClockSource;
@Nullable private MediaClock rendererClock;
private boolean isUsingStandaloneClock;
private boolean standaloneClockIsStarted;
/**
* Creates a new instance with listener for playback parameter changes and a {@link Clock} to use
* for the standalone clock implementation.
*
* @param listener A {@link PlaybackParameterListener} to listen for playback parameter
* changes.
* @param clock A {@link Clock}.
*/
public DefaultMediaClock(PlaybackParameterListener listener, Clock clock) {
this.listener = listener;
this.standaloneClock = new StandaloneMediaClock(clock);
isUsingStandaloneClock = true;
}
/**
* Starts the standalone fallback clock.
*/
public void start() {
standaloneClockIsStarted = true;
standaloneClock.start();
}
/**
* Stops the standalone fallback clock.
*/
public void stop() {
standaloneClockIsStarted = false;
standaloneClock.stop();
}
/**
* Resets the position of the standalone fallback clock.
*
* @param positionUs The position to set in microseconds.
*/
public void resetPosition(long positionUs) {
standaloneClock.resetPosition(positionUs);
}
/**
 * Notifies the media clock that a renderer has been enabled. Starts using the media clock of the
 * provided renderer if available.
 *
 * @param renderer The renderer which has been enabled.
 * @throws ExoPlaybackException If the renderer provides a media clock and another renderer media
 *     clock is already provided.
 */
public void onRendererEnabled(Renderer renderer) throws ExoPlaybackException {
  MediaClock rendererMediaClock = renderer.getMediaClock();
  if (rendererMediaClock != null && rendererMediaClock != rendererClock) {
    if (rendererClock != null) {
      // Only one renderer at a time may provide the media clock driving playback.
      throw ExoPlaybackException.createForUnexpected(
          new IllegalStateException("Multiple renderer media clocks enabled."));
    }
    this.rendererClock = rendererMediaClock;
    this.rendererClockSource = renderer;
    // Apply the parameters currently in force on the standalone clock so both clocks agree.
    rendererClock.setPlaybackParameters(standaloneClock.getPlaybackParameters());
  }
}
/**
 * Notifies the media clock that a renderer has been disabled. Stops using the media clock of this
 * renderer if used.
 *
 * @param renderer The renderer which has been disabled.
 */
public void onRendererDisabled(Renderer renderer) {
  if (renderer == rendererClockSource) {
    this.rendererClock = null;
    this.rendererClockSource = null;
    // Revert to the standalone fallback clock until another renderer supplies a media clock.
    isUsingStandaloneClock = true;
  }
}
/**
* Syncs internal clock if needed and returns current clock position in microseconds.
*
* @param isReadingAhead Whether the renderers are reading ahead.
*/
public long syncAndGetPositionUs(boolean isReadingAhead) {
syncClocks(isReadingAhead);
return getPositionUs();
}
// MediaClock implementation.
@Override
public long getPositionUs() {
return isUsingStandaloneClock ? standaloneClock.getPositionUs() : rendererClock.getPositionUs();
}
@Override
public void setPlaybackParameters(PlaybackParameters playbackParameters) {
if (rendererClock != null) {
rendererClock.setPlaybackParameters(playbackParameters);
playbackParameters = rendererClock.getPlaybackParameters();
}
standaloneClock.setPlaybackParameters(playbackParameters);
}
@Override
public PlaybackParameters getPlaybackParameters() {
return rendererClock != null
? rendererClock.getPlaybackParameters()
: standaloneClock.getPlaybackParameters();
}
  /**
   * Decides which clock to use and keeps the stand-alone fallback clock synced to the renderer
   * clock so it can take over seamlessly when the renderer clock becomes unusable.
   *
   * @param isReadingAhead Whether the renderers are reading ahead.
   */
  private void syncClocks(boolean isReadingAhead) {
    if (shouldUseStandaloneClock(isReadingAhead)) {
      // Renderer clock is unusable; switch to (and, if playing, start) the fallback clock.
      isUsingStandaloneClock = true;
      if (standaloneClockIsStarted) {
        standaloneClock.start();
      }
      return;
    }
    long rendererClockPositionUs = rendererClock.getPositionUs();
    if (isUsingStandaloneClock) {
      // Ensure enabling the renderer clock doesn't jump backwards in time.
      if (rendererClockPositionUs < standaloneClock.getPositionUs()) {
        standaloneClock.stop();
        return;
      }
      isUsingStandaloneClock = false;
      if (standaloneClockIsStarted) {
        standaloneClock.start();
      }
    }
    // Continuously sync stand-alone clock to renderer clock so that it can take over if needed.
    standaloneClock.resetPosition(rendererClockPositionUs);
    PlaybackParameters playbackParameters = rendererClock.getPlaybackParameters();
    if (!playbackParameters.equals(standaloneClock.getPlaybackParameters())) {
      // Propagate parameter changes made by the renderer clock and notify the listener.
      standaloneClock.setPlaybackParameters(playbackParameters);
      listener.onPlaybackParametersChanged(playbackParameters);
    }
  }
private boolean shouldUseStandaloneClock(boolean isReadingAhead) {
// Use the standalone clock if the clock providing renderer is not set or has ended. Also use
// the standalone clock if the renderer is not ready and we have finished reading the stream or
// are reading ahead to avoid getting stuck if tracks in the current period have uneven
// durations. See: https://github.com/google/ExoPlayer/issues/1874.
return rendererClockSource == null
|| rendererClockSource.isEnded()
|| (!rendererClockSource.isReady()
&& (isReadingAhead || rendererClockSource.hasReadStreamToEnd()));
}
}

Просмотреть файл

@ -16,22 +16,29 @@
package org.mozilla.thirdparty.com.google.android.exoplayer2;
import android.content.Context;
import android.media.MediaCodec;
import android.os.Handler;
import android.os.Looper;
import android.support.annotation.IntDef;
import android.util.Log;
import androidx.annotation.IntDef;
import androidx.annotation.Nullable;
import org.mozilla.thirdparty.com.google.android.exoplayer2.audio.AudioCapabilities;
import org.mozilla.thirdparty.com.google.android.exoplayer2.audio.AudioProcessor;
import org.mozilla.thirdparty.com.google.android.exoplayer2.audio.AudioRendererEventListener;
import org.mozilla.thirdparty.com.google.android.exoplayer2.audio.DefaultAudioSink;
import org.mozilla.thirdparty.com.google.android.exoplayer2.audio.MediaCodecAudioRenderer;
import org.mozilla.thirdparty.com.google.android.exoplayer2.drm.DrmSessionManager;
import org.mozilla.thirdparty.com.google.android.exoplayer2.drm.FrameworkMediaCrypto;
import org.mozilla.thirdparty.com.google.android.exoplayer2.mediacodec.MediaCodecSelector;
import org.mozilla.thirdparty.com.google.android.exoplayer2.metadata.MetadataOutput;
import org.mozilla.thirdparty.com.google.android.exoplayer2.metadata.MetadataRenderer;
import org.mozilla.thirdparty.com.google.android.exoplayer2.text.TextOutput;
import org.mozilla.thirdparty.com.google.android.exoplayer2.text.TextRenderer;
import org.mozilla.thirdparty.com.google.android.exoplayer2.trackselection.TrackSelector;
import org.mozilla.thirdparty.com.google.android.exoplayer2.util.Log;
import org.mozilla.thirdparty.com.google.android.exoplayer2.video.MediaCodecVideoRenderer;
import org.mozilla.thirdparty.com.google.android.exoplayer2.video.VideoRendererEventListener;
import org.mozilla.thirdparty.com.google.android.exoplayer2.video.spherical.CameraMotionRenderer;
import java.lang.annotation.Documented;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.reflect.Constructor;
@ -49,11 +56,12 @@ public class DefaultRenderersFactory implements RenderersFactory {
public static final long DEFAULT_ALLOWED_VIDEO_JOINING_TIME_MS = 5000;
/**
* Modes for using extension renderers.
* Modes for using extension renderers. One of {@link #EXTENSION_RENDERER_MODE_OFF}, {@link
* #EXTENSION_RENDERER_MODE_ON} or {@link #EXTENSION_RENDERER_MODE_PREFER}.
*/
@Documented
@Retention(RetentionPolicy.SOURCE)
@IntDef({EXTENSION_RENDERER_MODE_OFF, EXTENSION_RENDERER_MODE_ON,
EXTENSION_RENDERER_MODE_PREFER})
@IntDef({EXTENSION_RENDERER_MODE_OFF, EXTENSION_RENDERER_MODE_ON, EXTENSION_RENDERER_MODE_PREFER})
public @interface ExtensionRendererMode {}
/**
* Do not allow use of extension renderers.
@ -79,99 +87,249 @@ public class DefaultRenderersFactory implements RenderersFactory {
protected static final int MAX_DROPPED_VIDEO_FRAME_COUNT_TO_NOTIFY = 50;
private final Context context;
private final DrmSessionManager<FrameworkMediaCrypto> drmSessionManager;
private final @ExtensionRendererMode int extensionRendererMode;
private final long allowedVideoJoiningTimeMs;
@Nullable private DrmSessionManager<FrameworkMediaCrypto> drmSessionManager;
@ExtensionRendererMode private int extensionRendererMode;
private long allowedVideoJoiningTimeMs;
private boolean playClearSamplesWithoutKeys;
private boolean enableDecoderFallback;
private MediaCodecSelector mediaCodecSelector;
/**
* @param context A {@link Context}.
*/
/** @param context A {@link Context}. */
public DefaultRenderersFactory(Context context) {
this(context, null);
this.context = context;
extensionRendererMode = EXTENSION_RENDERER_MODE_OFF;
allowedVideoJoiningTimeMs = DEFAULT_ALLOWED_VIDEO_JOINING_TIME_MS;
mediaCodecSelector = MediaCodecSelector.DEFAULT;
}
/**
* @param context A {@link Context}.
* @param drmSessionManager An optional {@link DrmSessionManager}. May be null if DRM protected
* playbacks are not required.
* @deprecated Use {@link #DefaultRenderersFactory(Context)} and pass {@link DrmSessionManager}
* directly to {@link SimpleExoPlayer.Builder}.
*/
public DefaultRenderersFactory(Context context,
DrmSessionManager<FrameworkMediaCrypto> drmSessionManager) {
@Deprecated
@SuppressWarnings("deprecation")
public DefaultRenderersFactory(
Context context, @Nullable DrmSessionManager<FrameworkMediaCrypto> drmSessionManager) {
this(context, drmSessionManager, EXTENSION_RENDERER_MODE_OFF);
}
/**
* @param context A {@link Context}.
* @param drmSessionManager An optional {@link DrmSessionManager}. May be null if DRM protected
* playbacks are not required..
* @param extensionRendererMode The extension renderer mode, which determines if and how
* available extension renderers are used. Note that extensions must be included in the
* application build for them to be considered available.
* @deprecated Use {@link #DefaultRenderersFactory(Context)} and {@link
* #setExtensionRendererMode(int)}.
*/
public DefaultRenderersFactory(Context context,
DrmSessionManager<FrameworkMediaCrypto> drmSessionManager,
@ExtensionRendererMode int extensionRendererMode) {
this(context, drmSessionManager, extensionRendererMode,
DEFAULT_ALLOWED_VIDEO_JOINING_TIME_MS);
@Deprecated
@SuppressWarnings("deprecation")
public DefaultRenderersFactory(
Context context, @ExtensionRendererMode int extensionRendererMode) {
this(context, extensionRendererMode, DEFAULT_ALLOWED_VIDEO_JOINING_TIME_MS);
}
/**
* @param context A {@link Context}.
* @param drmSessionManager An optional {@link DrmSessionManager}. May be null if DRM protected
* playbacks are not required..
* @param extensionRendererMode The extension renderer mode, which determines if and how
* available extension renderers are used. Note that extensions must be included in the
* application build for them to be considered available.
* @param allowedVideoJoiningTimeMs The maximum duration for which video renderers can attempt
* to seamlessly join an ongoing playback.
* @deprecated Use {@link #DefaultRenderersFactory(Context)} and {@link
* #setExtensionRendererMode(int)}, and pass {@link DrmSessionManager} directly to {@link
* SimpleExoPlayer.Builder}.
*/
public DefaultRenderersFactory(Context context,
DrmSessionManager<FrameworkMediaCrypto> drmSessionManager,
@ExtensionRendererMode int extensionRendererMode, long allowedVideoJoiningTimeMs) {
@Deprecated
@SuppressWarnings("deprecation")
public DefaultRenderersFactory(
Context context,
@Nullable DrmSessionManager<FrameworkMediaCrypto> drmSessionManager,
@ExtensionRendererMode int extensionRendererMode) {
this(context, drmSessionManager, extensionRendererMode, DEFAULT_ALLOWED_VIDEO_JOINING_TIME_MS);
}
/**
* @deprecated Use {@link #DefaultRenderersFactory(Context)}, {@link
* #setExtensionRendererMode(int)} and {@link #setAllowedVideoJoiningTimeMs(long)}.
*/
@Deprecated
@SuppressWarnings("deprecation")
public DefaultRenderersFactory(
Context context,
@ExtensionRendererMode int extensionRendererMode,
long allowedVideoJoiningTimeMs) {
this(context, null, extensionRendererMode, allowedVideoJoiningTimeMs);
}
/**
* @deprecated Use {@link #DefaultRenderersFactory(Context)}, {@link
* #setExtensionRendererMode(int)} and {@link #setAllowedVideoJoiningTimeMs(long)}, and pass
* {@link DrmSessionManager} directly to {@link SimpleExoPlayer.Builder}.
*/
@Deprecated
public DefaultRenderersFactory(
Context context,
@Nullable DrmSessionManager<FrameworkMediaCrypto> drmSessionManager,
@ExtensionRendererMode int extensionRendererMode,
long allowedVideoJoiningTimeMs) {
this.context = context;
this.drmSessionManager = drmSessionManager;
this.extensionRendererMode = extensionRendererMode;
this.allowedVideoJoiningTimeMs = allowedVideoJoiningTimeMs;
this.drmSessionManager = drmSessionManager;
mediaCodecSelector = MediaCodecSelector.DEFAULT;
}
/**
* Sets the extension renderer mode, which determines if and how available extension renderers are
* used. Note that extensions must be included in the application build for them to be considered
* available.
*
* <p>The default value is {@link #EXTENSION_RENDERER_MODE_OFF}.
*
* @param extensionRendererMode The extension renderer mode.
* @return This factory, for convenience.
*/
public DefaultRenderersFactory setExtensionRendererMode(
@ExtensionRendererMode int extensionRendererMode) {
this.extensionRendererMode = extensionRendererMode;
return this;
}
/**
* Sets whether renderers are permitted to play clear regions of encrypted media prior to having
* obtained the keys necessary to decrypt encrypted regions of the media. For encrypted media that
* starts with a short clear region, this allows playback to begin in parallel with key
* acquisition, which can reduce startup latency.
*
* <p>The default value is {@code false}.
*
* @param playClearSamplesWithoutKeys Whether renderers are permitted to play clear regions of
* encrypted media prior to having obtained the keys necessary to decrypt encrypted regions of
* the media.
* @return This factory, for convenience.
*/
public DefaultRenderersFactory setPlayClearSamplesWithoutKeys(
boolean playClearSamplesWithoutKeys) {
this.playClearSamplesWithoutKeys = playClearSamplesWithoutKeys;
return this;
}
/**
* Sets whether to enable fallback to lower-priority decoders if decoder initialization fails.
* This may result in using a decoder that is less efficient or slower than the primary decoder.
*
* @param enableDecoderFallback Whether to enable fallback to lower-priority decoders if decoder
* initialization fails.
* @return This factory, for convenience.
*/
public DefaultRenderersFactory setEnableDecoderFallback(boolean enableDecoderFallback) {
this.enableDecoderFallback = enableDecoderFallback;
return this;
}
/**
* Sets a {@link MediaCodecSelector} for use by {@link MediaCodec} based renderers.
*
* <p>The default value is {@link MediaCodecSelector#DEFAULT}.
*
* @param mediaCodecSelector The {@link MediaCodecSelector}.
* @return This factory, for convenience.
*/
public DefaultRenderersFactory setMediaCodecSelector(MediaCodecSelector mediaCodecSelector) {
this.mediaCodecSelector = mediaCodecSelector;
return this;
}
/**
* Sets the maximum duration for which video renderers can attempt to seamlessly join an ongoing
* playback.
*
* <p>The default value is {@link #DEFAULT_ALLOWED_VIDEO_JOINING_TIME_MS}.
*
* @param allowedVideoJoiningTimeMs The maximum duration for which video renderers can attempt to
* seamlessly join an ongoing playback, in milliseconds.
* @return This factory, for convenience.
*/
public DefaultRenderersFactory setAllowedVideoJoiningTimeMs(long allowedVideoJoiningTimeMs) {
this.allowedVideoJoiningTimeMs = allowedVideoJoiningTimeMs;
return this;
}
@Override
public Renderer[] createRenderers(Handler eventHandler,
public Renderer[] createRenderers(
Handler eventHandler,
VideoRendererEventListener videoRendererEventListener,
AudioRendererEventListener audioRendererEventListener,
TextRenderer.Output textRendererOutput, MetadataRenderer.Output metadataRendererOutput) {
TextOutput textRendererOutput,
MetadataOutput metadataRendererOutput,
@Nullable DrmSessionManager<FrameworkMediaCrypto> drmSessionManager) {
if (drmSessionManager == null) {
drmSessionManager = this.drmSessionManager;
}
ArrayList<Renderer> renderersList = new ArrayList<>();
buildVideoRenderers(context, drmSessionManager, allowedVideoJoiningTimeMs,
eventHandler, videoRendererEventListener, extensionRendererMode, renderersList);
buildAudioRenderers(context, drmSessionManager, buildAudioProcessors(),
eventHandler, audioRendererEventListener, extensionRendererMode, renderersList);
buildVideoRenderers(
context,
extensionRendererMode,
mediaCodecSelector,
drmSessionManager,
playClearSamplesWithoutKeys,
enableDecoderFallback,
eventHandler,
videoRendererEventListener,
allowedVideoJoiningTimeMs,
renderersList);
buildAudioRenderers(
context,
extensionRendererMode,
mediaCodecSelector,
drmSessionManager,
playClearSamplesWithoutKeys,
enableDecoderFallback,
buildAudioProcessors(),
eventHandler,
audioRendererEventListener,
renderersList);
buildTextRenderers(context, textRendererOutput, eventHandler.getLooper(),
extensionRendererMode, renderersList);
buildMetadataRenderers(context, metadataRendererOutput, eventHandler.getLooper(),
extensionRendererMode, renderersList);
buildCameraMotionRenderers(context, extensionRendererMode, renderersList);
buildMiscellaneousRenderers(context, eventHandler, extensionRendererMode, renderersList);
return renderersList.toArray(new Renderer[renderersList.size()]);
return renderersList.toArray(new Renderer[0]);
}
/**
* Builds video renderers for use by the player.
*
* @param context The {@link Context} associated with the player.
* @param drmSessionManager An optional {@link DrmSessionManager}. May be null if the player
* will not be used for DRM protected playbacks.
* @param allowedVideoJoiningTimeMs The maximum duration in milliseconds for which video
* renderers can attempt to seamlessly join an ongoing playback.
* @param extensionRendererMode The extension renderer mode.
* @param mediaCodecSelector A decoder selector.
* @param drmSessionManager An optional {@link DrmSessionManager}. May be null if the player will
* not be used for DRM protected playbacks.
* @param playClearSamplesWithoutKeys Whether renderers are permitted to play clear regions of
* encrypted media prior to having obtained the keys necessary to decrypt encrypted regions of
* the media.
* @param enableDecoderFallback Whether to enable fallback to lower-priority decoders if decoder
* initialization fails. This may result in using a decoder that is slower/less efficient than
* the primary decoder.
* @param eventHandler A handler associated with the main thread's looper.
* @param eventListener An event listener.
* @param extensionRendererMode The extension renderer mode.
* @param allowedVideoJoiningTimeMs The maximum duration for which video renderers can attempt to
* seamlessly join an ongoing playback, in milliseconds.
* @param out An array to which the built renderers should be appended.
*/
protected void buildVideoRenderers(Context context,
DrmSessionManager<FrameworkMediaCrypto> drmSessionManager, long allowedVideoJoiningTimeMs,
Handler eventHandler, VideoRendererEventListener eventListener,
@ExtensionRendererMode int extensionRendererMode, ArrayList<Renderer> out) {
out.add(new MediaCodecVideoRenderer(context, MediaCodecSelector.DEFAULT,
allowedVideoJoiningTimeMs, drmSessionManager, false, eventHandler, eventListener,
MAX_DROPPED_VIDEO_FRAME_COUNT_TO_NOTIFY));
protected void buildVideoRenderers(
Context context,
@ExtensionRendererMode int extensionRendererMode,
MediaCodecSelector mediaCodecSelector,
@Nullable DrmSessionManager<FrameworkMediaCrypto> drmSessionManager,
boolean playClearSamplesWithoutKeys,
boolean enableDecoderFallback,
Handler eventHandler,
VideoRendererEventListener eventListener,
long allowedVideoJoiningTimeMs,
ArrayList<Renderer> out) {
out.add(
new MediaCodecVideoRenderer(
context,
mediaCodecSelector,
allowedVideoJoiningTimeMs,
drmSessionManager,
playClearSamplesWithoutKeys,
enableDecoderFallback,
eventHandler,
eventListener,
MAX_DROPPED_VIDEO_FRAME_COUNT_TO_NOTIFY));
if (extensionRendererMode == EXTENSION_RENDERER_MODE_OFF) {
return;
@ -182,18 +340,57 @@ public class DefaultRenderersFactory implements RenderersFactory {
}
try {
Class<?> clazz =
Class.forName("org.mozilla.thirdparty.com.google.android.exoplayer2.ext.vp9.LibvpxVideoRenderer");
Constructor<?> constructor = clazz.getConstructor(boolean.class, long.class, Handler.class,
VideoRendererEventListener.class, int.class);
Renderer renderer = (Renderer) constructor.newInstance(true, allowedVideoJoiningTimeMs,
eventHandler, eventListener, MAX_DROPPED_VIDEO_FRAME_COUNT_TO_NOTIFY);
// Full class names used for constructor args so the LINT rule triggers if any of them move.
// LINT.IfChange
Class<?> clazz = Class.forName("org.mozilla.thirdparty.com.google.android.exoplayer2.ext.vp9.LibvpxVideoRenderer");
Constructor<?> constructor =
clazz.getConstructor(
long.class,
android.os.Handler.class,
org.mozilla.thirdparty.com.google.android.exoplayer2.video.VideoRendererEventListener.class,
int.class);
// LINT.ThenChange(../../../../../../../proguard-rules.txt)
Renderer renderer =
(Renderer)
constructor.newInstance(
allowedVideoJoiningTimeMs,
eventHandler,
eventListener,
MAX_DROPPED_VIDEO_FRAME_COUNT_TO_NOTIFY);
out.add(extensionRendererIndex++, renderer);
Log.i(TAG, "Loaded LibvpxVideoRenderer.");
} catch (ClassNotFoundException e) {
// Expected if the app was built without the extension.
} catch (Exception e) {
throw new RuntimeException(e);
// The extension is present, but instantiation failed.
throw new RuntimeException("Error instantiating VP9 extension", e);
}
try {
// Full class names used for constructor args so the LINT rule triggers if any of them move.
// LINT.IfChange
Class<?> clazz = Class.forName("org.mozilla.thirdparty.com.google.android.exoplayer2.ext.av1.Libgav1VideoRenderer");
Constructor<?> constructor =
clazz.getConstructor(
long.class,
android.os.Handler.class,
org.mozilla.thirdparty.com.google.android.exoplayer2.video.VideoRendererEventListener.class,
int.class);
// LINT.ThenChange(../../../../../../../proguard-rules.txt)
Renderer renderer =
(Renderer)
constructor.newInstance(
allowedVideoJoiningTimeMs,
eventHandler,
eventListener,
MAX_DROPPED_VIDEO_FRAME_COUNT_TO_NOTIFY);
out.add(extensionRendererIndex++, renderer);
Log.i(TAG, "Loaded Libgav1VideoRenderer.");
} catch (ClassNotFoundException e) {
// Expected if the app was built without the extension.
} catch (Exception e) {
// The extension is present, but instantiation failed.
throw new RuntimeException("Error instantiating AV1 extension", e);
}
}
@ -201,22 +398,43 @@ public class DefaultRenderersFactory implements RenderersFactory {
* Builds audio renderers for use by the player.
*
* @param context The {@link Context} associated with the player.
* @param drmSessionManager An optional {@link DrmSessionManager}. May be null if the player
* will not be used for DRM protected playbacks.
* @param audioProcessors An array of {@link AudioProcessor}s that will process PCM audio
* buffers before output. May be empty.
* @param extensionRendererMode The extension renderer mode.
* @param mediaCodecSelector A decoder selector.
* @param drmSessionManager An optional {@link DrmSessionManager}. May be null if the player will
* not be used for DRM protected playbacks.
* @param playClearSamplesWithoutKeys Whether renderers are permitted to play clear regions of
* encrypted media prior to having obtained the keys necessary to decrypt encrypted regions of
* the media.
* @param enableDecoderFallback Whether to enable fallback to lower-priority decoders if decoder
* initialization fails. This may result in using a decoder that is slower/less efficient than
* the primary decoder.
* @param audioProcessors An array of {@link AudioProcessor}s that will process PCM audio buffers
* before output. May be empty.
* @param eventHandler A handler to use when invoking event listeners and outputs.
* @param eventListener An event listener.
* @param extensionRendererMode The extension renderer mode.
* @param out An array to which the built renderers should be appended.
*/
protected void buildAudioRenderers(Context context,
DrmSessionManager<FrameworkMediaCrypto> drmSessionManager,
AudioProcessor[] audioProcessors, Handler eventHandler,
AudioRendererEventListener eventListener, @ExtensionRendererMode int extensionRendererMode,
protected void buildAudioRenderers(
Context context,
@ExtensionRendererMode int extensionRendererMode,
MediaCodecSelector mediaCodecSelector,
@Nullable DrmSessionManager<FrameworkMediaCrypto> drmSessionManager,
boolean playClearSamplesWithoutKeys,
boolean enableDecoderFallback,
AudioProcessor[] audioProcessors,
Handler eventHandler,
AudioRendererEventListener eventListener,
ArrayList<Renderer> out) {
out.add(new MediaCodecAudioRenderer(MediaCodecSelector.DEFAULT, drmSessionManager, true,
eventHandler, eventListener, AudioCapabilities.getCapabilities(context), audioProcessors));
out.add(
new MediaCodecAudioRenderer(
context,
mediaCodecSelector,
drmSessionManager,
playClearSamplesWithoutKeys,
enableDecoderFallback,
eventHandler,
eventListener,
new DefaultAudioSink(AudioCapabilities.getCapabilities(context), audioProcessors)));
if (extensionRendererMode == EXTENSION_RENDERER_MODE_OFF) {
return;
@ -227,48 +445,67 @@ public class DefaultRenderersFactory implements RenderersFactory {
}
try {
Class<?> clazz =
Class.forName("org.mozilla.thirdparty.com.google.android.exoplayer2.ext.opus.LibopusAudioRenderer");
Constructor<?> constructor = clazz.getConstructor(Handler.class,
AudioRendererEventListener.class, AudioProcessor[].class);
Renderer renderer = (Renderer) constructor.newInstance(eventHandler, eventListener,
audioProcessors);
// Full class names used for constructor args so the LINT rule triggers if any of them move.
// LINT.IfChange
Class<?> clazz = Class.forName("org.mozilla.thirdparty.com.google.android.exoplayer2.ext.opus.LibopusAudioRenderer");
Constructor<?> constructor =
clazz.getConstructor(
android.os.Handler.class,
org.mozilla.thirdparty.com.google.android.exoplayer2.audio.AudioRendererEventListener.class,
org.mozilla.thirdparty.com.google.android.exoplayer2.audio.AudioProcessor[].class);
// LINT.ThenChange(../../../../../../../proguard-rules.txt)
Renderer renderer =
(Renderer) constructor.newInstance(eventHandler, eventListener, audioProcessors);
out.add(extensionRendererIndex++, renderer);
Log.i(TAG, "Loaded LibopusAudioRenderer.");
} catch (ClassNotFoundException e) {
// Expected if the app was built without the extension.
} catch (Exception e) {
throw new RuntimeException(e);
// The extension is present, but instantiation failed.
throw new RuntimeException("Error instantiating Opus extension", e);
}
try {
Class<?> clazz =
Class.forName("org.mozilla.thirdparty.com.google.android.exoplayer2.ext.flac.LibflacAudioRenderer");
Constructor<?> constructor = clazz.getConstructor(Handler.class,
AudioRendererEventListener.class, AudioProcessor[].class);
Renderer renderer = (Renderer) constructor.newInstance(eventHandler, eventListener,
audioProcessors);
// Full class names used for constructor args so the LINT rule triggers if any of them move.
// LINT.IfChange
Class<?> clazz = Class.forName("org.mozilla.thirdparty.com.google.android.exoplayer2.ext.flac.LibflacAudioRenderer");
Constructor<?> constructor =
clazz.getConstructor(
android.os.Handler.class,
org.mozilla.thirdparty.com.google.android.exoplayer2.audio.AudioRendererEventListener.class,
org.mozilla.thirdparty.com.google.android.exoplayer2.audio.AudioProcessor[].class);
// LINT.ThenChange(../../../../../../../proguard-rules.txt)
Renderer renderer =
(Renderer) constructor.newInstance(eventHandler, eventListener, audioProcessors);
out.add(extensionRendererIndex++, renderer);
Log.i(TAG, "Loaded LibflacAudioRenderer.");
} catch (ClassNotFoundException e) {
// Expected if the app was built without the extension.
} catch (Exception e) {
throw new RuntimeException(e);
// The extension is present, but instantiation failed.
throw new RuntimeException("Error instantiating FLAC extension", e);
}
try {
// Full class names used for constructor args so the LINT rule triggers if any of them move.
// LINT.IfChange
Class<?> clazz =
Class.forName("org.mozilla.thirdparty.com.google.android.exoplayer2.ext.ffmpeg.FfmpegAudioRenderer");
Constructor<?> constructor = clazz.getConstructor(Handler.class,
AudioRendererEventListener.class, AudioProcessor[].class);
Renderer renderer = (Renderer) constructor.newInstance(eventHandler, eventListener,
audioProcessors);
Constructor<?> constructor =
clazz.getConstructor(
android.os.Handler.class,
org.mozilla.thirdparty.com.google.android.exoplayer2.audio.AudioRendererEventListener.class,
org.mozilla.thirdparty.com.google.android.exoplayer2.audio.AudioProcessor[].class);
// LINT.ThenChange(../../../../../../../proguard-rules.txt)
Renderer renderer =
(Renderer) constructor.newInstance(eventHandler, eventListener, audioProcessors);
out.add(extensionRendererIndex++, renderer);
Log.i(TAG, "Loaded FfmpegAudioRenderer.");
} catch (ClassNotFoundException e) {
// Expected if the app was built without the extension.
} catch (Exception e) {
throw new RuntimeException(e);
// The extension is present, but instantiation failed.
throw new RuntimeException("Error instantiating FFmpeg extension", e);
}
}
@ -277,13 +514,15 @@ public class DefaultRenderersFactory implements RenderersFactory {
*
* @param context The {@link Context} associated with the player.
* @param output An output for the renderers.
* @param outputLooper The looper associated with the thread on which the output should be
* called.
* @param outputLooper The looper associated with the thread on which the output should be called.
* @param extensionRendererMode The extension renderer mode.
* @param out An array to which the built renderers should be appended.
*/
protected void buildTextRenderers(Context context, TextRenderer.Output output,
Looper outputLooper, @ExtensionRendererMode int extensionRendererMode,
protected void buildTextRenderers(
Context context,
TextOutput output,
Looper outputLooper,
@ExtensionRendererMode int extensionRendererMode,
ArrayList<Renderer> out) {
out.add(new TextRenderer(output, outputLooper));
}
@ -293,17 +532,31 @@ public class DefaultRenderersFactory implements RenderersFactory {
*
* @param context The {@link Context} associated with the player.
* @param output An output for the renderers.
* @param outputLooper The looper associated with the thread on which the output should be
* called.
* @param outputLooper The looper associated with the thread on which the output should be called.
* @param extensionRendererMode The extension renderer mode.
* @param out An array to which the built renderers should be appended.
*/
protected void buildMetadataRenderers(Context context, MetadataRenderer.Output output,
Looper outputLooper, @ExtensionRendererMode int extensionRendererMode,
protected void buildMetadataRenderers(
Context context,
MetadataOutput output,
Looper outputLooper,
@ExtensionRendererMode int extensionRendererMode,
ArrayList<Renderer> out) {
out.add(new MetadataRenderer(output, outputLooper));
}
/**
* Builds camera motion renderers for use by the player.
*
* @param context The {@link Context} associated with the player.
* @param extensionRendererMode The extension renderer mode.
* @param out An array to which the built renderers should be appended.
*/
protected void buildCameraMotionRenderers(
Context context, @ExtensionRendererMode int extensionRendererMode, ArrayList<Renderer> out) {
out.add(new CameraMotionRenderer());
}
/**
* Builds any miscellaneous renderers used by the player.
*

Просмотреть файл

@ -15,10 +15,14 @@
*/
package org.mozilla.thirdparty.com.google.android.exoplayer2;
import android.support.annotation.IntDef;
import android.os.SystemClock;
import androidx.annotation.IntDef;
import androidx.annotation.Nullable;
import org.mozilla.thirdparty.com.google.android.exoplayer2.RendererCapabilities.FormatSupport;
import org.mozilla.thirdparty.com.google.android.exoplayer2.source.MediaSource;
import org.mozilla.thirdparty.com.google.android.exoplayer2.util.Assertions;
import java.io.IOException;
import java.lang.annotation.Documented;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
@ -28,10 +32,13 @@ import java.lang.annotation.RetentionPolicy;
public final class ExoPlaybackException extends Exception {
/**
* The type of source that produced the error.
* The type of source that produced the error. One of {@link #TYPE_SOURCE}, {@link #TYPE_RENDERER}
* {@link #TYPE_UNEXPECTED}, {@link #TYPE_REMOTE} or {@link #TYPE_OUT_OF_MEMORY}. Note that new
* types may be added in the future and error handling should handle unknown type values.
*/
@Documented
@Retention(RetentionPolicy.SOURCE)
@IntDef({TYPE_SOURCE, TYPE_RENDERER, TYPE_UNEXPECTED})
@IntDef({TYPE_SOURCE, TYPE_RENDERER, TYPE_UNEXPECTED, TYPE_REMOTE, TYPE_OUT_OF_MEMORY})
public @interface Type {}
/**
* The error occurred loading data from a {@link MediaSource}.
@ -51,11 +58,16 @@ public final class ExoPlaybackException extends Exception {
* Call {@link #getUnexpectedException()} to retrieve the underlying cause.
*/
public static final int TYPE_UNEXPECTED = 2;
/**
* The type of the playback failure. One of {@link #TYPE_SOURCE}, {@link #TYPE_RENDERER} and
* {@link #TYPE_UNEXPECTED}.
* The error occurred in a remote component.
*
* <p>Call {@link #getMessage()} to retrieve the message associated with the error.
*/
public static final int TYPE_REMOTE = 3;
/** The error was an {@link OutOfMemoryError}. */
public static final int TYPE_OUT_OF_MEMORY = 4;
/** The {@link Type} of the playback failure. */
@Type public final int type;
/**
@ -64,15 +76,22 @@ public final class ExoPlaybackException extends Exception {
public final int rendererIndex;
/**
* Creates an instance of type {@link #TYPE_RENDERER}.
*
* @param cause The cause of the failure.
* @param rendererIndex The index of the renderer in which the failure occurred.
* @return The created instance.
* If {@link #type} is {@link #TYPE_RENDERER}, this is the {@link Format} the renderer was using
* at the time of the exception, or null if the renderer wasn't using a {@link Format}.
*/
public static ExoPlaybackException createForRenderer(Exception cause, int rendererIndex) {
return new ExoPlaybackException(TYPE_RENDERER, null, cause, rendererIndex);
}
@Nullable public final Format rendererFormat;
/**
* If {@link #type} is {@link #TYPE_RENDERER}, this is the level of {@link FormatSupport} of the
* renderer for {@link #rendererFormat}. If {@link #rendererFormat} is null, this is {@link
* RendererCapabilities#FORMAT_HANDLED}.
*/
@FormatSupport public final int rendererFormatSupport;
/** The value of {@link SystemClock#elapsedRealtime()} when this exception was created. */
public final long timestampMs;
@Nullable private final Throwable cause;
/**
* Creates an instance of type {@link #TYPE_SOURCE}.
@ -81,7 +100,31 @@ public final class ExoPlaybackException extends Exception {
* @return The created instance.
*/
public static ExoPlaybackException createForSource(IOException cause) {
return new ExoPlaybackException(TYPE_SOURCE, null, cause, C.INDEX_UNSET);
return new ExoPlaybackException(TYPE_SOURCE, cause);
}
/**
* Creates an instance of type {@link #TYPE_RENDERER}.
*
* @param cause The cause of the failure.
* @param rendererIndex The index of the renderer in which the failure occurred.
* @param rendererFormat The {@link Format} the renderer was using at the time of the exception,
* or null if the renderer wasn't using a {@link Format}.
* @param rendererFormatSupport The {@link FormatSupport} of the renderer for {@code
* rendererFormat}. Ignored if {@code rendererFormat} is null.
* @return The created instance.
*/
public static ExoPlaybackException createForRenderer(
Exception cause,
int rendererIndex,
@Nullable Format rendererFormat,
@FormatSupport int rendererFormatSupport) {
return new ExoPlaybackException(
TYPE_RENDERER,
cause,
rendererIndex,
rendererFormat,
rendererFormat == null ? RendererCapabilities.FORMAT_HANDLED : rendererFormatSupport);
}
/**
@ -90,15 +133,62 @@ public final class ExoPlaybackException extends Exception {
* @param cause The cause of the failure.
* @return The created instance.
*/
/* package */ static ExoPlaybackException createForUnexpected(RuntimeException cause) {
return new ExoPlaybackException(TYPE_UNEXPECTED, null, cause, C.INDEX_UNSET);
public static ExoPlaybackException createForUnexpected(RuntimeException cause) {
return new ExoPlaybackException(TYPE_UNEXPECTED, cause);
}
private ExoPlaybackException(@Type int type, String message, Throwable cause,
int rendererIndex) {
super(message, cause);
/**
* Creates an instance of type {@link #TYPE_REMOTE}.
*
* @param message The message associated with the error.
* @return The created instance.
*/
public static ExoPlaybackException createForRemote(String message) {
return new ExoPlaybackException(TYPE_REMOTE, message);
}
/**
* Creates an instance of type {@link #TYPE_OUT_OF_MEMORY}.
*
* @param cause The cause of the failure.
* @return The created instance.
*/
public static ExoPlaybackException createForOutOfMemoryError(OutOfMemoryError cause) {
return new ExoPlaybackException(TYPE_OUT_OF_MEMORY, cause);
}
private ExoPlaybackException(@Type int type, Throwable cause) {
this(
type,
cause,
/* rendererIndex= */ C.INDEX_UNSET,
/* rendererFormat= */ null,
/* rendererFormatSupport= */ RendererCapabilities.FORMAT_HANDLED);
}
private ExoPlaybackException(
@Type int type,
Throwable cause,
int rendererIndex,
@Nullable Format rendererFormat,
@FormatSupport int rendererFormatSupport) {
super(cause);
this.type = type;
this.cause = cause;
this.rendererIndex = rendererIndex;
this.rendererFormat = rendererFormat;
this.rendererFormatSupport = rendererFormatSupport;
timestampMs = SystemClock.elapsedRealtime();
}
private ExoPlaybackException(@Type int type, String message) {
super(message);
this.type = type;
rendererIndex = C.INDEX_UNSET;
rendererFormat = null;
rendererFormatSupport = RendererCapabilities.FORMAT_UNSUPPORTED_TYPE;
cause = null;
timestampMs = SystemClock.elapsedRealtime();
}
/**
@ -108,7 +198,7 @@ public final class ExoPlaybackException extends Exception {
*/
public IOException getSourceException() {
Assertions.checkState(type == TYPE_SOURCE);
return (IOException) getCause();
return (IOException) Assertions.checkNotNull(cause);
}
/**
@ -118,7 +208,7 @@ public final class ExoPlaybackException extends Exception {
*/
public Exception getRendererException() {
Assertions.checkState(type == TYPE_RENDERER);
return (Exception) getCause();
return (Exception) Assertions.checkNotNull(cause);
}
/**
@ -128,7 +218,16 @@ public final class ExoPlaybackException extends Exception {
*/
public RuntimeException getUnexpectedException() {
Assertions.checkState(type == TYPE_UNEXPECTED);
return (RuntimeException) getCause();
return (RuntimeException) Assertions.checkNotNull(cause);
}
/**
* Retrieves the underlying error when {@link #type} is {@link #TYPE_OUT_OF_MEMORY}.
*
* @throws IllegalStateException If {@link #type} is not {@link #TYPE_OUT_OF_MEMORY}.
*/
public OutOfMemoryError getOutOfMemoryError() {
Assertions.checkState(type == TYPE_OUT_OF_MEMORY);
return (OutOfMemoryError) Assertions.checkNotNull(cause);
}
}

Просмотреть файл

@ -15,267 +15,325 @@
*/
package org.mozilla.thirdparty.com.google.android.exoplayer2;
import android.support.annotation.Nullable;
import android.content.Context;
import android.os.Looper;
import androidx.annotation.Nullable;
import androidx.annotation.VisibleForTesting;
import org.mozilla.thirdparty.com.google.android.exoplayer2.analytics.AnalyticsCollector;
import org.mozilla.thirdparty.com.google.android.exoplayer2.audio.MediaCodecAudioRenderer;
import org.mozilla.thirdparty.com.google.android.exoplayer2.metadata.MetadataRenderer;
import org.mozilla.thirdparty.com.google.android.exoplayer2.source.ClippingMediaSource;
import org.mozilla.thirdparty.com.google.android.exoplayer2.source.ConcatenatingMediaSource;
import org.mozilla.thirdparty.com.google.android.exoplayer2.source.ExtractorMediaSource;
import org.mozilla.thirdparty.com.google.android.exoplayer2.source.LoopingMediaSource;
import org.mozilla.thirdparty.com.google.android.exoplayer2.source.MediaSource;
import org.mozilla.thirdparty.com.google.android.exoplayer2.source.MergingMediaSource;
import org.mozilla.thirdparty.com.google.android.exoplayer2.source.ProgressiveMediaSource;
import org.mozilla.thirdparty.com.google.android.exoplayer2.source.SingleSampleMediaSource;
import org.mozilla.thirdparty.com.google.android.exoplayer2.source.TrackGroupArray;
import org.mozilla.thirdparty.com.google.android.exoplayer2.text.TextRenderer;
import org.mozilla.thirdparty.com.google.android.exoplayer2.trackselection.DefaultTrackSelector;
import org.mozilla.thirdparty.com.google.android.exoplayer2.trackselection.TrackSelectionArray;
import org.mozilla.thirdparty.com.google.android.exoplayer2.trackselection.TrackSelector;
import org.mozilla.thirdparty.com.google.android.exoplayer2.upstream.BandwidthMeter;
import org.mozilla.thirdparty.com.google.android.exoplayer2.upstream.DataSource;
import org.mozilla.thirdparty.com.google.android.exoplayer2.upstream.DefaultBandwidthMeter;
import org.mozilla.thirdparty.com.google.android.exoplayer2.util.Assertions;
import org.mozilla.thirdparty.com.google.android.exoplayer2.util.Clock;
import org.mozilla.thirdparty.com.google.android.exoplayer2.util.Util;
import org.mozilla.thirdparty.com.google.android.exoplayer2.video.MediaCodecVideoRenderer;
/**
* An extensible media player exposing traditional high-level media player functionality, such as
* the ability to buffer media, play, pause and seek. Instances can be obtained from
* {@link ExoPlayerFactory}.
* An extensible media player that plays {@link MediaSource}s. Instances can be obtained from {@link
* SimpleExoPlayer.Builder} or {@link ExoPlayer.Builder}.
*
* <h3>Player components</h3>
*
* <h3>Player composition</h3>
* <p>ExoPlayer is designed to make few assumptions about (and hence impose few restrictions on) the
* type of the media being played, how and where it is stored, and how it is rendered. Rather than
* implementing the loading and rendering of media directly, ExoPlayer implementations delegate this
* work to components that are injected when a player is created or when it's prepared for playback.
* Components common to all ExoPlayer implementations are:
*
* <ul>
* <li>A <b>{@link MediaSource}</b> that defines the media to be played, loads the media, and from
* which the loaded media can be read. A MediaSource is injected via {@link #prepare} at the start
* of playback. The library modules provide default implementations for regular media files
* ({@link ExtractorMediaSource}), DASH (DashMediaSource), SmoothStreaming (SsMediaSource) and HLS
* (HlsMediaSource), implementations for merging ({@link MergingMediaSource}) and concatenating
* ({@link ConcatenatingMediaSource}) other MediaSources, and an implementation for loading single
* samples ({@link SingleSampleMediaSource}) most often used for side-loaded subtitle and closed
* caption files.</li>
* which the loaded media can be read. A MediaSource is injected via {@link
* #prepare(MediaSource)} at the start of playback. The library modules provide default
* implementations for progressive media files ({@link ProgressiveMediaSource}), DASH
* (DashMediaSource), SmoothStreaming (SsMediaSource) and HLS (HlsMediaSource), an
* implementation for loading single media samples ({@link SingleSampleMediaSource}) that's
* most often used for side-loaded subtitle files, and implementations for building more
* complex MediaSources from simpler ones ({@link MergingMediaSource}, {@link
* ConcatenatingMediaSource}, {@link LoopingMediaSource} and {@link ClippingMediaSource}).
* <li><b>{@link Renderer}</b>s that render individual components of the media. The library
* provides default implementations for common media types ({@link MediaCodecVideoRenderer},
* {@link MediaCodecAudioRenderer}, {@link TextRenderer} and {@link MetadataRenderer}). A Renderer
* consumes media of its corresponding type from the MediaSource being played. Renderers are
* injected when the player is created.</li>
* provides default implementations for common media types ({@link MediaCodecVideoRenderer},
* {@link MediaCodecAudioRenderer}, {@link TextRenderer} and {@link MetadataRenderer}). A
* Renderer consumes media from the MediaSource being played. Renderers are injected when the
* player is created.
* <li>A <b>{@link TrackSelector}</b> that selects tracks provided by the MediaSource to be
* consumed by each of the available Renderers. The library provides a default implementation
* ({@link DefaultTrackSelector}) suitable for most use cases. A TrackSelector is injected when
* the player is created.</li>
* consumed by each of the available Renderers. The library provides a default implementation
* ({@link DefaultTrackSelector}) suitable for most use cases. A TrackSelector is injected
* when the player is created.
* <li>A <b>{@link LoadControl}</b> that controls when the MediaSource buffers more media, and how
* much media is buffered. The library provides a default implementation
* ({@link DefaultLoadControl}) suitable for most use cases. A LoadControl is injected when the
* player is created.</li>
* much media is buffered. The library provides a default implementation ({@link
* DefaultLoadControl}) suitable for most use cases. A LoadControl is injected when the player
* is created.
* </ul>
*
* <p>An ExoPlayer can be built using the default components provided by the library, but may also
* be built using custom implementations if non-standard behaviors are required. For example a
* custom LoadControl could be injected to change the player's buffering strategy, or a custom
* Renderer could be injected to use a video codec not supported natively by Android.
* Renderer could be injected to add support for a video codec not supported natively by Android.
*
* <p>The concept of injecting components that implement pieces of player functionality is present
* throughout the library. The default component implementations listed above delegate work to
* further injected components. This allows many sub-components to be individually replaced with
* custom implementations. For example the default MediaSource implementations require one or more
* {@link DataSource} factories to be injected via their constructors. By providing a custom factory
* it's possible to load data from a non-standard source or through a different network stack.
* it's possible to load data from a non-standard source, or through a different network stack.
*
* <h3>Threading model</h3>
* <p>The figure below shows ExoPlayer's threading model.</p>
* <p align="center">
* <img src="doc-files/exoplayer-threading-model.svg" alt="ExoPlayer's threading model">
* </p>
*
* <p>The figure below shows ExoPlayer's threading model.
*
* <p style="align:center"><img src="doc-files/exoplayer-threading-model.svg" alt="ExoPlayer's
* threading model">
*
* <ul>
* <li>It is recommended that ExoPlayer instances are created and accessed from a single application
* thread. The application's main thread is ideal. Accessing an instance from multiple threads is
* discouraged, however if an application does wish to do this then it may do so provided that it
* ensures accesses are synchronized.</li>
* <li>Registered listeners are called on the thread that created the ExoPlayer instance.</li>
* <li>An internal playback thread is responsible for playback. Injected player components such as
* Renderers, MediaSources, TrackSelectors and LoadControls are called by the player on this
* thread.</li>
* <li>When the application performs an operation on the player, for example a seek, a message is
* delivered to the internal playback thread via a message queue. The internal playback thread
* consumes messages from the queue and performs the corresponding operations. Similarly, when a
* playback event occurs on the internal playback thread, a message is delivered to the application
* thread via a second message queue. The application thread consumes messages from the queue,
* updating the application visible state and calling corresponding listener methods.</li>
* <li>Injected player components may use additional background threads. For example a MediaSource
* may use a background thread to load data. These are implementation specific.</li>
* <li>ExoPlayer instances must be accessed from a single application thread. For the vast
* majority of cases this should be the application's main thread. Using the application's
* main thread is also a requirement when using ExoPlayer's UI components or the IMA
* extension. The thread on which an ExoPlayer instance must be accessed can be explicitly
* specified by passing a `Looper` when creating the player. If no `Looper` is specified, then
* the `Looper` of the thread that the player is created on is used, or if that thread does
* not have a `Looper`, the `Looper` of the application's main thread is used. In all cases
* the `Looper` of the thread from which the player must be accessed can be queried using
* {@link #getApplicationLooper()}.
* <li>Registered listeners are called on the thread associated with {@link
* #getApplicationLooper()}. Note that this means registered listeners are called on the same
* thread which must be used to access the player.
* <li>An internal playback thread is responsible for playback. Injected player components such as
* Renderers, MediaSources, TrackSelectors and LoadControls are called by the player on this
* thread.
* <li>When the application performs an operation on the player, for example a seek, a message is
* delivered to the internal playback thread via a message queue. The internal playback thread
* consumes messages from the queue and performs the corresponding operations. Similarly, when
* a playback event occurs on the internal playback thread, a message is delivered to the
* application thread via a second message queue. The application thread consumes messages
* from the queue, updating the application visible state and calling corresponding listener
* methods.
* <li>Injected player components may use additional background threads. For example a MediaSource
* may use background threads to load data. These are implementation specific.
* </ul>
*/
public interface ExoPlayer {
public interface ExoPlayer extends Player {
/**
* Listener of changes in player state.
* A builder for {@link ExoPlayer} instances.
*
* <p>See {@link #Builder(Context, Renderer...)} for the list of default values.
*/
interface EventListener {
final class Builder {
private final Renderer[] renderers;
private Clock clock;
private TrackSelector trackSelector;
private LoadControl loadControl;
private BandwidthMeter bandwidthMeter;
private Looper looper;
private AnalyticsCollector analyticsCollector;
private boolean useLazyPreparation;
private boolean buildCalled;
/**
* Called when the timeline and/or manifest has been refreshed.
* <p>
* Note that if the timeline has changed then a position discontinuity may also have occurred.
* For example the current period index may have changed as a result of periods being added or
* removed from the timeline. The will <em>not</em> be reported via a separate call to
* {@link #onPositionDiscontinuity()}.
* Creates a builder with a list of {@link Renderer Renderers}.
*
* @param timeline The latest timeline. Never null, but may be empty.
* @param manifest The latest manifest. May be null.
*/
void onTimelineChanged(Timeline timeline, Object manifest);
/**
* Called when the available or selected tracks change.
* <p>The builder uses the following default values:
*
* @param trackGroups The available tracks. Never null, but may be of length zero.
* @param trackSelections The track selections for each {@link Renderer}. Never null and always
* of length {@link #getRendererCount()}, but may contain null elements.
*/
void onTracksChanged(TrackGroupArray trackGroups, TrackSelectionArray trackSelections);
/**
* Called when the player starts or stops loading the source.
* <ul>
* <li>{@link TrackSelector}: {@link DefaultTrackSelector}
* <li>{@link LoadControl}: {@link DefaultLoadControl}
* <li>{@link BandwidthMeter}: {@link DefaultBandwidthMeter#getSingletonInstance(Context)}
* <li>{@link Looper}: The {@link Looper} associated with the current thread, or the {@link
* Looper} of the application's main thread if the current thread doesn't have a {@link
* Looper}
* <li>{@link AnalyticsCollector}: {@link AnalyticsCollector} with {@link Clock#DEFAULT}
* <li>{@code useLazyPreparation}: {@code true}
* <li>{@link Clock}: {@link Clock#DEFAULT}
* </ul>
*
* @param isLoading Whether the source is currently being loaded.
* @param context A {@link Context}.
* @param renderers The {@link Renderer Renderers} to be used by the player.
*/
void onLoadingChanged(boolean isLoading);
/**
* Called when the value returned from either {@link #getPlayWhenReady()} or
* {@link #getPlaybackState()} changes.
*
* @param playWhenReady Whether playback will proceed when ready.
* @param playbackState One of the {@code STATE} constants defined in the {@link ExoPlayer}
* interface.
*/
void onPlayerStateChanged(boolean playWhenReady, int playbackState);
/**
* Called when an error occurs. The playback state will transition to {@link #STATE_IDLE}
* immediately after this method is called. The player instance can still be used, and
* {@link #release()} must still be called on the player should it no longer be required.
*
* @param error The error.
*/
void onPlayerError(ExoPlaybackException error);
/**
* Called when a position discontinuity occurs without a change to the timeline. A position
* discontinuity occurs when the current window or period index changes (as a result of playback
* transitioning from one period in the timeline to the next), or when the playback position
* jumps within the period currently being played (as a result of a seek being performed, or
* when the source introduces a discontinuity internally).
* <p>
* When a position discontinuity occurs as a result of a change to the timeline this method is
* <em>not</em> called. {@link #onTimelineChanged(Timeline, Object)} is called in this case.
*/
void onPositionDiscontinuity();
/**
* Called when the current playback parameters change. The playback parameters may change due to
* a call to {@link ExoPlayer#setPlaybackParameters(PlaybackParameters)}, or the player itself
* may change them (for example, if audio playback switches to passthrough mode, where speed
* adjustment is no longer possible).
*
* @param playbackParameters The playback parameters.
*/
void onPlaybackParametersChanged(PlaybackParameters playbackParameters);
}
/**
* A component of an {@link ExoPlayer} that can receive messages on the playback thread.
* <p>
* Messages can be delivered to a component via {@link #sendMessages} and
* {@link #blockingSendMessages}.
*/
interface ExoPlayerComponent {
/**
* Handles a message delivered to the component. Called on the playback thread.
*
* @param messageType The message type.
* @param message The message.
* @throws ExoPlaybackException If an error occurred whilst handling the message.
*/
void handleMessage(int messageType, Object message) throws ExoPlaybackException;
}
/**
* Defines a message and a target {@link ExoPlayerComponent} to receive it.
*/
final class ExoPlayerMessage {
/**
* The target to receive the message.
*/
public final ExoPlayerComponent target;
/**
* The type of the message.
*/
public final int messageType;
/**
* The message.
*/
public final Object message;
/**
* @param target The target of the message.
* @param messageType The message type.
* @param message The message.
*/
public ExoPlayerMessage(ExoPlayerComponent target, int messageType, Object message) {
this.target = target;
this.messageType = messageType;
this.message = message;
public Builder(Context context, Renderer... renderers) {
this(
renderers,
new DefaultTrackSelector(context),
new DefaultLoadControl(),
DefaultBandwidthMeter.getSingletonInstance(context),
Util.getLooper(),
new AnalyticsCollector(Clock.DEFAULT),
/* useLazyPreparation= */ true,
Clock.DEFAULT);
}
/**
* Creates a builder with the specified custom components.
*
* <p>Note that this constructor is only useful if you try to ensure that ExoPlayer's default
* components can be removed by ProGuard or R8. For most components except renderers, there is
* only a marginal benefit of doing that.
*
* @param renderers The {@link Renderer Renderers} to be used by the player.
* @param trackSelector A {@link TrackSelector}.
* @param loadControl A {@link LoadControl}.
* @param bandwidthMeter A {@link BandwidthMeter}.
* @param looper A {@link Looper} that must be used for all calls to the player.
* @param analyticsCollector An {@link AnalyticsCollector}.
* @param useLazyPreparation Whether media sources should be initialized lazily.
* @param clock A {@link Clock}. Should always be {@link Clock#DEFAULT}.
*/
public Builder(
Renderer[] renderers,
TrackSelector trackSelector,
LoadControl loadControl,
BandwidthMeter bandwidthMeter,
Looper looper,
AnalyticsCollector analyticsCollector,
boolean useLazyPreparation,
Clock clock) {
Assertions.checkArgument(renderers.length > 0);
this.renderers = renderers;
this.trackSelector = trackSelector;
this.loadControl = loadControl;
this.bandwidthMeter = bandwidthMeter;
this.looper = looper;
this.analyticsCollector = analyticsCollector;
this.useLazyPreparation = useLazyPreparation;
this.clock = clock;
}
/**
* Sets the {@link TrackSelector} that will be used by the player.
*
* @param trackSelector A {@link TrackSelector}.
* @return This builder.
* @throws IllegalStateException If {@link #build()} has already been called.
*/
public Builder setTrackSelector(TrackSelector trackSelector) {
Assertions.checkState(!buildCalled);
this.trackSelector = trackSelector;
return this;
}
/**
* Sets the {@link LoadControl} that will be used by the player.
*
* @param loadControl A {@link LoadControl}.
* @return This builder.
* @throws IllegalStateException If {@link #build()} has already been called.
*/
public Builder setLoadControl(LoadControl loadControl) {
Assertions.checkState(!buildCalled);
this.loadControl = loadControl;
return this;
}
/**
* Sets the {@link BandwidthMeter} that will be used by the player.
*
* @param bandwidthMeter A {@link BandwidthMeter}.
* @return This builder.
* @throws IllegalStateException If {@link #build()} has already been called.
*/
public Builder setBandwidthMeter(BandwidthMeter bandwidthMeter) {
Assertions.checkState(!buildCalled);
this.bandwidthMeter = bandwidthMeter;
return this;
}
/**
* Sets the {@link Looper} that must be used for all calls to the player and that is used to
* call listeners on.
*
* @param looper A {@link Looper}.
* @return This builder.
* @throws IllegalStateException If {@link #build()} has already been called.
*/
public Builder setLooper(Looper looper) {
Assertions.checkState(!buildCalled);
this.looper = looper;
return this;
}
/**
* Sets the {@link AnalyticsCollector} that will collect and forward all player events.
*
* @param analyticsCollector An {@link AnalyticsCollector}.
* @return This builder.
* @throws IllegalStateException If {@link #build()} has already been called.
*/
public Builder setAnalyticsCollector(AnalyticsCollector analyticsCollector) {
Assertions.checkState(!buildCalled);
this.analyticsCollector = analyticsCollector;
return this;
}
/**
* Sets whether media sources should be initialized lazily.
*
* <p>If false, all initial preparation steps (e.g., manifest loads) happen immediately. If
* true, these initial preparations are triggered only when the player starts buffering the
* media.
*
* @param useLazyPreparation Whether to use lazy preparation.
* @return This builder.
* @throws IllegalStateException If {@link #build()} has already been called.
*/
public Builder setUseLazyPreparation(boolean useLazyPreparation) {
Assertions.checkState(!buildCalled);
this.useLazyPreparation = useLazyPreparation;
return this;
}
/**
* Sets the {@link Clock} that will be used by the player. Should only be set for testing
* purposes.
*
* @param clock A {@link Clock}.
* @return This builder.
* @throws IllegalStateException If {@link #build()} has already been called.
*/
@VisibleForTesting
public Builder setClock(Clock clock) {
Assertions.checkState(!buildCalled);
this.clock = clock;
return this;
}
/**
* Builds an {@link ExoPlayer} instance.
*
* @throws IllegalStateException If {@link #build()} has already been called.
*/
public ExoPlayer build() {
Assertions.checkState(!buildCalled);
buildCalled = true;
return new ExoPlayerImpl(
renderers, trackSelector, loadControl, bandwidthMeter, clock, looper);
}
}
/**
* The player does not have a source to play, so it is neither buffering nor ready to play.
*/
int STATE_IDLE = 1;
/**
* The player not able to immediately play from the current position. The cause is
* {@link Renderer} specific, but this state typically occurs when more data needs to be
* loaded to be ready to play, or more data needs to be buffered for playback to resume.
*/
int STATE_BUFFERING = 2;
/**
* The player is able to immediately play from the current position. The player will be playing if
* {@link #getPlayWhenReady()} returns true, and paused otherwise.
*/
int STATE_READY = 3;
/**
* The player has finished playing the media.
*/
int STATE_ENDED = 4;
/** Returns the {@link Looper} associated with the playback thread. */
Looper getPlaybackLooper();
/**
* Register a listener to receive events from the player. The listener's methods will be called on
* the thread that was used to construct the player.
*
* @param listener The listener to register.
* Retries a failed or stopped playback. Does nothing if the player has been reset, or if playback
* has not failed or been stopped.
*/
void addListener(EventListener listener);
void retry();
/**
* Unregister a listener. The listener will no longer receive events from the player.
*
* @param listener The listener to unregister.
*/
void removeListener(EventListener listener);
/**
* Returns the current state of the player.
*
* @return One of the {@code STATE} constants defined in this interface.
*/
int getPlaybackState();
/**
* Prepares the player to play the provided {@link MediaSource}. Equivalent to
* {@code prepare(mediaSource, true, true)}.
* Prepares the player to play the provided {@link MediaSource}. Equivalent to {@code
* prepare(mediaSource, true, true)}.
*/
void prepare(MediaSource mediaSource);
@ -294,202 +352,53 @@ public interface ExoPlayer {
void prepare(MediaSource mediaSource, boolean resetPosition, boolean resetState);
/**
* Sets whether playback should proceed when {@link #getPlaybackState()} == {@link #STATE_READY}.
* <p>
* If the player is already in the ready state then this method can be used to pause and resume
* playback.
* Creates a message that can be sent to a {@link PlayerMessage.Target}. By default, the message
* will be delivered immediately without blocking on the playback thread. The default {@link
* PlayerMessage#getType()} is 0 and the default {@link PlayerMessage#getPayload()} is null. If a
* position is specified with {@link PlayerMessage#setPosition(long)}, the message will be
* delivered at this position in the current window defined by {@link #getCurrentWindowIndex()}.
* Alternatively, the message can be sent at a specific window using {@link
* PlayerMessage#setPosition(int, long)}.
*/
PlayerMessage createMessage(PlayerMessage.Target target);
/**
* Sets the parameters that control how seek operations are performed.
*
* @param playWhenReady Whether playback should proceed when ready.
* @param seekParameters The seek parameters, or {@code null} to use the defaults.
*/
void setPlayWhenReady(boolean playWhenReady);
void setSeekParameters(@Nullable SeekParameters seekParameters);
/** Returns the currently active {@link SeekParameters} of the player. */
SeekParameters getSeekParameters();
/**
* Whether playback will proceed when {@link #getPlaybackState()} == {@link #STATE_READY}.
* Sets whether the player is allowed to keep holding limited resources such as video decoders,
* even when in the idle state. By doing so, the player may be able to reduce latency when
* starting to play another piece of content for which the same resources are required.
*
* @return Whether playback will proceed when ready.
*/
boolean getPlayWhenReady();
/**
* Whether the player is currently loading the source.
* <p>This mode should be used with caution, since holding limited resources may prevent other
* players of media components from acquiring them. It should only be enabled when <em>both</em>
* of the following conditions are true:
*
* @return Whether the player is currently loading the source.
*/
boolean isLoading();
/**
* Seeks to the default position associated with the current window. The position can depend on
* the type of source passed to {@link #prepare(MediaSource)}. For live streams it will typically
* be the live edge of the window. For other streams it will typically be the start of the window.
*/
void seekToDefaultPosition();
/**
* Seeks to the default position associated with the specified window. The position can depend on
* the type of source passed to {@link #prepare(MediaSource)}. For live streams it will typically
* be the live edge of the window. For other streams it will typically be the start of the window.
* <ul>
* <li>The application that owns the player is in the foreground.
* <li>The player is used in a way that may benefit from foreground mode. For this to be true,
* the same player instance must be used to play multiple pieces of content, and there must
* be gaps between the playbacks (i.e. {@link #stop} is called to halt one playback, and
* {@link #prepare} is called some time later to start a new one).
* </ul>
*
* @param windowIndex The index of the window whose associated default position should be seeked
* to.
*/
void seekToDefaultPosition(int windowIndex);
/**
* Seeks to a position specified in milliseconds in the current window.
* <p>Note that foreground mode is <em>not</em> useful for switching between content without gaps
* between the playbacks. For this use case {@link #stop} does not need to be called, and simply
* calling {@link #prepare} for the new media will cause limited resources to be retained even if
* foreground mode is not enabled.
*
* @param positionMs The seek position in the current window, or {@link C#TIME_UNSET} to seek to
* the window's default position.
*/
void seekTo(long positionMs);
/**
 * Seeks to a position specified in milliseconds in the specified window.
 *
 * @param windowIndex The index of the window.
 * @param positionMs The seek position in the specified window, or {@link C#TIME_UNSET} to seek to
 *     the window's default position.
 */
void seekTo(int windowIndex, long positionMs);
/**
 * Attempts to set the playback parameters. Passing {@code null} sets the parameters to the
 * default, {@link PlaybackParameters#DEFAULT}, which means there is no speed or pitch adjustment.
 *
 * <p>Playback parameters changes may cause the player to buffer.
 * {@link EventListener#onPlaybackParametersChanged(PlaybackParameters)} will be called whenever
 * the currently active playback parameters change. When that listener is called, the parameters
 * passed to it may not match {@code playbackParameters}. For example, the chosen speed or pitch
 * may be out of range, in which case they are constrained to a set of permitted values. If it is
 * not possible to change the playback parameters, the listener will not be invoked.
 *
 * @param playbackParameters The playback parameters, or {@code null} to use the defaults.
 */
void setPlaybackParameters(@Nullable PlaybackParameters playbackParameters);
/**
 * Returns the currently active playback parameters.
 *
 * @return The active {@link PlaybackParameters}.
 * @see EventListener#onPlaybackParametersChanged(PlaybackParameters)
 */
PlaybackParameters getPlaybackParameters();
/**
 * Stops playback. Use {@code setPlayWhenReady(false)} rather than this method if the intention
 * is to pause playback.
 *
 * <p>Calling this method will cause the playback state to transition to {@link #STATE_IDLE}. The
 * player instance can still be used, and {@link #release()} must still be called on the player if
 * it's no longer required.
 *
 * <p>Calling this method does not reset the playback position.
 */
void stop();
/**
 * Releases the player. This method must be called when the player is no longer required. The
 * player must not be used after calling this method.
 */
void release();
/**
 * Sends messages to their target components. The messages are delivered on the playback thread.
 * If a component throws an {@link ExoPlaybackException} then it is propagated out of the player
 * as an error.
 *
 * <p>This method returns immediately; delivery happens asynchronously. Use
 * {@link #blockingSendMessages(ExoPlayerMessage...)} to wait for delivery.
 *
 * @param messages The messages to be sent.
 */
void sendMessages(ExoPlayerMessage... messages);
/**
 * Variant of {@link #sendMessages(ExoPlayerMessage...)} that blocks until after the messages have
 * been delivered.
 *
 * @param messages The messages to be sent.
 */
void blockingSendMessages(ExoPlayerMessage... messages);
/**
 * Returns the number of renderers.
 *
 * @return The number of renderers.
 */
int getRendererCount();
/**
 * Returns the track type that the renderer at a given index handles.
 *
 * @param index The index of the renderer.
 * @return One of the {@code TRACK_TYPE_*} constants defined in {@link C}.
 * @see Renderer#getTrackType()
 */
int getRendererType(int index);
/**
 * Returns the available track groups.
 *
 * @return The available track groups.
 */
TrackGroupArray getCurrentTrackGroups();
/**
 * Returns the current track selections for each renderer.
 *
 * @return The current track selection for each renderer.
 */
TrackSelectionArray getCurrentTrackSelections();
/**
 * Returns the current manifest. The type depends on the {@link MediaSource} passed to
 * {@link #prepare}. May be null.
 *
 * @return The current manifest, or null if no manifest is available.
 */
Object getCurrentManifest();
/**
 * Returns the current {@link Timeline}. Never null, but may be empty.
 *
 * @return The current {@link Timeline}.
 */
Timeline getCurrentTimeline();
/**
 * Returns the index of the period currently being played.
 *
 * @return The index of the period currently being played.
 */
int getCurrentPeriodIndex();
/**
 * Returns the index of the window currently being played.
 *
 * @return The index of the window currently being played.
 */
int getCurrentWindowIndex();
/**
 * Returns the duration of the current window in milliseconds, or {@link C#TIME_UNSET} if the
 * duration is not known.
 *
 * @return The duration of the current window in milliseconds, or {@link C#TIME_UNSET}.
 */
long getDuration();
/**
 * Returns the playback position in the current window, in milliseconds.
 *
 * @return The playback position in the current window, in milliseconds.
 */
long getCurrentPosition();
/**
 * Returns an estimate of the position in the current window up to which data is buffered, in
 * milliseconds.
 *
 * @return An estimate of the buffered position in the current window, in milliseconds.
 */
long getBufferedPosition();
/**
 * Returns an estimate of the percentage in the current window up to which data is buffered, or 0
 * if no estimate is available.
 *
 * @return An estimate of the buffered percentage in the current window, or 0.
 */
int getBufferedPercentage();
/**
 * Returns whether the current window is dynamic, or {@code false} if the {@link Timeline} is
 * empty.
 *
 * @return Whether the current window is dynamic.
 * @see Timeline.Window#isDynamic
 */
boolean isCurrentWindowDynamic();
/**
 * Returns whether the current window is seekable, or {@code false} if the {@link Timeline} is
 * empty.
 *
 * @return Whether the current window is seekable.
 * @see Timeline.Window#isSeekable
 */
boolean isCurrentWindowSeekable();
void setForegroundMode(boolean foregroundMode);
}

Просмотреть файл

@ -17,158 +17,334 @@ package org.mozilla.thirdparty.com.google.android.exoplayer2;
import android.content.Context;
import android.os.Looper;
import androidx.annotation.Nullable;
import org.mozilla.thirdparty.com.google.android.exoplayer2.analytics.AnalyticsCollector;
import org.mozilla.thirdparty.com.google.android.exoplayer2.drm.DrmSessionManager;
import org.mozilla.thirdparty.com.google.android.exoplayer2.drm.FrameworkMediaCrypto;
import org.mozilla.thirdparty.com.google.android.exoplayer2.source.MediaSource;
import org.mozilla.thirdparty.com.google.android.exoplayer2.trackselection.DefaultTrackSelector;
import org.mozilla.thirdparty.com.google.android.exoplayer2.trackselection.TrackSelector;
import org.mozilla.thirdparty.com.google.android.exoplayer2.upstream.BandwidthMeter;
import org.mozilla.thirdparty.com.google.android.exoplayer2.upstream.DefaultBandwidthMeter;
import org.mozilla.thirdparty.com.google.android.exoplayer2.util.Clock;
import org.mozilla.thirdparty.com.google.android.exoplayer2.util.Util;
/**
* A factory for {@link ExoPlayer} instances.
*/
/** @deprecated Use {@link SimpleExoPlayer.Builder} or {@link ExoPlayer.Builder} instead. */
@Deprecated
public final class ExoPlayerFactory {
private ExoPlayerFactory() {}
/**
* Creates a {@link SimpleExoPlayer} instance. Must be called from a thread that has an associated
* {@link Looper}.
*
* @param context A {@link Context}.
* @param trackSelector The {@link TrackSelector} that will be used by the instance.
* @param loadControl The {@link LoadControl} that will be used by the instance.
* @deprecated Use {@link #newSimpleInstance(RenderersFactory, TrackSelector, LoadControl)}.
* @deprecated Use {@link SimpleExoPlayer.Builder} instead. The {@link DrmSessionManager} cannot
* be passed to {@link SimpleExoPlayer.Builder} and should instead be injected into the {@link
* MediaSource} factories.
*/
@Deprecated
public static SimpleExoPlayer newSimpleInstance(Context context, TrackSelector trackSelector,
LoadControl loadControl) {
RenderersFactory renderersFactory = new DefaultRenderersFactory(context);
return newSimpleInstance(renderersFactory, trackSelector, loadControl);
}
/**
* Creates a {@link SimpleExoPlayer} instance. Must be called from a thread that has an associated
* {@link Looper}. Available extension renderers are not used.
*
* @param context A {@link Context}.
* @param trackSelector The {@link TrackSelector} that will be used by the instance.
* @param loadControl The {@link LoadControl} that will be used by the instance.
* @param drmSessionManager An optional {@link DrmSessionManager}. May be null if the instance
* will not be used for DRM protected playbacks.
* @deprecated Use {@link #newSimpleInstance(RenderersFactory, TrackSelector, LoadControl)}.
*/
@Deprecated
public static SimpleExoPlayer newSimpleInstance(Context context, TrackSelector trackSelector,
LoadControl loadControl, DrmSessionManager<FrameworkMediaCrypto> drmSessionManager) {
RenderersFactory renderersFactory = new DefaultRenderersFactory(context, drmSessionManager);
return newSimpleInstance(renderersFactory, trackSelector, loadControl);
}
/**
* Creates a {@link SimpleExoPlayer} instance. Must be called from a thread that has an associated
* {@link Looper}.
*
* @param context A {@link Context}.
* @param trackSelector The {@link TrackSelector} that will be used by the instance.
* @param loadControl The {@link LoadControl} that will be used by the instance.
* @param drmSessionManager An optional {@link DrmSessionManager}. May be null if the instance
* will not be used for DRM protected playbacks.
* @param extensionRendererMode The extension renderer mode, which determines if and how available
* extension renderers are used. Note that extensions must be included in the application
* build for them to be considered available.
* @deprecated Use {@link #newSimpleInstance(RenderersFactory, TrackSelector, LoadControl)}.
*/
@Deprecated
public static SimpleExoPlayer newSimpleInstance(Context context, TrackSelector trackSelector,
LoadControl loadControl, DrmSessionManager<FrameworkMediaCrypto> drmSessionManager,
@SuppressWarnings("deprecation")
public static SimpleExoPlayer newSimpleInstance(
Context context,
TrackSelector trackSelector,
LoadControl loadControl,
@Nullable DrmSessionManager<FrameworkMediaCrypto> drmSessionManager,
@DefaultRenderersFactory.ExtensionRendererMode int extensionRendererMode) {
RenderersFactory renderersFactory = new DefaultRenderersFactory(context, drmSessionManager,
extensionRendererMode);
return newSimpleInstance(renderersFactory, trackSelector, loadControl);
RenderersFactory renderersFactory =
new DefaultRenderersFactory(context).setExtensionRendererMode(extensionRendererMode);
return newSimpleInstance(
context, renderersFactory, trackSelector, loadControl, drmSessionManager);
}
/**
* Creates a {@link SimpleExoPlayer} instance. Must be called from a thread that has an associated
* {@link Looper}.
*
* @param context A {@link Context}.
* @param trackSelector The {@link TrackSelector} that will be used by the instance.
* @param loadControl The {@link LoadControl} that will be used by the instance.
* @param drmSessionManager An optional {@link DrmSessionManager}. May be null if the instance
* will not be used for DRM protected playbacks.
* @param extensionRendererMode The extension renderer mode, which determines if and how available
* extension renderers are used. Note that extensions must be included in the application
* build for them to be considered available.
* @param allowedVideoJoiningTimeMs The maximum duration for which a video renderer can attempt to
* seamlessly join an ongoing playback.
* @deprecated Use {@link #newSimpleInstance(RenderersFactory, TrackSelector, LoadControl)}.
* @deprecated Use {@link SimpleExoPlayer.Builder} instead. The {@link DrmSessionManager} cannot
* be passed to {@link SimpleExoPlayer.Builder} and should instead be injected into the {@link
* MediaSource} factories.
*/
@Deprecated
public static SimpleExoPlayer newSimpleInstance(Context context, TrackSelector trackSelector,
LoadControl loadControl, DrmSessionManager<FrameworkMediaCrypto> drmSessionManager,
@SuppressWarnings("deprecation")
public static SimpleExoPlayer newSimpleInstance(
Context context,
TrackSelector trackSelector,
LoadControl loadControl,
@Nullable DrmSessionManager<FrameworkMediaCrypto> drmSessionManager,
@DefaultRenderersFactory.ExtensionRendererMode int extensionRendererMode,
long allowedVideoJoiningTimeMs) {
RenderersFactory renderersFactory = new DefaultRenderersFactory(context, drmSessionManager,
extensionRendererMode, allowedVideoJoiningTimeMs);
return newSimpleInstance(renderersFactory, trackSelector, loadControl);
RenderersFactory renderersFactory =
new DefaultRenderersFactory(context)
.setExtensionRendererMode(extensionRendererMode)
.setAllowedVideoJoiningTimeMs(allowedVideoJoiningTimeMs);
return newSimpleInstance(
context, renderersFactory, trackSelector, loadControl, drmSessionManager);
}
/**
* Creates a {@link SimpleExoPlayer} instance. Must be called from a thread that has an associated
* {@link Looper}.
*
* @param context A {@link Context}.
* @param trackSelector The {@link TrackSelector} that will be used by the instance.
*/
/** @deprecated Use {@link SimpleExoPlayer.Builder} instead. */
@Deprecated
@SuppressWarnings("deprecation")
public static SimpleExoPlayer newSimpleInstance(Context context) {
return newSimpleInstance(context, new DefaultTrackSelector(context));
}
/** @deprecated Use {@link SimpleExoPlayer.Builder} instead. */
@Deprecated
@SuppressWarnings("deprecation")
public static SimpleExoPlayer newSimpleInstance(Context context, TrackSelector trackSelector) {
return newSimpleInstance(new DefaultRenderersFactory(context), trackSelector);
return newSimpleInstance(context, new DefaultRenderersFactory(context), trackSelector);
}
/** @deprecated Use {@link SimpleExoPlayer.Builder} instead. */
@Deprecated
@SuppressWarnings("deprecation")
public static SimpleExoPlayer newSimpleInstance(
Context context, RenderersFactory renderersFactory, TrackSelector trackSelector) {
return newSimpleInstance(context, renderersFactory, trackSelector, new DefaultLoadControl());
}
/** @deprecated Use {@link SimpleExoPlayer.Builder} instead. */
@Deprecated
@SuppressWarnings("deprecation")
public static SimpleExoPlayer newSimpleInstance(
Context context, TrackSelector trackSelector, LoadControl loadControl) {
RenderersFactory renderersFactory = new DefaultRenderersFactory(context);
return newSimpleInstance(context, renderersFactory, trackSelector, loadControl);
}
/**
* Creates a {@link SimpleExoPlayer} instance. Must be called from a thread that has an associated
* {@link Looper}.
*
* @param renderersFactory A factory for creating {@link Renderer}s to be used by the instance.
* @param trackSelector The {@link TrackSelector} that will be used by the instance.
* @deprecated Use {@link SimpleExoPlayer.Builder} instead. The {@link DrmSessionManager} cannot
* be passed to {@link SimpleExoPlayer.Builder} and should instead be injected into the {@link
* MediaSource} factories.
*/
public static SimpleExoPlayer newSimpleInstance(RenderersFactory renderersFactory,
TrackSelector trackSelector) {
return newSimpleInstance(renderersFactory, trackSelector, new DefaultLoadControl());
@Deprecated
@SuppressWarnings("deprecation")
public static SimpleExoPlayer newSimpleInstance(
Context context,
TrackSelector trackSelector,
LoadControl loadControl,
@Nullable DrmSessionManager<FrameworkMediaCrypto> drmSessionManager) {
RenderersFactory renderersFactory = new DefaultRenderersFactory(context);
return newSimpleInstance(
context, renderersFactory, trackSelector, loadControl, drmSessionManager);
}
/**
* Creates a {@link SimpleExoPlayer} instance. Must be called from a thread that has an associated
* {@link Looper}.
*
* @param renderersFactory A factory for creating {@link Renderer}s to be used by the instance.
* @param trackSelector The {@link TrackSelector} that will be used by the instance.
* @param loadControl The {@link LoadControl} that will be used by the instance.
* @deprecated Use {@link SimpleExoPlayer.Builder} instead. The {@link DrmSessionManager} cannot
* be passed to {@link SimpleExoPlayer.Builder} and should instead be injected into the {@link
* MediaSource} factories.
*/
public static SimpleExoPlayer newSimpleInstance(RenderersFactory renderersFactory,
TrackSelector trackSelector, LoadControl loadControl) {
return new SimpleExoPlayer(renderersFactory, trackSelector, loadControl);
@Deprecated
@SuppressWarnings("deprecation")
public static SimpleExoPlayer newSimpleInstance(
Context context,
RenderersFactory renderersFactory,
TrackSelector trackSelector,
@Nullable DrmSessionManager<FrameworkMediaCrypto> drmSessionManager) {
return newSimpleInstance(
context, renderersFactory, trackSelector, new DefaultLoadControl(), drmSessionManager);
}
/**
* Creates an {@link ExoPlayer} instance. Must be called from a thread that has an associated
* {@link Looper}.
*
* @param renderers The {@link Renderer}s that will be used by the instance.
* @param trackSelector The {@link TrackSelector} that will be used by the instance.
*/
public static ExoPlayer newInstance(Renderer[] renderers, TrackSelector trackSelector) {
return newInstance(renderers, trackSelector, new DefaultLoadControl());
}
/**
* Creates an {@link ExoPlayer} instance. Must be called from a thread that has an associated
* {@link Looper}.
*
* @param renderers The {@link Renderer}s that will be used by the instance.
* @param trackSelector The {@link TrackSelector} that will be used by the instance.
* @param loadControl The {@link LoadControl} that will be used by the instance.
*/
public static ExoPlayer newInstance(Renderer[] renderers, TrackSelector trackSelector,
/** @deprecated Use {@link SimpleExoPlayer.Builder} instead. */
@Deprecated
@SuppressWarnings("deprecation")
public static SimpleExoPlayer newSimpleInstance(
Context context,
RenderersFactory renderersFactory,
TrackSelector trackSelector,
LoadControl loadControl) {
return new ExoPlayerImpl(renderers, trackSelector, loadControl);
return newSimpleInstance(
context,
renderersFactory,
trackSelector,
loadControl,
/* drmSessionManager= */ null,
Util.getLooper());
}
/**
* @deprecated Use {@link SimpleExoPlayer.Builder} instead. The {@link DrmSessionManager} cannot
* be passed to {@link SimpleExoPlayer.Builder} and should instead be injected into the {@link
* MediaSource} factories.
*/
@Deprecated
@SuppressWarnings("deprecation")
public static SimpleExoPlayer newSimpleInstance(
Context context,
RenderersFactory renderersFactory,
TrackSelector trackSelector,
LoadControl loadControl,
@Nullable DrmSessionManager<FrameworkMediaCrypto> drmSessionManager) {
return newSimpleInstance(
context, renderersFactory, trackSelector, loadControl, drmSessionManager, Util.getLooper());
}
/**
* @deprecated Use {@link SimpleExoPlayer.Builder} instead. The {@link DrmSessionManager} cannot
* be passed to {@link SimpleExoPlayer.Builder} and should instead be injected into the {@link
* MediaSource} factories.
*/
@Deprecated
@SuppressWarnings("deprecation")
public static SimpleExoPlayer newSimpleInstance(
Context context,
RenderersFactory renderersFactory,
TrackSelector trackSelector,
LoadControl loadControl,
@Nullable DrmSessionManager<FrameworkMediaCrypto> drmSessionManager,
BandwidthMeter bandwidthMeter) {
return newSimpleInstance(
context,
renderersFactory,
trackSelector,
loadControl,
drmSessionManager,
bandwidthMeter,
new AnalyticsCollector(Clock.DEFAULT),
Util.getLooper());
}
/**
* @deprecated Use {@link SimpleExoPlayer.Builder} instead. The {@link DrmSessionManager} cannot
* be passed to {@link SimpleExoPlayer.Builder} and should instead be injected into the {@link
* MediaSource} factories.
*/
@Deprecated
@SuppressWarnings("deprecation")
public static SimpleExoPlayer newSimpleInstance(
Context context,
RenderersFactory renderersFactory,
TrackSelector trackSelector,
LoadControl loadControl,
@Nullable DrmSessionManager<FrameworkMediaCrypto> drmSessionManager,
AnalyticsCollector analyticsCollector) {
return newSimpleInstance(
context,
renderersFactory,
trackSelector,
loadControl,
drmSessionManager,
analyticsCollector,
Util.getLooper());
}
/**
* @deprecated Use {@link SimpleExoPlayer.Builder} instead. The {@link DrmSessionManager} cannot
* be passed to {@link SimpleExoPlayer.Builder} and should instead be injected into the {@link
* MediaSource} factories.
*/
@Deprecated
@SuppressWarnings("deprecation")
public static SimpleExoPlayer newSimpleInstance(
Context context,
RenderersFactory renderersFactory,
TrackSelector trackSelector,
LoadControl loadControl,
@Nullable DrmSessionManager<FrameworkMediaCrypto> drmSessionManager,
Looper looper) {
return newSimpleInstance(
context,
renderersFactory,
trackSelector,
loadControl,
drmSessionManager,
new AnalyticsCollector(Clock.DEFAULT),
looper);
}
/**
* @deprecated Use {@link SimpleExoPlayer.Builder} instead. The {@link DrmSessionManager} cannot
* be passed to {@link SimpleExoPlayer.Builder} and should instead be injected into the {@link
* MediaSource} factories.
*/
@Deprecated
@SuppressWarnings("deprecation")
public static SimpleExoPlayer newSimpleInstance(
Context context,
RenderersFactory renderersFactory,
TrackSelector trackSelector,
LoadControl loadControl,
@Nullable DrmSessionManager<FrameworkMediaCrypto> drmSessionManager,
AnalyticsCollector analyticsCollector,
Looper looper) {
return newSimpleInstance(
context,
renderersFactory,
trackSelector,
loadControl,
drmSessionManager,
DefaultBandwidthMeter.getSingletonInstance(context),
analyticsCollector,
looper);
}
/**
* @deprecated Use {@link SimpleExoPlayer.Builder} instead. The {@link DrmSessionManager} cannot
* be passed to {@link SimpleExoPlayer.Builder} and should instead be injected into the {@link
* MediaSource} factories.
*/
@SuppressWarnings("deprecation")
@Deprecated
public static SimpleExoPlayer newSimpleInstance(
Context context,
RenderersFactory renderersFactory,
TrackSelector trackSelector,
LoadControl loadControl,
@Nullable DrmSessionManager<FrameworkMediaCrypto> drmSessionManager,
BandwidthMeter bandwidthMeter,
AnalyticsCollector analyticsCollector,
Looper looper) {
return new SimpleExoPlayer(
context,
renderersFactory,
trackSelector,
loadControl,
drmSessionManager,
bandwidthMeter,
analyticsCollector,
Clock.DEFAULT,
looper);
}
/** @deprecated Use {@link ExoPlayer.Builder} instead. */
@Deprecated
@SuppressWarnings("deprecation")
public static ExoPlayer newInstance(
Context context, Renderer[] renderers, TrackSelector trackSelector) {
return newInstance(context, renderers, trackSelector, new DefaultLoadControl());
}
/** @deprecated Use {@link ExoPlayer.Builder} instead. */
@Deprecated
@SuppressWarnings("deprecation")
public static ExoPlayer newInstance(
Context context, Renderer[] renderers, TrackSelector trackSelector, LoadControl loadControl) {
return newInstance(context, renderers, trackSelector, loadControl, Util.getLooper());
}
/** @deprecated Use {@link ExoPlayer.Builder} instead. */
@Deprecated
@SuppressWarnings("deprecation")
public static ExoPlayer newInstance(
Context context,
Renderer[] renderers,
TrackSelector trackSelector,
LoadControl loadControl,
Looper looper) {
return newInstance(
context,
renderers,
trackSelector,
loadControl,
DefaultBandwidthMeter.getSingletonInstance(context),
looper);
}
/** @deprecated Use {@link ExoPlayer.Builder} instead. */
@Deprecated
public static ExoPlayer newInstance(
Context context,
Renderer[] renderers,
TrackSelector trackSelector,
LoadControl loadControl,
BandwidthMeter bandwidthMeter,
Looper looper) {
return new ExoPlayerImpl(
renderers, trackSelector, loadControl, bandwidthMeter, Clock.DEFAULT, looper);
}
}

Разница между файлами не показана из-за своего большого размера Загрузить разницу

Разница между файлами не показана из-за своего большого размера Загрузить разницу

Просмотреть файл

@ -15,43 +15,72 @@
*/
package org.mozilla.thirdparty.com.google.android.exoplayer2;
import java.util.HashSet;
/**
* Information about the ExoPlayer library.
*/
public interface ExoPlayerLibraryInfo {
public final class ExoPlayerLibraryInfo {
/**
* The version of the library expressed as a string, for example "1.2.3".
* A tag to use when logging library information.
*/
public static final String TAG = "ExoPlayer";
/** The version of the library expressed as a string, for example "1.2.3". */
// Intentionally hardcoded. Do not derive from other constants (e.g. VERSION_INT) or vice versa.
String VERSION = "2.4.0";
public static final String VERSION = "2.11.4";
/**
* The version of the library expressed as {@code "ExoPlayerLib/" + VERSION}.
*/
/** The version of the library expressed as {@code "ExoPlayerLib/" + VERSION}. */
// Intentionally hardcoded. Do not derive from other constants (e.g. VERSION) or vice versa.
String VERSION_SLASHY = "ExoPlayerLib/2.4.0";
public static final String VERSION_SLASHY = "ExoPlayerLib/2.11.4";
/**
* The version of the library expressed as an integer, for example 1002003.
* <p>
* Three digits are used for each component of {@link #VERSION}. For example "1.2.3" has the
*
* <p>Three digits are used for each component of {@link #VERSION}. For example "1.2.3" has the
* corresponding integer version 1002003 (001-002-003), and "123.45.6" has the corresponding
* integer version 123045006 (123-045-006).
*/
// Intentionally hardcoded. Do not derive from other constants (e.g. VERSION) or vice versa.
int VERSION_INT = 2004000;
public static final int VERSION_INT = 2011004;
/**
* Whether the library was compiled with {@link org.mozilla.thirdparty.com.google.android.exoplayer2.util.Assertions}
* checks enabled.
*/
boolean ASSERTIONS_ENABLED = true;
public static final boolean ASSERTIONS_ENABLED = true;
/** Whether an exception should be thrown in case of an OpenGl error. */
public static final boolean GL_ASSERTIONS_ENABLED = false;
/**
* Whether the library was compiled with {@link org.mozilla.thirdparty.com.google.android.exoplayer2.util.TraceUtil}
* trace enabled.
*/
boolean TRACE_ENABLED = true;
public static final boolean TRACE_ENABLED = true;
private static final HashSet<String> registeredModules = new HashSet<>();
private static String registeredModulesString = "goog.exo.core";
private ExoPlayerLibraryInfo() {} // Prevents instantiation.
/**
* Returns a string consisting of registered module names separated by ", ".
*/
public static synchronized String registeredModules() {
return registeredModulesString;
}
/**
* Registers a module to be returned in the {@link #registeredModules()} string.
*
* @param name The name of the module being registered.
*/
public static synchronized void registerModule(String name) {
if (registeredModules.add(name)) {
registeredModulesString = registeredModulesString + ", " + name;
}
}
}

Разница между файлами не показана из-за своего большого размера Загрузить разницу

Просмотреть файл

@ -15,14 +15,29 @@
*/
package org.mozilla.thirdparty.com.google.android.exoplayer2;
import androidx.annotation.Nullable;
import org.mozilla.thirdparty.com.google.android.exoplayer2.drm.DrmSession;
/**
* Holds a {@link Format}.
*/
public final class FormatHolder {
/**
* The held {@link Format}.
*/
public Format format;
/** Whether the {@link #format} setter also sets the {@link #drmSession} field. */
// TODO: Remove once all Renderers and MediaSources have migrated to the new DRM model [Internal
// ref: b/129764794].
public boolean includesDrmSession;
/** An accompanying context for decrypting samples in the format. */
@Nullable public DrmSession<?> drmSession;
/** The held {@link Format}. */
@Nullable public Format format;
/** Clears the holder. */
public void clear() {
includesDrmSession = false;
drmSession = null;
format = null;
}
}

Просмотреть файл

@ -56,23 +56,58 @@ public interface LoadControl {
Allocator getAllocator();
/**
* Called by the player to determine whether sufficient media is buffered for playback to be
* started or resumed.
* Returns the duration of media to retain in the buffer prior to the current playback position,
* for fast backward seeking.
* <p>
* Note: If {@link #retainBackBufferFromKeyframe()} is false then seeking in the back-buffer will
* only be fast if the back-buffer contains a keyframe prior to the seek position.
* <p>
* Note: Implementations should return a single value. Dynamic changes to the back-buffer are not
* currently supported.
*
* @param bufferedDurationUs The duration of media that's currently buffered.
* @param rebuffering Whether the player is rebuffering. A rebuffer is defined to be caused by
* buffer depletion rather than a user action. Hence this parameter is false during initial
* buffering and when buffering as a result of a seek operation.
* @return Whether playback should be allowed to start or resume.
* @return The duration of media to retain in the buffer prior to the current playback position,
* in microseconds.
*/
boolean shouldStartPlayback(long bufferedDurationUs, boolean rebuffering);
long getBackBufferDurationUs();
/**
* Returns whether media should be retained from the keyframe before the current playback position
* minus {@link #getBackBufferDurationUs()}, rather than any sample before or at that position.
* <p>
* Warning: Returning true will cause the back-buffer size to depend on the spacing of keyframes
* in the media being played. Returning true is not recommended unless you control the media and
* are comfortable with the back-buffer size exceeding {@link #getBackBufferDurationUs()} by as
* much as the maximum duration between adjacent keyframes in the media.
* <p>
* Note: Implementations should return a single value. Dynamic changes to the back-buffer are not
* currently supported.
*
* @return Whether media should be retained from the keyframe before the current playback position
* minus {@link #getBackBufferDurationUs()}, rather than any sample before or at that position.
*/
boolean retainBackBufferFromKeyframe();
/**
* Called by the player to determine whether it should continue to load the source.
*
* @param bufferedDurationUs The duration of media that's currently buffered.
* @param playbackSpeed The current playback speed.
* @return Whether the loading should continue.
*/
boolean shouldContinueLoading(long bufferedDurationUs);
boolean shouldContinueLoading(long bufferedDurationUs, float playbackSpeed);
/**
* Called repeatedly by the player when it's loading the source, has yet to start playback, and
* has the minimum amount of data necessary for playback to be started. The value returned
* determines whether playback is actually started. The load control may opt to return {@code
* false} until some condition has been met (e.g. a certain amount of media is buffered).
*
* @param bufferedDurationUs The duration of media that's currently buffered.
* @param playbackSpeed The current playback speed.
* @param rebuffering Whether the player is rebuffering. A rebuffer is defined to be caused by
* buffer depletion rather than a user action. Hence this parameter is false during initial
* buffering and when buffering as a result of a seek operation.
* @return Whether playback should be allowed to start or resume.
*/
boolean shouldStartPlayback(long bufferedDurationUs, float playbackSpeed, boolean rebuffering);
}

Просмотреть файл

@ -0,0 +1,432 @@
/*
* Copyright (C) 2018 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.mozilla.thirdparty.com.google.android.exoplayer2;
import androidx.annotation.Nullable;
import org.mozilla.thirdparty.com.google.android.exoplayer2.source.ClippingMediaPeriod;
import org.mozilla.thirdparty.com.google.android.exoplayer2.source.EmptySampleStream;
import org.mozilla.thirdparty.com.google.android.exoplayer2.source.MediaPeriod;
import org.mozilla.thirdparty.com.google.android.exoplayer2.source.MediaSource;
import org.mozilla.thirdparty.com.google.android.exoplayer2.source.MediaSource.MediaPeriodId;
import org.mozilla.thirdparty.com.google.android.exoplayer2.source.SampleStream;
import org.mozilla.thirdparty.com.google.android.exoplayer2.source.TrackGroupArray;
import org.mozilla.thirdparty.com.google.android.exoplayer2.trackselection.TrackSelection;
import org.mozilla.thirdparty.com.google.android.exoplayer2.trackselection.TrackSelectionArray;
import org.mozilla.thirdparty.com.google.android.exoplayer2.trackselection.TrackSelector;
import org.mozilla.thirdparty.com.google.android.exoplayer2.trackselection.TrackSelectorResult;
import org.mozilla.thirdparty.com.google.android.exoplayer2.upstream.Allocator;
import org.mozilla.thirdparty.com.google.android.exoplayer2.util.Assertions;
import org.mozilla.thirdparty.com.google.android.exoplayer2.util.Log;
import org.checkerframework.checker.nullness.compatqual.NullableType;
/** Holds a {@link MediaPeriod} with information required to play it as part of a timeline. */
/* package */ final class MediaPeriodHolder {

  private static final String TAG = "MediaPeriodHolder";

  /** The {@link MediaPeriod} wrapped by this class. */
  public final MediaPeriod mediaPeriod;
  /** The unique timeline period identifier the media period belongs to. */
  public final Object uid;
  /**
   * The sample streams for each renderer associated with this period. May contain null elements.
   */
  public final @NullableType SampleStream[] sampleStreams;

  /** Whether the media period has finished preparing. */
  public boolean prepared;
  /** Whether any of the tracks of this media period are enabled. */
  public boolean hasEnabledTracks;
  /** {@link MediaPeriodInfo} about this media period. */
  public MediaPeriodInfo info;

  // Per-renderer flags passed to MediaPeriod.selectTracks indicating which of the existing
  // sample streams may be retained when a new track selection is applied.
  private final boolean[] mayRetainStreamFlags;
  private final RendererCapabilities[] rendererCapabilities;
  private final TrackSelector trackSelector;
  private final MediaSource mediaSource;

  // The next holder in the queue, or null if this holder is the loading (last) media period.
  @Nullable private MediaPeriodHolder next;
  private TrackGroupArray trackGroups;
  private TrackSelectorResult trackSelectorResult;
  // The renderer time corresponding to the start of this period, in microseconds.
  private long rendererPositionOffsetUs;

  /**
   * Creates a new holder with information required to play it as part of a timeline.
   *
   * @param rendererCapabilities The renderer capabilities.
   * @param rendererPositionOffsetUs The renderer time of the start of the period, in microseconds.
   * @param trackSelector The track selector.
   * @param allocator The allocator.
   * @param mediaSource The media source that produced the media period.
   * @param info Information used to identify this media period in its timeline period.
   * @param emptyTrackSelectorResult A {@link TrackSelectorResult} with empty selections for each
   *     renderer.
   */
  public MediaPeriodHolder(
      RendererCapabilities[] rendererCapabilities,
      long rendererPositionOffsetUs,
      TrackSelector trackSelector,
      Allocator allocator,
      MediaSource mediaSource,
      MediaPeriodInfo info,
      TrackSelectorResult emptyTrackSelectorResult) {
    this.rendererCapabilities = rendererCapabilities;
    this.rendererPositionOffsetUs = rendererPositionOffsetUs;
    this.trackSelector = trackSelector;
    this.mediaSource = mediaSource;
    this.uid = info.id.periodUid;
    this.info = info;
    this.trackGroups = TrackGroupArray.EMPTY;
    this.trackSelectorResult = emptyTrackSelectorResult;
    sampleStreams = new SampleStream[rendererCapabilities.length];
    mayRetainStreamFlags = new boolean[rendererCapabilities.length];
    // The media period may be wrapped in a ClippingMediaPeriod if an end position is set.
    mediaPeriod =
        createMediaPeriod(
            info.id, mediaSource, allocator, info.startPositionUs, info.endPositionUs);
  }

  /**
   * Converts time relative to the start of the period to the respective renderer time using {@link
   * #getRendererOffset()}, in microseconds.
   */
  public long toRendererTime(long periodTimeUs) {
    return periodTimeUs + getRendererOffset();
  }

  /**
   * Converts renderer time to the respective time relative to the start of the period using {@link
   * #getRendererOffset()}, in microseconds.
   */
  public long toPeriodTime(long rendererTimeUs) {
    return rendererTimeUs - getRendererOffset();
  }

  /** Returns the renderer time of the start of the period, in microseconds. */
  public long getRendererOffset() {
    return rendererPositionOffsetUs;
  }

  /**
   * Sets the renderer time of the start of the period, in microseconds.
   *
   * @param rendererPositionOffsetUs The new renderer position offset, in microseconds.
   */
  public void setRendererOffset(long rendererPositionOffsetUs) {
    this.rendererPositionOffsetUs = rendererPositionOffsetUs;
  }

  /** Returns start position of period in renderer time. */
  public long getStartPositionRendererTime() {
    return info.startPositionUs + rendererPositionOffsetUs;
  }

  /** Returns whether the period is fully buffered. */
  public boolean isFullyBuffered() {
    return prepared
        && (!hasEnabledTracks || mediaPeriod.getBufferedPositionUs() == C.TIME_END_OF_SOURCE);
  }

  /**
   * Returns the buffered position in microseconds. If the period is buffered to the end, then the
   * period duration is returned.
   *
   * @return The buffered position in microseconds.
   */
  public long getBufferedPositionUs() {
    if (!prepared) {
      return info.startPositionUs;
    }
    // With no enabled tracks there is nothing to buffer, which is treated as buffered-to-end.
    long bufferedPositionUs =
        hasEnabledTracks ? mediaPeriod.getBufferedPositionUs() : C.TIME_END_OF_SOURCE;
    return bufferedPositionUs == C.TIME_END_OF_SOURCE ? info.durationUs : bufferedPositionUs;
  }

  /**
   * Returns the next load time relative to the start of the period, or {@link C#TIME_END_OF_SOURCE}
   * if loading has finished.
   */
  public long getNextLoadPositionUs() {
    return !prepared ? 0 : mediaPeriod.getNextLoadPositionUs();
  }

  /**
   * Handles period preparation.
   *
   * @param playbackSpeed The current playback speed.
   * @param timeline The current {@link Timeline}.
   * @throws ExoPlaybackException If an error occurs during track selection.
   */
  public void handlePrepared(float playbackSpeed, Timeline timeline) throws ExoPlaybackException {
    prepared = true;
    trackGroups = mediaPeriod.getTrackGroups();
    TrackSelectorResult selectorResult = selectTracks(playbackSpeed, timeline);
    long newStartPositionUs =
        applyTrackSelection(
            selectorResult, info.startPositionUs, /* forceRecreateStreams= */ false);
    // Keep the renderer timeline stable if the period adjusted the requested start position.
    rendererPositionOffsetUs += info.startPositionUs - newStartPositionUs;
    info = info.copyWithStartPositionUs(newStartPositionUs);
  }

  /**
   * Reevaluates the buffer of the media period at the given renderer position. Should only be
   * called if this is the loading media period.
   *
   * @param rendererPositionUs The playing position in renderer time, in microseconds.
   */
  public void reevaluateBuffer(long rendererPositionUs) {
    Assertions.checkState(isLoadingMediaPeriod());
    if (prepared) {
      mediaPeriod.reevaluateBuffer(toPeriodTime(rendererPositionUs));
    }
  }

  /**
   * Continues loading the media period at the given renderer position. Should only be called if
   * this is the loading media period.
   *
   * @param rendererPositionUs The load position in renderer time, in microseconds.
   */
  public void continueLoading(long rendererPositionUs) {
    Assertions.checkState(isLoadingMediaPeriod());
    long loadingPeriodPositionUs = toPeriodTime(rendererPositionUs);
    mediaPeriod.continueLoading(loadingPeriodPositionUs);
  }

  /**
   * Selects tracks for the period. Must only be called if {@link #prepared} is {@code true}.
   *
   * <p>The new track selection needs to be applied with {@link
   * #applyTrackSelection(TrackSelectorResult, long, boolean)} before taking effect.
   *
   * @param playbackSpeed The current playback speed.
   * @param timeline The current {@link Timeline}.
   * @return The {@link TrackSelectorResult}.
   * @throws ExoPlaybackException If an error occurs during track selection.
   */
  public TrackSelectorResult selectTracks(float playbackSpeed, Timeline timeline)
      throws ExoPlaybackException {
    TrackSelectorResult selectorResult =
        trackSelector.selectTracks(rendererCapabilities, getTrackGroups(), info.id, timeline);
    for (TrackSelection trackSelection : selectorResult.selections.getAll()) {
      if (trackSelection != null) {
        trackSelection.onPlaybackSpeed(playbackSpeed);
      }
    }
    return selectorResult;
  }

  /**
   * Applies a {@link TrackSelectorResult} to the period.
   *
   * @param trackSelectorResult The {@link TrackSelectorResult} to apply.
   * @param positionUs The position relative to the start of the period at which to apply the new
   *     track selections, in microseconds.
   * @param forceRecreateStreams Whether all streams are forced to be recreated.
   * @return The actual position relative to the start of the period at which the new track
   *     selections are applied.
   */
  public long applyTrackSelection(
      TrackSelectorResult trackSelectorResult, long positionUs, boolean forceRecreateStreams) {
    return applyTrackSelection(
        trackSelectorResult,
        positionUs,
        forceRecreateStreams,
        new boolean[rendererCapabilities.length]);
  }

  /**
   * Applies a {@link TrackSelectorResult} to the period.
   *
   * @param newTrackSelectorResult The {@link TrackSelectorResult} to apply.
   * @param positionUs The position relative to the start of the period at which to apply the new
   *     track selections, in microseconds.
   * @param forceRecreateStreams Whether all streams are forced to be recreated.
   * @param streamResetFlags Will be populated to indicate which streams have been reset or were
   *     newly created.
   * @return The actual position relative to the start of the period at which the new track
   *     selections are applied.
   */
  public long applyTrackSelection(
      TrackSelectorResult newTrackSelectorResult,
      long positionUs,
      boolean forceRecreateStreams,
      boolean[] streamResetFlags) {
    for (int i = 0; i < newTrackSelectorResult.length; i++) {
      mayRetainStreamFlags[i] =
          !forceRecreateStreams && newTrackSelectorResult.isEquivalent(trackSelectorResult, i);
    }
    // Undo the effect of previous call to associate no-sample renderers with empty tracks
    // so the mediaPeriod receives back whatever it sent us before.
    disassociateNoSampleRenderersWithEmptySampleStream(sampleStreams);
    disableTrackSelectionsInResult();
    trackSelectorResult = newTrackSelectorResult;
    enableTrackSelectionsInResult();
    // Disable streams on the period and get new streams for updated/newly-enabled tracks.
    TrackSelectionArray trackSelections = newTrackSelectorResult.selections;
    positionUs =
        mediaPeriod.selectTracks(
            trackSelections.getAll(),
            mayRetainStreamFlags,
            sampleStreams,
            streamResetFlags,
            positionUs);
    associateNoSampleRenderersWithEmptySampleStream(sampleStreams);
    // Update whether we have enabled tracks and sanity check the expected streams are non-null.
    hasEnabledTracks = false;
    for (int i = 0; i < sampleStreams.length; i++) {
      if (sampleStreams[i] != null) {
        Assertions.checkState(newTrackSelectorResult.isRendererEnabled(i));
        // hasEnabledTracks should be true only when non-empty streams exists.
        if (rendererCapabilities[i].getTrackType() != C.TRACK_TYPE_NONE) {
          hasEnabledTracks = true;
        }
      } else {
        Assertions.checkState(trackSelections.get(i) == null);
      }
    }
    return positionUs;
  }

  /** Releases the media period. No other method should be called after the release. */
  public void release() {
    disableTrackSelectionsInResult();
    releaseMediaPeriod(info.endPositionUs, mediaSource, mediaPeriod);
  }

  /**
   * Sets the next media period holder in the queue.
   *
   * @param nextMediaPeriodHolder The next holder, or null if this will be the new loading media
   *     period holder at the end of the queue.
   */
  public void setNext(@Nullable MediaPeriodHolder nextMediaPeriodHolder) {
    if (nextMediaPeriodHolder == next) {
      return;
    }
    // Changing the next holder may change whether this holder is the loading period, which in
    // turn changes whether this holder owns enabling/disabling of its track selections.
    disableTrackSelectionsInResult();
    next = nextMediaPeriodHolder;
    enableTrackSelectionsInResult();
  }

  /**
   * Returns the next media period holder in the queue, or null if this is the last media period
   * (and thus the loading media period).
   */
  @Nullable
  public MediaPeriodHolder getNext() {
    return next;
  }

  /** Returns the {@link TrackGroupArray} exposed by this media period. */
  public TrackGroupArray getTrackGroups() {
    return trackGroups;
  }

  /** Returns the {@link TrackSelectorResult} which is currently applied. */
  public TrackSelectorResult getTrackSelectorResult() {
    return trackSelectorResult;
  }

  // Enables the enabled track selections of the current result. Only done for the loading media
  // period; for other periods the selections are managed via setNext when queue membership changes.
  private void enableTrackSelectionsInResult() {
    if (!isLoadingMediaPeriod()) {
      return;
    }
    for (int i = 0; i < trackSelectorResult.length; i++) {
      boolean rendererEnabled = trackSelectorResult.isRendererEnabled(i);
      TrackSelection trackSelection = trackSelectorResult.selections.get(i);
      if (rendererEnabled && trackSelection != null) {
        trackSelection.enable();
      }
    }
  }

  // Counterpart of enableTrackSelectionsInResult: disables the enabled track selections of the
  // current result while this holder is the loading media period.
  private void disableTrackSelectionsInResult() {
    if (!isLoadingMediaPeriod()) {
      return;
    }
    for (int i = 0; i < trackSelectorResult.length; i++) {
      boolean rendererEnabled = trackSelectorResult.isRendererEnabled(i);
      TrackSelection trackSelection = trackSelectorResult.selections.get(i);
      if (rendererEnabled && trackSelection != null) {
        trackSelection.disable();
      }
    }
  }

  /**
   * For each renderer of type {@link C#TRACK_TYPE_NONE}, we will remove the dummy {@link
   * EmptySampleStream} that was associated with it.
   */
  private void disassociateNoSampleRenderersWithEmptySampleStream(
      @NullableType SampleStream[] sampleStreams) {
    for (int i = 0; i < rendererCapabilities.length; i++) {
      if (rendererCapabilities[i].getTrackType() == C.TRACK_TYPE_NONE) {
        sampleStreams[i] = null;
      }
    }
  }

  /**
   * For each renderer of type {@link C#TRACK_TYPE_NONE} that was enabled, we will associate it with
   * a dummy {@link EmptySampleStream}.
   */
  private void associateNoSampleRenderersWithEmptySampleStream(
      @NullableType SampleStream[] sampleStreams) {
    for (int i = 0; i < rendererCapabilities.length; i++) {
      if (rendererCapabilities[i].getTrackType() == C.TRACK_TYPE_NONE
          && trackSelectorResult.isRendererEnabled(i)) {
        sampleStreams[i] = new EmptySampleStream();
      }
    }
  }

  // A holder is the loading media period exactly when it is last in the queue.
  private boolean isLoadingMediaPeriod() {
    return next == null;
  }

  /** Returns a media period corresponding to the given {@code id}. */
  private static MediaPeriod createMediaPeriod(
      MediaPeriodId id,
      MediaSource mediaSource,
      Allocator allocator,
      long startPositionUs,
      long endPositionUs) {
    MediaPeriod mediaPeriod = mediaSource.createPeriod(id, allocator, startPositionUs);
    if (endPositionUs != C.TIME_UNSET && endPositionUs != C.TIME_END_OF_SOURCE) {
      // Clip the content period so that it ends where a following ad group starts.
      mediaPeriod =
          new ClippingMediaPeriod(
              mediaPeriod, /* enableInitialDiscontinuity= */ true, /* startUs= */ 0, endPositionUs);
    }
    return mediaPeriod;
  }

  /** Releases the given {@code mediaPeriod}, logging and suppressing any errors. */
  private static void releaseMediaPeriod(
      long endPositionUs, MediaSource mediaSource, MediaPeriod mediaPeriod) {
    try {
      if (endPositionUs != C.TIME_UNSET && endPositionUs != C.TIME_END_OF_SOURCE) {
        // The period was wrapped in a ClippingMediaPeriod; release the wrapped period.
        mediaSource.releasePeriod(((ClippingMediaPeriod) mediaPeriod).mediaPeriod);
      } else {
        mediaSource.releasePeriod(mediaPeriod);
      }
    } catch (RuntimeException e) {
      // There's nothing we can do.
      Log.e(TAG, "Period release failed.", e);
    }
  }
}

Просмотреть файл

@ -0,0 +1,141 @@
/*
* Copyright (C) 2018 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.mozilla.thirdparty.com.google.android.exoplayer2;
import androidx.annotation.Nullable;
import org.mozilla.thirdparty.com.google.android.exoplayer2.source.MediaPeriod;
import org.mozilla.thirdparty.com.google.android.exoplayer2.source.MediaSource.MediaPeriodId;
import org.mozilla.thirdparty.com.google.android.exoplayer2.util.Util;
/** Stores the information required to load and play a {@link MediaPeriod}. */
/** Stores the information required to load and play a {@link MediaPeriod}. */
/* package */ final class MediaPeriodInfo {

  /** The media period's identifier. */
  public final MediaPeriodId id;
  /** The start position of the media to play within the media period, in microseconds. */
  public final long startPositionUs;
  /**
   * If this is an ad, the position to play in the next content media period. {@link C#TIME_UNSET}
   * if this is not an ad or the next content media period should be played from its default
   * position.
   */
  public final long contentPositionUs;
  /**
   * The end position to which the media period's content is clipped in order to play a following ad
   * group, in microseconds, or {@link C#TIME_UNSET} if there is no following ad group or if this
   * media period is an ad. The value {@link C#TIME_END_OF_SOURCE} indicates that a postroll ad
   * follows at the end of this content media period.
   */
  public final long endPositionUs;
  /**
   * The duration of the media period, like {@link #endPositionUs} but with {@link
   * C#TIME_END_OF_SOURCE} and {@link C#TIME_UNSET} resolved to the timeline period duration if
   * known.
   */
  public final long durationUs;
  /**
   * Whether this is the last media period in its timeline period (e.g., a postroll ad, or a media
   * period corresponding to a timeline period without ads).
   */
  public final boolean isLastInTimelinePeriod;
  /**
   * Whether this is the last media period in the entire timeline. If true, {@link
   * #isLastInTimelinePeriod} will also be true.
   */
  public final boolean isFinal;

  MediaPeriodInfo(
      MediaPeriodId id,
      long startPositionUs,
      long contentPositionUs,
      long endPositionUs,
      long durationUs,
      boolean isLastInTimelinePeriod,
      boolean isFinal) {
    this.id = id;
    this.startPositionUs = startPositionUs;
    this.contentPositionUs = contentPositionUs;
    this.endPositionUs = endPositionUs;
    this.durationUs = durationUs;
    this.isLastInTimelinePeriod = isLastInTimelinePeriod;
    this.isFinal = isFinal;
  }

  /**
   * Returns a copy of this instance with the start position set to the specified value. May return
   * the same instance if nothing changed.
   */
  public MediaPeriodInfo copyWithStartPositionUs(long startPositionUs) {
    if (startPositionUs == this.startPositionUs) {
      // Nothing would change; reuse this immutable instance.
      return this;
    }
    return new MediaPeriodInfo(
        id,
        startPositionUs,
        contentPositionUs,
        endPositionUs,
        durationUs,
        isLastInTimelinePeriod,
        isFinal);
  }

  /**
   * Returns a copy of this instance with the content position set to the specified value. May
   * return the same instance if nothing changed.
   */
  public MediaPeriodInfo copyWithContentPositionUs(long contentPositionUs) {
    if (contentPositionUs == this.contentPositionUs) {
      // Nothing would change; reuse this immutable instance.
      return this;
    }
    return new MediaPeriodInfo(
        id,
        startPositionUs,
        contentPositionUs,
        endPositionUs,
        durationUs,
        isLastInTimelinePeriod,
        isFinal);
  }

  @Override
  public boolean equals(@Nullable Object o) {
    if (o == this) {
      return true;
    }
    if (o == null || o.getClass() != getClass()) {
      return false;
    }
    MediaPeriodInfo other = (MediaPeriodInfo) o;
    return startPositionUs == other.startPositionUs
        && contentPositionUs == other.contentPositionUs
        && endPositionUs == other.endPositionUs
        && durationUs == other.durationUs
        && isLastInTimelinePeriod == other.isLastInTimelinePeriod
        && isFinal == other.isFinal
        && Util.areEqual(id, other.id);
  }

  @Override
  public int hashCode() {
    // Standard 17/31 accumulation over all fields (kept identical to preserve hash values).
    int hash = 17;
    hash = 31 * hash + id.hashCode();
    hash = 31 * hash + (int) startPositionUs;
    hash = 31 * hash + (int) contentPositionUs;
    hash = 31 * hash + (int) endPositionUs;
    hash = 31 * hash + (int) durationUs;
    hash = 31 * hash + (isLastInTimelinePeriod ? 1 : 0);
    hash = 31 * hash + (isFinal ? 1 : 0);
    return hash;
  }
}

Просмотреть файл

@ -0,0 +1,743 @@
/*
* Copyright (C) 2018 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.mozilla.thirdparty.com.google.android.exoplayer2;
import android.util.Pair;
import androidx.annotation.Nullable;
import org.mozilla.thirdparty.com.google.android.exoplayer2.Player.RepeatMode;
import org.mozilla.thirdparty.com.google.android.exoplayer2.source.MediaPeriod;
import org.mozilla.thirdparty.com.google.android.exoplayer2.source.MediaSource;
import org.mozilla.thirdparty.com.google.android.exoplayer2.source.MediaSource.MediaPeriodId;
import org.mozilla.thirdparty.com.google.android.exoplayer2.trackselection.TrackSelector;
import org.mozilla.thirdparty.com.google.android.exoplayer2.trackselection.TrackSelectorResult;
import org.mozilla.thirdparty.com.google.android.exoplayer2.upstream.Allocator;
import org.mozilla.thirdparty.com.google.android.exoplayer2.util.Assertions;
/**
* Holds a queue of media periods, from the currently playing media period at the front to the
* loading media period at the end of the queue, with methods for controlling loading and updating
* the queue. Also has a reference to the media period currently being read.
*/
/* package */ final class MediaPeriodQueue {
/**
* Limits the maximum number of periods to buffer ahead of the current playing period. The
* buffering policy normally prevents buffering too far ahead, but the policy could allow too many
* small periods to be buffered if the period count were not limited.
*/
private static final int MAXIMUM_BUFFER_AHEAD_PERIODS = 100;
private final Timeline.Period period;
private final Timeline.Window window;
private long nextWindowSequenceNumber;
private Timeline timeline;
private @RepeatMode int repeatMode;
private boolean shuffleModeEnabled;
@Nullable private MediaPeriodHolder playing;
@Nullable private MediaPeriodHolder reading;
@Nullable private MediaPeriodHolder loading;
private int length;
@Nullable private Object oldFrontPeriodUid;
private long oldFrontPeriodWindowSequenceNumber;
/** Creates an empty media period queue with an empty initial timeline. */
public MediaPeriodQueue() {
  window = new Timeline.Window();
  period = new Timeline.Period();
  timeline = Timeline.EMPTY;
}
/**
 * Sets the {@link Timeline} backing this queue. Callers must follow up with {@link
 * #updateQueuedPeriods(long, long)} so the queued media periods reflect the new timeline.
 */
public void setTimeline(Timeline timeline) {
  this.timeline = timeline;
}
/**
 * Updates the {@link RepeatMode} and reports whether the change could be handled in place. A
 * {@code false} return means the caller must seek to the current playback position.
 */
public boolean updateRepeatMode(@RepeatMode int repeatMode) {
  this.repeatMode = repeatMode;
  return updateForPlaybackModeChange();
}
/**
 * Updates whether shuffling is enabled and reports whether the change could be handled in place.
 * A {@code false} return means the caller must seek to the current playback position.
 */
public boolean updateShuffleModeEnabled(boolean shuffleModeEnabled) {
  this.shuffleModeEnabled = shuffleModeEnabled;
  return updateForPlaybackModeChange();
}
/** Returns whether the given {@code mediaPeriod} belongs to the current loading period holder. */
public boolean isLoading(MediaPeriod mediaPeriod) {
  if (loading == null) {
    return false;
  }
  return loading.mediaPeriod == mediaPeriod;
}
/**
 * Asks the loading period, if any, to reevaluate its buffer.
 *
 * @param rendererPositionUs The current renderer position.
 */
public void reevaluateBuffer(long rendererPositionUs) {
  MediaPeriodHolder loadingHolder = loading;
  if (loadingHolder != null) {
    loadingHolder.reevaluateBuffer(rendererPositionUs);
  }
}
/** Returns whether a new loading media period should be enqueued, if available. */
public boolean shouldLoadNextMediaPeriod() {
  if (loading == null) {
    // Nothing queued yet, so the first period must be loaded.
    return true;
  }
  return !loading.info.isFinal
      && loading.isFullyBuffered()
      && loading.info.durationUs != C.TIME_UNSET
      && length < MAXIMUM_BUFFER_AHEAD_PERIODS;
}
/**
 * Returns the {@link MediaPeriodInfo} for the next media period to load.
 *
 * @param rendererPositionUs The current renderer position.
 * @param playbackInfo The current playback information.
 * @return The {@link MediaPeriodInfo} for the next media period to load, or {@code null} if not
 *     yet known.
 */
public @Nullable MediaPeriodInfo getNextMediaPeriodInfo(
    long rendererPositionUs, PlaybackInfo playbackInfo) {
  if (loading == null) {
    // Queue is empty; derive the very first period from the playback state.
    return getFirstMediaPeriodInfo(playbackInfo);
  }
  return getFollowingMediaPeriodInfo(loading, rendererPositionUs);
}
/**
 * Enqueues a new media period holder based on the specified information as the new loading media
 * period, and returns it.
 *
 * @param rendererCapabilities The renderer capabilities.
 * @param trackSelector The track selector.
 * @param allocator The allocator.
 * @param mediaSource The media source that produced the media period.
 * @param info Information used to identify this media period in its timeline period.
 * @param emptyTrackSelectorResult A {@link TrackSelectorResult} with empty selections for each
 *     renderer.
 */
public MediaPeriodHolder enqueueNextMediaPeriodHolder(
    RendererCapabilities[] rendererCapabilities,
    TrackSelector trackSelector,
    Allocator allocator,
    MediaSource mediaSource,
    MediaPeriodInfo info,
    TrackSelectorResult emptyTrackSelectorResult) {
  // Compute the renderer time at which the new period starts.
  long rendererPositionOffsetUs;
  if (loading == null) {
    if (info.id.isAd() && info.contentPositionUs != C.TIME_UNSET) {
      // First period is an ad; offset by the content position it interrupts.
      rendererPositionOffsetUs = info.contentPositionUs;
    } else {
      rendererPositionOffsetUs = 0;
    }
  } else {
    // Start directly after the end of the current loading period.
    rendererPositionOffsetUs =
        loading.getRendererOffset() + loading.info.durationUs - info.startPositionUs;
  }
  MediaPeriodHolder newPeriodHolder =
      new MediaPeriodHolder(
          rendererCapabilities,
          rendererPositionOffsetUs,
          trackSelector,
          allocator,
          mediaSource,
          info,
          emptyTrackSelectorResult);
  if (loading == null) {
    // Queue was empty: the new holder is playing, reading and loading at once.
    playing = newPeriodHolder;
    reading = newPeriodHolder;
  } else {
    loading.setNext(newPeriodHolder);
  }
  oldFrontPeriodUid = null;
  loading = newPeriodHolder;
  length++;
  return newPeriodHolder;
}
/**
 * Returns the holder of the loading period at the end of the queue, or null when the queue is
 * empty.
 */
@Nullable
public MediaPeriodHolder getLoadingPeriod() {
  return loading;
}
/**
 * Returns the holder of the playing period at the front of the queue, or null when the queue is
 * empty.
 */
@Nullable
public MediaPeriodHolder getPlayingPeriod() {
  return playing;
}
/** Returns the holder of the period currently being read, or null when the queue is empty. */
@Nullable
public MediaPeriodHolder getReadingPeriod() {
  return reading;
}
/**
 * Moves the reading position to the next period holder in the queue.
 *
 * @return The updated reading period holder.
 */
public MediaPeriodHolder advanceReadingPeriod() {
  MediaPeriodHolder current = reading;
  // There must be a reading period and it must have a successor to advance to.
  Assertions.checkState(current != null && current.getNext() != null);
  reading = current.getNext();
  return reading;
}
/**
 * Dequeues the playing period holder from the front of the queue and advances the playing period
 * holder to be the next item in the queue.
 *
 * @return The updated playing period holder, or null if the queue is or becomes empty.
 */
@Nullable
public MediaPeriodHolder advancePlayingPeriod() {
  if (playing == null) {
    return null;
  }
  // The reading period must not fall behind the playing period; move it forward in lockstep.
  if (playing == reading) {
    reading = playing.getNext();
  }
  playing.release();
  length--;
  if (length == 0) {
    // Queue became empty. Remember the identity of the released front period so its window
    // sequence number can be reused if the same timeline period is enqueued again.
    loading = null;
    oldFrontPeriodUid = playing.uid;
    oldFrontPeriodWindowSequenceNumber = playing.info.id.windowSequenceNumber;
  }
  playing = playing.getNext();
  return playing;
}
/**
 * Removes all period holders after the given period holder. This process may also remove the
 * currently reading period holder. If that is the case, the reading period holder is set to be
 * the same as the playing period holder at the front of the queue.
 *
 * @param mediaPeriodHolder The media period holder that shall be the new end of the queue.
 * @return Whether the reading period has been removed.
 */
public boolean removeAfter(MediaPeriodHolder mediaPeriodHolder) {
  Assertions.checkState(mediaPeriodHolder != null);
  boolean removedReading = false;
  // The given holder becomes the new loading (last) period in the queue.
  loading = mediaPeriodHolder;
  while (mediaPeriodHolder.getNext() != null) {
    mediaPeriodHolder = mediaPeriodHolder.getNext();
    if (mediaPeriodHolder == reading) {
      // Reading period is being removed; fall back to the playing period at the front.
      reading = playing;
      removedReading = true;
    }
    mediaPeriodHolder.release();
    length--;
  }
  // Detach the removed tail from the new end of the queue.
  loading.setNext(null);
  return removedReading;
}
/**
 * Empties the queue, releasing all queued period holders.
 *
 * @param keepFrontPeriodUid Whether the queue should keep the id of the media period in the front
 *     of queue (typically the playing one) for later reuse.
 */
public void clear(boolean keepFrontPeriodUid) {
  MediaPeriodHolder frontHolder = playing;
  if (frontHolder == null) {
    if (!keepFrontPeriodUid) {
      oldFrontPeriodUid = null;
    }
  } else {
    // Record the front period's identity (if requested) before releasing everything.
    oldFrontPeriodUid = keepFrontPeriodUid ? frontHolder.uid : null;
    oldFrontPeriodWindowSequenceNumber = frontHolder.info.id.windowSequenceNumber;
    removeAfter(frontHolder);
    frontHolder.release();
  }
  playing = null;
  reading = null;
  loading = null;
  length = 0;
}
/**
 * Updates media periods in the queue to take into account the latest timeline, and returns
 * whether the timeline change has been fully handled. If not, it is necessary to seek to the
 * current playback position. The method assumes that the first media period in the queue is still
 * consistent with the new timeline.
 *
 * @param rendererPositionUs The current renderer position in microseconds.
 * @param maxRendererReadPositionUs The maximum renderer position up to which renderers have read
 *     the current reading media period in microseconds, or {@link C#TIME_END_OF_SOURCE} if they
 *     have read to the end.
 * @return Whether the timeline change has been handled completely.
 */
public boolean updateQueuedPeriods(long rendererPositionUs, long maxRendererReadPositionUs) {
  // TODO: Merge this into setTimeline so that the queue gets updated as soon as the new timeline
  // is set, once all cases handled by ExoPlayerImplInternal.handleSourceInfoRefreshed can be
  // handled here.
  MediaPeriodHolder previousPeriodHolder = null;
  MediaPeriodHolder periodHolder = playing;
  // Walk the queue from front to back, reconciling each holder with the new timeline.
  while (periodHolder != null) {
    MediaPeriodInfo oldPeriodInfo = periodHolder.info;
    // Get period info based on new timeline.
    MediaPeriodInfo newPeriodInfo;
    if (previousPeriodHolder == null) {
      // The id and start position of the first period have already been verified by
      // ExoPlayerImplInternal.handleSourceInfoRefreshed. Just update duration, isLastInTimeline
      // and isLastInPeriod flags.
      newPeriodInfo = getUpdatedMediaPeriodInfo(oldPeriodInfo);
    } else {
      newPeriodInfo = getFollowingMediaPeriodInfo(previousPeriodHolder, rendererPositionUs);
      if (newPeriodInfo == null) {
        // We've loaded a next media period that is not in the new timeline.
        return !removeAfter(previousPeriodHolder);
      }
      if (!canKeepMediaPeriodHolder(oldPeriodInfo, newPeriodInfo)) {
        // The new media period has a different id or start position.
        return !removeAfter(previousPeriodHolder);
      }
    }
    // Use new period info, but keep old content position.
    periodHolder.info = newPeriodInfo.copyWithContentPositionUs(oldPeriodInfo.contentPositionUs);
    if (!areDurationsCompatible(oldPeriodInfo.durationUs, newPeriodInfo.durationUs)) {
      // The period duration changed. Remove all subsequent periods and check whether we read
      // beyond the new duration.
      long newDurationInRendererTime =
          newPeriodInfo.durationUs == C.TIME_UNSET
              ? Long.MAX_VALUE
              : periodHolder.toRendererTime(newPeriodInfo.durationUs);
      boolean isReadingAndReadBeyondNewDuration =
          periodHolder == reading
              && (maxRendererReadPositionUs == C.TIME_END_OF_SOURCE
                  || maxRendererReadPositionUs >= newDurationInRendererTime);
      boolean readingPeriodRemoved = removeAfter(periodHolder);
      // A seek is required if the reading period was removed or read past the new duration.
      return !readingPeriodRemoved && !isReadingAndReadBeyondNewDuration;
    }
    previousPeriodHolder = periodHolder;
    periodHolder = periodHolder.getNext();
  }
  return true;
}
/**
 * Returns new media period info based on specified {@code mediaPeriodInfo} but taking into
 * account the current timeline. This method must only be called if the period is still part of
 * the current timeline.
 *
 * @param info Media period info for a media period based on an old timeline.
 * @return The updated media period info for the current timeline.
 */
public MediaPeriodInfo getUpdatedMediaPeriodInfo(MediaPeriodInfo info) {
  MediaPeriodId id = info.id;
  boolean isLastInPeriod = isLastInPeriod(id);
  boolean isLastInTimeline = isLastInTimeline(id, isLastInPeriod);
  timeline.getPeriodByUid(info.id.periodUid, period);
  // Resolve the duration against the current timeline.
  long durationUs;
  if (id.isAd()) {
    durationUs = period.getAdDurationUs(id.adGroupIndex, id.adIndexInAdGroup);
  } else if (info.endPositionUs == C.TIME_UNSET || info.endPositionUs == C.TIME_END_OF_SOURCE) {
    durationUs = period.getDurationUs();
  } else {
    durationUs = info.endPositionUs;
  }
  return new MediaPeriodInfo(
      id,
      info.startPositionUs,
      info.contentPositionUs,
      info.endPositionUs,
      durationUs,
      isLastInPeriod,
      isLastInTimeline);
}
/**
 * Resolves the specified timeline period and position to a {@link MediaPeriodId} that should be
 * played, returning an identifier for an ad group if one needs to be played before the specified
 * position, or an identifier for a content media period if not.
 *
 * @param periodUid The uid of the timeline period to play.
 * @param positionUs The next content position in the period to play.
 * @return The identifier for the first media period to play, taking into account unplayed ads.
 */
public MediaPeriodId resolveMediaPeriodIdForAds(Object periodUid, long positionUs) {
  return resolveMediaPeriodIdForAds(
      periodUid, positionUs, resolvePeriodIndexToWindowSequenceNumber(periodUid));
}
// Internal methods.
/**
 * Resolves the specified timeline period and position to a {@link MediaPeriodId} that should be
 * played, returning an identifier for an ad group if one needs to be played before the specified
 * position, or an identifier for a content media period if not.
 *
 * @param periodUid The uid of the timeline period to play.
 * @param positionUs The next content position in the period to play.
 * @param windowSequenceNumber The sequence number of the window in the buffered sequence of
 *     windows this period is part of.
 * @return The identifier for the first media period to play, taking into account unplayed ads.
 */
private MediaPeriodId resolveMediaPeriodIdForAds(
    Object periodUid, long positionUs, long windowSequenceNumber) {
  timeline.getPeriodByUid(periodUid, period);
  int adGroupIndex = period.getAdGroupIndexForPositionUs(positionUs);
  if (adGroupIndex != C.INDEX_UNSET) {
    // An unplayed ad group precedes the position; play its first unplayed ad.
    int adIndexInAdGroup = period.getFirstAdIndexToPlay(adGroupIndex);
    return new MediaPeriodId(periodUid, adGroupIndex, adIndexInAdGroup, windowSequenceNumber);
  }
  // No ad to play first; return a content id noting the next ad group after the position.
  int nextAdGroupIndex = period.getAdGroupIndexAfterPositionUs(positionUs);
  return new MediaPeriodId(periodUid, windowSequenceNumber, nextAdGroupIndex);
}
  /**
   * Resolves the specified period uid to a corresponding window sequence number. Either by reusing
   * the window sequence number of an existing matching media period or by creating a new window
   * sequence number.
   *
   * @param periodUid The uid of the timeline period.
   * @return A window sequence number for a media period created for this timeline period.
   */
  private long resolvePeriodIndexToWindowSequenceNumber(Object periodUid) {
    int windowIndex = timeline.getPeriodByUid(periodUid, period).windowIndex;
    if (oldFrontPeriodUid != null) {
      int oldFrontPeriodIndex = timeline.getIndexOfPeriod(oldFrontPeriodUid);
      if (oldFrontPeriodIndex != C.INDEX_UNSET) {
        int oldFrontWindowIndex = timeline.getPeriod(oldFrontPeriodIndex, period).windowIndex;
        if (oldFrontWindowIndex == windowIndex) {
          // Try to match old front uid after the queue has been cleared.
          return oldFrontPeriodWindowSequenceNumber;
        }
      }
    }
    // First pass over the queue: reuse the sequence number of a holder for exactly this period.
    MediaPeriodHolder mediaPeriodHolder = playing;
    while (mediaPeriodHolder != null) {
      if (mediaPeriodHolder.uid.equals(periodUid)) {
        // Reuse window sequence number of first exact period match.
        return mediaPeriodHolder.info.id.windowSequenceNumber;
      }
      mediaPeriodHolder = mediaPeriodHolder.getNext();
    }
    // Second pass: reuse the sequence number of any holder whose period is in the same window.
    mediaPeriodHolder = playing;
    while (mediaPeriodHolder != null) {
      int indexOfHolderInTimeline = timeline.getIndexOfPeriod(mediaPeriodHolder.uid);
      if (indexOfHolderInTimeline != C.INDEX_UNSET) {
        int holderWindowIndex = timeline.getPeriod(indexOfHolderInTimeline, period).windowIndex;
        if (holderWindowIndex == windowIndex) {
          // As an alternative, try to match other periods of the same window.
          return mediaPeriodHolder.info.id.windowSequenceNumber;
        }
      }
      mediaPeriodHolder = mediaPeriodHolder.getNext();
    }
    // If no match is found, create new sequence number.
    long windowSequenceNumber = nextWindowSequenceNumber++;
    if (playing == null) {
      // If the queue is empty, save it as old front uid to allow later reuse.
      oldFrontPeriodUid = periodUid;
      oldFrontPeriodWindowSequenceNumber = windowSequenceNumber;
    }
    return windowSequenceNumber;
  }
/**
* Returns whether a period described by {@code oldInfo} can be kept for playing the media period
* described by {@code newInfo}.
*/
private boolean canKeepMediaPeriodHolder(MediaPeriodInfo oldInfo, MediaPeriodInfo newInfo) {
return oldInfo.startPositionUs == newInfo.startPositionUs && oldInfo.id.equals(newInfo.id);
}
/**
* Returns whether a duration change of a period is compatible with keeping the following periods.
*/
private boolean areDurationsCompatible(long previousDurationUs, long newDurationUs) {
return previousDurationUs == C.TIME_UNSET || previousDurationUs == newDurationUs;
}
  /**
   * Updates the queue for any playback mode change, and returns whether the change was fully
   * handled. If not, it is necessary to seek to the current playback position.
   */
  private boolean updateForPlaybackModeChange() {
    // Find the last existing period holder that matches the new period order.
    MediaPeriodHolder lastValidPeriodHolder = playing;
    if (lastValidPeriodHolder == null) {
      // Empty queue: nothing can be invalidated by the mode change.
      return true;
    }
    int currentPeriodIndex = timeline.getIndexOfPeriod(lastValidPeriodHolder.uid);
    while (true) {
      int nextPeriodIndex =
          timeline.getNextPeriodIndex(
              currentPeriodIndex, period, window, repeatMode, shuffleModeEnabled);
      // Skip holders that belong to the same timeline period (e.g. ad and content periods).
      while (lastValidPeriodHolder.getNext() != null
          && !lastValidPeriodHolder.info.isLastInTimelinePeriod) {
        lastValidPeriodHolder = lastValidPeriodHolder.getNext();
      }
      MediaPeriodHolder nextMediaPeriodHolder = lastValidPeriodHolder.getNext();
      if (nextPeriodIndex == C.INDEX_UNSET || nextMediaPeriodHolder == null) {
        break;
      }
      int nextPeriodHolderPeriodIndex = timeline.getIndexOfPeriod(nextMediaPeriodHolder.uid);
      if (nextPeriodHolderPeriodIndex != nextPeriodIndex) {
        // The queued holder no longer follows under the new order; it and everything after it
        // must be released.
        break;
      }
      lastValidPeriodHolder = nextMediaPeriodHolder;
      currentPeriodIndex = nextPeriodIndex;
    }
    // Release any period holders that don't match the new period order.
    boolean readingPeriodRemoved = removeAfter(lastValidPeriodHolder);
    // Update the period info for the last holder, as it may now be the last period in the timeline.
    lastValidPeriodHolder.info = getUpdatedMediaPeriodInfo(lastValidPeriodHolder.info);
    // If renderers may have read from a period that's been removed, it is necessary to restart.
    return !readingPeriodRemoved;
  }
/**
* Returns the first {@link MediaPeriodInfo} to play, based on the specified playback position.
*/
private MediaPeriodInfo getFirstMediaPeriodInfo(PlaybackInfo playbackInfo) {
return getMediaPeriodInfo(
playbackInfo.periodId, playbackInfo.contentPositionUs, playbackInfo.startPositionUs);
}
  /**
   * Returns the {@link MediaPeriodInfo} for the media period following {@code mediaPeriodHolder}'s
   * media period.
   *
   * @param mediaPeriodHolder The media period holder.
   * @param rendererPositionUs The current renderer position in microseconds.
   * @return The following media period's info, or {@code null} if it is not yet possible to get the
   * next media period info.
   */
  private @Nullable MediaPeriodInfo getFollowingMediaPeriodInfo(
      MediaPeriodHolder mediaPeriodHolder, long rendererPositionUs) {
    // TODO: This method is called repeatedly from ExoPlayerImplInternal.maybeUpdateLoadingPeriod
    // but if the timeline is not ready to provide the next period it can't return a non-null value
    // until the timeline is updated. Store whether the next timeline period is ready when the
    // timeline is updated, to avoid repeatedly checking the same timeline.
    MediaPeriodInfo mediaPeriodInfo = mediaPeriodHolder.info;
    // The expected delay until playback transitions to the new period is equal the duration of
    // media that's currently buffered (assuming no interruptions). This is used to project forward
    // the start position for transitions to new windows.
    long bufferedDurationUs =
        mediaPeriodHolder.getRendererOffset() + mediaPeriodInfo.durationUs - rendererPositionUs;
    if (mediaPeriodInfo.isLastInTimelinePeriod) {
      // Case 1: the holder is the last media period of its timeline period, so the next media
      // period belongs to the next timeline period (possibly in a new window).
      int currentPeriodIndex = timeline.getIndexOfPeriod(mediaPeriodInfo.id.periodUid);
      int nextPeriodIndex =
          timeline.getNextPeriodIndex(
              currentPeriodIndex, period, window, repeatMode, shuffleModeEnabled);
      if (nextPeriodIndex == C.INDEX_UNSET) {
        // We can't create a next period yet.
        return null;
      }
      long startPositionUs;
      long contentPositionUs;
      int nextWindowIndex =
          timeline.getPeriod(nextPeriodIndex, period, /* setIds= */ true).windowIndex;
      Object nextPeriodUid = period.uid;
      long windowSequenceNumber = mediaPeriodInfo.id.windowSequenceNumber;
      if (timeline.getWindow(nextWindowIndex, window).firstPeriodIndex == nextPeriodIndex) {
        // We're starting to buffer a new window. When playback transitions to this window we'll
        // want it to be from its default start position, so project the default start position
        // forward by the duration of the buffer, and start buffering from this point.
        contentPositionUs = C.TIME_UNSET;
        Pair<Object, Long> defaultPosition =
            timeline.getPeriodPosition(
                window,
                period,
                nextWindowIndex,
                /* windowPositionUs= */ C.TIME_UNSET,
                /* defaultPositionProjectionUs= */ Math.max(0, bufferedDurationUs));
        if (defaultPosition == null) {
          return null;
        }
        nextPeriodUid = defaultPosition.first;
        startPositionUs = defaultPosition.second;
        // Reuse the sequence number of an already-queued holder for this period, if any.
        MediaPeriodHolder nextMediaPeriodHolder = mediaPeriodHolder.getNext();
        if (nextMediaPeriodHolder != null && nextMediaPeriodHolder.uid.equals(nextPeriodUid)) {
          windowSequenceNumber = nextMediaPeriodHolder.info.id.windowSequenceNumber;
        } else {
          windowSequenceNumber = nextWindowSequenceNumber++;
        }
      } else {
        // We're starting to buffer a new period within the same window.
        startPositionUs = 0;
        contentPositionUs = 0;
      }
      MediaPeriodId periodId =
          resolveMediaPeriodIdForAds(nextPeriodUid, startPositionUs, windowSequenceNumber);
      return getMediaPeriodInfo(periodId, contentPositionUs, startPositionUs);
    }
    // Case 2: the next media period is within the same timeline period (ad <-> content
    // transitions).
    MediaPeriodId currentPeriodId = mediaPeriodInfo.id;
    timeline.getPeriodByUid(currentPeriodId.periodUid, period);
    if (currentPeriodId.isAd()) {
      int adGroupIndex = currentPeriodId.adGroupIndex;
      int adCountInCurrentAdGroup = period.getAdCountInAdGroup(adGroupIndex);
      if (adCountInCurrentAdGroup == C.LENGTH_UNSET) {
        // The ad count isn't known yet, so the next media period can't be determined.
        return null;
      }
      int nextAdIndexInAdGroup =
          period.getNextAdIndexToPlay(adGroupIndex, currentPeriodId.adIndexInAdGroup);
      if (nextAdIndexInAdGroup < adCountInCurrentAdGroup) {
        // Play the next ad in the ad group if it's available.
        return !period.isAdAvailable(adGroupIndex, nextAdIndexInAdGroup)
            ? null
            : getMediaPeriodInfoForAd(
                currentPeriodId.periodUid,
                adGroupIndex,
                nextAdIndexInAdGroup,
                mediaPeriodInfo.contentPositionUs,
                currentPeriodId.windowSequenceNumber);
      } else {
        // Play content from the ad group position.
        long startPositionUs = mediaPeriodInfo.contentPositionUs;
        if (startPositionUs == C.TIME_UNSET) {
          // If we're transitioning from an ad group to content starting from its default position,
          // project the start position forward as if this were a transition to a new window.
          Pair<Object, Long> defaultPosition =
              timeline.getPeriodPosition(
                  window,
                  period,
                  period.windowIndex,
                  /* windowPositionUs= */ C.TIME_UNSET,
                  /* defaultPositionProjectionUs= */ Math.max(0, bufferedDurationUs));
          if (defaultPosition == null) {
            return null;
          }
          startPositionUs = defaultPosition.second;
        }
        return getMediaPeriodInfoForContent(
            currentPeriodId.periodUid, startPositionUs, currentPeriodId.windowSequenceNumber);
      }
    } else {
      // Play the next ad group if it's available.
      int nextAdGroupIndex = period.getAdGroupIndexForPositionUs(mediaPeriodInfo.endPositionUs);
      if (nextAdGroupIndex == C.INDEX_UNSET) {
        // The next ad group can't be played. Play content from the previous end position instead.
        return getMediaPeriodInfoForContent(
            currentPeriodId.periodUid,
            /* startPositionUs= */ mediaPeriodInfo.durationUs,
            currentPeriodId.windowSequenceNumber);
      }
      int adIndexInAdGroup = period.getFirstAdIndexToPlay(nextAdGroupIndex);
      return !period.isAdAvailable(nextAdGroupIndex, adIndexInAdGroup)
          ? null
          : getMediaPeriodInfoForAd(
              currentPeriodId.periodUid,
              nextAdGroupIndex,
              adIndexInAdGroup,
              /* contentPositionUs= */ mediaPeriodInfo.durationUs,
              currentPeriodId.windowSequenceNumber);
    }
  }
private MediaPeriodInfo getMediaPeriodInfo(
MediaPeriodId id, long contentPositionUs, long startPositionUs) {
timeline.getPeriodByUid(id.periodUid, period);
if (id.isAd()) {
if (!period.isAdAvailable(id.adGroupIndex, id.adIndexInAdGroup)) {
return null;
}
return getMediaPeriodInfoForAd(
id.periodUid,
id.adGroupIndex,
id.adIndexInAdGroup,
contentPositionUs,
id.windowSequenceNumber);
} else {
return getMediaPeriodInfoForContent(id.periodUid, startPositionUs, id.windowSequenceNumber);
}
}
private MediaPeriodInfo getMediaPeriodInfoForAd(
Object periodUid,
int adGroupIndex,
int adIndexInAdGroup,
long contentPositionUs,
long windowSequenceNumber) {
MediaPeriodId id =
new MediaPeriodId(periodUid, adGroupIndex, adIndexInAdGroup, windowSequenceNumber);
long durationUs =
timeline
.getPeriodByUid(id.periodUid, period)
.getAdDurationUs(id.adGroupIndex, id.adIndexInAdGroup);
long startPositionUs =
adIndexInAdGroup == period.getFirstAdIndexToPlay(adGroupIndex)
? period.getAdResumePositionUs()
: 0;
return new MediaPeriodInfo(
id,
startPositionUs,
contentPositionUs,
/* endPositionUs= */ C.TIME_UNSET,
durationUs,
/* isLastInTimelinePeriod= */ false,
/* isFinal= */ false);
}
private MediaPeriodInfo getMediaPeriodInfoForContent(
Object periodUid, long startPositionUs, long windowSequenceNumber) {
int nextAdGroupIndex = period.getAdGroupIndexAfterPositionUs(startPositionUs);
MediaPeriodId id = new MediaPeriodId(periodUid, windowSequenceNumber, nextAdGroupIndex);
boolean isLastInPeriod = isLastInPeriod(id);
boolean isLastInTimeline = isLastInTimeline(id, isLastInPeriod);
long endPositionUs =
nextAdGroupIndex != C.INDEX_UNSET
? period.getAdGroupTimeUs(nextAdGroupIndex)
: C.TIME_UNSET;
long durationUs =
endPositionUs == C.TIME_UNSET || endPositionUs == C.TIME_END_OF_SOURCE
? period.durationUs
: endPositionUs;
return new MediaPeriodInfo(
id,
startPositionUs,
/* contentPositionUs= */ C.TIME_UNSET,
endPositionUs,
durationUs,
isLastInPeriod,
isLastInTimeline);
}
private boolean isLastInPeriod(MediaPeriodId id) {
return !id.isAd() && id.nextAdGroupIndex == C.INDEX_UNSET;
}
private boolean isLastInTimeline(MediaPeriodId id, boolean isLastMediaPeriodInPeriod) {
int periodIndex = timeline.getIndexOfPeriod(id.periodUid);
int windowIndex = timeline.getPeriod(periodIndex, period).windowIndex;
return !timeline.getWindow(windowIndex, window).isDynamic
&& timeline.isLastPeriod(periodIndex, period, window, repeatMode, shuffleModeEnabled)
&& isLastMediaPeriodInPeriod;
}
}

Просмотреть файл

@ -0,0 +1,306 @@
/*
* Copyright (C) 2016 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.mozilla.thirdparty.com.google.android.exoplayer2;
import androidx.annotation.Nullable;
import org.mozilla.thirdparty.com.google.android.exoplayer2.source.SampleStream;
import org.mozilla.thirdparty.com.google.android.exoplayer2.util.Assertions;
import org.mozilla.thirdparty.com.google.android.exoplayer2.util.MediaClock;
import java.io.IOException;
import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
/**
 * A {@link Renderer} implementation whose track type is {@link C#TRACK_TYPE_NONE} and does not
 * consume data from its {@link SampleStream}.
 */
public abstract class NoSampleRenderer implements Renderer, RendererCapabilities {
  // Configuration supplied on the most recent call to enable(); null until first enabled.
  @MonotonicNonNull private RendererConfiguration configuration;
  // The index of this renderer within the player, set via setIndex().
  private int index;
  // Lifecycle state: STATE_DISABLED, STATE_ENABLED or STATE_STARTED.
  private int state;
  // The stream associated with this renderer. Tracked for lifecycle purposes only; never read.
  @Nullable private SampleStream stream;
  // Whether the current stream is final, i.e. no further replaceStream() calls will follow.
  private boolean streamIsFinal;
  @Override
  public final int getTrackType() {
    return C.TRACK_TYPE_NONE;
  }
  @Override
  public final RendererCapabilities getCapabilities() {
    return this;
  }
  @Override
  public final void setIndex(int index) {
    this.index = index;
  }
  @Override
  @Nullable
  public MediaClock getMediaClock() {
    // No-sample renderers do not provide a media clock by default.
    return null;
  }
  @Override
  public final int getState() {
    return state;
  }
  /**
   * Enables the renderer to consume from the specified {@link SampleStream}.
   * <p>
   * This method may be called when the renderer is in the following states:
   * {@link #STATE_DISABLED}.
   *
   * @param configuration The renderer configuration.
   * @param formats The enabled formats. Should be empty.
   * @param stream The {@link SampleStream} from which the renderer should consume.
   * @param positionUs The player's current position.
   * @param joining Whether this renderer is being enabled to join an ongoing playback.
   * @param offsetUs The offset that should be subtracted from {@code positionUs}
   *     to get the playback position with respect to the media.
   * @throws ExoPlaybackException If an error occurs.
   */
  @Override
  public final void enable(RendererConfiguration configuration, Format[] formats,
      SampleStream stream, long positionUs, boolean joining, long offsetUs)
      throws ExoPlaybackException {
    Assertions.checkState(state == STATE_DISABLED);
    this.configuration = configuration;
    // Transition to enabled before invoking subclass callbacks, so they observe the new state.
    state = STATE_ENABLED;
    onEnabled(joining);
    replaceStream(formats, stream, offsetUs);
    onPositionReset(positionUs, joining);
  }
  @Override
  public final void start() throws ExoPlaybackException {
    Assertions.checkState(state == STATE_ENABLED);
    state = STATE_STARTED;
    onStarted();
  }
  /**
   * Replaces the {@link SampleStream} that will be associated with this renderer.
   * <p>
   * This method may be called when the renderer is in the following states:
   * {@link #STATE_ENABLED}, {@link #STATE_STARTED}.
   *
   * @param formats The enabled formats. Should be empty.
   * @param stream The {@link SampleStream} to be associated with this renderer.
   * @param offsetUs The offset that should be subtracted from {@code positionUs} in
   *     {@link #render(long, long)} to get the playback position with respect to the media.
   * @throws ExoPlaybackException If an error occurs.
   */
  @Override
  public final void replaceStream(Format[] formats, SampleStream stream, long offsetUs)
      throws ExoPlaybackException {
    // A final stream must never be replaced.
    Assertions.checkState(!streamIsFinal);
    this.stream = stream;
    onRendererOffsetChanged(offsetUs);
  }
  @Override
  @Nullable
  public final SampleStream getStream() {
    return stream;
  }
  @Override
  public final boolean hasReadStreamToEnd() {
    // This renderer never consumes from the stream, so it is always "fully read".
    return true;
  }
  @Override
  public long getReadingPositionUs() {
    return C.TIME_END_OF_SOURCE;
  }
  @Override
  public final void setCurrentStreamFinal() {
    streamIsFinal = true;
  }
  @Override
  public final boolean isCurrentStreamFinal() {
    return streamIsFinal;
  }
  @Override
  public final void maybeThrowStreamError() throws IOException {
  }
  @Override
  public final void resetPosition(long positionUs) throws ExoPlaybackException {
    streamIsFinal = false;
    onPositionReset(positionUs, false);
  }
  @Override
  public final void stop() throws ExoPlaybackException {
    Assertions.checkState(state == STATE_STARTED);
    state = STATE_ENABLED;
    onStopped();
  }
  @Override
  public final void disable() {
    Assertions.checkState(state == STATE_ENABLED);
    state = STATE_DISABLED;
    // Clear stream state before notifying the subclass.
    stream = null;
    streamIsFinal = false;
    onDisabled();
  }
  @Override
  public final void reset() {
    Assertions.checkState(state == STATE_DISABLED);
    onReset();
  }
  @Override
  public boolean isReady() {
    return true;
  }
  @Override
  public boolean isEnded() {
    return true;
  }
  // RendererCapabilities implementation.
  @Override
  @Capabilities
  public int supportsFormat(Format format) throws ExoPlaybackException {
    // No format is supported, since no samples are consumed.
    return RendererCapabilities.create(FORMAT_UNSUPPORTED_TYPE);
  }
  @Override
  @AdaptiveSupport
  public int supportsMixedMimeTypeAdaptation() throws ExoPlaybackException {
    return ADAPTIVE_NOT_SUPPORTED;
  }
  // PlayerMessage.Target implementation.
  @Override
  public void handleMessage(int what, @Nullable Object object) throws ExoPlaybackException {
    // Do nothing.
  }
  // Methods to be overridden by subclasses.
  /**
   * Called when the renderer is enabled.
   * <p>
   * The default implementation is a no-op.
   *
   * @param joining Whether this renderer is being enabled to join an ongoing playback.
   * @throws ExoPlaybackException If an error occurs.
   */
  protected void onEnabled(boolean joining) throws ExoPlaybackException {
    // Do nothing.
  }
  /**
   * Called when the renderer's offset has been changed.
   * <p>
   * The default implementation is a no-op.
   *
   * @param offsetUs The offset that should be subtracted from {@code positionUs} in
   *     {@link #render(long, long)} to get the playback position with respect to the media.
   * @throws ExoPlaybackException If an error occurs.
   */
  protected void onRendererOffsetChanged(long offsetUs) throws ExoPlaybackException {
    // Do nothing.
  }
  /**
   * Called when the position is reset. This occurs when the renderer is enabled after
   * {@link #onRendererOffsetChanged(long)} has been called, and also when a position
   * discontinuity is encountered.
   * <p>
   * The default implementation is a no-op.
   *
   * @param positionUs The new playback position in microseconds.
   * @param joining Whether this renderer is being enabled to join an ongoing playback.
   * @throws ExoPlaybackException If an error occurs.
   */
  protected void onPositionReset(long positionUs, boolean joining) throws ExoPlaybackException {
    // Do nothing.
  }
  /**
   * Called when the renderer is started.
   * <p>
   * The default implementation is a no-op.
   *
   * @throws ExoPlaybackException If an error occurs.
   */
  protected void onStarted() throws ExoPlaybackException {
    // Do nothing.
  }
  /**
   * Called when the renderer is stopped.
   * <p>
   * The default implementation is a no-op.
   *
   * @throws ExoPlaybackException If an error occurs.
   */
  protected void onStopped() throws ExoPlaybackException {
    // Do nothing.
  }
  /**
   * Called when the renderer is disabled.
   * <p>
   * The default implementation is a no-op.
   */
  protected void onDisabled() {
    // Do nothing.
  }
  /**
   * Called when the renderer is reset.
   *
   * <p>The default implementation is a no-op.
   */
  protected void onReset() {
    // Do nothing.
  }
  // Methods to be called by subclasses.
  /**
   * Returns the configuration set when the renderer was most recently enabled, or {@code null} if
   * the renderer has never been enabled.
   */
  @Nullable
  protected final RendererConfiguration getConfiguration() {
    return configuration;
  }
  /**
   * Returns the index of the renderer within the player.
   */
  protected final int getIndex() {
    return index;
  }
}

Просмотреть файл

@ -0,0 +1,358 @@
/*
* Copyright (C) 2017 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.mozilla.thirdparty.com.google.android.exoplayer2;
import androidx.annotation.CheckResult;
import androidx.annotation.Nullable;
import org.mozilla.thirdparty.com.google.android.exoplayer2.source.MediaSource.MediaPeriodId;
import org.mozilla.thirdparty.com.google.android.exoplayer2.source.TrackGroupArray;
import org.mozilla.thirdparty.com.google.android.exoplayer2.trackselection.TrackSelectorResult;
/**
 * Information about an ongoing playback.
 */
/* package */ final class PlaybackInfo {
  /**
   * Dummy media period id used while the timeline is empty and no period id is specified. This id
   * is used when playback infos are created with {@link #createDummy(long, TrackSelectorResult)}.
   */
  private static final MediaPeriodId DUMMY_MEDIA_PERIOD_ID =
      new MediaPeriodId(/* periodUid= */ new Object());
  /** The current {@link Timeline}. */
  public final Timeline timeline;
  /** The {@link MediaPeriodId} of the currently playing media period in the {@link #timeline}. */
  public final MediaPeriodId periodId;
  /**
   * The start position at which playback started in {@link #periodId} relative to the start of the
   * associated period in the {@link #timeline}, in microseconds. Note that this value changes for
   * each position discontinuity.
   */
  public final long startPositionUs;
  /**
   * If {@link #periodId} refers to an ad, the position of the suspended content relative to the
   * start of the associated period in the {@link #timeline}, in microseconds. {@link C#TIME_UNSET}
   * if {@link #periodId} does not refer to an ad or if the suspended content should be played from
   * its default position.
   */
  public final long contentPositionUs;
  /** The current playback state. One of the {@link Player}.STATE_ constants. */
  @Player.State public final int playbackState;
  /** The current playback error, or null if this is not an error state. */
  @Nullable public final ExoPlaybackException playbackError;
  /** Whether the player is currently loading. */
  public final boolean isLoading;
  /** The currently available track groups. */
  public final TrackGroupArray trackGroups;
  /** The result of the current track selection. */
  public final TrackSelectorResult trackSelectorResult;
  /** The {@link MediaPeriodId} of the currently loading media period in the {@link #timeline}. */
  public final MediaPeriodId loadingMediaPeriodId;
  /**
   * Position up to which media is buffered in {@link #loadingMediaPeriodId} relative to the start
   * of the associated period in the {@link #timeline}, in microseconds.
   */
  public volatile long bufferedPositionUs;
  /**
   * Total duration of buffered media from {@link #positionUs} to {@link #bufferedPositionUs}
   * including all ads.
   */
  public volatile long totalBufferedDurationUs;
  /**
   * Current playback position in {@link #periodId} relative to the start of the associated period
   * in the {@link #timeline}, in microseconds.
   */
  public volatile long positionUs;
  /**
   * Creates empty dummy playback info which can be used for masking as long as no real playback
   * info is available.
   *
   * @param startPositionUs The start position at which playback should start, in microseconds.
   * @param emptyTrackSelectorResult An empty track selector result with null entries for each
   *     renderer.
   * @return A dummy playback info.
   */
  public static PlaybackInfo createDummy(
      long startPositionUs, TrackSelectorResult emptyTrackSelectorResult) {
    return new PlaybackInfo(
        Timeline.EMPTY,
        DUMMY_MEDIA_PERIOD_ID,
        startPositionUs,
        /* contentPositionUs= */ C.TIME_UNSET,
        Player.STATE_IDLE,
        /* playbackError= */ null,
        /* isLoading= */ false,
        TrackGroupArray.EMPTY,
        emptyTrackSelectorResult,
        DUMMY_MEDIA_PERIOD_ID,
        /* bufferedPositionUs= */ startPositionUs,
        /* totalBufferedDurationUs= */ 0,
        /* positionUs= */ startPositionUs);
  }
  /**
   * Create playback info.
   *
   * @param timeline See {@link #timeline}.
   * @param periodId See {@link #periodId}.
   * @param startPositionUs See {@link #startPositionUs}.
   * @param contentPositionUs See {@link #contentPositionUs}.
   * @param playbackState See {@link #playbackState}.
   * @param playbackError See {@link #playbackError}.
   * @param isLoading See {@link #isLoading}.
   * @param trackGroups See {@link #trackGroups}.
   * @param trackSelectorResult See {@link #trackSelectorResult}.
   * @param loadingMediaPeriodId See {@link #loadingMediaPeriodId}.
   * @param bufferedPositionUs See {@link #bufferedPositionUs}.
   * @param totalBufferedDurationUs See {@link #totalBufferedDurationUs}.
   * @param positionUs See {@link #positionUs}.
   */
  public PlaybackInfo(
      Timeline timeline,
      MediaPeriodId periodId,
      long startPositionUs,
      long contentPositionUs,
      @Player.State int playbackState,
      @Nullable ExoPlaybackException playbackError,
      boolean isLoading,
      TrackGroupArray trackGroups,
      TrackSelectorResult trackSelectorResult,
      MediaPeriodId loadingMediaPeriodId,
      long bufferedPositionUs,
      long totalBufferedDurationUs,
      long positionUs) {
    this.timeline = timeline;
    this.periodId = periodId;
    this.startPositionUs = startPositionUs;
    this.contentPositionUs = contentPositionUs;
    this.playbackState = playbackState;
    this.playbackError = playbackError;
    this.isLoading = isLoading;
    this.trackGroups = trackGroups;
    this.trackSelectorResult = trackSelectorResult;
    this.loadingMediaPeriodId = loadingMediaPeriodId;
    this.bufferedPositionUs = bufferedPositionUs;
    this.totalBufferedDurationUs = totalBufferedDurationUs;
    this.positionUs = positionUs;
  }
  /**
   * Returns dummy media period id for the first-to-be-played period of the current timeline.
   *
   * @param shuffleModeEnabled Whether shuffle mode is enabled.
   * @param window A writable {@link Timeline.Window}.
   * @param period A writable {@link Timeline.Period}.
   * @return A dummy media period id for the first-to-be-played period of the current timeline.
   */
  public MediaPeriodId getDummyFirstMediaPeriodId(
      boolean shuffleModeEnabled, Timeline.Window window, Timeline.Period period) {
    if (timeline.isEmpty()) {
      return DUMMY_MEDIA_PERIOD_ID;
    }
    int firstWindowIndex = timeline.getFirstWindowIndex(shuffleModeEnabled);
    int firstPeriodIndex = timeline.getWindow(firstWindowIndex, window).firstPeriodIndex;
    int currentPeriodIndex = timeline.getIndexOfPeriod(periodId.periodUid);
    long windowSequenceNumber = C.INDEX_UNSET;
    if (currentPeriodIndex != C.INDEX_UNSET) {
      int currentWindowIndex = timeline.getPeriod(currentPeriodIndex, period).windowIndex;
      if (firstWindowIndex == currentWindowIndex) {
        // Keep window sequence number if the new position is still in the same window.
        windowSequenceNumber = periodId.windowSequenceNumber;
      }
    }
    return new MediaPeriodId(timeline.getUidOfPeriod(firstPeriodIndex), windowSequenceNumber);
  }
  /**
   * Copies playback info with new playing position.
   *
   * @param periodId New playing media period. See {@link #periodId}.
   * @param positionUs New position. See {@link #positionUs}.
   * @param contentPositionUs New content position. See {@link #contentPositionUs}. Value is ignored
   *     if {@code periodId.isAd()} is false.
   * @param totalBufferedDurationUs New buffered duration. See {@link #totalBufferedDurationUs}.
   * @return Copied playback info with new playing position.
   */
  @CheckResult
  public PlaybackInfo copyWithNewPosition(
      MediaPeriodId periodId,
      long positionUs,
      long contentPositionUs,
      long totalBufferedDurationUs) {
    return new PlaybackInfo(
        timeline,
        periodId,
        positionUs,
        periodId.isAd() ? contentPositionUs : C.TIME_UNSET,
        playbackState,
        playbackError,
        isLoading,
        trackGroups,
        trackSelectorResult,
        loadingMediaPeriodId,
        bufferedPositionUs,
        totalBufferedDurationUs,
        positionUs);
  }
  /**
   * Copies playback info with the new timeline.
   *
   * @param timeline New timeline. See {@link #timeline}.
   * @return Copied playback info with the new timeline.
   */
  @CheckResult
  public PlaybackInfo copyWithTimeline(Timeline timeline) {
    return new PlaybackInfo(
        timeline,
        periodId,
        startPositionUs,
        contentPositionUs,
        playbackState,
        playbackError,
        isLoading,
        trackGroups,
        trackSelectorResult,
        loadingMediaPeriodId,
        bufferedPositionUs,
        totalBufferedDurationUs,
        positionUs);
  }
  /**
   * Copies playback info with new playback state.
   *
   * @param playbackState New playback state. See {@link #playbackState}.
   * @return Copied playback info with new playback state.
   */
  @CheckResult
  public PlaybackInfo copyWithPlaybackState(int playbackState) {
    return new PlaybackInfo(
        timeline,
        periodId,
        startPositionUs,
        contentPositionUs,
        playbackState,
        playbackError,
        isLoading,
        trackGroups,
        trackSelectorResult,
        loadingMediaPeriodId,
        bufferedPositionUs,
        totalBufferedDurationUs,
        positionUs);
  }
  /**
   * Copies playback info with a playback error.
   *
   * @param playbackError The error. See {@link #playbackError}.
   * @return Copied playback info with the playback error.
   */
  @CheckResult
  public PlaybackInfo copyWithPlaybackError(@Nullable ExoPlaybackException playbackError) {
    return new PlaybackInfo(
        timeline,
        periodId,
        startPositionUs,
        contentPositionUs,
        playbackState,
        playbackError,
        isLoading,
        trackGroups,
        trackSelectorResult,
        loadingMediaPeriodId,
        bufferedPositionUs,
        totalBufferedDurationUs,
        positionUs);
  }
  /**
   * Copies playback info with new loading state.
   *
   * @param isLoading New loading state. See {@link #isLoading}.
   * @return Copied playback info with new loading state.
   */
  @CheckResult
  public PlaybackInfo copyWithIsLoading(boolean isLoading) {
    return new PlaybackInfo(
        timeline,
        periodId,
        startPositionUs,
        contentPositionUs,
        playbackState,
        playbackError,
        isLoading,
        trackGroups,
        trackSelectorResult,
        loadingMediaPeriodId,
        bufferedPositionUs,
        totalBufferedDurationUs,
        positionUs);
  }
  /**
   * Copies playback info with new track information.
   *
   * @param trackGroups New track groups. See {@link #trackGroups}.
   * @param trackSelectorResult New track selector result. See {@link #trackSelectorResult}.
   * @return Copied playback info with new track information.
   */
  @CheckResult
  public PlaybackInfo copyWithTrackInfo(
      TrackGroupArray trackGroups, TrackSelectorResult trackSelectorResult) {
    return new PlaybackInfo(
        timeline,
        periodId,
        startPositionUs,
        contentPositionUs,
        playbackState,
        playbackError,
        isLoading,
        trackGroups,
        trackSelectorResult,
        loadingMediaPeriodId,
        bufferedPositionUs,
        totalBufferedDurationUs,
        positionUs);
  }
  /**
   * Copies playback info with new loading media period.
   *
   * @param loadingMediaPeriodId New loading media period id. See {@link #loadingMediaPeriodId}.
   * @return Copied playback info with new loading media period.
   */
  @CheckResult
  public PlaybackInfo copyWithLoadingMediaPeriodId(MediaPeriodId loadingMediaPeriodId) {
    return new PlaybackInfo(
        timeline,
        periodId,
        startPositionUs,
        contentPositionUs,
        playbackState,
        playbackError,
        isLoading,
        trackGroups,
        trackSelectorResult,
        loadingMediaPeriodId,
        bufferedPositionUs,
        totalBufferedDurationUs,
        positionUs);
  }
}

Просмотреть файл

@ -15,53 +15,80 @@
*/
package org.mozilla.thirdparty.com.google.android.exoplayer2;
import androidx.annotation.Nullable;
import org.mozilla.thirdparty.com.google.android.exoplayer2.util.Assertions;
/**
* The parameters that apply to playback.
*/
public final class PlaybackParameters {
/**
* The default playback parameters: real-time playback with no pitch modification.
* The default playback parameters: real-time playback with no pitch modification or silence
* skipping.
*/
public static final PlaybackParameters DEFAULT = new PlaybackParameters(1f, 1f);
public static final PlaybackParameters DEFAULT = new PlaybackParameters(/* speed= */ 1f);
/**
* The factor by which playback will be sped up.
*/
/** The factor by which playback will be sped up. */
public final float speed;
/**
* The factor by which the audio pitch will be scaled.
*/
/** The factor by which the audio pitch will be scaled. */
public final float pitch;
/** Whether to skip silence in the input. */
public final boolean skipSilence;
private final int scaledUsPerMs;
/**
* Creates new playback parameters.
* Creates new playback parameters that set the playback speed.
*
* @param speed The factor by which playback will be sped up.
* @param pitch The factor by which the audio pitch will be scaled.
* @param speed The factor by which playback will be sped up. Must be greater than zero.
*/
public PlaybackParameters(float speed) {
this(speed, /* pitch= */ 1f, /* skipSilence= */ false);
}
/**
* Creates new playback parameters that set the playback speed and audio pitch scaling factor.
*
* @param speed The factor by which playback will be sped up. Must be greater than zero.
* @param pitch The factor by which the audio pitch will be scaled. Must be greater than zero.
*/
public PlaybackParameters(float speed, float pitch) {
this(speed, pitch, /* skipSilence= */ false);
}
/**
* Creates new playback parameters that set the playback speed, audio pitch scaling factor and
* whether to skip silence in the audio stream.
*
* @param speed The factor by which playback will be sped up. Must be greater than zero.
* @param pitch The factor by which the audio pitch will be scaled. Must be greater than zero.
* @param skipSilence Whether to skip silences in the audio stream.
*/
public PlaybackParameters(float speed, float pitch, boolean skipSilence) {
Assertions.checkArgument(speed > 0);
Assertions.checkArgument(pitch > 0);
this.speed = speed;
this.pitch = pitch;
this.skipSilence = skipSilence;
scaledUsPerMs = Math.round(speed * 1000f);
}
/**
* Scales the millisecond duration {@code timeMs} by the playback speed, returning the result in
* microseconds.
* Returns the media time in microseconds that will elapse in {@code timeMs} milliseconds of
* wallclock time.
*
* @param timeMs The time to scale, in milliseconds.
* @return The scaled time, in microseconds.
*/
public long getSpeedAdjustedDurationUs(long timeMs) {
public long getMediaTimeUsForPlayoutTimeMs(long timeMs) {
return timeMs * scaledUsPerMs;
}
@Override
public boolean equals(Object obj) {
public boolean equals(@Nullable Object obj) {
if (this == obj) {
return true;
}
@ -69,14 +96,17 @@ public final class PlaybackParameters {
return false;
}
PlaybackParameters other = (PlaybackParameters) obj;
return this.speed == other.speed && this.pitch == other.pitch;
return this.speed == other.speed
&& this.pitch == other.pitch
&& this.skipSilence == other.skipSilence;
}
@Override
public int hashCode() {
int result = 17;
result = 31 * result + Float.floatToRawIntBits(speed);
result = 31 * result + Float.floatToRawIntBits(pitch);
result = 31 * result + (skipSilence ? 1 : 0);
return result;
}

Просмотреть файл

@ -0,0 +1,23 @@
/*
* Copyright (C) 2018 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.mozilla.thirdparty.com.google.android.exoplayer2;
/**
 * Implemented by components that can trigger preparation of a playback on demand, for example in
 * response to a user action received while the player is idle.
 */
public interface PlaybackPreparer {

  /** Called to prepare a playback. */
  void preparePlayback();
}

Разница между файлами не показана из-за своего большого размера Загрузить разницу

Просмотреть файл

@ -0,0 +1,301 @@
/*
* Copyright (C) 2017 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.mozilla.thirdparty.com.google.android.exoplayer2;
import android.os.Handler;
import androidx.annotation.Nullable;
import org.mozilla.thirdparty.com.google.android.exoplayer2.util.Assertions;
/**
 * Defines a player message which can be sent with a {@link Sender} and received by a {@link
 * Target}.
 *
 * <p>A message is configured via the chainable setters, dispatched exactly once with {@link
 * #send()}, and may then be canceled or waited upon. The setters may only be used before {@link
 * #send()} is called; this is enforced with {@link Assertions#checkState}.
 *
 * <p>Thread-safety: configuration happens on the sending thread before {@link #send()}; the
 * post-send state ({@code isCanceled}, {@code isDelivered}, {@code isProcessed}) is guarded by
 * this object's monitor via the {@code synchronized} methods below.
 */
public final class PlayerMessage {

  /** A target for messages. */
  public interface Target {

    /**
     * Handles a message delivered to the target.
     *
     * @param messageType The message type.
     * @param payload The message payload.
     * @throws ExoPlaybackException If an error occurred whilst handling the message. Should only be
     *     thrown by targets that handle messages on the playback thread.
     */
    void handleMessage(int messageType, @Nullable Object payload) throws ExoPlaybackException;
  }

  /** A sender for messages. */
  public interface Sender {

    /**
     * Sends a message.
     *
     * @param message The message to be sent.
     */
    void sendMessage(PlayerMessage message);
  }

  // Immutable after construction.
  private final Target target;
  private final Sender sender;
  private final Timeline timeline;

  // Mutable configuration; writable only before send() (checked via !isSent).
  private int type;
  @Nullable private Object payload;
  private Handler handler;
  private int windowIndex;
  private long positionMs;
  private boolean deleteAfterDelivery;

  // Delivery state. isSent is written on the sending thread; the remaining flags are only
  // accessed from synchronized methods after send().
  private boolean isSent;
  private boolean isDelivered;
  private boolean isProcessed;
  private boolean isCanceled;

  /**
   * Creates a new message.
   *
   * @param sender The {@link Sender} used to send the message.
   * @param target The {@link Target} the message is sent to.
   * @param timeline The timeline used when setting the position with {@link #setPosition(long)}. If
   *     set to {@link Timeline#EMPTY}, any position can be specified.
   * @param defaultWindowIndex The default window index in the {@code timeline} when no other window
   *     index is specified.
   * @param defaultHandler The default handler to send the message on when no other handler is
   *     specified.
   */
  public PlayerMessage(
      Sender sender,
      Target target,
      Timeline timeline,
      int defaultWindowIndex,
      Handler defaultHandler) {
    this.sender = sender;
    this.target = target;
    this.timeline = timeline;
    this.handler = defaultHandler;
    this.windowIndex = defaultWindowIndex;
    // TIME_UNSET means "deliver immediately" rather than at a playback position.
    this.positionMs = C.TIME_UNSET;
    this.deleteAfterDelivery = true;
  }

  /** Returns the timeline used for setting the position with {@link #setPosition(long)}. */
  public Timeline getTimeline() {
    return timeline;
  }

  /** Returns the target the message is sent to. */
  public Target getTarget() {
    return target;
  }

  /**
   * Sets the message type forwarded to {@link Target#handleMessage(int, Object)}.
   *
   * @param messageType The message type.
   * @return This message.
   * @throws IllegalStateException If {@link #send()} has already been called.
   */
  public PlayerMessage setType(int messageType) {
    Assertions.checkState(!isSent);
    this.type = messageType;
    return this;
  }

  /** Returns the message type forwarded to {@link Target#handleMessage(int, Object)}. */
  public int getType() {
    return type;
  }

  /**
   * Sets the message payload forwarded to {@link Target#handleMessage(int, Object)}.
   *
   * @param payload The message payload.
   * @return This message.
   * @throws IllegalStateException If {@link #send()} has already been called.
   */
  public PlayerMessage setPayload(@Nullable Object payload) {
    Assertions.checkState(!isSent);
    this.payload = payload;
    return this;
  }

  /** Returns the message payload forwarded to {@link Target#handleMessage(int, Object)}. */
  @Nullable
  public Object getPayload() {
    return payload;
  }

  /**
   * Sets the handler the message is delivered on.
   *
   * @param handler A {@link Handler}.
   * @return This message.
   * @throws IllegalStateException If {@link #send()} has already been called.
   */
  public PlayerMessage setHandler(Handler handler) {
    Assertions.checkState(!isSent);
    this.handler = handler;
    return this;
  }

  /** Returns the handler the message is delivered on. */
  public Handler getHandler() {
    return handler;
  }

  /**
   * Returns position in window at {@link #getWindowIndex()} at which the message will be delivered,
   * in milliseconds. If {@link C#TIME_UNSET}, the message will be delivered immediately.
   */
  public long getPositionMs() {
    return positionMs;
  }

  /**
   * Sets a position in the current window at which the message will be delivered.
   *
   * @param positionMs The position in the current window at which the message will be sent, in
   *     milliseconds.
   * @return This message.
   * @throws IllegalStateException If {@link #send()} has already been called.
   */
  public PlayerMessage setPosition(long positionMs) {
    Assertions.checkState(!isSent);
    this.positionMs = positionMs;
    return this;
  }

  /**
   * Sets a position in a window at which the message will be delivered.
   *
   * @param windowIndex The index of the window at which the message will be sent.
   * @param positionMs The position in the window with index {@code windowIndex} at which the
   *     message will be sent, in milliseconds.
   * @return This message.
   * @throws IllegalSeekPositionException If the timeline returned by {@link #getTimeline()} is not
   *     empty and the provided window index is not within the bounds of the timeline.
   * @throws IllegalStateException If {@link #send()} has already been called.
   */
  public PlayerMessage setPosition(int windowIndex, long positionMs) {
    Assertions.checkState(!isSent);
    // An explicit window requires an explicit position (TIME_UNSET is reserved for "immediate").
    Assertions.checkArgument(positionMs != C.TIME_UNSET);
    // Window bounds are only checkable against a non-empty timeline; an empty timeline accepts
    // any non-negative index.
    if (windowIndex < 0 || (!timeline.isEmpty() && windowIndex >= timeline.getWindowCount())) {
      throw new IllegalSeekPositionException(timeline, windowIndex, positionMs);
    }
    this.windowIndex = windowIndex;
    this.positionMs = positionMs;
    return this;
  }

  /** Returns window index at which the message will be delivered. */
  public int getWindowIndex() {
    return windowIndex;
  }

  /**
   * Sets whether the message will be deleted after delivery. If false, the message will be resent
   * if playback reaches the specified position again. Only allowed to be false if a position is set
   * with {@link #setPosition(long)}.
   *
   * @param deleteAfterDelivery Whether the message is deleted after delivery.
   * @return This message.
   * @throws IllegalStateException If {@link #send()} has already been called.
   */
  public PlayerMessage setDeleteAfterDelivery(boolean deleteAfterDelivery) {
    Assertions.checkState(!isSent);
    this.deleteAfterDelivery = deleteAfterDelivery;
    return this;
  }

  /** Returns whether the message will be deleted after delivery. */
  public boolean getDeleteAfterDelivery() {
    return deleteAfterDelivery;
  }

  /**
   * Sends the message. If the target throws an {@link ExoPlaybackException} then it is propagated
   * out of the player as an error using {@link
   * Player.EventListener#onPlayerError(ExoPlaybackException)}.
   *
   * @return This message.
   * @throws IllegalStateException If this message has already been sent.
   */
  public PlayerMessage send() {
    Assertions.checkState(!isSent);
    if (positionMs == C.TIME_UNSET) {
      // An "immediate" message has no position to be resent at, so it must be one-shot.
      Assertions.checkArgument(deleteAfterDelivery);
    }
    isSent = true;
    sender.sendMessage(this);
    return this;
  }

  /**
   * Cancels the message delivery.
   *
   * @return This message.
   * @throws IllegalStateException If this method is called before {@link #send()}.
   */
  public synchronized PlayerMessage cancel() {
    Assertions.checkState(isSent);
    isCanceled = true;
    // Unblocks any blockUntilDelivered() waiter; the message counts as processed but not
    // delivered.
    markAsProcessed(/* isDelivered= */ false);
    return this;
  }

  /** Returns whether the message delivery has been canceled. */
  public synchronized boolean isCanceled() {
    return isCanceled;
  }

  /**
   * Blocks until after the message has been delivered or the player is no longer able to deliver
   * the message.
   *
   * <p>Note that this method can't be called if the current thread is the same thread used by the
   * message handler set with {@link #setHandler(Handler)} as it would cause a deadlock.
   *
   * @return Whether the message was delivered successfully.
   * @throws IllegalStateException If this method is called before {@link #send()}.
   * @throws IllegalStateException If this method is called on the same thread used by the message
   *     handler set with {@link #setHandler(Handler)}.
   * @throws InterruptedException If the current thread is interrupted while waiting for the message
   *     to be delivered.
   */
  public synchronized boolean blockUntilDelivered() throws InterruptedException {
    Assertions.checkState(isSent);
    // Guard against self-deadlock: delivery happens on the handler's looper thread, which would
    // never reach markAsProcessed() while blocked here.
    Assertions.checkState(handler.getLooper().getThread() != Thread.currentThread());
    // Standard guarded-block loop: wait() can wake spuriously, so re-check the condition.
    while (!isProcessed) {
      wait();
    }
    return isDelivered;
  }

  /**
   * Marks the message as processed. Should only be called by a {@link Sender} and may be called
   * multiple times.
   *
   * @param isDelivered Whether the message has been delivered to its target. The message is
   *     considered as being delivered when this method has been called with {@code isDelivered} set
   *     to true at least once.
   */
  public synchronized void markAsProcessed(boolean isDelivered) {
    // OR-accumulate so a later markAsProcessed(false) (e.g. from cancel()) can't erase a
    // successful delivery.
    this.isDelivered |= isDelivered;
    isProcessed = true;
    // Wake all blockUntilDelivered() waiters.
    notifyAll();
  }
}

Просмотреть файл

@ -15,30 +15,47 @@
*/
package org.mozilla.thirdparty.com.google.android.exoplayer2;
import org.mozilla.thirdparty.com.google.android.exoplayer2.ExoPlayer.ExoPlayerComponent;
import androidx.annotation.IntDef;
import androidx.annotation.Nullable;
import org.mozilla.thirdparty.com.google.android.exoplayer2.source.SampleStream;
import org.mozilla.thirdparty.com.google.android.exoplayer2.util.MediaClock;
import java.io.IOException;
import java.lang.annotation.Documented;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
/**
* Renders media read from a {@link SampleStream}.
* <p>
* Internally, a renderer's lifecycle is managed by the owning {@link ExoPlayer}. The renderer is
* transitioned through various states as the overall playback state changes. The valid state
* transitions are shown below, annotated with the methods that are called during each transition.
* <p align="center">
* <img src="doc-files/renderer-states.svg" alt="Renderer state transitions">
* </p>
*
* <p>Internally, a renderer's lifecycle is managed by the owning {@link ExoPlayer}. The renderer is
* transitioned through various states as the overall playback state and enabled tracks change. The
* valid state transitions are shown below, annotated with the methods that are called during each
* transition.
*
* <p style="align:center"><img src="doc-files/renderer-states.svg" alt="Renderer state
* transitions">
*/
public interface Renderer extends ExoPlayerComponent {
public interface Renderer extends PlayerMessage.Target {
/**
* The renderer is disabled.
* The renderer states. One of {@link #STATE_DISABLED}, {@link #STATE_ENABLED} or {@link
* #STATE_STARTED}.
*/
@Documented
@Retention(RetentionPolicy.SOURCE)
@IntDef({STATE_DISABLED, STATE_ENABLED, STATE_STARTED})
@interface State {}
/**
* The renderer is disabled. A renderer in this state may hold resources that it requires for
* rendering (e.g. media decoders), for use if it's subsequently enabled. {@link #reset()} can be
* called to force the renderer to release these resources.
*/
int STATE_DISABLED = 0;
/**
* The renderer is enabled but not started. A renderer in this state is not actively rendering
* media, but will typically hold resources that it requires for rendering (e.g. media decoders).
* The renderer is enabled but not started. A renderer in this state may render media at the
* current position (e.g. an initial video frame), but the position will not advance. A renderer
* in this state will typically hold resources that it requires for rendering (e.g. media
* decoders).
*/
int STATE_ENABLED = 1;
/**
@ -72,18 +89,21 @@ public interface Renderer extends ExoPlayerComponent {
/**
* If the renderer advances its own playback position then this method returns a corresponding
* {@link MediaClock}. If provided, the player will use the returned {@link MediaClock} as its
* source of time during playback. A player may have at most one renderer that returns a
* {@link MediaClock} from this method.
* source of time during playback. A player may have at most one renderer that returns a {@link
* MediaClock} from this method.
*
* @return The {@link MediaClock} tracking the playback position of the renderer, or null.
*/
@Nullable
MediaClock getMediaClock();
/**
* Returns the current state of the renderer.
*
* @return The current state (one of the {@code STATE_*} constants).
* @return The current state. One of {@link #STATE_DISABLED}, {@link #STATE_ENABLED} and {@link
* #STATE_STARTED}.
*/
@State
int getState();
/**
@ -130,9 +150,8 @@ public interface Renderer extends ExoPlayerComponent {
void replaceStream(Format[] formats, SampleStream stream, long offsetUs)
throws ExoPlaybackException;
/**
* Returns the {@link SampleStream} being consumed, or null if the renderer is disabled.
*/
/** Returns the {@link SampleStream} being consumed, or null if the renderer is disabled. */
@Nullable
SampleStream getStream();
/**
@ -143,6 +162,16 @@ public interface Renderer extends ExoPlayerComponent {
*/
boolean hasReadStreamToEnd();
/**
* Returns the playback position up to which the renderer has read samples from the current {@link
* SampleStream}, in microseconds, or {@link C#TIME_END_OF_SOURCE} if the renderer has read the
* current {@link SampleStream} to the end.
*
* <p>This method may be called when the renderer is in the following states: {@link
* #STATE_ENABLED}, {@link #STATE_STARTED}.
*/
long getReadingPositionUs();
/**
* Signals to the renderer that the current {@link SampleStream} will be the final one supplied
* before it is next disabled or reset.
@ -184,6 +213,18 @@ public interface Renderer extends ExoPlayerComponent {
*/
void resetPosition(long positionUs) throws ExoPlaybackException;
/**
* Sets the operating rate of this renderer, where 1 is the default rate, 2 is twice the default
* rate, 0.5 is half the default rate and so on. The operating rate is a hint to the renderer of
* the speed at which playback will proceed, and may be used for resource planning.
*
* <p>The default implementation is a no-op.
*
* @param operatingRate The operating rate.
* @throws ExoPlaybackException If an error occurs handling the operating rate.
*/
default void setOperatingRate(float operatingRate) throws ExoPlaybackException {}
/**
* Incrementally renders the {@link SampleStream}.
* <p>
@ -226,7 +267,7 @@ public interface Renderer extends ExoPlayerComponent {
/**
* Whether the renderer is ready for the {@link ExoPlayer} instance to transition to
* {@link ExoPlayer#STATE_ENDED}. The player will make this transition as soon as {@code true} is
* {@link Player#STATE_ENDED}. The player will make this transition as soon as {@code true} is
* returned by all of its {@link Renderer}s.
* <p>
* This method may be called when the renderer is in the following states:
@ -254,4 +295,12 @@ public interface Renderer extends ExoPlayerComponent {
*/
void disable();
/**
* Forces the renderer to give up any resources (e.g. media decoders) that it may be holding. If
* the renderer is not holding any resources, the call is a no-op.
*
* <p>This method may be called when the renderer is in the following states: {@link
* #STATE_DISABLED}.
*/
void reset();
}

Просмотреть файл

@ -15,7 +15,12 @@
*/
package org.mozilla.thirdparty.com.google.android.exoplayer2;
import android.annotation.SuppressLint;
import androidx.annotation.IntDef;
import org.mozilla.thirdparty.com.google.android.exoplayer2.util.MimeTypes;
import java.lang.annotation.Documented;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
/**
* Defines the capabilities of a {@link Renderer}.
@ -23,24 +28,47 @@ import org.mozilla.thirdparty.com.google.android.exoplayer2.util.MimeTypes;
public interface RendererCapabilities {
/**
* A mask to apply to the result of {@link #supportsFormat(Format)} to obtain one of
* {@link #FORMAT_HANDLED}, {@link #FORMAT_EXCEEDS_CAPABILITIES},
* {@link #FORMAT_UNSUPPORTED_SUBTYPE} and {@link #FORMAT_UNSUPPORTED_TYPE}.
* Level of renderer support for a format. One of {@link #FORMAT_HANDLED}, {@link
* #FORMAT_EXCEEDS_CAPABILITIES}, {@link #FORMAT_UNSUPPORTED_DRM}, {@link
* #FORMAT_UNSUPPORTED_SUBTYPE} or {@link #FORMAT_UNSUPPORTED_TYPE}.
*/
int FORMAT_SUPPORT_MASK = 0b11;
@Documented
@Retention(RetentionPolicy.SOURCE)
@IntDef({
FORMAT_HANDLED,
FORMAT_EXCEEDS_CAPABILITIES,
FORMAT_UNSUPPORTED_DRM,
FORMAT_UNSUPPORTED_SUBTYPE,
FORMAT_UNSUPPORTED_TYPE
})
@interface FormatSupport {}
/** A mask to apply to {@link Capabilities} to obtain the {@link FormatSupport} only. */
int FORMAT_SUPPORT_MASK = 0b111;
/**
* The {@link Renderer} is capable of rendering the format.
*/
int FORMAT_HANDLED = 0b11;
int FORMAT_HANDLED = 0b100;
/**
* The {@link Renderer} is capable of rendering formats with the same mime type, but the
* properties of the format exceed the renderer's capability.
* properties of the format exceed the renderer's capabilities. There is a chance the renderer
* will be able to play the format in practice because some renderers report their capabilities
* conservatively, but the expected outcome is that playback will fail.
* <p>
* Example: The {@link Renderer} is capable of rendering H264 and the format's mime type is
* {@link MimeTypes#VIDEO_H264}, but the format's resolution exceeds the maximum limit supported
* by the underlying H264 decoder.
*/
int FORMAT_EXCEEDS_CAPABILITIES = 0b10;
int FORMAT_EXCEEDS_CAPABILITIES = 0b011;
/**
* The {@link Renderer} is capable of rendering formats with the same mime type, but is not
* capable of rendering the format because the format's drm protection is not supported.
* <p>
* Example: The {@link Renderer} is capable of rendering H264 and the format's mime type is
* {@link MimeTypes#VIDEO_H264}, but the format indicates PlayReady drm protection where-as the
* renderer only supports Widevine.
*/
int FORMAT_UNSUPPORTED_DRM = 0b010;
/**
* The {@link Renderer} is a general purpose renderer for formats of the same top-level type,
* but is not capable of rendering the format or any other format with the same mime type because
@ -49,7 +77,7 @@ public interface RendererCapabilities {
* Example: The {@link Renderer} is a general purpose audio renderer and the format's
* mime type matches audio/[subtype], but there does not exist a suitable decoder for [subtype].
*/
int FORMAT_UNSUPPORTED_SUBTYPE = 0b01;
int FORMAT_UNSUPPORTED_SUBTYPE = 0b001;
/**
* The {@link Renderer} is not capable of rendering the format, either because it does not
* support the format's top-level type, or because it's a specialized renderer for a different
@ -58,40 +86,179 @@ public interface RendererCapabilities {
* Example: The {@link Renderer} is a general purpose video renderer, but the format has an
* audio mime type.
*/
int FORMAT_UNSUPPORTED_TYPE = 0b00;
int FORMAT_UNSUPPORTED_TYPE = 0b000;
/**
* A mask to apply to the result of {@link #supportsFormat(Format)} to obtain one of
* {@link #ADAPTIVE_SEAMLESS}, {@link #ADAPTIVE_NOT_SEAMLESS} and {@link #ADAPTIVE_NOT_SUPPORTED}.
* Level of renderer support for adaptive format switches. One of {@link #ADAPTIVE_SEAMLESS},
* {@link #ADAPTIVE_NOT_SEAMLESS} or {@link #ADAPTIVE_NOT_SUPPORTED}.
*/
int ADAPTIVE_SUPPORT_MASK = 0b1100;
@Documented
@Retention(RetentionPolicy.SOURCE)
@IntDef({ADAPTIVE_SEAMLESS, ADAPTIVE_NOT_SEAMLESS, ADAPTIVE_NOT_SUPPORTED})
@interface AdaptiveSupport {}
/** A mask to apply to {@link Capabilities} to obtain the {@link AdaptiveSupport} only. */
int ADAPTIVE_SUPPORT_MASK = 0b11000;
/**
* The {@link Renderer} can seamlessly adapt between formats.
*/
int ADAPTIVE_SEAMLESS = 0b1000;
int ADAPTIVE_SEAMLESS = 0b10000;
/**
* The {@link Renderer} can adapt between formats, but may suffer a brief discontinuity
* (~50-100ms) when adaptation occurs.
*/
int ADAPTIVE_NOT_SEAMLESS = 0b0100;
int ADAPTIVE_NOT_SEAMLESS = 0b01000;
/**
* The {@link Renderer} does not support adaptation between formats.
*/
int ADAPTIVE_NOT_SUPPORTED = 0b0000;
int ADAPTIVE_NOT_SUPPORTED = 0b00000;
/**
* A mask to apply to the result of {@link #supportsFormat(Format)} to obtain one of
* {@link #TUNNELING_SUPPORTED} and {@link #TUNNELING_NOT_SUPPORTED}.
* Level of renderer support for tunneling. One of {@link #TUNNELING_SUPPORTED} or {@link
* #TUNNELING_NOT_SUPPORTED}.
*/
int TUNNELING_SUPPORT_MASK = 0b10000;
@Documented
@Retention(RetentionPolicy.SOURCE)
@IntDef({TUNNELING_SUPPORTED, TUNNELING_NOT_SUPPORTED})
@interface TunnelingSupport {}
/** A mask to apply to {@link Capabilities} to obtain the {@link TunnelingSupport} only. */
int TUNNELING_SUPPORT_MASK = 0b100000;
/**
* The {@link Renderer} supports tunneled output.
*/
int TUNNELING_SUPPORTED = 0b10000;
int TUNNELING_SUPPORTED = 0b100000;
/**
* The {@link Renderer} does not support tunneled output.
*/
int TUNNELING_NOT_SUPPORTED = 0b00000;
int TUNNELING_NOT_SUPPORTED = 0b000000;
/**
* Combined renderer capabilities.
*
* <p>This is a bitwise OR of {@link FormatSupport}, {@link AdaptiveSupport} and {@link
* TunnelingSupport}. Use {@link #getFormatSupport(int)}, {@link #getAdaptiveSupport(int)} or
* {@link #getTunnelingSupport(int)} to obtain the individual flags. And use {@link #create(int)}
* or {@link #create(int, int, int)} to create the combined capabilities.
*
* <p>Possible values:
*
* <ul>
* <li>{@link FormatSupport}: The level of support for the format itself. One of {@link
* #FORMAT_HANDLED}, {@link #FORMAT_EXCEEDS_CAPABILITIES}, {@link #FORMAT_UNSUPPORTED_DRM},
* {@link #FORMAT_UNSUPPORTED_SUBTYPE} and {@link #FORMAT_UNSUPPORTED_TYPE}.
* <li>{@link AdaptiveSupport}: The level of support for adapting from the format to another
* format of the same mime type. One of {@link #ADAPTIVE_SEAMLESS}, {@link
* #ADAPTIVE_NOT_SEAMLESS} and {@link #ADAPTIVE_NOT_SUPPORTED}. Only set if the level of
* support for the format itself is {@link #FORMAT_HANDLED} or {@link
* #FORMAT_EXCEEDS_CAPABILITIES}.
* <li>{@link TunnelingSupport}: The level of support for tunneling. One of {@link
* #TUNNELING_SUPPORTED} and {@link #TUNNELING_NOT_SUPPORTED}. Only set if the level of
* support for the format itself is {@link #FORMAT_HANDLED} or {@link
* #FORMAT_EXCEEDS_CAPABILITIES}.
* </ul>
*/
@Documented
@Retention(RetentionPolicy.SOURCE)
// Intentionally empty to prevent assignment or comparison with individual flags without masking.
@IntDef({})
@interface Capabilities {}
/**
* Returns {@link Capabilities} for the given {@link FormatSupport}.
*
* <p>The {@link AdaptiveSupport} is set to {@link #ADAPTIVE_NOT_SUPPORTED} and {{@link
* TunnelingSupport} is set to {@link #TUNNELING_NOT_SUPPORTED}.
*
* @param formatSupport The {@link FormatSupport}.
* @return The combined {@link Capabilities} of the given {@link FormatSupport}, {@link
* #ADAPTIVE_NOT_SUPPORTED} and {@link #TUNNELING_NOT_SUPPORTED}.
*/
@Capabilities
static int create(@FormatSupport int formatSupport) {
return create(formatSupport, ADAPTIVE_NOT_SUPPORTED, TUNNELING_NOT_SUPPORTED);
}
/**
* Returns {@link Capabilities} combining the given {@link FormatSupport}, {@link AdaptiveSupport}
* and {@link TunnelingSupport}.
*
* @param formatSupport The {@link FormatSupport}.
* @param adaptiveSupport The {@link AdaptiveSupport}.
* @param tunnelingSupport The {@link TunnelingSupport}.
* @return The combined {@link Capabilities}.
*/
// Suppression needed for IntDef casting.
@SuppressLint("WrongConstant")
@Capabilities
static int create(
@FormatSupport int formatSupport,
@AdaptiveSupport int adaptiveSupport,
@TunnelingSupport int tunnelingSupport) {
return formatSupport | adaptiveSupport | tunnelingSupport;
}
/**
* Returns the {@link FormatSupport} from the combined {@link Capabilities}.
*
* @param supportFlags The combined {@link Capabilities}.
* @return The {@link FormatSupport} only.
*/
// Suppression needed for IntDef casting.
@SuppressLint("WrongConstant")
@FormatSupport
static int getFormatSupport(@Capabilities int supportFlags) {
return supportFlags & FORMAT_SUPPORT_MASK;
}
/**
* Returns the {@link AdaptiveSupport} from the combined {@link Capabilities}.
*
* @param supportFlags The combined {@link Capabilities}.
* @return The {@link AdaptiveSupport} only.
*/
// Suppression needed for IntDef casting.
@SuppressLint("WrongConstant")
@AdaptiveSupport
static int getAdaptiveSupport(@Capabilities int supportFlags) {
return supportFlags & ADAPTIVE_SUPPORT_MASK;
}
/**
* Returns the {@link TunnelingSupport} from the combined {@link Capabilities}.
*
* @param supportFlags The combined {@link Capabilities}.
* @return The {@link TunnelingSupport} only.
*/
// Suppression needed for IntDef casting.
@SuppressLint("WrongConstant")
@TunnelingSupport
static int getTunnelingSupport(@Capabilities int supportFlags) {
return supportFlags & TUNNELING_SUPPORT_MASK;
}
/**
* Returns string representation of a {@link FormatSupport} flag.
*
* @param formatSupport A {@link FormatSupport} flag.
* @return A string representation of the flag.
*/
static String getFormatSupportString(@FormatSupport int formatSupport) {
switch (formatSupport) {
case RendererCapabilities.FORMAT_HANDLED:
return "YES";
case RendererCapabilities.FORMAT_EXCEEDS_CAPABILITIES:
return "NO_EXCEEDS_CAPABILITIES";
case RendererCapabilities.FORMAT_UNSUPPORTED_DRM:
return "NO_UNSUPPORTED_DRM";
case RendererCapabilities.FORMAT_UNSUPPORTED_SUBTYPE:
return "NO_UNSUPPORTED_TYPE";
case RendererCapabilities.FORMAT_UNSUPPORTED_TYPE:
return "NO";
default:
throw new IllegalStateException();
}
}
/**
* Returns the track type that the {@link Renderer} handles. For example, a video renderer will
@ -104,37 +271,23 @@ public interface RendererCapabilities {
int getTrackType();
/**
* Returns the extent to which the {@link Renderer} supports a given format. The returned value is
* the bitwise OR of three properties:
* <ul>
* <li>The level of support for the format itself. One of {@link #FORMAT_HANDLED},
* {@link #FORMAT_EXCEEDS_CAPABILITIES}, {@link #FORMAT_UNSUPPORTED_SUBTYPE} and
* {@link #FORMAT_UNSUPPORTED_TYPE}.</li>
* <li>The level of support for adapting from the format to another format of the same mime type.
* One of {@link #ADAPTIVE_SEAMLESS}, {@link #ADAPTIVE_NOT_SEAMLESS} and
* {@link #ADAPTIVE_NOT_SUPPORTED}.</li>
* <li>The level of support for tunneling. One of {@link #TUNNELING_SUPPORTED} and
* {@link #TUNNELING_NOT_SUPPORTED}.</li>
* </ul>
* The individual properties can be retrieved by performing a bitwise AND with
* {@link #FORMAT_SUPPORT_MASK}, {@link #ADAPTIVE_SUPPORT_MASK} and
* {@link #TUNNELING_SUPPORT_MASK} respectively.
* Returns the extent to which the {@link Renderer} supports a given format.
*
* @param format The format.
* @return The extent to which the renderer is capable of supporting the given format.
* @return The {@link Capabilities} for this format.
* @throws ExoPlaybackException If an error occurs.
*/
@Capabilities
int supportsFormat(Format format) throws ExoPlaybackException;
/**
* Returns the extent to which the {@link Renderer} supports adapting between supported formats
* that have different mime types.
* that have different MIME types.
*
* @return The extent to which the renderer supports adapting between supported formats that have
* different mime types. One of {@link #ADAPTIVE_SEAMLESS}, {@link #ADAPTIVE_NOT_SEAMLESS} and
* {@link #ADAPTIVE_NOT_SUPPORTED}.
* @return The {@link AdaptiveSupport} for adapting between supported formats that have different
* MIME types.
* @throws ExoPlaybackException If an error occurs.
*/
@AdaptiveSupport
int supportsMixedMimeTypeAdaptation() throws ExoPlaybackException;
}

Просмотреть файл

@ -15,6 +15,8 @@
*/
package org.mozilla.thirdparty.com.google.android.exoplayer2;
import androidx.annotation.Nullable;
/**
* The configuration of a {@link Renderer}.
*/
@ -41,7 +43,7 @@ public final class RendererConfiguration {
}
@Override
public boolean equals(Object obj) {
public boolean equals(@Nullable Object obj) {
if (this == obj) {
return true;
}

Просмотреть файл

@ -16,9 +16,12 @@
package org.mozilla.thirdparty.com.google.android.exoplayer2;
import android.os.Handler;
import androidx.annotation.Nullable;
import org.mozilla.thirdparty.com.google.android.exoplayer2.audio.AudioRendererEventListener;
import org.mozilla.thirdparty.com.google.android.exoplayer2.metadata.MetadataRenderer;
import org.mozilla.thirdparty.com.google.android.exoplayer2.text.TextRenderer;
import org.mozilla.thirdparty.com.google.android.exoplayer2.drm.DrmSessionManager;
import org.mozilla.thirdparty.com.google.android.exoplayer2.drm.FrameworkMediaCrypto;
import org.mozilla.thirdparty.com.google.android.exoplayer2.metadata.MetadataOutput;
import org.mozilla.thirdparty.com.google.android.exoplayer2.text.TextOutput;
import org.mozilla.thirdparty.com.google.android.exoplayer2.video.VideoRendererEventListener;
/**
@ -31,14 +34,17 @@ public interface RenderersFactory {
*
* @param eventHandler A handler to use when invoking event listeners and outputs.
* @param videoRendererEventListener An event listener for video renderers.
* @param videoRendererEventListener An event listener for audio renderers.
* @param audioRendererEventListener An event listener for audio renderers.
* @param textRendererOutput An output for text renderers.
* @param metadataRendererOutput An output for metadata renderers.
* @param drmSessionManager A drm session manager used by renderers.
* @return The {@link Renderer} instances.
*/
Renderer[] createRenderers(Handler eventHandler,
Renderer[] createRenderers(
Handler eventHandler,
VideoRendererEventListener videoRendererEventListener,
AudioRendererEventListener audioRendererEventListener,
TextRenderer.Output textRendererOutput, MetadataRenderer.Output metadataRendererOutput);
TextOutput textRendererOutput,
MetadataOutput metadataRendererOutput,
@Nullable DrmSessionManager<FrameworkMediaCrypto> drmSessionManager);
}

Просмотреть файл

@ -0,0 +1,91 @@
/*
* Copyright (C) 2017 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.mozilla.thirdparty.com.google.android.exoplayer2;
import androidx.annotation.Nullable;
import org.mozilla.thirdparty.com.google.android.exoplayer2.util.Assertions;
/**
 * Parameters that apply to seeking.
 *
 * <p>The predefined {@link #EXACT}, {@link #CLOSEST_SYNC}, {@link #PREVIOUS_SYNC} and {@link
 * #NEXT_SYNC} instances cover most use cases. Seeking to a sync point is typically faster, but
 * less accurate, than exact seeking.
 *
 * <p>In general, an instance defines a tolerance window around a requested seek position {@code
 * x}, bounded by {@link #toleranceBeforeUs} and {@link #toleranceAfterUs}. If at least one sync
 * point lies inside {@code [x - toleranceBeforeUs, x + toleranceAfterUs]}, the seek is performed
 * to the in-window sync point closest to {@code x}. If no sync point lies inside the window, the
 * seek is performed to {@code x - toleranceBeforeUs}. Internally the player may need to seek to
 * an earlier sync point and discard media until that position is reached.
 */
public final class SeekParameters {

  /** Parameters for exact seeking. */
  public static final SeekParameters EXACT = new SeekParameters(0, 0);
  /** Parameters for seeking to the closest sync point. */
  public static final SeekParameters CLOSEST_SYNC =
      new SeekParameters(Long.MAX_VALUE, Long.MAX_VALUE);
  /** Parameters for seeking to the sync point immediately before a requested seek position. */
  public static final SeekParameters PREVIOUS_SYNC = new SeekParameters(Long.MAX_VALUE, 0);
  /** Parameters for seeking to the sync point immediately after a requested seek position. */
  public static final SeekParameters NEXT_SYNC = new SeekParameters(0, Long.MAX_VALUE);
  /** Default parameters. */
  public static final SeekParameters DEFAULT = EXACT;

  /**
   * The maximum time by which the position actually seeked to may precede the requested seek
   * position, in microseconds.
   */
  public final long toleranceBeforeUs;
  /**
   * The maximum time by which the position actually seeked to may exceed the requested seek
   * position, in microseconds.
   */
  public final long toleranceAfterUs;

  /**
   * @param toleranceBeforeUs The maximum time that the actual position seeked to may precede the
   *     requested seek position, in microseconds. Must be non-negative.
   * @param toleranceAfterUs The maximum time that the actual position seeked to may exceed the
   *     requested seek position, in microseconds. Must be non-negative.
   */
  public SeekParameters(long toleranceBeforeUs, long toleranceAfterUs) {
    // Validate via Assertions to keep the library-wide argument-check behavior.
    Assertions.checkArgument(toleranceBeforeUs >= 0);
    Assertions.checkArgument(toleranceAfterUs >= 0);
    this.toleranceBeforeUs = toleranceBeforeUs;
    this.toleranceAfterUs = toleranceAfterUs;
  }

  @Override
  public boolean equals(@Nullable Object obj) {
    if (obj == this) {
      return true;
    }
    if (obj == null || obj.getClass() != getClass()) {
      return false;
    }
    SeekParameters that = (SeekParameters) obj;
    return toleranceBeforeUs == that.toleranceBeforeUs
        && toleranceAfterUs == that.toleranceAfterUs;
  }

  @Override
  public int hashCode() {
    // Intentional truncation of the long tolerances; must stay consistent with equals.
    return (31 * (int) toleranceBeforeUs) + (int) toleranceAfterUs;
  }
}

Разница между файлами не показана из-за своего большого размера Загрузить разницу

Просмотреть файл

@ -15,220 +15,123 @@
*/
package org.mozilla.thirdparty.com.google.android.exoplayer2;
import android.util.Pair;
import androidx.annotation.Nullable;
import org.mozilla.thirdparty.com.google.android.exoplayer2.source.ads.AdPlaybackState;
import org.mozilla.thirdparty.com.google.android.exoplayer2.util.Assertions;
/**
* A representation of media currently available for playback.
* <p>
* Timeline instances are immutable. For cases where the available media is changing dynamically
* (e.g. live streams) a timeline provides a snapshot of the media currently available.
* <p>
* A timeline consists of related {@link Period}s and {@link Window}s. A period defines a single
* logical piece of media, for example a media file. A window spans one or more periods, defining
* the region within those periods that's currently available for playback along with additional
* information such as whether seeking is supported within the window. Each window defines a default
* position, which is the position from which playback will start when the player starts playing the
* window. The following examples illustrate timelines for various use cases.
* A flexible representation of the structure of media. A timeline is able to represent the
* structure of a wide variety of media, from simple cases like a single media file through to
* complex compositions of media such as playlists and streams with inserted ads. Instances are
* immutable. For cases where media is changing dynamically (e.g. live streams), a timeline provides
* a snapshot of the current state.
*
* <p>A timeline consists of {@link Window Windows} and {@link Period Periods}.
*
* <ul>
* <li>A {@link Window} usually corresponds to one playlist item. It may span one or more periods
* and it defines the region within those periods that's currently available for playback. The
* window also provides additional information such as whether seeking is supported within the
* window and the default position, which is the position from which playback will start when
* the player starts playing the window.
* <li>A {@link Period} defines a single logical piece of media, for example a media file. It may
* also define groups of ads inserted into the media, along with information about whether
* those ads have been loaded and played.
* </ul>
*
* <p>The following examples illustrate timelines for various use cases.
*
* <h3 id="single-file">Single media file or on-demand stream</h3>
* <p align="center">
* <img src="doc-files/timeline-single-file.svg" alt="Example timeline for a single file">
* </p>
* A timeline for a single media file or on-demand stream consists of a single period and window.
* The window spans the whole period, indicating that all parts of the media are available for
* playback. The window's default position is typically at the start of the period (indicated by the
* black dot in the figure above).
*
* <p style="align:center"><img src="doc-files/timeline-single-file.svg" alt="Example timeline for a
* single file"> A timeline for a single media file or on-demand stream consists of a single period
* and window. The window spans the whole period, indicating that all parts of the media are
* available for playback. The window's default position is typically at the start of the period
* (indicated by the black dot in the figure above).
*
* <h3>Playlist of media files or on-demand streams</h3>
* <p align="center">
* <img src="doc-files/timeline-playlist.svg" alt="Example timeline for a playlist of files">
* </p>
* A timeline for a playlist of media files or on-demand streams consists of multiple periods, each
* with its own window. Each window spans the whole of the corresponding period, and typically has a
* default position at the start of the period. The properties of the periods and windows (e.g.
* their durations and whether the window is seekable) will often only become known when the player
* starts buffering the corresponding file or stream.
*
* <p style="align:center"><img src="doc-files/timeline-playlist.svg" alt="Example timeline for a
* playlist of files"> A timeline for a playlist of media files or on-demand streams consists of
* multiple periods, each with its own window. Each window spans the whole of the corresponding
* period, and typically has a default position at the start of the period. The properties of the
* periods and windows (e.g. their durations and whether the window is seekable) will often only
* become known when the player starts buffering the corresponding file or stream.
*
* <h3 id="live-limited">Live stream with limited availability</h3>
* <p align="center">
* <img src="doc-files/timeline-live-limited.svg" alt="Example timeline for a live stream with
* limited availability">
* </p>
* A timeline for a live stream consists of a period whose duration is unknown, since it's
* continually extending as more content is broadcast. If content only remains available for a
* limited period of time then the window may start at a non-zero position, defining the region of
* content that can still be played. The window will have {@link Window#isDynamic} set to true if
* the stream is still live. Its default position is typically near to the live edge (indicated by
* the black dot in the figure above).
*
* <p style="align:center"><img src="doc-files/timeline-live-limited.svg" alt="Example timeline for
* a live stream with limited availability"> A timeline for a live stream consists of a period whose
* duration is unknown, since it's continually extending as more content is broadcast. If content
* only remains available for a limited period of time then the window may start at a non-zero
* position, defining the region of content that can still be played. The window will have {@link
* Window#isLive} set to true to indicate it's a live stream and {@link Window#isDynamic} set to
* true as long as we expect changes to the live window. Its default position is typically near to
* the live edge (indicated by the black dot in the figure above).
*
* <h3>Live stream with indefinite availability</h3>
* <p align="center">
* <img src="doc-files/timeline-live-indefinite.svg" alt="Example timeline for a live stream with
* indefinite availability">
* </p>
* A timeline for a live stream with indefinite availability is similar to the
* <a href="#live-limited">Live stream with limited availability</a> case, except that the window
* starts at the beginning of the period to indicate that all of the previously broadcast content
* can still be played.
*
* <p style="align:center"><img src="doc-files/timeline-live-indefinite.svg" alt="Example timeline
* for a live stream with indefinite availability"> A timeline for a live stream with indefinite
* availability is similar to the <a href="#live-limited">Live stream with limited availability</a>
* case, except that the window starts at the beginning of the period to indicate that all of the
* previously broadcast content can still be played.
*
* <h3 id="live-multi-period">Live stream with multiple periods</h3>
* <p align="center">
* <img src="doc-files/timeline-live-multi-period.svg" alt="Example timeline for a live stream
* with multiple periods">
* </p>
* This case arises when a live stream is explicitly divided into separate periods, for example at
* content and advert boundaries. This case is similar to the <a href="#live-limited">Live stream
* with limited availability</a> case, except that the window may span more than one period.
* Multiple periods are also possible in the indefinite availability case.
*
* <h3>On-demand pre-roll followed by live stream</h3>
* <p align="center">
* <img src="doc-files/timeline-advanced.svg" alt="Example timeline for an on-demand pre-roll
* followed by a live stream">
* </p>
* This case is the concatenation of the <a href="#single-file">Single media file or on-demand
* stream</a> and <a href="#multi-period">Live stream with multiple periods</a> cases. When playback
* of the pre-roll ends, playback of the live stream will start from its default position near the
* live edge.
* <p style="align:center"><img src="doc-files/timeline-live-multi-period.svg" alt="Example timeline
* for a live stream with multiple periods"> This case arises when a live stream is explicitly
* divided into separate periods, for example at content boundaries. This case is similar to the <a
* href="#live-limited">Live stream with limited availability</a> case, except that the window may
* span more than one period. Multiple periods are also possible in the indefinite availability
* case.
*
* <h3>On-demand stream followed by live stream</h3>
*
* <p style="align:center"><img src="doc-files/timeline-advanced.svg" alt="Example timeline for an
* on-demand stream followed by a live stream"> This case is the concatenation of the <a
href="#single-file">Single media file or on-demand stream</a> and <a href="#live-multi-period">Live
* stream with multiple periods</a> cases. When playback of the on-demand stream ends, playback of
* the live stream will start from its default position near the live edge.
*
* <h3 id="single-file-midrolls">On-demand stream with mid-roll ads</h3>
*
* <p style="align:center"><img src="doc-files/timeline-single-file-midrolls.svg" alt="Example
* timeline for an on-demand stream with mid-roll ad groups"> This case includes mid-roll ad groups,
* which are defined as part of the timeline's single period. The period can be queried for
* information about the ad groups and the ads they contain.
*/
public abstract class Timeline {
/**
* An empty timeline.
*/
public static final Timeline EMPTY = new Timeline() {
@Override
public int getWindowCount() {
return 0;
}
@Override
public Window getWindow(int windowIndex, Window window, boolean setIds,
long defaultPositionProjectionUs) {
throw new IndexOutOfBoundsException();
}
@Override
public int getPeriodCount() {
return 0;
}
@Override
public Period getPeriod(int periodIndex, Period period, boolean setIds) {
throw new IndexOutOfBoundsException();
}
@Override
public int getIndexOfPeriod(Object uid) {
return C.INDEX_UNSET;
}
};
/**
* Returns whether the timeline is empty.
*/
public final boolean isEmpty() {
return getWindowCount() == 0;
}
/**
* Returns the number of windows in the timeline.
*/
public abstract int getWindowCount();
/**
* Populates a {@link Window} with data for the window at the specified index. Does not populate
* {@link Window#id}.
* Holds information about a window in a {@link Timeline}. A window usually corresponds to one
* playlist item and defines a region of media currently available for playback along with
* additional information such as whether seeking is supported within the window. The figure below
* shows some of the information defined by a window, as well as how this information relates to
* corresponding {@link Period Periods} in the timeline.
*
* @param windowIndex The index of the window.
* @param window The {@link Window} to populate. Must not be null.
* @return The populated {@link Window}, for convenience.
*/
public final Window getWindow(int windowIndex, Window window) {
return getWindow(windowIndex, window, false);
}
/**
* Populates a {@link Window} with data for the window at the specified index.
*
* @param windowIndex The index of the window.
* @param window The {@link Window} to populate. Must not be null.
* @param setIds Whether {@link Window#id} should be populated. If false, the field will be set to
* null. The caller should pass false for efficiency reasons unless the field is required.
* @return The populated {@link Window}, for convenience.
*/
public Window getWindow(int windowIndex, Window window, boolean setIds) {
return getWindow(windowIndex, window, setIds, 0);
}
/**
* Populates a {@link Window} with data for the window at the specified index.
*
* @param windowIndex The index of the window.
* @param window The {@link Window} to populate. Must not be null.
* @param setIds Whether {@link Window#id} should be populated. If false, the field will be set to
* null. The caller should pass false for efficiency reasons unless the field is required.
* @param defaultPositionProjectionUs A duration into the future that the populated window's
* default start position should be projected.
* @return The populated {@link Window}, for convenience.
*/
public abstract Window getWindow(int windowIndex, Window window, boolean setIds,
long defaultPositionProjectionUs);
/**
* Returns the number of periods in the timeline.
*/
public abstract int getPeriodCount();
/**
* Populates a {@link Period} with data for the period at the specified index. Does not populate
* {@link Period#id} and {@link Period#uid}.
*
* @param periodIndex The index of the period.
* @param period The {@link Period} to populate. Must not be null.
* @return The populated {@link Period}, for convenience.
*/
public final Period getPeriod(int periodIndex, Period period) {
return getPeriod(periodIndex, period, false);
}
/**
* Populates a {@link Period} with data for the period at the specified index.
*
* @param periodIndex The index of the period.
* @param period The {@link Period} to populate. Must not be null.
* @param setIds Whether {@link Period#id} and {@link Period#uid} should be populated. If false,
* the fields will be set to null. The caller should pass false for efficiency reasons unless
* the fields are required.
* @return The populated {@link Period}, for convenience.
*/
public abstract Period getPeriod(int periodIndex, Period period, boolean setIds);
/**
* Returns the index of the period identified by its unique {@code id}, or {@link C#INDEX_UNSET}
* if the period is not in the timeline.
*
* @param uid A unique identifier for a period.
* @return The index of the period, or {@link C#INDEX_UNSET} if the period was not found.
*/
public abstract int getIndexOfPeriod(Object uid);
/**
* Holds information about a window in a {@link Timeline}. A window defines a region of media
* currently available for playback along with additional information such as whether seeking is
* supported within the window. See {@link Timeline} for more details. The figure below shows some
* of the information defined by a window, as well as how this information relates to
* corresponding {@link Period}s in the timeline.
* <p align="center">
* <img src="doc-files/timeline-window.svg" alt="Information defined by a timeline window">
* </p>
* <p style="align:center"><img src="doc-files/timeline-window.svg" alt="Information defined by a
* timeline window">
*/
public static final class Window {
/**
* An identifier for the window. Not necessarily unique.
* A {@link #uid} for a window that must be used for single-window {@link Timeline Timelines}.
*/
public Object id;
public static final Object SINGLE_WINDOW_UID = new Object();
/**
* A unique identifier for the window. Single-window {@link Timeline Timelines} must use {@link
* #SINGLE_WINDOW_UID}.
*/
public Object uid;
/** A tag for the window. Not necessarily unique. */
@Nullable public Object tag;
/** The manifest of the window. May be {@code null}. */
@Nullable public Object manifest;
/**
* The start time of the presentation to which this window belongs in milliseconds since the
@ -247,14 +150,22 @@ public abstract class Timeline {
*/
public boolean isSeekable;
/**
* Whether this window may change when the timeline is updated.
*/
// TODO: Split this to better describe which parts of the window might change. For example it
// should be possible to individually determine whether the start and end positions of the
// window may change relative to the underlying periods. For an example of where it's useful to
// know that the end position is fixed whilst the start position may still change, see:
// https://github.com/google/ExoPlayer/issues/4780.
/** Whether this window may change when the timeline is updated. */
public boolean isDynamic;
/**
* The index of the first period that belongs to this window.
* Whether the media in this window is live. For informational purposes only.
*
* <p>Check {@link #isDynamic} to know whether this window may still change.
*/
public boolean isLive;
/** The index of the first period that belongs to this window. */
public int firstPeriodIndex;
/**
@ -281,17 +192,34 @@ public abstract class Timeline {
*/
public long positionInFirstPeriodUs;
/**
* Sets the data held by this window.
*/
public Window set(Object id, long presentationStartTimeMs, long windowStartTimeMs,
boolean isSeekable, boolean isDynamic, long defaultPositionUs, long durationUs,
int firstPeriodIndex, int lastPeriodIndex, long positionInFirstPeriodUs) {
this.id = id;
/** Creates window. */
public Window() {
uid = SINGLE_WINDOW_UID;
}
/** Sets the data held by this window. */
public Window set(
Object uid,
@Nullable Object tag,
@Nullable Object manifest,
long presentationStartTimeMs,
long windowStartTimeMs,
boolean isSeekable,
boolean isDynamic,
boolean isLive,
long defaultPositionUs,
long durationUs,
int firstPeriodIndex,
int lastPeriodIndex,
long positionInFirstPeriodUs) {
this.uid = uid;
this.tag = tag;
this.manifest = manifest;
this.presentationStartTimeMs = presentationStartTimeMs;
this.windowStartTimeMs = windowStartTimeMs;
this.isSeekable = isSeekable;
this.isDynamic = isDynamic;
this.isLive = isLive;
this.defaultPositionUs = defaultPositionUs;
this.durationUs = durationUs;
this.firstPeriodIndex = firstPeriodIndex;
@ -354,24 +282,27 @@ public abstract class Timeline {
/**
* Holds information about a period in a {@link Timeline}. A period defines a single logical piece
* of media, for example a media file. See {@link Timeline} for more details. The figure below
* shows some of the information defined by a period, as well as how this information relates to a
* corresponding {@link Window} in the timeline.
* <p align="center">
* <img src="doc-files/timeline-period.svg" alt="Information defined by a period">
* </p>
* of media, for example a media file. It may also define groups of ads inserted into the media,
* along with information about whether those ads have been loaded and played.
*
* <p>The figure below shows some of the information defined by a period, as well as how this
* information relates to a corresponding {@link Window} in the timeline.
*
* <p style="align:center"><img src="doc-files/timeline-period.svg" alt="Information defined by a
* period">
*/
public static final class Period {
/**
* An identifier for the period. Not necessarily unique.
* An identifier for the period. Not necessarily unique. May be null if the ids of the period
* are not required.
*/
public Object id;
@Nullable public Object id;
/**
* A unique identifier for the period.
* A unique identifier for the period. May be null if the ids of the period are not required.
*/
public Object uid;
@Nullable public Object uid;
/**
* The index of the window to which this period belongs.
@ -383,24 +314,68 @@ public abstract class Timeline {
*/
public long durationUs;
/**
* Whether this period contains an ad.
*/
public boolean isAd;
private long positionInWindowUs;
private AdPlaybackState adPlaybackState;
/** Creates a new instance with no ad playback state. */
public Period() {
adPlaybackState = AdPlaybackState.NONE;
}
/**
* Sets the data held by this period.
*
* @param id An identifier for the period. Not necessarily unique. May be null if the ids of the
* period are not required.
* @param uid A unique identifier for the period. May be null if the ids of the period are not
* required.
* @param windowIndex The index of the window to which this period belongs.
* @param durationUs The duration of this period in microseconds, or {@link C#TIME_UNSET} if
* unknown.
* @param positionInWindowUs The position of the start of this period relative to the start of
* the window to which it belongs, in milliseconds. May be negative if the start of the
* period is not within the window.
* @return This period, for convenience.
*/
public Period set(Object id, Object uid, int windowIndex, long durationUs,
long positionInWindowUs, boolean isAd) {
public Period set(
@Nullable Object id,
@Nullable Object uid,
int windowIndex,
long durationUs,
long positionInWindowUs) {
return set(id, uid, windowIndex, durationUs, positionInWindowUs, AdPlaybackState.NONE);
}
/**
* Sets the data held by this period.
*
* @param id An identifier for the period. Not necessarily unique. May be null if the ids of the
* period are not required.
* @param uid A unique identifier for the period. May be null if the ids of the period are not
* required.
* @param windowIndex The index of the window to which this period belongs.
* @param durationUs The duration of this period in microseconds, or {@link C#TIME_UNSET} if
* unknown.
* @param positionInWindowUs The position of the start of this period relative to the start of
* the window to which it belongs, in milliseconds. May be negative if the start of the
* period is not within the window.
* @param adPlaybackState The state of the period's ads, or {@link AdPlaybackState#NONE} if
* there are no ads.
* @return This period, for convenience.
*/
public Period set(
@Nullable Object id,
@Nullable Object uid,
int windowIndex,
long durationUs,
long positionInWindowUs,
AdPlaybackState adPlaybackState) {
this.id = id;
this.uid = uid;
this.windowIndex = windowIndex;
this.durationUs = durationUs;
this.positionInWindowUs = positionInWindowUs;
this.isAd = isAd;
this.adPlaybackState = adPlaybackState;
return this;
}
@ -436,6 +411,427 @@ public abstract class Timeline {
return positionInWindowUs;
}
/**
 * Returns the number of ad groups in the period, as held by the period's {@link
 * AdPlaybackState}.
 */
public int getAdGroupCount() {
  return adPlaybackState.adGroupCount;
}
/**
 * Returns the time of the ad group at index {@code adGroupIndex} in the period, in
 * microseconds.
 *
 * @param adGroupIndex The ad group index. Must be a valid index; no bounds check is performed
 *     before the array access.
 * @return The time of the ad group at the index, in microseconds, or {@link
 *     C#TIME_END_OF_SOURCE} for a post-roll ad group.
 */
public long getAdGroupTimeUs(int adGroupIndex) {
  return adPlaybackState.adGroupTimesUs[adGroupIndex];
}
/**
 * Returns the index of the first ad in the specified ad group that should be played, or the
 * number of ads in the ad group if no ads should be played.
 *
 * @param adGroupIndex The ad group index. Must be a valid index; no bounds check is performed.
 * @return The index of the first ad that should be played, or the number of ads in the ad group
 *     if no ads should be played.
 */
public int getFirstAdIndexToPlay(int adGroupIndex) {
  // Delegates to the corresponding AdGroup in the period's ad playback state.
  return adPlaybackState.adGroups[adGroupIndex].getFirstAdIndexToPlay();
}
/**
 * Returns the index of the next ad in the specified ad group that should be played after
 * playing {@code lastPlayedAdIndex}, or the number of ads in the ad group if no later ads
 * should be played.
 *
 * @param adGroupIndex The ad group index. Must be a valid index; no bounds check is performed.
 * @param lastPlayedAdIndex The last played ad index in the ad group.
 * @return The index of the next ad that should be played, or the number of ads in the ad group
 *     if the ad group does not have any ads remaining to play.
 */
public int getNextAdIndexToPlay(int adGroupIndex, int lastPlayedAdIndex) {
  // Delegates to the corresponding AdGroup in the period's ad playback state.
  return adPlaybackState.adGroups[adGroupIndex].getNextAdIndexToPlay(lastPlayedAdIndex);
}
/**
 * Returns whether the ad group at index {@code adGroupIndex} has been fully played, i.e. the
 * group has no unplayed ads remaining.
 *
 * @param adGroupIndex The ad group index.
 * @return Whether the ad group at index {@code adGroupIndex} has been played.
 */
public boolean hasPlayedAdGroup(int adGroupIndex) {
  AdPlaybackState.AdGroup adGroup = adPlaybackState.adGroups[adGroupIndex];
  return !adGroup.hasUnplayedAds();
}
/**
 * Returns the index of the ad group at or before {@code positionUs}, if that ad group is
 * unplayed. Returns {@link C#INDEX_UNSET} if the ad group at or before {@code positionUs} has
 * no ads remaining to be played, or if there is no such ad group.
 *
 * @param positionUs The position at or before which to find an ad group, in microseconds.
 * @return The index of the ad group, or {@link C#INDEX_UNSET}.
 */
public int getAdGroupIndexForPositionUs(long positionUs) {
  // The lookup logic lives in AdPlaybackState; this is a straight delegation.
  return adPlaybackState.getAdGroupIndexForPositionUs(positionUs);
}
/**
 * Returns the index of the next ad group after {@code positionUs} that has ads remaining to be
 * played. Returns {@link C#INDEX_UNSET} if there is no such ad group.
 *
 * @param positionUs The position after which to find an ad group, in microseconds.
 * @return The index of the ad group, or {@link C#INDEX_UNSET}.
 */
public int getAdGroupIndexAfterPositionUs(long positionUs) {
  // The period's duration is passed so the lookup can bound the search to this period.
  return adPlaybackState.getAdGroupIndexAfterPositionUs(positionUs, durationUs);
}
/**
 * Returns the number of ads in the ad group at index {@code adGroupIndex}, or
 * {@link C#LENGTH_UNSET} if not yet known.
 *
 * @param adGroupIndex The ad group index. Must be a valid index; no bounds check is performed.
 * @return The number of ads in the ad group, or {@link C#LENGTH_UNSET} if not yet known.
 */
public int getAdCountInAdGroup(int adGroupIndex) {
  return adPlaybackState.adGroups[adGroupIndex].count;
}
/**
 * Returns whether the URL for the specified ad is known.
 *
 * @param adGroupIndex The ad group index.
 * @param adIndexInAdGroup The ad index in the ad group.
 * @return Whether the URL for the specified ad is known.
 */
public boolean isAdAvailable(int adGroupIndex, int adIndexInAdGroup) {
  AdPlaybackState.AdGroup group = adPlaybackState.adGroups[adGroupIndex];
  if (group.count == C.LENGTH_UNSET) {
    // The ad count is not yet known, so no ad in the group can be available.
    return false;
  }
  return group.states[adIndexInAdGroup] != AdPlaybackState.AD_STATE_UNAVAILABLE;
}
/**
* Returns the duration of the ad at index {@code adIndexInAdGroup} in the ad group at
* {@code adGroupIndex}, in microseconds, or {@link C#TIME_UNSET} if not yet known.
*
* @param adGroupIndex The ad group index.
* @param adIndexInAdGroup The ad index in the ad group.
* @return The duration of the ad, or {@link C#TIME_UNSET} if not yet known.
*/
public long getAdDurationUs(int adGroupIndex, int adIndexInAdGroup) {
AdPlaybackState.AdGroup adGroup = adPlaybackState.adGroups[adGroupIndex];
return adGroup.count != C.LENGTH_UNSET ? adGroup.durationsUs[adIndexInAdGroup] : C.TIME_UNSET;
}
    /**
     * Returns the position offset in the first unplayed ad at which to begin playback, in
     * microseconds.
     */
    public long getAdResumePositionUs() {
      // Simple accessor onto the ad playback state.
      return adPlaybackState.adResumePositionUs;
    }
}
  /** An empty timeline. */
  public static final Timeline EMPTY =
      new Timeline() {
        @Override
        public int getWindowCount() {
          return 0;
        }
        @Override
        public Window getWindow(int windowIndex, Window window, long defaultPositionProjectionUs) {
          // The empty timeline has no windows, so every index is out of bounds.
          throw new IndexOutOfBoundsException();
        }
        @Override
        public int getPeriodCount() {
          return 0;
        }
        @Override
        public Period getPeriod(int periodIndex, Period period, boolean setIds) {
          // The empty timeline has no periods, so every index is out of bounds.
          throw new IndexOutOfBoundsException();
        }
        @Override
        public int getIndexOfPeriod(Object uid) {
          // No period can ever be found in an empty timeline.
          return C.INDEX_UNSET;
        }
        @Override
        public Object getUidOfPeriod(int periodIndex) {
          throw new IndexOutOfBoundsException();
        }
      };
/**
* Returns whether the timeline is empty.
*/
public final boolean isEmpty() {
return getWindowCount() == 0;
}
  /**
   * Returns the number of windows in the timeline.
   * Implementations must return 0 for an empty timeline.
   */
  public abstract int getWindowCount();
/**
* Returns the index of the window after the window at index {@code windowIndex} depending on the
* {@code repeatMode} and whether shuffling is enabled.
*
* @param windowIndex Index of a window in the timeline.
* @param repeatMode A repeat mode.
* @param shuffleModeEnabled Whether shuffling is enabled.
* @return The index of the next window, or {@link C#INDEX_UNSET} if this is the last window.
*/
public int getNextWindowIndex(int windowIndex, @Player.RepeatMode int repeatMode,
boolean shuffleModeEnabled) {
switch (repeatMode) {
case Player.REPEAT_MODE_OFF:
return windowIndex == getLastWindowIndex(shuffleModeEnabled) ? C.INDEX_UNSET
: windowIndex + 1;
case Player.REPEAT_MODE_ONE:
return windowIndex;
case Player.REPEAT_MODE_ALL:
return windowIndex == getLastWindowIndex(shuffleModeEnabled)
? getFirstWindowIndex(shuffleModeEnabled) : windowIndex + 1;
default:
throw new IllegalStateException();
}
}
/**
* Returns the index of the window before the window at index {@code windowIndex} depending on the
* {@code repeatMode} and whether shuffling is enabled.
*
* @param windowIndex Index of a window in the timeline.
* @param repeatMode A repeat mode.
* @param shuffleModeEnabled Whether shuffling is enabled.
* @return The index of the previous window, or {@link C#INDEX_UNSET} if this is the first window.
*/
public int getPreviousWindowIndex(int windowIndex, @Player.RepeatMode int repeatMode,
boolean shuffleModeEnabled) {
switch (repeatMode) {
case Player.REPEAT_MODE_OFF:
return windowIndex == getFirstWindowIndex(shuffleModeEnabled) ? C.INDEX_UNSET
: windowIndex - 1;
case Player.REPEAT_MODE_ONE:
return windowIndex;
case Player.REPEAT_MODE_ALL:
return windowIndex == getFirstWindowIndex(shuffleModeEnabled)
? getLastWindowIndex(shuffleModeEnabled) : windowIndex - 1;
default:
throw new IllegalStateException();
}
}
/**
* Returns the index of the last window in the playback order depending on whether shuffling is
* enabled.
*
* @param shuffleModeEnabled Whether shuffling is enabled.
* @return The index of the last window in the playback order, or {@link C#INDEX_UNSET} if the
* timeline is empty.
*/
public int getLastWindowIndex(boolean shuffleModeEnabled) {
return isEmpty() ? C.INDEX_UNSET : getWindowCount() - 1;
}
/**
* Returns the index of the first window in the playback order depending on whether shuffling is
* enabled.
*
* @param shuffleModeEnabled Whether shuffling is enabled.
* @return The index of the first window in the playback order, or {@link C#INDEX_UNSET} if the
* timeline is empty.
*/
public int getFirstWindowIndex(boolean shuffleModeEnabled) {
return isEmpty() ? C.INDEX_UNSET : 0;
}
  /**
   * Populates a {@link Window} with data for the window at the specified index.
   *
   * @param windowIndex The index of the window.
   * @param window The {@link Window} to populate. Must not be null.
   * @return The populated {@link Window}, for convenience.
   */
  public final Window getWindow(int windowIndex, Window window) {
    // Convenience overload: no projection of the default start position into the future.
    return getWindow(windowIndex, window, /* defaultPositionProjectionUs= */ 0);
  }
  /** @deprecated Use {@link #getWindow(int, Window)} instead. Tags will always be set. */
  @Deprecated
  public final Window getWindow(int windowIndex, Window window, boolean setTag) {
    // setTag is intentionally ignored: tags are now always populated.
    return getWindow(windowIndex, window, /* defaultPositionProjectionUs= */ 0);
  }
  /**
   * Populates a {@link Window} with data for the window at the specified index.
   *
   * @param windowIndex The index of the window.
   * @param window The {@link Window} to populate. Must not be null.
   * @param defaultPositionProjectionUs A duration into the future that the populated window's
   *     default start position should be projected.
   * @return The populated {@link Window}, for convenience.
   */
  public abstract Window getWindow(
      int windowIndex, Window window, long defaultPositionProjectionUs);
  /**
   * Returns the number of periods in the timeline.
   * Implementations must return 0 for an empty timeline.
   */
  public abstract int getPeriodCount();
/**
* Returns the index of the period after the period at index {@code periodIndex} depending on the
* {@code repeatMode} and whether shuffling is enabled.
*
* @param periodIndex Index of a period in the timeline.
* @param period A {@link Period} to be used internally. Must not be null.
* @param window A {@link Window} to be used internally. Must not be null.
* @param repeatMode A repeat mode.
* @param shuffleModeEnabled Whether shuffling is enabled.
* @return The index of the next period, or {@link C#INDEX_UNSET} if this is the last period.
*/
public final int getNextPeriodIndex(int periodIndex, Period period, Window window,
@Player.RepeatMode int repeatMode, boolean shuffleModeEnabled) {
int windowIndex = getPeriod(periodIndex, period).windowIndex;
if (getWindow(windowIndex, window).lastPeriodIndex == periodIndex) {
int nextWindowIndex = getNextWindowIndex(windowIndex, repeatMode, shuffleModeEnabled);
if (nextWindowIndex == C.INDEX_UNSET) {
return C.INDEX_UNSET;
}
return getWindow(nextWindowIndex, window).firstPeriodIndex;
}
return periodIndex + 1;
}
/**
* Returns whether the given period is the last period of the timeline depending on the
* {@code repeatMode} and whether shuffling is enabled.
*
* @param periodIndex A period index.
* @param period A {@link Period} to be used internally. Must not be null.
* @param window A {@link Window} to be used internally. Must not be null.
* @param repeatMode A repeat mode.
* @param shuffleModeEnabled Whether shuffling is enabled.
* @return Whether the period of the given index is the last period of the timeline.
*/
public final boolean isLastPeriod(int periodIndex, Period period, Window window,
@Player.RepeatMode int repeatMode, boolean shuffleModeEnabled) {
return getNextPeriodIndex(periodIndex, period, window, repeatMode, shuffleModeEnabled)
== C.INDEX_UNSET;
}
  /**
   * Calls {@link #getPeriodPosition(Window, Period, int, long, long)} with a zero default position
   * projection.
   */
  public final Pair<Object, Long> getPeriodPosition(
      Window window, Period period, int windowIndex, long windowPositionUs) {
    // With zero projection the five-arg overload can never return null, so the
    // checkNotNull cannot fail; it documents (and enforces) the non-null contract.
    return Assertions.checkNotNull(
        getPeriodPosition(
            window, period, windowIndex, windowPositionUs, /* defaultPositionProjectionUs= */ 0));
  }
/**
* Converts (windowIndex, windowPositionUs) to the corresponding (periodUid, periodPositionUs).
*
* @param window A {@link Window} that may be overwritten.
* @param period A {@link Period} that may be overwritten.
* @param windowIndex The window index.
* @param windowPositionUs The window time, or {@link C#TIME_UNSET} to use the window's default
* start position.
* @param defaultPositionProjectionUs If {@code windowPositionUs} is {@link C#TIME_UNSET}, the
* duration into the future by which the window's position should be projected.
* @return The corresponding (periodUid, periodPositionUs), or null if {@code #windowPositionUs}
* is {@link C#TIME_UNSET}, {@code defaultPositionProjectionUs} is non-zero, and the window's
* position could not be projected by {@code defaultPositionProjectionUs}.
*/
@Nullable
public final Pair<Object, Long> getPeriodPosition(
Window window,
Period period,
int windowIndex,
long windowPositionUs,
long defaultPositionProjectionUs) {
Assertions.checkIndex(windowIndex, 0, getWindowCount());
getWindow(windowIndex, window, defaultPositionProjectionUs);
if (windowPositionUs == C.TIME_UNSET) {
windowPositionUs = window.getDefaultPositionUs();
if (windowPositionUs == C.TIME_UNSET) {
return null;
}
}
int periodIndex = window.firstPeriodIndex;
long periodPositionUs = window.getPositionInFirstPeriodUs() + windowPositionUs;
long periodDurationUs = getPeriod(periodIndex, period, /* setIds= */ true).getDurationUs();
while (periodDurationUs != C.TIME_UNSET && periodPositionUs >= periodDurationUs
&& periodIndex < window.lastPeriodIndex) {
periodPositionUs -= periodDurationUs;
periodDurationUs = getPeriod(++periodIndex, period, /* setIds= */ true).getDurationUs();
}
return Pair.create(Assertions.checkNotNull(period.uid), periodPositionUs);
}
  /**
   * Populates a {@link Period} with data for the period with the specified unique identifier.
   *
   * @param periodUid The unique identifier of the period.
   * @param period The {@link Period} to populate. Must not be null.
   * @return The populated {@link Period}, for convenience.
   */
  public Period getPeriodByUid(Object periodUid, Period period) {
    // Resolve the uid to an index, then populate with ids set so the caller can
    // read period.uid/id back. Throws from getPeriod if the uid is unknown
    // (getIndexOfPeriod returns C.INDEX_UNSET in that case).
    return getPeriod(getIndexOfPeriod(periodUid), period, /* setIds= */ true);
  }
  /**
   * Populates a {@link Period} with data for the period at the specified index. {@link Period#id}
   * and {@link Period#uid} will be set to null.
   *
   * @param periodIndex The index of the period.
   * @param period The {@link Period} to populate. Must not be null.
   * @return The populated {@link Period}, for convenience.
   */
  public final Period getPeriod(int periodIndex, Period period) {
    // setIds=false for efficiency: id/uid lookups may be expensive in implementations.
    return getPeriod(periodIndex, period, false);
  }
  /**
   * Populates a {@link Period} with data for the period at the specified index.
   *
   * @param periodIndex The index of the period.
   * @param period The {@link Period} to populate. Must not be null.
   * @param setIds Whether {@link Period#id} and {@link Period#uid} should be populated. If false,
   *     the fields will be set to null. The caller should pass false for efficiency reasons unless
   *     the fields are required.
   * @return The populated {@link Period}, for convenience.
   */
  public abstract Period getPeriod(int periodIndex, Period period, boolean setIds);
  /**
   * Returns the index of the period identified by its unique {@link Period#uid}, or {@link
   * C#INDEX_UNSET} if the period is not in the timeline.
   *
   * @param uid A unique identifier for a period.
   * @return The index of the period, or {@link C#INDEX_UNSET} if the period was not found.
   */
  public abstract int getIndexOfPeriod(Object uid);
  /**
   * Returns the unique id of the period identified by its index in the timeline.
   *
   * @param periodIndex The index of the period.
   * @return The unique id of the period.
   */
  public abstract Object getUidOfPeriod(int periodIndex);
}

Просмотреть файл

@ -0,0 +1,101 @@
/*
* Copyright (C) 2019 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.mozilla.thirdparty.com.google.android.exoplayer2;
import android.annotation.SuppressLint;
import android.content.Context;
import android.os.PowerManager;
import android.os.PowerManager.WakeLock;
import androidx.annotation.Nullable;
import org.mozilla.thirdparty.com.google.android.exoplayer2.util.Log;
/**
* Handles a {@link WakeLock}.
*
* <p>The handling of wake locks requires the {@link android.Manifest.permission#WAKE_LOCK}
* permission.
*/
/* package */ final class WakeLockManager {

  private static final String TAG = "WakeLockManager";
  private static final String WAKE_LOCK_TAG = "ExoPlayer:WakeLockManager";

  // May be null if the system does not provide a PowerManager service.
  @Nullable private final PowerManager powerManager;
  // Created lazily on first enable; kept (not re-created) across enable/disable cycles.
  @Nullable private WakeLock wakeLock;
  private boolean enabled;
  private boolean stayAwake;

  public WakeLockManager(Context context) {
    // Use the application context to avoid retaining an Activity.
    Context applicationContext = context.getApplicationContext();
    powerManager = (PowerManager) applicationContext.getSystemService(Context.POWER_SERVICE);
  }

  /**
   * Sets whether to enable the acquiring and releasing of the {@link WakeLock}.
   *
   * <p>By default, wake lock handling is not enabled. Enabling this will acquire the wake lock if
   * necessary. Disabling this will release the wake lock if it is held.
   *
   * <p>Enabling {@link WakeLock} requires the {@link android.Manifest.permission#WAKE_LOCK}.
   *
   * @param enabled True if the player should handle a {@link WakeLock}, false otherwise.
   */
  public void setEnabled(boolean enabled) {
    if (enabled && wakeLock == null) {
      if (powerManager == null) {
        Log.w(TAG, "PowerManager is null, therefore not creating the WakeLock.");
        return;
      }
      wakeLock = powerManager.newWakeLock(PowerManager.PARTIAL_WAKE_LOCK, WAKE_LOCK_TAG);
      // Reference counting is disabled so acquire/release are idempotent.
      wakeLock.setReferenceCounted(false);
    }
    this.enabled = enabled;
    updateWakeLock();
  }

  /**
   * Sets whether to acquire or release the {@link WakeLock}.
   *
   * <p>Please note this method requires wake lock handling to be enabled through setEnabled(boolean
   * enable) to actually have an impact on the {@link WakeLock}.
   *
   * @param stayAwake True if the player should acquire the {@link WakeLock}. False if the player
   *     should release.
   */
  public void setStayAwake(boolean stayAwake) {
    this.stayAwake = stayAwake;
    updateWakeLock();
  }

  // WakelockTimeout suppressed because the time the wake lock is needed for is unknown (could be
  // listening to radio with screen off for multiple hours), therefore we can not determine a
  // reasonable timeout that would not affect the user.
  @SuppressLint("WakelockTimeout")
  private void updateWakeLock() {
    if (wakeLock == null) {
      // Handling was never successfully enabled; nothing to do.
      return;
    }
    boolean shouldHold = enabled && stayAwake;
    if (shouldHold) {
      wakeLock.acquire();
    } else {
      wakeLock.release();
    }
  }
}

Просмотреть файл

@ -0,0 +1,94 @@
/*
* Copyright (C) 2020 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.mozilla.thirdparty.com.google.android.exoplayer2;
import android.content.Context;
import android.net.wifi.WifiManager;
import android.net.wifi.WifiManager.WifiLock;
import androidx.annotation.Nullable;
import org.mozilla.thirdparty.com.google.android.exoplayer2.util.Log;
/**
* Handles a {@link WifiLock}
*
* <p>The handling of wifi locks requires the {@link android.Manifest.permission#WAKE_LOCK}
* permission.
*/
/* package */ final class WifiLockManager {

  private static final String TAG = "WifiLockManager";
  private static final String WIFI_LOCK_TAG = "ExoPlayer:WifiLockManager";

  // May be null if the system does not provide a WifiManager service.
  @Nullable private final WifiManager wifiManager;
  // Created lazily on first enable; kept (not re-created) across enable/disable cycles.
  @Nullable private WifiLock wifiLock;
  private boolean enabled;
  private boolean stayAwake;

  public WifiLockManager(Context context) {
    // Use the application context to avoid retaining an Activity.
    Context applicationContext = context.getApplicationContext();
    wifiManager = (WifiManager) applicationContext.getSystemService(Context.WIFI_SERVICE);
  }

  /**
   * Sets whether to enable the usage of a {@link WifiLock}.
   *
   * <p>By default, wifi lock handling is not enabled. Enabling will acquire the wifi lock if
   * necessary. Disabling will release the wifi lock if held.
   *
   * <p>Enabling {@link WifiLock} requires the {@link android.Manifest.permission#WAKE_LOCK}.
   *
   * @param enabled True if the player should handle a {@link WifiLock}.
   */
  public void setEnabled(boolean enabled) {
    if (enabled) {
      if (wifiLock == null) {
        if (wifiManager == null) {
          Log.w(TAG, "WifiManager is null, therefore not creating the WifiLock.");
          return;
        }
        wifiLock = wifiManager.createWifiLock(WifiManager.WIFI_MODE_FULL_HIGH_PERF, WIFI_LOCK_TAG);
        // Reference counting is disabled so acquire/release are idempotent.
        wifiLock.setReferenceCounted(false);
      }
    }
    this.enabled = enabled;
    updateWifiLock();
  }

  /**
   * Sets whether to acquire or release the {@link WifiLock}.
   *
   * <p>The wifi lock will not be acquired unless handling has been enabled through {@link
   * #setEnabled(boolean)}.
   *
   * @param stayAwake True if the player should acquire the {@link WifiLock}. False if it should
   *     release.
   */
  public void setStayAwake(boolean stayAwake) {
    this.stayAwake = stayAwake;
    updateWifiLock();
  }

  private void updateWifiLock() {
    if (wifiLock == null) {
      // Handling was never successfully enabled; nothing to do.
      return;
    }
    boolean shouldHold = enabled && stayAwake;
    if (shouldHold) {
      wifiLock.acquire();
    } else {
      wifiLock.release();
    }
  }
}

Просмотреть файл

@ -0,0 +1,881 @@
/*
* Copyright (C) 2018 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.mozilla.thirdparty.com.google.android.exoplayer2.analytics;
import android.view.Surface;
import androidx.annotation.Nullable;
import org.mozilla.thirdparty.com.google.android.exoplayer2.C;
import org.mozilla.thirdparty.com.google.android.exoplayer2.ExoPlaybackException;
import org.mozilla.thirdparty.com.google.android.exoplayer2.Format;
import org.mozilla.thirdparty.com.google.android.exoplayer2.PlaybackParameters;
import org.mozilla.thirdparty.com.google.android.exoplayer2.Player;
import org.mozilla.thirdparty.com.google.android.exoplayer2.Player.PlaybackSuppressionReason;
import org.mozilla.thirdparty.com.google.android.exoplayer2.Timeline;
import org.mozilla.thirdparty.com.google.android.exoplayer2.Timeline.Period;
import org.mozilla.thirdparty.com.google.android.exoplayer2.Timeline.Window;
import org.mozilla.thirdparty.com.google.android.exoplayer2.analytics.AnalyticsListener.EventTime;
import org.mozilla.thirdparty.com.google.android.exoplayer2.audio.AudioAttributes;
import org.mozilla.thirdparty.com.google.android.exoplayer2.audio.AudioListener;
import org.mozilla.thirdparty.com.google.android.exoplayer2.audio.AudioRendererEventListener;
import org.mozilla.thirdparty.com.google.android.exoplayer2.decoder.DecoderCounters;
import org.mozilla.thirdparty.com.google.android.exoplayer2.drm.DefaultDrmSessionEventListener;
import org.mozilla.thirdparty.com.google.android.exoplayer2.metadata.Metadata;
import org.mozilla.thirdparty.com.google.android.exoplayer2.metadata.MetadataOutput;
import org.mozilla.thirdparty.com.google.android.exoplayer2.source.MediaSource.MediaPeriodId;
import org.mozilla.thirdparty.com.google.android.exoplayer2.source.MediaSourceEventListener;
import org.mozilla.thirdparty.com.google.android.exoplayer2.source.TrackGroupArray;
import org.mozilla.thirdparty.com.google.android.exoplayer2.trackselection.TrackSelectionArray;
import org.mozilla.thirdparty.com.google.android.exoplayer2.upstream.BandwidthMeter;
import org.mozilla.thirdparty.com.google.android.exoplayer2.util.Assertions;
import org.mozilla.thirdparty.com.google.android.exoplayer2.util.Clock;
import org.mozilla.thirdparty.com.google.android.exoplayer2.video.VideoListener;
import org.mozilla.thirdparty.com.google.android.exoplayer2.video.VideoRendererEventListener;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Set;
import java.util.concurrent.CopyOnWriteArraySet;
import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
import org.checkerframework.checker.nullness.qual.RequiresNonNull;
/**
* Data collector which is able to forward analytics events to {@link AnalyticsListener}s by
* listening to all available ExoPlayer listeners.
*/
public class AnalyticsCollector
implements Player.EventListener,
MetadataOutput,
AudioRendererEventListener,
VideoRendererEventListener,
MediaSourceEventListener,
BandwidthMeter.EventListener,
DefaultDrmSessionEventListener,
VideoListener,
AudioListener {
  // Registered listeners. Copy-on-write so events can be dispatched while
  // listeners are added/removed concurrently without locking.
  private final CopyOnWriteArraySet<AnalyticsListener> listeners;
  // Source of timestamps for generated EventTimes.
  private final Clock clock;
  // Reusable window used when generating event times.
  private final Window window;
  // Tracks the queue of active media periods to attribute events to.
  private final MediaPeriodQueueTracker mediaPeriodQueueTracker;
  // Set once via setPlayer and never cleared (monotonic non-null).
  private @MonotonicNonNull Player player;
  /**
   * Creates an analytics collector.
   *
   * @param clock A {@link Clock} used to generate timestamps.
   */
  public AnalyticsCollector(Clock clock) {
    this.clock = Assertions.checkNotNull(clock);
    listeners = new CopyOnWriteArraySet<>();
    mediaPeriodQueueTracker = new MediaPeriodQueueTracker();
    window = new Window();
  }
  /**
   * Adds a listener for analytics events.
   *
   * @param listener The listener to add.
   */
  public void addListener(AnalyticsListener listener) {
    listeners.add(listener);
  }
  /**
   * Removes a previously added analytics event listener.
   *
   * @param listener The listener to remove.
   */
  public void removeListener(AnalyticsListener listener) {
    listeners.remove(listener);
  }
/**
* Sets the player for which data will be collected. Must only be called if no player has been set
* yet or the current player is idle.
*
* @param player The {@link Player} for which data will be collected.
*/
public void setPlayer(Player player) {
Assertions.checkState(
this.player == null || mediaPeriodQueueTracker.mediaPeriodInfoQueue.isEmpty());
this.player = Assertions.checkNotNull(player);
}
// External events.
/**
* Notify analytics collector that a seek operation will start. Should be called before the player
* adjusts its state and position to the seek.
*/
public final void notifySeekStarted() {
if (!mediaPeriodQueueTracker.isSeeking()) {
EventTime eventTime = generatePlayingMediaPeriodEventTime();
mediaPeriodQueueTracker.onSeekStarted();
for (AnalyticsListener listener : listeners) {
listener.onSeekStarted(eventTime);
}
}
}
/**
* Resets the analytics collector for a new media source. Should be called before the player is
* prepared with a new media source.
*/
public final void resetForNewMediaSource() {
// Copying the list is needed because onMediaPeriodReleased will modify the list.
List<MediaPeriodInfo> mediaPeriodInfos =
new ArrayList<>(mediaPeriodQueueTracker.mediaPeriodInfoQueue);
for (MediaPeriodInfo mediaPeriodInfo : mediaPeriodInfos) {
onMediaPeriodReleased(mediaPeriodInfo.windowIndex, mediaPeriodInfo.mediaPeriodId);
}
}
// MetadataOutput implementation.
@Override
public final void onMetadata(Metadata metadata) {
EventTime eventTime = generatePlayingMediaPeriodEventTime();
for (AnalyticsListener listener : listeners) {
listener.onMetadata(eventTime, metadata);
}
}
// AudioRendererEventListener implementation.
@Override
public final void onAudioEnabled(DecoderCounters counters) {
// The renderers are only enabled after we changed the playing media period.
EventTime eventTime = generatePlayingMediaPeriodEventTime();
for (AnalyticsListener listener : listeners) {
listener.onDecoderEnabled(eventTime, C.TRACK_TYPE_AUDIO, counters);
}
}
@Override
public final void onAudioDecoderInitialized(
String decoderName, long initializedTimestampMs, long initializationDurationMs) {
EventTime eventTime = generateReadingMediaPeriodEventTime();
for (AnalyticsListener listener : listeners) {
listener.onDecoderInitialized(
eventTime, C.TRACK_TYPE_AUDIO, decoderName, initializationDurationMs);
}
}
@Override
public final void onAudioInputFormatChanged(Format format) {
EventTime eventTime = generateReadingMediaPeriodEventTime();
for (AnalyticsListener listener : listeners) {
listener.onDecoderInputFormatChanged(eventTime, C.TRACK_TYPE_AUDIO, format);
}
}
@Override
public final void onAudioSinkUnderrun(
int bufferSize, long bufferSizeMs, long elapsedSinceLastFeedMs) {
EventTime eventTime = generateReadingMediaPeriodEventTime();
for (AnalyticsListener listener : listeners) {
listener.onAudioUnderrun(eventTime, bufferSize, bufferSizeMs, elapsedSinceLastFeedMs);
}
}
@Override
public final void onAudioDisabled(DecoderCounters counters) {
// The renderers are disabled after we changed the playing media period on the playback thread
// but before this change is reported to the app thread.
EventTime eventTime = generateLastReportedPlayingMediaPeriodEventTime();
for (AnalyticsListener listener : listeners) {
listener.onDecoderDisabled(eventTime, C.TRACK_TYPE_AUDIO, counters);
}
}
// AudioListener implementation.
@Override
public final void onAudioSessionId(int audioSessionId) {
EventTime eventTime = generateReadingMediaPeriodEventTime();
for (AnalyticsListener listener : listeners) {
listener.onAudioSessionId(eventTime, audioSessionId);
}
}
@Override
public void onAudioAttributesChanged(AudioAttributes audioAttributes) {
EventTime eventTime = generateReadingMediaPeriodEventTime();
for (AnalyticsListener listener : listeners) {
listener.onAudioAttributesChanged(eventTime, audioAttributes);
}
}
@Override
public void onVolumeChanged(float audioVolume) {
EventTime eventTime = generateReadingMediaPeriodEventTime();
for (AnalyticsListener listener : listeners) {
listener.onVolumeChanged(eventTime, audioVolume);
}
}
// VideoRendererEventListener implementation.
@Override
public final void onVideoEnabled(DecoderCounters counters) {
// The renderers are only enabled after we changed the playing media period.
EventTime eventTime = generatePlayingMediaPeriodEventTime();
for (AnalyticsListener listener : listeners) {
listener.onDecoderEnabled(eventTime, C.TRACK_TYPE_VIDEO, counters);
}
}
@Override
public final void onVideoDecoderInitialized(
String decoderName, long initializedTimestampMs, long initializationDurationMs) {
EventTime eventTime = generateReadingMediaPeriodEventTime();
for (AnalyticsListener listener : listeners) {
listener.onDecoderInitialized(
eventTime, C.TRACK_TYPE_VIDEO, decoderName, initializationDurationMs);
}
}
@Override
public final void onVideoInputFormatChanged(Format format) {
EventTime eventTime = generateReadingMediaPeriodEventTime();
for (AnalyticsListener listener : listeners) {
listener.onDecoderInputFormatChanged(eventTime, C.TRACK_TYPE_VIDEO, format);
}
}
@Override
public final void onDroppedFrames(int count, long elapsedMs) {
EventTime eventTime = generateLastReportedPlayingMediaPeriodEventTime();
for (AnalyticsListener listener : listeners) {
listener.onDroppedVideoFrames(eventTime, count, elapsedMs);
}
}
@Override
public final void onVideoDisabled(DecoderCounters counters) {
// The renderers are disabled after we changed the playing media period on the playback thread
// but before this change is reported to the app thread.
EventTime eventTime = generateLastReportedPlayingMediaPeriodEventTime();
for (AnalyticsListener listener : listeners) {
listener.onDecoderDisabled(eventTime, C.TRACK_TYPE_VIDEO, counters);
}
}
@Override
public final void onRenderedFirstFrame(@Nullable Surface surface) {
EventTime eventTime = generateReadingMediaPeriodEventTime();
for (AnalyticsListener listener : listeners) {
listener.onRenderedFirstFrame(eventTime, surface);
}
}
  // VideoListener implementation.
  @Override
  public final void onRenderedFirstFrame() {
    // Do nothing. Already reported in VideoRendererEventListener.onRenderedFirstFrame.
  }
@Override
public final void onVideoSizeChanged(
int width, int height, int unappliedRotationDegrees, float pixelWidthHeightRatio) {
EventTime eventTime = generateReadingMediaPeriodEventTime();
for (AnalyticsListener listener : listeners) {
listener.onVideoSizeChanged(
eventTime, width, height, unappliedRotationDegrees, pixelWidthHeightRatio);
}
}
@Override
public void onSurfaceSizeChanged(int width, int height) {
EventTime eventTime = generateReadingMediaPeriodEventTime();
for (AnalyticsListener listener : listeners) {
listener.onSurfaceSizeChanged(eventTime, width, height);
}
}
// MediaSourceEventListener implementation.
@Override
public final void onMediaPeriodCreated(int windowIndex, MediaPeriodId mediaPeriodId) {
mediaPeriodQueueTracker.onMediaPeriodCreated(windowIndex, mediaPeriodId);
EventTime eventTime = generateMediaPeriodEventTime(windowIndex, mediaPeriodId);
for (AnalyticsListener listener : listeners) {
listener.onMediaPeriodCreated(eventTime);
}
}
@Override
public final void onMediaPeriodReleased(int windowIndex, MediaPeriodId mediaPeriodId) {
EventTime eventTime = generateMediaPeriodEventTime(windowIndex, mediaPeriodId);
if (mediaPeriodQueueTracker.onMediaPeriodReleased(mediaPeriodId)) {
for (AnalyticsListener listener : listeners) {
listener.onMediaPeriodReleased(eventTime);
}
}
}
@Override
public final void onLoadStarted(
int windowIndex,
@Nullable MediaPeriodId mediaPeriodId,
LoadEventInfo loadEventInfo,
MediaLoadData mediaLoadData) {
EventTime eventTime = generateMediaPeriodEventTime(windowIndex, mediaPeriodId);
for (AnalyticsListener listener : listeners) {
listener.onLoadStarted(eventTime, loadEventInfo, mediaLoadData);
}
}
@Override
public final void onLoadCompleted(
int windowIndex,
@Nullable MediaPeriodId mediaPeriodId,
LoadEventInfo loadEventInfo,
MediaLoadData mediaLoadData) {
EventTime eventTime = generateMediaPeriodEventTime(windowIndex, mediaPeriodId);
for (AnalyticsListener listener : listeners) {
listener.onLoadCompleted(eventTime, loadEventInfo, mediaLoadData);
}
}
@Override
public final void onLoadCanceled(
int windowIndex,
@Nullable MediaPeriodId mediaPeriodId,
LoadEventInfo loadEventInfo,
MediaLoadData mediaLoadData) {
EventTime eventTime = generateMediaPeriodEventTime(windowIndex, mediaPeriodId);
for (AnalyticsListener listener : listeners) {
listener.onLoadCanceled(eventTime, loadEventInfo, mediaLoadData);
}
}
@Override
public final void onLoadError(
int windowIndex,
@Nullable MediaPeriodId mediaPeriodId,
LoadEventInfo loadEventInfo,
MediaLoadData mediaLoadData,
IOException error,
boolean wasCanceled) {
EventTime eventTime = generateMediaPeriodEventTime(windowIndex, mediaPeriodId);
for (AnalyticsListener listener : listeners) {
listener.onLoadError(eventTime, loadEventInfo, mediaLoadData, error, wasCanceled);
}
}
@Override
public final void onReadingStarted(int windowIndex, MediaPeriodId mediaPeriodId) {
mediaPeriodQueueTracker.onReadingStarted(mediaPeriodId);
EventTime eventTime = generateMediaPeriodEventTime(windowIndex, mediaPeriodId);
for (AnalyticsListener listener : listeners) {
listener.onReadingStarted(eventTime);
}
}
@Override
public final void onUpstreamDiscarded(
int windowIndex, @Nullable MediaPeriodId mediaPeriodId, MediaLoadData mediaLoadData) {
EventTime eventTime = generateMediaPeriodEventTime(windowIndex, mediaPeriodId);
for (AnalyticsListener listener : listeners) {
listener.onUpstreamDiscarded(eventTime, mediaLoadData);
}
}
/** Forwards a downstream-format-changed event to every registered listener. */
@Override
public final void onDownstreamFormatChanged(
    int windowIndex, @Nullable MediaPeriodId mediaPeriodId, MediaLoadData mediaLoadData) {
  EventTime time = generateMediaPeriodEventTime(windowIndex, mediaPeriodId);
  for (AnalyticsListener analyticsListener : listeners) {
    analyticsListener.onDownstreamFormatChanged(time, mediaLoadData);
  }
}
// Player.EventListener implementation.
// TODO: Add onFinishedReportingChanges to Player.EventListener to know when a set of simultaneous
// callbacks finished. This helps to assign exactly the same EventTime to all of them instead of
// having slightly different real times.
/** Updates the queue tracker with the new timeline, then notifies listeners. */
@Override
public final void onTimelineChanged(Timeline timeline, @Player.TimelineChangeReason int reason) {
  // The tracker must see the new timeline before the event time is resolved.
  mediaPeriodQueueTracker.onTimelineChanged(timeline);
  EventTime time = generatePlayingMediaPeriodEventTime();
  for (AnalyticsListener analyticsListener : listeners) {
    analyticsListener.onTimelineChanged(time, reason);
  }
}
/** Forwards a track-selection change to every registered listener. */
@Override
public final void onTracksChanged(
    TrackGroupArray trackGroups, TrackSelectionArray trackSelections) {
  EventTime time = generatePlayingMediaPeriodEventTime();
  for (AnalyticsListener analyticsListener : listeners) {
    analyticsListener.onTracksChanged(time, trackGroups, trackSelections);
  }
}
/** Forwards a loading-state change to every registered listener. */
@Override
public final void onLoadingChanged(boolean isLoading) {
  EventTime time = generatePlayingMediaPeriodEventTime();
  for (AnalyticsListener analyticsListener : listeners) {
    analyticsListener.onLoadingChanged(time, isLoading);
  }
}
/** Forwards a player-state change to every registered listener. */
@Override
public final void onPlayerStateChanged(boolean playWhenReady, @Player.State int playbackState) {
  EventTime time = generatePlayingMediaPeriodEventTime();
  for (AnalyticsListener analyticsListener : listeners) {
    analyticsListener.onPlayerStateChanged(time, playWhenReady, playbackState);
  }
}
/** Forwards a playback-suppression-reason change to every registered listener. */
@Override
public void onPlaybackSuppressionReasonChanged(
    @PlaybackSuppressionReason int playbackSuppressionReason) {
  EventTime time = generatePlayingMediaPeriodEventTime();
  for (AnalyticsListener analyticsListener : listeners) {
    analyticsListener.onPlaybackSuppressionReasonChanged(time, playbackSuppressionReason);
  }
}
/** Forwards an is-playing change to every registered listener. */
@Override
public void onIsPlayingChanged(boolean isPlaying) {
  EventTime time = generatePlayingMediaPeriodEventTime();
  for (AnalyticsListener analyticsListener : listeners) {
    analyticsListener.onIsPlayingChanged(time, isPlaying);
  }
}
/** Forwards a repeat-mode change to every registered listener. */
@Override
public final void onRepeatModeChanged(@Player.RepeatMode int repeatMode) {
  EventTime time = generatePlayingMediaPeriodEventTime();
  for (AnalyticsListener analyticsListener : listeners) {
    analyticsListener.onRepeatModeChanged(time, repeatMode);
  }
}
/**
 * Forwards a shuffle-mode change to every registered listener. Note the analytics callback is
 * named {@code onShuffleModeChanged}, unlike this {@code Player.EventListener} method.
 */
@Override
public final void onShuffleModeEnabledChanged(boolean shuffleModeEnabled) {
  EventTime time = generatePlayingMediaPeriodEventTime();
  for (AnalyticsListener analyticsListener : listeners) {
    analyticsListener.onShuffleModeChanged(time, shuffleModeEnabled);
  }
}
/**
 * Forwards a fatal player error to every registered listener, using the last reported playing
 * media period (the current one may be unreliable after the failure).
 */
@Override
public final void onPlayerError(ExoPlaybackException error) {
  EventTime time = generateLastReportedPlayingMediaPeriodEventTime();
  for (AnalyticsListener analyticsListener : listeners) {
    analyticsListener.onPlayerError(time, error);
  }
}
/** Updates the queue tracker with the discontinuity, then notifies listeners. */
@Override
public final void onPositionDiscontinuity(@Player.DiscontinuityReason int reason) {
  // Tracked state must be updated before generating the event time.
  mediaPeriodQueueTracker.onPositionDiscontinuity(reason);
  EventTime time = generatePlayingMediaPeriodEventTime();
  for (AnalyticsListener analyticsListener : listeners) {
    analyticsListener.onPositionDiscontinuity(time, reason);
  }
}
/** Forwards a playback-parameters change to every registered listener. */
@Override
public final void onPlaybackParametersChanged(PlaybackParameters playbackParameters) {
  EventTime time = generatePlayingMediaPeriodEventTime();
  for (AnalyticsListener analyticsListener : listeners) {
    analyticsListener.onPlaybackParametersChanged(time, playbackParameters);
  }
}
/** Reports a processed seek, but only if a seek is actually in flight. */
@Override
public final void onSeekProcessed() {
  // Ignore the callback unless a seek was previously started.
  if (!mediaPeriodQueueTracker.isSeeking()) {
    return;
  }
  mediaPeriodQueueTracker.onSeekProcessed();
  EventTime time = generatePlayingMediaPeriodEventTime();
  for (AnalyticsListener analyticsListener : listeners) {
    analyticsListener.onSeekProcessed(time);
  }
}
// BandwidthMeter.Listener implementation.
/** Forwards a bandwidth-meter sample to listeners as a bandwidth estimate event. */
@Override
public final void onBandwidthSample(int elapsedMs, long bytes, long bitrate) {
  EventTime time = generateLoadingMediaPeriodEventTime();
  for (AnalyticsListener analyticsListener : listeners) {
    analyticsListener.onBandwidthEstimate(time, elapsedMs, bytes, bitrate);
  }
}
// DefaultDrmSessionManager.EventListener implementation.
/** Forwards a DRM-session-acquired event to every registered listener. */
@Override
public final void onDrmSessionAcquired() {
  EventTime time = generateReadingMediaPeriodEventTime();
  for (AnalyticsListener analyticsListener : listeners) {
    analyticsListener.onDrmSessionAcquired(time);
  }
}
/** Forwards a DRM-keys-loaded event to every registered listener. */
@Override
public final void onDrmKeysLoaded() {
  EventTime time = generateReadingMediaPeriodEventTime();
  for (AnalyticsListener analyticsListener : listeners) {
    analyticsListener.onDrmKeysLoaded(time);
  }
}
/** Forwards a DRM session manager error to every registered listener. */
@Override
public final void onDrmSessionManagerError(Exception error) {
  EventTime time = generateReadingMediaPeriodEventTime();
  for (AnalyticsListener analyticsListener : listeners) {
    analyticsListener.onDrmSessionManagerError(time, error);
  }
}
/** Forwards a DRM-keys-restored event to every registered listener. */
@Override
public final void onDrmKeysRestored() {
  EventTime time = generateReadingMediaPeriodEventTime();
  for (AnalyticsListener analyticsListener : listeners) {
    analyticsListener.onDrmKeysRestored(time);
  }
}
/** Forwards a DRM-keys-removed event to every registered listener. */
@Override
public final void onDrmKeysRemoved() {
  EventTime time = generateReadingMediaPeriodEventTime();
  for (AnalyticsListener analyticsListener : listeners) {
    analyticsListener.onDrmKeysRemoved(time);
  }
}
/** Forwards a DRM-session-released event to every registered listener. */
@Override
public final void onDrmSessionReleased() {
  EventTime time = generateLastReportedPlayingMediaPeriodEventTime();
  for (AnalyticsListener analyticsListener : listeners) {
    analyticsListener.onDrmSessionReleased(time);
  }
}
// Internal methods.
/**
 * Returns a read-only view of the registered listeners.
 *
 * <p>The returned set is an unmodifiable view backed by the live listener set, so it reflects
 * subsequent additions and removals but cannot itself be modified.
 */
protected Set<AnalyticsListener> getListeners() {
  return Collections.unmodifiableSet(listeners);
}
/**
 * Returns a new {@link EventTime} for the specified timeline, window and media period id.
 *
 * <p>The event position is resolved as follows: for the currently playing ad or the current
 * content window, the player's live position is used; for other (future) windows the window's
 * default start position is used; for future ads a start position of 0 is assumed.
 */
@RequiresNonNull("player")
protected EventTime generateEventTime(
    Timeline timeline, int windowIndex, @Nullable MediaPeriodId mediaPeriodId) {
  if (timeline.isEmpty()) {
    // Ensure media period id is only reported together with a valid timeline.
    mediaPeriodId = null;
  }
  long realtimeMs = clock.elapsedRealtime();
  long eventPositionMs;
  // The event belongs to the current window iff both the timeline instance and window index
  // match the player's current ones.
  boolean isInCurrentWindow =
      timeline == player.getCurrentTimeline() && windowIndex == player.getCurrentWindowIndex();
  if (mediaPeriodId != null && mediaPeriodId.isAd()) {
    boolean isCurrentAd =
        isInCurrentWindow
            && player.getCurrentAdGroupIndex() == mediaPeriodId.adGroupIndex
            && player.getCurrentAdIndexInAdGroup() == mediaPeriodId.adIndexInAdGroup;
    // Assume start position of 0 for future ads.
    eventPositionMs = isCurrentAd ? player.getCurrentPosition() : 0;
  } else if (isInCurrentWindow) {
    eventPositionMs = player.getContentPosition();
  } else {
    // Assume default start position for future content windows. If timeline is not available yet,
    // assume start position of 0.
    eventPositionMs =
        timeline.isEmpty() ? 0 : timeline.getWindow(windowIndex, window).getDefaultPositionMs();
  }
  return new EventTime(
      realtimeMs,
      timeline,
      windowIndex,
      mediaPeriodId,
      eventPositionMs,
      player.getCurrentPosition(),
      player.getTotalBufferedDuration());
}
/**
 * Generates an {@link EventTime} for the given media period info.
 *
 * <p>If {@code mediaPeriodInfo} is {@code null}, tries to resolve the player's current window
 * index to a unique tracked media period; if that is ambiguous or unknown, falls back to a
 * window-only event time without a media period id.
 */
private EventTime generateEventTime(@Nullable MediaPeriodInfo mediaPeriodInfo) {
  Assertions.checkNotNull(player);
  if (mediaPeriodInfo == null) {
    int windowIndex = player.getCurrentWindowIndex();
    mediaPeriodInfo = mediaPeriodQueueTracker.tryResolveWindowIndex(windowIndex);
    if (mediaPeriodInfo == null) {
      Timeline timeline = player.getCurrentTimeline();
      // Guard against a window index that is out of range of the current timeline.
      boolean windowIsInTimeline = windowIndex < timeline.getWindowCount();
      return generateEventTime(
          windowIsInTimeline ? timeline : Timeline.EMPTY, windowIndex, /* mediaPeriodId= */ null);
    }
  }
  return generateEventTime(
      mediaPeriodInfo.timeline, mediaPeriodInfo.windowIndex, mediaPeriodInfo.mediaPeriodId);
}
/** Generates an {@link EventTime} for the playing media period as last reported to listeners. */
private EventTime generateLastReportedPlayingMediaPeriodEventTime() {
  return generateEventTime(mediaPeriodQueueTracker.getLastReportedPlayingMediaPeriod());
}
/** Generates an {@link EventTime} for the media period at the front of the tracked queue. */
private EventTime generatePlayingMediaPeriodEventTime() {
  return generateEventTime(mediaPeriodQueueTracker.getPlayingMediaPeriod());
}
/** Generates an {@link EventTime} for the media period currently being read, if any. */
private EventTime generateReadingMediaPeriodEventTime() {
  return generateEventTime(mediaPeriodQueueTracker.getReadingMediaPeriod());
}
/** Generates an {@link EventTime} for the media period at the end of the tracked queue. */
private EventTime generateLoadingMediaPeriodEventTime() {
  return generateEventTime(mediaPeriodQueueTracker.getLoadingMediaPeriod());
}
/**
 * Generates an {@link EventTime} for the given window index and optional media period id.
 *
 * <p>Prefers the tracked {@link MediaPeriodInfo} for the id when available; otherwise falls back
 * to a window-only event time (with {@link Timeline#EMPTY} if the window is not in the current
 * timeline).
 */
private EventTime generateMediaPeriodEventTime(
    int windowIndex, @Nullable MediaPeriodId mediaPeriodId) {
  Assertions.checkNotNull(player);
  if (mediaPeriodId != null) {
    MediaPeriodInfo mediaPeriodInfo = mediaPeriodQueueTracker.getMediaPeriodInfo(mediaPeriodId);
    return mediaPeriodInfo != null
        ? generateEventTime(mediaPeriodInfo)
        : generateEventTime(Timeline.EMPTY, windowIndex, mediaPeriodId);
  }
  Timeline timeline = player.getCurrentTimeline();
  boolean windowIsInTimeline = windowIndex < timeline.getWindowCount();
  return generateEventTime(
      windowIsInTimeline ? timeline : Timeline.EMPTY, windowIndex, /* mediaPeriodId= */ null);
}
/** Keeps track of the active media periods and currently playing and reading media period. */
private static final class MediaPeriodQueueTracker {
  // TODO: Investigate reporting MediaPeriodId in renderer events and adding a listener of queue
  // changes, which would hopefully remove the need to track the queue here.

  // Active media periods in queue order: index 0 is the front (playing) period, the last entry
  // is the loading period.
  private final ArrayList<MediaPeriodInfo> mediaPeriodInfoQueue;
  // Id-to-info lookup for the same media periods held in mediaPeriodInfoQueue.
  private final HashMap<MediaPeriodId, MediaPeriodInfo> mediaPeriodIdToInfo;
  // Reusable holder for Timeline.getPeriod queries.
  private final Period period;

  // Front-of-queue media period at the time the queue was last modified.
  @Nullable private MediaPeriodInfo lastPlayingMediaPeriod;
  // Playing media period as last exposed to listeners; frozen while a seek is in flight.
  @Nullable private MediaPeriodInfo lastReportedPlayingMediaPeriod;
  // Media period currently being read by the player, or null.
  @Nullable private MediaPeriodInfo readingMediaPeriod;
  // Most recently reported timeline; Timeline.EMPTY until the first timeline change.
  private Timeline timeline;
  // Whether a seek has started and is not yet processed.
  private boolean isSeeking;

  public MediaPeriodQueueTracker() {
    mediaPeriodInfoQueue = new ArrayList<>();
    mediaPeriodIdToInfo = new HashMap<>();
    period = new Period();
    timeline = Timeline.EMPTY;
  }

  /**
   * Returns the {@link MediaPeriodInfo} of the media period in the front of the queue. This is
   * the playing media period unless the player hasn't started playing yet (in which case it is
   * the loading media period or null). While the player is seeking or preparing, this method will
   * always return null to reflect the uncertainty about the current playing period. May also be
   * null, if the timeline is empty or no media period is active yet.
   */
  @Nullable
  public MediaPeriodInfo getPlayingMediaPeriod() {
    return mediaPeriodInfoQueue.isEmpty() || timeline.isEmpty() || isSeeking
        ? null
        : mediaPeriodInfoQueue.get(0);
  }

  /**
   * Returns the {@link MediaPeriodInfo} of the currently playing media period. This is the
   * publicly reported period which should always match {@link Player#getCurrentPeriodIndex()}
   * unless the player is currently seeking or being prepared in which case the previous period is
   * reported until the seek or preparation is processed. May be null, if no media period is
   * active yet.
   */
  @Nullable
  public MediaPeriodInfo getLastReportedPlayingMediaPeriod() {
    return lastReportedPlayingMediaPeriod;
  }

  /**
   * Returns the {@link MediaPeriodInfo} of the media period currently being read by the player.
   * May be null, if the player is not reading a media period.
   */
  @Nullable
  public MediaPeriodInfo getReadingMediaPeriod() {
    return readingMediaPeriod;
  }

  /**
   * Returns the {@link MediaPeriodInfo} of the media period at the end of the queue which is
   * currently loading or will be the next one loading. May be null, if no media period is active
   * yet.
   */
  @Nullable
  public MediaPeriodInfo getLoadingMediaPeriod() {
    return mediaPeriodInfoQueue.isEmpty()
        ? null
        : mediaPeriodInfoQueue.get(mediaPeriodInfoQueue.size() - 1);
  }

  /** Returns the {@link MediaPeriodInfo} for the given {@link MediaPeriodId}. */
  @Nullable
  public MediaPeriodInfo getMediaPeriodInfo(MediaPeriodId mediaPeriodId) {
    return mediaPeriodIdToInfo.get(mediaPeriodId);
  }

  /** Returns whether the player is currently seeking. */
  public boolean isSeeking() {
    return isSeeking;
  }

  /**
   * Tries to find an existing media period info from the specified window index. Only returns a
   * non-null media period info if there is a unique, unambiguous match.
   */
  @Nullable
  public MediaPeriodInfo tryResolveWindowIndex(int windowIndex) {
    MediaPeriodInfo match = null;
    for (int i = 0; i < mediaPeriodInfoQueue.size(); i++) {
      MediaPeriodInfo info = mediaPeriodInfoQueue.get(i);
      int periodIndex = timeline.getIndexOfPeriod(info.mediaPeriodId.periodUid);
      if (periodIndex != C.INDEX_UNSET
          && timeline.getPeriod(periodIndex, period).windowIndex == windowIndex) {
        if (match != null) {
          // Ambiguous match. Two tracked periods map to the same window, so neither can be
          // returned with confidence.
          return null;
        }
        match = info;
      }
    }
    return match;
  }

  /** Updates the queue with a reported position discontinuity. */
  public void onPositionDiscontinuity(@Player.DiscontinuityReason int reason) {
    // The reason itself is not needed to update the tracked state; any discontinuity makes the
    // current front-of-queue period the publicly reported playing period.
    lastReportedPlayingMediaPeriod = lastPlayingMediaPeriod;
  }

  /** Updates the queue with a reported timeline change. */
  public void onTimelineChanged(Timeline timeline) {
    // Re-resolve window indices of all tracked media periods against the new timeline.
    for (int i = 0; i < mediaPeriodInfoQueue.size(); i++) {
      MediaPeriodInfo newMediaPeriodInfo =
          updateMediaPeriodInfoToNewTimeline(mediaPeriodInfoQueue.get(i), timeline);
      mediaPeriodInfoQueue.set(i, newMediaPeriodInfo);
      mediaPeriodIdToInfo.put(newMediaPeriodInfo.mediaPeriodId, newMediaPeriodInfo);
    }
    if (readingMediaPeriod != null) {
      readingMediaPeriod = updateMediaPeriodInfoToNewTimeline(readingMediaPeriod, timeline);
    }
    this.timeline = timeline;
    lastReportedPlayingMediaPeriod = lastPlayingMediaPeriod;
  }

  /** Updates the queue with a reported start of seek. */
  public void onSeekStarted() {
    isSeeking = true;
  }

  /** Updates the queue with a reported processed seek. */
  public void onSeekProcessed() {
    isSeeking = false;
    lastReportedPlayingMediaPeriod = lastPlayingMediaPeriod;
  }

  /** Updates the queue with a newly created media period. */
  public void onMediaPeriodCreated(int windowIndex, MediaPeriodId mediaPeriodId) {
    int periodIndex = timeline.getIndexOfPeriod(mediaPeriodId.periodUid);
    boolean isInTimeline = periodIndex != C.INDEX_UNSET;
    MediaPeriodInfo mediaPeriodInfo =
        new MediaPeriodInfo(
            mediaPeriodId,
            isInTimeline ? timeline : Timeline.EMPTY,
            // If not in the timeline yet, fall back to the prospective window index reported by
            // the caller.
            isInTimeline ? timeline.getPeriod(periodIndex, period).windowIndex : windowIndex);
    mediaPeriodInfoQueue.add(mediaPeriodInfo);
    mediaPeriodIdToInfo.put(mediaPeriodId, mediaPeriodInfo);
    lastPlayingMediaPeriod = mediaPeriodInfoQueue.get(0);
    if (mediaPeriodInfoQueue.size() == 1 && !timeline.isEmpty()) {
      // First tracked period with a known timeline can be reported immediately.
      lastReportedPlayingMediaPeriod = lastPlayingMediaPeriod;
    }
  }

  /**
   * Updates the queue with a released media period. Returns whether the media period was still in
   * the queue.
   */
  public boolean onMediaPeriodReleased(MediaPeriodId mediaPeriodId) {
    MediaPeriodInfo mediaPeriodInfo = mediaPeriodIdToInfo.remove(mediaPeriodId);
    if (mediaPeriodInfo == null) {
      // The media period has already been removed from the queue in resetForNewMediaSource().
      return false;
    }
    mediaPeriodInfoQueue.remove(mediaPeriodInfo);
    if (readingMediaPeriod != null && mediaPeriodId.equals(readingMediaPeriod.mediaPeriodId)) {
      // The reading period was released; fall back to the front of the queue, if any.
      readingMediaPeriod = mediaPeriodInfoQueue.isEmpty() ? null : mediaPeriodInfoQueue.get(0);
    }
    if (!mediaPeriodInfoQueue.isEmpty()) {
      lastPlayingMediaPeriod = mediaPeriodInfoQueue.get(0);
    }
    return true;
  }

  /** Update the queue with a change in the reading media period. */
  public void onReadingStarted(MediaPeriodId mediaPeriodId) {
    readingMediaPeriod = mediaPeriodIdToInfo.get(mediaPeriodId);
  }

  /**
   * Returns a copy of {@code info} whose timeline and window index are updated to {@code
   * newTimeline}, or {@code info} unchanged if its period is not in the new timeline.
   */
  private MediaPeriodInfo updateMediaPeriodInfoToNewTimeline(
      MediaPeriodInfo info, Timeline newTimeline) {
    int newPeriodIndex = newTimeline.getIndexOfPeriod(info.mediaPeriodId.periodUid);
    if (newPeriodIndex == C.INDEX_UNSET) {
      // Media period is not yet or no longer available in the new timeline. Keep it as it is.
      return info;
    }
    int newWindowIndex = newTimeline.getPeriod(newPeriodIndex, period).windowIndex;
    return new MediaPeriodInfo(info.mediaPeriodId, newTimeline, newWindowIndex);
  }
}
/** Information about a media period and its associated timeline. */
private static final class MediaPeriodInfo {

  /** The {@link MediaPeriodId} of the media period. */
  public final MediaPeriodId mediaPeriodId;
  /**
   * The {@link Timeline} in which the media period can be found. Or {@link Timeline#EMPTY} if the
   * media period is not part of a known timeline yet.
   */
  public final Timeline timeline;
  /**
   * The window index of the media period in the timeline. If the timeline is empty, this is the
   * prospective window index.
   */
  public final int windowIndex;

  /**
   * Creates a new media period info.
   *
   * @param mediaPeriodId The {@link MediaPeriodId} of the media period.
   * @param timeline The {@link Timeline} containing the media period, or {@link Timeline#EMPTY}.
   * @param windowIndex The (prospective) window index of the media period.
   */
  public MediaPeriodInfo(MediaPeriodId mediaPeriodId, Timeline timeline, int windowIndex) {
    this.mediaPeriodId = mediaPeriodId;
    this.timeline = timeline;
    this.windowIndex = windowIndex;
  }
}
}

Просмотреть файл

@ -0,0 +1,514 @@
/*
* Copyright (C) 2018 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.mozilla.thirdparty.com.google.android.exoplayer2.analytics;
import android.view.Surface;
import androidx.annotation.Nullable;
import org.mozilla.thirdparty.com.google.android.exoplayer2.C;
import org.mozilla.thirdparty.com.google.android.exoplayer2.ExoPlaybackException;
import org.mozilla.thirdparty.com.google.android.exoplayer2.Format;
import org.mozilla.thirdparty.com.google.android.exoplayer2.PlaybackParameters;
import org.mozilla.thirdparty.com.google.android.exoplayer2.Player;
import org.mozilla.thirdparty.com.google.android.exoplayer2.Player.DiscontinuityReason;
import org.mozilla.thirdparty.com.google.android.exoplayer2.Player.PlaybackSuppressionReason;
import org.mozilla.thirdparty.com.google.android.exoplayer2.Player.TimelineChangeReason;
import org.mozilla.thirdparty.com.google.android.exoplayer2.Timeline;
import org.mozilla.thirdparty.com.google.android.exoplayer2.audio.AudioAttributes;
import org.mozilla.thirdparty.com.google.android.exoplayer2.audio.AudioSink;
import org.mozilla.thirdparty.com.google.android.exoplayer2.decoder.DecoderCounters;
import org.mozilla.thirdparty.com.google.android.exoplayer2.metadata.Metadata;
import org.mozilla.thirdparty.com.google.android.exoplayer2.source.MediaSource.MediaPeriodId;
import org.mozilla.thirdparty.com.google.android.exoplayer2.source.MediaSourceEventListener.LoadEventInfo;
import org.mozilla.thirdparty.com.google.android.exoplayer2.source.MediaSourceEventListener.MediaLoadData;
import org.mozilla.thirdparty.com.google.android.exoplayer2.source.TrackGroupArray;
import org.mozilla.thirdparty.com.google.android.exoplayer2.trackselection.TrackSelectionArray;
import java.io.IOException;
/**
* A listener for analytics events.
*
* <p>All events are recorded with an {@link EventTime} specifying the elapsed real time and media
* time at the time of the event.
*
* <p>All methods have no-op default implementations to allow selective overrides.
*/
public interface AnalyticsListener {
/** Time information of an event. */
final class EventTime {
/**
* Elapsed real-time as returned by {@code SystemClock.elapsedRealtime()} at the time of the
* event, in milliseconds.
*/
public final long realtimeMs;
/** Timeline at the time of the event. */
public final Timeline timeline;
/**
* Window index in the {@link #timeline} this event belongs to, or the prospective window index
* if the timeline is not yet known and empty.
*/
public final int windowIndex;
/**
* Media period identifier for the media period this event belongs to, or {@code null} if the
* event is not associated with a specific media period.
*/
@Nullable public final MediaPeriodId mediaPeriodId;
/**
* Position in the window or ad this event belongs to at the time of the event, in milliseconds.
*/
public final long eventPlaybackPositionMs;
/**
* Position in the current timeline window ({@link Player#getCurrentWindowIndex()}) or the
* currently playing ad at the time of the event, in milliseconds.
*/
public final long currentPlaybackPositionMs;
/**
* Total buffered duration from {@link #currentPlaybackPositionMs} at the time of the event, in
* milliseconds. This includes pre-buffered data for subsequent ads and windows.
*/
public final long totalBufferedDurationMs;
/**
* @param realtimeMs Elapsed real-time as returned by {@code SystemClock.elapsedRealtime()} at
* the time of the event, in milliseconds.
* @param timeline Timeline at the time of the event.
* @param windowIndex Window index in the {@link #timeline} this event belongs to, or the
* prospective window index if the timeline is not yet known and empty.
* @param mediaPeriodId Media period identifier for the media period this event belongs to, or
* {@code null} if the event is not associated with a specific media period.
* @param eventPlaybackPositionMs Position in the window or ad this event belongs to at the time
* of the event, in milliseconds.
* @param currentPlaybackPositionMs Position in the current timeline window ({@link
* Player#getCurrentWindowIndex()}) or the currently playing ad at the time of the event, in
* milliseconds.
* @param totalBufferedDurationMs Total buffered duration from {@link
* #currentPlaybackPositionMs} at the time of the event, in milliseconds. This includes
* pre-buffered data for subsequent ads and windows.
*/
public EventTime(
long realtimeMs,
Timeline timeline,
int windowIndex,
@Nullable MediaPeriodId mediaPeriodId,
long eventPlaybackPositionMs,
long currentPlaybackPositionMs,
long totalBufferedDurationMs) {
this.realtimeMs = realtimeMs;
this.timeline = timeline;
this.windowIndex = windowIndex;
this.mediaPeriodId = mediaPeriodId;
this.eventPlaybackPositionMs = eventPlaybackPositionMs;
this.currentPlaybackPositionMs = currentPlaybackPositionMs;
this.totalBufferedDurationMs = totalBufferedDurationMs;
}
}
/**
* Called when the player state changed.
*
* @param eventTime The event time.
* @param playWhenReady Whether the playback will proceed when ready.
* @param playbackState The new {@link Player.State playback state}.
*/
default void onPlayerStateChanged(
EventTime eventTime, boolean playWhenReady, @Player.State int playbackState) {}
/**
* Called when playback suppression reason changed.
*
* @param eventTime The event time.
* @param playbackSuppressionReason The new {@link PlaybackSuppressionReason}.
*/
default void onPlaybackSuppressionReasonChanged(
EventTime eventTime, @PlaybackSuppressionReason int playbackSuppressionReason) {}
/**
* Called when the player starts or stops playing.
*
* @param eventTime The event time.
* @param isPlaying Whether the player is playing.
*/
default void onIsPlayingChanged(EventTime eventTime, boolean isPlaying) {}
/**
* Called when the timeline changed.
*
* @param eventTime The event time.
* @param reason The reason for the timeline change.
*/
default void onTimelineChanged(EventTime eventTime, @TimelineChangeReason int reason) {}
/**
* Called when a position discontinuity occurred.
*
* @param eventTime The event time.
* @param reason The reason for the position discontinuity.
*/
default void onPositionDiscontinuity(EventTime eventTime, @DiscontinuityReason int reason) {}
/**
* Called when a seek operation started.
*
* @param eventTime The event time.
*/
default void onSeekStarted(EventTime eventTime) {}
/**
* Called when a seek operation was processed.
*
* @param eventTime The event time.
*/
default void onSeekProcessed(EventTime eventTime) {}
/**
* Called when the playback parameters changed.
*
* @param eventTime The event time.
* @param playbackParameters The new playback parameters.
*/
default void onPlaybackParametersChanged(
EventTime eventTime, PlaybackParameters playbackParameters) {}
/**
* Called when the repeat mode changed.
*
* @param eventTime The event time.
* @param repeatMode The new repeat mode.
*/
default void onRepeatModeChanged(EventTime eventTime, @Player.RepeatMode int repeatMode) {}
/**
* Called when the shuffle mode changed.
*
* @param eventTime The event time.
* @param shuffleModeEnabled Whether the shuffle mode is enabled.
*/
default void onShuffleModeChanged(EventTime eventTime, boolean shuffleModeEnabled) {}
/**
* Called when the player starts or stops loading data from a source.
*
* @param eventTime The event time.
* @param isLoading Whether the player is loading.
*/
default void onLoadingChanged(EventTime eventTime, boolean isLoading) {}
/**
* Called when a fatal player error occurred.
*
* @param eventTime The event time.
* @param error The error.
*/
default void onPlayerError(EventTime eventTime, ExoPlaybackException error) {}
/**
* Called when the available or selected tracks for the renderers changed.
*
* @param eventTime The event time.
* @param trackGroups The available tracks. May be empty.
* @param trackSelections The track selections for each renderer. May contain null elements.
*/
default void onTracksChanged(
EventTime eventTime, TrackGroupArray trackGroups, TrackSelectionArray trackSelections) {}
/**
* Called when a media source started loading data.
*
* @param eventTime The event time.
* @param loadEventInfo The {@link LoadEventInfo} defining the load event.
* @param mediaLoadData The {@link MediaLoadData} defining the data being loaded.
*/
default void onLoadStarted(
EventTime eventTime, LoadEventInfo loadEventInfo, MediaLoadData mediaLoadData) {}
/**
* Called when a media source completed loading data.
*
* @param eventTime The event time.
* @param loadEventInfo The {@link LoadEventInfo} defining the load event.
* @param mediaLoadData The {@link MediaLoadData} defining the data being loaded.
*/
default void onLoadCompleted(
EventTime eventTime, LoadEventInfo loadEventInfo, MediaLoadData mediaLoadData) {}
/**
* Called when a media source canceled loading data.
*
* @param eventTime The event time.
* @param loadEventInfo The {@link LoadEventInfo} defining the load event.
* @param mediaLoadData The {@link MediaLoadData} defining the data being loaded.
*/
default void onLoadCanceled(
EventTime eventTime, LoadEventInfo loadEventInfo, MediaLoadData mediaLoadData) {}
/**
* Called when a media source loading error occurred. These errors are just for informational
* purposes and the player may recover.
*
* @param eventTime The event time.
* @param loadEventInfo The {@link LoadEventInfo} defining the load event.
* @param mediaLoadData The {@link MediaLoadData} defining the data being loaded.
* @param error The load error.
* @param wasCanceled Whether the load was canceled as a result of the error.
*/
default void onLoadError(
EventTime eventTime,
LoadEventInfo loadEventInfo,
MediaLoadData mediaLoadData,
IOException error,
boolean wasCanceled) {}
/**
* Called when the downstream format sent to the renderers changed.
*
* @param eventTime The event time.
* @param mediaLoadData The {@link MediaLoadData} defining the newly selected media data.
*/
default void onDownstreamFormatChanged(EventTime eventTime, MediaLoadData mediaLoadData) {}
/**
* Called when data is removed from the back of a media buffer, typically so that it can be
* re-buffered in a different format.
*
* @param eventTime The event time.
* @param mediaLoadData The {@link MediaLoadData} defining the media being discarded.
*/
default void onUpstreamDiscarded(EventTime eventTime, MediaLoadData mediaLoadData) {}
/**
* Called when a media source created a media period.
*
* @param eventTime The event time.
*/
default void onMediaPeriodCreated(EventTime eventTime) {}
/**
* Called when a media source released a media period.
*
* @param eventTime The event time.
*/
default void onMediaPeriodReleased(EventTime eventTime) {}
/**
* Called when the player started reading a media period.
*
* @param eventTime The event time.
*/
default void onReadingStarted(EventTime eventTime) {}
/**
* Called when the bandwidth estimate for the current data source has been updated.
*
* @param eventTime The event time.
* @param totalLoadTimeMs The total time spend loading this update is based on, in milliseconds.
* @param totalBytesLoaded The total bytes loaded this update is based on.
* @param bitrateEstimate The bandwidth estimate, in bits per second.
*/
default void onBandwidthEstimate(
EventTime eventTime, int totalLoadTimeMs, long totalBytesLoaded, long bitrateEstimate) {}
/**
* Called when the output surface size changed.
*
* @param eventTime The event time.
* @param width The surface width in pixels. May be {@link C#LENGTH_UNSET} if unknown, or 0 if the
* video is not rendered onto a surface.
* @param height The surface height in pixels. May be {@link C#LENGTH_UNSET} if unknown, or 0 if
* the video is not rendered onto a surface.
*/
default void onSurfaceSizeChanged(EventTime eventTime, int width, int height) {}
/**
* Called when there is {@link Metadata} associated with the current playback time.
*
* @param eventTime The event time.
* @param metadata The metadata.
*/
default void onMetadata(EventTime eventTime, Metadata metadata) {}
/**
* Called when an audio or video decoder has been enabled.
*
* @param eventTime The event time.
* @param trackType The track type of the enabled decoder. Either {@link C#TRACK_TYPE_AUDIO} or
* {@link C#TRACK_TYPE_VIDEO}.
* @param decoderCounters The accumulated event counters associated with this decoder.
*/
default void onDecoderEnabled(
EventTime eventTime, int trackType, DecoderCounters decoderCounters) {}
/**
* Called when an audio or video decoder has been initialized.
*
* @param eventTime The event time.
* @param trackType The track type of the initialized decoder. Either {@link C#TRACK_TYPE_AUDIO}
* or {@link C#TRACK_TYPE_VIDEO}.
* @param decoderName The decoder that was created.
* @param initializationDurationMs Time taken to initialize the decoder, in milliseconds.
*/
default void onDecoderInitialized(
EventTime eventTime, int trackType, String decoderName, long initializationDurationMs) {}
/**
* Called when an audio or video decoder input format changed.
*
* @param eventTime The event time.
* @param trackType The track type of the decoder whose format changed. Either {@link
* C#TRACK_TYPE_AUDIO} or {@link C#TRACK_TYPE_VIDEO}.
* @param format The new input format for the decoder.
*/
default void onDecoderInputFormatChanged(EventTime eventTime, int trackType, Format format) {}
/**
* Called when an audio or video decoder has been disabled.
*
* @param eventTime The event time.
* @param trackType The track type of the disabled decoder. Either {@link C#TRACK_TYPE_AUDIO} or
* {@link C#TRACK_TYPE_VIDEO}.
* @param decoderCounters The accumulated event counters associated with this decoder.
*/
default void onDecoderDisabled(
EventTime eventTime, int trackType, DecoderCounters decoderCounters) {}
/**
* Called when the audio session id is set.
*
* @param eventTime The event time.
* @param audioSessionId The audio session id.
*/
default void onAudioSessionId(EventTime eventTime, int audioSessionId) {}
/**
* Called when the audio attributes change.
*
* @param eventTime The event time.
* @param audioAttributes The audio attributes.
*/
default void onAudioAttributesChanged(EventTime eventTime, AudioAttributes audioAttributes) {}
/**
* Called when the volume changes.
*
* @param eventTime The event time.
* @param volume The new volume, with 0 being silence and 1 being unity gain.
*/
default void onVolumeChanged(EventTime eventTime, float volume) {}
/**
* Called when an audio underrun occurred.
*
* @param eventTime The event time.
* @param bufferSize The size of the {@link AudioSink}'s buffer, in bytes.
* @param bufferSizeMs The size of the {@link AudioSink}'s buffer, in milliseconds, if it is
* configured for PCM output. {@link C#TIME_UNSET} if it is configured for passthrough output,
* as the buffered media can have a variable bitrate so the duration may be unknown.
* @param elapsedSinceLastFeedMs The time since the {@link AudioSink} was last fed data.
*/
default void onAudioUnderrun(
EventTime eventTime, int bufferSize, long bufferSizeMs, long elapsedSinceLastFeedMs) {}
/**
* Called after video frames have been dropped.
*
* @param eventTime The event time.
* @param droppedFrames The number of dropped frames since the last call to this method.
* @param elapsedMs The duration in milliseconds over which the frames were dropped. This duration
* is timed from when the renderer was started or from when dropped frames were last reported
* (whichever was more recent), and not from when the first of the reported drops occurred.
*/
default void onDroppedVideoFrames(EventTime eventTime, int droppedFrames, long elapsedMs) {}
/**
 * Called before a frame is rendered for the first time since setting the surface, and each time
 * there's a change in the size or pixel aspect ratio of the video being rendered.
 *
 * <p>The default implementation is a no-op.
 *
 * @param eventTime The event time.
 * @param width The width of the video.
 * @param height The height of the video.
 * @param unappliedRotationDegrees For videos that require a rotation, this is the clockwise
 *     rotation in degrees that the application should apply for the video for it to be rendered
 *     in the correct orientation. This value will always be zero on API levels 21 and above,
 *     since the renderer will apply all necessary rotations internally.
 * @param pixelWidthHeightRatio The width to height ratio of each pixel.
 */
default void onVideoSizeChanged(
    EventTime eventTime,
    int width,
    int height,
    int unappliedRotationDegrees,
    float pixelWidthHeightRatio) {}
/**
 * Called when a frame is rendered for the first time since setting the surface, and when a frame
 * is rendered for the first time since the renderer was reset.
 *
 * <p>The default implementation is a no-op.
 *
 * @param eventTime The event time.
 * @param surface The {@link Surface} to which a first frame has been rendered, or {@code null} if
 *     the renderer renders to something that isn't a {@link Surface}.
 */
default void onRenderedFirstFrame(EventTime eventTime, @Nullable Surface surface) {}
/**
 * Called each time a drm session is acquired.
 *
 * <p>The default implementation is a no-op.
 *
 * @param eventTime The event time.
 */
default void onDrmSessionAcquired(EventTime eventTime) {}
/**
 * Called each time drm keys are loaded.
 *
 * <p>The default implementation is a no-op.
 *
 * @param eventTime The event time.
 */
default void onDrmKeysLoaded(EventTime eventTime) {}
/**
 * Called when a drm error occurs. These errors are just for informational purposes and the player
 * may recover.
 *
 * <p>The default implementation is a no-op.
 *
 * @param eventTime The event time.
 * @param error The error.
 */
default void onDrmSessionManagerError(EventTime eventTime, Exception error) {}
/**
 * Called each time offline drm keys are restored.
 *
 * <p>The default implementation is a no-op.
 *
 * @param eventTime The event time.
 */
default void onDrmKeysRestored(EventTime eventTime) {}
/**
 * Called each time offline drm keys are removed.
 *
 * <p>The default implementation is a no-op.
 *
 * @param eventTime The event time.
 */
default void onDrmKeysRemoved(EventTime eventTime) {}
/**
 * Called each time a drm session is released.
 *
 * <p>The default implementation is a no-op.
 *
 * @param eventTime The event time.
 */
default void onDrmSessionReleased(EventTime eventTime) {}
}

Просмотреть файл

@ -0,0 +1,23 @@
/*
* Copyright (C) 2018 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.mozilla.thirdparty.com.google.android.exoplayer2.analytics;
/**
 * An abstract {@link AnalyticsListener} base class that adds no behavior of its own; it exists
 * only for backward compatibility with code written before {@link AnalyticsListener} gained no-op
 * default methods.
 *
 * @deprecated Use {@link AnalyticsListener} directly for selective overrides as all methods are
 *     implemented as no-op default methods.
 */
@Deprecated
public abstract class DefaultAnalyticsListener implements AnalyticsListener {}

Просмотреть файл

@ -0,0 +1,355 @@
/*
* Copyright (C) 2019 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.mozilla.thirdparty.com.google.android.exoplayer2.analytics;
import android.util.Base64;
import androidx.annotation.Nullable;
import org.mozilla.thirdparty.com.google.android.exoplayer2.C;
import org.mozilla.thirdparty.com.google.android.exoplayer2.Player;
import org.mozilla.thirdparty.com.google.android.exoplayer2.Player.DiscontinuityReason;
import org.mozilla.thirdparty.com.google.android.exoplayer2.Timeline;
import org.mozilla.thirdparty.com.google.android.exoplayer2.analytics.AnalyticsListener.EventTime;
import org.mozilla.thirdparty.com.google.android.exoplayer2.source.MediaSource.MediaPeriodId;
import org.mozilla.thirdparty.com.google.android.exoplayer2.util.Assertions;
import org.mozilla.thirdparty.com.google.android.exoplayer2.util.Util;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Random;
import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
import org.checkerframework.checker.nullness.qual.RequiresNonNull;
/**
* Default {@link PlaybackSessionManager} which instantiates a new session for each window in the
* timeline and also for each ad within the windows.
*
* <p>Sessions are identified by Base64-encoded, URL-safe, random strings.
*/
public final class DefaultPlaybackSessionManager implements PlaybackSessionManager {

  // Shared source of randomness for session id generation. Session ids are opaque labels, not
  // security tokens, so java.util.Random is used rather than SecureRandom.
  private static final Random RANDOM = new Random();
  // Number of random bytes per session id (Base64-encoded to a 16-character string).
  private static final int SESSION_ID_LENGTH = 12;

  // Reusable holders to avoid allocating a new Window/Period for every timeline query.
  private final Timeline.Window window;
  private final Timeline.Period period;
  // All live sessions, keyed by session id. Entries are pruned in handleTimelineUpdate and
  // handlePositionDiscontinuity once a session is finished.
  private final HashMap<String, SessionDescriptor> sessions;

  // Set once via setListener before the manager is used; checked non-null before callbacks.
  private @MonotonicNonNull Listener listener;
  private Timeline currentTimeline;
  // Media period id of the most recent event passed to updateActiveSession, or null if none yet.
  @Nullable private MediaPeriodId currentMediaPeriodId;
  // Session id of the session currently active in the foreground, or null if none.
  @Nullable private String activeSessionId;

  /** Creates session manager. */
  public DefaultPlaybackSessionManager() {
    window = new Timeline.Window();
    period = new Timeline.Period();
    sessions = new HashMap<>();
    currentTimeline = Timeline.EMPTY;
  }

  @Override
  public void setListener(Listener listener) {
    this.listener = listener;
  }

  @Override
  public synchronized String getSessionForMediaPeriodId(
      Timeline timeline, MediaPeriodId mediaPeriodId) {
    // Resolve the period to its window index so the session can be looked up (or created).
    int windowIndex = timeline.getPeriodByUid(mediaPeriodId.periodUid, period).windowIndex;
    return getOrAddSession(windowIndex, mediaPeriodId).sessionId;
  }

  @Override
  public synchronized boolean belongsToSession(EventTime eventTime, String sessionId) {
    SessionDescriptor sessionDescriptor = sessions.get(sessionId);
    if (sessionDescriptor == null) {
      return false;
    }
    // Opportunistically pin down the session's window sequence number before testing membership.
    sessionDescriptor.maybeSetWindowSequenceNumber(eventTime.windowIndex, eventTime.mediaPeriodId);
    return sessionDescriptor.belongsToSession(eventTime.windowIndex, eventTime.mediaPeriodId);
  }

  @Override
  public synchronized void updateSessions(EventTime eventTime) {
    // Events whose window sequence number is lower than the current one refer to a playback that
    // has already been transitioned away from; don't create or resurrect a session for them.
    boolean isObviouslyFinished =
        eventTime.mediaPeriodId != null
            && currentMediaPeriodId != null
            && eventTime.mediaPeriodId.windowSequenceNumber
                < currentMediaPeriodId.windowSequenceNumber;
    if (!isObviouslyFinished) {
      SessionDescriptor descriptor =
          getOrAddSession(eventTime.windowIndex, eventTime.mediaPeriodId);
      if (!descriptor.isCreated) {
        descriptor.isCreated = true;
        Assertions.checkNotNull(listener).onSessionCreated(eventTime, descriptor.sessionId);
        // The first session ever created becomes the active one immediately.
        if (activeSessionId == null) {
          updateActiveSession(eventTime, descriptor);
        }
      }
    }
  }

  @Override
  public synchronized void handleTimelineUpdate(EventTime eventTime) {
    Assertions.checkNotNull(listener);
    Timeline previousTimeline = currentTimeline;
    currentTimeline = eventTime.timeline;
    // Finish (and remove) every session whose window/ad period no longer exists in the new
    // timeline. Sessions that resolve to the new timeline keep their (possibly updated) index.
    Iterator<SessionDescriptor> iterator = sessions.values().iterator();
    while (iterator.hasNext()) {
      SessionDescriptor session = iterator.next();
      if (!session.tryResolvingToNewTimeline(previousTimeline, currentTimeline)) {
        iterator.remove();
        if (session.isCreated) {
          if (session.sessionId.equals(activeSessionId)) {
            activeSessionId = null;
          }
          listener.onSessionFinished(
              eventTime, session.sessionId, /* automaticTransitionToNextPlayback= */ false);
        }
      }
    }
    // Reuse the discontinuity handling to finish stale sessions and refresh the active session.
    handlePositionDiscontinuity(eventTime, Player.DISCONTINUITY_REASON_INTERNAL);
  }

  @Override
  public synchronized void handlePositionDiscontinuity(
      EventTime eventTime, @DiscontinuityReason int reason) {
    Assertions.checkNotNull(listener);
    boolean hasAutomaticTransition =
        reason == Player.DISCONTINUITY_REASON_PERIOD_TRANSITION
            || reason == Player.DISCONTINUITY_REASON_AD_INSERTION;
    // Finish (and remove) every session that is provably over at this event time.
    Iterator<SessionDescriptor> iterator = sessions.values().iterator();
    while (iterator.hasNext()) {
      SessionDescriptor session = iterator.next();
      if (session.isFinishedAtEventTime(eventTime)) {
        iterator.remove();
        if (session.isCreated) {
          boolean isRemovingActiveSession = session.sessionId.equals(activeSessionId);
          // Only the active session's removal can count as an automatic transition.
          boolean isAutomaticTransition = hasAutomaticTransition && isRemovingActiveSession;
          if (isRemovingActiveSession) {
            activeSessionId = null;
          }
          listener.onSessionFinished(eventTime, session.sessionId, isAutomaticTransition);
        }
      }
    }
    SessionDescriptor activeSessionDescriptor =
        getOrAddSession(eventTime.windowIndex, eventTime.mediaPeriodId);
    if (eventTime.mediaPeriodId != null
        && eventTime.mediaPeriodId.isAd()
        && (currentMediaPeriodId == null
            || currentMediaPeriodId.windowSequenceNumber
                != eventTime.mediaPeriodId.windowSequenceNumber
            || currentMediaPeriodId.adGroupIndex != eventTime.mediaPeriodId.adGroupIndex
            || currentMediaPeriodId.adIndexInAdGroup != eventTime.mediaPeriodId.adIndexInAdGroup)) {
      // New ad playback started. Find corresponding content session and notify ad playback started.
      MediaPeriodId contentMediaPeriodId =
          new MediaPeriodId(
              eventTime.mediaPeriodId.periodUid, eventTime.mediaPeriodId.windowSequenceNumber);
      SessionDescriptor contentSession =
          getOrAddSession(eventTime.windowIndex, contentMediaPeriodId);
      if (contentSession.isCreated && activeSessionDescriptor.isCreated) {
        listener.onAdPlaybackStarted(
            eventTime, contentSession.sessionId, activeSessionDescriptor.sessionId);
      }
    }
    updateActiveSession(eventTime, activeSessionDescriptor);
  }

  private SessionDescriptor getOrAddSession(
      int windowIndex, @Nullable MediaPeriodId mediaPeriodId) {
    // There should only be one matching session if mediaPeriodId is non-null. If mediaPeriodId is
    // null, there may be multiple matching sessions with different window sequence numbers or
    // adMediaPeriodIds. The best match is the one with the smaller window sequence number, and for
    // windows with ads, the content session is preferred over ad sessions.
    SessionDescriptor bestMatch = null;
    long bestMatchWindowSequenceNumber = Long.MAX_VALUE;
    for (SessionDescriptor sessionDescriptor : sessions.values()) {
      sessionDescriptor.maybeSetWindowSequenceNumber(windowIndex, mediaPeriodId);
      if (sessionDescriptor.belongsToSession(windowIndex, mediaPeriodId)) {
        long windowSequenceNumber = sessionDescriptor.windowSequenceNumber;
        if (windowSequenceNumber == C.INDEX_UNSET
            || windowSequenceNumber < bestMatchWindowSequenceNumber) {
          bestMatch = sessionDescriptor;
          bestMatchWindowSequenceNumber = windowSequenceNumber;
        } else if (windowSequenceNumber == bestMatchWindowSequenceNumber
            && Util.castNonNull(bestMatch).adMediaPeriodId != null
            && sessionDescriptor.adMediaPeriodId != null) {
          // NOTE(review): the comment above says content sessions are preferred over ad sessions,
          // but this branch only replaces one ad session with another (a content session has
          // adMediaPeriodId == null and never triggers it). Verify against upstream ExoPlayer
          // whether the last condition is intended to be `== null`.
          bestMatch = sessionDescriptor;
        }
      }
    }
    if (bestMatch == null) {
      // No existing session matches: reserve a fresh id. The Listener is not notified here;
      // onSessionCreated fires later from updateSessions once isCreated is set.
      String sessionId = generateSessionId();
      bestMatch = new SessionDescriptor(sessionId, windowIndex, mediaPeriodId);
      sessions.put(sessionId, bestMatch);
    }
    return bestMatch;
  }

  @RequiresNonNull("listener")
  private void updateActiveSession(EventTime eventTime, SessionDescriptor sessionDescriptor) {
    currentMediaPeriodId = eventTime.mediaPeriodId;
    // Only sessions that were announced via onSessionCreated may become active.
    if (sessionDescriptor.isCreated) {
      activeSessionId = sessionDescriptor.sessionId;
      if (!sessionDescriptor.isActive) {
        sessionDescriptor.isActive = true;
        listener.onSessionActive(eventTime, sessionDescriptor.sessionId);
      }
    }
  }

  // Returns a URL-safe Base64 string of SESSION_ID_LENGTH random bytes (16 chars for 12 bytes).
  private static String generateSessionId() {
    byte[] randomBytes = new byte[SESSION_ID_LENGTH];
    RANDOM.nextBytes(randomBytes);
    return Base64.encodeToString(randomBytes, Base64.URL_SAFE | Base64.NO_WRAP);
  }

  /**
   * Descriptor for a session.
   *
   * <p>The session may be described in one of three ways:
   *
   * <ul>
   *   <li>A window index with unset window sequence number and a null ad media period id
   *   <li>A content window with index and sequence number, but a null ad media period id.
   *   <li>An ad with all values set.
   * </ul>
   */
  // Deliberately a non-static inner class: resolveWindowIndexToNewTimeline reuses the outer
  // manager's `window` and `period` holder objects.
  private final class SessionDescriptor {

    private final String sessionId;
    // May be re-resolved when the timeline changes (see tryResolvingToNewTimeline).
    private int windowIndex;
    // C.INDEX_UNSET until the first event with a concrete (non-ad) media period id arrives.
    private long windowSequenceNumber;
    private @MonotonicNonNull MediaPeriodId adMediaPeriodId;
    // Whether onSessionCreated has been sent for this session.
    private boolean isCreated;
    // Whether onSessionActive has been sent for this session.
    private boolean isActive;

    public SessionDescriptor(
        String sessionId, int windowIndex, @Nullable MediaPeriodId mediaPeriodId) {
      this.sessionId = sessionId;
      this.windowIndex = windowIndex;
      this.windowSequenceNumber =
          mediaPeriodId == null ? C.INDEX_UNSET : mediaPeriodId.windowSequenceNumber;
      if (mediaPeriodId != null && mediaPeriodId.isAd()) {
        this.adMediaPeriodId = mediaPeriodId;
      }
    }

    /**
     * Re-resolves this session's window index against {@code newTimeline}. Returns false if the
     * session's window (or, for ad sessions, its ad period) no longer exists.
     */
    public boolean tryResolvingToNewTimeline(Timeline oldTimeline, Timeline newTimeline) {
      windowIndex = resolveWindowIndexToNewTimeline(oldTimeline, newTimeline, windowIndex);
      if (windowIndex == C.INDEX_UNSET) {
        return false;
      }
      if (adMediaPeriodId == null) {
        return true;
      }
      int newPeriodIndex = newTimeline.getIndexOfPeriod(adMediaPeriodId.periodUid);
      return newPeriodIndex != C.INDEX_UNSET;
    }

    public boolean belongsToSession(
        int eventWindowIndex, @Nullable MediaPeriodId eventMediaPeriodId) {
      if (eventMediaPeriodId == null) {
        // Events without concrete media period id are for all sessions of the same window.
        return eventWindowIndex == windowIndex;
      }
      if (adMediaPeriodId == null) {
        // If this is a content session, only events for content with the same window sequence
        // number belong to this session.
        return !eventMediaPeriodId.isAd()
            && eventMediaPeriodId.windowSequenceNumber == windowSequenceNumber;
      }
      // If this is an ad session, only events for this ad belong to the session.
      return eventMediaPeriodId.windowSequenceNumber == adMediaPeriodId.windowSequenceNumber
          && eventMediaPeriodId.adGroupIndex == adMediaPeriodId.adGroupIndex
          && eventMediaPeriodId.adIndexInAdGroup == adMediaPeriodId.adIndexInAdGroup;
    }

    public void maybeSetWindowSequenceNumber(
        int eventWindowIndex, @Nullable MediaPeriodId eventMediaPeriodId) {
      if (windowSequenceNumber == C.INDEX_UNSET
          && eventWindowIndex == windowIndex
          && eventMediaPeriodId != null
          && !eventMediaPeriodId.isAd()) {
        // Set window sequence number for this session as soon as we have one.
        windowSequenceNumber = eventMediaPeriodId.windowSequenceNumber;
      }
    }

    /** Returns whether this session is provably finished at the given event time. */
    public boolean isFinishedAtEventTime(EventTime eventTime) {
      if (windowSequenceNumber == C.INDEX_UNSET) {
        // Sessions with unspecified window sequence number are kept until we know more.
        return false;
      }
      if (eventTime.mediaPeriodId == null) {
        // For event times without media period id (e.g. after seek to new window), we only keep
        // sessions of this window.
        return windowIndex != eventTime.windowIndex;
      }
      if (eventTime.mediaPeriodId.windowSequenceNumber > windowSequenceNumber) {
        // All past window sequence numbers are finished.
        return true;
      }
      if (adMediaPeriodId == null) {
        // Current or future content is not finished.
        return false;
      }
      int eventPeriodIndex = eventTime.timeline.getIndexOfPeriod(eventTime.mediaPeriodId.periodUid);
      int adPeriodIndex = eventTime.timeline.getIndexOfPeriod(adMediaPeriodId.periodUid);
      if (eventTime.mediaPeriodId.windowSequenceNumber < adMediaPeriodId.windowSequenceNumber
          || eventPeriodIndex < adPeriodIndex) {
        // Ads in future windows or periods are not finished.
        return false;
      }
      if (eventPeriodIndex > adPeriodIndex) {
        // Ads in past periods are finished.
        return true;
      }
      if (eventTime.mediaPeriodId.isAd()) {
        int eventAdGroup = eventTime.mediaPeriodId.adGroupIndex;
        int eventAdIndex = eventTime.mediaPeriodId.adIndexInAdGroup;
        // Finished if event is for an ad after this one in the same period.
        return eventAdGroup > adMediaPeriodId.adGroupIndex
            || (eventAdGroup == adMediaPeriodId.adGroupIndex
                && eventAdIndex > adMediaPeriodId.adIndexInAdGroup);
      } else {
        // Finished if the event is for content after this ad.
        return eventTime.mediaPeriodId.nextAdGroupIndex == C.INDEX_UNSET
            || eventTime.mediaPeriodId.nextAdGroupIndex > adMediaPeriodId.adGroupIndex;
      }
    }

    /**
     * Maps {@code windowIndex} in {@code oldTimeline} to the index of the same window in {@code
     * newTimeline} by matching period uids, or {@link C#INDEX_UNSET} if no period survived.
     */
    private int resolveWindowIndexToNewTimeline(
        Timeline oldTimeline, Timeline newTimeline, int windowIndex) {
      if (windowIndex >= oldTimeline.getWindowCount()) {
        // Index was never valid in the old timeline; keep it only if the new timeline has it.
        return windowIndex < newTimeline.getWindowCount() ? windowIndex : C.INDEX_UNSET;
      }
      oldTimeline.getWindow(windowIndex, window);
      for (int periodIndex = window.firstPeriodIndex;
          periodIndex <= window.lastPeriodIndex;
          periodIndex++) {
        Object periodUid = oldTimeline.getUidOfPeriod(periodIndex);
        int newPeriodIndex = newTimeline.getIndexOfPeriod(periodUid);
        if (newPeriodIndex != C.INDEX_UNSET) {
          return newTimeline.getPeriod(newPeriodIndex, period).windowIndex;
        }
      }
      return C.INDEX_UNSET;
    }
  }
}

Просмотреть файл

@ -0,0 +1,120 @@
/*
* Copyright (C) 2019 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.mozilla.thirdparty.com.google.android.exoplayer2.analytics;
import org.mozilla.thirdparty.com.google.android.exoplayer2.Player.DiscontinuityReason;
import org.mozilla.thirdparty.com.google.android.exoplayer2.Timeline;
import org.mozilla.thirdparty.com.google.android.exoplayer2.analytics.AnalyticsListener.EventTime;
import org.mozilla.thirdparty.com.google.android.exoplayer2.source.MediaSource.MediaPeriodId;
/**
* Manager for active playback sessions.
*
* <p>The manager keeps track of the association between window index and/or media period id to
* session identifier.
*/
public interface PlaybackSessionManager {

  /** A listener for session updates. */
  interface Listener {

    /**
     * Called when a new session is created as a result of {@link #updateSessions(EventTime)}.
     *
     * @param eventTime The {@link EventTime} at which the session is created.
     * @param sessionId The identifier of the new session.
     */
    void onSessionCreated(EventTime eventTime, String sessionId);

    /**
     * Called when a session becomes active, i.e. playing in the foreground.
     *
     * @param eventTime The {@link EventTime} at which the session becomes active.
     * @param sessionId The identifier of the session.
     */
    void onSessionActive(EventTime eventTime, String sessionId);

    /**
     * Called when a session is interrupted by ad playback.
     *
     * @param eventTime The {@link EventTime} at which the ad playback starts.
     * @param contentSessionId The session identifier of the content session.
     * @param adSessionId The identifier of the ad session.
     */
    void onAdPlaybackStarted(EventTime eventTime, String contentSessionId, String adSessionId);

    /**
     * Called when a session is permanently finished.
     *
     * @param eventTime The {@link EventTime} at which the session finished.
     * @param sessionId The identifier of the finished session.
     * @param automaticTransitionToNextPlayback Whether the session finished because of an automatic
     *     transition to the next playback item.
     */
    void onSessionFinished(
        EventTime eventTime, String sessionId, boolean automaticTransitionToNextPlayback);
  }

  /**
   * Sets the listener to be notified of session updates. Must be called before the session manager
   * is used.
   *
   * @param listener The {@link Listener} to be notified of session updates.
   */
  void setListener(Listener listener);

  /**
   * Returns the session identifier for the given media period id.
   *
   * <p>Note that this will reserve a new session identifier if it doesn't exist yet, but will not
   * call any {@link Listener} callbacks.
   *
   * @param timeline The timeline, {@code mediaPeriodId} is part of.
   * @param mediaPeriodId A {@link MediaPeriodId}.
   * @return The session identifier for the given media period id.
   */
  String getSessionForMediaPeriodId(Timeline timeline, MediaPeriodId mediaPeriodId);

  /**
   * Returns whether an event time belongs to a session.
   *
   * @param eventTime The {@link EventTime}.
   * @param sessionId A session identifier.
   * @return Whether the event belongs to the specified session.
   */
  boolean belongsToSession(EventTime eventTime, String sessionId);

  /**
   * Updates or creates sessions based on a player {@link EventTime}.
   *
   * @param eventTime The {@link EventTime}.
   */
  void updateSessions(EventTime eventTime);

  /**
   * Updates the session associations to a new timeline.
   *
   * @param eventTime The event time with the timeline change.
   */
  void handleTimelineUpdate(EventTime eventTime);

  /**
   * Handles a position discontinuity.
   *
   * @param eventTime The event time of the position discontinuity.
   * @param reason The {@link DiscontinuityReason}.
   */
  void handlePositionDiscontinuity(EventTime eventTime, @DiscontinuityReason int reason);
}

Просмотреть файл

@ -0,0 +1,980 @@
/*
* Copyright (C) 2019 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.mozilla.thirdparty.com.google.android.exoplayer2.analytics;
import android.os.SystemClock;
import android.util.Pair;
import androidx.annotation.IntDef;
import org.mozilla.thirdparty.com.google.android.exoplayer2.C;
import org.mozilla.thirdparty.com.google.android.exoplayer2.Format;
import org.mozilla.thirdparty.com.google.android.exoplayer2.analytics.AnalyticsListener.EventTime;
import java.lang.annotation.Documented;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
import java.util.Collections;
import java.util.List;
import org.checkerframework.checker.nullness.compatqual.NullableType;
/** Statistics about playbacks. */
public final class PlaybackStats {
/**
 * State of a playback. One of {@link #PLAYBACK_STATE_NOT_STARTED}, {@link
 * #PLAYBACK_STATE_JOINING_FOREGROUND}, {@link #PLAYBACK_STATE_JOINING_BACKGROUND}, {@link
 * #PLAYBACK_STATE_PLAYING}, {@link #PLAYBACK_STATE_PAUSED}, {@link #PLAYBACK_STATE_SEEKING},
 * {@link #PLAYBACK_STATE_BUFFERING}, {@link #PLAYBACK_STATE_PAUSED_BUFFERING}, {@link
 * #PLAYBACK_STATE_SEEK_BUFFERING}, {@link #PLAYBACK_STATE_SUPPRESSED}, {@link
 * #PLAYBACK_STATE_SUPPRESSED_BUFFERING}, {@link #PLAYBACK_STATE_ENDED}, {@link
 * #PLAYBACK_STATE_STOPPED}, {@link #PLAYBACK_STATE_FAILED}, {@link
 * #PLAYBACK_STATE_INTERRUPTED_BY_AD} or {@link #PLAYBACK_STATE_ABANDONED}.
 */
// SOURCE retention: this @IntDef exists only for compile-time checking of state ints and leaves
// no trace in the compiled class files.
@Documented
@Retention(RetentionPolicy.SOURCE)
@Target({ElementType.TYPE_PARAMETER, ElementType.TYPE_USE})
@IntDef({
  PLAYBACK_STATE_NOT_STARTED,
  PLAYBACK_STATE_JOINING_BACKGROUND,
  PLAYBACK_STATE_JOINING_FOREGROUND,
  PLAYBACK_STATE_PLAYING,
  PLAYBACK_STATE_PAUSED,
  PLAYBACK_STATE_SEEKING,
  PLAYBACK_STATE_BUFFERING,
  PLAYBACK_STATE_PAUSED_BUFFERING,
  PLAYBACK_STATE_SEEK_BUFFERING,
  PLAYBACK_STATE_SUPPRESSED,
  PLAYBACK_STATE_SUPPRESSED_BUFFERING,
  PLAYBACK_STATE_ENDED,
  PLAYBACK_STATE_STOPPED,
  PLAYBACK_STATE_FAILED,
  PLAYBACK_STATE_INTERRUPTED_BY_AD,
  PLAYBACK_STATE_ABANDONED
})
@interface PlaybackState {}
// Playback states are contiguous ints in [0, PLAYBACK_STATE_COUNT) so that they can be used as
// array indices (e.g. into playbackStateDurationsMs in merge()).
/** Playback has not started (initial state). */
public static final int PLAYBACK_STATE_NOT_STARTED = 0;
/** Playback is buffering in the background for initial playback start. */
public static final int PLAYBACK_STATE_JOINING_BACKGROUND = 1;
/** Playback is buffering in the foreground for initial playback start. */
public static final int PLAYBACK_STATE_JOINING_FOREGROUND = 2;
/** Playback is actively playing. */
public static final int PLAYBACK_STATE_PLAYING = 3;
/** Playback is paused but ready to play. */
public static final int PLAYBACK_STATE_PAUSED = 4;
/** Playback is handling a seek. */
public static final int PLAYBACK_STATE_SEEKING = 5;
/** Playback is buffering to resume active playback. */
public static final int PLAYBACK_STATE_BUFFERING = 6;
/** Playback is buffering while paused. */
public static final int PLAYBACK_STATE_PAUSED_BUFFERING = 7;
/** Playback is buffering after a seek. */
public static final int PLAYBACK_STATE_SEEK_BUFFERING = 8;
/** Playback is suppressed (e.g. due to audio focus loss). */
public static final int PLAYBACK_STATE_SUPPRESSED = 9;
/** Playback is suppressed (e.g. due to audio focus loss) while buffering to resume a playback. */
public static final int PLAYBACK_STATE_SUPPRESSED_BUFFERING = 10;
/** Playback has reached the end of the media. */
public static final int PLAYBACK_STATE_ENDED = 11;
/** Playback is stopped and can be restarted. */
public static final int PLAYBACK_STATE_STOPPED = 12;
/** Playback is stopped due a fatal error and can be retried. */
public static final int PLAYBACK_STATE_FAILED = 13;
/** Playback is interrupted by an ad. */
public static final int PLAYBACK_STATE_INTERRUPTED_BY_AD = 14;
/** Playback is abandoned before reaching the end of the media. */
public static final int PLAYBACK_STATE_ABANDONED = 15;
/** Total number of playback states. */
/* package */ static final int PLAYBACK_STATE_COUNT = 16;

/** Empty playback stats. Produced by merging zero stats, so every count and duration is zero. */
public static final PlaybackStats EMPTY = merge(/* nothing */ );
/**
 * Returns the combined {@link PlaybackStats} for all input {@link PlaybackStats}.
 *
 * <p>Note that the full history of events is not kept as the history only makes sense in the
 * context of a single playback.
 *
 * @param playbackStats Array of {@link PlaybackStats} to combine.
 * @return The combined {@link PlaybackStats}.
 */
public static PlaybackStats merge(PlaybackStats... playbackStats) {
  // Accumulators. Values documented as C.TIME_UNSET / C.LENGTH_UNSET start unset and only become
  // set once at least one input provides a set value; plain totals start at zero.
  int playbackCount = 0;
  long[] playbackStateDurationsMs = new long[PLAYBACK_STATE_COUNT];
  long firstReportedTimeMs = C.TIME_UNSET;
  int foregroundPlaybackCount = 0;
  int abandonedBeforeReadyCount = 0;
  int endedCount = 0;
  int backgroundJoiningCount = 0;
  long totalValidJoinTimeMs = C.TIME_UNSET;
  int validJoinTimeCount = 0;
  int totalPauseCount = 0;
  int totalPauseBufferCount = 0;
  int totalSeekCount = 0;
  int totalRebufferCount = 0;
  long maxRebufferTimeMs = C.TIME_UNSET;
  int adPlaybackCount = 0;
  long totalVideoFormatHeightTimeMs = 0;
  long totalVideoFormatHeightTimeProduct = 0;
  long totalVideoFormatBitrateTimeMs = 0;
  long totalVideoFormatBitrateTimeProduct = 0;
  long totalAudioFormatTimeMs = 0;
  long totalAudioFormatBitrateTimeProduct = 0;
  int initialVideoFormatHeightCount = 0;
  int initialVideoFormatBitrateCount = 0;
  int totalInitialVideoFormatHeight = C.LENGTH_UNSET;
  long totalInitialVideoFormatBitrate = C.LENGTH_UNSET;
  int initialAudioFormatBitrateCount = 0;
  long totalInitialAudioFormatBitrate = C.LENGTH_UNSET;
  long totalBandwidthTimeMs = 0;
  long totalBandwidthBytes = 0;
  long totalDroppedFrames = 0;
  long totalAudioUnderruns = 0;
  int fatalErrorPlaybackCount = 0;
  int fatalErrorCount = 0;
  int nonFatalErrorCount = 0;
  for (PlaybackStats stats : playbackStats) {
    playbackCount += stats.playbackCount;
    // Per-state durations are summed index-by-index.
    for (int i = 0; i < PLAYBACK_STATE_COUNT; i++) {
      playbackStateDurationsMs[i] += stats.playbackStateDurationsMs[i];
    }
    // Earliest set first-reported time wins.
    if (firstReportedTimeMs == C.TIME_UNSET) {
      firstReportedTimeMs = stats.firstReportedTimeMs;
    } else if (stats.firstReportedTimeMs != C.TIME_UNSET) {
      firstReportedTimeMs = Math.min(firstReportedTimeMs, stats.firstReportedTimeMs);
    }
    foregroundPlaybackCount += stats.foregroundPlaybackCount;
    abandonedBeforeReadyCount += stats.abandonedBeforeReadyCount;
    endedCount += stats.endedCount;
    backgroundJoiningCount += stats.backgroundJoiningCount;
    // Join times sum, but stay unset while no input has a valid join time.
    if (totalValidJoinTimeMs == C.TIME_UNSET) {
      totalValidJoinTimeMs = stats.totalValidJoinTimeMs;
    } else if (stats.totalValidJoinTimeMs != C.TIME_UNSET) {
      totalValidJoinTimeMs += stats.totalValidJoinTimeMs;
    }
    validJoinTimeCount += stats.validJoinTimeCount;
    totalPauseCount += stats.totalPauseCount;
    totalPauseBufferCount += stats.totalPauseBufferCount;
    totalSeekCount += stats.totalSeekCount;
    totalRebufferCount += stats.totalRebufferCount;
    // Maximum of the set rebuffer maxima.
    if (maxRebufferTimeMs == C.TIME_UNSET) {
      maxRebufferTimeMs = stats.maxRebufferTimeMs;
    } else if (stats.maxRebufferTimeMs != C.TIME_UNSET) {
      maxRebufferTimeMs = Math.max(maxRebufferTimeMs, stats.maxRebufferTimeMs);
    }
    adPlaybackCount += stats.adPlaybackCount;
    totalVideoFormatHeightTimeMs += stats.totalVideoFormatHeightTimeMs;
    totalVideoFormatHeightTimeProduct += stats.totalVideoFormatHeightTimeProduct;
    totalVideoFormatBitrateTimeMs += stats.totalVideoFormatBitrateTimeMs;
    totalVideoFormatBitrateTimeProduct += stats.totalVideoFormatBitrateTimeProduct;
    totalAudioFormatTimeMs += stats.totalAudioFormatTimeMs;
    totalAudioFormatBitrateTimeProduct += stats.totalAudioFormatBitrateTimeProduct;
    initialVideoFormatHeightCount += stats.initialVideoFormatHeightCount;
    initialVideoFormatBitrateCount += stats.initialVideoFormatBitrateCount;
    if (totalInitialVideoFormatHeight == C.LENGTH_UNSET) {
      totalInitialVideoFormatHeight = stats.totalInitialVideoFormatHeight;
    } else if (stats.totalInitialVideoFormatHeight != C.LENGTH_UNSET) {
      totalInitialVideoFormatHeight += stats.totalInitialVideoFormatHeight;
    }
    if (totalInitialVideoFormatBitrate == C.LENGTH_UNSET) {
      totalInitialVideoFormatBitrate = stats.totalInitialVideoFormatBitrate;
    } else if (stats.totalInitialVideoFormatBitrate != C.LENGTH_UNSET) {
      totalInitialVideoFormatBitrate += stats.totalInitialVideoFormatBitrate;
    }
    initialAudioFormatBitrateCount += stats.initialAudioFormatBitrateCount;
    if (totalInitialAudioFormatBitrate == C.LENGTH_UNSET) {
      totalInitialAudioFormatBitrate = stats.totalInitialAudioFormatBitrate;
    } else if (stats.totalInitialAudioFormatBitrate != C.LENGTH_UNSET) {
      totalInitialAudioFormatBitrate += stats.totalInitialAudioFormatBitrate;
    }
    totalBandwidthTimeMs += stats.totalBandwidthTimeMs;
    totalBandwidthBytes += stats.totalBandwidthBytes;
    totalDroppedFrames += stats.totalDroppedFrames;
    totalAudioUnderruns += stats.totalAudioUnderruns;
    fatalErrorPlaybackCount += stats.fatalErrorPlaybackCount;
    fatalErrorCount += stats.fatalErrorCount;
    nonFatalErrorCount += stats.nonFatalErrorCount;
  }
  // Histories are intentionally dropped: they are only meaningful for a single playback.
  return new PlaybackStats(
      playbackCount,
      playbackStateDurationsMs,
      /* playbackStateHistory */ Collections.emptyList(),
      /* mediaTimeHistory= */ Collections.emptyList(),
      firstReportedTimeMs,
      foregroundPlaybackCount,
      abandonedBeforeReadyCount,
      endedCount,
      backgroundJoiningCount,
      totalValidJoinTimeMs,
      validJoinTimeCount,
      totalPauseCount,
      totalPauseBufferCount,
      totalSeekCount,
      totalRebufferCount,
      maxRebufferTimeMs,
      adPlaybackCount,
      /* videoFormatHistory= */ Collections.emptyList(),
      /* audioFormatHistory= */ Collections.emptyList(),
      totalVideoFormatHeightTimeMs,
      totalVideoFormatHeightTimeProduct,
      totalVideoFormatBitrateTimeMs,
      totalVideoFormatBitrateTimeProduct,
      totalAudioFormatTimeMs,
      totalAudioFormatBitrateTimeProduct,
      initialVideoFormatHeightCount,
      initialVideoFormatBitrateCount,
      totalInitialVideoFormatHeight,
      totalInitialVideoFormatBitrate,
      initialAudioFormatBitrateCount,
      totalInitialAudioFormatBitrate,
      totalBandwidthTimeMs,
      totalBandwidthBytes,
      totalDroppedFrames,
      totalAudioUnderruns,
      fatalErrorPlaybackCount,
      fatalErrorCount,
      nonFatalErrorCount,
      /* fatalErrorHistory= */ Collections.emptyList(),
      /* nonFatalErrorHistory= */ Collections.emptyList());
}
/**
 * The number of individual playbacks for which these stats were collected. A merged instance (see
 * {@link #merge}) counts each input playback.
 */
public final int playbackCount;

// Playback state stats.

/**
 * The playback state history as ordered pairs of the {@link EventTime} at which a state became
 * active and the {@link PlaybackState}.
 */
public final List<Pair<EventTime, @PlaybackState Integer>> playbackStateHistory;
/**
 * The media time history as an ordered list of long[2] arrays with [0] being the realtime as
 * returned by {@code SystemClock.elapsedRealtime()} and [1] being the media time at this
 * realtime, in milliseconds.
 */
public final List<long[]> mediaTimeHistory;
/**
 * The elapsed real-time as returned by {@code SystemClock.elapsedRealtime()} of the first
 * reported playback event, or {@link C#TIME_UNSET} if no event has been reported.
 */
public final long firstReportedTimeMs;
/** The number of playbacks which were the active foreground playback at some point. */
public final int foregroundPlaybackCount;
/** The number of playbacks which were abandoned before they were ready to play. */
public final int abandonedBeforeReadyCount;
/** The number of playbacks which reached the ended state at least once. */
public final int endedCount;
/** The number of playbacks which were pre-buffered in the background. */
public final int backgroundJoiningCount;
/**
 * The total time spent joining the playback, in milliseconds, or {@link C#TIME_UNSET} if no valid
 * join time could be determined.
 *
 * <p>Note that this does not include background joining time. A join time may be invalid if the
 * playback never reached {@link #PLAYBACK_STATE_PLAYING} or {@link #PLAYBACK_STATE_PAUSED}, or
 * joining was interrupted by a seek, stop, or error state.
 */
public final long totalValidJoinTimeMs;
/**
 * The number of playbacks with a valid join time as documented in {@link #totalValidJoinTimeMs}.
 */
public final int validJoinTimeCount;
/** The total number of times a playback has been paused. */
public final int totalPauseCount;
/** The total number of times a playback has been paused while rebuffering. */
public final int totalPauseBufferCount;
/**
 * The total number of times a seek occurred. This includes seeks happening before playback
 * resumed after another seek.
 */
public final int totalSeekCount;
/**
 * The total number of times a rebuffer occurred. This excludes initial joining and buffering
 * after seek.
 */
public final int totalRebufferCount;
/**
 * The maximum time spent during a single rebuffer, in milliseconds, or {@link C#TIME_UNSET} if no
 * rebuffer occurred.
 */
public final long maxRebufferTimeMs;
/** The number of ad playbacks. */
public final int adPlaybackCount;

// Format stats.

/**
 * The video format history as ordered pairs of the {@link EventTime} at which a format started
 * being used and the {@link Format}. The {@link Format} may be null if no video format was used.
 */
public final List<Pair<EventTime, @NullableType Format>> videoFormatHistory;
/**
 * The audio format history as ordered pairs of the {@link EventTime} at which a format started
 * being used and the {@link Format}. The {@link Format} may be null if no audio format was used.
 */
public final List<Pair<EventTime, @NullableType Format>> audioFormatHistory;
/** The total media time for which video format height data is available, in milliseconds. */
public final long totalVideoFormatHeightTimeMs;
/**
* The accumulated sum of all video format heights, in pixels, times the time the format was used
* for playback, in milliseconds.
*/
public final long totalVideoFormatHeightTimeProduct;
/** The total media time for which video format bitrate data is available, in milliseconds. */
public final long totalVideoFormatBitrateTimeMs;
/**
* The accumulated sum of all video format bitrates, in bits per second, times the time the format
* was used for playback, in milliseconds.
*/
public final long totalVideoFormatBitrateTimeProduct;
/** The total media time for which audio format data is available, in milliseconds. */
public final long totalAudioFormatTimeMs;
/**
* The accumulated sum of all audio format bitrates, in bits per second, times the time the format
* was used for playback, in milliseconds.
*/
public final long totalAudioFormatBitrateTimeProduct;
/** The number of playbacks with initial video format height data. */
public final int initialVideoFormatHeightCount;
/** The number of playbacks with initial video format bitrate data. */
public final int initialVideoFormatBitrateCount;
/**
* The total initial video format height for all playbacks, in pixels, or {@link C#LENGTH_UNSET}
* if no initial video format data is available.
*/
public final int totalInitialVideoFormatHeight;
/**
* The total initial video format bitrate for all playbacks, in bits per second, or {@link
* C#LENGTH_UNSET} if no initial video format data is available.
*/
public final long totalInitialVideoFormatBitrate;
/** The number of playbacks with initial audio format bitrate data. */
public final int initialAudioFormatBitrateCount;
/**
* The total initial audio format bitrate for all playbacks, in bits per second, or {@link
* C#LENGTH_UNSET} if no initial audio format data is available.
*/
public final long totalInitialAudioFormatBitrate;
// Bandwidth stats.
/** The total time for which bandwidth measurement data is available, in milliseconds. */
public final long totalBandwidthTimeMs;
/** The total bytes transferred during {@link #totalBandwidthTimeMs}. */
public final long totalBandwidthBytes;
// Renderer quality stats.
/** The total number of dropped video frames. */
public final long totalDroppedFrames;
/** The total number of audio underruns. */
public final long totalAudioUnderruns;
// Error stats.
/**
* The total number of playback with at least one fatal error. Errors are fatal if playback
* stopped due to this error.
*/
public final int fatalErrorPlaybackCount;
/** The total number of fatal errors. Errors are fatal if playback stopped due to this error. */
public final int fatalErrorCount;
/**
* The total number of non-fatal errors. Error are non-fatal if playback can recover from the
* error without stopping.
*/
public final int nonFatalErrorCount;
/**
* The history of fatal errors as ordered pairs of the {@link EventTime} at which an error
* occurred and the error. Errors are fatal if playback stopped due to this error.
*/
public final List<Pair<EventTime, Exception>> fatalErrorHistory;
/**
* The history of non-fatal errors as ordered pairs of the {@link EventTime} at which an error
* occurred and the error. Error are non-fatal if playback can recover from the error without
* stopping.
*/
public final List<Pair<EventTime, Exception>> nonFatalErrorHistory;
private final long[] playbackStateDurationsMs;
/**
* Creates a stats instance from pre-aggregated values. Each argument corresponds directly to the
* public field of the same name; all list-valued arguments are wrapped in unmodifiable views, so
* callers must not expect to mutate them afterwards.
*/
/* package */ PlaybackStats(
int playbackCount,
long[] playbackStateDurationsMs,
List<Pair<EventTime, @PlaybackState Integer>> playbackStateHistory,
List<long[]> mediaTimeHistory,
long firstReportedTimeMs,
int foregroundPlaybackCount,
int abandonedBeforeReadyCount,
int endedCount,
int backgroundJoiningCount,
long totalValidJoinTimeMs,
int validJoinTimeCount,
int totalPauseCount,
int totalPauseBufferCount,
int totalSeekCount,
int totalRebufferCount,
long maxRebufferTimeMs,
int adPlaybackCount,
List<Pair<EventTime, @NullableType Format>> videoFormatHistory,
List<Pair<EventTime, @NullableType Format>> audioFormatHistory,
long totalVideoFormatHeightTimeMs,
long totalVideoFormatHeightTimeProduct,
long totalVideoFormatBitrateTimeMs,
long totalVideoFormatBitrateTimeProduct,
long totalAudioFormatTimeMs,
long totalAudioFormatBitrateTimeProduct,
int initialVideoFormatHeightCount,
int initialVideoFormatBitrateCount,
int totalInitialVideoFormatHeight,
long totalInitialVideoFormatBitrate,
int initialAudioFormatBitrateCount,
long totalInitialAudioFormatBitrate,
long totalBandwidthTimeMs,
long totalBandwidthBytes,
long totalDroppedFrames,
long totalAudioUnderruns,
int fatalErrorPlaybackCount,
int fatalErrorCount,
int nonFatalErrorCount,
List<Pair<EventTime, Exception>> fatalErrorHistory,
List<Pair<EventTime, Exception>> nonFatalErrorHistory) {
this.playbackCount = playbackCount;
this.playbackStateDurationsMs = playbackStateDurationsMs;
this.playbackStateHistory = Collections.unmodifiableList(playbackStateHistory);
this.mediaTimeHistory = Collections.unmodifiableList(mediaTimeHistory);
this.firstReportedTimeMs = firstReportedTimeMs;
this.foregroundPlaybackCount = foregroundPlaybackCount;
this.abandonedBeforeReadyCount = abandonedBeforeReadyCount;
this.endedCount = endedCount;
this.backgroundJoiningCount = backgroundJoiningCount;
this.totalValidJoinTimeMs = totalValidJoinTimeMs;
this.validJoinTimeCount = validJoinTimeCount;
this.totalPauseCount = totalPauseCount;
this.totalPauseBufferCount = totalPauseBufferCount;
this.totalSeekCount = totalSeekCount;
this.totalRebufferCount = totalRebufferCount;
this.maxRebufferTimeMs = maxRebufferTimeMs;
this.adPlaybackCount = adPlaybackCount;
this.videoFormatHistory = Collections.unmodifiableList(videoFormatHistory);
this.audioFormatHistory = Collections.unmodifiableList(audioFormatHistory);
this.totalVideoFormatHeightTimeMs = totalVideoFormatHeightTimeMs;
this.totalVideoFormatHeightTimeProduct = totalVideoFormatHeightTimeProduct;
this.totalVideoFormatBitrateTimeMs = totalVideoFormatBitrateTimeMs;
this.totalVideoFormatBitrateTimeProduct = totalVideoFormatBitrateTimeProduct;
this.totalAudioFormatTimeMs = totalAudioFormatTimeMs;
this.totalAudioFormatBitrateTimeProduct = totalAudioFormatBitrateTimeProduct;
this.initialVideoFormatHeightCount = initialVideoFormatHeightCount;
this.initialVideoFormatBitrateCount = initialVideoFormatBitrateCount;
this.totalInitialVideoFormatHeight = totalInitialVideoFormatHeight;
this.totalInitialVideoFormatBitrate = totalInitialVideoFormatBitrate;
this.initialAudioFormatBitrateCount = initialAudioFormatBitrateCount;
this.totalInitialAudioFormatBitrate = totalInitialAudioFormatBitrate;
this.totalBandwidthTimeMs = totalBandwidthTimeMs;
this.totalBandwidthBytes = totalBandwidthBytes;
this.totalDroppedFrames = totalDroppedFrames;
this.totalAudioUnderruns = totalAudioUnderruns;
this.fatalErrorPlaybackCount = fatalErrorPlaybackCount;
this.fatalErrorCount = fatalErrorCount;
this.nonFatalErrorCount = nonFatalErrorCount;
this.fatalErrorHistory = Collections.unmodifiableList(fatalErrorHistory);
this.nonFatalErrorHistory = Collections.unmodifiableList(nonFatalErrorHistory);
}
/**
* Returns the total time spent in a given {@link PlaybackState}, in milliseconds.
*
* @param playbackState A {@link PlaybackState}.
* @return The total time spent in the given playback state, in milliseconds.
*/
public long getPlaybackStateDurationMs(@PlaybackState int playbackState) {
return playbackStateDurationsMs[playbackState];
}
/**
 * Returns the {@link PlaybackState} that was active at the given time.
 *
 * @param realtimeMs The time as returned by {@link SystemClock#elapsedRealtime()}.
 * @return The {@link PlaybackState} at that time, or {@link #PLAYBACK_STATE_NOT_STARTED} if the
 *     given time is before the first known playback state in the history.
 */
public @PlaybackState int getPlaybackStateAtTime(long realtimeMs) {
  // Walk the chronologically ordered history, remembering the last state that became active at
  // or before realtimeMs; stop at the first entry that lies strictly in the future.
  @PlaybackState int lastState = PLAYBACK_STATE_NOT_STARTED;
  for (int i = 0; i < playbackStateHistory.size(); i++) {
    Pair<EventTime, @PlaybackState Integer> entry = playbackStateHistory.get(i);
    if (entry.first.realtimeMs > realtimeMs) {
      break;
    }
    lastState = entry.second;
  }
  return lastState;
}
/**
 * Returns the estimated media time at the given realtime, in milliseconds, or {@link
 * C#TIME_UNSET} if the media time history is unknown.
 *
 * @param realtimeMs The realtime as returned by {@link SystemClock#elapsedRealtime()}.
 * @return The estimated media time in milliseconds at this realtime, {@link C#TIME_UNSET} if no
 *     estimate can be given.
 */
public long getMediaTimeMsAtRealtimeMs(long realtimeMs) {
  if (mediaTimeHistory.isEmpty()) {
    return C.TIME_UNSET;
  }
  // Locate the first history entry whose realtime lies strictly after the requested time.
  int upperIndex = 0;
  while (upperIndex < mediaTimeHistory.size()
      && mediaTimeHistory.get(upperIndex)[0] <= realtimeMs) {
    upperIndex++;
  }
  if (upperIndex == 0) {
    // Requested time precedes the whole history; clamp to the earliest known media time.
    return mediaTimeHistory.get(0)[1];
  }
  if (upperIndex == mediaTimeHistory.size()) {
    // Requested time follows the whole history; clamp to the latest known media time.
    return mediaTimeHistory.get(mediaTimeHistory.size() - 1)[1];
  }
  // Linearly interpolate between the surrounding [realtime, mediaTime] samples.
  long[] before = mediaTimeHistory.get(upperIndex - 1);
  long[] after = mediaTimeHistory.get(upperIndex);
  long realtimeDurationMs = after[0] - before[0];
  if (realtimeDurationMs == 0) {
    return before[1];
  }
  float fraction = (float) (realtimeMs - before[0]) / realtimeDurationMs;
  return before[1] + (long) ((after[1] - before[1]) * fraction);
}
/**
* Returns the mean time spent joining the playback, in milliseconds, or {@link C#TIME_UNSET} if
* no valid join time is available. Only includes playbacks with valid join times as documented in
* {@link #totalValidJoinTimeMs}.
*/
public long getMeanJoinTimeMs() {
return validJoinTimeCount == 0 ? C.TIME_UNSET : totalValidJoinTimeMs / validJoinTimeCount;
}
/**
* Returns the total time spent joining the playback in foreground, in milliseconds. This does
* include invalid join times where the playback never reached {@link #PLAYBACK_STATE_PLAYING} or
* {@link #PLAYBACK_STATE_PAUSED}, or joining was interrupted by a seek, stop, or error state.
*/
public long getTotalJoinTimeMs() {
return getPlaybackStateDurationMs(PLAYBACK_STATE_JOINING_FOREGROUND);
}
/** Returns the total time spent actively playing, in milliseconds. */
public long getTotalPlayTimeMs() {
return getPlaybackStateDurationMs(PLAYBACK_STATE_PLAYING);
}
/**
* Returns the mean time spent actively playing per foreground playback, in milliseconds, or
* {@link C#TIME_UNSET} if no playback has been in foreground.
*/
public long getMeanPlayTimeMs() {
return foregroundPlaybackCount == 0
? C.TIME_UNSET
: getTotalPlayTimeMs() / foregroundPlaybackCount;
}
/** Returns the total time spent in a paused state, in milliseconds. */
public long getTotalPausedTimeMs() {
return getPlaybackStateDurationMs(PLAYBACK_STATE_PAUSED)
+ getPlaybackStateDurationMs(PLAYBACK_STATE_PAUSED_BUFFERING);
}
/**
* Returns the mean time spent in a paused state per foreground playback, in milliseconds, or
* {@link C#TIME_UNSET} if no playback has been in foreground.
*/
public long getMeanPausedTimeMs() {
return foregroundPlaybackCount == 0
? C.TIME_UNSET
: getTotalPausedTimeMs() / foregroundPlaybackCount;
}
/**
* Returns the total time spent rebuffering, in milliseconds. This excludes initial join times,
* buffer times after a seek, and buffering while paused.
*/
public long getTotalRebufferTimeMs() {
return getPlaybackStateDurationMs(PLAYBACK_STATE_BUFFERING);
}
/**
* Returns the mean time spent rebuffering per foreground playback, in milliseconds, or {@link
* C#TIME_UNSET} if no playback has been in foreground. This excludes initial join times, buffer
* times after a seek, and buffering while paused.
*/
public long getMeanRebufferTimeMs() {
return foregroundPlaybackCount == 0
? C.TIME_UNSET
: getTotalRebufferTimeMs() / foregroundPlaybackCount;
}
/**
* Returns the mean time spent during a single rebuffer, in milliseconds, or {@link C#TIME_UNSET}
* if no rebuffer was recorded. This excludes initial join times and buffer times after a seek.
*/
public long getMeanSingleRebufferTimeMs() {
// Unlike getTotalRebufferTimeMs(), the numerator here also counts buffering while paused.
return totalRebufferCount == 0
? C.TIME_UNSET
: (getPlaybackStateDurationMs(PLAYBACK_STATE_BUFFERING)
+ getPlaybackStateDurationMs(PLAYBACK_STATE_PAUSED_BUFFERING))
/ totalRebufferCount;
}
/**
* Returns the total time spent from the start of a seek until playback is ready again, in
* milliseconds.
*/
public long getTotalSeekTimeMs() {
return getPlaybackStateDurationMs(PLAYBACK_STATE_SEEKING)
+ getPlaybackStateDurationMs(PLAYBACK_STATE_SEEK_BUFFERING);
}
/**
* Returns the mean time spent per foreground playback from the start of a seek until playback is
* ready again, in milliseconds, or {@link C#TIME_UNSET} if no playback has been in foreground.
*/
public long getMeanSeekTimeMs() {
return foregroundPlaybackCount == 0
? C.TIME_UNSET
: getTotalSeekTimeMs() / foregroundPlaybackCount;
}
/**
* Returns the mean time spent from the start of a single seek until playback is ready again, in
* milliseconds, or {@link C#TIME_UNSET} if no seek occurred.
*/
public long getMeanSingleSeekTimeMs() {
return totalSeekCount == 0 ? C.TIME_UNSET : getTotalSeekTimeMs() / totalSeekCount;
}
/**
* Returns the total time spent actively waiting for playback, in milliseconds. This includes all
* join times, rebuffer times and seek times, but excludes times without user intention to play,
* e.g. all paused states.
*/
public long getTotalWaitTimeMs() {
return getPlaybackStateDurationMs(PLAYBACK_STATE_JOINING_FOREGROUND)
+ getPlaybackStateDurationMs(PLAYBACK_STATE_BUFFERING)
+ getPlaybackStateDurationMs(PLAYBACK_STATE_SEEKING)
+ getPlaybackStateDurationMs(PLAYBACK_STATE_SEEK_BUFFERING);
}
/**
* Returns the mean time spent actively waiting for playback per foreground playback, in
* milliseconds, or {@link C#TIME_UNSET} if no playback has been in foreground. This includes all
* join times, rebuffer times and seek times, but excludes times without user intention to play,
* e.g. all paused states.
*/
public long getMeanWaitTimeMs() {
return foregroundPlaybackCount == 0
? C.TIME_UNSET
: getTotalWaitTimeMs() / foregroundPlaybackCount;
}
/** Returns the total time spent playing or actively waiting for playback, in milliseconds. */
public long getTotalPlayAndWaitTimeMs() {
return getTotalPlayTimeMs() + getTotalWaitTimeMs();
}
/**
* Returns the mean time spent playing or actively waiting for playback per foreground playback,
* in milliseconds, or {@link C#TIME_UNSET} if no playback has been in foreground.
*/
public long getMeanPlayAndWaitTimeMs() {
return foregroundPlaybackCount == 0
? C.TIME_UNSET
: getTotalPlayAndWaitTimeMs() / foregroundPlaybackCount;
}
/** Returns the total time covered by any playback state, in milliseconds. */
public long getTotalElapsedTimeMs() {
  // Sum the recorded duration of every playback state.
  long elapsedMs = 0;
  int state = 0;
  while (state < PLAYBACK_STATE_COUNT) {
    elapsedMs += playbackStateDurationsMs[state];
    state++;
  }
  return elapsedMs;
}
/**
 * Returns the mean time covered by any playback state per playback, in milliseconds, or {@link
 * C#TIME_UNSET} if no playback was recorded.
 */
public long getMeanElapsedTimeMs() {
  if (playbackCount == 0) {
    return C.TIME_UNSET;
  }
  return getTotalElapsedTimeMs() / playbackCount;
}
/**
 * Returns the ratio of foreground playbacks which were abandoned before they were ready to play,
 * or {@code 0.0} if no playback has been in foreground.
 */
public float getAbandonedBeforeReadyRatio() {
  if (foregroundPlaybackCount == 0) {
    return 0f;
  }
  // Background-only playbacks never become ready, so subtract them from the abandoned total to
  // count only foreground abandonments.
  int foregroundAbandonedBeforeReady =
      abandonedBeforeReadyCount - (playbackCount - foregroundPlaybackCount);
  return (float) foregroundAbandonedBeforeReady / foregroundPlaybackCount;
}
/**
* Returns the ratio of foreground playbacks which reached the ended state at least once, or
* {@code 0.0} if no playback has been in foreground.
*/
public float getEndedRatio() {
return foregroundPlaybackCount == 0 ? 0f : (float) endedCount / foregroundPlaybackCount;
}
/**
* Returns the mean number of times a playback has been paused per foreground playback, or {@code
* 0.0} if no playback has been in foreground.
*/
public float getMeanPauseCount() {
return foregroundPlaybackCount == 0 ? 0f : (float) totalPauseCount / foregroundPlaybackCount;
}
/**
* Returns the mean number of times a playback has been paused while rebuffering per foreground
* playback, or {@code 0.0} if no playback has been in foreground.
*/
public float getMeanPauseBufferCount() {
return foregroundPlaybackCount == 0
? 0f
: (float) totalPauseBufferCount / foregroundPlaybackCount;
}
/**
* Returns the mean number of times a seek occurred per foreground playback, or {@code 0.0} if no
* playback has been in foreground. This includes seeks happening before playback resumed after
* another seek.
*/
public float getMeanSeekCount() {
return foregroundPlaybackCount == 0 ? 0f : (float) totalSeekCount / foregroundPlaybackCount;
}
/**
* Returns the mean number of times a rebuffer occurred per foreground playback, or {@code 0.0} if
* no playback has been in foreground. This excludes initial joining and buffering after seek.
*/
public float getMeanRebufferCount() {
return foregroundPlaybackCount == 0 ? 0f : (float) totalRebufferCount / foregroundPlaybackCount;
}
/**
* Returns the ratio of wait times to the total time spent playing and waiting, or {@code 0.0} if
* no time was spent playing or waiting. This is equivalent to {@link #getTotalWaitTimeMs()} /
* {@link #getTotalPlayAndWaitTimeMs()} and also to {@link #getJoinTimeRatio()} + {@link
* #getRebufferTimeRatio()} + {@link #getSeekTimeRatio()}.
*/
public float getWaitTimeRatio() {
long playAndWaitTimeMs = getTotalPlayAndWaitTimeMs();
return playAndWaitTimeMs == 0 ? 0f : (float) getTotalWaitTimeMs() / playAndWaitTimeMs;
}
/**
* Returns the ratio of foreground join time to the total time spent playing and waiting, or
* {@code 0.0} if no time was spent playing or waiting. This is equivalent to {@link
* #getTotalJoinTimeMs()} / {@link #getTotalPlayAndWaitTimeMs()}.
*/
public float getJoinTimeRatio() {
long playAndWaitTimeMs = getTotalPlayAndWaitTimeMs();
return playAndWaitTimeMs == 0 ? 0f : (float) getTotalJoinTimeMs() / playAndWaitTimeMs;
}
/**
* Returns the ratio of rebuffer time to the total time spent playing and waiting, or {@code 0.0}
* if no time was spent playing or waiting. This is equivalent to {@link
* #getTotalRebufferTimeMs()} / {@link #getTotalPlayAndWaitTimeMs()}.
*/
public float getRebufferTimeRatio() {
long playAndWaitTimeMs = getTotalPlayAndWaitTimeMs();
return playAndWaitTimeMs == 0 ? 0f : (float) getTotalRebufferTimeMs() / playAndWaitTimeMs;
}
/**
* Returns the ratio of seek time to the total time spent playing and waiting, or {@code 0.0} if
* no time was spent playing or waiting. This is equivalent to {@link #getTotalSeekTimeMs()} /
* {@link #getTotalPlayAndWaitTimeMs()}.
*/
public float getSeekTimeRatio() {
long playAndWaitTimeMs = getTotalPlayAndWaitTimeMs();
return playAndWaitTimeMs == 0 ? 0f : (float) getTotalSeekTimeMs() / playAndWaitTimeMs;
}
/**
* Returns the rate of rebuffer events, in rebuffers per play time second, or {@code 0.0} if no
* time was spent playing. This is equivalent to 1.0 / {@link #getMeanTimeBetweenRebuffers()}.
*/
public float getRebufferRate() {
long playTimeMs = getTotalPlayTimeMs();
return playTimeMs == 0 ? 0f : 1000f * totalRebufferCount / playTimeMs;
}
/**
* Returns the mean play time between rebuffer events, in seconds. This is equivalent to 1.0 /
* {@link #getRebufferRate()}. Note that this may return {@link Float#POSITIVE_INFINITY}.
*/
public float getMeanTimeBetweenRebuffers() {
return 1f / getRebufferRate();
}
/**
* Returns the mean initial video format height, in pixels, or {@link C#LENGTH_UNSET} if no video
* format data is available.
*/
public int getMeanInitialVideoFormatHeight() {
return initialVideoFormatHeightCount == 0
? C.LENGTH_UNSET
: totalInitialVideoFormatHeight / initialVideoFormatHeightCount;
}
/**
* Returns the mean initial video format bitrate, in bits per second, or {@link C#LENGTH_UNSET} if
* no video format data is available.
*/
public int getMeanInitialVideoFormatBitrate() {
return initialVideoFormatBitrateCount == 0
? C.LENGTH_UNSET
: (int) (totalInitialVideoFormatBitrate / initialVideoFormatBitrateCount);
}
/**
* Returns the mean initial audio format bitrate, in bits per second, or {@link C#LENGTH_UNSET} if
* no audio format data is available.
*/
public int getMeanInitialAudioFormatBitrate() {
return initialAudioFormatBitrateCount == 0
? C.LENGTH_UNSET
: (int) (totalInitialAudioFormatBitrate / initialAudioFormatBitrateCount);
}
/**
* Returns the mean video format height, in pixels, or {@link C#LENGTH_UNSET} if no video format
* data is available. This is a weighted average taking the time the format was used for playback
* into account.
*/
public int getMeanVideoFormatHeight() {
return totalVideoFormatHeightTimeMs == 0
? C.LENGTH_UNSET
: (int) (totalVideoFormatHeightTimeProduct / totalVideoFormatHeightTimeMs);
}
/**
* Returns the mean video format bitrate, in bits per second, or {@link C#LENGTH_UNSET} if no
* video format data is available. This is a weighted average taking the time the format was used
* for playback into account.
*/
public int getMeanVideoFormatBitrate() {
return totalVideoFormatBitrateTimeMs == 0
? C.LENGTH_UNSET
: (int) (totalVideoFormatBitrateTimeProduct / totalVideoFormatBitrateTimeMs);
}
/**
* Returns the mean audio format bitrate, in bits per second, or {@link C#LENGTH_UNSET} if no
* audio format data is available. This is a weighted average taking the time the format was used
* for playback into account.
*/
public int getMeanAudioFormatBitrate() {
return totalAudioFormatTimeMs == 0
? C.LENGTH_UNSET
: (int) (totalAudioFormatBitrateTimeProduct / totalAudioFormatTimeMs);
}
/**
* Returns the mean network bandwidth based on transfer measurements, in bits per second, or
* {@link C#LENGTH_UNSET} if no transfer data is available.
*/
public int getMeanBandwidth() {
// bytes * 8000 / ms converts bytes-per-millisecond into bits per second.
return totalBandwidthTimeMs == 0
? C.LENGTH_UNSET
: (int) (totalBandwidthBytes * 8000 / totalBandwidthTimeMs);
}
/**
* Returns the mean rate at which video frames are dropped, in dropped frames per play time
* second, or {@code 0.0} if no time was spent playing.
*/
public float getDroppedFramesRate() {
long playTimeMs = getTotalPlayTimeMs();
return playTimeMs == 0 ? 0f : 1000f * totalDroppedFrames / playTimeMs;
}
/**
* Returns the mean rate at which audio underruns occurred, in underruns per play time second, or
* {@code 0.0} if no time was spent playing.
*/
public float getAudioUnderrunRate() {
long playTimeMs = getTotalPlayTimeMs();
return playTimeMs == 0 ? 0f : 1000f * totalAudioUnderruns / playTimeMs;
}
/**
* Returns the ratio of foreground playbacks which experienced fatal errors, or {@code 0.0} if no
* playback has been in foreground.
*/
public float getFatalErrorRatio() {
return foregroundPlaybackCount == 0
? 0f
: (float) fatalErrorPlaybackCount / foregroundPlaybackCount;
}
/**
* Returns the rate of fatal errors, in errors per play time second, or {@code 0.0} if no time was
* spent playing. This is equivalent to 1.0 / {@link #getMeanTimeBetweenFatalErrors()}.
*/
public float getFatalErrorRate() {
long playTimeMs = getTotalPlayTimeMs();
return playTimeMs == 0 ? 0f : 1000f * fatalErrorCount / playTimeMs;
}
/**
* Returns the mean play time between fatal errors, in seconds. This is equivalent to 1.0 / {@link
* #getFatalErrorRate()}. Note that this may return {@link Float#POSITIVE_INFINITY}.
*/
public float getMeanTimeBetweenFatalErrors() {
return 1f / getFatalErrorRate();
}
/**
* Returns the mean number of non-fatal errors per foreground playback, or {@code 0.0} if no
* playback has been in foreground.
*/
public float getMeanNonFatalErrorCount() {
return foregroundPlaybackCount == 0 ? 0f : (float) nonFatalErrorCount / foregroundPlaybackCount;
}
/**
* Returns the rate of non-fatal errors, in errors per play time second, or {@code 0.0} if no time
* was spent playing. This is equivalent to 1.0 / {@link #getMeanTimeBetweenNonFatalErrors()}.
*/
public float getNonFatalErrorRate() {
long playTimeMs = getTotalPlayTimeMs();
return playTimeMs == 0 ? 0f : 1000f * nonFatalErrorCount / playTimeMs;
}
/**
* Returns the mean play time between non-fatal errors, in seconds. This is equivalent to 1.0 /
* {@link #getNonFatalErrorRate()}. Note that this may return {@link Float#POSITIVE_INFINITY}.
*/
public float getMeanTimeBetweenNonFatalErrors() {
return 1f / getNonFatalErrorRate();
}
}

Разница между файлами не показана из-за своего большого размера Загрузить разницу

Просмотреть файл

@ -0,0 +1,19 @@
/*
* Copyright (C) 2019 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
@NonNullApi
package org.mozilla.thirdparty.com.google.android.exoplayer2.analytics;
import org.mozilla.thirdparty.com.google.android.exoplayer2.util.NonNullApi;

Просмотреть файл

@ -15,29 +15,56 @@
*/
package org.mozilla.thirdparty.com.google.android.exoplayer2.audio;
import androidx.annotation.IntDef;
import androidx.annotation.Nullable;
import org.mozilla.thirdparty.com.google.android.exoplayer2.C;
import org.mozilla.thirdparty.com.google.android.exoplayer2.Format;
import org.mozilla.thirdparty.com.google.android.exoplayer2.audio.Ac3Util.SyncFrameInfo.StreamType;
import org.mozilla.thirdparty.com.google.android.exoplayer2.drm.DrmInitData;
import org.mozilla.thirdparty.com.google.android.exoplayer2.util.MimeTypes;
import org.mozilla.thirdparty.com.google.android.exoplayer2.util.ParsableBitArray;
import org.mozilla.thirdparty.com.google.android.exoplayer2.util.ParsableByteArray;
import java.lang.annotation.Documented;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.nio.ByteBuffer;
/**
* Utility methods for parsing (E-)AC-3 syncframes, which are access units in (E-)AC-3 bitstreams.
* Utility methods for parsing Dolby TrueHD and (E-)AC-3 syncframes. (E-)AC-3 parsing follows the
* definition in ETSI TS 102 366 V1.4.1.
*/
public final class Ac3Util {
/**
* Holds sample format information as presented by a syncframe header.
*/
public static final class Ac3SyncFrameInfo {
/** Holds sample format information as presented by a syncframe header. */
public static final class SyncFrameInfo {
/**
* The sample mime type of the bitstream. One of {@link MimeTypes#AUDIO_AC3} and
* {@link MimeTypes#AUDIO_E_AC3}.
* AC3 stream types. See also E.1.3.1.1. One of {@link #STREAM_TYPE_UNDEFINED}, {@link
* #STREAM_TYPE_TYPE0}, {@link #STREAM_TYPE_TYPE1} or {@link #STREAM_TYPE_TYPE2}.
*/
public final String mimeType;
@Documented
@Retention(RetentionPolicy.SOURCE)
@IntDef({STREAM_TYPE_UNDEFINED, STREAM_TYPE_TYPE0, STREAM_TYPE_TYPE1, STREAM_TYPE_TYPE2})
public @interface StreamType {}
/** Undefined AC3 stream type. */
public static final int STREAM_TYPE_UNDEFINED = -1;
/** Type 0 AC3 stream type. */
public static final int STREAM_TYPE_TYPE0 = 0;
/** Type 1 AC3 stream type. */
public static final int STREAM_TYPE_TYPE1 = 1;
/** Type 2 AC3 stream type. */
public static final int STREAM_TYPE_TYPE2 = 2;
/**
* The sample mime type of the bitstream. One of {@link MimeTypes#AUDIO_AC3} and {@link
* MimeTypes#AUDIO_E_AC3}.
*/
@Nullable public final String mimeType;
/**
* The type of the stream if {@link #mimeType} is {@link MimeTypes#AUDIO_E_AC3}, or {@link
* #STREAM_TYPE_UNDEFINED} otherwise.
*/
public final @StreamType int streamType;
/**
* The audio sampling rate in Hz.
*/
@ -55,9 +82,15 @@ public final class Ac3Util {
*/
public final int sampleCount;
private Ac3SyncFrameInfo(String mimeType, int channelCount, int sampleRate, int frameSize,
private SyncFrameInfo(
@Nullable String mimeType,
@StreamType int streamType,
int channelCount,
int sampleRate,
int frameSize,
int sampleCount) {
this.mimeType = mimeType;
this.streamType = streamType;
this.channelCount = channelCount;
this.sampleRate = sampleRate;
this.frameSize = frameSize;
@ -66,13 +99,22 @@ public final class Ac3Util {
}
/**
* The number of samples to store in each output chunk when rechunking TrueHD streams. The number
* of samples extracted from the container corresponding to one syncframe must be an integer
* multiple of this value.
*/
public static final int TRUEHD_RECHUNK_SAMPLE_COUNT = 16;
/**
* The number of bytes that must be parsed from a TrueHD syncframe to calculate the sample count.
*/
public static final int TRUEHD_SYNCFRAME_PREFIX_LENGTH = 10;
/**
* The number of new samples per (E-)AC-3 audio block.
*/
private static final int AUDIO_SAMPLES_PER_AUDIO_BLOCK = 256;
/**
* Each syncframe has 6 blocks that provide 256 new audio samples. See ETSI TS 102 366 4.1.
*/
/** Each syncframe has 6 blocks that provide 256 new audio samples. See subsection 4.1. */
private static final int AC3_SYNCFRAME_AUDIO_SAMPLE_COUNT = 6 * AUDIO_SAMPLES_PER_AUDIO_BLOCK;
/**
* Number of audio blocks per E-AC-3 syncframe, indexed by numblkscod.
@ -90,29 +132,30 @@ public final class Ac3Util {
* Channel counts, indexed by acmod.
*/
private static final int[] CHANNEL_COUNT_BY_ACMOD = new int[] {2, 1, 2, 3, 3, 4, 4, 5};
/**
* Nominal bitrates in kbps, indexed by frmsizecod / 2. (See ETSI TS 102 366 table 4.13.)
*/
private static final int[] BITRATE_BY_HALF_FRMSIZECOD = new int[] {32, 40, 48, 56, 64, 80, 96,
112, 128, 160, 192, 224, 256, 320, 384, 448, 512, 576, 640};
/**
* 16-bit words per syncframe, indexed by frmsizecod / 2. (See ETSI TS 102 366 table 4.13.)
*/
private static final int[] SYNCFRAME_SIZE_WORDS_BY_HALF_FRMSIZECOD_44_1 = new int[] {69, 87, 104,
121, 139, 174, 208, 243, 278, 348, 417, 487, 557, 696, 835, 975, 1114, 1253, 1393};
/** Nominal bitrates in kbps, indexed by frmsizecod / 2. (See table 4.13.) */
private static final int[] BITRATE_BY_HALF_FRMSIZECOD =
new int[] {
32, 40, 48, 56, 64, 80, 96, 112, 128, 160, 192, 224, 256, 320, 384, 448, 512, 576, 640
};
/** 16-bit words per syncframe, indexed by frmsizecod / 2. (See table 4.13.) */
private static final int[] SYNCFRAME_SIZE_WORDS_BY_HALF_FRMSIZECOD_44_1 =
new int[] {
69, 87, 104, 121, 139, 174, 208, 243, 278, 348, 417, 487, 557, 696, 835, 975, 1114, 1253,
1393
};
/**
* Returns the AC-3 format given {@code data} containing the AC3SpecificBox according to
* ETSI TS 102 366 Annex F. The reading position of {@code data} will be modified.
* Returns the AC-3 format given {@code data} containing the AC3SpecificBox according to Annex F.
* The reading position of {@code data} will be modified.
*
* @param data The AC3SpecificBox to parse.
* @param trackId The track identifier to set on the format, or null.
* @param trackId The track identifier to set on the format.
* @param language The language to set on the format.
* @param drmInitData {@link DrmInitData} to be included in the format.
* @return The AC-3 format parsed from data in the header.
*/
public static Format parseAc3AnnexFFormat(ParsableByteArray data, String trackId,
String language, DrmInitData drmInitData) {
public static Format parseAc3AnnexFFormat(
ParsableByteArray data, String trackId, String language, @Nullable DrmInitData drmInitData) {
int fscod = (data.readUnsignedByte() & 0xC0) >> 6;
int sampleRate = SAMPLE_RATE_BY_FSCOD[fscod];
int nextByte = data.readUnsignedByte();
@ -120,26 +163,35 @@ public final class Ac3Util {
if ((nextByte & 0x04) != 0) { // lfeon
channelCount++;
}
return Format.createAudioSampleFormat(trackId, MimeTypes.AUDIO_AC3, null, Format.NO_VALUE,
Format.NO_VALUE, channelCount, sampleRate, null, drmInitData, 0, language);
return Format.createAudioSampleFormat(
trackId,
MimeTypes.AUDIO_AC3,
/* codecs= */ null,
Format.NO_VALUE,
Format.NO_VALUE,
channelCount,
sampleRate,
/* initializationData= */ null,
drmInitData,
/* selectionFlags= */ 0,
language);
}
/**
* Returns the E-AC-3 format given {@code data} containing the EC3SpecificBox according to
* ETSI TS 102 366 Annex F. The reading position of {@code data} will be modified.
* Returns the E-AC-3 format given {@code data} containing the EC3SpecificBox according to Annex
* F. The reading position of {@code data} will be modified.
*
* @param data The EC3SpecificBox to parse.
* @param trackId The track identifier to set on the format, or null.
* @param trackId The track identifier to set on the format.
* @param language The language to set on the format.
* @param drmInitData {@link DrmInitData} to be included in the format.
* @return The E-AC-3 format parsed from data in the header.
*/
public static Format parseEAc3AnnexFFormat(ParsableByteArray data, String trackId,
String language, DrmInitData drmInitData) {
public static Format parseEAc3AnnexFFormat(
ParsableByteArray data, String trackId, String language, @Nullable DrmInitData drmInitData) {
data.skipBytes(2); // data_rate, num_ind_sub
// Read only the first substream.
// TODO: Read later substreams?
// Read the first independent substream.
int fscod = (data.readUnsignedByte() & 0xC0) >> 6;
int sampleRate = SAMPLE_RATE_BY_FSCOD[fscod];
int nextByte = data.readUnsignedByte();
@ -147,8 +199,37 @@ public final class Ac3Util {
if ((nextByte & 0x01) != 0) { // lfeon
channelCount++;
}
return Format.createAudioSampleFormat(trackId, MimeTypes.AUDIO_E_AC3, null, Format.NO_VALUE,
Format.NO_VALUE, channelCount, sampleRate, null, drmInitData, 0, language);
// Read the first dependent substream.
nextByte = data.readUnsignedByte();
int numDepSub = ((nextByte & 0x1E) >> 1);
if (numDepSub > 0) {
int lowByteChanLoc = data.readUnsignedByte();
// Read Lrs/Rrs pair
// TODO: Read other channel configuration
if ((lowByteChanLoc & 0x02) != 0) {
channelCount += 2;
}
}
String mimeType = MimeTypes.AUDIO_E_AC3;
if (data.bytesLeft() > 0) {
nextByte = data.readUnsignedByte();
if ((nextByte & 0x01) != 0) { // flag_ec3_extension_type_a
mimeType = MimeTypes.AUDIO_E_AC3_JOC;
}
}
return Format.createAudioSampleFormat(
trackId,
mimeType,
/* codecs= */ null,
Format.NO_VALUE,
Format.NO_VALUE,
channelCount,
sampleRate,
/* initializationData= */ null,
drmInitData,
/* selectionFlags= */ 0,
language);
}
/**
@ -158,36 +239,206 @@ public final class Ac3Util {
* @param data The data to parse, positioned at the start of the syncframe.
* @return The (E-)AC-3 format data parsed from the header.
*/
public static Ac3SyncFrameInfo parseAc3SyncframeInfo(ParsableBitArray data) {
public static SyncFrameInfo parseAc3SyncframeInfo(ParsableBitArray data) {
int initialPosition = data.getPosition();
data.skipBits(40);
boolean isEac3 = data.readBits(5) == 16;
// Parse the bitstream ID for AC-3 and E-AC-3 (see subsections 4.3, E.1.2 and E.1.3.1.6).
boolean isEac3 = data.readBits(5) > 10;
data.setPosition(initialPosition);
String mimeType;
@Nullable String mimeType;
@StreamType int streamType = SyncFrameInfo.STREAM_TYPE_UNDEFINED;
int sampleRate;
int acmod;
int frameSize;
int sampleCount;
boolean lfeon;
int channelCount;
if (isEac3) {
mimeType = MimeTypes.AUDIO_E_AC3;
data.skipBits(16 + 2 + 3); // syncword, strmtype, substreamid
frameSize = (data.readBits(11) + 1) * 2;
// Subsection E.1.2.
data.skipBits(16); // syncword
switch (data.readBits(2)) { // strmtyp
case 0:
streamType = SyncFrameInfo.STREAM_TYPE_TYPE0;
break;
case 1:
streamType = SyncFrameInfo.STREAM_TYPE_TYPE1;
break;
case 2:
streamType = SyncFrameInfo.STREAM_TYPE_TYPE2;
break;
default:
streamType = SyncFrameInfo.STREAM_TYPE_UNDEFINED;
break;
}
data.skipBits(3); // substreamid
frameSize = (data.readBits(11) + 1) * 2; // See frmsiz in subsection E.1.3.1.3.
int fscod = data.readBits(2);
int audioBlocks;
int numblkscod;
if (fscod == 3) {
numblkscod = 3;
sampleRate = SAMPLE_RATE_BY_FSCOD2[data.readBits(2)];
audioBlocks = 6;
} else {
int numblkscod = data.readBits(2);
numblkscod = data.readBits(2);
audioBlocks = BLOCKS_PER_SYNCFRAME_BY_NUMBLKSCOD[numblkscod];
sampleRate = SAMPLE_RATE_BY_FSCOD[fscod];
}
sampleCount = AUDIO_SAMPLES_PER_AUDIO_BLOCK * audioBlocks;
acmod = data.readBits(3);
lfeon = data.readBit();
channelCount = CHANNEL_COUNT_BY_ACMOD[acmod] + (lfeon ? 1 : 0);
data.skipBits(5 + 5); // bsid, dialnorm
if (data.readBit()) { // compre
data.skipBits(8); // compr
}
if (acmod == 0) {
data.skipBits(5); // dialnorm2
if (data.readBit()) { // compr2e
data.skipBits(8); // compr2
}
}
if (streamType == SyncFrameInfo.STREAM_TYPE_TYPE1 && data.readBit()) { // chanmape
data.skipBits(16); // chanmap
}
if (data.readBit()) { // mixmdate
if (acmod > 2) {
data.skipBits(2); // dmixmod
}
if ((acmod & 0x01) != 0 && acmod > 2) {
data.skipBits(3 + 3); // ltrtcmixlev, lorocmixlev
}
if ((acmod & 0x04) != 0) {
data.skipBits(6); // ltrtsurmixlev, lorosurmixlev
}
if (lfeon && data.readBit()) { // lfemixlevcode
data.skipBits(5); // lfemixlevcod
}
if (streamType == SyncFrameInfo.STREAM_TYPE_TYPE0) {
if (data.readBit()) { // pgmscle
data.skipBits(6); //pgmscl
}
if (acmod == 0 && data.readBit()) { // pgmscl2e
data.skipBits(6); // pgmscl2
}
if (data.readBit()) { // extpgmscle
data.skipBits(6); // extpgmscl
}
int mixdef = data.readBits(2);
if (mixdef == 1) {
data.skipBits(1 + 1 + 3); // premixcmpsel, drcsrc, premixcmpscl
} else if (mixdef == 2) {
data.skipBits(12); // mixdata
} else if (mixdef == 3) {
int mixdeflen = data.readBits(5);
if (data.readBit()) { // mixdata2e
data.skipBits(1 + 1 + 3); // premixcmpsel, drcsrc, premixcmpscl
if (data.readBit()) { // extpgmlscle
data.skipBits(4); // extpgmlscl
}
if (data.readBit()) { // extpgmcscle
data.skipBits(4); // extpgmcscl
}
if (data.readBit()) { // extpgmrscle
data.skipBits(4); // extpgmrscl
}
if (data.readBit()) { // extpgmlsscle
data.skipBits(4); // extpgmlsscl
}
if (data.readBit()) { // extpgmrsscle
data.skipBits(4); // extpgmrsscl
}
if (data.readBit()) { // extpgmlfescle
data.skipBits(4); // extpgmlfescl
}
if (data.readBit()) { // dmixscle
data.skipBits(4); // dmixscl
}
if (data.readBit()) { // addche
if (data.readBit()) { // extpgmaux1scle
data.skipBits(4); // extpgmaux1scl
}
if (data.readBit()) { // extpgmaux2scle
data.skipBits(4); // extpgmaux2scl
}
}
}
if (data.readBit()) { // mixdata3e
data.skipBits(5); // spchdat
if (data.readBit()) { // addspchdate
data.skipBits(5 + 2); // spchdat1, spchan1att
if (data.readBit()) { // addspdat1e
data.skipBits(5 + 3); // spchdat2, spchan2att
}
}
}
data.skipBits(8 * (mixdeflen + 2)); // mixdata
data.byteAlign(); // mixdatafill
}
if (acmod < 2) {
if (data.readBit()) { // paninfoe
data.skipBits(8 + 6); // panmean, paninfo
}
if (acmod == 0) {
if (data.readBit()) { // paninfo2e
data.skipBits(8 + 6); // panmean2, paninfo2
}
}
}
if (data.readBit()) { // frmmixcfginfoe
if (numblkscod == 0) {
data.skipBits(5); // blkmixcfginfo[0]
} else {
for (int blk = 0; blk < audioBlocks; blk++) {
if (data.readBit()) { // blkmixcfginfoe
data.skipBits(5); // blkmixcfginfo[blk]
}
}
}
}
}
}
if (data.readBit()) { // infomdate
data.skipBits(3 + 1 + 1); // bsmod, copyrightb, origbs
if (acmod == 2) {
data.skipBits(2 + 2); // dsurmod, dheadphonmod
}
if (acmod >= 6) {
data.skipBits(2); // dsurexmod
}
if (data.readBit()) { // audioprodie
data.skipBits(5 + 2 + 1); // mixlevel, roomtyp, adconvtyp
}
if (acmod == 0 && data.readBit()) { // audioprodi2e
data.skipBits(5 + 2 + 1); // mixlevel2, roomtyp2, adconvtyp2
}
if (fscod < 3) {
data.skipBit(); // sourcefscod
}
}
if (streamType == SyncFrameInfo.STREAM_TYPE_TYPE0 && numblkscod != 3) {
data.skipBit(); // convsync
}
if (streamType == SyncFrameInfo.STREAM_TYPE_TYPE2
&& (numblkscod == 3 || data.readBit())) { // blkid
data.skipBits(6); // frmsizecod
}
mimeType = MimeTypes.AUDIO_E_AC3;
if (data.readBit()) { // addbsie
int addbsil = data.readBits(6);
if (addbsil == 1 && data.readBits(8) == 1) { // addbsi
mimeType = MimeTypes.AUDIO_E_AC3_JOC;
}
}
} else /* is AC-3 */ {
mimeType = MimeTypes.AUDIO_AC3;
data.skipBits(16 + 16); // syncword, crc1
int fscod = data.readBits(2);
if (fscod == 3) {
// fscod '11' indicates that the decoder should not attempt to decode audio. We invalidate
// the mime type to prevent association with a renderer.
mimeType = null;
}
int frmsizecod = data.readBits(6);
frameSize = getAc3SyncframeSize(fscod, frmsizecod);
data.skipBits(5 + 3); // bsid, bsmod
@ -201,48 +452,112 @@ public final class Ac3Util {
if (acmod == 2) {
data.skipBits(2); // dsurmod
}
sampleRate = SAMPLE_RATE_BY_FSCOD[fscod];
sampleRate =
fscod < SAMPLE_RATE_BY_FSCOD.length ? SAMPLE_RATE_BY_FSCOD[fscod] : Format.NO_VALUE;
sampleCount = AC3_SYNCFRAME_AUDIO_SAMPLE_COUNT;
lfeon = data.readBit();
channelCount = CHANNEL_COUNT_BY_ACMOD[acmod] + (lfeon ? 1 : 0);
}
boolean lfeon = data.readBit();
int channelCount = CHANNEL_COUNT_BY_ACMOD[acmod] + (lfeon ? 1 : 0);
return new Ac3SyncFrameInfo(mimeType, channelCount, sampleRate, frameSize, sampleCount);
return new SyncFrameInfo(
mimeType, streamType, channelCount, sampleRate, frameSize, sampleCount);
}
/**
* Returns the size in bytes of the given AC-3 syncframe.
* Returns the size in bytes of the given (E-)AC-3 syncframe.
*
* @param data The syncframe to parse.
* @return The syncframe size in bytes. {@link C#LENGTH_UNSET} if the input is invalid.
*/
public static int parseAc3SyncframeSize(byte[] data) {
if (data.length < 5) {
if (data.length < 6) {
return C.LENGTH_UNSET;
}
int fscod = (data[4] & 0xC0) >> 6;
int frmsizecod = data[4] & 0x3F;
return getAc3SyncframeSize(fscod, frmsizecod);
// Parse the bitstream ID for AC-3 and E-AC-3 (see subsections 4.3, E.1.2 and E.1.3.1.6).
boolean isEac3 = ((data[5] & 0xF8) >> 3) > 10;
if (isEac3) {
int frmsiz = (data[2] & 0x07) << 8; // Most significant 3 bits.
frmsiz |= data[3] & 0xFF; // Least significant 8 bits.
return (frmsiz + 1) * 2; // See frmsiz in subsection E.1.3.1.3.
} else {
int fscod = (data[4] & 0xC0) >> 6;
int frmsizecod = data[4] & 0x3F;
return getAc3SyncframeSize(fscod, frmsizecod);
}
}
/**
* Returns the number of audio samples in an AC-3 syncframe.
*/
public static int getAc3SyncframeAudioSampleCount() {
return AC3_SYNCFRAME_AUDIO_SAMPLE_COUNT;
}
/**
* Reads the number of audio samples represented by the given E-AC-3 syncframe. The buffer's
* Reads the number of audio samples represented by the given (E-)AC-3 syncframe. The buffer's
* position is not modified.
*
* @param buffer The {@link ByteBuffer} from which to read the syncframe.
* @return The number of audio samples represented by the syncframe.
*/
public static int parseEAc3SyncframeAudioSampleCount(ByteBuffer buffer) {
// See ETSI TS 102 366 subsection E.1.2.2.
int fscod = (buffer.get(buffer.position() + 4) & 0xC0) >> 6;
return AUDIO_SAMPLES_PER_AUDIO_BLOCK * (fscod == 0x03 ? 6
: BLOCKS_PER_SYNCFRAME_BY_NUMBLKSCOD[(buffer.get(buffer.position() + 4) & 0x30) >> 4]);
public static int parseAc3SyncframeAudioSampleCount(ByteBuffer buffer) {
// Parse the bitstream ID for AC-3 and E-AC-3 (see subsections 4.3, E.1.2 and E.1.3.1.6).
boolean isEac3 = ((buffer.get(buffer.position() + 5) & 0xF8) >> 3) > 10;
if (isEac3) {
int fscod = (buffer.get(buffer.position() + 4) & 0xC0) >> 6;
int numblkscod = fscod == 0x03 ? 3 : (buffer.get(buffer.position() + 4) & 0x30) >> 4;
return BLOCKS_PER_SYNCFRAME_BY_NUMBLKSCOD[numblkscod] * AUDIO_SAMPLES_PER_AUDIO_BLOCK;
} else {
return AC3_SYNCFRAME_AUDIO_SAMPLE_COUNT;
}
}
/**
* Returns the offset relative to the buffer's position of the start of a TrueHD syncframe, or
* {@link C#INDEX_UNSET} if no syncframe was found. The buffer's position is not modified.
*
* @param buffer The {@link ByteBuffer} within which to find a syncframe.
* @return The offset relative to the buffer's position of the start of a TrueHD syncframe, or
* {@link C#INDEX_UNSET} if no syncframe was found.
*/
public static int findTrueHdSyncframeOffset(ByteBuffer buffer) {
int startIndex = buffer.position();
int endIndex = buffer.limit() - TRUEHD_SYNCFRAME_PREFIX_LENGTH;
for (int i = startIndex; i <= endIndex; i++) {
// The syncword ends 0xBA for TrueHD or 0xBB for MLP.
if ((buffer.getInt(i + 4) & 0xFEFFFFFF) == 0xBA6F72F8) {
return i - startIndex;
}
}
return C.INDEX_UNSET;
}
/**
* Returns the number of audio samples represented by the given TrueHD syncframe, or 0 if the
* buffer is not the start of a syncframe.
*
* @param syncframe The bytes from which to read the syncframe. Must be at least {@link
* #TRUEHD_SYNCFRAME_PREFIX_LENGTH} bytes long.
* @return The number of audio samples represented by the syncframe, or 0 if the buffer doesn't
* contain the start of a syncframe.
*/
public static int parseTrueHdSyncframeAudioSampleCount(byte[] syncframe) {
// See "Dolby TrueHD (MLP) high-level bitstream description" on the Dolby developer site,
// subsections 2.2 and 4.2.1. The syncword ends 0xBA for TrueHD or 0xBB for MLP.
if (syncframe[4] != (byte) 0xF8
|| syncframe[5] != (byte) 0x72
|| syncframe[6] != (byte) 0x6F
|| (syncframe[7] & 0xFE) != 0xBA) {
return 0;
}
boolean isMlp = (syncframe[7] & 0xFF) == 0xBB;
return 40 << ((syncframe[isMlp ? 9 : 8] >> 4) & 0x07);
}
/**
* Reads the number of audio samples represented by a TrueHD syncframe. The buffer's position is
* not modified.
*
* @param buffer The {@link ByteBuffer} from which to read the syncframe.
* @param offset The offset of the start of the syncframe relative to the buffer's position.
* @return The number of audio samples represented by the syncframe.
*/
public static int parseTrueHdSyncframeAudioSampleCount(ByteBuffer buffer, int offset) {
// TODO: Link to specification if available.
boolean isMlp = (buffer.get(buffer.position() + offset + 7) & 0xFF) == 0xBB;
return 40 << ((buffer.get(buffer.position() + offset + (isMlp ? 9 : 8)) >> 4) & 0x07);
}
private static int getAc3SyncframeSize(int fscod, int frmsizecod) {

Просмотреть файл

@ -0,0 +1,250 @@
/*
* Copyright (C) 2019 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.mozilla.thirdparty.com.google.android.exoplayer2.audio;
import androidx.annotation.Nullable;
import org.mozilla.thirdparty.com.google.android.exoplayer2.C;
import org.mozilla.thirdparty.com.google.android.exoplayer2.Format;
import org.mozilla.thirdparty.com.google.android.exoplayer2.drm.DrmInitData;
import org.mozilla.thirdparty.com.google.android.exoplayer2.util.MimeTypes;
import org.mozilla.thirdparty.com.google.android.exoplayer2.util.ParsableBitArray;
import org.mozilla.thirdparty.com.google.android.exoplayer2.util.ParsableByteArray;
import java.nio.ByteBuffer;
/** Utility methods for parsing AC-4 frames, which are access units in AC-4 bitstreams. */
public final class Ac4Util {

  /** Holds sample format information as presented by a syncframe header. */
  public static final class SyncFrameInfo {

    /** The bitstream version. */
    public final int bitstreamVersion;
    /** The audio sampling rate in Hz. */
    public final int sampleRate;
    /** The number of audio channels. */
    public final int channelCount;
    /** The size of the frame, in bytes. */
    public final int frameSize;
    /** Number of audio samples in the frame. */
    public final int sampleCount;

    // Instances are only created by the parsing methods in the enclosing class. Note the
    // parameter order (channelCount before sampleRate) differs from the field order above.
    private SyncFrameInfo(
        int bitstreamVersion, int channelCount, int sampleRate, int frameSize, int sampleCount) {
      this.bitstreamVersion = bitstreamVersion;
      this.channelCount = channelCount;
      this.sampleRate = sampleRate;
      this.frameSize = frameSize;
      this.sampleCount = sampleCount;
    }
  }

  /** Syncword for an AC-4 frame that is not followed by a CRC word. */
  public static final int AC40_SYNCWORD = 0xAC40;
  /** Syncword for an AC-4 frame that is followed by a two-byte CRC word. */
  public static final int AC41_SYNCWORD = 0xAC41;
  /** The channel count of an AC-4 stream. */
  // TODO: Parse AC-4 stream channel count.
  private static final int CHANNEL_COUNT_2 = 2;
  /**
   * The AC-4 sync frame header size for extractor. The seven bytes are 0xAC, 0x40, 0xFF, 0xFF,
   * sizeByte1, sizeByte2, sizeByte3. See ETSI TS 103 190-1 V1.3.1, Annex G
   */
  public static final int SAMPLE_HEADER_SIZE = 7;
  /**
   * The header size for AC-4 parser. Only needs to be as big as we need to read, not the full
   * header size.
   */
  public static final int HEADER_SIZE_FOR_PARSER = 16;
  /**
   * Number of audio samples in the frame. Defined in IEC61937-14:2017 table 5 and 6. This table
   * provides the number of samples per frame at the playback sampling frequency of 48 kHz. For 44.1
   * kHz, only frame_rate_index(13) is valid and corresponding sample count is 2048.
   */
  private static final int[] SAMPLE_COUNT =
      new int[] {
        /* [ 0] 23.976 fps */ 2002,
        /* [ 1] 24     fps */ 2000,
        /* [ 2] 25     fps */ 1920,
        /* [ 3] 29.97  fps */ 1601, // 1601 | 1602 | 1601 | 1602 | 1602
        /* [ 4] 30     fps */ 1600,
        /* [ 5] 47.95  fps */ 1001,
        /* [ 6] 48     fps */ 1000,
        /* [ 7] 50     fps */ 960,
        /* [ 8] 59.94  fps */ 800, //  800 |  801 |  801 |  801 |  801
        /* [ 9] 60     fps */ 800,
        /* [10] 100    fps */ 480,
        /* [11] 119.88 fps */ 400, //  400 |  400 |  401 |  400 |  401
        /* [12] 120    fps */ 400,
        /* [13] 23.438 fps */ 2048
      };

  /**
   * Returns the AC-4 format given {@code data} containing the AC4SpecificBox according to ETSI TS
   * 103 190-1 Annex E. The reading position of {@code data} will be modified.
   *
   * @param data The AC4SpecificBox to parse.
   * @param trackId The track identifier to set on the format.
   * @param language The language to set on the format.
   * @param drmInitData {@link DrmInitData} to be included in the format.
   * @return The AC-4 format parsed from data in the header.
   */
  public static Format parseAc4AnnexEFormat(
      ParsableByteArray data, String trackId, String language, @Nullable DrmInitData drmInitData) {
    data.skipBytes(1); // ac4_dsi_version, bitstream_version[0:5]
    // fs_index (bit 5 of the second byte) selects between the two valid AC-4 sampling rates.
    int sampleRate = ((data.readUnsignedByte() & 0x20) >> 5 == 1) ? 48000 : 44100;
    return Format.createAudioSampleFormat(
        trackId,
        MimeTypes.AUDIO_AC4,
        /* codecs= */ null,
        /* bitrate= */ Format.NO_VALUE,
        /* maxInputSize= */ Format.NO_VALUE,
        CHANNEL_COUNT_2,
        sampleRate,
        /* initializationData= */ null,
        drmInitData,
        /* selectionFlags= */ 0,
        language);
  }

  /**
   * Returns AC-4 format information given {@code data} containing a syncframe. The reading position
   * of {@code data} will be modified.
   *
   * @param data The data to parse, positioned at the start of the syncframe.
   * @return The AC-4 format data parsed from the header.
   */
  public static SyncFrameInfo parseAc4SyncframeInfo(ParsableBitArray data) {
    int headerSize = 0;
    int syncWord = data.readBits(16);
    headerSize += 2;
    int frameSize = data.readBits(16);
    headerSize += 2;
    if (frameSize == 0xFFFF) {
      // 0xFFFF in the 16-bit size field signals that the real size follows as 24 bits.
      frameSize = data.readBits(24);
      headerSize += 3; // Extended frame_size
    }
    // frameSize covers the whole syncframe including the header bytes read so far.
    frameSize += headerSize;
    if (syncWord == AC41_SYNCWORD) {
      frameSize += 2; // crc_word
    }
    int bitstreamVersion = data.readBits(2);
    if (bitstreamVersion == 3) {
      // Version 3 is an escape value: the remainder is variable-bit encoded.
      bitstreamVersion += readVariableBits(data, /* bitsPerRead= */ 2);
    }
    int sequenceCounter = data.readBits(10);
    if (data.readBit()) { // b_wait_frames
      if (data.readBits(3) > 0) { // wait_frames
        data.skipBits(2); // reserved
      }
    }
    int sampleRate = data.readBit() ? 48000 : 44100;
    int frameRateIndex = data.readBits(4);
    int sampleCount = 0;
    if (sampleRate == 44100 && frameRateIndex == 13) {
      sampleCount = SAMPLE_COUNT[frameRateIndex];
    } else if (sampleRate == 48000 && frameRateIndex < SAMPLE_COUNT.length) {
      sampleCount = SAMPLE_COUNT[frameRateIndex];
      // Fractional frame rates (29.97/59.94/119.88 fps) don't divide 48 kHz evenly, so some
      // frames in each 5-frame sequence carry one extra sample (see the SAMPLE_COUNT table).
      switch (sequenceCounter % 5) {
        case 1: // fall through
        case 3:
          if (frameRateIndex == 3 || frameRateIndex == 8) {
            sampleCount++;
          }
          break;
        case 2:
          if (frameRateIndex == 8 || frameRateIndex == 11) {
            sampleCount++;
          }
          break;
        case 4:
          if (frameRateIndex == 3 || frameRateIndex == 8 || frameRateIndex == 11) {
            sampleCount++;
          }
          break;
        default:
          break;
      }
    }
    return new SyncFrameInfo(bitstreamVersion, CHANNEL_COUNT_2, sampleRate, frameSize, sampleCount);
  }

  /**
   * Returns the size in bytes of the given AC-4 syncframe.
   *
   * @param data The syncframe to parse.
   * @param syncword The syncword value for the syncframe.
   * @return The syncframe size in bytes, or {@link C#LENGTH_UNSET} if the input is invalid.
   */
  public static int parseAc4SyncframeSize(byte[] data, int syncword) {
    if (data.length < 7) {
      // Not enough bytes for even an extended-size header; see SAMPLE_HEADER_SIZE.
      return C.LENGTH_UNSET;
    }
    int headerSize = 2; // syncword
    int frameSize = ((data[2] & 0xFF) << 8) | (data[3] & 0xFF);
    headerSize += 2;
    if (frameSize == 0xFFFF) {
      // Extended 24-bit frame_size follows the 0xFFFF escape value.
      frameSize = ((data[4] & 0xFF) << 16) | ((data[5] & 0xFF) << 8) | (data[6] & 0xFF);
      headerSize += 3;
    }
    if (syncword == AC41_SYNCWORD) {
      headerSize += 2; // crc_word
    }
    frameSize += headerSize;
    return frameSize;
  }

  /**
   * Reads the number of audio samples represented by the given AC-4 syncframe. The buffer's
   * position is not modified.
   *
   * @param buffer The {@link ByteBuffer} from which to read the syncframe.
   * @return The number of audio samples represented by the syncframe.
   */
  public static int parseAc4SyncframeAudioSampleCount(ByteBuffer buffer) {
    // Copy just the header prefix out of the buffer, then restore the position so this
    // method has no visible side effect on the buffer.
    byte[] bufferBytes = new byte[HEADER_SIZE_FOR_PARSER];
    int position = buffer.position();
    buffer.get(bufferBytes);
    buffer.position(position);
    return parseAc4SyncframeInfo(new ParsableBitArray(bufferBytes)).sampleCount;
  }

  /** Populates {@code buffer} with an AC-4 sample header for a sample of the specified size. */
  public static void getAc4SampleHeader(int size, ParsableByteArray buffer) {
    // See ETSI TS 103 190-1 V1.3.1, Annex G.
    buffer.reset(SAMPLE_HEADER_SIZE);
    buffer.data[0] = (byte) 0xAC; // Syncword (AC40, no CRC).
    buffer.data[1] = 0x40;
    buffer.data[2] = (byte) 0xFF; // 0xFFFF forces the extended 24-bit size field below.
    buffer.data[3] = (byte) 0xFF;
    buffer.data[4] = (byte) ((size >> 16) & 0xFF);
    buffer.data[5] = (byte) ((size >> 8) & 0xFF);
    buffer.data[6] = (byte) (size & 0xFF);
  }

  /**
   * Reads a variable-length unsigned value: groups of {@code bitsPerRead} bits, each followed by a
   * continuation bit. Reading stops after the first group whose continuation bit is 0.
   */
  private static int readVariableBits(ParsableBitArray data, int bitsPerRead) {
    int value = 0;
    while (true) {
      value += data.readBits(bitsPerRead);
      if (!data.readBit()) {
        break;
      }
      value++;
      value <<= bitsPerRead;
    }
    return value;
  }

  // Prevents instantiation: this is a static utility class.
  private Ac4Util() {}
}

Просмотреть файл

@ -0,0 +1,162 @@
/*
* Copyright (C) 2017 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.mozilla.thirdparty.com.google.android.exoplayer2.audio;
import android.annotation.TargetApi;
import androidx.annotation.Nullable;
import org.mozilla.thirdparty.com.google.android.exoplayer2.C;
import org.mozilla.thirdparty.com.google.android.exoplayer2.util.Util;
/**
* Attributes for audio playback, which configure the underlying platform
* {@link android.media.AudioTrack}.
* <p>
* To set the audio attributes, create an instance using the {@link Builder} and either pass it to
 * {@link org.mozilla.thirdparty.com.google.android.exoplayer2.SimpleExoPlayer#setAudioAttributes(AudioAttributes)} or
* send a message of type {@link C#MSG_SET_AUDIO_ATTRIBUTES} to the audio renderers.
* <p>
* This class is based on {@link android.media.AudioAttributes}, but can be used on all supported
* API versions.
*/
public final class AudioAttributes {

  /** Audio attributes created with all builder defaults. */
  public static final AudioAttributes DEFAULT = new Builder().build();

  /** Builder for {@link AudioAttributes}. */
  public static final class Builder {

    // Defaults: unknown content type, media usage, no flags, capture allowed by all.
    private @C.AudioContentType int contentType = C.CONTENT_TYPE_UNKNOWN;
    private @C.AudioFlags int flags;
    private @C.AudioUsage int usage = C.USAGE_MEDIA;
    private @C.AudioAllowedCapturePolicy int allowedCapturePolicy = C.ALLOW_CAPTURE_BY_ALL;

    /**
     * Creates a new builder for {@link AudioAttributes}.
     *
     * <p>By default the content type is {@link C#CONTENT_TYPE_UNKNOWN}, usage is {@link
     * C#USAGE_MEDIA}, capture policy is {@link C#ALLOW_CAPTURE_BY_ALL} and no flags are set.
     */
    public Builder() {}

    /** See {@link android.media.AudioAttributes.Builder#setContentType(int)}. */
    public Builder setContentType(@C.AudioContentType int contentType) {
      this.contentType = contentType;
      return this;
    }

    /** See {@link android.media.AudioAttributes.Builder#setFlags(int)}. */
    public Builder setFlags(@C.AudioFlags int flags) {
      this.flags = flags;
      return this;
    }

    /** See {@link android.media.AudioAttributes.Builder#setUsage(int)}. */
    public Builder setUsage(@C.AudioUsage int usage) {
      this.usage = usage;
      return this;
    }

    /** See {@link android.media.AudioAttributes.Builder#setAllowedCapturePolicy(int)}. */
    public Builder setAllowedCapturePolicy(@C.AudioAllowedCapturePolicy int allowedCapturePolicy) {
      this.allowedCapturePolicy = allowedCapturePolicy;
      return this;
    }

    /** Creates an {@link AudioAttributes} instance from this builder. */
    public AudioAttributes build() {
      return new AudioAttributes(contentType, flags, usage, allowedCapturePolicy);
    }
  }

  /** The content type (one of the {@code C.CONTENT_TYPE_*} constants). */
  public final @C.AudioContentType int contentType;
  /** The audio flags (a combination of the {@code C.FLAG_*} audio constants). */
  public final @C.AudioFlags int flags;
  /** The usage (one of the {@code C.USAGE_*} constants). */
  public final @C.AudioUsage int usage;
  /** The allowed capture policy (one of the {@code C.ALLOW_CAPTURE_*} constants). */
  public final @C.AudioAllowedCapturePolicy int allowedCapturePolicy;

  // Platform representation, built lazily on first use and then cached.
  // NOTE(review): the cache write is unsynchronized — presumably instances are confined to a
  // single thread; confirm with callers before sharing across threads.
  @Nullable private android.media.AudioAttributes audioAttributesV21;

  private AudioAttributes(
      @C.AudioContentType int contentType,
      @C.AudioFlags int flags,
      @C.AudioUsage int usage,
      @C.AudioAllowedCapturePolicy int allowedCapturePolicy) {
    this.contentType = contentType;
    this.flags = flags;
    this.usage = usage;
    this.allowedCapturePolicy = allowedCapturePolicy;
  }

  /**
   * Returns a {@link android.media.AudioAttributes} from this instance.
   *
   * <p>Field {@link AudioAttributes#allowedCapturePolicy} is ignored for API levels prior to 29.
   */
  @TargetApi(21)
  public android.media.AudioAttributes getAudioAttributesV21() {
    android.media.AudioAttributes attributes = audioAttributesV21;
    if (attributes == null) {
      android.media.AudioAttributes.Builder platformBuilder =
          new android.media.AudioAttributes.Builder()
              .setContentType(contentType)
              .setFlags(flags)
              .setUsage(usage);
      if (Util.SDK_INT >= 29) {
        // setAllowedCapturePolicy only exists from API 29 onwards.
        platformBuilder.setAllowedCapturePolicy(allowedCapturePolicy);
      }
      attributes = platformBuilder.build();
      audioAttributesV21 = attributes;
    }
    return attributes;
  }

  @Override
  public boolean equals(@Nullable Object obj) {
    if (this == obj) {
      return true;
    }
    // The class is final, so an instanceof check is equivalent to comparing getClass(),
    // and it also handles null.
    if (!(obj instanceof AudioAttributes)) {
      return false;
    }
    AudioAttributes that = (AudioAttributes) obj;
    return contentType == that.contentType
        && flags == that.flags
        && usage == that.usage
        && allowedCapturePolicy == that.allowedCapturePolicy;
  }

  @Override
  public int hashCode() {
    // Standard 17/31 accumulation over all fields compared in equals().
    int hash = 17;
    hash = 31 * hash + contentType;
    hash = 31 * hash + flags;
    hash = 31 * hash + usage;
    hash = 31 * hash + allowedCapturePolicy;
    return hash;
  }
}

Просмотреть файл

@ -22,19 +22,32 @@ import android.content.Intent;
import android.content.IntentFilter;
import android.media.AudioFormat;
import android.media.AudioManager;
import android.net.Uri;
import android.provider.Settings.Global;
import androidx.annotation.Nullable;
import org.mozilla.thirdparty.com.google.android.exoplayer2.util.Util;
import java.util.Arrays;
/**
* Represents the set of audio formats that a device is capable of playing.
*/
/** Represents the set of audio formats that a device is capable of playing. */
@TargetApi(21)
public final class AudioCapabilities {
/**
* The minimum audio capabilities supported by all devices.
*/
private static final int DEFAULT_MAX_CHANNEL_COUNT = 8;
/** The minimum audio capabilities supported by all devices. */
public static final AudioCapabilities DEFAULT_AUDIO_CAPABILITIES =
new AudioCapabilities(new int[] {AudioFormat.ENCODING_PCM_16BIT}, 2);
new AudioCapabilities(new int[] {AudioFormat.ENCODING_PCM_16BIT}, DEFAULT_MAX_CHANNEL_COUNT);
/** Audio capabilities when the device specifies external surround sound. */
private static final AudioCapabilities EXTERNAL_SURROUND_SOUND_CAPABILITIES =
new AudioCapabilities(
new int[] {
AudioFormat.ENCODING_PCM_16BIT, AudioFormat.ENCODING_AC3, AudioFormat.ENCODING_E_AC3
},
DEFAULT_MAX_CHANNEL_COUNT);
/** Global settings key for devices that can specify external surround sound. */
private static final String EXTERNAL_SURROUND_SOUND_KEY = "external_surround_sound_enabled";
/**
* Returns the current audio capabilities for the device.
@ -44,17 +57,36 @@ public final class AudioCapabilities {
*/
@SuppressWarnings("InlinedApi")
public static AudioCapabilities getCapabilities(Context context) {
return getCapabilities(
context.registerReceiver(null, new IntentFilter(AudioManager.ACTION_HDMI_AUDIO_PLUG)));
Intent intent =
context.registerReceiver(
/* receiver= */ null, new IntentFilter(AudioManager.ACTION_HDMI_AUDIO_PLUG));
return getCapabilities(context, intent);
}
@SuppressLint("InlinedApi")
/* package */ static AudioCapabilities getCapabilities(Intent intent) {
/* package */ static AudioCapabilities getCapabilities(Context context, @Nullable Intent intent) {
if (deviceMaySetExternalSurroundSoundGlobalSetting()
&& Global.getInt(context.getContentResolver(), EXTERNAL_SURROUND_SOUND_KEY, 0) == 1) {
return EXTERNAL_SURROUND_SOUND_CAPABILITIES;
}
if (intent == null || intent.getIntExtra(AudioManager.EXTRA_AUDIO_PLUG_STATE, 0) == 0) {
return DEFAULT_AUDIO_CAPABILITIES;
}
return new AudioCapabilities(intent.getIntArrayExtra(AudioManager.EXTRA_ENCODINGS),
intent.getIntExtra(AudioManager.EXTRA_MAX_CHANNEL_COUNT, 0));
return new AudioCapabilities(
intent.getIntArrayExtra(AudioManager.EXTRA_ENCODINGS),
intent.getIntExtra(
AudioManager.EXTRA_MAX_CHANNEL_COUNT, /* defaultValue= */ DEFAULT_MAX_CHANNEL_COUNT));
}
/**
* Returns the global settings {@link Uri} used by the device to specify external surround sound,
* or null if the device does not support this functionality.
*/
@Nullable
/* package */ static Uri getExternalSurroundSoundGlobalSettingUri() {
return deviceMaySetExternalSurroundSoundGlobalSetting()
? Global.getUriFor(EXTERNAL_SURROUND_SOUND_KEY)
: null;
}
private final int[] supportedEncodings;
@ -64,11 +96,15 @@ public final class AudioCapabilities {
* Constructs new audio capabilities based on a set of supported encodings and a maximum channel
* count.
*
* <p>Applications should generally call {@link #getCapabilities(Context)} to obtain an instance
* based on the capabilities advertised by the platform, rather than calling this constructor.
*
* @param supportedEncodings Supported audio encodings from {@link android.media.AudioFormat}'s
* {@code ENCODING_*} constants.
* {@code ENCODING_*} constants. Passing {@code null} indicates that no encodings are
* supported.
* @param maxChannelCount The maximum number of audio channels that can be played simultaneously.
*/
/* package */ AudioCapabilities(int[] supportedEncodings, int maxChannelCount) {
public AudioCapabilities(@Nullable int[] supportedEncodings, int maxChannelCount) {
if (supportedEncodings != null) {
this.supportedEncodings = Arrays.copyOf(supportedEncodings, supportedEncodings.length);
Arrays.sort(this.supportedEncodings);
@ -96,7 +132,7 @@ public final class AudioCapabilities {
}
@Override
public boolean equals(Object other) {
public boolean equals(@Nullable Object other) {
if (this == other) {
return true;
}
@ -119,4 +155,7 @@ public final class AudioCapabilities {
+ ", supportedEncodings=" + Arrays.toString(supportedEncodings) + "]";
}
private static boolean deviceMaySetExternalSurroundSoundGlobalSetting() {
return Util.SDK_INT >= 17 && "Amazon".equals(Util.MANUFACTURER);
}
}

Просмотреть файл

@ -16,10 +16,15 @@
package org.mozilla.thirdparty.com.google.android.exoplayer2.audio;
import android.content.BroadcastReceiver;
import android.content.ContentResolver;
import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;
import android.database.ContentObserver;
import android.media.AudioManager;
import android.net.Uri;
import android.os.Handler;
import androidx.annotation.Nullable;
import org.mozilla.thirdparty.com.google.android.exoplayer2.util.Assertions;
import org.mozilla.thirdparty.com.google.android.exoplayer2.util.Util;
@ -45,18 +50,29 @@ public final class AudioCapabilitiesReceiver {
private final Context context;
private final Listener listener;
private final BroadcastReceiver receiver;
private final Handler handler;
@Nullable private final BroadcastReceiver receiver;
@Nullable private final ExternalSurroundSoundSettingObserver externalSurroundSoundSettingObserver;
/* package */ AudioCapabilities audioCapabilities;
/* package */ @Nullable AudioCapabilities audioCapabilities;
private boolean registered;
/**
* @param context A context for registering the receiver.
* @param listener The listener to notify when audio capabilities change.
*/
public AudioCapabilitiesReceiver(Context context, Listener listener) {
this.context = Assertions.checkNotNull(context);
context = context.getApplicationContext();
this.context = context;
this.listener = Assertions.checkNotNull(listener);
this.receiver = Util.SDK_INT >= 21 ? new HdmiAudioPlugBroadcastReceiver() : null;
handler = new Handler(Util.getLooper());
receiver = Util.SDK_INT >= 21 ? new HdmiAudioPlugBroadcastReceiver() : null;
Uri externalSurroundSoundUri = AudioCapabilities.getExternalSurroundSoundGlobalSettingUri();
externalSurroundSoundSettingObserver =
externalSurroundSoundUri != null
? new ExternalSurroundSoundSettingObserver(
handler, context.getContentResolver(), externalSurroundSoundUri)
: null;
}
/**
@ -68,9 +84,21 @@ public final class AudioCapabilitiesReceiver {
*/
@SuppressWarnings("InlinedApi")
public AudioCapabilities register() {
Intent stickyIntent = receiver == null ? null
: context.registerReceiver(receiver, new IntentFilter(AudioManager.ACTION_HDMI_AUDIO_PLUG));
audioCapabilities = AudioCapabilities.getCapabilities(stickyIntent);
if (registered) {
return Assertions.checkNotNull(audioCapabilities);
}
registered = true;
if (externalSurroundSoundSettingObserver != null) {
externalSurroundSoundSettingObserver.register();
}
Intent stickyIntent = null;
if (receiver != null) {
IntentFilter intentFilter = new IntentFilter(AudioManager.ACTION_HDMI_AUDIO_PLUG);
stickyIntent =
context.registerReceiver(
receiver, intentFilter, /* broadcastPermission= */ null, handler);
}
audioCapabilities = AudioCapabilities.getCapabilities(context, stickyIntent);
return audioCapabilities;
}
@ -79,9 +107,24 @@ public final class AudioCapabilitiesReceiver {
* changes occur.
*/
public void unregister() {
if (!registered) {
return;
}
audioCapabilities = null;
if (receiver != null) {
context.unregisterReceiver(receiver);
}
if (externalSurroundSoundSettingObserver != null) {
externalSurroundSoundSettingObserver.unregister();
}
registered = false;
}
private void onNewAudioCapabilities(AudioCapabilities newAudioCapabilities) {
if (registered && !newAudioCapabilities.equals(audioCapabilities)) {
audioCapabilities = newAudioCapabilities;
listener.onAudioCapabilitiesChanged(newAudioCapabilities);
}
}
private final class HdmiAudioPlugBroadcastReceiver extends BroadcastReceiver {
@ -89,14 +132,35 @@ public final class AudioCapabilitiesReceiver {
@Override
public void onReceive(Context context, Intent intent) {
if (!isInitialStickyBroadcast()) {
AudioCapabilities newAudioCapabilities = AudioCapabilities.getCapabilities(intent);
if (!newAudioCapabilities.equals(audioCapabilities)) {
audioCapabilities = newAudioCapabilities;
listener.onAudioCapabilitiesChanged(newAudioCapabilities);
}
onNewAudioCapabilities(AudioCapabilities.getCapabilities(context, intent));
}
}
}
private final class ExternalSurroundSoundSettingObserver extends ContentObserver {
private final ContentResolver resolver;
private final Uri settingUri;
public ExternalSurroundSoundSettingObserver(
Handler handler, ContentResolver resolver, Uri settingUri) {
super(handler);
this.resolver = resolver;
this.settingUri = settingUri;
}
public void register() {
resolver.registerContentObserver(settingUri, /* notifyForDescendants= */ false, this);
}
public void unregister() {
resolver.unregisterContentObserver(this);
}
@Override
public void onChange(boolean selfChange) {
onNewAudioCapabilities(AudioCapabilities.getCapabilities(context));
}
}
}

Просмотреть файл

@ -15,27 +15,21 @@
*/
package org.mozilla.thirdparty.com.google.android.exoplayer2.audio;
/**
* Thrown when an audio decoder error occurs.
*/
public abstract class AudioDecoderException extends Exception {
/** Thrown when an audio decoder error occurs. */
public class AudioDecoderException extends Exception {
/**
* @param detailMessage The detail message for this exception.
*/
public AudioDecoderException(String detailMessage) {
super(detailMessage);
/** @param message The detail message for this exception. */
public AudioDecoderException(String message) {
super(message);
}
/**
* @param detailMessage The detail message for this exception.
* @param cause the cause (which is saved for later retrieval by the
* {@link #getCause()} method). (A <tt>null</tt> value is
* permitted, and indicates that the cause is nonexistent or
* unknown.)
* @param message The detail message for this exception.
* @param cause the cause (which is saved for later retrieval by the {@link #getCause()} method).
* A <tt>null</tt> value is permitted, and indicates that the cause is nonexistent or unknown.
*/
public AudioDecoderException(String detailMessage, Throwable cause) {
super(detailMessage, cause);
public AudioDecoderException(String message, Throwable cause) {
super(message, cause);
}
}

Просмотреть файл

@ -0,0 +1,41 @@
/*
* Copyright (C) 2018 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.mozilla.thirdparty.com.google.android.exoplayer2.audio;
/** A listener for changes in audio configuration. */
public interface AudioListener {
/**
* Called when the audio session is set.
*
* @param audioSessionId The audio session id.
*/
default void onAudioSessionId(int audioSessionId) {}
/**
* Called when the audio attributes change.
*
* @param audioAttributes The audio attributes.
*/
default void onAudioAttributesChanged(AudioAttributes audioAttributes) {}
/**
* Called when the volume changes.
*
* @param volume The new volume, with 0 being silence and 1 being unity gain.
*/
default void onVolumeChanged(float volume) {}
}

Просмотреть файл

@ -16,65 +16,92 @@
package org.mozilla.thirdparty.com.google.android.exoplayer2.audio;
import org.mozilla.thirdparty.com.google.android.exoplayer2.C;
import org.mozilla.thirdparty.com.google.android.exoplayer2.Format;
import org.mozilla.thirdparty.com.google.android.exoplayer2.util.Util;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
/**
* Interface for audio processors.
* Interface for audio processors, which take audio data as input and transform it, potentially
* modifying its channel count, encoding and/or sample rate.
*
* <p>In addition to being able to modify the format of audio, implementations may allow parameters
* to be set that affect the output audio and whether the processor is active/inactive.
*/
public interface AudioProcessor {
/**
* Exception thrown when a processor can't be configured for a given input audio format.
*/
final class UnhandledFormatException extends Exception {
/** PCM audio format that may be handled by an audio processor. */
final class AudioFormat {
public static final AudioFormat NOT_SET =
new AudioFormat(
/* sampleRate= */ Format.NO_VALUE,
/* channelCount= */ Format.NO_VALUE,
/* encoding= */ Format.NO_VALUE);
public UnhandledFormatException(int sampleRateHz, int channelCount, @C.Encoding int encoding) {
super("Unhandled format: " + sampleRateHz + " Hz, " + channelCount + " channels in encoding "
+ encoding);
/** The sample rate in Hertz. */
public final int sampleRate;
/** The number of interleaved channels. */
public final int channelCount;
/** The type of linear PCM encoding. */
@C.PcmEncoding public final int encoding;
/** The number of bytes used to represent one audio frame. */
public final int bytesPerFrame;
public AudioFormat(int sampleRate, int channelCount, @C.PcmEncoding int encoding) {
this.sampleRate = sampleRate;
this.channelCount = channelCount;
this.encoding = encoding;
bytesPerFrame =
Util.isEncodingLinearPcm(encoding)
? Util.getPcmFrameSize(encoding, channelCount)
: Format.NO_VALUE;
}
@Override
public String toString() {
return "AudioFormat["
+ "sampleRate="
+ sampleRate
+ ", channelCount="
+ channelCount
+ ", encoding="
+ encoding
+ ']';
}
}
/** Exception thrown when a processor can't be configured for a given input audio format. */
final class UnhandledAudioFormatException extends Exception {
public UnhandledAudioFormatException(AudioFormat inputAudioFormat) {
super("Unhandled format: " + inputAudioFormat);
}
}
/**
* An empty, direct {@link ByteBuffer}.
*/
/** An empty, direct {@link ByteBuffer}. */
ByteBuffer EMPTY_BUFFER = ByteBuffer.allocateDirect(0).order(ByteOrder.nativeOrder());
/**
* Configures the processor to process input audio with the specified format. After calling this
* method, {@link #isActive()} returns whether the processor needs to handle buffers; if not, the
* processor will not accept any buffers until it is reconfigured. Returns {@code true} if the
* processor must be flushed, or if the value returned by {@link #isActive()} has changed as a
* result of the call. If it's active, {@link #getOutputChannelCount()} and
* {@link #getOutputEncoding()} return the processor's output format.
* method, call {@link #isActive()} to determine whether the audio processor is active. Returns
* the configured output audio format if this instance is active.
*
* @param sampleRateHz The sample rate of input audio in Hz.
* @param channelCount The number of interleaved channels in input audio.
* @param encoding The encoding of input audio.
* @return {@code true} if the processor must be flushed or the value returned by
* {@link #isActive()} has changed as a result of the call.
* @throws UnhandledFormatException Thrown if the specified format can't be handled as input.
* <p>After calling this method, it is necessary to {@link #flush()} the processor to apply the
* new configuration. Before applying the new configuration, it is safe to queue input and get
* output in the old input/output formats. Call {@link #queueEndOfStream()} when no more input
* will be supplied in the old input format.
*
* @param inputAudioFormat The format of audio that will be queued after the next call to {@link
* #flush()}.
* @return The configured output audio format if this instance is {@link #isActive() active}.
* @throws UnhandledAudioFormatException Thrown if the specified format can't be handled as input.
*/
boolean configure(int sampleRateHz, int channelCount, @C.Encoding int encoding)
throws UnhandledFormatException;
AudioFormat configure(AudioFormat inputAudioFormat) throws UnhandledAudioFormatException;
/**
* Returns whether the processor is configured and active.
*/
/** Returns whether the processor is configured and will process input buffers. */
boolean isActive();
/**
* Returns the number of audio channels in the data output by the processor.
*/
int getOutputChannelCount();
/**
* Returns the audio encoding used in the data output by the processor.
*/
@C.Encoding
int getOutputEncoding();
/**
* Queues audio data between the position and limit of the input {@code buffer} for processing.
* {@code buffer} must be a direct byte buffer with native byte order. Its contents are treated as
@ -111,13 +138,11 @@ public interface AudioProcessor {
boolean isEnded();
/**
* Clears any state in preparation for receiving a new stream of input buffers.
* Clears any buffered data and pending output. If the audio processor is active, also prepares
* the audio processor to receive a new stream of input in the last configured (pending) format.
*/
void flush();
/**
* Resets the processor to its initial state.
*/
/** Resets the processor to its unconfigured state, releasing any resources. */
void reset();
}

Просмотреть файл

@ -15,8 +15,11 @@
*/
package org.mozilla.thirdparty.com.google.android.exoplayer2.audio;
import static org.mozilla.thirdparty.com.google.android.exoplayer2.util.Util.castNonNull;
import android.os.Handler;
import android.os.SystemClock;
import androidx.annotation.Nullable;
import org.mozilla.thirdparty.com.google.android.exoplayer2.C;
import org.mozilla.thirdparty.com.google.android.exoplayer2.Format;
import org.mozilla.thirdparty.com.google.android.exoplayer2.Renderer;
@ -24,7 +27,8 @@ import org.mozilla.thirdparty.com.google.android.exoplayer2.decoder.DecoderCount
import org.mozilla.thirdparty.com.google.android.exoplayer2.util.Assertions;
/**
* Listener of audio {@link Renderer} events.
* Listener of audio {@link Renderer} events. All methods have no-op default implementations to
* allow selective overrides.
*/
public interface AudioRendererEventListener {
@ -34,14 +38,14 @@ public interface AudioRendererEventListener {
* @param counters {@link DecoderCounters} that will be updated by the renderer for as long as it
* remains enabled.
*/
void onAudioEnabled(DecoderCounters counters);
default void onAudioEnabled(DecoderCounters counters) {}
/**
* Called when the audio session is set.
*
* @param audioSessionId The audio session id.
*/
void onAudioSessionId(int audioSessionId);
default void onAudioSessionId(int audioSessionId) {}
/**
* Called when a decoder is created.
@ -51,48 +55,50 @@ public interface AudioRendererEventListener {
* finished.
* @param initializationDurationMs The time taken to initialize the decoder in milliseconds.
*/
void onAudioDecoderInitialized(String decoderName, long initializedTimestampMs,
long initializationDurationMs);
default void onAudioDecoderInitialized(
String decoderName, long initializedTimestampMs, long initializationDurationMs) {}
/**
* Called when the format of the media being consumed by the renderer changes.
*
* @param format The new format.
*/
void onAudioInputFormatChanged(Format format);
default void onAudioInputFormatChanged(Format format) {}
/**
* Called when an {@link AudioTrack} underrun occurs.
* Called when an {@link AudioSink} underrun occurs.
*
* @param bufferSize The size of the {@link AudioTrack}'s buffer, in bytes.
* @param bufferSizeMs The size of the {@link AudioTrack}'s buffer, in milliseconds, if it is
* @param bufferSize The size of the {@link AudioSink}'s buffer, in bytes.
* @param bufferSizeMs The size of the {@link AudioSink}'s buffer, in milliseconds, if it is
* configured for PCM output. {@link C#TIME_UNSET} if it is configured for passthrough output,
* as the buffered media can have a variable bitrate so the duration may be unknown.
* @param elapsedSinceLastFeedMs The time since the {@link AudioTrack} was last fed data.
* @param elapsedSinceLastFeedMs The time since the {@link AudioSink} was last fed data.
*/
void onAudioTrackUnderrun(int bufferSize, long bufferSizeMs, long elapsedSinceLastFeedMs);
default void onAudioSinkUnderrun(
int bufferSize, long bufferSizeMs, long elapsedSinceLastFeedMs) {}
/**
* Called when the renderer is disabled.
*
* @param counters {@link DecoderCounters} that were updated by the renderer.
*/
void onAudioDisabled(DecoderCounters counters);
default void onAudioDisabled(DecoderCounters counters) {}
/**
* Dispatches events to a {@link AudioRendererEventListener}.
*/
final class EventDispatcher {
private final Handler handler;
private final AudioRendererEventListener listener;
@Nullable private final Handler handler;
@Nullable private final AudioRendererEventListener listener;
/**
* @param handler A handler for dispatching events, or null if creating a dummy instance.
* @param listener The listener to which events should be dispatched, or null if creating a
* dummy instance.
*/
public EventDispatcher(Handler handler, AudioRendererEventListener listener) {
public EventDispatcher(@Nullable Handler handler,
@Nullable AudioRendererEventListener listener) {
this.handler = listener != null ? Assertions.checkNotNull(handler) : null;
this.listener = listener;
}
@ -101,13 +107,8 @@ public interface AudioRendererEventListener {
* Invokes {@link AudioRendererEventListener#onAudioEnabled(DecoderCounters)}.
*/
public void enabled(final DecoderCounters decoderCounters) {
if (listener != null) {
handler.post(new Runnable() {
@Override
public void run() {
listener.onAudioEnabled(decoderCounters);
}
});
if (handler != null) {
handler.post(() -> castNonNull(listener).onAudioEnabled(decoderCounters));
}
}
@ -116,14 +117,12 @@ public interface AudioRendererEventListener {
*/
public void decoderInitialized(final String decoderName,
final long initializedTimestampMs, final long initializationDurationMs) {
if (listener != null) {
handler.post(new Runnable() {
@Override
public void run() {
listener.onAudioDecoderInitialized(decoderName, initializedTimestampMs,
initializationDurationMs);
}
});
if (handler != null) {
handler.post(
() ->
castNonNull(listener)
.onAudioDecoderInitialized(
decoderName, initializedTimestampMs, initializationDurationMs));
}
}
@ -131,28 +130,21 @@ public interface AudioRendererEventListener {
* Invokes {@link AudioRendererEventListener#onAudioInputFormatChanged(Format)}.
*/
public void inputFormatChanged(final Format format) {
if (listener != null) {
handler.post(new Runnable() {
@Override
public void run() {
listener.onAudioInputFormatChanged(format);
}
});
if (handler != null) {
handler.post(() -> castNonNull(listener).onAudioInputFormatChanged(format));
}
}
/**
* Invokes {@link AudioRendererEventListener#onAudioTrackUnderrun(int, long, long)}.
* Invokes {@link AudioRendererEventListener#onAudioSinkUnderrun(int, long, long)}.
*/
public void audioTrackUnderrun(final int bufferSize, final long bufferSizeMs,
final long elapsedSinceLastFeedMs) {
if (listener != null) {
handler.post(new Runnable() {
@Override
public void run() {
listener.onAudioTrackUnderrun(bufferSize, bufferSizeMs, elapsedSinceLastFeedMs);
}
});
if (handler != null) {
handler.post(
() ->
castNonNull(listener)
.onAudioSinkUnderrun(bufferSize, bufferSizeMs, elapsedSinceLastFeedMs));
}
}
@ -160,14 +152,13 @@ public interface AudioRendererEventListener {
* Invokes {@link AudioRendererEventListener#onAudioDisabled(DecoderCounters)}.
*/
public void disabled(final DecoderCounters counters) {
if (listener != null) {
handler.post(new Runnable() {
@Override
public void run() {
counters.ensureUpdated();
listener.onAudioDisabled(counters);
}
});
counters.ensureUpdated();
if (handler != null) {
handler.post(
() -> {
counters.ensureUpdated();
castNonNull(listener).onAudioDisabled(counters);
});
}
}
@ -175,16 +166,9 @@ public interface AudioRendererEventListener {
* Invokes {@link AudioRendererEventListener#onAudioSessionId(int)}.
*/
public void audioSessionId(final int audioSessionId) {
if (listener != null) {
handler.post(new Runnable() {
@Override
public void run() {
listener.onAudioSessionId(audioSessionId);
}
});
if (handler != null) {
handler.post(() -> castNonNull(listener).onAudioSessionId(audioSessionId));
}
}
}
}

Просмотреть файл

@ -0,0 +1,329 @@
/*
* Copyright (C) 2017 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.mozilla.thirdparty.com.google.android.exoplayer2.audio;
import android.media.AudioTrack;
import androidx.annotation.Nullable;
import org.mozilla.thirdparty.com.google.android.exoplayer2.C;
import org.mozilla.thirdparty.com.google.android.exoplayer2.Format;
import org.mozilla.thirdparty.com.google.android.exoplayer2.PlaybackParameters;
import java.nio.ByteBuffer;
/**
* A sink that consumes audio data.
*
* <p>Before starting playback, specify the input audio format by calling {@link #configure(int,
* int, int, int, int[], int, int)}.
*
* <p>Call {@link #handleBuffer(ByteBuffer, long)} to write data, and {@link #handleDiscontinuity()}
* when the data being fed is discontinuous. Call {@link #play()} to start playing the written data.
*
* <p>Call {@link #configure(int, int, int, int, int[], int, int)} whenever the input format
* changes. The sink will be reinitialized on the next call to {@link #handleBuffer(ByteBuffer,
* long)}.
*
* <p>Call {@link #flush()} to prepare the sink to receive audio data from a new playback position.
*
* <p>Call {@link #playToEndOfStream()} repeatedly to play out all data when no more input buffers
* will be provided via {@link #handleBuffer(ByteBuffer, long)} until the next {@link #flush()}.
* Call {@link #reset()} when the instance is no longer required.
*
* <p>The implementation may be backed by a platform {@link AudioTrack}. In this case, {@link
* #setAudioSessionId(int)}, {@link #setAudioAttributes(AudioAttributes)}, {@link
* #enableTunnelingV21(int)} and/or {@link #disableTunneling()} may be called before writing data to
* the sink. These methods may also be called after writing data to the sink, in which case it will
* be reinitialized as required. For implementations that are not based on platform {@link
* AudioTrack}s, calling methods relating to audio sessions, audio attributes, and tunneling may
* have no effect.
*/
public interface AudioSink {
/**
* Listener for audio sink events.
*/
interface Listener {
/**
* Called if the audio sink has started rendering audio to a new platform audio session.
*
* @param audioSessionId The newly generated audio session's identifier.
*/
void onAudioSessionId(int audioSessionId);
/**
* Called when the audio sink handles a buffer whose timestamp is discontinuous with the last
* buffer handled since it was reset.
*/
void onPositionDiscontinuity();
/**
* Called when the audio sink runs out of data.
* <p>
* An audio sink implementation may never call this method (for example, if audio data is
* consumed in batches rather than based on the sink's own clock).
*
* @param bufferSize The size of the sink's buffer, in bytes.
* @param bufferSizeMs The size of the sink's buffer, in milliseconds, if it is configured for
* PCM output. {@link C#TIME_UNSET} if it is configured for encoded audio output, as the
* buffered media can have a variable bitrate so the duration may be unknown.
* @param elapsedSinceLastFeedMs The time since the sink was last fed data, in milliseconds.
*/
void onUnderrun(int bufferSize, long bufferSizeMs, long elapsedSinceLastFeedMs);
}
/**
* Thrown when a failure occurs configuring the sink.
*/
final class ConfigurationException extends Exception {
/**
* Creates a new configuration exception with the specified {@code cause} and no message.
*/
public ConfigurationException(Throwable cause) {
super(cause);
}
/**
* Creates a new configuration exception with the specified {@code message} and no cause.
*/
public ConfigurationException(String message) {
super(message);
}
}
/**
* Thrown when a failure occurs initializing the sink.
*/
final class InitializationException extends Exception {
/**
* The underlying {@link AudioTrack}'s state, if applicable.
*/
public final int audioTrackState;
/**
* @param audioTrackState The underlying {@link AudioTrack}'s state, if applicable.
* @param sampleRate The requested sample rate in Hz.
* @param channelConfig The requested channel configuration.
* @param bufferSize The requested buffer size in bytes.
*/
public InitializationException(int audioTrackState, int sampleRate, int channelConfig,
int bufferSize) {
super("AudioTrack init failed: " + audioTrackState + ", Config(" + sampleRate + ", "
+ channelConfig + ", " + bufferSize + ")");
this.audioTrackState = audioTrackState;
}
}
/**
* Thrown when a failure occurs writing to the sink.
*/
final class WriteException extends Exception {
/**
* The error value returned from the sink implementation. If the sink writes to a platform
* {@link AudioTrack}, this will be the error value returned from
* {@link AudioTrack#write(byte[], int, int)} or {@link AudioTrack#write(ByteBuffer, int, int)}.
* Otherwise, the meaning of the error code depends on the sink implementation.
*/
public final int errorCode;
/**
* @param errorCode The error value returned from the sink implementation.
*/
public WriteException(int errorCode) {
super("AudioTrack write failed: " + errorCode);
this.errorCode = errorCode;
}
}
/**
* Returned by {@link #getCurrentPositionUs(boolean)} when the position is not set.
*/
long CURRENT_POSITION_NOT_SET = Long.MIN_VALUE;
/**
* Sets the listener for sink events, which should be the audio renderer.
*
* @param listener The listener for sink events, which should be the audio renderer.
*/
void setListener(Listener listener);
/**
* Returns whether the sink supports the audio format.
*
* @param channelCount The number of channels, or {@link Format#NO_VALUE} if not known.
* @param encoding The audio encoding, or {@link Format#NO_VALUE} if not known.
* @return Whether the sink supports the audio format.
*/
boolean supportsOutput(int channelCount, @C.Encoding int encoding);
/**
* Returns the playback position in the stream starting at zero, in microseconds, or
* {@link #CURRENT_POSITION_NOT_SET} if it is not yet available.
*
* @param sourceEnded Specify {@code true} if no more input buffers will be provided.
* @return The playback position relative to the start of playback, in microseconds.
*/
long getCurrentPositionUs(boolean sourceEnded);
/**
* Configures (or reconfigures) the sink.
*
* @param inputEncoding The encoding of audio data provided in the input buffers.
* @param inputChannelCount The number of channels.
* @param inputSampleRate The sample rate in Hz.
* @param specifiedBufferSize A specific size for the playback buffer in bytes, or 0 to infer a
* suitable buffer size.
* @param outputChannels A mapping from input to output channels that is applied to this sink's
* input as a preprocessing step, if handling PCM input. Specify {@code null} to leave the
* input unchanged. Otherwise, the element at index {@code i} specifies index of the input
* channel to map to output channel {@code i} when preprocessing input buffers. After the map
* is applied the audio data will have {@code outputChannels.length} channels.
* @param trimStartFrames The number of audio frames to trim from the start of data written to the
* sink after this call.
* @param trimEndFrames The number of audio frames to trim from data written to the sink
* immediately preceding the next call to {@link #flush()} or this method.
* @throws ConfigurationException If an error occurs configuring the sink.
*/
void configure(
@C.Encoding int inputEncoding,
int inputChannelCount,
int inputSampleRate,
int specifiedBufferSize,
@Nullable int[] outputChannels,
int trimStartFrames,
int trimEndFrames)
throws ConfigurationException;
/**
* Starts or resumes consuming audio if initialized.
*/
void play();
/** Signals to the sink that the next buffer may be discontinuous with the previous buffer. */
void handleDiscontinuity();
/**
* Attempts to process data from a {@link ByteBuffer}, starting from its current position and
* ending at its limit (exclusive). The position of the {@link ByteBuffer} is advanced by the
* number of bytes that were handled. {@link Listener#onPositionDiscontinuity()} will be called if
* {@code presentationTimeUs} is discontinuous with the last buffer handled since the last reset.
*
* <p>Returns whether the data was handled in full. If the data was not handled in full then the
* same {@link ByteBuffer} must be provided to subsequent calls until it has been fully consumed,
* except in the case of an intervening call to {@link #flush()} (or to {@link #configure(int,
* int, int, int, int[], int, int)} that causes the sink to be flushed).
*
* @param buffer The buffer containing audio data.
* @param presentationTimeUs The presentation timestamp of the buffer in microseconds.
* @return Whether the buffer was handled fully.
* @throws InitializationException If an error occurs initializing the sink.
* @throws WriteException If an error occurs writing the audio data.
*/
boolean handleBuffer(ByteBuffer buffer, long presentationTimeUs)
throws InitializationException, WriteException;
/**
* Processes any remaining data. {@link #isEnded()} will return {@code true} when no data remains.
*
* @throws WriteException If an error occurs draining data to the sink.
*/
void playToEndOfStream() throws WriteException;
/**
* Returns whether {@link #playToEndOfStream} has been called and all buffers have been processed.
*/
boolean isEnded();
/**
* Returns whether the sink has data pending that has not been consumed yet.
*/
boolean hasPendingData();
/**
* Attempts to set the playback parameters. The audio sink may override these parameters if they
* are not supported.
*
* @param playbackParameters The new playback parameters to attempt to set.
*/
void setPlaybackParameters(PlaybackParameters playbackParameters);
/**
* Gets the active {@link PlaybackParameters}.
*/
PlaybackParameters getPlaybackParameters();
/**
 * Sets attributes for audio playback. If the attributes have changed and if the sink is not
 * configured for use with tunneling, then it is reset and the audio session id is cleared.
 *
 * <p>If the sink is configured for use with tunneling then the audio attributes are ignored. The
 * sink is not reset and the audio session id is not cleared. The passed attributes will be used
 * if the sink is later re-configured into non-tunneled mode.
 *
 * @param audioAttributes The attributes for audio playback.
 */
void setAudioAttributes(AudioAttributes audioAttributes);
/**
 * Sets the audio session id.
 *
 * @param audioSessionId The audio session id for playback.
 */
void setAudioSessionId(int audioSessionId);
/**
 * Sets the auxiliary effect.
 *
 * @param auxEffectInfo The auxiliary effect information to apply.
 */
void setAuxEffectInfo(AuxEffectInfo auxEffectInfo);
/**
 * Enables tunneling, if possible. The sink is reset if tunneling was previously disabled or if
 * the audio session id has changed. Enabling tunneling is only possible if the sink is based on a
 * platform {@link AudioTrack}, and requires platform API version 21 onwards.
 *
 * @param tunnelingAudioSessionId The audio session id to use.
 * @throws IllegalStateException If tunneling is enabled on platform API version &lt; 21.
 */
void enableTunnelingV21(int tunnelingAudioSessionId);
/**
 * Disables tunneling. If tunneling was previously enabled then the sink is reset and any audio
 * session id that was set is cleared.
 */
void disableTunneling();
/**
 * Sets the playback volume of the sink.
 *
 * @param volume A volume in the range [0.0, 1.0].
 */
void setVolume(float volume);
/** Pauses playback. */
void pause();
/**
 * Flushes the sink, after which it is ready to receive buffers from a new playback position.
 *
 * <p>The underlying audio session may remain active until {@link #reset()} is called.
 */
void flush();
/** Resets the sink, releasing any resources that it currently holds. */
void reset();
}

Просмотреть файл

@ -0,0 +1,309 @@
/*
* Copyright (C) 2018 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.mozilla.thirdparty.com.google.android.exoplayer2.audio;
import android.annotation.TargetApi;
import android.media.AudioTimestamp;
import android.media.AudioTrack;
import androidx.annotation.IntDef;
import androidx.annotation.Nullable;
import org.mozilla.thirdparty.com.google.android.exoplayer2.C;
import org.mozilla.thirdparty.com.google.android.exoplayer2.util.Util;
import java.lang.annotation.Documented;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
/**
 * Polls the {@link AudioTrack} timestamp, if the platform supports it, taking care of polling at
 * the appropriate rate to detect when the timestamp starts to advance.
 *
 * <p>When the audio track isn't paused, call {@link #maybePollTimestamp(long)} regularly to check
 * for timestamp updates. If it returns {@code true}, call {@link #getTimestampPositionFrames()} and
 * {@link #getTimestampSystemTimeUs()} to access the updated timestamp, then call {@link
 * #acceptTimestamp()} or {@link #rejectTimestamp()} to accept or reject it.
 *
 * <p>If {@link #hasTimestamp()} returns {@code true}, call {@link #getTimestampSystemTimeUs()} to
 * get the system time at which the latest timestamp was sampled and {@link
 * #getTimestampPositionFrames()} to get its position in frames. If {@link #isTimestampAdvancing()}
 * returns {@code true}, the caller should assume that the timestamp has been increasing in real
 * time since it was sampled. Otherwise, it may be stationary.
 *
 * <p>Call {@link #reset()} when pausing or resuming the track.
 */
/* package */ final class AudioTimestampPoller {
/** Timestamp polling states. */
@Documented
@Retention(RetentionPolicy.SOURCE)
@IntDef({
STATE_INITIALIZING,
STATE_TIMESTAMP,
STATE_TIMESTAMP_ADVANCING,
STATE_NO_TIMESTAMP,
STATE_ERROR
})
private @interface State {}
/** State when first initializing. */
private static final int STATE_INITIALIZING = 0;
/** State when we have a timestamp and we don't know if it's advancing. */
private static final int STATE_TIMESTAMP = 1;
/** State when we have a timestamp and we know it is advancing. */
private static final int STATE_TIMESTAMP_ADVANCING = 2;
/** State when no timestamp is available. */
private static final int STATE_NO_TIMESTAMP = 3;
/** State when the last timestamp was rejected as invalid. */
private static final int STATE_ERROR = 4;
/** The polling interval for {@link #STATE_INITIALIZING} and {@link #STATE_TIMESTAMP}. */
private static final int FAST_POLL_INTERVAL_US = 5_000;
/**
 * The polling interval for {@link #STATE_TIMESTAMP_ADVANCING} and {@link #STATE_NO_TIMESTAMP}.
 */
private static final int SLOW_POLL_INTERVAL_US = 10_000_000;
/** The polling interval for {@link #STATE_ERROR}. */
private static final int ERROR_POLL_INTERVAL_US = 500_000;
/**
 * The minimum duration to remain in {@link #STATE_INITIALIZING} if no timestamps are being
 * returned before transitioning to {@link #STATE_NO_TIMESTAMP}.
 */
private static final int INITIALIZING_DURATION_US = 500_000;
// Null below platform API version 19, where AudioTrack.getTimestamp is unavailable.
@Nullable private final AudioTimestampV19 audioTimestamp;
private @State int state;
// System time (microseconds) at which polling was last (re)initialized.
private long initializeSystemTimeUs;
// Minimum interval between timestamp polls for the current state, in microseconds.
private long sampleIntervalUs;
private long lastTimestampSampleTimeUs;
// Frame position of the first timestamp accepted after the last reset; used to detect advancing.
private long initialTimestampPositionFrames;
/**
 * Creates a new audio timestamp poller.
 *
 * @param audioTrack The audio track that will provide timestamps, if the platform supports it.
 */
public AudioTimestampPoller(AudioTrack audioTrack) {
if (Util.SDK_INT >= 19) {
audioTimestamp = new AudioTimestampV19(audioTrack);
reset();
} else {
audioTimestamp = null;
updateState(STATE_NO_TIMESTAMP);
}
}
/**
 * Polls the timestamp if required and returns whether it was updated. If {@code true}, the latest
 * timestamp is available via {@link #getTimestampSystemTimeUs()} and {@link
 * #getTimestampPositionFrames()}, and the caller should call {@link #acceptTimestamp()} if the
 * timestamp was valid, or {@link #rejectTimestamp()} otherwise. The values returned by {@link
 * #hasTimestamp()} and {@link #isTimestampAdvancing()} may be updated.
 *
 * @param systemTimeUs The current system time, in microseconds.
 * @return Whether the timestamp was updated.
 */
public boolean maybePollTimestamp(long systemTimeUs) {
if (audioTimestamp == null || (systemTimeUs - lastTimestampSampleTimeUs) < sampleIntervalUs) {
return false;
}
lastTimestampSampleTimeUs = systemTimeUs;
boolean updatedTimestamp = audioTimestamp.maybeUpdateTimestamp();
switch (state) {
case STATE_INITIALIZING:
if (updatedTimestamp) {
if (audioTimestamp.getTimestampSystemTimeUs() >= initializeSystemTimeUs) {
// We have an initial timestamp, but don't know if it's advancing yet.
initialTimestampPositionFrames = audioTimestamp.getTimestampPositionFrames();
updateState(STATE_TIMESTAMP);
} else {
// Drop the timestamp, as it was sampled before the last reset.
updatedTimestamp = false;
}
} else if (systemTimeUs - initializeSystemTimeUs > INITIALIZING_DURATION_US) {
// We haven't received a timestamp for a while, so they probably aren't available for the
// current audio route. Poll infrequently in case the route changes later.
// TODO: Ideally we should listen for audio route changes in order to detect when a
// timestamp becomes available again.
updateState(STATE_NO_TIMESTAMP);
}
break;
case STATE_TIMESTAMP:
if (updatedTimestamp) {
long timestampPositionFrames = audioTimestamp.getTimestampPositionFrames();
if (timestampPositionFrames > initialTimestampPositionFrames) {
updateState(STATE_TIMESTAMP_ADVANCING);
}
} else {
reset();
}
break;
case STATE_TIMESTAMP_ADVANCING:
if (!updatedTimestamp) {
// The audio route may have changed, so reset polling.
reset();
}
break;
case STATE_NO_TIMESTAMP:
if (updatedTimestamp) {
// The audio route may have changed, so reset polling.
reset();
}
break;
case STATE_ERROR:
// Do nothing. If the caller accepts any new timestamp we'll reset polling.
break;
default:
throw new IllegalStateException();
}
return updatedTimestamp;
}
/**
 * Rejects the timestamp last polled in {@link #maybePollTimestamp(long)}. The instance will enter
 * the error state and poll timestamps infrequently until the next call to {@link
 * #acceptTimestamp()}.
 */
public void rejectTimestamp() {
updateState(STATE_ERROR);
}
/**
 * Accepts the timestamp last polled in {@link #maybePollTimestamp(long)}. If the instance is in
 * the error state, it will begin to poll timestamps frequently again.
 */
public void acceptTimestamp() {
if (state == STATE_ERROR) {
reset();
}
}
/**
 * Returns whether this instance has a timestamp that can be used to calculate the audio track
 * position. If {@code true}, call {@link #getTimestampSystemTimeUs()} and {@link
 * #getTimestampPositionFrames()} to access the timestamp.
 */
public boolean hasTimestamp() {
return state == STATE_TIMESTAMP || state == STATE_TIMESTAMP_ADVANCING;
}
/**
 * Returns whether the timestamp appears to be advancing. If {@code true}, call {@link
 * #getTimestampSystemTimeUs()} and {@link #getTimestampPositionFrames()} to access the timestamp.
 * A current position for the track can be extrapolated based on elapsed real time since the
 * system time at which the timestamp was sampled.
 */
public boolean isTimestampAdvancing() {
return state == STATE_TIMESTAMP_ADVANCING;
}
/** Resets polling. Should be called whenever the audio track is paused or resumed. */
public void reset() {
if (audioTimestamp != null) {
updateState(STATE_INITIALIZING);
}
}
/**
 * If {@link #maybePollTimestamp(long)} or {@link #hasTimestamp()} returned {@code true}, returns
 * the system time at which the latest timestamp was sampled, in microseconds.
 */
public long getTimestampSystemTimeUs() {
return audioTimestamp != null ? audioTimestamp.getTimestampSystemTimeUs() : C.TIME_UNSET;
}
/**
 * If {@link #maybePollTimestamp(long)} or {@link #hasTimestamp()} returned {@code true}, returns
 * the latest timestamp's position in frames.
 */
public long getTimestampPositionFrames() {
return audioTimestamp != null ? audioTimestamp.getTimestampPositionFrames() : C.POSITION_UNSET;
}
// Transitions to the given state and configures the polling cadence for it.
private void updateState(@State int state) {
this.state = state;
switch (state) {
case STATE_INITIALIZING:
// Force polling a timestamp immediately, and poll quickly.
lastTimestampSampleTimeUs = 0;
initialTimestampPositionFrames = C.POSITION_UNSET;
initializeSystemTimeUs = System.nanoTime() / 1000;
sampleIntervalUs = FAST_POLL_INTERVAL_US;
break;
case STATE_TIMESTAMP:
sampleIntervalUs = FAST_POLL_INTERVAL_US;
break;
case STATE_TIMESTAMP_ADVANCING:
case STATE_NO_TIMESTAMP:
sampleIntervalUs = SLOW_POLL_INTERVAL_US;
break;
case STATE_ERROR:
sampleIntervalUs = ERROR_POLL_INTERVAL_US;
break;
default:
throw new IllegalStateException();
}
}
@TargetApi(19)
private static final class AudioTimestampV19 {
private final AudioTrack audioTrack;
private final AudioTimestamp audioTimestamp;
// Number of times the raw 32-bit frame position has wrapped around.
private long rawTimestampFramePositionWrapCount;
private long lastTimestampRawPositionFrames;
// Unwrapped (monotonic) frame position of the last timestamp.
private long lastTimestampPositionFrames;
/**
 * Creates a new {@link AudioTimestamp} wrapper.
 *
 * @param audioTrack The audio track that will provide timestamps.
 */
public AudioTimestampV19(AudioTrack audioTrack) {
this.audioTrack = audioTrack;
audioTimestamp = new AudioTimestamp();
}
/**
 * Attempts to update the audio track timestamp. Returns {@code true} if the timestamp was
 * updated, in which case the updated timestamp system time and position can be accessed with
 * {@link #getTimestampSystemTimeUs()} and {@link #getTimestampPositionFrames()}. Returns {@code
 * false} if no timestamp is available, in which case those methods should not be called.
 */
public boolean maybeUpdateTimestamp() {
boolean updated = audioTrack.getTimestamp(audioTimestamp);
if (updated) {
long rawPositionFrames = audioTimestamp.framePosition;
if (lastTimestampRawPositionFrames > rawPositionFrames) {
// The value must have wrapped around.
rawTimestampFramePositionWrapCount++;
}
lastTimestampRawPositionFrames = rawPositionFrames;
lastTimestampPositionFrames =
rawPositionFrames + (rawTimestampFramePositionWrapCount << 32);
}
return updated;
}
public long getTimestampSystemTimeUs() {
return audioTimestamp.nanoTime / 1000;
}
public long getTimestampPositionFrames() {
return lastTimestampPositionFrames;
}
}
}

Разница между файлами не показана из-за своего большого размера Загрузить разницу

Просмотреть файл

@ -0,0 +1,545 @@
/*
* Copyright (C) 2018 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.mozilla.thirdparty.com.google.android.exoplayer2.audio;
import static org.mozilla.thirdparty.com.google.android.exoplayer2.util.Util.castNonNull;
import android.media.AudioTimestamp;
import android.media.AudioTrack;
import android.os.SystemClock;
import androidx.annotation.IntDef;
import androidx.annotation.Nullable;
import org.mozilla.thirdparty.com.google.android.exoplayer2.C;
import org.mozilla.thirdparty.com.google.android.exoplayer2.util.Assertions;
import org.mozilla.thirdparty.com.google.android.exoplayer2.util.Util;
import java.lang.annotation.Documented;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.reflect.Method;
/**
 * Wraps an {@link AudioTrack}, exposing a position based on {@link
 * AudioTrack#getPlaybackHeadPosition()} and {@link AudioTrack#getTimestamp(AudioTimestamp)}.
 *
 * <p>Call {@link #setAudioTrack(AudioTrack, int, int, int)} to set the audio track to wrap. Call
 * {@link #mayHandleBuffer(long)} if there is input data to write to the track. If it returns false,
 * the audio track position is stabilizing and no data may be written. Call {@link #start()}
 * immediately before calling {@link AudioTrack#play()}. Call {@link #pause()} when pausing the
 * track. Call {@link #handleEndOfStream(long)} when no more data will be written to the track. When
 * the audio track will no longer be used, call {@link #reset()}.
 */
/* package */ final class AudioTrackPositionTracker {
/** Listener for position tracker events. */
public interface Listener {
/**
 * Called when the frame position is too far from the expected frame position.
 *
 * @param audioTimestampPositionFrames The frame position of the last known audio track
 * timestamp.
 * @param audioTimestampSystemTimeUs The system time associated with the last known audio track
 * timestamp, in microseconds.
 * @param systemTimeUs The current time.
 * @param playbackPositionUs The current playback head position in microseconds.
 */
void onPositionFramesMismatch(
long audioTimestampPositionFrames,
long audioTimestampSystemTimeUs,
long systemTimeUs,
long playbackPositionUs);
/**
 * Called when the system time associated with the last known audio track timestamp is
 * unexpectedly far from the current time.
 *
 * @param audioTimestampPositionFrames The frame position of the last known audio track
 * timestamp.
 * @param audioTimestampSystemTimeUs The system time associated with the last known audio track
 * timestamp, in microseconds.
 * @param systemTimeUs The current time.
 * @param playbackPositionUs The current playback head position in microseconds.
 */
void onSystemTimeUsMismatch(
long audioTimestampPositionFrames,
long audioTimestampSystemTimeUs,
long systemTimeUs,
long playbackPositionUs);
/**
 * Called when the audio track has provided an invalid latency.
 *
 * @param latencyUs The reported latency in microseconds.
 */
void onInvalidLatency(long latencyUs);
/**
 * Called when the audio track runs out of data to play.
 *
 * @param bufferSize The size of the sink's buffer, in bytes.
 * @param bufferSizeMs The size of the sink's buffer, in milliseconds, if it is configured for
 * PCM output. {@link C#TIME_UNSET} if it is configured for encoded audio output, as the
 * buffered media can have a variable bitrate so the duration may be unknown.
 */
void onUnderrun(int bufferSize, long bufferSizeMs);
}
/** {@link AudioTrack} playback states. */
@Documented
@Retention(RetentionPolicy.SOURCE)
@IntDef({PLAYSTATE_STOPPED, PLAYSTATE_PAUSED, PLAYSTATE_PLAYING})
private @interface PlayState {}
/** @see AudioTrack#PLAYSTATE_STOPPED */
private static final int PLAYSTATE_STOPPED = AudioTrack.PLAYSTATE_STOPPED;
/** @see AudioTrack#PLAYSTATE_PAUSED */
private static final int PLAYSTATE_PAUSED = AudioTrack.PLAYSTATE_PAUSED;
/** @see AudioTrack#PLAYSTATE_PLAYING */
private static final int PLAYSTATE_PLAYING = AudioTrack.PLAYSTATE_PLAYING;
/**
 * AudioTrack timestamps are deemed spurious if they are offset from the system clock by more than
 * this amount.
 *
 * <p>This is a fail safe that should not be required on correctly functioning devices.
 */
private static final long MAX_AUDIO_TIMESTAMP_OFFSET_US = 5 * C.MICROS_PER_SECOND;
/**
 * AudioTrack latencies are deemed impossibly large if they are greater than this amount.
 *
 * <p>This is a fail safe that should not be required on correctly functioning devices.
 */
private static final long MAX_LATENCY_US = 5 * C.MICROS_PER_SECOND;
/** Time after which a stuck playback head forces the track to be reset. */
private static final long FORCE_RESET_WORKAROUND_TIMEOUT_MS = 200;
/** Number of playhead offset samples used to smooth the reported position. */
private static final int MAX_PLAYHEAD_OFFSET_COUNT = 10;
private static final int MIN_PLAYHEAD_OFFSET_SAMPLE_INTERVAL_US = 30000;
private static final int MIN_LATENCY_SAMPLE_INTERVAL_US = 500000;
private final Listener listener;
// Circular buffer of (playheadPositionUs - systemTimeUs) samples; see maybeSampleSyncParams().
private final long[] playheadOffsets;
@Nullable private AudioTrack audioTrack;
private int outputPcmFrameSize;
private int bufferSize;
@Nullable private AudioTimestampPoller audioTimestampPoller;
private int outputSampleRate;
private boolean needsPassthroughWorkarounds;
private long bufferSizeUs;
private long smoothedPlayheadOffsetUs;
private long lastPlayheadSampleTimeUs;
// Reflective handle to the hidden AudioTrack.getLatency() method; null if unavailable or broken.
@Nullable private Method getLatencyMethod;
private long latencyUs;
private boolean hasData;
private boolean isOutputPcm;
private long lastLatencySampleTimeUs;
// Raw (32-bit) playback head bookkeeping for wraparound detection; see getPlaybackHeadPosition().
private long lastRawPlaybackHeadPosition;
private long rawPlaybackHeadWrapCount;
private long passthroughWorkaroundPauseOffset;
private int nextPlayheadOffsetIndex;
private int playheadOffsetCount;
// C.TIME_UNSET until handleEndOfStream(long) is called; then the time the stream ended.
private long stopTimestampUs;
private long forceResetWorkaroundTimeMs;
private long stopPlaybackHeadPosition;
private long endPlaybackHeadPosition;
/**
 * Creates a new audio track position tracker.
 *
 * @param listener A listener for position tracking events.
 */
public AudioTrackPositionTracker(Listener listener) {
this.listener = Assertions.checkNotNull(listener);
if (Util.SDK_INT >= 18) {
try {
getLatencyMethod = AudioTrack.class.getMethod("getLatency", (Class<?>[]) null);
} catch (NoSuchMethodException e) {
// There's no guarantee this method exists. Do nothing.
}
}
playheadOffsets = new long[MAX_PLAYHEAD_OFFSET_COUNT];
}
/**
 * Sets the {@link AudioTrack} to wrap. Subsequent method calls on this instance relate to this
 * track's position, until the next call to {@link #reset()}.
 *
 * @param audioTrack The audio track to wrap.
 * @param outputEncoding The encoding of the audio track.
 * @param outputPcmFrameSize For PCM output encodings, the frame size. The value is ignored
 * otherwise.
 * @param bufferSize The audio track buffer size in bytes.
 */
public void setAudioTrack(
AudioTrack audioTrack,
@C.Encoding int outputEncoding,
int outputPcmFrameSize,
int bufferSize) {
this.audioTrack = audioTrack;
this.outputPcmFrameSize = outputPcmFrameSize;
this.bufferSize = bufferSize;
audioTimestampPoller = new AudioTimestampPoller(audioTrack);
outputSampleRate = audioTrack.getSampleRate();
needsPassthroughWorkarounds = needsPassthroughWorkarounds(outputEncoding);
isOutputPcm = Util.isEncodingLinearPcm(outputEncoding);
bufferSizeUs = isOutputPcm ? framesToDurationUs(bufferSize / outputPcmFrameSize) : C.TIME_UNSET;
lastRawPlaybackHeadPosition = 0;
rawPlaybackHeadWrapCount = 0;
passthroughWorkaroundPauseOffset = 0;
hasData = false;
stopTimestampUs = C.TIME_UNSET;
forceResetWorkaroundTimeMs = C.TIME_UNSET;
latencyUs = 0;
}
/**
 * Returns the current playback position of the wrapped track, in microseconds.
 *
 * @param sourceEnded Whether the source has ended; if so the track latency is not subtracted.
 */
public long getCurrentPositionUs(boolean sourceEnded) {
if (Assertions.checkNotNull(this.audioTrack).getPlayState() == PLAYSTATE_PLAYING) {
maybeSampleSyncParams();
}
// If the device supports it, use the playback timestamp from AudioTrack.getTimestamp.
// Otherwise, derive a smoothed position by sampling the track's frame position.
long systemTimeUs = System.nanoTime() / 1000;
AudioTimestampPoller audioTimestampPoller = Assertions.checkNotNull(this.audioTimestampPoller);
if (audioTimestampPoller.hasTimestamp()) {
// Calculate the speed-adjusted position using the timestamp (which may be in the future).
long timestampPositionFrames = audioTimestampPoller.getTimestampPositionFrames();
long timestampPositionUs = framesToDurationUs(timestampPositionFrames);
if (!audioTimestampPoller.isTimestampAdvancing()) {
return timestampPositionUs;
}
long elapsedSinceTimestampUs = systemTimeUs - audioTimestampPoller.getTimestampSystemTimeUs();
return timestampPositionUs + elapsedSinceTimestampUs;
} else {
long positionUs;
if (playheadOffsetCount == 0) {
// The AudioTrack has started, but we don't have any samples to compute a smoothed position.
positionUs = getPlaybackHeadPositionUs();
} else {
// getPlaybackHeadPositionUs() only has a granularity of ~20 ms, so we base the position off
// the system clock (and a smoothed offset between it and the playhead position) so as to
// prevent jitter in the reported positions.
positionUs = systemTimeUs + smoothedPlayheadOffsetUs;
}
if (!sourceEnded) {
positionUs -= latencyUs;
}
return positionUs;
}
}
/** Starts position tracking. Must be called immediately before {@link AudioTrack#play()}. */
public void start() {
Assertions.checkNotNull(audioTimestampPoller).reset();
}
/** Returns whether the audio track is in the playing state. */
public boolean isPlaying() {
return Assertions.checkNotNull(audioTrack).getPlayState() == PLAYSTATE_PLAYING;
}
/**
 * Checks the state of the audio track and returns whether the caller can write data to the track.
 * Notifies {@link Listener#onUnderrun(int, long)} if the track has underrun.
 *
 * @param writtenFrames The number of frames that have been written.
 * @return Whether the caller can write data to the track.
 */
public boolean mayHandleBuffer(long writtenFrames) {
@PlayState int playState = Assertions.checkNotNull(audioTrack).getPlayState();
if (needsPassthroughWorkarounds) {
// An AC-3 audio track continues to play data written while it is paused. Stop writing so its
// buffer empties. See [Internal: b/18899620].
if (playState == PLAYSTATE_PAUSED) {
// We force an underrun to pause the track, so don't notify the listener in this case.
hasData = false;
return false;
}
// A new AC-3 audio track's playback position continues to increase from the old track's
// position for a short time after it has been released. Avoid writing data until the playback
// head position actually returns to zero.
if (playState == PLAYSTATE_STOPPED && getPlaybackHeadPosition() == 0) {
return false;
}
}
boolean hadData = hasData;
hasData = hasPendingData(writtenFrames);
// NOTE(review): listener is non-null (checkNotNull in the constructor), so the null check here
// appears redundant — confirm before removing.
if (hadData && !hasData && playState != PLAYSTATE_STOPPED && listener != null) {
listener.onUnderrun(bufferSize, C.usToMs(bufferSizeUs));
}
return true;
}
/**
 * Returns an estimate of the number of additional bytes that can be written to the audio track's
 * buffer without running out of space.
 *
 * <p>May only be called if the output encoding is one of the PCM encodings.
 *
 * @param writtenBytes The number of bytes written to the audio track so far.
 * @return An estimate of the number of bytes that can be written.
 */
public int getAvailableBufferSize(long writtenBytes) {
int bytesPending = (int) (writtenBytes - (getPlaybackHeadPosition() * outputPcmFrameSize));
return bufferSize - bytesPending;
}
/** Returns whether the track is in an invalid state and must be recreated. */
public boolean isStalled(long writtenFrames) {
return forceResetWorkaroundTimeMs != C.TIME_UNSET
&& writtenFrames > 0
&& SystemClock.elapsedRealtime() - forceResetWorkaroundTimeMs
>= FORCE_RESET_WORKAROUND_TIMEOUT_MS;
}
/**
 * Records the writing position at which the stream ended, so that the reported position can
 * continue to increment while remaining data is played out.
 *
 * @param writtenFrames The number of frames that have been written.
 */
public void handleEndOfStream(long writtenFrames) {
stopPlaybackHeadPosition = getPlaybackHeadPosition();
stopTimestampUs = SystemClock.elapsedRealtime() * 1000;
endPlaybackHeadPosition = writtenFrames;
}
/**
 * Returns whether the audio track has any pending data to play out at its current position.
 *
 * @param writtenFrames The number of frames written to the audio track.
 * @return Whether the audio track has any pending data to play out.
 */
public boolean hasPendingData(long writtenFrames) {
return writtenFrames > getPlaybackHeadPosition()
|| forceHasPendingData();
}
/**
 * Pauses the audio track position tracker, returning whether the audio track needs to be paused
 * to cause playback to pause. If {@code false} is returned the audio track will pause without
 * further interaction, as the end of stream has been handled.
 */
public boolean pause() {
resetSyncParams();
if (stopTimestampUs == C.TIME_UNSET) {
// The audio track is going to be paused, so reset the timestamp poller to ensure it doesn't
// supply an advancing position.
Assertions.checkNotNull(audioTimestampPoller).reset();
return true;
}
// We've handled the end of the stream already, so there's no need to pause the track.
return false;
}
/**
 * Resets the position tracker. Should be called when the audio track previous passed to {@link
 * #setAudioTrack(AudioTrack, int, int, int)} is no longer in use.
 */
public void reset() {
resetSyncParams();
audioTrack = null;
audioTimestampPoller = null;
}
// Samples the playhead offset, timestamp and latency if enough time has elapsed since the last
// samples were taken.
private void maybeSampleSyncParams() {
long playbackPositionUs = getPlaybackHeadPositionUs();
if (playbackPositionUs == 0) {
// The AudioTrack hasn't output anything yet.
return;
}
long systemTimeUs = System.nanoTime() / 1000;
if (systemTimeUs - lastPlayheadSampleTimeUs >= MIN_PLAYHEAD_OFFSET_SAMPLE_INTERVAL_US) {
// Take a new sample and update the smoothed offset between the system clock and the playhead.
playheadOffsets[nextPlayheadOffsetIndex] = playbackPositionUs - systemTimeUs;
nextPlayheadOffsetIndex = (nextPlayheadOffsetIndex + 1) % MAX_PLAYHEAD_OFFSET_COUNT;
if (playheadOffsetCount < MAX_PLAYHEAD_OFFSET_COUNT) {
playheadOffsetCount++;
}
lastPlayheadSampleTimeUs = systemTimeUs;
smoothedPlayheadOffsetUs = 0;
for (int i = 0; i < playheadOffsetCount; i++) {
smoothedPlayheadOffsetUs += playheadOffsets[i] / playheadOffsetCount;
}
}
if (needsPassthroughWorkarounds) {
// Don't sample the timestamp and latency if this is an AC-3 passthrough AudioTrack on
// platform API versions 21/22, as incorrect values are returned. See [Internal: b/21145353].
return;
}
maybePollAndCheckTimestamp(systemTimeUs, playbackPositionUs);
maybeUpdateLatency(systemTimeUs);
}
// Polls the timestamp poller and accepts or rejects the new timestamp based on sanity checks
// against the system clock and the playback head position.
private void maybePollAndCheckTimestamp(long systemTimeUs, long playbackPositionUs) {
AudioTimestampPoller audioTimestampPoller = Assertions.checkNotNull(this.audioTimestampPoller);
if (!audioTimestampPoller.maybePollTimestamp(systemTimeUs)) {
return;
}
// Perform sanity checks on the timestamp and accept/reject it.
long audioTimestampSystemTimeUs = audioTimestampPoller.getTimestampSystemTimeUs();
long audioTimestampPositionFrames = audioTimestampPoller.getTimestampPositionFrames();
if (Math.abs(audioTimestampSystemTimeUs - systemTimeUs) > MAX_AUDIO_TIMESTAMP_OFFSET_US) {
listener.onSystemTimeUsMismatch(
audioTimestampPositionFrames,
audioTimestampSystemTimeUs,
systemTimeUs,
playbackPositionUs);
audioTimestampPoller.rejectTimestamp();
} else if (Math.abs(framesToDurationUs(audioTimestampPositionFrames) - playbackPositionUs)
> MAX_AUDIO_TIMESTAMP_OFFSET_US) {
listener.onPositionFramesMismatch(
audioTimestampPositionFrames,
audioTimestampSystemTimeUs,
systemTimeUs,
playbackPositionUs);
audioTimestampPoller.rejectTimestamp();
} else {
audioTimestampPoller.acceptTimestamp();
}
}
// Re-samples the track latency via the hidden AudioTrack.getLatency() method, if available.
private void maybeUpdateLatency(long systemTimeUs) {
if (isOutputPcm
&& getLatencyMethod != null
&& systemTimeUs - lastLatencySampleTimeUs >= MIN_LATENCY_SAMPLE_INTERVAL_US) {
try {
// Compute the audio track latency, excluding the latency due to the buffer (leaving
// latency due to the mixer and audio hardware driver).
latencyUs =
castNonNull((Integer) getLatencyMethod.invoke(Assertions.checkNotNull(audioTrack)))
* 1000L
- bufferSizeUs;
// Sanity check that the latency is non-negative.
latencyUs = Math.max(latencyUs, 0);
// Sanity check that the latency isn't too large.
if (latencyUs > MAX_LATENCY_US) {
listener.onInvalidLatency(latencyUs);
latencyUs = 0;
}
} catch (Exception e) {
// The method existed, but doesn't work. Don't try again.
getLatencyMethod = null;
}
lastLatencySampleTimeUs = systemTimeUs;
}
}
// Converts a frame count to a duration in microseconds at the track's output sample rate.
private long framesToDurationUs(long frameCount) {
return (frameCount * C.MICROS_PER_SECOND) / outputSampleRate;
}
// Clears the smoothed playhead offset samples.
private void resetSyncParams() {
smoothedPlayheadOffsetUs = 0;
playheadOffsetCount = 0;
nextPlayheadOffsetIndex = 0;
lastPlayheadSampleTimeUs = 0;
}
/**
 * If passthrough workarounds are enabled, pausing is implemented by forcing the AudioTrack to
 * underrun. In this case, still behave as if we have pending data, otherwise writing won't
 * resume.
 */
private boolean forceHasPendingData() {
return needsPassthroughWorkarounds
&& Assertions.checkNotNull(audioTrack).getPlayState() == AudioTrack.PLAYSTATE_PAUSED
&& getPlaybackHeadPosition() == 0;
}
/**
 * Returns whether to work around problems with passthrough audio tracks. See [Internal:
 * b/18899620, b/19187573, b/21145353].
 */
private static boolean needsPassthroughWorkarounds(@C.Encoding int outputEncoding) {
return Util.SDK_INT < 23
&& (outputEncoding == C.ENCODING_AC3 || outputEncoding == C.ENCODING_E_AC3);
}
private long getPlaybackHeadPositionUs() {
return framesToDurationUs(getPlaybackHeadPosition());
}
/**
 * {@link AudioTrack#getPlaybackHeadPosition()} returns a value intended to be interpreted as an
 * unsigned 32 bit integer, which also wraps around periodically. This method returns the playback
 * head position as a long that will only wrap around if the value exceeds {@link Long#MAX_VALUE}
 * (which in practice will never happen).
 *
 * @return The playback head position, in frames.
 */
private long getPlaybackHeadPosition() {
AudioTrack audioTrack = Assertions.checkNotNull(this.audioTrack);
if (stopTimestampUs != C.TIME_UNSET) {
// Simulate the playback head position up to the total number of frames submitted.
long elapsedTimeSinceStopUs = (SystemClock.elapsedRealtime() * 1000) - stopTimestampUs;
long framesSinceStop = (elapsedTimeSinceStopUs * outputSampleRate) / C.MICROS_PER_SECOND;
return Math.min(endPlaybackHeadPosition, stopPlaybackHeadPosition + framesSinceStop);
}
int state = audioTrack.getPlayState();
if (state == PLAYSTATE_STOPPED) {
// The audio track hasn't been started.
return 0;
}
long rawPlaybackHeadPosition = 0xFFFFFFFFL & audioTrack.getPlaybackHeadPosition();
if (needsPassthroughWorkarounds) {
// Work around an issue with passthrough/direct AudioTracks on platform API versions 21/22
// where the playback head position jumps back to zero on paused passthrough/direct audio
// tracks. See [Internal: b/19187573].
if (state == PLAYSTATE_PAUSED && rawPlaybackHeadPosition == 0) {
passthroughWorkaroundPauseOffset = lastRawPlaybackHeadPosition;
}
rawPlaybackHeadPosition += passthroughWorkaroundPauseOffset;
}
if (Util.SDK_INT <= 29) {
if (rawPlaybackHeadPosition == 0
&& lastRawPlaybackHeadPosition > 0
&& state == PLAYSTATE_PLAYING) {
// If connecting a Bluetooth audio device fails, the AudioTrack may be left in a state
// where its Java API is in the playing state, but the native track is stopped. When this
// happens the playback head position gets stuck at zero. In this case, return the old
// playback head position and force the track to be reset after
// {@link #FORCE_RESET_WORKAROUND_TIMEOUT_MS} has elapsed.
if (forceResetWorkaroundTimeMs == C.TIME_UNSET) {
forceResetWorkaroundTimeMs = SystemClock.elapsedRealtime();
}
return lastRawPlaybackHeadPosition;
} else {
forceResetWorkaroundTimeMs = C.TIME_UNSET;
}
}
if (lastRawPlaybackHeadPosition > rawPlaybackHeadPosition) {
// The value must have wrapped around.
rawPlaybackHeadWrapCount++;
}
lastRawPlaybackHeadPosition = rawPlaybackHeadPosition;
return rawPlaybackHeadPosition + (rawPlaybackHeadWrapCount << 32);
}
}

Просмотреть файл

@ -0,0 +1,85 @@
/*
* Copyright (C) 2018 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.mozilla.thirdparty.com.google.android.exoplayer2.audio;
import android.media.AudioTrack;
import android.media.audiofx.AudioEffect;
import androidx.annotation.Nullable;
/**
 * Holds the information needed to attach an auxiliary audio effect to an underlying {@link
 * AudioTrack}: the effect's identifier together with its send level.
 *
 * <p>Auxiliary effects can only be applied when the application holds the {@code
 * android.permission.MODIFY_AUDIO_SETTINGS} permission. The app owns the associated {@link
 * AudioEffect} instance and must release it once it is no longer needed; see the {@link
 * AudioEffect} documentation for more information.
 */
public final class AuxEffectInfo {

  /** Value for {@link #effectId} representing no auxiliary effect. */
  public static final int NO_AUX_EFFECT_ID = 0;

  /**
   * The identifier of the effect, or {@link #NO_AUX_EFFECT_ID} if there is no effect.
   *
   * @see android.media.AudioTrack#attachAuxEffect(int)
   */
  public final int effectId;

  /**
   * The send level for the effect.
   *
   * @see android.media.AudioTrack#setAuxEffectSendLevel(float)
   */
  public final float sendLevel;

  /**
   * Creates an instance with the given effect identifier and send level.
   *
   * @param effectId The effect identifier returned by {@link AudioEffect#getId()}, or {@value
   *     NO_AUX_EFFECT_ID} for no effect. Passed to {@link AudioTrack#attachAuxEffect(int)} on the
   *     underlying audio track.
   * @param sendLevel The send level, where 0 represents no effect and 1 is full send. If {@code
   *     effectId} is not {@value #NO_AUX_EFFECT_ID}, this value is passed to {@link
   *     AudioTrack#setAuxEffectSendLevel(float)} on the underlying audio track.
   */
  public AuxEffectInfo(int effectId, float sendLevel) {
    this.effectId = effectId;
    this.sendLevel = sendLevel;
  }

  @Override
  public boolean equals(@Nullable Object o) {
    if (o == this) {
      return true;
    }
    // This class is final, so an instanceof check is equivalent to comparing getClass().
    if (!(o instanceof AuxEffectInfo)) {
      return false;
    }
    AuxEffectInfo other = (AuxEffectInfo) o;
    return other.effectId == effectId && Float.compare(other.sendLevel, sendLevel) == 0;
  }

  @Override
  public int hashCode() {
    // Conventional 17/31 accumulation; produces the same values as before.
    int hash = 17;
    hash = 31 * hash + effectId;
    hash = 31 * hash + Float.floatToIntBits(sendLevel);
    return hash;
  }
}

Просмотреть файл

@ -0,0 +1,143 @@
/*
* Copyright (C) 2019 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.mozilla.thirdparty.com.google.android.exoplayer2.audio;
import androidx.annotation.CallSuper;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
/**
 * Convenience superclass for {@link AudioProcessor} implementations. It owns a reusable internal
 * {@link ByteBuffer} that subclasses fill whenever input is queued, and exposes the most recently
 * produced data via {@link #getOutput()}. Subclasses should override {@link
 * #onConfigure(AudioFormat)} and return the processor's output format when it is active.
 */
public abstract class BaseAudioProcessor implements AudioProcessor {

  /** The current input audio format. */
  protected AudioFormat inputAudioFormat;
  /** The current output audio format. */
  protected AudioFormat outputAudioFormat;

  // Formats from the most recent configure() call; applied on the next flush().
  private AudioFormat pendingInputAudioFormat;
  private AudioFormat pendingOutputAudioFormat;
  // Reusable backing buffer that grows on demand, and the slice currently exposed as output.
  private ByteBuffer internalBuffer;
  private ByteBuffer outputBuffer;
  private boolean inputEnded;

  public BaseAudioProcessor() {
    // Start with no buffered data and every format at its NOT_SET sentinel.
    internalBuffer = EMPTY_BUFFER;
    outputBuffer = EMPTY_BUFFER;
    pendingInputAudioFormat = AudioFormat.NOT_SET;
    pendingOutputAudioFormat = AudioFormat.NOT_SET;
    inputAudioFormat = AudioFormat.NOT_SET;
    outputAudioFormat = AudioFormat.NOT_SET;
  }

  @Override
  public final AudioFormat configure(AudioFormat inputAudioFormat)
      throws UnhandledAudioFormatException {
    // Record both formats now; they only become current on the next flush().
    pendingInputAudioFormat = inputAudioFormat;
    pendingOutputAudioFormat = onConfigure(inputAudioFormat);
    return isActive() ? pendingOutputAudioFormat : AudioFormat.NOT_SET;
  }

  @Override
  public boolean isActive() {
    // Reference comparison is intentional: NOT_SET is a sentinel instance.
    return pendingOutputAudioFormat != AudioFormat.NOT_SET;
  }

  @Override
  public final void queueEndOfStream() {
    inputEnded = true;
    onQueueEndOfStream();
  }

  @CallSuper
  @Override
  public ByteBuffer getOutput() {
    ByteBuffer pendingOutput = outputBuffer;
    // Hand the data over exactly once; later calls return EMPTY_BUFFER until more is produced.
    outputBuffer = EMPTY_BUFFER;
    return pendingOutput;
  }

  @CallSuper
  @SuppressWarnings("ReferenceEquality")
  @Override
  public boolean isEnded() {
    // Ended once end-of-stream was queued and the last output has been consumed.
    return inputEnded && outputBuffer == EMPTY_BUFFER;
  }

  @Override
  public final void flush() {
    outputBuffer = EMPTY_BUFFER;
    inputEnded = false;
    // Promote the pending formats so they take effect from now on.
    inputAudioFormat = pendingInputAudioFormat;
    outputAudioFormat = pendingOutputAudioFormat;
    onFlush();
  }

  @Override
  public final void reset() {
    flush();
    // Drop the backing buffer and forget all negotiated formats.
    internalBuffer = EMPTY_BUFFER;
    pendingInputAudioFormat = AudioFormat.NOT_SET;
    pendingOutputAudioFormat = AudioFormat.NOT_SET;
    inputAudioFormat = AudioFormat.NOT_SET;
    outputAudioFormat = AudioFormat.NOT_SET;
    onReset();
  }

  /**
   * Replaces the current output buffer with a buffer of at least {@code count} bytes and returns
   * it. Callers should write to the returned buffer then {@link ByteBuffer#flip()} it so it can be
   * read via {@link #getOutput()}.
   */
  protected final ByteBuffer replaceOutputBuffer(int count) {
    if (internalBuffer.capacity() < count) {
      // Grow by allocating a fresh direct buffer in native byte order.
      internalBuffer = ByteBuffer.allocateDirect(count).order(ByteOrder.nativeOrder());
    } else {
      internalBuffer.clear();
    }
    outputBuffer = internalBuffer;
    return internalBuffer;
  }

  /** Returns whether the current output buffer has any data remaining. */
  protected final boolean hasPendingOutput() {
    return outputBuffer.hasRemaining();
  }

  /** Called when the processor is configured for a new input format. */
  protected AudioFormat onConfigure(AudioFormat inputAudioFormat)
      throws UnhandledAudioFormatException {
    return AudioFormat.NOT_SET;
  }

  /** Called when the end-of-stream is queued to the processor. */
  protected void onQueueEndOfStream() {
    // Do nothing.
  }

  /** Called when the processor is flushed, directly or as part of resetting. */
  protected void onFlush() {
    // Do nothing.
  }

  /** Called when the processor is reset. */
  protected void onReset() {
    // Do nothing.
  }
}

Просмотреть файл

@ -15,148 +15,85 @@
*/
package org.mozilla.thirdparty.com.google.android.exoplayer2.audio;
import androidx.annotation.Nullable;
import org.mozilla.thirdparty.com.google.android.exoplayer2.C;
import org.mozilla.thirdparty.com.google.android.exoplayer2.C.Encoding;
import org.mozilla.thirdparty.com.google.android.exoplayer2.Format;
import org.mozilla.thirdparty.com.google.android.exoplayer2.util.Assertions;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.util.Arrays;
/**
* An {@link AudioProcessor} that applies a mapping from input channels onto specified output
* channels. This can be used to reorder, duplicate or discard channels.
*/
/* package */ final class ChannelMappingAudioProcessor implements AudioProcessor {
@SuppressWarnings("nullness:initialization.fields.uninitialized")
/* package */ final class ChannelMappingAudioProcessor extends BaseAudioProcessor {
private int channelCount;
private int sampleRateHz;
private int[] pendingOutputChannels;
private boolean active;
private int[] outputChannels;
private ByteBuffer buffer;
private ByteBuffer outputBuffer;
private boolean inputEnded;
@Nullable private int[] pendingOutputChannels;
@Nullable private int[] outputChannels;
/**
* Creates a new processor that applies a channel mapping.
*/
public ChannelMappingAudioProcessor() {
buffer = EMPTY_BUFFER;
outputBuffer = EMPTY_BUFFER;
channelCount = Format.NO_VALUE;
sampleRateHz = Format.NO_VALUE;
}
/**
* Resets the channel mapping. After calling this method, call {@link #configure(int, int, int)}
* to start using the new channel map.
* Resets the channel mapping. After calling this method, call {@link #configure(AudioFormat)} to
* start using the new channel map.
*
* @see AudioTrack#configure(String, int, int, int, int, int[])
* @param outputChannels The mapping from input to output channel indices, or {@code null} to
* leave the input unchanged.
* @see AudioSink#configure(int, int, int, int, int[], int, int)
*/
public void setChannelMap(int[] outputChannels) {
public void setChannelMap(@Nullable int[] outputChannels) {
pendingOutputChannels = outputChannels;
}
@Override
public boolean configure(int sampleRateHz, int channelCount, @Encoding int encoding)
throws UnhandledFormatException {
boolean outputChannelsChanged = !Arrays.equals(pendingOutputChannels, outputChannels);
outputChannels = pendingOutputChannels;
public AudioFormat onConfigure(AudioFormat inputAudioFormat)
throws UnhandledAudioFormatException {
@Nullable int[] outputChannels = pendingOutputChannels;
if (outputChannels == null) {
active = false;
return outputChannelsChanged;
return AudioFormat.NOT_SET;
}
if (encoding != C.ENCODING_PCM_16BIT) {
throw new UnhandledFormatException(sampleRateHz, channelCount, encoding);
}
if (!outputChannelsChanged && this.sampleRateHz == sampleRateHz
&& this.channelCount == channelCount) {
return false;
}
this.sampleRateHz = sampleRateHz;
this.channelCount = channelCount;
active = channelCount != outputChannels.length;
if (inputAudioFormat.encoding != C.ENCODING_PCM_16BIT) {
throw new UnhandledAudioFormatException(inputAudioFormat);
}
boolean active = inputAudioFormat.channelCount != outputChannels.length;
for (int i = 0; i < outputChannels.length; i++) {
int channelIndex = outputChannels[i];
if (channelIndex >= channelCount) {
throw new UnhandledFormatException(sampleRateHz, channelCount, encoding);
if (channelIndex >= inputAudioFormat.channelCount) {
throw new UnhandledAudioFormatException(inputAudioFormat);
}
active |= (channelIndex != i);
}
return true;
}
@Override
public boolean isActive() {
return active;
}
@Override
public int getOutputChannelCount() {
return outputChannels == null ? channelCount : outputChannels.length;
}
@Override
public int getOutputEncoding() {
return C.ENCODING_PCM_16BIT;
return active
? new AudioFormat(inputAudioFormat.sampleRate, outputChannels.length, C.ENCODING_PCM_16BIT)
: AudioFormat.NOT_SET;
}
@Override
public void queueInput(ByteBuffer inputBuffer) {
int[] outputChannels = Assertions.checkNotNull(this.outputChannels);
int position = inputBuffer.position();
int limit = inputBuffer.limit();
int frameCount = (limit - position) / (2 * channelCount);
int outputSize = frameCount * outputChannels.length * 2;
if (buffer.capacity() < outputSize) {
buffer = ByteBuffer.allocateDirect(outputSize).order(ByteOrder.nativeOrder());
} else {
buffer.clear();
}
int frameCount = (limit - position) / inputAudioFormat.bytesPerFrame;
int outputSize = frameCount * outputAudioFormat.bytesPerFrame;
ByteBuffer buffer = replaceOutputBuffer(outputSize);
while (position < limit) {
for (int channelIndex : outputChannels) {
buffer.putShort(inputBuffer.getShort(position + 2 * channelIndex));
}
position += channelCount * 2;
position += inputAudioFormat.bytesPerFrame;
}
inputBuffer.position(limit);
buffer.flip();
outputBuffer = buffer;
}
@Override
public void queueEndOfStream() {
inputEnded = true;
protected void onFlush() {
outputChannels = pendingOutputChannels;
}
@Override
public ByteBuffer getOutput() {
ByteBuffer outputBuffer = this.outputBuffer;
this.outputBuffer = EMPTY_BUFFER;
return outputBuffer;
}
@SuppressWarnings("ReferenceEquality")
@Override
public boolean isEnded() {
return inputEnded && outputBuffer == EMPTY_BUFFER;
}
@Override
public void flush() {
outputBuffer = EMPTY_BUFFER;
inputEnded = false;
}
@Override
public void reset() {
flush();
buffer = EMPTY_BUFFER;
channelCount = Format.NO_VALUE;
sampleRateHz = Format.NO_VALUE;
protected void onReset() {
outputChannels = null;
active = false;
pendingOutputChannels = null;
}
}

Разница между файлами не показана из-за своего большого размера Загрузить разницу

Просмотреть файл

@ -15,17 +15,28 @@
*/
package org.mozilla.thirdparty.com.google.android.exoplayer2.audio;
import androidx.annotation.Nullable;
import org.mozilla.thirdparty.com.google.android.exoplayer2.Format;
import org.mozilla.thirdparty.com.google.android.exoplayer2.drm.DrmInitData;
import org.mozilla.thirdparty.com.google.android.exoplayer2.util.MimeTypes;
import org.mozilla.thirdparty.com.google.android.exoplayer2.util.ParsableBitArray;
import java.nio.ByteBuffer;
import java.util.Arrays;
/**
* Utility methods for parsing DTS frames.
*/
public final class DtsUtil {
private static final int SYNC_VALUE_BE = 0x7FFE8001;
private static final int SYNC_VALUE_14B_BE = 0x1FFFE800;
private static final int SYNC_VALUE_LE = 0xFE7F0180;
private static final int SYNC_VALUE_14B_LE = 0xFF1F00E8;
private static final byte FIRST_BYTE_BE = (byte) (SYNC_VALUE_BE >>> 24);
private static final byte FIRST_BYTE_14B_BE = (byte) (SYNC_VALUE_14B_BE >>> 24);
private static final byte FIRST_BYTE_LE = (byte) (SYNC_VALUE_LE >>> 24);
private static final byte FIRST_BYTE_14B_LE = (byte) (SYNC_VALUE_14B_LE >>> 24);
/**
* Maps AMODE to the number of channels. See ETSI TS 102 114 table 5.4.
*/
@ -45,20 +56,34 @@ public final class DtsUtil {
384, 448, 512, 640, 768, 896, 1024, 1152, 1280, 1536, 1920, 2048, 2304, 2560, 2688, 2816,
2823, 2944, 3072, 3840, 4096, 6144, 7680};
/**
* Returns whether a given integer matches a DTS sync word. Synchronization and storage modes are
* defined in ETSI TS 102 114 V1.1.1 (2002-08), Section 5.3.
*
* @param word An integer.
* @return Whether a given integer matches a DTS sync word.
*/
public static boolean isSyncWord(int word) {
return word == SYNC_VALUE_BE
|| word == SYNC_VALUE_LE
|| word == SYNC_VALUE_14B_BE
|| word == SYNC_VALUE_14B_LE;
}
/**
* Returns the DTS format given {@code data} containing the DTS frame according to ETSI TS 102 114
* subsections 5.3/5.4.
*
* @param frame The DTS frame to parse.
* @param trackId The track identifier to set on the format, or null.
* @param trackId The track identifier to set on the format.
* @param language The language to set on the format.
* @param drmInitData {@link DrmInitData} to be included in the format.
* @return The DTS format parsed from data in the header.
*/
public static Format parseDtsFormat(byte[] frame, String trackId, String language,
DrmInitData drmInitData) {
ParsableBitArray frameBits = new ParsableBitArray(frame);
frameBits.skipBits(4 * 8 + 1 + 5 + 1 + 7 + 14); // SYNC, FTYPE, SHORT, CPF, NBLKS, FSIZE
public static Format parseDtsFormat(
byte[] frame, String trackId, @Nullable String language, @Nullable DrmInitData drmInitData) {
ParsableBitArray frameBits = getNormalizedFrameHeader(frame);
frameBits.skipBits(32 + 1 + 5 + 1 + 7 + 14); // SYNC, FTYPE, SHORT, CPF, NBLKS, FSIZE
int amode = frameBits.readBits(6);
int channelCount = CHANNELS_BY_AMODE[amode];
int sfreq = frameBits.readBits(4);
@ -79,8 +104,21 @@ public final class DtsUtil {
* @return The number of audio samples represented by the frame.
*/
public static int parseDtsAudioSampleCount(byte[] data) {
// See ETSI TS 102 114 subsection 5.4.1.
int nblks = ((data[4] & 0x01) << 6) | ((data[5] & 0xFC) >> 2);
int nblks;
switch (data[0]) {
case FIRST_BYTE_LE:
nblks = ((data[5] & 0x01) << 6) | ((data[4] & 0xFC) >> 2);
break;
case FIRST_BYTE_14B_LE:
nblks = ((data[4] & 0x07) << 4) | ((data[7] & 0x3C) >> 2);
break;
case FIRST_BYTE_14B_BE:
nblks = ((data[5] & 0x07) << 4) | ((data[6] & 0x3C) >> 2);
break;
default:
// We blindly assume FIRST_BYTE_BE if none of the others match.
nblks = ((data[4] & 0x01) << 6) | ((data[5] & 0xFC) >> 2);
}
return (nblks + 1) * 32;
}
@ -94,8 +132,21 @@ public final class DtsUtil {
public static int parseDtsAudioSampleCount(ByteBuffer buffer) {
// See ETSI TS 102 114 subsection 5.4.1.
int position = buffer.position();
int nblks = ((buffer.get(position + 4) & 0x01) << 6)
| ((buffer.get(position + 5) & 0xFC) >> 2);
int nblks;
switch (buffer.get(position)) {
case FIRST_BYTE_LE:
nblks = ((buffer.get(position + 5) & 0x01) << 6) | ((buffer.get(position + 4) & 0xFC) >> 2);
break;
case FIRST_BYTE_14B_LE:
nblks = ((buffer.get(position + 4) & 0x07) << 4) | ((buffer.get(position + 7) & 0x3C) >> 2);
break;
case FIRST_BYTE_14B_BE:
nblks = ((buffer.get(position + 5) & 0x07) << 4) | ((buffer.get(position + 6) & 0x3C) >> 2);
break;
default:
// We blindly assume FIRST_BYTE_BE if none of the others match.
nblks = ((buffer.get(position + 4) & 0x01) << 6) | ((buffer.get(position + 5) & 0xFC) >> 2);
}
return (nblks + 1) * 32;
}
@ -106,9 +157,59 @@ public final class DtsUtil {
* @return The frame's size in bytes.
*/
public static int getDtsFrameSize(byte[] data) {
return (((data[5] & 0x02) << 12)
| ((data[6] & 0xFF) << 4)
| ((data[7] & 0xF0) >> 4)) + 1;
int fsize;
boolean uses14BitPerWord = false;
switch (data[0]) {
case FIRST_BYTE_14B_BE:
fsize = (((data[6] & 0x03) << 12) | ((data[7] & 0xFF) << 4) | ((data[8] & 0x3C) >> 2)) + 1;
uses14BitPerWord = true;
break;
case FIRST_BYTE_LE:
fsize = (((data[4] & 0x03) << 12) | ((data[7] & 0xFF) << 4) | ((data[6] & 0xF0) >> 4)) + 1;
break;
case FIRST_BYTE_14B_LE:
fsize = (((data[7] & 0x03) << 12) | ((data[6] & 0xFF) << 4) | ((data[9] & 0x3C) >> 2)) + 1;
uses14BitPerWord = true;
break;
default:
// We blindly assume FIRST_BYTE_BE if none of the others match.
fsize = (((data[5] & 0x03) << 12) | ((data[6] & 0xFF) << 4) | ((data[7] & 0xF0) >> 4)) + 1;
}
// If the frame is stored in 14-bit mode, adjust the frame size to reflect the actual byte size.
return uses14BitPerWord ? fsize * 16 / 14 : fsize;
}
  /**
   * Returns a {@link ParsableBitArray} positioned at the start of {@code frameHeader}, with the
   * header's contents normalized to 16-bit-per-word big endian storage so that fields can be read
   * at fixed bit offsets regardless of the frame's original storage mode.
   *
   * @param frameHeader The DTS frame header, in any of the four sync/storage modes.
   * @return A bit array over the normalized header (a copy if rewriting was required).
   */
  private static ParsableBitArray getNormalizedFrameHeader(byte[] frameHeader) {
    if (frameHeader[0] == FIRST_BYTE_BE) {
      // The frame is already 16-bit mode, big endian.
      return new ParsableBitArray(frameHeader);
    }
    // Data is not normalized, but we don't want to modify frameHeader.
    frameHeader = Arrays.copyOf(frameHeader, frameHeader.length);
    if (isLittleEndianFrameHeader(frameHeader)) {
      // Change endianness.
      for (int i = 0; i < frameHeader.length - 1; i += 2) {
        byte temp = frameHeader[i];
        frameHeader[i] = frameHeader[i + 1];
        frameHeader[i + 1] = temp;
      }
    }
    ParsableBitArray frameBits = new ParsableBitArray(frameHeader);
    if (frameHeader[0] == (byte) (SYNC_VALUE_14B_BE >> 24)) {
      // Discard the 2 most significant bits of each 16 bit word.
      // frameBits writes the packed 14-bit values back into frameHeader while scratchBits reads
      // ahead of it, so the compaction happens in place on the copy.
      ParsableBitArray scratchBits = new ParsableBitArray(frameHeader);
      while (scratchBits.bitsLeft() >= 16) {
        scratchBits.skipBits(2);
        frameBits.putInt(scratchBits.readBits(14), 14);
      }
    }
    // Rewind to the start of the (now normalized) header before returning.
    frameBits.reset(frameHeader);
    return frameBits;
  }
  /** Returns whether the sync word at the start of {@code frameHeader} is little endian. */
  private static boolean isLittleEndianFrameHeader(byte[] frameHeader) {
    return frameHeader[0] == FIRST_BYTE_LE || frameHeader[0] == FIRST_BYTE_14B_LE;
  }
private DtsUtil() {}

Просмотреть файл

@ -0,0 +1,109 @@
/*
* Copyright (C) 2018 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.mozilla.thirdparty.com.google.android.exoplayer2.audio;
import org.mozilla.thirdparty.com.google.android.exoplayer2.C;
import org.mozilla.thirdparty.com.google.android.exoplayer2.Format;
import org.mozilla.thirdparty.com.google.android.exoplayer2.util.Util;
import java.nio.ByteBuffer;
/**
 * An {@link AudioProcessor} that converts high resolution PCM audio to 32-bit float. The following
 * encodings are supported as input:
 *
 * <ul>
 *   <li>{@link C#ENCODING_PCM_24BIT}
 *   <li>{@link C#ENCODING_PCM_32BIT}
 *   <li>{@link C#ENCODING_PCM_FLOAT} ({@link #isActive()} will return {@code false})
 * </ul>
 */
/* package */ final class FloatResamplingAudioProcessor extends BaseAudioProcessor {

  // Bit pattern of Float.NaN, used to filter NaN results out of the output.
  private static final int FLOAT_NAN_AS_INT = Float.floatToIntBits(Float.NaN);
  // Scale factor mapping a full-range 32-bit integer sample onto [-1.0, 1.0].
  private static final double PCM_32_BIT_INT_TO_PCM_32_BIT_FLOAT_FACTOR = 1.0 / 0x7FFFFFFF;

  @Override
  public AudioFormat onConfigure(AudioFormat inputAudioFormat)
      throws UnhandledAudioFormatException {
    @C.PcmEncoding int encoding = inputAudioFormat.encoding;
    if (!Util.isEncodingHighResolutionPcm(encoding)) {
      // Only high resolution PCM input is handled by this processor.
      throw new UnhandledAudioFormatException(inputAudioFormat);
    }
    if (encoding == C.ENCODING_PCM_FLOAT) {
      // Input is already float; the processor stays inactive.
      return AudioFormat.NOT_SET;
    }
    return new AudioFormat(
        inputAudioFormat.sampleRate, inputAudioFormat.channelCount, C.ENCODING_PCM_FLOAT);
  }

  @Override
  public void queueInput(ByteBuffer inputBuffer) {
    int start = inputBuffer.position();
    int end = inputBuffer.limit();
    int byteCount = end - start;

    ByteBuffer output;
    @C.PcmEncoding int encoding = inputAudioFormat.encoding;
    if (encoding == C.ENCODING_PCM_24BIT) {
      // Each 3-byte little endian sample becomes one 4-byte float, occupying the top 24 bits of
      // the intermediate 32-bit integer.
      output = replaceOutputBuffer((byteCount / 3) * 4);
      for (int index = start; index < end; index += 3) {
        int pcm32BitInteger =
            ((inputBuffer.get(index) & 0xFF) << 8)
                | ((inputBuffer.get(index + 1) & 0xFF) << 16)
                | ((inputBuffer.get(index + 2) & 0xFF) << 24);
        writePcm32BitFloat(pcm32BitInteger, output);
      }
    } else if (encoding == C.ENCODING_PCM_32BIT) {
      // 4-byte little endian integer samples map one-to-one onto 4-byte floats.
      output = replaceOutputBuffer(byteCount);
      for (int index = start; index < end; index += 4) {
        int pcm32BitInteger =
            (inputBuffer.get(index) & 0xFF)
                | ((inputBuffer.get(index + 1) & 0xFF) << 8)
                | ((inputBuffer.get(index + 2) & 0xFF) << 16)
                | ((inputBuffer.get(index + 3) & 0xFF) << 24);
        writePcm32BitFloat(pcm32BitInteger, output);
      }
    } else {
      // onConfigure only activates for 24/32-bit integer PCM, so this is unreachable.
      throw new IllegalStateException();
    }

    inputBuffer.position(inputBuffer.limit());
    output.flip();
  }

  /**
   * Converts the provided 32-bit integer to a 32-bit float value and writes it to {@code buffer}.
   *
   * @param pcm32BitInt The 32-bit integer value to convert to 32-bit float in [-1.0, 1.0].
   * @param buffer The output buffer.
   */
  private static void writePcm32BitFloat(int pcm32BitInt, ByteBuffer buffer) {
    float pcm32BitFloat = (float) (PCM_32_BIT_INT_TO_PCM_32_BIT_FLOAT_FACTOR * pcm32BitInt);
    int floatBits = Float.floatToIntBits(pcm32BitFloat);
    if (floatBits == FLOAT_NAN_AS_INT) {
      // Replace NaN with positive zero so downstream consumers never see NaN samples.
      floatBits = Float.floatToIntBits((float) 0.0);
    }
    buffer.putInt(floatBits);
  }
}

Просмотреть файл

@ -0,0 +1,151 @@
/*
* Copyright (C) 2020 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.mozilla.thirdparty.com.google.android.exoplayer2.audio;
import androidx.annotation.Nullable;
import org.mozilla.thirdparty.com.google.android.exoplayer2.PlaybackParameters;
import java.nio.ByteBuffer;
/**
 * An {@link AudioSink} that forwards every call to another sink. Subclass it and override
 * individual methods to intercept specific operations while delegating everything else.
 */
public class ForwardingAudioSink implements AudioSink {

  // Target sink that receives every forwarded call.
  private final AudioSink delegate;

  public ForwardingAudioSink(AudioSink sink) {
    this.delegate = sink;
  }

  @Override
  public void setListener(Listener listener) {
    delegate.setListener(listener);
  }

  @Override
  public boolean supportsOutput(int channelCount, int encoding) {
    return delegate.supportsOutput(channelCount, encoding);
  }

  @Override
  public long getCurrentPositionUs(boolean sourceEnded) {
    return delegate.getCurrentPositionUs(sourceEnded);
  }

  @Override
  public void configure(
      int inputEncoding,
      int inputChannelCount,
      int inputSampleRate,
      int specifiedBufferSize,
      @Nullable int[] outputChannels,
      int trimStartFrames,
      int trimEndFrames)
      throws ConfigurationException {
    delegate.configure(
        inputEncoding,
        inputChannelCount,
        inputSampleRate,
        specifiedBufferSize,
        outputChannels,
        trimStartFrames,
        trimEndFrames);
  }

  @Override
  public void play() {
    delegate.play();
  }

  @Override
  public void handleDiscontinuity() {
    delegate.handleDiscontinuity();
  }

  @Override
  public boolean handleBuffer(ByteBuffer buffer, long presentationTimeUs)
      throws InitializationException, WriteException {
    return delegate.handleBuffer(buffer, presentationTimeUs);
  }

  @Override
  public void playToEndOfStream() throws WriteException {
    delegate.playToEndOfStream();
  }

  @Override
  public boolean isEnded() {
    return delegate.isEnded();
  }

  @Override
  public boolean hasPendingData() {
    return delegate.hasPendingData();
  }

  @Override
  public void setPlaybackParameters(PlaybackParameters playbackParameters) {
    delegate.setPlaybackParameters(playbackParameters);
  }

  @Override
  public PlaybackParameters getPlaybackParameters() {
    return delegate.getPlaybackParameters();
  }

  @Override
  public void setAudioAttributes(AudioAttributes audioAttributes) {
    delegate.setAudioAttributes(audioAttributes);
  }

  @Override
  public void setAudioSessionId(int audioSessionId) {
    delegate.setAudioSessionId(audioSessionId);
  }

  @Override
  public void setAuxEffectInfo(AuxEffectInfo auxEffectInfo) {
    delegate.setAuxEffectInfo(auxEffectInfo);
  }

  @Override
  public void enableTunnelingV21(int tunnelingAudioSessionId) {
    delegate.enableTunnelingV21(tunnelingAudioSessionId);
  }

  @Override
  public void disableTunneling() {
    delegate.disableTunneling();
  }

  @Override
  public void setVolume(float volume) {
    delegate.setVolume(volume);
  }

  @Override
  public void pause() {
    delegate.pause();
  }

  @Override
  public void flush() {
    delegate.flush();
  }

  @Override
  public void reset() {
    delegate.reset();
  }
}

Разница между файлами не показана из-за своего большого размера Загрузить разницу

Просмотреть файл

@ -18,65 +18,38 @@ package org.mozilla.thirdparty.com.google.android.exoplayer2.audio;
import org.mozilla.thirdparty.com.google.android.exoplayer2.C;
import org.mozilla.thirdparty.com.google.android.exoplayer2.Format;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
/**
* An {@link AudioProcessor} that converts audio data to {@link C#ENCODING_PCM_16BIT}.
* An {@link AudioProcessor} that converts different PCM audio encodings to 16-bit integer PCM. The
* following encodings are supported as input:
*
* <ul>
* <li>{@link C#ENCODING_PCM_8BIT}
* <li>{@link C#ENCODING_PCM_16BIT} ({@link #isActive()} will return {@code false})
* <li>{@link C#ENCODING_PCM_16BIT_BIG_ENDIAN}
* <li>{@link C#ENCODING_PCM_24BIT}
* <li>{@link C#ENCODING_PCM_32BIT}
* <li>{@link C#ENCODING_PCM_FLOAT}
* </ul>
*/
/* package */ final class ResamplingAudioProcessor implements AudioProcessor {
private int sampleRateHz;
private int channelCount;
@C.PcmEncoding
private int encoding;
private ByteBuffer buffer;
private ByteBuffer outputBuffer;
private boolean inputEnded;
/**
* Creates a new audio processor that converts audio data to {@link C#ENCODING_PCM_16BIT}.
*/
public ResamplingAudioProcessor() {
sampleRateHz = Format.NO_VALUE;
channelCount = Format.NO_VALUE;
encoding = C.ENCODING_INVALID;
buffer = EMPTY_BUFFER;
outputBuffer = EMPTY_BUFFER;
}
/* package */ final class ResamplingAudioProcessor extends BaseAudioProcessor {
@Override
public boolean configure(int sampleRateHz, int channelCount, @C.Encoding int encoding)
throws UnhandledFormatException {
if (encoding != C.ENCODING_PCM_8BIT && encoding != C.ENCODING_PCM_16BIT
&& encoding != C.ENCODING_PCM_24BIT && encoding != C.ENCODING_PCM_32BIT) {
throw new UnhandledFormatException(sampleRateHz, channelCount, encoding);
public AudioFormat onConfigure(AudioFormat inputAudioFormat)
throws UnhandledAudioFormatException {
@C.PcmEncoding int encoding = inputAudioFormat.encoding;
if (encoding != C.ENCODING_PCM_8BIT
&& encoding != C.ENCODING_PCM_16BIT
&& encoding != C.ENCODING_PCM_16BIT_BIG_ENDIAN
&& encoding != C.ENCODING_PCM_24BIT
&& encoding != C.ENCODING_PCM_32BIT
&& encoding != C.ENCODING_PCM_FLOAT) {
throw new UnhandledAudioFormatException(inputAudioFormat);
}
if (this.sampleRateHz == sampleRateHz && this.channelCount == channelCount
&& this.encoding == encoding) {
return false;
}
this.sampleRateHz = sampleRateHz;
this.channelCount = channelCount;
this.encoding = encoding;
if (encoding == C.ENCODING_PCM_16BIT) {
buffer = EMPTY_BUFFER;
}
return true;
}
@Override
public boolean isActive() {
return encoding != C.ENCODING_INVALID && encoding != C.ENCODING_PCM_16BIT;
}
@Override
public int getOutputChannelCount() {
return channelCount;
}
@Override
public int getOutputEncoding() {
return C.ENCODING_PCM_16BIT;
return encoding != C.ENCODING_PCM_16BIT
? new AudioFormat(
inputAudioFormat.sampleRate, inputAudioFormat.channelCount, C.ENCODING_PCM_16BIT)
: AudioFormat.NOT_SET;
}
@Override
@ -86,14 +59,18 @@ import java.nio.ByteOrder;
int limit = inputBuffer.limit();
int size = limit - position;
int resampledSize;
switch (encoding) {
switch (inputAudioFormat.encoding) {
case C.ENCODING_PCM_8BIT:
resampledSize = size * 2;
break;
case C.ENCODING_PCM_16BIT_BIG_ENDIAN:
resampledSize = size;
break;
case C.ENCODING_PCM_24BIT:
resampledSize = (size / 3) * 2;
break;
case C.ENCODING_PCM_32BIT:
case C.ENCODING_PCM_FLOAT:
resampledSize = size / 2;
break;
case C.ENCODING_PCM_16BIT:
@ -102,35 +79,47 @@ import java.nio.ByteOrder;
default:
throw new IllegalStateException();
}
if (buffer.capacity() < resampledSize) {
buffer = ByteBuffer.allocateDirect(resampledSize).order(ByteOrder.nativeOrder());
} else {
buffer.clear();
}
// Resample the little endian input and update the input/output buffers.
switch (encoding) {
ByteBuffer buffer = replaceOutputBuffer(resampledSize);
switch (inputAudioFormat.encoding) {
case C.ENCODING_PCM_8BIT:
// 8->16 bit resampling. Shift each byte from [0, 256) to [-128, 128) and scale up.
// 8 -> 16 bit resampling. Shift each byte from [0, 256) to [-128, 128) and scale up.
for (int i = position; i < limit; i++) {
buffer.put((byte) 0);
buffer.put((byte) ((inputBuffer.get(i) & 0xFF) - 128));
}
break;
case C.ENCODING_PCM_16BIT_BIG_ENDIAN:
// Big endian to little endian resampling. Swap the byte order.
for (int i = position; i < limit; i += 2) {
buffer.put(inputBuffer.get(i + 1));
buffer.put(inputBuffer.get(i));
}
break;
case C.ENCODING_PCM_24BIT:
// 24->16 bit resampling. Drop the least significant byte.
// 24 -> 16 bit resampling. Drop the least significant byte.
for (int i = position; i < limit; i += 3) {
buffer.put(inputBuffer.get(i + 1));
buffer.put(inputBuffer.get(i + 2));
}
break;
case C.ENCODING_PCM_32BIT:
// 32->16 bit resampling. Drop the two least significant bytes.
// 32 -> 16 bit resampling. Drop the two least significant bytes.
for (int i = position; i < limit; i += 4) {
buffer.put(inputBuffer.get(i + 2));
buffer.put(inputBuffer.get(i + 3));
}
break;
case C.ENCODING_PCM_FLOAT:
// 32 bit floating point -> 16 bit resampling. Floating point values are in the range
// [-1.0, 1.0], so need to be scaled by Short.MAX_VALUE.
for (int i = position; i < limit; i += 4) {
short value = (short) (inputBuffer.getFloat(i) * Short.MAX_VALUE);
buffer.put((byte) (value & 0xFF));
buffer.put((byte) ((value >> 8) & 0xFF));
}
break;
case C.ENCODING_PCM_16BIT:
case C.ENCODING_INVALID:
case Format.NO_VALUE:
@ -140,40 +129,6 @@ import java.nio.ByteOrder;
}
inputBuffer.position(inputBuffer.limit());
buffer.flip();
outputBuffer = buffer;
}
@Override
public void queueEndOfStream() {
inputEnded = true;
}
@Override
public ByteBuffer getOutput() {
ByteBuffer outputBuffer = this.outputBuffer;
this.outputBuffer = EMPTY_BUFFER;
return outputBuffer;
}
@SuppressWarnings("ReferenceEquality")
@Override
public boolean isEnded() {
return inputEnded && outputBuffer == EMPTY_BUFFER;
}
@Override
public void flush() {
outputBuffer = EMPTY_BUFFER;
inputEnded = false;
}
@Override
public void reset() {
flush();
buffer = EMPTY_BUFFER;
sampleRateHz = Format.NO_VALUE;
channelCount = Format.NO_VALUE;
encoding = C.ENCODING_INVALID;
}
}

Просмотреть файл

@ -0,0 +1,352 @@
/*
 * Copyright (C) 2018 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.mozilla.thirdparty.com.google.android.exoplayer2.audio;

import androidx.annotation.IntDef;
import org.mozilla.thirdparty.com.google.android.exoplayer2.C;
import org.mozilla.thirdparty.com.google.android.exoplayer2.util.Util;
import java.lang.annotation.Documented;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.nio.ByteBuffer;

/**
 * An {@link AudioProcessor} that skips silence in the input stream. Input and output are 16-bit
 * PCM.
 *
 * <p>Implemented as a three-state machine ({@link #STATE_NOISY}, {@link #STATE_MAYBE_SILENT},
 * {@link #STATE_SILENT}): candidate silence is buffered until {@link
 * #MINIMUM_SILENCE_DURATION_US} of it has accumulated, and skipped sections are padded with
 * {@link #PADDING_SILENCE_US} of retained audio on each side to soften the transitions.
 */
public final class SilenceSkippingAudioProcessor extends BaseAudioProcessor {

  /**
   * The minimum duration of audio that must be below {@link #SILENCE_THRESHOLD_LEVEL} to classify
   * that part of audio as silent, in microseconds.
   */
  private static final long MINIMUM_SILENCE_DURATION_US = 150_000;
  /**
   * The duration of silence by which to extend non-silent sections, in microseconds. The value must
   * not exceed {@link #MINIMUM_SILENCE_DURATION_US}.
   */
  private static final long PADDING_SILENCE_US = 20_000;
  /**
   * The absolute level below which an individual PCM sample is classified as silent. Note: the
   * specified value will be rounded so that the threshold check only depends on the more
   * significant byte, for efficiency.
   */
  private static final short SILENCE_THRESHOLD_LEVEL = 1024;
  /**
   * Threshold for classifying an individual PCM sample as silent based on its more significant
   * byte. This is {@link #SILENCE_THRESHOLD_LEVEL} divided by 256 with rounding.
   */
  private static final byte SILENCE_THRESHOLD_LEVEL_MSB = (SILENCE_THRESHOLD_LEVEL + 128) >> 8;

  /** Trimming states. */
  @Documented
  @Retention(RetentionPolicy.SOURCE)
  @IntDef({
    STATE_NOISY,
    STATE_MAYBE_SILENT,
    STATE_SILENT,
  })
  private @interface State {}
  /** State when the input is not silent. */
  private static final int STATE_NOISY = 0;
  /** State when the input may be silent but we haven't read enough yet to know. */
  private static final int STATE_MAYBE_SILENT = 1;
  /** State when the input is silent. */
  private static final int STATE_SILENT = 2;

  // Bytes per frame of the configured 16-bit PCM input. Assigned in onFlush() when enabled.
  private int bytesPerFrame;
  // Whether silence skipping is enabled. See setEnabled(boolean).
  private boolean enabled;

  /**
   * Buffers audio data that may be classified as silence while in {@link #STATE_MAYBE_SILENT}. If
   * the input becomes noisy before the buffer has filled, it will be output. Otherwise, the buffer
   * contents will be dropped and the state will transition to {@link #STATE_SILENT}.
   */
  private byte[] maybeSilenceBuffer;

  /**
   * Stores the latest part of the input while silent. It will be output as padding if the next
   * input is noisy.
   */
  private byte[] paddingBuffer;

  // The current trimming state.
  @State private int state;
  // Number of valid bytes currently held at the start of maybeSilenceBuffer.
  private int maybeSilenceBufferSize;
  // Size in bytes of the valid region of paddingBuffer, derived from PADDING_SILENCE_US.
  private int paddingSize;
  // Whether any noisy (non-empty) output has been produced since the last flush.
  private boolean hasOutputNoise;
  // Running count of input frames dropped as silence since the last flush.
  private long skippedFrames;

  /** Creates a new silence trimming audio processor. */
  public SilenceSkippingAudioProcessor() {
    maybeSilenceBuffer = Util.EMPTY_BYTE_ARRAY;
    paddingBuffer = Util.EMPTY_BYTE_ARRAY;
  }

  /**
   * Sets whether to skip silence in the input. This method may only be called after draining data
   * through the processor. The value returned by {@link #isActive()} may change, and the processor
   * must be {@link #flush() flushed} before queueing more data.
   *
   * @param enabled Whether to skip silence in the input.
   */
  public void setEnabled(boolean enabled) {
    this.enabled = enabled;
  }

  /**
   * Returns the total number of frames of input audio that were skipped due to being classified as
   * silence since the last call to {@link #flush()}.
   */
  public long getSkippedFrames() {
    return skippedFrames;
  }

  // AudioProcessor implementation.

  @Override
  public AudioFormat onConfigure(AudioFormat inputAudioFormat)
      throws UnhandledAudioFormatException {
    // Only 16-bit PCM input is supported; anything else is rejected.
    if (inputAudioFormat.encoding != C.ENCODING_PCM_16BIT) {
      throw new UnhandledAudioFormatException(inputAudioFormat);
    }
    return enabled ? inputAudioFormat : AudioFormat.NOT_SET;
  }

  @Override
  public boolean isActive() {
    return enabled;
  }

  @Override
  public void queueInput(ByteBuffer inputBuffer) {
    // Consume input in state-dependent chunks until it is exhausted or output has been produced
    // for the caller to drain.
    while (inputBuffer.hasRemaining() && !hasPendingOutput()) {
      switch (state) {
        case STATE_NOISY:
          processNoisy(inputBuffer);
          break;
        case STATE_MAYBE_SILENT:
          processMaybeSilence(inputBuffer);
          break;
        case STATE_SILENT:
          processSilence(inputBuffer);
          break;
        default:
          throw new IllegalStateException();
      }
    }
  }

  @Override
  protected void onQueueEndOfStream() {
    if (maybeSilenceBufferSize > 0) {
      // We haven't received enough silence to transition to the silent state, so output the buffer.
      output(maybeSilenceBuffer, maybeSilenceBufferSize);
    }
    if (!hasOutputNoise) {
      // No noise was ever output, so the padding reserved for a silent -> noisy transition will
      // never be emitted; account for it as skipped input.
      skippedFrames += paddingSize / bytesPerFrame;
    }
  }

  @Override
  protected void onFlush() {
    if (enabled) {
      // Re-derive buffer sizes from the configured input format.
      bytesPerFrame = inputAudioFormat.bytesPerFrame;
      int maybeSilenceBufferSize = durationUsToFrames(MINIMUM_SILENCE_DURATION_US) * bytesPerFrame;
      if (maybeSilenceBuffer.length != maybeSilenceBufferSize) {
        maybeSilenceBuffer = new byte[maybeSilenceBufferSize];
      }
      paddingSize = durationUsToFrames(PADDING_SILENCE_US) * bytesPerFrame;
      if (paddingBuffer.length != paddingSize) {
        paddingBuffer = new byte[paddingSize];
      }
    }
    state = STATE_NOISY;
    skippedFrames = 0;
    maybeSilenceBufferSize = 0;
    hasOutputNoise = false;
  }

  @Override
  protected void onReset() {
    enabled = false;
    paddingSize = 0;
    maybeSilenceBuffer = Util.EMPTY_BYTE_ARRAY;
    paddingBuffer = Util.EMPTY_BYTE_ARRAY;
  }

  // Internal methods.

  /**
   * Incrementally processes new input from {@code inputBuffer} while in {@link #STATE_NOISY},
   * updating the state if needed.
   */
  private void processNoisy(ByteBuffer inputBuffer) {
    int limit = inputBuffer.limit();

    // Check if there's any noise within the maybe silence buffer duration.
    inputBuffer.limit(Math.min(limit, inputBuffer.position() + maybeSilenceBuffer.length));
    int noiseLimit = findNoiseLimit(inputBuffer);
    if (noiseLimit == inputBuffer.position()) {
      // The buffer contains the start of possible silence.
      state = STATE_MAYBE_SILENT;
    } else {
      // Pass through everything up to the last noisy frame.
      inputBuffer.limit(noiseLimit);
      output(inputBuffer);
    }

    // Restore the limit.
    inputBuffer.limit(limit);
  }

  /**
   * Incrementally processes new input from {@code inputBuffer} while in {@link
   * #STATE_MAYBE_SILENT}, updating the state if needed.
   */
  private void processMaybeSilence(ByteBuffer inputBuffer) {
    int limit = inputBuffer.limit();
    int noisePosition = findNoisePosition(inputBuffer);
    int maybeSilenceInputSize = noisePosition - inputBuffer.position();
    int maybeSilenceBufferRemaining = maybeSilenceBuffer.length - maybeSilenceBufferSize;
    if (noisePosition < limit && maybeSilenceInputSize < maybeSilenceBufferRemaining) {
      // The maybe silence buffer isn't full, so output it and switch back to the noisy state.
      output(maybeSilenceBuffer, maybeSilenceBufferSize);
      maybeSilenceBufferSize = 0;
      state = STATE_NOISY;
    } else {
      // Fill as much of the maybe silence buffer as possible.
      int bytesToWrite = Math.min(maybeSilenceInputSize, maybeSilenceBufferRemaining);
      inputBuffer.limit(inputBuffer.position() + bytesToWrite);
      inputBuffer.get(maybeSilenceBuffer, maybeSilenceBufferSize, bytesToWrite);
      maybeSilenceBufferSize += bytesToWrite;
      if (maybeSilenceBufferSize == maybeSilenceBuffer.length) {
        // We've reached a period of silence, so skip it, taking in to account padding for both
        // the noisy to silent transition and any future silent to noisy transition.
        if (hasOutputNoise) {
          // Emit paddingSize bytes of the buffered audio now, and reserve paddingSize more for
          // the eventual silent -> noisy transition; the remainder is skipped.
          output(maybeSilenceBuffer, paddingSize);
          skippedFrames += (maybeSilenceBufferSize - paddingSize * 2) / bytesPerFrame;
        } else {
          // Nothing noisy has been output yet, so only reserve trailing padding.
          skippedFrames += (maybeSilenceBufferSize - paddingSize) / bytesPerFrame;
        }
        updatePaddingBuffer(inputBuffer, maybeSilenceBuffer, maybeSilenceBufferSize);
        maybeSilenceBufferSize = 0;
        state = STATE_SILENT;
      }

      // Restore the limit (it was only modified on this branch).
      inputBuffer.limit(limit);
    }
  }

  /**
   * Incrementally processes new input from {@code inputBuffer} while in {@link #STATE_SILENT},
   * updating the state if needed.
   */
  private void processSilence(ByteBuffer inputBuffer) {
    int limit = inputBuffer.limit();
    int noisyPosition = findNoisePosition(inputBuffer);
    inputBuffer.limit(noisyPosition);
    skippedFrames += inputBuffer.remaining() / bytesPerFrame;
    updatePaddingBuffer(inputBuffer, paddingBuffer, paddingSize);
    if (noisyPosition < limit) {
      // Output the padding, which may include previous input as well as new input, then transition
      // back to the noisy state.
      output(paddingBuffer, paddingSize);
      state = STATE_NOISY;

      // Restore the limit. (When no noise was found, noisyPosition == limit and the limit is
      // already unchanged.)
      inputBuffer.limit(limit);
    }
  }

  /**
   * Copies {@code length} elements from {@code data} to populate a new output buffer from the
   * processor.
   */
  private void output(byte[] data, int length) {
    replaceOutputBuffer(length).put(data, 0, length).flip();
    if (length > 0) {
      hasOutputNoise = true;
    }
  }

  /**
   * Copies remaining bytes from {@code data} to populate a new output buffer from the processor.
   */
  private void output(ByteBuffer data) {
    int length = data.remaining();
    replaceOutputBuffer(length).put(data).flip();
    if (length > 0) {
      hasOutputNoise = true;
    }
  }

  /**
   * Fills {@link #paddingBuffer} using data from {@code input}, plus any additional buffered data
   * at the end of {@code buffer} (up to its {@code size}) required to fill it, advancing the input
   * position.
   */
  private void updatePaddingBuffer(ByteBuffer input, byte[] buffer, int size) {
    // Take as much as possible from the tail of the new input, topping up from the tail of the
    // previously buffered data if the input alone can't fill paddingSize bytes.
    int fromInputSize = Math.min(input.remaining(), paddingSize);
    int fromBufferSize = paddingSize - fromInputSize;
    System.arraycopy(
        /* src= */ buffer,
        /* srcPos= */ size - fromBufferSize,
        /* dest= */ paddingBuffer,
        /* destPos= */ 0,
        /* length= */ fromBufferSize);
    input.position(input.limit() - fromInputSize);
    input.get(paddingBuffer, fromBufferSize, fromInputSize);
  }

  /**
   * Returns the number of input frames corresponding to {@code durationUs} microseconds of audio.
   */
  private int durationUsToFrames(long durationUs) {
    return (int) ((durationUs * inputAudioFormat.sampleRate) / C.MICROS_PER_SECOND);
  }

  /**
   * Returns the earliest byte position in [position, limit) of {@code buffer} that contains a frame
   * classified as a noisy frame, or the limit of the buffer if no such frame exists.
   */
  private int findNoisePosition(ByteBuffer buffer) {
    // The input is in ByteOrder.nativeOrder(), which is little endian on Android.
    // Starting at position + 1 and stepping by 2 reads the more significant byte of each 16-bit
    // sample, which is all the threshold check depends on.
    for (int i = buffer.position() + 1; i < buffer.limit(); i += 2) {
      if (Math.abs(buffer.get(i)) > SILENCE_THRESHOLD_LEVEL_MSB) {
        // Round to the start of the frame.
        return bytesPerFrame * (i / bytesPerFrame);
      }
    }
    return buffer.limit();
  }

  /**
   * Returns the earliest byte position in [position, limit) of {@code buffer} such that all frames
   * from the byte position to the limit are classified as silent.
   */
  private int findNoiseLimit(ByteBuffer buffer) {
    // The input is in ByteOrder.nativeOrder(), which is little endian on Android. Scan backwards
    // over the more significant byte of each 16-bit sample.
    for (int i = buffer.limit() - 1; i >= buffer.position(); i -= 2) {
      if (Math.abs(buffer.get(i)) > SILENCE_THRESHOLD_LEVEL_MSB) {
        // Return the start of the next frame.
        return bytesPerFrame * (i / bytesPerFrame) + bytesPerFrame;
      }
    }
    return buffer.position();
  }
}

Просмотреть файл

@ -17,21 +17,25 @@ package org.mozilla.thirdparty.com.google.android.exoplayer2.audio;
import android.media.audiofx.Virtualizer;
import android.os.Handler;
import android.os.Looper;
import android.os.SystemClock;
import android.support.annotation.IntDef;
import androidx.annotation.IntDef;
import androidx.annotation.Nullable;
import org.mozilla.thirdparty.com.google.android.exoplayer2.BaseRenderer;
import org.mozilla.thirdparty.com.google.android.exoplayer2.C;
import org.mozilla.thirdparty.com.google.android.exoplayer2.ExoPlaybackException;
import org.mozilla.thirdparty.com.google.android.exoplayer2.ExoPlayer;
import org.mozilla.thirdparty.com.google.android.exoplayer2.Format;
import org.mozilla.thirdparty.com.google.android.exoplayer2.FormatHolder;
import org.mozilla.thirdparty.com.google.android.exoplayer2.PlaybackParameters;
import org.mozilla.thirdparty.com.google.android.exoplayer2.PlayerMessage.Target;
import org.mozilla.thirdparty.com.google.android.exoplayer2.RendererCapabilities;
import org.mozilla.thirdparty.com.google.android.exoplayer2.audio.AudioRendererEventListener.EventDispatcher;
import org.mozilla.thirdparty.com.google.android.exoplayer2.decoder.DecoderCounters;
import org.mozilla.thirdparty.com.google.android.exoplayer2.decoder.DecoderInputBuffer;
import org.mozilla.thirdparty.com.google.android.exoplayer2.decoder.SimpleDecoder;
import org.mozilla.thirdparty.com.google.android.exoplayer2.decoder.SimpleOutputBuffer;
import org.mozilla.thirdparty.com.google.android.exoplayer2.drm.DrmSession;
import org.mozilla.thirdparty.com.google.android.exoplayer2.drm.DrmSession.DrmSessionException;
import org.mozilla.thirdparty.com.google.android.exoplayer2.drm.DrmSessionManager;
import org.mozilla.thirdparty.com.google.android.exoplayer2.drm.ExoMediaCrypto;
import org.mozilla.thirdparty.com.google.android.exoplayer2.util.Assertions;
@ -39,17 +43,36 @@ import org.mozilla.thirdparty.com.google.android.exoplayer2.util.MediaClock;
import org.mozilla.thirdparty.com.google.android.exoplayer2.util.MimeTypes;
import org.mozilla.thirdparty.com.google.android.exoplayer2.util.TraceUtil;
import org.mozilla.thirdparty.com.google.android.exoplayer2.util.Util;
import java.lang.annotation.Documented;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
/**
* Decodes and renders audio using a {@link SimpleDecoder}.
*
* <p>This renderer accepts the following messages sent via {@link ExoPlayer#createMessage(Target)}
* on the playback thread:
*
* <ul>
* <li>Message with type {@link C#MSG_SET_VOLUME} to set the volume. The message payload should be
* a {@link Float} with 0 being silence and 1 being unity gain.
* <li>Message with type {@link C#MSG_SET_AUDIO_ATTRIBUTES} to set the audio attributes. The
 *       message payload should be an {@link org.mozilla.thirdparty.com.google.android.exoplayer2.audio.AudioAttributes}
* instance that will configure the underlying audio track.
* <li>Message with type {@link C#MSG_SET_AUX_EFFECT_INFO} to set the auxiliary effect. The
* message payload should be an {@link AuxEffectInfo} instance that will configure the
* underlying audio track.
* </ul>
*/
public abstract class SimpleDecoderAudioRenderer extends BaseRenderer implements MediaClock {
@Documented
@Retention(RetentionPolicy.SOURCE)
@IntDef({REINITIALIZATION_STATE_NONE, REINITIALIZATION_STATE_SIGNAL_END_OF_STREAM,
REINITIALIZATION_STATE_WAIT_END_OF_STREAM})
@IntDef({
REINITIALIZATION_STATE_NONE,
REINITIALIZATION_STATE_SIGNAL_END_OF_STREAM,
REINITIALIZATION_STATE_WAIT_END_OF_STREAM
})
private @interface ReinitializationState {}
/**
* The decoder does not need to be re-initialized.
@ -71,31 +94,34 @@ public abstract class SimpleDecoderAudioRenderer extends BaseRenderer implements
private final DrmSessionManager<ExoMediaCrypto> drmSessionManager;
private final boolean playClearSamplesWithoutKeys;
private final EventDispatcher eventDispatcher;
private final AudioTrack audioTrack;
private final FormatHolder formatHolder;
private final AudioSink audioSink;
private final DecoderInputBuffer flagsOnlyBuffer;
private boolean drmResourcesAcquired;
private DecoderCounters decoderCounters;
private Format inputFormat;
private int encoderDelay;
private int encoderPadding;
private SimpleDecoder<DecoderInputBuffer, ? extends SimpleOutputBuffer,
? extends AudioDecoderException> decoder;
private DecoderInputBuffer inputBuffer;
private SimpleOutputBuffer outputBuffer;
private DrmSession<ExoMediaCrypto> drmSession;
private DrmSession<ExoMediaCrypto> pendingDrmSession;
@Nullable private DrmSession<ExoMediaCrypto> decoderDrmSession;
@Nullable private DrmSession<ExoMediaCrypto> sourceDrmSession;
@ReinitializationState private int decoderReinitializationState;
private boolean decoderReceivedBuffers;
private boolean audioTrackNeedsConfigure;
private long currentPositionUs;
private boolean allowFirstBufferPositionDiscontinuity;
private boolean allowPositionDiscontinuity;
private boolean inputStreamEnded;
private boolean outputStreamEnded;
private boolean waitingForKeys;
public SimpleDecoderAudioRenderer() {
this(null, null);
this(/* eventHandler= */ null, /* eventListener= */ null);
}
/**
@ -104,9 +130,17 @@ public abstract class SimpleDecoderAudioRenderer extends BaseRenderer implements
* @param eventListener A listener of events. May be null if delivery of events is not required.
* @param audioProcessors Optional {@link AudioProcessor}s that will process audio before output.
*/
public SimpleDecoderAudioRenderer(Handler eventHandler,
AudioRendererEventListener eventListener, AudioProcessor... audioProcessors) {
this(eventHandler, eventListener, null, null, false, audioProcessors);
public SimpleDecoderAudioRenderer(
@Nullable Handler eventHandler,
@Nullable AudioRendererEventListener eventListener,
AudioProcessor... audioProcessors) {
this(
eventHandler,
eventListener,
/* audioCapabilities= */ null,
/* drmSessionManager= */ null,
/* playClearSamplesWithoutKeys= */ false,
audioProcessors);
}
/**
@ -116,9 +150,16 @@ public abstract class SimpleDecoderAudioRenderer extends BaseRenderer implements
* @param audioCapabilities The audio capabilities for playback on this device. May be null if the
* default capabilities (no encoded audio passthrough support) should be assumed.
*/
public SimpleDecoderAudioRenderer(Handler eventHandler,
AudioRendererEventListener eventListener, AudioCapabilities audioCapabilities) {
this(eventHandler, eventListener, audioCapabilities, null, false);
public SimpleDecoderAudioRenderer(
@Nullable Handler eventHandler,
@Nullable AudioRendererEventListener eventListener,
@Nullable AudioCapabilities audioCapabilities) {
this(
eventHandler,
eventListener,
audioCapabilities,
/* drmSessionManager= */ null,
/* playClearSamplesWithoutKeys= */ false);
}
/**
@ -136,52 +177,95 @@ public abstract class SimpleDecoderAudioRenderer extends BaseRenderer implements
* has obtained the keys necessary to decrypt encrypted regions of the media.
* @param audioProcessors Optional {@link AudioProcessor}s that will process audio before output.
*/
public SimpleDecoderAudioRenderer(Handler eventHandler,
AudioRendererEventListener eventListener, AudioCapabilities audioCapabilities,
DrmSessionManager<ExoMediaCrypto> drmSessionManager, boolean playClearSamplesWithoutKeys,
public SimpleDecoderAudioRenderer(
@Nullable Handler eventHandler,
@Nullable AudioRendererEventListener eventListener,
@Nullable AudioCapabilities audioCapabilities,
@Nullable DrmSessionManager<ExoMediaCrypto> drmSessionManager,
boolean playClearSamplesWithoutKeys,
AudioProcessor... audioProcessors) {
this(eventHandler, eventListener, drmSessionManager,
playClearSamplesWithoutKeys, new DefaultAudioSink(audioCapabilities, audioProcessors));
}
/**
* @param eventHandler A handler to use when delivering events to {@code eventListener}. May be
* null if delivery of events is not required.
* @param eventListener A listener of events. May be null if delivery of events is not required.
* @param drmSessionManager For use with encrypted media. May be null if support for encrypted
* media is not required.
* @param playClearSamplesWithoutKeys Encrypted media may contain clear (un-encrypted) regions.
* For example a media file may start with a short clear region so as to allow playback to
* begin in parallel with key acquisition. This parameter specifies whether the renderer is
* permitted to play clear regions of encrypted media files before {@code drmSessionManager}
* has obtained the keys necessary to decrypt encrypted regions of the media.
* @param audioSink The sink to which audio will be output.
*/
public SimpleDecoderAudioRenderer(
@Nullable Handler eventHandler,
@Nullable AudioRendererEventListener eventListener,
@Nullable DrmSessionManager<ExoMediaCrypto> drmSessionManager,
boolean playClearSamplesWithoutKeys,
AudioSink audioSink) {
super(C.TRACK_TYPE_AUDIO);
this.drmSessionManager = drmSessionManager;
this.playClearSamplesWithoutKeys = playClearSamplesWithoutKeys;
eventDispatcher = new EventDispatcher(eventHandler, eventListener);
audioTrack = new AudioTrack(audioCapabilities, audioProcessors, new AudioTrackListener());
formatHolder = new FormatHolder();
this.audioSink = audioSink;
audioSink.setListener(new AudioSinkListener());
flagsOnlyBuffer = DecoderInputBuffer.newFlagsOnlyInstance();
decoderReinitializationState = REINITIALIZATION_STATE_NONE;
audioTrackNeedsConfigure = true;
}
@Override
@Nullable
public MediaClock getMediaClock() {
return this;
}
@Override
@Capabilities
public final int supportsFormat(Format format) {
int formatSupport = supportsFormatInternal(format);
if (formatSupport == FORMAT_UNSUPPORTED_TYPE || formatSupport == FORMAT_UNSUPPORTED_SUBTYPE) {
return formatSupport;
if (!MimeTypes.isAudio(format.sampleMimeType)) {
return RendererCapabilities.create(FORMAT_UNSUPPORTED_TYPE);
}
@FormatSupport int formatSupport = supportsFormatInternal(drmSessionManager, format);
if (formatSupport <= FORMAT_UNSUPPORTED_DRM) {
return RendererCapabilities.create(formatSupport);
}
@TunnelingSupport
int tunnelingSupport = Util.SDK_INT >= 21 ? TUNNELING_SUPPORTED : TUNNELING_NOT_SUPPORTED;
return ADAPTIVE_NOT_SEAMLESS | tunnelingSupport | formatSupport;
return RendererCapabilities.create(formatSupport, ADAPTIVE_NOT_SEAMLESS, tunnelingSupport);
}
/**
* Returns the {@link #FORMAT_SUPPORT_MASK} component of the return value for
* {@link #supportsFormat(Format)}.
* Returns the {@link FormatSupport} for the given {@link Format}.
*
* @param format The format.
* @return The extent to which the renderer supports the format itself.
* @param drmSessionManager The renderer's {@link DrmSessionManager}.
* @param format The format, which has an audio {@link Format#sampleMimeType}.
* @return The {@link FormatSupport} for this {@link Format}.
*/
protected abstract int supportsFormatInternal(Format format);
@FormatSupport
protected abstract int supportsFormatInternal(
@Nullable DrmSessionManager<ExoMediaCrypto> drmSessionManager, Format format);
/**
* Returns whether the sink supports the audio format.
*
* @see AudioSink#supportsOutput(int, int)
*/
protected final boolean supportsOutput(int channelCount, @C.Encoding int encoding) {
return audioSink.supportsOutput(channelCount, encoding);
}
@Override
public void render(long positionUs, long elapsedRealtimeUs) throws ExoPlaybackException {
if (outputStreamEnded) {
try {
audioTrack.playToEndOfStream();
} catch (AudioTrack.WriteException e) {
throw ExoPlaybackException.createForRenderer(e, getIndex());
audioSink.playToEndOfStream();
} catch (AudioSink.WriteException e) {
throw createRendererException(e, inputFormat);
}
return;
}
@ -189,10 +273,11 @@ public abstract class SimpleDecoderAudioRenderer extends BaseRenderer implements
// Try and read a format if we don't have one already.
if (inputFormat == null) {
// We don't have a format yet, so try and read one.
FormatHolder formatHolder = getFormatHolder();
flagsOnlyBuffer.clear();
int result = readSource(formatHolder, flagsOnlyBuffer, true);
if (result == C.RESULT_FORMAT_READ) {
onInputFormatChanged(formatHolder.format);
onInputFormatChanged(formatHolder);
} else if (result == C.RESULT_BUFFER_READ) {
// End of stream read having not read a format.
Assertions.checkState(flagsOnlyBuffer.isEndOfStream());
@ -215,9 +300,9 @@ public abstract class SimpleDecoderAudioRenderer extends BaseRenderer implements
while (drainOutputBuffer()) {}
while (feedInputBuffer()) {}
TraceUtil.endSection();
} catch (AudioDecoderException | AudioTrack.ConfigurationException
| AudioTrack.InitializationException | AudioTrack.WriteException e) {
throw ExoPlaybackException.createForRenderer(e, getIndex());
} catch (AudioDecoderException | AudioSink.ConfigurationException
| AudioSink.InitializationException | AudioSink.WriteException e) {
throw createRendererException(e, inputFormat);
}
decoderCounters.ensureUpdated();
}
@ -229,21 +314,21 @@ public abstract class SimpleDecoderAudioRenderer extends BaseRenderer implements
* order to spatialize the audio channels. For this use case, any {@link Virtualizer} instances
* should be released in {@link #onDisabled()} (if not before).
*
* @see AudioTrack.Listener#onAudioSessionId(int)
* @see AudioSink.Listener#onAudioSessionId(int)
*/
protected void onAudioSessionId(int audioSessionId) {
// Do nothing.
}
/**
* @see AudioTrack.Listener#onPositionDiscontinuity()
* @see AudioSink.Listener#onPositionDiscontinuity()
*/
protected void onAudioTrackPositionDiscontinuity() {
// Do nothing.
}
/**
* @see AudioTrack.Listener#onUnderrun(int, long, long)
* @see AudioSink.Listener#onUnderrun(int, long, long)
*/
protected void onAudioTrackUnderrun(int bufferSize, long bufferSizeMs,
long elapsedSinceLastFeedMs) {
@ -259,32 +344,40 @@ public abstract class SimpleDecoderAudioRenderer extends BaseRenderer implements
* @return The decoder.
* @throws AudioDecoderException If an error occurred creating a suitable decoder.
*/
protected abstract SimpleDecoder<DecoderInputBuffer, ? extends SimpleOutputBuffer,
? extends AudioDecoderException> createDecoder(Format format, ExoMediaCrypto mediaCrypto)
throws AudioDecoderException;
protected abstract SimpleDecoder<
DecoderInputBuffer, ? extends SimpleOutputBuffer, ? extends AudioDecoderException>
createDecoder(Format format, @Nullable ExoMediaCrypto mediaCrypto)
throws AudioDecoderException;
/**
* Returns the format of audio buffers output by the decoder. Will not be called until the first
* output buffer has been dequeued, so the decoder may use input data to determine the format.
* <p>
* The default implementation returns a 16-bit PCM format with the same channel count and sample
* rate as the input.
*/
protected Format getOutputFormat() {
return Format.createAudioSampleFormat(null, MimeTypes.AUDIO_RAW, null, Format.NO_VALUE,
Format.NO_VALUE, inputFormat.channelCount, inputFormat.sampleRate, C.ENCODING_PCM_16BIT,
null, null, 0, null);
protected abstract Format getOutputFormat();
/**
* Returns whether the existing decoder can be kept for a new format.
*
* @param oldFormat The previous format.
* @param newFormat The new format.
* @return True if the existing decoder can be kept.
*/
protected boolean canKeepCodec(Format oldFormat, Format newFormat) {
return false;
}
private boolean drainOutputBuffer() throws ExoPlaybackException, AudioDecoderException,
AudioTrack.ConfigurationException, AudioTrack.InitializationException,
AudioTrack.WriteException {
AudioSink.ConfigurationException, AudioSink.InitializationException,
AudioSink.WriteException {
if (outputBuffer == null) {
outputBuffer = decoder.dequeueOutputBuffer();
if (outputBuffer == null) {
return false;
}
decoderCounters.skippedOutputBufferCount += outputBuffer.skippedOutputBufferCount;
if (outputBuffer.skippedOutputBufferCount > 0) {
decoderCounters.skippedOutputBufferCount += outputBuffer.skippedOutputBufferCount;
audioSink.handleDiscontinuity();
}
}
if (outputBuffer.isEndOfStream()) {
@ -304,12 +397,12 @@ public abstract class SimpleDecoderAudioRenderer extends BaseRenderer implements
if (audioTrackNeedsConfigure) {
Format outputFormat = getOutputFormat();
audioTrack.configure(outputFormat.sampleMimeType, outputFormat.channelCount,
outputFormat.sampleRate, outputFormat.pcmEncoding, 0);
audioSink.configure(outputFormat.pcmEncoding, outputFormat.channelCount,
outputFormat.sampleRate, 0, null, encoderDelay, encoderPadding);
audioTrackNeedsConfigure = false;
}
if (audioTrack.handleBuffer(outputBuffer.data, outputBuffer.timeUs)) {
if (audioSink.handleBuffer(outputBuffer.data, outputBuffer.timeUs)) {
decoderCounters.renderedOutputBufferCount++;
outputBuffer.release();
outputBuffer = null;
@ -342,6 +435,7 @@ public abstract class SimpleDecoderAudioRenderer extends BaseRenderer implements
}
int result;
FormatHolder formatHolder = getFormatHolder();
if (waitingForKeys) {
// We've already read an encrypted sample into buffer, and are waiting for keys.
result = C.RESULT_BUFFER_READ;
@ -353,7 +447,7 @@ public abstract class SimpleDecoderAudioRenderer extends BaseRenderer implements
return false;
}
if (result == C.RESULT_FORMAT_READ) {
onInputFormatChanged(formatHolder.format);
onInputFormatChanged(formatHolder);
return true;
}
if (inputBuffer.isEndOfStream()) {
@ -368,6 +462,7 @@ public abstract class SimpleDecoderAudioRenderer extends BaseRenderer implements
return false;
}
inputBuffer.flip();
onQueueInputBuffer(inputBuffer);
decoder.queueInputBuffer(inputBuffer);
decoderReceivedBuffers = true;
decoderCounters.inputBufferCount++;
@ -376,23 +471,25 @@ public abstract class SimpleDecoderAudioRenderer extends BaseRenderer implements
}
private boolean shouldWaitForKeys(boolean bufferEncrypted) throws ExoPlaybackException {
if (drmSession == null) {
if (decoderDrmSession == null
|| (!bufferEncrypted
&& (playClearSamplesWithoutKeys || decoderDrmSession.playClearSamplesWithoutKeys()))) {
return false;
}
@DrmSession.State int drmSessionState = drmSession.getState();
@DrmSession.State int drmSessionState = decoderDrmSession.getState();
if (drmSessionState == DrmSession.STATE_ERROR) {
throw ExoPlaybackException.createForRenderer(drmSession.getError(), getIndex());
throw createRendererException(decoderDrmSession.getError(), inputFormat);
}
return drmSessionState != DrmSession.STATE_OPENED_WITH_KEYS
&& (bufferEncrypted || !playClearSamplesWithoutKeys);
return drmSessionState != DrmSession.STATE_OPENED_WITH_KEYS;
}
private void processEndOfStream() throws ExoPlaybackException {
outputStreamEnded = true;
try {
audioTrack.playToEndOfStream();
} catch (AudioTrack.WriteException e) {
throw ExoPlaybackException.createForRenderer(drmSession.getError(), getIndex());
audioSink.playToEndOfStream();
} catch (AudioSink.WriteException e) {
// TODO(internal: b/145658993) Use outputFormat for the call from drainOutputBuffer.
throw createRendererException(e, inputFormat);
}
}
@ -414,52 +511,54 @@ public abstract class SimpleDecoderAudioRenderer extends BaseRenderer implements
@Override
public boolean isEnded() {
return outputStreamEnded && audioTrack.isEnded();
return outputStreamEnded && audioSink.isEnded();
}
@Override
public boolean isReady() {
return audioTrack.hasPendingData()
return audioSink.hasPendingData()
|| (inputFormat != null && !waitingForKeys && (isSourceReady() || outputBuffer != null));
}
@Override
public long getPositionUs() {
long newCurrentPositionUs = audioTrack.getCurrentPositionUs(isEnded());
if (newCurrentPositionUs != AudioTrack.CURRENT_POSITION_NOT_SET) {
currentPositionUs = allowPositionDiscontinuity ? newCurrentPositionUs
: Math.max(currentPositionUs, newCurrentPositionUs);
allowPositionDiscontinuity = false;
if (getState() == STATE_STARTED) {
updateCurrentPosition();
}
return currentPositionUs;
}
@Override
public PlaybackParameters setPlaybackParameters(PlaybackParameters playbackParameters) {
return audioTrack.setPlaybackParameters(playbackParameters);
public void setPlaybackParameters(PlaybackParameters playbackParameters) {
audioSink.setPlaybackParameters(playbackParameters);
}
@Override
public PlaybackParameters getPlaybackParameters() {
return audioTrack.getPlaybackParameters();
return audioSink.getPlaybackParameters();
}
@Override
protected void onEnabled(boolean joining) throws ExoPlaybackException {
if (drmSessionManager != null && !drmResourcesAcquired) {
drmResourcesAcquired = true;
drmSessionManager.prepare();
}
decoderCounters = new DecoderCounters();
eventDispatcher.enabled(decoderCounters);
int tunnelingAudioSessionId = getConfiguration().tunnelingAudioSessionId;
if (tunnelingAudioSessionId != C.AUDIO_SESSION_ID_UNSET) {
audioTrack.enableTunnelingV21(tunnelingAudioSessionId);
audioSink.enableTunnelingV21(tunnelingAudioSessionId);
} else {
audioTrack.disableTunneling();
audioSink.disableTunneling();
}
}
@Override
protected void onPositionReset(long positionUs, boolean joining) throws ExoPlaybackException {
audioTrack.reset();
audioSink.flush();
currentPositionUs = positionUs;
allowFirstBufferPositionDiscontinuity = true;
allowPositionDiscontinuity = true;
inputStreamEnded = false;
outputStreamEnded = false;
@ -470,12 +569,13 @@ public abstract class SimpleDecoderAudioRenderer extends BaseRenderer implements
@Override
protected void onStarted() {
audioTrack.play();
audioSink.play();
}
@Override
protected void onStopped() {
audioTrack.pause();
updateCurrentPosition();
audioSink.pause();
}
@Override
@ -484,25 +584,39 @@ public abstract class SimpleDecoderAudioRenderer extends BaseRenderer implements
audioTrackNeedsConfigure = true;
waitingForKeys = false;
try {
setSourceDrmSession(null);
releaseDecoder();
audioTrack.release();
audioSink.reset();
} finally {
try {
if (drmSession != null) {
drmSessionManager.releaseSession(drmSession);
}
} finally {
try {
if (pendingDrmSession != null && pendingDrmSession != drmSession) {
drmSessionManager.releaseSession(pendingDrmSession);
}
} finally {
drmSession = null;
pendingDrmSession = null;
decoderCounters.ensureUpdated();
eventDispatcher.disabled(decoderCounters);
}
}
eventDispatcher.disabled(decoderCounters);
}
}
@Override
protected void onReset() {
if (drmSessionManager != null && drmResourcesAcquired) {
drmResourcesAcquired = false;
drmSessionManager.release();
}
}
@Override
public void handleMessage(int messageType, @Nullable Object message) throws ExoPlaybackException {
switch (messageType) {
case C.MSG_SET_VOLUME:
audioSink.setVolume((Float) message);
break;
case C.MSG_SET_AUDIO_ATTRIBUTES:
AudioAttributes audioAttributes = (AudioAttributes) message;
audioSink.setAudioAttributes(audioAttributes);
break;
case C.MSG_SET_AUX_EFFECT_INFO:
AuxEffectInfo auxEffectInfo = (AuxEffectInfo) message;
audioSink.setAuxEffectInfo(auxEffectInfo);
break;
default:
super.handleMessage(messageType, message);
break;
}
}
@ -511,18 +625,20 @@ public abstract class SimpleDecoderAudioRenderer extends BaseRenderer implements
return;
}
drmSession = pendingDrmSession;
setDecoderDrmSession(sourceDrmSession);
ExoMediaCrypto mediaCrypto = null;
if (drmSession != null) {
@DrmSession.State int drmSessionState = drmSession.getState();
if (drmSessionState == DrmSession.STATE_ERROR) {
throw ExoPlaybackException.createForRenderer(drmSession.getError(), getIndex());
} else if (drmSessionState == DrmSession.STATE_OPENED
|| drmSessionState == DrmSession.STATE_OPENED_WITH_KEYS) {
mediaCrypto = drmSession.getMediaCrypto();
} else {
// The drm session isn't open yet.
return;
if (decoderDrmSession != null) {
mediaCrypto = decoderDrmSession.getMediaCrypto();
if (mediaCrypto == null) {
DrmSessionException drmError = decoderDrmSession.getError();
if (drmError != null) {
// Continue for now. We may be able to avoid failure if the session recovers, or if a new
// input format causes the session to be replaced before it's used.
} else {
// The drm session isn't open yet.
return;
}
}
}
@ -536,76 +652,87 @@ public abstract class SimpleDecoderAudioRenderer extends BaseRenderer implements
codecInitializedTimestamp - codecInitializingTimestamp);
decoderCounters.decoderInitCount++;
} catch (AudioDecoderException e) {
throw ExoPlaybackException.createForRenderer(e, getIndex());
throw createRendererException(e, inputFormat);
}
}
private void releaseDecoder() {
if (decoder == null) {
return;
}
inputBuffer = null;
outputBuffer = null;
decoder.release();
decoder = null;
decoderCounters.decoderReleaseCount++;
decoderReinitializationState = REINITIALIZATION_STATE_NONE;
decoderReceivedBuffers = false;
if (decoder != null) {
decoder.release();
decoder = null;
decoderCounters.decoderReleaseCount++;
}
setDecoderDrmSession(null);
}
private void onInputFormatChanged(Format newFormat) throws ExoPlaybackException {
private void setSourceDrmSession(@Nullable DrmSession<ExoMediaCrypto> session) {
DrmSession.replaceSession(sourceDrmSession, session);
sourceDrmSession = session;
}
private void setDecoderDrmSession(@Nullable DrmSession<ExoMediaCrypto> session) {
DrmSession.replaceSession(decoderDrmSession, session);
decoderDrmSession = session;
}
@SuppressWarnings("unchecked")
private void onInputFormatChanged(FormatHolder formatHolder) throws ExoPlaybackException {
Format newFormat = Assertions.checkNotNull(formatHolder.format);
if (formatHolder.includesDrmSession) {
setSourceDrmSession((DrmSession<ExoMediaCrypto>) formatHolder.drmSession);
} else {
sourceDrmSession =
getUpdatedSourceDrmSession(inputFormat, newFormat, drmSessionManager, sourceDrmSession);
}
Format oldFormat = inputFormat;
inputFormat = newFormat;
boolean drmInitDataChanged = !Util.areEqual(inputFormat.drmInitData, oldFormat == null ? null
: oldFormat.drmInitData);
if (drmInitDataChanged) {
if (inputFormat.drmInitData != null) {
if (drmSessionManager == null) {
throw ExoPlaybackException.createForRenderer(
new IllegalStateException("Media requires a DrmSessionManager"), getIndex());
}
pendingDrmSession = drmSessionManager.acquireSession(Looper.myLooper(),
inputFormat.drmInitData);
if (pendingDrmSession == drmSession) {
drmSessionManager.releaseSession(pendingDrmSession);
}
if (!canKeepCodec(oldFormat, inputFormat)) {
if (decoderReceivedBuffers) {
// Signal end of stream and wait for any final output buffers before re-initialization.
decoderReinitializationState = REINITIALIZATION_STATE_SIGNAL_END_OF_STREAM;
} else {
pendingDrmSession = null;
// There aren't any final output buffers, so release the decoder immediately.
releaseDecoder();
maybeInitDecoder();
audioTrackNeedsConfigure = true;
}
}
if (decoderReceivedBuffers) {
// Signal end of stream and wait for any final output buffers before re-initialization.
decoderReinitializationState = REINITIALIZATION_STATE_SIGNAL_END_OF_STREAM;
} else {
// There aren't any final output buffers, so release the decoder immediately.
releaseDecoder();
maybeInitDecoder();
audioTrackNeedsConfigure = true;
}
encoderDelay = inputFormat.encoderDelay;
encoderPadding = inputFormat.encoderPadding;
eventDispatcher.inputFormatChanged(newFormat);
eventDispatcher.inputFormatChanged(inputFormat);
}
@Override
public void handleMessage(int messageType, Object message) throws ExoPlaybackException {
switch (messageType) {
case C.MSG_SET_VOLUME:
audioTrack.setVolume((Float) message);
break;
case C.MSG_SET_STREAM_TYPE:
@C.StreamType int streamType = (Integer) message;
audioTrack.setStreamType(streamType);
break;
default:
super.handleMessage(messageType, message);
break;
private void onQueueInputBuffer(DecoderInputBuffer buffer) {
if (allowFirstBufferPositionDiscontinuity && !buffer.isDecodeOnly()) {
// TODO: Remove this hack once we have a proper fix for [Internal: b/71876314].
// Allow the position to jump if the first presentable input buffer has a timestamp that
// differs significantly from what was expected.
if (Math.abs(buffer.timeUs - currentPositionUs) > 500000) {
currentPositionUs = buffer.timeUs;
}
allowFirstBufferPositionDiscontinuity = false;
}
}
private final class AudioTrackListener implements AudioTrack.Listener {
private void updateCurrentPosition() {
long newCurrentPositionUs = audioSink.getCurrentPositionUs(isEnded());
if (newCurrentPositionUs != AudioSink.CURRENT_POSITION_NOT_SET) {
currentPositionUs =
allowPositionDiscontinuity
? newCurrentPositionUs
: Math.max(currentPositionUs, newCurrentPositionUs);
allowPositionDiscontinuity = false;
}
}
private final class AudioSinkListener implements AudioSink.Listener {
@Override
public void onAudioSessionId(int audioSessionId) {

Просмотреть файл

@ -27,32 +27,30 @@ import java.util.Arrays;
*/
/* package */ final class Sonic {
private static final boolean USE_CHORD_PITCH = false;
private static final int MINIMUM_PITCH = 65;
private static final int MAXIMUM_PITCH = 400;
private static final int AMDF_FREQUENCY = 4000;
private static final int BYTES_PER_SAMPLE = 2;
private final int sampleRate;
private final int numChannels;
private final int inputSampleRateHz;
private final int channelCount;
private final float speed;
private final float pitch;
private final float rate;
private final int minPeriod;
private final int maxPeriod;
private final int maxRequired;
private final int maxRequiredFrameCount;
private final short[] downSampleBuffer;
private int inputBufferSize;
private short[] inputBuffer;
private int outputBufferSize;
private int inputFrameCount;
private short[] outputBuffer;
private int pitchBufferSize;
private int outputFrameCount;
private short[] pitchBuffer;
private int pitchFrameCount;
private int oldRatePosition;
private int newRatePosition;
private float speed;
private float pitch;
private int numInputSamples;
private int numOutputSamples;
private int numPitchSamples;
private int remainingInputToCopy;
private int remainingInputToCopyFrameCount;
private int prevPeriod;
private int prevMinDiff;
private int minDiff;
@ -61,55 +59,26 @@ import java.util.Arrays;
/**
* Creates a new Sonic audio stream processor.
*
* @param sampleRate The sample rate of input audio.
* @param numChannels The number of channels in the input audio.
* @param inputSampleRateHz The sample rate of input audio, in hertz.
* @param channelCount The number of channels in the input audio.
* @param speed The speedup factor for output audio.
* @param pitch The pitch factor for output audio.
* @param outputSampleRateHz The sample rate for output audio, in hertz.
*/
public Sonic(int sampleRate, int numChannels) {
this.sampleRate = sampleRate;
this.numChannels = numChannels;
minPeriod = sampleRate / MAXIMUM_PITCH;
maxPeriod = sampleRate / MINIMUM_PITCH;
maxRequired = 2 * maxPeriod;
downSampleBuffer = new short[maxRequired];
inputBufferSize = maxRequired;
inputBuffer = new short[maxRequired * numChannels];
outputBufferSize = maxRequired;
outputBuffer = new short[maxRequired * numChannels];
pitchBufferSize = maxRequired;
pitchBuffer = new short[maxRequired * numChannels];
oldRatePosition = 0;
newRatePosition = 0;
prevPeriod = 0;
speed = 1.0f;
pitch = 1.0f;
}
/**
* Sets the output speed.
*/
public void setSpeed(float speed) {
public Sonic(
int inputSampleRateHz, int channelCount, float speed, float pitch, int outputSampleRateHz) {
this.inputSampleRateHz = inputSampleRateHz;
this.channelCount = channelCount;
this.speed = speed;
}
/**
* Gets the output speed.
*/
public float getSpeed() {
return speed;
}
/**
* Sets the output pitch.
*/
public void setPitch(float pitch) {
this.pitch = pitch;
}
/**
* Gets the output pitch.
*/
public float getPitch() {
return pitch;
rate = (float) inputSampleRateHz / outputSampleRateHz;
minPeriod = inputSampleRateHz / MAXIMUM_PITCH;
maxPeriod = inputSampleRateHz / MINIMUM_PITCH;
maxRequiredFrameCount = 2 * maxPeriod;
downSampleBuffer = new short[maxRequiredFrameCount];
inputBuffer = new short[maxRequiredFrameCount * channelCount];
outputBuffer = new short[maxRequiredFrameCount * channelCount];
pitchBuffer = new short[maxRequiredFrameCount * channelCount];
}
/**
@ -119,11 +88,11 @@ import java.util.Arrays;
* @param buffer A {@link ShortBuffer} containing input data between its position and limit.
*/
public void queueInput(ShortBuffer buffer) {
int samplesToWrite = buffer.remaining() / numChannels;
int bytesToWrite = samplesToWrite * numChannels * 2;
enlargeInputBufferIfNeeded(samplesToWrite);
buffer.get(inputBuffer, numInputSamples * numChannels, bytesToWrite / 2);
numInputSamples += samplesToWrite;
int framesToWrite = buffer.remaining() / channelCount;
int bytesToWrite = framesToWrite * channelCount * 2;
inputBuffer = ensureSpaceForAdditionalFrames(inputBuffer, inputFrameCount, framesToWrite);
buffer.get(inputBuffer, inputFrameCount * channelCount, bytesToWrite / 2);
inputFrameCount += framesToWrite;
processStreamInput();
}
@ -134,11 +103,15 @@ import java.util.Arrays;
* @param buffer A {@link ShortBuffer} into which output will be written.
*/
public void getOutput(ShortBuffer buffer) {
int samplesToRead = Math.min(buffer.remaining() / numChannels, numOutputSamples);
buffer.put(outputBuffer, 0, samplesToRead * numChannels);
numOutputSamples -= samplesToRead;
System.arraycopy(outputBuffer, samplesToRead * numChannels, outputBuffer, 0,
numOutputSamples * numChannels);
int framesToRead = Math.min(buffer.remaining() / channelCount, outputFrameCount);
buffer.put(outputBuffer, 0, framesToRead * channelCount);
outputFrameCount -= framesToRead;
System.arraycopy(
outputBuffer,
framesToRead * channelCount,
outputBuffer,
0,
outputFrameCount * channelCount);
}
/**
@ -146,79 +119,105 @@ import java.util.Arrays;
* added to the output, but flushing in the middle of words could introduce distortion.
*/
public void queueEndOfStream() {
int remainingSamples = numInputSamples;
int remainingFrameCount = inputFrameCount;
float s = speed / pitch;
int expectedOutputSamples =
numOutputSamples + (int) ((remainingSamples / s + numPitchSamples) / pitch + 0.5f);
float r = rate * pitch;
int expectedOutputFrames =
outputFrameCount + (int) ((remainingFrameCount / s + pitchFrameCount) / r + 0.5f);
// Add enough silence to flush both input and pitch buffers.
enlargeInputBufferIfNeeded(remainingSamples + 2 * maxRequired);
for (int xSample = 0; xSample < 2 * maxRequired * numChannels; xSample++) {
inputBuffer[remainingSamples * numChannels + xSample] = 0;
inputBuffer =
ensureSpaceForAdditionalFrames(
inputBuffer, inputFrameCount, remainingFrameCount + 2 * maxRequiredFrameCount);
for (int xSample = 0; xSample < 2 * maxRequiredFrameCount * channelCount; xSample++) {
inputBuffer[remainingFrameCount * channelCount + xSample] = 0;
}
numInputSamples += 2 * maxRequired;
inputFrameCount += 2 * maxRequiredFrameCount;
processStreamInput();
// Throw away any extra samples we generated due to the silence we added.
if (numOutputSamples > expectedOutputSamples) {
numOutputSamples = expectedOutputSamples;
// Throw away any extra frames we generated due to the silence we added.
if (outputFrameCount > expectedOutputFrames) {
outputFrameCount = expectedOutputFrames;
}
// Empty input and pitch buffers.
numInputSamples = 0;
remainingInputToCopy = 0;
numPitchSamples = 0;
inputFrameCount = 0;
remainingInputToCopyFrameCount = 0;
pitchFrameCount = 0;
}
/**
* Returns the number of output samples that can be read with {@link #getOutput(ShortBuffer)}.
*/
public int getSamplesAvailable() {
return numOutputSamples;
/** Clears state in preparation for receiving a new stream of input buffers. */
public void flush() {
inputFrameCount = 0;
outputFrameCount = 0;
pitchFrameCount = 0;
oldRatePosition = 0;
newRatePosition = 0;
remainingInputToCopyFrameCount = 0;
prevPeriod = 0;
prevMinDiff = 0;
minDiff = 0;
maxDiff = 0;
}
/** Returns the size of output that can be read with {@link #getOutput(ShortBuffer)}, in bytes. */
public int getOutputSize() {
return outputFrameCount * channelCount * BYTES_PER_SAMPLE;
}
// Internal methods.
private void enlargeOutputBufferIfNeeded(int numSamples) {
if (numOutputSamples + numSamples > outputBufferSize) {
outputBufferSize += (outputBufferSize / 2) + numSamples;
outputBuffer = Arrays.copyOf(outputBuffer, outputBufferSize * numChannels);
/**
* Returns {@code buffer} or a copy of it, such that there is enough space in the returned buffer
* to store {@code newFrameCount} additional frames.
*
* @param buffer The buffer.
* @param frameCount The number of frames already in the buffer.
* @param additionalFrameCount The number of additional frames that need to be stored in the
* buffer.
* @return A buffer with enough space for the additional frames.
*/
private short[] ensureSpaceForAdditionalFrames(
short[] buffer, int frameCount, int additionalFrameCount) {
int currentCapacityFrames = buffer.length / channelCount;
if (frameCount + additionalFrameCount <= currentCapacityFrames) {
return buffer;
} else {
int newCapacityFrames = 3 * currentCapacityFrames / 2 + additionalFrameCount;
return Arrays.copyOf(buffer, newCapacityFrames * channelCount);
}
}
private void enlargeInputBufferIfNeeded(int numSamples) {
if (numInputSamples + numSamples > inputBufferSize) {
inputBufferSize += (inputBufferSize / 2) + numSamples;
inputBuffer = Arrays.copyOf(inputBuffer, inputBufferSize * numChannels);
}
private void removeProcessedInputFrames(int positionFrames) {
int remainingFrames = inputFrameCount - positionFrames;
System.arraycopy(
inputBuffer, positionFrames * channelCount, inputBuffer, 0, remainingFrames * channelCount);
inputFrameCount = remainingFrames;
}
private void removeProcessedInputSamples(int position) {
int remainingSamples = numInputSamples - position;
System.arraycopy(inputBuffer, position * numChannels, inputBuffer, 0,
remainingSamples * numChannels);
numInputSamples = remainingSamples;
private void copyToOutput(short[] samples, int positionFrames, int frameCount) {
outputBuffer = ensureSpaceForAdditionalFrames(outputBuffer, outputFrameCount, frameCount);
System.arraycopy(
samples,
positionFrames * channelCount,
outputBuffer,
outputFrameCount * channelCount,
frameCount * channelCount);
outputFrameCount += frameCount;
}
private void copyToOutput(short[] samples, int position, int numSamples) {
enlargeOutputBufferIfNeeded(numSamples);
System.arraycopy(samples, position * numChannels, outputBuffer, numOutputSamples * numChannels,
numSamples * numChannels);
numOutputSamples += numSamples;
}
private int copyInputToOutput(int position) {
int numSamples = Math.min(maxRequired, remainingInputToCopy);
copyToOutput(inputBuffer, position, numSamples);
remainingInputToCopy -= numSamples;
return numSamples;
private int copyInputToOutput(int positionFrames) {
int frameCount = Math.min(maxRequiredFrameCount, remainingInputToCopyFrameCount);
copyToOutput(inputBuffer, positionFrames, frameCount);
remainingInputToCopyFrameCount -= frameCount;
return frameCount;
}
private void downSampleInput(short[] samples, int position, int skip) {
// If skip is greater than one, average skip samples together and write them to the down-sample
// buffer. If numChannels is greater than one, mix the channels together as we down sample.
int numSamples = maxRequired / skip;
int samplesPerValue = numChannels * skip;
position *= numChannels;
for (int i = 0; i < numSamples; i++) {
// buffer. If channelCount is greater than one, mix the channels together as we down sample.
int frameCount = maxRequiredFrameCount / skip;
int samplesPerValue = channelCount * skip;
position *= channelCount;
for (int i = 0; i < frameCount; i++) {
int value = 0;
for (int j = 0; j < samplesPerValue; j++) {
value += samples[position + i * samplesPerValue + j];
@ -235,13 +234,13 @@ import java.util.Arrays;
int worstPeriod = 255;
int minDiff = 1;
int maxDiff = 0;
position *= numChannels;
position *= channelCount;
for (int period = minPeriod; period <= maxPeriod; period++) {
int diff = 0;
for (int i = 0; i < period; i++) {
short sVal = samples[position + i];
short pVal = samples[position + period + i];
diff += sVal >= pVal ? sVal - pVal : pVal - sVal;
diff += Math.abs(sVal - pVal);
}
// Note that the highest number of samples we add into diff will be less than 256, since we
// skip samples. Thus, diff is a 24 bit number, and we can safely multiply by numSamples
@ -264,36 +263,30 @@ import java.util.Arrays;
* Returns whether the previous pitch period estimate is a better approximation, which can occur
* at the abrupt end of voiced words.
*/
private boolean previousPeriodBetter(int minDiff, int maxDiff, boolean preferNewPeriod) {
private boolean previousPeriodBetter(int minDiff, int maxDiff) {
if (minDiff == 0 || prevPeriod == 0) {
return false;
}
if (preferNewPeriod) {
if (maxDiff > minDiff * 3) {
// Got a reasonable match this period
return false;
}
if (minDiff * 2 <= prevMinDiff * 3) {
// Mismatch is not that much greater this period
return false;
}
} else {
if (minDiff <= prevMinDiff) {
return false;
}
if (maxDiff > minDiff * 3) {
// Got a reasonable match this period.
return false;
}
if (minDiff * 2 <= prevMinDiff * 3) {
// Mismatch is not that much greater this period.
return false;
}
return true;
}
private int findPitchPeriod(short[] samples, int position, boolean preferNewPeriod) {
private int findPitchPeriod(short[] samples, int position) {
// Find the pitch period. This is a critical step, and we may have to try multiple ways to get a
// good answer. This version uses AMDF. To improve speed, we down sample by an integer factor
// get in the 11 kHz range, and then do it again with a narrower frequency range without down
// sampling.
int period;
int retPeriod;
int skip = sampleRate > AMDF_FREQUENCY ? sampleRate / AMDF_FREQUENCY : 1;
if (numChannels == 1 && skip == 1) {
int skip = inputSampleRateHz > AMDF_FREQUENCY ? inputSampleRateHz / AMDF_FREQUENCY : 1;
if (channelCount == 1 && skip == 1) {
period = findPitchPeriodInRange(samples, position, minPeriod, maxPeriod);
} else {
downSampleInput(samples, position, skip);
@ -308,7 +301,7 @@ import java.util.Arrays;
if (maxP > maxPeriod) {
maxP = maxPeriod;
}
if (numChannels == 1) {
if (channelCount == 1) {
period = findPitchPeriodInRange(samples, position, minP, maxP);
} else {
downSampleInput(samples, position, 1);
@ -316,7 +309,7 @@ import java.util.Arrays;
}
}
}
if (previousPeriodBetter(minDiff, maxDiff, preferNewPeriod)) {
if (previousPeriodBetter(minDiff, maxDiff)) {
retPeriod = prevPeriod;
} else {
retPeriod = period;
@ -326,56 +319,35 @@ import java.util.Arrays;
return retPeriod;
}
private void moveNewSamplesToPitchBuffer(int originalNumOutputSamples) {
int numSamples = numOutputSamples - originalNumOutputSamples;
if (numPitchSamples + numSamples > pitchBufferSize) {
pitchBufferSize += (pitchBufferSize / 2) + numSamples;
pitchBuffer = Arrays.copyOf(pitchBuffer, pitchBufferSize * numChannels);
}
System.arraycopy(outputBuffer, originalNumOutputSamples * numChannels, pitchBuffer,
numPitchSamples * numChannels, numSamples * numChannels);
numOutputSamples = originalNumOutputSamples;
numPitchSamples += numSamples;
private void moveNewSamplesToPitchBuffer(int originalOutputFrameCount) {
int frameCount = outputFrameCount - originalOutputFrameCount;
pitchBuffer = ensureSpaceForAdditionalFrames(pitchBuffer, pitchFrameCount, frameCount);
System.arraycopy(
outputBuffer,
originalOutputFrameCount * channelCount,
pitchBuffer,
pitchFrameCount * channelCount,
frameCount * channelCount);
outputFrameCount = originalOutputFrameCount;
pitchFrameCount += frameCount;
}
private void removePitchSamples(int numSamples) {
if (numSamples == 0) {
private void removePitchFrames(int frameCount) {
if (frameCount == 0) {
return;
}
System.arraycopy(pitchBuffer, numSamples * numChannels, pitchBuffer, 0,
(numPitchSamples - numSamples) * numChannels);
numPitchSamples -= numSamples;
}
private void adjustPitch(int originalNumOutputSamples) {
// Latency due to pitch changes could be reduced by looking at past samples to determine pitch,
// rather than future.
if (numOutputSamples == originalNumOutputSamples) {
return;
}
moveNewSamplesToPitchBuffer(originalNumOutputSamples);
int position = 0;
while (numPitchSamples - position >= maxRequired) {
int period = findPitchPeriod(pitchBuffer, position, false);
int newPeriod = (int) (period / pitch);
enlargeOutputBufferIfNeeded(newPeriod);
if (pitch >= 1.0f) {
overlapAdd(newPeriod, numChannels, outputBuffer, numOutputSamples, pitchBuffer, position,
pitchBuffer, position + period - newPeriod);
} else {
int separation = newPeriod - period;
overlapAddWithSeparation(period, numChannels, separation, outputBuffer, numOutputSamples,
pitchBuffer, position, pitchBuffer, position);
}
numOutputSamples += newPeriod;
position += period;
}
removePitchSamples(position);
System.arraycopy(
pitchBuffer,
frameCount * channelCount,
pitchBuffer,
0,
(pitchFrameCount - frameCount) * channelCount);
pitchFrameCount -= frameCount;
}
private short interpolate(short[] in, int inPos, int oldSampleRate, int newSampleRate) {
short left = in[inPos * numChannels];
short right = in[inPos * numChannels + numChannels];
short left = in[inPos];
short right = in[inPos + channelCount];
int position = newRatePosition * oldSampleRate;
int leftPosition = oldRatePosition * newSampleRate;
int rightPosition = (oldRatePosition + 1) * newSampleRate;
@ -384,28 +356,30 @@ import java.util.Arrays;
return (short) ((ratio * left + (width - ratio) * right) / width);
}
private void adjustRate(float rate, int originalNumOutputSamples) {
if (numOutputSamples == originalNumOutputSamples) {
private void adjustRate(float rate, int originalOutputFrameCount) {
if (outputFrameCount == originalOutputFrameCount) {
return;
}
int newSampleRate = (int) (sampleRate / rate);
int oldSampleRate = sampleRate;
int newSampleRate = (int) (inputSampleRateHz / rate);
int oldSampleRate = inputSampleRateHz;
// Set these values to help with the integer math.
while (newSampleRate > (1 << 14) || oldSampleRate > (1 << 14)) {
newSampleRate /= 2;
oldSampleRate /= 2;
}
moveNewSamplesToPitchBuffer(originalNumOutputSamples);
moveNewSamplesToPitchBuffer(originalOutputFrameCount);
// Leave at least one pitch sample in the buffer.
for (int position = 0; position < numPitchSamples - 1; position++) {
for (int position = 0; position < pitchFrameCount - 1; position++) {
while ((oldRatePosition + 1) * newSampleRate > newRatePosition * oldSampleRate) {
enlargeOutputBufferIfNeeded(1);
for (int i = 0; i < numChannels; i++) {
outputBuffer[numOutputSamples * numChannels + i] =
interpolate(pitchBuffer, position + i, oldSampleRate, newSampleRate);
outputBuffer =
ensureSpaceForAdditionalFrames(
outputBuffer, outputFrameCount, /* additionalFrameCount= */ 1);
for (int i = 0; i < channelCount; i++) {
outputBuffer[outputFrameCount * channelCount + i] =
interpolate(pitchBuffer, position * channelCount + i, oldSampleRate, newSampleRate);
}
newRatePosition++;
numOutputSamples++;
outputFrameCount++;
}
oldRatePosition++;
if (oldRatePosition == oldSampleRate) {
@ -414,119 +388,117 @@ import java.util.Arrays;
newRatePosition = 0;
}
}
removePitchSamples(numPitchSamples - 1);
removePitchFrames(pitchFrameCount - 1);
}
private int skipPitchPeriod(short[] samples, int position, float speed, int period) {
// Skip over a pitch period, and copy period/speed samples to the output.
int newSamples;
int newFrameCount;
if (speed >= 2.0f) {
newSamples = (int) (period / (speed - 1.0f));
newFrameCount = (int) (period / (speed - 1.0f));
} else {
newSamples = period;
remainingInputToCopy = (int) (period * (2.0f - speed) / (speed - 1.0f));
newFrameCount = period;
remainingInputToCopyFrameCount = (int) (period * (2.0f - speed) / (speed - 1.0f));
}
enlargeOutputBufferIfNeeded(newSamples);
overlapAdd(newSamples, numChannels, outputBuffer, numOutputSamples, samples, position, samples,
outputBuffer = ensureSpaceForAdditionalFrames(outputBuffer, outputFrameCount, newFrameCount);
overlapAdd(
newFrameCount,
channelCount,
outputBuffer,
outputFrameCount,
samples,
position,
samples,
position + period);
numOutputSamples += newSamples;
return newSamples;
outputFrameCount += newFrameCount;
return newFrameCount;
}
private int insertPitchPeriod(short[] samples, int position, float speed, int period) {
// Insert a pitch period, and determine how much input to copy directly.
int newSamples;
int newFrameCount;
if (speed < 0.5f) {
newSamples = (int) (period * speed / (1.0f - speed));
newFrameCount = (int) (period * speed / (1.0f - speed));
} else {
newSamples = period;
remainingInputToCopy = (int) (period * (2.0f * speed - 1.0f) / (1.0f - speed));
newFrameCount = period;
remainingInputToCopyFrameCount = (int) (period * (2.0f * speed - 1.0f) / (1.0f - speed));
}
enlargeOutputBufferIfNeeded(period + newSamples);
System.arraycopy(samples, position * numChannels, outputBuffer, numOutputSamples * numChannels,
period * numChannels);
overlapAdd(newSamples, numChannels, outputBuffer, numOutputSamples + period, samples,
position + period, samples, position);
numOutputSamples += period + newSamples;
return newSamples;
outputBuffer =
ensureSpaceForAdditionalFrames(outputBuffer, outputFrameCount, period + newFrameCount);
System.arraycopy(
samples,
position * channelCount,
outputBuffer,
outputFrameCount * channelCount,
period * channelCount);
overlapAdd(
newFrameCount,
channelCount,
outputBuffer,
outputFrameCount + period,
samples,
position + period,
samples,
position);
outputFrameCount += period + newFrameCount;
return newFrameCount;
}
private void changeSpeed(float speed) {
if (numInputSamples < maxRequired) {
if (inputFrameCount < maxRequiredFrameCount) {
return;
}
int numSamples = numInputSamples;
int position = 0;
int frameCount = inputFrameCount;
int positionFrames = 0;
do {
if (remainingInputToCopy > 0) {
position += copyInputToOutput(position);
if (remainingInputToCopyFrameCount > 0) {
positionFrames += copyInputToOutput(positionFrames);
} else {
int period = findPitchPeriod(inputBuffer, position, true);
int period = findPitchPeriod(inputBuffer, positionFrames);
if (speed > 1.0) {
position += period + skipPitchPeriod(inputBuffer, position, speed, period);
positionFrames += period + skipPitchPeriod(inputBuffer, positionFrames, speed, period);
} else {
position += insertPitchPeriod(inputBuffer, position, speed, period);
positionFrames += insertPitchPeriod(inputBuffer, positionFrames, speed, period);
}
}
} while (position + maxRequired <= numSamples);
removeProcessedInputSamples(position);
} while (positionFrames + maxRequiredFrameCount <= frameCount);
removeProcessedInputFrames(positionFrames);
}
private void processStreamInput() {
// Resample as many pitch periods as we have buffered on the input.
int originalNumOutputSamples = numOutputSamples;
int originalOutputFrameCount = outputFrameCount;
float s = speed / pitch;
float r = rate * pitch;
if (s > 1.00001 || s < 0.99999) {
changeSpeed(s);
} else {
copyToOutput(inputBuffer, 0, numInputSamples);
numInputSamples = 0;
copyToOutput(inputBuffer, 0, inputFrameCount);
inputFrameCount = 0;
}
if (USE_CHORD_PITCH) {
if (pitch != 1.0f) {
adjustPitch(originalNumOutputSamples);
}
} else if (!USE_CHORD_PITCH && pitch != 1.0f) {
adjustRate(pitch, originalNumOutputSamples);
if (r != 1.0f) {
adjustRate(r, originalOutputFrameCount);
}
}
private static void overlapAdd(int numSamples, int numChannels, short[] out, int outPos,
short[] rampDown, int rampDownPos, short[] rampUp, int rampUpPos) {
for (int i = 0; i < numChannels; i++) {
int o = outPos * numChannels + i;
int u = rampUpPos * numChannels + i;
int d = rampDownPos * numChannels + i;
for (int t = 0; t < numSamples; t++) {
out[o] = (short) ((rampDown[d] * (numSamples - t) + rampUp[u] * t) / numSamples);
o += numChannels;
d += numChannels;
u += numChannels;
}
}
}
private static void overlapAddWithSeparation(int numSamples, int numChannels, int separation,
short[] out, int outPos, short[] rampDown, int rampDownPos, short[] rampUp, int rampUpPos) {
for (int i = 0; i < numChannels; i++) {
int o = outPos * numChannels + i;
int u = rampUpPos * numChannels + i;
int d = rampDownPos * numChannels + i;
for (int t = 0; t < numSamples + separation; t++) {
if (t < separation) {
out[o] = (short) (rampDown[d] * (numSamples - t) / numSamples);
d += numChannels;
} else if (t < numSamples) {
out[o] =
(short) ((rampDown[d] * (numSamples - t) + rampUp[u] * (t - separation))
/ numSamples);
d += numChannels;
u += numChannels;
} else {
out[o] = (short) (rampUp[u] * (t - separation) / numSamples);
u += numChannels;
}
o += numChannels;
private static void overlapAdd(
int frameCount,
int channelCount,
short[] out,
int outPosition,
short[] rampDown,
int rampDownPosition,
short[] rampUp,
int rampUpPosition) {
for (int i = 0; i < channelCount; i++) {
int o = outPosition * channelCount + i;
int u = rampUpPosition * channelCount + i;
int d = rampDownPosition * channelCount + i;
for (int t = 0; t < frameCount; t++) {
out[o] = (short) ((rampDown[d] * (frameCount - t) + rampUp[u] * t) / frameCount);
o += channelCount;
d += channelCount;
u += channelCount;
}
}
}

Просмотреть файл

@ -15,16 +15,17 @@
*/
package org.mozilla.thirdparty.com.google.android.exoplayer2.audio;
import androidx.annotation.Nullable;
import org.mozilla.thirdparty.com.google.android.exoplayer2.C;
import org.mozilla.thirdparty.com.google.android.exoplayer2.C.Encoding;
import org.mozilla.thirdparty.com.google.android.exoplayer2.Format;
import org.mozilla.thirdparty.com.google.android.exoplayer2.util.Assertions;
import org.mozilla.thirdparty.com.google.android.exoplayer2.util.Util;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.ShortBuffer;
/**
* An {@link AudioProcessor} that uses the Sonic library to modify the speed/pitch of audio.
* An {@link AudioProcessor} that uses the Sonic library to modify audio speed/pitch/sample rate.
*/
public final class SonicAudioProcessor implements AudioProcessor {
@ -44,19 +45,33 @@ public final class SonicAudioProcessor implements AudioProcessor {
* The minimum allowed pitch in {@link #setPitch(float)}.
*/
public static final float MINIMUM_PITCH = 0.1f;
/**
* Indicates that the output sample rate should be the same as the input.
*/
public static final int SAMPLE_RATE_NO_CHANGE = -1;
/**
* The threshold below which the difference between two pitch/speed factors is negligible.
*/
private static final float CLOSE_THRESHOLD = 0.01f;
private int channelCount;
private int sampleRateHz;
/**
* The minimum number of output bytes at which the speedup is calculated using the input/output
* byte counts, rather than using the current playback parameters speed.
*/
private static final int MIN_BYTES_FOR_SPEEDUP_CALCULATION = 1024;
private Sonic sonic;
private int pendingOutputSampleRate;
private float speed;
private float pitch;
private AudioFormat pendingInputAudioFormat;
private AudioFormat pendingOutputAudioFormat;
private AudioFormat inputAudioFormat;
private AudioFormat outputAudioFormat;
private boolean pendingSonicRecreation;
@Nullable private Sonic sonic;
private ByteBuffer buffer;
private ShortBuffer shortBuffer;
private ByteBuffer outputBuffer;
@ -70,80 +85,110 @@ public final class SonicAudioProcessor implements AudioProcessor {
public SonicAudioProcessor() {
speed = 1f;
pitch = 1f;
channelCount = Format.NO_VALUE;
sampleRateHz = Format.NO_VALUE;
pendingInputAudioFormat = AudioFormat.NOT_SET;
pendingOutputAudioFormat = AudioFormat.NOT_SET;
inputAudioFormat = AudioFormat.NOT_SET;
outputAudioFormat = AudioFormat.NOT_SET;
buffer = EMPTY_BUFFER;
shortBuffer = buffer.asShortBuffer();
outputBuffer = EMPTY_BUFFER;
pendingOutputSampleRate = SAMPLE_RATE_NO_CHANGE;
}
/**
* Sets the playback speed. The new speed will take effect after a call to {@link #flush()}.
* Sets the playback speed. This method may only be called after draining data through the
* processor. The value returned by {@link #isActive()} may change, and the processor must be
* {@link #flush() flushed} before queueing more data.
*
* @param speed The requested new playback speed.
* @return The actual new playback speed.
*/
public float setSpeed(float speed) {
this.speed = Util.constrainValue(speed, MINIMUM_SPEED, MAXIMUM_SPEED);
return this.speed;
speed = Util.constrainValue(speed, MINIMUM_SPEED, MAXIMUM_SPEED);
if (this.speed != speed) {
this.speed = speed;
pendingSonicRecreation = true;
}
return speed;
}
/**
* Sets the playback pitch. The new pitch will take effect after a call to {@link #flush()}.
* Sets the playback pitch. This method may only be called after draining data through the
* processor. The value returned by {@link #isActive()} may change, and the processor must be
* {@link #flush() flushed} before queueing more data.
*
* @param pitch The requested new pitch.
* @return The actual new pitch.
*/
public float setPitch(float pitch) {
this.pitch = Util.constrainValue(pitch, MINIMUM_PITCH, MAXIMUM_PITCH);
pitch = Util.constrainValue(pitch, MINIMUM_PITCH, MAXIMUM_PITCH);
if (this.pitch != pitch) {
this.pitch = pitch;
pendingSonicRecreation = true;
}
return pitch;
}
/**
* Returns the number of bytes of input queued since the last call to {@link #flush()}.
* Sets the sample rate for output audio, in Hertz. Pass {@link #SAMPLE_RATE_NO_CHANGE} to output
* audio at the same sample rate as the input. After calling this method, call {@link
* #configure(AudioFormat)} to configure the processor with the new sample rate.
*
* @param sampleRateHz The sample rate for output audio, in Hertz.
* @see #configure(AudioFormat)
*/
public long getInputByteCount() {
return inputBytes;
public void setOutputSampleRateHz(int sampleRateHz) {
pendingOutputSampleRate = sampleRateHz;
}
/**
* Returns the number of bytes of output dequeued since the last call to {@link #flush()}.
* Returns the specified duration scaled to take into account the speedup factor of this instance,
* in the same units as {@code duration}.
*
* @param duration The duration to scale taking into account speedup.
* @return The specified duration scaled to take into account speedup, in the same units as
* {@code duration}.
*/
public long getOutputByteCount() {
return outputBytes;
public long scaleDurationForSpeedup(long duration) {
if (outputBytes >= MIN_BYTES_FOR_SPEEDUP_CALCULATION) {
return outputAudioFormat.sampleRate == inputAudioFormat.sampleRate
? Util.scaleLargeTimestamp(duration, inputBytes, outputBytes)
: Util.scaleLargeTimestamp(
duration,
inputBytes * outputAudioFormat.sampleRate,
outputBytes * inputAudioFormat.sampleRate);
} else {
return (long) ((double) speed * duration);
}
}
@Override
public boolean configure(int sampleRateHz, int channelCount, @Encoding int encoding)
throws UnhandledFormatException {
if (encoding != C.ENCODING_PCM_16BIT) {
throw new UnhandledFormatException(sampleRateHz, channelCount, encoding);
public AudioFormat configure(AudioFormat inputAudioFormat) throws UnhandledAudioFormatException {
if (inputAudioFormat.encoding != C.ENCODING_PCM_16BIT) {
throw new UnhandledAudioFormatException(inputAudioFormat);
}
if (this.sampleRateHz == sampleRateHz && this.channelCount == channelCount) {
return false;
}
this.sampleRateHz = sampleRateHz;
this.channelCount = channelCount;
return true;
int outputSampleRateHz =
pendingOutputSampleRate == SAMPLE_RATE_NO_CHANGE
? inputAudioFormat.sampleRate
: pendingOutputSampleRate;
pendingInputAudioFormat = inputAudioFormat;
pendingOutputAudioFormat =
new AudioFormat(outputSampleRateHz, inputAudioFormat.channelCount, C.ENCODING_PCM_16BIT);
pendingSonicRecreation = true;
return pendingOutputAudioFormat;
}
@Override
public boolean isActive() {
return Math.abs(speed - 1f) >= CLOSE_THRESHOLD || Math.abs(pitch - 1f) >= CLOSE_THRESHOLD;
}
@Override
public int getOutputChannelCount() {
return channelCount;
}
@Override
public int getOutputEncoding() {
return C.ENCODING_PCM_16BIT;
return pendingOutputAudioFormat.sampleRate != Format.NO_VALUE
&& (Math.abs(speed - 1f) >= CLOSE_THRESHOLD
|| Math.abs(pitch - 1f) >= CLOSE_THRESHOLD
|| pendingOutputAudioFormat.sampleRate != pendingInputAudioFormat.sampleRate);
}
@Override
public void queueInput(ByteBuffer inputBuffer) {
Sonic sonic = Assertions.checkNotNull(this.sonic);
if (inputBuffer.hasRemaining()) {
ShortBuffer shortBuffer = inputBuffer.asShortBuffer();
int inputSize = inputBuffer.remaining();
@ -151,7 +196,7 @@ public final class SonicAudioProcessor implements AudioProcessor {
sonic.queueInput(shortBuffer);
inputBuffer.position(inputBuffer.position() + inputSize);
}
int outputSize = sonic.getSamplesAvailable() * channelCount * 2;
int outputSize = sonic.getOutputSize();
if (outputSize > 0) {
if (buffer.capacity() < outputSize) {
buffer = ByteBuffer.allocateDirect(outputSize).order(ByteOrder.nativeOrder());
@ -169,7 +214,9 @@ public final class SonicAudioProcessor implements AudioProcessor {
@Override
public void queueEndOfStream() {
sonic.queueEndOfStream();
if (sonic != null) {
sonic.queueEndOfStream();
}
inputEnded = true;
}
@ -182,14 +229,26 @@ public final class SonicAudioProcessor implements AudioProcessor {
@Override
public boolean isEnded() {
return inputEnded && (sonic == null || sonic.getSamplesAvailable() == 0);
return inputEnded && (sonic == null || sonic.getOutputSize() == 0);
}
@Override
public void flush() {
sonic = new Sonic(sampleRateHz, channelCount);
sonic.setSpeed(speed);
sonic.setPitch(pitch);
if (isActive()) {
inputAudioFormat = pendingInputAudioFormat;
outputAudioFormat = pendingOutputAudioFormat;
if (pendingSonicRecreation) {
sonic =
new Sonic(
inputAudioFormat.sampleRate,
inputAudioFormat.channelCount,
speed,
pitch,
outputAudioFormat.sampleRate);
} else if (sonic != null) {
sonic.flush();
}
}
outputBuffer = EMPTY_BUFFER;
inputBytes = 0;
outputBytes = 0;
@ -198,12 +257,18 @@ public final class SonicAudioProcessor implements AudioProcessor {
@Override
public void reset() {
sonic = null;
speed = 1f;
pitch = 1f;
pendingInputAudioFormat = AudioFormat.NOT_SET;
pendingOutputAudioFormat = AudioFormat.NOT_SET;
inputAudioFormat = AudioFormat.NOT_SET;
outputAudioFormat = AudioFormat.NOT_SET;
buffer = EMPTY_BUFFER;
shortBuffer = buffer.asShortBuffer();
outputBuffer = EMPTY_BUFFER;
channelCount = Format.NO_VALUE;
sampleRateHz = Format.NO_VALUE;
pendingOutputSampleRate = SAMPLE_RATE_NO_CHANGE;
pendingSonicRecreation = false;
sonic = null;
inputBytes = 0;
outputBytes = 0;
inputEnded = false;

Просмотреть файл

@ -0,0 +1,235 @@
/*
* Copyright (C) 2018 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.mozilla.thirdparty.com.google.android.exoplayer2.audio;
import androidx.annotation.Nullable;
import org.mozilla.thirdparty.com.google.android.exoplayer2.C;
import org.mozilla.thirdparty.com.google.android.exoplayer2.util.Assertions;
import org.mozilla.thirdparty.com.google.android.exoplayer2.util.Log;
import org.mozilla.thirdparty.com.google.android.exoplayer2.util.Util;
import java.io.IOException;
import java.io.RandomAccessFile;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
/**
 * Audio processor that outputs its input unmodified and also outputs its input to a given sink.
 * This is intended to be used for diagnostics and debugging.
 *
 * <p>This audio processor can be inserted into the audio processor chain to access audio data
 * before/after particular processing steps have been applied. For example, to get audio output
 * after playback speed adjustment and silence skipping have been applied it is necessary to pass a
 * custom {@link org.mozilla.thirdparty.com.google.android.exoplayer2.audio.DefaultAudioSink.AudioProcessorChain} when
 * creating the audio sink, and include this audio processor after all other audio processors.
 */
public final class TeeAudioProcessor extends BaseAudioProcessor {

  /** A sink for audio buffers handled by the audio processor. */
  public interface AudioBufferSink {

    /** Called when the audio processor is flushed with a format of subsequent input. */
    void flush(int sampleRateHz, int channelCount, @C.PcmEncoding int encoding);

    /**
     * Called when data is written to the audio processor.
     *
     * @param buffer A read-only buffer containing input which the audio processor will handle.
     */
    void handleBuffer(ByteBuffer buffer);
  }

  // Receives a read-only copy of every buffer queued to this processor.
  private final AudioBufferSink audioBufferSink;

  /**
   * Creates a new tee audio processor, sending incoming data to the given {@link AudioBufferSink}.
   *
   * @param audioBufferSink The audio buffer sink that will receive input queued to this audio
   *     processor.
   */
  public TeeAudioProcessor(AudioBufferSink audioBufferSink) {
    this.audioBufferSink = Assertions.checkNotNull(audioBufferSink);
  }

  @Override
  public AudioFormat onConfigure(AudioFormat inputAudioFormat) {
    // This processor is always active (if passed to the sink) and outputs its input.
    return inputAudioFormat;
  }

  @Override
  public void queueInput(ByteBuffer inputBuffer) {
    int remaining = inputBuffer.remaining();
    if (remaining == 0) {
      return;
    }
    // Tee the unconsumed input to the sink via a read-only view, then pass the
    // same bytes through unmodified as this processor's output.
    audioBufferSink.handleBuffer(inputBuffer.asReadOnlyBuffer());
    replaceOutputBuffer(remaining).put(inputBuffer).flip();
  }

  @Override
  protected void onQueueEndOfStream() {
    flushSinkIfActive();
  }

  @Override
  protected void onReset() {
    flushSinkIfActive();
  }

  // Notifies the sink of the current input format so it can finalize pending output.
  // isActive() is inherited from BaseAudioProcessor (not visible here); presumably it
  // is true once the processor has been configured — TODO confirm against the base class.
  private void flushSinkIfActive() {
    if (isActive()) {
      audioBufferSink.flush(
          inputAudioFormat.sampleRate, inputAudioFormat.channelCount, inputAudioFormat.encoding);
    }
  }

  /**
   * A sink for audio buffers that writes output audio as .wav files with a given path prefix. When
   * new audio data is handled after flushing the audio processor, a counter is incremented and its
   * value is appended to the output file name.
   *
   * <p>Note: if writing to external storage it's necessary to grant the {@code
   * WRITE_EXTERNAL_STORAGE} permission.
   */
  public static final class WavFileAudioBufferSink implements AudioBufferSink {

    private static final String TAG = "WaveFileAudioBufferSink";

    // Byte offsets of the two RIFF size fields that are initially written as
    // placeholders and patched in reset() once the total size is known.
    private static final int FILE_SIZE_MINUS_8_OFFSET = 4;
    private static final int FILE_SIZE_MINUS_44_OFFSET = 40;
    // Total length of the WAV header written by writeFileHeader, in bytes.
    private static final int HEADER_LENGTH = 44;

    private final String outputFileNamePrefix;
    // Reusable scratch space; scratchByteBuffer wraps scratchBuffer little-endian
    // because WAV field values after the FourCCs are little endian.
    private final byte[] scratchBuffer;
    private final ByteBuffer scratchByteBuffer;

    // Format of the audio currently being written, set on flush().
    private int sampleRateHz;
    private int channelCount;
    @C.PcmEncoding private int encoding;
    // Open output file, or null if no file has been started since the last reset.
    @Nullable private RandomAccessFile randomAccessFile;
    // Suffix counter appended to each successive output file name.
    private int counter;
    // Bytes written to the current file, including the header.
    private int bytesWritten;

    /**
     * Creates a new audio buffer sink that writes to .wav files with the given prefix.
     *
     * @param outputFileNamePrefix The prefix for output files.
     */
    public WavFileAudioBufferSink(String outputFileNamePrefix) {
      this.outputFileNamePrefix = outputFileNamePrefix;
      scratchBuffer = new byte[1024];
      scratchByteBuffer = ByteBuffer.wrap(scratchBuffer).order(ByteOrder.LITTLE_ENDIAN);
    }

    @Override
    public void flush(int sampleRateHz, int channelCount, @C.PcmEncoding int encoding) {
      try {
        // Finish the previous file (patch its size fields and close it) before
        // recording the new format.
        reset();
      } catch (IOException e) {
        Log.e(TAG, "Error resetting", e);
      }
      this.sampleRateHz = sampleRateHz;
      this.channelCount = channelCount;
      this.encoding = encoding;
    }

    @Override
    public void handleBuffer(ByteBuffer buffer) {
      try {
        maybePrepareFile();
        writeBuffer(buffer);
      } catch (IOException e) {
        Log.e(TAG, "Error writing data", e);
      }
    }

    // Lazily opens the next output file and writes its header on first data.
    private void maybePrepareFile() throws IOException {
      if (randomAccessFile != null) {
        return;
      }
      RandomAccessFile randomAccessFile = new RandomAccessFile(getNextOutputFileName(), "rw");
      writeFileHeader(randomAccessFile);
      this.randomAccessFile = randomAccessFile;
      bytesWritten = HEADER_LENGTH;
    }

    // Writes a 44-byte canonical WAV header. The two size fields are written as -1
    // placeholders and patched in reset() when the final size is known.
    private void writeFileHeader(RandomAccessFile randomAccessFile) throws IOException {
      // Write the start of the header as big endian data.
      randomAccessFile.writeInt(WavUtil.RIFF_FOURCC);
      randomAccessFile.writeInt(-1);
      randomAccessFile.writeInt(WavUtil.WAVE_FOURCC);
      randomAccessFile.writeInt(WavUtil.FMT_FOURCC);
      // Write the rest of the header as little endian data.
      scratchByteBuffer.clear();
      scratchByteBuffer.putInt(16);
      scratchByteBuffer.putShort((short) WavUtil.getTypeForPcmEncoding(encoding));
      scratchByteBuffer.putShort((short) channelCount);
      scratchByteBuffer.putInt(sampleRateHz);
      int bytesPerSample = Util.getPcmFrameSize(encoding, channelCount);
      scratchByteBuffer.putInt(bytesPerSample * sampleRateHz);
      scratchByteBuffer.putShort((short) bytesPerSample);
      scratchByteBuffer.putShort((short) (8 * bytesPerSample / channelCount));
      randomAccessFile.write(scratchBuffer, 0, scratchByteBuffer.position());
      // Write the start of the data chunk as big endian data.
      randomAccessFile.writeInt(WavUtil.DATA_FOURCC);
      randomAccessFile.writeInt(-1);
    }

    // Drains the buffer to the file in scratchBuffer-sized chunks.
    private void writeBuffer(ByteBuffer buffer) throws IOException {
      RandomAccessFile randomAccessFile = Assertions.checkNotNull(this.randomAccessFile);
      while (buffer.hasRemaining()) {
        int bytesToWrite = Math.min(buffer.remaining(), scratchBuffer.length);
        buffer.get(scratchBuffer, 0, bytesToWrite);
        randomAccessFile.write(scratchBuffer, 0, bytesToWrite);
        bytesWritten += bytesToWrite;
      }
    }

    // Patches the placeholder RIFF/data size fields with the real byte counts,
    // then closes the current file. No-op if no file is open.
    private void reset() throws IOException {
      RandomAccessFile randomAccessFile = this.randomAccessFile;
      if (randomAccessFile == null) {
        return;
      }
      try {
        scratchByteBuffer.clear();
        scratchByteBuffer.putInt(bytesWritten - 8);
        randomAccessFile.seek(FILE_SIZE_MINUS_8_OFFSET);
        randomAccessFile.write(scratchBuffer, 0, 4);
        scratchByteBuffer.clear();
        scratchByteBuffer.putInt(bytesWritten - 44);
        randomAccessFile.seek(FILE_SIZE_MINUS_44_OFFSET);
        randomAccessFile.write(scratchBuffer, 0, 4);
      } catch (IOException e) {
        // The file may still be playable, so just log a warning.
        Log.w(TAG, "Error updating file size", e);
      }
      try {
        randomAccessFile.close();
      } finally {
        this.randomAccessFile = null;
      }
    }

    private String getNextOutputFileName() {
      return Util.formatInvariant("%s-%04d.wav", outputFileNamePrefix, counter++);
    }
  }
}

Просмотреть файл

@ -0,0 +1,178 @@
/*
* Copyright (C) 2017 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.mozilla.thirdparty.com.google.android.exoplayer2.audio;
import org.mozilla.thirdparty.com.google.android.exoplayer2.C;
import org.mozilla.thirdparty.com.google.android.exoplayer2.util.Util;
import java.nio.ByteBuffer;
/** Audio processor for trimming samples from the start/end of data. */
/* package */ final class TrimmingAudioProcessor extends BaseAudioProcessor {

  // Only 16-bit PCM input is supported; the output encoding is the same.
  @C.PcmEncoding private static final int OUTPUT_ENCODING = C.ENCODING_PCM_16BIT;

  // Frame counts to trim from the start/end, applied on the next configure().
  private int trimStartFrames;
  private int trimEndFrames;
  // Set in onConfigure(); cleared on the first flush afterwards so that trimming
  // is applied exactly once per (re)configuration.
  private boolean reconfigurationPending;

  // Bytes still to be dropped from the start of the stream.
  private int pendingTrimStartBytes;
  // Holding buffer for the last trimEndFrames worth of bytes, so they can be
  // discarded if the stream ends (see onQueueEndOfStream).
  private byte[] endBuffer;
  private int endBufferSize;
  // Total frames trimmed since the last resetTrimmedFrameCount() call.
  private long trimmedFrameCount;

  /** Creates a new audio processor for trimming samples from the start/end of data. */
  public TrimmingAudioProcessor() {
    endBuffer = Util.EMPTY_BYTE_ARRAY;
  }

  /**
   * Sets the number of audio frames to trim from the start and end of audio passed to this
   * processor. After calling this method, call {@link #configure(AudioFormat)} to apply the new
   * trimming frame counts.
   *
   * @param trimStartFrames The number of audio frames to trim from the start of audio.
   * @param trimEndFrames The number of audio frames to trim from the end of audio.
   * @see AudioSink#configure(int, int, int, int, int[], int, int)
   */
  public void setTrimFrameCount(int trimStartFrames, int trimEndFrames) {
    this.trimStartFrames = trimStartFrames;
    this.trimEndFrames = trimEndFrames;
  }

  /** Sets the trimmed frame count returned by {@link #getTrimmedFrameCount()} to zero. */
  public void resetTrimmedFrameCount() {
    trimmedFrameCount = 0;
  }

  /**
   * Returns the number of audio frames trimmed since the last call to {@link
   * #resetTrimmedFrameCount()}.
   */
  public long getTrimmedFrameCount() {
    return trimmedFrameCount;
  }

  @Override
  public AudioFormat onConfigure(AudioFormat inputAudioFormat)
      throws UnhandledAudioFormatException {
    if (inputAudioFormat.encoding != OUTPUT_ENCODING) {
      throw new UnhandledAudioFormatException(inputAudioFormat);
    }
    reconfigurationPending = true;
    // Only report as active (non-NOT_SET output) when there is actually something to trim.
    return trimStartFrames != 0 || trimEndFrames != 0 ? inputAudioFormat : AudioFormat.NOT_SET;
  }

  @Override
  public void queueInput(ByteBuffer inputBuffer) {
    int position = inputBuffer.position();
    int limit = inputBuffer.limit();
    int remaining = limit - position;
    if (remaining == 0) {
      return;
    }
    // Trim any pending start bytes from the input buffer.
    int trimBytes = Math.min(remaining, pendingTrimStartBytes);
    trimmedFrameCount += trimBytes / inputAudioFormat.bytesPerFrame;
    pendingTrimStartBytes -= trimBytes;
    inputBuffer.position(position + trimBytes);
    if (pendingTrimStartBytes > 0) {
      // Nothing to output yet.
      return;
    }
    remaining -= trimBytes;
    // endBuffer must be kept as full as possible, so that we trim the right amount of media if we
    // don't receive any more input. After taking into account the number of bytes needed to keep
    // endBuffer as full as possible, the output should be any surplus bytes currently in endBuffer
    // followed by any surplus bytes in the new inputBuffer.
    int remainingBytesToOutput = endBufferSize + remaining - endBuffer.length;
    ByteBuffer buffer = replaceOutputBuffer(remainingBytesToOutput);
    // Output from endBuffer.
    int endBufferBytesToOutput = Util.constrainValue(remainingBytesToOutput, 0, endBufferSize);
    buffer.put(endBuffer, 0, endBufferBytesToOutput);
    remainingBytesToOutput -= endBufferBytesToOutput;
    // Output from inputBuffer, restoring its limit afterwards.
    int inputBufferBytesToOutput = Util.constrainValue(remainingBytesToOutput, 0, remaining);
    inputBuffer.limit(inputBuffer.position() + inputBufferBytesToOutput);
    buffer.put(inputBuffer);
    inputBuffer.limit(limit);
    remaining -= inputBufferBytesToOutput;
    // Compact endBuffer, then repopulate it using the new input.
    endBufferSize -= endBufferBytesToOutput;
    System.arraycopy(endBuffer, endBufferBytesToOutput, endBuffer, 0, endBufferSize);
    inputBuffer.get(endBuffer, endBufferSize, remaining);
    endBufferSize += remaining;
    buffer.flip();
  }

  @Override
  public ByteBuffer getOutput() {
    if (super.isEnded() && endBufferSize > 0) {
      // Because audio processors may be drained in the middle of the stream we assume that the
      // contents of the end buffer need to be output. For gapless transitions, configure will
      // always be called, so the end buffer is cleared in onQueueEndOfStream.
      replaceOutputBuffer(endBufferSize).put(endBuffer, 0, endBufferSize).flip();
      endBufferSize = 0;
    }
    return super.getOutput();
  }

  @Override
  public boolean isEnded() {
    // Not ended until any bytes held back in endBuffer have been emitted.
    return super.isEnded() && endBufferSize == 0;
  }

  @Override
  protected void onQueueEndOfStream() {
    if (reconfigurationPending) {
      // Trim audio in the end buffer.
      if (endBufferSize > 0) {
        trimmedFrameCount += endBufferSize / inputAudioFormat.bytesPerFrame;
      }
      endBufferSize = 0;
    }
  }

  @Override
  protected void onFlush() {
    if (reconfigurationPending) {
      // This is the initial flush after reconfiguration. Prepare to trim bytes from the start/end.
      reconfigurationPending = false;
      endBuffer = new byte[trimEndFrames * inputAudioFormat.bytesPerFrame];
      pendingTrimStartBytes = trimStartFrames * inputAudioFormat.bytesPerFrame;
    } else {
      // This is a flush during playback (after the initial flush). We assume this was caused by a
      // seek to a non-zero position and clear pending start bytes. This assumption may be wrong (we
      // may be seeking to zero), but playing data that should have been trimmed shouldn't be
      // noticeable after a seek. Ideally we would check the timestamp of the first input buffer
      // queued after flushing to decide whether to trim (see also [Internal: b/77292509]).
      pendingTrimStartBytes = 0;
    }
    endBufferSize = 0;
  }

  @Override
  protected void onReset() {
    endBuffer = Util.EMPTY_BYTE_ARRAY;
  }
}

Просмотреть файл

@ -0,0 +1,91 @@
/*
* Copyright (C) 2018 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.mozilla.thirdparty.com.google.android.exoplayer2.audio;
import org.mozilla.thirdparty.com.google.android.exoplayer2.C;
import org.mozilla.thirdparty.com.google.android.exoplayer2.Format;
import org.mozilla.thirdparty.com.google.android.exoplayer2.util.Util;
/** Helper constants and conversions for the WAVE (RIFF) audio file format. */
public final class WavUtil {

  /** Four character code for "RIFF". */
  public static final int RIFF_FOURCC = 0x52494646;
  /** Four character code for "WAVE". */
  public static final int WAVE_FOURCC = 0x57415645;
  /** Four character code for "fmt ". */
  public static final int FMT_FOURCC = 0x666d7420;
  /** Four character code for "data". */
  public static final int DATA_FOURCC = 0x64617461;

  /** WAVE type value for integer PCM audio data. */
  public static final int TYPE_PCM = 0x0001;
  /** WAVE type value for float PCM audio data. */
  public static final int TYPE_FLOAT = 0x0003;
  /** WAVE type value for 8-bit ITU-T G.711 A-law audio data. */
  public static final int TYPE_ALAW = 0x0006;
  /** WAVE type value for 8-bit ITU-T G.711 mu-law audio data. */
  public static final int TYPE_MLAW = 0x0007;
  /** WAVE type value for IMA ADPCM audio data. */
  public static final int TYPE_IMA_ADPCM = 0x0011;
  /** WAVE type value for extended WAVE format. */
  public static final int TYPE_WAVE_FORMAT_EXTENSIBLE = 0xFFFE;

  /**
   * Returns the WAVE format type value for the given {@link C.PcmEncoding}.
   *
   * @param pcmEncoding The {@link C.PcmEncoding} value.
   * @return The corresponding WAVE format type.
   * @throws IllegalArgumentException If {@code pcmEncoding} is not a {@link C.PcmEncoding}, or if
   *     it's {@link C#ENCODING_INVALID} or {@link Format#NO_VALUE}.
   */
  public static int getTypeForPcmEncoding(@C.PcmEncoding int pcmEncoding) {
    boolean isLittleEndianIntegerPcm =
        pcmEncoding == C.ENCODING_PCM_8BIT
            || pcmEncoding == C.ENCODING_PCM_16BIT
            || pcmEncoding == C.ENCODING_PCM_24BIT
            || pcmEncoding == C.ENCODING_PCM_32BIT;
    if (isLittleEndianIntegerPcm) {
      return TYPE_PCM;
    }
    if (pcmEncoding == C.ENCODING_PCM_FLOAT) {
      return TYPE_FLOAT;
    }
    // ENCODING_PCM_16BIT_BIG_ENDIAN is deliberately rejected because TYPE_PCM implies little
    // endian data; ENCODING_INVALID and Format.NO_VALUE have no WAVE equivalent.
    throw new IllegalArgumentException();
  }

  /**
   * Returns the {@link C.PcmEncoding} for the given WAVE format type value, or {@link
   * C#ENCODING_INVALID} if the type is not a known PCM type.
   */
  public static @C.PcmEncoding int getPcmEncodingForType(int type, int bitsPerSample) {
    if (type == TYPE_PCM || type == TYPE_WAVE_FORMAT_EXTENSIBLE) {
      return Util.getPcmEncoding(bitsPerSample);
    }
    if (type == TYPE_FLOAT) {
      // Only 32-bit floats are supported.
      return bitsPerSample == 32 ? C.ENCODING_PCM_FLOAT : C.ENCODING_INVALID;
    }
    return C.ENCODING_INVALID;
  }

  private WavUtil() {
    // Prevent instantiation.
  }
}

Просмотреть файл

@ -0,0 +1,19 @@
/*
* Copyright (C) 2019 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
@NonNullApi
package org.mozilla.thirdparty.com.google.android.exoplayer2.audio;
import org.mozilla.thirdparty.com.google.android.exoplayer2.util.NonNullApi;

Просмотреть файл

@ -0,0 +1,31 @@
/*
* Copyright (C) 2019 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.mozilla.thirdparty.com.google.android.exoplayer2.database;
import android.database.SQLException;
import java.io.IOException;
/** An {@link IOException} whose cause is an {@link SQLException}. */
public final class DatabaseIOException extends IOException {

  /**
   * Creates an instance whose message is that of {@code cause}.
   *
   * @param cause The underlying {@link SQLException}.
   */
  public DatabaseIOException(SQLException cause) {
    super(cause);
  }

  /**
   * Creates an instance with the given message.
   *
   * @param cause The underlying {@link SQLException}.
   * @param message A message describing the failure.
   */
  public DatabaseIOException(SQLException cause, String message) {
    super(message, cause);
  }
}

Просмотреть файл

@ -0,0 +1,56 @@
/*
* Copyright (C) 2018 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.mozilla.thirdparty.com.google.android.exoplayer2.database;
import android.database.sqlite.SQLiteDatabase;
import android.database.sqlite.SQLiteException;
/**
 * Provides {@link SQLiteDatabase} instances to ExoPlayer components, which may read and write
 * tables prefixed with {@link #TABLE_PREFIX}.
 */
public interface DatabaseProvider {

  /** Prefix for tables that can be read and written by ExoPlayer components. */
  String TABLE_PREFIX = "ExoPlayer";

  /**
   * Creates and/or opens a database that will be used for reading and writing.
   *
   * <p>Once opened successfully, the database is cached, so you can call this method every time you
   * need to write to the database. Errors such as bad permissions or a full disk may cause this
   * method to fail, but future attempts may succeed if the problem is fixed.
   *
   * @throws SQLiteException If the database cannot be opened for writing.
   * @return A read/write database object.
   */
  SQLiteDatabase getWritableDatabase();

  /**
   * Creates and/or opens a database. This will be the same object returned by {@link
   * #getWritableDatabase()} unless some problem, such as a full disk, requires the database to be
   * opened read-only. In that case, a read-only database object will be returned. If the problem is
   * fixed, a future call to {@link #getWritableDatabase()} may succeed, in which case the read-only
   * database object will be closed and the read/write object will be returned in the future.
   *
   * <p>Once opened successfully, the database is cached, so you can call this method every time you
   * need to read from the database.
   *
   * @throws SQLiteException If the database cannot be opened.
   * @return A database object valid until {@link #getWritableDatabase()} is called.
   */
  SQLiteDatabase getReadableDatabase();
}

Просмотреть файл

@ -0,0 +1,42 @@
/*
* Copyright (C) 2018 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.mozilla.thirdparty.com.google.android.exoplayer2.database;
import android.database.sqlite.SQLiteDatabase;
import android.database.sqlite.SQLiteOpenHelper;
/**
 * A {@link DatabaseProvider} that delegates to an application-supplied {@link SQLiteOpenHelper}.
 *
 * <p>Use this when the host application already manages its own database; ExoPlayer tables are
 * stored in that database, namespaced by {@link DatabaseProvider#TABLE_PREFIX}.
 */
public final class DefaultDatabaseProvider implements DatabaseProvider {

  // The helper that owns the underlying database; all calls are forwarded to it unchanged.
  private final SQLiteOpenHelper openHelper;

  /**
   * @param sqliteOpenHelper An {@link SQLiteOpenHelper} from which to obtain database instances.
   */
  public DefaultDatabaseProvider(SQLiteOpenHelper sqliteOpenHelper) {
    this.openHelper = sqliteOpenHelper;
  }

  /** Returns the helper's writable database. */
  @Override
  public SQLiteDatabase getWritableDatabase() {
    return openHelper.getWritableDatabase();
  }

  /** Returns the helper's readable database. */
  @Override
  public SQLiteDatabase getReadableDatabase() {
    return openHelper.getReadableDatabase();
  }
}

Просмотреть файл

@ -0,0 +1,95 @@
/*
* Copyright (C) 2018 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.mozilla.thirdparty.com.google.android.exoplayer2.database;
import android.content.Context;
import android.database.Cursor;
import android.database.SQLException;
import android.database.sqlite.SQLiteDatabase;
import android.database.sqlite.SQLiteOpenHelper;
import org.mozilla.thirdparty.com.google.android.exoplayer2.util.Log;
/**
 * An {@link SQLiteOpenHelper} that provides instances of a standalone ExoPlayer database.
 *
 * <p>Suitable for use by applications that do not already have their own database, or that would
 * prefer to keep ExoPlayer tables isolated in their own database. Other applications should prefer
 * to use {@link DefaultDatabaseProvider} with their own {@link SQLiteOpenHelper}.
 */
public final class ExoDatabaseProvider extends SQLiteOpenHelper implements DatabaseProvider {

  /** The file name used for the standalone ExoPlayer database. */
  public static final String DATABASE_NAME = "exoplayer_internal.db";

  // Schema version of this container database. Individual features version their own tables
  // separately (see VersionTable), so this rarely needs to change.
  private static final int VERSION = 1;
  private static final String TAG = "ExoDatabaseProvider";

  /**
   * Provides instances of the database located by passing {@link #DATABASE_NAME} to {@link
   * Context#getDatabasePath(String)}.
   *
   * @param context Any context.
   */
  public ExoDatabaseProvider(Context context) {
    // Use the application context so the helper does not retain a shorter-lived context.
    super(context.getApplicationContext(), DATABASE_NAME, /* factory= */ null, VERSION);
  }

  @Override
  public void onCreate(SQLiteDatabase db) {
    // Features create their own tables.
  }

  @Override
  public void onUpgrade(SQLiteDatabase db, int oldVersion, int newVersion) {
    // Features handle their own upgrades.
  }

  @Override
  public void onDowngrade(SQLiteDatabase db, int oldVersion, int newVersion) {
    // A downgrade means the on-disk schema is newer than this code understands; start fresh.
    wipeDatabase(db);
  }

  /**
   * Makes a best effort to wipe the existing database. The wipe may be incomplete if the database
   * contains foreign key constraints.
   */
  private static void wipeDatabase(SQLiteDatabase db) {
    // sqlite_master lists every entity (table, index, view, trigger) in the database.
    String[] projection = {"type", "name"};
    try (Cursor cursor =
        db.query(
            "sqlite_master",
            projection,
            /* selection= */ null,
            /* selectionArgs= */ null,
            /* groupBy= */ null,
            /* having= */ null,
            /* orderBy= */ null)) {
      while (cursor.moveToNext()) {
        String entityType = cursor.getString(0);
        String entityName = cursor.getString(1);
        if ("sqlite_sequence".equals(entityName)) {
          // sqlite_sequence is maintained by SQLite itself and cannot be dropped.
          continue;
        }
        String sql = "DROP " + entityType + " IF EXISTS " + entityName;
        try {
          db.execSQL(sql);
        } catch (SQLException e) {
          // Best effort: log and keep going so remaining entities are still dropped.
          Log.e(TAG, "Error executing " + sql, e);
        }
      }
    }
  }
}

Просмотреть файл

@ -0,0 +1,170 @@
/*
* Copyright (C) 2018 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.mozilla.thirdparty.com.google.android.exoplayer2.database;
import android.content.ContentValues;
import android.database.Cursor;
import android.database.DatabaseUtils;
import android.database.SQLException;
import android.database.sqlite.SQLiteDatabase;
import androidx.annotation.IntDef;
import androidx.annotation.VisibleForTesting;
import java.lang.annotation.Documented;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
/**
 * Utility methods for accessing versions of ExoPlayer database components. This allows them to be
 * versioned independently to the version of the containing database.
 *
 * <p>Each (feature, instance) pair stores a single integer version in a shared table named
 * {@code ExoPlayerVersions}. This class is a static utility and is not instantiable.
 */
public final class VersionTable {

  /** Returned by {@link #getVersion(SQLiteDatabase, int, String)} if the version is unset. */
  public static final int VERSION_UNSET = -1;
  /** Version of tables used for offline functionality. */
  public static final int FEATURE_OFFLINE = 0;
  /** Version of tables used for cache content metadata. */
  public static final int FEATURE_CACHE_CONTENT_METADATA = 1;
  /** Version of tables used for cache file metadata. */
  public static final int FEATURE_CACHE_FILE_METADATA = 2;

  private static final String TABLE_NAME = DatabaseProvider.TABLE_PREFIX + "Versions";

  private static final String COLUMN_FEATURE = "feature";
  private static final String COLUMN_INSTANCE_UID = "instance_uid";
  private static final String COLUMN_VERSION = "version";

  // Selection clause matching a single (feature, instance) row; arguments are supplied by
  // featureAndInstanceUidArguments.
  private static final String WHERE_FEATURE_AND_INSTANCE_UID_EQUALS =
      COLUMN_FEATURE + " = ? AND " + COLUMN_INSTANCE_UID + " = ?";

  // The composite key guarantees at most one version row per (feature, instance) pair, which is
  // what makes replaceOrThrow in setVersion an upsert.
  private static final String PRIMARY_KEY =
      "PRIMARY KEY (" + COLUMN_FEATURE + ", " + COLUMN_INSTANCE_UID + ")";
  private static final String SQL_CREATE_TABLE_IF_NOT_EXISTS =
      "CREATE TABLE IF NOT EXISTS "
          + TABLE_NAME
          + " ("
          + COLUMN_FEATURE
          + " INTEGER NOT NULL,"
          + COLUMN_INSTANCE_UID
          + " TEXT NOT NULL,"
          + COLUMN_VERSION
          + " INTEGER NOT NULL,"
          + PRIMARY_KEY
          + ")";

  /** Identifies a versioned feature. One of the {@code FEATURE_*} constants. */
  @Documented
  @Retention(RetentionPolicy.SOURCE)
  @IntDef({FEATURE_OFFLINE, FEATURE_CACHE_CONTENT_METADATA, FEATURE_CACHE_FILE_METADATA})
  private @interface Feature {}

  private VersionTable() {}

  /**
   * Sets the version of a specified instance of a specified feature.
   *
   * @param writableDatabase The database to update.
   * @param feature The feature.
   * @param instanceUid The unique identifier of the instance of the feature.
   * @param version The version.
   * @throws DatabaseIOException If an error occurs executing the SQL.
   */
  public static void setVersion(
      SQLiteDatabase writableDatabase, @Feature int feature, String instanceUid, int version)
      throws DatabaseIOException {
    try {
      // Create the table lazily on first write.
      writableDatabase.execSQL(SQL_CREATE_TABLE_IF_NOT_EXISTS);
      ContentValues row = new ContentValues();
      row.put(COLUMN_FEATURE, feature);
      row.put(COLUMN_INSTANCE_UID, instanceUid);
      row.put(COLUMN_VERSION, version);
      // Upsert: the (feature, instance_uid) primary key makes replace overwrite any existing row.
      writableDatabase.replaceOrThrow(TABLE_NAME, /* nullColumnHack= */ null, row);
    } catch (SQLException e) {
      throw new DatabaseIOException(e);
    }
  }

  /**
   * Removes the version of a specified instance of a feature.
   *
   * @param writableDatabase The database to update.
   * @param feature The feature.
   * @param instanceUid The unique identifier of the instance of the feature.
   * @throws DatabaseIOException If an error occurs executing the SQL.
   */
  public static void removeVersion(
      SQLiteDatabase writableDatabase, @Feature int feature, String instanceUid)
      throws DatabaseIOException {
    try {
      // Nothing to remove if the versions table was never created.
      if (!tableExists(writableDatabase, TABLE_NAME)) {
        return;
      }
      writableDatabase.delete(
          TABLE_NAME,
          WHERE_FEATURE_AND_INSTANCE_UID_EQUALS,
          featureAndInstanceUidArguments(feature, instanceUid));
    } catch (SQLException e) {
      throw new DatabaseIOException(e);
    }
  }

  /**
   * Returns the version of a specified instance of a feature, or {@link #VERSION_UNSET} if no
   * version is set.
   *
   * @param database The database to query.
   * @param feature The feature.
   * @param instanceUid The unique identifier of the instance of the feature.
   * @return The version, or {@link #VERSION_UNSET} if no version is set.
   * @throws DatabaseIOException If an error occurs executing the SQL.
   */
  public static int getVersion(SQLiteDatabase database, @Feature int feature, String instanceUid)
      throws DatabaseIOException {
    try {
      // A missing table simply means no version has ever been written.
      if (!tableExists(database, TABLE_NAME)) {
        return VERSION_UNSET;
      }
      String[] projection = {COLUMN_VERSION};
      try (Cursor cursor =
          database.query(
              TABLE_NAME,
              projection,
              WHERE_FEATURE_AND_INSTANCE_UID_EQUALS,
              featureAndInstanceUidArguments(feature, instanceUid),
              /* groupBy= */ null,
              /* having= */ null,
              /* orderBy= */ null)) {
        // At most one row can match the primary key; absent row means unset.
        return cursor.moveToFirst() ? cursor.getInt(/* COLUMN_VERSION index */ 0) : VERSION_UNSET;
      }
    } catch (SQLException e) {
      throw new DatabaseIOException(e);
    }
  }

  /** Returns whether {@code tableName} exists, by counting matches in {@code sqlite_master}. */
  @VisibleForTesting
  /* package */ static boolean tableExists(SQLiteDatabase readableDatabase, String tableName) {
    return DatabaseUtils.queryNumEntries(
            readableDatabase, "sqlite_master", "tbl_name = ?", new String[] {tableName})
        > 0;
  }

  /** Builds selection arguments for {@link #WHERE_FEATURE_AND_INSTANCE_UID_EQUALS}. */
  private static String[] featureAndInstanceUidArguments(int feature, String instanceUid) {
    return new String[] {Integer.toString(feature), instanceUid};
  }
}

Просмотреть файл

@ -0,0 +1,19 @@
/*
* Copyright (C) 2019 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
@NonNullApi
package org.mozilla.thirdparty.com.google.android.exoplayer2.database;
import org.mozilla.thirdparty.com.google.android.exoplayer2.util.NonNullApi;

Просмотреть файл

@ -53,6 +53,11 @@ public abstract class Buffer {
return getFlag(C.BUFFER_FLAG_KEY_FRAME);
}
/** Returns whether the {@link C#BUFFER_FLAG_HAS_SUPPLEMENTAL_DATA} flag is set. */
public final boolean hasSupplementalData() {
return getFlag(C.BUFFER_FLAG_HAS_SUPPLEMENTAL_DATA);
}
/**
* Replaces this buffer's flags with {@code flags}.
*

Просмотреть файл

@ -25,44 +25,58 @@ import org.mozilla.thirdparty.com.google.android.exoplayer2.util.Util;
public final class CryptoInfo {
/**
* The 16 byte initialization vector. If the initialization vector of the content is shorter than
* 16 bytes, 0 byte padding is appended to extend the vector to the required 16 byte length.
*
* @see android.media.MediaCodec.CryptoInfo#iv
*/
public byte[] iv;
/**
* The 16 byte key id.
*
* @see android.media.MediaCodec.CryptoInfo#key
*/
public byte[] key;
/**
* The type of encryption that has been applied. Must be one of the {@link C.CryptoMode} values.
*
* @see android.media.MediaCodec.CryptoInfo#mode
*/
@C.CryptoMode
public int mode;
@C.CryptoMode public int mode;
/**
* The number of leading unencrypted bytes in each sub-sample. If null, all bytes are treated as
* encrypted and {@link #numBytesOfEncryptedData} must be specified.
*
* @see android.media.MediaCodec.CryptoInfo#numBytesOfClearData
*/
public int[] numBytesOfClearData;
/**
* The number of trailing encrypted bytes in each sub-sample. If null, all bytes are treated as
* clear and {@link #numBytesOfClearData} must be specified.
*
* @see android.media.MediaCodec.CryptoInfo#numBytesOfEncryptedData
*/
public int[] numBytesOfEncryptedData;
/**
* The number of subSamples that make up the buffer's contents.
*
* @see android.media.MediaCodec.CryptoInfo#numSubSamples
*/
public int numSubSamples;
/**
* @see android.media.MediaCodec.CryptoInfo.Pattern
*/
public int patternBlocksToEncrypt;
public int encryptedBlocks;
/**
* @see android.media.MediaCodec.CryptoInfo.Pattern
*/
public int patternBlocksToSkip;
public int clearBlocks;
private final android.media.MediaCodec.CryptoInfo frameworkCryptoInfo;
private final PatternHolderV24 patternHolder;
public CryptoInfo() {
frameworkCryptoInfo = Util.SDK_INT >= 16 ? newFrameworkCryptoInfoV16() : null;
frameworkCryptoInfo = new android.media.MediaCodec.CryptoInfo();
patternHolder = Util.SDK_INT >= 24 ? new PatternHolderV24(frameworkCryptoInfo) : null;
}
@ -70,51 +84,17 @@ public final class CryptoInfo {
* @see android.media.MediaCodec.CryptoInfo#set(int, int[], int[], byte[], byte[], int)
*/
public void set(int numSubSamples, int[] numBytesOfClearData, int[] numBytesOfEncryptedData,
byte[] key, byte[] iv, @C.CryptoMode int mode) {
byte[] key, byte[] iv, @C.CryptoMode int mode, int encryptedBlocks, int clearBlocks) {
this.numSubSamples = numSubSamples;
this.numBytesOfClearData = numBytesOfClearData;
this.numBytesOfEncryptedData = numBytesOfEncryptedData;
this.key = key;
this.iv = iv;
this.mode = mode;
patternBlocksToEncrypt = 0;
patternBlocksToSkip = 0;
if (Util.SDK_INT >= 16) {
updateFrameworkCryptoInfoV16();
}
}
public void setPattern(int patternBlocksToEncrypt, int patternBlocksToSkip) {
this.patternBlocksToEncrypt = patternBlocksToEncrypt;
this.patternBlocksToSkip = patternBlocksToSkip;
if (Util.SDK_INT >= 24) {
patternHolder.set(patternBlocksToEncrypt, patternBlocksToSkip);
}
}
/**
* Returns an equivalent {@link android.media.MediaCodec.CryptoInfo} instance.
* <p>
* Successive calls to this method on a single {@link CryptoInfo} will return the same instance.
* Changes to the {@link CryptoInfo} will be reflected in the returned object. The return object
* should not be modified directly.
*
* @return The equivalent {@link android.media.MediaCodec.CryptoInfo} instance.
*/
@TargetApi(16)
public android.media.MediaCodec.CryptoInfo getFrameworkCryptoInfoV16() {
return frameworkCryptoInfo;
}
@TargetApi(16)
private android.media.MediaCodec.CryptoInfo newFrameworkCryptoInfoV16() {
return new android.media.MediaCodec.CryptoInfo();
}
@TargetApi(16)
private void updateFrameworkCryptoInfoV16() {
// Update fields directly because the framework's CryptoInfo.set performs an unnecessary object
// allocation on Android N.
this.encryptedBlocks = encryptedBlocks;
this.clearBlocks = clearBlocks;
// Update frameworkCryptoInfo fields directly because CryptoInfo.set performs an unnecessary
// object allocation on Android N.
frameworkCryptoInfo.numSubSamples = numSubSamples;
frameworkCryptoInfo.numBytesOfClearData = numBytesOfClearData;
frameworkCryptoInfo.numBytesOfEncryptedData = numBytesOfEncryptedData;
@ -122,26 +102,43 @@ public final class CryptoInfo {
frameworkCryptoInfo.iv = iv;
frameworkCryptoInfo.mode = mode;
if (Util.SDK_INT >= 24) {
patternHolder.set(patternBlocksToEncrypt, patternBlocksToSkip);
patternHolder.set(encryptedBlocks, clearBlocks);
}
}
/**
* Returns an equivalent {@link android.media.MediaCodec.CryptoInfo} instance.
*
* <p>Successive calls to this method on a single {@link CryptoInfo} will return the same
* instance. Changes to the {@link CryptoInfo} will be reflected in the returned object. The
* return object should not be modified directly.
*
* @return The equivalent {@link android.media.MediaCodec.CryptoInfo} instance.
*/
public android.media.MediaCodec.CryptoInfo getFrameworkCryptoInfo() {
return frameworkCryptoInfo;
}
/** @deprecated Use {@link #getFrameworkCryptoInfo()}. */
@Deprecated
public android.media.MediaCodec.CryptoInfo getFrameworkCryptoInfoV16() {
return getFrameworkCryptoInfo();
}
@TargetApi(24)
private static final class PatternHolderV24 {
private final android.media.MediaCodec.CryptoInfo frameworkCryptoInfo;
// Reference to the two tickets (Bug 1259098, Bug 1365543)
// private final android.media.MediaCodec.CryptoInfo.Pattern pattern;
private final android.media.MediaCodec.CryptoInfo.Pattern pattern;
private PatternHolderV24(android.media.MediaCodec.CryptoInfo frameworkCryptoInfo) {
this.frameworkCryptoInfo = frameworkCryptoInfo;
// pattern = new android.media.MediaCodec.CryptoInfo.Pattern(0, 0);
pattern = new android.media.MediaCodec.CryptoInfo.Pattern(0, 0);
}
private void set(int blocksToEncrypt, int blocksToSkip) {
// pattern.set(blocksToEncrypt, blocksToSkip);
// frameworkCryptoInfo.setPattern(pattern);
private void set(int encryptedBlocks, int clearBlocks) {
pattern.set(encryptedBlocks, clearBlocks);
frameworkCryptoInfo.setPattern(pattern);
}
}

Просмотреть файл

@ -15,6 +15,8 @@
*/
package org.mozilla.thirdparty.com.google.android.exoplayer2.decoder;
import androidx.annotation.Nullable;
/**
* A media decoder.
*
@ -37,6 +39,7 @@ public interface Decoder<I, O, E extends Exception> {
* @return The input buffer, which will have been cleared, or null if a buffer isn't available.
* @throws E If a decoder error has occurred.
*/
@Nullable
I dequeueInputBuffer() throws E;
/**
@ -53,6 +56,7 @@ public interface Decoder<I, O, E extends Exception> {
* @return The output buffer, or null if an output buffer isn't available.
* @throws E If a decoder error has occurred.
*/
@Nullable
O dequeueOutputBuffer() throws E;
/**

Просмотреть файл

@ -36,6 +36,12 @@ public final class DecoderCounters {
* The number of queued input buffers.
*/
public int inputBufferCount;
/**
* The number of skipped input buffers.
* <p>
* A skipped input buffer is an input buffer that was deliberately not sent to the decoder.
*/
public int skippedInputBufferCount;
/**
* The number of rendered output buffers.
*/
@ -47,18 +53,26 @@ public final class DecoderCounters {
*/
public int skippedOutputBufferCount;
/**
* The number of dropped output buffers.
* The number of dropped buffers.
* <p>
* A dropped output buffer is an output buffer that was supposed to be rendered, but was instead
* A dropped buffer is an buffer that was supposed to be decoded/rendered, but was instead
* dropped because it could not be rendered in time.
*/
public int droppedOutputBufferCount;
public int droppedBufferCount;
/**
* The maximum number of dropped output buffers without an interleaving rendered output buffer.
* The maximum number of dropped buffers without an interleaving rendered output buffer.
* <p>
* Skipped output buffers are ignored for the purposes of calculating this value.
*/
public int maxConsecutiveDroppedOutputBufferCount;
public int maxConsecutiveDroppedBufferCount;
/**
* The number of times all buffers to a keyframe were dropped.
* <p>
* Each time buffers to a keyframe are dropped, this counter is increased by one, and the dropped
* buffer counters are increased by one (for the current output buffer) plus the number of buffers
* dropped from the source to advance to the keyframe.
*/
public int droppedToKeyframeCount;
/**
* Should be called to ensure counter values are made visible across threads. The playback thread
@ -79,11 +93,13 @@ public final class DecoderCounters {
decoderInitCount += other.decoderInitCount;
decoderReleaseCount += other.decoderReleaseCount;
inputBufferCount += other.inputBufferCount;
skippedInputBufferCount += other.skippedInputBufferCount;
renderedOutputBufferCount += other.renderedOutputBufferCount;
skippedOutputBufferCount += other.skippedOutputBufferCount;
droppedOutputBufferCount += other.droppedOutputBufferCount;
maxConsecutiveDroppedOutputBufferCount = Math.max(maxConsecutiveDroppedOutputBufferCount,
other.maxConsecutiveDroppedOutputBufferCount);
droppedBufferCount += other.droppedBufferCount;
maxConsecutiveDroppedBufferCount = Math.max(maxConsecutiveDroppedBufferCount,
other.maxConsecutiveDroppedBufferCount);
droppedToKeyframeCount += other.droppedToKeyframeCount;
}
}

Просмотреть файл

@ -15,11 +15,14 @@
*/
package org.mozilla.thirdparty.com.google.android.exoplayer2.decoder;
import android.support.annotation.IntDef;
import androidx.annotation.IntDef;
import androidx.annotation.Nullable;
import org.mozilla.thirdparty.com.google.android.exoplayer2.C;
import java.lang.annotation.Documented;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.nio.ByteBuffer;
import org.checkerframework.checker.nullness.qual.EnsuresNonNull;
/**
* Holds input for a decoder.
@ -27,11 +30,17 @@ import java.nio.ByteBuffer;
public class DecoderInputBuffer extends Buffer {
/**
* The buffer replacement mode, which may disable replacement.
* The buffer replacement mode, which may disable replacement. One of {@link
* #BUFFER_REPLACEMENT_MODE_DISABLED}, {@link #BUFFER_REPLACEMENT_MODE_NORMAL} or {@link
* #BUFFER_REPLACEMENT_MODE_DIRECT}.
*/
@Documented
@Retention(RetentionPolicy.SOURCE)
@IntDef({BUFFER_REPLACEMENT_MODE_DISABLED, BUFFER_REPLACEMENT_MODE_NORMAL,
BUFFER_REPLACEMENT_MODE_DIRECT})
@IntDef({
BUFFER_REPLACEMENT_MODE_DISABLED,
BUFFER_REPLACEMENT_MODE_NORMAL,
BUFFER_REPLACEMENT_MODE_DIRECT
})
public @interface BufferReplacementMode {}
/**
* Disallows buffer replacement.
@ -51,16 +60,28 @@ public class DecoderInputBuffer extends Buffer {
*/
public final CryptoInfo cryptoInfo;
/** The buffer's data, or {@code null} if no data has been set. */
@Nullable public ByteBuffer data;
// TODO: Remove this temporary signaling once end-of-stream propagation for clips using content
// protection is fixed. See [Internal: b/153326944] for details.
/**
* The buffer's data, or {@code null} if no data has been set.
* Whether the last attempt to read a sample into this buffer failed due to not yet having the DRM
* keys associated with the next sample.
*/
public ByteBuffer data;
public boolean waitingForKeys;
/**
* The time at which the sample should be presented.
*/
public long timeUs;
/**
* Supplemental data related to the buffer, if {@link #hasSupplementalData()} returns true. If
* present, the buffer is populated with supplemental data from position 0 to its limit.
*/
@Nullable public ByteBuffer supplementalData;
@BufferReplacementMode private final int bufferReplacementMode;
/**
@ -82,11 +103,26 @@ public class DecoderInputBuffer extends Buffer {
this.bufferReplacementMode = bufferReplacementMode;
}
/**
* Clears {@link #supplementalData} and ensures that it's large enough to accommodate {@code
* length} bytes.
*
* @param length The length of the supplemental data that must be accommodated, in bytes.
*/
@EnsuresNonNull("supplementalData")
public void resetSupplementalData(int length) {
if (supplementalData == null || supplementalData.capacity() < length) {
supplementalData = ByteBuffer.allocate(length);
} else {
supplementalData.clear();
}
}
/**
* Ensures that {@link #data} is large enough to accommodate a write of a given length at its
* current position.
* <p>
* If the capacity of {@link #data} is sufficient this method does nothing. If the capacity is
*
* <p>If the capacity of {@link #data} is sufficient this method does nothing. If the capacity is
* insufficient then an attempt is made to replace {@link #data} with a new {@link ByteBuffer}
* whose capacity is sufficient. Data up to the current position is copied to the new buffer.
*
@ -94,7 +130,8 @@ public class DecoderInputBuffer extends Buffer {
* @throws IllegalStateException If there is insufficient capacity to accommodate the write and
* the buffer replacement mode of the holder is {@link #BUFFER_REPLACEMENT_MODE_DISABLED}.
*/
public void ensureSpaceForWrite(int length) throws IllegalStateException {
@EnsuresNonNull("data")
public void ensureSpaceForWrite(int length) {
if (data == null) {
data = createReplacementByteBuffer(length);
return;
@ -108,10 +145,10 @@ public class DecoderInputBuffer extends Buffer {
}
// Instantiate a new buffer if possible.
ByteBuffer newData = createReplacementByteBuffer(requiredCapacity);
newData.order(data.order());
// Copy data up to the current position from the old buffer to the new one.
if (position > 0) {
data.position(0);
data.limit(position);
data.flip();
newData.put(data);
}
// Set the new buffer.
@ -134,12 +171,15 @@ public class DecoderInputBuffer extends Buffer {
}
/**
* Flips {@link #data} in preparation for being queued to a decoder.
* Flips {@link #data} and {@link #supplementalData} in preparation for being queued to a decoder.
*
* @see java.nio.Buffer#flip()
*/
public final void flip() {
data.flip();
if (supplementalData != null) {
supplementalData.flip();
}
}
@Override
@ -148,6 +188,10 @@ public class DecoderInputBuffer extends Buffer {
if (data != null) {
data.clear();
}
if (supplementalData != null) {
supplementalData.clear();
}
waitingForKeys = false;
}
private ByteBuffer createReplacementByteBuffer(int requiredCapacity) {

Просмотреть файл

@ -15,21 +15,23 @@
*/
package org.mozilla.thirdparty.com.google.android.exoplayer2.decoder;
import androidx.annotation.CallSuper;
import androidx.annotation.Nullable;
import org.mozilla.thirdparty.com.google.android.exoplayer2.C;
import org.mozilla.thirdparty.com.google.android.exoplayer2.util.Assertions;
import java.util.LinkedList;
import java.util.ArrayDeque;
/**
* Base class for {@link Decoder}s that use their own decode thread.
*/
public abstract class SimpleDecoder<I extends DecoderInputBuffer, O extends OutputBuffer,
E extends Exception> implements Decoder<I, O, E> {
/** Base class for {@link Decoder}s that use their own decode thread. */
@SuppressWarnings("UngroupedOverloads")
public abstract class SimpleDecoder<
I extends DecoderInputBuffer, O extends OutputBuffer, E extends Exception>
implements Decoder<I, O, E> {
private final Thread decodeThread;
private final Object lock;
private final LinkedList<I> queuedInputBuffers;
private final LinkedList<O> queuedOutputBuffers;
private final ArrayDeque<I> queuedInputBuffers;
private final ArrayDeque<O> queuedOutputBuffers;
private final I[] availableInputBuffers;
private final O[] availableOutputBuffers;
@ -48,8 +50,8 @@ public abstract class SimpleDecoder<I extends DecoderInputBuffer, O extends Outp
*/
protected SimpleDecoder(I[] inputBuffers, O[] outputBuffers) {
lock = new Object();
queuedInputBuffers = new LinkedList<>();
queuedOutputBuffers = new LinkedList<>();
queuedInputBuffers = new ArrayDeque<>();
queuedOutputBuffers = new ArrayDeque<>();
availableInputBuffers = inputBuffers;
availableInputBufferCount = inputBuffers.length;
for (int i = 0; i < availableInputBufferCount; i++) {
@ -85,6 +87,7 @@ public abstract class SimpleDecoder<I extends DecoderInputBuffer, O extends Outp
}
@Override
@Nullable
public final I dequeueInputBuffer() throws E {
synchronized (lock) {
maybeThrowException();
@ -107,6 +110,7 @@ public abstract class SimpleDecoder<I extends DecoderInputBuffer, O extends Outp
}
@Override
@Nullable
public final O dequeueOutputBuffer() throws E {
synchronized (lock) {
maybeThrowException();
@ -122,6 +126,7 @@ public abstract class SimpleDecoder<I extends DecoderInputBuffer, O extends Outp
*
* @param outputBuffer The output buffer being released.
*/
@CallSuper
protected void releaseOutputBuffer(O outputBuffer) {
synchronized (lock) {
releaseOutputBufferInternal(outputBuffer);
@ -142,11 +147,13 @@ public abstract class SimpleDecoder<I extends DecoderInputBuffer, O extends Outp
releaseInputBufferInternal(queuedInputBuffers.removeFirst());
}
while (!queuedOutputBuffers.isEmpty()) {
releaseOutputBufferInternal(queuedOutputBuffers.removeFirst());
queuedOutputBuffers.removeFirst().release();
}
exception = null;
}
}
@CallSuper
@Override
public void release() {
synchronized (lock) {
@ -219,20 +226,33 @@ public abstract class SimpleDecoder<I extends DecoderInputBuffer, O extends Outp
if (inputBuffer.isDecodeOnly()) {
outputBuffer.addFlag(C.BUFFER_FLAG_DECODE_ONLY);
}
exception = decode(inputBuffer, outputBuffer, resetDecoder);
@Nullable E exception;
try {
exception = decode(inputBuffer, outputBuffer, resetDecoder);
} catch (RuntimeException e) {
// This can occur if a sample is malformed in a way that the decoder is not robust against.
// We don't want the process to die in this case, but we do want to propagate the error.
exception = createUnexpectedDecodeException(e);
} catch (OutOfMemoryError e) {
// This can occur if a sample is malformed in a way that causes the decoder to think it
// needs to allocate a large amount of memory. We don't want the process to die in this
// case, but we do want to propagate the error.
exception = createUnexpectedDecodeException(e);
}
if (exception != null) {
// Memory barrier to ensure that the decoder exception is visible from the playback thread.
synchronized (lock) {}
synchronized (lock) {
this.exception = exception;
}
return false;
}
}
synchronized (lock) {
if (flushed) {
releaseOutputBufferInternal(outputBuffer);
outputBuffer.release();
} else if (outputBuffer.isDecodeOnly()) {
skippedOutputBufferCount++;
releaseOutputBufferInternal(outputBuffer);
outputBuffer.release();
} else {
outputBuffer.skippedOutputBufferCount = skippedOutputBufferCount;
skippedOutputBufferCount = 0;
@ -269,18 +289,26 @@ public abstract class SimpleDecoder<I extends DecoderInputBuffer, O extends Outp
*/
protected abstract O createOutputBuffer();
/**
* Creates an exception to propagate for an unexpected decode error.
*
* @param error The unexpected decode error.
* @return The exception to propagate.
*/
protected abstract E createUnexpectedDecodeException(Throwable error);
/**
* Decodes the {@code inputBuffer} and stores any decoded output in {@code outputBuffer}.
*
* @param inputBuffer The buffer to decode.
* @param outputBuffer The output buffer to store decoded data. The flag
* {@link C#BUFFER_FLAG_DECODE_ONLY} will be set if the same flag is set on
* {@code inputBuffer}, but may be set/unset as required. If the flag is set when the call
* returns then the output buffer will not be made available to dequeue. The output buffer
* may not have been populated in this case.
* @param outputBuffer The output buffer to store decoded data. The flag {@link
* C#BUFFER_FLAG_DECODE_ONLY} will be set if the same flag is set on {@code inputBuffer}, but
* may be set/unset as required. If the flag is set when the call returns then the output
* buffer will not be made available to dequeue. The output buffer may not have been populated
* in this case.
* @param reset Whether the decoder must be reset before decoding.
* @return A decoder exception if an error occurred, or null if decoding was successful.
*/
@Nullable
protected abstract E decode(I inputBuffer, O outputBuffer, boolean reset);
}

Просмотреть файл

@ -15,7 +15,9 @@
*/
package org.mozilla.thirdparty.com.google.android.exoplayer2.decoder;
import androidx.annotation.Nullable;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
/**
* Buffer for {@link SimpleDecoder} output.
@ -24,7 +26,7 @@ public class SimpleOutputBuffer extends OutputBuffer {
private final SimpleDecoder<?, SimpleOutputBuffer, ?> owner;
public ByteBuffer data;
@Nullable public ByteBuffer data;
public SimpleOutputBuffer(SimpleDecoder<?, SimpleOutputBuffer, ?> owner) {
this.owner = owner;
@ -40,7 +42,7 @@ public class SimpleOutputBuffer extends OutputBuffer {
public ByteBuffer init(long timeUs, int size) {
this.timeUs = timeUs;
if (data == null || data.capacity() < size) {
data = ByteBuffer.allocateDirect(size);
data = ByteBuffer.allocateDirect(size).order(ByteOrder.nativeOrder());
}
data.position(0);
data.limit(size);

Просмотреть файл

@ -0,0 +1,19 @@
/*
* Copyright (C) 2019 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
@NonNullApi
package org.mozilla.thirdparty.com.google.android.exoplayer2.decoder;
import org.mozilla.thirdparty.com.google.android.exoplayer2.util.NonNullApi;

Просмотреть файл

@ -0,0 +1,97 @@
/*
* Copyright (C) 2017 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.mozilla.thirdparty.com.google.android.exoplayer2.drm;
import org.mozilla.thirdparty.com.google.android.exoplayer2.util.Log;
import org.mozilla.thirdparty.com.google.android.exoplayer2.util.Util;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
/** Utility methods for ClearKey. */
/* package */ final class ClearKeyUtil {

  private static final String TAG = "ClearKeyUtil";

  private ClearKeyUtil() {
    // Static utility class; not instantiable.
  }

  /**
   * Adjusts ClearKey request data obtained from the Android ClearKey CDM to be spec compliant.
   *
   * @param request The request data.
   * @return The adjusted request data.
   */
  public static byte[] adjustRequestData(byte[] request) {
    if (Util.SDK_INT >= 27) {
      // O-MR1 and later already emit spec compliant requests; nothing to fix.
      return request;
    }
    // Prior to O-MR1 the ClearKey CDM encoded the values in the "kids" array using Base64 encoding
    // rather than Base64Url encoding. See [Internal: b/64388098]. We know the exact request format
    // from the platform's InitDataParser.cpp. Since there aren't any "+" or "/" symbols elsewhere
    // in the request, it's safe to fix the encoding by replacement through the whole request.
    return Util.getUtf8Bytes(base64ToBase64Url(Util.fromUtf8Bytes(request)));
  }

  /**
   * Adjusts ClearKey response data to be suitable for providing to the Android ClearKey CDM.
   *
   * @param response The response data.
   * @return The adjusted response data.
   */
  public static byte[] adjustResponseData(byte[] response) {
    if (Util.SDK_INT >= 27) {
      // O-MR1 and later accept Base64Url encoded responses directly.
      return response;
    }
    // Prior to O-MR1 the ClearKey CDM expected Base64 encoding rather than Base64Url encoding for
    // the "k" and "kid" strings. See [Internal: b/64388098]. We know that the ClearKey CDM only
    // looks at the k, kid and kty parameters in each key, so can ignore the rest of the response.
    try {
      JSONObject parsedResponse = new JSONObject(Util.fromUtf8Bytes(response));
      JSONArray keys = parsedResponse.getJSONArray("keys");
      StringBuilder rebuiltResponse = new StringBuilder("{\"keys\":[");
      int keyCount = keys.length();
      for (int index = 0; index < keyCount; index++) {
        if (index > 0) {
          rebuiltResponse.append(",");
        }
        JSONObject key = keys.getJSONObject(index);
        rebuiltResponse
            .append("{\"k\":\"")
            .append(base64UrlToBase64(key.getString("k")))
            .append("\",\"kid\":\"")
            .append(base64UrlToBase64(key.getString("kid")))
            .append("\",\"kty\":\"")
            .append(key.getString("kty"))
            .append("\"}");
      }
      rebuiltResponse.append("]}");
      return Util.getUtf8Bytes(rebuiltResponse.toString());
    } catch (JSONException e) {
      Log.e(TAG, "Failed to adjust response data: " + Util.fromUtf8Bytes(response), e);
      return response;
    }
  }

  /** Maps the Base64 alphabet characters "+" and "/" to their Base64Url equivalents. */
  private static String base64ToBase64Url(String base64) {
    return base64.replace('+', '-').replace('/', '_');
  }

  /** Maps the Base64Url alphabet characters "-" and "_" to their Base64 equivalents. */
  private static String base64UrlToBase64(String base64Url) {
    return base64Url.replace('-', '+').replace('_', '/');
  }
}

Просмотреть файл

@ -1,20 +1,37 @@
/*
* Copyright (C) 2017 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.mozilla.thirdparty.com.google.android.exoplayer2.drm;
/**
 * Thrown when a non-platform component fails to decrypt data.
 */
public class DecryptionException extends Exception {

  /**
   * A component specific error code.
   */
  public final int errorCode;

  /**
   * @param errorCode A component specific error code.
   * @param message The detail message.
   */
  public DecryptionException(int errorCode, String message) {
    super(message);
    this.errorCode = errorCode;
  }

  /**
   * Returns the component specific error code.
   *
   * @deprecated Use the public {@link #errorCode} field directly.
   */
  @Deprecated
  public int getErrorCode() {
    return errorCode;
  }
}

Просмотреть файл

@ -0,0 +1,607 @@
/*
* Copyright (C) 2016 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.mozilla.thirdparty.com.google.android.exoplayer2.drm;
import android.annotation.SuppressLint;
import android.annotation.TargetApi;
import android.media.NotProvisionedException;
import android.os.Handler;
import android.os.HandlerThread;
import android.os.Looper;
import android.os.Message;
import android.os.SystemClock;
import android.util.Pair;
import androidx.annotation.Nullable;
import org.mozilla.thirdparty.com.google.android.exoplayer2.C;
import org.mozilla.thirdparty.com.google.android.exoplayer2.drm.DrmInitData.SchemeData;
import org.mozilla.thirdparty.com.google.android.exoplayer2.drm.ExoMediaDrm.KeyRequest;
import org.mozilla.thirdparty.com.google.android.exoplayer2.drm.ExoMediaDrm.ProvisionRequest;
import org.mozilla.thirdparty.com.google.android.exoplayer2.upstream.LoadErrorHandlingPolicy;
import org.mozilla.thirdparty.com.google.android.exoplayer2.util.Assertions;
import org.mozilla.thirdparty.com.google.android.exoplayer2.util.EventDispatcher;
import org.mozilla.thirdparty.com.google.android.exoplayer2.util.Log;
import org.mozilla.thirdparty.com.google.android.exoplayer2.util.Util;
import java.io.IOException;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.UUID;
import org.checkerframework.checker.nullness.qual.EnsuresNonNullIf;
import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
import org.checkerframework.checker.nullness.qual.RequiresNonNull;
/** A {@link DrmSession} that supports playbacks using {@link ExoMediaDrm}. */
@TargetApi(18)
/* package */ class DefaultDrmSession<T extends ExoMediaCrypto> implements DrmSession<T> {

  /** Thrown when an unexpected exception or error is thrown during provisioning or key requests. */
  public static final class UnexpectedDrmSessionException extends IOException {

    public UnexpectedDrmSessionException(Throwable cause) {
      super("Unexpected " + cause.getClass().getSimpleName() + ": " + cause.getMessage(), cause);
    }
  }

  /** Manages provisioning requests. */
  public interface ProvisioningManager<T extends ExoMediaCrypto> {

    /**
     * Called when a session requires provisioning. The manager <em>may</em> call {@link
     * #provision()} to have this session perform the provisioning operation. The manager
     * <em>will</em> call {@link DefaultDrmSession#onProvisionCompleted()} when provisioning has
     * completed, or {@link DefaultDrmSession#onProvisionError} if provisioning fails.
     *
     * @param session The session.
     */
    void provisionRequired(DefaultDrmSession<T> session);

    /**
     * Called by a session when it fails to perform a provisioning operation.
     *
     * @param error The error that occurred.
     */
    void onProvisionError(Exception error);

    /** Called by a session when it successfully completes a provisioning operation. */
    void onProvisionCompleted();
  }

  /** Callback to be notified when the session is released. */
  public interface ReleaseCallback<T extends ExoMediaCrypto> {

    /**
     * Called immediately after releasing session resources.
     *
     * @param session The session.
     */
    void onSessionReleased(DefaultDrmSession<T> session);
  }

  private static final String TAG = "DefaultDrmSession";

  // Message types shared by ResponseHandler (playback thread) and RequestHandler (background
  // thread).
  private static final int MSG_PROVISION = 0;
  private static final int MSG_KEYS = 1;

  // Offline licenses within this many seconds of expiry are proactively renewed in MODE_PLAYBACK.
  private static final int MAX_LICENSE_DURATION_TO_RENEW_SECONDS = 60;

  /** The DRM scheme datas, or null if this session uses offline keys. */
  @Nullable public final List<SchemeData> schemeDatas;

  private final ExoMediaDrm<T> mediaDrm;
  private final ProvisioningManager<T> provisioningManager;
  private final ReleaseCallback<T> releaseCallback;
  private final @DefaultDrmSessionManager.Mode int mode;
  private final boolean playClearSamplesWithoutKeys;
  private final boolean isPlaceholderSession;
  private final HashMap<String, String> keyRequestParameters;
  private final EventDispatcher<DefaultDrmSessionEventListener> eventDispatcher;
  private final LoadErrorHandlingPolicy loadErrorHandlingPolicy;

  /* package */ final MediaDrmCallback callback;
  /* package */ final UUID uuid;
  // Delivers request/provision responses back on the playback looper.
  /* package */ final ResponseHandler responseHandler;

  private @DrmSession.State int state;
  // Number of acquire() calls not yet balanced by release(); resources live while > 0.
  private int referenceCount;
  @Nullable private HandlerThread requestHandlerThread;
  @Nullable private RequestHandler requestHandler;
  @Nullable private T mediaCrypto;
  @Nullable private DrmSessionException lastException;
  // Set by openInternal() on success; null while the session is not open.
  @Nullable private byte[] sessionId;
  @MonotonicNonNull private byte[] offlineLicenseKeySetId;

  // Pending requests; responses for anything else are treated as stale and dropped.
  @Nullable private KeyRequest currentKeyRequest;
  @Nullable private ProvisionRequest currentProvisionRequest;

  /**
   * Instantiates a new DRM session.
   *
   * @param uuid The UUID of the drm scheme.
   * @param mediaDrm The media DRM.
   * @param provisioningManager The manager for provisioning.
   * @param releaseCallback The {@link ReleaseCallback}.
   * @param schemeDatas DRM scheme datas for this session, or null if an {@code
   *     offlineLicenseKeySetId} is provided or if {@code isPlaceholderSession} is true.
   * @param mode The DRM mode. Ignored if {@code isPlaceholderSession} is true.
   * @param playClearSamplesWithoutKeys Whether clear samples may be played before keys are loaded.
   * @param isPlaceholderSession Whether this session is not expected to acquire any keys.
   * @param offlineLicenseKeySetId The offline license key set identifier, or null when not using
   *     offline keys.
   * @param keyRequestParameters Key request parameters.
   * @param callback The media DRM callback.
   * @param playbackLooper The playback looper.
   * @param eventDispatcher The dispatcher for DRM session manager events.
   * @param loadErrorHandlingPolicy The {@link LoadErrorHandlingPolicy} for key and provisioning
   *     requests.
   */
  // the constructor does not initialize fields: sessionId
  @SuppressWarnings("nullness:initialization.fields.uninitialized")
  public DefaultDrmSession(
      UUID uuid,
      ExoMediaDrm<T> mediaDrm,
      ProvisioningManager<T> provisioningManager,
      ReleaseCallback<T> releaseCallback,
      @Nullable List<SchemeData> schemeDatas,
      @DefaultDrmSessionManager.Mode int mode,
      boolean playClearSamplesWithoutKeys,
      boolean isPlaceholderSession,
      @Nullable byte[] offlineLicenseKeySetId,
      HashMap<String, String> keyRequestParameters,
      MediaDrmCallback callback,
      Looper playbackLooper,
      EventDispatcher<DefaultDrmSessionEventListener> eventDispatcher,
      LoadErrorHandlingPolicy loadErrorHandlingPolicy) {
    if (mode == DefaultDrmSessionManager.MODE_QUERY
        || mode == DefaultDrmSessionManager.MODE_RELEASE) {
      // Querying or releasing an offline license requires its key set id up front.
      Assertions.checkNotNull(offlineLicenseKeySetId);
    }
    this.uuid = uuid;
    this.provisioningManager = provisioningManager;
    this.releaseCallback = releaseCallback;
    this.mediaDrm = mediaDrm;
    this.mode = mode;
    this.playClearSamplesWithoutKeys = playClearSamplesWithoutKeys;
    this.isPlaceholderSession = isPlaceholderSession;
    if (offlineLicenseKeySetId != null) {
      // Offline keys and scheme datas are mutually exclusive.
      this.offlineLicenseKeySetId = offlineLicenseKeySetId;
      this.schemeDatas = null;
    } else {
      this.schemeDatas = Collections.unmodifiableList(Assertions.checkNotNull(schemeDatas));
    }
    this.keyRequestParameters = keyRequestParameters;
    this.callback = callback;
    this.eventDispatcher = eventDispatcher;
    this.loadErrorHandlingPolicy = loadErrorHandlingPolicy;
    state = STATE_OPENING;
    responseHandler = new ResponseHandler(playbackLooper);
  }

  /** Returns whether {@code sessionId} equals this session's id. */
  public boolean hasSessionId(byte[] sessionId) {
    return Arrays.equals(this.sessionId, sessionId);
  }

  /**
   * Called when a MediaDrm event is received for this session. Only {@link
   * ExoMediaDrm#EVENT_KEY_REQUIRED} is acted upon.
   */
  public void onMediaDrmEvent(int what) {
    switch (what) {
      case ExoMediaDrm.EVENT_KEY_REQUIRED:
        onKeysRequired();
        break;
      default:
        break;
    }
  }

  // Provisioning implementation.

  /** Posts a provisioning request to the background request handler. */
  public void provision() {
    currentProvisionRequest = mediaDrm.getProvisionRequest();
    Util.castNonNull(requestHandler)
        .post(
            MSG_PROVISION,
            Assertions.checkNotNull(currentProvisionRequest),
            /* allowRetry= */ true);
  }

  /** Called by the provisioning manager when provisioning completes; retries opening the session. */
  public void onProvisionCompleted() {
    // Don't re-enter provisioning if opening still fails with NotProvisionedException.
    if (openInternal(false)) {
      doLicense(true);
    }
  }

  /** Called by the provisioning manager when provisioning fails. */
  public void onProvisionError(Exception error) {
    onError(error);
  }

  // DrmSession implementation.

  @Override
  @DrmSession.State
  public final int getState() {
    return state;
  }

  @Override
  public boolean playClearSamplesWithoutKeys() {
    return playClearSamplesWithoutKeys;
  }

  @Override
  public final @Nullable DrmSessionException getError() {
    return state == STATE_ERROR ? lastException : null;
  }

  @Override
  public final @Nullable T getMediaCrypto() {
    return mediaCrypto;
  }

  @Override
  @Nullable
  public Map<String, String> queryKeyStatus() {
    return sessionId == null ? null : mediaDrm.queryKeyStatus(sessionId);
  }

  @Override
  @Nullable
  public byte[] getOfflineLicenseKeySetId() {
    return offlineLicenseKeySetId;
  }

  @Override
  public void acquire() {
    Assertions.checkState(referenceCount >= 0);
    if (++referenceCount == 1) {
      // First acquisition: spin up the background request thread and open the session.
      Assertions.checkState(state == STATE_OPENING);
      requestHandlerThread = new HandlerThread("DrmRequestHandler");
      requestHandlerThread.start();
      requestHandler = new RequestHandler(requestHandlerThread.getLooper());
      if (openInternal(true)) {
        doLicense(true);
      }
    }
  }

  @Override
  public void release() {
    if (--referenceCount == 0) {
      // Assigning null to various non-null variables for clean-up.
      state = STATE_RELEASED;
      Util.castNonNull(responseHandler).removeCallbacksAndMessages(null);
      Util.castNonNull(requestHandler).removeCallbacksAndMessages(null);
      requestHandler = null;
      Util.castNonNull(requestHandlerThread).quit();
      requestHandlerThread = null;
      mediaCrypto = null;
      lastException = null;
      currentKeyRequest = null;
      currentProvisionRequest = null;
      if (sessionId != null) {
        mediaDrm.closeSession(sessionId);
        sessionId = null;
        eventDispatcher.dispatch(DefaultDrmSessionEventListener::onDrmSessionReleased);
      }
      releaseCallback.onSessionReleased(this);
    }
  }

  // Internal methods.

  /**
   * Try to open a session, do provisioning if necessary.
   *
   * @param allowProvisioning if provisioning is allowed, set this to false when calling from
   *     processing provision response.
   * @return true on success, false otherwise.
   */
  @EnsuresNonNullIf(result = true, expression = "sessionId")
  private boolean openInternal(boolean allowProvisioning) {
    if (isOpen()) {
      // Already opened
      return true;
    }

    try {
      sessionId = mediaDrm.openSession();
      mediaCrypto = mediaDrm.createMediaCrypto(sessionId);
      eventDispatcher.dispatch(DefaultDrmSessionEventListener::onDrmSessionAcquired);
      state = STATE_OPENED;
      Assertions.checkNotNull(sessionId);
      return true;
    } catch (NotProvisionedException e) {
      if (allowProvisioning) {
        provisioningManager.provisionRequired(this);
      } else {
        onError(e);
      }
    } catch (Exception e) {
      onError(e);
    }

    return false;
  }

  // Runs on the playback looper via ResponseHandler.
  private void onProvisionResponse(Object request, Object response) {
    if (request != currentProvisionRequest || (state != STATE_OPENING && !isOpen())) {
      // This event is stale.
      return;
    }
    currentProvisionRequest = null;

    if (response instanceof Exception) {
      provisioningManager.onProvisionError((Exception) response);
      return;
    }

    try {
      mediaDrm.provideProvisionResponse((byte[]) response);
    } catch (Exception e) {
      provisioningManager.onProvisionError(e);
      return;
    }

    provisioningManager.onProvisionCompleted();
  }

  /**
   * Requests, restores or releases keys according to {@link #mode}. No-op for placeholder
   * sessions.
   */
  @RequiresNonNull("sessionId")
  private void doLicense(boolean allowRetry) {
    if (isPlaceholderSession) {
      return;
    }
    byte[] sessionId = Util.castNonNull(this.sessionId);
    switch (mode) {
      case DefaultDrmSessionManager.MODE_PLAYBACK:
      case DefaultDrmSessionManager.MODE_QUERY:
        if (offlineLicenseKeySetId == null) {
          postKeyRequest(sessionId, ExoMediaDrm.KEY_TYPE_STREAMING, allowRetry);
        } else if (state == STATE_OPENED_WITH_KEYS || restoreKeys()) {
          long licenseDurationRemainingSec = getLicenseDurationRemainingSec();
          if (mode == DefaultDrmSessionManager.MODE_PLAYBACK
              && licenseDurationRemainingSec <= MAX_LICENSE_DURATION_TO_RENEW_SECONDS) {
            Log.d(
                TAG,
                "Offline license has expired or will expire soon. "
                    + "Remaining seconds: "
                    + licenseDurationRemainingSec);
            postKeyRequest(sessionId, ExoMediaDrm.KEY_TYPE_OFFLINE, allowRetry);
          } else if (licenseDurationRemainingSec <= 0) {
            onError(new KeysExpiredException());
          } else {
            state = STATE_OPENED_WITH_KEYS;
            eventDispatcher.dispatch(DefaultDrmSessionEventListener::onDrmKeysRestored);
          }
        }
        break;
      case DefaultDrmSessionManager.MODE_DOWNLOAD:
        if (offlineLicenseKeySetId == null || restoreKeys()) {
          postKeyRequest(sessionId, ExoMediaDrm.KEY_TYPE_OFFLINE, allowRetry);
        }
        break;
      case DefaultDrmSessionManager.MODE_RELEASE:
        Assertions.checkNotNull(offlineLicenseKeySetId);
        Assertions.checkNotNull(this.sessionId);
        // It's not necessary to restore the key (and open a session to do that) before releasing it
        // but this serves as a good sanity/fast-failure check.
        if (restoreKeys()) {
          postKeyRequest(offlineLicenseKeySetId, ExoMediaDrm.KEY_TYPE_RELEASE, allowRetry);
        }
        break;
      default:
        break;
    }
  }

  /** Restores persisted offline keys into the open session, reporting errors via onError(). */
  @RequiresNonNull({"sessionId", "offlineLicenseKeySetId"})
  private boolean restoreKeys() {
    try {
      mediaDrm.restoreKeys(sessionId, offlineLicenseKeySetId);
      return true;
    } catch (Exception e) {
      Log.e(TAG, "Error trying to restore keys.", e);
      onError(e);
    }
    return false;
  }

  // Only Widevine exposes license duration properties; other schemes never expire here.
  private long getLicenseDurationRemainingSec() {
    if (!C.WIDEVINE_UUID.equals(uuid)) {
      return Long.MAX_VALUE;
    }
    Pair<Long, Long> pair =
        Assertions.checkNotNull(WidevineUtil.getLicenseDurationRemainingSec(this));
    return Math.min(pair.first, pair.second);
  }

  // Builds a key request and posts it to the background request handler.
  private void postKeyRequest(byte[] scope, int type, boolean allowRetry) {
    try {
      currentKeyRequest = mediaDrm.getKeyRequest(scope, schemeDatas, type, keyRequestParameters);
      Util.castNonNull(requestHandler)
          .post(MSG_KEYS, Assertions.checkNotNull(currentKeyRequest), allowRetry);
    } catch (Exception e) {
      onKeysError(e);
    }
  }

  // Runs on the playback looper via ResponseHandler.
  private void onKeyResponse(Object request, Object response) {
    if (request != currentKeyRequest || !isOpen()) {
      // This event is stale.
      return;
    }
    currentKeyRequest = null;

    if (response instanceof Exception) {
      onKeysError((Exception) response);
      return;
    }

    try {
      byte[] responseData = (byte[]) response;
      if (mode == DefaultDrmSessionManager.MODE_RELEASE) {
        mediaDrm.provideKeyResponse(Util.castNonNull(offlineLicenseKeySetId), responseData);
        eventDispatcher.dispatch(DefaultDrmSessionEventListener::onDrmKeysRestored);
      } else {
        byte[] keySetId = mediaDrm.provideKeyResponse(sessionId, responseData);
        if ((mode == DefaultDrmSessionManager.MODE_DOWNLOAD
                || (mode == DefaultDrmSessionManager.MODE_PLAYBACK
                    && offlineLicenseKeySetId != null))
            && keySetId != null
            && keySetId.length != 0) {
          // Persist the key set id so the offline license can be restored or released later.
          offlineLicenseKeySetId = keySetId;
        }
        state = STATE_OPENED_WITH_KEYS;
        eventDispatcher.dispatch(DefaultDrmSessionEventListener::onDrmKeysLoaded);
      }
    } catch (Exception e) {
      onKeysError(e);
    }
  }

  // Re-requests keys (without retry) when the CDM signals EVENT_KEY_REQUIRED mid-playback.
  private void onKeysRequired() {
    if (mode == DefaultDrmSessionManager.MODE_PLAYBACK && state == STATE_OPENED_WITH_KEYS) {
      Util.castNonNull(sessionId);
      doLicense(/* allowRetry= */ false);
    }
  }

  private void onKeysError(Exception e) {
    if (e instanceof NotProvisionedException) {
      provisioningManager.provisionRequired(this);
    } else {
      onError(e);
    }
  }

  // Records the error and moves to STATE_ERROR unless keys were already loaded.
  private void onError(final Exception e) {
    lastException = new DrmSessionException(e);
    eventDispatcher.dispatch(listener -> listener.onDrmSessionManagerError(e));
    if (state != STATE_OPENED_WITH_KEYS) {
      state = STATE_ERROR;
    }
  }

  @EnsuresNonNullIf(result = true, expression = "sessionId")
  @SuppressWarnings("contracts.conditional.postcondition.not.satisfied")
  private boolean isOpen() {
    return state == STATE_OPENED || state == STATE_OPENED_WITH_KEYS;
  }

  // Internal classes.

  // Runs on the playback looper; dispatches provision/key responses back into the session.
  @SuppressLint("HandlerLeak")
  private class ResponseHandler extends Handler {

    public ResponseHandler(Looper looper) {
      super(looper);
    }

    @Override
    @SuppressWarnings("unchecked")
    public void handleMessage(Message msg) {
      Pair<Object, Object> requestAndResponse = (Pair<Object, Object>) msg.obj;
      Object request = requestAndResponse.first;
      Object response = requestAndResponse.second;
      switch (msg.what) {
        case MSG_PROVISION:
          onProvisionResponse(request, response);
          break;
        case MSG_KEYS:
          onKeyResponse(request, response);
          break;
        default:
          break;
      }
    }
  }

  // Runs on the background request thread; executes network requests and retries per policy.
  @SuppressLint("HandlerLeak")
  private class RequestHandler extends Handler {

    public RequestHandler(Looper backgroundLooper) {
      super(backgroundLooper);
    }

    void post(int what, Object request, boolean allowRetry) {
      RequestTask requestTask =
          new RequestTask(allowRetry, /* startTimeMs= */ SystemClock.elapsedRealtime(), request);
      obtainMessage(what, requestTask).sendToTarget();
    }

    @Override
    public void handleMessage(Message msg) {
      RequestTask requestTask = (RequestTask) msg.obj;
      Object response;
      try {
        switch (msg.what) {
          case MSG_PROVISION:
            response =
                callback.executeProvisionRequest(uuid, (ProvisionRequest) requestTask.request);
            break;
          case MSG_KEYS:
            response = callback.executeKeyRequest(uuid, (KeyRequest) requestTask.request);
            break;
          default:
            throw new RuntimeException();
        }
      } catch (Exception e) {
        if (maybeRetryRequest(msg, e)) {
          return;
        }
        // Exhausted retries (or retry disallowed): deliver the exception as the response.
        response = e;
      }
      responseHandler
          .obtainMessage(msg.what, Pair.create(requestTask.request, response))
          .sendToTarget();
    }

    // Re-posts the original message after a policy-determined delay. Returns false when the
    // request should not be retried (retry disallowed, count exhausted, or fatal error).
    private boolean maybeRetryRequest(Message originalMsg, Exception e) {
      RequestTask requestTask = (RequestTask) originalMsg.obj;
      if (!requestTask.allowRetry) {
        return false;
      }
      requestTask.errorCount++;
      if (requestTask.errorCount
          > loadErrorHandlingPolicy.getMinimumLoadableRetryCount(C.DATA_TYPE_DRM)) {
        return false;
      }
      IOException ioException =
          e instanceof IOException ? (IOException) e : new UnexpectedDrmSessionException(e);
      long retryDelayMs =
          loadErrorHandlingPolicy.getRetryDelayMsFor(
              C.DATA_TYPE_DRM,
              /* loadDurationMs= */ SystemClock.elapsedRealtime() - requestTask.startTimeMs,
              ioException,
              requestTask.errorCount);
      if (retryDelayMs == C.TIME_UNSET) {
        // The error is fatal.
        return false;
      }
      sendMessageDelayed(Message.obtain(originalMsg), retryDelayMs);
      return true;
    }
  }

  // Mutable retry bookkeeping for a single provision/key request.
  private static final class RequestTask {

    public final boolean allowRetry;
    public final long startTimeMs;
    public final Object request;
    public int errorCount;

    public RequestTask(boolean allowRetry, long startTimeMs, Object request) {
      this.allowRetry = allowRetry;
      this.startTimeMs = startTimeMs;
      this.request = request;
    }
  }
}

Просмотреть файл

@ -0,0 +1,51 @@
/*
* Copyright (C) 2018 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.mozilla.thirdparty.com.google.android.exoplayer2.drm;
import org.mozilla.thirdparty.com.google.android.exoplayer2.Player;
/** Listener of {@link DefaultDrmSessionManager} events. All methods default to no-ops. */
public interface DefaultDrmSessionEventListener {

  /** Called each time a drm session is acquired. */
  default void onDrmSessionAcquired() {}

  /** Called each time keys are loaded. */
  default void onDrmKeysLoaded() {}

  /**
   * Called when a drm error occurs.
   *
   * <p>This method being called does not indicate that playback has failed, or that it will fail.
   * The player may be able to recover from the error and continue. Hence applications should
   * <em>not</em> implement this method to display a user visible error or initiate an application
   * level retry ({@link Player.EventListener#onPlayerError} is the appropriate place to implement
   * such behavior). This method is called to provide the application with an opportunity to log the
   * error if it wishes to do so.
   *
   * @param error The corresponding exception.
   */
  default void onDrmSessionManagerError(Exception error) {}

  /** Called each time offline keys are restored. */
  default void onDrmKeysRestored() {}

  /** Called each time offline keys are removed. */
  default void onDrmKeysRemoved() {}

  /** Called each time a drm session is released. */
  default void onDrmSessionReleased() {}
}

Разница между файлами не показана из-за своего большого размера Загрузить разницу

Просмотреть файл

@ -17,10 +17,13 @@ package org.mozilla.thirdparty.com.google.android.exoplayer2.drm;
import android.os.Parcel;
import android.os.Parcelable;
import android.text.TextUtils;
import androidx.annotation.Nullable;
import org.mozilla.thirdparty.com.google.android.exoplayer2.C;
import org.mozilla.thirdparty.com.google.android.exoplayer2.drm.DrmInitData.SchemeData;
import org.mozilla.thirdparty.com.google.android.exoplayer2.util.Assertions;
import org.mozilla.thirdparty.com.google.android.exoplayer2.util.Util;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Comparator;
import java.util.List;
@ -31,11 +34,61 @@ import java.util.UUID;
*/
public final class DrmInitData implements Comparator<SchemeData>, Parcelable {
/**
* Merges {@link DrmInitData} obtained from a media manifest and a media stream.
*
* <p>The result is generated as follows.
*
* <ol>
* <li>Include all {@link SchemeData}s from {@code manifestData} where {@link
* SchemeData#hasData()} is true.
* <li>Include all {@link SchemeData}s in {@code mediaData} where {@link SchemeData#hasData()}
* is true and for which we did not include an entry from the manifest targeting the same
* UUID.
* <li>If available, the scheme type from the manifest is used. If not, the scheme type from the
* media is used.
* </ol>
*
* @param manifestData DRM session acquisition data obtained from the manifest.
* @param mediaData DRM session acquisition data obtained from the media.
* @return A {@link DrmInitData} obtained from merging a media manifest and a media stream.
*/
public static @Nullable DrmInitData createSessionCreationData(
@Nullable DrmInitData manifestData, @Nullable DrmInitData mediaData) {
ArrayList<SchemeData> result = new ArrayList<>();
String schemeType = null;
if (manifestData != null) {
schemeType = manifestData.schemeType;
for (SchemeData data : manifestData.schemeDatas) {
if (data.hasData()) {
result.add(data);
}
}
}
if (mediaData != null) {
if (schemeType == null) {
schemeType = mediaData.schemeType;
}
int manifestDatasCount = result.size();
for (SchemeData data : mediaData.schemeDatas) {
if (data.hasData() && !containsSchemeDataWithUuid(result, manifestDatasCount, data.uuid)) {
result.add(data);
}
}
}
return result.isEmpty() ? null : new DrmInitData(schemeType, result);
}
private final SchemeData[] schemeDatas;
// Lazily initialized hashcode.
private int hashCode;
/** The protection scheme type, or null if not applicable or unknown. */
@Nullable public final String schemeType;
/**
* Number of {@link SchemeData}s.
*/
@ -45,44 +98,61 @@ public final class DrmInitData implements Comparator<SchemeData>, Parcelable {
* @param schemeDatas Scheme initialization data for possibly multiple DRM schemes.
*/
public DrmInitData(List<SchemeData> schemeDatas) {
this(false, schemeDatas.toArray(new SchemeData[schemeDatas.size()]));
this(null, false, schemeDatas.toArray(new SchemeData[0]));
}
/**
* @param schemeType See {@link #schemeType}.
* @param schemeDatas Scheme initialization data for possibly multiple DRM schemes.
*/
public DrmInitData(@Nullable String schemeType, List<SchemeData> schemeDatas) {
this(schemeType, false, schemeDatas.toArray(new SchemeData[0]));
}
/**
* @param schemeDatas Scheme initialization data for possibly multiple DRM schemes.
*/
public DrmInitData(SchemeData... schemeDatas) {
this(true, schemeDatas);
this(null, schemeDatas);
}
private DrmInitData(boolean cloneSchemeDatas, SchemeData... schemeDatas) {
/**
* @param schemeType See {@link #schemeType}.
* @param schemeDatas Scheme initialization data for possibly multiple DRM schemes.
*/
public DrmInitData(@Nullable String schemeType, SchemeData... schemeDatas) {
this(schemeType, true, schemeDatas);
}
private DrmInitData(@Nullable String schemeType, boolean cloneSchemeDatas,
SchemeData... schemeDatas) {
this.schemeType = schemeType;
if (cloneSchemeDatas) {
schemeDatas = schemeDatas.clone();
}
// Sorting ensures that universal scheme data(i.e. data that applies to all schemes) is matched
// last. It's also required by the equals and hashcode implementations.
Arrays.sort(schemeDatas, this);
// Check for no duplicates.
for (int i = 1; i < schemeDatas.length; i++) {
if (schemeDatas[i - 1].uuid.equals(schemeDatas[i].uuid)) {
throw new IllegalArgumentException("Duplicate data for uuid: " + schemeDatas[i].uuid);
}
}
this.schemeDatas = schemeDatas;
schemeDataCount = schemeDatas.length;
// Sorting ensures that universal scheme data (i.e. data that applies to all schemes) is matched
// last. It's also required by the equals and hashcode implementations.
Arrays.sort(this.schemeDatas, this);
}
/* package */ DrmInitData(Parcel in) {
schemeDatas = in.createTypedArray(SchemeData.CREATOR);
/* package */
DrmInitData(Parcel in) {
schemeType = in.readString();
schemeDatas = Util.castNonNull(in.createTypedArray(SchemeData.CREATOR));
schemeDataCount = schemeDatas.length;
}
/**
* Retrieves data for a given DRM scheme, specified by its UUID.
*
* @deprecated Use {@link #get(int)} and {@link SchemeData#matches(UUID)} instead.
* @param uuid The DRM scheme's UUID.
* @return The initialization data for the scheme, or null if the scheme is not supported.
*/
@Deprecated
@Nullable
public SchemeData get(UUID uuid) {
for (SchemeData schemeData : schemeDatas) {
if (schemeData.matches(uuid)) {
@ -95,30 +165,66 @@ public final class DrmInitData implements Comparator<SchemeData>, Parcelable {
/**
* Retrieves the {@link SchemeData} at a given index.
*
* @param index index of the scheme to return.
* @return The {@link SchemeData} at the index.
* @param index The index of the scheme to return. Must not exceed {@link #schemeDataCount}.
* @return The {@link SchemeData} at the specified index.
*/
public SchemeData get(int index) {
return schemeDatas[index];
}
/**
 * Returns a copy of this instance carrying the given protection scheme type, or this same
 * instance when the type is already equal.
 *
 * @param schemeType A protection scheme type. May be null.
 * @return A copy with the specified protection scheme type.
 */
public DrmInitData copyWithSchemeType(@Nullable String schemeType) {
  // Avoid allocating a new instance when nothing would change.
  return Util.areEqual(this.schemeType, schemeType)
      ? this
      : new DrmInitData(schemeType, false, schemeDatas);
}
/**
 * Returns an instance containing the {@link #schemeDatas} from both this and {@code other}. The
 * {@link #schemeType} of the instances being merged must either match, or at least one scheme
 * type must be {@code null}.
 *
 * @param drmInitData The instance to merge.
 * @return The merged result.
 */
public DrmInitData merge(DrmInitData drmInitData) {
  // The two scheme types are mergeable only when they agree, or when one side is unset.
  boolean schemeTypesAreCompatible =
      schemeType == null
          || drmInitData.schemeType == null
          || TextUtils.equals(schemeType, drmInitData.schemeType);
  Assertions.checkState(schemeTypesAreCompatible);
  // Prefer this instance's scheme type; fall back to the other instance's (possibly null) type.
  String mergedSchemeType = (schemeType == null) ? drmInitData.schemeType : schemeType;
  return new DrmInitData(
      mergedSchemeType,
      Util.nullSafeArrayConcatenation(schemeDatas, drmInitData.schemeDatas));
}
@Override
public int hashCode() {
if (hashCode == 0) {
hashCode = Arrays.hashCode(schemeDatas);
int result = (schemeType == null ? 0 : schemeType.hashCode());
result = 31 * result + Arrays.hashCode(schemeDatas);
hashCode = result;
}
return hashCode;
}
@Override
public boolean equals(Object obj) {
public boolean equals(@Nullable Object obj) {
if (this == obj) {
return true;
}
if (obj == null || getClass() != obj.getClass()) {
return false;
}
return Arrays.equals(schemeDatas, ((DrmInitData) obj).schemeDatas);
DrmInitData other = (DrmInitData) obj;
return Util.areEqual(schemeType, other.schemeType)
&& Arrays.equals(schemeDatas, other.schemeDatas);
}
@Override
@ -136,6 +242,7 @@ public final class DrmInitData implements Comparator<SchemeData>, Parcelable {
@Override
public void writeToParcel(Parcel dest, int flags) {
  // Write order must match the reads in DrmInitData(Parcel): schemeType first, then the
  // typed SchemeData array.
  dest.writeString(schemeType);
  dest.writeTypedArray(schemeDatas, 0);
}
@ -154,6 +261,18 @@ public final class DrmInitData implements Comparator<SchemeData>, Parcelable {
};
// Internal methods.
/** Returns whether any of the first {@code limit} entries of {@code datas} has {@code uuid}. */
private static boolean containsSchemeDataWithUuid(
    ArrayList<SchemeData> datas, int limit, UUID uuid) {
  for (SchemeData candidate : datas.subList(0, limit)) {
    if (candidate.uuid.equals(uuid)) {
      return true;
    }
  }
  return false;
}
/**
* Scheme initialization data.
*/
@ -167,48 +286,43 @@ public final class DrmInitData implements Comparator<SchemeData>, Parcelable {
* applies to all schemes).
*/
private final UUID uuid;
/**
* The mimeType of {@link #data}.
*/
/** The URL of the server to which license requests should be made. May be null if unknown. */
@Nullable public final String licenseServerUrl;
/** The mimeType of {@link #data}. */
public final String mimeType;
/**
* The initialization data.
*/
public final byte[] data;
/**
* Whether secure decryption is required.
*/
public final boolean requiresSecureDecryption;
/** The initialization data. May be null for scheme support checks only. */
@Nullable public final byte[] data;
/**
* @param uuid The {@link UUID} of the DRM scheme, or {@link C#UUID_NIL} if the data is
* universal (i.e. applies to all schemes).
* @param mimeType The mimeType of the initialization data.
* @param data The initialization data.
* @param mimeType See {@link #mimeType}.
* @param data See {@link #data}.
*/
public SchemeData(UUID uuid, String mimeType, byte[] data) {
this(uuid, mimeType, data, false);
public SchemeData(UUID uuid, String mimeType, @Nullable byte[] data) {
this(uuid, /* licenseServerUrl= */ null, mimeType, data);
}
/**
* @param uuid The {@link UUID} of the DRM scheme, or {@link C#UUID_NIL} if the data is
* universal (i.e. applies to all schemes).
* @param mimeType The mimeType of the initialization data.
* @param data The initialization data.
* @param requiresSecureDecryption Whether secure decryption is required.
* @param licenseServerUrl See {@link #licenseServerUrl}.
* @param mimeType See {@link #mimeType}.
* @param data See {@link #data}.
*/
public SchemeData(UUID uuid, String mimeType, byte[] data, boolean requiresSecureDecryption) {
public SchemeData(
UUID uuid, @Nullable String licenseServerUrl, String mimeType, @Nullable byte[] data) {
this.uuid = Assertions.checkNotNull(uuid);
this.licenseServerUrl = licenseServerUrl;
this.mimeType = Assertions.checkNotNull(mimeType);
this.data = Assertions.checkNotNull(data);
this.requiresSecureDecryption = requiresSecureDecryption;
this.data = data;
}
/* package */ SchemeData(Parcel in) {
uuid = new UUID(in.readLong(), in.readLong());
mimeType = in.readString();
licenseServerUrl = in.readString();
mimeType = Util.castNonNull(in.readString());
data = in.createByteArray();
requiresSecureDecryption = in.readByte() != 0;
}
/**
@ -221,8 +335,35 @@ public final class DrmInitData implements Comparator<SchemeData>, Parcelable {
return C.UUID_NIL.equals(uuid) || schemeUuid.equals(uuid);
}
/**
 * Returns whether this {@link SchemeData} can be used to replace {@code other}.
 *
 * @param other A {@link SchemeData}.
 * @return Whether this {@link SchemeData} can be used to replace {@code other}.
 */
public boolean canReplace(SchemeData other) {
  // Replacement is allowed only when this instance targets the same (or the universal) scheme,
  // carries data, and the other instance does not.
  return matches(other.uuid) && hasData() && !other.hasData();
}
/**
 * Returns whether {@link #data} is non-null. A null payload means this instance is usable for
 * scheme support checks only.
 */
public boolean hasData() {
  return data != null;
}
/**
 * Returns a copy of this instance with the specified data. The {@link #uuid},
 * {@link #licenseServerUrl} and {@link #mimeType} are carried over unchanged.
 *
 * @param data The data to include in the copy. May be null.
 * @return The new instance.
 */
public SchemeData copyWithData(@Nullable byte[] data) {
  return new SchemeData(uuid, licenseServerUrl, mimeType, data);
}
@Override
public boolean equals(Object obj) {
public boolean equals(@Nullable Object obj) {
if (!(obj instanceof SchemeData)) {
return false;
}
@ -230,7 +371,9 @@ public final class DrmInitData implements Comparator<SchemeData>, Parcelable {
return true;
}
SchemeData other = (SchemeData) obj;
return mimeType.equals(other.mimeType) && Util.areEqual(uuid, other.uuid)
return Util.areEqual(licenseServerUrl, other.licenseServerUrl)
&& Util.areEqual(mimeType, other.mimeType)
&& Util.areEqual(uuid, other.uuid)
&& Arrays.equals(data, other.data);
}
@ -238,6 +381,7 @@ public final class DrmInitData implements Comparator<SchemeData>, Parcelable {
public int hashCode() {
if (hashCode == 0) {
int result = uuid.hashCode();
result = 31 * result + (licenseServerUrl == null ? 0 : licenseServerUrl.hashCode());
result = 31 * result + mimeType.hashCode();
result = 31 * result + Arrays.hashCode(data);
hashCode = result;
@ -256,12 +400,11 @@ public final class DrmInitData implements Comparator<SchemeData>, Parcelable {
public void writeToParcel(Parcel dest, int flags) {
dest.writeLong(uuid.getMostSignificantBits());
dest.writeLong(uuid.getLeastSignificantBits());
dest.writeString(licenseServerUrl);
dest.writeString(mimeType);
dest.writeByteArray(data);
dest.writeByte((byte) (requiresSecureDecryption ? 1 : 0));
}
@SuppressWarnings("hiding")
public static final Parcelable.Creator<SchemeData> CREATOR =
new Parcelable.Creator<SchemeData>() {

Просмотреть файл

@ -15,9 +15,11 @@
*/
package org.mozilla.thirdparty.com.google.android.exoplayer2.drm;
import android.annotation.TargetApi;
import android.media.MediaDrm;
import android.support.annotation.IntDef;
import androidx.annotation.IntDef;
import androidx.annotation.Nullable;
import java.io.IOException;
import java.lang.annotation.Documented;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.util.Map;
@ -25,107 +27,118 @@ import java.util.Map;
/**
* A DRM session.
*/
@TargetApi(16)
public interface DrmSession<T extends ExoMediaCrypto> {
/** Wraps the exception which is the cause of the error state. */
class DrmSessionException extends Exception {
/**
 * Invokes {@code newSession's} {@link #acquire()} and {@code previousSession's} {@link
 * #release()} in that order. Null arguments are ignored. Does nothing if {@code previousSession}
 * and {@code newSession} are the same session.
 */
static <T extends ExoMediaCrypto> void replaceSession(
    @Nullable DrmSession<T> previousSession, @Nullable DrmSession<T> newSession) {
  if (previousSession == newSession) {
    // Do nothing.
    return;
  }
  // NOTE(review): acquire-before-release ordering is part of the documented contract above —
  // presumably so that resources shared by both sessions are not released and re-created.
  if (newSession != null) {
    newSession.acquire();
  }
  if (previousSession != null) {
    previousSession.release();
  }
}
public DrmSessionException(Exception e) {
super(e);
/** Wraps the throwable which is the cause of the error state. */
class DrmSessionException extends IOException {
public DrmSessionException(Throwable cause) {
super(cause);
}
}
/**
* The state of the DRM session.
* The state of the DRM session. One of {@link #STATE_RELEASED}, {@link #STATE_ERROR}, {@link
* #STATE_OPENING}, {@link #STATE_OPENED} or {@link #STATE_OPENED_WITH_KEYS}.
*/
@Documented
@Retention(RetentionPolicy.SOURCE)
@IntDef({STATE_ERROR, STATE_CLOSED, STATE_OPENING, STATE_OPENED, STATE_OPENED_WITH_KEYS})
@IntDef({STATE_RELEASED, STATE_ERROR, STATE_OPENING, STATE_OPENED, STATE_OPENED_WITH_KEYS})
@interface State {}
/**
* The session has been released.
*/
int STATE_RELEASED = 0;
/**
* The session has encountered an error. {@link #getError()} can be used to retrieve the cause.
*/
int STATE_ERROR = 0;
/**
* The session is closed.
*/
int STATE_CLOSED = 1;
int STATE_ERROR = 1;
/**
* The session is being opened.
*/
int STATE_OPENING = 2;
/**
* The session is open, but does not yet have the keys required for decryption.
*/
/** The session is open, but does not have keys required for decryption. */
int STATE_OPENED = 3;
/**
* The session is open and has the keys required for decryption.
*/
/** The session is open and has keys required for decryption. */
int STATE_OPENED_WITH_KEYS = 4;
/**
* Returns the current state of the session.
*
* @return One of {@link #STATE_ERROR}, {@link #STATE_CLOSED}, {@link #STATE_OPENING},
* {@link #STATE_OPENED} and {@link #STATE_OPENED_WITH_KEYS}.
* Returns the current state of the session, which is one of {@link #STATE_ERROR},
* {@link #STATE_RELEASED}, {@link #STATE_OPENING}, {@link #STATE_OPENED} and
* {@link #STATE_OPENED_WITH_KEYS}.
*/
@State int getState();
/**
* Returns a {@link ExoMediaCrypto} for the open session.
* <p>
* This method may be called when the session is in the following states:
* {@link #STATE_OPENED}, {@link #STATE_OPENED_WITH_KEYS}
*
* @return A {@link ExoMediaCrypto} for the open session.
* @throws IllegalStateException If called when a session isn't opened.
*/
T getMediaCrypto();
/** Returns whether this session allows playback of clear samples prior to keys being loaded. */
default boolean playClearSamplesWithoutKeys() {
return false;
}
/**
* Whether the session requires a secure decoder for the specified mime type.
* <p>
* Normally this method should return
* {@link ExoMediaCrypto#requiresSecureDecoderComponent(String)}, however in some cases
* implementations may wish to modify the return value (i.e. to force a secure decoder even when
* one is not required).
* <p>
* This method may be called when the session is in the following states:
* {@link #STATE_OPENED}, {@link #STATE_OPENED_WITH_KEYS}
*
* @return Whether the open session requires a secure decoder for the specified mime type.
* @throws IllegalStateException If called when a session isn't opened.
*/
boolean requiresSecureDecoderComponent(String mimeType);
/**
* Returns the cause of the error state.
* <p>
* This method may be called when the session is in any state.
*
* @return An exception if the state is {@link #STATE_ERROR}. Null otherwise.
* Returns the cause of the error state, or null if {@link #getState()} is not {@link
* #STATE_ERROR}.
*/
@Nullable
DrmSessionException getError();
/**
* Returns an informative description of the key status for the session. The status is in the form
* of {name, value} pairs.
* Returns a {@link ExoMediaCrypto} for the open session, or null if called before the session has
* been opened or after it's been released.
*/
@Nullable
T getMediaCrypto();
/**
* Returns a map describing the key status for the session, or null if called before the session
* has been opened or after it's been released.
*
* <p>Since DRM license policies vary by vendor, the specific status field names are determined by
* each DRM vendor. Refer to your DRM provider documentation for definitions of the field names
* for a particular DRM engine plugin.
*
* @return A map of key status.
* @throws IllegalStateException If called when the session isn't opened.
* @return A map describing the key status for the session, or null if called before the session
* has been opened or after it's been released.
* @see MediaDrm#queryKeyStatus(byte[])
*/
@Nullable
Map<String, String> queryKeyStatus();
/**
* Returns the key set id of the offline license loaded into this session, if there is one. Null
* otherwise.
* Returns the key set id of the offline license loaded into this session, or null if there isn't
* one.
*/
@Nullable
byte[] getOfflineLicenseKeySetId();
/**
* Increments the reference count. When the caller no longer needs to use the instance, it must
* call {@link #release()} to decrement the reference count.
*/
void acquire();
/**
* Decrements the reference count. If the reference count drops to 0 underlying resources are
* released, and the instance cannot be re-used.
*/
void release();
}

Просмотреть файл

@ -15,28 +15,107 @@
*/
package org.mozilla.thirdparty.com.google.android.exoplayer2.drm;
import android.annotation.TargetApi;
import android.os.Looper;
import androidx.annotation.Nullable;
import org.mozilla.thirdparty.com.google.android.exoplayer2.C;
import org.mozilla.thirdparty.com.google.android.exoplayer2.drm.DrmInitData.SchemeData;
/**
* Manages a DRM session.
*/
@TargetApi(16)
public interface DrmSessionManager<T extends ExoMediaCrypto> {
/** Returns {@link #DUMMY}. */
@SuppressWarnings("unchecked")
static <T extends ExoMediaCrypto> DrmSessionManager<T> getDummyDrmSessionManager() {
  // Unchecked cast is safe in practice: DUMMY never produces an ExoMediaCrypto instance (its
  // sessions are ErrorStateDrmSessions, whose getMediaCrypto() returns null).
  return (DrmSessionManager<T>) DUMMY;
}

/** {@link DrmSessionManager} that supports no DRM schemes. */
DrmSessionManager<ExoMediaCrypto> DUMMY =
    new DrmSessionManager<ExoMediaCrypto>() {

      @Override
      public boolean canAcquireSession(DrmInitData drmInitData) {
        // No scheme is supported.
        return false;
      }

      @Override
      public DrmSession<ExoMediaCrypto> acquireSession(
          Looper playbackLooper, DrmInitData drmInitData) {
        // Report failure through the session's error state rather than by throwing here.
        return new ErrorStateDrmSession<>(
            new DrmSession.DrmSessionException(
                new UnsupportedDrmException(UnsupportedDrmException.REASON_UNSUPPORTED_SCHEME)));
      }

      @Override
      @Nullable
      public Class<ExoMediaCrypto> getExoMediaCryptoType(DrmInitData drmInitData) {
        // Null signals that no session can be acquired for the given DrmInitData.
        return null;
      }
    };
/**
* Acquires a {@link DrmSession} for the specified {@link DrmInitData}. The {@link DrmSession}
* must be returned to {@link #releaseSession(DrmSession)} when it is no longer required.
* Acquires any required resources.
*
* <p>{@link #release()} must be called to ensure the acquired resources are released. After
* releasing, an instance may be re-prepared.
*/
default void prepare() {
// Do nothing.
}
/** Releases any acquired resources. */
default void release() {
// Do nothing.
}
/**
* Returns whether the manager is capable of acquiring a session for the given
* {@link DrmInitData}.
*
* @param drmInitData DRM initialization data.
* @return Whether the manager is capable of acquiring a session for the given
* {@link DrmInitData}.
*/
boolean canAcquireSession(DrmInitData drmInitData);
/**
* Returns a {@link DrmSession} that does not execute key requests, with an incremented reference
* count. When the caller no longer needs to use the instance, it must call {@link
* DrmSession#release()} to decrement the reference count.
*
* <p>Placeholder {@link DrmSession DrmSessions} may be used to configure secure decoders for
* playback of clear content periods. This can reduce the cost of transitioning between clear and
* encrypted content periods.
*
* @param playbackLooper The looper associated with the media playback thread.
* @param drmInitData DRM initialization data.
* @param trackType The type of the track to acquire a placeholder session for. Must be one of the
* {@link C}{@code .TRACK_TYPE_*} constants.
* @return The placeholder DRM session, or null if this DRM session manager does not support
* placeholder sessions.
*/
@Nullable
default DrmSession<T> acquirePlaceholderSession(Looper playbackLooper, int trackType) {
return null;
}
/**
* Returns a {@link DrmSession} for the specified {@link DrmInitData}, with an incremented
* reference count. When the caller no longer needs to use the instance, it must call {@link
* DrmSession#release()} to decrement the reference count.
*
* @param playbackLooper The looper associated with the media playback thread.
* @param drmInitData DRM initialization data. All contained {@link SchemeData}s must contain
* non-null {@link SchemeData#data}.
* @return The DRM session.
*/
DrmSession<T> acquireSession(Looper playbackLooper, DrmInitData drmInitData);
/**
* Releases a {@link DrmSession}.
* Returns the {@link ExoMediaCrypto} type returned by sessions acquired using the given {@link
* DrmInitData}, or null if a session cannot be acquired with the given {@link DrmInitData}.
*/
void releaseSession(DrmSession<T> drmSession);
@Nullable
Class<? extends ExoMediaCrypto> getExoMediaCryptoType(DrmInitData drmInitData);
}

Просмотреть файл

@ -0,0 +1,146 @@
/*
* Copyright (C) 2019 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.mozilla.thirdparty.com.google.android.exoplayer2.drm;
import android.media.MediaDrmException;
import android.os.PersistableBundle;
import androidx.annotation.Nullable;
import androidx.annotation.RequiresApi;
import org.mozilla.thirdparty.com.google.android.exoplayer2.util.Util;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/** An {@link ExoMediaDrm} that does not support any protection schemes. */
@RequiresApi(18)
/** An {@link ExoMediaDrm} that does not support any protection schemes. */
@RequiresApi(18)
public final class DummyExoMediaDrm<T extends ExoMediaCrypto> implements ExoMediaDrm<T> {

  /** Returns a new instance. */
  public static <T extends ExoMediaCrypto> DummyExoMediaDrm<T> getInstance() {
    // The diamond infers T from the assignment target, so no unchecked cast is needed.
    DummyExoMediaDrm<T> instance = new DummyExoMediaDrm<>();
    return instance;
  }

  @Override
  public void setOnEventListener(OnEventListener<? super T> listener) {
    // Intentionally a no-op.
  }

  @Override
  public void setOnKeyStatusChangeListener(OnKeyStatusChangeListener<? super T> listener) {
    // Intentionally a no-op.
  }

  @Override
  public byte[] openSession() throws MediaDrmException {
    throw new MediaDrmException("Attempting to open a session using a dummy ExoMediaDrm.");
  }

  @Override
  public void closeSession(byte[] sessionId) {
    // Intentionally a no-op.
  }

  @Override
  public KeyRequest getKeyRequest(
      byte[] scope,
      @Nullable List<DrmInitData.SchemeData> schemeDatas,
      int keyType,
      @Nullable HashMap<String, String> optionalParameters) {
    // openSession always throws, so no session can exist and this must never be reached.
    throw new IllegalStateException();
  }

  @Nullable
  @Override
  public byte[] provideKeyResponse(byte[] scope, byte[] response) {
    // openSession always throws, so no session can exist and this must never be reached.
    throw new IllegalStateException();
  }

  @Override
  public ProvisionRequest getProvisionRequest() {
    // No provisioning is ever required by this implementation.
    throw new IllegalStateException();
  }

  @Override
  public void provideProvisionResponse(byte[] response) {
    // No provisioning is ever required by this implementation.
    throw new IllegalStateException();
  }

  @Override
  public Map<String, String> queryKeyStatus(byte[] sessionId) {
    // openSession always throws, so no session can exist and this must never be reached.
    throw new IllegalStateException();
  }

  @Override
  public void restoreKeys(byte[] sessionId, byte[] keySetId) {
    // openSession always throws, so no session can exist and this must never be reached.
    throw new IllegalStateException();
  }

  @Override
  public void acquire() {
    // Intentionally a no-op.
  }

  @Override
  public void release() {
    // Intentionally a no-op.
  }

  @Override
  @Nullable
  public PersistableBundle getMetrics() {
    return null;
  }

  @Override
  public String getPropertyString(String propertyName) {
    return "";
  }

  @Override
  public byte[] getPropertyByteArray(String propertyName) {
    return Util.EMPTY_BYTE_ARRAY;
  }

  @Override
  public void setPropertyString(String propertyName, String value) {
    // Intentionally a no-op.
  }

  @Override
  public void setPropertyByteArray(String propertyName, byte[] value) {
    // Intentionally a no-op.
  }

  @Override
  public T createMediaCrypto(byte[] sessionId) {
    // openSession always throws, so no session can exist and this must never be reached.
    throw new IllegalStateException();
  }

  @Override
  @Nullable
  public Class<T> getExoMediaCryptoType() {
    // Null: no ExoMediaCrypto type is supported.
    return null;
  }
}

Просмотреть файл

@ -0,0 +1,74 @@
/*
* Copyright (C) 2017 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.mozilla.thirdparty.com.google.android.exoplayer2.drm;
import androidx.annotation.Nullable;
import org.mozilla.thirdparty.com.google.android.exoplayer2.util.Assertions;
import java.util.Map;
/** A {@link DrmSession} that's in a terminal error state. */
public final class ErrorStateDrmSession<T extends ExoMediaCrypto> implements DrmSession<T> {
private final DrmSessionException error;
public ErrorStateDrmSession(DrmSessionException error) {
this.error = Assertions.checkNotNull(error);
}
@Override
public int getState() {
return STATE_ERROR;
}
@Override
public boolean playClearSamplesWithoutKeys() {
return false;
}
@Override
@Nullable
public DrmSessionException getError() {
return error;
}
@Override
@Nullable
public T getMediaCrypto() {
return null;
}
@Override
@Nullable
public Map<String, String> queryKeyStatus() {
return null;
}
@Override
@Nullable
public byte[] getOfflineLicenseKeySetId() {
return null;
}
@Override
public void acquire() {
// Do nothing.
}
@Override
public void release() {
// Do nothing.
}
}

Некоторые файлы не были показаны из-за слишком большого количества измененных файлов Показать больше