Bug 1498253 - Remove mozAvSyncDelay and mozJitterBufferDelay; r=ng

The value for mozAvSyncDelay has been broken since the branch 57 update
(Bug 1341285). We added SetCurrentSyncOffset() but never called it from
anywhere.

In the future we should be getting stats from AudioReceiveStream rather than
modifying the channel code; the delay_estimate_ms field provides almost the
same information.
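
A sketch of that direction (not part of this patch; mRecvStream and
GetJitterBufferDelayMs below are hypothetical names, assuming the audio
conduit held a pointer to its webrtc::AudioReceiveStream):

#include "call/audio_receive_stream.h"

// delay_estimate_ms is Channel::GetDelayEstimate(), i.e. the filtered
// jitter buffer delay plus the playout delay, which the removed
// GetDelayEstimates() reported as two separate values.
int32_t WebrtcAudioConduit::GetJitterBufferDelayMs() const {
  webrtc::AudioReceiveStream::Stats stats = mRecvStream->GetStats();
  return stats.delay_estimate_ms;
}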

Since we're attempting to get rid of moz-prefixed stats, it makes sense to
just remove this code rather than fix it. The associated telemetry code has
also been broken since Bug 1341285, so I think it is safe to remove.

Differential Revision: https://phabricator.services.mozilla.com/D14462

--HG--
extra : moz-landing-system : lando
Dan Minor 2018-12-14 13:08:12 +00:00
Parent b946e7ce00
Commit 73d9c4cce6
13 changed files with 0 additions and 124 deletions

View file

@@ -308,8 +308,6 @@ struct ParamTraits<mozilla::dom::RTCInboundRTPStreamStats> {
     WriteParam(aMsg, aParam.mDiscardedPackets);
     WriteParam(aMsg, aParam.mFramesDecoded);
     WriteParam(aMsg, aParam.mJitter);
-    WriteParam(aMsg, aParam.mMozAvSyncDelay);
-    WriteParam(aMsg, aParam.mMozJitterBufferDelay);
     WriteParam(aMsg, aParam.mRoundTripTime);
     WriteParam(aMsg, aParam.mPacketsLost);
     WriteParam(aMsg, aParam.mPacketsReceived);
@@ -323,8 +321,6 @@ struct ParamTraits<mozilla::dom::RTCInboundRTPStreamStats> {
            !ReadParam(aMsg, aIter, &(aResult->mDiscardedPackets)) ||
            !ReadParam(aMsg, aIter, &(aResult->mFramesDecoded)) ||
            !ReadParam(aMsg, aIter, &(aResult->mJitter)) ||
-           !ReadParam(aMsg, aIter, &(aResult->mMozAvSyncDelay)) ||
-           !ReadParam(aMsg, aIter, &(aResult->mMozJitterBufferDelay)) ||
            !ReadParam(aMsg, aIter, &(aResult->mRoundTripTime)) ||
            !ReadParam(aMsg, aIter, &(aResult->mPacketsLost)) ||
            !ReadParam(aMsg, aIter, &(aResult->mPacketsReceived)) ||

View file

@@ -174,15 +174,6 @@ bool WebrtcAudioConduit::GetRecvPacketTypeStats(
   return mRecvChannelProxy->GetRTCPPacketTypeCounters(*aPacketCounts);
 }
-bool WebrtcAudioConduit::GetAVStats(int32_t* jitterBufferDelayMs,
-                                    int32_t* playoutBufferDelayMs,
-                                    int32_t* avSyncOffsetMs) {
-  // Called from GetAudioFrame and from STS thread
-  mRecvChannelProxy->GetDelayEstimates(jitterBufferDelayMs,
-                                       playoutBufferDelayMs, avSyncOffsetMs);
-  return true;
-}
 bool WebrtcAudioConduit::GetRTPStats(unsigned int* jitterMs,
                                      unsigned int* cumulativeLost) {
   ASSERT_ON_THREAD(mStsThread);
@@ -623,32 +614,6 @@ MediaConduitErrorCode WebrtcAudioConduit::GetAudioFrame(int16_t speechData[],
   MOZ_RELEASE_ASSERT(lengthSamples <= lengthSamplesAllowed);
   PodCopy(speechData, mAudioFrame.data(), lengthSamples);
-  // Not #ifdef DEBUG or on a log module so we can use it for about:webrtc/etc
-  mSamples += lengthSamples;
-  if (mSamples >= mLastSyncLog + samplingFreqHz) {
-    int jitter_buffer_delay_ms;
-    int playout_buffer_delay_ms;
-    int avsync_offset_ms;
-    if (GetAVStats(&jitter_buffer_delay_ms, &playout_buffer_delay_ms,
-                   &avsync_offset_ms)) {
-      if (avsync_offset_ms < 0) {
-        Telemetry::Accumulate(Telemetry::WEBRTC_AVSYNC_WHEN_VIDEO_LAGS_AUDIO_MS,
-                              -avsync_offset_ms);
-      } else {
-        Telemetry::Accumulate(Telemetry::WEBRTC_AVSYNC_WHEN_AUDIO_LAGS_VIDEO_MS,
-                              avsync_offset_ms);
-      }
-      CSFLogDebug(LOGTAG,
-                  "A/V sync: sync delta: %dms, audio jitter delay %dms, "
-                  "playout delay %dms",
-                  avsync_offset_ms, jitter_buffer_delay_ms,
-                  playout_buffer_delay_ms);
-    } else {
-      CSFLogError(LOGTAG, "A/V sync: GetAVStats failed");
-    }
-    mLastSyncLog = mSamples;
-  }
   CSFLogDebug(LOGTAG, "%s GetAudioFrame:Got samples: length %d ", __FUNCTION__,
               lengthSamples);
   return kMediaConduitNoError;

View file

@@ -193,8 +193,6 @@ class WebrtcAudioConduit : public AudioSessionConduit,
         mDtmfEnabled(false),
         mMutex("WebrtcAudioConduit::mMutex"),
         mCaptureDelay(150),
-        mSamples(0),
-        mLastSyncLog(0),
         mStsThread(aStsThread) {}
   virtual ~WebrtcAudioConduit();
@@ -238,8 +236,6 @@ class WebrtcAudioConduit : public AudioSessionConduit,
                             uint32_t* framesDecoded) override {
     return false;
   }
-  bool GetAVStats(int32_t* jitterBufferDelayMs, int32_t* playoutBufferDelayMs,
-                  int32_t* avSyncOffsetMs) override;
   bool GetRTPStats(unsigned int* jitterMs,
                    unsigned int* cumulativeLost) override;
   bool GetRTCPReceiverReport(uint32_t* jitterMs, uint32_t* packetsReceived,
@@ -341,9 +337,6 @@ class WebrtcAudioConduit : public AudioSessionConduit,
   webrtc::AudioFrame mAudioFrame;  // for output pulls
-  uint32_t mSamples;
-  uint32_t mLastSyncLog;
   RtpSourceObserver mRtpSourceObserver;
   // Socket transport service thread. Any thread.

View file

@@ -237,9 +237,6 @@ class MediaSessionConduit {
                                     double* bitrateMean, double* bitrateStdDev,
                                     uint32_t* discardedPackets,
                                     uint32_t* framesDecoded) = 0;
-  virtual bool GetAVStats(int32_t* jitterBufferDelayMs,
-                          int32_t* playoutBufferDelayMs,
-                          int32_t* avSyncOffsetMs) = 0;
   virtual bool GetRTPStats(unsigned int* jitterMs,
                            unsigned int* cumulativeLost) = 0;
   virtual bool GetRTCPReceiverReport(uint32_t* jitterMs,

View file

@@ -1159,14 +1159,6 @@ bool WebrtcVideoConduit::GetVideoDecoderStats(double* framerateMean,
   return true;
 }
-bool WebrtcVideoConduit::GetAVStats(int32_t* jitterBufferDelayMs,
-                                    int32_t* playoutBufferDelayMs,
-                                    int32_t* avSyncOffsetMs) {
-  ASSERT_ON_THREAD(mStsThread);
-  return false;
-}
 bool WebrtcVideoConduit::GetRTPStats(uint32_t* jitterMs,
                                      uint32_t* packetsLost) {
   ASSERT_ON_THREAD(mStsThread);

View file

@@ -262,8 +262,6 @@ class WebrtcVideoConduit
                             double* bitrateMean, double* bitrateStdDev,
                             uint32_t* discardedPackets,
                             uint32_t* framesDecoded) override;
-  bool GetAVStats(int32_t* jitterBufferDelayMs, int32_t* playoutBufferDelayMs,
-                  int32_t* avSyncOffsetMs) override;
   bool GetRTPStats(unsigned int* jitterMs,
                    unsigned int* cumulativeLost) override;
   bool GetRTCPReceiverReport(uint32_t* jitterMs, uint32_t* packetsReceived,

View file

@@ -2937,16 +2937,6 @@ RefPtr<RTCStatsQueryPromise> PeerConnectionImpl::ExecuteStatsQuery_s(
         s.mPacketsReceived.Construct(mp.RtpPacketsReceived());
         s.mBytesReceived.Construct(mp.RtpBytesReceived());
-        if (query->internalStats && isAudio) {
-          int32_t jitterBufferDelay;
-          int32_t playoutBufferDelay;
-          int32_t avSyncDelta;
-          if (mp.Conduit()->GetAVStats(&jitterBufferDelay, &playoutBufferDelay,
-                                       &avSyncDelta)) {
-            s.mMozJitterBufferDelay.Construct(jitterBufferDelay);
-            s.mMozAvSyncDelay.Construct(avSyncDelta);
-          }
-        }
         // Fill in packet type statistics
         webrtc::RtcpPacketTypeCounter counters;
         if (mp.Conduit()->GetRecvPacketTypeStats(&counters)) {

View file

@@ -912,7 +912,6 @@ Channel::Channel(int32_t channelId,
       transport_overhead_per_packet_(0),
       rtp_overhead_per_packet_(0),
       _outputSpeechType(AudioFrame::kNormalSpeech),
-      _current_sync_offset(0),
       rtcp_observer_(new VoERtcpObserver(this)),
       associate_send_channel_(ChannelOwner(nullptr)),
       pacing_enabled_(config.enable_voice_pacing),
@@ -1834,14 +1833,6 @@ uint32_t Channel::GetDelayEstimate() const {
   return audio_coding_->FilteredCurrentDelayMs() + playout_delay_ms_;
 }
-void Channel::GetDelayEstimates(int* jitter_buffer_delay_ms,
-                                int* playout_buffer_delay_ms,
-                                int* avsync_offset_ms) const {
-  rtc::CritScope lock(&video_sync_lock_);
-  *jitter_buffer_delay_ms = audio_coding_->FilteredCurrentDelayMs();
-  *playout_buffer_delay_ms = playout_delay_ms_;
-  *avsync_offset_ms = _current_sync_offset;
-}
 int Channel::SetMinimumPlayoutDelay(int delayMs) {
   if ((delayMs < kVoiceEngineMinMinPlayoutDelayMs) ||
       (delayMs > kVoiceEngineMaxMinPlayoutDelayMs)) {

View file

@@ -221,11 +221,7 @@ class Channel
   // Audio+Video Sync.
   uint32_t GetDelayEstimate() const;
-  void GetDelayEstimates(int* jitter_buffer_delay_ms,
-                         int* playout_buffer_delay_ms,
-                         int* avsync_offset_ms) const;
   int SetMinimumPlayoutDelay(int delayMs);
-  void SetCurrentSyncOffset(int offsetMs) { _current_sync_offset = offsetMs; }
   int GetPlayoutTimestamp(unsigned int& timestamp);
   int GetRtpRtcp(RtpRtcp** rtpRtcpModule, RtpReceiver** rtp_receiver) const;
@@ -427,7 +423,6 @@ class Channel
   rtc::CriticalSection video_sync_lock_;
   uint32_t playout_timestamp_rtp_ RTC_GUARDED_BY(video_sync_lock_);
   uint32_t playout_delay_ms_ RTC_GUARDED_BY(video_sync_lock_);
-  int _current_sync_offset;
   uint16_t send_sequence_number_;
   rtc::CriticalSection ts_stats_lock_;

View file

@@ -252,17 +252,6 @@ uint32_t ChannelProxy::GetDelayEstimate() const {
   return channel()->GetDelayEstimate();
 }
-void ChannelProxy::GetDelayEstimates(int* jitter_buffer_delay_ms,
-                                     int* playout_buffer_delay_ms,
-                                     int* avsync_offset_ms) const {
-  //Called from AudioIPC thread
-  //RTC_DCHECK(worker_thread_checker_.CalledOnValidThread() ||
-  //           module_process_thread_checker_.CalledOnValidThread());
-  return channel()->GetDelayEstimates(jitter_buffer_delay_ms,
-                                      playout_buffer_delay_ms,
-                                      avsync_offset_ms);
-}
 bool ChannelProxy::SetSendTelephoneEventPayloadType(int payload_type,
                                                     int payload_frequency) {
   RTC_DCHECK(worker_thread_checker_.CalledOnValidThread());

View file

@@ -102,9 +102,6 @@ class ChannelProxy : public RtpPacketSinkInterface {
   virtual double GetTotalOutputEnergy() const;
   virtual double GetTotalOutputDuration() const;
   virtual uint32_t GetDelayEstimate() const;
-  virtual void GetDelayEstimates(int* jitter_buffer_delay_ms,
-                                 int* playout_buffer_delay_ms,
-                                 int* avsync_offset_ms) const;
   virtual bool SetSendTelephoneEventPayloadType(int payload_type,
                                                 int payload_frequency);
   virtual bool SendTelephoneEventOutband(int event, int duration_ms);

View file

@@ -517,19 +517,6 @@ RTPStats.prototype = {
     this._stats = rtpStats;
   },
-  renderAvStats(stats) {
-    let statsString = "";
-    if (stats.mozAvSyncDelay) {
-      statsString += `${getString("av_sync_label")}: ${stats.mozAvSyncDelay} ms `;
-    }
-    if (stats.mozJitterBufferDelay) {
-      statsString += `${getString("jitter_buffer_delay_label")}: ${stats.mozJitterBufferDelay} ms`;
-    }
-    return renderElement("p", statsString);
-  },
   renderCoderStats(stats) {
     let statsString = "";
     let label;
@@ -593,10 +580,6 @@ RTPStats.prototype = {
     let div = document.createElement("div");
     div.appendChild(renderElement("h5", stats.id));
-    if (stats.MozAvSyncDelay || stats.mozJitterBufferDelay) {
-      div.appendChild(this.renderAvStats(stats));
-    }
     div.appendChild(this.renderCoderStats(stats));
     div.appendChild(this.renderTransportStats(stats, getString("typeLocal")));

View file

@@ -73,16 +73,6 @@ ice_pair_bytes_sent = Bytes sent
 ice_pair_bytes_received = Bytes received
 ice_component_id = Component ID
-# LOCALIZATION NOTE (av_sync_label): "A/V" stands for Audio/Video.
-# "sync" is an abbreviation for synchronization. This is used as
-# a data label.
-av_sync_label = A/V sync
-# LOCALIZATION NOTE (jitter_buffer_delay_label): A jitter buffer is an
-# element in the processing chain, see http://wikipedia.org/wiki/Jitter
-# This is used as a data label.
-jitter_buffer_delay_label = Jitter-buffer delay
 # LOCALIZATION NOTE (avg_bitrate_label, avg_framerate_label): "Avg." is an abbreviation
 # for Average. These are used as data labels.
 avg_bitrate_label = Avg. bitrate