Bug 1487057 - Part 4 - Remove AsyncLatencyLogger and associated code. r=pehrsons

It's not maintained and probably does not work anymore.

Differential Revision: https://phabricator.services.mozilla.com/D5438

--HG--
extra : rebase_source : ccd622e40844dda5d16266e49991462d4ea94224
This commit is contained in:
Paul Adenot 2018-08-30 17:11:57 +02:00
Родитель 6a77327e7f
Коммит 3f4724fcbb
17 изменённых файлов: 3 добавлений и 578 удалений

Просмотреть файл

@ -7,7 +7,6 @@
#include "AudioMixer.h"
#include "AudioChannelFormat.h"
#include "Latency.h"
#include <speex/speex_resampler.h>
namespace mozilla {
@ -164,7 +163,7 @@ AudioSegment::Mix(AudioMixer& aMixer, uint32_t aOutputChannels,
}
void
AudioSegment::WriteTo(uint64_t aID, AudioMixer& aMixer, uint32_t aOutputChannels, uint32_t aSampleRate)
AudioSegment::WriteTo(AudioMixer& aMixer, uint32_t aOutputChannels, uint32_t aSampleRate)
{
AutoTArray<AudioDataValue,SilentChannel::AUDIO_PROCESSING_FRAMES*GUESS_AUDIO_CHANNELS> buf;
// Offset in the buffer that will be written to the mixer, in samples.
@ -198,13 +197,6 @@ AudioSegment::WriteTo(uint64_t aID, AudioMixer& aMixer, uint32_t aOutputChannels
}
offset += c.mDuration * aOutputChannels;
if (!c.mTimeStamp.IsNull()) {
TimeStamp now = TimeStamp::Now();
// would be more efficient to c.mTimeStamp to ms on create time then pass here
LogTime(AsyncLatencyLogger::AudioMediaStreamTrack, aID,
(now - c.mTimeStamp).ToMilliseconds(), c.mTimeStamp);
}
}
if (offset) {

Просмотреть файл

@ -290,9 +290,6 @@ struct AudioChunk {
float mVolume = 1.0f; // volume multiplier to apply
// format of frames in mBuffer (or silence if mBuffer is null)
SampleFormat mBufferFormat = AUDIO_FORMAT_SILENCE;
#ifdef MOZILLA_INTERNAL_API
mozilla::TimeStamp mTimeStamp; // time at which this has been fetched from the MediaEngine
#endif
// principalHandle for the data in this chunk.
// This can be compared to an nsIPrincipal* when back on main thread.
PrincipalHandle mPrincipalHandle = PRINCIPAL_HANDLE_NONE;
@ -384,9 +381,6 @@ public:
chunk->mChannelData.AppendElement(aChannelData[channel]);
}
chunk->mBufferFormat = AUDIO_FORMAT_FLOAT32;
#ifdef MOZILLA_INTERNAL_API
chunk->mTimeStamp = TimeStamp::Now();
#endif
chunk->mPrincipalHandle = aPrincipalHandle;
}
void AppendFrames(already_AddRefed<ThreadSharedObject> aBuffer,
@ -402,9 +396,6 @@ public:
chunk->mChannelData.AppendElement(aChannelData[channel]);
}
chunk->mBufferFormat = AUDIO_FORMAT_S16;
#ifdef MOZILLA_INTERNAL_API
chunk->mTimeStamp = TimeStamp::Now();
#endif
chunk->mPrincipalHandle = aPrincipalHandle;
}
@ -420,9 +411,6 @@ public:
chunk->mVolume = aChunk->mVolume;
chunk->mBufferFormat = aChunk->mBufferFormat;
#ifdef MOZILLA_INTERNAL_API
chunk->mTimeStamp = TimeStamp::Now();
#endif
chunk->mPrincipalHandle = aChunk->mPrincipalHandle;
return chunk;
}
@ -430,7 +418,7 @@ public:
// Mix the segment into a mixer, interleaved. This is useful to output a
// segment to a system audio callback. It up or down mixes to aChannelCount
// channels.
void WriteTo(uint64_t aID, AudioMixer& aMixer, uint32_t aChannelCount,
void WriteTo(AudioMixer& aMixer, uint32_t aChannelCount,
uint32_t aSampleRate);
// Mix the segment into a mixer, keeping it planar, up or down mixing to
// aChannelCount channels.

Просмотреть файл

@ -1,229 +0,0 @@
/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* vim:set ts=2 sw=2 sts=2 et cindent: */
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#include "Latency.h"
#include "nsThreadUtils.h"
#include "mozilla/Logging.h"
#include <cmath>
#include <algorithm>
#include <mozilla/Services.h>
#include <mozilla/StaticPtr.h>
#include "nsContentUtils.h"
using namespace mozilla;
// Human-readable labels for each AsyncLatencyLogger::LatencyLogIndex value,
// used when formatting log lines in AsyncLatencyLogger::WriteLog().
// NOTE(review): must be kept in sync (same order and count) with the
// LatencyLogIndex enum in Latency.h — there is no compile-time check.
const char* LatencyLogIndex2Strings[] = {
  "Audio MediaStreamTrack",
  "Video MediaStreamTrack",
  "Cubeb",
  "AudioStream",
  "NetEQ",
  "AudioCapture Base",
  "AudioCapture Samples",
  "AudioTrackInsertion",
  "MediaPipeline Audio Insertion",
  "AudioTransmit",
  "AudioReceive",
  "MediaPipelineAudioPlayout",
  "MediaStream Create",
  "AudioStream Create",
  "AudioSendRTP",
  "AudioRecvRTP"
};
// Process-wide singleton logger; created in InitializeStatics() and
// released in ShutdownLogger().
static StaticRefPtr<AsyncLatencyLogger> gAsyncLogger;

// Returns the lazily-created "MediaLatency" log module used for all
// latency log output.
LogModule*
GetLatencyLog()
{
  static LazyLogModule sLog("MediaLatency");
  return sLog;
}
// Runnable dispatched to the logger thread; performs the actual write via
// AsyncLatencyLogger::WriteLog() so callers never block on logging I/O.
class LogEvent : public Runnable
{
public:
  // Event carrying an explicit timestamp (logged as a delta from the
  // logger's start time).
  LogEvent(AsyncLatencyLogger::LatencyLogIndex aIndex,
           uint64_t aID,
           int64_t aValue,
           TimeStamp aTimeStamp)
    : mozilla::Runnable("LogEvent")
    , mIndex(aIndex)
    , mID(aID)
    , mValue(aValue)
    , mTimeStamp(aTimeStamp)
  {}
  // Event without a timestamp; a null TimeStamp selects the short log
  // format in WriteLog().
  LogEvent(AsyncLatencyLogger::LatencyLogIndex aIndex,
           uint64_t aID,
           int64_t aValue)
    : mozilla::Runnable("LogEvent")
    , mIndex(aIndex)
    , mID(aID)
    , mValue(aValue)
    , mTimeStamp(TimeStamp())
  {}
  ~LogEvent() {}

  NS_IMETHOD Run() override {
    // Get(true) also lazily (re)starts the logger thread if needed.
    AsyncLatencyLogger::Get(true)->WriteLog(mIndex, mID, mValue, mTimeStamp);
    return NS_OK;
  }
protected:
  AsyncLatencyLogger::LatencyLogIndex mIndex;
  uint64_t mID;
  int64_t mValue;
  TimeStamp mTimeStamp;
};
// Free-function wrappers around the singleton logger. The uint32_t
// overloads exist for callers (webrtc/trunk code) that cannot include the
// LatencyLogIndex enum definition.

// Log aValue with no timestamp attached.
void LogLatency(AsyncLatencyLogger::LatencyLogIndex aIndex, uint64_t aID, int64_t aValue)
{
  AsyncLatencyLogger::Get()->Log(aIndex, aID, aValue);
}

// Log aValue stamped with the current time.
void LogTime(AsyncLatencyLogger::LatencyLogIndex aIndex, uint64_t aID, int64_t aValue)
{
  TimeStamp now = TimeStamp::Now();
  AsyncLatencyLogger::Get()->Log(aIndex, aID, aValue, now);
}

// Log aValue stamped with a caller-supplied time.
void LogTime(AsyncLatencyLogger::LatencyLogIndex aIndex, uint64_t aID, int64_t aValue, TimeStamp &aTime)
{
  AsyncLatencyLogger::Get()->Log(aIndex, aID, aValue, aTime);
}

// uint32_t variants: cast the raw index to the enum and forward.
void LogTime(uint32_t aIndex, uint64_t aID, int64_t aValue)
{
  LogTime(static_cast<AsyncLatencyLogger::LatencyLogIndex>(aIndex), aID, aValue);
}
void LogTime(uint32_t aIndex, uint64_t aID, int64_t aValue, TimeStamp &aTime)
{
  LogTime(static_cast<AsyncLatencyLogger::LatencyLogIndex>(aIndex), aID, aValue, aTime);
}
void LogLatency(uint32_t aIndex, uint64_t aID, int64_t aValue)
{
  LogLatency(static_cast<AsyncLatencyLogger::LatencyLogIndex>(aIndex), aID, aValue);
}
/* static */
// Main-thread only: create the global logger and force allocation of the
// underlying log module. Called from nsLayoutStatics::Initialize().
void AsyncLatencyLogger::InitializeStatics()
{
  NS_ASSERTION(NS_IsMainThread(), "Main thread only");
  // Make sure that the underlying logger is allocated.
  GetLatencyLog();
  gAsyncLogger = new AsyncLatencyLogger();
}

/* static */
// Drop the global reference; called from nsLayoutStatics::Shutdown().
void AsyncLatencyLogger::ShutdownLogger()
{
  gAsyncLogger = nullptr;
}

/* static */
// Fetch the singleton; with aStartTimer, also lazily start the logger
// thread and record the base timestamp (see Init()).
AsyncLatencyLogger* AsyncLatencyLogger::Get(bool aStartTimer)
{
  // Users don't generally null-check the result since we should live longer than they
  MOZ_ASSERT(gAsyncLogger);
  if (aStartTimer) {
    gAsyncLogger->Init();
  }
  return gAsyncLogger;
}
NS_IMPL_ISUPPORTS(AsyncLatencyLogger, nsIObserver)

// Main-thread only: registers for xpcom-shutdown so the logger thread is
// torn down even if ShutdownLogger() is never reached.
AsyncLatencyLogger::AsyncLatencyLogger()
  : mThread(nullptr),
    mMutex("AsyncLatencyLogger")
{
  NS_ASSERTION(NS_IsMainThread(), "Main thread only");
  nsContentUtils::RegisterShutdownObserver(this);
}

AsyncLatencyLogger::~AsyncLatencyLogger()
{
  // Idempotent: Shutdown() may already have run via Observe().
  AsyncLatencyLogger::Shutdown();
}
void AsyncLatencyLogger::Shutdown()
{
nsContentUtils::UnregisterShutdownObserver(this);
MutexAutoLock lock(mMutex);
if (mThread) {
mThread->Shutdown();
}
mStart = TimeStamp(); // make sure we don't try to restart it for any reason
}
// Lazily start the logger: create the "Latency Logger" thread and record
// the base timestamp used for delta calculations. Thread-safe; only the
// first caller (while mStart is null) does the work.
void AsyncLatencyLogger::Init()
{
  MutexAutoLock lock(mMutex);
  if (mStart.IsNull()) {
    nsresult rv = NS_NewNamedThread("Latency Logger", getter_AddRefs(mThread));
    NS_ENSURE_SUCCESS_VOID(rv);
    mStart = TimeStamp::Now();
  }
}
// Return the base time recorded in Init(); aStart is a null TimeStamp if
// the logger was never started.
void AsyncLatencyLogger::GetStartTime(TimeStamp &aStart)
{
  MutexAutoLock lock(mMutex);
  aStart = mStart;
}
// nsIObserver hook: tear the logger down on xpcom-shutdown; all other
// topics are ignored.
nsresult
AsyncLatencyLogger::Observe(nsISupports* aSubject, const char* aTopic,
                            const char16_t* aData)
{
  MOZ_ASSERT(NS_IsMainThread());
  if (strcmp(aTopic, NS_XPCOM_SHUTDOWN_OBSERVER_ID) != 0) {
    return NS_OK;
  }
  Shutdown();
  return NS_OK;
}
// aID is a sub-identifier (in particular a specific MediaStramTrack)
// Format and emit one log line; runs on the logger thread (dispatched via
// LogEvent). A null aTimeStamp selects the short format without the
// final delta column.
void AsyncLatencyLogger::WriteLog(LatencyLogIndex aIndex, uint64_t aID, int64_t aValue,
                                  TimeStamp aTimeStamp)
{
  if (aTimeStamp.IsNull()) {
    MOZ_LOG(GetLatencyLog(), LogLevel::Debug,
            ("Latency: %s,%" PRIu64 ",%" PRId64 ",%" PRId64,
             LatencyLogIndex2Strings[aIndex], aID, GetTimeStamp(), aValue));
  } else {
    MOZ_LOG(GetLatencyLog(), LogLevel::Debug,
            ("Latency: %s,%" PRIu64 ",%" PRId64 ",%" PRId64 ",%" PRId64,
             LatencyLogIndex2Strings[aIndex], aID, GetTimeStamp(), aValue,
             // NOTE(review): reads mStart via the global rather than
             // this->mStart, and without holding mMutex — appears to rely
             // on mStart being stable after Init(); confirm.
             static_cast<int64_t>((aTimeStamp - gAsyncLogger->mStart).ToMilliseconds())));
  }
}
// Milliseconds elapsed since the logger's base time (set in Init()).
int64_t AsyncLatencyLogger::GetTimeStamp()
{
  return (TimeStamp::Now() - mStart).ToMilliseconds();
}
// Log without a timestamp; a default-constructed (null) TimeStamp makes
// WriteLog() use the short format.
void AsyncLatencyLogger::Log(LatencyLogIndex aIndex, uint64_t aID, int64_t aValue)
{
  TimeStamp nullTime;
  Log(aIndex, aID, aValue, nullTime);
}
// Dispatch an asynchronous log event to the logger thread. Cheap no-op
// when the MediaLatency module is below Debug verbosity or when the logger
// thread was never started / already shut down.
void AsyncLatencyLogger::Log(LatencyLogIndex aIndex, uint64_t aID, int64_t aValue, TimeStamp &aTime)
{
  if (MOZ_LOG_TEST(GetLatencyLog(), LogLevel::Debug)) {
    nsCOMPtr<nsIRunnable> event = new LogEvent(aIndex, aID, aValue, aTime);
    // NOTE(review): mThread is read without mMutex here; races with
    // Init()/Shutdown() on other threads — confirm this is acceptable.
    if (mThread) {
      mThread->Dispatch(event, NS_DISPATCH_NORMAL);
    }
  }
}

Просмотреть файл

@ -1,99 +0,0 @@
/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* vim:set ts=2 sw=2 sts=2 et cindent: */
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#ifndef MOZILLA_LATENCY_H
#define MOZILLA_LATENCY_H
#include "mozilla/TimeStamp.h"
#include "mozilla/Logging.h"
#include "nsCOMPtr.h"
#include "nsIThread.h"
#include "mozilla/Monitor.h"
#include "nsISupportsImpl.h"
#include "nsIObserver.h"
class AsyncLatencyLogger;
mozilla::LogModule* GetLatencyLog();
// This class is a singleton. It is refcounted.
// Asynchronous latency logger: Log() may be called from any thread; the
// actual formatting/writing happens on a dedicated "Latency Logger"
// thread. Created/destroyed via InitializeStatics()/ShutdownLogger(), and
// it also shuts itself down on xpcom-shutdown via nsIObserver.
class AsyncLatencyLogger : public nsIObserver
{
  NS_DECL_THREADSAFE_ISUPPORTS
  NS_DECL_NSIOBSERVER

public:
  // One index per media-pipeline stage being timed.
  // NOTE(review): must stay in sync (order and count) with
  // LatencyLogIndex2Strings in Latency.cpp.
  enum LatencyLogIndex {
    AudioMediaStreamTrack = 0,
    VideoMediaStreamTrack,
    Cubeb,
    AudioStream,
    NetEQ,
    AudioCaptureBase, // base time for capturing an audio stream
    AudioCapture, // records number of samples captured and the time
    AudioTrackInsertion, // # of samples inserted into a mediastreamtrack and the time
    MediaPipelineAudioInsertion, // Timestamp and time of timestamp
    AudioTransmit, // Timestamp and socket send time
    AudioReceive, // Timestamp and receive time
    MediaPipelineAudioPlayout, // Timestamp and playout into MST time
    MediaStreamCreate, // Source and TrackUnion streams
    AudioStreamCreate, // TrackUnion stream and AudioStream
    AudioSendRTP,
    AudioRecvRTP,
    _MAX_INDEX
  };
  // Log with a null timestamp
  void Log(LatencyLogIndex index, uint64_t aID, int64_t aValue);
  // Log with a timestamp
  void Log(LatencyLogIndex index, uint64_t aID, int64_t aValue,
           mozilla::TimeStamp &aTime);
  // Write a log message to NSPR (runs on the logger thread)
  void WriteLog(LatencyLogIndex index, uint64_t aID, int64_t aValue,
                mozilla::TimeStamp timestamp);
  // Get the base time used by the logger for delta calculations
  void GetStartTime(mozilla::TimeStamp &aStart);

  // Fetch the singleton; aStartTimer also lazily starts the logger thread.
  static AsyncLatencyLogger* Get(bool aStartTimer = false);
  static void InitializeStatics();
  // After this is called, the global log object may go away
  static void ShutdownLogger();
private:
  AsyncLatencyLogger();
  virtual ~AsyncLatencyLogger();
  // Milliseconds elapsed since mStart.
  int64_t GetTimeStamp();
  void Init();
  // Shut down the thread associated with this, and make sure it doesn't
  // start up again.
  void Shutdown();
  // The thread on which the IO happens
  nsCOMPtr<nsIThread> mThread;
  // This can be initialized on multiple threads, but is protected by a
  // mutex. After the initialization phase, it is accessed on the log
  // thread only.
  mozilla::TimeStamp mStart;
  // This mutex protects mStart and mThread during the
  // initialization sequence. It is initialized at layout startup, and
  // destroyed at layout shutdown.
  mozilla::Mutex mMutex;
};
// need uint32_t versions for access from webrtc/trunk code
// Log without a time delta
void LogLatency(AsyncLatencyLogger::LatencyLogIndex index, uint64_t aID, int64_t aValue);
void LogLatency(uint32_t index, uint64_t aID, int64_t aValue);
// Log TimeStamp::Now() (as delta)
void LogTime(AsyncLatencyLogger::LatencyLogIndex index, uint64_t aID, int64_t aValue);
void LogTime(uint32_t index, uint64_t aID, int64_t aValue);
// Log the specified time (as delta)
void LogTime(AsyncLatencyLogger::LatencyLogIndex index, uint64_t aID, int64_t aValue,
             mozilla::TimeStamp &aTime);

// For generating unique-ish ids for logged sources
// NOTE(review): ORs the track id into the low 4 bits of a pointer-derived
// value — only "unique-ish"; ids can collide for large track ids.
#define LATENCY_STREAM_ID(source, trackID) \
  ((((uint64_t) (source)) & ~0x0F) | (trackID))
#endif

Просмотреть файл

@ -58,7 +58,6 @@
#include "MediaTrackConstraints.h"
#include "VideoUtils.h"
#include "ThreadSafeRefcountingWithMainThreadDestruction.h"
#include "Latency.h"
#include "nsProxyRelease.h"
#include "nsVariant.h"

Просмотреть файл

@ -13,7 +13,6 @@
#include "mozilla/TimeStamp.h"
#endif
#include <algorithm>
#include "Latency.h"
namespace mozilla {
@ -331,9 +330,6 @@ public:
} else {
mChunks.InsertElementAt(0)->SetNull(aDuration);
}
#ifdef MOZILLA_INTERNAL_API
mChunks[0].mTimeStamp = mozilla::TimeStamp::Now();
#endif
mDuration += aDuration;
}
void AppendNullData(StreamTime aDuration) override
@ -418,12 +414,6 @@ public:
RemoveLeading(aDuration, 0);
}
#ifdef MOZILLA_INTERNAL_API
void GetStartTime(TimeStamp &aTime) {
aTime = mChunks[0].mTimeStamp;
}
#endif
size_t SizeOfExcludingThis(MallocSizeOf aMallocSizeOf) const override
{
size_t amount = mChunks.ShallowSizeOfExcludingThis(aMallocSizeOf);
@ -455,9 +445,6 @@ protected:
MediaSegmentBase(MediaSegmentBase&& aSegment)
: MediaSegment(std::move(aSegment))
, mChunks()
#ifdef MOZILLA_INTERNAL_API
, mTimeStamp(std::move(aSegment.mTimeStamp))
#endif
{
mChunks.SwapElements(aSegment.mChunks);
MOZ_ASSERT(mChunks.Capacity() >= DEFAULT_SEGMENT_CAPACITY,
@ -576,9 +563,6 @@ protected:
}
AutoTArray<Chunk, DEFAULT_SEGMENT_CAPACITY> mChunks;
#ifdef MOZILLA_INTERNAL_API
mozilla::TimeStamp mTimeStamp;
#endif
};
} // namespace mozilla

Просмотреть файл

@ -783,11 +783,7 @@ MediaStreamGraphImpl::PlayAudio(MediaStream* aStream)
}
audioOutput.mLastTickWritten = offset;
// Need unique id for stream & track - and we want it to match the inserter
output.WriteTo(LATENCY_STREAM_ID(aStream, track->GetID()),
mMixer,
AudioOutputChannelCount(),
mSampleRate);
output.WriteTo(mMixer, AudioOutputChannelCount(), mSampleRate);
}
return ticksWritten;
}
@ -3714,7 +3710,6 @@ MediaStreamGraphImpl::MediaStreamGraphImpl(GraphDriverType aDriverRequested,
, mRealtime(aDriverRequested != OFFLINE_THREAD_DRIVER)
, mNonRealtimeProcessing(false)
, mStreamOrderDirty(false)
, mLatencyLog(AsyncLatencyLogger::Get())
, mAbstractMainThread(aMainThread)
, mSelfRef(this)
, mOutputChannels(std::min<uint32_t>(8, CubebUtils::MaxNumberOfChannels()))

Просмотреть файл

@ -10,7 +10,6 @@
#include "AudioMixer.h"
#include "GraphDriver.h"
#include "Latency.h"
#include "mozilla/Atomics.h"
#include "mozilla/Monitor.h"
#include "mozilla/Services.h"
@ -886,10 +885,6 @@ public:
* blocking order.
*/
bool mStreamOrderDirty;
/**
* Hold a ref to the Latency logger
*/
RefPtr<AsyncLatencyLogger> mLatencyLog;
AudioMixer mMixer;
const RefPtr<AbstractThread> mAbstractMainThread;

Просмотреть файл

@ -118,7 +118,6 @@ EXPORTS += [
'FrameStatistics.h',
'ImageToI420.h',
'Intervals.h',
'Latency.h',
'MediaCache.h',
'MediaContainerType.h',
'MediaData.h',
@ -238,7 +237,6 @@ UNIFIED_SOURCES += [
'GetUserMediaRequest.cpp',
'GraphDriver.cpp',
'ImageToI420.cpp',
'Latency.cpp',
'MediaCache.cpp',
'MediaContainerType.cpp',
'MediaData.cpp',

Просмотреть файл

@ -1,104 +0,0 @@
#!/usr/bin/env python
# graph_latency.py - graph media latency
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
# needs matplotlib (sudo aptitude install python-matplotlib)
import matplotlib.pyplot as plt
from matplotlib import rc
import sys
from pprint import pprint
import re
# FIX! needs to be sum of a single mediastreamtrack and any output overhead for it
# So there is one sum per MST
def compute_sum(data):
    '''Compute, for each timestamp, the sum of the most recent value seen
    for every source so far. Expects the output of parse_data(); returns a
    pair of parallel lists (timestamps, summed latencies).'''
    last_values = {}
    out = ([], [])
    for i in data:
        # Remember only the latest value per source (i[0]); the previous
        # version also pre-initialized the key to 0 and immediately
        # overwrote it, and leaked a debug print on every row.
        last_values[i[0]] = float(i[3])
        out[0].append(i[2])
        out[1].append(sum(last_values.values()))
    return out
def clean_data(raw_data):
    '''
    Remove the PR_LOG cruft at the beginning of each line and return a list
    of the payload strings. Lines containing '#' are skipped, as are
    malformed lines without a ': ' separator (previously an IndexError).
    '''
    out = []
    for line in raw_data:
        if re.match(r'(.*)#(.*)', line):
            continue
        parts = line.split(": ")
        if len(parts) > 1:
            out.append(parts[1])
    return out
# returns a list of lists, one per input line
def parse_data(raw_lines):
    '''
    Split each line on commas and collect the pieces, one list per line.
    '''
    return [line.split(',') for line in raw_lines]
if len(sys.argv) == 3:
name = sys.argv[1]
channels = int(sys.argv[2])
else:
print sys.argv[0] + "latency_log"
try:
f = open(sys.argv[1])
except:
print "cannot open " + name
raw_lines = f.readlines()
lines = clean_data(raw_lines)
data = parse_data(lines)
final_data = {}
for tupl in data:
name = tupl[0]
if tupl[1] != 0:
name = name+tupl[1]
if name not in final_data.keys():
final_data[name] = ([], [])
# sanity-check values
if float(tupl[3]) < 10*1000:
final_data[name][0].append(float(tupl[2]))
final_data[name][1].append(float(tupl[3]))
#overall = compute_sum(data)
#final_data["overall"] = overall
pprint(final_data)
fig = plt.figure()
for i in final_data.keys():
plt.plot(final_data[i][0], final_data[i][1], label=i)
plt.legend()
plt.suptitle("Latency in ms (y-axis) against time in ms (x-axis).")
size = fig.get_size_inches()
# make it gigantic so we can see things. sometimes, if the graph is too big,
# this errors. reduce the factor so it stays under 2**15.
fig.set_size_inches((size[0]*10, size[1]*2))
name = sys.argv[1][:-4] + ".pdf"
fig.savefig(name)

Просмотреть файл

@ -50,11 +50,6 @@ LogModule* GetMediaManagerLog();
#define LOG(msg) MOZ_LOG(GetMediaManagerLog(), mozilla::LogLevel::Debug, msg)
#define LOG_FRAMES(msg) MOZ_LOG(GetMediaManagerLog(), mozilla::LogLevel::Verbose, msg)
LogModule* AudioLogModule() {
static mozilla::LazyLogModule log("AudioLatency");
return static_cast<LogModule*>(log);
}
void
WebRTCAudioDataListener::NotifyOutputData(MediaStreamGraphImpl* aGraph,
AudioDataValue* aBuffer,
@ -144,8 +139,6 @@ MediaEngineWebRTCMicrophoneSource::MediaEngineWebRTCMicrophoneSource(
// It would be great if it did but we're already on the media thread.
/* aStrict = */ false))
, mRequestedInputChannelCount(aMaxChannelCount)
, mTotalFrames(0)
, mLastLogFrames(0)
, mSkipProcessing(false)
, mInputDownmixBuffer(MAX_SAMPLING_FREQ * MAX_CHANNELS / 100)
{
@ -758,9 +751,6 @@ MediaEngineWebRTCMicrophoneSource::Start(const RefPtr<const AllocationHandle>& a
mListener = new WebRTCAudioDataListener(this);
}
// Make sure logger starts before capture
AsyncLatencyLogger::Get(true);
mAllocation->mStream->OpenAudioInput(deviceID, mListener);
MOZ_ASSERT(mState != kReleased);
@ -1131,18 +1121,6 @@ MediaEngineWebRTCMicrophoneSource::InsertInGraph(const T* aBuffer,
return;
}
if (MOZ_LOG_TEST(AudioLogModule(), LogLevel::Debug)) {
mTotalFrames += aFrames;
if (mAllocation->mStream &&
mTotalFrames > mLastLogFrames +
mAllocation->mStream->GraphRate()) { // ~ 1 second
MOZ_LOG(AudioLogModule(), LogLevel::Debug,
("%p: Inserting %zu samples into graph, total frames = %" PRIu64,
(void*)this, aFrames, mTotalFrames));
mLastLogFrames = mTotalFrames;
}
}
if (!mAllocation->mStream) {
return;
}

Просмотреть файл

@ -279,8 +279,6 @@ private:
// legal channel count for this particular device. This is the number of
// channels of the input buffer passed as parameter in NotifyInputData.
uint32_t mRequestedInputChannelCount;
uint64_t mTotalFrames;
uint64_t mLastLogFrames;
// mSkipProcessing is true if none of the processing passes are enabled,
// because of prefs or constraints. This allows simply copying the audio into

Просмотреть файл

@ -79,7 +79,6 @@
#endif
#include "CubebUtils.h"
#include "Latency.h"
#include "WebAudioUtils.h"
#include "nsError.h"
@ -221,7 +220,6 @@ nsLayoutStatics::Initialize()
return rv;
}
AsyncLatencyLogger::InitializeStatics();
DecoderDoctorLogger::Init();
MediaManager::StartupInit();
CubebUtils::InitLibrary();
@ -359,7 +357,6 @@ nsLayoutStatics::Shutdown()
FrameLayerBuilder::Shutdown();
CubebUtils::ShutdownLibrary();
AsyncLatencyLogger::ShutdownLogger();
WebAudioUtils::Shutdown();
nsCORSListenerProxy::Shutdown();

Просмотреть файл

@ -18,7 +18,6 @@
#include "nsIPrefService.h"
#include "nsIPrefBranch.h"
#include "nsThreadUtils.h"
#include "Latency.h"
#include "mozilla/Telemetry.h"
#include "webrtc/modules/audio_processing/include/audio_processing.h"
@ -705,10 +704,6 @@ WebrtcAudioConduit::SendAudioFrame(const int16_t audio_data[],
return kMediaConduitSessionNotInited;
}
if (MOZ_LOG_TEST(GetLatencyLog(), LogLevel::Debug)) {
struct Processing insert = { TimeStamp::Now(), 0 };
mProcessing.AppendElement(insert);
}
capture_delay = mCaptureDelay;
// Insert the samples
@ -807,28 +802,6 @@ WebrtcAudioConduit::GetAudioFrame(int16_t speechData[],
mLastSyncLog = mSamples;
}
if (MOZ_LOG_TEST(GetLatencyLog(), LogLevel::Debug)) {
if (mProcessing.Length() > 0) {
unsigned int now;
mPtrVoEVideoSync->GetPlayoutTimestamp(mChannel, now);
if (static_cast<uint32_t>(now) != mLastTimestamp) {
mLastTimestamp = static_cast<uint32_t>(now);
// Find the block that includes this timestamp in the network input
while (mProcessing.Length() > 0) {
// FIX! assumes 20ms @ 48000Hz
// FIX handle wrap-around
if (mProcessing[0].mRTPTimeStamp + 20*(48000/1000) >= now) {
TimeDuration t = TimeStamp::Now() - mProcessing[0].mTimeStamp;
// Wrap-around?
int64_t delta = t.ToMilliseconds() + (now - mProcessing[0].mRTPTimeStamp)/(48000/1000);
LogTime(AsyncLatencyLogger::AudioRecvRTP, ((uint64_t) this), delta);
break;
}
mProcessing.RemoveElementAt(0);
}
}
}
}
CSFLogDebug(LOGTAG,"%s GetAudioFrame:Got samples: length %d ",__FUNCTION__,
lengthSamples);
return kMediaConduitNoError;
@ -842,13 +815,6 @@ WebrtcAudioConduit::ReceivedRTPPacket(const void *data, int len, uint32_t ssrc)
if(mEngineReceiving)
{
if (MOZ_LOG_TEST(GetLatencyLog(), LogLevel::Debug)) {
// timestamp is at 32 bits in ([1])
struct Processing insert = { TimeStamp::Now(),
ntohl(static_cast<const uint32_t *>(data)[1]) };
mProcessing.AppendElement(insert);
}
// XXX we need to get passed the time the packet was received
if(mPtrVoENetwork->ReceivedRTPPacket(mChannel, data, len) == -1)
{
@ -987,16 +953,6 @@ WebrtcAudioConduit::SendRtp(const uint8_t* data,
{
CSFLogDebug(LOGTAG, "%s: len %lu", __FUNCTION__, (unsigned long)len);
if (MOZ_LOG_TEST(GetLatencyLog(), LogLevel::Debug)) {
if (mProcessing.Length() > 0) {
TimeStamp started = mProcessing[0].mTimeStamp;
mProcessing.RemoveElementAt(0);
mProcessing.RemoveElementAt(0); // 20ms packetization! Could automate this by watching sizes
TimeDuration t = TimeStamp::Now() - started;
int64_t delta = t.ToMilliseconds();
LogTime(AsyncLatencyLogger::AudioSendRTP, ((uint64_t) this), delta);
}
}
ReentrantMonitorAutoEnter enter(mTransportMonitor);
// XXX(pkerr) - the PacketOptions are being ignored. This parameter was added along
// with the Call API update in the webrtc.org codebase.

Просмотреть файл

@ -185,7 +185,6 @@ public:
mDtmfEnabled(false),
mCodecMutex("AudioConduit codec db"),
mCaptureDelay(150),
mLastTimestamp(0),
mSamples(0),
mLastSyncLog(0)
{
@ -339,8 +338,6 @@ private:
// Current "capture" delay (really output plus input delay)
int32_t mCaptureDelay;
uint32_t mLastTimestamp;
webrtc::AudioFrame mAudioFrame; // for output pulls
uint32_t mSamples;

Просмотреть файл

@ -82,8 +82,6 @@ using namespace mozilla::layers;
mozilla::LazyLogModule gMediaPipelineLog("MediaPipeline");
namespace mozilla {
extern mozilla::LogModule*
AudioLogModule();
class VideoConverterListener
{
@ -1933,7 +1931,6 @@ public:
, mTaskQueue(
new TaskQueue(GetMediaThreadPool(MediaThreadType::WEBRTC_DECODER),
"AudioPipelineListener"))
, mLastLog(0)
{
AddTrackToSource(mRate);
}
@ -2031,18 +2028,6 @@ private:
if (mSource->AppendToTrack(mTrackId, &segment)) {
framesNeeded -= frames;
mPlayedTicks += frames;
if (MOZ_LOG_TEST(AudioLogModule(), LogLevel::Debug)) {
if (mPlayedTicks > mLastLog + mRate) {
MOZ_LOG(AudioLogModule(),
LogLevel::Debug,
("%p: Inserting samples into track %d, total = "
"%" PRIu64,
(void*)this,
mTrackId,
mPlayedTicks));
mLastLog = mPlayedTicks;
}
}
} else {
MOZ_LOG(gMediaPipelineLog, LogLevel::Error, ("AppendToTrack failed"));
// we can't un-read the data, but that's ok since we don't want to
@ -2055,8 +2040,6 @@ private:
RefPtr<MediaSessionConduit> mConduit;
const TrackRate mRate;
const RefPtr<TaskQueue> mTaskQueue;
// Graph's current sampling rate
TrackTicks mLastLog = 0; // mPlayedTicks when we last logged
};
MediaPipelineReceiveAudio::MediaPipelineReceiveAudio(

Просмотреть файл

@ -10,9 +10,6 @@
#include "nspr.h"
#include "nsError.h"
void LogTime(AsyncLatencyLogger::LatencyLogIndex index, uint64_t b, int64_t c) {}
void LogLatency(AsyncLatencyLogger::LatencyLogIndex index, uint64_t b, int64_t c) {}
static const int AUDIO_BUFFER_SIZE = 1600;
static const int NUM_CHANNELS = 2;
static const int GRAPH_RATE = 16000;