Mirror of https://github.com/mozilla/gecko-dev.git
Merge inbound to mozilla-central. a=merge
Commit 26b40a4469
@@ -196,20 +196,19 @@
<stack id="dialogStack" hidden="true"/>
<vbox id="dialogTemplate" class="dialogOverlay" align="center" pack="center" topmost="true" hidden="true">
<groupbox class="dialogBox"
orient="vertical"
pack="end"
role="dialog"
aria-labelledby="dialogTitle">
<caption flex="1" align="center">
<label class="dialogTitle" flex="1"></label>
<vbox class="dialogBox"
pack="end"
role="dialog"
aria-labelledby="dialogTitle">
<hbox class="dialogTitleBar" align="center">
<label class="dialogTitle" flex="1"/>
<button class="dialogClose close-icon"
data-l10n-id="close-button"/>
</caption>
</hbox>
<browser class="dialogFrame"
autoscroll="false"
disablehistory="true"/>
</groupbox>
</vbox>
</vbox>
</stack>
</page>
@@ -18,10 +18,11 @@ function SubDialog({template, parentElement, id}) {
this._id = id;

this._overlay = template.cloneNode(true);
this._frame = this._overlay.querySelector(".dialogFrame");
this._box = this._overlay.querySelector(".dialogBox");
this._closeButton = this._overlay.querySelector(".dialogClose");
this._titleBar = this._overlay.querySelector(".dialogTitleBar");
this._titleElement = this._overlay.querySelector(".dialogTitle");
this._closeButton = this._overlay.querySelector(".dialogClose");
this._frame = this._overlay.querySelector(".dialogFrame");

this._overlay.id = `dialogOverlay-${id}`;
this._frame.setAttribute("name", `dialogFrame-${id}`);

@@ -281,16 +282,14 @@ SubDialog.prototype = {
// Do this on load to wait for the CSS to load and apply before calculating the size.
let docEl = this._frame.contentDocument.documentElement;

let groupBoxTitle = document.getAnonymousElementByAttribute(this._box, "class", "groupbox-title");
let groupBoxTitleHeight = groupBoxTitle.clientHeight +
parseFloat(getComputedStyle(groupBoxTitle).borderBottomWidth);
let titleBarHeight = this._titleBar.clientHeight +
parseFloat(getComputedStyle(this._titleBar).borderBottomWidth);

let groupBoxBody = document.getAnonymousElementByAttribute(this._box, "class", "groupbox-body");
// These are deduced from styles which we don't change, so it's safe to get them now:
let boxVerticalPadding = 2 * parseFloat(getComputedStyle(groupBoxBody).paddingTop);
let boxHorizontalPadding = 2 * parseFloat(getComputedStyle(groupBoxBody).paddingLeft);
let boxHorizontalBorder = 2 * parseFloat(getComputedStyle(this._box).borderLeftWidth);
let boxVerticalBorder = 2 * parseFloat(getComputedStyle(this._box).borderTopWidth);
let frameHorizontalMargin = 2 * parseFloat(getComputedStyle(this._frame).marginLeft);
let frameVerticalMargin = 2 * parseFloat(getComputedStyle(this._frame).marginTop);

// The difference between the frame and box shouldn't change, either:
let boxRect = this._box.getBoundingClientRect();

@@ -312,7 +311,7 @@ SubDialog.prototype = {
frameMinWidth;
this._frame.style.width = frameWidth;
this._box.style.minWidth = "calc(" +
(boxHorizontalBorder + boxHorizontalPadding) +
(boxHorizontalBorder + frameHorizontalMargin) +
"px + " + frameMinWidth + ")";

// Now do the same but for the height. We need to do this afterwards because otherwise

@@ -352,7 +351,7 @@ SubDialog.prototype = {

this._frame.style.height = frameHeight;
this._box.style.minHeight = "calc(" +
(boxVerticalBorder + groupBoxTitleHeight + boxVerticalPadding) +
(boxVerticalBorder + titleBarHeight + frameVerticalMargin) +
"px + " + frameMinHeight + ")";

this._overlay.dispatchEvent(new CustomEvent("dialogopen", {
@@ -61,14 +61,12 @@
onchange="gSetBackground.updateColor(this.value);"/>
</hbox>
#endif
<groupbox align="center">
<caption label="&preview.label;"/>
<stack>
<!-- if width and height are not present, they default to 300x150 and stretch the stack -->
<html:canvas id="screen" width="1" height="1"/>
<image id="monitor"/>
</stack>
</groupbox>

<stack>
<!-- if width and height are not present, they default to 300x150 and stretch the stack -->
<html:canvas id="screen" width="1" height="1" role="presentation"/>
<image id="monitor"/>
</stack>

#ifdef XP_MACOSX
<separator/>
@@ -8,7 +8,6 @@
<!ENTITY stretch.label "Stretch">
<!ENTITY fill.label "Fill">
<!ENTITY fit.label "Fit">
<!ENTITY preview.label "Preview">
<!ENTITY color.label "Color:">
<!ENTITY setDesktopBackground.title "Set Desktop Background">
<!ENTITY openDesktopPrefs.label "Open Desktop Preferences">
@@ -507,7 +507,7 @@ button > hbox > label {
min-width: 66ch;
}

.dialogBox > .groupbox-title {
.dialogTitleBar {
margin-top: 0;
padding: 3.5px 0;
background-color: #F1F1F1;

@@ -516,6 +517,7 @@ button > hbox > label {

.dialogTitle {
font-size: .9em;
font-weight: 600;
text-align: center;
-moz-user-select: none;
}

@@ -527,12 +528,8 @@ button > hbox > label {
min-height: auto;
}

.dialogBox > .groupbox-body {
-moz-appearance: none;
padding: 20px;
}

.dialogFrame {
margin: 20px;
-moz-box-flex: 1;
/* Default dialog dimensions */
width: 66ch;
@@ -432,44 +432,16 @@ void
Location::SetHref(const nsAString& aHref,
ErrorResult& aRv)
{
JSContext *cx = nsContentUtils::GetCurrentJSContext();
if (cx) {
aRv = SetHrefWithContext(cx, aHref, false);
return;
}

nsAutoString oldHref;
aRv = GetHref(oldHref);
if (NS_WARN_IF(aRv.Failed())) {
return;
}

nsCOMPtr<nsIURI> oldUri;
aRv = NS_NewURI(getter_AddRefs(oldUri), oldHref);
if (NS_WARN_IF(aRv.Failed())) {
return;
}

aRv = SetHrefWithBase(aHref, oldUri, false);
if (NS_WARN_IF(aRv.Failed())) {
return;
}
DoSetHref(aHref, false, aRv);
}

nsresult
Location::SetHrefWithContext(JSContext* cx, const nsAString& aHref,
bool aReplace)
void
Location::DoSetHref(const nsAString& aHref, bool aReplace, ErrorResult& aRv)
{
nsCOMPtr<nsIURI> base;

// Get the source of the caller
nsresult result = GetSourceBaseURL(cx, getter_AddRefs(base));
nsCOMPtr<nsIURI> base = GetSourceBaseURL();

if (NS_FAILED(result)) {
return result;
}

return SetHrefWithBase(aHref, base, aReplace);
aRv = SetHrefWithBase(aHref, base, aReplace);
}

nsresult
@@ -886,25 +858,7 @@ Location::Replace(const nsAString& aUrl,
nsIPrincipal& aSubjectPrincipal,
ErrorResult& aRv)
{
if (JSContext *cx = nsContentUtils::GetCurrentJSContext()) {
aRv = SetHrefWithContext(cx, aUrl, true);
return;
}

nsAutoString oldHref;
aRv = GetHref(oldHref);
if (NS_WARN_IF(aRv.Failed())) {
return;
}

nsCOMPtr<nsIURI> oldUri;

aRv = NS_NewURI(getter_AddRefs(oldUri), oldHref);
if (NS_WARN_IF(aRv.Failed())) {
return;
}

aRv = SetHrefWithBase(aUrl, oldUri, true);
DoSetHref(aUrl, true, aRv);
}

void
@@ -917,32 +871,12 @@ Location::Assign(const nsAString& aUrl,
return;
}

if (JSContext *cx = nsContentUtils::GetCurrentJSContext()) {
aRv = SetHrefWithContext(cx, aUrl, false);
return;
}

nsAutoString oldHref;
aRv = GetHref(oldHref);
if (NS_WARN_IF(aRv.Failed())) {
return;
}

nsCOMPtr<nsIURI> oldUri;
aRv = NS_NewURI(getter_AddRefs(oldUri), oldHref);
if (NS_WARN_IF(aRv.Failed())) {
return;
}

if (oldUri) {
aRv = SetHrefWithBase(aUrl, oldUri, false);
}
DoSetHref(aUrl, false, aRv);
}

nsresult
Location::GetSourceBaseURL(JSContext* cx, nsIURI** sourceURL)
already_AddRefed<nsIURI>
Location::GetSourceBaseURL()
{
*sourceURL = nullptr;
nsIDocument* doc = GetEntryDocument();
// If there's no entry document, we either have no Script Entry Point or one
// that isn't a DOM Window. This doesn't generally happen with the DOM, but

@@ -958,9 +892,8 @@ Location::GetSourceBaseURL(JSContext* cx, nsIURI** sourceURL)
doc = docShellWin->GetDoc();
}
}
NS_ENSURE_TRUE(doc, NS_OK);
*sourceURL = doc->GetBaseURI().take();
return NS_OK;
NS_ENSURE_TRUE(doc, nullptr);
return doc->GetBaseURI();
}

bool
@@ -169,10 +169,13 @@ protected:
nsresult SetURI(nsIURI* aURL, bool aReplace = false);
nsresult SetHrefWithBase(const nsAString& aHref, nsIURI* aBase,
bool aReplace);
nsresult SetHrefWithContext(JSContext* cx, const nsAString& aHref,
bool aReplace);

nsresult GetSourceBaseURL(JSContext* cx, nsIURI** sourceURL);
// Helper for Assign/SetHref/Replace
void DoSetHref(const nsAString& aHref, bool aReplace, ErrorResult& aRv);

// Get the base URL we should be using for our relative URL
// resolution for SetHref/Assign/Replace.
already_AddRefed<nsIURI> GetSourceBaseURL();
nsresult CheckURL(nsIURI *url, nsDocShellLoadInfo** aLoadInfo);
bool CallerSubsumes(nsIPrincipal* aSubjectPrincipal);
@@ -7,7 +7,6 @@

#include "AudioMixer.h"
#include "AudioChannelFormat.h"
#include "Latency.h"
#include <speex/speex_resampler.h>

namespace mozilla {

@@ -164,7 +163,7 @@ AudioSegment::Mix(AudioMixer& aMixer, uint32_t aOutputChannels,
}

void
AudioSegment::WriteTo(uint64_t aID, AudioMixer& aMixer, uint32_t aOutputChannels, uint32_t aSampleRate)
AudioSegment::WriteTo(AudioMixer& aMixer, uint32_t aOutputChannels, uint32_t aSampleRate)
{
AutoTArray<AudioDataValue,SilentChannel::AUDIO_PROCESSING_FRAMES*GUESS_AUDIO_CHANNELS> buf;
// Offset in the buffer that will be written to the mixer, in samples.

@@ -198,13 +197,6 @@ AudioSegment::WriteTo(uint64_t aID, AudioMixer& aMixer, uint32_t aOutputChannels
}

offset += c.mDuration * aOutputChannels;

if (!c.mTimeStamp.IsNull()) {
TimeStamp now = TimeStamp::Now();
// would be more efficient to c.mTimeStamp to ms on create time then pass here
LogTime(AsyncLatencyLogger::AudioMediaStreamTrack, aID,
(now - c.mTimeStamp).ToMilliseconds(), c.mTimeStamp);
}
}

if (offset) {
@@ -290,9 +290,6 @@ struct AudioChunk {
float mVolume = 1.0f; // volume multiplier to apply
// format of frames in mBuffer (or silence if mBuffer is null)
SampleFormat mBufferFormat = AUDIO_FORMAT_SILENCE;
#ifdef MOZILLA_INTERNAL_API
mozilla::TimeStamp mTimeStamp; // time at which this has been fetched from the MediaEngine
#endif
// principalHandle for the data in this chunk.
// This can be compared to an nsIPrincipal* when back on main thread.
PrincipalHandle mPrincipalHandle = PRINCIPAL_HANDLE_NONE;

@@ -384,9 +381,6 @@ public:
chunk->mChannelData.AppendElement(aChannelData[channel]);
}
chunk->mBufferFormat = AUDIO_FORMAT_FLOAT32;
#ifdef MOZILLA_INTERNAL_API
chunk->mTimeStamp = TimeStamp::Now();
#endif
chunk->mPrincipalHandle = aPrincipalHandle;
}
void AppendFrames(already_AddRefed<ThreadSharedObject> aBuffer,

@@ -402,9 +396,6 @@ public:
chunk->mChannelData.AppendElement(aChannelData[channel]);
}
chunk->mBufferFormat = AUDIO_FORMAT_S16;
#ifdef MOZILLA_INTERNAL_API
chunk->mTimeStamp = TimeStamp::Now();
#endif
chunk->mPrincipalHandle = aPrincipalHandle;

}

@@ -420,9 +411,6 @@ public:

chunk->mVolume = aChunk->mVolume;
chunk->mBufferFormat = aChunk->mBufferFormat;
#ifdef MOZILLA_INTERNAL_API
chunk->mTimeStamp = TimeStamp::Now();
#endif
chunk->mPrincipalHandle = aChunk->mPrincipalHandle;
return chunk;
}

@@ -430,7 +418,7 @@ public:
// Mix the segment into a mixer, interleaved. This is useful to output a
// segment to a system audio callback. It up or down mixes to aChannelCount
// channels.
void WriteTo(uint64_t aID, AudioMixer& aMixer, uint32_t aChannelCount,
void WriteTo(AudioMixer& aMixer, uint32_t aChannelCount,
uint32_t aSampleRate);
// Mix the segment into a mixer, keeping it planar, up or down mixing to
// aChannelCount channels.
@@ -1037,12 +1037,12 @@ AudioCallbackDriver::DataCallback(const AudioDataValue* aInputBuffer,
// StateCallback() receives an error for this stream while the main thread
// or another driver has control of the graph.
mShouldFallbackIfError = false;
RemoveMixerCallback();
// Update the flag before handing over the graph and going to drain.
mAudioThreadRunning = false;
// Enter shutdown mode. The stable-state handler will detect this
// and complete shutdown if the graph does not get restarted.
mGraphImpl->SignalMainThreadCleanup();
RemoveMixerCallback();
// Update the flag before go to drain
mAudioThreadRunning = false;
return aFrames - 1;
}

@@ -1062,8 +1062,8 @@ AudioCallbackDriver::DataCallback(const AudioDataValue* aInputBuffer,
}
LOG(LogLevel::Debug, ("%p: Switching to system driver.", GraphImpl()));
RemoveMixerCallback();
SwitchToNextDriver();
mAudioThreadRunning = false;
SwitchToNextDriver();
// Returning less than aFrames starts the draining and eventually stops the
// audio thread. This function will never get called again.
return aFrames - 1;
@@ -1,229 +0,0 @@
|
|||
/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
|
||||
/* vim:set ts=2 sw=2 sts=2 et cindent: */
|
||||
/* This Source Code Form is subject to the terms of the Mozilla Public
|
||||
* License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
|
||||
|
||||
#include "Latency.h"
|
||||
#include "nsThreadUtils.h"
|
||||
#include "mozilla/Logging.h"
|
||||
#include <cmath>
|
||||
#include <algorithm>
|
||||
|
||||
#include <mozilla/Services.h>
|
||||
#include <mozilla/StaticPtr.h>
|
||||
#include "nsContentUtils.h"
|
||||
|
||||
using namespace mozilla;
|
||||
|
||||
const char* LatencyLogIndex2Strings[] = {
|
||||
"Audio MediaStreamTrack",
|
||||
"Video MediaStreamTrack",
|
||||
"Cubeb",
|
||||
"AudioStream",
|
||||
"NetEQ",
|
||||
"AudioCapture Base",
|
||||
"AudioCapture Samples",
|
||||
"AudioTrackInsertion",
|
||||
"MediaPipeline Audio Insertion",
|
||||
"AudioTransmit",
|
||||
"AudioReceive",
|
||||
"MediaPipelineAudioPlayout",
|
||||
"MediaStream Create",
|
||||
"AudioStream Create",
|
||||
"AudioSendRTP",
|
||||
"AudioRecvRTP"
|
||||
};
|
||||
|
||||
static StaticRefPtr<AsyncLatencyLogger> gAsyncLogger;
|
||||
|
||||
LogModule*
|
||||
GetLatencyLog()
|
||||
{
|
||||
static LazyLogModule sLog("MediaLatency");
|
||||
return sLog;
|
||||
}
|
||||
|
||||
class LogEvent : public Runnable
|
||||
{
|
||||
public:
|
||||
LogEvent(AsyncLatencyLogger::LatencyLogIndex aIndex,
|
||||
uint64_t aID,
|
||||
int64_t aValue,
|
||||
TimeStamp aTimeStamp)
|
||||
: mozilla::Runnable("LogEvent")
|
||||
, mIndex(aIndex)
|
||||
, mID(aID)
|
||||
, mValue(aValue)
|
||||
, mTimeStamp(aTimeStamp)
|
||||
{}
|
||||
LogEvent(AsyncLatencyLogger::LatencyLogIndex aIndex,
|
||||
uint64_t aID,
|
||||
int64_t aValue)
|
||||
: mozilla::Runnable("LogEvent")
|
||||
, mIndex(aIndex)
|
||||
, mID(aID)
|
||||
, mValue(aValue)
|
||||
, mTimeStamp(TimeStamp())
|
||||
{}
|
||||
~LogEvent() {}
|
||||
|
||||
NS_IMETHOD Run() override {
|
||||
AsyncLatencyLogger::Get(true)->WriteLog(mIndex, mID, mValue, mTimeStamp);
|
||||
return NS_OK;
|
||||
}
|
||||
|
||||
protected:
|
||||
AsyncLatencyLogger::LatencyLogIndex mIndex;
|
||||
uint64_t mID;
|
||||
int64_t mValue;
|
||||
TimeStamp mTimeStamp;
|
||||
};
|
||||
|
||||
void LogLatency(AsyncLatencyLogger::LatencyLogIndex aIndex, uint64_t aID, int64_t aValue)
|
||||
{
|
||||
AsyncLatencyLogger::Get()->Log(aIndex, aID, aValue);
|
||||
}
|
||||
|
||||
void LogTime(AsyncLatencyLogger::LatencyLogIndex aIndex, uint64_t aID, int64_t aValue)
|
||||
{
|
||||
TimeStamp now = TimeStamp::Now();
|
||||
AsyncLatencyLogger::Get()->Log(aIndex, aID, aValue, now);
|
||||
}
|
||||
|
||||
void LogTime(AsyncLatencyLogger::LatencyLogIndex aIndex, uint64_t aID, int64_t aValue, TimeStamp &aTime)
|
||||
{
|
||||
AsyncLatencyLogger::Get()->Log(aIndex, aID, aValue, aTime);
|
||||
}
|
||||
|
||||
void LogTime(uint32_t aIndex, uint64_t aID, int64_t aValue)
|
||||
{
|
||||
LogTime(static_cast<AsyncLatencyLogger::LatencyLogIndex>(aIndex), aID, aValue);
|
||||
}
|
||||
void LogTime(uint32_t aIndex, uint64_t aID, int64_t aValue, TimeStamp &aTime)
|
||||
{
|
||||
LogTime(static_cast<AsyncLatencyLogger::LatencyLogIndex>(aIndex), aID, aValue, aTime);
|
||||
}
|
||||
void LogLatency(uint32_t aIndex, uint64_t aID, int64_t aValue)
|
||||
{
|
||||
LogLatency(static_cast<AsyncLatencyLogger::LatencyLogIndex>(aIndex), aID, aValue);
|
||||
}
|
||||
|
||||
/* static */
|
||||
void AsyncLatencyLogger::InitializeStatics()
|
||||
{
|
||||
NS_ASSERTION(NS_IsMainThread(), "Main thread only");
|
||||
|
||||
//Make sure that the underlying logger is allocated.
|
||||
GetLatencyLog();
|
||||
gAsyncLogger = new AsyncLatencyLogger();
|
||||
}
|
||||
|
||||
/* static */
|
||||
void AsyncLatencyLogger::ShutdownLogger()
|
||||
{
|
||||
gAsyncLogger = nullptr;
|
||||
}
|
||||
|
||||
/* static */
|
||||
AsyncLatencyLogger* AsyncLatencyLogger::Get(bool aStartTimer)
|
||||
{
|
||||
// Users don't generally null-check the result since we should live longer than they
|
||||
MOZ_ASSERT(gAsyncLogger);
|
||||
|
||||
if (aStartTimer) {
|
||||
gAsyncLogger->Init();
|
||||
}
|
||||
return gAsyncLogger;
|
||||
}
|
||||
|
||||
NS_IMPL_ISUPPORTS(AsyncLatencyLogger, nsIObserver)
|
||||
|
||||
AsyncLatencyLogger::AsyncLatencyLogger()
|
||||
: mThread(nullptr),
|
||||
mMutex("AsyncLatencyLogger")
|
||||
{
|
||||
NS_ASSERTION(NS_IsMainThread(), "Main thread only");
|
||||
nsContentUtils::RegisterShutdownObserver(this);
|
||||
}
|
||||
|
||||
AsyncLatencyLogger::~AsyncLatencyLogger()
|
||||
{
|
||||
AsyncLatencyLogger::Shutdown();
|
||||
}
|
||||
|
||||
void AsyncLatencyLogger::Shutdown()
|
||||
{
|
||||
nsContentUtils::UnregisterShutdownObserver(this);
|
||||
|
||||
MutexAutoLock lock(mMutex);
|
||||
if (mThread) {
|
||||
mThread->Shutdown();
|
||||
}
|
||||
mStart = TimeStamp(); // make sure we don't try to restart it for any reason
|
||||
}
|
||||
|
||||
void AsyncLatencyLogger::Init()
|
||||
{
|
||||
MutexAutoLock lock(mMutex);
|
||||
if (mStart.IsNull()) {
|
||||
nsresult rv = NS_NewNamedThread("Latency Logger", getter_AddRefs(mThread));
|
||||
NS_ENSURE_SUCCESS_VOID(rv);
|
||||
mStart = TimeStamp::Now();
|
||||
}
|
||||
}
|
||||
|
||||
void AsyncLatencyLogger::GetStartTime(TimeStamp &aStart)
|
||||
{
|
||||
MutexAutoLock lock(mMutex);
|
||||
aStart = mStart;
|
||||
}
|
||||
|
||||
nsresult
|
||||
AsyncLatencyLogger::Observe(nsISupports* aSubject, const char* aTopic,
|
||||
const char16_t* aData)
|
||||
{
|
||||
MOZ_ASSERT(NS_IsMainThread());
|
||||
if (strcmp(aTopic, NS_XPCOM_SHUTDOWN_OBSERVER_ID) == 0) {
|
||||
Shutdown();
|
||||
}
|
||||
return NS_OK;
|
||||
}
|
||||
|
||||
// aID is a sub-identifier (in particular a specific MediaStramTrack)
|
||||
void AsyncLatencyLogger::WriteLog(LatencyLogIndex aIndex, uint64_t aID, int64_t aValue,
|
||||
TimeStamp aTimeStamp)
|
||||
{
|
||||
if (aTimeStamp.IsNull()) {
|
||||
MOZ_LOG(GetLatencyLog(), LogLevel::Debug,
|
||||
("Latency: %s,%" PRIu64 ",%" PRId64 ",%" PRId64,
|
||||
LatencyLogIndex2Strings[aIndex], aID, GetTimeStamp(), aValue));
|
||||
} else {
|
||||
MOZ_LOG(GetLatencyLog(), LogLevel::Debug,
|
||||
("Latency: %s,%" PRIu64 ",%" PRId64 ",%" PRId64 ",%" PRId64,
|
||||
LatencyLogIndex2Strings[aIndex], aID, GetTimeStamp(), aValue,
|
||||
static_cast<int64_t>((aTimeStamp - gAsyncLogger->mStart).ToMilliseconds())));
|
||||
}
|
||||
}
|
||||
|
||||
int64_t AsyncLatencyLogger::GetTimeStamp()
|
||||
{
|
||||
TimeDuration t = TimeStamp::Now() - mStart;
|
||||
return t.ToMilliseconds();
|
||||
}
|
||||
|
||||
void AsyncLatencyLogger::Log(LatencyLogIndex aIndex, uint64_t aID, int64_t aValue)
|
||||
{
|
||||
TimeStamp null;
|
||||
Log(aIndex, aID, aValue, null);
|
||||
}
|
||||
|
||||
void AsyncLatencyLogger::Log(LatencyLogIndex aIndex, uint64_t aID, int64_t aValue, TimeStamp &aTime)
|
||||
{
|
||||
if (MOZ_LOG_TEST(GetLatencyLog(), LogLevel::Debug)) {
|
||||
nsCOMPtr<nsIRunnable> event = new LogEvent(aIndex, aID, aValue, aTime);
|
||||
if (mThread) {
|
||||
mThread->Dispatch(event, NS_DISPATCH_NORMAL);
|
||||
}
|
||||
}
|
||||
}
|
|
@@ -1,99 +0,0 @@
|
|||
/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
|
||||
/* vim:set ts=2 sw=2 sts=2 et cindent: */
|
||||
/* This Source Code Form is subject to the terms of the Mozilla Public
|
||||
* License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
|
||||
|
||||
#ifndef MOZILLA_LATENCY_H
|
||||
#define MOZILLA_LATENCY_H
|
||||
|
||||
#include "mozilla/TimeStamp.h"
|
||||
#include "mozilla/Logging.h"
|
||||
#include "nsCOMPtr.h"
|
||||
#include "nsIThread.h"
|
||||
#include "mozilla/Monitor.h"
|
||||
#include "nsISupportsImpl.h"
|
||||
#include "nsIObserver.h"
|
||||
|
||||
class AsyncLatencyLogger;
|
||||
|
||||
mozilla::LogModule* GetLatencyLog();
|
||||
|
||||
// This class is a singleton. It is refcounted.
|
||||
class AsyncLatencyLogger : public nsIObserver
|
||||
{
|
||||
NS_DECL_THREADSAFE_ISUPPORTS
|
||||
NS_DECL_NSIOBSERVER
|
||||
|
||||
public:
|
||||
|
||||
enum LatencyLogIndex {
|
||||
AudioMediaStreamTrack = 0,
|
||||
VideoMediaStreamTrack,
|
||||
Cubeb,
|
||||
AudioStream,
|
||||
NetEQ,
|
||||
AudioCaptureBase, // base time for capturing an audio stream
|
||||
AudioCapture, // records number of samples captured and the time
|
||||
AudioTrackInsertion, // # of samples inserted into a mediastreamtrack and the time
|
||||
MediaPipelineAudioInsertion, // Timestamp and time of timestamp
|
||||
AudioTransmit, // Timestamp and socket send time
|
||||
AudioReceive, // Timestamp and receive time
|
||||
MediaPipelineAudioPlayout, // Timestamp and playout into MST time
|
||||
MediaStreamCreate, // Source and TrackUnion streams
|
||||
AudioStreamCreate, // TrackUnion stream and AudioStream
|
||||
AudioSendRTP,
|
||||
AudioRecvRTP,
|
||||
_MAX_INDEX
|
||||
};
|
||||
// Log with a null timestamp
|
||||
void Log(LatencyLogIndex index, uint64_t aID, int64_t aValue);
|
||||
// Log with a timestamp
|
||||
void Log(LatencyLogIndex index, uint64_t aID, int64_t aValue,
|
||||
mozilla::TimeStamp &aTime);
|
||||
// Write a log message to NSPR
|
||||
void WriteLog(LatencyLogIndex index, uint64_t aID, int64_t aValue,
|
||||
mozilla::TimeStamp timestamp);
|
||||
// Get the base time used by the logger for delta calculations
|
||||
void GetStartTime(mozilla::TimeStamp &aStart);
|
||||
|
||||
static AsyncLatencyLogger* Get(bool aStartTimer = false);
|
||||
static void InitializeStatics();
|
||||
// After this is called, the global log object may go away
|
||||
static void ShutdownLogger();
|
||||
private:
|
||||
AsyncLatencyLogger();
|
||||
virtual ~AsyncLatencyLogger();
|
||||
int64_t GetTimeStamp();
|
||||
void Init();
|
||||
// Shut down the thread associated with this, and make sure it doesn't
|
||||
// start up again.
|
||||
void Shutdown();
|
||||
// The thread on which the IO happens
|
||||
nsCOMPtr<nsIThread> mThread;
|
||||
// This can be initialized on multiple threads, but is protected by a
|
||||
// monitor. After the initialization phase, it is accessed on the log
|
||||
// thread only.
|
||||
mozilla::TimeStamp mStart;
|
||||
// This monitor protects mStart and mMediaLatencyLog for the
|
||||
// initialization sequence. It is initialized at layout startup, and
|
||||
// destroyed at layout shutdown.
|
||||
mozilla::Mutex mMutex;
|
||||
};
|
||||
|
||||
// need uint32_t versions for access from webrtc/trunk code
|
||||
// Log without a time delta
|
||||
void LogLatency(AsyncLatencyLogger::LatencyLogIndex index, uint64_t aID, int64_t aValue);
|
||||
void LogLatency(uint32_t index, uint64_t aID, int64_t aValue);
|
||||
// Log TimeStamp::Now() (as delta)
|
||||
void LogTime(AsyncLatencyLogger::LatencyLogIndex index, uint64_t aID, int64_t aValue);
|
||||
void LogTime(uint32_t index, uint64_t aID, int64_t aValue);
|
||||
// Log the specified time (as delta)
|
||||
void LogTime(AsyncLatencyLogger::LatencyLogIndex index, uint64_t aID, int64_t aValue,
|
||||
mozilla::TimeStamp &aTime);
|
||||
|
||||
// For generating unique-ish ids for logged sources
|
||||
#define LATENCY_STREAM_ID(source, trackID) \
|
||||
((((uint64_t) (source)) & ~0x0F) | (trackID))
|
||||
|
||||
#endif
|
|
@@ -59,7 +59,6 @@
#include "MediaTrackConstraints.h"
#include "VideoUtils.h"
#include "ThreadSafeRefcountingWithMainThreadDestruction.h"
#include "Latency.h"
#include "nsProxyRelease.h"
#include "nsVariant.h"
@@ -13,7 +13,6 @@
#include "mozilla/TimeStamp.h"
#endif
#include <algorithm>
#include "Latency.h"

namespace mozilla {

@@ -331,9 +330,6 @@ public:
} else {
mChunks.InsertElementAt(0)->SetNull(aDuration);
}
#ifdef MOZILLA_INTERNAL_API
mChunks[0].mTimeStamp = mozilla::TimeStamp::Now();
#endif
mDuration += aDuration;
}
void AppendNullData(StreamTime aDuration) override

@@ -418,12 +414,6 @@ public:
RemoveLeading(aDuration, 0);
}

#ifdef MOZILLA_INTERNAL_API
void GetStartTime(TimeStamp &aTime) {
aTime = mChunks[0].mTimeStamp;
}
#endif

size_t SizeOfExcludingThis(MallocSizeOf aMallocSizeOf) const override
{
size_t amount = mChunks.ShallowSizeOfExcludingThis(aMallocSizeOf);

@@ -455,9 +445,6 @@ protected:
MediaSegmentBase(MediaSegmentBase&& aSegment)
: MediaSegment(std::move(aSegment))
, mChunks()
#ifdef MOZILLA_INTERNAL_API
, mTimeStamp(std::move(aSegment.mTimeStamp))
#endif
{
mChunks.SwapElements(aSegment.mChunks);
MOZ_ASSERT(mChunks.Capacity() >= DEFAULT_SEGMENT_CAPACITY,

@@ -576,9 +563,6 @@ protected:
}

AutoTArray<Chunk, DEFAULT_SEGMENT_CAPACITY> mChunks;
#ifdef MOZILLA_INTERNAL_API
mozilla::TimeStamp mTimeStamp;
#endif
};

} // namespace mozilla
@@ -783,11 +783,7 @@ MediaStreamGraphImpl::PlayAudio(MediaStream* aStream)
}
audioOutput.mLastTickWritten = offset;

// Need unique id for stream & track - and we want it to match the inserter
output.WriteTo(LATENCY_STREAM_ID(aStream, track->GetID()),
mMixer,
AudioOutputChannelCount(),
mSampleRate);
output.WriteTo(mMixer, AudioOutputChannelCount(), mSampleRate);
}
return ticksWritten;
}

@@ -2834,7 +2830,6 @@ SourceMediaStream::SourceMediaStream()
, mUpdateKnownTracksTime(0)
, mPullEnabled(false)
, mFinishPending(false)
, mNeedsMixing(false)
{
}

@@ -3328,7 +3323,7 @@ SourceMediaStream::GetEndOfAppendedData(TrackID aID)
if (track) {
return track->mEndOfFlushedData + track->mData->GetDuration();
}
NS_ERROR("Track not found");
MOZ_CRASH("Track not found");
return 0;
}

@@ -3428,20 +3423,6 @@ SourceMediaStream::~SourceMediaStream()
{
}

void
SourceMediaStream::RegisterForAudioMixing()
{
MutexAutoLock lock(mMutex);
mNeedsMixing = true;
}

bool
SourceMediaStream::NeedsMixing()
{
MutexAutoLock lock(mMutex);
return mNeedsMixing;
}

bool
SourceMediaStream::HasPendingAudioTrack()
{

@@ -3729,7 +3710,6 @@ MediaStreamGraphImpl::MediaStreamGraphImpl(GraphDriverType aDriverRequested,
, mRealtime(aDriverRequested != OFFLINE_THREAD_DRIVER)
, mNonRealtimeProcessing(false)
, mStreamOrderDirty(false)
, mLatencyLog(AsyncLatencyLogger::Get())
, mAbstractMainThread(aMainThread)
, mSelfRef(this)
, mOutputChannels(std::min<uint32_t>(8, CubebUtils::MaxNumberOfChannels()))
@@ -830,8 +830,6 @@ public:
*/
void EndAllTrackAndFinish();

void RegisterForAudioMixing();

/**
* Returns true if this SourceMediaStream contains at least one audio track
* that is in pending state.

@@ -938,7 +936,6 @@ protected:
nsTArray<TrackBound<DirectMediaStreamTrackListener>> mDirectTrackListeners;
bool mPullEnabled;
bool mFinishPending;
bool mNeedsMixing;
};

/**
@@ -10,7 +10,6 @@

#include "AudioMixer.h"
#include "GraphDriver.h"
#include "Latency.h"
#include "mozilla/Atomics.h"
#include "mozilla/Monitor.h"
#include "mozilla/Services.h"

@@ -886,10 +885,6 @@ public:
* blocking order.
*/
bool mStreamOrderDirty;
/**
* Hold a ref to the Latency logger
*/
RefPtr<AsyncLatencyLogger> mLatencyLog;
AudioMixer mMixer;
const RefPtr<AbstractThread> mAbstractMainThread;
@@ -119,7 +119,6 @@ EXPORTS += [
'FrameStatistics.h',
'ImageToI420.h',
'Intervals.h',
'Latency.h',
'MediaCache.h',
'MediaContainerType.h',
'MediaData.h',

@@ -239,7 +238,6 @@ UNIFIED_SOURCES += [
'GetUserMediaRequest.cpp',
'GraphDriver.cpp',
'ImageToI420.cpp',
'Latency.cpp',
'MediaCache.cpp',
'MediaContainerType.cpp',
'MediaData.cpp',
@@ -1,104 +0,0 @@
|
|||
#!/usr/bin/env python
|
||||
# graph_latency.py - graph media latency
|
||||
#
|
||||
# This Source Code Form is subject to the terms of the Mozilla Public
|
||||
# License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
|
||||
|
||||
# needs matplotlib (sudo aptitude install python-matplotlib)
|
||||
|
||||
import matplotlib.pyplot as plt
|
||||
from matplotlib import rc
|
||||
import sys
|
||||
from pprint import pprint
|
||||
import re
|
||||
|
||||
|
||||
# FIX! needs to be sum of a single mediastreamtrack and any output overhead for it
|
||||
# So there is one sum per MST
|
||||
def compute_sum(data):
|
||||
'Compute the sum for each timestamp. This expects the output of parse_data.'
|
||||
last_values = {}
|
||||
out = ([],[])
|
||||
|
||||
for i in data:
|
||||
if i[0] not in last_values.keys():
|
||||
last_values[i[0]] = 0
|
||||
last_values[i[0]] = float(i[3])
|
||||
print last_values
|
||||
out[0].append(i[2])
|
||||
out[1].append(sum(last_values.values()))
|
||||
return out
|
||||
|
||||
|
||||
def clean_data(raw_data):
|
||||
'''
|
||||
Remove the PR_LOG cruft at the beginning of each line and returns a list of
|
||||
tuple.
|
||||
'''
|
||||
out = []
|
||||
for line in raw_data:
|
||||
match = re.match(r'(.*)#(.*)', line)
|
||||
if match:
|
||||
continue
|
||||
else:
|
||||
out.append(line.split(": ")[1])
|
||||
return out
|
||||
|
||||
# returns a list of tuples
|
||||
def parse_data(raw_lines):
|
||||
'''
|
||||
Split each line by , and put every bit in a tuple.
|
||||
'''
|
||||
out = []
|
||||
for line in raw_lines:
|
||||
out.append(line.split(','))
|
||||
return out
|
||||
|
||||
if len(sys.argv) == 3:
|
||||
name = sys.argv[1]
|
||||
channels = int(sys.argv[2])
|
||||
else:
|
||||
print sys.argv[0] + "latency_log"
|
||||
|
||||
try:
|
||||
f = open(sys.argv[1])
|
||||
except:
|
||||
print "cannot open " + name
|
||||
|
||||
raw_lines = f.readlines()
|
||||
lines = clean_data(raw_lines)
|
||||
data = parse_data(lines)
|
||||
|
||||
final_data = {}
|
||||
|
||||
for tupl in data:
|
||||
name = tupl[0]
|
||||
if tupl[1] != 0:
|
||||
name = name+tupl[1]
|
||||
if name not in final_data.keys():
|
||||
final_data[name] = ([], [])
|
||||
# sanity-check values
|
||||
if float(tupl[3]) < 10*1000:
|
||||
final_data[name][0].append(float(tupl[2]))
|
||||
final_data[name][1].append(float(tupl[3]))
|
||||
|
||||
#overall = compute_sum(data)
|
||||
#final_data["overall"] = overall
|
||||
|
||||
pprint(final_data)
|
||||
|
||||
fig = plt.figure()
|
||||
for i in final_data.keys():
|
||||
plt.plot(final_data[i][0], final_data[i][1], label=i)
|
||||
|
||||
plt.legend()
|
||||
plt.suptitle("Latency in ms (y-axis) against time in ms (x-axis).")
|
||||
|
||||
size = fig.get_size_inches()
|
||||
# make it gigantic so we can see things. sometimes, if the graph is too big,
|
||||
# this errors. reduce the factor so it stays under 2**15.
|
||||
fig.set_size_inches((size[0]*10, size[1]*2))
|
||||
name = sys.argv[1][:-4] + ".pdf"
|
||||
fig.savefig(name)
|
||||
|
|
@@ -11,6 +11,7 @@
#include "CSFLog.h"
#include "MediaEngineTabVideoSource.h"
#include "MediaEngineRemoteVideoSource.h"
#include "MediaEngineWebRTCAudio.h"
#include "MediaTrackConstraints.h"
#include "mozilla/dom/MediaDeviceInfo.h"
#include "mozilla/Logging.h"
@@ -1,3 +1,4 @@
/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this file,
* You can obtain one at http://mozilla.org/MPL/2.0/. */
@@ -5,52 +6,40 @@
|
|||
#ifndef MEDIAENGINEWEBRTC_H_
|
||||
#define MEDIAENGINEWEBRTC_H_
|
||||
|
||||
#include "AudioPacketizer.h"
|
||||
#include "AudioSegment.h"
|
||||
#include "AudioDeviceInfo.h"
|
||||
#include "CamerasChild.h"
|
||||
#include "cubeb/cubeb.h"
|
||||
#include "CubebUtils.h"
|
||||
#include "DOMMediaStream.h"
|
||||
#include "ipc/IPCMessageUtils.h"
|
||||
#include "MediaEngine.h"
|
||||
#include "MediaEnginePrefs.h"
|
||||
#include "MediaEngineSource.h"
|
||||
#include "MediaEngineWrapper.h"
|
||||
#include "MediaStreamGraph.h"
|
||||
#include "mozilla/dom/File.h"
|
||||
#include "mozilla/dom/MediaStreamTrackBinding.h"
|
||||
#include "NullTransport.h"
|
||||
#include "StreamTracks.h"
|
||||
#include "VideoSegment.h"
|
||||
#include "VideoUtils.h"
|
||||
#include "cubeb/cubeb.h"
|
||||
#include "ipc/IPCMessageUtils.h"
|
||||
#include "mozilla/Mutex.h"
|
||||
#include "mozilla/Mutex.h"
|
||||
#include "mozilla/Sprintf.h"
|
||||
#include "mozilla/StaticMutex.h"
|
||||
#include "mozilla/UniquePtr.h"
|
||||
#include "mozilla/dom/File.h"
|
||||
#include "mozilla/dom/MediaStreamTrackBinding.h"
|
||||
#include "nsAutoPtr.h"
|
||||
#include "nsComponentManagerUtils.h"
|
||||
#include "nsCOMPtr.h"
|
||||
#include "nsComponentManagerUtils.h"
|
||||
#include "nsDirectoryServiceDefs.h"
|
||||
#include "nsIThread.h"
|
||||
#include "nsIRunnable.h"
|
||||
#include "nsIThread.h"
|
||||
#include "nsRefPtrHashtable.h"
|
||||
#include "nsThreadUtils.h"
|
||||
#include "NullTransport.h"
|
||||
#include "prcvar.h"
|
||||
#include "prthread.h"
|
||||
#include "StreamTracks.h"
|
||||
#include "VideoSegment.h"
|
||||
#include "VideoUtils.h"
|
||||
|
||||
// WebRTC library includes follow
|
||||
// Audio Engine
|
||||
#include "webrtc/voice_engine/include/voe_base.h"
|
||||
#include "webrtc/voice_engine/include/voe_codec.h"
|
||||
#include "webrtc/voice_engine/include/voe_network.h"
|
||||
#include "webrtc/voice_engine/include/voe_audio_processing.h"
|
||||
#include "webrtc/voice_engine/include/voe_volume_control.h"
|
||||
#include "webrtc/voice_engine/include/voe_external_media.h"
|
||||
#include "webrtc/voice_engine/include/voe_audio_processing.h"
|
||||
#include "webrtc/modules/audio_device/include/audio_device.h"
|
||||
#include "webrtc/modules/audio_processing/include/audio_processing.h"
|
||||
// Video Engine
|
||||
// conflicts with #include of scoped_ptr.h
|
||||
#undef FF
|
||||
|
@@ -58,70 +47,6 @@
|
|||
|
||||
namespace mozilla {
|
||||
|
||||
class MediaEngineWebRTCMicrophoneSource;
|
||||
|
||||
class MediaEngineWebRTCAudioCaptureSource : public MediaEngineSource
|
||||
{
|
||||
public:
|
||||
explicit MediaEngineWebRTCAudioCaptureSource(const char* aUuid)
|
||||
{
|
||||
}
|
||||
nsString GetName() const override;
|
||||
nsCString GetUUID() const override;
|
||||
nsresult Allocate(const dom::MediaTrackConstraints &aConstraints,
|
||||
const MediaEnginePrefs &aPrefs,
|
||||
const nsString& aDeviceId,
|
||||
const ipc::PrincipalInfo& aPrincipalInfo,
|
||||
AllocationHandle** aOutHandle,
|
||||
const char** aOutBadConstraint) override
|
||||
{
|
||||
// Nothing to do here, everything is managed in MediaManager.cpp
|
||||
*aOutHandle = nullptr;
|
||||
return NS_OK;
|
||||
}
|
||||
nsresult Deallocate(const RefPtr<const AllocationHandle>& aHandle) override
|
||||
{
|
||||
// Nothing to do here, everything is managed in MediaManager.cpp
|
||||
MOZ_ASSERT(!aHandle);
|
||||
return NS_OK;
|
||||
}
|
||||
nsresult SetTrack(const RefPtr<const AllocationHandle>& aHandle,
|
||||
const RefPtr<SourceMediaStream>& aStream,
|
||||
TrackID aTrackID,
|
||||
const PrincipalHandle& aPrincipal) override;
|
||||
nsresult Start(const RefPtr<const AllocationHandle>& aHandle) override;
|
||||
nsresult Stop(const RefPtr<const AllocationHandle>& aHandle) override;
|
||||
nsresult Reconfigure(const RefPtr<AllocationHandle>& aHandle,
|
||||
const dom::MediaTrackConstraints& aConstraints,
|
||||
const MediaEnginePrefs& aPrefs,
|
||||
const nsString& aDeviceId,
|
||||
const char** aOutBadConstraint) override;
|
||||
|
||||
void Pull(const RefPtr<const AllocationHandle>& aHandle,
|
||||
const RefPtr<SourceMediaStream>& aStream,
|
||||
TrackID aTrackID,
|
||||
StreamTime aDesiredTime,
|
||||
const PrincipalHandle& aPrincipalHandle) override
|
||||
{}
|
||||
|
||||
dom::MediaSourceEnum GetMediaSource() const override
|
||||
{
|
||||
return dom::MediaSourceEnum::AudioCapture;
|
||||
}
|
||||
|
||||
nsresult TakePhoto(MediaEnginePhotoCallback* aCallback) override
|
||||
{
|
||||
return NS_ERROR_NOT_IMPLEMENTED;
|
||||
}
|
||||
|
||||
uint32_t GetBestFitnessDistance(
|
||||
const nsTArray<const NormalizedConstraintSet*>& aConstraintSets,
|
||||
const nsString& aDeviceId) const override;
|
||||
|
||||
protected:
|
||||
virtual ~MediaEngineWebRTCAudioCaptureSource() = default;
|
||||
};
|
||||
|
||||
// This class implements a cache for accessing the audio device list. It can be
|
||||
// accessed on any thread.
|
||||
class CubebDeviceEnumerator final
|
||||
|
@@ -160,293 +85,6 @@ private:
|
|||
bool mManualInvalidation;
|
||||
};
|
||||
|
||||
// This class is instantiated on the MediaManager thread, and is then sent and
|
||||
// only ever access again on the MediaStreamGraph.
|
||||
class WebRTCAudioDataListener : public AudioDataListener
|
||||
{
|
||||
protected:
|
||||
// Protected destructor, to discourage deletion outside of Release():
|
||||
virtual ~WebRTCAudioDataListener() {}
|
||||
|
||||
public:
|
||||
explicit WebRTCAudioDataListener(MediaEngineWebRTCMicrophoneSource* aAudioSource)
|
||||
: mAudioSource(aAudioSource)
|
||||
{}
|
||||
|
||||
// AudioDataListenerInterface methods
|
||||
void NotifyOutputData(MediaStreamGraphImpl* aGraph,
|
||||
AudioDataValue* aBuffer,
|
||||
size_t aFrames,
|
||||
TrackRate aRate,
|
||||
uint32_t aChannels) override;
|
||||
|
||||
void NotifyInputData(MediaStreamGraphImpl* aGraph,
|
||||
const AudioDataValue* aBuffer,
|
||||
size_t aFrames,
|
||||
TrackRate aRate,
|
||||
uint32_t aChannels) override;
|
||||
|
||||
uint32_t RequestedInputChannelCount(MediaStreamGraphImpl* aGraph) override;
|
||||
|
||||
void DeviceChanged(MediaStreamGraphImpl* aGraph) override;
|
||||
|
||||
void Disconnect(MediaStreamGraphImpl* aGraph) override;
|
||||
|
||||
private:
|
||||
RefPtr<MediaEngineWebRTCMicrophoneSource> mAudioSource;
|
||||
};
|
||||
|
||||
class MediaEngineWebRTCMicrophoneSource : public MediaEngineSource,
|
||||
public AudioDataListenerInterface
|
||||
{
|
||||
public:
|
||||
MediaEngineWebRTCMicrophoneSource(RefPtr<AudioDeviceInfo> aInfo,
|
||||
const nsString& name,
|
||||
const nsCString& uuid,
|
||||
uint32_t maxChannelCount,
|
||||
bool aDelayAgnostic,
|
||||
bool aExtendedFilter);
|
||||
|
||||
bool RequiresSharing() const override
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
||||
nsString GetName() const override;
|
||||
nsCString GetUUID() const override;
|
||||
|
||||
nsresult Allocate(const dom::MediaTrackConstraints &aConstraints,
|
||||
const MediaEnginePrefs& aPrefs,
|
||||
const nsString& aDeviceId,
|
||||
const ipc::PrincipalInfo& aPrincipalInfo,
|
||||
AllocationHandle** aOutHandle,
|
||||
const char** aOutBadConstraint) override;
|
||||
nsresult Deallocate(const RefPtr<const AllocationHandle>& aHandle) override;
|
||||
nsresult SetTrack(const RefPtr<const AllocationHandle>& aHandle,
|
||||
const RefPtr<SourceMediaStream>& aStream,
|
||||
TrackID aTrackID,
|
||||
const PrincipalHandle& aPrincipal) override;
|
||||
nsresult Start(const RefPtr<const AllocationHandle>& aHandle) override;
|
||||
nsresult Stop(const RefPtr<const AllocationHandle>& aHandle) override;
|
||||
nsresult Reconfigure(const RefPtr<AllocationHandle>& aHandle,
|
||||
const dom::MediaTrackConstraints& aConstraints,
|
||||
const MediaEnginePrefs& aPrefs,
|
||||
const nsString& aDeviceId,
|
||||
const char** aOutBadConstraint) override;
|
||||
|
||||
/**
|
||||
* Assigns the current settings of the capture to aOutSettings.
|
||||
* Main thread only.
|
||||
*/
|
||||
void GetSettings(dom::MediaTrackSettings& aOutSettings) const override;
|
||||
|
||||
void Pull(const RefPtr<const AllocationHandle>& aHandle,
|
||||
const RefPtr<SourceMediaStream>& aStream,
|
||||
TrackID aTrackID,
|
||||
StreamTime aDesiredTime,
|
||||
const PrincipalHandle& aPrincipalHandle) override;
|
||||
|
||||
// AudioDataListenerInterface methods
|
||||
void NotifyOutputData(MediaStreamGraphImpl* aGraph,
|
||||
AudioDataValue* aBuffer, size_t aFrames,
|
||||
TrackRate aRate, uint32_t aChannels) override;
|
||||
void NotifyInputData(MediaStreamGraphImpl* aGraph,
|
||||
const AudioDataValue* aBuffer, size_t aFrames,
|
||||
TrackRate aRate, uint32_t aChannels) override;
|
||||
|
||||
void DeviceChanged(MediaStreamGraphImpl* aGraph) override;
|
||||
|
||||
uint32_t RequestedInputChannelCount(MediaStreamGraphImpl* aGraph) override
|
||||
{
|
||||
return GetRequestedInputChannelCount(aGraph);
|
||||
}
|
||||
|
||||
void Disconnect(MediaStreamGraphImpl* aGraph) override;
|
||||
|
||||
dom::MediaSourceEnum GetMediaSource() const override
|
||||
{
|
||||
return dom::MediaSourceEnum::Microphone;
|
||||
}
|
||||
|
||||
nsresult TakePhoto(MediaEnginePhotoCallback* aCallback) override
|
||||
{
|
||||
return NS_ERROR_NOT_IMPLEMENTED;
|
||||
}
|
||||
|
||||
uint32_t GetBestFitnessDistance(
|
||||
const nsTArray<const NormalizedConstraintSet*>& aConstraintSets,
|
||||
const nsString& aDeviceId) const override;
|
||||
|
||||
void Shutdown() override;
|
||||
|
||||
protected:
|
||||
~MediaEngineWebRTCMicrophoneSource() {}
|
||||
|
||||
private:
|
||||
/**
|
||||
* Representation of data tied to an AllocationHandle rather than to the source.
|
||||
*/
|
||||
struct Allocation {
|
||||
Allocation() = delete;
|
||||
explicit Allocation(const RefPtr<AllocationHandle>& aHandle);
|
||||
~Allocation();
|
||||
|
||||
#ifdef DEBUG
|
||||
// The MSGImpl::IterationEnd() of the last time we appended data from an
|
||||
// audio callback.
|
||||
// Guarded by MediaEngineWebRTCMicrophoneSource::mMutex.
|
||||
GraphTime mLastCallbackAppendTime = 0;
|
||||
#endif
|
||||
// Set to false by Start(). Becomes true after the first time we append real
|
||||
// audio frames from the audio callback.
|
||||
// Guarded by MediaEngineWebRTCMicrophoneSource::mMutex.
|
||||
bool mLiveFramesAppended = false;
|
||||
|
||||
// Set to false by Start(). Becomes true after the first time we append
|
||||
// silence *after* the first audio callback has appended real frames.
|
||||
// Guarded by MediaEngineWebRTCMicrophoneSource::mMutex.
|
||||
bool mLiveSilenceAppended = false;
|
||||
|
||||
const RefPtr<AllocationHandle> mHandle;
|
||||
RefPtr<SourceMediaStream> mStream;
|
||||
TrackID mTrackID = TRACK_NONE;
|
||||
PrincipalHandle mPrincipal = PRINCIPAL_HANDLE_NONE;
|
||||
bool mEnabled = false;
|
||||
};
|
||||
|
||||
/**
|
||||
* Used with nsTArray<Allocation>::IndexOf to locate an Allocation by a handle.
|
||||
*/
|
||||
class AllocationHandleComparator {
|
||||
public:
|
||||
bool Equals(const Allocation& aAllocation,
|
||||
const RefPtr<const AllocationHandle>& aHandle) const
|
||||
{
|
||||
return aHandle == aAllocation.mHandle;
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Reevaluates the aggregated constraints of all allocations and restarts the
|
||||
* underlying device if necessary.
|
||||
*
|
||||
* If the given AllocationHandle was already registered, its constraints will
|
||||
* be updated before reevaluation. If not, they will be added before
|
||||
* reevaluation.
|
||||
*/
|
||||
nsresult ReevaluateAllocation(const RefPtr<AllocationHandle>& aHandle,
|
||||
const NormalizedConstraints* aConstraintsUpdate,
|
||||
const MediaEnginePrefs& aPrefs,
|
||||
const nsString& aDeviceId,
|
||||
const char** aOutBadConstraint);
|
||||
|
||||
/**
|
||||
* Updates the underlying (single) device with the aggregated constraints
|
||||
* aNetConstraints. If the chosen settings for the device changes based on
|
||||
* these new constraints, and capture is active, the device will be restarted.
|
||||
*/
|
||||
nsresult UpdateSingleSource(const RefPtr<const AllocationHandle>& aHandle,
|
||||
const NormalizedConstraints& aNetConstraints,
|
||||
const MediaEnginePrefs& aPrefs,
|
||||
const nsString& aDeviceId,
|
||||
const char** aOutBadConstraint);
|
||||
|
||||
|
||||
void UpdateAECSettingsIfNeeded(bool aEnable, webrtc::EcModes aMode);
|
||||
void UpdateAGCSettingsIfNeeded(bool aEnable, webrtc::AgcModes aMode);
|
||||
void UpdateNSSettingsIfNeeded(bool aEnable, webrtc::NsModes aMode);
|
||||
|
||||
void ApplySettings(const MediaEnginePrefs& aPrefs,
|
||||
RefPtr<MediaStreamGraphImpl> aGraph);
|
||||
|
||||
bool HasEnabledTrack() const;
|
||||
|
||||
template<typename T>
|
||||
void InsertInGraph(const T* aBuffer,
|
||||
size_t aFrames,
|
||||
uint32_t aChannels);
|
||||
|
||||
void PacketizeAndProcess(MediaStreamGraphImpl* aGraph,
|
||||
const AudioDataValue* aBuffer,
|
||||
size_t aFrames,
|
||||
TrackRate aRate,
|
||||
uint32_t aChannels);
|
||||
|
||||
|
||||
// This is true when all processing is disabled, we can skip
|
||||
// packetization, resampling and other processing passes.
|
||||
// Graph thread only.
|
||||
bool PassThrough(MediaStreamGraphImpl* aGraphImpl) const;
|
||||
|
||||
// Graph thread only.
|
||||
void SetPassThrough(bool aPassThrough);
|
||||
uint32_t GetRequestedInputChannelCount(MediaStreamGraphImpl* aGraphImpl);
|
||||
void SetRequestedInputChannelCount(uint32_t aRequestedInputChannelCount);
|
||||
|
||||
// mListener is created on the MediaManager thread, and then sent to the MSG
|
||||
// thread. On shutdown, we send this pointer to the MSG thread again, telling
|
||||
// it to clean up.
|
||||
RefPtr<WebRTCAudioDataListener> mListener;
|
||||
|
||||
// Can be shared on any thread.
|
||||
const RefPtr<AudioDeviceInfo> mDeviceInfo;
|
||||
|
||||
const UniquePtr<webrtc::AudioProcessing> mAudioProcessing;
|
||||
|
||||
// accessed from the GraphDriver thread except for deletion.
|
||||
nsAutoPtr<AudioPacketizer<AudioDataValue, float>> mPacketizerInput;
|
||||
nsAutoPtr<AudioPacketizer<AudioDataValue, float>> mPacketizerOutput;
|
||||
|
||||
// mMutex protects some of our members off the owning thread.
|
||||
Mutex mMutex;
|
||||
|
||||
// We append an allocation in Allocate() and remove it in Deallocate().
|
||||
// Both the array and the Allocation members are modified under mMutex on
|
||||
// the owning thread. Accessed under one of the two.
|
||||
nsTArray<Allocation> mAllocations;
|
||||
|
||||
// Current state of the shared resource for this source. Written on the
|
||||
// owning thread, read on either the owning thread or the MSG thread.
|
||||
Atomic<MediaEngineSourceState> mState;
|
||||
|
||||
bool mDelayAgnostic;
|
||||
bool mExtendedFilter;
|
||||
bool mStarted;
|
||||
|
||||
const nsString mDeviceName;
|
||||
const nsCString mDeviceUUID;
|
||||
|
||||
// The current settings for the underlying device.
|
||||
// Member access is main thread only after construction.
|
||||
const nsMainThreadPtrHandle<media::Refcountable<dom::MediaTrackSettings>> mSettings;
|
||||
|
||||
// The number of channels asked for by content, after clamping to the range of
|
||||
// legal channel count for this particular device. This is the number of
|
||||
// channels of the input buffer passed as parameter in NotifyInputData.
|
||||
uint32_t mRequestedInputChannelCount;
|
||||
uint64_t mTotalFrames;
|
||||
uint64_t mLastLogFrames;
|
||||
|
||||
// mSkipProcessing is true if none of the processing passes are enabled,
|
||||
// because of prefs or constraints. This allows simply copying the audio into
|
||||
// the MSG, skipping resampling and the whole webrtc.org code.
|
||||
// This is read and written to only on the MSG thread.
|
||||
bool mSkipProcessing;
|
||||
|
||||
// To only update microphone when needed, we keep track of the prefs
|
||||
// representing the currently applied settings for this source. This is the
|
||||
// net result of the prefs across all allocations.
|
||||
// Owning thread only.
|
||||
MediaEnginePrefs mNetPrefs;
|
||||
|
||||
// Stores the mixed audio output for the reverse-stream of the AEC.
|
||||
AlignedFloatBuffer mOutputBuffer;
|
||||
|
||||
AlignedFloatBuffer mInputBuffer;
|
||||
AlignedFloatBuffer mDeinterleavedBuffer;
|
||||
AlignedFloatBuffer mInputDownmixBuffer;
|
||||
};
|
||||
|
||||
class MediaEngineWebRTC : public MediaEngine
|
||||
{
|
||||
typedef MediaEngine Super;
|
||||
|
|
File diff suppressed because it is too large
@@ -0,0 +1,352 @@
|
|||
/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
|
||||
/* This Source Code Form is subject to the terms of the Mozilla Public
|
||||
* License, v. 2.0. If a copy of the MPL was not distributed with this file,
|
||||
* You can obtain one at http://mozilla.org/MPL/2.0/. */
|
||||
|
||||
#ifndef MediaEngineWebRTCAudio_h
|
||||
#define MediaEngineWebRTCAudio_h
|
||||
|
||||
#include "MediaEngineWebRTC.h"
|
||||
#include "AudioPacketizer.h"
|
||||
#include "AudioSegment.h"
|
||||
#include "AudioDeviceInfo.h"
|
||||
#include "webrtc/modules/audio_processing/include/audio_processing.h"
|
||||
|
||||
namespace mozilla {
|
||||
|
||||
class AudioInputProcessing;
|
||||
|
||||
// This class is created and used exclusively on the Media Manager thread, with
|
||||
// exactly two exceptions:
|
||||
// - Pull is always called on the MSG thread. It only ever uses
|
||||
// mInputProcessing. mInputProcessing is set, then a message is sent first to
|
||||
// the main thread and then the MSG thread so that it can be used as part of
|
||||
// the graph processing. On destruction, similarly, a message is sent to the
|
||||
// graph so that it stops using it, and then it is deleted.
|
||||
// - mSettings is created on the MediaManager thread is always ever accessed on
|
||||
// the Main Thread. It is const.
|
||||
class MediaEngineWebRTCMicrophoneSource : public MediaEngineSource
|
||||
{
|
||||
public:
|
||||
MediaEngineWebRTCMicrophoneSource(RefPtr<AudioDeviceInfo> aInfo,
|
||||
const nsString& name,
|
||||
const nsCString& uuid,
|
||||
uint32_t maxChannelCount,
|
||||
bool aDelayAgnostic,
|
||||
bool aExtendedFilter);
|
||||
|
||||
bool RequiresSharing() const override
{
return false;
}

nsString GetName() const override;
nsCString GetUUID() const override;

nsresult Allocate(const dom::MediaTrackConstraints &aConstraints,
const MediaEnginePrefs& aPrefs,
const nsString& aDeviceId,
const ipc::PrincipalInfo& aPrincipalInfo,
AllocationHandle** aOutHandle,
const char** aOutBadConstraint) override;
nsresult Deallocate(const RefPtr<const AllocationHandle>& aHandle) override;
nsresult SetTrack(const RefPtr<const AllocationHandle>& aHandle,
const RefPtr<SourceMediaStream>& aStream,
TrackID aTrackID,
const PrincipalHandle& aPrincipal) override;
nsresult Start(const RefPtr<const AllocationHandle>& aHandle) override;
nsresult Stop(const RefPtr<const AllocationHandle>& aHandle) override;
nsresult Reconfigure(const RefPtr<AllocationHandle>& aHandle,
const dom::MediaTrackConstraints& aConstraints,
const MediaEnginePrefs& aPrefs,
const nsString& aDeviceId,
const char** aOutBadConstraint) override;

void Pull(const RefPtr<const AllocationHandle>& aHandle,
const RefPtr<SourceMediaStream>& aStream,
TrackID aTrackID,
StreamTime aDesiredTime,
const PrincipalHandle& aPrincipalHandle) override;

/**
* Assigns the current settings of the capture to aOutSettings.
* Main thread only.
*/
void GetSettings(dom::MediaTrackSettings& aOutSettings) const override;

dom::MediaSourceEnum GetMediaSource() const override
{
return dom::MediaSourceEnum::Microphone;
}

nsresult TakePhoto(MediaEnginePhotoCallback* aCallback) override
{
return NS_ERROR_NOT_IMPLEMENTED;
}

uint32_t GetBestFitnessDistance(
const nsTArray<const NormalizedConstraintSet*>& aConstraintSets,
const nsString& aDeviceId) const override;

void Shutdown() override;

protected:
~MediaEngineWebRTCMicrophoneSource() = default;

private:
/**
* Reevaluates the aggregated constraints of all allocations and restarts the
* underlying device if necessary.
*
* If the given AllocationHandle was already registered, its constraints will
* be updated before reevaluation. If not, they will be added before
* reevaluation.
*/
nsresult ReevaluateAllocation(const RefPtr<AllocationHandle>& aHandle,
const NormalizedConstraints* aConstraintsUpdate,
const MediaEnginePrefs& aPrefs,
const nsString& aDeviceId,
const char** aOutBadConstraint);

/**
* Updates the underlying (single) device with the aggregated constraints
* aNetConstraints. If the chosen settings for the device change based on
* these new constraints, and capture is active, the device will be restarted.
*/
nsresult UpdateSingleSource(const RefPtr<const AllocationHandle>& aHandle,
const NormalizedConstraints& aNetConstraints,
const MediaEnginePrefs& aPrefs,
const nsString& aDeviceId,
const char** aOutBadConstraint);

// These methods send a message to the AudioInputProcessing instance.
void UpdateAECSettingsIfNeeded(bool aEnable, webrtc::EcModes aMode);
void UpdateAGCSettingsIfNeeded(bool aEnable, webrtc::AgcModes aMode);
void UpdateNSSettingsIfNeeded(bool aEnable, webrtc::NsModes aMode);
void UpdateAPMExtraOptions(bool aExtendedFilter, bool aDelayAgnostic);
void ApplySettings(const MediaEnginePrefs& aPrefs,
RefPtr<MediaStreamGraphImpl> aGraph);

bool HasEnabledTrack() const;

RefPtr<AllocationHandle> mHandle;

TrackID mTrackID = TRACK_NONE;
PrincipalHandle mPrincipal = PRINCIPAL_HANDLE_NONE;
bool mEnabled = false;

const RefPtr<AudioDeviceInfo> mDeviceInfo;
const bool mDelayAgnostic;
const bool mExtendedFilter;
const nsString mDeviceName;
const nsCString mDeviceUUID;

// The maximum number of channels that this device supports.
const uint32_t mDeviceMaxChannelCount;
// The current settings for the underlying device.
// Constructed on the MediaManager thread, and then only ever accessed on the
// main thread.
const nsMainThreadPtrHandle<media::Refcountable<dom::MediaTrackSettings>> mSettings;
// To only update the microphone when needed, we keep track of the prefs
// representing the currently applied settings for this source. This is the
// net result of the prefs across all allocations.
MediaEnginePrefs mNetPrefs;

// Current state of the resource for this source.
MediaEngineSourceState mState;

// The SourceMediaStream on which to append data for this microphone. Set in
// SetTrack as part of the initialization, and nulled in ::Deallocate.
RefPtr<SourceMediaStream> mStream;

// See note at the top of this class.
RefPtr<AudioInputProcessing> mInputProcessing;
};

// This class is created on the MediaManager thread, and then exclusively used
// on the MSG thread.
// All communication is done via message passing using MSG ControlMessages.
class AudioInputProcessing : public AudioDataListener
{
public:
AudioInputProcessing(uint32_t aMaxChannelCount,
RefPtr<SourceMediaStream> aStream,
TrackID aTrackID,
const PrincipalHandle& aPrincipalHandle);

void Pull(const RefPtr<const AllocationHandle>& aHandle,
const RefPtr<SourceMediaStream>& aStream,
TrackID aTrackID,
StreamTime aDesiredTime,
const PrincipalHandle& aPrincipalHandle);

void NotifyOutputData(MediaStreamGraphImpl* aGraph,
AudioDataValue* aBuffer,
size_t aFrames,
TrackRate aRate,
uint32_t aChannels) override;
void NotifyInputData(MediaStreamGraphImpl* aGraph,
const AudioDataValue* aBuffer,
size_t aFrames,
TrackRate aRate,
uint32_t aChannels) override;

void Start();
void Stop();

void DeviceChanged(MediaStreamGraphImpl* aGraph) override;

uint32_t RequestedInputChannelCount(MediaStreamGraphImpl* aGraph) override
{
return GetRequestedInputChannelCount(aGraph);
}

void Disconnect(MediaStreamGraphImpl* aGraph) override;

template<typename T>
void InsertInGraph(const T* aBuffer, size_t aFrames, uint32_t aChannels);

void PacketizeAndProcess(MediaStreamGraphImpl* aGraph,
const AudioDataValue* aBuffer,
size_t aFrames,
TrackRate aRate,
uint32_t aChannels);

void SetPassThrough(bool aPassThrough);
uint32_t GetRequestedInputChannelCount(MediaStreamGraphImpl* aGraphImpl);
void SetRequestedInputChannelCount(uint32_t aRequestedInputChannelCount);
// This is true when all processing is disabled, so we can skip
// packetization, resampling and other processing passes.
bool PassThrough(MediaStreamGraphImpl* aGraphImpl) const;

// This allows changing the APM options, enabling or disabling processing
// steps.
void UpdateAECSettingsIfNeeded(bool aEnable, webrtc::EcModes aMode);
void UpdateAGCSettingsIfNeeded(bool aEnable, webrtc::AgcModes aMode);
void UpdateNSSettingsIfNeeded(bool aEnable, webrtc::NsModes aMode);
void UpdateAPMExtraOptions(bool aExtendedFilter, bool aDelayAgnostic);

void End();

private:
~AudioInputProcessing() = default;
RefPtr<SourceMediaStream> mStream;
// This implements the processing algorithm to apply to the input (e.g. a
// microphone). If all algorithms are disabled, this class is not used. This
// class only accepts audio chunks of 10ms. It has two inputs and one output:
// it is fed the speaker data and the microphone data. It outputs processed
// input data.
const UniquePtr<webrtc::AudioProcessing> mAudioProcessing;
// Packetizer to be able to feed 10ms packets to the input side of
// mAudioProcessing. Not used if the processing is bypassed.
nsAutoPtr<AudioPacketizer<AudioDataValue, float>> mPacketizerInput;
// Packetizer to be able to feed 10ms packets to the output side of
// mAudioProcessing. Not used if the processing is bypassed.
nsAutoPtr<AudioPacketizer<AudioDataValue, float>> mPacketizerOutput;
// The number of channels asked for by content, after clamping to the range of
// legal channel counts for this particular device. This is the number of
// channels of the input buffer passed as parameter in NotifyInputData.
uint32_t mRequestedInputChannelCount;
// mSkipProcessing is true if none of the processing passes are enabled,
// because of prefs or constraints. This allows simply copying the audio into
// the MSG, skipping resampling and the whole webrtc.org code.
bool mSkipProcessing;
// Stores the mixed audio output for the reverse-stream of the AEC (the
// speaker data).
AlignedFloatBuffer mOutputBuffer;
// Stores the input audio, to be processed by the APM.
AlignedFloatBuffer mInputBuffer;
// Stores the deinterleaved microphone audio.
AlignedFloatBuffer mDeinterleavedBuffer;
// Stores the mixed-down input audio.
AlignedFloatBuffer mInputDownmixBuffer;
#ifdef DEBUG
// The MSGImpl::IterationEnd() of the last time we appended data from an
// audio callback.
GraphTime mLastCallbackAppendTime;
#endif
// Set to false by Start(). Becomes true after the first time we append real
// audio frames from the audio callback.
bool mLiveFramesAppended;
// Set to false by Start(). Becomes true after the first time we append
// silence *after* the first audio callback has appended real frames.
bool mLiveSilenceAppended;
// Track ID on which the data is to be appended after processing.
TrackID mTrackID;
// Principal for the data that flows through this class.
PrincipalHandle mPrincipal;
// Whether or not this MediaEngine is enabled. If it's not enabled, it
// operates in "pull" mode, and we append silence only, releasing the audio
// input stream.
bool mEnabled;
// Whether or not we've ended and removed the track in the SourceMediaStream.
bool mEnded;
};
|
||||
|
||||
|
||||
class MediaEngineWebRTCAudioCaptureSource : public MediaEngineSource
|
||||
{
|
||||
public:
|
||||
explicit MediaEngineWebRTCAudioCaptureSource(const char* aUuid)
|
||||
{
|
||||
}
|
||||
nsString GetName() const override;
|
||||
nsCString GetUUID() const override;
|
||||
nsresult Allocate(const dom::MediaTrackConstraints &aConstraints,
|
||||
const MediaEnginePrefs &aPrefs,
|
||||
const nsString& aDeviceId,
|
||||
const ipc::PrincipalInfo& aPrincipalInfo,
|
||||
AllocationHandle** aOutHandle,
|
||||
const char** aOutBadConstraint) override
|
||||
{
|
||||
// Nothing to do here, everything is managed in MediaManager.cpp
|
||||
*aOutHandle = nullptr;
|
||||
return NS_OK;
|
||||
}
|
||||
nsresult Deallocate(const RefPtr<const AllocationHandle>& aHandle) override
|
||||
{
|
||||
// Nothing to do here, everything is managed in MediaManager.cpp
|
||||
MOZ_ASSERT(!aHandle);
|
||||
return NS_OK;
|
||||
}
|
||||
nsresult SetTrack(const RefPtr<const AllocationHandle>& aHandle,
|
||||
const RefPtr<SourceMediaStream>& aStream,
|
||||
TrackID aTrackID,
|
||||
const PrincipalHandle& aPrincipal) override;
|
||||
nsresult Start(const RefPtr<const AllocationHandle>& aHandle) override;
|
||||
nsresult Stop(const RefPtr<const AllocationHandle>& aHandle) override;
|
||||
nsresult Reconfigure(const RefPtr<AllocationHandle>& aHandle,
|
||||
const dom::MediaTrackConstraints& aConstraints,
|
||||
const MediaEnginePrefs& aPrefs,
|
||||
const nsString& aDeviceId,
|
||||
const char** aOutBadConstraint) override;
|
||||
|
||||
void Pull(const RefPtr<const AllocationHandle>& aHandle,
|
||||
const RefPtr<SourceMediaStream>& aStream,
|
||||
TrackID aTrackID,
|
||||
StreamTime aDesiredTime,
|
||||
const PrincipalHandle& aPrincipalHandle) override
|
||||
{
|
||||
}
|
||||
|
||||
dom::MediaSourceEnum GetMediaSource() const override
|
||||
{
|
||||
return dom::MediaSourceEnum::AudioCapture;
|
||||
}
|
||||
|
||||
nsresult TakePhoto(MediaEnginePhotoCallback* aCallback) override
|
||||
{
|
||||
return NS_ERROR_NOT_IMPLEMENTED;
|
||||
}
|
||||
|
||||
uint32_t GetBestFitnessDistance(
|
||||
const nsTArray<const NormalizedConstraintSet*>& aConstraintSets,
|
||||
const nsString& aDeviceId) const override;
|
||||
|
||||
protected:
|
||||
virtual ~MediaEngineWebRTCAudioCaptureSource() = default;
|
||||
};
|
||||
|
||||
} // end namespace mozilla
|
||||
|
||||
#endif // MediaEngineWebRTCAudio_h
|
|
@@ -60,11 +60,14 @@ NS_IMPL_RELEASE_INHERITED(PaymentRequest, DOMEventTargetHelper)
bool
PaymentRequest::PrefEnabled(JSContext* aCx, JSObject* aObj)
{
#ifdef NIGHTLY_BUILD
if (!XRE_IsContentProcess()) {
return false;
}

return StaticPrefs::dom_payments_request_enabled();
#else
return false;
#endif
}

nsresult
|
|
|
@ -71,8 +71,6 @@ pluginSupportsWindowlessMode()
|
|||
NPError
|
||||
pluginInstanceInit(InstanceData* instanceData)
|
||||
{
|
||||
NPP npp = instanceData->npp;
|
||||
|
||||
instanceData->platformData = static_cast<PlatformData*>
|
||||
(NPN_MemAlloc(sizeof(PlatformData)));
|
||||
if (!instanceData->platformData)
|
||||
|
@@ -398,26 +396,26 @@ drawToDC(InstanceData* instanceData, HDC dc,
{
HDC offscreenDC = ::CreateCompatibleDC(dc);
if (!offscreenDC)
return;

const BITMAPV4HEADER bitmapheader = {
sizeof(BITMAPV4HEADER),
width,
height,
1, // planes
32, // bits
BI_BITFIELDS,
0, // unused size
0, 0, // unused metrics
0, 0, // unused colors used/important
0x00FF0000, 0x0000FF00, 0x000000FF, 0xFF000000, // ARGB masks
};
uint32_t *pixelData;
HBITMAP offscreenBitmap =
::CreateDIBSection(dc, reinterpret_cast<const BITMAPINFO*>(&bitmapheader),
0, reinterpret_cast<void**>(&pixelData), 0, 0);
if (!offscreenBitmap)
return;

uint32_t rgba = instanceData->scriptableObject->drawColor;
unsigned int alpha = ((rgba & 0xFF000000) >> 24);

@@ -430,12 +428,12 @@ drawToDC(InstanceData* instanceData, HDC dc,
g = BYTE(float(alpha * g) / 0xFF);
b = BYTE(float(alpha * b) / 0xFF);
uint32_t premultiplied =
(alpha << 24) + (r << 16) + (g << 8) + b;

for (uint32_t* lastPixel = pixelData + width * height;
pixelData < lastPixel;
++pixelData)
*pixelData = premultiplied;

::SelectObject(offscreenDC, offscreenBitmap);
BLENDFUNCTION blendFunc;

@@ -444,7 +442,7 @@ drawToDC(InstanceData* instanceData, HDC dc,
blendFunc.SourceConstantAlpha = 255;
blendFunc.AlphaFormat = AC_SRC_ALPHA;
::AlphaBlend(dc, x, y, width, height, offscreenDC, 0, 0, width, height,
blendFunc);

::DeleteObject(offscreenDC);
::DeleteObject(offscreenBitmap);

@@ -800,7 +798,7 @@ pluginHandleEvent(InstanceData* instanceData, void* event)

LRESULT CALLBACK PluginWndProc(HWND hWnd, UINT uMsg, WPARAM wParam, LPARAM lParam)
{
WNDPROC wndProc = (WNDPROC)GetProp(hWnd, "MozillaWndProc");
if (!wndProc)
return 0;
InstanceData* pInstance = (InstanceData*)GetProp(hWnd, "InstanceData");
|
||||
|
|
|
@ -80,7 +80,7 @@ public:
|
|||
HANDLE mFontRef;
|
||||
};
|
||||
|
||||
BYTE
|
||||
BYTE
|
||||
FontTypeToOutPrecision(uint8_t fontType)
|
||||
{
|
||||
BYTE ret;
|
||||
|
@ -154,9 +154,9 @@ GDIFontEntry::ReadCMAP(FontInfoData *aFontInfoData)
|
|||
}
|
||||
|
||||
// skip non-SFNT fonts completely
|
||||
if (mFontType != GFX_FONT_TYPE_PS_OPENTYPE &&
|
||||
if (mFontType != GFX_FONT_TYPE_PS_OPENTYPE &&
|
||||
mFontType != GFX_FONT_TYPE_TT_OPENTYPE &&
|
||||
mFontType != GFX_FONT_TYPE_TRUETYPE)
|
||||
mFontType != GFX_FONT_TYPE_TRUETYPE)
|
||||
{
|
||||
mCharacterMap = new gfxCharacterMap();
|
||||
mCharacterMap->mBuildOnTheFly = true;
|
||||
|
@ -288,7 +288,7 @@ GDIFontEntry::FillLogFont(LOGFONTW *aLogFont,
|
|||
#define MISSING_GLYPH 0x1F // glyph index returned for missing characters
|
||||
// on WinXP with .fon fonts, but not Type1 (.pfb)
|
||||
|
||||
bool
|
||||
bool
|
||||
GDIFontEntry::TestCharacterMap(uint32_t aCh)
|
||||
{
|
||||
if (!mCharacterMap) {
|
||||
|
@ -322,13 +322,13 @@ GDIFontEntry::TestCharacterMap(uint32_t aCh)
|
|||
|
||||
bool hasGlyph = false;
|
||||
|
||||
// Bug 573038 - in some cases GetGlyphIndicesW returns 0xFFFF for a
|
||||
// missing glyph or 0x1F in other cases to indicate the "invalid"
|
||||
// Bug 573038 - in some cases GetGlyphIndicesW returns 0xFFFF for a
|
||||
// missing glyph or 0x1F in other cases to indicate the "invalid"
|
||||
// glyph. Map both cases to "not found"
|
||||
if (IsType1() || mForceGDI) {
|
||||
// Type1 fonts and uniscribe APIs don't get along.
|
||||
// Type1 fonts and uniscribe APIs don't get along.
|
||||
// ScriptGetCMap will return E_HANDLE
|
||||
DWORD ret = GetGlyphIndicesW(dc, str, 1,
|
||||
DWORD ret = GetGlyphIndicesW(dc, str, 1,
|
||||
glyph, GGI_MARK_NONEXISTING_GLYPHS);
|
||||
if (ret != GDI_ERROR
|
||||
&& glyph[0] != 0xFFFF
|
||||
|
@ -337,7 +337,7 @@ GDIFontEntry::TestCharacterMap(uint32_t aCh)
|
|||
hasGlyph = true;
|
||||
}
|
||||
} else {
|
||||
// ScriptGetCMap works better than GetGlyphIndicesW
|
||||
// ScriptGetCMap works better than GetGlyphIndicesW
|
||||
// for things like bitmap/vector fonts
|
||||
SCRIPT_CACHE sc = nullptr;
|
||||
HRESULT rv = ScriptGetCMap(dc, &sc, str, 1, 0, glyph);
|
||||
|
@ -392,7 +392,7 @@ GDIFontEntry::InitLogFont(const nsACString& aName,
|
|||
mLogFont.lfFaceName[len] = '\0';
|
||||
}
|
||||
|
||||
GDIFontEntry*
|
||||
GDIFontEntry*
|
||||
GDIFontEntry::CreateFontEntry(const nsACString& aName,
|
||||
gfxWindowsFontType aFontType,
|
||||
SlantStyleRange aStyle,
|
||||
|
@ -474,7 +474,7 @@ GDIFontFamily::FamilyAddStylesProc(const ENUMLOGFONTEXW *lpelfe,
|
|||
// XXX Can we still do this now that we store mCharset
|
||||
// on the font family rather than the font entry?
|
||||
ff->mCharset.set(metrics.tmCharSet);
|
||||
return 1;
|
||||
return 1;
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -585,7 +585,7 @@ gfxGDIFontList::GetFontSubstitutes()
|
|||
WCHAR aliasName[MAX_VALUE_NAME];
|
||||
WCHAR actualName[MAX_VALUE_DATA];
|
||||
|
||||
if (RegOpenKeyExW(HKEY_LOCAL_MACHINE,
|
||||
if (RegOpenKeyExW(HKEY_LOCAL_MACHINE,
|
||||
L"SOFTWARE\\Microsoft\\Windows NT\\CurrentVersion\\FontSubstitutes",
|
||||
0, KEY_READ, &hKey) != ERROR_SUCCESS)
|
||||
{
|
||||
|
@ -617,7 +617,7 @@ gfxGDIFontList::GetFontSubstitutes()
|
|||
gfxFontFamily *ff;
|
||||
NS_ConvertUTF16toUTF8 substitute(substituteName);
|
||||
NS_ConvertUTF16toUTF8 actual(actualFontName);
|
||||
if (!actual.IsEmpty() &&
|
||||
if (!actual.IsEmpty() &&
|
||||
(ff = mFontFamilies.GetWeak(actual))) {
|
||||
mFontSubstitutes.Put(substitute, ff);
|
||||
} else {
|
||||
|
@ -660,9 +660,8 @@ gfxGDIFontList::InitFontListForPlatform()
|
|||
logfont.lfCharSet = DEFAULT_CHARSET;
|
||||
|
||||
AutoDC hdc;
|
||||
int result = EnumFontFamiliesExW(hdc.GetDC(), &logfont,
|
||||
(FONTENUMPROCW)&EnumFontFamExProc,
|
||||
0, 0);
|
||||
(void)EnumFontFamiliesExW(hdc.GetDC(), &logfont,
|
||||
(FONTENUMPROCW)&EnumFontFamExProc, 0, 0);
|
||||
|
||||
GetFontSubstitutes();
|
||||
|
||||
|
@ -717,7 +716,7 @@ gfxGDIFontList::EnumFontFamExProc(ENUMLOGFONTEXW *lpelfe,
|
|||
return 1;
|
||||
}
|
||||
|
||||
gfxFontEntry*
|
||||
gfxFontEntry*
|
||||
gfxGDIFontList::LookupLocalFont(const nsACString& aFontName,
|
||||
WeightRange aWeightForEntry,
|
||||
StretchRange aStretchForEntry,
|
||||
|
@ -731,13 +730,13 @@ gfxGDIFontList::LookupLocalFont(const nsACString& aFontName,
|
|||
}
|
||||
|
||||
bool isCFF = false; // jtdfix -- need to determine this
|
||||
|
||||
|
||||
// use the face name from the lookup font entry, which will be the localized
|
||||
// face name which GDI mapping tables use (e.g. with the system locale set to
|
||||
// Dutch, a fullname of 'Arial Bold' will find a font entry with the face name
|
||||
// 'Arial Vet' which can be used as a key in GDI font lookups).
|
||||
GDIFontEntry *fe = GDIFontEntry::CreateFontEntry(lookup->Name(),
|
||||
gfxWindowsFontType(isCFF ? GFX_FONT_TYPE_PS_OPENTYPE : GFX_FONT_TYPE_TRUETYPE) /*type*/,
|
||||
GDIFontEntry *fe = GDIFontEntry::CreateFontEntry(lookup->Name(),
|
||||
gfxWindowsFontType(isCFF ? GFX_FONT_TYPE_PS_OPENTYPE : GFX_FONT_TYPE_TRUETYPE) /*type*/,
|
||||
lookup->SlantStyle(), lookup->Weight(), aStretchForEntry, nullptr);
|
||||
|
||||
if (!fe)
|
||||
|
@ -845,7 +844,7 @@ gfxGDIFontList::MakePlatformFont(const nsACString& aFontName,
|
|||
|
||||
if (NS_FAILED(rv))
|
||||
return nullptr;
|
||||
|
||||
|
||||
DWORD numFonts = 0;
|
||||
|
||||
uint8_t *fontData = reinterpret_cast<uint8_t*> (newFontData.Elements());
|
||||
|
@ -853,9 +852,9 @@ gfxGDIFontList::MakePlatformFont(const nsACString& aFontName,
|
|||
NS_ASSERTION(fontData, "null font data after renaming");
|
||||
|
||||
// http://msdn.microsoft.com/en-us/library/ms533942(VS.85).aspx
|
||||
// "A font that is added by AddFontMemResourceEx is always private
|
||||
// "A font that is added by AddFontMemResourceEx is always private
|
||||
// to the process that made the call and is not enumerable."
|
||||
fontRef = AddFontMemResourceEx(fontData, fontLength,
|
||||
fontRef = AddFontMemResourceEx(fontData, fontLength,
|
||||
0 /* reserved */, &numFonts);
|
||||
if (!fontRef) {
|
||||
if (FixupSymbolEncodedFont(fontData, fontLength)) {
|
||||
|
@ -924,7 +923,7 @@ gfxGDIFontList::GetDefaultFontForPlatform(const gfxFontStyle* aStyle)
|
|||
// this really shouldn't fail to find a font....
|
||||
NONCLIENTMETRICSW ncm;
|
||||
ncm.cbSize = sizeof(ncm);
|
||||
BOOL status = ::SystemParametersInfoW(SPI_GETNONCLIENTMETRICS,
|
||||
BOOL status = ::SystemParametersInfoW(SPI_GETNONCLIENTMETRICS,
|
||||
sizeof(ncm), &ncm, 0);
|
||||
if (status) {
|
||||
ff = FindFamily(NS_ConvertUTF16toUTF8(ncm.lfMessageFont.lfFaceName));
|
||||
|
|
|
@ -1298,7 +1298,7 @@ gfxWindowsPlatform::IsOptimus()
|
|||
}
|
||||
return knowIsOptimus;
|
||||
}
|
||||
|
||||
/*
|
||||
static inline bool
|
||||
IsWARPStable()
|
||||
{
|
||||
|
@ -1308,7 +1308,7 @@ IsWARPStable()
|
|||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
*/
|
||||
static void
|
||||
InitializeANGLEConfig()
|
||||
{
|
||||
|
|
|
@ -69,8 +69,6 @@ ClipVertexInfo write_clip_tile_vertex(RectWithSize local_clip_rect,
|
|||
|
||||
vec2 snap_offsets = compute_snap_offset_impl(
|
||||
device_pos,
|
||||
snap_mat,
|
||||
local_clip_rect,
|
||||
RectWithSize(snap_positions.xy, snap_positions.zw - snap_positions.xy),
|
||||
snap_positions
|
||||
);
|
||||
|
|
|
@ -2,87 +2,57 @@
|
|||
* License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
|
||||
|
||||
#include shared,clip_shared
|
||||
#include shared
|
||||
|
||||
#define LINE_STYLE_SOLID 0
|
||||
#define LINE_STYLE_DOTTED 1
|
||||
#define LINE_STYLE_DASHED 2
|
||||
#define LINE_STYLE_WAVY 3
|
||||
|
||||
varying vec3 vLocalPos;
|
||||
flat varying int vStyle;
|
||||
// Local space position
|
||||
varying vec2 vLocalPos;
|
||||
|
||||
flat varying float vAxisSelect;
|
||||
flat varying int vStyle;
|
||||
flat varying vec4 vParams;
|
||||
flat varying vec2 vLocalOrigin;
|
||||
|
||||
#ifdef WR_VERTEX_SHADER
|
||||
|
||||
#define LINE_ORIENTATION_VERTICAL 0
|
||||
#define LINE_ORIENTATION_HORIZONTAL 1
|
||||
|
||||
struct LineDecorationData {
|
||||
RectWithSize local_rect;
|
||||
float wavyLineThickness;
|
||||
float style;
|
||||
float orientation;
|
||||
};
|
||||
|
||||
LineDecorationData fetch_data(ivec2 address) {
|
||||
vec4 data[2] = fetch_from_gpu_cache_2_direct(address);
|
||||
RectWithSize local_rect = RectWithSize(data[0].xy, data[0].zw);
|
||||
LineDecorationData line_data = LineDecorationData(
|
||||
local_rect,
|
||||
data[1].x,
|
||||
data[1].y,
|
||||
data[1].z
|
||||
);
|
||||
return line_data;
|
||||
}
|
||||
in vec4 aTaskRect;
|
||||
in vec2 aLocalSize;
|
||||
in int aStyle;
|
||||
in int aOrientation;
|
||||
in float aWavyLineThickness;
|
||||
|
||||
void main(void) {
|
||||
ClipMaskInstance cmi = fetch_clip_item();
|
||||
ClipArea area = fetch_clip_area(cmi.render_task_address);
|
||||
Transform clip_transform = fetch_transform(cmi.clip_transform_id);
|
||||
Transform prim_transform = fetch_transform(cmi.prim_transform_id);
|
||||
LineDecorationData data = fetch_data(cmi.clip_data_address);
|
||||
vec2 size;
|
||||
|
||||
ClipVertexInfo vi = write_clip_tile_vertex(
|
||||
data.local_rect,
|
||||
prim_transform,
|
||||
clip_transform,
|
||||
area
|
||||
);
|
||||
vLocalPos = vi.local_pos;
|
||||
|
||||
vec2 pos, size;
|
||||
|
||||
switch (int(data.orientation)) {
|
||||
switch (aOrientation) {
|
||||
case LINE_ORIENTATION_HORIZONTAL:
|
||||
vAxisSelect = 0.0;
|
||||
pos = data.local_rect.p0;
|
||||
size = data.local_rect.size;
|
||||
size = aLocalSize;
|
||||
break;
|
||||
case LINE_ORIENTATION_VERTICAL:
|
||||
vAxisSelect = 1.0;
|
||||
pos = data.local_rect.p0.yx;
|
||||
size = data.local_rect.size.yx;
|
||||
size = aLocalSize.yx;
|
||||
break;
|
||||
default:
|
||||
vAxisSelect = 0.0;
|
||||
pos = size = vec2(0.0);
|
||||
size = vec2(0.0);
|
||||
}
|
||||
|
||||
vLocalOrigin = pos;
|
||||
vStyle = int(data.style);
|
||||
vStyle = aStyle;
|
||||
|
||||
switch (vStyle) {
|
||||
case LINE_STYLE_SOLID: {
|
||||
break;
|
||||
}
|
||||
case LINE_STYLE_DASHED: {
|
||||
float dash_length = size.y * 3.0;
|
||||
vParams = vec4(2.0 * dash_length, // period
|
||||
dash_length, // dash length
|
||||
vParams = vec4(size.x, // period
|
||||
0.5 * size.x, // dash length
|
||||
0.0,
|
||||
0.0);
|
||||
break;
|
||||
|
@ -90,17 +60,16 @@ void main(void) {
|
|||
case LINE_STYLE_DOTTED: {
|
||||
float diameter = size.y;
|
||||
float period = diameter * 2.0;
|
||||
float center_line = pos.y + 0.5 * size.y;
|
||||
float max_x = floor(size.x / period) * period;
|
||||
float center_line = 0.5 * size.y;
|
||||
vParams = vec4(period,
|
||||
diameter / 2.0, // radius
|
||||
center_line,
|
||||
max_x);
|
||||
0.0);
|
||||
break;
|
||||
}
|
||||
case LINE_STYLE_WAVY: {
|
||||
// This logic copied from gecko to get the same results
|
||||
float line_thickness = max(data.wavyLineThickness, 1.0);
|
||||
float line_thickness = max(aWavyLineThickness, 1.0);
|
||||
// Difference in height between peaks and troughs
|
||||
// (and since slopes are 45 degrees, the length of each slope)
|
||||
float slope_length = size.y - line_thickness;
|
||||
|
@ -116,6 +85,10 @@ void main(void) {
|
|||
default:
|
||||
vParams = vec4(0.0);
|
||||
}
|
||||
|
||||
vLocalPos = aPosition.xy * aLocalSize;
|
||||
|
||||
gl_Position = uTransform * vec4(aTaskRect.xy + aTaskRect.zw * aPosition.xy, 0.0, 1.0);
|
||||
}
|
||||
#endif
|
||||
|
||||
|
@ -125,7 +98,7 @@ void main(void) {
|
|||
|
||||
void main(void) {
|
||||
// Find the appropriate distance to apply the step over.
|
||||
vec2 local_pos = vLocalPos.xy / vLocalPos.z;
|
||||
vec2 local_pos = vLocalPos;
|
||||
float aa_range = compute_aa_range(local_pos);
|
||||
float alpha = 1.0;
|
||||
|
||||
|
@ -137,28 +110,18 @@ void main(void) {
|
|||
break;
|
||||
}
|
||||
case LINE_STYLE_DASHED: {
|
||||
// Get the main-axis position relative to closest dot or dash.
|
||||
float x = mod(pos.x - vLocalOrigin.x, vParams.x);
|
||||
|
||||
// Calculate dash alpha (on/off) based on dash length
|
||||
alpha = step(x, vParams.y);
|
||||
alpha = step(floor(pos.x + 0.5), vParams.y);
|
||||
break;
|
||||
}
|
||||
case LINE_STYLE_DOTTED: {
|
||||
// Get the main-axis position relative to closest dot or dash.
|
||||
float x = mod(pos.x - vLocalOrigin.x, vParams.x);
|
||||
|
||||
// Get the dot alpha
|
||||
vec2 dot_relative_pos = vec2(x, pos.y) - vParams.yz;
|
||||
vec2 dot_relative_pos = pos - vParams.yz;
|
||||
float dot_distance = length(dot_relative_pos) - vParams.y;
|
||||
alpha = distance_aa(aa_range, dot_distance);
|
||||
// Clip off partial dots
|
||||
alpha *= step(pos.x - vLocalOrigin.x, vParams.w);
|
||||
break;
|
||||
}
|
||||
case LINE_STYLE_WAVY: {
|
||||
vec2 normalized_local_pos = pos - vLocalOrigin.xy;
|
||||
|
||||
float half_line_thickness = vParams.x;
|
||||
float slope_length = vParams.y;
|
||||
float flat_length = vParams.z;
|
||||
|
@ -169,24 +132,24 @@ void main(void) {
|
|||
float mid_height = vertical_bounds / 2.0;
|
||||
float peak_offset = mid_height - half_line_thickness;
|
||||
// Flip the wave every half period
|
||||
float flip = -2.0 * (step(mod(normalized_local_pos.x, 2.0 * half_period), half_period) - 0.5);
|
||||
float flip = -2.0 * (step(mod(pos.x, 2.0 * half_period), half_period) - 0.5);
|
||||
// float flip = -1.0;
|
||||
peak_offset *= flip;
|
||||
float peak_height = mid_height + peak_offset;
|
||||
|
||||
// Convert pos to a local position within one half period
|
||||
normalized_local_pos.x = mod(normalized_local_pos.x, half_period);
|
||||
pos.x = mod(pos.x, half_period);
|
||||
|
||||
// Compute signed distance to the 3 lines that make up an arc
|
||||
float dist1 = distance_to_line(vec2(0.0, peak_height),
|
||||
vec2(1.0, -flip),
|
||||
normalized_local_pos);
|
||||
pos);
|
||||
float dist2 = distance_to_line(vec2(0.0, peak_height),
|
||||
vec2(0, -flip),
|
||||
normalized_local_pos);
|
||||
pos);
|
||||
float dist3 = distance_to_line(vec2(flat_length, peak_height),
|
||||
vec2(-1.0, -flip),
|
||||
normalized_local_pos);
|
||||
pos);
|
||||
float dist = abs(max(max(dist1, dist2), dist3));
|
||||
|
||||
// Apply AA based on the thickness of the wave
|
|
@ -130,14 +130,12 @@ VertexInfo write_text_vertex(RectWithSize local_clip_rect,
|
|||
|
||||
// Map the clamped local space corner into device space.
|
||||
vec4 world_pos = transform.m * vec4(local_pos, 0.0, 1.0);
|
||||
vec2 device_pos = world_pos.xy / world_pos.w * task.common_data.device_pixel_scale;
|
||||
vec2 device_pos = world_pos.xy * task.common_data.device_pixel_scale;
|
||||
|
||||
// Apply offsets for the render task to get correct screen location.
|
||||
vec2 final_pos = device_pos -
|
||||
task.content_origin +
|
||||
task.common_data.task_rect.p0;
|
||||
vec2 final_offset = -task.content_origin + task.common_data.task_rect.p0;
|
||||
|
||||
gl_Position = uTransform * vec4(final_pos, z, 1.0);
|
||||
gl_Position = uTransform * vec4(device_pos + final_offset * world_pos.w, z * world_pos.w, world_pos.w);
|
||||
|
||||
VertexInfo vi = VertexInfo(
|
||||
local_pos,
|
||||
|
|
|
@ -28,11 +28,9 @@ vec4 compute_snap_positions(
|
|||
|
||||
vec2 compute_snap_offset_impl(
|
||||
vec2 reference_pos,
|
||||
mat4 transform,
|
||||
RectWithSize snap_rect,
|
||||
RectWithSize reference_rect,
|
||||
vec4 snap_positions) {
|
||||
|
||||
vec4 snap_positions
|
||||
) {
|
||||
/// World offsets applied to the corners of the snap rectangle.
|
||||
vec4 snap_offsets = floor(snap_positions + 0.5) - snap_positions;
|
||||
|
||||
|
@ -57,8 +55,6 @@ vec2 compute_snap_offset(vec2 local_pos,
|
|||
|
||||
vec2 snap_offsets = compute_snap_offset_impl(
|
||||
local_pos,
|
||||
transform,
|
||||
snap_rect,
|
||||
snap_rect,
|
||||
snap_positions
|
||||
);
|
||||
|
|
|
@ -2,7 +2,7 @@
|
|||
* License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
|
||||
|
||||
use api::{AlphaType, ClipMode, DeviceIntRect, DeviceIntSize};
|
||||
use api::{AlphaType, ClipMode, DeviceIntRect, DeviceIntSize, LineStyle};
|
||||
use api::{DeviceUintRect, DeviceUintPoint, ExternalImageType, FilterOp, ImageRendering};
|
||||
use api::{YuvColorSpace, YuvFormat, WorldPixel, WorldRect, ColorDepth};
|
||||
use clip::{ClipDataStore, ClipNodeFlags, ClipNodeRange, ClipItem, ClipStore};
|
||||
|
@ -1413,6 +1413,35 @@ impl BrushPrimitive {
|
|||
))
|
||||
}
|
||||
}
|
||||
BrushKind::LineDecoration { ref handle, style, .. } => {
|
||||
match style {
|
||||
LineStyle::Solid => {
|
||||
Some((
|
||||
BrushBatchKind::Solid,
|
||||
BatchTextures::no_texture(),
|
||||
[0; 3],
|
||||
))
|
||||
}
|
||||
LineStyle::Dotted |
|
||||
LineStyle::Dashed |
|
||||
LineStyle::Wavy => {
|
||||
let rt_cache_entry = resource_cache
|
||||
.get_cached_render_task(handle.as_ref().unwrap());
|
||||
let cache_item = resource_cache.get_texture_cache_item(&rt_cache_entry.handle);
|
||||
let textures = BatchTextures::color(cache_item.texture_id);
|
||||
Some((
|
||||
BrushBatchKind::Image(get_buffer_kind(cache_item.texture_id)),
|
||||
textures,
|
||||
[
|
||||
cache_item.uv_rect_handle.as_int(gpu_cache),
|
||||
(ShaderColorMode::Image as i32) << 16|
|
||||
RasterizationSpace::Local as i32,
|
||||
0,
|
||||
],
|
||||
))
|
||||
}
|
||||
}
|
||||
}
|
||||
BrushKind::Border { ref source, .. } => {
|
||||
let cache_item = match *source {
|
||||
BorderSource::Image(request) => {
|
||||
|
@ -1568,6 +1597,7 @@ impl Primitive {
|
|||
AlphaType::Alpha => BlendMode::Alpha,
|
||||
}
|
||||
}
|
||||
BrushKind::LineDecoration { .. } |
|
||||
BrushKind::Solid { .. } |
|
||||
BrushKind::YuvImage { .. } |
|
||||
BrushKind::RadialGradient { .. } |
|
||||
|
@ -1711,7 +1741,6 @@ pub struct ClipBatcher {
|
|||
/// Image draws apply the image masking.
|
||||
pub images: FastHashMap<TextureSource, Vec<ClipMaskInstance>>,
|
||||
pub box_shadows: FastHashMap<TextureSource, Vec<ClipMaskInstance>>,
|
||||
pub line_decorations: Vec<ClipMaskInstance>,
|
||||
}
|
||||
|
||||
impl ClipBatcher {
|
||||
|
@ -1720,7 +1749,6 @@ impl ClipBatcher {
|
|||
rectangles: Vec::new(),
|
||||
images: FastHashMap::default(),
|
||||
box_shadows: FastHashMap::default(),
|
||||
line_decorations: Vec::new(),
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -1803,12 +1831,6 @@ impl ClipBatcher {
|
|||
continue;
|
||||
}
|
||||
}
|
||||
ClipItem::LineDecoration(..) => {
|
||||
self.line_decorations.push(ClipMaskInstance {
|
||||
clip_data_address: gpu_address,
|
||||
..instance
|
||||
});
|
||||
}
|
||||
ClipItem::BoxShadow(ref info) => {
|
||||
let rt_handle = info
|
||||
.cache_handle
|
||||
|
|
|
@ -4,7 +4,7 @@
|
|||
|
||||
use api::{BorderRadius, ClipMode, ComplexClipRegion, DeviceIntRect, DevicePixelScale, ImageMask};
|
||||
use api::{ImageRendering, LayoutRect, LayoutSize, LayoutPoint, LayoutVector2D};
|
||||
use api::{BoxShadowClipMode, LayoutToWorldScale, LineOrientation, LineStyle, PicturePixel, WorldPixel};
|
||||
use api::{BoxShadowClipMode, LayoutToWorldScale, PicturePixel, WorldPixel};
|
||||
use api::{PictureRect, LayoutPixel, WorldPoint, WorldSize, WorldRect, LayoutToWorldTransform};
|
||||
use api::{VoidPtrToSizeFn, LayoutRectAu, ImageKey, AuHelpers};
|
||||
use app_units::Au;
|
||||
|
@ -21,7 +21,7 @@ use render_task::to_cache_size;
|
|||
use resource_cache::{ImageRequest, ResourceCache};
|
||||
use std::{cmp, u32};
|
||||
use std::os::raw::c_void;
|
||||
use util::{extract_inner_rect_safe, pack_as_float, project_rect, ScaleOffset};
|
||||
use util::{extract_inner_rect_safe, project_rect, ScaleOffset};
|
||||
|
||||
/*
|
||||
|
||||
|
@ -144,14 +144,6 @@ impl From<ClipItemKey> for ClipNode {
|
|||
mode,
|
||||
)
|
||||
}
|
||||
ClipItemKey::LineDecoration(rect, style, orientation, wavy_line_thickness) => {
|
||||
ClipItem::LineDecoration(LineDecorationClipSource {
|
||||
rect: LayoutRect::from_au(rect),
|
||||
style,
|
||||
orientation,
|
||||
wavy_line_thickness: wavy_line_thickness.to_f32_px(),
|
||||
})
|
||||
}
|
||||
ClipItemKey::ImageMask(rect, image, repeat) => {
|
||||
ClipItem::Image(ImageMask {
|
||||
image,
|
||||
|
@ -298,15 +290,6 @@ impl ClipNode {
|
|||
let data = ClipData::rounded_rect(rect, radius, mode);
|
||||
data.write(&mut request);
|
||||
}
|
||||
ClipItem::LineDecoration(ref info) => {
|
||||
request.push(info.rect);
|
||||
request.push([
|
||||
info.wavy_line_thickness,
|
||||
pack_as_float(info.style as u32),
|
||||
pack_as_float(info.orientation as u32),
|
||||
0.0,
|
||||
]);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -353,8 +336,7 @@ impl ClipNode {
|
|||
}
|
||||
}
|
||||
ClipItem::Rectangle(..) |
|
||||
ClipItem::RoundedRectangle(..) |
|
||||
ClipItem::LineDecoration(..) => {}
|
||||
ClipItem::RoundedRectangle(..) => {}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -599,8 +581,7 @@ impl ClipStore {
|
|||
ClipItem::Rectangle(_, ClipMode::ClipOut) |
|
||||
ClipItem::RoundedRectangle(..) |
|
||||
ClipItem::Image(..) |
|
||||
ClipItem::BoxShadow(..) |
|
||||
ClipItem::LineDecoration(..) => {
|
||||
ClipItem::BoxShadow(..) => {
|
||||
true
|
||||
}
|
||||
|
||||
|
@ -651,17 +632,6 @@ impl ClipStore {
|
|||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
#[cfg_attr(feature = "capture", derive(Serialize))]
|
||||
#[cfg_attr(feature = "replay", derive(Deserialize))]
|
||||
pub struct LineDecorationClipSource {
|
||||
rect: LayoutRect,
|
||||
style: LineStyle,
|
||||
orientation: LineOrientation,
|
||||
wavy_line_thickness: f32,
|
||||
}
|
||||
|
||||
|
||||
pub struct ComplexTranslateIter<I> {
|
||||
source: I,
|
||||
offset: LayoutVector2D,
|
||||
|
@ -739,7 +709,6 @@ pub enum ClipItemKey {
|
|||
RoundedRectangle(LayoutRectAu, BorderRadiusAu, ClipMode),
|
||||
ImageMask(LayoutRectAu, ImageKey, bool),
|
||||
BoxShadow(LayoutRectAu, BorderRadiusAu, LayoutRectAu, Au, BoxShadowClipMode),
|
||||
LineDecoration(LayoutRectAu, LineStyle, LineOrientation, Au),
|
||||
}
|
||||
|
||||
impl ClipItemKey {
|
||||
|
@ -768,20 +737,6 @@ impl ClipItemKey {
|
|||
)
|
||||
}
|
||||
|
||||
pub fn line_decoration(
|
||||
rect: LayoutRect,
|
||||
style: LineStyle,
|
||||
orientation: LineOrientation,
|
||||
wavy_line_thickness: f32,
|
||||
) -> Self {
|
||||
ClipItemKey::LineDecoration(
|
||||
rect.to_au(),
|
||||
style,
|
||||
orientation,
|
||||
Au::from_f32_px(wavy_line_thickness),
|
||||
)
|
||||
}
|
||||
|
||||
pub fn box_shadow(
|
||||
shadow_rect: LayoutRect,
|
||||
shadow_radius: BorderRadius,
|
||||
|
@ -797,25 +752,6 @@ impl ClipItemKey {
|
|||
clip_mode,
|
||||
)
|
||||
}
|
||||
|
||||
// Return a modified clip source that is the same as self
|
||||
// but offset in local-space by a specified amount.
|
||||
pub fn offset(&self, offset: &LayoutVector2D) -> Self {
|
||||
let offset = offset.to_au();
|
||||
match *self {
|
||||
ClipItemKey::LineDecoration(rect, style, orientation, wavy_line_thickness) => {
|
||||
ClipItemKey::LineDecoration(
|
||||
rect.translate(&offset),
|
||||
style,
|
||||
orientation,
|
||||
wavy_line_thickness,
|
||||
)
|
||||
}
|
||||
_ => {
|
||||
panic!("bug: other clip sources not expected here yet");
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
|
@ -826,7 +762,6 @@ pub enum ClipItem {
|
|||
RoundedRectangle(LayoutRect, BorderRadius, ClipMode),
|
||||
Image(ImageMask),
|
||||
BoxShadow(BoxShadowClipSource),
|
||||
LineDecoration(LineDecorationClipSource),
|
||||
}
|
||||
|
||||
impl ClipItem {
|
||||
|
@ -941,7 +876,6 @@ impl ClipItem {
|
|||
ClipItem::Image(ref mask) if mask.repeat => None,
|
||||
ClipItem::Image(ref mask) => Some(mask.rect),
|
||||
ClipItem::BoxShadow(..) => None,
|
||||
ClipItem::LineDecoration(..) => None,
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -962,8 +896,7 @@ impl ClipItem {
|
|||
ClipItem::Rectangle(_, ClipMode::ClipOut) |
|
||||
ClipItem::RoundedRectangle(_, _, ClipMode::ClipOut) |
|
||||
ClipItem::Image(..) |
|
||||
ClipItem::BoxShadow(..) |
|
||||
ClipItem::LineDecoration(..) => {
|
||||
ClipItem::BoxShadow(..) => {
|
||||
return ClipResult::Partial
|
||||
}
|
||||
};
|
||||
|
@ -1087,8 +1020,7 @@ impl ClipItem {
|
|||
}
|
||||
}
|
||||
}
|
||||
ClipItem::BoxShadow(..) |
|
||||
ClipItem::LineDecoration(..) => {
|
||||
ClipItem::BoxShadow(..) => {
|
||||
ClipResult::Partial
|
||||
}
|
||||
}
|
||||
|
|
|
@ -279,8 +279,6 @@ impl<'a> DisplayListFlattener<'a> {
|
|||
reference_frame_info,
|
||||
&info,
|
||||
bg_color,
|
||||
None,
|
||||
Vec::new(),
|
||||
);
|
||||
}
|
||||
}
|
||||
|
@ -563,8 +561,6 @@ impl<'a> DisplayListFlattener<'a> {
|
|||
clip_and_scroll,
|
||||
&prim_info,
|
||||
info.color,
|
||||
None,
|
||||
Vec::new(),
|
||||
);
|
||||
}
|
||||
SpecificDisplayItem::ClearRectangle => {
|
||||
|
@ -918,12 +914,13 @@ impl<'a> DisplayListFlattener<'a> {
|
|||
self.add_primitive_to_draw_list(prim_instance);
|
||||
}
|
||||
} else {
|
||||
debug_assert!(clip_items.is_empty(), "No per-prim clips expected for shadowed primitives");
|
||||
|
||||
// There is an active shadow context. Store as a pending primitive
|
||||
// for processing during pop_all_shadows.
|
||||
self.pending_shadow_items.push_back(ShadowItem::Primitive(PendingPrimitive {
|
||||
clip_and_scroll,
|
||||
info: *info,
|
||||
clip_items,
|
||||
container,
|
||||
}));
|
||||
}
|
||||
|
@ -1482,22 +1479,10 @@ impl<'a> DisplayListFlattener<'a> {
|
|||
info.rect = info.rect.translate(&pending_shadow.shadow.offset);
|
||||
info.clip_rect = info.clip_rect.translate(&pending_shadow.shadow.offset);
|
||||
|
||||
// Offset any local clip sources by the shadow offset.
|
||||
let clip_items: Vec<ClipItemKey> = pending_primitive
|
||||
.clip_items
|
||||
.iter()
|
||||
.map(|cs| cs.offset(&pending_shadow.shadow.offset))
|
||||
.collect();
|
||||
let clip_chain_id = self.build_clip_chain(
|
||||
clip_items,
|
||||
pending_primitive.clip_and_scroll.spatial_node_index,
|
||||
pending_primitive.clip_and_scroll.clip_chain_id,
|
||||
);
|
||||
|
||||
// Construct and add a primitive for the given shadow.
|
||||
let shadow_prim_instance = self.create_primitive(
|
||||
&info,
|
||||
clip_chain_id,
|
||||
pending_primitive.clip_and_scroll.clip_chain_id,
|
||||
pending_primitive.clip_and_scroll.spatial_node_index,
|
||||
pending_primitive.container.create_shadow(&pending_shadow.shadow),
|
||||
);
|
||||
|
@ -1553,14 +1538,9 @@ impl<'a> DisplayListFlattener<'a> {
|
|||
// For a normal primitive, if it has alpha > 0, then we add this
|
||||
// as a normal primitive to the parent picture.
|
||||
if pending_primitive.container.is_visible() {
|
||||
let clip_chain_id = self.build_clip_chain(
|
||||
pending_primitive.clip_items,
|
||||
pending_primitive.clip_and_scroll.spatial_node_index,
|
||||
pending_primitive.clip_and_scroll.clip_chain_id,
|
||||
);
|
||||
let prim_instance = self.create_primitive(
|
||||
&pending_primitive.info,
|
||||
clip_chain_id,
|
||||
pending_primitive.clip_and_scroll.clip_chain_id,
|
||||
pending_primitive.clip_and_scroll.spatial_node_index,
|
||||
pending_primitive.container,
|
||||
);
|
||||
|
@ -1584,8 +1564,6 @@ impl<'a> DisplayListFlattener<'a> {
|
|||
clip_and_scroll: ScrollNodeAndClipChain,
|
||||
info: &LayoutPrimitiveInfo,
|
||||
color: ColorF,
|
||||
segments: Option<BrushSegmentDescriptor>,
|
||||
extra_clips: Vec<ClipItemKey>,
|
||||
) {
|
||||
if color.a == 0.0 {
|
||||
// Don't add transparent rectangles to the draw list, but do consider them for hit
|
||||
|
@ -1596,13 +1574,13 @@ impl<'a> DisplayListFlattener<'a> {
|
|||
|
||||
let prim = BrushPrimitive::new(
|
||||
BrushKind::new_solid(color),
|
||||
segments,
|
||||
None,
|
||||
);
|
||||
|
||||
self.add_primitive(
|
||||
clip_and_scroll,
|
||||
info,
|
||||
extra_clips,
|
||||
Vec::new(),
|
||||
PrimitiveContainer::Brush(prim),
|
||||
);
|
||||
}
|
||||
|
@ -1631,36 +1609,20 @@ impl<'a> DisplayListFlattener<'a> {
|
|||
info: &LayoutPrimitiveInfo,
|
||||
wavy_line_thickness: f32,
|
||||
orientation: LineOrientation,
|
||||
line_color: &ColorF,
|
||||
color: &ColorF,
|
||||
style: LineStyle,
|
||||
) {
|
||||
let prim = BrushPrimitive::new(
|
||||
BrushKind::new_solid(*line_color),
|
||||
None,
|
||||
let prim = BrushPrimitive::new_line_decoration(
|
||||
*color,
|
||||
style,
|
||||
orientation,
|
||||
wavy_line_thickness,
|
||||
);
|
||||
|
||||
let extra_clips = match style {
|
||||
LineStyle::Solid => {
|
||||
Vec::new()
|
||||
}
|
||||
LineStyle::Wavy |
|
||||
LineStyle::Dotted |
|
||||
LineStyle::Dashed => {
|
||||
vec![
|
||||
ClipItemKey::line_decoration(
|
||||
info.rect,
|
||||
style,
|
||||
orientation,
|
||||
wavy_line_thickness,
|
||||
),
|
||||
]
|
||||
}
|
||||
};
|
||||
|
||||
self.add_primitive(
|
||||
clip_and_scroll,
|
||||
info,
|
||||
extra_clips,
|
||||
Vec::new(),
|
||||
PrimitiveContainer::Brush(prim),
|
||||
);
|
||||
}
|
||||
|
@ -2217,7 +2179,6 @@ struct FlattenedStackingContext {
|
|||
struct PendingPrimitive {
|
||||
clip_and_scroll: ScrollNodeAndClipChain,
|
||||
info: LayoutPrimitiveInfo,
|
||||
clip_items: Vec<ClipItemKey>,
|
||||
container: PrimitiveContainer,
|
||||
}
|
||||
|
||||
|
|
|
@ -40,7 +40,6 @@ impl HitTestClipNode {
|
|||
ClipItem::RoundedRectangle(ref rect, ref radii, ref mode) =>
|
||||
HitTestRegion::RoundedRectangle(*rect, *radii, *mode),
|
||||
ClipItem::Image(ref mask) => HitTestRegion::Rectangle(mask.rect, ClipMode::Clip),
|
||||
ClipItem::LineDecoration(_) |
|
||||
ClipItem::BoxShadow(_) => HitTestRegion::Invalid,
|
||||
};
|
||||
|
||||
|
|
|
@ -237,12 +237,12 @@ pub fn compute_tile_range(
|
|||
let t0 = point2(
|
||||
f32::floor(visible_area.origin.x as f32 * tw),
|
||||
f32::floor(visible_area.origin.y as f32 * th),
|
||||
).cast::<u16>();
|
||||
).try_cast::<u16>().unwrap_or_else(|| panic!("compute_tile_range bad values {:?} {:?}", visible_area, tile_size));
|
||||
|
||||
let t1 = point2(
|
||||
f32::ceil(visible_area.max_x() as f32 * tw),
|
||||
f32::ceil(visible_area.max_y() as f32 * th),
|
||||
).cast::<u16>();
|
||||
).try_cast::<u16>().unwrap_or_else(|| panic!("compute_tile_range bad values {:?} {:?}", visible_area, tile_size));
|
||||
|
||||
TileRange {
|
||||
origin: t0,
|
||||
|
|
|
@ -8,12 +8,12 @@ use api::{FilterOp, GlyphInstance, GradientStop, ImageKey, ImageRendering, ItemR
|
|||
use api::{RasterSpace, LayoutPoint, LayoutRect, LayoutSideOffsets, LayoutSize, LayoutToWorldTransform};
|
||||
use api::{LayoutVector2D, PremultipliedColorF, PropertyBinding, Shadow, YuvColorSpace, YuvFormat};
|
||||
use api::{DeviceIntSideOffsets, WorldPixel, BoxShadowClipMode, LayoutToWorldScale, NormalBorder, WorldRect};
|
||||
use api::{PicturePixel, RasterPixel, ColorDepth};
|
||||
use api::{PicturePixel, RasterPixel, ColorDepth, LineStyle, LineOrientation, LayoutSizeAu, AuHelpers};
|
||||
use app_units::Au;
|
||||
use border::{BorderCacheKey, BorderRenderTaskInfo};
|
||||
use clip_scroll_tree::{ClipScrollTree, CoordinateSystemId, SpatialNodeIndex};
|
||||
use clip::{ClipNodeFlags, ClipChainId, ClipChainInstance, ClipItem, ClipNodeCollector};
|
||||
use euclid::{TypedTransform3D, TypedRect};
|
||||
use euclid::{TypedTransform3D, TypedRect, TypedScale};
|
||||
use frame_builder::{FrameBuildingContext, FrameBuildingState, PictureContext, PictureState};
|
||||
use frame_builder::PrimitiveContext;
|
||||
use glyph_rasterizer::{FontInstance, FontTransform, GlyphKey, FONT_SIZE_LIMIT};
|
||||
|
@ -407,6 +407,13 @@ pub enum BrushKind {
|
|||
visible_tiles: Vec<VisibleGradientTile>,
|
||||
stops_opacity: PrimitiveOpacity,
|
||||
},
|
||||
LineDecoration {
|
||||
color: ColorF,
|
||||
style: LineStyle,
|
||||
orientation: LineOrientation,
|
||||
wavy_line_thickness: f32,
|
||||
handle: Option<RenderTaskCacheEntryHandle>,
|
||||
},
|
||||
Border {
|
||||
source: BorderSource,
|
||||
},
|
||||
|
@ -434,6 +441,8 @@ impl BrushKind {
|
|||
BrushKind::Picture { .. } => false,
|
||||
|
||||
BrushKind::Clear => false,
|
||||
|
||||
BrushKind::LineDecoration { .. } => false,
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -638,6 +647,24 @@ impl BrushPrimitive {
|
|||
}
|
||||
}
|
||||
|
||||
pub fn new_line_decoration(
|
||||
color: ColorF,
|
||||
style: LineStyle,
|
||||
orientation: LineOrientation,
|
||||
wavy_line_thickness: f32,
|
||||
) -> Self {
|
||||
BrushPrimitive::new(
|
||||
BrushKind::LineDecoration {
|
||||
color,
|
||||
style,
|
||||
orientation,
|
||||
wavy_line_thickness,
|
||||
handle: None,
|
||||
},
|
||||
None,
|
||||
)
|
||||
}
|
||||
|
||||
fn write_gpu_blocks(
|
||||
&self,
|
||||
request: &mut GpuDataRequest,
|
||||
|
@ -688,6 +715,38 @@ impl BrushPrimitive {
|
|||
0.0,
|
||||
]);
|
||||
}
|
||||
BrushKind::LineDecoration { style, ref color, orientation, wavy_line_thickness, .. } => {
|
||||
// Work out the stretch parameters (for image repeat) based on the
|
||||
// line decoration parameters.
|
||||
|
||||
let size = get_line_decoration_sizes(
|
||||
&local_rect.size,
|
||||
orientation,
|
||||
style,
|
||||
wavy_line_thickness,
|
||||
);
|
||||
|
||||
match size {
|
||||
Some((inline_size, _)) => {
|
||||
let (sx, sy) = match orientation {
|
||||
LineOrientation::Horizontal => (inline_size, local_rect.size.height),
|
||||
LineOrientation::Vertical => (local_rect.size.width, inline_size),
|
||||
};
|
||||
|
||||
request.push(color.premultiplied());
|
||||
request.push(PremultipliedColorF::WHITE);
|
||||
request.push([
|
||||
sx,
|
||||
sy,
|
||||
0.0,
|
||||
0.0,
|
||||
]);
|
||||
}
|
||||
None => {
|
||||
request.push(color.premultiplied());
|
||||
}
|
||||
}
|
||||
}
|
||||
// Solid rects also support opacity collapsing.
|
||||
BrushKind::Solid { color, ref opacity_binding, .. } => {
|
||||
request.push(color.scale_alpha(opacity_binding.current).premultiplied());
|
||||
|
@ -738,6 +797,16 @@ pub struct ImageCacheKey {
|
|||
pub texel_rect: Option<DeviceIntRect>,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Hash, PartialEq, Eq)]
#[cfg_attr(feature = "capture", derive(Serialize))]
#[cfg_attr(feature = "replay", derive(Deserialize))]
pub struct LineDecorationCacheKey {
style: LineStyle,
orientation: LineOrientation,
wavy_line_thickness: Au,
size: LayoutSizeAu,
}
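// Illustrative note (not part of the patch): the key stores Au / LayoutSizeAu
// rather than raw f32 values so that two decorations whose sizes differ only by
// sub-pixel float noise quantize to the same RenderTaskCacheKey and therefore
// share one cached task. app_units uses 60 units per CSS px, so the hypothetical
// helper below mirrors what Au::from_f32_px does:
//   fn quantize_to_au(px: f32) -> i32 { (px * 60.0).round() as i32 }
//   assert_eq!(quantize_to_au(2.0000001), quantize_to_au(2.0)); // both 120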
|
||||
|
||||
// Where to find the texture data for an image primitive.
|
||||
#[derive(Debug)]
|
||||
pub enum ImageSource {
|
||||
|
@ -1323,6 +1392,9 @@ impl PrimitiveContainer {
|
|||
BrushKind::Solid { ref color, .. } => {
|
||||
color.a > 0.0
|
||||
}
|
||||
BrushKind::LineDecoration { ref color, .. } => {
|
||||
color.a > 0.0
|
||||
}
|
||||
BrushKind::Clear |
|
||||
BrushKind::Picture { .. } |
|
||||
BrushKind::Image { .. } |
|
||||
|
@ -1385,6 +1457,14 @@ impl PrimitiveContainer {
|
|||
None,
|
||||
))
|
||||
}
|
||||
BrushKind::LineDecoration { style, orientation, wavy_line_thickness, .. } => {
|
||||
PrimitiveContainer::Brush(BrushPrimitive::new_line_decoration(
|
||||
shadow.color,
|
||||
style,
|
||||
orientation,
|
||||
wavy_line_thickness,
|
||||
))
|
||||
}
|
||||
BrushKind::Image { request, stretch_size, .. } => {
|
||||
PrimitiveContainer::Brush(BrushPrimitive::new(
|
||||
BrushKind::new_image(request.clone(),
|
||||
|
@ -1609,6 +1689,7 @@ impl PrimitiveStore {
|
|||
BrushKind::YuvImage { .. } |
|
||||
BrushKind::LinearGradient { .. } |
|
||||
BrushKind::RadialGradient { .. } |
|
||||
BrushKind::LineDecoration { .. } |
|
||||
BrushKind::Clear => {}
|
||||
}
|
||||
}
|
||||
|
@ -1655,6 +1736,7 @@ impl PrimitiveStore {
|
|||
BrushKind::YuvImage { .. } |
|
||||
BrushKind::Border { .. } |
|
||||
BrushKind::LinearGradient { .. } |
|
||||
BrushKind::LineDecoration { .. } |
|
||||
BrushKind::RadialGradient { .. } => {
|
||||
unreachable!("bug: invalid prim type for opacity collapse");
|
||||
}
|
||||
|
@ -2222,7 +2304,7 @@ fn write_brush_segment_description(
|
|||
|
||||
continue;
|
||||
}
|
||||
ClipItem::LineDecoration(..) | ClipItem::Image(..) => {
|
||||
ClipItem::Image(..) => {
|
||||
rect_clips_only = false;
|
||||
continue;
|
||||
}
|
||||
|
@ -2643,6 +2725,85 @@ impl Primitive {
|
|||
}
|
||||
}
|
||||
}
|
||||
BrushKind::LineDecoration { ref mut handle, style, orientation, wavy_line_thickness, .. } => {
// Work out the device pixel size to be used to cache this line decoration.

let size = get_line_decoration_sizes(
&metadata.local_rect.size,
orientation,
style,
wavy_line_thickness,
);

if let Some((inline_size, block_size)) = size {
let size = match orientation {
LineOrientation::Horizontal => LayoutSize::new(inline_size, block_size),
LineOrientation::Vertical => LayoutSize::new(block_size, inline_size),
};

// If dotted, adjust the clip rect to ensure we don't draw a final
// partial dot.
if style == LineStyle::Dotted {
let clip_size = match orientation {
LineOrientation::Horizontal => {
LayoutSize::new(
inline_size * (metadata.local_rect.size.width / inline_size).floor(),
metadata.local_rect.size.height,
)
}
LineOrientation::Vertical => {
LayoutSize::new(
metadata.local_rect.size.width,
inline_size * (metadata.local_rect.size.height / inline_size).floor(),
)
}
};
let clip_rect = LayoutRect::new(
metadata.local_rect.origin,
clip_size,
);
prim_instance.combined_local_clip_rect = clip_rect
.intersection(&prim_instance.combined_local_clip_rect)
.unwrap_or(LayoutRect::zero());
}
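// Worked example (illustrative, not from the patch): for a horizontal dotted
// line 30px wide and 4px tall, get_line_decoration_sizes (defined later in this
// file) returns (period, diameter) = (8.0, 4.0), so inline_size is 8.0 and the
// clip width becomes 8.0 * (30.0_f32 / 8.0).floor() = 24.0; the partial fourth
// dot that would start at x = 24 is clipped out instead of being drawn cut off.
//   assert_eq!(8.0_f32 * (30.0_f32 / 8.0).floor(), 24.0);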

// TODO(gw): Do we ever need / want to support scales for text decorations
// based on the current transform?
let scale_factor = TypedScale::new(1.0) * frame_context.device_pixel_scale;
let task_size = (size * scale_factor).ceil().to_i32();

let cache_key = LineDecorationCacheKey {
style,
orientation,
wavy_line_thickness: Au::from_f32_px(wavy_line_thickness),
size: size.to_au(),
};

// Request a pre-rendered image task.
*handle = Some(frame_state.resource_cache.request_render_task(
RenderTaskCacheKey {
size: task_size,
kind: RenderTaskCacheKeyKind::LineDecoration(cache_key),
},
frame_state.gpu_cache,
frame_state.render_tasks,
None,
false,
|render_tasks| {
let task = RenderTask::new_line_decoration(
task_size,
style,
orientation,
wavy_line_thickness,
size,
);
let task_id = render_tasks.add(task);
pic_state.tasks.push(task_id);
task_id
}
));
}
}
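// Note (illustrative cross-reference, not new code): the handle requested above
// is resolved at batch time, as in the batching change earlier in this patch:
//   let rt_cache_entry = resource_cache
//       .get_cached_render_task(handle.as_ref().unwrap());
//   let cache_item = resource_cache.get_texture_cache_item(&rt_cache_entry.handle);
// i.e. non-solid line decorations are drawn as cached images rather than with a
// dedicated clip shader.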
|
||||
BrushKind::YuvImage { format, yuv_key, image_rendering, .. } => {
|
||||
prim_instance.opacity = PrimitiveOpacity::opaque();
|
||||
|
||||
|
@ -3053,3 +3214,48 @@ pub fn get_raster_rects(
|
|||
|
||||
Some((clipped.to_i32(), unclipped, transform))
|
||||
}
|
||||
|
||||
/// Get the inline (horizontal) and block (vertical) sizes
/// for a given line decoration.
fn get_line_decoration_sizes(
rect_size: &LayoutSize,
orientation: LineOrientation,
style: LineStyle,
wavy_line_thickness: f32,
) -> Option<(f32, f32)> {
let h = match orientation {
LineOrientation::Horizontal => rect_size.height,
LineOrientation::Vertical => rect_size.width,
};

// TODO(gw): The formulae below are based on the existing gecko and line
//           shader code. They give reasonable results for most inputs,
//           but could definitely do with a detailed pass to get better
//           quality on a wider range of inputs!
//           See nsCSSRendering::PaintDecorationLine in Gecko.

match style {
LineStyle::Solid => {
None
}
LineStyle::Dashed => {
let dash_length = (3.0 * h).min(64.0).max(1.0);

Some((2.0 * dash_length, 4.0))
}
LineStyle::Dotted => {
let diameter = h.min(64.0).max(1.0);
let period = 2.0 * diameter;

Some((period, diameter))
}
LineStyle::Wavy => {
let line_thickness = wavy_line_thickness.max(1.0);
let slope_length = h - line_thickness;
let flat_length = ((line_thickness - 1.0) * 2.0).max(1.0);
let approx_period = 2.0 * (slope_length + flat_length);

Some((approx_period, h))
}
}
}
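// Illustrative sketch of the formulae above, using hypothetical inputs rather
// than values from the patch. For a 2px-thick horizontal decoration (h = 2.0):
//   Dashed: dash_length = (3.0_f32 * 2.0).min(64.0).max(1.0) = 6.0 -> (12.0, 4.0)
//   Dotted: diameter = 2.0, period = 2.0 * diameter = 4.0          -> (4.0, 2.0)
//   Wavy (wavy_line_thickness = 1.0): slope_length = 1.0,
//         flat_length = 1.0, approx_period = 4.0                   -> (4.0, 2.0)
// The first component is the repeat period along the line (the inline size of
// the cached task) and the second is its cross-axis (block) size; Solid returns
// None because no cached task is needed.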
|
||||
|
|
|
@ -4,6 +4,7 @@
|
|||
|
||||
use api::{DeviceIntPoint, DeviceIntRect, DeviceIntSize, DeviceSize, DeviceIntSideOffsets};
|
||||
use api::{DevicePixelScale, ImageDescriptor, ImageFormat};
|
||||
use api::{LineStyle, LineOrientation, LayoutSize};
|
||||
#[cfg(feature = "pathfinder")]
|
||||
use api::FontRenderMode;
|
||||
use border::BorderCacheKey;
|
||||
|
@ -21,7 +22,7 @@ use internal_types::{CacheTextureId, FastHashMap, SavedTargetIndex};
|
|||
#[cfg(feature = "pathfinder")]
|
||||
use pathfinder_partitioner::mesh::Mesh;
|
||||
use picture::PictureCacheKey;
|
||||
use prim_store::{PrimitiveIndex, ImageCacheKey};
|
||||
use prim_store::{PrimitiveIndex, ImageCacheKey, LineDecorationCacheKey};
|
||||
#[cfg(feature = "debugger")]
|
||||
use print_tree::{PrintTreePrinter};
|
||||
use render_backend::FrameId;
|
||||
|
@ -117,6 +118,12 @@ impl RenderTaskTree {
|
|||
}
|
||||
}
|
||||
|
||||
let pass_index = if task.is_global_cached_task() {
|
||||
0
|
||||
} else {
|
||||
pass_index
|
||||
};
|
||||
|
||||
let pass = &mut passes[pass_index];
|
||||
pass.add_render_task(id, task.get_dynamic_size(), task.target_kind());
|
||||
}
|
||||
|
@ -290,6 +297,16 @@ pub struct BlitTask {
|
|||
pub padding: DeviceIntSideOffsets,
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
#[cfg_attr(feature = "capture", derive(Serialize))]
|
||||
#[cfg_attr(feature = "replay", derive(Deserialize))]
|
||||
pub struct LineDecorationTask {
|
||||
pub wavy_line_thickness: f32,
|
||||
pub style: LineStyle,
|
||||
pub orientation: LineOrientation,
|
||||
pub local_size: LayoutSize,
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
#[cfg_attr(feature = "capture", derive(Serialize))]
|
||||
#[cfg_attr(feature = "replay", derive(Deserialize))]
|
||||
|
@ -312,6 +329,7 @@ pub enum RenderTaskKind {
|
|||
Scaling(ScalingTask),
|
||||
Blit(BlitTask),
|
||||
Border(BorderTask),
|
||||
LineDecoration(LineDecorationTask),
|
||||
}
|
||||
|
||||
#[derive(Debug, Copy, Clone, Eq, PartialEq, Hash)]
|
||||
|
@ -438,6 +456,26 @@ impl RenderTask {
|
|||
)
|
||||
}
|
||||
|
||||
pub fn new_line_decoration(
|
||||
size: DeviceIntSize,
|
||||
style: LineStyle,
|
||||
orientation: LineOrientation,
|
||||
wavy_line_thickness: f32,
|
||||
local_size: LayoutSize,
|
||||
) -> Self {
|
||||
RenderTask::with_dynamic_location(
|
||||
size,
|
||||
Vec::new(),
|
||||
RenderTaskKind::LineDecoration(LineDecorationTask {
|
||||
style,
|
||||
orientation,
|
||||
wavy_line_thickness,
|
||||
local_size,
|
||||
}),
|
||||
ClearMode::Transparent,
|
||||
)
|
||||
}
|
||||
|
||||
pub fn new_mask(
|
||||
outer_rect: DeviceIntRect,
|
||||
clip_node_range: ClipNodeRange,
|
||||
|
@ -509,8 +547,7 @@ impl RenderTask {
|
|||
}
|
||||
ClipItem::Rectangle(..) |
|
||||
ClipItem::RoundedRectangle(..) |
|
||||
ClipItem::Image(..) |
|
||||
ClipItem::LineDecoration(..) => {}
|
||||
ClipItem::Image(..) => {}
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -701,6 +738,7 @@ impl RenderTask {
|
|||
RenderTaskKind::ClipRegion(..) |
|
||||
RenderTaskKind::Glyph(_) |
|
||||
RenderTaskKind::Border(..) |
|
||||
RenderTaskKind::LineDecoration(..) |
|
||||
RenderTaskKind::Blit(..) => {
|
||||
UvRectKind::Rect
|
||||
}
|
||||
|
@ -747,6 +785,7 @@ impl RenderTask {
|
|||
RenderTaskKind::Readback(..) |
|
||||
RenderTaskKind::Scaling(..) |
|
||||
RenderTaskKind::Border(..) |
|
||||
RenderTaskKind::LineDecoration(..) |
|
||||
RenderTaskKind::Blit(..) => {
|
||||
[0.0; 2]
|
||||
}
|
||||
|
@ -789,6 +828,7 @@ impl RenderTask {
|
|||
RenderTaskKind::Blit(..) |
|
||||
RenderTaskKind::Border(..) |
|
||||
RenderTaskKind::CacheMask(..) |
|
||||
RenderTaskKind::LineDecoration(..) |
|
||||
RenderTaskKind::Glyph(..) => {
|
||||
panic!("texture handle not supported for this task kind");
|
||||
}
|
||||
|
@ -838,6 +878,8 @@ impl RenderTask {
|
|||
match self.kind {
|
||||
RenderTaskKind::Readback(..) => RenderTargetKind::Color,
|
||||
|
||||
RenderTaskKind::LineDecoration(..) => RenderTargetKind::Color,
|
||||
|
||||
RenderTaskKind::ClipRegion(..) |
|
||||
RenderTaskKind::CacheMask(..) => {
|
||||
RenderTargetKind::Alpha
|
||||
|
@ -867,6 +909,30 @@ impl RenderTask {
|
|||
}
|
||||
}
|
||||
|
||||
/// If true, draw this task in the first pass. This is useful
|
||||
/// for simple texture cached render tasks that we want to be made
|
||||
/// available to all subsequent render passes.
|
||||
pub fn is_global_cached_task(&self) -> bool {
|
||||
match self.kind {
|
||||
RenderTaskKind::LineDecoration(..) => {
|
||||
true
|
||||
}
|
||||
|
||||
RenderTaskKind::Readback(..) |
|
||||
RenderTaskKind::ClipRegion(..) |
|
||||
RenderTaskKind::CacheMask(..) |
|
||||
RenderTaskKind::VerticalBlur(..) |
|
||||
RenderTaskKind::HorizontalBlur(..) |
|
||||
RenderTaskKind::Glyph(..) |
|
||||
RenderTaskKind::Scaling(..) |
|
||||
RenderTaskKind::Border(..) |
|
||||
RenderTaskKind::Picture(..) |
|
||||
RenderTaskKind::Blit(..) => {
|
||||
false
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Optionally, prepare the render task for drawing. This is executed
|
||||
// after all resource cache items (textures and glyphs) have been
|
||||
// resolved and can be queried. It also allows certain render tasks
|
||||
|
@ -894,6 +960,7 @@ impl RenderTask {
|
|||
RenderTaskKind::ClipRegion(..) |
|
||||
RenderTaskKind::Border(..) |
|
||||
RenderTaskKind::CacheMask(..) |
|
||||
RenderTaskKind::LineDecoration(..) |
|
||||
RenderTaskKind::Glyph(..) => {
|
||||
return;
|
||||
}
|
||||
|
@ -924,6 +991,9 @@ impl RenderTask {
|
|||
pt.new_level(format!("CacheMask with {} clips", task.clip_node_range.count));
|
||||
pt.add_item(format!("rect: {:?}", task.actual_rect));
|
||||
}
|
||||
RenderTaskKind::LineDecoration(..) => {
|
||||
pt.new_level("LineDecoration".to_owned());
|
||||
}
|
||||
RenderTaskKind::ClipRegion(..) => {
|
||||
pt.new_level("ClipRegion".to_owned());
|
||||
}
|
||||
|
@ -991,6 +1061,7 @@ pub enum RenderTaskCacheKeyKind {
|
|||
Glyph(GpuGlyphCacheKey),
|
||||
Picture(PictureCacheKey),
|
||||
Border(BorderCacheKey),
|
||||
LineDecoration(LineDecorationCacheKey),
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Hash, PartialEq, Eq)]
|
||||
|
|
|
@ -156,6 +156,10 @@ const GPU_TAG_CACHE_BORDER: GpuProfileTag = GpuProfileTag {
|
|||
label: "C_Border",
|
||||
color: debug_colors::CORNSILK,
|
||||
};
|
||||
const GPU_TAG_CACHE_LINE_DECORATION: GpuProfileTag = GpuProfileTag {
|
||||
label: "C_LineDecoration",
|
||||
color: debug_colors::YELLOWGREEN,
|
||||
};
|
||||
const GPU_TAG_SETUP_TARGET: GpuProfileTag = GpuProfileTag {
|
||||
label: "target init",
|
||||
color: debug_colors::SLATEGREY,
|
||||
|
@ -392,6 +396,43 @@ pub(crate) mod desc {
|
|||
],
|
||||
};
|
||||
|
||||
pub const LINE: VertexDescriptor = VertexDescriptor {
|
||||
vertex_attributes: &[
|
||||
VertexAttribute {
|
||||
name: "aPosition",
|
||||
count: 2,
|
||||
kind: VertexAttributeKind::F32,
|
||||
},
|
||||
],
|
||||
instance_attributes: &[
|
||||
VertexAttribute {
|
||||
name: "aTaskRect",
|
||||
count: 4,
|
||||
kind: VertexAttributeKind::F32,
|
||||
},
|
||||
VertexAttribute {
|
||||
name: "aLocalSize",
|
||||
count: 2,
|
||||
kind: VertexAttributeKind::F32,
|
||||
},
|
||||
VertexAttribute {
|
||||
name: "aWavyLineThickness",
|
||||
count: 1,
|
||||
kind: VertexAttributeKind::F32,
|
||||
},
|
||||
VertexAttribute {
|
||||
name: "aStyle",
|
||||
count: 1,
|
||||
kind: VertexAttributeKind::I32,
|
||||
},
|
||||
VertexAttribute {
|
||||
name: "aOrientation",
|
||||
count: 1,
|
||||
kind: VertexAttributeKind::I32,
|
||||
},
|
||||
],
|
||||
};
|
||||
|
||||
pub const BORDER: VertexDescriptor = VertexDescriptor {
|
||||
vertex_attributes: &[
|
||||
VertexAttribute {
|
||||
|
@ -618,6 +659,7 @@ pub(crate) enum VertexArrayKind {
|
|||
VectorCover,
|
||||
Border,
|
||||
Scale,
|
||||
LineDecoration,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, PartialEq)]
|
||||
|
@ -1324,14 +1366,18 @@ impl VertexDataTexture {
|
|||
}
|
||||
|
||||
fn update<T>(&mut self, device: &mut Device, data: &mut Vec<T>) {
|
||||
if data.is_empty() {
|
||||
return;
|
||||
}
|
||||
|
||||
debug_assert!(mem::size_of::<T>() % 16 == 0);
|
||||
let texels_per_item = mem::size_of::<T>() / 16;
|
||||
let items_per_row = MAX_VERTEX_TEXTURE_WIDTH / texels_per_item;
|
||||
|
||||
// Ensure we always end up with a texture when leaving this method.
|
||||
if data.is_empty() {
|
||||
if self.texture.is_some() {
|
||||
return;
|
||||
}
|
||||
data.push(unsafe { mem::uninitialized() });
|
||||
}
|
||||
|
||||
// Extend the data array to be a multiple of the row size.
|
||||
// This ensures memory safety when the array is passed to
|
||||
// OpenGL to upload to the GPU.
|
||||
|
@ -1447,6 +1493,7 @@ pub struct RendererVAOs {
|
|||
blur_vao: VAO,
|
||||
clip_vao: VAO,
|
||||
border_vao: VAO,
|
||||
line_vao: VAO,
|
||||
scale_vao: VAO,
|
||||
}
|
||||
|
||||
|
@ -1751,6 +1798,7 @@ impl Renderer {
|
|||
let clip_vao = device.create_vao_with_new_instances(&desc::CLIP, &prim_vao);
|
||||
let border_vao = device.create_vao_with_new_instances(&desc::BORDER, &prim_vao);
|
||||
let scale_vao = device.create_vao_with_new_instances(&desc::SCALE, &prim_vao);
|
||||
let line_vao = device.create_vao_with_new_instances(&desc::LINE, &prim_vao);
|
||||
let texture_cache_upload_pbo = device.create_pbo();
|
||||
|
||||
let texture_resolver = TextureResolver::new(&mut device);
|
||||
|
@ -1943,6 +1991,7 @@ impl Renderer {
|
|||
clip_vao,
|
||||
border_vao,
|
||||
scale_vao,
|
||||
line_vao,
|
||||
},
|
||||
transforms_texture,
|
||||
prim_header_i_texture,
|
||||
|
@ -2164,11 +2213,6 @@ impl Renderer {
|
|||
"BoxShadows",
|
||||
target.clip_batcher.box_shadows.len(),
|
||||
);
|
||||
debug_target.add(
|
||||
debug_server::BatchKind::Clip,
|
||||
"LineDecorations",
|
||||
target.clip_batcher.line_decorations.len(),
|
||||
);
|
||||
debug_target.add(
|
||||
debug_server::BatchKind::Cache,
|
||||
"Vertical Blur",
|
||||
|
@ -3450,22 +3494,6 @@ impl Renderer {
|
|||
);
|
||||
}
|
||||
|
||||
// draw line decoration clips
|
||||
if !target.clip_batcher.line_decorations.is_empty() {
|
||||
let _gm2 = self.gpu_profile.start_marker("clip lines");
|
||||
self.shaders.borrow_mut().cs_clip_line.bind(
|
||||
&mut self.device,
|
||||
projection,
|
||||
&mut self.renderer_errors,
|
||||
);
|
||||
self.draw_instanced_batch(
|
||||
&target.clip_batcher.line_decorations,
|
||||
VertexArrayKind::Clip,
|
||||
&BatchTextures::no_texture(),
|
||||
stats,
|
||||
);
|
||||
}
|
||||
|
||||
// draw image masks
|
||||
for (mask_texture_id, items) in target.clip_batcher.images.iter() {
|
||||
let _gm2 = self.gpu_profile.start_marker("clip images");
|
||||
|
@ -3584,6 +3612,31 @@ impl Renderer {
|
|||
self.set_blend(false, FramebufferKind::Other);
|
||||
}
|
||||
|
||||
// Draw any line decorations for this target.
|
||||
if !target.line_decorations.is_empty() {
|
||||
let _timer = self.gpu_profile.start_timer(GPU_TAG_CACHE_LINE_DECORATION);
|
||||
|
||||
self.set_blend(true, FramebufferKind::Other);
|
||||
self.set_blend_mode_premultiplied_alpha(FramebufferKind::Other);
|
||||
|
||||
if !target.line_decorations.is_empty() {
|
||||
self.shaders.borrow_mut().cs_line_decoration.bind(
|
||||
&mut self.device,
|
||||
&projection,
|
||||
&mut self.renderer_errors,
|
||||
);
|
||||
|
||||
self.draw_instanced_batch(
|
||||
&target.line_decorations,
|
||||
VertexArrayKind::LineDecoration,
|
||||
&BatchTextures::no_texture(),
|
||||
stats,
|
||||
);
|
||||
}
|
||||
|
||||
self.set_blend(false, FramebufferKind::Other);
|
||||
}
|
||||
|
||||
// Draw any blurs for this target.
|
||||
if !target.horizontal_blurs.is_empty() {
|
||||
let _timer = self.gpu_profile.start_timer(GPU_TAG_BLUR);
|
||||
|
@ -4247,6 +4300,7 @@ impl Renderer {
|
|||
self.device.delete_vao(self.vaos.prim_vao);
|
||||
self.device.delete_vao(self.vaos.clip_vao);
|
||||
self.device.delete_vao(self.vaos.blur_vao);
|
||||
self.device.delete_vao(self.vaos.line_vao);
|
||||
self.device.delete_vao(self.vaos.border_vao);
|
||||
self.device.delete_vao(self.vaos.scale_vao);
|
||||
|
||||
|
@ -4975,6 +5029,7 @@ fn get_vao<'a>(vertex_array_kind: VertexArrayKind,
|
|||
VertexArrayKind::VectorCover => &gpu_glyph_renderer.vector_cover_vao,
|
||||
VertexArrayKind::Border => &vaos.border_vao,
|
||||
VertexArrayKind::Scale => &vaos.scale_vao,
|
||||
VertexArrayKind::LineDecoration => &vaos.line_vao,
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -4990,6 +5045,7 @@ fn get_vao<'a>(vertex_array_kind: VertexArrayKind,
|
|||
VertexArrayKind::VectorStencil | VertexArrayKind::VectorCover => unreachable!(),
|
||||
VertexArrayKind::Border => &vaos.border_vao,
|
||||
VertexArrayKind::Scale => &vaos.scale_vao,
|
||||
VertexArrayKind::LineDecoration => &vaos.line_vao,
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -170,6 +170,7 @@ impl LazilyCompiledShader {
|
|||
|
||||
let vertex_descriptor = match vertex_format {
|
||||
VertexArrayKind::Primitive => &desc::PRIM_INSTANCES,
|
||||
VertexArrayKind::LineDecoration => &desc::LINE,
|
||||
VertexArrayKind::Blur => &desc::BLUR,
|
||||
VertexArrayKind::Clip => &desc::CLIP,
|
||||
VertexArrayKind::VectorStencil => &desc::VECTOR_STENCIL,
|
||||
|
@ -449,6 +450,7 @@ pub struct Shaders {
|
|||
pub cs_border_solid: LazilyCompiledShader,
|
||||
pub cs_scale_a8: LazilyCompiledShader,
|
||||
pub cs_scale_rgba8: LazilyCompiledShader,
|
||||
pub cs_line_decoration: LazilyCompiledShader,
|
||||
|
||||
// Brush shaders
|
||||
brush_solid: BrushShader,
|
||||
|
@ -465,7 +467,6 @@ pub struct Shaders {
|
|||
pub cs_clip_rectangle: LazilyCompiledShader,
|
||||
pub cs_clip_box_shadow: LazilyCompiledShader,
|
||||
pub cs_clip_image: LazilyCompiledShader,
|
||||
pub cs_clip_line: LazilyCompiledShader,
|
||||
|
||||
// The are "primitive shaders". These shaders draw and blend
|
||||
// final results on screen. They are aware of tile boundaries.
|
||||
|
@ -566,14 +567,6 @@ impl Shaders {
|
|||
options.precache_flags,
|
||||
)?;
|
||||
|
||||
let cs_clip_line = LazilyCompiledShader::new(
|
||||
ShaderKind::ClipCache,
|
||||
"cs_clip_line",
|
||||
&[],
|
||||
device,
|
||||
options.precache_flags,
|
||||
)?;
|
||||
|
||||
let cs_clip_image = LazilyCompiledShader::new(
|
||||
ShaderKind::ClipCache,
|
||||
"cs_clip_image",
|
||||
|
@ -684,6 +677,14 @@ impl Shaders {
|
|||
}
|
||||
}
|
||||
|
||||
let cs_line_decoration = LazilyCompiledShader::new(
|
||||
ShaderKind::Cache(VertexArrayKind::LineDecoration),
|
||||
"cs_line_decoration",
|
||||
&[],
|
||||
device,
|
||||
options.precache_flags,
|
||||
)?;
|
||||
|
||||
let cs_border_segment = LazilyCompiledShader::new(
|
||||
ShaderKind::Cache(VertexArrayKind::Border),
|
||||
"cs_border_segment",
|
||||
|
@ -712,6 +713,7 @@ impl Shaders {
|
|||
cs_blur_a8,
|
||||
cs_blur_rgba8,
|
||||
cs_border_segment,
|
||||
cs_line_decoration,
|
||||
cs_border_solid,
|
||||
cs_scale_a8,
|
||||
cs_scale_rgba8,
|
||||
|
@ -725,7 +727,6 @@ impl Shaders {
|
|||
cs_clip_rectangle,
|
||||
cs_clip_box_shadow,
|
||||
cs_clip_image,
|
||||
cs_clip_line,
|
||||
ps_text_run,
|
||||
ps_text_run_dual_source,
|
||||
ps_split_composite,
|
||||
|
@ -801,7 +802,6 @@ impl Shaders {
|
|||
self.cs_clip_rectangle.deinit(device);
|
||||
self.cs_clip_box_shadow.deinit(device);
|
||||
self.cs_clip_image.deinit(device);
|
||||
self.cs_clip_line.deinit(device);
|
||||
self.ps_text_run.deinit(device);
|
||||
self.ps_text_run_dual_source.deinit(device);
|
||||
for shader in self.brush_image {
|
||||
|
@ -815,6 +815,7 @@ impl Shaders {
|
|||
}
|
||||
}
|
||||
self.cs_border_solid.deinit(device);
|
||||
self.cs_line_decoration.deinit(device);
|
||||
self.cs_border_segment.deinit(device);
|
||||
self.ps_split_composite.deinit(device);
|
||||
}
|
||||
|
|
|
@ -4,7 +4,7 @@
|
|||
|
||||
use api::{ColorF, BorderStyle, DeviceIntPoint, DeviceIntRect, DeviceIntSize, DevicePixelScale};
|
||||
use api::{DeviceUintPoint, DeviceUintRect, DeviceUintSize, DocumentLayer, FilterOp, ImageFormat};
|
||||
use api::{MixBlendMode, PipelineId};
|
||||
use api::{MixBlendMode, PipelineId, DeviceRect, LayoutSize};
|
||||
use batch::{AlphaBatchBuilder, AlphaBatchContainer, ClipBatcher, resolve_image};
|
||||
use clip::ClipStore;
|
||||
use clip_scroll_tree::{ClipScrollTree};
|
||||
|
@ -319,6 +319,16 @@ pub struct BlitJob {
|
|||
pub target_rect: DeviceIntRect,
|
||||
}
|
||||
|
||||
#[cfg_attr(feature = "capture", derive(Serialize))]
|
||||
#[cfg_attr(feature = "replay", derive(Deserialize))]
|
||||
pub struct LineDecorationJob {
|
||||
pub task_rect: DeviceRect,
|
||||
pub local_size: LayoutSize,
|
||||
pub wavy_line_thickness: f32,
|
||||
pub style: i32,
|
||||
pub orientation: i32,
|
||||
}
|
||||
|
||||
#[cfg(feature = "pathfinder")]
|
||||
#[cfg_attr(feature = "capture", derive(Serialize))]
|
||||
#[cfg_attr(feature = "replay", derive(Deserialize))]
|
||||
|
@ -478,7 +488,8 @@ impl RenderTarget for ColorRenderTarget {
|
|||
}
|
||||
RenderTaskKind::ClipRegion(..) |
|
||||
RenderTaskKind::Border(..) |
|
||||
RenderTaskKind::CacheMask(..) => {
|
||||
RenderTaskKind::CacheMask(..) |
|
||||
RenderTaskKind::LineDecoration(..) => {
|
||||
panic!("Should not be added to color target!");
|
||||
}
|
||||
RenderTaskKind::Glyph(..) => {
|
||||
|
@ -616,6 +627,7 @@ impl RenderTarget for AlphaRenderTarget {
|
|||
RenderTaskKind::Picture(..) |
|
||||
RenderTaskKind::Blit(..) |
|
||||
RenderTaskKind::Border(..) |
|
||||
RenderTaskKind::LineDecoration(..) |
|
||||
RenderTaskKind::Glyph(..) => {
|
||||
panic!("BUG: should not be added to alpha target!");
|
||||
}
|
||||
|
@ -685,6 +697,7 @@ pub struct TextureCacheRenderTarget {
|
|||
pub border_segments_complex: Vec<BorderInstance>,
|
||||
pub border_segments_solid: Vec<BorderInstance>,
|
||||
pub clears: Vec<DeviceIntRect>,
|
||||
pub line_decorations: Vec<LineDecorationJob>,
|
||||
}
|
||||
|
||||
impl TextureCacheRenderTarget {
|
||||
|
@ -697,6 +710,7 @@ impl TextureCacheRenderTarget {
|
|||
border_segments_complex: vec![],
|
||||
border_segments_solid: vec![],
|
||||
clears: vec![],
|
||||
line_decorations: vec![],
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -714,6 +728,17 @@ impl TextureCacheRenderTarget {
|
|||
let target_rect = task.get_target_rect();
|
||||
|
||||
match task.kind {
|
||||
RenderTaskKind::LineDecoration(ref info) => {
|
||||
self.clears.push(target_rect.0);
|
||||
|
||||
self.line_decorations.push(LineDecorationJob {
|
||||
task_rect: target_rect.0.to_f32(),
|
||||
local_size: info.local_size,
|
||||
style: info.style as i32,
|
||||
orientation: info.orientation as i32,
|
||||
wavy_line_thickness: info.wavy_line_thickness,
|
||||
});
|
||||
}
|
||||
RenderTaskKind::HorizontalBlur(ref info) => {
|
||||
info.add_instances(
|
||||
&mut self.horizontal_blurs,
|
||||
|
|
|
@ -38,10 +38,6 @@ const SHADERS: &[Shader] = &[
|
|||
name: "cs_clip_box_shadow",
|
||||
features: CLIP_FEATURES,
|
||||
},
|
||||
Shader {
|
||||
name: "cs_clip_line",
|
||||
features: CLIP_FEATURES,
|
||||
},
|
||||
// Cache shaders
|
||||
Shader {
|
||||
name: "cs_blur",
|
||||
|
@ -51,6 +47,10 @@ const SHADERS: &[Shader] = &[
|
|||
name: "cs_border_segment",
|
||||
features: CACHE_FEATURES,
|
||||
},
|
||||
Shader {
|
||||
name: "cs_line_decoration",
|
||||
features: CACHE_FEATURES,
|
||||
},
|
||||
Shader {
|
||||
name: "cs_border_solid",
|
||||
features: CACHE_FEATURES,
|
||||
|
|
|
@ -1 +1 @@
|
|||
f83c387824b156e7f97f88edee96956bd0de482d
|
||||
7aa1d42ad41097b68e8026e3384127242601c95b
|
||||
|
|
|
@ -37,7 +37,8 @@ AnimationState::UpdateState(bool aAnimationFinished,
|
|||
SurfaceCache::Lookup(ImageKey(aImage),
|
||||
RasterSurfaceKey(aSize,
|
||||
DefaultSurfaceFlags(),
|
||||
PlaybackType::eAnimated));
|
||||
PlaybackType::eAnimated),
|
||||
/* aMarkUsed = */ false);
|
||||
|
||||
return UpdateStateInternal(result, aAnimationFinished, aSize, aAllowInvalidation);
|
||||
}
|
||||
|
@ -398,7 +399,8 @@ FrameAnimator::ResetAnimation(AnimationState& aState)
|
|||
SurfaceCache::Lookup(ImageKey(mImage),
|
||||
RasterSurfaceKey(mSize,
|
||||
DefaultSurfaceFlags(),
|
||||
PlaybackType::eAnimated));
|
||||
PlaybackType::eAnimated),
|
||||
/* aMarkUsed = */ false);
|
||||
if (!result) {
|
||||
return;
|
||||
}
|
||||
|
@ -427,7 +429,8 @@ FrameAnimator::RequestRefresh(AnimationState& aState,
|
|||
SurfaceCache::Lookup(ImageKey(mImage),
|
||||
RasterSurfaceKey(mSize,
|
||||
DefaultSurfaceFlags(),
|
||||
PlaybackType::eAnimated));
|
||||
PlaybackType::eAnimated),
|
||||
/* aMarkUsed = */ true);
|
||||
|
||||
ret.mDirtyRect = aState.UpdateStateInternal(result, aAnimationFinished, mSize);
|
||||
if (aState.IsDiscarded() || !result) {
|
||||
|
@ -502,7 +505,7 @@ FrameAnimator::RequestRefresh(AnimationState& aState,
|
|||
}
|
||||
|
||||
LookupResult
|
||||
FrameAnimator::GetCompositedFrame(AnimationState& aState)
|
||||
FrameAnimator::GetCompositedFrame(AnimationState& aState, bool aMarkUsed)
|
||||
{
|
||||
aState.mCompositedFrameRequested = true;
|
||||
|
||||
|
@ -517,7 +520,8 @@ FrameAnimator::GetCompositedFrame(AnimationState& aState)
|
|||
SurfaceCache::Lookup(ImageKey(mImage),
|
||||
RasterSurfaceKey(mSize,
|
||||
DefaultSurfaceFlags(),
|
||||
PlaybackType::eAnimated));
|
||||
PlaybackType::eAnimated),
|
||||
aMarkUsed);
|
||||
|
||||
if (aState.mCompositedFrameInvalid) {
|
||||
MOZ_ASSERT(gfxPrefs::ImageMemAnimatedDiscardable());
|
||||
|
|
|
@ -323,7 +323,8 @@ public:
|
|||
* not have required compositing). It may not be available because it hasn't
|
||||
* been decoded yet, in which case we return an empty LookupResult.
|
||||
*/
|
||||
LookupResult GetCompositedFrame(AnimationState& aState);
|
||||
LookupResult GetCompositedFrame(AnimationState& aState,
|
||||
bool aMarkUsed);
|
||||
|
||||
/**
|
||||
* Collect an accounting of the memory occupied by the compositing surfaces we
|
||||
|
|
|
@ -298,13 +298,14 @@ RasterImage::GetType(uint16_t* aType)
|
|||
LookupResult
|
||||
RasterImage::LookupFrameInternal(const IntSize& aSize,
|
||||
uint32_t aFlags,
|
||||
PlaybackType aPlaybackType)
|
||||
PlaybackType aPlaybackType,
|
||||
bool aMarkUsed)
|
||||
{
|
||||
if (mAnimationState && aPlaybackType == PlaybackType::eAnimated) {
|
||||
MOZ_ASSERT(mFrameAnimator);
|
||||
MOZ_ASSERT(ToSurfaceFlags(aFlags) == DefaultSurfaceFlags(),
|
||||
"Can't composite frames with non-default surface flags");
|
||||
return mFrameAnimator->GetCompositedFrame(*mAnimationState);
|
||||
return mFrameAnimator->GetCompositedFrame(*mAnimationState, aMarkUsed);
|
||||
}
|
||||
|
||||
SurfaceFlags surfaceFlags = ToSurfaceFlags(aFlags);
|
||||
|
@ -316,20 +317,23 @@ RasterImage::LookupFrameInternal(const IntSize& aSize,
|
|||
return SurfaceCache::Lookup(ImageKey(this),
|
||||
RasterSurfaceKey(aSize,
|
||||
surfaceFlags,
|
||||
PlaybackType::eStatic));
|
||||
PlaybackType::eStatic),
|
||||
aMarkUsed);
|
||||
}
|
||||
|
||||
// We'll return the best match we can find to the requested frame.
|
||||
return SurfaceCache::LookupBestMatch(ImageKey(this),
|
||||
RasterSurfaceKey(aSize,
|
||||
surfaceFlags,
|
||||
PlaybackType::eStatic));
|
||||
PlaybackType::eStatic),
|
||||
aMarkUsed);
|
||||
}
|
||||
|
||||
LookupResult
|
||||
RasterImage::LookupFrame(const IntSize& aSize,
|
||||
uint32_t aFlags,
|
||||
PlaybackType aPlaybackType)
|
||||
PlaybackType aPlaybackType,
|
||||
bool aMarkUsed)
|
||||
{
|
||||
MOZ_ASSERT(NS_IsMainThread());
|
||||
|
||||
|
@ -347,7 +351,7 @@ RasterImage::LookupFrame(const IntSize& aSize,
|
|||
}
|
||||
|
||||
LookupResult result =
|
||||
LookupFrameInternal(requestedSize, aFlags, aPlaybackType);
|
||||
LookupFrameInternal(requestedSize, aFlags, aPlaybackType, aMarkUsed);
|
||||
|
||||
if (!result && !mHasSize) {
|
||||
// We can't request a decode without knowing our intrinsic size. Give up.
|
||||
|
@ -377,7 +381,7 @@ RasterImage::LookupFrame(const IntSize& aSize,
|
|||
|
||||
// If we can or did sync decode, we should already have the frame.
|
||||
if (ranSync || (aFlags & FLAG_SYNC_DECODE)) {
|
||||
result = LookupFrameInternal(requestedSize, aFlags, aPlaybackType);
|
||||
result = LookupFrameInternal(requestedSize, aFlags, aPlaybackType, aMarkUsed);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -462,7 +466,8 @@ RasterImage::WillDrawOpaqueNow()
|
|||
SurfaceCache::LookupBestMatch(ImageKey(this),
|
||||
RasterSurfaceKey(mSize,
|
||||
DefaultSurfaceFlags(),
|
||||
PlaybackType::eStatic));
|
||||
PlaybackType::eStatic),
|
||||
/* aMarkUsed = */ false);
|
||||
MatchType matchType = result.Type();
|
||||
if (matchType == MatchType::NOT_FOUND || matchType == MatchType::PENDING ||
|
||||
!result.Surface()->IsFinished()) {
|
||||
|
@ -604,7 +609,7 @@ RasterImage::GetFrameInternal(const IntSize& aSize,
|
|||
// not waiting for the data to be loaded from the network or not passing
|
||||
// FLAG_SYNC_DECODE.
|
||||
LookupResult result =
|
||||
LookupFrame(aSize, aFlags, ToPlaybackType(aWhichFrame));
|
||||
LookupFrame(aSize, aFlags, ToPlaybackType(aWhichFrame), /* aMarkUsed = */ true);
|
||||
|
||||
// The surface cache may have suggested we use a different size than the
|
||||
// given size in the future. This may or may not be accompanied by an
|
||||
|
@ -1200,7 +1205,7 @@ RasterImage::RequestDecodeForSizeInternal(const IntSize& aSize, uint32_t aFlags)
|
|||
// Perform a frame lookup, which will implicitly start decoding if needed.
|
||||
PlaybackType playbackType = mAnimationState ? PlaybackType::eAnimated
|
||||
: PlaybackType::eStatic;
|
||||
LookupResult result = LookupFrame(aSize, flags, playbackType);
|
||||
LookupResult result = LookupFrame(aSize, flags, playbackType, /* aMarkUsed = */ false);
|
||||
return std::move(result.Surface());
|
||||
}
|
||||
|
||||
|
@ -1518,7 +1523,7 @@ RasterImage::Draw(gfxContext* aContext,
|
|||
: aFlags & ~FLAG_HIGH_QUALITY_SCALING;
|
||||
|
||||
LookupResult result =
|
||||
LookupFrame(aSize, flags, ToPlaybackType(aWhichFrame));
|
||||
LookupFrame(aSize, flags, ToPlaybackType(aWhichFrame), /* aMarkUsed = */ true);
|
||||
if (!result) {
|
||||
// Getting the frame (above) touches the image and kicks off decoding.
|
||||
if (mDrawStartTime.IsNull()) {
|
||||
|
|
|
@ -284,19 +284,22 @@ private:
|
|||
* data, we'll attempt a sync decode if no matching surface is found. If
|
||||
* FLAG_SYNC_DECODE was not specified and no matching surface was found, we'll
|
||||
* kick off an async decode so that the surface is (hopefully) available next
|
||||
* time it's requested.
|
||||
* time it's requested. aMarkUsed determines if we mark the surface used in
|
||||
* the surface cache or not.
|
||||
*
|
||||
* @return a drawable surface, which may be empty if the requested surface
|
||||
* could not be found.
|
||||
*/
|
||||
LookupResult LookupFrame(const gfx::IntSize& aSize,
|
||||
uint32_t aFlags,
|
||||
PlaybackType aPlaybackType);
|
||||
PlaybackType aPlaybackType,
|
||||
bool aMarkUsed);
|
||||
|
||||
/// Helper method for LookupFrame().
|
||||
LookupResult LookupFrameInternal(const gfx::IntSize& aSize,
|
||||
uint32_t aFlags,
|
||||
PlaybackType aPlaybackType);
|
||||
PlaybackType aPlaybackType,
|
||||
bool aMarkUsed);
|
||||
|
||||
ImgDrawResult DrawInternal(DrawableSurface&& aFrameRef,
|
||||
gfxContext* aContext,
|
||||
|
|
|
@ -960,7 +960,7 @@ public:
|
|||
LookupResult Lookup(const ImageKey aImageKey,
|
||||
const SurfaceKey& aSurfaceKey,
|
||||
const StaticMutexAutoLock& aAutoLock,
|
||||
bool aMarkUsed = true)
|
||||
bool aMarkUsed)
|
||||
{
|
||||
RefPtr<ImageSurfaceCache> cache = GetImageCache(aImageKey);
|
||||
if (!cache) {
|
||||
|
@ -999,7 +999,8 @@ public:
|
|||
|
||||
LookupResult LookupBestMatch(const ImageKey aImageKey,
|
||||
const SurfaceKey& aSurfaceKey,
|
||||
const StaticMutexAutoLock& aAutoLock)
|
||||
const StaticMutexAutoLock& aAutoLock,
|
||||
bool aMarkUsed)
|
||||
{
|
||||
RefPtr<ImageSurfaceCache> cache = GetImageCache(aImageKey);
|
||||
if (!cache) {
|
||||
|
@ -1045,7 +1046,8 @@ public:
|
|||
|
||||
if (matchType == MatchType::EXACT ||
|
||||
matchType == MatchType::SUBSTITUTE_BECAUSE_BEST) {
|
||||
if (!MarkUsed(WrapNotNull(surface), WrapNotNull(cache), aAutoLock)) {
|
||||
if (aMarkUsed &&
|
||||
!MarkUsed(WrapNotNull(surface), WrapNotNull(cache), aAutoLock)) {
|
||||
Remove(WrapNotNull(surface), /* aStopTracking */ false, aAutoLock);
|
||||
}
|
||||
}
|
||||
|
@ -1516,7 +1518,8 @@ SurfaceCache::Shutdown()
|
|||
|
||||
/* static */ LookupResult
|
||||
SurfaceCache::Lookup(const ImageKey aImageKey,
|
||||
const SurfaceKey& aSurfaceKey)
|
||||
const SurfaceKey& aSurfaceKey,
|
||||
bool aMarkUsed)
|
||||
{
|
||||
nsTArray<RefPtr<CachedSurface>> discard;
|
||||
LookupResult rv(MatchType::NOT_FOUND);
|
||||
|
@ -1527,7 +1530,7 @@ SurfaceCache::Lookup(const ImageKey aImageKey,
|
|||
return rv;
|
||||
}
|
||||
|
||||
rv = sInstance->Lookup(aImageKey, aSurfaceKey, lock);
|
||||
rv = sInstance->Lookup(aImageKey, aSurfaceKey, lock, aMarkUsed);
|
||||
sInstance->TakeDiscard(discard, lock);
|
||||
}
|
||||
|
||||
|
@ -1536,7 +1539,8 @@ SurfaceCache::Lookup(const ImageKey aImageKey,
|
|||
|
||||
/* static */ LookupResult
|
||||
SurfaceCache::LookupBestMatch(const ImageKey aImageKey,
|
||||
const SurfaceKey& aSurfaceKey)
|
||||
const SurfaceKey& aSurfaceKey,
|
||||
bool aMarkUsed)
|
||||
{
|
||||
nsTArray<RefPtr<CachedSurface>> discard;
|
||||
LookupResult rv(MatchType::NOT_FOUND);
|
||||
|
@ -1547,7 +1551,7 @@ SurfaceCache::LookupBestMatch(const ImageKey aImageKey,
|
|||
return rv;
|
||||
}
|
||||
|
||||
rv = sInstance->LookupBestMatch(aImageKey, aSurfaceKey, lock);
|
||||
rv = sInstance->LookupBestMatch(aImageKey, aSurfaceKey, lock, aMarkUsed);
|
||||
sInstance->TakeDiscard(discard, lock);
|
||||
}
|
||||
|
||||
|
|
|
@ -232,7 +232,8 @@ struct SurfaceCache
|
|||
* if the cache entry was found.
|
||||
*/
|
||||
static LookupResult Lookup(const ImageKey aImageKey,
|
||||
const SurfaceKey& aSurfaceKey);
|
||||
const SurfaceKey& aSurfaceKey,
|
||||
bool aMarkUsed);
|
||||
|
||||
/**
|
||||
* Looks up the best matching cache entry and returns a drawable reference to
|
||||
|
@ -251,7 +252,8 @@ struct SurfaceCache
|
|||
* returned surface exactly matches @aSurfaceKey.
|
||||
*/
|
||||
static LookupResult LookupBestMatch(const ImageKey aImageKey,
|
||||
const SurfaceKey& aSurfaceKey);
|
||||
const SurfaceKey& aSurfaceKey,
|
||||
bool aMarkUsed);
|
||||
|
||||
/**
|
||||
* Insert an ISurfaceProvider into the cache. If an entry with the same
|
||||
|
|
|
@ -1131,9 +1131,9 @@ VectorImage::LookupCachedSurface(const IntSize& aSize,
|
|||
LookupResult result(MatchType::NOT_FOUND);
|
||||
SurfaceKey surfaceKey = VectorSurfaceKey(aSize, aSVGContext);
|
||||
if ((aFlags & FLAG_SYNC_DECODE) || !(aFlags & FLAG_HIGH_QUALITY_SCALING)) {
|
||||
result = SurfaceCache::Lookup(ImageKey(this), surfaceKey);
|
||||
result = SurfaceCache::Lookup(ImageKey(this), surfaceKey, /* aMarkUsed = */ true);
|
||||
} else {
|
||||
result = SurfaceCache::LookupBestMatch(ImageKey(this), surfaceKey);
|
||||
result = SurfaceCache::LookupBestMatch(ImageKey(this), surfaceKey, /* aMarkUsed = */ true);
|
||||
}
|
||||
|
||||
IntSize rasterSize = result.SuggestedSize().IsEmpty()
|
||||
|
|
|
@ -476,11 +476,7 @@ interface imgIContainer : nsISupports
|
|||
* @param aSize The size to which the image should be scaled while decoding,
|
||||
* if possible. If the image cannot be scaled to this size while
|
||||
* being decoded, it will be decoded at its intrinsic size.
|
||||
* @param aFlags Flags of the FLAG_* variety. Only the decode flags
|
||||
* (FLAG_DECODE_*) and FLAG_SYNC_DECODE (which will
|
||||
* synchronously decode images that can be decoded "quickly",
|
||||
* just like startDecoding() does) are accepted; others will be
|
||||
* ignored.
|
||||
* @param aFlags Flags of the FLAG_* variety.
|
||||
*/
|
||||
[noscript] void requestDecodeForSize([const] in nsIntSize aSize,
|
||||
in uint32_t aFlags);
|
||||
|
|
|
@ -573,7 +573,8 @@ TEST_F(ImageDecoders, AnimatedGIFWithFRAME_FIRST)
|
|||
SurfaceCache::Lookup(ImageKey(image.get()),
|
||||
RasterSurfaceKey(imageSize,
|
||||
DefaultSurfaceFlags(),
|
||||
PlaybackType::eStatic));
|
||||
PlaybackType::eStatic),
|
||||
/* aMarkUsed = */ false);
|
||||
ASSERT_EQ(MatchType::EXACT, result.Type());
|
||||
EXPECT_TRUE(bool(result.Surface()));
|
||||
}
|
||||
|
@ -584,7 +585,8 @@ TEST_F(ImageDecoders, AnimatedGIFWithFRAME_FIRST)
|
|||
SurfaceCache::Lookup(ImageKey(image.get()),
|
||||
RasterSurfaceKey(imageSize,
|
||||
DefaultSurfaceFlags(),
|
||||
PlaybackType::eAnimated));
|
||||
PlaybackType::eAnimated),
|
||||
/* aMarkUsed = */ false);
|
||||
ASSERT_EQ(MatchType::NOT_FOUND, result.Type());
|
||||
}
|
||||
|
||||
|
@ -600,7 +602,8 @@ TEST_F(ImageDecoders, AnimatedGIFWithFRAME_FIRST)
|
|||
SurfaceCache::Lookup(ImageKey(image.get()),
|
||||
RasterSurfaceKey(imageSize,
|
||||
DefaultSurfaceFlags(),
|
||||
PlaybackType::eAnimated));
|
||||
PlaybackType::eAnimated),
|
||||
/* aMarkUsed = */ true);
|
||||
ASSERT_EQ(MatchType::EXACT, result.Type());
|
||||
|
||||
EXPECT_TRUE(NS_SUCCEEDED(result.Surface().Seek(0)));
|
||||
|
@ -616,7 +619,8 @@ TEST_F(ImageDecoders, AnimatedGIFWithFRAME_FIRST)
|
|||
SurfaceCache::Lookup(ImageKey(image.get()),
|
||||
RasterSurfaceKey(imageSize,
|
||||
DefaultSurfaceFlags(),
|
||||
PlaybackType::eStatic));
|
||||
PlaybackType::eStatic),
|
||||
/* aMarkUsed = */ true);
|
||||
ASSERT_EQ(MatchType::EXACT, result.Type());
|
||||
EXPECT_TRUE(bool(result.Surface()));
|
||||
}
|
||||
|
@ -686,7 +690,8 @@ TEST_F(ImageDecoders, AnimatedGIFWithFRAME_CURRENT)
|
|||
SurfaceCache::Lookup(ImageKey(image.get()),
|
||||
RasterSurfaceKey(imageSize,
|
||||
DefaultSurfaceFlags(),
|
||||
PlaybackType::eAnimated));
|
||||
PlaybackType::eAnimated),
|
||||
/* aMarkUsed = */ true);
|
||||
ASSERT_EQ(MatchType::EXACT, result.Type());
|
||||
|
||||
EXPECT_TRUE(NS_SUCCEEDED(result.Surface().Seek(0)));
|
||||
|
@ -702,7 +707,8 @@ TEST_F(ImageDecoders, AnimatedGIFWithFRAME_CURRENT)
|
|||
SurfaceCache::Lookup(ImageKey(image.get()),
|
||||
RasterSurfaceKey(imageSize,
|
||||
DefaultSurfaceFlags(),
|
||||
PlaybackType::eStatic));
|
||||
PlaybackType::eStatic),
|
||||
/* aMarkUsed = */ false);
|
||||
ASSERT_EQ(MatchType::NOT_FOUND, result.Type());
|
||||
}
|
||||
|
||||
|
@ -718,7 +724,8 @@ TEST_F(ImageDecoders, AnimatedGIFWithFRAME_CURRENT)
|
|||
SurfaceCache::Lookup(ImageKey(image.get()),
|
||||
RasterSurfaceKey(imageSize,
|
||||
DefaultSurfaceFlags(),
|
||||
PlaybackType::eStatic));
|
||||
PlaybackType::eStatic),
|
||||
/* aMarkUsed = */ true);
|
||||
ASSERT_EQ(MatchType::EXACT, result.Type());
|
||||
EXPECT_TRUE(bool(result.Surface()));
|
||||
}
|
||||
|
@ -729,7 +736,8 @@ TEST_F(ImageDecoders, AnimatedGIFWithFRAME_CURRENT)
|
|||
SurfaceCache::Lookup(ImageKey(image.get()),
|
||||
RasterSurfaceKey(imageSize,
|
||||
DefaultSurfaceFlags(),
|
||||
PlaybackType::eAnimated));
|
||||
PlaybackType::eAnimated),
|
||||
/* aMarkUsed = */ true);
|
||||
ASSERT_EQ(MatchType::EXACT, result.Type());
|
||||
|
||||
EXPECT_TRUE(NS_SUCCEEDED(result.Surface().Seek(0)));
|
||||
|
@ -798,7 +806,8 @@ TEST_F(ImageDecoders, AnimatedGIFWithExtraImageSubBlocks)
|
|||
SurfaceCache::Lookup(ImageKey(image.get()),
|
||||
RasterSurfaceKey(imageSize,
|
||||
DefaultSurfaceFlags(),
|
||||
PlaybackType::eAnimated));
|
||||
PlaybackType::eAnimated),
|
||||
/* aMarkUsed = */ true);
|
||||
ASSERT_EQ(MatchType::EXACT, result.Type());
|
||||
|
||||
EXPECT_TRUE(NS_SUCCEEDED(result.Surface().Seek(0)));
|
||||
|
|
|
@ -250,7 +250,8 @@ TEST_F(ImageDecoderMetadata, NoFrameDelayGIFFullDecode)
|
|||
SurfaceCache::Lookup(ImageKey(image.get()),
|
||||
RasterSurfaceKey(imageSize,
|
||||
DefaultSurfaceFlags(),
|
||||
PlaybackType::eAnimated));
|
||||
PlaybackType::eAnimated),
|
||||
/* aMarkUsed = */ true);
|
||||
ASSERT_EQ(MatchType::EXACT, result.Type());
|
||||
|
||||
EXPECT_TRUE(NS_SUCCEEDED(result.Surface().Seek(0)));
|
||||
|
|
|
@ -79,6 +79,7 @@ constexpr char16_t GREEK_SMALL_LETTER_FINAL_SIGMA = 0x03C2;
|
|||
constexpr char16_t GREEK_SMALL_LETTER_SIGMA = 0x03C3;
|
||||
constexpr char16_t LINE_SEPARATOR = 0x2028;
|
||||
constexpr char16_t PARA_SEPARATOR = 0x2029;
|
||||
constexpr char16_t REPLACEMENT_CHARACTER = 0xFFFD;
|
||||
constexpr char16_t BYTE_ORDER_MARK2 = 0xFFFE;
|
||||
|
||||
const char16_t LeadSurrogateMin = 0xD800;
|
||||
|
|
|
@ -12,6 +12,7 @@
|
|||
#include <algorithm>
|
||||
#include <type_traits>
|
||||
|
||||
#include "util/Unicode.h" // unicode::REPLACEMENT_CHARACTER
|
||||
#include "vm/JSContext.h"
|
||||
|
||||
using namespace js;
|
||||
|
@ -80,8 +81,6 @@ JS::GetDeflatedUTF8StringLength(JSFlatString* s)
|
|||
: ::GetDeflatedUTF8StringLength(s->twoByteChars(nogc), s->length());
|
||||
}
|
||||
|
||||
static const char16_t UTF8_REPLACEMENT_CHAR = 0xFFFD;
|
||||
|
||||
template <typename CharT>
|
||||
static void
|
||||
DeflateStringToUTF8Buffer(const CharT* src, size_t srclen, mozilla::RangedPtr<char> dst,
|
||||
|
@ -101,16 +100,16 @@ DeflateStringToUTF8Buffer(const CharT* src, size_t srclen, mozilla::RangedPtr<ch
|
|||
char16_t c = *src++;
|
||||
srclen--;
|
||||
if (c >= 0xDC00 && c <= 0xDFFF) {
|
||||
v = UTF8_REPLACEMENT_CHAR;
|
||||
v = unicode::REPLACEMENT_CHARACTER;
|
||||
} else if (c < 0xD800 || c > 0xDBFF) {
|
||||
v = c;
|
||||
} else {
|
||||
if (srclen < 1) {
|
||||
v = UTF8_REPLACEMENT_CHAR;
|
||||
v = unicode::REPLACEMENT_CHARACTER;
|
||||
} else {
|
||||
char16_t c2 = *src;
|
||||
if (c2 < 0xDC00 || c2 > 0xDFFF) {
|
||||
v = UTF8_REPLACEMENT_CHAR;
|
||||
v = unicode::REPLACEMENT_CHARACTER;
|
||||
} else {
|
||||
src++;
|
||||
srclen--;
|
||||
|
@ -271,13 +270,6 @@ enum class OnUTF8Error {
|
|||
Crash,
|
||||
};
|
||||
|
||||
// The Unicode REPLACEMENT CHARACTER, rendered as a diamond with a question
|
||||
// mark, meaning "someone screwed up here but it wasn't me".
|
||||
static const char16_t REPLACEMENT_CHARACTER = 0xFFFD;
|
||||
|
||||
// If making changes to this algorithm, make sure to also update
|
||||
// LossyConvertUTF8toUTF16() in dom/wifi/WifiUtils.cpp
|
||||
//
|
||||
// Scan UTF8 input and (internally, at least) convert it to a series of UTF-16
|
||||
// code units. But you can also do odd things like pass an empty lambda for
|
||||
// `dst`, in which case the output is discarded entirely--the only effect of
|
||||
|
@ -311,7 +303,7 @@ InflateUTF8ToUTF16(JSContext* cx, const UTF8Chars src, OutputFn dst)
|
|||
} else { \
|
||||
char16_t replacement; \
|
||||
if (ErrorAction == OnUTF8Error::InsertReplacementCharacter) { \
|
||||
replacement = REPLACEMENT_CHARACTER; \
|
||||
replacement = unicode::REPLACEMENT_CHARACTER; \
|
||||
} else { \
|
||||
MOZ_ASSERT(ErrorAction == OnUTF8Error::InsertQuestionMark); \
|
||||
replacement = '?'; \
|
||||
|
@ -398,7 +390,7 @@ InflateUTF8StringHelper(JSContext* cx, const UTF8Chars src, size_t* outlen)
|
|||
|
||||
size_t len = 0;
|
||||
bool allASCII = true;
|
||||
auto count = [&](char16_t c) -> LoopDisposition {
|
||||
auto count = [&len, &allASCII](char16_t c) -> LoopDisposition {
|
||||
len++;
|
||||
allASCII &= (c < 0x80);
|
||||
return LoopDisposition::Continue;
|
||||
|
@ -425,7 +417,7 @@ InflateUTF8StringHelper(JSContext* cx, const UTF8Chars src, size_t* outlen)
|
|||
? OnUTF8Error::InsertQuestionMark
|
||||
: OnUTF8Error::InsertReplacementCharacter;
|
||||
size_t j = 0;
|
||||
auto push = [&](char16_t c) -> LoopDisposition {
|
||||
auto push = [dst, &j](char16_t c) -> LoopDisposition {
|
||||
dst[j++] = CharT(c);
|
||||
return LoopDisposition::Continue;
|
||||
};
|
||||
|
@ -467,7 +459,7 @@ JS::SmallestEncoding
|
|||
JS::FindSmallestEncoding(UTF8Chars utf8)
|
||||
{
|
||||
JS::SmallestEncoding encoding = JS::SmallestEncoding::ASCII;
|
||||
auto onChar = [&](char16_t c) -> LoopDisposition {
|
||||
auto onChar = [&encoding](char16_t c) -> LoopDisposition {
|
||||
if (c >= 0x80) {
|
||||
if (c < 0x100) {
|
||||
encoding = JS::SmallestEncoding::Latin1;
|
||||
|
|
|
@ -1067,12 +1067,6 @@ js::XDRAtom(XDRState<mode>* xdr, MutableHandleAtom atomp)
|
|||
latin1 = lengthAndEncoding & 0x1;
|
||||
}
|
||||
|
||||
// We need to align the string in the XDR buffer such that we can avoid
|
||||
// non-align loads of 16bits characters.
|
||||
if (!latin1) {
|
||||
MOZ_TRY(xdr->codeAlign(sizeof(char16_t)));
|
||||
}
|
||||
|
||||
if (mode == XDR_ENCODE) {
|
||||
JS::AutoCheckCannotGC nogc;
|
||||
if (latin1) {
|
||||
|
@ -1100,7 +1094,13 @@ js::XDRAtom(XDRState<mode>* xdr, MutableHandleAtom atomp)
|
|||
/* Directly access the little endian chars in the XDR buffer. */
|
||||
const char16_t* chars = nullptr;
|
||||
if (length) {
|
||||
const uint8_t *ptr;
|
||||
// In the |mode == XDR_ENCODE| case above, when |nchars > 0|,
|
||||
// |XDRState::codeChars(char16_t*, size_t nchars)| will align the
|
||||
// buffer. This code never calls that function, but it must act
|
||||
// *as if* it had, so we must align manually here.
|
||||
MOZ_TRY(xdr->codeAlign(sizeof(char16_t)));
|
||||
|
||||
const uint8_t* ptr;
|
||||
size_t nbyte = length * sizeof(char16_t);
|
||||
MOZ_TRY(xdr->peekData(&ptr, nbyte));
|
||||
MOZ_ASSERT(reinterpret_cast<uintptr_t>(ptr) % sizeof(char16_t) == 0,
|
||||
|
|
|
@ -98,6 +98,10 @@ XDRState<mode>::codeChars(char16_t* chars, size_t nchars)
|
|||
if (nchars == 0) {
|
||||
return Ok();
|
||||
}
|
||||
|
||||
// Align the buffer to avoid unaligned loads.
|
||||
MOZ_TRY(codeAlign(sizeof(char16_t)));
|
||||
|
||||
size_t nbytes = nchars * sizeof(char16_t);
|
||||
if (mode == XDR_ENCODE) {
|
||||
uint8_t* ptr = buf.write(nbytes);
|
||||
|
|
|
@ -492,6 +492,8 @@ class XDRState : public XDRCoderBase
|
|||
XDRResult codeChars(JS::Latin1Char* chars, size_t nchars);
|
||||
XDRResult codeChars(mozilla::Utf8Unit* units, size_t nchars);
|
||||
|
||||
// If |nchars > 0|, this calls |codeAlign(sizeof(char16_t))| so callers
|
||||
// don't have to.
|
||||
XDRResult codeChars(char16_t* chars, size_t nchars);
|
||||
|
||||
XDRResult codeFunction(JS::MutableHandleFunction objp,
|
||||
|
|
|
@ -79,7 +79,6 @@
|
|||
#endif
|
||||
|
||||
#include "CubebUtils.h"
|
||||
#include "Latency.h"
|
||||
#include "WebAudioUtils.h"
|
||||
|
||||
#include "nsError.h"
|
||||
|
@ -221,7 +220,6 @@ nsLayoutStatics::Initialize()
|
|||
return rv;
|
||||
}
|
||||
|
||||
AsyncLatencyLogger::InitializeStatics();
|
||||
DecoderDoctorLogger::Init();
|
||||
MediaManager::StartupInit();
|
||||
CubebUtils::InitLibrary();
|
||||
|
@ -359,7 +357,6 @@ nsLayoutStatics::Shutdown()
|
|||
FrameLayerBuilder::Shutdown();
|
||||
|
||||
CubebUtils::ShutdownLibrary();
|
||||
AsyncLatencyLogger::ShutdownLogger();
|
||||
WebAudioUtils::Shutdown();
|
||||
|
||||
nsCORSListenerProxy::Shutdown();
|
||||
|
|
|
@ -43,14 +43,31 @@ nsRect
|
|||
nsFieldSetFrame::VisualBorderRectRelativeToSelf() const
|
||||
{
|
||||
WritingMode wm = GetWritingMode();
|
||||
Side legendSide = wm.PhysicalSide(eLogicalSideBStart);
|
||||
nscoord legendBorder = StyleBorder()->GetComputedBorderWidth(legendSide);
|
||||
LogicalRect r(wm, LogicalPoint(wm, 0, 0), GetLogicalSize(wm));
|
||||
nsSize containerSize = r.Size(wm).GetPhysicalSize(wm);
|
||||
if (legendBorder < mLegendRect.BSize(wm)) {
|
||||
nscoord off = (mLegendRect.BSize(wm) - legendBorder) / 2;
|
||||
r.BStart(wm) += off;
|
||||
r.BSize(wm) -= off;
|
||||
if (nsIFrame* legend = GetLegend()) {
|
||||
nscoord legendSize = legend->GetLogicalSize(wm).BSize(wm);
|
||||
auto legendMargin = legend->GetLogicalUsedMargin(wm);
|
||||
nscoord legendStartMargin = legendMargin.BStart(wm);
|
||||
nscoord legendEndMargin = legendMargin.BEnd(wm);
|
||||
nscoord border = GetUsedBorder().Side(wm.PhysicalSide(eLogicalSideBStart));
|
||||
// Calculate the offset from the border area block-axis start edge needed to
|
||||
// center-align our border with the legend's border-box (in the block-axis).
|
||||
nscoord off = (legendStartMargin + legendSize / 2) - border / 2;
|
||||
// We don't want to display our border above our border area.
|
||||
if (off > nscoord(0)) {
|
||||
nscoord marginBoxSize = legendStartMargin + legendSize + legendEndMargin;
|
||||
if (marginBoxSize > border) {
|
||||
// We don't want to display our border below the legend's margin-box,
|
||||
// so we align it to the block-axis end if that happens.
|
||||
nscoord overflow = off + border - marginBoxSize;
|
||||
if (overflow > nscoord(0)) {
|
||||
off -= overflow;
|
||||
}
|
||||
r.BStart(wm) += off;
|
||||
r.BSize(wm) -= off;
|
||||
}
|
||||
}
|
||||
}
|
||||
return r.GetPhysicalRect(wm, containerSize);
|
||||
}
|
||||
|
@ -445,7 +462,7 @@ nsFieldSetFrame::Reflow(nsPresContext* aPresContext,
|
|||
printf(" returned (%d, %d)\n",
|
||||
legendDesiredSize.Width(), legendDesiredSize.Height());
|
||||
#endif
|
||||
// figure out the legend's rectangle
|
||||
// Calculate the legend's margin-box rectangle.
|
||||
legendMargin = legend->GetLogicalUsedMargin(wm);
|
||||
mLegendRect =
|
||||
LogicalRect(wm, 0, 0,
|
||||
|
@ -453,12 +470,23 @@ nsFieldSetFrame::Reflow(nsPresContext* aPresContext,
|
|||
legendDesiredSize.BSize(wm) + legendMargin.BStartEnd(wm));
|
||||
nscoord oldSpace = mLegendSpace;
|
||||
mLegendSpace = 0;
|
||||
if (mLegendRect.BSize(wm) > border.BStart(wm)) {
|
||||
// center the border on the legend
|
||||
mLegendSpace = mLegendRect.BSize(wm) - border.BStart(wm);
|
||||
nscoord borderBStart = border.BStart(wm);
|
||||
if (mLegendRect.BSize(wm) > borderBStart) {
|
||||
// mLegendSpace is the space to subtract from our content-box size below.
|
||||
mLegendSpace = mLegendRect.BSize(wm) - borderBStart;
|
||||
} else {
|
||||
mLegendRect.BStart(wm) =
|
||||
(border.BStart(wm) - mLegendRect.BSize(wm)) / 2;
|
||||
// Calculate the border-box position that would center the legend's
|
||||
// border-box within the fieldset border:
|
||||
nscoord off = (borderBStart - legendDesiredSize.BSize(wm)) / 2;
|
||||
off -= legendMargin.BStart(wm); // convert to a margin-box position
|
||||
if (off > nscoord(0)) {
|
||||
// Align the legend to the end if center-aligning it would overflow.
|
||||
nscoord overflow = off + mLegendRect.BSize(wm) - borderBStart;
|
||||
if (overflow > nscoord(0)) {
|
||||
off -= overflow;
|
||||
}
|
||||
mLegendRect.BStart(wm) += off;
|
||||
}
|
||||
}
|
||||
|
||||
// if the legend space changes then we need to reflow the
|
||||
|
|
|
@ -1,7 +0,0 @@
|
|||
<?xml version="1.0"?>
|
||||
<?xml-stylesheet href="chrome://global/skin/" type="text/css"?>
|
||||
|
||||
<window id="window403458"
|
||||
xmlns="http://www.mozilla.org/keymaster/gatekeeper/there.is.only.xul">
|
||||
<hbox style="-moz-appearance: menuarrow; direction: ltr" />
|
||||
</window>
|
|
@ -1,7 +0,0 @@
|
|||
<?xml version="1.0"?>
|
||||
<?xml-stylesheet href="chrome://global/skin/" type="text/css"?>
|
||||
|
||||
<window id="window403458"
|
||||
xmlns="http://www.mozilla.org/keymaster/gatekeeper/there.is.only.xul">
|
||||
<hbox style="-moz-appearance: menuarrow; direction: rtl" />
|
||||
</window>
|
|
@ -29,9 +29,6 @@ fails-if(!nativeThemePref) != checkbox-native.html checkbox-nonnative.html
|
|||
|
||||
!= 427122-1.html 427122-1-ref.html
|
||||
|
||||
# 403458 is a Windows-only bug
|
||||
skip-if(!winWidget) != 403458-winmenu-ltr.xul 403458-winmenu-rtl.xul
|
||||
|
||||
== 470711-1.xul 470711-1-ref.xul
|
||||
|
||||
== 482955-1.xul 482955-1-ref.xul
|
||||
|
|
|
@ -165,11 +165,13 @@ ImageLoader::AssociateRequestToFrame(imgIRequest* aRequest,
|
|||
// We want to request decode in such a way that avoids triggering
|
||||
// sync decode. First, we attempt to convert the aRequest into
|
||||
// a imgIContainer. If that succeeds, then aRequest has an image
|
||||
// and we can request decoding for size at zero size, and that will
|
||||
// trigger async decode. If the conversion to imgIContainer is
|
||||
// unsuccessful, then that means aRequest doesn't have an image yet,
|
||||
// which means we can safely call StartDecoding() on it without
|
||||
// triggering any synchronous work.
|
||||
// and we can request decoding for size at zero size, the size will
|
||||
// be ignored because we don't pass the FLAG_HIGH_QUALITY_SCALING
|
||||
// flag and an async decode (because we didn't pass any sync decoding
|
||||
// flags) at the intrinsic size will be requested. If the conversion
|
||||
// to imgIContainer is unsuccessful, then that means aRequest doesn't
|
||||
// have an image yet, which means we can safely call StartDecoding()
|
||||
// on it without triggering any synchronous work.
|
||||
nsCOMPtr<imgIContainer> imgContainer;
|
||||
aRequest->GetImage(getter_AddRefs(imgContainer));
|
||||
if (imgContainer) {
|
||||
|
|
|
@ -17,7 +17,98 @@ const NON_CONTENT_ACCESSIBLE_VALUES = {
|
|||
"-moz-groupbox",
|
||||
],
|
||||
"-moz-appearance": [
|
||||
"button-arrow-down",
|
||||
"button-arrow-next",
|
||||
"button-arrow-previous",
|
||||
"button-arrow-up",
|
||||
"button-focus",
|
||||
"dualbutton",
|
||||
"groupbox",
|
||||
"menubar",
|
||||
"menuitem",
|
||||
"checkmenuitem",
|
||||
"radiomenuitem",
|
||||
"menuitemtext",
|
||||
"menupopup",
|
||||
"menucheckbox",
|
||||
"menuradio",
|
||||
"menuseparator",
|
||||
"menuarrow",
|
||||
"menuimage",
|
||||
"-moz-menulist-button",
|
||||
"checkbox-container",
|
||||
"radio-container",
|
||||
"checkbox-label",
|
||||
"radio-label",
|
||||
"resizerpanel",
|
||||
"resizer",
|
||||
"scrollbar",
|
||||
"scrollbar-small",
|
||||
"scrollbar-horizontal",
|
||||
"scrollbar-vertical",
|
||||
"scrollbarbutton-up",
|
||||
"scrollbarbutton-down",
|
||||
"scrollbarbutton-left",
|
||||
"scrollbarbutton-right",
|
||||
"scrollcorner",
|
||||
"separator",
|
||||
"spinner",
|
||||
"spinner-upbutton",
|
||||
"spinner-downbutton",
|
||||
"spinner-textfield",
|
||||
"splitter",
|
||||
"statusbar",
|
||||
"statusbarpanel",
|
||||
"tab",
|
||||
"tabpanel",
|
||||
"tabpanels",
|
||||
"tab-scroll-arrow-back",
|
||||
"tab-scroll-arrow-forward",
|
||||
"toolbar",
|
||||
"toolbarbutton",
|
||||
"toolbarbutton-dropdown",
|
||||
"toolbargripper",
|
||||
"toolbox",
|
||||
"tooltip",
|
||||
"treeheader",
|
||||
"treeheadercell",
|
||||
"treeheadersortarrow",
|
||||
"treeitem",
|
||||
"treeline",
|
||||
"treetwisty",
|
||||
"treetwistyopen",
|
||||
"treeview",
|
||||
"window",
|
||||
"dialog",
|
||||
"-moz-win-communications-toolbox",
|
||||
"-moz-win-media-toolbox",
|
||||
"-moz-win-browsertabbar-toolbox",
|
||||
"-moz-win-glass",
|
||||
"-moz-win-borderless-glass",
|
||||
"-moz-win-exclude-glass",
|
||||
"-moz-mac-fullscreen-button",
|
||||
"-moz-mac-help-button",
|
||||
"-moz-window-button-box",
|
||||
"-moz-window-button-box-maximized",
|
||||
"-moz-window-button-close",
|
||||
"-moz-window-button-maximize",
|
||||
"-moz-window-button-minimize",
|
||||
"-moz-window-button-restore",
|
||||
"-moz-window-frame-bottom",
|
||||
"-moz-window-frame-left",
|
||||
"-moz-window-frame-right",
|
||||
"-moz-window-titlebar",
|
||||
"-moz-window-titlebar-maximized",
|
||||
"-moz-gtk-info-bar",
|
||||
"-moz-mac-active-source-list-selection",
|
||||
"-moz-mac-disclosure-button-closed",
|
||||
"-moz-mac-disclosure-button-open",
|
||||
"-moz-mac-source-list",
|
||||
"-moz-mac-source-list-selection",
|
||||
"-moz-mac-vibrancy-dark",
|
||||
"-moz-mac-vibrancy-light",
|
||||
"-moz-mac-vibrant-titlebar-dark",
|
||||
"-moz-mac-vibrant-titlebar-light",
|
||||
],
|
||||
};
|
||||
|
||||
|
|
|
@ -18,7 +18,6 @@
|
|||
#include "nsIPrefService.h"
|
||||
#include "nsIPrefBranch.h"
|
||||
#include "nsThreadUtils.h"
|
||||
#include "Latency.h"
|
||||
#include "mozilla/Telemetry.h"
|
||||
|
||||
#include "webrtc/modules/audio_processing/include/audio_processing.h"
|
||||
|
@ -705,10 +704,6 @@ WebrtcAudioConduit::SendAudioFrame(const int16_t audio_data[],
|
|||
return kMediaConduitSessionNotInited;
|
||||
}
|
||||
|
||||
if (MOZ_LOG_TEST(GetLatencyLog(), LogLevel::Debug)) {
|
||||
struct Processing insert = { TimeStamp::Now(), 0 };
|
||||
mProcessing.AppendElement(insert);
|
||||
}
|
||||
|
||||
capture_delay = mCaptureDelay;
|
||||
// Insert the samples
|
||||
|
@ -807,28 +802,6 @@ WebrtcAudioConduit::GetAudioFrame(int16_t speechData[],
|
|||
mLastSyncLog = mSamples;
|
||||
}
|
||||
|
||||
if (MOZ_LOG_TEST(GetLatencyLog(), LogLevel::Debug)) {
|
||||
if (mProcessing.Length() > 0) {
|
||||
unsigned int now;
|
||||
mPtrVoEVideoSync->GetPlayoutTimestamp(mChannel, now);
|
||||
if (static_cast<uint32_t>(now) != mLastTimestamp) {
|
||||
mLastTimestamp = static_cast<uint32_t>(now);
|
||||
// Find the block that includes this timestamp in the network input
|
||||
while (mProcessing.Length() > 0) {
|
||||
// FIX! assumes 20ms @ 48000Hz
|
||||
// FIX handle wrap-around
|
||||
if (mProcessing[0].mRTPTimeStamp + 20*(48000/1000) >= now) {
|
||||
TimeDuration t = TimeStamp::Now() - mProcessing[0].mTimeStamp;
|
||||
// Wrap-around?
|
||||
int64_t delta = t.ToMilliseconds() + (now - mProcessing[0].mRTPTimeStamp)/(48000/1000);
|
||||
LogTime(AsyncLatencyLogger::AudioRecvRTP, ((uint64_t) this), delta);
|
||||
break;
|
||||
}
|
||||
mProcessing.RemoveElementAt(0);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
CSFLogDebug(LOGTAG,"%s GetAudioFrame:Got samples: length %d ",__FUNCTION__,
|
||||
lengthSamples);
|
||||
return kMediaConduitNoError;
|
||||
|
@ -842,13 +815,6 @@ WebrtcAudioConduit::ReceivedRTPPacket(const void *data, int len, uint32_t ssrc)
|
|||
|
||||
if(mEngineReceiving)
|
||||
{
|
||||
if (MOZ_LOG_TEST(GetLatencyLog(), LogLevel::Debug)) {
|
||||
// timestamp is at 32 bits in ([1])
|
||||
struct Processing insert = { TimeStamp::Now(),
|
||||
ntohl(static_cast<const uint32_t *>(data)[1]) };
|
||||
mProcessing.AppendElement(insert);
|
||||
}
|
||||
|
||||
// XXX we need to get passed the time the packet was received
|
||||
if(mPtrVoENetwork->ReceivedRTPPacket(mChannel, data, len) == -1)
|
||||
{
|
||||
|
@ -987,16 +953,6 @@ WebrtcAudioConduit::SendRtp(const uint8_t* data,
|
|||
{
|
||||
CSFLogDebug(LOGTAG, "%s: len %lu", __FUNCTION__, (unsigned long)len);
|
||||
|
||||
if (MOZ_LOG_TEST(GetLatencyLog(), LogLevel::Debug)) {
|
||||
if (mProcessing.Length() > 0) {
|
||||
TimeStamp started = mProcessing[0].mTimeStamp;
|
||||
mProcessing.RemoveElementAt(0);
|
||||
mProcessing.RemoveElementAt(0); // 20ms packetization! Could automate this by watching sizes
|
||||
TimeDuration t = TimeStamp::Now() - started;
|
||||
int64_t delta = t.ToMilliseconds();
|
||||
LogTime(AsyncLatencyLogger::AudioSendRTP, ((uint64_t) this), delta);
|
||||
}
|
||||
}
|
||||
ReentrantMonitorAutoEnter enter(mTransportMonitor);
|
||||
// XXX(pkerr) - the PacketOptions are being ignored. This parameter was added along
|
||||
// with the Call API update in the webrtc.org codebase.
|
||||
|
|
|
@ -185,7 +185,6 @@ public:
|
|||
mDtmfEnabled(false),
|
||||
mCodecMutex("AudioConduit codec db"),
|
||||
mCaptureDelay(150),
|
||||
mLastTimestamp(0),
|
||||
mSamples(0),
|
||||
mLastSyncLog(0)
|
||||
{
|
||||
|
@ -339,8 +338,6 @@ private:
|
|||
// Current "capture" delay (really output plus input delay)
|
||||
int32_t mCaptureDelay;
|
||||
|
||||
uint32_t mLastTimestamp;
|
||||
|
||||
webrtc::AudioFrame mAudioFrame; // for output pulls
|
||||
|
||||
uint32_t mSamples;
|
||||
|
|
|
@ -82,8 +82,6 @@ using namespace mozilla::layers;
|
|||
mozilla::LazyLogModule gMediaPipelineLog("MediaPipeline");
|
||||
|
||||
namespace mozilla {
|
||||
extern mozilla::LogModule*
|
||||
AudioLogModule();
|
||||
|
||||
class VideoConverterListener
|
||||
{
|
||||
|
@ -1933,7 +1931,6 @@ public:
|
|||
, mTaskQueue(
|
||||
new TaskQueue(GetMediaThreadPool(MediaThreadType::WEBRTC_DECODER),
|
||||
"AudioPipelineListener"))
|
||||
, mLastLog(0)
|
||||
{
|
||||
AddTrackToSource(mRate);
|
||||
}
|
||||
|
@ -2031,18 +2028,6 @@ private:
|
|||
if (mSource->AppendToTrack(mTrackId, &segment)) {
|
||||
framesNeeded -= frames;
|
||||
mPlayedTicks += frames;
|
||||
if (MOZ_LOG_TEST(AudioLogModule(), LogLevel::Debug)) {
|
||||
if (mPlayedTicks > mLastLog + mRate) {
|
||||
MOZ_LOG(AudioLogModule(),
|
||||
LogLevel::Debug,
|
||||
("%p: Inserting samples into track %d, total = "
|
||||
"%" PRIu64,
|
||||
(void*)this,
|
||||
mTrackId,
|
||||
mPlayedTicks));
|
||||
mLastLog = mPlayedTicks;
|
||||
}
|
||||
}
|
||||
} else {
|
||||
MOZ_LOG(gMediaPipelineLog, LogLevel::Error, ("AppendToTrack failed"));
|
||||
// we can't un-read the data, but that's ok since we don't want to
|
||||
|
@ -2053,10 +2038,12 @@ private:
|
|||
}
|
||||
|
||||
RefPtr<MediaSessionConduit> mConduit;
|
||||
// This conduit's sampling rate. This is either 16, 32, 44.1 or 48kHz, and
|
||||
// tries to be the same as the graph rate. If the graph rate is higher than
|
||||
// 48kHz, mRate is capped to 48kHz. If mRate does not match the graph rate,
|
||||
// audio is resampled to the graph rate.
|
||||
const TrackRate mRate;
|
||||
const RefPtr<TaskQueue> mTaskQueue;
|
||||
// Graph's current sampling rate
|
||||
TrackTicks mLastLog = 0; // mPlayedTicks when we last logged
|
||||
};
|
||||
|
||||
MediaPipelineReceiveAudio::MediaPipelineReceiveAudio(
|
||||
|
|
|
@ -10,9 +10,6 @@
|
|||
#include "nspr.h"
|
||||
#include "nsError.h"
|
||||
|
||||
void LogTime(AsyncLatencyLogger::LatencyLogIndex index, uint64_t b, int64_t c) {}
|
||||
void LogLatency(AsyncLatencyLogger::LatencyLogIndex index, uint64_t b, int64_t c) {}
|
||||
|
||||
static const int AUDIO_BUFFER_SIZE = 1600;
|
||||
static const int NUM_CHANNELS = 2;
|
||||
static const int GRAPH_RATE = 16000;
|
||||
|
|
|
@@ -966,20 +966,27 @@ pub enum Appearance {
    /// A typical dialog button.
    Button,
    /// Various arrows that go in buttons
    #[parse(condition = "in_ua_or_chrome_sheet")]
    ButtonArrowDown,
    #[parse(condition = "in_ua_or_chrome_sheet")]
    ButtonArrowNext,
    #[parse(condition = "in_ua_or_chrome_sheet")]
    ButtonArrowPrevious,
    #[parse(condition = "in_ua_or_chrome_sheet")]
    ButtonArrowUp,
    /// A rectangular button that contains complex content
    /// like images (e.g. HTML <button> elements)
    ButtonBevel,
    /// The focus outline box inside of a button.
    #[parse(condition = "in_ua_or_chrome_sheet")]
    ButtonFocus,
    /// The caret of a text area
    Caret,
    /// A dual toolbar button (e.g., a Back button with a dropdown)
    #[parse(condition = "in_ua_or_chrome_sheet")]
    Dualbutton,
    /// A groupbox.
    #[parse(condition = "in_ua_or_chrome_sheet")]
    Groupbox,
    /// An inner-spin button.
    InnerSpinButton,

@@ -988,12 +995,17 @@ pub enum Appearance {
    /// A listbox item.
    Listitem,
    /// Menu Bar background
    #[parse(condition = "in_ua_or_chrome_sheet")]
    Menubar,
    /// <menu> and <menuitem> appearances
    #[parse(condition = "in_ua_or_chrome_sheet")]
    Menuitem,
    #[parse(condition = "in_ua_or_chrome_sheet")]
    Checkmenuitem,
    #[parse(condition = "in_ua_or_chrome_sheet")]
    Radiomenuitem,
    /// For text on non-iconic menuitems only
    #[parse(condition = "in_ua_or_chrome_sheet")]
    Menuitemtext,
    /// A dropdown list.
    Menulist,

@@ -1004,13 +1016,19 @@ pub enum Appearance {
    /// An editable textfield with a dropdown list (a combobox).
    MenulistTextfield,
    /// Menu Popup background.
    #[parse(condition = "in_ua_or_chrome_sheet")]
    Menupopup,
    /// menu checkbox/radio appearances
    #[parse(condition = "in_ua_or_chrome_sheet")]
    Menucheckbox,
    #[parse(condition = "in_ua_or_chrome_sheet")]
    Menuradio,
    #[parse(condition = "in_ua_or_chrome_sheet")]
    Menuseparator,
    #[parse(condition = "in_ua_or_chrome_sheet")]
    Menuarrow,
    /// An image in the menu gutter, like in bookmarks or history.
    #[parse(condition = "in_ua_or_chrome_sheet")]
    Menuimage,
    /// A horizontal meter bar.
    Meterbar,

@@ -1035,19 +1053,25 @@ pub enum Appearance {
    Radio,
    /// A generic container that always repaints on state changes. This is a
    /// hack to make XUL checkboxes and radio buttons work.
    #[parse(condition = "in_ua_or_chrome_sheet")]
    CheckboxContainer,
    #[parse(condition = "in_ua_or_chrome_sheet")]
    RadioContainer,
    /// The label part of a checkbox or radio button, used for painting a focus
    /// outline.
    #[parse(condition = "in_ua_or_chrome_sheet")]
    CheckboxLabel,
    #[parse(condition = "in_ua_or_chrome_sheet")]
    RadioLabel,
    /// nsRangeFrame and its subparts
    Range,
    RangeThumb,
    /// The resizer background area in a status bar for the resizer widget in
    /// the corner of a window.
    #[parse(condition = "in_ua_or_chrome_sheet")]
    Resizerpanel,
    /// The resizer itself.
    #[parse(condition = "in_ua_or_chrome_sheet")]
    Resizer,
    /// A slider.
    ScaleHorizontal,

@@ -1061,18 +1085,26 @@ pub enum Appearance {
    /// The ticks for a slider.
    Scalethumbtick,
    /// A scrollbar.
    #[parse(condition = "in_ua_or_chrome_sheet")]
    Scrollbar,
    /// A small scrollbar.
    #[parse(condition = "in_ua_or_chrome_sheet")]
    ScrollbarSmall,
    /// The scrollbar slider
    #[parse(condition = "in_ua_or_chrome_sheet")]
    ScrollbarHorizontal,
    #[parse(condition = "in_ua_or_chrome_sheet")]
    ScrollbarVertical,
    /// A scrollbar button (up/down/left/right).
    /// Keep these in order (some code casts these values to `int` in order to
    /// compare them against each other).
    #[parse(condition = "in_ua_or_chrome_sheet")]
    ScrollbarbuttonUp,
    #[parse(condition = "in_ua_or_chrome_sheet")]
    ScrollbarbuttonDown,
    #[parse(condition = "in_ua_or_chrome_sheet")]
    ScrollbarbuttonLeft,
    #[parse(condition = "in_ua_or_chrome_sheet")]
    ScrollbarbuttonRight,
    /// The scrollbar thumb.
    ScrollbarthumbHorizontal,

@@ -1081,107 +1113,166 @@ pub enum Appearance {
    ScrollbartrackHorizontal,
    ScrollbartrackVertical,
    /// The scroll corner
    #[parse(condition = "in_ua_or_chrome_sheet")]
    Scrollcorner,
    /// A searchfield.
    Searchfield,
    /// A separator. Can be horizontal or vertical.
    #[parse(condition = "in_ua_or_chrome_sheet")]
    Separator,
    /// A spin control (up/down control for time/date pickers).
    #[parse(condition = "in_ua_or_chrome_sheet")]
    Spinner,
    /// The up button of a spin control.
    #[parse(condition = "in_ua_or_chrome_sheet")]
    SpinnerUpbutton,
    /// The down button of a spin control.
    #[parse(condition = "in_ua_or_chrome_sheet")]
    SpinnerDownbutton,
    /// The textfield of a spin control
    #[parse(condition = "in_ua_or_chrome_sheet")]
    SpinnerTextfield,
    /// A splitter. Can be horizontal or vertical.
    #[parse(condition = "in_ua_or_chrome_sheet")]
    Splitter,
    /// A status bar in a main application window.
    #[parse(condition = "in_ua_or_chrome_sheet")]
    Statusbar,
    /// A single pane of a status bar.
    #[parse(condition = "in_ua_or_chrome_sheet")]
    Statusbarpanel,
    /// A single tab in a tab widget.
    #[parse(condition = "in_ua_or_chrome_sheet")]
    Tab,
    /// A single pane (inside the tabpanels container).
    #[parse(condition = "in_ua_or_chrome_sheet")]
    Tabpanel,
    /// The tab panels container.
    #[parse(condition = "in_ua_or_chrome_sheet")]
    Tabpanels,
    /// The tabs scroll arrows (left/right).
    #[parse(condition = "in_ua_or_chrome_sheet")]
    TabScrollArrowBack,
    #[parse(condition = "in_ua_or_chrome_sheet")]
    TabScrollArrowForward,
    /// A textfield or text area.
    Textfield,
    /// A multiline text field.
    TextfieldMultiline,
    /// A toolbar in an application window.
    #[parse(condition = "in_ua_or_chrome_sheet")]
    Toolbar,
    /// A single toolbar button (with no associated dropdown).
    #[parse(condition = "in_ua_or_chrome_sheet")]
    Toolbarbutton,
    /// The dropdown portion of a toolbar button
    #[parse(condition = "in_ua_or_chrome_sheet")]
    ToolbarbuttonDropdown,
    /// The gripper for a toolbar.
    #[parse(condition = "in_ua_or_chrome_sheet")]
    Toolbargripper,
    /// The toolbox that contains the toolbars.
    #[parse(condition = "in_ua_or_chrome_sheet")]
    Toolbox,
    /// A tooltip.
    #[parse(condition = "in_ua_or_chrome_sheet")]
    Tooltip,
    /// A listbox or tree widget header
    #[parse(condition = "in_ua_or_chrome_sheet")]
    Treeheader,
    /// An individual header cell
    #[parse(condition = "in_ua_or_chrome_sheet")]
    Treeheadercell,
    /// The sort arrow for a header.
    #[parse(condition = "in_ua_or_chrome_sheet")]
    Treeheadersortarrow,
    /// A tree item.
    #[parse(condition = "in_ua_or_chrome_sheet")]
    Treeitem,
    /// A tree widget branch line
    #[parse(condition = "in_ua_or_chrome_sheet")]
    Treeline,
    /// A tree widget twisty.
    #[parse(condition = "in_ua_or_chrome_sheet")]
    Treetwisty,
    /// Open tree widget twisty.
    #[parse(condition = "in_ua_or_chrome_sheet")]
    Treetwistyopen,
    /// A tree widget.
    #[parse(condition = "in_ua_or_chrome_sheet")]
    Treeview,
    /// Window and dialog backgrounds.
    #[parse(condition = "in_ua_or_chrome_sheet")]
    Window,
    #[parse(condition = "in_ua_or_chrome_sheet")]
    Dialog,

    /// Vista Rebars.
    #[parse(condition = "in_ua_or_chrome_sheet")]
    MozWinCommunicationsToolbox,
    #[parse(condition = "in_ua_or_chrome_sheet")]
    MozWinMediaToolbox,
    #[parse(condition = "in_ua_or_chrome_sheet")]
    MozWinBrowsertabbarToolbox,
    /// Vista glass.
    #[parse(condition = "in_ua_or_chrome_sheet")]
    MozWinGlass,
    #[parse(condition = "in_ua_or_chrome_sheet")]
    MozWinBorderlessGlass,
    /// -moz-appearance style used in setting proper glass margins.
    #[parse(condition = "in_ua_or_chrome_sheet")]
    MozWinExcludeGlass,

    /// Titlebar elements on the Mac.
    #[parse(condition = "in_ua_or_chrome_sheet")]
    MozMacFullscreenButton,
    /// Mac help button.
    #[parse(condition = "in_ua_or_chrome_sheet")]
    MozMacHelpButton,

    /// Windows themed window frame elements.
    #[parse(condition = "in_ua_or_chrome_sheet")]
    MozWindowButtonBox,
    #[parse(condition = "in_ua_or_chrome_sheet")]
    MozWindowButtonBoxMaximized,
    #[parse(condition = "in_ua_or_chrome_sheet")]
    MozWindowButtonClose,
    #[parse(condition = "in_ua_or_chrome_sheet")]
    MozWindowButtonMaximize,
    #[parse(condition = "in_ua_or_chrome_sheet")]
    MozWindowButtonMinimize,
    #[parse(condition = "in_ua_or_chrome_sheet")]
    MozWindowButtonRestore,
    #[parse(condition = "in_ua_or_chrome_sheet")]
    MozWindowFrameBottom,
    #[parse(condition = "in_ua_or_chrome_sheet")]
    MozWindowFrameLeft,
    #[parse(condition = "in_ua_or_chrome_sheet")]
    MozWindowFrameRight,
    #[parse(condition = "in_ua_or_chrome_sheet")]
    MozWindowTitlebar,
    #[parse(condition = "in_ua_or_chrome_sheet")]
    MozWindowTitlebarMaximized,

    #[parse(condition = "in_ua_or_chrome_sheet")]
    MozGtkInfoBar,
    #[parse(condition = "in_ua_or_chrome_sheet")]
    MozMacActiveSourceListSelection,
    #[parse(condition = "in_ua_or_chrome_sheet")]
    MozMacDisclosureButtonClosed,
    #[parse(condition = "in_ua_or_chrome_sheet")]
    MozMacDisclosureButtonOpen,
    #[parse(condition = "in_ua_or_chrome_sheet")]
    MozMacSourceList,
    #[parse(condition = "in_ua_or_chrome_sheet")]
    MozMacSourceListSelection,
    #[parse(condition = "in_ua_or_chrome_sheet")]
    MozMacVibrancyDark,
    #[parse(condition = "in_ua_or_chrome_sheet")]
    MozMacVibrancyLight,
    #[parse(condition = "in_ua_or_chrome_sheet")]
    MozMacVibrantTitlebarDark,
    #[parse(condition = "in_ua_or_chrome_sheet")]
    MozMacVibrantTitlebarLight,

    /// A non-disappearing scrollbar.
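
Note: the `#[parse(condition = "in_ua_or_chrome_sheet")]` attributes added throughout the enum above restrict those keywords to UA and chrome style sheets. The following is a minimal, self-contained sketch of what such a parse-time gate amounts to; the `ParserContext` type, its `in_ua_or_chrome_sheet()` method, and the keyword spellings are illustrative stand-ins for this sketch, not Servo's derive-generated parser.

// Sketch only: hand-rolled equivalent of a chrome-gated keyword parse.
#[derive(Debug, PartialEq)]
enum Appearance {
    /// Allowed in any style sheet.
    Button,
    /// Only allowed in UA or chrome sheets.
    ButtonArrowDown,
}

/// Stand-in for the parser context that knows what kind of sheet is being parsed.
struct ParserContext {
    chrome_rules_enabled: bool,
}

impl ParserContext {
    fn in_ua_or_chrome_sheet(&self) -> bool {
        self.chrome_rules_enabled
    }
}

/// Reject chrome-only keywords when the sheet is an ordinary author sheet.
fn parse_appearance(keyword: &str, context: &ParserContext) -> Result<Appearance, String> {
    match keyword {
        "button" => Ok(Appearance::Button),
        // The guard is the whole point: the keyword only parses in UA/chrome sheets.
        "button-arrow-down" if context.in_ua_or_chrome_sheet() => Ok(Appearance::ButtonArrowDown),
        other => Err(format!("invalid or restricted -moz-appearance value: {}", other)),
    }
}

fn main() {
    let author_sheet = ParserContext { chrome_rules_enabled: false };
    let chrome_sheet = ParserContext { chrome_rules_enabled: true };
    assert!(parse_appearance("button", &author_sheet).is_ok());
    assert!(parse_appearance("button-arrow-down", &author_sheet).is_err());
    assert_eq!(
        parse_appearance("button-arrow-down", &chrome_sheet),
        Ok(Appearance::ButtonArrowDown)
    );
    println!("chrome-only appearance keywords parse only in UA/chrome sheets");
}

In the actual style crate the gate lives in the parser context and is applied by the derive machinery, not by hand-written match arms; the sketch only shows the observable behavior.
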
@@ -185959,6 +185959,18 @@
     {}
    ]
   ],
   "html/rendering/non-replaced-elements/the-fieldset-and-legend-elements/legend-block-margins-2.html": [
    [
     "/html/rendering/non-replaced-elements/the-fieldset-and-legend-elements/legend-block-margins-2.html",
     [
      [
       "/html/rendering/non-replaced-elements/the-fieldset-and-legend-elements/legend-block-margins-2-ref.html",
       "=="
      ]
     ],
     {}
    ]
   ],
   "html/rendering/non-replaced-elements/the-fieldset-and-legend-elements/legend-display-none-rendering.html": [
    [
     "/html/rendering/non-replaced-elements/the-fieldset-and-legend-elements/legend-display-none-rendering.html",

@@ -288812,6 +288824,11 @@
     {}
    ]
   ],
   "html/rendering/non-replaced-elements/the-fieldset-and-legend-elements/legend-block-margins-2-ref.html": [
    [
     {}
    ]
   ],
   "html/rendering/non-replaced-elements/the-fieldset-and-legend-elements/legend-display-none-rendering-ref.html": [
    [
     {}

@@ -608839,6 +608856,14 @@
    "98cd16c7c147316669eb6c456538f43ae90fbf44",
    "reftest"
   ],
   "html/rendering/non-replaced-elements/the-fieldset-and-legend-elements/legend-block-margins-2-ref.html": [
    "f3975502148a936856a25722a1c80d59322c11f3",
    "support"
   ],
   "html/rendering/non-replaced-elements/the-fieldset-and-legend-elements/legend-block-margins-2.html": [
    "bfca4b86a6ae4399d4f8ea5d0cfadbb234d79b88",
    "reftest"
   ],
   "html/rendering/non-replaced-elements/the-fieldset-and-legend-elements/legend-display-none-rendering-ref.html": [
    "e6eff47e53c7a40e973b7f9dc298af2343f59941",
    "support"
@@ -1,7 +0,0 @@
[abort.https.window.html]
  [Calling BackgroundFetchRegistration.abort sets the correct fields and responses are still available]
    expected: FAIL

  [Aborting the same registration twice fails]
    expected: FAIL

@@ -1,14 +1,7 @@
[only-valid-whitespaces-are-allowed.html]
  expected: TIMEOUT
  [U+00A0 NBSP should not be parsed between directive name and value - HTTP header]
    expected: TIMEOUT

  [U+00A0 NBSP should not be parsed inside directive value - meta tag]
    expected: TIMEOUT

  [U+00A0 NBSP should not be parsed between directive name and value - meta tag]
    expected: TIMEOUT
    expected: FAIL

  [U+00A0 NBSP should not be parsed inside directive value - HTTP header]
    expected: TIMEOUT
    expected: FAIL

@@ -0,0 +1,4 @@
[blob-url-self-navigate-inherits.sub.html]
  [Violation report status OK.]
    expected: FAIL

@@ -2,3 +2,9 @@
  [Violation report status OK.]
    expected: FAIL

  [Test that the child can't navigate the parent because the relevant policy belongs to the navigation initiator (in this case the child)]
    expected: FAIL

  [Test that the child can't navigate the parent because the relevant policy belongs to the navigation initiator (in this case the child which has the policy `navigate-to 'none'`)]
    expected: FAIL

@@ -2,3 +2,6 @@
  [Violation report status OK.]
    expected: FAIL

  [Test that the child iframe navigation is not allowed]
    expected: FAIL

@@ -5,3 +5,6 @@
  [Violation report status OK.]
    expected: FAIL

  [Test that the child iframe navigation is not allowed]
    expected: FAIL

@@ -5,3 +5,6 @@
  [Violation report status OK.]
    expected: FAIL

  [Test that the child iframe navigation is not allowed]
    expected: FAIL

@@ -2,3 +2,6 @@
  [Violation report status OK.]
    expected: FAIL

  [Test that the child iframe navigation is not allowed]
    expected: FAIL

@@ -5,3 +5,6 @@
  [Violation report status OK.]
    expected: FAIL

  [Test that the child iframe navigation is not allowed]
    expected: FAIL

@@ -5,3 +5,6 @@
  [Violation report status OK.]
    expected: FAIL

  [Test that the child iframe navigation is not allowed]
    expected: FAIL

@@ -2,3 +2,6 @@
  [Violation report status OK.]
    expected: FAIL

  [Test that the child iframe navigation is not allowed]
    expected: FAIL

@@ -5,3 +5,6 @@
  [Violation report status OK.]
    expected: FAIL

  [Test that the child iframe navigation is not allowed]
    expected: FAIL

@@ -5,3 +5,6 @@
  [Violation report status OK.]
    expected: FAIL

  [Test that the child iframe navigation is not allowed]
    expected: FAIL

@@ -2,3 +2,6 @@
  [Violation report status OK.]
    expected: FAIL

  [Test that the child iframe navigation is not allowed]
    expected: FAIL

@@ -5,3 +5,6 @@
  [Violation report status OK.]
    expected: FAIL

  [Test that the child iframe navigation is not allowed]
    expected: FAIL

@@ -5,3 +5,6 @@
  [Violation report status OK.]
    expected: FAIL

  [Test that the child iframe navigation is not allowed]
    expected: FAIL

@@ -5,3 +5,6 @@
  [Violation report status OK.]
    expected: FAIL

  [Test that the parent can't navigate the child because the relevant policy belongs to the navigation initiator (in this case the parent, which has the policy `navigate-to support/wait_for_navigation.html;`)]
    expected: FAIL

@@ -0,0 +1,5 @@
[spv-only-sent-to-initiator.html]
  expected: TIMEOUT
  [Test that no spv event is raised]
    expected: NOTRUN

@@ -0,0 +1,4 @@
[blocked-end-of-chain.sub.html]
  [Test that the child iframe navigation is blocked]
    expected: FAIL

@@ -0,0 +1,4 @@
[iso-8859-1.html]
  [Should convert the script contents to UTF-8 before hashing]
    expected: FAIL

@@ -0,0 +1,4 @@
[iso-8859-3.html]
  [Should convert the script contents to UTF-8 before hashing]
    expected: FAIL

@@ -0,0 +1,4 @@
[iso-8859-7.html]
  [Should convert the script contents to UTF-8 before hashing]
    expected: FAIL

@@ -0,0 +1,4 @@
[iso-8859-9.html]
  [Should convert the script contents to UTF-8 before hashing]
    expected: FAIL

@@ -0,0 +1,2 @@
[host-context-specificity-001.html]
  expected: FAIL

@@ -0,0 +1,2 @@
[host-context-specificity-002.html]
  expected: FAIL

@@ -0,0 +1,2 @@
[host-context-specificity-003.html]
  expected: FAIL

@@ -0,0 +1,4 @@
[text-anchor-in-vertical-rl.html]
  [Line at edge of scrollport shouldn't jump visually when content is inserted before]
    expected: FAIL

@@ -0,0 +1,2 @@
[text-indent-percentage-002.html]
  expected: FAIL

@@ -0,0 +1,2 @@
[text-indent-percentage-004.html]
  expected: FAIL
Some files were not shown because too many files changed in this diff.