--HG--
extra : commitid : LoYhgKpsXCV
Wes Kocher 2015-12-30 17:34:05 -08:00
Parents 2982520fd0 cde8e15ecd
Commit a3a5bda88d
176 changed files with 2064 additions and 2438 deletions

View file

@ -1573,11 +1573,9 @@ pref("browser.tabs.crashReporting.includeURL", false);
pref("browser.tabs.crashReporting.emailMe", false);
pref("browser.tabs.crashReporting.email", "");
#ifdef NIGHTLY_BUILD
#ifndef MOZ_MULET
pref("layers.async-pan-zoom.enabled", true);
#endif
#endif
#ifdef E10S_TESTING_ONLY
// Enable e10s add-on interposition by default.

View file

@ -7991,6 +7991,15 @@ MOZ_ARG_ENABLE_BOOL(skia,
MOZ_ENABLE_SKIA=1,
MOZ_ENABLE_SKIA=)
dnl Skia GPU support may not reliably build on certain *BSDs (see bug 1234494).
if test "${OS_TARGET}" = "OpenBSD" -o \
"${OS_TARGET}" = "NetBSD" -o \
"${OS_ARCH}" = "SunOS"; then
MOZ_DISABLE_SKIA_GPU=1
else
MOZ_DISABLE_SKIA_GPU=
fi
MOZ_ARG_DISABLE_BOOL(skia-gpu,
[ --disable-skia-gpu Disable use of Skia-GPU],
MOZ_DISABLE_SKIA_GPU=1,

View file

@ -113,7 +113,7 @@ struct DevTools : public ::testing::Test {
/* Create the global object. */
JS::RootedObject newGlobal(cx);
JS::CompartmentOptions options;
options.setVersion(JSVERSION_LATEST);
options.behaviors().setVersion(JSVERSION_LATEST);
newGlobal = JS_NewGlobalObject(cx, getGlobalClass(), nullptr,
JS::FireOnNewGlobalHook, options);
if (!newGlobal)
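This hunk is one of many call sites in the merge that adapt to JS::CompartmentOptions being split into creation-time options and behaviors: setters such as setZone(), setAddonId(), setTrace() and setInvisibleToDebugger() move behind creationOptions(), while setVersion(), setDiscardSource() and the extra-warnings override move behind behaviors(). A minimal sketch of the new pattern, assembled from the hunks in this commit (cx and getGlobalClass() are assumed to come from the surrounding test fixture, as above):

JS::CompartmentOptions options;
// Creation-time settings: fixed for the lifetime of the compartment.
options.creationOptions().setZone(JS::SystemZone);
// Behaviors: settings consulted while scripts run in the compartment.
options.behaviors().setVersion(JSVERSION_LATEST);
JS::RootedObject global(cx, JS_NewGlobalObject(cx, getGlobalClass(), nullptr,
                                               JS::FireOnNewGlobalHook, options));
if (!global) {
  return; // global creation failed
}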

View file

@ -0,0 +1,163 @@
#!/usr/bin/env python
import jwt
receipt1 = {
"typ": "purchase-receipt",
"product": {
"url": "https://www.mozilla.org",
"storedata": "5169314356"
},
"user": {
"type": "directed-identifier",
"value": "4fb35151-2b9b-4ba2-8283-c49d381640bd"
},
"iss": "http://mochi.test:8888",
"nbf": 131360185,
"iat": 131360188,
"detail": "http://mochi.test:8888/receipt/5169314356",
"verify": "http://mochi.test:8888/verify/5169314356",
"reissue": "http://mochi.test:8888/reissue/5169314356"
}
receipt2 = {
"typ": "purchase-receipt",
"product": {
"url": "https://www.mozilla.org",
"storedata": "5169314357"
},
"user": {
"type": "directed-identifier",
"value": "4fb35151-2b9b-4ba2-8283-c49d381640bd"
},
"iss": "http://mochi.test:8888",
"nbf": 131360185,
"iat": 131360188,
"detail": "http://mochi.test:8888/receipt/5169314356",
"verify": "http://mochi.test:8888/verify/5169314356",
"reissue": "http://mochi.test:8888/reissue/5169314356"
}
receipt_without_typ = {
"product": {
"url": "https://www.mozilla.org",
"storedata": "5169314358"
},
"user": {
"type": "directed-identifier",
"value": "4fb35151-2b9b-4ba2-8283-c49d381640bd"
},
"iss": "http://mochi.test:8888",
"nbf": 131360185,
"iat": 131360188,
"detail": "http://mochi.test:8888/receipt/5169314356",
"verify": "http://mochi.test:8888/verify/5169314356",
"reissue": "http://mochi.test:8888/reissue/5169314356"
}
receipt_without_product = {
"typ": "purchase-receipt",
"user": {
"type": "directed-identifier",
"value": "4fb35151-2b9b-4ba2-8283-c49d381640bd"
},
"iss": "http://mochi.test:8888",
"nbf": 131360185,
"iat": 131360188,
"detail": "http://mochi.test:8888/receipt/5169314356",
"verify": "http://mochi.test:8888/verify/5169314356",
"reissue": "http://mochi.test:8888/reissue/5169314356"
}
receipt_without_user = {
"typ": "purchase-receipt",
"product": {
"url": "https://www.mozilla.org",
"storedata": "5169314358"
},
"iss": "http://mochi.test:8888",
"nbf": 131360185,
"iat": 131360188,
"detail": "http://mochi.test:8888/receipt/5169314356",
"verify": "http://mochi.test:8888/verify/5169314356",
"reissue": "http://mochi.test:8888/reissue/5169314356"
}
receipt_without_iss = {
"typ": "purchase-receipt",
"product": {
"url": "https://www.mozilla.org",
"storedata": "5169314358"
},
"user": {
"type": "directed-identifier",
"value": "4fb35151-2b9b-4ba2-8283-c49d381640bd"
},
"nbf": 131360185,
"iat": 131360188,
"detail": "http://mochi.test:8888/receipt/5169314356",
"verify": "http://mochi.test:8888/verify/5169314356",
"reissue": "http://mochi.test:8888/reissue/5169314356"
}
receipt_without_nbf = {
"typ": "purchase-receipt",
"product": {
"url": "https://www.mozilla.org",
"storedata": "5169314358"
},
"user": {
"type": "directed-identifier",
"value": "4fb35151-2b9b-4ba2-8283-c49d381640bd"
},
"iss": "http://mochi.test:8888",
"iat": 131360188,
"detail": "http://mochi.test:8888/receipt/5169314356",
"verify": "http://mochi.test:8888/verify/5169314356",
"reissue": "http://mochi.test:8888/reissue/5169314356"
}
receipt_without_iat = {
"typ": "purchase-receipt",
"product": {
"url": "https://www.mozilla.org",
"storedata": "5169314358"
},
"user": {
"type": "directed-identifier",
"value": "4fb35151-2b9b-4ba2-8283-c49d381640bd"
},
"iss": "http://mochi.test:8888",
"nbf": 131360185,
"detail": "http://mochi.test:8888/receipt/5169314356",
"verify": "http://mochi.test:8888/verify/5169314356",
"reissue": "http://mochi.test:8888/reissue/5169314356"
}
receipt_with_wrong_typ = {
"typ": "fake",
"product": {
"url": "https://www.mozilla.org",
"storedata": "5169314358"
},
"user": {
"type": "directed-identifier",
"value": "4fb35151-2b9b-4ba2-8283-c49d381640bd"
},
"iss": "http://mochi.test:8888",
"nbf": 131360185,
"iat": 131360188,
"detail": "http://mochi.test:8888/receipt/5169314356",
"verify": "http://mochi.test:8888/verify/5169314356",
"reissue": "http://mochi.test:8888/reissue/5169314356"
}
print("let valid_receipt1 = \"" + jwt.encode(receipt1, "") + "\";\n")
print("let valid_receipt2 = \"" + jwt.encode(receipt2, "") + "\";\n")
print("let receipt_without_typ = \"" + jwt.encode(receipt_without_typ, "") + "\";\n")
print("let receipt_without_product = \"" + jwt.encode(receipt_without_product, "") + "\";\n")
print("let receipt_without_user = \"" + jwt.encode(receipt_without_user, "") + "\";\n")
print("let receipt_without_iss = \"" + jwt.encode(receipt_without_iss, "") + "\";\n")
print("let receipt_without_nbf = \"" + jwt.encode(receipt_without_nbf, "") + "\";\n")
print("let receipt_without_iat = \"" + jwt.encode(receipt_without_iat, "") + "\";\n")
print("let receipt_with_wrong_typ = \"" + jwt.encode(receipt_with_wrong_typ, "") + "\";\n")

View file

@ -1809,8 +1809,8 @@ nsMessageManagerScriptExecutor::InitChildGlobalInternal(
const uint32_t flags = nsIXPConnect::INIT_JS_STANDARD_CLASSES;
JS::CompartmentOptions options;
options.setZone(JS::SystemZone)
.setVersion(JSVERSION_LATEST);
options.creationOptions().setZone(JS::SystemZone);
options.behaviors().setVersion(JSVERSION_LATEST);
nsresult rv =
xpc->InitClassesWithNewWrappedGlobal(cx, aScope, mPrincipal,

View file

@ -2325,13 +2325,11 @@ CreateNativeGlobalForInner(JSContext* aCx,
// windows or inside a browser element. In such cases we want to tag the
// window's compartment with the add-on ID. See bug 1092156.
if (nsContentUtils::IsSystemPrincipal(aPrincipal)) {
options.setAddonId(MapURIToAddonID(aURI));
options.creationOptions().setAddonId(MapURIToAddonID(aURI));
}
if (top) {
if (top->GetGlobalJSObject()) {
options.setSameZoneAs(top->GetGlobalJSObject());
}
if (top && top->GetGlobalJSObject()) {
options.creationOptions().setSameZoneAs(top->GetGlobalJSObject());
}
// Determine if we need the Components object.

View file

@ -9,6 +9,7 @@
#include "js/Class.h"
#include "js/Proxy.h"
#include "mozilla/dom/DOMJSProxyHandler.h"
#include "mozilla/CycleCollectedJSRuntime.h"
#include "mozilla/HoldDropJSObjects.h"
#include "nsCycleCollectionTraversalCallback.h"
#include "nsCycleCollector.h"
@ -24,11 +25,25 @@ nsWrapperCache::HasJSObjectMovedOp(JSObject* aWrapper)
}
#endif
/* static */ void
void
nsWrapperCache::HoldJSObjects(void* aScriptObjectHolder,
nsScriptObjectTracer* aTracer)
{
cyclecollector::HoldJSObjectsImpl(aScriptObjectHolder, aTracer);
if (mWrapper && !JS::ObjectIsTenured(mWrapper)) {
CycleCollectedJSRuntime::Get()->NurseryWrapperPreserved(mWrapper);
}
}
void
nsWrapperCache::SetWrapperJSObject(JSObject* aWrapper)
{
mWrapper = aWrapper;
UnsetWrapperFlags(kWrapperFlagsMask & ~WRAPPER_IS_NOT_DOM_BINDING);
if (aWrapper && !JS::ObjectIsTenured(aWrapper)) {
CycleCollectedJSRuntime::Get()->NurseryWrapperAdded(this);
}
}
void

View file

@ -258,14 +258,15 @@ protected:
void TraceWrapper(JSTracer* aTrc, const char* name)
{
if (mWrapper) {
JS_CallObjectTracer(aTrc, &mWrapper, name);
JS_CallUnbarrieredObjectTracer(aTrc, &mWrapper, name);
}
}
void PoisonWrapper()
{
if (mWrapper) {
mWrapper.setToCrashOnTouch();
// See setToCrashOnTouch() in RootingAPI.h
mWrapper = reinterpret_cast<JSObject*>(1);
}
}
@ -287,13 +288,7 @@ private:
return mWrapper;
}
void SetWrapperJSObject(JSObject* aWrapper)
{
mWrapper = aWrapper;
UnsetWrapperFlags(kWrapperFlagsMask & ~WRAPPER_IS_NOT_DOM_BINDING);
}
void TraceWrapperJSObject(JSTracer* aTrc, const char* aName);
void SetWrapperJSObject(JSObject* aWrapper);
FlagsType GetWrapperFlags() const
{
@ -318,8 +313,8 @@ private:
mFlags &= ~aFlagsToUnset;
}
static void HoldJSObjects(void* aScriptObjectHolder,
nsScriptObjectTracer* aTracer);
void HoldJSObjects(void* aScriptObjectHolder,
nsScriptObjectTracer* aTracer);
#ifdef DEBUG
public:
@ -349,8 +344,8 @@ private:
enum { kWrapperFlagsMask = (WRAPPER_BIT_PRESERVED | WRAPPER_IS_NOT_DOM_BINDING) };
JS::Heap<JSObject*> mWrapper;
FlagsType mFlags;
JSObject* mWrapper;
FlagsType mFlags;
};
enum { WRAPPER_CACHE_FLAGS_BITS_USED = 2 };

View file

@ -53,10 +53,4 @@ nsWrapperCache::IsBlackAndDoesNotNeedTracing(nsISupports* aThis)
return IsBlack() && HasNothingToTrace(aThis);
}
inline void
nsWrapperCache::TraceWrapperJSObject(JSTracer* aTrc, const char* aName)
{
JS_CallObjectTracer(aTrc, &mWrapper, aName);
}
#endif /* nsWrapperCache_h___ */

View file

@ -3048,7 +3048,7 @@ CreateGlobal(JSContext* aCx, T* aNative, nsWrapperCache* aCache,
JSPrincipals* aPrincipal, bool aInitStandardClasses,
JS::MutableHandle<JSObject*> aGlobal)
{
aOptions.setTrace(CreateGlobalOptions<T>::TraceGlobal);
aOptions.creationOptions().setTrace(CreateGlobalOptions<T>::TraceGlobal);
aGlobal.set(JS_NewGlobalObject(aCx, aClass, aPrincipal,
JS::DontFireOnNewGlobalHook, aOptions));

View file

@ -422,6 +422,8 @@ class CGDOMJSClass(CGThing):
classFlags += "JSCLASS_HAS_RESERVED_SLOTS(%d)" % slotCount
traceHook = 'nullptr'
reservedSlots = slotCount
if self.descriptor.interface.isProbablyShortLivingObject():
classFlags += " | JSCLASS_SKIP_NURSERY_FINALIZE"
if self.descriptor.interface.getExtendedAttribute("NeedResolve"):
resolveHook = RESOLVE_HOOK_NAME
mayResolveHook = MAY_RESOLVE_HOOK_NAME

View file

@ -564,6 +564,9 @@ class IDLExternalInterface(IDLObjectWithIdentifier, IDLExposureMixins):
def isJSImplemented(self):
return False
def isProbablyShortLivingObject(self):
return False
def getNavigatorProperty(self):
return None
@ -1408,7 +1411,8 @@ class IDLInterface(IDLObjectWithScope, IDLExposureMixins):
identifier == "ChromeOnly" or
identifier == "Unforgeable" or
identifier == "UnsafeInPrerendering" or
identifier == "LegacyEventInit"):
identifier == "LegacyEventInit" or
identifier == "ProbablyShortLivingObject"):
# Known extended attributes that do not take values
if not attr.noArguments():
raise WebIDLError("[%s] must take no arguments" % identifier,
@ -1522,6 +1526,14 @@ class IDLInterface(IDLObjectWithScope, IDLExposureMixins):
def isJSImplemented(self):
return bool(self.getJSImplementation())
def isProbablyShortLivingObject(self):
current = self
while current:
if current.getExtendedAttribute("ProbablyShortLivingObject"):
return True
current = current.parent
return False
def getNavigatorProperty(self):
naviProp = self.getExtendedAttribute("NavigatorProperty")
if not naviProp:

View file

@ -3811,6 +3811,10 @@ CanvasRenderingContext2D::DrawOrMeasureText(const nsAString& aRawText,
return NS_OK;
}
if (!IsFinite(aX) || !IsFinite(aY)) {
return NS_OK;
}
const ContextState &state = CurrentState();
// This is only needed to know if we can know the drawing bounding box easily.

View file

@ -0,0 +1,16 @@
<!DOCTYPE html>
<html>
<head>
<meta charset="UTF-8">
<script>
function boom() {
var canvas = document.createElement('canvas');
var ctx = canvas.getContext('2d');
ctx.fillText("A", 0, 1e308);
}
</script>
</head>
<body onload="boom();"></body>
</html>

View file

@ -25,4 +25,5 @@ load 1183363.html
load 1190705.html
load 1223740-1.html
load 1225381-1.html
load 1229932-1.html
load texImage2D.html

View file

@ -237,9 +237,6 @@ NS_IMPL_CYCLE_COLLECTION_TRAVERSE_END
JSObject*
Event::WrapObject(JSContext* aCx, JS::Handle<JSObject*> aGivenProto)
{
if (mIsMainThreadEvent && !GetWrapperPreserveColor()) {
nsJSContext::LikelyShortLivingObjectCreated();
}
return WrapObjectInternal(aCx, aGivenProto);
}

View file

@ -34,7 +34,20 @@
namespace mozilla {
namespace dom {
NS_IMPL_CYCLE_COLLECTION_WRAPPERCACHE_0(Directory)
NS_IMPL_CYCLE_COLLECTION_CLASS(Directory)
NS_IMPL_CYCLE_COLLECTION_UNLINK_BEGIN(Directory)
tmp->mFileSystem->Unlink();
NS_IMPL_CYCLE_COLLECTION_UNLINK_PRESERVED_WRAPPER
NS_IMPL_CYCLE_COLLECTION_UNLINK_END
NS_IMPL_CYCLE_COLLECTION_TRAVERSE_BEGIN(Directory)
tmp->mFileSystem->Traverse(cb);
NS_IMPL_CYCLE_COLLECTION_TRAVERSE_SCRIPT_OBJECTS
NS_IMPL_CYCLE_COLLECTION_TRAVERSE_END
NS_IMPL_CYCLE_COLLECTION_TRACE_WRAPPERCACHE(Directory)
NS_IMPL_CYCLE_COLLECTING_ADDREF(Directory)
NS_IMPL_CYCLE_COLLECTING_RELEASE(Directory)
NS_INTERFACE_MAP_BEGIN_CYCLE_COLLECTION(Directory)

View file

@ -20,7 +20,7 @@ class Directory;
class FileSystemBase
{
NS_INLINE_DECL_THREADSAFE_REFCOUNTING(FileSystemBase)
NS_INLINE_DECL_REFCOUNTING(FileSystemBase)
public:
// Create file system object from its string representation.
@ -95,6 +95,11 @@ public:
{
return mRequiresPermissionChecks;
}
// CC methods
virtual void Unlink() {}
virtual void Traverse(nsCycleCollectionTraversalCallback &cb) {}
protected:
virtual ~FileSystemBase();

View file

@ -16,10 +16,34 @@
#include "mozilla/dom/PContent.h"
#include "mozilla/dom/ipc/BlobParent.h"
#include "mozilla/unused.h"
#include "nsProxyRelease.h"
namespace mozilla {
namespace dom {
namespace {
class FileSystemReleaseRunnable : public nsRunnable
{
public:
explicit FileSystemReleaseRunnable(RefPtr<FileSystemBase>& aDoomed)
: mDoomed(nullptr)
{
aDoomed.swap(mDoomed);
}
NS_IMETHOD Run()
{
mDoomed->Release();
return NS_OK;
}
private:
FileSystemBase* MOZ_OWNING_REF mDoomed;
};
} // anonymous namespace
FileSystemTaskBase::FileSystemTaskBase(FileSystemBase* aFileSystem)
: mErrorValue(NS_OK)
, mFileSystem(aFileSystem)
@ -43,6 +67,12 @@ FileSystemTaskBase::FileSystemTaskBase(FileSystemBase* aFileSystem,
FileSystemTaskBase::~FileSystemTaskBase()
{
if (!NS_IsMainThread()) {
RefPtr<FileSystemReleaseRunnable> runnable =
new FileSystemReleaseRunnable(mFileSystem);
MOZ_ASSERT(!mFileSystem);
NS_DispatchToMainThread(runnable);
}
}
FileSystemBase*

View file

@ -76,5 +76,18 @@ OSFileSystem::IsSafeDirectory(Directory* aDir) const
return true;
}
void
OSFileSystem::Unlink()
{
mWindow = nullptr;
}
void
OSFileSystem::Traverse(nsCycleCollectionTraversalCallback &cb)
{
OSFileSystem* tmp = this;
NS_IMPL_CYCLE_COLLECTION_TRAVERSE(mWindow);
}
} // namespace dom
} // namespace mozilla

View file

@ -12,7 +12,7 @@
namespace mozilla {
namespace dom {
class OSFileSystem : public FileSystemBase
class OSFileSystem final : public FileSystemBase
{
public:
explicit OSFileSystem(const nsAString& aRootDir);
@ -34,6 +34,10 @@ public:
virtual bool
IsSafeDirectory(Directory* aDir) const override;
// CC methods
virtual void Unlink() override;
virtual void Traverse(nsCycleCollectionTraversalCallback &cb) override;
private:
virtual ~OSFileSystem() {}

View file

@ -310,8 +310,9 @@ DecoderTraits::IsMP4TypeAndEnabled(const nsACString& aType)
{
#ifdef MOZ_FMP4
return IsMP4SupportedType(aType);
#endif
#else
return false;
#endif
}
static bool
@ -320,8 +321,9 @@ IsMP3SupportedType(const nsACString& aType,
{
#ifdef MOZ_OMX_DECODER
return false;
#endif
#else
return MP3Decoder::CanHandleMediaType(aType, aCodecs);
#endif
}
static bool

View file

@ -11,7 +11,7 @@
*/
[Constructor(DOMString type, optional EventInit eventInitDict),
Exposed=(Window,Worker,System)]
Exposed=(Window,Worker,System), ProbablyShortLivingObject]
interface Event {
[Pure]
readonly attribute DOMString type;

View file

@ -7,6 +7,7 @@
* http://dom.spec.whatwg.org
*/
[ProbablyShortLivingObject]
interface MutationRecord {
[Constant]
readonly attribute DOMString type;

View file

@ -1845,7 +1845,7 @@ RuntimeService::Init()
if (!sDefaultJSSettings.gcSettings[0].IsSet()) {
sDefaultJSSettings.runtimeOptions = JS::RuntimeOptions();
sDefaultJSSettings.chrome.maxScriptRuntime = -1;
sDefaultJSSettings.chrome.compartmentOptions.setVersion(JSVERSION_LATEST);
sDefaultJSSettings.chrome.compartmentOptions.behaviors().setVersion(JSVERSION_LATEST);
sDefaultJSSettings.content.maxScriptRuntime = MAX_SCRIPT_RUN_TIME_SEC;
#ifdef JS_GC_ZEAL
sDefaultJSSettings.gcZealFrequency = JS_DEFAULT_ZEAL_FREQ;
@ -2622,7 +2622,7 @@ RuntimeService::JSVersionChanged(const char* /* aPrefName */, void* /* aClosure
bool useLatest = Preferences::GetBool("dom.workers.latestJSVersion", false);
JS::CompartmentOptions& options = sDefaultJSSettings.content.compartmentOptions;
options.setVersion(useLatest ? JSVERSION_LATEST : JSVERSION_DEFAULT);
options.behaviors().setVersion(useLatest ? JSVERSION_LATEST : JSVERSION_DEFAULT);
}
NS_IMPL_ISUPPORTS_INHERITED0(LogViolationDetailsRunnable, nsRunnable)

View file

@ -432,8 +432,9 @@ DedicatedWorkerGlobalScope::WrapGlobalObject(JSContext* aCx,
const bool extraWarnings = usesSystemPrincipal &&
xpc::ExtraWarningsForSystemJS();
options.setDiscardSource(discardSource)
.extraWarningsOverride().set(extraWarnings);
JS::CompartmentBehaviors& behaviors = options.behaviors();
behaviors.setDiscardSource(discardSource)
.extraWarningsOverride().set(extraWarnings);
return DedicatedWorkerGlobalScopeBinding_workers::Wrap(aCx, this, this,
options,
@ -796,7 +797,7 @@ WorkerDebuggerGlobalScope::CreateSandbox(JSContext* aCx, const nsAString& aName,
mWorkerPrivate->AssertIsOnWorkerThread();
JS::CompartmentOptions options;
options.setInvisibleToDebugger(true);
options.creationOptions().setInvisibleToDebugger(true);
JS::Rooted<JSObject*> sandbox(aCx,
JS_NewGlobalObject(aCx, js::Jsvalify(&workerdebuggersandbox_class), nullptr,

View file

@ -128,6 +128,16 @@ BasicLayerManager::PushGroupForLayer(gfxContext* aContext, Layer* aLayer, const
Matrix maskTransform;
RefPtr<SourceSurface> maskSurf = GetMaskForLayer(aLayer, &maskTransform);
if (maskSurf) {
// The returned transform will transform the mask to device space on the
// destination. Since the User->Device space transform will be applied
// to the mask by PopGroupAndBlend we need to adjust the transform to
// transform the mask to user space.
Matrix currentTransform = ToMatrix(group.mFinalTarget->CurrentMatrix());
currentTransform.Invert();
maskTransform = maskTransform * currentTransform;
}
if (aLayer->CanUseOpaqueSurface() &&
((didCompleteClip && aRegion.GetNumRects() == 1) ||
!aContext->CurrentMatrix().HasNonIntegerTranslation())) {
@ -172,8 +182,12 @@ BasicLayerManager::PopGroupForLayer(PushedGroup &group)
RefPtr<SourceSurface> src = sourceDT->Snapshot();
if (group.mMaskSurface) {
dt->SetTransform(group.mMaskTransform * Matrix::Translation(-group.mFinalTarget->GetDeviceOffset()));
dt->MaskSurface(SurfacePattern(src, ExtendMode::CLAMP, Matrix::Translation(group.mGroupOffset.x, group.mGroupOffset.y)),
Point finalOffset = group.mFinalTarget->GetDeviceOffset();
dt->SetTransform(group.mMaskTransform * Matrix::Translation(-finalOffset));
Matrix surfTransform = group.mMaskTransform;
surfTransform.Invert();
dt->MaskSurface(SurfacePattern(src, ExtendMode::CLAMP, surfTransform *
Matrix::Translation(group.mGroupOffset.x, group.mGroupOffset.y)),
group.mMaskSurface, Point(0, 0), DrawOptions(group.mOpacity, group.mOperator));
} else {
// For now this is required since our group offset is in device space of the final target,

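The new comment in PushGroupForLayer above is the heart of this fix: GetMaskForLayer() hands back a transform that takes the mask to device space, but PopGroupAndBlend() will apply the user-to-device transform once more when blending, so the mask transform has to be pulled back into user space first. A small sketch of that adjustment, assuming the gfx::Matrix operations used in the hunk:

// maskToDevice comes from GetMaskForLayer(); group.mFinalTarget is the destination context.
Matrix userToDevice = ToMatrix(group.mFinalTarget->CurrentMatrix());
Matrix deviceToUser = userToDevice;
deviceToUser.Invert();                          // Invert() mutates in place, as in the hunk
Matrix maskToUser = maskToDevice * deviceToUser;
// PopGroupAndBlend() later re-applies userToDevice, taking the mask back to device space.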
View file

@ -363,6 +363,24 @@ TextureClient::Lock(OpenMode aMode)
mIsLocked = mData->Lock(aMode, mReleaseFenceHandle.IsValid() ? &mReleaseFenceHandle : nullptr);
mOpenMode = aMode;
auto format = GetFormat();
if (mIsLocked && CanExposeDrawTarget() &&
aMode == OpenMode::OPEN_READ_WRITE &&
NS_IsMainThread() &&
// the formats that we apparently expect, in the cairo backend. Any other
// format will trigger an assertion in GfxFormatToCairoFormat.
(format == SurfaceFormat::A8R8G8B8_UINT32 ||
format == SurfaceFormat::X8R8G8B8_UINT32 ||
format == SurfaceFormat::A8 ||
format == SurfaceFormat::R5G6B5_UINT16)) {
if (!BorrowDrawTarget()) {
// Failed to get a DrawTarget, means we won't be able to write into the
// texture, might as well fail now.
Unlock();
return false;
}
}
return mIsLocked;
}
@ -427,6 +445,9 @@ TextureClient::UpdateFromSurface(gfx::SourceSurface* aSurface)
MOZ_ASSERT(IsValid());
MOZ_ASSERT(mIsLocked);
MOZ_ASSERT(aSurface);
// If you run into this assertion, make sure the texture was locked write-only
// rather than read-write.
MOZ_ASSERT(!mBorrowedDrawTarget);
// XXX - It would be better to first try the DrawTarget approach and fallback
// to the backend-specific implementation because the latter will usually do

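TextureClient::Lock() above now eagerly borrows a DrawTarget whenever a texture in one of the listed cairo-compatible formats is locked OPEN_READ_WRITE on the main thread, and UpdateFromSurface() gains an assertion that no DrawTarget was borrowed. The implied calling convention, sketched under the assumption that OpenMode also provides a write-only value (only OPEN_READ_WRITE is named in the hunk):

// Upload a surface: lock write-only so Lock() does not borrow a DrawTarget,
// which keeps the new MOZ_ASSERT(!mBorrowedDrawTarget) in UpdateFromSurface() happy.
if (texture->Lock(OpenMode::OPEN_WRITE_ONLY)) {
  texture->UpdateFromSurface(surface);
  texture->Unlock();
}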
View file

@ -108,6 +108,9 @@ struct AutoEndTransaction {
CompositableTransaction* mTxn;
};
/* static */
Atomic<bool> ImageBridgeChild::sIsShutDown(false);
void
ImageBridgeChild::UseTextures(CompositableClient* aCompositable,
const nsTArray<TimedTextureClient>& aTextures)
@ -371,7 +374,7 @@ static void ReleaseImageClientNow(ImageClient* aClient,
if (aClient) {
aClient->Release();
}
if (aChild && ImageBridgeChild::IsCreated()) {
if (aChild && ImageBridgeChild::IsCreated() && !ImageBridgeChild::IsShutDown()) {
aChild->SendAsyncDelete();
}
}
@ -463,7 +466,7 @@ void ImageBridgeChild::DispatchReleaseTextureClient(TextureClient* aClient)
static void UpdateImageClientNow(ImageClient* aClient, RefPtr<ImageContainer>&& aContainer)
{
if (!ImageBridgeChild::IsCreated()) {
if (!ImageBridgeChild::IsCreated() || ImageBridgeChild::IsShutDown()) {
NS_WARNING("Something is holding on to graphics resources after the shutdown"
"of the graphics subsystem!");
return;
@ -479,7 +482,7 @@ static void UpdateImageClientNow(ImageClient* aClient, RefPtr<ImageContainer>&&
void ImageBridgeChild::DispatchImageClientUpdate(ImageClient* aClient,
ImageContainer* aContainer)
{
if (!ImageBridgeChild::IsCreated()) {
if (!ImageBridgeChild::IsCreated() || ImageBridgeChild::IsShutDown()) {
NS_WARNING("Something is holding on to graphics resources after the shutdown"
"of the graphics subsystem!");
return;
@ -545,7 +548,7 @@ void ImageBridgeChild::UpdateAsyncCanvasRendererNow(AsyncCanvasRenderer* aWrappe
static void FlushAllImagesSync(ImageClient* aClient, ImageContainer* aContainer,
RefPtr<AsyncTransactionWaiter>&& aWaiter)
{
if (!ImageBridgeChild::IsCreated()) {
if (!ImageBridgeChild::IsCreated() || ImageBridgeChild::IsShutDown()) {
// How sad. If we get into this branch it means that the ImageBridge
// got destroyed between the time we ImageBridgeChild::FlushAllImage
// was called on some thread, and the time this function was proxied
@ -574,7 +577,7 @@ static void FlushAllImagesSync(ImageClient* aClient, ImageContainer* aContainer,
void ImageBridgeChild::FlushAllImages(ImageClient* aClient,
ImageContainer* aContainer)
{
if (!IsCreated()) {
if (!IsCreated() || IsShutDown()) {
return;
}
MOZ_ASSERT(aClient);
@ -705,6 +708,9 @@ ImageBridgeChild::StartUpInChildProcess(Transport* aTransport,
void ImageBridgeChild::ShutDown()
{
MOZ_ASSERT(NS_IsMainThread());
sIsShutDown = true;
if (ImageBridgeChild::IsCreated()) {
MOZ_ASSERT(!sImageBridgeChildSingleton->mShuttingDown);

View file

@ -9,6 +9,7 @@
#include <stddef.h> // for size_t
#include <stdint.h> // for uint32_t, uint64_t
#include "mozilla/Attributes.h" // for override
#include "mozilla/Atomics.h"
#include "mozilla/RefPtr.h" // for already_AddRefed
#include "mozilla/ipc/SharedMemory.h" // for SharedMemory, etc
#include "mozilla/layers/AsyncTransactionTracker.h" // for AsyncTransactionTrackerHolder
@ -140,6 +141,15 @@ public:
* Can be called from any thread.
*/
static bool IsCreated();
/**
* Returns true if the singleton's ShutDown() was called.
*
* Can be called from any thread.
*/
static bool IsShutDown()
{
return sIsShutDown;
}
/**
* returns the singleton instance.
@ -316,7 +326,8 @@ protected:
bool aUnsafe);
CompositableTransaction* mTxn;
bool mShuttingDown;
Atomic<bool> mShuttingDown;
static Atomic<bool> sIsShutDown;
};
} // namespace layers

View file

@ -102,6 +102,9 @@ elif CONFIG['CLANG_CL']:
DEFINES['SKIA_IMPLEMENTATION'] = 1
if not CONFIG['MOZ_ENABLE_SKIA_GPU']:
DEFINES['SK_SUPPORT_GPU'] = 0
if CONFIG['GNU_CXX']:
CXXFLAGS += [
'-Wno-deprecated-declarations',
@ -227,7 +230,8 @@ def generate_separated_sources(platform_sources):
'skia/src/opts/SkOpts_neon.cpp',
'skia/src/opts/SkBitmapProcState_arm_neon.cpp',
},
'none': set()
'none': set(),
'gpu': set()
})
for plat in platform_sources.keys():
@ -248,6 +252,8 @@ def generate_separated_sources(platform_sources):
key = 'arm'
elif '_none' in value:
key = 'none'
elif 'gpu' in value or 'Gpu' in value:
key = 'gpu'
elif all(value in platform_sources.get(p, {})
for p in platforms if p != plat):
key = 'common'
@ -358,6 +364,9 @@ def write_mozbuild(sources):
write_sources(f, sources['common'], 0)
f.write("if CONFIG['MOZ_ENABLE_SKIA_GPU']:\n")
write_sources(f, sources['gpu'], 4)
f.write("if CONFIG['MOZ_WIDGET_TOOLKIT'] in ('android', 'gonk'):\n")
write_sources(f, sources['android'], 4)

View file

@ -181,7 +181,6 @@ UNIFIED_SOURCES += [
'skia/src/effects/gradients/SkRadialGradient.cpp',
'skia/src/effects/gradients/SkSweepGradient.cpp',
'skia/src/effects/gradients/SkTwoPointConicalGradient.cpp',
'skia/src/effects/gradients/SkTwoPointConicalGradient_gpu.cpp',
'skia/src/effects/GrCircleBlurFragmentProcessor.cpp',
'skia/src/effects/Sk1DPathEffect.cpp',
'skia/src/effects/Sk2DPathEffect.cpp',
@ -205,7 +204,6 @@ UNIFIED_SOURCES += [
'skia/src/effects/SkDropShadowImageFilter.cpp',
'skia/src/effects/SkEmbossMask.cpp',
'skia/src/effects/SkEmbossMaskFilter.cpp',
'skia/src/effects/SkGpuBlurUtils.cpp',
'skia/src/effects/SkImageSource.cpp',
'skia/src/effects/SkLayerDrawLooper.cpp',
'skia/src/effects/SkLayerRasterizer.cpp',
@ -232,133 +230,11 @@ UNIFIED_SOURCES += [
'skia/src/fonts/SkRandomScalerContext.cpp',
'skia/src/fonts/SkRemotableFontMgr.cpp',
'skia/src/fonts/SkTestScalerContext.cpp',
'skia/src/gpu/batches/GrAtlasTextBatch.cpp',
'skia/src/gpu/batches/GrBatch.cpp',
'skia/src/gpu/batches/GrCopySurfaceBatch.cpp',
'skia/src/gpu/batches/GrDashLinePathRenderer.cpp',
'skia/src/gpu/batches/GrDefaultPathRenderer.cpp',
'skia/src/gpu/batches/GrDrawAtlasBatch.cpp',
'skia/src/gpu/batches/GrDrawBatch.cpp',
'skia/src/gpu/batches/GrDrawPathBatch.cpp',
'skia/src/gpu/batches/GrDrawVerticesBatch.cpp',
'skia/src/gpu/batches/GrNinePatch.cpp',
'skia/src/gpu/batches/GrNonAAFillRectBatch.cpp',
'skia/src/gpu/batches/GrNonAAStrokeRectBatch.cpp',
'skia/src/gpu/batches/GrRectBatchFactory.cpp',
'skia/src/gpu/batches/GrStencilAndCoverPathRenderer.cpp',
'skia/src/gpu/batches/GrTessellatingPathRenderer.cpp',
'skia/src/gpu/batches/GrVertexBatch.cpp',
'skia/src/gpu/effects/GrBezierEffect.cpp',
'skia/src/gpu/effects/GrBicubicEffect.cpp',
'skia/src/gpu/effects/GrBitmapTextGeoProc.cpp',
'skia/src/gpu/effects/GrConfigConversionEffect.cpp',
'skia/src/gpu/effects/GrConstColorProcessor.cpp',
'skia/src/gpu/effects/GrConvexPolyEffect.cpp',
'skia/src/gpu/effects/GrConvolutionEffect.cpp',
'skia/src/gpu/effects/GrCoverageSetOpXP.cpp',
'skia/src/gpu/effects/GrCustomXfermode.cpp',
'skia/src/gpu/effects/GrDashingEffect.cpp',
'skia/src/gpu/effects/GrDisableColorXP.cpp',
'skia/src/gpu/effects/GrDistanceFieldGeoProc.cpp',
'skia/src/gpu/effects/GrDitherEffect.cpp',
'skia/src/gpu/effects/GrMatrixConvolutionEffect.cpp',
'skia/src/gpu/effects/GrOvalEffect.cpp',
'skia/src/gpu/effects/GrPorterDuffXferProcessor.cpp',
'skia/src/gpu/effects/GrRRectEffect.cpp',
'skia/src/gpu/effects/GrSimpleTextureEffect.cpp',
'skia/src/gpu/effects/GrSingleTextureEffect.cpp',
'skia/src/gpu/effects/GrTextureDomain.cpp',
'skia/src/gpu/effects/GrTextureStripAtlas.cpp',
'skia/src/gpu/effects/GrXfermodeFragmentProcessor.cpp',
'skia/src/gpu/effects/GrYUVtoRGBEffect.cpp',
'skia/src/gpu/gl/debug/GrBufferObj.cpp',
'skia/src/gpu/gl/debug/GrDebugGL.cpp',
'skia/src/gpu/gl/debug/GrFrameBufferObj.cpp',
'skia/src/gpu/gl/debug/GrProgramObj.cpp',
'skia/src/gpu/gl/debug/GrShaderObj.cpp',
'skia/src/gpu/gl/debug/GrTextureObj.cpp',
'skia/src/gpu/gl/debug/GrTextureUnitObj.cpp',
'skia/src/gpu/gl/debug/SkDebugGLContext.cpp',
'skia/src/gpu/gl/SkGLContext.cpp',
'skia/src/gpu/gl/SkNullGLContext.cpp',
'skia/src/gpu/GrAtlasTextBlob.cpp',
'skia/src/gpu/GrAtlasTextContext.cpp',
'skia/src/gpu/GrBatchFlushState.cpp',
'skia/src/gpu/GrBatchFontCache.cpp',
'skia/src/gpu/GrBatchTest.cpp',
'skia/src/gpu/GrBlend.cpp',
'skia/src/gpu/GrBlurUtils.cpp',
'skia/src/gpu/GrBufferAllocPool.cpp',
'skia/src/gpu/GrCaps.cpp',
'skia/src/gpu/GrClip.cpp',
'skia/src/gpu/GrClipMaskManager.cpp',
'skia/src/gpu/GrContext.cpp',
'skia/src/gpu/GrCoordTransform.cpp',
'skia/src/gpu/GrDefaultGeoProcFactory.cpp',
'skia/src/gpu/GrDrawingManager.cpp',
'skia/src/gpu/GrDrawTarget.cpp',
'skia/src/gpu/GrFontScaler.cpp',
'skia/src/gpu/GrFragmentProcessor.cpp',
'skia/src/gpu/GrGpu.cpp',
'skia/src/gpu/GrGpuFactory.cpp',
'skia/src/gpu/GrGpuResource.cpp',
'skia/src/gpu/GrGpuResourceRef.cpp',
'skia/src/gpu/GrImageIDTextureAdjuster.cpp',
'skia/src/gpu/GrInvariantOutput.cpp',
'skia/src/gpu/GrLayerAtlas.cpp',
'skia/src/gpu/GrLayerCache.cpp',
'skia/src/gpu/GrLayerHoister.cpp',
'skia/src/gpu/GrMemoryPool.cpp',
'skia/src/gpu/GrOvalRenderer.cpp',
'skia/src/gpu/GrPaint.cpp',
'skia/src/gpu/GrPath.cpp',
'skia/src/gpu/GrPathProcessor.cpp',
'skia/src/gpu/GrPathRange.cpp',
'skia/src/gpu/GrPathRenderer.cpp',
'skia/src/gpu/GrPathRendererChain.cpp',
'skia/src/gpu/GrPathRendering.cpp',
'skia/src/gpu/GrPathUtils.cpp',
'skia/src/gpu/GrPipeline.cpp',
'skia/src/gpu/GrPipelineBuilder.cpp',
'skia/src/gpu/GrPrimitiveProcessor.cpp',
'skia/src/gpu/GrProcessor.cpp',
'skia/src/gpu/GrProcessorUnitTest.cpp',
'skia/src/gpu/GrProcOptInfo.cpp',
'skia/src/gpu/GrProgramElement.cpp',
'skia/src/gpu/GrRecordReplaceDraw.cpp',
'skia/src/gpu/GrRectanizer_pow2.cpp',
'skia/src/gpu/GrRectanizer_skyline.cpp',
'skia/src/gpu/GrReducedClip.cpp',
'skia/src/gpu/GrRenderTarget.cpp',
'skia/src/gpu/GrResourceProvider.cpp',
'skia/src/gpu/GrSoftwarePathRenderer.cpp',
'skia/src/gpu/GrStencil.cpp',
'skia/src/gpu/GrStencilAndCoverTextContext.cpp',
'skia/src/gpu/GrStencilAttachment.cpp',
'skia/src/gpu/GrStrokeInfo.cpp',
'skia/src/gpu/GrSurface.cpp',
'skia/src/gpu/GrSWMaskHelper.cpp',
'skia/src/gpu/GrTestUtils.cpp',
'skia/src/gpu/GrTextBlobCache.cpp',
'skia/src/gpu/GrTextContext.cpp',
'skia/src/gpu/GrTexture.cpp',
'skia/src/gpu/GrTextureAccess.cpp',
'skia/src/gpu/GrTextureParamsAdjuster.cpp',
'skia/src/gpu/GrTextureProvider.cpp',
'skia/src/gpu/GrTraceMarker.cpp',
'skia/src/gpu/GrXferProcessor.cpp',
'skia/src/gpu/GrYUVProvider.cpp',
'skia/src/gpu/SkGpuDevice.cpp',
'skia/src/gpu/SkGpuDevice_drawTexture.cpp',
'skia/src/gpu/SkGr.cpp',
'skia/src/gpu/SkGrPixelRef.cpp',
'skia/src/gpu/SkGrTexturePixelRef.cpp',
'skia/src/image/SkImage.cpp',
'skia/src/image/SkImage_Generator.cpp',
'skia/src/image/SkImage_Raster.cpp',
'skia/src/image/SkImageShader.cpp',
'skia/src/image/SkSurface.cpp',
'skia/src/image/SkSurface_Gpu.cpp',
'skia/src/image/SkSurface_Raster.cpp',
'skia/src/images/bmpdecoderhelper.cpp',
'skia/src/images/SkDecodingImageGenerator.cpp',
@ -455,71 +331,200 @@ SOURCES += [
'skia/src/core/SkPictureData.cpp',
'skia/src/core/SkRecorder.cpp',
'skia/src/core/SkScan_Antihair.cpp',
'skia/src/effects/SkArithmeticMode_gpu.cpp',
'skia/src/gpu/batches/GrAAConvexPathRenderer.cpp',
'skia/src/gpu/batches/GrAAConvexTessellator.cpp',
'skia/src/gpu/batches/GrAADistanceFieldPathRenderer.cpp',
'skia/src/gpu/batches/GrAAFillRectBatch.cpp',
'skia/src/gpu/batches/GrAAHairLinePathRenderer.cpp',
'skia/src/gpu/batches/GrAALinearizingConvexPathRenderer.cpp',
'skia/src/gpu/batches/GrAAStrokeRectBatch.cpp',
'skia/src/gpu/gl/builders/GrGLProgramBuilder.cpp',
'skia/src/gpu/gl/builders/GrGLShaderStringBuilder.cpp',
'skia/src/gpu/gl/builders/GrGLSLPrettyPrint.cpp',
'skia/src/gpu/gl/debug/GrGLCreateDebugInterface.cpp',
'skia/src/gpu/gl/GrGLAssembleInterface.cpp',
'skia/src/gpu/gl/GrGLBufferImpl.cpp',
'skia/src/gpu/gl/GrGLCaps.cpp',
'skia/src/gpu/gl/GrGLContext.cpp',
'skia/src/gpu/gl/GrGLCreateNativeInterface_none.cpp',
'skia/src/gpu/gl/GrGLCreateNullInterface.cpp',
'skia/src/gpu/gl/GrGLDefaultInterface_native.cpp',
'skia/src/gpu/gl/GrGLExtensions.cpp',
'skia/src/gpu/gl/GrGLGLSL.cpp',
'skia/src/gpu/gl/GrGLGpu.cpp',
'skia/src/gpu/gl/GrGLGpuProgramCache.cpp',
'skia/src/gpu/gl/GrGLIndexBuffer.cpp',
'skia/src/gpu/gl/GrGLInterface.cpp',
'skia/src/gpu/gl/GrGLNameAllocator.cpp',
'skia/src/gpu/gl/GrGLNoOpInterface.cpp',
'skia/src/gpu/gl/GrGLPath.cpp',
'skia/src/gpu/gl/GrGLPathRange.cpp',
'skia/src/gpu/gl/GrGLPathRendering.cpp',
'skia/src/gpu/gl/GrGLProgram.cpp',
'skia/src/gpu/gl/GrGLProgramDataManager.cpp',
'skia/src/gpu/gl/GrGLProgramDesc.cpp',
'skia/src/gpu/gl/GrGLRenderTarget.cpp',
'skia/src/gpu/gl/GrGLStencilAttachment.cpp',
'skia/src/gpu/gl/GrGLTexture.cpp',
'skia/src/gpu/gl/GrGLTextureRenderTarget.cpp',
'skia/src/gpu/gl/GrGLUtil.cpp',
'skia/src/gpu/gl/GrGLVaryingHandler.cpp',
'skia/src/gpu/gl/GrGLVertexArray.cpp',
'skia/src/gpu/gl/GrGLVertexBuffer.cpp',
'skia/src/gpu/glsl/GrGLSL.cpp',
'skia/src/gpu/glsl/GrGLSLBlend.cpp',
'skia/src/gpu/glsl/GrGLSLCaps.cpp',
'skia/src/gpu/glsl/GrGLSLFragmentProcessor.cpp',
'skia/src/gpu/glsl/GrGLSLFragmentShaderBuilder.cpp',
'skia/src/gpu/glsl/GrGLSLGeometryProcessor.cpp',
'skia/src/gpu/glsl/GrGLSLGeometryShaderBuilder.cpp',
'skia/src/gpu/glsl/GrGLSLPrimitiveProcessor.cpp',
'skia/src/gpu/glsl/GrGLSLProgramBuilder.cpp',
'skia/src/gpu/glsl/GrGLSLShaderBuilder.cpp',
'skia/src/gpu/glsl/GrGLSLUtil.cpp',
'skia/src/gpu/glsl/GrGLSLVarying.cpp',
'skia/src/gpu/glsl/GrGLSLVertexShaderBuilder.cpp',
'skia/src/gpu/glsl/GrGLSLXferProcessor.cpp',
'skia/src/gpu/GrBatchAtlas.cpp',
'skia/src/gpu/GrDrawContext.cpp',
'skia/src/gpu/GrResourceCache.cpp',
'skia/src/image/SkImage_Gpu.cpp',
'skia/src/pathops/SkPathOpsDebug.cpp',
'skia/src/utils/SkMD5.cpp',
'skia/src/utils/SkParse.cpp',
'skia/src/utils/SkParsePath.cpp',
'skia/src/utils/SkSHA1.cpp',
]
if CONFIG['MOZ_ENABLE_SKIA_GPU']:
UNIFIED_SOURCES += [
'skia/src/effects/gradients/SkTwoPointConicalGradient_gpu.cpp',
'skia/src/effects/SkGpuBlurUtils.cpp',
'skia/src/gpu/batches/GrAtlasTextBatch.cpp',
'skia/src/gpu/batches/GrBatch.cpp',
'skia/src/gpu/batches/GrCopySurfaceBatch.cpp',
'skia/src/gpu/batches/GrDashLinePathRenderer.cpp',
'skia/src/gpu/batches/GrDefaultPathRenderer.cpp',
'skia/src/gpu/batches/GrDrawAtlasBatch.cpp',
'skia/src/gpu/batches/GrDrawBatch.cpp',
'skia/src/gpu/batches/GrDrawPathBatch.cpp',
'skia/src/gpu/batches/GrDrawVerticesBatch.cpp',
'skia/src/gpu/batches/GrNinePatch.cpp',
'skia/src/gpu/batches/GrNonAAFillRectBatch.cpp',
'skia/src/gpu/batches/GrNonAAStrokeRectBatch.cpp',
'skia/src/gpu/batches/GrRectBatchFactory.cpp',
'skia/src/gpu/batches/GrStencilAndCoverPathRenderer.cpp',
'skia/src/gpu/batches/GrTessellatingPathRenderer.cpp',
'skia/src/gpu/batches/GrVertexBatch.cpp',
'skia/src/gpu/effects/GrBezierEffect.cpp',
'skia/src/gpu/effects/GrBicubicEffect.cpp',
'skia/src/gpu/effects/GrBitmapTextGeoProc.cpp',
'skia/src/gpu/effects/GrConfigConversionEffect.cpp',
'skia/src/gpu/effects/GrConstColorProcessor.cpp',
'skia/src/gpu/effects/GrConvexPolyEffect.cpp',
'skia/src/gpu/effects/GrConvolutionEffect.cpp',
'skia/src/gpu/effects/GrCoverageSetOpXP.cpp',
'skia/src/gpu/effects/GrCustomXfermode.cpp',
'skia/src/gpu/effects/GrDashingEffect.cpp',
'skia/src/gpu/effects/GrDisableColorXP.cpp',
'skia/src/gpu/effects/GrDistanceFieldGeoProc.cpp',
'skia/src/gpu/effects/GrDitherEffect.cpp',
'skia/src/gpu/effects/GrMatrixConvolutionEffect.cpp',
'skia/src/gpu/effects/GrOvalEffect.cpp',
'skia/src/gpu/effects/GrPorterDuffXferProcessor.cpp',
'skia/src/gpu/effects/GrRRectEffect.cpp',
'skia/src/gpu/effects/GrSimpleTextureEffect.cpp',
'skia/src/gpu/effects/GrSingleTextureEffect.cpp',
'skia/src/gpu/effects/GrTextureDomain.cpp',
'skia/src/gpu/effects/GrTextureStripAtlas.cpp',
'skia/src/gpu/effects/GrXfermodeFragmentProcessor.cpp',
'skia/src/gpu/effects/GrYUVtoRGBEffect.cpp',
'skia/src/gpu/gl/debug/GrBufferObj.cpp',
'skia/src/gpu/gl/debug/GrDebugGL.cpp',
'skia/src/gpu/gl/debug/GrFrameBufferObj.cpp',
'skia/src/gpu/gl/debug/GrProgramObj.cpp',
'skia/src/gpu/gl/debug/GrShaderObj.cpp',
'skia/src/gpu/gl/debug/GrTextureObj.cpp',
'skia/src/gpu/gl/debug/GrTextureUnitObj.cpp',
'skia/src/gpu/gl/debug/SkDebugGLContext.cpp',
'skia/src/gpu/gl/SkGLContext.cpp',
'skia/src/gpu/gl/SkNullGLContext.cpp',
'skia/src/gpu/GrAtlasTextBlob.cpp',
'skia/src/gpu/GrAtlasTextContext.cpp',
'skia/src/gpu/GrBatchFlushState.cpp',
'skia/src/gpu/GrBatchFontCache.cpp',
'skia/src/gpu/GrBatchTest.cpp',
'skia/src/gpu/GrBlend.cpp',
'skia/src/gpu/GrBlurUtils.cpp',
'skia/src/gpu/GrBufferAllocPool.cpp',
'skia/src/gpu/GrCaps.cpp',
'skia/src/gpu/GrClip.cpp',
'skia/src/gpu/GrClipMaskManager.cpp',
'skia/src/gpu/GrContext.cpp',
'skia/src/gpu/GrCoordTransform.cpp',
'skia/src/gpu/GrDefaultGeoProcFactory.cpp',
'skia/src/gpu/GrDrawingManager.cpp',
'skia/src/gpu/GrDrawTarget.cpp',
'skia/src/gpu/GrFontScaler.cpp',
'skia/src/gpu/GrFragmentProcessor.cpp',
'skia/src/gpu/GrGpu.cpp',
'skia/src/gpu/GrGpuFactory.cpp',
'skia/src/gpu/GrGpuResource.cpp',
'skia/src/gpu/GrGpuResourceRef.cpp',
'skia/src/gpu/GrImageIDTextureAdjuster.cpp',
'skia/src/gpu/GrInvariantOutput.cpp',
'skia/src/gpu/GrLayerAtlas.cpp',
'skia/src/gpu/GrLayerCache.cpp',
'skia/src/gpu/GrLayerHoister.cpp',
'skia/src/gpu/GrMemoryPool.cpp',
'skia/src/gpu/GrOvalRenderer.cpp',
'skia/src/gpu/GrPaint.cpp',
'skia/src/gpu/GrPath.cpp',
'skia/src/gpu/GrPathProcessor.cpp',
'skia/src/gpu/GrPathRange.cpp',
'skia/src/gpu/GrPathRenderer.cpp',
'skia/src/gpu/GrPathRendererChain.cpp',
'skia/src/gpu/GrPathRendering.cpp',
'skia/src/gpu/GrPathUtils.cpp',
'skia/src/gpu/GrPipeline.cpp',
'skia/src/gpu/GrPipelineBuilder.cpp',
'skia/src/gpu/GrPrimitiveProcessor.cpp',
'skia/src/gpu/GrProcessor.cpp',
'skia/src/gpu/GrProcessorUnitTest.cpp',
'skia/src/gpu/GrProcOptInfo.cpp',
'skia/src/gpu/GrProgramElement.cpp',
'skia/src/gpu/GrRecordReplaceDraw.cpp',
'skia/src/gpu/GrRectanizer_pow2.cpp',
'skia/src/gpu/GrRectanizer_skyline.cpp',
'skia/src/gpu/GrReducedClip.cpp',
'skia/src/gpu/GrRenderTarget.cpp',
'skia/src/gpu/GrResourceProvider.cpp',
'skia/src/gpu/GrSoftwarePathRenderer.cpp',
'skia/src/gpu/GrStencil.cpp',
'skia/src/gpu/GrStencilAndCoverTextContext.cpp',
'skia/src/gpu/GrStencilAttachment.cpp',
'skia/src/gpu/GrStrokeInfo.cpp',
'skia/src/gpu/GrSurface.cpp',
'skia/src/gpu/GrSWMaskHelper.cpp',
'skia/src/gpu/GrTestUtils.cpp',
'skia/src/gpu/GrTextBlobCache.cpp',
'skia/src/gpu/GrTextContext.cpp',
'skia/src/gpu/GrTexture.cpp',
'skia/src/gpu/GrTextureAccess.cpp',
'skia/src/gpu/GrTextureParamsAdjuster.cpp',
'skia/src/gpu/GrTextureProvider.cpp',
'skia/src/gpu/GrTraceMarker.cpp',
'skia/src/gpu/GrXferProcessor.cpp',
'skia/src/gpu/GrYUVProvider.cpp',
'skia/src/gpu/SkGpuDevice.cpp',
'skia/src/gpu/SkGpuDevice_drawTexture.cpp',
'skia/src/gpu/SkGr.cpp',
'skia/src/gpu/SkGrPixelRef.cpp',
'skia/src/gpu/SkGrTexturePixelRef.cpp',
'skia/src/image/SkSurface_Gpu.cpp',
]
SOURCES += [
'skia/src/effects/SkArithmeticMode_gpu.cpp',
'skia/src/gpu/batches/GrAAConvexPathRenderer.cpp',
'skia/src/gpu/batches/GrAAConvexTessellator.cpp',
'skia/src/gpu/batches/GrAADistanceFieldPathRenderer.cpp',
'skia/src/gpu/batches/GrAAFillRectBatch.cpp',
'skia/src/gpu/batches/GrAAHairLinePathRenderer.cpp',
'skia/src/gpu/batches/GrAALinearizingConvexPathRenderer.cpp',
'skia/src/gpu/batches/GrAAStrokeRectBatch.cpp',
'skia/src/gpu/gl/builders/GrGLProgramBuilder.cpp',
'skia/src/gpu/gl/builders/GrGLShaderStringBuilder.cpp',
'skia/src/gpu/gl/builders/GrGLSLPrettyPrint.cpp',
'skia/src/gpu/gl/debug/GrGLCreateDebugInterface.cpp',
'skia/src/gpu/gl/GrGLAssembleInterface.cpp',
'skia/src/gpu/gl/GrGLBufferImpl.cpp',
'skia/src/gpu/gl/GrGLCaps.cpp',
'skia/src/gpu/gl/GrGLContext.cpp',
'skia/src/gpu/gl/GrGLCreateNullInterface.cpp',
'skia/src/gpu/gl/GrGLDefaultInterface_native.cpp',
'skia/src/gpu/gl/GrGLExtensions.cpp',
'skia/src/gpu/gl/GrGLGLSL.cpp',
'skia/src/gpu/gl/GrGLGpu.cpp',
'skia/src/gpu/gl/GrGLGpuProgramCache.cpp',
'skia/src/gpu/gl/GrGLIndexBuffer.cpp',
'skia/src/gpu/gl/GrGLInterface.cpp',
'skia/src/gpu/gl/GrGLNameAllocator.cpp',
'skia/src/gpu/gl/GrGLNoOpInterface.cpp',
'skia/src/gpu/gl/GrGLPath.cpp',
'skia/src/gpu/gl/GrGLPathRange.cpp',
'skia/src/gpu/gl/GrGLPathRendering.cpp',
'skia/src/gpu/gl/GrGLProgram.cpp',
'skia/src/gpu/gl/GrGLProgramDataManager.cpp',
'skia/src/gpu/gl/GrGLProgramDesc.cpp',
'skia/src/gpu/gl/GrGLRenderTarget.cpp',
'skia/src/gpu/gl/GrGLStencilAttachment.cpp',
'skia/src/gpu/gl/GrGLTexture.cpp',
'skia/src/gpu/gl/GrGLTextureRenderTarget.cpp',
'skia/src/gpu/gl/GrGLUtil.cpp',
'skia/src/gpu/gl/GrGLVaryingHandler.cpp',
'skia/src/gpu/gl/GrGLVertexArray.cpp',
'skia/src/gpu/gl/GrGLVertexBuffer.cpp',
'skia/src/gpu/glsl/GrGLSL.cpp',
'skia/src/gpu/glsl/GrGLSLBlend.cpp',
'skia/src/gpu/glsl/GrGLSLCaps.cpp',
'skia/src/gpu/glsl/GrGLSLFragmentProcessor.cpp',
'skia/src/gpu/glsl/GrGLSLFragmentShaderBuilder.cpp',
'skia/src/gpu/glsl/GrGLSLGeometryProcessor.cpp',
'skia/src/gpu/glsl/GrGLSLGeometryShaderBuilder.cpp',
'skia/src/gpu/glsl/GrGLSLPrimitiveProcessor.cpp',
'skia/src/gpu/glsl/GrGLSLProgramBuilder.cpp',
'skia/src/gpu/glsl/GrGLSLShaderBuilder.cpp',
'skia/src/gpu/glsl/GrGLSLUtil.cpp',
'skia/src/gpu/glsl/GrGLSLVarying.cpp',
'skia/src/gpu/glsl/GrGLSLVertexShaderBuilder.cpp',
'skia/src/gpu/glsl/GrGLSLXferProcessor.cpp',
'skia/src/gpu/GrBatchAtlas.cpp',
'skia/src/gpu/GrDrawContext.cpp',
'skia/src/gpu/GrResourceCache.cpp',
'skia/src/image/SkImage_Gpu.cpp',
]
if CONFIG['MOZ_WIDGET_TOOLKIT'] in ('android', 'gonk'):
UNIFIED_SOURCES += [
'skia/src/ports/SkDebug_android.cpp',
@ -713,6 +718,9 @@ elif CONFIG['CLANG_CL']:
DEFINES['SKIA_IMPLEMENTATION'] = 1
if not CONFIG['MOZ_ENABLE_SKIA_GPU']:
DEFINES['SK_SUPPORT_GPU'] = 0
if CONFIG['GNU_CXX']:
CXXFLAGS += [
'-Wno-deprecated-declarations',

View file

@ -13,6 +13,8 @@
#include <algorithm>
#include <dlfcn.h>
using namespace mozilla;
// standard font descriptors that we construct the first time they're needed
@ -21,28 +23,85 @@ CTFontDescriptorRef gfxCoreTextShaper::sDisableLigaturesDescriptor = nullptr;
CTFontDescriptorRef gfxCoreTextShaper::sIndicFeaturesDescriptor = nullptr;
CTFontDescriptorRef gfxCoreTextShaper::sIndicDisableLigaturesDescriptor = nullptr;
static CFStringRef sCTWritingDirectionAttributeName = nullptr;
// See CTStringAttributes.h
enum {
kMyCTWritingDirectionEmbedding = (0 << 1),
kMyCTWritingDirectionOverride = (1 << 1)
};
// Helper to create a CFDictionary with the right attributes for shaping our
// text, including imposing the given directionality.
// This will only be called if we're on 10.8 or later.
CFDictionaryRef
gfxCoreTextShaper::CreateAttrDict(bool aRightToLeft)
{
// Because we always shape unidirectional runs, and may have applied
// directional overrides, we want to force a direction rather than
// allowing CoreText to do its own unicode-based bidi processing.
SInt16 dirOverride = kMyCTWritingDirectionOverride |
(aRightToLeft ? kCTWritingDirectionRightToLeft
: kCTWritingDirectionLeftToRight);
CFNumberRef dirNumber =
::CFNumberCreate(kCFAllocatorDefault,
kCFNumberSInt16Type, &dirOverride);
CFArrayRef dirArray =
::CFArrayCreate(kCFAllocatorDefault,
(const void **) &dirNumber, 1,
&kCFTypeArrayCallBacks);
::CFRelease(dirNumber);
CFTypeRef attrs[] = { kCTFontAttributeName, sCTWritingDirectionAttributeName };
CFTypeRef values[] = { mCTFont, dirArray };
CFDictionaryRef attrDict =
::CFDictionaryCreate(kCFAllocatorDefault,
attrs, values, ArrayLength(attrs),
&kCFTypeDictionaryKeyCallBacks,
&kCFTypeDictionaryValueCallBacks);
::CFRelease(dirArray);
return attrDict;
}
CFDictionaryRef
gfxCoreTextShaper::CreateAttrDictWithoutDirection()
{
CFTypeRef attrs[] = { kCTFontAttributeName };
CFTypeRef values[] = { mCTFont };
CFDictionaryRef attrDict =
::CFDictionaryCreate(kCFAllocatorDefault,
attrs, values, ArrayLength(attrs),
&kCFTypeDictionaryKeyCallBacks,
&kCFTypeDictionaryValueCallBacks);
return attrDict;
}
gfxCoreTextShaper::gfxCoreTextShaper(gfxMacFont *aFont)
: gfxFontShaper(aFont)
, mAttributesDictLTR(nullptr)
, mAttributesDictRTL(nullptr)
{
static bool sInitialized = false;
if (!sInitialized) {
CFStringRef* pstr = (CFStringRef*)
dlsym(RTLD_DEFAULT, "kCTWritingDirectionAttributeName");
if (pstr) {
sCTWritingDirectionAttributeName = *pstr;
}
sInitialized = true;
}
// Create our CTFontRef
mCTFont = CreateCTFontWithFeatures(aFont->GetAdjustedSize(),
GetDefaultFeaturesDescriptor());
// Set up the default attribute dictionary that we will need each time we
// create a CFAttributedString (unless we need to use custom features,
// in which case a new dictionary will be created on the fly).
mAttributesDict = ::CFDictionaryCreate(kCFAllocatorDefault,
(const void**) &kCTFontAttributeName,
(const void**) &mCTFont,
1, // count of attributes
&kCFTypeDictionaryKeyCallBacks,
&kCFTypeDictionaryValueCallBacks);
}
gfxCoreTextShaper::~gfxCoreTextShaper()
{
if (mAttributesDict) {
::CFRelease(mAttributesDict);
if (mAttributesDictLTR) {
::CFRelease(mAttributesDictLTR);
}
if (mAttributesDictRTL) {
::CFRelease(mAttributesDictRTL);
}
if (mCTFont) {
::CFRelease(mCTFont);
@ -65,54 +124,81 @@ gfxCoreTextShaper::ShapeText(gfxContext *aContext,
gfxShapedText *aShapedText)
{
// Create a CFAttributedString with text and style info, so we can use CoreText to lay it out.
bool isRightToLeft = aShapedText->IsRightToLeft();
const UniChar* text = reinterpret_cast<const UniChar*>(aText);
uint32_t length = aLength;
// we need to bidi-wrap the text if the run is RTL,
// or if it is an LTR run but may contain (overridden) RTL chars
bool bidiWrap = isRightToLeft;
if (!bidiWrap && !aShapedText->TextIs8Bit()) {
uint32_t i;
for (i = 0; i < length; ++i) {
if (gfxFontUtils::PotentialRTLChar(aText[i])) {
bidiWrap = true;
break;
}
}
}
// If there's a possibility of any bidi, we wrap the text with direction overrides
// to ensure neutrals or characters that were bidi-overridden in HTML behave properly.
const UniChar beginLTR[] = { 0x202d, 0x20 };
const UniChar beginRTL[] = { 0x202e, 0x20 };
const UniChar endBidiWrap[] = { 0x20, 0x2e, 0x202c };
uint32_t startOffset;
CFStringRef stringObj;
if (bidiWrap) {
startOffset = isRightToLeft ?
mozilla::ArrayLength(beginRTL) : mozilla::ArrayLength(beginLTR);
CFMutableStringRef mutableString =
::CFStringCreateMutable(kCFAllocatorDefault,
length + startOffset + mozilla::ArrayLength(endBidiWrap));
::CFStringAppendCharacters(mutableString,
isRightToLeft ? beginRTL : beginLTR,
startOffset);
::CFStringAppendCharacters(mutableString, reinterpret_cast<const UniChar*>(aText), length);
::CFStringAppendCharacters(mutableString,
endBidiWrap, mozilla::ArrayLength(endBidiWrap));
stringObj = mutableString;
} else {
CFDictionaryRef attrObj;
if (sCTWritingDirectionAttributeName) {
startOffset = 0;
stringObj = ::CFStringCreateWithCharactersNoCopy(kCFAllocatorDefault,
reinterpret_cast<const UniChar*>(aText),
length, kCFAllocatorNull);
text, length,
kCFAllocatorNull);
// Get an attributes dictionary suitable for shaping text in the
// current direction, creating it if necessary.
attrObj = isRightToLeft ? mAttributesDictRTL : mAttributesDictLTR;
if (!attrObj) {
attrObj = CreateAttrDict(isRightToLeft);
(isRightToLeft ? mAttributesDictRTL : mAttributesDictLTR) = attrObj;
}
} else {
// OS is too old to support kCTWritingDirectionAttributeName:
// we need to bidi-wrap the text if the run is RTL,
// or if it is an LTR run but may contain (overridden) RTL chars
bool bidiWrap = isRightToLeft;
if (!bidiWrap && !aShapedText->TextIs8Bit()) {
uint32_t i;
for (i = 0; i < length; ++i) {
if (gfxFontUtils::PotentialRTLChar(aText[i])) {
bidiWrap = true;
break;
}
}
}
// If there's a possibility of any bidi, we wrap the text with
// direction overrides to ensure neutrals or characters that were
// bidi-overridden in HTML behave properly.
static const UniChar beginLTR[] = { 0x202d, 0x20 };
static const UniChar beginRTL[] = { 0x202e, 0x20 };
static const UniChar endBidiWrap[] = { 0x20, 0x2e, 0x202c };
if (bidiWrap) {
startOffset = isRightToLeft ? ArrayLength(beginRTL)
: ArrayLength(beginLTR);
CFMutableStringRef mutableString =
::CFStringCreateMutable(kCFAllocatorDefault,
length + startOffset +
ArrayLength(endBidiWrap));
::CFStringAppendCharacters(mutableString,
isRightToLeft ? beginRTL : beginLTR,
startOffset);
::CFStringAppendCharacters(mutableString, text, length);
::CFStringAppendCharacters(mutableString, endBidiWrap,
ArrayLength(endBidiWrap));
stringObj = mutableString;
} else {
startOffset = 0;
stringObj =
::CFStringCreateWithCharactersNoCopy(kCFAllocatorDefault,
text, length,
kCFAllocatorNull);
}
// Get an attributes dictionary suitable for shaping text,
// creating it if necessary. (This dict is not LTR-specific,
// but we use that field to store it anyway.)
if (!mAttributesDictLTR) {
mAttributesDictLTR = CreateAttrDictWithoutDirection();
}
attrObj = mAttributesDictLTR;
}
CFDictionaryRef attrObj;
CTFontRef tempCTFont = nullptr;
if (IsBuggyIndicScript(aScript)) {
// To work around buggy Indic AAT fonts shipped with OS X,
// we re-enable the Line Initial Smart Swashes feature that is needed
@ -132,21 +218,18 @@ gfxCoreTextShaper::ShapeText(gfxContext *aContext,
GetDisableLigaturesDescriptor());
}
// For the disabled-ligature or buggy-indic-font case, we need to replace
// the standard CTFont in the attribute dictionary with a tweaked version.
CFMutableDictionaryRef mutableAttr = nullptr;
if (tempCTFont) {
attrObj =
::CFDictionaryCreate(kCFAllocatorDefault,
(const void**) &kCTFontAttributeName,
(const void**) &tempCTFont,
1, // count of attributes
&kCFTypeDictionaryKeyCallBacks,
&kCFTypeDictionaryValueCallBacks);
mutableAttr = ::CFDictionaryCreateMutableCopy(kCFAllocatorDefault, 2,
attrObj);
::CFDictionaryReplaceValue(mutableAttr,
kCTFontAttributeName, tempCTFont);
// Having created the dict, we're finished with our temporary
// Indic and/or ligature-disabled CTFontRef.
::CFRelease(tempCTFont);
} else {
// The default case is to use our preallocated attr dict
attrObj = mAttributesDict;
::CFRetain(attrObj);
attrObj = mutableAttr;
}
// Now we can create an attributed string
@ -180,8 +263,10 @@ gfxCoreTextShaper::ShapeText(gfxContext *aContext,
// If Core Text manufactured a new dictionary, this may indicate
// unexpected font substitution. In that case, we fail (and fall
// back to harfbuzz shaping)...
const void* font1 = ::CFDictionaryGetValue(attrObj, kCTFontAttributeName);
const void* font2 = ::CFDictionaryGetValue(runAttr, kCTFontAttributeName);
const void* font1 =
::CFDictionaryGetValue(attrObj, kCTFontAttributeName);
const void* font2 =
::CFDictionaryGetValue(runAttr, kCTFontAttributeName);
if (font1 != font2) {
// ...except that if the fallback was only for a variation
// selector or join control that is otherwise unsupported,
@ -198,13 +283,16 @@ gfxCoreTextShaper::ShapeText(gfxContext *aContext,
break;
}
}
if (SetGlyphsFromRun(aShapedText, aOffset, aLength, aCTRun, startOffset) != NS_OK) {
if (SetGlyphsFromRun(aShapedText, aOffset, aLength, aCTRun,
startOffset) != NS_OK) {
success = false;
break;
}
}
::CFRelease(attrObj);
if (mutableAttr) {
::CFRelease(mutableAttr);
}
::CFRelease(line);
return success;

View file

@ -31,7 +31,10 @@ public:
protected:
CTFontRef mCTFont;
CFDictionaryRef mAttributesDict;
// attributes for shaping text with LTR or RTL directionality
CFDictionaryRef mAttributesDictLTR;
CFDictionaryRef mAttributesDictRTL;
nsresult SetGlyphsFromRun(gfxShapedText *aShapedText,
uint32_t aOffset,
@ -42,6 +45,9 @@ protected:
CTFontRef CreateCTFontWithFeatures(CGFloat aSize,
CTFontDescriptorRef aDescriptor);
CFDictionaryRef CreateAttrDict(bool aRightToLeft);
CFDictionaryRef CreateAttrDictWithoutDirection();
static CTFontDescriptorRef
CreateFontFeaturesDescriptor(const std::pair<SInt16,SInt16> aFeatures[],
size_t aCount);

View file

@ -6,16 +6,24 @@
#include "Downscaler.h"
#include "mozilla/UniquePtrExtensions.h"
namespace mozilla {
namespace image {
Deinterlacer::Deinterlacer(const nsIntSize& aImageSize)
: mImageSize(aImageSize)
, mBuffer(MakeUnique<uint8_t[]>(mImageSize.width *
mImageSize.height *
sizeof(uint32_t)))
{ }
{
CheckedInt<size_t> bufferSize = mImageSize.width;
bufferSize *= mImageSize.height;
bufferSize *= sizeof(uint32_t);
if (!bufferSize.isValid()) {
return;
}
mBuffer = MakeUniqueFallible<uint8_t[]>(bufferSize.value());
}
uint32_t
Deinterlacer::RowSize() const
@ -27,6 +35,7 @@ uint8_t*
Deinterlacer::RowBuffer(uint32_t aRow)
{
uint32_t offset = aRow * RowSize();
MOZ_ASSERT(IsValid(), "Deinterlacer in invalid state");
MOZ_ASSERT(offset < mImageSize.width * mImageSize.height * sizeof(uint32_t),
"Row is outside of image");
return mBuffer.get() + offset;
@ -35,6 +44,7 @@ Deinterlacer::RowBuffer(uint32_t aRow)
void
Deinterlacer::PropagatePassToDownscaler(Downscaler& aDownscaler)
{
MOZ_ASSERT(IsValid(), "Deinterlacer in invalid state");
for (int32_t row = 0 ; row < mImageSize.height ; ++row) {
memcpy(aDownscaler.RowBuffer(), RowBuffer(row), RowSize());
aDownscaler.CommitRow();
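The constructor change above swaps an unchecked width * height * 4 multiplication for mozilla::CheckedInt plus a fallible allocation, so an oversized GIF frame (exercised by the 1235605.gif crashtest added later in this commit) leaves mBuffer null and IsValid() false instead of overflowing. A standalone sketch of the CheckedInt idiom; the patch itself uses size_t, but uint32_t and the dimensions here are hypothetical values chosen to make the overflow visible:

#include "mozilla/CheckedInt.h"
#include <cstdint>
#include <cstdio>

int main() {
  int32_t width = 70000, height = 70000;       // hypothetical, absurdly large frame
  mozilla::CheckedInt<uint32_t> size = width;  // validity propagates through each operation
  size *= height;
  size *= sizeof(uint32_t);
  if (!size.isValid()) {                       // overflow detected: refuse to allocate
    std::puts("buffer size overflows uint32_t");
    return 1;
  }
  std::printf("allocating %u bytes\n", size.value());
  return 0;
}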

View file

@ -32,6 +32,7 @@ class Deinterlacer
{
public:
explicit Deinterlacer(const nsIntSize& aImageSize);
bool IsValid() { return !!mBuffer; }
uint8_t* RowBuffer(uint32_t aRow);
void PropagatePassToDownscaler(Downscaler& aDownscaler);

View file

@ -1178,6 +1178,12 @@ nsGIFDecoder2::WriteInternal(const char* aBuffer, uint32_t aCount)
mGIFStruct.ipass = 1;
if (mDownscaler) {
mDeinterlacer.emplace(mDownscaler->FrameSize());
if (!mDeinterlacer->IsValid()) {
mDeinterlacer.reset();
mGIFStruct.state = gif_error;
break;
}
}
} else {
mGIFStruct.interlaced = false;

Binary data
image/test/crashtests/1235605.gif (new file, 2.3 KiB; binary file not shown)

View file

@ -14,6 +14,7 @@ load 856616.gif
skip-if(B2G) load 944353.jpg
load 1205923-1.html
load 1212954-1.svg
load 1235605.gif
load colormap-range.gif
HTTP load delayedframe.sjs # A 3-frame animated GIF with an inordinate delay between the second and third frame

View file

@ -514,8 +514,9 @@ XPCShellEnvironment::Init()
}
JS::CompartmentOptions options;
options.setZone(JS::SystemZone)
.setVersion(JSVERSION_LATEST);
options.creationOptions().setZone(JS::SystemZone);
options.behaviors().setVersion(JSVERSION_LATEST);
nsCOMPtr<nsIXPConnectJSObjectHolder> holder;
rv = xpc->InitClassesWithNewWrappedGlobal(cx,
static_cast<nsIGlobalObject *>(backstagePass),

View file

@ -132,7 +132,7 @@ class js::AsmJSModule
{
public:
enum Which { Variable, FFI, ArrayView, ArrayViewCtor, MathBuiltinFunction,
AtomicsBuiltinFunction, Constant, SimdCtor, SimdOperation, ByteLength };
AtomicsBuiltinFunction, Constant, SimdCtor, SimdOperation };
enum VarInitKind { InitConstant, InitImport };
enum ConstantKind { GlobalConstant, MathConstant };
@ -301,21 +301,19 @@ class js::AsmJSModule
PropertyName* name_;
PropertyName* maybeFieldName_;
struct CacheablePod {
uint32_t wasmIndex_;
uint32_t startOffsetInModule_; // Store module-start-relative offsets
uint32_t endOffsetInModule_; // so preserved by serialization.
} pod;
public:
Export() {}
Export(PropertyName* name, PropertyName* maybeFieldName, uint32_t wasmIndex,
Export(PropertyName* name, PropertyName* maybeFieldName,
uint32_t startOffsetInModule, uint32_t endOffsetInModule)
: name_(name),
maybeFieldName_(maybeFieldName)
{
MOZ_ASSERT(name_->isTenured());
MOZ_ASSERT_IF(maybeFieldName_, maybeFieldName_->isTenured());
pod.wasmIndex_ = wasmIndex;
pod.startOffsetInModule_ = startOffsetInModule;
pod.endOffsetInModule_ = endOffsetInModule;
}
@ -338,14 +336,6 @@ class js::AsmJSModule
uint32_t endOffsetInModule() const {
return pod.endOffsetInModule_;
}
static const uint32_t ChangeHeap = UINT32_MAX;
bool isChangeHeap() const {
return pod.wasmIndex_ == ChangeHeap;
}
uint32_t wasmIndex() const {
MOZ_ASSERT(!isChangeHeap());
return pod.wasmIndex_;
}
WASM_DECLARE_SERIALIZABLE(Export)
};
@ -359,15 +349,12 @@ class js::AsmJSModule
wasm::UniqueStaticLinkData linkData_;
struct CacheablePod {
uint32_t minHeapLength_;
uint32_t maxHeapLength_;
uint32_t heapLengthMask_;
uint32_t numFFIs_;
uint32_t srcLength_;
uint32_t srcLengthWithRightBrace_;
bool strict_;
bool hasArrayView_;
bool isSharedView_;
bool hasFixedMinHeapLength_;
} pod;
const ScriptSourceHolder scriptSource_;
const uint32_t srcStart_;
@ -391,7 +378,6 @@ class js::AsmJSModule
{
mozilla::PodZero(&pod);
pod.minHeapLength_ = RoundUpToNextValidAsmJSHeapLength(0);
pod.maxHeapLength_ = 0x80000000;
pod.strict_ = strict;
MOZ_ASSERT(srcStart_ <= srcBodyStart_);
@ -446,13 +432,6 @@ class js::AsmJSModule
uint32_t minHeapLength() const {
return pod.minHeapLength_;
}
uint32_t maxHeapLength() const {
return pod.maxHeapLength_;
}
uint32_t heapLengthMask() const {
MOZ_ASSERT(pod.hasFixedMinHeapLength_);
return pod.heapLengthMask_;
}
void initGlobalArgumentName(PropertyName* n) {
MOZ_ASSERT(!isFinished());
@ -520,11 +499,6 @@ class js::AsmJSModule
g.pod.u.viewType_ = vt;
return globals_.append(g);
}
bool addByteLength() {
MOZ_ASSERT(!isFinished());
Global g(Global::ByteLength, nullptr);
return globals_.append(g);
}
bool addMathBuiltinFunction(AsmJSMathBuiltinFunction func, PropertyName* field) {
MOZ_ASSERT(!isFinished());
Global g(Global::MathBuiltinFunction, field);
@ -569,30 +543,16 @@ class js::AsmJSModule
MOZ_ASSERT(imports_.length() == importIndex);
return imports_.emplaceBack(ffiIndex);
}
bool addExport(PropertyName* name, PropertyName* maybeFieldName, uint32_t wasmIndex,
uint32_t funcSrcBegin, uint32_t funcSrcEnd)
{
// NB: funcSrcBegin/funcSrcEnd are given relative to the ScriptSource
// (the entire file) and ExportedFunctions store offsets relative to
// the beginning of the module (so that they are caching-invariant).
bool addExport(PropertyName* name, PropertyName* maybeFieldName, uint32_t begin, uint32_t end) {
// The begin/end offsets are given relative to the ScriptSource (the
// entire file) and ExportedFunctions store offsets relative to the
// beginning of the module (so that they are caching-invariant).
MOZ_ASSERT(!isFinished());
MOZ_ASSERT(srcStart_ < funcSrcBegin);
MOZ_ASSERT(funcSrcBegin < funcSrcEnd);
return exports_.emplaceBack(name, maybeFieldName, wasmIndex,
funcSrcBegin - srcStart_, funcSrcEnd - srcStart_);
}
bool addChangeHeap(uint32_t mask, uint32_t min, uint32_t max) {
MOZ_ASSERT(!isFinished());
MOZ_ASSERT(!pod.hasFixedMinHeapLength_);
MOZ_ASSERT(IsValidAsmJSHeapLength(mask + 1));
MOZ_ASSERT(min >= RoundUpToNextValidAsmJSHeapLength(0));
MOZ_ASSERT(max <= pod.maxHeapLength_);
MOZ_ASSERT(min <= max);
pod.heapLengthMask_ = mask;
pod.minHeapLength_ = min;
pod.maxHeapLength_ = max;
pod.hasFixedMinHeapLength_ = true;
return true;
MOZ_ASSERT(srcStart_ < begin);
MOZ_ASSERT(begin < end);
uint32_t startOffsetInModule = begin - srcStart_;
uint32_t endOffsetInModule = end - srcStart_;
return exports_.emplaceBack(name, maybeFieldName, startOffsetInModule, endOffsetInModule);
}
const GlobalVector& globals() const {
@ -615,16 +575,11 @@ class js::AsmJSModule
bool isSharedView() const {
return pod.isSharedView_;
}
bool tryRequireHeapLengthToBeAtLeast(uint32_t len) {
void requireHeapLengthToBeAtLeast(uint32_t len) {
MOZ_ASSERT(!isFinished());
if (pod.hasFixedMinHeapLength_ && len > pod.minHeapLength_)
return false;
if (len > pod.maxHeapLength_)
return false;
len = RoundUpToNextValidAsmJSHeapLength(len);
if (len > pod.minHeapLength_)
pod.minHeapLength_ = len;
return true;
}
/*************************************************************************/
@ -1720,9 +1675,7 @@ class MOZ_STACK_CLASS ModuleValidator
MathBuiltinFunction,
AtomicsBuiltinFunction,
SimdCtor,
SimdOperation,
ByteLength,
ChangeHeap
SimdOperation
};
private:
@ -1746,10 +1699,6 @@ class MOZ_STACK_CLASS ModuleValidator
AsmJSSimdType type_;
AsmJSSimdOperation which_;
} simdOp;
struct {
uint32_t srcBegin_;
uint32_t srcEnd_;
} changeHeap;
} u;
friend class ModuleValidator;
@ -1827,14 +1776,6 @@ class MOZ_STACK_CLASS ModuleValidator
MOZ_ASSERT(which_ == SimdOperation);
return u.simdOp.type_;
}
uint32_t changeHeapSrcBegin() const {
MOZ_ASSERT(which_ == ChangeHeap);
return u.changeHeap.srcBegin_;
}
uint32_t changeHeapSrcEnd() const {
MOZ_ASSERT(which_ == ChangeHeap);
return u.changeHeap.srcEnd_;
}
};
struct MathBuiltin
@ -1931,8 +1872,6 @@ class MOZ_STACK_CLASS ModuleValidator
uint32_t errorOffset_;
bool errorOverRecursed_;
bool canValidateChangeHeap_;
bool hasChangeHeap_;
bool supportsSimd_;
bool atomicsPresent_;
@ -1955,8 +1894,6 @@ class MOZ_STACK_CLASS ModuleValidator
errorString_(nullptr),
errorOffset_(UINT32_MAX),
errorOverRecursed_(false),
canValidateChangeHeap_(false),
hasChangeHeap_(false),
supportsSimd_(cx->jitSupportsSimd()),
atomicsPresent_(false)
{
@ -2089,8 +2026,12 @@ class MOZ_STACK_CLASS ModuleValidator
JS_ALWAYS_TRUE(tokenStream().peekTokenPos(&pos, TokenStream::Operand));
uint32_t endAfterCurly = pos.end;
auto usesHeap = Module::HeapBool(module_->hasArrayView());
auto sharedHeap = Module::SharedBool(module_->isSharedView());
HeapUsage heapUsage = module_->hasArrayView()
? module_->isSharedView()
? HeapUsage::Shared
: HeapUsage::Unshared
: HeapUsage::None;
auto mutedErrors = Module::MutedBool(parser_.ss->mutedErrors());
CacheableChars filename = make_string_copy(parser_.ss->filename());
@ -2107,8 +2048,7 @@ class MOZ_STACK_CLASS ModuleValidator
}
UniqueStaticLinkData linkData;
Module* wasm = mg_.finish(usesHeap, sharedHeap, mutedErrors,
Move(filename), Move(displayURL),
Module* wasm = mg_.finish(heapUsage, mutedErrors, Move(filename), Move(displayURL),
&linkData, slowFuncs);
if (!wasm)
return false;
@ -2219,23 +2159,6 @@ class MOZ_STACK_CLASS ModuleValidator
return globals_.putNew(var, global) &&
module().addSimdOperation(type, op, opName);
}
bool addByteLength(PropertyName* name) {
canValidateChangeHeap_ = true;
Global* global = validationLifo_.new_<Global>(Global::ByteLength);
return global &&
globals_.putNew(name, global) &&
module().addByteLength();
}
bool addChangeHeap(PropertyName* name, ParseNode* fn, uint32_t mask, uint32_t min, uint32_t max) {
hasChangeHeap_ = true;
Global* global = validationLifo_.new_<Global>(Global::ChangeHeap);
if (!global)
return false;
global->u.changeHeap.srcBegin_ = fn->pn_pos.begin;
global->u.changeHeap.srcEnd_ = fn->pn_pos.end;
return globals_.putNew(name, global) &&
module().addChangeHeap(mask, min, max);
}
bool addArrayViewCtor(PropertyName* var, Scalar::Type vt, PropertyName* field) {
Global* global = validationLifo_.new_<Global>(Global::ArrayViewCtor);
if (!global)
@ -2259,17 +2182,8 @@ class MOZ_STACK_CLASS ModuleValidator
if (!args.appendAll(func.sig().args()))
return false;
MallocSig sig(Move(args), func.sig().ret());
uint32_t wasmIndex;
if (!mg_.declareExport(Move(sig), func.index(), &wasmIndex))
return false;
if (wasmIndex == AsmJSModule::Export::ChangeHeap)
return fail(pn, "too many exports");
return module().addExport(func.name(), maybeFieldName, wasmIndex,
func.srcBegin(), func.srcEnd());
}
bool addChangeHeapExport(PropertyName* name, const Global& g, PropertyName* maybeFieldName) {
return module().addExport(name, maybeFieldName, AsmJSModule::Export::ChangeHeap,
g.changeHeapSrcBegin(), g.changeHeapSrcEnd());
return mg_.declareExport(Move(sig), func.index()) &&
module().addExport(func.name(), maybeFieldName, func.srcBegin(), func.srcEnd());
}
private:
const LifoSig* getLifoSig(const LifoSig& sig) {
@ -2338,19 +2252,13 @@ class MOZ_STACK_CLASS ModuleValidator
module().addImport(ffiIndex, *importIndex);
}
bool tryOnceToValidateChangeHeap() {
bool ret = canValidateChangeHeap_;
canValidateChangeHeap_ = false;
return ret;
}
bool hasChangeHeap() const {
return hasChangeHeap_;
}
bool tryRequireHeapLengthToBeAtLeast(uint32_t len) {
return module().tryRequireHeapLengthToBeAtLeast(len);
}
uint32_t minHeapLength() const {
return module().minHeapLength();
bool tryConstantAccess(uint64_t start, uint64_t width) {
MOZ_ASSERT(UINT64_MAX - start > width);
uint64_t end = start + width;
if (end > uint64_t(INT32_MAX) + 1)
return false;
module().requireHeapLengthToBeAtLeast(end);
return true;
}
bool usesSharedMemory() const {
@ -2822,8 +2730,6 @@ class MOZ_STACK_CLASS FunctionValidator
LocalMap locals_;
LabelMap labels_;
unsigned heapExpressionDepth_;
bool hasAlreadyReturned_;
ExprType ret_;
@ -2833,7 +2739,6 @@ class MOZ_STACK_CLASS FunctionValidator
fn_(fn),
locals_(m.cx()),
labels_(m.cx()),
heapExpressionDepth_(0),
hasAlreadyReturned_(false)
{}
@ -2887,19 +2792,6 @@ class MOZ_STACK_CLASS FunctionValidator
return funcIR().addVariable(init.value());
}
/*************************************************************************/
void enterHeapExpression() {
heapExpressionDepth_++;
}
void leaveHeapExpression() {
MOZ_ASSERT(heapExpressionDepth_ > 0);
heapExpressionDepth_--;
}
bool canCall() const {
return heapExpressionDepth_ == 0 || !m_.hasChangeHeap();
}
/****************************** For consistency of returns in a function */
bool hasAlreadyReturned() const {
@ -3471,8 +3363,6 @@ CheckGlobalDotImport(ModuleValidator& m, PropertyName* varName, ParseNode* initN
return m.addGlobalConstant(varName, GenericNaN(), field);
if (field == m.cx()->names().Infinity)
return m.addGlobalConstant(varName, PositiveInfinity<double>(), field);
if (field == m.cx()->names().byteLength)
return m.addByteLength(varName);
Scalar::Type type;
if (IsArrayViewCtorName(m, field, &type))
@ -3783,8 +3673,6 @@ CheckVarRef(FunctionValidator& f, ParseNode* varRef, Type* type)
case ModuleValidator::Global::ArrayViewCtor:
case ModuleValidator::Global::SimdCtor:
case ModuleValidator::Global::SimdOperation:
case ModuleValidator::Global::ByteLength:
case ModuleValidator::Global::ChangeHeap:
return f.failName(varRef, "'%s' may not be accessed by ordinary expressions", name);
}
return true;
@ -3819,7 +3707,7 @@ FoldMaskedArrayIndex(FunctionValidator& f, ParseNode** indexExpr, int32_t* mask,
// constraint. The unsigned maximum of a masked index is the mask
// itself, so check that the mask is not negative and compare the mask
// to the known minimum heap length.
if (int32_t(mask2) >= 0 && mask2 < f.m().minHeapLength())
if (int32_t(mask2) >= 0 && mask2 < f.m().module().minHeapLength())
*needsBoundsCheck = NO_BOUNDS_CHECK;
*mask &= mask2;
*indexExpr = indexNode;
@ -3849,16 +3737,9 @@ CheckArrayAccess(FunctionValidator& f, ParseNode* viewName, ParseNode* indexExpr
uint32_t index;
if (IsLiteralOrConstInt(f, indexExpr, &index)) {
uint64_t byteOffset = uint64_t(index) << TypedArrayShift(*viewType);
if (byteOffset > INT32_MAX)
if (!f.m().tryConstantAccess(byteOffset, TypedArrayElemSize(*viewType)))
return f.fail(indexExpr, "constant index out of range");
unsigned elementSize = TypedArrayElemSize(*viewType);
if (!f.m().tryRequireHeapLengthToBeAtLeast(byteOffset + elementSize)) {
return f.failf(indexExpr, "constant index outside heap size range declared by the "
"change-heap function (0x%x - 0x%x)",
f.m().minHeapLength(), f.m().module().maxHeapLength());
}
*mask = NoMask;
*needsBoundsCheck = NO_BOUNDS_CHECK;
f.writeInt32Lit(byteOffset);
@ -3886,14 +3767,10 @@ CheckArrayAccess(FunctionValidator& f, ParseNode* viewName, ParseNode* indexExpr
if (pointerNode->isKind(PNK_BITAND))
FoldMaskedArrayIndex(f, &pointerNode, mask, needsBoundsCheck);
f.enterHeapExpression();
Type pointerType;
if (!CheckExpr(f, pointerNode, &pointerType))
return false;
f.leaveHeapExpression();
if (!pointerType.isIntish())
return f.failf(pointerNode, "%s is not a subtype of int", pointerType.toChars());
} else {
@ -3909,14 +3786,10 @@ CheckArrayAccess(FunctionValidator& f, ParseNode* viewName, ParseNode* indexExpr
if (pointerNode->isKind(PNK_BITAND))
folded = FoldMaskedArrayIndex(f, &pointerNode, mask, needsBoundsCheck);
f.enterHeapExpression();
Type pointerType;
if (!CheckExpr(f, pointerNode, &pointerType))
return false;
f.leaveHeapExpression();
if (folded) {
if (!pointerType.isIntish())
return f.failf(pointerNode, "%s is not a subtype of intish", pointerType.toChars());
@ -4010,14 +3883,10 @@ CheckStoreArray(FunctionValidator& f, ParseNode* lhs, ParseNode* rhs, Type* type
if (!CheckAndPrepareArrayAccess(f, ElemBase(lhs), ElemIndex(lhs), &viewType, &needsBoundsCheck, &mask))
return false;
f.enterHeapExpression();
Type rhsType;
if (!CheckExpr(f, rhs, &rhsType))
return false;
f.leaveHeapExpression();
switch (viewType) {
case Scalar::Int8:
case Scalar::Int16:
@ -4657,11 +4526,6 @@ static bool
CheckInternalCall(FunctionValidator& f, ParseNode* callNode, PropertyName* calleeName,
ExprType ret, Type* type)
{
if (!f.canCall()) {
return f.fail(callNode, "call expressions may not be nested inside heap expressions "
"when the module contains a change-heap function");
}
switch (ret) {
case ExprType::Void: f.writeOp(Stmt::CallInternal); break;
case ExprType::I32: f.writeOp(I32::CallInternal); break;
@ -4728,11 +4592,6 @@ CheckFuncPtrTableAgainstExisting(ModuleValidator& m, ParseNode* usepn, PropertyN
static bool
CheckFuncPtrCall(FunctionValidator& f, ParseNode* callNode, ExprType ret, Type* type)
{
if (!f.canCall()) {
return f.fail(callNode, "function-pointer call expressions may not be nested inside heap "
"expressions when the module contains a change-heap function");
}
ParseNode* callee = CallCallee(callNode);
ParseNode* tableNode = ElemBase(callee);
ParseNode* indexExpr = ElemIndex(callee);
@ -4813,11 +4672,6 @@ static bool
CheckFFICall(FunctionValidator& f, ParseNode* callNode, unsigned ffiIndex, ExprType ret,
Type* type)
{
if (!f.canCall()) {
return f.fail(callNode, "FFI call expressions may not be nested inside heap "
"expressions when the module contains a change-heap function");
}
PropertyName* calleeName = CallCallee(callNode)->name();
if (ret == ExprType::F32)
@ -5504,30 +5358,21 @@ CheckSimdLoadStoreArgs(FunctionValidator& f, ParseNode* call, AsmJSSimdType opTy
ParseNode* indexExpr = NextNode(view);
uint32_t indexLit;
if (IsLiteralOrConstInt(f, indexExpr, &indexLit)) {
if (indexLit > INT32_MAX)
if (!f.m().tryConstantAccess(indexLit, Simd128DataSize))
return f.fail(indexExpr, "constant index out of range");
if (!f.m().tryRequireHeapLengthToBeAtLeast(indexLit + Simd128DataSize)) {
return f.failf(indexExpr, "constant index outside heap size range declared by the "
"change-heap function (0x%x - 0x%x)",
f.m().minHeapLength(), f.m().module().maxHeapLength());
}
*needsBoundsCheck = NO_BOUNDS_CHECK;
f.writeInt32Lit(indexLit);
return true;
}
f.enterHeapExpression();
Type indexType;
if (!CheckExpr(f, indexExpr, &indexType))
return false;
if (!indexType.isIntish())
return f.failf(indexExpr, "%s is not a subtype of intish", indexType.toChars());
f.leaveHeapExpression();
return true;
}
@ -5953,8 +5798,6 @@ CheckCoercedCall(FunctionValidator& f, ParseNode* call, ExprType ret, Type* type
case ModuleValidator::Global::FuncPtrTable:
case ModuleValidator::Global::ArrayView:
case ModuleValidator::Global::ArrayViewCtor:
case ModuleValidator::Global::ByteLength:
case ModuleValidator::Global::ChangeHeap:
return f.failName(callee, "'%s' is not callable function", callee->name());
case ModuleValidator::Global::SimdCtor:
case ModuleValidator::Global::SimdOperation:
@ -7020,225 +6863,6 @@ CheckStatement(FunctionValidator& f, ParseNode* stmt)
return f.fail(stmt, "unexpected statement kind");
}
static bool
CheckByteLengthCall(ModuleValidator& m, ParseNode* pn, PropertyName* newBufferName)
{
if (!pn->isKind(PNK_CALL) || !CallCallee(pn)->isKind(PNK_NAME))
return m.fail(pn, "expecting call to imported byteLength");
const ModuleValidator::Global* global = m.lookupGlobal(CallCallee(pn)->name());
if (!global || global->which() != ModuleValidator::Global::ByteLength)
return m.fail(pn, "expecting call to imported byteLength");
if (CallArgListLength(pn) != 1 || !IsUseOfName(CallArgList(pn), newBufferName))
return m.failName(pn, "expecting %s as argument to byteLength call", newBufferName);
return true;
}
static bool
CheckHeapLengthCondition(ModuleValidator& m, ParseNode* cond, PropertyName* newBufferName,
uint32_t* mask, uint32_t* minLength, uint32_t* maxLength)
{
if (!cond->isKind(PNK_OR) || !AndOrLeft(cond)->isKind(PNK_OR))
return m.fail(cond, "expecting byteLength & K || byteLength <= L || byteLength > M");
ParseNode* cond1 = AndOrLeft(AndOrLeft(cond));
ParseNode* cond2 = AndOrRight(AndOrLeft(cond));
ParseNode* cond3 = AndOrRight(cond);
if (!cond1->isKind(PNK_BITAND))
return m.fail(cond1, "expecting byteLength & K");
if (!CheckByteLengthCall(m, BitwiseLeft(cond1), newBufferName))
return false;
ParseNode* maskNode = BitwiseRight(cond1);
if (!IsLiteralInt(m, maskNode, mask))
return m.fail(maskNode, "expecting integer literal mask");
if (*mask == UINT32_MAX)
return m.fail(maskNode, "invalid mask value");
if ((*mask & 0xffffff) != 0xffffff)
return m.fail(maskNode, "mask value must have the bits 0xffffff set");
if (!cond2->isKind(PNK_LE))
return m.fail(cond2, "expecting byteLength <= L");
if (!CheckByteLengthCall(m, RelationalLeft(cond2), newBufferName))
return false;
ParseNode* minLengthNode = RelationalRight(cond2);
uint32_t minLengthExclusive;
if (!IsLiteralInt(m, minLengthNode, &minLengthExclusive))
return m.fail(minLengthNode, "expecting integer literal");
if (minLengthExclusive < 0xffffff || minLengthExclusive == UINT32_MAX)
return m.fail(minLengthNode, "literal must be >= 0xffffff and < 0xffffffff");
// Add one to convert from exclusive (the branch rejects if ==) to inclusive.
*minLength = minLengthExclusive + 1;
if (!cond3->isKind(PNK_GT))
return m.fail(cond3, "expecting byteLength > M");
if (!CheckByteLengthCall(m, RelationalLeft(cond3), newBufferName))
return false;
ParseNode* maxLengthNode = RelationalRight(cond3);
if (!IsLiteralInt(m, maxLengthNode, maxLength))
return m.fail(maxLengthNode, "expecting integer literal");
if (*maxLength > 0x80000000)
return m.fail(maxLengthNode, "literal must be <= 0x80000000");
if (*maxLength < *minLength)
return m.fail(maxLengthNode, "maximum length must be greater or equal to minimum length");
return true;
}
static bool
CheckReturnBoolLiteral(ModuleValidator& m, ParseNode* stmt, bool retval)
{
if (stmt->isKind(PNK_STATEMENTLIST)) {
ParseNode* next = SkipEmptyStatements(ListHead(stmt));
if (!next)
return m.fail(stmt, "expected return statement");
stmt = next;
if (NextNonEmptyStatement(stmt))
return m.fail(stmt, "expected single return statement");
}
if (!stmt->isKind(PNK_RETURN))
return m.fail(stmt, "expected return statement");
ParseNode* returnExpr = ReturnExpr(stmt);
if (!returnExpr || !returnExpr->isKind(retval ? PNK_TRUE : PNK_FALSE))
return m.failf(stmt, "expected 'return %s;'", retval ? "true" : "false");
return true;
}
static bool
CheckReassignmentTo(ModuleValidator& m, ParseNode* stmt, PropertyName* lhsName, ParseNode** rhs)
{
if (!stmt->isKind(PNK_SEMI))
return m.fail(stmt, "missing reassignment");
ParseNode* assign = UnaryKid(stmt);
if (!assign || !assign->isKind(PNK_ASSIGN))
return m.fail(stmt, "missing reassignment");
ParseNode* lhs = BinaryLeft(assign);
if (!IsUseOfName(lhs, lhsName))
return m.failName(lhs, "expecting reassignment of %s", lhsName);
*rhs = BinaryRight(assign);
return true;
}
static bool
CheckChangeHeap(ModuleValidator& m, ParseNode* fn, bool* validated)
{
MOZ_ASSERT(fn->isKind(PNK_FUNCTION));
// We don't yet know whether this is a change-heap function.
// The point at which we know we have a change-heap function is once we see
// whether the argument is coerced according to the normal asm.js rules. If
// it is coerced, it's not change-heap and must validate according to normal
// rules; otherwise it must validate as a change-heap function.
*validated = false;
PropertyName* changeHeapName = FunctionName(fn);
if (!CheckModuleLevelName(m, fn, changeHeapName))
return false;
unsigned numFormals;
ParseNode* arg = FunctionArgsList(fn, &numFormals);
if (numFormals != 1)
return true;
PropertyName* newBufferName;
if (!CheckArgument(m, arg, &newBufferName))
return false;
ParseNode* stmtIter = SkipEmptyStatements(ListHead(FunctionStatementList(fn)));
if (!stmtIter || !stmtIter->isKind(PNK_IF))
return true;
// We can now issue validation failures if we see something that isn't a
// valid change-heap function.
*validated = true;
PropertyName* bufferName = m.module().bufferArgumentName();
if (!bufferName)
return m.fail(fn, "to change heaps, the module must have a buffer argument");
ParseNode* cond = TernaryKid1(stmtIter);
ParseNode* thenStmt = TernaryKid2(stmtIter);
if (ParseNode* elseStmt = TernaryKid3(stmtIter))
return m.fail(elseStmt, "unexpected else statement");
uint32_t mask, min = 0, max; // initialize min to silence GCC warning
if (!CheckHeapLengthCondition(m, cond, newBufferName, &mask, &min, &max))
return false;
if (!CheckReturnBoolLiteral(m, thenStmt, false))
return false;
ParseNode* next = NextNonEmptyStatement(stmtIter);
for (unsigned i = 0; i < m.numArrayViews(); i++, next = NextNonEmptyStatement(stmtIter)) {
if (!next)
return m.failOffset(stmtIter->pn_pos.end, "missing reassignment");
stmtIter = next;
const ModuleValidator::ArrayView& view = m.arrayView(i);
ParseNode* rhs;
if (!CheckReassignmentTo(m, stmtIter, view.name, &rhs))
return false;
if (!rhs->isKind(PNK_NEW))
return m.failName(rhs, "expecting assignment of new array view to %s", view.name);
ParseNode* ctorExpr = ListHead(rhs);
if (!ctorExpr->isKind(PNK_NAME))
return m.fail(rhs, "expecting name of imported typed array constructor");
const ModuleValidator::Global* global = m.lookupGlobal(ctorExpr->name());
if (!global || global->which() != ModuleValidator::Global::ArrayViewCtor)
return m.fail(rhs, "expecting name of imported typed array constructor");
if (global->viewType() != view.type)
return m.fail(rhs, "can't change the type of a global view variable");
if (!CheckNewArrayViewArgs(m, ctorExpr, newBufferName))
return false;
}
if (!next)
return m.failOffset(stmtIter->pn_pos.end, "missing reassignment");
stmtIter = next;
ParseNode* rhs;
if (!CheckReassignmentTo(m, stmtIter, bufferName, &rhs))
return false;
if (!IsUseOfName(rhs, newBufferName))
return m.failName(stmtIter, "expecting assignment of new buffer to %s", bufferName);
next = NextNonEmptyStatement(stmtIter);
if (!next)
return m.failOffset(stmtIter->pn_pos.end, "expected return statement");
stmtIter = next;
if (!CheckReturnBoolLiteral(m, stmtIter, true))
return false;
stmtIter = NextNonEmptyStatement(stmtIter);
if (stmtIter)
return m.fail(stmtIter, "expecting end of function");
return m.addChangeHeap(changeHeapName, fn, mask, min, max);
}
static bool
ParseFunction(ModuleValidator& m, ParseNode** fnOut, unsigned* line, unsigned* column)
{
@ -7319,14 +6943,6 @@ CheckFunction(ModuleValidator& m)
if (!CheckFunctionHead(m, fn))
return false;
if (m.tryOnceToValidateChangeHeap()) {
bool validated;
if (!CheckChangeHeap(m, fn, &validated))
return false;
if (validated)
return true;
}
FunctionValidator f(m, fn);
if (!f.init(FunctionName(fn), line, column))
return m.fail(fn, "internal compiler failure (probably out of memory)");
@ -7493,13 +7109,10 @@ CheckModuleExportFunction(ModuleValidator& m, ParseNode* pn, PropertyName* maybe
if (!global)
return m.failName(pn, "exported function name '%s' not found", funcName);
if (global->which() == ModuleValidator::Global::Function)
return m.addExport(pn, m.function(global->funcIndex()), maybeFieldName);
if (global->which() != ModuleValidator::Global::Function)
return m.failName(pn, "'%s' is not a function", funcName);
if (global->which() == ModuleValidator::Global::ChangeHeap)
return m.addChangeHeapExport(funcName, *global, maybeFieldName);
return m.failName(pn, "'%s' is not a function", funcName);
return m.addExport(pn, m.function(global->funcIndex()), maybeFieldName);
}
static bool
@ -7636,13 +7249,13 @@ CheckModule(ExclusiveContext* cx, AsmJSParser& parser, ParseNode* stmtList, Hand
}
/*****************************************************************************/
// Runtime calls to asm.js module exports
// Link-time validation
static AsmJSModuleObject&
FunctionToModuleObject(JSFunction* fun)
{
MOZ_ASSERT(IsAsmJSFunction(fun) || IsAsmJSModule(fun));
const Value& v = fun->getExtendedSlot(FunctionExtended::ASM_MODULE_SLOT);
const Value& v = fun->getExtendedSlot(FunctionExtended::WASM_MODULE_SLOT);
return v.toObject().as<AsmJSModuleObject>();
}
@ -7650,44 +7263,10 @@ static unsigned
FunctionToExportIndex(JSFunction* fun)
{
MOZ_ASSERT(IsAsmJSFunction(fun));
const Value& v = fun->getExtendedSlot(FunctionExtended::ASM_EXPORT_INDEX_SLOT);
const Value& v = fun->getExtendedSlot(FunctionExtended::WASM_EXPORT_INDEX_SLOT);
return v.toInt32();
}
static bool
ChangeHeap(JSContext* cx, AsmJSModule& module, const CallArgs& args)
{
HandleValue bufferArg = args.get(0);
if (!IsArrayBuffer(bufferArg)) {
ReportIncompatible(cx, args);
return false;
}
Rooted<ArrayBufferObject*> newBuffer(cx, &bufferArg.toObject().as<ArrayBufferObject>());
uint32_t heapLength = newBuffer->byteLength();
if (heapLength & module.heapLengthMask() ||
heapLength < module.minHeapLength() ||
heapLength > module.maxHeapLength())
{
args.rval().set(BooleanValue(false));
return true;
}
if (!module.hasArrayView()) {
args.rval().set(BooleanValue(true));
return true;
}
MOZ_ASSERT(IsValidAsmJSHeapLength(heapLength));
bool useSignalHandlers = module.wasmModule().compileArgs().useSignalHandlersForOOB;
if (!ArrayBufferObject::prepareForAsmJS(cx, newBuffer, useSignalHandlers))
return false;
args.rval().set(BooleanValue(module.wasmModule().changeHeap(newBuffer, cx)));
return true;
}
static bool
CallAsmJS(JSContext* cx, unsigned argc, Value* vp)
{
@ -7695,18 +7274,11 @@ CallAsmJS(JSContext* cx, unsigned argc, Value* vp)
RootedFunction callee(cx, &args.callee().as<JSFunction>());
AsmJSModule& module = FunctionToModuleObject(callee).module();
const AsmJSModule::Export& exp = module.exports()[FunctionToExportIndex(callee)];
uint32_t exportIndex = FunctionToExportIndex(callee);
// The heap-changing function is a special-case and is implemented by C++.
if (exp.isChangeHeap())
return ChangeHeap(cx, module, args);
return module.wasmModule().callExport(cx, exp.wasmIndex(), args);
return module.wasmModule().callExport(cx, exportIndex, args);
}
/*****************************************************************************/
// Link-time validation
static bool
LinkFail(JSContext* cx, const char* str)
{
@ -7885,30 +7457,6 @@ ValidateArrayView(JSContext* cx, const AsmJSModule::Global& global, HandleValue
return true;
}
static bool
ValidateByteLength(JSContext* cx, HandleValue globalVal)
{
RootedPropertyName field(cx, cx->names().byteLength);
RootedValue v(cx);
if (!GetDataProperty(cx, globalVal, field, &v))
return false;
if (!v.isObject() || !v.toObject().isBoundFunction())
return LinkFail(cx, "byteLength must be a bound function object");
RootedFunction fun(cx, &v.toObject().as<JSFunction>());
RootedValue boundTarget(cx, ObjectValue(*fun->getBoundFunctionTarget()));
if (!IsNativeFunction(boundTarget, fun_call))
return LinkFail(cx, "bound target of byteLength must be Function.prototype.call");
RootedValue boundThis(cx, fun->getBoundFunctionThis());
if (!IsNativeFunction(boundThis, ArrayBufferObject::byteLengthGetter))
return LinkFail(cx, "bound this value must be ArrayBuffer.protototype.byteLength accessor");
return true;
}
static bool
ValidateMathBuiltinFunction(JSContext* cx, const AsmJSModule::Global& global, HandleValue globalVal)
{
@ -8151,20 +7699,12 @@ CheckBuffer(JSContext* cx, AsmJSModule& module, HandleValue bufferVal,
if (heapLength < module.minHeapLength()) {
UniqueChars msg(
JS_smprintf("ArrayBuffer byteLength of 0x%x is less than 0x%x (the size implied "
"by const heap accesses and/or change-heap minimum-length requirements).",
"by const heap accesses).",
heapLength,
module.minHeapLength()));
return LinkFail(cx, msg.get());
}
if (heapLength > module.maxHeapLength()) {
UniqueChars msg(
JS_smprintf("ArrayBuffer byteLength 0x%x is greater than maximum length of 0x%x",
heapLength,
module.maxHeapLength()));
return LinkFail(cx, msg.get());
}
// Shell builtins may have disabled signal handlers since the module we're
// cloning was compiled. LookupAsmJSModuleInCache checks for signal handlers
// as well for the caching case.
@ -8211,10 +7751,6 @@ DynamicallyLinkModule(JSContext* cx, const CallArgs& args, AsmJSModule& module)
if (!ValidateArrayView(cx, global, globalVal))
return false;
break;
case AsmJSModule::Global::ByteLength:
if (!ValidateByteLength(cx, globalVal))
return false;
break;
case AsmJSModule::Global::MathBuiltinFunction:
if (!ValidateMathBuiltinFunction(cx, global, globalVal))
return false;
@ -8251,9 +7787,7 @@ static JSFunction*
NewExportedFunction(JSContext* cx, const AsmJSModule& module, const AsmJSModule::Export& func,
HandleObject moduleObj, unsigned exportIndex)
{
unsigned numArgs = func.isChangeHeap()
? 1
: module.wasmModule().exports()[func.wasmIndex()].sig().args().length();
unsigned numArgs = module.wasmModule().exports()[exportIndex].sig().args().length();
RootedPropertyName name(cx, func.name());
JSFunction* fun =
@ -8263,8 +7797,8 @@ NewExportedFunction(JSContext* cx, const AsmJSModule& module, const AsmJSModule:
if (!fun)
return nullptr;
fun->setExtendedSlot(FunctionExtended::ASM_MODULE_SLOT, ObjectValue(*moduleObj));
fun->setExtendedSlot(FunctionExtended::ASM_EXPORT_INDEX_SLOT, Int32Value(exportIndex));
fun->setExtendedSlot(FunctionExtended::WASM_MODULE_SLOT, ObjectValue(*moduleObj));
fun->setExtendedSlot(FunctionExtended::WASM_EXPORT_INDEX_SLOT, Int32Value(exportIndex));
return fun;
}
@ -8434,7 +7968,7 @@ NewModuleFunction(ExclusiveContext* cx, JSFunction* origFun, HandleObject module
if (!moduleFun)
return nullptr;
moduleFun->setExtendedSlot(FunctionExtended::ASM_MODULE_SLOT, ObjectValue(*moduleObj));
moduleFun->setExtendedSlot(FunctionExtended::WASM_MODULE_SLOT, ObjectValue(*moduleObj));
return moduleFun;
}
@ -9390,14 +8924,3 @@ js::RoundUpToNextValidAsmJSHeapLength(uint32_t length)
MOZ_ASSERT(length <= 0xff000000);
return (length + 0x00ffffff) & ~0x00ffffff;
}
bool
js::OnDetachAsmJSArrayBuffer(JSContext* cx, Handle<ArrayBufferObject*> buffer)
{
for (Module* m = cx->runtime()->linkedWasmModules; m; m = m->nextLinked()) {
if (buffer == m->maybeBuffer() && !m->detachHeap(cx))
return false;
}
return true;
}
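With the change-heap machinery (changeHeap, detachHeap, OnDetachAsmJSArrayBuffer) removed above, constant heap accesses no longer need the min/max range checks tied to a change-heap function; the validator's new tryConstantAccess (earlier in this file's diff) only has to enforce an int32-sized offset and raise the module's minimum heap length. A minimal standalone sketch of that rule, using assumed names rather than the exact SpiderMonkey signatures:

#include <stdint.h>

// Sketch only: a constant asm.js heap access of `width` bytes starting at byte
// offset `start` validates when the whole access fits in the int32 index
// space; it also raises the minimum heap length checked at link time.
static bool
TryConstantAccess(uint64_t start, uint64_t width, uint32_t* minHeapLength)
{
    uint64_t end = start + width;            // start and width are < 2^32, so no overflow
    if (end > uint64_t(INT32_MAX) + 1)
        return false;                        // constant index out of range
    if (end > *minHeapLength)
        *minHeapLength = uint32_t(end);      // the real code also rounds this up to a
                                             // valid asm.js heap length
    return true;
}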

View file

@ -109,9 +109,6 @@ IsValidAsmJSHeapLength(uint32_t length);
extern uint32_t
RoundUpToNextValidAsmJSHeapLength(uint32_t length);
extern bool
OnDetachAsmJSArrayBuffer(JSContext* cx, Handle<ArrayBufferObject*> buffer);
// The assumed page size; dynamically checked in CompileAsmJS.
#ifdef _MIPS_ARCH_LOONGSON3A
static const size_t AsmJSPageSize = 16384;

View file

@ -312,9 +312,8 @@ ModuleGenerator::defineImport(uint32_t index, ProfilingOffsets interpExit, Profi
}
bool
ModuleGenerator::declareExport(MallocSig&& sig, uint32_t funcIndex, uint32_t* index)
ModuleGenerator::declareExport(MallocSig&& sig, uint32_t funcIndex)
{
*index = exports_.length();
return exports_.emplaceBack(Move(sig), funcIndex);
}
@ -499,8 +498,7 @@ ModuleGenerator::defineOutOfBoundsStub(Offsets offsets)
}
Module*
ModuleGenerator::finish(Module::HeapBool usesHeap,
Module::SharedBool sharedHeap,
ModuleGenerator::finish(HeapUsage heapUsage,
Module::MutedBool mutedErrors,
CacheableChars filename,
CacheableTwoByteChars displayURL,
@ -510,7 +508,7 @@ ModuleGenerator::finish(Module::HeapBool usesHeap,
MOZ_ASSERT(!activeFunc_);
MOZ_ASSERT(finishedFuncs_);
if (!GenerateStubs(*this, usesHeap))
if (!GenerateStubs(*this, UsesHeap(heapUsage)))
return nullptr;
masm_.finish();
@ -616,8 +614,7 @@ ModuleGenerator::finish(Module::HeapBool usesHeap,
funcBytes_,
codeBytes,
globalBytes_,
usesHeap,
sharedHeap,
heapUsage,
mutedErrors,
Move(code),
Move(imports_),

View file

@ -123,7 +123,7 @@ class MOZ_STACK_CLASS ModuleGenerator
bool defineImport(uint32_t index, ProfilingOffsets interpExit, ProfilingOffsets jitExit);
// Exports:
bool declareExport(MallocSig&& sig, uint32_t funcIndex, uint32_t* index);
bool declareExport(MallocSig&& sig, uint32_t funcIndex);
uint32_t numDeclaredExports() const;
uint32_t exportFuncIndex(uint32_t index) const;
const MallocSig& exportSig(uint32_t index) const;
@ -147,8 +147,7 @@ class MOZ_STACK_CLASS ModuleGenerator
// Null return indicates failure. The caller must immediately root a
// non-null return value.
Module* finish(Module::HeapBool usesHeap,
Module::SharedBool sharedHeap,
Module* finish(HeapUsage heapUsage,
Module::MutedBool mutedErrors,
CacheableChars filename,
CacheableTwoByteChars displayURL,

View file

@ -511,8 +511,9 @@ Module::activation()
void
Module::specializeToHeap(ArrayBufferObjectMaybeShared* heap)
{
MOZ_ASSERT(usesHeap());
MOZ_ASSERT_IF(heap->is<ArrayBufferObject>(), heap->as<ArrayBufferObject>().isAsmJS());
MOZ_ASSERT(!maybeHeap_);
MOZ_ASSERT(!heap_);
MOZ_ASSERT(!rawHeapPtr());
uint8_t* ptrBase = heap->dataPointerEither().unwrap(/*safe - protected by Module methods*/);
@ -550,14 +551,17 @@ Module::specializeToHeap(ArrayBufferObjectMaybeShared* heap)
Assembler::UpdateBoundsCheck(heapLength, (Instruction*)(access.insnOffset() + code()));
#endif
maybeHeap_ = heap;
heap_ = heap;
rawHeapPtr() = ptrBase;
}
void
Module::despecializeFromHeap(ArrayBufferObjectMaybeShared* heap)
{
MOZ_ASSERT_IF(maybeHeap_, maybeHeap_ == heap);
// heap_/rawHeapPtr can be null if this module holds cloned code from
// another dynamically-linked module which we are despecializing from that
// module's heap.
MOZ_ASSERT_IF(heap_, heap_ == heap);
MOZ_ASSERT_IF(rawHeapPtr(), rawHeapPtr() == heap->dataPointerEither().unwrap());
#if defined(JS_CODEGEN_X86)
@ -581,7 +585,7 @@ Module::despecializeFromHeap(ArrayBufferObjectMaybeShared* heap)
}
#endif
maybeHeap_ = nullptr;
heap_ = nullptr;
rawHeapPtr() = nullptr;
}
@ -700,7 +704,7 @@ Module::importToExit(const Import& import)
/* static */ Module::CacheablePod
Module::zeroPod()
{
CacheablePod pod = {0, 0, 0, false, false, false, false, false};
CacheablePod pod = {0, 0, 0, HeapUsage::None, false, false, false};
return pod;
}
@ -711,9 +715,6 @@ Module::init()
interrupt_ = nullptr;
outOfBounds_ = nullptr;
dynamicallyLinked_ = false;
prev_ = nullptr;
next_ = nullptr;
interrupted_ = false;
*(double*)(globalData() + NaN64GlobalDataOffset) = GenericNaN();
*(float*)(globalData() + NaN32GlobalDataOffset) = GenericNaN();
@ -757,8 +758,7 @@ Module::Module(CompileArgs args,
uint32_t functionBytes,
uint32_t codeBytes,
uint32_t globalBytes,
HeapBool usesHeap,
SharedBool sharedHeap,
HeapUsage heapUsage,
MutedBool mutedErrors,
UniqueCodePtr code,
ImportVector&& imports,
@ -786,20 +786,16 @@ Module::Module(CompileArgs args,
const_cast<uint32_t&>(pod.functionBytes_) = functionBytes;
const_cast<uint32_t&>(pod.codeBytes_) = codeBytes;
const_cast<uint32_t&>(pod.globalBytes_) = globalBytes;
const_cast<bool&>(pod.usesHeap_) = bool(usesHeap);
const_cast<bool&>(pod.sharedHeap_) = bool(sharedHeap);
const_cast<HeapUsage&>(pod.heapUsage_) = heapUsage;
const_cast<bool&>(pod.mutedErrors_) = bool(mutedErrors);
const_cast<bool&>(pod.usesSignalHandlersForOOB_) = args.useSignalHandlersForOOB;
const_cast<bool&>(pod.usesSignalHandlersForInterrupt_) = args.useSignalHandlersForInterrupt;
MOZ_ASSERT_IF(sharedHeap, usesHeap);
init();
}
Module::~Module()
{
MOZ_ASSERT(!interrupted_);
if (code_) {
for (unsigned i = 0; i < imports_.length(); i++) {
ImportExit& exit = importToExit(imports_[i]);
@ -807,11 +803,6 @@ Module::~Module()
exit.baselineScript->removeDependentWasmModule(*this, i);
}
}
if (prev_)
*prev_ = next_;
if (next_)
next_->prev_ = prev_;
}
void
@ -822,8 +813,8 @@ Module::trace(JSTracer* trc)
TraceEdge(trc, &importToExit(import).fun, "wasm function import");
}
if (maybeHeap_)
TraceEdge(trc, &maybeHeap_, "wasm buffer");
if (heap_)
TraceEdge(trc, &heap_, "wasm buffer");
}
CompileArgs
@ -982,13 +973,6 @@ Module::dynamicallyLink(JSContext* cx, Handle<ArrayBufferObjectMaybeShared*> hea
MOZ_ASSERT(!dynamicallyLinked_);
dynamicallyLinked_ = true;
// Add this module to the JSRuntime-wide list of dynamically-linked modules.
next_ = cx->runtime()->linkedWasmModules;
prev_ = &cx->runtime()->linkedWasmModules;
cx->runtime()->linkedWasmModules = this;
if (next_)
next_->prev_ = &next_;
// Push a JitContext for benefit of IsCompilingAsmJS and flush the ICache.
// We've been inhibiting flushing up to this point so flush it all now.
JitContext jcx(CompileRuntime::get(cx->compartment()->runtimeFromAnyThread()));
@ -1007,7 +991,7 @@ Module::dynamicallyLink(JSContext* cx, Handle<ArrayBufferObjectMaybeShared*> hea
}
// Specialize code to the actual heap.
if (heap)
if (usesHeap())
specializeToHeap(heap);
// See AllocateCode comment above.
@ -1017,19 +1001,13 @@ Module::dynamicallyLink(JSContext* cx, Handle<ArrayBufferObjectMaybeShared*> hea
return true;
}
ArrayBufferObjectMaybeShared*
Module::maybeBuffer() const
{
MOZ_ASSERT(dynamicallyLinked_);
return maybeHeap_;
}
SharedMem<uint8_t*>
Module::maybeHeap() const
Module::heap() const
{
MOZ_ASSERT(dynamicallyLinked_);
MOZ_ASSERT_IF(!pod.usesHeap_, rawHeapPtr() == nullptr);
return pod.sharedHeap_
MOZ_ASSERT(usesHeap());
MOZ_ASSERT(rawHeapPtr());
return hasSharedHeap()
? SharedMem<uint8_t*>::shared(rawHeapPtr())
: SharedMem<uint8_t*>::unshared(rawHeapPtr());
}
@ -1038,7 +1016,8 @@ size_t
Module::heapLength() const
{
MOZ_ASSERT(dynamicallyLinked_);
return maybeHeap_ ? maybeHeap_->byteLength() : 0;
MOZ_ASSERT(usesHeap());
return heap_->byteLength();
}
void
@ -1051,65 +1030,6 @@ Module::deoptimizeImportExit(uint32_t importIndex)
exit.baselineScript = nullptr;
}
bool
Module::changeHeap(Handle<ArrayBufferObject*> newHeap, JSContext* cx)
{
MOZ_ASSERT(dynamicallyLinked_);
MOZ_ASSERT(pod.usesHeap_);
// Content JS should not be able to run (and change heap) from within an
// interrupt callback, but in case it does, fail to change heap. Otherwise,
// the heap can change at every single instruction which would prevent
// future optimizations like heap-base hoisting.
if (interrupted_)
return false;
AutoMutateCode amc(cx, *this, "Module::changeHeap");
if (maybeHeap_)
despecializeFromHeap(maybeHeap_);
specializeToHeap(newHeap);
return true;
}
bool
Module::detachHeap(JSContext* cx)
{
MOZ_ASSERT(dynamicallyLinked_);
MOZ_ASSERT(pod.usesHeap_);
// Content JS should not be able to run (and detach heap) from within an
// interrupt callback, but in case it does, fail. Otherwise, the heap can
// change at an arbitrary instruction and break the assumption below.
if (interrupted_) {
JS_ReportError(cx, "attempt to detach from inside interrupt handler");
return false;
}
// Even if this->active(), to reach here, the activation must have called
// out via an import exit stub. FFI stubs check if heapDatum() is null on
// reentry and throw an exception if so.
MOZ_ASSERT_IF(activation(), activation()->exitReason() == ExitReason::ImportJit ||
activation()->exitReason() == ExitReason::ImportInterp);
AutoMutateCode amc(cx, *this, "Module::detachHeap");
despecializeFromHeap(maybeHeap_);
return true;
}
void
Module::setInterrupted(bool interrupted)
{
MOZ_ASSERT(dynamicallyLinked_);
interrupted_ = interrupted;
}
Module*
Module::nextLinked() const
{
MOZ_ASSERT(dynamicallyLinked_);
return next_;
}
bool
Module::callExport(JSContext* cx, uint32_t exportIndex, CallArgs args)
{
@ -1179,17 +1099,6 @@ Module::callExport(JSContext* cx, uint32_t exportIndex, CallArgs args)
}
}
// The correct way to handle this situation would be to allocate a new range
// of PROT_NONE memory and module.changeHeap to this memory. That would
// cause every access to take the out-of-bounds signal-handler path which
// does the right thing. For now, just throw an out-of-memory exception
// since these can technically pop out anywhere and the full fix may
// actually OOM when trying to allocate the PROT_NONE memory.
if (usesHeap() && !maybeHeap_) {
JS_ReportErrorNumber(cx, GetErrorMessage, nullptr, JSMSG_OUT_OF_MEMORY);
return false;
}
{
// Push a WasmActivation to describe the wasm frames we're about to push
// when running this module. Additionally, push a JitActivation so that
@ -1518,8 +1427,8 @@ Module::clone(JSContext* cx, const StaticLinkData& linkData) const
// If the copied machine code has been specialized to the heap, it must be
// unspecialized in the copy.
if (maybeHeap_)
out->despecializeFromHeap(maybeHeap_);
if (usesHeap())
out->despecializeFromHeap(heap_);
if (!out->staticallyLink(cx, linkData))
return nullptr;

View file

@ -325,6 +325,22 @@ typedef JS::UniquePtr<uint8_t, CodeDeleter> UniqueCodePtr;
UniqueCodePtr
AllocateCode(ExclusiveContext* cx, size_t bytes);
// A wasm module can either use no heap, an unshared heap (ArrayBuffer), or a shared
// heap (SharedArrayBuffer).
enum class HeapUsage
{
None = false,
Unshared = 1,
Shared = 2
};
static inline bool
UsesHeap(HeapUsage heapUsage)
{
return bool(heapUsage);
}
// Module represents a compiled WebAssembly module which lives until the last
// reference to any exported functions is dropped. Modules must be wrapped by a
// rooted JSObject immediately after creation so that Module::trace() is called
@ -375,8 +391,7 @@ class Module
const uint32_t functionBytes_;
const uint32_t codeBytes_;
const uint32_t globalBytes_;
const bool usesHeap_;
const bool sharedHeap_;
const HeapUsage heapUsage_;
const bool mutedErrors_;
const bool usesSignalHandlersForOOB_;
const bool usesSignalHandlersForInterrupt_;
@ -400,14 +415,11 @@ class Module
// Initialized during dynamicallyLink:
bool dynamicallyLinked_;
BufferPtr maybeHeap_;
Module** prev_;
Module* next_;
BufferPtr heap_;
// Mutated after dynamicallyLink:
bool profilingEnabled_;
FuncLabelVector funcLabels_;
bool interrupted_;
class AutoMutateCode;
@ -448,16 +460,13 @@ class Module
static const unsigned OffsetOfImportExitFun = offsetof(ImportExit, fun);
static const unsigned SizeOfEntryArg = sizeof(EntryArg);
enum HeapBool { DoesntUseHeap = false, UsesHeap = true };
enum SharedBool { UnsharedHeap = false, SharedHeap = true };
enum MutedBool { DontMuteErrors = false, MuteErrors = true };
Module(CompileArgs args,
uint32_t functionBytes,
uint32_t codeBytes,
uint32_t globalBytes,
HeapBool usesHeap,
SharedBool sharedHeap,
HeapUsage heapUsage,
MutedBool mutedErrors,
UniqueCodePtr code,
ImportVector&& imports,
@ -474,8 +483,9 @@ class Module
uint8_t* code() const { return code_.get(); }
uint8_t* globalData() const { return code() + pod.codeBytes_; }
uint32_t globalBytes() const { return pod.globalBytes_; }
bool usesHeap() const { return pod.usesHeap_; }
bool sharedHeap() const { return pod.sharedHeap_; }
HeapUsage heapUsage() const { return pod.heapUsage_; }
bool usesHeap() const { return UsesHeap(pod.heapUsage_); }
bool hasSharedHeap() const { return pod.heapUsage_ == HeapUsage::Shared; }
bool mutedErrors() const { return pod.mutedErrors_; }
CompileArgs compileArgs() const;
const ImportVector& imports() const { return imports_; }
@ -515,21 +525,9 @@ class Module
// The wasm heap, established by dynamicallyLink.
ArrayBufferObjectMaybeShared* maybeBuffer() const;
SharedMem<uint8_t*> maybeHeap() const;
SharedMem<uint8_t*> heap() const;
size_t heapLength() const;
// asm.js may detach and change the heap at any time. As an internal detail,
// the heap may not be changed while the module has been asynchronously
// interrupted.
//
// N.B. These methods and asm.js change-heap support will be removed soon.
bool changeHeap(Handle<ArrayBufferObject*> newBuffer, JSContext* cx);
bool detachHeap(JSContext* cx);
void setInterrupted(bool interrupted);
Module* nextLinked() const;
// The exports of a wasm module are called by preparing an array of
// arguments (coerced to the corresponding types of the Export signature)
// and calling the export's entry trampoline.
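For reference, the header hunks above collapse the old (usesHeap, sharedHeap) bool pair into the three-state HeapUsage enum. A small illustrative helper, not taken from the tree, showing the mapping a call site such as the asm.js validator performs:

enum class HeapUsage { None = 0, Unshared = 1, Shared = 2 };

static inline bool
UsesHeap(HeapUsage usage)
{
    return usage != HeapUsage::None;
}

// Hypothetical bridge from the old two-bool form to the enum.
static inline HeapUsage
HeapUsageFromBools(bool usesHeap, bool sharedHeap)
{
    if (!usesHeap)
        return HeapUsage::None;
    return sharedHeap ? HeapUsage::Shared : HeapUsage::Unshared;
}

Keeping UsesHeap(heapUsage) as the single query for "has a heap at all" is what lets specializeToHeap() and heap() in the .cpp hunks assert usesHeap() outright instead of tolerating a null maybeHeap_.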

View file

@ -627,7 +627,7 @@ EmulateHeapAccess(EMULATOR_CONTEXT* context, uint8_t* pc, uint8_t* faultingAddre
uintptr_t base;
StoreValueFromGPReg(SharedMem<void*>::unshared(&base), sizeof(uintptr_t),
AddressOfGPRegisterSlot(context, address.base()));
MOZ_RELEASE_ASSERT(reinterpret_cast<uint8_t*>(base) == module.maybeHeap());
MOZ_RELEASE_ASSERT(reinterpret_cast<uint8_t*>(base) == module.heap());
}
if (address.hasIndex()) {
uintptr_t index;
@ -645,11 +645,11 @@ EmulateHeapAccess(EMULATOR_CONTEXT* context, uint8_t* pc, uint8_t* faultingAddre
MOZ_RELEASE_ASSERT(size_t(faultingAddress - accessAddress) < access.size(),
"Given faulting address does not appear to be within computed "
"faulting address range");
MOZ_RELEASE_ASSERT(accessAddress >= module.maybeHeap(),
MOZ_RELEASE_ASSERT(accessAddress >= module.heap(),
"Access begins outside the asm.js heap");
MOZ_RELEASE_ASSERT(accessAddress + access.size() <= module.maybeHeap() + AsmJSMappedSize,
MOZ_RELEASE_ASSERT(accessAddress + access.size() <= module.heap() + AsmJSMappedSize,
"Access extends beyond the asm.js heap guard region");
MOZ_RELEASE_ASSERT(accessAddress + access.size() > module.maybeHeap() + module.heapLength(),
MOZ_RELEASE_ASSERT(accessAddress + access.size() > module.heap() + module.heapLength(),
"Computed access address is not actually out of bounds");
// The basic sandbox model is that all heap accesses are a heap base
@ -666,7 +666,7 @@ EmulateHeapAccess(EMULATOR_CONTEXT* context, uint8_t* pc, uint8_t* faultingAddre
//
// Taking a signal is really slow, but in theory programs really shouldn't
// be hitting this anyway.
intptr_t unwrappedOffset = accessAddress - module.maybeHeap().unwrap(/*safe - for value*/);
intptr_t unwrappedOffset = accessAddress - module.heap().unwrap(/*safe - for value*/);
uint32_t wrappedOffset = uint32_t(unwrappedOffset);
size_t size = access.size();
MOZ_RELEASE_ASSERT(wrappedOffset + size > wrappedOffset);
@ -684,10 +684,10 @@ EmulateHeapAccess(EMULATOR_CONTEXT* context, uint8_t* pc, uint8_t* faultingAddre
// We now know that this is an access that is actually in bounds when
// properly wrapped. Complete the load or store with the wrapped
// address.
SharedMem<uint8_t*> wrappedAddress = module.maybeHeap() + wrappedOffset;
MOZ_RELEASE_ASSERT(wrappedAddress >= module.maybeHeap());
SharedMem<uint8_t*> wrappedAddress = module.heap() + wrappedOffset;
MOZ_RELEASE_ASSERT(wrappedAddress >= module.heap());
MOZ_RELEASE_ASSERT(wrappedAddress + size > wrappedAddress);
MOZ_RELEASE_ASSERT(wrappedAddress + size <= module.maybeHeap() + module.heapLength());
MOZ_RELEASE_ASSERT(wrappedAddress + size <= module.heap() + module.heapLength());
switch (access.kind()) {
case Disassembler::HeapAccess::Load:
SetRegisterToLoadedValue(context, wrappedAddress.cast<void*>(), size, access.otherOperand());
@ -762,9 +762,9 @@ HandleFault(PEXCEPTION_POINTERS exception)
// These checks aren't necessary, but, since we can, check anyway to make
// sure we aren't covering up a real bug.
uint8_t* faultingAddress = reinterpret_cast<uint8_t*>(record->ExceptionInformation[1]);
if (!module.maybeHeap() ||
faultingAddress < module.maybeHeap() ||
faultingAddress >= module.maybeHeap() + AsmJSMappedSize)
if (!module.usesHeap() ||
faultingAddress < module.heap() ||
faultingAddress >= module.heap() + AsmJSMappedSize)
{
return false;
}
@ -907,9 +907,9 @@ HandleMachException(JSRuntime* rt, const ExceptionRequest& request)
// These checks aren't necessary, but, since we can, check anyway to make
// sure we aren't covering up a real bug.
uint8_t* faultingAddress = reinterpret_cast<uint8_t*>(request.body.code[1]);
if (!module.maybeHeap() ||
faultingAddress < module.maybeHeap() ||
faultingAddress >= module.maybeHeap() + AsmJSMappedSize)
if (!module.usesHeap() ||
faultingAddress < module.heap() ||
faultingAddress >= module.heap() + AsmJSMappedSize)
{
return false;
}
@ -1117,9 +1117,9 @@ HandleFault(int signum, siginfo_t* info, void* ctx)
// These checks aren't necessary, but, since we can, check anyway to make
// sure we aren't covering up a real bug.
uint8_t* faultingAddress = reinterpret_cast<uint8_t*>(info->si_addr);
if (!module.maybeHeap() ||
faultingAddress < module.maybeHeap() ||
faultingAddress >= module.maybeHeap() + AsmJSMappedSize)
if (!module.usesHeap() ||
faultingAddress < module.heap() ||
faultingAddress >= module.heap() + AsmJSMappedSize)
{
return false;
}
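The hunks in this signal-handler file mostly rename maybeHeap() to heap() and turn the null-pointer guard into a usesHeap() check, but they sit in the path described by the "basic sandbox model" comment: a faulting address is wrapped back to a 32-bit heap offset and, if that wrapped offset is in bounds, the access is emulated there. A simplified sketch of the wrap-and-check step, with assumed names and none of the register/context plumbing:

#include <stddef.h>
#include <stdint.h>

// Sketch: decide whether a faulting access can be redirected back into the
// heap by wrapping its offset into the 32-bit asm.js index space.
static bool
WrapOutOfBoundsAccess(uint8_t* heapBase, size_t heapLength,
                      uint8_t* accessAddress, size_t accessSize,
                      uint32_t* wrappedOffset)
{
    intptr_t unwrapped = accessAddress - heapBase;   // can be far past heapLength on x64
    uint32_t wrapped = uint32_t(unwrapped);          // wrap to the index space
    if (uint64_t(wrapped) + accessSize > heapLength)
        return false;                                // genuinely out of bounds
    *wrappedOffset = wrapped;                        // emulate at heapBase + wrapped
    return true;
}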

View file

@ -99,7 +99,7 @@ static const unsigned FramePushedForEntrySP = FramePushedAfterSave + sizeof(void
// function has an ABI derived from its specific signature, so this function
// must map from the ABI of CodePtr to the export's signature's ABI.
static bool
GenerateEntry(ModuleGenerator& mg, unsigned exportIndex, Module::HeapBool usesHeap)
GenerateEntry(ModuleGenerator& mg, unsigned exportIndex, bool usesHeap)
{
MacroAssembler& masm = mg.masm();
const MallocSig& sig = mg.exportSig(exportIndex);
@ -332,30 +332,12 @@ FillArgumentArray(MacroAssembler& masm, const MallocSig::ArgVector& args, unsign
}
}
// If an import call detaches its heap (viz., via ArrayBuffer.transfer), it must
// call change-heap to another heap (viz., the new heap returned by transfer)
// before returning to asm.js code. If the application fails to do this (if the
// heap pointer is null), jump to a stub.
static void
CheckForHeapDetachment(MacroAssembler& masm, Register scratch, Label* onDetached)
{
MOZ_ASSERT(int(masm.framePushed()) >= int(ShadowStackSpace));
AssertStackAlignment(masm, ABIStackAlignment);
#if defined(JS_CODEGEN_X86)
CodeOffset offset = masm.movlWithPatch(PatchedAbsoluteAddress(), scratch);
masm.append(AsmJSGlobalAccess(offset, HeapGlobalDataOffset));
masm.branchTestPtr(Assembler::Zero, scratch, scratch, onDetached);
#else
masm.branchTestPtr(Assembler::Zero, HeapReg, HeapReg, onDetached);
#endif
}
// Generate a stub that is called via the internal ABI derived from the
// signature of the import and calls into an appropriate InvokeImport C++
// function, having boxed all the ABI arguments into a homogeneous Value array.
static bool
GenerateInterpExitStub(ModuleGenerator& mg, unsigned importIndex, Module::HeapBool usesHeap,
Label* throwLabel, Label* onDetached, ProfilingOffsets* offsets)
GenerateInterpExitStub(ModuleGenerator& mg, unsigned importIndex, Label* throwLabel,
ProfilingOffsets* offsets)
{
MacroAssembler& masm = mg.masm();
const MallocSig& sig = mg.importSig(importIndex);
@ -440,13 +422,6 @@ GenerateInterpExitStub(ModuleGenerator& mg, unsigned importIndex, Module::HeapBo
MOZ_CRASH("SIMD types shouldn't be returned from a FFI");
}
// The heap pointer may have changed during the FFI, so reload it and test
// for detachment.
if (usesHeap) {
masm.loadAsmJSHeapRegisterFromGlobalData();
CheckForHeapDetachment(masm, ABIArgGenerator::NonReturn_VolatileReg0, onDetached);
}
GenerateExitEpilogue(masm, framePushed, ExitReason::ImportInterp, offsets);
if (masm.oom())
@ -466,8 +441,8 @@ static const unsigned MaybeSavedGlobalReg = 0;
// signature of the import and calls into a compatible JIT function,
// having boxed all the ABI arguments into the JIT stack frame layout.
static bool
GenerateJitExitStub(ModuleGenerator& mg, unsigned importIndex, Module::HeapBool usesHeap,
Label* throwLabel, Label* onDetached, ProfilingOffsets* offsets)
GenerateJitExitStub(ModuleGenerator& mg, unsigned importIndex, bool usesHeap,
Label* throwLabel, ProfilingOffsets* offsets)
{
MacroAssembler& masm = mg.masm();
const MallocSig& sig = mg.importSig(importIndex);
@ -540,8 +515,7 @@ GenerateJitExitStub(ModuleGenerator& mg, unsigned importIndex, Module::HeapBool
// HeapReg are removed from the general register set for asm.js code, so
// these will not have been saved by the caller like all other registers,
// so they must be explicitly preserved. Only save GlobalReg since
// HeapReg must be reloaded (from global data) after the call since the
// heap may change during the FFI call.
// HeapReg can be reloaded (from global data) after the call.
#if defined(JS_CODEGEN_ARM) || defined(JS_CODEGEN_MIPS32) || defined(JS_CODEGEN_MIPS64)
static_assert(MaybeSavedGlobalReg == sizeof(void*), "stack frame accounting");
masm.storePtr(GlobalReg, Address(masm.getStackPointer(), jitFrameBytes));
@ -698,12 +672,10 @@ GenerateJitExitStub(ModuleGenerator& mg, unsigned importIndex, Module::HeapBool
Label done;
masm.bind(&done);
// The heap pointer has to be reloaded anyway since JIT code could have
// clobbered it. Additionally, the import may have detached the heap buffer.
if (usesHeap) {
// Ion code does not respect system callee-saved register conventions so
// reload the heap register.
if (usesHeap)
masm.loadAsmJSHeapRegisterFromGlobalData();
CheckForHeapDetachment(masm, ABIArgGenerator::NonReturn_VolatileReg0, onDetached);
}
GenerateExitEpilogue(masm, masm.framePushed(), ExitReason::ImportJit, offsets);
@ -764,32 +736,6 @@ GenerateJitExitStub(ModuleGenerator& mg, unsigned importIndex, Module::HeapBool
return true;
}
// Generate a stub that is called when returning from an exit where the module's
// buffer has been detached. This stub first calls a C++ function to report an
// exception and then jumps to the generic throw stub to pop everything off the
// stack.
static bool
GenerateOnDetachedStub(ModuleGenerator& mg, Label* onDetached, Label* throwLabel)
{
MacroAssembler& masm = mg.masm();
masm.haltingAlign(CodeAlignment);
Offsets offsets;
offsets.begin = masm.currentOffset();
masm.bind(onDetached);
// For now, OnDetached always throws (see OnDetached comment).
masm.assertStackAlignment(ABIStackAlignment);
masm.call(SymbolicAddress::OnDetached);
masm.jump(throwLabel);
if (masm.oom())
return false;
offsets.end = masm.currentOffset();
return mg.defineInlineStub(offsets);
}
// Generate a stub that is called immediately after the prologue when there is a
// stack overflow. This stub calls a C++ function to report the error and then
// jumps to the throw stub to pop the activation.
@ -929,7 +875,7 @@ static const LiveRegisterSet AllRegsExceptSP(
// after restoring all registers. To hack around this, push the resumePC on the
// stack so that it can be popped directly into PC.
static bool
GenerateAsyncInterruptStub(ModuleGenerator& mg, Module::HeapBool usesHeap, Label* throwLabel)
GenerateAsyncInterruptStub(ModuleGenerator& mg, Label* throwLabel)
{
MacroAssembler& masm = mg.masm();
@ -1126,7 +1072,7 @@ GenerateThrowStub(ModuleGenerator& mg, Label* throwLabel)
}
bool
wasm::GenerateStubs(ModuleGenerator& mg, Module::HeapBool usesHeap)
wasm::GenerateStubs(ModuleGenerator& mg, bool usesHeap)
{
for (unsigned i = 0; i < mg.numDeclaredExports(); i++) {
if (!GenerateEntry(mg, i, usesHeap))
@ -1135,26 +1081,17 @@ wasm::GenerateStubs(ModuleGenerator& mg, Module::HeapBool usesHeap)
Label onThrow;
{
Label onDetached;
for (size_t i = 0; i < mg.numDeclaredImports(); i++) {
ProfilingOffsets interp;
if (!GenerateInterpExitStub(mg, i, &onThrow, &interp))
return false;
for (size_t i = 0; i < mg.numDeclaredImports(); i++) {
ProfilingOffsets interp;
if (!GenerateInterpExitStub(mg, i, usesHeap, &onThrow, &onDetached, &interp))
return false;
ProfilingOffsets jit;
if (!GenerateJitExitStub(mg, i, usesHeap, &onThrow, &jit))
return false;
ProfilingOffsets jit;
if (!GenerateJitExitStub(mg, i, usesHeap, &onThrow, &onDetached, &jit))
return false;
if (!mg.defineImport(i, interp, jit))
return false;
}
if (onDetached.used()) {
if (!GenerateOnDetachedStub(mg, &onDetached, &onThrow))
return false;
}
if (!mg.defineImport(i, interp, jit))
return false;
}
if (mg.masm().asmStackOverflowLabel()->used()) {
@ -1178,7 +1115,7 @@ wasm::GenerateStubs(ModuleGenerator& mg, Module::HeapBool usesHeap)
return false;
// Generate unconditionally: the async interrupt may be taken at any time.
if (!GenerateAsyncInterruptStub(mg, usesHeap, &onThrow))
if (!GenerateAsyncInterruptStub(mg, &onThrow))
return false;
if (onThrow.used()) {

View file

@ -25,7 +25,7 @@ namespace js {
namespace wasm {
bool
GenerateStubs(ModuleGenerator& mg, Module::HeapBool usesHeap);
GenerateStubs(ModuleGenerator& mg, bool usesHeap);
} // namespace wasm
} // namespace js

View file

@ -43,34 +43,16 @@ __aeabi_uidivmod(int, int);
}
#endif
namespace js {
namespace wasm {
void
ReportOverRecursed()
{
JSContext* cx = JSRuntime::innermostWasmActivation()->cx();
ReportOverRecursed(cx);
}
bool
HandleExecutionInterrupt()
{
WasmActivation* act = JSRuntime::innermostWasmActivation();
act->module().setInterrupted(true);
bool ret = CheckForInterrupt(act->cx());
act->module().setInterrupted(false);
return ret;
}
} // namespace wasm
} // namespace js
static void
OnDetached()
WasmReportOverRecursed()
{
JSContext* cx = JSRuntime::innermostWasmActivation()->cx();
JS_ReportErrorNumber(cx, GetErrorMessage, nullptr, JSMSG_OUT_OF_MEMORY);
ReportOverRecursed(JSRuntime::innermostWasmActivation()->cx());
}
static bool
WasmHandleExecutionInterrupt()
{
return CheckForInterrupt(JSRuntime::innermostWasmActivation()->cx());
}
static void
@ -187,15 +169,13 @@ wasm::AddressOf(SymbolicAddress imm, ExclusiveContext* cx)
case SymbolicAddress::StackLimit:
return cx->stackLimitAddressForJitCode(StackForUntrustedScript);
case SymbolicAddress::ReportOverRecursed:
return FuncCast(wasm::ReportOverRecursed, Args_General0);
case SymbolicAddress::OnDetached:
return FuncCast(OnDetached, Args_General0);
return FuncCast(WasmReportOverRecursed, Args_General0);
case SymbolicAddress::OnOutOfBounds:
return FuncCast(OnOutOfBounds, Args_General0);
case SymbolicAddress::OnImpreciseConversion:
return FuncCast(OnImpreciseConversion, Args_General0);
case SymbolicAddress::HandleExecutionInterrupt:
return FuncCast(wasm::HandleExecutionInterrupt, Args_General0);
return FuncCast(WasmHandleExecutionInterrupt, Args_General0);
case SymbolicAddress::InvokeImport_Void:
return FuncCast(InvokeImport_Void, Args_General3);
case SymbolicAddress::InvokeImport_I32:

View file

@ -557,7 +557,6 @@ enum class SymbolicAddress
RuntimeInterruptUint32,
StackLimit,
ReportOverRecursed,
OnDetached,
OnOutOfBounds,
OnImpreciseConversion,
HandleExecutionInterrupt,

View file

@ -524,7 +524,7 @@ GetCurrentAsmJSHeap(SharedMem<void*>* heap, size_t* length)
{
JSRuntime* rt = js::TlsPerThreadData.get()->runtimeFromMainThread();
wasm::Module& module = rt->wasmActivationStack()->module();
*heap = module.maybeHeap().cast<void*>();
*heap = module.heap().cast<void*>();
*length = module.heapLength();
}

Просмотреть файл

@ -68,7 +68,7 @@ function String_substr(start, length) {
intStart = std_Math_max(intStart + size, 0);
// Step 9.
var resultLength = std_Math_min(std_Math_max(end, 0), size - intStart)
var resultLength = std_Math_min(std_Math_max(end, 0), size - intStart);
// Step 10.
if (resultLength <= 0)

Просмотреть файл

@ -2753,7 +2753,7 @@ SetLazyParsingDisabled(JSContext* cx, unsigned argc, Value* vp)
CallArgs args = CallArgsFromVp(argc, vp);
bool disable = !args.hasDefined(0) || ToBoolean(args[0]);
JS::CompartmentOptionsRef(cx->compartment()).setDisableLazyParsing(disable);
cx->compartment()->behaviors().setDisableLazyParsing(disable);
args.rval().setUndefined();
return true;
@ -2765,7 +2765,7 @@ SetDiscardSource(JSContext* cx, unsigned argc, Value* vp)
CallArgs args = CallArgsFromVp(argc, vp);
bool discard = !args.hasDefined(0) || ToBoolean(args[0]);
JS::CompartmentOptionsRef(cx->compartment()).setDiscardSource(discard);
cx->compartment()->behaviors().setDiscardSource(discard);
args.rval().setUndefined();
return true;

Просмотреть файл

@ -78,7 +78,8 @@ var ignoreCallees = {
"z_stream_s.zfree" : true,
"GrGLInterface.fCallback" : true,
"std::strstreambuf._M_alloc_fun" : true,
"std::strstreambuf._M_free_fun" : true
"std::strstreambuf._M_free_fun" : true,
"struct js::gc::Callback<void (*)(JSRuntime*, void*)>.op" : true,
};
function fieldCallCannotGC(csu, fullfield)
@ -187,6 +188,8 @@ var ignoreFunctions = {
"void test::RingbufferDumper::OnTestPartResult(testing::TestPartResult*)" : true,
"float64 JS_GetCurrentEmbedderTime()" : true,
"uint64 js::TenuringTracer::moveObjectToTenured(JSObject*, JSObject*, int32)" : true,
};
function isProtobuf(name)

Просмотреть файл

@ -199,7 +199,7 @@ BytecodeCompiler::maybeCompressSource()
sourceCompressor = maybeSourceCompressor.ptr();
}
if (!cx->compartment()->options().discardSource()) {
if (!cx->compartment()->behaviors().discardSource()) {
if (options.sourceIsLazy) {
scriptSource->setSourceRetrievable();
} else if (!scriptSource->setSourceCopy(cx, sourceBuffer, sourceArgumentsNotIncluded,
@ -217,8 +217,8 @@ BytecodeCompiler::canLazilyParse()
{
return options.canLazilyParse &&
!HasNonSyntacticStaticScopeChain(enclosingStaticScope) &&
!cx->compartment()->options().disableLazyParsing() &&
!cx->compartment()->options().discardSource() &&
!cx->compartment()->behaviors().disableLazyParsing() &&
!cx->compartment()->behaviors().discardSource() &&
!options.sourceIsLazy &&
!cx->lcovEnabled();
}

Просмотреть файл

@ -756,6 +756,9 @@ class GCRuntime
void setGCCallback(JSGCCallback callback, void* data);
void callGCCallback(JSGCStatus status) const;
void setObjectsTenuredCallback(JSObjectsTenuredCallback callback,
void* data);
void callObjectsTenuredCallback();
bool addFinalizeCallback(JSFinalizeCallback callback, void* data);
void removeFinalizeCallback(JSFinalizeCallback func);
bool addWeakPointerZoneGroupCallback(JSWeakPointerZoneGroupCallback callback, void* data);
@ -1273,6 +1276,7 @@ class GCRuntime
bool fullCompartmentChecks;
Callback<JSGCCallback> gcCallback;
Callback<JSObjectsTenuredCallback> tenuredCallback;
CallbackVector<JSFinalizeCallback> finalizeCallbacks;
CallbackVector<JSWeakPointerZoneGroupCallback> updateWeakPointerZoneGroupCallbacks;
CallbackVector<JSWeakPointerCompartmentCallback> updateWeakPointerCompartmentCallbacks;

Просмотреть файл

@ -2215,6 +2215,8 @@ js::TenuringTracer::moveObjectToTenured(JSObject* dst, JSObject* src, AllocKind
tenuredSize += UnboxedArrayObject::objectMovedDuringMinorGC(this, dst, src, dstKind);
} else if (src->is<ArgumentsObject>()) {
tenuredSize += ArgumentsObject::objectMovedDuringMinorGC(this, dst, src);
} else if (JSObjectMovedOp op = dst->getClass()->ext.objectMovedOp) {
op(dst, src);
} else {
// Objects with JSCLASS_SKIP_NURSERY_FINALIZE need to be handled above
// to ensure any additional nursery buffers they hold are moved.
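The new branch above lets a class supply its own move hook: when the tenuring tracer copies an instance out of the nursery, it calls the class's objectMovedOp as op(dst, src). A minimal sketch of such a hook on the embedder side, assuming the class keeps a pointer to an out-of-line record in its private slot; the record type and the slot use are hypothetical, only the (dst, src) calling convention comes from the call site above.

    #include "jsapi.h"

    // Hypothetical out-of-line record that caches the JS object's address.
    struct MyRecord
    {
        JSObject* object;
    };

    // Move hook: the tenuring tracer calls this as op(dst, src) after the
    // object's contents (including the private slot) have been copied to dst.
    static void
    MyClassObjectMoved(JSObject* dst, JSObject* src)
    {
        MyRecord* rec = static_cast<MyRecord*>(JS_GetPrivate(dst)); // assumed private slot
        if (rec)
            rec->object = dst;  // src is about to be reused as nursery space
    }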

Просмотреть файл

@ -496,6 +496,10 @@ js::Nursery::collect(JSRuntime* rt, JS::gcreason::Reason reason, ObjectGroupList
forwardedBuffers.finish();
TIME_END(updateJitActivations);
TIME_START(objectsTenuredCallback);
rt->gc.callObjectsTenuredCallback();
TIME_END(objectsTenuredCallback);
// Sweep.
TIME_START(freeMallocedBuffers);
freeMallocedBuffers();
@ -576,6 +580,7 @@ js::Nursery::collect(JSRuntime* rt, JS::gcreason::Reason reason, ObjectGroupList
{"mkDbgr", TIME_TOTAL(markDebugger)},
{"clrNOC", TIME_TOTAL(clearNewObjectCache)},
{"collct", TIME_TOTAL(collectToFP)},
{" tenCB", TIME_TOTAL(objectsTenuredCallback)},
{"swpABO", TIME_TOTAL(sweepArrayBufferViewList)},
{"updtIn", TIME_TOTAL(updateJitActivations)},
{"frSlts", TIME_TOTAL(freeMallocedBuffers)},

Просмотреть файл

@ -60,7 +60,7 @@ void breakpoint() {
GDBFragment* GDBFragment::allFragments = nullptr;
int
main (int argc, const char** argv)
main(int argc, const char** argv)
{
if (!JS_Init()) return 1;
JSRuntime* runtime = checkPtr(JS_NewRuntime(1024 * 1024));
@ -74,7 +74,8 @@ main (int argc, const char** argv)
/* Create the global object. */
JS::CompartmentOptions options;
options.setVersion(JSVERSION_LATEST);
options.behaviors().setVersion(JSVERSION_LATEST);
RootedObject global(cx, checkPtr(JS_NewGlobalObject(cx, &global_class,
nullptr, JS::FireOnNewGlobalHook, options)));
JSAutoCompartment ac(cx, global);

Просмотреть файл

@ -1,33 +0,0 @@
load(libdir + "asm.js");
var byteLength = Function.prototype.call.bind(
Object.getOwnPropertyDescriptor(ArrayBuffer.prototype, "byteLength").get
);
var m = asmCompile("glob", "s", "b", `
"use asm";
var I32 = glob.Int32Array;
var i32 = new I32(b);
var len = glob.byteLength;
function ch(b2) {
if (len(b2) & 0xffffff || len(b2) <= 0xffffff || len(b2) > 80000000) {
return false;
}
i32 = new I32(b2);
b = b2;
return true
}
function get(i) {
i = i | 0;
return i32[i >> 2] | 0
}
return {
get: get,
changeHeap: ch
}
`);
var buf1 = new ArrayBuffer(16777216)
var { get, changeHeap } = asmLink(m, this, null, buf1)
assertEq(changeHeap(new ArrayBuffer(33554432)), true)
assertEq(get(), 0)
assertEq(changeHeap(buf1), true);
get();

Просмотреть файл

@ -23,19 +23,8 @@ var buffer = new ArrayBuffer(BUF_MIN);
var {get, set} = asmLink(m, this, null, buffer);
set(4, 42);
assertEq(get(4), 42);
neuter(buffer, "change-data");
neuter(buffer, "same-data");
assertThrowsInstanceOf(() => get(4), InternalError);
var buf1 = new ArrayBuffer(BUF_MIN);
var buf2 = new ArrayBuffer(BUF_MIN);
var {get:get1, set:set1} = asmLink(m, this, null, buf1);
var {get:get2, set:set2} = asmLink(m, this, null, buf2);
set1(0, 13);
set2(0, 42);
neuter(buf1, "change-data");
assertThrowsInstanceOf(() => get1(0), InternalError);
assertEq(get2(0), 42);
assertThrowsInstanceOf(() => neuter(buffer, "change-data"), InternalError);
assertThrowsInstanceOf(() => neuter(buffer, "same-data"), InternalError);
var m = asmCompile('stdlib', 'foreign', 'buffer',
`"use asm";
@ -49,66 +38,5 @@ var m = asmCompile('stdlib', 'foreign', 'buffer',
return inner`);
var buffer = new ArrayBuffer(BUF_MIN);
function ffi1() { neuter(buffer, "change-data"); }
function ffi1() { assertThrowsInstanceOf(() => neuter(buffer, "change-data"), InternalError) }
var inner = asmLink(m, this, {ffi:ffi1}, buffer);
assertThrowsInstanceOf(() => inner(8), InternalError);
var byteLength = Function.prototype.call.bind(Object.getOwnPropertyDescriptor(ArrayBuffer.prototype, 'byteLength').get);
var m = asmCompile('stdlib', 'foreign', 'buffer',
`"use asm";
var ffi = foreign.ffi;
var I32 = stdlib.Int32Array;
var i32 = new I32(buffer);
var len = stdlib.byteLength;
function changeHeap(newBuffer) {
if (len(newBuffer) & 0xffffff || len(newBuffer) <= 0xffffff || len(newBuffer) > 0x80000000)
return false;
i32 = new I32(newBuffer);
buffer = newBuffer;
return true;
}
function get(i) {
i=i|0;
return i32[i>>2]|0;
}
function inner(i) {
i=i|0;
ffi();
return get(i)|0;
}
return {changeHeap:changeHeap, get:get, inner:inner}`);
var buf1 = new ArrayBuffer(BUF_CHANGE_MIN);
var buf2 = new ArrayBuffer(BUF_CHANGE_MIN);
var buf3 = new ArrayBuffer(BUF_CHANGE_MIN);
var buf4 = new ArrayBuffer(BUF_CHANGE_MIN);
new Int32Array(buf2)[13] = 42;
new Int32Array(buf3)[13] = 1024;
new Int32Array(buf4)[13] = 1337;
function ffi2() { neuter(buf1, "change-data"); assertEq(changeHeap(buf2), true); }
var {changeHeap, get:get2, inner} = asmLink(m, this, {ffi:ffi2}, buf1);
assertEq(inner(13*4), 42);
function ffi3() {
assertEq(get2(13*4), 42);
assertEq(get2(BUF_CHANGE_MIN), 0)
assertEq(get3(13*4), 42);
assertEq(get3(BUF_CHANGE_MIN), 0)
neuter(buf2, "change-data");
assertThrowsInstanceOf(()=>get2(13*4), InternalError);
assertThrowsInstanceOf(()=>get2(BUF_CHANGE_MIN), InternalError);
assertThrowsInstanceOf(()=>get3(13*4), InternalError);
assertThrowsInstanceOf(()=>get3(BUF_CHANGE_MIN), InternalError);
assertEq(changeHeap(buf3), true);
assertThrowsInstanceOf(()=>get2(13*4), InternalError);
assertThrowsInstanceOf(()=>get2(BUF_CHANGE_MIN), InternalError);
assertEq(get3(13*4), 1024);
assertEq(get3(BUF_CHANGE_MIN), 0);
assertEq(changeHeap(buf4), true);
}
var {changeHeap, get:get3, inner} = asmLink(m, this, {ffi:ffi3}, buf2);
assertEq(inner(13*4), 1337);
assertThrowsInstanceOf(()=>get2(0), InternalError);
assertEq(get3(BUF_CHANGE_MIN), 0);
assertEq(get3(13*4), 1337);

Просмотреть файл

@ -202,16 +202,6 @@ var stacks = disableSingleStepProfiling();
assertStackContainsSeq(stacks, ">,f1,>,<,f1,>,>,<,f1,>,f2,>,<,f1,>,<,f2,>,<,f1,>,f2,>,<,f1,>,>,<,f1,>,<,f1,>,f1,>,>");
// Detachment exit
var buf = new ArrayBuffer(BUF_CHANGE_MIN);
var ffi = function() { neuter(buf, 'change-data') }
var f = asmLink(asmCompile('g','ffis','buf', USE_ASM + 'var ffi = ffis.ffi; var i32 = new g.Int32Array(buf); function f() { ffi() } return f'), this, {ffi:ffi}, buf);
enableSingleStepProfiling();
assertThrowsInstanceOf(f, InternalError);
var stacks = disableSingleStepProfiling();
assertStackContainsSeq(stacks, ">,f,>,<,f,>,inline stub,f,>,<,f,>,inline stub,f,>");
if (isSimdAvailable() && typeof SIMD !== 'undefined') {
// SIMD out-of-bounds exit
var buf = new ArrayBuffer(0x10000);

Просмотреть файл

@ -1,366 +0,0 @@
// |jit-test| test-also-noasmjs
load(libdir + "asm.js");
load(libdir + "asserts.js");
// Tests for importing typed array view constructors
assertAsmTypeFail('glob', USE_ASM + "var I32=glob.Int32Arra; function f() {} return f");
var m = asmCompile('glob', USE_ASM + "var I32=glob.Int32Array; function f() {} return f");
assertAsmLinkFail(m, {});
assertAsmLinkFail(m, {Int32Array:null});
assertAsmLinkFail(m, {Int32Array:{}});
assertAsmLinkFail(m, {Int32Array:Uint32Array});
assertEq(asmLink(m, {Int32Array:Int32Array})(), undefined);
var m = asmCompile('glob', 'ffis', 'buf', USE_ASM + "var I32=glob.Int32Array; function f() {} return f");
assertEq(asmLink(m, this)(), undefined);
assertEq(asmLink(m, this, null, BUF_64KB)(), undefined);
assertAsmTypeFail('glob', 'ffis', 'buf', USE_ASM + 'var I32=glob.Int32Array; var i32=new I3(buf); function f() {} return f');
assertAsmTypeFail('glob', 'ffis', 'buf', USE_ASM + 'var I32=0; var i32=new I32(buf); function f() {} return f');
var m = asmCompile('glob', 'ffis', 'buf', USE_ASM + 'var I32=glob.Int32Array; var i32=new I32(buf); function f() {} return f');
assertAsmLinkFail(m, this, null, {});
assertAsmLinkAlwaysFail(m, this, null, null);
assertAsmLinkFail(m, this, null, new ArrayBuffer(100));
assertEq(asmLink(m, this, null, BUF_64KB)(), undefined);
var m = asmCompile('glob', 'ffis', 'buf', USE_ASM + 'var I32=glob.Int32Array; var i32=new glob.Int32Array(buf); function f() {} return f');
assertAsmLinkFail(m, this, null, {});
assertAsmLinkAlwaysFail(m, this, null, null);
assertAsmLinkFail(m, this, null, new ArrayBuffer(100));
assertEq(asmLink(m, this, null, BUF_64KB)(), undefined);
var m = asmCompile('glob', 'ffis', 'buf', USE_ASM + 'var F32=glob.Float32Array; var i32=new glob.Int32Array(buf); function f() {} return f');
assertAsmLinkFail(m, this, null, {});
assertAsmLinkAlwaysFail(m, this, null, null);
assertAsmLinkFail(m, this, null, new ArrayBuffer(100));
assertEq(asmLink(m, this, null, BUF_64KB)(), undefined);
// Tests for link-time validation of byteLength import
assertAsmTypeFail('glob', 'ffis', 'buf', USE_ASM + 'var byteLength=glob.byteLength; function f() { return byteLength(1)|0 } return f');
var m = asmCompile('glob', 'ffis', 'buf', USE_ASM + 'var byteLength=glob.byteLength; function f() { return 42 } return f');
assertEq('byteLength' in this, false);
assertAsmLinkFail(m, this);
this['byteLength'] = null;
assertAsmLinkFail(m, this);
this['byteLength'] = {};
assertAsmLinkFail(m, this);
this['byteLength'] = function(){}
assertAsmLinkFail(m, this);
this['byteLength'] = (function(){}).bind(null);
assertAsmLinkFail(m, this);
this['byteLength'] = Function.prototype.call.bind();
assertAsmLinkFail(m, this);
this['byteLength'] = Function.prototype.call.bind({});
assertAsmLinkFail(m, this);
this['byteLength'] = Function.prototype.call.bind(function f() {});
assertAsmLinkFail(m, this);
this['byteLength'] = Function.prototype.call.bind(Math.sin);
assertAsmLinkFail(m, this);
this['byteLength'] =
Function.prototype.call.bind(Object.getOwnPropertyDescriptor(ArrayBuffer.prototype, 'byteLength').get);
assertEq(asmLink(m, this)(), 42);
var m = asmCompile('glob', 'ffis', 'buf', USE_ASM + 'var b1=glob.byteLength, b2=glob.byteLength; function f() { return 43 } return f');
assertEq(asmLink(m, this)(), 43);
// Tests for validation of change-heap function
const BYTELENGTH_IMPORT = "var len = glob.byteLength; ";
const IMPORT0 = BYTELENGTH_IMPORT;
const IMPORT1 = "var I8=glob.Int8Array; var i8=new I8(b); " + BYTELENGTH_IMPORT;
const IMPORT2 = "var I8=glob.Int8Array; var i8=new I8(b); var I32=glob.Int32Array; var i32=new I32(b); var II32=glob.Int32Array; " + BYTELENGTH_IMPORT;
asmCompile('glob', 'ffis', 'b', USE_ASM + IMPORT1 + 'function f() { return 42 } return f');
assertAsmTypeFail('glob', 'ffis', 'b', USE_ASM + IMPORT1 + 'function f() { return 42 } function ch(b2) { if(len(b2) & 0xffffff || len(b2) <= 0xffffff || len(b2) > 0x80000000) return false; i8=new I8(b2); b=b2; return true } return f');
assertAsmTypeFail('glob', 'ffis', 'b', USE_ASM + IMPORT1 + 'function b(b2) { if(len(b2) & 0xffffff || len(b2) <= 0xffffff || len(b2) > 0x80000000) return false; i8=new I8(b2); b=b2; return true } function f() { return 42 } return f');
assertAsmTypeFail('glob', 'ffis', 'b', USE_ASM + IMPORT1 + 'function f(b2) { if(len(b2) & 0xffffff || len(b2) <= 0xffffff || len(b2) > 0x80000000) return false; i8=new I8(b2); b=b2; return true } function f() { return 42 } return f');
assertAsmTypeFail('glob', 'ffis', 'b', USE_ASM + IMPORT1 + 'function ch(b2=1) { if(len(b2) & 0xffffff || len(b2) <= 0xffffff || len(b2) > 0x80000000) return false; i8=new I8(b2); b=b2; return true } function f() { return 42 } return f');
assertAsmTypeFail('glob', 'ffis', 'b', USE_ASM + IMPORT1 + 'function ch(b2,xyz) { if(len(b2) & 0xffffff || len(b2) <= 0xffffff || len(b2) > 0x80000000) return false; i8=new I8(b2); b=b2; return true } function f() { return 42 } return f');
assertAsmTypeFail('glob', 'ffis', 'b', USE_ASM + IMPORT1 + 'function ch(...r) { if(len(b2) & 0xffffff || len(b2) <= 0xffffff || len(b2) > 0x80000000) return false; i8=new I8(b2); b=b2; return true } function f() { return 42 } return f');
assertAsmTypeFail('glob', 'ffis', 'b', USE_ASM + IMPORT1 + 'function ch(b2,...r) { if(len(b2) & 0xffffff || len(b2) <= 0xffffff || len(b2) > 0x80000000) return false; i8=new I8(b2); b=b2; return true } function f() { return 42 } return f');
assertAsmTypeFail('glob', 'ffis', 'b', USE_ASM + IMPORT1 + 'function ch({b2}) { if(len(b2) & 0xffffff || len(b2) <= 0xffffff || len(b2) > 0x80000000) return false; i8=new I8(b2); b=b2; return true } function f() { return 42 } return f');
asmCompile('glob', 'ffis', 'b', USE_ASM + IMPORT1 + 'function ch(b2) { if(len(b2) & 0xffffff || len(b2) <= 0xffffff || len(b2) > 0x80000000) return false; i8=new I8(b2); b=b2; return true } function f() { return 42 } return f');
asmCompile('glob', 'ffis', 'b', USE_ASM + IMPORT1 + 'function ch(b2) { ;if((len((b2))) & (0xffffff) || (len((b2)) <= (0xffffff)) || len(b2) > 0x80000000) {;;return false;;} ; i8=new I8(b2);; b=b2;; return true;; } function f() { return 42 } return f');
assertAsmTypeFail('glob', 'ffis', 'b', USE_ASM + IMPORT1 + 'function ch(b2) { if(len(b2) & 0xffffff || len(b2) <= 0xffffff || len(b2) > 0x80000000) return false; i8=new I8(b2); b=b2; return true } function ch2(b2) { if(len(b2) & 0xffffff || len(b2) <= 0xffffff) return false; i8=new I8(b2); b=b2; return true } function f() { return 42 } return f');
assertAsmTypeFail('glob', 'ffis', 'b', USE_ASM + IMPORT1 + 'function ch(b2) { 3; if(len(b2) & 0xffffff || len(b2) <= 0xffffff || len(b2) > 0x80000000) return false; i8=new I8(b2); b=b2; return true } function f() { return 42 } return f');
assertAsmTypeFail('glob', 'ffis', 'b', USE_ASM + IMPORT1 + 'function ch(b2) { b2=b2|0; if(len(b2) & 0xffffff || len(b2) <= 0xffffff || len(b2) > 0x80000000) return false; i8=new I8(b2); b=b2; return true } function f() { return 42 } return f');
assertAsmTypeFail('glob', 'ffis', 'b', USE_ASM + IMPORT1 + 'function ch(b2) { if(1) return false; i8=new I8(b2); b=b2; return true } function f() { return 42 } return f');
assertAsmTypeFail('glob', 'ffis', 'b', USE_ASM + IMPORT1 + 'function ch(b2) { if(1 || 1) return false; i8=new I8(b2); b=b2; return true } function f() { return 42 } return f');
assertAsmTypeFail('glob', 'ffis', 'b', USE_ASM + IMPORT1 + 'function ch(b2) { if(1 || 1 || 1) return false; i8=new I8(b2); b=b2; return true } function f() { return 42 } return f');
assertAsmTypeFail('glob', 'ffis', 'b', USE_ASM + IMPORT1 + 'function ch(b2) { if(1 || len(b2) <= 0xffffff || len(b2) > 0x80000000) return false; i8=new I8(b2); b=b2; return true } function f() { return 42 } return f');
assertAsmTypeFail('glob', 'ffis', 'b', USE_ASM + IMPORT1 + 'function ch(b2) { if(1 & 0xffffff || len(b2) <= 0xffffff || len(b2) > 0x80000000) return false; i8=new I8(b2); b=b2; return true } function f() { return 42 } return f');
assertAsmTypeFail('glob', 'ffis', 'b', USE_ASM + IMPORT1 + 'function ch(b2) { if(len(b2) & 0xffffff || 1 || len(b2) > 0x80000000) return false; i8=new I8(b2); b=b2; return true } function f() { return 42 } return f');
assertAsmTypeFail('glob', 'ffis', 'b', USE_ASM + IMPORT1 + 'function ch(b2) { if(i8(b2) & 0xffffff || len(b2) <= 0xffffff || len(b2) > 0x80000000) return false; i8=new I8(b2); b=b2; return true } function f() { return 42 } return f');
assertAsmTypeFail('glob', 'ffis', 'b', USE_ASM + IMPORT1 + 'function ch(b2) { if(len(b) & 0xffffff || len(b2) <= 0xffffff || len(b2) > 0x80000000) return false; i8=new I8(b2); b=b2; return true } function f() { return 42 } return f');
assertAsmTypeFail('glob', 'ffis', 'b', USE_ASM + IMPORT1 + 'function ch(b2) { if(len(xyz) & 0xffffff || len(b2) <= 0xffffff || len(b2) > 0x80000000) return false; i8=new I8(b2); b=b2; return true } function f() { return 42 } return f');
assertAsmTypeFail('glob', 'ffis', 'b', USE_ASM + IMPORT1 + 'function ch(b2) { if(len(b2) & 0xffffff && len(b2) <= 0xffffff || len(b2) > 0x80000000) return false; i8=new I8(b2); b=b2; return true } function f() { return 42 } return f');
assertAsmTypeFail('glob', 'ffis', 'b', USE_ASM + IMPORT1 + 'function ch(b2) { if(len(b2) | 0xffffff || len(b2) <= 0xffffff || len(b2) > 0x80000000) return false; i8=new I8(b2); b=b2; return true } function f() { return 42 } return f');
assertAsmTypeFail('glob', 'ffis', 'b', USE_ASM + IMPORT1 + 'function ch(b2) { if(len(b2) & 0xffffff || len(b2) == 0xffffff || len(b2) > 0x80000000) return false; i8=new I8(b2); b=b2; return true } function f() { return 42 } return f');
assertAsmTypeFail('glob', 'ffis', 'b', USE_ASM + IMPORT1 + 'function ch(b2) { if(len(b2) & 0xfffffe || len(b2) <= 0xffffff || len(b2) > 0x80000000) return false; i8=new I8(b2); b=b2; return true } function f() { return 42 } return f');
asmCompile('glob', 'ffis', 'b', USE_ASM + IMPORT1 + 'function ch(b2) { if(len(b2) & 0x1ffffff || len(b2) <= 0xffffff || len(b2) > 0x80000000) return false; i8=new I8(b2); b=b2; return true } function f() { return 42 } return f');
asmCompile('glob', 'ffis', 'b', USE_ASM + IMPORT1 + 'function ch(b2) { if(len(b2) & 0x7fffffff || len(b2) <= 0xffffff || len(b2) > 0x80000000) return false; i8=new I8(b2); b=b2; return true } function f() { return 42 } return f');
assertAsmTypeFail('glob', 'ffis', 'b', USE_ASM + IMPORT1 + 'function ch(b2) { if(len(b2) & 0xffffffff || len(b2) <= 0xffffff || len(b2) > 0x80000000) return false; i8=new I8(b2); b=b2; return true } function f() { return 42 } return f');
assertAsmTypeFail('glob', 'ffis', 'b', USE_ASM + IMPORT1 + 'function ch(b2) { if(len(b2) & 0xffffff || len(b2) < 0xffffff || len(b2) > 0x80000000) return false; i8=new I8(b2); b=b2; return true } function f() { return 42 } return f');
assertAsmTypeFail('glob', 'ffis', 'b', USE_ASM + IMPORT1 + 'function ch(b2) { if(len(b2) & 0xffffff || len(b2) <= 0xfffffe || len(b2) > 0x80000000) return false; i8=new I8(b2); b=b2; return true } function f() { return 42 } return f');
asmCompile('glob', 'ffis', 'b', USE_ASM + IMPORT1 + 'function ch(b2) { if(len(b2) & 0xffffff || len(b2) <= 0x1000000 || len(b2) > 0x80000000) return false; i8=new I8(b2); b=b2; return true } function f() { return 42 } return f');
assertAsmTypeFail('glob', 'ffis', 'b', USE_ASM + IMPORT1 + 'function ch(b2) { if(len(b2) & 0xffffff || len(b2) <= 0xffffff || 1) return false; i8=new I8(b2); b=b2; return true } function f() { return 42 } return f');
assertAsmTypeFail('glob', 'ffis', 'b', USE_ASM + IMPORT1 + 'function ch(b2) { if(len(b2) & 0xffffff || len(b2) <= 0xffffff || len(b2) < 0x80000000) return false; i8=new I8(b2); b=b2; return true } function f() { return 42 } return f');
assertAsmTypeFail('glob', 'ffis', 'b', USE_ASM + IMPORT1 + 'function ch(b2) { if(len(b2) & 0xffffff || len(b2) <= 0xffffff || 1 > 0x80000000) return false; i8=new I8(b2); b=b2; return true } function f() { return 42 } return f');
assertAsmTypeFail('glob', 'ffis', 'b', USE_ASM + IMPORT1 + 'function ch(b2) { if(len(b2) & 0xffffff || len(b2) <= 0xffffff || len(b2) > 0.0) return false; i8=new I8(b2); b=b2; return true } function f() { return 42 } return f');
assertAsmTypeFail('glob', 'ffis', 'b', USE_ASM + IMPORT1 + 'function ch(b2) { if(len(b2) & 0xffffff || len(b2) <= 0xffffff || len(b2) > 0xffffff) return false; i8=new I8(b2); b=b2; return true } function f() { return 42 } return f');
asmCompile('glob', 'ffis', 'b', USE_ASM + IMPORT1 + 'function ch(b2) { if(len(b2) & 0xffffff || len(b2) <= 0xffffff || len(b2) > 0x1000000) return false; i8=new I8(b2); b=b2; return true } function f() { return 42 } return f');
assertAsmTypeFail('glob', 'ffis', 'b', USE_ASM + IMPORT1 + 'function ch(b2) { if(len(b2) & 0xffffff || len(b2) <= 0xffffffff || len(b2) > 0x1000000) return false; i8=new I8(b2); b=b2; return true } function f() { return 42 } return f');
assertAsmTypeFail('glob', 'ffis', 'b', USE_ASM + IMPORT1 + 'function ch(b2) { if(len(b2) & 0xffffff || len(b2) <= 0x1000000 || len(b2) > 0x1000000) return false; i8=new I8(b2); b=b2; return true } function f() { return 42 } return f');
asmCompile('glob', 'ffis', 'b', USE_ASM + IMPORT1 + 'function ch(b2) { if(len(b2) & 0xffffff || len(b2) <= 0x1000000 || len(b2) > 0x1000001) return false; i8=new I8(b2); b=b2; return true } function f() { return 42 } return f');
assertAsmTypeFail('glob', 'ffis', 'b', USE_ASM + IMPORT1 + 'function ch(b2) { if(len(b2) & 0xffffff || len(b2) <= 0xffffff || len(b2) > 0x80000001) return false; i8=new I8(b2); b=b2; return true } function f() { return 42 } return f');
assertAsmTypeFail('glob', 'ffis', 'b', USE_ASM + IMPORT1 + 'function ch(b2) { if(len(b2) & 0xffffff || len(b2) <= 0xffffff || len(b2) > 0x80000000) ; i8=new I8(b2); b=b2; return true } function f() { return 42 } return f');
assertAsmTypeFail('glob', 'ffis', 'b', USE_ASM + IMPORT1 + 'function ch(b2) { if(len(b2) & 0xffffff || len(b2) <= 0xffffff || len(b2) > 0x80000000) {} i8=new I8(b2); b=b2; return true } function f() { return 42 } return f');
asmCompile('glob', 'ffis', 'b', USE_ASM + IMPORT1 + 'function ch(b2) { if(len(b2) & 0xffffff || len(b2) <= 0xffffff || len(b2) > 0x80000000) {return false} i8=new I8(b2); b=b2; return true } function f() { return 42 } return f');
assertAsmTypeFail('glob', 'ffis', 'b', USE_ASM + IMPORT1 + 'function ch(b2) { if(len(b2) & 0xffffff || len(b2) <= 0xffffff || len(b2) > 0x80000000) return true; i8=new I8(b2); b=b2; return true } function f() { return 42 } return f');
asmCompile('glob', 'ffis', 'b', USE_ASM + IMPORT0 + 'function ch(b2) { if(len(b2) & 0xffffff || len(b2) <= 0xffffff || len(b2) > 0x80000000) return false; b=b2; return true } function f() { return 42 } return f');
assertAsmTypeFail('glob', 'ffis', 'b', USE_ASM + IMPORT1 + 'function ch(b2) { if(len(b2) & 0xffffff || len(b2) <= 0xffffff || len(b2) > 0x80000000) return false; b=b2; return true } function f() { return 42 } return f');
assertAsmTypeFail('glob', 'ffis', 'b', USE_ASM + IMPORT1 + 'function ch(b2) { if(len(b2) & 0xffffff || len(b2) <= 0xffffff || len(b2) > 0x80000000) return false; i7=new I8(b2); b=b2; return true } function f() { return 42 } return f');
assertAsmTypeFail('glob', 'ffis', 'b', USE_ASM + IMPORT1 + 'function ch(b2) { if(len(b2) & 0xffffff || len(b2) <= 0xffffff || len(b2) > 0x80000000) return false; b=new I8(b2); b=b2; return true } function f() { return 42 } return f');
assertAsmTypeFail('glob', 'ffis', 'b', USE_ASM + IMPORT1 + 'function ch(b2) { if(len(b2) & 0xffffff || len(b2) <= 0xffffff || len(b2) > 0x80000000) return false; i8=1; b=b2; return true } function f() { return 42 } return f');
assertAsmTypeFail('glob', 'ffis', 'b', USE_ASM + IMPORT1 + 'function ch(b2) { if(len(b2) & 0xffffff || len(b2) <= 0xffffff || len(b2) > 0x80000000) return false; i8=new 1; b=b2; return true } function f() { return 42 } return f');
assertAsmTypeFail('glob', 'ffis', 'b', USE_ASM + IMPORT1 + 'function ch(b2) { if(len(b2) & 0xffffff || len(b2) <= 0xffffff || len(b2) > 0x80000000) return false; i8=new I7(b2); b=b2; return true } function f() { return 42 } return f');
assertAsmTypeFail('glob', 'ffis', 'b', USE_ASM + IMPORT1 + 'function ch(b2) { if(len(b2) & 0xffffff || len(b2) <= 0xffffff || len(b2) > 0x80000000) return false; i8=new b(b2); b=b2; return true } function f() { return 42 } return f');
assertAsmTypeFail('glob', 'ffis', 'b', USE_ASM + IMPORT1 + 'function ch(b2) { if(len(b2) & 0xffffff || len(b2) <= 0xffffff || len(b2) > 0x80000000) return false; i8=new I8; b=b2; return true } function f() { return 42 } return f');
assertAsmTypeFail('glob', 'ffis', 'b', USE_ASM + IMPORT1 + 'function ch(b2) { if(len(b2) & 0xffffff || len(b2) <= 0xffffff || len(b2) > 0x80000000) return false; i8=new I8(); b=b2; return true } function f() { return 42 } return f');
assertAsmTypeFail('glob', 'ffis', 'b', USE_ASM + IMPORT1 + 'function ch(b2) { if(len(b2) & 0xffffff || len(b2) <= 0xffffff || len(b2) > 0x80000000) return false; i8=new I8(1); b=b2; return true } function f() { return 42 } return f');
assertAsmTypeFail('glob', 'ffis', 'b', USE_ASM + IMPORT1 + 'function ch(b2) { if(len(b2) & 0xffffff || len(b2) <= 0xffffff || len(b2) > 0x80000000) return false; i8=new I8(b2,1); b=b2; return true } function f() { return 42 } return f');
assertAsmTypeFail('glob', 'ffis', 'b', USE_ASM + IMPORT1 + 'function ch(b2) { if(len(b2) & 0xffffff || len(b2) <= 0xffffff || len(b2) > 0x80000000) return false; i8=new I8(b2); i8=new I8(b2); b=b2; return true } function f() { return 42 } return f');
assertAsmTypeFail('glob', 'ffis', 'b', USE_ASM + IMPORT1 + 'function ch(b2) { if(len(b2) & 0xffffff || len(b2) <= 0xffffff || len(b2) > 0x80000000) return false; i8=new I8(b2); xyz=b2; return true } function f() { return 42 } return f');
assertAsmTypeFail('glob', 'ffis', 'b', USE_ASM + IMPORT1 + 'function ch(b2) { if(len(b2) & 0xffffff || len(b2) <= 0xffffff || len(b2) > 0x80000000) return false; i8=new I8(b2); b=1; return true } function f() { return 42 } return f');
assertAsmTypeFail('glob', 'ffis', 'b', USE_ASM + IMPORT1 + 'function ch(b2) { if(len(b2) & 0xffffff || len(b2) <= 0xffffff || len(b2) > 0x80000000) return false; i8=new I8(b2); b=b; return true } function f() { return 42 } return f');
assertAsmTypeFail('glob', 'ffis', 'b', USE_ASM + IMPORT1 + 'function ch(b2) { if(len(b2) & 0xffffff || len(b2) <= 0xffffff || len(b2) > 0x80000000) return false; i8=new I8(b2); b=b2; 1; return true } function f() { return 42 } return f');
assertAsmTypeFail('glob', 'ffis', 'b', USE_ASM + IMPORT1 + 'function ch(b2) { if(len(b2) & 0xffffff || len(b2) <= 0xffffff || len(b2) > 0x80000000) return false; i8=new I8(b2); b=b2; return } function f() { return 42 } return f');
assertAsmTypeFail('glob', 'ffis', 'b', USE_ASM + IMPORT1 + 'function ch(b2) { if(len(b2) & 0xffffff || len(b2) <= 0xffffff || len(b2) > 0x80000000) return false; i8=new I8(b2); b=b2; return 1 } function f() { return 42 } return f');
assertAsmTypeFail('glob', 'ffis', 'b', USE_ASM + IMPORT1 + 'function ch(b2) { if(len(b2) & 0xffffff || len(b2) <= 0xffffff || len(b2) > 0x80000000) return false; i8=new I8(b2); b=b2; return false } function f() { return 42 } return f');
assertAsmTypeFail('glob', 'ffis', 'b', USE_ASM + IMPORT1 + 'function ch(b2) { if(len(b2) & 0xffffff || len(b2) <= 0xffffff || len(b2) > 0x80000000) return false; i8=new I8(b2); b=b2; if (0) return true; 1 } function f() { return 42 } return f');
assertAsmTypeFail('glob', 'ffis', 'b', USE_ASM + IMPORT2 + 'function ch(b2) { if(len(b2) & 0xffffff || len(b2) <= 0xffffff || len(b2) > 0x80000000) return false; i8=new I8(b2); b=b2; return true } function f() { return 42 } return f');
assertAsmTypeFail('glob', 'ffis', 'b', USE_ASM + IMPORT2 + 'function ch(b2) { if(len(b2) & 0xffffff || len(b2) <= 0xffffff || len(b2) > 0x80000000) return false; i32=new I32(b2); b=b2; return true } function f() { return 42 } return f');
assertAsmTypeFail('glob', 'ffis', 'b', USE_ASM + IMPORT2 + 'function ch(b2) { if(len(b2) & 0xffffff || len(b2) <= 0xffffff || len(b2) > 0x80000000) return false; i32=new I32(b2); i8=new I8(b2); b=b2; return true } function f() { return 42 } return f');
asmCompile('glob', 'ffis', 'b', USE_ASM + IMPORT2 + 'function ch(b2) { if(len(b2) & 0xffffff || len(b2) <= 0xffffff || len(b2) > 0x80000000) return false; i8=new I8(b2); i32=new I32(b2); b=b2; return true } function f() { return 42 } return f');
assertAsmTypeFail('glob', 'ffis', 'b', USE_ASM + IMPORT2 + 'function ch(b2) { if(len(b2) & 0xffffff || len(b2) <= 0xffffff || len(b2) > 0x80000000) return false; i8=new I32(b2); i32=new I8(b2); b=b2; return true } function f() { return 42 } return f');
asmCompile('glob', 'ffis', 'b', USE_ASM + IMPORT2 + 'function ch(b2) { if(len(b2) & 0xffffff || len(b2) <= 0xffffff || len(b2) > 0x80000000) return false; i8=new I8(b2); i32=new II32(b2); b=b2; return true } function f() { return 42 } return f');
// Tests for no calls in heap index expressions
const CHANGE_FUN = 'function ch(b2) { if(len(b2) & 0xffffff || len(b2) <= 0xffffff || len(b2) > 0x80000000) return false; i8=new I8(b2); i32=new I32(b2); b=b2; return true }';
const SETUP = USE_ASM + IMPORT2 + 'var imul=glob.Math.imul; var ffi=ffis.ffi;' + CHANGE_FUN;
asmCompile('glob', 'ffis', 'b', SETUP + 'function f() { i32[0] } return f');
asmCompile('glob', 'ffis', 'b', SETUP + 'function f() { i32[0] = 0 } return f');
asmCompile('glob', 'ffis', 'b', SETUP + 'function f() { var i = 0; i32[i >> 2] } return f');
asmCompile('glob', 'ffis', 'b', SETUP + 'function f() { var i = 0; i32[i >> 2] = 0 } return f');
asmCompile('glob', 'ffis', 'b', SETUP + 'function f() { var i = 0; i32[(imul(i,i)|0) >> 2] = 0 } return f');
asmCompile('glob', 'ffis', 'b', SETUP + 'function f() { var i = 0; i32[i >> 2] = (imul(i,i)|0) } return f');
assertAsmTypeFail('glob', 'ffis', 'b', SETUP + 'function f() { var i = 0; i32[(ffi()|0) >> 2] } return f');
assertAsmTypeFail('glob', 'ffis', 'b', SETUP + 'function f() { var i = 0; i32[(g()|0) >> 2] } function g() { return 0 } return f');
assertAsmTypeFail('glob', 'ffis', 'b', SETUP + 'function f() { var i = 0; i32[(TBL[i&0]()|0) >> 2] } function g() { return 0 } var TBL=[g]; return f');
assertAsmTypeFail('glob', 'ffis', 'b', SETUP + 'function f() { var i = 0; i32[(g()|0) >> 2] = 0 } function g() { return 0 } return f');
assertAsmTypeFail('glob', 'ffis', 'b', SETUP + 'function f() { var i = 0; i32[i >> 2] = g()|0 } function g() { return 0 } return f');
assertAsmTypeFail('glob', 'ffis', 'b', SETUP + 'function f() { var i = 0; i32[i32[(g()|0)>>2] >> 2] } function g() { return 0 } return f');
assertAsmTypeFail('glob', 'ffis', 'b', SETUP + 'function f() { var i = 0; i32[i32[(g()|0)>>2] >> 2] = 0 } function g() { return 0 } return f');
assertAsmTypeFail('glob', 'ffis', 'b', SETUP + 'function f() { var i = 0; i32[i >> 2] = i32[(g()|0)>>2] } function g() { return 0 } return f');
assertAsmTypeFail('glob', 'ffis', 'b', SETUP + 'function f() { var i = 0; i32[((i32[i>>2]|0) + (g()|0)) >> 2] } function g() { return 0 } return f');
assertAsmTypeFail('glob', 'ffis', 'b', SETUP + 'function f() { var i = 0; i32[((i32[i>>2]|0) + (g()|0)) >> 2] = 0 } function g() { return 0 } return f');
assertAsmTypeFail('glob', 'ffis', 'b', SETUP + 'function f() { var i = 0; i32[i >> 2] = (i32[i>>2]|0) + (g()|0) } function g() { return 0 } return f');
if (isSimdAvailable() && typeof SIMD !== 'undefined')
asmCompile('glob', 'ffis', 'b', USE_ASM + IMPORT2 + 'var i4 = glob.SIMD.Int32x4; var ext = i4.extractLane; var add = i4.add;' + CHANGE_FUN + 'function f(i) { i=i|0; i32[ext(i4(i,1,2,i),0) >> 2]; i32[ext(add(i4(0,0,0,0),i4(1,1,1,1)),0) >> 2]; } return f');
// Tests for constant heap accesses when change-heap is used
const HEADER = USE_ASM + IMPORT1 + 'function ch(b2) { if(len(b2) & 0xffffff || len(b2) <= MIN || len(b2) > 0x80000000) return false; i8=new I8(b2); b=b2; return true } ';
assertAsmTypeFail('glob', 'ffis', 'b', HEADER.replace('MIN', '0xffffff') + 'function f() { i8[0x1000000] = 0 } return f');
asmCompile('glob', 'ffis', 'b', HEADER.replace('MIN', '0xffffff') + 'function f() { i8[0xffffff] = 0 } return f');
assertAsmTypeFail('glob', 'ffis', 'b', HEADER.replace('MIN', '0x1000000') + 'function f() { i8[0x1000001] = 0 } return f');
asmCompile('glob', 'ffis', 'b', HEADER.replace('MIN', '0x1000000') + 'function f() { i8[0x1000000] = 0 } return f');
assertAsmTypeFail('glob', 'ffis', 'b', HEADER.replace('MIN', '0xffffff') + 'function f() { return i8[0x1000000]|0 } return f');
asmCompile('glob', 'ffis', 'b', HEADER.replace('MIN', '0xffffff') + 'function f() { return i8[0xffffff]|0 } return f');
assertAsmTypeFail('glob', 'ffis', 'b', HEADER.replace('MIN', '0x1000000') + 'function f() { return i8[0x1000001]|0 } return f');
asmCompile('glob', 'ffis', 'b', HEADER.replace('MIN', '0x1000000') + 'function f() { return i8[0x1000000]|0 } return f');
// Tests for validation of heap length
var body = USE_ASM + IMPORT1 + 'function ch(b2) { if(len(b2) & 0xffffff || len(b2) <= 0x1ffffff || len(b2) > 0x4000000) return false; i8=new I8(b2); b=b2; return true } function f() { return 42 } return ch';
var m = asmCompile('glob', 'ffis', 'b', body);
assertAsmLinkFail(m, this, null, new ArrayBuffer(BUF_CHANGE_MIN));
assertAsmLinkFail(m, this, null, new ArrayBuffer(0x1000000));
var changeHeap = asmLink(m, this, null, new ArrayBuffer(0x2000000));
assertEq(changeHeap(new ArrayBuffer(0x1000000)), false);
assertEq(changeHeap(new ArrayBuffer(0x2000000)), true);
assertEq(changeHeap(new ArrayBuffer(0x2000001)), false);
assertEq(changeHeap(new ArrayBuffer(0x4000000)), true);
assertEq(changeHeap(new ArrayBuffer(0x5000000)), false);
assertThrowsInstanceOf(() => changeHeap(null), TypeError);
assertThrowsInstanceOf(() => changeHeap({}), TypeError);
assertThrowsInstanceOf(() => changeHeap(new Int32Array(100)), TypeError);
var detached = new ArrayBuffer(BUF_CHANGE_MIN);
neuter(detached, "change-data");
assertEq(changeHeap(detached), false);
// Tests for runtime changing heap
const CHANGE_HEAP = 'var changeHeap = glob.byteLength;';
var changeHeapSource = `function ch(b2) { if(len(b2) & 0xffffff || len(b2) <= 0xffffff || len(b2) > 0x80000000) return false; i32=new I32(b2); b=b2; return true }`;
var body = `var I32=glob.Int32Array; var i32=new I32(b);
var len=glob.byteLength;` +
changeHeapSource +
`function get(i) { i=i|0; return i32[i>>2]|0 }
function set(i, v) { i=i|0; v=v|0; i32[i>>2] = v }
return {get:get, set:set, changeHeap:ch}`;
var m = asmCompile('glob', 'ffis', 'b', USE_ASM + body);
var buf1 = new ArrayBuffer(BUF_CHANGE_MIN);
var {get, set, changeHeap} = asmLink(m, this, null, buf1);
assertEq(m.toString(), "function anonymous(glob, ffis, b) {\n" + USE_ASM + body + "\n}");
assertEq(m.toSource(), "(function anonymous(glob, ffis, b) {\n" + USE_ASM + body + "\n})");
assertEq(changeHeap.toString(), changeHeapSource);
assertEq(changeHeap.toSource(), changeHeapSource);
set(0, 42);
set(4, 13);
set(4, 13);
assertEq(get(0), 42);
assertEq(get(4), 13);
set(BUF_CHANGE_MIN, 262);
assertEq(get(BUF_CHANGE_MIN), 0);
var buf2 = new ArrayBuffer(2*BUF_CHANGE_MIN);
assertEq(changeHeap(buf2), true);
assertEq(get(0), 0);
assertEq(get(4), 0);
set(BUF_CHANGE_MIN, 262);
assertEq(get(BUF_CHANGE_MIN), 262);
set(2*BUF_CHANGE_MIN, 262);
assertEq(get(2*BUF_CHANGE_MIN), 0);
changeHeap(buf1);
assertEq(get(0), 42);
assertEq(get(4), 13);
set(BUF_CHANGE_MIN, 262);
assertEq(get(BUF_CHANGE_MIN), 0);
if (ArrayBuffer.transfer) {
var buf1 = new ArrayBuffer(BUF_CHANGE_MIN);
var {get, set, changeHeap} = asmLink(m, this, null, buf1);
set(0, 100);
set(BUF_CHANGE_MIN - 4, 101);
set(BUF_CHANGE_MIN, 102);
var buf2 = ArrayBuffer.transfer(buf1);
assertEq(changeHeap(buf2), true);
assertEq(buf1.byteLength, 0);
assertEq(buf2.byteLength, BUF_CHANGE_MIN);
assertEq(get(0), 100);
assertEq(get(BUF_CHANGE_MIN-4), 101);
assertEq(get(BUF_CHANGE_MIN), 0);
assertEq(get(2*BUF_CHANGE_MIN-4), 0);
var buf3 = ArrayBuffer.transfer(buf2, 3*BUF_CHANGE_MIN);
assertEq(changeHeap(buf3), true);
assertEq(buf2.byteLength, 0);
assertEq(buf3.byteLength, 3*BUF_CHANGE_MIN);
assertEq(get(0), 100);
assertEq(get(BUF_CHANGE_MIN-4), 101);
assertEq(get(BUF_CHANGE_MIN), 0);
assertEq(get(2*BUF_CHANGE_MIN), 0);
set(BUF_CHANGE_MIN, 102);
set(2*BUF_CHANGE_MIN, 103);
assertEq(get(BUF_CHANGE_MIN), 102);
assertEq(get(2*BUF_CHANGE_MIN), 103);
var buf4 = ArrayBuffer.transfer(buf3, 2*BUF_CHANGE_MIN);
assertEq(changeHeap(buf4), true);
assertEq(buf3.byteLength, 0);
assertEq(buf4.byteLength, 2*BUF_CHANGE_MIN);
assertEq(get(0), 100);
assertEq(get(BUF_CHANGE_MIN-4), 101);
assertEq(get(BUF_CHANGE_MIN), 102);
assertEq(get(2*BUF_CHANGE_MIN), 0);
var buf5 = ArrayBuffer.transfer(buf4, 3*BUF_CHANGE_MIN);
assertEq(changeHeap(buf5), true);
assertEq(buf4.byteLength, 0);
assertEq(buf5.byteLength, 3*BUF_CHANGE_MIN);
assertEq(get(0), 100);
assertEq(get(BUF_CHANGE_MIN-4), 101);
assertEq(get(BUF_CHANGE_MIN), 102);
assertEq(get(2*BUF_CHANGE_MIN), 0);
var buf6 = ArrayBuffer.transfer(buf5, 0);
assertEq(buf5.byteLength, 0);
assertEq(buf6.byteLength, 0);
assertEq(changeHeap(buf6), false);
}
var buf1 = new ArrayBuffer(BUF_CHANGE_MIN);
var buf2 = new ArrayBuffer(BUF_CHANGE_MIN);
var m = asmCompile('glob', 'ffis', 'b', USE_ASM +
`var len=glob.byteLength;
function ch(b2) { if(len(b2) & 0xffffff || len(b2) <= 0xffffff || len(b2) > 0x80000000) return false; b=b2; return true }
return ch`);
var changeHeap = asmLink(m, this, null, buf1);
assertEq(changeHeap(buf2), true);
neuter(buf2, "change-data");
assertEq(changeHeap(buf1), true);
neuter(buf1, "change-data");
var buf1 = new ArrayBuffer(BUF_CHANGE_MIN);
new Int32Array(buf1)[0] = 13;
var buf2 = new ArrayBuffer(BUF_CHANGE_MIN);
new Int32Array(buf2)[0] = 42;
// Tests for changing heap during an FFI:
// Set the warmup to '2' so we can hit both interp and ion FFI exits
setJitCompilerOption("ion.warmup.trigger", 2);
setJitCompilerOption("baseline.warmup.trigger", 0);
setJitCompilerOption("offthread-compilation.enable", 0);
var changeToBuf = null;
var m = asmCompile('glob', 'ffis', 'b', USE_ASM +
`var ffi=ffis.ffi;
var I32=glob.Int32Array; var i32=new I32(b);
var len=glob.byteLength;
function ch(b2) { if(len(b2) & 0xffffff || len(b2) <= 0xffffff || len(b2) > 0x80000000) return false; i32=new I32(b2); b=b2; return true }
function test(i) { i=i|0; var sum=0; sum = i32[i>>2]|0; sum = (sum + (ffi()|0))|0; sum = (sum + (i32[i>>2]|0))|0; return sum|0 }
return {test:test, changeHeap:ch}`);
var ffi = function() { changeHeap(changeToBuf); return 1 }
var {test, changeHeap} = asmLink(m, this, {ffi:ffi}, buf1);
changeToBuf = buf1;
assertEq(test(0), 27);
changeToBuf = buf2;
assertEq(test(0), 56);
changeToBuf = buf2;
assertEq(test(0), 85);
changeToBuf = buf1;
assertEq(test(0), 56);
changeToBuf = buf1;
assertEq(test(0), 27);
var ffi = function() { return { valueOf:function() { changeHeap(changeToBuf); return 100 } } };
var {test, changeHeap} = asmLink(m, this, {ffi:ffi}, buf1);
changeToBuf = buf1;
assertEq(test(0), 126);
changeToBuf = buf2;
assertEq(test(0), 155);
changeToBuf = buf2;
assertEq(test(0), 184);
changeToBuf = buf1;
assertEq(test(0), 155);
changeToBuf = buf1;
assertEq(test(0), 126);
if (ArrayBuffer.transfer) {
var buf = new ArrayBuffer(BUF_CHANGE_MIN);
new Int32Array(buf)[0] = 3;
var ffi = function() {
var buf2 = ArrayBuffer.transfer(buf, 2*BUF_CHANGE_MIN);
new Int32Array(buf2)[BUF_CHANGE_MIN/4] = 13;
assertEq(changeHeap(buf2), true);
return 1
}
var {test, changeHeap} = asmLink(m, this, {ffi:ffi}, buf);
assertEq(test(BUF_CHANGE_MIN), 14);
}

Просмотреть файл

@ -1,29 +0,0 @@
// |jit-test| exitstatus: 6;
load(libdir + "asm.js");
// This test may iloop for valid reasons if not compiled with asm.js (namely,
// inlining may allow the heap load to be hoisted out of the loop).
if (!isAsmJSCompilationAvailable())
quit(6);
setJitCompilerOption("signals.enable", 0);
var byteLength =
Function.prototype.call.bind(Object.getOwnPropertyDescriptor(ArrayBuffer.prototype, 'byteLength').get);
var buf1 = new ArrayBuffer(BUF_CHANGE_MIN);
new Int32Array(buf1)[0] = 13;
var buf2 = new ArrayBuffer(BUF_CHANGE_MIN);
new Int32Array(buf2)[0] = 42;
// Test changeHeap from interrupt (as if that could ever happen...)
var m = asmCompile('glob', 'ffis', 'b', USE_ASM +
`var I32=glob.Int32Array; var i32=new I32(b);
var len=glob.byteLength;
function changeHeap(b2) { if(len(b2) & 0xffffff || len(b2) <= 0xffffff || len(b2) > 0x80000000) return false; i32=new I32(b2); b=b2; return true }
function f() {}
function loop(i) { i=i|0; while((i32[i>>2]|0) == 13) { f() } }
return {loop:loop, changeHeap:changeHeap}`);
var { loop, changeHeap } = asmLink(m, this, null, buf1);
timeout(1, function() { assertEq(changeHeap(buf2), false); return false });
loop(0);

Просмотреть файл

@ -1,27 +0,0 @@
// |jit-test| exitstatus: 6;
load(libdir + "asm.js");
// This test may iloop for valid reasons if not compiled with asm.js (namely,
// inlining may allow the heap load to be hoisted out of the loop).
if (!isAsmJSCompilationAvailable())
quit(6);
var byteLength =
Function.prototype.call.bind(Object.getOwnPropertyDescriptor(ArrayBuffer.prototype, 'byteLength').get);
var buf1 = new ArrayBuffer(BUF_CHANGE_MIN);
new Int32Array(buf1)[0] = 13;
var buf2 = new ArrayBuffer(BUF_CHANGE_MIN);
new Int32Array(buf2)[0] = 42;
// Test changeHeap from interrupt (as if that could ever happen...)
var m = asmCompile('glob', 'ffis', 'b', USE_ASM +
`var I32=glob.Int32Array; var i32=new I32(b);
var len=glob.byteLength;
function changeHeap(b2) { if(len(b2) & 0xffffff || len(b2) <= 0xffffff || len(b2) > 0x80000000) return false; i32=new I32(b2); b=b2; return true }
function f() {}
function loop(i) { i=i|0; while((i32[i>>2]|0) == 13) { f() } }
return {loop:loop, changeHeap:changeHeap}`);
var { loop, changeHeap } = asmLink(m, this, null, buf1);
timeout(1, function() { assertEq(changeHeap(buf2), false); return false });
loop(0);

Просмотреть файл

@ -1,123 +0,0 @@
load(libdir + "asserts.js");
load(libdir + "asm.js");
// Currently, ArrayBuffer.transfer is #ifdef NIGHTLY_BUILD. When
// ArrayBuffer.transfer is enabled on release, this test should be removed.
if (!ArrayBuffer.transfer)
quit();
var XF = ArrayBuffer.transfer;
assertEq(typeof XF, "function");
assertEq(XF.length, 2);
// arg 1 errors
assertThrowsInstanceOf(()=>XF(), Error);
assertThrowsInstanceOf(()=>XF(undefined), Error);
assertThrowsInstanceOf(()=>XF(null), Error);
assertThrowsInstanceOf(()=>XF({}), Error);
assertThrowsInstanceOf(()=>XF(new Int32Array(1)), Error);
var buf = new ArrayBuffer(1);
neuter(buf, 'change-data');
assertThrowsInstanceOf(()=>XF(buf), TypeError);
// arg 2 errors
var buf = new ArrayBuffer(1);
assertThrowsInstanceOf(()=>XF(buf, -1), Error);
assertThrowsInstanceOf(()=>XF(buf, {valueOf() { return -1 }}), Error);
assertThrowsInstanceOf(()=>XF(buf, {toString() { return "-1" }}), Error);
assertThrowsValue(()=>XF(buf, {valueOf() { throw "wee" }}), "wee");
// arg 2 is coerced via ToInt32
var buf = new ArrayBuffer(1);
assertThrowsInstanceOf(()=>XF(buf, Math.pow(2,31)), Error);
buf = XF(buf, Math.pow(2,32));
assertEq(buf.byteLength, 0);
buf = XF(buf, Math.pow(2,32) + 10);
assertEq(buf.byteLength, 10);
assertThrowsInstanceOf(()=>XF(buf, {valueOf() { neuter(buf, "change-data"); return 10; }}), TypeError);
var buf = new ArrayBuffer(100);
assertThrowsInstanceOf(()=>XF(buf, {valueOf() { ArrayBuffer.transfer(buf, 0); return 100; }}), TypeError);
// with an undefined second argument, the buffer stays the same size:
var buf1 = new ArrayBuffer(0);
var buf2 = XF(buf1);
assertEq(buf1.byteLength, 0);
assertEq(buf2.byteLength, 0);
assertThrowsInstanceOf(()=>XF(buf1), TypeError);
var buf1 = new ArrayBuffer(3);
var buf2 = XF(buf1);
assertEq(buf1.byteLength, 0);
assertEq(buf2.byteLength, 3);
assertThrowsInstanceOf(()=>XF(buf1), TypeError);
var buf1 = new ArrayBuffer(9);
var buf2 = XF(buf1, undefined);
assertEq(buf1.byteLength, 0);
assertEq(buf2.byteLength, 9);
assertThrowsInstanceOf(()=>XF(buf1), TypeError);
// cross-compartment wrapper
var buf3 = newGlobal().eval("new ArrayBuffer(10)");
var buf4 = XF(buf3, 20);
assertEq(buf4.byteLength, 20);
assertThrowsInstanceOf(()=>XF(buf3), TypeError);
// test going to/from various sizes
function test(N1, N2) {
var buf1 = new ArrayBuffer(N1);
var i32 = new Int32Array(buf1);
for (var i = 0; i < i32.length; i++)
i32[i] = i;
var buf2 = XF(buf1, N2);
assertEq(buf1.byteLength, 0);
assertEq(i32.length, 0);
assertEq(buf2.byteLength, N2);
var i32 = new Int32Array(buf2);
for (var i = 0; i < Math.min(N1, N2)/4; i++)
assertEq(i32[i], i);
for (var i = Math.min(N1, N2)/4; i < i32.length; i++) {
assertEq(i32[i], 0);
i32[i] = -i;
}
}
test(0, 0);
test(0, 4);
test(4, 0);
test(4, 4);
test(0, 1000);
test(4, 1000);
test(1000, 0);
test(1000, 4);
test(1000, 1000);
// asm.js:
function testAsmJS(N1, N2) {
var buf1 = new ArrayBuffer(N1);
asmLink(asmCompile('stdlib', 'ffis', 'buf', USE_ASM + "var i32=new stdlib.Int32Array(buf); function f() {} return f"), this, null, buf1);
var i32 = new Int32Array(buf1);
for (var i = 0; i < i32.length; i+=100)
i32[i] = i;
var buf2 = XF(buf1, N2);
assertEq(buf1.byteLength, 0);
assertEq(i32.length, 0);
assertEq(buf2.byteLength, N2);
var i32 = new Int32Array(buf2);
var i = 0;
for (; i < Math.min(N1, N2)/4; i+=100)
assertEq(i32[i], i);
for (; i < i32.length; i+=100) {
assertEq(i32[i], 0);
i32[i] = -i;
}
}
testAsmJS(BUF_MIN, 0);
testAsmJS(BUF_MIN, BUF_MIN);
testAsmJS(BUF_MIN, 2*BUF_MIN);
testAsmJS(2*BUF_MIN, BUF_MIN);

Просмотреть файл

@ -0,0 +1 @@
evaluate('evalcx("1")', { fileName: null });

Просмотреть файл

@ -1491,7 +1491,8 @@ static const VMFunction DeepCloneObjectLiteralInfo =
bool
BaselineCompiler::emit_JSOP_OBJECT()
{
if (JS::CompartmentOptionsRef(cx).cloneSingletons()) {
JSCompartment* comp = cx->compartment();
if (comp->creationOptions().cloneSingletons()) {
RootedObject obj(cx, script->getObject(GET_UINT32_INDEX(pc)));
if (!obj)
return false;
@ -1510,7 +1511,7 @@ BaselineCompiler::emit_JSOP_OBJECT()
return true;
}
JS::CompartmentOptionsRef(cx).setSingletonsAsValues();
comp->behaviors().setSingletonsAsValues();
frame.push(ObjectValue(*script->getObject(pc)));
return true;
}

Просмотреть файл

@ -7896,7 +7896,6 @@ CodeGenerator::generateAsmJS(wasm::FuncOffsets* offsets)
target);
}
if (!generateBody())
return false;
@ -7912,7 +7911,6 @@ CodeGenerator::generateAsmJS(wasm::FuncOffsets* offsets)
masm.jump(masm.asmStackOverflowLabel());
}
#if defined(JS_ION_PERF)
// Note the end of the inline code and start of the OOL code.
gen->perfSpewer().noteEndInlineCode(masm);
@ -7921,10 +7919,6 @@ CodeGenerator::generateAsmJS(wasm::FuncOffsets* offsets)
if (!generateOutOfLineCode())
return false;
// Flush constant pools now so that pool hints encoded in the code stream
// get converted into actual instructions.
masm.flushBuffer();
offsets->end = masm.currentOffset();
MOZ_ASSERT(!masm.failureLabel()->used());

Просмотреть файл

@ -284,7 +284,7 @@ CompileCompartment::hasObjectMetadataCallback()
void
CompileCompartment::setSingletonsAsValues()
{
return JS::CompartmentOptionsRef(compartment()).setSingletonsAsValues();
compartment()->behaviors().setSingletonsAsValues();
}
JitCompileOptions::JitCompileOptions()
@ -296,8 +296,7 @@ JitCompileOptions::JitCompileOptions()
JitCompileOptions::JitCompileOptions(JSContext* cx)
{
JS::CompartmentOptions& options = cx->compartment()->options();
cloneSingletons_ = options.cloneSingletons();
cloneSingletons_ = cx->compartment()->creationOptions().cloneSingletons();
spsSlowAssertionsEnabled_ = cx->runtime()->spsProfiler.enabled() &&
cx->runtime()->spsProfiler.slowAssertionsEnabled();
offThreadCompilationAvailable_ = OffThreadCompilationAvailable(cx);

Просмотреть файл

@ -340,27 +340,25 @@ ExecutableAllocator::addSizeOfCode(JS::CodeSizes* sizes) const
void
ExecutableAllocator::reprotectAll(ProtectionSetting protection)
{
if (!nonWritableJitCode)
return;
#ifdef NON_WRITABLE_JIT_CODE
if (!m_pools.initialized())
return;
for (ExecPoolHashSet::Range r = m_pools.all(); !r.empty(); r.popFront())
reprotectPool(rt_, r.front(), protection);
#endif
}
/* static */ void
ExecutableAllocator::reprotectPool(JSRuntime* rt, ExecutablePool* pool, ProtectionSetting protection)
{
#ifdef NON_WRITABLE_JIT_CODE
// Don't race with reprotectAll called from the signal handler.
MOZ_ASSERT(rt->jitRuntime()->preventBackedgePatching() || rt->handlingJitInterrupt());
if (!nonWritableJitCode)
return;
char* start = pool->m_allocation.pages;
reprotectRegion(start, pool->m_freePtr - start, protection);
#endif
}
/* static */ void
@ -407,5 +405,3 @@ ExecutableAllocator::poisonCode(JSRuntime* rt, JitPoisonRangeVector& ranges)
pool->release();
}
}
bool ExecutableAllocator::nonWritableJitCode = true;

Просмотреть файл

@ -157,6 +157,8 @@ struct JitPoisonRange
typedef Vector<JitPoisonRange, 0, SystemAllocPolicy> JitPoisonRangeVector;
#define NON_WRITABLE_JIT_CODE 1
class ExecutableAllocator
{
#ifdef XP_WIN
@ -183,8 +185,6 @@ class ExecutableAllocator
static void initStatic();
static bool nonWritableJitCode;
private:
static size_t pageSize;
static size_t largeAllocSize;
@ -206,14 +206,16 @@ class ExecutableAllocator
public:
static void makeWritable(void* start, size_t size)
{
if (nonWritableJitCode)
reprotectRegion(start, size, Writable);
#ifdef NON_WRITABLE_JIT_CODE
reprotectRegion(start, size, Writable);
#endif
}
static void makeExecutable(void* start, size_t size)
{
if (nonWritableJitCode)
reprotectRegion(start, size, Executable);
#ifdef NON_WRITABLE_JIT_CODE
reprotectRegion(start, size, Executable);
#endif
}
void makeAllWritable() {

Просмотреть файл

@ -81,7 +81,7 @@ static const unsigned FLAGS_RX = PROT_READ | PROT_EXEC;
void
ExecutableAllocator::reprotectRegion(void* start, size_t size, ProtectionSetting setting)
{
MOZ_ASSERT(nonWritableJitCode);
MOZ_ASSERT(NON_WRITABLE_JIT_CODE);
MOZ_ASSERT(pageSize);
// Calculate the start of the page containing this region,
@ -101,8 +101,9 @@ ExecutableAllocator::reprotectRegion(void* start, size_t size, ProtectionSetting
/* static */ unsigned
ExecutableAllocator::initialProtectionFlags(ProtectionSetting protection)
{
if (!nonWritableJitCode)
return FLAGS_RW | FLAGS_RX;
#ifdef NON_WRITABLE_JIT_CODE
return (protection == Writable) ? FLAGS_RW : FLAGS_RX;
#else
return FLAGS_RW | FLAGS_RX;
#endif
}
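The comment inside reprotectRegion above is truncated by the hunk; what it describes is the usual step of rounding the range out to page boundaries before calling mprotect. A sketch of that arithmetic, assuming pageSize is a power of two; the helper name and standalone form are illustrative, not the tree's actual function.

    #include <stdint.h>
    #include <stdlib.h>
    #include <sys/mman.h>

    // Reprotect the pages covering [start, start + size) (POSIX flavour).
    static void
    ReprotectSketch(void* start, size_t size, int flags, size_t pageSize)
    {
        uintptr_t begin = reinterpret_cast<uintptr_t>(start);
        uintptr_t pageBegin = begin & ~(pageSize - 1);   // round start down to a page
        size_t paddedSize = size + (begin - pageBegin);  // cover the leading slop too
        if (mprotect(reinterpret_cast<void*>(pageBegin), paddedSize, flags) != 0)
            abort();                                     // the real code crashes on failure too
    }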

Просмотреть файл

@ -242,7 +242,7 @@ ExecutableAllocator::systemRelease(const ExecutablePool::Allocation& alloc)
void
ExecutableAllocator::reprotectRegion(void* start, size_t size, ProtectionSetting setting)
{
MOZ_ASSERT(nonWritableJitCode);
MOZ_ASSERT(NON_WRITABLE_JIT_CODE);
MOZ_ASSERT(pageSize);
// Calculate the start of the page containing this region,
@ -265,8 +265,9 @@ ExecutableAllocator::reprotectRegion(void* start, size_t size, ProtectionSetting
/* static */ unsigned
ExecutableAllocator::initialProtectionFlags(ProtectionSetting protection)
{
if (!nonWritableJitCode)
return PAGE_EXECUTE_READWRITE;
#ifdef NON_WRITABLE_JIT_CODE
return (protection == Writable) ? PAGE_READWRITE : PAGE_EXECUTE_READ;
#else
return PAGE_EXECUTE_READWRITE;
#endif
}

Просмотреть файл

@ -526,9 +526,9 @@ void FinishInvalidation(FreeOp* fop, JSScript* script);
const unsigned WINDOWS_BIG_FRAME_TOUCH_INCREMENT = 4096 - 1;
#endif
// If ExecutableAllocator::nonWritableJitCode is |true|, this class will ensure
// JIT code is writable (has RW permissions) in its scope. If nonWritableJitCode
// is |false|, it's a no-op.
// If NON_WRITABLE_JIT_CODE is enabled, this class will ensure
// JIT code is writable (has RW permissions) in its scope.
// Otherwise it's a no-op.
class MOZ_STACK_CLASS AutoWritableJitCode
{
// Backedge patching from the signal handler will change memory protection

Просмотреть файл

@ -2007,7 +2007,7 @@ MacroAssembler::convertTypedOrValueToInt(TypedOrValueRegister src, FloatRegister
}
bool
MacroAssembler::asmMergeWith(const MacroAssembler& other)
MacroAssembler::asmMergeWith(MacroAssembler& other)
{
size_t sizeBeforeMerge = size();

Просмотреть файл

@ -1427,7 +1427,7 @@ class MacroAssembler : public MacroAssemblerSpecific
return &asmOnConversionErrorLabel_;
}
bool asmMergeWith(const MacroAssembler& masm);
bool asmMergeWith(MacroAssembler& masm);
void finish();
void link(JitCode* code);

Просмотреть файл

@ -634,9 +634,10 @@ Assembler::finish()
}
bool
Assembler::asmMergeWith(const Assembler& other)
Assembler::asmMergeWith(Assembler& other)
{
flush();
other.flush();
if (!AssemblerShared::asmMergeWith(size(), other))
return false;
return m_buffer.appendBuffer(other.m_buffer);

Просмотреть файл

@ -1396,7 +1396,7 @@ class Assembler : public AssemblerShared
bool isFinished;
public:
void finish();
bool asmMergeWith(const Assembler& other);
bool asmMergeWith(Assembler& other);
void executableCopy(void* buffer);
void copyJumpRelocationTable(uint8_t* dest);
void copyDataRelocationTable(uint8_t* dest);

Просмотреть файл

@ -31,7 +31,6 @@ BEGIN_TEST(testRedefineGlobalEval)
/* Create the global object. */
JS::CompartmentOptions options;
options.setVersion(JSVERSION_LATEST);
JS::Rooted<JSObject*> g(cx, JS_NewGlobalObject(cx, &cls, nullptr, JS::FireOnNewGlobalHook, options));
if (!g)
return false;

Просмотреть файл

@ -123,7 +123,6 @@ BEGIN_TEST(testGCFinalizeCallback)
JSObject* createTestGlobal()
{
JS::CompartmentOptions options;
options.setVersion(JSVERSION_LATEST);
return JS_NewGlobalObject(cx, getGlobalClass(), nullptr, JS::FireOnNewGlobalHook, options);
}

Просмотреть файл

@ -90,8 +90,8 @@ JSObject*
createTestGlobal(bool preserveJitCode)
{
JS::CompartmentOptions options;
options.setVersion(JSVERSION_LATEST);
options.setPreserveJitCode(preserveJitCode);
options.creationOptions().setPreserveJitCode(preserveJitCode);
options.behaviors().setVersion(JSVERSION_LATEST);
return JS_NewGlobalObject(cx, getGlobalClass(), nullptr, JS::FireOnNewGlobalHook, options);
}
END_TEST(test_PreserveJitCode)

Просмотреть файл

@ -9,24 +9,32 @@
BEGIN_TEST(testBug795104)
{
JS::CompileOptions opts(cx);
JS::CompartmentOptionsRef(cx->compartment()).setDiscardSource(true);
JS::CompartmentBehaviorsRef(cx->compartment()).setDiscardSource(true);
const size_t strLen = 60002;
char* s = static_cast<char*>(JS_malloc(cx, strLen));
CHECK(s);
s[0] = '"';
memset(s + 1, 'x', strLen - 2);
s[strLen - 1] = '"';
// We don't want an rval for our Evaluate call
opts.setNoScriptRval(true);
JS::RootedValue unused(cx);
CHECK(JS::Evaluate(cx, opts, s, strLen, &unused));
JS::RootedFunction fun(cx);
JS::AutoObjectVector emptyScopeChain(cx);
// But when compiling a function we don't want to use no-rval
// mode, since it's not supported for functions.
opts.setNoScriptRval(false);
CHECK(JS::CompileFunction(cx, emptyScopeChain, opts, "f", 0, nullptr, s, strLen, &fun));
CHECK(fun);
JS_free(cx, s);
return true;

Просмотреть файл

@ -230,12 +230,14 @@ JSObject* newDelegate()
/* Create the global object. */
JS::CompartmentOptions options;
options.setVersion(JSVERSION_LATEST);
JS::RootedObject global(cx);
global = JS_NewGlobalObject(cx, Jsvalify(&delegateClass), nullptr, JS::FireOnNewGlobalHook,
options);
JS_SetReservedSlot(global, 0, JS::Int32Value(42));
options.behaviors().setVersion(JSVERSION_LATEST);
JS::RootedObject global(cx, JS_NewGlobalObject(cx, Jsvalify(&delegateClass), nullptr,
JS::FireOnNewGlobalHook, options));
if (!global)
return nullptr;
JS_SetReservedSlot(global, 0, JS::Int32Value(42));
return global;
}

View file

@ -74,12 +74,12 @@ bool JSAPITest::definePrint()
return JS_DefineFunction(cx, global, "print", (JSNative) print, 0, 0);
}
JSObject * JSAPITest::createGlobal(JSPrincipals* principals)
JSObject* JSAPITest::createGlobal(JSPrincipals* principals)
{
/* Create the global object. */
JS::RootedObject newGlobal(cx);
JS::CompartmentOptions options;
options.setVersion(JSVERSION_LATEST);
options.behaviors().setVersion(JSVERSION_LATEST);
newGlobal = JS_NewGlobalObject(cx, getGlobalClass(), principals, JS::FireOnNewGlobalHook,
options);
if (!newGlobal)
@ -87,8 +87,8 @@ JSObject * JSAPITest::createGlobal(JSPrincipals* principals)
JSAutoCompartment ac(cx, newGlobal);
/* Populate the global object with the standard globals, like Object and
Array. */
// Populate the global object with the standard globals like Object and
// Array.
if (!JS_InitStandardClasses(cx, newGlobal))
return nullptr;

View file

@ -637,7 +637,7 @@ JS_GetVersion(JSContext* cx)
JS_PUBLIC_API(void)
JS_SetVersionForCompartment(JSCompartment* compartment, JSVersion version)
{
compartment->options().setVersion(version);
compartment->behaviors().setVersion(version);
}
static const struct v2smap {
@ -827,7 +827,7 @@ JS::StringOfAddonId(JSAddonId* id)
JS_PUBLIC_API(JSAddonId*)
JS::AddonIdOfObject(JSObject* obj)
{
return obj->compartment()->addonId;
return obj->compartment()->creationOptions().addonIdOrNull();
}
JS_PUBLIC_API(void)
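Neither public entry point changes shape in this patch; only the backing storage moves (behaviors for the version, creation options for the add-on ID). A hedged caller-side sketch, unchanged from the embedder's point of view (js::GetContextCompartment from jsfriendapi.h is used only to obtain a compartment for the example):

void CallerSideView(JSContext* cx, JSObject* obj)
{
    // The add-on ID now comes from the compartment's creation options.
    JSAddonId* addon = JS::AddonIdOfObject(obj);   // may be null
    (void) addon;

    // The version now lives in the compartment's behaviors.
    JS_SetVersionForCompartment(js::GetContextCompartment(cx), JSVERSION_LATEST);
}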
@ -1395,6 +1395,14 @@ JS_SetGCCallback(JSRuntime* rt, JSGCCallback cb, void* data)
rt->gc.setGCCallback(cb, data);
}
JS_PUBLIC_API(void)
JS_SetObjectsTenuredCallback(JSRuntime* rt, JSObjectsTenuredCallback cb,
void* data)
{
AssertHeapIsIdle(rt);
rt->gc.setObjectsTenuredCallback(cb, data);
}
JS_PUBLIC_API(bool)
JS_AddFinalizeCallback(JSRuntime* rt, JSFinalizeCallback cb, void* data)
{
@ -1787,47 +1795,65 @@ JS_GetConstructor(JSContext* cx, HandleObject proto)
}
bool
JS::CompartmentOptions::extraWarnings(JSRuntime* rt) const
JS::CompartmentBehaviors::extraWarnings(JSRuntime* rt) const
{
return extraWarningsOverride_.get(rt->options().extraWarnings());
}
bool
JS::CompartmentOptions::extraWarnings(JSContext* cx) const
JS::CompartmentBehaviors::extraWarnings(JSContext* cx) const
{
return extraWarnings(cx->runtime());
}
JS::CompartmentOptions&
JS::CompartmentOptions::setZone(ZoneSpecifier spec)
JS::CompartmentCreationOptions&
JS::CompartmentCreationOptions::setZone(ZoneSpecifier spec)
{
zone_.spec = spec;
return *this;
}
JS::CompartmentOptions&
JS::CompartmentOptions::setSameZoneAs(JSObject* obj)
JS::CompartmentCreationOptions&
JS::CompartmentCreationOptions::setSameZoneAs(JSObject* obj)
{
zone_.pointer = static_cast<void*>(obj->zone());
return *this;
}
JS::CompartmentOptions&
JS::CompartmentOptionsRef(JSCompartment* compartment)
const JS::CompartmentCreationOptions&
JS::CompartmentCreationOptionsRef(JSCompartment* compartment)
{
return compartment->options();
return compartment->creationOptions();
}
JS::CompartmentOptions&
JS::CompartmentOptionsRef(JSObject* obj)
const JS::CompartmentCreationOptions&
JS::CompartmentCreationOptionsRef(JSObject* obj)
{
return obj->compartment()->options();
return obj->compartment()->creationOptions();
}
JS::CompartmentOptions&
JS::CompartmentOptionsRef(JSContext* cx)
const JS::CompartmentCreationOptions&
JS::CompartmentCreationOptionsRef(JSContext* cx)
{
return cx->compartment()->options();
return cx->compartment()->creationOptions();
}
JS::CompartmentBehaviors&
JS::CompartmentBehaviorsRef(JSCompartment* compartment)
{
return compartment->behaviors();
}
JS::CompartmentBehaviors&
JS::CompartmentBehaviorsRef(JSObject* obj)
{
return obj->compartment()->behaviors();
}
JS::CompartmentBehaviors&
JS::CompartmentBehaviorsRef(JSContext* cx)
{
return cx->compartment()->behaviors();
}
JS_PUBLIC_API(JSObject*)
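With the accessor split, creation options are handed back as const references while behaviors stay writable. A hedged sketch of the intended division of labour on a live context:

void InspectAndTweak(JSContext* cx)
{
    // Read-only: decided when the compartment was created.
    const JS::CompartmentCreationOptions& creation = JS::CompartmentCreationOptionsRef(cx);
    bool mergeable = creation.mergeable();
    (void) mergeable;

    // Mutable: can still be flipped on the existing compartment.
    JS::CompartmentBehaviorsRef(cx).setDiscardSource(true);
}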
@ -1857,8 +1883,7 @@ JS_GlobalObjectTraceHook(JSTracer* trc, JSObject* global)
// compartment is live.
global->compartment()->trace(trc);
JSTraceOp trace = global->compartment()->options().getTrace();
if (trace)
if (JSTraceOp trace = global->compartment()->creationOptions().getTrace())
trace(trc, global);
}
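Since getTrace() now lives on the creation options, a per-global trace hook has to be chosen up front, before the global exists. A hedged registration sketch (the hook body is hypothetical):

static void TraceEmbedderGlobal(JSTracer* trc, JSObject* global)
{
    // Trace embedder state hanging off the global here (hypothetical).
}

JS::CompartmentOptions MakeOptionsWithTraceHook()
{
    JS::CompartmentOptions options;
    options.creationOptions().setTrace(TraceEmbedderGlobal);  // consulted by JS_GlobalObjectTraceHook
    return options;
}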
@ -3889,7 +3914,7 @@ JS::CompileOptions::CompileOptions(JSContext* cx, JSVersion version)
this->version = (version != JSVERSION_UNKNOWN) ? version : cx->findVersion();
strictOption = cx->runtime()->options().strictMode();
extraWarningsOption = cx->compartment()->options().extraWarnings(cx);
extraWarningsOption = cx->compartment()->behaviors().extraWarnings(cx);
werrorOption = cx->runtime()->options().werror();
if (!cx->runtime()->options().asmJS())
asmJSOption = AsmJSOption::Disabled;
@ -5909,6 +5934,8 @@ JS_PUBLIC_API(bool)
DescribeScriptedCaller(JSContext* cx, UniqueChars* filename, unsigned* lineno,
unsigned* column)
{
if (filename)
filename->reset();
if (lineno)
*lineno = 0;
if (column)
@ -5923,7 +5950,7 @@ DescribeScriptedCaller(JSContext* cx, UniqueChars* filename, unsigned* lineno,
if (i.activation()->scriptedCallerIsHidden())
return false;
if (filename) {
if (filename && i.filename()) {
UniqueChars copy = make_string_copy(i.filename());
if (!copy)
return false;
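Together the two fixes make DescribeScriptedCaller safe to call without pre-initialising the out-parameters and tolerant of frames that have no filename. A hedged caller sketch; the namespace qualification of the overload shown above and the UniqueChars alias are taken as given from the hunk, not verified here:

#include <stdio.h>

void ReportCaller(JSContext* cx)
{
    UniqueChars filename;               // reset by the callee even on early bailout
    unsigned line = 0, column = 0;
    if (DescribeScriptedCaller(cx, &filename, &line, &column)) {
        // filename can still be null for frames without a source name.
        fprintf(stderr, "caller: %s:%u:%u\n",
                filename ? filename.get() : "<unknown>", line, column);
    }
}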

View file

@ -572,6 +572,9 @@ typedef enum JSGCStatus {
typedef void
(* JSGCCallback)(JSRuntime* rt, JSGCStatus status, void* data);
typedef void
(* JSObjectsTenuredCallback)(JSRuntime* rt, void* data);
typedef enum JSFinalizeStatus {
/**
* Called when preparing to sweep a group of compartments, before anything
@ -1654,6 +1657,10 @@ JS_MaybeGC(JSContext* cx);
extern JS_PUBLIC_API(void)
JS_SetGCCallback(JSRuntime* rt, JSGCCallback cb, void* data);
extern JS_PUBLIC_API(void)
JS_SetObjectsTenuredCallback(JSRuntime* rt, JSObjectsTenuredCallback cb,
void* data);
extern JS_PUBLIC_API(bool)
JS_AddFinalizeCallback(JSRuntime* rt, JSFinalizeCallback cb, void* data);
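The new callback is registered exactly like JS_SetGCCallback: one function pointer plus opaque data per runtime, invoked when the nursery tenures objects (see GCRuntime::callObjectsTenuredCallback later in this commit). A hedged registration sketch; EmbedderCaches is hypothetical:

struct EmbedderCaches { void invalidateNurseryPointers() {} };

static void OnObjectsTenured(JSRuntime* rt, void* data)
{
    // Addresses of nursery-allocated objects are stale after tenuring.
    static_cast<EmbedderCaches*>(data)->invalidateNurseryPointers();
}

void RegisterTenuredHook(JSRuntime* rt, EmbedderCaches* caches)
{
    JS_SetObjectsTenuredCallback(rt, OnObjectsTenured, caches);
}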
@ -2156,7 +2163,104 @@ enum ZoneSpecifier {
SystemZone = 1
};
class JS_PUBLIC_API(CompartmentOptions)
/**
* CompartmentCreationOptions specifies options relevant to creating a new
* compartment. These are either immutable characteristics of the compartment
* or values discarded after the compartment has been created.
*
* Access to these options on an existing compartment is read-only: if you
* need particular selections, make them before you create the compartment.
*/
class JS_PUBLIC_API(CompartmentCreationOptions)
{
public:
CompartmentCreationOptions()
: addonId_(nullptr),
traceGlobal_(nullptr),
invisibleToDebugger_(false),
mergeable_(false),
preserveJitCode_(false),
cloneSingletons_(false)
{
zone_.spec = JS::FreshZone;
}
// A null add-on ID means that the compartment is not associated with an
// add-on.
JSAddonId* addonIdOrNull() const { return addonId_; }
CompartmentCreationOptions& setAddonId(JSAddonId* id) {
addonId_ = id;
return *this;
}
JSTraceOp getTrace() const {
return traceGlobal_;
}
CompartmentCreationOptions& setTrace(JSTraceOp op) {
traceGlobal_ = op;
return *this;
}
void* zonePointer() const {
MOZ_ASSERT(uintptr_t(zone_.pointer) > uintptr_t(JS::SystemZone));
return zone_.pointer;
}
ZoneSpecifier zoneSpecifier() const { return zone_.spec; }
CompartmentCreationOptions& setZone(ZoneSpecifier spec);
CompartmentCreationOptions& setSameZoneAs(JSObject* obj);
// Certain scopes (i.e. XBL compilation scopes) are implementation details
// of the embedding, and references to them should never leak out to script.
// This flag causes this compartment to skip firing onNewGlobalObject
// and makes addDebuggee a no-op for this global.
bool invisibleToDebugger() const { return invisibleToDebugger_; }
CompartmentCreationOptions& setInvisibleToDebugger(bool flag) {
invisibleToDebugger_ = flag;
return *this;
}
// Compartments used for off-thread compilation have their contents merged
// into a target compartment when the compilation is finished. This is only
// allowed if this flag is set. The invisibleToDebugger flag must also be
// set for such compartments.
bool mergeable() const { return mergeable_; }
CompartmentCreationOptions& setMergeable(bool flag) {
mergeable_ = flag;
return *this;
}
// Determines whether this compartment should preserve JIT code on
// non-shrinking GCs.
bool preserveJitCode() const { return preserveJitCode_; }
CompartmentCreationOptions& setPreserveJitCode(bool flag) {
preserveJitCode_ = flag;
return *this;
}
bool cloneSingletons() const { return cloneSingletons_; }
CompartmentCreationOptions& setCloneSingletons(bool flag) {
cloneSingletons_ = flag;
return *this;
}
private:
JSAddonId* addonId_;
JSTraceOp traceGlobal_;
union {
ZoneSpecifier spec;
void* pointer; // js::Zone* is not exposed in the API.
} zone_;
bool invisibleToDebugger_;
bool mergeable_;
bool preserveJitCode_;
bool cloneSingletons_;
};
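These knobs compose before the compartment exists; per the comments above, a mergeable (off-thread compilation) compartment must also be invisible to the debugger. A hedged sketch of such a configuration, using the CompartmentOptions wrapper defined further down:

JS::CompartmentOptions MakeOffThreadCompileOptions()
{
    JS::CompartmentOptions options;
    options.creationOptions()
           .setInvisibleToDebugger(true)   // required for mergeable compartments
           .setMergeable(true)
           .setZone(JS::FreshZone);        // the throw-away compartment gets its own zone
    return options;                        // none of this can be changed after creation
}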
/**
* CompartmentBehaviors specifies behaviors of a compartment that can be
* changed after the compartment's been created.
*/
class JS_PUBLIC_API(CompartmentBehaviors)
{
public:
class Override {
@ -2187,140 +2291,120 @@ class JS_PUBLIC_API(CompartmentOptions)
Mode mode_;
};
explicit CompartmentOptions()
CompartmentBehaviors()
: version_(JSVERSION_UNKNOWN)
, invisibleToDebugger_(false)
, mergeable_(false)
, discardSource_(false)
, disableLazyParsing_(false)
, cloneSingletons_(false)
, traceGlobal_(nullptr)
, singletonsAsTemplates_(true)
, addonId_(nullptr)
, preserveJitCode_(false)
{
zone_.spec = JS::FreshZone;
}
JSVersion version() const { return version_; }
CompartmentOptions& setVersion(JSVersion aVersion) {
CompartmentBehaviors& setVersion(JSVersion aVersion) {
MOZ_ASSERT(aVersion != JSVERSION_UNKNOWN);
version_ = aVersion;
return *this;
}
// Certain scopes (i.e. XBL compilation scopes) are implementation details
// of the embedding, and references to them should never leak out to script.
// This flag causes this compartment to skip firing onNewGlobalObject
// and makes addDebuggee a no-op for this global.
bool invisibleToDebugger() const { return invisibleToDebugger_; }
CompartmentOptions& setInvisibleToDebugger(bool flag) {
invisibleToDebugger_ = flag;
return *this;
}
// Compartments used for off-thread compilation have their contents merged
// into a target compartment when the compilation is finished. This is only
// allowed if this flag is set. The invisibleToDebugger flag must also be
// set for such compartments.
bool mergeable() const { return mergeable_; }
CompartmentOptions& setMergeable(bool flag) {
mergeable_ = flag;
return *this;
}
// For certain globals, we know enough about the code that will run in them
// that we can discard script source entirely.
bool discardSource() const { return discardSource_; }
CompartmentOptions& setDiscardSource(bool flag) {
CompartmentBehaviors& setDiscardSource(bool flag) {
discardSource_ = flag;
return *this;
}
bool disableLazyParsing() const { return disableLazyParsing_; }
CompartmentOptions& setDisableLazyParsing(bool flag) {
CompartmentBehaviors& setDisableLazyParsing(bool flag) {
disableLazyParsing_ = flag;
return *this;
}
bool cloneSingletons() const { return cloneSingletons_; }
CompartmentOptions& setCloneSingletons(bool flag) {
cloneSingletons_ = flag;
return *this;
}
bool extraWarnings(JSRuntime* rt) const;
bool extraWarnings(JSContext* cx) const;
Override& extraWarningsOverride() { return extraWarningsOverride_; }
void* zonePointer() const {
MOZ_ASSERT(uintptr_t(zone_.pointer) > uintptr_t(JS::SystemZone));
return zone_.pointer;
}
ZoneSpecifier zoneSpecifier() const { return zone_.spec; }
CompartmentOptions& setZone(ZoneSpecifier spec);
CompartmentOptions& setSameZoneAs(JSObject* obj);
void setSingletonsAsValues() {
singletonsAsTemplates_ = false;
}
bool getSingletonsAsTemplates() const {
return singletonsAsTemplates_;
}
// A null add-on ID means that the compartment is not associated with an
// add-on.
JSAddonId* addonIdOrNull() const { return addonId_; }
CompartmentOptions& setAddonId(JSAddonId* id) {
addonId_ = id;
return *this;
}
CompartmentOptions& setTrace(JSTraceOp op) {
traceGlobal_ = op;
return *this;
}
JSTraceOp getTrace() const {
return traceGlobal_;
}
bool preserveJitCode() const { return preserveJitCode_; }
CompartmentOptions& setPreserveJitCode(bool flag) {
preserveJitCode_ = flag;
CompartmentBehaviors& setSingletonsAsValues() {
singletonsAsTemplates_ = false;
return *this;
}
private:
JSVersion version_;
bool invisibleToDebugger_;
bool mergeable_;
bool discardSource_;
bool disableLazyParsing_;
bool cloneSingletons_;
Override extraWarningsOverride_;
union {
ZoneSpecifier spec;
void* pointer; // js::Zone* is not exposed in the API.
} zone_;
JSTraceOp traceGlobal_;
// To XDR singletons, we need to ensure that all singletons are used as
// templates, by making JSOP_OBJECT return a clone of the JSScript
// singleton, instead of returning the value which is baked in the JSScript.
bool singletonsAsTemplates_;
JSAddonId* addonId_;
bool preserveJitCode_;
};
JS_PUBLIC_API(CompartmentOptions&)
CompartmentOptionsRef(JSCompartment* compartment);
/**
* CompartmentOptions specifies compartment characteristics: both those that
* can't be changed on a compartment once it's been created
* (CompartmentCreationOptions), and those that can be changed on an existing
* compartment (CompartmentBehaviors).
*/
class JS_PUBLIC_API(CompartmentOptions)
{
public:
explicit CompartmentOptions()
: creationOptions_(),
behaviors_()
{}
JS_PUBLIC_API(CompartmentOptions&)
CompartmentOptionsRef(JSObject* obj);
CompartmentOptions(const CompartmentCreationOptions& compartmentCreation,
const CompartmentBehaviors& compartmentBehaviors)
: creationOptions_(compartmentCreation),
behaviors_(compartmentBehaviors)
{}
JS_PUBLIC_API(CompartmentOptions&)
CompartmentOptionsRef(JSContext* cx);
// CompartmentCreationOptions specifies fundamental compartment
// characteristics that must be chosen when the compartment is created and
// can't be changed afterwards.
CompartmentCreationOptions& creationOptions() {
return creationOptions_;
}
const CompartmentCreationOptions& creationOptions() const {
return creationOptions_;
}
// CompartmentBehaviors specify compartment characteristics that can be
// changed after the compartment is created.
CompartmentBehaviors& behaviors() {
return behaviors_;
}
const CompartmentBehaviors& behaviors() const {
return behaviors_;
}
private:
CompartmentCreationOptions creationOptions_;
CompartmentBehaviors behaviors_;
};
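The two-argument constructor makes it straightforward to stamp out a new global that copies an existing compartment's configuration; a hedged sketch combining it with the Ref accessors introduced earlier:

JSObject* NewGlobalLike(JSContext* cx, const JSClass* clasp, JSObject* model)
{
    JS::CompartmentOptions options(JS::CompartmentCreationOptionsRef(model),
                                   JS::CompartmentBehaviorsRef(model));
    return JS_NewGlobalObject(cx, clasp, nullptr, JS::FireOnNewGlobalHook, options);
}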
JS_PUBLIC_API(const CompartmentCreationOptions&)
CompartmentCreationOptionsRef(JSCompartment* compartment);
JS_PUBLIC_API(const CompartmentCreationOptions&)
CompartmentCreationOptionsRef(JSObject* obj);
JS_PUBLIC_API(const CompartmentCreationOptions&)
CompartmentCreationOptionsRef(JSContext* cx);
JS_PUBLIC_API(CompartmentBehaviors&)
CompartmentBehaviorsRef(JSCompartment* compartment);
JS_PUBLIC_API(CompartmentBehaviors&)
CompartmentBehaviorsRef(JSObject* obj);
JS_PUBLIC_API(CompartmentBehaviors&)
CompartmentBehaviorsRef(JSContext* cx);
/**
* During global creation, we fire notifications to callbacks registered

View file

@ -398,13 +398,13 @@ checkReportFlags(JSContext* cx, unsigned* flags)
JSScript* script = cx->currentScript(&pc);
if (script && IsCheckStrictOp(JSOp(*pc)))
*flags &= ~JSREPORT_WARNING;
else if (cx->compartment()->options().extraWarnings(cx))
else if (cx->compartment()->behaviors().extraWarnings(cx))
*flags |= JSREPORT_WARNING;
else
return true;
} else if (JSREPORT_IS_STRICT(*flags)) {
/* Warning/error only when JSOPTION_STRICT is set. */
if (!cx->compartment()->options().extraWarnings(cx))
if (!cx->compartment()->behaviors().extraWarnings(cx))
return true;
}
@ -1160,8 +1160,8 @@ JSContext::findVersion() const
if (JSScript* script = currentScript(nullptr, ALLOW_CROSS_COMPARTMENT))
return script->getVersion();
if (compartment() && compartment()->options().version() != JSVERSION_UNKNOWN)
return compartment()->options().version();
if (compartment() && compartment()->behaviors().version() != JSVERSION_UNKNOWN)
return compartment()->behaviors().version();
return runtime()->defaultVersion();
}

View file

@ -41,7 +41,8 @@ using mozilla::DebugOnly;
using mozilla::PodArrayZero;
JSCompartment::JSCompartment(Zone* zone, const JS::CompartmentOptions& options = JS::CompartmentOptions())
: options_(options),
: creationOptions_(options.creationOptions()),
behaviors_(options.behaviors()),
zone_(zone),
runtime_(zone->runtimeFromMainThread()),
principals_(nullptr),
@ -51,7 +52,6 @@ JSCompartment::JSCompartment(Zone* zone, const JS::CompartmentOptions& options =
warnedAboutFlagsArgument(false),
warnedAboutExprClosure(false),
warnedAboutRegExpMultiline(false),
addonId(options.addonIdOrNull()),
#ifdef DEBUG
firedOnNewGlobalObject(false),
#endif
@ -71,7 +71,6 @@ JSCompartment::JSCompartment(Zone* zone, const JS::CompartmentOptions& options =
lazyArrayBuffers(nullptr),
nonSyntacticLexicalScopes_(nullptr),
gcIncomingGrayPointers(nullptr),
gcPreserveJitCode(options.preserveJitCode()),
debugModeBits(0),
watchpointMap(nullptr),
scriptCountsMap(nullptr),
@ -88,7 +87,8 @@ JSCompartment::JSCompartment(Zone* zone, const JS::CompartmentOptions& options =
{
PodArrayZero(sawDeprecatedLanguageExtension);
runtime_->numCompartments++;
MOZ_ASSERT_IF(options.mergeable(), options.invisibleToDebugger());
MOZ_ASSERT_IF(creationOptions_.mergeable(),
creationOptions_.invisibleToDebugger());
}
JSCompartment::~JSCompartment()
@ -168,19 +168,12 @@ JSRuntime::createJitRuntime(JSContext* cx)
JitRuntime::AutoPreventBackedgePatching apbp(cx->runtime(), jrt);
jitRuntime_ = jrt;
AutoEnterOOMUnsafeRegion noOOM;
if (!jitRuntime_->initialize(cx)) {
ReportOutOfMemory(cx);
js_delete(jitRuntime_);
jitRuntime_ = nullptr;
JSCompartment* comp = cx->runtime()->atomsCompartment();
if (comp->jitCompartment_) {
js_delete(comp->jitCompartment_);
comp->jitCompartment_ = nullptr;
}
return nullptr;
// Handling OOM here is complicated: if we delete jitRuntime_ now, we
// will destroy the ExecutableAllocator, even though there may still be
// JitCode instances holding references to ExecutablePools.
noOOM.crash("OOM in createJitRuntime");
}
return jitRuntime_;
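The replacement leans on the AutoEnterOOMUnsafeRegion idiom: once initialization has reached a point where failure cannot be unwound safely, an allocation failure is promoted to a deliberate crash with a reason string rather than attempting partial cleanup. A minimal sketch of the idiom; the initialization step is hypothetical:

extern bool initializeInfallibly();   // hypothetical step that must not fail

void OneTimeInit()
{
    js::AutoEnterOOMUnsafeRegion oomUnsafe;
    if (!initializeInfallibly())
        oomUnsafe.crash("OOM during one-time initialization");   // reports and aborts
}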
@ -1155,7 +1148,7 @@ JSCompartment::reportTelemetry()
// Hazard analysis can't tell that the telemetry callbacks don't GC.
JS::AutoSuppressGCAnalysis nogc;
int id = addonId
int id = creationOptions_.addonIdOrNull()
? JS_TELEMETRY_DEPRECATED_LANGUAGE_EXTENSIONS_IN_ADDONS
: JS_TELEMETRY_DEPRECATED_LANGUAGE_EXTENSIONS_IN_CONTENT;
@ -1170,7 +1163,9 @@ void
JSCompartment::addTelemetry(const char* filename, DeprecatedLanguageExtension e)
{
// Only report telemetry for web content and add-ons, not chrome JS.
if (isSystem_ || (!addonId && (!filename || strncmp(filename, "http", 4) != 0)))
if (isSystem_)
return;
if (!creationOptions_.addonIdOrNull() && (!filename || strncmp(filename, "http", 4) != 0))
return;
sawDeprecatedLanguageExtension[e] = true;

View file

@ -223,7 +223,8 @@ class WeakMapBase;
struct JSCompartment
{
JS::CompartmentOptions options_;
const JS::CompartmentCreationOptions creationOptions_;
JS::CompartmentBehaviors behaviors_;
private:
JS::Zone* zone_;
@ -280,10 +281,6 @@ struct JSCompartment
bool warnedAboutExprClosure;
bool warnedAboutRegExpMultiline;
// A null add-on ID means that the compartment is not associated with an
// add-on.
JSAddonId* const addonId;
#ifdef DEBUG
bool firedOnNewGlobalObject;
#endif
@ -312,8 +309,10 @@ struct JSCompartment
JS::Zone* zone() { return zone_; }
const JS::Zone* zone() const { return zone_; }
JS::CompartmentOptions& options() { return options_; }
const JS::CompartmentOptions& options() const { return options_; }
const JS::CompartmentCreationOptions& creationOptions() const { return creationOptions_; }
JS::CompartmentBehaviors& behaviors() { return behaviors_; }
const JS::CompartmentBehaviors& behaviors() const { return behaviors_; }
JSRuntime* runtimeFromMainThread() {
MOZ_ASSERT(CurrentThreadCanAccessRuntime(runtime_));
@ -473,9 +472,6 @@ struct JSCompartment
JSObject* gcIncomingGrayPointers;
private:
/* Whether to preserve JIT code on non-shrinking GCs. */
bool gcPreserveJitCode;
enum {
IsDebuggee = 1 << 0,
DebuggerObservesAllExecution = 1 << 1,
@ -553,7 +549,8 @@ struct JSCompartment
void traceOutgoingCrossCompartmentWrappers(JSTracer* trc);
static void traceIncomingCrossCompartmentEdgesForZoneGC(JSTracer* trc);
bool preserveJitCode() { return gcPreserveJitCode; }
/* Whether to preserve JIT code on non-shrinking GCs. */
bool preserveJitCode() { return creationOptions_.preserveJitCode(); }
void sweepAfterMinorGC();

View file

@ -727,7 +727,7 @@ ErrorReport::ReportAddonExceptionToTelementry(JSContext* cx)
return;
JSCompartment* comp = stack->compartment();
JSAddonId* addonId = comp->addonId;
JSAddonId* addonId = comp->creationOptions().addonIdOrNull();
// We only want to send the report if the scope that has just thrown belongs to an add-on.
// Let's check the compartment of the youngest function on the stack, to determine that.

View file

@ -699,13 +699,13 @@ class FunctionExtended : public JSFunction
* All asm.js/wasm functions store their compiled module (either
* WasmModuleObject or AsmJSModuleObject) in the first extended slot.
*/
static const unsigned ASM_MODULE_SLOT = 0;
static const unsigned WASM_MODULE_SLOT = 0;
/*
* wasm/asm.js exported functions store the index of the export in the
* module's export vector in the second slot.
*/
static const unsigned ASM_EXPORT_INDEX_SLOT = 1;
static const unsigned WASM_EXPORT_INDEX_SLOT = 1;
static inline size_t offsetOfExtendedSlot(unsigned which) {
MOZ_ASSERT(which < NUM_EXTENDED_SLOTS);
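The constants are only renamed; their values and the slot layout are unchanged, so existing readers keep working. A hedged sketch of reading an exported wasm/asm.js function's module and export index through the renamed slots, assuming the index is boxed as an int32 and using the standard getExtendedSlot accessor:

// fun must be an exported wasm/asm.js function carrying extended slots.
static JSObject* WasmModuleOf(JSFunction* fun, uint32_t* exportIndex)
{
    JSObject* module =
        &fun->getExtendedSlot(js::FunctionExtended::WASM_MODULE_SLOT).toObject();
    *exportIndex =
        uint32_t(fun->getExtendedSlot(js::FunctionExtended::WASM_EXPORT_INDEX_SLOT).toInt32());
    return module;
}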

View file

@ -1606,6 +1606,21 @@ GCRuntime::callGCCallback(JSGCStatus status) const
gcCallback.op(rt, status, gcCallback.data);
}
void
GCRuntime::setObjectsTenuredCallback(JSObjectsTenuredCallback callback,
void* data)
{
tenuredCallback.op = callback;
tenuredCallback.data = data;
}
void
GCRuntime::callObjectsTenuredCallback()
{
if (tenuredCallback.op)
tenuredCallback.op(rt, tenuredCallback.data);
}
namespace {
class AutoNotifyGCActivity {
@ -6769,9 +6784,11 @@ gc::MergeCompartments(JSCompartment* source, JSCompartment* target)
{
// The source compartment must be specifically flagged as mergable. This
// also implies that the compartment is not visible to the debugger.
MOZ_ASSERT(source->options_.mergeable());
MOZ_ASSERT(source->creationOptions_.mergeable());
MOZ_ASSERT(source->creationOptions_.invisibleToDebugger());
MOZ_ASSERT(source->addonId == target->addonId);
MOZ_ASSERT(source->creationOptions().addonIdOrNull() ==
target->creationOptions().addonIdOrNull());
JSRuntime* rt = source->runtimeFromMainThread();

View file

@ -1275,7 +1275,7 @@ js::DeepCloneObjectLiteral(JSContext* cx, HandleObject obj, NewObjectKind newKin
{
/* NB: Keep this in sync with XDRObjectLiteral. */
MOZ_ASSERT_IF(obj->isSingleton(),
JS::CompartmentOptionsRef(cx).getSingletonsAsTemplates());
cx->compartment()->behaviors().getSingletonsAsTemplates());
MOZ_ASSERT(obj->is<PlainObject>() || obj->is<UnboxedPlainObject>() ||
obj->is<ArrayObject>() || obj->is<UnboxedArrayObject>());
MOZ_ASSERT(newKind != SingletonObject);
@ -1389,7 +1389,7 @@ js::XDRObjectLiteral(XDRState<mode>* xdr, MutableHandleObject obj)
JSContext* cx = xdr->cx();
MOZ_ASSERT_IF(mode == XDR_ENCODE && obj->isSingleton(),
JS::CompartmentOptionsRef(cx).getSingletonsAsTemplates());
cx->compartment()->behaviors().getSingletonsAsTemplates());
// Distinguish between objects and array classes.
uint32_t isArray = 0;

View file

@ -658,8 +658,10 @@ js::XDRScript(XDRState<mode>* xdr, HandleObject enclosingScopeArg, HandleScript
// JSOP_OBJECT that then got modified. So throw if we're not
// cloning in JSOP_OBJECT or if we ever didn't clone in it in the
// past.
const JS::CompartmentOptions& opts = JS::CompartmentOptionsRef(cx);
if (!opts.cloneSingletons() || !opts.getSingletonsAsTemplates()) {
JSCompartment* comp = cx->compartment();
if (!comp->creationOptions().cloneSingletons() ||
!comp->behaviors().getSingletonsAsTemplates())
{
JS_ReportError(cx,
"Can't serialize a run-once non-function script "
"when we're not doing singleton cloning");

View file

@ -1349,14 +1349,14 @@ Evaluate(JSContext* cx, unsigned argc, Value* vp)
{
if (saveBytecode) {
if (!JS::CompartmentOptionsRef(cx).cloneSingletons()) {
if (!JS::CompartmentCreationOptionsRef(cx).cloneSingletons()) {
JS_ReportErrorNumber(cx, my_GetErrorMessage, nullptr,
JSSMSG_CACHE_SINGLETON_FAILED);
return false;
}
// cloneSingletons implies that singletons are used as template objects.
MOZ_ASSERT(JS::CompartmentOptionsRef(cx).getSingletonsAsTemplates());
MOZ_ASSERT(JS::CompartmentBehaviorsRef(cx).getSingletonsAsTemplates());
}
if (loadBytecode) {
@ -2760,7 +2760,7 @@ WorkerMain(void* arg)
JSAutoRequest ar(cx);
JS::CompartmentOptions compartmentOptions;
compartmentOptions.setVersion(JSVERSION_DEFAULT);
compartmentOptions.behaviors().setVersion(JSVERSION_DEFAULT);
RootedObject global(cx, NewGlobalObject(cx, compartmentOptions, nullptr));
if (!global)
break;
@ -3781,11 +3781,13 @@ EscapeForShell(AutoCStringVector& argv)
static Vector<const char*, 4, js::SystemAllocPolicy> sPropagatedFlags;
#if defined(JS_CODEGEN_X86) || defined(JS_CODEGEN_X64)
static bool
PropagateFlagToNestedShells(const char* flag)
{
return sPropagatedFlags.append(flag);
}
#endif
static bool
NestedShell(JSContext* cx, unsigned argc, Value* vp)
@ -3952,33 +3954,38 @@ static bool
NewGlobal(JSContext* cx, unsigned argc, Value* vp)
{
JSPrincipals* principals = nullptr;
JS::CompartmentOptions options;
options.setVersion(JSVERSION_DEFAULT);
JS::CompartmentCreationOptions& creationOptions = options.creationOptions();
JS::CompartmentBehaviors& behaviors = options.behaviors();
behaviors.setVersion(JSVERSION_DEFAULT);
CallArgs args = CallArgsFromVp(argc, vp);
if (args.length() == 1 && args[0].isObject()) {
RootedObject opts(cx, &args[0].toObject());
RootedValue v(cx);
if (!JS_GetProperty(cx, opts, "sameZoneAs", &v))
return false;
if (v.isObject())
options.setSameZoneAs(UncheckedUnwrap(&v.toObject()));
if (!JS_GetProperty(cx, opts, "invisibleToDebugger", &v))
return false;
if (v.isBoolean())
options.setInvisibleToDebugger(v.toBoolean());
creationOptions.setInvisibleToDebugger(v.toBoolean());
if (!JS_GetProperty(cx, opts, "cloneSingletons", &v))
return false;
if (v.isBoolean())
options.setCloneSingletons(v.toBoolean());
creationOptions.setCloneSingletons(v.toBoolean());
if (!JS_GetProperty(cx, opts, "sameZoneAs", &v))
return false;
if (v.isObject())
creationOptions.setSameZoneAs(UncheckedUnwrap(&v.toObject()));
if (!JS_GetProperty(cx, opts, "disableLazyParsing", &v))
return false;
if (v.isBoolean())
options.setDisableLazyParsing(v.toBoolean());
behaviors.setDisableLazyParsing(v.toBoolean());
if (!JS_GetProperty(cx, opts, "principal", &v))
return false;
@ -6506,10 +6513,9 @@ Shell(JSContext* cx, OptionParser* op, char** envp)
if (op->getBoolOption("disable-oom-functions"))
disableOOMFunctions = true;
RootedObject glob(cx);
JS::CompartmentOptions options;
options.setVersion(JSVERSION_DEFAULT);
glob = NewGlobalObject(cx, options, nullptr);
options.behaviors().setVersion(JSVERSION_DEFAULT);
RootedObject glob(cx, NewGlobalObject(cx, options, nullptr));
if (!glob)
return 1;
@ -6690,7 +6696,7 @@ main(int argc, char** argv, char** envp)
|| !op.addIntOption('\0', "baseline-warmup-threshold", "COUNT",
"Wait for COUNT calls or iterations before baseline-compiling "
"(default: 10)", -1)
|| !op.addBoolOption('\0', "non-writable-jitcode", "Allocate JIT code as non-writable memory.")
|| !op.addBoolOption('\0', "non-writable-jitcode", "(NOP for fuzzers) Allocate JIT code as non-writable memory.")
|| !op.addBoolOption('\0', "no-fpu", "Pretend CPU does not support floating-point operations "
"to test JIT codegen (no-op on platforms other than x86).")
|| !op.addBoolOption('\0', "no-sse3", "Pretend CPU does not support SSE3 instructions and above "
@ -6769,11 +6775,6 @@ main(int argc, char** argv, char** envp)
OOM_printAllocationCount = op.getBoolOption('O');
#endif
if (op.getBoolOption("non-writable-jitcode")) {
js::jit::ExecutableAllocator::nonWritableJitCode = true;
PropagateFlagToNestedShells("--non-writable-jitcode");
}
#ifdef JS_CODEGEN_X86
if (op.getBoolOption("no-fpu"))
js::jit::CPUInfo::SetFloatingPointDisabled();

View file

@ -3,14 +3,10 @@
// http://creativecommons.org/licenses/publicdomain/
function test() {
// Note: -8 and -200 will trigger asm.js link failures because 8 and 200
// bytes are below the minimum allowed size, and the buffer will not
// actually be converted to an asm.js buffer.
for (var size of [0, 8, 16, 200, 1000, 4096, -8, -200, -8192, -65536]) {
var buffer_ctor = (size < 0) ? AsmJSArrayBuffer : ArrayBuffer;
size = Math.abs(size);
var old = new buffer_ctor(size);
var old = new ArrayBuffer(size);
var copy = deserialize(serialize(old, [old]));
assertEq(old.byteLength, 0);
assertEq(copy.byteLength, size);
@ -29,7 +25,7 @@ function test() {
for (var ctor of constructors) {
var dataview = (ctor === DataView);
var buf = new buffer_ctor(size);
var buf = new ArrayBuffer(size);
var old_arr = new ctor(buf);
assertEq(buf.byteLength, size);
assertEq(buf, old_arr.buffer);
@ -52,7 +48,7 @@ function test() {
for (var ctor of constructors) {
var dataview = (ctor === DataView);
var buf = new buffer_ctor(size);
var buf = new ArrayBuffer(size);
var old_arr = new ctor(buf);
var dv = new DataView(buf); // Second view
var copy_arr = deserialize(serialize(old_arr, [ buf ]));
@ -69,7 +65,7 @@ function test() {
// Mutate the buffer during the clone operation. The modifications should be visible.
if (size >= 4) {
old = new buffer_ctor(size);
old = new ArrayBuffer(size);
var view = new Int32Array(old);
view[0] = 1;
var mutator = { get foo() { view[0] = 2; } };
@ -81,7 +77,7 @@ function test() {
// Neuter the buffer during the clone operation. Should throw an exception.
if (size >= 4) {
old = new buffer_ctor(size);
old = new ArrayBuffer(size);
var mutator = {
get foo() {
deserialize(serialize(old, [old]));

View file

@ -16,17 +16,3 @@ if (typeof version != 'undefined')
{
version(185);
}
// Note that AsmJS ArrayBuffers have a minimum size, currently 4096 bytes. If a
// smaller size is given, a regular ArrayBuffer will be returned instead.
function AsmJSArrayBuffer(size) {
var ab = new ArrayBuffer(size);
(new Function('global', 'foreign', 'buffer', '' +
' "use asm";' +
' var i32 = new global.Int32Array(buffer);' +
' function g() {};' +
' return g;' +
''))(Function("return this")(),null,ab);
return ab;
}

View file

@ -130,9 +130,6 @@ const JSFunctionSpec ArrayBufferObject::jsfuncs[] = {
const JSFunctionSpec ArrayBufferObject::jsstaticfuncs[] = {
JS_FN("isView", ArrayBufferObject::fun_isView, 1, 0),
#ifdef NIGHTLY_BUILD
JS_FN("transfer", ArrayBufferObject::fun_transfer, 2, 0),
#endif
JS_FS_END
};
@ -233,223 +230,6 @@ ArrayBufferObject::fun_isView(JSContext* cx, unsigned argc, Value* vp)
return true;
}
#if defined(ASMJS_MAY_USE_SIGNAL_HANDLERS_FOR_OOB)
static void
ReleaseAsmJSMappedData(void* base)
{
MOZ_ASSERT(uintptr_t(base) % AsmJSPageSize == 0);
# ifdef XP_WIN
VirtualFree(base, 0, MEM_RELEASE);
# else
munmap(base, AsmJSMappedSize);
# if defined(MOZ_VALGRIND) && defined(VALGRIND_ENABLE_ADDR_ERROR_REPORTING_IN_RANGE)
// Tell Valgrind/Memcheck to recommence reporting accesses in the
// previously-inaccessible region.
if (AsmJSMappedSize > 0) {
VALGRIND_ENABLE_ADDR_ERROR_REPORTING_IN_RANGE(base, AsmJSMappedSize);
}
# endif
# endif
MemProfiler::RemoveNative(base);
}
#else
static void
ReleaseAsmJSMappedData(void* base)
{
MOZ_CRASH("asm.js only uses mapped buffers when using signal-handler OOB checking");
}
#endif
#ifdef NIGHTLY_BUILD
# if defined(ASMJS_MAY_USE_SIGNAL_HANDLERS_FOR_OOB)
static bool
TransferAsmJSMappedBuffer(JSContext* cx, const CallArgs& args,
Handle<ArrayBufferObject*> oldBuffer, size_t newByteLength)
{
size_t oldByteLength = oldBuffer->byteLength();
MOZ_ASSERT(oldByteLength % AsmJSPageSize == 0);
MOZ_ASSERT(newByteLength % AsmJSPageSize == 0);
ArrayBufferObject::BufferContents stolen =
ArrayBufferObject::stealContents(cx, oldBuffer, /* hasStealableContents = */ true);
if (!stolen)
return false;
MOZ_ASSERT(stolen.kind() == ArrayBufferObject::ASMJS_MAPPED);
uint8_t* data = stolen.data();
if (newByteLength > oldByteLength) {
void* diffStart = data + oldByteLength;
size_t diffLength = newByteLength - oldByteLength;
# ifdef XP_WIN
if (!VirtualAlloc(diffStart, diffLength, MEM_COMMIT, PAGE_READWRITE)) {
ReleaseAsmJSMappedData(data);
ReportOutOfMemory(cx);
return false;
}
# else
// To avoid memset, use MAP_FIXED to clobber the newly-accessible pages
// with zero pages.
int flags = MAP_FIXED | MAP_PRIVATE | MAP_ANON;
if (mmap(diffStart, diffLength, PROT_READ | PROT_WRITE, flags, -1, 0) == MAP_FAILED) {
ReleaseAsmJSMappedData(data);
ReportOutOfMemory(cx);
return false;
}
# endif
MemProfiler::SampleNative(diffStart, diffLength);
} else if (newByteLength < oldByteLength) {
void* diffStart = data + newByteLength;
size_t diffLength = oldByteLength - newByteLength;
# ifdef XP_WIN
if (!VirtualFree(diffStart, diffLength, MEM_DECOMMIT)) {
ReleaseAsmJSMappedData(data);
ReportOutOfMemory(cx);
return false;
}
# else
if (madvise(diffStart, diffLength, MADV_DONTNEED) ||
mprotect(diffStart, diffLength, PROT_NONE))
{
ReleaseAsmJSMappedData(data);
ReportOutOfMemory(cx);
return false;
}
# endif
}
ArrayBufferObject::BufferContents newContents =
ArrayBufferObject::BufferContents::create<ArrayBufferObject::ASMJS_MAPPED>(data);
RootedObject newBuffer(cx, ArrayBufferObject::create(cx, newByteLength, newContents));
if (!newBuffer) {
ReleaseAsmJSMappedData(data);
return false;
}
args.rval().setObject(*newBuffer);
return true;
}
# endif // defined(ASMJS_MAY_USE_SIGNAL_HANDLERS_FOR_OOB)
/*
* Experimental implementation of ArrayBuffer.transfer:
* https://gist.github.com/andhow/95fb9e49996615764eff
* which is currently in the early stages of proposal for ES7.
*/
bool
ArrayBufferObject::fun_transfer(JSContext* cx, unsigned argc, Value* vp)
{
CallArgs args = CallArgsFromVp(argc, vp);
HandleValue oldBufferArg = args.get(0);
HandleValue newByteLengthArg = args.get(1);
if (!oldBufferArg.isObject()) {
JS_ReportErrorNumber(cx, GetErrorMessage, nullptr, JSMSG_TYPED_ARRAY_BAD_ARGS);
return false;
}
RootedObject oldBufferObj(cx, &oldBufferArg.toObject());
ESClassValue cls;
if (!GetBuiltinClass(cx, oldBufferObj, &cls))
return false;
if (cls != ESClass_ArrayBuffer) {
JS_ReportErrorNumber(cx, GetErrorMessage, nullptr, JSMSG_TYPED_ARRAY_BAD_ARGS);
return false;
}
// Beware: oldBuffer can point across compartment boundaries. ArrayBuffer
// contents are not compartment-specific so this is safe.
Rooted<ArrayBufferObject*> oldBuffer(cx);
if (oldBufferObj->is<ArrayBufferObject>()) {
oldBuffer = &oldBufferObj->as<ArrayBufferObject>();
} else {
JSObject* unwrapped = CheckedUnwrap(oldBufferObj);
if (!unwrapped || !unwrapped->is<ArrayBufferObject>()) {
JS_ReportErrorNumber(cx, GetErrorMessage, nullptr, JSMSG_TYPED_ARRAY_BAD_ARGS);
return false;
}
oldBuffer = &unwrapped->as<ArrayBufferObject>();
}
size_t oldByteLength = oldBuffer->byteLength();
size_t newByteLength;
if (newByteLengthArg.isUndefined()) {
newByteLength = oldByteLength;
} else {
int32_t i32;
if (!ToInt32(cx, newByteLengthArg, &i32))
return false;
if (i32 < 0) {
JS_ReportErrorNumber(cx, GetErrorMessage, nullptr, JSMSG_BAD_ARRAY_LENGTH);
return false;
}
newByteLength = size_t(i32);
}
if (oldBuffer->isNeutered()) {
JS_ReportErrorNumber(cx, GetErrorMessage, nullptr, JSMSG_TYPED_ARRAY_DETACHED);
return false;
}
UniquePtr<uint8_t, JS::FreePolicy> newData;
if (!newByteLength) {
if (!ArrayBufferObject::neuter(cx, oldBuffer, oldBuffer->contents()))
return false;
} else {
# if defined(ASMJS_MAY_USE_SIGNAL_HANDLERS_FOR_OOB)
// With a 4gb mapped asm.js buffer, we can simply enable/disable access
// to the delta as long as the requested length is page-sized.
if (oldBuffer->isAsmJSMapped() && (newByteLength % AsmJSPageSize) == 0)
return TransferAsmJSMappedBuffer(cx, args, oldBuffer, newByteLength);
# endif
// Since we try to realloc below, only allow stealing malloc'd buffers.
// If !hasMallocedContents, stealContents will malloc a copy which we
// can then realloc.
bool steal = oldBuffer->hasMallocedContents();
auto stolenContents = ArrayBufferObject::stealContents(cx, oldBuffer, steal);
if (!stolenContents)
return false;
UniquePtr<uint8_t, JS::FreePolicy> oldData(stolenContents.data());
if (newByteLength > oldByteLength) {
// In theory, realloc+memset(0) can be optimized to avoid touching
// any pages (by using OS page mapping tricks). However, in
// practice, we don't seem to get this optimization in Firefox with
// jemalloc so calloc+memcpy are faster.
newData.reset(cx->runtime()->pod_callocCanGC<uint8_t>(newByteLength));
if (newData) {
memcpy(newData.get(), oldData.get(), oldByteLength);
} else {
// Try malloc before giving up since it might be able to succeed
// by resizing oldData in-place.
newData.reset(cx->pod_realloc(oldData.get(), oldByteLength, newByteLength));
if (!newData)
return false;
oldData.release();
memset(newData.get() + oldByteLength, 0, newByteLength - oldByteLength);
}
} else if (newByteLength < oldByteLength) {
newData.reset(cx->pod_realloc(oldData.get(), oldByteLength, newByteLength));
if (!newData)
return false;
oldData.release();
} else {
newData = Move(oldData);
}
}
RootedObject newBuffer(cx, JS_NewArrayBufferWithContents(cx, newByteLength, newData.get()));
if (!newBuffer)
return false;
newData.release();
args.rval().setObject(*newBuffer);
return true;
}
#endif // defined(NIGHTLY_BUILD)
/*
* new ArrayBuffer(byteLength)
*/
@ -511,8 +291,10 @@ ArrayBufferObject::neuterView(JSContext* cx, ArrayBufferViewObject* view,
ArrayBufferObject::neuter(JSContext* cx, Handle<ArrayBufferObject*> buffer,
BufferContents newContents)
{
if (buffer->isAsmJS() && !OnDetachAsmJSArrayBuffer(cx, buffer))
if (buffer->isAsmJS()) {
JS_ReportErrorNumber(cx, GetErrorMessage, nullptr, JSMSG_OUT_OF_MEMORY);
return false;
}
// When neutering buffers where we don't know all views, the new data must
// match the old data. All missing views are typed objects, which do not
@ -742,6 +524,33 @@ ArrayBufferObject::dataPointerShared() const
return SharedMem<uint8_t*>::unshared(getSlot(DATA_SLOT).toPrivate());
}
#if defined(ASMJS_MAY_USE_SIGNAL_HANDLERS_FOR_OOB)
static void
ReleaseAsmJSMappedData(void* base)
{
MOZ_ASSERT(uintptr_t(base) % AsmJSPageSize == 0);
# ifdef XP_WIN
VirtualFree(base, 0, MEM_RELEASE);
# else
munmap(base, AsmJSMappedSize);
# if defined(MOZ_VALGRIND) && defined(VALGRIND_ENABLE_ADDR_ERROR_REPORTING_IN_RANGE)
// Tell Valgrind/Memcheck to recommence reporting accesses in the
// previously-inaccessible region.
if (AsmJSMappedSize > 0) {
VALGRIND_ENABLE_ADDR_ERROR_REPORTING_IN_RANGE(base, AsmJSMappedSize);
}
# endif
# endif
MemProfiler::RemoveNative(base);
}
#else
static void
ReleaseAsmJSMappedData(void* base)
{
MOZ_CRASH("asm.js only uses mapped buffers when using signal-handler OOB checking");
}
#endif
void
ArrayBufferObject::releaseData(FreeOp* fop)
{

Some files were not shown because too many files have changed.