Bug 1522157 - Part 1: Add fast path when TypedArray.from is called with packed arrays. r=jandem

--HG--
extra : rebase_source : e2061e355eaf7ae8e65f8d20ec7a123f08fbc6ad
This commit is contained in:
André Bargull 2019-01-31 10:36:27 -08:00
Родитель 9cc5cc88f9
Коммит c0de58457b
6 изменённых файлов: 148 добавлений и 6 удалений

Просмотреть файл

@ -1506,6 +1506,26 @@ function TypedArrayStaticFrom(source, mapfn = undefined, thisArg = undefined) {
if (!IsCallable(usingIterator))
ThrowTypeError(JSMSG_NOT_ITERABLE, DecompileArg(0, source));
// Try to take a fast path when there's no mapper function and the
// constructor is a built-in TypedArray constructor.
if (!mapping && IsTypedArrayConstructor(C)) {
// TODO: Add fast path for TypedArray inputs (bug 1491813).
// The source is a packed array using the default iterator.
if (usingIterator === ArrayValues && IsPackedArray(source) &&
ArrayIteratorPrototypeOptimizable())
{
// Steps 7.b-c.
var targetObj = new C(source.length);
// Steps 7.a, 7.d-f.
TypedArrayInitFromPackedArray(targetObj, source);
// Step 7.g.
return targetObj;
}
}
// Step 7.a.
var values = IterableToList(source, usingIterator);

Просмотреть файл

@ -159,6 +159,39 @@ bool js::ForOfPIC::Chain::tryOptimizeArray(JSContext* cx,
return true;
}
// Validate (and lazily build) the PIC's cached state and report through
// |*optimized| whether %ArrayIteratorPrototype%.next is still the default,
// unmodified implementation. Returns false only when initialize() fails.
bool js::ForOfPIC::Chain::tryOptimizeArrayIteratorNext(JSContext* cx,
                                                       bool* optimized) {
  MOZ_ASSERT(optimized);
  *optimized = false;

  // (Re)build the PIC state when it is missing or has gone stale.
  bool ok = true;
  if (!initialized_) {
    ok = initialize(cx);
  } else if (!disabled_ && !isArrayNextStillSane()) {
    reset();
    ok = initialize(cx);
  }
  if (!ok) {
    return false;
  }
  MOZ_ASSERT(initialized_);

  // A disabled PIC never reports the fast path as available.
  if (disabled_) {
    return true;
  }

  // The iterator state is known-sane here; anything else would have been
  // caught by the reinitialization above.
  MOZ_ASSERT(isArrayNextStillSane());

  *optimized = true;
  return true;
}
bool js::ForOfPIC::Chain::hasMatchingStub(ArrayObject* obj) {
// Ensure PIC is initialized and not disabled.
MOZ_ASSERT(initialized_ && !disabled_);
@ -291,8 +324,8 @@ const Class ForOfPIC::class_ = {
/* static */ NativeObject* js::ForOfPIC::createForOfPICObject(
JSContext* cx, Handle<GlobalObject*> global) {
cx->check(global);
NativeObject* obj =
NewNativeObjectWithGivenProto(cx, &ForOfPIC::class_, nullptr);
NativeObject* obj = NewNativeObjectWithGivenProto(cx, &ForOfPIC::class_,
nullptr, TenuredObject);
if (!obj) {
return nullptr;
}

Просмотреть файл

@ -203,6 +203,13 @@ struct ForOfPIC {
bool tryOptimizeArray(JSContext* cx, HandleArrayObject array,
bool* optimized);
// Check if %ArrayIteratorPrototype% still uses the default "next" method.
bool tryOptimizeArrayIteratorNext(JSContext* cx, bool* optimized);
void trace(JSTracer* trc);
void sweep(FreeOp* fop);
private:
// Check if the global array-related objects have not been messed with
// in a way that would disable this PIC.
bool isArrayStateStillSane();
@ -215,10 +222,6 @@ struct ForOfPIC {
canonicalNextFunc_);
}
void trace(JSTracer* trc);
void sweep(FreeOp* fop);
private:
// Check if a matching optimized stub for the given object exists.
bool hasMatchingStub(ArrayObject* obj);

Просмотреть файл

@ -50,6 +50,7 @@
#include "vm/JSContext.h"
#include "vm/JSFunction.h"
#include "vm/JSObject.h"
#include "vm/PIC.h"
#include "vm/Printer.h"
#include "vm/Realm.h"
#include "vm/RegExpObject.h"
@ -67,6 +68,7 @@
#include "vm/NativeObject-inl.h"
#include "vm/NumberObject-inl.h"
#include "vm/StringObject-inl.h"
#include "vm/TypedArrayObject-inl.h"
using namespace js;
using namespace js::selfhosted;
@ -796,6 +798,25 @@ bool js::intrinsic_NewArrayIterator(JSContext* cx, unsigned argc, Value* vp) {
return true;
}
// Self-hosting intrinsic: ArrayIteratorPrototypeOptimizable().
// Returns a boolean telling self-hosted code whether the for-of PIC still
// considers the default array-iterator "next" method optimizable.
static bool intrinsic_ArrayIteratorPrototypeOptimizable(JSContext* cx,
                                                        unsigned argc,
                                                        Value* vp) {
  CallArgs args = CallArgsFromVp(argc, vp);
  MOZ_ASSERT(args.length() == 0);

  ForOfPIC::Chain* chain = ForOfPIC::getOrCreate(cx);
  if (!chain) {
    // Propagate the failure from PIC creation.
    return false;
  }

  bool isOptimizable = false;
  if (!chain->tryOptimizeArrayIteratorNext(cx, &isOptimizable)) {
    return false;
  }

  args.rval().setBoolean(isOptimizable);
  return true;
}
static bool intrinsic_GetNextMapEntryForIterator(JSContext* cx, unsigned argc,
Value* vp) {
CallArgs args = CallArgsFromVp(argc, vp);
@ -1068,6 +1089,16 @@ static bool intrinsic_GetTypedArrayKind(JSContext* cx, unsigned argc,
return true;
}
// Self-hosting intrinsic: IsTypedArrayConstructor(obj).
// Returns true iff the (guaranteed-object) argument is one of the built-in
// %TypedArray% constructors; thin wrapper over js::IsTypedArrayConstructor.
static bool intrinsic_IsTypedArrayConstructor(JSContext* cx, unsigned argc,
                                              Value* vp) {
  CallArgs args = CallArgsFromVp(argc, vp);
  MOZ_ASSERT(args.length() == 1);
  MOZ_ASSERT(args[0].isObject());

  JSObject* candidate = &args[0].toObject();
  args.rval().setBoolean(js::IsTypedArrayConstructor(candidate));
  return true;
}
static bool intrinsic_TypedArrayBuffer(JSContext* cx, unsigned argc,
Value* vp) {
CallArgs args = CallArgsFromVp(argc, vp);
@ -1705,6 +1736,43 @@ static bool intrinsic_TypedArrayBitwiseSlice(JSContext* cx, unsigned argc,
return true;
}
// Self-hosting intrinsic: TypedArrayInitFromPackedArray(target, source).
// Bulk-initializes typed array |target| from the packed array |source|,
// converting each element to |target|'s scalar type. The self-hosted caller
// guarantees both arguments are objects of the asserted kinds and that the
// lengths already match. Returns undefined; fails only if the per-type
// element copy fails.
static bool intrinsic_TypedArrayInitFromPackedArray(JSContext* cx,
                                                    unsigned argc, Value* vp) {
  CallArgs args = CallArgsFromVp(argc, vp);
  MOZ_ASSERT(args.length() == 2);
  MOZ_ASSERT(args[0].isObject());
  MOZ_ASSERT(args[1].isObject());

  Rooted<TypedArrayObject*> target(cx,
                                   &args[0].toObject().as<TypedArrayObject>());
  // The fast path only hands us live, non-shared targets.
  MOZ_ASSERT(!target->hasDetachedBuffer());
  MOZ_ASSERT(!target->isSharedMemory());

  RootedArrayObject source(cx, &args[1].toObject().as<ArrayObject>());
  MOZ_ASSERT(IsPackedArray(source));
  MOZ_ASSERT(source->length() == target->length());

  // Dispatch on the target's element type; JS_FOR_EACH_TYPED_ARRAY expands
  // one switch case per scalar type. (No comments inside the macro body —
  // they would swallow the line-continuation backslashes.)
  switch (target->type()) {
#define INIT_TYPED_ARRAY(T, N)                                         \
  case Scalar::N: {                                                    \
    if (!ElementSpecific<T, UnsharedOps>::initFromIterablePackedArray( \
            cx, target, source)) {                                     \
      return false;                                                    \
    }                                                                  \
    break;                                                             \
  }
    JS_FOR_EACH_TYPED_ARRAY(INIT_TYPED_ARRAY)
#undef INIT_TYPED_ARRAY
    default:
      MOZ_CRASH(
          "TypedArrayInitFromPackedArray with a typed array with bogus type");
  }

  args.rval().setUndefined();
  return true;
}
static bool intrinsic_RegExpCreate(JSContext* cx, unsigned argc, Value* vp) {
CallArgs args = CallArgsFromVp(argc, vp);
@ -2450,6 +2518,8 @@ static const JSFunctionSpec intrinsic_functions[] = {
JS_INLINABLE_FN("NewArrayIterator", intrinsic_NewArrayIterator, 0, 0,
IntrinsicNewArrayIterator),
JS_FN("ArrayIteratorPrototypeOptimizable",
intrinsic_ArrayIteratorPrototypeOptimizable, 0, 0),
JS_FN("CallArrayIteratorMethodIfWrapped",
CallNonGenericSelfhostedMethod<Is<ArrayIteratorObject>>, 2, 0),
@ -2547,6 +2617,7 @@ static const JSFunctionSpec intrinsic_functions[] = {
"IsPossiblyWrappedTypedArray",
intrinsic_IsPossiblyWrappedInstanceOfBuiltin<TypedArrayObject>, 1, 0,
IntrinsicIsPossiblyWrappedTypedArray),
JS_FN("IsTypedArrayConstructor", intrinsic_IsTypedArrayConstructor, 1, 0),
JS_FN("TypedArrayBuffer", intrinsic_TypedArrayBuffer, 1, 0),
JS_FN("TypedArrayByteOffset", intrinsic_TypedArrayByteOffset, 1, 0),
@ -2572,6 +2643,9 @@ static const JSFunctionSpec intrinsic_functions[] = {
JS_FN("TypedArrayBitwiseSlice", intrinsic_TypedArrayBitwiseSlice, 4, 0),
JS_FN("TypedArrayInitFromPackedArray",
intrinsic_TypedArrayInitFromPackedArray, 2, 0),
JS_FN("CallArrayBufferMethodIfWrapped",
CallNonGenericSelfhostedMethod<Is<ArrayBufferObject>>, 2, 0),
JS_FN("CallSharedArrayBufferMethodIfWrapped",

Просмотреть файл

@ -1890,6 +1890,16 @@ const Class TypedArrayObject::protoClasses[Scalar::MaxTypedArrayViewType] = {
return native == TypedArray_lengthGetter;
}
// Returns true iff |obj| is one of the built-in typed array constructor
// natives. The macro expands to one IsNativeFunction check per typed array
// kind enumerated by JS_FOR_EACH_TYPED_ARRAY; any match returns early.
bool js::IsTypedArrayConstructor(const JSObject* obj) {
#define CHECK_TYPED_ARRAY_CONSTRUCTOR(T, N)                 \
  if (IsNativeFunction(obj, N##Array::class_constructor)) { \
    return true;                                            \
  }
  JS_FOR_EACH_TYPED_ARRAY(CHECK_TYPED_ARRAY_CONSTRUCTOR)
#undef CHECK_TYPED_ARRAY_CONSTRUCTOR
  // No built-in typed array constructor matched.
  return false;
}
bool js::IsTypedArrayConstructor(HandleValue v, uint32_t type) {
switch (type) {
case Scalar::Int8:

Просмотреть файл

@ -201,6 +201,8 @@ inline Scalar::Type GetTypedArrayClassType(const Class* clasp) {
return static_cast<Scalar::Type>(clasp - &TypedArrayObject::classes[0]);
}
bool IsTypedArrayConstructor(const JSObject* obj);
bool IsTypedArrayConstructor(HandleValue v, uint32_t type);
// In WebIDL terminology, a BufferSource is either an ArrayBuffer or a typed