Bug 1030389 - Infrastructure: Optimization strategy tracking infrastructure. (r=djvj)

This commit is contained in:
Shu-yu Guo 2015-01-28 19:01:02 -08:00
Родитель 6359f5e202
Коммит 4fa0488007
20 изменённых файлов: 2023 добавлений и 21 удалений

Просмотреть файл

@ -3806,10 +3806,20 @@ CodeGenerator::generateBody()
if (!addNativeToBytecodeEntry(iter->mirRaw()->trackedSite()))
return false;
}
// Track the start native offset of optimizations.
if (iter->mirRaw()->trackedOptimizations()) {
if (!addTrackedOptimizationsEntry(iter->mirRaw()->trackedOptimizations()))
return false;
}
}
iter->accept(this);
// Track the end native offset of optimizations.
if (iter->mirRaw() && iter->mirRaw()->trackedOptimizations())
extendTrackedOptimizationsEntry(iter->mirRaw()->trackedOptimizations());
#ifdef DEBUG
if (!counts)
emitDebugResultChecks(*iter);
@ -7235,6 +7245,28 @@ CodeGenerator::link(JSContext *cx, types::CompilerConstraintList *constraints)
// nativeToBytecodeScriptList_ is no longer needed.
js_free(nativeToBytecodeScriptList_);
// Generate the tracked optimizations map.
if (isOptimizationTrackingEnabled()) {
// Treat OOMs and failures as if optimization tracking were turned off.
types::TypeSet::TypeList *allTypes = cx->new_<types::TypeSet::TypeList>();
if (allTypes && generateCompactTrackedOptimizationsMap(cx, code, allTypes)) {
const uint8_t *optsRegionTableAddr = trackedOptimizationsMap_ +
trackedOptimizationsRegionTableOffset_;
const IonTrackedOptimizationsRegionTable *optsRegionTable =
(const IonTrackedOptimizationsRegionTable *) optsRegionTableAddr;
const uint8_t *optsTypesTableAddr = trackedOptimizationsMap_ +
trackedOptimizationsTypesTableOffset_;
const IonTrackedOptimizationsTypesTable *optsTypesTable =
(const IonTrackedOptimizationsTypesTable *) optsTypesTableAddr;
const uint8_t *optsAttemptsTableAddr = trackedOptimizationsMap_ +
trackedOptimizationsAttemptsTableOffset_;
const IonTrackedOptimizationsAttemptsTable *optsAttemptsTable =
(const IonTrackedOptimizationsAttemptsTable *) optsAttemptsTableAddr;
entry.initTrackedOptimizations(optsRegionTable, optsTypesTable, optsAttemptsTable,
allTypes);
}
}
// Add entry to the global table.
JitcodeGlobalTable *globalTable = cx->runtime()->jitRuntime()->getJitcodeGlobalTable();
if (!globalTable->addEntry(entry, cx->runtime())) {

Просмотреть файл

@ -16,6 +16,8 @@
namespace js {
namespace jit {
class TrackedOptimizations;
inline unsigned
StartArgSlot(JSScript *script)
{
@ -130,13 +132,16 @@ class BytecodeSite : public TempObject
// Bytecode address within innermost active function.
jsbytecode *pc_;
// Optimization information at the pc.
TrackedOptimizations *optimizations_;
public:
BytecodeSite()
: tree_(nullptr), pc_(nullptr)
: tree_(nullptr), pc_(nullptr), optimizations_(nullptr)
{}
BytecodeSite(InlineScriptTree *tree, jsbytecode *pc)
: tree_(tree), pc_(pc)
: tree_(tree), pc_(pc), optimizations_(nullptr)
{
MOZ_ASSERT(tree_ != nullptr);
MOZ_ASSERT(pc_ != nullptr);
@ -153,6 +158,19 @@ class BytecodeSite : public TempObject
JSScript *script() const {
return tree_ ? tree_->script() : nullptr;
}
// Whether tracked-optimization info has been attached to this bytecode site.
bool hasOptimizations() const {
    return !!optimizations_;
}
// The optimization info attached to this site. Must only be called when
// hasOptimizations() is true.
TrackedOptimizations *optimizations() const {
    MOZ_ASSERT(hasOptimizations());
    return optimizations_;
}
// Attach optimization tracking info to this site. The pointer is borrowed;
// no ownership is taken.
void setOptimizations(TrackedOptimizations *optimizations) {
    optimizations_ = optimizations;
}
};
enum AnalysisMode {

Просмотреть файл

@ -126,6 +126,7 @@ IonBuilder::IonBuilder(JSContext *analysisContext, CompileCompartment *comp,
typeArrayHint(0),
bytecodeTypeMap(nullptr),
loopDepth_(loopDepth),
trackedOptimizationSites_(*temp),
lexicalCheck_(nullptr),
callerResumePoint_(nullptr),
callerBuilder_(nullptr),

Просмотреть файл

@ -18,6 +18,7 @@
#include "jit/MIR.h"
#include "jit/MIRGenerator.h"
#include "jit/MIRGraph.h"
#include "jit/OptimizationTracking.h"
namespace js {
namespace jit {
@ -948,11 +949,20 @@ class IonBuilder
MBasicBlock *current;
uint32_t loopDepth_;
Vector<BytecodeSite *, 0, JitAllocPolicy> trackedOptimizationSites_;
BytecodeSite *bytecodeSite(jsbytecode *pc) {
MOZ_ASSERT(info().inlineScriptTree()->script()->containsPC(pc));
// See comment in maybeTrackedOptimizationSite.
if (isOptimizationTrackingEnabled()) {
if (BytecodeSite *site = maybeTrackedOptimizationSite(pc))
return site;
}
return new(alloc()) BytecodeSite(info().inlineScriptTree(), pc);
}
BytecodeSite *maybeTrackedOptimizationSite(jsbytecode *pc);
MDefinition *lexicalCheck_;
void setLexicalCheck(MDefinition *lexical) {
@ -1051,6 +1061,16 @@ class IonBuilder
}
MGetPropertyCache *maybeFallbackFunctionGetter_;
// Used in tracking outcomes of optimization strategies for devtools.
void startTrackingOptimizations();
void trackTypeInfo(TrackedTypeSite site, MIRType mirType,
types::TemporaryTypeSet *typeSet);
void trackTypeInfo(TrackedTypeSite site, JSObject *obj);
void trackOptimizationAttempt(TrackedStrategy strategy);
void amendOptimizationAttempt(uint32_t index);
void trackOptimizationOutcome(TrackedOutcome outcome);
void trackOptimizationSuccess();
};
class CallInfo

Просмотреть файл

@ -255,6 +255,7 @@ jit::CheckLogging()
" unroll Loop unrolling\n"
" logs C1 and JSON visualization logging\n"
" profiling Profiling-related information\n"
" trackopts Optimization tracking information\n"
" all Everything\n"
"\n"
" bl-aborts Baseline compiler abort messages\n"
@ -315,6 +316,8 @@ jit::CheckLogging()
EnableIonDebugLogging();
if (ContainsFlag(env, "profiling"))
EnableChannel(JitSpew_Profiling);
if (ContainsFlag(env, "trackopts"))
EnableChannel(JitSpew_OptimizationTracking);
if (ContainsFlag(env, "all"))
LoggingBits = uint32_t(-1);

Просмотреть файл

@ -46,6 +46,8 @@ namespace jit {
_(Pools) \
/* Profiling-related information */ \
_(Profiling) \
/* Information of tracked opt strats */ \
_(OptimizationTracking) \
/* Debug info about the I$ */ \
_(CacheFlush) \
\

Просмотреть файл

@ -109,6 +109,19 @@ JitcodeGlobalEntry::IonEntry::destroy()
// Free the script list
js_free(scriptList_);
scriptList_ = nullptr;
// The optimizations region and attempts table is in the same block of
// memory, the beginning of which is pointed to by
// optimizationsRegionTable_->payloadStart().
if (optsRegionTable_) {
MOZ_ASSERT(optsAttemptsTable_);
js_free((void *) optsRegionTable_->payloadStart());
}
optsRegionTable_ = nullptr;
optsTypesTable_ = nullptr;
optsAttemptsTable_ = nullptr;
js_delete(optsAllTypes_);
optsAllTypes_ = nullptr;
}
bool

Просмотреть файл

@ -10,6 +10,7 @@
#include "ds/SplayTree.h"
#include "jit/CompactBuffer.h"
#include "jit/CompileInfo.h"
#include "jit/OptimizationTracking.h"
#include "jit/shared/CodeGenerator-shared.h"
namespace js {
@ -106,6 +107,23 @@ class JitcodeGlobalEntry
// of the memory space.
JitcodeIonTable *regionTable_;
// optsRegionTable_ points to the table within the compact
// optimizations map indexing all regions that have tracked
// optimization attempts. optsTypesTable_ is the tracked typed info
// associated with the attempts vectors; it is the same length as the
// attempts table. optsAttemptsTable_ is the table indexing those
// attempts vectors.
//
// All pointers point into the same block of memory; the beginning of
// the block is optimizationRegionTable_->payloadStart().
const IonTrackedOptimizationsRegionTable *optsRegionTable_;
const IonTrackedOptimizationsTypesTable *optsTypesTable_;
const IonTrackedOptimizationsAttemptsTable *optsAttemptsTable_;
// The types table above records type sets, which have been gathered
// into one vector here.
types::TypeSet::TypeList *optsAllTypes_;
struct ScriptNamePair {
JSScript *script;
char *str;
@ -136,6 +154,21 @@ class JitcodeGlobalEntry
BaseEntry::init(Ion, nativeStartAddr, nativeEndAddr);
regionTable_ = regionTable;
scriptList_ = scriptList;
optsRegionTable_ = nullptr;
optsTypesTable_ = nullptr;
optsAllTypes_ = nullptr;
optsAttemptsTable_ = nullptr;
}
// Attach the compact tracked-optimizations tables to this entry. The table
// pointers point into one shared block of memory (see the comment on the
// optsRegionTable_ member); allTypes is the gathered type list, which is
// js_delete'd in destroy().
void initTrackedOptimizations(const IonTrackedOptimizationsRegionTable *regionTable,
                              const IonTrackedOptimizationsTypesTable *typesTable,
                              const IonTrackedOptimizationsAttemptsTable *attemptsTable,
                              types::TypeSet::TypeList *allTypes)
{
    optsRegionTable_ = regionTable;
    optsTypesTable_ = typesTable;
    optsAttemptsTable_ = attemptsTable;
    optsAllTypes_ = allTypes;
}
SizedScriptList *sizedScriptList() const {
@ -176,6 +209,12 @@ class JitcodeGlobalEntry
uint32_t callStackAtAddr(JSRuntime *rt, void *ptr, const char **results,
uint32_t maxResults) const;
// Whether initTrackedOptimizations was called on this entry; the region
// table pointer is the sentinel (it is nulled in init and destroy).
bool hasTrackedOptimizations() const {
    return !!optsRegionTable_;
}
bool optimizationAttemptsAtAddr(void *ptr, mozilla::Maybe<AttemptsVector> &attempts);
};
struct BaselineEntry : public BaseEntry

Просмотреть файл

@ -456,6 +456,11 @@ class MDefinition : public MNode
InlineScriptTree *trackedTree() const {
return trackedSite_ ? trackedSite_->tree() : nullptr;
}
// The optimization tracking info of this definition's bytecode site, or
// nullptr if there is no tracked site or no optimizations were recorded.
TrackedOptimizations *trackedOptimizations() const {
    return trackedSite_ && trackedSite_->hasOptimizations()
           ? trackedSite_->optimizations()
           : nullptr;
}
JSScript *profilerLeaveScript() const {
return trackedTree()->outermostCaller()->script();

Просмотреть файл

@ -87,6 +87,10 @@ class MIRGenerator
return !compilingAsmJS() && instrumentedProfiling();
}
// Optimization tracking piggybacks on profiler instrumentation, and is
// never enabled for analysis-only compilations.
bool isOptimizationTrackingEnabled() {
    return isProfilerInstrumentationEnabled() && !info().isAnalysis();
}
// Whether the main thread is trying to cancel this build.
bool shouldCancel(const char *why) {
maybePause();

Просмотреть файл

@ -204,7 +204,7 @@ MIRGraph::unmarkBlocks()
MBasicBlock *
MBasicBlock::New(MIRGraph &graph, BytecodeAnalysis *analysis, CompileInfo &info,
MBasicBlock *pred, const BytecodeSite *site, Kind kind)
MBasicBlock *pred, BytecodeSite *site, Kind kind)
{
MOZ_ASSERT(site->pc() != nullptr);
@ -220,7 +220,7 @@ MBasicBlock::New(MIRGraph &graph, BytecodeAnalysis *analysis, CompileInfo &info,
MBasicBlock *
MBasicBlock::NewPopN(MIRGraph &graph, CompileInfo &info,
MBasicBlock *pred, const BytecodeSite *site, Kind kind, uint32_t popped)
MBasicBlock *pred, BytecodeSite *site, Kind kind, uint32_t popped)
{
MBasicBlock *block = new(graph.alloc()) MBasicBlock(graph, info, site, kind);
if (!block->init())
@ -234,7 +234,7 @@ MBasicBlock::NewPopN(MIRGraph &graph, CompileInfo &info,
MBasicBlock *
MBasicBlock::NewWithResumePoint(MIRGraph &graph, CompileInfo &info,
MBasicBlock *pred, const BytecodeSite *site,
MBasicBlock *pred, BytecodeSite *site,
MResumePoint *resumePoint)
{
MBasicBlock *block = new(graph.alloc()) MBasicBlock(graph, info, site, NORMAL);
@ -256,7 +256,7 @@ MBasicBlock::NewWithResumePoint(MIRGraph &graph, CompileInfo &info,
MBasicBlock *
MBasicBlock::NewPendingLoopHeader(MIRGraph &graph, CompileInfo &info,
MBasicBlock *pred, const BytecodeSite *site,
MBasicBlock *pred, BytecodeSite *site,
unsigned stackPhiCount)
{
MOZ_ASSERT(site->pc() != nullptr);
@ -324,7 +324,7 @@ MBasicBlock::NewAsmJS(MIRGraph &graph, CompileInfo &info, MBasicBlock *pred, Kin
return block;
}
MBasicBlock::MBasicBlock(MIRGraph &graph, CompileInfo &info, const BytecodeSite *site, Kind kind)
MBasicBlock::MBasicBlock(MIRGraph &graph, CompileInfo &info, BytecodeSite *site, Kind kind)
: unreachable_(false),
graph_(graph),
info_(info),

Просмотреть файл

@ -46,7 +46,7 @@ class MBasicBlock : public TempObject, public InlineListNode<MBasicBlock>
};
private:
MBasicBlock(MIRGraph &graph, CompileInfo &info, const BytecodeSite *site, Kind kind);
MBasicBlock(MIRGraph &graph, CompileInfo &info, BytecodeSite *site, Kind kind);
bool init();
void copySlots(MBasicBlock *from);
bool inherit(TempAllocator &alloc, BytecodeAnalysis *analysis, MBasicBlock *pred,
@ -107,14 +107,14 @@ class MBasicBlock : public TempObject, public InlineListNode<MBasicBlock>
// Creates a new basic block for a MIR generator. If |pred| is not nullptr,
// its slots and stack depth are initialized from |pred|.
static MBasicBlock *New(MIRGraph &graph, BytecodeAnalysis *analysis, CompileInfo &info,
MBasicBlock *pred, const BytecodeSite *site, Kind kind);
MBasicBlock *pred, BytecodeSite *site, Kind kind);
static MBasicBlock *NewPopN(MIRGraph &graph, CompileInfo &info,
MBasicBlock *pred, const BytecodeSite *site, Kind kind, uint32_t popn);
MBasicBlock *pred, BytecodeSite *site, Kind kind, uint32_t popn);
static MBasicBlock *NewWithResumePoint(MIRGraph &graph, CompileInfo &info,
MBasicBlock *pred, const BytecodeSite *site,
MBasicBlock *pred, BytecodeSite *site,
MResumePoint *resumePoint);
static MBasicBlock *NewPendingLoopHeader(MIRGraph &graph, CompileInfo &info,
MBasicBlock *pred, const BytecodeSite *site,
MBasicBlock *pred, BytecodeSite *site,
unsigned loopStateSlots);
static MBasicBlock *NewSplitEdge(MIRGraph &graph, CompileInfo &info, MBasicBlock *pred);
static MBasicBlock *NewAsmJS(MIRGraph &graph, CompileInfo &info,
@ -602,13 +602,14 @@ class MBasicBlock : public TempObject, public InlineListNode<MBasicBlock>
void dump(FILE *fp);
void dump();
// Track bailouts by storing the current pc in MIR instruction added at this
// cycle. This is also used for tracking calls when profiling.
void updateTrackedSite(const BytecodeSite *site) {
// Track bailouts by storing the current pc in MIR instruction added at
// this cycle. This is also used for tracking calls and optimizations when
// profiling.
void updateTrackedSite(BytecodeSite *site) {
MOZ_ASSERT(site->tree() == trackedSite_->tree());
trackedSite_ = site;
}
const BytecodeSite *trackedSite() const {
BytecodeSite *trackedSite() const {
return trackedSite_;
}
jsbytecode *trackedPc() const {
@ -657,7 +658,7 @@ class MBasicBlock : public TempObject, public InlineListNode<MBasicBlock>
Vector<MBasicBlock *, 1, JitAllocPolicy> immediatelyDominated_;
MBasicBlock *immediateDominator_;
const BytecodeSite *trackedSite_;
BytecodeSite *trackedSite_;
#if defined(JS_ION_PERF) || defined(DEBUG)
unsigned lineno_;

Разница между файлами не показана из-за своего большого размера Загрузить разницу

Просмотреть файл

@ -0,0 +1,610 @@
/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
* vim: set ts=8 sts=4 et sw=4 tw=99:
* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#ifndef jit_OptimizationTracking_h
#define jit_OptimizationTracking_h
#include "mozilla/Maybe.h"
#include "jsinfer.h"
#include "jit/CompactBuffer.h"
#include "jit/CompileInfo.h"
#include "jit/JitAllocPolicy.h"
#include "jit/shared/CodeGenerator-shared.h"
namespace js {
namespace jit {
#define TRACKED_STRATEGY_LIST(_) \
_(GetProp_ArgumentsLength, \
"getprop arguments.length") \
_(GetProp_ArgumentsCallee, \
"getprop arguments.callee") \
_(GetProp_InferredConstant, \
"getprop inferred constant") \
_(GetProp_Constant, \
"getprop constant") \
_(GetProp_TypedObject, \
"getprop TypedObject") \
_(GetProp_DefiniteSlot, \
"getprop definite slot") \
_(GetProp_CommonGetter, \
"getprop common getter") \
_(GetProp_InlineAccess, \
"getprop inline access") \
_(GetProp_Innerize, \
"getprop innerize (access on global window)") \
_(GetProp_InlineCache, \
"getprop IC")
// Ordering is important below. All outcomes before GenericSuccess will be
// considered failures, and all outcomes after GenericSuccess will be
// considered successes.
#define TRACKED_OUTCOME_LIST(_) \
_(GenericFailure, \
"failure") \
_(NoTypeInfo, \
"no type info") \
_(NoAnalysisInfo, \
"no newscript analysis") \
_(NoShapeInfo, \
"cannot determine shape") \
_(UnknownObject, \
"unknown object") \
_(UnknownProperties, \
"unknown properties") \
_(Singleton, \
"is singleton") \
_(NotSingleton, \
"is not singleton") \
_(NotFixedSlot, \
"property not in fixed slot") \
_(NotObject, \
"not definitely an object") \
_(NeedsTypeBarrier, \
"needs type barrier") \
_(InDictionaryMode, \
"object in dictionary mode") \
\
_(GenericSuccess, \
"success") \
_(Monomorphic, \
"monomorphic") \
_(Polymorphic, \
"polymorphic")
#define TRACKED_TYPESITE_LIST(_) \
_(Receiver, \
"receiver object")
// Enumeration of optimization strategies, generated from
// TRACKED_STRATEGY_LIST via the X-macro pattern. The trailing Count member
// is the number of strategies and is not itself a valid strategy.
enum class TrackedStrategy : uint32_t {
#define STRATEGY_OP(name, msg) name,
    TRACKED_STRATEGY_LIST(STRATEGY_OP)
// Fix: was "#undef STRATEGY_OPT" (typo) — the helper macro STRATEGY_OP
// leaked past the enum and a nonexistent macro was #undef'd.
#undef STRATEGY_OP
    Count
};
// Enumeration of attempt outcomes, generated from TRACKED_OUTCOME_LIST.
// Ordering is significant: outcomes before GenericSuccess are failures and
// outcomes at or after it are successes (see OptimizationAttempt::succeeded).
enum class TrackedOutcome : uint32_t {
#define OUTCOME_OP(name, msg) name,
    TRACKED_OUTCOME_LIST(OUTCOME_OP)
#undef OUTCOME_OP
    Count
};
// Enumeration of the kinds of sites at which type info is tracked,
// generated from TRACKED_TYPESITE_LIST.
enum class TrackedTypeSite : uint32_t {
#define TYPESITE_OP(name, msg) name,
    TRACKED_TYPESITE_LIST(TYPESITE_OP)
#undef TYPESITE_OP
    Count
};
// A single optimization attempt: the strategy that was tried and the
// outcome it produced. Value type; also serializable to/from the compact
// buffer format.
class OptimizationAttempt
{
    TrackedStrategy strategy_;
    TrackedOutcome outcome_;

  public:
    OptimizationAttempt(TrackedStrategy strategy, TrackedOutcome outcome)
      : strategy_(strategy),
        outcome_(outcome)
    { }

    TrackedStrategy strategy() const { return strategy_; }
    TrackedOutcome outcome() const { return outcome_; }
    void setOutcome(TrackedOutcome outcome) { outcome_ = outcome; }

    // Outcomes ordered before GenericSuccess count as failures; outcomes at
    // or after it count as successes (see TRACKED_OUTCOME_LIST ordering).
    bool failed() const { return outcome_ < TrackedOutcome::GenericSuccess; }
    bool succeeded() const { return !(outcome_ < TrackedOutcome::GenericSuccess); }

    bool operator ==(const OptimizationAttempt &other) const {
        return strategy_ == other.strategy_ && outcome_ == other.outcome_;
    }
    bool operator !=(const OptimizationAttempt &other) const {
        return !(*this == other);
    }
    HashNumber hash() const {
        // Strategy in the high bits, outcome in the low byte.
        return HashNumber(outcome_) + (HashNumber(strategy_) << 8);
    }

    // Deserialization: reads one (strategy, outcome) pair from the reader.
    explicit OptimizationAttempt(CompactBufferReader &reader);
    // Serialization: writes this pair in compact form.
    void writeCompact(CompactBufferWriter &writer) const;
};
typedef Vector<OptimizationAttempt, 4, JitAllocPolicy> TempAttemptsVector;
typedef Vector<OptimizationAttempt, 4, SystemAllocPolicy> AttemptsVector;
class UniqueTrackedTypes;
// Type information observed at a tracked site: the kind of site, the MIR
// type, and the list of observed types. Movable but heavyweight (owns a
// TypeList); also serializable to/from the compact buffer format.
class TrackedTypeInfo
{
    TrackedTypeSite site_;
    MIRType mirType_;
    types::TypeSet::TypeList types_;

  public:
    // Move constructor: steals the type list from |other|.
    TrackedTypeInfo(TrackedTypeInfo &&other)
      : site_(other.site_),
        mirType_(other.mirType_),
        types_(mozilla::Move(other.types_))
    { }

    // Constructs with an empty type list; populate via trackTypeSet/trackType.
    TrackedTypeInfo(TrackedTypeSite site, MIRType mirType)
      : site_(site),
        mirType_(mirType)
    { }

    // Append the contents of a type set / a single type to types_.
    // Return false on OOM.
    bool trackTypeSet(types::TemporaryTypeSet *typeSet);
    bool trackType(types::Type type);

    TrackedTypeSite site() const { return site_; }
    MIRType mirType() const { return mirType_; }
    const types::TypeSet::TypeList &types() const { return types_; }

    bool operator ==(const TrackedTypeInfo &other) const;
    bool operator !=(const TrackedTypeInfo &other) const;

    HashNumber hash() const;

    // This constructor is designed to be used in conjunction with readTypes
    // below it. The same reader must be passed to readTypes after
    // instantiating the TrackedTypeInfo.
    explicit TrackedTypeInfo(CompactBufferReader &reader);
    bool readTypes(CompactBufferReader &reader, const types::TypeSet::TypeList *allTypes);
    bool writeCompact(CompactBufferWriter &writer, UniqueTrackedTypes &uniqueTypes) const;
};
typedef Vector<TrackedTypeInfo, 1, JitAllocPolicy> TempTrackedTypeInfoVector;
typedef Vector<TrackedTypeInfo, 1, SystemAllocPolicy> TrackedTypeInfoVector;
// Tracks the optimization attempts made at a bytecode location.
//
// Holds a vector of type info entries and a vector of attempts; the
// attempt currently being recorded is identified by currentAttempt_
// (UINT32_MAX when no attempt is in progress).
class TrackedOptimizations : public TempObject
{
    friend class UniqueTrackedOptimizations;
    TempTrackedTypeInfoVector types_;
    TempAttemptsVector attempts_;
    uint32_t currentAttempt_;

  public:
    explicit TrackedOptimizations(TempAllocator &alloc)
      : types_(alloc),
        attempts_(alloc),
        currentAttempt_(UINT32_MAX)
    { }

    // Record observed type info / a new strategy attempt. Return false on OOM.
    bool trackTypeInfo(TrackedTypeInfo &&ty);
    bool trackAttempt(TrackedStrategy strategy);
    // Re-point the current attempt at an earlier recorded attempt.
    void amendAttempt(uint32_t index);
    // Record the outcome of the current attempt.
    void trackOutcome(TrackedOutcome outcome);
    void trackSuccess();

    // Structural equality against another recording; used for uniquing.
    bool matchTypes(const TempTrackedTypeInfoVector &other) const;
    bool matchAttempts(const TempAttemptsVector &other) const;

    void spew() const;
};
// Assigns each unique sequence of optimization attempts an index; outputs a
// compact table.
//
// Usage: add() every TrackedOptimizations recording, then sortByFrequency()
// once; after that, indexOf() maps a recording to its (frequency-ordered)
// table index.
class UniqueTrackedOptimizations
{
  public:
    struct SortEntry
    {
        const TempTrackedTypeInfoVector *types;
        const TempAttemptsVector *attempts;
        uint32_t frequency;
    };
    typedef Vector<SortEntry, 4> SortedVector;

  private:
    // Hash-map key: a (types, attempts) pair compared structurally.
    struct Key
    {
        const TempTrackedTypeInfoVector *types;
        const TempAttemptsVector *attempts;

        typedef Key Lookup;
        static HashNumber hash(const Lookup &lookup);
        static bool match(const Key &key, const Lookup &lookup);
        static void rekey(Key &key, const Key &newKey) {
            key = newKey;
        }
    };

    // Map value: assigned index plus how many times the pair was seen.
    struct Entry
    {
        uint8_t index;
        uint32_t frequency;
    };

    // Map of unique (TempTrackedTypeInfoVector, TempAttemptsVector) pairs to
    // indices.
    typedef HashMap<Key, Entry, Key> AttemptsMap;
    AttemptsMap map_;

    // TempAttemptsVectors sorted by frequency.
    SortedVector sorted_;

  public:
    explicit UniqueTrackedOptimizations(JSContext *cx)
      : map_(cx),
        sorted_(cx)
    { }

    bool init() { return map_.init(); }
    // Record one more occurrence of |optimizations|. Return false on OOM.
    bool add(const TrackedOptimizations *optimizations);

    // Build sorted_ from map_, ordered by decreasing frequency.
    bool sortByFrequency(JSContext *cx);
    bool sorted() const { return !sorted_.empty(); }
    uint32_t count() const { MOZ_ASSERT(sorted()); return sorted_.length(); }
    const SortedVector &sortedVector() const { MOZ_ASSERT(sorted()); return sorted_; }
    uint8_t indexOf(const TrackedOptimizations *optimizations) const;
};
// A compact table of tracked optimization information. Pictorially,
//
// +------------------------------------------------+
// | Region 1 | |
// |------------------------------------------------| |
// | Region 2 | |
// |------------------------------------------------| |-- PayloadR of list-of-list of
// | ... | | range triples (see below)
// |------------------------------------------------| |
// | Region M | |
// +================================================+ <- IonTrackedOptimizationsRegionTable
// | uint32_t numRegions_ = M | |
// +------------------------------------------------+ |
// | Region 1 | |
// | uint32_t regionOffset = size(PayloadR) | |
// +------------------------------------------------+ |-- Table
// | ... | |
// +------------------------------------------------+ |
// | Region M | |
// | uint32_t regionOffset | |
// +================================================+
// | Optimization type info 1 | |
// |------------------------------------------------| |
// | Optimization type info 2 | |-- PayloadT of list of
// |------------------------------------------------| | IonTrackedOptimizationTypeInfo in
// | ... | | order of decreasing frequency
// |------------------------------------------------| |
// | Optimization type info N | |
// +================================================+ <- IonTrackedOptimizationsTypesTable
// | uint32_t numEntries_ = N | |
// +------------------------------------------------+ |
// | Optimization type info 1 | |
// | uint32_t entryOffset = size(PayloadT) | |
// +------------------------------------------------+ |-- Table
// | ... | |
// +------------------------------------------------+ |
// | Optimization type info N | |
// | uint32_t entryOffset | |
// +================================================+
// | Optimization attempts 1 | |
// |------------------------------------------------| |
// | Optimization attempts 2 | |-- PayloadA of list of
// |------------------------------------------------| | IonTrackedOptimizationAttempts in
// | ... | | order of decreasing frequency
// |------------------------------------------------| |
// | Optimization attempts N | |
// +================================================+ <- IonTrackedOptimizationsAttemptsTable
// | uint32_t numEntries_ = N | |
// +------------------------------------------------+ |
// | Optimization attempts 1 | |
// | uint32_t entryOffset = size(PayloadA) | |
// +------------------------------------------------+ |-- Table
// | ... | |
// +------------------------------------------------+ |
// | Optimization attempts N | |
// | uint32_t entryOffset | |
// +------------------------------------------------+
//
// Abstractly, each region in the PayloadR section is a list of triples of the
// following, in order of ascending startOffset:
//
// (startOffset, endOffset, optimization attempts index)
//
// The range of [startOffset, endOffset) is the native machine code offsets
// for which the optimization attempts referred to by the index applies.
//
// Concretely, each region starts with a header of:
//
// { startOffset : 32, endOffset : 32 }
//
// followed by an (endOffset, index) pair, then by delta-encoded variants
// triples described below.
//
// Each list of type infos in the PayloadT section is a list of triples:
//
// (kind, MIR type, type set)
//
// The type set is separately in another vector, and what is encoded instead
// is the (offset, length) pair needed to index into that vector.
//
// Each list of optimization attempts in the PayloadA section is a list of
// pairs:
//
// (strategy, outcome)
//
// Both tail tables for PayloadR and PayloadA use reverse offsets from the
// table pointers.
// One region of the compact tracked-optimizations map: a list of
// (startOffset, endOffset, attempts-index) triples covering a span of
// native code. See the layout comment above for where regions live in
// the overall map.
class IonTrackedOptimizationsRegion
{
    const uint8_t *start_;
    const uint8_t *end_;

    // Unpacked state.
    uint32_t startOffset_;
    uint32_t endOffset_;
    const uint8_t *rangesStart_;

    // Decodes the fixed-size region header into the fields above.
    void unpackHeader();

  public:
    IonTrackedOptimizationsRegion(const uint8_t *start, const uint8_t *end)
      : start_(start), end_(end),
        startOffset_(0), endOffset_(0), rangesStart_(nullptr)
    {
        // A region is never empty.
        MOZ_ASSERT(start < end);
        unpackHeader();
    }

    // Offsets for the entire range that this region covers.
    //
    // This, as well as the offsets for the deltas, is open at the ending
    // address: [startOffset, endOffset).
    uint32_t startOffset() const { return startOffset_; }
    uint32_t endOffset() const { return endOffset_; }

    // Iterates the delta-encoded (startOffset, endOffset, index) triples
    // of this region in ascending startOffset order.
    class RangeIterator {
        const uint8_t *cur_;
        const uint8_t *start_;
        const uint8_t *end_;

        uint32_t firstStartOffset_;
        uint32_t prevEndOffset_;

      public:
        RangeIterator(const uint8_t *start, const uint8_t *end, uint32_t startOffset)
          : cur_(start), start_(start), end_(end),
            firstStartOffset_(startOffset), prevEndOffset_(0)
        { }

        bool more() const { return cur_ < end_; }
        void readNext(uint32_t *startOffset, uint32_t *endOffset, uint8_t *index);
    };

    RangeIterator ranges() const { return RangeIterator(rangesStart_, end_, startOffset_); }

    // Find the attempts-vector index for a given native offset, if any
    // range in this region covers it.
    mozilla::Maybe<uint8_t> findAttemptsIndex(uint32_t offset) const;

    // For the variants below, S stands for startDelta, L for length, and I
    // for index. These were automatically generated from training on the
    // Octane benchmark.
    //
    //     byte 1    byte 0
    //     SSSS-SSSL LLLL-LII0
    //   startDelta max 127, length max 63, index max 3

    static const uint32_t ENC1_MASK = 0x1;
    static const uint32_t ENC1_MASK_VAL = 0x0;

    static const uint32_t ENC1_START_DELTA_MAX = 0x7f;
    static const uint32_t ENC1_START_DELTA_SHIFT = 9;

    static const uint32_t ENC1_LENGTH_MAX = 0x3f;
    static const uint32_t ENC1_LENGTH_SHIFT = 3;

    static const uint32_t ENC1_INDEX_MAX = 0x3;
    static const uint32_t ENC1_INDEX_SHIFT = 1;

    //     byte 2    byte 1    byte 0
    //     SSSS-SSSS SSSS-LLLL LLII-II01
    //   startDelta max 4095, length max 63, index max 15

    static const uint32_t ENC2_MASK = 0x3;
    static const uint32_t ENC2_MASK_VAL = 0x1;

    static const uint32_t ENC2_START_DELTA_MAX = 0xfff;
    static const uint32_t ENC2_START_DELTA_SHIFT = 12;

    static const uint32_t ENC2_LENGTH_MAX = 0x3f;
    static const uint32_t ENC2_LENGTH_SHIFT = 6;

    static const uint32_t ENC2_INDEX_MAX = 0xf;
    static const uint32_t ENC2_INDEX_SHIFT = 2;

    //     byte 3    byte 2    byte 1    byte 0
    //     SSSS-SSSS SSSL-LLLL LLLL-LIII IIII-I011
    //   startDelta max 2047, length max 1023, index max 255

    static const uint32_t ENC3_MASK = 0x7;
    static const uint32_t ENC3_MASK_VAL = 0x3;

    static const uint32_t ENC3_START_DELTA_MAX = 0x7ff;
    static const uint32_t ENC3_START_DELTA_SHIFT = 21;

    static const uint32_t ENC3_LENGTH_MAX = 0x3ff;
    static const uint32_t ENC3_LENGTH_SHIFT = 11;

    static const uint32_t ENC3_INDEX_MAX = 0xff;
    static const uint32_t ENC3_INDEX_SHIFT = 3;

    //     byte 4    byte 3    byte 2    byte 1    byte 0
    //     SSSS-SSSS SSSS-SSSL LLLL-LLLL LLLL-LIII IIII-I111
    //   startDelta max 32767, length max 16383, index max 255

    static const uint32_t ENC4_MASK = 0x7;
    static const uint32_t ENC4_MASK_VAL = 0x7;

    static const uint32_t ENC4_START_DELTA_MAX = 0x7fff;
    static const uint32_t ENC4_START_DELTA_SHIFT = 25;

    static const uint32_t ENC4_LENGTH_MAX = 0x3fff;
    static const uint32_t ENC4_LENGTH_SHIFT = 11;

    static const uint32_t ENC4_INDEX_MAX = 0xff;
    static const uint32_t ENC4_INDEX_SHIFT = 3;

    // Whether a (startDelta, length) pair fits in the largest encoding.
    static bool IsDeltaEncodeable(uint32_t startDelta, uint32_t length) {
        MOZ_ASSERT(length != 0);
        return startDelta <= ENC4_START_DELTA_MAX && length <= ENC4_LENGTH_MAX;
    }

    static const uint32_t MAX_RUN_LENGTH = 100;

    typedef CodeGeneratorShared::NativeToTrackedOptimizations NativeToTrackedOptimizations;
    // Number of consecutive entries in [start, end) that can be emitted
    // as one region.
    static uint32_t ExpectedRunLength(const NativeToTrackedOptimizations *start,
                                      const NativeToTrackedOptimizations *end);

    // Decode / encode one delta-encoded triple using the ENC* variants.
    static void ReadDelta(CompactBufferReader &reader, uint32_t *startDelta, uint32_t *length,
                          uint8_t *index);
    static void WriteDelta(CompactBufferWriter &writer, uint32_t startDelta, uint32_t length,
                           uint8_t index);
    // Emit one region (header plus delta-encoded triples) for [start, end).
    static bool WriteRun(CompactBufferWriter &writer,
                         const NativeToTrackedOptimizations *start,
                         const NativeToTrackedOptimizations *end,
                         const UniqueTrackedOptimizations &unique);
};
// A non-owning view over one serialized attempts vector inside the compact
// map; readVector() decodes it back into OptimizationAttempt values.
class IonTrackedOptimizationsAttempts
{
    const uint8_t *start_;
    const uint8_t *end_;

  public:
    IonTrackedOptimizationsAttempts(const uint8_t *start, const uint8_t *end)
      : start_(start), end_(end)
    {
        // Cannot be empty.
        MOZ_ASSERT(start < end);
    }

    // Decode every attempt in [start_, end_) and append it to |attempts|.
    // Returns false on OOM.
    template <class T>
    bool readVector(T *attempts) {
        CompactBufferReader reader(start_, end_);
        for (const uint8_t *pos = start_; pos != end_; pos = reader.currentPosition()) {
            if (!attempts->append(OptimizationAttempt(reader)))
                return false;
            // The reader must never run past the end of this view.
            MOZ_ASSERT(reader.currentPosition() <= end_);
        }
        return true;
    }
};
// A non-owning view over one serialized type-info list inside the compact
// map; readVector() decodes it back into TrackedTypeInfo values, resolving
// type references against |allTypes|.
class IonTrackedOptimizationsTypeInfo
{
    const uint8_t *start_;
    const uint8_t *end_;

  public:
    IonTrackedOptimizationsTypeInfo(const uint8_t *start, const uint8_t *end)
      : start_(start), end_(end)
    {
        // Can be empty; i.e., no type info was tracked.
    }

    bool empty() const { return start_ == end_; }

    // Decode every type-info entry in [start_, end_) and append it to
    // |types|. Returns false on OOM.
    template <class T>
    bool readVector(T *types, const types::TypeSet::TypeList *allTypes) {
        CompactBufferReader reader(start_, end_);
        for (const uint8_t *pos = start_; pos != end_; pos = reader.currentPosition()) {
            // The entry header and its type references are read with the
            // same reader, in that order (see TrackedTypeInfo).
            TrackedTypeInfo ty(reader);
            if (!ty.readTypes(reader, allTypes))
                return false;
            if (!types->append(mozilla::Move(ty)))
                return false;
            MOZ_ASSERT(reader.currentPosition() <= end_);
        }
        return true;
    }
};
// Generic offsets table over a payload that lies immediately BEFORE the
// table in memory (see the layout comment above). Entry offsets are
// reverse offsets counted back from the table pointer. The struct itself
// is an overlay onto the serialized bytes: entryOffsets_ is declared with
// one element but the real array has numEntries_ elements (flexible-array
// idiom), so instances are never constructed directly.
template <class Entry>
class IonTrackedOptimizationsOffsetsTable
{
    uint32_t padding_;
    uint32_t numEntries_;
    uint32_t entryOffsets_[1];

  protected:
    // One past the end of the payload, i.e. the table start minus any
    // alignment padding.
    const uint8_t *payloadEnd() const {
        return (uint8_t *)(this) - padding_;
    }

  public:
    uint32_t numEntries() const { return numEntries_; }
    uint32_t entryOffset(uint32_t index) const {
        MOZ_ASSERT(index < numEntries());
        return entryOffsets_[index];
    }

    // Construct the Entry view for |index|: it starts entryOffset(index)
    // bytes before payloadEnd() and ends where the next entry starts (or at
    // payloadEnd() for the last entry).
    Entry entry(uint32_t index) const {
        const uint8_t *start = payloadEnd() - entryOffset(index);
        const uint8_t *end = payloadEnd();
        if (index < numEntries() - 1)
            end -= entryOffset(index + 1);
        return Entry(start, end);
    }
};
// The region table: an offsets table whose entries are regions, plus
// lookup by native offset. payloadStart() is the start of the whole
// compact-map allocation (entry 0 is the farthest back), which is the
// pointer freed when the owning entry is destroyed.
class IonTrackedOptimizationsRegionTable
  : public IonTrackedOptimizationsOffsetsTable<IonTrackedOptimizationsRegion>
{
  public:
    // Find the region covering the given native code offset, if any.
    mozilla::Maybe<IonTrackedOptimizationsRegion> findRegion(uint32_t offset) const;

    const uint8_t *payloadStart() const { return payloadEnd() - entryOffset(0); }
};
typedef IonTrackedOptimizationsOffsetsTable<IonTrackedOptimizationsAttempts>
IonTrackedOptimizationsAttemptsTable;
typedef IonTrackedOptimizationsOffsetsTable<IonTrackedOptimizationsTypeInfo>
IonTrackedOptimizationsTypesTable;
// Serialize the whole compact tracked-optimizations map for the entries in
// [start, end) into |writer|, uniquing attempts via |unique| and gathering
// all referenced types into |allTypes|. On success the out-params receive
// the region count and the offsets of the three tables within the written
// buffer. Returns false on OOM.
bool
WriteIonTrackedOptimizationsTable(JSContext *cx, CompactBufferWriter &writer,
                                  const CodeGeneratorShared::NativeToTrackedOptimizations *start,
                                  const CodeGeneratorShared::NativeToTrackedOptimizations *end,
                                  const UniqueTrackedOptimizations &unique,
                                  uint32_t *numRegions, uint32_t *regionTableOffsetp,
                                  uint32_t *typesTableOffsetp, uint32_t *attemptsTableOffsetp,
                                  types::TypeSet::TypeList *allTypes);
} // namespace jit
} // namespace js
#endif // jit_OptimizationTracking_h

Просмотреть файл

@ -15,6 +15,7 @@
#include "jit/MacroAssembler.h"
#include "jit/MIR.h"
#include "jit/MIRGenerator.h"
#include "jit/OptimizationTracking.h"
#include "js/Conversions.h"
#include "vm/TraceLogging.h"
@ -59,6 +60,11 @@ CodeGeneratorShared::CodeGeneratorShared(MIRGenerator *gen, LIRGraph *graph, Mac
nativeToBytecodeNumRegions_(0),
nativeToBytecodeScriptList_(nullptr),
nativeToBytecodeScriptListLength_(0),
trackedOptimizationsMap_(nullptr),
trackedOptimizationsMapSize_(0),
trackedOptimizationsRegionTableOffset_(0),
trackedOptimizationsTypesTableOffset_(0),
trackedOptimizationsAttemptsTableOffset_(0),
osrEntryOffset_(0),
skipArgCheckEntryOffset_(0),
#ifdef CHECK_OSIPOINT_REGISTERS
@ -261,6 +267,53 @@ CodeGeneratorShared::dumpNativeToBytecodeEntry(uint32_t idx)
#endif
}
bool
CodeGeneratorShared::addTrackedOptimizationsEntry(const TrackedOptimizations *optimizations)
{
if (!isOptimizationTrackingEnabled())
return true;
MOZ_ASSERT(optimizations);
uint32_t nativeOffset = masm.currentOffset();
if (!trackedOptimizations_.empty()) {
NativeToTrackedOptimizations &lastEntry = trackedOptimizations_.back();
MOZ_ASSERT(nativeOffset >= lastEntry.endOffset.offset());
// If we're still generating code for the same set of optimizations,
// we are done.
if (lastEntry.optimizations == optimizations)
return true;
}
// If we're generating code for a new set of optimizations, add a new
// entry.
NativeToTrackedOptimizations entry;
entry.startOffset = CodeOffsetLabel(nativeOffset);
entry.endOffset = CodeOffsetLabel(nativeOffset);
entry.optimizations = optimizations;
return trackedOptimizations_.append(entry);
}
void
CodeGeneratorShared::extendTrackedOptimizationsEntry(const TrackedOptimizations *optimizations)
{
if (!isOptimizationTrackingEnabled())
return;
uint32_t nativeOffset = masm.currentOffset();
NativeToTrackedOptimizations &entry = trackedOptimizations_.back();
MOZ_ASSERT(entry.optimizations == optimizations);
MOZ_ASSERT(nativeOffset >= entry.endOffset.offset());
entry.endOffset = CodeOffsetLabel(nativeOffset);
// If we generated no code, remove the last entry.
if (nativeOffset == entry.startOffset.offset())
trackedOptimizations_.popBack();
}
// see OffsetOfFrameSlot
static inline int32_t
ToStackIndex(LAllocation *a)
@ -722,6 +775,159 @@ CodeGeneratorShared::verifyCompactNativeToBytecodeMap(JitCode *code)
#endif // DEBUG
}
// Encode the tracked-optimizations ranges gathered during codegen into a
// single compact byte buffer (trackedOptimizationsMap_) holding, in order, a
// region table, a types table, and an attempts table, recording each table's
// offset. |allTypes| receives the types referenced by the encoded type info.
// Returns false on OOM; trivially succeeds if nothing was tracked.
bool
CodeGeneratorShared::generateCompactTrackedOptimizationsMap(JSContext *cx, JitCode *code,
                                                            types::TypeSet::TypeList *allTypes)
{
    // May run at most once per compilation: the map must not exist yet.
    MOZ_ASSERT(trackedOptimizationsMap_ == nullptr);
    MOZ_ASSERT(trackedOptimizationsMapSize_ == 0);
    MOZ_ASSERT(trackedOptimizationsRegionTableOffset_ == 0);
    MOZ_ASSERT(trackedOptimizationsTypesTableOffset_ == 0);
    MOZ_ASSERT(trackedOptimizationsAttemptsTableOffset_ == 0);

    if (trackedOptimizations_.empty())
        return true;

    UniqueTrackedOptimizations unique(cx);
    if (!unique.init())
        return false;

    // Iterate through all entries, fix up their masm offsets (translating
    // pre-link offsets to final ones via masm.actualOffset) and deduplicate
    // their optimization attempts.
    for (size_t i = 0; i < trackedOptimizations_.length(); i++) {
        NativeToTrackedOptimizations &entry = trackedOptimizations_[i];
        entry.startOffset = CodeOffsetLabel(masm.actualOffset(entry.startOffset.offset()));
        entry.endOffset = CodeOffsetLabel(masm.actualOffset(entry.endOffset.offset()));
        if (!unique.add(entry.optimizations))
            return false;
    }

    // Sort the unique optimization attempts by frequency to stabilize the
    // attempts' indices in the compact table we will write later.
    if (!unique.sortByFrequency(cx))
        return false;

    // Write out the ranges and the table.
    CompactBufferWriter writer;
    uint32_t numRegions;
    uint32_t regionTableOffset;
    uint32_t typesTableOffset;
    uint32_t attemptsTableOffset;
    if (!WriteIonTrackedOptimizationsTable(cx, writer,
                                           trackedOptimizations_.begin(),
                                           trackedOptimizations_.end(),
                                           unique, &numRegions,
                                           &regionTableOffset, &typesTableOffset,
                                           &attemptsTableOffset, allTypes))
    {
        return false;
    }

    // The tables follow the region payload in the order regions, types,
    // attempts, so all offsets are non-zero and strictly increasing.
    MOZ_ASSERT(regionTableOffset > 0);
    MOZ_ASSERT(typesTableOffset > 0);
    MOZ_ASSERT(attemptsTableOffset > 0);
    MOZ_ASSERT(typesTableOffset > regionTableOffset);
    MOZ_ASSERT(attemptsTableOffset > typesTableOffset);

    // Copy over the table out of the writer's buffer.
    uint8_t *data = cx->runtime()->pod_malloc<uint8_t>(writer.length());
    if (!data)
        return false;

    memcpy(data, writer.buffer(), writer.length());
    trackedOptimizationsMap_ = data;
    trackedOptimizationsMapSize_ = writer.length();
    trackedOptimizationsRegionTableOffset_ = regionTableOffset;
    trackedOptimizationsTypesTableOffset_ = typesTableOffset;
    trackedOptimizationsAttemptsTableOffset_ = attemptsTableOffset;

    // DEBUG-only: decode the freshly written map and check it round-trips
    // back to the entries in trackedOptimizations_.
    verifyCompactTrackedOptimizationsMap(code, numRegions, unique, allTypes);

    JitSpew(JitSpew_OptimizationTracking,
            "== Compact Native To Optimizations Map [%p-%p] size %u",
            data, data + trackedOptimizationsMapSize_, trackedOptimizationsMapSize_);
    JitSpew(JitSpew_OptimizationTracking,
            " with type list of length %u, size %u",
            allTypes->length(), allTypes->length() * sizeof(types::Type));

    return true;
}
// DEBUG-only sanity check: decode the compact map produced by
// generateCompactTrackedOptimizationsMap and assert that every region,
// range, attempts vector, and type-info vector round-trips to the entries
// recorded in trackedOptimizations_. A no-op in release builds.
void
CodeGeneratorShared::verifyCompactTrackedOptimizationsMap(JitCode *code, uint32_t numRegions,
                                                          const UniqueTrackedOptimizations &unique,
                                                          const types::TypeSet::TypeList *allTypes)
{
#ifdef DEBUG
    // The map and all three table offsets must have been set by the caller.
    MOZ_ASSERT(trackedOptimizationsMap_ != nullptr);
    MOZ_ASSERT(trackedOptimizationsMapSize_ > 0);
    MOZ_ASSERT(trackedOptimizationsRegionTableOffset_ > 0);
    MOZ_ASSERT(trackedOptimizationsTypesTableOffset_ > 0);
    MOZ_ASSERT(trackedOptimizationsAttemptsTableOffset_ > 0);

    // Table pointers must all be 4-byte aligned.
    const uint8_t *regionTableAddr = trackedOptimizationsMap_ +
                                     trackedOptimizationsRegionTableOffset_;
    const uint8_t *typesTableAddr = trackedOptimizationsMap_ +
                                    trackedOptimizationsTypesTableOffset_;
    const uint8_t *attemptsTableAddr = trackedOptimizationsMap_ +
                                       trackedOptimizationsAttemptsTableOffset_;
    MOZ_ASSERT(uintptr_t(regionTableAddr) % sizeof(uint32_t) == 0);
    MOZ_ASSERT(uintptr_t(typesTableAddr) % sizeof(uint32_t) == 0);
    MOZ_ASSERT(uintptr_t(attemptsTableAddr) % sizeof(uint32_t) == 0);

    // Assert that the number of entries matches up for the tables: one region
    // table entry per encoded region, and one types/attempts entry per unique
    // optimizations set.
    const IonTrackedOptimizationsRegionTable *regionTable =
        (const IonTrackedOptimizationsRegionTable *) regionTableAddr;
    MOZ_ASSERT(regionTable->numEntries() == numRegions);
    const IonTrackedOptimizationsTypesTable *typesTable =
        (const IonTrackedOptimizationsTypesTable *) typesTableAddr;
    MOZ_ASSERT(typesTable->numEntries() == unique.count());
    const IonTrackedOptimizationsAttemptsTable *attemptsTable =
        (const IonTrackedOptimizationsAttemptsTable *) attemptsTableAddr;
    MOZ_ASSERT(attemptsTable->numEntries() == unique.count());

    // Verify each region.
    uint32_t trackedIdx = 0;
    for (uint32_t regionIdx = 0; regionIdx < regionTable->numEntries(); regionIdx++) {
        // Check reverse offsets are within bounds and strictly decreasing
        // with increasing index (payloads are addressed back from the table).
        MOZ_ASSERT(regionTable->entryOffset(regionIdx) <= trackedOptimizationsRegionTableOffset_);
        MOZ_ASSERT_IF(regionIdx > 0, regionTable->entryOffset(regionIdx) <
                                     regionTable->entryOffset(regionIdx - 1));

        IonTrackedOptimizationsRegion region = regionTable->entry(regionIdx);

        // Check the region range is covered by jitcode.
        MOZ_ASSERT(region.startOffset() <= code->instructionsSize());
        MOZ_ASSERT(region.endOffset() <= code->instructionsSize());

        // Ranges within regions are delta-encoded; walk them in order,
        // consuming the original entries in lockstep via trackedIdx.
        IonTrackedOptimizationsRegion::RangeIterator iter = region.ranges();
        while (iter.more()) {
            // Assert that the offsets are correctly decoded from the delta.
            uint32_t startOffset, endOffset;
            uint8_t index;
            iter.readNext(&startOffset, &endOffset, &index);
            NativeToTrackedOptimizations &entry = trackedOptimizations_[trackedIdx++];
            MOZ_ASSERT(startOffset == entry.startOffset.offset());
            MOZ_ASSERT(endOffset == entry.endOffset.offset());
            MOZ_ASSERT(index == unique.indexOf(entry.optimizations));

            // Assert that the type info and attempts vector are correctly
            // decoded. Since this is a DEBUG-only verification, crash on OOM.
            IonTrackedOptimizationsTypeInfo typeInfo = typesTable->entry(index);
            TempTrackedTypeInfoVector tvec(alloc());
            MOZ_ALWAYS_TRUE(typeInfo.readVector(&tvec, allTypes));
            MOZ_ASSERT(entry.optimizations->matchTypes(tvec));

            IonTrackedOptimizationsAttempts attempts = attemptsTable->entry(index);
            TempAttemptsVector avec(alloc());
            MOZ_ALWAYS_TRUE(attempts.readVector(&avec));
            MOZ_ASSERT(entry.optimizations->matchAttempts(avec));
        }
    }
#endif
}
void
CodeGeneratorShared::markSafepoint(LInstruction *ins)
{

Просмотреть файл

@ -25,6 +25,7 @@ class OutOfLineCode;
class CodeGenerator;
class MacroAssembler;
class IonCache;
class UniqueTrackedOptimizations;
template <class ArgSeq, class StoreOutputTo>
class OutOfLineCallVM;
@ -114,6 +115,26 @@ class CodeGeneratorShared : public LElementVisitor
return gen->isProfilerInstrumentationEnabled();
}
  public:
    // Associates a half-open range of native code with the optimizations
    // tracked for the MIR emitted in that range.
    struct NativeToTrackedOptimizations {
        // [startOffset, endOffset)
        CodeOffsetLabel startOffset;
        CodeOffsetLabel endOffset;
        const TrackedOptimizations *optimizations;
    };

  protected:
    // Ranges recorded during code generation, in emission order.
    js::Vector<NativeToTrackedOptimizations, 0, SystemAllocPolicy> trackedOptimizations_;

    // Compact encoded form of trackedOptimizations_ (built at link time),
    // its size in bytes, and the offsets within it of the region, types,
    // and attempts tables.
    uint8_t *trackedOptimizationsMap_;
    uint32_t trackedOptimizationsMapSize_;
    uint32_t trackedOptimizationsRegionTableOffset_;
    uint32_t trackedOptimizationsTypesTableOffset_;
    uint32_t trackedOptimizationsAttemptsTableOffset_;

    // Whether the MIR generator has optimization tracking turned on.
    bool isOptimizationTrackingEnabled() {
        return gen->isOptimizationTrackingEnabled();
    }
protected:
// The offset of the first instruction of the OSR entry block from the
// beginning of the code buffer.
@ -243,6 +264,9 @@ class CodeGeneratorShared : public LElementVisitor
void dumpNativeToBytecodeEntries();
void dumpNativeToBytecodeEntry(uint32_t idx);
    // Open a native-offset range attributed to |optimizations| at the current
    // assembler offset, and later close/extend it to the current offset.
    bool addTrackedOptimizationsEntry(const TrackedOptimizations *optimizations);
    void extendTrackedOptimizationsEntry(const TrackedOptimizations *optimizations);
public:
MIRGenerator &mirGen() const {
return *gen;
@ -313,6 +337,12 @@ class CodeGeneratorShared : public LElementVisitor
bool generateCompactNativeToBytecodeMap(JSContext *cx, JitCode *code);
void verifyCompactNativeToBytecodeMap(JitCode *code);
    // Encode the recorded tracked-optimizations ranges into a compact map,
    // and (DEBUG-only) decode it back to verify it round-trips.
    bool generateCompactTrackedOptimizationsMap(JSContext *cx, JitCode *code,
                                                types::TypeSet::TypeList *allTypes);
    void verifyCompactTrackedOptimizationsMap(JitCode *code, uint32_t numRegions,
                                              const UniqueTrackedOptimizations &unique,
                                              const types::TypeSet::TypeList *allTypes);
// Mark the safepoint on |ins| as corresponding to the current assembler location.
// The location should be just after a call.
void markSafepoint(LInstruction *ins);

Просмотреть файл

@ -420,7 +420,7 @@ TypeSet::isSubset(const TypeSet *other) const
}
bool
TypeSet::enumerateTypes(TypeList *list)
TypeSet::enumerateTypes(TypeList *list) const
{
/* If any type is possible, there's no need to worry about specifics. */
if (flags & TYPE_FLAG_UNKNOWN)

Просмотреть файл

@ -511,7 +511,7 @@ class TypeSet
/* Get a list of all types in this set. */
typedef Vector<Type, 1, SystemAllocPolicy> TypeList;
bool enumerateTypes(TypeList *list);
bool enumerateTypes(TypeList *list) const;
/*
* Iterate through the objects in this set. getObjectCount overapproximates

Просмотреть файл

@ -178,6 +178,7 @@ UNIFIED_SOURCES += [
'jit/MIR.cpp',
'jit/MIRGraph.cpp',
'jit/MoveResolver.cpp',
'jit/OptimizationTracking.cpp',
'jit/PerfSpewer.cpp',
'jit/RangeAnalysis.cpp',
'jit/Recover.cpp',

Просмотреть файл

@ -1551,8 +1551,8 @@ js_InitArrayBufferClass(JSContext *cx, HandleObject obj)
RootedId byteLengthId(cx, NameToId(cx->names().byteLength));
unsigned attrs = JSPROP_SHARED | JSPROP_GETTER;
JSObject *getter = NewFunction(cx, NullPtr(), ArrayBufferObject::byteLengthGetter, 0,
JSFunction::NATIVE_FUN, global, NullPtr());
JSObject *getter = NewFunction(cx, js::NullPtr(), ArrayBufferObject::byteLengthGetter, 0,
JSFunction::NATIVE_FUN, global, js::NullPtr());
if (!getter)
return nullptr;