Bug 1170840 - Add testbed allocator for new regalloc features, and use for a change to hot/cold code bundle splitting, r=sunfish.

This commit is contained in:
Brian Hackett 2015-06-15 11:12:45 -07:00
Родитель d8338f8319
Коммит 59ddd53a2e
5 изменённых файлов: 57 добавлений и 18 удалений

Просмотреть файл

@ -2511,6 +2511,13 @@ BacktrackingAllocator::trySplitAcrossHotcode(LiveBundle* bundle, bool* success)
return false;
LiveBundle* preBundle = nullptr;
LiveBundle* postBundle = nullptr;
LiveBundle* coldBundle = nullptr;
if (testbed) {
coldBundle = LiveBundle::New(alloc(), bundle->spillSet(), bundle->spillParent());
if (!coldBundle)
return false;
}
// Accumulate the ranges of hot and cold code in the bundle. Note that
// we are only comparing with the single hot range found, so the cold code
@ -2526,33 +2533,53 @@ BacktrackingAllocator::trySplitAcrossHotcode(LiveBundle* bundle, bool* success)
}
if (!coldPre.empty()) {
if (!preBundle) {
preBundle = LiveBundle::New(alloc(), bundle->spillSet(), bundle->spillParent());
if (!preBundle)
if (testbed) {
if (!coldBundle->addRangeAndDistributeUses(alloc(), range, coldPre.from, coldPre.to))
return false;
} else {
if (!preBundle) {
preBundle = LiveBundle::New(alloc(), bundle->spillSet(), bundle->spillParent());
if (!preBundle)
return false;
}
if (!preBundle->addRangeAndDistributeUses(alloc(), range, coldPre.from, coldPre.to))
return false;
}
if (!preBundle->addRangeAndDistributeUses(alloc(), range, coldPre.from, coldPre.to))
return false;
}
if (!coldPost.empty()) {
if (!postBundle)
postBundle = LiveBundle::New(alloc(), bundle->spillSet(), bundle->spillParent());
if (!postBundle->addRangeAndDistributeUses(alloc(), range, coldPost.from, coldPost.to))
return false;
if (testbed) {
if (!coldBundle->addRangeAndDistributeUses(alloc(), range, coldPost.from, coldPost.to))
return false;
} else {
if (!postBundle) {
postBundle = LiveBundle::New(alloc(), bundle->spillSet(), bundle->spillParent());
if (!postBundle)
return false;
}
if (!postBundle->addRangeAndDistributeUses(alloc(), range, coldPost.from, coldPost.to))
return false;
}
}
}
MOZ_ASSERT(preBundle || postBundle);
MOZ_ASSERT(hotBundle->numRanges() != 0);
LiveBundleVector newBundles;
if (!newBundles.append(hotBundle))
return false;
if (preBundle && !newBundles.append(preBundle))
return false;
if (postBundle && !newBundles.append(postBundle))
return false;
if (testbed) {
MOZ_ASSERT(coldBundle->numRanges() != 0);
if (!newBundles.append(coldBundle))
return false;
} else {
MOZ_ASSERT(preBundle || postBundle);
if (preBundle && !newBundles.append(preBundle))
return false;
if (postBundle && !newBundles.append(postBundle))
return false;
}
*success = true;
return splitAndRequeueBundles(bundle, newBundles);

Просмотреть файл

@ -546,6 +546,9 @@ class BacktrackingAllocator : protected RegisterAllocator
friend class C1Spewer;
friend class JSONSpewer;
// This flag is set when testing new allocator modifications.
bool testbed;
BitSet* liveIn;
FixedList<VirtualRegister> vregs;
@ -606,8 +609,9 @@ class BacktrackingAllocator : protected RegisterAllocator
SpillSlotList normalSlots, doubleSlots, quadSlots;
public:
BacktrackingAllocator(MIRGenerator* mir, LIRGenerator* lir, LIRGraph& graph)
BacktrackingAllocator(MIRGenerator* mir, LIRGenerator* lir, LIRGraph& graph, bool testbed)
: RegisterAllocator(mir, lir, graph),
testbed(testbed),
liveIn(nullptr),
callRanges(nullptr)
{ }

Просмотреть файл

@ -1653,14 +1653,18 @@ GenerateLIR(MIRGenerator* mir)
{
AutoTraceLog log(logger, TraceLogger_RegisterAllocation);
switch (mir->optimizationInfo().registerAllocator()) {
case RegisterAllocator_Backtracking: {
IonRegisterAllocator allocator = mir->optimizationInfo().registerAllocator();
switch (allocator) {
case RegisterAllocator_Backtracking:
case RegisterAllocator_Testbed: {
#ifdef DEBUG
if (!integrity.record())
return nullptr;
#endif
BacktrackingAllocator regalloc(mir, &lirgen, *lir);
BacktrackingAllocator regalloc(mir, &lirgen, *lir,
allocator == RegisterAllocator_Testbed);
if (!regalloc.go())
return nullptr;

Просмотреть файл

@ -23,6 +23,7 @@ static const uint32_t MAX_MAIN_THREAD_LOCALS_AND_ARGS = 256;
// Possible register allocators which may be used.
enum IonRegisterAllocator {
// Priority-based backtracking register allocation (the default).
RegisterAllocator_Backtracking,
// Backtracking allocator with experimental features enabled; selected via
// --ion-regalloc=testbed and used when testing new allocator modifications.
RegisterAllocator_Testbed,
// Simple block-local register allocation.
RegisterAllocator_Stupid
};
@ -31,6 +32,8 @@ LookupRegisterAllocator(const char* name)
{
if (!strcmp(name, "backtracking"))
return mozilla::Some(RegisterAllocator_Backtracking);
if (!strcmp(name, "testbed"))
return mozilla::Some(RegisterAllocator_Testbed);
if (!strcmp(name, "stupid"))
return mozilla::Some(RegisterAllocator_Stupid);
return mozilla::Nothing();

Просмотреть файл

@ -6292,6 +6292,7 @@ main(int argc, char** argv, char** envp)
|| !op.addStringOption('\0', "ion-regalloc", "[mode]",
"Specify Ion register allocation:\n"
" backtracking: Priority based backtracking register allocation (default)\n"
" testbed: Backtracking allocator with experimental features\n"
" stupid: Simple block local register allocation")
|| !op.addBoolOption('\0', "ion-eager", "Always ion-compile methods (implies --baseline-eager)")
|| !op.addStringOption('\0', "ion-offthread-compile", "on/off",