Changed bytecode stream and exception tables to use instruction indexes instead of PC.

jfrijters committed on 2009-04-07 04:53:56 +00:00
Parent ca62b68b61
Commit 70b04e174e
3 changed files with 229 additions and 202 deletions
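In short: instead of the compiler and verifier repeatedly translating bytecode PCs into positions in the parsed Instruction[] array, the class-file reader now does that translation once and stores instruction indexes everywhere (branch targets, switch targets, exception ranges). The following is a minimal standalone sketch of that translation step, using hypothetical names rather than the actual IKVM.Internal types:

// Sketch only: build a PC -> instruction-index map and use it to turn a
// relative branch offset into an absolute instruction index.
static class PcToIndexSketch
{
    // instructionPCs[i] is the bytecode offset (PC) at which instruction i starts;
    // codeLength is the size of the method's code array in bytes.
    internal static int[] BuildPcIndexMap(int[] instructionPCs, int codeLength)
    {
        int[] map = new int[codeLength + 1];
        for (int pc = 0; pc < map.Length; pc++)
        {
            map[pc] = -1;   // PCs that do not start an instruction stay -1
        }
        for (int index = 0; index < instructionPCs.Length; index++)
        {
            map[instructionPCs[index]] = index;
        }
        return map;
    }

    // A branch stored in the class file as an offset relative to the branch's own PC
    // becomes a plain index into the instruction array; an invalid target yields -1,
    // which a later verification pass can reject.
    internal static int ResolveTargetIndex(int[] pcIndexMap, int branchPC, int relativeOffset)
    {
        return pcIndexMap[branchPC + relativeOffset];
    }
}

With the map applied once at read time, every later pass can compare, sort, and split on small integers without consulting the raw bytecode again.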

View file

@ -1,5 +1,5 @@
/*
Copyright (C) 2002-2008 Jeroen Frijters
Copyright (C) 2002-2009 Jeroen Frijters
This software is provided 'as-is', without any express or implied
warranty. In no event will the authors be held liable for any damages
@ -2460,15 +2460,6 @@ namespace IKVM.Internal
}
}
// maps a PC to an index in the Instruction[], invalid PCs return -1
internal int[] PcIndexMap
{
get
{
return code.pcIndexMap;
}
}
internal ExceptionTableEntry[] ExceptionTable
{
get
@ -2499,7 +2490,6 @@ namespace IKVM.Internal
internal ushort max_stack;
internal ushort max_locals;
internal Instruction[] instructions;
internal int[] pcIndexMap;
internal ExceptionTableEntry[] exception_table;
internal int[] argmap;
internal LineNumberTableEntry[] lineNumberTable;
@ -2534,6 +2524,48 @@ namespace IKVM.Internal
}
this.instructions = new Instruction[instructionIndex];
Array.Copy(instructions, 0, this.instructions, 0, instructionIndex);
// build the pcIndexMap
int[] pcIndexMap = new int[this.instructions[instructionIndex - 1].PC + 1];
for(int i = 0; i < pcIndexMap.Length; i++)
{
pcIndexMap[i] = -1;
}
for(int i = 0; i < instructionIndex - 1; i++)
{
pcIndexMap[this.instructions[i].PC] = i;
}
// convert branch offsets to indexes
for(int i = 0; i < instructionIndex - 1; i++)
{
switch(this.instructions[i].NormalizedOpCode)
{
case NormalizedByteCode.__ifeq:
case NormalizedByteCode.__ifne:
case NormalizedByteCode.__iflt:
case NormalizedByteCode.__ifge:
case NormalizedByteCode.__ifgt:
case NormalizedByteCode.__ifle:
case NormalizedByteCode.__if_icmpeq:
case NormalizedByteCode.__if_icmpne:
case NormalizedByteCode.__if_icmplt:
case NormalizedByteCode.__if_icmpge:
case NormalizedByteCode.__if_icmpgt:
case NormalizedByteCode.__if_icmple:
case NormalizedByteCode.__if_acmpeq:
case NormalizedByteCode.__if_acmpne:
case NormalizedByteCode.__ifnull:
case NormalizedByteCode.__ifnonnull:
case NormalizedByteCode.__goto:
case NormalizedByteCode.__jsr:
this.instructions[i].SetTargetIndex(pcIndexMap[this.instructions[i].Arg1 + this.instructions[i].PC]);
break;
case NormalizedByteCode.__tableswitch:
case NormalizedByteCode.__lookupswitch:
this.instructions[i].SetSwitchTargets(pcIndexMap);
break;
}
}
// read exception table
ushort exception_table_length = br.ReadUInt16();
exception_table = new ExceptionTableEntry[exception_table_length];
for(int i = 0; i < exception_table_length; i++)
@ -2550,11 +2582,13 @@ namespace IKVM.Internal
throw new ClassFormatError("Illegal exception table: {0}.{1}{2}", classFile.Name, method.Name, method.Signature);
}
exception_table[i] = new ExceptionTableEntry();
exception_table[i].start_pc = start_pc;
exception_table[i].end_pc = end_pc;
exception_table[i].handler_pc = handler_pc;
exception_table[i].catch_type = catch_type;
exception_table[i].ordinal = i;
// if start_pc, end_pc or handler_pc is invalid (i.e. doesn't point to the start of an instruction),
// the index will be -1 and this will be handled by the verifier
exception_table[i].startIndex = pcIndexMap[start_pc];
exception_table[i].endIndex = pcIndexMap[end_pc];
exception_table[i].handlerIndex = pcIndexMap[handler_pc];
}
ushort attributes_count = br.ReadUInt16();
for(int i = 0; i < attributes_count; i++)
@ -2613,16 +2647,6 @@ namespace IKVM.Internal
break;
}
}
// build the pcIndexMap
pcIndexMap = new int[this.instructions[instructionIndex - 1].PC + 1];
for(int i = 0; i < pcIndexMap.Length; i++)
{
pcIndexMap[i] = -1;
}
for(int i = 0; i < instructionIndex - 1; i++)
{
pcIndexMap[this.instructions[i].PC] = i;
}
// build the argmap
string sig = method.Signature;
List<int> args = new List<int>();
@ -2675,9 +2699,9 @@ namespace IKVM.Internal
internal sealed class ExceptionTableEntry
{
internal ushort start_pc;
internal ushort end_pc;
internal ushort handler_pc;
internal int startIndex;
internal int endIndex;
internal int handlerIndex;
internal ushort catch_type;
internal int ordinal;
}
@ -2703,7 +2727,7 @@ namespace IKVM.Internal
struct SwitchEntry
{
internal int value;
internal int target_offset;
internal int target;
}
internal void SetHardError(HardError error, int messageId)
@ -2756,6 +2780,11 @@ namespace IKVM.Internal
this.arg1 = arg1;
}
internal void SetTargetIndex(int targetIndex)
{
this.arg1 = targetIndex;
}
internal void SetTermNop(ushort pc)
{
// TODO what happens if we already have exactly the maximum number of instructions?
@ -2763,6 +2792,15 @@ namespace IKVM.Internal
this.normopcode = NormalizedByteCode.__nop;
}
internal void SetSwitchTargets(int[] pcIndexMap)
{
arg1 = pcIndexMap[arg1 + pc];
for (int i = 0; i < switch_entries.Length; i++)
{
switch_entries[i].target = pcIndexMap[switch_entries[i].target + pc];
}
}
internal void Read(ushort pc, BigEndianBinaryReader br)
{
this.pc = pc;
@ -2824,7 +2862,7 @@ namespace IKVM.Internal
for(int i = low; i <= high; i++)
{
entries[i - low].value = i;
entries[i - low].target_offset = br.ReadInt32();
entries[i - low].target = br.ReadInt32();
}
this.switch_entries = entries;
break;
@ -2846,7 +2884,7 @@ namespace IKVM.Internal
for(int i = 0; i < count; i++)
{
entries[i].value = br.ReadInt32();
entries[i].target_offset = br.ReadInt32();
entries[i].target = br.ReadInt32();
}
this.switch_entries = entries;
break;
@ -2899,6 +2937,14 @@ namespace IKVM.Internal
}
}
internal int TargetIndex
{
get
{
return arg1;
}
}
internal int Arg2
{
get
@ -2915,7 +2961,7 @@ namespace IKVM.Internal
}
}
internal int DefaultOffset
internal int DefaultTarget
{
get
{
@ -2936,9 +2982,9 @@ namespace IKVM.Internal
return switch_entries[i].value;
}
internal int GetSwitchTargetOffset(int i)
internal int GetSwitchTargetIndex(int i)
{
return switch_entries[i].target_offset;
return switch_entries[i].target;
}
}

View file

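The compiler changes in this file consume those indexes directly: exception-table entries are sorted, merged, and split by comparing startIndex/endIndex instead of calling FindPcIndex on raw PCs. As a rough illustration of the splitting step the code below performs in several places (a cut-down entry type and helper, not the actual IKVM code):

// Illustrative only: a trimmed-down entry carrying the new index fields.
sealed class EntrySketch
{
    internal int startIndex;     // first instruction covered (inclusive)
    internal int endIndex;       // first instruction not covered (exclusive)
    internal int handlerIndex;   // instruction index of the handler
    internal ushort catch_type;
}

static class TrySplitSketch
{
    // Cut a guarded range [startIndex, endIndex) at a branch target: the original
    // entry keeps the head of the range, and a new entry with the same handler
    // covers the tail, mirroring how the compiler inserts split entries.
    internal static EntrySketch SplitAt(EntrySketch e, int targetIndex)
    {
        EntrySketch tail = new EntrySketch
        {
            startIndex = targetIndex,
            endIndex = e.endIndex,
            handlerIndex = e.handlerIndex,
            catch_type = e.catch_type
        };
        e.endIndex = targetIndex;
        return tail;
    }
}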
@ -37,7 +37,6 @@ using IKVM.Attributes;
using IKVM.Internal;
using ExceptionTableEntry = IKVM.Internal.ClassFile.Method.ExceptionTableEntry;
using LocalVariableTableEntry = IKVM.Internal.ClassFile.Method.LocalVariableTableEntry;
using Instruction = IKVM.Internal.ClassFile.Method.Instruction;
static class ByteCodeHelperMethods
@ -234,13 +233,13 @@ class Compiler
{
public int Compare(ExceptionTableEntry e1, ExceptionTableEntry e2)
{
if(e1.start_pc < e2.start_pc)
if(e1.startIndex < e2.startIndex)
{
return -1;
}
if(e1.start_pc == e2.start_pc)
if(e1.startIndex == e2.startIndex)
{
if(e1.end_pc == e2.end_pc)
if(e1.endIndex == e2.endIndex)
{
if(e1.ordinal > e2.ordinal)
{
@ -248,7 +247,7 @@ class Compiler
}
return 1;
}
if(e1.end_pc > e2.end_pc)
if(e1.endIndex > e2.endIndex)
{
return -1;
}
@ -352,11 +351,11 @@ class Compiler
for(int i = 0; i < ar.Count; i++)
{
ExceptionTableEntry ei = ar[i];
if(ei.start_pc == ei.handler_pc && ei.catch_type == 0)
if(ei.startIndex == ei.handlerIndex && ei.catch_type == 0)
{
int index = FindPcIndex(ei.start_pc);
int index = ei.startIndex;
if(index + 2 < m.Instructions.Length
&& FindPcIndex(ei.end_pc) == index + 2
&& ei.endIndex == index + 2
&& m.Instructions[index].NormalizedOpCode == NormalizedByteCode.__aload
&& m.Instructions[index + 1].NormalizedOpCode == NormalizedByteCode.__monitorexit
&& m.Instructions[index + 2].NormalizedOpCode == NormalizedByteCode.__athrow)
@ -366,7 +365,7 @@ class Compiler
i--;
}
else if(index + 4 < m.Instructions.Length
&& FindPcIndex(ei.end_pc) == index + 3
&& ei.endIndex == index + 3
&& m.Instructions[index].NormalizedOpCode == NormalizedByteCode.__astore
&& m.Instructions[index + 1].NormalizedOpCode == NormalizedByteCode.__aload
&& m.Instructions[index + 2].NormalizedOpCode == NormalizedByteCode.__monitorexit
@ -388,41 +387,41 @@ class Compiler
for(int j = 0; j < ar.Count; j++)
{
ExceptionTableEntry ej = ar[j];
if(ei.start_pc <= ej.start_pc && ej.start_pc < ei.end_pc)
if(ei.startIndex <= ej.startIndex && ej.startIndex < ei.endIndex)
{
// 0006/test.j
if(ej.end_pc > ei.end_pc)
if(ej.endIndex > ei.endIndex)
{
ExceptionTableEntry emi = new ExceptionTableEntry();
emi.start_pc = ej.start_pc;
emi.end_pc = ei.end_pc;
emi.startIndex = ej.startIndex;
emi.endIndex = ei.endIndex;
emi.catch_type = ei.catch_type;
emi.handler_pc = ei.handler_pc;
emi.handlerIndex = ei.handlerIndex;
ExceptionTableEntry emj = new ExceptionTableEntry();
emj.start_pc = ej.start_pc;
emj.end_pc = ei.end_pc;
emj.startIndex = ej.startIndex;
emj.endIndex = ei.endIndex;
emj.catch_type = ej.catch_type;
emj.handler_pc = ej.handler_pc;
ei.end_pc = emi.start_pc;
ej.start_pc = emj.end_pc;
emj.handlerIndex = ej.handlerIndex;
ei.endIndex = emi.startIndex;
ej.startIndex = emj.endIndex;
ar.Insert(j, emj);
ar.Insert(i + 1, emi);
goto restart;
}
// 0007/test.j
else if(j > i && ej.end_pc < ei.end_pc)
else if(j > i && ej.endIndex < ei.endIndex)
{
ExceptionTableEntry emi = new ExceptionTableEntry();
emi.start_pc = ej.start_pc;
emi.end_pc = ej.end_pc;
emi.startIndex = ej.startIndex;
emi.endIndex = ej.endIndex;
emi.catch_type = ei.catch_type;
emi.handler_pc = ei.handler_pc;
emi.handlerIndex = ei.handlerIndex;
ExceptionTableEntry eei = new ExceptionTableEntry();
eei.start_pc = ej.end_pc;
eei.end_pc = ei.end_pc;
eei.startIndex = ej.endIndex;
eei.endIndex = ei.endIndex;
eei.catch_type = ei.catch_type;
eei.handler_pc = ei.handler_pc;
ei.end_pc = emi.start_pc;
eei.handlerIndex = ei.handlerIndex;
ei.endIndex = emi.startIndex;
ar.Insert(i + 1, eei);
ar.Insert(i + 1, emi);
goto restart;
@ -454,19 +453,19 @@ class Compiler
for(int i = 0; i < ar.Count; i++)
{
ExceptionTableEntry ei = ar[i];
for(int j = FindPcIndex(ei.start_pc), e = FindPcIndex(ei.end_pc); j < e; j++)
for(int j = ei.startIndex, e = ei.endIndex; j < e; j++)
{
if(m.Instructions[j].NormalizedOpCode == NormalizedByteCode.__jsr)
{
int targetPC = m.Instructions[j].NormalizedArg1 + m.Instructions[j].PC;
if(targetPC < ei.start_pc || targetPC >= ei.end_pc)
int targetIndex = m.Instructions[j].TargetIndex;
if(targetIndex < ei.startIndex || targetIndex >= ei.endIndex)
{
ExceptionTableEntry en = new ExceptionTableEntry();
en.catch_type = ei.catch_type;
en.handler_pc = ei.handler_pc;
en.start_pc = (ushort)m.Instructions[j + 1].PC;
en.end_pc = ei.end_pc;
ei.end_pc = (ushort)m.Instructions[j].PC;
en.handlerIndex = ei.handlerIndex;
en.startIndex = j + 1;
en.endIndex = ei.endIndex;
ei.endIndex = j;
ar.Insert(i + 1, en);
goto restart_jsr;
}
@ -477,8 +476,8 @@ class Compiler
for(int i = 0; i < ar.Count; i++)
{
ExceptionTableEntry ei = ar[i];
int start = FindPcIndex(ei.start_pc);
int end = FindPcIndex(ei.end_pc);
int start = ei.startIndex;
int end = ei.endIndex;
for(int j = 0; j < m.Instructions.Length; j++)
{
if(j < start || j >= end)
@ -490,15 +489,15 @@ class Compiler
// start at -1 to have an opportunity to handle the default offset
for(int k = -1; k < m.Instructions[j].SwitchEntryCount; k++)
{
int targetPC = m.Instructions[j].PC + (k == -1 ? m.Instructions[j].DefaultOffset : m.Instructions[j].GetSwitchTargetOffset(k));
if(ei.start_pc < targetPC && targetPC < ei.end_pc)
int targetIndex = (k == -1 ? m.Instructions[j].DefaultTarget : m.Instructions[j].GetSwitchTargetIndex(k));
if(ei.startIndex < targetIndex && targetIndex < ei.endIndex)
{
ExceptionTableEntry en = new ExceptionTableEntry();
en.catch_type = ei.catch_type;
en.handler_pc = ei.handler_pc;
en.start_pc = (ushort)targetPC;
en.end_pc = ei.end_pc;
ei.end_pc = (ushort)targetPC;
en.handlerIndex = ei.handlerIndex;
en.startIndex = targetIndex;
en.endIndex = ei.endIndex;
ei.endIndex = targetIndex;
ar.Insert(i + 1, en);
goto restart_jsr;
}
@ -523,15 +522,15 @@ class Compiler
case NormalizedByteCode.__goto:
case NormalizedByteCode.__jsr:
{
int targetPC = m.Instructions[j].PC + m.Instructions[j].Arg1;
if(ei.start_pc < targetPC && targetPC < ei.end_pc)
int targetIndex = m.Instructions[j].Arg1;
if(ei.startIndex < targetIndex && targetIndex < ei.endIndex)
{
ExceptionTableEntry en = new ExceptionTableEntry();
en.catch_type = ei.catch_type;
en.handler_pc = ei.handler_pc;
en.start_pc = (ushort)targetPC;
en.end_pc = ei.end_pc;
ei.end_pc = (ushort)targetPC;
en.handlerIndex = ei.handlerIndex;
en.startIndex = targetIndex;
en.endIndex = ei.endIndex;
ei.endIndex = targetIndex;
ar.Insert(i + 1, en);
goto restart_jsr;
}
@ -548,14 +547,14 @@ class Compiler
for(int j = 0; j < ar.Count; j++)
{
ExceptionTableEntry ej = ar[j];
if(ei.start_pc < ej.handler_pc && ej.handler_pc < ei.end_pc)
if(ei.startIndex < ej.handlerIndex && ej.handlerIndex < ei.endIndex)
{
ExceptionTableEntry en = new ExceptionTableEntry();
en.catch_type = ei.catch_type;
en.handler_pc = ei.handler_pc;
en.start_pc = ej.handler_pc;
en.end_pc = ei.end_pc;
ei.end_pc = ej.handler_pc;
en.handlerIndex = ei.handlerIndex;
en.startIndex = ej.handlerIndex;
en.endIndex = ei.endIndex;
ei.endIndex = ej.handlerIndex;
ar.Insert(i + 1, en);
goto restart_jsr;
}
@ -565,7 +564,7 @@ class Compiler
for(int i = 0; i < ar.Count; i++)
{
ExceptionTableEntry ei = ar[i];
if(ei.start_pc == ei.end_pc)
if(ei.startIndex == ei.endIndex)
{
ar.RemoveAt(i);
i--;
@ -582,8 +581,8 @@ class Compiler
TypeWrapper exceptionType = classFile.GetConstantPoolClassType(ei.catch_type);
if(!exceptionType.IsUnloadable && !java_lang_ThreadDeath.IsAssignableTo(exceptionType))
{
int start = FindPcIndex(ei.start_pc);
int end = FindPcIndex(ei.end_pc);
int start = ei.startIndex;
int end = ei.endIndex;
for(int j = start; j < end; j++)
{
if(ByteCodeMetaData.CanThrowException(m.Instructions[j].NormalizedOpCode))
@ -618,27 +617,27 @@ class Compiler
for(int j = i + 1; j < exceptions.Length; j++)
{
// check for partially overlapping try blocks (which is legal for the JVM, but not the CLR)
if(exceptions[i].start_pc < exceptions[j].start_pc &&
exceptions[j].start_pc < exceptions[i].end_pc &&
exceptions[i].end_pc < exceptions[j].end_pc)
if(exceptions[i].startIndex < exceptions[j].startIndex &&
exceptions[j].startIndex < exceptions[i].endIndex &&
exceptions[i].endIndex < exceptions[j].endIndex)
{
throw new InvalidOperationException("Partially overlapping try blocks is broken");
}
// check that we didn't destroy the ordering, when sorting
if(exceptions[i].start_pc <= exceptions[j].start_pc &&
exceptions[i].end_pc >= exceptions[j].end_pc &&
if(exceptions[i].startIndex <= exceptions[j].startIndex &&
exceptions[i].endIndex >= exceptions[j].endIndex &&
exceptions[i].ordinal < exceptions[j].ordinal)
{
throw new InvalidOperationException("Non recursive try blocks is broken");
}
}
// make sure __jsr doesn't jump out of try block
for(int j = FindPcIndex(exceptions[i].start_pc), e = FindPcIndex(exceptions[i].end_pc); j < e; j++)
for(int j = exceptions[i].startIndex, e = exceptions[i].endIndex; j < e; j++)
{
if(m.Instructions[j].NormalizedOpCode == NormalizedByteCode.__jsr)
{
int targetPC = m.Instructions[j].NormalizedArg1 + m.Instructions[j].PC;
if(targetPC < exceptions[i].start_pc || targetPC >= exceptions[i].end_pc)
int targetIndex = m.Instructions[j].TargetIndex;
if(targetIndex < exceptions[i].startIndex || targetIndex >= exceptions[i].endIndex)
{
throw new InvalidOperationException("Try block splitting around __jsr is broken");
}
@ -675,20 +674,20 @@ class Compiler
internal CodeEmitterLabel Stub;
internal CodeEmitterLabel TargetLabel;
internal bool ContentOnStack;
internal readonly int TargetPC;
internal readonly int TargetIndex;
internal DupHelper dh;
internal BranchCookie(Compiler compiler, int stackHeight, int targetPC)
internal BranchCookie(Compiler compiler, int stackHeight, int targetIndex)
{
this.Stub = compiler.ilGenerator.DefineLabel();
this.TargetPC = targetPC;
this.TargetIndex = targetIndex;
this.dh = new DupHelper(compiler, stackHeight);
}
internal BranchCookie(CodeEmitterLabel label, int targetPC)
internal BranchCookie(CodeEmitterLabel label, int targetIndex)
{
this.Stub = label;
this.TargetPC = targetPC;
this.TargetIndex = targetIndex;
}
}
@ -963,30 +962,30 @@ class Compiler
{
private Compiler compiler;
private CodeEmitter ilgen;
private int begin;
private int end;
private int beginIndex;
private int endIndex;
private int exceptionIndex;
private List<object> exits;
private bool nested;
private object[] labels;
internal Block(Compiler compiler, int beginPC, int endPC, int exceptionIndex, List<object> exits, bool nested)
internal Block(Compiler compiler, int beginIndex, int endIndex, int exceptionIndex, List<object> exits, bool nested)
{
this.compiler = compiler;
this.ilgen = compiler.ilGenerator;
this.begin = beginPC;
this.end = endPC;
this.beginIndex = beginIndex;
this.endIndex = endIndex;
this.exceptionIndex = exceptionIndex;
this.exits = exits;
this.nested = nested;
labels = new object[compiler.m.Instructions.Length];
}
internal int End
internal int EndIndex
{
get
{
return end;
return endIndex;
}
}
@ -1009,10 +1008,9 @@ class Compiler
exits.Add(bc);
}
internal CodeEmitterLabel GetLabel(int targetPC)
internal CodeEmitterLabel GetLabel(int targetIndex)
{
int targetIndex = compiler.FindPcIndex(targetPC);
if(IsInRange(targetPC))
if(IsInRange(targetIndex))
{
object l = labels[targetIndex];
if(l == null)
@ -1031,7 +1029,7 @@ class Compiler
// that saves the stack and uses leave to leave the exception block (to another stub that recovers
// the stack)
int stackHeight = compiler.ma.GetStackHeight(targetIndex);
BranchCookie bc = new BranchCookie(compiler, stackHeight, targetPC);
BranchCookie bc = new BranchCookie(compiler, stackHeight, targetIndex);
bc.ContentOnStack = true;
for(int i = 0; i < stackHeight; i++)
{
@ -1065,9 +1063,9 @@ class Compiler
}
}
internal bool IsInRange(int pc)
internal bool IsInRange(int index)
{
return begin <= pc && pc < end;
return beginIndex <= index && index < endIndex;
}
internal void Leave()
@ -1097,7 +1095,7 @@ class Compiler
{
bc.dh.Store(n);
}
if(bc.TargetPC == -1)
if(bc.TargetIndex == -1)
{
ilgen.Emit(OpCodes.Br, bc.TargetLabel);
}
@ -1133,12 +1131,12 @@ class Compiler
else
{
BranchCookie bc = exit as BranchCookie;
if(bc != null && bc.TargetPC != -1)
if(bc != null && bc.TargetIndex != -1)
{
Debug.Assert(!bc.ContentOnStack);
// if the target is within the new block, we handle it, otherwise we
// defer the cookie to our caller
if(newBlock.IsInRange(bc.TargetPC))
if(newBlock.IsInRange(bc.TargetIndex))
{
bc.ContentOnStack = true;
ilgen.MarkLabel(bc.Stub);
@ -1147,7 +1145,7 @@ class Compiler
{
bc.dh.Load(n);
}
ilgen.Emit(OpCodes.Br, newBlock.GetLabel(bc.TargetPC));
ilgen.Emit(OpCodes.Br, newBlock.GetLabel(bc.TargetIndex));
}
else
{
@ -1175,12 +1173,12 @@ class Compiler
private bool IsGuardedBlock(Stack<Block> blockStack, int instructionIndex, int instructionCount)
{
int start_pc = m.Instructions[instructionIndex].PC;
int end_pc = m.Instructions[instructionIndex + instructionCount].PC;
int start = instructionIndex;
int end = instructionIndex + instructionCount;
for(int i = 0; i < exceptions.Length; i++)
{
ExceptionTableEntry e = exceptions[i];
if(e.end_pc > start_pc && e.start_pc < end_pc)
if(e.endIndex > start && e.startIndex < end)
{
foreach(Block block in blockStack)
{
@ -1207,7 +1205,7 @@ class Compiler
Instruction instr = code[i];
// if we've left the current exception block, do the exit processing
while(block.End == instr.PC)
while(block.EndIndex == i)
{
block.Leave();
@ -1218,11 +1216,11 @@ class Compiler
exceptionIndex = block.ExceptionIndex + 1;
// skip over exception handlers that are no longer relevant
for(; exceptionIndex < exceptions.Length && exceptions[exceptionIndex].end_pc <= instr.PC; exceptionIndex++)
for(; exceptionIndex < exceptions.Length && exceptions[exceptionIndex].endIndex <= i; exceptionIndex++)
{
}
int handlerIndex = FindPcIndex(exc.handler_pc);
int handlerIndex = exc.handlerIndex;
if(exc.catch_type == 0
&& handlerIndex + 2 < m.Instructions.Length
@ -1282,7 +1280,7 @@ class Compiler
{
ilGenerator.BeginCatchBlock(typeof(Exception));
}
BranchCookie bc = new BranchCookie(this, 1, exc.handler_pc);
BranchCookie bc = new BranchCookie(this, 1, exc.handlerIndex);
prevBlock.AddExitHack(bc);
Instruction handlerInstr = code[handlerIndex];
bool unusedException = mapSafe && (handlerInstr.NormalizedOpCode == NormalizedByteCode.__pop ||
@ -1396,7 +1394,7 @@ class Compiler
// transfer the stack into it
// Note that an exception block that *starts* at an unreachable instruction,
// is completely unreachable, because it is impossible to branch into an exception block.
for(; exceptionIndex < exceptions.Length && exceptions[exceptionIndex].start_pc == instr.PC; exceptionIndex++)
for(; exceptionIndex < exceptions.Length && exceptions[exceptionIndex].startIndex == i; exceptionIndex++)
{
int stackHeight = ma.GetStackHeight(i);
if(stackHeight != 0)
@ -1419,7 +1417,7 @@ class Compiler
ilGenerator.BeginExceptionBlock();
}
blockStack.Push(block);
block = new Block(this, exceptions[exceptionIndex].start_pc, exceptions[exceptionIndex].end_pc, exceptionIndex, new List<object>(), true);
block = new Block(this, exceptions[exceptionIndex].startIndex, exceptions[exceptionIndex].endIndex, exceptionIndex, new List<object>(), true);
block.MarkLabel(i);
}
@ -1428,7 +1426,7 @@ class Compiler
{
for(int j = 0; j < table.Length; j++)
{
if(table[j].start_pc == instr.PC && table[j].line_number != 0)
if(table[j].start_pc == m.Instructions[i].PC && table[j].line_number != 0)
{
if(symboldocument != null)
{
@ -1484,7 +1482,7 @@ class Compiler
case NormalizedByteCode.__if_acmpeq:
case NormalizedByteCode.__if_acmpne:
case NormalizedByteCode.__goto:
if(instr.Arg1 <= 0)
if(instr.TargetIndex <= i)
{
ilGenerator.Emit(OpCodes.Ldarg_0);
ilGenerator.Emit(OpCodes.Call, keepAliveMethod);
@ -2438,55 +2436,55 @@ class Compiler
ilGenerator.LazyEmit_dcmpg();
break;
case NormalizedByteCode.__if_icmpeq:
ilGenerator.Emit(OpCodes.Beq, block.GetLabel(instr.PC + instr.Arg1));
ilGenerator.Emit(OpCodes.Beq, block.GetLabel(instr.TargetIndex));
break;
case NormalizedByteCode.__if_icmpne:
ilGenerator.Emit(OpCodes.Bne_Un, block.GetLabel(instr.PC + instr.Arg1));
ilGenerator.Emit(OpCodes.Bne_Un, block.GetLabel(instr.TargetIndex));
break;
case NormalizedByteCode.__if_icmple:
ilGenerator.Emit(OpCodes.Ble, block.GetLabel(instr.PC + instr.Arg1));
ilGenerator.Emit(OpCodes.Ble, block.GetLabel(instr.TargetIndex));
break;
case NormalizedByteCode.__if_icmplt:
ilGenerator.Emit(OpCodes.Blt, block.GetLabel(instr.PC + instr.Arg1));
ilGenerator.Emit(OpCodes.Blt, block.GetLabel(instr.TargetIndex));
break;
case NormalizedByteCode.__if_icmpge:
ilGenerator.Emit(OpCodes.Bge, block.GetLabel(instr.PC + instr.Arg1));
ilGenerator.Emit(OpCodes.Bge, block.GetLabel(instr.TargetIndex));
break;
case NormalizedByteCode.__if_icmpgt:
ilGenerator.Emit(OpCodes.Bgt, block.GetLabel(instr.PC + instr.Arg1));
ilGenerator.Emit(OpCodes.Bgt, block.GetLabel(instr.TargetIndex));
break;
case NormalizedByteCode.__ifle:
ilGenerator.LazyEmit_if_le_lt_ge_gt(CodeEmitter.Comparison.LessOrEqual, block.GetLabel(instr.PC + instr.Arg1));
ilGenerator.LazyEmit_if_le_lt_ge_gt(CodeEmitter.Comparison.LessOrEqual, block.GetLabel(instr.TargetIndex));
break;
case NormalizedByteCode.__iflt:
ilGenerator.LazyEmit_if_le_lt_ge_gt(CodeEmitter.Comparison.LessThan, block.GetLabel(instr.PC + instr.Arg1));
ilGenerator.LazyEmit_if_le_lt_ge_gt(CodeEmitter.Comparison.LessThan, block.GetLabel(instr.TargetIndex));
break;
case NormalizedByteCode.__ifge:
ilGenerator.LazyEmit_if_le_lt_ge_gt(CodeEmitter.Comparison.GreaterOrEqual, block.GetLabel(instr.PC + instr.Arg1));
ilGenerator.LazyEmit_if_le_lt_ge_gt(CodeEmitter.Comparison.GreaterOrEqual, block.GetLabel(instr.TargetIndex));
break;
case NormalizedByteCode.__ifgt:
ilGenerator.LazyEmit_if_le_lt_ge_gt(CodeEmitter.Comparison.GreaterThan, block.GetLabel(instr.PC + instr.Arg1));
ilGenerator.LazyEmit_if_le_lt_ge_gt(CodeEmitter.Comparison.GreaterThan, block.GetLabel(instr.TargetIndex));
break;
case NormalizedByteCode.__ifne:
ilGenerator.LazyEmit_ifne(block.GetLabel(instr.PC + instr.Arg1));
ilGenerator.LazyEmit_ifne(block.GetLabel(instr.TargetIndex));
break;
case NormalizedByteCode.__ifeq:
ilGenerator.LazyEmit_ifeq(block.GetLabel(instr.PC + instr.Arg1));
ilGenerator.LazyEmit_ifeq(block.GetLabel(instr.TargetIndex));
break;
case NormalizedByteCode.__ifnonnull:
ilGenerator.Emit(OpCodes.Brtrue, block.GetLabel(instr.PC + instr.Arg1));
ilGenerator.Emit(OpCodes.Brtrue, block.GetLabel(instr.TargetIndex));
break;
case NormalizedByteCode.__ifnull:
ilGenerator.Emit(OpCodes.Brfalse, block.GetLabel(instr.PC + instr.Arg1));
ilGenerator.Emit(OpCodes.Brfalse, block.GetLabel(instr.TargetIndex));
break;
case NormalizedByteCode.__if_acmpeq:
ilGenerator.Emit(OpCodes.Beq, block.GetLabel(instr.PC + instr.Arg1));
ilGenerator.Emit(OpCodes.Beq, block.GetLabel(instr.TargetIndex));
break;
case NormalizedByteCode.__if_acmpne:
ilGenerator.Emit(OpCodes.Bne_Un, block.GetLabel(instr.PC + instr.Arg1));
ilGenerator.Emit(OpCodes.Bne_Un, block.GetLabel(instr.TargetIndex));
break;
case NormalizedByteCode.__goto:
ilGenerator.Emit(OpCodes.Br, block.GetLabel(instr.PC + instr.Arg1));
ilGenerator.Emit(OpCodes.Br, block.GetLabel(instr.TargetIndex));
break;
case NormalizedByteCode.__ineg:
case NormalizedByteCode.__lneg:
@ -2887,7 +2885,7 @@ class Compiler
CodeEmitterLabel[] labels = new CodeEmitterLabel[instr.SwitchEntryCount];
for(int j = 0; j < labels.Length; j++)
{
labels[j] = block.GetLabel(instr.PC + instr.GetSwitchTargetOffset(j));
labels[j] = block.GetLabel(instr.GetSwitchTargetIndex(j));
}
if(instr.GetSwitchValue(0) != 0)
{
@ -2895,7 +2893,7 @@ class Compiler
ilGenerator.Emit(OpCodes.Sub);
}
ilGenerator.Emit(OpCodes.Switch, labels);
ilGenerator.Emit(OpCodes.Br, block.GetLabel(instr.PC + instr.DefaultOffset));
ilGenerator.Emit(OpCodes.Br, block.GetLabel(instr.DefaultTarget));
break;
}
case NormalizedByteCode.__lookupswitch:
@ -2906,11 +2904,11 @@ class Compiler
CodeEmitterLabel label = ilGenerator.DefineLabel();
ilGenerator.Emit(OpCodes.Bne_Un_S, label);
ilGenerator.Emit(OpCodes.Pop);
ilGenerator.Emit(OpCodes.Br, block.GetLabel(instr.PC + instr.GetSwitchTargetOffset(j)));
ilGenerator.Emit(OpCodes.Br, block.GetLabel(instr.GetSwitchTargetIndex(j)));
ilGenerator.MarkLabel(label);
}
ilGenerator.Emit(OpCodes.Pop);
ilGenerator.Emit(OpCodes.Br, block.GetLabel(instr.PC + instr.DefaultOffset));
ilGenerator.Emit(OpCodes.Br, block.GetLabel(instr.DefaultTarget));
break;
case NormalizedByteCode.__iinc:
LoadLocal(i);
@ -2957,7 +2955,7 @@ class Compiler
break;
case NormalizedByteCode.__jsr:
{
int index = FindPcIndex(instr.PC + instr.Arg1);
int index = instr.TargetIndex;
int[] callsites = ma.GetCallSites(index);
for(int j = 0; j < callsites.Length; j++)
{
@ -2967,7 +2965,7 @@ class Compiler
break;
}
}
ilGenerator.Emit(OpCodes.Br, block.GetLabel(instr.PC + instr.Arg1));
ilGenerator.Emit(OpCodes.Br, block.GetLabel(instr.TargetIndex));
break;
}
case NormalizedByteCode.__ret:
@ -2982,12 +2980,12 @@ class Compiler
{
LoadLocal(i);
ilGenerator.LazyEmitLdc_I4(j);
ilGenerator.Emit(OpCodes.Beq, block.GetLabel(m.Instructions[callsites[j] + 1].PC));
ilGenerator.Emit(OpCodes.Beq, block.GetLabel(callsites[j] + 1));
}
}
if(m.Instructions[callsites[callsites.Length - 1]].IsReachable)
{
ilGenerator.Emit(OpCodes.Br, block.GetLabel(m.Instructions[callsites[callsites.Length - 1] + 1].PC));
ilGenerator.Emit(OpCodes.Br, block.GetLabel(callsites[callsites.Length - 1] + 1));
}
else
{
@ -3073,10 +3071,10 @@ class Compiler
instructionIsForwardReachable = true;
Debug.Assert(m.Instructions[i + 1].IsReachable);
// don't fall through end of try block
if(m.Instructions[i + 1].PC == block.End)
if(block.EndIndex == i + 1)
{
// TODO instead of emitting a branch to the leave stub, it would be more efficient to put the leave stub here
ilGenerator.Emit(OpCodes.Br, block.GetLabel(m.Instructions[i + 1].PC));
ilGenerator.Emit(OpCodes.Br, block.GetLabel(i + 1));
}
break;
}
@ -3488,20 +3486,6 @@ class Compiler
}
}
private int FindPcIndex(int target)
{
return m.PcIndexMap[target];
}
private int SafeFindPcIndex(int target)
{
if(target < 0 || target >= m.PcIndexMap.Length)
{
return -1;
}
return m.PcIndexMap[target];
}
private LocalVar LoadLocal(int instructionIndex)
{
LocalVar v = ma.GetLocalVar(instructionIndex);

View file

@ -1,5 +1,5 @@
/*
Copyright (C) 2002-2008 Jeroen Frijters
Copyright (C) 2002-2009 Jeroen Frijters
This software is provided 'as-is', without any express or implied
warranty. In no event will the authors be held liable for any damages
@ -1562,16 +1562,13 @@ class MethodAnalyzer
try
{
int end_pc = method.Instructions[method.Instructions.Length - 1].PC;
// ensure that exception blocks and handlers start and end at instruction boundaries
for(int i = 0; i < method.ExceptionTable.Length; i++)
{
int start = method.PcIndexMap[method.ExceptionTable[i].start_pc];
int end = method.PcIndexMap[method.ExceptionTable[i].end_pc];
int handler = method.PcIndexMap[method.ExceptionTable[i].handler_pc];
if((start >= end && end != -1) || start == -1 ||
(end == -1 && method.ExceptionTable[i].end_pc != end_pc) ||
handler <= 0)
int start = method.ExceptionTable[i].startIndex;
int end = method.ExceptionTable[i].endIndex;
int handler = method.ExceptionTable[i].handlerIndex;
if(start >= end || start == -1 || end == -1 || handler <= 0)
{
throw new IndexOutOfRangeException();
}
@ -1642,7 +1639,7 @@ class MethodAnalyzer
// mark the exception handlers reachable from this instruction
for(int j = 0; j < method.ExceptionTable.Length; j++)
{
if(method.ExceptionTable[j].start_pc <= instructions[i].PC && method.ExceptionTable[j].end_pc > instructions[i].PC)
if(method.ExceptionTable[j].startIndex <= i && i < method.ExceptionTable[j].endIndex)
{
// NOTE this used to be CopyLocalsAndSubroutines, but it doesn't (always) make
// sense to copy the subroutine state
@ -1663,7 +1660,7 @@ class MethodAnalyzer
// Throwable as the type and recording a loader constraint
ex.PushType(GetConstantPoolClassType(catch_type));
}
int idx = method.PcIndexMap[method.ExceptionTable[j].handler_pc];
int idx = method.ExceptionTable[j].handlerIndex;
state[idx] += ex;
}
}
@ -2136,15 +2133,15 @@ class MethodAnalyzer
{
// mark the type, so that we can ascertain that it is a "new object"
TypeWrapper type;
if(!newTypes.TryGetValue(instr.PC, out type))
if(!newTypes.TryGetValue(i, out type))
{
type = GetConstantPoolClassType(instr.Arg1);
if(type.IsArray)
{
throw new VerifyError("Illegal use of array type");
}
type = VerifierTypeWrapper.MakeNew(type, instr.PC);
newTypes[instr.PC] = type;
type = VerifierTypeWrapper.MakeNew(type, i);
newTypes[i] = type;
}
s.PushType(type);
break;
@ -2575,9 +2572,9 @@ class MethodAnalyzer
case NormalizedByteCode.__lookupswitch:
for(int j = 0; j < instr.SwitchEntryCount; j++)
{
state[method.PcIndexMap[instr.PC + instr.GetSwitchTargetOffset(j)]] += s;
state[instr.GetSwitchTargetIndex(j)] += s;
}
state[method.PcIndexMap[instr.PC + instr.DefaultOffset]] += s;
state[instr.DefaultTarget] += s;
break;
case NormalizedByteCode.__ifeq:
case NormalizedByteCode.__ifne:
@ -2596,10 +2593,10 @@ class MethodAnalyzer
case NormalizedByteCode.__ifnull:
case NormalizedByteCode.__ifnonnull:
state[i + 1] += s;
state[method.PcIndexMap[instr.PC + instr.Arg1]] += s;
state[instr.TargetIndex] += s;
break;
case NormalizedByteCode.__goto:
state[method.PcIndexMap[instr.PC + instr.Arg1]] += s;
state[instr.TargetIndex] += s;
break;
case NormalizedByteCode.__jsr:
{
@ -2607,7 +2604,7 @@ class MethodAnalyzer
{
state[i + 1] += s;
}
int index = method.PcIndexMap[instr.PC + instr.Arg1];
int index = instr.TargetIndex;
s.SetSubroutineId(index);
TypeWrapper retAddressType;
if(!returnAddressTypes.TryGetValue(index, out retAddressType))
@ -2692,7 +2689,7 @@ class MethodAnalyzer
{
if (instructions[i].NormalizedOpCode == NormalizedByteCode.__getstatic
&& instructions[i + 1].NormalizedOpCode == NormalizedByteCode.__ifne
&& instructions[i + 1].Arg1 > 0
&& instructions[i + 1].TargetIndex > i
&& !instructions[i + 1].IsBranchTarget)
{
ClassFile.ConstantPoolItemFieldref cpi = classFile.GetFieldref(instructions[i].Arg1);
@ -2701,7 +2698,7 @@ class MethodAnalyzer
// We've found an assertion. We patch the instruction to branch around it so that
// the assertion code will be unreachable (and hence optimized away).
// Note that the goto will be optimized away later by the code generator (which removes unnecessary branches).
instructions[i].PatchOpCode(NormalizedByteCode.__goto, instructions[i + 1].Arg1 + 3);
instructions[i].PatchOpCode(NormalizedByteCode.__goto, instructions[i + 1].TargetIndex);
}
}
}
@ -2870,9 +2867,9 @@ class MethodAnalyzer
// mark the exception handlers reachable from this instruction
for(int j = 0; j < method.ExceptionTable.Length; j++)
{
if(method.ExceptionTable[j].start_pc <= instructions[i].PC && method.ExceptionTable[j].end_pc > instructions[i].PC)
if(method.ExceptionTable[j].startIndex <= i && i < method.ExceptionTable[j].endIndex)
{
instructions[method.PcIndexMap[method.ExceptionTable[j].handler_pc]].flags |= InstructionFlags.Reachable | InstructionFlags.BranchTarget;
instructions[method.ExceptionTable[j].handlerIndex].flags |= InstructionFlags.Reachable | InstructionFlags.BranchTarget;
}
}
// mark the successor instructions
@ -2884,11 +2881,11 @@ class MethodAnalyzer
bool hasbackbranch = false;
for(int j = 0; j < instructions[i].SwitchEntryCount; j++)
{
hasbackbranch |= instructions[i].GetSwitchTargetOffset(j) < 0;
instructions[method.PcIndexMap[instructions[i].PC + instructions[i].GetSwitchTargetOffset(j)]].flags |= InstructionFlags.Reachable | InstructionFlags.BranchTarget;
hasbackbranch |= instructions[i].GetSwitchTargetIndex(j) < i;
instructions[instructions[i].GetSwitchTargetIndex(j)].flags |= InstructionFlags.Reachable | InstructionFlags.BranchTarget;
}
hasbackbranch |= instructions[i].DefaultOffset < 0;
instructions[method.PcIndexMap[instructions[i].PC + instructions[i].DefaultOffset]].flags |= InstructionFlags.Reachable | InstructionFlags.BranchTarget;
hasbackbranch |= instructions[i].DefaultTarget < i;
instructions[instructions[i].DefaultTarget].flags |= InstructionFlags.Reachable | InstructionFlags.BranchTarget;
if(hasbackbranch)
{
// backward branches cannot have uninitialized objects on
@ -2898,13 +2895,13 @@ class MethodAnalyzer
break;
}
case NormalizedByteCode.__goto:
if(instructions[i].Arg1 < 0)
if(instructions[i].TargetIndex < i)
{
// backward branches cannot have uninitialized objects on
// the stack or in local variables
state[i].CheckUninitializedObjRefs();
}
instructions[method.PcIndexMap[instructions[i].PC + instructions[i].Arg1]].flags |= InstructionFlags.Reachable | InstructionFlags.BranchTarget;
instructions[instructions[i].TargetIndex].flags |= InstructionFlags.Reachable | InstructionFlags.BranchTarget;
break;
case NormalizedByteCode.__ifeq:
case NormalizedByteCode.__ifne:
@ -2922,18 +2919,18 @@ class MethodAnalyzer
case NormalizedByteCode.__if_acmpne:
case NormalizedByteCode.__ifnull:
case NormalizedByteCode.__ifnonnull:
if(instructions[i].Arg1 < 0)
if(instructions[i].TargetIndex < i)
{
// backward branches cannot have uninitialized objects on
// the stack or in local variables
state[i].CheckUninitializedObjRefs();
}
instructions[method.PcIndexMap[instructions[i].PC + instructions[i].Arg1]].flags |= InstructionFlags.Reachable | InstructionFlags.BranchTarget;
instructions[instructions[i].TargetIndex].flags |= InstructionFlags.Reachable | InstructionFlags.BranchTarget;
instructions[i + 1].flags |= InstructionFlags.Reachable;
break;
case NormalizedByteCode.__jsr:
state[i].CheckUninitializedObjRefs();
instructions[method.PcIndexMap[instructions[i].PC + instructions[i].Arg1]].flags |= InstructionFlags.Reachable | InstructionFlags.BranchTarget;
instructions[instructions[i].TargetIndex].flags |= InstructionFlags.Reachable | InstructionFlags.BranchTarget;
// Note that we don't mark the next instruction as reachable,
// because that depends on the corresponding ret actually being
// reachable. We handle this in the loop below.