Use of com.oracle.svm.core.heap.SubstrateReferenceMap in project graal by oracle: class Instance, method doState.
private void doState(DebugContext debug, FrameMap frameMap, LIRInstruction op, LIRFrameState state) {
    SubstrateReferenceMap refMap = (SubstrateReferenceMap) state.debugInfo().getReferenceMap();

    /*
     * We want to verify explicit deoptimization entry points, and implicit deoptimization entry
     * points at call sites. Unfortunately, just checking isDeoptEntry gives us false positives
     * for some runtime calls that re-use a state (which is not marked as "during call").
     */
    boolean isDeoptEntry = ((HostedMethod) state.topFrame.getMethod()).compilationInfo.isDeoptEntry(state.topFrame.getBCI(), state.topFrame.duringCall, state.topFrame.rethrowException);
    if (op instanceof DeoptEntryOp || (state.topFrame.duringCall && isDeoptEntry)) {
        BytecodeFrame frame = state.topFrame;

        Map<Integer, Object> allUsedRegisters = refMap.getDebugAllUsedRegisters();
        Map<Integer, Object> allUsedStackSlots = refMap.getDebugAllUsedStackSlots();

        if (allUsedRegisters != null && !allUsedRegisters.isEmpty()) {
            throw shouldNotReachHere("Deoptimization target must not use any registers");
        }

        if (allUsedStackSlots != null) {
            Map<Integer, Object> cleanedStackSlots = new HashMap<>(allUsedStackSlots);
            do {
                /*
                 * Remove stack slot information for all slots which already have a
                 * representative in the bytecode frame.
                 */
                for (JavaValue value : frame.values) {
                    if (value instanceof StackSlot) {
                        StackSlot stackSlot = (StackSlot) value;
                        int offset = stackSlot.getOffset(frameMap.totalFrameSize());
                        debug.log("remove slot %d: %s", offset, stackSlot);
                        cleanedStackSlots.remove(offset);
                    } else if (ValueUtil.isConstantJavaValue(value) || ValueUtil.isIllegalJavaValue(value)) {
                        /* Nothing to do. */
                    } else {
                        throw shouldNotReachHere("unknown value in deopt target: " + value);
                    }
                }
                frame = frame.caller();
            } while (frame != null);

            int firstBci = state.topFrame.getMethod().isSynchronized() ? BytecodeFrame.BEFORE_BCI : 0;
            if (state.topFrame.getBCI() == firstBci && state.topFrame.caller() == null && state.topFrame.duringCall == false && state.topFrame.rethrowException == false) {
                /*
                 * Some stack slots, e.g., the return address and manually allocated stack
                 * memory, are alive the whole method. So all stack slots that are registered
                 * for the method entry are allowed to be registered in all subsequent states.
                 */
                assert op instanceof DeoptEntryOp;
                assert allowedStackSlots == null;
                allowedStackSlots = new HashMap<>(cleanedStackSlots);
            } else {
                if (allowedStackSlots == null) {
                    allowedStackSlots = new HashMap<>();
                }
                for (Integer key : allowedStackSlots.keySet()) {
                    cleanedStackSlots.remove(key);
                }
                if (!cleanedStackSlots.isEmpty()) {
                    throw shouldNotReachHere("unknown values in stack slots: method " + state.topFrame.getMethod().toString() + ", op " + op.id() + " " + op + ": " + cleanedStackSlots);
                }
            }
        }
    }
}
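The check above boils down to a set difference over stack-slot offsets: every slot the reference map reports as used must either have a representative in one of the bytecode frames or be among the slots registered at method entry, and anything left over is an error. A minimal stand-alone sketch of that bookkeeping follows; the class and parameter names are hypothetical and not part of the graal sources.

import java.util.HashMap;
import java.util.Map;
import java.util.Set;

public class StackSlotCheckSketch {

    /*
     * Mirrors the cleanup in doState: start from every stack slot the reference map
     * reports as used, drop the slots that already have a representative in the
     * bytecode frames, drop the slots recorded as allowed at method entry, and
     * treat anything that remains as unaccounted for.
     */
    static Map<Integer, Object> unaccountedSlots(Map<Integer, Object> allUsedStackSlots,
                    Set<Integer> offsetsInBytecodeFrames,
                    Map<Integer, Object> allowedStackSlots) {
        Map<Integer, Object> cleaned = new HashMap<>(allUsedStackSlots);
        for (Integer offset : offsetsInBytecodeFrames) {
            cleaned.remove(offset);
        }
        for (Integer offset : allowedStackSlots.keySet()) {
            cleaned.remove(offset);
        }
        return cleaned; // must be empty for a valid deoptimization target state
    }
}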
Use of com.oracle.svm.core.heap.SubstrateReferenceMap in project graal by oracle: class UniverseBuilder, method createReferenceMap.
private static ReferenceMapEncoder.Input createReferenceMap(HostedType type) {
    HostedField[] fields = type.getInstanceFields(true);

    SubstrateReferenceMap referenceMap = new SubstrateReferenceMap();
    for (HostedField field : fields) {
        if (field.getType().getStorageKind() == JavaKind.Object && field.hasLocation() && field.getAnnotation(ExcludeFromReferenceMap.class) == null) {
            referenceMap.markReferenceAtIndex(field.getLocation() / ConfigurationValues.getTarget().wordSize);
        }
    }
    if (type.isInstanceClass()) {
        final HostedInstanceClass instanceClass = (HostedInstanceClass) type;
        /*
         * If the instance type has a monitor field, add it to the reference map.
         */
        final int monitorOffset = instanceClass.getMonitorFieldOffset();
        if (monitorOffset != 0) {
            final int monitorIndex = monitorOffset / ConfigurationValues.getTarget().wordSize;
            referenceMap.markReferenceAtIndex(monitorIndex);
        }
    }
    return referenceMap;
}
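createReferenceMap converts byte offsets into word-sized indices before marking them in the map. A small stand-alone sketch of that arithmetic, assuming a hypothetical fixed word size of 8 bytes (the real value comes from ConfigurationValues.getTarget().wordSize):

public class ReferenceIndexSketch {

    /* Hypothetical word size; SubstrateVM reads the actual value from the target configuration. */
    private static final int WORD_SIZE = 8;

    /* Converts a field's byte offset into its word index in the reference map. */
    static int referenceIndex(int fieldByteOffset) {
        return fieldByteOffset / WORD_SIZE;
    }

    public static void main(String[] args) {
        System.out.println(referenceIndex(24)); // an object field at byte offset 24 -> word index 3
        System.out.println(referenceIndex(16)); // a monitor field at byte offset 16 -> word index 2
    }
}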
Use of com.oracle.svm.core.heap.SubstrateReferenceMap in project graal by oracle: class VMThreadMTFeature, method beforeCompilation.
@Override
public void beforeCompilation(BeforeCompilationAccess config) {
    List<VMThreadLocalInfo> sortedThreadLocalInfos = threadLocalCollector.sortThreadLocals(config, threadLocalAtOffsetZero);

    SubstrateReferenceMap referenceMap = new SubstrateReferenceMap();
    int nextOffset = 0;
    for (VMThreadLocalInfo info : sortedThreadLocalInfos) {
        assert nextOffset % Math.min(8, info.sizeInBytes) == 0 : "alignment mismatch: " + info.sizeInBytes + ", " + nextOffset;
        if (info.isObject) {
            final boolean isCompressed = false;
            referenceMap.markReferenceAtIndex(nextOffset / info.sizeInBytes, isCompressed);
        }
        info.offset = nextOffset;
        nextOffset += info.sizeInBytes;
    }
    VMError.guarantee(threadLocalAtOffsetZero == null || threadLocalCollector.getInfo(threadLocalAtOffsetZero).offset == 0);

    ReferenceMapEncoder encoder = new ReferenceMapEncoder();
    encoder.add(referenceMap);
    objectReferenceWalker.vmThreadReferenceMapEncoding = encoder.encodeAll(null);
    objectReferenceWalker.vmThreadReferenceMapIndex = encoder.lookupEncoding(referenceMap);
    objectReferenceWalker.vmThreadSize = nextOffset;

    /* Remember the final sorted list. */
    VMThreadLocalInfos.setInfos(sortedThreadLocalInfos);
}