StaticScope scope, CallConfiguration callConfig, String filename, int line,
MethodNodes methodNodes) {
String sup = COMPILED_SUPER_CLASS_NAME;
ClassWriter cw = createCompiledCtor(invokerPath, invokerPath, sup);
SkinnyMethodAdapter mv = null;
String signature = null;
boolean specificArity = false;
// if tracing is enabled, we need to at least populate a backtrace frame
if (RubyInstanceConfig.FULL_TRACE_ENABLED) {
switch (callConfig) {
case FrameNoneScopeDummy:
callConfig = CallConfiguration.FrameBacktraceScopeDummy;
break;
case FrameNoneScopeFull:
callConfig = CallConfiguration.FrameBacktraceScopeFull;
break;
case FrameNoneScopeNone:
callConfig = CallConfiguration.FrameBacktraceScopeNone;
break;
}
}
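// methods with a rest arg, optional args, or more than three required args go through
// the generic IRubyObject[] call path; everything else gets specific-arity call methods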
if (scope.getRestArg() >= 0 || scope.getOptionalArgs() > 0 || scope.getRequiredArgs() > 3) {
signature = COMPILED_CALL_SIG_BLOCK;
mv = new SkinnyMethodAdapter(cw, ACC_PUBLIC, "call", signature, null, null);
} else {
specificArity = true;
mv = new SkinnyMethodAdapter(cw, ACC_PUBLIC, "call", COMPILED_CALL_SIG_BLOCK, null, null);
mv.start();
// check arity
mv.aloadMany(0, 1, 4, 5); // method (this), context, name, args; required count pushed next
mv.pushInt(scope.getRequiredArgs());
mv.invokestatic(p(JavaMethod.class), "checkArgumentCount", sig(void.class, JavaMethod.class, ThreadContext.class, String.class, IRubyObject[].class, int.class));
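// reload receiver state (this, context, self, clazz, name), unpack the required args
// from the array, add the block, and delegate to the specific-arity call generated below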
mv.aloadMany(0, 1, 2, 3, 4);
for (int i = 0; i < scope.getRequiredArgs(); i++) {
mv.aload(5);
mv.ldc(i);
mv.arrayload();
}
mv.aload(6);
switch (scope.getRequiredArgs()) {
case 0:
signature = COMPILED_CALL_SIG_ZERO_BLOCK;
break;
case 1:
signature = COMPILED_CALL_SIG_ONE_BLOCK;
break;
case 2:
signature = COMPILED_CALL_SIG_TWO_BLOCK;
break;
case 3:
signature = COMPILED_CALL_SIG_THREE_BLOCK;
break;
}
mv.invokevirtual(invokerPath, "call", signature);
mv.areturn();
mv.end();
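// The method emitted above is roughly equivalent to this Java (sketch, shown for two
// required args; the actual signature constant depends on scope.getRequiredArgs()):
//
//   public IRubyObject call(ThreadContext context, IRubyObject self, RubyModule clazz,
//                           String name, IRubyObject[] args, Block block) {
//       JavaMethod.checkArgumentCount(this, context, name, args, 2);
//       return call(context, self, clazz, name, args[0], args[1], block);
//   }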
// Define a second version that doesn't take a block, so we have unique code paths for both cases.
switch (scope.getRequiredArgs()) {
case 0:
signature = COMPILED_CALL_SIG_ZERO;
break;
case 1:
signature = COMPILED_CALL_SIG_ONE;
break;
case 2:
signature = COMPILED_CALL_SIG_TWO;
break;
case 3:
signature = COMPILED_CALL_SIG_THREE;
break;
}
mv = new SkinnyMethodAdapter(cw, ACC_PUBLIC, "call", signature, null, null);
mv.start();
mv.aloadMany(0, 1, 2, 3, 4);
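// reload the unpacked args from locals 5..(4 + required)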
for (int i = 1; i <= scope.getRequiredArgs(); i++) {
mv.aload(4 + i);
}
mv.getstatic(p(Block.class), "NULL_BLOCK", ci(Block.class));
switch (scope.getRequiredArgs()) {
case 0:
signature = COMPILED_CALL_SIG_ZERO_BLOCK;
break;
case 1:
signature = COMPILED_CALL_SIG_ONE_BLOCK;
break;
case 2:
signature = COMPILED_CALL_SIG_TWO_BLOCK;
break;
case 3:
signature = COMPILED_CALL_SIG_THREE_BLOCK;
break;
}
mv.invokevirtual(invokerPath, "call", signature);
mv.areturn();
mv.end();
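// The block-less variant above is roughly equivalent to (sketch, two required args):
//
//   public IRubyObject call(ThreadContext context, IRubyObject self, RubyModule clazz,
//                           String name, IRubyObject arg0, IRubyObject arg1) {
//       return call(context, self, clazz, name, arg0, arg1, Block.NULL_BLOCK);
//   }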
mv = new SkinnyMethodAdapter(cw, ACC_PUBLIC, "call", signature, null, null);
}
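// from here on we emit the actual method body, into whichever "call" adapter was
// created last: the generic array-args call, or the specific-arity call taking a block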
mv.start();
boolean heapScoped = callConfig.scoping() != Scoping.None;
boolean framed = callConfig.framing() != Framing.None;
// save off callNumber if framed or scoped, for non-local returns
int callNumberIndex = -1;
if (framed || heapScoped) {
mv.aload(1);
mv.getfield(p(ThreadContext.class), "callNumber", ci(int.class));
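// pick a free local slot just past the incoming parameters to hold the saved callNumber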
if (specificArity) {
switch (scope.getRequiredArgs()) {
case -1:
callNumberIndex = ARGS_INDEX + 1/*args*/ + 1/*block*/ + 1;
break;
case 0:
callNumberIndex = ARGS_INDEX + 1/*block*/ + 1;
break;
default:
callNumberIndex = ARGS_INDEX + scope.getRequiredArgs() + 1/*block*/ + 1;
}
} else {
callNumberIndex = ARGS_INDEX + 1/*block*/ + 1;
}
mv.istore(callNumberIndex);
}
// invoke pre method stuff
if (!callConfig.isNoop() || RubyInstanceConfig.FULL_TRACE_ENABLED) {
if (specificArity) {
invokeCallConfigPre(mv, COMPILED_SUPER_CLASS_NAME, scope.getRequiredArgs(), true, callConfig);
} else {
invokeCallConfigPre(mv, COMPILED_SUPER_CLASS_NAME, -1, true, callConfig);
}
}
// pre-call trace
int traceBoolIndex = -1;
if (RubyInstanceConfig.FULL_TRACE_ENABLED) {
// load and store trace enabled flag
if (specificArity) {
switch (scope.getRequiredArgs()) {
case -1:
traceBoolIndex = ARGS_INDEX + 1/*args*/ + 1/*block*/ + 2;
break;
case 0:
traceBoolIndex = ARGS_INDEX + 1/*block*/ + 2;
break;
default:
traceBoolIndex = ARGS_INDEX + scope.getRequiredArgs() + 1/*block*/ + 2;
}
} else {
traceBoolIndex = ARGS_INDEX + 1/*block*/ + 2;
}
mv.aload(1);
mv.invokevirtual(p(ThreadContext.class), "getRuntime", sig(Ruby.class));
mv.invokevirtual(p(Ruby.class), "hasEventHooks", sig(boolean.class));
mv.istore(traceBoolIndex);
// tracing pre
invokeTraceCompiledPre(mv, COMPILED_SUPER_CLASS_NAME, traceBoolIndex, filename, line);
}
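// labels delimiting the try/catch regions that wrap the method body below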
Label tryBegin = new Label();
Label tryEnd = new Label();
Label doFinally = new Label();
Label doReturnFinally = new Label();
Label doRedoFinally = new Label();
Label catchReturnJump = new Label();
Label catchRedoJump = new Label();
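// register exception ranges only when a frame or heap scope must be torn down:
// ReturnJump (and RedoJump when framed) get dedicated handlers; anything else
// falls through to the catch-all finally blocks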
if (framed || heapScoped) mv.trycatch(tryBegin, tryEnd, catchReturnJump, p(JumpException.ReturnJump.class));
if (framed) mv.trycatch(tryBegin, tryEnd, catchRedoJump, p(JumpException.RedoJump.class));
if (framed || heapScoped) mv.trycatch(tryBegin, tryEnd, doFinally, null);
if (framed || heapScoped) mv.trycatch(catchReturnJump, doReturnFinally, doFinally, null);
if (framed) mv.trycatch(catchRedoJump, doRedoFinally, doFinally, null);
if (framed || heapScoped) mv.label(tryBegin);
// main body
{
mv.aload(0);
// FIXME we want to eliminate these type casts when possible
mv.getfield(invokerPath, "$scriptObject", ci(Object.class));
mv.checkcast(className);
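// push context and self, then either the unpacked specific-arity args plus block
// or the args array plus block, and invoke the compiled static method on the script class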
mv.aloadMany(THREADCONTEXT_INDEX, RECEIVER_INDEX);
if (specificArity) {
for (int i = 0; i < scope.getRequiredArgs(); i++) {
mv.aload(ARGS_INDEX + i);
}
mv.aload(ARGS_INDEX + scope.getRequiredArgs());
mv.invokestatic(className, method, Helpers.getStaticMethodSignature(className, scope.getRequiredArgs()));
} else {
mv.aloadMany(ARGS_INDEX, BLOCK_INDEX);
mv.invokestatic(className, method, Helpers.getStaticMethodSignature(className, 4));
}
}
if (framed || heapScoped) {
mv.label(tryEnd);
}
// normal exit, perform finally and return
{
if (RubyInstanceConfig.FULL_TRACE_ENABLED) {
invokeTraceCompiledPost(mv, COMPILED_SUPER_CLASS_NAME, traceBoolIndex);
}
if (!callConfig.isNoop()) {
invokeCallConfigPost(mv, COMPILED_SUPER_CLASS_NAME, callConfig);
}
mv.areturn();
}
// return jump handling
if (framed || heapScoped) {
mv.label(catchReturnJump);
{
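// shuffle the caught ReturnJump under this and context, append the saved callNumber,
// and dispatch to the superclass handleReturn(context, returnJump, callNumber)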
mv.aload(0);
mv.swap();
mv.aload(1);
mv.swap();
mv.iload(callNumberIndex);
mv.invokevirtual(COMPILED_SUPER_CLASS_NAME, "handleReturn", sig(IRubyObject.class, ThreadContext.class, JumpException.ReturnJump.class, int.class));
mv.label(doReturnFinally);
// finally
if (RubyInstanceConfig.FULL_TRACE_ENABLED) {
invokeTraceCompiledPost(mv, COMPILED_SUPER_CLASS_NAME, traceBoolIndex);
}
if (!callConfig.isNoop()) {
invokeCallConfigPost(mv, COMPILED_SUPER_CLASS_NAME, callConfig);
}
// return result if we're still good
mv.areturn();
}
}
if (framed) {
// redo jump handling
mv.label(catchRedoJump);
{
// clear the redo
mv.pop();
// get runtime and create the redo LocalJumpError; it is thrown after the finally code below
mv.aload(1);
mv.invokevirtual(p(ThreadContext.class), "getRuntime", sig(Ruby.class));
mv.invokevirtual(p(Ruby.class), "newRedoLocalJumpError", sig(RaiseException.class));
mv.label(doRedoFinally);
// finally
if (RubyInstanceConfig.FULL_TRACE_ENABLED) {
invokeTraceCompiledPost(mv, COMPILED_SUPER_CLASS_NAME, traceBoolIndex);
}
if (!callConfig.isNoop()) {
invokeCallConfigPost(mv, COMPILED_SUPER_CLASS_NAME, callConfig);
}
// throw redo error if we're still good
mv.athrow();
}
}
// finally handling for abnormal exit
if (framed || heapScoped) {
mv.label(doFinally);
//call post method stuff (exception raised)
if (RubyInstanceConfig.FULL_TRACE_ENABLED) {
invokeTraceCompiledPost(mv, COMPILED_SUPER_CLASS_NAME, traceBoolIndex);
}
if (!callConfig.isNoop()) {
invokeCallConfigPost(mv, COMPILED_SUPER_CLASS_NAME, callConfig);
}
// rethrow exception
mv.athrow();
}
mv.end();
return endCallOffline(cw);
}