public static void LdrLit(AILEmitterCtx Context)
{
    IAOpCodeLit Op = (IAOpCodeLit)Context.CurrOp;

    if (Op.Prefetch)
    {
        //Prefetch hints don't load anything, so there is nothing to emit.
        return;
    }

    //Read the value at the literal address, sign or zero extending as needed.
    Context.EmitLdarg(ATranslatedSub.MemoryArgIdx);
    Context.EmitLdc_I8(Op.Imm);

    if (Op.Signed)
    {
        EmitReadSx64Call(Context, Op.Size);
    }
    else
    {
        EmitReadZxCall(Context, Op.Size);
    }

    //Store the loaded value into the target SIMD or integer register.
    if (Op is IAOpCodeSimd)
    {
        Context.EmitStvec(Op.Rt);
    }
    else
    {
        Context.EmitStint(Op.Rt);
    }
}
public static void Blr(AILEmitterCtx Context)
{
    AOpCodeBReg Op = (AOpCodeBReg)Context.CurrOp;

    //Set LR to the address of the next instruction, then return the branch
    //target so the caller/dispatcher can continue execution from there.
    Context.EmitLdc_I(Op.Position + 4);
    Context.EmitStint(AThreadState.LRIndex);
    Context.EmitStoreState();
    Context.EmitLdintzr(Op.Rn);

    Context.Emit(OpCodes.Ret);
}
public static void EmitDataStore(AILEmitterCtx Context, bool SetFlags)
{
    IAOpCodeAlu Op = (IAOpCodeAlu)Context.CurrOp;

    //Flag-setting and shifted-register forms treat register 31 as the zero
    //register; the other forms treat it as the stack pointer.
    if (SetFlags || Op is IAOpCodeAluRs)
    {
        Context.EmitStintzr(Op.Rd);
    }
    else
    {
        Context.EmitStint(Op.Rd);
    }
}
private static void EmitWBackIfNeeded(AILEmitterCtx Context)
{
    //Check whether the current OpCode has write back; if so, store the updated
    //address back into Rn. For post-indexed addressing the offset is only added
    //here, after the memory access.
    //Note: AOpCodeMemPair inherits from AOpCodeMemImm, so this works for both.
    if (Context.CurrOp is AOpCodeMemImm Op && Op.WBack)
    {
        Context.EmitLdtmp();

        if (Op.PostIdx)
        {
            Context.EmitLdc_I(Op.Imm);

            Context.Emit(OpCodes.Add);
        }

        Context.EmitStint(Op.Rn);
    }
}
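//The helper above mirrors the two ARM64 write-back addressing modes: a pre-indexed
//access uses base + offset for the access itself and writes that same value back,
//while a post-indexed access uses the unmodified base and only applies the offset
//when updating Rn. A minimal standalone sketch of that arithmetic, with illustrative
//names (ApplyWBack, Address, NewBase) that are not part of the emitter:
private static (long Address, long NewBase) ApplyWBack(long Rn, long Imm, bool PostIdx)
{
    //Post-index: access at the old base; pre-index: access at base + offset.
    long Address = PostIdx ? Rn : Rn + Imm;

    //In both modes the base register ends up holding base + offset.
    long NewBase = Rn + Imm;

    return (Address, NewBase);
}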
public static void Bl(AILEmitterCtx Context)
{
    AOpCodeBImmAl Op = (AOpCodeBImmAl)Context.CurrOp;

    if (AOptimizations.GenerateCallStack)
    {
        Context.EmitLdarg(ATranslatedSub.StateArgIdx);
        Context.EmitLdc_I8(Op.Imm);

        Context.EmitPrivateCall(typeof(AThreadState), nameof(AThreadState.EnterMethod));
    }

    Context.EmitLdc_I(Op.Position + 4);
    Context.EmitStint(AThreadState.LRIndex);
    Context.EmitStoreState();

    if (Context.TryOptEmitSubroutineCall())
    {
        //Note: the return value of the called method will be placed
        //on the stack. The return value is always an Int64 holding the
        //return address of the function. We check if the address is
        //correct; if it isn't, we keep returning until we reach the dispatcher.
        Context.Emit(OpCodes.Dup);

        Context.EmitLdc_I8(Op.Position + 4);

        AILLabel LblContinue = new AILLabel();

        Context.Emit(OpCodes.Beq_S, LblContinue);
        Context.Emit(OpCodes.Ret);

        Context.MarkLabel(LblContinue);

        Context.Emit(OpCodes.Pop);

        Context.EmitLoadState(Context.CurrBlock.Next);
    }
    else
    {
        Context.EmitLdc_I8(Op.Imm);

        Context.Emit(OpCodes.Ret);
    }
}
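//The direct-call optimization above relies on a simple contract: every translated
//subroutine returns, as an Int64, the guest address it wants to continue at. A caller
//compares that value against its own "next instruction" address and either falls
//through or keeps unwinding until the dispatcher picks the target up. A rough sketch
//of that contract in plain C#, with hypothetical names (EmulateBl, CallSub) that are
//not part of the emitter:
private static long EmulateBl(long TargetAddress, long NextAddress, System.Func<long, long> CallSub)
{
    //Call the translated callee directly instead of going through the dispatcher.
    long ReturnAddress = CallSub(TargetAddress);

    if (ReturnAddress != NextAddress)
    {
        //Unexpected target: propagate it upward until the dispatcher can handle it.
        return ReturnAddress;
    }

    //Expected target: execution continues with the next block.
    return NextAddress;
}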
private static void EmitSimdMemWBack(AILEmitterCtx Context, int Offset)
{
    AOpCodeMemReg Op = (AOpCodeMemReg)Context.CurrOp;

    //Write back Rn + Rm for the register form, or Rn + the total transfer
    //size for the immediate form.
    Context.EmitLdint(Op.Rn);

    if (Op.Rm != ARegisters.ZRIndex)
    {
        Context.EmitLdint(Op.Rm);
    }
    else
    {
        Context.EmitLdc_I8(Offset);
    }

    Context.Emit(OpCodes.Add);

    Context.EmitStint(Op.Rn);
}
public static void Blr(AILEmitterCtx Context)
{
    AOpCodeBReg Op = (AOpCodeBReg)Context.CurrOp;

    if (AOptimizations.GenerateCallStack)
    {
        Context.EmitLdarg(ATranslatedSub.StateArgIdx);
        Context.EmitLdintzr(Op.Rn);

        Context.EmitPrivateCall(typeof(AThreadState), nameof(AThreadState.EnterMethod));
    }

    Context.EmitLdc_I(Op.Position + 4);
    Context.EmitStint(AThreadState.LRIndex);
    Context.EmitStoreState();

    Context.EmitLdintzr(Op.Rn);

    Context.Emit(OpCodes.Ret);
}
public static void Bl(AILEmitterCtx Context)
{
    AOpCodeBImmAl Op = (AOpCodeBImmAl)Context.CurrOp;

    Context.EmitLdc_I(Op.Position + 4);
    Context.EmitStint(ARegisters.LRIndex);
    Context.EmitStoreState();

    if (Context.TryOptEmitSubroutineCall())
    {
        //Note: the return value of the called method will be placed
        //on the stack. The return value is always an Int64 holding the
        //return address of the function. We check if the address is
        //correct; if it isn't, we keep returning until we reach the dispatcher.
        Context.Emit(OpCodes.Dup);

        Context.EmitLdc_I8(Op.Position + 4);

        AILLabel LblContinue = new AILLabel();

        Context.Emit(OpCodes.Beq_S, LblContinue);
        Context.Emit(OpCodes.Ret);

        Context.MarkLabel(LblContinue);

        Context.Emit(OpCodes.Pop);

        if (Context.CurrBlock.Next != null)
        {
            Context.EmitLoadState(Context.CurrBlock.Next);
        }
    }
    else
    {
        Context.EmitLdc_I8(Op.Imm);

        Context.Emit(OpCodes.Ret);
    }
}
private static void EmitSimdMultLdSt(AILEmitterCtx Context, bool IsLoad)
{
    AOpCodeSimdMemMult Op = (AOpCodeSimdMemMult)Context.CurrOp;

    int Offset = 0;

    for (int Rep = 0; Rep < Op.Reps; Rep++)
    {
        for (int Elem = 0; Elem < Op.Elems; Elem++)
        {
            for (int SElem = 0; SElem < Op.SElems; SElem++)
            {
                //Register indices wrap around after V31.
                int Rtt = (Op.Rt + Rep + SElem) & 0x1f;

                if (IsLoad)
                {
                    //Read one element from memory and insert it into the vector register.
                    Context.EmitLdvec(Rtt);
                    Context.EmitLdc_I4(Elem);
                    Context.EmitLdc_I4(Op.Size);

                    Context.EmitLdarg(ATranslatedSub.MemoryArgIdx);
                    Context.EmitLdint(Op.Rn);
                    Context.EmitLdc_I8(Offset);

                    Context.Emit(OpCodes.Add);

                    EmitReadZxCall(Context, Op.Size);

                    ASoftFallback.EmitCall(Context, nameof(ASoftFallback.InsertVec));

                    Context.EmitStvec(Rtt);

                    if (Op.RegisterSize == ARegisterSize.SIMD64 && Elem == Op.Elems - 1)
                    {
                        EmitVectorZeroUpper(Context, Rtt);
                    }
                }
                else
                {
                    //Extract one element from the vector register and write it to memory.
                    Context.EmitLdarg(ATranslatedSub.MemoryArgIdx);
                    Context.EmitLdint(Op.Rn);
                    Context.EmitLdc_I8(Offset);

                    Context.Emit(OpCodes.Add);

                    Context.EmitLdvec(Rtt);
                    Context.EmitLdc_I4(Elem);
                    Context.EmitLdc_I4(Op.Size);

                    ASoftFallback.EmitCall(Context, nameof(ASoftFallback.ExtractVec));

                    EmitWriteCall(Context, Op.Size);
                }

                //Advance by the element size in bytes.
                Offset += 1 << Op.Size;
            }
        }
    }

    if (Op.WBack)
    {
        //Write back Rn + Rm for the register form, or Rn + the total transfer
        //size for the immediate form.
        Context.EmitLdint(Op.Rn);

        if (Op.Rm != ARegisters.ZRIndex)
        {
            Context.EmitLdint(Op.Rm);
        }
        else
        {
            Context.EmitLdc_I8(Offset);
        }

        Context.Emit(OpCodes.Add);

        Context.EmitStint(Op.Rn);
    }
}
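//The loops above visit elements in guest memory order: for each element index,
//SElems consecutive registers (wrapping from V31 back to V0) exchange one element
//with memory, and the byte offset advances by the element size. A standalone sketch
//of just that register/offset arithmetic, with an illustrative name (PrintMultLdStLayout)
//that is not part of the emitter:
private static void PrintMultLdStLayout(int Rt, int Reps, int Elems, int SElems, int Size)
{
    int Offset = 0;

    for (int Rep = 0; Rep < Reps; Rep++)
    {
        for (int Elem = 0; Elem < Elems; Elem++)
        {
            for (int SElem = 0; SElem < SElems; SElem++)
            {
                //Same wrap-around as the emitter: register indices are masked to 0-31.
                int Rtt = (Rt + Rep + SElem) & 0x1f;

                System.Console.WriteLine($"V{Rtt}[{Elem}] <-> [Rn + {Offset}]");

                //Element size in bytes: 1 << Size = 1, 2, 4 or 8.
                Offset += 1 << Size;
            }
        }
    }
}

//For example, LD4 { V30.4S - V1.4S }, [Rn] corresponds to Rt = 30, Reps = 1, Elems = 4,
//SElems = 4, Size = 2; the printed layout shows the interleaved structure order
//(V30, V31, V0, V1 for each element) with the offset growing by 4 bytes per element.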