//Emits IL for a 32-bit BL/BLX immediate branch.
//Writes the return address into the banked LR, optionally toggles the
//Thumb state bit (BLX exchanges Arm <-> Thumb), then calls the target.
//Fix: the local "pc" was computed but never used; the code called
//op.GetPc() again instead. Use the local for both return address forms.
private static void Blx(ILEmitterCtx context, bool x)
{
    IOpCode32BImm op = (IOpCode32BImm)context.CurrOp;

    uint pc = op.GetPc();

    bool isThumb = IsThumb(context.CurrOp);

    //Return address: in Arm mode the stored PC is ahead of the current
    //instruction, so LR = PC - 4; in Thumb mode the low bit is set to
    //record the Thumb state of the return target.
    if (!isThumb)
    {
        context.EmitLdc_I(pc - 4);
    }
    else
    {
        context.EmitLdc_I(pc | 1);
    }

    context.EmitStint(GetBankedRegisterAlias(context.Mode, RegisterAlias.Aarch32Lr));

    //If x is true, then this is a branch with link and exchange.
    //In this case we need to swap the mode between Arm <-> Thumb.
    if (x)
    {
        context.EmitLdc_I4(isThumb ? 0 : 1);

        context.EmitStflg((int)PState.TBit);
    }

    InstEmitFlowHelper.EmitCall(context, op.Imm);
}
//BFM (bitfield move), split into its two directions:
//BFI inserts the low bits of Rn into the middle of Rd; BFXIL extracts a
//field from the middle of Rn into the low bits of Rd.
public static void Bfm(ILEmitterCtx context)
{
    OpCodeBfm64 op = (OpCodeBfm64)context.CurrOp;

    if (op.Pos < op.Shift)
    {
        //BFI.
        //Insert the low "width" bits of Rn into Rd starting at bit
        //"shift", leaving the other bits of Rd untouched.
        context.EmitLdintzr(op.Rn);

        int shift = op.GetBitsCount() - op.Shift;

        int width = op.Pos + 1;

        //Mask with the lowest "width" bits set.
        long mask = (long)(ulong.MaxValue >> (64 - width));

        context.EmitLdc_I(mask);

        context.Emit(OpCodes.And);

        context.EmitLsl(shift);

        context.EmitLdintzr(op.Rd);

        //Clear the destination bit range before merging the shifted field.
        context.EmitLdc_I(~(mask << shift));

        context.Emit(OpCodes.And);

        context.Emit(OpCodes.Or);

        context.EmitStintzr(op.Rd);
    }
    else
    {
        //BFXIL.
        //Extract "width" bits of Rn starting at bit Shift and insert them
        //into the low bits of Rd, leaving the other bits of Rd untouched.
        context.EmitLdintzr(op.Rn);

        context.EmitLsr(op.Shift);

        int width = op.Pos - op.Shift + 1;

        //Mask with the lowest "width" bits set.
        long mask = (long)(ulong.MaxValue >> (64 - width));

        context.EmitLdc_I(mask);

        context.Emit(OpCodes.And);

        context.EmitLdintzr(op.Rd);

        //Clear the low bits of Rd before merging the extracted field.
        context.EmitLdc_I(~mask);

        context.Emit(OpCodes.And);

        context.Emit(OpCodes.Or);

        context.EmitStintzr(op.Rd);
    }
}
//Test bit and branch (TBZ/TBNZ style): isolates bit "Pos" of Rt, then
//compares the masked value against zero and branches using ilOp.
private static void EmitTb(ILEmitterCtx context, OpCode ilOp)
{
    OpCodeBImmTest64 op = (OpCodeBImmTest64)context.CurrOp;

    context.EmitLdintzr(op.Rt);

    //Keep only the tested bit.
    context.EmitLdc_I(1L << op.Pos);

    context.Emit(OpCodes.And);

    context.EmitLdc_I(0);

    EmitBranch(context, ilOp);
}
//MOVK: replace one 16-bit chunk of Rd (at bit position Pos) with the
//immediate, keeping the rest of the register intact.
//Note(review): op.Imm is presumably already shifted to Pos by the
//decoder, since no shift is applied here — confirm against the decoder.
public static void Movk(ILEmitterCtx context)
{
    OpCodeMov64 op = (OpCodeMov64)context.CurrOp;

    context.EmitLdintzr(op.Rd);

    //Clear the 16 bits that will receive the immediate.
    context.EmitLdc_I(~(0xffffL << op.Pos));

    context.Emit(OpCodes.And);

    context.EmitLdc_I(op.Imm);

    context.Emit(OpCodes.Or);

    context.EmitStintzr(op.Rd);
}
//Conditional select family (CSEL/CSINC/CSINV/CSNEG).
//When the condition holds, Rd = Rn; otherwise Rd = op2(Rm), where op2 is
//identity, increment, bitwise invert or negate depending on cselOp.
private static void EmitCsel(ILEmitterCtx context, CselOperation cselOp)
{
    OpCodeCsel64 op = (OpCodeCsel64)context.CurrOp;

    ILLabel lblTrue = new ILLabel();
    ILLabel lblEnd  = new ILLabel();

    context.EmitCondBranch(lblTrue, op.Cond);

    //Condition false: select Rm with the optional post-operation applied.
    context.EmitLdintzr(op.Rm);

    switch (cselOp)
    {
        case CselOperation.Increment:
            context.EmitLdc_I(1);
            context.Emit(OpCodes.Add);
            break;

        case CselOperation.Invert:
            context.Emit(OpCodes.Not);
            break;

        case CselOperation.Negate:
            context.Emit(OpCodes.Neg);
            break;
    }

    context.Emit(OpCodes.Br_S, lblEnd);

    //Condition true: select Rn unmodified.
    context.MarkLabel(lblTrue);

    context.EmitLdintzr(op.Rn);

    context.MarkLabel(lblEnd);

    context.EmitStintzr(op.Rd);
}
//Loads the second operand of a data processing instruction onto the
//evaluation stack: an immediate, a shifted register, or an extended
//(cast) and shifted register, depending on the opcode form.
//Note: opcode types not listed here fall through without emitting anything.
public static void EmitDataLoadOper2(ILEmitterCtx context)
{
    switch (context.CurrOp)
    {
        case IOpCodeAluImm64 op:
            context.EmitLdc_I(op.Imm);
            break;

        case IOpCodeAluRs64 op:
            context.EmitLdintzr(op.Rm);

            //Apply the decoded shift to the register operand.
            switch (op.ShiftType)
            {
                case ShiftType.Lsl: context.EmitLsl(op.Shift); break;
                case ShiftType.Lsr: context.EmitLsr(op.Shift); break;
                case ShiftType.Asr: context.EmitAsr(op.Shift); break;
                case ShiftType.Ror: context.EmitRor(op.Shift); break;
            }
            break;

        case IOpCodeAluRx64 op:
            //Extended register form: cast (extend) then shift left.
            context.EmitLdintzr(op.Rm);
            context.EmitCast(op.IntType);
            context.EmitLsl(op.Shift);
            break;
    }
}
public static void Sys(ILEmitterCtx context)
{
    //This instruction is used to do some operations on the CPU like cache invalidation,
    //address translation and the like.
    //We treat it as no-op here since we don't have any cache being emulated anyway.
    OpCodeSystem64 op = (OpCodeSystem64)context.CurrOp;

    switch (GetPackedId(op))
    {
        case 0b11_011_0111_0100_001:
        {
            //DC ZVA
            //Zero the DC ZVA block (4 << DczSizeLog2 bytes) starting at
            //the address in Rt, 8 bytes per store.
            for (int offs = 0; offs < (4 << CpuThreadState.DczSizeLog2); offs += 8)
            {
                context.EmitLdarg(TranslatedSub.MemoryArgIdx);
                context.EmitLdintzr(op.Rt);
                context.EmitLdc_I(offs);

                context.Emit(OpCodes.Add);

                context.EmitLdc_I8(0);

                //Size argument 3 presumably selects a 64-bit store,
                //matching the 8-byte stride — confirm in EmitWriteCall.
                InstEmitMemoryHelper.EmitWriteCall(context, 3);
            }

            break;
        }

        //No-op
        case 0b11_011_0111_1110_001: //DC CIVAC
            break;
    }
}
//MOVZ: move the decoded immediate into Rd, replacing the whole register
//value.
//Note(review): op.Imm is presumably already positioned (shifted) by the
//decoder — confirm against the decoder.
public static void Movz(ILEmitterCtx context)
{
    OpCodeMov64 op = (OpCodeMov64)context.CurrOp;

    context.EmitLdc_I(op.Imm);

    context.EmitStintzr(op.Rd);
}
//ADR: compute a PC-relative address (instruction position + immediate
//offset) and store it into Rd.
public static void Adr(ILEmitterCtx context)
{
    OpCodeAdr64 op = (OpCodeAdr64)context.CurrOp;

    context.EmitLdc_I(op.Position + op.Imm);

    context.EmitStintzr(op.Rd);
}
//Computes the effective address of a load/store and leaves it on the
//evaluation stack: base register plus either an immediate (pre-index
//form) or a casted, optionally scaled index register.
private static void EmitLoadAddress(ILEmitterCtx context)
{
    switch (context.CurrOp)
    {
        case OpCodeMemImm64 op:
            context.EmitLdint(op.Rn);

            if (!op.PostIdx)
            {
                // Pre-indexing.
                context.EmitLdc_I(op.Imm);

                context.Emit(OpCodes.Add);
            }
            break;

        case OpCodeMemReg64 op:
            context.EmitLdint(op.Rn);
            context.EmitLdintzr(op.Rm);
            context.EmitCast(op.IntType);

            //When enabled, the index register is scaled by the access size.
            if (op.Shift)
            {
                context.EmitLsl(op.Size);
            }

            context.Emit(OpCodes.Add);
            break;
    }

    // Save address to Scratch var since the register value may change.
    context.Emit(OpCodes.Dup);

    context.EmitSttmp();
}
//ADRP: compute the 4KB-page-aligned PC-relative address (page-aligned
//instruction position + immediate * 0x1000) and store it into Rd.
public static void Adrp(ILEmitterCtx context)
{
    OpCodeAdr64 op = (OpCodeAdr64)context.CurrOp;

    context.EmitLdc_I((op.Position & ~0xfffL) + (op.Imm << 12));

    context.EmitStintzr(op.Rd);
}
//Emits a division with the ARM special cases handled explicitly.
//Pattern: before each special-case test, the candidate result for that
//case is pushed; on branch to badDiv it is left on the stack and stored,
//otherwise it is popped and the next step proceeds.
private static void EmitDiv(ILEmitterCtx context, OpCode ilOp)
{
    // If Rm == 0, Rd = 0 (division by zero).
    context.EmitLdc_I(0);

    EmitAluLoadRm(context);

    context.EmitLdc_I(0);

    ILLabel badDiv = new ILLabel();

    context.Emit(OpCodes.Beq_S, badDiv);
    //Rm != 0: discard the candidate 0 result.
    context.Emit(OpCodes.Pop);

    //Only signed division can overflow.
    if (ilOp == OpCodes.Div)
    {
        // If Rn == INT_MIN && Rm == -1, Rd = INT_MIN (overflow).
        long intMin = 1L << (context.CurrOp.GetBitsCount() - 1);

        //Candidate result for the overflow case.
        context.EmitLdc_I(intMin);

        EmitAluLoadRn(context);

        context.EmitLdc_I(intMin);

        context.Emit(OpCodes.Ceq);

        EmitAluLoadRm(context);

        context.EmitLdc_I(-1);

        context.Emit(OpCodes.Ceq);
        context.Emit(OpCodes.And);

        context.Emit(OpCodes.Brtrue_S, badDiv);
        //No overflow: discard the candidate INT_MIN result.
        context.Emit(OpCodes.Pop);
    }

    //Normal path: perform the actual division.
    EmitAluLoadRn(context);
    EmitAluLoadRm(context);

    context.Emit(ilOp);

    context.MarkLabel(badDiv);

    EmitAluStore(context);
}
//Compare and branch (CBZ/CBNZ style): compares Rt against zero and
//branches using ilOp.
private static void EmitCb(ILEmitterCtx context, OpCode ilOp)
{
    OpCodeBImmCmp64 op = (OpCodeBImmCmp64)context.CurrOp;

    context.EmitLdintzr(op.Rt);

    context.EmitLdc_I(0);

    EmitBranch(context, ilOp);
}
//BL: store the return address (address of the next instruction) into LR,
//then call the target subroutine.
public static void Bl(ILEmitterCtx context)
{
    OpCodeBImmAl64 op = (OpCodeBImmAl64)context.CurrOp;

    context.EmitLdc_I(op.Position + 4);

    context.EmitStint(RegisterAlias.Lr);

    EmitCall(context, op.Imm);
}
//BFM: general bitfield move using the decoded WMask/TMask pair.
//Result = (source field from Rn) | (Rd & ~WMask & TMask) | (Rd & ~TMask),
//i.e. the moved field merged with the untouched bits of Rd.
public static void Bfm(ILEmitterCtx context)
{
    OpCodeBfm64 op = (OpCodeBfm64)context.CurrOp;

    //Pushes (ROR(Rn, Shift) & WMask & TMask).
    EmitBfmLoadRn(context);

    context.EmitLdintzr(op.Rd);

    context.EmitLdc_I(~op.WMask & op.TMask);

    context.Emit(OpCodes.And);

    context.Emit(OpCodes.Or);

    context.EmitLdintzr(op.Rd);

    //Bits outside TMask come from Rd unchanged.
    context.EmitLdc_I(~op.TMask);

    context.Emit(OpCodes.And);

    context.Emit(OpCodes.Or);

    context.EmitStintzr(op.Rd);
}
//BLR: write the return address into LR, save the CPU state, then return
//with the target address (from Rn) left on the stack — presumably picked
//up by the dispatcher to continue execution at that address (see the Bl
//dispatcher comment elsewhere in this file).
public static void Blr(ILEmitterCtx context)
{
    OpCodeBReg64 op = (OpCodeBReg64)context.CurrOp;

    //Target address stays on the stack as the method's return value.
    context.EmitLdintzr(op.Rn);
    context.EmitLdc_I(op.Position + 4);

    context.EmitStint(RegisterAlias.Lr);
    context.EmitStoreState();

    context.Emit(OpCodes.Ret);
}
//BLR: write the return address into LR, save the context, then perform an
//indirect (virtual) call to the address held in Rn, which is left on the
//stack for EmitVirtualCall to consume.
public static void Blr(ILEmitterCtx context)
{
    OpCodeBReg64 op = (OpCodeBReg64)context.CurrOp;

    context.EmitLdintzr(op.Rn);
    context.EmitLdc_I(op.Position + 4);

    context.EmitStint(RegisterAlias.Lr);
    context.EmitStoreContext();

    EmitVirtualCall(context);
}
//BLR: write the return address into LR (addressed by its thread-state
//index here), save the CPU state, then return with the target address
//(from Rn) left on the stack — presumably picked up by the dispatcher.
public static void Blr(ILEmitterCtx context)
{
    OpCodeBReg64 op = (OpCodeBReg64)context.CurrOp;

    //Target address stays on the stack as the method's return value.
    context.EmitLdintzr(op.Rn);
    context.EmitLdc_I(op.Position + 4);

    context.EmitStint(CpuThreadState.LrIndex);
    context.EmitStoreState();

    context.Emit(OpCodes.Ret);
}
//Emits a range check for the guest address held in _tempIntAddress:
//ANDs the address with a mask that has all bits above the address space
//set, plus the low bits that would make an access of 1 << size bytes
//misaligned. The masked value is left on the stack for the caller's test.
private static void EmitAddressCheck(ILEmitterCtx context, int size)
{
    //High bits outside the address space, plus the alignment bits.
    long mask = ~(context.Memory.AddressSpaceSize - 1) | ((1u << size) - 1);

    context.EmitLdint(_tempIntAddress);
    context.EmitLdc_I(mask);

    context.Emit(OpCodes.And);
}
//Loads the second operand of an ALU instruction onto the evaluation
//stack, handling both the ARM32 and the ARM64 operand encodings.
//setCarry controls whether register-shift loading may update the carry
//flag (forwarded to EmitLoadRmShiftedByImmediate).
//Throws InvalidOperationException for unrecognized opcode types.
public static void EmitAluLoadOper2(ILEmitterCtx context, bool setCarry = true)
{
    switch (context.CurrOp)
    {
        // ARM32.
        case OpCode32AluImm op:
            context.EmitLdc_I4(op.Imm);

            //A rotated immediate that sets flags updates carry with the
            //immediate's top bit.
            if (op.SetFlags && op.IsRotated)
            {
                context.EmitLdc_I4((int)((uint)op.Imm >> 31));

                context.EmitStflg((int)PState.CBit);
            }
            break;

        case OpCode32AluRsImm op:
            EmitLoadRmShiftedByImmediate(context, op, setCarry);
            break;

        case OpCodeT16AluImm8 op:
            context.EmitLdc_I4(op.Imm);
            break;

        // ARM64.
        case IOpCodeAluImm64 op:
            context.EmitLdc_I(op.Imm);
            break;

        case IOpCodeAluRs64 op:
            context.EmitLdintzr(op.Rm);

            //Apply the decoded shift to the register operand.
            switch (op.ShiftType)
            {
                case ShiftType.Lsl: context.EmitLsl(op.Shift); break;
                case ShiftType.Lsr: context.EmitLsr(op.Shift); break;
                case ShiftType.Asr: context.EmitAsr(op.Shift); break;
                case ShiftType.Ror: context.EmitRor(op.Shift); break;
            }
            break;

        case IOpCodeAluRx64 op:
            //Extended register form: cast (extend) then shift left.
            context.EmitLdintzr(op.Rm);
            context.EmitCast(op.IntType);
            context.EmitLsl(op.Shift);
            break;

        default: throw new InvalidOperationException();
    }
}
//Loads the source field of a bitfield move onto the stack:
//ROR(Rn, Shift) & WMask & TMask.
private static void EmitBfmLoadRn(ILEmitterCtx context)
{
    OpCodeBfm64 op = (OpCodeBfm64)context.CurrOp;

    context.EmitLdintzr(op.Rn);

    context.EmitRor(op.Shift);

    context.EmitLdc_I(op.WMask & op.TMask);

    context.Emit(OpCodes.And);
}
//Loads Rm masked down to a valid shift amount (the AND with
//GetBitsCount() - 1 keeps the lower 5 or 6 bits).
private static void EmitAluLoadShift(ILEmitterCtx context)
{
    EmitAluLoadRm(context);

    context.EmitLdc_I(context.CurrOp.GetBitsCount() - 1);

    context.Emit(OpCodes.And);

    // Note: Only 32-bits shift values are valid, so when the value is 64-bits
    // we need to cast it to a 32-bits integer. This is fine because we
    // AND the value and only keep the lower 5 or 6 bits anyway -- it
    // could very well fit on a byte.
    if (context.CurrOp.RegisterSize != RegisterSize.Int32)
    {
        context.Emit(OpCodes.Conv_I4);
    }
}
// Check whenever the current OpCode has write back; if so, write the saved
// address (scratch var), applying the post-index offset when needed, back
// into the base register Rn.
// Note: AOpCodeMemPair inherits from AOpCodeMemImm, so this works for both.
private static void EmitWBackIfNeeded(ILEmitterCtx context)
{
    if (!(context.CurrOp is OpCodeMemImm64 op) || !op.WBack)
    {
        return;
    }

    context.EmitLdtmp();

    if (op.PostIdx)
    {
        context.EmitLdc_I(op.Imm);

        context.Emit(OpCodes.Add);
    }

    context.EmitStint(op.Rn);
}
//SBFM: signed bitfield move. Dispatches to specialized emitters for the
//common alias forms, falling back to the generic masked implementation.
//The order of the checks matters: the shift/insert forms are tested
//before the fixed-width sign-extension forms.
public static void Sbfm(ILEmitterCtx context)
{
    OpCodeBfm64 op = (OpCodeBfm64)context.CurrOp;

    int bitsCount = op.GetBitsCount();

    if (op.Pos + 1 == bitsCount)
    {
        //Field reaches the top bit: plain arithmetic shift (ASR alias).
        EmitSbfmShift(context);
    }
    else if (op.Pos < op.Shift)
    {
        //Signed bitfield insert in zeros (SBFIZ alias).
        EmitSbfiz(context);
    }
    else if (op.Pos == 7 && op.Shift == 0)
    {
        //Sign-extend byte.
        EmitSbfmCast(context, OpCodes.Conv_I1);
    }
    else if (op.Pos == 15 && op.Shift == 0)
    {
        //Sign-extend halfword.
        EmitSbfmCast(context, OpCodes.Conv_I2);
    }
    else if (op.Pos == 31 && op.Shift == 0)
    {
        //Sign-extend word.
        EmitSbfmCast(context, OpCodes.Conv_I4);
    }
    else
    {
        //Generic case: combine the extracted field with the sign bit of
        //the field (bit Pos of Rn) replicated over the bits outside TMask.
        EmitBfmLoadRn(context);

        context.EmitLdintzr(op.Rn);

        //Lsl/Asr pair replicates bit Pos across the whole register.
        context.EmitLsl(bitsCount - 1 - op.Pos);
        context.EmitAsr(bitsCount - 1);

        context.EmitLdc_I(~op.TMask);

        context.Emit(OpCodes.And);
        context.Emit(OpCodes.Or);

        context.EmitStintzr(op.Rd);
    }
}
//Computes the overflow flag (V) for a subtraction.
//Expects the subtraction result on top of the stack; it is duplicated so
//the caller still has the result afterwards.
//Note(review): EmitAluLoadOpers presumably pushes both Rn and Rm so the
//two Xors yield (Rd ^ Rn) and (Rn ^ Rm) — confirm against that helper.
public static void EmitSubsVCheck(ILEmitterCtx context)
{
    // V = (Rd ^ Rn) & (Rn ^ Rm) < 0
    context.Emit(OpCodes.Dup);

    EmitAluLoadRn(context);

    context.Emit(OpCodes.Xor);

    EmitAluLoadOpers(context);

    context.Emit(OpCodes.Xor);

    context.Emit(OpCodes.And);

    //V is the sign bit of the combined expression.
    context.EmitLdc_I(0);

    context.Emit(OpCodes.Clt);

    context.EmitStflg((int)PState.VBit);
}
//BL: store the return address into LR, save the CPU state, then either
//call the translated target subroutine directly (optimized path) or
//return the target address so the dispatcher performs the call.
public static void Bl(ILEmitterCtx context)
{
    OpCodeBImmAl64 op = (OpCodeBImmAl64)context.CurrOp;

    context.EmitLdc_I(op.Position + 4);

    context.EmitStint(CpuThreadState.LrIndex);
    context.EmitStoreState();

    if (context.TryOptEmitSubroutineCall())
    {
        //Note: the return value of the called method will be placed
        //at the Stack, the return value is always a Int64 with the
        //return address of the function. We check if the address is
        //correct, if it isn't we keep returning until we reach the dispatcher.
        context.Emit(OpCodes.Dup);

        context.EmitLdc_I8(op.Position + 4);

        ILLabel lblContinue = new ILLabel();

        context.Emit(OpCodes.Beq_S, lblContinue);
        context.Emit(OpCodes.Ret);

        context.MarkLabel(lblContinue);

        //Returned to the expected address: drop it and reload the state
        //for the next block.
        context.Emit(OpCodes.Pop);

        context.EmitLoadState(context.CurrBlock.Next);
    }
    else
    {
        //Fallback: return the target address to the dispatcher.
        context.EmitLdc_I8(op.Imm);

        context.Emit(OpCodes.Ret);
    }
}
//Walks the software page table to translate the guest address held in
//_tempIntAddress into a host pointer, which is left on the stack.
//When write watch support is not available, branches to lblFallbackPath
//if the page table entry has any flag bits set.
private static void EmitPtPointerLoad(ILEmitterCtx context, ILLabel lblFallbackPath)
{
    //Start from the page table root.
    context.EmitLdc_I8(context.Memory.PageTable.ToInt64());

    context.Emit(OpCodes.Conv_I);

    int bit = MemoryManager.PageBits;

    do
    {
        //Index for the current level: address shifted down past the bits
        //consumed so far.
        context.EmitLdint(_tempIntAddress);

        //32-bit addresses are zero-extended before shifting.
        if (context.CurrOp.RegisterSize == RegisterSize.Int32)
        {
            context.Emit(OpCodes.Conv_U8);
        }

        context.EmitLsr(bit);

        bit += context.Memory.PtLevelBits;

        //Intermediate levels keep only this level's index bits; the last
        //level uses all remaining address bits.
        if (bit < context.Memory.AddressSpaceBits)
        {
            context.EmitLdc_I8(context.Memory.PtLevelMask);

            context.Emit(OpCodes.And);
        }

        //Scale the index by the pointer size and load the entry.
        context.EmitLdc_I8(IntPtr.Size);

        context.Emit(OpCodes.Mul);

        context.Emit(OpCodes.Conv_I);
        context.Emit(OpCodes.Add);
        context.Emit(OpCodes.Ldind_I);
    }
    while (bit < context.Memory.AddressSpaceBits);

    if (!context.Memory.HasWriteWatchSupport)
    {
        //No write watch support: flagged entries must take the fallback
        //path. Stash the entry, test the flag bits, then reload the clean
        //pointer on the fast path.
        context.Emit(OpCodes.Conv_U8);

        context.EmitStint(_tempIntPtAddr);
        context.EmitLdint(_tempIntPtAddr);

        context.EmitLdc_I8(MemoryManager.PteFlagsMask);

        context.Emit(OpCodes.And);

        context.Emit(OpCodes.Brtrue, lblFallbackPath);

        context.EmitLdint(_tempIntPtAddr);

        context.Emit(OpCodes.Conv_I);
    }

    //Add the offset within the page.
    context.EmitLdint(_tempIntAddress);

    context.EmitLdc_I(MemoryManager.PageMask);

    context.Emit(OpCodes.And);

    context.Emit(OpCodes.Conv_I);
    context.Emit(OpCodes.Add);
}