/// <summary>
/// Consumes file-header properties from the buffer until the kEnd marker is
/// reached, populating <c>FilesInfo</c> (main streams info) and
/// <c>Entries</c> (per-file metadata).
/// </summary>
/// <exception cref="InvalidFormatException">Thrown on an unrecognized property id.</exception>
private void ReadFileHeader(HeaderBuffer headerBytes)
{
    for (; ; )
    {
        var property = headerBytes.ReadProperty();
        if (property == HeaderProperty.kEnd)
        {
            return;
        }
        if (property == HeaderProperty.kMainStreamsInfo)
        {
            FilesInfo = ReadPackedStreams(headerBytes);
        }
        else if (property == HeaderProperty.kFilesInfo)
        {
            Entries = ReadFilesInfo(FilesInfo, headerBytes);
        }
        else
        {
            throw new InvalidFormatException(property.ToString());
        }
    }
}
/// <summary>
/// Reads the PackInfo section of a 7-Zip header: the pack position, the
/// packed-stream count, each stream's packed size, and optional CRCs.
/// Reads are strictly sequential; do not reorder.
/// </summary>
private static void ReadPackInfo(StreamsInfo info, HeaderBuffer headerBytes)
{
    info.PackPosition = headerBytes.ReadEncodedInt64();
    int count = (int)headerBytes.ReadEncodedInt64();
    info.PackedStreams = new PackedStreamInfo[count];
    for (int i = 0; i < count; i++)
    {
        info.PackedStreams[i] = new PackedStreamInfo();
    }
    // A kSize property must immediately follow the stream count.
    var prop = headerBytes.ReadProperty();
    if (prop != HeaderProperty.kSize)
    {
        throw new InvalidFormatException("Expected Size Property");
    }
    for (int i = 0; i < count; i++)
    {
        info.PackedStreams[i].PackedSize = headerBytes.ReadEncodedInt64();
    }
    // Optional CRC section: stop at the first property that is not kCRC.
    // NOTE(review): a property id is consumed on EVERY iteration and the CRC
    // is read as an encoded int64; the 7z format emits kCRC once followed by
    // a defined-bitmask and UInt32 digests — confirm against HeaderBuffer's
    // encoding before relying on CRCs when count > 1.
    for (int i = 0; i < count; i++)
    {
        prop = headerBytes.ReadProperty();
        if (prop != HeaderProperty.kCRC)
        {
            break;
        }
        info.PackedStreams[i].Crc = headerBytes.ReadEncodedInt64();
    }
}
/// <summary>
/// Builds a <see cref="StreamsInfo"/> by dispatching each header property
/// (pack info, unpack info, sub-streams info) to its reader until kEnd.
/// </summary>
/// <exception cref="InvalidFormatException">Thrown on an unrecognized property id.</exception>
private StreamsInfo ReadPackedStreams(HeaderBuffer headerBytes)
{
    var streamsInfo = new StreamsInfo();
    for (; ; )
    {
        var property = headerBytes.ReadProperty();
        if (property == HeaderProperty.kEnd)
        {
            return streamsInfo;
        }
        if (property == HeaderProperty.kUnPackInfo)
        {
            ReadUnPackInfo(streamsInfo, headerBytes);
        }
        else if (property == HeaderProperty.kPackInfo)
        {
            ReadPackInfo(streamsInfo, headerBytes);
        }
        else if (property == HeaderProperty.kSubStreamsInfo)
        {
            ReadSubStreamsInfo(streamsInfo, headerBytes);
        }
        else
        {
            throw new InvalidFormatException(property.ToString());
        }
    }
}
/// <summary>
/// Handles a chunk of raw incoming bytes: reassembles length-prefixed packets
/// across the header buffer and packet buffer, decrypting and raising
/// <c>OnPacketReceived</c> for each completed packet.
/// </summary>
/// <remarks>
/// When overriding this method in a derived class, call the base implementation after your logic.
/// </remarks>
/// <param name="sender">The sender of the event.</param>
/// <param name="args">The raw data that arrived.</param>
/// <exception cref="ArgumentNullException">Thrown if <paramref name="args"/> is <see langword="null"/>.</exception>
protected virtual void OnDataArrived(object sender, DataArrivedEventArgs args)
{
    Guard.NotNull(() => args, args);
    byte[] data = args.Data;
    int position = 0, remaining = data.Length;
    // Loop while the packet buffer is full, i.e. a packet is complete (or the
    // buffer is in its initial zero-capacity state).
    while (PacketBuffer.FreeSpace == 0)
    {
        // Flush the completed packet (if any), decrypt it, and notify listeners.
        byte[] rawData = PacketBuffer.ExtractAndReset(0);
        if (rawData.Length > 0)
        {
            Crypto.Decrypt(rawData);
            var incomingPacketArgs = new PacketReceivedEventArgs(rawData);
            OnPacketReceived(incomingPacketArgs);
        }
        // No more input to consume for the next packet.
        if (remaining == 0)
        {
            break;
        }
        int bufferred;
        int headerRemaining = HeaderBuffer.FreeSpace;
        if (headerRemaining > 0)
        {
            bufferred = HeaderBuffer.AppendFill(data, position, headerRemaining);
            // For the confused: if we didn't fill the header, it
            // means the data array didn't have enough elements.
            // We move on (the rest of the header arrives with the next chunk).
            if (bufferred < headerRemaining)
            {
                break;
            }
            position += bufferred;
            remaining -= bufferred;
        }
        // The 4-byte header encodes the (encrypted) packet length.
        byte[] header = HeaderBuffer.ExtractAndReset(4);
        int length;
        if (!Crypto.TryGetLength(header, out length))
        {
            Close(@"Could not decode packet length.");
            return;
        }
        // Size the packet buffer for the upcoming payload and fill it with
        // whatever input is left; the loop condition re-checks completion.
        PacketBuffer.Reset(length);
        bufferred = PacketBuffer.AppendFill(data, position, remaining);
        position += bufferred;
        remaining -= bufferred;
    }
}
/// <summary>
/// Reads the UnPackInfo section: the folder definitions, each folder's
/// per-output-stream unpacked sizes, and optional folder CRCs.
/// Reads are strictly sequential; do not reorder.
/// </summary>
private void ReadUnPackInfo(StreamsInfo info, HeaderBuffer headerBytes)
{
    // NOTE(review): this first property (presumably kFolder) is consumed but
    // never validated — confirm whether a format check is intended here.
    var prop = headerBytes.ReadProperty();
    int count = (int)headerBytes.ReadEncodedInt64();
    info.Folders = new Folder[count];
    // Non-zero "external" byte means folder data lives elsewhere — unsupported.
    if (headerBytes.ReadByte() != 0)
    {
        throw new NotSupportedException("External flag");
    }
    for (int i = 0; i < count; i++)
    {
        info.Folders[i] = ReadFolder(headerBytes);
    }
    prop = headerBytes.ReadProperty();
    if (prop != HeaderProperty.kCodersUnPackSize)
    {
        throw new InvalidFormatException("Expected Size Property");
    }
    // One unpacked size per coder output stream, per folder.
    foreach (var folder in info.Folders)
    {
        int numOutStreams = folder.Coders.Aggregate(0, (sum, coder) => sum + (int)coder.NumberOfOutStreams);
        folder.UnpackedStreamSizes = new ulong[numOutStreams];
        for (uint j = 0; j < numOutStreams; j++)
        {
            folder.UnpackedStreamSizes[j] = headerBytes.ReadEncodedInt64();
        }
    }
    // Optional CRC section; absence ends the section early.
    // NOTE(review): the early return skips the kEnd check below — verify the
    // next property really is consumed correctly by the caller in that case.
    prop = headerBytes.ReadProperty();
    if (prop != HeaderProperty.kCRC)
    {
        return;
    }
    uint?[] crcs;
    UnPackDigests(headerBytes, info.Folders.Length, out crcs);
    for (int i = 0; i < info.Folders.Length; i++)
    {
        Folder folder = info.Folders[i];
        folder.UnpackCRC = crcs[i];
    }
    prop = headerBytes.ReadProperty();
    if (prop != HeaderProperty.kEnd)
    {
        throw new InvalidFormatException("Expected End property");
    }
}
/// <summary>
/// Reads an optional-digest table: a defined-flags vector (defaulting to
/// all-true) followed by one UInt32 digest for each defined entry.
/// Undefined entries are left as <see langword="null"/>.
/// </summary>
private static void UnPackDigests(HeaderBuffer headerBytes, int numItems, out uint?[] digests)
{
    bool[] defined = headerBytes.ReadBoolFlagsDefaultTrue(numItems);
    digests = new uint?[numItems];
    for (int index = 0; index < numItems; index++)
    {
        if (!defined[index])
        {
            continue;
        }
        digests[index] = headerBytes.ReadUInt32();
    }
}
/// <summary>
/// Synchronizes the receive state with a freshly arrived chunk of
/// <paramref name="received"/> bytes: on a new packet, reads the packet size
/// and id from the pinned header buffer and (re)allocates the pinned receive
/// buffer; then copies the payload portion into it and, when the packet is
/// complete, scans backwards for a markup terminator.
/// </summary>
/// <param name="received">Number of bytes that arrived in this chunk.</param>
/// <returns>The markup kind found at the end of a completed packet, or <c>MarkupType.None</c>.</returns>
public unsafe MarkupType SyncHeader(int received)
{
    MarkupType noiseKind = MarkupType.None;
    // NOTE(review): binReceive is reassigned inside this lock; subsequent
    // callers will lock a different object than in-flight ones — confirm
    // this is safe under the intended threading model.
    lock (binReceive)
    {
        int offset = 0, length = received;
        bool inprogress = false;
        if (SerialPacketSize == 0)
        {
            // New packet: size at header offset +4, id at +12.
            SerialPacketSize = *((int *)(headerBufferAddress + 4).ToPointer());
            DeserialPacketId = *((int *)(headerBufferAddress + 12).ToPointer());
            binReceive = new byte[SerialPacketSize];
            // Pin the receive buffer so its address stays valid for the
            // unmanaged block copy below.
            // NOTE(review): the pinned handle is stored in binReceiveHandler —
            // verify it is freed elsewhere, otherwise the buffer leaks pinned.
            GCHandle gc = GCHandle.Alloc(binReceive, GCHandleType.Pinned);
            binReceiveHandler = GCHandle.ToIntPtr(gc);
            binReceiveAddress = gc.AddrOfPinnedObject();
            // Skip the serialized header; only the payload is copied.
            offset = SerialPacketOffset;
            length -= SerialPacketOffset;
        }
        if (SerialPacketSize > 0)
        {
            inprogress = true;
        }
        SerialPacketSize -= length;
        if (SerialPacketSize < 1)
        {
            // Packet complete (or overshot): look for the trailing markup.
            // The initializer is overwritten by the out parameter.
            long endPosition = length;
            noiseKind = HeaderBuffer.SeekMarkup(out endPosition, SeekDirection.Backward);
        }
        // Destination offset = bytes of this packet already received.
        int destid = (int)(binReceive.Length - (SerialPacketSize + length));
        if (inprogress)
        {
            Extractor.CopyBlock(binReceiveAddress, destid, headerBufferAddress, offset, length);
        }
    }
    return(noiseKind);
}
/// <summary>
/// Reads the top-level 7-Zip header. A kEncodedHeader is itself compressed:
/// its packed streams are decoded (first folder, first coder) into a new
/// <see cref="HeaderBuffer"/> which is then parsed on the next loop pass.
/// A plain kHeader is parsed directly via <c>ReadFileHeader</c>.
/// </summary>
private void ReadArchive(HeaderBuffer headerBytes)
{
    while (true)
    {
        var prop = headerBytes.ReadProperty();
        switch (prop)
        {
            case HeaderProperty.kEncodedHeader:
            {
                ArchiveInfo = ReadPackedStreams(headerBytes);
                // Seek to the packed header data within the archive stream.
                stream.Seek((long)ArchiveInfo.PackPosition + BaseOffset, SeekOrigin.Begin);
                var firstFolder = ArchiveInfo.Folders.First();
                ulong unpackSize = firstFolder.GetUnpackSize();
                ulong packSize = ArchiveInfo.PackedStreams.Select(x => x.PackedSize)
                    .Aggregate((ulong)0, (sum, size) => sum + size);
                byte[] unpackedBytes = new byte[(int)unpackSize];
                // Only the first coder of the first folder is used here; the
                // encoded header is expected to be single-coder compressed.
                Decoder decoder = new Decoder();
                decoder.SetDecoderProperties(firstFolder.Coders[0].Properties);
                using (MemoryStream outStream = new MemoryStream(unpackedBytes))
                {
                    decoder.Code(stream, outStream, (long)(packSize), (long)unpackSize, null);
                }
                // Re-enter the loop with the decompressed header bytes.
                headerBytes = new HeaderBuffer { Bytes = unpackedBytes };
            }
                break;
            case HeaderProperty.kHeader:
            {
                ReadFileHeader(headerBytes);
                return;
            }
            default:
                throw new NotSupportedException("7Zip header " + prop);
        }
    }
}
/// <summary>
/// Validates the 7-Zip signature, reads the fixed start-header (version,
/// CRC, next-header offset/size/CRC), seeks to the next header, and parses it.
/// </summary>
private void Initialize()
{
    if (!SignatureMatch(stream))
    {
        throw new InvalidFormatException("Not a 7Zip archive.");
    }
    BinaryReader reader = new BinaryReader(stream);
    reader.ReadByte(); //major version
    reader.ReadByte(); //minor version
    // NOTE(review): neither CRC read below is ever verified against the data.
    uint crc = reader.ReadUInt32();
    ulong nextHeaderOffset = reader.ReadUInt64();
    ulong nextHeaderSize = reader.ReadUInt64();
    uint nextHeaderCRC = reader.ReadUInt32();
    // All pack positions in the archive are relative to this offset
    // (the end of the 32-byte signature header).
    BaseOffset = stream.Position;
    stream.Seek(BaseOffset + (long)nextHeaderOffset, SeekOrigin.Begin);
    var headerBytes = new HeaderBuffer();
    headerBytes.Bytes = reader.ReadBytes((int)nextHeaderSize);
    ReadArchive(headerBytes);
    PostProcess();
}
/// <summary>
/// Reads one folder definition: its coders (method id, stream counts,
/// optional properties), the bind pairs linking coder streams, and the
/// indices of the packed input streams. Reads are strictly sequential.
/// </summary>
private Folder ReadFolder(HeaderBuffer headerBytes)
{
    Folder folder = new Folder(this);
    folder.Coders = headerBytes.CreateArray<CodersInfo>();
    int numInStreams = 0;
    int numOutStreams = 0;
    foreach (var coder in folder.Coders)
    {
        // mainByte layout: low nibble = method-id length, 0x10 = complex
        // coder (explicit stream counts), 0x20 = has properties,
        // 0x80 = alternative methods follow.
        byte mainByte = headerBytes.ReadByte();
        int size = (byte)(mainByte & 0xF);
        coder.Method = headerBytes.ReadBytes(size);
        if ((mainByte & 0x10) != 0)
        {
            coder.NumberOfInStreams = headerBytes.ReadEncodedInt64();
            coder.NumberOfOutStreams = headerBytes.ReadEncodedInt64();
        }
        else
        {
            // Simple coder: one input, one output.
            coder.NumberOfInStreams = 1;
            coder.NumberOfOutStreams = 1;
        }
        if ((mainByte & 0x20) != 0)
        {
            ulong propertiesSize = headerBytes.ReadEncodedInt64();
            coder.Properties = headerBytes.ReadBytes((int)propertiesSize);
        }
        // Alternative methods are parsed only to advance the buffer; their
        // contents are discarded.
        while ((mainByte & 0x80) != 0)
        {
            mainByte = headerBytes.ReadByte();
            headerBytes.ReadBytes(mainByte & 0xF);
            if ((mainByte & 0x10) != 0)
            {
                headerBytes.ReadEncodedInt64();
                headerBytes.ReadEncodedInt64();
            }
            if ((mainByte & 0x20) != 0)
            {
                ulong propertiesSize = headerBytes.ReadEncodedInt64();
                headerBytes.ReadBytes((int)propertiesSize);
            }
        }
        numInStreams += (int)coder.NumberOfInStreams;
        numOutStreams += (int)coder.NumberOfOutStreams;
    }
    // Each bind pair connects one coder output to another coder's input;
    // a folder with N outputs has N-1 bind pairs.
    int numBindPairs = numOutStreams - 1;
    folder.BindPairs = new BindPair[numBindPairs];
    for (int i = 0; i < numBindPairs; i++)
    {
        BindPair bindpair = new BindPair();
        folder.BindPairs[i] = bindpair;
        bindpair.InIndex = headerBytes.ReadEncodedInt64();
        bindpair.OutIndex = headerBytes.ReadEncodedInt64();
    }
    int numPackedStreams = numInStreams - numBindPairs;
    folder.PackedStreamIndices = new ulong[numPackedStreams];
    if (numPackedStreams == 1)
    {
        // Single packed stream: its index is the first input not bound by a pair.
        uint pi = 0;
        for (uint j = 0; j < numInStreams; j++)
        {
            if (!folder.BindPairs.Where(x => x.InIndex == j).Any())
            {
                folder.PackedStreamIndices[pi++] = j;
                break;
            }
        }
    }
    else
    {
        // Multiple packed streams: indices are stored explicitly.
        for (uint i = 0; i < numPackedStreams; i++)
        {
            folder.PackedStreamIndices[i] = headerBytes.ReadEncodedInt64();
        }
    }
    return folder;
}
/// <summary>
/// Drains file-header properties, filling <c>FilesInfo</c> from the main
/// streams section and <c>Entries</c> from the files section, and stops at kEnd.
/// </summary>
/// <exception cref="InvalidFormatException">Thrown on an unrecognized property id.</exception>
private void ReadFileHeader(HeaderBuffer headerBytes)
{
    bool finished = false;
    do
    {
        var property = headerBytes.ReadProperty();
        switch (property)
        {
            case HeaderProperty.kMainStreamsInfo:
                FilesInfo = ReadPackedStreams(headerBytes);
                break;
            case HeaderProperty.kFilesInfo:
                Entries = ReadFilesInfo(FilesInfo, headerBytes);
                break;
            case HeaderProperty.kEnd:
                finished = true;
                break;
            default:
                throw new InvalidFormatException(property.ToString());
        }
    }
    while (!finished);
}
/// <summary>
/// Parses the archive's top-level header. kEncodedHeader means the real
/// header is stored compressed: it is decoded (first folder's first coder)
/// into memory and the loop re-runs over the decompressed bytes; kHeader is
/// the plain header and is parsed via <c>ReadFileHeader</c>.
/// </summary>
private void ReadArchive(HeaderBuffer headerBytes)
{
    while (true)
    {
        var prop = headerBytes.ReadProperty();
        switch (prop)
        {
            case HeaderProperty.kEncodedHeader:
            {
                ArchiveInfo = ReadPackedStreams(headerBytes);
                // Position the archive stream at the compressed header data.
                stream.Seek((long)ArchiveInfo.PackPosition + BaseOffset, SeekOrigin.Begin);
                var firstFolder = ArchiveInfo.Folders.First();
                ulong unpackSize = firstFolder.GetUnpackSize();
                // Total packed size across all packed streams.
                ulong packSize = ArchiveInfo.PackedStreams.Select(x => x.PackedSize)
                    .Aggregate((ulong)0, (sum, size) => sum + size);
                byte[] unpackedBytes = new byte[(int)unpackSize];
                Decoder decoder = new Decoder();
                decoder.SetDecoderProperties(firstFolder.Coders[0].Properties);
                using (MemoryStream outStream = new MemoryStream(unpackedBytes))
                {
                    decoder.Code(stream, outStream, (long)(packSize), (long)unpackSize, null);
                }
                // Replace the buffer; the next iteration parses the real header.
                headerBytes = new HeaderBuffer { Bytes = unpackedBytes };
            }
                break;
            case HeaderProperty.kHeader:
            {
                ReadFileHeader(headerBytes);
                return;
            }
            default:
                throw new NotSupportedException("7Zip header " + prop);
        }
    }
}
/// <summary>
/// Entry point for opening the archive: checks the 7-Zip signature, reads
/// the start header fields, seeks to and loads the next header, then parses
/// it and runs post-processing.
/// </summary>
private void Initialize()
{
    if (!SignatureMatch(stream))
    {
        throw new InvalidFormatException("Not a 7Zip archive.");
    }
    BinaryReader reader = new BinaryReader(stream);
    reader.ReadByte(); //major version byte
    reader.ReadByte(); //minor version byte
    // NOTE(review): crc and nextHeaderCRC are read but never checked.
    uint crc = reader.ReadUInt32();
    ulong nextHeaderOffset = reader.ReadUInt64();
    ulong nextHeaderSize = reader.ReadUInt64();
    uint nextHeaderCRC = reader.ReadUInt32();
    // Base for all pack positions referenced later in the header.
    BaseOffset = stream.Position;
    stream.Seek(BaseOffset + (long)nextHeaderOffset, SeekOrigin.Begin);
    var headerBytes = new HeaderBuffer();
    headerBytes.Bytes = reader.ReadBytes((int)nextHeaderSize);
    ReadArchive(headerBytes);
    PostProcess();
}
/// <summary>
/// Reads a table of optional UInt32 digests: first a defined-flags vector
/// (defaulting to all-true), then one digest per defined slot; undefined
/// slots remain <see langword="null"/>.
/// </summary>
private static void UnPackDigests(HeaderBuffer headerBytes, int numItems, out uint?[] digests)
{
    var flags = headerBytes.ReadBoolFlagsDefaultTrue(numItems);
    var result = new uint?[numItems];
    int i = 0;
    while (i < numItems)
    {
        if (flags[i])
        {
            result[i] = headerBytes.ReadUInt32();
        }
        i++;
    }
    digests = result;
}
/// <summary>
/// Reads the SubStreamsInfo section: per-folder unpacked stream counts,
/// per-stream sizes (the last size in each folder is derived, not stored),
/// and per-stream digests (folders whose single-stream CRC is already known
/// reuse it). Reads are strictly sequential and index bookkeeping (si,
/// digestIndex) is order-sensitive; do not reorder.
/// </summary>
private static void ReadSubStreamsInfo(StreamsInfo info, HeaderBuffer headerBytes)
{
    // Default: every folder holds exactly one unpacked stream.
    info.UnpackedStreams = new List<UnpackedStreamInfo>();
    foreach (var folder in info.Folders)
    {
        folder.UnpackedStreams = new UnpackedStreamInfo[1];
        folder.UnpackedStreams[0] = new UnpackedStreamInfo();
        info.UnpackedStreams.Add(folder.UnpackedStreams[0]);
    }
    bool loop = true;
    var prop = HeaderProperty.kEnd;
    while (loop)
    {
        prop = headerBytes.ReadProperty();
        switch (prop)
        {
            case HeaderProperty.kNumUnPackStream:
            {
                // Explicit stream counts replace the one-per-folder default.
                info.UnpackedStreams.Clear();
                foreach (var folder in info.Folders)
                {
                    var numStreams = (int)headerBytes.ReadEncodedInt64();
                    folder.UnpackedStreams = new UnpackedStreamInfo[numStreams];
                    folder.UnpackedStreams.Initialize(() => new UnpackedStreamInfo());
                    info.UnpackedStreams.AddRange(folder.UnpackedStreams);
                }
            }
                break;
            case HeaderProperty.kCRC:
            case HeaderProperty.kSize:
            case HeaderProperty.kEnd:
            {
                // These are handled below; stop the property loop.
                loop = false;
            }
                break;
            default:
                throw new InvalidFormatException(prop.ToString());
        }
    }
    // Sizes: only the first (count - 1) sizes of each folder are stored;
    // the last is the folder's total unpack size minus the stored ones.
    int si = 0;
    for (int i = 0; i < info.Folders.Length; i++)
    {
        var folder = info.Folders[i];
        ulong sum = 0;
        if (folder.UnpackedStreams.Length == 0)
        {
            continue;
        }
        if (prop == HeaderProperty.kSize)
        {
            for (int j = 1; j < folder.UnpackedStreams.Length; j++)
            {
                ulong size = headerBytes.ReadEncodedInt64();
                info.UnpackedStreams[si].UnpackedSize = size;
                sum += size;
                si++;
            }
        }
        info.UnpackedStreams[si].UnpackedSize = folder.GetUnpackSize() - sum;
        si++;
    }
    if (prop == HeaderProperty.kSize)
    {
        prop = headerBytes.ReadProperty();
    }
    // Digests are stored only for streams whose CRC is not already known
    // from the folder (i.e. multi-stream folders or folders without a CRC).
    int numDigests = 0;
    foreach (var folder in info.Folders)
    {
        if (folder.UnpackedStreams.Length != 1 || !folder.UnpackCRC.HasValue)
        {
            numDigests += folder.UnpackedStreams.Length;
        }
    }
    si = 0;
    while (true)
    {
        if (prop == HeaderProperty.kCRC)
        {
            int digestIndex = 0;
            uint?[] digests2;
            UnPackDigests(headerBytes, numDigests, out digests2);
            for (uint i = 0; i < info.Folders.Length; i++)
            {
                Folder folder = info.Folders[i];
                if (folder.UnpackedStreams.Length == 1 && folder.UnpackCRC.HasValue)
                {
                    // Single stream with a known folder CRC: reuse it.
                    info.UnpackedStreams[si].Digest = folder.UnpackCRC;
                    si++;
                }
                else
                {
                    for (uint j = 0; j < folder.UnpackedStreams.Length; j++, digestIndex++)
                    {
                        info.UnpackedStreams[si].Digest = digests2[digestIndex];
                        si++;
                    }
                }
            }
        }
        else if (prop == HeaderProperty.kEnd)
            return;
        prop = headerBytes.ReadProperty();
    }
}
/// <summary>
/// Reads the FilesInfo section into an array of <see cref="HeaderEntry"/>:
/// names, empty-stream/empty-file flags, then derives per-entry
/// directory/size/CRC from the stream info. Unknown property blocks are
/// skipped by their declared size.
/// </summary>
private static HeaderEntry[] ReadFilesInfo(StreamsInfo info, HeaderBuffer headerBytes)
{
    var entries = headerBytes.CreateArray<HeaderEntry>();
    int numEmptyStreams = 0;
    while (true)
    {
        var type = headerBytes.ReadProperty();
        if (type == HeaderProperty.kEnd)
        {
            break;
        }
        // Every property block is prefixed with its byte size.
        var size = (int)headerBytes.ReadEncodedInt64();
        switch (type)
        {
            case HeaderProperty.kName:
            {
                if (headerBytes.ReadByte() != 0)
                {
                    throw new InvalidFormatException("Cannot be external");
                }
                entries.ForEach(f => f.Name = headerBytes.ReadName());
                break;
            }
            case HeaderProperty.kEmptyStream:
            {
                info.EmptyStreamFlags = headerBytes.ReadBoolFlags(entries.Length);
                numEmptyStreams = info.EmptyStreamFlags.Where(x => x).Count();
                break;
            }
            case HeaderProperty.kEmptyFile: //just read bytes
            case HeaderProperty.kAnti:
            {
                // NOTE(review): kAnti flags are stored into EmptyFileFlags —
                // confirm anti-files are intentionally treated like empty files.
                info.EmptyFileFlags = headerBytes.ReadBoolFlags(numEmptyStreams);
                break;
            }
            default:
            {
                // Unknown/unhandled property: skip its payload.
                headerBytes.ReadBytes(size);
                break;
            }
        }
    }
    // Second pass: map stream info onto entries. Entries with a stream get
    // size/CRC from the unpacked streams in order; streamless entries are
    // directories unless flagged as empty files.
    int emptyFileIndex = 0;
    int sizeIndex = 0;
    for (int i = 0; i < entries.Length; i++)
    {
        HeaderEntry file = entries[i];
        file.IsAnti = false;
        if (info.EmptyStreamFlags == null)
        {
            file.HasStream = true;
        }
        else
        {
            file.HasStream = !info.EmptyStreamFlags[i];
        }
        if (file.HasStream)
        {
            file.IsDirectory = false;
            file.Size = info.UnpackedStreams[sizeIndex].UnpackedSize;
            file.FileCRC = info.UnpackedStreams[sizeIndex].Digest;
            sizeIndex++;
        }
        else
        {
            if (info.EmptyFileFlags == null)
            {
                file.IsDirectory = true;
            }
            else
            {
                file.IsDirectory = !info.EmptyFileFlags[emptyFileIndex];
            }
            emptyFileIndex++;
            file.Size = 0;
        }
    }
    return(entries);
}
/// <summary>
/// Validates and evaluates a submitted share (cycle-based proof of work):
/// checks nonce/ntime/extranonce sizes and ranges, rejects duplicates,
/// rebuilds the coinbase/merkle-root/header, hashes the cycle, computes the
/// share difficulty, and decides whether the share is a block candidate.
/// On any validation failure, <c>Error</c> is set and construction stops early.
/// </summary>
/// <param name="miner">The miner that submitted the share.</param>
/// <param name="jobId">Id of the job the share claims to solve.</param>
/// <param name="job">The job itself; <see langword="null"/> yields <c>ShareError.JobNotFound</c>.</param>
/// <param name="extraNonce2">Hex extraNonce2 supplied by the miner.</param>
/// <param name="nTimeString">Hex nTime supplied by the miner (8 hex chars).</param>
/// <param name="nonceString">Hex nonce supplied by the miner (8 hex chars).</param>
/// <param name="cycle">The proof-of-work cycle edges supplied by the miner.</param>
public Share(IStratumMiner miner, UInt64 jobId, IJob job, string extraNonce2, string nTimeString, string nonceString, UInt32[] cycle)
{
    Miner = miner;
    JobId = jobId;
    Job = job;
    Error = ShareError.None;
    Cycle = cycle;
    var submitTime = TimeHelpers.NowInUnixTimestamp(); // time we received the share from the miner.
    if (Job == null)
    {
        Error = ShareError.JobNotFound;
        return;
    }
    // check size of miner supplied extraNonce2 (hex string: 2 chars per byte)
    // NOTE(review): a null extraNonce2/nTimeString/nonceString throws
    // NullReferenceException here rather than setting an Error — confirm
    // callers guarantee non-null inputs.
    if (extraNonce2.Length / 2 != ExtraNonce.ExpectedExtraNonce2Size)
    {
        Error = ShareError.IncorrectExtraNonce2Size;
        return;
    }
    ExtraNonce2 = Convert.ToUInt32(extraNonce2, 16); // set extraNonce2 for the share.
    // check size of miner supplied nTime.
    if (nTimeString.Length != 8)
    {
        Error = ShareError.IncorrectNTimeSize;
        return;
    }
    NTime = Convert.ToUInt32(nTimeString, 16); // read ntime for the share
    // make sure NTime is within range (not before the template, not more
    // than 2 hours in the future).
    if (NTime < job.BlockTemplate.CurTime || NTime > submitTime + 7200)
    {
        Error = ShareError.NTimeOutOfRange;
        return;
    }
    // check size of miner supplied nonce.
    if (nonceString.Length != 8)
    {
        Error = ShareError.IncorrectNonceSize;
        return;
    }
    Nonce = Convert.ToUInt32(nonceString, 16); // nonce supplied by the miner for the share.
    // set job supplied parameters.
    Height = job.BlockTemplate.Height; // associated job's block height.
    ExtraNonce1 = miner.ExtraNonce; // extra nonce1 assigned to miner.
    // check for duplicate shares.
    if (!Job.RegisterShare(this)) // try to register share with the job and see if it's duplicated or not.
    {
        Error = ShareError.DuplicateShare;
        return;
    }
    // construct the coinbase.
    CoinbaseBuffer = Serializers.SerializeCoinbase(Job, ExtraNonce1, ExtraNonce2);
    CoinbaseHash = Coin.Coinbase.Utils.HashCoinbase(CoinbaseBuffer);
    // create the merkle root.
    MerkleRoot = Job.MerkleTree.WithFirst(CoinbaseHash).ReverseBuffer();
    // create the block headers
    HeaderBuffer = Serializers.SerializeHeader(Job, MerkleRoot, NTime, Nonce);
    HeaderHash = Job.HashAlgorithm.Hash(HeaderBuffer);
    HeaderValue = new BigInteger(HeaderHash);
    BlockHash = HeaderBuffer.DoubleDigest().ReverseBuffer();
    // the proof-of-work cycle must verify against the header.
    if (!checkCycle())
    {
        Error = ShareError.IncorrectCycle;
        return;
    }
    var _logger = Log.ForContext<Share>();
    // serialize the cycle: one length byte followed by the edges.
    using (var stream = new MemoryStream())
    {
        stream.WriteByte((byte)Cycle.Length);
        foreach (var edge in Cycle)
        {
            stream.WriteValueU32(edge);
        }
        CycleBuffer = stream.ToArray();
    }
    CycleHash = Job.HashAlgorithm.Hash(CycleBuffer);
    CycleValue = new BigInteger(CycleHash);
    // calculate the share difficulty (cycle hash relative to diff1)
    Difficulty = ((double)new BigRational(AlgorithmManager.Diff1, CycleValue)) * Job.HashAlgorithm.Multiplier;
    // calculate the block difficulty
    BlockDiffAdjusted = Job.Difficulty * Job.HashAlgorithm.Multiplier;
    // check if block candidate
    if (Job.Target >= CycleValue)
    {
        if (Difficulty < 0)
        {
            IsBlockCandidate = false;
            if (miner.Software == MinerSoftware.MeritMiner && miner.SoftwareVersion == new Version("0.1.0"))
            {
                // if we use merit-miner 0.1.0 diff can be negative
                Error = ShareError.NegativeDifficultyShareOutdatedMiner;
            }
            else
            {
                Error = ShareError.NegativeDifficultyShare;
            }
            return;
        }
        IsBlockCandidate = true;
        BlockHex = Serializers.SerializeBlock(Job, HeaderBuffer, CoinbaseBuffer, CycleBuffer, miner.Pool.Config.Coin.Options.IsProofOfStakeHybrid);
    }
    else
    {
        IsBlockCandidate = false;
        // Check if share difficulty reaches miner difficulty.
        var lowDifficulty = Difficulty / miner.Difficulty < 0.99; // share difficulty should be equal or more than miner's target difficulty.
        if (!lowDifficulty) // if share difficulty is high enough to match miner's current difficulty.
        {
            return; // just accept the share.
        }
        if (miner.PreviousDifficulty > 0 && Difficulty >= miner.PreviousDifficulty) // if the difficulty matches miner's previous difficulty before the last vardiff triggered difficulty change
        {
            _logger.Debug("\tprevdiff lower; diff >= prevdiff: {0}::{1}", Difficulty, miner.PreviousDifficulty);
            return; // still accept the share.
        }
        // if the share difficulty can't match miner's current difficulty or previous difficulty
        Error = ShareError.LowDifficultyShare; // then just reject the share with low difficulty share error.
    }
}
/// <summary>
/// Validates and evaluates a submitted share (header-hash proof of work,
/// heavily instrumented with debug logging): checks nonce/ntime/extranonce
/// sizes and ranges, rejects duplicates, rebuilds the
/// coinbase/merkle-root/header, computes the share difficulty from the
/// header hash, and decides whether the share is a block candidate.
/// On any validation failure, <c>Error</c> is set and construction stops early.
/// </summary>
/// <param name="miner">The miner that submitted the share.</param>
/// <param name="jobId">Id of the job the share claims to solve.</param>
/// <param name="job">The job itself; <see langword="null"/> yields <c>ShareError.JobNotFound</c>.</param>
/// <param name="extraNonce2">Hex extraNonce2 supplied by the miner.</param>
/// <param name="nTimeString">Hex nTime supplied by the miner (8 hex chars).</param>
/// <param name="nonceString">Hex nonce supplied by the miner (8 hex chars).</param>
public Share(IStratumMiner miner, UInt64 jobId, IJob job, string extraNonce2, string nTimeString, string nonceString)
{
    _logger.Debug("Entering share constructor: {0}", nonceString);
    Miner = miner;
    JobId = jobId;
    Job = job;
    Error = ShareError.None;
    var submitTime = TimeHelpers.NowInUnixTimestamp(); // time we received the share from the miner.
    if (Job == null)
    {
        _logger.Error("Job is null");
        Error = ShareError.JobNotFound;
        return;
    }
    // NOTE(review): each null check below only LOGS and then falls through to
    // dereference the same argument, so a null still crashes with
    // NullReferenceException — consider setting an Error and returning.
    if (extraNonce2 == null)
    {
        _logger.Error("extraNonce2 is NULL!");
    }
    // check size of miner supplied extraNonce2
    if (extraNonce2.Length / 2 != ExtraNonce.ExpectedExtraNonce2Size)
    {
        _logger.Error("Incorrect Extranonce2 size: {0} while expecting {1}", extraNonce2.Length, ExtraNonce.ExpectedExtraNonce2Size * 2);
        Error = ShareError.IncorrectExtraNonce2Size;
        return;
    }
    ExtraNonce2 = Convert.ToUInt32(extraNonce2, 16); // set extraNonce2 for the share.
    if (nTimeString == null)
    {
        _logger.Error("nTimeString is NULL!");
    }
    // check size of miner supplied nTime.
    if (nTimeString.Length != 8)
    {
        _logger.Error("nTimeString length !=8: {0}", nTimeString.Length);
        Error = ShareError.IncorrectNTimeSize;
        return;
    }
    NTime = Convert.ToUInt32(nTimeString, 16); // read ntime for the share
    // make sure NTime is within range (not before the template, not more
    // than 2 hours in the future).
    if (NTime < job.BlockTemplate.CurTime || NTime > submitTime + 7200)
    {
        _logger.Error("NTime Out Of Range!");
        Error = ShareError.NTimeOutOfRange;
        return;
    }
    if (nonceString == null)
    {
        _logger.Error("nonceString is NULL!");
    }
    // check size of miner supplied nonce.
    if (nonceString.Length != 8)
    {
        _logger.Error("nonceString.Length != 8: {0}", nonceString.Length);
        Error = ShareError.IncorrectNonceSize;
        return;
    }
    Nonce = Convert.ToUInt32(nonceString, 16); // nonce supplied by the miner for the share.
    if (miner == null)
    {
        _logger.Error("miner is NULL!");
    }
    // set job supplied parameters.
    Height = job.BlockTemplate.Height; // associated job's block height.
    ExtraNonce1 = miner.ExtraNonce; // extra nonce1 assigned to miner.
    // check for duplicate shares.
    if (!Job.RegisterShare(this)) // try to register share with the job and see if it's duplicated or not.
    {
        _logger.Error("Duplicate share: {0:l}", nonceString);
        Error = ShareError.DuplicateShare;
        return;
    }
    _logger.Debug("Serialize Share {0}", nonceString);
    // construct the coinbase.
    CoinbaseBuffer = Serializers.SerializeCoinbase(Job, ExtraNonce1, ExtraNonce2);
    CoinbaseHash = Coin.Coinbase.Utils.HashCoinbase(CoinbaseBuffer);
    // create the merkle root.
    MerkleRoot = Job.MerkleTree.WithFirst(CoinbaseHash).ReverseBuffer();
    // create the block headers
    _logger.Debug("Getting Header buffer for Share {0}", nonceString);
    HeaderBuffer = Serializers.SerializeHeader(Job, MerkleRoot, NTime, Nonce);
    HeaderHash = Job.HashAlgorithm.Hash(HeaderBuffer);
    _logger.Debug("Got share {0} of length: {1}\nPOW: {2,64:l}\nTGT: {3,64:l}",
        nonceString,
        HeaderHash.Length,
        HeaderHash.ReverseBytes().ToHexString(),
        Job.Target.ToByteArray().ReverseBytes().ToHexString()
    );
    HeaderValue = new BigInteger(HeaderHash);
    // calculate the share difficulty (header hash relative to diff1)
    Difficulty = ((double)new BigRational(AlgorithmManager.Diff1, HeaderValue)) * Job.HashAlgorithm.Multiplier;
    // calculate the block difficulty
    BlockDiffAdjusted = Job.Difficulty * Job.HashAlgorithm.Multiplier;
    /*
     * Test false pozitive block candidates: negative bigints were the problem
     * byte[] testbytes = new byte[] {
     *     0xf7, 0xdf, 0xed, 0xbd,
     *     0x9a, 0x2b, 0xa5, 0x1f,
     *     0x7b, 0x0d, 0x68, 0x76,
     *     0xbe, 0x1f, 0x18, 0xd6,
     *     0x2d, 0x49, 0x94, 0x91,
     *     0x69, 0x11, 0x39, 0x41,
     *     0xdf, 0x1f, 0x25, 0xdb,
     *     0x9b, 0x4e, 0x97, 0xb7
     * };
     * string teststr = testbytes.ReverseBuffer().ToHexString();
     * HeaderValue = new BigInteger(testbytes);
     */
    // check if block candidate
    if (Job.Target >= HeaderValue)
    //if (true) //for Debug only
    {
        IsBlockCandidate = true;
        BlockHex = Serializers.SerializeBlock(Job, HeaderBuffer, CoinbaseBuffer, miner.Pool.Config.Coin.Options.IsProofOfStakeHybrid);
        BlockHash = HeaderBuffer.DoubleDigest().ReverseBuffer();
        try
        {
            _logger.Debug("Job.Target is greater than or equal HeaderValue(POW-SCRYPT)!!!:\n{9}\n{10}\n\n" +
                "Big-Endian values for Block Header:\n" +
                "job.BlockTemplate.Version={0}\n" +
                "job.PreviousBlockHash={1}\n" +
                "MerkleRoot={2}\n" +
                "NTime={3}\n" +
                "job.EncodedDifficulty={4}\n" +
                "Nonce={5}\n" +
                "==============\n" +
                "result={6}\n\n" +
                "Big-Endian:\n" +
                "BlockHex={7}\n" +
                "BlockHash(2xSHA256)={8}\n",
                job.BlockTemplate.Version,
                BitConverter.ToString(job.PreviousBlockHash.HexToByteArray()).Replace("-", string.Empty),
                BitConverter.ToString(MerkleRoot).Replace("-", string.Empty),
                NTime,
                job.EncodedDifficulty,
                Nonce,
                BitConverter.ToString(HeaderBuffer).Replace("-", string.Empty),
                BlockHex,
                BitConverter.ToString(BlockHash).Replace("-", string.Empty),
                Job.Target.ToByteArray().ReverseBuffer().ToHexString(),
                HeaderValue.ToByteArray().ReverseBuffer().ToHexString()
            );
        }
        catch (Exception e)
        {
            _logger.Error(e, "Something has happened while logging");
        }
    }
    else
    {
        IsBlockCandidate = false;
        BlockHash = HeaderBuffer.DoubleDigest().ReverseBuffer();
        // Check if share difficulty reaches miner difficulty.
        var lowDifficulty = Difficulty / miner.Difficulty < 0.99; // share difficulty should be equal or more than miner's target difficulty.
        if (!lowDifficulty) // if share difficulty is high enough to match miner's current difficulty.
        {
            return; // just accept the share.
        }
        if (Difficulty >= miner.PreviousDifficulty) // if the difficulty matches miner's previous difficulty before the last vardiff triggered difficulty change
        {
            return; // still accept the share.
        }
        // if the share difficulty can't match miner's current difficulty or previous difficulty
        Error = ShareError.LowDifficultyShare; // then just reject the share with low difficulty share error.
    }
}
/// <summary>
/// Intentionally a no-op: this override suppresses the base class's header
/// writing so no header bytes are emitted to <paramref name="output"/>.
/// </summary>
/// <param name="output">The destination stream (unused).</param>
/// <param name="headerBuffer">The header that would have been written (unused).</param>
/// <param name="modifications">Pending message modifications (unused).</param>
protected override void WriteHeader(Stream output, HeaderBuffer headerBuffer, IEnumerable<MessageBuffer.Modification> modifications)
{
    // do nothing
}
/// <summary>
/// Parses the FilesInfo section into <see cref="HeaderEntry"/> records:
/// first the property blocks (names, empty-stream/empty-file flags; unknown
/// blocks skipped by size), then a second pass that assigns each entry its
/// directory flag, size, and CRC from the stream info.
/// </summary>
private static HeaderEntry[] ReadFilesInfo(StreamsInfo info, HeaderBuffer headerBytes)
{
    var entries = headerBytes.CreateArray<HeaderEntry>();
    int numEmptyStreams = 0;
    while (true)
    {
        var type = headerBytes.ReadProperty();
        if (type == HeaderProperty.kEnd)
        {
            break;
        }
        // Size prefix lets unknown property blocks be skipped wholesale.
        var size = (int)headerBytes.ReadEncodedInt64();
        switch (type)
        {
            case HeaderProperty.kName:
            {
                if (headerBytes.ReadByte() != 0)
                {
                    throw new InvalidFormatException("Cannot be external");
                }
                entries.ForEach(f => f.Name = headerBytes.ReadName());
                break;
            }
            case HeaderProperty.kEmptyStream:
            {
                info.EmptyStreamFlags = headerBytes.ReadBoolFlags(entries.Length);
                numEmptyStreams = info.EmptyStreamFlags.Where(x => x).Count();
                break;
            }
            case HeaderProperty.kEmptyFile: //just read bytes
            case HeaderProperty.kAnti:
            {
                // NOTE(review): kAnti flags overwrite EmptyFileFlags — verify
                // anti-file entries are meant to share this storage.
                info.EmptyFileFlags = headerBytes.ReadBoolFlags(numEmptyStreams);
                break;
            }
            default:
            {
                // Skip unhandled property payload.
                headerBytes.ReadBytes(size);
                break;
            }
        }
    }
    // Assign stream-derived attributes: entries with a stream consume the
    // next unpacked stream (size/CRC); streamless entries are directories
    // unless flagged as empty files.
    int emptyFileIndex = 0;
    int sizeIndex = 0;
    for (int i = 0; i < entries.Length; i++)
    {
        HeaderEntry file = entries[i];
        file.IsAnti = false;
        if (info.EmptyStreamFlags == null)
        {
            file.HasStream = true;
        }
        else
        {
            file.HasStream = !info.EmptyStreamFlags[i];
        }
        if (file.HasStream)
        {
            file.IsDirectory = false;
            file.Size = info.UnpackedStreams[sizeIndex].UnpackedSize;
            file.FileCRC = info.UnpackedStreams[sizeIndex].Digest;
            sizeIndex++;
        }
        else
        {
            if (info.EmptyFileFlags == null)
            {
                file.IsDirectory = true;
            }
            else
            {
                file.IsDirectory = !info.EmptyFileFlags[emptyFileIndex];
            }
            emptyFileIndex++;
            file.Size = 0;
        }
    }
    return entries;
}
/// <summary>
/// Parses one folder: the coder list (method ids, stream counts, optional
/// properties, skipped alternative methods), the bind pairs, and the packed
/// stream indices. All reads advance the shared buffer sequentially.
/// </summary>
private Folder ReadFolder(HeaderBuffer headerBytes)
{
    Folder folder = new Folder(this);
    folder.Coders = headerBytes.CreateArray<CodersInfo>();
    int numInStreams = 0;
    int numOutStreams = 0;
    foreach (var coder in folder.Coders)
    {
        // mainByte: low nibble = method-id byte count; 0x10 = explicit stream
        // counts; 0x20 = properties attached; 0x80 = alternative methods follow.
        byte mainByte = headerBytes.ReadByte();
        int size = (byte)(mainByte & 0xF);
        coder.Method = headerBytes.ReadBytes(size);
        if ((mainByte & 0x10) != 0)
        {
            coder.NumberOfInStreams = headerBytes.ReadEncodedInt64();
            coder.NumberOfOutStreams = headerBytes.ReadEncodedInt64();
        }
        else
        {
            // Simple coder defaults to one input / one output stream.
            coder.NumberOfInStreams = 1;
            coder.NumberOfOutStreams = 1;
        }
        if ((mainByte & 0x20) != 0)
        {
            ulong propertiesSize = headerBytes.ReadEncodedInt64();
            coder.Properties = headerBytes.ReadBytes((int)propertiesSize);
        }
        // Alternative method entries are read only to keep the buffer in
        // sync; their values are discarded.
        while ((mainByte & 0x80) != 0)
        {
            mainByte = headerBytes.ReadByte();
            headerBytes.ReadBytes(mainByte & 0xF);
            if ((mainByte & 0x10) != 0)
            {
                headerBytes.ReadEncodedInt64();
                headerBytes.ReadEncodedInt64();
            }
            if ((mainByte & 0x20) != 0)
            {
                ulong propertiesSize = headerBytes.ReadEncodedInt64();
                headerBytes.ReadBytes((int)propertiesSize);
            }
        }
        numInStreams += (int)coder.NumberOfInStreams;
        numOutStreams += (int)coder.NumberOfOutStreams;
    }
    // A folder with N coder outputs has N-1 bind pairs chaining them together.
    int numBindPairs = numOutStreams - 1;
    folder.BindPairs = new BindPair[numBindPairs];
    for (int i = 0; i < numBindPairs; i++)
    {
        BindPair bindpair = new BindPair();
        folder.BindPairs[i] = bindpair;
        bindpair.InIndex = headerBytes.ReadEncodedInt64();
        bindpair.OutIndex = headerBytes.ReadEncodedInt64();
    }
    int numPackedStreams = numInStreams - numBindPairs;
    folder.PackedStreamIndices = new ulong[numPackedStreams];
    if (numPackedStreams == 1)
    {
        // Single packed stream: use the first input index not claimed by a bind pair.
        uint pi = 0;
        for (uint j = 0; j < numInStreams; j++)
        {
            if (!folder.BindPairs.Where(x => x.InIndex == j).Any())
            {
                folder.PackedStreamIndices[pi++] = j;
                break;
            }
        }
    }
    else
    {
        // Otherwise the indices are stored explicitly in the header.
        for (uint i = 0; i < numPackedStreams; i++)
        {
            folder.PackedStreamIndices[i] = headerBytes.ReadEncodedInt64();
        }
    }
    return(folder);
}
/// <summary>
/// Reads a complete streams-info block, routing each property (pack info,
/// unpack info, sub-streams info) to its section reader until kEnd.
/// </summary>
/// <exception cref="InvalidFormatException">Thrown on an unrecognized property id.</exception>
private StreamsInfo ReadPackedStreams(HeaderBuffer headerBytes)
{
    var result = new StreamsInfo();
    bool done = false;
    while (!done)
    {
        var property = headerBytes.ReadProperty();
        switch (property)
        {
            case HeaderProperty.kPackInfo:
                ReadPackInfo(result, headerBytes);
                break;
            case HeaderProperty.kUnPackInfo:
                ReadUnPackInfo(result, headerBytes);
                break;
            case HeaderProperty.kSubStreamsInfo:
                ReadSubStreamsInfo(result, headerBytes);
                break;
            case HeaderProperty.kEnd:
                done = true;
                break;
            default:
                throw new InvalidFormatException(property.ToString());
        }
    }
    return result;
}
/// <summary>
/// Parses the PackInfo section: pack position, packed-stream count, the
/// mandatory kSize list, and an optional trailing CRC list. Buffer reads
/// are strictly sequential.
/// </summary>
private static void ReadPackInfo(StreamsInfo info, HeaderBuffer headerBytes)
{
    info.PackPosition = headerBytes.ReadEncodedInt64();
    int count = (int)headerBytes.ReadEncodedInt64();
    info.PackedStreams = new PackedStreamInfo[count];
    for (int i = 0; i < count; i++)
    {
        info.PackedStreams[i] = new PackedStreamInfo();
    }
    // kSize must follow the count.
    var prop = headerBytes.ReadProperty();
    if (prop != HeaderProperty.kSize)
    {
        throw new InvalidFormatException("Expected Size Property");
    }
    for (int i = 0; i < count; i++)
    {
        info.PackedStreams[i].PackedSize = headerBytes.ReadEncodedInt64();
    }
    // NOTE(review): this loop consumes one property id per stream and reads
    // the CRC as an encoded int64; the 7z format writes kCRC once followed by
    // UInt32 digests — confirm against HeaderBuffer's encoding for count > 1.
    for (int i = 0; i < count; i++)
    {
        prop = headerBytes.ReadProperty();
        if (prop != HeaderProperty.kCRC)
        {
            break;
        }
        info.PackedStreams[i].Crc = headerBytes.ReadEncodedInt64();
    }
}
/// <summary>
/// Parses the UnPackInfo section: folder definitions, per-output-stream
/// unpacked sizes, and an optional folder CRC table. Buffer reads are
/// strictly sequential.
/// </summary>
private void ReadUnPackInfo(StreamsInfo info, HeaderBuffer headerBytes)
{
    // NOTE(review): the leading property (presumably kFolder) is read but not
    // validated — confirm whether that check was intentionally omitted.
    var prop = headerBytes.ReadProperty();
    int count = (int)headerBytes.ReadEncodedInt64();
    info.Folders = new Folder[count];
    // External (out-of-band) folder data is not supported.
    if (headerBytes.ReadByte() != 0)
    {
        throw new NotSupportedException("External flag");
    }
    for (int i = 0; i < count; i++)
    {
        info.Folders[i] = ReadFolder(headerBytes);
    }
    prop = headerBytes.ReadProperty();
    if (prop != HeaderProperty.kCodersUnPackSize)
    {
        throw new InvalidFormatException("Expected Size Property");
    }
    // One unpacked size per coder output stream in each folder.
    foreach (var folder in info.Folders)
    {
        int numOutStreams = folder.Coders.Aggregate(0, (sum, coder) => sum + (int)coder.NumberOfOutStreams);
        folder.UnpackedStreamSizes = new ulong[numOutStreams];
        for (uint j = 0; j < numOutStreams; j++)
        {
            folder.UnpackedStreamSizes[j] = headerBytes.ReadEncodedInt64();
        }
    }
    // CRC table is optional; returning early when absent skips the kEnd check.
    prop = headerBytes.ReadProperty();
    if (prop != HeaderProperty.kCRC)
    {
        return;
    }
    uint?[] crcs;
    UnPackDigests(headerBytes, info.Folders.Length, out crcs);
    for (int i = 0; i < info.Folders.Length; i++)
    {
        Folder folder = info.Folders[i];
        folder.UnpackCRC = crcs[i];
    }
    prop = headerBytes.ReadProperty();
    if (prop != HeaderProperty.kEnd)
    {
        throw new InvalidFormatException("Expected End property");
    }
}
/// <summary>
/// Parses the SubStreamsInfo section: how many unpacked streams each folder
/// contains, their individual unpacked sizes, and their CRC digests,
/// terminated by kEnd.
/// </summary>
/// <param name="info">Streams info whose <c>Folders</c> were already read; receives <c>UnpackedStreams</c>.</param>
/// <param name="headerBytes">Header reader positioned just past the kSubStreamsInfo marker.</param>
/// <exception cref="InvalidFormatException">Thrown on an unrecognized marker in the count phase.</exception>
private static void ReadSubStreamsInfo(StreamsInfo info, HeaderBuffer headerBytes)
{
    // Default assumption: one unpacked stream per folder (used when the
    // kNumUnPackStream marker is absent).
    info.UnpackedStreams = new List <UnpackedStreamInfo>();
    foreach (var folder in info.Folders)
    {
        folder.UnpackedStreams = new UnpackedStreamInfo[1];
        folder.UnpackedStreams[0] = new UnpackedStreamInfo();
        info.UnpackedStreams.Add(folder.UnpackedStreams[0]);
    }
    // Scan markers until a data-bearing one (kSize/kCRC) or the terminator is
    // found; `prop` carries that marker into the phases below.
    bool loop = true;
    var prop = HeaderProperty.kEnd;
    while (loop)
    {
        prop = headerBytes.ReadProperty();
        switch (prop)
        {
            case HeaderProperty.kNumUnPackStream:
            {
                // Explicit per-folder stream counts replace the defaults.
                info.UnpackedStreams.Clear();
                foreach (var folder in info.Folders)
                {
                    var numStreams = (int)headerBytes.ReadEncodedInt64();
                    folder.UnpackedStreams = new UnpackedStreamInfo[numStreams];
                    folder.UnpackedStreams.Initialize(() => new UnpackedStreamInfo());
                    info.UnpackedStreams.AddRange(folder.UnpackedStreams);
                }
            }
                break;
            case HeaderProperty.kCRC:
            case HeaderProperty.kSize:
            case HeaderProperty.kEnd:
            {
                loop = false;
            }
                break;
            default:
                throw new InvalidFormatException(prop.ToString());
        }
    }
    // Size phase: each folder lists all but its last stream's size
    // explicitly; the last stream's size is the folder's total unpacked size
    // minus the sum of the others. `si` walks info.UnpackedStreams.
    int si = 0;
    for (int i = 0; i < info.Folders.Length; i++)
    {
        var folder = info.Folders[i];
        ulong sum = 0;
        if (folder.UnpackedStreams.Length == 0)
        {
            continue;
        }
        if (prop == HeaderProperty.kSize)
        {
            for (int j = 1; j < folder.UnpackedStreams.Length; j++)
            {
                ulong size = headerBytes.ReadEncodedInt64();
                info.UnpackedStreams[si].UnpackedSize = size;
                sum += size;
                si++;
            }
        }
        info.UnpackedStreams[si].UnpackedSize = folder.GetUnpackSize() - sum;
        si++;
    }
    // After consuming sizes, advance to the next marker for the digest phase.
    if (prop == HeaderProperty.kSize)
    {
        prop = headerBytes.ReadProperty();
    }
    // Digests are stored only for streams whose CRC isn't already known:
    // a single-stream folder with a folder-level CRC reuses that value.
    int numDigests = 0;
    foreach (var folder in info.Folders)
    {
        if (folder.UnpackedStreams.Length != 1 || !folder.UnpackCRC.HasValue)
        {
            numDigests += folder.UnpackedStreams.Length;
        }
    }
    si = 0;
    while (true)
    {
        if (prop == HeaderProperty.kCRC)
        {
            int digestIndex = 0;
            uint?[] digests2;
            UnPackDigests(headerBytes, numDigests, out digests2);
            for (uint i = 0; i < info.Folders.Length; i++)
            {
                Folder folder = info.Folders[i];
                if (folder.UnpackedStreams.Length == 1 && folder.UnpackCRC.HasValue)
                {
                    // Single-stream folder: inherit the folder CRC directly.
                    info.UnpackedStreams[si].Digest = folder.UnpackCRC;
                    si++;
                }
                else
                {
                    // Multi-stream (or CRC-less) folder: take CRCs from the
                    // freshly read digest table.
                    for (uint j = 0; j < folder.UnpackedStreams.Length; j++, digestIndex++)
                    {
                        info.UnpackedStreams[si].Digest = digests2[digestIndex];
                        si++;
                    }
                }
            }
        }
        else if (prop == HeaderProperty.kEnd)
        {
            return;
        }
        // NOTE(review): markers other than kCRC/kEnd are skipped here without
        // consuming their payload — confirm no such markers occur in practice,
        // otherwise the parse would desynchronize.
        prop = headerBytes.ReadProperty();
    }
}
/// <summary>
/// Creates a share from a miner's submission, validating it against the
/// associated job and computing block candidacy and share difficulty.
/// Validation failures are reported through <c>Error</c> rather than thrown.
/// </summary>
/// <param name="miner">The stratum miner that submitted the share.</param>
/// <param name="jobId">Identifier of the job the share targets.</param>
/// <param name="job">The job the share was submitted for; null when the job is unknown/expired.</param>
/// <param name="extraNonce2">Miner-supplied extraNonce2 as a hex string.</param>
/// <param name="nTimeString">Miner-supplied nTime as an 8-character hex string.</param>
/// <param name="nSolution">Miner-supplied solution as a hex string.</param>
public Share(IStratumMiner miner, UInt64 jobId, IJob job, string extraNonce2, string nTimeString, string nSolution)
{
    Miner = miner;
    JobId = jobId;
    Job = job;
    Error = ShareError.None;

    var submitTime = TimeHelpers.NowInUnixTimestamp(); // time we received the share from the miner.

    if (Job == null)
    {
        Error = ShareError.JobNotFound;
        return;
    }

    // check size of miner supplied extraNonce2 (hex string: 2 chars per byte)
    if (extraNonce2.Length / 2 != ExtraNonce.ExpectedExtraNonce2Size)
    {
        Error = ShareError.IncorrectExtraNonce2Size;
        return;
    }
    ExtraNonce2 = extraNonce2; // set extraNonce2 for the share.

    // check size of miner supplied nTime.
    if (nTimeString.Length != 8)
    {
        Error = ShareError.IncorrectNTimeSize;
        return;
    }
    // read nTime for the share (byte-reversed hex).
    // NOTE(review): Convert.ToUInt32 throws on non-hex input instead of
    // setting an Error — confirm the caller handles that.
    NTime = Convert.ToUInt32(nTimeString.HexToByteArray().ReverseBuffer().ToHexString(), 16);

    // make sure NTime is within range: not older than the block template,
    // not more than 2 hours (7200 s) in the future.
    if (NTime < job.BlockTemplate.CurTime || NTime > submitTime + 7200)
    {
        Error = ShareError.NTimeOutOfRange;
        return;
    }

    // set job supplied parameters.
    Height = job.BlockTemplate.Height; // associated job's block height.
    ExtraNonce1 = miner.ExtraNonce; // extra nonce1 assigned to miner.

    // check for duplicate shares.
    if (!Job.RegisterShare(this)) // try to register share with the job and see if it's duplicated or not.
    {
        Error = ShareError.DuplicateShare;
        return;
    }

    // construct the coinbase.
    CoinbaseBuffer = Serializers.SerializeCoinbase(Job, ExtraNonce1);
    CoinbaseHash = Coin.Coinbase.Utils.HashCoinbase(CoinbaseBuffer);

    // assemble the nonce: byte-reversed extraNonce2 followed by big-endian extraNonce1.
    string nonceString = extraNonce2.HexToByteArray().ReverseBuffer().ToHexString() + ExtraNonce1.BigEndian().ToString("x8");
    byte[] nonce = nonceString.HexToByteArray();

    // create the merkle root.
    MerkleRoot = Job.MerkleTree.WithFirst(CoinbaseHash).ReverseBuffer();

    // create the block headers
    {
        HeaderBuffer = Serializers.SerializeHeader(Job, MerkleRoot, nonce, NTime, nSolution.HexToByteArray().ReverseBuffer());
        HeaderHash = Job.HashAlgorithm.Hash(HeaderBuffer);
    }
    HeaderValue = new BigInteger(HeaderHash);

    // calculate the share difficulty
    Difficulty = ((double)new BigRational(AlgorithmManager.Diff1, HeaderValue)) * Job.HashAlgorithm.Multiplier;

    // calculate the block difficulty
    BlockDiffAdjusted = Job.Difficulty * Job.HashAlgorithm.Multiplier;

    // check if block candidate (header hash meets the network target)
    if (Job.Target >= HeaderValue)
    {
        IsBlockCandidate = true;
        BlockHex = Serializers.SerializeBlock(Job, HeaderBuffer, CoinbaseBuffer, miner.Pool.Config.Coin.Options.IsProofOfStakeHybrid);
        BlockHash = HeaderBuffer.DoubleDigest().ReverseBuffer();
    }
    else
    {
        IsBlockCandidate = false;
        BlockHash = HeaderBuffer.DoubleDigest().ReverseBuffer();

        // Check if share difficulty reaches miner difficulty.
        var lowDifficulty = Difficulty / miner.Difficulty < 0.99; // share difficulty should be equal or more than miner's target difficulty.

        if (!lowDifficulty) // if share difficulty is high enough to match miner's current difficulty.
        {
            return; // just accept the share.
        }

        if (Difficulty >= miner.PreviousDifficulty) // if the difficulty matches miner's previous difficulty before the last vardiff triggered difficulty change
        {
            return; // still accept the share.
        }

        // if the share difficulty can't match miner's current difficulty or previous difficulty
        Error = ShareError.LowDifficultyShare; // then just reject the share with a low-difficulty share error.
    }
}
/// <summary>
/// Override hook for scanning a message start line from the header buffer.
/// Not implemented in this type — always throws.
/// </summary>
/// <param name="headerBuffer">Buffer holding the header bytes to scan.</param>
/// <exception cref="NotImplementedException">Always thrown.</exception>
protected override void ScanStartLine(HeaderBuffer headerBuffer)
{
    throw new NotImplementedException();
}