/// <summary>
/// Reads a NUL-terminated string from a MemoryMappedViewAccessor.
/// </summary>
/// <remarks>
/// The accessor's capacity may exceed Int32.MaxValue; only the string's own
/// byte length is limited to Int32.MaxValue (enforced by the checked block).
/// </remarks>
/// <param name="accessor">Source view accessor.</param>
/// <param name="offset">Byte offset at which the string starts.</param>
/// <param name="enc">Character encoding used to decode the bytes.</param>
/// <param name="buffSize">Initial size of the internal read buffer.</param>
/// <returns>The decoded string (the terminating NUL is not included).</returns>
public static string GetString(MemoryMappedViewAccessor accessor, long offset, Encoding enc, int buffSize = 128)
{
    byte[] buff = new byte[buffSize];                    // buffer array to reduce the number of I/O calls
    accessor.ReadArray<byte>(offset, buff, 0, buffSize); // initial read

    // Count bytes up to (but not including) the terminating NUL.
    int byteCount = 0;
    while (buff[byteCount] != Nul) // seek until the NUL terminator is reached
    {
        byteCount++;
        if (byteCount == buffSize) // hit the end of the buffer array
        {
            // Grow the buffer and read the next chunk from the accessor.
            checked { buffSize *= 2; } // throws OverflowException past Int32.MaxValue
            byte[] newBuff = new byte[buffSize];
            Buffer.BlockCopy(buff, 0, newBuff, 0, byteCount);
            accessor.ReadArray<byte>(offset + byteCount, newBuff, byteCount, buffSize - byteCount);
            buff = newBuff;
        }
    }

    // Decode the collected bytes into a string.
    return enc.GetString(buff, 0, byteCount);
}
/// <summary>
/// Rsync-style scan of one memory-mapped chunk: slides a window one byte at a
/// time over the chunk, looks each Adler32 weak checksum up in
/// <paramref name="remoteHashes"/> and, when the strong hash confirms the hit,
/// emits a matching FileBlock; the spans in between are emitted as
/// non-matching FileBlocks.
/// </summary>
/// <param name="blockNumber">Chunk index (not read inside this method).</param>
/// <param name="cancellationToken">Cooperative cancellation; the method returns null when cancelled.</param>
/// <param name="remoteHashes">Weak checksum -> remote block hash lookup.</param>
/// <param name="chunkMap">View accessor over this chunk's bytes.</param>
/// <param name="chunkStartOffset">Absolute offset of this chunk within the whole file.</param>
/// <param name="chunkLength">Number of bytes in this chunk.</param>
/// <param name="config">Supplies BlockLength, the fixed comparison block size.</param>
/// <returns>List of matching/non-matching blocks, or null if cancelled.</returns>
public static List<FileBlock> ScanFileChunk(int blockNumber, CancellationToken cancellationToken, ConcurrentDictionary<long, FileHash> remoteHashes, MemoryMappedViewAccessor chunkMap, long chunkStartOffset, long chunkLength, Config config)
{
    var bytesToRead = config.BlockLength;
    var outList = new List<FileBlock>();
    long readStartOffset = 0;
    var nonMatchStartOffset = readStartOffset;
    var windowChecksum = new Adler32();
    while (readStartOffset < chunkLength)
    {
        if (cancellationToken.IsCancellationRequested)
            return null;
        var buffer = new byte[bytesToRead];
        if (readStartOffset + config.BlockLength > chunkLength)
        {
            // Final partial window: shrink the read to the bytes left in the chunk.
            bytesToRead = checked((int)(chunkLength - readStartOffset));
            buffer = new byte[bytesToRead];
        }
        // NOTE(review): the (int) cast truncates for chunks larger than 2 GB —
        // confirm chunkLength is bounded by the caller.
        chunkMap.ReadArray((int)readStartOffset, buffer, 0, buffer.Length);
        if (readStartOffset == 0)
        {
            // First position: checksum the whole window.
            windowChecksum.Update(buffer);
        }
        else
        {
            // Later positions: feed only the byte that just entered the window.
            // NOTE(review): nothing here removes the byte that slid out — verify
            // that Adler32.Update implements the rolling removal internally.
            windowChecksum.Update(buffer, buffer.Length - 1, 1);
        }
        var weakCheckSum = windowChecksum.Value;
        FileHash match;
        remoteHashes.TryGetValue(weakCheckSum, out match);
        if (match != null)
        {
            // Weak hit: confirm with the strong hash before declaring a match.
            var strongHash = CalculateStrongHash(buffer);
            if (strongHash.SequenceEqual(match.StrongHash))
            {
                if (readStartOffset > 0)
                {
                    // Emit the non-matching span that precedes this match.
                    long nonMatchEndOffset = readStartOffset - 1;
                    var nonMatchingChunk = new FileBlock
                    {
                        IsMatch = false,
                        BlockLength = nonMatchEndOffset - nonMatchStartOffset,
                        // NOTE(review): summing two chunk-relative offsets looks
                        // suspicious for a source offset — confirm against how
                        // callers consume SourceOffset on non-matching blocks.
                        SourceOffset = nonMatchStartOffset + readStartOffset
                    };
                    outList.Add(nonMatchingChunk);
                    nonMatchStartOffset = readStartOffset + config.BlockLength;
                }
                var matchingChunk = new FileBlock
                {
                    IsMatch = true,
                    DestinationOffset = chunkStartOffset + readStartOffset,
                    SourceOffset = match.Offset,
                    BlockLength = config.BlockLength
                };
                outList.Add(matchingChunk);
                windowChecksum.Reset();
            }
        }
        // NOTE(review): the window advances one byte even after a confirmed block
        // match — expected readStartOffset += config.BlockLength there? Confirm.
        readStartOffset += 1;
        if (readStartOffset % 100 == 0)
        {
            // Progress callback every 100 bytes scanned (payload is a no-op).
            _progressReporter.ReportProgress(() => { });
        }
    }
    // Emit whatever trails the last match as a final non-matching block.
    // NOTE(review): trailing spans of exactly 1 byte are dropped by this
    // "> 1" test — confirm that is intended.
    if (chunkLength - nonMatchStartOffset > 1)
    {
        var nonMatchingChunk = new FileBlock()
        {
            IsMatch = false,
            BlockLength = chunkLength - nonMatchStartOffset,
            SourceOffset = nonMatchStartOffset + readStartOffset
        };
        outList.Add(nonMatchingChunk);
    }
    return outList;
}
/// <summary>
/// Reads one fixed-size record from the accessor, decodes it as a
/// NUL-terminated JSON string and posts it to the UI thread; rows that are
/// empty or fail to parse are recorded in _MissingItems instead.
/// </summary>
/// <param name="accessor">View over the memory-mapped file.</param>
/// <param name="row">Zero-based record index within the file.</param>
private void readLineAndProcess(MemoryMappedViewAccessor accessor, int row)
{
    // Records are _BlockSize bytes each and start after a 12-byte header.
    accessor.ReadArray<byte>(12 + (row * _BlockSize), _Buffer, 0, _BlockSize);

    // A leading zero byte marks an empty slot.
    if (_Buffer[0] == 0)
    {
        _MissingItems.Add(row);
        return;
    }

    // Decode the buffer and cut it at the first NUL terminator, if present.
    var text = System.Text.Encoding.Default.GetString(_Buffer);
    var terminator = text.IndexOf('\0');
    if (terminator >= 0)
    {
        text = text.Remove(terminator);
    }

    dynamic payload;
    if (!SimpleJson.TryDeserializeObject(text, out payload))
    {
        _MissingItems.Add(row);
        return;
    }

    // Marshal the parsed object onto the UI thread.
    Action dispatch = () => NewRow(payload);
    BeginInvoke(dispatch);
}
/// <summary>
/// Scans every slot of the shared-memory cache map and collects the entries
/// whose tick stamp is newer than <paramref name="lastTicks"/>.
/// </summary>
/// <param name="lastTicks">Tick stamp of the previous scan; only newer entries are read.</param>
/// <param name="accessor">View over the shared-memory region.</param>
/// <param name="mapInfo">Map header (not read here; part of the caller-facing signature).</param>
/// <param name="result">Receives the deserialized entries that were newer.</param>
/// <returns>The highest tick stamp seen, or <paramref name="lastTicks"/> if no entry was newer.</returns>
private static long InnerReadCacheNotifyData(long lastTicks, MemoryMappedViewAccessor accessor, CacheNotifyDataMapInfo mapInfo, List<CacheNotifyData> result)
{
    int headerSize = Marshal.SizeOf(typeof(CacheNotifyDataMapItem));
    long newestTicks = lastTicks;

    // Slots start immediately after the map-info header.
    long slotOffset = Marshal.SizeOf(typeof(CacheNotifyDataMapInfo));
    for (int slot = 0; slot < CacheNotifyDataMapInfo.CacheDataItemCount; slot++)
    {
        CacheNotifyDataMapItem header;
        accessor.Read(slotOffset, out header);

        if (header.Ticks > lastTicks)
        {
            if (header.Ticks > newestTicks)
                newestTicks = header.Ticks;

            // The payload bytes follow the slot's fixed-size header.
            byte[] payload = new byte[header.Size];
            accessor.ReadArray(slotOffset + headerSize, payload, 0, (int)header.Size);
            result.Add(CacheNotifyData.FromBuffer(payload));
        }

        // Advance to the next slot: header plus fixed data block.
        slotOffset += headerSize + CacheNotifyDataMapInfo.CacheDataBlockSize;
    }

    return newestTicks;
}
/// <summary>
/// Searches the still-unmatched byte ranges of the local file for blocks that
/// match an existing signature of size <paramref name="sigSize"/>, sliding a
/// rolling signature one byte at a time. Matches are appended to
/// <paramref name="signaturesToReuse"/> (with their offset in the new file);
/// bytes that match nothing are returned as new remaining ranges.
/// </summary>
/// <param name="sig">The complete remote signature set.</param>
/// <param name="accessor">View accessor over the local file bytes.</param>
/// <param name="remainingByteList">Byte ranges not yet covered by any signature.</param>
/// <param name="sigSize">Signature block size, used as the window size.</param>
/// <param name="fileSize">Total file size (not read inside this method).</param>
/// <param name="signaturesToReuse">Receives matched signatures, re-offset for the new file.</param>
/// <returns>The byte ranges still unmatched after this pass.</returns>
private static List<RemainingBytes> SearchLocalFileForSignaturesBasedOnSize(CompleteSignature sig, MemoryMappedViewAccessor accessor, List<RemainingBytes> remainingByteList, int sigSize, long fileSize, List<BlockSignature> signaturesToReuse)
{
    var windowSize = sigSize;
    var newRemainingBytes = new List<RemainingBytes>();
    var sigDict = GenerateBlockDict(sig);
    var buffer = new byte[sigSize];
    var offset = 0L;
    foreach (var byteRange in remainingByteList)
    {
        var byteRangeSize = byteRange.EndOffset - byteRange.BeginOffset + 1;
        // if byte range is large... and signature size is small (what values???) then dont check.
        // We could end up with LOADS of tiny sig matching where ideally we'd use a larger new sig block.
        // The exception is when the sig size exactly matches the byterange size... then we allow it to check if the sig will match
        // in practice this allows small (1-2 byte sigs) to match the byte ranges.
        // Precedence: reads as (byteRangeSize > 1000 && sigSize > 100) || byteRangeSize == sigSize.
        if (byteRangeSize > 1000 && sigSize > 100 || byteRangeSize == sigSize)
        {
            // if byteRange is smaller than the key we're using, then there cannot be a match so add
            // it to the newRemainingBytes list
            if (byteRange.EndOffset - byteRange.BeginOffset + 1 >= windowSize)
            {
                // search this byterange for all possible keys.
                offset = byteRange.BeginOffset;
                var generateFreshSig = true;
                var bytesRead = 0L;
                RollingSignature? currentSig = null;
                long oldEndOffset = byteRange.BeginOffset; // start of the span not yet attributed to any match
                do
                {
                    if (generateFreshSig)
                    {
                        // Compute the window's signature from scratch (first window, or just after a match).
                        bytesRead = accessor.ReadArray(offset, buffer, 0, windowSize);
                        currentSig = CreateRollingSignature(buffer, (int)bytesRead);
                    }
                    else
                    {
                        // roll existing sig: remove the byte that left the window, add the one that entered.
                        var previousByte = accessor.ReadByte(offset - 1);
                        var nextByte = accessor.ReadByte(offset + windowSize - 1); // Need bounds checking?
                        currentSig = RollSignature(windowSize, previousByte, nextByte, currentSig.Value);
                    }
                    if (sigDict.ContainsKey(currentSig.Value))
                    {
                        // populate buffer. Potential waste of IO here.
                        bytesRead = accessor.ReadArray(offset, buffer, 0, windowSize);
                        // check md5 sig.
                        var md5Sig = CreateMD5Signature(buffer, (int)bytesRead);
                        var sigsForCurrentRollingSig = sigDict[currentSig.Value];
                        // have a matching md5? If so, we have a match.
                        var matchingSigs = sigsForCurrentRollingSig.Where(s => s.MD5Signature.SequenceEqual(md5Sig))
                            .Select(n => n)
                            .ToList();
                        if (matchingSigs.Any())
                        {
                            // need to add any byte ranges between oldEndOffset and offset as bytes remaining (ie not part of any sig).
                            // This is for catching any bytes BEFORE the sig match we've just found.
                            if (oldEndOffset != offset)
                            {
                                newRemainingBytes.Add(new RemainingBytes()
                                {
                                    BeginOffset = oldEndOffset,
                                    EndOffset = offset - 1
                                });
                            }
                            var matchingSig = matchingSigs[0];
                            // when storing which existing sig to use, make sure we know the offset in the NEW file it should appear.
                            matchingSig.Offset = offset;
                            signaturesToReuse.Add(matchingSig);
                            // Jump past the matched block and rebuild the signature fresh there.
                            offset += windowSize;
                            generateFreshSig = true;
                            oldEndOffset = offset;
                        }
                        else
                        {
                            // Weak hit but MD5 mismatch: slide one byte and keep rolling.
                            offset++;
                            generateFreshSig = false;
                        }
                    }
                    else
                    {
                        // no match. Just increment offset and generate rolling sig.
                        offset++;
                        generateFreshSig = false;
                    }
                } while (offset + windowSize <= byteRange.EndOffset + 1);
                // add remaining bytes to newRemainingBytes list
                // Possible to have single byte at end with offset at very last byte.
                if (offset <= byteRange.EndOffset)
                {
                    newRemainingBytes.Add(new RemainingBytes()
                    {
                        BeginOffset = oldEndOffset,
                        EndOffset = byteRange.EndOffset
                    });
                }
                // if last sig
            }
            else
            {
                // Range smaller than the window: cannot possibly match; carry it forward.
                newRemainingBytes.Add(byteRange);
            }
        }
        else
        {
            // Skipped by the size heuristic above; carry the range forward untouched.
            newRemainingBytes.Add(byteRange);
        }
    }
    return newRemainingBytes;
}
/// <summary>
/// Reads the CLR (COM descriptor) header and the metadata stream headers out
/// of a PE image, filling mHeader, mMetaHeader, mVersionName, mMetaDataFlags
/// and mMetaStreams.
/// </summary>
/// <param name="pe">Parsed PE header providing data directories and RVA-to-file-offset mapping.</param>
/// <param name="mm">View accessor over the raw PE image bytes.</param>
/// <exception cref="Exception">If the CLR data directory size does not match IMAGE_COR20_HEADER.</exception>
private void LoadHeaderAndStreams(PeHeaderReader pe, MemoryMappedViewAccessor mm)
{
    // The CLR header lives in the COM descriptor data directory and must be
    // exactly one IMAGE_COR20_HEADER in size.
    var clrDataDir = pe.DataDirectories[PeHeaderReader.Image_Directory_Entry_Type.COM_DESCRIPTOR];
    if (Marshal.SizeOf(typeof(IMAGE_COR20_HEADER)) != clrDataDir.Size)
        throw new Exception("Size wrong.");
    mm.Read<IMAGE_COR20_HEADER>(pe.GetFileOffset(clrDataDir.VirtualAddress), out mHeader);
    if (mHeader.cb != clrDataDir.Size) // the header records its own size; cross-check it
        throw new Exception("Size wrong.");

    // Locate the metadata root and read its fixed-size first part.
    var metaLoc = pe.GetFileOffset(mHeader.MetaData.VirtualAddress);
    mm.Read<MetaDataHeaderPart1>(metaLoc, out mMetaHeader);

    // The version string (VersionLength bytes, NUL-padded) follows part 1;
    // trim it at the first NUL before decoding.
    var versionBytes = new byte[mMetaHeader.VersionLength];
    metaLoc += Marshal.SizeOf(typeof(MetaDataHeaderPart1));
    mm.ReadArray<byte>(metaLoc, versionBytes, 0, versionBytes.Length);
    int versionSize = 0;
    while (versionSize < versionBytes.Length && versionBytes[versionSize] != 0)
        versionSize++;
    mVersionName = Encoding.ASCII.GetString(versionBytes, 0, versionSize);
    metaLoc += mMetaHeader.VersionLength;

    // A 16-bit flags word, then the 16-bit count of stream headers.
    mMetaDataFlags = mm.ReadUInt16(metaLoc);
    metaLoc += 2;
    uint numberOfMetaStreams = mm.ReadUInt16(metaLoc);
    metaLoc += 2;

    // Each stream header is a fixed struct followed by a NUL-terminated ASCII
    // name padded to a 4-byte boundary (ECMA-335 metadata layout — confirm
    // against the spec if modifying).
    for (int i = 0; i < numberOfMetaStreams; i++)
    {
        MetaDataStream mds;
        mm.Read<MetaDataStream>(metaLoc, out mds);
        metaLoc += Marshal.SizeOf(typeof(MetaDataStream));
        byte b;
        StringBuilder sb = new StringBuilder();
        while ((b = mm.ReadByte(metaLoc++)) != 0)
        {
            sb.Append((char)b);
        }
        // Round metaLoc up to the next multiple of 4 (name padding);
        // metaLoc already sits just past the terminating NUL here.
        metaLoc += 3;
        metaLoc &= ~3;
        mMetaStreams.Add(sb.ToString(), mds);
    }
}
/// <summary>
/// Polls the shared-memory region while the main program is alive: reads the
/// serialized function list under the named mutex, resolves each entry's
/// plugin driver, and refreshes the UI on its own thread. Exits the process
/// once the main program can no longer be detected.
/// </summary>
public void RefreshData()
{
    string tempLogString;
    comMMFViewAccessor = comMMF.CreateViewAccessor(0, capacity);

    // Loop so the writer process can publish different strings to the
    // shared memory over time.
    int strLength;
    char[] charsInMMf;
    while (DetectMainProgram())
    {
        SyncNamed.WaitOne();
        try
        {
            // Length-prefixed payload: an Int32 count at offset 0, then the
            // characters starting at offset 4.
            strLength = comMMFViewAccessor.ReadInt32(0);
            charsInMMf = new char[strLength];
            comMMFViewAccessor.ReadArray<char>(4, charsInMMf, 0, strLength);
        }
        finally
        {
            // Release the synchronization object even if a read throws;
            // otherwise the writer process would deadlock on the mutex.
            SyncNamed.ReleaseMutex();
        }

        stringInMMf = new string(charsInMMf);
        CustomFunc = XmlSerialize.DeserializeXML<List<CustomFunc>>(stringInMMf);

        // Resolve the plugin driver for every deserialized function entry.
        foreach (CustomFunc m_CustomFunc in CustomFunc)
        {
            m_CustomFunc.a_ifuncDriver = allPlugins.Find(it => it.Info == m_CustomFunc.a_funcInfo.a_funcValue.driverName);
            if (m_CustomFunc.a_ifuncDriver == null)
            {
                // No matching driver: mark the entry failed and log it.
                m_CustomFunc.a_funcInfo.a_stateValue.runState = "End";
                m_CustomFunc.a_funcInfo.fault = "cant find driver file";
                tempLogString = "comName:" + m_CustomFunc.a_funcInfo.a_funcValue.funcName + "-找不到驱动文件-" + m_CustomFunc.a_funcInfo.a_funcValue.driverName;
                LogHelper.WriteLog(tempLogString);
#if DEBUG
                Debug.WriteLine("cant find driver file");
#endif
            }
        }

        if (CustomFunc != null)
        {
#if DEBUG
            Debug.WriteLine(CustomFunc[0].a_funcItfaceInfo.recData);
#endif
            // Marshal the data/tree refresh back onto the UI thread.
            MethodInvoker In = new MethodInvoker(XmlDataGetted);
            this.BeginInvoke(In);
            MethodInvoker In2 = new MethodInvoker(TreeViewRefresh);
            this.BeginInvoke(In2);
        }
        else
        {
            LogHelper.WriteLog("共享内存读取错误"); // runtime log text kept as-is ("shared memory read error")
        }

        Thread.Sleep(new TimeSpan(0, 0, 0, 0, 1000));
    }

    System.Environment.Exit(1);
}