/// <summary>
/// Private method to write the log entry to a logfile. The LogEntry is only written
/// to file if this class is initialized; it is always added to the in-memory list.
///
/// Added on 2007-05-05 by T.Norad
/// </summary>
/// <param name="logEntry">LogEntry contains all necessary information to log</param>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="logEntry"/> is null.</exception>
private void Log(LogEntry logEntry)
{
    if (logEntry == null)
    {
        throw new ArgumentNullException("logEntry");
    }

    m_LogEntries.Add(logEntry);

    // remember the last log entry
    m_lastLogEntry = logEntry;

    // write to logfile only if this class is initialized.
    if (this.m_isInitialized)
    {
        // create logfile name, one file per day (e.g. "2007-05-05.log")
        String filename = string.Format("{0:yyyy}-{0:MM}-{0:dd}.log", DateTime.Now);

        // combine path and name
        String logPath = Path.Combine(this.LogFileDir, filename);

        try
        {
            // FileShare.Read lets other processes read the log while it is open.
            // using-blocks guarantee the stream is closed/disposed even when a
            // write throws (the original implementation leaked the writer in
            // that case because Close() was only reached on success).
            using (FileStream fileStream = new FileStream(logPath, FileMode.Append, FileAccess.Write, FileShare.Read))
            using (StreamWriter logFileStreamWriter = new StreamWriter(fileStream, Encoding.UTF8))
            {
                logFileStreamWriter.WriteLine("{0:T}: {1}", m_lastLogEntry.TimeStamp, m_lastLogEntry.Text);
                if (m_lastLogEntry.ThrownException != null)
                {
                    logFileStreamWriter.WriteLine(m_lastLogEntry.ThrownException.ToString());
                }
                logFileStreamWriter.Flush();
            }
        }
        catch
        {
            // Logging is best-effort: a failure to write the logfile (disk full,
            // access denied, ...) must never crash the application. Intentionally
            // swallowed.
        }
    }
}
/// <summary>
/// Queues a new search for the database thread. Duplicate searches are
/// ignored; nothing happens when the database thread is not running.
/// </summary>
/// <param name="searchID">Identifier of the search.</param>
/// <param name="searchPattern">The pattern to search for.</param>
public void AddSearch(string searchID, string searchPattern)
{
    if (!m_SearchDBThread.IsAlive)
    {
        return;
    }

    try
    {
        m_SearchesToStartBuffer.Lock();
        SearchToStart pendingSearch = new SearchToStart(searchID, searchPattern);
        bool alreadyQueued = m_SearchesToStartBuffer.Contains(pendingSearch);
        if (!alreadyQueued)
        {
            m_SearchesToStartBuffer.Add(pendingSearch);
        }
    }
    catch (Exception ex)
    {
        m_Logger.Log(ex, "SearchDBManager: An error was thrown while adding a search to the list.", new object[] { });
    }
    finally
    {
        m_SearchesToStartBuffer.Unlock();
    }
}
/// <summary>
/// Parses a command 23 (search results) from the given command data.
/// </summary>
/// <param name="command">Raw command data to deserialize.</param>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="command"/> is null.</exception>
public Command23(CommandBuilder command)
{
    if (command == null)
    {
        throw new ArgumentNullException("command");
    }

    m_Command = command;

    // Fixed-size header: command id, sender, receiver and search id (48 bytes each).
    m_CommandID = m_Command.ReadBytes(48);
    m_SenderPeerID = m_Command.ReadBytes(48);
    m_ReceiverPeerID = m_Command.ReadBytes(48);
    m_SearchID = m_Command.ReadBytes(48);

    m_SearchResults = new RList<SearchResult>();
    ushort resultCount = m_Command.ReadUInt16();
    for (int resultIndex = 0; resultIndex < resultCount; resultIndex++)
    {
        byte[] hash = m_Command.ReadBytes(64);
        uint size = m_Command.ReadUInt32();
        string name = m_Command.ReadString();

        // Metadata is serialized as a count followed by key/value string pairs.
        RIndexedHashtable<string, string> meta = new RIndexedHashtable<string, string>();
        ushort metaCount = m_Command.ReadUInt16();
        for (int metaIndex = 0; metaIndex < metaCount; metaIndex++)
        {
            // Key is read before value (left-to-right evaluation in the original).
            string key = m_Command.ReadString();
            string value = m_Command.ReadString();
            meta.Add(key, value);
        }

        // The last two fields of each result are read in this order.
        string trailingString = m_Command.ReadString();
        byte trailingByte = m_Command.ReadByte();
        m_SearchResults.Add(new SearchResult(hash, size, name, meta, trailingString, trailingByte));
    }
}
/// <summary>
/// Creates a web-service proxy for every webcache URL in the given list.
/// </summary>
/// <param name="webCaches">List of webcache addresses; must not be null.</param>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="webCaches"/> is null.</exception>
public WebCacheProvider(RList<string> webCaches)
{
    if (webCaches == null)
    {
        throw new ArgumentNullException("webCaches");
    }

    m_WebCaches = new RList<WebCacheWebServiceProxy>();
    foreach (string cacheUrl in webCaches)
    {
        m_WebCaches.Add(new WebCacheWebServiceProxy(cacheUrl));
    }
}
/// <summary>
/// Records one detection attempt. When a face was found, the overall search
/// is marked successful and the image is cached as the most recent face.
/// </summary>
/// <param name="faceFound">Whether a face was detected in this attempt.</param>
/// <param name="face">The image associated with this attempt.</param>
public void update(bool faceFound, Image face)
{
    RList.Add(new FindResult(faceFound, face));

    if (!faceFound)
    {
        return;
    }

    this.isFaceFound = true;
    this.faceImage = face;
}
/// <summary>
/// Wraps each configured webcache address in a <see cref="WebCacheWebServiceProxy"/>.
/// </summary>
/// <param name="webCaches">Webcache addresses to wrap; must not be null.</param>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="webCaches"/> is null.</exception>
public WebCacheProvider(RList<string> webCaches)
{
    if (webCaches == null) throw new ArgumentNullException("webCaches");

    m_WebCaches = new RList<WebCacheWebServiceProxy>();
    foreach (string address in webCaches) m_WebCaches.Add(new WebCacheWebServiceProxy(address));
}
/// <summary>
/// Queues a received search result for the database thread. Duplicates are
/// ignored; nothing happens when the database thread is not running.
/// </summary>
/// <param name="result">The search result to buffer.</param>
public void AddResult(Command23.SearchResult result)
{
    if (!m_SearchDBThread.IsAlive)
    {
        return;
    }

    try
    {
        m_ResultsToAddBuffer.Lock();
        bool alreadyBuffered = m_ResultsToAddBuffer.Contains(result);
        if (!alreadyBuffered)
        {
            m_ResultsToAddBuffer.Add(result);
        }
    }
    catch (Exception ex)
    {
        m_Logger.Log(ex, "SearchDBManager: An error was thrown while adding a result to the list.", new object[] { });
    }
    finally
    {
        m_ResultsToAddBuffer.Unlock();
    }
}
/// <summary>
/// Rebuilds the room list UI: destroys all previously spawned room entries,
/// then instantiates one prefab per received room and initializes it.
/// (Name spelling "Recived" is kept — it overrides a base-class member.)
/// </summary>
/// <param name="info">The rooms received from the server.</param>
protected override void OnRoomsListRecived(RoomInfo[] info)
{
    // Remove the entries from the previous update.
    foreach (GameObject oldEntry in rooms)
    {
        Destroy(oldEntry);
    }
    rooms.Clear();

    // Spawn a fresh entry for every room.
    foreach (RoomInfo roomInfo in info)
    {
        GameObject entry = Instantiate(roomPrefab, roomsF);
        entry.GetComponent<ChatRoomInfo>().Initia(roomInfo);
        rooms.Add(entry);
    }
}
/// <summary>
/// Deserializes a command 23 (search results reply) from the raw command data.
/// </summary>
/// <param name="command">The command data to read from.</param>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="command"/> is null.</exception>
public Command23(CommandBuilder command)
{
    if (command == null)
    {
        throw new ArgumentNullException("command");
    }

    m_Command = command;

    // Header: four 48-byte identifiers.
    m_CommandID = m_Command.ReadBytes(48);
    m_SenderPeerID = m_Command.ReadBytes(48);
    m_ReceiverPeerID = m_Command.ReadBytes(48);
    m_SearchID = m_Command.ReadBytes(48);

    m_SearchResults = new RList<SearchResult>();
    int remainingResults = m_Command.ReadUInt16();
    while (remainingResults-- > 0)
    {
        byte[] fileHash = m_Command.ReadBytes(64);
        uint fileSize = m_Command.ReadUInt32();
        string fileName = m_Command.ReadString();

        RIndexedHashtable<string, string> metaData = new RIndexedHashtable<string, string>();
        int remainingMetaData = m_Command.ReadUInt16();
        while (remainingMetaData-- > 0)
        {
            // Argument evaluation is left-to-right: key first, then value.
            metaData.Add(m_Command.ReadString(), m_Command.ReadString());
        }

        // Last two fields read left-to-right inside the constructor call.
        m_SearchResults.Add(new SearchResult(fileHash, fileSize, fileName, metaData, m_Command.ReadString(), m_Command.ReadByte()));
    }
}
/// <summary>
/// Creates the search database manager and starts its background worker thread.
/// The thread periodically (once per second) merges buffered searches/results
/// into working lists, scans the on-disk database file, matches stored entries
/// against pending searches, merges duplicate results (updating their date),
/// compacts the file when entries were changed or aged out, and appends new
/// results. The loop stops when the manager is closing or after 10 errors.
/// </summary>
/// <param name="fileName">Path of the search database file.</param>
public SearchDBManager(string fileName)
{
    m_FilePath = fileName;
    m_CleanUpDays = int.Parse(m_Settings["SearchDBCleanUpDays"]);
    m_SearchDBThread = new Thread(delegate()
    {
        try
        {
            Core.SetUILanguage();
            // Main worker loop; aborts permanently after 10 errors.
            while (!m_IsClosing && m_ErrorCounter < 10)
            {
                //Move buffers to normal list
                try
                {
                    m_SearchesToStartBuffer.Lock();
                    m_SearchesToStart.Lock();
                    foreach (SearchDBManager.SearchToStart newSearch in m_SearchesToStartBuffer)
                    {
                        if (!m_SearchesToStart.Contains(newSearch))
                        {
                            m_SearchesToStart.Add(newSearch);
                        }
                    }
                }
                catch (Exception ex)
                {
                    m_Logger.Log(ex, "SearchDBManager: An error was thrown while reading the SearchesToStartBuffer.", new object[] { });
                }
                finally
                {
                    m_SearchesToStartBuffer.Clear();
                    m_SearchesToStart.Unlock();
                    m_SearchesToStartBuffer.Unlock();
                }
                try
                {
                    m_ResultsToAddBuffer.Lock();
                    m_ResultsToAdd.Lock();
                    foreach (Command23.SearchResult result in m_ResultsToAddBuffer)
                    {
                        if (!m_ResultsToAdd.Contains(result))
                        {
                            m_ResultsToAdd.Add(result);
                        }
                    }
                }
                catch (Exception ex)
                {
                    m_Logger.Log(ex, "SearchDBManager: An error was thrown while reading the ResultsToAddBuffer.", new object[] { });
                }
                finally
                {
                    m_ResultsToAddBuffer.Clear();
                    m_ResultsToAdd.Unlock();
                    m_ResultsToAddBuffer.Unlock();
                }
                //Because we are only comparing dates, it is only necessary to compare them once a day
                bool cleanUp = false;
                if (((TimeSpan)DateTime.Now.Subtract(m_LastCleanUp)).Days >= 1)
                {
                    //CleanUp();
                    cleanUp = true;
                }
                //File offset of the current entry (used to truncate on error)
                long lastKnownValidFilePosition = 0;
                //File offset of the entry before the current one
                long lastKnownValidFilePosition2 = 0;
                ulong fileSizeOfEntries = 0;
                long cleanedUpCounter = 0;
                long resultCounter = 0;
                FileStream fileStream = null;
                BinaryReader fileReader = null;
                BinaryWriter fileWriter = null;
                MemoryStream memoryStream = null;
                BinaryReader memoryReader = null;
                BinaryWriter memoryWriter = null;
                try
                {
                    m_ResultsToAdd.Lock();
                    m_SearchesToStart.Lock();
                    m_SearchResultsBuffer.Lock();
                    //Check if there is something to do
                    if (m_ResultsToAdd.Count > 0 || m_SearchesToStart.Count > 0 || cleanUp)
                    {
                        fileStream = new FileStream(m_FilePath, FileMode.OpenOrCreate, FileAccess.ReadWrite);
                        fileReader = new BinaryReader(fileStream, Encoding.Unicode);
                        fileWriter = new BinaryWriter(fileStream, Encoding.Unicode);
                        // The memory stream buffers rewritten entries so the file
                        // can be compacted in-place without a temporary file.
                        memoryStream = new MemoryStream();
                        memoryReader = new BinaryReader(memoryStream, Encoding.Unicode);
                        memoryWriter = new BinaryWriter(memoryStream, Encoding.Unicode);
                        long fileLength = fileReader.BaseStream.Length;
                        int fileFlushCounter = 0;
                        long fileReadPosition = 0;
                        long fileWritePosition = 0;
                        bool insertingData = false;
                        bool isFirstChangedEntry = true;
                        //Add an array for the results of each search
                        foreach (SearchToStart searchToStart in m_SearchesToStart)
                        {
                            m_SearchResultsBuffer.Add(searchToStart.SearchID, new RIndexedHashtable<string, OldSearchResult>());
                        }
                        //Go through the file
                        while (fileReadPosition < fileLength)
                        {
                            bool isOld = false;
                            long firstPositionOfThisEntry = fileReadPosition;
                            lastKnownValidFilePosition2 = lastKnownValidFilePosition;
                            lastKnownValidFilePosition = fileReadPosition;
                            //Read the next entry
                            byte[] rFileHash = fileReader.ReadBytes(64);
                            long rFileSize = fileReader.ReadInt64();
                            int rFileNameCount = fileReader.ReadInt32();
                            string[] rFileNames = new string[rFileNameCount];
                            for (int i = 0; i < rFileNameCount; i++)
                            {
                                rFileNames[i] = fileReader.ReadString();
                            }
                            string rAlbum = fileReader.ReadString();
                            string rArtist = fileReader.ReadString();
                            string rTitle = fileReader.ReadString();
                            byte rRating = fileReader.ReadByte();
                            //Save the position of the date (so it can be patched in-place later)
                            long datePosition = fileReader.BaseStream.Position;
                            string rDate = fileReader.ReadString();
                            //Save the beginning of the next entry
                            fileReadPosition = fileReader.BaseStream.Position;
                            resultCounter++;
                            //Check if this entry is a result to a search
                            for (int i = 0; i < m_SearchesToStart.Count; i++)
                            {
                                // NOTE: stray extra ';' kept from the original source.
                                string[] searchPattern = m_SearchesToStart[i].Pattern.ToLower().Split(new char[] { ' ' }); ;
                                //Remove all small patterns (shorter than 3 characters)
                                RList<string> patterns = new RList<string>();
                                for (int k = 0; k < searchPattern.Length; k++)
                                {
                                    if (searchPattern[k].Length >= 3)
                                    {
                                        patterns.Add(searchPattern[k]);
                                    }
                                }
                                bool isResult = false;
                                int fileNameNumber = 0;
                                for (int j = 0; j < patterns.Count; j++)
                                {
                                    //Check all filenames of this entry
                                    for (int k = 0; k < rFileNames.Length; k++)
                                    {
                                        if (rFileNames[k].ToLower().Contains(patterns[j]))
                                        {
                                            fileNameNumber = k;
                                            isResult = true;
                                        }
                                    }
                                    //Check the metadata of this entry
                                    if (!isResult)
                                    {
                                        if (rAlbum.ToLower().Contains(patterns[j]))
                                        {
                                            isResult = true;
                                        }
                                        else if (rArtist.ToLower().Contains(patterns[j]))
                                        {
                                            isResult = true;
                                        }
                                        else if (rTitle.ToLower().Contains(patterns[j]))
                                        {
                                            isResult = true;
                                        }
                                    }
                                    //if this is no result for this part of the searchpattern,
                                    //we can stop, because there shall be only results with all
                                    //parts of the searchpattern.
                                    if (isResult == false)
                                    {
                                        break;
                                    }
                                    //Reset isResult for the next part of the searchpattern
                                    if (j != patterns.Count - 1)
                                    {
                                        isResult = false;
                                    }
                                }
                                if (isResult)
                                {
                                    //Add this entry to the results of this search
                                    m_SearchResultsBuffer[m_SearchesToStart[i].SearchID].Add(Core.ByteArrayToString(rFileHash), new OldSearchResult(rFileHash, rFileSize, rFileNames[fileNameNumber], rAlbum, rArtist, rTitle, rRating, DateTime.Parse(rDate)));
                                }
                            }
                            bool updateDate = false;
                            int[] indexOfResultsToRemove = new int[0];
                            //Check if a new result is equal to this entry
                            for (int i = 0; i < m_ResultsToAdd.Count; i++)
                            {
                                //Compare the hashes
                                if (Core.CompareByteArray(rFileHash, m_ResultsToAdd[i].FileHash))
                                {
                                    //It exists already
                                    updateDate = true;
                                    // Grow the index array by one (manual array resize).
                                    int[] tempArray1 = new int[indexOfResultsToRemove.Length + 1];
                                    for (int j = 0; j < indexOfResultsToRemove.Length; j++)
                                    {
                                        tempArray1[j] = indexOfResultsToRemove[j];
                                    }
                                    tempArray1[indexOfResultsToRemove.Length] = i;
                                    indexOfResultsToRemove = tempArray1;
                                    //Check the filenames
                                    bool fileNameExists = false;
                                    for (int k = 0; k < rFileNames.Length; k++)
                                    {
                                        if (rFileNames[k] == m_ResultsToAdd[i].FileName)
                                        {
                                            fileNameExists = true;
                                            break;
                                        }
                                    }
                                    if (!fileNameExists)
                                    {
                                        //The filename is new -> add it (entry grows, so the
                                        //rest of the file must be rewritten from here on)
                                        insertingData = true;
                                        string[] tempArray = new string[rFileNameCount + 1];
                                        for (int k = 0; k < rFileNameCount; k++)
                                        {
                                            tempArray[k] = rFileNames[k];
                                        }
                                        tempArray[rFileNameCount] = m_ResultsToAdd[i].FileName;
                                        rFileNames = tempArray;
                                        rFileNameCount++;
                                    }
                                }
                            }
                            if (updateDate)
                            {
                                //Update the date
                                rDate = DateTime.Now.ToString(m_DateFormatString);
                                //Remove the new result from the list, because it exists
                                RList<Command23.SearchResult> tempRemoveList = new RList<Command23.SearchResult>();
                                for (int i = 0; i < m_ResultsToAdd.Count; i++)
                                {
                                    bool addIt = false;
                                    for (int k = 0; k < indexOfResultsToRemove.Length; k++)
                                    {
                                        if (i == indexOfResultsToRemove[k])
                                        {
                                            addIt = true;
                                        }
                                    }
                                    if (addIt)
                                    {
                                        tempRemoveList.Add(m_ResultsToAdd[i]);
                                    }
                                }
                                foreach (Command23.SearchResult r in tempRemoveList)
                                {
                                    m_ResultsToAdd.Remove(r);
                                }
                                //Check if we can update the date directly in the file
                                if (!insertingData)
                                {
                                    //Write the new date to the file in-place, then restore the read position
                                    fileWriter.BaseStream.Position = datePosition;
                                    fileWriter.Write(rDate);
                                    fileWriter.Flush();
                                    fileReader.BaseStream.Position = fileReadPosition;
                                }
                            }
                            //Check the date if we are cleaning up
                            if (cleanUp)
                            {
                                if (((TimeSpan)DateTime.Now.Subtract(DateTime.Parse(rDate))).Days > m_CleanUpDays)
                                {
                                    // Entry is too old: drop it during the rewrite.
                                    isOld = true;
                                    insertingData = true;
                                    cleanedUpCounter++;
                                }
                                else
                                {
                                    fileSizeOfEntries += (ulong)rFileSize;
                                }
                            }
                            else
                            {
                                fileSizeOfEntries += (ulong)rFileSize;
                            }
                            //Check if we have to insert data to the file
                            if (insertingData)
                            {
                                if (isFirstChangedEntry)
                                {
                                    //Here we have to begin writing
                                    fileWritePosition = firstPositionOfThisEntry;
                                    isFirstChangedEntry = false;
                                }
                                if (!isOld)
                                {
                                    fileFlushCounter++;
                                    //Write the entry to the buffer
                                    memoryWriter.Write(rFileHash);
                                    memoryWriter.Write(rFileSize);
                                    memoryWriter.Write(rFileNameCount);
                                    for (int i = 0; i < rFileNameCount; i++)
                                    {
                                        memoryWriter.Write(rFileNames[i]);
                                    }
                                    memoryWriter.Write(rAlbum);
                                    memoryWriter.Write(rArtist);
                                    memoryWriter.Write(rTitle);
                                    memoryWriter.Write(rRating);
                                    memoryWriter.Write(rDate);
                                    //if the buffer is big enough or we reached the end of the file, write the buffer to the file
                                    if (fileFlushCounter == 10000 || fileReadPosition >= fileLength)
                                    {
                                        fileFlushCounter = 0;
                                        memoryWriter.Flush();
                                        memoryReader.BaseStream.Position = 0;
                                        fileWriter.BaseStream.Position = fileWritePosition;
                                        long memoryLength = memoryReader.BaseStream.Length;
                                        long spaceInFile = fileReadPosition - fileWritePosition;
                                        //write only as much as space and data we have
                                        //(never overwrite bytes that have not been read yet)
                                        while (memoryReader.BaseStream.Position < spaceInFile && memoryReader.BaseStream.Position < memoryLength)
                                        {
                                            fileWriter.Write(memoryReader.ReadByte());
                                        }
                                        fileWriter.Flush();
                                        //Reconfigure the filewriter/reader
                                        fileWritePosition = fileWriter.BaseStream.Position;
                                        fileReader.BaseStream.Position = fileReadPosition;
                                        //Write the rest of the data in the buffer to the beginning of the buffer
                                        long memoryReaderPosition = memoryReader.BaseStream.Position;
                                        long memoryWriterPosition = 0;
                                        while (memoryReaderPosition < memoryLength)
                                        {
                                            memoryReader.BaseStream.Position = memoryReaderPosition;
                                            byte b = memoryReader.ReadByte();
                                            memoryReaderPosition = memoryReader.BaseStream.Position;
                                            memoryWriter.BaseStream.Position = memoryWriterPosition;
                                            memoryWriter.Write(b);
                                            memoryWriterPosition = memoryWriter.BaseStream.Position;
                                        }
                                        memoryWriter.Flush();
                                        memoryWriter.BaseStream.SetLength(memoryWriterPosition);
                                    }
                                }
                            }
                        }
                        if (insertingData)
                        {
                            //write the rest of the memorystream to the file.
                            fileWriter.BaseStream.Position = fileWritePosition;
                            long mlength = memoryReader.BaseStream.Length;
                            memoryReader.BaseStream.Position = 0;
                            while (memoryReader.BaseStream.Position < mlength)
                            {
                                fileWriter.Write(memoryReader.ReadByte());
                            }
                            fileWriter.Flush();
                        }
                        if (cleanUp)
                        {
                            m_Logger.Log(Properties.Resources_Core.CleanSearchDatabase, new object[] { cleanedUpCounter, resultCounter });
                            resultCounter -= cleanedUpCounter;
                            m_LastCleanUpCount = cleanedUpCounter;
                        }
                        //Add the new results to the file
                        //The position of the filestream points already to the end
                        //First group pending results by file hash so duplicate hashes
                        //become one entry with several filenames.
                        RIndexedHashtable<string, NewSearchResult> resultsToAdd = new RIndexedHashtable<string, NewSearchResult>();
                        foreach (Command23.SearchResult result in m_ResultsToAdd)
                        {
                            string fileHashString = Core.ByteArrayToString(result.FileHash);
                            if (resultsToAdd.ContainsKey(fileHashString))
                            {
                                resultsToAdd[fileHashString].AddFileName(result.FileName);
                            }
                            else
                            {
                                resultsToAdd.Add(fileHashString, new NewSearchResult(result));
                            }
                        }
                        foreach (NewSearchResult newResult in resultsToAdd.Values)
                        {
                            fileWriter.Write(newResult.FileHash);
                            fileWriter.Write(newResult.FileSize);
                            int fileNameCount = newResult.FileNames.Length;
                            fileWriter.Write(fileNameCount);
                            for (int i = 0; i < fileNameCount; i++)
                            {
                                fileWriter.Write(newResult.FileNames[i]);
                            }
                            fileWriter.Write(newResult.Album);
                            fileWriter.Write(newResult.Artist);
                            fileWriter.Write(newResult.Title);
                            fileWriter.Write(newResult.Rating);
                            fileWriter.Write(DateTime.Now.ToString(m_DateFormatString));
                            resultCounter++;
                            fileSizeOfEntries += (ulong)newResult.FileSize;
                        }
                        fileWriter.Flush();
                        //Clear the lists
                        m_ResultsToAdd.Clear();
                        m_SearchesToStart.Clear();
                        //Set the correct end of the file (truncate leftover bytes after compaction)
                        if (insertingData)
                        {
                            fileWriter.BaseStream.SetLength(fileWriter.BaseStream.Position);
                        }
                        if (cleanUp)
                        {
                            m_LastCleanUp = DateTime.Now;
                        }
                        //Update information
                        m_ResultCount = resultCounter;
                        m_FileSize = fileStream.Length;
                        m_FileSizeOfEntries = fileSizeOfEntries;
                        fileReader.Close();
                        fileWriter.Close();
                        fileStream.Close();
                        memoryReader.Close();
                        memoryWriter.Close();
                        memoryStream.Close();
                    }
                }
                catch (Exception ex)
                {
                    //Update information
                    m_ResultCount = resultCounter;
                    m_FileSize = fileStream.Length;
                    m_FileSizeOfEntries = fileSizeOfEntries;
                    m_ErrorCounter++;
                    m_Logger.Log(ex, "An exception was thrown in searchDBThread! (#{0})", new object[] { m_ErrorCounter });
                    try
                    {
                        //Recovery attempt 1: truncate the file to the entry before
                        //the last known valid one.
                        fileStream.SetLength(lastKnownValidFilePosition2);
                        m_Logger.Log("Searchdatabase cutted to the entry bofore the last known valid entry. ({0} Bytes remaining)", new object[] { lastKnownValidFilePosition });
                        m_FileSize = lastKnownValidFilePosition2;
                        m_ResultCount = resultCounter - cleanedUpCounter;
                        m_FileSizeOfEntries = fileSizeOfEntries;
                    }
                    catch
                    {
                        //Recovery attempt 2: delete the (probably corrupt) database.
                        try
                        {
                            if (File.Exists(m_FilePath))
                            {
                                File.Delete(m_FilePath);
                                m_Logger.Log("Searchdatabase deleted, because it was probably corrupt.", new object[] { });
                                m_FileSize = 0;
                                m_ResultCount = 0;
                                m_FileSizeOfEntries = 0;
                            }
                        }
                        catch { }
                    }
                }
                finally
                {
                    m_ResultsToAdd.Unlock();
                    m_SearchesToStart.Unlock();
                    m_SearchResultsBuffer.Unlock();
                    // Close everything that was opened; each may be null when the
                    // iteration had nothing to do or failed early.
                    if (fileReader != null) { fileReader.Close(); }
                    if (fileWriter != null) { fileWriter.Close(); }
                    if (fileStream != null) { fileStream.Close(); }
                    if (memoryReader != null) { memoryReader.Close(); }
                    if (memoryWriter != null) { memoryWriter.Close(); }
                    if (memoryStream != null) { memoryStream.Close(); }
                }
                //Move buffer to normal list
                try
                {
                    m_SearchResultsBuffer.Lock();
                    m_SearchResults.Lock();
                    for (int i = 0; i < m_SearchResultsBuffer.Count; i++)
                    {
                        if (!m_SearchResults.ContainsKey(((System.Collections.Generic.KeyValuePair<string, RIndexedHashtable<string, OldSearchResult>>)m_SearchResultsBuffer[i]).Key))
                        {
                            m_SearchResults.Add(((System.Collections.Generic.KeyValuePair<string, RIndexedHashtable<string, OldSearchResult>>)m_SearchResultsBuffer[i]).Key, ((System.Collections.Generic.KeyValuePair<string, RIndexedHashtable<string, OldSearchResult>>)m_SearchResultsBuffer[i]).Value);
                        }
                    }
                }
                catch (Exception ex)
                {
                    m_Logger.Log(ex, "SearchDBManager: An error was thrown while reading the SearchResultsBuffer.", new object[] { });
                }
                finally
                {
                    m_SearchResultsBuffer.Clear();
                    m_SearchResults.Unlock();
                    m_SearchResultsBuffer.Unlock();
                }
                Thread.Sleep(1000);
            }
        }
        catch (Exception ex)
        {
            m_Logger.Log(ex, "An exception was thrown in searchDBThread!", new object[] { });
        }
        m_Logger.Log("SearchDBManager closed.", new object[] { });
    });
    m_SearchDBThread.Name = "searchDBThread";
    m_SearchDBThread.IsBackground = true;
    m_SearchDBThread.Priority = ThreadPriority.Lowest;
    m_SearchDBThread.Start();
}
/// <summary>
/// Periodic tick for this download. When all sectors have been received, a
/// background thread drops the sources, hashes the temp file and moves it to
/// the incoming directory (or the corrupt directory on hash mismatch).
/// Otherwise it updates the download-rate statistics, broadcasts source
/// searches, drives the per-source command/timeout state machine and requests
/// new sectors from verified sources.
/// </summary>
public void Process()
{
    if (m_QueueStart == null || !m_QueueStart.HasValue) m_QueueStart = DateTime.Now;
    if (m_ReceivedSectors == m_Sectors)
    {
        // Download complete: hash it once, on a background thread.
        if (m_IsHashing) return;
        m_IsHashing = true;
        Thread hashingThread = new Thread(delegate()
        {
            try
            {
                m_Logger.Log("The download of \"{0}\" is complete and will be hashed now!", m_FileName);
                // Tell all still-connected sources that we are done, then drop them.
                try
                {
                    m_Sources.Lock();
                    foreach (Source source in m_Sources.Values)
                        if (source.State == SourceState.Active || source.State == SourceState.Requested || source.State == SourceState.Requesting) SendCommand7A(source);
                }
                catch (Exception ex)
                {
                    m_Logger.Log(ex, "An exception was thrown while removing sources!");
                }
                finally
                {
                    m_Sources.Clear();
                    m_Sources.Unlock();
                }
                Core.RemoveDownload(m_DownloadID);
                try
                {
                    m_FileStream.Close();
                    FileStream fileStream = new FileStream(m_TempFilePath, FileMode.Open, FileAccess.Read, FileShare.Read);
                    byte[] fileHash = ComputeHashes.SHA512Compute(fileStream);
                    fileStream.Close();
                    if (Core.CompareByteArray(fileHash, m_FileHash))
                    {
                        //2008-03-20 Nochbaer
                        if (Directory.Exists(Path.Combine(m_Settings["IncomingDirectory"], m_SubFolder)) == false)
                        {
                            Directory.CreateDirectory(Path.Combine(m_Settings["IncomingDirectory"], m_SubFolder));
                        }
                        // Find a free filename: append "(1)", "(2)", ... until unused.
                        string filePath = Path.Combine(Path.Combine(m_Settings["IncomingDirectory"], m_SubFolder), m_FileName);
                        int n = 1;
                        while (File.Exists(filePath))
                        {
                            filePath = Path.Combine(Path.Combine(m_Settings["IncomingDirectory"], m_SubFolder), string.Format("{0}({1}){2}", Path.GetFileNameWithoutExtension(m_FileName), n, Path.GetExtension(m_FileName)));
                            n++;
                        }
                        Core.ShareManager.AddDownloadedFile(m_TempFilePath, filePath, m_FileHash);
                        File.Move(m_TempFilePath, filePath);
                        if (bool.Parse(m_Settings["ParseCollections"]) == true && Path.GetExtension(filePath) == ".sncollection")
                        {
                            Core.ParseStealthNetCollection(filePath);
                        }
                    }
                    else
                    {
                        // Hash mismatch: move to corrupt directory and re-queue the download.
                        string filePath = Path.Combine(m_Settings["CorruptDirectory"], m_FileName);
                        int n = 1;
                        while (File.Exists(filePath))
                        {
                            filePath = Path.Combine(m_Settings["CorruptDirectory"], string.Format("{0}({1}){2}", Path.GetFileNameWithoutExtension(m_FileName), n, Path.GetExtension(m_FileName)));
                            n++;
                        }
                        File.Move(m_TempFilePath, filePath);
                        Core.AddDownload(m_FileHash, m_FileHashString, 0, null);
                        m_Logger.Log("The Download of '{0}' is corrupt", m_FileName);
                    }
                }
                catch (Exception ex)
                {
                    m_Logger.Log(ex, "An exception was thrown while moving temporary file '{0}'!", m_TempFilePath);
                }
            }
            catch (Exception ex)
            {
                m_Logger.Log(ex, "An exception was thrown while hashing the download of \"{0}\"!", m_FileName);
            }
        });
        hashingThread.Name = "hashingThread";
        hashingThread.IsBackground = true;
        hashingThread.Priority = ThreadPriority.Lowest;
        hashingThread.Start();
    }
    else
    {
        // Sliding 60-sample window of bytes per tick -> average downstream rate.
        if (m_DownloadStatistics.Count == 60) m_DownloadStatistics.RemoveAt(59);
        m_DownloadStatistics.Insert(0, m_Downloaded);
        m_Downloaded = 0;
        long downstream = 0;
        foreach (int n in m_DownloadStatistics) downstream += n;
        m_Downstream = (int)(downstream / m_DownloadStatistics.Count);
        m_DownstreamString = Core.TransferVolumeToString(m_Downstream);
        // Periodically re-broadcast the source search.
        if (DateTime.Now.Subtract(m_LastBroadcastSent).TotalSeconds >= Constants.Command30Interval)
        {
            m_LastBroadcastSent = DateTime.Now;
            if (!m_HasInformation) Core.SendCommand50(m_SourceSearchFloodingHash, m_SourceSearchPeerID, m_SourceSearchID, m_OnceHashedFileHash);
            else Core.SendCommand60(m_SourceSearchFloodingHash, m_SourceSearchPeerID, m_SourceSearchID, m_TwiceHashedFileHash);
        }
        try
        {
            m_Sources.Lock();
            RList<Source> activeSources = new RList<Source>();
            RList<Source> verifiedSources = new RList<Source>();
            Source source;
            // Iterate backwards so RemoveAt(n) does not shift unvisited elements.
            for (int n = m_Sources.Count - 1; n >= 0; n--)
            {
                if (!m_RequestingDelay.HasValue) m_RequestingDelay = DateTime.Now;
                source = m_Sources[n].Value;
                if (!source.IsComplete)
                {
                    // Does this source own any sector we are still missing?
                    bool hasNeededSectors = false;
                    for (long i = 0; i < source.SectorsMap.Length; i++)
                        if ((~m_SectorsMap[i] & source.SectorsMap[i]) != 0)
                        {
                            hasNeededSectors = true;
                            break;
                        }
                    if (!hasNeededSectors && source.State != SourceState.NotNeeded)
                    {
                        source.SetState(SourceState.NotNeeded);
                        if (source.State == SourceState.Active || source.State == SourceState.Requested || source.State == SourceState.Requesting) SendCommand7A(source);
                    }
                    else if (source.State == SourceState.NotNeeded && hasNeededSectors) source.SetState(SourceState.Verifying);
                }
                if (source.State == SourceState.NotNeeded)
                {
                    if (DateTime.Now.Subtract(source.LastReceived).TotalSeconds >= Constants.PeerTimeout) m_Sources.RemoveAt(n);
                }
                else if (source.State == SourceState.Verifying || source.State == SourceState.Verified)
                {
                    // Command 70 = verification request; retry with timeout, drop after limit.
                    if ((source.Command70Sent == 0 && DateTime.Now.Subtract(source.LastCommand70Sent).TotalSeconds >= Constants.Command70Interval) || (source.Command70Sent > 0 && source.Command70Sent < Constants.Command70ToSend && DateTime.Now.Subtract(source.LastCommand70Sent).TotalSeconds >= Constants.Command71Timeout))
                    {
                        if (source.Command70Sent > 0) source.ReportTimeout();
                        source.Report70Sent();
                        Core.SendCommand70(m_DownloadPeerID, source.PeerID, m_DownloadID, m_ThriceHashedFileHash);
                    }
                    else if (source.Command70Sent >= Constants.Command70ToSend && DateTime.Now.Subtract(source.LastCommand70Sent).TotalSeconds >= Constants.Command71Timeout)
                    {
                        m_Sources.RemoveAt(n);
                        continue;
                    }
                    if (source.State == SourceState.Verified && !source.IsQueueFull) verifiedSources.Add(source);
                }
                else if (source.State == SourceState.Requesting || source.State == SourceState.Requested || (source.State == SourceState.Active && source.LastRequestedSector == -1))
                {
                    // Command 74 = download request; retry with timeout.
                    if ((source.Command74Sent == 0 && DateTime.Now.Subtract(source.LastCommand74Sent).TotalSeconds >= Constants.Command74Interval) || (source.Command74Sent > 0 && source.Command74Sent < Constants.Command74ToSend && DateTime.Now.Subtract(source.LastCommand74Sent).TotalSeconds >= Constants.Command75Timeout))
                    {
                        source.Report74Sent();
                        Core.SendCommand74(m_DownloadPeerID, source.PeerID, m_DownloadID, m_ThriceHashedFileHash);
                    }
                    else if (source.Command74Sent >= Constants.Command74ToSend && DateTime.Now.Subtract(source.LastCommand74Sent).TotalSeconds >= Constants.Command75Timeout)
                    {
                        source.ReportTimeout();
                        continue;
                    }
                    activeSources.Add(source);
                }
                else if (source.State == SourceState.Active && source.LastRequestedSector > -1)
                {
                    // Command 78 = sector request; re-send the pending sector on timeout.
                    if ((source.Command78Sent == 0 && DateTime.Now.Subtract(source.LastCommand78Sent).TotalSeconds >= Constants.Command78Interval) || (source.Command78Sent > 0 && source.Command78Sent < Constants.Command78ToSend && DateTime.Now.Subtract(source.LastCommand78Sent).TotalSeconds >= Constants.Command79Timeout))
                    {
                        source.Report78Sent(source.LastRequestedSector);
                        Core.SendCommand78(m_DownloadPeerID, source.PeerID, m_DownloadID, source.LastRequestedSector);
                    }
                    else if (source.Command78Sent >= Constants.Command78ToSend && DateTime.Now.Subtract(source.LastCommand78Sent).TotalSeconds >= Constants.Command79Timeout)
                    {
                        source.ReportTimeout();
                        continue;
                    }
                    activeSources.Add(source);
                }
            }
            if (m_IsFilledWithZeros && m_RequestingDelay.HasValue && DateTime.Now.Subtract(m_RequestingDelay.Value).TotalSeconds >= Constants.DownloadRequestingDelay)
            {
                if (activeSources.Count < Constants.MaximumSourcesCount && verifiedSources.Count > 0)
                {
                    // Bubble-sort verified sources: prefer incomplete sources with
                    // free upload slots, then shorter queues.
                    for (int n = 1; n <= verifiedSources.Count - 1; n++)
                        for (int m = 0; m < verifiedSources.Count - n; m++)
                        {
                            if ((verifiedSources[m].IsComplete && !verifiedSources[m + 1].IsComplete && verifiedSources[m + 1].QueueLength < Constants.MaximumUploadsCount) || verifiedSources[m].QueueLength > verifiedSources[m + 1].QueueLength)
                            {
                                source = verifiedSources[m];
                                verifiedSources[m] = verifiedSources[m + 1];
                                verifiedSources[m + 1] = source;
                            }
                        }
                    // Promote the best verified sources until the active limit is reached.
                    for (int n = 0; n < Math.Min(Constants.MaximumSourcesCount - activeSources.Count, verifiedSources.Count); n++)
                    {
                        source = verifiedSources[n];
                        source.Report74Sent();
                        Core.SendCommand74(m_DownloadPeerID, source.PeerID, m_DownloadID, m_ThriceHashedFileHash);
                        activeSources.Add(source);
                    }
                }
                for (int n = 0; n < Math.Min(Constants.MaximumSourcesCount, activeSources.Count); n++)
                {
                    source = activeSources[n];
                    if (source.State == SourceState.Requested && source.QueuePosition == 0)
                    {
                        RList<long> sectorsToRequest = new RList<long>(m_SectorsMap.Length);
                        long sectorToRequest = -1;
                        if (!source.IsComplete)
                        {
                            // Pick a random missing sector that this source has and that
                            // no other active source is already fetching (10 attempts).
                            bool sectorCanBeRequested = false;
                            for (int t = 0; t < 10 && !sectorCanBeRequested; t++)
                            {
                                for (long i = 0; i < m_SectorsMap.Length; i++)
                                    if ((~m_SectorsMap[i] & source.SectorsMap[i]) != 0) sectorsToRequest.Add(i);
                                if (sectorsToRequest.Count > 0)
                                {
                                    long d = sectorsToRequest[Randomizer.GenerateNumber(0, sectorsToRequest.Count)];
                                    byte e = m_SectorsMap[d];
                                    byte f = source.SectorsMap[d];
                                    int g;
                                    // Find the first bit set in the source's map but not in ours.
                                    for (g = 0; g < 8; g++)
                                        if (((~e & f) & (1 << g)) != 0) break;
                                    sectorToRequest = d * 8 + g;
                                    sectorCanBeRequested = true;
                                    foreach (Source activeSource in activeSources)
                                        if (activeSource.State == SourceState.Active && activeSource.LastRequestedSector == sectorToRequest)
                                        {
                                            sectorCanBeRequested = false;
                                            break;
                                        }
                                }
                                else sectorCanBeRequested = false;
                            }
                            if (sectorToRequest != -1)
                            {
                                source.Report78Sent(sectorToRequest);
                                Core.SendCommand78(m_DownloadPeerID, source.PeerID, m_DownloadID, sectorToRequest);
                            }
                        }
                        else
                        {
                            // Source has the whole file: any missing sector will do.
                            bool sectorCanBeRequested = false;
                            for (int t = 0; t < 10 && !sectorCanBeRequested; t++)
                            {
                                for (long i = 0; i < m_SectorsMap.Length; i++)
                                    if (m_SectorsMap[i] != 255) sectorsToRequest.Add(i);
                                if (sectorsToRequest.Count > 0)
                                {
                                    long d = sectorsToRequest[Randomizer.GenerateNumber(0, sectorsToRequest.Count)];
                                    byte e = m_SectorsMap[d];
                                    int g;
                                    // Find the first sector bit we do not have yet.
                                    for (g = 0; g < 8; g++)
                                        if ((e & (1 << g)) == 0) break;
                                    sectorToRequest = d * 8 + g;
                                    sectorCanBeRequested = true;
                                    foreach (Source activeSource in activeSources)
                                        if (activeSource.State == SourceState.Active && activeSource.LastRequestedSector == sectorToRequest)
                                        {
                                            sectorCanBeRequested = false;
                                            break;
                                        }
                                }
                                else sectorCanBeRequested = false;
                            }
                            if (sectorToRequest != -1)
                            {
                                source.Report78Sent(sectorToRequest);
                                Core.SendCommand78(m_DownloadPeerID, source.PeerID, m_DownloadID, sectorToRequest);
                            }
                        }
                    }
                }
            }
        }
        finally
        {
            m_Sources.Unlock();
        }
    }
}
/// <summary>
/// Stores a received sector in the temporary file after verifying its hash
/// chain, then requests the next sector from the same source.
/// Sectors are 32768 bytes; the last sector may be shorter.
/// </summary>
/// <param name="sector">Zero-based index of the received sector.</param>
/// <param name="data">The sector payload.</param>
/// <param name="hashCodeResult">The hash code sent by the peer for verification.</param>
/// <param name="source">The source that delivered the sector.</param>
/// <returns>true when the sector was accepted and written; false on duplicate,
/// failed verification, unrequested sector or I/O error.</returns>
public bool SetSectorData(long sector, byte[] data, byte[] hashCodeResult, Source source)
{
    if (sector < 0) throw new ArgumentOutOfRangeException("sector");
    if (data == null) throw new ArgumentNullException("data");
    if (hashCodeResult == null) throw new ArgumentNullException("hashCodeResult");
    if (source == null) throw new ArgumentNullException("source");
    try
    {
        m_Sources.Lock();
        // 2007-06-14 T.Norad
        // set current time as last reception in this download
        m_LastReception = DateTime.Now;
        // Only accept the sector if its bit in the sector map is still unset.
        if ((m_SectorsMap[sector / 8] & (1 << (int)(sector % 8))) == 0)
        {
            // Secure hash comparison BEGIN (original comment: "Sicherer Hash-Vergleich ANFANG")
            // hash(data) XOR fileHash, then SHA-512 applied four times, must
            // match the hash code delivered by the peer.
            byte[] hash = ComputeHashes.SHA512Compute(data);
            byte[] hashCode = new byte[64];
            for (int n = 0; n < 64; n++) hashCode[n] = (byte)(hash[n] ^ m_FileHash[n]);
            hashCode = ComputeHashes.SHA512Compute(hashCode);
            hashCode = ComputeHashes.SHA512Compute(hashCode);
            hashCode = ComputeHashes.SHA512Compute(hashCode);
            hashCode = ComputeHashes.SHA512Compute(hashCode);
            if (!Core.CompareByteArray(hashCode, hashCodeResult))
            {
                m_Logger.Log("A manipulated sector was received!");
                return false;
            }
            // Secure hash comparison END ("Sicherer Hash-Vergleich ENDE")
            // Secure sector comparison BEGIN ("Sicherer Sektor-Vergleich ANFANG"):
            // reject sectors we did not ask this source for.
            if (sector != source.LastRequestedSector || m_IsHashing)
            {
                m_Logger.Log("An unrequested command was received!");
                return false;
            }
            // Secure sector comparison END ("Sicherer Sektor-Vergleich ENDE")
            // Position the file stream at the sector offset ("Filestream.Position setzen").
            m_FileStream.Position = sector * 32768;
            int count;
            // The last sector may be shorter than 32768 bytes.
            if (m_FileStream.Position + 32768 <= m_FileSize) count = 32768;
            else count = (int)(m_FileSize - m_FileStream.Position);
            m_FileStream.Write(data, 0, count);
            m_FileStream.Flush();
            m_ReceivedSectors++;
            //Update SectorsMap
            m_SectorsMap[(int)(sector / 8)] |= (byte)(1 << (int)(sector % 8));
            m_Downloaded += 32768;
            source.Report79Received(sector);
        }
        else return false;
        // More sectors missing: immediately request the next one from this source.
        if (m_ReceivedSectors < m_Sectors)
        {
            RList<long> sectorsToRequest = new RList<long>(m_SectorsMap.Length);
            long sectorToRequest = -1;
            if (!source.IsComplete)
            {
                // Pick a random sector this source has and we are missing, avoiding
                // sectors already requested from other active sources (10 attempts).
                bool sectorCanBeRequested = false;
                for (int t = 0; t < 10 && !sectorCanBeRequested; t++)
                {
                    for (long i = 0; i < m_SectorsMap.Length; i++)
                        if ((~m_SectorsMap[i] & source.SectorsMap[i]) != 0) sectorsToRequest.Add(i);
                    if (sectorsToRequest.Count > 0)
                    {
                        long d = sectorsToRequest[Randomizer.GenerateNumber(0, sectorsToRequest.Count)];
                        byte e = m_SectorsMap[d];
                        byte f = source.SectorsMap[d];
                        int g;
                        // First bit the source has and we do not.
                        for (g = 0; g < 8; g++)
                            if (((~e & f) & (1 << g)) != 0) break;
                        sectorToRequest = d * 8 + g;
                        sectorCanBeRequested = true;
                        foreach (Source activeSource in m_Sources.Values)
                            if (activeSource.State == SourceState.Active && activeSource.LastRequestedSector == sectorToRequest)
                            {
                                sectorCanBeRequested = false;
                                break;
                            }
                    }
                    else sectorCanBeRequested = false;
                }
                if (sectorToRequest != -1)
                {
                    source.Report78Sent(sectorToRequest);
                    Core.SendCommand78(m_DownloadPeerID, source.PeerID, m_DownloadID, sectorToRequest);
                }
            }
            else
            {
                // Source has the complete file: any missing sector is eligible.
                bool sectorCanBeRequested = false;
                for (int t = 0; t < 10 && !sectorCanBeRequested; t++)
                {
                    for (long i = 0; i < m_SectorsMap.Length; i++)
                        if (m_SectorsMap[i] != 255) sectorsToRequest.Add(i);
                    if (sectorsToRequest.Count > 0)
                    {
                        long d = sectorsToRequest[Randomizer.GenerateNumber(0, sectorsToRequest.Count)];
                        byte e = m_SectorsMap[d];
                        int g;
                        // First sector bit we do not have yet.
                        for (g = 0; g < 8; g++)
                            if ((e & (1 << g)) == 0) break;
                        sectorToRequest = d * 8 + g;
                        sectorCanBeRequested = true;
                        foreach (Source activeSource in m_Sources.Values)
                            if (activeSource.State == SourceState.Active && activeSource.LastRequestedSector == sectorToRequest)
                            {
                                sectorCanBeRequested = false;
                                break;
                            }
                    }
                    else sectorCanBeRequested = false;
                }
                if (sectorToRequest != -1)
                {
                    source.Report78Sent(sectorToRequest);
                    Core.SendCommand78(m_DownloadPeerID, source.PeerID, m_DownloadID, sectorToRequest);
                }
            }
        }
        return true;
    }
    catch (Exception ex)
    {
        m_Logger.Log(ex, "An exception was thrown while writing in temporary file '{0}'!", m_TempFilePath);
        return false;
    }
    finally
    {
        m_Sources.Unlock();
    }
}
/// <summary>
/// Exercises the full dictionary surface of RList: construction, Add,
/// key/value/pair enumerators, lookups, CopyTo, Remove and Clear.
/// </summary>
public void RListTest()
{
    // A freshly constructed list is empty, reports list mode, and its
    // enumerator yields nothing.
    RList list = new RList();
    list.Mode.Should().Be(RMode.List);
    list.Should().BeEmpty();
    var emptyEnumerator = list.GetEnumerator();
    emptyEnumerator.Should().NotBeNull();
    emptyEnumerator.MoveNext().Should().BeFalse();

    // Insert a single key/value pair.
    RObject value = new RVector <RNumber>(RMode.Numeric, 1);
    var key = new RString("abc");
    list.Add(key, value);
    list.Should().HaveCount(1);

    // Keys and Values collections each enumerate exactly that pair;
    // key equality is by value, not reference.
    var keyEnumerator = list.Keys.GetEnumerator();
    keyEnumerator.MoveNext();
    keyEnumerator.Current.Should().Be(key);
    keyEnumerator.Current.Should().Be(new RString("abc"));
    var valueEnumerator = list.Values.GetEnumerator();
    valueEnumerator.MoveNext();
    valueEnumerator.Current.Should().Be(value);

    // Membership and lookup members.
    list.ContainsKey(key).Should().BeTrue();
    list.ContainsKey(new RString("abc")).Should().BeTrue();
    list.Contains(new KeyValuePair <RString, RObject>(key, value)).Should().BeTrue();

    // CopyTo honors the target start index.
    var copied = new KeyValuePair <RString, RObject> [2];
    list.CopyTo(copied, 1);
    copied[1].Key.Should().Be(key);
    copied[1].Value.Should().Be(value);

    list[key].Should().Be(value);
    list.IsReadOnly.Should().BeFalse();
    RObject fetched;
    list.TryGetValue(key, out fetched).Should().BeTrue();

    // The default (pair) enumerator yields the single pair and then stops.
    var pairEnumerator = list.GetEnumerator();
    pairEnumerator.Should().NotBeNull();
    pairEnumerator.MoveNext().Should().BeTrue();
    pairEnumerator.Current.Key.Should().Be(key);
    pairEnumerator.Current.Value.Should().Be(value);
    pairEnumerator.MoveNext().Should().BeFalse();

    // The IEnumerable<RObject> view yields only the value.
    IEnumerator <RObject> valuesView = ((IEnumerable <RObject>)list).GetEnumerator();
    valuesView.Should().NotBeNull();
    valuesView.MoveNext().Should().BeTrue();
    valuesView.Current.Should().Be(value);
    valuesView.MoveNext().Should().BeFalse();

    // The IEnumerable<KeyValuePair<...>> view yields the full pair.
    IEnumerator <KeyValuePair <RString, RObject> > pairsView = ((IEnumerable <KeyValuePair <RString, RObject> >)list).GetEnumerator();
    pairsView.Should().NotBeNull();
    pairsView.MoveNext().Should().BeTrue();
    pairsView.Current.Key.Should().Be(key);
    pairsView.Current.Value.Should().Be(value);
    pairsView.MoveNext().Should().BeFalse();

    // The non-generic IEnumerable view enumerates exactly one element.
    IEnumerator untypedView = ((IEnumerable)list).GetEnumerator();
    untypedView.Should().NotBeNull();
    untypedView.MoveNext().Should().BeTrue();
    untypedView.MoveNext().Should().BeFalse();

    // Remove empties the list again.
    list.Remove(key).Should().BeTrue();
    list.Should().BeEmpty();
    list.ContainsKey(key).Should().BeFalse();

    // Adding via the KeyValuePair overload updates Length and Count;
    // Clear resets both, and TryGetValue misses afterwards.
    list.Add(new KeyValuePair <RString, RObject>(new RString("x"), new RLogical(true)));
    list.Length.Should().Be(1);
    list.Count.Should().Be(1);
    list.Clear();
    list.Length.Should().Be(0);
    list.Count.Should().Be(0);
    list.TryGetValue(key, out fetched).Should().BeFalse();
    fetched.Should().BeNull();
}
/// <summary>
/// Collects all locally shared files whose file name, album, artist or title
/// contains <paramref name="searchPattern"/> and, if anything matched, answers
/// the searching peer with a Command23 packet. The reply is capped so its
/// entries stay within Constants.MaximumDataLength.
/// </summary>
/// <param name="senderPeerID">48-byte ID of the peer that issued the search</param>
/// <param name="searchID">48-byte ID identifying the search</param>
/// <param name="searchPattern">substring to search for (matched case-insensitively)</param>
private static void SendSearchResults(byte[] senderPeerID, byte[] searchID, string searchPattern)
{
    if (senderPeerID == null) throw new ArgumentNullException("senderPeerID");
    if (senderPeerID.Length != 48) throw new ArgumentException("senderPeerID must be 48 bytes long.", "senderPeerID");
    if (searchID == null) throw new ArgumentNullException("searchID");
    if (searchID.Length != 48) throw new ArgumentException("searchID must be 48 bytes long.", "searchID");
    if (searchPattern == null) throw new ArgumentNullException("searchPattern");

    RList<Command23.SearchResult> searchResults = new RList<Command23.SearchResult>();
    int entriesLength = 0;

    foreach (SharedFile sharedFile in SharedFiles.Values)
    {
        // Case-insensitive ordinal matching. The previous ToLower()-based
        // comparison was culture-sensitive and could miss matches under
        // locales with non-trivial casing rules (e.g. the Turkish dotless I).
        bool found = ContainsIgnoreCase(sharedFile.FileName, searchPattern)
            || ContainsIgnoreCase(sharedFile.Album, searchPattern)
            || ContainsIgnoreCase(sharedFile.Artist, searchPattern)
            || ContainsIgnoreCase(sharedFile.Title, searchPattern);

        if (found)
        {
            int entryLength = sharedFile.GetEntryLength();
            // Stop collecting once the next entry would exceed the maximum
            // payload size of a single command.
            if (entriesLength + entryLength > Constants.MaximumDataLength)
                break;
            searchResults.Add(new Command23.SearchResult(sharedFile.FileHash, (uint)sharedFile.FileSize, sharedFile.FileName, sharedFile.MetaData, sharedFile.Comment, sharedFile.Rating));
            entriesLength += entryLength;
        }
    }

    if (!searchResults.IsEmpty)
    {
        byte[] commandID = GenerateIDOrHash();
        Send(new Command23(commandID, m_PeerID, senderPeerID, searchID, searchResults));
        // Record when this command ID was used — presumably for later
        // duplicate detection; TODO confirm against m_LastCommandID's consumers.
        m_LastCommandID[ByteArrayToString(commandID)] = DateTime.Now;
    }
}

/// <summary>
/// Returns true when <paramref name="text"/> is non-null and contains
/// <paramref name="pattern"/>, compared ordinally and ignoring case.
/// </summary>
private static bool ContainsIgnoreCase(string text, string pattern)
{
    return text != null && text.IndexOf(pattern, StringComparison.OrdinalIgnoreCase) >= 0;
}
/// <summary>
/// Broadcasts a request command to at most <paramref name="dropChainTailCount"/>
/// randomly chosen established connections, never sending to the connection
/// whose address equals <paramref name="excludedConnection"/> and never sending
/// to the same connection twice. Runs under the m_Connections lock.
/// </summary>
/// <param name="command">command to broadcast; must not be null</param>
/// <param name="excludedConnection">address to skip (typically the connection the command arrived on)</param>
/// <param name="dropChainTailCount">upper bound on how many connections receive the command</param>
private static void SendBroadcast(IRequestCommand command, IPAddress excludedConnection, int dropChainTailCount)
{
    if (command == null)
    {
        throw new ArgumentNullException("command");
    }
    try
    {
        m_Connections.Lock();
        // Gather every established connection except the excluded address.
        RList<Connection> candidates = new RList<Connection>();
        foreach (Connection candidate in m_Connections.Values)
        {
            if (candidate.IsEstablished && !candidate.RemoteEndPoint.Address.Equals(excludedConnection))
            {
                candidates.Add(candidate);
            }
        }
        // Send to random distinct candidates; removing each pick guarantees
        // no connection is used twice.
        int sendCount = Math.Min(dropChainTailCount, candidates.Count);
        for (int sent = 0; sent < sendCount; sent++)
        {
            int pick = Randomizer.GenerateNumber(0, candidates.Count);
            command.Send(candidates[pick]);
            candidates.RemoveAt(pick);
        }
    }
    finally
    {
        m_Connections.Unlock();
    }
}
/// <summary>
/// Resumes previously saved downloads: reads the persisted downloads XML file,
/// matches its entries against the temporary files on disk, rebuilds a Download
/// object for each valid pair, and re-adds them to the queue in their saved order.
/// Always signals DownloadsXmlWriter when done so downloads.xml may be written again.
/// (Neue ResumeDownloads(); 10.06.2009 Lars; 03.07.2009 Lars (Neue
/// Downloadwarteschlange); 04.07.2009 Lars (Einfacheres und besseres Handling))
/// </summary>
private static void ResumeDownloads()
{
    try
    {
        // Read all saved downloads, keyed by file-hash string.
        RIndexedHashtable<string, XmlNode> downloadsXml = new RIndexedHashtable<string, XmlNode>();
        if (File.Exists(m_DownloadsFilePath))
        {
            XmlDocument downloadsXmlDocument = new XmlDocument();
            downloadsXmlDocument.Load(m_DownloadsFilePath);
            foreach (XmlNode downloadNode in downloadsXmlDocument.SelectSingleNode("downloads"))
                try
                {
                    downloadsXml.Add(downloadNode.Attributes["hash"].InnerText, downloadNode);
                }
                catch (Exception ex)
                {
                    m_Logger.Log(ex, "A download cannot be resumed due to non existent information about it!");
                    continue;
                }
        }
        // Walk all files in the temporary directory and pair each with its XML node.
        RList<Download> temporary = new RList<Download>(downloadsXml.Count);
        foreach (string filePath in Directory.GetFiles(Settings.Instance["TemporaryDirectory"]))
        {
            string fileName = new FileInfo(filePath).Name;
            try
            {
                // Temporary download files are named by their 128-hex-digit file hash.
                if (!Regex.IsMatch(fileName, "^[0-9A-F]{128,128}$", RegexOptions.IgnoreCase))
                {
                    m_Logger.Log("The file \"{0}\" is no valid temporary download!", fileName);
                    // BUGFIX: previously only logged but fell through and kept
                    // processing the invalid file; skip it as the message implies.
                    continue;
                }
                XmlNode node;
                if (!downloadsXml.TryGetValue(fileName, out node))
                {
                    m_Logger.Log("The download of \"{0}\" cannot be resumed due to non existent information about it!", fileName);
                    continue;
                }
                // hasinformation="none" marks downloads whose metadata (e.g. sector map)
                // was never obtained.
                bool hasInformation = true;
                if ((node as XmlElement).HasAttribute("hasinformation") && (node as XmlElement).GetAttribute("hasinformation") == "none")
                    hasInformation = false;
                string lastSeenString = null;
                DateTime? lastSeen = null;
                string lastReceptionString = null;
                DateTime? lastReception = null;
                String subfolder = string.Empty;
                // Optional elements: lastseen / lastreception timestamps and subfolder.
                // NOTE(review): DateTime.Parse uses the current culture — assumes the
                // file was written with the same culture; confirm against the writer.
                if (node.SelectSingleNode("lastseen") != null)
                {
                    lastSeenString = node.SelectSingleNode("lastseen").InnerText;
                    if (lastSeenString != null && lastSeenString.Length > 0) lastSeen = DateTime.Parse(lastSeenString);
                }
                if (node.SelectSingleNode("lastreception") != null)
                {
                    lastReceptionString = node.SelectSingleNode("lastreception").InnerText;
                    if (lastReceptionString != null && lastReceptionString.Length > 0) lastReception = DateTime.Parse(lastReceptionString);
                }
                if (node.SelectSingleNode("subfolder") != null)
                    subfolder = node.SelectSingleNode("subfolder").InnerText;
                Download download = new Download(Core.FileHashStringToFileHash(fileName), node.SelectSingleNode("filename").InnerText, long.Parse(node.SelectSingleNode("filesize").InnerText), hasInformation, lastSeen, lastReception, hasInformation ? Convert.FromBase64String(node.SelectSingleNode("sectorsmap").InnerText) : null);
                download.SetSubFolderAndTime(subfolder, null);
                temporary.Add(download);
            }
            catch (Exception ex)
            {
                m_Logger.Log(ex, "An exception was thrown while resuming the download of \"{0}\"!", fileName);
            }
        }
        // Sort the resumable downloads back into their order in the XML file
        // (bubble sort; the list is small).
        for (int n = 1; n <= temporary.Count - 1; n++)
            for (int m = 0; m < temporary.Count - n; m++)
            {
                Download a = temporary[m];
                Download b = temporary[m + 1];
                if (downloadsXml.IndexOfKey(a.FileHashString) > downloadsXml.IndexOfKey(b.FileHashString))
                {
                    temporary[m] = b;
                    temporary[m + 1] = a;
                }
            }
        // Re-add the downloads to the queue under its lock.
        try
        {
            m_DownloadsAndQueue.Lock();
            foreach (Download download in temporary)
                m_DownloadsAndQueue.Add(download);
        }
        finally
        {
            m_DownloadsAndQueue.Unlock();
        }
    }
    catch (Exception ex)
    {
        m_Logger.Log(ex, "An exception was thrown while resuming downloads!");
    }
    finally
    {
        // Only now may downloads.xml be written again.
        DownloadsXmlWriter.SetIsReady();
    }
}
/// <summary>
/// Creates the search database manager and starts its background worker thread.
/// The worker loops roughly once per second until m_IsClosing is set or 10 errors
/// have occurred. Each pass it (1) drains the two thread-safe input buffers
/// (searches to start, results to add) into the working lists, (2) if there is
/// work, streams through the flat database file entry by entry — collecting
/// matches for pending searches, refreshing the date of entries that reappear
/// in the new results, merging new file names, and (once a day) dropping entries
/// older than m_CleanUpDays by compacting the file in place through a memory
/// buffer — then appends genuinely new results at the end, and (3) publishes the
/// collected search results. On error the file is truncated back to the last
/// known good entry, or deleted if even that fails.
/// </summary>
/// <param name="fileName">path of the search database file</param>
public SearchDBManager(string fileName)
{
    m_FilePath = fileName;
    m_CleanUpDays = int.Parse(m_Settings["SearchDBCleanUpDays"]);
    m_SearchDBThread = new Thread(delegate()
    {
        try
        {
            Core.SetUILanguage();
            while (!m_IsClosing && m_ErrorCounter < 10)
            {
                //Move buffers to normal list
                try
                {
                    m_SearchesToStartBuffer.Lock();
                    m_SearchesToStart.Lock();
                    foreach (SearchDBManager.SearchToStart newSearch in m_SearchesToStartBuffer)
                    {
                        if (!m_SearchesToStart.Contains(newSearch))
                        {
                            m_SearchesToStart.Add(newSearch);
                        }
                    }
                }
                catch (Exception ex)
                {
                    m_Logger.Log(ex, "SearchDBManager: An error was thrown while reading the SearchesToStartBuffer.", new object[] { });
                }
                finally
                {
                    m_SearchesToStartBuffer.Clear();
                    m_SearchesToStart.Unlock();
                    m_SearchesToStartBuffer.Unlock();
                }
                try
                {
                    m_ResultsToAddBuffer.Lock();
                    m_ResultsToAdd.Lock();
                    foreach (Command23.SearchResult result in m_ResultsToAddBuffer)
                    {
                        if (!m_ResultsToAdd.Contains(result))
                        {
                            m_ResultsToAdd.Add(result);
                        }
                    }
                }
                catch (Exception ex)
                {
                    m_Logger.Log(ex, "SearchDBManager: An error was thrown while reading the ResultsToAddBuffer.", new object[] { });
                }
                finally
                {
                    m_ResultsToAddBuffer.Clear();
                    m_ResultsToAdd.Unlock();
                    m_ResultsToAddBuffer.Unlock();
                }
                //Because we are only comparing dates, it is only necessary to compare them once a day
                bool cleanUp = false;
                if (((TimeSpan)DateTime.Now.Subtract(m_LastCleanUp)).Days >= 1)
                {
                    //CleanUp();
                    cleanUp = true;
                }
                //The current entry
                long lastKnownValidFilePosition = 0;
                //The entry before
                long lastKnownValidFilePosition2 = 0;
                ulong fileSizeOfEntries = 0;
                long cleanedUpCounter = 0;
                long resultCounter = 0;
                FileStream fileStream = null;
                BinaryReader fileReader = null;
                BinaryWriter fileWriter = null;
                MemoryStream memoryStream = null;
                BinaryReader memoryReader = null;
                BinaryWriter memoryWriter = null;
                try
                {
                    m_ResultsToAdd.Lock();
                    m_SearchesToStart.Lock();
                    m_SearchResultsBuffer.Lock();
                    //Check if there is something to do
                    if (m_ResultsToAdd.Count > 0 || m_SearchesToStart.Count > 0 || cleanUp)
                    {
                        // Reader and writer share one FileStream; positions are swapped
                        // back and forth deliberately below — the order is load-bearing.
                        fileStream = new FileStream(m_FilePath, FileMode.OpenOrCreate, FileAccess.ReadWrite);
                        fileReader = new BinaryReader(fileStream, Encoding.Unicode);
                        fileWriter = new BinaryWriter(fileStream, Encoding.Unicode);
                        memoryStream = new MemoryStream();
                        memoryReader = new BinaryReader(memoryStream, Encoding.Unicode);
                        memoryWriter = new BinaryWriter(memoryStream, Encoding.Unicode);
                        long fileLength = fileReader.BaseStream.Length;
                        int fileFlushCounter = 0;
                        long fileReadPosition = 0;
                        long fileWritePosition = 0;
                        bool insertingData = false;
                        bool isFirstChangedEntry = true;
                        //Add a array for the results of each search
                        foreach (SearchToStart searchToStart in m_SearchesToStart)
                        {
                            m_SearchResultsBuffer.Add(searchToStart.SearchID, new RIndexedHashtable <string, OldSearchResult>());
                        }
                        //Go through the file
                        while (fileReadPosition < fileLength)
                        {
                            bool isOld = false;
                            long firstPositionOfThisEntry = fileReadPosition;
                            lastKnownValidFilePosition2 = lastKnownValidFilePosition;
                            lastKnownValidFilePosition = fileReadPosition;
                            //Read the next entry
                            // Entry layout: 64-byte hash, int64 size, int32 name count,
                            // that many strings, album, artist, title, byte rating, date string.
                            byte[] rFileHash = fileReader.ReadBytes(64);
                            long rFileSize = fileReader.ReadInt64();
                            int rFileNameCount = fileReader.ReadInt32();
                            string[] rFileNames = new string[rFileNameCount];
                            for (int i = 0; i < rFileNameCount; i++)
                            {
                                rFileNames[i] = fileReader.ReadString();
                            }
                            string rAlbum = fileReader.ReadString();
                            string rArtist = fileReader.ReadString();
                            string rTitle = fileReader.ReadString();
                            byte rRating = fileReader.ReadByte();
                            //Save the position of the date
                            long datePosition = fileReader.BaseStream.Position;
                            string rDate = fileReader.ReadString();
                            //Save the beginning of the next entry
                            fileReadPosition = fileReader.BaseStream.Position;
                            resultCounter++;
                            //Check if this entry is a result to a search
                            for (int i = 0; i < m_SearchesToStart.Count; i++)
                            {
                                string[] searchPattern = m_SearchesToStart[i].Pattern.ToLower().Split(new char[] { ' ' });;
                                //Remove all small patterns
                                RList <string> patterns = new RList <string>();
                                for (int k = 0; k < searchPattern.Length; k++)
                                {
                                    if (searchPattern[k].Length >= 3)
                                    {
                                        patterns.Add(searchPattern[k]);
                                    }
                                }
                                bool isResult = false;
                                int fileNameNumber = 0;
                                // All pattern parts must match somewhere in the entry.
                                for (int j = 0; j < patterns.Count; j++)
                                {
                                    //Check all filenames of this entry
                                    for (int k = 0; k < rFileNames.Length; k++)
                                    {
                                        if (rFileNames[k].ToLower().Contains(patterns[j]))
                                        {
                                            fileNameNumber = k;
                                            isResult = true;
                                        }
                                    }
                                    //Check the metadata of this entry
                                    if (!isResult)
                                    {
                                        if (rAlbum.ToLower().Contains(patterns[j]))
                                        {
                                            isResult = true;
                                        }
                                        else if (rArtist.ToLower().Contains(patterns[j]))
                                        {
                                            isResult = true;
                                        }
                                        else if (rTitle.ToLower().Contains(patterns[j]))
                                        {
                                            isResult = true;
                                        }
                                    }
                                    //if this is no result for this part of the searchpattern,
                                    //we can stop, because there shall be only results with all
                                    //parts of the searchpattern.
                                    if (isResult == false)
                                    {
                                        break;
                                    }
                                    //Reset isResult for the next part of the searchpattern
                                    if (j != patterns.Count - 1)
                                    {
                                        isResult = false;
                                    }
                                }
                                if (isResult)
                                {
                                    //Add this entry to the results of this search
                                    m_SearchResultsBuffer[m_SearchesToStart[i].SearchID].Add(Core.ByteArrayToString(rFileHash), new OldSearchResult(rFileHash, rFileSize, rFileNames[fileNameNumber], rAlbum, rArtist, rTitle, rRating, DateTime.Parse(rDate)));
                                }
                            }
                            bool updateDate = false;
                            int[] indexOfResultsToRemove = new int[0];
                            //Check if a new result is equal to this entry
                            for (int i = 0; i < m_ResultsToAdd.Count; i++)
                            {
                                //Compare the hashes
                                if (Core.CompareByteArray(rFileHash, m_ResultsToAdd[i].FileHash))
                                {
                                    //It exists already
                                    updateDate = true;
                                    // Grow the index array by one (manual array resize).
                                    int[] tempArray1 = new int[indexOfResultsToRemove.Length + 1];
                                    for (int j = 0; j < indexOfResultsToRemove.Length; j++)
                                    {
                                        tempArray1[j] = indexOfResultsToRemove[j];
                                    }
                                    tempArray1[indexOfResultsToRemove.Length] = i;
                                    indexOfResultsToRemove = tempArray1;
                                    //Check the filenames
                                    bool fileNameExists = false;
                                    for (int k = 0; k < rFileNames.Length; k++)
                                    {
                                        if (rFileNames[k] == m_ResultsToAdd[i].FileName)
                                        {
                                            fileNameExists = true;
                                            break;
                                        }
                                    }
                                    if (!fileNameExists)
                                    {
                                        //The filename is new -> add it
                                        // The entry grows, so from here on the file must be
                                        // rewritten through the memory buffer.
                                        insertingData = true;
                                        string[] tempArray = new string[rFileNameCount + 1];
                                        for (int k = 0; k < rFileNameCount; k++)
                                        {
                                            tempArray[k] = rFileNames[k];
                                        }
                                        tempArray[rFileNameCount] = m_ResultsToAdd[i].FileName;
                                        rFileNames = tempArray;
                                        rFileNameCount++;
                                    }
                                }
                            }
                            if (updateDate)
                            {
                                //Update the date
                                rDate = DateTime.Now.ToString(m_DateFormatString);
                                //Remove the new result from the list, because it exists
                                RList <Command23.SearchResult> tempRemoveList = new RList <Command23.SearchResult>();
                                for (int i = 0; i < m_ResultsToAdd.Count; i++)
                                {
                                    bool addIt = false;
                                    for (int k = 0; k < indexOfResultsToRemove.Length; k++)
                                    {
                                        if (i == indexOfResultsToRemove[k])
                                        {
                                            addIt = true;
                                        }
                                    }
                                    if (addIt)
                                    {
                                        tempRemoveList.Add(m_ResultsToAdd[i]);
                                    }
                                }
                                foreach (Command23.SearchResult r in tempRemoveList)
                                {
                                    m_ResultsToAdd.Remove(r);
                                }
                                //Check if we can update the date directly in the file
                                if (!insertingData)
                                {
                                    //Write the new date to the file
                                    // In-place overwrite works because the formatted date
                                    // string keeps the entry length unchanged.
                                    fileWriter.BaseStream.Position = datePosition;
                                    fileWriter.Write(rDate);
                                    fileWriter.Flush();
                                    fileReader.BaseStream.Position = fileReadPosition;
                                }
                            }
                            //Check the date if we are cleaning up
                            if (cleanUp)
                            {
                                if (((TimeSpan)DateTime.Now.Subtract(DateTime.Parse(rDate))).Days > m_CleanUpDays)
                                {
                                    // Entry is too old: drop it (do not copy to the buffer).
                                    isOld = true;
                                    insertingData = true;
                                    cleanedUpCounter++;
                                }
                                else
                                {
                                    fileSizeOfEntries += (ulong)rFileSize;
                                }
                            }
                            else
                            {
                                fileSizeOfEntries += (ulong)rFileSize;
                            }
                            //Check if we have to insert data to the file
                            if (insertingData)
                            {
                                if (isFirstChangedEntry)
                                {
                                    //Here we have to beginn writing
                                    fileWritePosition = firstPositionOfThisEntry;
                                    isFirstChangedEntry = false;
                                }
                                if (!isOld)
                                {
                                    fileFlushCounter++;
                                    //Write the entry to the buffer
                                    memoryWriter.Write(rFileHash);
                                    memoryWriter.Write(rFileSize);
                                    memoryWriter.Write(rFileNameCount);
                                    for (int i = 0; i < rFileNameCount; i++)
                                    {
                                        memoryWriter.Write(rFileNames[i]);
                                    }
                                    memoryWriter.Write(rAlbum);
                                    memoryWriter.Write(rArtist);
                                    memoryWriter.Write(rTitle);
                                    memoryWriter.Write(rRating);
                                    memoryWriter.Write(rDate);
                                    //if the buffer is big enough or we reached the end of the file, write the buffe to the file
                                    if (fileFlushCounter == 10000 || fileReadPosition >= fileLength)
                                    {
                                        fileFlushCounter = 0;
                                        memoryWriter.Flush();
                                        memoryReader.BaseStream.Position = 0;
                                        fileWriter.BaseStream.Position = fileWritePosition;
                                        long memoryLength = memoryReader.BaseStream.Length;
                                        // Never write past the current read position — that
                                        // region has not been consumed from the file yet.
                                        long spaceInFile = fileReadPosition - fileWritePosition;
                                        //write only as much as space and data we have
                                        while (memoryReader.BaseStream.Position < spaceInFile && memoryReader.BaseStream.Position < memoryLength)
                                        {
                                            fileWriter.Write(memoryReader.ReadByte());
                                        }
                                        fileWriter.Flush();
                                        //Reconfigure the filewriter/reader
                                        fileWritePosition = fileWriter.BaseStream.Position;
                                        fileReader.BaseStream.Position = fileReadPosition;
                                        //Write the rest of the data in the buffer to the beginning of the buffer
                                        long memoryReaderPosition = memoryReader.BaseStream.Position;
                                        long memoryWriterPosition = 0;
                                        while (memoryReaderPosition < memoryLength)
                                        {
                                            memoryReader.BaseStream.Position = memoryReaderPosition;
                                            byte b = memoryReader.ReadByte();
                                            memoryReaderPosition = memoryReader.BaseStream.Position;
                                            memoryWriter.BaseStream.Position = memoryWriterPosition;
                                            memoryWriter.Write(b);
                                            memoryWriterPosition = memoryWriter.BaseStream.Position;
                                        }
                                        memoryWriter.Flush();
                                        memoryWriter.BaseStream.SetLength(memoryWriterPosition);
                                    }
                                }
                            }
                        }
                        if (insertingData)
                        {
                            //write the rest of the memorystream to the file.
                            fileWriter.BaseStream.Position = fileWritePosition;
                            long mlength = memoryReader.BaseStream.Length;
                            memoryReader.BaseStream.Position = 0;
                            while (memoryReader.BaseStream.Position < mlength)
                            {
                                fileWriter.Write(memoryReader.ReadByte());
                            }
                            fileWriter.Flush();
                        }
                        if (cleanUp)
                        {
                            m_Logger.Log(Properties.Resources_Core.CleanSearchDatabase, new object[] { cleanedUpCounter, resultCounter });
                            resultCounter -= cleanedUpCounter;
                            m_LastCleanUpCount = cleanedUpCounter;
                        }
                        //Add the new results to the file
                        //The position of the filestream points already to the end
                        // Deduplicate new results by hash first, merging file names.
                        RIndexedHashtable <string, NewSearchResult> resultsToAdd = new RIndexedHashtable <string, NewSearchResult>();
                        foreach (Command23.SearchResult result in m_ResultsToAdd)
                        {
                            string fileHashString = Core.ByteArrayToString(result.FileHash);
                            if (resultsToAdd.ContainsKey(fileHashString))
                            {
                                resultsToAdd[fileHashString].AddFileName(result.FileName);
                            }
                            else
                            {
                                resultsToAdd.Add(fileHashString, new NewSearchResult(result));
                            }
                        }
                        foreach (NewSearchResult newResult in resultsToAdd.Values)
                        {
                            fileWriter.Write(newResult.FileHash);
                            fileWriter.Write(newResult.FileSize);
                            int fileNameCount = newResult.FileNames.Length;
                            fileWriter.Write(fileNameCount);
                            for (int i = 0; i < fileNameCount; i++)
                            {
                                fileWriter.Write(newResult.FileNames[i]);
                            }
                            fileWriter.Write(newResult.Album);
                            fileWriter.Write(newResult.Artist);
                            fileWriter.Write(newResult.Title);
                            fileWriter.Write(newResult.Rating);
                            fileWriter.Write(DateTime.Now.ToString(m_DateFormatString));
                            resultCounter++;
                            fileSizeOfEntries += (ulong)newResult.FileSize;
                        }
                        fileWriter.Flush();
                        //Clear the lists
                        m_ResultsToAdd.Clear();
                        m_SearchesToStart.Clear();
                        //Set the correct end of the file
                        if (insertingData)
                        {
                            fileWriter.BaseStream.SetLength(fileWriter.BaseStream.Position);
                        }
                        if (cleanUp)
                        {
                            m_LastCleanUp = DateTime.Now;
                        }
                        //Update information
                        m_ResultCount = resultCounter;
                        m_FileSize = fileStream.Length;
                        m_FileSizeOfEntries = fileSizeOfEntries;
                        fileReader.Close();
                        fileWriter.Close();
                        fileStream.Close();
                        memoryReader.Close();
                        memoryWriter.Close();
                        memoryStream.Close();
                    }
                }
                catch (Exception ex)
                {
                    //Update information
                    m_ResultCount = resultCounter;
                    m_FileSize = fileStream.Length;
                    m_FileSizeOfEntries = fileSizeOfEntries;
                    m_ErrorCounter++;
                    m_Logger.Log(ex, "An exception was thrown in searchDBThread! (#{0})", new object[] { m_ErrorCounter });
                    // Recovery: truncate the database back to the entry before the
                    // last known valid one; if even that fails, delete the file.
                    try
                    {
                        fileStream.SetLength(lastKnownValidFilePosition2);
                        m_Logger.Log("Searchdatabase cutted to the entry bofore the last known valid entry. ({0} Bytes remaining)", new object[] { lastKnownValidFilePosition });
                        m_FileSize = lastKnownValidFilePosition2;
                        m_ResultCount = resultCounter - cleanedUpCounter;
                        m_FileSizeOfEntries = fileSizeOfEntries;
                    }
                    catch
                    {
                        try
                        {
                            if (File.Exists(m_FilePath))
                            {
                                File.Delete(m_FilePath);
                                m_Logger.Log("Searchdatabase deleted, because it was probably corrupt.", new object[] { });
                                m_FileSize = 0;
                                m_ResultCount = 0;
                                m_FileSizeOfEntries = 0;
                            }
                        }
                        catch
                        {
                        }
                    }
                }
                finally
                {
                    m_ResultsToAdd.Unlock();
                    m_SearchesToStart.Unlock();
                    m_SearchResultsBuffer.Unlock();
                    if (fileReader != null)
                    {
                        fileReader.Close();
                    }
                    if (fileWriter != null)
                    {
                        fileWriter.Close();
                    }
                    if (fileStream != null)
                    {
                        fileStream.Close();
                    }
                    if (memoryReader != null)
                    {
                        memoryReader.Close();
                    }
                    if (memoryWriter != null)
                    {
                        memoryWriter.Close();
                    }
                    if (memoryStream != null)
                    {
                        memoryStream.Close();
                    }
                }
                //Move buffer to normal list
                try
                {
                    m_SearchResultsBuffer.Lock();
                    m_SearchResults.Lock();
                    for (int i = 0; i < m_SearchResultsBuffer.Count; i++)
                    {
                        if (!m_SearchResults.ContainsKey(((System.Collections.Generic.KeyValuePair <string, RIndexedHashtable <string, OldSearchResult> >)m_SearchResultsBuffer[i]).Key))
                        {
                            m_SearchResults.Add(((System.Collections.Generic.KeyValuePair <string, RIndexedHashtable <string, OldSearchResult> >)m_SearchResultsBuffer[i]).Key, ((System.Collections.Generic.KeyValuePair <string, RIndexedHashtable <string, OldSearchResult> >)m_SearchResultsBuffer[i]).Value);
                        }
                    }
                }
                catch (Exception ex)
                {
                    m_Logger.Log(ex, "SearchDBManager: An error was thrown while reading the SearchResultsBuffer.", new object[] { });
                }
                finally
                {
                    m_SearchResultsBuffer.Clear();
                    m_SearchResults.Unlock();
                    m_SearchResultsBuffer.Unlock();
                }
                Thread.Sleep(1000);
            }
        }
        catch (Exception ex)
        {
            m_Logger.Log(ex, "An exception was thrown in searchDBThread!", new object[] { });
        }
        m_Logger.Log("SearchDBManager closed.", new object[] { });
    });
    m_SearchDBThread.Name = "searchDBThread";
    m_SearchDBThread.IsBackground = true;
    m_SearchDBThread.Priority = ThreadPriority.Lowest;
    m_SearchDBThread.Start();
}