public TA(Monitor monitor, TAInfo taInfo, string dllName)
{
    this.monitor = monitor;
    this.TickTA = new TickTA(monitor);

    // Create the K-line storage; period 0 (the daily K-line) is always present by default.
    dictKS = new Dictionary<int, RList<KLine>>()
    {
        { 0, new RList<KLine>() }
    };
    foreach (int p in taInfo.Periods)
    {
        dictKS[p] = new RList<KLine>();
    }

    // Instantiate each formula listed in taInfo from the given assembly.
    Assembly assembly = Assembly.LoadFrom(dllName + ".DLL");
    FList = new List<FormulaInfo>();
    foreach (FormulaInfo fi in taInfo.FList)
    {
        FormulaInfo fiClone = fi.Copy();
        Type type = assembly.GetType(string.Format("{0}.{1}_Formula", dllName, fi.Name));
        fiClone.Formula = (TAFormula)Activator.CreateInstance(type, fi.Parameters);
        FList.Add(fiClone);
    }
    BPList = taInfo.BuyPoints;
}
public override void Push(RList<KLine> x) { yMTR.Add(0.0); yATR.Add(0.0); Formula.ATR(x, n, yMTR, yATR); }
public Command23(CommandBuilder command) { if (command == null) { throw new ArgumentNullException("command"); } m_Command = command; m_CommandID = m_Command.ReadBytes(48); m_SenderPeerID = m_Command.ReadBytes(48); m_ReceiverPeerID = m_Command.ReadBytes(48); m_SearchID = m_Command.ReadBytes(48); m_SearchResults = new RList <SearchResult>(); ushort searchResultsCount = m_Command.ReadUInt16(); for (int n = 0; n < searchResultsCount; n++) { byte[] fileHash = m_Command.ReadBytes(64); uint fileSize = m_Command.ReadUInt32(); string fileName = m_Command.ReadString(); RIndexedHashtable <string, string> metaData = new RIndexedHashtable <string, string>(); ushort metaDataCount = m_Command.ReadUInt16(); for (int m = 0; m < metaDataCount; m++) { metaData.Add(m_Command.ReadString(), m_Command.ReadString()); } m_SearchResults.Add(new SearchResult(fileHash, fileSize, fileName, metaData, m_Command.ReadString(), m_Command.ReadByte())); } }
public void AddResult(RList <Command23.SearchResult> results) { if (m_SearchDBThread.IsAlive) { try { m_ResultsToAddBuffer.Lock(); foreach (Command23.SearchResult result in results) { if (!m_ResultsToAddBuffer.Contains(result)) { m_ResultsToAddBuffer.Add(result); } } } catch (Exception ex) { m_Logger.Log(ex, "SearchDBManager: An error was thrown while adding a result to the list.", new object[] { }); } finally { m_ResultsToAddBuffer.Unlock(); } } }
public override void OnGUI(Rect rect, SerializedProperty property, GUIContent label)
{
    if (!isEnabled)
    {
        // This shouldn't be here, but since CanCacheInspectorGUI is apparently not called,
        // there is currently no way to run this only once when the drawer is enabled.
        OnEnable(property);
    }
    foldoutRList = EditorGUI.Foldout(new Rect(rect.position, new Vector2(rect.size.x, space)), foldoutRList, label, true);
    if (KeysProperty == null)
    {
        DrawErrorMessage(rect, property.name.Length, KeyTypeErrorMessage);
    }
    else if (ValuesProperty == null)
    {
        DrawErrorMessage(rect, property.name.Length, ValueTypeErrorMessage);
    }
    else if (hasDuplicatedKey)
    {
        DrawErrorMessage(rect, property.name.Length, DuplicatedKeyErrorMessage);
    }
    if (foldoutRList && RList != null)
    {
        hasDuplicatedKey = false;
        rect.y += space;
        RList.DoList(rect);
    }
}
public static void MA(RList<KLine> s, string sname, int n, RList<double> t)
{
    int length = s.Count;
    if (length < n) return;

    // Resolve the KLine property (e.g. "CLOSE") by name.
    PropertyInfo propS = typeof(KLine).GetProperty(sname);
    if (length == n)
    {
        // Seed the initial value with a full n-period average.
        double sum = 0.0;
        for (int i = 0; i < n; i++)
        {
            sum += Convert.ToDouble(propS.GetValue(s[i]));
        }
        t[0] = sum / n;
    }
    else
    {
        // Incremental update: add the newest value and drop the one leaving the window.
        double s0 = Convert.ToDouble(propS.GetValue(s[0]));
        double sn = Convert.ToDouble(propS.GetValue(s[n]));
        t[0] = t[1] + (s0 - sn) / n;
    }
}
public void ReverseListTest() { const string data = "How now, brown cow?"; var list = data.Split().Aggregate(RList <string> .Empty, (current, word) => RList <string> .Cons(word, current)); var reverse = RList <string> .Reverse(list); Assert.AreEqual("[How, now,, brown, cow?]", reverse.ToReadableString()); }
public void ReverseSingleListTest() { var list = RList <string> .Cons("Wow", RList <string> .Empty); var reverse = RList <string> .Reverse(list); Assert.AreSame(list, reverse); }
public void IsEmptyTest() { var list = RList <string> .Empty; Assert.IsTrue(RList <string> .IsEmpty(list)); list = RList <string> .Cons("A", list); Assert.IsFalse(RList <string> .IsEmpty(list)); }
public void UpdateNegativeTest() { const string data = "How now, brown cow?"; var list = data.Split().Aggregate(RList <string> .Empty, (current, word) => RList <string> .Cons(word, current)); var exception = AssertThrows <ArgumentException>(() => RList <string> .Fupdate(null, -1, list)); Assert.AreEqual("Negative\r\nParameter name: i", exception.Message); }
public void LookupOneTest() { const string data = "How now, brown cow?"; var list = data.Split().Aggregate(RList <string> .Empty, (current, word) => RList <string> .Cons(word, current)); var item = RList <string> .Lookup(1, list); Assert.AreEqual("brown", item); }
public void CatLeftEmptyTest() { const string data = "How now, brown cow?"; var list = data.Split().Aggregate(RList <string> .Empty, (current, word) => RList <string> .Cons(word, current)); var list2 = RList <string> .Cat(RList <string> .Empty, list); Assert.AreSame(list, list2); }
public void update(bool faceFound, Image face) { RList.Add(new FindResult(faceFound, face)); if (faceFound) { this.isFaceFound = true; this.faceImage = face; } }
public WebCacheProvider(RList<string> webCaches) { if (webCaches == null) throw new ArgumentNullException("webCaches"); m_WebCaches = new RList<WebCacheWebServiceProxy>(); foreach (string webCache in webCaches) m_WebCaches.Add(new WebCacheWebServiceProxy(webCache)); }
public override void Push(RList<KLine> x) { y1.Add(0.0); Formula.EMA(x, "CLOSE", n1, y1); y2.Add(0.0); Formula.EMA(x, "CLOSE", n2, y2); y3.Add(0.0); Formula.EMA(x, "CLOSE", n3, y3); }
public override void Push(RList<KLine> x) { rLLV.Add(0.0); rHHV.Add(0.0); rRat.Add(0.0); rRsv.Add(0.0); rK.Add(0.0); rD.Add(0.0); Formula.SKDJ(x, cN, cM, rLLV, rHHV, rRat, rRsv, rK, rD); }
static int _Length(RList <T> xs) { if (xs is Zero) { return(RList <Pair <T> > ._Length(((Zero)xs).arg)); } else { return(0); } }
public string GetChartData(string cmdText) { return(ListInvork <string>(() => { Logger.Info(cmdText); var json = SqlHepler.GetSqlDataBySql(cmdText); RList <string> r = new RList <string>(); r.sucess = true; r.data = json; return r; })); }
public void CatTest() { const string data1 = "How now,"; var list1 = data1.Split().Aggregate(RList <string> .Empty, (current, word) => RList <string> .Cons(word, current)); const string data2 = "brown cow?"; var list2 = data2.Split().Aggregate(RList <string> .Empty, (current, word) => RList <string> .Cons(word, current)); var list3 = RList <string> .Cat(list1, list2); Assert.AreEqual("[now,, How, cow?, brown]", list3.ToReadableString()); }
public override void OnGUI(Rect rect, SerializedProperty property, GUIContent label) { foldoutRList = EditorGUI.Foldout(new Rect(rect.position, new Vector2(rect.size.x, space)), foldoutRList, label); if (showErrorMessage) { DrawErrorMessage(rect, property.name.Length, errorMessage); } if (foldoutRList && RList != null) { showErrorMessage = false; rect.y += space; RList.DoList(rect); } }
public Command23(byte[] commandID, byte[] senderPeerID, byte[] receiverPeerID, byte[] searchID, RList<SearchResult> searchResults) { if (commandID == null) throw new ArgumentNullException("commandID"); if (commandID.Length != 48) throw new ArgumentException(); if (senderPeerID == null) throw new ArgumentNullException("senderPeerID"); if (senderPeerID.Length != 48) throw new ArgumentException(); if (receiverPeerID == null) throw new ArgumentNullException("receiverPeerID"); if (receiverPeerID.Length != 48) throw new ArgumentException(); if (searchID == null) throw new ArgumentNullException("searchID"); if (searchID.Length != 48) throw new ArgumentException(); if (searchResults == null) throw new ArgumentNullException("searchResults"); m_CommandID = commandID; m_SenderPeerID = senderPeerID; m_ReceiverPeerID = receiverPeerID; m_SearchID = searchID; m_SearchResults = searchResults; m_Command = new CommandBuilder(CommandBuilder.EncryptionMethod.Rijndael, 0x23); m_Command.Write(0x23); m_Command.Write(m_CommandID); m_Command.Write(m_SenderPeerID); m_Command.Write(m_ReceiverPeerID); m_Command.Write(m_SearchID); m_Command.Write((ushort)m_SearchResults.Count); foreach (SearchResult searchResult in searchResults) { m_Command.Write(searchResult.FileHash); m_Command.Write(searchResult.FileSize); m_Command.Write(searchResult.FileName); m_Command.Write((ushort)searchResult.MetaData.Count); foreach (KeyValuePair<string, string> metaData in searchResult.MetaData) { m_Command.Write(metaData.Key); m_Command.Write(metaData.Value); } m_Command.Write(searchResult.Comment); m_Command.Write(searchResult.Rating); } }
public void listele() { try { int FirmaID = Convert.ToInt32(Firma_ID.SelectedValue); DateTime t1 = Convert.ToDateTime(tarih1.Text); DateTime t2 = Convert.ToDateTime(tarih2.Text); var sorgu = (from r in ctx.TBL_Rapor join p in ctx.TBL_Personel on r.PersonelID equals p.ID join h in ctx.TBL_Haritalar on r.HaritaID equals h.ID join f in ctx.TBL_Firmalar on p.Firma_ID equals f.ID join d in ctx.TBL_Departman on p.Departman_ID equals d.ID join g in ctx.TBL_Gorev on p.Gorev_ID equals g.ID where r.dlt == 0 && r.Tarih >= t1 && r.Tarih <= t2 select new { p.Firma_ID, p.TagNo, r.Tarih, f.Firma, Personel = p.Ad + " " + p.Soyad, d.Departman, g.Gorev, Konum = h.HaritaAdi, r.ToplamZaman, personelID = p.ID, haritaID = h.ID }); if (FirmaID != 0) { sorgu = sorgu.Where(x => x.Firma_ID == FirmaID); } RList.DataSource = sorgu.ToList(); RList.DataBind(); } catch { } }
//public AllData(Respondent r) //{ // using (informatikexamenEntities db = new informatikexamenEntities()) // { // List<QuestionResponse> tempList = db.QuestionResponse.ToList(); // RList = db.Response.ToList(); // QRList = new List<QuestionResponse>(); // List<Question> ql = db.Question.ToList(); // RQRList = db.RQR.Where(x => x.RespondentID == r.Id).ToList(); // RQTList = db.RQT.Where(x => x.RespondentID == r.Id).ToList(); // foreach (RQR rqr in RQRList) // { // foreach (QuestionResponse qr in tempList) // { // if (qr.Id == rqr.QuestionResponseID) // { // QRList.Add(qr); // } // } // } // } //} public string GetAnswer(int qID) { Response response = new Response(); response.ResponseText = "Saknas"; using (informatikexamenEntities db = new informatikexamenEntities()) { foreach (QuestionResponse qr in QRList) { if (qr.QuestionID == qID) { response = RList.Where(x => x.Id == qr.ResponseID).FirstOrDefault(); } } } return(response.ResponseText); }
public ActionResult TopRecommendations() { var results = from x in db.refrigerators select x; int pagesize = 9, pageindex = 1; RList temp = new RList(); results = results.Where(x => x.Star2009 >= 5).OrderBy(x => Guid.NewGuid()).Take(9); var list = results.ToList(); temp.Refrigerators = list.ToPagedList(pageindex, pagesize); BreadCrumb.Clear(); BreadCrumb.Add(Url.Action("Index", "Home"), "Home"); BreadCrumb.Add(Url.Action("AppliancesType", "Home"), "Save Energy"); BreadCrumb.Add(Url.Action("Index", "refrigerators"), "Refrigerator"); BreadCrumb.Add("", "Top Recommendations"); List <SelectListItem> Ratings_level = new List <SelectListItem>(); Ratings_level.Add(new SelectListItem() { Text = "All Ratings", Value = "-1" }); Ratings_level.Add(new SelectListItem() { Text = "1 Star", Value = "1" }); Ratings_level.Add(new SelectListItem() { Text = "2 Star", Value = "2" }); Ratings_level.Add(new SelectListItem() { Text = "3 Star", Value = "3" }); Ratings_level.Add(new SelectListItem() { Text = "4 Star", Value = "4" }); Ratings_level.Add(new SelectListItem() { Text = "5 Star", Value = "5" }); this.ViewBag.Ratings = new SelectList(Ratings_level, "Value", "Text"); return(View(temp)); }
public static void EMA(RList<double> s, int n, RList<double> t)
{
    int length = s.Count;
    if (length < n) return;

    if (length == n)
    {
        // Seed the initial value with a plain n-period average.
        double sum = 0.0;
        for (int i = 0; i < n; i++)
        {
            sum += s[i];
        }
        t[0] = sum / n;
    }
    else
    {
        // Standard EMA recurrence: EMA = (2 * price + (n - 1) * prevEMA) / (n + 1).
        t[0] = (2 * s[0] + t[1] * (n - 1)) / (n + 1);
    }
}
public static void MA(RList<double> s, int n, RList<double> t)
{
    int length = s.Count;
    if (length < n) return;

    if (length == n)
    {
        // Seed the initial value with a full n-period average.
        double sum = 0.0;
        for (int i = 0; i < n; i++)
        {
            sum += s[i];
        }
        t[0] = sum / n;
    }
    else
    {
        // Incremental update: add the newest sample and drop the one leaving the window.
        t[0] = t[1] + (s[0] - s[n]) / n;
    }
}
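// The two moving-average helpers above update only the newest slot: s[0] is assumed to be the
// current sample and t[1] the previous output, which implies the RList stores values newest-first
// (that ordering is an assumption inferred from how the Push methods call these formulas).
// A minimal, self-contained sketch of the same incremental MA recurrence using plain .NET lists:
using System;
using System.Collections.Generic;

static class RollingAverageSketch
{
    // Mirrors MA above: seed with a full n-period average, then update incrementally.
    static void MaStep(List<double> s, int n, List<double> t)
    {
        if (s.Count < n) return;
        if (s.Count == n)
        {
            double sum = 0.0;
            for (int i = 0; i < n; i++) sum += s[i];
            t[0] = sum / n;
        }
        else
        {
            // Add the newest sample, drop the one that just left the n-period window.
            t[0] = t[1] + (s[0] - s[n]) / n;
        }
    }

    static void Main()
    {
        var prices = new List<double>();
        var ma = new List<double>();
        foreach (double p in new[] { 10.0, 11.0, 12.0, 13.0, 14.0 })
        {
            prices.Insert(0, p); // newest-first, matching the assumed RList behaviour
            ma.Insert(0, 0.0);   // reserve the newest output slot, as the Push methods do
            MaStep(prices, 3, ma);
        }
        Console.WriteLine(ma[0]); // 13 = (12 + 13 + 14) / 3
    }
}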
protected void Page_Load(object sender, EventArgs e) { RTLSEntities ctx = new RTLSEntities(); if (string.IsNullOrEmpty(Request.QueryString["dlt"]) == false) { int dlt = Convert.ToInt32(Request.QueryString["dlt"]); var sorgu = ctx.TBL_Haritalar.SingleOrDefault(x => x.ID == dlt); sorgu.dlt = 1; sorgu.dlt_Zaman = DateTime.Now; ctx.SaveChanges(); } int MapID = Convert.ToInt32(Request.QueryString["MapID"]); baslik.InnerHtml = ctx.TBL_Map.SingleOrDefault(x => x.ID == MapID).Map; var bolge = ctx.TBL_Haritalar.Where(x => x.dlt == 0 && x.Map_ID == MapID).OrderBy(x => x.HaritaAdi); RList.DataSource = bolge.ToList(); RList.DataBind(); }
protected void Page_Load(object sender, EventArgs e) { RTLSEntities ctx = new RTLSEntities(); if (string.IsNullOrEmpty(Request.QueryString["dlt"]) == false) { int dlt = Convert.ToInt32(Request.QueryString["dlt"]); var sorgu = ctx.TBL_Map.SingleOrDefault(x => x.ID == dlt); sorgu.dlt = 1; sorgu.dlt_Zaman = DateTime.Now; ctx.SaveChanges(); } if (string.IsNullOrEmpty(Request.QueryString["d"]) == false) { string d = Request.QueryString["d"]; int id = Convert.ToInt32(Request.QueryString["id"]); if (id == 0) { TBL_Map dep = new TBL_Map(); dep.dlt = 0; dep.Map = Request.QueryString["name"]; ctx.TBL_Map.Add(dep); ctx.SaveChanges(); } else { TBL_Map dep = ctx.TBL_Map.SingleOrDefault(x => x.ID == id && x.dlt == 0); dep.Map = Request.QueryString["name"]; ctx.SaveChanges(); } Response.Redirect("map.aspx"); } var map = ctx.TBL_Map.Where(x => x.dlt == 0).OrderBy(x => x.Map); RList.DataSource = map.ToList(); RList.DataBind(); }
void GameOver()
{
    gameOver = true;
    gameOverPage.SetActive(true);
    if (pCtrl.die)
    {
        finScoreText.text = score.ToString();
    }
    else
    {
        finScoreText.text = ((int)gameTime + score).ToString(); // update the final score text
    }
    idText.text = Client.instance.userName;  // player name on the game-over page
    Cursor.lockState = CursorLockMode.None;  // release the cursor lock
    Cursor.visible = true;                   // and show the cursor again
    RList cData = new RList(Client.instance.userName, int.Parse(finScoreText.text)); // send the result to the server
    string msgToSend = JsonMapper.ToJson(cData);
    Client.instance.SendMessage(msgToSend);
}
private static string DumpList <T>(RList <Tuple <T, T> > .Node list) { var result = new StringBuilder(); result.Append("{"); var separator = ""; while (true) { if (list == null) { break; } result.Append(separator); separator = ", "; var head = RList <Tuple <T, T> > .Head(list); result.Append(head); list = RList <Tuple <T, T> > .Tail(list); } result.Append("}"); return(result.ToString()); }
protected void Page_Load(object sender, EventArgs e) { RTLSEntities ctx = new RTLSEntities(); if (string.IsNullOrEmpty(Request.QueryString["dlt"]) == false) { int dlt = Convert.ToInt32(Request.QueryString["dlt"]); var sorgu = ctx.TBL_Personel.SingleOrDefault(x => x.ID == dlt); sorgu.dlt = 1; ctx.SaveChanges(); } var personel = from p in ctx.TBL_Personel join f in ctx.TBL_Firmalar on p.Firma_ID equals f.ID join d in ctx.TBL_Departman on p.Departman_ID equals d.ID join g in ctx.TBL_Gorev on p.Gorev_ID equals g.ID where p.dlt == 0 select new { p.ID, p.Ad, p.Soyad, p.TagNo, p.TagTipi, p.KamraIp, f.Firma, d.Departman, g.Gorev }; RList.DataSource = personel.ToList(); RList.DataBind(); }
public override void Push(RList<KLine> x) { rU.Add(0.0); rD.Add(0.0); Formula.TDM(x, rU, rD); }
private static double HHV(RList<KLine>s, PropertyInfo prop, int n) { double max = Convert.ToDouble(prop.GetValue(s[0])); for (int i = 1; i < n; i++) { double d = Convert.ToDouble(prop.GetValue(s[i])); if (max < d) max = d; } return max; }
private static double LLV(RList<KLine>s, PropertyInfo prop, int n) { double min = Convert.ToDouble(prop.GetValue(s[0])); for (int i = 1; i < n; i++) { double d = Convert.ToDouble(prop.GetValue(s[i])); if (min > d) min = d; } return min; }
public static void TDM(RList<KLine> s, RList<double> rU, RList<double> rD)
{
    int length = s.Count;
    if (length < 6) return;

    PropertyInfo prop = typeof(KLine).GetProperty("CLOSE");
    double curr = Convert.ToDouble(prop.GetValue(s[0]));
    double prev4 = Convert.ToDouble(prop.GetValue(s[4]));

    if (rU[1] == 0.0)
    {
        // Look for the start of an upward TD setup: the current close beats the close
        // four bars ago while the previous bar closed below its own four-bars-ago close.
        double prev1 = Convert.ToDouble(prop.GetValue(s[1]));
        double prev5 = Convert.ToDouble(prop.GetValue(s[5]));
        if (curr > prev4 && prev1 < prev5) rU[0] = 1;
    }
    else
    {
        if (curr > prev4) rU[0] = rU[1] + 1;
    }

    if (rD[1] == 0.0)
    {
        // Mirror-image check for the start of a downward TD setup.
        double prev1 = Convert.ToDouble(prop.GetValue(s[1]));
        double prev5 = Convert.ToDouble(prop.GetValue(s[5]));
        if (curr < prev4 && prev1 > prev5) rD[0] = 1;
    }
    else
    {
        if (curr < prev4) rD[0] = rD[1] + 1;
    }
}
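// TDM above implements a TD-Sequential-style setup count: rU counts consecutive bars whose close
// is above the close four bars earlier, and a new count may only start after the previous bar
// closed below its own four-bars-ago close (rD mirrors this for the downside). A self-contained
// illustration of that counting rule on a plain oldest-first array (the data and names here are
// illustrative assumptions, not taken from the original RList-based code):
using System;

static class TdSetupCountSketch
{
    static void Main()
    {
        // Daily closes, oldest first (made-up numbers).
        double[] close = { 10, 10, 10, 10, 9, 11, 12, 13, 14, 9 };
        int[] upCount = new int[close.Length];

        for (int i = 5; i < close.Length; i++)
        {
            bool above = close[i] > close[i - 4];         // close beats the close 4 bars ago
            bool priorFlip = close[i - 1] < close[i - 5]; // previous bar closed below ITS 4-bars-ago close
            if (upCount[i - 1] == 0)
                upCount[i] = (above && priorFlip) ? 1 : 0; // a count can only start right after a flip
            else
                upCount[i] = above ? upCount[i - 1] + 1 : 0; // extend the streak or reset it
        }
        Console.WriteLine(string.Join(",", upCount)); // 0,0,0,0,0,1,2,3,4,0
    }
}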
public static void SKDJ(RList<KLine> s, int n, int m, RList<double> rLLV, RList<double> rHHV, RList<double> rRat, RList<double> rRsv, RList<double> rK, RList<double> rD)
{
    int length = s.Count;
    if (length < n) return;

    PropertyInfo propHIGH = typeof(KLine).GetProperty("HIGH");
    PropertyInfo propLOW = typeof(KLine).GetProperty("LOW");
    PropertyInfo propCLOSE = typeof(KLine).GetProperty("CLOSE");

    rLLV[0] = LLV(s, propLOW, n);
    rHHV[0] = HHV(s, propHIGH, n);
    double close = Convert.ToDouble(propCLOSE.GetValue(s[0]));
    if (rHHV[0] == rLLV[0])
        rRat[0] = 100;
    else
        rRat[0] = (close - rLLV[0]) / (rHHV[0] - rLLV[0]) * 100;

    EMA(rRat, m, rRsv);
    EMA(rRsv, m, rK);
    MA(rK, m, rD);
}
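// For a single bar, the raw stochastic value that SKDJ then smooths is simply where the close sits
// inside the n-period high/low range; K and D are produced by running EMA and MA over that series
// exactly as shown above. A tiny stand-alone check of the RSV step (numbers are made up):
using System;

static class RsvSketch
{
    static void Main()
    {
        double hhv = 15.0, llv = 10.0, close = 14.0; // n-period extremes and the current close
        double rsv = hhv == llv ? 100.0 : (close - llv) / (hhv - llv) * 100.0;
        Console.WriteLine(rsv); // 80: the close sits 80% of the way up the range
    }
}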
private static void SendSearchResults(byte[] senderPeerID, byte[] searchID, string searchPattern) { if (senderPeerID == null) throw new ArgumentNullException("senderPeerID"); if (senderPeerID.Length != 48) throw new ArgumentException(); if (searchID == null) throw new ArgumentNullException("searchID"); if (searchID.Length != 48) throw new ArgumentException(); if (searchPattern == null) throw new ArgumentNullException("searchPattern"); searchPattern = searchPattern.ToLower(); RList<Command23.SearchResult> searchResults = new RList<Command23.SearchResult>(); int entriesLength = 0; foreach (SharedFile sharedFile in SharedFiles.Values) { bool found = false; if (sharedFile.FileName.ToLower().Contains(searchPattern)) found = true; if (!found && sharedFile.Album.ToLower().Contains(searchPattern)) found = true; if (!found && sharedFile.Artist.ToLower().Contains(searchPattern)) found = true; if (!found && sharedFile.Title.ToLower().Contains(searchPattern)) found = true; if (found) { int entryLength = sharedFile.GetEntryLength(); if (entriesLength + entryLength <= Constants.MaximumDataLength) { searchResults.Add(new Command23.SearchResult(sharedFile.FileHash, (uint)sharedFile.FileSize, sharedFile.FileName, sharedFile.MetaData, sharedFile.Comment, sharedFile.Rating)); entriesLength += entryLength; } else break; } } if (!searchResults.IsEmpty) { byte[] commandID = GenerateIDOrHash(); Send(new Command23(commandID, m_PeerID, senderPeerID, searchID, searchResults)); m_LastCommandID[ByteArrayToString(commandID)] = DateTime.Now; } }
public void Process() { if (m_QueueStart == null || !m_QueueStart.HasValue) m_QueueStart = DateTime.Now; if (m_ReceivedSectors == m_Sectors) { if (m_IsHashing) return; m_IsHashing = true; Thread hashingThread = new Thread(delegate() { try { m_Logger.Log("The download of \"{0}\" is complete and will be hashed now!", m_FileName); try { m_Sources.Lock(); foreach (Source source in m_Sources.Values) if (source.State == SourceState.Active || source.State == SourceState.Requested || source.State == SourceState.Requesting) SendCommand7A(source); } catch (Exception ex) { m_Logger.Log(ex, "An exception was thrown while removing sources!"); } finally { m_Sources.Clear(); m_Sources.Unlock(); } Core.RemoveDownload(m_DownloadID); try { m_FileStream.Close(); FileStream fileStream = new FileStream(m_TempFilePath, FileMode.Open, FileAccess.Read, FileShare.Read); byte[] fileHash = ComputeHashes.SHA512Compute(fileStream); fileStream.Close(); if (Core.CompareByteArray(fileHash, m_FileHash)) { //2008-03-20 Nochbaer if (Directory.Exists(Path.Combine(m_Settings["IncomingDirectory"], m_SubFolder)) == false) { Directory.CreateDirectory(Path.Combine(m_Settings["IncomingDirectory"], m_SubFolder)); } string filePath = Path.Combine(Path.Combine(m_Settings["IncomingDirectory"], m_SubFolder), m_FileName); int n = 1; while (File.Exists(filePath)) { filePath = Path.Combine(Path.Combine(m_Settings["IncomingDirectory"], m_SubFolder), string.Format("{0}({1}){2}", Path.GetFileNameWithoutExtension(m_FileName), n, Path.GetExtension(m_FileName))); n++; } Core.ShareManager.AddDownloadedFile(m_TempFilePath, filePath, m_FileHash); File.Move(m_TempFilePath, filePath); if (bool.Parse(m_Settings["ParseCollections"]) == true && Path.GetExtension(filePath) == ".sncollection") { Core.ParseStealthNetCollection(filePath); } } else { string filePath = Path.Combine(m_Settings["CorruptDirectory"], m_FileName); int n = 1; while (File.Exists(filePath)) { filePath = Path.Combine(m_Settings["CorruptDirectory"], string.Format("{0}({1}){2}", Path.GetFileNameWithoutExtension(m_FileName), n, Path.GetExtension(m_FileName))); n++; } File.Move(m_TempFilePath, filePath); Core.AddDownload(m_FileHash, m_FileHashString, 0, null); m_Logger.Log("The Download of '{0}' is corrupt", m_FileName); } } catch (Exception ex) { m_Logger.Log(ex, "An exception was thrown while moving temporary file '{0}'!", m_TempFilePath); } } catch (Exception ex) { m_Logger.Log(ex, "An exception was thrown while hashing the download of \"{0}\"!", m_FileName); } }); hashingThread.Name = "hashingThread"; hashingThread.IsBackground = true; hashingThread.Priority = ThreadPriority.Lowest; hashingThread.Start(); } else { if (m_DownloadStatistics.Count == 60) m_DownloadStatistics.RemoveAt(59); m_DownloadStatistics.Insert(0, m_Downloaded); m_Downloaded = 0; long downstream = 0; foreach (int n in m_DownloadStatistics) downstream += n; m_Downstream = (int)(downstream / m_DownloadStatistics.Count); m_DownstreamString = Core.TransferVolumeToString(m_Downstream); if (DateTime.Now.Subtract(m_LastBroadcastSent).TotalSeconds >= Constants.Command30Interval) { m_LastBroadcastSent = DateTime.Now; if (!m_HasInformation) Core.SendCommand50(m_SourceSearchFloodingHash, m_SourceSearchPeerID, m_SourceSearchID, m_OnceHashedFileHash); else Core.SendCommand60(m_SourceSearchFloodingHash, m_SourceSearchPeerID, m_SourceSearchID, m_TwiceHashedFileHash); } try { m_Sources.Lock(); RList<Source> activeSources = new RList<Source>(); RList<Source> verifiedSources = new RList<Source>(); Source source; for (int n = 
m_Sources.Count - 1; n >= 0; n--) { if (!m_RequestingDelay.HasValue) m_RequestingDelay = DateTime.Now; source = m_Sources[n].Value; if (!source.IsComplete) { bool hasNeededSectors = false; for (long i = 0; i < source.SectorsMap.Length; i++) if ((~m_SectorsMap[i] & source.SectorsMap[i]) != 0) { hasNeededSectors = true; break; } if (!hasNeededSectors && source.State != SourceState.NotNeeded) { source.SetState(SourceState.NotNeeded); if (source.State == SourceState.Active || source.State == SourceState.Requested || source.State == SourceState.Requesting) SendCommand7A(source); } else if (source.State == SourceState.NotNeeded && hasNeededSectors) source.SetState(SourceState.Verifying); } if (source.State == SourceState.NotNeeded) { if (DateTime.Now.Subtract(source.LastReceived).TotalSeconds >= Constants.PeerTimeout) m_Sources.RemoveAt(n); } else if (source.State == SourceState.Verifying || source.State == SourceState.Verified) { if ((source.Command70Sent == 0 && DateTime.Now.Subtract(source.LastCommand70Sent).TotalSeconds >= Constants.Command70Interval) || (source.Command70Sent > 0 && source.Command70Sent < Constants.Command70ToSend && DateTime.Now.Subtract(source.LastCommand70Sent).TotalSeconds >= Constants.Command71Timeout)) { if (source.Command70Sent > 0) source.ReportTimeout(); source.Report70Sent(); Core.SendCommand70(m_DownloadPeerID, source.PeerID, m_DownloadID, m_ThriceHashedFileHash); } else if (source.Command70Sent >= Constants.Command70ToSend && DateTime.Now.Subtract(source.LastCommand70Sent).TotalSeconds >= Constants.Command71Timeout) { m_Sources.RemoveAt(n); continue; } if (source.State == SourceState.Verified && !source.IsQueueFull) verifiedSources.Add(source); } else if (source.State == SourceState.Requesting || source.State == SourceState.Requested || (source.State == SourceState.Active && source.LastRequestedSector == -1)) { if ((source.Command74Sent == 0 && DateTime.Now.Subtract(source.LastCommand74Sent).TotalSeconds >= Constants.Command74Interval) || (source.Command74Sent > 0 && source.Command74Sent < Constants.Command74ToSend && DateTime.Now.Subtract(source.LastCommand74Sent).TotalSeconds >= Constants.Command75Timeout)) { source.Report74Sent(); Core.SendCommand74(m_DownloadPeerID, source.PeerID, m_DownloadID, m_ThriceHashedFileHash); } else if (source.Command74Sent >= Constants.Command74ToSend && DateTime.Now.Subtract(source.LastCommand74Sent).TotalSeconds >= Constants.Command75Timeout) { source.ReportTimeout(); continue; } activeSources.Add(source); } else if (source.State == SourceState.Active && source.LastRequestedSector > -1) { if ((source.Command78Sent == 0 && DateTime.Now.Subtract(source.LastCommand78Sent).TotalSeconds >= Constants.Command78Interval) || (source.Command78Sent > 0 && source.Command78Sent < Constants.Command78ToSend && DateTime.Now.Subtract(source.LastCommand78Sent).TotalSeconds >= Constants.Command79Timeout)) { source.Report78Sent(source.LastRequestedSector); Core.SendCommand78(m_DownloadPeerID, source.PeerID, m_DownloadID, source.LastRequestedSector); } else if (source.Command78Sent >= Constants.Command78ToSend && DateTime.Now.Subtract(source.LastCommand78Sent).TotalSeconds >= Constants.Command79Timeout) { source.ReportTimeout(); continue; } activeSources.Add(source); } } if (m_IsFilledWithZeros && m_RequestingDelay.HasValue && DateTime.Now.Subtract(m_RequestingDelay.Value).TotalSeconds >= Constants.DownloadRequestingDelay) { if (activeSources.Count < Constants.MaximumSourcesCount && verifiedSources.Count > 0) { for (int n = 1; n <= 
verifiedSources.Count - 1; n++) for (int m = 0; m < verifiedSources.Count - n; m++) { if ((verifiedSources[m].IsComplete && !verifiedSources[m + 1].IsComplete && verifiedSources[m + 1].QueueLength < Constants.MaximumUploadsCount) || verifiedSources[m].QueueLength > verifiedSources[m + 1].QueueLength) { source = verifiedSources[m]; verifiedSources[m] = verifiedSources[m + 1]; verifiedSources[m + 1] = source; } } for (int n = 0; n < Math.Min(Constants.MaximumSourcesCount - activeSources.Count, verifiedSources.Count); n++) { source = verifiedSources[n]; source.Report74Sent(); Core.SendCommand74(m_DownloadPeerID, source.PeerID, m_DownloadID, m_ThriceHashedFileHash); activeSources.Add(source); } } for (int n = 0; n < Math.Min(Constants.MaximumSourcesCount, activeSources.Count); n++) { source = activeSources[n]; if (source.State == SourceState.Requested && source.QueuePosition == 0) { RList<long> sectorsToRequest = new RList<long>(m_SectorsMap.Length); long sectorToRequest = -1; if (!source.IsComplete) { bool sectorCanBeRequested = false; for (int t = 0; t < 10 && !sectorCanBeRequested; t++) { for (long i = 0; i < m_SectorsMap.Length; i++) if ((~m_SectorsMap[i] & source.SectorsMap[i]) != 0) sectorsToRequest.Add(i); if (sectorsToRequest.Count > 0) { long d = sectorsToRequest[Randomizer.GenerateNumber(0, sectorsToRequest.Count)]; byte e = m_SectorsMap[d]; byte f = source.SectorsMap[d]; int g; for (g = 0; g < 8; g++) if (((~e & f) & (1 << g)) != 0) break; sectorToRequest = d * 8 + g; sectorCanBeRequested = true; foreach (Source activeSource in activeSources) if (activeSource.State == SourceState.Active && activeSource.LastRequestedSector == sectorToRequest) { sectorCanBeRequested = false; break; } } else sectorCanBeRequested = false; } if (sectorToRequest != -1) { source.Report78Sent(sectorToRequest); Core.SendCommand78(m_DownloadPeerID, source.PeerID, m_DownloadID, sectorToRequest); } } else { bool sectorCanBeRequested = false; for (int t = 0; t < 10 && !sectorCanBeRequested; t++) { for (long i = 0; i < m_SectorsMap.Length; i++) if (m_SectorsMap[i] != 255) sectorsToRequest.Add(i); if (sectorsToRequest.Count > 0) { long d = sectorsToRequest[Randomizer.GenerateNumber(0, sectorsToRequest.Count)]; byte e = m_SectorsMap[d]; int g; for (g = 0; g < 8; g++) if ((e & (1 << g)) == 0) break; sectorToRequest = d * 8 + g; sectorCanBeRequested = true; foreach (Source activeSource in activeSources) if (activeSource.State == SourceState.Active && activeSource.LastRequestedSector == sectorToRequest) { sectorCanBeRequested = false; break; } } else sectorCanBeRequested = false; } if (sectorToRequest != -1) { source.Report78Sent(sectorToRequest); Core.SendCommand78(m_DownloadPeerID, source.PeerID, m_DownloadID, sectorToRequest); } } } } } } finally { m_Sources.Unlock(); } } }
public abstract void Push(RList<KLine> rl);
public ViewData(SerializedProperty property, ListSettingsAttribute attribute) { List = RList.Create(property); Settings = attribute; }
public DailyTA() { xs = new RList<KLineDaily>(); ys = new RList<Quota>(); }
/// <summary>
/// New ResumeDownloads()
/// 10.06.2009 Lars
/// 03.07.2009 Lars (new download queue)
/// 04.07.2009 Lars (simpler and better handling)
/// </summary>
private static void ResumeDownloads()
{
    try
    {
        // Read in all saved downloads.
        RIndexedHashtable<string, XmlNode> downloadsXml = new RIndexedHashtable<string, XmlNode>();
        if (File.Exists(m_DownloadsFilePath))
        {
            XmlDocument downloadsXmlDocument = new XmlDocument();
            downloadsXmlDocument.Load(m_DownloadsFilePath);
            foreach (XmlNode downloadNode in downloadsXmlDocument.SelectSingleNode("downloads"))
                try { downloadsXml.Add(downloadNode.Attributes["hash"].InnerText, downloadNode); }
                catch (Exception ex) { m_Logger.Log(ex, "A download cannot be resumed due to non existent information about it!"); continue; }
        }

        // Go through all files in the temporary directory.
        RList<Download> temporary = new RList<Download>(downloadsXml.Count);
        foreach (string filePath in Directory.GetFiles(Settings.Instance["TemporaryDirectory"]))
        {
            string fileName = new FileInfo(filePath).Name;
            try
            {
                if (!Regex.IsMatch(fileName, "^[0-9A-F]{128,128}$", RegexOptions.IgnoreCase))
                {
                    m_Logger.Log("The file \"{0}\" is no valid temporary download!", fileName);
                }
                XmlNode node;
                if (!downloadsXml.TryGetValue(fileName, out node))
                {
                    m_Logger.Log("The download of \"{0}\" cannot be resumed due to non existent information about it!", fileName);
                    continue;
                }
                bool hasInformation = true;
                if ((node as XmlElement).HasAttribute("hasinformation") && (node as XmlElement).GetAttribute("hasinformation") == "none")
                    hasInformation = false;
                string lastSeenString = null;
                DateTime? lastSeen = null;
                string lastReceptionString = null;
                DateTime? lastReception = null;
                String subfolder = string.Empty;
                if (node.SelectSingleNode("lastseen") != null)
                {
                    lastSeenString = node.SelectSingleNode("lastseen").InnerText;
                    if (lastSeenString != null && lastSeenString.Length > 0) lastSeen = DateTime.Parse(lastSeenString);
                }
                if (node.SelectSingleNode("lastreception") != null)
                {
                    lastReceptionString = node.SelectSingleNode("lastreception").InnerText;
                    if (lastReceptionString != null && lastReceptionString.Length > 0) lastReception = DateTime.Parse(lastReceptionString);
                }
                if (node.SelectSingleNode("subfolder") != null) subfolder = node.SelectSingleNode("subfolder").InnerText;
                Download download = new Download(Core.FileHashStringToFileHash(fileName), node.SelectSingleNode("filename").InnerText, long.Parse(node.SelectSingleNode("filesize").InnerText), hasInformation, lastSeen, lastReception, hasInformation ? Convert.FromBase64String(node.SelectSingleNode("sectorsmap").InnerText) : null);
                download.SetSubFolderAndTime(subfolder, null);
                temporary.Add(download);
            }
            catch (Exception ex)
            {
                m_Logger.Log(ex, "An exception was thrown while resuming the download of \"{0}\"!", fileName);
            }
        }

        // Sort the downloads to be resumed by their order in downloads.xml (simple bubble sort).
        for (int n = 1; n <= temporary.Count - 1; n++)
            for (int m = 0; m < temporary.Count - n; m++)
            {
                Download a = temporary[m];
                Download b = temporary[m + 1];
                if (downloadsXml.IndexOfKey(a.FileHashString) > downloadsXml.IndexOfKey(b.FileHashString))
                {
                    temporary[m] = b;
                    temporary[m + 1] = a;
                }
            }

        // Resume the downloads.
        try
        {
            m_DownloadsAndQueue.Lock();
            foreach (Download download in temporary) m_DownloadsAndQueue.Add(download);
        }
        finally
        {
            m_DownloadsAndQueue.Unlock();
        }
    }
    catch (Exception ex)
    {
        m_Logger.Log(ex, "An exception was thrown while resuming downloads!");
    }
    finally
    {
        // Only now may downloads.xml be written again...
        DownloadsXmlWriter.SetIsReady();
    }
}
private static void SendBroadcast(IRequestCommand command, IPAddress excludedConnection, int dropChainTailCount) { if (command == null) throw new ArgumentNullException("command"); try { m_Connections.Lock(); RList<Connection> connections = new RList<Connection>(); foreach (Connection connection in m_Connections.Values) if (connection.IsEstablished && !connection.RemoteEndPoint.Address.Equals(excludedConnection)) connections.Add(connection); dropChainTailCount = Math.Min(dropChainTailCount, connections.Count); for (int n = 0; n < dropChainTailCount; n++) { int index = Randomizer.GenerateNumber(0, connections.Count); command.Send(connections[index]); connections.RemoveAt(index); } } finally { m_Connections.Unlock(); } }
/*
 * Private constructor to prevent manually creating an instance of this class.
 *
 * Added on 2007-05-05 by T.Norad
 */
private Logger() { m_LogEntries = new RList<LogEntry>(); }
public ConsoleInterface() { list = new RList(); dorm = new Dormitory(); isExit = false; }
public void AddSource(byte[] id, string fileName, RIndexedHashtable<string, string> metaData, string comment, byte rating) { if (id == null) throw new ArgumentNullException("id"); if (id.Length != 48) throw new ArgumentException(); if (fileName == null) throw new ArgumentNullException("fileName"); if (metaData == null) throw new ArgumentNullException("metaData"); if (comment == null) throw new ArgumentNullException("comment"); if (rating > 3) throw new ArgumentOutOfRangeException("rating"); try { m_Sources.Lock(); string idString = Core.ByteArrayToString(id); if (!m_Sources.ContainsKey(idString)) m_Sources.Add(idString, new Source(m_Sources, id, fileName, metaData, comment, rating)); else m_Sources[idString].ReportReceived(fileName, comment, rating); RIndexedHashtable<string, int> fileNames1 = new RIndexedHashtable<string, int>(); RIndexedHashtable<byte, int> ratings1 = new RIndexedHashtable<byte, int>(); foreach (Source source in m_Sources.Values) { if (!fileNames1.ContainsKey(source.FileName)) fileNames1.Add(source.FileName, 1); else fileNames1[source.FileName]++; if (!ratings1.ContainsKey(source.Rating)) ratings1.Add(source.Rating, 1); else ratings1[source.Rating]++; } RList<string> fileNames2 = new RList<string>(fileNames1.Keys); for (int n = 1; n <= fileNames2.Count - 1; n++) for (int m = 0; m < fileNames2.Count - n; m++) { if (fileNames1[fileNames2[m]] < fileNames1[fileNames2[m + 1]]) { string temp; temp = fileNames2[m]; fileNames2[m] = fileNames2[m + 1]; fileNames2[m + 1] = temp; } } m_FileName = fileNames2[0]; RList<byte> ratings2 = new RList<byte>(ratings1.Keys); for (int n = 1; n <= ratings2.Count - 1; n++) for (int m = 0; m < ratings2.Count - n; m++) { if (ratings1[ratings2[m]] < ratings1[ratings2[m + 1]]) { byte temp; temp = ratings2[m]; ratings2[m] = ratings2[m + 1]; ratings2[m + 1] = temp; } } ratings2.Remove(0); if (!ratings2.IsEmpty) m_Rating = ratings2[0]; else m_Rating = 0; foreach (KeyValuePair<string, string> metaDataItem in metaData) if (!m_MetaData.ContainsKey(metaDataItem.Key)) m_MetaData.Add(metaDataItem.Key, metaDataItem.Value); Core.ParseMetaData(m_MetaData, out m_Album, out m_Artist, out m_Title); } finally { m_Sources.Unlock(); } }
public void RListTest() { RList rl = new RList(); rl.Mode.Should().Be(RMode.List); rl.Should().BeEmpty(); var e = rl.GetEnumerator(); e.Should().NotBeNull(); e.MoveNext().Should().BeFalse(); RObject rv = new RVector <RNumber>(RMode.Numeric, 1); var rs = new RString("abc"); rl.Add(rs, rv); rl.Should().HaveCount(1); var e1 = rl.Keys.GetEnumerator(); e1.MoveNext(); e1.Current.Should().Be(rs); e1.Current.Should().Be(new RString("abc")); var e2 = rl.Values.GetEnumerator(); e2.MoveNext(); e2.Current.Should().Be(rv); rl.ContainsKey(rs).Should().BeTrue(); rl.ContainsKey(new RString("abc")).Should().BeTrue(); rl.Contains(new KeyValuePair <RString, RObject>(rs, rv)).Should().BeTrue(); var arr = new KeyValuePair <RString, RObject> [2]; rl.CopyTo(arr, 1); arr[1].Key.Should().Be(rs); arr[1].Value.Should().Be(rv); rl[rs].Should().Be(rv); rl.IsReadOnly.Should().BeFalse(); RObject u; rl.TryGetValue(rs, out u).Should().BeTrue(); var en = rl.GetEnumerator(); en.Should().NotBeNull(); en.MoveNext().Should().BeTrue(); en.Current.Key.Should().Be(rs); en.Current.Value.Should().Be(rv); en.MoveNext().Should().BeFalse(); IEnumerator <RObject> en1 = ((IEnumerable <RObject>)rl).GetEnumerator(); en1.Should().NotBeNull(); en1.MoveNext().Should().BeTrue(); en1.Current.Should().Be(rv); en1.MoveNext().Should().BeFalse(); IEnumerator <KeyValuePair <RString, RObject> > en2 = ((IEnumerable <KeyValuePair <RString, RObject> >)rl).GetEnumerator(); en2.Should().NotBeNull(); en2.MoveNext().Should().BeTrue(); en2.Current.Key.Should().Be(rs); en2.Current.Value.Should().Be(rv); en2.MoveNext().Should().BeFalse(); IEnumerator en3 = ((IEnumerable)rl).GetEnumerator(); en3.Should().NotBeNull(); en3.MoveNext().Should().BeTrue(); en3.MoveNext().Should().BeFalse(); rl.Remove(rs).Should().BeTrue(); rl.Should().BeEmpty(); rl.ContainsKey(rs).Should().BeFalse(); rl.Add(new KeyValuePair <RString, RObject>(new RString("x"), new RLogical(true))); rl.Length.Should().Be(1); rl.Count.Should().Be(1); rl.Clear(); rl.Length.Should().Be(0); rl.Count.Should().Be(0); rl.TryGetValue(rs, out u).Should().BeFalse(); u.Should().BeNull(); }
public override float GetPropertyHeight(SerializedProperty property, GUIContent label) => foldoutRList ? (RList != null ? RList.GetHeight() : 0) + space : space;
public static void ATR(RList<KLine> s, int n, RList<double> t1, RList<double> t2)
{
    int length = s.Count;
    if (length <= 1) return;

    PropertyInfo propHIGH = typeof(KLine).GetProperty("HIGH");
    PropertyInfo propLOW = typeof(KLine).GetProperty("LOW");
    PropertyInfo propCLOSE = typeof(KLine).GetProperty("CLOSE");

    // Calculate MTR (true range): the largest of high-low, |prevClose-high| and |prevClose-low|.
    double high = Convert.ToDouble(propHIGH.GetValue(s[0]));
    double low = Convert.ToDouble(propLOW.GetValue(s[0]));
    double mtr = Math.Max(high - low, Math.Abs(Convert.ToDouble(propCLOSE.GetValue(s[1])) - high));
    mtr = Math.Max(mtr, Math.Abs(Convert.ToDouble(propCLOSE.GetValue(s[1])) - low));
    t1[0] = mtr;

    // Calculate ATR as the n-period moving average of MTR.
    MA(t1, n, t2);
}
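// ATR above first forms the true range of the newest bar (s[1] being the previous bar's close in
// the assumed newest-first layout) and then smooths it with the n-period MA. A small stand-alone
// check of the true-range step under those assumptions, with made-up prices:
using System;

static class TrueRangeSketch
{
    static void Main()
    {
        double high = 105, low = 98, prevClose = 110; // illustrative values for one bar
        double tr = Math.Max(high - low, Math.Abs(prevClose - high));
        tr = Math.Max(tr, Math.Abs(prevClose - low));
        Console.WriteLine(tr); // 12 = |110 - 98|: the overnight gap dominates the 7-point intraday range
    }
}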
public SearchDBManager(string fileName ) { m_FilePath = fileName; m_CleanUpDays = int.Parse(m_Settings["SearchDBCleanUpDays"]); m_SearchDBThread = new Thread(delegate() { try { Core.SetUILanguage(); while (!m_IsClosing && m_ErrorCounter < 10) { //Move buffers to normal list try { m_SearchesToStartBuffer.Lock(); m_SearchesToStart.Lock(); foreach (SearchDBManager.SearchToStart newSearch in m_SearchesToStartBuffer) { if (!m_SearchesToStart.Contains(newSearch)) { m_SearchesToStart.Add(newSearch); } } } catch (Exception ex) { m_Logger.Log(ex, "SearchDBManager: An error was thrown while reading the SearchesToStartBuffer.", new object[] { }); } finally { m_SearchesToStartBuffer.Clear(); m_SearchesToStart.Unlock(); m_SearchesToStartBuffer.Unlock(); } try { m_ResultsToAddBuffer.Lock(); m_ResultsToAdd.Lock(); foreach (Command23.SearchResult result in m_ResultsToAddBuffer) { if (!m_ResultsToAdd.Contains(result)) { m_ResultsToAdd.Add(result); } } } catch (Exception ex) { m_Logger.Log(ex, "SearchDBManager: An error was thrown while reading the ResultsToAddBuffer.", new object[] { }); } finally { m_ResultsToAddBuffer.Clear(); m_ResultsToAdd.Unlock(); m_ResultsToAddBuffer.Unlock(); } //Because we are only comparing dates, it is only necessary to compare them once a day bool cleanUp = false; if (((TimeSpan)DateTime.Now.Subtract(m_LastCleanUp)).Days >= 1) { //CleanUp(); cleanUp = true; } //The current entry long lastKnownValidFilePosition = 0; //The entry before long lastKnownValidFilePosition2 = 0; ulong fileSizeOfEntries = 0; long cleanedUpCounter = 0; long resultCounter = 0; FileStream fileStream = null; BinaryReader fileReader = null; BinaryWriter fileWriter = null; MemoryStream memoryStream = null; BinaryReader memoryReader = null; BinaryWriter memoryWriter = null; try { m_ResultsToAdd.Lock(); m_SearchesToStart.Lock(); m_SearchResultsBuffer.Lock(); //Check if there is something to do if (m_ResultsToAdd.Count > 0 || m_SearchesToStart.Count > 0 || cleanUp) { fileStream = new FileStream(m_FilePath, FileMode.OpenOrCreate, FileAccess.ReadWrite); fileReader = new BinaryReader(fileStream, Encoding.Unicode); fileWriter = new BinaryWriter(fileStream, Encoding.Unicode); memoryStream = new MemoryStream(); memoryReader = new BinaryReader(memoryStream, Encoding.Unicode); memoryWriter = new BinaryWriter(memoryStream, Encoding.Unicode); long fileLength = fileReader.BaseStream.Length; int fileFlushCounter = 0; long fileReadPosition = 0; long fileWritePosition = 0; bool insertingData = false; bool isFirstChangedEntry = true; //Add a array for the results of each search foreach (SearchToStart searchToStart in m_SearchesToStart) { m_SearchResultsBuffer.Add(searchToStart.SearchID, new RIndexedHashtable<string, OldSearchResult>()); } //Go through the file while (fileReadPosition < fileLength) { bool isOld = false; long firstPositionOfThisEntry = fileReadPosition; lastKnownValidFilePosition2 = lastKnownValidFilePosition; lastKnownValidFilePosition = fileReadPosition; //Read the next entry byte[] rFileHash = fileReader.ReadBytes(64); long rFileSize = fileReader.ReadInt64(); int rFileNameCount = fileReader.ReadInt32(); string[] rFileNames = new string[rFileNameCount]; for (int i = 0; i < rFileNameCount; i++) { rFileNames[i] = fileReader.ReadString(); } string rAlbum = fileReader.ReadString(); string rArtist = fileReader.ReadString(); string rTitle = fileReader.ReadString(); byte rRating = fileReader.ReadByte(); //Save the position of the date long datePosition = fileReader.BaseStream.Position; string rDate = 
fileReader.ReadString(); //Save the beginning of the next entry fileReadPosition = fileReader.BaseStream.Position; resultCounter++; //Check if this entry is a result to a search for (int i = 0; i < m_SearchesToStart.Count; i++) { string[] searchPattern = m_SearchesToStart[i].Pattern.ToLower().Split(new char[] { ' ' }); ; //Remove all small patterns RList<string> patterns = new RList<string>(); for (int k = 0; k < searchPattern.Length; k++) { if (searchPattern[k].Length >= 3) { patterns.Add(searchPattern[k]); } } bool isResult = false; int fileNameNumber = 0; for (int j = 0; j < patterns.Count; j++) { //Check all filenames of this entry for (int k = 0; k < rFileNames.Length; k++) { if (rFileNames[k].ToLower().Contains(patterns[j])) { fileNameNumber = k; isResult = true; } } //Check the metadata of this entry if (!isResult) { if (rAlbum.ToLower().Contains(patterns[j])) { isResult = true; } else if (rArtist.ToLower().Contains(patterns[j])) { isResult = true; } else if (rTitle.ToLower().Contains(patterns[j])) { isResult = true; } } //if this is no result for this part of the searchpattern, //we can stop, because there shall be only results with all //parts of the searchpattern. if (isResult == false) { break; } //Reset isResult for the next part of the searchpattern if (j != patterns.Count - 1) { isResult = false; } } if (isResult) { //Add this entry to the results of this search m_SearchResultsBuffer[m_SearchesToStart[i].SearchID].Add(Core.ByteArrayToString(rFileHash), new OldSearchResult(rFileHash, rFileSize, rFileNames[fileNameNumber], rAlbum, rArtist, rTitle, rRating, DateTime.Parse(rDate))); } } bool updateDate = false; int[] indexOfResultsToRemove = new int[0]; //Check if a new result is equal to this entry for (int i = 0; i < m_ResultsToAdd.Count; i++) { //Compare the hashes if (Core.CompareByteArray(rFileHash, m_ResultsToAdd[i].FileHash)) { //It exists already updateDate = true; int[] tempArray1 = new int[indexOfResultsToRemove.Length + 1]; for (int j = 0; j < indexOfResultsToRemove.Length; j++) { tempArray1[j] = indexOfResultsToRemove[j]; } tempArray1[indexOfResultsToRemove.Length] = i; indexOfResultsToRemove = tempArray1; //Check the filenames bool fileNameExists = false; for (int k = 0; k < rFileNames.Length; k++) { if (rFileNames[k] == m_ResultsToAdd[i].FileName) { fileNameExists = true; break; } } if (!fileNameExists) { //The filename is new -> add it insertingData = true; string[] tempArray = new string[rFileNameCount + 1]; for (int k = 0; k < rFileNameCount; k++) { tempArray[k] = rFileNames[k]; } tempArray[rFileNameCount] = m_ResultsToAdd[i].FileName; rFileNames = tempArray; rFileNameCount++; } } } if (updateDate) { //Update the date rDate = DateTime.Now.ToString(m_DateFormatString); //Remove the new result from the list, because it exists RList<Command23.SearchResult> tempRemoveList = new RList<Command23.SearchResult>(); for (int i = 0; i < m_ResultsToAdd.Count; i++) { bool addIt = false; for (int k = 0; k < indexOfResultsToRemove.Length; k++) { if (i == indexOfResultsToRemove[k]) { addIt = true; } } if (addIt) { tempRemoveList.Add(m_ResultsToAdd[i]); } } foreach (Command23.SearchResult r in tempRemoveList) { m_ResultsToAdd.Remove(r); } //Check if we can update the date directly in the file if (!insertingData) { //Write the new date to the file fileWriter.BaseStream.Position = datePosition; fileWriter.Write(rDate); fileWriter.Flush(); fileReader.BaseStream.Position = fileReadPosition; } } //Check the date if we are cleaning up if (cleanUp) { if 
(((TimeSpan)DateTime.Now.Subtract(DateTime.Parse(rDate))).Days > m_CleanUpDays) { isOld = true; insertingData = true; cleanedUpCounter++; } else { fileSizeOfEntries += (ulong)rFileSize; } } else { fileSizeOfEntries += (ulong)rFileSize; } //Check if we have to insert data to the file if (insertingData) { if (isFirstChangedEntry) { //Here we have to beginn writing fileWritePosition = firstPositionOfThisEntry; isFirstChangedEntry = false; } if (!isOld) { fileFlushCounter++; //Write the entry to the buffer memoryWriter.Write(rFileHash); memoryWriter.Write(rFileSize); memoryWriter.Write(rFileNameCount); for (int i = 0; i < rFileNameCount; i++) { memoryWriter.Write(rFileNames[i]); } memoryWriter.Write(rAlbum); memoryWriter.Write(rArtist); memoryWriter.Write(rTitle); memoryWriter.Write(rRating); memoryWriter.Write(rDate); //if the buffer is big enough or we reached the end of the file, write the buffe to the file if (fileFlushCounter == 10000 || fileReadPosition >= fileLength) { fileFlushCounter = 0; memoryWriter.Flush(); memoryReader.BaseStream.Position = 0; fileWriter.BaseStream.Position = fileWritePosition; long memoryLength = memoryReader.BaseStream.Length; long spaceInFile = fileReadPosition - fileWritePosition; //write only as much as space and data we have while (memoryReader.BaseStream.Position < spaceInFile && memoryReader.BaseStream.Position < memoryLength) { fileWriter.Write(memoryReader.ReadByte()); } fileWriter.Flush(); //Reconfigure the filewriter/reader fileWritePosition = fileWriter.BaseStream.Position; fileReader.BaseStream.Position = fileReadPosition; //Write the rest of the data in the buffer to the beginning of the buffer long memoryReaderPosition = memoryReader.BaseStream.Position; long memoryWriterPosition = 0; while (memoryReaderPosition < memoryLength) { memoryReader.BaseStream.Position = memoryReaderPosition; byte b = memoryReader.ReadByte(); memoryReaderPosition = memoryReader.BaseStream.Position; memoryWriter.BaseStream.Position = memoryWriterPosition; memoryWriter.Write(b); memoryWriterPosition = memoryWriter.BaseStream.Position; } memoryWriter.Flush(); memoryWriter.BaseStream.SetLength(memoryWriterPosition); } } } } if (insertingData) { //write the rest of the memorystream to the file. 
fileWriter.BaseStream.Position = fileWritePosition; long mlength = memoryReader.BaseStream.Length; memoryReader.BaseStream.Position = 0; while (memoryReader.BaseStream.Position < mlength) { fileWriter.Write(memoryReader.ReadByte()); } fileWriter.Flush(); } if (cleanUp) { m_Logger.Log(Properties.Resources_Core.CleanSearchDatabase, new object[] { cleanedUpCounter, resultCounter }); resultCounter -= cleanedUpCounter; m_LastCleanUpCount = cleanedUpCounter; } //Add the new results to the file //The position of the filestream points already to the end RIndexedHashtable<string, NewSearchResult> resultsToAdd = new RIndexedHashtable<string, NewSearchResult>(); foreach (Command23.SearchResult result in m_ResultsToAdd) { string fileHashString = Core.ByteArrayToString(result.FileHash); if (resultsToAdd.ContainsKey(fileHashString)) { resultsToAdd[fileHashString].AddFileName(result.FileName); } else { resultsToAdd.Add(fileHashString, new NewSearchResult(result)); } } foreach (NewSearchResult newResult in resultsToAdd.Values) { fileWriter.Write(newResult.FileHash); fileWriter.Write(newResult.FileSize); int fileNameCount = newResult.FileNames.Length; fileWriter.Write(fileNameCount); for (int i = 0; i < fileNameCount; i++) { fileWriter.Write(newResult.FileNames[i]); } fileWriter.Write(newResult.Album); fileWriter.Write(newResult.Artist); fileWriter.Write(newResult.Title); fileWriter.Write(newResult.Rating); fileWriter.Write(DateTime.Now.ToString(m_DateFormatString)); resultCounter++; fileSizeOfEntries += (ulong)newResult.FileSize; } fileWriter.Flush(); //Clear the lists m_ResultsToAdd.Clear(); m_SearchesToStart.Clear(); //Set the correct end of the file if (insertingData) { fileWriter.BaseStream.SetLength(fileWriter.BaseStream.Position); } if (cleanUp) { m_LastCleanUp = DateTime.Now; } //Update information m_ResultCount = resultCounter; m_FileSize = fileStream.Length; m_FileSizeOfEntries = fileSizeOfEntries; fileReader.Close(); fileWriter.Close(); fileStream.Close(); memoryReader.Close(); memoryWriter.Close(); memoryStream.Close(); } } catch (Exception ex) { //Update information m_ResultCount = resultCounter; m_FileSize = fileStream.Length; m_FileSizeOfEntries = fileSizeOfEntries; m_ErrorCounter++; m_Logger.Log(ex, "An exception was thrown in searchDBThread! (#{0})", new object[] { m_ErrorCounter }); try { fileStream.SetLength(lastKnownValidFilePosition2); m_Logger.Log("Searchdatabase cutted to the entry bofore the last known valid entry. 
({0} Bytes remaining)", new object[] { lastKnownValidFilePosition}); m_FileSize = lastKnownValidFilePosition2; m_ResultCount = resultCounter - cleanedUpCounter; m_FileSizeOfEntries = fileSizeOfEntries; } catch { try { if (File.Exists(m_FilePath)) { File.Delete(m_FilePath); m_Logger.Log("Searchdatabase deleted, because it was probably corrupt.", new object[] { }); m_FileSize = 0; m_ResultCount = 0; m_FileSizeOfEntries = 0; } } catch { } } } finally { m_ResultsToAdd.Unlock(); m_SearchesToStart.Unlock(); m_SearchResultsBuffer.Unlock(); if (fileReader != null) { fileReader.Close(); } if (fileWriter != null) { fileWriter.Close(); } if (fileStream != null) { fileStream.Close(); } if (memoryReader != null) { memoryReader.Close(); } if (memoryWriter != null) { memoryWriter.Close(); } if (memoryStream != null) { memoryStream.Close(); } } //Move buffer to normal list try { m_SearchResultsBuffer.Lock(); m_SearchResults.Lock(); for (int i = 0; i < m_SearchResultsBuffer.Count; i++) { if (!m_SearchResults.ContainsKey(((System.Collections.Generic.KeyValuePair<string, RIndexedHashtable<string, OldSearchResult>>)m_SearchResultsBuffer[i]).Key)) { m_SearchResults.Add(((System.Collections.Generic.KeyValuePair<string, RIndexedHashtable<string, OldSearchResult>>)m_SearchResultsBuffer[i]).Key, ((System.Collections.Generic.KeyValuePair<string, RIndexedHashtable<string, OldSearchResult>>)m_SearchResultsBuffer[i]).Value); } } } catch (Exception ex) { m_Logger.Log(ex, "SearchDBManager: An error was thrown while reading the SearchResultsBuffer.", new object[] { }); } finally { m_SearchResultsBuffer.Clear(); m_SearchResults.Unlock(); m_SearchResultsBuffer.Unlock(); } Thread.Sleep(1000); } } catch (Exception ex) { m_Logger.Log(ex, "An exception was thrown in searchDBThread!", new object[] { }); } m_Logger.Log("SearchDBManager closed.", new object[] { }); }); m_SearchDBThread.Name = "searchDBThread"; m_SearchDBThread.IsBackground = true; m_SearchDBThread.Priority = ThreadPriority.Lowest; m_SearchDBThread.Start(); }
protected void Page_Load(object sender, EventArgs e) { RTLSEntities ctx = new RTLSEntities(); gorevekle.Visible = false; if (string.IsNullOrEmpty(Request.QueryString["dlt"]) == false) { int dlt = Convert.ToInt32(Request.QueryString["dlt"]); var sorgu = ctx.TBL_Departman.SingleOrDefault(x => x.ID == dlt); sorgu.dlt = 1; ctx.SaveChanges(); } if (string.IsNullOrEmpty(Request.QueryString["dltG"]) == false) { int dlt = Convert.ToInt32(Request.QueryString["dltG"]); var sorgu = ctx.TBL_Gorev.SingleOrDefault(x => x.ID == dlt); sorgu.dlt = 1; ctx.SaveChanges(); } if (string.IsNullOrEmpty(Request.QueryString["d"]) == false) { string d = Request.QueryString["d"]; int id = Convert.ToInt32(Request.QueryString["id"]); if (d == "departman") { if (id == 0) { TBL_Departman dep = new TBL_Departman(); dep.dlt = 0; dep.Departman = Request.QueryString["name"]; ctx.TBL_Departman.Add(dep); ctx.SaveChanges(); } else { TBL_Departman dep = ctx.TBL_Departman.SingleOrDefault(x => x.ID == id && x.dlt == 0); dep.Departman = Request.QueryString["name"]; ctx.SaveChanges(); } Response.Redirect("departman.aspx"); } else if (d == "gorev") { if (id == 0) { TBL_Gorev gor = new TBL_Gorev(); gor.dlt = 0; gor.Gorev = Request.QueryString["name"]; gor.Departman_ID = Convert.ToInt32(Request.QueryString["DID"]); ctx.TBL_Gorev.Add(gor); ctx.SaveChanges(); } else { TBL_Gorev gor = ctx.TBL_Gorev.SingleOrDefault(x => x.ID == id && x.dlt == 0); gor.Gorev = Request.QueryString["name"]; ctx.SaveChanges(); } Response.Redirect("departman.aspx?DID=" + Request.QueryString["DID"]); } } var departman = ctx.TBL_Departman.Where(x => x.dlt == 0).OrderBy(x => x.Departman); RList.DataSource = departman.ToList(); RList.DataBind(); if (string.IsNullOrEmpty(Request.QueryString["DID"]) == false) { gorevekle.Visible = true; int DID = Convert.ToInt32(Request.QueryString["DID"]); var dep = ctx.TBL_Departman.SingleOrDefault(x => x.ID == DID); gorevBaslik.InnerHtml = dep.Departman + " Görevleri"; var gorev = ctx.TBL_Gorev.Where(x => x.dlt == 0 && x.Departman_ID == DID).OrderBy(x => x.Gorev); RListG.DataSource = gorev.ToList(); RListG.DataBind(); } }
public void DrawChart(RList<KLine> klines, int ktype, List<RList<double>> quotas, List<string> names) { // save the four data sets to be drawn this.klines = klines; this.ktype = ktype; this.quotas = quotas; this.names = names; currentPage = 0; ksIndex = 0; // structure index this.Invalidate(); }
public bool SetSectorData(long sector, byte[] data, byte[] hashCodeResult, Source source) { if (sector < 0) throw new ArgumentOutOfRangeException("sector"); if (data == null) throw new ArgumentNullException("data"); if (hashCodeResult == null) throw new ArgumentNullException("hashCodeResult"); if (source == null) throw new ArgumentNullException("source"); try { m_Sources.Lock(); // 2007-06-14 T.Norad // set current time as last reception in this download m_LastReception = DateTime.Now; if ((m_SectorsMap[sector / 8] & (1 << (int)(sector % 8))) == 0) { // Secure hash comparison BEGIN byte[] hash = ComputeHashes.SHA512Compute(data); byte[] hashCode = new byte[64]; for (int n = 0; n < 64; n++) hashCode[n] = (byte)(hash[n] ^ m_FileHash[n]); hashCode = ComputeHashes.SHA512Compute(hashCode); hashCode = ComputeHashes.SHA512Compute(hashCode); hashCode = ComputeHashes.SHA512Compute(hashCode); hashCode = ComputeHashes.SHA512Compute(hashCode); if (!Core.CompareByteArray(hashCode, hashCodeResult)) { m_Logger.Log("A manipulated sector was received!"); return false; } // Secure hash comparison END // Secure sector comparison BEGIN if (sector != source.LastRequestedSector || m_IsHashing) { m_Logger.Log("An unrequested command was received!"); return false; } // Secure sector comparison END // Set FileStream.Position m_FileStream.Position = sector * 32768; int count; if (m_FileStream.Position + 32768 <= m_FileSize) count = 32768; else count = (int)(m_FileSize - m_FileStream.Position); m_FileStream.Write(data, 0, count); m_FileStream.Flush(); m_ReceivedSectors++; //Update SectorsMap m_SectorsMap[(int)(sector / 8)] |= (byte)(1 << (int)(sector % 8)); m_Downloaded += 32768; source.Report79Received(sector); } else return false; if (m_ReceivedSectors < m_Sectors) { RList<long> sectorsToRequest = new RList<long>(m_SectorsMap.Length); long sectorToRequest = -1; if (!source.IsComplete) { bool sectorCanBeRequested = false; for (int t = 0; t < 10 && !sectorCanBeRequested; t++) { for (long i = 0; i < m_SectorsMap.Length; i++) if ((~m_SectorsMap[i] & source.SectorsMap[i]) != 0) sectorsToRequest.Add(i); if (sectorsToRequest.Count > 0) { long d = sectorsToRequest[Randomizer.GenerateNumber(0, sectorsToRequest.Count)]; byte e = m_SectorsMap[d]; byte f = source.SectorsMap[d]; int g; for (g = 0; g < 8; g++) if (((~e & f) & (1 << g)) != 0) break; sectorToRequest = d * 8 + g; sectorCanBeRequested = true; foreach (Source activeSource in m_Sources.Values) if (activeSource.State == SourceState.Active && activeSource.LastRequestedSector == sectorToRequest) { sectorCanBeRequested = false; break; } } else sectorCanBeRequested = false; } if (sectorToRequest != -1) { source.Report78Sent(sectorToRequest); Core.SendCommand78(m_DownloadPeerID, source.PeerID, m_DownloadID, sectorToRequest); } } else { bool sectorCanBeRequested = false; for (int t = 0; t < 10 && !sectorCanBeRequested; t++) { for (long i = 0; i < m_SectorsMap.Length; i++) if (m_SectorsMap[i] != 255) sectorsToRequest.Add(i); if (sectorsToRequest.Count > 0) { long d = sectorsToRequest[Randomizer.GenerateNumber(0, sectorsToRequest.Count)]; byte e = m_SectorsMap[d]; int g; for (g = 0; g < 8; g++) if ((e & (1 << g)) == 0) break; sectorToRequest = d * 8 + g; sectorCanBeRequested = true; foreach (Source activeSource in m_Sources.Values) if (activeSource.State == SourceState.Active && activeSource.LastRequestedSector == sectorToRequest) { sectorCanBeRequested = false; break; } } else sectorCanBeRequested = false; } if (sectorToRequest != -1) { 
source.Report78Sent(sectorToRequest); Core.SendCommand78(m_DownloadPeerID, source.PeerID, m_DownloadID, sectorToRequest); } } } return true; } catch (Exception ex) { m_Logger.Log(ex, "An exception was thrown while writing to temporary file '{0}'!", m_TempFilePath); return false; } finally { m_Sources.Unlock(); } }
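The sector check in SetSectorData derives its verification code by hashing the 32 KiB sector, XORing the digest with the 64-byte file hash, and hashing the result four more times before comparing it with the code supplied by the peer. A minimal sketch of that chain, assuming ComputeHashes.SHA512Compute is a plain SHA-512 over a byte array; the original compares the arrays with Core.CompareByteArray.

using System.Security.Cryptography;

static class SectorHashCheck
{
    // Recomputes the verification code for one sector and compares it with the
    // value the remote peer sent (hashCodeResult in SetSectorData above).
    public static bool Verify(byte[] sectorData, byte[] fileHash, byte[] expected)
    {
        using (SHA512 sha = SHA512.Create())
        {
            byte[] hash = sha.ComputeHash(sectorData);     // SHA-512 of the raw sector
            byte[] code = new byte[64];
            for (int n = 0; n < 64; n++)
                code[n] = (byte)(hash[n] ^ fileHash[n]);   // mix in the 64-byte file hash
            for (int round = 0; round < 4; round++)        // four further hash rounds
                code = sha.ComputeHash(code);
            if (expected.Length != code.Length) return false;
            for (int n = 0; n < code.Length; n++)
                if (code[n] != expected[n]) return false;
            return true;
        }
    }
}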
public static void Load() { SetUILanguage(); m_ShareManager = new ShareManager(); Constants.SetMaximumDownloadsCount(int.Parse(Settings.Instance["MaximumDownloadsCount"])); if (!Directory.Exists(Settings.Instance["PreferencesDirectory"])) Directory.CreateDirectory(Settings.Instance["PreferencesDirectory"]); Settings.Instance["PreferencesDirectory"] = new DirectoryInfo(Settings.Instance["PreferencesDirectory"]).FullName; if (!Directory.Exists(Settings.Instance["LogDirectory"])) Directory.CreateDirectory(Settings.Instance["LogDirectory"]); Settings.Instance["LogDirectory"] = new DirectoryInfo(Settings.Instance["LogDirectory"]).FullName; if (!Directory.Exists(Settings.Instance["IncomingDirectory"])) Directory.CreateDirectory(Settings.Instance["IncomingDirectory"]); Settings.Instance["IncomingDirectory"] = new DirectoryInfo(Settings.Instance["IncomingDirectory"]).FullName; if (!Directory.Exists(Settings.Instance["TemporaryDirectory"])) Directory.CreateDirectory(Settings.Instance["TemporaryDirectory"]); Settings.Instance["TemporaryDirectory"] = new DirectoryInfo(Settings.Instance["TemporaryDirectory"]).FullName; if (!Directory.Exists(Settings.Instance["CorruptDirectory"])) Directory.CreateDirectory(Settings.Instance["CorruptDirectory"]); Settings.Instance["CorruptDirectory"] = new DirectoryInfo(Settings.Instance["CorruptDirectory"]).FullName; m_WebCachesFilePath = Path.Combine(Settings.Instance["PreferencesDirectory"], m_WebCachesFilePath); //2009-01-25 Nochbaer m_SearchDBFilePath = Path.Combine(Settings.Instance["PreferencesDirectory"], m_SearchDBFilePath); m_SharedDirectoriesFilePath = Path.Combine(Settings.Instance["PreferencesDirectory"], m_SharedDirectoriesFilePath); m_SharedFilesFilePath = Path.Combine(Settings.Instance["PreferencesDirectory"], m_SharedFilesFilePath); m_SharedFilesStatsFilePath = Path.Combine(Settings.Instance["PreferencesDirectory"], m_SharedFilesStatsFilePath); m_MetaDataFilePath = Path.Combine(Settings.Instance["PreferencesDirectory"], m_MetaDataFilePath); m_CommentsFilePath = Path.Combine(Settings.Instance["PreferencesDirectory"], m_CommentsFilePath); m_RatingsFilePath = Path.Combine(Settings.Instance["PreferencesDirectory"], m_RatingsFilePath); m_StatisticsFilePath = Path.Combine(Settings.Instance["PreferencesDirectory"], m_StatisticsFilePath); m_DownloadsFilePath = Path.Combine(Settings.Instance["PreferencesDirectory"], m_DownloadsFilePath); // initialize logger with log directory // Changed 2007-05-06 by T.Norad if (bool.Parse(Settings.Instance["WriteLogfile"])) { Logger.Instance.initialize(Settings.Instance["LogDirectory"]); } // 2007-05-16 T.Norad ResumeDownloads(); m_Logger.Log(Properties.Resources_Core.StealthNetLoading, String.Format(Constants.Software, Core.Version)); m_Logger.Log(Properties.Resources_Core.DownloadSourcesAllowed, Constants.MaximumDownloadsCount, Constants.MaximumSourcesCount); m_Logger.Log(Properties.Resources_Core.NETFrameworkVersion, Environment.Version.ToString()); m_Logger.Log(Properties.Resources_Core.OSVersion, Environment.OSVersion.ToString()); m_DropChainTailCount = 0; if (GenerateIDOrHash()[47] > 192) while (GenerateIDOrHash()[47] <= 128) m_DropChainTailCount++; m_PeerID = GenerateIDOrHash(); try { UpdateWebServiceProxy update = new UpdateWebServiceProxy(); if (bool.Parse(Settings.Instance["SynchronizeWebCaches"])) { string webCaches = update.GetWebCaches(); if (File.Exists(m_WebCachesFilePath)) { string backupFilePath = string.Format("{0}.bak", m_WebCachesFilePath); if (File.Exists(backupFilePath)) { string backupBackupFilePath = 
string.Format("{0}.bak", backupFilePath); if (File.Exists(backupBackupFilePath)) File.Delete(backupBackupFilePath); File.Move(backupFilePath, backupBackupFilePath); } File.Move(m_WebCachesFilePath, backupFilePath); } StreamWriter webCachesFileStreamWriter = new StreamWriter(new FileStream(m_WebCachesFilePath, FileMode.Create, FileAccess.Write, FileShare.None)); webCachesFileStreamWriter.Write(webCaches); webCachesFileStreamWriter.Flush(); webCachesFileStreamWriter.Close(); } m_IsUpdateAvailable = update.IsUpdateAvailable(String.Format(Constants.Software, Core.Version)); update.Dispose(); } catch (Exception ex) { m_Logger.Log(ex, "An exception was thrown while updating the list of WebCaches!"); } m_Keys = RSAGenerateKeys(); try { if (File.Exists(m_WebCachesFilePath)) { XmlDocument webCachesXmlDocument = new XmlDocument(); webCachesXmlDocument.Load(m_WebCachesFilePath); foreach (XmlNode webCacheNode in webCachesXmlDocument.SelectSingleNode("webcaches").SelectNodes("webcache")) m_WebCaches.Add(webCacheNode.Attributes["url"].Value); } else if (bool.Parse(Settings.Instance["SynchronizeWebCaches"])) { m_WebCaches.Add("http://rshare.de/rshare.asmx"); m_WebCaches.Add("http://webcache.stealthnet.at/rwpmws.php"); } } catch (Exception ex) { m_Logger.Log(ex, "An exception was thrown while reading WebCaches file!"); } m_ShareManager.LoadConfiguration(m_SharedDirectoriesFilePath, m_SharedFilesFilePath, m_SharedFilesStatsFilePath, m_MetaDataFilePath, m_CommentsFilePath, m_RatingsFilePath); //2009-01-25 Nochbaer if (bool.Parse(Settings.Instance["ActivateSearchDB"])) { m_SearchDBManager = new SearchDBManager(m_SearchDBFilePath); } try { if (File.Exists(m_StatisticsFilePath)) { XmlDocument statisticsXmlDocument = new XmlDocument(); statisticsXmlDocument.Load(m_StatisticsFilePath); XmlNode statisticsXmlNode = statisticsXmlDocument.SelectSingleNode("statistics"); m_CumulativeDownloaded = long.Parse(statisticsXmlNode.SelectSingleNode("downloaded").InnerText); m_CumulativeUploaded = long.Parse(statisticsXmlNode.SelectSingleNode("uploaded").InnerText); m_CumulativeUptime = TimeSpan.Parse(statisticsXmlNode.SelectSingleNode("uptime").InnerText); } } catch (Exception ex) { m_Logger.Log(ex, "An exception was thrown while reading statistics file!"); } // [MONO] the variable 'downloadsXmlNode' seems to be never used /* try { if (File.Exists(m_DownloadsFilePath)) { XmlDocument downloadsXmlDocument = new XmlDocument(); downloadsXmlDocument.Load(m_DownloadsFilePath); XmlNode downloadsXmlNode = downloadsXmlDocument.SelectSingleNode("downloads"); } } catch (Exception ex) { m_Logger.Log(ex, "An exception was thrown while reading downloads file!"); } */ Thread statisticsThread = new Thread(delegate() { RList<int> downloadStatistics = new RList<int>(); int downloadStatisticsRestCount = 0; long downloadStatisticsRest = 0; long averageDownloadStatistics; RList<int> uploadStatistics = new RList<int>(); int uploadStatisticsRestCount = 0; long uploadStatisticsRest = 0; long averageUploadStatistics; while (!m_IsClosing) { m_Downstream = m_CurrentDownstream; m_CurrentDownstream = 0; m_Upstream = m_CurrentUpstream; m_CurrentUpstream = 0; int downstream = m_Downstream; int upstream = m_Upstream; m_Downloaded += downstream; m_Uploaded += upstream; m_CumulativeDownloaded += downstream; m_CumulativeUploaded += upstream; m_Uptime = m_Uptime.Add(TimeSpan.FromSeconds(1)); m_CumulativeUptime = m_CumulativeUptime.Add(TimeSpan.FromSeconds(1)); if (downloadStatistics.Count == 300) { downloadStatisticsRestCount++; downloadStatisticsRest += 
downloadStatistics[downloadStatistics.Count - 1]; downloadStatistics.RemoveAt(downloadStatistics.Count - 1); } downloadStatistics.Insert(0, downstream); if (m_MinuteAverageDownloadStatistics.Count != 0) { averageDownloadStatistics = 0; for (int n = 0; n < Math.Min(60, downloadStatistics.Count); n++) averageDownloadStatistics += downloadStatistics[n]; if (m_MinuteAverageDownloadStatistics.Count == 300) m_MinuteAverageDownloadStatistics.RemoveAt(m_MinuteAverageDownloadStatistics.Count - 1); m_MinuteAverageDownloadStatistics.Insert(0, (int)(averageDownloadStatistics / Math.Min(60, m_MinuteAverageDownloadStatistics.Count))); } else m_MinuteAverageDownloadStatistics.Insert(0, 0); averageDownloadStatistics = 0; foreach (int item in downloadStatistics) averageDownloadStatistics += item; if (m_AverageDownloadStatistics.Count == 300) m_AverageDownloadStatistics.RemoveAt(m_AverageDownloadStatistics.Count - 1); m_AverageDownloadStatistics.Insert(0, (int)((averageDownloadStatistics + downloadStatisticsRest) / (downloadStatistics.Count + downloadStatisticsRestCount))); if (uploadStatistics.Count == 300) { uploadStatisticsRestCount++; uploadStatisticsRest += uploadStatistics[uploadStatistics.Count - 1]; uploadStatistics.RemoveAt(uploadStatistics.Count - 1); } uploadStatistics.Insert(0, upstream); if (m_MinuteAverageUploadStatistics.Count != 0) { averageUploadStatistics = 0; for (int n = 0; n < Math.Min(60, uploadStatistics.Count); n++) averageUploadStatistics += uploadStatistics[n]; if (m_MinuteAverageUploadStatistics.Count == 300) m_MinuteAverageUploadStatistics.RemoveAt(m_MinuteAverageUploadStatistics.Count - 1); m_MinuteAverageUploadStatistics.Insert(0, (int)(averageUploadStatistics / Math.Min(60, m_MinuteAverageUploadStatistics.Count))); } else m_MinuteAverageUploadStatistics.Insert(0, 0); averageUploadStatistics = 0; foreach (int item in uploadStatistics) averageUploadStatistics += item; if (m_AverageUploadStatistics.Count == 300) m_AverageUploadStatistics.RemoveAt(m_AverageUploadStatistics.Count - 1); m_AverageUploadStatistics.Insert(0, (int)((averageUploadStatistics + uploadStatisticsRest) / (uploadStatistics.Count + uploadStatisticsRestCount))); if (m_ConnectionsStatistics.Count == 300) m_ConnectionsStatistics.RemoveAt(m_ConnectionsStatistics.Count - 1); m_ConnectionsStatistics.Insert(0, m_Connections.Count); Thread.Sleep(1000); } }); statisticsThread.Name = "statisticsThread"; statisticsThread.IsBackground = true; statisticsThread.Start(); Thread lastCommandIDThread = new Thread(delegate() { while (!m_IsClosing) { try { m_LastCommandID.Lock(); for (int n = m_LastCommandID.Count - 1; n >= 0; n--) if (DateTime.Now.Subtract(m_LastCommandID[n].Value).TotalSeconds >= Constants.LastCommandIDTimeout) m_LastCommandID.RemoveAt(n); } catch { } finally { m_LastCommandID.Unlock(); } Thread.Sleep(1000); } }); lastCommandIDThread.Name = "lastCommandIDThread"; lastCommandIDThread.IsBackground = true; lastCommandIDThread.Start(); Thread connectionsThread = new Thread(delegate() { while (!m_IsClosing) { try { m_Connections.Lock(); Connection connection; /* Sum up the total bandwidth used during the last second and * calculate an adjustment, so that the predicted bandwidth for this second * will not exceed the allowed bandwidth limit. 
*/ int uploadAdjustment = 0; int downloadAdjustment = 0; if ((bool.Parse(Settings.Instance["HasUploadLimit"]) || bool.Parse(Settings.Instance["HasDownloadLimit"])) && Core.Connections.Count != 0) { int totalUp = 0; int totalDown = 0; for (int n = m_Connections.Count - 1; n >= 0; n--) { connection = m_Connections[n].Value; totalUp += connection.UploadLimitUsed; totalDown += connection.DownloadLimitUsed; } uploadAdjustment = (int)((float)(int.Parse(Settings.Instance["UploadLimit"]) - totalUp) / Core.Connections.Count); downloadAdjustment = (int)((float)(int.Parse(Settings.Instance["DownloadLimit"]) - totalDown) / Core.Connections.Count); } for (int n = m_Connections.Count - 1; n >= 0; n--) { connection = m_Connections[n].Value; if (connection.IsDisconnected) m_Connections.RemoveAt(n); else { connection.UploadAdjustment = uploadAdjustment; connection.DownloadAdjustment = downloadAdjustment; connection.Process(); } } } catch { } finally { m_Connections.Unlock(); } Thread.Sleep(1000); } }); connectionsThread.Name = "connectionsThread"; connectionsThread.IsBackground = true; connectionsThread.Start(); Thread peersThread = new Thread(delegate() { while (!m_IsClosing) { try { m_Peers.Lock(); Peer peer; for (int n = m_Peers.Count - 1; n >= 0; n--) { peer = m_Peers[n].Value; if (DateTime.Now.Subtract(peer.LastReceived).TotalSeconds >= Constants.PeerTimeout) m_Peers.RemoveAt(n); else peer.Process(); } } catch { } finally { m_Peers.Unlock(); } Thread.Sleep(1000); } }); peersThread.Name = "peersThread"; peersThread.IsBackground = true; peersThread.Start(); Thread searchesThread = new Thread(delegate() { while (!m_IsClosing) { try { foreach (Search search in m_Searches.Values) search.Process(); } catch { } Thread.Sleep(1000); } }); searchesThread.Name = "searchesThread"; searchesThread.IsBackground = true; searchesThread.Start(); // 06.07.2009 Auto-Move re-implemented (Lars) Thread downloadsThread = new Thread(delegate() { int moveIntervall = 60; bool moveDownloads = false; try { moveIntervall = Int32.Parse(Settings.Instance["AutoMoveDownloadsIntervall"]); if (moveIntervall < 60) moveIntervall = 60; moveDownloads = bool.Parse(Settings.Instance["AutoMoveDownloads"]); } catch (Exception ex) { m_Logger.Log(ex, "An exception was thrown while initializing Auto-Move!"); } while (!m_IsClosing) { try { m_DownloadsAndQueue.Lock(); Download download; for (int n = Math.Min(m_DownloadsAndQueue.Count, Constants.MaximumDownloadsCount) - 1; n >= 0; n--) { download = m_DownloadsAndQueue[n].Value; if (moveDownloads) { // We move downloads if ((download.LastReception == null || !download.LastReception.HasValue || DateTime.Now.Subtract(download.LastReception.Value).TotalMinutes >= moveIntervall) && (download.QueueStart != null && download.QueueStart.HasValue && DateTime.Now.Subtract(download.QueueStart.Value).TotalMinutes >= moveIntervall)) { // Our download has not received anything for longer than moveIntervall and // has been started for longer than moveIntervall bool flag = false; foreach (Download.Source source in download.Sources.Values) if (source.State == Download.SourceState.Active || source.State == Download.SourceState.Requested) { flag = true; break; } if (!flag && m_DownloadsAndQueue.Count > Constants.MaximumDownloadsCount) { // The download is moved to the queue MoveDownloadToQueue(download.DownloadIDString); m_Logger.Log("Auto-Move: The download of \"{0}\" has been moved to the queue!", download.FileName); } else { // The download has suitable sources and is therefore not moved 
download.Process(); } } else { // Our download is receiving or moveIntervall has not been reached yet download.Process(); } } else { // We do not move downloads download.Process(); } } } catch (Exception ex) { m_Logger.Log(ex, "An exception was thrown while processing downloads!"); } finally { m_DownloadsAndQueue.Unlock(); } Thread.Sleep(1000); } }); downloadsThread.Name = "downloadsThread"; downloadsThread.IsBackground = true; downloadsThread.Start(); Thread uploadsThread = new Thread(delegate() { while (!m_IsClosing) { try { m_Uploads.Lock(); Upload upload; for (int n = m_Uploads.Count - 1; n >= 0; n--) { upload = m_Uploads[n].Value; if (DateTime.Now.Subtract(upload.LastRequest).TotalSeconds >= Constants.UploadTimeout) m_Uploads.RemoveAt(n); upload.Process(); } } catch { } finally { m_Uploads.Unlock(); } Thread.Sleep(1000); } }); uploadsThread.Name = "uploadsThread"; uploadsThread.IsBackground = true; uploadsThread.Start(); Thread listeningThread = new Thread(delegate() { try { Socket listeningSocket = new Socket(AddressFamily.InterNetwork, SocketType.Stream, ProtocolType.Tcp); listeningSocket.Bind(new IPEndPoint(IPAddress.Any, int.Parse(Settings.Instance["Port"]))); listeningSocket.Listen(5); while (!m_IsClosing) { try { Connection connection = new Connection(listeningSocket.Accept()); m_IsAccessible = true; //2008-09-17 : Nochbaer int avgConCount = int.Parse(Settings.Instance["AverageConnectionsCount"]); if (avgConCount > 10) avgConCount = 10; if (m_Connections.Count < (int)((float)avgConCount * 1.25F) && !m_Connections.ContainsKey(connection.RemoteEndPoint.Address)) { m_Connections.Add(connection.RemoteEndPoint.Address, connection); (new Command10(m_Keys)).Send(connection); } else connection.Disconnect(); } catch { } } try { listeningSocket.Shutdown(SocketShutdown.Both); } catch { } try { listeningSocket.Close(); } catch { } } catch (Exception ex) { m_Logger.Log(ex, "An exception was thrown while binding socket at port {0}!", Settings.Instance["Port"]); } }); listeningThread.Name = "listeningThread"; listeningThread.IsBackground = true; listeningThread.Start(); if (!m_WebCaches.IsEmpty) { try { WebCacheProvider webCacheProvider = new WebCacheProvider(m_WebCaches); try { webCacheProvider.RemovePeer(); } catch { } Thread webCacheAddOrRemovePeerThread = new Thread(delegate() { while (!m_IsClosing) { try { //2009-06-02 : Lars int avgConCount = int.Parse(Settings.Instance["AverageConnectionsCount"]); if (avgConCount > 10) avgConCount = 10; if (m_Connections.Count < avgConCount) webCacheProvider.AddPeer(int.Parse(Settings.Instance["Port"])); } catch { } for (int n = 0; !m_IsClosing && n < 120; n++) Thread.Sleep(500); } try { webCacheProvider.RemovePeer(); } catch { } }); webCacheAddOrRemovePeerThread.Name = "webCacheAddOrRemovePeerThread"; webCacheAddOrRemovePeerThread.Start(); Thread webCacheGetPeerThread = new Thread(delegate() { while (!m_IsClosing) { try { //2008-09-17 : Nochbaer int avgConCount = int.Parse(Settings.Instance["AverageConnectionsCount"]); if (avgConCount > 10) avgConCount = 10; while (m_Connections.Count < avgConCount && !m_IsClosing) { string node = webCacheProvider.GetPeer(); if (node == string.Empty) break; string[] endPoint = node.Split(':'); AddConnection(new IPEndPoint(IPAddress.Parse(endPoint[0]), Convert.ToInt32(endPoint[1]))); if (m_Connections.Count < avgConCount - 1) Thread.Sleep(15000); else break; } } catch { } Thread.Sleep(60000); } }); webCacheGetPeerThread.Name = "webCacheGetPeerThread"; webCacheGetPeerThread.IsBackground = true; 
webCacheGetPeerThread.Start(); } catch (Exception ex) { m_Logger.Log("The WebCache's client could not be initialized properly!", ex); } } Thread OnlineSignatureThread = new Thread(delegate() { SetUILanguage(); while (bool.Parse(Settings.Instance["ActivateOnlineSignature"])) { try { XmlWriterSettings onlineSignatureXmlWriterSettings = new XmlWriterSettings(); onlineSignatureXmlWriterSettings.CloseOutput = true; onlineSignatureXmlWriterSettings.Indent = true; MemoryStream memoryStream = new MemoryStream(); XmlWriter onlineSignatureXmlWriter = XmlWriter.Create(memoryStream, onlineSignatureXmlWriterSettings); onlineSignatureXmlWriter.WriteStartDocument(); onlineSignatureXmlWriter.WriteStartElement("onlinesignature"); onlineSignatureXmlWriter.WriteStartElement("software"); onlineSignatureXmlWriter.WriteValue(String.Format(Constants.Software, Core.Version)); onlineSignatureXmlWriter.WriteEndElement(); onlineSignatureXmlWriter.WriteStartElement("connections"); onlineSignatureXmlWriter.WriteValue(m_Connections.Count); onlineSignatureXmlWriter.WriteEndElement(); onlineSignatureXmlWriter.WriteStartElement("downloads"); onlineSignatureXmlWriter.WriteValue(m_DownloadsAndQueue.Count); onlineSignatureXmlWriter.WriteEndElement(); onlineSignatureXmlWriter.WriteStartElement("downloadqueue"); onlineSignatureXmlWriter.WriteValue("0"); onlineSignatureXmlWriter.WriteEndElement(); onlineSignatureXmlWriter.WriteStartElement("activeuploads"); onlineSignatureXmlWriter.WriteValue(m_Uploads.Count > Constants.MaximumUploadsCount ? Constants.MaximumUploadsCount : m_Uploads.Count); onlineSignatureXmlWriter.WriteEndElement(); onlineSignatureXmlWriter.WriteStartElement("uploads"); onlineSignatureXmlWriter.WriteValue(m_Uploads.Count); onlineSignatureXmlWriter.WriteEndElement(); onlineSignatureXmlWriter.WriteStartElement("sharedfiles"); onlineSignatureXmlWriter.WriteValue(m_ShareManager.SharedFiles.Count); onlineSignatureXmlWriter.WriteEndElement(); onlineSignatureXmlWriter.WriteStartElement("downloadcapacity"); onlineSignatureXmlWriter.WriteValue(Settings.Instance["DownloadCapacity"]); onlineSignatureXmlWriter.WriteEndElement(); onlineSignatureXmlWriter.WriteStartElement("uploadcapacity"); onlineSignatureXmlWriter.WriteValue(Settings.Instance["UploadCapacity"]); onlineSignatureXmlWriter.WriteEndElement(); onlineSignatureXmlWriter.WriteStartElement("downloadlimit"); onlineSignatureXmlWriter.WriteValue(bool.Parse(Settings.Instance["HasDownloadLimit"]) ? Settings.Instance["DownloadLimit"] : "0"); onlineSignatureXmlWriter.WriteEndElement(); onlineSignatureXmlWriter.WriteStartElement("uploadlimit"); onlineSignatureXmlWriter.WriteValue(bool.Parse(Settings.Instance["HasUploadLimit"]) ? 
Settings.Instance["UploadLimit"] : "0"); onlineSignatureXmlWriter.WriteEndElement(); onlineSignatureXmlWriter.WriteStartElement("language"); onlineSignatureXmlWriter.WriteValue(Settings.Instance["UICulture"]); onlineSignatureXmlWriter.WriteEndElement(); onlineSignatureXmlWriter.WriteStartElement("cumulativedownloaded"); onlineSignatureXmlWriter.WriteValue(m_CumulativeDownloaded); onlineSignatureXmlWriter.WriteEndElement(); onlineSignatureXmlWriter.WriteStartElement("cumulativeuploaded"); onlineSignatureXmlWriter.WriteValue(m_CumulativeUploaded); onlineSignatureXmlWriter.WriteEndElement(); onlineSignatureXmlWriter.WriteStartElement("downloaded"); onlineSignatureXmlWriter.WriteValue(m_Downloaded); onlineSignatureXmlWriter.WriteEndElement(); onlineSignatureXmlWriter.WriteStartElement("uploaded"); onlineSignatureXmlWriter.WriteValue(m_Uploaded); onlineSignatureXmlWriter.WriteEndElement(); onlineSignatureXmlWriter.WriteStartElement("downstream"); onlineSignatureXmlWriter.WriteValue(m_MinuteAverageDownloadStatistics[0].ToString()); onlineSignatureXmlWriter.WriteEndElement(); onlineSignatureXmlWriter.WriteStartElement("upstream"); onlineSignatureXmlWriter.WriteValue(m_MinuteAverageUploadStatistics[0].ToString()); onlineSignatureXmlWriter.WriteEndElement(); onlineSignatureXmlWriter.WriteStartElement("cumulativeuptime"); onlineSignatureXmlWriter.WriteValue(m_CumulativeUptime.ToString()); onlineSignatureXmlWriter.WriteEndElement(); onlineSignatureXmlWriter.WriteStartElement("uptime"); onlineSignatureXmlWriter.WriteValue(m_Uptime.ToString()); onlineSignatureXmlWriter.WriteEndElement(); onlineSignatureXmlWriter.WriteEndElement(); onlineSignatureXmlWriter.Flush(); FileStream fileStream = new FileStream(Path.Combine(Settings.Instance["PreferencesDirectory"], "onlinesignature.xml"), FileMode.Create, FileAccess.Write, FileShare.Read); byte[] buffer = memoryStream.ToArray(); fileStream.Write(buffer, 0, buffer.Length); onlineSignatureXmlWriter.Close(); fileStream.Flush(); fileStream.Close(); } catch (Exception ex) { m_Logger.Log(ex, Properties.Resources_Core.Exception_OnlineSignature); } Thread.Sleep(int.Parse(Settings.Instance["OnlineSignatureUpdateIntervall"]) * 60 * 1000); } }); OnlineSignatureThread.Name = "onlineSignatureThread"; OnlineSignatureThread.IsBackground = true; OnlineSignatureThread.Start(); //2008-05-22-Eroli: Writing downloads.xml every 5 minutes to prevent losing the sectorsMap if StealthNet crashes //2009-02-16 Nochbaer: Also statistics.xml Thread backupThread = new Thread(delegate() { DateTime lastDownloads = DateTime.MinValue; DateTime lastStatistics = DateTime.MinValue; while (!m_IsClosing) { if (DownloadsXmlWriter.IsReady && DateTime.Now.Subtract(lastDownloads).TotalMinutes >= 5) { DownloadsXmlWriter.Write(m_DownloadsFilePath, m_DownloadsAndQueue); lastDownloads = DateTime.Now; } if (DateTime.Now.Subtract(lastStatistics).TotalMinutes >= 5) { try { StatisticsXmlWriter.write(m_StatisticsFilePath, m_CumulativeDownloaded.ToString(), m_CumulativeUploaded.ToString(), m_CumulativeUptime.ToString()); } catch (Exception ex) { m_Logger.Log(ex, "An exception was thrown while writing statistics.xml!"); } if (m_ShareManager.IsReady) { try { m_ShareManager.SaveConfiguration(m_SharedDirectoriesFilePath, m_SharedFilesFilePath, m_SharedFilesStatsFilePath, m_MetaDataFilePath, m_CommentsFilePath, m_RatingsFilePath); } catch (Exception ex) { m_Logger.Log(ex, "An exception was thrown while writing shared files index!"); } } lastStatistics = DateTime.Now; } Thread.Sleep(60000); } }); backupThread.Name = 
"backupThread"; backupThread.IsBackground = true; backupThread.Start(); Thread collectingThread = new Thread(delegate() { while (!m_IsClosing) { GC.Collect(); GC.WaitForPendingFinalizers(); GC.Collect(); GC.WaitForPendingFinalizers(); Thread.Sleep(60000); } }); collectingThread.Name = "collectingThread"; collectingThread.IsBackground = true; collectingThread.Start(); m_Logger.Log(Properties.Resources_Core.StealthNetLoaded, String.Format(Constants.Software, Core.Version)); }
public void AddSource(byte[] id, long fileSize, string fileName, RIndexedHashtable<string, string> metaData, string comment, byte rating, byte[] sectorsMap) { if (id == null) throw new ArgumentNullException("id"); if (id.Length != 48) throw new ArgumentException(); if (fileSize < 0) throw new ArgumentOutOfRangeException("fileSize"); if (fileName == null) throw new ArgumentNullException("fileName"); if (metaData == null) throw new ArgumentNullException("metaData"); if (comment == null) throw new ArgumentNullException("comment"); if (rating > 3) throw new ArgumentOutOfRangeException("rating"); if (m_IsHashing) return; try { // 2007-06-14 T.Norad // set current time as last seen in this download m_LastSeen = DateTime.Now; m_Sources.Lock(); string idString = Core.ByteArrayToString(id); if (!m_Sources.ContainsKey(idString)) m_Sources.Add(idString, new Source(m_Sources, id, fileName, metaData, comment, rating, sectorsMap)); else m_Sources[idString].Report5354Received(fileName, metaData, comment, rating, sectorsMap); RIndexedHashtable<string, int> fileNames1 = new RIndexedHashtable<string, int>(); RIndexedHashtable<byte, int> ratings1 = new RIndexedHashtable<byte, int>(); foreach (Source source in m_Sources.Values) { if (!source.HasInformation) continue; if (!fileNames1.ContainsKey(source.FileName)) fileNames1.Add(source.FileName, 1); else fileNames1[source.FileName]++; if (!ratings1.ContainsKey(source.Rating)) ratings1.Add(source.Rating, 1); else ratings1[source.Rating]++; } RList<string> fileNames2 = new RList<string>(fileNames1.Keys); for (int n = 1; n <= fileNames2.Count - 1; n++) for (int m = 0; m < fileNames2.Count - n; m++) { if (fileNames1[fileNames2[m]] < fileNames1[fileNames2[m + 1]]) { string temp; temp = fileNames2[m]; fileNames2[m] = fileNames2[m + 1]; fileNames2[m + 1] = temp; } } fileNames2.Remove(string.Empty); if (!fileNames2.IsEmpty) { m_FileName = fileNames2[0]; } else { m_FileName = m_FileHashString; } RList<byte> ratings2 = new RList<byte>(ratings1.Keys); for (int n = 1; n <= ratings2.Count - 1; n++) for (int m = 0; m < ratings2.Count - n; m++) { if (ratings1[ratings2[m]] < ratings1[ratings2[m + 1]]) { byte temp; temp = ratings2[m]; ratings2[m] = ratings2[m + 1]; ratings2[m + 1] = temp; } } ratings2.Remove(0); if (!ratings2.IsEmpty) m_Rating = ratings2[0]; else m_Rating = 0; foreach (KeyValuePair<string, string> metaDataItem in metaData) if (!m_MetaData.ContainsKey(metaDataItem.Key)) m_MetaData.Add(metaDataItem.Key, metaDataItem.Value); Core.ParseMetaData(m_MetaData, out m_Album, out m_Artist, out m_Title); if (!m_HasInformation) { m_FileSize = fileSize; m_FileSizeString = Core.LengthToString(m_FileSize); m_Sectors = m_FileSize / 32768; m_SectorsMap = new byte[(m_Sectors / 8) + 1]; for (long n = m_Sectors + 1; n < m_SectorsMap.Length * 8; n++) m_SectorsMap[n / 8] |= (byte)(1 << ((int)n % 8)); FillWithZeros(); m_HasInformation = true; } } finally { m_Sources.Unlock(); } }
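Both SetSectorData and AddSource above treat m_SectorsMap as a bitfield: sector n lives in bit n % 8 of byte n / 8. The two operations involved, testing and setting a sector bit, can be isolated into small helpers; a sketch under that same layout, with hypothetical names:

static class SectorBitmap
{
    // True if the given sector is already marked as received in the map.
    public static bool IsSet(byte[] map, long sector)
    {
        return (map[sector / 8] & (1 << (int)(sector % 8))) != 0;
    }

    // Marks the given sector as received.
    public static void Set(byte[] map, long sector)
    {
        map[(int)(sector / 8)] |= (byte)(1 << (int)(sector % 8));
    }
}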