/// <summary>
/// Builds an analyzer from two JSON-serialized graphs and a prepared analysis state.
/// </summary>
/// <param name="controlFlowGrapJson">Control-flow graph in the Graph JSON format.</param>
/// <param name="dataFlowGraphJson">Data-flow graph in the Graph JSON format.</param>
/// <param name="analyzeData">Pre-built start/end state for the analysis.</param>
/// <param name="code">Raw source text; cleaned via FileHelper.InputClean before storing.</param>
public Analyzer(string controlFlowGrapJson, string dataFlowGraphJson, AnalyzeData analyzeData, string code)
{
    // Rehydrate both graphs from their serialized form.
    ControlFlowGraph = new Graph();
    ControlFlowGraph.FromJson(controlFlowGrapJson);
    DataFlowGraph = new Graph();
    DataFlowGraph.FromJson(dataFlowGraphJson);

    this.analyzeData = analyzeData;
    this.code = FileHelper.InputClean(code);

    // Fresh, empty containers for results accumulated during analysis.
    ingressEndStructs = new Dictionary<string, Struct>();
    AllStructs = new List<Dictionary<string, Struct>>();
}
/// <summary>
/// Entry point: runs the main analysis over the last five days of data,
/// then waits for Enter before exiting.
/// </summary>
static void Main(string[] args)
{
    // Use UtcNow directly instead of DateTime.Now.ToUniversalTime():
    // same instant, but no local-time read and no DST conversion edge case.
    DateTime currentPeriod = DateTime.UtcNow;
    const int lastDaysNumber = 5; // size of the analysis window, in days

    AnalyzeData dataProcessor = new AnalyzeData(lastDaysNumber, currentPeriod);
    dataProcessor.RunMainProcess();

    Console.WriteLine("Press enter to quit.");
    Console.ReadLine();
}
/// <summary>
/// Dumps every flex point recorded under <paramref name="percent"/> for the
/// given coin and returns how many there were.
/// </summary>
/// <param name="coin">Coin symbol, used only in the header log line.</param>
/// <param name="analyzeData">Holds the FlexPoint map keyed by percent string.</param>
/// <param name="percent">Key into <c>analyzeData.FlexPoint</c>.</param>
/// <returns>Number of flex points logged for that key.</returns>
private static int log(string coin, AnalyzeData analyzeData, string percent)
{
    // NOTE(review): informational output is written at Error level — presumably
    // to force it past the logger threshold; confirm this is intentional.
    logger.Error($"---> {coin} {percent}");
    foreach (var item in analyzeData.FlexPoint[percent])
    {
        logger.Error($"isHigh:{item.isHigh} open:{item.open} date:{Utils.GetDateById(item.id).ToString("yyyy-MM-dd HH:mm:ss")}");
    }
    // Removed three commented-out blank-log lines (dead code).
    return analyzeData.FlexPoint[percent].Count;
}
/// <summary>
/// End-to-end check: parses the demo P4 program, marks every header/variable
/// as valid/initialized, runs the analyzer, and verifies the per-node Use
/// counts on the resulting control-flow graph.
/// </summary>
public void AnalyzeTest()
{
    var content = System.IO.File.ReadAllText(@"..\..\..\..\AngularApp\Files\demo1.txt");
    var structs = Analyzer.GetStructs(content);
    var cfg = P4ToGraph.ControlFlowGraph(ref content);
    var dfg = P4ToGraph.DataFlowGraph(content, cfg);

    // Pretend everything is already valid/initialized at the end state.
    structs.ForEach(_struct =>
    {
        foreach (var header in _struct.Headers.Values)
        {
            header.Valid = true;
            header.Variables.ForEach(x => x.IsInitialize = true);
        }
    });

    // Serialize round-trip gives the start state an independent deep copy.
    var analyzeData = new AnalyzeData
    {
        EndState = structs,
        Id = 1,
        StartState = JsonSerializer.Deserialize<List<Struct>>(JsonSerializer.Serialize(structs))
    };

    var analyzer = new Analyzer(cfg.ToJson(), dfg.ToJson(), analyzeData, content);
    analyzer.Analyze();
    analyzer.FinishOperations();

    // Expected Use count per control-flow-graph node index.
    var controlFlowGraph = analyzer.ControlFlowGraph;
    int[] expectedUse = { 0, 1, 1, 1, 2, 2, 2, 4 };
    for (int i = 0; i < expectedUse.Length; i++)
    {
        Assert.Equal(expectedUse[i], controlFlowGraph[i].Use);
    }
}
/// <summary>
/// Crawls the site's search listing for <paramref name="keywordQuery"/> page by
/// page, combines the page responses, optionally fetches item details, and
/// appends the extracted items to <paramref name="resultDataList"/>.
/// Progress is mapped onto [ProgressPercStart, ProgressPercEnd] via backgroundWorker1.
/// </summary>
/// <param name="siteEntity">Site whose newest crawl configuration is used.</param>
/// <param name="keywordQuery">Keyword plus start/end page range.</param>
/// <param name="resultDataList">Receives one AnalyzeData per successful search.</param>
/// <param name="ProgressPercStart">Progress value at the first page.</param>
/// <param name="ProgressPercEnd">Progress value after the last page.</param>
private void Search(SiteEntity siteEntity, KeywordQuery keywordQuery, List<AnalyzeData> resultDataList, int ProgressPercStart, int ProgressPercEnd)
{
    var firstCrawl = CrawlBusiness.GetTopBySiteID(siteEntity.SiteID, "");
    var crawlID = firstCrawl.CrawlID;
    string lastItemID = null;
    var keyword = keywordQuery.Keyword;
    var keywordExclude = "";
    var startPage = keywordQuery.StartPage;
    var endPage = keywordQuery.EndPage;
    var crawl = CrawlBusiness.GetByCrawlID(crawlID);
    var site = SiteBusiness.GetBySiteID(crawl.SiteID);
    ListResponse result = null;

    for (int currentPage = startPage; currentPage <= endPage; currentPage++)
    {
        CrawlRequest request = CrawlRequest.GetQueryUrl(crawlID, keyword, currentPage, keywordExclude, "", "");
        crawl.KeywordQuery = keyword;
        crawl.KeywordAny = "";
        crawl.KeywordNot = keywordExclude;
        crawl.KeywordSite = "";

        var response = Core.Crawler.SimpleCrawler.CrawlList_Single(request, crawl, site, true, null);
        var currentItems = response.ExtractItems(crawl);

        // Only merge pages that produced new items; a page whose trailing ItemID
        // matches the previous page means the site is echoing old results.
        if (!(currentItems == null || !currentItems.Any() || currentItems.Last().ItemID == lastItemID))
        {
            lastItemID = currentItems.Last().ItemID;
            if (result == null)
            {
                result = response;
            }
            else
            {
                result.CombineList(response);
            }
        }

        // BUG FIX: the numerator was (currentPage + 1), which maps onto the
        // progress range correctly only when startPage == 0. Count completed
        // pages relative to startPage so the bar ends exactly at ProgressPercEnd.
        backgroundWorker1.ReportProgress(ProgressPercStart + (ProgressPercEnd - ProgressPercStart) * (currentPage - startPage + 1) / (endPage - startPage + 1));
    }

    // Extract items from the combined listing (if any page yielded results).
    if (result != null)
    {
        if (DetailChk.Checked)
        {
            this.Text = Site.Name + " 抓取Items";
            Core.Crawler.SimpleCrawler.CrawlItem_Multi(
                result, crawl, site, msg => { });
            this.Text = @"Palas搜索工具";
        }
        var items = result.ExtractItems(crawl, null);
        AnalyzeData data = new AnalyzeData() { Items = items, CrawlID = crawl.CrawlID };
        resultDataList.Add(data);
    }
}
///// <summary>
///// Gets the index of the data section inside a Chunked envelope
///// </summary>
///// <param name="bts"></param>
///// <param name="offset"></param>
///// <param name="count"></param>
///// <param name="dataIndex">start index of the chunked data</param>
///// <param name="datalen">length of the chunked data</param>
///// <returns>whether a chunked envelope was found</returns>
//public bool GetChunkedDataIndex(byte[] bts, int offset, int count, out int dataIndex, out int datalen)
//{
//    //TODO: second 0D0A split across packets is not handled yet
//    dataIndex = datalen = -1;
//    // look for the data between two 0D0A markers
//    int index1;
//    if (flag0D) // previous packet ended with 0D
//    {
//        flag0A = bts[offset] == 0xA;
//        if (flag0A)
//        {
//            offset++;
//        }
//    }
//    if (flag0D && flag0A) // first 0D0A already consumed
//    {
//        index1 = offset;
//    }
//    else
//    {
//        index1 = bts.FindNext0D0AIndex(offset, count);
//        if (index1 < 0) // no 0D0A found
//        {
//            flag0D = bts[offset + count] == 0xD;
//            LastData2.Clear();
//            return false;
//        }
//        index1 += 2;
//        count = count - (index1 - offset);
//    }
//    var index2 = bts.FindNext0D0AIndex(index1, count);
//    if (index2 >= 0)
//    {
//        var len = index2 - index1;
//        var hbts = new byte[len];
//        Array.Copy(bts, index1, hbts, 0, len);
//        string hexlen;
//        if (LastData2.Count == 0)
//        {
//            hexlen = ByteHelper.GBKToString(hbts);
//        }
//        else
//        {
//            LastData2.AddRange(hbts);
//            hexlen = ByteHelper.GBKToString(LastData2.ToArray());
//            LastData2.Clear();
//        }
//        datalen = Convert.ToInt32(hexlen, 16);
//        dataIndex = index2 + 2;
//        return true;
//    }
//    else
//    {
//        flag0D = flag0A = true;
//        for (int i = index1; i < count; i++)
//        {
//            LastData2.Add(bts[i]);
//        }
//    }
//    return false;
//}
/// <summary>
/// Splits HTTP chunked-transfer payloads out of the incoming byte stream and
/// feeds each run of payload bytes to <paramref name="analyze"/>. Carries
/// partial-chunk state (NeedLen, MustReadHeader, NoHttpPackage) across calls
/// so a chunk split over several packets is reassembled.
/// </summary>
/// <param name="bts">receive buffer</param>
/// <param name="offset">start of the newly received bytes within <paramref name="bts"/></param>
/// <param name="count">number of newly received bytes</param>
/// <param name="analyze">callback invoked with (buffer, offset, length) for each payload run</param>
public void SplitChunkedData(byte[] bts, int offset, int count, AnalyzeData analyze)
{
    // Stream was previously detected as raw (no HTTP envelope): pass through as-is.
    if (NoHttpPackage)
    {
        analyze(bts, offset, count);
        return;
    }
    if (MustReadHeader)
    {
        // First packet starting with the signature bytes marks the stream as having no HTTP envelope.
        // NOTE(review): 0x30 0x31 0x63 0x64 is ASCII "01cd" — presumably a known
        // magic prefix for this protocol; confirm against the sender.
        if (bts.Length > offset + 3 && bts[offset] == 0x30 && bts[offset + 1] == 0x31 && bts[offset +
            2] == 0x63 && bts[offset + 3] == 0x64)
        {
            NoHttpPackage = true;
            analyze(bts, offset, count);
            return;
        }
        var hlen = ReadHeader(bts, offset, count);
        if (hlen < 0)
        {
            // Header incomplete in this packet; wait for more data.
            return;
        }
        // NOTE(review): hlen - 2 appears to keep the header's trailing CRLF in
        // the stream for the chunk scanner — confirm against ReadHeader's contract.
        offset += hlen - 2;
        count -= hlen - 2;
        MustReadHeader = false;
    }
    while (count > 0 && bts.Length > offset)
    {
        if (NeedLen > 0)
        {
            // A previous chunk is still missing NeedLen bytes.
            if (NeedLen > count)
            {
                // Data not fully received: pass what we have, cache the deficit, wait for the next packet.
                analyze(bts, offset, count);
                NeedLen -= count;
                break;
            }
            analyze(bts, offset, NeedLen);
            offset += NeedLen;
            count -= NeedLen;
            NeedLen = 0;
        }
        else
        {
            if (GetChunkedDataIndex(bts, offset, count, out int dataIndex, out int datalen))
            {
                var blen = count - dataIndex + offset; // bytes actually available after the size line
                if (datalen > blen)
                {
                    // Data not fully received: pass the partial chunk, remember the remainder.
                    analyze(bts, dataIndex, blen);
                    NeedLen = datalen - blen;
                    break;
                }
                else
                {
                    analyze(bts, dataIndex, datalen);
                    count = blen - datalen;
                    offset = dataIndex + datalen;
                }
            }
            // NOTE(review): the method is truncated in this excerpt — the branch for
            // GetChunkedDataIndex returning false and the closing braces of the
            // loop/method are not visible here.
/// <summary>
/// Handles a completed async receive: buffers partial packets on the socket's
/// UserToken until the socket reports no more pending bytes, then decompresses,
/// analyzes, and answers the assembled message. The receive is re-armed in the
/// finally block so the loop keeps running.
/// </summary>
/// <param name="reciveEventArgs">Completed receive event args for this client socket.</param>
private void ProcessRecive(SocketAsyncEventArgs reciveEventArgs)
{
    try
    {
        // Broken connection or zero-byte read: close and bail out
        // (nulling reciveEventArgs also suppresses the re-arm in finally).
        if (reciveEventArgs.SocketError != SocketError.Success || reciveEventArgs.BytesTransferred == 0)
        {
            closeConnect(reciveEventArgs.AcceptSocket);
            reciveEventArgs = null;
            return;
        }

        int len = reciveEventArgs.BytesTransferred; // length of this packet
        byte[] data_pac = new byte[len];
        Array.Copy(reciveEventArgs.Buffer, reciveEventArgs.Offset, data_pac, 0, len);

        // Prepends any previously buffered partial data (stored in UserToken)
        // to the freshly received bytes; ok==true clears the buffer afterwards.
        Func<bool, byte[]> appendData = (ok) =>
        {
            byte[] newData;
            if (reciveEventArgs.UserToken != null)
            {
                byte[] tmp = (byte[])reciveEventArgs.UserToken;
                newData = new byte[len + tmp.Length];
                Array.Copy(tmp, 0, newData, 0, tmp.Length);
                Array.Copy(data_pac, 0, newData, tmp.Length, len);
                if (ok)
                {
                    reciveEventArgs.UserToken = null;
                }
            }
            else
            {
                newData = data_pac;
            }
            return newData;
        };

        if (reciveEventArgs.AcceptSocket.Available != 0)
        {
            // More bytes still pending on the socket — accumulate and wait.
            reciveEventArgs.UserToken = appendData(false);
        }
        else
        {
            data_pac = appendData(true);
            ClientOP client = new ClientOP(); // client object for this connection
            client.cSocket = reciveEventArgs.AcceptSocket;
            if (Analyze(data_pac, len, client))
            {
                string msg = client.Pac_Msg;
                if (!string.IsNullOrEmpty(msg))
                {
                    string decodeMsg = ZipHelper.GZipDecompressString(msg); // payload is GZip-compressed
                    AnalyzeData analzeData = new AnalyzeData();
                    bool isZ;
                    string sendMsg = analzeData.StartAnalyze(decodeMsg, client, out isZ);
                    Send(sendMsg, client.cSocket, isZ);
                }
                if (client.Pac_Type == 101)
                {
                    Send("socket连接成功", client.cSocket, false);
                }
            }
            else // Analyze failed: notify, unregister, and drop the connection
            {
                Send("数据解析失败", client.cSocket, false);
                closeConnect(reciveEventArgs.AcceptSocket);
                // BUG FIX: client was set to null BEFORE client.ConID was read
                // for TryRemove, guaranteeing a NullReferenceException here.
                // Remove the dictionary entry first, then release the reference.
                dic_Clients.TryRemove(client.ConID, out _);
                client = null;
                reciveEventArgs.AcceptSocket = null;
            }
        }
    }
    catch (Exception ex)
    {
        LogHelper.WriteLog(typeof(WsSocket), ex);
    }
    finally
    {
        if (reciveEventArgs != null && reciveEventArgs.AcceptSocket != null)
        {
            StartRecive(reciveEventArgs); // keep the receive loop alive
        }
    }
}
// If graduation_year were moved into Human, the argument pass-through could be dropped.
/// <summary>
/// Scores how plausible this profile looks, from education history, linked
/// social accounts, city, contact details and birth date.
/// </summary>
/// <returns>The plausibility score, clamped to [0, 40].</returns>
public int CalcPlausibility()
{
    Plausibility = 0;

    if (universities.Any())
    {
        foreach (University university in universities)
        {
            // A graduation year consistent with the birth year is worth +2.
            if (university.graduation_year != 0 && AnalyzeData.CheckAge(bdate.Year, university.graduation_year))
            {
                Plausibility += 2;
            }
            // check_university returning false aborts the scan of remaining entries.
            if (!check_university(university, ref plausibility))
            {
                break;
            }
        }
    }

    // Each linked social account contributes through check_social.
    check_social(social.twitter, ref plausibility);
    check_social(social.skype, ref plausibility);
    check_social(social.livejournal, ref plausibility);
    check_social(social.instagram, ref plausibility);
    check_social(social.facebook, ref plausibility);

    // City weighting: 1463 is strongly favored, 1 mildly, anything else penalized.
    switch (city.city_id)
    {
        case 1463:
            Plausibility += 4;
            break;
        case 1:
            Plausibility++;
            break;
        default:
            Plausibility -= 3;
            break;
    }

    if (contacts.mobile_phone != null)
    {
        Plausibility++;
    }
    if (contacts.home_phone != null)
    {
        Plausibility++;
    }
    if (bdate.Year != DateTime.MinValue.Year)
    {
        Plausibility++;
    }

    // Clamp the final score to the [0, 40] range.
    if (Plausibility > 40)
    {
        Plausibility = 40;
    }
    if (Plausibility < 0)
    {
        Plausibility = 0;
    }

    return Plausibility;
}