public static void For( int start, int stop, int stepLength, ForLoopBody loopBody, bool close ) { // get instance of parallel computation manager Parallel instance = new Parallel (); instance.Initialize (); instance.ForLoop (start,stop,stepLength,loopBody, close); }
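// Usage sketch for the wrapper above (hedged: assumes the AForge.NET-style API, where
// ForLoopBody is a delegate receiving the current loop index):
//
//   double[] results = new double[1000];
//   For(0, results.Length, 1, delegate(int index)
//   {
//       results[index] = Math.Sqrt(index); // body runs on worker threads, one index at a time
//   }, true); // close == true releases the worker threads once the loop completes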
//Constructor public Simulator(int multiThreads) { //Create Thread to Run Simulation on this._mySimulationThread = new Thread(this._SimulationThreadHandler); //Create SubThreads for Multithreading if two or more if (multiThreads > 1) { //this._isMultiThreaded = true; //Create Arrays for threads this._parallel = new Parallel(multiThreads); } }
/// <summary> /// The VMCS scan is based on the LINK pointer, abort code and CR3 register /// We later isolate the EPTP based on constraints for that pointer /// </summary> /// <param name="xoffset"></param> /// <returns>true if the page being scanned is a candidate</returns> public bool VMCS(long xoffset) { var RevID = (REVISION_ID)(block[0] & 0xffffffff); var Acode = (VMCS_ABORT)((block[0] >> 32) & 0x7fffffff); var KnownAbortCode = false; var KnownRevision = false; var Candidate = false; var LinkCount = 0; var Neg1 = -1; if (ScanForVMCSset == null) { throw new NullReferenceException("Entered VMCS callback w/o having found any VMCS, this is a second pass Func"); } // this might be a bit micro-opt-pointless ;) KnownRevision = typeof(REVISION_ID).GetEnumValues().Cast <REVISION_ID>().Any(x => x == RevID); KnownAbortCode = typeof(VMCS_ABORT).GetEnumValues().Cast <VMCS_ABORT>().Any(x => x == Acode); // TODO: Relax link pointer check. Possible when VMCS is shadow, then the link pointer is configured, retest this detection/nesting etc.. // Find a 64bit value for link ptr for (int l = 0; l < block.Length; l++) { if (block[l] == Neg1) { LinkCount++; } // too many if (LinkCount > 32) { return(false); } } // Currently, we expect to have 1 Link pointer at least if (LinkCount == 0 || !KnownAbortCode) { return(false); } // curr width of line to screen Candidate = false; Parallel.For(0, ScanForVMCSset.Length, (v) => { var ScanFor = ScanForVMCSset[v]; for (int check = 1; check < block.Length; check++) { if (block[check] == ScanFor.CR3Value && Candidate == false) { var OutputList = new List <long>(); StringBuilder sb = null, sbRED = null; byte[] shorted = null; var curr_width = 0; if (Vtero.VerboseOutput) { sb = new StringBuilder(); // reverse endianness for easy reading in hex dumps/editors shorted = BitConverter.GetBytes(block[check]); Array.Reverse(shorted, 0, 8); var Converted = BitConverter.ToUInt64(shorted, 0); sbRED = new StringBuilder(); sbRED.Append($"Hypervisor: VMCS revision field: {RevID} [{((uint)RevID):X8}] abort indicator: {Acode} [{((int)Acode):X8}]{Environment.NewLine}"); sbRED.Append($"Hypervisor: {ScanFor.PageTableType} CR3 found [{ScanFor.CR3Value:X16})] byte-swapped: [{Converted:X16}] @ PAGE/File Offset = [{xoffset:X16}]"); } for (int i = 0; i < block.Length; i++) { var value = block[i]; var eptp = new EPTP(value); // any good minimum size? 64kb? 
if (block[i] > 0 && block[i] < FileSize && eptp.IsFullyValidated() /* && EPTP.IsValid(eptp.aEPTP) && EPTP.IsValid2(eptp.aEPTP) && EPTP.IsValidEntry(eptp.aEPTP) */ && !OutputList.Contains(block[i])) { Candidate = true; OutputList.Add(block[i]); if (Vtero.VerboseOutput) { var linefrag = $"[{i}][{block[i]:X16}] "; if (curr_width + linefrag.Length > MAX_LINE_WIDTH) { sb.Append(Environment.NewLine); curr_width = 0; } sb.Append(linefrag); curr_width += linefrag.Length; } } } if (Candidate && Vtero.VerboseOutput) { WColor(ConsoleColor.Red, ConsoleColor.Black, sbRED.ToString().PadRight(WindowWidth)); WColor(ConsoleColor.DarkGreen, ConsoleColor.Black, sb.ToString().PadRight(WindowWidth)); } // most VMware memory I've scanned uses this layout // we know VMware well, so ignore any other potential candidates // TODO: continually verify this assumption if (RevID == REVISION_ID.VMWARE_NESTED && OutputList.Contains(block[14])) { var vmcsFound = new VMCS { dp = ScanFor, EPTP = block[14], gCR3 = ScanFor.CR3Value, Offset = xoffset }; HVLayer.Add(vmcsFound); } else { foreach (var entry in OutputList) { HVLayer.Add(new VMCS { dp = ScanFor, EPTP = entry, gCR3 = ScanFor.CR3Value, Offset = xoffset }); } } } } }); return(Candidate); }
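// Editorial note: in the VMCS scan above, `Candidate` is read and written from multiple
// Parallel.For iterations without synchronization, so a match can be observed late or lost.
// A minimal sketch of a thread-safe scan flag, assuming only System.Threading
// (names here are illustrative, not from the original project):
static bool ScanHasMatch(long[] values, long needle)
{
    int foundFlag = 0; // 0 = not found, 1 = found
    System.Threading.Tasks.Parallel.For(0, values.Length, i =>
    {
        if (values[i] == needle)
        {
            System.Threading.Interlocked.Exchange(ref foundFlag, 1); // safe concurrent publish
        }
    });
    return foundFlag == 1;
}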
public static void AddSqlsugarSetup(this IServiceCollection services) { if (services == null) { throw new ArgumentNullException(nameof(services)); } // Add the main database connection by default MainDb.CurrentDbConnId = Appsettings.app(new string[] { "MainDB" }); // Inject the multiple connection objects into the container; Scoped lifetime is required here because of transaction operations services.AddScoped <ISqlSugarClient>(o => { // Connection strings var listConfig = new List <ConnectionConfig>(); // Slave databases var listConfig_Slave = new List <SlaveConnectionConfig>(); BaseDBConfig.MutiConnectionString.Item2.ForEach(s => { listConfig_Slave.Add(new SlaveConnectionConfig() { HitRate = s.HitRate, ConnectionString = s.Conn }); }); BaseDBConfig.MutiConnectionString.Item1.ForEach(m => { listConfig.Add(new ConnectionConfig() { ConfigId = m.ConnId.ObjToString().ToLower(), ConnectionString = m.Conn, DbType = (DbType)m.DbType, IsAutoCloseConnection = true, IsShardSameThread = false, AopEvents = new AopEvents { OnLogExecuting = (sql, p) => { if (Appsettings.app(new string[] { "AppSettings", "SqlAOP", "Enabled" }).ObjToBool()) { Parallel.For(0, 1, e => { MiniProfiler.Current.CustomTiming("SQL:", GetParas(p) + "[SQL statement]: " + sql); LogLock.OutSql2Log("SqlLog", new string[] { GetParas(p), "[SQL statement]: " + sql }); }); } } }, MoreSettings = new ConnMoreSettings() { IsAutoRemoveDataCache = true }, // Slave databases SlaveConnectionConfigs = listConfig_Slave, //InitKeyType = InitKeyType.SystemTable } ); }); return(new SqlSugarClient(listConfig)); }); }
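// Hedged usage sketch for the extension above: wiring it up from ASP.NET Core's
// Startup.ConfigureServices (the surrounding Startup class is assumed, not shown here).
public void ConfigureServices(IServiceCollection services)
{
    services.AddSqlsugarSetup(); // registers a scoped ISqlSugarClient with master/slave connections
}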
public static void Calculate(int IS, int JS, float Cmueh, float VISHMIN, float AREAxy, Single UG, float building_Z0, float relax) { Parallel.For(3, Program.NII, Program.pOptions, i1 => { float DXK = Program.DXK; float DYK = Program.DYK; int KKART_LL, Vert_Index_LL; float AREAxy_L = AREAxy; Single[] PIMU = new Single[Program.KADVMAX + 1]; Single[] QIMU = new Single[Program.KADVMAX + 1]; for (int j1 = 2; j1 <= Program.NJJ - 1; j1++) { int j = j1; if (JS == -1) { j = Program.NJJ - j1 + 1; } int i = i1; if (IS == -1) { i = Program.NII - i1 + 1; } if (Program.ADVDOM[i][j] == 1) { Single[] TURB_L = Program.TURB[i][j]; Single[] UK_L = Program.UK[i][j]; Single[] UKS_L = Program.UKS[i][j]; Single[] VKS_L = Program.VKS[i][j]; Single[] WKS_L = Program.WKS[i][j]; Single[] DPMNEW_L = Program.DPMNEW[i][j]; Single[] UKim_L = Program.UK[i - 1][j]; Single[] UKip_L = Program.UK[i + 1][j]; Single[] UKjm_L = Program.UK[i][j - 1]; Single[] UKjp_L = Program.UK[i][j + 1]; Single[] UKSim_L = Program.UKS[i - 1][j]; Single[] VKSim_L = Program.VKS[i - 1][j]; Single[] VKSimjm_L = Program.VKS[i - 1][j - 1]; Single[] VKSjm_L = Program.VKS[i][j - 1]; Single[] VKSimjp_L = Program.VKS[i - 1][j + 1]; Single[] VKSjp_L = Program.VKS[i][j + 1]; Single[] WKSim_L = Program.WKS[i - 1][j]; Single[] DPMNEWim_L = Program.DPMNEW[i - 1][j]; Single[] VKSipjp_L = Program.VKS[i + 1][j + 1]; Single[] VKSipjm_L = Program.VKS[i + 1][j - 1]; Single[] WKSip_L = Program.WK[i + 1][j]; Single[] VK_L = Program.VK[i][j]; KKART_LL = Program.KKART[i][j]; Vert_Index_LL = Program.VerticalIndex[i][j]; float Ustern_terrain_helpterm = Program.UsternTerrainHelpterm[i][j]; float Ustern_obstacles_helpterm = Program.UsternObstaclesHelpterm[i][j]; float CUTK_L = Program.CUTK[i][j]; int KSTART = 1; if (CUTK_L == 0) { KSTART = KKART_LL + 1; } for (int k = KSTART; k <= Vert_Index_LL; k++) { float DZK_K = Program.DZK[k]; float DXKDZK = DXK * DZK_K; float DYKDZK = DYK * DZK_K; //ADVECTION TERMS float FE = UKS_L[k] * DYKDZK; float FW = UKSim_L[k] * DYKDZK; float FS = 0.25F * (VKSim_L[k] + VKS_L[k] + VKSimjm_L[k] + VKSjm_L[k]) * DXKDZK; float FN = 0.25F * (VKSim_L[k] + VKS_L[k] + VKSimjp_L[k] + VKSjp_L[k]) * DXKDZK; float FT = 0.25F * (WKSim_L[k] + WKS_L[k] + WKSim_L[k + 1] + WKS_L[k + 1]) * AREAxy_L; float FB = 0; if (k > KKART_LL + 1) { FB = 0.25F * (WKSim_L[k] + WKS_L[k] + WKSim_L[k - 1] + WKS_L[k - 1]) * AREAxy_L; } //POWER LAW ADVECTION SCHEME float BIM = Program.FloatMax(-FT, 0F); float CIM = Program.FloatMax(FB, 0F); float AE1 = Program.FloatMax(-FE, 0F); float AW1 = Program.FloatMax(FW, 0F); float AS1 = Program.FloatMax(FS, 0F); float AN1 = Program.FloatMax(-FN, 0F); float AIM = BIM + CIM + AW1 + AS1 + AE1 + AN1 + PrognosticFlowfield.AP0[k]; //SOURCE TERMS float DDPX = DPMNEWim_L[k] - DPMNEW_L[k]; float DIMU = (float)(AW1 * UKim_L[k] + AS1 * UKjm_L[k] + AE1 * UKip_L[k] + AN1 * UKjp_L[k] + PrognosticFlowfield.AP0[k] * 0.5 * (UKSim_L[k] + UKS_L[k]) + DDPX * DYKDZK + Program.CorolisParam * (UG - UK_L[k]) * AREAxy_L * DZK_K); //BOUNDARY CONDITION AT SURFACES (OBSTACLES AND TERRAIN) if ((k == KKART_LL + 1) && (CUTK_L == 0)) { float xhilf = (float)((float)i * (DXK - DXK * 0.5)); float yhilf = (float)((float)j * (DYK - DYK * 0.5)); float windhilf = Program.FloatMax((float)Math.Sqrt(Program.Pow2(0.5 * ((UKSim_L[k]) + UKS_L[k])) + Program.Pow2(0.5 * ((VKSim_L[k]) + VKS_L[k]))), 0.01F); int IUstern = (int)(xhilf / Program.DDX[1]) + 1; int JUstern = (int)(yhilf / Program.DDY[1]) + 1; float Ustern_Buildings = Ustern_terrain_helpterm * windhilf; if 
(Program.Z0Gramm[IUstern][JUstern] >= DZK_K * 0.1) { Ustern_Buildings = Ustern_terrain_helpterm * (float)Math.Sqrt(Program.Pow2(0.5 * ((UKSim_L[k + 1]) + UKS_L[k + 1])) + Program.Pow2(0.5 * ((VKSim_L[k + 1]) + VKS_L[k + 1]))); } DIMU -= (float)(UK_L[k] / windhilf * Program.Pow2(Ustern_Buildings) * AREAxy_L); } else if ((k == KKART_LL + 1) && (CUTK_L == 1)) { float windhilf = Program.FloatMax((float)Math.Sqrt(Program.Pow2(0.5 * ((UKSim_L[k]) + UKS_L[k])) + Program.Pow2(0.5 * ((VKSim_L[k]) + VKS_L[k]))), 0.01F); float Ustern_Buildings = Ustern_obstacles_helpterm * windhilf; DIMU -= (float)(UK_L[k] / windhilf * Program.Pow2(Ustern_Buildings) * AREAxy_L); } //RECURRENCE FORMULA if (k > KKART_LL + 1) { PIMU[k] = (BIM / (AIM - CIM * PIMU[k - 1])); QIMU[k] = ((DIMU + CIM * QIMU[k - 1]) / (AIM - CIM * PIMU[k - 1])); } else { PIMU[k] = (BIM / AIM); QIMU[k] = (DIMU / AIM); } } //OBTAIN NEW U-COMPONENTS for (int k = Vert_Index_LL; k >= KSTART; k--) { if ((KKART_LL < k) && (Program.KKART[i - 1][j] < k)) { UK_L[k] += (relax * (PIMU[k] * UK_L[k + 1] + QIMU[k] - UK_L[k])); } } } } }); }
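// Editorial note on the recurrence above: the PIMU/QIMU sweep is the forward-elimination
// phase of the Thomas algorithm (TDMA) for the tridiagonal system
//   AIM * U[k] = BIM * U[k+1] + CIM * U[k-1] + DIMU,
// i.e. P[k] = B / (A - C * P[k-1]) and Q[k] = (D + C * Q[k-1]) / (A - C * P[k-1]),
// followed by the back-substitution U[k] = P[k] * U[k+1] + Q[k] in the
// "OBTAIN NEW U-COMPONENTS" loop (damped by the `relax` factor).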
private async Task HandleResponseAsync(MessageData <Response[]> message) { if (message?.Data == null || message.Data.Length == 0) { Logger.LogWarning($"{Id} received an empty message"); return; } _lastRequestedTime = DateTimeOffset.Now; Response[] responses = message.Data; try { if (responses.Length == 0) { Logger.LogWarning($"{Id} received an empty message"); return; } _responded.Add(responses.Length); // Remove a request from the cache as soon as any response arrives; if it failed and must be re-downloaded, EnqueueRequest adds it back to the cache // We only need to guarantee a one-to-one send -> receive removal here to keep the detection mechanism correct foreach (var response in responses) { _enqueuedRequestDict.TryRemove(response.Request.Hash, out _); } var agentId = responses.First().AgentId; var successResponses = responses.Where(x => x.Success).ToList(); // Record download successes if (successResponses.Count > 0) { var elapsedMilliseconds = successResponses.Sum(x => x.ElapsedMilliseconds); await _statisticsService.IncrementDownloadSuccessAsync(agentId, successResponses.Count, elapsedMilliseconds); } // Handle the successfully downloaded requests Parallel.ForEach(successResponses, async response => { Logger.LogInformation($"{Id} download {response.Request.Url} success"); try { using (var scope = Services.CreateScope()) { var context = new DataFlowContext(response, scope.ServiceProvider); foreach (var dataFlow in _dataFlows) { var dataFlowResult = await dataFlow.HandleAsync(context); var @break = false; switch (dataFlowResult) { case DataFlowResult.Success: { continue; } case DataFlowResult.Failed: { // If handling failed, return immediately Logger.LogInformation( $"{Id} handle {response.Request.Url} failed: {context.Message}"); await _statisticsService.IncrementFailedAsync(Id); return; } case DataFlowResult.Terminated: { @break = true; break; } } if (@break) { break; } } var resultIsEmpty = !context.HasData && !context.HasParseData; // If the parse result is empty, retry if (resultIsEmpty && RetryWhenResultIsEmpty) { if (response.Request.RetriedTimes < response.Request.RetryTimes) { response.Request.RetriedTimes++; await EnqueueRequests(response.Request); // Since this request is being retried, parsing will run again anyway, so extracted links and success status are handled at the end Logger.LogInformation($"{Id} retry {response.Request.Url} because empty result"); return; } } // Target requests extracted by the parser if (context.ExtraRequests != null && context.ExtraRequests.Count > 0) { var requests = new List <Request>(); foreach (var newRequest in context.ExtraRequests) { newRequest.Depth = response.Request.Depth + 1; if (newRequest.Depth <= Depth) { // Force-set OwnerId here so a user omission cannot cause errors if (string.IsNullOrWhiteSpace(newRequest.OwnerId)) { newRequest.OwnerId = context.Response.Request.OwnerId; newRequest.AgentId = context.Response.Request.AgentId; } requests.Add(newRequest); } } var count = _scheduler.Enqueue(requests); if (count > 0) { await _statisticsService.IncrementTotalAsync(Id, count); } } if (!resultIsEmpty) { await _statisticsService.IncrementSuccessAsync(Id); Logger.LogInformation($"{Id} handle {response.Request.Url} success"); } else { if (RetryWhenResultIsEmpty) { await _statisticsService.IncrementFailedAsync(Id); Logger.LogInformation( $"{Id} handle {response.Request.Url} failed, extract result is empty"); } else { await _statisticsService.IncrementSuccessAsync(Id); Logger.LogInformation( $"{Id} handle {response.Request.Url} success, extract result is empty"); } } } } catch (Exception e) { await _statisticsService.IncrementFailedAsync(Id); Logger.LogInformation($"{Id} handle {response.Request.Url} failed: {e}"); } }); // TODO: this needs optimization var retryResponses = responses.Where(x => !x.Success && x.Request.RetriedTimes < x.Request.RetryTimes) .ToList(); var downloadFailedResponses = responses.Where(x => !x.Success) .ToList(); var failedResponses = responses.Where(x
=> !x.Success && x.Request.RetriedTimes >= x.Request.RetryTimes) .ToList(); if (retryResponses.Count > 0) { retryResponses.ForEach(x => { x.Request.RetriedTimes++; Logger.LogInformation($"{Id} download {x.Request.Url} failed: {x.Exception}"); }); await EnqueueRequests(retryResponses.Select(x => x.Request).ToArray()); } // Record download failures if (downloadFailedResponses.Count > 0) { var elapsedMilliseconds = downloadFailedResponses.Sum(x => x.ElapsedMilliseconds); await _statisticsService.IncrementDownloadFailedAsync(agentId, downloadFailedResponses.Count, elapsedMilliseconds); } // Record failures if (failedResponses.Count > 0) { await _statisticsService.IncrementFailedAsync(Id, failedResponses.Count); } } catch (Exception ex) { Logger.LogError($"{Id} handle message {message} failed: {ex}"); } }
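// Editorial note: `Parallel.ForEach(successResponses, async response => ...)` above passes an
// async lambda to a void-returning delegate slot, so the loop does not wait for the handlers
// to finish and their exceptions can escape the surrounding try/catch. A minimal awaited
// alternative, as a hedged sketch (illustrative helper, not part of the original spider):
private static Task ProcessAllAsync<T>(IEnumerable<T> items, Func<T, Task> handler)
{
    // Start one task per item and await them all; the degree of parallelism can be
    // bounded with a SemaphoreSlim if the handlers are resource-hungry.
    return Task.WhenAll(items.Select(handler));
}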
static long ReadMerchantsDirAndGenerateDirList() { //DirectoryInfo dirInfo = new DirectoryInfo(pathToRootMerchants); //DirectoryInfo[] directories = dirInfo.GetDirectories("*", SearchOption.AllDirectories); string logDirName = string.Empty; ConcurrentBag<string> topDirList = new ConcurrentBag<string>(); try { string[] directories = System.IO.Directory.GetDirectories(pathToRootMerchants);//, "*", SearchOption.AllDirectories); string dirListFile = pathToRootDestination + "\\TopDirList.log"; using (StreamWriter sw = new StreamWriter(dirListFile)) { foreach (string dirName in directories) { logDirName = dirName; if (!dirName.Contains("256XXX")) { topDirList.Add(dirName); } sw.WriteLine(dirName); } } } catch (Exception ex) { Console.WriteLine("exception : " + logDirName + " : " + ex.Message); } string logTopDir = string.Empty; //Parallel.ForEach(topDirList, (topDir) => foreach (string topDir in topDirList) { logTopDir = topDir; string dirName = String.Empty; string secLevelDirListFile = String.Empty; int nameOffset = topDir.LastIndexOf("\\"); if (nameOffset > -1) dirName = topDir.Substring(nameOffset); ConcurrentBag<string> secDirNames = new ConcurrentBag<string>(); try { string[] secondLevelDirs = System.IO.Directory.GetDirectories(topDir); if (dirName.Length > 0) { secLevelDirListFile = pathToRootDestination + dirName + ".txt"; //foreach (string secDir in secondLevelDirs) Parallel.ForEach(secondLevelDirs, (secDir) => { //using (StreamWriter sw = new StreamWriter(secLevelDirListFile)) //{ // sw.WriteLine(secDir); //} secDirNames.Add(secDir); }); } } catch (Exception ex) { Console.WriteLine("Exception parallel: " + logTopDir + " : " + ex.Message); } using (StreamWriter sw = new StreamWriter(secLevelDirListFile)) { foreach (string secDir in secDirNames) { sw.WriteLine(secDir); } } }//); return 0; }
void DrawWall(Vector2[] room) { bool isClose = false; if(room[0] == room[room.Length-1]){ isClose = true; } if(isClose){ List<Vector2> outter; RoomQuad.GetPoint(room, true, out outter); List<Vector2> inner; RoomQuad.GetPoint(room, false, out inner); WallFill wallFill = new WallFill(VectorLine.canvas3D, "wallFill"); wallFill.Add(outter, inner); wallFill.Draw(); if(inner.Count > 0){ VectorLine roomInner = new VectorLine("RoomInner", inner, null, 1.0f, LineType.Continuous, Joins.Weld); roomInner.SetColor(Color.black); roomInner.Draw3D(); } if(outter.Count > 0){ VectorLine roomOutter = new VectorLine("RoomOutter", outter, null, 1.0f, LineType.Continuous, Joins.Weld); roomOutter.SetColor(Color.black); roomOutter.Draw3D(); } } else { Parallel parallel = new Parallel(); List<Vector2> outter = parallel.Execute(room, wallThick, false); List<Vector2> inner = parallel.Execute(room, wallThick, true); WallFill wallFill = new WallFill(VectorLine.canvas3D, "wallFill"); wallFill.Add(outter, inner); wallFill.Draw(); // if(inner.Count > 0){ // VectorLine roomInner = new VectorLine("RoomInner", inner, null, 1.0f, LineType.Continuous, Joins.None); // roomInner.SetColor(Color.black); // roomInner.Draw3D(); // } // // if(outter.Count > 0){ // VectorLine roomOutter = new VectorLine("RoomOutter", outter, null, 1.0f, LineType.Continuous, Joins.None); // roomOutter.SetColor(Color.black); // roomOutter.Draw3D(); // } // List<Vector3> outter; // bool counterClockwise = RoomQuad.GetPoint(room, isClose, true, out outter); // // int length = outter.Count-1; // // int startA = -1; // int startB = -1; // int endC = -1; // int endD = -1; // for(int i=0; i < length; i++){ // Vector2 dist = outter[i] - room[0]; // float wallThick = (float)RoomQuad.WallThick; // if(Mathf.Abs(dist.sqrMagnitude - wallThick/2*wallThick/2) < 0.001f){ // if(startA == -1){ // startA = i; // } // else{ // startB = i; // } // } // // dist = outter[i] - room[room.Length-1]; // if(Mathf.Abs(dist.sqrMagnitude - wallThick/2*wallThick/2) < 0.001f){ // if(endC == -1){ // endC = i; // } // else{ // endD = i; // } // } // } // // Debug.Log(string.Format("start index: ({0}, {1}), ", startA, startB)); // // Debug.Log("orientation: " + counterClockwise); // // List<Vector3> a = new List<Vector3>(); // List<Vector3> b = new List<Vector3>(); // // if(counterClockwise){ // // Vector3[] tmp = new Vector3[length+1]; // outter.CopyTo(0, tmp, 0, length); // //// if(length%2 == 1){ //// tmp[length] = tmp[length-1]; //// length++; //// } // // for(int i = startA; i > startA - length/2; i--){ // b.Add(tmp[(i+length)%length]); // } // // for(int i = startB; i < startB + length/2; i++){ // a.Add(tmp[i%length]); // } // // WallFill wallFill = new WallFill(VectorLine.canvas3D, "wallFill"); // wallFill.Draw(a, b); // } // if(outter.Count > 0){ // VectorLine roomInner = new VectorLine("RoomOutter", outter, null, 1.0f, LineType.Continuous, Joins.Weld); // roomInner.SetColor(Color.blue); // roomInner.Draw3D(); // } } }
protected override ProblemOutput Solve(ProblemInput input) { var allPhotos = input.Photos.ToArray(); var totalTags = input.TagCount; var usedInASlideC = new ConcurrentDictionary <long, byte>(); var usedInASlide = new HashSet <long>(); Slide lastSlide; ////for (int i = 0; i < allPhotos.Length; i++) ////{ //// var bestScore = -1; //// for (int j = 0; j < allPhotos.Length; j++) //// { //// int currScore = -1; //// if (visited.Contains(j)) //// { //// continue; //// } //// currScore = CalclateScoreForCurrentSlide(lastSlide, allPhotos[i], allPhotos[j]); //// if (bestScore < currScore) //// { //// } //// } ////} // Build First Slide List <Slide> slides = new List <Slide>(); if (!allPhotos.First().IsVertical) { lastSlide = new Slide(new List <Photo>() { allPhotos.First() }); } else { lastSlide = new Slide(new List <Photo>() { allPhotos.First(), allPhotos.Skip(1).First(_ => _.IsVertical) }); } slides.Add(lastSlide); foreach (var photo in lastSlide.Images) { usedInASlide.Add(photo.Index); usedInASlideC.TryAdd(photo.Index, 0); } var lastSlideTagsBool = new bool[input.TagCount]; var pairTagsBool = new bool[input.TagCount]; while (usedInASlide.Count != allPhotos.Length) { Array.Clear(lastSlideTagsBool, 0, lastSlideTagsBool.Length); int lastSlideTagCount = 0; foreach (var tagIndex in lastSlide.TagsIndexes) { lastSlideTagsBool[tagIndex] = true; lastSlideTagCount++; } // build a slide long bestScore = -1; Slide slideToUse = null; Parallel.ForEach(Partitioner.Create(1, allPhotos.Length), range => { for (int i = range.Item1; i < range.Item2; i++) { if (usedInASlideC.ContainsKey(i)) { continue; } // Build a slide with this photo var potentialSlide = new Slide(new List <Photo>() { allPhotos[i] }); long ScoreIfUsed = CalculateScoreForNewSlide(allPhotos[i], lastSlideTagsBool, lastSlideTagCount); if (ParallelHelper.InterlockedExchangeIfGreaterThan(ref bestScore, ScoreIfUsed)) { // use this //bestScore = ScoreIfUsed; lock (slides) { slideToUse = potentialSlide; } } } }); //for (int i = 1; i < allPhotos.Length; i++) //{ // // Skip if already used // if (usedInASlide.Contains(i)) // { // continue; // } // // Build a slide with this photo // var potentialSlide = new Slide(new List<Photo>() { allPhotos[i] }); // long ScoreIfUsed = CalculateScoreForNewSlide(allPhotos[i], lastSlideTagsBool, lastSlideTagCount); // if (bestScore < ScoreIfUsed) // { // // use this // slideToUse = potentialSlide; // bestScore = ScoreIfUsed; // } //} // if slideToUse is lonely V, find it a pair: if (slideToUse.Images.First().IsVertical) { // Build Pair Bool Array.Clear(pairTagsBool, 0, pairTagsBool.Length); foreach (var tagindex in slideToUse.Images.First().TagIndexes) { pairTagsBool[tagindex] = true; } long bestScoreForV = -1; Photo vPair = null; object lockpair = new object(); Parallel.ForEach(Partitioner.Create(1, allPhotos.Length), range => { for (int i = range.Item1; i < range.Item2; i++) { if (usedInASlideC.ContainsKey(i) || !allPhotos[i].IsVertical || i == slideToUse.Images.First().Index) { continue; } long scoreIfUsed = CalculateScoreForVPair(lastSlideTagsBool, lastSlideTagCount, slideToUse.Images.First(), allPhotos[i], pairTagsBool); if (ParallelHelper.InterlockedExchangeIfGreaterThan(ref bestScoreForV, scoreIfUsed)) { lock (lockpair) { vPair = allPhotos[i]; } } } }); //for (int i = 0; i < allPhotos.Length; i++) //{ // if (usedInASlide.Contains(i) || !allPhotos[i].IsVertical || i == slideToUse.Images.First().Index) // { // continue; // } // ////foreach (var tagIndex in allPhotos[i].TagIndexes) // ////{ // //// 
pairTagsBool[tagIndex] = true; // ////} // long scoreIfUsed = CalculateScoreForVPair(lastSlideTagsBool, lastSlideTagCount, slideToUse.Images.First(), allPhotos[i]); // if (bestScoreForV < scoreIfUsed) // { // vPair = allPhotos[i]; // bestScoreForV = scoreIfUsed; // } //} if (vPair == null) { var vertsNotUsed = allPhotos.Where(p => p.IsVertical && !usedInASlide.Contains(p.Index)).ToArray(); } slideToUse.Images.Add(vPair); } slides.Add(slideToUse); foreach (var photo in slideToUse.Images) { usedInASlide.Add(photo.Index); usedInASlideC.TryAdd(photo.Index, 0); } lastSlide = slideToUse; } return(new ProblemOutput() { Slides = slides }); }
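// ParallelHelper.InterlockedExchangeIfGreaterThan is referenced above but not shown in this
// source. A plausible implementation (an assumption, not the project's actual code) is a
// classic compare-and-swap loop:
public static bool InterlockedExchangeIfGreaterThan(ref long location, long newValue)
{
    long current;
    do
    {
        current = Interlocked.Read(ref location);
        if (newValue <= current)
        {
            return false; // another thread already stored an equal or better score
        }
    } while (Interlocked.CompareExchange(ref location, newValue, current) != current);
    return true; // we won the race and published newValue
}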
public void Consistent_reads_and_transactions_run_in_parallel() { using (var connector = new Connector(_clientConfig)) { connector.DeclareCollection <Account>(); connector.DeclareCollection <MoneyTransfer>(); var accounts = connector.DataSource <Account>(); const int iterations = 101; var accountIds = connector.GenerateUniqueIds("account_id", 2); var transferIds = connector.GenerateUniqueIds("transfer_id", iterations); accounts.Put(new Account { Id = accountIds[0], Balance = 1000 }); accounts.Put(new Account { Id = accountIds[1], Balance = 0 }); var watch = new Stopwatch(); watch.Start(); // run in parallel a sequence of transactions and consistent read-only operations List <Account> all = accounts.ToList(); try { Parallel.Invoke( () => { Parallel.For(0, iterations, i => { // ReSharper disable once AccessToDisposedClosure connector.ConsistentRead(ctx => { var myAccounts = ctx.Collection <Account>().ToList(); Assert.AreEqual(2, myAccounts.Count); // with a consistent read we do not see the updates during a transaction. The sum of the account balances should always be 1000 Assert.AreEqual(1000, myAccounts.Sum(acc => acc.Balance)); var transfers = ctx.Collection <MoneyTransfer>(); var tr = transfers.Where(t => t.SourceAccount == myAccounts[0].Id).ToList(); var trAll = transfers.ToList(); //check consistency between transfer and balance var sumTransferred = tr.Sum(t => t.Amount); Assert.AreEqual(sumTransferred, myAccounts[1].Balance); }, nameof(MoneyTransfer), nameof(Account)); }); }, () => { for (var i = 0; i < iterations; i++) { var transfer = new MoneyTransfer { Id = transferIds[i], Amount = 10, Date = DateTime.Today, SourceAccount = all[0].Id, DestinationAccount = all[1].Id }; all[0].Balance -= 10; all[1].Balance += 10; var transaction = connector.BeginTransaction(); transaction.Put(all[0]); transaction.Put(all[1]); transaction.Put(transfer); transaction.Commit(); } }); } catch (Exception e) { Console.WriteLine(e.Message); Assert.Fail(e.Message); } watch.Stop(); Console.WriteLine($"{iterations} iterations took {watch.ElapsedMilliseconds} ms"); } // check that the data is persistent (force the external server to reload data) StopServers(); StartServers(); using (var connector = new Connector(_clientConfig)) { connector.DeclareCollection <Account>(); connector.DeclareCollection <MoneyTransfer>(); var accounts = connector.DataSource <Account>(); var myAccounts = accounts.ToList(); Assert.AreEqual(2, myAccounts.Count); var sum = myAccounts.Sum(acc => acc.Balance); Assert.AreEqual(1000, sum); Assert.IsTrue(myAccounts.All(acc => acc.Balance != 1000), "The balance is unchanged when reloading data"); Console.WriteLine($"balance1={myAccounts[0].Balance} balance2={myAccounts[1].Balance}"); } }
public void No_deadlock_if_transactions_and_non_transactional_queries_are_run_in_parallel() { using (var connector = new Connector(_clientConfig)) { connector.DeclareCollection <Account>(); connector.DeclareCollection <MoneyTransfer>(); var accounts = connector.DataSource <Account>(); var accountIds = connector.GenerateUniqueIds("account_id", 2); accounts.Put(new Account { Id = accountIds[0], Balance = 1000 }); accounts.Put(new Account { Id = accountIds[1], Balance = 0 }); // run in parallel a sequence of transactions and non transactional read requests try { Parallel.Invoke( () => { Parallel.For(0, 200, i => { // this is a non transactional request var myAccounts = accounts.ToList(); Assert.AreEqual(2, myAccounts.Count); var transfers = connector.DataSource <MoneyTransfer>(); // this is also a non transactional request var unused = transfers.Where(t => t.SourceAccount == myAccounts[0].Id).ToList(); }); }, () => { List <Account> myAccounts = accounts.ToList(); for (var i = 0; i < 200; i++) { var transfer = new MoneyTransfer { Amount = 10, Date = DateTime.Today, SourceAccount = myAccounts[0].Id, DestinationAccount = myAccounts[1].Id }; myAccounts[0].Balance -= 10; myAccounts[1].Balance += 10; var transaction = connector.BeginTransaction(); transaction.Put(myAccounts[0]); transaction.Put(myAccounts[1]); transaction.Put(transfer); transaction.Commit(); } }); } catch (Exception e) { Console.WriteLine(e.Message); Assert.Fail(e.Message); } } // check that the data is persistent (force the external server to reload data) StopServers(); StartServers(); using (var connector = new Connector(_clientConfig)) { connector.DeclareCollection <Account>(); connector.DeclareCollection <MoneyTransfer>(); var accounts = connector.DataSource <Account>(); var myAccounts = accounts.ToList(); Assert.AreEqual(2, myAccounts.Count); var sum = myAccounts.Sum(acc => acc.Balance); Assert.AreEqual(1000, sum); Assert.IsTrue(myAccounts.All(acc => acc.Balance != 1000), "The balance is unchanged when reloading data"); Console.WriteLine($"balance1={myAccounts[0].Balance} balance2={myAccounts[1].Balance}"); } }
public void Delete_many_in_transaction() { using var connector = new Connector(_clientConfig); connector.DeclareCollection <Account>("delete_test"); const int count = 2000; const int classes = 10; var accountIds = connector.GenerateUniqueIds("account_id", count); var accounts = connector.DataSource <Account>("delete_test"); var all = new List <Account>(count); for (int i = 0; i < count; i++) { var acc = new Account { Id = accountIds[i], Balance = i % classes }; all.Add(acc); } accounts.PutMany(all); Assert.AreEqual(count, accounts.Count()); Assert.AreEqual(count / classes, accounts.Count(acc => acc.Balance == 3)); var random = new Random(Environment.TickCount); Parallel.Invoke( () => { for (int i = 0; i < classes; i++) { var transaction = connector.BeginTransaction(); transaction.DeleteMany <Account>(acc => acc.Balance == i, "delete_test"); transaction.Commit(); Thread.Sleep(random.Next(200)); } }, () => { var notDeleted = count; while (notDeleted > 0) { connector.ConsistentRead(ctx => { notDeleted = accounts.Count(); var chunkSize = count / classes; Assert.AreEqual(0, notDeleted % chunkSize, "only complete chunks are deleted"); Console.WriteLine($"found {notDeleted} items"); }, "delete_test"); Thread.Sleep(random.Next(200)); } }); }
static long ReadFilesParallelRecursiveFullDepth(DirectoryInfo dirInfo) { FileInfo[] fileInfoArray = dirInfo.GetFiles("*", SearchOption.AllDirectories); ConcurrentBag<string> fileFoundData = new ConcurrentBag<string>(); string folderPath = dirInfo.FullName; string outputFileName = folderPath.Substring(folderPath.LastIndexOf('\\')); outputFileName = pathToRootDestination + outputFileName + ".csv"; //int lineNumber = 0; StreamWriter outputWriter = new StreamWriter(outputFileName); try { Parallel.ForEach(fileInfoArray, (fileInfo) => //foreach (FileInfo fileInfo in fileInfoArray) { string extn = ""; string newOut = ""; string outputLine = ""; string delimitedString = string.Empty; List<string> strList = new List<string>(); try { outputLine = fileInfo.FullName; // strip the root prefix; TrimStart(char[]) would over-trim any leading characters found in the set newOut = outputLine.StartsWith(pathToRootSource) ? outputLine.Substring(pathToRootSource.Length) : outputLine; string[] words = newOut.Split('\\'); strList.Add(words[2]); strList.Add(newOut); int extnPos = newOut.LastIndexOf('.'); if (extnPos != -1) { extn = newOut.Substring(extnPos); } strList.Add(extn); strList.Add(fileInfo.LastWriteTime.ToString()); strList.Add(fileInfo.Length.ToString()); delimitedString = string.Join(exportFileDelimiter, strList.ToArray()); fileFoundData.Add(delimitedString); } catch (Exception ex) { Console.WriteLine("Failed to process file entry: " + ex.Message); } }); foreach (var fileData in fileFoundData) { //lineNumber++; outputWriter.WriteLine(fileData); } } catch (Exception ex) { Console.WriteLine("outputFileName: " + outputFileName + " : Exception : " + ex.Message); } finally { outputWriter.Close(); } return fileInfoArray.Length; }
static long GetDirSizeInclSubFolders(string folderPath) { //string[] fileNames = System.IO.Directory.GetFiles(folderPath, "*.pdf", System.IO.SearchOption.AllDirectories); long totalSize = 0; long lineNumber = 0; long flushCounter = 0; //string inputFilePath = @"C:\Projects\DirListingSize\InputFile.txt"; //string outputFilepath = @"C:\Projects\DirListingSize\OutputFile.txt"; string inputFilePath = @"InputFileList.txt"; string outputFilepath = @"OutputFileList.txt"; StreamWriter outputFile = new StreamWriter(outputFilepath); //StreamWriter notFoundFiles = new StreamWriter(@"C:\Projects\DirListingSize\NotFoundFiles.txt"); //StreamWriter notEnumeratedFiles = new StreamWriter(@"C:\Projects\DirListingSize\FailedEnumerationFiles.txt"); StreamWriter notFoundFiles = new StreamWriter(@"NotFoundFiles.txt"); StreamWriter notEnumeratedFiles = new StreamWriter(@"FailedEnumerationFiles.txt"); ConcurrentDictionary<string, long> fileNameAndSizeMap = new ConcurrentDictionary<string, long>(); ConcurrentBag<string> notFoundFileList = new ConcurrentBag<string>(); ConcurrentBag<string> notEnumeratedFileList = new ConcurrentBag<string>(); try { string[] fileList = File.ReadAllLines(inputFilePath); Parallel.ForEach(fileList, (currentFile) => { System.IO.FileInfo fileInfo = new System.IO.FileInfo(currentFile); if (fileInfo.Exists) { if (!fileNameAndSizeMap.TryAdd(fileInfo.FullName, fileInfo.Length)) { notEnumeratedFileList.Add(currentFile); } } else { notFoundFileList.Add(currentFile); } } ); foreach (var item in fileNameAndSizeMap) { lineNumber++; flushCounter++; outputFile.WriteLine(item.Key + "\t" + item.Value + " bytes"); totalSize += item.Value; if (flushCounter > 10000) { outputFile.Flush(); flushCounter = 0; } } outputFile.WriteLine("Total size of all " + lineNumber + " files in bytes: " + totalSize); outputFile.Close(); foreach (var item in notFoundFileList) { notFoundFiles.WriteLine(item); } notFoundFiles.Close(); foreach (var item in notEnumeratedFileList) { notEnumeratedFiles.WriteLine(item); } notEnumeratedFiles.Close(); } catch (Exception ex) { Console.WriteLine("CurrentLineNumber: " + lineNumber); Console.WriteLine(ex.Message + Environment.NewLine + ex.StackTrace); } return totalSize; }
static long ReadCSVAndGetFileSize(string fileName) { long totalSize = 0; long lineNumber = 0; long flushCounter = 0; string inputFilePath = importFileName; string outputFilepath = exportFileName; StreamWriter outputFile = new StreamWriter(outputFilepath); StreamWriter notFoundFiles = new StreamWriter(notFoundFileName); StreamWriter programOutputFile = new StreamWriter(bcpOutputFileName); StreamWriter notEnumeratedFiles = new StreamWriter(foundButFailedFileName); ConcurrentDictionary<string, long> fileNameAndSizeMap = new ConcurrentDictionary<string, long>(); ConcurrentBag<string> notFoundFileList = new ConcurrentBag<string>(); ConcurrentBag<string> notEnumeratedFileList = new ConcurrentBag<string>(); try { Parallel.ForEach(File.ReadLines(inputFilePath), (currentLine, _, lineNum) => { string currentFile = pathToRootSource + currentLine.Split(importFileDelimiter[0])[2]; System.IO.FileInfo fileInfo = new System.IO.FileInfo(currentFile); if (fileInfo.Exists) { if (!fileNameAndSizeMap.TryAdd(currentLine, fileInfo.Length)) { notEnumeratedFileList.Add(currentLine + exportFileDelimiter + "-999"); } } else { notFoundFileList.Add(currentLine + exportFileDelimiter + "-999"); } } ); foreach (var item in fileNameAndSizeMap) { lineNumber++; flushCounter++; outputFile.WriteLine(item.Key + exportFileDelimiter + item.Value); totalSize += item.Value; if (flushCounter > 10000) { outputFile.Flush(); flushCounter = 0; } } outputFile.Flush(); programOutputFile.WriteLine("Files found: " + lineNumber + "; size of all files in bytes: " + totalSize); programOutputFile.Flush(); foreach (var item in notFoundFileList) { notFoundFiles.WriteLine(item); } notFoundFiles.Flush(); foreach (var item in notEnumeratedFileList) { notEnumeratedFiles.WriteLine(item); } notEnumeratedFiles.Flush(); } catch (Exception ex) { Console.WriteLine("CurrentLineNumber: " + lineNumber); Console.WriteLine(ex.Message + Environment.NewLine + ex.StackTrace); } finally { outputFile.Close(); notFoundFiles.Close(); programOutputFile.Close(); notEnumeratedFiles.Close(); } return totalSize; }
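// The lambda above uses the Parallel.ForEach overload whose body receives
// (item, loopState, index); the long index is the element's ordinal position in the
// source sequence, handy for line-number reporting. A minimal isolated sketch:
static void NumberLines(IEnumerable<string> lines)
{
    Parallel.ForEach(lines, (line, state, index) =>
    {
        // index is a zero-based long: the position of `line` in the sequence
        Console.WriteLine($"line {index}: {line.Length} chars");
    });
}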
internal static HandleRef getCPtr(Parallel obj) { return (obj == null) ? new HandleRef(null, IntPtr.Zero) : obj.swigCPtr; }
public static void set_global_parallel(Parallel parallel) { modshogunPINVOKE.set_global_parallel(Parallel.getCPtr(parallel)); if (modshogunPINVOKE.SWIGPendingException.Pending) throw modshogunPINVOKE.SWIGPendingException.Retrieve(); }
public override int Read(float[] data, int offset, int count) { if (!initialized) { Initialize(); } int samplesWritten = ReadBody(data, offset, count); while (samplesWritten < count) { //Slide over noise samples Array.Copy( sourceArray: noiseBuffer, sourceIndex: stepSize, destinationArray: noiseBuffer, destinationIndex: 0, length: overlapSize); int read = stream.Read(inputBuffer, overlapSize, stepSize); if (read <= 0 && samplesHandled <= 0) { //Done, No samples left to work with break; } else if (read <= 0) { //We are in buffer-dumping window //Set rest of inputBuffer to zero Array.Clear(inputBuffer, overlapSize, stepSize); } else if (read < stepSize) { //Near or at the end //Set rest of inputBuffer to zero Array.Clear(inputBuffer, overlapSize + read, inputBuffer.Length - overlapSize - read); } //Generate new noise for (int i = 0; i < stepSize; i++) { noiseBuffer[overlapSize + i] = noiseScalarA - noiseScalarB * randomizer.NextDouble(); } //Copy in the input data for (int i = 0; i < fftSize; i++) { signalFFTBuffer[i] = inputBuffer[i] * window[i]; noiseFFTBuffer[i] = noiseBuffer[i]; } //FFT Task.WaitAll( Task.Run(() => Fourier.Forward(signalFFTBuffer)), Task.Run(() => Fourier.Forward(noiseFFTBuffer))); //For each band... Parallel.For( fromInclusive: 0, toExclusive: bandFrequencies.Length - 1, body: (int band) => { int lowerBound = FrequencyDomain.GetComplexFrequencyBin(fftSize, bandFrequencies[band], SamplingRate); int upperBound = FrequencyDomain.GetComplexFrequencyBin(fftSize, bandFrequencies[band + 1], SamplingRate); Complex64[] amplitudeBuffer = amplitudeBuffers[band]; Complex64[] noiseBandBuffer = noiseBandBuffers[band]; //Copy over just the relevant frequency band for (int i = lowerBound; i < upperBound; i++) { amplitudeBuffer[i] = 2.0 * signalFFTBuffer[i]; noiseBandBuffer[i] = 2.0 * noiseFFTBuffer[i]; } Complex64 zero = Complex64.Zero; //Clear rest of buffers for (int i = 0; i < lowerBound; i++) { amplitudeBuffer[i] = zero; noiseBandBuffer[i] = zero; } for (int i = upperBound; i < amplitudeBuffer.Length; i++) { amplitudeBuffer[i] = zero; noiseBandBuffer[i] = zero; } //IFFT Task.WaitAll( Task.Run(() => Fourier.Inverse(amplitudeBuffer)), Task.Run(() => Fourier.Inverse(noiseBandBuffer))); //Accumulate into the shared output buffer; every band writes the same array in parallel, so serialize the writes lock (outputAccumulation) { for (int i = 0; i < amplitudeBuffer.Length; i++) { outputAccumulation[i] += outputFactor * window[i] * noiseBandBuffer[i].Real * amplitudeBuffer[i].Magnitude; } } }); samplesHandled += read; if (--frameLag <= 0) { bufferIndex = 0; bufferCount = Math.Min(stepSize, samplesHandled); samplesHandled -= bufferCount; //Copy output samples to output buffer for (int sample = 0; sample < bufferCount; sample++) { cachedSampleBuffer[sample] = (float)outputAccumulation[sample]; } } //Slide over input samples Array.Copy( sourceArray: inputBuffer, sourceIndex: stepSize, destinationArray: inputBuffer, destinationIndex: 0, length: overlapSize); //Slide output samples Array.Copy( sourceArray: outputAccumulation, sourceIndex: stepSize, destinationArray: outputAccumulation, destinationIndex: 0, length: overlapSize); //Clear empty output accumulation region Array.Clear(outputAccumulation, overlapSize, stepSize); samplesWritten += ReadBody(data, offset + samplesWritten, count - samplesWritten); } return(samplesWritten); }
public void DrawInspector(Parallel node) { EditorGUILayout.LabelField(new GUIContent("Parallel"), TitleStyle); EditorGUILayout.Space (); node.strategy = (Parallel.ResolutionStrategy) EditorGUILayout.EnumPopup("Return Strategy", node.strategy); string message = "The parallel node ticks all children sequentially from left to right, regardless of their return states. It returns SUCCESS if the number of succeeding children is larger than a local constant S, FAILURE if the number of failing children is larger than a local constant F or RUNNING otherwise."; EditorGUILayout.HelpBox(message, MessageType.Info); EditorGUILayout.HelpBox("Not yet implemented!", MessageType.Error); }
/// <summary> /// </summary> public static IEnumerable <Chunk> ImmutableUnmixOutOfCore(this IEnumerable <Chunk> chunks, string tmpdir, int binsExponent, ParseConfig config) { var binsExponentFactor = 1.0 / Math.Pow(2.0, binsExponent); try { Report.BeginTimed("ImmutableUnmixOutOfCore"); tmpdir = Path.Combine(tmpdir, Guid.NewGuid().ToString()); Directory.CreateDirectory(tmpdir); var root = default(Cell?); var hasNormals = false; var hasColors = false; var hasIntensities = false; var countChunks = 0L; var countOriginal = 0L; Report.BeginTimed("processing chunks"); var lockedFilenames = new HashSet <string>(); Parallel.ForEach(chunks, chunk => { Interlocked.Increment(ref countChunks); Interlocked.Add(ref countOriginal, chunk.Count); // thread-safe counters: this body runs concurrently hasNormals = chunk.HasNormals; hasColors = chunk.HasColors; hasIntensities = chunk.HasIntensities; var _ps = chunk.Positions; var _ns = chunk.Normals; var _js = chunk.Intensities; var _cs = chunk.Colors; // binning var map = new Dictionary <V3l, List <int> >(); for (var i = 0; i < chunk.Count; i++) { var p = _ps[i]; var key = binsExponent == 0 ? new V3l(p) : new V3l(p * binsExponentFactor); if (!map.TryGetValue(key, out var value)) { map[key] = value = new List <int>(); } value.Add(i); } // store cells foreach (var kv in map) { var cell = new Cell(kv.Key.X, kv.Key.Y, kv.Key.Z, binsExponent); root = root.HasValue ? new Cell(new Box3d(root.Value.BoundingBox, cell.BoundingBox)) : cell; var filename = Path.Combine(tmpdir, $"{kv.Key.X}_{kv.Key.Y}_{kv.Key.Z}"); while (true) { lock (lockedFilenames) { if (lockedFilenames.Add(filename)) { break; } } Thread.Sleep(100); // an un-awaited Task.Delay would not pause here } using (var f = File.Open(filename, FileMode.Append, FileAccess.Write, FileShare.None)) using (var bw = new BinaryWriter(f)) { var ia = kv.Value; foreach (var i in ia) { var p = _ps[i]; bw.Write(p.X); bw.Write(p.Y); bw.Write(p.Z); if (hasNormals) { var n = _ns[i]; bw.Write(n.X); bw.Write(n.Y); bw.Write(n.Z); } if (hasIntensities) { var j = _js[i]; bw.Write(j); } if (hasColors) { var c = _cs[i]; var x = c.R | (c.G << 8) | (c.B << 16); // parenthesized: '+' binds tighter than '<<', so the original packed bits incorrectly bw.Write(x); } } } lock (lockedFilenames) { lockedFilenames.Remove(filename); } } }); Report.EndTimed(); Report.Line($"[ImmutableUnmixOutOfCore] chunk count = {countChunks:N0}"); Report.Line($"[ImmutableUnmixOutOfCore] root cell = {root:N0}"); Report.Line($"[ImmutableUnmixOutOfCore] point count = {countOriginal:N0}"); // construct hierarchy Report.BeginTimed("constructing hierarchy"); foreach (var path in Directory.EnumerateFiles(tmpdir)) { var filename = Path.GetFileName(path); var ts = filename.Split('_'); var cell = new Cell(long.Parse(ts[0]), long.Parse(ts[1]), long.Parse(ts[2]), binsExponent); var stack = new Stack <string>(); while (cell.Exponent < root.Value.Exponent) { cell = cell.Parent; stack.Push($"{cell.X}_{cell.Y}_{cell.Z}_{cell.Exponent}"); } var dir = tmpdir; while (stack.Count > 0) { dir = Path.Combine(dir, stack.Pop()); } try { Directory.CreateDirectory(dir); File.Move(path, Path.Combine(dir, filename)); } catch (Exception e) { Report.Error(e.ToString()); Report.Error($"[dir ] {dir}"); Report.Error($"[move] {path} -> {Path.Combine(dir, filename)}"); } } Report.EndTimed(); // filter min distance Report.BeginTimed("filtering min distance"); var countFiltered = 0L; Parallel.ForEach(Directory.EnumerateFiles(tmpdir, "*", SearchOption.AllDirectories), path => { var filename = Path.GetFileName(path); var ts = filename.Split('_'); var cell = new Cell(long.Parse(ts[0]), long.Parse(ts[1]), long.Parse(ts[2]), binsExponent); var _ps = new List <V3d>(); var _ns = hasNormals ?
new List <V3f>() : null; var _js = hasIntensities ? new List <int>() : null; var _cs = hasColors ? new List <C4b>() : null; using (var f = File.Open(path, FileMode.Open, FileAccess.Read)) using (var br = new BinaryReader(f)) { try { while (br.BaseStream.Position < br.BaseStream.Length) { _ps.Add(new V3d(br.ReadDouble(), br.ReadDouble(), br.ReadDouble())); if (hasNormals) { _ns.Add(new V3f(br.ReadSingle(), br.ReadSingle(), br.ReadSingle())); } if (hasIntensities) { _js.Add(br.ReadInt32()); } if (hasColors) { var x = br.ReadInt32(); _cs.Add(new C4b(x & 0xff, (x >> 8) & 0xff, (x >> 16) & 0xff)); } } } catch (Exception e) { Report.Error(e.ToString()); return; } } var chunk = new Chunk(_ps, _cs, _ns, _js); var chunkFiltered = chunk.ImmutableFilterMinDistByCell(cell, config); Interlocked.Add(ref countFiltered, chunkFiltered.Count); // thread-safe: multiple files are filtered in parallel //Report.Line($"[{cell}] {countFiltered:N0}/{countOriginal:N0} ({countOriginal- countFiltered:N0})"); using (var f = File.Open(path, FileMode.Open, FileAccess.Write, FileShare.None)) using (var bw = new BinaryWriter(f)) { for (var i = 0; i < chunkFiltered.Count; i++) { var p = chunkFiltered.Positions[i]; bw.Write(p.X); bw.Write(p.Y); bw.Write(p.Z); if (hasNormals) { var n = chunkFiltered.Normals[i]; bw.Write(n.X); bw.Write(n.Y); bw.Write(n.Z); } if (hasIntensities) { var j = chunkFiltered.Intensities[i]; bw.Write(j); } if (hasColors) { var c = chunkFiltered.Colors[i]; var x = c.R | (c.G << 8) | (c.B << 16); // same precedence fix as above bw.Write(x); } } } }); Report.Line($"{countFiltered:N0}/{countOriginal:N0} (removed {countOriginal - countFiltered:N0} points)"); Report.EndTimed(); // return final chunks var ps = new List <V3d>(); var ns = hasNormals ? new List <V3f>() : null; var js = hasIntensities ? new List <int>() : null; var cs = hasColors ? new List <C4b>() : null; foreach (var path in Directory.EnumerateFiles(tmpdir, "*", SearchOption.AllDirectories)) { using (var f = File.Open(path, FileMode.Open, FileAccess.Read)) using (var br = new BinaryReader(f)) { try { while (br.BaseStream.Position < br.BaseStream.Length) { ps.Add(new V3d(br.ReadDouble(), br.ReadDouble(), br.ReadDouble())); if (hasNormals) { ns.Add(new V3f(br.ReadSingle(), br.ReadSingle(), br.ReadSingle())); } if (hasIntensities) { js.Add(br.ReadInt32()); } if (hasColors) { var x = br.ReadInt32(); cs.Add(new C4b(x & 0xff, (x >> 8) & 0xff, (x >> 16) & 0xff)); } } } catch (Exception e) { Report.Error(e.ToString()); ps = new List <V3d>(); ns = hasNormals ? new List <V3f>() : null; js = hasIntensities ? new List <int>() : null; cs = hasColors ? new List <C4b>() : null; continue; } } File.Delete(path); if (ps.Count >= config.MaxChunkPointCount) { yield return(new Chunk(ps, cs, ns, js)); ps = new List <V3d>(); ns = hasNormals ? new List <V3f>() : null; js = hasIntensities ? new List <int>() : null; cs = hasColors ? new List <C4b>() : null; } } // rest? if (ps.Count > 0) { yield return(new Chunk(ps, cs, ns, js)); } } finally { try { Report.BeginTimed("deleting temporary data"); Directory.Delete(tmpdir, true); Report.EndTimed(); } catch (Exception e) { Report.Warn(e.ToString()); } Report.EndTimed(); } }
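// The color packing fixed above stores R in bits 0-7, G in bits 8-15 and B in bits 16-23,
// matching the reader `new C4b(x & 0xff, (x >> 8) & 0xff, (x >> 16) & 0xff)` used in this
// source. A tiny round-trip pair for reference (C4b is Aardvark.Base's byte color type):
static int PackRgb(C4b c) => c.R | (c.G << 8) | (c.B << 16);
static C4b UnpackRgb(int x) => new C4b(x & 0xff, (x >> 8) & 0xff, (x >> 16) & 0xff);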
// Main function assigned to Convert action private void ToBlur() { // ParallelOptions carries the number of threads chosen by the user var options = new ParallelOptions() { MaxDegreeOfParallelism = threadsAmount }; Parallel.ForEach(outputArray, options, (row, state, index) => { // The height of the local array is derived from the radius passed by the user int localArrayHeight = 1 + 2 * radius; byte[] surroundingArea; surroundingArea = new byte[localArrayHeight * arrayWidth]; if (index >= 0 && index < radius) { for (int j = 0; j < localArrayHeight; ++j) { int temporaryRowAccess = 0; int offset = Convert.ToInt32(index) - radius + j; if (offset < 0) { temporaryRowAccess = arrayHeight - 1 + offset; } for (int i = 0; i < arrayWidth; i += 3) { if (temporaryRowAccess > 0) { surroundingArea[(j * (arrayWidth)) + i] = inputArray[temporaryRowAccess][i]; surroundingArea[(j * (arrayWidth)) + i + 1] = inputArray[temporaryRowAccess][i + 1]; surroundingArea[(j * (arrayWidth)) + i + 2] = inputArray[temporaryRowAccess][i + 2]; } else { surroundingArea[(j * (arrayWidth)) + i] = inputArray[index - radius + j][i]; surroundingArea[(j * (arrayWidth)) + i + 1] = inputArray[index - radius + j][i + 1]; surroundingArea[(j * (arrayWidth)) + i + 2] = inputArray[index - radius + j][i + 2]; } } } } else if (index >= radius && index < (arrayHeight - radius)) { for (int j = 0; j < localArrayHeight; ++j) { for (int i = 0; i < arrayWidth; i += 3) { surroundingArea[(j * (arrayWidth)) + i] = inputArray[index - radius + j][i]; surroundingArea[(j * (arrayWidth)) + i + 1] = inputArray[index - radius + j][i + 1]; surroundingArea[(j * (arrayWidth)) + i + 2] = inputArray[index - radius + j][i + 2]; } } } else if (index >= (arrayHeight - radius) && index < (arrayHeight - 1)) { for (int j = 0; j < localArrayHeight; ++j) { int temporaryRowAccess = (arrayHeight - 1); int offset = Convert.ToInt32(index) - radius + j; if (offset > (arrayHeight - 1)) { temporaryRowAccess = offset - (arrayHeight); } for (int i = 0; i < arrayWidth; i += 3) { if (temporaryRowAccess < (arrayHeight - 1)) { surroundingArea[(j * (arrayWidth)) + i] = inputArray[temporaryRowAccess][i]; surroundingArea[(j * (arrayWidth)) + i + 1] = inputArray[temporaryRowAccess][i + 1]; surroundingArea[(j * (arrayWidth)) + i + 2] = inputArray[temporaryRowAccess][i + 2]; } else { surroundingArea[(j * (arrayWidth)) + i] = inputArray[index - radius + j][i]; surroundingArea[(j * (arrayWidth)) + i + 1] = inputArray[index - radius + j][i + 1]; surroundingArea[(j * (arrayWidth)) + i + 2] = inputArray[index - radius + j][i + 2]; } } } } // Use C DLL if (conversionType.Equals(Conversion.C)) { BlurTransformRowC(arrayWidth, localArrayHeight, radius, row, surroundingArea); } // Use ASM DLL else if (conversionType.Equals(Conversion.ASM)) { BlurTransformRowASM(arrayWidth, localArrayHeight, row, surroundingArea, radius); } }); return; }
public override bool Execute(ProgramOptions programOptions, JobConfiguration jobConfiguration) { Stopwatch stopWatch = new Stopwatch(); stopWatch.Start(); StepTiming stepTimingFunction = new StepTiming(); stepTimingFunction.JobFileName = programOptions.OutputJobFilePath; stepTimingFunction.StepName = jobConfiguration.Status.ToString(); stepTimingFunction.StepID = (int)jobConfiguration.Status; stepTimingFunction.StartTime = DateTime.Now; stepTimingFunction.NumEntities = jobConfiguration.Target.Count; this.DisplayJobStepStartingStatus(jobConfiguration); FilePathMap = new FilePathMap(programOptions, jobConfiguration); try { if (this.ShouldExecute(programOptions, jobConfiguration) == false) { return(true); } if (jobConfiguration.Target.Count(t => t.Type == APPLICATION_TYPE_WEB) == 0) { logger.Warn("No {0} targets to process", APPLICATION_TYPE_WEB); loggerConsole.Warn("No {0} targets to process", APPLICATION_TYPE_WEB); return(true); } // Process each target for (int i = 0; i < jobConfiguration.Target.Count; i++) { Stopwatch stopWatchTarget = new Stopwatch(); stopWatchTarget.Start(); JobTarget jobTarget = jobConfiguration.Target[i]; if (jobTarget.Type != null && jobTarget.Type.Length > 0 && jobTarget.Type != APPLICATION_TYPE_WEB) { continue; } StepTiming stepTimingTarget = new StepTiming(); stepTimingTarget.Controller = jobTarget.Controller; stepTimingTarget.ApplicationName = jobTarget.Application; stepTimingTarget.ApplicationID = jobTarget.ApplicationID; stepTimingTarget.JobFileName = programOptions.OutputJobFilePath; stepTimingTarget.StepName = jobConfiguration.Status.ToString(); stepTimingTarget.StepID = (int)jobConfiguration.Status; stepTimingTarget.StartTime = DateTime.Now; stepTimingTarget.NumEntities = 3; try { this.DisplayJobTargetStartingStatus(jobConfiguration, jobTarget, i + 1); // Set up controller access using (ControllerApi controllerApi = new ControllerApi(jobTarget.Controller, jobTarget.UserName, AESEncryptionHelper.Decrypt(jobTarget.UserPassword))) { controllerApi.PrivateApiLogin(); #region Prepare time range long fromTimeUnix = UnixTimeHelper.ConvertToUnixTimestamp(jobConfiguration.Input.TimeRange.From); long toTimeUnix = UnixTimeHelper.ConvertToUnixTimestamp(jobConfiguration.Input.TimeRange.To); long differenceInMinutes = (toTimeUnix - fromTimeUnix) / (60000); #endregion #region EUM Pages loggerConsole.Info("Pages and AJAX Requests"); string pagesJSON = controllerApi.GetWEBPages(jobTarget.ApplicationID, fromTimeUnix, toTimeUnix); if (pagesJSON != String.Empty) { FileIOHelper.SaveFileToPath(pagesJSON, FilePathMap.WEBPagesDataFilePath(jobTarget)); } #endregion #region EUM Pages Performance if (pagesJSON != String.Empty) { JObject pagesListContainer = JObject.Parse(pagesJSON); if (isTokenPropertyNull(pagesListContainer, "data") == false) { JArray pagesArray = (JArray)pagesListContainer["data"]; loggerConsole.Info("Performance of Pages and Ajax Requests ({0}) entities", pagesArray.Count); int j = 0; var listOfPagesChunks = pagesArray.BreakListIntoChunks(ENTITIES_EXTRACT_NUMBER_OF_PAGES_TO_PROCESS_PER_THREAD); ParallelOptions parallelOptions = new ParallelOptions(); if (programOptions.ProcessSequentially == true) { parallelOptions.MaxDegreeOfParallelism = 1; } else { parallelOptions.MaxDegreeOfParallelism = PAGES_EXTRACT_NUMBER_OF_THREADS; } Parallel.ForEach <List <JToken>, int>( listOfPagesChunks, parallelOptions, () => 0, (listOfPagesChunk, loop, subtotal) => { // Set up controller access using (ControllerApi controllerApiParallel = new ControllerApi(jobTarget.Controller, 
jobTarget.UserName, AESEncryptionHelper.Decrypt(jobTarget.UserPassword))) { controllerApiParallel.PrivateApiLogin(); foreach (JToken pageToken in listOfPagesChunk) { string pageName = getStringValueFromJToken(pageToken, "name"); string pageType = getStringValueFromJToken(pageToken, "type"); long pageID = getLongValueFromJToken(pageToken, "addId"); if (File.Exists(FilePathMap.WEBPagePerformanceDataFilePath(jobTarget, pageType, pageName, pageID, jobConfiguration.Input.TimeRange)) == false) { // use the per-thread API client created above, not the shared outer one string pageJSON = controllerApiParallel.GetWEBPagePerformance(jobTarget.ApplicationID, pageID, fromTimeUnix, toTimeUnix, differenceInMinutes); if (pageJSON != String.Empty) { FileIOHelper.SaveFileToPath(pageJSON, FilePathMap.WEBPagePerformanceDataFilePath(jobTarget, pageType, pageName, pageID, jobConfiguration.Input.TimeRange)); } } } return(listOfPagesChunk.Count); } }, (finalResult) => { Interlocked.Add(ref j, finalResult); Console.Write("[{0}].", j); } ); loggerConsole.Info("Completed {0} Pages", pagesArray.Count); } } #endregion #region Geo Regions loggerConsole.Info("Geo Locations"); string geoRegionsJSON = controllerApi.GetWEBGeoRegions(jobTarget.ApplicationID, String.Empty, String.Empty, String.Empty, fromTimeUnix, toTimeUnix, differenceInMinutes); if (geoRegionsJSON != String.Empty) { FileIOHelper.SaveFileToPath(geoRegionsJSON, FilePathMap.WEBGeoLocationsDataFilePath(jobTarget, "all")); } if (geoRegionsJSON != String.Empty) { JObject geoRegionsContainerObject = JObject.Parse(geoRegionsJSON); if (geoRegionsContainerObject != null) { if (isTokenPropertyNull(geoRegionsContainerObject, "rumItems") == false) { int j = 0; JArray geoRegionsArray = (JArray)geoRegionsContainerObject["rumItems"]; foreach (JObject geoRegionObject in geoRegionsArray) { if (isTokenPropertyNull(geoRegionObject, "eumRegionPerformanceSummaryData") == false) { string country = getStringValueFromJToken(geoRegionObject["eumRegionPerformanceSummaryData"], "country"); string geoRegionJSON = controllerApi.GetWEBGeoRegions(jobTarget.ApplicationID, country, String.Empty, String.Empty, fromTimeUnix, toTimeUnix, differenceInMinutes); if (geoRegionJSON != String.Empty) { FileIOHelper.SaveFileToPath(geoRegionJSON, FilePathMap.WEBGeoLocationsDataFilePath(jobTarget, country)); } } j++; if (j % 10 == 0) { Console.Write("[{0}].", j); } } loggerConsole.Info("Completed {0} Geo Locations and Regions", j); } } } #endregion } } catch (Exception ex) { logger.Warn(ex); loggerConsole.Warn(ex); return(false); } finally { stopWatchTarget.Stop(); this.DisplayJobTargetEndedStatus(jobConfiguration, jobTarget, i + 1, stopWatchTarget); stepTimingTarget.EndTime = DateTime.Now; stepTimingTarget.Duration = stopWatchTarget.Elapsed; stepTimingTarget.DurationMS = stopWatchTarget.ElapsedMilliseconds; List <StepTiming> stepTimings = new List <StepTiming>(1); stepTimings.Add(stepTimingTarget); FileIOHelper.WriteListToCSVFile(stepTimings, new StepTimingReportMap(), FilePathMap.StepTimingReportFilePath(), true); } } return(true); } catch (Exception ex) { logger.Error(ex); loggerConsole.Error(ex); return(false); } finally { stopWatch.Stop(); this.DisplayJobStepEndedStatus(jobConfiguration, stopWatch); stepTimingFunction.EndTime = DateTime.Now; stepTimingFunction.Duration = stopWatch.Elapsed; stepTimingFunction.DurationMS = stopWatch.ElapsedMilliseconds; List <StepTiming> stepTimings = new List <StepTiming>(1); stepTimings.Add(stepTimingFunction); FileIOHelper.WriteListToCSVFile(stepTimings, new StepTimingReportMap(), FilePathMap.StepTimingReportFilePath(), true); } }
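// The Parallel.ForEach call above uses the overload with thread-local state: localInit
// (`() => 0`) seeds a per-thread subtotal, the body returns the updated subtotal, and
// localFinally folds each subtotal into the shared counter via Interlocked.Add. A minimal
// standalone sketch of the same aggregation pattern:
static int CountItemsInChunks(List<List<int>> chunks)
{
    int processed = 0;
    Parallel.ForEach(
        chunks,
        () => 0,                                                // localInit: per-thread subtotal
        (chunk, loopState, subtotal) => subtotal + chunk.Count, // body: accumulate locally
        subtotal => Interlocked.Add(ref processed, subtotal));  // localFinally: publish once
    return processed;
}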
public PluginManager(GameController gameController, Graphics graphics, MultiThreadManager multiThreadManager) { _gameController = gameController; _graphics = graphics; _multiThreadManager = multiThreadManager; RootDirectory = AppDomain.CurrentDomain.BaseDirectory; Directories["Temp"] = Path.Combine(RootDirectory, PluginsDirectory, "Temp"); Directories[PluginsDirectory] = Path.Combine(RootDirectory, PluginsDirectory); Directories[CompiledPluginsDirectory] = Path.Combine(Directories[PluginsDirectory], CompiledPluginsDirectory); Directories[SourcePluginsDirectory] = Path.Combine(Directories[PluginsDirectory], SourcePluginsDirectory); _gameController.EntityListWrapper.EntityAdded += EntityListWrapperOnEntityAdded; _gameController.EntityListWrapper.EntityRemoved += EntityListWrapperOnEntityRemoved; _gameController.EntityListWrapper.EntityAddedAny += EntityListWrapperOnEntityAddedAny; _gameController.EntityListWrapper.EntityIgnored += EntityListWrapperOnEntityIgnored; _gameController.Area.OnAreaChange += AreaOnOnAreaChange; parallelLoading = _gameController.Settings.CoreSettings.MultiThreadLoadPlugins; foreach (var directory in Directories) { if (!Directory.Exists(directory.Value)) { DebugWindow.LogMsg($"{directory.Value} doesn't exist, but don't worry, I created it for you."); Directory.CreateDirectory(directory.Value); } } var (compiledPlugins, sourcePlugins) = SearchPlugins(); List <(Assembly asm, DirectoryInfo directoryInfo)> assemblies = new List <(Assembly, DirectoryInfo)>(); Task task = null; if (sourcePlugins.Length > 0) { task = Task.Run(() => { var compilePluginsFromSource = CompilePluginsFromSource(sourcePlugins); }); } var compiledAssemblies = GetCompiledAssemblies(compiledPlugins, parallelLoading); var devTree = Plugins.FirstOrDefault(x => x.Name.Equals("DevTree")); task?.Wait(); Plugins = Plugins.OrderBy(x => x.Order).ThenByDescending(x => x.CanBeMultiThreading).ThenBy(x => x.Name) .ToList(); if (devTree != null) { try { var fieldInfo = devTree.Plugin.GetType().GetField("Plugins"); List <PluginWrapper> devTreePlugins() => Plugins; fieldInfo.SetValue(devTree.Plugin, (Func <List <PluginWrapper> >)devTreePlugins); } catch (Exception e) { LogError(e.ToString()); } } if (parallelLoading) { // Pre-initialize some shared objects: with multi-threaded loading they can otherwise still be null for some plugins var ingameStateIngameUi = gameController.IngameState.IngameUi; var ingameStateData = gameController.IngameState.Data; var ingameStateServerData = gameController.IngameState.ServerData; Parallel.ForEach(Plugins, wrapper => wrapper.Initialise(gameController)); } else { Plugins.ForEach(wrapper => wrapper.Initialise(gameController)); } AreaOnOnAreaChange(gameController.Area.CurrentArea); Plugins.ForEach(x => x.SubscrideOnFile(HotReloadDll)); AllPluginsLoaded = true; }
void GetWorkshopBlueprints() { Task = Parallel.Start(DownloadBlueprints); }
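// Note: Parallel.Start is not part of System.Threading.Tasks.Parallel (the TPL class has no
// Start method); it presumably comes from a game-oriented task library such as ParallelTasks,
// which runs a delegate on a worker thread and returns a task-like handle. A plain-TPL
// equivalent might look like this sketch, assuming Task is a System.Threading.Tasks.Task
// property and DownloadBlueprints is a parameterless method:
void GetWorkshopBlueprintsTpl()
{
    Task = System.Threading.Tasks.Task.Run(DownloadBlueprints);
}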
static void ParallelCancel(IEnumerable <int> list, CancellationToken token) { Parallel.ForEach(list, new ParallelOptions { CancellationToken = token }, x => Console.WriteLine(x)); }
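// When the token supplied through ParallelOptions is cancelled, Parallel.ForEach stops
// scheduling iterations and throws OperationCanceledException to the caller, so the call is
// normally wrapped in a try/catch. A minimal, self-contained sketch:
using System;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;

class ParallelCancelSketch
{
    static void Main()
    {
        using var cts = new CancellationTokenSource();
        cts.CancelAfter(50); // request cancellation shortly after the loop starts

        try
        {
            Parallel.ForEach(
                Enumerable.Range(0, 1_000_000),
                new ParallelOptions { CancellationToken = cts.Token },
                x => Console.WriteLine(x));
        }
        catch (OperationCanceledException)
        {
            Console.WriteLine("Loop cancelled before completing.");
        }
    }
}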
public void Execute(IJobExecutionContext context) { //Read shop information page by page //Fetch beauty group-buy orders page by page; skip product/shop-id pairs that were already counted and aggregate the ones that were not //Write the aggregated results to Tuhu_comment..ShopCommentStatistics try { logger.Info("Execution started"); int pageIndex = 1; int pageSize = 1000; int total = DalShopStatistics.GetShopsCount(); int pageTotal = (total - 1) / pageSize + 1; for (; pageIndex <= pageTotal; pageIndex++) { logger.Info($"Shop page {pageIndex}/{pageTotal}"); var shops = DalShopStatistics.GetShopsPage(pageIndex, pageSize); shops.ForEach((item) => { //Query the beauty group-buy orders for this shop int shopPageIndex = 1; int shopPageSize = 1000; int shopTotal = DalShopCommentSync.GetShopCommentOrderCount(item.PKID); int shopPageTotal = (shopTotal - 1) / shopPageSize + 1; for (; shopPageIndex <= shopPageTotal; shopPageIndex++) { logger.Info($"Shop comment page shopId:{item.PKID} {shopPageIndex}/{shopPageTotal}"); var orders = DalShopCommentSync.GetShopCommentOrder(shopPageIndex, shopPageSize, item.PKID); var index = shopPageIndex; Parallel.ForEach(orders, new ParallelOptions() { MaxDegreeOfParallelism = 5 }, order => { //Get all products under this order var orderProducts = DalShopCommentSync.GetShopOrderProduct(order.OrderId); logger.Info($"Shop comment page shopId:{item.PKID} {index}/{shopPageTotal} orderid:{order.OrderId}"); foreach (var product in orderProducts) { using (var client = CacheHelper.CreateCounterClient(typeof(ShopCommentStatisticsJob).FullName, TimeSpan.FromHours(12))) { var response = client.Increment(item.PKID + product.Pid); if (response.Success && response.Value > 1) { continue; } } //Get all order ids for this product under this shop var productOrders = DalShopCommentSync.GetShopProductOrder(item.PKID, product.Pid); //Average the comment scores across those orders and write the result for this product var statistics = DalShopCommentSync.GetShopCommentStatistics(item.PKID, product.Pid, productOrders.Select(x => x.CommentId)); foreach (var s in statistics) { DalShopCommentSync.SyncShopCommentStatistics(s.ShopId, s); } } }); logger.Info($"Shop comment page shopId:{item.PKID} {shopPageIndex}/{shopPageTotal} done"); } }); logger.Info($"Shop page {pageIndex}/{pageTotal} done"); } using (var client = new ShopCommentClient()) { client.RefreshCommentStatisticsCache(2); } logger.Info("Execution finished"); } catch (Exception e) { logger.Error("Execution failed", e); } }
private static void Main(string[] args) { // Configure the process priority. var thisProcess = System.Diagnostics.Process.GetCurrentProcess(); try { thisProcess.PriorityClass = System.Diagnostics.ProcessPriorityClass.RealTime; } catch (Exception ex) { Console.WriteLine(ex.Message); Console.WriteLine($"Could not change this application's priority. Continuing with priority {thisProcess.PriorityClass}"); } // Initialize a huge data array (note: <= u, so the last element is filled in too) var c = new int[262144]; var u = c.GetUpperBound(0); var rnd = new Random(); for (var j = 0; j <= u; j++) { c[j] = rnd.Next(1, 262144); } Console.Write("Counting with a foreach block... "); var t = new System.Diagnostics.Stopwatch(); var tot = 0; t.Start(); foreach (var j in c) { if (j.IsPrime()) { tot++; } } t.Stop(); Console.WriteLine($"{tot}, Time: {t.ElapsedMilliseconds} ms"); Console.Write("Counting using lambdas... "); t.Reset(); tot = 0; t.Start(); tot = c.Count(p => p.IsPrime()); t.Stop(); Console.WriteLine($"{tot}, Time: {t.ElapsedMilliseconds} ms"); var times = new Dictionary <int, long>(); for (var threads = 2; threads <= Environment.ProcessorCount; threads++) { Console.Write($"Multithreaded count ({threads} threads)... "); t.Reset(); tot = 0; t.Start(); var part = Partitioner.Create(c); ConcurrentBag <int> primes = new ConcurrentBag <int>(); Parallel.ForEach(part, new ParallelOptions { MaxDegreeOfParallelism = threads }, j => { if (j.IsPrime()) { primes.Add(j); } }); tot = primes.Count; t.Stop(); times.Add(threads, t.ElapsedMilliseconds); Console.WriteLine($"{tot}, Time: {t.ElapsedMilliseconds} ms"); } var best = times.OrderBy(p => p.Value).First(); Console.WriteLine($"Best time: {best.Key} threads ({best.Value} ms)"); }
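// The ConcurrentBag above only exists to count primes, which adds cross-thread contention.
// The localInit/localFinally overload of Parallel.ForEach keeps one subtotal per worker and
// merges each subtotal exactly once. A sketch assuming the same array c, loop variable
// threads, and IsPrime() extension used in the snippet above:
var partitioner = Partitioner.Create(c, true); // load-balancing partitioner over the array
var totalPrimes = 0;
Parallel.ForEach(
    partitioner,
    new ParallelOptions { MaxDegreeOfParallelism = threads },
    () => 0,                                                  // localInit: per-worker subtotal
    (j, state, subtotal) => j.IsPrime() ? subtotal + 1 : subtotal,
    subtotal => Interlocked.Add(ref totalPrimes, subtotal));  // merge once per worker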
static void Main(string[] args) { try { // Setup NLog var logConfig = new NLog.Config.LoggingConfiguration(); var logFile = new NLog.Targets.FileTarget("logfile") { FileName = "debug.log" }; var logConsole = new NLog.Targets.ColoredConsoleTarget("logconsole"); logConfig.AddRule(LogLevel.Info, LogLevel.Fatal, logConsole); logConfig.AddRule(LogLevel.Debug, LogLevel.Fatal, logFile); LogManager.Configuration = logConfig; // Parse options string[] inputPaths = null; _ = Parser.Default.ParseArguments<Options>(args) .WithParsed(opts => { if (opts.OutputDir != null) { if (Directory.Exists(opts.OutputDir)) Decrypto.OutputDir = opts.OutputDir; else Logger.Error("Designated output directory {Path} does not exist.", opts.OutputDir); } Decrypto.SkipDuplicate = opts.SkipDuplicate; TencentDecrypto.ForceRename = opts.ForceRename; inputPaths = opts.InputPaths.ToArray(); }) .WithNotParsed(errs => { }); if (inputPaths != null) { // Search for files (the directory branch now includes .qmcflac, matching the single-file branch) List<string> foundPaths = new List<string>(); foreach (string path in inputPaths) { try { if (Directory.Exists(path)) { foundPaths.AddRange( Directory.EnumerateFiles(path, "*", SearchOption.AllDirectories) .Where(file => file.ToLower().EndsWith(".ncm") || file.ToLower().EndsWith(".mflac") || file.ToLower().EndsWith(".qmc0") || file.ToLower().EndsWith(".qmc3") || file.ToLower().EndsWith(".qmcogg") || file.ToLower().EndsWith(".qmcflac") || file.ToLower().EndsWith(".tkm") || file.ToLower().EndsWith(".tm2") || file.ToLower().EndsWith(".tm6") || file.ToLower().EndsWith(".bkcmp3") || file.ToLower().EndsWith(".bkcflac")) ); } else if (File.Exists(path) && ( path.ToLower().EndsWith(".ncm") || path.ToLower().EndsWith(".mflac") || path.ToLower().EndsWith(".qmc0") || path.ToLower().EndsWith(".qmc3") || path.ToLower().EndsWith(".qmcogg") || path.ToLower().EndsWith(".qmcflac") || path.ToLower().EndsWith(".tkm") || path.ToLower().EndsWith(".tm2") || path.ToLower().EndsWith(".tm6") || path.ToLower().EndsWith(".bkcmp3") || path.ToLower().EndsWith(".bkcflac"))) { foundPaths.Add(path); } } catch (IOException e) { Logger.Error(e); } } // Decrypt and dump (Distinct() keeps first occurrences, same as the old FindIndex scan but in O(n)) string[] trimmedPaths = foundPaths.Distinct().ToArray(); if (trimmedPaths.Length > 0) { _ = Parallel.ForEach(trimmedPaths, file => { Decrypto decrypto = null; try { switch (Path.GetExtension(file).ToLower()) { case ".ncm": decrypto = new NetEaseDecrypto(file); break; case ".tm2": case ".tm6": decrypto = new TencentSimpleDecrypto(file, "audio/mpeg4"); break; case ".qmc0": case ".qmc3": case ".bkcmp3": decrypto = new TencentFixedDecrypto(file, "audio/mpeg"); break; case ".qmcogg": decrypto = new TencentFixedDecrypto(file, "audio/ogg"); break; case ".qmcflac": case ".bkcflac": decrypto = new TencentFixedDecrypto(file, "audio/flac"); break; case ".tkm": decrypto = new TencentFixedDecrypto(file, "audio/mpeg4"); break; case ".mflac": decrypto = new TencentDynamicDecrypto(file, "audio/flac"); break; default: Logger.Error("Cannot recognize {Path}", file); break; } decrypto?.Dump(); } catch (IOException e) { Logger.Error(e); } finally { decrypto?.Dispose(); } }); Logger.Info("Program finished with {Requested} files requested and {Succeeded} files saved successfully.", trimmedPaths.Length, Decrypto.SuccessCount); return; } Logger.Error("Found no valid file from specified path(s)."); } } catch (Exception e) { Logger.Fatal(e); } }
/// <summary> /// A simple memory mapped scan over the input provided in the constructor /// </summary> /// <param name="ExitAfter">Optionally stop checking or exit early after this many candidates. 0 does not exit early.</param> /// <returns></returns> public int Analyze(int ExitAfter = 0) { CurrWindowBase = 0; mapSize = (64 * 1024 * 1024); long RunShift = 0; if (File.Exists(Filename)) { using (var fs = new FileStream(Filename, FileMode.Open, FileAccess.Read, FileShare.ReadWrite)) { var mapName = Path.GetFileNameWithoutExtension(Filename) + DateTime.Now.ToBinary().ToString("X16"); using (var mmap = MemoryMappedFile.CreateFromFile(fs, mapName, 0, MemoryMappedFileAccess.Read, null, HandleInheritability.Inheritable, false)) { if (FileSize == 0) { FileSize = new FileInfo(Filename).Length; } // TODO: Clean up all the shifts while (CurrWindowBase < FileSize) { using (var reader = mmap.CreateViewAccessor(CurrWindowBase, mapSize, MemoryMappedFileAccess.Read)) { CurrMapBase = 0; reader.ReadArray(CurrMapBase, buffers[filled], 0, 512); while (CurrMapBase < mapSize) { // Adjust for known memory run / extents mappings. var offset = TrueOffset = CurrWindowBase + CurrMapBase; var offset_pfn = offset >> MagicNumbers.PAGE_SHIFT; // next page, may be faster with larger chunks but it's simple to view 1 page at a time long IndexedOffset_pfn = 0; do { IndexedOffset_pfn = vtero.MemAccess.OffsetToMemIndex(offset_pfn + RunShift); if (IndexedOffset_pfn == -1) { RunShift++; continue; } if (IndexedOffset_pfn == -2) { return(DetectedProcesses.Count()); } } while (IndexedOffset_pfn < 0); // found shift, accumulate indexes CurrMapBase += 4096; offset_pfn += RunShift; IndexedOffset_pfn = IndexedOffset_pfn >> MagicNumbers.PAGE_SHIFT; // Calculate DIFF between page frame numbers var diff_off_pfn = offset_pfn < IndexedOffset_pfn ? IndexedOffset_pfn - offset_pfn : offset_pfn - IndexedOffset_pfn; // Skew Offset offset += (diff_off_pfn << MagicNumbers.PAGE_SHIFT); // setup buffers for parallel load/read block = buffers[filled]; filled ^= 1; #pragma warning disable HeapAnalyzerImplicitParamsRule // Array allocation for params parameter Parallel.Invoke(() => Parallel.ForEach <Func <long, bool> >(CheckMethods, (check) => { check(offset); }), () => { if (CurrMapBase < mapSize) { UnsafeHelp.ReadBytes(reader, CurrMapBase, ref buffers[filled]); } } ); if (ExitAfter > 0 && (ExitAfter == DetectedProcesses.Count())) // || FoundValueOffsets.Count() >= ExitAfter)) { return(DetectedProcesses.Count()); } var progress = Convert.ToInt32((Convert.ToDouble(CurrWindowBase) / Convert.ToDouble(FileSize) * 100.0) + 0.5); if (progress != ProgressBarz.Progress) { ProgressBarz.RenderConsoleProgress(progress); } } } // close current window CurrWindowBase += CurrMapBase; if (CurrWindowBase + mapSize > FileSize) { mapSize = FileSize - CurrWindowBase; } } } } // close map } // close stream return(DetectedProcesses.Count()); }
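// Analyze() hides I/O latency by double-buffering: it flips `filled`, runs the check
// callbacks over the page it just read, and in parallel pre-reads the next page into the
// spare buffer via Parallel.Invoke. A stripped-down sketch of that shape (Process and
// ReadNext are illustrative stand-ins, not the original helpers):
using System.Threading.Tasks;

class DoubleBufferSketch
{
    static void Process(long[] block) { /* run detection checks over this page */ }
    static bool ReadNext(long[] block) { /* fill block with the next page; false at EOF */ return false; }

    static void Scan()
    {
        var buffers = new[] { new long[512], new long[512] };
        var filled = 0;
        var more = ReadNext(buffers[filled]); // prime the first buffer
        while (more)
        {
            var current = buffers[filled];
            filled ^= 1; // the other buffer becomes the prefetch target
            var moreNext = false;
            Parallel.Invoke(
                () => Process(current),                      // consume the page already in memory
                () => moreNext = ReadNext(buffers[filled])); // prefetch the next page concurrently
            more = moreNext;
        }
    }
}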
/// <summary> /// Performs object detection on the given frame. /// </summary> /// //public Rectangle[] ProcessFrame(Bitmap frame) //{ // using (FastBitmap fastBitmap = new FastBitmap(frame)) // { // return ProcessFrame(fastBitmap); // } //} /// <summary> /// Performs object detection on the given frame. /// </summary> /// public Rectangle[] ProcessFrame(Bitmap image) { // int colorChannel = // image.PixelFormat == PixelFormat.Format8bppIndexed ? 0 : channel; Rectangle[] objects; // Creates an integral image representation of the frame using (var fastBitmap = new FastBitmap(image, _classifier.Cascade.HasTiltedFeatures)) { // Creates a new list of detected objects. _detectedObjects.Clear(); var width = fastBitmap.Width; var height = fastBitmap.Height; // Update parameters only if different size if (_steps == null || width != _lastWidth || height != _lastHeight) { Update(width, height); } var window = Rectangle.Empty; // For each scaling step for (var i = 0; i < _steps.Length; i++) { var scaling = _steps[i]; // Set the classifier window scale _classifier.Scale = scaling; // Get the scaled window size window.Width = (int)(_baseWidth * scaling); window.Height = (int)(_baseHeight * scaling); // Check if the window is smaller than the minimum size if (window.Width < _minSize.Width || window.Height < _minSize.Height) { // If we are searching in greater to smaller mode, if (_scalingMode == ObjectDetectorScalingMode.GreaterToSmaller) { break; // it won't get bigger, so we should stop. } else { continue; // continue until it gets greater. } } // Check if the window is greater than the maximum size else if (window.Width > _maxSize.Width || window.Height > _maxSize.Height) { // If we are searching in greater to smaller mode, if (_scalingMode == ObjectDetectorScalingMode.GreaterToSmaller) { continue; // continue until it gets smaller. } break; // it won't get smaller, so we should stop. } // Grab some scan loop parameters var xStep = window.Width >> 3; var yStep = window.Height >> 3; var xEnd = width - window.Width; var yEnd = height - window.Height; // Parallel mode. Scan the integral image searching // for objects in the window with parallelization. var bag = new System.Collections.Concurrent.ConcurrentBag <Rectangle>(); var numSteps = (int)Math.Ceiling((double)yEnd / yStep); // For each vertical position of the scan window var window1 = window; Parallel.For( 0, numSteps, (j, loopState) => { var y = j * yStep; // Create a local window reference var localWindow = window1; localWindow.Y = y; // For each horizontal position of the scan window for (var x = 0; x < xEnd; x += xStep) { if (loopState.ShouldExitCurrentIteration) { return; } localWindow.X = x; // Try to detect an object inside the window if (_classifier.Compute(fastBitmap, localWindow)) { // an object has been detected bag.Add(localWindow); if (_searchMode == ObjectDetectorSearchMode.Single) { loopState.Stop(); } } } }); // If required, avoid adding overlapping objects at // the expense of extra computation. Otherwise, only // add objects to the detected objects collection. if (_searchMode == ObjectDetectorSearchMode.NoOverlap) { foreach (var obj in bag) { if (!Overlaps(obj)) { _detectedObjects.Add(obj); } } } else if (_searchMode == ObjectDetectorSearchMode.Single) { if (bag.TryPeek(out window)) { _detectedObjects.Add(window); break; } } else { foreach (var obj in bag) { _detectedObjects.Add(obj); } } } } objects = _detectedObjects.ToArray(); if (_searchMode == ObjectDetectorSearchMode.Average) { objects = _match.Group(objects); } CheckSteadiness(objects); _lastObjects = objects; return(objects); // Returns the array of detected objects. }
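// Single mode above relies on ParallelLoopState.Stop(): it prevents new iterations from
// starting, while iterations already running poll ShouldExitCurrentIteration to bail out
// early, exactly as the scan loop does. A minimal isolated sketch of that protocol:
using System;
using System.Threading;
using System.Threading.Tasks;

class StopSketch
{
    static void Main()
    {
        var firstHit = -1;
        Parallel.For(0, 1_000_000, (i, loopState) =>
        {
            if (loopState.ShouldExitCurrentIteration) return; // another worker already found one
            if (i > 0 && i % 99_991 == 0) // stand-in for "object detected in this window"
            {
                Interlocked.CompareExchange(ref firstHit, i, -1); // record only the first hit
                loopState.Stop(); // request that no further iterations start
            }
        });
        Console.WriteLine($"Stopped after finding {firstHit}");
    }
}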
public Statements.WorkflowActivity Parse(string key, string workflowDefinition, IEnumerable<ActivitySetting> activitySettings) { var settings = GetActivitySettings(); var flow = new Statements.WorkflowActivity(); ParallelWorkflowParser.Human = WorkflowBuilder.CreateHuman(settings.ElementAt(1) , "Parallel child node 1" , new GetUsers(this._actionerRule) , null); ParallelWorkflowParser.SubProcess = WorkflowBuilder.CreateSubProcess(settings.ElementAt(2) , "Parallel child node 2" , new Dictionary<string, string>() { { "error", this._finishRule } } , null); flow.Body.StartNode = WorkflowBuilder.CreateParallel(settings.ElementAt(0) , "Parallel node" , this._completion , null , ParallelWorkflowParser.Human , ParallelWorkflowParser.SubProcess); Parallel = (flow.Body.StartNode as FlowStep).Action as Parallel; return flow; }
static void TestSync() { Parallel.For(0, 10, i => Withdraw(1)); // Parallel.For blocks until all iterations complete, so totalNum is stable when read here Console.WriteLine("finally-totalNum: " + totalNum); }
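// If Withdraw decrements a shared totalNum with an ordinary -=, ten parallel withdrawals can
// interleave and lose updates; Interlocked.Add makes the decrement atomic. A self-contained
// sketch (Withdraw and totalNum here are stand-ins for whatever the original code defines):
using System;
using System.Threading;
using System.Threading.Tasks;

class WithdrawSketch
{
    static int totalNum = 10;

    // Atomic decrement: safe to call concurrently from Parallel.For.
    static void Withdraw(int amount) => Interlocked.Add(ref totalNum, -amount);

    static void Main()
    {
        Parallel.For(0, 10, i => Withdraw(1));
        Console.WriteLine("finally-totalNum: " + totalNum); // 0
    }
}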
public Parallel(Parallel orig) : this(modshogunPINVOKE.new_Parallel__SWIG_1(Parallel.getCPtr(orig)), true) { if (modshogunPINVOKE.SWIGPendingException.Pending) throw modshogunPINVOKE.SWIGPendingException.Retrieve(); }
static void CheckMD5(ProgressReporterDialogue frmProgressReporter, string url) { var baseurl = ConfigurationManager.AppSettings["UpdateLocation"]; if (dobeta) { baseurl = ConfigurationManager.AppSettings["BetaUpdateLocation"]; } L10N.ReplaceMirrorUrl(ref baseurl); string responseFromServer = ""; WebRequest request = WebRequest.Create(url); request.Timeout = 10000; // Set the Method property of the request to GET. request.Method = "GET"; // Get the response. // Get the stream containing content returned by the server. // Open the stream using a StreamReader for easy access. using (WebResponse response = request.GetResponse()) using (Stream dataStream = response.GetResponseStream()) using (StreamReader reader = new StreamReader(dataStream)) { // Display the status. log.Info(((HttpWebResponse)response).StatusDescription); // Read the content. responseFromServer = reader.ReadToEnd(); } Regex regex = new Regex(@"([^\s]+)\s+upgrade/(.*)", RegexOptions.IgnoreCase); if (regex.IsMatch(responseFromServer)) { // compute md5 hashes in the background List <Tuple <string, string, Task <bool> > > tasklist = new List <Tuple <string, string, Task <bool> > >(); MatchCollection matchs = regex.Matches(responseFromServer); for (int i = 0; i < matchs.Count; i++) { string hash = matchs[i].Groups[1].Value.ToString(); string file = matchs[i].Groups[2].Value.ToString(); Task <bool> ismatch = Task <bool> .Factory.StartNew(() => MD5File(file, hash)); tasklist.Add(new Tuple <string, string, Task <bool> >(file, hash, ismatch)); } // parallel download ParallelOptions opt = new ParallelOptions() { MaxDegreeOfParallelism = 3 }; Parallel.ForEach(tasklist, opt, task => //foreach (var task in tasklist) { string file = task.Item1; string hash = task.Item2; // check whether the existing file matches the hash task.Item3.Wait(); bool match = task.Item3.Result; if (!match) { log.Info("Newer File " + file); // check whether we have already downloaded a file that matches the hash if (!MD5File(file + ".new", hash)) { if (frmProgressReporter != null) { frmProgressReporter.UpdateProgressAndStatus(-1, Strings.Getting + file); } string subdir = Path.GetDirectoryName(file) + Path.DirectorySeparatorChar; subdir = subdir.Replace("" + Path.DirectorySeparatorChar + Path.DirectorySeparatorChar, "" + Path.DirectorySeparatorChar); GetNewFile(frmProgressReporter, baseurl + subdir.Replace('\\', '/'), subdir, Path.GetFileName(file)); // check that the newly downloaded file matches the hash if (!MD5File(file + ".new", hash)) { throw new Exception("File downloaded does not match hash: " + file); } } else { log.Info("already got new File " + file); } } else { log.Info("Same File " + file); if (frmProgressReporter != null) { frmProgressReporter.UpdateProgressAndStatus(-1, Strings.Checking + file); } } }); } }
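// MD5File is referenced above but not shown. A compatible helper presumably streams the file
// and compares hex digests case-insensitively; a hedged sketch (the real helper in the
// original codebase may differ):
using System;
using System.IO;
using System.Security.Cryptography;

static class Md5Sketch
{
    // Returns true when the file exists and its MD5 matches the expected hex string.
    public static bool MD5File(string filename, string expectedHash)
    {
        if (!File.Exists(filename)) return false;
        using (var md5 = MD5.Create())
        using (var stream = File.OpenRead(filename))
        {
            var hex = BitConverter.ToString(md5.ComputeHash(stream)).Replace("-", "");
            return hex.Equals(expectedHash, StringComparison.OrdinalIgnoreCase);
        }
    }
}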
public override void Configure(object target) { //Tween.Delay = Delay; //Tween.Duration = Duration; bool goingX = false; bool goingY = false; if (null != WidthTo || null != WidthBy) goingX = true; //throw new Exception("Neither WidthTo nor WidthBy value defined"); if (null != HeightTo || null != HeightBy) goingY = true; //throw new Exception("Neither HeightTo nor HeightBy value defined"); if (!goingX && !goingY) { Debug.LogWarning("Neither Width nor Height resize was requested"); return; } Parallel parallel = new Parallel(); if (goingX) { float widthStart = (float)(WidthFrom ?? StartWidthValueReaderFunc(target)); _tx = new Tween { Name = "ResizeX", Property = "Width", Duration = Duration, Easer = Easer, StartValue = widthStart, EndValue = WidthTo ?? widthStart + WidthBy }; parallel.Add(_tx); } if (goingY) { float heightStart = (float)(HeightFrom ?? StartHeightValueReaderFunc(target)); _ty = new Tween { Name = "ResizeY", Property = "Height", Duration = Duration, Easer = Easer, StartValue = heightStart, EndValue = HeightTo ?? heightStart + HeightBy }; parallel.Add(_ty); } Tween = parallel; //Debug.Log("_tx.StartValue " + _tx.StartValue); //Debug.Log("_tx.EndValue " + _tx.EndValue); //Debug.Log("_ty.StartValue " + _ty.StartValue); //Debug.Log("_ty.EndValue " + _ty.EndValue); }
/// <summary> /// Runs this instance. /// </summary> /// <returns></returns> public bool Run() { var startTime = DateTime.UtcNow; var success = true; Log.Trace($"CoarseUniverseGeneratorProgram.ProcessDailyFolder(): Processing: {_dailyDataFolder.FullName}"); var symbolsProcessed = 0; var filesRead = 0; var dailyFilesNotFound = 0; var coarseFilesGenerated = 0; var mapFileResolver = _mapFileProvider.Get(_market); var blackListedTickers = new HashSet<string>(); if (_blackListedTickersFile.Exists) { blackListedTickers = File.ReadAllLines(_blackListedTickersFile.FullName).ToHashSet(); } var marketFolder = _dailyDataFolder.Parent; var fineFundamentalFolder = new DirectoryInfo(Path.Combine(marketFolder.FullName, "fundamental", "fine")); if (!fineFundamentalFolder.Exists) { Log.Error($"CoarseUniverseGenerator.Run(): FAIL, Fine Fundamental folder not found at {fineFundamentalFolder}! "); return false; } var securityIdentifierContexts = PopulateSidContex(mapFileResolver, blackListedTickers); var dailyPricesByTicker = new ConcurrentDictionary<string, List<TradeBar>>(); var outputCoarseContent = new ConcurrentDictionary<DateTime, List<string>>(); var parallelOptions = new ParallelOptions { MaxDegreeOfParallelism = Environment.ProcessorCount / 2 }; try { Parallel.ForEach(securityIdentifierContexts, parallelOptions, sidContext => { var symbol = new Symbol(sidContext.SID, sidContext.LastTicker); var symbolCount = Interlocked.Increment(ref symbolsProcessed); Log.Debug($"CoarseUniverseGeneratorProgram.Run(): Processing {symbol}"); var factorFile = _factorFileProvider.Get(symbol); // Populate dailyPricesByTicker with all daily data by ticker for all tickers of this security. foreach (var ticker in sidContext.Tickers) { var dailyFile = new FileInfo(Path.Combine(_dailyDataFolder.FullName, $"{ticker}.zip")); if (!dailyFile.Exists) { Log.Error($"CoarseUniverseGeneratorProgram.Run(): {dailyFile} not found!"); Interlocked.Increment(ref dailyFilesNotFound); continue; } if (!dailyPricesByTicker.ContainsKey(ticker)) { // TryAdd is safe if two workers race on the same ticker key dailyPricesByTicker.TryAdd(ticker, ParseDailyFile(dailyFile)); Interlocked.Increment(ref filesRead); } } // Look for daily data for each ticker of the actual security for (int mapFileRowIndex = sidContext.MapFileRows.Length - 1; mapFileRowIndex >= 1; mapFileRowIndex--) { var ticker = sidContext.MapFileRows[mapFileRowIndex].Item2.ToLowerInvariant(); var endDate = sidContext.MapFileRows[mapFileRowIndex].Item1; var startDate = sidContext.MapFileRows[mapFileRowIndex - 1].Item1; List<TradeBar> tickerDailyData; if (!dailyPricesByTicker.TryGetValue(ticker, out tickerDailyData)) { Log.Error($"CoarseUniverseGeneratorProgram.Run(): Daily data for ticker {ticker.ToUpperInvariant()} not found!"); continue; } var tickerFineFundamentalFolder = Path.Combine(fineFundamentalFolder.FullName, ticker); var fineAvailableDates = Enumerable.Empty<DateTime>(); if (Directory.Exists(tickerFineFundamentalFolder)) { fineAvailableDates = Directory.GetFiles(tickerFineFundamentalFolder, "*.zip") .Select(f => DateTime.ParseExact(Path.GetFileNameWithoutExtension(f), DateFormat.EightCharacter, CultureInfo.InvariantCulture)) .ToList(); } // Get daily data only for the period when this ticker was in use foreach (var tradeBar in tickerDailyData.Where(tb => tb.Time >= startDate && tb.Time <= endDate)) { var coarseRow = GenerateFactorFileRow(ticker, sidContext, factorFile, tradeBar, fineAvailableDates, fineFundamentalFolder); outputCoarseContent.AddOrUpdate(tradeBar.Time, new List<string> { coarseRow }, (time, list) => { lock (list) { list.Add(coarseRow); return list; } }); } } if (symbolCount % 1000 == 0) { var elapsed = DateTime.UtcNow - startTime; Log.Trace($"CoarseUniverseGeneratorProgram.Run(): Processed {symbolCount} in {elapsed:g} at {symbolCount / elapsed.TotalMinutes:F2} symbols/minute "); } }); _destinationFolder.Create(); var startWriting = DateTime.UtcNow; Parallel.ForEach(outputCoarseContent, coarseByDate => { var filename = $"{coarseByDate.Key.ToString(DateFormat.EightCharacter, CultureInfo.InvariantCulture)}.csv"; var filePath = Path.Combine(_destinationFolder.FullName, filename); Log.Debug($"CoarseUniverseGeneratorProgram.Run(): Saving {filename} with {coarseByDate.Value.Count} entries."); File.WriteAllLines(filePath, coarseByDate.Value.OrderBy(cr => cr)); var filesCount = Interlocked.Increment(ref coarseFilesGenerated); if (filesCount % 1000 == 0) { var elapsed = DateTime.UtcNow - startWriting; Log.Trace($"CoarseUniverseGeneratorProgram.Run(): Processed {filesCount} in {elapsed:g} at {filesCount / elapsed.TotalSeconds:F2} files/second "); } }); Log.Trace($"\n\nTotal of {coarseFilesGenerated} coarse files generated in {DateTime.UtcNow - startTime:g}:\n" + $"\t => {filesRead} daily data files read.\n"); } catch (Exception e) { Log.Error(e, "CoarseUniverseGeneratorProgram.Run(): FAILED!"); success = false; } return success; }
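// The grouped append above works because List<string>.Add is not thread-safe even though
// ConcurrentDictionary is: the first writer creates the list, later writers lock it inside
// the update factory. Since the factory always returns the same list reference, the
// dictionary's compare-and-swap never retries, so each row is added exactly once. A minimal
// isolated sketch of the pattern:
using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.Threading.Tasks;

class GroupedAppendSketch
{
    static void Main()
    {
        var byDate = new ConcurrentDictionary<DateTime, List<string>>();
        var date = DateTime.UtcNow.Date;
        Parallel.For(0, 1000, i =>
        {
            byDate.AddOrUpdate(
                date,
                _ => new List<string> { $"row {i}" },                                 // first writer creates the list
                (_, list) => { lock (list) { list.Add($"row {i}"); return list; } }); // later writers append under the lock
        });
        Console.WriteLine(byDate[date].Count); // 1000
    }
}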
private static Task ConnectBatch(string url, string transport, int batchSize, ConcurrentBag <Connection> connections) { var options = new ParallelOptions { MaxDegreeOfParallelism = batchSize }; var batchTcs = new TaskCompletionSource <object>(); long remaining = batchSize; Parallel.For(0, batchSize, options, async i => { var connection = new Connection(url); if (!_running) { batchTcs.TrySetResult(null); return; } try { var clientTransport = GetTransport(transport); await(clientTransport == null ? connection.Start() : connection.Start(clientTransport)); if (_running) { connections.Add(connection); var clientId = connection.ConnectionId; connection.Error += e => { Debug.WriteLine(String.Format("SIGNALR: Client {0} ERROR: {1}", clientId, e)); }; connection.Closed += () => { Debug.WriteLine(String.Format("SIGNALR: Client {0} CLOSED", clientId)); // Remove it from the list on close connections.TryTake(out connection); }; } } catch (Exception ex) { Console.WriteLine("Failed to start client. {0}", ex.GetBaseException()); } finally { if (Interlocked.Decrement(ref remaining) == 0) { // When all connections are connected, mark the task as complete batchTcs.TrySetResult(null); } } }); return(batchTcs.Task); }
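// Caution with the snippet above: the lambda given to Parallel.For is async, so each
// iteration "completes" at its first await and Parallel.For returns long before the
// connections have actually started; that is why completion is tracked by hand with the
// `remaining` counter and a TaskCompletionSource. With Task.WhenAll the same batch can be
// awaited directly; a hedged sketch assuming the same Connection/GetTransport members used
// in the snippet:
private static async Task ConnectBatchAsync(string url, string transport, int batchSize, ConcurrentBag<Connection> connections)
{
    var tasks = new List<Task>(batchSize);
    for (var i = 0; i < batchSize; i++)
    {
        tasks.Add(Task.Run(async () =>
        {
            var connection = new Connection(url);
            var clientTransport = GetTransport(transport);
            await (clientTransport == null ? connection.Start() : connection.Start(clientTransport));
            connections.Add(connection);
        }));
    }
    await Task.WhenAll(tasks); // completes only when every connection attempt has finished
}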
public HealthResponse HealthCheck() { HealthResponse response = new HealthResponse() { serviceName = "Internal Sales API", httpResponseCode = HttpStatusCode.OK, nestedServices = new List <NestedServices>() }; Parallel.Invoke( () => { CheckAudienceAndRatingsService(response); }, () => { CheckSmsConnection(response); }, () => { CheckProposalService(response); }, () => { CheckBOMSService(response); }, () => { CheckSystemTopographyService2(response); } ); if (response.nestedServices.Count > 0) { var timeOut = response.nestedServices.Where(x => x.httpResponseCode == HttpStatusCode.ServiceUnavailable).FirstOrDefault(); if (timeOut != null) { response.httpResponseCode = HttpStatusCode.ServiceUnavailable; } else { response.httpResponseCode = HttpStatusCode.GatewayTimeout; } throw new ApiException(response.httpResponseCode, response.nestedServices[0].ToString(), Newtonsoft.Json.JsonConvert.SerializeObject(response)); } return(response); }
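// Parallel.Invoke above blocks until all five probes return, but every probe appends to the
// shared response.nestedServices, and List<T>.Add is not thread-safe. Each probe should
// therefore synchronize before mutating the shared response. A sketch of that convention;
// CheckSmsConnection's real body is not shown here, and ProbeSms is a hypothetical helper
// returning a NestedServices entry (or null on success):
private readonly object _responseLock = new object();

private void CheckSmsConnection(HealthResponse response)
{
    var failure = ProbeSms(); // hypothetical probe; null means the dependency is healthy
    if (failure != null)
    {
        lock (_responseLock)
        {
            response.nestedServices.Add(failure); // List<T>.Add needs the lock under Parallel.Invoke
        }
    }
}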
public void SetupSegment(List<Wall2D> wallList, float wallThick, VectorLine outline, Parallel parallel) { //this.walls.AddRange(wallList); List<Color> colors = new List<Color>(); foreach(Wall2D wall in wallList){ this.walls.Add(wall); colors.Add(wall.Color); } Vector2[] room = DrawHelper.DiscreteToContinue(wallList); // bool isClose = false; // if(wallList[0].StartPos == wallList[wallList.Count-1].EndPos){ // isClose = true; // } bool isClose = false; if(room[0] == room[room.Length-1]){ isClose = true; } if(isClose){ // List<Vector3> outer; // RoomQuad.GetPoint(room, true, out outer); // List<Vector3> inner; // RoomQuad.GetPoint(room, false, out inner); List<Vector2> outer = parallel.Execute(room, wallThick, false, true); List<Vector2> inner = parallel.Execute(room, wallThick, true, true); Add(outer, inner, colors); if(inner.Count > 0){ SetupOutLine(outline, inner); } if(outer.Count > 0){ SetupOutLine(outline, outer); } } else{ List<Vector2> outer = parallel.Execute(room, wallThick, false); List<Vector2> inner = parallel.Execute(room, wallThick, true); Add(outer, inner, colors); if(inner.Count > 0){ SetupOutLine(outline, inner); } if(outer.Count > 0){ SetupOutLine(outline, outer); } } }
public static long ReadSubDirFilesAndGenerateFileInfoCSVs() { long retVal = 0; try { string[] dirListings = System.IO.Directory.GetFiles(pathToRootDestination, "*.txt"); // for each *XXX.txt foreach (string dirListFile in dirListings) { string[] lowLevelDirList = File.ReadAllLines(dirListFile); int dirNameOffset = dirListFile.LastIndexOf("\\"); string fileInfoOutput = string.Empty; if (dirNameOffset > -1) fileInfoOutput = pathToRootDestination + dirListFile.Substring(dirNameOffset) + ".csv"; // a straight copy needs no Parallel.ForEach; the ConcurrentBag constructor takes the list directly ConcurrentBag<string> lowLevelFolderList = new ConcurrentBag<string>(lowLevelDirList); ConcurrentBag<string> actualFileInfo = new ConcurrentBag<string>(); //ConcurrentDictionary<string, string> actualFileInfo = new ConcurrentDictionary<string, string>(); Parallel.ForEach(lowLevelFolderList, (curLowLevelFolder) => //foreach (string curLowLevelFolder in lowLevelFolderList) { try { DirectoryInfo dirInfo = new DirectoryInfo(curLowLevelFolder); FileInfo[] fileInfoArray = dirInfo.GetFiles(); foreach (FileInfo fileInfo in fileInfoArray) { string fileOtherInfo = fileInfo.LastWriteTime + exportFileDelimiter + fileInfo.Length; try { string delimitedOutputString = getDelimitedString(fileInfo.FullName, fileOtherInfo); //if (!actualFileInfo.TryAdd(fileInfo.FullName, fileOtherInfo)) //{ //} actualFileInfo.Add(delimitedOutputString); } catch (Exception ex) { Console.WriteLine("Exception in TryAdd: " + ex.Message); } } } catch (Exception ex) { Console.WriteLine("Exception in Parallel ForEach curLowLevelFolder: " + curLowLevelFolder + " : " + ex.Message); } }); using (StreamWriter sw = new StreamWriter(fileInfoOutput)) { foreach (string fileData in actualFileInfo) { sw.WriteLine(fileData); } } retVal += actualFileInfo.Count; // accumulate how many file-info rows were written } } catch (Exception ex) { Console.WriteLine("Exception: " + ex.Message); } return retVal; }