/// <summary>
/// Pushes every unread ("new") message to its SignalR group.
/// Messages with an empty <c>UsersWhoRead</c> collection are grouped by chat group and
/// broadcast to the corresponding hub group, one group per parallel worker.
/// </summary>
/// <param name="repository">Message store queried for new (unread) messages.</param>
/// <param name="resolver">SignalR dependency resolver used to obtain the hub context.</param>
private static void BroadcastMessagesFunc(Repositories.IMessangerRepository repository, IDependencyResolver resolver)
{
    var connectionManager = resolver.Resolve<IConnectionManager>();
    var hubContext = connectionManager.GetHubContext<Hubs.MessangerHub>();

    // Unread messages grouped by chat group. DefaultIfEmpty() yields a single null
    // "reader" for messages nobody has read yet; 'where usr == null' keeps exactly those.
    // (ToList() never returns null, so no null check on the result is needed.)
    var allGrpMsgs = (from msg in repository.GetAllNewMessges().ToList()
                      from usr in msg.UsersWhoRead.DefaultIfEmpty()
                      where usr == null
                      group new { Msg = msg, UsersWereRead = usr } by msg.Group into g
                      select new
                      {
                          Group = g.Key,
                          Messages = g.Select(m => new ViewModels.MessageViewModel(m.Msg)),
                          // All entries are null here by construction (kept for interface parity).
                          ExcludeUsers = g.Select(u => u.UsersWereRead)
                      }).ToList();

    var parallelOpt = new System.Threading.Tasks.ParallelOptions
    {
        MaxDegreeOfParallelism = Environment.ProcessorCount
    };

    System.Threading.Tasks.Parallel.ForEach(allGrpMsgs, parallelOpt, grpMsgs =>
    {
        if (hubContext != null && grpMsgs.Group != null)
        {
            // NOTE(review): .Wait() blocks a worker thread per group; consider collecting
            // the tasks and awaiting Task.WhenAll upstream instead of blocking here.
            hubContext.Clients.Group(grpMsgs.Group.Name)
                .broadcastMessages(grpMsgs.Group.Name, grpMsgs.Messages).Wait();
        }
    });
}
/// <summary>
/// Process image looking for matchings with specified template.
/// </summary>
///
/// <param name="image">Unmanaged source image to process.</param>
/// <param name="template">Unmanaged template image to search for.</param>
/// <param name="searchZone">Rectangle in source image to search template for.</param>
/// <param name="parallelTasks">Maximum number of tasks used to scan similarity-map rows.</param>
///
/// <returns>Returns array of found template matches. The array is sorted by similarity
/// of found matches in descending order.</returns>
///
/// <exception cref="UnsupportedImageFormatException">The source image has incorrect pixel format.</exception>
/// <exception cref="InvalidImagePropertiesException">Template image is bigger than search zone.</exception>
///
public TemplateMatch[] ProcessImage( UnmanagedImage image, UnmanagedImage template, Rectangle searchZone, int parallelTasks=1 )
{
    // check image format: only 8bpp indexed or 24bpp RGB, and both images must match
    if ( ( ( image.PixelFormat != PixelFormat.Format8bppIndexed ) &&
           ( image.PixelFormat != PixelFormat.Format24bppRgb ) ) ||
         ( image.PixelFormat != template.PixelFormat ) )
    {
        throw new UnsupportedImageFormatException( "Unsupported pixel format of the source or template image." );
    }

    // clip search zone to the image bounds
    Rectangle zone = searchZone;
    zone.Intersect( new Rectangle( 0, 0, image.Width, image.Height ) );

    // search zone's starting point
    int startX = zone.X;
    int startY = zone.Y;

    // get source and template image size
    int sourceWidth = zone.Width;
    int sourceHeight = zone.Height;
    int templateWidth = template.Width;
    int templateHeight = template.Height;

    // check template's size
    if ( ( templateWidth > sourceWidth ) || ( templateHeight > sourceHeight ) )
    {
        throw new InvalidImagePropertiesException( "Template's size should be smaller or equal to search zone." );
    }

    int pixelSize = ( image.PixelFormat == PixelFormat.Format8bppIndexed ) ? 1 : 3;
    int sourceStride = image.Stride;

    // similarity map. its size is increased by 4 from each side to increase
    // performance of non-maximum suppresion
    int mapWidth = sourceWidth - templateWidth + 1;
    int mapHeight = sourceHeight - templateHeight + 1;
    int[,] map = new int[mapHeight + 4, mapWidth + 4];

    // maximum possible difference with template (every byte maximally different)
    int maxDiff = templateWidth * templateHeight * pixelSize * 255;

    // integer similarity threshold
    int threshold = (int) ( similarityThreshold * maxDiff );

    // width of template in bytes
    int templateWidthInBytes = templateWidth * pixelSize;

    // do the job
    unsafe
    {
        byte* baseSrc = (byte*) image.ImageData.ToPointer( );
        byte* baseTpl = (byte*) template.ImageData.ToPointer( );

        int sourceOffset = image.Stride - templateWidth * pixelSize;
        int templateOffset = template.Stride - templateWidth * pixelSize;

        // Scan the similarity-map rows in parallel; each iteration writes only its
        // own row map[y + 2, *], so iterations do not race on the shared map.
        var opt = new System.Threading.Tasks.ParallelOptions { MaxDegreeOfParallelism = parallelTasks };
        System.Threading.Tasks.Parallel.For( 0, mapHeight, opt, y =>
        {
            // for each pixel of the source image
            for ( int x = 0; x < mapWidth; x++ )
            {
                byte* src = baseSrc + sourceStride * ( y + startY ) + pixelSize * ( x + startX );
                byte* tpl = baseTpl;

                // compare template with source image starting from current X,Y
                // (sum of absolute byte differences)
                int dif = 0;

                // for each row of the template
                for ( int i = 0; i < templateHeight; i++ )
                {
                    // for each pixel of the template
                    for ( int j = 0; j < templateWidthInBytes; j++, src++, tpl++ )
                    {
                        int d = *src - *tpl;
                        if ( d > 0 )
                        {
                            dif += d;
                        }
                        else
                        {
                            dif -= d;
                        }
                    }
                    src += sourceOffset;
                    tpl += templateOffset;
                }

                // templates similarity (higher = better match)
                int sim = maxDiff - dif;

                if ( sim >= threshold )
                    map[y + 2, x + 2] = sim;
            }
        });
    }

    // collect interesting points - only those points, which are local maximums
    List<TemplateMatch> matchingsList = new List<TemplateMatch>( );

    // for each row
    for ( int y = 2, maxY = mapHeight + 2; y < maxY; y++ )
    {
        // for each pixel
        for ( int x = 2, maxX = mapWidth + 2; x < maxX; x++ )
        {
            int currentValue = map[y, x];

            // non-maximum suppression in a 5x5 window:
            // for each windows' row
            for ( int i = -2; ( currentValue != 0 ) && ( i <= 2 ); i++ )
            {
                // for each windows' pixel
                for ( int j = -2; j <= 2; j++ )
                {
                    if ( map[y + i, x + j] > currentValue )
                    {
                        currentValue = 0;
                        break;
                    }
                }
            }

            // check if this point is really interesting
            if ( currentValue != 0 )
            {
                matchingsList.Add( new TemplateMatch(
                    new Rectangle( x - 2 + startX, y - 2 + startY, templateWidth, templateHeight ),
                    (float) currentValue / maxDiff ) );
            }
        }
    }

    // convert list to array
    TemplateMatch[] matchings = new TemplateMatch[matchingsList.Count];
    matchingsList.CopyTo( matchings );

    // sort in descending order
    Array.Sort( matchings, new MatchingsSorter( ) );

    return matchings;
}
/// <summary>Reference-assembly stub: invoking it always throws a <see cref="System.NullReferenceException"/>.</summary>
public static System.Threading.Tasks.ParallelLoopResult For(int fromInclusive, int toExclusive, System.Threading.Tasks.ParallelOptions parallelOptions, System.Action<int> body)
    => throw null;
/// <summary>Reference-assembly stub: invoking it always throws a <see cref="System.NullReferenceException"/>.</summary>
public static System.Threading.Tasks.ParallelLoopResult For<TLocal>(int fromInclusive, int toExclusive, System.Threading.Tasks.ParallelOptions parallelOptions, System.Func<TLocal> localInit, System.Func<int, System.Threading.Tasks.ParallelLoopState, TLocal, TLocal> body, System.Action<TLocal> localFinally)
    => throw null;
/// <summary>Reference-assembly stub: invoking it always throws a <see cref="System.NullReferenceException"/>.</summary>
public static System.Threading.Tasks.ParallelLoopResult ForEach<TSource>(System.Collections.Generic.IEnumerable<TSource> source, System.Threading.Tasks.ParallelOptions parallelOptions, System.Action<TSource> body)
    => throw null;
/// <summary>Reference-assembly stub: invoking it always throws a <see cref="System.NullReferenceException"/>.</summary>
public static System.Threading.Tasks.ParallelLoopResult ForEach<TSource, TLocal>(System.Collections.Generic.IEnumerable<TSource> source, System.Threading.Tasks.ParallelOptions parallelOptions, System.Func<TLocal> localInit, System.Func<TSource, System.Threading.Tasks.ParallelLoopState, long, TLocal, TLocal> body, System.Action<TLocal> localFinally)
    => throw null;
/// <summary>
/// Dense matrix multiplication, C = A*B
/// </summary>
/// <param name="other">The dense matrix multiplied to this instance.</param>
/// <param name="result">The product matrix.</param>
/// <param name="options">Parallel options (optional).</param>
/// <remarks>
/// NOTE(review): this base implementation ignores <paramref name="options"/> and simply
/// delegates to the sequential <c>Multiply</c>. Presumably derived classes override it
/// with an actual parallel implementation — confirm against subclasses.
/// </remarks>
public virtual void ParallelMultiply(DenseColumnMajorStorage<T> other, DenseColumnMajorStorage<T> result, System.Threading.Tasks.ParallelOptions options = null)
{
    // Sequential fallback; 'options' is intentionally unused here.
    Multiply(other, result);
}
/// <summary>
/// Decide this matrix as a proposition. Throw an exception if it contains both free variables and modal operators.
/// Use up to System.Environment.ProcessorCount threads to make the decision (when compiled with PARALLELIZE).
/// </summary>
/// <returns>whether this proposition is necessary, contingent or impossible</returns>
public Alethicity Decide()
{
    // Free variables can only be decided when no modal operators are present.
    if ( this.FreeVariables.Any() )
    {
        if ( ContainsModalities )
            throw new EngineException( "This proposition can't be decided; it contains both constants and modal operators." );
        else
            return DecideForFreeVariables();
    }

    Predicates lPredicates = CollectPredicates();

    // Running flags: set as soon as a satisfying / falsifying world is found.
    bool lNotImpossible = false;
    bool lNotNecessary = false;

    // Iteration bounds over predicate interpretations and kinds of worlds.
    uint lLastInterpretation = lPredicates.LastInterpretation;
    uint lLastKindOfWorld = lPredicates.LastKindOfWorld;
    uint lFirstNonemptyWorld = lPredicates.FirstNonemptyWorld;
    uint lFirstInterpretation = lPredicates.FirstInterpretation;
#if SALTARELLE
    int lStatusInterval = StatusInterval;
#endif
#if PARALLELIZE
    System.Threading.Tasks.ParallelOptions lParallelOptions = new System.Threading.Tasks.ParallelOptions();
    System.Threading.CancellationTokenSource lCancellationTokenSource = new System.Threading.CancellationTokenSource();
    // Cancellation token lets the parallel loop stop early once contingency is proven.
    lParallelOptions.CancellationToken = lCancellationTokenSource.Token;
    lParallelOptions.MaxDegreeOfParallelism = System.Environment.ProcessorCount;
    // NOTE(review): the catch/finally matching this 'try' (and the final result
    // aggregation of lNotImpossible/lNotNecessary) lies outside this chunk.
    try
    {
#endif
    if ( ContainsModalities )
    {
#if PARALLELIZE
        // Parallel.For requires long bounds here; the interpretation index is
        // converted back to uint inside the loop body.
        System.Threading.Tasks.Parallel.For(
            Convert.ToInt64( lPredicates.FirstInterpretation ),
            Convert.ToInt64( lLastInterpretation ) + 1,
            lParallelOptions,
            ( fInterpretation ) =>
        {
            uint lInterpretation = Convert.ToUInt32( fInterpretation );
#else
        for ( uint lInterpretation = lFirstInterpretation; lInterpretation <= lLastInterpretation; lInterpretation++ )
        {
#endif
#if SALTARELLE
            // Periodic progress reporting for the Saltarelle (browser) build.
            if ( ( lInterpretation - lFirstInterpretation ) % lStatusInterval == 0 )
                Utility.Status( String.Format( "Deciding... {0:n0} of {1:n0} interpretations of predicates tested.", lInterpretation - lFirstInterpretation, lLastInterpretation - lFirstInterpretation + 1 ) );
#endif
            foreach ( uint lKindOfWorld in lPredicates.KindsOfWorlds( lInterpretation ) )
            {
                if ( this.TrueIn( lInterpretation, lKindOfWorld, lPredicates ) )
                    lNotImpossible = true;
                else
                    lNotNecessary = true;

                // End the decision once it has been determined that the proposition is neither necessary nor impossible.
                // Further evaluation will not change the outcome.
                if ( lNotImpossible && lNotNecessary )
#if PARALLELIZE
                    lCancellationTokenSource.Cancel();
            }
        } );
#else
                    return Alethicity.Contingent;
            }
        }
#endif
}
/// <summary>
/// Create the required number of offspring genomes, using specieStatsArr as the basis for selecting how
/// many offspring are produced from each species.
/// </summary>
/// <param name="specieStatsArr">Per-specie statistics (selection sizes and offspring counts).</param>
/// <param name="offspringCount">Total number of offspring to produce (also used for stats bookkeeping
/// and to presize the result list).</param>
/// <returns>The list of newly created offspring genomes.</returns>
private List<TGenome> CreateOffspring(SpecieStats[] specieStatsArr, int offspringCount)
{
    // Build a RouletteWheelLayout for selecting species for cross-species reproduction.
    // While we're in the loop we also pre-build a RouletteWheelLayout for each specie;
    // Doing this before the main loop means we have RouletteWheelLayouts available for
    // all species when performing cross-specie matings.
    int specieCount = specieStatsArr.Length;
    double[] specieFitnessArr = new double[specieCount];
    RouletteWheelLayout[] rwlArr = new RouletteWheelLayout[specieCount];

    // Count of species with non-zero selection size.
    // If this is exactly 1 then we skip inter-species mating. One is a special case because for 0 the
    // species all get an even chance of selection, and for >1 we can just select normally.
    int nonZeroSpecieCount = 0;
    for (int i = 0; i < specieCount; i++)
    {
        // Array of probabilities for specie selection. Note that some of these probabilites can be zero,
        // but at least one of them won't be.
        SpecieStats inst = specieStatsArr[i];
        specieFitnessArr[i] = inst._selectionSizeInt;
        if (0 != inst._selectionSizeInt)
        {
            nonZeroSpecieCount++;
        }

        // For each specie we build a RouletteWheelLayout for genome selection within
        // that specie. Fitter genomes have higher probability of selection.
        List<TGenome> genomeList = _specieList[i].GenomeList;
        double[] probabilities = new double[inst._selectionSizeInt];
        for (int j = 0; j < inst._selectionSizeInt; j++)
        {
            probabilities[j] = genomeList[j].EvaluationInfo.Fitness;
        }
        rwlArr[i] = new RouletteWheelLayout(probabilities);
    }

    // Complete construction of RouletteWheelLayout for specie selection.
    RouletteWheelLayout rwlSpecies = new RouletteWheelLayout(specieFitnessArr);

    // Produce offspring from each specie in turn and store them in offspringList.
    List<TGenome> offspringList = new List<TGenome>(offspringCount);
    for (int specieIdx = 0; specieIdx < specieCount; specieIdx++)
    {
        SpecieStats inst = specieStatsArr[specieIdx];
        List<TGenome> genomeList = _specieList[specieIdx].GenomeList;

        // Get RouletteWheelLayout for genome selection.
        RouletteWheelLayout rwl = rwlArr[specieIdx];

        // --- Produce the required number of offspring from asexual reproduction.
        // (Removed an unused ParallelOptions local that hard-coded MaxDegreeOfParallelism
        // to 8 but was never passed to any parallel loop.)
        for (int i = 0; i < inst._offspringAsexualCount; i++)
        {
            int genomeIdx = RouletteWheel.SingleThrow(rwl, _rng);
            TGenome offspring = genomeList[genomeIdx].CreateOffspring(_currentGeneration);
            if (SharedParams.SharedParams.USEFITNESSBANK)
            {
                // Seed the offspring's fitness from its parent ("fitness bank" mode).
                offspring.EvaluationInfo.SetFitness(genomeList[genomeIdx].EvaluationInfo.Fitness);
            }
            offspringList.Add(offspring);
        }
        _stats._asexualOffspringCount += (ulong)inst._offspringAsexualCount;

        // --- Produce the required number of offspring from sexual reproduction.
        // Cross-specie mating.
        // If nonZeroSpecieCount is exactly 1 then we skip inter-species mating. One is a special case because
        // for 0 the species all get an even chance of selection, and for >1 we can just select species normally.
        int crossSpecieMatings = nonZeroSpecieCount == 1 ? 0 :
            (int)Utilities.ProbabilisticRound(_eaParams.InterspeciesMatingProportion * inst._offspringSexualCount, _rng);
        _stats._sexualOffspringCount += (ulong)(inst._offspringSexualCount - crossSpecieMatings);
        _stats._interspeciesOffspringCount += (ulong)crossSpecieMatings;

        // An index that keeps track of how many offspring have been produced in total.
        int matingsCount = 0;
        for (; matingsCount < crossSpecieMatings; matingsCount++)
        {
            TGenome offspring = CreateOffspring_CrossSpecieMating(rwl, rwlArr, rwlSpecies, specieIdx, genomeList);
            offspringList.Add(offspring);
        }

        // For the remainder we use normal intra-specie mating.
        // Test for special case - we only have one genome to select from in the current specie.
        if (1 == inst._selectionSizeInt)
        {
            // Fall-back to asexual reproduction.
            for (; matingsCount < inst._offspringSexualCount; matingsCount++)
            {
                int genomeIdx = RouletteWheel.SingleThrow(rwl, _rng);
                TGenome offspring = genomeList[genomeIdx].CreateOffspring(_currentGeneration);
                offspringList.Add(offspring);
            }
        }
        else
        {
            // Remainder of matings are normal within-specie.
            for (; matingsCount < inst._offspringSexualCount; matingsCount++)
            {
                // Select parents. SelectRouletteWheelItem() guarantees parent2Idx != parent1Idx.
                int parent1Idx = RouletteWheel.SingleThrow(rwl, _rng);
                TGenome parent1 = genomeList[parent1Idx];

                // Remove selected parent from set of possible outcomes.
                RouletteWheelLayout rwlTmp = rwl.RemoveOutcome(parent1Idx);
                if (0.0 != rwlTmp.ProbabilitiesTotal)
                {
                    // Get the two parents to mate.
                    int parent2Idx = RouletteWheel.SingleThrow(rwlTmp, _rng);
                    TGenome parent2 = genomeList[parent2Idx];
                    TGenome offspring = parent1.CreateOffspring(parent2, _currentGeneration);
                    offspringList.Add(offspring);
                }
                else
                {
                    // No other parent has a non-zero selection probability (they all have zero fitness).
                    // Fall back to asexual reproduction of the single genome with a non-zero fitness.
                    TGenome offspring = parent1.CreateOffspring(_currentGeneration);
                    offspringList.Add(offspring);
                }
            }
        }
    }
    _stats._totalOffspringCount += (ulong)offspringCount;
    return offspringList;
}
/// <summary>
/// Dense matrix multiplication, C = A*B
/// </summary>
/// <param name="other">The dense matrix multiplied to this instance.</param>
/// <param name="options">Parallel options (optional).</param>
/// <returns>C = A*B</returns>
/// <exception cref="ArgumentException">The inner matrix dimensions do not agree.</exception>
public DenseColumnMajorStorage<T> ParallelMultiply(DenseColumnMajorStorage<T> other, System.Threading.Tasks.ParallelOptions options = null)
{
    // Inner dimensions must agree: (m x k) * (k x n).
    if (this.columnCount != other.RowCount)
    {
        throw new ArgumentException(Resources.MatrixDimensions, "other");
    }

    var productRows = this.rowCount;
    var productCols = other.columnCount;

    // Allocate the result and delegate to the in-place overload.
    var product = DenseColumnMajorStorage<T>.Create(productRows, productCols);
    ParallelMultiply(other, product, options);
    return product;
}
/// <summary>
/// Executes a for loop in which iterations may run in parallel, loop options can
/// be configured, and the state of the loop can be monitored and manipulated.
/// </summary>
public static SystemParallelLoopResult For(int fromInclusive, int toExclusive, SystemParallelOptions parallelOptions, Action<int, SystemParallelLoopState> body)
{
    // Report the uncontrolled invocation first, then delegate to the real API.
    ExceptionProvider.ThrowUncontrolledInvocationException(nameof(SystemParallel.For));

    var loopResult = SystemParallel.For(fromInclusive, toExclusive, parallelOptions, body);
    return loopResult;
}
/// <summary>
/// Executes each of the provided actions, possibly in parallel, unless the operation is cancelled by the user.
/// </summary>
public static void Invoke(SystemParallelOptions parallelOptions, params Action[] actions)
{
    // Report the uncontrolled invocation (see ExceptionProvider), then delegate
    // to the real System.Threading.Tasks.Parallel.Invoke.
    ExceptionProvider.ThrowUncontrolledInvocationException(nameof(SystemParallel.Invoke));
    SystemParallel.Invoke(parallelOptions, actions);
}
/// <summary>
/// Executes a foreach operation on a <see cref="OrderablePartitioner{TSource}"/>
/// in which iterations may run in parallel, loop options can be configured, and
/// the state of the loop can be monitored and manipulated.
/// </summary>
public static SystemParallelLoopResult ForEach<TSource>(OrderablePartitioner<TSource> source, SystemParallelOptions parallelOptions, Action<TSource, SystemParallelLoopState, long> body)
{
    // Report the uncontrolled invocation first, then delegate to the real API.
    ExceptionProvider.ThrowUncontrolledInvocationException(nameof(SystemParallel.ForEach));

    var loopResult = SystemParallel.ForEach(source, parallelOptions, body);
    return loopResult;
}
/// <summary>
/// Process image looking for matchings with specified template.
/// </summary>
/// <param name="image">Unmanaged source image to process.</param>
/// <param name="template">Unmanaged template image to search for.</param>
/// <param name="searchZone">Rectangle in source image to search template for.</param>
/// <param name="parallelTasks">Maximum number of tasks used to scan similarity-map rows.</param>
/// <returns>Array of found template matches, sorted by similarity in descending order.</returns>
/// <exception cref="UnsupportedImageFormatException">The source image has incorrect pixel format.</exception>
/// <exception cref="InvalidImagePropertiesException">Template image is bigger than search zone.</exception>
public TemplateMatch[] ProcessImage(UnmanagedImage image, UnmanagedImage template, Rectangle searchZone, int parallelTasks = 1)
{
    // check image format
    if (((image.PixelFormat != PixelFormat.Format8bppIndexed) &&
         (image.PixelFormat != PixelFormat.Format24bppRgb)) ||
        (image.PixelFormat != template.PixelFormat))
    {
        throw new UnsupportedImageFormatException("Unsupported pixel format of the source or template image.");
    }

    // clip search zone
    var zone = searchZone;
    zone.Intersect(new Rectangle(0, 0, image.Width, image.Height));

    // search zone's starting point
    var startX = zone.X;
    var startY = zone.Y;

    // get source and template image size
    var sourceWidth = zone.Width;
    var sourceHeight = zone.Height;
    var templateWidth = template.Width;
    var templateHeight = template.Height;

    // check template's size
    if ((templateWidth > sourceWidth) || (templateHeight > sourceHeight))
    {
        throw new InvalidImagePropertiesException("Template's size should be smaller or equal to search zone.");
    }

    var pixelSize = (image.PixelFormat == PixelFormat.Format8bppIndexed) ? 1 : 3;
    var sourceStride = image.Stride;

    // similarity map. its size is increased by 4 from each side to increase
    // performance of non-maximum suppresion
    var mapWidth = sourceWidth - templateWidth + 1;
    var mapHeight = sourceHeight - templateHeight + 1;
    var map = new int[mapHeight + 4, mapWidth + 4];

    // maximum possible difference with template
    var maxDiff = templateWidth * templateHeight * pixelSize * 255;

    // integer similarity threshold
    var threshold = (int)(this.similarityThreshold * maxDiff);

    // width of template in bytes
    var templateWidthInBytes = templateWidth * pixelSize;

    // do the job
    unsafe
    {
        var baseSrc = (byte *)image.ImageData.ToPointer( );
        var baseTpl = (byte *)template.ImageData.ToPointer( );
        var sourceOffset = image.Stride - templateWidth * pixelSize;
        var templateOffset = template.Stride - templateWidth * pixelSize;

        // Scan the similarity-map rows in parallel, honouring 'parallelTasks'.
        // This is safe: every iteration writes only its own row map[y + 2, *].
        // (The previous revision left the loop sequential — silently ignoring
        // 'parallelTasks' — and swallowed AccessViolationException, which the CLR
        // does not deliver to catch blocks since .NET 4 anyway.)
        var opt = new System.Threading.Tasks.ParallelOptions { MaxDegreeOfParallelism = parallelTasks };
        System.Threading.Tasks.Parallel.For(0, mapHeight, opt, y =>
        {
            // for each pixel of the source image
            for (var x = 0; x < mapWidth; x++)
            {
                var src = baseSrc + sourceStride * (y + startY) + pixelSize * (x + startX);
                var tpl = baseTpl;

                // compare template with source image starting from current X,Y
                var dif = 0;

                // for each row of the template
                for (var i = 0; i < templateHeight; i++)
                {
                    // for each pixel of the template
                    for (var j = 0; j < templateWidthInBytes; j++, src++, tpl++)
                    {
                        var d = *src - *tpl;
                        if (d > 0)
                        {
                            dif += d;
                        }
                        else
                        {
                            dif -= d;
                        }
                    }
                    src += sourceOffset;
                    tpl += templateOffset;
                }

                // templates similarity
                var sim = maxDiff - dif;

                if (sim >= threshold)
                {
                    map[y + 2, x + 2] = sim;
                }
            }
        });
    }

    // collect interesting points - only those points, which are local maximums
    var matchingsList = new List<TemplateMatch>( );

    // for each row
    for (int y = 2, maxY = mapHeight + 2; y < maxY; y++)
    {
        // for each pixel
        for (int x = 2, maxX = mapWidth + 2; x < maxX; x++)
        {
            var currentValue = map[y, x];

            // non-maximum suppression over a 5x5 window:
            // for each windows' row
            for (var i = -2; (currentValue != 0) && (i <= 2); i++)
            {
                // for each windows' pixel
                for (var j = -2; j <= 2; j++)
                {
                    if (map[y + i, x + j] > currentValue)
                    {
                        currentValue = 0;
                        break;
                    }
                }
            }

            // check if this point is really interesting
            if (currentValue != 0)
            {
                matchingsList.Add(new TemplateMatch(
                    new Rectangle(x - 2 + startX, y - 2 + startY, templateWidth, templateHeight),
                    (float)currentValue / maxDiff));
            }
        }
    }

    // convert list to array
    var matchings = new TemplateMatch[matchingsList.Count];
    matchingsList.CopyTo(matchings);

    // sort in descending order
    Array.Sort(matchings, new MatchingsSorter( ));

    return(matchings);
}
/// <summary>
/// Fetches a 2ch/5ch thread as HTML and converts it to the classic .dat format (Shift_JIS bytes).
/// Handles both the new CGI HTML layout and the old one, plus bbspink response-number gaps.
/// </summary>
/// <param name="URI">Thread URL; "2ch.net" is rewritten to "5ch.net" first.</param>
/// <param name="range">Byte range for differential fetches; negative means fetch everything.</param>
/// <param name="UA">User-agent string passed through to the differential fetch.</param>
/// <param name="CRReplace">When true, un-escapes "&amp;copy;" in the title.</param>
/// <param name="LastMod">Optional Last-Modified value for differential fetches.</param>
/// <returns>
/// The thread as Shift_JIS .dat bytes; <c>new byte[]{0}</c> signals an error/dead thread and
/// <c>new byte[]{0,0}</c> signals "nothing to convert" (thread still alive / not archived).
/// </returns>
static public Byte[] Gethtml(String URI, int range, String UA, bool CRReplace, String LastMod = null)
{
    URI = URI.Replace("2ch.net", "5ch.net");
    if (ViewModel.Setting.CEExternalRead)
    {
        // External reader configured: delegate the whole conversion.
        return HTMLTranceOutRegex(URI, range, UA, LastMod);
    }
    using (WebClient get = new WebClient())
    {
        get.Headers["User-Agent"] = ViewModel.Setting.UserAgent4;
        try
        {
            if (ViewModel.Setting.ProxyAddress != "")
                get.Proxy = new WebProxy(ViewModel.Setting.ProxyAddress);
            using (System.IO.StreamReader html = new System.IO.StreamReader(get.OpenRead(URI), Encoding.GetEncoding("Shift_JIS")))
            {
                String title = "もうずっと人大杉", ketu = "";
                // StringBuilder used to build the .dat output
                var Builddat = new StringBuilder(510 * 1024);
                bool alive = true, NewCGI = false;
                // Search for the thread title; an unterminated <title> marks the new CGI layout.
                for (String line = html.ReadLine(); !html.EndOfStream; line = html.ReadLine())
                {
                    if (Regex.IsMatch(line, @"<title>(.+?)<\/title>"))
                    {
                        title = Regex.Match(line, @"<title>(.+?)<\/title>").Groups[1].Value;
                        break;
                    }
                    else if (Regex.IsMatch(line, @"<title>(.+?)$"))
                    {
                        title = Regex.Match(line, @"<title>(.+?)$").Groups[1].Value;
                        NewCGI = true;
                        break;
                    }
                }
                // Error pages / deleted threads: bail out with the single-zero error marker.
                if (Regex.IsMatch(title, @"(5ちゃんねる error \d+|もうずっと人大杉|datが存在しません.削除されたかURL間違ってますよ)"))
                    return new byte[] { 0 };
                if (Regex.IsMatch(title, @"(2|5)ch\.net\s(\[\d+\])"))
                {
                    var tmatch = Regex.Match(title, @"(2|5)ch\.net\s(\[\d+\])").Groups;
                    title = title.Replace(tmatch[0].Value, $"{tmatch[1].Value}ch.net\t {tmatch[2].Value}");
                }
                if (CRReplace)
                    title = title.Replace("©", "©");
                // Branch between the new CGI HTML layout and the old one.
                if (NewCGI)
                {
                    String line = html.ReadLine();
                    // Scan forward to the thread body.
                    do
                    {
                        if (Regex.IsMatch(line, @"<d(?:iv|l) class=.(?:thread|post).+?>"))
                            break;
                        line = html.ReadLine();
                    } while (!html.EndOfStream);
                    // Thread-alive check: only archived/over-limit threads are converted.
                    if (!ViewModel.Setting.SkipAliveCheck)
                    {
                        if (Regex.IsMatch(line, @"<div class=" + '"' + @"[a-zA-Z\s]+?" + '"' + @">(.+?過去ログ倉庫.+?|レス数が\d{3,}を超えています.+?(書き込み.*?|表.?示)でき.+?)</div>") == false)
                        {
                            return new byte[] { 0, 0 };
                        }
                    }
                    var Bres = new StringBuilder(5 * 1024);
                    // For handling bbspink response-number gaps.
                    bool pink = URI.Contains("bbspink.com");
                    int datResnumber = 1, htmlResnumber = 0;
                    // Thread creation time (unix seconds) parsed from the URL.
                    long ThreadTime = long.Parse(Regex.Match(URI, @"/(\d{9,})").Groups[1].Value);
                    var ResMatches = Regex.Matches(line, @"<(?:div|dl) class=.post. id=.\d.+?>(.+?(?:</div></div>|</dd></dl>))");
                    foreach (Match Res in ResMatches)
                    {
                        Match date = Regex.Match(Res.Groups[1].Value, @"<(?:div|span) class=.date.+?>(.+?(?:</span><span class=" + '"' + @"\w+?" + '"' + @">.*?)?)</(?:div|span)>(?:<(?:div|span) class=.be\s.+?.>(.+?)</(?:div|span)>)?");
                        String number = Regex.Match(Res.Groups[1].Value, @"<(?:div|span) class=.number.+?>(\d{1,5})(?: : )?</(?:div|span)>").Groups[1].Value;
                        // Detect "0 / NG" placeholder posts and skip them.
                        if (number == "0" && date.Groups[1].Value == "NG")
                        {
                            continue;
                        }
                        // Detect skipped response numbers in the HTML (bbspink) and pad the
                        // .dat with deleted-post placeholders to keep numbering aligned.
                        if (pink && int.TryParse(number, out htmlResnumber) && datResnumber < htmlResnumber)
                        {
                            for (int j = htmlResnumber - datResnumber; j > 0; --j)
                            {
                                Builddat.Append("うふ~ん<>うふ~ん<>うふ~ん ID:DELETED<>うふ~ん<>うふ~ん<>\n");
                            }
                            datResnumber = htmlResnumber;
                        }
                        String name = Regex.Match(Res.Groups[1].Value, $"<(?:div|span) class=.name.+?>(.+?(?:</b>|</a>))</(?:div|span)>").Groups[1].Value;
                        // Remove the font color markup when the mail field is empty.
                        if (!name.Contains("<a href=" + '"' + "mailto:"))
                        {
                            name = Regex.Replace(name, @"<font color=.green.>", "");
                            name = name.Replace("</font>", "");
                        }
                        // Strip the span tag around the ID part of the date field.
                        String dateid = date.Groups[1].Value;
                        if (dateid.Contains("</span><span "))
                        {
                            dateid = Regex.Replace(dateid, $"</span><span class={'"'}" + @"\w+?" + $"{'"'}>", " ");
                        }
                        // When the date/ID shows as "NG NG", synthesize a timestamp from the
                        // thread creation time instead.
                        if (dateid.Contains("NG NG"))
                        {
                            DateTime UnixEpoch = new DateTime(1970, 1, 1, 0, 0, 0, DateTimeKind.Utc);
                            UnixEpoch = UnixEpoch.AddSeconds(ThreadTime);
                            String time = UnixEpoch.ToLocalTime().ToString("yyyy/MM/dd(ddd) HH:mm:ss.00");
                            dateid = time + " ID:NG0";
                        }
                        // Rewrite "be" profile links into javascript:be(...) form.
                        String be = "";
                        if (!string.IsNullOrEmpty(date.Groups[2].Value))
                        {
                            var mb = Regex.Match(date.Groups[2].Value, @"<a href.+?(\d{2,}).+?>(.+)$");
                            be = $" <a href={'"'}javascript:be({mb.Groups[1].Value});{'"'}>{mb.Groups[2].Value}";
                        }
                        String message = Regex.Match(Res.Groups[1].Value, @"<d(?:iv|d) class=.(?:message|thread_in).+?>(?:<span class=.escaped.>)?(.+?)(?:</span>)?(?:</div></div>|</dd></dl>)").Groups[1].Value;
                        // Fix anchor (>>n) links: absolute thread URL -> relative ../test/read.cgi/... form.
                        Bres.Append(message);
                        foreach (Match item in Regex.Matches(message, @"(<a href=.)(?:https?:)?//\w+\.((?:2|5)ch\.net|bbspink\.com)(/test/read.cgi/\w+/\d+/\d{1,4}.\s.+?>>>\d{1,5}</a>)"))
                        {
                            Bres.Replace(item.Groups[0].Value, item.Groups[1].Value + ".." + item.Groups[3].Value);
                        }
                        // Fix oekaki (drawing) links: jump redirector -> inline <img>.
                        foreach (Match item in Regex.Matches(message, $@"<a\s(?:class={'"'}image{'"'}\s)?href=" + '"' + @"(?:https?:)?//jump.(?:2|5)ch\.net/\?(https?://[a-zA-Z\d]+?\.8ch.net\/.+?\.\w+?)" + '"' + @">https?://[a-zA-Z\d]+?\.8ch\.net\/.+?\.\w+?</a>"))
                        {
                            Bres.Replace(item.Groups[0].Value, "<img src=" + '"' + item.Groups[1].Value + '"' + ">");
                        }
                        // Pad with spaces when the body lacks leading/trailing whitespace (e.g. p53).
                        if (!Regex.IsMatch(message, @"^\s.+\s$"))
                        {
                            Bres.Insert(0, " ");
                            Bres.Append(" ");
                        }
                        // Assemble the pseudo-HTML response and convert it to a .dat line.
                        Bres.Insert(0, ":" + dateid + be + "<dd>");
                        Bres.Insert(0, "<dt>" + number + " :" + name);
                        Bres.Append("<br><br>");
                        Builddat.Append(html2dat(Bres.ToString()));
                        // The thread title is appended only to the first converted response.
                        if (!String.IsNullOrEmpty(title))
                        {
                            Builddat.Append(title + "\n");
                            title = "";
                        }
                        else
                            Builddat.Append("\n");
                        Bres.Clear();
                        datResnumber++;
                    }
                    ketu = Regex.Match(line, @"<(?:div|li) class=.+?>(?<datsize>\d+?)KB</(?:div|li)>").Groups[1].Value;
                }
                else
                {
                    if (!ViewModel.Setting.SkipAliveCheck)
                    {
                        // Check whether the thread has fallen to the archive (dat-ochi).
                        for (String line = html.ReadLine(); !html.EndOfStream; line = html.ReadLine())
                        {
                            if (Regex.IsMatch(line, @"<div.*?>(.+?過去ログ倉庫.+?|レス数が\d{3,}を超えています.+?(書き込み.*?でき|表示しません).+?)</div>"))
                            {
                                alive = false;
                                break;
                            }
                            else if (Regex.IsMatch(line, @"<h1 style.+>.+?<\/h1>"))
                            {
                                alive = true;
                                break;
                            }
                        }
                        // Still alive: nothing to convert.
                        if (alive)
                            return new byte[] { 0, 0 };
                    }
                    String ResHtml = html.ReadToEnd();
                    // Responses are converted in parallel (4 workers) into a dictionary keyed
                    // by response index, then concatenated in order below.
                    System.Collections.Concurrent.ConcurrentDictionary<int, string> Trancedat = new System.Collections.Concurrent.ConcurrentDictionary<int, string>(4, 1005);
                    System.Threading.Tasks.ParallelOptions option = new System.Threading.Tasks.ParallelOptions();
                    option.MaxDegreeOfParallelism = 4;
                    System.Threading.Tasks.Parallel.ForEach<Match>(Regex.Matches(ResHtml, @"<dt>(\d{1,4})\s:.+?<br><br>(?:\r|\n)").Cast<Match>(), option, match =>
                    {
                        Trancedat[int.Parse(match.Groups[1].Value) - 1] = html2dat(match.Groups[0].Value) + "\n";
                    });
                    // First response carries the thread title.
                    Builddat.Append(Trancedat[0].Substring(0, Trancedat[0].Length - 1) + title + "\n");
                    for (int i = 1; i < Trancedat.Count; ++i)
                        Builddat.Append(Trancedat[i]);
                    if (!ViewModel.Setting.AllReturn || range > -1)
                        ketu = Regex.Match(ResHtml, @"<font\scolor.+?><b>(\d+)\sKB<\/b><\/font>").Groups[1].Value;
                }
                // Encode to Shift_JIS, optionally rewriting 5ch/https links first.
                Byte[] Bdat = Encoding.GetEncoding("Shift_JIS").GetBytes((ViewModel.Setting.Replace5chURI || ViewModel.Setting.ReplaceHttpsLink) ? (HTMLtoDat.ResContentReplace(Builddat.ToString())) : (Builddat.ToString()));
                if (ViewModel.Setting.AllReturn || range < 0)
                    return Bdat;
                int size;
                try
                {
                    size = int.Parse(ketu);
                }
                catch (FormatException)
                {
                    size = 0;
                }
                // Differential (range) response handling.
                return DifferenceDetection(Bdat, LastMod, UA, range, size);
            }
        }
        catch (System.Threading.ThreadAbortException e)
        {
            // NOTE(review): 'throw e;' resets the stack trace — 'throw;' would preserve it.
            throw e;
        }
        catch (Exception err)
        {
            ViewModel.OnModelNotice(URI + "をHTMLから変換中にエラーが発生しました。\n" + err.ToString());
            return new byte[] { 0 };
        }
    }
}
/// <summary>
/// Parses every generated *.cs file under <paramref name="sourceDirectory"/> (in parallel),
/// rewrites each tree into an "obj/*.modified.cs" cleaned form, and compiles the cleaned
/// trees into a WinRT-metadata compilation.
/// </summary>
/// <param name="sourceDirectory">Root directory scanned recursively for *.cs sources.</param>
/// <param name="interopFileName">Interop assembly referenced by the compilation.</param>
/// <param name="remaps">Name remappings applied while cleaning the syntax trees.</param>
/// <param name="typeImports">Stored on the resulting ClangSharpSourceCompilation.</param>
/// <param name="requiredNamespaces">Used to move names to their correct namespaces and while cleaning.</param>
/// <param name="reducePointerLevels">NOTE(review): unused in this method body — confirm whether it should
/// be forwarded to the cleaner or removed from the signature.</param>
/// <returns>The wrapped Roslyn compilation plus the type-import map.</returns>
/// <exception cref="FileNotFoundException">The netstandard reference DLL could not be located.</exception>
public static ClangSharpSourceCompilation Create(
    string sourceDirectory,
    string interopFileName,
    Dictionary<string, string> remaps,
    Dictionary<string, string> typeImports,
    Dictionary<string, string> requiredNamespaces,
    HashSet<string> reducePointerLevels)
{
    sourceDirectory = Path.GetFullPath(sourceDirectory);
    var netstandardPath = FindNetstandardDllPath();
    if (!File.Exists(netstandardPath))
    {
        throw new FileNotFoundException("Failed to find the netstandard DLL.");
    }

    // Metadata references for the final compilation.
    List<MetadataReference> refs = new List<MetadataReference>();
    refs.Add(MetadataReference.CreateFromFile(interopFileName));
    refs.Add(MetadataReference.CreateFromFile(netstandardPath));

    List<SyntaxTree> syntaxTrees = new List<SyntaxTree>();
    var sourceFiles = Directory.GetFiles(sourceDirectory, "*.cs", SearchOption.AllDirectories);

    // Parse the source files in parallel; the shared list is guarded by a lock.
    System.Threading.Tasks.ParallelOptions opt = new System.Threading.Tasks.ParallelOptions() { MaxDegreeOfParallelism = Environment.ProcessorCount * 2 };
    System.Threading.Tasks.Parallel.ForEach(sourceFiles, opt, (sourceFile) =>
    {
        // Skip outputs of a previous run.
        if (sourceFile.EndsWith("modified.cs"))
        {
            return;
        }
        string fileToRead = Path.GetFullPath(sourceFile);
        var tree = CSharpSyntaxTree.ParseText(File.ReadAllText(fileToRead), null, fileToRead);
        lock (syntaxTrees)
        {
            syntaxTrees.Add(tree);
        }
    });

    syntaxTrees = NamesToCorrectNamespacesMover.MoveNamesToCorrectNamespaces(syntaxTrees, requiredNamespaces);
    HashSet<string> foundNonEmptyStructs = GetNonEmptyStructs(syntaxTrees);

#if MakeSingleThreaded
    // Debug aid: force the cleaning pass below to run on one thread.
    opt.MaxDegreeOfParallelism = 1;
#endif

    string objDir = Path.Combine(sourceDirectory, "obj");
    Directory.CreateDirectory(objDir);

    // Clean each tree and write the result under obj\, again in parallel.
    List<SyntaxTree> cleanedTrees = new List<SyntaxTree>();
    System.Threading.Tasks.Parallel.ForEach(syntaxTrees, opt, (tree) =>
    {
        // Turn c:\dir\generated\foo.cs into c:\dir\generated\obj\foo.modified.cs
        string modifiedFile = Path.ChangeExtension(tree.FilePath, ".modified.cs");
        string fileWithSubDir = modifiedFile.Substring(sourceDirectory.Length);
        if (fileWithSubDir.StartsWith('\\'))
        {
            fileWithSubDir = fileWithSubDir.Substring(1);
        }
        modifiedFile = Path.Combine(objDir, fileWithSubDir);

        // e.g. c:\dir\generated\obj
        string newSubDir = Path.GetDirectoryName(modifiedFile);
        if (!Directory.Exists(newSubDir))
        {
            Directory.CreateDirectory(newSubDir);
        }

        var cleanedTree = MetadataSyntaxTreeCleaner.CleanSyntaxTree(tree, remaps, requiredNamespaces, foundNonEmptyStructs, modifiedFile);
        File.WriteAllText(modifiedFile, cleanedTree.GetText().ToString());
        lock (cleanedTrees)
        {
            cleanedTrees.Add(cleanedTree);
        }
    });

    CSharpCompilationOptions compilationOptions = new CSharpCompilationOptions(OutputKind.WindowsRuntimeMetadata, allowUnsafe: true);
    var comp = CSharpCompilation.Create(
        null,
        cleanedTrees,
        refs,
        compilationOptions);

    return(new ClangSharpSourceCompilation(comp, typeImports));
}
public static System.Threading.Tasks.ParallelLoopResult ForEach <TSource>(System.Collections.Generic.IEnumerable <TSource> source, System.Threading.Tasks.ParallelOptions parallelOptions, System.Action <TSource, System.Threading.Tasks.ParallelLoopState> body) { return(default(System.Threading.Tasks.ParallelLoopResult)); }
public static System.Threading.Tasks.ParallelLoopResult ForEach <TSource>(System.Collections.Concurrent.Partitioner <TSource> source, System.Threading.Tasks.ParallelOptions parallelOptions, System.Action <TSource> body) { throw null; }
public static System.Threading.Tasks.ParallelLoopResult ForEach <TSource, TLocal>(System.Collections.Concurrent.OrderablePartitioner <TSource> source, System.Threading.Tasks.ParallelOptions parallelOptions, System.Func <TLocal> localInit, System.Func <TSource, System.Threading.Tasks.ParallelLoopState, long, TLocal, TLocal> body, System.Action <TLocal> localFinally) { return(default(System.Threading.Tasks.ParallelLoopResult)); }
public static System.Threading.Tasks.ParallelLoopResult ForEach <TSource, TLocal>(System.Collections.Concurrent.Partitioner <TSource> source, System.Threading.Tasks.ParallelOptions parallelOptions, System.Func <TLocal> localInit, System.Func <TSource, System.Threading.Tasks.ParallelLoopState, TLocal, TLocal> body, System.Action <TLocal> localFinally) { throw null; }
public static System.Threading.Tasks.ParallelLoopResult For(int fromInclusive, int toExclusive, System.Threading.Tasks.ParallelOptions parallelOptions, System.Action <int, System.Threading.Tasks.ParallelLoopState> body) { return(default(System.Threading.Tasks.ParallelLoopResult)); }
public static void Invoke(System.Threading.Tasks.ParallelOptions parallelOptions, params System.Action[] actions) { }
public static System.Threading.Tasks.ParallelLoopResult For(long fromInclusive, long toExclusive, System.Threading.Tasks.ParallelOptions parallelOptions, System.Action <long> body) { return(default(System.Threading.Tasks.ParallelLoopResult)); }
public static System.Threading.Tasks.ParallelLoopResult For(long fromInclusive, long toExclusive, System.Threading.Tasks.ParallelOptions parallelOptions, System.Action <long> body) { throw null; }
public static System.Threading.Tasks.ParallelLoopResult For <TLocal>(long fromInclusive, long toExclusive, System.Threading.Tasks.ParallelOptions parallelOptions, System.Func <TLocal> localInit, System.Func <long, System.Threading.Tasks.ParallelLoopState, TLocal, TLocal> body, System.Action <TLocal> localFinally) { return(default(System.Threading.Tasks.ParallelLoopResult)); }
/// <summary>
/// Micro-benchmark: counts students per class four ways — a naive Parallel.ForEach,
/// a partitioned Parallel.ForEach with thread-local dictionaries, a plain sequential
/// loop, and LINQ GroupBy — printing the elapsed milliseconds of each approach.
/// </summary>
public static void Test()
{
    Student[] Students = new Student[1000000];
    Random rd = new Random();
    for (var i = 0; i < Students.Length; i++)
    {
        Students[i] = new Student()
        {
            ClassName = rd.Next(1, 100).ToString(),
            StudentID = i,
            StudentName = i.ToString()
        };
    }

    Dictionary<string, int> ClassStudentDic = new Dictionary<string, int>();
    // Dictionary<TKey,TValue> is not thread-safe: concurrent writers can lose
    // increments or corrupt the table, so all shared-dictionary mutation below
    // is serialized through this gate.
    object gate = new object();

    Stopwatch sw = Stopwatch.StartNew();
    System.Threading.Tasks.Parallel.ForEach(Students, s =>
    {
        // FIX: the original mutated the shared dictionary without synchronization.
        lock (gate)
        {
            if (ClassStudentDic.ContainsKey(s.ClassName))
            {
                ClassStudentDic[s.ClassName] += 1;
            }
            else
            {
                ClassStudentDic[s.ClassName] = 1;
            }
        }
    });
    sw.Stop();
    Console.WriteLine(string.Format("{0}\t\t{1}\t毫秒", "并行计算", sw.ElapsedMilliseconds));

    ClassStudentDic.Clear();
    System.Threading.Tasks.ParallelOptions option = new System.Threading.Tasks.ParallelOptions();
    // FIX: guard against single-core machines, where ProcessorCount - 1 == 0 is an
    // invalid MaxDegreeOfParallelism and would throw.
    option.MaxDegreeOfParallelism = Math.Max(1, System.Environment.ProcessorCount - 1);
    System.Collections.Concurrent.Partitioner<Student> rangePartitioner = System.Collections.Concurrent.Partitioner.Create<Student>(Students, true);
    sw.Restart();
    System.Threading.Tasks.Parallel.ForEach<Student, Dictionary<string, int>>(
        rangePartitioner,
        option,
        () => new Dictionary<string, int>(), // per-thread local dictionary, no contention in the body
        (student, state, dic) =>
        {
            if (dic.ContainsKey(student.ClassName))
            {
                dic[student.ClassName] += 1;
            }
            else
            {
                dic[student.ClassName] = 1;
            }
            return dic;
        },
        (finalDic) =>
        {
            // FIX: localFinally can run concurrently on several threads, so the merge
            // into the shared dictionary must be locked as well.
            lock (gate)
            {
                foreach (var item in finalDic)
                {
                    if (ClassStudentDic.ContainsKey(item.Key))
                    {
                        ClassStudentDic[item.Key] += item.Value;
                    }
                    else
                    {
                        ClassStudentDic[item.Key] = item.Value;
                    }
                }
            }
        });
    sw.Stop();
    Console.WriteLine(string.Format("{0}\t\t{1}\t毫秒", "并行计算", sw.ElapsedMilliseconds));

    ClassStudentDic.Clear();
    sw.Restart();
    foreach (var s in Students)
    {
        if (ClassStudentDic.ContainsKey(s.ClassName))
        {
            ClassStudentDic[s.ClassName] += 1;
        }
        else
        {
            ClassStudentDic[s.ClassName] = 1;
        }
    }
    sw.Stop();
    Console.WriteLine(string.Format("{0}\t\t{1}\t毫秒", "循环计算", sw.ElapsedMilliseconds));

    sw.Restart();
    var dddd = Students.GroupBy(x => x.ClassName).ToList();
    sw.Stop();
    Console.WriteLine(string.Format("{0}\t\t{1}\t毫秒", "LINQ计算", sw.ElapsedMilliseconds));
}
public static System.Threading.Tasks.ParallelLoopResult ForEach <TSource>(System.Collections.Concurrent.Partitioner <TSource> source, System.Threading.Tasks.ParallelOptions parallelOptions, System.Action <TSource, System.Threading.Tasks.ParallelLoopState> body) { return(default(System.Threading.Tasks.ParallelLoopResult)); }
private static void MarkInactiveUsers(Repositories.IMessangerRepository repo, IDependencyResolver resolver) { var connectionManager = resolver.Resolve<IConnectionManager>(); var hubContext = connectionManager.GetHubContext<Hubs.MessangerHub>(); var inactiveUsers = new List<Common.Models.MessangerUser>(); IQueryable<Common.Models.MessangerUser> users = repo.Users.Online(); foreach (var user in users) { var status = (Common.Models.UserStatus)user.Status; var elapsed = DateTime.UtcNow - user.LastActivity; if (elapsed.TotalMinutes > 15) { user.Status = (int)Common.Models.UserStatus.Inactive; inactiveUsers.Add(user); } } if (inactiveUsers.Count > 0) { var roomGroups = from usr in inactiveUsers from grp in usr.Groups select new { User = usr, Group = grp } into tuple group tuple by tuple.Group into g select new { Group = g.Key, Users = g.Select(t => new UserViewModel(t.User)) }; var parallelOpt = new System.Threading.Tasks.ParallelOptions() { MaxDegreeOfParallelism = Environment.ProcessorCount }; var result = System.Threading.Tasks.Parallel.ForEach(roomGroups, parallelOpt, roomGroup => { if (hubContext != null) hubContext.Clients.Group(roomGroup.Group.Name).markInactive(roomGroup.Users).Wait(); }); //foreach (var roomGroup in roomGroups) //{ // hubContext.Clients.Group(roomGroup.Group.Name).markInactive(roomGroup.Users).Wait(); //} } }
/// <summary> /// Sparse matrix multiplication, C = A*B /// </summary> /// <param name="other">The sparse matrix multiplied to this instance.</param> /// <param name="options">Parallel options (optional).</param> /// <returns>C = A*B</returns> public virtual CompressedColumnStorage <T> ParallelMultiply(CompressedColumnStorage <T> other, System.Threading.Tasks.ParallelOptions options = null) { return(Multiply(other)); }
/// <summary>
/// Entry point for a full FUND simulation: loads the configuration, prepares all CSV
/// output writers and dimension files, executes every configured run (optionally in
/// parallel), and closes the outputs.
/// </summary>
/// <param name="configurationFileName">Path to the simulation configuration XML, or
/// null to use Data\DefaultSimulation.xml next to the executing assembly.</param>
/// <exception cref="ArgumentException">Parallel execution was requested without a
/// per-run random stream.</exception>
/// <remarks>
/// NOTE(review): the writers opened here are only closed on the success path; an
/// exception mid-run leaks them — consider try/finally or using blocks (left as-is
/// to keep this change focused on the random-stream fix below).
/// </remarks>
public static void Run(string configurationFileName)
{
    TextWriter gDisaggregatedCsvFile = null;
    TextWriter lAggregateMarginalDamage = null;
    TextWriter lGlobalInputCsv = null;
    TextWriter lRegionInputCsv = null;
    TextWriter lYearInputCsv = null;
    TextWriter lRegionYearInputCsv = null;
    SimulationManager lSimulationManager;

    // Fall back to the default simulation shipped next to the executable.
    var lDefaultConfigurationFile = Path.Combine(
        Path.Combine(Path.GetDirectoryName(System.Reflection.Assembly.GetExecutingAssembly().Location), "Data"),
        "DefaultSimulation.xml");
    lSimulationManager = new SimulationManager(configurationFileName == null ? lDefaultConfigurationFile : configurationFileName);
    lSimulationManager.Load();

    if (!Directory.Exists(ConsoleApp.OutputPath))
        Directory.CreateDirectory(ConsoleApp.OutputPath);

    var lRandom = GetNewRandom(lSimulationManager);

    // Shared writers are wrapped in TextWriter.Synchronized because runs may write concurrently.
    lAggregateMarginalDamage = TextWriter.Synchronized(new StreamWriter(Path.Combine(ConsoleApp.OutputPath, "Output - Aggregate marginal damage.csv")));
    var TempOutputFile = new StreamWriter(Path.Combine(ConsoleApp.OutputPath, "Output - Temp.csv"));

    if (lSimulationManager.OutputVerbal)
    {
        lAggregateMarginalDamage.Write("Scenario");
        lAggregateMarginalDamage.Write(";");
        lAggregateMarginalDamage.Write("Gas");
        lAggregateMarginalDamage.Write(";");
        lAggregateMarginalDamage.Write("Emissionyear");
        lAggregateMarginalDamage.Write(";");
        lAggregateMarginalDamage.Write("Run");
        lAggregateMarginalDamage.Write(";");
        lAggregateMarginalDamage.Write("Weightingscheme");
        lAggregateMarginalDamage.Write(";");
        lAggregateMarginalDamage.Write("Marginal damage");
        lAggregateMarginalDamage.WriteLine();
    }

    TextWriter lSummaryDamage = TextWriter.Synchronized(new StreamWriter(Path.Combine(ConsoleApp.OutputPath, "Output - Summary damage.csv")));
    if (lSimulationManager.OutputVerbal)
    {
        lSummaryDamage.WriteLine("Scenario;Gas;Emissionyear;Weightingscheme;Bestguess;Mean;TrimMean0.1%;TrimMean1%;TrimMean5%;Median;Std;Var;Skew;Kurt;Min;Max;SE");
    }

    // The disaggregated fact file is only opened when at least one run requests it.
    if (lSimulationManager.Runs.Exists((Run r) => r.OutputDisaggregatedData))
    {
        gDisaggregatedCsvFile = TextWriter.Synchronized(new StreamWriter(Path.Combine(ConsoleApp.OutputPath, "Output - Fact YearRegionSectorWeightingscheme.csv")));
        if (lSimulationManager.OutputVerbal)
        {
            gDisaggregatedCsvFile.Write("Scenario");
            gDisaggregatedCsvFile.Write(";");
            gDisaggregatedCsvFile.Write("Run");
            gDisaggregatedCsvFile.Write(";");
            gDisaggregatedCsvFile.Write("Marginal Gas");
            gDisaggregatedCsvFile.Write(";");
            gDisaggregatedCsvFile.Write("Marginal Emission Year");
            gDisaggregatedCsvFile.Write(";");
            gDisaggregatedCsvFile.Write("Year");
            gDisaggregatedCsvFile.Write(";");
            gDisaggregatedCsvFile.Write("Region");
            gDisaggregatedCsvFile.Write(";");
            gDisaggregatedCsvFile.Write("Sector");
            gDisaggregatedCsvFile.Write(";");
            gDisaggregatedCsvFile.Write("Weightingscheme");
            gDisaggregatedCsvFile.Write(";");
            gDisaggregatedCsvFile.Write("Damage");
            gDisaggregatedCsvFile.WriteLine();
        }
    }

    // Static dimension files.
    using (var lDimGasCsv = new StreamWriter(Path.Combine(ConsoleApp.OutputPath, "Output - Dim Gas.csv")))
    {
        lDimGasCsv.WriteLine("0;C");
        lDimGasCsv.WriteLine("1;CH4");
        lDimGasCsv.WriteLine("2;N2O");
        lDimGasCsv.WriteLine("3;SF6");
    }

    using (var lDimSectorCsv = new StreamWriter(Path.Combine(ConsoleApp.OutputPath, "Output - Dim Sector.csv")))
    {
        lDimSectorCsv.WriteLine("0;eloss;Water");
        lDimSectorCsv.WriteLine("1;eloss;Forests");
        lDimSectorCsv.WriteLine("2;eloss;Heating");
        lDimSectorCsv.WriteLine("3;eloss;Cooling");
        lDimSectorCsv.WriteLine("4;eloss;Agriculture");
        lDimSectorCsv.WriteLine("5;eloss;Dryland");
        lDimSectorCsv.WriteLine("6;eloss;SeaProtection");
        lDimSectorCsv.WriteLine("7;eloss;Imigration");
        lDimSectorCsv.WriteLine("8;sloss;Species");
        lDimSectorCsv.WriteLine("9;sloss;Death");
        lDimSectorCsv.WriteLine("10;sloss;Morbidity");
        lDimSectorCsv.WriteLine("11;sloss;Wetland");
        lDimSectorCsv.WriteLine("12;sloss;Emigration");
        lDimSectorCsv.WriteLine("13;eloss;Hurricane");
        lDimSectorCsv.WriteLine("14;eloss;ExtratropicalStorms");
    }

    using (var lDimScenarioCsv = new StreamWriter(Path.Combine(ConsoleApp.OutputPath, "Output - Dim Scenario.csv")))
    {
        foreach (Scenario lScenario in lSimulationManager.Scenarios)
        {
            // Write Scenario dimension file
            lDimScenarioCsv.Write(lScenario.Id);
            lDimScenarioCsv.Write(";");
            lDimScenarioCsv.Write(lScenario.Name);
            lDimScenarioCsv.WriteLine();
        }
    }

    using (var lDimYearCsv = new StreamWriter(Path.Combine(ConsoleApp.OutputPath, "Output - Dim Year.csv")))
    {
        // Year plus decade ("19xx"), century-decade ("195x") and full-year rollup columns.
        for (int i = 1950; i <= 2300; i++)
        {
            string lYearStr = i.ToString();
            lDimYearCsv.Write(lYearStr);
            lDimYearCsv.Write(";");
            lDimYearCsv.Write(lYearStr.Substring(0, 2));
            lDimYearCsv.Write("xx;");
            lDimYearCsv.Write(lYearStr.Substring(0, 3));
            lDimYearCsv.Write("x;");
            lDimYearCsv.Write(lYearStr.Substring(0, 4));
            lDimYearCsv.WriteLine();
        }
    }

    // Collected concurrently by the run bodies below.
    var lDimEmissionYear = new ConcurrentBag<Timestep>();

    if (lSimulationManager.OutputInputParameters)
    {
        lGlobalInputCsv = TextWriter.Synchronized(new StreamWriter(Path.Combine(ConsoleApp.OutputPath, "Output - Fact Parameter.csv")));
        lRegionInputCsv = TextWriter.Synchronized(new StreamWriter(Path.Combine(ConsoleApp.OutputPath, "Output - Fact ParameterRegion.csv")));
        lYearInputCsv = TextWriter.Synchronized(new StreamWriter(Path.Combine(ConsoleApp.OutputPath, "Output - Fact ParameterYear.csv")));
        lRegionYearInputCsv = TextWriter.Synchronized(new StreamWriter(Path.Combine(ConsoleApp.OutputPath, "Output - Fact ParameterYearRegion.csv")));
    }

    // Parallel execution would share one Random instance across runs, which is why a
    // per-run stream is mandatory in that mode.
    if (lSimulationManager.RunParallel && !lSimulationManager.SameRandomStreamPerRun)
    {
        throw new ArgumentException("Cannot run in parallel but without random stream per run");
    }

    var parallelOptions = new System.Threading.Tasks.ParallelOptions()
    {
        // -1 lets the scheduler pick the degree of parallelism; 1 forces sequential runs.
        MaxDegreeOfParallelism = lSimulationManager.RunParallel ? -1 : 1
    };

    if (lSimulationManager.RunParallel)
    {
        // Warm-up: run the workflow once with best-guess parameters before going parallel.
        var parameterDefinition = new Parameters();
        parameterDefinition.ReadExcelFile(@"Data\Parameter - base.xlsm");

        // Create a new model that inits itself from the parameters just loaded
        var model = new Esmf.Model.ModelTyped<FundWorkflow>();
        model.Run(parameterDefinition.GetBestGuess());
    }

    System.Threading.Tasks.Parallel.ForEach<Run, object>(
        lSimulationManager.Runs,
        parallelOptions,
        () =>
        {
            Thread.CurrentThread.CurrentCulture = CultureInfo.InvariantCulture;
            Thread.CurrentThread.Priority = ThreadPriority.BelowNormal;
            return null;
        },
        (lRun, loopState, dummy) =>
        {
            // Per-run random source when SameRandomStreamPerRun is set; the shared one otherwise.
            var tlRandom = lSimulationManager.SameRandomStreamPerRun ? GetNewRandom(lSimulationManager) : lRandom;

            var lParam = new Parameters();
            foreach (string filename in lRun.Scenario.ExcelFiles)
                lParam.ReadExcelFile(filename);
            Console.WriteLine(lRun.Scenario.Name);

            if (lRun.Mode == RunMode.MarginalRun)
            {
                // BUGFIX: pass tlRandom (per-run stream) instead of the shared lRandom,
                // which the original passed even though it had just computed tlRandom.
                var lMarginalRun = new TMarginalRun(lRun, lRun.MarginalGas, lRun.EmissionYear, ConsoleApp.OutputPath, lParam, tlRandom);
                lDimEmissionYear.Add(lRun.EmissionYear);
                lMarginalRun.AggregateDamageCsv = lAggregateMarginalDamage;
                lMarginalRun.SummaryCsv = lSummaryDamage;
                if (lRun.OutputDisaggregatedData)
                {
                    lMarginalRun.YearRegionSectorWeightingSchemeCsv = gDisaggregatedCsvFile;
                }
                if (lSimulationManager.OutputInputParameters)
                {
                    lMarginalRun.GlobalInputCsv = lGlobalInputCsv;
                    lMarginalRun.RegionInputCsv = lRegionInputCsv;
                    lMarginalRun.YearInputCsv = lYearInputCsv;
                    lMarginalRun.RegionYearInputCsv = lRegionYearInputCsv;
                }

                var watch = new System.Diagnostics.Stopwatch();
                watch.Start();
                lMarginalRun.Run();
                watch.Stop();
            }
            else if (lRun.Mode == RunMode.FullMarginalRun)
            {
                // Sweep emission years 2010-2100 in 5-year steps for each marginal gas.
                MarginalGas[] gases = { MarginalGas.C };
                foreach (MarginalGas gas in gases)
                {
                    for (int emissionyear = 2010; emissionyear <= 2100; emissionyear += 5)
                    {
                        lDimEmissionYear.Add(Timestep.FromYear(emissionyear));
                        Console.WriteLine("Now doing year {0} and gas {1}", emissionyear, gas);

                        // DA: Use MargMain for marginal cost, use Main for total cost and
                        // optimisation modes.
                        // BUGFIX: tlRandom instead of the shared lRandom (see above).
                        var lMarginalRun = new TMarginalRun(lRun, gas, Timestep.FromYear(emissionyear), ConsoleApp.OutputPath, lParam, tlRandom);
                        lMarginalRun.AggregateDamageCsv = lAggregateMarginalDamage;
                        if (lRun.OutputDisaggregatedData)
                        {
                            lMarginalRun.YearRegionSectorWeightingSchemeCsv = gDisaggregatedCsvFile;
                        }
                        if (lSimulationManager.OutputInputParameters)
                        {
                            lMarginalRun.GlobalInputCsv = lGlobalInputCsv;
                            lMarginalRun.RegionInputCsv = lRegionInputCsv;
                            lMarginalRun.YearInputCsv = lYearInputCsv;
                            lMarginalRun.RegionYearInputCsv = lRegionYearInputCsv;
                        }

                        var watch = new System.Diagnostics.Stopwatch();
                        watch.Start();
                        lMarginalRun.Run();
                        watch.Stop();
                    }
                }
            }
            else if (lRun.Mode == RunMode.TotalRun)
            {
                // DA: Use MargMain for marginal cost, use Main for total cost and
                // optimisation modes.
                var lTotalDamageRun = new TotalDamage(lRun, ConsoleApp.OutputPath, lParam, lRun.EmissionYear);
                lTotalDamageRun.AggregateDamageCsv = lAggregateMarginalDamage;
                if (lRun.OutputDisaggregatedData)
                {
                    lTotalDamageRun.YearRegionSectorWeightingSchemeCsv = gDisaggregatedCsvFile;
                }
                if (lSimulationManager.OutputInputParameters)
                {
                    lTotalDamageRun.GlobalInputCsv = lGlobalInputCsv;
                    lTotalDamageRun.RegionInputCsv = lRegionInputCsv;
                    lTotalDamageRun.YearInputCsv = lYearInputCsv;
                    lTotalDamageRun.RegionYearInputCsv = lRegionYearInputCsv;
                }
                lTotalDamageRun.Run();
            }
            return null;
        },
        (dummy) => { return; });

    lSummaryDamage.Close();
    lAggregateMarginalDamage.Close();
    TempOutputFile.Close();

    using (var lDimEmissionYearCsv = new StreamWriter(Path.Combine(ConsoleApp.OutputPath, "Output - Dim Emissionyear.csv")))
    {
        foreach (Timestep emissionyear in lDimEmissionYear.Distinct().OrderBy(i => i.Value))
            lDimEmissionYearCsv.WriteLine("{0};{1}", emissionyear, emissionyear);
    }

    if (lSimulationManager.Runs.Exists((Run run) => run.OutputDisaggregatedData))
    {
        gDisaggregatedCsvFile.Close();
    }

    if (lSimulationManager.OutputInputParameters)
    {
        lGlobalInputCsv.Close();
        lRegionInputCsv.Close();
        lYearInputCsv.Close();
        lRegionYearInputCsv.Close();
    }
}
/// <summary>
/// Parses every C# file under <paramref name="sourceDirectory"/> in parallel,
/// moves names to their required namespaces, writes cleaned "*.modified.cs" trees
/// next to their sources, and compiles them into a WindowsRuntimeMetadata
/// compilation wrapped in a <see cref="ClangSharpSourceCompilation"/>.
/// </summary>
/// <exception cref="FileNotFoundException">The netstandard reference DLL could not be located.</exception>
public static ClangSharpSourceCompilation Create(
    string sourceDirectory,
    string interopFileName,
    Dictionary <string, string> remaps,
    Dictionary <string, string> typeImports,
    Dictionary <string, string> requiredNamespaces)
{
    var netstandardPath = FindNetstandardDllPath();
    if (!File.Exists(netstandardPath))
    {
        throw new FileNotFoundException("Failed to find the netstandard DLL.");
    }

    var references = new List<MetadataReference>();
    references.Add(MetadataReference.CreateFromFile(interopFileName));
    references.Add(MetadataReference.CreateFromFile(netstandardPath));

    var parsedTrees = new List<SyntaxTree>();
    var csFiles = Directory.GetFiles(sourceDirectory, "*.cs", SearchOption.AllDirectories);
    var parallelOptions = new System.Threading.Tasks.ParallelOptions() { MaxDegreeOfParallelism = Environment.ProcessorCount * 2 };

    // Parse all inputs concurrently, skipping previously generated "*.modified.cs"
    // outputs; the List itself serves as the lock for concurrent Add calls.
    System.Threading.Tasks.Parallel.ForEach(csFiles, parallelOptions, (currentFile) =>
    {
        if (currentFile.EndsWith("modified.cs"))
        {
            return;
        }

        var fullPath = Path.GetFullPath(currentFile);
        var parsed = CSharpSyntaxTree.ParseText(File.ReadAllText(fullPath), null, fullPath);
        lock (parsedTrees)
        {
            parsedTrees.Add(parsed);
        }
    });

    var movedTrees = NamesToCorrectNamespacesMover.MoveNamesToCorrectNamespaces(parsedTrees, requiredNamespaces);
    HashSet <string> nonEmptyStructs = GetNonEmptyStructs(movedTrees);

    var rewrittenTrees = new List<SyntaxTree>();
    System.Threading.Tasks.Parallel.ForEach(movedTrees, parallelOptions, (current) =>
    {
        var rewrittenPath = Path.ChangeExtension(current.FilePath, ".modified.cs");
        var rewritten = MetadataSyntaxTreeCleaner.CleanSyntaxTree(current, remaps, requiredNamespaces, nonEmptyStructs, rewrittenPath);
        File.WriteAllText(rewrittenPath, rewritten.GetText().ToString());
        lock (rewrittenTrees)
        {
            rewrittenTrees.Add(rewritten);
        }
    });

    var compilationOptions = new CSharpCompilationOptions(OutputKind.WindowsRuntimeMetadata, allowUnsafe: true);
    var compilation = CSharpCompilation.Create(
        null,
        rewrittenTrees,
        references,
        compilationOptions);
    return new ClangSharpSourceCompilation(compilation, typeImports);
}