/// <summary>
/// Appends the given trailing items to the end of <paramref name="source"/>.
/// </summary>
/// <param name="source">The sequence to extend.</param>
/// <param name="items">Items appended after the sequence, in order.</param>
/// <returns>A lazily evaluated sequence: all of <paramref name="source"/> followed by <paramref name="items"/>.</returns>
public static IEnumerable<T> Concat<T>(this IEnumerable<T> source, params T[] items)
{
    return Enumerable.Concat(source, items);
}
/// <summary>
/// Emits one "1" per occurrence of "n" in <paramref name="digitLetters"/>,
/// followed by one "0" per occurrence of "z", joined by single spaces.
/// </summary>
/// <param name="digitLetters">Letters to tally (presumably from spelled-out digit names — TODO confirm against caller).</param>
/// <returns>Space-separated "1"s then "0"s; empty string when neither letter occurs.</returns>
private string Solve(List<string> digitLetters)
{
    // Idiom fix: the original built each run with Enumerable.Range(1, n).Select(x => "1"),
    // which is just Enumerable.Repeat("1", n). Output is identical, including
    // the empty-sequence case (Range(1, 0) and Repeat(_, 0) both yield nothing).
    var ones = Enumerable.Repeat("1", digitLetters.Count(x => x == "n"));
    var zeros = Enumerable.Repeat("0", digitLetters.Count(x => x == "z"));
    return string.Join(" ", ones.Concat(zeros));
}
// Constructor for the group loader.
// From the per-group byte/line statistics in groupsInfo it derives:
//   - the largest group (in bytes and in lines) across Consts.MaxGroupsCount slots,
//   - how many whole groups fit in the pooled buffer memory at once,
//   - how many line-index/sorting-segment slots to reserve for those groups.
// It then trims the extracted buffer array so the trailing buffers' worth of
// memory is left for line storage, opens one reader on the groups file per
// running task, takes one temp buffer per task, and folds every reader's and
// temp-buffer handle's Dispose into a single multicast Action (_dispose).
// NOTE(review): assumes groupsInfo has at least Consts.MaxGroupsCount entries
// and that _buffersPool.Count covers the reserved line memory (otherwise
// buffersCount could go non-positive) — confirm against callers.
public GroupsLoader( GroupInfo[] groupsInfo, IGroup[] output, IBuffersPool buffersPool, IIoService ioService, IConfig config) { _groupsInfo = groupsInfo; _output = output; _ioService = ioService; _buffersPool = buffersPool; int maxGroupBytesCount = 0, maxGroupLinesCount = 0; for (int i = 0; i < Consts.MaxGroupsCount; i++) { var info = groupsInfo[i]; if (!GroupInfo.IsZero(info)) { maxGroupBytesCount = Math.Max(maxGroupBytesCount, info.BytesCount); maxGroupLinesCount = Math.Max(maxGroupLinesCount, info.LinesCount); } } var memoryUsedForBuffers = (long) _buffersPool.Count * config.PhysicalBufferLength; var maxGroupBuffersCount = (int)Math.Ceiling((double) maxGroupBytesCount / config.UsingBufferLength); var maxGroupSize = maxGroupBuffersCount * config.PhysicalBufferLength; var lineSize = Marshal.SizeOf <LineIndexes>() + sizeof(ulong); var maxSizeForGroupLines = lineSize * maxGroupLinesCount; var maxLoadedGroupsCount = memoryUsedForBuffers / (maxGroupSize + maxSizeForGroupLines); var memoryForLines = (int) maxLoadedGroupsCount * maxSizeForGroupLines; _reservedLinesCount = memoryForLines / lineSize; var buffersCountForFree = (int)Math.Ceiling((double) memoryForLines / config.PhysicalBufferLength); var buffersCount = _buffersPool.Count - buffersCountForFree; var allBuffers = _buffersPool.ExtractAll(); Array.Resize(ref allBuffers, buffersCount); _buffers = allBuffers; _linesIndexes = new LineIndexes[_reservedLinesCount]; _sortingSegments = new ulong[_reservedLinesCount]; _groupsFilePath = config.GroupsFilePath; _usingBufferLength = config.UsingBufferLength; _readers = Enumerable .Range(0, Consts.MaxRunningTasksCount) .Select(_ => _ioService.OpenRead(_groupsFilePath)) .ToArray(); var tempBuffersHandles = Enumerable .Range(0, Consts.MaxRunningTasksCount) .Select(_ => _buffersPool.Get()) .ToArray(); _tempBuffers = tempBuffersHandles .Select(o => o.Value) .ToArray(); _dispose = Enumerable .Concat <IDisposable>(_readers, tempBuffersHandles) .Select(o => new Action(o.Dispose)) 
.Aggregate((a, b) => a + b); }
// Applies the stage-3 (bombing) results of a battle to the tracked
// participants: subtracts the reported damage from each participant's
// Current HP, then credits the damage dealt to the enemy back to the
// attacking friendly ship(s).
// The raw damage arrays are 1-based, hence the Skip(1) calls; main-fleet
// friend then enemy damages are concatenated first, with combined-fleet
// escort damages (friend, then enemy when present) appended after.
// Single attacker: the whole enemy-damage sum is credited directly.
// Multiple attackers: the sum is split proportionally to an estimated
// firepower per attacker (dive bomber / seaplane bomber: DiveBomberAttack *
// sqrt(planes) + 25; torpedo bomber: 1.15 * (Torpedo * sqrt(planes) + 25);
// anything else contributes 0) and each share is marked Inaccurate.
// NOTE(review): if every attacker's equipment falls into the default 0 case,
// rTotalFirepowers is 0 and rTotalDamages * rFirepowers[i] / rTotalFirepowers
// is NaN/Infinity before the (int)Math.Round — confirm upstream data makes
// this impossible.
void ProcessStage3() { var rStage3 = RawData.Stage3; if (rStage3 == null) { return; } var rParticipants = Stage.FriendAndEnemy; var rFriendMainDamages = rStage3.FriendDamage.Skip(1); var rEnemyMainDamages = rStage3.EnemyDamage.Skip(1); var rDamages = Enumerable.Concat(rFriendMainDamages, rEnemyMainDamages).ToArray(); IEnumerable <int> rEnemyEscortDamages = null; if (RawData.Stage3CombinedFleet != null) { var rFriendEscortDamages = RawData.Stage3CombinedFleet.FriendDamage.Skip(1); var rEscortDamages = Enumerable.Concat(rDamages, rFriendEscortDamages); if (RawData.Stage3CombinedFleet.EnemyDamage != null) { rEnemyEscortDamages = RawData.Stage3CombinedFleet.EnemyDamage.Skip(1); rEscortDamages = rEscortDamages.Concat(rEnemyEscortDamages); } rDamages = rEscortDamages.ToArray(); } var rEnemyDamages = rEnemyMainDamages; if (rEnemyEscortDamages != null) { rEnemyDamages = rEnemyMainDamages.Concat(rEnemyEscortDamages).ToArray(); } if (rDamages.All(r => r == 0)) { return; } for (var i = 0; i < rDamages.Length; i++) { var rParticipant = rParticipants[i]; if (rParticipant != null) { rParticipant.Current -= rDamages[i]; } } if (rEnemyDamages.All(r => r == 0)) { return; } var rFriendAttackers = RawData.Attackers[0]; if (rFriendAttackers.Length == 1 && rFriendAttackers[0] != -1) { rParticipants[rFriendAttackers[0] - 1].DamageGivenToOpponent += rEnemyDamages.Sum(); } else if (rFriendAttackers.Length > 1) { var rFirepowers = rFriendAttackers.Select(r => { var rShip = ((FriendShip)rParticipants[r - 1].Participant).Ship; return(rShip.Slots.Where(rpSlot => rpSlot.HasEquipment).Sum(rpSlot => { var rEquipmentInfo = rpSlot.Equipment.Info; switch (rEquipmentInfo.Type) { case EquipmentType.CarrierBasedDiveBomber: case EquipmentType.SeaplaneBomber: return rEquipmentInfo.DiveBomberAttack * Math.Sqrt(rpSlot.PlaneCount) + 25; case EquipmentType.CarrierBasedTorpedoBomber: return 1.15 * (rEquipmentInfo.Torpedo * Math.Sqrt(rpSlot.PlaneCount) + 25); default: return 0; } })); }).ToArray(); var 
rTotalDamages = rEnemyDamages.Sum(); var rTotalFirepowers = rFirepowers.Sum(); if (rTotalDamages == 0) { return; } for (var i = 0; i < rFriendAttackers.Length; i++) { var rParticipant = rParticipants[rFriendAttackers[i] - 1]; rParticipant.DamageGivenToOpponent += (int)Math.Round(rTotalDamages * rFirepowers[i] / rTotalFirepowers); rParticipant.Inaccurate = true; } } }
/// <summary>
/// Left-aligns the characters in the given string by padding them on the
/// right with the pad characters (cycled as needed), for a specified total length.
/// </summary>
/// <param name="str">String to pad.</param>
/// <param name="newWidth">Total length of the string after padding.</param>
/// <param name="padChars">Characters to pad with; defaults to a single space.</param>
/// <returns>The string left-aligned by trailing pad characters to the requested width.</returns>
/// <search>pad right,left align,left-align,pad string space,whitespace</search>
public static string PadRight(string str, int newWidth, string padChars = " ")
{
    // Take() with a non-positive count yields nothing, so a newWidth at or
    // below the current length returns the string unchanged.
    var padding = padChars.Cycle().Take(newWidth - str.Length);
    var padded = str.Concat(padding).ToArray();
    return new string(padded);
}
// Synchronizes a local derived-data-cache (DDC) directory against one or more
// remote cache directories, keeping the newest files up to an optional size
// budget. Steps, as implemented below:
//   1. Parse -LocalDir / -MaxSize / -RemoteDir / -MaxDays / -TimeLimit /
//      -Preview arguments, throwing AutomationException on missing or
//      invalid values; create the local directory if needed.
//   2. Enumerate local and remote files in parallel under the 10x10x10
//      "A/B/C/" bucket prefixes (each level numbered 0-9).
//   3. Build the target file set: the newest-first union of local and
//      remote files written within the last MaxDays, stopping once MaxSize
//      (when > 0) would be exceeded; relative paths are compared
//      case-insensitively, and on a name collision the newer entry wins.
//   4. Report how coherent the local cache already is, then delete local
//      files not in the target set and copy the missing ones — or just print
//      what would happen when -Preview is set. Copying is cancelled when the
//      optional -TimeLimit (seconds budget, given in minutes syntax) expires.
/// <summary> /// Execute the command /// </summary> public override void ExecuteBuild() { Console.WriteLine(); // Parse the command line arguments string LocalDirName = ParseParamValue("LocalDir", null); if (LocalDirName == null) { throw new AutomationException("Missing -LocalDir=... argument"); } long MaxSize; if (!TryParseSize(ParseParamValue("MaxSize", "0mb"), out MaxSize)) { throw new AutomationException("Invalid -MaxSize=... argument"); } string[] RemoteDirNames = ParseParamValues("RemoteDir"); if (RemoteDirNames.Length == 0) { throw new AutomationException("Missing -RemoteDir=... argument"); } int MaxDays; if (!int.TryParse(ParseParamValue("MaxDays", "3"), out MaxDays)) { throw new AutomationException("Invalid -MaxDays=... argument"); } int TimeLimit; if (!TryParseTime(ParseParamValue("TimeLimit", "0m"), out TimeLimit)) { throw new AutomationException("Invalid -TimeLimit=... argument"); } bool bPreview = ParseParam("Preview"); // Make sure the source directory exists List <DirectoryInfo> RemoteDirs = new List <DirectoryInfo>(); foreach (string RemoteDirName in RemoteDirNames) { DirectoryInfo RemoteDir = new DirectoryInfo(RemoteDirName); if (!RemoteDir.Exists) { throw new AutomationException("Remote directory '{0}' does not exist", RemoteDirName); } RemoteDirs.Add(RemoteDir); } // Get the local directory DirectoryInfo LocalDir = new DirectoryInfo(LocalDirName); if (!LocalDir.Exists) { LocalDir.Create(); } // Create all the base DDC directory names. These are three entries deep, each numbered 0-9. 
List <string> BasePathPrefixes = new List <string>(); for (int IndexA = 0; IndexA <= 9; IndexA++) { for (int IndexB = 0; IndexB <= 9; IndexB++) { for (int IndexC = 0; IndexC <= 9; IndexC++) { BasePathPrefixes.Add(String.Format("{0}{3}{1}{3}{2}{3}", IndexA, IndexB, IndexC, Path.DirectorySeparatorChar)); } } } // Find all the local files ConcurrentBag <CacheFile> LocalFiles = new ConcurrentBag <CacheFile>(); Console.WriteLine("Enumerating local files from {0}", LocalDir.FullName); ForEach(BasePathPrefixes, (BasePath, Messages) => (() => EnumerateFiles(LocalDir, BasePath, LocalFiles)), "Enumerating files..."); Console.WriteLine("Found {0} files, {1}mb.", LocalFiles.Count, LocalFiles.Sum(x => x.Length) / (1024 * 1024)); Console.WriteLine(); // Find all the remote files ConcurrentBag <CacheFile> RemoteFiles = new ConcurrentBag <CacheFile>(); foreach (DirectoryInfo RemoteDir in RemoteDirs) { Console.WriteLine("Enumerating remote files from {0}", RemoteDir.FullName); ForEach(BasePathPrefixes, (BasePath, Messages) => (() => EnumerateFiles(RemoteDir, BasePath, RemoteFiles)), "Enumerating files..."); Console.WriteLine("Found {0} files, {1}mb.", RemoteFiles.Count, RemoteFiles.Sum(x => x.Length) / (1024 * 1024)); Console.WriteLine(); } // Get the oldest file that we want to copy DateTime OldestLastWriteTimeUtc = DateTime.Now - TimeSpan.FromDays(MaxDays); // Build a lookup of remote files by name Dictionary <string, CacheFile> RelativePathToRemoteFile = new Dictionary <string, CacheFile>(StringComparer.InvariantCultureIgnoreCase); foreach (CacheFile RemoteFile in RemoteFiles) { if (RemoteFile.LastWriteTimeUtc > OldestLastWriteTimeUtc) { RelativePathToRemoteFile[RemoteFile.RelativePath] = RemoteFile; } } // Build a lookup of local files by name Dictionary <string, CacheFile> RelativePathToLocalFile = LocalFiles.ToDictionary(x => x.RelativePath, x => x, StringComparer.InvariantCultureIgnoreCase); // Build a list of target files that we want in the DDC long TotalSize = 0; 
Dictionary <string, CacheFile> RelativePathToTargetFile = new Dictionary <string, CacheFile>(StringComparer.InvariantCultureIgnoreCase); foreach (CacheFile TargetFile in Enumerable.Concat <CacheFile>(RelativePathToLocalFile.Values, RelativePathToRemoteFile.Values).OrderByDescending(x => x.LastWriteTimeUtc)) { if (MaxSize > 0 && TotalSize + TargetFile.Length > MaxSize) { break; } if (!RelativePathToTargetFile.ContainsKey(TargetFile.RelativePath)) { RelativePathToTargetFile.Add(TargetFile.RelativePath, TargetFile); TotalSize += TargetFile.Length; } } // Print measure of how coherent the cache is double CoherencyPct = RelativePathToTargetFile.Values.Count(x => RelativePathToLocalFile.ContainsKey(x.RelativePath)) * 100.0 / RelativePathToTargetFile.Count; Console.WriteLine("Cache is {0:0.0}% coherent with remote.", CoherencyPct); Console.WriteLine(); // Remove any outdated files List <CacheFile> FilesToRemove = RelativePathToLocalFile.Values.Where(x => !RelativePathToTargetFile.ContainsKey(x.RelativePath)).ToList(); if (bPreview) { Console.WriteLine("Sync would remove {0} files ({1}mb)", FilesToRemove.Count, FilesToRemove.Sum(x => x.Length) / (1024 * 1024)); } else if (FilesToRemove.Count > 0) { Console.WriteLine("Deleting {0} files ({1}mb)...", FilesToRemove.Count, FilesToRemove.Sum(x => x.Length) / (1024 * 1024)); ForEach(FilesToRemove, (File, Messages) => (() => RemoveFile(LocalDir, File.RelativePath, Messages)), "Deleting files"); Console.WriteLine(); } // Add any new files List <CacheFile> FilesToAdd = RelativePathToTargetFile.Values.Where(x => !RelativePathToLocalFile.ContainsKey(x.RelativePath)).ToList(); if (bPreview) { Console.WriteLine("Sync would add {0} files ({1}mb)", FilesToAdd.Count, FilesToAdd.Sum(x => x.Length) / (1024 * 1024)); } else if (FilesToAdd.Count > 0) { Console.WriteLine("Copying {0} files ({1}mb)...", FilesToAdd.Count, FilesToAdd.Sum(x => x.Length) / (1024 * 1024)); CancellationTokenSource CancellationTokenSource = new 
CancellationTokenSource(); if (TimeLimit > 0) { CancellationTokenSource.CancelAfter(TimeLimit * 1000); } DateTime StartTime = DateTime.UtcNow; CopyStats Stats = new CopyStats(); ForEach(FilesToAdd, (File, Messages) => (() => CopyFile(File, LocalDir, Messages, Stats)), "Copying files...", CancellationTokenSource.Token); double TotalSizeMb = Stats.NumBytes / (1024.0 * 1024.0); Console.WriteLine("Copied {0} files totalling {1:0.0}mb ({2:0.00}mb/s).", Stats.NumFiles, TotalSizeMb, TotalSizeMb / (DateTime.UtcNow - StartTime).TotalSeconds); double FinalCoherencyPct = (RelativePathToTargetFile.Values.Count(x => RelativePathToLocalFile.ContainsKey(x.RelativePath)) + Stats.NumFiles) * 100.0 / RelativePathToTargetFile.Count; Console.WriteLine(); Console.WriteLine("Final cache is {0:0.0}% coherent with remote.", FinalCoherencyPct); if (CancellationTokenSource.IsCancellationRequested) { Console.WriteLine("Halting due to expired time limit."); } } Console.WriteLine(); }
// Persists this theme back into its zip archive.
// Copies the in-memory properties into themeInfo, forces every theme item to
// be loaded, then rebuilds the archive from scratch inside a single
// BeginUpdate/CommitUpdate batch: all existing entries are deleted, then
// Theme.xml (the serialized ThemeInfo), the main screenshot, numbered
// screenshots, image/sound resource items, and the font files referenced by
// the theme's font classes and (non-null) font overrides are re-added.
// Finally the zip handle is reset and all dirty flags are cleared.
// NOTE(review): the Theme.xml MemoryStream is handed to the zip as a data
// source and not disposed here — presumably the archive consumes it on
// CommitUpdate; confirm ownership.
// NOTE(review): an unrecognized ResourceThemeItem subtype throws a bare
// Exception mid-update, leaving the archive in its BeginUpdate state.
protected override void SaveInternal() { this.themeInfo.ID = this.ID; this.themeInfo.Name = this.Name; this.themeInfo.Author = this.Author; this.themeInfo.Version = ((object)(this.Version ?? new Version(0, 0))).ToString(); this.themeInfo.Comments = this.Comments; this.themeInfo.ThemeType = this.ThemeType; foreach (IThemeItem themeItem in (Collection <IThemeItem>) this.ThemeItems) { if (!themeItem.IsLoaded) { themeItem.Load(); } } this.ZipFile.BeginUpdate(); foreach (ZipEntry entry in new List <ZipEntry>(Enumerable.OfType <ZipEntry>((IEnumerable)this.ZipFile))) { this.ZipFile.Delete(entry); } this.themeItemEntries.Clear(); this.themeInfo.Fonts = this.FontsItem; this.themeInfo.Colors = this.ColorsItem; MemoryStream memoryStream = new MemoryStream(); new XmlSerializer(typeof(VmcStudioTheme.ThemeInfo)).Serialize((Stream)memoryStream, (object)this.themeInfo); this.ZipFile.Add((IStaticDataSource) new ZippedTheme.StreamDataSource((Stream)memoryStream), "Theme.xml"); if (this.MainScreenshot != null) { VmcStudioTheme.AddImageResource(this.MainScreenshot, this.ZipFile, "Screenshot.png"); } for (int index = 0; index < this.Screenshots.Count; ++index) { VmcStudioTheme.AddImageResource(this.Screenshots[index], this.ZipFile, string.Format("Screenshots\\{0}.png", (object)index)); } foreach (ResourceThemeItem resourceThemeItem in Enumerable.OfType <ResourceThemeItem>((IEnumerable)this.ThemeItems)) { string entryName = (string)null; if (resourceThemeItem is ImageResourceThemeItem) { entryName = VmcStudioTheme.GetResourceZipPath(resourceThemeItem, "Images"); } else if (resourceThemeItem is SoundResourceThemeItem) { entryName = VmcStudioTheme.GetResourceZipPath(resourceThemeItem, "Sounds"); } if (entryName == null) { throw new Exception("Unknown theme item type - " + (object)resourceThemeItem.GetType()); } this.ZipFile.Add(ZippedTheme.GetResourceThemeItemData(resourceThemeItem), entryName); } foreach (string fontName in Enumerable.Distinct <string>(Enumerable.Concat 
<string>(Enumerable.Select <FontClass, string>((IEnumerable <FontClass>) this.themeInfo.Fonts.FontClasses, (Func <FontClass, string>)(fontClass => fontClass.FontFace.FontFamily)), Enumerable.Select <FontOverride, string>(Enumerable.Where <FontOverride>((IEnumerable <FontOverride>) this.themeInfo.Fonts.FontOverrides, (Func <FontOverride, bool>)(fontOverride => fontOverride.FontFace != null)), (Func <FontOverride, string>)(fontOverride => fontOverride.FontFace.FontFamily))))) { FontFamily fontFamily = this.GetFontFamily(fontName); if (fontFamily != null) { string file = FontUtil.GetFile(fontFamily); if (file != null) { this.ZipFile.Add(file, "Fonts\\" + Path.GetFileName(file)); } } } this.ZipFile.CommitUpdate(); this.ResetZipFile(); foreach (IThemeItem themeItem in Enumerable.Where <IThemeItem>((IEnumerable <IThemeItem>) this.ThemeItems, (Func <IThemeItem, bool>)(t => t.IsDirty))) { themeItem.ClearDirty(); } this.FontsItem.ClearDirty(); this.ColorsItem.ClearDirty(); this.IsDirty = false; }
/// <summary>
/// Writes the concatenation of <paramref name="args"/> with itself to the
/// console. Note that Console.WriteLine on an IEnumerable prints the
/// sequence's type name rather than its elements — presumably a debugging
/// stub; confirm intent with the author.
/// </summary>
/// <param name="args">Sequence concatenated with itself.</param>
private static void Ololo(string[] args)
{
    // BUGFIX: the original referenced "argºs" — an identifier garbled with a
    // stray 'º' character — which does not resolve/compile. The only name in
    // scope it can mean is the parameter "args".
    Console.WriteLine(Enumerable.Concat(args, args));
}
// Code generator: builds a public static Map extension method for a
// free-monad-style discriminated union, using Roslyn syntax factories.
// The generated method has the shape
//   public static TypeB Map<A, B>(this TypeA ma, System.Func<A, B> f) => ma switch { ... };
// with one switch arm per concrete case type:
//   - the [Pure]-marked "pure" case wraps f(v.Value) in the pure constructor,
//   - the remaining [Pure]-attributed terminal cases are copied member-by-member,
//   - free (non-[Pure]) cases copy all constructor arguments except the last
//     and replace the trailing continuation with `n => v.Next(n).Map(f)`,
//   - a discard arm throws System.NotSupportedException.
// The result is NormalizeWhitespace()d for readable emitted source.
// NOTE(review): the terminal-case arm passes `p.Identifier` (a SyntaxToken)
// to CodeGenUtil.MakeFirstCharUpper while the free-case arm passes
// `p.Identifier.Text` — confirm both overloads exist or this is a latent bug.
// NOTE(review): the local `termimalFuncs` is a typo for `terminalFuncs`
// (rename not applied here to keep this a comment-only change).
static MethodDeclarationSyntax MakeMapFunction( SyntaxToken applyToIdentifier, SyntaxList <TypeParameterConstraintClauseSyntax> applyToConstraints, MethodDeclarationSyntax[] applyToMembers, TypeParameterListSyntax applyToTypeParams, MethodDeclarationSyntax pure) { var genA = applyToTypeParams.Parameters.First().ToString(); var genB = CodeGenUtil.NextGenName(genA); var genC = CodeGenUtil.NextGenName(genB); var typeA = MakeTypeName(applyToIdentifier.Text, genA); var typeB = MakeTypeName(applyToIdentifier.Text, genB); var typeC = MakeTypeName(applyToIdentifier.Text, genC); var mapFuncType = ParseTypeName($"System.Func<{genA}, {genB}>"); var pureTypeA = MakeTypeName(pure.Identifier.Text, genA); var pureTypeB = MakeTypeName(pure.Identifier.Text, genB); var mapFunc = InvocationExpression( MemberAccessExpression( SyntaxKind.SimpleMemberAccessExpression, InvocationExpression( MemberAccessExpression( SyntaxKind.SimpleMemberAccessExpression, IdentifierName("v"), IdentifierName("Next"))) .WithArgumentList(ArgumentList(SingletonSeparatedList <ArgumentSyntax>(Argument(IdentifierName("n"))))), IdentifierName("Map"))) .WithArgumentList( ArgumentList( SingletonSeparatedList <ArgumentSyntax>( Argument( IdentifierName("f"))))); var pureFunc = new SyntaxNodeOrToken[] { SwitchExpressionArm( DeclarationPattern( pureTypeA, SingleVariableDesignation(Identifier("v"))), ObjectCreationExpression(pureTypeB) .WithArgumentList( ArgumentList( SingletonSeparatedList <ArgumentSyntax>( Argument( InvocationExpression(IdentifierName("f")) .WithArgumentList( ArgumentList( SingletonSeparatedList <ArgumentSyntax>( Argument( MemberAccessExpression( SyntaxKind.SimpleMemberAccessExpression, IdentifierName("v"), IdentifierName(CodeGenUtil.MakeFirstCharUpper(pure.ParameterList.Parameters.First().Identifier.Text)))))))))))), Token(SyntaxKind.CommaToken) }; var termimalFuncs = applyToMembers .Where(m => m != pure && m.AttributeLists != null && m.AttributeLists.SelectMany(a => a.Attributes).Any(a => 
a.Name.ToString() == "Pure")) .SelectMany(m => new SyntaxNodeOrToken[] { SwitchExpressionArm( DeclarationPattern( ParseTypeName($"{m.Identifier.Text}<{genA}>"), SingleVariableDesignation(Identifier("v"))), ObjectCreationExpression(MakeTypeName(m.Identifier.Text, genB)) .WithArgumentList( ArgumentList( SeparatedList <ArgumentSyntax>( m.ParameterList .Parameters .Select(p => Argument( MemberAccessExpression( SyntaxKind.SimpleMemberAccessExpression, IdentifierName("v"), IdentifierName(CodeGenUtil.MakeFirstCharUpper(p.Identifier))))))))), Token(SyntaxKind.CommaToken) }); var freeFuncs = applyToMembers .Where(m => m.AttributeLists == null || !m.AttributeLists.SelectMany(a => a.Attributes).Any(a => a.Name.ToString() == "Pure")) .SelectMany(m => new SyntaxNodeOrToken[] { SwitchExpressionArm( DeclarationPattern( ParseTypeName($"{m.Identifier.Text}<{genA}>"), SingleVariableDesignation(Identifier("v"))), ObjectCreationExpression(MakeTypeName(m.Identifier.Text, genB)) .WithArgumentList( ArgumentList( SeparatedList <ArgumentSyntax>( Enumerable.Concat( m.ParameterList .Parameters .Take(m.ParameterList.Parameters.Count - 1) .SelectMany(p => new SyntaxNodeOrToken[] { Argument( MemberAccessExpression( SyntaxKind.SimpleMemberAccessExpression, IdentifierName("v"), IdentifierName(CodeGenUtil.MakeFirstCharUpper(p.Identifier.Text)))), Token(SyntaxKind.CommaToken) }), new SyntaxNodeOrToken [1] { Argument(SimpleLambdaExpression(Parameter(Identifier("n")), mapFunc)) }))))), Token(SyntaxKind.CommaToken) }); var tokens = new List <SyntaxNodeOrToken>(); tokens.AddRange(pureFunc); tokens.AddRange(termimalFuncs); tokens.AddRange(freeFuncs); tokens.Add( SwitchExpressionArm( DiscardPattern(), ThrowExpression( ObjectCreationExpression( QualifiedName( IdentifierName("System"), IdentifierName("NotSupportedException"))) .WithArgumentList(ArgumentList())))); return(MethodDeclaration(typeB, Identifier("Map")) .WithModifiers( TokenList(new[] { Token(SyntaxKind.PublicKeyword), 
Token(SyntaxKind.StaticKeyword) })) .WithTypeParameterList( TypeParameterList( SeparatedList <TypeParameterSyntax>( new SyntaxNodeOrToken[] { TypeParameter( Identifier(genA)), Token(SyntaxKind.CommaToken), TypeParameter( Identifier(genB)) }))) .WithParameterList( ParameterList( SeparatedList <ParameterSyntax>( new SyntaxNodeOrToken[] { Parameter( Identifier("ma")) .WithModifiers( TokenList( Token(SyntaxKind.ThisKeyword))) .WithType(typeA), Token(SyntaxKind.CommaToken), Parameter( Identifier("f")) .WithType(mapFuncType) }))) .WithExpressionBody( ArrowExpressionClause( SwitchExpression( IdentifierName("ma")) .WithArms(SeparatedList <SwitchExpressionArmSyntax>(tokens)))) .WithSemicolonToken( Token(SyntaxKind.SemicolonToken)) .NormalizeWhitespace()); }
// Autofac registration extension: swaps the registration's implementation
// type for a Castle DynamicProxy class-proxy type generated on the fly, then
// hooks OnPreparing to prepend a positional parameter (index 0) carrying the
// resolved IInterceptor instances — the class proxy's constructor expects the
// interceptor array first — while preserving the caller's original parameters
// after it. Throws ArgumentNullException for a null registration.
// NOTE(review): a fresh ProxyGenerator (and therefore a fresh proxy type) is
// created on every call — presumably the point of this "Custom" variant, but
// it bypasses Castle's proxy-type cache; confirm that is intended.
public static IRegistrationBuilder <TLimit, TConcreteReflectionActivatorData, TRegistrationStyle> CustomEnableClassInterceptors <TLimit, TConcreteReflectionActivatorData, TRegistrationStyle>(this IRegistrationBuilder <TLimit, TConcreteReflectionActivatorData, TRegistrationStyle> registration) where TConcreteReflectionActivatorData : ConcreteReflectionActivatorData { if (registration == null) { throw new ArgumentNullException(nameof(registration)); } // Create a new proxy generator per call... var proxyGenerator = new ProxyGenerator(); registration.ActivatorData.ImplementationType = (Type)proxyGenerator.ProxyBuilder.CreateClassProxyType((Type)registration.ActivatorData.ImplementationType, new Type[0], ProxyGenerationOptions.Default); registration.OnPreparing((Action <PreparingEventArgs>)(e => e.Parameters = (IEnumerable <Parameter>)Enumerable.ToArray <Parameter>(Enumerable.Concat <Parameter>((IEnumerable <Parameter>) new Parameter[1] { (Parameter) new PositionalParameter(0, (object)Enumerable.ToArray <IInterceptor>(Enumerable.Cast <IInterceptor>((IEnumerable)Enumerable.Select <Service, object>(GetInterceptorServices(e.Component, (Type)registration.ActivatorData.ImplementationType), (Func <Service, object>)(s => ResolutionExtensions.ResolveService(e.Context, s)))))) }, (IEnumerable <Parameter>)e.Parameters)))); return(registration); }
// Utility command that emits Markdown documentation for all weapon-related
// types: WeaponInfo itself, then every IProjectileInfo and IWarhead
// implementation, ordered by namespace and grouped under "## <namespace>"
// headings (generic and abstract helper types are skipped).
// For each concrete type it prints the [Desc] commentary and an HTML table of
// FieldLoader-visible properties with their default values — read from a
// freshly created instance, except WeaponInfo which gets none (defaults shown
// blank) — plus friendly type names and per-field [Desc] text.
// The version in the preamble comes from the mod manifest unless overridden
// by the second command-line argument. Output goes to stdout.
void IUtilityCommand.Run(Utility utility, string[] args) { // HACK: The engine code assumes that Game.modData is set. Game.ModData = utility.ModData; var version = utility.ModData.Manifest.Metadata.Version; if (args.Length > 1) { version = args[1]; } Console.WriteLine( "This documentation is aimed at modders. It displays a template for weapon definitions " + "as well as its contained types (warheads and projectiles) with default values and developer commentary. " + "Please do not edit it directly, but add new `[Desc(\"String\")]` tags to the source code. This file has been " + "automatically generated for version {0} of OpenRA.", version); Console.WriteLine(); var doc = new StringBuilder(); var currentNamespace = ""; var objectCreator = utility.ModData.ObjectCreator; var weaponInfo = objectCreator.GetTypesImplementing <WeaponInfo>(); var warheads = objectCreator.GetTypesImplementing <IWarhead>().OrderBy(t => t.Namespace); var projectiles = objectCreator.GetTypesImplementing <IProjectileInfo>().OrderBy(t => t.Namespace); var weaponTypes = Enumerable.Concat(weaponInfo, Enumerable.Concat(projectiles, warheads)); foreach (var t in weaponTypes) { // skip helpers like TraitInfo<T> if (t.ContainsGenericParameters || t.IsAbstract) { continue; } if (currentNamespace != t.Namespace) { currentNamespace = t.Namespace; doc.AppendLine(); doc.AppendLine("## {0}".F(currentNamespace)); } var traitName = t.Name.EndsWith("Info") ? t.Name.Substring(0, t.Name.Length - 4) : t.Name; doc.AppendLine(); doc.AppendLine("### {0}".F(traitName)); var traitDescLines = t.GetCustomAttributes <DescAttribute>(false).SelectMany(d => d.Lines); foreach (var line in traitDescLines) { doc.AppendLine(line); } var infos = FieldLoader.GetTypeLoadInfo(t); if (!infos.Any()) { continue; } doc.AppendLine("<table>"); doc.AppendLine("<tr><th>Property</th><th>Default Value</th><th>Type</th><th>Description</th></tr>"); var liveTraitInfo = t == typeof(WeaponInfo) ? 
null : objectCreator.CreateBasic(t); foreach (var info in infos) { var fieldDescLines = info.Field.GetCustomAttributes <DescAttribute>(true).SelectMany(d => d.Lines); var fieldType = Util.FriendlyTypeName(info.Field.FieldType); var defaultValue = liveTraitInfo == null ? "" : FieldSaver.SaveField(liveTraitInfo, info.Field.Name).Value.Value; doc.Append("<tr><td>{0}</td><td>{1}</td><td>{2}</td>".F(info.YamlName, defaultValue, fieldType)); doc.Append("<td>"); foreach (var line in fieldDescLines) { doc.Append(line + " "); } doc.AppendLine("</td></tr>"); } doc.AppendLine("</table>"); } Console.Write(doc.ToString()); }
/// <summary>
/// Test-harness entry point: parses "/option[:value]" switches and positional
/// parameters, then runs the screenshot recognition test(s) against either a
/// single file or every .png/.jpg under a directory.
/// </summary>
/// <param name="args">Command line: "/playerlevel:N", "/onlycandy", and an optional file or directory path as the first argument.</param>
/// <returns>0 on success (always in DEBUG builds); -1 when any test failed.</returns>
public static int Main(string[] args)
{
    Console.WriteLine("-- GoOnTap.Test --");

    // Switches start with '/'; "name:value" splits on the first ':', bare
    // switches get a null value. Keys are lower-cased.
    Options = args.Where(a => a.StartsWith("/"))
        .Select(a => a.TrimStart('/'))
        .Select(a => new { Parameter = a.Trim(), Separator = a.Trim().IndexOf(':') })
        .ToDictionary(a => a.Separator == -1 ? a.Parameter : a.Parameter.Substring(0, a.Separator).ToLower(),
                      a => a.Separator == -1 ? null : a.Parameter.Substring(a.Separator + 1));
    Parameters = args.Where(a => !a.StartsWith("/"))
        .ToList();

    // Decode parameters
    if (Options.ContainsKey("playerlevel"))
    {
        int playerLevel;
        if (!int.TryParse(Options["playerlevel"], out playerLevel))
        {
            throw new FormatException("Invalid specified player level");
        }
        PlayerLevel = playerLevel;
    }
    if (Options.ContainsKey("onlycandy"))
    {
        OnlyCandy = true;
    }

    // Show current settings
    if (PlayerLevel != null)
    {
        Console.WriteLine("Default player level: " + PlayerLevel);
    }
    Console.WriteLine("Use only candy name: " + OnlyCandy);

    // Run specified test
    bool success = true;
    string arg = args.Length == 0 ? null : args[0];
    try
    {
        if (arg != null && File.Exists(arg))
        {
            // Single screenshot file
            FileInfo screenshotInfo = new FileInfo(arg);
            success = RunTest(screenshotInfo);
        }
        else
        {
            // BUGFIX: the original tested `args != null` here, which is always
            // true, so `new DirectoryInfo(null)` threw whenever no argument
            // was supplied. The intent is clearly to test the first argument
            // and fall back to the default ScreenshotsDirectory otherwise.
            if (arg != null)
            {
                ScreenshotsDirectory = new DirectoryInfo(arg);
                if (!ScreenshotsDirectory.Exists)
                {
                    throw new FileNotFoundException();
                }
            }

            Console.WriteLine("Testing directory: " + ScreenshotsDirectory.FullName);
            Console.WriteLine();

            FileInfo[] screenshotsInfo = Enumerable.Concat(
                ScreenshotsDirectory.GetFiles("*.png", SearchOption.AllDirectories),
                ScreenshotsDirectory.GetFiles("*.jpg", SearchOption.AllDirectories)
            ).ToArray();

            Console.WriteLine("Found {0} screenshots to test", screenshotsInfo.Length);
            foreach (FileInfo screenshotInfo in screenshotsInfo)
            {
                success &= RunTest(screenshotInfo);
            }
        }
    }
    catch (Exception e)
    {
        Console.Error.WriteLine("Error while checking specified parameter. " + e);
    }

#if DEBUG
    Console.WriteLine();
    Console.WriteLine("-- End --");
    Console.ReadLine();
#else
    if (!success)
    {
        return(-1);
    }
#endif
    return(0);
}
/// <summary>
/// Add all the data given by data to the Blob and increments max size.
/// </summary>
/// <param name="data">Bytes to append to the blob; may be a lazy sequence.</param>
public void Concat(IEnumerable <byte> data)
{
    // BUGFIX: materialize the sequence exactly once. The original enumerated
    // `data` twice (once via Concat/ToArray, once via Count()), which
    // double-executes lazy/one-shot sequences and could make BlobLength
    // disagree with what was actually appended.
    byte[] appended = data.ToArray();
    Blob = Enumerable.Concat(Blob, appended).ToArray();
    BlobLength += appended.Length;
}
// Json.NET converter supporting the legacy Edit schema: the object may carry
// a "remove" property (array of strings, each becoming an Edit with
// Type=Remove and Delete=<string>) and/or an "edits" property (an array of
// Edit objects). Non-property tokens and non-array property values are
// skipped; results from multiple matching properties are merged by lazily
// concatenating onto ret.Edits. Returns null for JSON null or anything that
// is not an object.
// NOTE(review): once two groups are merged, ret.Edits is a deferred
// Enumerable.Concat chain — repeated enumeration re-walks it; confirm
// downstream materializes the sequence.
/// <summary> /// Converter for handling legacy Edit /// </summary> /// <param name="reader"></param> /// <param name="objectType"></param> /// <param name="existingValue"></param> /// <param name="hasExistingValue"></param> /// <param name="serializer"></param> /// <returns></returns> public override ConsequenceQuery ReadJson(JsonReader reader, Type objectType, ConsequenceQuery existingValue, bool hasExistingValue, JsonSerializer serializer) { if (reader.TokenType == JsonToken.Null) { return(null); } if (reader.TokenType != JsonToken.StartObject) { return(null); } ConsequenceQuery ret = new ConsequenceQuery(); var tokens = JToken.Load(reader); foreach (var token in tokens) { var isProperty = token.Type == JTokenType.Property; if (!isProperty) { continue; } var property = (JProperty)token; var isArray = property.Value.Type == JTokenType.Array; if (!isArray) { continue; } var array = (JArray)property.Value; List <Edit> edits = new List <Edit>(); switch (property.Name) { case "remove": List <string> removeList = array.ToObject <List <string> >(); if (removeList.Count > 0) { foreach (var remove in removeList) { edits.Add(new Edit { Type = EditType.Remove, Delete = remove }); } } break; case "edits": edits = array.ToObject <List <Edit> >(); break; } if (ret.Edits == null) { ret.Edits = edits; } else { ret.Edits = Enumerable.Concat(ret.Edits, edits); } } return(ret); }
/// <summary>
/// Extracts a frame's payload, separating out any side data and meta data
/// stored with the frame (NUT container format).
/// </summary>
/// <param name="reader">Reader used to decode variable-length fields and raw bytes.</param>
/// <param name="node">The frame node whose data is being extracted.</param>
/// <param name="sideData">Receives the side data bytes, or null when absent.</param>
/// <param name="metaData">Receives the meta data bytes, or null when absent.</param>
/// <returns>The frame's own bytes, always prefixed with the frame header.</returns>
public static byte[] GetFrameData(NutReader reader, Node node, out byte[] sideData, out byte[] metaData)
{
    FrameFlags flags = GetFrameFlags(reader, node);
    sideData = metaData = null;

    // Always include the frame header
    IEnumerable<byte> frameData = GetFrameHeader(reader, node);

    // Check if data needs to be removed
    if (flags.HasFlag(FrameFlags.SideMetaData))
    {
        // Compatibility:
        // If SizeData in the header was set this is a draft version and the data
        // is at the end of the frame. In such a case the sidedata size was
        // already read from the frame header and is stored in the Identifier.
        long sidedata_size = node.Identifier[4];

        // Just incase it was indicated that this frame uses a normal side data
        // size of 0 and includes specific side data for the frame, check the
        // header and frame headers value which was stored when reading the frame.
        if (sidedata_size > 0)
        {
            metaData = null; // Not included in spec.
            int dataSize = (int)(node.DataSize - sidedata_size);
            frameData = Enumerable.Concat(frameData, node.Data.Take(dataSize));
            sideData = node.Data.Skip(dataSize).ToArray();
        }
        else // Current Spec
        {
            int bytesReadNow = 0, bytesReadTotal = 0;

            // Get a stream of the data
            using (var stream = node.DataStream)
            {
                // Side Data Count (From Info) and read
                int size = (int)reader.DecodeVariableLength(stream, out bytesReadNow);
                if (size > 0)
                {
                    bytesReadTotal += bytesReadNow;
                    sideData = new byte[size];
                    reader.Read(sideData, 0, size);
                    bytesReadTotal += size;
                }

                // Meta Data Count (From Info) and read
                size = (int)reader.DecodeVariableLength(stream, out bytesReadNow);
                if (size > 0)
                {
                    bytesReadTotal += bytesReadNow;
                    metaData = new byte[size];
                    // BUGFIX: the original read the meta data into `sideData`,
                    // clobbering the side data just read and leaving the
                    // freshly allocated `metaData` array all zeroes.
                    reader.Read(metaData, 0, size);
                    bytesReadTotal += size;
                }

                // The position of this stream is now @ the end of the data
                // which does not belong to the frame itself.
                frameData = Enumerable.Concat(frameData, node.Data.Skip(bytesReadTotal));
            }
        }
    }
    else // The data of the frame is as it is
    {
        frameData = Enumerable.Concat(frameData, node.Data);
    }

    // Return the allocated array
    return(frameData.ToArray());
}
// Background worker that updates the sorting-rack controller firmware over a
// board protocol, reporting progress through SetTextMesssage. Flow:
//   1. read the update file,
//   2. retry (up to 99 times) the online/update handshake frame
//      (0x55 0xAA 0x1C 0xC1 + checksum) and verify its 4-byte ack,
//   3. send the file-size frame (0x2C 0xC2),
//   4. zero-pad the data to a multiple of 1024 bytes,
//   5. send the data in 1024-byte frames (0x3C 0xC3), verifying each ack.
// On any failure the Go button is re-enabled and the error message is shown.
// (Original Chinese comments translated to English below; the user-facing
// Chinese message strings are runtime data and left untouched.)
private void SleepT() { try { Invoke((MethodInvoker) delegate() { this.textBox1.Clear(); btn_Go.Enabled = false; }); FileTool file = new FileTool(); var data = file.Read(); SetTextMesssage(100 * 1 / 5, "开始发送联机(更新)信号,请将分拣架重新上电" + "\r\n"); /***** 2. Send the online (update) signal; checksum 4F9 0xF9, 0x4 *****/ bool isSendSuccess = false; for (int i = 1; i < 100; i++) { try { var write2 = new byte[] { 0x55, 0xAA, 0x1C, 0xC1 }; write2 = Enumerable.Concat(write2, CLCData(write2)).ToArray(); byte[] result2 = OrderSortService.DoloadBoard(write2); if (result2[0] != 0x5F || result2[1] != 0x01 || result2[2] != 0x00 || result2[3] != 0xC1) { throw new Exception($"下载数据失败:"); } isSendSuccess = true; break; } catch { } } if (!isSendSuccess) { throw new Exception($"下载数据失败:联机信号应答失败"); } SetTextMesssage(100 * 2 / 5, "发送联机(更新)信号成功" + "\r\n"); SetTextMesssage(100 * 2 / 5, "开始发送文件大小" + "\r\n"); /***** 3. Send the file size *****/ byte[] fileSize = BitConverter.GetBytes(Convert.ToInt16(data.Length)); var update3 = new byte[] { 0x55, 0xAA, 0x2C, 0xC2 }; update3 = Enumerable.Concat(update3, fileSize).ToArray(); update3 = Enumerable.Concat(update3, CLCData(update3)).ToArray(); byte[] result3 = OrderSortService.DoloadBoard(update3); if (result3[0] != 0x5F || result3[1] != 0x01 || result3[2] != 0x00 || result3[3] != 0xC2) { throw new Exception($"下载数据失败:发送文件大小应答失败"); } SetTextMesssage(100 * 3 / 5, $"发送文件大小成功" + "\r\n"); /***** 4. File transfer *****/ // Pad the data to a multiple of 1024 bytes var dataRe = data.Length % 1024; if (dataRe != 0) { var dataEx = new byte[1024 - data.Length % 1024]; data = Enumerable.Concat(data, dataEx).ToArray(); } int dataLen = data.Length / 1024; SetTextMesssage(100 * 3 / 5, $"开始传输文件数据,共{dataLen}步" + "\r\n"); // 5. Send the data blocks for (int i = 1; i <= dataLen; i++) { var writeData = new byte[] { 0x55, 0xAA, 0x3C, 0xC3 }; var partData = GetData(data, i); writeData = Enumerable.Concat(writeData, 
partData).ToArray(); var clcData = CLCData(writeData); writeData = Enumerable.Concat(writeData, clcData).ToArray(); byte[] result4 = OrderSortService.DoloadBoard(writeData); if (result4[0] != 0x5F || result4[1] != 0x01 || result4[2] != 0x00 || result4[3] != 0xC3) { throw new Exception($"下载数据失败:接收传输数据应答失败:第{i}步"); } SetTextMesssage(100 * 3 / 5 + 40 / (data.Length / 1024) * i, $"第{i}步传输文件数据成功" + "\r\n"); } SetTextMesssage(100 * 5 / 5, "更新成功!" + "\r\n"); } catch (Exception ex) { Invoke((MethodInvoker) delegate() { btn_Go.Enabled = true; }); MessageBox.Show(ex.Message); } }
/// <summary>
/// Concatenates the contents of an immutable array with a trailing sequence.
/// </summary>
/// <param name="first">The array whose underlying values come first.</param>
/// <param name="second">The sequence appended after the array's values.</param>
/// <returns>A lazily evaluated sequence of both inputs, in order.</returns>
public static IEnumerable<TSource> Concat<TSource>(this Arr<TSource> first, IEnumerable<TSource> second)
{
    return Enumerable.Concat(first.Value, second);
}
public static void HorribleOneLiner() { Enumerable.Range(1, 100).Select(i => new KeyValuePair <int, IEnumerable <string> >(i, Enumerable.Empty <string>())) .Select(p => new KeyValuePair <int, IEnumerable <string> >(p.Key, p.Key % 3 == 0 ? p.Value.Append("Fizz") : p.Value)) .Select(p => new KeyValuePair <int, IEnumerable <string> >(p.Key, p.Key % 5 == 0 ? p.Value.Append("Buzz") : p.Value)) .Select(p => new KeyValuePair <int, IEnumerable <string> >(p.Key, p.Key % 7 == 0 ? p.Value.Append("Bang") : p.Value)) .Select(p => new KeyValuePair <int, List <string> >(p.Key, p.Key % 11 == 0 ? new List <string>() { "Bong" } : p.Value.ToList())) .Select(p => new KeyValuePair <int, IEnumerable <string> >(p.Key, p.Key % 13 == 0 ? p.Value.Count > 0 && p.Value[0] == "Fizz" ? Enumerable.Concat(new List <string>() { "Fizz", "Fezz" }, p.Value.Skip(1)).ToList() : Enumerable.Prepend(p.Value, "Fezz").ToList() : p.Value)) .Select(p => new KeyValuePair <int, IEnumerable <string> >(p.Key, p.Key % 17 == 0 ? Enumerable.Reverse(p.Value) : p.Value)) .Select(p => p.Value.Any() ? String.Join("", p.Value) : p.Key.ToString()) .ToList().ForEach(s => Console.WriteLine(s)); }
public IEnumerable <Room> GetRooms() { return(Enumerable.Concat((IEnumerable <Room>)_rooms.Values, _voiceRooms.Values)); }
/// <summary>
/// Packages an Azure cloud service by invoking the CsPack tool over the service's
/// web and worker roles. Roles containing files that must not be published are first
/// copied to clean temp directories, which are deleted when packaging finishes.
/// </summary>
/// <param name="definition">Parsed service definition listing web and worker roles; must not be null.</param>
/// <param name="paths">Project paths; RootPath must be non-empty.</param>
/// <param name="type">Target environment (local vs cloud) — selects the CsPack argument template.</param>
/// <param name="azureSdkBinDirectory">Directory containing the CsPack executable.</param>
/// <param name="standardOutput">Receives CsPack's stdout.</param>
/// <param name="standardError">Receives CsPack's stderr.</param>
/// <exception cref="ArgumentNullException">When <paramref name="definition"/> is null.</exception>
/// <exception cref="ArgumentException">When the root path is missing.</exception>
public void CreatePackage(ServiceDefinition definition, CloudProjectPathInfo paths, DevEnv type, string azureSdkBinDirectory, out string standardOutput, out string standardError)
{
    if (definition == null)
    {
        throw new ArgumentNullException(
            "definition",
            string.Format(Resources.InvalidOrEmptyArgumentMessage, "Service definition"));
    }
    if (string.IsNullOrEmpty(paths.RootPath))
    {
        throw new ArgumentException(Resources.InvalidRootNameMessage, "rootPath");
    }
    // Track the directories that are created by GetOrCreateCleanPath
    // to avoid publishing iisnode log files so we can delete the temp
    // copies when we're finished packaging
    Dictionary<string, string> tempDirectories = new Dictionary<string, string>();
    try
    {
        string roles =
            // Get the names of all web and worker roles
            Enumerable.Concat(
                definition.WebRole.NonNull().Select(role => role.name),
                definition.WorkerRole.NonNull().Select(role => role.name))
            // Get the name and safe path for each role (i.e., if the
            // role has files that shouldn't be packaged, it'll be
            // copied to a temp location without those files)
            .Select(name => GetOrCreateCleanPath(paths.RolesPath, name, tempDirectories, type))
            // Format the role name and path as a role argument
            .Select(nameAndPath => string.Format(Resources.RoleArgTemplate, nameAndPath.Key, nameAndPath.Value))
            // Join all the role arguments together into one
            .DefaultIfEmpty(string.Empty)
            .Aggregate(string.Concat);
        string sites =
            // Get all of the web roles
            definition.WebRole.NonNull()
            // Get all the sites in each role and format them all as
            // site arguments
            .SelectMany(role =>
                // Format each site as a site argument
                role.Sites.Site.Select(site =>
                    string.Format(
                        Resources.SitesArgTemplate,
                        role.name,
                        site.name,
                        tempDirectories.GetValueOrDefault(role.name, paths.RolesPath))))
            // Join all the site arguments together into one
            .DefaultIfEmpty(string.Empty)
            .Aggregate(string.Concat);
        string args = string.Format(
            type == DevEnv.Local ? Resources.CsPackLocalArg : Resources.CsPackCloudArg,
            paths.RootPath,
            roles,
            sites);
        // Run CsPack to generate the package
        ProcessHelper.StartAndWaitForProcess(
            new ProcessStartInfo(
                Path.Combine(azureSdkBinDirectory, Resources.CsPackExe),
                args),
            out standardOutput,
            out standardError);
    }
    finally
    {
        // Cleanup any temp directories
        tempDirectories.Values.ForEach(dir => Directory.Delete(dir, true));
    }
}
public Ruleset Evaluate(NodeList <Expression> args, Env env, List <Ruleset> closureContext) { if (args) { Guard.ExpectMaxArguments(Params.Count, args.Count, String.Format("'{0}'", Name), Index); } var frame = new Ruleset(null, new List <Node>()); for (var i = 0; i < Params.Count; i++) { if (!String.IsNullOrEmpty(Params[i].Name)) { Node val; if (args && i < args.Count) { val = args[i]; } else { val = Params[i].Value; } if (val) { frame.Rules.Add(new Rule(Params[i].Name, val.Evaluate(env)) { Index = val.Index }); } else { throw new ParsingException(String.Format("wrong number of arguments for {0} ({1} for {2})", Name, args.Count, _arity), Index); } } } var frames = new[] { this, frame }.Concat(env.Frames).Concat(closureContext).Reverse(); var context = new Env { Frames = new Stack <Ruleset>(frames) }; var newRules = new List <Node>(); foreach (var rule in Rules) { if (rule is MixinDefinition) { var mixin = rule as MixinDefinition; var parameters = Enumerable.Concat(mixin.Params, frame.Rules.Cast <Rule>()); newRules.Add(new MixinDefinition(mixin.Name, new NodeList <Rule>(parameters), mixin.Rules)); } else if (rule is Ruleset) { var ruleset = (rule as Ruleset); context.Frames.Push(ruleset); var rules = NodeHelper.NonDestructiveExpandNodes <MixinCall>(context, ruleset.Rules) .Select(r => r.Evaluate(context)).ToList(); context.Frames.Pop(); newRules.Add(new Ruleset(ruleset.Selectors, rules)); } else if (rule is MixinCall) { newRules.AddRange((NodeList)rule.Evaluate(context)); } else { newRules.Add(rule.Evaluate(context)); } } return(new Ruleset(null, newRules)); }
/// <summary>
/// Integration test: verifies that after a checkpoint is written at the midpoint of a
/// batch, a restarted event processor resumes from the event following the checkpoint.
/// Sends two equal segments of events, checkpoints at the last event of the first
/// segment, then restarts processing and asserts every post-checkpoint event is seen.
/// </summary>
public async Task ProcessorClientBeginsWithTheNextEventAfterCheckpointing()
{
    // Setup the environment.
    await using EventHubScope scope = await EventHubScope.CreateAsync(1);
    var connectionString = TestEnvironment.BuildConnectionStringForEventHub(scope.EventHubName);
    using var cancellationSource = new CancellationTokenSource();
    // Overall safety timeout for the whole test.
    cancellationSource.CancelAfter(TimeSpan.FromMinutes(4));
    // Send a set of events.
    var segmentEventCount = 25;
    var beforeCheckpointEvents = EventGenerator.CreateEvents(segmentEventCount).ToList();
    var afterCheckpointEvents = EventGenerator.CreateEvents(segmentEventCount).ToList();
    var sourceEvents = Enumerable.Concat(beforeCheckpointEvents, afterCheckpointEvents).ToList();
    // Checkpoint at the last event of the first segment.
    var checkpointEvent = beforeCheckpointEvents.Last();
    var sentCount = await SendEvents(connectionString, sourceEvents, cancellationSource.Token);
    Assert.That(sentCount, Is.EqualTo(sourceEvents.Count), "Not all of the source events were sent.");
    // Attempt to read back the first half of the events and checkpoint.
    Func<ProcessEventArgs, Task> processedEventCallback = async args =>
    {
        if (args.Data.IsEquivalentTo(checkpointEvent))
        {
            await args.UpdateCheckpointAsync(cancellationSource.Token);
        }
    };
    var processedEvents = new ConcurrentDictionary<string, EventData>();
    var completionSource = new TaskCompletionSource<bool>(TaskCreationOptions.RunContinuationsAsynchronously);
    var beforeCheckpointProcessHandler = CreateEventTrackingHandler(segmentEventCount, processedEvents, completionSource, cancellationSource.Token, processedEventCallback);
    var options = new EventProcessorOptions { LoadBalancingUpdateInterval = TimeSpan.FromMilliseconds(250) };
    var storageManager = new InMemoryStorageManager(_ => {});
    var processor = CreateProcessor(scope.ConsumerGroups.First(), connectionString, storageManager, options);
    processor.ProcessErrorAsync += CreateAssertingErrorHandler();
    processor.ProcessEventAsync += beforeCheckpointProcessHandler;
    await processor.StartProcessingAsync(cancellationSource.Token);
    // Wait until the tracking handler signals completion or the safety timeout fires.
    await Task.WhenAny(completionSource.Task, Task.Delay(Timeout.Infinite, cancellationSource.Token));
    Assert.That(cancellationSource.IsCancellationRequested, Is.False, "The cancellation token should not have been signaled.");
    await processor.StopProcessingAsync(cancellationSource.Token);
    // Validate a checkpoint was created and that events were processed.
    var checkpoints = (await storageManager.ListCheckpointsAsync(processor.FullyQualifiedNamespace, processor.EventHubName, processor.ConsumerGroup, cancellationSource.Token))?.ToList();
    Assert.That(checkpoints, Is.Not.Null, "A checkpoint should have been created.");
    Assert.That(checkpoints.Count, Is.EqualTo(1), "A single checkpoint should exist.");
    Assert.That(processedEvents.Count, Is.AtLeast(beforeCheckpointEvents.Count), "All events before the checkpoint should have been processed.");
    // Reset state and start the processor again; it should resume from the event following the checkpoint.
    processedEvents.Clear();
    completionSource = new TaskCompletionSource<bool>(TaskCreationOptions.RunContinuationsAsynchronously);
    processor.ProcessEventAsync -= beforeCheckpointProcessHandler;
    processor.ProcessEventAsync += CreateEventTrackingHandler(segmentEventCount, processedEvents, completionSource, cancellationSource.Token);
    await processor.StartProcessingAsync(cancellationSource.Token);
    await Task.WhenAny(completionSource.Task, Task.Delay(Timeout.Infinite, cancellationSource.Token));
    Assert.That(cancellationSource.IsCancellationRequested, Is.False, "The cancellation token should not have been signaled.");
    await processor.StopProcessingAsync(cancellationSource.Token);
    cancellationSource.Cancel();
    // Every event from the second segment must have been processed after the restart.
    foreach (var sourceEvent in afterCheckpointEvents)
    {
        var sourceId = sourceEvent.Properties[EventGenerator.IdPropertyName].ToString();
        Assert.That(processedEvents.TryGetValue(sourceId, out var processedEvent), Is.True, $"The event with custom identifier [{ sourceId }] was not processed.");
        Assert.That(sourceEvent.IsEquivalentTo(processedEvent), $"The event with custom identifier [{ sourceId }] did not match the corresponding processed event.");
    }
}
/// <summary>
/// Merges two setters for the same property into one. Control collections are merged
/// by concatenation; value settings are merged by locating a MergeExpressions or
/// MergeValues method on this type (optimized to a constant when both inputs are
/// constant). Returns null and sets <paramref name="error"/> when merging is impossible.
/// </summary>
/// <param name="a">First setter; its Property is used for the result.</param>
/// <param name="b">Second setter.</param>
/// <param name="error">Receives a human-readable failure reason, or null on success.</param>
public virtual ResolvedPropertySetter MergeValues(ResolvedPropertySetter a, ResolvedPropertySetter b, out string error)
{
    var property = a.Property;
    error = null;
    // Control collections merge structurally: just append b's controls after a's.
    if (a is ResolvedPropertyControlCollection firstCollection && b is ResolvedPropertyControlCollection secondCollection)
    {
        return new ResolvedPropertyControlCollection(property, Enumerable.Concat(firstCollection.Controls, secondCollection.Controls).ToList());
    }
    ResolvedBinding bindingA;
    Expression valA = GetExpression(a, out bindingA);
    ResolvedBinding bindingB;
    Expression valB = GetExpression(b, out bindingB);
    if (valA == null)
    {
        // Fixed: the interpolated message previously left the opening quote unclosed.
        error = $"Could not merge with property type '{a.GetType().Name}'";
        return null;
    }
    if (valB == null)
    {
        // Fixed: the interpolated message previously left the opening quote unclosed.
        error = $"Could not merge with property type '{b.GetType().Name}'";
        return null;
    }
    // Only static value bindings can be merged.
    if (bindingA != null && !typeof(IStaticValueBinding).IsAssignableFrom(bindingA.BindingType) || bindingB != null && !typeof(IStaticValueBinding).IsAssignableFrom(bindingB.BindingType))
    {
        error = "Can not merge values of non-value bindings.";
        return null;
    }
    if (bindingA != null && bindingB != null)
    {
        if (bindingA.BindingType != bindingB.BindingType)
        {
            error = "Can not merge values of different binding types";
            return null;
        }
    }
    var resultExpression = TryOptimizeMethodCall(TryFindMethod(GetType(), MergeExpressionsMethodName, Expression.Constant(property), Expression.Constant(valA), Expression.Constant(valB))) as Expression;
    // Try to find a MergeValues method if MergeExpressions does not exist, or try to eval
    // it to a constant if the expression is not constant
    if (resultExpression == null || valA.NodeType == ExpressionType.Constant && valB.NodeType == ExpressionType.Constant && resultExpression.NodeType != ExpressionType.Constant)
    {
        var methodCall = TryFindMergeMethod(property, valA, valB);
        if (methodCall == null)
        {
            error = $"Could not find merge method for '{valA}' and '{valB}'.";
            return null;
        }
        var optimizedCall = TryOptimizeMethodCall(methodCall);
        if (optimizedCall != null)
        {
            resultExpression = Expression.Constant(optimizedCall);
        }
        else if (resultExpression == null)
        {
            resultExpression = methodCall;
        }
    }
    if (resultExpression.NodeType == ExpressionType.Constant)
    {
        return EmitConstant(resultExpression.CastTo<ConstantExpression>().Value, property, ref error);
    }
    else
    {
        return EmitBinding(resultExpression, property, bindingA ?? bindingB, ref error);
    }
}
// LINQ tutorial program: demonstrates query syntax (where/orderby/group/join),
// method syntax (Where/OrderBy/First/Min/Max), and static Enumerable helpers
// (Repeat/Concat/Average/Except) over small in-memory student/course lists.
// Most output statements are intentionally left commented out for stepwise demos.
static void Main(string[] args)
{
    int[] numeros = { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 };
    var numerosPares = from n in numeros where (n % 2) == 0 select n;
    //Console.WriteLine(string.Join(",", numerosPares));
    // another way to do it without using Linq
    foreach (var item in numeros)
    {
        if ((item % 2) == 0)
        {
            //Console.Write($"{item},");
        }
    }
    Console.WriteLine("");
    var numerosMayoresA5 = from n in numeros where n > 5 select n;
    //Console.WriteLine(string.Join(",", numerosMayoresA5));
    var numerosmayoresa2Menoresa7 = from n in numeros where n > 2 && n < 7 select n;
    //Console.WriteLine(string.Join(",", numerosmayoresa2Menoresa7));
    string[] cadenas = { "Uno", "Dos", "Tres", "Cuatro", "Cinco" };
    var ContieneI = from c in cadenas where c.Contains("i") select c;
    //Console.WriteLine(string.Join(",", ContieneI));
    var ContieneC = from c in cadenas where c.ToLower().Contains("c") select c;
    //Console.WriteLine(string.Join(",", ContieneC));
    var esCuatro = from c in cadenas where c == "Cuatro" select c;
    //Console.WriteLine(string.Join(",", esCuatro));
    // Sample student data used by the remaining queries.
    List<Estudiante> estudiantes = new List<Estudiante>();
    estudiantes.Add(new Estudiante() { Nombre = "Denys", Apellido = "Cuervo", Codigo = "E001", Edad = 19, Codigo_Curso = "C001" });
    estudiantes.Add(new Estudiante() { Nombre = "Hernan", Apellido = "Rojas", Codigo = "E002", Edad = 20, Codigo_Curso = "C002" });
    estudiantes.Add(new Estudiante() { Nombre = "Julian", Apellido = "Caicedo", Codigo = "E003", Edad = 18, Codigo_Curso = "C001" });
    estudiantes.Add(new Estudiante() { Nombre = "Ricardo", Apellido = "Zaldua", Codigo = "E004", Edad = 21, Codigo_Curso = "C002" });
    estudiantes.Add(new Estudiante() { Nombre = "Santiago", Apellido = "Balcero", Codigo = "E005", Edad = 20, Codigo_Curso = "C003" });
    estudiantes.Add(new Estudiante() { Nombre = "William", Apellido = "Garcia", Codigo = "E006", Edad = 19, Codigo_Curso = "C004" });
    estudiantes.Add(new Estudiante() { Nombre = "Miguel", Apellido = "Teheran", Codigo = "E007", Edad = 21, Codigo_Curso = "C005" });
    var estudiantesMayoresA20 = from e in estudiantes where e.Edad > 20 select e;
    //foreach (var item in estudiantesMayoresA20)
    //{
    //    Console.WriteLine($"{item.Codigo} - {item.Nombre} {item.Apellido} - {item.Edad} ");
    //}
    var estudiantesApellidoconCMayorA18 = from e in estudiantes where e.Edad > 18 && e.Apellido.ToLower().StartsWith("c") select e;
    foreach (var item in estudiantesApellidoconCMayorA18)
    {
        //Console.WriteLine($"{item.Codigo} - {item.Nombre} {item.Apellido} - {item.Edad} ");
    }
    var estudiantesNombreFinalizaconOMayorA19 = from e in estudiantes where e.Edad >= 20 || e.Nombre.ToLower().EndsWith("o") select e;
    foreach (var item in estudiantesNombreFinalizaconOMayorA19)
    {
        //Console.WriteLine($"{item.Codigo} - {item.Nombre} {item.Apellido} - {item.Edad} ");
    }
    var estudiantesDiferentesFiltros = from e in estudiantes where ((e.Codigo == "E001" || e.Codigo == "E002") && e.Edad <= 19) || e.Edad >= 21 select e;
    foreach (var item in estudiantesDiferentesFiltros)
    {
        //Console.WriteLine($"{item.Codigo} - {item.Nombre} {item.Apellido} - {item.Edad} ");
    }
    // Ordering in Linq
    var estudiantesOrdenadosPorEdad = from e in estudiantes where e.Nombre.ToLower().Contains("m") orderby e.Edad ascending select e;
    var estudiantesOrdenadosPorEdadDescending = from e in estudiantes where e.Nombre.ToLower().Contains("m") orderby e.Edad descending select e;
    IOrderedEnumerable<Estudiante> estudiantesOrdenadosPorNombreDescending = from e in estudiantes orderby e.Nombre descending select e;
    foreach (var item in estudiantesOrdenadosPorNombreDescending)
    {
        //Console.WriteLine($"{item.Codigo} - {item.Nombre} {item.Apellido} - {item.Edad} ");
    }
    // Grouping
    var estudiantesAgrupadosPorEdad = from e in estudiantes orderby e.Edad group e by e.Edad;
    foreach (var group in estudiantesAgrupadosPorEdad)
    {
        //Console.WriteLine($"{group.Key} - {group.Count()}");
        foreach (var item in group)
        {
            //Console.WriteLine($"{item.Codigo} - {item.Nombre} {item.Apellido} - {item.Edad} ");
        }
    }
    // Sample course data joined against the students below.
    List<Curso> cursos = new List<Curso>();
    cursos.Add(new Curso() { Codigo = "C001", Nombre = "Curso Azure", Descripcion = "Curso de azure" });
    cursos.Add(new Curso() { Codigo = "C002", Nombre = "Curso HTML", Descripcion = "Curso basico y fundamentos de HTML" });
    cursos.Add(new Curso() { Codigo = "C003", Nombre = "Curso netCore", Descripcion = "Curso avanzado de .net core y microservicios" });
    cursos.Add(new Curso() { Codigo = "C004", Nombre = "Curso React.js", Descripcion = "Curso fron-end en React.js" });
    cursos.Add(new Curso() { Codigo = "C005", Nombre = "Curso Base de datos", Descripcion = "Curso basico de sentencias SQL, base de datos relacionales" });
    // Join students to their course and project into an anonymous type.
    var EstudiantePorCurso = from e in estudiantes join c in cursos on e.Codigo_Curso equals c.Codigo orderby e.Edad select new { NombreEstudiante = e.Nombre, NombreCurso = c.Nombre };
    //foreach (var item in EstudiantePorCurso)
    //{
    //    Console.WriteLine($"{item.NombreEstudiante} - {item.NombreCurso}");
    //}
    // Equivalent join done manually with nested loops.
    foreach (var estudiante in estudiantes)
    {
        foreach (var curso in cursos)
        {
            if (estudiante.Codigo_Curso == curso.Codigo)
            {
                //Console.WriteLine($"{estudiante.Nombre} - {curso.Nombre}");
            }
        }
    }
    // Extension methods on lists
    foreach (var item in estudiantes.OrderBy(p => p.Edad))
    {
        //Console.WriteLine($"{item.Codigo} - {item.Nombre} {item.Apellido} - {item.Edad} ");
    }
    foreach (var item in estudiantes.OrderByDescending(p => p.Edad))
    {
        //Console.WriteLine($"{item.Codigo} - {item.Nombre} {item.Apellido} - {item.Edad} ");
    }
    foreach (var item in estudiantes.Where(p => (p.Codigo_Curso == "C001" && p.Edad > 19) || p.Edad == 18).OrderBy(p => p.Edad))
    {
        //Console.WriteLine($"{item.Codigo} - {item.Nombre} {item.Apellido} - {item.Edad} ");
    }
    var estudiante19 = estudiantes.First(p => p.Edad == 19);
    //Console.WriteLine($"{estudiante19.Codigo} - {estudiante19.Nombre} {estudiante19.Apellido} - {estudiante19.Edad} ");
    //Console.WriteLine($"{estudiantes.Min(p => p.Edad)} - {estudiantes.Max(p => p.Edad)} - {estudiantes.Sum(p => p.Edad)/estudiantes.Count} ");
    var estudiantesTexto = from e in estudiantes where e.Edad > 18 orderby e.Edad select $"{e.Nombre} {e.Apellido} - {e.Codigo_Curso}";
    foreach (var item in estudiantesTexto)
    {
        //Console.WriteLine(item);
    }
    // Exercise
    List<Book> lstBooks = new List<Book>();
    var consultaLibro = from l in lstBooks where l.Title.ToLower().Contains("vida") && l.Year == "2005" orderby l.Year select l;
    // Enumerable methods
    //Console.WriteLine($"{Enumerable.Min(estudiantes,p=> p.Edad) } - {Enumerable.Max(estudiantes, p => p.Edad)} - {(double)(double.Parse(Enumerable.Sum(estudiantes, p => p.Edad).ToString()) / double.Parse(estudiantes.Count.ToString()))} ");
    var estudiantesRandom = Enumerable.Repeat(new Estudiante() { Nombre = "Mario" }, 10);
    foreach (var item in estudiantesRandom)
    {
        //Console.WriteLine($"{item.Codigo} - {item.Nombre} {item.Apellido} - {item.Edad} ");
    }
    var estudianteContact = Enumerable.Concat(estudiantes, estudiantesRandom);
    foreach (var item in estudianteContact)
    {
        //Console.WriteLine($"{item.Codigo} - {item.Nombre} {item.Apellido} - {item.Edad} ");
    }
    Console.WriteLine($"{Enumerable.Min(estudiantes,p=> p.Edad) } - {Enumerable.Max(estudiantes, p => p.Edad)} - {Enumerable.Average(estudiantes, p => p.Edad).ToString("P")} ");
    foreach (var item in Enumerable.Except(estudiantes, estudiantesDiferentesFiltros))
    {
        // this should not be done
        if (estudiantesDiferentesFiltros.Contains(item))
        {
            continue;
        }
        //Console.WriteLine($"{item.Codigo} - {item.Nombre} {item.Apellido} - {item.Edad} ");
    }
    Console.ReadKey();
}
protected string GetFullAPIURL() => string.Join("/", Enumerable.Concat(new string[] { BaseURL }, this.ApiUrl).Concat(ApiParams));
// **** BACKGROUND THREAD **** private void WorkerLoop() { try { EnsureOnThread(); while (!_shutdownToken.IsCancellationRequested) { // Grab the parcel of work to do var parcel = _main.GetParcel(); if (parcel.Changes.Any()) { try { BackgroundParserResultsReadyEventArgs args = null; using (var linkedCancel = CancellationTokenSource.CreateLinkedTokenSource(_shutdownToken, parcel.CancelToken)) { if (!linkedCancel.IsCancellationRequested) { // Collect ALL changes List <ChangeReference> allChanges; if (_previouslyDiscarded != null) { allChanges = Enumerable.Concat(_previouslyDiscarded, parcel.Changes).ToList(); } else { allChanges = parcel.Changes.ToList(); } var finalChange = allChanges.Last(); var results = ParseChange(finalChange.Snapshot, linkedCancel.Token); if (results != null && !linkedCancel.IsCancellationRequested) { // Clear discarded changes list _previouslyDiscarded = null; _currentSyntaxTree = results.GetSyntaxTree(); // Build Arguments args = new BackgroundParserResultsReadyEventArgs(finalChange, results); } else { // Parse completed but we were cancelled in the mean time. Add these to the discarded changes set _previouslyDiscarded = allChanges; } } } if (args != null) { _main.ReturnParcel(args); } } catch (OperationCanceledException) { } } else { Thread.Yield(); } } } catch (OperationCanceledException) { // Do nothing. Just shut down. } finally { // Clean up main thread resources _main.Dispose(); } }
internal static (int added, int deleted) GetLinesChanged(string contentBefore, string contentNow) { if (contentBefore.Length == 0) { return(Split(contentNow, 0, contentNow.Length).Count(), 0); } if (contentNow.Length == 0) { return(0, Split(contentBefore, 0, contentBefore.Length).Count()); } int prefixLength = FindLengthOfCommonPrefix(contentBefore, contentNow); int suffixLength = FindLengthOfCommonSuffix(contentBefore, contentNow); if (prefixLength + suffixLength >= Math.Max(contentBefore.Length, contentNow.Length) && prefixLength < suffixLength) { prefixLength = 0; } else if (prefixLength + suffixLength >= Math.Max(contentBefore.Length, contentNow.Length) && suffixLength < prefixLength) { suffixLength = 0; } Dictionary <int, List <string> > allLines1 = Split(contentBefore, prefixLength, suffixLength).GroupBy(x => x.GetHashCode()) .ToDictionary(x => x.Key, x => x.ToList()); Dictionary <int, List <string> > allLines2 = Split(contentNow, prefixLength, suffixLength).GroupBy(x => x.GetHashCode()) .ToDictionary(x => x.Key, x => x.ToList()); int added = 0; int deleted = 0; foreach (int hashCode in Enumerable.Concat(allLines1.Keys, allLines2.Keys).Distinct()) { bool existed1 = allLines1.TryGetValue(hashCode, out List <string> lines1); bool existed2 = allLines2.TryGetValue(hashCode, out List <string> lines2); if (existed1 && existed2 && hashCode == BlankLineHash) { int diff = lines1.Count - lines2.Count; if (diff > 0) { deleted += diff; } if (diff < 0) { added += (diff * -1); } } else if (existed1 && existed2) { List <string> lines2Copy = new List <string>(lines2); foreach (string line in lines1) { if (lines2Copy.Contains(line)) { lines2Copy.Remove(line); } else { deleted++; } } foreach (string line in lines2) { if (lines1.Contains(line)) { lines1.Remove(line); } else { added++; } } } else if (existed1) { deleted += lines1.Count; } else if (existed2) { added += lines2.Count; } } return(added, deleted); }
// **** BACKGROUND THREAD ****
/// <summary>
/// Background Razor parse loop with tracing. Pulls change parcels from the main-thread
/// proxy, merges in any previously discarded changes, parses the newest buffer, compares
/// the new parse tree against the current one, and returns completion args. Timing and
/// sanity checks are compiled in only under EDITOR_TRACING / CHECK_TREE.
/// </summary>
private void WorkerLoop()
{
    long? elapsedMs = null;
    string fileNameOnly = Path.GetFileName(_fileName);
#if EDITOR_TRACING
    Stopwatch sw = new Stopwatch();
#endif
    try
    {
        RazorEditorTrace.TraceLine(RazorResources.Trace_BackgroundThreadStart, fileNameOnly);
        EnsureOnThread();
        while (!_shutdownToken.IsCancellationRequested)
        {
            // Grab the parcel of work to do
            WorkParcel parcel = _main.GetParcel();
            if (parcel.Changes.Any())
            {
                RazorEditorTrace.TraceLine(RazorResources.Trace_ChangesArrived, fileNameOnly, parcel.Changes.Count);
                try
                {
                    DocumentParseCompleteEventArgs args = null;
                    // Cancel parsing on either shutdown or this parcel's own cancellation.
                    using (var linkedCancel = CancellationTokenSource.CreateLinkedTokenSource(_shutdownToken, parcel.CancelToken))
                    {
                        if (parcel != null && !linkedCancel.IsCancellationRequested)
                        {
                            // Collect ALL changes
#if EDITOR_TRACING
                            if (_previouslyDiscarded != null && _previouslyDiscarded.Any())
                            {
                                RazorEditorTrace.TraceLine(RazorResources.Trace_CollectedDiscardedChanges, fileNameOnly, _previouslyDiscarded.Count);
                            }
#endif
                            var allChanges = Enumerable.Concat(
                                _previouslyDiscarded ?? Enumerable.Empty<TextChange>(), parcel.Changes).ToList();
                            // Only the newest buffer needs parsing; it subsumes earlier edits.
                            var finalChange = allChanges.LastOrDefault();
                            if (finalChange != default(TextChange))
                            {
#if EDITOR_TRACING
                                sw.Start();
#endif
                                GeneratorResults results = ParseChange(finalChange.NewBuffer, linkedCancel.Token);
#if EDITOR_TRACING
                                sw.Stop();
                                elapsedMs = sw.ElapsedMilliseconds;
                                sw.Reset();
#endif
                                RazorEditorTrace.TraceLine(
                                    RazorResources.Trace_ParseComplete,
                                    fileNameOnly,
                                    elapsedMs.HasValue ? elapsedMs.Value.ToString() : "?");
                                if (results != null && !linkedCancel.IsCancellationRequested)
                                {
                                    // Clear discarded changes list
                                    _previouslyDiscarded = null;
                                    // Take the current tree and check for differences
#if EDITOR_TRACING
                                    sw.Start();
#endif
                                    bool treeStructureChanged = _currentParseTree == null || TreesAreDifferent(_currentParseTree, results.Document, allChanges, parcel.CancelToken);
#if EDITOR_TRACING
                                    sw.Stop();
                                    elapsedMs = sw.ElapsedMilliseconds;
                                    sw.Reset();
#endif
                                    _currentParseTree = results.Document;
                                    RazorEditorTrace.TraceLine(RazorResources.Trace_TreesCompared, fileNameOnly, elapsedMs.HasValue ? elapsedMs.Value.ToString() : "?", treeStructureChanged);
                                    // Build Arguments
                                    args = new DocumentParseCompleteEventArgs()
                                    {
                                        GeneratorResults = results,
                                        SourceChange = finalChange,
                                        TreeStructureChanged = treeStructureChanged
                                    };
                                }
                                else
                                {
                                    // Parse completed but we were cancelled in the mean time. Add these to the discarded changes set
                                    RazorEditorTrace.TraceLine(RazorResources.Trace_ChangesDiscarded, fileNameOnly, allChanges.Count);
                                    _previouslyDiscarded = allChanges;
                                }
#if CHECK_TREE
                                if (args != null)
                                {
                                    // Rewind the buffer and sanity check the line mappings
                                    finalChange.NewBuffer.Position = 0;
                                    int lineCount = finalChange.NewBuffer.ReadToEnd().Split(new string[] { Environment.NewLine, "\r", "\n" }, StringSplitOptions.None).Count();
                                    Debug.Assert(
                                        !args.GeneratorResults.DesignTimeLineMappings.Any(pair => pair.Value.StartLine > lineCount),
                                        "Found a design-time line mapping referring to a line outside the source file!");
                                    Debug.Assert(
                                        !args.GeneratorResults.Document.Flatten().Any(span => span.Start.LineIndex > lineCount),
                                        "Found a span with a line number outside the source file");
                                    Debug.Assert(
                                        !args.GeneratorResults.Document.Flatten().Any(span => span.Start.AbsoluteIndex > parcel.NewBuffer.Length),
                                        "Found a span with an absolute offset outside the source file");
                                }
#endif
                            }
                        }
                    }
                    if (args != null)
                    {
                        _main.ReturnParcel(args);
                    }
                }
                catch (OperationCanceledException)
                {
                }
            }
            else
            {
                RazorEditorTrace.TraceLine(RazorResources.Trace_NoChangesArrived, fileNameOnly, parcel.Changes.Count);
                // No work; give up the rest of this time slice.
                Thread.Yield();
            }
        }
    }
    catch (OperationCanceledException)
    {
        // Do nothing. Just shut down.
    }
    catch (Exception ex)
    {
        // Last-chance logging so a parser crash doesn't take the IDE down silently.
        MonoDevelop.Core.LoggingService.LogInternalError("Internal error in Razor parser", ex);
    }
    finally
    {
        RazorEditorTrace.TraceLine(RazorResources.Trace_BackgroundThreadShutdown, fileNameOnly);
        // Clean up main thread resources
        _main.Dispose();
    }
}
private static IEnumerable <Func <string, int, TokenMatch> > GetPatterns(bool includeKeywords) => includeKeywords?Enumerable.Concat(KeywordPatterns, GeneralPatterns) : GeneralPatterns;
// **** BACKGROUND THREAD **** private void WorkerLoop() { var fileNameOnly = Path.GetFileName(_filePath); try { EnsureOnThread(); while (!_shutdownToken.IsCancellationRequested) { // Grab the parcel of work to do var parcel = _main.GetParcel(); if (parcel.Edits.Any()) { try { DocumentParseCompleteEventArgs args = null; using (var linkedCancel = CancellationTokenSource.CreateLinkedTokenSource(_shutdownToken, parcel.CancelToken)) { if (!linkedCancel.IsCancellationRequested) { // Collect ALL changes List <Edit> allEdits; if (_previouslyDiscarded != null) { allEdits = Enumerable.Concat(_previouslyDiscarded, parcel.Edits).ToList(); } else { allEdits = parcel.Edits.ToList(); } var finalEdit = allEdits.Last(); var results = ParseChange(finalEdit.Snapshot, linkedCancel.Token); if (results != null && !linkedCancel.IsCancellationRequested) { // Clear discarded changes list _previouslyDiscarded = null; var treeStructureChanged = _currentSyntaxTree == null || TreesAreDifferent(_currentSyntaxTree, results.GetSyntaxTree(), allEdits, parcel.CancelToken); _currentSyntaxTree = results.GetSyntaxTree(); // Build Arguments args = new DocumentParseCompleteEventArgs( finalEdit.Change, finalEdit.Snapshot, treeStructureChanged, results); } else { // Parse completed but we were cancelled in the mean time. Add these to the discarded changes set _previouslyDiscarded = allEdits; } } } if (args != null) { _main.ReturnParcel(args); } } catch (OperationCanceledException) { } } else { Thread.Yield(); } } } catch (OperationCanceledException) { // Do nothing. Just shut down. } finally { // Clean up main thread resources _main.Dispose(); } }