/// <summary>
/// Handler for the "write to file" builtin.
/// Argument layout: 1: content to write, 2: file object, 3: item separator
/// (optional), 4: line break (optional).
/// NOTE(review): the original comment also mentioned a file mode ("w"/"a"),
/// but no mode argument is read here — confirm against callers.
/// </summary>
/// <param name="tokens">The raw token stream for this statement.</param>
/// <param name="context">Execution context supplying symbols and core services.</param>
public override dynamic Run(IList<Token> tokens, Context context)
{
    // The argument list is only consulted for the optional separator / line
    // break; the content and file object are re-evaluated from the raw stream.
    var arguments = ExtractArguments(tokens.Skip(1).ToArray(), context);

    string separator = ",";
    string lineBreak = "\r\n"; // fixed local-name typo: was "lineBreack"

    // Content expression sits at token index 2, the file object at index 4.
    var rhs = recurse.Run(tokens.Skip(2).Take(1).ToArray(), context);
    FileObject fileObject = recurse.Run(tokens.Skip(4).Take(1).ToArray(), context);

    // Use the Count property instead of the LINQ Count() extension.
    if (arguments.Count >= 6)
        separator = arguments[5].Value.ExtractStringValue();
    if (arguments.Count >= 7)
        lineBreak = arguments[6].Value.ExtractStringValue();

    context.Core.WriteToFile(rhs, fileObject, separator, lineBreak);
    return null;
}
/// <summary>
/// Builds the CLR type list for a chain of nested <see cref="Tuple"/> types.
/// Items are grouped seven at a time; each inner tuple becomes the trailing
/// ("TRest") type argument of the tuple one level further out.
/// </summary>
/// <param name="itemTypes">The type list of tuple items, in order.</param>
/// <returns>
/// The type list for nested tuples, ordered from outermost to innermost.
/// </returns>
public static List<Type> CreateTupleTypeList( IList<Type> itemTypes )
{
    // Split the flat item list into chunks of at most seven types each.
    var chunks = new List<List<Type>>();
    for ( int offset = 0; offset < itemTypes.Count; offset += 7 )
    {
        chunks.Add( itemTypes.Skip( offset ).Take( 7 ).ToList() );
    }

    // Fold from the innermost chunk outwards: the tuple built so far becomes
    // the extra trailing type argument of the next chunk out.
    var result = new List<Type>( chunks.Count );
    Type inner = null;
    for ( int i = chunks.Count - 1; i >= 0; i-- )
    {
        var typeArgs = chunks[ i ];
        if ( inner != null )
        {
            typeArgs.Add( inner );
        }
        inner = Type.GetType( "System.Tuple`" + typeArgs.Count, true ).MakeGenericType( typeArgs.ToArray() );
        // Prepend so the outermost tuple ends up first.
        result.Insert( 0, inner );
    }
    return result;
}
/// <summary>
/// Tests whether the token stream satisfies this handler's pattern sequence.
/// Succeeds only when every required pattern matches and all tokens are
/// consumed (or a sub-handler match returns early).
/// </summary>
/// <param name="tokens">The tokens to match against.</param>
/// <param name="registry">Registry used to resolve handler-type patterns.</param>
public bool Match(IList<Token> tokens, HandlerRegistry registry)
{
    var position = 0;
    foreach (var pattern in _patterns)
    {
        var required = !pattern.IsOptional;
        var exhausted = position == tokens.Count;

        var tagPattern = pattern as TagPattern;
        var repeatPattern = pattern as RepeatPattern;
        var handlerPattern = pattern as HandlerTypePattern;

        if (tagPattern != null)
        {
            // A tag pattern consumes exactly one token with the expected tag.
            var matched = position < tokens.Count && tokens[position].IsTaggedAs(tagPattern.TagType);
            if (!matched && required)
            {
                return false;
            }
            if (matched)
            {
                position++;
            }
        }
        else if (repeatPattern != null)
        {
            // A repeat pattern consumes however many tokens it reports.
            int consumed;
            var matched = repeatPattern.Match(tokens.Skip(position).ToList(), out consumed);
            if (!matched && required)
            {
                return false;
            }
            if (matched)
            {
                position += consumed;
            }
        }
        else if (handlerPattern != null)
        {
            // An optional handler pattern with nothing left to match succeeds.
            if (pattern.IsOptional && exhausted)
            {
                return true;
            }
            // Delegate the remaining tokens to any registered sub-handler.
            var subHandlers = registry.GetHandlers(handlerPattern.Type);
            foreach (var handler in subHandlers)
            {
                if (handler.Match(tokens.Skip(position).ToList(), registry))
                {
                    return true;
                }
            }
        }
    }
    // All patterns processed; succeed only if every token was consumed too.
    return position == tokens.Count;
}
/// <summary>
/// Handler for the "find" builtin: find(&lt;value&gt;, &lt;xpath&gt;).
/// Validates the parenthesis/comma layout, evaluates both arguments and
/// delegates to Core.Find.
/// </summary>
/// <param name="tokens">Expected layout: [keyword, '(', value, ',', xpath, ')'].</param>
/// <param name="context">Execution context supplying symbols and core services.</param>
public override dynamic Run(IList<Token> tokens, Context context)
{
    // NOTE(review): tokens[5] is read below, which requires at least 6 tokens;
    // confirm that ExpectTokens(tokens, 4) enforces the intended minimum.
    ExpectTokens(tokens, 4);
    if (tokens[1].Type != TokenType.OpenPar || tokens[3].Type != TokenType.Comma || tokens[5].Type != TokenType.ClosedPar)
        throw new Exception("Invalid syntax.");
    // Evaluate the value (token index 2) and the xpath expression (index 4).
    var value = recurse.Run(tokens.Skip(2).Take(1).ToArray(), context);
    var xpath = recurse.Run(tokens.Skip(4).Take(1).ToArray(), context);
    return context.Core.Find(value, xpath);
}
/// <summary>
/// Handler for the "print list" builtin: prints an evaluated list using an
/// item separator and an optional trailing line end.
/// </summary>
/// <param name="tokens">Token stream; the list expression starts at index 2,
/// the separator at index 4, the optional line end at index 6.</param>
/// <param name="context">Execution context.</param>
public override dynamic Run(IList<Token> tokens, Context context)
{
    ExpectTokens(tokens, 1);
    // NOTE(review): Skip(n).ToArray() hands everything AFTER position n to the
    // evaluator, not a single token — confirm the evaluator stops at the
    // argument boundary. Also, when tokens.Count == 6 the Skip(6) below yields
    // an empty stream; verify ">= 6" is not meant to be ">= 7".
    var rhs = recurse.Run(tokens.Skip(2).ToArray(), context);
    var separator = recurse.Run(tokens.Skip(4).ToArray(), context);
    string lineEnd = "";
    if (tokens.Count >= 6)
        lineEnd = recurse.Run(tokens.Skip(6).ToArray(), context);
    context.Core.PrintList(rhs, separator, lineEnd);
    return null;
}
/// <summary>
/// Returns a Faro-shuffled copy of the list.
/// The list is split in half (the first half gets the extra item when the
/// count is odd) and a new list is built by alternately taking one item from
/// each half.
/// </summary>
/// <param name="list">The list to shuffle; returned unchanged when empty.</param>
/// <returns>A new list containing the interleaved halves.</returns>
public static IList<string> FaroShuffle(IList<string> list)
{
    if (list.Count == 0)
        return list;

    // First half must be greater by one (odd count) or equal to the second
    // half for the interleave to come out right.
    int firstHalfSize = (list.Count + 1) / 2;

    IList<string> firstHalf = list.Take(firstHalfSize).ToList();
    IList<string> secondHalf = list.Skip(firstHalfSize).ToList();

    // Index directly instead of Skip(i).FirstOrDefault(): the original was
    // O(n) per step (O(n^2) overall) and silently dropped null elements.
    IList<string> result = new List<string>(list.Count);
    for (int i = 0; i < firstHalfSize; i++)
    {
        result.Add(firstHalf[i]);
        if (i < secondHalf.Count)
            result.Add(secondHalf[i]);
    }
    return result;
}
/// <summary>
/// Removes consecutive points that are closer together than the configured
/// minimum distance, always keeping the first point. Optionally validates
/// the boundary points afterwards.
/// </summary>
/// <param name="points">The source points, in order.</param>
/// <returns>A new list of copied points with near-duplicates dropped.</returns>
public IList<Point> Filter(IList<Point> points)
{
    var filtered = new List<Point>();
    if (points.Count == 0)
    {
        return filtered;
    }

    // The first point is always kept (copied, not aliased).
    var lastKept = new Point(points.First());
    filtered.Add(lastKept);

    foreach (var candidate in points.Skip(1))
    {
        if (this.DistanceIsTooSmall(candidate, lastKept))
        {
            continue;
        }
        lastKept = new Point(candidate);
        filtered.Add(lastKept);
    }

    if (this.checkBoundary && filtered.Count > 1)
    {
        CheckFirstAndLastPoint(filtered);
    }

    return filtered;
}
/// <summary>
/// Invokes the subroutine with positional and (optionally) keyword arguments.
/// Arguments past the positional count are interpreted as (key, value) pairs
/// when keyword arguments are allowed.
/// </summary>
/// <param name="args">All arguments, positional first.</param>
/// <param name="vm">The executing virtual machine.</param>
/// <param name="info">Source position used for error reporting.</param>
public void Call(IList<IValue> args, VM vm, SourceInfo info)
{
    int argCount = args.Count;
    bool tooFew = argCount < mParamCount;
    bool tooMany = argCount > mParamCount && !mAllowRest && !mAllowKeys;
    if (tooFew || tooMany)
    {
        throw new RheaException(
            this.WrongNumberOfArguments(mParamCount, argCount),
            info
        );
    }

    var keywords = new Dictionary<IValue, IValue>();
    if (mAllowKeys)
    {
        // Every argument past the positional ones must be a two-element
        // array holding a (key, value) pair.
        foreach (IValue extra in args.Skip(mParamCount))
        {
            var pair = extra as ValueArray;
            if (pair == null || pair.Value.Count != 2)
            {
                throw new RheaException(
                    "keyword arguments should occur pairwise",
                    info
                );
            }
            keywords.Add(pair.Value[0], pair.Value[1]);
        }
    }

    mSubrValue(new Arguments(args, keywords), vm, info);
}
/// <summary>
/// Splits a realization (sequence of values) into its non-repeating prefix
/// (Appendix) and its repeating part (Cycle).
/// </summary>
/// <param name="realization">
/// The realization; its last value is assumed to repeat the value at the
/// start of the cycle.
/// </param>
private void ParseRealization(IList<Double> realization)
{
    Double repeatedValue = realization.Last(); // last value is the same as the cycle start value
    // NOTE(review): IndexOf uses exact double equality to locate the cycle
    // start — confirm the producer emits bit-identical repeated values.
    Int32 cycleStartIndex = realization.IndexOf(repeatedValue);
    Appendix = realization.Take(cycleStartIndex).ToList();
    // The final (repeated) value is excluded from the cycle.
    Cycle = realization.Skip(cycleStartIndex).Take(realization.Count - cycleStartIndex - 1).ToList();
}
/// <summary>
/// Drops consecutive points that are too close to the previously kept point,
/// always keeping the first one, then validates the boundary points when more
/// than one point survives.
/// </summary>
/// <param name="points">The source points, in order.</param>
/// <returns>A new list of copied points with near-duplicates dropped.</returns>
public IList<DepthPointEx> Filter(IList<DepthPointEx> points)
{
    var filtered = new List<DepthPointEx>();
    if (points.Count == 0)
    {
        return filtered;
    }

    // Keep a copy of the first point unconditionally.
    var lastKept = new DepthPointEx(points.First());
    filtered.Add(lastKept);

    foreach (var candidate in points.Skip(1))
    {
        if (PointsAreClose(candidate, lastKept))
        {
            continue;
        }
        lastKept = new DepthPointEx(candidate);
        filtered.Add(lastKept);
    }

    if (filtered.Count > 1)
    {
        CheckFirstAndLastPoint(filtered);
    }

    return filtered;
}
/// <summary>
/// Attempts to match this pattern against the remaining <paramref name="left"/>
/// patterns, accumulating matched values in <paramref name="collected"/>.
/// </summary>
/// <param name="left">Patterns not yet consumed.</param>
/// <param name="collected">Values collected so far; null is treated as empty.</param>
/// <returns>The match outcome plus the updated remaining/collected lists.</returns>
public override MatchResult Match(IList<Pattern> left, IEnumerable<Pattern> collected = null)
{
    var coll = collected ?? new List<Pattern>();
    var sresult = SingleMatch(left);
    var match = sresult.Match;
    if (match == null)
    {
        return new MatchResult(false, left, coll);
    }
    // Remove the matched pattern from the remaining list.
    var left_ = new List<Pattern>();
    left_.AddRange(left.Take(sresult.Position));
    left_.AddRange(left.Skip(sresult.Position + 1));
    var sameName = coll.Where(a => a.Name == Name).ToList();
    // Repeatable values (lists or counters) merge into any previously
    // collected pattern carrying the same name.
    if (Value != null && (Value.IsList || Value.IsOfTypeInt))
    {
        // Counters increment by one; list values append the matched value(s).
        var increment = new ValueObject(1);
        if (!Value.IsOfTypeInt)
        {
            increment = match.Value.IsString ? new ValueObject(new [] {match.Value}) : match.Value;
        }
        if (sameName.Count == 0)
        {
            // First occurrence: collect the match itself carrying the increment.
            match.Value = increment;
            var res = new List<Pattern>(coll) {match};
            return new MatchResult(true, left_, res);
        }
        // Subsequent occurrence: fold the increment into the existing value.
        sameName[0].Value.Add(increment);
        return new MatchResult(true, left_, coll);
    }
    // Non-repeatable value: simply append the match to the collected list.
    var resColl = new List<Pattern>();
    resColl.AddRange(coll);
    resColl.Add(match);
    return new MatchResult(true, left_, resColl);
}
/// <summary>
/// Computes running averages: for the first <paramref name="windowSize"/>
/// values the cumulative average so far; afterwards the simple moving average
/// of the last <paramref name="windowSize"/> values.
/// See https://en.wikipedia.org/wiki/Moving_average
/// </summary>
/// <param name="values">The input values; must be non-empty.</param>
/// <param name="windowSize">The window; must be non-negative and not exceed the value count.</param>
/// <returns>One average per input value.</returns>
public IEnumerable<Double> FindAllAverages(IList<double> values, int windowSize)
{
    // Checking for errors
    if (windowSize > values.Count)
        throw new ValueCountLessThanWindowException();
    if (windowSize < 0)
        throw new BadWindowException();
    if (values.Count == 0)
        throw new ValuesCountException();

    var cumulativeAverageArray = new double[values.Count];
    var currentAverage = 0.0;
    for (var i = 0; i < values.Count; i++)
    {
        var count = i + 1; // number of values seen so far
        if (count <= windowSize)
        {
            // Inside the initial window: extend the cumulative average.
            // (The original had two identical branches for i == 0 and i > 0;
            // they are merged here.)
            currentAverage = ((currentAverage * i) + values[i]) / count;
        }
        else
        {
            // Past the window: plain moving average of the last windowSize values.
            currentAverage = values.Skip(count - windowSize).Take(windowSize).Sum() / windowSize;
        }
        cumulativeAverageArray[i] = currentAverage;
    }
    return cumulativeAverageArray;
}
/// <summary>
/// Validates the links of all pages, splitting the work between two parallel
/// workers, each with its own HTTP client.
/// </summary>
/// <param name="pages">The pages whose links are validated.</param>
/// <returns>One validation result per page, in no particular order.</returns>
public IEnumerable<PageLinksValidationResult> ValidateLinks(IList<DocumentationPage> pages)
{
    var results = new ConcurrentBag<PageLinksValidationResult>();

    // Split the pages into two halves, one per worker.
    var half = pages.Count / 2;
    var partitions = new List<DocumentationPage>[]
    {
        pages.Take(half).ToList(),
        pages.Skip(half).ToList()
    };

    Parallel.For(0, 2, i =>
    {
        using (var client = new HttpClient())
        {
            foreach (var page in partitions[i])
                results.Add(ValidatePageLinks(client, page, pages));
        }
    });

    return results;
}
/// <summary>
/// Parses a command block starting at <paramref name="start"/> and invokes it.
/// A command block is the starting line plus any following indented
/// "key: value" lines; the first line's key becomes the command name.
/// </summary>
/// <param name="lines">All lines of the buffer.</param>
/// <param name="start">Index of the command's first line.</param>
/// <param name="size">Receives the range of lines consumed.</param>
/// <param name="result">Receives the command output (newline-terminated), or null.</param>
public void ParseAndRun(IList<string> lines, int start, out Range size, out string result)
{
    string command = null;
    Dictionary<string, string> commandArgs = new Dictionary<string, string>();

    // Extend the block over subsequent indented lines that contain a colon.
    int end = start;
    while (end + 1 < lines.Count && lines[end + 1].Contains(":") && lines[end + 1].StartsWith(" "))
        end += 1;

    // NOTE(review): the starting line itself is not checked for ":" — a start
    // line without a colon makes arg[1] below throw. Confirm callers guarantee it.
    foreach (var line in lines.Skip(start).Take(1 + end - start))
    {
        // Split on the first colon only; the value may itself contain colons.
        var arg = line.Split(new char[] { ':' }, 2);
        if (command == null)
            command = arg[0].Trim();
        commandArgs[arg[0].Trim()] = arg[1].Trim();
    }

    result = Invoke(command, commandArgs);
    // Normalize the output to end with a newline.
    if (result != null && !result.EndsWith("\n"))
        result = result + '\n';

    size = new Range(start) { EndRow = end, EndColumn = lines[end].Length };
}
/// <summary>
/// Adds one or more phone numbers to the named phonebook entry, reporting
/// whether a new entry was created or an existing one was merged.
/// </summary>
/// <param name="arguments">The entry name followed by at least one phone number.</param>
/// <param name="repository">The phonebook repository to update.</param>
public void Execute(IList<string> arguments, IPhonebookRepository repository)
{
    if (arguments.Count < 2)
    {
        throw new ArgumentException("Invalid number of arguments to add phone number");
    }

    string name = arguments[0];

    // Canonicalize every supplied number before storing it.
    var phoneNumbers = arguments
        .Skip(1)
        .Select(number => sanitizer.ConvertPhoneToCanonical(number))
        .ToList();

    bool isPhoneNew = repository.AddPhone(name, phoneNumbers);
    Print(isPhoneNew ? "Phone entry created" : "Phone entry merged");
}
/// <summary>
/// Creates a Word document listing all people's full names, laid out in
/// <c>ColumnCount</c> equal columns with festive formatting.
/// </summary>
/// <param name="people">The people whose full names are printed.</param>
/// <returns>The activated Word document.</returns>
public static Document CreateDocument(IList<Person> people)
{
    var document = Word.Documents.Add();
    document.EmbedTrueTypeFonts = true;

    var range = document.Range();
    range.Font.Name = "Centaur Festive MT Italic";
    range.Font.Size = 14;
    range.ParagraphFormat.Alignment = WdParagraphAlignment.wdAlignParagraphCenter;
    range.PageSetup.LeftMargin = range.PageSetup.RightMargin = 24;
    range.PageSetup.TextColumns.SetCount(ColumnCount);

    // Distribute the names evenly: each column gets ceil(n / ColumnCount)
    // names joined with "\v" (a soft line break in Word).
    var colSize = (int)Math.Ceiling(people.Count / (double)ColumnCount);
    for (int col = 0; col < ColumnCount; col++)
    {
        range.Text = people.Skip(col * colSize).Take(colSize).Join("\v", p => p.FullName);
        if (col == ColumnCount - 1)
            break;
        // Move past the text just written and start the next column.
        range.Collapse(WdCollapseDirection.wdCollapseEnd);
        range.InsertBreak(WdBreakType.wdColumnBreak);
    }

    // NOTE(review): document.Activate() is called twice around Word.Activate(),
    // presumably to force focus back to the document — confirm whether one
    // call suffices.
    document.Activate();
    Word.Activate();
    document.Activate();
    return document;
}
/// <summary>
/// Replaces the first two membership functions with their combination,
/// leaving the rest of the list unchanged.
/// </summary>
/// <param name="msfs">The functions; must contain at least two entries.</param>
/// <returns>A new list: the combined function followed by the remainder.</returns>
private static IList<MembershipFunction> mergeFirstTwoFunctions(IList<MembershipFunction> msfs)
{
    var merged = new List<MembershipFunction>();
    merged.Add(combine(msfs[0], msfs[1]));
    merged.AddRange(msfs.Skip(2));
    return merged;
}
/// <summary>
/// Renders the durations of the most recent builds as a bar graph,
/// coloring each bar by build outcome.
/// </summary>
/// <param name="buildStatuses">The build history, assumed oldest first; may be null.</param>
public void GraphBuildHistory(IList<BuildStatus> buildStatuses)
{
    // Only the last few builds are graphed (was a magic number 8).
    const int maxBarsToShow = 8;

    if (buildStatuses == null)
    {
        _log.Warn("buildStatuses was null. Unable to build a graph.");
        return;
    }
    if (_buildHistoryZedGraph == null || _buildHistoryZedGraph.GraphPane == null)
    {
        _log.Warn("_buildHistoryZedGraph was null. Unable to build a graph.");
        return;
    }

    GraphPane myPane = _buildHistoryZedGraph.GraphPane;
    myPane.CurveList.Clear();

    // Skip clamps a negative argument to zero, so having fewer than
    // maxBarsToShow builds is fine.
    IEnumerable<BuildStatus> lastFewBuildStatuses = buildStatuses.Skip(buildStatuses.Count - maxBarsToShow);
    foreach (BuildStatus buildStatus in lastFewBuildStatuses)
    {
        // Builds that never started or never finished have no duration to plot.
        if (buildStatus == null || buildStatus.FinishedTime == null || buildStatus.StartedTime == null)
            continue;

        var duration = buildStatus.FinishedTime.Value - buildStatus.StartedTime.Value;
        Fill fill = buildStatus.BuildStatusEnum == BuildStatusEnum.Broken ? _failFill : _successFill;
        var bar = myPane.AddBar(null, null, new[] { duration.TotalMinutes }, Color.White);
        bar.Bar.Fill = fill;
        bar.Bar.Border.Color = Color.White;
    }

    _buildHistoryZedGraph.AxisChange();
    _buildHistoryZedGraph.Invalidate();
}
/// <summary>
/// Handler for the "trim" builtin: trims a string, or each string in a list
/// of strings. An optional final argument supplies the characters to trim
/// (see TrimString).
/// </summary>
/// <param name="tokens">The raw token stream for this statement.</param>
/// <param name="context">Execution context supplying symbols and core services.</param>
/// <returns>The trimmed string, or an array of trimmed strings.</returns>
public override dynamic Run(IList<Token> tokens, Context context)
{
    var arguments = ExtractArguments(tokens.Skip(1).ToArray(), context);

    // Optional trim characters are passed as the last argument.
    string trimValue = "";
    if (arguments.Count >= 4)
        trimValue = arguments[arguments.Count - 1].Value.ExtractStringValue();

    object expression;
    if (arguments[0].Type == TokenType.Identifier && arguments[0].Value.ToLower() == "it")
    {
        // "it" refers to the current loop item, which is already bound in the
        // symbol table: evaluate only the leading sub-expression.
        var expr = arguments.Take(3).ToArray();
        expression = recurse.Run(expr, context);
    }
    else
    {
        expression = recurse.Run(arguments, context);
    }

    if (expression is string)
        return TrimString(expression, trimValue);

    if (expression is IEnumerable<string>)
    {
        var list = ((IEnumerable<string>)expression).ToArray();
        // Use Length instead of the LINQ Count() extension on the array.
        for (int index = 0; index < list.Length; index++)
            list[index] = TrimString(list[index], trimValue);
        return list;
    }

    // Fixed typo in the original message ("not foud").
    throw new Exception("Trim expected string values but none were found.");
}
/// <summary>
/// Returns the tokens inside the outermost parenthesis pair, excluding the
/// parentheses themselves.
/// </summary>
/// <param name="tokens">Token stream that must begin with an open parenthesis.</param>
/// <param name="context">Execution context (unused here; kept for handler symmetry).</param>
/// <returns>The tokens between the outer parentheses.</returns>
protected IList<Token> ExtractArguments(IList<Token> tokens, Context context)
{
    if (tokens.Count < 3)
        throw new Exception("Invalid subexpression.");
    if (tokens[0].Type != TokenType.OpenPar)
        throw new Exception("Syntax error.");

    // Track nesting depth: "(" increments, ")" decrements. The matching
    // closing parenthesis is the token at which depth returns to zero.
    var depth = 0;
    var closingIndex = 0;
    for (; closingIndex < tokens.Count; closingIndex++)
    {
        var type = tokens[closingIndex].Type;
        if (type == TokenType.OpenPar)
            depth++;
        else if (type == TokenType.ClosedPar)
            depth--;
        if (depth == 0)
            break;
    }

    // If the scan ended without the depth returning to zero, the parentheses
    // are unbalanced.
    if (depth > 0)
        throw new Exception("Too many open parentheses.");
    // This normally can't happen (the first token opens), but just in case.
    if (depth < 0)
        throw new Exception("Too many closed parentheses.");

    // Everything between the outer parentheses.
    return tokens.Skip(1).Take(closingIndex - 1).ToArray();
}
/// <summary>
/// Creates a source binding for the first property token, handing the
/// remaining tokens to whichever binding extension accepts them. Falls back
/// to a missing-source binding (with a warning) when nothing matches.
/// </summary>
/// <param name="source">The binding source object; may be null.</param>
/// <param name="tokens">The property token chain; must be non-empty.</param>
public IMvxSourceBinding CreateBinding(object source, IList<MvxPropertyToken> tokens)
{
    if (tokens == null || tokens.Count == 0)
    {
        throw new MvxException("empty token list passed to CreateBinding");
    }

    var head = tokens[0];
    var rest = tokens.Skip(1).ToList();

    IMvxSourceBinding binding;
    if (TryCreateBindingFromExtensions(source, head, rest, out binding))
    {
        return binding;
    }

    if (source != null)
    {
        MvxBindingTrace.Trace(
            MvxTraceLevel.Warning,
            "Unable to bind: source property source not found {0} on {1}"
            , head
            , source.GetType().Name);
    }

    return new MvxMissingSourceBinding(source);
}
/// <summary>
/// Handler for the "map" builtin: evaluates a lambda expression once per item
/// of a named list and returns the collected results.
/// Argument layout (after extraction): [listName, ',', argName, ',', expr...].
/// </summary>
/// <param name="tokens">The raw token stream for this statement.</param>
/// <param name="context">Execution context supplying the symbol table.</param>
/// <returns>A list with one evaluated result per input item.</returns>
public override dynamic Run(IList<Token> tokens, Context context)
{
    var arguments = ExtractArguments(tokens.Skip(1).ToArray(), context);

    // The first argument must name a list already present in the symbol table.
    var listName = arguments[0].Value;
    var list = context.Symbols.Get(listName) as IEnumerable;
    if (list == null)
        throw new Exception("Map: first argument is not a list.");

    // The remainder is the lambda: its parameter name followed by the body.
    var lambda = arguments.Skip(2).ToArray();
    var argName = lambda[0].Value;
    // Declare (but do not evaluate) the lambda parameter symbol.
    context.Symbols.Declare(argName);

    var expr = lambda.Skip(2).ToArray();
    var result = new List<object>();
    foreach (var item in list)
    {
        // Bind the current item to the parameter, then evaluate the body.
        context.Symbols.Set(argName, item);
        result.Add(recurse.Run(expr, context));
    }

    // The parameter symbol only lives for the duration of the map.
    context.Symbols.Undeclare(argName);
    return result;
}
/// <summary>
/// Rebuilds the query from the given clause list: the first clause becomes
/// the from-clause, the remainder become the body clauses.
/// </summary>
/// <param name="query">The query to rebuild.</param>
/// <param name="allClauses">All clauses; the first must be a from-clause.</param>
public static QueryExpressionSyntax WithAllClauses(
    this QueryExpressionSyntax query,
    IList<SyntaxNode> allClauses)
{
    var fromClause = (FromClauseSyntax)allClauses.First();
    var bodyClauses = allClauses.Skip(1);
    return query.WithFromClause(fromClause).WithBody(query.Body.WithAllClauses(bodyClauses));
}
/// <summary>
/// Builds a cons-list from the elements, terminated by <paramref name="tail"/>.
/// </summary>
/// <param name="elements">The values to prepend, in order.</param>
/// <param name="tail">The list appended after the elements; null for the empty list.</param>
/// <returns>The resulting list, or <paramref name="tail"/> when there are no elements.</returns>
public static List MakeList(IList<object> elements, List tail = null)
{
    // Build back-to-front iteratively: the recursive Skip(1).ToList() version
    // copied the remaining elements on every step (O(n^2) allocations).
    var result = tail;
    for (int i = elements.Count - 1; i >= 0; i--)
    {
        result = new List(elements[i], result);
    }
    return result;
}
/// <summary>
/// Builds a cons-list from the elements, terminated by the variable
/// <paramref name="tail"/> (a partial list).
/// </summary>
/// <param name="elements">The values to prepend; must contain at least one element.</param>
/// <param name="tail">The variable closing the list.</param>
public static List MakeList(IList<object> elements, Variable tail)
{
    // Iterative back-to-front construction avoids the O(n^2) element copies
    // of the recursive Skip(1).ToList() version.
    var result = new List(elements[elements.Count - 1], tail);
    for (int i = elements.Count - 2; i >= 0; i--)
    {
        result = new List(elements[i], result);
    }
    return result;
}
/// <summary>
/// Exports the items to a new Excel file, splitting them across sheets of at
/// most one million rows each (Excel's per-sheet limit is 1,048,576 rows).
/// </summary>
/// <param name="items">Rows to export; each item is an IDictionary whose values become cells.</param>
/// <param name="fileName">Path of the file to create; must not already exist.</param>
public void ExportExcel(IList<dynamic> items, string fileName)
{
    // Was a magic number; stays below Excel's hard per-sheet row limit.
    const int maxRowsPerSheet = 1000000;

    using (var excelPackage = new ExcelPackage(new FileStream(fileName, FileMode.CreateNew)))
    {
        var currentIndex = 0;
        var sheetIndex = 1;
        do
        {
            var take = Math.Min(maxRowsPerSheet, items.Count - currentIndex);
            var currentSet = items.Skip(currentIndex).Take(take).ToList();
            currentIndex += currentSet.Count;

            var sheet = excelPackage.Workbook.Worksheets.Add("Sheet " + sheetIndex);
            var arrayData = new List<object[]>();
            foreach (var item in currentSet)
            {
                arrayData.Add(((IDictionary<string, object>)item).Values.ToArray());
            }
            sheet.Cells["A1"].LoadFromArrays(arrayData);
            sheetIndex++;
        } while (currentIndex < items.Count);

        // Save once after all sheets are filled (the original re-saved the
        // whole package after every sheet).
        excelPackage.Save();
    }
}
/// <summary>
/// Yields the simple moving average over every window of
/// <paramref name="period"/> consecutive values.
/// </summary>
/// <param name="list">The input values.</param>
/// <param name="period">The window size; must be at least 1.</param>
/// <returns>list.Count - period + 1 averages (none when the list is shorter than the window).</returns>
public static IEnumerable<double> MovingAverage(IList<double> list, int period)
{
    // Validate eagerly; the iterator below would otherwise defer errors to
    // the first enumeration.
    if (list == null)
        throw new ArgumentNullException(nameof(list));
    if (period < 1)
        throw new ArgumentOutOfRangeException(nameof(period), "period must be at least 1.");
    return MovingAverageIterator(list, period);
}

/// <summary>Core iterator; assumes arguments already validated.</summary>
private static IEnumerable<double> MovingAverageIterator(IList<double> list, int period)
{
    int end = list.Count - period + 1;
    for (int i = 0; i < end; i++)
    {
        // Sum by index: the original Skip(i).Take(period).Average() rescanned
        // the first i elements every iteration (O(n^2) overall).
        double sum = 0.0;
        for (int j = i; j < i + period; j++)
        {
            sum += list[j];
        }
        yield return sum / period;
    }
}
/// <summary>
/// Drops the leading time periods whose years were removed as empty by the
/// dictionary builder; returns the original list when nothing was removed.
/// </summary>
/// <param name="dictionaryBuilder">Builder that prunes empty leading years.</param>
/// <param name="timePeriods">The time periods aligned with the builder's data.</param>
protected static IList<TimePeriod> RemoveEarlyEmptyTimePeriods(PartitionTrendDataDictionaryBuilder dictionaryBuilder, IList<TimePeriod> timePeriods)
{
    int earliestIndexRemoved = dictionaryBuilder.RemoveEarlyEmptyYears();
    if (earliestIndexRemoved <= -1)
    {
        return timePeriods;
    }
    return timePeriods.Skip(earliestIndexRemoved + 1).ToList();
}
/// <summary>
/// Builds a Question from a row of strings: the first cell is the picture,
/// the remaining cells are answer masks compiled to regular expressions.
/// </summary>
/// <param name="row">The row; must contain at least the picture cell.</param>
public static Question CreateFromStrings(IList<string> row)
{
    var picture = row[0];
    var answerMasks = row
        .Skip(1)
        .Select(mask => new Regex(MaskToRegex(mask)))
        .ToArray();
    return new Question
    {
        Picture = picture,
        CorrectAnswersMask = answerMasks
    };
}
/// <summary>
/// Parses a raw protocol message into a response.
/// Layout (inferred from the offsets below — confirm against the protocol
/// spec): byte 2 is the response code, byte 3 the error code, byte 1 the
/// payload length plus two, and the payload starts at byte 4.
/// </summary>
/// <param name="rawMessage">The raw message bytes.</param>
public Response(IList<byte> rawMessage)
{
    this.code = rawMessage[2];
    this.errorCode = rawMessage[3];
    // rawMessage[1] presumably counts the code and error bytes too, hence -2.
    this.message = rawMessage
        .Skip(4)
        .Take(rawMessage[1] - 2)
        .ToArray();
}
/// <summary>
/// Builds graphics from tabular rows: the first row holds the column names,
/// each subsequent row becomes one graphic.
/// </summary>
/// <param name="rowList">The rows; null or empty yields an empty list.</param>
/// <returns>One graphic per data row.</returns>
private IList<Graphic> ConstructGraphicsList(IList<string[]> rowList)
{
    var graphics = new List<Graphic>();

    // The original dereferenced rowList[0] BEFORE its null-conditional
    // Skip call, so a null (or empty) list threw; guard explicitly instead.
    if (rowList == null || rowList.Count == 0)
    {
        return graphics;
    }

    var columns = rowList[0];
    foreach (var row in rowList.Skip(1))
    {
        graphics.Add(ConstructNewGraphicfromRow(row, columns));
    }
    return graphics;
}
/// <summary>
/// Creates an indexing manager whose configuration uses the first document
/// source as the primary source and any remaining sources as related ones.
/// </summary>
/// <param name="searchProvider">The search provider to index against.</param>
/// <param name="documentSources">The document sources; may be null.</param>
private static IIndexingManager GetIndexingManager(ISearchProvider searchProvider, IList<DocumentSource> documentSources)
{
    var primarySource = documentSources?.FirstOrDefault();
    var relatedSources = documentSources?.Skip(1).Select(CreateIndexDocumentSource).ToArray();

    var configuration = new IndexDocumentConfiguration
    {
        DocumentType = DocumentType,
        DocumentSource = CreateIndexDocumentSource(primarySource),
        RelatedSources = relatedSources,
    };

    return new IndexingManager(searchProvider, new[] { configuration }, new Moq.Mock<ISearchConnection>().Object);
}
/// <summary>
/// Walks the XML element tree along the given path segments and collects the
/// matching element/attribute values. When the previous segment is marked
/// enumerable, every matching child element is visited (recursing for
/// non-terminal segments); otherwise navigation follows a single child.
/// </summary>
/// <param name="pathSegments">Remaining path segments to follow, outermost first.</param>
/// <param name="parentPathSegment">The segment that led to <paramref name="element"/>.</param>
/// <param name="element">The element to walk from.</param>
/// <returns>All values found along the path.</returns>
private IEnumerable<string> SelectEnumberable(IList<IPathSegment> pathSegments, IPathSegment parentPathSegment, XElement element)
{
    var returnData = new List<string>();
    XElement currentElement = element;
    if (pathSegments.Count > 0)
    {
        for (int i = 0; i < pathSegments.Count; i++)
        {
            var pathSegment = pathSegments[i] as XmlPathSegment;
            // The "previous" segment is either the one before us in the path,
            // or (for the first segment) the parent that led here.
            XmlPathSegment previousPathSegment;
            if (i > 0)
            {
                previousPathSegment = pathSegments[i - 1] as XmlPathSegment;
            }
            else
            {
                previousPathSegment = parentPathSegment as XmlPathSegment;
            }
            bool lastSegment = i == pathSegments.Count - 1;
            if (previousPathSegment != null && previousPathSegment.IsEnumarable)
            {
                // Enumerable parent: fan out over every matching child.
                if (currentElement != null)
                {
                    if (pathSegment != null)
                    {
                        List<XElement> childElements = currentElement.Elements(pathSegment.ActualSegment).ToList();
                        if (childElements.Count > 0)
                        {
                            if (lastSegment)
                            {
                                // Terminal segment: collect a value per child.
                                foreach (XElement childElement in childElements)
                                {
                                    if (pathSegment.IsAttribute)
                                    {
                                        XAttribute attribute = childElement.Attribute(pathSegment.ActualSegment);
                                        if (attribute != null)
                                        {
                                            returnData.Add(attribute.Value);
                                        }
                                        else
                                        {
                                            throw new Exception(string.Format("Attribute {0} not found.", pathSegment.ActualSegment));
                                        }
                                    }
                                    else
                                    {
                                        returnData.Add(childElement.Value);
                                    }
                                }
                            }
                            else
                            {
                                // Non-terminal: recurse into each child with the
                                // remaining path segments.
                                foreach (XElement childElement in childElements)
                                {
                                    returnData.AddRange(SelectEnumberable(pathSegments.Skip(i + 1).ToList(), pathSegment, childElement));
                                }
                            }
                        }
                    }
                }
                // Fan-out handles the rest of the path; stop iterating here.
                return (returnData);
            }
            if (pathSegment != null && pathSegment.IsAttribute)
            {
                // Attribute segment on a single element: collect when terminal.
                // currentElement is nulled so navigation does not continue past
                // an attribute.
                if (currentElement != null)
                {
                    XAttribute attribute = currentElement.Attribute(pathSegment.ActualSegment);
                    if (attribute != null)
                    {
                        currentElement = null;
                        if (lastSegment)
                        {
                            returnData.Add(attribute.Value);
                        }
                    }
                }
            }
            else
            {
                // Element segment: step down one level, collect when terminal.
                if (currentElement != null)
                {
                    if (pathSegment != null)
                    {
                        currentElement = currentElement.Element(pathSegment.ActualSegment);
                    }
                }
                if (currentElement != null && lastSegment)
                {
                    returnData.Add(currentElement.Value);
                }
            }
        }
    }
    else if (currentElement.Name == parentPathSegment.ActualSegment)
    {
        // Empty path: the element itself is the match when its name fits.
        returnData.Add(currentElement.Value);
    }
    return (returnData);
}
/// <summary>
/// Returns the row at the given index.
/// </summary>
/// <param name="list">The rows.</param>
/// <param name="rowIndex">Zero-based row index.</param>
/// <returns>The row at <paramref name="rowIndex"/>.</returns>
public static string Row(this IList<string> list, int rowIndex)
{
    // Index directly: the original Skip(rowIndex).First() rescanned the list
    // (O(n)) on every call.
    return list[rowIndex];
}
/// <summary>
/// Sets the process file name and arguments from a command vector: element 0
/// becomes the executable, the rest are double-quoted and space-joined as the
/// argument string.
/// Note: embedded double quotes inside arguments are not escaped.
/// </summary>
/// <param name="si">The start info to populate.</param>
/// <param name="args">The command vector; must contain at least the executable.</param>
public static void SetCommand(this ProcessStartInfo si, IList<string> args)
{
    si.FileName = args[0];
    var quoted = args.Skip(1).Select(a => "\"" + a + "\"").ToArray();
    si.Arguments = string.Join(" ", quoted);
}
/// <summary>
/// Determines whether the hexes form an unbroken run along the given axis:
/// after sorting, every hex must be contiguous with its successor.
/// </summary>
/// <param name="hexes">The hexes to test.</param>
/// <param name="direction">The axis to test along.</param>
public static bool IsContiguous(IEnumerable<Hex> hexes, Position direction)
{
    IList<Hex> sorted = SortOnAxis(hexes, direction).ToList();
    // Pair each hex with its neighbor and require every pair to be contiguous.
    var neighborChecks = sorted.Zip(sorted.Skip(1), (first, second) => IsContiguous(first, second, direction));
    return neighborChecks.All(isPairContiguous => isPairContiguous);
}
/// <summary>
/// Send indexing actions to be processed by the service.
/// On a 413 (payload too large) the batch is halved, the batch-size hint is
/// shrunk, and both halves are resubmitted/requeued; other failures requeue
/// the whole batch. Individual result statuses are dispatched to the
/// completed/retry/failed callbacks.
/// </summary>
/// <param name="batch">The batch of actions to submit.</param>
/// <param name="cancellationToken">A cancellation token.</param>
/// <returns>Whether the submission was throttled.</returns>
protected override async Task<bool> OnSubmitBatchAsync(IList<PublisherAction<IndexDocumentsAction<T>>> batch, CancellationToken cancellationToken)
{
    // Bail early if someone sent an empty batch
    if (batch.Count == 0)
    {
        return (false);
    }

    // Notify the action is being sent
    foreach (PublisherAction<IndexDocumentsAction<T>> action in batch)
    {
        await _sender.OnActionSentAsync(action.Document, cancellationToken).ConfigureAwait(false);
    }
    AzureSearchDocumentsEventSource.Instance.BatchSubmitted($"{nameof(SearchIndexingBufferedSender<T>)}<{typeof(T).Name}>", _sender.Endpoint.AbsoluteUri, batch.Count);

    // Send the request to the service
    Response<IndexDocumentsResult> response = null;
    try
    {
        response = await _sender.SearchClient.IndexDocumentsAsync(
            IndexDocumentsBatch.Create(batch.Select(a => a.Document).ToArray()),
            cancellationToken: cancellationToken)
            .ConfigureAwait(false);
    }
    // Handle batch level failures
    catch (RequestFailedException ex) when (ex.Status == 413) // Payload Too Large
    {
        AzureSearchDocumentsEventSource.Instance.BatchActionPayloadTooLarge($"{nameof(SearchIndexingBufferedSender<T>)}<{typeof(T).Name}>", _sender.Endpoint.AbsoluteUri, BatchActionCount);
        int oldBatchActionCount = BatchActionCount;

        // Split the batch and try with smaller payloads
        // Update 'BatchActionCount' so future submissions can avoid this error.
        BatchActionCount = (int)Math.Floor((double)batch.Count / 2.0);
        AzureSearchDocumentsEventSource.Instance.BatchActionCountUpdated($"{nameof(SearchIndexingBufferedSender<T>)}<{typeof(T).Name}>", _sender.Endpoint.AbsoluteUri, oldBatchActionCount, BatchActionCount);
        var smaller = new List<PublisherAction<IndexDocumentsAction<T>>>(batch.Take(BatchActionCount));

        // Add the second half to the retry queue without counting this as a retry attempt
        EnqueueRetry(batch.Skip(BatchActionCount));

        // Try resubmitting with just the smaller half
        await SubmitBatchAsync(smaller, cancellationToken).ConfigureAwait(false);
        return (false);
    }
    catch (Exception ex)
    {
        // Retry the whole batch using the same exception for everything
        foreach (PublisherAction<IndexDocumentsAction<T>> action in batch)
        {
            await EnqueueOrFailRetryAsync(action, null, ex, cancellationToken).ConfigureAwait(false);
        }

        // Search currently uses 503s for throttling
        return (ex is RequestFailedException failure && failure.Status == 503);
    }

    // Handle individual responses which might be success or failure
    bool throttled = false;
    foreach ((PublisherAction<IndexDocumentsAction<T>> action, IndexingResult result) in AssociateResults(batch, response.Value.Results))
    {
        // Search currently uses 503s for throttling
        throttled |= (result.Status == 503);
        // Results are expected to come back keyed to the submitted actions.
        Debug.Assert(action.Key == result.Key);
        if (result.Succeeded)
        {
            await _sender.OnActionCompletedAsync(
                action.Document,
                result,
                cancellationToken)
                .ConfigureAwait(false);
        }
        else if (IsRetriable(result.Status))
        {
            await EnqueueOrFailRetryAsync(
                action,
                result,
                exception: null,
                cancellationToken)
                .ConfigureAwait(false);
        }
        else
        {
            await _sender.OnActionFailedAsync(
                action.Document,
                result,
                exception: null,
                cancellationToken)
                .ConfigureAwait(false);
        }
    }
    return (throttled);
}
/// <summary>
/// Prompts the user to choose one item from a list by reacting with a
/// regional-indicator emoji; paginates with back/forward arrow reactions when
/// the list is longer than one page.
/// </summary>
/// <param name="ctx">The command context used to send and await messages.</param>
/// <param name="description">Text shown above the option list.</param>
/// <param name="items">The items to choose from.</param>
/// <param name="display">Optional item formatter; defaults to ToString().</param>
/// <returns>The chosen item.</returns>
public static async Task<T> Choose<T>(this Context ctx, string description, IList<T> items, Func<T, string> display = null)
{
    // Generate a list of :regional_indicator_?: emoji surrogate pairs (starting at codepoint 0x1F1E6)
    // We just do 7 (ABCDEFG), this amount is arbitrary (although sending a lot of emojis takes a while)
    var pageSize = 7;
    var indicators = new string[pageSize];
    for (var i = 0; i < pageSize; i++)
    {
        indicators[i] = char.ConvertFromUtf32(0x1F1E6 + i);
    }

    // Default to x.ToString()
    if (display == null)
    {
        display = x => x.ToString();
    }

    string MakeOptionList(int page)
    {
        var makeOptionList = string.Join("\n", items
            .Skip(page * pageSize)
            .Take(pageSize)
            .Select((x, i) => $"{indicators[i]} {display(x)}"));
        return makeOptionList;
    }

    // If we have more items than the page size, we paginate as appropriate
    if (items.Count > pageSize)
    {
        var currPage = 0;
        var pageCount = (items.Count - 1) / pageSize + 1;

        // Send the original message
        var msg = await ctx.Reply($"**[Page {currPage + 1}/{pageCount}]**\n{description}\n{MakeOptionList(currPage)}");

        // Add back/forward reactions and the actual indicator emojis
        async Task AddEmojis()
        {
            await msg.AddReactionAsync(new Emoji("\u2B05"));
            await msg.AddReactionAsync(new Emoji("\u27A1"));
            // BUG FIX: the original looped to items.Count, which overruns the
            // 7-element indicators array whenever this (paginated) branch runs.
            for (int i = 0; i < pageSize; i++)
            {
                await msg.AddReactionAsync(new Emoji(indicators[i]));
            }
        }
        var _ = AddEmojis(); // Not concerned about awaiting

        while (true)
        {
            // Wait for a reaction
            var reaction = await ctx.AwaitReaction(msg, ctx.Author);

            // If it's a movement reaction, inc/dec the page index
            if (reaction.Emote.Name == "\u2B05")
            {
                currPage -= 1; // <
            }
            if (reaction.Emote.Name == "\u27A1")
            {
                currPage += 1; // >
            }
            // Wrap around at either end.
            if (currPage < 0)
            {
                currPage += pageCount;
            }
            if (currPage >= pageCount)
            {
                currPage -= pageCount;
            }

            // If it's an indicator emoji, return the relevant item
            if (indicators.Contains(reaction.Emote.Name))
            {
                var idx = Array.IndexOf(indicators, reaction.Emote.Name) + pageSize * currPage;
                // only if it's in bounds, though
                // eg. 8 items, we're on page 2, and I hit D (3 + 1*7 = index 10 on an 8-long list) = boom
                if (idx < items.Count)
                {
                    return items[idx];
                }
            }

            var __ = msg.RemoveReactionAsync(reaction.Emote, ctx.Author); // don't care about awaiting
            await msg.ModifyAsync(mp => mp.Content = $"**[Page {currPage + 1}/{pageCount}]**\n{description}\n{MakeOptionList(currPage)}");
        }
    }
    else
    {
        var msg = await ctx.Reply($"{description}\n{MakeOptionList(0)}");

        // Add the relevant reactions (we don't care too much about awaiting)
        async Task AddEmojis()
        {
            for (int i = 0; i < items.Count; i++)
            {
                await msg.AddReactionAsync(new Emoji(indicators[i]));
            }
        }
        var _ = AddEmojis();

        // Then wait for a reaction and return whichever one we found
        var reaction = await ctx.AwaitReaction(msg, ctx.Author, rx => indicators.Contains(rx.Emote.Name));
        return items[Array.IndexOf(indicators, reaction.Emote.Name)];
    }
}
/// <summary>
/// Returns a page of keywords.
/// </summary>
/// <param name="skipCount">Number of keywords to skip.</param>
/// <param name="takeCount">Maximum number of keywords to return.</param>
public IList<Keyword> GetList(int skipCount, int takeCount)
{
    var page = keywords.Skip(skipCount).Take(takeCount);
    return page.ToList();
}
/// <summary>
/// Scripts a merge of the given partitions: the first partition is the merge
/// target, the remaining ones are merged into it.
/// </summary>
/// <param name="obj">The partitions; must contain at least the target.</param>
/// <returns>The generated merge script.</returns>
public static string ScriptMergePartitions(IList<Partition> obj)
{
    var target = obj.First().MetadataObject;
    var sources = obj.Skip(1).Select(p => p.MetadataObject);
    return TOM.JsonScripter.ScriptMergePartitions(target, sources);
}
/// <summary>
/// Handles completion of the background SQL execution: restores the UI state,
/// reports cancellation or errors, and otherwise renders the tabular result
/// (first row = column headers) into the grid.
/// </summary>
/// <param name="sender">The background worker.</param>
/// <param name="e">Completion args carrying the result, error, or cancelled flag.</param>
void ExecuteWorker_RunWorkerCompleted(object sender, RunWorkerCompletedEventArgs e)
{
    // Restore menu and cursor state now that the worker has finished.
    mnuExecute.Text = "Execute";
    mnuExecute.ForeColor = SystemColors.ControlText;
    mnuRun.Enabled = true;
    mnuSchema.Enabled = true;
    this.Cursor = Cursors.Default;
    textEditorControl1.Cursor = Cursors.Default;
    timer.Stop();

    if (e.Cancelled)
    {
        lblCurrentStatus.Text = string.Format("Query cancelled");
    }
    else if (e.Error != null)
    {
        this.Cursor = Cursors.Default;
        Program.logger.ErrorException("Execute", e.Error);
        lblCurrentStatus.Text = "Error: " + e.Error.Message;
        MessageBox.Show("An error occurred executing sql statement data:\n" + e.Error.Message, "Error", MessageBoxButtons.OK, MessageBoxIcon.Error);
    }
    else if (e.Result != null && e.Result is IList<IList<object>>)
    {
        IList<IList<object>> result = (IList<IList<object>>)e.Result;
        grid1.ClipboardMode = SourceGrid.ClipboardMode.All;
        grid1.Rows.Clear();
        grid1.FixedRows = 1;
        grid1.FixedColumns = 1;
        SourceGrid.Cells.Editors.EditorBase editor = new SourceGrid.Cells.Editors.TextBox(typeof(string));
        editor.EnableEdit = false;

        // Set the column count (one extra column for the row header)
        int colCount = result[0].Count;
        grid1.ColumnsCount = colCount + 1;

        // Write the header (result[0] holds the column names)
        grid1.Rows.Insert(0);
        grid1[0, 0] = new SourceGrid.Cells.ColumnHeader("");
        for (int c = 0; c < colCount; c++)
        {
            grid1[0, c + 1] = new SourceGrid.Cells.ColumnHeader(result[0][c]);
        }

        // Write the values
        int i = 0;
        foreach (List<object> row in result.Skip(1))
        {
            i++;
            grid1.Rows.Insert(i);
            grid1[i, 0] = new SourceGrid.Cells.RowHeader(i.ToString());
            for (int c = 0; c < colCount; c++)
            {
                if (row[c] == null)
                {
                    grid1[i, c + 1] = new SourceGrid.Cells.Cell("", typeof(string));
                }
                // BUG FIX: the original compared the cell VALUE to a Type object
                // ("row[c] == typeof(DateTime)"), which is never true, so dates
                // and strings always fell into the numeric branch. Use "is".
                else if (row[c] is DateTime)
                {
                    grid1[i, c + 1] = new SourceGrid.Cells.Cell(((DateTime)row[c]).ToString("yyyy-MM-dd HH:mm:ss"), typeof(string));
                }
                else if (row[c] is string)
                {
                    grid1[i, c + 1] = new SourceGrid.Cells.Cell(row[c].ToString().Trim(), typeof(string));
                }
                else
                {
                    grid1[i, c + 1] = new SourceGrid.Cells.Cell(row[c], typeof(double));
                }
                grid1[i, c + 1].Editor = editor;
            }
        }

        grid1.AutoSizeCells();
        grid1.Visible = true;
        lblCurrentStatus.Text = string.Format("Query completed in {0}s", DateTime.Now.Subtract(start).TotalSeconds);
    }
}
/// <summary>
/// Propagates a change notification from a source collection into a
/// destination collection that mirrors it, mapping each source item to its
/// destination counterpart through the supplied register/deregister callbacks.
/// </summary>
/// <param name="e">The change notification raised by the source collection.</param>
/// <param name="source">The collection that changed (already holds the new state).</param>
/// <param name="destination">The mirrored collection to bring in sync.</param>
/// <param name="registerSync">Creates (and registers) the destination item for a source item.</param>
/// <param name="deregisterSync">Unregisters a source item and returns its destination counterpart.</param>
/// <param name="getDestItem">Looks up the existing destination item for a source item.</param>
/// <param name="getSrcItem">Looks up the source item for a destination item.</param>
/// <param name="clearSync">Clears the whole source/destination mapping.</param>
private static void OnCollectionChanged<TSrc, TDest>(NotifyCollectionChangedEventArgs e, IList<TSrc> source, IList<TDest> destination, Func<TSrc, TDest> registerSync, Func<TSrc, TDest> deregisterSync, Func<TSrc, TDest> getDestItem, Func<TDest, TSrc> getSrcItem, Action clearSync)
{
    List<TSrc> newListSource;
    List<TDest> newListDestination;
    // Runs a bulk mutation inside a transaction when the destination supports
    // it, so observers see a single change instead of one per item.
    Action<Action> updateDestination = action =>
    {
        var transactive = destination as TransactiveObservableCollection<TDest>;
        if (transactive != null)
        {
            using (transactive.Transaction()) action();
        }
        else
        {
            action();
        }
    };
    switch (e.Action)
    {
    case NotifyCollectionChangedAction.Add:
        // Register every new source item first, then add/insert the mapped items.
        newListSource = e.NewItems.Cast<TSrc>().ToList();
        newListDestination = new List<TDest>();
        foreach (var item in newListSource)
        {
            newListDestination.Add(registerSync(item));
        }
        if (newListDestination.Count > 0)
        {
            if (e.NewStartingIndex == -1)
            {
                // No index supplied: append at the end.
                if (newListDestination.Count == 1)
                {
                    destination.Add(newListDestination[0]);
                }
                else
                {
                    updateDestination(() =>
                    {
                        foreach (var item in newListDestination)
                        {
                            destination.Add(item);
                        }
                    });
                }
            }
            else
            {
                // Insert at the reported position, preserving order.
                var index = e.NewStartingIndex;
                if (newListDestination.Count == 1)
                {
                    destination.Insert(index, newListDestination[0]);
                }
                else
                {
                    updateDestination(() =>
                    {
                        foreach (var item in newListDestination)
                        {
                            destination.Insert(index, item);
                            index++;
                        }
                    });
                }
            }
        }
        break;
    case NotifyCollectionChangedAction.Remove:
        // Deregister the removed source items; deregisterSync yields the
        // destination counterparts to take out of the mirrored collection.
        newListSource = e.OldItems.Cast<TSrc>().ToList();
        newListDestination = new List<TDest>();
        foreach (var item in newListSource)
        {
            newListDestination.Add(deregisterSync(item));
        }
        if (newListDestination.Count > 0)
        {
            if (newListDestination.Count == 1)
            {
                destination.Remove(newListDestination[0]);
            }
            else
            {
                updateDestination(() =>
                {
                    foreach (var item in newListDestination)
                    {
                        destination.Remove(item);
                    }
                });
            }
        }
        break;
    case NotifyCollectionChangedAction.Replace:
        // Only single-item replacement is supported.
        if (e.NewItems.Count != 1)
        {
            throw new NotImplementedException(@"NotifyCollectionChangedAction.Replace && e.NewItems.Count != 1");
        }
        var oldSrc = e.OldItems.Cast<TSrc>().Single();
        var newSrc = e.NewItems.Cast<TSrc>().Single();
        var oldDest = deregisterSync(oldSrc);
        var newDest = registerSync(newSrc);
        var indexSrc = source.IndexOf(newSrc);
        var indexDest = destination.IndexOf(oldDest);
        // Sanity check: both collections must agree on the replaced position.
        if (indexSrc != indexDest)
        {
            throw new InvalidOperationException();
        }
        destination[indexDest] = newDest;
        break;
    case NotifyCollectionChangedAction.Reset:
        var sourceComparer = EqualityComparer<TSrc>.Default;
        // False-positive guard (translated from the original Russian notes):
        // getDestItem must NOT be called here, because `source` already holds
        // the new items while the mapping still holds the old ones — the
        // mapping lookup would return null, followed by a
        // NullReferenceException. getDestItem may only be called for items
        // that are guaranteed unchanged. getSrcItem, by contrast, may be
        // called on everything, since `destination` still holds the old items.
        // if (source.Count == destination.Count && !destination.SkipWhile((d, i) => d.Equals(getDestItem(source[i]))).Any()) {
        if (source.Count == destination.Count && source.SequenceEqual(destination.Select(getSrcItem), sourceComparer))
        {
            // Already in sync: nothing to do.
            // nop
        }
        else if (source.Count == 0)
        {
            // Source was emptied: clear the mirror and the mapping.
            destination.Clear();
            clearSync();
        }
        // addRange: destination is a strict prefix of source — append the tail.
        else if (source.Count > destination.Count && !destination.SkipWhile((d, i) => sourceComparer.Equals(source[i], getSrcItem(d))).Any())
        {
            var items = source.Skip(destination.Count).ToList();
            OnCollectionChanged(new NotifyCollectionChangedEventArgs(NotifyCollectionChangedAction.Add, items), source, destination, registerSync, deregisterSync, getDestItem, getSrcItem, clearSync);
        }
        // removeRange: source is a subset of destination — remove the difference.
        else if (source.Count < destination.Count && !source.Except(destination.Select(getSrcItem), sourceComparer).Any())
        {
            var items = destination.Except(source.Select(getDestItem)).Select(getSrcItem).ToList();
            OnCollectionChanged(new NotifyCollectionChangedEventArgs(NotifyCollectionChangedAction.Remove, items), source, destination, registerSync, deregisterSync, getDestItem, getSrcItem, clearSync);
        }
        else
        {
            // accurate
            //Synchronize(source, destination);
            // rough: drop the mapping and rebuild the mirror from scratch.
            clearSync();
            updateDestination(() =>
            {
                destination.Clear();
                OnCollectionChanged(new NotifyCollectionChangedEventArgs(NotifyCollectionChangedAction.Add, (IList)source), source, destination, registerSync, deregisterSync, getDestItem, getSrcItem, clearSync);
            });
        }
        break;
    default:
        throw new NotImplementedException(e.Action.ToString());
    }
}
/// <summary>
/// Lays out rich-text nodes with DirectWrite: groups the nodes into
/// paragraphs (split on paragraph nodes or alignment changes), builds one
/// TextLayout per paragraph and applies per-run character formatting.
/// </summary>
/// <param name="nodes">The rich-text nodes to lay out.</param>
/// <param name="width">The wrapping width.</param>
/// <param name="sizeMultiplier">Scale factor applied to font sizes (default 1).</param>
/// <returns>The built text ready for rendering.</returns>
public override BuiltRichText BuildText(IList<RichTextNode> nodes, int width, float sizeMultiplier = 1)
{
    if (!customLoader.Valid)
    {
        CreateCustomCollection();
    }
    var paragraphs = new List<List<RichTextNode>>();
    paragraphs.Add(new List<RichTextNode>());
    // Skip leading paragraph breaks to find the first text node.
    // BUG FIX: the bounds check must run before the indexer — the original
    // order (nodes[first] ... && first < nodes.Count) indexed past the end
    // when the list contained only paragraph nodes.
    int first = 0;
    while (first < nodes.Count && nodes[first] is RichTextParagraphNode)
    {
        first++;
    }
    // NOTE(review): a node list with no text node at all still fails on the
    // next line — confirm callers always supply at least one text node.
    DWrite.TextAlignment ta = CastAlignment((nodes[first] as RichTextTextNode).Alignment);
    paragraphs[paragraphs.Count - 1].Add(nodes[first]);
    // Group the remaining nodes; an alignment change also starts a new
    // paragraph because DirectWrite aligns per layout, not per run.
    foreach (var node in nodes.Skip(first + 1))
    {
        if (node is RichTextParagraphNode)
        {
            paragraphs.Add(new List<RichTextNode>());
        }
        else
        {
            var n = (RichTextTextNode)node;
            var align = CastAlignment(n.Alignment);
            if (align != ta && paragraphs[paragraphs.Count - 1].Count > 0)
            {
                paragraphs.Add(new List<RichTextNode>());
            }
            paragraphs[paragraphs.Count - 1].Add(node);
            ta = align;
        }
    }
    string lastFont = null;
    float lastSize = 0;
    //Format text: one DirectWrite layout per paragraph.
    var layouts = new List<TextLayout>();
    for (int j = 0; j < paragraphs.Count; j++)
    {
        var p = paragraphs[j];
        var builder = new StringBuilder();
        foreach (var n in p)
        {
            builder.Append(((RichTextTextNode)n).Contents);
        }
        // The base format carries the last explicitly-set font/size across
        // paragraphs; defaults are Arial at 16 * sizeMultiplier.
        var layout = new TextLayout(
            dwFactory,
            builder.ToString(),
            new TextFormat(
                dwFactory,
                string.IsNullOrEmpty(lastFont) ? "Arial" : lastFont,
                lastSize > 0 ? lastSize : (16 * sizeMultiplier)
            ),
            width,
            float.MaxValue
        );
        if (p.Count > 0)
        {
            layout.TextAlignment = CastAlignment(((RichTextTextNode)p[0]).Alignment);
        }
        // Apply per-node character formatting over each node's character range.
        int startIdx = 0;
        foreach (var n in p)
        {
            var text = (RichTextTextNode)n;
            var range = new TextRange(startIdx, text.Contents.Length);
            if (text.Bold)
            {
                layout.SetFontWeight(FontWeight.Bold, range);
            }
            if (text.Italic)
            {
                layout.SetFontStyle(FontStyle.Italic, range);
            }
            if (text.Underline)
            {
                layout.SetUnderline(true, range);
            }
            if (!string.IsNullOrEmpty(text.FontName))
            {
                // Prefer the custom font collection when it has the family.
                if (customCollection.FindFamilyName(text.FontName, out int _))
                {
                    layout.SetFontCollection(customCollection, range);
                }
                layout.SetFontFamilyName(text.FontName, range);
                lastFont = text.FontName;
            }
            if (text.FontSize > 0)
            {
                layout.SetFontSize(text.FontSize * sizeMultiplier, range);
                lastSize = text.FontSize * sizeMultiplier;
            }
            layout.SetDrawingEffect(new ColorDrawingEffect(text.Color, text.Shadow), range);
            startIdx += text.Contents.Length;
        }
        layouts.Add(layout);
    }
    //Return
    var built = new DirectWriteBuiltText(this) { Layout = layouts, Width = width };
    built.CacheQuads();
    return built;
}
/// <summary>
/// Builds one NUnit test case per registered "then" assertion, pairing the
/// accumulated context setup and the "when" action with that assertion.
/// </summary>
/// <param name="parentContexts">The context chain from the spec method down to the current context.</param>
/// <returns>The test cases to hand to NUnit; empty when there are no "then"s.</returns>
private IList<TestCaseData> BuildTestCasesForThens(IList<Context> parentContexts)
{
    var testCases = new List<TestCaseData>();
    if (!thens.Any())
    {
        return(testCases);
    }
    // Compose the human-readable test name: spec method, named contexts,
    // the "when" clause, and finally the individual "then" clause.
    var setupText = new StringBuilder();
    setupText.AppendLine(parentContexts.First().Description + ":"); // start with the spec method's name
    var first = true;
    foreach (var context in parentContexts.Skip(1).Where(c => c.IsNamedContext))
    {
        setupText.AppendLine(context.Conjunction(first) + context.Description);
        first = false;
    }
    setupText.AppendLine("when " + when.Key);
    const string thenText = "then ";
    foreach (var spec in thens)
    {
        // Capture loop/instance state so each generated delegate is independent
        // of later mutation of `parentContexts` / `when`.
        var parentContextsCapture = new List<Context>(parentContexts);
        var whenCapture = new KeyValuePair<string, Func<Task>>(when.Key, when.Value);
        Func<Task> executeTest = async() =>
        {
            BeforeIfNotAlreadyRun();
            try
            {
                var exceptionThrownAndAsserted = false;
                await InitializeContext(parentContextsCapture).ConfigureAwait(false);
                // Run the "when" action, capturing any exception so the
                // "then" gets a chance to assert on it.
                try
                {
                    thrownException = null;
                    await whenCapture.Value().ConfigureAwait(false);
                }
                catch (Exception ex)
                {
                    thrownException = ex;
                }
                try
                {
                    exceptionAsserted = false;
                    await spec.Value().ConfigureAwait(false);
                }
                catch (Exception)
                {
                    // Swallow the assertion failure only when the "when" threw
                    // and the spec did not assert on that exception — the
                    // captured exception is rethrown below instead.
                    if (thrownException == null || exceptionAsserted)
                    {
                        throw;
                    }
                }
                if (thrownException != null)
                {
                    throw thrownException;
                }
                exceptionThrownAndAsserted = true;
                // NOTE(review): this re-run branch is unreachable — the flag
                // was set to true on the line above. Presumably the assignment
                // was meant to happen only on some earlier path; confirm the
                // intended retry semantics before changing it.
                if (!exceptionThrownAndAsserted)
                {
                    await spec.Value().ConfigureAwait(false);
                }
            }
            finally
            {
                After();
            }
        };
        var description = setupText + thenText + spec.Key + Environment.NewLine;
        testCases.Add(new TestCaseData(executeTest).SetName(description));
    }
    return(testCases);
}
/// <summary>
/// Returns one page of <paramref name="lists"/> using 1-based page numbers:
/// page 1 yields the first <paramref name="pageSize"/> elements.
/// </summary>
/// <param name="lists">The full list to page over.</param>
/// <param name="pageIndex">1-based page number.</param>
/// <param name="pageSize">Maximum number of elements per page.</param>
/// <returns>A new list holding the requested page (possibly short or empty).</returns>
public IList<T> GetPaged(IList<T> lists, int pageIndex, int pageSize)
{
    var start = (pageIndex - 1) * pageSize;
    if (start < 0)
    {
        start = 0; // mirror LINQ Skip, which treats a negative count as zero
    }
    var page = new List<T>();
    for (var i = start; i < lists.Count && page.Count < pageSize; i++)
    {
        page.Add(lists[i]);
    }
    return page;
}
/// <summary>
/// Gets a partial (paged) view of a list using 0-based page numbers.
/// </summary>
/// <typeparam name="T">list object type.</typeparam>
/// <param name="list">list object.</param>
/// <param name="page">page number (0-based: page 0 is the first page).</param>
/// <param name="pageSize">page size.</param>
/// <returns>paginated list</returns>
public IList<T> GetPage<T>(IList<T> list, int page, int pageSize)
{
    var firstIndex = page * pageSize;
    if (firstIndex < 0)
    {
        firstIndex = 0; // mirror LINQ Skip, which treats a negative count as zero
    }
    var result = new List<T>();
    for (var i = firstIndex; i < list.Count && result.Count < pageSize; i++)
    {
        result.Add(list[i]);
    }
    return result;
}
/// <summary>
/// CLI entry point: parses the options, maps the first remaining argument to
/// a command handler, runs it, and exits with 0 on success or 1 on failure.
/// </summary>
/// <param name="args">Raw command-line arguments.</param>
static void Main(string[] args)
{
    AppDomain.CurrentDomain.UnhandledException += CurrentDomain_UnhandledException;
    // Verb -> handler table; each handler receives the arguments following
    // the verb and returns true on success.
    var commands = new Dictionary<string, Func<IList<string>, bool>>
    {
        { "target", (arg) => Target(arg) },
        { "login", (arg) => Login(arg) },
        { "info", (arg) => Info(arg) },
        { "apps", (arg) => Apps(arg) },
        { "push", (arg) => Push(arg) },
        { "update", (arg) => Update(arg) },
        { "services", (arg) => Services(arg) },
        { "bind-service", (arg) => BindService(arg) },
        { "unbind-service", (arg) => UnbindService(arg) },
        { "create-service", (arg) => CreateService(arg) },
        { "delete-service", (arg) => DeleteService(arg) },
        { "delete", (arg) => Delete(arg) },
        { "help", (arg) => Usage() },
        { "passwd", (arg) => Passwd(arg) },
        { "add-user", (arg) => AddUser(arg) },
        { "delete-user", (arg) => DeleteUser(arg) },
        { "files", (arg) => Files(arg) },
#if DEBUG
        { "testfiles", (arg) => TestFiles(arg) },
        { "teststats", (arg) => TestStats(arg) },
#endif
    };
    // Global option definitions; each handler mutates a static option field.
    var p = new OptionSet
    {
        { "v|verbose", "increase verbosity", v => { if (false == String.IsNullOrEmpty(v)) { ++verbosity; } } },
        { "h|help", "show help", v => { show_help = null != v; } },
        { "json", "show result as json", v => { result_as_json = null != v; } },
        { "rawjson", "show result as raw json", v => { result_as_rawjson = null != v; } },
        { "url=", "set command url", v => { command_url = v; } },
        { "email=", "set command email", v => { command_email = v; } },
        { "passwd=", "set command password", v => { command_password = v; } },
        { "noprompts", "set prompting", v => { prompt_ok = v.IsNullOrWhiteSpace(); } },
        { "instances=", "set instances", v => { instances = Convert.ToUInt16(v); } },
        { "mem=", "set memory in MB", v => { memoryMB = Convert.ToUInt16(v); } },
    };
    IList<string> unparsed = null;
    try
    {
        unparsed = p.Parse(args);
    }
    catch (OptionException)
    {
        // Malformed options: print usage; unparsed stays null and the
        // IsNullOrEmpty check below routes to Usage(p).
        Usage();
    }
    bool success = true;
    if (show_help)
    {
        Usage(p);
    }
    else
    {
        if (false == unparsed.IsNullOrEmpty())
        {
            // First remaining argument is the verb; the rest are its arguments.
            string verb = unparsed[0];
            Func<IList<string>, bool> action;
            if (commands.TryGetValue(verb, out action))
            {
                try
                {
                    success = action(unparsed.Skip(1).ToList());
                }
                catch (Exception e)
                {
                    success = false;
                    Console.Error.WriteLine(e.Message);
                }
            }
            else
            {
                Usage(p);
            }
        }
        else
        {
            Usage(p);
        }
    }
    Environment.Exit(success ? 0 : 1);
}
/// <summary>
/// Inserts events into dvs_event in chunks of INSERT_COUNT rows using
/// INSERT DELAYED, updating only UpdatedOn when the key already exists.
/// </summary>
/// <param name="events">The events to persist.</param>
public void BulkInsert(IList<Event> events)
{
    // lock (this.thisLock)
    {
        using (var dbConnection = new DbConnection.DbConnection())
        {
            var updatedOn = DateTime.UtcNow.ToString("yy-MM-dd HH:mm:ss");
            // Walk the list in INSERT_COUNT-sized chunks; <= so the trailing
            // partial chunk is included (the Any() guard skips an empty one).
            for (int i = 0; i <= events.Count / INSERT_COUNT; i++)
            {
                var subEvents = events.Skip(i * INSERT_COUNT).Take(INSERT_COUNT).ToList();
                if (subEvents.Any())
                {
                    // SECURITY NOTE(review): the statement is built by string
                    // concatenation; any quote in a name field (team, league,
                    // market, ...) breaks the SQL, and the values are
                    // injectable. Switch to parameterized statements if the
                    // driver supports them.
                    var query = new StringBuilder(
                        "INSERT DELAYED INTO dvs_event (EventId,EventName,StartDate,SportId,SportName,LeagueId, " +
                        "LeagueName,LocationId,LocationName,Status,LastUpdate,HomeTeamId,HomeTeamName,AwayTeamId,AwayTeamName," +
                        "CreatedOn,CreatedBy,IsActive,UpdatedOn,UpdatedBy,MarketId,MarketName,ProviderId,ProviderName,ProductId) VALUES ");
                    var counter = 0;
                    foreach (var @event in subEvents)
                    {
                        //@event.XmlText = "blabla";
                        query.Append("(");
                        query.Append(@event.EventId);
                        query.Append(",'" + @event.BuildEventName() + "'");
                        query.Append(",'" + @event.StartDate + "'");
                        query.Append(",'" + (@event.SportId ?? 0) + "'");
                        query.Append(",'" + @event.SportName + "'");
                        query.Append(",'" + (@event.LeagueId ?? 0) + "'");
                        query.Append(",'" + @event.LeagueName + "'");
                        query.Append(",'" + (@event.LocationId ?? 0) + "'");
                        query.Append(",'" + @event.LocationName + "'");
                        query.Append(",'" + @event.Status + "'");
                        query.Append(",'" + @event.LastUpdate.ToString("yyyy-MM-ddTHH:mm:ss.fff") + "'");
                        query.Append(",'" + (@event.HomeTeamId ?? 0) + "'");
                        query.Append(",'" + @event.HomeTeamName + "'");
                        query.Append(",'" + (@event.AwayTeamId ?? 0) + "'");
                        query.Append(",'" + @event.AwayTeamName + "'");
                        query.Append(",'" + @event.CreatedOn.ToString("yyyy-MM-ddTHH:mm:ss.fff") + "'");
                        query.Append(",'" + @event.CreatedBy + "'");
                        query.Append(", b'1'"); // IsActive as a MySQL bit literal
                        query.Append(",'" + updatedOn + "'");
                        query.Append(",'" + @event.UpdatedBy + "'");
                        query.Append(",'" + (@event.MarketId ?? 0) + "'");
                        query.Append(",'" + @event.MarketName + "'");
                        query.Append(",'" + (@event.ProviderId ?? 0) + "'");
                        query.Append(",'" + @event.ProviderName + "'");
                        query.Append(",'" + @event.ProductId + "'");
                        query.Append(")");
                        // Comma between value tuples, none after the last one.
                        if (counter != subEvents.Count - 1)
                        {
                            query.Append(",");
                        }
                        counter++;
                    }
                    query.Append(" ON DUPLICATE KEY UPDATE");
                    query.Append(" UpdatedOn = '" + updatedOn + "'");
                    query.Append(";");
                    //throw new Exception(query.ToString());
                    dbConnection.BulkExecute(query.ToString());
                }// else pass
            }
        }
    }
}
/// <summary>
/// Matches a request path against this parsed route and extracts route
/// values, merging in defaults for parameters the URL did not supply.
/// </summary>
/// <param name="virtualPath">The virtual path to match.</param>
/// <param name="defaultValues">Default route values; may be null.</param>
/// <returns>The matched route values, or null when the path does not match.</returns>
public RouteValueDictionary Match(string virtualPath, RouteValueDictionary defaultValues)
{
    IList<string> source = RouteParser.SplitUrlToPathSegmentStrings(virtualPath);
    if (defaultValues == null)
    {
        defaultValues = new RouteValueDictionary();
    }
    RouteValueDictionary matchedValues = new RouteValueDictionary();
    bool flag = false;  // true once the URL has fewer segments than the route
    bool flag2 = false; // true once a catch-all segment consumed the rest of the URL
    for (int i = 0; i < this.PathSegments.Count; i++)
    {
        PathSegment segment = this.PathSegments[i];
        if (source.Count <= i)
        {
            flag = true;
        }
        string a = flag ? null : source[i];
        if (segment is SeparatorPathSegment)
        {
            // A literal "/" in the route must be matched by a "/" in the URL
            // (unless the URL has already run out of segments).
            if (!flag && !string.Equals(a, "/", StringComparison.Ordinal))
            {
                return(null);
            }
        }
        else
        {
            ContentPathSegment contentPathSegment = segment as ContentPathSegment;
            if (contentPathSegment != null)
            {
                if (contentPathSegment.IsCatchAll)
                {
                    // Catch-all swallows everything from here to the end.
                    this.MatchCatchAll(contentPathSegment, source.Skip<string>(i), defaultValues, matchedValues);
                    flag2 = true;
                }
                else if (!this.MatchContentPathSegment(contentPathSegment, a, defaultValues, matchedValues))
                {
                    return(null);
                }
            }
        }
    }
    // Without a catch-all, URL segments beyond the route may only be separators.
    if (!flag2 && (this.PathSegments.Count < source.Count))
    {
        for (int j = this.PathSegments.Count; j < source.Count; j++)
        {
            if (!RouteParser.IsSeparator(source[j]))
            {
                return(null);
            }
        }
    }
    // Fill in defaults for values the URL did not provide.
    if (defaultValues != null)
    {
        foreach (KeyValuePair<string, object> pair in defaultValues)
        {
            if (!matchedValues.ContainsKey(pair.Key))
            {
                matchedValues.Add(pair.Key, pair.Value);
            }
        }
    }
    return(matchedValues);
}
/// <summary>
/// Computes the building blocks of the ADX/DMI indicator: smoothed +DI/-DI
/// series, their absolute difference and sum, and the smoothed DX average.
/// </summary>
/// <param name="prices">Price bars in date order; needs at least two entries.</param>
/// <param name="smaValues">Smoothed moving average of the DX series.</param>
/// <param name="diplus">The +DI series.</param>
/// <param name="diminus">The -DI series.</param>
/// <param name="differences">|+DI - -DI| per bar.</param>
/// <param name="differences2">+DI + -DI per bar (trimmed to align with the smoothed DX).</param>
private void CalculateHelper(IList<Price> prices, out IList<IndicatorValue> smaValues, out IList<IndicatorValue> diplus, out IList<IndicatorValue> diminus, out IList<IndicatorValue> differences, out IList<IndicatorValue> differences2)
{
    // Directional movement per bar: +DM when the up-move dominates,
    // -DM when the down-move dominates, zero otherwise.
    var plusDms = new List<IndicatorValue>();
    var minusDms = new List<IndicatorValue>();
    for (var i = 1; i < prices.Count; ++i)
    {
        decimal plusDm, minusDm;
        var upMove = prices[i].HighPrice - prices[i - 1].HighPrice;
        var downMove = prices[i - 1].LowPrice - prices[i].LowPrice;
        if (upMove > downMove && upMove > 0)
        {
            plusDm = upMove;
        }
        else
        {
            plusDm = 0;
        }
        if (downMove > upMove && downMove > 0)
        {
            minusDm = downMove;
        }
        else
        {
            minusDm = 0;
        }
        plusDms.Add(new IndicatorValue { Date = prices[i].Date, Value = plusDm });
        minusDms.Add(new IndicatorValue { Date = prices[i].Date, Value = minusDm });
    }
    // Smooth the DM series and the range values over the configured term.
    var plusDisMovingAverage = MovingAverageHelper.SmoothedSum(plusDms, Term);
    var minusDisMovingAverage = MovingAverageHelper.SmoothedSum(minusDms, Term);
    var atr = MovingAverageHelper.SmoothedSum(GetRsValues(prices.Skip(1).ToList()), Term);
    var plusDis = new List<IndicatorValue>();
    var minusDis = new List<IndicatorValue>();
    var diDifferences = new List<IndicatorValue>();
    var diDifferences2 = new List<IndicatorValue>();
    var dxs = new List<IndicatorValue>();
    for (var i = 0; i < atr.Count; ++i)
    {
        if (atr[i].Value > 0)
        {
            // +DI / -DI: smoothed DM normalized by ATR, scaled to percent.
            plusDis.Add(new IndicatorValue { Date = atr[i].Date, Value = 100 * plusDisMovingAverage[i].Value / atr[i].Value });
            minusDis.Add(new IndicatorValue { Date = atr[i].Date, Value = 100 * minusDisMovingAverage[i].Value / atr[i].Value });
            diDifferences.Add(new IndicatorValue { Date = plusDis[i].Date, Value = Math.Abs(plusDis[i].Value - minusDis[i].Value) });
            diDifferences2.Add(new IndicatorValue { Date = plusDis[i].Date, Value = plusDis[i].Value + minusDis[i].Value });
            dxs.Add(new IndicatorValue { Date = atr[i].Date, Value = 100 * diDifferences[i].Value / diDifferences2[i].Value });
        }
        else
        {
            // Zero ATR: push sentinel values so every series stays index-aligned.
            plusDis.Add(new IndicatorValue() { Date = atr[i].Date, Value = decimal.MaxValue });
            minusDis.Add(new IndicatorValue() { Date = atr[i].Date, Value = decimal.MaxValue });
            diDifferences.Add(new IndicatorValue() { Date = plusDis[i].Date, Value = 0 });
            diDifferences2.Add(new IndicatorValue() { Date = minusDis[i].Date, Value = decimal.MaxValue });
            dxs.Add(new IndicatorValue() { Date = atr[i].Date, Value = 0 });
        }
    }
    var sma = MovingAverageHelper.SmoothedMovingAverage2(dxs, Term);
    // Drop the warm-up values so differences2 lines up with the smoothed DX.
    diDifferences2 = diDifferences2.Skip(Term - 1).ToList();
    smaValues = sma;
    diplus = plusDis;
    diminus = minusDis;
    differences = diDifferences;
    differences2 = diDifferences2;
}
/// <summary>
/// Trains a three-layer feed-forward network on the training CSV (90% train,
/// 10% held back for validation), stops once the validation error levels
/// off, then prints a predicted digit for every row of the testing CSV.
/// </summary>
public static void Main()
{
    const int numInHiddenLayer = 500;
    const int numOfOutputs = 10;          // one output neuron per digit 0-9
    const double normalisation = 255.0d;  // divisor applied to raw pixel values
    var inputFileReader = new InputFileReader();
    IList<Tuple<int, IEnumerable<double>>> csvInputs = inputFileReader.ReadTrainingInputFile(@"C:\Users\Pavlos\Desktop\training.csv", normalisation);
    int validationFraction = csvInputs.Count / 10;
    // use all but ten percent for training, hold the rest back for validation
    var trainingInputs = csvInputs.Skip(validationFraction).ToList();
    var validationInputs = csvInputs.Take(validationFraction).ToList();
    // create inputs and the three layers (one sensory input per pixel)
    List<SensoryInput> sensoryInputs = trainingInputs[0].Item2.Select(i => new SensoryInput()).ToList();
    List<INeuron> inputLayer = CreateLayer(sensoryInputs.Count, sensoryInputs.Cast<IInput>().ToList());
    List<INeuron> hiddenLayer = CreateLayer(numInHiddenLayer, inputLayer.Cast<IInput>().ToList());
    List<INeuron> outputLayer = CreateLayer(numOfOutputs, hiddenLayer.Cast<IInput>().ToList());
    double previousGlobalError = double.MaxValue;
    double globalErrorDelta;
    // training:
    do
    {
        foreach (var specimen in trainingInputs)
        {
            UpdateNetwork(specimen.Item2.ToList(), sensoryInputs, inputLayer, hiddenLayer, outputLayer);
            // train output layer: desired is 1 for the labelled digit, 0 otherwise
            for (int k = 0; k < outputLayer.Count; k++)
            {
                double desired = k == specimen.Item1 ? 1.0d : 0.0d;
                double output = outputLayer[k].GetValue();
                double error = desired - output;
                outputLayer[k].Train(error);
            }
            // train hidden layer, then train input layer
            BackPropagate(hiddenLayer, outputLayer);
            BackPropagate(inputLayer, hiddenLayer);
        }
        // calculate global error using the validation set that was excluded from training:
        double globalError = 0.0d;
        foreach (var specimen in validationInputs)
        {
            UpdateNetwork(specimen.Item2.ToList(), sensoryInputs, inputLayer, hiddenLayer, outputLayer);
            for (int i = 0; i < outputLayer.Count; i++)
            {
                double desired = i == specimen.Item1 ? 1.0d : 0.0d;
                globalError += Math.Abs(desired - outputLayer[i].GetValue());
            }
        }
        globalErrorDelta = Math.Abs(previousGlobalError - globalError);
        previousGlobalError = globalError;
        Console.WriteLine("Global error: {0}", globalError);
    } while (globalErrorDelta > 1000.0d); // train until global error begins to level off
    // Run on real testing data and write output to console:
    var testingInputs = inputFileReader.ReadTestingInputFile(@"C:\Users\Pavlos\Desktop\testing.csv", normalisation);
    foreach (var specimen in testingInputs)
    {
        UpdateNetwork(specimen.ToList(), sensoryInputs, inputLayer, hiddenLayer, outputLayer);
        int mostLikelyAnswer = GetMostLikelyAnswer(outputLayer);
        Console.WriteLine(mostLikelyAnswer);
    }
}
/// <summary>
/// Returns a page from Packages.
/// </summary>
/// <param name="pageIndex">Zero-based page number.</param>
/// <param name="pageSize">Number of packages per page.</param>
/// <returns>A lazily evaluated slice of Packages for the requested page.</returns>
public IEnumerable<Package> Get(int pageIndex, int pageSize)
{
    var offset = pageIndex * pageSize;
    return Packages.Skip(offset).Take(pageSize);
}
/// <summary>
/// Returns a new list containing every element of <paramref name="coll"/>
/// after the first <paramref name="n"/>. A non-positive <paramref name="n"/>
/// copies the whole list; an <paramref name="n"/> past the end yields empty.
/// </summary>
/// <param name="n">Number of leading elements to discard.</param>
/// <param name="coll">The list to drop from (not modified).</param>
/// <returns>The remaining elements, in order.</returns>
public static IList<T> Drop<T>(int n, IList<T> coll)
{
    var kept = new List<T>();
    for (var i = n < 0 ? 0 : n; i < coll.Count; i++)
    {
        kept.Add(coll[i]);
    }
    return kept;
}
/// <summary>
/// Mirrors a change in the view-model collection into the backing model
/// collection, temporarily detaching the model-side handler so our own
/// edits do not echo back as re-entrant notifications.
/// </summary>
/// <param name="e">The view-model collection change to replay.</param>
protected virtual void OnViewModelCollectionChanged(NotifyCollectionChangedEventArgs e)
{
    // Unhook the reverse synchronizer for the duration of the update.
    INotifyCollectionChanged? notifyCollection = modelCollection as INotifyCollectionChanged;
    if (notifyCollection != null)
    {
        notifyCollection.CollectionChanged -= OnModelCollectionChanged;
    }
    switch (e.Action)
    {
    case NotifyCollectionChangedAction.Add:
        for (int i = 0; i < e.NewItems.Count; i++)
        {
            modelCollection.Insert(e.NewStartingIndex + i, CreateModel((TViewModel)e.NewItems[i]));
        }
        break;
    case NotifyCollectionChangedAction.Move:
        if (e.OldItems.Count == 1 && modelCollection is ObservableCollection<TModel> observableCollection)
        {
            // Single-item move on an observable collection can use Move directly.
            observableCollection.Move(e.OldStartingIndex, e.NewStartingIndex);
        }
        else
        {
            // General move: copy the block out, remove it, re-insert at the new index.
            List<TModel> items = modelCollection.Skip(e.OldStartingIndex).Take(e.OldItems.Count).ToList();
            for (int i = 0; i < e.OldItems.Count; i++)
            {
                modelCollection.RemoveAt(e.OldStartingIndex);
            }
            for (int i = 0; i < items.Count; i++)
            {
                modelCollection.Insert(e.NewStartingIndex + i, items[i]);
            }
        }
        break;
    case NotifyCollectionChangedAction.Remove:
        for (int i = 0; i < e.OldItems.Count; i++)
        {
            modelCollection.RemoveAt(e.OldStartingIndex);
        }
        break;
    case NotifyCollectionChangedAction.Replace:
        // Replace = remove the old block, then fall through to Add for the new one.
        for (int i = 0; i < e.OldItems.Count; i++)
        {
            modelCollection.RemoveAt(e.OldStartingIndex);
        }
        goto case NotifyCollectionChangedAction.Add;
    case NotifyCollectionChangedAction.Reset:
        modelCollection.Clear();
        break;
    }
    // Re-attach the model-side handler.
    if (notifyCollection != null)
    {
        notifyCollection.CollectionChanged += OnModelCollectionChanged;
    }
}
/// <summary>
/// Converts a slice of <paramref name="input"/> — <paramref name="rowCount"/>
/// rows starting at <paramref name="rowStartIndex"/> — into a list of
/// <typeparamref name="T"/> values via the As&lt;T&gt; conversion.
/// </summary>
/// <param name="input">The source rows.</param>
/// <param name="rowStartIndex">Index of the first row to convert.</param>
/// <param name="rowCount">Maximum number of rows to convert.</param>
/// <returns>The converted values.</returns>
public static IList<T> GetAsListOf<T>(this IList<string> input, int rowStartIndex, int rowCount)
{
    var rows = input.Skip(rowStartIndex).Take(rowCount);
    return rows.Select(row => row.As<T>()).ToList();
}
/// <summary>
/// Builds the feature/row/value model from raw train and test tables, where
/// the first row of each table holds the column names.
/// </summary>
/// <param name="train">Training table: header row followed by data rows.</param>
/// <param name="test">Test table: header row followed by data rows.</param>
/// <param name="className">Name of the class (label) column, if any.</param>
/// <param name="IdName">Name of the id column, if any.</param>
public DataProcessing(IList<IList<string>> train, IList<IList<string>> test, string className = null, string IdName = null)
{
    if (train == null)
    {
        throw new ArgumentNullException(nameof(train));
    }
    // NOTE(review): `test` is dereferenced below but never null-checked —
    // confirm callers always pass a non-null test table.
    Features = train.First().Select((name, index) => new FeatureModel(index, name)).ToList();
    if (!string.IsNullOrEmpty(className))
    {
        Features.Single(f => f.Name == className).IsClass = true;
    }
    if (!string.IsNullOrEmpty(IdName))
    {
        Features.Single(f => f.Name == IdName).IsId = true;
    }
    // Row counts exclude the header row.
    TrainRowAmount = train.Count() - 1;
    TestRowAmount = test.Count() - 1;
    // NOTE(review): Single throws when IdName was not supplied (no feature is
    // flagged IsId) — confirm an id column is in fact mandatory here.
    var IdFeature = Features.Single(f => f.IsId);
    TrainRows = train.Skip(1)
                .Select(
        (r, index) =>
        {
            var id = r.ElementAt(IdFeature.Index);
            return(new RowModel(index, id));
        }
        )
                .ToList();
    TestRows = test.Skip(1)
               .Select(
        (r, index) =>
        {
            var id = r.ElementAt(IdFeature.Index);
            return(new RowModel(index, id));
        }
        )
               .ToList();
    // Populate every feature and row with its cell values; the class feature
    // gets no values from the test rows. row.Index + 1 skips the header row.
    foreach (var feature in Features)
    {
        foreach (var row in TrainRows)
        {
            var value = new ValueModel(row.Id, feature, train.ElementAt(row.Index + 1).ElementAt(feature.Index), false);
            feature.Values.Add(value);
            row.Values.Add(value);
        }
        if (!feature.IsClass)
        {
            foreach (var row in TestRows)
            {
                var value = new ValueModel(row.Id, feature, test.ElementAt(row.Index + 1).ElementAt(feature.Index), true);
                feature.Values.Add(value);
                row.Values.Add(value);
            }
        }
    }
    //foreach (var feature in Features)
    //{
    //    feature.Values = train.Skip(1).Select(t => new ValueModel(t.ElementAt(feature.Index), isTest: false)).ToList();
    //    if (!feature.IsClass)
    //    {
    //        var testValues = test.Skip(1).Select(t => new ValueModel(t.ElementAt(feature.Index), isTest: true)).ToList();
    //        feature.Values = feature.Values.Concat(testValues).ToList();
    //    }
    //}
}
/// <summary>
/// Builds a page list from <paramref name="source"/> using 1-based page
/// numbering: page 1 holds the first <paramref name="pageSize"/> items.
/// </summary>
/// <param name="source">source</param>
/// <param name="pageIndex">page index (1-based)</param>
/// <param name="pageSize">page size</param>
public PageList(IList<T> source, int pageIndex, int pageSize)
{
    this.PageSetting(pageIndex, pageSize, source.Count());
    var offset = (pageIndex - 1) * pageSize;
    var pageItems = source.Skip(offset).Take(pageSize).ToList();
    this.AddRange(pageItems);
}
/// <summary>
/// Send indexing actions to be processed by the service.
/// </summary>
/// <param name="batch">The batch of actions to submit.</param>
/// <param name="cancellationToken">A cancellation token.</param>
/// <returns>Whether the submission was throttled.</returns>
protected override async Task<bool> OnSubmitBatchAsync(IList<PublisherAction<IndexDocumentsAction<T>>> batch, CancellationToken cancellationToken)
{
    // Bail early if someone sent an empty batch
    if (batch.Count == 0)
    {
        return(false);
    }
    // Notify the action is being sent
    foreach (PublisherAction<IndexDocumentsAction<T>> action in batch)
    {
        await _sender.RaiseActionSentAsync(action.Document, cancellationToken).ConfigureAwait(false);
    }
    // Send the request to the service
    Response<IndexDocumentsResult> response = null;
    try
    {
        response = await _sender.SearchClient.IndexDocumentsAsync(
            IndexDocumentsBatch.Create(batch.Select(a => a.Document).ToArray()),
            cancellationToken: cancellationToken)
                   .ConfigureAwait(false);
    }
    // Handle batch level failures
    catch (RequestFailedException ex) when(ex.Status == 413) // Payload Too Large
    {
        // Split the batch and try with smaller payloads
        int half = (int)Math.Floor((double)batch.Count / 2.0);
        var smaller = new List<PublisherAction<IndexDocumentsAction<T>>>(batch.Take(half));
        foreach (PublisherAction<IndexDocumentsAction<T>> action in batch.Skip(half))
        {
            // Add the second half to the retry queue without
            // counting this as a retry attempt
            _ = EnqueueRetry(action, skipIncrement: true);
        }
        // Try resubmitting with just the smaller half
        await SubmitBatchAsync(smaller, cancellationToken).ConfigureAwait(false);
        return(false);
    }
    catch (Exception ex)
    {
        // Retry the whole batch using the same exception for everything
        foreach (PublisherAction<IndexDocumentsAction<T>> action in batch)
        {
            await EnqueueOrFailRetryAsync(action, null, ex, cancellationToken).ConfigureAwait(false);
        }
        // Search currently uses 503s for throttling
        return(ex is RequestFailedException failure && failure.Status == 503);
    }
    // Handle individual responses which might be success or failure
    bool throttled = false;
    foreach ((PublisherAction<IndexDocumentsAction<T>> action, IndexingResult result) in AssociateResults(batch, response.Value.Results))
    {
        // Search currently uses 503s for throttling
        throttled |= (result.Status == 503);
        Debug.Assert(action.Key == result.Key);
        if (result.Succeeded)
        {
            await _sender.RaiseActionCompletedAsync(
                action.Document,
                result,
                cancellationToken)
            .ConfigureAwait(false);
        }
        else if (IsRetriable(result.Status))
        {
            // Transient per-document failure: re-queue (or fail once the
            // retry budget is exhausted).
            await EnqueueOrFailRetryAsync(
                action,
                result,
                exception: null,
                cancellationToken)
            .ConfigureAwait(false);
        }
        else
        {
            // Permanent per-document failure: surface it to the caller.
            await _sender.RaiseActionFailedAsync(
                action.Document,
                result,
                exception: null,
                cancellationToken)
            .ConfigureAwait(false);
        }
    }
    return(throttled);
}
/// <summary>
/// Returns the last <paramref name="count"/> elements of
/// <paramref name="source"/> as a new list. When count exceeds the list
/// size the whole list is returned; when count is zero or negative the
/// result is empty.
/// </summary>
/// <param name="source">The list to take from (not modified).</param>
/// <param name="count">How many trailing elements to keep.</param>
/// <returns>The trailing elements, in their original order.</returns>
public static IList<T> TakeLast<T>(this IList<T> source, int count)
{
    var start = source.Count - count;
    if (start < 0)
    {
        start = 0;
    }
    var tail = new List<T>();
    for (var i = start; i < source.Count; i++)
    {
        tail.Add(source[i]);
    }
    return tail;
}
/// <summary>
/// Rebuilds this node's underlying <see cref="BinaryExpression"/> with the
/// supplied operand expressions, preserving its node type, operator method,
/// nullable lifting and conversion lambda.
/// </summary>
/// <param name="operands">Replacement operands: [0] is the left operand, [1] the right.</param>
/// <returns>A new binary expression of the same kind over the new operands.</returns>
/// <remarks>
/// The previous implementation switched over every ExpressionType and called
/// each factory by hand. Besides the duplication, it dropped
/// <c>IsLiftedToNull</c> for comparison nodes whenever <c>Method</c> was
/// null, rebuilt rank-1 ArrayIndex nodes through the IEnumerable overload
/// (yielding a MethodCallExpression instead of a BinaryExpression), and
/// threw a bare <see cref="Exception"/> for unknown node types.
/// <see cref="Expression.MakeBinary(ExpressionType, Expression, Expression, bool, System.Reflection.MethodInfo, LambdaExpression)"/>
/// performs the same per-node-type dispatch while forwarding Method,
/// IsLiftedToNull and Conversion (each ignored by the factories that do not
/// use them) and throws ArgumentException for unsupported node types.
/// </remarks>
public Expression Mutate(IList<Expression> operands)
{
    return Expression.MakeBinary(
        BinaryExpression.NodeType,
        operands[0],
        operands[1],
        BinaryExpression.IsLiftedToNull,
        BinaryExpression.Method,
        BinaryExpression.Conversion);
}