public override ClassificationSpanListSegmentModel CreateSegment(ListSegment<ClassificationSpan> segmentSpans)
{
    // Build a segment model carrying the packed per-span local symbol group ids.
    var segment = new ClassificationSpanListSegmentModel();
    segment.LocalSymbolGroupIds = IntegerListModel.Create(segmentSpans, span => span.LocalGroupId);
    return segment;
}
public void TestSlice()
{
    // Arrange: two identical 3-element slices over the same backing array,
    // plus a differing slice over another array.
    var a = new[] { 1.0, 2.71828, 3.14, 4, 4.99999, 42, 1024 };
    var slicea = new ListSegment<double>(a, 2, 3);
    var sliceaSame = new ListSegment<double>(a, 2, 3);
    var b = new[] { 1.0, 2, 3, 4, 5, 99, 1024 };
    var sliceb = new ListSegment<double>(b, 2, 3);

    // Basic shape of the slice.
    Assert.Equal(a, slicea.List);
    Assert.Equal(2, slicea.Offset);
    Assert.Equal(3, slicea.Count);

    // Equality members: equal slices must compare equal and hash equal.
    Assert.True(slicea.Equals(sliceaSame));
    Assert.True(slicea.Equals((object)sliceaSame));
    // BUG FIX: the original compared sliceaSame's hash code with itself,
    // which is vacuously true; compare the two equal slices instead.
    Assert.Equal(slicea.GetHashCode(), sliceaSame.GetHashCode());
    Assert.True(slicea == sliceaSame);
    Assert.True(slicea != sliceb);

    // Indexing and search are relative to the slice, not the backing array.
    Assert.True(slicea[2].NearEquals(4.99999));
    Assert.Equal(1, slicea.IndexOf(4));
    Assert.Equal(-1, slicea.IndexOf(1025));
    Assert.Contains(4, slicea);
    Assert.DoesNotContain(1025, slicea);
    Assert.True(slicea.Sum().NearEquals(12.13999));

    // The same behavior must hold through the IList<double> interface.
    IList<double> asList = slicea;
    Assert.True(asList[2].NearEquals(4.99999));
    Assert.Equal(1, asList.IndexOf(4));
    Assert.Equal(-1, asList.IndexOf(1025));
    Assert.Contains(4, asList);
    Assert.DoesNotContain(1025, asList);
    Assert.True(asList.Sum().NearEquals(12.13999));
}
/// <summary>
/// Returns the contiguous run of spans overlapping the range
/// [startPosition, startPosition + length). The search is done in two steps:
/// a coarse binary range search over fixed-size segments, then a fine range
/// search over the spans inside the candidate segments.
/// </summary>
public ListSegment <TSpan> GetSpans(int startPosition, int length)
{
    // Coarse step: find which segments overlap the requested range.
    var segmentList = new SegmentRangeList(Segments);
    var segmentListRange = segmentList.GetReadOnlyList().GetRange(
        new Range(startPosition, length),
        (searchRange, segmentRange) => RangeHelper.MinCompare(searchRange, segmentRange, inclusive: true),
        (searchRange, segmentRange) => RangeHelper.MaxCompare(searchRange, segmentRange, inclusive: true));
    var segmentStart = segmentListRange.Start;
    var segmentCount = segmentListRange.Count;
    // Convert segment indices to absolute span indices; each segment holds
    // 2^SegmentSpanCountBitWidth spans.
    var absoluteStart = segmentStart << SegmentSpanCountBitWidth;
    var absoluteLength = segmentCount << SegmentSpanCountBitWidth;
    var endSegmentExclusive = segmentStart + segmentCount;
    if (endSegmentExclusive >= Segments.Count)
    {
        // The last segment may be only partially filled: trim the span count
        // so it does not run past Count.
        absoluteLength -= (SegmentOffsetBitMask - (SegmentOffsetBitMask & (Count - 1)));
    }
    // Fine step: range-search the individual span ranges inside the candidate
    // window and return the matching slice of this list.
    var spanRangeList = IndexableListAdapter.GetReadOnlyList(new SpanRangeList(this));
    var rangeListSegment = new ListSegment <Range>(spanRangeList, absoluteStart, absoluteLength);
    var spanRange = rangeListSegment.GetRange(
        new Range(startPosition, length),
        (r, start) => RangeHelper.MinCompare(r, start, inclusive: true),
        (r, start) => RangeHelper.MaxCompare(r, start, inclusive: true));
    return(new ListSegment <TSpan>(this.GetReadOnlyList(), rangeListSegment.Start + spanRange.Start, spanRange.Count));
}
private FileDatabaseSnapshot CreateFileDatabse(FileSystemEntities entities)
{
    using (new TimeElapsedLogger("Freezing file database state"))
    {
        var directories = entities.Directories;
        // Note: We cannot use "ReferenceEqualityComparer<FileName>" here because
        // the dictionary will be used in incremental updates where FileName instances
        // may be new instances from a complete file system enumeration.
        var fileByName = new Dictionary<FileName, FileWithContents>(entities.Files.Count);

        // Gather the files that actually have searchable contents into the
        // prefix of a pre-sized buffer.
        var contentsBuffer = new FileWithContents[entities.Files.Count];
        var contentsCount = 0;
        foreach (var entry in entities.Files)
        {
            var fileData = entry.Value.FileWithContents;
            fileByName.Add(entry.Key, fileData);
            if (fileData.Contents != null && fileData.Contents.ByteLength > 0)
            {
                contentsBuffer[contentsCount++] = fileData;
            }
        }

        var filesWithContents = new ListSegment<FileWithContents>(contentsBuffer, 0, contentsCount);
        var searchableContentsCollection = CreateFilePieces(filesWithContents);
        FileDatabaseDebugLogger.LogFileContentsStats(filesWithContents);

        return new FileDatabaseSnapshot(
            entities.ProjectHashes,
            fileByName,
            fileByName.Keys.ToArray(),
            directories,
            searchableContentsCollection,
            filesWithContents.Count);
    }
}
public void ListSegmentWorksLikeArray()
{
    // Arrange: two full-array slices over the same backing array, plus a
    // slice over a different array.
    var a = new[] { 1.0, 2.71828, 3.14, 4, 4.99999, 42, 1024 };
    var slicea = new ListSegment<double>(a);
    var sliceaSame = new ListSegment<double>(a);
    var b = new[] { 1.0, 2, 3, 4, 5, 99, 1024 };
    var sliceb = new ListSegment<double>(b);

    // A full slice exposes the entire backing array.
    Assert.Equal(a, slicea.List);
    Assert.Equal(0, slicea.Offset);
    Assert.Equal(7, slicea.Count);

    // Equality members: equal slices must compare equal and hash equal.
    Assert.True(slicea.Equals(sliceaSame));
    Assert.True(slicea.Equals((object)sliceaSame));
    // BUG FIX: the original compared sliceaSame's hash code with itself,
    // which is vacuously true; compare the two equal slices instead.
    Assert.Equal(slicea.GetHashCode(), sliceaSame.GetHashCode());
    Assert.True(slicea == sliceaSame);
    Assert.True(slicea != sliceb);

    // Indexing and search semantics match the underlying array.
    Assert.True(slicea[3].NearEquals(4));
    Assert.Equal(6, slicea.IndexOf(1024));
    Assert.Equal(-1, slicea.IndexOf(1025));
    Assert.Contains(1024, slicea);
    Assert.DoesNotContain(1025, slicea);
    Assert.True(slicea.Sum().NearEquals(1081.85827));

    // The same behavior must hold through the IList<double> interface.
    IList<double> asList = slicea;
    Assert.True(asList[3].NearEquals(4));
    Assert.Equal(6, asList.IndexOf(1024));
    Assert.Equal(-1, asList.IndexOf(1025));
    Assert.Contains(1024, asList);
    Assert.DoesNotContain(1025, asList);
    Assert.True(asList.Sum().NearEquals(1081.85827));
}
public static IList<IList<TSource>> PartitionByChunks<TSource>(this IList<TSource> source, int partitionCount)
{
    // Carve "source" into consecutive ListSegment views whose sizes are
    // chosen by GetPartitionSizes; segments share the backing list.
    var partitions = new List<IList<TSource>>();
    var offset = 0;
    foreach (var partitionSize in GetPartitionSizes(source, partitionCount))
    {
        partitions.Add(new ListSegment<TSource>(source, offset, partitionSize));
        offset += partitionSize;
    }
    return partitions;
}
public void Write(IList<long> values, bool areSigned, bool aligned)
{
    // Encode the values in windows of at most 512 entries; EncodeValues
    // reports how many values it actually consumed.
    for (var position = 0; position < values.Count; )
    {
        var window = new ListSegment<long>(values, position, 512);
        position += EncodeValues(window, areSigned, aligned);
    }
}
static int GetLongestMatch(ListSegment _source, ListSegment _target)
{
    // Length of the common prefix of the two segments, using the default
    // equality comparer for T.
    var matched = 0;
    while (matched < _source.Length && matched < _target.Length
           && EqualityComparer<T>.Default.Equals(_source[matched], _target[matched]))
    {
        matched++;
    }
    return matched;
}
public bool TryGetOffset(ListSegment<DataSegment> labels, out int offsetInDns)
{
    // Look up a previously recorded compression offset for this label run,
    // honoring the configured compression mode.
    if (DomainNameCompressionMode == DnsDomainNameCompressionMode.All)
        return _data.TryGetValue(labels, out offsetInDns);

    if (DomainNameCompressionMode == DnsDomainNameCompressionMode.Nothing)
    {
        offsetInDns = 0;
        return false;
    }

    throw new InvalidOperationException(string.Format(CultureInfo.InvariantCulture, "Invalid Domain Name Compression Mode {0}", DomainNameCompressionMode));
}
// ---------------------------------------------------------------------
/// <summary>
/// Karatsuba multiplication of two long integers held as digit lists in base
/// <paramref name="BASE"/>; the product is accumulated into
/// <paramref name="result"/>. Recurses on halves of the operands and falls
/// back to schoolbook multiplication below the cutoff dimension.
/// </summary>
private static void MultiplyKaratsuba(int BASE, IList <int> result, IList <int> one, IList <int> two, int dim)
{
    int half = dim / 2;
    // Below the cutoff the simple O(n^2) algorithm beats the recursion
    // overhead. (Original comment was in Russian: "discard at some value".)
    if (dim <= karatsubaCutoffDimension)
    {
        LongIntegerMethods.MultiplySimple(BASE, result, one, two);
        return;
    }
    // Split each operand into two halves (a,b and c,d) — presumably low and
    // high digit halves; depends on digit ordering elsewhere — TODO confirm.
    ListSegment <int> a = new ListSegment <int>(one, 0, half);
    ListSegment <int> b = new ListSegment <int>(one, half, one.Count - half);
    ListSegment <int> c = new ListSegment <int>(two, 0, half);
    ListSegment <int> d = new ListSegment <int>(two, half, two.Count - half);
    int[] ac = new int[dim];
    int[] bd = new int[b.Count + d.Count];
    int[] abcd = new int[b.Count + d.Count + 2];
    // Recursive sub-products: ac and bd.
    MultiplyKaratsuba(BASE, ac, a, c, half);
    MultiplyKaratsuba(BASE, bd, b, d, half);
    int[] apb = new int[b.Count + 1];
    int[] cpd = new int[d.Count + 1];
    int[] acpbd = new int[b.Count + d.Count + 2];
    // abcd = (a+b)(c+d); the middle term is abcd - (ac + bd) = ad + bc.
    LongIntegerMethods.Sum(BASE, apb, a, b);
    LongIntegerMethods.Sum(BASE, cpd, c, d);
    MultiplyKaratsuba(BASE, abcd, apb, cpd, half);
    LongIntegerMethods.Sum(BASE, acpbd, ac, bd);
    int[] difference = new int[b.Count + d.Count + 2];
    if (LongIntegerMethods.Dif(BASE, difference, abcd, acpbd))
    {
        // NOTE(review): a failed subtraction is only logged, never propagated —
        // presumably unreachable for well-formed inputs; confirm.
        Console.WriteLine("Lower-level difference error.");
    }
    // Combine: result = ac + middle-term shifted by half + bd shifted by dim.
    SumPrivate(BASE, result, ac, 0, difference, half);
    SumPrivate(BASE, result, result, 0, bd, dim);
    return;
}
public void AddCompressionData(ListSegment<DataSegment> labels, int dnsOffset)
{
    // Offsets beyond the DNS pointer mask cannot be encoded; skip them.
    if (dnsOffset > DnsDomainName.OffsetMask)
        return;

    if (DomainNameCompressionMode == DnsDomainNameCompressionMode.Nothing)
        return;

    if (DomainNameCompressionMode != DnsDomainNameCompressionMode.All)
        throw new InvalidOperationException(string.Format(CultureInfo.InvariantCulture, "Invalid Domain Name Compression Mode {0}", DomainNameCompressionMode));

    // Record the first offset seen for this label run; later duplicates are ignored.
    if (!_data.ContainsKey(labels))
        _data.Add(labels, dnsOffset);
}
public void IndexerReadWriteIntegerDataMatches()
{
    // A shared-memory list is identified by name; values written through one
    // instance must be visible to readers opened with the same name.
    var name = Guid.NewGuid().ToString();
    using (var writer = new SharedList<int>(name, 10))
    {
        // Write through the indexer directly.
        writer[0] = 3;
        writer[4] = 10;
        using (var reader = new SharedList<int>(name))
        {
            Assert.Equal(0, reader[1]);
            Assert.Equal(3, reader[0]);
            Assert.Equal(10, reader[4]);
        }

        // Write through the IList<int> interface.
        IList<int> writerAsList = writer;
        writerAsList[0] = 5;
        writerAsList[4] = 55;
        using (var reader = new SharedList<int>(name))
        {
            IList<int> readerAsList = reader;
            Assert.Equal(0, readerAsList[1]);
            Assert.Equal(5, readerAsList[0]);
            Assert.Equal(55, readerAsList[4]);
        }

        // Write through a ListSegment view starting at element 1.
        writerAsList[3] = 68;
        IList<int> writerSlice = new ListSegment<int>(writerAsList, 1, 8);
        writerSlice[0] = 67;
        using (var reader = new SharedList<int>(name))
        {
            IList<int> readerAsList = reader;
            IList<int> readerSlice = new ListSegment<int>(readerAsList, 1, 8);
            Assert.Equal(67, readerSlice[0]);
            Assert.Equal(68, readerSlice[2]);
            Assert.Equal(55, readerSlice[3]);
        }
    }
}
/// <summary>
/// Finds the moving statistic sequence from the sequence of values
/// using the specified non-negative window width and the statistic functor
/// <paramref name="statistic"/> of type <c>Func&lt;IEnumerable&lt;T&gt;, T&gt;</c>.
///
/// The tail values (such that there is not enough data in the window around
/// them) get handled using the <see cref="TailValuesHandling"/> flag passed.
/// </summary>
/// <typeparam name="T">The type of elements in the sequence.</typeparam>
/// <typeparam name="C">The numeric calculator for the <typeparamref name="T"/> type.</typeparam>
/// <param name="values">The sequence of values.</param>
/// <param name="windowWidth">
/// A non-negative window width applied to both sides of the current value.
/// It means that, e.g. when the window width is 1, the window will consist
/// of three values: the current value, one value to the left and one value to the right.
/// </param>
/// <param name="windowType">
/// The type of the window for calculation of the average.
/// See <see cref="WindowType"/>
/// </param>
/// <param name="tailValuesHandling">
/// A flag specifying how the tail values should be handled
/// (such that there is not enough data in the window around them).
/// See <see cref="TailValuesHandling"/>.
/// </param>
/// <param name="statistic">
/// A functor object taking an <c>IEnumerable&lt;T&gt;</c> sequence of observations
/// and returning a statistic such as sample average, sample median, sample variance / standard
/// deviation etc.
/// </param>
/// <returns>
/// A sequence of statistics calculated in the window
/// around each value from the source sequence,
/// i.e. the i-th index in the result sequence means
/// that the <paramref name="statistic"/> was calculated in the
/// respective window around the i-th element of the source sequence.
/// </returns>
public static List<T> MovingStatistic<T, C>(
    this IList<T> values,
    int windowWidth,
    WindowType windowType,
    TailValuesHandling tailValuesHandling,
    Func<IEnumerable<T>, T> statistic)
    where C : ICalc<T>, new()
{
    Contract.Requires<ArgumentNullException>(values != null, "values");
    Contract.Requires<ArgumentOutOfRangeException>(windowWidth >= 0, "The window width should be non-negative");
    Contract.Requires<ArgumentException>(
        tailValuesHandling != TailValuesHandling.UseSymmetricAvailableWindow || windowType == WindowType.Symmetric,
        "Symmetric tail values handling is only available for symmetric windows.");

    var result = new List<T>(values.Count);
    for (int index = 0; index < values.Count; ++index)
    {
        // The window of source values around the current index, shaped by the
        // window type and the tail-handling mode.
        ListSegment<T> windowSequence = __getWindowSequence<T>(values, windowWidth, windowType, tailValuesHandling, index);

        // Positions whose window came back empty contribute no statistic.
        if (!windowSequence.IsEmpty())
        {
            result.Add(statistic(windowSequence));
        }
    }
    return result;
}
/// <summary>
/// Run-length encodes the byte sequence: runs of 3+ identical bytes are
/// emitted as repeats, everything else as literal runs.
/// </summary>
public void Write(IList <byte> values)
{
    var position = 0;
    while (position < values.Count)
    {
        // Window over the remaining unencoded bytes.
        var window = new ListSegment <byte>(values, position);
        //Check for repeats
        byte repeatingValue;
        var repeatingValueCount = FindRepeatedValues(window, out repeatingValue);
        if (repeatingValueCount >= 3)
        {
            EncodeRepeat(repeatingValueCount, repeatingValue);
            position += repeatingValueCount;
            continue; //Search again for new repeating values
        }
        //Check for future repeats: emit everything up to the next repeat run
        // as literals. NOTE(review): progress relies on FindNonRepeatingValues
        // returning a positive count when no leading repeat exists — confirm,
        // otherwise this loop would not terminate.
        var repeatLocation = FindNonRepeatingValues(window);
        var literalWindow = new ListSegment <byte>(window, 0, repeatLocation);
        EncodeLiterals(literalWindow);
        position += repeatLocation;
    }
}
/// <summary>
/// Runs a Lucene prefix search described by <paramref name="searchOptions"/>
/// and returns one page of results plus the total hit count.
/// </summary>
/// <param name="searchOptions">Query text, paging, sorting and field-selection options.</param>
/// <returns>The requested page of results; empty when the query is blank.</returns>
/// <exception cref="ArgumentNullException">When <paramref name="searchOptions"/> is null.</exception>
/// <exception cref="ArgumentException">When result fields are missing or the page size is negative.</exception>
/// <exception cref="InvalidOperationException">When a requested result field is not stored in the index.</exception>
public ListSegment<SearchResult> Search(SearchOptions searchOptions)
{
    if (searchOptions == null)
    {
        throw new ArgumentNullException(nameof(searchOptions));
    }
    if (searchOptions.ResultFields == null || searchOptions.ResultFields.Count() == 0)
    {
        throw new ArgumentException($"{nameof(searchOptions.ResultFields)} required");
    }
    if (searchOptions.PageSize < 0)
    {
        throw new ArgumentException($"{searchOptions.PageSize} can't be negative");
    }
    var unstoredFields = searchOptions.ResultFields.Where(it => !StoredFields.Contains(it));
    if (unstoredFields.Count() > 0)
    {
        throw new InvalidOperationException("Some of the requested fields are unstored: " + string.Join(", ", unstoredFields));
    }

    ListSegment<SearchResult> results = new ListSegment<SearchResult>();
    if (string.IsNullOrEmpty(searchOptions.Query))
    {
        return results;
    }

    // Escape the raw query, then AND together each term as a prefix query ("term*").
    var searchQuery = string.Join(" AND ", QueryParser.Escape(searchOptions.Query.Trim())
        .Split().Where(x => !string.IsNullOrEmpty(x)).Select(x => x.Trim() + "*"));
    if (string.IsNullOrWhiteSpace(searchQuery))
    {
        return results;
    }

    using (var searcher = new IndexSearcher(Directory, true))
    // BUG FIX: the analyzer was disposed manually at the end of the block and
    // leaked whenever parsing/searching threw; dispose it deterministically.
    using (var analyzer = GetAnalyzer())
    {
        QueryParser parser;
        if (searchOptions.SearchFields == null || searchOptions.SearchFields.Count() == 0)
        {
            parser = new MultiFieldQueryParser(luceneVersion, DefaultSearchFields, analyzer);
        }
        else if (searchOptions.SearchFields.Count() == 1)
        {
            parser = new QueryParser(luceneVersion, searchOptions.SearchFields.First(), analyzer);
        }
        else
        {
            // BUG FIX: removed the redundant double assignment ("parser = parser = ...").
            parser = new MultiFieldQueryParser(luceneVersion, searchOptions.SearchFields.ToArray(), analyzer);
        }

        Sort sort;
        if (string.IsNullOrEmpty(searchOptions.SortField))
        {
            sort = Sort.RELEVANCE;
        }
        else
        {
            sort = new Sort(new SortField(searchOptions.SortField, SortField.STRING, searchOptions.InverseSort));
        }

        // Lucene returns the top hits up to the end of the requested page;
        // earlier pages are skipped below.
        var pSize = searchOptions.PageSize;
        var pNumber = searchOptions.PageNumber;
        var hitsLimit = (pNumber + 1) * pSize;
        var docs = searcher.Search(parser.Parse(searchQuery), null, hitsLimit, sort);
        var totalHits = docs.TotalHits;
        var hits = docs.ScoreDocs;

        // Materialize only the requested page, copying the stored result fields.
        var resultFieldsArray = searchOptions.ResultFields.ToArray();
        var items = new List<SearchResult>(searchOptions.PageSize);
        var count = 0;
        for (var i = pNumber * pSize; count < pSize; i++, count++)
        {
            if (i == totalHits)
            {
                break;
            }
            var doc = searcher.Doc(hits[i].Doc);
            var result = new Dictionary<string, string>(resultFieldsArray.Length);
            foreach (var key in resultFieldsArray)
            {
                result.Add(key, doc.Get(key));
            }
            items.Add(new SearchResult(result, resultFieldsArray));
        }
        results.Items = items;
        results.TotalCount = totalHits;
    }
    return results;
}
public static EditAction[] SequenceDiffer(ListSegment source, ListSegment target) {
public bool IsAvailable(ListSegment<DataSegment> labels)
{
    // True when a compression offset has been recorded for this label run.
    int unusedOffset;
    return TryGetOffset(labels, out unusedOffset);
}
public override SpanListSegmentModel CreateSegment(ListSegment<SymbolSpan> segmentSpans)
{
    // Symbol spans carry no segment-specific payload, so a bare model suffices.
    var segment = new SpanListSegmentModel();
    return segment;
}
public override IEnumerable <QsiScript> Parse(string input, CancellationToken cancellationToken) { TSqlFragment result; try { result = _parser.Parse(input); } catch (Exception) { return(base.Parse(input, cancellationToken)); } if (result is not TSqlScript script) { return(Enumerable.Empty <QsiScript>()); } IList <TSqlParserToken> tokenStream = script.ScriptTokenStream; var list = new List <QsiScript>(); var index = -1; int start, end; foreach (var batch in script.Batches) { foreach (var statement in batch.Statements) { if (index < statement.FirstTokenIndex && statement.FirstTokenIndex != 0) { start = index + 1; end = statement.FirstTokenIndex - 1; TrimTrivia(); AddScript(); } start = statement.FirstTokenIndex; end = statement.LastTokenIndex; AddScript(); index = statement.LastTokenIndex; } } if (index < tokenStream.Count - 1) { start = index + 1; end = tokenStream.Count - 1; TrimTrivia(); AddScript(); } return(list); void TrimTrivia() { bool trim = false; for (; start <= end; start++) { if (IsTrivia(tokenStream[start].TokenType)) { continue; } trim = true; break; } if (!trim) { start = -1; end = -1; } for (; end > start; end--) { if (IsTrivia(tokenStream[end].TokenType)) { continue; } break; } } void AddScript() { if (end == -1 || start == -1) { return; } var count = end - start + 1; var listSegment = new ListSegment <TSqlParserToken>(tokenStream, start, count); if (listSegment.All(s => s.TokenType == TSqlTokenType.WhiteSpace || s.TokenType == TSqlTokenType.Go || s.TokenType == TSqlTokenType.Semicolon)) { return; } var first = listSegment[0]; var last = listSegment[^ 1];
/// <summary>
/// Builds the span list model from <paramref name="spans"/>: deduplicates the
/// per-span shared values, optionally re-sorts them, then packs the spans into
/// fixed-size segments with per-segment start/length/shared-index data.
/// </summary>
/// <param name="spans">The source spans, in order.</param>
/// <param name="sharedKeyComparer">Equality comparer for deduplicating shared keys; defaults to <see cref="EqualityComparer{T}.Default"/>.</param>
/// <param name="sharedKeySorter">Optional sort order for shared keys (used only when <paramref name="sharedValueSorter"/> is null).</param>
/// <param name="sharedValueSorter">Optional sort order for shared values; takes precedence over the key sorter.</param>
public SpanListModel(
    IReadOnlyList <TSpan> spans,
    IEqualityComparer <TSharedKey> sharedKeyComparer = null,
    IComparer <TSharedKey> sharedKeySorter = null,
    IComparer <TShared> sharedValueSorter = null)
    : this()
{
    Optimize = true;
    Count = spans.Count;
    // First pass: record each distinct shared key once, remembering the index
    // of its shared value inside SharedValues.
    List <TSharedKey> sharedKeys = new List <TSharedKey>();
    Dictionary <TSharedKey, int> sharedMap = new Dictionary <TSharedKey, int>(sharedKeyComparer ?? EqualityComparer <TSharedKey> .Default);
    foreach (var span in spans)
    {
        var sharedKey = GetSharedKey(span);
        if (!sharedMap.ContainsKey(sharedKey))
        {
            sharedMap.Add(sharedKey, SharedValues.Count);
            SharedValues.Add(GetShared(span));
            sharedKeys.Add(sharedKey);
        }
    }
    if (sharedValueSorter != null || sharedKeySorter != null)
    {
        // Sort keys and values in lockstep, then rebuild the key -> index map
        // to match the new ordering.
        TSharedKey[] sharedKeyArray = sharedKeys.ToArray();
        TShared[] sharedValueArray = SharedValues.ToArray();
        if (sharedValueSorter != null)
        {
            Array.Sort(sharedValueArray, sharedKeyArray, sharedValueSorter);
        }
        else
        {
            Array.Sort(sharedKeyArray, sharedValueArray, sharedKeySorter);
        }
        // Ensure shared keys is not used after this point
        sharedKeys = null;
        SharedValues.Clear();
        SharedValues.AddRange(sharedValueArray);
        for (int index = 0; index < sharedKeyArray.Length; index++)
        {
            sharedMap[sharedKeyArray[index]] = index;
        }
    }
    // Second pass: slice the spans into segments of SegmentSpanCount entries
    // (the last segment may be shorter).
    var segmentsLength = NumberUtils.Ceiling(spans.Count, SegmentSpanCount);
    int offset = 0;
    for (int segmentIndex = 0; segmentIndex < segmentsLength; segmentIndex++, offset += SegmentSpanCount)
    {
        var length = Math.Min(SegmentSpanCount, spans.Count - offset);
        if (length == 0)
        {
            break;
        }
        var segmentSpans = new ListSegment <TSpan>(spans, offset, length);
        var segment = CreateSegment(segmentSpans);
        Segments.Add(segment);
        var firstSpanStart = segmentSpans[0].Start;
        var lastSpan = segmentSpans[length - 1];
        // Starts are derived per span via GetStart relative to the span's
        // shared value — presumably a relative encoding; GetStart is defined
        // elsewhere, so confirm its exact semantics.
        segment.Starts = IntegerListModel.Create(segmentSpans, s => GetStart(s, SharedValues[sharedMap[GetSharedKey(s)]]));
        segment.Lengths = IntegerListModel.Create(segmentSpans, s => s.Length);
        // Total character extent covered by this segment's spans.
        segment.FullLength = (lastSpan.Start + lastSpan.Length) - firstSpanStart;
        segment.SharedIndices = IntegerListModel.Create(segmentSpans, s => sharedMap[GetSharedKey(s)]);
    }
}
private FileDatabase CreateFileDatabse()
{
    using (new TimeElapsedLogger("Freezing file database state"))
    {
        var directories = _directories;
        // Note: We cannot use "ReferenceEqualityComparer<FileName>" here because
        // the dictionary will be used in incremental updates where FileName instances
        // may be new instances from a complete file system enumeration.
        var fileDataByName = new Dictionary<FileName, FileData>(_files.Count);

        // Gather the files that actually have contents into the prefix of a
        // pre-sized buffer.
        var contentsBuffer = new FileData[_files.Count];
        var contentsCount = 0;
        foreach (var entry in _files)
        {
            var fileData = entry.Value.FileData;
            fileDataByName.Add(entry.Key, fileData);
            if (fileData.Contents != null && fileData.Contents.ByteLength > 0)
            {
                contentsBuffer[contentsCount++] = fileData;
            }
        }

        var filesWithContents = new ListSegment<FileData>(contentsBuffer, 0, contentsCount);
        var searchableContentsCollection = CreateFilePieces(filesWithContents);
        LogFileContentsStats(filesWithContents);

        return new FileDatabase(
            _projectHashes,
            fileDataByName,
            fileDataByName.Keys.ToArray(),
            directories,
            searchableContentsCollection,
            filesWithContents.Count);
    }
}
/// <summary>
/// Creates the segment model for one fixed-size chunk of spans. Implementations
/// may capture per-segment data derived from <paramref name="segmentSpans"/>
/// or return an empty model when no extra payload is needed.
/// </summary>
public abstract TSegment CreateSegment(ListSegment <TSpan> segmentSpans);
/// <summary>
/// Computes an edit script (adds and removes) that transforms
/// <paramref name="source"/> into <paramref name="target"/>, using a
/// breadth-first search over the edit graph that skips runs of matching
/// elements after each edit.
/// </summary>
/// <returns>The edit actions in application order; empty when the sequences already match.</returns>
public static EditAction[] SequenceDiffer(ListSegment source, ListSegment target)
{
    // Length of the common prefix of the two segments.
    int GetLongestMatch(ListSegment _source, ListSegment _target)
    {
        int i;
        for (i = 0; i < _source.Length && i < _target.Length; i++)
        {
            if (!EqualityComparer<T>.Default.Equals(_source[i], _target[i]))
            {
                break;
            }
        }
        return i;
    }

    // Walks the linked path backwards into a correctly ordered array.
    EditAction[] BuildResult(LinkNode node)
    {
        int c = node.Count;
        var result = new EditAction[c];
        while (c > 0)
        {
            result[--c] = node.Action;
            node = node.Previous;
        }
        return result;
    }

    int start = GetLongestMatch(source, target);
    if (start == source.Length && start == target.Length)
    {
        // Sequences are identical; nothing to edit.
        return Array.Empty<EditAction>();
    }

    var queue = new Queue<LinkNode>(source.Length + target.Length);
    queue.Enqueue(new LinkNode(start, start));
    while (queue.Count > 0)
    {
        var node = queue.Dequeue();
        if (node.X < source.Length)
        {
            // Branch 1: remove source[X], then skip the following common run.
            var action = new EditAction { IsAdd = false, OriginalIndex = node.X, Item = source[node.X] };
            int l = GetLongestMatch(source.Slice(node.X + 1), target.Slice(node.Y));
            var newNode = new LinkNode(node.X + 1 + l, node.Y + l, action, node);
            if (newNode.X == source.Length && newNode.Y == target.Length)
            {
                return BuildResult(newNode);
            }
            else
            {
                queue.Enqueue(newNode);
            }
        }
        if (node.Y < target.Length)
        {
            // Branch 2: add target[Y], then skip the following common run.
            var action = new EditAction { IsAdd = true, OriginalIndex = node.X, Item = target[node.Y] };
            int l = GetLongestMatch(source.Slice(node.X), target.Slice(node.Y + 1));
            var newNode = new LinkNode(node.X + l, node.Y + 1 + l, action, node);
            if (newNode.X == source.Length && newNode.Y == target.Length)
            {
                return BuildResult(newNode);
            }
            else
            {
                queue.Enqueue(newNode);
            }
        }
    }

    // BUG FIX: throw a specific exception type instead of the base Exception
    // for this logically unreachable state (the BFS always reaches the
    // terminal node (source.Length, target.Length)).
    throw new InvalidOperationException("Unreachable: the edit-graph search always reaches the terminal node.");
}
public override SpanListSegmentModel CreateSegment(ListSegment<ReferenceSpan> segmentSpans)
{
    // Reference spans carry no segment-specific payload, so a bare model suffices.
    var segment = new SpanListSegmentModel();
    return segment;
}