/// <summary>
/// Forwards the two supplied values to <see cref="NumericUtils.EuclidGCD(int, int)"/> and returns its result.
/// </summary>
public int EuclidGCD_TwoCorrectValuesPassed_WorksCorrectly(int first, int second)
    => NumericUtils.EuclidGCD(first, second);
/// <summary>
/// Decodes a prefix-coded term back into a <see cref="float"/>.
/// NOTE: This was parseFloat() in Lucene
/// </summary>
public float ParseSingle(BytesRef term)
{
    int sortableBits = NumericUtils.PrefixCodedToInt32(term);
    return NumericUtils.SortableInt32ToSingle(sortableBits);
}
/// <summary>
/// Wraps the terms iterator so that only prefix-coded 64-bit terms are enumerated.
/// </summary>
public TermsEnum TermsEnum(Terms terms)
{
    var iterator = terms.GetIterator(null);
    return NumericUtils.FilterPrefixCodedInt64s(iterator);
}
/// <summary>
/// we fake a double test using long2double conversion of NumericUtils
/// </summary>
private void TestDoubleRange(int precisionStep)
{
    string field = "ascfield" + precisionStep;
    const long lower = -1000L, upper = +2000L;

    // Convert the sortable-long bounds once; both the query and the filter use them.
    double minValue = NumericUtils.SortableInt64ToDouble(lower);
    double maxValue = NumericUtils.SortableInt64ToDouble(upper);

    Query rangeQuery = NumericRangeQuery.NewDoubleRange(field, precisionStep, minValue, maxValue, true, true);
    TopDocs topDocs = searcher.Search(rangeQuery, 1);
    Assert.AreEqual(upper - lower + 1, topDocs.TotalHits, "Returned count of range query must be equal to inclusive range length");

    Filter rangeFilter = NumericRangeFilter.NewDoubleRange(field, precisionStep, minValue, maxValue, true, true);
    topDocs = searcher.Search(new MatchAllDocsQuery(), rangeFilter, 1);
    Assert.AreEqual(upper - lower + 1, topDocs.TotalHits, "Returned count of range filter must be equal to inclusive range length");
}
/// <summary>
/// Interactive entry point: collects exchange credentials, currency pair, and trading-strategy
/// parameters from the console, then starts one trading engine and blocks until it finishes.
/// Fixes: "Exhcange" typo in the currency prompts, and the after-init order-cap prompt that
/// previously repeated the "On Init" wording.
/// </summary>
static void Main(string[] args)
{
    var loggerFactory = new LoggerFactory();
    loggerFactory.AddConsole();
    Console.WriteLine("======================");

    var tradingEngines = new List<ITradingEngine>();

    // ---- Exchange selection: loop until a known exchange is chosen ----
    var useExchange = SupportedExchanges.Unknown;
    while (useExchange == SupportedExchanges.Unknown)
    {
        Console.WriteLine("Type 1 for CEX.IO, Type 2 for GDAX");
        useExchange = NumericUtils.GetIntegerValueFromObject(Console.ReadLine()).ParseEnum<SupportedExchanges>();
    }

    // ---- Credentials ----
    Console.Write($"{useExchange} API Secret:");
    var secret = Console.ReadLine();
    Console.Write($"\n{(useExchange == SupportedExchanges.Cex ? "CEX Username" : (useExchange == SupportedExchanges.Gdax ? "Gdax Pass Phrase" : "Username"))}:");
    var username = Console.ReadLine();
    Console.Write($"\n{useExchange} API Key:");
    var apiKey = Console.ReadLine();
    Console.Write("\nSlack Notification Webhook Url:");
    var slackWebhook = Console.ReadLine();

    // Local helper: red-on-white error output, restoring console colors afterwards.
    void WriteError(string message)
    {
        Console.ForegroundColor = ConsoleColor.Red;
        Console.BackgroundColor = ConsoleColor.White;
        Console.WriteLine(message);
        Console.ResetColor();
    }

    // Local helper: prompt until a supported currency is entered; empty input selects the default.
    string ReadCurrency(string prompt, string defaultCurrency, string defaultMessage)
    {
        var currency = string.Empty;
        while (currency.IsNullOrEmpty())
        {
            Console.Write(prompt);
            currency = Console.ReadLine();
            if (Currencies.SupportedCurrencies.Count(i => i == currency) > 0)
            {
                continue; // valid currency entered -> loop condition exits
            }
            if (currency.IsNullOrEmpty())
            {
                Console.WriteLine(defaultMessage);
                currency = defaultCurrency;
            }
            else
            {
                WriteError("Invalid currency name. Please try again.");
                currency = null;
            }
        }
        return currency;
    }

    // Local helper: read an integer; non-positive (or unparsable) input falls back to the default.
    int ReadInt32(string prompt, int defaultValue)
    {
        Console.WriteLine(prompt);
        var value = NumericUtils.GetIntegerValueFromObject(Console.ReadLine());
        return value <= 0 ? defaultValue : value;
    }

    // Local helper: read a decimal; non-positive (or unparsable) input falls back to the default.
    decimal ReadDecimal(string prompt, decimal defaultValue)
    {
        Console.WriteLine(prompt);
        var value = NumericUtils.GetDecimalValueFromObject(Console.ReadLine());
        return value <= 0 ? defaultValue : value;
    }

    // ---- Currency pair ----
    var exchangeCurrency = ReadCurrency($"\n{useExchange} Exchange Base currency name: (default BTC)", Currencies.BTC, "Default cryptocurrency BTC selected.");
    var targetCurrency = ReadCurrency($"\n{useExchange} Exchange Target currency name: (default USD)", Currencies.USD, "Default target currency USD selected.");

    // ---- Stop line: must be strictly positive ----
    var stopLine = 0m;
    while (stopLine <= 0)
    {
        Console.Write("\nSpecify the bottom line value where execution should STOP:");
        stopLine = NumericUtils.GetDecimalValueFromObject(Console.ReadLine());
        if (stopLine > 0)
        {
            continue;
        }
        WriteError("Bottom line value must be positive number representing your target currency value. (e.g. 5000 )");
    }

    // ---- Strategy tuning parameters, each with a sensible default ----
    var publicOrderHistoryForBuyingDecision = ReadInt32("Minutes of historical orders on CEX.IO for buying considerations: (default 30)", 30);
    var publicOrderHistoryForSellingDecision = ReadInt32("Minutes of historical orders on CEX.IO for selling considerations: (default 30)", 30);
    var accountOrderHistoryForBuyingDecision = ReadInt32("Minutes of historical account orders for buying considerations: (default 60)", 60);
    var accountOrderHistoryForSellingDecision = ReadInt32("Minutes of historical account orders for selling considerations: (default 60)", 60);
    var sensitivityRatio = ReadDecimal("Minutes change sensitivity ratio in decimal: (default 0.01)", 0.01m);
    var minimumReservePercentageAfterInitInTargetCurrency = ReadDecimal("Minimum Reserve In Target Currency: (default 0.2)", 0.2m);
    var minimumReservePercentageAfterInitInExchangeCurrency = ReadDecimal("Minimum Reserve In Exchange Currency: (default 0.2)", 0.2m);
    var orderCapPercentageOnInit = ReadDecimal("Order Cap Percentage On Init: (default 0.25)", 0.25m);
    var orderCapPercentageAfterInit = ReadDecimal("Order Cap Percentage After Init: (default 0.3)", 0.3m);

    // ---- Automated execution opt-in ----
    Console.ForegroundColor = ConsoleColor.White;
    Console.BackgroundColor = ConsoleColor.DarkGreen;
    Console.WriteLine("Automated order execution - Enter 'CONFIRM' to execute order automatically: ");
    Console.ResetColor();
    var autoExecution = Console.ReadLine() == "CONFIRM";

    var tradingStrategy = new TradingStrategy
    {
        MinutesOfAccountHistoryOrderForPurchaseDecision = accountOrderHistoryForBuyingDecision,
        MinutesOfAccountHistoryOrderForSellDecision = accountOrderHistoryForSellingDecision,
        MinutesOfPublicHistoryOrderForPurchaseDecision = publicOrderHistoryForBuyingDecision,
        MinutesOfPublicHistoryOrderForSellDecision = publicOrderHistoryForSellingDecision,
        MinimumReservePercentageAfterInitInTargetCurrency = minimumReservePercentageAfterInitInTargetCurrency,
        MinimumReservePercentageAfterInitInExchangeCurrency = minimumReservePercentageAfterInitInExchangeCurrency,
        OrderCapPercentageAfterInit = orderCapPercentageAfterInit,
        OrderCapPercentageOnInit = orderCapPercentageOnInit,
        AutoDecisionExecution = autoExecution,
        StopLine = stopLine,
        MarketChangeSensitivityRatio = sensitivityRatio,
        PriceCorrectionFrequencyInHours = 12,
        TradingValueBleedRatio = 0.1m
    };

    IApi api;
    switch (useExchange)
    {
        case SupportedExchanges.Gdax:
            api = new GdaxApi(apiKey, secret, username, slackWebhook, loggerFactory.CreateLogger($"GDAX Trading Engine - {exchangeCurrency} - {targetCurrency}"), tradingStrategy);
            break;
        case SupportedExchanges.Cex:
            api = new CexApi(apiKey, secret, username, slackWebhook, loggerFactory.CreateLogger($"CEX.IO Trading Engine - {exchangeCurrency} - {targetCurrency}"), tradingStrategy);
            break;
        default:
            throw new ArgumentOutOfRangeException();
    }

    tradingEngines.Add(new TradingEngine(api, exchangeCurrency, targetCurrency));

    // Run every engine and block until all of them complete.
    var tasks = new List<Task>();
    foreach (var engine in tradingEngines)
    {
        tasks.Add(Task.Run(async () => await engine.StartAsync()));
    }
    Task.WaitAll(tasks.ToArray());
}
/// <summary>
/// Debug tool: materializes every stored term of every document in the index into an array of
/// JSON objects (one per doc id). Refuses to run on indexes larger than 512K docs.
/// </summary>
/// <param name="reader">Lucene index reader to enumerate.</param>
/// <param name="context">JSON context used to allocate the returned blittable objects.</param>
/// <param name="state">Lucene I/O state token threaded through every reader call.</param>
/// <returns>One <see cref="BlittableJsonReaderObject"/> per doc id; entries stay null for doc ids with no terms.</returns>
/// <exception cref="InvalidOperationException">When the index exceeds the 512K-doc safety cap.</exception>
public static BlittableJsonReaderObject[] ReadAllEntriesFromIndex(IndexReader reader, JsonOperationContext context, IState state)
{
    // Safety cap: this walks every term of every doc, so bail out on large indexes.
    if (reader.MaxDoc > 512 * 1024)
    {
        throw new InvalidOperationException("Refusing to extract all index entries from an index with " + reader.MaxDoc +
                                            " entries, because of the probable time / memory costs associated with that." +
                                            Environment.NewLine +
                                            "Viewing index entries are a debug tool, and should not be used on indexes of this size.");
    }
    // One property dictionary per doc id, filled lazily as terms are seen.
    var results = new Dictionary<string, object>[reader.MaxDoc];
    using (var termDocs = reader.TermDocs(state))
    using (var termEnum = reader.Terms(state))
    {
        while (termEnum.Next(state))
        {
            var term = termEnum.Term;
            if (term == null)
            {
                break;
            }
            string text;
            if (term.Field.EndsWith("__maxX") ||
                term.Field.EndsWith("__maxY") ||
                term.Field.EndsWith("__minY") ||
                term.Field.EndsWith("__minX"))
            {
                // This is a Spatial Index field term
                // Lucene keeps the index-entries-values for 'Spatial Index Fields' with 'BoundingBox' encoded as 'prefixCoded bytes'
                // Need to convert to numbers
                var num = NumericUtils.PrefixCodedToDouble(term.Text);
                text = NumberUtil.NumberToString(num);
            }
            else
            {
                text = term.Text;
            }
            // Walk all docs that contain this term and record the value under the field name.
            termDocs.Seek(termEnum, state);
            for (var i = 0; i < termEnum.DocFreq() && termDocs.Next(state); i++)
            {
                var result = results[termDocs.Doc];
                if (result == null)
                {
                    results[termDocs.Doc] = result = new Dictionary<string, object>();
                }
                var propertyName = term.Field;
                // Skip internal bookkeeping fields - they are not part of the logical entry.
                if (propertyName.EndsWith("_ConvertToJson") ||
                    propertyName.EndsWith("_IsArray") ||
                    propertyName.EndsWith(Constants.Documents.Indexing.Fields.RangeFieldSuffix))
                {
                    continue;
                }
                if (result.TryGetValue(propertyName, out var oldValue))
                {
                    // Same field seen more than once for this doc: promote to / append to an array.
                    if (oldValue is DynamicJsonArray oldValueAsArray)
                    {
                        oldValueAsArray.Add(text);
                        continue;
                    }
                    if (oldValue is string oldValueAsString)
                    {
                        result[propertyName] = oldValueAsArray = new DynamicJsonArray();
                        oldValueAsArray.Add(oldValueAsString);
                        oldValueAsArray.Add(text);
                        continue;
                    }
                    throw new ArgumentException("No idea how to handle " + oldValue.GetType());
                }
                result[propertyName] = text;
            }
        }
    }
    // Convert the collected dictionaries into blittable JSON objects.
    var final = new BlittableJsonReaderObject[results.Length];
    for (int i = 0; i < results.Length; i++)
    {
        var doc = new DynamicJsonValue();
        var dictionary = results[i];
        if (dictionary == null)
        {
            continue; // no terms referenced this doc id; slot stays null
        }
        foreach (var kvp in dictionary)
        {
            doc[kvp.Key] = kvp.Value;
        }
        final[i] = context.ReadObject(doc, "index/entries");
    }
    return(final);
}
/// <summary>
/// Initializes the token stream with the supplied <see cref="float"/> value.
/// <para/>
/// NOTE: This was setFloatValue() in Lucene
/// </summary>
/// <param name="value"> the value, for which this <see cref="TokenStream"/> should enumerate tokens. </param>
/// <returns> this instance, because of this you can use it the following way:
/// <code>new Field(name, new NumericTokenStream(precisionStep).SetSingleValue(value))</code> </returns>
public NumericTokenStream SetSingleValue(float value)
{
    int sortableValue = NumericUtils.SingleToSortableInt32(value);
    valSize = 32;
    numericAtt.Init(sortableValue, valSize, precisionStep, -precisionStep);
    return this;
}
/// <summary>
/// SteinGCD must reject an all-zero argument triple with <see cref="ArgumentException"/>.
/// </summary>
public void SteinGCD_ThreeZeroesPassed_ArgumentExceptionThrown()
{
    const int first = 0, second = 0, third = 0;
    Assert.Throws<ArgumentException>(() => NumericUtils.SteinGCD(first, second, third));
}
/// <summary>
/// Accumulates facet counts for each configured <c>DoubleRange</c> over the given matching docs,
/// by mapping double bounds to sortable longs and delegating to a <c>LongRangeCounter</c>.
/// Updates the instance fields <c>totCount</c> and (via <c>FillCounts</c>) <c>counts</c>.
/// </summary>
/// <param name="valueSource">Provides the per-document double value being faceted.</param>
/// <param name="matchingDocs">Per-segment matching document sets to aggregate over.</param>
private void Count(ValueSource valueSource, IEnumerable<MatchingDocs> matchingDocs)
{
    DoubleRange[] ranges = (DoubleRange[])this.ranges;

    // Translate each double range into its sortable-long equivalent so one counter handles both.
    LongRange[] longRanges = new LongRange[ranges.Length];
    for (int i = 0; i < ranges.Length; i++)
    {
        DoubleRange range = ranges[i];
        longRanges[i] = new LongRange(range.Label,
                                      NumericUtils.DoubleToSortableLong(range.minIncl), true,
                                      NumericUtils.DoubleToSortableLong(range.maxIncl), true);
    }

    LongRangeCounter counter = new LongRangeCounter(longRanges);

    int missingCount = 0;
    foreach (MatchingDocs hits in matchingDocs)
    {
        FunctionValues fv = valueSource.GetValues(new Dictionary<string, object>(), hits.Context);
        totCount += hits.TotalHits;
        Bits bits;
        if (fastMatchFilter != null)
        {
            DocIdSet dis = fastMatchFilter.GetDocIdSet(hits.Context, null);
            if (dis == null)
            {
                // No documents match
                continue;
            }
            bits = dis.GetBits();
            if (bits == null)
            {
                // The fast-match path requires random access; DocIdSet must expose Bits.
                throw new System.ArgumentException("fastMatchFilter does not implement DocIdSet.bits");
            }
        }
        else
        {
            bits = null;
        }

        DocIdSetIterator docs = hits.Bits.GetIterator();
        int doc;
        while ((doc = docs.NextDoc()) != DocIdSetIterator.NO_MORE_DOCS)
        {
            if (bits != null && bits.Get(doc) == false)
            {
                // NOTE(review): this increment has no effect - 'doc' is overwritten by the
                // next NextDoc() call; it mirrors the upstream Lucene code.
                doc++;
                continue;
            }
            // Skip missing docs:
            if (fv.Exists(doc))
            {
                counter.Add(NumericUtils.DoubleToSortableLong(fv.DoubleVal(doc)));
            }
            else
            {
                missingCount++;
            }
        }
    }

    // FillCounts returns the number of counted values that fell outside every range.
    missingCount += counter.FillCounts(counts);
    totCount -= missingCount;
}
/// <summary>
/// EuclidGCD must reject argument arrays with fewer than two values.
/// </summary>
public void EuclidGCD_LessThanTwoArgumentsPassed_ArgumentExceptionThrown(params int[] numbers)
    => Assert.Throws<ArgumentException>(() => NumericUtils.EuclidGCD(numbers));
/// <summary>
/// Forwards the three supplied values to <see cref="NumericUtils.SteinGCD(int, int, int)"/> and returns its result.
/// </summary>
public int SteinGCD_ThreeCorrectValuesPassed_WorksCorrectly(int first, int second, int third)
    => NumericUtils.SteinGCD(first, second, third);
/// <summary>
/// EuclidGCD must reject an array consisting only of zeroes.
/// </summary>
public void EuclidGCD_ArrayOfZeroesPassed_ArgumentExceptionThrown(params int[] numbers)
    => Assert.Throws<ArgumentException>(() => NumericUtils.EuclidGCD(numbers));
/// <summary>
/// Forwards the supplied array to <see cref="NumericUtils.EuclidGCD(int[])"/> and returns its result.
/// </summary>
public int EuclidGCD_ArrayOfCorrectValuesPassed_WorksCorrectly(params int[] numbers)
    => NumericUtils.EuclidGCD(numbers);
/// <summary>
/// EuclidGCD must reject a zero-valued argument pair with <see cref="ArgumentException"/>.
/// </summary>
public void EuclidGCD_TwoZeroesPassed_ArgumentExceptionThrown()
{
    const int first = 0, second = 0;
    Assert.Throws<ArgumentException>(() => NumericUtils.EuclidGCD(first, second));
}
/// <summary>
/// Returns the Key cast to TypeOfKey. Null if the type is unknown or invalid.
/// </summary>
/// <returns>The parsed key, or null when TypeOfKey cannot be resolved or the value fails to parse.</returns>
public object GetParsedKey()
{
    // An unset TypeOfKey defaults to string (and is persisted back onto the property).
    if (string.IsNullOrEmpty(TypeOfKey))
    {
        TypeOfKey = typeof(string).FullName;
    }

    var typeOfKey = System.Type.GetType(TypeOfKey);
    if (typeOfKey == null)
    {
        return null; // unknown/unresolvable type name
    }

    if (typeOfKey == typeof(string))
    {
        return Key.ToString();
    }

    // Local helper: run a conversion, mapping any parse failure to null
    // (replaces eight identical try/catch blocks).
    object TryConvert(Func<object> convert)
    {
        try
        {
            return convert();
        }
        catch
        {
            return null;
        }
    }

    if (typeOfKey == typeof(uint))
    {
        return TryConvert(() => NumericUtils.HexStringToUInt32(Key));
    }
    if (typeOfKey == typeof(int))
    {
        return TryConvert(() => NumericUtils.HexStringToInt32(Key));
    }
    if (typeOfKey == typeof(ulong))
    {
        return TryConvert(() => NumericUtils.HexStringToUInt64(Key));
    }
    if (typeOfKey == typeof(long))
    {
        return TryConvert(() => NumericUtils.HexStringToInt64(Key));
    }
    if (typeOfKey == typeof(short))
    {
        return TryConvert(() => NumericUtils.HexStringToInt16(Key));
    }
    if (typeOfKey == typeof(ushort))
    {
        return TryConvert(() => NumericUtils.HexStringToUInt16(Key));
    }
    if (typeOfKey == typeof(double))
    {
        return TryConvert(() => System.Convert.ToDouble(Key));
    }
    if (typeOfKey == typeof(decimal))
    {
        return TryConvert(() => System.Convert.ToDecimal(Key));
    }

    return null; // resolvable type, but not one we know how to parse
}
/// <summary>
/// Derives the video framerate as a <c>Ratio</c> from the track's framerate text.
/// Tries a "(numerator/denominator)" parenthesized form first, then falls back to parsing a
/// decimal value preceding an "fps" marker; returns a default <c>Ratio</c> if both fail.
/// </summary>
private Ratio CalculateFramerate()
{
    Maybe<string> rawTrackTextMaybe = from track in _videoTrack.Value select track.Framerate;

    // Strategy 1: exact rational inside parentheses, e.g. "(30000/1001)".
    Maybe<Ratio> fpsFromParenthesis = from framerateText in rawTrackTextMaybe
                                      let startParenths = framerateText.IndexOf("(")
                                      let endParenths = framerateText.IndexOf(")")
                                      where startParenths != -1 && endParenths != -1
                                      let fpsSubstring = framerateText.Substring(startParenths + 1, endParenths - startParenths - 1)
                                      let splitOnSlash = fpsSubstring.Split('/')
                                      where splitOnSlash.Length == 2
                                      let numerator = NumericUtils.TryParseInt(splitOnSlash[0])
                                      let denominator = NumericUtils.TryParseInt(splitOnSlash[1])
                                      where numerator != null && denominator != null
                                      select new Ratio(numerator.Value, denominator.Value);

    // Strategy 2: decimal value before an "fps" marker.
    // NOTE(review): the "- 2" trims two characters before "fps"; this assumes a specific
    // separator layout in the track text - confirm against real MediaInfo output.
    Maybe<Ratio> fpsFromDirectParse = from framerateText in rawTrackTextMaybe
                                      let indexOfFpsMarker = framerateText.IndexOf("fps", StringComparison.OrdinalIgnoreCase)
                                      where indexOfFpsMarker != -1
                                      let fpsAsDecimal = framerateText.Substring(0, indexOfFpsMarker - 2)
                                      let fpsAsDouble = NumericUtils.TryParseDouble(fpsAsDecimal)
                                      where fpsAsDouble != null
                                      select NumericUtils.ConvertDoubleToFPS(fpsAsDouble.Value);

    // Prefer the exact rational; otherwise the decimal parse; otherwise a default Ratio.
    return(fpsFromParenthesis.Or(fpsFromDirectParse).OrElse(new Ratio()));
}
/// <summary>
/// Builds a Lucene query for a string-valued token: term, fuzzy, or phrase queries for plain
/// strings/numbers, and wildcard or prefix queries for wildcard strings.
/// </summary>
/// <param name="value">The parsed field value (token kind, text, optional fuzzy value).</param>
/// <param name="currentField">The field the query targets.</param>
/// <exception cref="NotImplementedException">For token kinds this method does not handle.</exception>
private Query CreateStringValueQuery(QueryFieldValue value, FieldInfo currentField)
{
    switch (value.Token)
    {
        case SnLucLexer.Token.Number:
        case SnLucLexer.Token.String:
            if (value.StringValue == ContentQuery.EmptyText)
            {
                return(new TermQuery(new Term(currentField.Name, value.StringValue)));
            }
            if (value.StringValue == ContentQuery.EmptyInnerQueryText)
            {
                // Empty inner query: match on Id 0, which no content has.
                return(new TermQuery(new Term("Id", NumericUtils.IntToPrefixCoded(0))));
            }
            var words = GetAnalyzedText(currentField.Name, value.StringValue);
            if (words.Length == 0)
            {
                // Analyzer removed every token; fall back to an empty-string term.
                words = new String[] { String.Empty };
            }
            //return null;
            if (words.Length == 1)
            {
                var term = new Term(currentField.Name, words[0]);
                if (value.FuzzyValue == null)
                {
                    return(new TermQuery(term));
                }
                return(new FuzzyQuery(term, Convert.ToSingle(value.FuzzyValue)));
            }
            // Multiple analyzed tokens: build a phrase query; fuzzy value acts as slop here.
            var phraseQuery = new PhraseQuery();
            foreach (var word in words)
            {
                phraseQuery.Add(new Term(currentField.Name, word));
            }
            if (value.FuzzyValue != null)
            {
                var slop = Convert.ToInt32(value.FuzzyValue.Value);
                phraseQuery.SetSlop(slop);
            }
            return(phraseQuery);
        case SnLucLexer.Token.WildcardString:
            if (!value.StringValue.EndsWith("*"))
            {
                return(new WildcardQuery(new Term(currentField.Name, value.StringValue)));
            }
            // Trailing '*' only (no other wildcard chars) can use the cheaper PrefixQuery.
            var s = value.StringValue.TrimEnd('*');
            if (s.Contains('?') || s.Contains('*'))
            {
                return(new WildcardQuery(new Term(currentField.Name, value.StringValue)));
            }
            return(new PrefixQuery(new Term(currentField.Name, s)));
        default:
            throw new NotImplementedException("CreateValueQuery with Token: " + value.Token);
    }
}
/// <summary>
/// End-to-end pipeline test: seeds known values in the Northwind source tables, runs an initial
/// load, then exercises three delta runs (no changes; a master-table change that updates a
/// calculated field; an orders-table change) and verifies the Lucene output index each time.
/// </summary>
public void Integration()
{
    var builder = new ContainerBuilder();
    builder.RegisterModule(new RootModule(@"Shorthand.xml"));
    var container = builder.Build();

    // CORRECT DATA AND INITIAL LOAD
    using (var cn = new SqlServerConnectionFactory(InputConnection).GetConnection())
    {
        cn.Open();
        Assert.AreEqual(2, cn.Execute(@" UPDATE [Order Details] SET UnitPrice = 14.40, Quantity = 42 WHERE OrderId = 10253 AND ProductId = 39; UPDATE Orders SET CustomerID = 'CHOPS', Freight = 22.98 WHERE OrderId = 10254; "));
    }
    var root = ResolveRoot(container, TestFile, true);
    var response = new PipelineAction(root, new PipelineContext(new DebugLogger(), root)).Execute();
    Assert.AreEqual(200, response.Code);
    Assert.AreEqual(string.Empty, response.Message);
    using (var reader = IndexReader.Open(FSDirectory.Open(new DirectoryInfo(Path.Combine(OutputConnection.Folder, "Order Details"))), true))
    {
        Assert.AreEqual(2155, reader.NumDocs());
    }

    // FIRST DELTA, NO CHANGES
    root = ResolveRoot(container, TestFile, false);
    response = new PipelineAction(root, new PipelineContext(new DebugLogger(), root)).Execute();
    Assert.AreEqual(200, response.Code);
    Assert.AreEqual(string.Empty, response.Message);
    using (var reader = IndexReader.Open(FSDirectory.Open(new DirectoryInfo(Path.Combine(OutputConnection.Folder, "Order Details"))), true))
    {
        Assert.AreEqual(2155, reader.NumDocs());
    }

    // CHANGE 2 FIELDS IN 1 RECORD IN MASTER TABLE THAT WILL CAUSE CALCULATED FIELD TO BE UPDATED TOO
    using (var cn = new SqlServerConnectionFactory(InputConnection).GetConnection())
    {
        cn.Open();
        const string sql = @"UPDATE [Order Details] SET UnitPrice = 15, Quantity = 40 WHERE OrderId = 10253 AND ProductId = 39;";
        Assert.AreEqual(1, cn.Execute(sql));
    }
    root = ResolveRoot(container, TestFile, false);
    response = new PipelineAction(root, new PipelineContext(new DebugLogger(), root)).Execute();
    Assert.AreEqual(200, response.Code);
    Assert.AreEqual(string.Empty, response.Message);
    // NOTE(review): these asserts compare a double expectation against Convert.ToDecimal -
    // relies on the framework's cross-type numeric equality; confirm intended.
    using (var searcher = new IndexSearcher(FSDirectory.Open(new DirectoryInfo(Path.Combine(OutputConnection.Folder, "Order Details"))), true))
    {
        var hits = searcher.Search(new TermQuery(new Term("TflId", "1025339")), null, 1);
        Assert.AreEqual(1, hits.TotalHits);
        var hit = searcher.Doc(hits.ScoreDocs[0].Doc);
        Assert.AreEqual(15.0d, Convert.ToDecimal(hit.Get("OrderDetailsUnitPrice")));
        Assert.AreEqual(40, Convert.ToInt32(hit.Get("OrderDetailsQuantity")));
        Assert.AreEqual(40 * 15.0d, Convert.ToDecimal(hit.Get("OrderDetailsExtendedPrice")));
    }

    // CHANGE 1 RECORD'S CUSTOMERID AND FREIGHT ON ORDERS TABLE
    using (var cn = new SqlServerConnectionFactory(InputConnection).GetConnection())
    {
        cn.Open();
        Assert.AreEqual(1, cn.Execute("UPDATE Orders SET CustomerID = 'VICTE', Freight = 20.11 WHERE OrderId = 10254;"));
    }
    root = ResolveRoot(container, TestFile, false);
    response = new PipelineAction(root, new PipelineContext(new DebugLogger(), root)).Execute();
    Assert.AreEqual(200, response.Code);
    Assert.AreEqual(string.Empty, response.Message);
    using (var searcher = new IndexSearcher(FSDirectory.Open(new DirectoryInfo(Path.Combine(OutputConnection.Folder, "Orders"))), true))
    {
        // Orders key is stored prefix-coded, so the lookup term must be encoded the same way.
        var hits = searcher.Search(new TermQuery(new Term("OrdersOrderID", NumericUtils.IntToPrefixCoded(10254))), 1);
        Assert.AreEqual(1, hits.TotalHits);
        var hit = searcher.Doc(hits.ScoreDocs[0].Doc);
        Assert.AreEqual("VICTE", hit.Get("OrdersCustomerID"));
        Assert.AreEqual(20.11d, Convert.ToDecimal(hit.Get("OrdersFreight")));
    }
}
/// <summary>
/// Initializes the token stream with the supplied <see cref="double"/> value. </summary>
/// <param name="value"> the value, for which this <see cref="TokenStream"/> should enumerate tokens. </param>
/// <returns> this instance, because of this you can use it the following way:
/// <code>new Field(name, new NumericTokenStream(precisionStep).SetDoubleValue(value))</code> </returns>
public NumericTokenStream SetDoubleValue(double value)
{
    long sortableValue = NumericUtils.DoubleToSortableInt64(value);
    valSize = 64;
    numericAtt.Init(sortableValue, valSize, precisionStep, -precisionStep);
    return this;
}
/// <summary>
/// Decodes a prefix-coded term back into a <see cref="float"/>.
/// </summary>
public float ParseFloat(BytesRef term)
{
    int sortableBits = NumericUtils.PrefixCodedToInt(term);
    return NumericUtils.SortableIntToFloat(sortableBits);
}
/// <summary>
/// Randomized equivalence test: for several random [lower, upper] bounds, verifies that a trie
/// NumericRangeQuery and a classic TermRangeQuery return the same hit counts for all four
/// inclusive/exclusive bound combinations, and tracks term counts for both approaches.
/// </summary>
/// <param name="precisionStep">Trie precision step used for the indexed field under test.</param>
private void TestRandomTrieAndClassicRangeQuery(int precisionStep)
{
    string field = "field" + precisionStep;
    int totalTermCountT = 0, totalTermCountC = 0, termCountT, termCountC;
    int num = TestUtil.NextInt32(Random, 10, 20);
    for (int i = 0; i < num; i++)
    {
        // Pick two random bounds within the indexed value space and order them.
        long lower = (long)(Random.NextDouble() * noDocs * distance) + startOffset;
        long upper = (long)(Random.NextDouble() * noDocs * distance) + startOffset;
        if (lower > upper)
        {
            long a = lower;
            lower = upper;
            upper = a;
        }
        // Encode the bounds as full-precision (shift 0) prefix-coded terms for the classic query.
        BytesRef lowerBytes = new BytesRef(NumericUtils.BUF_SIZE_INT64), upperBytes = new BytesRef(NumericUtils.BUF_SIZE_INT64);
        NumericUtils.Int64ToPrefixCodedBytes(lower, 0, lowerBytes);
        NumericUtils.Int64ToPrefixCodedBytes(upper, 0, upperBytes);

        // test inclusive range
        NumericRangeQuery<long> tq = NumericRangeQuery.NewInt64Range(field, precisionStep, lower, upper, true, true);
        TermRangeQuery cq = new TermRangeQuery(field, lowerBytes, upperBytes, true, true);
        TopDocs tTopDocs = searcher.Search(tq, 1);
        TopDocs cTopDocs = searcher.Search(cq, 1);
        Assert.AreEqual(cTopDocs.TotalHits, tTopDocs.TotalHits, "Returned count for NumericRangeQuery and TermRangeQuery must be equal");
        totalTermCountT += termCountT = CountTerms(tq);
        totalTermCountC += termCountC = CountTerms(cq);
        CheckTermCounts(precisionStep, termCountT, termCountC);

        // test exclusive range
        tq = NumericRangeQuery.NewInt64Range(field, precisionStep, lower, upper, false, false);
        cq = new TermRangeQuery(field, lowerBytes, upperBytes, false, false);
        tTopDocs = searcher.Search(tq, 1);
        cTopDocs = searcher.Search(cq, 1);
        Assert.AreEqual(cTopDocs.TotalHits, tTopDocs.TotalHits, "Returned count for NumericRangeQuery and TermRangeQuery must be equal");
        totalTermCountT += termCountT = CountTerms(tq);
        totalTermCountC += termCountC = CountTerms(cq);
        CheckTermCounts(precisionStep, termCountT, termCountC);

        // test left exclusive range
        tq = NumericRangeQuery.NewInt64Range(field, precisionStep, lower, upper, false, true);
        cq = new TermRangeQuery(field, lowerBytes, upperBytes, false, true);
        tTopDocs = searcher.Search(tq, 1);
        cTopDocs = searcher.Search(cq, 1);
        Assert.AreEqual(cTopDocs.TotalHits, tTopDocs.TotalHits, "Returned count for NumericRangeQuery and TermRangeQuery must be equal");
        totalTermCountT += termCountT = CountTerms(tq);
        totalTermCountC += termCountC = CountTerms(cq);
        CheckTermCounts(precisionStep, termCountT, termCountC);

        // test right exclusive range
        tq = NumericRangeQuery.NewInt64Range(field, precisionStep, lower, upper, true, false);
        cq = new TermRangeQuery(field, lowerBytes, upperBytes, true, false);
        tTopDocs = searcher.Search(tq, 1);
        cTopDocs = searcher.Search(cq, 1);
        Assert.AreEqual(cTopDocs.TotalHits, tTopDocs.TotalHits, "Returned count for NumericRangeQuery and TermRangeQuery must be equal");
        totalTermCountT += termCountT = CountTerms(tq);
        totalTermCountC += termCountC = CountTerms(cq);
        CheckTermCounts(precisionStep, termCountT, termCountC);
    }

    CheckTermCounts(precisionStep, totalTermCountT, totalTermCountC);
    if (Verbose && precisionStep != int.MaxValue)
    {
        // num * 4 because each iteration ran four bound-combination variants.
        Console.WriteLine("Average number of terms during random search on '" + field + "':");
        Console.WriteLine(" Numeric query: " + (((double)totalTermCountT) / (num * 4)));
        Console.WriteLine(" Classical query: " + (((double)totalTermCountC) / (num * 4)));
    }
}
/// <summary>
/// Decodes a prefix-coded term back into a <see cref="long"/>.
/// </summary>
public long ParseLong(BytesRef term) => NumericUtils.PrefixCodedToLong(term);
/// <summary>
/// Collects one candidate term into the bounded priority queue of top-scoring terms, ignoring
/// terms that cannot beat the current worst entry and merging term state for terms already queued.
/// Always returns true so enumeration continues.
/// </summary>
/// <param name="bytes">The term bytes currently positioned at by the enclosing TermsEnum.</param>
public override bool Collect(BytesRef bytes)
{
    float boost = boostAtt.Boost;

    // make sure within a single seg we always collect
    // terms in order
    if (Debugging.AssertsEnabled)
    {
        Debugging.Assert(CompareToLastTerm(bytes));
    }

    //System.out.println("TTR.collect term=" + bytes.utf8ToString() + " boost=" + boost + " ord=" + readerContext.ord);
    // ignore uncompetitive hits
    if (stQueue.Count == maxSize)
    {
        ScoreTerm t = stQueue.Peek();
        // LUCENENET specific - compare bits rather than using equality operators to prevent these comparisons from failing in x86 in .NET Framework with optimizations enabled
        if (NumericUtils.SingleToSortableInt32(boost) < NumericUtils.SingleToSortableInt32(t.Boost))
        {
            return(true);
        }
        // LUCENENET specific - compare bits rather than using equality operators to prevent these comparisons from failing in x86 in .NET Framework with optimizations enabled
        if (NumericUtils.SingleToSortableInt32(boost) == NumericUtils.SingleToSortableInt32(t.Boost) && termComp.Compare(bytes, t.Bytes) > 0)
        {
            return(true);
        }
    }
    TermState state = termsEnum.GetTermState();
    if (Debugging.AssertsEnabled)
    {
        Debugging.Assert(state != null);
    }
    if (visitedTerms.TryGetValue(bytes, out ScoreTerm t2))
    {
        // if the term is already in the PQ, only update docFreq of term in PQ
        // LUCENENET specific - compare bits rather than using equality operators to prevent these comparisons from failing in x86 in .NET Framework with optimizations enabled
        if (Debugging.AssertsEnabled)
        {
            Debugging.Assert(NumericUtils.SingleToSortableInt32(t2.Boost) == NumericUtils.SingleToSortableInt32(boost), "boost should be equal in all segment TermsEnums");
        }
        t2.TermState.Register(state, m_readerContext.Ord, termsEnum.DocFreq, termsEnum.TotalTermFreq);
    }
    else
    {
        // add new entry in PQ, we must clone the term, else it may get overwritten!
        st.Bytes.CopyBytes(bytes);
        st.Boost = boost;
        visitedTerms[st.Bytes] = st;
        if (Debugging.AssertsEnabled)
        {
            Debugging.Assert(st.TermState.DocFreq == 0);
        }
        st.TermState.Register(state, m_readerContext.Ord, termsEnum.DocFreq, termsEnum.TotalTermFreq);
        stQueue.Add(st);
        // possibly drop entries from queue
        if (stQueue.Count > maxSize)
        {
            // Queue overflowed: evict the worst entry and recycle its ScoreTerm as the scratch 'st'.
            st = stQueue.Dequeue();
            visitedTerms.Remove(st.Bytes);
            st.TermState.Clear(); // reset the termstate!
        }
        else
        {
            // Scratch entry was consumed by the queue; allocate a fresh one for the next term.
            st = new ScoreTerm(termComp, new TermContext(m_topReaderContext));
        }
        if (Debugging.AssertsEnabled)
        {
            Debugging.Assert(stQueue.Count <= maxSize, "the PQ size must be limited to maxSize");
        }
        // set maxBoostAtt with values to help FuzzyTermsEnum to optimize
        if (stQueue.Count == maxSize)
        {
            t2 = stQueue.Peek();
            maxBoostAtt.MaxNonCompetitiveBoost = t2.Boost;
            maxBoostAtt.CompetitiveTerm = t2.Bytes;
        }
    }

    return(true);
}
/// <summary>
/// Decodes a prefix-coded term back into a <see cref="double"/>.
/// </summary>
public double ParseDouble(BytesRef term)
{
    long sortableBits = NumericUtils.PrefixCodedToLong(term);
    return NumericUtils.SortableLongToDouble(sortableBits);
}
/// <summary>
/// Decodes a prefix-coded term back into an <see cref="int"/>.
/// NOTE: This was parseInt() in Lucene
/// </summary>
public int ParseInt32(BytesRef term) => NumericUtils.PrefixCodedToInt32(term);
/// <summary>
/// Wraps the terms iterator so that only prefix-coded long terms are enumerated.
/// </summary>
public TermsEnum TermsEnum(Terms terms)
{
    var iterator = terms.Iterator(null);
    return NumericUtils.FilterPrefixCodedLongs(iterator);
}
/// <summary>
/// Decodes a prefix-coded term back into a <see cref="long"/>.
/// NOTE: This was parseLong() in Lucene
/// </summary>
public long ParseInt64(BytesRef term) => NumericUtils.PrefixCodedToInt64(term);
/// <summary>
/// Do the indexing and set the field values onto the Lucene document
/// </summary>
/// <param name="indexingContext">Carries the item being indexed, an optional host item (when the item is detached), and the custom indexer delegates registered on <c>LookService</c>.</param>
/// <param name="document">The Lucene document that the generated fields are added to.</param>
internal static void Index(IndexingContext indexingContext, Document document)
{
    #region Node

    if (indexingContext.Item != null)
    {
        // for detached items, node-level metadata comes from the host item when available
        var publishedItemType = indexingContext?.HostItem?.ItemType ?? indexingContext.Item.ItemType;

        // "1" marker field indicating node data is present (not stored)
        var hasNodeField = new Field(
                                LookConstants.HasNodeField,
                                "1",
                                Field.Store.NO,
                                Field.Index.NOT_ANALYZED);

        var nodeIdField = new Field(
                                LookConstants.NodeIdField,
                                indexingContext.Item.Id.ToString(),
                                Field.Store.YES,
                                Field.Index.NOT_ANALYZED);

        var nodeKeyField = new Field(
                                LookConstants.NodeKeyField,
                                indexingContext.Item.GetGuidKey().GuidToLuceneString(),
                                Field.Store.YES,
                                Field.Index.NOT_ANALYZED);

        var nodeTypeField = new Field(
                                LookConstants.NodeTypeField,
                                publishedItemType.ToString(),
                                Field.Store.YES,
                                Field.Index.NOT_ANALYZED,
                                Field.TermVector.NO);

        var nodeAliasField = new Field(
                                LookConstants.NodeAliasField,
                                indexingContext.Item.DocumentTypeAlias,
                                Field.Store.NO,
                                Field.Index.NOT_ANALYZED,
                                Field.TermVector.NO);

        document.Add(hasNodeField);
        document.Add(nodeIdField);
        document.Add(nodeKeyField);
        document.Add(nodeTypeField);
        document.Add(nodeAliasField);

        if (publishedItemType == PublishedItemType.Content)
        {
            // culture is only indexed for content items; host item's culture wins when present
            var culture = indexingContext?.HostItem?.GetCulture() ?? indexingContext.Item.GetCulture();

            if (culture != null)
            {
                var cultureField = new Field(
                                        LookConstants.CultureField,
                                        culture.LCID.ToString(),
                                        Field.Store.YES,
                                        Field.Index.NOT_ANALYZED,
                                        Field.TermVector.NO);

                document.Add(cultureField);
            }
        }

        if (indexingContext.HostItem != null)
        {
            var isDetachedField = new Field(
                                        LookConstants.IsDetachedField,
                                        "1",
                                        Field.Store.NO,
                                        Field.Index.NOT_ANALYZED);

            // indexing detached item, so store the host context id so we can return the detached item
            var hostIdField = new Field(
                                    LookConstants.HostIdField,
                                    indexingContext.HostItem.Id.ToString(),
                                    Field.Store.YES,
                                    Field.Index.NOT_ANALYZED);

            document.Add(isDetachedField);
            document.Add(hostIdField);
        }
    }

    #endregion

    #region Name

    string name = null;

    // custom name indexer (when registered) takes priority over the item's own name;
    // indexer failures are logged and treated as "no name"
    if (LookService.Instance._nameIndexer != null)
    {
        try
        {
            name = LookService.Instance._nameIndexer(indexingContext);
        }
        catch (Exception exception)
        {
            LogHelper.WarnWithException(typeof(LookService), "Error in name indexer", exception);
        }
    }
    else if (indexingContext.Item != null)
    {
        name = indexingContext.Item.Name;
    }

    if (name != null)
    {
        var hasNameField = new Field(
                                LookConstants.HasNameField,
                                "1",
                                Field.Store.NO,
                                Field.Index.NOT_ANALYZED);

        var nameField = new Field(
                            LookConstants.NameField,
                            name,
                            Field.Store.YES,
                            Field.Index.NOT_ANALYZED,
                            Field.TermVector.YES);

        // field for lower case searching
        var nameFieldLowered = new Field(
                                    LookConstants.NameField + "_Lowered",
                                    name.ToLower(),
                                    Field.Store.NO,
                                    Field.Index.NOT_ANALYZED,
                                    Field.TermVector.YES);

        var nameSortedField = new Field(
                                    LuceneIndexer.SortedFieldNamePrefix + LookConstants.NameField,
                                    name.ToLower(), // force case insensitive sorting
                                    Field.Store.NO,
                                    Field.Index.NOT_ANALYZED,
                                    Field.TermVector.NO);

        document.Add(hasNameField);
        document.Add(nameField);
        document.Add(nameFieldLowered);
        document.Add(nameSortedField);
    }

    #endregion

    #region Date

    DateTime? date = null;

    // custom date indexer (when registered) takes priority over the item's update date
    if (LookService.Instance._dateIndexer != null)
    {
        try
        {
            date = LookService.Instance._dateIndexer(indexingContext);
        }
        catch (Exception exception)
        {
            LogHelper.WarnWithException(typeof(LookService), "Error in date indexer", exception);
        }
    }
    else if (indexingContext.Item != null)
    {
        date = indexingContext.Item.UpdateDate;
    }

    if (date != null)
    {
        var hasDateField = new Field(
                                LookConstants.HasDateField,
                                "1",
                                Field.Store.NO,
                                Field.Index.NOT_ANALYZED);

        // dates are stored as sortable strings truncated to second resolution
        var dateValue = DateTools.DateToString(date.Value, DateTools.Resolution.SECOND);

        var dateField = new Field(
                            LookConstants.DateField,
                            dateValue,
                            Field.Store.YES,
                            Field.Index.ANALYZED,
                            Field.TermVector.YES);

        var dateSortedField = new Field(
                                    LuceneIndexer.SortedFieldNamePrefix + LookConstants.DateField,
                                    dateValue,
                                    Field.Store.NO,
                                    Field.Index.NOT_ANALYZED,
                                    Field.TermVector.NO);

        document.Add(hasDateField);
        document.Add(dateField);
        document.Add(dateSortedField);
    }

    #endregion

    #region Text

    // text is only indexed when a custom text indexer has been registered
    if (LookService.Instance._textIndexer != null)
    {
        string text = null;

        try
        {
            text = LookService.Instance._textIndexer(indexingContext);
        }
        catch (Exception exception)
        {
            LogHelper.WarnWithException(typeof(LookService), "Error in text indexer", exception);
        }

        if (text != null)
        {
            var hasTextField = new Field(
                                    LookConstants.HasTextField,
                                    "1",
                                    Field.Store.NO,
                                    Field.Index.NOT_ANALYZED);

            var textField = new Field(
                                LookConstants.TextField,
                                text,
                                Field.Store.YES,
                                Field.Index.ANALYZED,
                                Field.TermVector.YES);

            document.Add(hasTextField);
            document.Add(textField);
        }
    }

    #endregion

    #region Tag

    // tags are only indexed when a custom tag indexer has been registered
    if (LookService.Instance._tagIndexer != null)
    {
        LookTag[] tags = null;

        try
        {
            tags = LookService.Instance._tagIndexer(indexingContext);
        }
        catch (Exception exception)
        {
            LogHelper.WarnWithException(typeof(LookService), "Error in tag indexer", exception);
        }

        if (tags != null)
        {
            foreach (var tag in tags)
            {
                var hasTagsField = new Field(
                                        LookConstants.HasTagsField,
                                        "1",
                                        Field.Store.NO,
                                        Field.Index.NOT_ANALYZED);

                // add all tags to a common field (serialized such that Tag objects can be restored from this)
                var allTagsField = new Field(
                                        LookConstants.AllTagsField,
                                        tag.ToString(),
                                        Field.Store.YES,
                                        Field.Index.NOT_ANALYZED);

                // add the tag value to a specific field - this is used for searching on
                var tagField = new Field(
                                    LookConstants.TagsField + tag.Group,
                                    tag.Name,
                                    Field.Store.YES,
                                    Field.Index.NOT_ANALYZED);

                document.Add(hasTagsField);
                document.Add(allTagsField);
                document.Add(tagField);
            }
        }
    }

    #endregion

    #region Location

    // location is only indexed when a custom location indexer has been registered
    if (LookService.Instance._locationIndexer != null)
    {
        Location location = null;

        try
        {
            location = LookService.Instance._locationIndexer(indexingContext);
        }
        catch (Exception exception)
        {
            LogHelper.WarnWithException(typeof(LookService), "Error in location indexer", exception);
        }

        if (location != null)
        {
            var hasLocationField = new Field(
                                        LookConstants.HasLocationField,
                                        "1",
                                        Field.Store.NO,
                                        Field.Index.NOT_ANALYZED);

            var locationField = new Field(
                                    LookConstants.LocationField,
                                    location.ToString(),
                                    Field.Store.YES,
                                    Field.Index.NOT_ANALYZED);

            // latitude/longitude stored prefix-coded so they can be range-queried
            var locationLatitudeField = new Field(
                                            LookConstants.LocationField + "_Latitude",
                                            NumericUtils.DoubleToPrefixCoded(location.Latitude),
                                            Field.Store.YES,
                                            Field.Index.NOT_ANALYZED);

            var locationLongitudeField = new Field(
                                            LookConstants.LocationField + "_Longitude",
                                            NumericUtils.DoubleToPrefixCoded(location.Longitude),
                                            Field.Store.YES,
                                            Field.Index.NOT_ANALYZED);

            document.Add(hasLocationField);
            document.Add(locationField);
            document.Add(locationLatitudeField);
            document.Add(locationLongitudeField);

            // one tier field per configured cartesian tier plotter, for spatial (distance) search
            foreach (var cartesianTierPlotter in LookService.Instance._cartesianTierPlotters)
            {
                var boxId = cartesianTierPlotter.GetTierBoxId(location.Latitude, location.Longitude);

                var tierField = new Field(
                                    cartesianTierPlotter.GetTierFieldName(),
                                    NumericUtils.DoubleToPrefixCoded(boxId),
                                    Field.Store.YES,
                                    Field.Index.NOT_ANALYZED_NO_NORMS);

                document.Add(tierField);
            }
        }
    }

    #endregion
}
/// <summary>
/// Decodes a prefix-coded term back into the <see cref="double"/> it encodes,
/// via the sortable-Int64 intermediate representation.
/// </summary>
public double ParseDouble(BytesRef term) =>
    NumericUtils.SortableInt64ToDouble(NumericUtils.PrefixCodedToInt64(term));
/// <summary>
/// Deletes every indexed document whose key field matches the given version id.
/// </summary>
/// <param name="versionid">The version id (prefix-coded into the key field) to delete documents for.</param>
internal static void DeleteVersionFromIndex(int versionid)
{
    var keyTerm = new Term(LuceneManager.KeyFieldName, NumericUtils.IntToPrefixCoded(versionid));

    LuceneManager.DeleteDocuments(new Term[] { keyTerm }, false, 0, false);
}