public void create_index()
{
    // Arrange: build an in-memory product index through the manager.
    _index = new InMemoryProductIndex();
    var indexManager = new IndexManager();
    indexManager.RegisterIndex(_index);
    indexManager.CreateIndexes();
}
public void can_register_an_IIndexSource2()
{
    // Registering a source by type must leave exactly one entry behind.
    var manager = new IndexManager();
    manager.RegisterIndex<TestIndexSource>();
    manager.RegisteredIndexSources.Count.Should().Be.EqualTo(1);
}
public void can_find_IndexDefinition()
{
    var manager = new IndexManager();
    manager.RegisterIndex<TestIndexSource>();

    // Lookup by the registered source type must succeed.
    var found = manager.FindRegisteredIndex(typeof(TestIndexSource));
    found.Should().Not.Be.Null();
}
public void can_create_an_Index()
{
    var manager = new IndexManager();
    var source = new TestIndexSource();
    manager.RegisterIndex(source);

    // CreateIndexes reports how many indexes were built.
    var createdCount = manager.CreateIndexes();
    createdCount.Should().Be.EqualTo(1);
}
public void can_register_only_one_index_of_the_same_type()
{
    var manager = new IndexManager();
    manager.RegisterIndex<TestIndexSource>();

    // A second registration of the same source type must be rejected.
    Executing.This(() => manager.RegisterIndex<TestIndexSource>())
        .Should().Throw<ArgumentException>()
        .And.ValueOf.Message.Should().Be.EqualTo("Only one index of the same type can be registered. There is no reason why you would want to register the same index twice. :)");
}
// Use this for initialization
virtual public void Start()
{
    NV = GetComponent<NodeVis>();
    if (NV == null)
    {
        // No visualiser attached: fall back to a BehaviourNode, then to a Task.
        Node = GetComponent<BehaviourNode>();
        if (Node == null)
        {
            Node = GetComponent<Task>();
        }
    }
    else
    {
        Node = NV.node;
    }
    index = Node.Index;
}
static void Main(string[] args)
{
    string connectionString = "UseDevelopmentStorage=true;";
    BlobjectRepository repo = new BlobjectRepository(connectionString, "test");

    // Two sample objects, the first one with a sub object attached.
    TestObject first = new TestObject() { FirstName = "Dennis", LastName = "Hellewegen", Country = "Nederland" };
    TestObject second = new TestObject() { FirstName = "Kees", LastName = "Hellewegen", Country = "Nederland" };
    first.Subs.Add(new TestSubObject() { MiddleName = "van", SmallName = "Dennisje" });
    repo.Save<TestObject>(first);

    // Build an index and query it by country.
    IndexManager indexManager = new IndexManager();
    indexManager.BuildIndex<TestObject>();
    var hellewegens = indexManager.Get<TestObject>(t => t.Country == "Nederland");

    int id = 0;
    for (int i = 0; i < 120; i++)
    {
        // Resetting BlobjectId before each Save — presumably forces an insert
        // instead of an update; confirm against BlobjectRepository.
        first.BlobjectId = 0;
        second.BlobjectId = 0;
        repo.Save<TestObject>(first);
        repo.Save<TestObject>(second);
    }

    // Round-trip: load, change, save again.
    first = repo.GetById<TestObject>(id);
    first.FirstName = "Kees";
    repo.Save<TestObject>(first);

    Console.ReadKey();
}
public void create_index()
{
    _index = new InMemoryProductIndex();
    var indexManager = new IndexManager();
    indexManager.RegisterIndex(_index);
    indexManager.CreateIndexes();

    // Remove any facet files the run left on disk.
    foreach (var facetFile in Directory.GetFiles(@"C:\temp\", "*.facet"))
    {
        File.Delete(facetFile);
    }
}
private void dgvIndexes_CellValueChanged(object sender, DataGridViewCellEventArgs e)
{
    // Ignore header rows.
    if (e.RowIndex < 0)
    {
        return;
    }

    // Only react to changes in the index-type column.
    if (e.ColumnIndex != this.colType.Index)
    {
        return;
    }

    this.ShowIndexExtraPropertites();

    DataGridViewRow changedRow = this.dgvIndexes.Rows[e.RowIndex];
    DataGridViewCell nameCell = changedRow.Cells[this.colIndexName.Name];

    string typeValue = DataGridViewHelper.GetCellStringValue(changedRow, this.colType.Name);
    string currentName = DataGridViewHelper.GetCellStringValue(nameCell);

    // Auto-fill a default index name only when the user has not typed one.
    if (string.IsNullOrEmpty(currentName))
    {
        nameCell.Value = typeValue == IndexType.Primary.ToString()
            ? IndexManager.GetPrimaryKeyDefaultName(this.Table)
            : IndexManager.GetIndexDefaultName(this.Table);
    }
}
/// <summary>
/// Loads the given resource descriptors onto the GPU using a one-time
/// transfer command buffer, then waits for the device to go idle.
/// </summary>
/// <param name="resources">Descriptors of the resources to load.</param>
public override void LoadAttachments(IEnumerable <Resource.IDescriptor> resources)
{
    // - Materialize once: the sequence was enumerated twice (Count() + foreach).
    var resourceList = resources.ToList();

    // - Skip buffer creation if no resources to load
    if (resourceList.Count == 0)
    {
        return;
    }

    // - Creates a command buffer for data transfer
    var commands = GraphicDevice.Handle.AllocateCommandBuffer(Commands[0], CommandBufferLevel.Primary);
    commands.Begin(CommandBufferUsageFlags.OneTimeSubmit);

    foreach (Resource.IDescriptor res in resourceList)
    {
        switch (res)
        {
            case Shape.Descriptor shape:
                // - Loads the geometry
                Geometry pointer = GeometryDispatcher.Load(shape);

                // - Set resource loaded in the source thread
                shape.SourceScheduler.Add(() =>
                {
                    shape.SetResource(pointer);
                    shape.SetState(Resource.State.Loaded);
                });
                break;

            default:
                // Fixed grammar of the original message ("does not has").
                throw new Exception($"Resource {res.GetType().Name} does not have a loader.");
        }
    }

    {
        // - Upload invalidated vertexs
        VertexManager.Upload(commands, 1000);

        // - Upload invalidated indexes
        IndexManager.Upload(commands, 1000);
    }

    commands.End();

    // - Perform all attachment operations
    GraphicDevice.GraphicQueue.Submit
    (
        new SubmitInfo { CommandBuffers = new[] { commands } },
        null
    );

    // - Free attachment command buffer
    Commands[0].FreeCommandBuffers(commands);

    // - Wait the GPU (we must operate only when GPU is idle)
    GraphicDevice.Handle.WaitIdle();
}
public void Search()
{
    // Start from an empty index so earlier tests cannot leak documents in.
    IndexManager.PurgeIndexes();

    var expected = new List <SampleDocument>
    {
        new SampleDocument { Id = "1", Sort = "1", Content = @"Omega", Title = "Alpha", DocDate = new DateTime(2019, 11, 7) },
        new SampleDocument { Id = "2", Sort = "2", Content = @"delta", Title = "Alpha", DocDate = new DateTime(2019, 11, 7) },
        new SampleDocument { Id = "3", Sort = "3", Content = @"thisfind", Title = "Mex", DocDate = new DateTime(2019, 11, 7) },
        new SampleDocument { Id = "4", Sort = "4", Content = @"trit", Title = "pep", DocDate = new DateTime(2019, 11, 1) },
        new SampleDocument { Id = "5", Sort = "5", Content = @"date", Title = "date", DocDate = new DateTime(2019, 11, 7) }
    };

    IndexManager.BulkInsert(expected);

    var searchData = new FindRequest <SampleDocument>(0, 10);

    // Three "Should" (OR-style) clauses: content term, title term, and a date
    // lower bound. Document 4 satisfies none of them.
    var results = searchData
        .Should(SearchClause <SampleDocument> .Term(x => x.Content, "thisfind"))
        .Should(SearchClause <SampleDocument> .Term(x => x.Title, "alpha"))
        .Should(SearchClause <SampleDocument> .GreaterThan(x => x.DocDate, new DateTime(2019, 11, 5)))
        .Sort(x => x.Sort)
        .Execute();

    var actual = results.Documents.ToList();

    // Documents 1, 2, 3 and 5 are expected back, ordered by Sort.
    Assert.Equal(4, actual.Count);
    Assert.Equal("1", actual[0].Id);
    Assert.Equal("2", actual[1].Id);
    Assert.Equal("3", actual[2].Id);
    Assert.Equal("5", actual[3].Id);
}
public void Quoted()
{
    // Start from an empty index so earlier tests cannot leak documents in.
    IndexManager.PurgeIndexes();

    var expected = new List <SampleDocument>
    {
        new SampleDocument { Id = "1", Sort = "1", Content = @"This a test", Title = "Alpha", DocDate = new DateTime(2019, 11, 7) },
        new SampleDocument { Id = "2", Sort = "2", Content = @"Run a quoted test thisisasample", Title = "Alpha", DocDate = new DateTime(2019, 11, 7) },
        new SampleDocument { Id = "3", Sort = "3", Content = @"no return test", Title = "Mex", DocDate = new DateTime(2019, 11, 7) },
        new SampleDocument { Id = "4", Sort = "4", Content = @"find all of data", Title = "pep", DocDate = new DateTime(2019, 11, 1) },
        new SampleDocument { Id = "5", Sort = "5", Content = @"date", Title = "date", DocDate = new DateTime(2019, 11, 7) }
    };

    IndexManager.BulkInsert(expected);

    var searchData = new FindRequest <SampleDocument>(0, 10);

    // A Match query mixing a quoted phrase ("find all ") with loose terms;
    // every seeded document contains at least one of the terms.
    var results = searchData
        .Must(SearchClause <SampleDocument> .Match(x => x.Content, "\"find all \" date return quoted test"))
        .Sort(x => x.Sort)
        .Execute();

    var actual = results.Documents.ToList();

    // All five documents come back unchanged, ordered by Sort.
    Assert.Equal(expected.Count, actual.Count);
    for (int i = 0; i < expected.Count; i++)
    {
        Assert.Equal(expected[i].Id, actual[i].Id);
        Assert.Equal(expected[i].Sort, actual[i].Sort);
        Assert.Equal(expected[i].Title, actual[i].Title);
        Assert.Equal(expected[i].DocDate, actual[i].DocDate);
        Assert.Equal(expected[i].Content, actual[i].Content);
    }
}
// Parses queryText with both the stock Lucene QueryParser and SnLucParser,
// using OR and AND default operators, dumps all four parsed forms as one
// tab-separated line, and records mismatches into msg.
private void Parser_QueryParser_AndOperatorDump(string queryText, StringBuilder dump, StringBuilder msg)
{
    QueryParser parser;
    SnLucParser snParser;

    // Reference: stock Lucene parser with the default (OR) operator.
    parser = new QueryParser(Search.Indexing.LuceneManager.LuceneVersion, LucObject.FieldName.AllText, IndexManager.GetAnalyzer());
    var lucQueryOr = parser.Parse(queryText);

    // Reference: stock Lucene parser switched to AND.
    parser = new QueryParser(Search.Indexing.LuceneManager.LuceneVersion, LucObject.FieldName.AllText, IndexManager.GetAnalyzer());
    parser.SetDefaultOperator(QueryParser.Operator.AND);
    var LucQueryAnd = parser.Parse(queryText);

    // System under test: SnLucParser with OR, then with AND.
    snParser = new SnLucParser();
    var snQueryOr = snParser.Parse(queryText);
    //snQueryOr = snQueryOr.Rewrite(IndexManager.GetIndexReader());
    snParser = new SnLucParser();
    var snQueryAnd = snParser.Parse(queryText, SnLucParser.DefaultOperator.And);
    //snQueryAnd = snQueryAnd.Rewrite(IndexManager.GetIndexReader());

    // Tab-separated dump line: query text plus the four parsed forms.
    dump.Append(queryText);
    dump.Append('\t');
    dump.Append(lucQueryOr.ToString());
    dump.Append('\t');
    dump.Append(LucQueryAnd.ToString());
    dump.Append('\t');
    dump.Append(snQueryOr.ToString());
    dump.Append('\t');
    dump.Append(snQueryAnd.ToString());
    dump.AppendLine();

    var lucQueryOrString = lucQueryOr.ToString();
    var LucQueryAndString = LucQueryAnd.ToString();
    var snQueryOrString = snQueryOr.ToString();
    var snQueryAndString = snQueryAnd.ToString();

    if (lucQueryOrString != snQueryOrString)
    {
        // NOTE(review): the message never closes the final quote after the
        // actual value — confirm whether that is intentional.
        msg.Append("Error with OR operator. Query: '").Append(queryText)
        .Append("'. Expected: '").Append(lucQueryOrString)
        .Append("'. Actual: '").AppendLine(snQueryOrString);
    }
    if (LucQueryAndString != snQueryAndString)
    {
        msg.Append("Error with AND operator. Query: '").Append(queryText)
        .Append("'. Expected: '").Append(LucQueryAndString)
        .Append("'. Actual: '").AppendLine(snQueryAndString);
    }
}
// Test setup: remove every document from the index so each test starts clean.
public void Init()
{
    // clear index
    IndexManager.Of(Index).DeleteAll();
}
// ASP.NET MVC application entry point: configures logging, Lucene.Net
// indexing, MVC routing/bundles, and a background exception-logging thread.
protected void Application_Start()
{
    // Read the Log4Net configuration from the config file.
    XmlConfigurator.Configure();

    // Start scanning the Lucene.Net data queue; field definitions describe
    // how JD_Commodity_001 properties are stored/analyzed in the index.
    IndexManager <JD_Commodity_001> .InitData(System.Configuration.ConfigurationManager.AppSettings["LuceneNetDir"], new List <FieldDataModel>()
    {
        new FieldDataModel()
        {
            FieldName = "Id",
            PropertyName = "Id",
            Store = Field.Store.NO,
        },
        new FieldDataModel()
        {
            FieldName = "Title",
            PropertyName = "Title",
            Index = Field.Index.ANALYZED,
            TermVector = Field.TermVector.WITH_POSITIONS_OFFSETS,
        },
        new FieldDataModel()
        {
            FieldName = "Price",
            PropertyName = "Price",
            Index = Field.Index.NOT_ANALYZED,
            TermVector = Field.TermVector.WITH_POSITIONS_OFFSETS,
        }
    });

    AreaRegistration.RegisterAllAreas();
    FilterConfig.RegisterGlobalFilters(GlobalFilters.Filters);
    RouteConfig.RegisterRoutes(RouteTable.Routes);
    BundleConfig.RegisterBundles(BundleTable.Bundles);

    // Start a thread that drains the exception queue and writes each entry to the log.
    string filePath = Server.MapPath("/Log/"); //Request.MapPath()
    Task.Factory.StartNew(o =>
    {
        while (true)
        {
            // Check whether the queue contains any data.
            if (WebSiteExceptionAttribute.ExceptionQueue.Count > 0)
            {
                Exception ex = WebSiteExceptionAttribute.ExceptionQueue.Dequeue();
                if (ex != null)
                {
                    // Write the exception to the log.
                    //string fileName = DateTime.Now.ToString("yyyy-MM-dd");
                    //System.IO.File.AppendAllText(filePath+ fileName + ".txt", ex.ToString(), System.Text.Encoding.UTF8);
                    ILog logger = LogManager.GetLogger("errorMsg");
                    logger.Error(ex.ToString());
                }
                else
                {
                    // Queue yielded nothing: sleep for a while.
                    Thread.Sleep(3000);
                }
            }
            else
            {
                // Queue is empty: sleep for a while.
                Thread.Sleep(3000);
            }
        }
    }, filePath);
}
// Start is called before the first frame update
void Start()
{
    // Reset the state machine, then pick up the selected mode.
    state = 0;
    localmode = IndexManager.getSelectMode();
}
public ActionResult Homeselectpwd(string name)
{
    // Delegate to the manager and return the result as GET-accessible JSON.
    var result = IndexManager.Homeselectpwd(name);
    return Json(result, JsonRequestBehavior.AllowGet);
}
public ActionResult updateADD(string name, string pwd)
{
    // Delegate the update to the manager and return the outcome as JSON.
    var result = IndexManager.update(name, pwd);
    return Json(result, JsonRequestBehavior.AllowGet);
}
/// <summary>
/// Creates a resolver that passes all collaborators straight to the base class;
/// no additional state is kept here.
/// </summary>
internal DefaultAddinResolver(IndexManager indexManager, BodyRepository bodyRepo, ConvertionManager convertionManager)
    : base(indexManager, bodyRepo, convertionManager)
{
}
public IndexDocument CompleteIndexDocument(IndexDocumentData indexDocumentData)
{
    // Thin passthrough: completion is implemented by IndexManager.
    return IndexManager.CompleteIndexDocument(indexDocumentData);
}
// Update is called once per frame.
// Late-binds the node visualiser if it appeared after Start(), then consumes
// the inspector toggle flags one at a time (flag is cleared before the action
// runs, so a re-trigger during the action is not lost).
virtual public void Update()
{
    if (NV == null)
    {
        NV = GetComponent<NodeVis>();
        if (NV != null)
        {
            Node = NV.node;
            index = Node.Index;
        }
    }
    if (removeIt)
    {
        removeIt = false;
        remove();
    }
    if (indexUpToggle)
    {
        indexUpToggle = false;
        indexUp();
    }
    if (indexDownToggle)
    {
        indexDownToggle = false;
        indexDown();
    }
}
// SWIG helper: unwraps the native pointer, mapping a null wrapper to a null handle.
internal static global::System.Runtime.InteropServices.HandleRef getCPtr(IndexManager obj)
{
    if (obj == null)
    {
        return new global::System.Runtime.InteropServices.HandleRef(null, global::System.IntPtr.Zero);
    }
    return obj.swigCPtr;
}
// Verifies that restoring an indexing-activity status resets the gap items
// (13, 14) and everything after LastActivityId (16, 17) to Waiting, while
// items covered by the restore point stay Done.
public void Indexing_RestoreIndexingActivityStatus()
{
    Test(() =>
    {
        var db = ((InMemoryDataProvider)DataStore.DataProvider).DB;

        // Empty test
        db.IndexingActivities.Clear();

        // Real test: seed seven Done activities with ids 11..17.
        var i = 10;
        var items = new[]
        {
            IndexingActivityRunningState.Done,
            IndexingActivityRunningState.Done,
            IndexingActivityRunningState.Done,
            IndexingActivityRunningState.Done,
            IndexingActivityRunningState.Done,
            IndexingActivityRunningState.Done,
            IndexingActivityRunningState.Done,
        }.Select(x => new IndexingActivityDoc
        {
            IndexingActivityId = ++i,
            ActivityType = IndexingActivityType.AddDocument,
            Path = "/Root/" + i,
            CreationDate = new DateTime(2020, 04, 18, 0, 0, i),
            NodeId = 95000 + i,
            RunningState = x,
            VersionId = 91000 + i
        });
        foreach (var item in items)
        {
            db.IndexingActivities.Insert(item);
        }

        // Restore point: last processed id 15, with 13 and 14 still missing.
        var state = new IndexingActivityStatus { LastActivityId = 15, Gaps = new[] { 13, 14 } };

        // ACTION
        var inMemEngine = (InMemoryIndexingEngine)SearchManager.SearchEngine.IndexingEngine;
        try
        {
            // Restore is only meaningful for a centralized index, so force that mode.
            inMemEngine.IndexIsCentralized = true;
            IndexManager.RestoreIndexingActivityStatusAsync(state, CancellationToken.None)
            .ConfigureAwait(false).GetAwaiter().GetResult();
        }
        finally
        {
            inMemEngine.IndexIsCentralized = false;
        }

        // ASSERT
        // NOTE(review): the projection reads x.Id while the seed sets
        // IndexingActivityId — presumably these are the same value; confirm.
        var expected = "11:Done,12:Done,13:Waiting,14:Waiting,15:Done,16:Waiting,17:Waiting";
        var actual = string.Join(",", db.IndexingActivities
                                 .OrderBy(x => x.IndexingActivityId)
                                 .Select(x => $"{x.Id}:{x.RunningState}"));
        Assert.AreEqual(expected, actual);
    });
}
/// <summary>
/// Creates a candidate bound to the given index manager and immediately
/// initializes its searcher.
/// </summary>
public ParametrizedEnsembleCandidate(IndexManager indexManager) : base(indexManager)
{
    InitSearcher();
}
/// <summary>
/// Creates Index over specified XPathNavigator.
/// </summary>
/// <param name="navigator">Core XPathNavigator</param>
public XPathNavigatorIndex(XPathNavigator navigator)
{
    this.nav = navigator;
    // Each index instance owns its own IndexManager.
    manager = new IndexManager();
}
// Parses queryText with a stock Lucene QueryParser over the _Text field,
// using the analyzer shared by the indexing subsystem.
private Query ParseLucQuery(string queryText)
{
    var parser = new QueryParser(Search.Indexing.LuceneManager.LuceneVersion,
                                 LucObject.FieldName.AllText,
                                 IndexManager.GetAnalyzer());
    return parser.Parse(queryText);
}
// SWIG helper: returns the wrapped native handle, or a zero handle when the
// managed wrapper is null.
internal static global::System.Runtime.InteropServices.HandleRef getCPtr(IndexManager obj)
{
    if (obj != null)
    {
        return obj.swigCPtr;
    }
    return new global::System.Runtime.InteropServices.HandleRef(null, global::System.IntPtr.Zero);
}
// Bootstraps a curve from BMA swap helpers, then reprices each market BMA
// swap on that curve and checks the fair Libor fraction against the quoted
// one within the given tolerance.
public void testBMACurveConsistency <T, I, B>(CommonVars vars, I interpolator, double tolerance)
    where T : BootstrapTraits, new()
    where I : IInterpolationFactory, new()
    where B : IBootStrap, new()
{
    // readjust settlement: joint BMA/USD-Libor calendar, today's date, spot.
    vars.calendar = new JointCalendar(new BMAIndex().fixingCalendar(),
                                      new USDLibor(new Period(3, TimeUnit.Months)).fixingCalendar(),
                                      JointCalendar.JointCalendarRule.JoinHolidays);
    vars.today = vars.calendar.adjust(Date.Today);
    Settings.setEvaluationDate(vars.today);
    vars.settlement = vars.calendar.advance(vars.today, vars.settlementDays, TimeUnit.Days);

    // Flat 4% curve used to project the Libor leg.
    Handle <YieldTermStructure> riskFreeCurve = new Handle <YieldTermStructure>(
        new FlatForward(vars.settlement, 0.04, new Actual360()));

    BMAIndex bmaIndex = new BMAIndex();
    IborIndex liborIndex = new USDLibor(new Period(3, TimeUnit.Months), riskFreeCurve);
    for (int i = 0; i < vars.bmas; ++i)
    {
        Handle <Quote> f = new Handle <Quote>(vars.fractions[i]);
        vars.bmaHelpers.Add(new BMASwapRateHelper(f, new Period(vars.bmaData[i].n, vars.bmaData[i].units),
                                                  vars.settlementDays,
                                                  vars.calendar,
                                                  new Period(vars.bmaFrequency),
                                                  vars.bmaConvention,
                                                  vars.bmaDayCounter,
                                                  bmaIndex,
                                                  liborIndex));
    }

    // BMA fixes on Wednesdays (weekday 4): seed the most recent fixing.
    int w = vars.today.weekday();
    Date lastWednesday = (w >= 4) ? vars.today - (w - 4) : vars.today + (4 - w - 7);
    Date lastFixing = bmaIndex.fixingCalendar().adjust(lastWednesday);
    bmaIndex.addFixing(lastFixing, 0.03);

    vars.termStructure = new PiecewiseYieldCurve <T, I, B>(vars.settlement, vars.bmaHelpers,
                                                           new Actual360(), new List <Handle <Quote> >(), new List <Date>(),
                                                           1.0e-12, interpolator);

    RelinkableHandle <YieldTermStructure> curveHandle = new RelinkableHandle <YieldTermStructure>();
    curveHandle.linkTo(vars.termStructure);

    // check BMA swaps
    BMAIndex bma = new BMAIndex(curveHandle);
    IborIndex libor3m = new USDLibor(new Period(3, TimeUnit.Months), riskFreeCurve);
    for (int i = 0; i < vars.bmas; i++)
    {
        Period tenor = new Period(vars.bmaData[i].n, vars.bmaData[i].units);

        Schedule bmaSchedule = new MakeSchedule().from(vars.settlement)
                               .to(vars.settlement + tenor)
                               .withFrequency(vars.bmaFrequency)
                               .withCalendar(bma.fixingCalendar())
                               .withConvention(vars.bmaConvention)
                               .backwards()
                               .value();
        Schedule liborSchedule = new MakeSchedule().from(vars.settlement)
                                 .to(vars.settlement + tenor)
                                 .withTenor(libor3m.tenor())
                                 .withCalendar(libor3m.fixingCalendar())
                                 .withConvention(libor3m.businessDayConvention())
                                 .endOfMonth(libor3m.endOfMonth())
                                 .backwards()
                                 .value();

        BMASwap swap = new BMASwap(BMASwap.Type.Payer, 100.0, liborSchedule, 0.75, 0.0,
                                   libor3m, libor3m.dayCounter(), bmaSchedule, bma, vars.bmaDayCounter);
        swap.setPricingEngine(new DiscountingSwapEngine(libor3m.forwardingTermStructure()));

        double expectedFraction = vars.bmaData[i].rate / 100,
               estimatedFraction = swap.fairLiborFraction();
        double error = Math.Abs(expectedFraction - estimatedFraction);
        if (error > tolerance)
        {
            Console.WriteLine(vars.bmaData[i].n + " year(s) BMA swap:\n"
                              + "\n estimated libor fraction: " + estimatedFraction
                              + "\n expected libor fraction: " + expectedFraction
                              + "\n error: " + error
                              + "\n tolerance: " + tolerance);
        }
    }

    // this is a workaround for garbage collection
    // garbage collection needs a proper solution
    IndexManager.instance().clearHistories();
}
// SWIG wrapper for the native IndexManager singleton; surfaces any pending
// native exception as a managed one.
public static IndexManager instance()
{
    var result = new IndexManager(NQuantLibcPINVOKE.IndexManager_instance(), false);
    if (NQuantLibcPINVOKE.SWIGPendingException.Pending)
    {
        throw NQuantLibcPINVOKE.SWIGPendingException.Retrieve();
    }
    return result;
}
// Verifies that bumping the application version and re-indexing keeps every
// document intact and queryable.
public void ReIndexing()
{
    // Start from an empty index so earlier tests cannot leak documents in.
    IndexManager.PurgeIndexes();

    var expected = new List <SampleDocument>
    {
        new SampleDocument { Id = "1", Sort = "1", Content = @"Omega", Title = "Alpha", DocDate = new DateTime(2019, 11, 7) },
        new SampleDocument { Id = "2", Sort = "2", Content = @"delta", Title = "Alpha", DocDate = new DateTime(2019, 11, 7) },
        new SampleDocument { Id = "3", Sort = "3", Content = @"thisfind", Title = "Mex", DocDate = new DateTime(2019, 11, 7) },
        new SampleDocument { Id = "4", Sort = "4", Content = @"trit", Title = "pep", DocDate = new DateTime(2019, 11, 1) },
        new SampleDocument { Id = "5", Sort = "5", Content = @"date", Title = "date", DocDate = new DateTime(2019, 11, 7) }
    };

    IndexManager.BulkInsert(expected);

    // Change the app version, then trigger the re-index.
    Manager.Instance.AppVersion = "2";
    IndexManager.ReIndex <SampleDocument>();

    var searchData = new FindRequest <SampleDocument>(0, 10);
    var results = searchData
        .Sort(x => x.Sort)
        .Execute();
    var actual = results.Documents.ToList();

    // All documents must survive re-indexing unchanged, ordered by Sort.
    Assert.Equal(expected.Count, actual.Count);
    for (int i = 0; i < expected.Count; i++)
    {
        Assert.Equal(expected[i].Id, actual[i].Id);
        Assert.Equal(expected[i].Sort, actual[i].Sort);
        Assert.Equal(expected[i].Title, actual[i].Title);
        Assert.Equal(expected[i].DocDate, actual[i].DocDate);
        Assert.Equal(expected[i].Content, actual[i].Content);
    }
}
public void create_index()
{
    // Clean any existing facet files first so the tests run against
    // new and correct data.
    const string facetsDirectory = @"C:\temp\facets";
    if (Directory.Exists(facetsDirectory))
    {
        foreach (var facetFile in Directory.GetFiles(facetsDirectory, "*.facet"))
        {
            File.Delete(facetFile);
        }
    }

    // Build a fresh disk-backed product index.
    _index = new DiskProductIndex();
    _manager = new IndexManager();
    _manager.RegisterIndex(_index);
    _manager.CreateIndexes();
}
// Verifies wildcard matching: "th*s" should hit "This" (doc 1) and
// "thisisasample" (doc 2) but nothing else.
public void InWildCard()
{
    // Start from an empty index so earlier tests cannot leak documents in.
    IndexManager.PurgeIndexes();

    var expected = new List <SampleDocument>
    {
        new SampleDocument { Id = "1", Sort = "1", Content = @"This a test", Title = "Alpha", DocDate = new DateTime(2019, 11, 7) },
        new SampleDocument { Id = "2", Sort = "2", Content = @"Run a prefix test thisisasample", Title = "Alpha", DocDate = new DateTime(2019, 11, 7) },
        new SampleDocument { Id = "3", Sort = "3", Content = @"no return test", Title = "Mex", DocDate = new DateTime(2019, 11, 7) },
        new SampleDocument { Id = "4", Sort = "4", Content = @"does not exist", Title = "pep", DocDate = new DateTime(2019, 11, 1) },
        new SampleDocument { Id = "5", Sort = "5", Content = @"date", Title = "date", DocDate = new DateTime(2019, 11, 7) }
    };

    IndexManager.BulkInsert(expected);

    var searchData = new FindRequest <SampleDocument>(0, 10);
    var results = searchData
        .Must(SearchClause <SampleDocument> .Wildcard(x => x.Content, "th*s"))
        .Sort(x => x.Sort)
        .Execute();
    var actual = results.Documents.ToList();

    // Only the first two documents match, ordered by Sort.
    Assert.Equal(2, actual.Count);
    Assert.Equal(expected[0].Id, actual[0].Id);
    Assert.Equal(expected[0].Sort, actual[0].Sort);
    Assert.Equal(expected[0].Title, actual[0].Title);
    Assert.Equal(expected[0].DocDate, actual[0].DocDate);
    Assert.Equal(expected[0].Content, actual[0].Content);
    Assert.Equal(expected[1].Id, actual[1].Id);
    Assert.Equal(expected[1].Sort, actual[1].Sort);
    Assert.Equal(expected[1].Title, actual[1].Title);
    Assert.Equal(expected[1].DocDate, actual[1].DocDate);
    Assert.Equal(expected[1].Content, actual[1].Content);
}
protected override bool ProtectedExecute()
{
    // Forward the document update to the IndexManager and report its result.
    return IndexManager.UpdateDocument(Document, Versioning);
}
/// <summary>
/// Synchronizes the primary-key row of the index grid with the given key
/// columns: updates the existing primary row, removes it when no key columns
/// remain, or inserts a new primary row at the top.
/// </summary>
/// <param name="columnDesingerInfos">Columns that make up the primary key.</param>
public void LoadPrimaryKeys(IEnumerable <TableColumnDesingerInfo> columnDesingerInfos)
{
    // Materialize once: the incoming sequence was previously re-enumerated
    // by several Count() calls and two Select() projections.
    var keyColumns = columnDesingerInfos.ToList();

    int primaryRowIndex = -1;

    // Find the existing primary-key row (if any) and refresh its columns.
    foreach (DataGridViewRow row in this.dgvIndexes.Rows)
    {
        if (row.IsNewRow)
        {
            continue;
        }

        TableIndexDesignerInfo indexDesignerInfo = row.Tag as TableIndexDesignerInfo;
        if (indexDesignerInfo != null && indexDesignerInfo.IsPrimary)
        {
            primaryRowIndex = row.Index;
            if (keyColumns.Count > 0)
            {
                indexDesignerInfo.Columns.Clear();
                indexDesignerInfo.Columns.AddRange(keyColumns.Select(item => new IndexColumn() { ColumnName = item.Name }));
                row.Cells[this.colColumns.Name].Value = this.GetColumnsDisplayText(indexDesignerInfo.Columns);
            }
            break;
        }
    }

    if (primaryRowIndex >= 0 && keyColumns.Count == 0)
    {
        // Primary row exists but no key columns remain: drop the row.
        this.dgvIndexes.Rows.RemoveAt(primaryRowIndex);
    }
    else if (primaryRowIndex < 0 && keyColumns.Count > 0)
    {
        // No primary row yet: insert one at the top of the grid.
        this.dgvIndexes.Rows.Insert(0, 1);

        TableIndexDesignerInfo tableIndexDesignerInfo = new TableIndexDesignerInfo()
        {
            // Oracle models the primary key as a unique index here.
            Type = this.DatabaseType == DatabaseType.Oracle ? IndexType.Unique.ToString() : IndexType.Primary.ToString(),
            Name = IndexManager.GetPrimaryKeyDefaultName(this.Table),
            IsPrimary = true
        };

        tableIndexDesignerInfo.Columns.AddRange(keyColumns.Select(item => new IndexColumn() { ColumnName = item.Name }));

        DataGridViewRow primaryRow = this.dgvIndexes.Rows[0];
        primaryRow.Cells[this.colType.Name].ReadOnly = true;
        primaryRow.Cells[this.colType.Name].Value = tableIndexDesignerInfo.Type;
        primaryRow.Cells[this.colIndexName.Name].Value = tableIndexDesignerInfo.Name;
        primaryRow.Cells[this.colColumns.Name].Value = this.GetColumnsDisplayText(tableIndexDesignerInfo.Columns);

        tableIndexDesignerInfo.ExtraPropertyInfo = new TableIndexExtraPropertyInfo() { Clustered = true };

        primaryRow.Tag = tableIndexDesignerInfo;
    }

    this.ShowIndexExtraPropertites();
}
/// <summary>
/// Initializes a new <see cref="Row"/> instance.
/// </summary>
/// <param name="rowIndex">Row index.</param>
public Row(int rowIndex)
{
    RowIndex = rowIndex;
    Cells = new List <ICell>();
    _indexManager = new IndexManager();
}
// Tests Hull-White swap pricing: for a grid of start dates, lengths and fixed
// rates, the tree engine's NPV must match the analytic discounting engine's
// NPV within tolerance.
public void testSwaps()
{
    //BOOST_MESSAGE("Testing Hull-White swap pricing against known values...");
    Date today;  //=Settings::instance().evaluationDate();;
    Calendar calendar = new TARGET();
    today = calendar.adjust(Date.Today);
    Settings.setEvaluationDate(today);

    Date settlement = calendar.advance(today, 2, TimeUnit.Days);

    // Discount curve pillars and factors.
    Date[] dates =
    {
        settlement,
        calendar.advance(settlement, 1, TimeUnit.Weeks),
        calendar.advance(settlement, 1, TimeUnit.Months),
        calendar.advance(settlement, 3, TimeUnit.Months),
        calendar.advance(settlement, 6, TimeUnit.Months),
        calendar.advance(settlement, 9, TimeUnit.Months),
        calendar.advance(settlement, 1, TimeUnit.Years),
        calendar.advance(settlement, 2, TimeUnit.Years),
        calendar.advance(settlement, 3, TimeUnit.Years),
        calendar.advance(settlement, 5, TimeUnit.Years),
        calendar.advance(settlement, 10, TimeUnit.Years),
        calendar.advance(settlement, 15, TimeUnit.Years)
    };
    double[] discounts =
    {
        1.0, 0.999258, 0.996704, 0.990809, 0.981798, 0.972570,
        0.963430, 0.929532, 0.889267, 0.803693, 0.596903, 0.433022
    };

    LogLinear Interpolator = new LogLinear();

    Handle <YieldTermStructure> termStructure =
        new Handle <YieldTermStructure>(
            new InterpolatedDiscountCurve <LogLinear>(
                dates.ToList <Date>(),
                discounts.ToList <double>(),
                new Actual365Fixed(), new Calendar(), Interpolator)
            );

    HullWhite model = new HullWhite(termStructure);

    int[] start = { -3, 0, 3 };
    int[] length = { 2, 5, 10 };
    double[] rates = { 0.02, 0.04, 0.06 };
    IborIndex euribor = new Euribor6M(termStructure);

    IPricingEngine engine = new TreeVanillaSwapEngine(model, 120, termStructure);

#if QL_USE_INDEXED_COUPON
    double tolerance = 4.0e-3;
#else
    double tolerance = 1.0e-8;
#endif

    for (int i = 0; i < start.Length; i++)
    {
        Date startDate = calendar.advance(settlement, start[i], TimeUnit.Months);
        if (startDate < today)
        {
            // Seeded swaps need a historical fixing for the running coupon.
            Date fixingDate = calendar.advance(startDate, -2, TimeUnit.Days);
            ObservableValue <TimeSeries <double> > pastFixings = new ObservableValue <TimeSeries <double> >();
            pastFixings.value()[fixingDate] = 0.03;
            IndexManager.instance().setHistory(euribor.name(), pastFixings);
        }

        for (int j = 0; j < length.Length; j++)
        {
            // BUG FIX: was length[i], which ignored the inner loop variable and
            // priced the same maturity for every j (QuantLib uses length[j]).
            Date maturity = calendar.advance(startDate, length[j], TimeUnit.Years);

            Schedule fixedSchedule = new Schedule(startDate, maturity, new Period(Frequency.Annual),
                                                  calendar, BusinessDayConvention.Unadjusted, BusinessDayConvention.Unadjusted,
                                                  DateGeneration.Rule.Forward, false);
            Schedule floatSchedule = new Schedule(startDate, maturity, new Period(Frequency.Semiannual),
                                                  calendar, BusinessDayConvention.Following, BusinessDayConvention.Following,
                                                  DateGeneration.Rule.Forward, false);

            for (int k = 0; k < rates.Length; k++)
            {
                VanillaSwap swap = new VanillaSwap(VanillaSwap.Type.Payer, 1000000.0,
                                                   fixedSchedule, rates[k], new Thirty360(),
                                                   floatSchedule, euribor, 0.0, new Actual360());

                // Analytic reference price...
                swap.setPricingEngine(new DiscountingSwapEngine(termStructure));
                double expected = swap.NPV();

                // ...versus the Hull-White tree engine.
                swap.setPricingEngine(engine);
                double calculated = swap.NPV();

                double error = Math.Abs((expected - calculated) / expected);
                if (error > tolerance)
                {
                    Assert.Fail("Failed to reproduce swap NPV:"
                                //+ QL_FIXED << std::setprecision(9)
                                + "\n calculated: " + calculated
                                + "\n expected: " + expected
                                //+ QL_SCIENTIFIC
                                + "\n rel. error: " + error);
                }
            }
        }
    }
}
// Test teardown: clear all stored index fixings so subsequent tests start clean.
public void Dispose()
{
    IndexManager.instance().clearHistories();
}
/// <summary>
/// Render a set of assets on the screen: records one command buffer per
/// swapchain image, submits the one for the acquired image, presents it,
/// and recycles the previous frame's buffers.
/// </summary>
public override void Render(Asset[] assets)
{
    base.Render(assets);

    // One command buffer per swapchain image for the current frame.
    CommandBuffers[CurrentFrame] = GraphicDevice.Handle.AllocateCommandBuffers(Commands[CurrentFrame], CommandBufferLevel.Primary, (uint)Swapchain.Images.Count());

    for (int index = 0; index < Swapchain.Images.Count(); index++)
    {
        var commandBuffer = CommandBuffers[CurrentFrame][index];
        commandBuffer.Begin(CommandBufferUsageFlags.SimultaneousUse);
        commandBuffer.BeginRenderPass(DefaultRenderPass.Handle,
                                      Swapchain.VideoBuffers[index].Handler,
                                      new Rect2D(Swapchain.SurfaceSize),
                                      (ClearValue)(0F, 1F, 0F, 1F),
                                      SubpassContents.Inline);

        if (assets != null)
        {
            foreach (Asset asset in assets)
            {
                // - Gets the mesh from the mesh store
                var mesh = GeometryDispatcher.Get(asset.Mesh);

                // Bind vertex/index buffers, then issue the indexed draw.
                VertexManager.Bind(commandBuffer);
                IndexManager.Bind(commandBuffer, (int)mesh.Indexes.Offset);
                commandBuffer.BindPipeline(PipelineBindPoint.Graphics, Pipeline.PipelineInstance);
                commandBuffer.DrawIndexed((uint)mesh.Indexes.Count, (uint)mesh.Indexes.Count / 3, 0, 0, 0);
            }
        }

        commandBuffer.EndRenderPass();
        commandBuffer.End();
    }

    uint nextImage = Swapchain.Handle.AcquireNextImage(uint.MaxValue, ImageAvailableSemaphores[CurrentFrame], null);

    // Wait if a previous in-flight frame is still using this image.
    if (ImagesInFlight[(int)nextImage] != null)
    {
        GraphicDevice.Handle.WaitForFences(ImagesInFlight[(int)nextImage], true, UInt64.MaxValue);
    }

    // - Mark the image as now being in use by this frame
    ImagesInFlight[(int)nextImage] = InFlightFences[CurrentFrame];

    try
    {
        GraphicDevice.Handle.ResetFences(InFlightFences[CurrentFrame]);
        GraphicDevice.GraphicQueue.Submit
        (
            new SubmitInfo
            {
                CommandBuffers = new[] { CommandBuffers[CurrentFrame][nextImage] },
                SignalSemaphores = new[] { RenderFinishedSemaphores[CurrentFrame] },
                WaitDestinationStageMask = new[] { PipelineStageFlags.ColorAttachmentOutput },
                WaitSemaphores = new[] { ImageAvailableSemaphores[CurrentFrame] }
            },
            InFlightFences[CurrentFrame]
        );
    }
    catch (DeviceLostException)
    {
        InvalidateDevice();
    }

    try
    {
        var present = GraphicDevice.PresentQueue.Present(RenderFinishedSemaphores[CurrentFrame], Swapchain.Handle, nextImage, new Result[1]);

        switch (present)
        {
            case Result.Suboptimal:
                InvalidateGraphics();
                break;
        }

        if (present == Result.Success)
        {
            // Frame completed: free the command buffers of the next slot so
            // they can be re-recorded on its upcoming turn.
            GraphicDevice.Handle.WaitForFences(InFlightFences[CurrentFrame], true, UInt64.MaxValue);
            Commands[(CurrentFrame + 1) % MaxFrameInFlight].FreeCommandBuffers(CommandBuffers[(CurrentFrame + 1) % MaxFrameInFlight]);
            CommandBuffers[(CurrentFrame + 1) % MaxFrameInFlight] = null;
        }
    }
    catch
    {
        // NOTE(review): bare catch swallows every present failure and just
        // invalidates graphics — confirm this best-effort behavior is intended.
        InvalidateGraphics();
    }
    finally
    {
    }

    CurrentFrame = (CurrentFrame + 1) % MaxFrameInFlight;
}
// Unity singleton wiring: publish this component through the static
// 'instance' field as soon as the object wakes up.
private void Awake()
{
    instance = this;
}
// Checks that a dotted token survives analysis as a two-term phrase query,
// then times query parsing via LucQuery against the raw Lucene QueryParser.
public void Querying_Analyzers()
{
    var query = LucQuery.Parse("'Mr.John Smith'");
    var s = query.ToString();
    var pq = query.Query as Lucene.Net.Search.PhraseQuery;
    // NOTE(review): if pq is null the message expression pq.GetType() throws
    // before the assert reports — confirm whether that is acceptable here.
    Assert.IsNotNull(pq, String.Concat("Parsed query is: ", pq.GetType().Name, ". Expected: PhraseQuery"));
    var terms = pq.GetTerms();
    Assert.IsTrue(terms.Length == 2, String.Concat("Count of terms is: ", terms.Length, ". Expected: 2"));
    Assert.IsTrue(terms[0].Text() == "mr.john", String.Concat("First term is ", terms[0].Text(), ". Expected: 'mr.john'"));
    Assert.IsTrue(terms[1].Text() == "smith", String.Concat("Second term is ", terms[1].Text(), ". Expected: 'smith'"));

    var qtext = "\"Mr.John Smith\"";
    //var qtext = "(InTree:/Root/Site1/Folder1/Folder2/Folder3 OR InTree:/Root/Site2/Folder1/Folder2/Folder3/Folder5/Folder6) AND Type:Folder AND _Text:\"Mr.John Smith\"";
    Lucene.Net.Search.Query q;

    // t0: baseline loop cost; t1: 1000 parses via LucQuery; t2: 1000 parses
    // via the raw Lucene QueryParser. Values are captured but not asserted.
    var k = 0;
    var stopper = Stopwatch.StartNew();
    for (int i = 0; i < 10000000; i++)
    {
        k++;
    }
    var t0 = stopper.ElapsedMilliseconds;
    stopper.Stop();

    stopper = Stopwatch.StartNew();
    for (int i = 0; i < 1000; i++)
    {
        q = LucQuery.Parse(qtext).Query;
    }
    var t1 = stopper.ElapsedMilliseconds;
    stopper.Stop();

    stopper = Stopwatch.StartNew();
    for (int i = 0; i < 1000; i++)
    {
        q = new Lucene.Net.QueryParsers.QueryParser(LuceneManager.LuceneVersion, "_Text", IndexManager.GetAnalyzer()).Parse(qtext);
    }
    var t2 = stopper.ElapsedMilliseconds;
    stopper.Stop();
}
/// <summary>
/// Creates a parser wired to the analyzer shared by the indexing subsystem.
/// </summary>
public SnLucParser()
{
    _masterAnalyzer = IndexManager.GetAnalyzer();
}
protected override Task <bool> ProtectedExecuteAsync(CancellationToken cancellationToken)
{
    // Passthrough: the IndexManager performs the asynchronous update,
    // honoring the caller's cancellation token.
    return IndexManager.UpdateDocumentAsync(Document, Versioning, cancellationToken);
}
//---- Caller: Startup, ForceRestore
// Replays indexing activities not processed before shutdown: first the gap
// recorded in the index commit point (in fragments), then every activity
// newer than the last known activity id.
internal static void ExecuteUnprocessedIndexingActivities(System.IO.TextWriter consoleOut)
{
    lock (_executingUnprocessedIndexingActivitiesLock)
    {
        try
        {
            _executingUnprocessedIndexingActivities = true;

            // Read the commit point (last activity id + gap) stored with the index.
            CommitUserData cud;
            using (var readerFrame = LuceneManager.GetIndexReaderFrame())
            {
                cud = IndexManager.ReadCommitUserData(readerFrame.IndexReader);
            }
            MissingActivityHandler.MaxActivityId = cud.LastActivityId;
            MissingActivityHandler.SetGap(cud.Gap);

            var logProps = new Dictionary <string, object>
            {
                { "LastActivityID", cud.LastActivityId },
                { "Size of gap", cud.Gap.Count }
            };
            Logger.WriteInformation("Executing unprocessed indexing activities from the stored commit point.", Logger.EmptyCategoryList, logProps);

            var i = 0;
            var sumCount = 0;

            // This loop was created to avoid loading too many activities at once that are present in the gap.
            while (i * ACTIVITIESFRAGMENTSIZE <= cud.Gap.Count)
            {
                // get activities from the DB that are in the current gap fragment
                var gapSegment = cud.Gap.Skip(i * ACTIVITIESFRAGMENTSIZE).Take(ACTIVITIESFRAGMENTSIZE).ToArray();
                var activities = IndexingActivityManager.GetUnprocessedActivities(gapSegment);

                ProcessTasks(activities, consoleOut);
                sumCount += activities.Length;
                i++;
            }

            // Execute activities whose id is bigger than our last (activity) task id.
            var maxIdInDb = 0;
            var newtasks = IndexingActivityManager.GetUnprocessedActivities(MissingActivityHandler.MaxActivityId, out maxIdInDb, ACTIVITIESFRAGMENTSIZE);
            while (newtasks.Length > 0)
            {
                ProcessTasks(newtasks, consoleOut);
                sumCount += newtasks.Length;

                // load the remaining activities, but only if they were created before this operation started
                // (maxIdInDb from the first query is the upper bound; tempMax is intentionally discarded)
                var tempMax = 0;
                newtasks = IndexingActivityManager.GetUnprocessedActivities(MissingActivityHandler.MaxActivityId, out tempMax, ACTIVITIESFRAGMENTSIZE, maxIdInDb);
            }

            if (consoleOut != null)
            {
                consoleOut.WriteLine("ok.");
            }

            logProps.Add("Processed tasks", sumCount);

            // write the latest max activity id and gap size to log
            logProps["LastActivityID"] = MissingActivityHandler.MaxActivityId;
            logProps["Size of gap"] = MissingActivityHandler.GetGap().Count;
            Logger.WriteInformation("Executing unprocessed tasks is finished.", Logger.EmptyCategoryList, logProps);
        }
        finally
        {
            _executingUnprocessedIndexingActivities = false;
        }
    }
}