/// <summary>Loads every TResultDetail row into a freshly created collection.</summary>
/// <returns>The populated <see cref="TResultDetailCollection"/>.</returns>
public TResultDetailCollection FetchAll()
{
    var all = new TResultDetailCollection();
    var unfiltered = new Query(TResultDetail.Schema);
    all.LoadAndCloseReader(unfiltered.ExecuteReader());
    return all;
}
/// <summary>Loads every SysTakeawayMenu row into a freshly created collection.</summary>
/// <returns>The populated <see cref="SysTakeawayMenuCollection"/>.</returns>
public SysTakeawayMenuCollection FetchAll()
{
    var all = new SysTakeawayMenuCollection();
    var unfiltered = new Query(SysTakeawayMenu.Schema);
    all.LoadAndCloseReader(unfiltered.ExecuteReader());
    return all;
}
/// <summary>Loads every KcbDanhsachBenhnhan row into a freshly created collection.</summary>
/// <returns>The populated <see cref="KcbDanhsachBenhnhanCollection"/>.</returns>
public KcbDanhsachBenhnhanCollection FetchAll()
{
    var all = new KcbDanhsachBenhnhanCollection();
    var unfiltered = new Query(KcbDanhsachBenhnhan.Schema);
    all.LoadAndCloseReader(unfiltered.ExecuteReader());
    return all;
}
/// <summary>Loads every ErrorLog row into a freshly created collection.</summary>
/// <returns>The populated <see cref="ErrorLogCollection"/>.</returns>
public ErrorLogCollection FetchAll()
{
    var all = new ErrorLogCollection();
    var unfiltered = new Query(ErrorLog.Schema);
    all.LoadAndCloseReader(unfiltered.ExecuteReader());
    return all;
}
/// <summary>Loads every DisposeTable row into a freshly created collection.</summary>
/// <returns>The populated <see cref="DisposeTableCollection"/>.</returns>
public DisposeTableCollection FetchAll()
{
    var all = new DisposeTableCollection();
    var unfiltered = new Query(DisposeTable.Schema);
    all.LoadAndCloseReader(unfiltered.ExecuteReader());
    return all;
}
/// <summary>Loads every CostingSummary row into a freshly created collection.</summary>
/// <returns>The populated <see cref="CostingSummaryCollection"/>.</returns>
public CostingSummaryCollection FetchAll()
{
    var all = new CostingSummaryCollection();
    var unfiltered = new Query(CostingSummary.Schema);
    all.LoadAndCloseReader(unfiltered.ExecuteReader());
    return all;
}
/// <summary>Loads every StudyHistory row into a freshly created collection.</summary>
/// <returns>The populated <see cref="StudyHistoryCollection"/>.</returns>
public StudyHistoryCollection FetchAll()
{
    StudyHistoryCollection all = new StudyHistoryCollection();
    all.LoadAndCloseReader(new Query(StudyHistory.Schema).ExecuteReader());
    return all;
}
/// <summary>Loads every BankAccount row into a freshly created collection.</summary>
/// <returns>The populated <see cref="BankAccountCollection"/>.</returns>
public BankAccountCollection FetchAll()
{
    var all = new BankAccountCollection();
    var unfiltered = new Query(BankAccount.Schema);
    all.LoadAndCloseReader(unfiltered.ExecuteReader());
    return all;
}
/// <summary>Loads every B3LookupContactID row into a freshly created collection.</summary>
/// <returns>The populated <see cref="B3LookupContactIDCollection"/>.</returns>
public B3LookupContactIDCollection FetchAll()
{
    var all = new B3LookupContactIDCollection();
    var unfiltered = new Query(B3LookupContactID.Schema);
    all.LoadAndCloseReader(unfiltered.ExecuteReader());
    return all;
}
/// <summary>Loads every QheDautheQloiBhyt row into a freshly created collection.</summary>
/// <returns>The populated <see cref="QheDautheQloiBhytCollection"/>.</returns>
public QheDautheQloiBhytCollection FetchAll()
{
    var all = new QheDautheQloiBhytCollection();
    var unfiltered = new Query(QheDautheQloiBhyt.Schema);
    all.LoadAndCloseReader(unfiltered.ExecuteReader());
    return all;
}
/// <summary>Loads every QheDoituongThuoc row into a freshly created collection.</summary>
/// <returns>The populated <see cref="QheDoituongThuocCollection"/>.</returns>
public QheDoituongThuocCollection FetchAll()
{
    var all = new QheDoituongThuocCollection();
    var unfiltered = new Query(QheDoituongThuoc.Schema);
    all.LoadAndCloseReader(unfiltered.ExecuteReader());
    return all;
}
/// <summary>Loads every NoitruDmucGiuongbenh row into a freshly created collection.</summary>
/// <returns>The populated <see cref="NoitruDmucGiuongbenhCollection"/>.</returns>
public NoitruDmucGiuongbenhCollection FetchAll()
{
    var all = new NoitruDmucGiuongbenhCollection();
    var unfiltered = new Query(NoitruDmucGiuongbenh.Schema);
    all.LoadAndCloseReader(unfiltered.ExecuteReader());
    return all;
}
/// <summary>Loads every AspnetPersonalizationAllUser row into a freshly created collection.</summary>
/// <returns>The populated <see cref="AspnetPersonalizationAllUserCollection"/>.</returns>
public AspnetPersonalizationAllUserCollection FetchAll()
{
    var all = new AspnetPersonalizationAllUserCollection();
    var unfiltered = new Query(AspnetPersonalizationAllUser.Schema);
    all.LoadAndCloseReader(unfiltered.ExecuteReader());
    return all;
}
/// <summary>Loads every CustomizedProductDisplayType row into a freshly created collection.</summary>
/// <returns>The populated <see cref="CustomizedProductDisplayTypeCollection"/>.</returns>
public CustomizedProductDisplayTypeCollection FetchAll()
{
    var all = new CustomizedProductDisplayTypeCollection();
    var unfiltered = new Query(CustomizedProductDisplayType.Schema);
    all.LoadAndCloseReader(unfiltered.ExecuteReader());
    return all;
}
/// <summary>Loads every TDutruThuoc row into a freshly created collection.</summary>
/// <returns>The populated <see cref="TDutruThuocCollection"/>.</returns>
public TDutruThuocCollection FetchAll()
{
    var all = new TDutruThuocCollection();
    var unfiltered = new Query(TDutruThuoc.Schema);
    all.LoadAndCloseReader(unfiltered.ExecuteReader());
    return all;
}
/// <summary>Loads every DDeviceDatatype row into a freshly created collection.</summary>
/// <returns>The populated <see cref="DDeviceDatatypeCollection"/>.</returns>
public DDeviceDatatypeCollection FetchAll()
{
    var all = new DDeviceDatatypeCollection();
    var unfiltered = new Query(DDeviceDatatype.Schema);
    all.LoadAndCloseReader(unfiltered.ExecuteReader());
    return all;
}
/// <summary>Loads every Registration1 row into a freshly created collection.</summary>
/// <returns>The populated <see cref="Registration1Collection"/>.</returns>
public Registration1Collection FetchAll()
{
    var all = new Registration1Collection();
    var unfiltered = new Query(Registration1.Schema);
    all.LoadAndCloseReader(unfiltered.ExecuteReader());
    return all;
}
/// <summary>Loads every ServiceLock row into a freshly created collection.</summary>
/// <returns>The populated <see cref="ServiceLockCollection"/>.</returns>
public ServiceLockCollection FetchAll()
{
    ServiceLockCollection all = new ServiceLockCollection();
    all.LoadAndCloseReader(new Query(ServiceLock.Schema).ExecuteReader());
    return all;
}
/// <summary>Loads every QheDichvuHinhanhVungkhaosat row into a freshly created collection.</summary>
/// <returns>The populated <see cref="QheDichvuHinhanhVungkhaosatCollection"/>.</returns>
public QheDichvuHinhanhVungkhaosatCollection FetchAll()
{
    var all = new QheDichvuHinhanhVungkhaosatCollection();
    var unfiltered = new Query(QheDichvuHinhanhVungkhaosat.Schema);
    all.LoadAndCloseReader(unfiltered.ExecuteReader());
    return all;
}
/// <summary>Loads every FavLink row into a freshly created collection.</summary>
/// <returns>The populated <see cref="FavLinkCollection"/>.</returns>
public FavLinkCollection FetchAll()
{
    var all = new FavLinkCollection();
    var unfiltered = new Query(FavLink.Schema);
    all.LoadAndCloseReader(unfiltered.ExecuteReader());
    return all;
}
/// <summary>Loads every Employee row into a freshly created collection.</summary>
/// <returns>The populated <see cref="EmployeeCollection"/>.</returns>
public EmployeeCollection FetchAll()
{
    var all = new EmployeeCollection();
    var unfiltered = new Query(Employee.Schema);
    all.LoadAndCloseReader(unfiltered.ExecuteReader());
    return all;
}
/// <summary>Loads every SysResource row into a freshly created collection.</summary>
/// <returns>The populated <see cref="SysResourceCollection"/>.</returns>
public SysResourceCollection FetchAll()
{
    var all = new SysResourceCollection();
    var unfiltered = new Query(SysResource.Schema);
    all.LoadAndCloseReader(unfiltered.ExecuteReader());
    return all;
}
/// <summary>
/// Extracts all term texts of a given Query into an array of WeightedTerms,
/// scaling each term's weight by its Inverse Document Frequency (IDF).
/// </summary>
/// <param name="query">Query to extract term texts from</param>
/// <param name="reader">used to compute IDF which can be used to a) score selected fragments better
/// b) use graded highlights e.g. changing intensity of font color</param>
/// <param name="fieldName">the field on which Inverse Document Frequency (IDF) calculations are based</param>
/// <returns>an array of the terms used in a query, plus their weights.</returns>
public static WeightedTerm[] GetIdfWeightedTerms(Query query, IndexReader reader, string fieldName)
{
    WeightedTerm[] terms = GetTerms(query, false, fieldName);
    int totalNumDocs = reader.NumDocs();
    foreach (WeightedTerm t in terms)
    {
        try
        {
            int docFreq = reader.DocFreq(new Term(fieldName, t.Term));
            // docFreq counts deleted documents too, so clamp it to keep the ratio sane.
            if (totalNumDocs < docFreq)
            {
                docFreq = totalNumDocs;
            }
            // IDF algorithm taken from DefaultSimilarity class.
            var idf = (float)(Math.Log((float)totalNumDocs / (double)(docFreq + 1)) + 1.0);
            t.Weight *= idf;
        }
        catch (IOException) // fix: drop the unused exception variable (CS0168)
        {
            // Deliberately ignored: a failed DocFreq lookup leaves this term's weight unscaled.
        }
    }
    return terms;
}
/// <summary>Loads every URLTagsExt row into a freshly created collection.</summary>
/// <returns>The populated <see cref="URLTagsExtCollection"/>.</returns>
public URLTagsExtCollection FetchAll()
{
    var all = new URLTagsExtCollection();
    var unfiltered = new Query(URLTagsExt.Schema);
    all.LoadAndCloseReader(unfiltered.ExecuteReader());
    return all;
}
/// <summary>Loads every DmucDiachinh row into a freshly created collection.</summary>
/// <returns>The populated <see cref="DmucDiachinhCollection"/>.</returns>
public DmucDiachinhCollection FetchAll()
{
    var all = new DmucDiachinhCollection();
    var unfiltered = new Query(DmucDiachinh.Schema);
    all.LoadAndCloseReader(unfiltered.ExecuteReader());
    return all;
}
/// <summary>
/// Round-trips a multi-value attribute (Story.Owners): verifies the initial
/// single owner, adds a second, removes the first, then removes the last,
/// asserting the enumerated values (and their order) after each mutation.
/// </summary>
public void MultiValueAttribute()
{
    Services subject = new Services(Meta, DataConnector);
    Query queryStories = new Query(Oid.FromToken("Story:1063", Meta));
    IAttributeDefinition ownersDef = Meta.GetAttributeDefinition("Story.Owners");
    queryStories.Selection.Add(ownersDef);
    QueryResult resultStories = subject.Retrieve(queryStories);
    Asset story = resultStories.Assets[0];
    Oid oldMember = Oid.FromToken("Member:1001", Meta);
    Oid newMember = Oid.FromToken("Member:20", Meta);
    // Initial state: exactly one owner, oldMember.
    IEnumerator owners = story.GetAttribute(ownersDef).Values.GetEnumerator();
    Assert.IsTrue(owners.MoveNext());
    Assert.AreEqual(oldMember, owners.Current);
    Assert.IsFalse(owners.MoveNext());
    // After adding newMember: both owners, old before new.
    story.AddAttributeValue(ownersDef, newMember);
    owners = story.GetAttribute(ownersDef).Values.GetEnumerator();
    Assert.IsTrue(owners.MoveNext());
    Assert.AreEqual(oldMember, owners.Current);
    Assert.IsTrue(owners.MoveNext());
    Assert.AreEqual(newMember, owners.Current);
    Assert.IsFalse(owners.MoveNext());
    // After removing oldMember: only newMember remains.
    story.RemoveAttributeValue(ownersDef, oldMember);
    owners = story.GetAttribute(ownersDef).Values.GetEnumerator();
    Assert.IsTrue(owners.MoveNext());
    Assert.AreEqual(newMember, owners.Current);
    Assert.IsFalse(owners.MoveNext());
    // After removing newMember: the attribute enumerates no values.
    story.RemoveAttributeValue(ownersDef, newMember);
    owners = story.GetAttribute(ownersDef).Values.GetEnumerator();
    Assert.IsFalse(owners.MoveNext());
}
/// <summary>Loads every KcbDangkySokham row into a freshly created collection.</summary>
/// <returns>The populated <see cref="KcbDangkySokhamCollection"/>.</returns>
public KcbDangkySokhamCollection FetchAll()
{
    var all = new KcbDangkySokhamCollection();
    var unfiltered = new Query(KcbDangkySokham.Schema);
    all.LoadAndCloseReader(unfiltered.ExecuteReader());
    return all;
}
/// <summary>
/// Timer tick handler: closes the info window, queries the census map layer
/// for the feature at the last tap point, and (when the async query returns
/// at least one feature) shows its NAME attribute in the info window.
/// The timer is stopped so the query runs once per tick.
/// </summary>
void dispatcherTimer_Tick(object sender, EventArgs e)
{
    _infoWindow.IsOpen = false;
    QueryTask queryTask = new QueryTask("http://sampleserver1.arcgisonline.com/ArcGIS/rest/services/Demographics/ESRI_Census_USA/MapServer/4");
    // Spatial query anchored at the tap point, in the map's spatial reference.
    Query query = new Query() { Geometry = _tapPoint, OutSpatialReference = MyMap.SpatialReference };
    query.OutFields.Add("NAME");
    // Completion callback fires asynchronously after ExecuteAsync below.
    queryTask.ExecuteCompleted += (s, evt) =>
    {
        if (evt.FeatureSet.Features.Count > 0)
        {
            _infoWindow.Anchor = _tapPoint;
            (_infoWindow.Content as TextBlock).Text = evt.FeatureSet.Features[0].Attributes["NAME"] as string;
            _infoWindow.IsOpen = true;
        }
    };
    queryTask.ExecuteAsync(query);
    _dispatcherTimer.Stop();
}
/// <summary>Loads every PGenu row into a freshly created collection.</summary>
/// <returns>The populated <see cref="PGenuCollection"/>.</returns>
public PGenuCollection FetchAll()
{
    var all = new PGenuCollection();
    var unfiltered = new Query(PGenu.Schema);
    all.LoadAndCloseReader(unfiltered.ExecuteReader());
    return all;
}
/// <summary>Loads every TPhieuCapphatChitiet row into a freshly created collection.</summary>
/// <returns>The populated <see cref="TPhieuCapphatChitietCollection"/>.</returns>
public TPhieuCapphatChitietCollection FetchAll()
{
    var all = new TPhieuCapphatChitietCollection();
    var unfiltered = new Query(TPhieuCapphatChitiet.Schema);
    all.LoadAndCloseReader(unfiltered.ExecuteReader());
    return all;
}
/// <summary>
/// Adds every column of <see cref="DiaryEventType"/> (aliased "EventType")
/// to the query's selection, then wires in the corresponding join.
/// </summary>
/// <param name="query">The query being built up with related columns.</param>
protected override void SelectAllRelated(Query query)
{
    query.SelectAllColumns(typeof(DiaryEventType), "EventType");
    JoinRelated(query);
}
/// <summary>
/// Executes a two-pass grouped search, grouping either by field
/// (when groupFunction is null) or by value-source function. Optionally
/// collects all group values / all group heads alongside the first pass,
/// and may cache first-pass hits so the second pass can replay them instead
/// of re-searching.
/// </summary>
protected virtual ITopGroups<TGroupValue> GroupByFieldOrFunction<TGroupValue>(IndexSearcher searcher, Filter filter, Query query, int groupOffset, int groupLimit)
{
    // First pass must collect enough groups to cover offset + limit.
    int topN = groupOffset + groupLimit;
    IAbstractFirstPassGroupingCollector<TGroupValue> firstPassCollector;
    IAbstractAllGroupsCollector<TGroupValue> allGroupsCollector;
    AbstractAllGroupHeadsCollector allGroupHeadsCollector;
    // Pick function-based or term(field)-based collectors for the first pass.
    if (groupFunction != null)
    {
        firstPassCollector = (IAbstractFirstPassGroupingCollector<TGroupValue>)new FunctionFirstPassGroupingCollector(groupFunction, valueSourceContext, groupSort, topN);
        if (allGroups)
        {
            allGroupsCollector = (IAbstractAllGroupsCollector<TGroupValue>)new FunctionAllGroupsCollector(groupFunction, valueSourceContext);
        }
        else
        {
            allGroupsCollector = null;
        }
        if (allGroupHeads)
        {
            allGroupHeadsCollector = new FunctionAllGroupHeadsCollector(groupFunction, valueSourceContext, sortWithinGroup);
        }
        else
        {
            allGroupHeadsCollector = null;
        }
    }
    else
    {
        firstPassCollector = (IAbstractFirstPassGroupingCollector<TGroupValue>)new TermFirstPassGroupingCollector(groupField, groupSort, topN);
        if (allGroups)
        {
            allGroupsCollector = (IAbstractAllGroupsCollector<TGroupValue>)new TermAllGroupsCollector(groupField, initialSize);
        }
        else
        {
            allGroupsCollector = null;
        }
        if (allGroupHeads)
        {
            allGroupHeadsCollector = TermAllGroupHeadsCollector.Create(groupField, sortWithinGroup, initialSize);
        }
        else
        {
            allGroupHeadsCollector = null;
        }
    }
    // Wrap the optional collectors with the first-pass collector so one
    // search feeds all of them.
    ICollector firstRound;
    if (allGroupHeads || allGroups)
    {
        List<ICollector> collectors = new List<ICollector>();
        collectors.Add(firstPassCollector);
        if (allGroups)
        {
            collectors.Add(allGroupsCollector);
        }
        if (allGroupHeads)
        {
            collectors.Add(allGroupHeadsCollector);
        }
        firstRound = MultiCollector.Wrap(collectors.ToArray(/* new Collector[collectors.size()] */));
    }
    else
    {
        firstRound = firstPassCollector;
    }
    // Optionally cache first-pass hits (bounded by RAM or doc count) for replay.
    CachingCollector cachedCollector = null;
    if (maxCacheRAMMB != null || maxDocsToCache != null)
    {
        if (maxCacheRAMMB != null)
        {
            cachedCollector = CachingCollector.Create(firstRound, cacheScores, maxCacheRAMMB.Value);
        }
        else
        {
            cachedCollector = CachingCollector.Create(firstRound, cacheScores, maxDocsToCache.Value);
        }
        searcher.Search(query, filter, cachedCollector);
    }
    else
    {
        searcher.Search(query, filter, firstRound);
    }
    // Publish the optional all-groups / group-heads results.
    if (allGroups)
    {
        matchingGroups = (IList)allGroupsCollector.Groups;
    }
    else
    {
        matchingGroups = new List<TGroupValue>();
    }
    if (allGroupHeads)
    {
        matchingGroupHeads = allGroupHeadsCollector.RetrieveGroupHeads(searcher.IndexReader.MaxDoc);
    }
    else
    {
        matchingGroupHeads = new Bits.MatchNoBits(searcher.IndexReader.MaxDoc);
    }
    IEnumerable<ISearchGroup<TGroupValue>> topSearchGroups = firstPassCollector.GetTopGroups(groupOffset, fillSortFields);
    // No groups matched: return an empty result rather than running pass two.
    if (topSearchGroups == null)
    {
        return (new TopGroups<TGroupValue>(new SortField[0], new SortField[0], 0, 0, new GroupDocs<TGroupValue> [0], float.NaN));
    }
    int topNInsideGroup = groupDocsOffset + groupDocsLimit;
    // Second pass collects the per-group documents for the surviving groups.
    IAbstractSecondPassGroupingCollector<TGroupValue> secondPassCollector;
    if (groupFunction != null)
    {
        secondPassCollector = new FunctionSecondPassGroupingCollector(topSearchGroups as IEnumerable<ISearchGroup<MutableValue>>, groupSort, sortWithinGroup, topNInsideGroup, includeScores, includeMaxScore, fillSortFields, groupFunction, valueSourceContext) as IAbstractSecondPassGroupingCollector<TGroupValue>;
    }
    else
    {
        secondPassCollector = new TermSecondPassGroupingCollector(groupField, topSearchGroups as IEnumerable<ISearchGroup<BytesRef>>, groupSort, sortWithinGroup, topNInsideGroup, includeScores, includeMaxScore, fillSortFields) as IAbstractSecondPassGroupingCollector<TGroupValue>;
    }
    // Replay cached hits when available; otherwise re-run the search.
    if (cachedCollector != null && cachedCollector.IsCached)
    {
        cachedCollector.Replay(secondPassCollector);
    }
    else
    {
        searcher.Search(query, filter, secondPassCollector);
    }
    if (allGroups)
    {
        // Include the total matched-group count when allGroups was requested.
        return (new TopGroups<TGroupValue>(secondPassCollector.GetTopGroups(groupDocsOffset), matchingGroups.Count));
    }
    else
    {
        return (secondPassCollector.GetTopGroups(groupDocsOffset));
    }
}
/// <summary>
/// Executes a grouped search. Both the first pass and second pass are executed on the specified searcher.
/// </summary>
/// <typeparam name="TGroupValue">The expected return type of the search.</typeparam>
/// <param name="searcher">The <see cref="IndexSearcher"/> instance to execute the grouped search on.</param>
/// <param name="filter">The filter to execute with the grouping</param>
/// <param name="query">The query to execute with the grouping</param>
/// <param name="groupOffset">The group offset</param>
/// <param name="groupLimit">The number of groups to return from the specified group offset</param>
/// <returns>the grouped result as a <see cref="ITopGroups{Object}"/> instance</returns>
/// <exception cref="System.IO.IOException">If any I/O related errors occur</exception>
public virtual ITopGroups<TGroupValue> Search<TGroupValue>(IndexSearcher searcher, Filter filter, Query query, int groupOffset, int groupLimit)
{
    bool groupedByFieldOrFunction = groupField != null || groupFunction != null;
    if (groupedByFieldOrFunction)
    {
        return GroupByFieldOrFunction<TGroupValue>(searcher, filter, query, groupOffset, groupLimit);
    }
    if (groupEndDocs != null)
    {
        return GroupByDocBlock<TGroupValue>(searcher, filter, query, groupOffset, groupLimit);
    }
    // This can't happen...
    throw new InvalidOperationException("Either groupField, groupFunction or groupEndDocs must be set.");
}
/// <summary>
/// Executes a grouped search without a filter; both passes run on the given searcher.
/// </summary>
/// <typeparam name="TGroupValue">The expected return type of the search.</typeparam>
/// <param name="searcher">The <see cref="IndexSearcher"/> instance to execute the grouped search on.</param>
/// <param name="query">The query to execute with the grouping</param>
/// <param name="groupOffset">The group offset</param>
/// <param name="groupLimit">The number of groups to return from the specified group offset</param>
/// <returns>the grouped result as a <see cref="ITopGroups{Object}"/> instance</returns>
/// <exception cref="System.IO.IOException">If any I/O related errors occur</exception>
public virtual ITopGroups<TGroupValue> Search<TGroupValue>(IndexSearcher searcher, Query query, int groupOffset, int groupLimit)
    => Search<TGroupValue>(searcher, null, query, groupOffset, groupLimit);
/// <summary>
/// Executes a grouped search without a filter, returning untyped group values.
/// </summary>
/// <param name="searcher">The <see cref="IndexSearcher"/> instance to execute the grouped search on.</param>
/// <param name="query">The query to execute with the grouping</param>
/// <param name="groupOffset">The group offset</param>
/// <param name="groupLimit">The number of groups to return from the specified group offset</param>
/// <returns>the grouped result as a <see cref="ITopGroups{Object}"/> instance</returns>
/// <exception cref="System.IO.IOException">If any I/O related errors occur</exception>
public virtual ITopGroups<object> Search(IndexSearcher searcher, Query query, int groupOffset, int groupLimit)
    => Search<object>(searcher, null, query, groupOffset, groupLimit);
/// <summary>
/// Compiles the SqlKata query and runs a Dapper multi-mapping query,
/// attaching each row's <see cref="DiaryEventType"/> to its template.
/// </summary>
/// <param name="query">The SqlKata query to compile and execute.</param>
/// <returns>The materialized diary event templates.</returns>
protected override async Task<IEnumerable<DiaryEventTemplate>> ExecuteQuery(Query query)
{
    var compiled = Compiler.Compile(query);
    return await Connection.QueryAsync<DiaryEventTemplate, DiaryEventType, DiaryEventTemplate>(
        compiled.Sql,
        (diaryTemplate, diaryEventType) =>
        {
            diaryTemplate.DiaryEventType = diaryEventType;
            return diaryTemplate;
        },
        compiled.NamedBindings);
}
/// <summary>Left-joins DiaryEventTypes (aliased "EventType") on the template's EventTypeId.</summary>
/// <param name="query">The query to add the join to.</param>
protected override void JoinRelated(Query query)
    => query.LeftJoin("DiaryEventTypes as EventType", "EventType.Id", "DiaryEventTemplate.EventTypeId");
/// <summary>
/// Removes this job's document from the MongoDB job queue (matched by Id)
/// and records the removal in <c>_removedFromQueue</c>.
/// </summary>
public void RemoveFromQueue()
{
    // Remove first, then set the flag: if Remove throws, the flag stays false.
    _connection.JobQueue.Remove(Query<JobQueueDto>.EQ(_ => _.Id, Id));
    _removedFromQueue = true;
}
/// <summary>Default constructor with navigated query.</summary>
/// <param name="query">Navigated query, forwarded to the base class.</param>
internal QueryNavigator(Query query) : base(query) { }
/// <summary>
/// Resolves this mapped property's value by running a Sitecore query from
/// <paramref name="item"/>. For IEnumerable&lt;&gt; properties it builds a deferred
/// item-fetching delegate and maps results via service.CreateClasses; for
/// scalar properties it selects a single item and maps it via CreateClass.
/// Results are filtered to the item's language via GetLanguageItem(s).
/// </summary>
public override object GetValue(global::Sitecore.Data.Items.Item item, ISitecoreService service)
{
    string query = ParseQuery(Query, item);
    if (Property.PropertyType.IsGenericType)
    {
        Type outerType = Utility.GetGenericOuter(Property.PropertyType);
        // Only IEnumerable<> is supported as the generic container.
        if (typeof(IEnumerable<>) == outerType)
        {
            Type genericType = Utility.GetGenericArgument(Property.PropertyType);
            Func<IEnumerable<Item>> getItems = null;
            if (IsRelative)
            {
                // Relative query: evaluated from the item's own axes.
                getItems = new Func<IEnumerable<Item>>(() => { return (GetLanguageItems(item.Axes.SelectItems(query), item.Language)); });
            }
            else
            {
                getItems = new Func<IEnumerable<Item>>(() =>
                {
                    if (UseQueryContext)
                    {
                        // Execute via the low-level query API; the result may be
                        // a single QueryContext or an array of them.
                        Query conQuery = new Query(query);
                        QueryContext queryContext = new QueryContext(item.Database.DataManager);
                        object obj = conQuery.Execute(queryContext);
                        QueryContext[] contextArray = obj as QueryContext[];
                        QueryContext context = obj as QueryContext;
                        // NOTE(review): semicolon placement below looks like a formatter
                        // artifact of "contextArray = new QueryContext[] { context };" — verify.
                        if (contextArray == null) { contextArray = new QueryContext[] { context } }
                        ;
                        return (GetLanguageItems(contextArray.Select(x => item.Database.GetItem(x.ID)), item.Language));
                    }
                    else
                    {
                        return (GetLanguageItems(item.Database.SelectItems(query), item.Language));
                    }
                });
            }
            // NOTE(review): 'array' is never used, yet invoking getItems here runs
            // the query eagerly and discards the result — likely wasted work; confirm
            // whether the eager evaluation is intentional before removing.
            var array = getItems.Invoke().ToArray();
            return (service.CreateClasses(IsLazy, InferType, genericType, getItems));
        }
        else
        {
            throw new NotSupportedException("Generic type not supported {0}. Must be IEnumerable<>.".Formatted(outerType.FullName));
        }
    }
    else
    {
        // Scalar property: fetch a single item and map it.
        Item result = null;
        if (IsRelative)
        {
            result = GetLanguageItem(item.Axes.SelectSingleItem(query), item.Language);
        }
        else
        {
            result = GetLanguageItem(item.Database.SelectSingleItem(query), item.Language);
        }
        return (service.CreateClass(IsLazy, InferType, Property.PropertyType, result));
    }
}
/// <summary>
/// CSSPDB Infrastructures table service constructor.
/// </summary>
/// <param name="query">[Query](CSSPModels.Query.html) object for filtering of service functions</param>
/// <param name="db">[CSSPDBContext](CSSPModels.CSSPDBContext.html) referencing the CSSP database context</param>
/// <param name="ContactID">Contact identifier of the person connecting to the service</param>
public InfrastructureService(Query query, CSSPDBContext db, int ContactID)
    : base(query, db, ContactID)
{
    // All initialization is delegated to the base service constructor.
}
/// <summary>
/// Compiles the SqlKata query and runs a Dapper three-way multi-mapping query
/// on the ambient transaction's connection, attaching each row's Bill and
/// AccountTransaction to the returned BillAccountTransaction.
/// </summary>
/// <param name="query">The SqlKata query to compile and execute.</param>
/// <returns>The materialized bill/account transaction rows.</returns>
protected override async Task<IEnumerable<BillAccountTransaction>> ExecuteQuery(Query query)
{
    var compiled = Compiler.Compile(query);
    return await Transaction.Connection
        .QueryAsync<BillAccountTransaction, Bill, AccountTransaction, BillAccountTransaction>(
            compiled.Sql,
            (billAccountTransaction, bill, accountTransaction) =>
            {
                billAccountTransaction.Bill = bill;
                billAccountTransaction.AccountTransaction = accountTransaction;
                return billAccountTransaction;
            },
            compiled.NamedBindings,
            Transaction);
}
/// <summary>Program entry point: delegates straight to Query.RunQuery.</summary>
static void Main(string[] args) => Query.RunQuery();
/// <summary>
/// This function uses the Windows indexer and returns the list of results obtained.
/// Skips empty searches and searches matching ReservedStringPattern; each result's
/// action launches the file via the shell, optionally using the file's folder as
/// the working directory.
/// </summary>
/// <param name="query">The plugin query whose Search text is sent to the indexer.</param>
/// <returns>Results for the launcher UI (empty when skipped or on error).</returns>
public List<Result> Query(Query query)
{
    var results = new List<Result>();
    if (!string.IsNullOrEmpty(query.Search))
    {
        var searchQuery = query.Search;
        // Guard against a non-positive configured limit by restoring the default.
        if (_settings.MaxSearchCount <= 0)
        {
            _settings.MaxSearchCount = 50;
        }
        var regexMatch = Regex.Match(searchQuery, ReservedStringPattern);
        if (!regexMatch.Success)
        {
            try
            {
                var searchResultsList = _api.Search(searchQuery, maxCount: _settings.MaxSearchCount).ToList();
                foreach (var searchResult in searchResultsList)
                {
                    var path = searchResult.Path;
                    string workingDir = null;
                    if (_settings.UseLocationAsWorkingDir)
                    {
                        workingDir = Path.GetDirectoryName(path);
                    }
                    Result r = new Result();
                    r.Title = searchResult.Title;
                    r.SubTitle = "Search: " + path;
                    r.IcoPath = path;
                    // Action: open the file via the shell; a Win32 failure shows a
                    // message and keeps the launcher open (hide = false).
                    r.Action = c =>
                    {
                        bool hide;
                        try
                        {
                            Process.Start(new ProcessStartInfo
                            {
                                FileName = path,
                                UseShellExecute = true,
                                WorkingDirectory = workingDir
                            });
                            hide = true;
                        }
                        catch (Win32Exception)
                        {
                            var name = $"Plugin: {_context.CurrentPluginMetadata.Name}";
                            var msg = "Can't Open this file";
                            _context.API.ShowMsg(name, msg, string.Empty);
                            hide = false;
                        }
                        return (hide);
                    };
                    r.ContextData = searchResult;
                    //If the result is a directory, then it's display should show a directory.
                    if (Directory.Exists(path))
                    {
                        r.QueryTextDisplay = path;
                    }
                    results.Add(r);
                }
            }
            catch (InvalidOperationException)
            {
                //The connection has closed, internal error of ExecuteReader()
                //Not showing this exception to the users
            }
            catch (Exception ex)
            {
                // Any other indexer failure is logged and an empty result set returned.
                Log.Info(ex.ToString());
            }
        }
    }
    return (results);
}
/// <summary>
/// Handles a downloaded page: parses the house-detail table out of the HTML,
/// extracts the listing fields, maps the sale-state text to a status code,
/// and enqueues an update for the existing house document (matched by roomId).
/// On failure a NullReferenceException propagates and is caught by the main thread.
/// Example URL shape: cool62061/1.html?s=10&amp;d=0
/// </summary>
/// <param name="args">URL info plus the downloaded HTML payload.</param>
public void DataReceive(DataReceivedEventArgs args)
{
    HtmlDocument htmlDoc = new HtmlDocument();
    htmlDoc.LoadHtml(args.Html);
    var root = htmlDoc.DocumentNode;
    // The detail table carries the CSS class 'biankuang' ("border").
    var contentTableNode = root.SelectSingleNode("//table[@class='biankuang']");
    var roomId = args.urlInfo.UniqueKey;
    // Get page count (original comment); only proceed when the table exists.
    if (contentTableNode != null)
    {
        // NOTE(review): 'add' and 'update' counters are never used, and the
        // 'if (true)' wrapper is a no-op — presumably leftovers; confirm before removing.
        var add = 0;
        var update = 0;
        if (true)
        {
            var houseDoc = new BsonDocument();
            var content = contentTableNode.InnerText;
            // Each field is cut out of the table's inner text between its
            // Chinese label and the next newline, stripping spaces/units.
            var address = Toolslib.Str.Sub(content, "房屋坐落:\r\n", "\n").Replace(" ", "").Trim();
            var sampleAddress = Toolslib.Str.Sub(content, "样本区域:\r\n", "\n").Replace(" ", "").Trim();
            var roomNo = Toolslib.Str.Sub(content, "户室号:\r\n", "\n").Replace(" ", "").Trim();
            var innerArea = Toolslib.Str.Sub(content, "套内面积:\r\n", "\n").Replace("㎡", "").Replace(" ", "").Trim();
            var totalArea = Toolslib.Str.Sub(content, "总建筑面积:\r\n", "\n").Replace("㎡", "").Replace(" ", "").Trim();
            var publicArea = Toolslib.Str.Sub(content, "分摊面积:\r\n", "\n").Replace("㎡", "").Replace(" ", "").Trim();
            var type = Toolslib.Str.Sub(content, "类别:\r\n", "\n").Replace(" ", "").Trim();
            var purpose = Toolslib.Str.Sub(content, "设计用途:\r\n", "\n").Replace(" ", "").Trim();
            var structure = Toolslib.Str.Sub(content, "建筑结构:\r\n", "\n").Replace(" ", "").Trim();
            var price = Toolslib.Str.Sub(content, "一房一价:\r\n", "元").Replace(" ", "").Trim();
            var companyName = Toolslib.Str.Sub(content, "房开公司:\r\n", "\n").Replace(" ", "").Trim();
            var saleStateName = Toolslib.Str.Sub(content, "户室状态:\r\n", "\n").Replace(" ", "").Trim();
            houseDoc.Set("address", address);
            houseDoc.Set("sampleAddress", sampleAddress);
            houseDoc.Set("roomNo", roomNo);
            houseDoc.Set("innerArea", innerArea);
            houseDoc.Set("totalArea", totalArea);
            houseDoc.Set("publicArea", publicArea);
            houseDoc.Set("type", type);
            houseDoc.Set("purpose", purpose);
            houseDoc.Set("structure", structure);
            houseDoc.Set("price", price);
            houseDoc.Set("companyName", companyName);
            // Map the Chinese sale-state label to a coarse status flag:
            // "0" = not sold (on sale / resettlement / reserved / not for sale),
            // "1" = sold or committed (subscribed / pre-sale agreement / contract).
            switch (saleStateName)
            {
                case "正常发售": houseDoc.Set("moreSaleStatus", "0"); break;
                case "安置房": houseDoc.Set("moreSaleStatus", "0"); break;
                case "自留房": houseDoc.Set("moreSaleStatus", "0"); break;
                case "非出售": houseDoc.Set("moreSaleStatus", "0"); break;
                case "已认购": houseDoc.Set("moreSaleStatus", "1"); break;
                case "已签预定协议": houseDoc.Set("moreSaleStatus", "1"); break;
                case "已签合同": houseDoc.Set("moreSaleStatus", "1"); break;
                case "合同已登记": houseDoc.Set("moreSaleStatus", "1"); break;
                case "不在任何项目内": break;
            }
            // Any label containing "已销售" ("sold") also counts as sold.
            if (saleStateName.Contains("已销售")) { houseDoc.Set("moreSaleStatus", "1"); }
            // Prefer the bracketed 【...】 portion of the label when present.
            var tempSaleStateName = Toolslib.Str.Sub(saleStateName, "【", "】");
            if (!string.IsNullOrEmpty(tempSaleStateName)) { saleStateName = tempSaleStateName; }
            houseDoc.Set("moreSaleStatusName", saleStateName);
            // Only update houses we already track; enrich with project sale-date info.
            var curHouseObj = this.dataop.FindOneByQuery(this.DataTableNameHouse, Query.EQ("roomId", roomId));
            if (curHouseObj != null)
            {
                var hitProj = projectList.Where(c => c.Text("projId") == curHouseObj.Text("projId")).FirstOrDefault();
                if (hitProj != null)
                {
                    var date = hitProj.Date("saleDate");
                    houseDoc.Set("saleDate", hitProj.Date("saleDate").ToString("yyyy-MM-dd"));
                    houseDoc.Set("region", hitProj.Text("region"));
                    houseDoc.Set("year", date.Year.ToString());
                    houseDoc.Set("month", date.Month.ToString());
                    houseDoc.Set("day", date.Day.ToString());
                }
                houseDoc.Set("isUpdate", 1);
                DBChangeQueue.Instance.EnQueue(new StorageData() { Document = houseDoc, Query = Query.EQ("roomId", roomId), Name = DataTableNameHouse, Type = StorageType.Update });
            }
        }
    }
}
/// <summary>
/// Handler that returns every Activity from the database context.
/// </summary>
/// <param name="request">The (parameterless) list query.</param>
/// <param name="cancellationToken">Token used to cancel the database query.</param>
/// <returns>All activities, unfiltered.</returns>
public async Task<List<Activity>> Handle(Query request, CancellationToken cancellationToken)
{
    // Fix: flow the cancellation token into the async database call instead of
    // ignoring it, so an abandoned request cancels the underlying query.
    var activities = await _context.Activities.ToListAsync(cancellationToken);
    return activities;
}
/// <summary>
/// Creates a scorer for <paramref name="query"/>; delegates entirely to the
/// base constructor (no additional state is introduced here).
/// </summary>
/// <param name="query">The query whose terms are scored.</param>
/// <param name="field">The field to score against.</param>
/// <param name="defaultField">Field used when a term has no explicit field.</param>
public MyQueryScorer(Query query, String field, String defaultField)
    : base(query, field, defaultField)
{
}
/// <summary>Returns true when a house document with the given GUID already exists.</summary>
/// <param name="guid">The houseId value to look up.</param>
private bool hasExistObj(string guid)
{
    var matchCount = this.dataop.FindCount(this.DataTableNameHouse, Query.EQ("houseId", guid));
    return matchCount > 0;
}
/// <summary>
/// Verifies SelectPercentileApprox compilation inside a CTE chain: SQL Server
/// rewrites it via a generated PERCENTILE_CONT helper CTE, while Snowflake
/// compiles to a native APPROX_PERCENTILE call. The expected SQL strings are
/// whitespace-sensitive verbatim literals and must not be reflowed.
/// </summary>
public void SelectPercentileApproxInSubquery() { var query = new Query() .With("SubQuery1", q => q .SelectAs(("Column", "Alias")) .From("Table") ) .With("SubQuery2", q => q .SelectPercentileApprox(0.75, "Alias") .From("SubQuery1") ) .Select("percentileapprox") .From("SubQuery2") ; CheckCompileResult(query, EngineCodes.SqlServer, @" WITH [SubQuery1] AS ( SELECT [Column] AS [Alias] FROM [Table] ) , [__generated__SqlKata_SqlServerCompiler_percentileapprox] AS ( SELECT PERCENTILE_CONT(0.75) WITHIN GROUP(ORDER BY [Alias]) OVER() AS [value_0] FROM [SubQuery1] ) , [SubQuery2] AS ( SELECT MIN([__generated__SqlKata_SqlServerCompiler_percentileapprox].[value_0]) AS [percentileapprox] FROM [__generated__SqlKata_SqlServerCompiler_percentileapprox] ) SELECT [percentileapprox] FROM [SubQuery2] "); CheckCompileResult(query, EngineCodes.Snowflake, @" WITH ""SubQuery1"" AS ( SELECT ""Column"" AS ""Alias"" FROM ""Table"" ) , ""SubQuery2"" AS ( SELECT APPROX_PERCENTILE(""Alias"", 0.75) AS ""percentileapprox"" FROM ""SubQuery1"" ) SELECT ""percentileapprox"" FROM ""SubQuery2"" "); }
/// <summary>
/// Exercises the Perst query CodeGenerator API end to end: populates a test
/// database with labels, albums and tracks, then runs a series of generated
/// predicates (comparison, Like, In, And/Or, parameterized, nested field
/// access, OrderBy) asserting the expected match counts and orderings, and
/// finally verifies via the listener that no sequential scans or sorts
/// occurred before dropping the tables.
/// </summary>
public static void Main(String[] args)
{
    Storage storage = StorageFactory.Instance.CreateStorage();
    storage.Open("testcodegenerator.dbs");
    Database db = new Database(storage);
    DateTime start = DateTime.Now;
    // Populate nLabels record labels.
    for (int i = 0; i < nLabels; i++)
    {
        RecordLabel label = new RecordLabel();
        label.name = "Label" + i;
        label.email = "contact@" + label.name + ".com";
        label.address = "Country, City, Street";
        label.phone = "+1 123-456-7890";
        db.AddRecord(label);
    }
    // Populate nAlbums albums (labels assigned round-robin), each with tracks.
    for (int i = 0; i < nAlbums; i++)
    {
        Album album = new Album();
        album.name = "Album" + i;
        album.label = (RecordLabel)Enumerable.First(db.Select(typeof(RecordLabel), "name='Label" + (i % nLabels) + "'"));
        album.genre = "Rock";
        album.release = DateTime.Now;
        db.AddRecord(album);
        for (int j = 0; j < nTracksPerAlbum; j++)
        {
            Track track = new Track();
            track.no = j + 1;
            track.name = "Track" + j;
            track.album = album;
            track.duration = 3.5f;
            db.AddRecord(track);
        }
    }
    Console.WriteLine("Elapsed time for database initialization: " + (DateTime.Now - start));
    QueryExecutionListener listener = new QueryExecutionListener();
    storage.Listener = listener;
    // Generated predicate: no > 0 AND album.label.name == parameter 1.
    Query trackQuery = db.CreateQuery(typeof(Track));
    CodeGenerator code = trackQuery.GetCodeGenerator();
    code.Predicate(code.And(code.Gt(code.Field("no"), code.Literal(0)), code.Eq(code.Field(code.Field(code.Field("album"), "label"), "name"), code.Parameter(1, typeof(string)))));
    start = DateTime.Now;
    int nTracks = 0;
    for (int i = 0; i < nLabels; i++)
    {
        trackQuery[1] = "Label" + i;
        foreach (Track t in trackQuery)
        {
            nTracks += 1;
        }
    }
    Console.WriteLine("Elapsed time for searching of " + nTracks + " tracks: " + (DateTime.Now - start));
    Debug.Assert(nTracks == nAlbums * nTracksPerAlbum);
    // OrderBy only: labels must come back in strictly ascending name order.
    String prev = "";
    int n = 0;
    Query labelQuery = db.CreateQuery(typeof(RecordLabel));
    code = labelQuery.GetCodeGenerator();
    code.OrderBy("name");
    foreach (RecordLabel label in labelQuery)
    {
        Debug.Assert(prev.CompareTo(label.name) < 0);
        prev = label.name;
        n += 1;
    }
    Debug.Assert(n == nLabels);
    prev = "";
    n = 0;
    // Like predicate combined with OrderBy.
    code = labelQuery.GetCodeGenerator();
    code.Predicate(code.Like(code.Field("name"), code.Literal("Label%")));
    code.OrderBy("name");
    foreach (RecordLabel label in labelQuery)
    {
        Debug.Assert(prev.CompareTo(label.name) < 0);
        prev = label.name;
        n += 1;
    }
    Debug.Assert(n == nLabels);
    n = 0;
    // In predicate with a literal list: expects exactly 3 matches.
    code = labelQuery.GetCodeGenerator();
    code.Predicate(code.In(code.Field("name"), code.List(code.Literal("Label1"), code.Literal("Label2"), code.Literal("Label3"))));
    foreach (RecordLabel label in labelQuery)
    {
        n += 1;
    }
    Debug.Assert(n == 3);
    n = 0;
    // Nested Or inside And, plus a Like on email: same 3 labels.
    code = labelQuery.GetCodeGenerator();
    code.Predicate(code.And(code.Or(code.Eq(code.Field("name"), code.Literal("Label1")), code.Or(code.Eq(code.Field("name"), code.Literal("Label2")), code.Eq(code.Field("name"), code.Literal("Label3")))), code.Like(code.Field("email"), code.Literal("contact@%"))));
    foreach (RecordLabel label in labelQuery)
    {
        n += 1;
    }
    Debug.Assert(n == 3);
    // In predicate bound to an ArrayList parameter covering every label.
    code = labelQuery.GetCodeGenerator();
    code.Predicate(code.And(code.Like(code.Field("phone"), code.Literal("+1%")), code.In(code.Field("name"), code.Parameter(1, typeof(ArrayList)))));
    ArrayList list = new ArrayList(nLabels);
    for (int i = 0; i < nLabels; i++)
    {
        list.Add("Label" + i);
    }
    n = 0;
    labelQuery[1] = list;
    foreach (RecordLabel label in labelQuery)
    {
        Debug.Assert(label.name == "Label" + n++);
    }
    Debug.Assert(n == nLabels);
    n = 0;
    // Or over a doubly-nested field path (track.album.label.name).
    code = trackQuery.GetCodeGenerator();
    code.Predicate(code.Or(code.Eq(code.Field(code.Field(code.Field("album"), "label"), "name"), code.Literal("Label1")), code.Eq(code.Field(code.Field(code.Field("album"), "label"), "name"), code.Literal("Label2"))));
    foreach (Track track in trackQuery)
    {
        Debug.Assert(track.album.label.name == "Label1" || track.album.label.name == "Label2");
        n += 1;
    }
    Debug.Assert(n == nAlbums * nTracksPerAlbum * 2 / nLabels);
    // Every query above should have used indexes: no scans, no sorts.
    Debug.Assert(listener.nSequentialSearches == 0);
    Debug.Assert(listener.nSorts == 0);
    db.DropTable(typeof(Track));
    db.DropTable(typeof(Album));
    db.DropTable(typeof(RecordLabel));
    storage.Close();
}
/// <summary>Delegates the query lookup to the underlying data session.</summary>
/// <param name="query">The query describing which records to fetch.</param>
/// <returns>The matching records, as produced by the data session.</returns>
public IEnumerable<T> FindBy(Query query) => this.DataSession.FindBy(query);
/// <summary>
/// Compiles a query combining a "filter" CTE, MIN/MAX aggregates, and two approximate
/// percentiles (0.03 and 0.97), and checks the generated SQL for two engines:
/// SQL Server — where the compiler emits an extra generated CTE using
/// PERCENTILE_CONT(...) WITHIN GROUP ... OVER() to emulate an approximate percentile —
/// and Snowflake, which supports APPROX_PERCENTILE natively.
/// NOTE(review): the expected-SQL verbatim strings below are whitespace-sensitive;
/// do not reformat them.
/// </summary>
public void SelectPercentileApproxComplex() { var query = new Query() .With("filter", q => q .SelectAs(("PurchaseOrderItems.ItemID", "CaseId")) .From("PurchaseOrderItems") .WhereIn("PurchaseOrderItems.Plant", new List <string> { "dummy" }) ) .SelectMin("PurchaseOrderItems.ValueOrdered", "MinValue") .SelectMax("PurchaseOrderItems.ValueOrdered", "MaxValue") .SelectPercentileApprox(0.03, "PurchaseOrderItems.ValueOrdered", "LowPercentile") .SelectPercentileApprox(0.97, "PurchaseOrderItems.ValueOrdered", "HighPercentile") .From("PurchaseOrderItems") .Join("filter", "filter.CaseId", "PurchaseOrderItems.ItemID") .Limit(1) ; CheckCompileResult(query, EngineCodes.SqlServer, @" WITH [filter] AS ( SELECT [PurchaseOrderItems].[ItemID] AS [CaseId] FROM [PurchaseOrderItems] WHERE [PurchaseOrderItems].[Plant] IN (?) ) , [__generated__SqlKata_SqlServerCompiler_percentileapprox] AS ( SELECT [PurchaseOrderItems].[ValueOrdered] AS [value_0] , [PurchaseOrderItems].[ValueOrdered] AS [value_1] , PERCENTILE_CONT(0.03) WITHIN GROUP(ORDER BY [PurchaseOrderItems].[ValueOrdered]) OVER() AS [value_2] , PERCENTILE_CONT(0.97) WITHIN GROUP(ORDER BY [PurchaseOrderItems].[ValueOrdered]) OVER() AS [value_3] FROM [PurchaseOrderItems] INNER JOIN [filter] ON [filter].[CaseId] = [PurchaseOrderItems].[ItemID] ) SELECT TOP (?) MIN([__generated__SqlKata_SqlServerCompiler_percentileapprox].[value_0]) AS [MinValue] , MAX([__generated__SqlKata_SqlServerCompiler_percentileapprox].[value_1]) AS [MaxValue] , MIN([__generated__SqlKata_SqlServerCompiler_percentileapprox].[value_2]) AS [LowPercentile] , MIN([__generated__SqlKata_SqlServerCompiler_percentileapprox].[value_3]) AS [HighPercentile] FROM [__generated__SqlKata_SqlServerCompiler_percentileapprox] "); CheckCompileResult(query, EngineCodes.Snowflake, @" WITH ""filter"" AS ( SELECT ""PurchaseOrderItems"".""ItemID"" AS ""CaseId"" FROM ""PurchaseOrderItems"" WHERE ""PurchaseOrderItems"".""Plant"" IN (?) 
) SELECT MIN(""PurchaseOrderItems"".""ValueOrdered"") AS ""MinValue"" , MAX(""PurchaseOrderItems"".""ValueOrdered"") AS ""MaxValue"" , APPROX_PERCENTILE(""PurchaseOrderItems"".""ValueOrdered"", 0.03) AS ""LowPercentile"" , APPROX_PERCENTILE(""PurchaseOrderItems"".""ValueOrdered"", 0.97) AS ""HighPercentile"" FROM ""PurchaseOrderItems"" INNER JOIN ""filter"" ON ""filter"".""CaseId"" = ""PurchaseOrderItems"".""ItemID"" LIMIT ? "); }
/// <summary>
/// Executes the supplied query and materializes its results into a new
/// <see cref="SysRelHistoriaClinicaEfectorCollection"/>, closing the reader afterwards.
/// </summary>
/// <param name="qry">The query to execute.</param>
/// <returns>A collection loaded from the query's result set.</returns>
public SysRelHistoriaClinicaEfectorCollection FetchByQuery(Query qry)
{
    var collection = new SysRelHistoriaClinicaEfectorCollection();
    collection.LoadAndCloseReader(qry.ExecuteReader());
    return collection;
}
/// <summary>
/// Returns a page of entities matched by <paramref name="query"/>, delegating to the underlying data session.
/// </summary>
/// <param name="query">The query describing which entities to fetch.</param>
/// <param name="index">The paging index forwarded to <c>DataSession.FindBy</c>.</param>
/// <param name="count">The number of entities to fetch for the page.</param>
/// <returns>The sequence of matching entities for the requested page.</returns>
public IEnumerable<T> FindBy(Query query, int index, int count) => this.DataSession.FindBy(query, index, count);
/// <summary>
/// Performs a single-pass grouped search over index-time document blocks using a
/// <see cref="BlockGroupingCollector"/>, then extracts the requested page of groups.
/// </summary>
/// <param name="searcher">The searcher to execute the query on.</param>
/// <param name="filter">Optional filter applied alongside the query.</param>
/// <param name="query">The query to execute.</param>
/// <param name="groupOffset">Number of leading groups to skip in the result.</param>
/// <param name="groupLimit">Number of groups to return starting at <paramref name="groupOffset"/>.</param>
/// <returns>The top groups, or whatever <see cref="BlockGroupingCollector.GetTopGroups{TGroupValue}"/> yields.</returns>
protected virtual ITopGroups<TGroupValue> GroupByDocBlock<TGroupValue>(IndexSearcher searcher, Filter filter, Query query, int groupOffset, int groupLimit)
{
    // The collector must gather everything up to and including the requested page,
    // both at the group level and for the documents inside each group.
    var collector = new BlockGroupingCollector(groupSort, groupOffset + groupLimit, includeScores, groupEndDocs);
    searcher.Search(query, filter, collector);
    return collector.GetTopGroups<TGroupValue>(sortWithinGroup, groupOffset, groupDocsOffset, groupDocsOffset + groupDocsLimit, fillSortFields);
}
/// <summary>
/// Initializes a new <c>FetchedQuery</c> wrapping the given parent query.
/// All state handling is delegated to the base constructor.
/// </summary>
/// <param name="parent">The query this fetched query is derived from.</param>
public FetchedQuery(Query parent) : base(parent) { }
/// <summary>
/// Executes a grouped search based on the function specified by a <see cref="ValueSource"/> passed via the constructor.
/// Both the first pass and second pass are executed on the specified searcher.
/// </summary>
/// <param name="searcher">The <see cref="IndexSearcher"/> instance to execute the grouped search on.</param>
/// <param name="filter">The filter to execute with the grouping</param>
/// <param name="query">The query to execute with the grouping</param>
/// <param name="groupOffset">The group offset</param>
/// <param name="groupLimit">The number of groups to return from the specified group offset</param>
/// <returns>the grouped result as a <see cref="ITopGroups{Object}"/> instance</returns>
/// <exception cref="IOException">If any I/O related errors occur</exception>
/// <exception cref="InvalidOperationException">If a group field was configured; function grouping requires <c>groupField</c> to be unset.</exception>
// LUCENENET additional method signature. Makes searching by function easier due to ability to specify type of MutableValue returned.
public virtual ITopGroups<TMutableValue> SearchByFunction<TMutableValue>(IndexSearcher searcher, Filter filter, Query query, int groupOffset, int groupLimit)
    where TMutableValue : MutableValue
{
    if (groupFunction is null)
    {
        throw IllegalStateException.Create("Either groupField, groupFunction or groupEndDocs must be set."); // This can't happen...
    }
    if (groupField != null)
    {
        // BUGFIX: the original threw a bare System.Exception with a message naming the wrong
        // member ("The valueSource must be null."). In this function-grouping path the
        // conflicting configuration is the group FIELD; the value source (groupFunction) is
        // required and was already validated above. InvalidOperationException is still caught
        // by any existing catch (Exception) handlers.
        throw new InvalidOperationException("The groupField must be null when searching by function.");
    }
    return GroupByFunction<TMutableValue>(searcher, filter, query, groupOffset, groupLimit);
}
/// <summary>
/// Builds folder suggestions for environment-variable style queries (e.g. "%TEMP%" or "%te").
/// An exact "%NAME%" query returns the single expanded path if NAME is a known
/// environment-string path; otherwise the text after the leading '%' is used as a
/// case-insensitive prefix over all known variable names.
/// </summary>
/// <param name="querySearch">The raw user query; expected to start with '%'.</param>
/// <param name="query">The originating query, forwarded to the result factory.</param>
/// <param name="context">Plugin context (kept for signature parity; not used here).</param>
/// <returns>Folder results for the matching environment-string paths.</returns>
internal static List<Result> GetEnvironmentStringPathSuggestions(string querySearch, Query query, PluginInitContext context)
{
    var results = new List<Result>();
    var environmentVariables = LoadEnvironmentStringPaths();
    var search = querySearch;

    // Query wrapped in a pair of '%' (e.g. "%TEMP%"): try an exact env-string match first.
    // BUGFIX: the original only checked EndsWith("%") despite the comment saying
    // "starts and ends with a %", so a query like "FOO%" was mis-stripped.
    if (querySearch.StartsWith("%", StringComparison.Ordinal) && querySearch.EndsWith("%", StringComparison.Ordinal) && querySearch.Length > 1)
    {
        search = querySearch.Substring(1, querySearch.Length - 2);
        if (environmentVariables.ContainsKey(search))
        {
            var expandedPath = environmentVariables[search];
            results.Add(ResultManager.CreateFolderResult($"%{search}%", expandedPath, expandedPath, query));
            return results;
        }
    }

    if (querySearch == "%")
    {
        search = string.Empty; // Get all paths
    }
    else
    {
        // BUGFIX: strip the leading '%' from the ORIGINAL query. The previous code called
        // search.Substring(1) on a value the exact-match branch may already have stripped of
        // both '%' characters, so "%FOO%" (with FOO undefined) prefix-searched "OO" instead
        // of "FOO". Trailing '%' is also trimmed so a failed exact match still yields
        // sensible prefix suggestions.
        search = querySearch.Substring(1).TrimEnd('%');
    }

    foreach (var p in environmentVariables)
    {
        if (p.Key.StartsWith(search, StringComparison.InvariantCultureIgnoreCase))
        {
            results.Add(ResultManager.CreateFolderResult($"%{p.Key}%", p.Value, p.Value, query));
        }
    }

    return results;
}
/// <summary>
/// Executes a grouped search based on the function specified by a <see cref="ValueSource"/> passed via the constructor.
/// Both the first pass and second pass are executed on the specified searcher.
/// Convenience overload without a filter; equivalent to passing <c>null</c> for the filter.
/// </summary>
/// <param name="searcher">The <see cref="IndexSearcher"/> instance to execute the grouped search on.</param>
/// <param name="query">The query to execute with the grouping</param>
/// <param name="groupOffset">The group offset</param>
/// <param name="groupLimit">The number of groups to return from the specified group offset</param>
/// <returns>the grouped result as a <see cref="ITopGroups{Object}"/> instance</returns>
/// <exception cref="IOException">If any I/O related errors occur</exception>
// LUCENENET additional method signature. Makes searching by function easier due to ability to specify type of MutableValue returned.
public virtual ITopGroups<TMutableValue> SearchByFunction<TMutableValue>(IndexSearcher searcher, Query query, int groupOffset, int groupLimit)
    where TMutableValue : MutableValue
    => GroupByFunction<TMutableValue>(searcher, null, query, groupOffset, groupLimit);
// LUCENENET Specific. One of two methods that replace GroupByFieldOrFunction. Used to support
// SearchByFunction in a way that eliminates casting for the caller.
// This method is essentially a Function-specific version of GroupByFieldOrFunction.
protected virtual ITopGroups<TMutableValue> GroupByFunction<TMutableValue>(IndexSearcher searcher, Filter filter, Query query, int groupOffset, int groupLimit) where TMutableValue: MutableValue
{
    // First pass must gather everything up to and including the requested page of groups.
    int topN = groupOffset + groupLimit;
    FunctionFirstPassGroupingCollector<TMutableValue> firstPassCollector;
    FunctionAllGroupsCollector<TMutableValue> allGroupsCollector;
    AbstractAllGroupHeadsCollector allGroupHeadsCollector;

    // This path is only valid when grouping by function: a ValueSource must have been supplied.
    if (groupFunction is null)
    {
        throw IllegalStateException.Create("groupFunction must be set via the constructor by specifying a ValueSource.");
    }

    firstPassCollector = new FunctionFirstPassGroupingCollector<TMutableValue>(groupFunction, valueSourceContext, groupSort, topN);

    // Optional side collectors: the set of all distinct groups and/or the top document ("head")
    // of every group, each only built when the corresponding flag is enabled.
    if (allGroups)
    {
        allGroupsCollector = new FunctionAllGroupsCollector<TMutableValue>(groupFunction, valueSourceContext);
    }
    else
    {
        allGroupsCollector = null;
    }
    if (allGroupHeads)
    {
        allGroupHeadsCollector = new FunctionAllGroupHeadsCollector(groupFunction, valueSourceContext, sortWithinGroup);
    }
    else
    {
        allGroupHeadsCollector = null;
    }

    // Bundle the first-pass collector with any side collectors so one search feeds them all.
    ICollector firstRound;
    if (allGroupHeads || allGroups)
    {
        JCG.List<ICollector> collectors = new JCG.List<ICollector>();
        collectors.Add(firstPassCollector);
        if (allGroups)
        {
            collectors.Add(allGroupsCollector);
        }
        if (allGroupHeads)
        {
            collectors.Add(allGroupHeadsCollector);
        }
        firstRound = MultiCollector.Wrap(collectors.ToArray(/* new Collector[collectors.size()] */));
    }
    else
    {
        firstRound = firstPassCollector;
    }

    // Optionally cache first-pass hits (bounded by RAM or doc count) so the second pass can
    // replay them instead of re-executing the query against the searcher.
    CachingCollector cachedCollector = null;
    if (maxCacheRAMMB != null || maxDocsToCache != null)
    {
        if (maxCacheRAMMB != null)
        {
            cachedCollector = CachingCollector.Create(firstRound, cacheScores, maxCacheRAMMB.Value);
        }
        else
        {
            cachedCollector = CachingCollector.Create(firstRound, cacheScores, maxDocsToCache.Value);
        }
        searcher.Search(query, filter, cachedCollector);
    }
    else
    {
        searcher.Search(query, filter, firstRound);
    }

    // Publish side-collector results; empty placeholders when a feature was not enabled.
    if (allGroups)
    {
        matchingGroups = (ICollection)allGroupsCollector.Groups;
    }
    else
    {
        matchingGroups = (ICollection)Collections.EmptyList<TMutableValue>();
    }
    if (allGroupHeads)
    {
        matchingGroupHeads = allGroupHeadsCollector.RetrieveGroupHeads(searcher.IndexReader.MaxDoc);
    }
    else
    {
        matchingGroupHeads = new Bits.MatchNoBits(searcher.IndexReader.MaxDoc);
    }

    IEnumerable<ISearchGroup<TMutableValue>> topSearchGroups = firstPassCollector.GetTopGroups(groupOffset, fillSortFields);
    if (topSearchGroups is null)
    {
        // No groups matched: return an empty result rather than running a pointless second pass.
        // LUCENENET specific - optimized empty array creation
        return new TopGroups<TMutableValue>(Arrays.Empty<SortField>(), Arrays.Empty<SortField>(), 0, 0, Arrays.Empty<GroupDocs<TMutableValue>>(), float.NaN);
    }

    // Second pass: collect the requested page of documents inside each surviving group.
    int topNInsideGroup = groupDocsOffset + groupDocsLimit;
    IAbstractSecondPassGroupingCollector<TMutableValue> secondPassCollector;
    secondPassCollector = new FunctionSecondPassGroupingCollector<TMutableValue>(topSearchGroups as IEnumerable<ISearchGroup<TMutableValue>>, groupSort, sortWithinGroup, topNInsideGroup, includeScores, includeMaxScore, fillSortFields, groupFunction, valueSourceContext) as IAbstractSecondPassGroupingCollector<TMutableValue>;

    // Replay cached hits when available; otherwise execute the query a second time.
    if (cachedCollector != null && cachedCollector.IsCached)
    {
        cachedCollector.Replay(secondPassCollector);
    }
    else
    {
        searcher.Search(query, filter, secondPassCollector);
    }

    if (allGroups)
    {
        // Wrap the result so callers can also see the total number of matching groups.
        return new TopGroups<TMutableValue>(secondPassCollector.GetTopGroups(groupDocsOffset), matchingGroups.Count);
    }
    else
    {
        return secondPassCollector.GetTopGroups(groupDocsOffset);
    }
}