/// <summary>
/// Retrieves the cached results for the given query
/// </summary>
/// <param name="query">The query to retrieve cached results for</param>
/// <param name="siteId">The site ID that the query is run against</param>
/// <returns>The cached results, or null if no results exist in the cache</returns>
public static CachedResult GetCachedResults(ParsedQuery query, int siteId)
{
    // AutoExpireCacheMinutes == 0 disables caching entirely; a negative value
    // (handled below) effectively means "never expire".
    if (query == null || !query.IsExecutionReady || AppSettings.AutoExpireCacheMinutes == 0)
    {
        return null;
    }

    // Cache rows are keyed by the hash of the executable SQL plus the target site
    var cache = Current.DB.Query<CachedResult>(@"
        SELECT
            *
        FROM
            CachedResults
        WHERE
            QueryHash = @hash AND
            SiteId = @site",
        new
        {
            hash = query.ExecutionHash,
            site = siteId
        }
    ).FirstOrDefault();

    // Lazy expiry: if the entry is older than the configured lifetime, delete it
    // and report a cache miss.
    if (cache != null && AppSettings.AutoExpireCacheMinutes > 0 && cache.CreationDate != null)
    {
        if (cache.CreationDate.Value.AddMinutes(AppSettings.AutoExpireCacheMinutes) < DateTime.UtcNow)
        {
            Current.DB.Execute("DELETE CachedResults WHERE Id = @id", new { id = cache.Id });

            cache = null;
        }
    }

    return cache;
}
/// <summary>
/// A query with no parameters or comments should execute exactly as written.
/// </summary>
public void TestSimpleParsing()
{
    const string sql = "SELECT TOP 10 * FROM Posts";

    var parsed = new ParsedQuery(sql, null);

    Assert.AreEqual(sql, parsed.ExecutionSql);
}
/// <summary>
/// Leading whitespace on each line should be stripped from the executable SQL.
/// </summary>
public void TestSimpleReductionParsing()
{
    var builder = new StringBuilder();
    builder.AppendLine("SELECT");
    builder.AppendLine(" TOP 10 *");
    builder.AppendLine("FROM");
    builder.AppendLine(" Posts");

    var parsed = new ParsedQuery(builder.ToString(), null);

    Assert.AreEqual("SELECT\nTOP 10 *\nFROM\nPosts", parsed.ExecutionSql);
}
/// <summary>
/// Whitespace reduction must not touch the inside of multi-line string literals.
/// </summary>
public void TestMultiLineStringReductionParsing()
{
    var builder = new StringBuilder();
    builder.AppendLine("SELECT TOP 10 * FROM Posts WHERE Body LIKE '%");
    builder.AppendLine(" }%'");
    builder.AppendLine("WHERE");
    builder.AppendLine(" Id > 10");

    var parsed = new ParsedQuery(builder.ToString(), null);

    Assert.AreEqual("SELECT TOP 10 * FROM Posts WHERE Body LIKE '%\n }%'\nWHERE\nId > 10", parsed.ExecutionSql);
}
/// <summary>
/// A trailing GO separator should not produce an empty batch.
/// </summary>
public void TestBatchSplittingIgnoresEmptyBatches()
{
    var builder = new StringBuilder();
    builder.AppendLine("SELECT 1");
    builder.AppendLine("GO");

    var parsed = new ParsedQuery(builder.ToString(), null);
    var batches = parsed.ExecutionSqlBatches.ToArray();

    Assert.AreEqual(1, batches.Length);
    Assert.AreEqual("SELECT 1", batches[0]);
}
/// <summary>
/// Runs a query against a single site, serving from the results cache when possible.
/// </summary>
/// <param name="query">The parsed query to execute</param>
/// <param name="site">The site whose database is queried</param>
/// <param name="user">The user running the query</param>
/// <param name="result">Optional async handle used for cancellation tracking</param>
private static QueryResults GetSingleSiteResults(ParsedQuery query, Site site, User user, AsyncQueryRunner.AsyncResult result = null)
{
    QueryResults results = null;
    var timer = new Stopwatch();

    timer.Start();

    var cache = QueryUtil.GetCachedResults(query, site.Id);

    if (cache != null)
    {
        // The cached row is only usable if it can satisfy the request: either no
        // execution plan was asked for, or the cache already contains one.
        if (!query.IncludeExecutionPlan || cache.ExecutionPlan != null)
        {
            results = new QueryResults();
            results.WithCache(cache);
            results.Truncated = cache.Truncated;
            results.Messages = cache.Messages;
            results.FromCache = true;

            // If we didn't ask for the execution plan, don't return it
            if (!query.IncludeExecutionPlan)
            {
                results.ExecutionPlan = null;
            }
        }
    }

    timer.Stop();

    if (results == null)
    {
        results = ExecuteNonCached(query, site, user, result);
        results.FromCache = false;

        // Don't cache cancelled results, since we don't know what state they're in...
        if (result != null && !result.Cancelled)
        {
            // `cache != null` here means a row existed but lacked the plan, so the
            // cache call only needs to back-fill the plan (planOnly = true).
            AddResultToCache(results, query, site, cache != null);
        }
    }
    else
    {
        // Cache hit: report the (tiny) lookup time as the execution time
        results.ExecutionTime = timer.ElapsedMilliseconds;
    }

    results.Url = site.Url;
    results.SiteId = site.Id;
    results.SiteName = site.Name.ToLower();

    return(results);
}
/// <summary>
/// Adds the results of a running a particular query for a given site to the database cache
/// </summary>
/// <param name="results">The results of the query</param>
/// <param name="query">The query that was executed</param>
/// <param name="site">The site that the query was run against</param>
/// <param name="planOnly">Whether or not this is just an update to add the cached execution plan</param>
private static void AddResultToCache(QueryResults results, ParsedQuery query, Site site, bool planOnly)
{
    // If the cache time is zero, just don't save a cache
    if (AppSettings.AutoExpireCacheMinutes == 0)
    {
        return;
    }

    if (!planOnly)
    {
        // Fresh cache entry: persist the serialized result sets plus metadata
        Current.DB.Execute(@"
            INSERT INTO CachedResults(
                QueryHash, SiteId, Results, ExecutionPlan,
                Messages, Truncated, CreationDate
            ) VALUES(
                @hash, @site, @results, @plan,
                @messages, @truncated, @creation
            )",
            new
            {
                hash = query.ExecutionHash,
                site = site.Id,
                results = results.GetJsonResults(),
                plan = results.ExecutionPlan,
                messages = results.Messages,
                truncated = results.Truncated,
                creation = DateTime.UtcNow
            }
        );
    }
    else
    {
        // Should we just update everything in this case? Presumably the only
        // thing that changed was the addition of the execution plan, but...
        // NOTE(review): unlike the SELECT/DELETE paths, this UPDATE filters only
        // by QueryHash and not SiteId — verify whether overwriting the plan for
        // every site sharing this hash is intended.
        Current.DB.Execute(@"
            UPDATE CachedResults SET ExecutionPlan = @plan WHERE QueryHash = @hash",
            new
            {
                plan = results.ExecutionPlan,
                hash = query.ExecutionHash
            }
        );
    }
}
/// <summary>
/// Clears the cached results for the given query
/// </summary>
/// <param name="query">The query to clear cache for</param>
/// <param name="siteId">The site ID that the query is run against</param>
public static void ClearCachedResults(ParsedQuery query, int siteId)
{
    // Nothing to clear when the query is invalid or caching is disabled outright
    if (query == null || !query.IsExecutionReady || AppSettings.AutoExpireCacheMinutes == 0)
    {
        return;
    }

    // Fixed: this previously used Query<CachedResult> to run a DELETE statement.
    // DELETE returns no result set, so Execute is the correct call (and matches
    // how the expiry path in GetCachedResults issues its DELETE).
    Current.DB.Execute(@"
        DELETE CachedResults
        WHERE
            QueryHash = @hash AND
            SiteId = @site",
        new
        {
            hash = query.ExecutionHash,
            site = siteId
        }
    );
}
/// <summary>
/// Single-line (--) and block (/* */) comments are stripped from the executable SQL.
/// </summary>
public void TestCommentReductionParsing()
{
    var builder = new StringBuilder();
    builder.AppendLine("-- A single line comment");
    builder.AppendLine("SELECT");
    builder.AppendLine(" TOP 10 * -- We only want the top 10");
    builder.AppendLine("FROM");
    builder.AppendLine("/* Posts */");
    builder.AppendLine("/*");
    builder.AppendLine(" Comments");
    builder.AppendLine(" */");
    builder.AppendLine(" Users");

    var parsed = new ParsedQuery(builder.ToString(), null);

    Assert.AreEqual("SELECT TOP 10 * FROM Users", parsed.ExecutionSql);
}
/// <summary>
/// Returns the cached execution plan for a revision's query, if one exists.
/// </summary>
public ActionResult ShowPlan(string sitename, int revisionId)
{
    Query query = QueryUtil.GetQueryForRevision(revisionId);

    if (query == null)
    {
        return PageNotFound();
    }

    var parsedQuery = new ParsedQuery(query.QueryBody, Request.Params);

    if (!parsedQuery.IsExecutionReady)
    {
        return PageBadRequest();
    }

    // Plans are only available from the cache; we never execute here
    CachedResult cachedResult = QueryUtil.GetCachedResults(parsedQuery, Site.Id);

    if (cachedResult == null || cachedResult.ExecutionPlan == null)
    {
        return PageNotFound();
    }

    return new QueryPlanResult(cachedResult.ExecutionPlan);
}
/// <summary>
/// Streams a revision's results as CSV, using cached results when available.
/// </summary>
public ActionResult ShowSingleSiteCsv(string sitename, int revisionId)
{
    Query query = QueryUtil.GetQueryForRevision(revisionId);

    if (query == null)
    {
        return PageNotFound();
    }

    var site = GetSite(sitename);

    // Fixed: this previously tested `sitename == null` (the raw route parameter)
    // instead of the lookup result, so an unknown site name slipped through and
    // caused a null reference when executing the query below.
    if (site == null)
    {
        return PageNotFound();
    }

    var parsedQuery = new ParsedQuery(query.QueryBody, Request.Params);

    if (!parsedQuery.IsExecutionReady)
    {
        return PageBadRequest();
    }

    CachedResult cachedResults = QueryUtil.GetCachedResults(parsedQuery, Site.Id);

    List<ResultSet> resultSets;

    if (cachedResults != null)
    {
        // Cached results are stored as JSON; rehydrate them
        resultSets = JsonConvert.DeserializeObject<List<ResultSet>>(cachedResults.Results);
    }
    else
    {
        resultSets = QueryRunner.GetResults(parsedQuery, site, CurrentUser).ResultSets;
    }

    return new CsvResult(resultSets);
}
/// <summary>
/// Executes a saved revision's query and returns results, or a job ID to poll
/// if the query is still running.
/// </summary>
public ActionResult Execute(int querySetId, int revisionId, int siteId, bool? textResults, bool? withExecutionPlan, TargetSites? targetSites)
{
    // Anonymous users must have passed the captcha before running queries
    if (CurrentUser.IsAnonymous && !CaptchaController.CaptchaPassed(GetRemoteIP()))
    {
        return Json(new { captcha = true });
    }

    ActionResult response = null;

    try
    {
        if (!ValidateTargetSites(targetSites))
        {
            throw new ApplicationException("Invalid target sites selection");
        }

        QuerySet querySet = null;
        querySet = Current.DB.QuerySets.Get(querySetId);

        if (querySet == null)
        {
            throw new ApplicationException("Invalid query set ID");
        }

        Revision revision = Current.DB.Revisions.Get(revisionId);

        if (revision == null)
        {
            throw new ApplicationException("Invalid revision ID");
        }

        Query query = Current.DB.Queries.Get(revision.QueryId);

        var parsedQuery = new ParsedQuery(
            query.QueryBody,
            Request.Params,
            withExecutionPlan == true,
            targetSites ?? TargetSites.Current
        );

        QueryResults results = null;
        Site site = GetSite(siteId);

        ValidateQuery(parsedQuery, site);

        var contextData = new QueryContextData
        {
            IsText = textResults == true,
            QuerySet = querySet,
            Revision = revision
        };

        var asyncResults = AsyncQueryRunner.Execute(parsedQuery, CurrentUser, site, contextData);

        if (asyncResults.State == AsyncQueryRunner.AsyncState.Failure)
        {
            throw asyncResults.Exception;
        }

        if (asyncResults.State == AsyncQueryRunner.AsyncState.Success)
        {
            results = asyncResults.QueryResults;
        }
        else
        {
            // Still running: hand the client a job ID it can poll against
            return Json(new { running = true, job_id = asyncResults.JobId });
        }

        response = CompleteResponse(results, parsedQuery, contextData, siteId);
    }
    catch (Exception ex)
    {
        // All failures funnel through one translator so clients get a uniform error shape
        response = TransformExecutionException(ex);
    }

    return response;
}
/// <summary>
/// Float-typed parameters accept both integer and decimal literals.
/// </summary>
public void TestValidFloats()
{
    string sql = "##a:float## ##b:float##";
    var parameters = new NameValueCollection { { "a", "1" }, { "b", "1.2" } };

    var query = new ParsedQuery(sql, parameters);

    Assert.AreEqual("float", query.Parameters["a"].Type);
    // Fixed: Assert.AreEqual takes (expected, actual) — the arguments were
    // reversed here, which produces a misleading message on failure.
    Assert.AreEqual("1 1.2", query.ExecutionSql);
    Assert.IsTrue(query.IsExecutionReady);
}
/// <summary>
/// Parameters inside multi-line string literals must still be substituted.
/// </summary>
public void TestWeParseParametersInMultiLineStrings()
{
    var builder = new StringBuilder();
    builder.AppendLine("SELECT * FROM Posts WHERE Body LIKE '%");
    builder.AppendLine("##SearchTerm##'");

    var parameters = new NameValueCollection { { "SearchTerm", "foobar" } };
    var parsed = new ParsedQuery(builder.ToString(), parameters);

    Assert.IsTrue(parsed.Parameters.ContainsKey("SearchTerm"));
    Assert.AreEqual("SELECT * FROM Posts WHERE Body LIKE '%\nfoobar'", parsed.ExecutionSql);
    Assert.IsTrue(parsed.IsExecutionReady);
}
/// <summary>
/// Executes a query directly against a site's database, bypassing the results
/// cache, with per-IP rate limiting and blacklisting.
/// </summary>
/// <param name="query">The parsed query to run</param>
/// <param name="site">The site whose database the query runs against</param>
/// <param name="user">The user executing the query</param>
/// <param name="result">Optional async handle used for cancellation tracking</param>
/// <returns>The query results, including messages and (optionally) the execution plan</returns>
public static QueryResults ExecuteNonCached(ParsedQuery query, Site site, User user, AsyncQueryRunner.AsyncResult result)
{
    var remoteIP = OData.GetRemoteIP();
    var key = "total-" + remoteIP;

    // Sliding one-hour counter of queries per IP, used for rate limiting
    var currentCount = (int?)Current.GetCachedObject(key) ?? 0;
    currentCount++;
    Current.SetCachedObjectSliding(key, currentCount, 60 * 60);

    if (currentCount > 130)
    {
        // clearly a robot, auto black list
        Current.DB.BlackList.Insert(new { CreationDate = DateTime.UtcNow, IPAddress = remoteIP });
    }

    if (currentCount > 100)
    {
        throw new Exception("You can not run any new queries for another hour, you have exceeded your limit!");
    }

    if (Current.DB.Query<int>("select count(*) from BlackList where IPAddress = @remoteIP", new { remoteIP }).First() > 0)
    {
        System.Threading.Thread.Sleep(2000);
        throw new Exception("You have been blacklisted due to abuse!");
    }

    var results = new QueryResults();

    using (SqlConnection cnn = site.GetOpenConnection())
    {
        // well we do not want to risk blocking, if somebody needs to change this we will need to add a setting
        cnn.Execute("set transaction isolation level read uncommitted");

        var timer = new Stopwatch();
        timer.Start();

        var messages = new StringBuilder();
        var infoHandler = new SqlInfoMessageEventHandler((sender, args) =>
        {
            // todo handle errors as well
            messages.AppendLine(args.Message);
        });

        try
        {
            cnn.InfoMessage += infoHandler;

            if (query.IncludeExecutionPlan)
            {
                using (var command = new SqlCommand("SET STATISTICS XML ON", cnn))
                {
                    command.ExecuteNonQuery();
                }
            }

            var plan = new QueryPlan();

            foreach (string batch in query.ExecutionSqlBatches)
            {
                using (var command = new SqlCommand(batch, cnn))
                {
                    if (result != null)
                    {
                        result.Command = command;

                        if (result.Cancelled)
                        {
                            continue;
                        }
                    }

                    command.CommandTimeout = AppSettings.QueryTimeout;

                    try
                    {
                        PopulateResults(results, command, result, messages, query.IncludeExecutionPlan);
                    }
                    catch (Exception)
                    {
                        // Ugh. So, if we cancel the query in-process, we get an exception...
                        // But we have no good way of knowing that the exception here is actually
                        // *that* exception...so we'll just assume it was if the state is Cancelled
                        if (result == null || result.State != AsyncQueryRunner.AsyncState.Cancelled)
                        {
                            // Fixed: rethrow with `throw;` instead of `throw ex;` so the
                            // original stack trace is preserved.
                            throw;
                        }
                    }
                }

                if (query.IncludeExecutionPlan)
                {
                    plan.AppendBatchPlan(results.ExecutionPlan);
                    results.ExecutionPlan = null;
                }
            }

            results.ExecutionPlan = plan.PlanXml;
        }
        finally
        {
            // Always detach the handler and flush captured messages, even on failure
            cnn.InfoMessage -= infoHandler;
            results.Messages = messages.ToString();
        }

        timer.Stop();
        results.ExecutionTime = timer.ElapsedMilliseconds;

        ProcessMagicColumns(results, cnn);
    }

    return(results);
}
/// <summary>
/// Throws if the query is not ready to execute or the target site is unknown.
/// </summary>
private static void ValidateQuery(ParsedQuery query, Site site)
{
    if (!query.IsExecutionReady)
    {
        var message = string.IsNullOrEmpty(query.ErrorMessage)
            ? "All parameters must be set!"
            : query.ErrorMessage;

        throw new ApplicationException(message);
    }

    if (site == null)
    {
        throw new ApplicationException("Invalid site ID");
    }
}
/// <summary>
/// Queues a query for background execution, enforcing a per-user concurrency cap.
/// </summary>
/// <param name="query">The parsed query to execute</param>
/// <param name="user">The user running the query; anonymous users are keyed by IP</param>
/// <param name="site">The site to run the query against</param>
/// <param name="context">Contextual data carried along with the job</param>
/// <returns>An AsyncResult handle callers can poll for completion</returns>
public static AsyncResult Execute(ParsedQuery query, User user, Site site, QueryContextData context)
{
    // Anonymous users are tracked by IP address, logged-in users by ID
    string userTag = user.IsAnonymous ? user.IPAddress : user.Id.ToString();

    List<Task> activeTasks;
    running.TryGetValue(userTag, out activeTasks);

    if (activeTasks != null)
    {
        lock(activeTasks)
        {
            if (activeTasks.Where(t => !t.IsCompleted).Count() >= AppSettings.ConcurrentQueries)
            {
                throw new ApplicationException("Too many queries are running, you may only run " + AppSettings.ConcurrentQueries + " queries at a time");
            }
        }
    }
    else
    {
        // NOTE(review): if TryAdd loses a race with another thread, the indexer
        // below still returns the winner's list, so this appears safe — confirm
        // `running` is a ConcurrentDictionary.
        running.TryAdd(userTag, new List<Task>());
        activeTasks = running[userTag];
    }

    AsyncResult result = new AsyncResult
    {
        JobId = Guid.NewGuid(),
        State = AsyncState.Pending,
        ParsedQuery = query,
        Site = site,
        LastPoll = DateTime.UtcNow,
        QueryContextData = context
    };

    Task task = new Task(() =>
    {
        try
        {
            result.QueryResults = QueryRunner.GetResults(query, site, user, result);

            // Only flip to Success if nothing else (e.g. cancellation) changed the state
            if (result.State == AsyncState.Pending)
            {
                result.State = AsyncState.Success;
            }
        }
        catch (Exception e)
        {
            result.Exception = e;
            result.State = AsyncState.Failure;
        }
    });

    // On completion, stamp the finish time and prune finished tasks from the user's list
    task.ContinueWith(ignore =>
    {
        result.CompletionDate = DateTime.UtcNow;

        lock (activeTasks)
        {
            activeTasks.RemoveAll(t => t.IsCompleted);
        }
    });

    result.Task = task;
    jobs.TryAdd(result.JobId, result);
    task.Start();

    lock(activeTasks)
    {
        activeTasks.Add(task);
    }

    // give it some time to get results ...
    System.Threading.Thread.Sleep(50);

    return result;
}
/// <summary>
/// Queues a query for background execution, enforcing a per-user concurrency cap.
/// </summary>
/// <param name="query">The parsed query to execute</param>
/// <param name="user">The user running the query; anonymous users are keyed by IP</param>
/// <param name="site">The site to run the query against</param>
/// <param name="context">Contextual data carried along with the job</param>
/// <returns>An AsyncResult handle callers can poll for completion</returns>
public static AsyncResult Execute(ParsedQuery query, User user, Site site, QueryContextData context)
{
    // Anonymous users are tracked by IP address, logged-in users by ID
    string userTag = user.IsAnonymous ? user.IPAddress : user.Id.ToString();

    List<Task> activeTasks;
    running.TryGetValue(userTag, out activeTasks);

    if (activeTasks != null)
    {
        lock (activeTasks)
        {
            if (activeTasks.Where(t => !t.IsCompleted).Count() >= AppSettings.ConcurrentQueries)
            {
                throw new ApplicationException("Too many queries are running, you may only run " + AppSettings.ConcurrentQueries + " queries at a time");
            }
        }
    }
    else
    {
        // NOTE(review): if TryAdd loses a race with another thread, the indexer
        // below still returns the winner's list, so this appears safe — confirm
        // `running` is a ConcurrentDictionary.
        running.TryAdd(userTag, new List<Task>());
        activeTasks = running[userTag];
    }

    AsyncResult result = new AsyncResult
    {
        JobId = Guid.NewGuid(),
        State = AsyncState.Pending,
        ParsedQuery = query,
        Site = site,
        LastPoll = DateTime.UtcNow,
        QueryContextData = context
    };

    Task task = new Task(() =>
    {
        try
        {
            result.QueryResults = QueryRunner.GetResults(query, site, user, result);

            // Only flip to Success if nothing else (e.g. cancellation) changed the state
            if (result.State == AsyncState.Pending)
            {
                result.State = AsyncState.Success;
            }
        }
        catch (Exception e)
        {
            result.Exception = e;
            result.State = AsyncState.Failure;
        }
    });

    // On completion, stamp the finish time and prune finished tasks from the user's list
    task.ContinueWith(ignore =>
    {
        result.CompletionDate = DateTime.UtcNow;

        lock (activeTasks)
        {
            activeTasks.RemoveAll(t => t.IsCompleted);
        }
    });

    result.Task = task;
    jobs.TryAdd(result.JobId, result);
    task.Start();

    lock (activeTasks)
    {
        activeTasks.Add(task);
    }

    // give it some time to get results ...
    System.Threading.Thread.Sleep(50);

    return(result);
}
/// <summary>
/// Dispatches to single-site or multi-site execution based on the query's target.
/// </summary>
public static QueryResults GetResults(ParsedQuery query, Site site, User user, AsyncQueryRunner.AsyncResult result = null)
{
    if (query.TargetSites == TargetSites.Current)
    {
        return GetSingleSiteResults(query, site, user, result);
    }

    return GetMultiSiteResults(query, user, result);
}
/// <summary>
/// A ##name?default## parameter falls back to its default when no value is supplied.
/// </summary>
public void TestDefaultParameterValue()
{
    const string sql = "SELECT * FROM Tags WHERE TagName = '##TagName?java##'";

    var parsed = new ParsedQuery(sql, null);

    Assert.IsTrue(parsed.Parameters.ContainsKey("TagName"));
    Assert.AreEqual("java", parsed.Parameters["TagName"].Default);
    Assert.AreEqual("SELECT * FROM Tags WHERE TagName = 'java'", parsed.ExecutionSql);
    Assert.IsTrue(parsed.IsExecutionReady);
}
/// <summary>
/// An empty parameter value makes the query not execution-ready, with an error recorded.
/// </summary>
public void TestWeDetectMissingParameterValues()
{
    const string sql = "##a## ##b##";
    var parameters = new NameValueCollection { { "a", "1" }, { "b", "" } };

    var parsed = new ParsedQuery(sql, parameters);

    Assert.IsFalse(parsed.IsExecutionReady);
    Assert.AreEqual("Missing value for b!", parsed.Errors[0]);
}
/// <summary>
/// Validates the query against the site, runs it, and translates the results
/// into the requested output form.
/// </summary>
private QueryResults ExecuteWithResults(ParsedQuery query, int siteId, bool textResults)
{
    Site site = GetSite(siteId);
    ValidateQuery(query, site);

    var queryResults = QueryRunner.GetResults(query, site, CurrentUser);

    return TranslateResults(query, textResults, queryResults);
}
/// <summary>
/// Every ##name## placeholder in the SQL is registered as a parameter.
/// </summary>
public void TestWeDetectAllParameters()
{
    const string sql = "##a## ##b##";
    var parameters = new NameValueCollection { { "a", "1" }, { "b", "3" } };

    var parsed = new ParsedQuery(sql, parameters);

    Assert.IsTrue(parsed.Parameters.ContainsKey("a"));
    Assert.IsTrue(parsed.Parameters.ContainsKey("b"));
    Assert.IsTrue(parsed.IsExecutionReady);
}
/// <summary>
/// Converts results to text form when requested (or when there is more than one
/// result set), and transforms the execution plan when one was captured.
/// </summary>
private static QueryResults TranslateResults(ParsedQuery query, bool textResults, QueryResults results)
{
    bool renderAsText = textResults || results.ResultSets.Count != 1;

    if (renderAsText)
    {
        results = results.ToTextResults();
    }

    if (query.IncludeExecutionPlan)
    {
        results = results.TransformQueryPlan();
    }

    return results;
}
/// <summary>
/// Executes a query directly against a site's database, bypassing the results
/// cache, with per-IP rate limiting and blacklisting.
/// </summary>
/// <param name="query">The parsed query to run</param>
/// <param name="site">The site whose database the query runs against</param>
/// <param name="user">The user executing the query</param>
/// <param name="result">Optional async handle used for cancellation tracking</param>
/// <returns>The query results, including messages and (optionally) the execution plan</returns>
public static QueryResults ExecuteNonCached(ParsedQuery query, Site site, User user, AsyncQueryRunner.AsyncResult result)
{
    var remoteIP = OData.GetRemoteIP();
    var key = "total-" + remoteIP;

    // Sliding one-hour counter of queries per IP, used for rate limiting
    var currentCount = (int?)Current.GetCachedObject(key) ?? 0;
    currentCount++;
    Current.SetCachedObjectSliding(key, currentCount, 60 * 60);

    if (currentCount > 130)
    {
        // clearly a robot, auto black list
        Current.DB.BlackList.Insert(new { CreationDate = DateTime.UtcNow, IPAddress = remoteIP });
    }

    if (currentCount > 100)
    {
        throw new Exception("You can not run any new queries for another hour, you have exceeded your limit!");
    }

    if (Current.DB.Query<int>("select count(*) from BlackList where IPAddress = @remoteIP", new { remoteIP }).First() > 0)
    {
        System.Threading.Thread.Sleep(2000);
        throw new Exception("You have been blacklisted due to abuse!");
    }

    var results = new QueryResults();

    using (SqlConnection cnn = site.GetOpenConnection())
    {
        // well we do not want to risk blocking, if somebody needs to change this we will need to add a setting
        cnn.Execute("set transaction isolation level read uncommitted");

        var timer = new Stopwatch();
        timer.Start();

        var messages = new StringBuilder();
        var infoHandler = new SqlInfoMessageEventHandler((sender, args) =>
        {
            // todo handle errors as well
            messages.AppendLine(args.Message);
        });

        try
        {
            cnn.InfoMessage += infoHandler;

            if (query.IncludeExecutionPlan)
            {
                using (var command = new SqlCommand("SET STATISTICS XML ON", cnn))
                {
                    command.ExecuteNonQuery();
                }
            }

            var plan = new QueryPlan();

            foreach (string batch in query.ExecutionSqlBatches)
            {
                using (var command = new SqlCommand(batch, cnn))
                {
                    if (result != null)
                    {
                        result.Command = command;

                        if (result.Cancelled)
                        {
                            continue;
                        }
                    }

                    command.CommandTimeout = AppSettings.QueryTimeout;

                    try
                    {
                        PopulateResults(results, command, result, messages, query.IncludeExecutionPlan);
                    }
                    catch (Exception)
                    {
                        // Ugh. So, if we cancel the query in-process, we get an exception...
                        // But we have no good way of knowing that the exception here is actually
                        // *that* exception...so we'll just assume it was if the state is Cancelled
                        if (result == null || result.State != AsyncQueryRunner.AsyncState.Cancelled)
                        {
                            // Fixed: rethrow with `throw;` instead of `throw ex;` so the
                            // original stack trace is preserved.
                            throw;
                        }
                    }
                }

                if (query.IncludeExecutionPlan)
                {
                    plan.AppendBatchPlan(results.ExecutionPlan);
                    results.ExecutionPlan = null;
                }
            }

            results.ExecutionPlan = plan.PlanXml;
        }
        finally
        {
            // Always detach the handler and flush captured messages, even on failure
            cnn.InfoMessage -= infoHandler;
            results.Messages = messages.ToString();
        }

        timer.Stop();
        results.ExecutionTime = timer.ElapsedMilliseconds;

        ProcessMagicColumns(results, cnn);
    }

    return results;
}
/// <summary>
/// Saves a query (with optional title/description) and kicks off execution,
/// returning results or a job ID to poll.
/// </summary>
public ActionResult Save(string sql, string title, string description, int siteId, int? querySetId, bool? textResults, bool? withExecutionPlan, TargetSites? targetSites)
{
    // Anonymous users must have passed the captcha before saving/running queries
    if (CurrentUser.IsAnonymous && !CaptchaController.CaptchaPassed(GetRemoteIP()))
    {
        return Json(new { captcha = true });
    }

    ActionResult response = null;

    try
    {
        if (!ValidateTargetSites(targetSites))
        {
            throw new ApplicationException("Invalid target sites selection");
        }

        QuerySet querySet = null;

        if (querySetId.HasValue)
        {
            querySet = Current.DB.QuerySets.Get(querySetId.Value);

            if (querySet == null)
            {
                throw new ApplicationException("Invalid query set ID");
            }
        }

        var parsedQuery = new ParsedQuery(
            sql,
            Request.Params,
            withExecutionPlan == true,
            targetSites ?? TargetSites.Current
        );

        QueryResults results = null;
        Site site = GetSite(siteId);

        ValidateQuery(parsedQuery, site);

        if (title.HasValue() && title.Length > 100)
        {
            throw new ApplicationException("Title must be no more than 100 characters");
        }

        if (description.HasValue() && description.Length > 1000)
        {
            throw new ApplicationException("Description must be no more than 1000 characters");
        }

        var contextData = new QueryContextData
        {
            Title = title,
            Description = description,
            IsText = textResults == true,
            QuerySet = querySet
        };

        var asyncResults = AsyncQueryRunner.Execute(parsedQuery, CurrentUser, site, contextData);

        if (asyncResults.State == AsyncQueryRunner.AsyncState.Failure)
        {
            throw asyncResults.Exception;
        }

        // Cancelled jobs still surface whatever partial results they produced
        if (asyncResults.State == AsyncQueryRunner.AsyncState.Success || asyncResults.State == AsyncQueryRunner.AsyncState.Cancelled)
        {
            results = asyncResults.QueryResults;
        }
        else
        {
            // Still running: return a job ID the client can poll
            return Json(new {running = true, job_id = asyncResults.JobId});
        }

        response = CompleteResponse(results, parsedQuery, contextData, siteId);
    }
    catch (Exception ex)
    {
        // Uniform error translation for the client
        response = TransformExecutionException(ex);
    }

    return response;
}
/// <summary>
/// Runs a query against a single site, serving from the results cache when possible.
/// </summary>
/// <param name="query">The parsed query to execute</param>
/// <param name="site">The site whose database is queried</param>
/// <param name="user">The user running the query</param>
/// <param name="result">Optional async handle used for cancellation tracking</param>
private static QueryResults GetSingleSiteResults(ParsedQuery query, Site site, User user, AsyncQueryRunner.AsyncResult result = null)
{
    QueryResults results = null;
    var timer = new Stopwatch();

    timer.Start();

    var cache = QueryUtil.GetCachedResults(query, site.Id);

    if (cache != null)
    {
        // The cached row is only usable if it can satisfy the request: either no
        // execution plan was asked for, or the cache already contains one.
        if (!query.IncludeExecutionPlan || cache.ExecutionPlan != null)
        {
            results = new QueryResults();
            results.WithCache(cache);
            results.Truncated = cache.Truncated;
            results.Messages = cache.Messages;
            results.FromCache = true;

            // If we didn't ask for the execution plan, don't return it
            if (!query.IncludeExecutionPlan)
            {
                results.ExecutionPlan = null;
            }
        }
    }

    timer.Stop();

    if (results == null)
    {
        results = ExecuteNonCached(query, site, user, result);
        results.FromCache = false;

        // Don't cache cancelled results, since we don't know what state they're in...
        if (result != null && !result.Cancelled)
        {
            // `cache != null` means a row existed but lacked the plan (planOnly update)
            AddResultToCache(results, query, site, cache != null);
        }
    }
    else
    {
        // Cache hit: report the (tiny) lookup time as the execution time
        results.ExecutionTime = timer.ElapsedMilliseconds;
    }

    results.Url = site.Url;
    results.SiteId = site.Id;
    results.SiteName = site.Name.ToLower();

    return results;
}
/// <summary>
/// Runs a query against every site and merges the per-site results into one set:
/// pivot-style merging when a "Pivot" column is present, otherwise concatenated
/// text results.
/// </summary>
/// <param name="parsedQuery">The parsed query to execute on each site</param>
/// <param name="currentUser">The user running the query</param>
public static QueryResults GetMultiSiteResults(ParsedQuery parsedQuery, User currentUser)
{
    var sites = Current.DB.Sites.ToList();

    if (parsedQuery.ExcludesMetas)
    {
        sites = sites.Where(s => !s.Url.Contains("meta.")).ToList();
    }

    var firstSite = sites.First();
    var results = QueryRunner.GetSingleSiteResults(parsedQuery, firstSite, currentUser);
    StringBuilder buffer = new StringBuilder();

    // Fixed: .Any(predicate) instead of .Where(predicate).Any()
    if (results.ResultSets.First().Columns.Any(c => c.Name == "Pivot"))
    {
        // Rename the first site's pivot column so per-site columns stay distinct
        foreach (var info in results.ResultSets.First().Columns)
        {
            if (info.Name == "Pivot")
            {
                info.Name = firstSite.Name + " Pivot";
                break;
            }
        }

        foreach (var s in sites.Skip(1))
        {
            try
            {
                var tmp = QueryRunner.GetSingleSiteResults(parsedQuery, s, currentUser);
                results.ExecutionTime += tmp.ExecutionTime;
                MergePivot(s, results, tmp);
            }
            // Fixed: exception variable was declared but unused (compiler warning)
            catch (Exception)
            {
                // don't blow up here ... just skip the site.
            }
        }
    }
    else
    {
        results = results.ToTextResults();
        AddBody(buffer, results, firstSite);

        foreach (var s in sites.Skip(1))
        {
            try
            {
                var tmp = QueryRunner.GetSingleSiteResults(parsedQuery, s, currentUser).ToTextResults();
                results.ExecutionTime += tmp.ExecutionTime;
                AddBody(buffer, tmp, s);
            }
            catch (Exception)
            {
                // don't blow up ... just skip the site
            }
        }
    }

    results.Messages = buffer.ToString();
    results.MultiSite = true;
    results.ExcludeMetas = parsedQuery.ExcludesMetas;

    return results;
}
/// <summary>
/// Parameters that appear only inside SQL comments must not be registered.
/// </summary>
public void TestWeIgnoreCommentedParameters()
{
    var builder = new StringBuilder();
    builder.AppendLine("SELECT");
    builder.AppendLine(" TOP ##TopCount##");
    builder.AppendLine("FROM");
    builder.AppendLine(" Posts");
    builder.AppendLine("--WHERE UserId == ##UserId##");

    var parameters = new NameValueCollection { { "TopCount", "10" }, { "UserId", "1" } };
    var parsed = new ParsedQuery(builder.ToString(), parameters);

    Assert.IsTrue(parsed.Parameters.ContainsKey("TopCount"));
    Assert.IsFalse(parsed.Parameters.ContainsKey("UserId"));
    Assert.IsTrue(parsed.IsExecutionReady);
}
/// <summary>
/// Runs a query against every targeted site and merges the per-site results into
/// one set: pivot-style merging when a "Pivot" column exists, otherwise appended
/// rows with an extra "Site Name" column.
/// </summary>
/// <param name="parsedQuery">The parsed query to execute on each site</param>
/// <param name="currentUser">The user running the query</param>
/// <param name="result">Optional async handle used for cancellation tracking</param>
private static QueryResults GetMultiSiteResults(ParsedQuery parsedQuery, User currentUser, AsyncQueryRunner.AsyncResult result = null)
{
    var sites = Current.DB.Sites.All();

    // Narrow the site list according to the requested target group
    if (parsedQuery.TargetSites == TargetSites.AllNonMetaSites)
    {
        sites = sites.Where(s => !s.Url.Contains("meta.")).ToList();
    }
    else if (parsedQuery.TargetSites == TargetSites.AllMetaSites)
    {
        sites = sites.Where(s => s.Url.Contains("meta.")).ToList();
    }

    var firstSite = sites.First();
    var results = QueryRunner.GetSingleSiteResults(parsedQuery, firstSite, currentUser, result);

    if (results.ResultSets.First().Columns.Where(c => c.Name == "Pivot").Any())
    {
        // Rename the first site's pivot column so per-site columns stay distinct
        foreach (var info in results.ResultSets.First().Columns)
        {
            if (info.Name == "Pivot")
            {
                info.Name = firstSite.Name + " Pivot";
                break;
            }
        }

        foreach (var s in sites.Skip(1))
        {
            try
            {
                var tmp = QueryRunner.GetSingleSiteResults(parsedQuery, s, currentUser);
                results.ExecutionTime += tmp.ExecutionTime;
                MergePivot(s, results, tmp);
            }
            catch (Exception)
            {
                // don't blow up here ... just skip the site.
            }
        }
    }
    else
    {
        // Tag every row with the site it came from
        results.ResultSets[0].Columns.Add(new ResultColumnInfo { Name = "Site Name", Type = ResultColumnType.Site });

        foreach (var row in results.ResultSets[0].Rows)
        {
            row.Add(sites.First().SiteInfo);
        }

        foreach (var s in sites.Skip(1))
        {
            // Stop fanning out once the caller has cancelled the job
            if (result != null && result.Cancelled)
            {
                break;
            }

            try
            {
                var tmp = QueryRunner.GetSingleSiteResults(parsedQuery, s, currentUser, result);

                foreach (var row in tmp.ResultSets[0].Rows)
                {
                    row.Add(s.SiteInfo);
                    results.ResultSets[0].Rows.Add(row);
                }

                results.ExecutionTime += tmp.ExecutionTime;
                results.Messages += "\n" + tmp.Messages;
            }
            catch (Exception)
            {
                // don't blow up ... just skip the site
            }
        }
    }

    results.TargetSites = parsedQuery.TargetSites;

    return(results);
}
/// <summary>
/// Runs a query against one site, preferring a cached result when available.
/// </summary>
/// <param name="query">The parsed query to execute</param>
/// <param name="site">The site whose database is queried</param>
/// <param name="user">The user running the query</param>
public static QueryResults GetSingleSiteResults(ParsedQuery query, Site site, User user)
{
    QueryResults results = null;
    var timer = new Stopwatch();

    timer.Start();

    var cache = QueryUtil.GetCachedResults(query, site.Id);

    if (cache != null)
    {
        // Only usable if the cache can satisfy an execution-plan request (or none was made)
        if (!query.IncludeExecutionPlan || cache.ExecutionPlan != null)
        {
            results = new QueryResults();
            results.WithCache(cache);
            results.Truncated = cache.Truncated;
            results.Messages = cache.Messages;
            results.FromCache = true;

            // If we didn't ask for the execution plan, don't return it
            if (!query.IncludeExecutionPlan)
            {
                results.ExecutionPlan = null;
            }
        }
    }

    timer.Stop();

    if (results == null)
    {
        results = ExecuteNonCached(query, site, user);
        results.FromCache = false;

        // `cache != null` means a row existed but lacked the plan (planOnly update)
        AddResultToCache(results, query, site, cache != null);
    }
    else
    {
        // Cache hit: report the (tiny) lookup time as the execution time
        results.ExecutionTime = timer.ElapsedMilliseconds;
    }

    results.Url = site.Url;
    results.SiteId = site.Id;
    results.SiteName = site.Name.ToLower();

    return results;
}
/// <summary>
/// Dispatches to single-site or multi-site execution based on the query's target.
/// </summary>
public static QueryResults GetResults(ParsedQuery query, Site site, User user, AsyncQueryRunner.AsyncResult result = null)
{
    return query.TargetSites == TargetSites.Current
        ? GetSingleSiteResults(query, site, user, result)
        : GetMultiSiteResults(query, user, result);
}
/// <summary>
/// Runs a query against every targeted site and merges the per-site results into
/// one set: pivot-style merging when a "Pivot" column exists, otherwise appended
/// rows with an extra "Site Name" column.
/// </summary>
/// <param name="parsedQuery">The parsed query to execute on each site</param>
/// <param name="currentUser">The user running the query</param>
/// <param name="result">Optional async handle used for cancellation tracking</param>
private static QueryResults GetMultiSiteResults(ParsedQuery parsedQuery, User currentUser, AsyncQueryRunner.AsyncResult result = null)
{
    var sites = Current.DB.Sites.All();

    // Narrow the site list according to the requested target group
    if (parsedQuery.TargetSites == TargetSites.AllNonMetaSites)
    {
        sites = sites.Where(s => !s.Url.Contains("meta.")).ToList();
    }
    else if (parsedQuery.TargetSites == TargetSites.AllMetaSites)
    {
        sites = sites.Where(s => s.Url.Contains("meta.")).ToList();
    }
    else if (parsedQuery.TargetSites == TargetSites.AllNonMetaSitesButSO)
    {
        sites = sites.Where(s => !s.Url.Contains("meta.") && !s.Url.Contains("stackoverflow.")).ToList();
    }
    else if (parsedQuery.TargetSites == TargetSites.AllMetaSitesButMSO)
    {
        sites = sites.Where(s => s.Url.Contains("meta.") && !s.Url.Contains("stackoverflow.")).ToList();
    }

    var firstSite = sites.First();
    var results = QueryRunner.GetSingleSiteResults(parsedQuery, firstSite, currentUser, result);

    if (results.ResultSets.First().Columns.Where(c => c.Name == "Pivot").Any())
    {
        // Rename the first site's pivot column so per-site columns stay distinct
        foreach (var info in results.ResultSets.First().Columns)
        {
            if (info.Name == "Pivot")
            {
                info.Name = firstSite.Name + " Pivot";
                break;
            }
        }

        foreach (var s in sites.Skip(1))
        {
            try
            {
                var tmp = QueryRunner.GetSingleSiteResults(parsedQuery, s, currentUser);
                results.ExecutionTime += tmp.ExecutionTime;
                MergePivot(s, results, tmp);
            }
            catch (Exception)
            {
                // don't blow up here ... just skip the site.
            }
        }
    }
    else
    {
        // Tag every row with the site it came from
        results.ResultSets[0].Columns.Add(new ResultColumnInfo { Name = "Site Name", Type = ResultColumnType.Site });

        foreach (var row in results.ResultSets[0].Rows)
        {
            row.Add(sites.First().SiteInfo);
        }

        foreach (var s in sites.Skip(1))
        {
            // Stop fanning out once the caller has cancelled the job
            if (result != null && result.Cancelled)
            {
                break;
            }

            try
            {
                var tmp = QueryRunner.GetSingleSiteResults(parsedQuery, s, currentUser, result);

                foreach (var row in tmp.ResultSets[0].Rows)
                {
                    row.Add(s.SiteInfo);
                    results.ResultSets[0].Rows.Add(row);
                }

                results.ExecutionTime += tmp.ExecutionTime;
                results.Messages += "\n" + tmp.Messages;
            }
            catch (Exception)
            {
                // don't blow up ... just skip the site
            }
        }
    }

    results.TargetSites = parsedQuery.TargetSites;

    return results;
}
/// <summary>
/// Adds the results of a running a particular query for a given site to the database cache
/// </summary>
/// <param name="results">The results of the query</param>
/// <param name="query">The query that was executed</param>
/// <param name="site">The site that the query was run against</param>
/// <param name="planOnly">Whether or not this is just an update to add the cached execution plan</param>
private static void AddResultToCache(QueryResults results, ParsedQuery query, Site site, bool planOnly)
{
    // If the cache time is zero, just don't save a cache
    if (AppSettings.AutoExpireCacheMinutes == 0)
    {
        return;
    }

    if (!planOnly)
    {
        // Fresh cache entry: persist the serialized result sets plus metadata
        Current.DB.Execute(@"
            INSERT INTO CachedResults(
                QueryHash, SiteId, Results, ExecutionPlan,
                Messages, Truncated, CreationDate
            ) VALUES(
                @hash, @site, @results, @plan,
                @messages, @truncated, @creation
            )",
            new
            {
                hash = query.ExecutionHash,
                site = site.Id,
                results = results.GetJsonResults(),
                plan = results.ExecutionPlan,
                messages = results.Messages,
                truncated = results.Truncated,
                creation = DateTime.UtcNow
            }
        );
    }
    else
    {
        // Should we just update everything in this case? Presumably the only
        // thing that changed was the addition of the execution plan, but...
        // NOTE(review): this UPDATE filters only by QueryHash and not SiteId —
        // verify whether overwriting the plan for every site sharing this hash
        // is intended.
        Current.DB.Execute(@"
            UPDATE CachedResults SET ExecutionPlan = @plan WHERE QueryHash = @hash",
            new
            {
                plan = results.ExecutionPlan,
                hash = query.ExecutionHash
            }
        );
    }
}
/// <summary>
/// A parameter may carry both a type and a default: ##name:type?default##.
/// </summary>
public void TestTypedDefaultParameterValue()
{
    const string sql = "SELECT * FROM Users WHERE Reputation > ##Reputation:int?101##";

    var parsed = new ParsedQuery(sql, null);

    Assert.IsTrue(parsed.Parameters.ContainsKey("Reputation"));
    Assert.AreEqual("101", parsed.Parameters["Reputation"].Default);
    Assert.AreEqual("SELECT * FROM Users WHERE Reputation > 101", parsed.ExecutionSql);
    Assert.IsTrue(parsed.IsExecutionReady);
}
/// <summary>
/// Executes the query against the given site without consulting the results cache,
/// applying per-IP rate limiting and blacklisting for the OData endpoint.
/// </summary>
/// <param name="query">The parsed query whose execution batches will be run</param>
/// <param name="site">The site whose database the query runs against</param>
/// <param name="user">The user executing the query (currently unused here)</param>
/// <returns>The populated results, including messages, timing and (optionally) the execution plan</returns>
/// <exception cref="Exception">Thrown when the caller is over the hourly limit or blacklisted</exception>
public static QueryResults ExecuteNonCached(ParsedQuery query, Site site, User user)
{
    var remoteIP = OData.GetRemoteIP();
    var key = "total-" + remoteIP;

    // Sliding one-hour window of query executions per remote IP.
    var currentCount = (int?)Current.GetCachedObject(key) ?? 0;
    currentCount++;
    Current.SetCachedObjectSliding(key, currentCount, 60 * 60);

    if (currentCount > 130)
    {
        // clearly a robot, auto black list
        // BUG FIX: the BlackList row was previously constructed in a local variable
        // but never persisted, so the auto-blacklist never took effect. Insert it,
        // guarded so repeated over-limit requests don't create duplicate rows.
        Current.DB.Execute(@"
            INSERT INTO BlackList(CreationDate, IPAddress)
            SELECT @creation, @ip
            WHERE NOT EXISTS (SELECT 1 FROM BlackList WHERE IPAddress = @ip)",
            new { creation = DateTime.UtcNow, ip = remoteIP }
        );
    }
    if (currentCount > 100)
    {
        throw new Exception("You can not run any new queries for another hour, you have exceeded your limit!");
    }

    if (Current.DB.ExecuteQuery<int>("select count(*) from BlackList where IPAddress = {0}", remoteIP).First() > 0)
    {
        // Slow down abusers a little before rejecting them.
        System.Threading.Thread.Sleep(2000);
        throw new Exception("You have been blacklisted due to abuse!");
    }

    var results = new QueryResults();

    using (SqlConnection cnn = site.GetConnection())
    {
        cnn.Open();

        // well we do not want to risk blocking, if somebody needs to change this we will need to add a setting
        cnn.Execute("set transaction isolation level read uncommitted");

        var timer = new Stopwatch();
        timer.Start();

        var messages = new StringBuilder();

        // Collect server info messages (PRINT output etc.) into the results.
        var infoHandler = new SqlInfoMessageEventHandler((sender, args) =>
        {
            // todo handle errors as well
            messages.AppendLine(args.Message);
        });

        try
        {
            cnn.InfoMessage += infoHandler;

            if (query.IncludeExecutionPlan)
            {
                using (var command = new SqlCommand("SET STATISTICS XML ON", cnn))
                {
                    command.ExecuteNonQuery();
                }
            }

            var plan = new QueryPlan();
            foreach (string batch in query.ExecutionSqlBatches)
            {
                using (var command = new SqlCommand(batch, cnn))
                {
                    command.CommandTimeout = QUERY_TIMEOUT;
                    PopulateResults(results, command, messages, query.IncludeExecutionPlan);
                }

                if (query.IncludeExecutionPlan)
                {
                    // Each batch's plan arrives via results.ExecutionPlan; fold it into
                    // the combined plan and clear it for the next batch.
                    plan.AppendBatchPlan(results.ExecutionPlan);
                    results.ExecutionPlan = null;
                }
            }
            results.ExecutionPlan = plan.PlanXml;
        }
        finally
        {
            // Always detach the handler and flush messages, even when a batch throws.
            cnn.InfoMessage -= infoHandler;
            results.Messages = messages.ToString();
        }

        timer.Stop();
        results.ExecutionTime = timer.ElapsedMilliseconds;

        ProcessMagicColumns(results, cnn);
    }

    return results;
}
/// <summary>
/// Finishes a query execution request: translates the results, creates a revision
/// (and a new/forked/updated query set) when the executed SQL differs from the
/// current revision, logs the execution, and returns the results as JSON.
/// </summary>
/// <param name="results">The raw execution results to translate and annotate</param>
/// <param name="parsedQuery">The query that was executed (provides Hash and Sql)</param>
/// <param name="context">Ambient query-set/revision/title/description state for this request</param>
/// <param name="siteId">The site the query was run against, used for execution logging</param>
/// <returns>A JSON content result containing the annotated results</returns>
private ActionResult CompleteResponse(
    QueryResults results,
    ParsedQuery parsedQuery,
    QueryContextData context,
    int siteId
)
{
    results = TranslateResults(parsedQuery, context.IsText, results);

    // Look up an existing Queries row for this exact SQL (by hash) so identical
    // query text is shared across revisions.
    var query = Current.DB.Query<Query>(
        "SELECT * FROM Queries WHERE QueryHash = @hash",
        new { hash = parsedQuery.Hash }
    ).FirstOrDefault();

    int revisionId = 0;
    DateTime saveTime;

    // We only create revisions if something actually changed.
    // We'll log it as an execution anyway if applicable, so the user will
    // still get a link in their profile, just not their own revision.
    if (context.Revision == null && (context.QuerySet == null || query == null ||
        context.QuerySet.CurrentRevision == null || context.QuerySet.CurrentRevision.QueryId != query.Id))
    {
        // Reuse the shared Queries row when present, otherwise insert the new SQL.
        int queryId;
        if (query == null)
        {
            queryId = (int)Current.DB.Queries.Insert(
                new
                {
                    QueryHash = parsedQuery.Hash,
                    QueryBody = parsedQuery.Sql
                }
            );
        }
        else
        {
            queryId = query.Id;
        }

        // saveTime is assigned here as a side effect of the initializer, and is
        // only read below inside this same branch, so it is definitely assigned.
        revisionId = (int)Current.DB.Revisions.Insert(
            new
            {
                QueryId = queryId,
                OwnerId = CurrentUser.IsAnonymous ? null : (int?)CurrentUser.Id,
                OwnerIP = GetRemoteIP(),
                CreationDate = saveTime = DateTime.UtcNow,
                OriginalQuerySetId = context.QuerySet != null ? context.QuerySet.Id : (int?)null
            }
        );

        int querySetId;

        // brand new queryset
        if (context.QuerySet == null)
        {
            // insert it
            querySetId = (int)Current.DB.QuerySets.Insert(new
            {
                InitialRevisionId = revisionId,
                CurrentRevisionId = revisionId,
                context.Title,
                context.Description,
                LastActivity = DateTime.UtcNow,
                Votes = 0,
                Views = 0,
                Featured = false,
                Hidden = false,
                CreationDate = DateTime.UtcNow,
                OwnerIp = CurrentUser.IPAddress,
                OwnerId = CurrentUser.IsAnonymous?(int?)null:CurrentUser.Id
            });
            // Back-fill the revision's origin now that the query set id exists.
            Current.DB.Revisions.Update(revisionId, new { OriginalQuerySetId = querySetId });
        }
        // NOTE(review): this condition looks suspicious — an anonymous user whose IP
        // MATCHES the owner's IP takes the fork branch rather than updating their own
        // query set, and because `OwnerId != CurrentUser.Id` presumably also holds for
        // anonymous-owned sets (null owner), anonymous users appear to always fork.
        // Verify whether the first clause was meant to compare with `!=`.
        else if ( (CurrentUser.IsAnonymous && context.QuerySet.OwnerIp == CurrentUser.IPAddress) || context.QuerySet.OwnerId != CurrentUser.Id)
        {
            // fork it
            querySetId = (int)Current.DB.QuerySets.Insert(new
            {
                InitialRevisionId = context.QuerySet.InitialRevisionId,
                CurrentRevisionId = revisionId,
                context.Title,
                context.Description,
                LastActivity = DateTime.UtcNow,
                Votes = 0,
                Views = 0,
                Featured = false,
                Hidden = false,
                CreationDate = DateTime.UtcNow,
                OwnerIp = CurrentUser.IPAddress,
                OwnerId = CurrentUser.IsAnonymous ? (int?)null : CurrentUser.Id,
                ForkedQuerySetId = context.QuerySet.Id
            });
            // Copy the source set's revision history into the fork.
            Current.DB.Execute(@"insert QuerySetRevisions(QuerySetId, RevisionId) select @newId, RevisionId from QuerySetRevisions where QuerySetId = @oldId", new { newId = querySetId, oldId = context.QuerySet.Id });
        }
        else
        {
            // update it
            querySetId = context.QuerySet.Id;
            // Keep the existing title/description when the request didn't supply one.
            context.Title = context.Title ?? context.QuerySet.Title;
            context.Description = context.Description ?? context.QuerySet.Description;
            Current.DB.QuerySets.Update(context.QuerySet.Id, new { context.Title, context.Description, CurrentRevisionId = revisionId, LastActivity = DateTime.UtcNow});
        }

        // Link the new revision into whichever query set we ended up with.
        Current.DB.QuerySetRevisions.Insert(new { QuerySetId = querySetId, RevisionId = revisionId });

        results.RevisionId = revisionId;
        results.Created = saveTime;
        results.QuerySetId = querySetId;
    }
    else
    {
        // Nothing changed: report the pre-existing revision instead of creating one.
        results.RevisionId = context.Revision != null ? context.Revision.Id : context.QuerySet.CurrentRevisionId;
        results.QuerySetId = context.QuerySet.Id;
        results.Created = null;
    }

    if (context.Title != null)
    {
        results.Slug = context.Title.URLFriendly();
    }

    // Executions are logged even when no new revision was created.
    QueryRunner.LogRevisionExecution(CurrentUser, siteId, results.RevisionId);

    // Consider handling this XSS condition (?) in the ToJson() method instead, if possible
    return Content(results.ToJson().Replace("/", "\\/"), "application/json");
}