/// <summary>
/// Returns the cached table-schema result sets for a site, lazily initializing
/// the cache on first use. Returns null if preferences are still unavailable
/// after a refresh attempt; may also return null in per-site mode when the
/// site has no cache entry yet.
/// </summary>
public static Dictionary<string, ResultSet> GetCache(Site site) { if (Preferences == null) { lock (cache) { if (Preferences == null) { Refresh(); } } // NOTE(review): double-checked pattern guards on Preferences but locks on
        // cache — presumably Refresh() assigns both under this lock; confirm,
        // otherwise a torn initialization could slip through.
        if (Preferences == null) { return null; } } if (Preferences.PerSite) { // Per-site mode: one schema cache per site, keyed by tiny name.
        Dictionary<string, ResultSet> tablesCache; cache.TryGetValue(site.TinyName, out tablesCache); return tablesCache; } // Shared mode: a single entry serves every site.
    // NOTE(review): First() throws on an empty dictionary — assumes Refresh()
    // always seeds at least one entry; verify.
    return cache.First().Value; }
/// <summary>
/// Looks up a site by tiny name or full name (case-insensitively).
/// </summary>
/// <param name="sitename">The name to look up; any casing is accepted.</param>
/// <param name="site">The matched site, or null if none matched.</param>
/// <returns>
/// True only when the match was on the site's tiny name; false when no site
/// matched, or when it matched on the full name instead (site is still set).
/// </returns>
public bool TryGetSite(string sitename, out Site site)
{
    // The SQL lowercases the columns, so the input must be lowercased too —
    // otherwise mixed-case input can never match (the original passed the raw
    // string straight through).
    var normalized = sitename == null ? null : sitename.ToLowerInvariant();

    site = Current.DB.Query<Models.Site>(
        "SELECT * from Sites WHERE LOWER(TinyName) = @sitename OR LOWER(Name) = @sitename",
        new { sitename = normalized }
    ).FirstOrDefault();

    // Ordinal ignore-case comparison avoids culture-sensitive ToLower() pitfalls.
    return site != null && string.Equals(site.TinyName, normalized, StringComparison.OrdinalIgnoreCase);
}
/// <summary>
/// Appends one site's section to the combined message buffer: the site name,
/// a separator rule, the query messages, and trailing blank lines.
/// </summary>
static void AddBody(StringBuilder buffer, QueryResults results, Site site)
{
    // Section header for this site.
    buffer.AppendLine(site.Name);
    buffer.AppendLine("-------------------------------------------------");

    // The site's query output, then three blank lines to separate it from
    // the next section.
    buffer.AppendLine(results.Messages);
    buffer.AppendLine();
    buffer.AppendLine();
    buffer.AppendLine();
}
/// <summary>
/// Serializes the site's schema cache to camel-cased JSON, returning an empty
/// JSON object when no cache is available.
/// </summary>
public static string GetCacheAsJson(Site site)
{
    var tables = GetCache(site);
    if (tables == null)
    {
        return "{}";
    }

    var settings = new JsonSerializerSettings
    {
        ContractResolver = new CamelCasePropertyNamesContractResolver()
    };
    return JsonConvert.SerializeObject(tables, Formatting.None, settings);
}
/// <summary>
/// Dispatches a query to the single-site or multi-site execution path based
/// on the query's target-sites directive.
/// </summary>
public static QueryResults GetResults(ParsedQuery query, Site site, User user, AsyncQueryRunner.AsyncResult result = null)
{
    // Anything other than "current site" fans out across every site database.
    return query.TargetSites == TargetSites.Current
        ? GetSingleSiteResults(query, site, user, result)
        : GetMultiSiteResults(query, user, result);
}
/// <summary>
/// Merges one site's pivoted results into the accumulated multi-site results:
/// rows sharing all non-pivot column values are joined, and this site's pivot
/// value is appended as a new "&lt;SiteName&gt; Pivot" column.
/// </summary>
/// <param name="site">The site the new results came from (names the new column).</param>
/// <param name="current">The accumulated results; mutated in place.</param>
/// <param name="newResults">The new site's results to fold in.</param>
public static void MergePivot(Site site, QueryResults current, QueryResults newResults) { // Locate the column literally named "Pivot".
    // NOTE(review): if no column is named "Pivot", pivotIndex ends at the last
    // column index — presumably the caller guarantees the column exists; confirm.
    int pivotIndex = -1; foreach (var info in newResults.ResultSets.First().Columns) { pivotIndex++; if (info.Name == "Pivot") { break; } } // Key each existing row by its non-pivot values (joined with a "|||"
    // sentinel), considering only the columns the new result set also has.
    var map = current .ResultSets .First() .Rows .Select(columns => new { key = string.Join("|||", columns.Where((c, i) => i != pivotIndex && i < newResults.ResultSets.First().Columns.Count)), cols = columns }) .ToDictionary(r => r.key, r => r.cols); var newRows = new List<List<object>>(); // Matching rows get this site's pivot value appended; unmatched rows are
    // collected for insertion afterwards.
    foreach (var row in newResults.ResultSets.First().Rows) { List<object> foundRow; if (map.TryGetValue(string.Join("|||", row.Where((c, i) => i != pivotIndex)), out foundRow)) { foundRow.Add(row[pivotIndex]); } else { newRows.Add(row); } } // Add the per-site pivot column, typed like the source pivot column.
    current.ResultSets.First().Columns.Add(new ResultColumnInfo { Name = site.Name + " Pivot", Type = newResults.ResultSets.First().Columns[pivotIndex].Type }); var totalColumns = current.ResultSets.First().Columns.Count; // Pad existing rows that did not receive a value for the new column.
    foreach (var row in current.ResultSets.First().Rows) { if (row.Count < totalColumns) { row.Add(null); } } // Unmatched new rows: pad with nulls at the pivot position so their pivot
    // value lands in the newly added column, then append them.
    foreach (var row in newRows) { for (int i = pivotIndex+1; i < totalColumns; i++) { row.Insert(pivotIndex, null); } current.ResultSets.First().Rows.Add(row); } }
/// <summary>
/// Executes a parsed query directly against a site's database, bypassing the
/// results cache. Applies per-IP rate limiting (sliding one-hour window) and
/// blacklist enforcement before running anything.
/// </summary>
/// <param name="query">The parsed query; executed batch by batch.</param>
/// <param name="site">The site whose database is queried.</param>
/// <param name="user">The requesting user (not used directly here).</param>
/// <param name="result">Optional async tracking handle; enables cancellation.</param>
/// <returns>The populated results, including messages, plan, and timing.</returns>
/// <exception cref="Exception">Thrown when rate-limited or blacklisted.</exception>
public static QueryResults ExecuteNonCached(ParsedQuery query, Site site, User user, AsyncQueryRunner.AsyncResult result)
{
    var remoteIP = OData.GetRemoteIP();
    var key = "total-" + remoteIP;
    var currentCount = (int?)Current.GetCachedObject(key) ?? 0;
    currentCount++;
    Current.SetCachedObjectSliding(key, currentCount, 60 * 60);

    if (currentCount > 130)
    {
        // clearly a robot, auto black list
        Current.DB.BlackList.Insert(new { CreationDate = DateTime.UtcNow, IPAddress = remoteIP });
    }

    if (currentCount > 100)
    {
        throw new Exception("You can not run any new queries for another hour, you have exceeded your limit!");
    }

    if (Current.DB.Query<int>("select count(*) from BlackList where IPAddress = @remoteIP", new { remoteIP }).First() > 0)
    {
        // Slow blacklisted callers down a little before rejecting them.
        System.Threading.Thread.Sleep(2000);
        throw new Exception("You have been blacklisted due to abuse!");
    }

    var results = new QueryResults();

    using (SqlConnection cnn = site.GetOpenConnection())
    {
        // well we do not want to risk blocking, if somebody needs to change this we will need to add a setting
        cnn.Execute("set transaction isolation level read uncommitted");

        var timer = new Stopwatch();
        timer.Start();

        var messages = new StringBuilder();
        var infoHandler = new SqlInfoMessageEventHandler((sender, args) =>
        {
            // todo handle errors as well
            messages.AppendLine(args.Message);
        });

        try
        {
            cnn.InfoMessage += infoHandler;

            if (query.IncludeExecutionPlan)
            {
                using (var command = new SqlCommand("SET STATISTICS XML ON", cnn))
                {
                    command.ExecuteNonQuery();
                }
            }

            var plan = new QueryPlan();
            foreach (string batch in query.ExecutionSqlBatches)
            {
                using (var command = new SqlCommand(batch, cnn))
                {
                    if (result != null)
                    {
                        result.Command = command;
                        if (result.Cancelled)
                        {
                            // Skip remaining batches once the caller cancels.
                            continue;
                        }
                    }

                    command.CommandTimeout = AppSettings.QueryTimeout;

                    try
                    {
                        PopulateResults(results, command, result, messages, query.IncludeExecutionPlan);
                    }
                    catch (Exception)
                    {
                        // Ugh. So, if we cancel the query in-process, we get an exception...
                        // But we have no good way of knowing that the exception here is actually
                        // *that* exception...so we'll just assume it was if the state is Cancelled
                        if (result == null || result.State != AsyncQueryRunner.AsyncState.Cancelled)
                        {
                            // Fixed: was "throw ex;", which resets the stack trace.
                            throw;
                        }
                    }
                }

                if (query.IncludeExecutionPlan)
                {
                    // Fold this batch's plan into the combined plan document.
                    plan.AppendBatchPlan(results.ExecutionPlan);
                    results.ExecutionPlan = null;
                }
            }
            results.ExecutionPlan = plan.PlanXml;
        }
        finally
        {
            cnn.InfoMessage -= infoHandler;
            results.Messages = messages.ToString();
        }

        timer.Stop();
        results.ExecutionTime = timer.ElapsedMilliseconds;

        ProcessMagicColumns(results, cnn);
    }

    return results;
}
/// <summary>
/// Synchronous variant of ExecuteNonCached: runs a parsed query against a
/// site's database without cache or cancellation support, after per-IP rate
/// limiting and blacklist checks.
/// </summary>
/// <param name="query">The parsed query; executed batch by batch.</param>
/// <param name="site">The site whose database is queried.</param>
/// <param name="user">The requesting user (not used directly here).</param>
/// <returns>The populated results, including messages, plan, and timing.</returns>
/// <exception cref="Exception">Thrown when rate-limited or blacklisted.</exception>
public static QueryResults ExecuteNonCached(ParsedQuery query, Site site, User user) { var remoteIP = OData.GetRemoteIP(); var key = "total-" + remoteIP; var currentCount = (int?)Current.GetCachedObject(key) ?? 0; currentCount++; Current.SetCachedObjectSliding(key, currentCount, 60 * 60); if (currentCount > 130) { // clearly a robot, auto black list
        // NOTE(review): `b` is constructed but never persisted — the auto
        // blacklist is a no-op here. Compare the async overload, which inserts
        // the row via Current.DB.BlackList.Insert(...). Confirm and fix.
        var b = new BlackList { CreationDate = DateTime.UtcNow, IPAddress = remoteIP }; } if (currentCount > 100) { throw new Exception("You can not run any new queries for another hour, you have exceeded your limit!"); } if (Current.DB.ExecuteQuery<int>("select count(*) from BlackList where IPAddress = {0}", remoteIP).First() > 0) { // Slow blacklisted callers down a little before rejecting them.
        System.Threading.Thread.Sleep(2000); throw new Exception("You have been blacklisted due to abuse!"); } var results = new QueryResults(); using (SqlConnection cnn = site.GetConnection()) { cnn.Open(); // well we do not want to risk blocking, if somebody needs to change this we will need to add a setting
        cnn.Execute("set transaction isolation level read uncommitted"); var timer = new Stopwatch(); timer.Start(); var messages = new StringBuilder(); var infoHandler = new SqlInfoMessageEventHandler((sender, args) => { // todo handle errors as well
            messages.AppendLine(args.Message); }); try { cnn.InfoMessage += infoHandler; if (query.IncludeExecutionPlan) { using (var command = new SqlCommand("SET STATISTICS XML ON", cnn)) { command.ExecuteNonQuery(); } } var plan = new QueryPlan(); foreach (string batch in query.ExecutionSqlBatches) { using (var command = new SqlCommand(batch, cnn)) { command.CommandTimeout = QUERY_TIMEOUT; PopulateResults(results, command, messages, query.IncludeExecutionPlan); } // Fold this batch's plan into the combined plan document.
            if (query.IncludeExecutionPlan) { plan.AppendBatchPlan(results.ExecutionPlan); results.ExecutionPlan = null; } } results.ExecutionPlan = plan.PlanXml; } finally { // Always detach the handler and flush captured messages, even on error.
            cnn.InfoMessage -= infoHandler; results.Messages = messages.ToString(); } timer.Stop(); results.ExecutionTime = timer.ElapsedMilliseconds; 
 ProcessMagicColumns(results, cnn); } return results; }
/// <summary>
/// Guards query execution: rejects queries with unset parameters or parse
/// errors, and rejects a missing site.
/// </summary>
/// <exception cref="ApplicationException">
/// Thrown when the query is not execution-ready or the site is null.
/// </exception>
private static void ValidateQuery(ParsedQuery query, Site site)
{
    if (!query.IsExecutionReady)
    {
        // Prefer the parser's own message when it has one.
        var message = string.IsNullOrEmpty(query.ErrorMessage)
            ? "All parameters must be set!"
            : query.ErrorMessage;
        throw new ApplicationException(message);
    }

    if (site == null)
    {
        throw new ApplicationException("Invalid site ID");
    }
}
/// <summary>
/// Starts a query running on a background task, enforcing a per-user cap on
/// concurrently running queries. Returns immediately (after a short grace
/// period) with a handle the caller can poll.
/// </summary>
/// <param name="query">The parsed query to run.</param>
/// <param name="user">The requesting user; anonymous users are keyed by IP.</param>
/// <param name="site">The site to run against.</param>
/// <param name="context">Caller context carried on the result.</param>
/// <returns>The async handle tracking the job's state and results.</returns>
/// <exception cref="ApplicationException">Thrown when the user already has
/// the maximum number of queries running.</exception>
public static AsyncResult Execute(ParsedQuery query, User user, Site site, QueryContextData context) { // Anonymous users are throttled per IP; signed-in users per account id.
    string userTag = user.IsAnonymous ? user.IPAddress : user.Id.ToString(); List<Task> activeTasks; running.TryGetValue(userTag, out activeTasks); if (activeTasks != null) { lock(activeTasks) { if (activeTasks.Where(t => !t.IsCompleted).Count() >= AppSettings.ConcurrentQueries) { throw new ApplicationException("Too many queries are running, you may only run " + AppSettings.ConcurrentQueries + " queries at a time"); } } } else { // NOTE(review): TryAdd followed by an indexer read — presumably a lost
        // race here just reads whichever list won the TryAdd; confirm `running`
        // is a ConcurrentDictionary so this is safe.
        running.TryAdd(userTag, new List<Task>()); activeTasks = running[userTag]; } AsyncResult result = new AsyncResult { JobId = Guid.NewGuid(), State = AsyncState.Pending, ParsedQuery = query, Site = site, LastPoll = DateTime.UtcNow, QueryContextData = context }; Task task = new Task(() => { try { result.QueryResults = QueryRunner.GetResults(query, site, user, result); // Leave terminal states (e.g. Cancelled) alone; only promote Pending.
        if (result.State == AsyncState.Pending) { result.State = AsyncState.Success; } } catch (Exception e) { result.Exception = e; result.State = AsyncState.Failure; } }); // On completion: stamp the finish time and prune finished tasks from the
    // user's active list (under the same lock used by the cap check above).
    task.ContinueWith(ignore => { result.CompletionDate = DateTime.UtcNow; lock (activeTasks) { activeTasks.RemoveAll(t => t.IsCompleted); } }); result.Task = task; jobs.TryAdd(result.JobId, result); task.Start(); lock(activeTasks) { activeTasks.Add(task); } // give it some time to get results ... 
 System.Threading.Thread.Sleep(50); return result; }
/// <summary>
/// Runs a query against a single site, serving from the database-backed
/// results cache when possible and caching fresh results otherwise.
/// </summary>
/// <returns>Results stamped with the site's url, id, and lowercased name.</returns>
public static QueryResults GetSingleSiteResults(ParsedQuery query, Site site, User user)
{
    var watch = new Stopwatch();
    watch.Start();

    QueryResults results = null;
    var cached = QueryUtil.GetCachedResults(query, site.Id);

    // A cache hit is only usable if it has a plan when the caller wants one.
    if (cached != null && (!query.IncludeExecutionPlan || cached.ExecutionPlan != null))
    {
        results = new QueryResults();
        results.WithCache(cached);
        results.Truncated = cached.Truncated;
        results.Messages = cached.Messages;
        results.FromCache = true;

        // If we didn't ask for the execution plan, don't return it
        if (!query.IncludeExecutionPlan)
        {
            results.ExecutionPlan = null;
        }
    }

    watch.Stop();

    if (results == null)
    {
        // Cache miss (or unusable hit): run for real, then store the results.
        // A non-null cached row here means we're only backfilling the plan.
        results = ExecuteNonCached(query, site, user);
        results.FromCache = false;
        AddResultToCache(results, query, site, cached != null);
    }
    else
    {
        // Cache hits report the lookup time as the execution time.
        results.ExecutionTime = watch.ElapsedMilliseconds;
    }

    results.Url = site.Url;
    results.SiteId = site.Id;
    results.SiteName = site.Name.ToLower();

    return results;
}
// Extensibility hooks for Site persistence — presumably the LINQ to SQL
// generated DataContext pattern, where the generated code calls these (only
// if implemented elsewhere in the partial class) when rows are deleted,
// updated, or inserted. TODO confirm against the generated half of this class.
partial void DeleteSite(Site instance);
partial void UpdateSite(Site instance);
partial void InsertSite(Site instance);
/// <summary>
/// Runs a query against a single site with optional async-cancellation
/// tracking, serving from the database-backed results cache when possible and
/// caching fresh, non-cancelled results otherwise.
/// </summary>
/// <param name="result">Optional async handle; when its run was cancelled the
/// (partial) results are not cached.</param>
/// <returns>Results stamped with the site's url, id, and lowercased name.</returns>
private static QueryResults GetSingleSiteResults(ParsedQuery query, Site site, User user, AsyncQueryRunner.AsyncResult result = null)
{
    QueryResults results = null;

    var timer = new Stopwatch();
    timer.Start();

    var cache = QueryUtil.GetCachedResults(query, site.Id);
    if (cache != null)
    {
        // A cache hit is only usable if it has a plan when the caller wants one.
        if (!query.IncludeExecutionPlan || cache.ExecutionPlan != null)
        {
            results = new QueryResults();
            results.WithCache(cache);
            results.Truncated = cache.Truncated;
            results.Messages = cache.Messages;
            results.FromCache = true;

            // If we didn't ask for the execution plan, don't return it
            if (!query.IncludeExecutionPlan)
            {
                results.ExecutionPlan = null;
            }
        }
    }

    timer.Stop();

    if (results == null)
    {
        results = ExecuteNonCached(query, site, user, result);
        results.FromCache = false;

        // Don't cache cancelled results, since we don't know what state they're in...
        // Fixed: the old check (result != null && !result.Cancelled) also skipped
        // caching for synchronous calls (result == null), unlike the public overload.
        if (result == null || !result.Cancelled)
        {
            AddResultToCache(results, query, site, cache != null);
        }
    }
    else
    {
        // Cache hits report the lookup time as the execution time.
        results.ExecutionTime = timer.ElapsedMilliseconds;
    }

    results.Url = site.Url;
    results.SiteId = site.Id;
    results.SiteName = site.Name.ToLower();

    return results;
}
/// <summary>
/// Determines whether this site and another share a user base, i.e. one is
/// the meta site of the other (meta.stackoverflow.com is excluded, as it is
/// its own community).
/// </summary>
public bool SharesUsers(Site site)
{
    // Case 1: this site is a meta; strip "meta." and compare hosts.
    if (this.Url.StartsWith("http://meta.") && this.Url != "http://meta.stackoverflow.com")
    {
        return this.Url.Substring("http://meta.".Length) == site.Url.Substring("http://".Length);
    }

    // Case 2: the other site is a meta; same comparison, mirrored.
    if (site.Url.StartsWith("http://meta.") && site.Url != "http://meta.stackoverflow.com")
    {
        return site.Url.Substring("http://meta.".Length) == this.Url.Substring("http://".Length);
    }

    // Neither is a meta site (or one is meta.stackoverflow.com): no sharing.
    return false;
}
/// <summary>
/// Adds the results of running a particular query for a given site to the database cache
/// </summary>
/// <param name="results">The results of the query</param>
/// <param name="query">The query that was executed</param>
/// <param name="site">The site that the query was run against</param>
/// <param name="planOnly">Whether or not this is just an update to add the cached execution plan</param>
private static void AddResultToCache(QueryResults results, ParsedQuery query, Site site, bool planOnly)
{
    // If the cache time is zero, just don't save a cache
    if (AppSettings.AutoExpireCacheMinutes == 0)
    {
        return;
    }

    if (!planOnly)
    {
        Current.DB.Execute(@"
INSERT INTO CachedResults(
    QueryHash, SiteId, Results, ExecutionPlan,
    Messages, Truncated, CreationDate
) VALUES(
    @hash, @site, @results, @plan,
    @messages, @truncated, @creation
)",
            new
            {
                hash = query.ExecutionHash,
                site = site.Id,
                results = results.GetJsonResults(),
                plan = results.ExecutionPlan,
                messages = results.Messages,
                truncated = results.Truncated,
                creation = DateTime.UtcNow
            }
        );
    }
    else
    {
        // Should we just update everything in this case? Presumably the only
        // thing that changed was the addition of the execution plan, but...
        // Fixed: rows are keyed by (QueryHash, SiteId) — the INSERT above stores
        // both — so the UPDATE must filter on SiteId too, or one site's plan
        // would overwrite the cached plan of every site sharing this hash.
        Current.DB.Execute(@"
UPDATE CachedResults SET ExecutionPlan = @plan WHERE QueryHash = @hash AND SiteId = @site",
            new { plan = results.ExecutionPlan, hash = query.ExecutionHash, site = site.Id }
        );
    }
}
/// <summary>
/// Shared setup for query-composer views: records the active site, sets the
/// page header and menu selection, and populates the view data the composer
/// needs (the guessed user id and the site's table metadata).
/// </summary>
private void SetCommonQueryViewData(Site site, string header)
{
    // The site must be assigned first — the view-data lookups below read it.
    Site = site;

    SetHeader(header);
    SelectMenuItem("Compose Query");

    ViewData["GuessedUserId"] = Site.GuessUserId(CurrentUser);
    ViewData["Tables"] = Site.GetTableInfos();
}