public void Save(StackExchange.Profiling.MiniProfiler profiler)
{
    if (_logAsLevel == LogLevel.Debug)
    {
        _logger.Debug(profiler.RenderPlainText());
    }
    else if (_logAsLevel == LogLevel.Trace)
    {
        _logger.Trace(profiler.RenderPlainText());
    }
    else if (_logAsLevel == LogLevel.Info)
    {
        _logger.Info(profiler.RenderPlainText());
    }
    else if (_logAsLevel == LogLevel.Warn)
    {
        _logger.Warn(profiler.RenderPlainText());
    }
    else if (_logAsLevel == LogLevel.Error)
    {
        _logger.Error(profiler.RenderPlainText());
    }
    else if (_logAsLevel == LogLevel.Fatal)
    {
        _logger.Fatal(profiler.RenderPlainText());
    }
}
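A minimal usage sketch for the Save method above, assuming it lives on an NLog-backed store class (the NLogProfilerStore name and its constructor are hypothetical; LogManager and LogLevel are NLog's):

using NLog;
using StackExchange.Profiling;

// Hypothetical containing class; only Save itself comes from the example above.
var store = new NLogProfilerStore(LogManager.GetCurrentClassLogger(), LogLevel.Debug);

var profiler = MiniProfiler.Current;
if (profiler != null)
{
    // Renders the profiler tree as plain text and writes it at the configured level.
    store.Save(profiler);
}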
private static IEnumerable<Question> GetQuestions(StackExchange stackExchange)
{
    // The API returns a maximum of 100 questions per request, so we need to request
    // multiple pages of questions until we have them all.
    var hasMore = true;
    var page = 1;
    var questions = new List<Question>();

    while (hasMore)
    {
        // The instructions state to get questions with C#, .NET, and Selenium tags.
        // If we wanted questions with C#, .NET, or Selenium tags, we would have to
        // make multiple requests. If we wanted questions with only C#, .NET, and
        // Selenium tags, we would have to filter these questions some other way.
        var model = stackExchange.GetQuestions(new List<string>() { "C#", ".NET", "Selenium" }, "stackoverflow", page);
        questions.AddRange(model.Items);
        hasMore = model.Has_More;
        page++;
    }

    // Filter out questions from users that don't exist.
    return questions.Where(q => q.Owner.User_Type != "does_not_exist");
}
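The same has_more-driven loop applies to any paged Stack Exchange API call, so it can be factored into a small helper. A sketch under the assumption that every response model exposes Items and Has_More like the model above; the PagedResponse name is hypothetical:

// Hypothetical generic response shape; Items/Has_More mirror the model used above.
public class PagedResponse<T>
{
    public List<T> Items { get; set; }
    public bool Has_More { get; set; }
}

private static IEnumerable<T> GetAllPages<T>(Func<int, PagedResponse<T>> getPage)
{
    var page = 1;
    var hasMore = true;
    while (hasMore)
    {
        var response = getPage(page);
        foreach (var item in response.Items)
        {
            yield return item;
        }
        hasMore = response.Has_More;
        page++;
    }
}

// Usage sketch: GetAllPages(page => stackExchange.GetQuestions(tags, "stackoverflow", page))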
/// <summary>
/// Provides DbCommands that inject the faulty SQL into the Exception objects they throw.
/// </summary>
public RichErrorDbConnection(DbConnection connection, StackExchange.Profiling.MiniProfiler profiler)
    : base(connection, profiler)
{
#if DEBUG
    this.connection = connection;
    this.profiler = profiler;
#endif
}
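A usage sketch for the constructor above, assuming RichErrorDbConnection derives from MiniProfiler's profiled connection type (as the base(connection, profiler) call suggests) and that connectionString comes from configuration:

using System.Data.SqlClient;
using StackExchange.Profiling;

using (var connection = new RichErrorDbConnection(new SqlConnection(connectionString), MiniProfiler.Current))
{
    connection.Open();
    // Commands created from this connection carry the faulty SQL in the exceptions they throw.
}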
/// <summary>
/// Gets an HttpClient configured for the Stack Exchange API.
/// </summary>
/// <returns>An HttpClient whose base address targets the configured API version.</returns>
private HttpClient GetClient(StackExchange stackExchange)
{
    HttpClient client = new HttpClient();
    string host = Constantes.Endpoints.StackExchange + stackExchange.Version;
    client.BaseAddress = new Uri(host);
    return client;
}
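A sketch of calling the questions endpoint with the client returned above. The tagged, site, page, and pagesize query parameters follow the public Stack Exchange API; the method name below is hypothetical, and the URL assumes the base address ends with the API version. Note the API gzip-compresses responses, so the HttpClient handler may need automatic decompression enabled:

using System;
using System.Net.Http;
using System.Threading.Tasks;

// Hypothetical caller; GetClient above supplies the configured HttpClient.
private async Task<string> GetQuestionsJsonAsync(StackExchange stackExchange, int page)
{
    HttpClient client = GetClient(stackExchange);
    // "c%23" is the URL-encoded C# tag; tags are semicolon-separated.
    string url = client.BaseAddress + "/questions?tagged=c%23;.net;selenium&site=stackoverflow&page=" + page + "&pagesize=100";
    HttpResponseMessage response = await client.GetAsync(url);
    response.EnsureSuccessStatusCode();
    return await response.Content.ReadAsStringAsync();
}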
/// <summary>
/// Checks for and converts the native types supported by Redis.
/// </summary>
/// <param name="value"></param>
/// <param name="type"></param>
/// <param name="statusItem"></param>
/// <returns></returns>
internal static object ConvertRedisValueToObject(StackExchange.Redis.RedisValue value, Type type, StatusItem statusItem)
{
    object result = null;

    if (typeof(String) == type)
    {
        if ((statusItem & StatusItem.Compressed) == StatusItem.Compressed)
        {
            byte[] tmp = Utility.Deflate((Byte[])value, System.IO.Compression.CompressionMode.Decompress);
            result = System.Text.Encoding.UTF8.GetString(tmp);
        }
        else
        {
            result = (String)value;
        }
    }
    else if (typeof(Int16) == type)
    {
        result = (Int16)value;
    }
    else if (typeof(Int32) == type)
    {
        result = (Int32)value;
    }
    else if (typeof(Int64) == type)
    {
        result = (Int64)value;
    }
    else if (typeof(Boolean) == type)
    {
        result = (Boolean)value;
    }
    else if (typeof(Single) == type)
    {
        result = (Single)value;
    }
    else if (typeof(Double) == type)
    {
        result = (Double)value;
    }
    else
    {
        // Types not natively supported by Redis are stored as byte arrays,
        // optionally compressed and/or serialized.
        result = (Byte[])value;
        if ((statusItem & StatusItem.Compressed) == StatusItem.Compressed)
        {
            result = Utility.Deflate((Byte[])result, System.IO.Compression.CompressionMode.Decompress);
        }
        if ((statusItem & StatusItem.Serialized) == StatusItem.Serialized)
        {
            result = Utility.DeSerialize((Byte[])result);
        }
    }

    return result;
}
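A usage sketch for the converter above; RedisSerializer is a hypothetical name for the containing class, and default(StatusItem) stands in for "no compression or serialization flags":

using System;
using StackExchange.Redis;

IDatabase db = connectionMultiplexer.GetDatabase();
RedisValue raw = db.StringGet("some-key");

// Plain, uncompressed string; the containing class name is an assumption.
string text = (string)RedisSerializer.ConvertRedisValueToObject(raw, typeof(string), default(StatusItem));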
internal static void Run(StackExchange.Redis.ConnectionMultiplexer redis)
{
    Console.WriteLine("Hit <ESC> to shutdown this worker.");
    Redis = redis;

    while ((!Console.KeyAvailable) || (Console.ReadKey().Key != ConsoleKey.Escape))
    {
        var msg = Redis.GetDatabase().ListLeftPop(Tools.WORKQUEUE);
        if (!msg.IsNullOrEmpty)
        {
            HandleClusterWork(msg);
        }
        else
        {
            Thread.Sleep(0);
        }
    }
}
internal static void Run(StackExchange.Redis.ConnectionMultiplexer Redis)
{
    var maxOps = (int)ScalingConstants.LastOperation;
    var rnd = new Random();

    while (true)
    {
        var work = new Workpack();
        work.Instance = new InstanceDescriptor() { Id = rnd.Next(MAX_SESSIONS).ToString() };
        work.OpCode = (Operation)rnd.Next(maxOps);
        work.Input = new InputData();

        // Keep drawing until the second operand is non-zero.
        while (work.Input.SecondOperand == 0)
        {
            work.Input.SecondOperand = rnd.NextDouble();
        }

        RedisValue[] data = { Tools.Serialize(work) };
        Redis.GetDatabase().ListRightPush(Tools.WORKQUEUE, data, CommandFlags.FireAndForget);
        Console.Write(".");
    }
}
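A sketch of wiring the two Run methods above together. ConnectionMultiplexer.Connect is the standard StackExchange.Redis entry point; the Producer/Worker class names and the "localhost" connection string are assumptions:

using StackExchange.Redis;

// One multiplexer per process is the recommended pattern.
var redis = ConnectionMultiplexer.Connect("localhost");

// In the producer process: push random Workpack items onto Tools.WORKQUEUE.
Producer.Run(redis);

// In a worker process (typically a separate executable): pop and handle items until <ESC>.
Worker.Run(redis);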
static void Main(string[] args)
{
    // Initialize the StackExchange client.
    var stackExchange = new StackExchange();

    // Get the questions.
    var questions = GetQuestions(stackExchange);

    // Get the users.
    var users = GetUsers(stackExchange, questions);

    // Weight each question by its owner's badge counts.
    foreach (var question in questions)
    {
        question.Weight = GetWeight(users.First(u => u.User_Id == question.Owner.User_Id).Badge_Counts);
    }

    // I'm only going to write the top 10 questions.
    var topTenQuestions = questions.OrderByDescending(q => q.Weight).Take(10);

    // Write the top 10 questions to bin/Debug/TopTenQuestions.json.
    File.WriteAllText("TopTenQuestions.json", JsonConvert.SerializeObject(topTenQuestions));
}
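GetWeight is not shown in these examples. A purely illustrative sketch, assuming Badge_Counts exposes Gold, Silver, and Bronze counts weighted 100/10/1 (the type name, property names, and weights are all assumptions):

// Hypothetical implementation; only the Badge_Counts argument appears in the original.
private static int GetWeight(BadgeCounts badgeCounts)
{
    return badgeCounts.Gold * 100
         + badgeCounts.Silver * 10
         + badgeCounts.Bronze;
}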
private static IEnumerable<User> GetUsers(StackExchange stackExchange, IEnumerable<Question> questions)
{
    // Get all the user ids associated with the questions.
    var userIds = questions.Select(q => q.Owner.User_Id).Distinct().ToList();

    // The maximum number of userIds I can query for is 100, so split the list up into
    // lists of size 100.
    // From https://stackoverflow.com/questions/11463734/split-a-list-into-smaller-lists-of-n-size?utm_medium=organic&utm_source=google_rich_qa&utm_campaign=google_rich_qa
    var userLists = new List<List<int>>();
    for (var i = 0; i < userIds.Count; i += 100)
    {
        userLists.Add(userIds.GetRange(i, Math.Min(100, userIds.Count - i)));
    }

    var users = new List<User>();
    foreach (var list in userLists)
    {
        users.AddRange(stackExchange.GetUsers(list, "stackoverflow").Items);
    }

    return users;
}
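The batching loop above can also be factored into a small reusable helper, similar in spirit to the Chunk extension newer versions of .NET provide; a sketch with a hypothetical name:

// Splits a list into consecutive batches of at most batchSize items.
private static IEnumerable<List<T>> SplitIntoBatches<T>(List<T> source, int batchSize)
{
    for (var i = 0; i < source.Count; i += batchSize)
    {
        yield return source.GetRange(i, Math.Min(batchSize, source.Count - i));
    }
}

// Usage sketch: one API call per batch of at most 100 user ids.
// foreach (var batch in SplitIntoBatches(userIds, 100))
// {
//     users.AddRange(stackExchange.GetUsers(batch, "stackoverflow").Items);
// }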
public Profile(StackExchange.Profiling.MiniProfiler ProfilerUsing, IDisposable StepDisposable)
{
    this.Current = (ProfilerUsing == null) ? StackExchange.Profiling.MiniProfiler.Current : ProfilerUsing;
    this.StepDisposable = StepDisposable;
}
public void OnError(DbCommand profiledDbCommand, StackExchange.Profiling.Data.ExecuteType executeType, Exception exception)
{
    var formatter = new StackExchange.Profiling.SqlFormatters.SqlServerFormatter();
    var timing = new StackExchange.Profiling.SqlTiming(profiledDbCommand, executeType, null);
    exception.Data["SQL"] = formatter.FormatSql(timing);
    this.wrapped.OnError(profiledDbCommand, executeType, exception);
}
public void ExecuteStart(DbCommand profiledDbCommand, StackExchange.Profiling.Data.ExecuteType executeType)
{
    this.wrapped.ExecuteStart(profiledDbCommand, executeType);
}
public void ExecuteFinish(DbCommand profiledDbCommand, StackExchange.Profiling.Data.ExecuteType executeType, DbDataReader reader)
{
    this.wrapped.ExecuteFinish(profiledDbCommand, executeType, reader);
}
public ErrorLoggingProfiler(StackExchange.Profiling.Data.IDbProfiler wrapped)
{
    this.wrapped = wrapped;
}
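On the consuming side, the formatted SQL can be read back out of the exception; a minimal sketch, assuming the command runs through a connection profiled by the wrapper above:

using System.Data.Common;

try
{
    command.ExecuteNonQuery();
}
catch (DbException ex)
{
    // OnError above stashed the formatted SQL under the "SQL" key.
    var failedSql = ex.Data["SQL"] as string;
    Console.Error.WriteLine(failedSql ?? "(no SQL captured)");
    throw;
}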
public Profile(StackExchange.Profiling.MiniProfiler ProfilerUsing, IDisposable StepDisposable)
{
    this.Current = ProfilerUsing ?? StackExchange.Profiling.MiniProfiler.Current;
    this.StepDisposable = StepDisposable;
}
public StepTiming(StackExchange.Profiling.Timing timing)
    : base(MiniProfiler.Current, timing.ParentTiming, timing.Name)
{
}
public StackExchangeRepository(IOptions<StackExchange> stackExchange)
{
    _stackExchange = stackExchange.Value;
}
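For IOptions&lt;StackExchange&gt; to be injectable, the settings type has to be bound to configuration at startup; a sketch for ASP.NET Core, assuming an appsettings.json section named "StackExchange":

// In Startup.ConfigureServices (or the equivalent host builder code):
services.Configure<StackExchange>(Configuration.GetSection("StackExchange"));
services.AddScoped<StackExchangeRepository>();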
public void OnError(IDbCommand profiledDbCommand, StackExchange.Profiling.Data.SqlExecuteType executeType, Exception exception)
{
    var formatter = new StackExchange.Profiling.SqlFormatters.SqlServerFormatter();
    exception.Data["SQL"] = formatter.FormatSql(profiledDbCommand.CommandText, SqlTiming.GetCommandParameters(profiledDbCommand));
    this.wrapped.OnError(profiledDbCommand, executeType, exception);
}