// This function is for hadr_db_commit_mgr_harden.
public List<Int32> GetDatabaseIDs(string sqliteDBFile)
{
    List<Int32> list = new List<int>();
    string dbfile = sqliteDBFile; // @"C:\AGLatency\AGLatency\bin\Debug\SQLiteDB\LocalHarden_Primary_2__2018-07-27_22_06_38.175.SQLiteDB"
    SQLiteDB db = new SQLiteDB();
    db.Open(dbfile);

    String databaseNum = "SELECT DISTINCT database_id FROM hadr_db_commit_mgr_harden WHERE database_id>4";
    SQLiteDataReader dbidDR = db.ExecuteReader(databaseNum);
    if (dbidDR == null)
    {
        return list;
    }
    while (dbidDR.Read())
    {
        list.Add(dbidDR.GetInt16(0));
    }
    db.CloseConnection();
    list.Sort();
    return list;
}
private void btnSMP_Click(object sender, EventArgs e)
{
    string str_MP = txtSMP.Text;
    int int_MPlength = str_MP.Length;
    try
    {
        if (int_MPlength <= 15 && int_MPlength >= 8)
        {
            SQLiteDB.DatabasePath = "Task1SMP.db";
            SQLiteDB db = SQLiteDB.GetInstance;
            // NOTE: the master password is concatenated directly into the SQL text and stored
            // in plain text, so this INSERT is vulnerable to SQL injection.
            db.NonSelectQuery("INSERT INTO MasterPassword (MP) VALUES ('" + str_MP + "')");
            db.CloseConnection();
        }
        else
        {
            throw new ArgumentException();
        }
    }
    catch (Exception)
    {
        // Shows the entered length as the dialog text and the error as the caption.
        MessageBox.Show(int_MPlength.ToString(), "This value is not in the correct range!");
    }
}
private void button1_Click(object sender, EventArgs e)
{
    string str_addname = txtAdd_name.Text;
    string str_adduser = txtAdd_user.Text;
    string str_addpass = txtAdd_pass.Text;
    string str_addq = txtAdd_q.Text;
    string str_adda = txtAdd_a.Text;
    string str_addnotes = txtAdd_notes.Text;
    try
    {
        if (str_addname != "" && str_adduser != "" && str_addpass != "")
        {
            SQLiteDB.DatabasePath = "credential.db";
            SQLiteDB db = SQLiteDB.GetInstance;
            // NOTE: user input is concatenated directly into the SQL text, which is vulnerable
            // to SQL injection; a parameterized sketch follows this handler.
            db.NonSelectQuery("INSERT INTO credentials (Name, Username, Password, Question, Answer, Notes) VALUES ('"
                              + str_addname + "', '" + str_adduser + "', '" + str_addpass + "', '"
                              + str_addq + "', '" + str_adda + "', '" + str_addnotes + "')");
            db.CloseConnection();
        }
        else
        {
            throw new ArgumentException();
        }
    }
    catch (Exception)
    {
        MessageBox.Show("Did not enter in all required fields");
    }
}
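Both click handlers above build their INSERT statements by concatenating raw textbox input into the SQL string. A minimal parameterized sketch of the same kind of insert, assuming the underlying provider is System.Data.SQLite (the SQLiteDB wrapper used above may expose its own parameter API; InsertCredential, the connection string, and the reduced three-column shape are hypothetical, shown only to illustrate the technique):

// Minimal sketch: parameterized INSERT with System.Data.SQLite.
// InsertCredential is a hypothetical helper, not part of the SQLiteDB wrapper used above.
using System.Data.SQLite;

public static class CredentialStore
{
    public static void InsertCredential(string dbPath, string name, string user, string pass)
    {
        using (var conn = new SQLiteConnection("Data Source=" + dbPath))
        {
            conn.Open();
            using (var cmd = new SQLiteCommand(
                "INSERT INTO credentials (Name, Username, Password) VALUES (@name, @user, @pass)", conn))
            {
                // Parameters keep user input out of the SQL text entirely.
                cmd.Parameters.AddWithValue("@name", name);
                cmd.Parameters.AddWithValue("@user", user);
                cmd.Parameters.AddWithValue("@pass", pass);
                cmd.ExecuteNonQuery();
            }
        }
    }
}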
public Dictionary<string, List<LogBlockNetLatency_Sec>> GetPerfPointData(string sqliteDBFile)
{
    Dictionary<string, List<LogBlockNetLatency_Sec>> dict = new Dictionary<string, List<LogBlockNetLatency_Sec>>();
    string dbfile = sqliteDBFile; // @"C:\AGLatency\AGLatency\bin\Debug\SQLiteDB\LocalHarden_Primary_2__2018-07-27_22_06_38.175.SQLiteDB"
    SQLiteDB db = new SQLiteDB();
    db.Open(dbfile);

    String databaseNum = "SELECT DISTINCT target_availability_replica_id FROM hadr_receive_harden_lsn_message";
    SQLiteDataReader replicaDr = db.ExecuteReader(databaseNum);
    List<string> replicas = new List<string>();
    if (null == replicaDr)
    {
        return dict;
    }
    while (replicaDr.Read())
    {
        replicas.Add(replicaDr.GetString(0));
    }
    replicas.Sort();
    foreach (string r in replicas)
    {
        dict.Add(r, new List<LogBlockNetLatency_Sec>());
    }

    string idx1 = "CREATE INDEX lb ON hadr_receive_harden_lsn_message (log_block_id,target_availability_replica_id)";
    db.Execute(idx1);
    string idx2 = "CREATE INDEX lb2 ON hadr_send_harden_lsn_message (log_block_id)";
    db.Execute(idx2);

    // Record which replica to calculate. Only one is valid since we only allow two replicas.
    string replicaId = NetworkLatency.replicaId;

    // Update the time delta for each log block.
    // Warning: I found that there can be two hadr_receive_harden_lsn_message events fired for the
    // same log block, all mode=1; maybe due to a resend from the replica?
    string update = @"UPDATE hadr_receive_harden_lsn_message set TimeDelta=
(SELECT hadr_receive_harden_lsn_message.EventTimeStamp - hadr_send_harden_lsn_message.EventTimeStamp
 FROM hadr_send_harden_lsn_message
 WHERE hadr_send_harden_lsn_message.log_block_id = hadr_receive_harden_lsn_message.log_block_id
 AND hadr_receive_harden_lsn_message.target_availability_replica_id='" + replicaId + @"')
WHERE EXISTS (SELECT * FROM hadr_send_harden_lsn_message
 WHERE hadr_send_harden_lsn_message.log_block_id = hadr_receive_harden_lsn_message.log_block_id)";
    db.Execute(update);

    string select = @"SELECT (EventTimeStamp/10000000) as EventTimeStamp, target_availability_replica_id, COUNT(*) as LogBlocks,
 AVG(TimeDelta) as Avg_latency, SUM(TimeDelta) as Sum_latency, max(TimeDelta) as Max_latency, min(TimeDelta) as Min_latency
 FROM hadr_receive_harden_lsn_message
 WHERE TimeDelta is not null AND target_availability_replica_id='" + replicaId + @"'
 GROUP BY EventTimeStamp/10000000, target_availability_replica_id
 ORDER BY EventTimeStamp/10000000, target_availability_replica_id";
    SQLiteDataReader dr = db.ExecuteReader(select);
    if (dr == null)
    {
        return dict;
    }

    bool isFirst = true;
    Int64 firstTimeStamp = 0;
    while (dr.Read())
    {
        LogBlockNetLatency_Sec pfp = new LogBlockNetLatency_Sec();
        Int64 EventTimeStamp = dr.GetInt64(0) + 1; // Add one more second, e.g. 1.220 should be mapped to 2.00
        pfp.EventTimeStamp = new DateTime(EventTimeStamp * 10000000);
        pfp.availability_replica_id = dr.GetString(1);
        pfp.LogBlocks = dr.GetInt64(2);
        pfp.Avg_Latency = Math.Max(0, (Int64)(dr.GetFloat(3) / 10000));
        pfp.Sum_Latency = Math.Max(0, dr.GetInt64(4) / 10000);
        pfp.Max_Latency = Math.Max(0, dr.GetInt64(5) / 10000);
        pfp.Min_Latency = Math.Max(0, dr.GetInt64(6) / 10000);
        if (isFirst)
        {
            firstTimeStamp = EventTimeStamp;
            isFirst = false;
        }
        pfp.secondDistance = EventTimeStamp - firstTimeStamp;
        dict[pfp.availability_replica_id].Add(pfp);
    }
    db.CloseConnection();
    return dict;
}
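The reader loops in these GetPerfPointData methods bucket events to one-second boundaries by dividing the raw 100-nanosecond EventTimeStamp by 10,000,000, adding one, and turning the bucket back into a DateTime. A minimal standalone sketch of that conversion (BucketToSecond is a hypothetical helper, not part of the tool):

// Minimal sketch of the one-second bucketing used above, assuming EventTimeStamp
// is expressed in 100-nanosecond ticks. BucketToSecond is a hypothetical helper.
using System;

public static class TimeBuckets
{
    public static DateTime BucketToSecond(long eventTimeStampTicks)
    {
        // 10,000,000 ticks per second; "+ 1" rounds up so that 1.220s lands in the 2s bucket.
        long secondBucket = eventTimeStampTicks / TimeSpan.TicksPerSecond + 1;
        return new DateTime(secondBucket * TimeSpan.TicksPerSecond);
    }
}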
public Dictionary<string, List<LogCapture_Sec>> GetPerfPointData(string sqliteDBFile)
{
    Dictionary<string, List<LogCapture_Sec>> dict = new Dictionary<string, List<LogCapture_Sec>>();
    string dbfile = sqliteDBFile; // @"C:\AGLatency\AGLatency\bin\Debug\SQLiteDB\LocalHarden_Primary_2__2018-07-27_22_06_38.175.SQLiteDB"
    SQLiteDB db = new SQLiteDB();
    db.Open(dbfile);

    String databaseNum = "SELECT DISTINCT availability_replica_id FROM hadr_capture_log_block";
    SQLiteDataReader replicaDr = db.ExecuteReader(databaseNum);
    List<string> replicas = new List<string>();
    if (replicaDr == null)
    {
        return dict;
    }
    while (replicaDr.Read())
    {
        replicas.Add(replicaDr.GetString(0));
    }
    replicas.Sort();
    foreach (string r in replicas)
    {
        dict.Add(r, new List<LogCapture_Sec>());
    }

    string idx1 = "CREATE INDEX lb ON hadr_capture_log_block (log_block_id,availability_replica_id,mode)";
    db.Execute(idx1);
    // string idx2 = "CREATE INDEX lb2 ON log_block_pushed_to_logpool (log_block_id)";
    // db.Execute(idx2);

    string update = @"UPDATE hadr_capture_log_block set TimeDelta=
(SELECT hadr_capture_log_block.EventTimeStamp - B.EventTimeStamp
 from hadr_capture_log_block AS B
 WHERE B.log_block_id = hadr_capture_log_block.log_block_id
 AND B.availability_replica_id = hadr_capture_log_block.availability_replica_id
 AND B.mode = 1)
WHERE hadr_capture_log_block.mode = 4";

    // Update the time delta for each log block.
    // NOTE: update2 is built here but never executed; only `update` runs below.
    string update2 = @"UPDATE hadr_capture_log_block set TimeDelta=
(SELECT hadr_capture_log_block.EventTimeStamp - log_block_pushed_to_logpool.EventTimeStamp
 FROM log_block_pushed_to_logpool
 WHERE log_block_pushed_to_logpool.log_block_id = hadr_capture_log_block.log_block_id)
WHERE EXISTS (SELECT * FROM log_block_pushed_to_logpool
 WHERE log_block_pushed_to_logpool.log_block_id = hadr_capture_log_block.log_block_id)";
    db.Execute(update);

    string select = @"SELECT (EventTimeStamp/10000000) as EventTimeStamp, availability_replica_id, COUNT(*) as LogBlocks,
 AVG(TimeDelta) as Avg_latency, SUM(TimeDelta) as Sum_latency, max(TimeDelta) as Max_latency, min(TimeDelta) as Min_latency
 FROM hadr_capture_log_block
 WHERE TimeDelta is not null
 GROUP BY EventTimeStamp/10000000, availability_replica_id
 ORDER BY EventTimeStamp/10000000, availability_replica_id";
    SQLiteDataReader dr = db.ExecuteReader(select);
    if (dr == null)
    {
        return dict;
    }

    bool isFirst = true;
    Int64 firstTimeStamp = 0;
    while (dr.Read())
    {
        LogCapture_Sec pfp = new LogCapture_Sec();
        Int64 EventTimeStamp = dr.GetInt64(0) + 1; // Add one more second, e.g. 1.220 should be mapped to 2.00
        pfp.EventTimeStamp = new DateTime(EventTimeStamp * 10000000);
        pfp.availability_replica_id = dr.GetString(1);
        pfp.LogBlocks = dr.GetInt64(2);
        pfp.Avg_Latency = Math.Max(0, (Int64)(dr.GetFloat(3) / 10000));
        pfp.Sum_Latency = Math.Max(0, dr.GetInt64(4) / 10000);
        pfp.Max_Latency = Math.Max(0, dr.GetInt64(5) / 10000);
        pfp.Min_Latency = Math.Max(0, dr.GetInt64(6) / 10000);
        if (isFirst)
        {
            firstTimeStamp = EventTimeStamp;
            isFirst = false;
        }
        pfp.secondDistance = EventTimeStamp - firstTimeStamp;
        dict[pfp.availability_replica_id].Add(pfp);
    }
    db.CloseConnection();
    return dict;
}
public List<EventRecord_Sec> GetPerfPointData(string sqliteDBFile = null)
{
    List<EventRecord_Sec> list = new List<EventRecord_Sec>();
    string dbfile = sqliteDBFile; // @"C:\AGLatency\AGLatency\bin\Debug\SQLiteDB\LocalHarden_Primary_2__2018-07-27_22_06_38.175.SQLiteDB"
    if (String.IsNullOrEmpty(dbfile))
    {
        dbfile = eventLatency.eventDB.SQLiteDBFile;
    }
    SQLiteDB db = new SQLiteDB();
    db.Open(dbfile);

    // Preprocessing: delete records, add indexes, etc.
    PreProcessing(db, preprocessingQueries);

    // String select = "SELECT (EventTimeStamp/10000000) as EventTimeStamp,database_id, AVG(duration) as Avg_Duration,SUM(duration) as Sum_Duration, COUNT(*) as Flushes,SUM(write_size) as Sum_write_size from log_flush_complete group by EventTimeStamp/10000000,database_id ORDER BY EventTimeStamp / 10000000,database_id";
    String select = @"SELECT (EventTimeStamp/10000000) as EventTimeStamp, COUNT(*) as count, AVG("
                    + processTimeFieldName + ") as Avg_ProcessingTime, SUM("
                    + processTimeFieldName + ") as Sum_ProcessingTime, MAX("
                    + processTimeFieldName + ") as Max_ProcessingTime, MIN("
                    + processTimeFieldName + ") as Min_ProcessingTime FROM "
                    + eventName + " GROUP BY EventTimeStamp/10000000";
    Logger.LogMessage(select);
    SQLiteDataReader dr = db.ExecuteReader(select);
    if (dr == null)
    {
        return list;
    }

    bool isFirst = true;
    Int64 firstTimeStamp = 0;
    while (dr.Read())
    {
        EventRecord_Sec pfp = new EventRecord_Sec();
        Int64 EventTimeStamp = dr.GetInt64(0) + 1; // Add one more second, e.g. 1.220 should be mapped to 2.00
        pfp.EventTimeStamp = new DateTime(EventTimeStamp * 10000000);
        pfp.Count = dr.GetInt64(1);
        pfp.Avg_ProcessingTime = (Int64)Math.Max(0, dr.GetFloat(2));
        pfp.Sum_ProcessingTime = Math.Max(0, dr.GetInt64(3));
        pfp.Max_ProcessingTime = Math.Max(0, dr.GetInt64(4));
        pfp.Min_ProcessingTime = Math.Max(0, dr.GetInt64(5));
        if (isFirst)
        {
            firstTimeStamp = EventTimeStamp;
            isFirst = false;
        }
        list.Add(pfp);
    }
    db.CloseConnection();

    // Order by timestamp.
    list = list.OrderBy(p => p.EventTimeStamp).ToList();
    return list;
}
public Dictionary<int, List<TranProcessingTime_Sec>> GetPerfPointData(string sqliteDBFile)
{
    Dictionary<int, List<TranProcessingTime_Sec>> dict = new Dictionary<int, List<TranProcessingTime_Sec>>();
    string dbfile = sqliteDBFile; // @"C:\AGLatency\AGLatency\bin\Debug\SQLiteDB\LocalHarden_Primary_2__2018-07-27_22_06_38.175.SQLiteDB"
    SQLiteDB db = new SQLiteDB();
    db.Open(dbfile);

    String databaseNum = "SELECT DISTINCT database_id FROM recovery_unit_harden_log_timestamps WHERE database_id>4";
    SQLiteDataReader dbidDR = db.ExecuteReader(databaseNum);
    List<int> databases = new List<int>();
    if (dbidDR == null)
    {
        return dict;
    }
    while (dbidDR.Read())
    {
        databases.Add(dbidDR.GetInt16(0));
    }
    databases.Sort();
    foreach (int id in databases)
    {
        dict.Add(id, new List<TranProcessingTime_Sec>());
    }

    // String select = "SELECT (EventTimeStamp/10000000) as EventTimeStamp,database_id, AVG(duration) as Avg_Duration,SUM(duration) as Sum_Duration, COUNT(*) as Flushes,SUM(write_size) as Sum_write_size from log_flush_complete group by EventTimeStamp/10000000,database_id ORDER BY EventTimeStamp / 10000000,database_id";
    String select = "SELECT (EventTimeStamp/10000000) as EventTimeStamp,database_id, COUNT(*) as TranCommits, AVG(processing_time) as Avg_Duration,SUM(processing_time) as Sum_Duration, max(processing_time) as Max_duration, min(processing_time) as Min_duration from recovery_unit_harden_log_timestamps WHERE database_id>4 group by EventTimeStamp/10000000,database_id ORDER BY EventTimeStamp / 10000000,database_id";
    SQLiteDataReader dr = db.ExecuteReader(select);
    if (dr == null)
    {
        return dict;
    }

    bool isFirst = true;
    Int64 firstTimeStamp = 0;
    while (dr.Read())
    {
        TranProcessingTime_Sec pfp = new TranProcessingTime_Sec();
        Int64 EventTimeStamp = dr.GetInt64(0) + 1; // Add one more second, e.g. 1.220 should be mapped to 2.00
        pfp.EventTimeStamp = new DateTime(EventTimeStamp * 10000000);
        pfp.database_id = dr.GetInt32(1);
        pfp.TranCommits = dr.GetInt64(2);
        // processing_time / 1000 gives milliseconds.
        pfp.Avg_Duration = Math.Max(0, dr.GetFloat(3) / 1000);
        pfp.Sum_Duration = Math.Max(0, dr.GetInt64(4) / 1000);
        pfp.Max_Duration = Math.Max(0, dr.GetInt64(5) / 1000);
        pfp.Min_Duration = Math.Max(0, dr.GetInt64(6) / 1000);
        if (isFirst)
        {
            firstTimeStamp = EventTimeStamp;
            isFirst = false;
        }
        pfp.secondDistance = EventTimeStamp - firstTimeStamp;
        dict[pfp.database_id].Add(pfp);
    }
    db.CloseConnection();
    return dict;
}
public Dictionary<string, List<LogBlockNetLatency_Sec>> GetPerfPointData(string sqliteDBFile)
{
    Dictionary<string, List<LogBlockNetLatency_Sec>> dict = new Dictionary<string, List<LogBlockNetLatency_Sec>>();
    string dbfile = sqliteDBFile; // @"C:\AGLatency\AGLatency\bin\Debug\SQLiteDB\LocalHarden_Primary_2__2018-07-27_22_06_38.175.SQLiteDB"
    SQLiteDB db = new SQLiteDB();
    db.Open(dbfile);

    String databaseNum = "SELECT DISTINCT local_availability_replica_id FROM hadr_transport_receive_log_block_message";
    SQLiteDataReader replicaDr = db.ExecuteReader(databaseNum);
    List<string> replicas = new List<string>();
    if (replicaDr == null)
    {
        return dict;
    }
    while (replicaDr.Read())
    {
        replicas.Add(replicaDr.GetString(0));
    }
    replicas.Sort();
    if (replicas.Count > 1)
    {
        Logger.LogMessage("[WARNING] Too many replicas found! num:" + replicas.Count);
    }
    if (replicas.Count == 0)
    {
        // No replicas recorded; return the empty dictionary rather than indexing into an empty list.
        return dict;
    }

    // Save this id, since SyncReceiveNetLatency will reference it to know which replica
    // to use in hadr_receive_harden_lsn_message.
    replicaId = replicas[0];

    foreach (string r in replicas)
    {
        dict.Add(r, new List<LogBlockNetLatency_Sec>());
    }

    string idx1 = "CREATE INDEX lb ON hadr_capture_log_block (log_block_id,availability_replica_id)";
    db.Execute(idx1);
    string idx2 = "CREATE INDEX lb2 ON hadr_transport_receive_log_block_message (log_block_id,local_availability_replica_id)";
    db.Execute(idx2);

    // Update the time delta for each log block.
    string update = @"UPDATE hadr_transport_receive_log_block_message set TimeDelta=
(SELECT hadr_transport_receive_log_block_message.EventTimeStamp - hadr_capture_log_block.EventTimeStamp
 FROM hadr_capture_log_block
 WHERE hadr_capture_log_block.log_block_id = hadr_transport_receive_log_block_message.log_block_id
 and hadr_capture_log_block.availability_replica_id = hadr_transport_receive_log_block_message.local_availability_replica_id)
WHERE EXISTS (SELECT * FROM hadr_capture_log_block
 WHERE hadr_capture_log_block.log_block_id = hadr_transport_receive_log_block_message.log_block_id
 and hadr_capture_log_block.availability_replica_id = hadr_transport_receive_log_block_message.local_availability_replica_id)";
    db.Execute(update);

    string select = @"SELECT (EventTimeStamp/10000000) as EventTimeStamp, local_availability_replica_id, COUNT(*) as LogBlocks,
 AVG(TimeDelta) as Avg_latency, SUM(TimeDelta) as Sum_latency, max(TimeDelta) as Max_latency, min(TimeDelta) as Min_latency
 FROM hadr_transport_receive_log_block_message
 WHERE TimeDelta is not null
 GROUP BY EventTimeStamp/10000000, local_availability_replica_id
 ORDER BY EventTimeStamp/10000000, local_availability_replica_id";
    SQLiteDataReader dr = db.ExecuteReader(select);
    if (dr == null)
    {
        return dict;
    }

    bool isFirst = true;
    Int64 firstTimeStamp = 0;
    while (dr.Read())
    {
        LogBlockNetLatency_Sec pfp = new LogBlockNetLatency_Sec();
        Int64 EventTimeStamp = dr.GetInt64(0) + 1; // Add one more second, e.g. 1.220 should be mapped to 2.00
        pfp.EventTimeStamp = new DateTime(EventTimeStamp * 10000000);
        pfp.availability_replica_id = dr.GetString(1);
        pfp.LogBlocks = dr.GetInt64(2);
        pfp.Avg_Latency = Math.Max(0, (Int64)(dr.GetFloat(3) / 10000));
        pfp.Sum_Latency = Math.Max(0, dr.GetInt64(4) / 10000);
        pfp.Max_Latency = Math.Max(0, dr.GetInt64(5) / 10000);
        pfp.Min_Latency = Math.Max(0, dr.GetInt64(6) / 10000);
        if (isFirst)
        {
            firstTimeStamp = EventTimeStamp;
            isFirst = false;
        }
        pfp.secondDistance = EventTimeStamp - firstTimeStamp;
        dict[pfp.availability_replica_id].Add(pfp);
    }
    db.CloseConnection();
    return dict;
}
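The dictionary returned above maps each replica id to its per-second latency series. A minimal sketch of consuming that result, assuming the LogBlockNetLatency_Sec type from the tool and that the /10000 scaling above converts 100-nanosecond ticks to milliseconds (PrintLatencySummary is a hypothetical caller, not part of the tool):

// Minimal sketch: summarize the per-replica latency series returned by GetPerfPointData.
// PrintLatencySummary is a hypothetical caller.
using System;
using System.Collections.Generic;
using System.Linq;

public static class LatencyReport
{
    public static void PrintLatencySummary(Dictionary<string, List<LogBlockNetLatency_Sec>> dict)
    {
        foreach (var kv in dict)
        {
            if (kv.Value.Count == 0) { continue; }
            // Avg_Latency is already scaled to milliseconds by the /10000 above.
            double avgMs = kv.Value.Average(p => (double)p.Avg_Latency);
            long blocks = kv.Value.Sum(p => p.LogBlocks);
            Console.WriteLine($"replica {kv.Key}: {blocks} log blocks, avg latency {avgMs:F1} ms");
        }
    }
}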
public Dictionary<string, List<FlowControl_Sec>> GetPerfPointData(string sqliteDBFile)
{
    Dictionary<string, List<FlowControl_Sec>> dict = new Dictionary<string, List<FlowControl_Sec>>();
    string dbfile = sqliteDBFile; // @"C:\AGLatency\AGLatency\bin\Debug\SQLiteDB\LocalHarden_Primary_2__2018-07-27_22_06_38.175.SQLiteDB"
    SQLiteDB db = new SQLiteDB();
    db.Open(dbfile);

    String databaseNum = "SELECT DISTINCT database_replica_id FROM hadr_database_flow_control_action WHERE local_availability_replica_id='" + NetworkLatency.replicaId + "'";
    SQLiteDataReader replicaDr = db.ExecuteReader(databaseNum);
    List<string> replicas = new List<string>();
    if (replicaDr == null)
    {
        return dict;
    }
    while (replicaDr.Read())
    {
        replicas.Add(replicaDr.GetString(0));
    }
    replicas.Sort();
    foreach (string r in replicas)
    {
        dict.Add(r, new List<FlowControl_Sec>());
    }

    string idx1 = "CREATE INDEX lb ON hadr_database_flow_control_action (local_availability_replica_id,control_action)";
    db.Execute(idx1);

    string select = @"SELECT (EventTimeStamp/10000000) as EventTimeStamp, database_replica_id, COUNT(*) as Occurence,
 AVG(Duration) as Avg_latency, SUM(Duration) as Sum_latency, max(Duration) as Max_latency, min(Duration) as Min_latency
 FROM hadr_database_flow_control_action
 WHERE local_availability_replica_id = '" + NetworkLatency.replicaId + @"' AND control_action='Cleared'
 GROUP BY EventTimeStamp/10000000, database_replica_id
 ORDER BY EventTimeStamp/10000000, database_replica_id";
    SQLiteDataReader dr = db.ExecuteReader(select);
    if (dr == null)
    {
        return dict;
    }

    bool isFirst = true;
    Int64 firstTimeStamp = 0;
    while (dr.Read())
    {
        FlowControl_Sec pfp = new FlowControl_Sec();
        Int64 EventTimeStamp = dr.GetInt64(0) + 1; // Add one more second, e.g. 1.220 should be mapped to 2.00
        pfp.EventTimeStamp = new DateTime(EventTimeStamp * 10000000);
        pfp.database_replica_id = dr.GetString(1);
        pfp.Occurence = dr.GetInt64(2);
        // Duration / 1000 gives milliseconds.
        pfp.Avg_Duration = Math.Max(0, (Int64)(dr.GetFloat(3) / 1000));
        pfp.Sum_Duration = Math.Max(0, dr.GetInt64(4) / 1000);
        pfp.Max_Duration = Math.Max(0, dr.GetInt64(5) / 1000);
        pfp.Min_Duration = Math.Max(0, dr.GetInt64(6) / 1000);
        if (isFirst)
        {
            firstTimeStamp = EventTimeStamp;
            isFirst = false;
        }
        pfp.secondDistance = EventTimeStamp - firstTimeStamp;
        dict[pfp.database_replica_id].Add(pfp);
    }
    db.CloseConnection();
    return dict;
}