/// <summary>
/// Convert the Excel file selected in Source_path_textBox to a SQLite (.db3) database
/// chosen via the save-file dialog; shows a message box on completion or failure.
/// </summary>
/// <param name="sender">Event source (the button)</param>
/// <param name="e">Event arguments (unused)</param>
private void ParseToSqlL_btn_Click(object sender, EventArgs e)
{
    ConvertButtonsEnabled(false);

    try
    {
        saveFileDialog.Filter = "SQL Light|*.db3";
        saveFileDialog.FileName = string.Empty;

        if (saveFileDialog.ShowDialog() == DialogResult.OK)
        {
            Cursor.Current = Cursors.WaitCursor;
            try
            {
                SQLiteWriter.createDB(ExcelReader.GetInstance(Source_path_textBox.Text), saveFileDialog.FileName);
                MessageBox.Show("Finished");
            }
            catch (Exception exc)
            {
                MessageBox.Show(exc.Message + "\n" + exc.InnerException + " " + exc.StackTrace, "Error!", MessageBoxButtons.OK, MessageBoxIcon.Error);
            }
            finally
            {
                // Bug fix: the original never restored the cursor, leaving the wait cursor active
                Cursor.Current = Cursors.Default;
            }
        }
    }
    finally
    {
        // Bug fix: re-enable the buttons even if an exception escapes the dialog/convert path;
        // previously an unexpected throw would leave the UI permanently disabled
        ConvertButtonsEnabled(true);
    }
}
/// <summary>
/// Run the parsimony algorithm against the peptides and proteins in table sourceTableName in the specified SQLite database
/// </summary>
/// <param name="databaseFolderPath">Folder containing the SQLite database file</param>
/// <param name="dataBaseFileName">SQLite database file name</param>
/// <param name="sourceTableName">Table name to process</param>
/// <returns>True if success; false if an error</returns>
/// <exception cref="DirectoryNotFoundException">If databaseFolderPath does not exist</exception>
/// <exception cref="FileNotFoundException">If the database file does not exist</exception>
public bool ProcessSQLite(string databaseFolderPath, string dataBaseFileName, string sourceTableName = DEFAULT_SQLITE_TABLE)
{
    var diDataFolder = new DirectoryInfo(databaseFolderPath);
    if (!diDataFolder.Exists)
    {
        throw new DirectoryNotFoundException("Database folder not found: " + databaseFolderPath);
    }

    var fiDatabaseFile = new FileInfo(Path.Combine(diDataFolder.FullName, dataBaseFileName));
    if (!fiDatabaseFile.Exists)
    {
        throw new FileNotFoundException("Database not found: " + fiDatabaseFile);
    }

    if (ShowProgressAtConsole)
    {
        OnStatusEvent("Opening SQLite database " + fiDatabaseFile.FullName);
    }

    if (!VerifySourceTableExists(fiDatabaseFile, sourceTableName))
    {
        return false;
    }

    var dbReader = new SQLiteReader
    {
        Database = fiDatabaseFile.FullName
    };

    List<RowEntry> pepToProtMapping;

    try
    {
        var success = GetPeptideProteinMap(dbReader, sourceTableName, out pepToProtMapping);

        if (!success)
        {
            if (ShowProgressAtConsole)
            {
                OnErrorEvent("Error loading data from table {0}; GetPeptideProteinMap returned false", sourceTableName);
            }
            return false;
        }

        if (ShowProgressAtConsole)
        {
            OnStatusEvent("Loaded {0} rows from table {1}", pepToProtMapping.Count, sourceTableName);
        }
    }
    catch (Exception ex)
    {
        throw new Exception("Error calling GetPeptideProteinMap: " + ex.Message, ex);
    }

    if (fiDatabaseFile.DirectoryName == null)
    {
        throw new Exception("Error determining the parent directory for " + fiDatabaseFile.FullName);
    }

    // The parsimony results are first written to temp text files beside the database,
    // then imported into the database via a Mage pipeline
    var parsimonyResultsFilePath = Path.Combine(fiDatabaseFile.DirectoryName, "pars_info_temp.txt");
    var proteinGroupMembersFilePath = Path.Combine(fiDatabaseFile.DirectoryName, "pars_info_temp_groups.txt");

    if (pepToProtMapping == null || pepToProtMapping.Count == 0)
    {
        DeleteFile(parsimonyResultsFilePath);
        DeleteFile(proteinGroupMembersFilePath);
        throw new Exception("Error in RunAlgorithm: No rows to operate on");
    }

    List<Node> result;
    GlobalIDContainer globalIDTracker;

    try
    {
        PerformParsimonyThreaded(pepToProtMapping, out result, out globalIDTracker);
    }
    catch (Exception ex)
    {
        throw new Exception("Error calling PerformParsimonyThreaded: " + ex.Message, ex);
    }

    if (ShowProgressAtConsole)
    {
        Console.WriteLine();
        OnStatusEvent("Exporting protein groups to temp text files");
    }

    Utilities.SaveResults(result, parsimonyResultsFilePath, proteinGroupMembersFilePath, globalIDTracker);

    ClearExistingSQLiteResults(fiDatabaseFile);

    if (ShowProgressAtConsole)
    {
        Console.WriteLine();
    }

    // The two import steps were previously duplicated inline; both now use one helper
    ImportParsimonyTempFile(fiDatabaseFile, parsimonyResultsFilePath, PARSIMONY_GROUPING_TABLE);
    ImportParsimonyTempFile(fiDatabaseFile, proteinGroupMembersFilePath, PARSIMONY_GROUP_MEMBERS_TABLE);

    DeleteFile(parsimonyResultsFilePath);
    DeleteFile(proteinGroupMembersFilePath);

    return true;
}

/// <summary>
/// Import a delimited temp file into the given table of the SQLite database using a Mage pipeline
/// </summary>
/// <param name="fiDatabaseFile">Target SQLite database file</param>
/// <param name="sourceFilePath">Path of the delimited text file to import</param>
/// <param name="targetTableName">Name of the table to create/populate</param>
private void ImportParsimonyTempFile(FileInfo fiDatabaseFile, string sourceFilePath, string targetTableName)
{
    try
    {
        var fileReader = new DelimitedFileReader
        {
            FilePath = sourceFilePath
        };

        var writer = new SQLiteWriter
        {
            DbPath = fiDatabaseFile.FullName,
            TableName = targetTableName
        };

        writer.ColDefOverride = new List<MageColumnDef>
        {
            new("GroupID", "integer", "4") // Note that "size" doesn't matter since we're writing to a SqLite database
        };

        if (ShowProgressAtConsole)
        {
            OnStatusEvent("Importing data into table " + targetTableName);
        }

        ProcessingPipeline.Assemble("ImportToSQLite", fileReader, writer).RunRoot(null);
    }
    catch (Exception ex)
    {
        throw new Exception("Error adding data to table " + targetTableName + " to the SqLite database: " + ex.Message, ex);
    }
}
/// <summary>
/// Serialize the primary table (and its connected child-relation rows) from the SQLite
/// staging database to a JSON file.
/// </summary>
/// <param name="fileName">Path of the JSON file to create</param>
internal void Serialize(string fileName)
{
    /* We are going to perform a series of queries:
     * One for the primary category (which we assume is the first in the dataset)
     * and then one for every connection.
     *
     * A number of observations:
     * If we ever were to allow nested connections, we would need some recursion algorithm.
     * In the current setup, we only process the child relations of the primary table
     */

    // collect some information before the dataread to prevent unnecessary calls
    var subQueries = _primaryTable.ChildRelations.Cast<DataRelation>().Select(s => new ChildTableQuery(
        // no checks if keys exist
        s.ChildTable.ExtendedProperties[DataSetHelper.LinkTableSelectCommandTextExtProp].ToString(),
        JsonConvert.DeserializeObject<CommenceConnection>(s.ExtendedProperties[DataSetHelper.CommenceConnectionDescriptionExtProp].ToString())
        )).ToArray();

    using (var connection = new SQLiteConnection(_cs))
    {
        connection.Open();
        using (var transaction = connection.BeginTransaction())
        {
            using (var command = new SQLiteCommand(connection))
            {
                command.Connection = connection;
                command.CommandText = SQLiteWriter.GetSQLiteSelectQueryForTable(_primaryTable);

                // start reading data
                using (var reader = command.ExecuteReader())
                using (StreamWriter sw = new StreamWriter(fileName))
                using (Newtonsoft.Json.JsonWriter wr = new JsonTextWriter(sw))
                {
                    wr.Formatting = Formatting.Indented;
                    wr.WriteStartObject();
                    wr.WritePropertyName("CommenceDataSource");
                    wr.WriteValue(string.IsNullOrEmpty(_settings.CustomRootNode)
                        ? _primaryTable.TableName
                        : _settings.CustomRootNode);
                    wr.WritePropertyName("CommenceCategory");
                    wr.WriteValue(_ocp.Category);
                    wr.WritePropertyName("CommenceDataSourceType");
                    wr.WriteValue(_ocp.Type);
                    wr.WritePropertyName("Items");
                    wr.WriteStartArray();

                    bool includeThids = ((ExportSettings)_settings).UserRequestedThids;

                    while (reader.Read())
                    {
                        wr.WriteStartObject();
                        WriteObjects(reader, wr, includeThids);

                        // Bug fix: the original created a new SQLiteCommand per primary row and
                        // never disposed it (nor the sub-readers); both are now wrapped in using
                        using (SQLiteCommand sqCmd = new SQLiteCommand(connection))
                        {
                            foreach (var sq in subQueries)
                            {
                                sqCmd.CommandText = sq.CommandText;
                                sqCmd.Transaction = transaction;
                                sqCmd.Parameters.AddWithValue("@id", reader.GetInt64(0)); // fragile
                                using (var subreader = sqCmd.ExecuteReader())
                                {
                                    WriteConnectedObjects(sq, subreader, wr, includeThids);
                                }
                                sqCmd.Reset(); // make ready for next sub-query
                            }
                        } // foreach subqueries
                        wr.WriteEndObject();
                    } // while

                    wr.WriteEndArray();
                    wr.WriteEndObject();
                } // using reader / streamwriter / jsonwriter
            } // using cmd
            transaction.Commit();
        } // using transaction
    } // using con
} // method
/// <summary>
/// Protected constructor: creates the default SQLite reader and writer used by this helper.
/// </summary>
protected SQLiteHelper()
{
    // Independent initializations; no arguments are needed for either instance
    sqlitewriter = new SQLiteWriter();
    sqlitereader = new SQLiteReader();
}
// We could use async Read from SQLite and async XML writing, but there is no performance gain, I tested that.
// Going async actually introduces the problem of ExportComplete firing too early,
// but you will only notice that with huge exports.
/// <summary>
/// Serialize the primary table (and its connected child-relation rows) from the SQLite
/// staging database to an XML file.
/// </summary>
/// <param name="fileName">Path of the XML file to create</param>
internal void Serialize(string fileName)
{
    /* We are going to perform a series of queries:
     * One for the primary category
     * and then one for every connection.
     *
     * A number of observations:
     * If we ever were to allow nested connections, we would need some recursion algorithm.
     * In the current setup, we only process the child relations of the primary table
     */

    // collect some information before the dataread to prevent unnecessary calls
    var subQueries = _primaryTable.ChildRelations.Cast<DataRelation>().Select(s => new ChildTableQuery(
        // no checks if keys exist
        s.ChildTable.ExtendedProperties[DataSetHelper.LinkTableSelectCommandTextExtProp].ToString(),
        JsonConvert.DeserializeObject<CommenceConnection>(s.ExtendedProperties[DataSetHelper.CommenceConnectionDescriptionExtProp].ToString()))
        ).ToArray();

    using (var connection = new SQLiteConnection(_cs))
    {
        connection.Open();
        using (var transaction = connection.BeginTransaction())
        {
            using (var command = new SQLiteCommand(connection))
            {
                command.Connection = connection;
                command.CommandText = SQLiteWriter.GetSQLiteSelectQueryForTable(_primaryTable);

                // start reading data
                using (var reader = command.ExecuteReader())
                {
                    XmlWriterSettings xmlSettings = new XmlWriterSettings
                    {
                        Async = false,
                        WriteEndDocumentOnClose = true, // closes the root and Items elements on dispose
                        Indent = true,
                        Encoding = Encoding.UTF8 // this is what SQLite uses
                    };

                    using (System.Xml.XmlWriter writer = System.Xml.XmlWriter.Create(fileName, xmlSettings))
                    {
                        writer.WriteStartDocument();
                        writer.WriteStartElement(string.IsNullOrEmpty(_settings.CustomRootNode)
                            ? XmlConvert.EncodeLocalName(_primaryTable.TableName)
                            : XmlConvert.EncodeLocalName(_settings.CustomRootNode));
                        writer.WriteStartElement("Items");

                        bool includeThids = ((ExportSettings)_settings).UserRequestedThids;

                        while (reader.Read())
                        {
                            writer.WriteStartElement(null, "Item", null);
                            WriteNodes(reader, writer, includeThids);

                            // next step is to get the connected values; we use a separate query for that.
                            // Bug fix: the original created a new SQLiteCommand per primary row and
                            // never disposed it (nor the sub-readers); both are now wrapped in using
                            using (SQLiteCommand sqCmd = new SQLiteCommand(connection))
                            {
                                foreach (var q in subQueries)
                                {
                                    sqCmd.CommandText = q.CommandText;
                                    sqCmd.Transaction = transaction;
                                    sqCmd.Parameters.AddWithValue("@id", reader.GetInt64(0)); // fragile
                                    using (var subreader = sqCmd.ExecuteReader())
                                    {
                                        while (subreader.Read())
                                        {
                                            if (_settings.NestConnectedItems)
                                            {
                                                WriteNestedNodes(subreader, writer, q.Connection, includeThids);
                                            }
                                            else
                                            {
                                                WriteNodes(subreader, writer, includeThids);
                                            }
                                        } // while subreader.Read
                                    }
                                    sqCmd.Reset(); // make ready for next use
                                } // foreach subqueries
                            }
                            writer.WriteEndElement(); // Item
                        } // while
                    } // xmlwriter
                } // reader
            } // cmd
            transaction.Commit();
        } // transaction
    } // con
} // method