private void Inicialize()
{
    targetDatabase = new TargetDatabase();
    savingDatabase = new SavingDatabase();
    Title = "Mis Compras"; // Spanish: "My Purchases"
}
private void Initialize()
{
    targetDatabase = new TargetDatabase();
    ahorroItemDatabase = new SavingDatabase();
    Title = "Agregar compra"; // Spanish: "Add Purchase"
}
public void TestWriteAccessDb(string path, int numberConsensus, int numberProteins, int numberEvidences)
{
    var writer = new AccessTargetDatabaseWriter();
    var options = new Options();
    var database = new TargetDatabase();
    var evidenceCount = 1;
    for (var i = 1; i <= numberConsensus; i++)
    {
        var target = new ConsensusTarget { Id = i };
        for (var k = 1; k <= numberProteins; k++)
        {
            var protein = new ProteinInformation
            {
                ProteinName = "Access_Test" + k,
                CleavageState = clsPeptideCleavageStateCalculator.ePeptideCleavageStateConstants.Full,
                TerminusState = clsPeptideCleavageStateCalculator.ePeptideTerminusStateConstants.ProteinNTerminus
            };
            protein.Consensus.Add(target);
            for (var j = 1; j <= numberEvidences; j++)
            {
                var evidence = new Evidence
                {
                    AnalysisId = j,
                    Charge = 1,
                    Id = evidenceCount,
                    CleanPeptide = "MIKEDEGAN" + evidenceCount,
                    Sequence = "BIGBIRD" + evidenceCount,
                    Mz = 405,
                    Scan = evidenceCount++,
                    PeptideInfo = new TargetPeptideInfo()
                };
                evidence.AddProtein(protein);
                target.AddEvidence(evidence);
            }
            target.AddProtein(protein);
            target.CalculateStatistics();
        }
        target.TheoreticalMonoIsotopicMass = 100.0;
        target.AverageNet = .6;
        target.PredictedNet = .7;
        database.ConsensusTargets.Add(target);
    }
    if (File.Exists(GetTestSuiteDataPath(path)))
    {
        File.Delete(GetTestSuiteDataPath(path));
    }
    writer.Write(database, options, GetTestSuiteDataPath(path));
}
public static void SaveItems(TargetDatabase targetData)
{
    var serializer = new XmlSerializer(typeof(TargetDatabase));
    // using ensures the stream is closed even if serialization throws
    using (var stream = new FileStream(_path, FileMode.Create))
    {
        serializer.Serialize(stream, targetData);
    }
}
void ExpandOrCollapseAllDatabaseCategories(TargetDatabase targetDatabase, bool expand)
{
    switch (targetDatabase)
    {
        case TargetDatabase.UIElements:
            DUIData.Instance.DatabaseUIElements.ExpandOrCollapseAllCategories(expand);
            break;
        case TargetDatabase.UIButtons:
            DUIData.Instance.DatabaseUIButtons.ExpandOrCollapseAllCategories(expand);
            break;
    }
}
public static TargetDatabase LoadItems()
{
    var serializer = new XmlSerializer(typeof(TargetDatabase));
    // using ensures the stream is closed even if deserialization throws
    using (var stream = new FileStream(_path, FileMode.Open))
    {
        return serializer.Deserialize(stream) as TargetDatabase;
    }
}
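// A minimal round-trip sketch for the SaveItems/LoadItems pair above. It assumes both methods
// live in the same static class (called TargetDatabaseStore here purely for illustration) and
// that _path points at a writable file; those names are assumptions, not from the source.
var original = new TargetDatabase();
TargetDatabaseStore.SaveItems(original);                   // serialize to _path as XML
TargetDatabase restored = TargetDatabaseStore.LoadItems(); // read the same file back
// restored is null only if the file's root element does not deserialize to a TargetDatabase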
/// <summary>
/// Write the data to the database
/// </summary>
/// <param name="database"></param>
/// <param name="options"></param>
/// <param name="path"></param>
public void Write(TargetDatabase database, Options options, string path)
{
    if (File.Exists(path))
    {
        File.Delete(path);
    }
    ExportToText(path, database);
    TextToAccessConvert(path);
}
public ProteinDatabaseTreeNodeViewModel(TargetDatabase database)
    : base(String.Format("Target Database ({0})", database.Proteins.Count), true)
{
    m_targetDatabase = database;
    Action proteinSearch = ProteinSearch;
    ProteinSearchCommand = new DelegateCommand(proteinSearch);
    IsExpanded = true;
}
public TargetDatabaseTreeNodeViewModel(TargetDatabase database)
    : base(String.Format("Target Database ({0})", database.ConsensusTargets.Count), true)
{
    m_targetDatabase = database;
    Action peptideSearch = PeptideSearch;
    PeptideSearchCommand = new DelegateCommand(peptideSearch);
    IsExpanded = true;
}
public static ScatterSeries MakeStdevMassScatterSeries(TargetDatabase targetDatabase)
{
    var scatterSeries = MakeScatterSeries();
    foreach (var ct in targetDatabase.ConsensusTargets)
    {
        scatterSeries.Points.Add(new ScatterPoint(ct.TheoreticalMonoIsotopicMass, ct.Evidences.Count));
    }
    return scatterSeries;
}
public static ScatterSeries MakeStdevNetScatterSeries(TargetDatabase targetDatabase)
{
    var scatterSeries = MakeScatterSeries();
    foreach (var ct in targetDatabase.ConsensusTargets)
    {
        scatterSeries.Points.Add(new ScatterPoint(ct.AverageNet, ct.Evidences.Count));
    }
    return scatterSeries;
}
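// A minimal sketch of consuming the two series builders above with OxyPlot (whose
// ScatterSeries/ScatterPoint types the code already uses). The plot title and the
// targetDatabase variable are illustrative. Requires: using OxyPlot; using OxyPlot.Series;
var model = new PlotModel { Title = "Evidence counts per consensus target" };
model.Series.Add(MakeStdevMassScatterSeries(targetDatabase)); // x: theoretical mono mass, y: evidence count
model.Series.Add(MakeStdevNetScatterSeries(targetDatabase));  // x: average NET, y: evidence count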
protected override void Execute(CodeActivityContext context)
{
    console = context.GetExtension<ActivityConsole>();
    if (console == null)
    {
        console = new ActivityConsole();
    }
    string backupArguments = string.Format("/p:PerformDatabaseBackup={0}", BackupBeforeDeploy.Get(context));
    string alwaysCreateNewArguments = string.Format("/p:AlwaysCreateNewDatabase={0}", AlwaysCreateNewDatabase.Get(context));
    string vsdbcmdArguments = "/a:Deploy /dd+ /cs:\"{0}\" /p:TargetDatabase={1} \"/manifest:{2}\" {3} {4}";
    string output = string.Empty;
    string vsdbcmd = VsdbcmdPath.Get(context);
    if (string.IsNullOrEmpty(vsdbcmd))
    {
        vsdbcmd = ".\\";
    }
    if (vsdbcmd.EndsWith("vsdbcmd.exe", StringComparison.OrdinalIgnoreCase))
    {
        vsdbcmd = vsdbcmd.Substring(0, vsdbcmd.Length - "vsdbcmd.exe".Length);
    }
    vsdbcmd = Path.Combine(vsdbcmd, "vsdbcmd.exe");
    if (!File.Exists(vsdbcmd))
    {
        throw new ArgumentException(string.Format("Vsdbcmd missing: the file '{0}' could not be found.", vsdbcmd));
    }
    vsdbcmdArguments = string.Format(
        vsdbcmdArguments,
        ConnectionString.Get(context),
        TargetDatabase.Get(context),
        ManifestFilename.Get(context),
        BackupBeforeDeploy.Get(context) ? backupArguments : "",
        AlwaysCreateNewDatabase.Get(context) ? alwaysCreateNewArguments : "");
    console.WriteLine("Executing Vsdbcmd.exe..." + Environment.NewLine);
    CommandLine commandLineHelper = new CommandLine();
    commandLineHelper.ReportProgress += new EventHandler<CommandLineProgressEventArgs>(commandLineHelper_ReportProgress);
    int exitCode = commandLineHelper.Execute(vsdbcmd, vsdbcmdArguments, out output);
    if (exitCode != 0)
    {
        throw new InvalidOperationException(string.Format("Vsdbcmd returned a non-zero exit code: '{0}'.", exitCode));
    }
}
private int? GetTableCount(TargetDatabase target)
{
    var factory = DbProviderFactories.GetFactory(target.ProviderName);
    int? result = null;
    using (var connection = factory.CreateConnection())
    {
        connection.ConnectionString = target.ConnectionString;
        connection.Open();
        var command = connection.CreateCommand();
        command.CommandText = "SELECT COUNT(1) as [TablesCount] FROM Sys.Tables";
        result = command.ExecuteScalar() as int?;
    }
    return result;
}
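// Hypothetical call site for GetTableCount. It assumes this TargetDatabase config type exposes
// settable ProviderName/ConnectionString properties (the names come from the method body; the
// values are placeholders). Note the [TablesCount]/Sys.Tables syntax makes the query SQL Server specific.
var target = new TargetDatabase
{
    ProviderName = "System.Data.SqlClient",
    ConnectionString = "Server=.;Database=MyDb;Integrated Security=true"
};
int? tableCount = GetTableCount(target);
Console.WriteLine(tableCount.HasValue ? $"{tableCount} user tables" : "count unavailable");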
public void TestWriteDatabase(string path, int numberOfTargets, int numberOfEvidences)
{
    var writer = new SqLiteTargetDatabaseWriter();
    var options = new Options();
    var database = new TargetDatabase();
    var proteinCount = 1;
    var evidenceCount = 1;
    for (var i = 1; i <= numberOfTargets; i++)
    {
        var target = new ConsensusTarget { Id = i };
        var protein = new ProteinInformation
        {
            ProteinName = "SO_Test" + proteinCount++,
            CleavageState = clsPeptideCleavageStateCalculator.ePeptideCleavageStateConstants.Full,
            TerminusState = clsPeptideCleavageStateCalculator.ePeptideTerminusStateConstants.ProteinNTerminus,
        };
        protein.Consensus.Add(target);
        for (var j = 1; j <= numberOfEvidences; j++)
        {
            var evidence = new Evidence
            {
                AnalysisId = j,
                Charge = 1,
                Id = evidenceCount,
                CleanPeptide = "MIKEDEGAN" + evidenceCount,
                Sequence = "BIGBIRD" + evidenceCount,
                Mz = 405,
                Scan = evidenceCount++,
                PeptideInfo = new TargetPeptideInfo()
            };
            evidence.AddProtein(protein);
            target.AddEvidence(evidence);
        }
        target.AddProtein(protein);
        target.CalculateStatistics();
        database.ConsensusTargets.Add(target);
    }
    writer.Write(database, options, GetTestSuiteDataPath(path));
}
public static SqlDataType GetSqlBinaryDataType(TargetDatabase target)
{
    switch (target)
    {
        case TargetDatabase.MS_SQL_Server: return SqlDataType.MSSQL_BINARY;
        case TargetDatabase.MySQL: return SqlDataType.MySQL_BINARY;
        case TargetDatabase.PostgreSQL:
        case TargetDatabase.CockroachDB: return SqlDataType.Postgres_BYTEA;
        default: throw new ArgumentOutOfRangeException();
    }
}

public static SqlDataType GetSqlVarCharDataType(TargetDatabase target)
{
    switch (target)
    {
        case TargetDatabase.MS_SQL_Server: return SqlDataType.MSSQL_VARCHAR;
        case TargetDatabase.MySQL: return SqlDataType.MySQL_VARCHAR;
        case TargetDatabase.PostgreSQL:
        case TargetDatabase.CockroachDB: return SqlDataType.Postgres_VARCHAR;
        default: throw new ArgumentOutOfRangeException();
    }
}

public static SqlDataType GetSqlDateTimeDataType(TargetDatabase target)
{
    switch (target)
    {
        case TargetDatabase.MS_SQL_Server: return SqlDataType.MSSQL_DATETIME;
        case TargetDatabase.MySQL: return SqlDataType.MySQL_DATETIME;
        case TargetDatabase.PostgreSQL:
        case TargetDatabase.CockroachDB: return SqlDataType.Postgres_TIMESTAMP;
        default: throw new ArgumentOutOfRangeException();
    }
}
void DrawExpandCollapseButtons(float width, TargetDatabase targetDatabase)
{
    QUI.BeginHorizontal(width);
    {
        if (QUI.GhostButton("Expand", QColors.Color.Gray, 80) || DetectKeyCombo_Alt_E()) // Alt + E: expand all categories
        {
            ExpandOrCollapseAllDatabaseCategories(targetDatabase, true);
        }
        QUI.FlexibleSpace();
        if (QUI.GhostButton("Collapse", QColors.Color.Gray, 80) || DetectKeyCombo_Alt_C()) // Alt + C: collapse all categories
        {
            ExpandOrCollapseAllDatabaseCategories(targetDatabase, false);
        }
    }
    QUI.EndHorizontal();
}
public static SqlDataType GetSqlDoubleDataType(TargetDatabase target)
{
    switch (target)
    {
        case TargetDatabase.MS_SQL_Server: return SqlDataType.MSSQL_FLOAT;
        case TargetDatabase.MySQL: return SqlDataType.MySQL_DOUBLE;
        case TargetDatabase.PostgreSQL:
        case TargetDatabase.CockroachDB: return SqlDataType.Postgres_FLOAT8;
        default: throw new ArgumentOutOfRangeException();
    }
}

public static SqlDataType GetSqlSmallIntDataType(TargetDatabase target)
{
    switch (target)
    {
        case TargetDatabase.MS_SQL_Server: return SqlDataType.MSSQL_SMALLINT;
        case TargetDatabase.MySQL: return SqlDataType.MySQL_SMALLINT;
        case TargetDatabase.PostgreSQL:
        case TargetDatabase.CockroachDB: return SqlDataType.Postgres_INT2;
        default: throw new ArgumentOutOfRangeException();
    }
}

public static SqlDataType GetSqlDataType(Type type, TargetDatabase target)
{
    switch (Type.GetTypeCode(type))
    {
        case TypeCode.Int32: return GetSqlIntDataType(target);
        case TypeCode.Int16: return GetSqlSmallIntDataType(target);
        case TypeCode.Int64: return GetSqlBigIntDataType(target);
        case TypeCode.Single: return GetSqlFloatDataType(target);
        case TypeCode.Double: return GetSqlDoubleDataType(target);
        case TypeCode.Decimal: return GetSqlDecimalDataType(target);
        case TypeCode.String: return GetSqlVarCharDataType(target);
        case TypeCode.DateTime: return GetSqlDateTimeDataType(target);
        case TypeCode.Object:
            if (type == typeof(byte[]))
            {
                return GetSqlVarBinaryDataType(target);
            }
            throw new ArgumentOutOfRangeException();
        default:
            throw new ArgumentOutOfRangeException();
    }
}
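// A short sketch of driving the mapping helpers above when emitting DDL for one backend.
// Only GetSqlDataType and the TargetDatabase enum values shown earlier come from the source;
// the column tuples and the loop are illustrative.
var columns = new (string Name, Type ClrType)[]
{
    ("Id", typeof(int)),
    ("Name", typeof(string)),
    ("CreatedAt", typeof(DateTime)),
    ("Payload", typeof(byte[])),
};
foreach (var col in columns)
{
    SqlDataType sqlType = GetSqlDataType(col.ClrType, TargetDatabase.PostgreSQL);
    Console.WriteLine($"{col.Name} -> {sqlType}"); // e.g. Payload -> Postgres_BYTEA
}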
private async void InicializeData()
{
    try
    {
        SavingDatabase savingDatabase = new SavingDatabase();
        TargetDatabase targetDatabase = new TargetDatabase();
        await savingDatabase.RestoreDatabase();
        await targetDatabase.RestoreDatabase();
        var savings = DummyDataHelper.GetDummySaving();
        var targets = DummyDataHelper.GetDummyTarget();
        await savingDatabase.AddSavings(savings);
        await targetDatabase.AddTargets(targets);
    }
    catch (System.Exception ex)
    {
        // Swallows the exception: the message is captured but never surfaced to the user or a log.
        var message = ex.Message;
    }
}
private static T _CreateContext<T>(TargetDatabase database, bool readOnly = false) where T : DataContextEx
{
    IConnectionManagerEx manager = null;
    switch (database)
    {
        case TargetDatabase.Data:
            manager = Host.Container.Resolve<IDataConnectionManager>();
            break;
    }
    // Note: manager remains null for any other TargetDatabase value, so the dereference below would throw.
    T result = default(T);
    if (manager.Transaction == null && readOnly)
    {
        result = Activator.CreateInstance(typeof(T), manager.Connection) as T;
        result.ObjectTrackingEnabled = false;
        manager.RegisterContext(result);
        return result;
    }
    result = Activator.CreateInstance(typeof(T), manager.Connection) as T;
    result.Transaction = manager.Transaction;
    manager.RegisterContext(result);
    return result;
}
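// Hypothetical call site for _CreateContext; OrdersDataContext is an invented DataContextEx
// subtype used only to show the call shape. With readOnly: true and no ambient transaction,
// the context comes back with object tracking disabled.
var ctx = _CreateContext<OrdersDataContext>(TargetDatabase.Data, readOnly: true);
// ...query via ctx; the resolved connection manager has already registered it for lifetime management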
private void ExportToText(string path, TargetDatabase inputData)
{
    // Derive the directory by dropping the file-name piece (any path segment containing a '.')
    var pieces = path.Split('\\');
    string directory = "";
    foreach (var piece in pieces)
    {
        if (piece.Contains("."))
        {
            continue;
        }
        directory += piece;
        directory += "\\";
    }
    var currentProt = 0;
    var targetWriter = new StreamWriter(directory + "tempAMT.txt");
    var proteinWriter = new StreamWriter(directory + "tempAMT_Proteins.txt");
    var mapWriter = new StreamWriter(directory + "tempAMT_to_Protein_Map.txt");
    var targetHeader = string.Format("{0},{1},{2},{3},{4},{5},{6}",
        "AMT_ID", "AMTMonoisotopicMass", "NET", "PNET", "MSMS_Obs_Count", "High_Normalized_Score", "Peptide");
    targetWriter.WriteLine(targetHeader);
    var proteinHeader = string.Format("{0},{1},{2}", "Protein_ID", "Protein_Name", "Protein_Description");
    proteinWriter.WriteLine(proteinHeader);
    var mapHeader = string.Format("{0},{1}", "AMT_ID", "Protein_ID");
    mapWriter.WriteLine(mapHeader);
    foreach (var target in inputData.ConsensusTargets)
    {
        var msmsObsCount = target.Evidences.Count;
        var highestNormalized = target.Evidences.Max(x => x.NormalizedScore);
        var seqPieces = target.CleanSequence.Split('.');
        var test = target.EncodedNumericSequence.Split('.');
        var numSeq = "";
        // Strip the flanking residues (first and last '.'-delimited pieces) from the encoded sequence
        if (test.Count() != 1)
        {
            bool first = true;
            for (var i = 1; i < test.Count() - 1; i++)
            {
                if (!first)
                {
                    numSeq += ".";
                }
                numSeq += test[i];
                first = false;
            }
        }
        else
        {
            numSeq = test[0];
        }
        var cleanPeptide = (seqPieces.ToList().Count == 1) ? seqPieces[0] : seqPieces[1];
        var targetLine = string.Format("{0},{1},{2},{3},{4},{5},\"{6}\"",
            target.Id, target.TheoreticalMonoIsotopicMass, target.AverageNet, target.PredictedNet,
            msmsObsCount, highestNormalized, numSeq);
        targetWriter.WriteLine(targetLine);
        m_amtToProteinMap.Add(target.Id, new List<int>());
        foreach (var protein in target.Proteins)
        {
            if (!m_uniqueProteins.ContainsKey(protein.ProteinName))
            {
                protein.Id = ++currentProt;
                m_uniqueProteins.Add(protein.ProteinName, protein);
                var proteinLine = string.Format("{0},{1},{2}", protein.Id, protein.ProteinName, "");
                proteinWriter.WriteLine(proteinLine);
            }
            protein.Id = m_uniqueProteins[protein.ProteinName].Id;
            m_amtToProteinMap[target.Id].Add(protein.Id);
            var mapLine = string.Format("{0},{1}", target.Id, protein.Id);
            mapWriter.WriteLine(mapLine);
        }
    }
    targetWriter.Close();
    proteinWriter.Close();
    mapWriter.Close();
}
public TargetDatabase Process(IEnumerable<LcmsDataSet> dataSets, BackgroundWorker bWorker)
{
    m_abortRequested = false;
    m_currentItem = 0;
    dataSets = dataSets.ToList();
    m_totalItems = 2 * dataSets.Count();
    OnPercentProgressChanged(new PercentCompleteEventArgs(0));
    // Deal with DataSetId - Auto increments - Not in this class only
    var evidenceMap = new Dictionary<int, Evidence>();
    var targetDatabase = new TargetDatabase();
    var aligner = TargetAlignmentFactory.Create(ProcessorOptions);
    var clusterer = TargetClustererFactory.Create(ProcessorOptions.TargetFilterType);
    var epicTargets = new List<Evidence>();
    foreach (var dataSet in dataSets)
    {
        float percentComplete = (float)m_currentItem / m_totalItems;
        UpdateProgress(m_currentItem, m_totalItems, percentComplete, "Determining Consensus Targets");
        if (bWorker.CancellationPending || m_abortRequested)
        {
            return targetDatabase;
        }
        var targetFilter = TargetFilterFactory.Create(dataSet.Tool, ProcessorOptions);
        var alignmentFilter = AlignmentFilterFactory.Create(dataSet.Tool, ProcessorOptions);
        var filteredTargets = new List<Evidence>();
        var alignedTargets = new List<Evidence>();
        foreach (var t in dataSet.Evidences)
        {
            // Exclude carryover peptides.
            // Would be evidenced by a sizable difference between observed net and predicted net
            if (t.ObservedNet >= ProcessorOptions.MinimumObservedNet && t.ObservedNet <= ProcessorOptions.MaximumObservedNet)
            {
                // To prevent filtration of evidences which have previously passed alignment,
                if (dataSet.PreviouslyAnalyzed || !targetFilter.ShouldFilter(t))
                {
                    filteredTargets.Add(t);
                    if (!alignmentFilter.ShouldFilter(t))
                    {
                        alignedTargets.Add(t);
                    }
                }
            }
        }
        epicTargets.AddRange(filteredTargets);
        if (ProcessorOptions.TargetFilterType == TargetWorkflowType.TOP_DOWN)
        {
            dataSet.RegressionResult = aligner.AlignTargets(filteredTargets, alignedTargets);
        }
        m_currentItem++;
    }
    // Create the database (the list of consensus targets)
    // Convert the list of targets into a list of MassTagLights for LCMS to use as baseline
    // Cluster initially to provide a baseline for LCMSWarp
    var newTargets = clusterer.Cluster(epicTargets);
    int i = 0, j = 0;
    var tempConsensusTargets = new List<ConsensusTarget>();
    var proteinDict = new Dictionary<string, ProteinInformation>();
    foreach (var consensusTarget in newTargets)
    {
        consensusTarget.Id = ++i;
        foreach (var target in consensusTarget.Evidences)
        {
            target.Id = ++j;
        }
        consensusTarget.CalculateStatistics();
        tempConsensusTargets.Add(consensusTarget);
    }
    var massTagLightTargets = new List<UMCLight>();
    foreach (var evidence in tempConsensusTargets)
    {
        var driftStart = double.MaxValue;
        var driftEnd = double.MinValue;
        foreach (var member in evidence.Evidences)
        {
            driftStart = Math.Min(member.Scan, driftStart);
            driftEnd = Math.Max(member.Scan, driftEnd);
        }
        massTagLightTargets.AddRange(evidence.Charges.Select(charge => new UMCLight
        {
            Net = evidence.PredictedNet,
            ChargeState = charge,
            Mz = (evidence.TheoreticalMonoIsotopicMass + (charge * 1.00727649)) / charge,
            MassMonoisotopic = evidence.TheoreticalMonoIsotopicMass,
            Id = evidence.Id,
            MassMonoisotopicAligned = evidence.TheoreticalMonoIsotopicMass,
            DriftTime = driftEnd - driftStart,
            Scan = (int)((driftStart + driftEnd) / 2.0),
            ScanStart = (int)driftStart,
            ScanEnd = (int)driftEnd,
        }));
    }
    if (bWorker.CancellationPending || m_abortRequested)
    {
        return targetDatabase;
    }
    var alignmentData = new List<LcmsWarpAlignmentData>();
    var options = new LcmsWarpAlignmentOptions();
    var lcmsAligner = new LcmsWarpAdapter(options);
    // For performing net warping without mass correction
    options.AlignType = PNNLOmics.Algorithms.Alignment.LcmsWarp.AlignmentType.NET_WARP;
    var lcmsNetAligner = new LcmsWarpAdapter(options);
    // Foreach dataset
    foreach (var dataSet in dataSets)
    {
        float percentComplete = (float)m_currentItem / m_totalItems;
        UpdateProgress(m_currentItem, m_totalItems, percentComplete, "Performing LCMSWarp Alignment");
        if (bWorker.CancellationPending || m_abortRequested)
        {
            return targetDatabase;
        }
        var umcDataset = new List<UMCLight>();
        if (dataSet.Tool == LcmsIdentificationTool.MSAlign)
        {
            continue;
        }
        dataSet.Evidences.Sort((x, y) => x.Scan.CompareTo(y.Scan));
        var evidenceAndUmc = new List<EvidenceUMCAssociation>();
        // Only put evidences that pass the minimum observed net in this list.
        var backupDataset = new List<UMCLight>();
        foreach (var evidence in dataSet.Evidences)
        {
            if (evidence.ObservedNet >= ProcessorOptions.MinimumObservedNet)
            {
                UMCLight umc = new UMCLight
                {
                    Net = evidence.ObservedNet,
                    ChargeState = evidence.Charge,
                    Mz = evidence.Mz,
                    Scan = evidence.Scan,
                    MassMonoisotopic = evidence.MonoisotopicMass,
                    MassMonoisotopicAligned = evidence.MonoisotopicMass,
                    Id = evidence.Id,
                    ScanStart = evidence.Scan,
                    ScanEnd = evidence.Scan,
                };
                umcDataset.Add(umc);
                backupDataset.Add(umc);
                evidenceAndUmc.Add(new EvidenceUMCAssociation(evidence, umc));
            }
        }
        umcDataset.Sort((x, y) => x.MassMonoisotopic.CompareTo(y.MassMonoisotopic));
        LcmsWarpAlignmentData alignedData;
        try
        {
            alignedData = lcmsAligner.Align(massTagLightTargets, umcDataset);
        }
        catch
        {
            try
            {
                alignedData = lcmsNetAligner.Align(massTagLightTargets, umcDataset);
            }
            catch
            {
                alignedData = null;
            }
        }
        var netDiffList = new List<double>();
        var numBins = Math.Min(50, dataSet.Evidences.Count);
        var medNetDiff = new double[numBins];
        var numPerBin = (int)Math.Ceiling((double)dataSet.Evidences.Count / numBins);
        var binNum = 0;
        // Copy the residual data back into the evidences
        foreach (var group in evidenceAndUmc)
        {
            group.Evidence.MonoisotopicMass = group.UMC.MassMonoisotopicAligned;
            var netShift = group.UMC.NetAligned - group.UMC.Net;
            netDiffList.Add(netShift);
            group.Evidence.NetShift = netShift;
            group.Evidence.ObservedNet += netShift;
            if (netDiffList.Count % numPerBin == 0)
            {
                medNetDiff[binNum] = netDiffList.Median();
                netDiffList.Clear();
                binNum++;
            }
        }
        if (netDiffList.Count != 0)
        {
            medNetDiff[binNum] = netDiffList.Median();
            netDiffList.Clear();
        }
        foreach (var data in dataSet.Evidences.Where(data => !evidenceMap.ContainsKey(data.Id)))
        {
            evidenceMap.Add(data.Id, data);
        }
        if (alignedData != null)
        {
            dataSet.RegressionResult.Slope = alignedData.NetSlope;
            dataSet.RegressionResult.Intercept = alignedData.NetIntercept;
            dataSet.RegressionResult.RSquared = alignedData.NetRsquared;
            alignmentData.Add(alignedData);
        }
        else
        {
            dataSet.RegressionResult.Slope = 1;
            dataSet.RegressionResult.Intercept = 0;
            dataSet.RegressionResult.RSquared = 0;
        }
        m_currentItem++;
    }
    if (AlignmentComplete != null)
    {
        AlignmentComplete(this, new AlignmentCompleteArgs(alignmentData));
    }
    if (ProcessorOptions.TargetFilterType != TargetWorkflowType.TOP_DOWN)
    {
        i = j = 0;
        foreach (var consensus in tempConsensusTargets)
        {
            for (var evNum = 0; evNum < consensus.Evidences.Count; evNum++)
            {
                consensus.Evidences[evNum] = evidenceMap[consensus.Evidences[evNum].Id];
            }
            // Recalculate the target's data from the warped values
            consensus.Id = ++i;
            foreach (var target in consensus.Evidences)
            {
                target.Id = ++j;
            }
            consensus.CalculateStatistics();
            targetDatabase.AddConsensusTarget(consensus);
            foreach (var protein in consensus.Proteins)
            {
                if (!proteinDict.ContainsKey(protein.ProteinName))
                {
                    proteinDict.Add(protein.ProteinName, protein);
                    // Don't need to manually link the first consensus to the protein
                    continue;
                }
                proteinDict[protein.ProteinName].Consensus.Add(consensus);
            }
        }
        targetDatabase.Proteins = proteinDict.Values.ToList();
    }
    return targetDatabase;
}
public ProgressEventArgs(TargetDatabase target) { Target = target; }
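// A brief sketch of the conventional .NET event pattern around ProgressEventArgs. The event
// name and the raising method are illustrative; only the ProgressEventArgs(TargetDatabase)
// constructor above comes from the source.
public event EventHandler<ProgressEventArgs> Progress;

private void OnProgress(TargetDatabase target)
{
    Progress?.Invoke(this, new ProgressEventArgs(target));
}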
public void Check(ICheckNotifier notifier)
{
    if (TargetDatabase == null)
    {
        notifier.OnCheckPerformed(new CheckEventArgs("No TargetDatabase has been set", CheckResult.Fail));
    }
    else if (!TargetDatabase.Exists())
    {
        notifier.OnCheckPerformed(new CheckEventArgs("TargetDatabase '" + TargetDatabase + "' does not exist", CheckResult.Fail));
    }
    var toMigrateTables = TableInfos.Except(SkippedTables).ToArray();
    if (!toMigrateTables.Any())
    {
        notifier.OnCheckPerformed(new CheckEventArgs("There are no TableInfos selected for anonymisation", CheckResult.Fail));
    }
    try
    {
        var joinInfos = GetJoinInfosRequiredCatalogue();
        notifier.OnCheckPerformed(new CheckEventArgs("Generated Catalogue SQL successfully", CheckResult.Success));
        foreach (JoinInfo joinInfo in joinInfos)
        {
            notifier.OnCheckPerformed(new CheckEventArgs("Found required JoinInfo '" + joinInfo + "' that will have to be migrated", CheckResult.Success));
        }
        foreach (Lookup lookup in GetLookupsRequiredCatalogue())
        {
            notifier.OnCheckPerformed(new CheckEventArgs("Found required Lookup '" + lookup + "' that will have to be migrated", CheckResult.Success));
            //for each key involved in the lookup
            foreach (ColumnInfo c in new[] { lookup.ForeignKey, lookup.PrimaryKey, lookup.Description })
            {
                //lookup / table has already been migrated
                if (SkippedTables.Any(t => t.ID == c.TableInfo_ID))
                {
                    continue;
                }
                //make sure that the plan is sensible
                if (GetPlanForColumnInfo(c).Plan != Plan.PassThroughUnchanged)
                {
                    notifier.OnCheckPerformed(new CheckEventArgs("ColumnInfo '" + c + "' is part of a Lookup so must PassThroughUnchanged", CheckResult.Fail));
                }
            }
        }
    }
    catch (Exception ex)
    {
        notifier.OnCheckPerformed(new CheckEventArgs("Failed to generate Catalogue SQL", CheckResult.Fail, ex));
    }
    if (DateColumn != null)
    {
        var dateColumnPlan = GetPlanForColumnInfo(DateColumn);
        if (dateColumnPlan.Plan != Plan.PassThroughUnchanged)
        {
            if (notifier.OnCheckPerformed(new CheckEventArgs("Plan for " + DateColumn + " must be PassThroughUnchanged", CheckResult.Fail, null, "Set plan to PassThroughUnchanged")))
            {
                dateColumnPlan.Plan = Plan.PassThroughUnchanged;
            }
        }
        //get a count of the number of non lookup used tables
        var usedTables = TableInfos.Except(SkippedTables).Count(t => !t.IsLookupTable());
        if (usedTables > 1)
        {
            notifier.OnCheckPerformed(
                new CheckEventArgs(
                    "You cannot have a date based migration because you are trying to migrate " + usedTables + " TableInfos at once",
                    CheckResult.Fail));
        }
    }
    if (Plans.Any(p => p.Value.Plan == Plan.Dilute))
    {
        if (GetIdentifierDumpServer() == null)
        {
            notifier.OnCheckPerformed(new CheckEventArgs("No default Identifier Dump server has been configured", CheckResult.Fail));
        }
    }
    var refactorer = new SelectSQLRefactorer();
    foreach (ExtractionInformation e in _allExtractionInformations)
    {
        if (!refactorer.IsRefactorable(e))
        {
            notifier.OnCheckPerformed(new CheckEventArgs("ExtractionInformation '" + e + "' is not refactorable due to reason: " + refactorer.GetReasonNotRefactorable(e), CheckResult.Fail));
        }
    }
    notifier.OnCheckPerformed(new CheckEventArgs($"Preparing to evaluate {toMigrateTables.Length} tables ({string.Join(",", toMigrateTables.Select(t => t.GetFullyQualifiedName()))})", CheckResult.Success));
    foreach (TableInfo tableInfo in toMigrateTables)
    {
        notifier.OnCheckPerformed(new CheckEventArgs("Evaluating TableInfo '" + tableInfo + "'", CheckResult.Success));
        if (TargetDatabase != null && TargetDatabase.ExpectTable(tableInfo.GetRuntimeName()).Exists())
        {
            notifier.OnCheckPerformed(new CheckEventArgs("Table '" + tableInfo + "' already exists in Database '" + TargetDatabase + "'", CheckResult.Fail));
        }
        var pks = tableInfo.ColumnInfos.Where(c => c.IsPrimaryKey).ToArray();
        if (!pks.Any())
        {
            notifier.OnCheckPerformed(new CheckEventArgs("TableInfo '" + tableInfo + "' does not have any Primary Keys, it cannot be anonymised", CheckResult.Fail));
        }
        if (tableInfo.IsTableValuedFunction)
        {
            notifier.OnCheckPerformed(new CheckEventArgs("TableInfo '" + tableInfo + "' is an IsTableValuedFunction so cannot be anonymised", CheckResult.Fail));
        }
        EnsureNotAlreadySharedLocally(notifier, tableInfo);
        EnsureNotAlreadySharedLocally(notifier, Catalogue);
    }
    //check the column level plans
    foreach (var p in Plans.Values)
    {
        p.Check(notifier);
    }
}
public TargetDatabase ReadDb(string path)
{
    // Read in the data from the access database
    // put it into a text file (?)
    // Read the data from the text file into program
    var accApplication = new ACCESS.Application();
    var pathPieces = path.Split('\\');
    string directory = "";
    foreach (var piece in pathPieces)
    {
        if (piece.Contains("."))
        {
            continue;
        }
        directory += piece;
        directory += "\\";
    }
    accApplication.OpenCurrentDatabase(path);
    accApplication.DoCmd.TransferText(TransferType: ACCESS.AcTextTransferType.acExportDelim,
        TableName: "AMT", FileName: directory + "outTempAMT.txt", HasFieldNames: true);
    accApplication.DoCmd.TransferText(TransferType: ACCESS.AcTextTransferType.acExportDelim,
        TableName: "AMT_Proteins", FileName: directory + "outTempAMT_Proteins.txt", HasFieldNames: true);
    accApplication.DoCmd.TransferText(TransferType: ACCESS.AcTextTransferType.acExportDelim,
        TableName: "AMT_to_Protein_Map", FileName: directory + "outTempAMT_to_Protein_Map.txt", HasFieldNames: true);
    accApplication.CloseCurrentDatabase();
    accApplication.Quit();
    // Put the data into its objects
    // AMT stuff going in Consensus targets
    //   NET, MonoMass, Pred. Net, Peptide (Sequence with numeric mods), ID (can be crushed later)
    //   OBSERVED <-- number of times this peptide was seen in the AMT
    //   for <observed> times, add an evidence with the info? would make sense and would allow the stats calcs to be accurate
    // Prot stuff going into ProteinInfo
    //   Prot name only thing important for MTDB, ID (can be crushed later)
    // AMT map
    //   Link Consensus and Protein (ct[ct_id].protein.add(protein[prot_id]))
    var consensusTargets = new Dictionary<int, ConsensusTarget>();
    var proteins = new Dictionary<int, ProteinInformation>();
    var ctReader = new StreamReader(directory + "outTempAMT.txt");
    var protReader = new StreamReader(directory + "outTempAMT_Proteins.txt");
    var mapReader = new StreamReader(directory + "outTempAMT_to_Protein_Map.txt");
    // Read the headers for the files
    ctReader.ReadLine();
    protReader.ReadLine();
    mapReader.ReadLine();
    // Read the first "Data" lines from the files
    var ctLine = ctReader.ReadLine();
    var protLine = protReader.ReadLine();
    var mapLine = mapReader.ReadLine();
    while (ctLine != null)
    {
        var pieces = ctLine.Split(',');
        var target = new ConsensusTarget
        {
            Id = Convert.ToInt32(pieces[0]),
            TheoreticalMonoIsotopicMass = Convert.ToDouble(pieces[1]),
            AverageNet = Convert.ToDouble(pieces[2]),
            PredictedNet = Convert.ToDouble(pieces[3]),
            EncodedNumericSequence = pieces[6]
        };
        var totalEvidences = Convert.ToInt32(pieces[4]);
        var normScore = Convert.ToDouble(pieces[5]);
        for (var evNum = 0; evNum < totalEvidences; evNum++)
        {
            var evidence = new Evidence
            {
                ObservedNet = target.AverageNet,
                ObservedMonoisotopicMass = target.TheoreticalMonoIsotopicMass,
                PredictedNet = target.PredictedNet,
                NormalizedScore = normScore,
                SeqWithNumericMods = target.EncodedNumericSequence,
                Parent = target
            };
            target.Evidences.Add(evidence);
        }
        consensusTargets.Add(target.Id, target);
        ctLine = ctReader.ReadLine();
    }
    while (protLine != null)
    {
        var pieces = protLine.Split(',');
        var protein = new ProteinInformation
        {
            ProteinName = pieces[1]
        };
        proteins.Add(Convert.ToInt32(pieces[0]), protein);
        protLine = protReader.ReadLine();
    }
    while (mapLine != null)
    {
        var pieces = mapLine.Split(',');
        consensusTargets[Convert.ToInt32(pieces[0])].AddProtein(proteins[Convert.ToInt32(pieces[1])]);
        mapLine = mapReader.ReadLine();
    }
    ctReader.Close();
    protReader.Close();
    mapReader.Close();
    File.Delete(directory + "outTempAMT.txt");
    File.Delete(directory + "outTempAMT_Proteins.txt");
    File.Delete(directory + "outTempAMT_to_Protein_Map.txt");
    var database = new TargetDatabase();
    foreach (var target in consensusTargets)
    {
        database.AddConsensusTarget(target.Value);
    }
    database.Proteins = proteins.Values.ToList();
    return database;
}
/// <summary>
/// Write to the SQLite database
/// </summary>
/// <param name="database"></param>
/// <param name="options"></param>
/// <param name="path"></param>
public void Write(TargetDatabase database, Options options, string path)
{
    DatabaseFactory.DatabaseFile = path;
    var databaseDirectory = Path.GetDirectoryName(path);
    /**********************************************************************************************
     * TODO: Get the append capability working
     * Set to false to avoid problems. Setting it to true will append some items, but not others.
     **********************************************************************************************/
    DatabaseFactory.ReadOrAppend = false;
    var sessionFactory = DatabaseFactory.CreateSessionFactory(options.DatabaseType);
    using (var session = sessionFactory.OpenStatelessSession())
    {
        // populate the database
        using (var transaction = session.BeginTransaction())
        {
            session.Insert(options);
            /* This section breaks up the Target object, pulling out the individual TargetDataSet, SequenceInfo,
             * and TargetPeptideInfo. These objects are then "reverse linked", so that each of these objects
             * relates to multiple evidences. This is because these objects need to know what they are related to.
             * Additionally, these objects are saved before the Evidences are, because these objects need to already
             * exist in order to properly generate the relation.
             */
            var currentTarget = 0;
            var currentEv = 0;
            var datasetCount = 0;
            var total = database.ConsensusTargets.Count;
            foreach (var consensusTarget in database.ConsensusTargets)
            {
                OnProgressChanged(new MtdbProgressChangedEventArgs(currentTarget, total, MtdbCreationProgressType.COMMIT.ToString()));
                consensusTarget.Id = ++currentTarget;
                foreach (var ptm in consensusTarget.Ptms)
                {
                    ptm.Id = 0;
                }
                consensusTarget.Dataset = consensusTarget.Evidences[0].DataSet;
                consensusTarget.ModificationCount = consensusTarget.Evidences[0].ModificationCount;
                consensusTarget.ModificationDescription = consensusTarget.Evidences[0].ModificationDescription;
                consensusTarget.MultiProteinCount = consensusTarget.Evidences[0].MultiProteinCount;
                session.Insert(consensusTarget);
                foreach (var evidence in consensusTarget.Evidences)
                {
                    if (!m_uniqueDataSets.ContainsKey(evidence.DataSet.Name))
                    {
                        evidence.DataSet.Id = ++datasetCount;
                        m_uniqueDataSets.Add(evidence.DataSet.Name, evidence.DataSet);
                        // Build the per-dataset alignment file path next to the output database
                        var outputPath = Path.Combine(databaseDirectory, evidence.DataSet.Name + "Alignment.tsv");
                        var datasetWriter = new StreamWriter(outputPath);
                        datasetWriter.WriteLine("GANET_Obs\tScan_Number");
                        m_alignmentWriters.Add(evidence.DataSet.Name, datasetWriter);
                        session.Insert(evidence.DataSet);
                    }
                    Evidence writtenEvidence = new Evidence
                    {
                        Id = ++currentEv,
                        Charge = evidence.Charge,
                        ObservedNet = evidence.ObservedNet,
                        NetShift = evidence.NetShift,
                        Mz = evidence.Mz,
                        Scan = evidence.Scan,
                        DelM = evidence.DelM,
                        DelMPpm = evidence.DelMPpm,
                        DiscriminantValue = evidence.DiscriminantValue,
                        SpecProb = evidence.SpecProb,
                        DataSet = m_uniqueDataSets[evidence.DataSet.Name],
                        Parent = consensusTarget
                    };
                    m_alignmentWriters[evidence.DataSet.Name].WriteLine(string.Format("{0}\t{1}", writtenEvidence.ObservedNet, writtenEvidence.Scan));
                    session.Insert(writtenEvidence);
                }
                foreach (var protein in consensusTarget.Proteins)
                {
                    if (!m_uniqueProteins.ContainsKey(protein.ProteinName))
                    {
                        protein.Id = 0;
                        m_uniqueProteins.Add(protein.ProteinName, protein);
                        session.Insert(protein);
                    }
                    var cProt = m_uniqueProteins[protein.ProteinName];
                    var cPPair = new ConsensusProteinPair
                    {
                        Consensus = consensusTarget,
                        Protein = cProt,
                        CleavageState = (short)cProt.CleavageState,
                        TerminusState = (short)cProt.TerminusState,
                        ResidueStart = (short)cProt.ResidueStart,
                        ResidueEnd = (short)cProt.ResidueEnd
                    };
                    session.Insert(cPPair);
                    consensusTarget.ConsensusProtein.Add(cPPair);
                }
                foreach (var ptm in consensusTarget.Ptms)
                {
                    if (!m_uniquePtms.ContainsKey(ptm.Name))
                    {
                        m_uniquePtms.Add(ptm.Name, ptm);
                        session.Insert(ptm);
                    }
                    var cPtmPair = new ConsensusPtmPair
                    {
                        Location = ptm.Location,
                        PostTranslationalModification = m_uniquePtms[ptm.Name],
                        PtmId = m_uniquePtms[ptm.Name].Id,
                        Target = consensusTarget,
                        ConsensusId = consensusTarget.Id
                    };
                    session.Insert(cPtmPair);
                }
            }
            OnProgressChanged(new MtdbProgressChangedEventArgs(currentTarget, total, MtdbCreationProgressType.COMMIT.ToString()));
            transaction.Commit();
            session.Close();
        }
    }
    foreach (var writer in m_alignmentWriters)
    {
        writer.Value.Close();
    }
}
private static int RunDatabaseTarget(TargetDatabase configDatabase, ProgramOptions opts)
{
    var batchSize = Math.Max(1, configDatabase.Batches);
    //if we are going into a database we definitely do not need pixels!
    if (opts.NoPixels == false)
    {
        opts.NoPixels = true;
    }
    Stopwatch swTotal = new Stopwatch();
    swTotal.Start();
    string neverDistinct = "SOPInstanceUID";
    if (!File.Exists(configDatabase.Template))
    {
        Console.WriteLine($"Listed template file '{configDatabase.Template}' does not exist");
        return -1;
    }
    ImageTableTemplateCollection template;
    try
    {
        template = ImageTableTemplateCollection.LoadFrom(File.ReadAllText(configDatabase.Template));
    }
    catch (Exception e)
    {
        Console.WriteLine($"Error reading yaml from '{configDatabase.Template}'");
        Console.WriteLine(e.ToString());
        return -2;
    }
    ImplementationManager.Load<MySqlImplementation>();
    ImplementationManager.Load<PostgreSqlImplementation>();
    ImplementationManager.Load<OracleImplementation>();
    ImplementationManager.Load<MicrosoftSQLImplementation>();
    var server = new DiscoveredServer(configDatabase.ConnectionString, configDatabase.DatabaseType);
    try
    {
        server.TestConnection();
    }
    catch (Exception e)
    {
        Console.WriteLine($"Could not reach target server '{server.Name}'");
        Console.WriteLine(e);
        return -2;
    }
    var db = server.ExpectDatabase(configDatabase.DatabaseName);
    if (!db.Exists())
    {
        Console.WriteLine($"Creating Database '{db.GetRuntimeName()}'");
        db.Create();
        Console.WriteLine("Database Created");
    }
    else
    {
        Console.WriteLine($"Found Database '{db.GetRuntimeName()}'");
    }
    var creator = new ImagingTableCreation(db.Server.GetQuerySyntaxHelper());
    Console.WriteLine($"Image template contained schemas for {template.Tables.Count} tables. Looking for existing tables..");
    //setting up bulk inserters
    DiscoveredTable[] tables = new DiscoveredTable[template.Tables.Count];
    DataTable[][] batches = new DataTable[batchSize][];
    for (var i = 0; i < batches.Length; i++)
    {
        batches[i] = new DataTable[template.Tables.Count];
    }
    IBulkCopy[][] uploaders = new IBulkCopy[batchSize][];
    for (int i = 0; i < uploaders.Length; i++)
    {
        uploaders[i] = new IBulkCopy[template.Tables.Count];
    }
    string[] pks = new string[template.Tables.Count];
    for (var i = 0; i < template.Tables.Count; i++)
    {
        var tableSchema = template.Tables[i];
        var tbl = db.ExpectTable(tableSchema.TableName);
        tables[i] = tbl;
        if (configDatabase.MakeDistinct)
        {
            var col = tableSchema.Columns.Where(c => c.IsPrimaryKey).ToArray();
            if (col.Length > 1)
            {
                Console.WriteLine("MakeDistinct only works with single column primary keys e.g. StudyInstanceUID / SeriesInstanceUID");
            }
            pks[i] = col.SingleOrDefault()?.ColumnName;
            if (pks[i] != null)
            {
                //if it is sop instance uid then we shouldn't be trying to deduplicate
                if (string.Equals(pks[i], neverDistinct, StringComparison.CurrentCultureIgnoreCase))
                {
                    pks[i] = null;
                }
                else
                {
                    //we will make this a primary key later on
                    col.Single().IsPrimaryKey = false;
                    Console.WriteLine($"MakeDistinct will apply to '{pks[i]}' on '{tbl.GetFullyQualifiedName()}'");
                }
            }
        }
        bool create = true;
        if (tbl.Exists())
        {
            if (configDatabase.DropTables)
            {
                Console.WriteLine($"Dropping existing table '{tbl.GetFullyQualifiedName()}'");
                tbl.Drop();
            }
            else
            {
                Console.WriteLine($"Table '{tbl.GetFullyQualifiedName()}' already existed (so will not be created)");
                create = false;
            }
        }
        if (create)
        {
            Console.WriteLine($"About to create '{tbl.GetFullyQualifiedName()}'");
            creator.CreateTable(tbl, tableSchema);
            Console.WriteLine($"Successfully created '{tbl.GetFullyQualifiedName()}'");
        }
        Console.WriteLine($"Creating uploader for '{tbl.GetRuntimeName()}'");
        for (int j = 0; j < batchSize; j++)
        {
            //fetch schema
            var dt = tbl.GetDataTable();
            dt.Rows.Clear();
            batches[j][i] = dt;
            uploaders[j][i] = tbl.BeginBulkInsert();
        }
    }
    var tasks = new Task[batchSize];
    IPersonCollection identifiers = GetPeople(opts, out Random r);
    for (int i = 0; i < batchSize; i++)
    {
        var batch = i;
        tasks[i] = new Task(() => // lgtm[cs/local-not-disposed]
        {
            RunBatch(identifiers, opts, r, batches[batch], uploaders[batch]);
        });
        tasks[i].Start();
    }
    Task.WaitAll(tasks);
    swTotal.Stop();
    for (var i = 0; i < tables.Length; i++)
    {
        if (pks[i] == null)
        {
            continue;
        }
        Console.WriteLine($"{DateTime.Now} Making table '{tables[i]}' distinct (this may take a long time)");
        var tbl = tables[i];
        tbl.MakeDistinct(500000000);
        Console.WriteLine($"{DateTime.Now} Creating primary key on '{tables[i]}' of '{pks[i]}'");
        tbl.CreatePrimaryKey(500000000, tbl.DiscoverColumn(pks[i]));
    }
    Console.WriteLine("Final Row Counts:");
    foreach (DiscoveredTable t in tables)
    {
        Console.WriteLine($"{t.GetFullyQualifiedName()}: {t.GetRowCount():0,0}");
    }
    Console.WriteLine("Total Running Time: " + swTotal.Elapsed);
    return 0;
}