/// <summary>
/// Write the result set to a binary file for later retrieval.
/// Data is first written to a ".new" file; on success the existing file (if any)
/// is backed up to ".bak" and replaced via FileUtil.BackupAndReplaceFile.
/// </summary>
/// <param name="fileName">Base name of the results file to write</param>

public void WriteBinaryResultsFile(string fileName)
{
	int id = Query.UserObject.Id;
	if (id <= 0) throw new Exception("UserObject id not defined, Query not saved");

	BinaryWriter bw = BinaryFile.OpenWriter(fileName + ".new"); // open ".new" file

	try
	{
		string sq = Query.Serialize(); // serialize & write the query
		bw.Write(sq);

		int voArrayLen = DataTableMx.Columns.Count - KeyValueVoPos; // don't write values before the key
		bw.Write(voArrayLen); // write the array length

		for (int dri = 0; dri < DataTableMx.Rows.Count; dri++)
		{
			DataRowMx dr = DataTableMx.Rows[dri];
			VoArray.WriteBinaryVoArray(dr.ItemArrayRef, bw, KeyValueVoPos);
		}
	}

	finally
	{
		bw.Close(); // always release the ".new" file handle, even if a write failed
	}

	// Only reached on success: swap the new file into place

	FileUtil.BackupAndReplaceFile(fileName, fileName + ".bak", fileName + ".new");
	return;
}
/// <summary>
/// Execute a SmallWorld preview query, either remotely via the native session
/// service or locally in-process depending on ServiceFacade.UseRemoteServices.
/// </summary>
/// <param name="query">Query to execute</param>
/// <param name="qe">Returns the QueryEngine for the results (null if none)</param>
/// <param name="rows">Returns the result value-object rows (null if none)</param>

public static void ExecuteSmallWorldPreviewQuery(
	Query query,
	out QueryEngine qe,
	out List<object[]> rows)
{
	byte[] ba;

	qe = null;
	rows = null;

	string serializedQuery = query.Serialize();

	if (ServiceFacade.UseRemoteServices) // execute via the remote native service
	{
		Mobius.Services.Native.INativeSession nativeClient = ServiceFacade.CreateNativeSessionProxy();
		Services.Native.NativeMethodTransportObject resultObject;

		try
		{
			resultObject = ServiceFacade.InvokeNativeMethod(nativeClient,
				(int)Services.Native.ServiceCodes.MobiusCompoundUtilService,
				(int)Services.Native.ServiceOpCodes.MobiusCompoundUtilService.ExecuteSmallWorldPreviewQuery,
				new Services.Native.NativeMethodTransportObject(new object[] { serializedQuery }));

			((System.ServiceModel.IClientChannel)nativeClient).Close();
		}
		catch
		{
			// Abort rather than Close on failure: Close on a faulted WCF channel
			// throws a secondary exception and the channel would otherwise leak
			((System.ServiceModel.IClientChannel)nativeClient).Abort();
			throw;
		}

		if (resultObject.Value == null) return;
		object[] oa = resultObject.Value as object[];

		int qeId = (int)oa[0]; // service returns the id of the server-side engine
		qe = new QueryEngine(qeId, null); // instantiate facade QE

		ba = (byte[])oa[1];
		rows = VoArray.DeserializeByteArrayToVoArrayList(ba);
		return;
	}

	else // execute locally in-process
	{
		QEL.QueryEngine qelQe;
		Query q2 = Query.Deserialize(serializedQuery); // need to make separate copy of query
		QEL.MoleculeUtil.ExecuteSmallWorldPreviewQuery(q2, out qelQe, out rows);
		if (rows == null) return;

		qe = new QueryEngine(-1, qelQe);
		ba = VoArray.SerializeBinaryVoArrayListToByteArray(rows); // serialize & deserialize to simulate service
		rows = VoArray.DeserializeByteArrayToVoArrayList(ba);
		return;
	}
}
/// <summary>
/// Serialize a DataTable to an XmlTextWriter as a "DataTable" element
/// containing "DataColumns" metadata and "DataRows" CDATA records.
/// </summary>
/// <param name="tw">Writer receiving the XML</param>
/// <param name="iDt">Table to serialize; silently ignored if not a DataTableMx</param>

public void SerializeDataTable(
	XmlTextWriter tw,
	IDataTableMx iDt)
{
	DataTableMx table = iDt as DataTableMx;
	if (table == null) return; // ignore if no data table

	tw.WriteStartElement("DataTable");
	tw.WriteAttributeString("TableName", table.TableName);

	// Column definitions, skipping the internal bookkeeping columns

	tw.WriteStartElement("DataColumns");
	foreach (System.Data.DataColumn col in table.Columns)
	{
		bool internalCol = // don't include these in output
			Lex.Eq(col.ColumnName, RowAttributesColumnName) ||
			Lex.Eq(col.ColumnName, CheckMarkColumnName);
		if (internalCol) continue;

		tw.WriteStartElement("DataColumn");
		tw.WriteAttributeString("ColumnName", col.ColumnName);
		tw.WriteAttributeString("DataType", col.DataType.FullName);
		tw.WriteEndElement();
	}
	tw.WriteEndElement(); // end of DataColumns

	// Each row becomes one CData element holding the serialized value array
	// (first two item positions are skipped, matching the two internal columns)

	tw.WriteStartElement("DataRows");
	foreach (DataRowMx row in table.Rows)
	{
		object[] values = row.ItemArray;
		StringBuilder sb = VoArray.SerializeToText(values, 2, values.Length - 2);
		tw.WriteCData(sb.ToString());
	}
	tw.WriteEndElement(); // end of DataRows

	tw.WriteEndElement(); // end of DataTable
	return;
}
/// <summary>
/// Execute a SmallWorld preview query with serialized parameters and results.
/// </summary>
/// <param name="serializedQuery">Serialized form of the query to execute</param>
/// <returns>Object array with the QueryEngine id in the first entry and the
/// serialized rows in the second; null if the query produced no rows</returns>

public static object[] ExecuteSmallWorldPreviewQuerySerialized(
	string serializedQuery)
{
	QueryEngine qe;
	List<object[]> rows;

	Query query = Data.Query.Deserialize(serializedQuery);
	ExecuteSmallWorldPreviewQuery(query, out qe, out rows);
	if (rows == null) return null;

	byte[] serializedRows = VoArray.SerializeBinaryVoArrayListToByteArray(rows);
	return new object[] { qe.Id, serializedRows };
}
/// <summary>
/// Execute query: build the value-object rows for the SAS (structure-activity
/// similarity) map. Reads the analysis parameters from the criteria on the
/// parameters column, reads the compound/activity data, scores every compound
/// pair above the similarity cutoff with coefficient = activityChange / (1 - sim),
/// keeps the top SMP.MaxPairCount pairs, sorts them descending by coefficient,
/// and loads them into VoList.
/// </summary>
/// <param name="eqp">Execution parameters; eqp.QueryTable supplies the criteria</param>

public override void ExecuteQuery(
	ExecuteQueryParms eqp)
{
	MetaTable mt;
	MetaColumn mc;
	Query q;
	QueryTable qt;
	QueryColumn qc;
	ResultsPage rp;
	ResultsViewProps view;
	CompoundStructureActivityData cd1, cd2;
	bool smallerIsbetter;
	double r1, r2, r3, r4;
	int di, di2, pdi, pdi2, i3;
	string tok;

	qt = eqp.QueryTable;
	qc = qt.GetQueryColumnByNameWithException(SasMapParms.ParametersMetaColumnName);
	AssertMx.IsDefined(qc.Criteria, qc.Label + " criteria not defined");

	if (Lex.Eq(qc.Criteria, LastCriteriaString)) // if same criteria as last time then use existing data
	{
		VoListPos = -1; // init list position
		return;
	}

	VoList = new List<object[]>();
	VoListPos = -1; // init list position
	LastCriteriaString = qc.Criteria;

	ParsedSingleCriteria psc = ParsedSingleCriteria.Parse(qc);
	SMP = SasMapParms.Deserialize(psc.Value);

	mc = SMP.EndpointMc;
	smallerIsbetter = mc.MultiPoint; // NOTE(review): "smaller is better" derived from MultiPoint looks unrelated — confirm this mapping is intended

	List<CompoundStructureActivityData> ds1 = ReadData(SMP); // read in the data to analyze
	if (ds1 == null || ds1.Count == 0) return; // throw new QueryException("No data retrieved");

	List<CompoundStructureActivityData> ds2 = ds1; // just one set for now

	// Calculate difference or ratio coefficents for each pair

	List<PairData> pd = new List<PairData>();
	int minCoef = -1; // index of minimum coefficent selected so far

	double molFactor = AssayAttributes.GetMolarConcFactor(SMP.EndpointMc);

	for (di = 0; di < ds1.Count; di++)
	{ // process all compounds in 1st set
		// if (ds1[di].Nearest == 0) continue; // any data?

		if (ds2 == ds1) di2 = di + 1; // only do lower rt diagonal if one dataset
		else di2 = 0; // must do all compares, check for dups later

		for ( /* start at di2 */; di2 < ds2.Count; di2++)
		{
			// if (ds2[di2].Nearest == 0) continue; // any data?
			if (ds1[di].Cid == ds2[di2].Cid) continue; // avoid self compare

			double sim = // similarity
				CalculateSimilarity(ds1[di], ds2[di2]);

			//if (sim==1.0 && !stereo) // eliminate stereo pairs if requested
			// continue; // a more careful check may be needed

			if (sim < SMP.MinimumSimilarity) continue; // below cutoff value?

			double denom = 1 - sim; // denominator is 1 - sim
			if (denom == 0) denom = .00000000001f; // avoid divide by zero

			double actChange = 0;

			// Choose which member of the pair goes in cd1 vs cd2; the order
			// determines the sign/direction of the activity change below

			if (smallerIsbetter && ds1[di].Activity < ds2[di2].Activity)
			{
				cd1 = ds1[di];
				cd2 = ds2[di2];
			}
			else
			{
				cd1 = ds2[di2];
				cd2 = ds1[di];
			}

			double a1 = cd1.Activity;
			double a2 = cd2.Activity;

			if (a1 == NullValue.NullNumber || a2 == NullValue.NullNumber) actChange = 0; // missing value => no change

			else switch (SMP.ActDiffCalcType)
			{
				case ActDiffCalcType.SimpleDifference: // activity difference
					{
						actChange = a1 - a2;
						break;
					}

				case ActDiffCalcType.NegativeLog:
					{
						actChange = -Math.Log10(a1) - -Math.Log10(a2);
						break;
					}

				case ActDiffCalcType.MolarNegativeLog:
					{
						// Convert to molar concentration before taking the negative log
						actChange = (-Math.Log10(a1 * molFactor)) - (-Math.Log10(a2 * molFactor));
						break;
					}

				case ActDiffCalcType.Ratio: // activity ratio
					{
						r1 = a1;
						if (r1 == 0) r1 = .00000000001f; // avoid divide by zero
						r2 = a2;
						if (r2 == 0) r2 = .00000000001f;
						r3 = r1 / r2;
						r4 = r2 / r1;
						actChange = r3;
						if (SMP.UseAbsoluteValue && r4 > r3) // take the max value
							actChange = r4;
						break;
					}

				case ActDiffCalcType.Advanced:
					{
						throw new InvalidOperationException("SarMapCalcType.Advanced");
					}

				default:
					throw new InvalidOperationException("SarMapCalcType: " + (int)SMP.ActDiffCalcType);
			}

			if (SMP.UseAbsoluteValue && SMP.ActDiffCalcType != ActDiffCalcType.Ratio)
				actChange = Math.Abs(actChange);

			double coef = actChange / denom; // SAS coefficient: activity change scaled by structural dissimilarity

			if (pd.Count < SMP.MaxPairCount) // just add this pair to end
			{
				pdi = pd.Count;
				pd.Add(new PairData());
			}

			else
			{ // list is full: see if this value is greater than anything in list
				if (minCoef < 0)
				{ // find element with minimum coef (recomputed lazily after a replacement)
					minCoef = 0;
					for (i3 = 1; i3 < pd.Count; i3++)
					{
						if (pd[i3].Coef < pd[minCoef].Coef) minCoef = i3;
					}
				}

				if (coef <= pd[minCoef].Coef) continue; // if this one better?

				//if (ds1 != ds2)
				//{ // be sure not a duplicate of what we have in list
				// for (i3 = 0; i3 < pd.Count; i3++)
				// { // check for pair in list already
				//  if ((di == pd[i3].CD1 && di2 == pd[i3].CD2) ||
				//   (di == pd[i3].CD2 && di2 == pd[i3].CD1)) break;
				// }
				// if (i3 < pd.Count) continue; // continue to next pair if found
				//}

				pdi = minCoef; // replace this item
				minCoef = -1; // reset to get new minimum next time
			}

			// Save data for the pair

			pd[pdi].CD1 = cd1;
			pd[pdi].CD2 = cd2;
			pd[pdi].Sim = sim;
			pd[pdi].ActChange = actChange;
			pd[pdi].Coef = coef;
		}
	}

	// Build the list of pair Vos

	int voLen = qt.SetSimpleVoPositions();

	PairData pdItem;

	for (pdi = 1; pdi < pd.Count; pdi++) // sort from max to min coef value (insertion sort)
	{
		pdItem = pd[pdi];
		for (pdi2 = pdi - 1; pdi2 >= 0; pdi2--)
		{
			if (pdItem.Coef < pd[pdi2].Coef) break;
			pd[pdi2 + 1] = pd[pdi2];
		}
		pd[pdi2 + 1] = pdItem;
	}

	for (pdi = 0; pdi < pd.Count; pdi++)
	{
		pdItem = pd[pdi];
		cd1 = pdItem.CD1;
		cd2 = pdItem.CD2;

		object[] vo = new object[voLen];
		VoArray.SetVo(qt, "PAIR_ID", vo, new NumberMx(pdi + 1));
		VoArray.SetVo(qt, "COMPOUND1", vo, new StringMx(cd1.Cid));
		VoArray.SetVo(qt, "STRUCTURE1", vo, cd1.Structure);
		VoArray.SetVo(qt, "ACTIVITY1", vo, new NumberMx(cd1.Activity));
		VoArray.SetVo(qt, "COMPOUND2", vo, new StringMx(cd2.Cid));
		VoArray.SetVo(qt, "STRUCTURE2", vo, cd2.Structure);
		VoArray.SetVo(qt, "ACTIVITY2", vo, new NumberMx(cd2.Activity));
		VoArray.SetVo(qt, "SIMILARITY", vo, new NumberMx(pdItem.Sim));
		VoArray.SetVo(qt, "ACTIVITY_DIFF", vo, new NumberMx(pdItem.ActChange));
		VoArray.SetVo(qt, "ACT_SIM_COEF", vo, new NumberMx(pdItem.Coef));

		VoList.Add(vo);
	}

	VoListPos = -1; // init list position
	return;
}
/// <summary>
/// Write out rows from the DataTable to the cache file if caching has been
/// activated, removing the cached rows from the in-memory table and adjusting
/// the fetch position and per-row attribute indexes accordingly.
/// </summary>
/// <param name="keepLastKeyValue">If true, rows for the last key value (which may
/// still be partially fetched) are kept in memory rather than paged out</param>

internal void WriteRowsToCache(bool keepLastKeyValue)
{
	DataRowMx dr;
	string firstKey = "", lastKey = "";
	int rowsRemovedInThisCall = 0, ri, ri2, srpi;

	if (!AllowCaching)
	{
		return;
	}

	if (CacheStartPosition < 0 || CacheWriter == null)
	{
		return; // just return if caching not active
	}

	if (DataTableMx.Rows.Count < CacheMiminumRowsRequiredForWriting)
	{
		return;
	}

	lock (DataTransferLock) // lock the DataTable while changing
	{
		if (DataTableFetchPosition < CacheStartPosition)
		{
			return; // only cache if fetching beyond start position
		}

		if (DataTableFetchPosition < DataTableMx.Rows.Count - 1)
		{
			return; // can only cache out if at end of available rows
		}

		dr = DataTableMx.Rows[CacheStartPosition];
		firstKey = dr[KeyValueVoPos] as string;

		ri = DataTableMx.Rows.Count - 1;
		dr = DataTableMx.Rows[ri];
		lastKey = dr[KeyValueVoPos] as string; // key we want to keep

		if (QueryManager.MoleculeGrid != null)
		{
			QueryManager.MoleculeGrid.BeginUpdate(); // suspend grid painting during bulk removal
		}

		ri = CacheStartPosition; // start deleting here
		while (ri < DataTableMx.Rows.Count)
		{ // delete anything with key other than end value
			// Note: ri is intentionally never incremented — Remove() below shifts
			// the next row down into position ri on each iteration
			dr = DataTableMx.Rows[ri];
			string key2 = dr[KeyValueVoPos] as string;

			// end key with possibly partial data that we want to keep
			if (keepLastKeyValue && key2 == lastKey)
			{
				break;
			}

			bool doCacheIO = !PurgeDataTableWithoutWritingToCacheFile;
			if (doCacheIO)
			{
				// Serialize the row's values from the key column onward and write
				// them as a fixed-width length prefix followed by the record text
				object[] oa = dr.ItemArray;
				StringBuilder sb = VoArray.SerializeToText(oa, KeyValueVoPos, oa.Length - KeyValueVoPos);
				string lenStr = String.Format("{0,8:00000000}", sb.Length);
				CacheWriter.Write(lenStr); // write length
				CacheWriter.Write(sb); // write record
				RowsWrittenToCache++;
			}

			DataTableMx.Rows.Remove(dr);
			RowsRemovedFromDataTable++;
			rowsRemovedInThisCall++;
		}

		DataTableFetchPosition -= rowsRemovedInThisCall; // adjust fetch position

		for (ri2 = CacheStartPosition; ri2 < DataTableMx.Rows.Count; ri2++)
		{ // adjust row indexes held in the row attributes for rows below those paged out
			DataRowAttributes dra = GetRowAttributes(ri2);
			if (dra == null)
			{
				continue;
			}

			dra.FirstRowForKey -= rowsRemovedInThisCall;

			if (dra.SubRowPos == null)
			{
				continue;
			}

			for (srpi = 0; srpi < dra.SubRowPos.Length; srpi++)
			{
				dra.SubRowPos[srpi] -= rowsRemovedInThisCall;
			}
		}
	} // end of locked section

	if (QueryManager.MoleculeGrid != null)
	{
		QueryManager.MoleculeGrid.EndUpdate();
		QueryManager.MoleculeGrid.Refresh();
		Application.DoEvents();
	}

	if (DebugCaching)
	{
		ClientLog.Message(
			"CachedRows - DataTable.Rows.Count: " + DataTableMx.Rows.Count +
			", FirstKey: " + firstKey +
			", LastKey: " + lastKey +
			", RowsRemovedFromDataTable (This Call): " + rowsRemovedInThisCall +
			", RowsRemovedFromDataTable (Total): " + RowsRemovedFromDataTable +
			", RowsWrittenToCache (Total): " + RowsWrittenToCache +
			", DataTableFetchPosition: " + DataTableFetchPosition);
	}

	return;
}
/// <summary>
/// Deserialize a DataTable from an XmlTextReader. Expects a "DataTable" element
/// containing "DataColumns" metadata followed by "DataRows" CDATA records.
/// The two internal bookkeeping columns (row attributes and check mark) are
/// prepended and left null for every row.
/// </summary>
/// <param name="tr">Reader positioned at (or before) the "DataTable" element</param>
/// <returns>The reconstructed table</returns>

public IDataTableMx Deserialize(
	XmlTextReader tr)
{
	DataTableMx dt = new DataTableMx();
	dt.Columns.Add(RowAttributesColumnName, typeof(DataRowAttributes));
	dt.Columns.Add(CheckMarkColumnName, typeof(bool));

	tr.MoveToContent();
	if (!Lex.Eq(tr.Name, "DataTable")) throw new Exception("No \"DataTable\" element found");
	tr.Read();
	tr.MoveToContent();

	// Read DataColumns

	if (!Lex.Eq(tr.Name, "DataColumns")) throw new Exception("No \"DataColumns\" element found");
	tr.Read();
	tr.MoveToContent();
	while (true)
	{
		if (Lex.Eq(tr.Name, "DataColumn"))
		{
			string colName = tr.GetAttribute("ColumnName");
			string typeName = tr.GetAttribute("DataType");
			// Type type = Type.GetType(typeName);
			Type type = typeof(object); // store all as object types
			System.Data.DataColumn dc = dt.Columns.Add(colName, type);
			tr.Read();
			tr.MoveToContent();
		}

		else if (tr.NodeType == XmlNodeType.EndElement && tr.Name == "DataColumns")
		{
			tr.Read();
			tr.MoveToContent();
			break;
		}

		else throw new Exception("Expected DataColumn or DataColumns end element but saw " + tr.Name);
	}

	// Read DataRows

	if (tr.Name != "DataRows") throw new Exception("No \"DataRows\" element found");

	bool isEmptyElement = tr.IsEmptyElement; // <DataRows/> means no rows to read
	tr.Read();
	tr.MoveToContent();
	if (!isEmptyElement)
	{
		while (true)
		{
			if (tr.NodeType == XmlNodeType.CDATA)
			{
				// Allocate a fresh buffer per row: reusing a single array risks
				// every row aliasing the same object[] and stale values leaking
				// forward when a row deserializes fewer items than the previous one
				object[] rowData = new object[dt.Columns.Count];
				object[] oa = VoArray.DeserializeText(tr.Value);
				Array.Copy(oa, 0, rowData, 2, oa.Length); // offset 2 skips the internal columns
				dt.Rows.Add(rowData);
				tr.Read();
				tr.MoveToContent();
			}

			else if (tr.NodeType == XmlNodeType.EndElement && tr.Name == "DataRows")
			{
				tr.Read();
				tr.MoveToContent();
				break;
			}

			else if (tr.Name == "ArrayOfAnyType") // ignore old form data
			{
				while (true)
				{
					tr.Read();
					tr.MoveToContent();
					if (tr.NodeType == XmlNodeType.EndElement && tr.Name == "ArrayOfAnyType")
					{
						break;
					}
				}
				tr.Read();
				tr.MoveToContent();
			}

			else throw new Exception("Expected ArrayOfAnyType or DataRows end element but saw " + tr.Name);
		}
	}

	if (tr.NodeType == XmlNodeType.EndElement && tr.Name == "DataTable")
	{
		return dt;
	}

	else throw new Exception("Expected DataTable end element but saw " + tr.Name);
}
/// <summary>
/// Attempt to read an existing binary results file into the query DataTable.
/// The file contains the serialized query that produced the data followed by
/// the value-object rows; a column mapping is built in case the cached query's
/// vo layout differs from the current query's layout.
/// </summary>
/// <param name="fileName">Binary results file to read</param>

public void ReadBinaryResultsFile(string fileName)
{
	QueryTable qt;
	QueryColumn qc;

	BinaryReader br = null;
	Stopwatch sw = Stopwatch.StartNew();

	try
	{
		bool saveHandlersEnabled = Qm.DataTable.EnableDataChangedEventHandlers(false); // disable for faster load
		bool saveUpdateMaxRowsPerKey = UpdateMaxRowsPerKeyEnabled;
		UpdateMaxRowsPerKeyEnabled = false; // disable for faster load

		int id = Query.UserObject.Id;
		if (id <= 0) throw new Exception("Query not saved");

		if (DataTableMx == null || DataTableMx.Columns.Count == 0)
			throw new Exception("DataTable not defined");

		br = BinaryFile.OpenReader(fileName);
		string sq = br.ReadString();
		Query q0 = Query.Deserialize(sq); // deserialize the saved query

		QueryManager qm0 = new QueryManager();
		qm0.LinkMember(q0);
		ResultsFormat rf0 = new ResultsFormat(qm0, OutputDest.WinForms);
		ResultsFormatFactory rff0 = new ResultsFormatFactory(qm0, OutputDest.WinForms);
		rff0.Build(); // build format with vo positions

		// The cached query cols should match those of the current query; however,
		// we'll create a mapping just in case they don't

		int voArrayLen0 = br.ReadInt32(); // cached vo array len
		int voArrayLen = DataTableMx.Columns.Count - KeyValueVoPos; // current query vo array len

		List<int> q0VoMap = new List<int>(); // vo position in cached query data
		List<int> qVoMap = new List<int>(); // vo position in current version of query

		q0VoMap.Add(0); // first position is the common key value
		qVoMap.Add(0);

		foreach (QueryTable qt0 in q0.Tables) // scan each table in cached data
		{
			foreach (QueryColumn qc0 in qt0.QueryColumns) // and each column
			{
				if (qc0.VoPosition < 0)
				{
					continue; // skip if not mapped to the vo in cached data
				}

				int q0VoPos = qc0.VoPosition - KeyValueVoPos; // where it is in cache
				int qvoPos = -1; // where it will go

				qt = Query.GetTableByName(qt0.MetaTable.Name);
				if (qt != null)
				{
					qc = qt.GetQueryColumnByName(qc0.MetaColumn.Name);
					if (qc != null)
					{
						qvoPos = qc.VoPosition - KeyValueVoPos;
					}
				}

				q0VoMap.Add(q0VoPos); // where it is in saved data
				qVoMap.Add(qvoPos); // where it will go (not including attributes & check cols)
			}
		}

		if (q0VoMap.Count != voArrayLen0)
			throw new Exception("Cached Vo length doesn't match list of selected columns");

		DataTableMx.Clear(); // clear the rows
		CidList cidList = new CidList();
		object[] voa = new object[voArrayLen]; // array to fill

		while (!BinaryFile.ReaderEof(br)) // process each row
		{
			for (int mi = 0; mi < q0VoMap.Count; mi++) // each col
			{
				object o = VoArray.ReadBinaryItem(br);

				if (mi == 0 && o != null) // add to key list if key
				{
					cidList.Add(o.ToString(), false);
				}

				if (qVoMap[mi] >= 0) // save in new buf if mapped
				{
					voa[qVoMap[mi]] = o;
				}
			}

			DataRowMx dr = AddDataRow(voa);
		}

		Qm.DataTable.EnableDataChangedEventHandlers(saveHandlersEnabled);
		UpdateMaxRowsPerKeyEnabled = saveUpdateMaxRowsPerKey;
		InitializeRowAttributes(false);

		ResultsKeys = cidList.ToStringList(); // include keys in DTM as well

		double ms = sw.Elapsed.TotalMilliseconds; // load time, handy in the debugger
		return;
	}

	finally
	{
		// Always release the file. Using finally (instead of the previous
		// catch that rethrew "new Exception(ex.Message, ex)") preserves the
		// original exception's type and stack trace for callers
		if (br != null) br.Close();
	}
}