} // end struct

/// <summary>
/// Reads the database connection settings stored at the given configuration
/// xpath and packs them into a DatabaseCredentials value.
/// </summary>
/// <param name="xpath"> configuration path holding the credential keys.</param>
/// <returns> a DatabaseCredentials populated from configuration.</returns>
public static DatabaseCredentials get_DatabaseCredentials(string xpath)
{
    ConfigurationLayer.ConfigurationService theConfigService =
        new ConfigurationLayer.ConfigurationService(xpath);

    DatabaseCredentials res = new DatabaseCredentials();
    res.hostname_sql_instance = theConfigService.GetStringValue("hostname_sql_instance");
    res.db_name               = theConfigService.GetStringValue("db_name");
    res.sql_instance          = theConfigService.GetStringValue("sql_instance");
    res.usr                   = theConfigService.GetStringValue("usr");
    res.pwd                   = theConfigService.GetStringValue("pwd");

    return res;
} // end get_DatabaseCredentials
}//------- end input into the db.

/// <summary>
/// This is the unique access point to extract documents. Flow:
/// - it queries an Entity_materie::Proxy and extracts the blob chunks,
/// - it saves them under the original filename, at the User::tmp path,
///   decorated with a dynamic (time-based) string.
/// </summary>
/// <param name="id"> the id-field, in the db record layout.</param>
/// <param name="extractionFullPath"> receives the full path of the extracted file; null on failure.</param>
/// <param name="clientIP"> caller address; used only for logging.</param>
/// <returns>
/// a status integer:
/// 0==success,
/// -1==failure.
/// </returns>
public Int32 FILE_from_DB_writeto_FS(
    int id
    , out string extractionFullPath
    , string clientIP // just for Logging.
)
{
    int res = -1;              // init to invalid.
    extractionFullPath = null; // compulsory init; actual initialization in body.

    if (0 >= id)
    { // invalid id.
        return (-1); // -1==failure.
    }// else can continue.
    //
    System.IO.FileStream multiChunk_stream = null;
    //
    try
    {
        System.Data.DataSet ds_allInvolvedIds =
            Entity_materie.Proxies.usp_docMulti_dataMining_SERVICE.usp_docMulti_dataMining(id);
        if (null == ds_allInvolvedIds)
        {
            throw new System.Exception("trouble connecting to database.");
        }
        //
        //----revert the ids to their original order.--------
        hm_ids = ds_allInvolvedIds.Tables.Count;
        // NB. Tables[0] is static and doesn't count, so at least two tables are needed.
        // BUGFIX: the old guard (0 >= hm_ids) still dereferenced Tables[1] when only
        // one table was present, throwing instead of returning -1.
        if (2 > hm_ids || 0 >= ds_allInvolvedIds.Tables[1].Rows.Count)
        { // invalid id.
            return (-1); // -1==failure.
        }// else can continue.
        else // valid chunk group.
        {
            ids = new Int32[hm_ids];
            for (int c = 0; c < hm_ids; c++)
            { // the proxy returns the tables in reverse order: flip them back.
                ids[c] = (Int32)(ds_allInvolvedIds.Tables[hm_ids - (c + 1)].Rows[0].ItemArray[0]);
            }

            string sourceName = null;
            System.Data.DataTable dt_sourceName =
                Entity_materie.Proxies.usp_docMulti_get_sourceName_SERVICE.usp_docMulti_get_sourceName(
                    ids[1] //--NB. skip first datatable row, which has id==0.
                );
            if (null != dt_sourceName && 0 < dt_sourceName.Rows.Count)
            {
                sourceName = (string)(dt_sourceName.Rows[0].ItemArray[0]);
                if (sourceName.Length > 60)
                { // shorten the name, trying to preserve the extension.
                    int last_dot_position = sourceName.LastIndexOf('.');
                    string extension = ".doc"; // default
                    if (-1 < last_dot_position)
                    {
                        // BUGFIX: never read past the end of the string; the old code
                        // assumed at least 3 characters after the last dot and threw
                        // ArgumentOutOfRangeException on names such as "....cs".
                        extension = sourceName.Substring(
                            last_dot_position,
                            System.Math.Min(4, sourceName.Length - last_dot_position));
                    } // else default to ".doc".
                    sourceName = sourceName.Substring(0, 56) + extension;
                }// else leave it as is.
            }
            else
            { // invalid id.
                return (-1); // -1==failure.
            }// else can continue.
            //
            ConfigurationLayer.ConfigurationService cs =
                new ConfigurationLayer.ConfigurationService("FileTransferTempPath/fullpath");
            string dlgSave_InitialDirectory = cs.GetStringValue("path");
            dlgSave_InitialDirectory += "\\download"; // in case HTTP it's a path on the Web-Server. In case WindowsForms it's on localhost.
            // In case HTTP there is a WebApplication::AppCode::Downloader call
            // to System.Web.HttpContext.Response.WriteFile(webServer_extractionPath)
            // to bring the file from webServer to client.
            //
            // Ensure the folder exists.
            if (!System.IO.Directory.Exists(dlgSave_InitialDirectory))
            {
                System.IO.Directory.CreateDirectory(dlgSave_InitialDirectory);
            }// else already present on the web server file system.

            string timeStamp =
                DateTime.Now.Year.ToString() + "#" +
                DateTime.Now.Month.ToString() + "#" +
                DateTime.Now.Day.ToString() + "_" +
                DateTime.Now.Hour.ToString() + "#" +
                DateTime.Now.Minute.ToString() + "#" +
                DateTime.Now.Second.ToString() + "_" +
                DateTime.Now.Millisecond.ToString() + "_";
            // sanitize every character that is illegal or awkward in a file name.
            timeStamp = timeStamp.Replace('/', '_').Replace('\\', '_')
                .Replace(' ', '_').Replace('.', '_').Replace(':', '_')
                .Replace(';', '_').Replace(',', '_').Replace('|', '_')
                .Replace('<', '_').Replace('>', '_').Replace('?', '_').Replace('*', '_').Replace('"', '_');
            extractionFullPath = dlgSave_InitialDirectory + "\\" + timeStamp + "_" + sourceName;

            if (null == clientIP || "" == clientIP)
            {
                clientIP = " unspecified. ";
            }// else continue.
            LoggingToolsContainerNamespace.LoggingToolsContainer.LogBothSinks_DbFs(
                " extraction WebServer or localhost FullPath = " + extractionFullPath
                + " #_# and clientIP = " + clientIP
                , 5);

            //--prepare the binary stream to append to.-----------
            multiChunk_stream = new System.IO.FileStream(
                extractionFullPath,
                System.IO.FileMode.CreateNew, // the timestamped name must not pre-exist.
                System.IO.FileAccess.Write,
                System.IO.FileShare.None,
                docBody__Length * hm_ids // prepare a size of "n" chunks of 64Kb. The last one is generally not full: so it's surely enough.
            );
            if (null == multiChunk_stream)
            {
                throw new System.Exception("unable to write the file.");
            }
            //---retrieve blob, for each id.----------------------
            for (int c = 1; c < hm_ids; c++) //--NB. skip first datatable row, which has id==0.
            {
                System.Data.DataTable dt_currentChunk =
                    Entity_materie.Proxies.usp_docMulti_getBlobAtId_SERVICE.usp_docMulti_getBlobAtId(
                        ids[c]
                    );
                byte[] tmp_chunk = (byte[])(dt_currentChunk.Rows[0]["doc"]);
                multiChunk_stream.Write( //NB.----write a single chunk on filesystem.----
                    tmp_chunk,
                    0,
                    tmp_chunk.Length
                );
            }// end for
            LoggingToolsContainerNamespace.LoggingToolsContainer.LogBothSinks_DbFs(
                "OK, doc extracted at: " + extractionFullPath, 5);
            res = 0; // 0==success: reached only when every chunk has been written.
        } // end else // valid chunk group.
    } // end try.---
    catch (System.Exception ex) // invalid id inserted, or any db/fs trouble.
    {
        LoggingToolsContainerNamespace.LoggingToolsContainer.LogBothSinks_DbFs(
            "error while trying to retrieve a document: " + ex.Message, 5);
        res = -1; // -1==failure.
    }
    finally
    {
        ids = null;  // reset.
        hm_ids = -1; // reset.
        if (null != multiChunk_stream)
        {
            multiChunk_stream.Flush();
            multiChunk_stream.Close();
            multiChunk_stream = null; //---garbage collect.-----
        }//----------else already ok.----
        // BUGFIX: the old code set res = 0 here unconditionally, so the method
        // reported success even after the catch branch had flagged a failure.
    }
    //---ready---
    return (res); // 0==success, -1==failure.
}//---end method----
/// <summary>
/// The calling BPL supplies {sequenza, out mode}.
/// The 'mode' parameter takes values in {'s', 'm', 'o'}, meaning
/// {simple, multi, off} respectively. The default is 'multi'.
/// </summary>
/// <param name="sequenza"> the sequence to encrypt, in clear text.</param>
/// <param name="mode"> receives the algorithm selector actually applied.</param>
/// <returns> the CryptoEngine result for the encrypted sequence.</returns>
public static CryptoEngine.theReturnType CriptazioneSequenza(
    string sequenza, // clear text
    out char mode
)
{
    // Encryption, forward branch.
    CryptoStore.Macro.CryptoEngine cs = new CryptoStore.Macro.CryptoEngine();

    /* To decide how to set the function pointer, read web.config at the
     * xpath "Criptazione/CryptoService" and check whether the key "switch"
     * names an existing algorithm, turns encryption off, or holds an
     * invalid value (which falls back to the multi algorithm). */
    ConfigurationLayer.ConfigurationService myConfig =
        new ConfigurationLayer.ConfigurationService("Criptazione/CryptoService");
    string AlgorithmSwitch = myConfig.GetStringValue("switch");
    // NB. <!-- switch: {"simple", "multi", "off"} -->

    CryptoStore.Macro.CryptoEngine.GenericAlgoPointer specificaFunzione; // pointer to the encryption service
    if ("simple" == AlgorithmSwitch)
    {
        specificaFunzione = new CryptoEngine.GenericAlgoPointer(
            CryptoStore.Micro.SimpleXor.xorCrypto_andata);
        mode = 's'; // simple
    }
    else if ("off" == AlgorithmSwitch)
    {
        specificaFunzione = null; // no crypto.
        mode = 'o'; // off
    }
    else // "multi", or any misconfigured value of key "switch" in web.config.
    {
        specificaFunzione = new CryptoEngine.GenericAlgoPointer(
            CryptoStore.Micro.MultiXor.xorMultiCrypto_andata);
        mode = 'm'; // multi
    }

    CryptoStore.Macro.CryptoEngine.theReturnType cryptedValues =
        cs.Criptazione(sequenza, specificaFunzione);

    // ready
    return cryptedValues;
} //
} // end struct

// NB. The private, per-instance datum (though filtered by the Singleton pattern)
// is the db-connection. Acquire it when the application opens and release it at dispose.

// Private gate for the constructor's critical section.
// BUGFIX: the old code locked typeof(LogSinkDb.Library.SinkDb); a Type object is
// publicly reachable, so any foreign code could take (and hold) the same lock.
private static readonly object ctor_gate = new object();

public SinkDb()
{
    lock (ctor_gate)
    {
        // config-parsing
        try // silent log: every failure must be caught here.
        {
            // read the connection-string from configuration and try to acquire such connection.
            this.logDbConn = DbLayer.ConnectionManager.getCryptedConnection("LogSinkDb/connection");
            if (null == this.logDbConn
                || System.Data.ConnectionState.Open != this.logDbConn.State)
            {
                throw new System.Exception("Impossibile acquisire la connessione al db. Verificare il file di configurazione.");
            }// else ok -> go on

            // read the rest of configuration.
            ConfigurationLayer.ConfigurationService cs =
                new ConfigurationLayer.ConfigurationService("LogSinkDb/logger_application");
            this.TableName = cs.GetStringValue("table_name");

            string semaphore = cs.GetStringValue("semaphore");
            switch (semaphore)
            {
                case "on":
                {
                    this.semaphore = 1; // green semaphore
                    break;
                }
                case "off":
                default:
                {
                    this.semaphore = 0; // red semaphore
                    break;
                }
            }// end switch on semaphore

            string verbosity = cs.GetStringValue("verbosity");
            // BUGFIX: parse culture-invariantly (CA1305), so a machine-readable config
            // value cannot be misread under exotic cultures.
            // If this throws -> hasPermissionsToWrite=false.
            this.verbosity = int.Parse(verbosity, System.Globalization.CultureInfo.InvariantCulture);
            // end configuration acquisition. Implementation start.

            if (1 != this.semaphore)
            {
                hasPermissionsToWrite = false;
            }
            else // green semaphore
            {
                // prepare tag-stack
                this.tagStack = new System.Collections.Stack();
                // once the tableName is read from configuration -> try-create such table
                if (!this.createTable(this.TableName))
                {
                    throw new System.Exception("L'accesso alla tabella di log ha sollevato un'eccezione. Verificare il file di configurazione.");
                } // else ok -> go on
                hasPermissionsToWrite = true;
            } // end else // green semaphore
        } // end try
        catch (Exception ex)
        { // any throw above lands here -> hasPermissionsToWrite=false
            this.constructorException = ex.Message;
            hasPermissionsToWrite = false;
        }
        finally
        {
            if (null != this.logDbConn)
            {
                if (System.Data.ConnectionState.Open == this.logDbConn.State)
                {
                    this.logDbConn.Close(); // no-persistency: volatile connections.
                }// else already closed
            }// else no connection
        }
    } // end critical section
} // end Ctor
}//------- end input into the db.

/// <summary>
/// This is the unique access point to extract documents. Flow:
/// - it queries an Entity::Proxy and extracts the blob chunks,
/// - it saves them under the original filename, at the User::tmp path,
///   decorated with a dynamic (time-based) string.
/// </summary>
/// <param name="id"> the id-field, in the db record layout.</param>
/// <param name="extractionFullPath"> receives the full path of the extracted file; null on failure.</param>
/// <param name="clientIP"> caller address; used only for logging.</param>
/// <returns>
/// a status integer:
/// 0==success,
/// -1==failure.
/// </returns>
public Int32 FILE_from_DB_writeto_FS(
    int id,
    out string extractionFullPath
    , string clientIP
)
{
    int res = -1;              // init to invalid.
    extractionFullPath = null; // compulsory init; actual initialization in body.
    if (0 >= id)
    { // invalid id.
        return (-1);
    }// else can continue.
    //
    System.IO.FileStream multiChunk_stream = null;
    //
    try
    {
        System.Data.DataSet ds_allInvolvedIds =
            Entity.Proxies.usp_doc_multi_dataMining_SERVICE.usp_doc_multi_dataMining(id);
        if (null == ds_allInvolvedIds)
        {
            throw new System.Exception("trouble connecting to database.");
        }
        //
        //----revert the ids to their original order.--------
        hm_ids = ds_allInvolvedIds.Tables.Count;
        // NB. Tables[0] is static and doesn't count, so at least two tables are needed.
        // BUGFIX: the old guard (0 >= hm_ids) still dereferenced Tables[1] when only
        // one table was present, throwing instead of returning -1.
        if (2 > hm_ids || 0 >= ds_allInvolvedIds.Tables[1].Rows.Count)
        { // invalid id.
            return (-1);
        }// else can continue.
        else // valid chunk group.
        {
            ids = new Int32[hm_ids];
            for (int c = 0; c < hm_ids; c++)
            { // the proxy returns the tables in reverse order: flip them back.
                ids[c] = (Int32)(ds_allInvolvedIds.Tables[hm_ids - (c + 1)].Rows[0].ItemArray[0]);
            }

            string sourceName = null;
            System.Data.DataTable dt_sourceName =
                Entity.Proxies.usp_doc_multi_get_sourceName_SERVICE.usp_doc_multi_get_sourceName(
                    ids[1] //--NB. skip first datatable row, which has id==0.
                );
            if (null != dt_sourceName && 0 < dt_sourceName.Rows.Count)
            {
                sourceName = (string)(dt_sourceName.Rows[0].ItemArray[0]);
                if (sourceName.Length > 60)
                { // shorten the name, trying to preserve the extension.
                    int last_dot_position = sourceName.LastIndexOf('.');
                    string extension = ".doc"; // default
                    if (-1 < last_dot_position)
                    {
                        // BUGFIX: never read past the end of the string; the old code
                        // assumed at least 3 characters after the last dot.
                        extension = sourceName.Substring(
                            last_dot_position,
                            System.Math.Min(4, sourceName.Length - last_dot_position));
                    } // else default to ".doc".
                    sourceName = sourceName.Substring(0, 56) + extension;
                }// else leave it as is.
            }
            else
            { // invalid id.
                return (-1);
            }// else can continue.
            //
            ConfigurationLayer.ConfigurationService cs =
                new ConfigurationLayer.ConfigurationService("FileTransferTempPath/fullpath");
            string dlgSave_InitialDirectory = cs.GetStringValue("path");
            dlgSave_InitialDirectory += "\\download"; // from server to client.
            //
            // Ensure the folder exists.
            if (!System.IO.Directory.Exists(dlgSave_InitialDirectory))
            {
                System.IO.Directory.CreateDirectory(dlgSave_InitialDirectory);
            }// else already present on the web server file system.

            // BUGFIX: the old name carried only year+month+day, so two extractions on
            // the same day collided and FileMode.Append concatenated both documents
            // into one corrupted file. Decorate with full time-of-day, as the
            // Entity_materie sibling method does.
            string timeStamp =
                DateTime.Now.Year.ToString() +
                DateTime.Now.Month.ToString() +
                DateTime.Now.Day.ToString() + "_" +
                DateTime.Now.Hour.ToString() + "#" +
                DateTime.Now.Minute.ToString() + "#" +
                DateTime.Now.Second.ToString() + "_" +
                DateTime.Now.Millisecond.ToString();
            // sanitize characters that are illegal or awkward in a file name.
            timeStamp = timeStamp.Replace('/', '_').Replace('\\', '_')
                .Replace(' ', '_').Replace('.', '_').Replace(':', '_')
                .Replace(';', '_').Replace(',', '_');
            extractionFullPath = dlgSave_InitialDirectory + "\\" + timeStamp + "_" + sourceName;

            if (null == clientIP || "" == clientIP)
            {
                clientIP = " unspecified. ";
            }// else continue.
            LoggingToolsContainerNamespace.LoggingToolsContainer.LogBothSinks_DbFs(
                " extractionFullPath = " + extractionFullPath
                + " _ clientIP = " + clientIP
                , 0);

            //--prepare the binary stream to append to.-----------
            multiChunk_stream = new System.IO.FileStream(
                extractionFullPath,
                System.IO.FileMode.Append,
                System.IO.FileAccess.Write,
                System.IO.FileShare.None,
                docBody__Length * hm_ids // prepare a size of "n" chunks of 64Kb. The last one is generally not full: so it's surely enough.
            );
            if (null == multiChunk_stream)
            {
                throw new System.Exception("unable to write the file.");
            }
            //---retrieve blob, for each id.----------------------
            for (int c = 1; c < hm_ids; c++) //--NB. skip first datatable row, which has id==0.
            {
                System.Data.DataTable dt_currentChunk =
                    Entity.Proxies.usp_doc_multi_getBlobAtId_SERVICE.usp_doc_multi_getBlobAtId(
                        ids[c]
                    );
                byte[] tmp_chunk = (byte[])(dt_currentChunk.Rows[0]["doc"]);
                multiChunk_stream.Write( //NB.----write a single chunk on filesystem.----
                    tmp_chunk,
                    0,
                    tmp_chunk.Length
                );
            }// end for
            LoggingToolsContainerNamespace.LoggingToolsContainer.LogBothSinks_DbFs(
                "OK, doc extracted.", 0);
            res = 0; // 0==success: reached only when every chunk has been written.
        } // end else // valid chunk group.
    } // end try.---
    catch (System.Exception ex) // invalid id inserted, or any db/fs trouble.
    {
        LoggingToolsContainerNamespace.LoggingToolsContainer.LogBothSinks_DbFs(
            "error while trying to retrieve a document: " + ex.Message, 0);
        // BUGFIX: failures are now reported; the old code fell through to an
        // unconditional "return(0)" and claimed success after every exception.
        res = -1;
    }
    finally
    {
        ids = null;  // reset.
        hm_ids = -1; // reset.
        if (null != multiChunk_stream)
        {
            multiChunk_stream.Flush();
            multiChunk_stream.Close();
            multiChunk_stream = null; //---garbage collect.-----
        }//----------else already ok.----
    }
    //---ready---
    return (res); // 0==success, -1==failure.
}//---end method----