/// <summary>
/// Looks up a company by RUC against the remote SUNAT lookup service and
/// returns the result flattened into a positional list of 14 fields
/// (ruc, razon_social, condicion, ... , ple).
/// </summary>
/// <param name="ruc">RUC number to query.</param>
/// <returns>List of 14 strings; callers consume these by index.</returns>
public List<string> BuscarClienteAPIReniec(String ruc)
{
    List<string> campos = new List<string>();
    string url = @"https://procontbusiness.com/sunat/sunat.php?ruc=" + ruc;

    HttpWebRequest request = (HttpWebRequest)WebRequest.Create(url);
    using (HttpWebResponse response = (HttpWebResponse)request.GetResponse())
    using (Stream stream = response.GetResponseStream())
    using (StreamReader reader = new StreamReader(stream))
    {
        string json = reader.ReadToEnd();
        JsonGeneric cliente = JsonConvert.DeserializeObject<JsonGeneric>(json);
        var datos = cliente.result;

        // The order below is part of the contract: callers index the list positionally (0..13).
        campos.Add(datos.ruc);
        campos.Add(datos.razon_social);
        campos.Add(datos.condicion);
        campos.Add(datos.nombre_comercial);
        campos.Add(datos.tipo);
        campos.Add(datos.fecha_inscripcion);
        campos.Add(datos.estado);
        campos.Add(datos.direccion);
        campos.Add(datos.sistema_emision);
        campos.Add(datos.actividad_exterior);
        campos.Add(datos.sistema_contabilidad);
        campos.Add(datos.oficio);
        campos.Add(datos.emision_electronica);
        campos.Add(datos.ple);
    }

    return campos;
}
/// <summary>
/// Encrypts the pending rows of <paramref name="data"/> with the server
/// certificate and writes them to the "Out" directory as a timestamped
/// .iamdat file for later transmission. Clears <c>data.data</c> on success.
/// </summary>
/// <param name="data">Generic record set to persist; no-op when empty.</param>
/// <param name="prefix">Suffix appended to the timestamp in the file name.</param>
private void SaveToSend(JsonGeneric data, String prefix)
{
    // Nothing queued — nothing to write.
    if ((data.data == null) || (data.data.Count == 0))
    {
        return;
    }

    Byte[] jData = data.ToJsonBytes();
    using (CryptApi cApi = new CryptApi(CATools.LoadCert(Convert.FromBase64String(config.server_cert)), jData))
    {
        DirectoryInfo dirTo = new DirectoryInfo(Path.Combine(basePath, "Out"));
        if (!dirTo.Exists)
        {
            dirTo.Create();
        }

        // BUGFIX: the format was "yyyyMMddHHmss-ffffff" (single 'm'), which
        // emitted hour + minute-without-leading-zero + seconds and broke the
        // chronological ordering of file names. Use the same
        // "yyyyMMddHHmmss-ffffff" pattern as the rest of the codebase.
        FileInfo f = new FileInfo(Path.Combine(dirTo.FullName, DateTime.Now.ToString("yyyyMMddHHmmss-ffffff") + "-" + prefix) + ".iamdat");
        File.WriteAllBytes(f.FullName, cApi.ToBytes());

        TextLog.Log("PluginStarter", "File to send created " + f.Name + " (" + data.data.Count + ")");
        // The caller's buffer is cleared here so it can keep accumulating rows.
        data.data.Clear();
    }
}
/// <summary>
/// Marks every identity listed in <paramref name="jData"/> as deleted
/// (sets deleted = 1 and deleted_date) in the [identity] table.
/// Each required column is validated before processing; missing columns
/// abort the import with a log entry.
/// </summary>
/// <param name="config">Proxy configuration (not used directly here).</param>
/// <param name="jData">Record set with resource/source/uri/entityid/identityid columns.</param>
/// <param name="f">Source file, used for log messages only.</param>
/// <param name="req">Originating request, used for log messages only.</param>
/// <param name="db">Open database connection.</param>
private void ImportDelete(ProxyConfig config, JsonGeneric jData, FileInfo f, JSONRequest req, IAMDatabase db)
{
    Int32 resourceCol = jData.GetKeyIndex("resource");
    Int32 sourceCol = jData.GetKeyIndex("source");
    Int32 uriCol = jData.GetKeyIndex("uri");
    Int32 entityIdCol = jData.GetKeyIndex("entityid");
    Int32 identityIdCol = jData.GetKeyIndex("identityid");

    if (resourceCol == -1)
    {
        TextLog.Log("Inbound", "\t[ImportDelete] Erro on find column 'resource' in " + f.Name + " enterprise " + req.enterpriseid + " and proxy " + req.host);
        return;
    }
    if (sourceCol == -1)
    {
        TextLog.Log("Inbound", "\t[ImportDelete] Erro on find column 'source' in " + f.Name + " enterprise " + req.enterpriseid + " and proxy " + req.host);
        return;
    }
    if (uriCol == -1)
    {
        TextLog.Log("Inbound", "\t[ImportDelete] Erro on find column 'uri' in " + f.Name + " enterprise " + req.enterpriseid + " and proxy " + req.host);
        return;
    }
    if (entityIdCol == -1)
    {
        TextLog.Log("Inbound", "\t[ImportDelete] Erro on find column 'entityId' in " + f.Name + " enterprise " + req.enterpriseid + " and proxy " + req.host);
        return;
    }
    if (identityIdCol == -1)
    {
        TextLog.Log("Inbound", "\t[ImportDelete] Erro on find column 'identityId' in " + f.Name + " enterprise " + req.enterpriseid + " and proxy " + req.host);
        return;
    }

    DateTime date = DateTime.Now;
    foreach (String[] dr in jData.data)
    {
        // SECURITY FIX: the identity id was concatenated raw into the SQL
        // text; parse it to Int64 first so malformed or hostile values cannot
        // inject SQL. Non-numeric rows are skipped.
        Int64 identityId;
        if (!Int64.TryParse(dr[identityIdCol], out identityId))
        {
            continue;
        }
        try
        {
            db.ExecuteNonQuery("update [identity] set deleted = 1, deleted_date = '" + date.ToString("o") + "' where id = " + identityId, CommandType.Text, null);
        }
        catch { } // best-effort per row: one failed update must not stop the batch
    }
#if DEBUG
    TextLog.Log("Inbound", "\t[ImportDelete] Changed " + jData.data.Count + " identities for deleted status in enterprise " + req.enterpriseid + " and proxy " + req.host);
#endif
    jData = null;
}
/// <summary>
/// Bulk-imports struct packages from <paramref name="jData"/> into the
/// collector_imports_struct table, then flags the inserted rows with
/// status 'F' so the engine can pick them up.
/// </summary>
/// <param name="config">Proxy configuration (not used directly here).</param>
/// <param name="jData">Record set with resource_plugin/package columns.</param>
/// <param name="f">Source file; its name keys the inserted rows.</param>
/// <param name="req">Originating request, used for log messages only.</param>
/// <param name="db">Open database connection.</param>
private void ImportRegistersStruct(ProxyConfig config, JsonGeneric jData, FileInfo f, JSONRequest req, IAMDatabase db)
{
    Int32 resourcePluginCol = jData.GetKeyIndex("resource_plugin");
    Int32 pkgCol = jData.GetKeyIndex("package");
    if (resourcePluginCol == -1)
    {
        TextLog.Log("Inbound", "\t[ImportStruct] Erro on find column 'resource_plugin' in " + f.Name + " enterprise " + req.enterpriseid + " and proxy " + req.host);
        return;
    }
    if (pkgCol == -1)
    {
        TextLog.Log("Inbound", "\t[ImportStruct] Erro on find column 'package' in " + f.Name + " enterprise " + req.enterpriseid + " and proxy " + req.host);
        return;
    }

    // Bulk insert for database performance.
    // BUGFIX: the DataTable was only disposed on the happy path; `using`
    // guarantees disposal even when BulkCopy or the update throws.
    using (DataTable dtBulk = new DataTable())
    {
        dtBulk.Columns.Add(new DataColumn("date", typeof(DateTime)));
        dtBulk.Columns.Add(new DataColumn("file_name", typeof(String)));
        dtBulk.Columns.Add(new DataColumn("resource_plugin", typeof(Int64)));
        dtBulk.Columns.Add(new DataColumn("import_id", typeof(String)));
        dtBulk.Columns.Add(new DataColumn("package_id", typeof(String)));
        dtBulk.Columns.Add(new DataColumn("package", typeof(String)));

        foreach (String[] dr in jData.data)
        {
            PluginConnectorBaseImportPackageStruct pkg = JSON.DeserializeFromBase64<PluginConnectorBaseImportPackageStruct>(dr[pkgCol]);
            dtBulk.Rows.Add(new Object[] { DateTime.Now, f.Name, dr[resourcePluginCol], pkg.importId, pkg.pkgId, JSON.Serialize2(pkg) });
        }

        db.BulkCopy(dtBulk, "collector_imports_struct");

        // Flag the rows of this file as free for processing.
        // Escape quotes in the file name so it cannot break the SQL literal.
        db.ExecuteNonQuery("update collector_imports_struct set status = 'F' where [file_name] = '" + f.Name.Replace("'", "''") + "'", CommandType.Text, null);
#if DEBUG
        TextLog.Log("Inbound", "\t[ImportStruct] Imported " + dtBulk.Rows.Count + " registers for enterprise " + req.enterpriseid + " and proxy " + req.host);
#endif
    }
    jData = null;
}
/// <summary>
/// Legacy importer: bulk-inserts flat register rows from
/// <paramref name="jData"/> into collector_imports and then flags them with
/// status 'F' so the engine can process them. All seven required columns are
/// validated up front; a missing column aborts with a log entry.
/// </summary>
/// <param name="config">Proxy configuration (not used directly here).</param>
/// <param name="jData">Record set with resource/uri/importid/registryid/dataname/datavalue/datatype columns.</param>
/// <param name="f">Source file; its name keys the inserted rows.</param>
/// <param name="req">Originating request, used for log messages only.</param>
/// <param name="db">Open database connection.</param>
private void ImportRegistersOLD(ProxyConfig config, JsonGeneric jData, FileInfo f, JSONRequest req, IAMDatabase db)
{
    Int32 resourceCol = jData.GetKeyIndex("resource");
    Int32 uriCol = jData.GetKeyIndex("uri");
    Int32 importidCol = jData.GetKeyIndex("importid");
    Int32 registryidCol = jData.GetKeyIndex("registryid");
    Int32 datanameCol = jData.GetKeyIndex("dataname");
    Int32 datavalueCol = jData.GetKeyIndex("datavalue");
    Int32 datatypeCol = jData.GetKeyIndex("datatype");

    if (resourceCol == -1)
    {
        TextLog.Log("Inbound", "\t[ImportRegisters] Erro on find column 'resource' in " + f.Name + " enterprise " + req.enterpriseid + " and proxy " + req.host);
        return;
    }
    if (uriCol == -1)
    {
        TextLog.Log("Inbound", "\t[ImportRegisters] Erro on find column 'uri' in " + f.Name + " enterprise " + req.enterpriseid + " and proxy " + req.host);
        return;
    }
    if (importidCol == -1)
    {
        TextLog.Log("Inbound", "\t[ImportRegisters] Erro on find column 'importid' in " + f.Name + " enterprise " + req.enterpriseid + " and proxy " + req.host);
        return;
    }
    if (registryidCol == -1)
    {
        TextLog.Log("Inbound", "\t[ImportRegisters] Erro on find column 'registryid' in " + f.Name + " enterprise " + req.enterpriseid + " and proxy " + req.host);
        return;
    }
    if (datanameCol == -1)
    {
        TextLog.Log("Inbound", "\t[ImportRegisters] Erro on find column 'dataname' in " + f.Name + " enterprise " + req.enterpriseid + " and proxy " + req.host);
        return;
    }
    if (datavalueCol == -1)
    {
        TextLog.Log("Inbound", "\t[ImportRegisters] Erro on find column 'datavalue' in " + f.Name + " enterprise " + req.enterpriseid + " and proxy " + req.host);
        return;
    }
    if (datatypeCol == -1)
    {
        TextLog.Log("Inbound", "\t[ImportRegisters] Erro on find column 'datatype' in " + f.Name + " enterprise " + req.enterpriseid + " and proxy " + req.host);
        return;
    }

    DateTime date = DateTime.Now;

    // Bulk insert for database performance.
    // BUGFIX: the DataTable was only disposed on the happy path; `using`
    // guarantees disposal even when BulkCopy or the update throws.
    using (DataTable dtBulk = new DataTable())
    {
        dtBulk.Columns.Add(new DataColumn("date", typeof(DateTime)));
        dtBulk.Columns.Add(new DataColumn("file_name", typeof(String)));
        dtBulk.Columns.Add(new DataColumn("plugin_uri", typeof(String)));
        dtBulk.Columns.Add(new DataColumn("resource_id", typeof(Int64)));
        dtBulk.Columns.Add(new DataColumn("import_id", typeof(String)));
        dtBulk.Columns.Add(new DataColumn("registry_id", typeof(String)));
        dtBulk.Columns.Add(new DataColumn("data_name", typeof(String)));
        dtBulk.Columns.Add(new DataColumn("data_value", typeof(String)));
        dtBulk.Columns.Add(new DataColumn("data_type", typeof(String)));

        foreach (String[] dr in jData.data)
        {
            dtBulk.Rows.Add(new Object[] { date, f.Name, dr[uriCol], Int64.Parse(dr[resourceCol]), dr[importidCol], dr[registryidCol], dr[datanameCol], dr[datavalueCol], dr[datatypeCol] });
        }

        db.BulkCopy(dtBulk, "collector_imports");

        // Flag the rows of this file as free for processing.
        // Escape quotes in the file name so it cannot break the SQL literal.
        db.ExecuteNonQuery("update collector_imports set status = 'F' where [file_name] = '" + f.Name.Replace("'", "''") + "'", CommandType.Text, null);

        // Index rebuild is handled once by the parent object:
        //db.ExecuteNonQuery("sp_reindex_imports", CommandType.StoredProcedure, null);
#if DEBUG
        TextLog.Log("Inbound", "\t[ImportRegisters] Imported " + dtBulk.Rows.Count + " registers for enterprise " + req.enterpriseid + " and proxy " + req.host);
#endif
    }
    jData = null;
}
/// <summary>
/// Proxy upload endpoint: receives an encrypted package from a proxy,
/// verifies its SHA1 checksum, decrypts it to extract tracking information
/// (package track records for ProcessImportV2 payloads), writes a user log
/// entry and persists the raw request as an .iamreq file for the inbound
/// processor.
/// </summary>
protected void Page_Load(object sender, EventArgs e)
{
    Request.InputStream.Position = 0;
    try
    {
        JSONRequest req = JSON.GetRequest(Request.InputStream);
        using (IAMDatabase database = new IAMDatabase(IAMDatabase.GetWebConnectionString()))
        {
            ProxyConfig config = new ProxyConfig(true);
            config.GetDBConfig(database.Connection, ((EnterpriseData)Page.Session["enterprise_data"]).Id, req.host);
            if (config.fqdn != null) // proxy found
            {
                DirectoryInfo inDir = null;
                using (ServerDBConfig c = new ServerDBConfig(IAMDatabase.GetWebConnection()))
                    inDir = new DirectoryInfo(c.GetItem("inboundFiles"));
                if (!inDir.Exists)
                {
                    inDir.Create();
                }

                req.enterpriseid = ((EnterpriseData)Page.Session["enterprise_data"]).Id.ToString();
                String filename = config.proxy_name + "-" + DateTime.Now.ToString("yyyyMMddHHmmss-ffffff") + ".iamreq";
                if (String.IsNullOrEmpty(req.filename))
                {
                    req.filename = "Empty";
                }

                // trackData accumulates a human-readable trace that ends up in the user log.
                StringBuilder trackData = new StringBuilder();
                trackData.AppendLine("Proxy: " + req.host);
                trackData.AppendLine("Enterprise ID: " + req.enterpriseid);
                trackData.AppendLine("Proxy filename: " + req.filename);
                trackData.AppendLine("Saved filename: " + filename);

                UserLogLevel level = UserLogLevel.Info;
                trackData.AppendLine("");
                trackData.AppendLine("Checking package...");

                if (String.IsNullOrEmpty(req.data))
                {
                    throw new Exception("Request data is empty");
                }

                Byte[] rData = Convert.FromBase64String(req.data);
                // Integrity check is optional: only enforced when the proxy sent a hash.
                if (!String.IsNullOrEmpty(req.sha1hash))
                {
                    if (!CATools.SHA1CheckHash(rData, req.sha1hash))
                    {
                        throw new Exception("SHA1 Checksum is not equal");
                    }
                }

                String type = "";
                try
                {
                    JsonGeneric jData = new JsonGeneric();
                    try
                    {
                        // The PKCS12 password is derived from the proxy FQDN.
                        String certPass = CATools.SHA1Checksum(Encoding.UTF8.GetBytes(config.fqdn));
                        if (String.IsNullOrEmpty(config.server_pkcs12_cert))
                        {
                            throw new Exception("Server PKCS12 from proxy config is empty");
                        }
                        using (CryptApi cApi = CryptApi.ParsePackage(CATools.LoadCert(Convert.FromBase64String(config.server_pkcs12_cert), certPass), rData))
                            jData.FromJsonBytes(cApi.clearData);
                    }
                    catch (Exception ex)
                    {
                        // Decryption failure is logged to the trace but the raw
                        // request is still persisted below.
                        jData = null;
                        trackData.AppendLine("Error decrypting package data for enterprise " + req.enterpriseid + " and proxy " + req.host + ", " + ex.Message);
#if DEBUG
                        trackData.AppendLine(ex.StackTrace);
#endif
                    }

                    if (jData != null)
                    {
#if DEBUG
                        trackData.AppendLine("");
                        trackData.AppendLine("Request data:");
                        trackData.AppendLine(jData.ToJsonString());
                        trackData.AppendLine("");
#endif
                        type = jData.function;
                        trackData.AppendLine("Type: " + type);
                        trackData.AppendLine("Data array length: " + (jData.data == null ? "0" : jData.data.Count.ToString()));

                        // For ProcessImportV2 payloads, register a package track
                        // record per row so the package can be followed end-to-end.
                        if (type.ToLower() == "processimportv2")
                        {
                            Int32 d = 1;
                            foreach (String[] dr in jData.data)
                            {
                                try
                                {
                                    Int32 resourcePluginCol = jData.GetKeyIndex("resource_plugin");
                                    Int32 pkgCol = jData.GetKeyIndex("package");
                                    if (resourcePluginCol == -1)
                                    {
                                        trackData.AppendLine("[Package data " + d + "] Erro finding column 'resource_plugin'");
                                    }
                                    if (pkgCol == -1)
                                    {
                                        trackData.AppendLine("[Package data " + d + "] Erro finding column 'package'");
                                    }
                                    if ((resourcePluginCol != -1) && (pkgCol != -1))
                                    {
                                        PluginConnectorBaseImportPackageUser pkg = JSON.DeserializeFromBase64<PluginConnectorBaseImportPackageUser>(dr[pkgCol]);
                                        trackData.AppendLine("[Package data " + d + "] Import id: " + pkg.importId);
                                        trackData.AppendLine("[Package data " + d + "] Package id: " + pkg.pkgId);

                                        Int64 trackId = 0;
                                        try
                                        {
                                            String tpkg = JSON.Serialize2(pkg);
                                            DbParameterCollection par = new DbParameterCollection();
                                            par.Add("@entity_id", typeof(Int64)).Value = 0;
                                            par.Add("@date", typeof(DateTime)).Value = pkg.GetBuildDate();
                                            par.Add("@flow", typeof(String)).Value = "inbound";
                                            par.Add("@package_id", typeof(String), pkg.pkgId.Length).Value = pkg.pkgId;
                                            par.Add("@filename", typeof(String)).Value = req.filename;
                                            par.Add("@package", typeof(String), tpkg.Length).Value = tpkg;
                                            trackId = database.ExecuteScalar<Int64>("sp_new_package_track", System.Data.CommandType.StoredProcedure, par, null);
                                            trackData.AppendLine("[Package data " + d + "] Package track id: " + trackId);
                                            tpkg = null;
                                            if (trackId > 0)
                                            {
                                                database.AddPackageTrack(trackId, "ProxyAPI", "Package received from proxy and saved at " + filename);
                                            }
                                        }
                                        catch (Exception ex3)
                                        {
                                            trackData.AppendLine("[Package data " + d + "] Erro generating package track: " + ex3.Message);
                                        }
                                        pkg.Dispose();
                                        pkg = null;
                                    }
                                }
                                catch (Exception ex2)
                                {
                                    trackData.AppendLine("[Package data " + d + "] Erro parsing package data " + ex2.Message);
                                }
                                d++;
                            }
                        }
                    }
                }
                catch (Exception ex1)
                {
                    trackData.AppendLine("Erro parsing package " + ex1.Message);
                    level = UserLogLevel.Error;
                }

                database.AddUserLog(LogKey.API_Log, DateTime.Now, "ProxyAPI", level, 0, ((EnterpriseData)Page.Session["enterprise_data"]).Id, 0, 0, 0, 0, 0, "File received from proxy " + req.host + (String.IsNullOrEmpty(type) ? "" : " (" + type + ")"), trackData.ToString());

                // Persist the raw request for the inbound processor and acknowledge.
                File.WriteAllBytes(Path.Combine(inDir.FullName, filename), Encoding.UTF8.GetBytes(JSON.Serialize<JSONRequest>(req)));
                ReturnHolder.Controls.Add(new LiteralControl(JSON.GetResponse(true, "", "Request received and proxy finded (" + (req.data != null ? req.data.Length.ToString() : "0") + ")")));
            }
        }
    }
    catch (Exception ex)
    {
        Tools.Tool.notifyException(ex);
        throw; // BUGFIX: `throw ex;` reset the original stack trace
    }
}
/// <summary>
/// Timer callback that picks up *.iamreq files from the "In" directory,
/// decrypts each proxy package and bulk-inserts its rows into
/// collector_imports. Successfully imported files are deleted.
/// </summary>
/// <param name="state">Timer state object (unused).</param>
private void InboundTimer(Object state)
{
    TextLog.Log("Server", "Starting inbound timer");
    try
    {
        DirectoryInfo inDir = new DirectoryInfo(Path.Combine(basePath, "In"));
        if (!inDir.Exists)
        {
            TextLog.Log("Server", "\t0 files to process");
            return;
        }

        FileInfo[] files = inDir.GetFiles("*.iamreq");
        TextLog.Log("Server", "\t" + files.Length + " files to process");

        MSSQLDB db = new MSSQLDB(localConfig.SqlServer, localConfig.SqlDb, localConfig.SqlUsername, localConfig.SqlPassword);
        db.openDB();
        try // BUGFIX: the connection was only closed on the happy path
        {
            foreach (FileInfo f in files)
            {
                JSONRequest req = null;
                try
                {
                    using (FileStream fs = f.OpenRead())
                        req = JSON.GetRequest(fs);

                    if (String.IsNullOrEmpty(req.host))
                    {
                        TextLog.Log("Server", "Paramter 'host' is empty on " + f.Name);
                        continue;
                    }
                    if (String.IsNullOrEmpty(req.enterpriseid))
                    {
                        TextLog.Log("Server", "Paramter 'enterpriseid' is empty on " + f.Name);
                        continue;
                    }

                    // BUGFIX: the original parse-check only skipped the file when
                    // enterpriseid was EMPTY (already handled above), so a
                    // non-numeric id fell through and crashed the whole timer at
                    // the later Int64.Parse. Skip such files explicitly.
                    Int64 enterpriseId;
                    if (!Int64.TryParse(req.enterpriseid, out enterpriseId))
                    {
                        TextLog.Log("Server", "Paramter 'enterpriseid' is not Int64 " + f.Name);
                        continue;
                    }

                    ProxyConfig config = new ProxyConfig(true);
                    config.GetDBCertConfig(db.conn, enterpriseId, req.host);
                    if (config.fqdn != null) // proxy found
                    {
                        JsonGeneric jData = new JsonGeneric();
                        try
                        {
                            // The PKCS12 password is derived from the proxy FQDN.
                            String certPass = CATools.SHA1Checksum(Encoding.UTF8.GetBytes(config.fqdn));
                            using (CryptApi cApi = CryptApi.ParsePackage(CATools.LoadCert(Convert.FromBase64String(config.server_pkcs12_cert), certPass), Convert.FromBase64String(req.data)))
                                jData.FromJsonBytes(cApi.clearData);
                        }
                        catch (Exception ex)
                        {
                            jData = null;
                            TextLog.Log("Server", "Error on decrypt package data " + f.Name + " for enterprise " + req.enterpriseid + " and proxy " + req.host + ", " + ex.Message);
                        }
                        if (jData == null)
                        {
                            continue;
                        }

                        Int32 contextCol = jData.GetKeyIndex("context");
                        Int32 uriCol = jData.GetKeyIndex("uri");
                        Int32 importidCol = jData.GetKeyIndex("importid");
                        Int32 registryidCol = jData.GetKeyIndex("registryid");
                        Int32 datanameCol = jData.GetKeyIndex("dataname");
                        Int32 datavalueCol = jData.GetKeyIndex("datavalue");
                        Int32 datatypeCol = jData.GetKeyIndex("datatype");

                        // BUGFIX: 'context' was used below (dr[contextCol]) but never
                        // validated, so a package without that column crashed with
                        // an index of -1. Validate it like the other columns.
                        if (contextCol == -1)
                        {
                            TextLog.Log("Server", "Erro on find column 'context' in " + f.Name + " enterprise " + req.enterpriseid + " and proxy " + req.host);
                            continue;
                        }
                        if (uriCol == -1)
                        {
                            TextLog.Log("Server", "Erro on find column 'uri' in " + f.Name + " enterprise " + req.enterpriseid + " and proxy " + req.host);
                            continue;
                        }
                        if (importidCol == -1)
                        {
                            TextLog.Log("Server", "Erro on find column 'importid' in " + f.Name + " enterprise " + req.enterpriseid + " and proxy " + req.host);
                            continue;
                        }
                        if (registryidCol == -1)
                        {
                            TextLog.Log("Server", "Erro on find column 'registryid' in " + f.Name + " enterprise " + req.enterpriseid + " and proxy " + req.host);
                            continue;
                        }
                        if (datanameCol == -1)
                        {
                            TextLog.Log("Server", "Erro on find column 'dataname' in " + f.Name + " enterprise " + req.enterpriseid + " and proxy " + req.host);
                            continue;
                        }
                        if (datavalueCol == -1)
                        {
                            TextLog.Log("Server", "Erro on find column 'datavalue' in " + f.Name + " enterprise " + req.enterpriseid + " and proxy " + req.host);
                            continue;
                        }
                        if (datatypeCol == -1)
                        {
                            TextLog.Log("Server", "Erro on find column 'datatype' in " + f.Name + " enterprise " + req.enterpriseid + " and proxy " + req.host);
                            continue;
                        }

                        DateTime date = DateTime.Now;

                        // Bulk insert for database performance.
                        using (DataTable dtBulk = new DataTable())
                        {
                            dtBulk.Columns.Add(new DataColumn("date", typeof(DateTime)));
                            dtBulk.Columns.Add(new DataColumn("plugin_uri", typeof(String)));
                            dtBulk.Columns.Add(new DataColumn("context_id", typeof(Int64)));
                            dtBulk.Columns.Add(new DataColumn("import_id", typeof(String)));
                            dtBulk.Columns.Add(new DataColumn("registry_id", typeof(String)));
                            dtBulk.Columns.Add(new DataColumn("data_name", typeof(String)));
                            dtBulk.Columns.Add(new DataColumn("data_value", typeof(String)));
                            dtBulk.Columns.Add(new DataColumn("data_type", typeof(String)));

                            foreach (String[] dr in jData.data)
                            {
                                dtBulk.Rows.Add(new Object[] { date, dr[uriCol], Int64.Parse(dr[contextCol]), dr[importidCol], dr[registryidCol], dr[datanameCol], dr[datavalueCol], dr[datatypeCol] });
                            }

                            db.BulkCopy(dtBulk, "collector_imports");
                            TextLog.Log("Server", "Imported " + dtBulk.Rows.Count + " registers for enterprise " + req.enterpriseid + " and proxy " + req.host);
                        }
                        jData = null;
                        // Only delete the file after a successful import.
                        f.Delete();
                    }
                    else
                    {
                        TextLog.Log("Server", "Proxy config not found for enterprise " + req.enterpriseid + " and proxy " + req.host);
                    }
                    config = null;
                }
                finally
                {
                    req = null;
                }
            }
        }
        finally
        {
            db.closeDB();
        }
    }
    catch (Exception ex)
    {
        TextLog.Log("Server", "Error on inbound timer " + ex.Message);
    }
    finally
    {
        TextLog.Log("Server", "Finishing inbound timer");
    }
}
/// <summary>
/// Processes every pending deploy package (*.iamdat) for the given resource
/// plugin, oldest file first: applies deploys/deletes through the plugin,
/// collects change and delete notifications, saves outbound packages and
/// logs the whole run through logProxy.
/// </summary>
/// <param name="resource">Resource id the packages belong to.</param>
/// <param name="resource_plugin">Resource-plugin id, also used as outbound file prefix.</param>
/// <param name="connectorConf">Connector configuration passed through to the plugin.</param>
/// <param name="mapping">Field mappings passed through to the plugin.</param>
private void ProcessDeploy(Int64 resource, Int64 resource_plugin, Dictionary<String, Object> connectorConf, List<PluginConnectorBaseDeployPackageMapping> mapping)
{
    StringBuilder deployLog = new StringBuilder();
    TextLog.Log("PluginStarter", "{" + plugin.GetPluginId().AbsoluteUri + "} Starting deploy thread...");
    deployLog.AppendLine("[" + DateTime.Now.ToString("HH:mm:ss") + "] Starting deploy thread...");

    // Accumulators shipped to the server: change notifications, deletions and
    // generated import packages.
    JsonGeneric notify = new JsonGeneric();
    notify.function = "notify";
    notify.fields = new String[] { "source", "resource", "uri", "entityid", "identityid" };

    JsonGeneric deleted = new JsonGeneric();
    deleted.function = "deleted";
    deleted.fields = new String[] { "source", "resource", "uri", "entityid", "identityid" };

    JsonGeneric records = new JsonGeneric();
    records.function = "ProcessImportV2";
    records.fields = new String[] { "resource_plugin", "package" };

    ImportPackageUserEvent newPackage = new ImportPackageUserEvent(delegate(PluginConnectorBaseImportPackageUser pkg)
    {
        records.data.Add(new String[] { resource_plugin.ToString(), JSON.SerializeToBase64(pkg) });
        try
        {
            // Best-effort: flush immediately so each package is persisted on its own.
            SaveToSend(records, resource_plugin.ToString());
            records.data.Clear();
        }
        catch { }
        deployLog.AppendLine("Package generated from resource plugin " + resource_plugin.ToString() + ". ID: " + pkg.pkgId);
        TextLog.Log("PluginStarter", "{" + plugin.GetPluginId().AbsoluteUri + "}>ProcessDeploy> Package generated from resource plugin " + resource_plugin.ToString() + ". ID: " + pkg.pkgId);
    });

    List<FileInfo> files = null;
    try
    {
        System.Reflection.Assembly asm = System.Reflection.Assembly.GetAssembly(plugin.GetType());
        DirectoryInfo dirFrom = new DirectoryInfo(Path.Combine(basePath, "In\\" + Path.GetFileNameWithoutExtension(asm.Location) + "\\rp" + resource_plugin));
        if (!dirFrom.Exists) // nothing was ever queued for this plugin
        {
            deployLog.AppendLine("[" + DateTime.Now.ToString("HH:mm:ss") + "] Path not found " + dirFrom.FullName);
            return;
        }

        // Sort files oldest-first so packages are applied in order.
        sortOndate sod = new sortOndate();
        files = new List<FileInfo>();
        files.AddRange(dirFrom.GetFiles("*.iamdat"));
        files.Sort(sod);

        foreach (FileInfo f in files)
        {
            List<PluginConnectorBaseDeployPackage> fData = null;
            try
            {
                deployLog.AppendLine("[" + DateTime.Now.ToString("HH:mm:ss") + "] Loading file " + f.Name);
                try
                {
                    fData = LoadFile(f);
                }
                catch (Exception ex)
                {
                    TextLog.Log("PluginStarter", "{" + plugin.GetPluginId().AbsoluteUri + "} Error reading file " + f.FullName.Replace(basePath, "") + ", " + ex.Message);
                    logProxy.AddLog(LogKey.Proxy_Event, "Proxy", resource_plugin, resource.ToString(), plugin.GetPluginId().AbsoluteUri, UserLogLevel.Error, 0, 0, "Error reading file " + f.FullName.Replace(basePath, "") + ", " + ex.Message, "");
                }
                if (fData == null)
                {
                    continue;
                }
                if (fData.Count == 0)
                {
                    throw new Exception("Package is empty");
                }

                TextLog.Log("PluginStarter", "{" + plugin.GetPluginId().AbsoluteUri + "} [" + resource_plugin + "]" + fData.Count + " packages in " + f.Name);
                deployLog.AppendLine("[" + DateTime.Now.ToString("HH:mm:ss") + "] " + fData.Count + " packages in " + f.Name);

                // Per-file event handlers: plain log, structured log, change and delete notifications.
                LogEvent log = new LogEvent(delegate(Object sender, PluginLogType type, String text)
                {
                    TextLog.Log("PluginStarter", "{" + ((PluginConnectorBase)sender).GetPluginId().AbsoluteUri + "}>Log> " + type + ", " + text);
                });
                LogEvent2 log2 = new LogEvent2(delegate(Object sender, PluginLogType type, Int64 entityId, Int64 identityId, String text, String additionalData)
                {
                    logProxy.AddLog(LogKey.Plugin_Event, "Proxy", resource_plugin, resource.ToString(), ((PluginConnectorBase)sender).GetPluginId().AbsoluteUri, (UserLogLevel)((Int32)type), entityId, identityId, text, additionalData);
#if DEBUG
                    TextLog.Log("PluginStarter", "{" + ((PluginConnectorBase)sender).GetPluginId().AbsoluteUri + "}>Log2> " + (((UserLogLevel)((Int32)type)).ToString()) + " entityId = " + entityId + ", identityId = " + identityId + ", " + text + additionalData);
#endif
                });
                NotityChangeUserEvent log3 = new NotityChangeUserEvent(delegate(Object sender, Int64 entityId, Int64 identityId)
                {
                    notify.data.Add(new String[] { "Proxy", resource.ToString(), ((PluginConnectorBase)sender).GetPluginId().AbsoluteUri, entityId.ToString(), identityId.ToString() });
                });
                NotityChangeUserEvent log4 = new NotityChangeUserEvent(delegate(Object sender, Int64 entityId, Int64 identityId)
                {
                    deleted.data.Add(new String[] { "Proxy", resource.ToString(), ((PluginConnectorBase)sender).GetPluginId().AbsoluteUri, entityId.ToString(), identityId.ToString() });
                });

                plugin.ImportPackageUser += newPackage;
                plugin.Log += log;
                plugin.Log2 += log2;
                plugin.NotityChangeUser += log3;
                plugin.NotityDeletedUser += log4;

                // Only run the post-deploy import for small batches (on-demand
                // publication of a specific user).
                Boolean doImportAfterLogin = (fData.Count <= 5);
                try
                {
                    foreach (PluginConnectorBaseDeployPackage pkg in fData)
                    {
                        try
                        {
                            deployLog.AppendLine("[" + DateTime.Now.ToString("HH:mm:ss") + "] EntityId = " + pkg.entityId + ", IdentityId = " + pkg.identityId + ", Pkg id: " + pkg.pkgId + ", user deleted? " + pkg.deleted);
                            if (pkg.deleted)
                            {
                                plugin.ProcessDelete(resource_plugin.ToString(), pkg, connectorConf, mapping);
                            }
                            else
                            {
                                plugin.ProcessDeploy(resource_plugin.ToString(), pkg, connectorConf, mapping);
                                if (doImportAfterLogin)
                                {
#if DEBUG
                                    TextLog.Log("PluginStarter", "{" + plugin.GetPluginId().AbsoluteUri + "} [" + resource_plugin + "] Doing import after deploy");
#endif
                                    plugin.ProcessImportAfterDeploy(resource_plugin.ToString(), pkg, connectorConf, mapping);
#if DEBUG
                                    TextLog.Log("PluginStarter", "{" + plugin.GetPluginId().AbsoluteUri + "} [" + resource_plugin + "] Finihing import after deploy");
#endif
                                }
                            }
                        }
                        catch (Exception ex)
                        {
                            // A failed package is logged but does not stop the batch.
                            TextLog.Log("PluginStarter", "{" + plugin.GetPluginId().AbsoluteUri + "} [" + resource_plugin + "] EntityId = " + pkg.entityId + ", IdentityId = " + pkg.identityId + ", Error on ProcessDeploy thread of file " + f.FullName.Replace(basePath, "") + ", " + ex.Message + (ex.InnerException != null ? " - " + ex.InnerException.Message : ""));
                            logProxy.AddLog(LogKey.Proxy_Event, "Proxy", resource_plugin, resource.ToString(), plugin.GetPluginId().AbsoluteUri, UserLogLevel.Error, pkg.entityId, pkg.identityId, "error on ProcessDeploy thread of file " + f.FullName.Replace(basePath, "") + ", " + ex.Message + (ex.InnerException != null ? " - " + ex.InnerException.Message : ""), "");
                            deployLog.AppendLine("[" + DateTime.Now.ToString("HH:mm:ss") + "] EntityId = " + pkg.entityId + ", IdentityId = " + pkg.identityId + ", Error on ProcessDeploy thread of file " + f.FullName.Replace(basePath, "") + ", " + ex.Message + (ex.InnerException != null ? " - " + ex.InnerException.Message : ""));
                        }
                    }
                }
                finally
                {
                    // Always detach the handlers so they cannot fire for other files.
                    plugin.Log -= log;
                    plugin.Log2 -= log2;
                    plugin.NotityChangeUser -= log3;
                    plugin.NotityDeletedUser -= log4;
                    plugin.ImportPackageUser -= newPackage;
                    log = null;
                    log2 = null;
                    log3 = null;
                    log4 = null;
                }

                // Persist notifications, deletions and any remaining packages.
                if (notify.data.Count > 0)
                {
                    SaveToSend(notify, resource_plugin.ToString() + "notify");
                }
                if (deleted.data.Count > 0)
                {
                    SaveToSend(deleted, resource_plugin.ToString() + "deleted");
                }
                if (records.data.Count > 0)
                {
                    SaveToSend(records, resource_plugin.ToString());
                }

                try
                {
                    f.Delete();
                    try
                    {
                        // Prune now-empty per-plugin directories.
                        if (dirFrom.GetFiles("*.iamdat").Length == 0)
                        {
                            dirFrom.Delete();
                        }
                        if (dirFrom.Parent.GetFiles("*.iamdat").Length == 0)
                        {
                            dirFrom.Parent.Delete();
                        }
                    }
                    catch { }
                }
                catch (Exception ex)
                {
                    deployLog.AppendLine("[" + DateTime.Now.ToString("HH:mm:ss") + "] Error deleting file " + f.FullName.Replace(basePath, "") + ", " + ex.Message + (ex.InnerException != null ? " - " + ex.InnerException.Message : ""));
                }
            }
            catch (Exception ex)
            {
                logProxy.AddLog(LogKey.Proxy_Event, "Proxy", resource_plugin, resource.ToString(), plugin.GetPluginId().AbsoluteUri, UserLogLevel.Error, 0, 0, "Erro on deploy thread of file " + f.FullName.Replace(basePath, "") + ", " + ex.Message + (ex.InnerException != null ? " - " + ex.InnerException.Message : ""), "");
                deployLog.AppendLine("[" + DateTime.Now.ToString("HH:mm:ss") + "] Error on deploy thread of file " + f.FullName.Replace(basePath, "") + ", " + ex.Message + (ex.InnerException != null ? " - " + ex.InnerException.Message : ""));
            }
            finally
            {
                if (fData != null)
                {
                    foreach (PluginConnectorBaseDeployPackage p in fData)
                    {
                        p.Dispose();
                    }
                }
            }
        }
        files.Clear();
    }
    catch (Exception ex)
    {
        logProxy.AddLog(LogKey.Proxy_Event, "Proxy", resource_plugin, resource.ToString(), plugin.GetPluginId().AbsoluteUri, UserLogLevel.Error, 0, 0, "Erro on deploy thread: " + ex.Message, "");
        deployLog.AppendLine("[" + DateTime.Now.ToString("HH:mm:ss") + "] Erro on deploy thread: " + ex.Message);
        throw; // BUGFIX: `throw ex;` reset the original stack trace
    }
    finally
    {
        // Flush notifications that may have accumulated before a failure.
        if (notify.data.Count > 0)
        {
            SaveToSend(notify, resource_plugin.ToString() + "notify");
        }
        TextLog.Log("PluginStarter", "{" + plugin.GetPluginId().AbsoluteUri + "} Finishing deploy thread...");
        deployLog.AppendLine("[" + DateTime.Now.ToString("HH:mm:ss") + "] Finishing deploy thread...");
        if (files != null)
        {
            files.Clear();
        }
        files = null;
        logProxy.AddLog(LogKey.Proxy_Event, "Proxy", resource_plugin, resource.ToString(), plugin.GetPluginId().AbsoluteUri, UserLogLevel.Info, 0, 0, "Deploy executed", deployLog.ToString());
        deployLog.Clear();
        deployLog = null;
        // Ship the collected logs.
        logProxy.SaveToSend(resource_plugin.ToString() + "log");
        newPackage = null;
    }
}
/// <summary>
/// Runs the plugin's import for a resource plugin, batching generated user
/// and struct packages into outbound .iamdat files (flushed every 500
/// entries) and logging the run through logProxy.
/// </summary>
/// <param name="resource">Resource id the import belongs to.</param>
/// <param name="resource_plugin">Resource-plugin id, also used as outbound file prefix.</param>
/// <param name="connectorConf">Connector configuration passed through to the plugin; must not be null.</param>
/// <param name="mapping">Field mappings passed through to the plugin; must not be null.</param>
private void ProcessImport(Int64 resource, Int64 resource_plugin, Dictionary<String, Object> connectorConf, List<PluginConnectorBaseDeployPackageMapping> mapping)
{
    StringBuilder importLog = new StringBuilder();
    importLog.AppendLine("[" + DateTime.Now.ToString("HH:mm:ss") + "] Starting import thread...");
    TextLog.Log("PluginStarter", "{" + plugin.GetPluginId().AbsoluteUri + "} Starting import thread...");
    Int64 count = 0;
    try
    {
        if (connectorConf == null)
        {
            throw new Exception("connectorConf is null");
        }
        if (mapping == null)
        {
            throw new Exception("mapping is null");
        }

        // Unique id for this run; also used as the outbound file prefix.
        String id = Guid.NewGuid().ToString();

        JsonGeneric records = new JsonGeneric();
        records.function = "ProcessImportV2";
        records.fields = new String[] { "resource_plugin", "package" };

        JsonGeneric structRecords = new JsonGeneric();
        structRecords.function = "ProcessStructImport";
        structRecords.fields = new String[] { "resource_plugin", "package" };

        String uri = plugin.GetPluginId().AbsoluteUri.ToLower();

        ImportPackageUserEvent newPackage = new ImportPackageUserEvent(delegate(PluginConnectorBaseImportPackageUser pkg)
        {
            count++;
            records.data.Add(new String[] { resource_plugin.ToString(), JSON.SerializeToBase64(pkg) });
            // Flush in batches of 500 to keep outbound files small.
            if (records.data.Count >= 500)
            {
                try
                {
                    SaveToSend(records, resource_plugin.ToString());
                    records.data.Clear();
                }
                catch { }
            }
#if DEBUG
            TextLog.Log("PluginStarter", "{" + plugin.GetPluginId().AbsoluteUri + "}>ProcessImport> Package generated from resource plugin " + resource_plugin.ToString() + ". ID: " + pkg.pkgId);
#endif
        });
        ImportPackageStructEvent newStructPackage = new ImportPackageStructEvent(delegate(PluginConnectorBaseImportPackageStruct pkg)
        {
            count++;
            structRecords.data.Add(new String[] { resource_plugin.ToString(), JSON.SerializeToBase64(pkg) });
            if (structRecords.data.Count >= 500)
            {
                try
                {
                    SaveToSend(structRecords, resource_plugin.ToString());
                    structRecords.data.Clear();
                }
                catch { }
            }
        });
        LogEvent log = new LogEvent(delegate(Object sender, PluginLogType type, string text)
        {
            TextLog.Log("PluginStarter", "{" + ((PluginConnectorBase)sender).GetPluginId().AbsoluteUri + "} " + type + ", " + text);
        });
        LogEvent2 log2 = new LogEvent2(delegate(Object sender, PluginLogType type, Int64 entityId, Int64 identityId, String text, String additionalData)
        {
            logProxy.AddLog(LogKey.Plugin_Event, "Proxy", resource_plugin, resource.ToString(), ((PluginConnectorBase)sender).GetPluginId().AbsoluteUri, (UserLogLevel)((Int32)type), entityId, identityId, text, additionalData);
#if DEBUG
            TextLog.Log("PluginStarter", "{" + ((PluginConnectorBase)sender).GetPluginId().AbsoluteUri + "} Type: " + type + ", Entity Id: " + entityId + ", Identity Id: " + identityId + ", Data: " + text + additionalData);
#endif
        });

        plugin.ImportPackageUser += newPackage;
        plugin.ImportPackageStruct += newStructPackage;
        plugin.Log += log;
        plugin.Log2 += log2;
        try
        {
            plugin.ProcessImport(resource_plugin.ToString(), id, connectorConf, mapping);
        }
        finally
        {
            // BUGFIX: handlers were only detached when ProcessImport succeeded,
            // leaking subscriptions (and firing duplicate events on later runs)
            // after a failed import. Detach in finally, as ProcessDeploy does.
            plugin.ImportPackageUser -= newPackage;
            plugin.ImportPackageStruct -= newStructPackage;
            plugin.Log -= log;
            plugin.Log2 -= log2;
        }
        newPackage = null;
        log = null;
        uri = null;

        // Flush any remaining batched records.
        if (records.data.Count > 0)
        {
            SaveToSend(records, id + "-user");
        }
        if (structRecords.data.Count > 0)
        {
            SaveToSend(structRecords, id + "-struct");
        }
        // Defensive: SaveToSend clears the list on success, so this only fires
        // if the flush above left data behind.
        if (records.data.Count > 0)
        {
            SaveToSend(records, resource_plugin.ToString());
        }

        importLog.AppendLine("[" + DateTime.Now.ToString("HH:mm:ss") + "] Imported " + count + " items...");
        TextLog.Log("PluginStarter", "{" + plugin.GetPluginId().AbsoluteUri + "} Imported " + count + " items...");
    }
    catch (Exception ex)
    {
        logProxy.AddLog(LogKey.Proxy_Event, "Proxy", resource_plugin, resource.ToString(), plugin.GetPluginId().AbsoluteUri, UserLogLevel.Error, 0, 0, "Erro on import thread: " + ex.Message, "");
        importLog.AppendLine("[" + DateTime.Now.ToString("HH:mm:ss") + "] Erro on import thread: " + ex.Message);
        throw; // BUGFIX: `throw ex;` reset the original stack trace
    }
    finally
    {
        TextLog.Log("PluginStarter", "{" + plugin.GetPluginId().AbsoluteUri + "} Finishing import thread");
        importLog.AppendLine("[" + DateTime.Now.ToString("HH:mm:ss") + "] Finishing import thread...");
        if (count > 0)
        {
            logProxy.AddLog(LogKey.Proxy_Event, "Proxy", resource_plugin, resource.ToString(), plugin.GetPluginId().AbsoluteUri, UserLogLevel.Info, 0, 0, "Import executed", importLog.ToString());
        }
        else
        {
#if DEBUG
            // Same log as above, kept visible in debug builds even for empty imports.
            logProxy.AddLog(LogKey.Proxy_Event, "Proxy", resource_plugin, resource.ToString(), plugin.GetPluginId().AbsoluteUri, UserLogLevel.Info, 0, 0, "Import executed", importLog.ToString());
#endif
        }
        importLog.Clear();
        importLog = null;
        // Ship the collected logs.
        logProxy.SaveToSend(resource_plugin.ToString() + "log");
    }
}
/// <summary>
/// Outbound file endpoint for proxies. When the request names a file, returns that
/// file base64-encoded (and best-effort records a download entry in the package
/// track history); otherwise returns the list of pending *.iamdat files for the proxy.
/// Responses are written into ReturnHolder as JSON literals.
/// </summary>
protected void Page_Load(object sender, EventArgs e)
{
    Request.InputStream.Position = 0;
    try
    {
        JSONRequest req = JSON.GetRequest(Request.InputStream);
        using (IAMDatabase database = new IAMDatabase(IAMDatabase.GetWebConnectionString()))
        {
            ProxyConfig config = new ProxyConfig();
            config.GetDBConfig(database.Connection, ((EnterpriseData)Page.Session["enterprise_data"]).Id, req.host);
            if (config.fqdn != null) //Proxy found
            {
                //Outbound directory for this proxy: <outboundFiles>/<proxyID>_<proxy_name>
                DirectoryInfo outDir = null;
                using (ServerDBConfig c = new ServerDBConfig(IAMDatabase.GetWebConnection()))
                    outDir = new DirectoryInfo(Path.Combine(c.GetItem("outboundFiles"), config.proxyID + "_" + config.proxy_name));

                if (!outDir.Exists)
                {
                    outDir.Create();
                }

                if ((req.data != null) && (req.data != ""))
                {
                    //A filename was supplied: send that single file
                    FileInfo fName = null;
                    try
                    {
                        // SECURITY NOTE(review): Trim only strips leading/trailing '.', '\', '/';
                        // it does NOT block embedded traversal like "a\..\..\x". Confirm req.data
                        // is constrained upstream, or validate fName stays under outDir.
                        fName = new FileInfo(Path.Combine(outDir.FullName, req.data.Trim("..\\/".ToCharArray())));
                    }
                    catch
                    {
                        ReturnHolder.Controls.Add(new LiteralControl(JSON.GetResponse(false, "Filename is invalid", "")));
                        return;
                    }

                    if (fName.Exists)
                    {
                        try
                        {
                            Byte[] fData = File.ReadAllBytes(fName.FullName);
                            ReturnHolder.Controls.Add(new LiteralControl(JSON.GetResponse(true, "", Convert.ToBase64String(fData))));

                            //Best-effort download tracking; failures must not affect the response
                            try
                            {
                                DbParameterCollection par = new DbParameterCollection();
                                par.Add("@filename", typeof(String)).Value = fName.FullName;
                                Int64 packageTrackId = database.ExecuteScalar <Int64>("select id from st_package_track where flow = 'deploy' and filename = @filename", System.Data.CommandType.Text, par, null);

                                par = new DbParameterCollection();
                                par.Add("@package_id", typeof(Int64)).Value = packageTrackId;
                                par.Add("@source", typeof(String)).Value = "proxy";
                                par.Add("@text", typeof(String)).Value = "Proxy Downloaded file from IP " + Tools.Tool.GetIPAddress();
                                database.ExecuteNonQuery("insert into st_package_track_history ([package_id] ,[source] ,[text]) values (@package_id ,@source ,@text)", System.Data.CommandType.Text, par, null);
                            }
                            catch { }
                        }
                        catch (Exception ex)
                        {
                            ReturnHolder.Controls.Add(new LiteralControl(JSON.GetResponse(false, "Error loading file " + fName.Name + ", " + ex.Message, "")));
                        }
                    }
                    else
                    {
                        ReturnHolder.Controls.Add(new LiteralControl(JSON.GetResponse(false, "File not found '" + req.data + "'", "")));
                    }
                }
                else
                {
                    //No filename: send the listing of available files
                    List<FileInfo> files = new List<FileInfo>();
                    foreach (DirectoryInfo d in outDir.GetDirectories())
                    {
                        files.AddRange(d.GetFiles("*.iamdat", SearchOption.AllDirectories));
                    }

                    JsonGeneric list = new JsonGeneric();
                    list.fields = new String[] { "name" };

                    foreach (FileInfo f in files)
                    {
                        list.data.Add(new String[] { f.FullName.Replace(outDir.FullName, "").Trim("\\/ ".ToCharArray()) });
                    }

                    ReturnHolder.Controls.Add(new LiteralControl(JSON.GetResponse(true, "", list.ToJsonString())));
                }
            }
        }
    }
    catch (Exception ex)
    {
        Tools.Tool.notifyException(ex);
        //was "throw ex;", which resets the stack trace
        throw;
    }
}
/// <summary>
/// Runs deploy and (optionally) import for every resource-plugin bound to the current
/// plugin. Decrypts each plugin's parameters with the proxy client certificate, builds
/// the connector configuration and mapping, then processes deploy before import so the
/// import already sees records published by the deploy.
/// </summary>
/// <param name="deployOnly">When true, only the deploy phase runs.</param>
private void ExecuteConnector(Boolean deployOnly)
{
    List<Int64> resource_plugin = new List<Int64>();

    //Split the contexts: collect the distinct resource-plugin ids for this plugin
    String certPass = CATools.SHA1Checksum(Encoding.UTF8.GetBytes(config.fqdn));
    OpenSSL.X509.X509Certificate cert = CATools.LoadCert(Convert.FromBase64String(config.client_cert), certPass);
    try
    {
        foreach (PluginConfig p in config.plugins)
        {
            if (p.uri.ToLower() == plugin.GetPluginId().AbsoluteUri.ToLower())
            {
                //Early decrypt pass: fail fast if parameters cannot be decrypted
                JsonGeneric pgConf = new JsonGeneric();
                try
                {
                    using (CryptApi cApi = CryptApi.ParsePackage(cert, Convert.FromBase64String(p.parameters)))
                        pgConf.FromJsonString(Encoding.UTF8.GetString(cApi.clearData));
                }
                catch (Exception ex)
                {
                    throw new Exception("Decrypt error1 " + ex.Message);
                }
                finally
                {
                    pgConf = null;
                }

                if (!resource_plugin.Contains(p.resource_plugin))
                {
                    resource_plugin.Add(p.resource_plugin);
                }
            }
        }

        foreach (Int64 rp in resource_plugin)
        {
            DebugLog("{" + plugin.GetPluginId().AbsoluteUri + "} Resource plugin " + rp);

            Dictionary<String, Object> connectorConf = new Dictionary<String, Object>();
            List<PluginConnectorBaseDeployPackageMapping> mapping = new List<PluginConnectorBaseDeployPackageMapping>();
            Boolean enableDeploy = false;
            Int64 r = 0;
            try
            {
                foreach (PluginConfig p in config.plugins)
                {
                    if ((p.uri.ToLower() == plugin.GetPluginId().AbsoluteUri.ToLower()) && (p.resource_plugin == rp))
                    {
                        r = p.resource;

                        foreach (PluginConfigMapping m in p.mapping)
                        {
                            mapping.Add(new PluginConnectorBaseDeployPackageMapping(m.data_name, m.data_type, m.is_id, m.is_unique_property, m.is_password, m.is_login, m.is_name));
                        }

                        enableDeploy = p.enable_deploy;

                        JsonGeneric pgConf = new JsonGeneric();
                        try
                        {
                            if (cert == null)
                            {
                                throw new Exception("Certificate is null");
                            }
                            using (CryptApi cApi = CryptApi.ParsePackage(cert, Convert.FromBase64String(p.parameters)))
                                pgConf.FromJsonString(Encoding.UTF8.GetString(cApi.clearData));
                        }
                        catch (Exception ex)
                        {
                            throw new Exception("Decrypt error: " + ex.Message);
                        }

                        if ((pgConf.data == null) || (pgConf.data.Count == 0))
                        {
                            continue;
                        }

                        Int32 kCol = pgConf.GetKeyIndex("key");
                        Int32 vCol = pgConf.GetKeyIndex("value");

                        if (!String.IsNullOrWhiteSpace(p.mail_domain))
                        {
                            PluginBase.FillConfig(plugin, ref connectorConf, "iam_mail_domain", p.mail_domain);
                        }

                        //Copy every decrypted key/value pair into the connector configuration
                        foreach (String[] d1 in pgConf.data)
                        {
                            PluginBase.FillConfig(plugin, ref connectorConf, d1[kCol], d1[vCol].ToString());
                        }
                    }
                }

                //Deploy runs before the import so the import already sees records published by the deploy
                try
                {
                    System.Reflection.Assembly asm = System.Reflection.Assembly.GetAssembly(plugin.GetType());
                    DirectoryInfo dirFrom = new DirectoryInfo(Path.Combine(basePath, "In\\" + Path.GetFileNameWithoutExtension(asm.Location) + "\\rp" + rp));

                    DebugLog("{" + plugin.GetPluginId().AbsoluteUri + "} RP =" + rp + ", r = " + r + " => path " + dirFrom.FullName + ", exists? " + dirFrom.Exists);

                    if (enableDeploy)
                    {
                        //Only process when there is something to deploy
                        if (dirFrom.Exists)
                        {
                            ProcessDeploy(r, rp, connectorConf, mapping);
                        }
                    }
                    else
                    {
                        TextLog.Log("PluginStarter", "{" + plugin.GetPluginId().AbsoluteUri + "} Deploy disabled");

                        //Deploy disabled: discard any pending deploy files
                        if (dirFrom.Exists)
                        {
                            foreach (FileInfo f in dirFrom.GetFiles("*.iamdat"))
                            {
                                f.Delete();
                            }
                        }
                    }
                }
                catch (Exception ex)
                {
                    TextLog.Log("PluginStarter", "{" + plugin.GetPluginId().AbsoluteUri + "} Error on deploy: " + ex.Message);
                }

                if (!deployOnly)
                {
                    try
                    {
                        //Import is never disabled: it is required by the consistency report;
                        //the Engine will not use it to add new entities
                        ProcessImport(r, rp, connectorConf, mapping);
                    }
                    catch (Exception ex)
                    {
                        TextLog.Log("PluginStarter", "{" + plugin.GetPluginId().AbsoluteUri + "} Error on import: " + ex.Message);
                    }
                }

                //Periodic self-kill so the host restarts the process clean
                executionCount++;
                if (executionCount > 50)
                {
                    executionCount = 0;
                    TextLog.Log("PluginStarter", "{" + plugin.GetPluginId().AbsoluteUri + "} Cleaning up proccess");
                    System.Diagnostics.Process.GetCurrentProcess().Kill();
                }
            }
            catch (Exception ex)
            {
                TextLog.Log("PluginStarter", "{" + plugin.GetPluginId().AbsoluteUri + "} Error on parse config: " + ex.Message);
            }
            finally
            {
                connectorConf.Clear();
                connectorConf = null;
                mapping.Clear();
                mapping = null;
            }
        }
    }
    finally
    {
        cert = null;
        certPass = null;
    }
}
/// <summary>
/// Static variant: runs deploy and (optionally) import once per resource bound to the
/// current plugin, decrypting each plugin's parameters with the proxy client certificate.
/// </summary>
/// <param name="deployOnly">When true, only the deploy phase runs.</param>
static void ExecuteConnector(Boolean deployOnly)
{
    List<Int64> resource = new List<Int64>();

    //Split the contexts: collect the distinct resource ids for this plugin
    String certPass = CATools.SHA1Checksum(Encoding.UTF8.GetBytes(config.fqdn));
    OpenSSL.X509.X509Certificate cert = CATools.LoadCert(Convert.FromBase64String(config.client_cert), certPass);
    foreach (PluginConfig p in config.plugins)
    {
        if (p.uri.ToLower() == plugin.GetPluginId().AbsoluteUri.ToLower())
        {
            //Early decrypt pass: fail fast if parameters cannot be decrypted
            JsonGeneric pgConf = new JsonGeneric();
            try
            {
                using (CryptApi cApi = CryptApi.ParsePackage(cert, Convert.FromBase64String(p.parameters)))
                    pgConf.FromJsonString(Encoding.UTF8.GetString(cApi.clearData));
            }
            catch (Exception ex)
            {
                throw new Exception("Decrypt error1 " + ex.Message);
            }
            finally
            {
                pgConf = null;
            }

            if (!resource.Contains(p.resource))
            {
                resource.Add(p.resource);
            }
        }
    }

    foreach (Int64 r in resource)
    {
        Dictionary<String, Object> connectorConf = new Dictionary<String, Object>();
        Dictionary<String, String> mapping = new Dictionary<String, String>();
        Boolean enableDeploy = false;
        try
        {
            foreach (PluginConfig p in config.plugins)
            {
                if ((p.uri.ToLower() == plugin.GetPluginId().AbsoluteUri.ToLower()) && (p.resource == r))
                {
                    // NOTE(review): this aliases the config's own dictionary; the
                    // mapping.Clear() in the finally below therefore clears
                    // p.mappingDataTypeDic itself — confirm this is intended.
                    mapping = p.mappingDataTypeDic;
                    enableDeploy = p.enable_deploy;

                    JsonGeneric pgConf = new JsonGeneric();
                    try
                    {
                        if (cert == null)
                        {
                            throw new Exception("Certificate is null");
                        }
                        using (CryptApi cApi = CryptApi.ParsePackage(cert, Convert.FromBase64String(p.parameters)))
                            pgConf.FromJsonString(Encoding.UTF8.GetString(cApi.clearData));
                    }
                    catch (Exception ex)
                    {
                        throw new Exception("Decrypt error: " + ex.Message);
                    }

                    if ((pgConf.data == null) || (pgConf.data.Count == 0))
                    {
                        continue;
                    }

                    Int32 kCol = pgConf.GetKeyIndex("key");
                    Int32 vCol = pgConf.GetKeyIndex("value");

                    if (!String.IsNullOrWhiteSpace(p.mail_domain))
                    {
                        connectorConf.Add("iam_mail_domain", p.mail_domain);
                    }

                    foreach (String[] d1 in pgConf.data)
                    {
                        if (!connectorConf.ContainsKey(d1[kCol]))
                        {
                            connectorConf.Add(d1[kCol], d1[vCol].ToString());
                        }
                    }
                }
            }

            //Deploy runs before the import so the import already sees records published by the deploy
            try
            {
                if (enableDeploy)
                {
                    ProcessDeploy(r, connectorConf, mapping);
                }
                else
                {
                    TextLog.Log("PluginStarter", "{" + plugin.GetPluginId().AbsoluteUri + "} Deploy disabled");

                    //Deploy disabled: discard any pending deploy files
                    System.Reflection.Assembly asm = System.Reflection.Assembly.GetAssembly(plugin.GetType());
                    // BUG FIX: path segment must be the current resource id r; the original
                    // concatenated the List<Int64> itself, yielding a segment like
                    // "System.Collections.Generic.List`1[System.Int64]" that never matches
                    // the per-resource directory used elsewhere.
                    DirectoryInfo dirFrom = new DirectoryInfo(Path.Combine(basePath, "In\\" + Path.GetFileNameWithoutExtension(asm.Location) + "\\" + r));
                    if (dirFrom.Exists)
                    {
                        foreach (FileInfo f in dirFrom.GetFiles("*.iamdat"))
                        {
                            f.Delete();
                        }
                    }
                }
            }
            catch (Exception ex)
            {
                TextLog.Log("PluginStarter", "{" + plugin.GetPluginId().AbsoluteUri + "} Error on deploy: " + ex.Message);
            }

            if (!deployOnly)
            {
                try
                {
                    //Import is never disabled: it is required by the consistency report;
                    //the Engine will not use it to add new entities
                    ProcessImport(r, connectorConf, mapping);
                }
                catch (Exception ex)
                {
                    TextLog.Log("PluginStarter", "{" + plugin.GetPluginId().AbsoluteUri + "} Error on import: " + ex.Message);
                }
            }
        }
        catch (Exception ex)
        {
            TextLog.Log("PluginStarter", "{" + plugin.GetPluginId().AbsoluteUri + "} Error on parse config: " + ex.Message);
        }
        finally
        {
            connectorConf.Clear();
            connectorConf = null;
            mapping.Clear();
            mapping = null;
        }
    }

    cert = null;
    certPass = null;
}
/// <summary>
/// Imports a "processimportv2" batch of user packages received from a proxy into the
/// collector_imports staging table using a single bulk insert, and records a
/// best-effort package-track entry for each package.
/// </summary>
/// <param name="config">Proxy configuration for this request (not read here; kept for parity with the other Import* handlers — TODO confirm).</param>
/// <param name="jData">Generic JSON payload; must contain 'resource_plugin' and 'package' columns.</param>
/// <param name="f">Queue file being processed (used in log messages and as file_name).</param>
/// <param name="req">Request envelope (enterprise id / proxy host, used in log messages).</param>
/// <param name="db">Open database connection used for the bulk copy and tracking queries.</param>
private void ImportRegistersV2(ProxyConfig config, JsonGeneric jData, FileInfo f, JSONRequest req, IAMDatabase db)
{
    //Resolve the required column positions in the generic payload
    Int32 resourcePluginCol = jData.GetKeyIndex("resource_plugin");
    Int32 pkgCol = jData.GetKeyIndex("package");

    //Abort (logging only) when a required column is missing
    if (resourcePluginCol == -1)
    {
        TextLog.Log("Inbound", "\t[ImportRegistersV2] Erro on find column 'resource_plugin' in " + f.Name + " enterprise " + req.enterpriseid + " and proxy " + req.host);
        return;
    }
    if (pkgCol == -1)
    {
        TextLog.Log("Inbound", "\t[ImportRegistersV2] Erro on find column 'package' in " + f.Name + " enterprise " + req.enterpriseid + " and proxy " + req.host);
        return;
    }

    //Import through BulkInsert for better database throughput
    DataTable dtBulk = new DataTable();
    dtBulk.Columns.Add(new DataColumn("date", typeof(DateTime)));
    dtBulk.Columns.Add(new DataColumn("file_name", typeof(String)));
    dtBulk.Columns.Add(new DataColumn("resource_plugin", typeof(Int64)));
    dtBulk.Columns.Add(new DataColumn("import_id", typeof(String)));
    dtBulk.Columns.Add(new DataColumn("package_id", typeof(String)));
    dtBulk.Columns.Add(new DataColumn("package", typeof(String)));
    dtBulk.Columns.Add(new DataColumn("status", typeof(String)));

    foreach (String[] dr in jData.data)
    {
        //Each row carries one base64-serialized import package
        PluginConnectorBaseImportPackageUser pkg = JSON.DeserializeFromBase64 <PluginConnectorBaseImportPackageUser>(dr[pkgCol]);

        //Status 'F' = row ready for processing (see the 2018-03-08 note below)
        dtBulk.Rows.Add(new Object[] { DateTime.Now, f.Name, dr[resourcePluginCol], pkg.importId, pkg.pkgId, JSON.Serialize2(pkg), 'F' });

        //Best-effort package tracking; a failure here must not stop the import
        try
        {
            DbParameterCollection par = new DbParameterCollection();
            par.Add("@date", typeof(DateTime)).Value = pkg.GetBuildDate();
            par.Add("@package_id", typeof(String), pkg.pkgId.Length).Value = pkg.pkgId;

            Int64 trackId = db.ExecuteScalar <Int64>("select id from st_package_track where flow = 'inbound' and date = @date and package_id = @package_id", System.Data.CommandType.Text, par, null);

            db.AddPackageTrack(trackId, "inbound", "Package imported to process queue");
        }
        catch { }
    }

    db.BulkCopy(dtBulk, "collector_imports");

    //Delete all records from the temporary table
    /*
     * Procedure disabled on 2018-08-29 on suspicion of a problem
     * db.ExecuteNonQuery("delete from collector_imports_temp", System.Data.CommandType.Text, null, null);
     *
     * db.BulkCopy(dtBulk, "collector_imports_temp");
     *
     * //Protection against package re-import (loop)
     * db.ExecuteNonQuery("delete from collector_imports_temp where exists (select 1 from collector_imports_old o where o.date >= dateadd(day,-1,getdate()) and o.file_name = file_name and o.resource_plugin_id = resource_plugin_id and o.import_id = import_id and o.package_id = package_id)", System.Data.CommandType.Text, null, null);
     * db.ExecuteNonQuery("delete from collector_imports_temp where exists (select 1 from collector_imports o where o.date >= dateadd(day,-1,getdate()) and o.file_name = file_name and o.resource_plugin_id = resource_plugin_id and o.import_id = import_id and o.package_id = package_id)", System.Data.CommandType.Text, null, null);
     *
     * db.ExecuteNonQuery("insert into collector_imports select * from collector_imports_temp", System.Data.CommandType.Text, null, null);
     * db.ExecuteNonQuery("delete from collector_imports_temp", System.Data.CommandType.Text, null, null);
     */

    //Release the imported records for processing.
    //*** Disabled on 2018-03-08: rows are now inserted directly with status 'F'
    //db.ExecuteNonQuery("update collector_imports set status = 'F' where [file_name] = '" + f.Name + "'", CommandType.Text, null);

    //Index rebuild to speed up the engine; executed only once by the parent object
    //db.ExecuteNonQuery("sp_reindex_imports", CommandType.StoredProcedure, null);

    #if DEBUG
    TextLog.Log("Inbound", "\t[ImportRegistersV2] Imported " + dtBulk.Rows.Count + " registers for enterprise " + req.enterpriseid + " and proxy " + req.host);
    #endif

    dtBulk.Dispose();
    dtBulk = null;
    jData = null;
}
/// <summary>
/// Deploy thread: reads the pending *.iamdat deploy files for the given resource
/// (oldest first), runs plugin.ProcessDeploy for every package, collects change
/// notifications, and ships notifications/logs back to the server. Successfully
/// processed files are deleted; empty directories are cleaned up.
/// </summary>
/// <param name="resource">Resource id whose "In\{plugin}\{resource}" directory is processed.</param>
/// <param name="connectorConf">Decrypted connector configuration passed to the plugin.</param>
/// <param name="mapping">Data-type mapping passed to the plugin.</param>
static void ProcessDeploy(Int64 resource, Dictionary <String, Object> connectorConf, Dictionary <String, String> mapping)
{
    TextLog.Log("PluginStarter", "{" + plugin.GetPluginId().AbsoluteUri + "} Starting deploy thread...");

    JsonGeneric notify = new JsonGeneric();
    notify.function = "notify";
    notify.fields = new String[] { "source", "resource", "uri", "entityid" };

    try
    {
        System.Reflection.Assembly asm = System.Reflection.Assembly.GetAssembly(plugin.GetType());
        DirectoryInfo dirFrom = new DirectoryInfo(Path.Combine(basePath, "In\\" + Path.GetFileNameWithoutExtension(asm.Location) + "\\" + resource));

        if (!dirFrom.Exists) //Nothing to do: directory does not exist
        {
            return;
        }

        //Sort the files from oldest to newest
        sortOndate sod = new sortOndate();
        List<FileInfo> files = new List<FileInfo>();
        files.AddRange(dirFrom.GetFiles("*.iamdat"));
        files.Sort(sod);

        foreach (FileInfo f in files)
        {
            try
            {
                List<PluginBaseDeployPackage> fData = null;
                try
                {
                    fData = LoadFile(f);
                }
                catch (Exception ex)
                {
                    TextLog.Log("PluginStarter", "{" + plugin.GetPluginId().AbsoluteUri + "} Error reading file " + f.FullName.Replace(basePath, "") + ", " + ex.Message);
                    logProxy.AddLog("Proxy", resource.ToString(), plugin.GetPluginId().AbsoluteUri, UserLogLevel.Error, 0, 0, "Error reading file " + f.FullName.Replace(basePath, "") + ", " + ex.Message, "");
                }

                //Unreadable file: skip it (and keep it for a later retry)
                if (fData == null)
                {
                    continue;
                }

                if (fData.Count == 0)
                {
                    throw new Exception("Package is empty");
                }

                TextLog.Log("PluginStarter", "{" + plugin.GetPluginId().AbsoluteUri + "} [" + resource + "]" + fData.Count + " packages in " + f.Name);

                //Wire plugin events only for the duration of this file
                LogEvent log = new LogEvent(delegate(PluginBase sender, PluginLogType type, String text)
                {
                    TextLog.Log("PluginStarter", "{" + sender.GetPluginId().AbsoluteUri + "} " + type + ", " + text);
                });

                LogEvent2 log2 = new LogEvent2(delegate(PluginBase sender, PluginLogType type, Int64 entityId, Int64 identityId, String text, String additionalData)
                {
                    logProxy.AddLog("Proxy", resource.ToString(), sender.GetPluginId().AbsoluteUri, (UserLogLevel)((Int32)type), entityId, identityId, text, additionalData);
                });

                NotityChangeUserEvent log3 = new NotityChangeUserEvent(delegate(PluginBase sender, Int64 entityId)
                {
                    notify.data.Add(new String[] { "Proxy", resource.ToString(), sender.GetPluginId().AbsoluteUri, entityId.ToString() });
                });

                plugin.Log += log;
                plugin.Log2 += log2;
                plugin.NotityChangeUser += log3;
                try
                {
                    foreach (PluginBaseDeployPackage pkg in fData)
                    {
                        try
                        {
                            plugin.ProcessDeploy(pkg, connectorConf, mapping);
                        }
                        catch (Exception ex)
                        {
                            //One failing package must not abort the rest of the file
                            logProxy.AddLog("Proxy", resource.ToString(), plugin.GetPluginId().AbsoluteUri, UserLogLevel.Error, pkg.entityId, 0, "error on ProcessDeploy thread of file " + f.FullName.Replace(basePath, "") + ", " + ex.Message + (ex.InnerException != null ? " - " + ex.InnerException.Message : ""), "");
                        }
                    }
                }
                finally
                {
                    //Always unsubscribe, otherwise handlers leak across files
                    plugin.Log -= log;
                    plugin.Log2 -= log2;
                    plugin.NotityChangeUser -= log3;
                    log = null;
                    log2 = null;
                    log3 = null;
                }

                //Ship the collected notifications
                if (notify.data.Count > 0)
                {
                    SaveToSend(notify, resource.ToString() + "notify");
                }

                //Ship the logs
                logProxy.SaveToSend(resource.ToString() + "log");

                //Best-effort cleanup of the processed file and empty directories
                try
                {
                    f.Delete();
                    if (dirFrom.GetFiles("*.iamdat").Length == 0)
                    {
                        dirFrom.Delete();
                    }
                    if (dirFrom.Parent.GetFiles("*.iamdat").Length == 0)
                    {
                        dirFrom.Parent.Delete();
                    }
                }
                catch { }
            }
            catch (Exception ex)
            {
                logProxy.AddLog("Proxy", resource.ToString(), plugin.GetPluginId().AbsoluteUri, UserLogLevel.Error, 0, 0, "Erro on deploy thread of file " + f.FullName.Replace(basePath, "") + ", " + ex.Message + (ex.InnerException != null ? " - " + ex.InnerException.Message : ""), "");
            }
        }

        files.Clear();
    }
    catch (Exception ex)
    {
        logProxy.AddLog("Proxy", resource.ToString(), plugin.GetPluginId().AbsoluteUri, UserLogLevel.Error, 0, 0, "Erro on deploy thread: " + ex.Message, "");
        //was "throw ex;", which resets the stack trace
        throw;
    }
    finally
    {
        //Ship any remaining notifications
        if (notify.data.Count > 0)
        {
            SaveToSend(notify, resource.ToString() + "notify");
        }

        //Ship the logs
        logProxy.SaveToSend(resource.ToString() + "log");

        TextLog.Log("PluginStarter", "{" + plugin.GetPluginId().AbsoluteUri + "} Finishing deploy thread...");
    }
}
private void StartAgents() { List <Int64> resource = new List <Int64>(); //Separa os contextos String certPass = CATools.SHA1Checksum(Encoding.UTF8.GetBytes(config.fqdn)); OpenSSL.X509.X509Certificate cert = CATools.LoadCert(Convert.FromBase64String(config.client_cert), certPass); try { foreach (PluginConfig p in config.plugins) { if (p.uri.ToLower() == plugin.GetPluginId().AbsoluteUri.ToLower()) { Dictionary <String, Object> connectorConf = new Dictionary <String, Object>(); JsonGeneric pgConf = new JsonGeneric(); try { if (cert == null) { throw new Exception("Certificate is null"); } using (CryptApi cApi = CryptApi.ParsePackage(cert, Convert.FromBase64String(p.parameters))) pgConf.FromJsonString(Encoding.UTF8.GetString(cApi.clearData)); } catch (Exception ex) { throw new Exception("Decrypt error: " + ex.Message); } if ((pgConf.data == null) || (pgConf.data.Count == 0)) { continue; } Int32 kCol = pgConf.GetKeyIndex("key"); Int32 vCol = pgConf.GetKeyIndex("value"); foreach (String[] d1 in pgConf.data) { PluginBase.FillConfig(plugin, ref connectorConf, d1[kCol], d1[vCol].ToString()); } /*if (!connectorConf.ContainsKey(d1[kCol])) * connectorConf.Add(d1[kCol], d1[vCol].ToString());*/ try { StartAgents(connectorConf); } catch (Exception ex) { TextLog.Log("PluginStarter", "{" + plugin.GetPluginId().AbsoluteUri + "} Error on start agent: " + ex.Message); } finally { connectorConf.Clear(); connectorConf = null; } } } } catch (Exception ex) { TextLog.Log("PluginStarter", "{" + plugin.GetPluginId().AbsoluteUri + "} Error on parse config: " + ex.Message); } cert = null; certPass = null; }
private void ImportNotify(ProxyConfig config, JsonGeneric jData, FileInfo f, JSONRequest req, IAMDatabase db) { Int32 resourceCol = jData.GetKeyIndex("resource"); Int32 sourceCol = jData.GetKeyIndex("source"); Int32 uriCol = jData.GetKeyIndex("uri"); Int32 entityIdCol = jData.GetKeyIndex("entityid"); if (resourceCol == -1) { TextLog.Log("Inbound", "\t[ImportNotify] Erro on find column 'resource' in " + f.Name + " enterprise " + req.enterpriseid + " and proxy " + req.host); return; } if (sourceCol == -1) { TextLog.Log("Inbound", "\t[ImportLogs] Erro on find column 'source' in " + f.Name + " enterprise " + req.enterpriseid + " and proxy " + req.host); return; } if (uriCol == -1) { TextLog.Log("Inbound", "\t[ImportNotify] Erro on find column 'uri' in " + f.Name + " enterprise " + req.enterpriseid + " and proxy " + req.host); return; } if (entityIdCol == -1) { TextLog.Log("Inbound", "\t[ImportNotify] Erro on find column 'entityId' in " + f.Name + " enterprise " + req.enterpriseid + " and proxy " + req.host); return; } DateTime date = DateTime.Now; //Realiza a importação no modelo BulkInsert por melhor desempenho do banco DataTable dtBulk = new DataTable(); dtBulk.Columns.Add(new DataColumn("date", typeof(DateTime))); dtBulk.Columns.Add(new DataColumn("source", typeof(String))); dtBulk.Columns.Add(new DataColumn("plugin_uri", typeof(String))); dtBulk.Columns.Add(new DataColumn("resource_id", typeof(Int64))); dtBulk.Columns.Add(new DataColumn("entity_id", typeof(Int64))); foreach (String[] dr in jData.data) { dtBulk.Rows.Add(new Object[] { date, dr[sourceCol], dr[uriCol], Int64.Parse(dr[resourceCol]), Int64.Parse(dr[entityIdCol]) }); } db.BulkCopy(dtBulk, "notify_imports"); #if DEBUG TextLog.Log("Inbound", "\t[ImportNotify] Imported " + dtBulk.Rows.Count + " notify for enterprise " + req.enterpriseid + " and proxy " + req.host); #endif dtBulk.Dispose(); dtBulk = null; jData = null; }
/// <summary>
/// Imports plugin log records sent by a proxy into the logs_imports staging table
/// (single bulk insert), then runs sp_process_logs to move them to the definitive
/// table. 'date' and 'additionaldata' payload columns are optional; the rest are required.
/// </summary>
/// <param name="config">Proxy configuration for this request (not read here; kept for parity with the other Import* handlers).</param>
/// <param name="jData">Generic JSON payload with the log rows.</param>
/// <param name="f">Queue file being processed (used in log messages).</param>
/// <param name="req">Request envelope (enterprise id / proxy host, stored with each row).</param>
/// <param name="db">Open database connection used for the bulk copy and procedure call.</param>
private void ImportLogs(ProxyConfig config, JsonGeneric jData, FileInfo f, JSONRequest req, IAMDatabase db)
{
    //Resolve the column positions in the generic payload
    Int32 resourceCol = jData.GetKeyIndex("resource");
    Int32 dateCol = jData.GetKeyIndex("date");
    Int32 sourceCol = jData.GetKeyIndex("source");
    Int32 keyCol = jData.GetKeyIndex("key");
    Int32 uriCol = jData.GetKeyIndex("uri");
    Int32 typeCol = jData.GetKeyIndex("type");
    Int32 entityIdCol = jData.GetKeyIndex("entityid");
    Int32 identityIdCol = jData.GetKeyIndex("identityid");
    Int32 textCol = jData.GetKeyIndex("text");
    Int32 additionaldataCol = jData.GetKeyIndex("additionaldata");

    //Abort (logging only) when a required column is missing
    if (resourceCol == -1)
    {
        TextLog.Log("Inbound", "\t[ImportLogs] Erro on find column 'resource' in " + f.Name + " enterprise " + req.enterpriseid + " and proxy " + req.host);
        return;
    }
    if (sourceCol == -1)
    {
        TextLog.Log("Inbound", "\t[ImportLogs] Erro on find column 'source' in " + f.Name + " enterprise " + req.enterpriseid + " and proxy " + req.host);
        return;
    }
    if (keyCol == -1)
    {
        TextLog.Log("Inbound", "\t[ImportLogs] Erro on find column 'key' in " + f.Name + " enterprise " + req.enterpriseid + " and proxy " + req.host);
        return;
    }
    if (uriCol == -1)
    {
        TextLog.Log("Inbound", "\t[ImportLogs] Erro on find column 'uri' in " + f.Name + " enterprise " + req.enterpriseid + " and proxy " + req.host);
        return;
    }
    if (entityIdCol == -1)
    {
        TextLog.Log("Inbound", "\t[ImportLogs] Erro on find column 'entityId' in " + f.Name + " enterprise " + req.enterpriseid + " and proxy " + req.host);
        return;
    }
    if (identityIdCol == -1)
    {
        TextLog.Log("Inbound", "\t[ImportLogs] Erro on find column 'identityId' in " + f.Name + " enterprise " + req.enterpriseid + " and proxy " + req.host);
        return;
    }
    if (textCol == -1)
    {
        TextLog.Log("Inbound", "\t[ImportLogs] Erro on find column 'text' in " + f.Name + " enterprise " + req.enterpriseid + " and proxy " + req.host);
        return;
    }

    DateTime date = DateTime.Now;

    //Import through BulkInsert for better database throughput
    DataTable dtBulk = new DataTable();
    dtBulk.Columns.Add(new DataColumn("date", typeof(DateTime)));
    dtBulk.Columns.Add(new DataColumn("source", typeof(String)));
    dtBulk.Columns.Add(new DataColumn("key", typeof(Int32)));
    dtBulk.Columns.Add(new DataColumn("enterprise_id", typeof(Int64)));
    dtBulk.Columns.Add(new DataColumn("proxy_name", typeof(String)));
    dtBulk.Columns.Add(new DataColumn("proxy_id", typeof(Int64)));
    dtBulk.Columns.Add(new DataColumn("plugin_uri", typeof(String)));
    dtBulk.Columns.Add(new DataColumn("plugin_id", typeof(Int64)));
    dtBulk.Columns.Add(new DataColumn("resource_id", typeof(Int64)));
    dtBulk.Columns.Add(new DataColumn("entity_id", typeof(Int64)));
    dtBulk.Columns.Add(new DataColumn("identity_id", typeof(Int64)));
    dtBulk.Columns.Add(new DataColumn("type", typeof(String)));
    dtBulk.Columns.Add(new DataColumn("text", typeof(String)));
    dtBulk.Columns.Add(new DataColumn("additional_data", typeof(String)));

    foreach (String[] dr in jData.data)
    {
        //Removed a useless try/catch that rethrew with "throw ex;" (resets the stack trace)
        //Console.WriteLine(f.Name + " - " + dr[entityIdCol] + " ==> " + dr[textCol]);
        //Console.WriteLine(dr[additionaldataCol]);
        //Console.WriteLine("");

        //Row date falls back to "now" when the optional 'date' column is absent;
        //proxy_id and plugin_id are resolved later by sp_process_logs (0 here)
        dtBulk.Rows.Add(new Object[] { (dateCol >= 0 ? DateTime.Parse(dr[dateCol]) : date), dr[sourceCol], dr[keyCol], req.enterpriseid, req.host, 0, dr[uriCol], 0, Int64.Parse(dr[resourceCol]), Int64.Parse(dr[entityIdCol]), Int64.Parse(dr[identityIdCol]), dr[typeCol], dr[textCol], (additionaldataCol >= 0 ? dr[additionaldataCol] : "") });
    }

    db.BulkCopy(dtBulk, "logs_imports");

    //Procedure that processes the logs and moves them to the definitive table
    db.ExecuteNonQuery("sp_process_logs", CommandType.StoredProcedure, null);

    //Was "#if debug" (lowercase): that symbol is never defined, so these diagnostics
    //were dead code; aligned with the "#if DEBUG" used by the other handlers.
    #if DEBUG
    db.AddUserLog(LogKey.Import, null, "Inbound", UserLogLevel.Info, 0, 0, 0, 0, 0, 0, 0, "Imported " + dtBulk.Rows.Count + " logs for enterprise " + req.enterpriseid + " and proxy " + req.host + " from file " + f.Name);
    TextLog.Log("Inbound", "\t[ImportLogs] Imported " + dtBulk.Rows.Count + " logs for enterprise " + req.enterpriseid + " and proxy " + req.host);
    #endif

    dtBulk.Dispose();
    dtBulk = null;
    jData = null;
}
/// <summary>
/// Imports package-track entries sent by a proxy. For each row, looks up the existing
/// st_package_track record by (flow, package_id); when absent, creates one through
/// sp_new_package_track; then appends the row's text to the track history.
/// </summary>
/// <param name="config">Proxy configuration for this request (not read here; kept for parity with the other Import* handlers).</param>
/// <param name="jData">Generic JSON payload; 'date' column is optional, the rest are required.</param>
/// <param name="f">Queue file being processed (used in log messages).</param>
/// <param name="req">Request envelope (enterprise id / proxy host, used in log messages).</param>
/// <param name="db">Open database connection used for the lookups and inserts.</param>
private void ImportPackageTrack(ProxyConfig config, JsonGeneric jData, FileInfo f, JSONRequest req, IAMDatabase db)
{
    //Resolve the column positions in the generic payload
    Int32 resourceCol = jData.GetKeyIndex("resource");
    Int32 dateCol = jData.GetKeyIndex("date");
    Int32 sourceCol = jData.GetKeyIndex("source");
    Int32 filenameCol = jData.GetKeyIndex("filename");
    Int32 packageIdCol = jData.GetKeyIndex("packageid");
    Int32 flowCol = jData.GetKeyIndex("flow");
    Int32 textCol = jData.GetKeyIndex("text");

    //Abort (logging only) when a required column is missing.
    //Log tags fixed: these messages previously said "[ImportLogs]" (copy-paste).
    if (resourceCol == -1)
    {
        TextLog.Log("Inbound", "\t[ImportPackageTrack] Erro on find column 'resource' in " + f.Name + " enterprise " + req.enterpriseid + " and proxy " + req.host);
        return;
    }
    if (sourceCol == -1)
    {
        TextLog.Log("Inbound", "\t[ImportPackageTrack] Erro on find column 'source' in " + f.Name + " enterprise " + req.enterpriseid + " and proxy " + req.host);
        return;
    }
    if (textCol == -1)
    {
        TextLog.Log("Inbound", "\t[ImportPackageTrack] Erro on find column 'text' in " + f.Name + " enterprise " + req.enterpriseid + " and proxy " + req.host);
        return;
    }
    if (flowCol == -1)
    {
        TextLog.Log("Inbound", "\t[ImportPackageTrack] Erro on find column 'flow' in " + f.Name + " enterprise " + req.enterpriseid + " and proxy " + req.host);
        return;
    }
    if (filenameCol == -1)
    {
        TextLog.Log("Inbound", "\t[ImportPackageTrack] Erro on find column 'filename' in " + f.Name + " enterprise " + req.enterpriseid + " and proxy " + req.host);
        return;
    }
    if (packageIdCol == -1)
    {
        TextLog.Log("Inbound", "\t[ImportPackageTrack] Erro on find column 'packageid' in " + f.Name + " enterprise " + req.enterpriseid + " and proxy " + req.host);
        return;
    }

    DateTime date = DateTime.Now;

    foreach (String[] dr in jData.data)
    {
        //Removed a useless try/catch that rethrew with "throw ex;" (resets the stack trace)
        //Console.WriteLine(f.Name + " - " + dr[entityIdCol] + " ==> " + dr[textCol]);
        //Console.WriteLine(dr[additionaldataCol]);
        //Console.WriteLine("");

        Int64 packageId = 0;

        //Look up an existing track record for this (flow, package_id)
        DbParameterCollection par = new DbParameterCollection();
        par.Add("@flow", typeof(String)).Value = dr[flowCol];
        par.Add("@package_id", typeof(String)).Value = dr[packageIdCol];
        try
        {
            Int64 tmp = db.ExecuteScalar <Int64>("select id from st_package_track where flow = @flow and package_id = @package_id", System.Data.CommandType.Text, par, null);
            if (tmp > 0)
            {
                packageId = tmp;
            }
        }
        catch { }

        //No existing record: create one (row date falls back to "now" when absent)
        if (packageId == 0)
        {
            par = new DbParameterCollection();
            par.Add("@entity_id", typeof(Int64)).Value = 0;
            par.Add("@date", typeof(DateTime)).Value = (dateCol >= 0 ? DateTime.Parse(dr[dateCol]) : date);
            par.Add("@flow", typeof(String)).Value = dr[flowCol];
            par.Add("@package_id", typeof(String), dr[packageIdCol].Length).Value = dr[packageIdCol];
            par.Add("@filename", typeof(String), dr[filenameCol].Length).Value = dr[filenameCol];
            par.Add("@package", typeof(String), dr[textCol].Length).Value = dr[textCol];

            packageId = db.ExecuteScalar <Int64>("sp_new_package_track", System.Data.CommandType.StoredProcedure, par, null);
        }

        db.AddPackageTrack(packageId, dr[flowCol], dr[textCol]);
    }

    jData = null;
}
/// <summary>
/// Processes one inbound queue file: validates the request envelope, decrypts the
/// payload with the proxy server certificate, and dispatches it to the matching
/// Import* handler based on the payload's "function" field. Successfully handled
/// files are deleted.
/// </summary>
/// <param name="f">Queue file to process.</param>
/// <param name="oStarter">Not read in this method; kept for the queue callback signature.</param>
private void ProcQueue(FileInfo f, Object oStarter)
{
    IAMDatabase db = null;
    try
    {
        db = new IAMDatabase(localConfig.SqlServer, localConfig.SqlDb, localConfig.SqlUsername, localConfig.SqlPassword);
        db.openDB();
        db.Timeout = 900;

        //Only consumed by the reindex block commented out below
        Boolean rebuildIndex = false;
        String type = "";
        JSONRequest req = null;
        try
        {
            using (FileStream fs = f.OpenRead())
                req = JSON.GetRequest(fs);

            //Fixed "Paramter" typos in the three validation messages below
            if ((req.host == null) || (req.host == ""))
            {
                db.AddUserLog(LogKey.Inbound, null, "Inbound", UserLogLevel.Error, 0, 0, 0, 0, 0, 0, 0, "Parameter 'host' is empty on " + f.Name);
                return;
            }
            if ((req.enterpriseid == null) || (req.enterpriseid == ""))
            {
                db.AddUserLog(LogKey.Inbound, null, "Inbound", UserLogLevel.Error, 0, 0, 0, 0, 0, 0, 0, "Parameter 'enterpriseid' is empty on " + f.Name);
                return;
            }

            //BUG FIX: the original caught the Int64.Parse failure but only returned when
            //the value was empty (already handled above), so non-numeric ids slipped
            //through and blew up later. Reject them here instead.
            Int64 enterpriseId;
            if (!Int64.TryParse(req.enterpriseid, out enterpriseId))
            {
                db.AddUserLog(LogKey.Inbound, null, "Inbound", UserLogLevel.Error, 0, 0, 0, 0, 0, 0, 0, "Parameter 'enterpriseid' is not Int64 " + f.Name);
                return;
            }

            ProxyConfig config = new ProxyConfig(true);
            config.GetDBCertConfig(db.Connection, enterpriseId, req.host);
            if (config.fqdn != null) //Proxy found
            {
                //Decrypt the payload with the proxy server certificate
                JsonGeneric jData = new JsonGeneric();
                try
                {
                    String certPass = CATools.SHA1Checksum(Encoding.UTF8.GetBytes(config.fqdn));
                    using (CryptApi cApi = CryptApi.ParsePackage(CATools.LoadCert(Convert.FromBase64String(config.server_pkcs12_cert), certPass), Convert.FromBase64String(req.data)))
                        jData.FromJsonBytes(cApi.clearData);
                }
                catch (Exception ex)
                {
                    jData = null;
                    db.AddUserLog(LogKey.Inbound, null, "Inbound", UserLogLevel.Error, config.proxyID, 0, 0, 0, 0, 0, 0, "Error on decrypt package data " + f.Name + " for enterprise " + req.enterpriseid + " and proxy " + req.host + ", " + ex.Message);
                }

                if (jData == null)
                {
                    return;
                }

                //Dispatch on the payload function; handled files are deleted afterwards
                type = jData.function.ToLower();
                switch (type)
                {
                    case "processimport-disabled":
                        rebuildIndex = true;
                        //ImportRegisters(config, jData, f, req, db);
                        f.Delete();
                        break;

                    case "processimportv2":
                        rebuildIndex = true;
                        last_status = "Executando importação de registros";
                        ImportRegistersV2(config, jData, f, req, db);
                        f.Delete();
                        break;

                    case "processstructimport":
                        last_status = "Executando importação de registros de estrutura";
                        ImportRegistersStruct(config, jData, f, req, db);
                        f.Delete();
                        break;

                    case "notify":
                        last_status = "Executando importação de notificações";
                        ImportNotify(config, jData, f, req, db);
                        f.Delete();
                        break;

                    case "deleted":
                        last_status = "Executando importação de exclusões";
                        ImportDelete(config, jData, f, req, db);
                        f.Delete();
                        break;

                    case "logrecords":
                        last_status = "Executando importação de logs";
                        ImportLogs(config, jData, f, req, db);
                        f.Delete();
                        //f.MoveTo(f.FullName + ".imported");
                        break;

                    case "packagetrack":
                        last_status = "Executando importação de track dos pacotes";
                        ImportPackageTrack(config, jData, f, req, db);
                        f.Delete();
                        //f.MoveTo(f.FullName + ".imported");
                        break;

                    default:
                        db.AddUserLog(LogKey.Inbound, null, "Inbound", UserLogLevel.Error, config.proxyID, 0, 0, 0, 0, 0, 0, "Invalid jData function '" + jData.function + "'");
                        break;
                }
            }
            else
            {
                db.AddUserLog(LogKey.Inbound, null, "Inbound", UserLogLevel.Error, 0, 0, 0, 0, 0, 0, 0, "Proxy config not found for enterprise " + req.enterpriseid + " and proxy " + req.host);
            }

            config = null;
        }
        catch (Exception ex)
        {
            TextLog.Log("Inbound", "Erro on process file '" + f.Name + "' (" + type + "): " + ex.Message);
            db.AddUserLog(LogKey.Import, null, "Inbound", UserLogLevel.Info, 0, 0, 0, 0, 0, 0, 0, "Erro processing file '" + f.Name + "' (" + type + "): " + ex.Message);
        }
        finally
        {
            last_status = "";
            req = null;
            filesProcessed++;
        }

        /*
         * if (rebuildIndex)
         * {
         * db.Timeout = 900;
         * last_status = "Reindexando registros";
         * db.ExecuteNonQuery("sp_reindex_imports", CommandType.StoredProcedure, null);
         * }*/
    }
    catch (Exception ex)
    {
        TextLog.Log("Inbound", "Error importing file (" + f.Name + ")" + ex.Message);
    }
    finally
    {
        if (db != null)
        {
            db.closeDB();
        }
    }
}
/// <summary>
/// Runs one plugin import pass: wires registry/log event handlers onto the plugin,
/// invokes its import, batches the emitted records into ~2000-row files via
/// SaveToSend, then flushes the remainder and the accumulated logs.
/// </summary>
/// <param name="resource">Resource (connector) id the records belong to.</param>
/// <param name="connectorConf">Connector configuration; must not be null.</param>
/// <param name="mapping">Field mapping; must not be null.</param>
static void ProcessImport(Int64 resource, Dictionary<String, Object> connectorConf, Dictionary<String, String> mapping)
{
    TextLog.Log("PluginStarter", "{" + plugin.GetPluginId().AbsoluteUri + "} Starting import thread...");

    try
    {
        if (connectorConf == null)
        {
            throw new Exception("connectorConf is null");
        }

        if (mapping == null)
        {
            throw new Exception("mapping is null");
        }

        String id = Guid.NewGuid().ToString();

        JsonGeneric records = new JsonGeneric();
        records.function = "ProcessImport";
        records.fields = new String[] { "resource", "uri", "importid", "registryid", "dataname", "datavalue", "datatype" };

        String uri = plugin.GetPluginId().AbsoluteUri.ToLower();
        String lastRegistryId = "";

        RegistryEvent reg = new RegistryEvent(delegate(String importId, String registryId, String dataName, String dataValue, String dataType)
        {
            records.data.Add(new String[] { resource.ToString(), uri, importId, registryId, dataName, dataValue, dataType });

            // After 2000 rows, wait for the registryId to change before flushing so a
            // single registry's rows never straddle two files (avoids server-side issues).
            if (records.data.Count >= 2000)
            {
                if (lastRegistryId != registryId)
                {
                    try
                    {
                        SaveToSend(records, importId);
                        records.data.Clear();
                    }
                    catch (Exception ex)
                    {
                        // Best-effort flush: keep importing, but leave a trace instead of
                        // silently dropping the batch (original had an empty catch).
                        TextLog.Log("PluginStarter", "{" + uri + "} Error saving partial batch: " + ex.Message);
                    }
                }
            }

            lastRegistryId = registryId;
        });

        LogEvent log = new LogEvent(delegate(PluginBase sender, PluginLogType type, string text)
        {
            TextLog.Log("PluginStarter", "{" + sender.GetPluginId().AbsoluteUri + "} " + type + ", " + text);
        });

        LogEvent2 log2 = new LogEvent2(delegate(PluginBase sender, PluginLogType type, Int64 entityId, Int64 identityId, String text, String additionalData)
        {
            logProxy.AddLog("Proxy", resource.ToString(), sender.GetPluginId().AbsoluteUri, (UserLogLevel)((Int32)type), entityId, identityId, text, additionalData);
        });

        plugin.Registry += reg;
        plugin.Log += log;
        plugin.Log2 += log2;

        try
        {
            plugin.ProcessImport(id, connectorConf, mapping);
        }
        finally
        {
            // FIX: the original only unsubscribed on the success path; an exception from
            // ProcessImport left the handlers attached to the (static) plugin, leaking
            // them and duplicating events on the next run.
            plugin.Registry -= reg;
            plugin.Log -= log;
            plugin.Log2 -= log2;
        }

        reg = null;
        log = null;
        uri = null;

        // Flush any remaining records that never hit the 2000-row threshold.
        if (records.data.Count > 0)
        {
            SaveToSend(records, id);
        }

        // Persist accumulated plugin logs for sending.
        logProxy.SaveToSend(resource.ToString() + "log");
    }
    finally
    {
        TextLog.Log("PluginStarter", "{" + plugin.GetPluginId().AbsoluteUri + "} Finishing import thread");
    }
}
public void TestGeneric() { // Test that: Object => Generic => Object #2 are identical var testObject = new SimpleJsonObject { Name = _random.AnyString(6), Number = _random.Any(), }; for (int i = 0; i < _random.Any(2, 7); i++) { var childObject = new SimpleChildJsonObject { Name = _random.AnyString(6), Number = _random.Any(), }; testObject.Children.Add(childObject); } var memoryStream = new MemoryStream(); // Serialize the object using (var writer = JSON.GetWriter(memoryStream)) { testObject.ToJson(writer); } Assert.Greater(memoryStream.Position, 0); memoryStream.Position = 0; // Deserialize the object into a JsonGeneric var jsonGeneric = new JsonGeneric(); using (var reader = JSON.GetReader(memoryStream)) { reader.Read(); jsonGeneric.FromJson(reader); } // Serialize the JsonGeneric to another stream var memoryStream2 = new MemoryStream(); using (var writer2 = JSON.GetWriter(memoryStream2)) { jsonGeneric.ToJson(writer2); } Assert.Greater(memoryStream2.Position, 0); memoryStream2.Position = 0; // Deserialize back into the original object var simpleJsonObjectResponse = new SimpleJsonObject(); using (var reader2 = JSON.GetReader(memoryStream2)) { reader2.Read(); simpleJsonObjectResponse.FromJson(reader2); } }
/// <summary>
/// Handles a proxy plugin-update request: reads the JSON payload from the request
/// body, looks up the plugin by URI, and answers with "updated" when the proxy's
/// checksum matches the server copy, or with the encrypted plugin binary when it
/// is outdated. Returns silently (empty response) on any missing precondition.
/// </summary>
protected void Page_Load(object sender, EventArgs e)
{
    try
    {
        Request.InputStream.Position = 0;
        JSONRequest req = JSON.GetRequest(Request.InputStream);

        JsonGeneric data = new JsonGeneric();
        data.FromJsonString(req.data);

        if (data.data.Count == 0)
        {
            return;
        }

        using (IAMDatabase db = new IAMDatabase(IAMDatabase.GetWebConnectionString()))
        {
            ProxyConfig config = new ProxyConfig();
            config.GetDBConfig(db.Connection, ((EnterpriseData)Page.Session["enterprise_data"]).Id, req.host);

            if (config.fqdn == null) // proxy not found
            {
                return;
            }

            String uri = Tools.Tool.TrataInjection(data.data[0][data.GetKeyIndex("uri")]);

            // NOTE(review): string-built SQL; uri passes through TrataInjection, but a
            // parameterized query would be the robust fix here.
            DataTable dt = db.Select("select * from plugin where uri = '" + uri + "'");
            if ((dt == null) || (dt.Rows.Count == 0))
            {
                return;
            }

            DirectoryInfo pluginsDir = null;
            using (ServerDBConfig c = new ServerDBConfig(IAMDatabase.GetWebConnection()))
                pluginsDir = new DirectoryInfo(c.GetItem("pluginFolder"));

            if (pluginsDir == null)
            {
                throw new Exception("Parâmtro 'pluginFolder' não encontrado");
            }

            if (pluginsDir.Exists)
            {
                FileInfo f = new FileInfo(Path.Combine(pluginsDir.FullName, dt.Rows[0]["assembly"].ToString()));
                if (f.Exists)
                {
                    Byte[] fData = File.ReadAllBytes(f.FullName);
                    String fileHash = CATools.SHA1Checksum(fData);

                    Int32 ci = data.GetKeyIndex("checksum");
                    if ((ci != -1) && (data.data[0][ci] == fileHash))
                    {
                        // Proxy already has this exact binary.
                        ReturnHolder.Controls.Add(new LiteralControl("{ \"name\":\"" + f.Name + "\", \"status\":\"updated\"}"));
                    }
                    else
                    {
                        // Outdated: ship the binary encrypted with the proxy's client cert
                        // (password derived from the proxy FQDN, same scheme as elsewhere).
                        String certPass = CATools.SHA1Checksum(Encoding.UTF8.GetBytes(config.fqdn));

                        using (CryptApi cApi = new CryptApi(CATools.LoadCert(Convert.FromBase64String(config.client_cert), certPass), fData))
                            ReturnHolder.Controls.Add(new LiteralControl("{ \"name\":\"" + f.Name + "\", \"status\":\"outdated\", \"date\":\"" + f.LastWriteTimeUtc.ToString("yyyy-MM-dd HH:mm:ss") + "\", \"content\":\"" + Convert.ToBase64String(cApi.ToBytes()) + "\"}"));
                    }

                    fData = new Byte[0];
                }
            }

            /*
             * ProxyConfig config = new ProxyConfig();
             * config.GetDBConfig(IAMDatabase.GetWebConnection(), ((EnterpriseData)Page.Session["enterprise_data"]).Id, req.host);
             *
             * if (config.fqdn != null)
             * {
             *     ReturnHolder.Controls.Add(new LiteralControl(config.ToJsonString()));
             * }*/
        }
    }
    catch (Exception ex)
    {
        Tools.Tool.notifyException(ex);
        // FIX: "throw ex;" resets the stack trace; "throw;" rethrows preserving it.
        throw;
    }
}