/// <summary>
/// Loads the existing price-warnlevel data from a JSON file and stores the
/// levels in the matching rows of table "tbCommodity".
/// </summary>
/// <param name="Filename">path of a JSON file containing a list of EDCommoditiesWarningLevels</param>
internal void ImportCommodityPriceWarnLevels(String Filename)
{
    DBConnector lDBCon = null;
    dsEliteDB Data;
    List<EDCommoditiesWarningLevels> WarnLevels;
    Int32 Counter = 0;

    WarnLevels = JsonConvert.DeserializeObject<List<EDCommoditiesWarningLevels>>(File.ReadAllText(Filename));
    Data = new dsEliteDB();

    try
    {
        // own connection so the global connector stays untouched
        lDBCon = new DBConnector(Program.DBCon.ConfigData, true);

        lDBCon.TableRead("select * from tbCommodity", Data.tbcommodity);

        sendProgressEvent(new ProgressEventArgs() { Info = "import warnlevels", CurrentValue = Counter, TotalValue = WarnLevels.Count });

        // copy each warnlevel set into the commodity row with the same name;
        // commodity names not present in "tbCommodity" are silently skipped
        foreach (EDCommoditiesWarningLevels Warnlevel in WarnLevels)
        {
            DataRow[] Commodity = Data.tbcommodity.Select("commodity = " + DBConnector.SQLAString(DBConnector.DTEscape(Warnlevel.Name)));

            if (Commodity.Length > 0)
            {
                Commodity[0]["pwl_demand_buy_low"]   = Warnlevel.PriceWarningLevel_Demand_Buy_Low;
                Commodity[0]["pwl_demand_buy_high"]  = Warnlevel.PriceWarningLevel_Demand_Buy_High;
                Commodity[0]["pwl_supply_buy_low"]   = Warnlevel.PriceWarningLevel_Supply_Buy_Low;
                Commodity[0]["pwl_supply_buy_high"]  = Warnlevel.PriceWarningLevel_Supply_Buy_High;
                Commodity[0]["pwl_demand_sell_low"]  = Warnlevel.PriceWarningLevel_Demand_Sell_Low;
                Commodity[0]["pwl_demand_sell_high"] = Warnlevel.PriceWarningLevel_Demand_Sell_High;
                Commodity[0]["pwl_supply_sell_low"]  = Warnlevel.PriceWarningLevel_Supply_Sell_Low;
                Commodity[0]["pwl_supply_sell_high"] = Warnlevel.PriceWarningLevel_Supply_Sell_High;
            }

            Counter++;
            sendProgressEvent(new ProgressEventArgs() { Info = "import warnlevels", CurrentValue = Counter, TotalValue = WarnLevels.Count });
        }

        // submit changes (tbCommodity)
        lDBCon.TableUpdate(Data.tbcommodity);

        lDBCon.Dispose();
    }
    catch (Exception ex)
    {
        if (lDBCon != null)
            lDBCon.Dispose();

        // bugfix: message formerly claimed "loading commodity names" (copy/paste error)
        throw new Exception("Error while importing commodity price warnlevels", ex);
    }
}
/// <summary>
/// Loads the localized commodity names and checks if the self-added names are
/// now included in the official dictionary. Unknown languages and unknown
/// commodities are created on the fly (the latter with negative self-created ids).
/// </summary>
/// <param name="DataNames">DataSet with a "Names"-table: one column per language plus an optional "id" column</param>
/// <param name="importType">controls whether already existing localizations get overwritten</param>
internal void ImportCommodityLocalizations(DataSet DataNames, enLocalisationImportType importType = enLocalisationImportType.onlyNew)
{
    DBConnector lDBCon = null;
    dsEliteDB Data;
    Dictionary<String, Int32> foundLanguagesFromFile = new Dictionary<String, Int32>();
    String sqlString;
    Int32 currentSelfCreatedIndex;
    Int32 Counter = 0;
    Boolean idColumnFound = false;
    String BaseName;
    DataRow[] Commodity;

    Data = new dsEliteDB();

    try
    {
        lDBCon = new DBConnector(Program.DBCon.ConfigData, true);

        // gettin' some freaky performance
        lDBCon.Execute("set global innodb_flush_log_at_trx_commit=2");

        // determine the next free (negative) id for self-created commodities
        sqlString = "select min(id) As min_id from tbCommodity";
        lDBCon.Execute(sqlString, "minID", DataNames);

        if (Convert.IsDBNull(DataNames.Tables["minID"].Rows[0]["min_id"]))
            currentSelfCreatedIndex = -1;
        else
        {
            currentSelfCreatedIndex = ((Int32)DataNames.Tables["minID"].Rows[0]["min_id"]) - 1;
            if (currentSelfCreatedIndex >= 0)
                currentSelfCreatedIndex = -1;
        }

        lDBCon.TableRead("select * from tbLanguage", Data.tblanguage);
        lDBCon.TableRead("select * from tbCommodityLocalization", Data.tbcommoditylocalization);
        lDBCon.TableRead("select * from tbCommodity", Data.tbcommodity);

        if (DataNames.Tables["Names"] != null)
        {
            sendProgressEvent(new ProgressEventArgs() { Info = "import commodity localization", CurrentValue = Counter, TotalValue = DataNames.Tables["Names"].Rows.Count });

            // first check if there's a new language
            foreach (DataColumn LanguageFromFile in DataNames.Tables["Names"].Columns)
            {
                if (!LanguageFromFile.ColumnName.Equals("id", StringComparison.InvariantCultureIgnoreCase))
                {
                    DataRow[] LanguageName = Data.tblanguage.Select("language = " + DBConnector.SQLAString(LanguageFromFile.ColumnName));

                    if (LanguageName.Length == 0)
                    {
                        // add a non existing language with the next free id
                        DataRow newRow = Data.tblanguage.NewRow();

                        int? Wert = DBConvert.To<int?>(Data.tblanguage.Compute("max(id)", ""));
                        if (Wert == null)
                            Wert = 0;
                        Wert += 1;

                        newRow["id"]       = Wert;
                        newRow["language"] = LanguageFromFile.ColumnName;

                        Data.tblanguage.Rows.Add(newRow);

                        foundLanguagesFromFile.Add(LanguageFromFile.ColumnName, (Int32)Wert);
                    }
                    else
                        foundLanguagesFromFile.Add((String)LanguageName[0]["language"], (Int32)LanguageName[0]["id"]);
                }
                else
                    idColumnFound = true;
            }

            // submit changes (tbLanguage)
            lDBCon.TableUpdate(Data.tblanguage);

            // compare and add the localized names
            foreach (DataRow LocalizationFromFile in DataNames.Tables["Names"].AsEnumerable())
            {
                int? commodityID = null;

                if (idColumnFound)
                    commodityID = DBConvert.To<int?>(LocalizationFromFile["id"]);

                // bugfix: removed leftover debug statement <if (commodityID == 1) Debug.Print("Stop");>

                BaseName = (String)LocalizationFromFile[Program.BASE_LANGUAGE];

                if ((commodityID == null) || (commodityID < 0))
                {
                    // no id or selfcreated - identify by the base-language name
                    Commodity = Data.tbcommodity.Select("commodity = " + DBConnector.SQLAString(DBConnector.DTEscape(BaseName)));
                }
                else
                {
                    // confirmed commodity with id
                    Commodity = Data.tbcommodity.Select("id = " + commodityID);
                }

                if (Commodity.Length == 0)
                {
                    // completely unknown commodity - add first new entry to "tbCommodities"
                    DataRow newRow = Data.tbcommodity.NewRow();

                    newRow["id"]        = currentSelfCreatedIndex;
                    newRow["commodity"] = BaseName;
                    newRow["is_rare"]   = 0;

                    Data.tbcommodity.Rows.Add(newRow);

                    currentSelfCreatedIndex -= 1;

                    // submit changes (tbCommodity)
                    lDBCon.TableUpdate(Data.tbcommodity);

                    Commodity = Data.tbcommodity.Select("commodity = " + DBConnector.SQLAString(DBConnector.DTEscape(BaseName)));
                }

                foreach (KeyValuePair<String, Int32> LanguageFormFile in foundLanguagesFromFile)
                {
                    DataRow[] currentLocalizations = Data.tbcommoditylocalization.Select(" commodity_id = " + Commodity[0]["id"] + " and language_id = " + LanguageFormFile.Value);

                    if (currentLocalizations.Length == 0)
                    {
                        // add a new localization; empty names fall back to the base-language name
                        DataRow newRow = Data.tbcommoditylocalization.NewRow();

                        newRow["commodity_id"] = Commodity[0]["id"];
                        newRow["language_id"]  = LanguageFormFile.Value;

                        if ((String)LocalizationFromFile[LanguageFormFile.Key] == "")
                            newRow["locname"] = BaseName;
                        else
                            newRow["locname"] = (String)LocalizationFromFile[LanguageFormFile.Key];

                        Data.tbcommoditylocalization.Rows.Add(newRow);
                    }
                    else if ((importType == enLocalisationImportType.overWriteAll) ||
                             ((importType == enLocalisationImportType.overwriteNonBase) && (LanguageFormFile.Key != Program.BASE_LANGUAGE)))
                    {
                        // overwrite existing localization (but never with an empty string)
                        if ((String)LocalizationFromFile[LanguageFormFile.Key] != "")
                            currentLocalizations[0]["locname"] = (String)LocalizationFromFile[LanguageFormFile.Key];
                    }
                }

                Counter++;
                sendProgressEvent(new ProgressEventArgs() { Info = "import commodity localization", CurrentValue = Counter, TotalValue = DataNames.Tables["Names"].Rows.Count });
            }
        }

        // submit changes
        lDBCon.TableUpdate(Data.tbcommoditylocalization);

        // reset freaky performance
        lDBCon.Execute("set global innodb_flush_log_at_trx_commit=1");

        lDBCon.Dispose();
    }
    catch (Exception ex)
    {
        if (lDBCon != null)
        {
            try
            {
                // reset freaky performance
                lDBCon.Execute("set global innodb_flush_log_at_trx_commit=1");
            }
            catch (Exception) { }

            lDBCon.Dispose();
        }

        throw new Exception("Error while loading commodity names", ex);
    }
}
/// <summary>
/// Loads the localized economy level names: unknown languages are created,
/// missing localizations are added (existing localizations are never overwritten).
/// </summary>
/// <param name="Filename">XML file with a "Levels"-table: one column per language plus an "ID" column</param>
internal void ImportEconomyLevelLocalizations(String Filename)
{
    DBConnector lDBCon = null;
    dsEliteDB Data;
    DataSet DataNames;
    Dictionary<String, Int32> foundLanguagesFromFile = new Dictionary<String, Int32>();
    Int32 Counter = 0;

    Data = new dsEliteDB();
    DataNames = new DataSet();

    try
    {
        lDBCon = new DBConnector(Program.DBCon.ConfigData, true);

        DataNames.ReadXml(Filename);

        lDBCon.TableRead("select * from tbLanguage", Data.tblanguage);
        lDBCon.TableRead("select * from tbLevelLocalization", Data.tblevellocalization);
        lDBCon.TableRead("select * from tbEconomyLevel", Data.tbeconomylevel);

        if (DataNames.Tables["Levels"] != null)
        {
            sendProgressEvent(new ProgressEventArgs() { Info = "import economy level localization", CurrentValue = Counter, TotalValue = DataNames.Tables["Levels"].Rows.Count });

            // first check if there's a new language
            foreach (DataColumn LanguageFromFile in DataNames.Tables["Levels"].Columns)
            {
                if (!LanguageFromFile.ColumnName.Equals("ID", StringComparison.InvariantCultureIgnoreCase))
                {
                    DataRow[] LanguageName = Data.tblanguage.Select("language = " + DBConnector.SQLAString(LanguageFromFile.ColumnName));

                    if (LanguageName.Length == 0)
                    {
                        // add a non existing language with the next free id
                        DataRow newRow = Data.tblanguage.NewRow();

                        int? Wert = DBConvert.To<int?>(Data.tblanguage.Compute("max(id)", ""));
                        if (Wert == null)
                            Wert = 0;
                        // bugfix: the increment was missing, so a new language reused the
                        // current max(id) and produced a duplicate key
                        // (same logic as in ImportCommodityLocalizations)
                        Wert += 1;

                        newRow["id"]       = Wert;
                        newRow["language"] = LanguageFromFile.ColumnName;

                        Data.tblanguage.Rows.Add(newRow);

                        foundLanguagesFromFile.Add(LanguageFromFile.ColumnName, (Int32)Wert);
                    }
                    else
                        foundLanguagesFromFile.Add((String)LanguageName[0]["language"], (Int32)LanguageName[0]["id"]);
                }
            }

            // submit changes (tbLanguage)
            lDBCon.TableUpdate(Data.tblanguage);

            // compare and add the localized names
            foreach (DataRow LocalizationFromFile in DataNames.Tables["Levels"].AsEnumerable())
            {
                String BaseName = (String)LocalizationFromFile[Program.BASE_LANGUAGE];

                // NOTE(review): a base-language level name that is not present in
                // "tbEconomyLevel" makes Level[0] below throw - confirm the level
                // names in the file are fixed/known
                DataRow[] Level = Data.tbeconomylevel.Select("level = " + DBConnector.SQLAString(DBConnector.DTEscape(BaseName)));

                foreach (KeyValuePair<String, Int32> LanguageFormFile in foundLanguagesFromFile)
                {
                    DataRow[] currentLocalizations = Data.tblevellocalization.Select(" economylevel_id = " + Level[0]["id"] + " and language_id = " + LanguageFormFile.Value);

                    if (currentLocalizations.Length == 0)
                    {
                        // add a new localization
                        dsEliteDB.tblevellocalizationRow newRow = (dsEliteDB.tblevellocalizationRow)Data.tblevellocalization.NewRow();

                        newRow.economylevel_id = (Int32)Level[0]["id"];
                        newRow.language_id     = LanguageFormFile.Value;
                        newRow.locname         = LocalizationFromFile[LanguageFormFile.Key].ToString();

                        Data.tblevellocalization.Rows.Add(newRow);
                    }
                }

                Counter++;
                sendProgressEvent(new ProgressEventArgs() { Info = "import economy level localization", CurrentValue = Counter, TotalValue = DataNames.Tables["Levels"].Rows.Count });
            }
        }

        // submit changes
        lDBCon.TableUpdate(Data.tblevellocalization);

        lDBCon.Dispose();
    }
    catch (Exception ex)
    {
        if (lDBCon != null)
            lDBCon.Dispose();

        // bugfix: message formerly claimed "loading commodity names" (copy/paste error)
        throw new Exception("Error while importing economy level localizations", ex);
    }
}
/// <summary>
/// Copies the commodity classification data (import/export/prohibited lists) from a
/// "EDStation"-object into "tbCommodityClassification" and "tbcommodity_has_attribute":
/// missing rows are added, still-valid rows are kept, rows no longer present in the
/// station object are removed from the in-memory tables.
/// </summary>
/// <param name="StationObject">source station whose commodity lists are transferred</param>
/// <param name="Data">dataset holding the in-memory copies of the affected tables</param>
/// <param name="AutoIndex">next free id for new classification rows; incremented for every added row</param>
private void CopyEDStationCommodityToDataRow(EDStation StationObject, dsEliteDB Data, ref UInt32 AutoIndex)
{
    Int32 i;
    String[] currentCommodityCollection = null;
    String currentCommodityAttribute = null;

    try
    {
        // all classification rows currently stored for this station; entries are removed
        // from this list as they are re-confirmed, leftovers are deleted at the end
        var existingClassification = ((dsEliteDB.tbcommodityclassificationRow[])Data.tbcommodityclassification.Select("station_id = " + StationObject.Id)).ToList();
        var newCommodityClassification = new Dictionary<String, List<String>>();

        // collect classification data: build "commodity name -> list of attributes"
        // from the three attribute collections of the station
        for(i=0 ; i<=2 ; i++)
        {
            switch (i)
            {
                case 0: currentCommodityCollection = StationObject.ImportCommodities;     currentCommodityAttribute = "import";     break;
                case 1: currentCommodityCollection = StationObject.ExportCommodities;     currentCommodityAttribute = "export";     break;
                case 2: currentCommodityCollection = StationObject.ProhibitedCommodities; currentCommodityAttribute = "prohibited"; break;
            }

            foreach (var Commodity in currentCommodityCollection)
            {
                // cycle through the <currentCommodityAttribute>-commodities from the station
                List<String> currentClassification;

                if(!newCommodityClassification.TryGetValue(Commodity, out currentClassification))
                {
                    // this commodity is not registered at all
                    var newCL = new List<String>() {currentCommodityAttribute};
                    newCommodityClassification.Add(Commodity, newCL);
                }
                else
                {
                    // this commodity is already registered
                    if(!currentClassification.Contains(currentCommodityAttribute))
                    {
                        // but not yet for this classification
                        currentClassification.Add(currentCommodityAttribute);
                    }
                }
            }
        }

        // process classification data
        foreach (var Classification in newCommodityClassification)
        {
            // get the current commodity id
            Int32 CommodityID = (Int32)DBConvert.From(BaseTableNameToID("commodity", Classification.Key));
            UInt32 CClassifID;

            // and check, if the commodity is already added to station
            var Found = from dsEliteDB.tbcommodityclassificationRow relevantCommodity in existingClassification
                        where ((relevantCommodity.RowState != DataRowState.Deleted) && (relevantCommodity.commodity_id == CommodityID))
                        select relevantCommodity;

            // if it's not existing, insert commodity
            if(Found.Count() == 0)
            {
                var newRow = (dsEliteDB.tbcommodityclassificationRow)Data.tbcommodityclassification.NewRow();

                newRow.id           = AutoIndex;
                newRow.station_id   = StationObject.Id;
                newRow.commodity_id = CommodityID;

                Data.tbcommodityclassification.Rows.Add(newRow);

                CClassifID = newRow.id;
                AutoIndex += 1;
            }
            else
            {
                // memorize the id and remove commodity from list to mark it as "found"
                CClassifID = Found.First().id;
                existingClassification.Remove(Found.First());
                //Debug.Print("removed " + Classification.Key);
            }

            // all attribute rows currently stored for this classification; same
            // "confirm or delete" scheme as for the classifications above
            var existingAttributes = ((dsEliteDB.tbcommodity_has_attributeRow[])Data.tbcommodity_has_attribute.Select("tbcommodityclassification_id = " + CClassifID)).ToList();

            // now check the attributes for this commodity
            foreach (var Attribute in Classification.Value)
            {
                // get the current attribute id
                Int32 AttributeID = (Int32)DBConvert.From(BaseTableNameToID("attribute", Attribute));

                // and check, if the attribute is already added to classification
                var FoundCC = from dsEliteDB.tbcommodity_has_attributeRow relevantCommodity in existingAttributes
                              where relevantCommodity.RowState != DataRowState.Deleted && relevantCommodity.tbAttribute_id == AttributeID
                              select relevantCommodity;

                // if it's not existing, insert attribute
                if(FoundCC.Count() == 0)
                {
                    var newRow = (dsEliteDB.tbcommodity_has_attributeRow)Data.tbcommodity_has_attribute.NewRow();

                    newRow.tbAttribute_id                = AttributeID;
                    newRow.tbCommodityClassification_id  = CClassifID;

                    Data.tbcommodity_has_attribute.Rows.Add(newRow);
                }
                else
                {
                    // remove attribute from list to mark it as "found"
                    existingAttributes.Remove(FoundCC.First());
                }
            }

            // remove all old, not more existing attributes
            // NOTE(review): Rows.Remove() detaches the row instead of marking it Deleted,
            // so a later TableUpdate will NOT delete it from the database - looks like
            // this should be RemovedRow.Delete(); confirm against DBConnector.TableUpdate
            foreach (DataRow RemovedRow in existingAttributes)
                Data.tbcommodity_has_attribute.Rows.Remove(RemovedRow);
        }

        // remove all old, not more existing classification
        // NOTE(review): same Rows.Remove() vs. DataRow.Delete() concern as above
        foreach (DataRow RemovedRow in existingClassification)
            Data.tbcommodityclassification.Rows.Remove(RemovedRow);
    }
    catch (Exception ex)
    {
        throw new Exception("Error while copying station commodity data", ex);
    }
}
/// <summary>
/// Loads the data from the basetables into memory. For correct initialization it is
/// necessary to call this function once with an unset (empty) TableName.
/// </summary>
/// <param name="TableName">single basetable to reload or save; empty = initial load of all basetables</param>
/// <param name="saveChanged">true: save pending changes of TableName; false: (re)load TableName</param>
internal void PrepareBaseTables(String TableName = "", Boolean saveChanged = false)
{
    PerformanceTimer Runtime;
    DBConnector currentDBCon;

    try
    {
        Runtime = new PerformanceTimer();

        if(String.IsNullOrEmpty(TableName))
        {
            // initial call: create the dataset/connector cache and load all basetables
            if(m_BaseData == null)
            {
                m_BaseData = new dsEliteDB();
                m_BaseData_Connector = new Dictionary<DataTable, DBConnector>();
            }

            foreach (String BaseTable in BaseTables_Systems)
            {
                if (!m_BaseData_Connector.TryGetValue(m_BaseData.Tables[BaseTable], out currentDBCon))
                {
                    // each basetable gets its own DBConnector, because
                    // the contained DataReaders will be held open for possible
                    // changes (MySQL doesn't support MARS "Multiple Active Result Sets")
                    currentDBCon = new DBConnector(Program.DBCon.ConfigData);
                    m_BaseData_Connector.Add(m_BaseData.Tables[BaseTable], currentDBCon);
                    currentDBCon.Connect();
                }

                Runtime.startMeasuring();

                m_BaseData.Tables[BaseTable].Clear();

                if (!Program.SplashScreen.IsDisposed)
                    Program.SplashScreen.InfoAdd("...loading basetable '" + BaseTable + "'...");

                // preload all tables with base data
                currentDBCon.TableRead(String.Format("select * from {0}", BaseTable), BaseTable, m_BaseData);

                if (!Program.SplashScreen.IsDisposed)
                    Program.SplashScreen.InfoAppendLast("<OK>");

                Runtime.PrintAndReset("loading full table '" + BaseTable + "':");
            }
        }
        else if(BaseTables_Systems.Contains(TableName))
        {
            currentDBCon = m_BaseData_Connector[m_BaseData.Tables[TableName]];

            if(saveChanged)
            {
                // save all contained changes
                // bugfix: measuring formerly was never started and the result was
                // printed BEFORE the update ran, so no real time was reported
                Runtime.startMeasuring();

                currentDBCon.TableUpdate(TableName, m_BaseData);

                Runtime.PrintAndReset("saving changes in table '" + TableName + "':");
            }
            else
            {
                Runtime.startMeasuring();

                m_BaseData.Tables[TableName].Clear();

                // reload selected table
                currentDBCon.TableRead("", TableName, m_BaseData);

                Runtime.PrintAndReset("re-loading full table '" + TableName + "':");
            }
        }
        else
        {
            throw new Exception(string.Format("Attempt to load an unknown basetable : <{0}>", TableName));
        }
    }
    catch (Exception ex)
    {
        throw new Exception("Error while preparing base tables", ex);
    }
}
/// <summary>
/// Imports the station data from the file into the database
/// (only newer data will be imported).
/// </summary>
/// <param name="Filename">JSON file (EDDB format) with a list of stations</param>
/// <param name="addPrices">true: also import the market prices of the stations afterwards</param>
public void ImportStations(String Filename, Boolean addPrices)
{
    DBConnector lDBCon = null;
    String sqlString;
    List<EDStation> Stations;
    dsEliteDB.tbstations_orgRow[] FoundRows_org;
    dsEliteDB.tbstationsRow[] FoundRows;
    DateTime Timestamp_new, Timestamp_old;
    Int32 ImportCounter = 0;
    dsEliteDB Data;
    Int32 Counter = 0;
    UInt32 currentComodityClassificationID = 0;
    Int32 added = 0;

    Data = new dsEliteDB();

    try
    {
        lDBCon = new DBConnector(Program.DBCon.ConfigData, true);

        // gettin' some freaky performance
        lDBCon.Execute("set global innodb_flush_log_at_trx_commit=2");

        Stations = JsonConvert.DeserializeObject<List<EDStation>>(File.ReadAllText(Filename));

        sendProgressEvent(new ProgressEventArgs() { Info = "import systems", NewLine = true });

        lDBCon.TransBegin();

        sqlString = "select * from tbStations lock in share mode";
        lDBCon.TableRead(sqlString, Data.tbstations);
        sqlString = "select * from tbStations_org lock in share mode";
        lDBCon.TableRead(sqlString, Data.tbstations_org);
        sqlString = "select * from tbStationEconomy lock in share mode";
        lDBCon.TableRead(sqlString, Data.tbstationeconomy);
        sqlString = "select * from tbsource";
        lDBCon.Execute(sqlString, Data.tbsource);
        sqlString = "select * from tbCommodityClassification lock in share mode";
        lDBCon.TableRead(sqlString, Data.tbcommodityclassification);
        sqlString = "select * from tbcommodity_has_attribute lock in share mode";
        lDBCon.TableRead(sqlString, Data.tbcommodity_has_attribute);
        sqlString = "select * from tbattribute lock in share mode";
        lDBCon.TableRead(sqlString, Data.tbattribute);

        currentComodityClassificationID = getFreeIndex("tbCommodityClassification");

        // bugfix: a stray duplicate <lDBCon.Execute(sqlString, Data.tbsource)> stood here
        // and re-ran the tbattribute query into the tbsource table - removed
        // (tbsource is already loaded above)

        foreach (EDStation Station in Stations)
        {
            FoundRows = (dsEliteDB.tbstationsRow[])Data.tbstations.Select("id=" + Station.Id.ToString());

            if (FoundRows.Count() > 0)
            {
                // Location is existing
                if ((bool)(FoundRows[0]["is_changed"]))
                {
                    // data is changed by user - hold it ...
                    // ...and check table "tbStations_org" for the original data
                    FoundRows_org = (dsEliteDB.tbstations_orgRow[])Data.tbstations_org.Select("id=" + Station.Id.ToString());

                    if ((FoundRows_org != null) && (FoundRows_org.Count() > 0))
                    {
                        // Location is in "tbStations_org" existing - keep the newer version
                        Timestamp_old = (DateTime)(FoundRows_org[0]["updated_at"]);
                        Timestamp_new = DateTimeOffset.FromUnixTimeSeconds(Station.UpdatedAt).DateTime;

                        if (Timestamp_new > Timestamp_old)
                        {
                            // data from file is newer
                            CopyEDStationToDataRow(Station, (DataRow)FoundRows_org[0], false, null, true);
                            CopyEDStationEconomiesToDataRows(Station, Data.tbstationeconomy);
                            CopyEDStationCommodityToDataRow(Station, Data, ref currentComodityClassificationID);

                            ImportCounter += 1;
                        }
                    }
                }
                else
                {
                    // Location is existing - keep the newer version
                    Timestamp_old = (DateTime)(FoundRows[0]["updated_at"]);
                    Timestamp_new = DateTimeOffset.FromUnixTimeSeconds(Station.UpdatedAt).DateTime;

                    if (Timestamp_new > Timestamp_old)
                    {
                        // data from file is newer
                        CopyEDStationToDataRow(Station, (DataRow)FoundRows[0], false, null, true);
                        CopyEDStationEconomiesToDataRows(Station, Data.tbstationeconomy);
                        CopyEDStationCommodityToDataRow(Station, Data, ref currentComodityClassificationID);

                        ImportCounter += 1;
                    }
                }
            }
            else
            {
                // self-created stations don't have the correct id so they must be identified by name
                FoundRows = (dsEliteDB.tbstationsRow[])Data.tbstations.Select("stationname = " + DBConnector.SQLAString(DBConnector.DTEscape(Station.Name.ToString())) + " and " +
                                                                              " system_id = " + Station.SystemId + " and " +
                                                                              " id < 0");

                if (FoundRows.Count() > 0)
                {
                    // self created station is existing -> correct id and get new data from EDDB
                    CopyEDStationToDataRow(Station, (DataRow)FoundRows[0], false, null, true);

                    // update immediately because otherwise the references are wrong after changing a id
                    lDBCon.TableUpdate(Data.tbstations);
                    lDBCon.TableUpdate(Data.tbstations_org);
                    lDBCon.TableUpdate(Data.tbstationeconomy);
                    lDBCon.TableUpdate(Data.tbcommodityclassification);
                    lDBCon.TableUpdate(Data.tbcommodity_has_attribute);
                    lDBCon.TableUpdate(Data.tbattribute);

                    lDBCon.TableRefresh(Data.tbstationeconomy);
                    lDBCon.TableRefresh(Data.tbcommodityclassification);
                    lDBCon.TableRefresh(Data.tbcommodity_has_attribute);
                    lDBCon.TableRefresh(Data.tbattribute);
                }
                else
                {
                    // add a new Location
                    dsEliteDB.tbstationsRow newStationRow = (dsEliteDB.tbstationsRow)Data.tbstations.NewRow();

                    CopyEDStationToDataRow(Station, (DataRow)newStationRow, false, null, true);

                    Data.tbstations.Rows.Add(newStationRow);

                    added++;
                }

                CopyEDStationEconomiesToDataRows(Station, Data.tbstationeconomy);
                CopyEDStationCommodityToDataRow(Station, Data, ref currentComodityClassificationID);

                ImportCounter += 1;
            }

            if ((ImportCounter > 0) && ((ImportCounter % 100) == 0))
            {
                // save changes in batches of 100 to keep memory/transaction size down
                Debug.Print("added Stations : " + ImportCounter.ToString());
                lDBCon.TableUpdate(Data.tbstations);
                lDBCon.TableUpdate(Data.tbstations_org);
                lDBCon.TableUpdate(Data.tbstationeconomy);
                lDBCon.TableUpdate(Data.tbcommodityclassification);
                lDBCon.TableUpdate(Data.tbcommodity_has_attribute);
                lDBCon.TableUpdate(Data.tbattribute);
            }

            Counter++;

            // a "true" return value means the user requested a cancel
            if (sendProgressEvent(new ProgressEventArgs() { Info = String.Format("import stations : analysed={0}, updated={1}, added={2}", Counter, ImportCounter - added, added), CurrentValue = Counter, TotalValue = Stations.Count }))
                break;
        }

        // save remaining changes
        lDBCon.TableUpdate(Data.tbstations);
        lDBCon.TableUpdate(Data.tbstations_org);
        lDBCon.TableUpdate(Data.tbstationeconomy);
        lDBCon.TableUpdate(Data.tbcommodityclassification);
        lDBCon.TableUpdate(Data.tbcommodity_has_attribute);
        lDBCon.TableUpdate(Data.tbattribute);

        lDBCon.TransCommit();

        // now add the prices if wanted
        if (addPrices)
        {
            ImportPrices(Stations, enImportBehaviour.OnlyNewer, enDataSource.fromEDDN);
        }

        // reset freaky performance
        lDBCon.Execute("set global innodb_flush_log_at_trx_commit=1");

        lDBCon.Dispose();
    }
    catch (Exception ex)
    {
        if (lDBCon != null)
        {
            if (lDBCon.TransActive())
                lDBCon.TransRollback();

            try
            {
                // reset freaky performance
                lDBCon.Execute("set global innodb_flush_log_at_trx_commit=1");
                lDBCon.Dispose();
            }
            catch (Exception) { }
        }

        throw new Exception("Error while importing Station data", ex);
    }
}
/// <summary>
/// Imports the "own" (self-added) station data into the database.
/// </summary>
/// <param name="Stations">stations to import</param>
/// <param name="changedSystemIDs">mapping of old to new system ids from a previous ImportSystems_Own run</param>
/// <param name="OnlyAddUnknown">true: only add stations that are not yet in the database</param>
/// <param name="setVisitedFlag">true: mark newly added stations as visited</param>
public void ImportStations_Own(List<EDStation> Stations, Dictionary<Int32, Int32> changedSystemIDs, Boolean OnlyAddUnknown = false, Boolean setVisitedFlag = false)
{
    DBConnector lDBCon = null;
    String sqlString;
    dsEliteDB.tbstationsRow[] FoundRows;
    dsEliteDB.tbsystemsRow[] FoundSysRows;
    DateTime Timestamp_new, Timestamp_old;
    Int32 ImportCounter = 0;
    Int32 currentSelfCreatedIndex = -1;
    dsEliteDB Data = new dsEliteDB();
    Int32 Counter = 0;

    try
    {
        lDBCon = new DBConnector(Program.DBCon.ConfigData, true);

        // gettin' some freaky performance
        lDBCon.Execute("set global innodb_flush_log_at_trx_commit=2");

        lDBCon.TransBegin();

        sqlString = "select * from tbStations lock in share mode";
        lDBCon.TableRead(sqlString, Data.tbstations);
        sqlString = "select * from tbStations_org lock in share mode";
        lDBCon.TableRead(sqlString, Data.tbstations_org);
        sqlString = "select * from tbStationEconomy lock in share mode";
        lDBCon.TableRead(sqlString, Data.tbstationeconomy);

        // bugfix: tbSystems is queried below (system id lookup by name) but was
        // never loaded, so the lookup always failed on the empty in-memory table
        sqlString = "select * from tbSystems lock in share mode";
        lDBCon.TableRead(sqlString, Data.tbsystems);

        // get the smallest ID for self added stations
        sqlString = "select min(id) As min_id from tbStations";
        lDBCon.Execute(sqlString, "minID", Data);

        if (Convert.IsDBNull(Data.Tables["minID"].Rows[0]["min_id"]))
            currentSelfCreatedIndex = -1;
        else
        {
            currentSelfCreatedIndex = ((Int32)Data.Tables["minID"].Rows[0]["min_id"]) - 1;
            if (currentSelfCreatedIndex >= 0)
                currentSelfCreatedIndex = -1;
        }

        sendProgressEvent(new ProgressEventArgs() { Info = "import self-added stations", CurrentValue = Counter, TotalValue = Stations.Count });

        foreach (EDStation Station in Stations)
        {
            Int32 SystemID;

            // is the system id changed ? --> get the new system id, otherwise the original
            if (changedSystemIDs.TryGetValue(Station.SystemId, out SystemID))
                Station.SystemId = SystemID;

            // if there are missing system ids, try to get them by the system name
            if ((Station.SystemId == 0) && (!String.IsNullOrEmpty(Station.SystemName)))
            {
                FoundSysRows = (dsEliteDB.tbsystemsRow[])Data.tbsystems.Select("systemname=" + DBConnector.SQLAString(DBConnector.DTEscape(Station.SystemName)));

                if ((FoundSysRows != null) && (FoundSysRows.Count() > 0))
                {
                    // got it - set the id
                    Station.SystemId = FoundSysRows[0].id;
                }
            }

            if (!String.IsNullOrEmpty(Station.Name.Trim()) && (Station.SystemId != 0))
            {
                // self-created stations don't have the correct id so they must be identified by name
                FoundRows = (dsEliteDB.tbstationsRow[])Data.tbstations.Select("stationname=" + DBConnector.SQLAString(DBConnector.DTEscape(Station.Name)) + " and " +
                                                                              "system_id = " + Station.SystemId);

                if ((FoundRows != null) && (FoundRows.Count() > 0))
                {
                    // Location is existing, get the same Id
                    Station.Id = (Int32)FoundRows[0]["id"];

                    if (!OnlyAddUnknown)
                    {
                        if ((bool)(FoundRows[0]["is_changed"]))
                        {
                            // existing data is also changed by user - keep the newer version
                            Timestamp_old = (DateTime)(FoundRows[0]["updated_at"]);
                            Timestamp_new = DateTimeOffset.FromUnixTimeSeconds(Station.UpdatedAt).DateTime;

                            if (Timestamp_new > Timestamp_old)
                            {
                                // data from file is newer
                                CopyEDStationToDataRow(Station, (DataRow)FoundRows[0], true, null, true);
                                CopyEDStationEconomiesToDataRows(Station, Data.tbstationeconomy);

                                // commodities are not considered because there was no possibility for input in the old RN
                                ImportCounter += 1;
                            }
                        }
                        else
                        {
                            // new data is user changed data, old data is original data
                            // copy the original data ("tbStations") to the saving data table ("tbStations_org")
                            // and get the correct system ID
                            Data.tbstations_org.LoadDataRow(FoundRows[0].ItemArray, false);

                            CopyEDStationToDataRow(Station, (DataRow)FoundRows[0], true, null, true);
                            CopyEDStationEconomiesToDataRows(Station, Data.tbstationeconomy);

                            // commodities are not considered because there was no possibility for input in the old RN
                            ImportCounter += 1;
                        }
                    }
                }
                else
                {
                    // add a new station with a self-created (negative) id
                    Station.Id = currentSelfCreatedIndex;

                    dsEliteDB.tbstationsRow newRow = (dsEliteDB.tbstationsRow)Data.tbstations.NewRow();

                    CopyEDStationToDataRow(Station, (DataRow)newRow, true, null, true);

                    newRow.visited    = setVisitedFlag;
                    newRow.updated_at = DateTime.UtcNow;

                    Data.tbstations.Rows.Add(newRow);

                    currentSelfCreatedIndex -= 1;

                    CopyEDStationEconomiesToDataRows(Station, Data.tbstationeconomy);

                    // commodities are not considered because there was no possibility for input in the old RN
                    // bugfix: ImportCounter was formerly incremented twice in this branch
                    ImportCounter += 1;
                }

                if ((ImportCounter > 0) && ((ImportCounter % 100) == 0))
                {
                    // save changes in batches of 100
                    Debug.Print("added Stations : " + ImportCounter.ToString());
                    lDBCon.TableUpdate(Data.tbstations);
                    lDBCon.TableUpdate(Data.tbstations_org);
                    lDBCon.TableUpdate(Data.tbstationeconomy);
                }
            }
            // stations without a name or without a resolvable system id are skipped

            Counter++;
            sendProgressEvent(new ProgressEventArgs() { Info = "import self-added stations", CurrentValue = Counter, TotalValue = Stations.Count });
        }

        // save remaining changes
        lDBCon.TableUpdate(Data.tbstations);
        lDBCon.TableUpdate(Data.tbstations_org);
        lDBCon.TableUpdate(Data.tbstationeconomy);

        lDBCon.TransCommit();

        // reset freaky performance
        lDBCon.Execute("set global innodb_flush_log_at_trx_commit=1");

        lDBCon.Dispose();
    }
    catch (Exception ex)
    {
        if (lDBCon != null)
        {
            if (lDBCon.TransActive())
                lDBCon.TransRollback();

            try
            {
                // bugfix: the connection was formerly disposed BEFORE the reset
                // statement ran (and then disposed a second time)
                // reset freaky performance
                lDBCon.Execute("set global innodb_flush_log_at_trx_commit=1");
                lDBCon.Dispose();
            }
            catch (Exception) { }
        }

        throw new Exception("Error while importing Station data", ex);
    }
}
/// <summary>
/// Imports the system data from the file into the database
/// (only newer data will be imported). The file is streamed object by object
/// to keep the memory footprint low: a first pass only counts the systems for
/// the progress events, the second pass processes them.
/// </summary>
/// <param name="Filename">JSON file (EDDB format) with a list of systems</param>
public void ImportSystems(String Filename)
{
    DBConnector lDBCon = null;
    EDSystem importSystem;
    DateTime Timestamp_new, Timestamp_old;
    Int32 ImportCounter = 0;
    dsEliteDB localDataSet;
    Int32 counter = 0;
    Boolean dataChanged;
    Int32 added = 0;
    MySql.Data.MySqlClient.MySqlDataAdapter dataAdapter_sys = null;
    MySql.Data.MySqlClient.MySqlDataAdapter dataAdapter_sysorg = null;
    Int32 systemsTotal = 0;

    localDataSet = new dsEliteDB();

    try
    {
        lDBCon = new DBConnector(Program.DBCon.ConfigData, true);

        // gettin' some freaky performance
        lDBCon.Execute("set global innodb_flush_log_at_trx_commit=2");

        StreamReader rawDataStream;
        JsonTextReader jsonReader;
        JsonSerializer serializer = new JsonSerializer();

        sendProgressEvent(new ProgressEventArgs() { Info = "import systems...", NewLine = true });

        // first pass: only count the systems for the progress events
        rawDataStream = new StreamReader(Filename);
        jsonReader = new JsonTextReader(rawDataStream);

        while (jsonReader.Read())
            if ((jsonReader.TokenType == JsonToken.StartObject) && (jsonReader.Depth == 1))
                systemsTotal++;

        jsonReader.Close();
        rawDataStream.Close();
        rawDataStream.Dispose();

        // second pass: deserialize and process one system at a time
        rawDataStream = new StreamReader(Filename);
        jsonReader = new JsonTextReader(rawDataStream);

        while (jsonReader.Read())
        {
            if ((jsonReader.TokenType == JsonToken.StartObject) && (jsonReader.Depth == 1))
            {
                dataChanged = false;

                importSystem = serializer.Deserialize<EDSystem>(jsonReader);

                localDataSet.Clear();

                if (dataAdapter_sys != null)
                {
                    dataAdapter_sys.Dispose();
                    dataAdapter_sys = null;
                }

                if (dataAdapter_sysorg != null)
                {
                    dataAdapter_sysorg.Dispose();
                    dataAdapter_sysorg = null;
                }

                lDBCon.TableRead(String.Format("select * from tbSystems where id = {0} lock in share mode;", importSystem.Id), localDataSet.tbsystems, ref dataAdapter_sys);

                if (localDataSet.tbsystems.Rows.Count > 0)
                {
                    // system is existing
                    if ((bool)(localDataSet.tbsystems.Rows[0]["is_changed"]))
                    {
                        // data is changed by user - hold it ...
                        // ...and check table "tbSystems_org" for the original data
                        lDBCon.TableRead(String.Format("select * from tbSystems_org where id = {0} lock in share mode;", importSystem.Id), localDataSet.tbsystems_org, ref dataAdapter_sysorg);

                        if (localDataSet.tbsystems_org.Rows.Count > 0)
                        {
                            // system is in "tbSystems_org" existing - keep the newer version
                            Timestamp_old = (DateTime)(localDataSet.tbsystems_org.Rows[0]["updated_at"]);
                            Timestamp_new = DateTimeOffset.FromUnixTimeSeconds(importSystem.UpdatedAt).DateTime;

                            if (Timestamp_new > Timestamp_old)
                            {
                                // data from file is newer
                                CopyEDSystemToDataRow(importSystem, (DataRow)localDataSet.tbsystems_org.Rows[0], false, null, true);

                                ImportCounter += 1;
                                dataChanged = true;
                            }
                        }
                    }
                    else
                    {
                        // system is existing - keep the newer version
                        Timestamp_old = (DateTime)(localDataSet.tbsystems.Rows[0]["updated_at"]);
                        Timestamp_new = DateTimeOffset.FromUnixTimeSeconds(importSystem.UpdatedAt).DateTime;

                        if (Timestamp_new > Timestamp_old)
                        {
                            // data from file is newer
                            CopyEDSystemToDataRow(importSystem, localDataSet.tbsystems.Rows[0], false, null, true);

                            ImportCounter += 1;
                            dataChanged = true;
                        }
                    }
                }
                else
                {
                    if (dataAdapter_sys != null)
                    {
                        dataAdapter_sys.Dispose();
                        dataAdapter_sys = null;
                    }

                    // check if there's a user generated system
                    // self-created systems don't have the correct id so it must be identified by name
                    lDBCon.TableRead(String.Format("select * from tbSystems where systemname = {0} and id < 0 lock in share mode;", DBConnector.SQLAEscape(importSystem.Name.ToString())), localDataSet.tbsystems, ref dataAdapter_sys);

                    if (localDataSet.tbsystems.Rows.Count > 0)
                    {
                        // self created system is existing -> correct id and get new data from EDDB
                        // (changed system_id in tbStations are automatically internal updated by the database itself)
                        CopyEDSystemToDataRow(importSystem, (DataRow)localDataSet.tbsystems.Rows[0], false, null, true);
                        dataChanged = true;
                    }
                    else
                    {
                        // add a new system
                        dsEliteDB.tbsystemsRow newRow = (dsEliteDB.tbsystemsRow)localDataSet.tbsystems.NewRow();

                        CopyEDSystemToDataRow(importSystem, (DataRow)newRow, false, null, true);

                        localDataSet.tbsystems.Rows.Add(newRow);
                        dataChanged = true;
                    }

                    added += 1;
                    ImportCounter += 1;
                }

                if (dataChanged)
                {
                    if (localDataSet.tbsystems.Rows.Count > 0)
                        lDBCon.TableUpdate(localDataSet.tbsystems, dataAdapter_sys);

                    if (localDataSet.tbsystems_org.Rows.Count > 0)
                        lDBCon.TableUpdate(localDataSet.tbsystems_org, dataAdapter_sysorg);
                }

                counter++;

                // a "true" return value means the user requested a cancel
                if (sendProgressEvent(new ProgressEventArgs() { Info = String.Format("import systems : analysed={0}, updated={1}, added={2}", counter, ImportCounter - added, added), CurrentValue = counter, TotalValue = systemsTotal }))
                    break;
            }
        }

        // bugfix: the reader/stream of the second pass was formerly never closed
        jsonReader.Close();
        rawDataStream.Close();
        rawDataStream.Dispose();

        if (dataAdapter_sys != null)
            dataAdapter_sys.Dispose();

        if (dataAdapter_sysorg != null)
            dataAdapter_sysorg.Dispose();

        // reset freaky performance
        lDBCon.Execute("set global innodb_flush_log_at_trx_commit=1");

        lDBCon.Dispose();
    }
    catch (Exception ex)
    {
        if (lDBCon != null)
        {
            if (lDBCon.TransActive())
                lDBCon.TransRollback();

            try
            {
                // reset freaky performance
                lDBCon.Execute("set global innodb_flush_log_at_trx_commit=1");
                lDBCon.Dispose();
            }
            catch (Exception) { }
        }

        throw new Exception("Error while importing system data", ex);
    }
}
/// <summary>
/// Imports self-added (user created) systems from the given list into tbSystems.
/// Self-created systems have no valid EDDB id, so each system is matched by name;
/// matches are merged (newest data wins for user-changed rows), misses are inserted
/// with a new negative self-created id.
/// </summary>
/// <param name="Systems">systems to import; the Id of each entry is corrected in place to the database id</param>
/// <param name="OnlyAddUnknown">if true, existing systems are left untouched (only their ids are resolved)</param>
/// <param name="setVisitedFlag">if true, newly added systems are marked as visited</param>
/// <returns>mapping old (file) system id -> new database id, for the follow-up station import</returns>
/// <exception cref="Exception">wraps any error raised while importing (transaction is rolled back)</exception>
public Dictionary<Int32, Int32> ImportSystems_Own(ref List<EDSystem> Systems, Boolean OnlyAddUnknown = false, Boolean setVisitedFlag = false)
{
    DBConnector lDBCon = null;
    String sqlString;
    dsEliteDB.tbsystemsRow[] FoundRows;
    DateTime Timestamp_new, Timestamp_old;
    Int32 ImportCounter = 0;
    Dictionary<Int32, Int32> changedSystemIDs = new Dictionary<Int32, Int32>();
    Int32 currentSelfCreatedIndex = -1;
    dsEliteDB Data;
    Int32 Counter = 0;

    try
    {
        // separate connection so the long-running import doesn't block the main one
        lDBCon = new DBConnector(Program.DBCon.ConfigData, true);

        Data = new dsEliteDB();

        // gettin' some freaky performance
        // (relaxed InnoDB log flushing while bulk-importing; restored below)
        lDBCon.Execute("set global innodb_flush_log_at_trx_commit=2");

        lDBCon.TransBegin();

        sqlString = "select * from tbSystems lock in share mode";
        lDBCon.TableRead(sqlString, Data.tbsystems);

        sqlString = "select * from tbSystems_org lock in share mode";
        lDBCon.TableRead(sqlString, Data.tbsystems_org);

        // next free negative id for self-created systems
        currentSelfCreatedIndex = getNextOwnSystemIndex();

        sendProgressEvent(new ProgressEventArgs() { Info="import self-added systems", CurrentValue=Counter, TotalValue=Systems.Count });

        foreach (EDSystem System in Systems)
        {
            // entries with empty names are skipped entirely (but still counted for progress)
            if (!String.IsNullOrEmpty(System.Name.ToString().Trim()))
            {
                // self-created systems don't have the correct id so it must be identified by name
                FoundRows = (dsEliteDB.tbsystemsRow[])Data.tbsystems.Select("systemname=" + DBConnector.SQLAString(DBConnector.DTEscape(System.Name.ToString())));

                if ((FoundRows != null) && (FoundRows.Count() > 0))
                {
                    if (!OnlyAddUnknown)
                    {
                        // system is existing
                        // memorize the changed system ids for importing user changed stations in the (recommend) second step
                        // NOTE(review): Add() throws if the same System.Id occurs twice in the input list - confirm input is unique
                        changedSystemIDs.Add(System.Id, (Int32)FoundRows[0]["id"]);

                        System.Id = (Int32)FoundRows[0]["id"];

                        if ((bool)(FoundRows[0]["is_changed"]))
                        {
                            // old data is changed by user and the new data is also a user changed data
                            // keep the newer version in the main table
                            Timestamp_old = (DateTime)(FoundRows[0]["updated_at"]);
                            Timestamp_new = DateTimeOffset.FromUnixTimeSeconds(System.UpdatedAt).DateTime;

                            if (Timestamp_new > Timestamp_old)
                            {
                                // data from file is newer -> take it but hold the old id
                                CopyEDSystemToDataRow(System, (DataRow)FoundRows[0], true, null, true);
                                ImportCounter += 1;
                            }
                        }
                        else
                        {
                            // new data is user changed data, old data is original data
                            // copy the original data ("tbSystems") to the saving data table ("tbSystems_org")
                            // and get the correct system ID
                            Data.tbsystems_org.LoadDataRow(FoundRows[0].ItemArray, false);
                            CopyEDSystemToDataRow(System, (DataRow)FoundRows[0], true, null, true);
                            ImportCounter += 1;
                        }
                    }
                    else
                    {
                        // only resolve the id, leave existing data untouched
                        System.Id = (Int32)FoundRows[0]["id"];
                    }
                }
                else
                {
                    // add a new system
                    // memorize the changed system ids for importing user changed stations in the (recommend) second step
                    if (!OnlyAddUnknown)
                        changedSystemIDs.Add(System.Id, currentSelfCreatedIndex);

                    System.Id = currentSelfCreatedIndex;

                    dsEliteDB.tbsystemsRow newRow = (dsEliteDB.tbsystemsRow)Data.tbsystems.NewRow();
                    CopyEDSystemToDataRow(System, (DataRow)newRow, true, null, true);
                    newRow.visited    = setVisitedFlag;
                    newRow.updated_at = DateTime.UtcNow;

                    Data.tbsystems.Rows.Add(newRow);

                    currentSelfCreatedIndex -= 1;
                    ImportCounter += 1;
                }

                if ((ImportCounter > 0) && ((ImportCounter % 100) == 0))
                {
                    // save changes in batches of 100 to keep memory/locking in check
                    Debug.Print("added Systems : " + ImportCounter.ToString());
                    lDBCon.TableUpdate(Data.tbsystems);
                    lDBCon.TableUpdate(Data.tbsystems_org);
                }
            }

            Counter++;
            sendProgressEvent(new ProgressEventArgs() { Info="import self-added systems", CurrentValue=Counter, TotalValue=Systems.Count });
        }

        // save changes (remaining rows of the last partial batch)
        lDBCon.TableUpdate(Data.tbsystems);
        lDBCon.TableUpdate(Data.tbsystems_org);

        lDBCon.TransCommit();

        // reset freaky performance
        lDBCon.Execute("set global innodb_flush_log_at_trx_commit=1");

        lDBCon.Dispose();

        // return all changed ids
        return changedSystemIDs;
    }
    catch (Exception ex)
    {
        if(lDBCon != null)
        {
            if (lDBCon.TransActive())
                lDBCon.TransRollback();

            try
            {
                // reset freaky performance (best effort - connection may already be broken)
                lDBCon.Execute("set global innodb_flush_log_at_trx_commit=1");
                lDBCon.Dispose();
            }
            catch (Exception) { }
        }

        throw new Exception("Error while importing system data", ex);
    }
}
/// <summary>
/// Loads localized commodity names from the "Names" table of the given DataSet and
/// merges them into the database: unknown languages are registered in tbLanguage,
/// unknown commodities get a self-created (negative) id in tbCommodity, and missing
/// localizations are inserted into tbCommodityLocalization.
/// </summary>
/// <param name="DataNames">DataSet with a "Names" table: one column per language
/// (column name = language name), one row per commodity; a "minID" table is added
/// as a side effect of the internal min(id) query</param>
/// <exception cref="Exception">wraps any error raised while importing</exception>
internal void ImportCommodityLocalizations(DataSet DataNames)
{
    dsEliteDB Data;
    Dictionary<String, Int32> foundLanguagesFromFile = new Dictionary<String, Int32>();
    String sqlString;
    Int32 currentSelfCreatedIndex;
    Int32 Counter = 0;

    Data = new dsEliteDB();

    try
    {
        // self-created commodities get negative ids below the current minimum
        sqlString = "select min(id) As min_id from tbCommodity";
        Program.DBCon.Execute(sqlString, "minID", DataNames);

        if(Convert.IsDBNull(DataNames.Tables["minID"].Rows[0]["min_id"]))
            currentSelfCreatedIndex = -1;
        else
        {
            currentSelfCreatedIndex = ((Int32)DataNames.Tables["minID"].Rows[0]["min_id"]) - 1;
            if(currentSelfCreatedIndex >= 0)
                currentSelfCreatedIndex = -1;
        }

        Program.DBCon.TableRead("select * from tbLanguage", Data.tblanguage);
        Program.DBCon.TableRead("select * from tbCommodityLocalization", Data.tbcommoditylocalization);
        Program.DBCon.TableRead("select * from tbCommodity", Data.tbcommodity);

        if(DataNames.Tables["Names"] != null)
        {
            sendProgressEvent("import commodity localization", Counter, DataNames.Tables["Names"].Rows.Count);

            // first check if there's a new language
            foreach (DataColumn LanguageFromFile in DataNames.Tables["Names"].Columns)
            {
                // skip a technical "id" column - it is not a language
                // (consistent with the overload taking enLocalisationImportType)
                if(LanguageFromFile.ColumnName.Equals("id", StringComparison.InvariantCultureIgnoreCase))
                    continue;

                DataRow[] LanguageName = Data.tblanguage.Select("language = " + DBConnector.SQLAString(LanguageFromFile.ColumnName));

                if(LanguageName.Count() == 0)
                {
                    // add a non existing language
                    // bugfix: the new id must be max(id)+1 - using max(id) itself
                    // would collide with the id of an already existing language row
                    DataRow newRow = Data.tblanguage.NewRow();
                    int? Wert = DBConvert.To<int?>(Data.tblanguage.Compute("max(id)", ""));
                    if(Wert == null)
                        Wert = 0;           // empty table: start at 0
                    else
                        Wert = Wert + 1;    // next free id

                    newRow["id"]       = Wert;
                    newRow["language"] = LanguageFromFile.ColumnName;
                    Data.tblanguage.Rows.Add(newRow);

                    foundLanguagesFromFile.Add(LanguageFromFile.ColumnName, (Int32)Wert);
                }
                else
                    foundLanguagesFromFile.Add((String)LanguageName[0]["language"], (Int32)LanguageName[0]["id"]);
            }

            // submit changes (tbLanguage)
            Program.DBCon.TableUpdate(Data.tblanguage);

            // compare and add the localized names
            foreach (DataRow LocalizationFromFile in DataNames.Tables["Names"].AsEnumerable())
            {
                String BaseName = (String)LocalizationFromFile[Program.BASE_LANGUAGE];

                DataRow[] Commodity = Data.tbcommodity.Select("commodity = " + DBConnector.SQLAString(DBConnector.DTEscape(BaseName)));

                if (Commodity.Count() == 0)
                {
                    // completely unknown commodity - add first new entry to "tbCommodities"
                    DataRow newRow = Data.tbcommodity.NewRow();

                    newRow["id"]        = currentSelfCreatedIndex;
                    newRow["commodity"] = BaseName;
                    newRow["is_rare"]   = 0;

                    Data.tbcommodity.Rows.Add(newRow);

                    currentSelfCreatedIndex -= 1;

                    // submit changes (tbCommodity) and re-select to get the stored row
                    Program.DBCon.TableUpdate(Data.tbcommodity);

                    Commodity = Data.tbcommodity.Select("commodity = " + DBConnector.SQLAString(DBConnector.DTEscape(BaseName)));
                }

                foreach (KeyValuePair<String, Int32> LanguageFormFile in foundLanguagesFromFile)
                {
                    DataRow[] currentLocalizations = Data.tbcommoditylocalization.Select(" commodity_id = " + Commodity[0]["id"] + " and language_id = " + LanguageFormFile.Value);

                    if(currentLocalizations.Count() == 0)
                    {
                        // add a new localization
                        DataRow newRow = Data.tbcommoditylocalization.NewRow();

                        newRow["commodity_id"] = Commodity[0]["id"];
                        newRow["language_id"]  = LanguageFormFile.Value;
                        newRow["locname"]      = (String)LocalizationFromFile[LanguageFormFile.Key];

                        Data.tbcommoditylocalization.Rows.Add(newRow);
                    }
                }

                Counter++;
                sendProgressEvent("import commodity localization", Counter, DataNames.Tables["Names"].Rows.Count);
            }
        }

        // submit changes
        Program.DBCon.TableUpdate(Data.tbcommoditylocalization);

        Program.DBCon.TableReadRemove(Data.tblanguage);
        Program.DBCon.TableReadRemove(Data.tbcommoditylocalization);
        Program.DBCon.TableReadRemove(Data.tbcommodity);
    }
    catch (Exception ex)
    {
        // deregister the tables even on error before rethrowing
        Program.DBCon.TableReadRemove(Data.tblanguage);
        Program.DBCon.TableReadRemove(Data.tbcommoditylocalization);
        Program.DBCon.TableReadRemove(Data.tbcommodity);

        throw new Exception("Error while loading commodity names", ex);
    }
}
/// <summary>
/// Imports the system data from a JSON file into the database;
/// only newer data (by "updated_at" timestamp) overwrites existing rows.
/// User-changed systems are preserved: the imported original data is kept
/// in "tbSystems_org" instead.
/// </summary>
/// <param name="Filename">path of the JSON file containing a list of EDSystem entries</param>
/// <exception cref="Exception">wraps any error raised while importing (transaction is rolled back)</exception>
public void ImportSystems(String Filename)
{
    String sqlString;
    List<EDSystem> Systems;
    dsEliteDB.tbsystemsRow[] FoundRows;
    dsEliteDB.tbsystems_orgRow[] FoundRows_org;
    DateTime Timestamp_new, Timestamp_old;
    Int32 ImportCounter = 0;
    Dictionary<Int32, Int32> changedSystemIDs = new Dictionary<Int32, Int32>();
    dsEliteDB Data;
    Int32 Counter = 0;

    Data = new dsEliteDB();

    try
    {
        // gettin' some freaky performance
        // (relaxed InnoDB log flushing while bulk-importing; restored below)
        Program.DBCon.Execute("set global innodb_flush_log_at_trx_commit=2");

        Systems = JsonConvert.DeserializeObject<List<EDSystem>>(File.ReadAllText(Filename));

        Program.DBCon.TransBegin();

        sqlString = "select * from tbSystems lock in share mode";
        Program.DBCon.TableRead(sqlString, Data.tbsystems);

        sqlString = "select * from tbSystems_org lock in share mode";
        Program.DBCon.TableRead(sqlString, Data.tbsystems_org);

        sendProgressEvent("import systems", Counter, Systems.Count);

        foreach (EDSystem System in Systems)
        {
            // official systems are matched by their (EDDB) id
            FoundRows = (dsEliteDB.tbsystemsRow[])Data.tbsystems.Select("id=" + System.Id.ToString());

            if ((FoundRows != null) && (FoundRows.Count() > 0))
            {
                // system is existing
                if ((bool)(FoundRows[0]["is_changed"]))
                {
                    // data is changed by user - hold it ...
                    // ...and check table "tbSystems_org" for the original data
                    FoundRows_org = (dsEliteDB.tbsystems_orgRow[])Data.tbsystems_org.Select("id=" + System.Id.ToString());

                    if ((FoundRows_org != null) && (FoundRows_org.Count() > 0))
                    {
                        // system is in "tbSystems_org" existing - keep the newer version
                        Timestamp_old = (DateTime)(FoundRows_org[0]["updated_at"]);
                        Timestamp_new = DateTimeOffset.FromUnixTimeSeconds(System.UpdatedAt).DateTime;

                        if (Timestamp_new > Timestamp_old)
                        {
                            // data from file is newer
                            CopyEDSystemToDataRow(System, (DataRow)FoundRows_org[0], false, null, true);
                            ImportCounter += 1;
                        }
                    }
                    // NOTE(review): if the user-changed system has no row in tbSystems_org yet,
                    // the imported original data is dropped here - confirm this is intended
                }
                else
                {
                    // system is existing - keep the newer version
                    Timestamp_old = (DateTime)(FoundRows[0]["updated_at"]);
                    Timestamp_new = DateTimeOffset.FromUnixTimeSeconds(System.UpdatedAt).DateTime;

                    if (Timestamp_new > Timestamp_old)
                    {
                        // data from file is newer
                        CopyEDSystemToDataRow(System, FoundRows[0], false, null, true);
                        ImportCounter += 1;
                    }
                }
            }
            else
            {
                // check if theres a user generated system
                // self-created systems don't have the correct id so it must be identified by name
                FoundRows = (dsEliteDB.tbsystemsRow[])Data.tbsystems.Select(" systemname = " + DBConnector.SQLAString(DBConnector.DTEscape(System.Name.ToString())) + " and id < 0");

                if (FoundRows.Count() > 0)
                {
                    // self created systems is existing -> correct id and get new data from EDDB
                    // (changed system_id in tbStations are automatically internal updated by the database itself)
                    CopyEDSystemToDataRow(System, (DataRow)FoundRows[0], false, null, true);
                }
                else
                {
                    // add a new system
                    dsEliteDB.tbsystemsRow newRow = (dsEliteDB.tbsystemsRow)Data.tbsystems.NewRow();
                    CopyEDSystemToDataRow(System, (DataRow)newRow, false, null, true);
                    Data.tbsystems.Rows.Add(newRow);
                }

                ImportCounter += 1;
            }

            if ((ImportCounter > 0) && ((ImportCounter % 100) == 0))
            {
                // save changes in batches of 100
                Debug.Print("added Systems : " + ImportCounter.ToString());
                Program.DBCon.TableUpdate(Data.tbsystems);
                Program.DBCon.TableUpdate(Data.tbsystems_org);
            }

            Counter++;
            sendProgressEvent("import systems", Counter, Systems.Count);
        }

        // save changes (final update also removes the cached adapters - see "true" flag)
        Program.DBCon.TableUpdate(Data.tbsystems, true);
        Program.DBCon.TableUpdate(Data.tbsystems_org, true);

        Program.DBCon.TransCommit();

        // reset freaky performance
        Program.DBCon.Execute("set global innodb_flush_log_at_trx_commit=1");
    }
    catch (Exception ex)
    {
        if (Program.DBCon.TransActive())
            Program.DBCon.TransRollback();

        try
        {
            // reset freaky performance (best effort - connection may already be broken)
            Program.DBCon.Execute("set global innodb_flush_log_at_trx_commit=1");
            Program.DBCon.TableReadRemove(Data.tbsystems);
            Program.DBCon.TableReadRemove(Data.tbsystems_org);
        }
        catch (Exception) { }

        throw new Exception("Error while importing system data", ex);
    }
}
/// <summary>
/// Saves the data of a Commanders-Log row to tbLog via a single
/// "INSERT ... ON DUPLICATE KEY UPDATE" statement: a new entry is inserted
/// if the timestamp is not yet existing, otherwise the existing row is updated.
/// Afterwards (unless suppressed) a DataChanged event with the row index is raised for the GUI.
/// </summary>
/// <param name="ChangedData">row with data to save</param>
/// <exception cref="Exception">wraps any error raised while saving</exception>
internal void SaveEvent(dsEliteDB.vilogRow ChangedData)
{
    String sqlString;
    double? nDistance = null;

    try
    {
        try
        {
            nDistance = ChangedData.distance;
        }
        catch (Exception)
        {
            // typed datasets can't handle nullvalues for non-string columns
            // thank you, ms ]-P
            // (nDistance simply stays null and is written as SQL "null" below)
        }

        // the sub-selects resolve the display names (system, station, event,
        // commodity, cargo action) to their foreign-key ids;
        // NOTE(review): systemname/stationname/notes go through SQLEscape, but
        // eevent/loccommodity/action are passed to SQLAString unescaped -
        // assumes they come from fixed lists without quote characters - confirm
        sqlString = String.Format("INSERT INTO tbLog(time, system_id, station_id, event_id, commodity_id," +
                                  " cargoaction_id, cargovolume, credits_transaction, credits_total, notes, distance)" +
                                  " SELECT d.* FROM (SELECT" +
                                  " {0} AS time," +
                                  " (select id from tbSystems where systemname = {1}" +
                                  " ) AS system_id," +
                                  " (select id from tbStations where stationname = {2} " +
                                  " and system_id = (select id from tbSystems" +
                                  " where systemname = {1})" +
                                  " ) AS station_id," +
                                  " (select id from tbEventType where eventtype = {3}) As event_id," +
                                  " (select id from tbCommodity where commodity = {4} or loccommodity = {4} limit 1) As commodity_id," +
                                  " (select id from tbCargoAction where cargoaction = {5}) AS cargoaction_id," +
                                  " {6} AS cargovolume," +
                                  " {7} AS credits_transaction," +
                                  " {8} AS credits_total," +
                                  " {9} AS notes," +
                                  " {10} AS distance) AS d" +
                                  " ON DUPLICATE KEY UPDATE" +
                                  " system_id = d.system_id," +
                                  " station_id = d.station_id," +
                                  " event_id = d.event_id," +
                                  " commodity_id = d.commodity_id," +
                                  " cargoaction_id = d.cargoaction_id," +
                                  " cargovolume = d.cargovolume," +
                                  " credits_transaction = d.credits_transaction," +
                                  " credits_total = d.credits_total," +
                                  " notes = d.notes," +
                                  " distance = d.distance",
                                  DBConnector.SQLDateTime(ChangedData.time),
                                  DBConnector.SQLAString(DBConnector.SQLEscape(ChangedData.systemname)),
                                  DBConnector.SQLAString(DBConnector.SQLEscape(ChangedData.stationname)),
                                  DBConnector.SQLAString(ChangedData.eevent),
                                  DBConnector.SQLAString(ChangedData.loccommodity),
                                  DBConnector.SQLAString(ChangedData.action),
                                  ChangedData.cargovolume,
                                  ChangedData.credits_transaction,
                                  ChangedData.credits_total,
                                  ChangedData.notes.Trim() == String.Empty ? "null" : String.Format("'{0}'", DBConnector.SQLEscape(ChangedData.notes)),
                                  nDistance == null ? "null" : DBConnector.SQLDecimal(nDistance.Value));

        Program.DBCon.Execute(sqlString);

        if(!m_NoGuiNotifyAfterSave)
        {
            Int32 RowIndex;

            // NOTE(review): the flag is already false inside this branch, so this
            // assignment is a no-op - presumably it was meant to reset the flag
            // when notification was suppressed (i.e. in an else branch) - confirm
            m_NoGuiNotifyAfterSave = false;

            // locate the saved row by its timestamp so the GUI can select it
            RowIndex = Program.DBCon.getRowIndex("viLog", "time", DBConnector.SQLSortOrder.desc, "time", DBConnector.SQLDateTime(ChangedData.time));

            DataChanged.Raise(this, new DataChangedEventArgs() { DataRow = RowIndex, DataKey = ChangedData.time});
        }
    }
    catch (Exception ex)
    {
        throw new Exception("Error while saving Commanders Log to DB", ex);
    }
}
/// <summary>
/// loads all possible trading data from one to another station (one direction)
/// </summary>
/// <param name="Data">target table receiving the possible trades, ordered by profit descending</param>
/// <param name="Station_From">id of the buying station; null clears the result table</param>
/// <param name="Station_To">id of the selling station; null clears the result table</param>
/// <param name="commodityFilter">restricts the query to these commodity ids (empty list = no restriction)</param>
public void loadBestProfitStationCommodities(dsEliteDB.tmpa_s2s_stationdataDataTable Data, int? Station_From, int? Station_To, List<Int32> commodityFilter)
{
    try
    {
        if ((Station_From != null) && (Station_To != null))
        {
            // optional restriction to the selected commodity ids
            String filterClause = (commodityFilter.Count > 0)
                                    ? " and " + DBConnector.GetString_Or<Int32>("commodity_id", commodityFilter)
                                    : "";

            // buy side (Sd1) joined against sell side (Sd2) per commodity;
            // rows without a computable profit are dropped by the "having" clause
            String query = String.Format(
                               "select Sd1.Station_ID, Sd1.Commodity_Id, Cm.LocCommodity As Commodity, " +
                               " Sd1.Buy, Sd1.Supply, Sd1.SupplyLevel, Sd1.timestamp As Timestamp1, " +
                               " Sd2.Sell, Sd2.Demand, Sd2.DemandLevel, Sd2.timestamp As Timestamp2, " +
                               " (nullif(Sd2.Sell, 0) - nullif(Sd1.Buy,0)) As Profit, Sd1.Sources_ID from " +
                               " (select * from tbCommodityData " +
                               " where Station_ID = {0} " + filterClause + ") Sd1 " +
                               " join" +
                               " (select * from tbCommodityData " +
                               " where Station_ID = {1}) Sd2" +
                               " on Sd1.Commodity_ID = Sd2.Commodity_ID" +
                               " join" +
                               " tbCommodity Cm" +
                               " on Sd1.Commodity_ID = Cm.ID" +
                               " having Profit is not null" +
                               " order by Profit Desc;",
                               Station_From.ToNString("null"),
                               Station_To.ToNString("null"));

            m_lDBCon.Execute(query, Data);
        }
        else
        {
            // if a id equals null it will result in a exception from mysql:
            // "There is already an open DataReader associated with this Connection which must be closed first."
            Data.Clear();
        }
    }
    catch (Exception ex)
    {
        throw new Exception("Error while loading trading data", ex);
    }
}
/// <summary>
/// returns the name of a ship from it's symbolname
/// </summary>
/// <param name="tbshipyardbase">base table with ship definitions ("symbol" and "name" columns)</param>
/// <param name="symbolName">symbolic ship name to look up</param>
/// <returns>the clear name of the ship, or the unchanged symbol name if the symbol is unknown</returns>
internal String GetShipNameFromSymbol(dsEliteDB.tbshipyardbaseDataTable tbshipyardbase, String symbolName)
{
    // NOTE(review): symbolName is passed to SQLAString without escaping -
    // assumes ship symbols never contain quote characters - confirm
    DataRow[] found = tbshipyardbase.Select("symbol = " + SQL.DBConnector.SQLAString(symbolName));

    // use the array's Length property instead of the LINQ Count() extension (CA1829)
    if (found.Length > 0)
    {
        return (String)(found[0]["name"]);
    }

    // unknown symbol: fall back to the symbol name itself
    return symbolName;
}