// Sells a regular (non-hero) item from a shop to the current unit and deducts its gold cost.
// NOTE(review): the local variable `item` shadows the project type name `item` (lower-case
// project types); `item is item` / `item is unit` below are genuine type tests against those
// project types, not tautologies.
internal item SellItemNormal(unit shop, HabProperties hpsItem)
{
    string ID = hpsItem.name;
    // Look the item up by code id; presumably returns null (or a non-item record) when the
    // id is not yet cached — TODO confirm GetByUnit's contract.
    IRecord item = Items.GetByUnit("codeID", ID);
    // "New version" items are represented as units rather than items.
    bool isNewVersionItem = DHHELPER.IsNewVersionItem(ID);
    // Not cached under either representation: create the right kind and register it.
    if (!(item is item) && !(item is unit))
    {
        item = isNewVersionItem ? (IRecord) new unit(hpsItem.name) : (IRecord) new item(hpsItem.name);
        Items.Add(item);
    }
    // Work on a clone so the registered template record stays untouched.
    item = item.Clone();
    if (isNewVersionItem)
    {
        // Unit-backed item: mark as summoned and hand it to the current unit's owner.
        (item as unit).DoSummon = true;
        (item as unit).set_owningPlayer(Current.unit.get_owningPlayer());
        //! shop.OnSell(item as unit); // sell item as unit
    }
    else
    {
        shop.OnSellItem(item as item, Current.unit);
    }
    // pay the gold for this item
    int goldCost = isNewVersionItem ? (item as unit).goldCost : (item as item).goldCost;
    Current.player.Gold = Current.player.Gold - goldCost;
    // NOTE(review): when isNewVersionItem the clone is a `unit`, so `item as item` is null
    // here and the method returns null — verify callers expect that.
    return(item as item);
}
// Sells (or re-resolves) a hero identified by the given properties, creating and charging
// for a new hero when no live instance exists. Returns null when hps is null or when a
// disposed cached hero could not be replaced by the current unit.
internal unit SellHero(HabProperties hps)
{
    if (hps == null) { return(null); }
    string ID = hps.GetStringValue("Name");
    unit hero = Heroes.GetByUnit("ID", ID) as unit;
    // Cached hero exists but has been disposed: either substitute the current unit
    // (if it is the same hero type and alive) or drop the stale entry entirely.
    if (hero != null && hero.IsDisposed)
    {
        if (Current.unit != null && Current.unit != hero && Current.unit.codeID == hero.codeID && !Current.unit.IsDisposed)
        {
            Heroes.Remove(hero);
            Heroes.Add(Current.unit);
            hero = Current.unit;
        }
        else
        {
            Heroes.Remove(hero);
            hero = null;
        }
    }
    if (hero == null)
    {
        unit sellingTavern = null;
        // find the tavern that sold this hero
        // NOTE(review): dcHeroesTaverns indexer will throw if hps.name is unmapped, and the
        // loop below takes the LAST matching tavern without breaking; if no tavern matches,
        // sellingTavern stays null and the .x access below throws NullReferenceException —
        // confirm DHLOOKUP guarantees a match.
        string tavernID = DHLOOKUP.dcHeroesTaverns[hps.name];
        foreach (unit tavern in DHLOOKUP.taverns)
        {
            if (tavern.ID == tavernID) { sellingTavern = tavern; }
        }
        // create new hero
        hero = new unit(hps.name);
        hero.DoSummon = true;
        // Spawn the hero at the selling tavern's location, owned by the current player.
        hero.set_owningPlayer(Current.player, sellingTavern.x, sellingTavern.y);
        Heroes.Add(hero);
        // only new heroes process onsell event
        sellingTavern.OnSell(hero);
        // pay the gold for this hero
        Current.player.Gold = Current.player.Gold - hero.goldCost;
    }
    return(hero);
}
/// <summary>
/// Reads a CGT and creates all the objects needed to create
/// a tokenizer and parser at a later time.
/// </summary>
/// <param name="stream">The CGT stream.</param>
private void ReadFile(Stream stream)
{
    try
    {
        Reset();
        this.stream = stream;
        var binReader = new CalithaBinReader(stream);

        // Validate the "GOLD" signature before reading any records.
        string header = "";
        try
        {
            header = binReader.ReadUnicodeString();
            if (!header.StartsWith("GOLD"))
                throw new CGTStructureException("File header is invalid");
        }
        catch (EndOfStreamException e)
        {
            throw new CGTStructureException("File header is invalid", e);
        }

        // Consume records until the stream is exhausted.
        var records = new RecordCollection();
        while (stream.Position < stream.Length)
            records.Add(ReadRecord(binReader));

        // Build the structural model, then derive the DFA and LALR state tables.
        structure = new CGTStructure(header, records);
        content = new CGTContent(structure);
        dfaStates = CreateDFAStates(content);
        parserStates = CreateParserStates(content);
    }
    finally
    {
        // Always release the underlying stream, even on a malformed file.
        stream.Close();
    }
}
/// <summary>
/// Loads all records into <c>RecordCollection</c>, newest first
/// (by year, then month, then day, then id).
/// </summary>
async Task ExecuteLoadItemsCommand()
{
    IsBusy = true;
    try
    {
        RecordCollection.Clear();

        var items = await DataStore.GetItemsAsync(true);
        var ordered = items
            .OrderByDescending(r => r.Year)
            .ThenByDescending(r => r.Month)
            .ThenByDescending(r => r.Day)
            .ThenByDescending(r => r.Id);

        foreach (var item in ordered)
            RecordCollection.Add(item);
    }
    catch (Exception ex)
    {
        // Best effort: a failed refresh is logged, not surfaced.
        Debug.WriteLine(ex);
    }
    finally
    {
        IsBusy = false;
    }
}
/// <summary>
/// Executes the given SQL text and materializes every row of the result
/// set as a <see cref="Record"/> of column-name/value pairs.
/// </summary>
/// <param name="SQLText">The SQL statement to execute.</param>
/// <returns>All rows returned by the query.</returns>
public RecordCollection ExecuteRecord(string SQLText)
{
    var results = new RecordCollection();
    using (IDataReader reader = ExecuteReader(SQLText))
    {
        while (reader.Read())
        {
            var row = new Record();
            for (int col = 0; col < reader.FieldCount; col++)
                row.Add(reader.GetName(col), reader.GetValue(col));
            results.Add(row);
        }
    }
    return results;
}
/// <summary>
/// Dev utility: loads the session-profile records from a fixed workstation
/// folder, appends the second token of every line in the HisRisServiceCache
/// file, and writes the combined records out.
/// </summary>
static void SessionProfiles()
{
    // Hard-coded developer path — TODO: make configurable if this outlives debugging.
    string path = @"C:\Documents and Settings\l438125\Desktop\SessionProfiles\France\Session2\";
    RecordCollection records = new RecordCollection(path, true);

    // Path.Combine instead of string concatenation (path already ends with '\',
    // so the resulting path is unchanged).
    using (StreamReader reader = new StreamReader(Path.Combine(path, "HisRisServiceCache")))
    {
        string line;
        while ((line = reader.ReadLine()) != null)
        {
            // The second space-separated token on each cache line is the record id.
            string[] strings = line.Split(' ');
            if (strings.Length > 1)
            {
                records.Add(strings[1]);
            }
        }
    }
    WriteRecords(records);
}
// Timer tick handler: maintains the D:\Maintain.csv counters file, rolls the
// per-shift counters over when the shift changes, and drains buffered record
// items into RecordCollection.
private void DispatcherTimerAction(Object sender, EventArgs e)
{
    // First run on a machine: create the CSV with its header row.
    // (Header cells are runtime strings: time, machine no., trigger count,
    // scan count, upload count.)
    if (!File.Exists(@"D:\Maintain.csv"))
    {
        string[] heads = { "时间", "机台号", "触发次数", "扫码次数", "上传次数" };
        Csvfile.AddNewLine(@"D:\Maintain.csv", heads);
    }
    // Shift change detected (date+shift key differs from the persisted one):
    // snapshot the counters to CSV, reset them, and persist the reset state.
    if (LastCleanRecordFlag != GetBanciDate() + GetBanci())
    {
        LastCleanRecordFlag = GetBanciDate() + GetBanci();
        Inifile.INIWriteValue(ParameterIniPath, "Record", "LastCleanRecordFlag", LastCleanRecordFlag);
        // Per-shift output directory, e.g. D:\<date+shift>.
        if (!Directory.Exists("D:\\" + LastCleanRecordFlag))
        {
            Directory.CreateDirectory("D:\\" + LastCleanRecordFlag);
        }
        // Append the closing counter snapshot for the finished shift.
        string[] count = { DateTime.Now.ToString(), JiTaiHao, TrigerTimes.ToString(), ScanTimes.ToString(), UpdateTimes.ToString() };
        Csvfile.AddNewLine(@"D:\Maintain.csv", count);
        TrigerTimes = 0;
        ScanTimes = 0;
        UpdateTimes = 0;
        Inifile.INIWriteValue(ParameterIniPath, "Times", "TrigerTimes", TrigerTimes.ToString());
        Inifile.INIWriteValue(ParameterIniPath, "Times", "ScanTimes", ScanTimes.ToString());
        Inifile.INIWriteValue(ParameterIniPath, "Times", "UpdateTimes", UpdateTimes.ToString());
        MsgText = AddMessage("记录清空");
    }
    // Drain the producer buffer into the bound collection under the shared lock.
    // NOTE(review): the unlocked Count check is a benign fast-path only if
    // producers also lock LockObject when adding — confirm.
    if (recordItemList.Count > 0)
    {
        lock (LockObject)
        {
            foreach (RecordItem item in recordItemList)
            {
                RecordCollection.Add(item);
            }
            recordItemList.Clear();
        }
    }
}
// Executes CommandText against Oracle and returns at most <paramref name="limit"/>
// records starting at row <paramref name="offset"/> (client-side paging: earlier
// rows are read and discarded, not skipped server-side).
internal override RecordCollection ExecuteReader(int offset, int limit)
{
    if (string.IsNullOrEmpty(CommandText))
    {
        throw new ArgumentNullException("CommandText");
    }
    AdapterState = AdapterState.Fetching;
    CheckConnectionStr();
    using (OracleConnection oracleCn = new OracleConnection(base.ConnectionStr))
    {
        oracleCn.Open();
        OracleCommand oracleCmd = new OracleCommand(CommandText, oracleCn);
        oracleCmd.CommandType = CommandType;
        if (Parameters != null)
        {
            oracleCmd.Parameters.AddRange(Parameters.ToArray());
        }
        OracleDataReader oracleReader = oracleCmd.ExecuteReader(CommandBehavior.CloseConnection);
        RecordCollection records = new RecordCollection();
        // Build the column schema from the reader's field metadata.
        for (int i = 0; i < oracleReader.FieldCount; i++)
        {
            records.Columns.Add(oracleReader.GetName(i), oracleReader.GetFieldType(i), i);
        }
        // Read rows, keeping only those in the [offset, offset+limit) window.
        int move = 0;
        while (oracleReader.Read())
        {
            // limit never changes in the loop; a non-positive limit yields an
            // empty result after reading the first row.
            if (limit <= 0)
            {
                break;
            }
            if (move >= offset)
            {
                if (move < (offset + limit))
                {
                    Record newRecord = records.NewRecord();
                    for (int i = 0; i < newRecord.Count; i++)
                    {
                        // NOTE(review): per-cell catch-all maps any read failure
                        // to null — confirm that silently nulling bad cells is
                        // intended rather than surfacing the error.
                        try
                        {
                            newRecord[i] = oracleReader.GetValue(newRecord.Columns[i].ReadIdx);
                        }
                        catch
                        {
                            newRecord[i] = default(object);
                        }
                    }
                    records.Add(newRecord);
                }
                else
                {
                    // Past the requested window: stop reading early.
                    break;
                }
            }
            move++;
        }
        // Cancel any remaining fetch and tear down explicitly.
        // NOTE(review): the Dispose calls below are redundant with the enclosing
        // using block (connection) and harmless for the command/reader.
        oracleCmd.Cancel();
        oracleReader.Close();
        oracleReader.Dispose();
        oracleCmd.Dispose();
        oracleCn.Dispose();
        AdapterState = AdapterState.Waiting;
        return(records);
    }
}
// Copies records from the job's source table to its destination table in
// batches, remapping fields per the job's FieldMappings and reporting progress
// through the supplied BackgroundWorker.
public static void RunJob(ref BackgroundWorker backgroundWorker, Job job)
{
    // Validate the job definition up front; each failure is traced then thrown.
    if (string.IsNullOrEmpty(job.SourceTable))
    {
        TraceService.Instance.WriteConcat(TraceEvent.Error, job.Name, " does not have a source table specified!");
        throw new ArgumentException("You must specify a source table!");
    }
    if (string.IsNullOrEmpty(job.DestinationTable))
    {
        TraceService.Instance.WriteConcat(TraceEvent.Error, job.Name, " does not have a destination table specified!");
        throw new ArgumentException("You must specify a destination table!");
    }
    if (job.FieldMappings.Count == 0)
    {
        TraceService.Instance.WriteConcat(TraceEvent.Error, job.Name, " does not have any mapped fields!");
        throw new ArgumentException("You must have at least one Field Mapped!");
    }
    var sourceProvider = GetProvider(Program.Configuration.SourceConnection);
    var destinationProvider = GetProvider(Program.Configuration.DestinationConnection);
    var sourceFields = job.FieldMappings.Select(f => f.SourceField);
    var destinationFields = job.FieldMappings.Select(f => f.DestinationField);
    // Record count is used both for progress percentage and to flush the final
    // (partial) batch.
    int recordCount = sourceProvider.GetRecordCount(job.SourceTable);
    var buffer = new RecordCollection();
    int processedRecordCount = 0;
    // NOTE(review): recordsEnumerator is never disposed; if the provider's
    // enumerator holds a connection/reader, wrap this loop accordingly.
    var recordsEnumerator = sourceProvider.GetRecordsEnumerator(job.SourceTable, sourceFields);
    while (recordsEnumerator.MoveNext())
    {
        var record = recordsEnumerator.Current.Clone();
        buffer.Add(record);
        processedRecordCount++;
        // Flush on every full batch, and on the last record as counted up front.
        // NOTE(review): if the source grows past recordCount mid-run, a trailing
        // partial batch is never inserted — confirm GetRecordCount is reliable
        // or add a post-loop flush.
        if (processedRecordCount.IsMultipleOf(Program.Configuration.BatchSize) || processedRecordCount == recordCount)
        {
            if (backgroundWorker.CancellationPending)
            {
                return;
            }
            buffer.ReMapFields(job.FieldMappings);
            // TODO: Apply Transform Functions
            // Create an ITransformerPlugin and let users assign them to columns
            // Then run them here on each record in `buffer`.
            destinationProvider.InsertRecords(job.DestinationTable, buffer);
            // Start a fresh buffer for the next batch.
            buffer = new RecordCollection();
            // NOTE(review): percent is 0/0 (NaN) when the source table is empty —
            // the loop never runs then, so this is currently unreachable; verify.
            double percent = processedRecordCount / (double)recordCount;
            percent = percent * 100;
            backgroundWorker.ReportProgress((int)percent);
            TraceService.Instance.WriteFormat(TraceEvent.Information, "{0}/{1} Records Processed", processedRecordCount, recordCount);
            if (backgroundWorker.CancellationPending)
            {
                return;
            }
        }
    }
}