public static void WriteData(this XiDataList dataList, WriteData dataObjectToWrite)
{
    dataList.WriteData(new List<WriteValue> { dataObjectToWrite.ToWriteValue() });
}
private async Task WriteValue()
{
    var writer = new DataWriter();
    writer.WriteBytes(new byte[] { WRITE_STATE });
    await WriteData.WriteValueAsync(writer.DetachBuffer());
}
public override async Task RequestBatteryLevelAsync()
{
    var writer = new DataWriter();
    writer.WriteBytes(new byte[] { WRITE_BATTERY });
    await WriteData.WriteValueAsync(writer.DetachBuffer());
}
static void Main(string[] args)
{
    //WriteData a = new WriteData();
    //var b = a?.BirthDate?.Year;

    List<WriteData> listText = new List<WriteData>();
    for (int i = 0; i < 3; i++)
    {
        WriteData data = new WriteData { Id = i, Name = "Liu", Age = 20 + i };
        listText.Add(data);
    }

    List<String> headColumn = new List<String>();
    headColumn.Add("编号");
    headColumn.Add("姓名");
    headColumn.Add("年龄");
    headColumn.Add("出生日期");
    //WriteExcelFile<WriteData>(@"E:\TestOpenXML\writeTest2.xlsx", listText, headColumn);

    List<int> mulListString = new List<int>();
    for (int i = 0; i < 1000; i++)
    {
        mulListString.Add(i);
    }
    WriteExcelFile<int>(@"D:\Test\writeTest3.xlsx", mulListString, headColumn);
}
static void Main(String[] args)
{
    Stopwatch Global = new Stopwatch();
    Global.Start();
    Console.WriteLine("Start");

    //do not check the distributions.
    Control.CheckDistributionParameters = false;

    String ParamPath = "D:/Documents/R/AgentBasedModel/Comparison/";
    String OutputPath = "C:/Users/Karar/Desktop/Output";
    String[] ParamFiles = Utils.GetValidParams(ParamPath);

    //Prepare naming scheme
    string Tag = Utils.GetTag(ParamFiles[0]);

    SimParams Par = new SimParams(localBreeding: true);
    WriteData Full = Simulations.Basic(Par);
    /* WriteData Temp;
     * WriteData Full = Simulations.Interval(Par, 200, false);
     * for(int j = 1; j < 2; j++){
     *     Temp = Simulations.Interval(Par, 200, false);
     *     Full.ConCat(Temp, Par);
     * }*/
    Full.Output(Par, OutputPath, Tag, true);

    Console.WriteLine(Global.ElapsedMilliseconds);
    Console.WriteLine("All simulations completed.");
}
private static void CreaeteWorksheet(string fileName, LsData lsData, WriteData writeData, bool visible = true)
{
    Excel.Application xlApp = new Excel.Application();
    if (xlApp == null)
    {
        return;
    }
    if (lsData == null)
    {
        lsData = ExcelWorker.lsData;
    }

    object misValue = System.Reflection.Missing.Value;
    var xlWorkbook = File.Exists(fileName) ? xlApp.Workbooks.Open(fileName) : xlApp.Workbooks.Add(misValue);
    xlApp.Visible = visible;

    Task.Factory.StartNew(() =>
    {
        writeData(xlWorkbook, lsData);
        Save(xlWorkbook, fileName);
        ReleaseMemory(xlApp, xlWorkbook);
    });
}
static void Main(String[] args)
{
    Stopwatch Global = new Stopwatch();
    Global.Start();
    Console.WriteLine("Start");

    //do not check the distributions.
    Control.CheckDistributionParameters = false;

    String ParamPath = "D:/Documents/ECK";
    String OutputPath = "D:/Documents/ECK";
    String[] ParamFiles = Utils.GetValidParams(ParamPath);

    //Prepare naming scheme
    string Tag = Utils.GetTag(ParamFiles[0]);

    SimParams Par = new SimParams(reload: true, path: ParamFiles[0]);
    //int x = 1000000;
    //float[] sampler = new float[6]{ .8f, .8f, .9f, .5f, .4f, .2f };
    //int[] lol = Par.RandomSampleUnequal(sampler, x, true);

    WriteData Full = Simulations.Basic(Par);
    /* WriteData Temp;
     * WriteData Full = Simulations.Interval(Par, 200, false);
     * for(int j = 1; j < 2; j++){
     *     Temp = Simulations.Interval(Par, 200, false);
     *     Full.ConCat(Temp, Par);
     * }*/
    Full.Output(Par, OutputPath, Tag, true);

    Console.WriteLine(Global.ElapsedMilliseconds);
    Console.WriteLine("All simulations completed.");
}
static void Run(string fileName, int repeats, int frequency, string outputPath, bool repAll, bool matchAll, bool ageAll, bool lrnThrshAll, bool accAll, bool chanForAll, bool chanInvAll)
{
    Stopwatch Local = new Stopwatch();
    Local.Start();

    //Get parameters and run the appropriate simulation
    SimParams Par = new SimParams(reload: true, path: fileName);
    WriteData Temp;
    WriteData Full = Simulations.Interval(Par, frequency, repAll, matchAll, ageAll, lrnThrshAll, accAll, chanForAll, chanInvAll);
    for (int j = 1; j < repeats; j++)
    {
        Temp = Simulations.Interval(Par, frequency, repAll, matchAll, ageAll, lrnThrshAll, accAll, chanForAll, chanInvAll);
        Full.ConCat(Par, Temp);
    }

    //Save data
    string Tag = Utils.GetTag(fileName);
    Full.Output(Par, outputPath, Tag, true);
    Console.WriteLine("{0}-{1}", fileName, Local.ElapsedMilliseconds);
}
public void Write()
{
    string filePath = FilePath;
    using (FileStream fs = new FileStream(filePath, FileMode.OpenOrCreate, FileAccess.ReadWrite, FileShare.Read))
    {
        //create a BinaryWriter object to write primitive data types to the stream
        BinaryWriter bw = new BinaryWriter(fs);

        //call the method overload to write data to the stream
        //OnWrite(bw);

        //via the delegate
        WriteData wd = OnWrite;
        wd(bw);

        //via the event
        OnDataWrite?.Invoke(this, new DataWriteEvent { Writer = bw });

        //flush the buffered data to disk
        bw.Flush();
        //WriteDebug(fs);
    }
}
private void UpdateClient()
{
    if (dsUsage.Tables.Contains("Client"))
    {
        dsUsage.Tables.Remove(dsUsage.Tables["Client"]);
    }

    DateTime sDate = pp1.SelectedPeriod;
    DateTime eDate = sDate.AddMonths(1);

    SqlDataAdapter daClient = new SqlDataAdapter("Client_Select", cnSselData);
    daClient.SelectCommand.CommandType = CommandType.StoredProcedure;
    daClient.SelectCommand.Parameters.AddWithValue("@Action", "All");
    daClient.SelectCommand.Parameters.AddWithValue("@sDate", sDate);
    daClient.SelectCommand.Parameters.AddWithValue("@eDate", eDate);
    daClient.Fill(dsUsage, "Client");

    DataColumn[] pkc = new DataColumn[1];
    pkc[0] = dsUsage.Tables["Client"].Columns["ClientID"];
    dsUsage.Tables["Client"].PrimaryKey = pkc;

    if (sDate <= DateTime.Now.Date && eDate > DateTime.Now.Date && !Convert.ToBoolean(Session["Updated"]))
    {
        WriteData mWriteData = new WriteData(Provider);
        string[] types = new string[] { "Tool" };
        mWriteData.UpdateTables(BillingCategory.Tool, UpdateDataType.DataClean);
        Session["Updated"] = false;
    }
}
static void Main(string[] args)
{
    WriteData writeData = new WriteData("filename.txt");
    //do something
    writeData.Dispose();
}
public void write(WriteData w)
{
    var json = JsonConvert.SerializeObject(w);
    Response.Clear();
    Response.ContentType = "application/json; charset=utf-8";
    Response.Write(json);
    Response.End();
}
public override void Flush()
{
    while (queue.Count > 0)
    {
        WriteData data = queue.Dequeue();
        Console.ForegroundColor = data.foreground;
        Console.BackgroundColor = data.background;
        Console.Write(data.value);
    }
}
static void Main(String[] args) //do not change this line!
{
    //Starts a timer. You can omit this.
    Stopwatch Global = new Stopwatch();
    Global.Start();

    //Warns that the simulation has started. You can omit this.
    Console.WriteLine("Start");

    //Prevents checks on the distributions; delete if you are changing the library, otherwise leave it alone.
    //I have internal checks to ensure distributions have appropriate boundaries, so this just wastes time.
    Control.CheckDistributionParameters = false;

    //Assign the arguments passed on the console to the right data types.
    //Add more arguments and assignments or delete unwanted ones as needed.
    String ParamPath = args[0];  //Required argument!
    String OutputPath = args[1]; //Required argument!
    int Optional = args.Length > 2 ? System.Convert.ToInt32(args[2]) : 4; //Optional argument with a default!

    //Get the .SEMP files.
    //Use the next two lines if you want to change the params here rather than load .SEMPs,
    //and comment out the following three lines.
    //SimParams Par = new SimParams();
    //string Tag = "new";
    String[] ParamFiles = Utils.GetValidParams(ParamPath);

    //for(int i = 0; i < ParamFiles.Length; i++){
    SimParams Par = new SimParams(reload: true, path: ParamFiles[0]); //i instead of 0
    string Tag = Utils.GetTag(ParamFiles[0]);                         //i instead of 0

    //Basic is not the only simulation option, but it should cover most of your needs.
    WriteData Full = Simulations.Basic(Par);

    //Writes the data files (.SEMP and .csvs).
    Full.Output(Par, OutputPath, Tag, true);
    //}

    //Note that this code will only run one parameter file. To run all .SEMPs in a folder,
    //uncomment the for loop and its end bracket and change the marked 0's to i.
    //Also notice that parallelization is absent. See the IntervalSim program for an example of code that parallelizes.

    //Reports how much time has elapsed and that the simulation is over. You can omit this.
    Console.WriteLine(Global.ElapsedMilliseconds);
    Console.WriteLine("All simulations completed.");
}
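The comments above describe looping over every .SEMP file in a folder and note that the template does not parallelize. A minimal sketch of such a loop, using only the Utils, SimParams, Simulations, and WriteData calls already shown in the template; the Parallel.For wrapper is an assumption, not part of the original program:

//Hedged sketch: process every parameter file found in ParamPath, one iteration per file.
//Assumes the same API as the template above; Parallel.For is an added assumption.
System.Threading.Tasks.Parallel.For(0, ParamFiles.Length, i =>
{
    SimParams par = new SimParams(reload: true, path: ParamFiles[i]);
    string tag = Utils.GetTag(ParamFiles[i]);
    WriteData full = Simulations.Basic(par);
    full.Output(par, OutputPath, tag, true);
});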
public static void WriteData(this XiDataList dataList, WriteData dataObjectToWrite, OnWriteDataComplete onComplete, object asyncState)
{
    var xiWriteValue = dataObjectToWrite.ToWriteValue();
    dataList.WriteData(new List<WriteValue> { xiWriteValue }, onComplete, asyncState);
}
private static void SaveOrders(Orders order, string filename)
{
    try
    {
        WriteData.WriteListToFile(order.OrdresCollection, filename);
        //Append the total price ("Totale prijs"); "C2" formats the value as currency with two decimals.
        File.AppendAllText(filename, "Totale prijs: " + order.TotalPrice().ToString("C2"));
    }
    catch (Exception err)
    {
        //"An error occurred while saving the order"
        Console.WriteLine("Er is een fout opgetreden bij het opslaan van de bestelling: " + err.Message);
    }
}
public override IAsyncResult BeginWrite(byte[] buffer, int offset, int count, AsyncCallback callback, object state)
{
    lock (_queue)
    {
        var data = new WriteData(buffer, offset, count, callback, state);
        if (_pendingWrite > 0)
        {
            _queue.Enqueue(data);
            return data.AsyncResult;
        }
        return BeginWriteInternal(buffer, offset, count, callback, state, data);
    }
}
void OnLoadFinished()
{
    Debug.Log("路径拷贝完成"); //"path copy finished"

    //path where the database lives in the sandbox (persistent data path);
    //the second argument is a placeholder literal meaning "enter the corresponding table name"
    dataBasePath = System.IO.Path.Combine(Application.persistentDataPath, "输入对应的表名");

    //initialize the read and write helpers
    readData = new ReadData(dataBasePath);
    writeData = new WriteData(dataBasePath);

    //read the data out into the dictionary
    readData.GetData();
}
private static void A2lDetailData(string targetPath, RadGridView gridView, GridViewData listData)
{
    try
    {
        lock (obj)
        {
            StringBuilder _segmentStr = new StringBuilder();
            StringBuilder _10msStr = new StringBuilder();
            StringBuilder _100msStr = new StringBuilder();

            //build the XCP time-category array names
            _segmentHeadName = HEADER_NAME + FUN_TYPE + SEGMENT_NAME + _A2L_ARRAY_CHAR + _SPLIT_CHAR;
            _10msHeadName = HEADER_NAME + FUN_TYPE + _10MS_NAME + _A2L_ARRAY_CHAR + _SPLIT_CHAR;
            _100msHeadName = HEADER_NAME + FUN_TYPE + _100MS_NAME + _A2L_ARRAY_CHAR + _SPLIT_CHAR;

            if (listData.LimitTimeListSegMent.Count > 0)
            {
                //append the header
                _segmentStr.Append(_segmentHeadName);
                //append the data
                AppendData(listData.LimitTimeListSegMent, _segmentStr, gridView);
            }
            if (listData.LimitTimeList10ms.Count > 0)
            {
                _10msStr.Append(_10msHeadName);
                AppendData(listData.LimitTimeList10ms, _10msStr, gridView);
            }
            if (listData.LimitTimeList100ms.Count > 0)
            {
                _100msStr.Append(_100msHeadName);
                AppendData(listData.LimitTimeList100ms, _100msStr, gridView);
            }

            //write out the data
            if (_segmentStr.Length > 0)
            {
                WriteData.WriteString(_segmentStr, targetPath);
            }
            if (_10msStr.Length > 0)
            {
                WriteData.WriteString(_10msStr, targetPath);
            }
            if (_100msStr.Length > 0)
            {
                WriteData.WriteString(_100msStr, targetPath);
            }
        }
    }
    catch (Exception ex)
    {
        LogHelper.Log.Error(ex.Message + "\r\n" + ex.StackTrace);
    }
}
public async Task WriteData_ShouldCreateNewRecord()
{
    var userId = "111";
    var bookId = 1;
    var expected = 3;
    var writeData = new WriteData(_context);

    await writeData.AddReservationAsync(bookId, userId);

    var actual = _context.Reservations.Count();
    Assert.Equal(expected, actual);
}
public override IAsyncResult BeginWrite(byte[] buffer, int offset, int count, AsyncCallback callback, object state)
{
    lock (_queue)
    {
        var data = new WriteData(buffer, offset, count, callback, state);
        if (_pendingWrite > 0)
        {
            _queue.Enqueue(data);
            return data.AsyncResult;
        }
        return BeginWriteInternal(buffer, offset, count, callback, state, data);
    }
}
/// <summary>
/// Send request data
/// </summary>
/// <param name="sendFrameBase"></param>
/// <returns></returns>
private async Task<T> Request<T>(SendFrameBase sendFrameBase) where T : ReceiveFrameBase, new()
{
    if (sendFrameBase == null)
    {
        throw new ArgumentNullException(nameof(sendFrameBase));
    }

    var data = sendFrameBase.FullFrame;
    TXContainer container = new TXContainer(data);
    SerialPort.Write(container.FullFrame, 0, container.FullFrame.Length);
    WriteData?.Invoke(container.FullFrame);
    return await GetRespond<T>();
}
public async Task WriteData_ShouldCreateNewRecord()
{
    var writeData = new WriteData(_context);
    var expected = new Book { BookId = 101, Name = "Book55", Description = "Desc55" };

    await writeData.AddBookAsync(expected);

    var actual = await _context.Books.FindAsync(101);
    Assert.Same(expected, actual);
}
private static void DbcDetailData(string targetPath, RadGridView gridView, GridViewData listData)
{
    try
    {
        //iterate over the row data
        StringBuilder dbcBuilder = new StringBuilder();
        acturalDBCList = new List<AnalysisSignal>();
        frameIdList = new List<string>();
        allDbcGroupData = new List<StringBuilder>();

        //convert the selected rows into signals
        List<AnalysisSignal> analysisDbcDataList = DbcDataToSignal(listData, gridView);

        //for each group (frame) ID, collect all data rows belonging to that ID
        for (int i = 0; i < frameIdList.Count; i++)
        {
            var resdbcList = acturalDBCList.FindAll(dbc => dbc.DataAddress == frameIdList[i]);
            StringBuilder dbcGroupData = new StringBuilder();
            dbcGroupData.Append(DBC_DETAIL_HEAD);
            string metholdName = DBC_DEATIL_METHOLD_NAME + "_" + frameIdList[i];
            dbcGroupData.AppendLine(metholdName + "[] = \r\n{ ");

            //output format: name + description + unit + data type + data length + byte order
            //+ start index (used by DBC) + bit length + data address (A2L: ECU address, monitor: CAN ID)
            //+ factor + offset
            for (int j = 0; j < resdbcList.Count; j++)
            {
                dbcGroupData.Append("\t\t" + '"' + resdbcList[j].Name + '"' + ",");
                var dbcMsg = analysisDbcDataList.Find(dbc => dbc.DataAddress == resdbcList[j].DataAddress);
                dbcGroupData.Append('"' + dbcMsg.Describle + '"' + ",");
                dbcGroupData.Append('"' + resdbcList[j].Unit + '"' + ",");
                dbcGroupData.Append(resdbcList[j].SaveDataType + ",");
                dbcGroupData.Append(resdbcList[j].SaveDataLen + ",");
                dbcGroupData.Append(resdbcList[j].IsMotorola + ",");
                dbcGroupData.Append(resdbcList[j].StartIndex + ",");
                dbcGroupData.Append(resdbcList[j].DataBitLen + ",");
                dbcGroupData.Append(resdbcList[j].DataAddress + ",");
                dbcGroupData.Append(resdbcList[j].Factor + ",");
                dbcGroupData.AppendLine(resdbcList[j].OffSet + ",");
            }
            dbcGroupData.AppendLine("};");

            LogHelper.Log.Info("DBC开始写入数据!");
            allDbcGroupData.Add(dbcGroupData);
            LogHelper.Log.Info("DBC写数据完成!");
        }

        //write the collected data
        WriteData.WriteString(allDbcGroupData, targetPath);
    }
    catch (Exception ex)
    {
        LogHelper.Log.Info(ex.Message + ex.StackTrace);
    }
}
/// <summary>
/// Database maintenance performed during a remote upgrade
/// </summary>
private void RemotUpdate()
{
    if (WriteData == null)
    {
        return;
    }

    using (DataTable dt = WriteData.Read("select top 1 * from AllTestStationTime"))
    {
        if (dt == null || dt.Columns.Count <= 0)
        {
            WriteData.Write("CREATE TABLE [dbo].[AllTestStationTime](" +
                "[ID] [bigint] IDENTITY(1,1) NOT NULL," +
                " [TestTime] [datetime] NOT NULL," +
                " [TestYear] [int] NULL," +
                " [TestMonth] [int] NULL," +
                " [TestDay] [int] NULL," +
                " [WorkStation] [int] NOT NULL," +
                " [StationName] [nvarchar](100) NULL," +
                " [TimeCount] [int] NULL," +
                " [OperaCount] [int] NULL," +
                "CONSTRAINT [PK_AllTestStationTime] PRIMARY KEY CLUSTERED " +
                "(" +
                " [ID] ASC," +
                " [TestTime] ASC," +
                " [WorkStation] ASC" +
                ")WITH (PAD_INDEX = OFF, STATISTICS_NORECOMPUTE = OFF, IGNORE_DUP_KEY = OFF, ALLOW_ROW_LOCKS = ON, ALLOW_PAGE_LOCKS = ON) ON [PRIMARY]" +
                ") ON [PRIMARY]");
        }
    }

    using (DataTable dt = WriteData.Read("select top 1 * from AllTestStationTimeEveryHour"))
    {
        if (dt == null || dt.Columns.Count <= 0)
        {
            WriteData.Write("CREATE TABLE [dbo].[AllTestStationTimeEveryHour](" +
                " [ID] [bigint] IDENTITY(1,1) NOT NULL," +
                " [TestTime] [datetime] NOT NULL," +
                " [UseTime] [int] NULL," +
                " [WorkStation] [int] NULL," +
                " [StationName] [nvarchar](100) NOT NULL," +
                " CONSTRAINT [PK_AllTestStationTimeEveryHour] PRIMARY KEY CLUSTERED" +
                "(" +
                " [ID] ASC," +
                " [TestTime] ASC," +
                " [StationName] ASC" +
                ")WITH (PAD_INDEX = OFF, STATISTICS_NORECOMPUTE = OFF, IGNORE_DUP_KEY = OFF, ALLOW_ROW_LOCKS = ON, ALLOW_PAGE_LOCKS = ON) ON [PRIMARY]" +
                ") ON [PRIMARY]");
        }
    }
}
private List<WriteData> createWriteDataList(byte[] derPrivatekey)
{
    var fs = new MemoryStream(derPrivatekey);
    var writeDataList = new List<WriteData>();
    for (byte recno = 0; ; recno++)
    {
        var rec = new WriteData();

        // recno
        rec.recno = recno;

        {
            byte[] bs = new byte[62];
            int readSize = fs.Read(bs, 0, bs.Length);
            if (readSize == 0)
            {
                break;
            }
            rec.data1 = bs.ToList().Take(readSize).ToArray();
        }
        {
            byte[] bs = new byte[32];
            int readSize = fs.Read(bs, 0, bs.Length);
            if (readSize == 0)
            {
                writeDataList.Add(rec);
                break;
            }
            rec.data2 = bs.ToList().Take(readSize).ToArray();
        }
        {
            byte[] bs = new byte[32];
            int readSize = fs.Read(bs, 0, bs.Length);
            if (readSize == 0)
            {
                writeDataList.Add(rec);
                break;
            }
            rec.data3 = bs.ToList().Take(readSize).ToArray();
        }
        writeDataList.Add(rec);
    }
    fs.Close();
    return writeDataList;
}
private async Task<string> writeRec(string rpid, string pin, WriteData rec)
{
    string result = "";
    try
    {
        result = await Task<string>.Run(async () =>
        {
            byte[] challenge = System.Text.Encoding.ASCII.GetBytes("this is challenge");

            byte[] userid = new byte[] { rec.recno, rec.filler };
            userid = userid.ToList().Concat(rec.data1).ToArray();

            string username = (rec.data2 == null) ? "" : gebo.CTAP2.Common.BytesToHexString(rec.data2);
            string userdisplayname = (rec.data3 == null) ? "" : gebo.CTAP2.Common.BytesToHexString(rec.data3);

            string json =
                "{" +
                @"rp : {" +
                string.Format($"id : '{rpid}',") +
                @"}," +
                @"user : {" +
                string.Format($"id_bytearray:[{string.Join(",", userid)}],") +
                string.Format($"name :'{username}',") +
                string.Format($"displayName :'{userdisplayname}',") +
                @"}," +
                @"pubKeyCredParams: [{type: 'public-key',alg: -7}]," +
                @"attestation: 'direct'," +
                @"timeout: 60000," +
                @"authenticatorSelection : {" +
                string.Format($"requireResidentKey : true,") +
                @"authenticatorAttachment : 'cross-platform'," +
                string.Format($"userVerification : 'discouraged'") +
                @"}," +
                string.Format($"challenge:[{string.Join(",", challenge)}],") +
                "}";

            var ret = await gebo.CTAP2.WebAuthnModokiDesktop.Credentials.Create(gebo.CTAP2.DevParam.GetDefaultParams(), json, pin);
            if (ret.isSuccess == false)
            {
                return ret.msg;
            }
            return "Success";
        });
    }
    catch (Exception ex)
    {
        result = ex.Message;
    }
    finally
    {
    }
    return result;
}
/// <summary>
/// TODO: rename this to something more meaningful
/// </summary>
/// <param name="Folder"></param>
/// <returns></returns>
private async System.Threading.Tasks.Task BurnDaHausAsync(string Folder)
{
    string _folder = Folder;
    string xmlPath = _folder;
    string xsdPath = _folder;
    string originalText = Text;

    Dictionary<string, string> tables = GetTablesNames.GetTables(xsdPath);
    foreach (KeyValuePair<string, string> table in tables.AsEnumerable())
    {
        ResetProgressBar();
        Text = table.Value;
        Action<int> progress = new Action<int>(ProgressChanged);
        string[] files = GetXMLSBySchema(table.Value, xmlPath, true);

        ProccessXSD proc = new ProccessXSD();
        TableDefinition def = proc.XSDToTableDefinition(table.Key, TargetSchema);

        if (def.tableName == "as_param")
        {
            ResetProgressBar();
            Action<int> localProgress = new Action<int>(ProgressChanged);
            string[] paramXmls = System.IO.Directory.GetFiles(xmlPath, "AS_*_PARAMS_*.xml", System.IO.SearchOption.AllDirectories);
            foreach (string paramXml in paramXmls)
            {
                textBox1.Text = paramXml;
                WriteData paramWrt = new WriteData(def, paramXml, _cs, 100);
                await paramWrt.ReadXmlAsync(localProgress);
            }
            continue;
        }

        if (files.Count() < 1)
        {
            continue;
        }

        foreach (string xmlfile in files)
        {
            textBox1.Text = xmlfile;
            WriteData wrt = new WriteData(def, xmlfile, _cs);
            await wrt.ReadXmlAsync(progress);
        }
    }

    Text = originalText;
    ResetProgressBar();
}
private static void AddDBCDetailGroup(string targPath)
{
    StringBuilder sbExInfo = new StringBuilder();
    sbExInfo.Append(EXINFO_TYPE_HEAD);
    sbExInfo.AppendLine(EXINFO_FUN_NAME_CAN2 + EXINFO_TYPE_METHOLD_NAME);
    //sbExInfo.AppendLine("\t\t0" + "," + "0" + "," + "0" + ",");
    //sbExInfo.AppendLine("\t\t1" + "," + "0" + "," + "0" + ",");

    for (int i = 0; i < frameIdList.Count; i++)
    {
        var resdbcList = acturalDBCList.FindAll(dbc => dbc.DataAddress == frameIdList[i]);
        string metholdName = DBC_DEATIL_METHOLD_NAME + "_" + frameIdList[i];
        sbExInfo.AppendLine($"\t\t{(int)ExInfoType.MORNITOR_TAB_TYPE},(uint32_t){metholdName},{resdbcList.Count},");
    }
    sbExInfo.AppendLine("};");

    WriteData.WriteString(sbExInfo, targPath);
}
static void Main(string[] args)
{
    // Phase 1
    // We create two objects that write data of different formats to files.
    XmlFileWriter myXmlFileWriter = new XmlFileWriter();
    myXmlFileWriter.SetName("DataFile.xml");
    myXmlFileWriter.WriteToFile("MessdatenMessdaten");

    //IWriter Writer1 = new JsonFileWriter();
    //Writer1.SetName("DataFile1"); // not a member of the interface!
    //Writer1.WriteToFile("MessdatenMessdaten");

    // Phase 2 ("closely coupled classes")
    // We want to extend the methods once more and filter the data before writing,
    // so we implement a new class.
    //WriteDataToXml xmlWriter = new WriteDataToXml(myXmlFileWriter);
    //xmlWriter.Write("DatenDatenDaten");

    // What is the problem?
    // 1. As soon as we change "XmlFileWriter", FilteredDataToXML has to be fixed as well.
    // 2. We end up writing one class for XML, one for JSON, and so on. :-(

    // Phase 2 ("loosely coupled classes")
    // We replace the hard-wired dependency with an interface. Any class that
    // implements this interface can then be plugged in.
    WriteData xmlOutput = new WriteData(myXmlFileWriter);
    xmlOutput.Write("DiesUndJenes");

    JsonFileWriter myJsonFileWriter = new JsonFileWriter();
    myJsonFileWriter.SetName("Data.json");
    WriteData jsonOutput = new WriteData(myJsonFileWriter);
    jsonOutput.Write("UndNochWasAnderes");

    // Goal achieved :-)
}
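The IWriter, XmlFileWriter, JsonFileWriter, and WriteData definitions are not shown in this example. A minimal sketch of the shapes the Main method above seems to assume, with WriteData depending only on the interface; the member list is an assumption, the real project may differ:

// Hedged sketch of the assumed types; only WriteToFile is on the interface,
// since the commented-out code above notes that SetName is not an interface member.
public interface IWriter
{
    void WriteToFile(string data);
}

// WriteData depends only on IWriter, so any implementation can be plugged in.
public class WriteData
{
    private readonly IWriter _writer;

    public WriteData(IWriter writer)
    {
        _writer = writer;
    }

    public void Write(string data)
    {
        // Filtering or other preprocessing could happen here before delegating to the writer.
        _writer.WriteToFile(data);
    }
}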
//Writes and stores the total seats available for events for further bookings.
private void WriteAndStoreData()
{
    StreamWriter WriteData;

    //Clears the file so a completely new data set can be stored.
    using (var fs = new FileStream("AvailPlaces", FileMode.Truncate))
    {
        fs.Close();
    }

    WriteData = File.AppendText("AvailPlaces");
    for (int i = 0; i < 10; i++)
    {
        for (int j = 0; j < 5; j++)
        {
            WriteData.WriteLine(result[i, j].ToString());
        }
    }
    WriteData.Close();
}
private IAsyncResult BeginWriteInternal(byte[] buffer, int offset, int count, AsyncCallback callback, object state, WriteData queued)
{
    _pendingWrite++;
    var result = _stream.BeginWrite(buffer, offset, count, ar =>
    {
        // callback can be executed even before the return value of BeginWriteInternal is set to this property
        queued.AsyncResult.ActualResult = ar;
        try
        {
            // so that we can call BeginWrite again
            _stream.EndWrite(ar);
        }
        catch (Exception exc)
        {
            queued.AsyncResult.Exception = exc;
        }

        // one down, another is good to go
        lock (_queue)
        {
            _pendingWrite--;
            while (_queue.Count > 0)
            {
                var data = _queue.Dequeue();
                try
                {
                    data.AsyncResult.ActualResult = BeginWriteInternal(data.Buffer, data.Offset, data.Count, data.Callback, data.State, data);
                    break;
                }
                catch (Exception exc)
                {
                    _pendingWrite--;
                    data.AsyncResult.Exception = exc;
                    callback(data.AsyncResult);
                    return;
                }
            }
            callback(queued.AsyncResult);
        }
    }, state);
    return result;
}
IAsyncResult BeginWriteInternal(byte[] buffer, int offset, int count, AsyncCallback callback, object state, WriteData queued)
{
    _pendingWrite++;
    var result = _stream.BeginWrite(buffer, offset, count, ar =>
    {
        // callback can be executed even before the return value of BeginWriteInternal is set to this property
        queued.AsyncResult.ActualResult = ar;
        try
        {
            // so that we can call BeginWrite again
            _stream.EndWrite(ar);
        }
        catch (Exception exc)
        {
            queued.AsyncResult.Exception = exc;
        }

        // one down, another is good to go
        lock (_queue)
        {
            _pendingWrite--;
            while (_queue.Count > 0)
            {
                var data = _queue.Dequeue();
                try
                {
                    data.AsyncResult.ActualResult = BeginWriteInternal(data.Buffer, data.Offset, data.Count, data.Callback, data.State, data);
                    break;
                }
                catch (Exception exc)
                {
                    _pendingWrite--;
                    data.AsyncResult.Exception = exc;
                    data.Callback(data.AsyncResult);
                }
            }
            callback(queued.AsyncResult);
        }
    }, state);

    // always return the wrapped async result.
    // this is especially important if the underlying stream completed the operation synchronously (hence "result.CompletedSynchronously" is true!)
    queued.AsyncResult.ActualResult = result;
    return queued.AsyncResult;
}
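For context, a caller drives this queued wrapper through the standard Begin/End pattern; a minimal usage sketch, assuming the wrapper instance is named stream and also overrides EndWrite to surface any stored exception (both names are assumptions, not shown in the examples above):

// Hypothetical usage of the queued write stream above.
byte[] payload = System.Text.Encoding.UTF8.GetBytes("hello");
stream.BeginWrite(payload, 0, payload.Length, ar =>
{
    // Overlapping writes are queued inside the wrapper instead of reaching the
    // inner stream concurrently; this callback fires once the write has completed.
    stream.EndWrite(ar);
}, null);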