/// <summary>The constructor must store the supplied header record unchanged.</summary>
public void public_constructor_assigns_given_header_record()
{
    var headerRecord = new HeaderRecord();

    var dataRecord = new DataRecord(headerRecord);

    Assert.NotNull(dataRecord.HeaderRecord);
    Assert.Same(headerRecord, dataRecord.HeaderRecord);
}
/// <summary>
/// Builds a skill-info row from a parsed data record: the raw record is kept in
/// ValueStr, the first column becomes the record id, and the step list starts empty.
/// NOTE(review): "ValueStr = dataRecord" relies on an implicit conversion or a
/// loosely-typed ValueStr member — confirm the intended type of ValueStr.
/// </summary>
public SkillInfoTableRecord(DataRecord dataRecord) { ValueStr = dataRecord; Id = ValueStr[0]; Step = new List<SkillStepTableRecord>(); }
/// <summary>
/// Rule RL: records may be separated by CRLF, CR, or LF, and the stream may end with
/// any of those markers or none at all. Every line-break/EOF-marker combination must
/// yield the same three records, including the empty trailing values on records 1 and 3.
/// </summary>
public void parser_complies_with_rule_RL() { var csv = "Kent,25,M,{0}Belinda,26,F{0}Tempany,0,F,{1}"; string[] lineBreaks = { "\r\n", "\r", "\n" }; string[] eofMarkers = { "\r\n", "\r", "\n", string.Empty }; foreach (var lineBreak in lineBreaks) { foreach (var eofMarker in eofMarkers) { var parser = this.CreateParserFromString(string.Format(csv, lineBreak, eofMarker)); var records = new DataRecord[3]; Assert.Equal(3, parser.ParseRecords(null, records, 0, records.Length)); Assert.Equal("Kent", records[0][0]); Assert.Equal("25", records[0][1]); Assert.Equal("M", records[0][2]); Assert.Equal("", records[0][3]); Assert.Equal("Belinda", records[1][0]); Assert.Equal("26", records[1][1]); Assert.Equal("F", records[1][2]); Assert.Equal("Tempany", records[2][0]); Assert.Equal("0", records[2][1]); Assert.Equal("F", records[2][2]); Assert.Equal("", records[2][3]); Assert.False(parser.HasMoreRecords); } } }
/// <summary>The string indexer must resolve each value via its column name.</summary>
public void indexer_get_returns_corresponding_value_for_column()
{
    var header = new HeaderRecord(new string[] { "Name", "Age", "Gender" });
    var data = new DataRecord(header, new string[] { "Kent", "25", "M" });

    Assert.Equal("Kent", data["Name"]);
    Assert.Equal("25", data["Age"]);
    Assert.Equal("M", data["Gender"]);
}
/// <summary>
/// Raises the RaiseDataEvent event with the supplied payload.
/// </summary>
/// <param name="e">The event data to deliver to subscribers.</param>
protected virtual void OnRaiseDataEvent(DataRecord.DataEvent e)
{
    // Null-conditional invocation is equivalent to the copy-to-temporary pattern:
    // the delegate reference is read once, so a concurrent unsubscribe cannot
    // cause a NullReferenceException between the check and the call.
    RaiseDataEvent?.Invoke(this, e);
}
/// <summary>A null column value run through the converter must map to the default 0.</summary>
public void TestSimpleConverterForNull()
{
    var sourceRecord = new DataRecord(new Tuple<string, object>("Id", null));
    _mapper.Column(model => model.Id, "Id", ctx =>
    {
        // Null column values fall back to "0" before parsing.
        var text = ctx.ColumnValue.As<string>();
        return int.Parse(text ?? "0");
    });

    var target = new TestUser();
    _mapper.Map(target, sourceRecord);

    Assert.Equal(0, target.Id);
}
/// <summary>An integer column converted via As&lt;string&gt; must map to its decimal text.</summary>
public void TestSimpleConverterForIntToString()
{
    var sourceRecord = new DataRecord(new Tuple<string, object>("Name", 42));
    _mapper.Column(model => model.Name, "Name", ctx => ctx.ColumnValue.As<string>());

    var target = new TestUser();
    _mapper.Map(target, sourceRecord);

    Assert.Equal("42", target.Name);
}
/// <summary>
/// Captures each vehicle column of the CSV record on this builder's properties and
/// returns a <see cref="VehicleAdditionalDetails"/> assembled from those values.
/// </summary>
/// <param name="data">A CSV data record containing the vehicle columns.</param>
public VehicleAdditionalDetails BuildFromCSV(DataRecord data)
{
    // Assign the properties first (callers may read them after the build).
    Manufacturer = data["MANUFACTURER"];
    Model = data["MODEL"];
    RegistrationYearAndLetter = data["REGYEAR"];
    NumberOfDoors = data["NOOFDOORS"];
    Transmission = data["TRANSMISSION"];
    VehicleDescription = data["VEHICLEDESCRIPTION"];

    return new VehicleAdditionalDetails(
        Manufacturer,
        Model,
        RegistrationYearAndLetter,
        NumberOfDoors,
        Transmission,
        VehicleDescription);
}
/// <summary>
/// Column lookup by name is case-sensitive: an unknown name and a wrong-cased name
/// must both raise ArgumentException, with the documented message text for 'GENDER'.
/// The exact exception message asserted here is part of the library's contract.
/// </summary>
public void indexer_get_throws_if_column_not_found() { var data = new DataRecord(new HeaderRecord(new string[] { "Name", "Age", "Gender" }), new string[] { "Kent", "25", "M" }); string s; Assert.Throws<ArgumentException>(() => s = data["foo"]); Assert.Throws<ArgumentException>(() => s = data["name"]); var ex = Assert.Throws<ArgumentException>(() => s = data["GENDER"]); Assert.Equal("No column named 'GENDER' was found in the header record.", ex.Message); }
/// <summary>Registering a primary key exposes it via PrimaryKeys/GetColumnName and maps its value.</summary>
public void TestPrimaryKey()
{
    _mapper.PrimaryKey(model => model.Id, "Id");

    Assert.Equal("Id", _mapper.PrimaryKeys.First());
    Assert.Equal("Id", _mapper.GetColumnName("Id"));

    var sourceRecord = new DataRecord();
    sourceRecord.Fields.Add(Tuple.Create<string, object>("Id", 10));

    var target = new TestUser();
    _mapper.Map(target, sourceRecord);

    Assert.Equal(10, target.Id);
}
/// <summary>
/// Serializes the record as JSON and sends it to the connected TCP client.
/// </summary>
/// <param name="dr">The record to transmit.</param>
/// <exception cref="System.ArgumentNullException">Thrown when no TCP server reference is configured.</exception>
protected override void sendRecord(DataRecord.DataRecord dr)
{
    System.Console.WriteLine("Sending.");
    if (serv == null)
    {
        throw new System.ArgumentNullException("TCPServer", "Client requires a reference to a TCP server.");
    }

    //System.Console.WriteLine("Sending data to {0}.", client);
    try
    {
        serv.SendToClient(dr.getRecordAsJson(), client);
    }
    catch (Exception ex)
    {
        // BUG FIX: the caught exception was previously discarded entirely, hiding
        // the cause of the send failure; include its message in the diagnostic.
        System.Console.WriteLine("Could not send data to {0}.", client);
        System.Console.WriteLine("Reason: {0}", ex.Message);
    }
}
/// <summary>
/// Processes the incoming <see cref="DataRecord.DataRecord"/>: runs the analysis only
/// when every required data field has been updated on the record, then appends this
/// processor's output data to it.
/// </summary>
/// <param name="incoming">The record to inspect and augment.</param>
public void process(ref DataRecord.DataRecord incoming)
{
    // Determine whether all required fields are present in the record's updated set.
    bool runme = true;
    foreach (string field in requiredDataFields)
    {
        if (!incoming.updated_fields.Contains(field))
        {
            runme = false;
        }
    }

    // BUG FIX: the original invoked run(incoming) a second time *unconditionally*,
    // so the analysis executed even when required fields were missing (and twice
    // when they were all present). The surrounding catch (Exception ex) { throw ex; }
    // was also removed: it only destroyed the stack trace without adding handling.
    if (runme)
    {
        this.run(incoming);
    }

    incoming.addData(data);
}
/// <summary>RFC 4180 §2.4: the final record need not carry a trailing line break.</summary>
public void parser_complies_with_2_4()
{
    var parser = this.CreateParserFromString("aaa,bbb,ccc");
    var records = new DataRecord[1];

    Assert.Equal(1, parser.ParseRecords(null, records, 0, records.Length));

    var expected = new[] { "aaa", "bbb", "ccc" };
    for (var column = 0; column < expected.Length; ++column)
    {
        Assert.Equal(expected[column], records[0][column]);
    }

    Assert.False(parser.HasMoreRecords);
}
/// <summary>
/// Runs the movement analysis on an incoming skeleton frame and writes the resulting
/// flail metric into this processor's output data under "ExerciseAdherence".
/// </summary>
/// <param name="incoming">Record expected to carry a "Skeleton" data entry.</param>
protected override void run(DataRecord.DataRecord incoming) {
    // Sometimes the data transmission is poor; skip empty skeleton frames.
    Types.Skeleton skel = incoming.getData("Skeleton");
    if (skel._empty) return;
    // Must add the avatar's skeleton before the player's.
    mov.addAvatar(skel);
    mov.addPlayer(skel);
    // Calculate the total angular displacement.
    mov.run();
    data["ExerciseAdherence"] = mov.getFlail();
}
/// <summary>
/// Regression test for issue #3: a record whose final value is empty ("val1,val2,")
/// must still be read, so exactly two data records follow the header.
/// NOTE(review): the verbatim CSV literal spans multiple physical lines in the real
/// file; its embedded line breaks are load-bearing — do not reflow this line.
/// </summary>
public void issue3_repro() { var csv = @"Col1,Col2,Col3 val1,val2,val3 val1,val2,"; using (var reader = CsvReader.FromCsvString(csv)) { reader.ReadHeaderRecord(); var buffer = new DataRecord[100]; var read = reader.ReadDataRecords(buffer, 0, buffer.Length); Assert.Equal(2, read); } }
/// <summary>
/// Async counterpart of issue3_repro: a trailing empty value must not swallow the
/// record, so two data records are read after the header.
/// NOTE(review): the verbatim CSV literal's embedded line breaks are load-bearing —
/// do not reflow this line.
/// </summary>
public async Task issue3_repro_async() { var csv = @"Col1,Col2,Col3 val1,val2,val3 val1,val2,"; using (var reader = CsvReader.FromCsvString(csv)) { await reader.ReadHeaderRecordAsync(); var buffer = new DataRecord[100]; var read = await reader.ReadDataRecordsAsync(buffer, 0, buffer.Length); Assert.Equal(2, read); } }
/// <summary>
/// For every lab test in the view model, finds the grid row whose "Type" cell matches
/// the test's type name and writes the test result into the cell keyed by the test's
/// timestamp.
/// </summary>
public void FillGridWithLabTestValues()
{
    foreach (Test test in this.viewModel.AllLabTests)
    {
        string TestName = test.TestType.Name;
        foreach (Record rec in xdgTests.Records)
        {
            DataRecord dr = rec as DataRecord;

            // BUG FIX: grid record collections can contain non-data records
            // (e.g. group-by or summary rows); the as-cast then yields null and
            // the original code dereferenced it. Skip anything that is not a
            // DataRecord instead of crashing.
            if (dr == null)
            {
                continue;
            }

            if (dr.Cells["Type"].Value.ToString() == TestName)
            {
                dr.Cells[test.Timestamp.ToString()].Value = test.Result;
                break;
            }
        }
    }
}
/// <summary>
/// Duplicate of the async issue #3 repro above (likely from a second test fixture in
/// the original repository): a trailing empty value must still yield a full record.
/// NOTE(review): the verbatim CSV literal's embedded line breaks are load-bearing —
/// do not reflow this line.
/// </summary>
public async Task issue3_repro_async() { var csv = @"Col1,Col2,Col3 val1,val2,val3 val1,val2,"; using (var reader = CsvReader.FromCsvString(csv)) { await reader.ReadHeaderRecordAsync(); var buffer = new DataRecord[100]; var read = await reader.ReadDataRecordsAsync(buffer, 0, buffer.Length); Assert.Equal(2, read); } }
/// <summary>
/// Parses a WCS DescribeCoverage XML response and populates the record with: the
/// WGS84 bounding box string (lower + upper corners, space → comma separated), pixel
/// width/height derived from the native bounding box, and a Unity Rect built from the
/// WGS84 corners. Commented-out sections below are leftover power-of-two texture
/// sizing and resolution experiments.
/// NOTE(review): if CanDeserialize returns false, the method proceeds with an empty
/// CoverageDescriptions and will throw on the nested property access — confirm the
/// input is always deserializable or add a guard.
/// </summary>
public void parseDescribeCoverage(DataRecord Record, string Str) { Console.WriteLine(Str); var reader = System.Xml.XmlTextReader.Create(new System.IO.StringReader(Str)); XmlSerializer serial = new XmlSerializer(typeof(DescribeCoverageWCS.CoverageDescriptions)); DescribeCoverageWCS.CoverageDescriptions testc = new DescribeCoverageWCS.CoverageDescriptions(); if (serial.CanDeserialize(reader)) { testc = (DescribeCoverageWCS.CoverageDescriptions)serial.Deserialize(reader); } string bbox = (testc.CoverageDescription.Domain.SpatialDomain.WGS84BoundingBox.LowerCorner.Replace(" ", ",") + "," + testc.CoverageDescription.Domain.SpatialDomain.WGS84BoundingBox.UpperCorner.Replace(" ", ",")); Record.bbox = bbox; int[] dim = grab_dimensions(testc.CoverageDescription.Domain.SpatialDomain.BoundingBox[0].LowerCorner, testc.CoverageDescription.Domain.SpatialDomain.BoundingBox[0].UpperCorner); Record.width = dim[0]; Record.height = dim[1]; ///// This should be passed to GetCoverage //int width = dim[0]; //int height = dim[1]; Vector2[] utmWorldDimensions = grab_dimensions_float(testc.CoverageDescription.Domain.SpatialDomain.WGS84BoundingBox.LowerCorner, testc.CoverageDescription.Domain.SpatialDomain.WGS84BoundingBox.UpperCorner); //Debug.LogError(bbox + " " + manager.records[key].bbox); string epsg = "EPSG:" + "4326"; Record.boundingBox = new Rect(utmWorldDimensions[1].x, utmWorldDimensions[0].y - Mathf.Abs(utmWorldDimensions[0].y - utmWorldDimensions[1].y), Mathf.Abs(utmWorldDimensions[0].x - utmWorldDimensions[1].x), Mathf.Abs(utmWorldDimensions[0].y - utmWorldDimensions[1].y)); // Debug.LogError("Bounding BOX: " + manager.records[key].boundingBox); //int pot = Mathf.NextPowerOfTwo(width); //int pot2 = Mathf.NextPowerOfTwo(height); //pot = Mathf.Min(new int[] { pot, pot2 }); //// This is a hard fixed addition. 
//if (pot >= 2048) //{ // pot = 1024; //} //pot++; // Need to figure out a way to get the resolution //Record.resolution = new Vector2(Mathf.Abs(utmWorldDimensions[0].x - utmWorldDimensions[1].x) / dim[0], -Mathf.Abs(utmWorldDimensions[0].y - utmWorldDimensions[1].y) / dim[1]);//toVector2(testc.CoverageDescription.Domain.SpatialDomain.GridCRS.GridOffsets,new char[]{' '}); }
/// <summary>
/// Double-click handler for the position watch grid: walks up from the clicked
/// element to its DataRecordPresenter, selects/activates that record, and — when the
/// record's data item is a StockPositionViewModel — builds a pre-filled Sell
/// transaction (account, ticker, position, full quantity) and dispatches it through
/// BuySellCommand if that command is a RelayCommand&lt;StockTransactionViewModel&gt;.
/// </summary>
private void positionWatchGrid_MouseDoubleClick(object sender, MouseButtonEventArgs e) { DependencyObject source = e.OriginalSource as DependencyObject; if (source == null) { return; } if (source is FrameworkElement) { DataRecordPresenter drp = Infragistics.Windows.Utilities.GetAncestorFromType(source, typeof(DataRecordPresenter), true) as DataRecordPresenter; if (drp == null) { return; } if (drp.Record != null) { drp.Record.IsSelected = true; drp.IsActive = true; DataRecord r = drp.DataRecord; if (r != null) { StockPositionViewModel stockDetails = r.DataItem as StockPositionViewModel; if (stockDetails != null) { StockTransactionViewModel stockTransaction = new StockTransactionViewModel(); stockTransaction.Account = _vm.SelectedAccount; stockTransaction.StockTickerDetailsViewModel = stockDetails.StockTickerDetails; stockTransaction.StockPosition = stockDetails; stockTransaction.Quantity = stockDetails.Quantity; stockTransaction.TransactionType = TransactionType.Sell; if (this.BuySellCommand != null && this.BuySellCommand is RelayCommand <StockTransactionViewModel> ) { this.BuySellCommand.Execute(stockTransaction); } } } } } }
/// <summary>
/// Upsert with a stale ETag: the storage layer is set up to throw a
/// PreconditionFailed DocumentClientException, which the target must surface as a
/// ConflictingResourceException; exactly one upsert attempt must be made.
/// </summary>
public void ItDoesntModifyARecordIfTheEtagDoesntMatch() { // Arrange var record = new DataRecord(); record.SetETag("foo"); this.cosmosDbSql.Setup(x => x.UpsertAsync(this.cosmosDbSqlClient.Object, this.storageConfig, It.IsAny <DataRecord>(), record.ETag)) .Throws(BuildDocumentClientException(HttpStatusCode.PreconditionFailed)); // Act Assert.ThrowsAsync <ConflictingResourceException>( async() => await this.target.UpsertAsync(record)).CompleteOrTimeout(); // Assert this.cosmosDbSql.Verify(x => x.UpsertAsync(this.cosmosDbSqlClient.Object, this.storageConfig, It.IsAny <DataRecord>(), record.ETag), Times.Once); }
/// <summary>
/// Stores the raw WCS capabilities XML on the record and, when it deserializes as a
/// GetCapabilites.Capabilities document, copies its OperationsMetadata onto the record.
/// </summary>
/// <param name="Record">Record receiving the raw XML and parsed operations.</param>
/// <param name="Str">The capabilities XML payload.</param>
void ParseWCSCapabilities(DataRecord Record, string Str)
{
    Record.WCSCapabilities = Str;

    var serializer = new XmlSerializer(typeof(GetCapabilites.Capabilities));
    var xmlReader = System.Xml.XmlTextReader.Create(new System.IO.StringReader(Str));

    // Only records whose payload is a valid capabilities document get operations.
    if (serializer.CanDeserialize(xmlReader))
    {
        var parsed = (GetCapabilites.Capabilities)serializer.Deserialize(xmlReader);
        Record.WCSOperations = parsed.OperationsMetadata;
    }
}
/// <summary>
/// Constructs a vector from its record, wiring up the optional VRPT (vector record
/// pointer) and ATTV (attribute) fields when they are present.
/// </summary>
/// <param name="_namekey">Identifying key of this vector.</param>
/// <param name="_VectorRecord">The backing data record.</param>
public Vector(NAMEkey _namekey, DataRecord _VectorRecord)
{
    namekey = _namekey;
    VectorRecord = _VectorRecord;

    // Optional VRPT field: pointers to related vector records.
    var pointerField = _VectorRecord.Fields.GetFieldByTag("VRPT");
    if (pointerField != null)
    {
        enhVectorPtrs = new VectorRecordPointer(pointerField.subFields);
    }

    // Optional ATTV field: this vector's attributes.
    var attributeField = _VectorRecord.Fields.GetFieldByTag("ATTV");
    if (attributeField != null)
    {
        Attributes = GetAttributes(attributeField);
    }
}
/// <summary>Micro-benchmark guard: creating one million empty records must be observable.</summary>
public void data_record_creation()
{
    var repeatCount = 1000000;
    var creationCount = 0L;

    for (var iteration = 0; iteration < repeatCount; ++iteration)
    {
        var record = new DataRecord();

        // ensure optimization doesn't remove the object creation
        creationCount += record.Count;
        creationCount++;
    }

    Assert.Equal(repeatCount, creationCount);
}
/// <summary>
/// Returns a window of records starting at <paramref name="StartingIndex"/>, clamped
/// to the end of the backing list.
/// </summary>
/// <param name="StartingIndex">Zero-based index of the first record to return.</param>
/// <param name="NumberOfRecords">Maximum number of records to return.</param>
/// <param name="FilterTag">NOTE(review): currently unused — confirm whether filtering was intended.</param>
public ICollection<object> GetRecordsBy(int StartingIndex, int NumberOfRecords, object FilterTag)
{
    // Clamp the requested window so it never runs past the backing list.
    var endExclusive = StartingIndex + NumberOfRecords;
    if (endExclusive > _records.Count)
    {
        endExclusive = _records.Count;
    }

    var window = new List<object>();
    for (var index = StartingIndex; index < endExclusive; index++)
    {
        window.Add(_records[index]);
    }

    return window;
}
/// <summary>
/// Copies all remaining records in <paramref name="this"/> to <paramref name="destination"/>.
/// </summary>
/// <param name="this">The data source.</param>
/// <param name="destination">The data destination.</param>
/// <returns>The number of records written to <paramref name="destination"/>.</returns>
public static int CopyTo(this CsvReader @this, CsvWriter destination)
{
    @this.AssertNotNull("@this");
    destination.AssertNotNull("destination");

    var total = 0;
    var batch = new DataRecord[16];

    // Read in fixed-size batches until the source is exhausted.
    for (var count = @this.ReadDataRecords(batch, 0, batch.Length);
         count != 0;
         count = @this.ReadDataRecords(batch, 0, batch.Length))
    {
        destination.WriteRecords(batch, 0, count);
        total += count;
    }

    return total;
}
/// <summary>Duplicate micro-benchmark guard: one million record creations must not be elided.</summary>
public void data_record_creation()
{
    var repeatCount = 1000000;
    var creationCount = 0L;

    var remaining = repeatCount;
    while (remaining-- > 0)
    {
        var record = new DataRecord();

        // ensure optimization doesn't remove the object creation
        creationCount += record.Count;
        ++creationCount;
    }

    Assert.Equal(repeatCount, creationCount);
}
/// <summary>Creating a record must issue exactly one InsertOrMerge table operation.</summary>
public void ItCanCreateNewRecords()
{
    // Arrange
    var newRecord = new DataRecord("id");
    TableResult storageResponse = BuildResponseWithContent();
    this.tableStorage
        .Setup(x => x.ExecuteAsync(
            It.IsAny<CloudTable>(),
            It.Is<TableOperation>(o => o.OperationType == TableOperationType.InsertOrMerge)))
        .ReturnsAsync(storageResponse);

    // Act
    this.target.CreateAsync(newRecord).CompleteOrTimeout();

    // Assert
    this.tableStorage.Verify(
        x => x.ExecuteAsync(
            It.IsAny<CloudTable>(),
            It.Is<TableOperation>(o => o.OperationType == TableOperationType.InsertOrMerge)),
        Times.Once);
}
/// <summary>
/// Decodes SG3D sounding triplets (Y, X, depth) from the first vector pointer's data
/// record. Each coordinate is four consecutive bytes accumulated with byte i shifted
/// left 8*i bits, then divided by the base file's coordinate (Y/X) or sounding
/// (depth) multiplication factor. Decoding stops at the unit terminator or when the
/// byte buffer is exhausted.
/// </summary>
public List <SoundingData> ExtractSoundings() { DataRecord dr = VectorPtrs[0].Vector.DataRecord; var sg3d = dr.Fields.GetFieldByTag("SG3D"); var bytes = sg3d.Bytes; var length = bytes.Count() - 1; int currentIndex = 0; var soundingDatas = new List <SoundingData>(); while (currentIndex < length && bytes[currentIndex] != DataField.UnitTerminator) { var soundingData = new SoundingData(); for (int i = 0; i < 4; i++) { int tempVal = bytes[currentIndex++]; for (int j = 0; j < i; j++) { tempVal = tempVal << 8; } soundingData.Y += tempVal; } soundingData.Y /= baseFile.coordinateMultiplicationFactor; for (int i = 0; i < 4; i++) { int tempVal = bytes[currentIndex++]; for (int j = 0; j < i; j++) { tempVal = tempVal << 8; } soundingData.X += tempVal; } soundingData.X /= baseFile.coordinateMultiplicationFactor; for (int i = 0; i < 4; i++) { int tempVal = bytes[currentIndex++]; for (int j = 0; j < i; j++) { tempVal = tempVal << 8; } soundingData.depth += tempVal; } soundingData.depth /= baseFile.soundingMultiplicationFactor; soundingDatas.Add(soundingData); } return(soundingDatas); }
/// <summary>
/// Walks one index past the end of MyList (the loop runs while ctr &lt;= TotalCount),
/// which triggers an out-of-range failure. Given the action's name, this appears to
/// be a deliberate diagnostic/repro endpoint — NOTE(review): confirm intent before
/// "fixing" the loop bound to ctr &lt; TotalCount.
/// </summary>
public IActionResult OutOfRange() { var sb = new StringBuilder(); var list = new DataRecord(); for (int ctr = 0; ctr <= list.TotalCount; ctr++) { sb.Append(string.Format("Index {0}: {1}\r\n", ctr, list.MyList[ctr].LastName)); if (list.MyList[ctr].LastName == "test") { break; } } return(Ok(sb.ToString())); }
/// <summary>
/// With PreserveLeadingWhiteSpace = false, spaces before each value must be stripped.
/// NOTE(review): the verbatim CSV literal's leading spaces and embedded line break
/// are load-bearing test data — do not reflow this line.
/// </summary>
public void leading_whitespace_is_discarded_when_preserve_leading_whitespace_is_false() { var csv = @" value1, value2, value3 value4, value5, value6"; var parser = this.CreateParserFromString(csv); parser.PreserveLeadingWhiteSpace = false; var records = new DataRecord[2]; Assert.Equal(2, parser.ParseRecords(null, records, 0, records.Length)); Assert.Equal("value1", records[0][0]); Assert.Equal("value2", records[0][1]); Assert.Equal("value3", records[0][2]); Assert.Equal("value4", records[1][0]); Assert.Equal("value5", records[1][1]); Assert.Equal("value6", records[1][2]); }
/// <summary>
/// With PreserveTrailingWhiteSpace = true, spaces after each value must survive
/// parsing exactly as written.
/// NOTE(review): the verbatim CSV literal's trailing spaces and embedded line break
/// are load-bearing test data — do not reflow this line.
/// </summary>
public void trailing_whitespace_is_retained_when_preserve_trailing_whitespace_is_true() { var csv = @"value1 ,value2 ,value3 value4,value5 ,value6 "; var parser = this.CreateParserFromString(csv); parser.PreserveTrailingWhiteSpace = true; var records = new DataRecord[2]; Assert.Equal(2, parser.ParseRecords(null, records, 0, records.Length)); Assert.Equal("value1 ", records[0][0]); Assert.Equal("value2 ", records[0][1]); Assert.Equal("value3 ", records[0][2]); Assert.Equal("value4", records[1][0]); Assert.Equal("value5 ", records[1][1]); Assert.Equal("value6 ", records[1][2]); }
/// <summary>
/// Loads the null bitmap of a data record from its page bytes.
/// </summary>
/// <param name="dataRecord">The data record whose null bitmap is populated.</param>
private static void LoadNullBitmap(DataRecord dataRecord)
{
    // The null bitmap sits immediately after the 2-byte column count, which is
    // itself located at ColumnCountOffset within the record's slot.
    var bitmapPosition = dataRecord.SlotOffset + dataRecord.ColumnCountOffset + sizeof(short);

    var bitmapBytes = new byte[dataRecord.NullBitmapSize];
    Array.Copy(dataRecord.Page.PageData, bitmapPosition, bitmapBytes, 0, dataRecord.NullBitmapSize);

    dataRecord.NullBitmap = new BitArray(bitmapBytes);
    dataRecord.Mark("NullBitmapDescription", bitmapPosition, dataRecord.NullBitmapSize);
}
/// <summary>
/// Streams the sorted output file once, verifying global ordering record-by-record.
/// Reports "Not sorted" with the offending line number and returns early on the
/// first out-of-order pair; otherwise prints min/max numbers, line count, and the
/// count of consecutive equal values. Progress is shown every 100 MB of input. The
/// previous record is copied into a reusable char buffer to avoid per-line allocation.
/// </summary>
private void EnsureSorted() { var timer = Stopwatch.StartNew(); using var reader = new DataRecordReader(_options.Sorted); var charBuffer = new char[Constants.ValueMaxLength]; var counter = new Counter(); var fileLength = reader.Length; var readerPositionPrev = 0L; var repeatsCount = 0L; var dataRecordPrev = new DataRecord(long.MinValue, ReadOnlySpan <char> .Empty); foreach (var dataRecord in reader) { if (dataRecordPrev.CompareTo(dataRecord) == 1) { Console.WriteLine($"Not sorted data in line {counter.LinesCount}"); return; } var readerPosition = reader.Position; if (readerPosition - readerPositionPrev > Constants.HundredMegabytes) { ShowProgress(readerPosition, fileLength, timer.Elapsed.TotalSeconds); readerPositionPrev = readerPosition; } if (dataRecordPrev.Value.Equals(dataRecord.Value, Constants.ValueComparison)) { repeatsCount++; } counter.SetMinMax(dataRecord.Number); counter.IncrementLineCount(); dataRecordPrev = dataRecord.Copy(charBuffer); } Console.WriteLine(); Console.WriteLine("Data is really sorted"); Console.WriteLine($"Numbers between {counter.MinNumber} and {counter.MaxNumber}"); Console.WriteLine($"Lines count {counter.LinesCount}"); Console.WriteLine($"Repeats count {repeatsCount}"); }
/// <summary>
/// Inserts a record into DATA_RECORD using a fully parameterized command (safe
/// against SQL injection), assigns the generated rowid back onto
/// <paramref name="rec"/>.Id, and reports failure through <paramref name="eMessage"/>
/// instead of throwing.
/// </summary>
/// <returns>true on success; false with a populated error message otherwise.</returns>
public static bool TryAddRecord(long dataId, DataRecord rec, string pswd, out string eMessage) { eMessage = ""; try { using (var client = new SQLiteConnection(GetConnectionString(pswd, false))) { client.Open(); Create(client); var cmd = "INSERT INTO DATA_RECORD ('DataId', 'Name', 'Value', 'Description') VALUES(@DataId, @Name, @Value, @Description)"; using (var insertMsgCommand = new SQLiteCommand(cmd, client)) { insertMsgCommand.Parameters.Add(new SQLiteParameter("@DataId", (int)dataId) { DbType = DbType.Int32 }); insertMsgCommand.Parameters.Add(new SQLiteParameter("@Name", rec.Name) { DbType = DbType.String }); insertMsgCommand.Parameters.Add(new SQLiteParameter("@Value", rec.Value) { DbType = DbType.String }); insertMsgCommand.Parameters.Add(new SQLiteParameter("@Description", rec.Description) { DbType = DbType.String }); insertMsgCommand.ExecuteNonQuery(); } rec.Id = client.LastInsertRowId; } return(true); } catch (Exception ex) { eMessage = string.Format("Ошибка сохранения новой записи: {0}", ex.Message); return(false); } }
/// <summary>
/// Inserts a new record into the database: assigns the next member id under the
/// write mutex, appends the record to the XML store, and logs the added fields.
/// </summary>
/// <param name="record">Record to insert; its MemberID is assigned here.</param>
/// <remarks>
/// NOTE(review): OutOfMemoryException is reserved for the runtime — a capacity
/// overflow is better modeled as InvalidOperationException. The thrown type is left
/// unchanged because callers may catch it; confirm before altering the contract.
/// </remarks>
public void Insert(DataRecord record) { if (this.CurrentMemberID + 1 > kDefaultMaxCapacity) { throw new OutOfMemoryException(); } this.WriteMutex.WaitOne(); ++this.CurrentMemberID; record.MemberID = CurrentMemberID; this.XMLAppendRecord(record); Console.WriteLine("Record Added"); Console.WriteLine("\tId: {0}\n\tFirstName: {1}\n\tLastName: {2}\n\tDateOfBirth: {3}\n", record.MemberID, record.FirstName, record.LastName, record.DateOfBirth); this.WriteMutex.ReleaseMutex(); }
/// <summary>
/// Copies all remaining records in <paramref name="this"/> to <paramref name="destination"/>.
/// </summary>
/// <param name="this">The data source.</param>
/// <param name="destination">The data destination.</param>
/// <returns>The number of records written to <paramref name="destination"/>.</returns>
public static int CopyTo(this CsvReader @this, CsvWriter destination)
{
    @this.AssertNotNull("@this");
    destination.AssertNotNull("destination");

    var written = 0;
    var chunk = new DataRecord[16];

    while (true)
    {
        // Drain the reader in fixed-size chunks; zero reads signals exhaustion.
        var fetched = @this.ReadDataRecords(chunk, 0, chunk.Length);
        if (fetched == 0)
        {
            break;
        }

        destination.WriteRecords(chunk, 0, fetched);
        written += fetched;
    }

    return written;
}
/// <summary>
/// Builds an HTML table from MyList via string concatenation.
/// NOTE(review): "html += html + ..." doubles the accumulated markup on every append
/// (exponential growth, duplicated rows, and an unclosed tbody). If this endpoint is
/// a deliberate string-concatenation stress demo that is the point; otherwise each
/// append should read "html += ..." and a StringBuilder should be used. Confirm
/// intent before changing.
/// </summary>
public IActionResult ConcatString() { var html = "<table cellpadding=\"0\" cellspacing=\"0\"><tbody><tr>"; var newrocord = new DataRecord() { FirstName = "Marco", LastName = "Polo", Address1 = "Lichfield Road", Address2 = "", City = "", State = "Indiana" }; foreach (var rec in newrocord.MyList) { html += html + string.Format("<tr><td>{0}</td><td>{1}</td><td>{2}</td><td>{3}</td><td>{4}</td><td>{5}</td> </tr>", rec.FirstName, rec.LastName, rec.Address1, rec.Address2, rec.City, rec.State); } html += html + "</table>"; return(Ok(html)); }
/// <summary>
/// ParseRecordsAsync must honor the caller-supplied offset: the first two records
/// land at indices 5-6 (index 0 stays null), the next two at indices 10-11.
/// NOTE(review): the verbatim CSV literal's embedded line breaks are load-bearing —
/// do not reflow this line.
/// </summary>
public async Task parse_records_async_populates_the_buffer_from_the_specified_offset() { var csv = @"first second third fourth"; var parser = this.CreateParserFromString(csv); var records = new DataRecord[100]; Assert.Equal(2, await parser.ParseRecordsAsync(null, records, 5, 2)); Assert.Null(records[0]); Assert.Equal("first", records[5][0]); Assert.Equal("second", records[6][0]); Assert.Equal(2, parser.ParseRecords(null, records, 10, 2)); Assert.Equal("third", records[10][0]); Assert.Equal("fourth", records[11][0]); }
/// <summary>
/// Asynchronously copies all remaining records in <paramref name="this"/> to <paramref name="destination"/>.
/// </summary>
/// <param name="this">The data source.</param>
/// <param name="destination">The data destination.</param>
/// <returns>The number of records written to <paramref name="destination"/>.</returns>
public async static Task<int> CopyToAsync(this CsvReader @this, CsvWriter destination)
{
    @this.AssertNotNull("@this");
    destination.AssertNotNull("destination");

    var copied = 0;
    var batch = new DataRecord[16];

    while (true)
    {
        // Library code: stay off the captured synchronization context.
        var count = await @this.ReadDataRecordsAsync(batch, 0, batch.Length).ConfigureAwait(false);
        if (count == 0)
        {
            break;
        }

        await destination.WriteRecordsAsync(batch, 0, count).ConfigureAwait(false);
        copied += count;
    }

    return copied;
}
/// <summary>
/// With ValueDelimiter = null, quote characters lose their special meaning and are
/// kept as ordinary value content (so quoted spans no longer suppress separators or
/// line breaks). The doubled quotes in the literal are verbatim-string escapes, not
/// CSV escapes — the exact bytes are load-bearing; do not reflow this line.
/// </summary>
public void value_delimiter_can_be_set_to_null_to_disable_value_delimiting() { var csv = @""" value1 "", ""value2"""""",""value 3 "",""value 4"""; var parser = this.CreateParserFromString(csv); parser.ValueDelimiter = null; var records = new DataRecord[2]; Assert.Equal(2, parser.ParseRecords(null, records, 0, records.Length)); Assert.Equal(@""" value1 """, records[0][0]); Assert.Equal(@"""value2""""""", records[0][1]); Assert.Equal(@"""value 3 """, records[0][2]); Assert.Equal(@"""value", records[0][3]); Assert.Equal(@"4""", records[1][0]); }
// Reviewed: reads up to maximumRecords data records from the reader in batches of 16,
// creating table columns from the CSV header only when the table defines none, and
// throws via exceptionHelper when a record carries more values than there are columns.
// Returns the number of rows actually added. No code changes.
/// <summary> /// Populates <paramref name="this"/> with data read from <paramref name="csvReader"/>. /// </summary> /// <remarks> /// <para> /// If <paramref name="this"/> has columns defined, those columns will be used when populating the data. If no columns have been defined, <paramref name="csvReader"/> must have a /// <see cref="HeaderRecord"/>, which is then used to define the columns for <paramref name="this"/>. If any data record has more values than can fit into the columns defined on /// <paramref name="this"/>, an exception is thrown. /// </para> /// </remarks> /// <param name="this"> /// The <see cref="DataTable"/>. /// </param> /// <param name="csvReader"> /// The <see cref="CsvReader"/>. /// </param> /// <param name="maximumRecords"> /// The maximum number of records to read and add to <paramref name="this"/>. /// </param> /// <returns> /// The number of rows added to <paramref name="this"/> (and therefore the number of data records read from <paramref name="csvReader"/>). 
/// </returns> public static int Fill(this DataTable @this, CsvReader csvReader, int?maximumRecords) { @this.AssertNotNull("@this"); csvReader.AssertNotNull("csvReader"); exceptionHelper.ResolveAndThrowIf(maximumRecords.GetValueOrDefault() < 0, "maximumRecordsMustBePositive"); if (@this.Columns.Count == 0) { // table has no columns, so we need to use the CSV header record to populate them exceptionHelper.ResolveAndThrowIf(csvReader.HeaderRecord == null, "noColumnsAndNoHeaderRecord"); foreach (var columnName in csvReader.HeaderRecord) { @this.Columns.Add(columnName); } } var remaining = maximumRecords.GetValueOrDefault(int.MaxValue); var buffer = new DataRecord[16]; while (remaining > 0) { var read = csvReader.ReadDataRecords(buffer, 0, Math.Min(buffer.Length, remaining)); if (read == 0) { // no more data break; } for (var i = 0; i < read; ++i) { var record = buffer[i]; exceptionHelper.ResolveAndThrowIf(record.Count > @this.Columns.Count, "moreValuesThanColumns", @this.Columns.Count, record.Count); var recordAsStrings = new string[record.Count]; record.CopyTo(recordAsStrings, 0); @this.Rows.Add(recordAsStrings); } remaining -= read; } return(maximumRecords.GetValueOrDefault(int.MaxValue) - remaining); }
/// <summary>
/// Handles request data from a proxied client: always tallies the uploaded byte
/// count under the activeRows lock; when recording is enabled, attributes the bytes
/// to the client's grid-row DataRecord and, if raw capture is on, appends the
/// payload to a per-record temp file (I/O failures are logged, never thrown). Rows
/// for unknown client IDs are ignored.
/// </summary>
void server_RequestDataReceived(object sender, ArraySegment <byte> e) { if (!checkRecord.Checked) { lock (activeRows) { bytesUploaded += e.Count; } return; } var client = (Net.AssetProxy.Client)sender; DataGridViewRow row; lock (activeRows) { bytesUploaded += e.Count; if (!activeRows.TryGetValue(client.ID, out row)) { return; } } var c = row.Cells[columnResponse.Index]; DataRecord data = (DataRecord)c.Value; data.bytes += e.Count; if (checkRecordData.Checked) { try { using (var stream = File.Open(Path.Combine(PATH_TEMP, ((DataRecord)row.Cells[columnResponse.Index].Value).id.ToString()), FileMode.Append, FileAccess.Write, FileShare.ReadWrite)) { stream.Write(e.Array, e.Offset, e.Count); } } catch (Exception ex) { Util.Logging.Log(ex); } } }
/// <summary>
/// Emission-filter combo selection changed: when the combo's data record belongs to
/// the currently active IndicatorItem (matched by IndicatorID) and the new value is
/// a FilterContainer, builds an IndicatorContainer with the new emission filter
/// position (other fields copied from the active indicator) and persists it.
/// NOTE(review): the UpdateIndicator result ("succcess" — typo, and unused) is
/// ignored; confirm update failures really need no user feedback.
/// </summary>
private void EmissionFilter_SelectedItemChanged(object sender, RoutedPropertyChangedEventArgs <object> e) { XamComboEditor xce = (XamComboEditor)sender; DataRecord record = (DataRecord)xce.DataContext; if (record == null) { return; } IndicatorItem indItem = (IndicatorItem)record.DataItem; if (xamDataGrid.ActiveDataItem == null) { return; } if (xamDataGrid.ActiveDataItem.GetType() == typeof(IndicatorItem)) { IndicatorItem indicator = (IndicatorItem)xamDataGrid.ActiveDataItem; if (e.NewValue == null) { return; } if (e.NewValue.GetType() == typeof(FilterContainer)) { FilterContainer filter = (FilterContainer)e.NewValue; if (indicator.IndicatorID != 0 && indicator.IndicatorID == indItem.IndicatorID) // the 2nd condition makes sure the event is for the currently active Indicator { IndicatorContainer ic = new IndicatorContainer(); ic.Description = indicator.Description; ic.EmissionsFilterPosition = filter.PositionNumber; ic.ExcitationFilterPosition = indicator.ExcitationFilterPosition; ic.IndicatorID = indicator.IndicatorID; ic.MethodID = indicator.MethodID; ic.SignalType = indicator.SignalType; bool succcess = wgDB.UpdateIndicator(ic); } } } }
/// <summary>
/// Test helper: configures the simulations storage mock so CreateAsync returns a
/// record wrapping the given simulation (a disabled simulation with a fresh Guid id
/// is fabricated when none is supplied).
/// </summary>
private void StorageReturnsSimulationRecordOnCreate(SimulationModel simulation = null)
{
    // Fabricate a default simulation when the caller didn't provide one.
    simulation = simulation ?? new SimulationModel
    {
        Id = Guid.NewGuid().ToString(),
        Enabled = false
    };

    IDataRecord storedRecord = new DataRecord
    {
        Id = simulation.Id,
        Data = JsonConvert.SerializeObject(simulation),
    };

    this.simulationsStorage
        .Setup(x => x.CreateAsync(It.IsAny<IDataRecord>()))
        .ReturnsAsync(storedRecord);
}
/// <summary>Adding right-side data persists a record carrying the request's id, side, and payload.</summary>
public void AddRightDataTest()
{
    // Capture the record handed to the repository.
    DataRecord captured = null;
    _mockRepository
        .Setup(r => r.Insert(It.IsAny<DataRecord>()))
        .Callback<DataRecord>(r => captured = r);

    var payload = new byte[] { 1, 2, 3 };
    var request = new AddDataRequest { DiffId = 2, Data = payload, Side = DataSide.Right };

    var response = new AddDataCommand(_mockRepository.Object)
        .Handle(request, CancellationToken.None)
        .Result;

    response.Success.Should().BeTrue();
    captured.Side.Should().Be(DataSide.Right);
    captured.DiffId.Should().Be(2);
    captured.Data.Should().BeSameAs(payload);
}
/// <summary>
/// Asynchronously copies all remaining records in <paramref name="this"/> to <paramref name="destination"/>.
/// </summary>
/// <param name="this">The data source.</param>
/// <param name="destination">The data destination.</param>
/// <returns>The number of records written to <paramref name="destination"/>.</returns>
public async static Task <int> CopyToAsync(this CsvReader @this, CsvWriter destination)
{
    @this.AssertNotNull("@this");
    destination.AssertNotNull("destination");

    var recordsWritten = 0;
    var scratch = new DataRecord[16];

    for (;;)
    {
        // ConfigureAwait(false): library code, no synchronization context needed.
        var pulled = await @this.ReadDataRecordsAsync(scratch, 0, scratch.Length).ConfigureAwait(false);
        if (pulled == 0)
        {
            return recordsWritten;
        }

        await destination.WriteRecordsAsync(scratch, 0, pulled).ConfigureAwait(false);
        recordsWritten += pulled;
    }
}
/// <summary>
/// Rule VCW: whitespace (spaces and tabs) inside quoted values must be preserved
/// verbatim, while unquoted values have their surrounding whitespace trimmed. The
/// quote character is spliced in via string.Format with the '"' char argument.
/// </summary>
public void parser_complies_with_rule_VCW() { var csv = string.Format("{0} Kent {0},25 , M{1}{0}\tBelinda\t{0},26\t,\tF{1}", '"', Environment.NewLine); var parser = this.CreateParserFromString(csv); var records = new DataRecord[2]; Assert.Equal(2, parser.ParseRecords(null, records, 0, records.Length)); Assert.Equal(" Kent ", records[0][0]); Assert.Equal("25", records[0][1]); Assert.Equal("M", records[0][2]); Assert.Equal("\tBelinda\t", records[1][0]); Assert.Equal("26", records[1][1]); Assert.Equal("F", records[1][2]); Assert.False(parser.HasMoreRecords); }
/// <summary>
/// Captures the contact columns of the CSV record on this builder's properties and
/// returns a <see cref="ContactDetails"/> assembled from those values.
/// </summary>
/// <param name="data">A CSV data record containing the contact columns.</param>
public ContactDetails BuildFromCSV(DataRecord data)
{
    // Assign the properties first (callers may read them after the build);
    // the opt-in columns are boolean-typed text parsed via Convert.ToBoolean.
    Email = data["EMAIL"];
    TelephoneNumber = data["TELEPHONENO"];
    EmailOptIn = Convert.ToBoolean(data["EMAILOPTIN"]);
    TelephoneOptIn = Convert.ToBoolean(data["TELOPTIN"]);
    SMSOptIn = Convert.ToBoolean(data["SMSOPTIN"]);
    PostOptIn = Convert.ToBoolean(data["POSTOPTIN"]);

    return new ContactDetails()
    {
        Email = Email,
        TelephoneNumber = TelephoneNumber,
        EmailOptIn = EmailOptIn,
        TelephoneOptIn = TelephoneOptIn,
        SMSOptIn = SMSOptIn,
        PostOptIn = PostOptIn,
    };
}
/// <summary>
/// With PreserveLeadingWhiteSpace = true, spaces before each value must survive
/// parsing exactly as written.
/// NOTE(review): the verbatim CSV literal's leading spaces and embedded line break
/// are load-bearing test data — do not reflow this line.
/// </summary>
public void leading_whitespace_is_retained_when_preserve_leading_whitespace_is_true() { var csv = @" value1, value2, value3 value4, value5, value6" ; var parser = this.CreateParserFromString(csv); parser.PreserveLeadingWhiteSpace = true; var records = new DataRecord[2]; Assert.Equal(2, parser.ParseRecords(null, records, 0, records.Length)); Assert.Equal(" value1", records[0][0]); Assert.Equal(" value2", records[0][1]); Assert.Equal(" value3", records[0][2]); Assert.Equal("value4", records[1][0]); Assert.Equal(" value5", records[1][1]); Assert.Equal(" value6", records[1][2]); }
/// <summary>
/// Looks up the record keyed by (objectId, date, parameter) and returns the named
/// property's value; returns null when the dictionary is null or the key is absent,
/// and an empty string when the record exists but the property resolves to null.
/// </summary>
/// <param name="dict">Lookup table keyed by (object id, date, parameter).</param>
/// <param name="objectId">Guid string identifying the object; must parse as a Guid.</param>
public static object Get3d(IDictionary <Tuple <Guid, DateTime, string>, DataRecord> dict, DateTime date, string objectId, string parameter, string property)
{
    var id = System.Guid.Parse(objectId);
    var key = new Tuple <Guid, DateTime, string>(id, date, parameter);

    // IMPROVED: single TryGetValue lookup instead of ContainsKey + indexer
    // (the original performed two hash lookups for every hit).
    DataRecord value;
    if (dict == null || !dict.TryGetValue(key, out value))
    {
        return(null);
    }

    object ret = value.GetProperty(property);
    if (ret != null)
    {
        return(ret);
    }

    return("");
}
/// <summary>
/// Populates catalogue-entry fields from the CATD field of an S-57 catalogue record:
/// record id, file name and long file name, and the four bounding
/// latitudes/longitudes, each read from the first sub-field row via its tag index.
/// RCID is stored in the field as a signed Int32 and cast to uint afterwards — a
/// direct unsigned read would fail, per the inline comment. Records without a CATD
/// field are silently left unpopulated.
/// </summary>
public void BuildFromDataRecord(S57Reader reader, DataRecord cr, CatalogueFile catalogueFile) { // Record Identifier Field var catd = cr.Fields.GetFieldByTag("CATD"); if (catd != null) { subFieldRow = catd.subFields.Values[0]; tagLookup = catd.subFields.TagIndex; RecordIdentificationNumber = (uint)subFieldRow.GetInt32(tagLookup.IndexOf("RCID")); //this one ist stored as integer, so implementing GetUint32 to do merely a cast will fail fileName = subFieldRow.GetString(tagLookup.IndexOf("FILE")); fileLongName = subFieldRow.GetString(tagLookup.IndexOf("LFIL")); southernMostLatitude = subFieldRow.GetDouble(tagLookup.IndexOf("SLAT")); westernMostLongitude = subFieldRow.GetDouble(tagLookup.IndexOf("WLON")); northernMostLatitude = subFieldRow.GetDouble(tagLookup.IndexOf("NLAT")); easternMostLongitude = subFieldRow.GetDouble(tagLookup.IndexOf("ELON")); } }
/// <summary>RFC 4180 §2.2: CRLF separates records; both records parse fully.</summary>
public void parser_complies_with_2_2()
{
    var CRLF = "\r\n";
    var parser = this.CreateParserFromString($"aaa,bbb,ccc{CRLF}zzz,yyy,xxx");
    var records = new DataRecord[2];

    Assert.Equal(2, parser.ParseRecords(null, records, 0, records.Length));

    var expected = new[, ] { { "aaa", "bbb", "ccc" }, { "zzz", "yyy", "xxx" } };
    for (var row = 0; row < 2; ++row)
    {
        for (var column = 0; column < 3; ++column)
        {
            Assert.Equal(expected[row, column], records[row][column]);
        }
    }

    Assert.False(parser.HasMoreRecords);
}
/// <summary>
/// Executes and returns an open IRecordSet, which encapsulates an OPEN DATAREADER. DISPOSE IN FINALLY CLAUSE.
/// </summary>
/// <param name="database">Target database.</param>
/// <param name="procedureName">Stored procedure to execute.</param>
/// <param name="parameterMapper">Maps parameters onto the command.</param>
/// <returns>An open record set; the caller must dispose it.</returns>
public static IRecordSet Execute(Database database, string procedureName, ParameterMapper parameterMapper)
{
    SqlConnection connection = database.GetConnection();
    SqlCommand command = CommandFactory.CreateParameterMappedCommand(connection, procedureName, parameterMapper);
    try
    {
        command.Connection.Open();
        // CloseConnection ties the connection's lifetime to the returned reader.
        return new DataRecord(command.ExecuteReader(CommandBehavior.CloseConnection));
    }
    catch (Exception exc)
    {
        // The reader never opened, so close the connection here before wrapping.
        command.Connection.Close();
        throw new Core.Data.Exceptions.DatabaseExecutionException(database, procedureName, command, exc);
    }
}
/// <summary>
/// Executes and returns an open IRecordSet, which encapsulates an OPEN DATAREADER. DISPOSE IN FINALLY CLAUSE.
/// </summary>
/// <param name="database">Database that supplies the connection.</param>
/// <param name="procedureName">Name of the stored procedure to execute.</param>
/// <param name="parameters">Positional parameter values for the stored procedure.</param>
/// <returns>An open record set wrapping a live data reader; the caller must dispose it.</returns>
public static IRecordSet Execute(Database database, string procedureName, params object[] parameters)
{
    SqlConnection connection = database.GetConnection();
    SqlCommand command = CommandFactory.CreateCommand(connection, database.InstanceName, procedureName, parameters);
    try
    {
        connection.Open();

        // CommandBehavior.CloseConnection: disposing the returned record set's reader closes the connection.
        return new DataRecord(command.ExecuteReader(CommandBehavior.CloseConnection));
    }
    catch (Exception exc)
    {
        // The reader never reached the caller, so release the connection before wrapping the failure.
        connection.Close();
        throw new Core.Data.Exceptions.DatabaseExecutionException(database, procedureName, command, exc);
    }
}
// Regression test for issue 12693: the parser could stop parsing prematurely when the
// stream delivered data in small, irregular chunks.
public void issue12693_repro()
{
    const int recordCount = 50;
    var random = new Random();
    var memoryStream = new MemoryStream();
    var csvWriter = new CsvWriter(memoryStream);

    // create some dummy data to work with
    for (var record = 0; record < recordCount; ++record)
    {
        var dataRecord = new DataRecord(null);

        // FIX: the original evaluated random.Next(100, 200) in the loop condition, re-rolling the
        // bound on every iteration; hoist it so each record gets a single randomly-chosen size.
        var valueCount = random.Next(100, 200);

        for (var value = 0; value < valueCount; ++value)
        {
            dataRecord.Add("value" + random.Next(0, 100000));
        }

        csvWriter.WriteRecord(dataRecord);
    }

    memoryStream.Position = 0;

    // create a fake server that will feed us our dummy data in dribs and drabs
    using (var fakeServer = new FakeServer(memoryStream))
    using (var connectionToServer = new TcpClient("localhost", FakeServer.Port))
    using (var csvReader = new CsvReader(connectionToServer.GetStream()))
    {
        while (csvReader.HasMoreRecords)
        {
            csvReader.ReadDataRecord();
        }

        // when the bug manifests itself, the parser may stop parsing prematurely so the record counts won't match
        Assert.Equal(recordCount, csvReader.RecordNumber);
    }
}
// Rule VS: any single character can be configured as the value separator.
public void parser_complies_with_rule_VS()
{
    var template = "Kent{0}25{0}M{1}Belinda{0}26{0}F{0}Description{1}Tempany{0}10{0}F{0}Description{0}Something else{1}Xak{0}2{0}M{0}{1}";
    var expected = new[]
    {
        new[] { "Kent", "25", "M" },
        new[] { "Belinda", "26", "F", "Description" },
        new[] { "Tempany", "10", "F", "Description", "Something else" },
        new[] { "Xak", "2", "M", "" },
    };

    foreach (var separator in new[] { ',', '\t', ':', '.' })
    {
        var parser = this.CreateParserFromString(string.Format(template, separator, Environment.NewLine));
        var records = new DataRecord[5];
        parser.ValueSeparator = separator;

        // Four records are present even though the buffer has room for five.
        Assert.Equal(4, parser.ParseRecords(null, records, 0, records.Length));

        for (var row = 0; row < expected.Length; ++row)
        {
            for (var col = 0; col < expected[row].Length; ++col)
            {
                Assert.Equal(expected[row][col], records[row][col]);
            }
        }

        Assert.False(parser.HasMoreRecords);
    }
}
/// <summary>
/// Parses up to <paramref name="count"/> records from the input into <paramref name="buffer"/>
/// starting at index <paramref name="offset"/>, attaching <paramref name="headerRecord"/> to each.
/// </summary>
/// <param name="headerRecord">Header attached to every parsed record (may be null).</param>
/// <param name="buffer">Destination array for the parsed records.</param>
/// <param name="offset">Index in <paramref name="buffer"/> at which to place the first record.</param>
/// <param name="count">Maximum number of records to parse.</param>
/// <returns>The number of records actually parsed; fewer than <paramref name="count"/> if data runs out.</returns>
public int ParseRecords(HeaderRecord headerRecord, DataRecord[] buffer, int offset, int count)
{
    // see performance notes in SkipRecord
    var ch = char.MinValue;
    var recordsParsed = 0;
    // true while we are inside a delimited (quoted) value
    var delimited = false;

    for (var i = offset; i < offset + count; ++i)
    {
        // inner loop consumes characters until one full record has been produced (break)
        // or the input is exhausted (return)
        while (true)
        {
            if (!this.IsBufferEmpty)
            {
                ch = this.buffer[this.bufferIndex++];

                if (!this.IsPossiblySpecialCharacter(ch))
                {
                    // if it's definitely not a special character, then we can just append it and continue on with the loop
                    this.valueBuilder.NotifyPreviousCharIncluded(delimited);
                    continue;
                }

                if (!delimited)
                {
                    if (ch == this.valueSeparator)
                    {
                        // end of one value: stash it and start accumulating the next
                        this.values.Add(this.valueBuilder.GetValueAndClear());
                    }
                    else if (ch == this.valueDelimiter)
                    {
                        // opening delimiter: exclude it from the value and enter delimited mode
                        this.valueBuilder.NotifyPreviousCharExcluded();
                        delimited = true;

                        // since we're in a delimited area, the only special character is the value delimiter
                        this.activeSpecialCharacterMask = this.valueDelimiter.Value;
                    }
                    else if (ch == Constants.CR)
                    {
                        // we need to look at the next character, so make sure it is available
                        if (this.IsBufferEmpty && !this.FillBuffer())
                        {
                            // undelimited CR indicates the end of a record, so add the existing value and then exit
                            buffer[i] = this.values.GetDataRecordAndClear(headerRecord, this.valueBuilder.GetValueAndClear());
                            break;
                        }

                        // we deal with CRLF right here by checking if the next character is LF, in which case we just discard it
                        if (this.buffer[this.bufferIndex] == Constants.LF)
                        {
                            ++this.bufferIndex;
                        }

                        // undelimited CR or CRLF both indicate the end of a record, so add the existing value and then exit
                        buffer[i] = this.values.GetDataRecordAndClear(headerRecord, this.valueBuilder.GetValueAndClear());
                        break;
                    }
                    else if (ch == Constants.LF)
                    {
                        // undelimited LF indicates the end of a record, so add the existing value and then exit
                        buffer[i] = this.values.GetDataRecordAndClear(headerRecord, this.valueBuilder.GetValueAndClear());
                        break;
                    }
                    else
                    {
                        // it wasn't a special character after all, so just append it
                        this.valueBuilder.NotifyPreviousCharIncluded(false);
                    }
                }
                else if (ch == this.valueDelimiter)
                {
                    // we need to look at the next character, so make sure it is available
                    if (this.IsBufferEmpty && !this.FillBuffer())
                    {
                        // out of data
                        delimited = false;
                        this.activeSpecialCharacterMask = this.specialCharacterMask;
                        buffer[i] = this.values.GetDataRecordAndClear(headerRecord, this.valueBuilder.GetValueAndClear());
                        break;
                    }

                    if (this.buffer[this.bufferIndex] == this.valueDelimiter)
                    {
                        // delimiter is escaped, so append it to the value and discard the escape character
                        this.valueBuilder.NotifyPreviousCharExcluded();
                        ++this.bufferIndex;
                        this.valueBuilder.NotifyPreviousCharIncluded(true);
                    }
                    else
                    {
                        // delimiter isn't escaped, so we are no longer in a delimited area
                        this.valueBuilder.NotifyPreviousCharExcluded();
                        delimited = false;
                        this.activeSpecialCharacterMask = this.specialCharacterMask;
                    }
                }
                else
                {
                    // it wasn't a special character after all, so just append it
                    this.valueBuilder.NotifyPreviousCharIncluded(true);
                }
            }
            else if (!this.FillBuffer())
            {
                // input exhausted mid-record: flush whatever has accumulated
                if (this.valueBuilder.HasValue)
                {
                    // a value is outstanding, so add it
                    this.values.Add(this.valueBuilder.GetValueAndClear());
                }

                if (ch == this.valueSeparator)
                {
                    // special case: last character is a separator, which means there should be an empty value after it. eg. "foo," results in ["foo", ""]
                    buffer[i] = this.values.GetDataRecordAndClear(headerRecord, string.Empty);
                    ++recordsParsed;
                }
                else
                {
                    var record = this.values.GetDataRecordAndClear(headerRecord);

                    if (record != null)
                    {
                        buffer[i] = record;
                        ++recordsParsed;
                    }
                }

                // data exhausted - we're done, even though we may not have filled the records array
                return recordsParsed;
            }
        }

        ++recordsParsed;
    }

    return recordsParsed;
}
// A null column name is a programming error, not a missing column, so it must throw.
public void get_value_or_null_throws_if_column_name_is_null()
{
    var data = new DataRecord(new HeaderRecord(new string[] { "Name", "Age", "Gender" }));

    // FIX: the original captured the exception into an unused local (`var ex = ...`);
    // the returned exception was never inspected, so drop the assignment.
    Assert.Throws<ArgumentNullException>(() => data.GetValueOrNull(null));
}
// Lookup is case-sensitive, and a header column with no corresponding value in the record
// resolves to null rather than throwing.
public void get_value_or_null_returns_null_if_column_is_not_found()
{
    var header = new HeaderRecord(true, "Name", "Age", "Gender", "Relationship Status");
    var data = new DataRecord(header, true, "Kent", "25", "M");

    foreach (var missingColumn in new[] { "foo", "name", "GENDER", "Relationship Status" })
    {
        Assert.Null(data.GetValueOrNull(missingColumn));
    }
}