/// <summary>
/// Entry point: parses the key-ratios workbook and prints the top firms ranked
/// by Piotroski scores (most recent year first), with revenue as the tiebreak.
/// </summary>
private static void Main(string[] args)
{
    var dfr = new DataFileReader();
    // NOTE(review): hard-coded local path — consider taking it from args.
    dfr.ParseKeyFinance(@"c:\users\sridh\Downloads\1. Key Ratios.xlsx");

    var results = dfr.dataCollection
        .OrderByDescending(d => d.PiotroskiScoreCurrent)
        .ThenByDescending(d => d.PiotroskiScore1YrAgo)
        .ThenByDescending(d => d.PiotroskiScore2YrAgo)
        .ThenByDescending(d => d.PiotroskiScore3YrAgo)
        .ThenByDescending(d => d.Revenue)
        .ToArray();

    Console.WriteLine("These are firms that have been doing well for the past 3 years");

    // Fixed: bound by the collection size so fewer than 20 parsed rows cannot
    // throw IndexOutOfRangeException.
    int count = Math.Min(20, results.Length);
    for (int i = 0; i < count; i++)
    {
        Console.WriteLine($"Ticker :{results[i].Ticker} " +
            $"\nName : {results[i].CompanyName} " +
            $"\n\tCurrent rating : {results[i].PiotroskiScoreCurrent} " +
            // Fixed: the "Rating year ago" line was printed twice; each score
            // now appears exactly once.
            $"\n\tRating year ago : {results[i].PiotroskiScore1YrAgo} " +
            $"\n\tRating two year ago : {results[i].PiotroskiScore2YrAgo} " +
            $"\n\tRating three years ago : {results[i].PiotroskiScore3YrAgo} " +
            $"\n\tEBITDA : {ToKMB((decimal)results[i].EbitdaCurrent)} " +
            $"\n\tRevenues : {ToKMB((decimal)results[i].Revenue)}" +
            $"\n\tNet Margin : {ToKMB((decimal)results[i].NetMargin)}%" +
            $"\n\tGross Margin : {ToKMB((decimal)results[i].GrossMargin)}" +
            $"\n\tOperating Margin : {ToKMB((decimal)results[i].OperatingMargin)}%" +
            $"\n\tReturn on Equity : {ToKMB((decimal)results[i].ReturnOnEquity)}%" +
            $"\n\tReturn on Assets : {ToKMB((decimal)results[i].ReturnOnAssets)}%\n");
    }
}
/// <summary>
/// Entry point: reads every file beside the executing assembly into models and
/// prints them three ways — by gender then last name, by date of birth, and by
/// last name descending.
/// </summary>
static async Task Main(string[] args)
{
    string directory = Path.GetDirectoryName(Assembly.GetExecutingAssembly().Location);
    var files = Directory.GetFiles(directory, "*.*", SearchOption.AllDirectories);
    var dataReader = new DataFileReader(new FileStreamReader());
    IList<Model> models = await dataReader.ReadFiles(files);

    Console.WriteLine("Output1:");
    // Fixed: chaining a second OrderBy discards the first ordering entirely.
    // Output1 should be sorted by Gender with LastName ordered within each
    // gender, which requires OrderBy followed by ThenBy.
    foreach (var model in models.OrderBy(m => m.Gender).ThenBy(m => m.LastName))
    {
        Console.WriteLine($"LastName: {model.LastName}\t FirstName: {model.FirstName}\t Gender: {model.Gender}\t FavoriteColor: {model.FavoriteColor}\t DateOfBirth: {model.DateOfBirth.ToShortDateString()}");
    }

    Console.WriteLine("Output2:");
    foreach (var model in models.OrderBy(m => m.DateOfBirth))
    {
        Console.WriteLine($"LastName: {model.LastName}\t FirstName: {model.FirstName}\t Gender: {model.Gender}\t FavoriteColor: {model.FavoriteColor}\t DateOfBirth: {model.DateOfBirth.ToShortDateString()}");
    }

    Console.WriteLine("Output3:");
    foreach (var model in models.OrderByDescending(m => m.LastName))
    {
        Console.WriteLine($"LastName: {model.LastName}\t FirstName: {model.FirstName}\t Gender: {model.Gender}\t FavoriteColor: {model.FavoriteColor}\t DateOfBirth: {model.DateOfBirth.ToShortDateString()}");
    }

    Console.ReadKey();
}
/// <summary>
/// Dumps the data from the Avro blob to the data warehouse (DW).
/// Before running this, ensure that the DW has the required <see cref="TableName"/> table created.
/// </summary>
/// <param name="fileUri">URI of the capture blob to read.</param>
/// <returns>A task the caller can await to observe completion or failure.</returns>
private static async Task Dump(Uri fileUri)
{
    // Fixed: was 'async void', which makes exceptions unobservable and the
    // operation un-awaitable; returning Task keeps the call site compatible.
    // Get the blob reference
    BlobClient blob = new BlobClient(fileUri);
    using (var dataTable = GetWindTurbineMetricsTable())
    {
        // Parse the Avro File. Fixed: the blob stream was never disposed;
        // both the stream and the reader are now released deterministically.
        using (Stream blobStream = await blob.OpenReadAsync(null))
        using (var avroReader = DataFileReader<GenericRecord>.OpenReader(blobStream))
        {
            while (avroReader.HasNext())
            {
                GenericRecord r = avroReader.Next();
                // Capture files carry the original event payload in "Body".
                byte[] body = (byte[])r["Body"];
                var windTurbineMeasure = DeserializeToWindTurbineMeasure(body);
                // Add the row to in memory table
                AddWindTurbineMetricToTable(dataTable, windTurbineMeasure);
            }
        }
        // Only hit the database when at least one row was parsed.
        if (dataTable.Rows.Count > 0)
        {
            BatchInsert(dataTable);
        }
    }
}
/// <summary>
/// Dumps the data from the Avro blob to the data warehouse (DW).
/// Before running this, ensure that the DW has the required <see cref="TableName"/> table created.
/// </summary>
/// <param name="fileUri">URI of the capture blob, resolved against the configured storage account.</param>
private static void Dump(Uri fileUri)
{
    // Get the blob reference
    var storageAccount = CloudStorageAccount.Parse(StorageConnectionString);
    var blobClient = storageAccount.CreateCloudBlobClient();
    // Fetches the blob's attributes from the service before reading.
    var blob = blobClient.GetBlobReferenceFromServer(fileUri);
    using (var dataTable = GetStoveTempMetrics())
    {
        // Parse the Avro File
        using (var avroReader = DataFileReader<GenericRecord>.OpenReader(blob.OpenRead()))
        {
            while (avroReader.HasNext())
            {
                GenericRecord r = avroReader.Next();
                // Capture records store the raw event payload in the "Body" field.
                byte[] body = (byte[])r["Body"];
                var stoveTempMeasure = DeserializeToStoveTempMeasure(body);
                // Add the row to in memory table
                AddStoveMetricsToTable(dataTable, stoveTempMeasure);
            }
        }
        // Skip the database round-trip when nothing was parsed.
        if (dataTable.Rows.Count > 0)
        {
            BatchInsert(dataTable);
        }
    }
}
/// <summary>
/// Verifies that an enumeration column can be read by numeric value:
/// "1" maps to PersistenceMode.Live and "2" to PersistenceMode.Archive,
/// and a third Read reports end-of-data.
/// </summary>
public void CorrectlyReadsEnumeratedTextFileColumnByValue()
{
    // One "Persistence Mode" column with two data rows.
    string fileContents = "Persistence Mode\r\n1\r\n2\r\n";
    // Rhino Mocks record/replay: expectations are recorded before ReplayAll.
    MockRepository mocks = new MockRepository();
    IResourceFileLocator fileLocator = mocks.CreateMock<IResourceFileLocator>();
    Expect.Call(fileLocator.ReadTextFile("MyCompany.MyAssembly", _testDataFile)).Return(fileContents);
    mocks.ReplayAll();
    using (IDataFileReader reader = new DataFileReader(fileLocator))
    {
        reader.Open("MyCompany.MyAssembly", "Test", "DataFilePath");
        bool canRead = reader.Read();
        Assert.That(canRead, Is.EqualTo(true));
        Assert.That(reader.GetEnumerationByValue<PersistenceMode>("Persistence Mode"), Is.EqualTo(PersistenceMode.Live));
        canRead = reader.Read();
        Assert.That(canRead, Is.EqualTo(true));
        Assert.That(reader.GetEnumerationByValue<PersistenceMode>("Persistence Mode"), Is.EqualTo(PersistenceMode.Archive));
        // A third read must report that the data is exhausted.
        canRead = reader.Read();
        Assert.That(canRead, Is.EqualTo(false));
    }
    // Confirms the locator expectation above was actually consumed.
    mocks.VerifyAll();
}
/// <summary>
/// Dumps the data from the Avro blob to the data warehouse (DW).
/// Before running this, ensure that the DW has the required <see cref="TableName"/> table created.
/// </summary>
/// <param name="fileUri">URI of the capture blob, resolved against the configured storage account.</param>
/// <returns>A task the caller can await to observe completion or failure.</returns>
private static async Task Dump(Uri fileUri)
{
    // Fixed: was 'async void', which makes exceptions unobservable and the
    // operation un-awaitable; returning Task keeps the call site compatible.
    // Get the blob reference
    var storageAccount = CloudStorageAccount.Parse(StorageConnectionString);
    var blobClient = storageAccount.CreateCloudBlobClient();
    var blob = await blobClient.GetBlobReferenceFromServerAsync(fileUri);
    using (var dataTable = GetWindTurbineMetricsTable())
    {
        // Parse the Avro File. Fixed: the blob stream was never disposed;
        // both the stream and the reader are now released deterministically.
        using (Stream blobStream = await blob.OpenReadAsync(null, null, null))
        using (var avroReader = DataFileReader<GenericRecord>.OpenReader(blobStream))
        {
            while (avroReader.HasNext())
            {
                GenericRecord r = avroReader.Next();
                // Capture records store the raw event payload in the "Body" field.
                byte[] body = (byte[])r["Body"];
                var windTurbineMeasure = DeserializeToWindTurbineMeasure(body);
                // Add the row to in memory table
                AddWindTurbineMetricToTable(dataTable, windTurbineMeasure);
            }
        }
        if (dataTable.Rows.Count > 0)
        {
            BatchInsert(dataTable);
        }
    }
}
/// <summary>
/// Round-trips a single header metadata entry through an Avro container:
/// writes records with the given key/value set in the file header, reads the
/// bytes back, and validates the metadata (optionally via the typed getter).
/// </summary>
public void TestMetaData(string key, object value, Codec.Type codecType, bool useTypeGetter)
{
    // create and write out
    object[] obj = new object[] { new object[] { "John", 23 } };
    IList<Foo> records = MakeRecords(obj);
    MemoryStream dataFileOutputStream = new MemoryStream();
    Schema schema = Schema.Parse(specificSchema);
    DatumWriter<Foo> writer = new SpecificWriter<Foo>(schema);
    using (IFileWriter<Foo> dataFileWriter = DataFileWriter<Foo>.OpenWriter(writer, dataFileOutputStream, Codec.CreateCodec(codecType)))
    {
        // Metadata is set before any records are appended.
        SetMetaData(dataFileWriter, key, value);
        foreach (Foo rec in records)
        {
            dataFileWriter.Append(rec);
        }
    }
    // Re-open the written bytes as an independent input stream.
    MemoryStream dataFileInputStream = new MemoryStream(dataFileOutputStream.ToArray());
    // read back
    using (IFileReader<Foo> reader = DataFileReader<Foo>.OpenReader(dataFileInputStream))
    {
        Assert.IsTrue(ValidateMetaData(reader, key, value, useTypeGetter),
            string.Format("Error validating header meta data for key: {0}, expected value: {1}", key, value));
    }
}
/// <summary>
/// Both integer columns of a single tab-separated row must be readable by
/// column name, and a second Read must report end-of-data.
/// </summary>
public void CorrectlyReadsMultiColumnTable()
{
    // Two tab-separated columns with exactly one data row.
    const string contents = "Column 1\tColumn 2\r\n80\t85";
    var repository = new MockRepository();
    var locator = repository.CreateMock<IResourceFileLocator>();
    Expect.Call(locator.ReadTextFile("MyCompany.MyAssembly", _testDataFile)).Return(contents);
    repository.ReplayAll();

    using (IDataFileReader reader = new DataFileReader(locator))
    {
        reader.Open("MyCompany.MyAssembly", "Test", "DataFilePath");

        Assert.That(reader.Read(), Is.EqualTo(true));
        Assert.That(reader.GetInteger("Column 1"), Is.EqualTo(80));
        Assert.That(reader.GetInteger("Column 2"), Is.EqualTo(85));
        Assert.That(reader.Read(), Is.EqualTo(false));
    }

    repository.VerifyAll();
}
/// <summary>
/// Interop test: dictionaries written with the Microsoft sequential Avro
/// writer must be readable by the Apache DataFileReader with identical
/// key/value content.
/// </summary>
public void SequentialWriter_MicrosoftWriterApacherReaderOfDictionary()
{
    // Arrange: seven records, each holding a single distinctive key/value pair.
    var expected = new List<ContainingDictionaryClass<string, string>>();
    for (var i = 0; i < 7; i++)
    {
        expected.Add(ContainingDictionaryClass<string, string>.Create(
            new Dictionary<string, string> { { "testkey" + i, "testvalue" + i } }));
    }

    // Act: write with the Microsoft writer (block size 2), read back with Apache.
    var w = AvroContainer.CreateWriter<ContainingDictionaryClass<string, string>>(this.resultStream, Codec.Deflate);
    using (var writer = new SequentialWriter<ContainingDictionaryClass<string, string>>(w, 2))
    {
        expected.ForEach(writer.Write);
    }

    this.resultStream.Seek(0, SeekOrigin.Begin);
    var reader = DataFileReader<GenericRecord>.OpenReader(this.resultStream);
    var actual = new List<GenericRecord>(reader);

    // Assert.
    Assert.AreEqual(expected.Count, actual.Count);
    for (var i = 0; i < expected.Count; ++i)
    {
        var actualValue = actual[i]["Property"] as Dictionary<string, object>;
        Assert.IsNotNull(actualValue);
        // Fixed: AreEqual takes (expected, actual) — the arguments were
        // reversed, which produces misleading failure messages.
        Assert.AreEqual(expected[i].Property["testkey" + i], actualValue["testkey" + i] as string);
    }
}
/// <summary>
/// U-SQL extractor: buffers the unstructured input into a seekable stream,
/// reads it as an Avro container, and yields one output row per Avro record
/// with every schema column populated (null for missing fields).
/// </summary>
public override IEnumerable<IRow> Extract(IUnstructuredReader input, IUpdatableRow output)
{
    var avschema = Avro.Schema.Parse(avroSchema);
    using (var ms = new MemoryStream())
    {
        // The Avro container reader needs random access; copy into memory first.
        CreateSeekableStream(input, ms);
        ms.Position = 0;
        var fileReader = DataFileReader<GenericRecord>.OpenReader(ms, avschema);
        while (fileReader.HasNext())
        {
            var avroRecord = fileReader.Next();
            // Populate every output column before emitting the row.
            foreach (var column in output.Schema)
            {
                if (avroRecord[column.Name] != null)
                {
                    output.Set(column.Name, avroRecord[column.Name]);
                }
                else
                {
                    output.Set<object>(column.Name, null);
                }
            }
            // Fixed: the yield was inside the column loop, emitting one
            // partially-populated row per COLUMN instead of one per record.
            yield return output.AsReadOnly();
        }
    }
    // (Removed an unused GenericDatumReader local; the file reader takes the
    // schema directly.)
}
/// <summary>
/// A row that supplies a value only for the first column must surface the
/// missing second column as an empty string rather than failing.
/// </summary>
public void CanReadEmptyString()
{
    // Header declares two columns; the single data row fills only the first.
    const string contents = "StringColumn1\tStringColumn2\r\nString 1";
    var repository = new MockRepository();
    var locator = repository.CreateMock<IResourceFileLocator>();
    Expect.Call(locator.ReadTextFile("MyCompany.MyAssembly", _testDataFile)).Return(contents);
    repository.ReplayAll();

    using (IDataFileReader reader = new DataFileReader(locator))
    {
        reader.Open("MyCompany.MyAssembly", "Test", "DataFilePath");

        Assert.That(reader.Read(), Is.EqualTo(true));
        Assert.That(reader.GetString("StringColumn2"), Is.EqualTo(string.Empty));
        Assert.That(reader.Read(), Is.EqualTo(false));
    }

    repository.VerifyAll();
}
/// <summary>
/// Reads the Event Hubs capture blob as an Avro container and batch-inserts
/// every deserialized wind-turbine measurement into the metrics table.
/// </summary>
public void Dump()
{
    // Get the blob reference
    BlobClient captureBlob = new BlobClient(new Uri(EventHubsCaptureAvroBlobUri));

    using (var metricsTable = GetWindTurbineMetricsTable())
    {
        // Parse the Avro File
        using (var avroReader = DataFileReader<GenericRecord>.OpenReader(captureBlob.OpenRead()))
        {
            while (avroReader.HasNext())
            {
                GenericRecord record = avroReader.Next();
                // Each capture record carries the event payload in "Body".
                var measure = DeserializeToWindTurbineMeasure((byte[])record["Body"]);
                // Add the row to in memory table
                AddWindTurbineMetricToTable(metricsTable, measure);
            }
        }

        // Skip the database round-trip when nothing was parsed.
        if (metricsTable.Rows.Count > 0)
        {
            BatchInsert(metricsTable);
        }
    }
}
/// <summary>
/// Reads the Event Hubs capture blob as an Avro container and batch-inserts
/// every deserialized wind-turbine measurement into the metrics table.
/// </summary>
public void Dump()
{
    // Get the blob reference
    var storageAccount = CloudStorageAccount.Parse(StorageConnectionString);
    var blobClient = storageAccount.CreateCloudBlobClient();
    // Fixed: this declaration was commented out while 'blob' was still used
    // below, which cannot compile. The server-side blob lookup is restored.
    var blob = blobClient.GetBlobReferenceFromServer(new Uri(EventHubsCaptureAvroBlobUri));
    using (var dataTable = GetWindTurbineMetricsTable())
    {
        // Parse the Avro File
        using (var avroReader = DataFileReader<GenericRecord>.OpenReader(blob.OpenRead()))
        {
            while (avroReader.HasNext())
            {
                GenericRecord r = avroReader.Next();
                byte[] body = (byte[])r["Body"];
                var windTurbineMeasure = DeserializeToWindTurbineMeasure(body);
                // Add the row to in memory table
                AddWindTurbineMetricToTable(dataTable, windTurbineMeasure);
            }
        }
        if (dataTable.Rows.Count > 0)
        {
            BatchInsert(dataTable);
        }
    }
}
/// <summary>
/// Downloads the Event Hubs capture blob into memory, reads it as an Avro
/// container, and batch-inserts every deserialized wind-turbine measurement.
/// </summary>
public void Dump()
{
    // Get the blob reference
    BlobContainerClient blobContainer = new BlobContainerClient(StorageConnectionString, EventHubsCaptureAvroBlobContainer);
    BlobClient blob = blobContainer.GetBlobClient(EventHubsCaptureAvroBlobName);

    // Download the content to a memory stream
    using (Stream blobStream = new MemoryStream())
    {
        // Fixed: DownloadToAsync was fire-and-forget inside a synchronous
        // method, so Avro parsing could start before the download finished.
        // Use the synchronous download instead.
        blob.DownloadTo(blobStream);
        // Fixed: rewind before reading — the download leaves the position at
        // the end of the stream and the Avro reader reads from the current
        // position.
        blobStream.Position = 0;

        using (var dataTable = GetWindTurbineMetricsTable())
        {
            // Parse the Avro File
            using (var avroReader = DataFileReader<GenericRecord>.OpenReader(blobStream))
            {
                while (avroReader.HasNext())
                {
                    GenericRecord r = avroReader.Next();
                    byte[] body = (byte[])r["Body"];
                    var windTurbineMeasure = DeserializeToWindTurbineMeasure(body);
                    // Add the row to in memory table
                    AddWindTurbineMetricToTable(dataTable, windTurbineMeasure);
                }
            }
            if (dataTable.Rows.Count > 0)
            {
                BatchInsert(dataTable);
            }
        }
    }
}
/// <summary>
/// Rebuilds the key -> file-position index by scanning every record in the
/// underlying Avro file, restoring the stream position afterwards so the scan
/// is transparent to the caller.
/// </summary>
internal void Build(AvroFileAccess<V> fileAccess)
{
    Preconditions.CheckNotNullArgument(fileAccess);
    Preconditions.CheckArgument(fileAccess.Stream.Name == FileName);
    Preconditions.CheckArgument(fileAccess.Stream.CanRead);
    // NOTE(review): the lock is taken on Index, but Index is reassigned on the
    // very next line — a concurrent caller could end up locking the old
    // instance. A dedicated readonly gate object would be safer; confirm the
    // intended concurrency semantics before changing.
    lock (Index)
    {
        Index = new Dictionary<string, object>();
        // Remember the caller's position so it can be restored below.
        long oldPosition = fileAccess.Stream.Position;
        try
        {
            fileAccess.Stream.Position = 0;
            IFileReader<GenericRecord> reader = DataFileReader<GenericRecord>.OpenReader(fileAccess.Stream, fileAccess.ValueDef.Schema);
            while (reader.HasNext())
            {
                // PreviousSync() is captured BEFORE Next() so the stored
                // position refers to the sync block containing this record.
                long position = reader.PreviousSync();
                GenericRecord record = reader.Next();
                V value = fileAccess.ValueDef.GetValue(record);
                Index.Add(fileAccess.ValueDef.GetValueKey(value), position);
                // Mark the rebuilt index as needing to be written out.
                IsIndexWritePending = true;
            }
        }
        finally
        {
            // Always restore, even if the scan fails part-way through.
            fileAccess.Stream.Position = oldPosition;
        }
    }
}
/// <summary>
/// Interop test: reads every generated *.avro file in <paramref name="inputDir"/>
/// (skipping files whose codec, encoded in the file-name suffix, is not
/// supported) and checks each file yields at least one non-null record.
/// </summary>
public void TestInterop(string inputDir)
{
    // Resolve inputDir relative to the TestDirectory
    inputDir = Path.Combine(TestContext.CurrentContext.TestDirectory, inputDir);
    Assert.True(Directory.Exists(inputDir), "Input directory does not exist. Run `build.sh interop-data-generate` first.");
    foreach (var avroFile in Directory.EnumerateFiles(inputDir, "*.avro"))
    {
        // File names look like "<name>_<codec>.avro"; the segment after the
        // first underscore names the codec used to write the file.
        var codec = Path.GetFileNameWithoutExtension(avroFile).Split('_');
        if (1 < codec.Length && !InteropDataConstants.SupportedCodecNames.Contains(codec[1]))
        {
            continue;
        }
        using (var reader = DataFileReader<GenericRecord>.OpenReader(avroFile))
        {
            int i = 0;
            foreach (var record in reader.NextEntries)
            {
                i++;
                Assert.IsNotNull(record);
            }
            // Guard against silently-empty interop files.
            Assert.AreNotEqual(0, i);
        }
    }
}
/// <summary>
/// Lazily deserializes an Event Hubs capture Avro stream: each Avro record is
/// converted to capture event data and every record deserialized from its body
/// is yielded in turn. Failures to open the stream are logged and rethrown.
/// </summary>
public override IEnumerable<T> Deserialize(Stream stream)
{
    IFileReader<GenericRecord> reader = null;
    try
    {
        reader = DataFileReader<GenericRecord>.OpenReader(stream);
    }
    catch (Exception e)
    {
        this.diagnostics.WriteError(
            briefMessage: "Unable to open stream as avro. Please check if the stream is from eventhub capture. https://docs.microsoft.com/en-us/azure/event-hubs/event-hubs-capture-overview ",
            detailedMessage: e.Message);
        throw;
    }

    // Fixed: Dispose previously ran only after a complete, successful
    // enumeration — an exception mid-iteration (or an abandoned partial
    // enumeration) leaked the reader. try/finally inside the iterator
    // guarantees disposal in every case.
    try
    {
        foreach (GenericRecord genericRecord in reader.NextEntries)
        {
            EventDataFromCapture eventData = this.ConvertToEventDataFromCapture(genericRecord);
            // deserialize records from eventdata body.
            foreach (T record in this.DeserializeEventData(eventData))
            {
                yield return record;
            }
        }
    }
    finally
    {
        reader.Dispose();
    }
}
/// <summary>
/// Demo entry point: reads users.avro twice — once with the specific (typed)
/// reader and once with the generic reader — printing each record.
/// </summary>
static void Main(string[] args)
{
    // Fixed: the schema StreamReader was never disposed; File.ReadAllText
    // opens and closes the file in one call.
    String schema = File.ReadAllText("user.avsc");
    Avro.Schema avschema = Avro.Schema.Parse(schema);

    // Typed (specific) read: each record materializes as a User.
    // Fixed: streams are now disposed via 'using' even on exception; the
    // original only Close()d the first stream on the success path. Two unused
    // DatumReader locals were removed (OpenReader takes the schema directly).
    using (Stream inStr = new FileStream("users.avro", FileMode.Open))
    {
        IFileReader<User> dataFileReader = DataFileReader<User>.OpenReader(inStr, avschema);
        while (dataFileReader.HasNext())
        {
            User record = dataFileReader.Next();
            Console.WriteLine("Specific Obj Read ==>" + record.name + ":" + record.favorite_color + ":" + record.favorite_number);
        }
    }

    // Generic read: records come back as field-name-indexed GenericRecords.
    using (Stream genericStream = new FileStream("users.avro", FileMode.Open))
    {
        IFileReader<GenericRecord> gdataFileReader = DataFileReader<GenericRecord>.OpenReader(genericStream, avschema);
        while (gdataFileReader.HasNext())
        {
            GenericRecord grecord = gdataFileReader.Next();
            Console.WriteLine("Generic mode of read==>" + grecord["name"] + ":" + grecord["favorite_color"] + ":" + grecord["favorite_number"]);
        }
    }

    Console.Write("Hit ENTER to Close:");
    Console.ReadLine();
}
/// <summary>
/// Interop test: records written with the Microsoft sequential Avro writer
/// must be readable by the Apache DataFileReader with identical field values.
/// </summary>
public void SequentialWriter_MicrosoftWriterApacheReader()
{
    // Seven identical records written through the Microsoft writer...
    var source = new List<ClassOfInt>();
    for (var n = 0; n < 7; n++)
    {
        source.Add(ClassOfInt.Create(true));
    }

    var containerWriter = AvroContainer.CreateWriter<ClassOfInt>(this.resultStream, Codec.Deflate);
    using (var sequentialWriter = new SequentialWriter<ClassOfInt>(containerWriter, 2))
    {
        source.ForEach(sequentialWriter.Write);
    }

    // ...must round-trip through the Apache reader unchanged.
    this.resultStream.Seek(0, SeekOrigin.Begin);
    var apacheReader = DataFileReader<GenericRecord>.OpenReader(this.resultStream);
    var roundTripped = new List<GenericRecord>(apacheReader);

    for (var n = 0; n < source.Count; ++n)
    {
        Assert.AreEqual(source[n].PrimitiveInt, roundTripped[n]["PrimitiveInt"]);
    }
}
/// <summary>
/// Verifies that both integer columns of a single tab-separated data row are
/// readable by column name, and that a second Read reports end-of-data.
/// </summary>
public void CorrectlyReadsMultiColumnTable()
{
    // Two tab-separated columns with exactly one data row.
    string fileContents = "Column 1\tColumn 2\r\n80\t85";
    // Rhino Mocks record/replay: the locator must serve exactly this file.
    MockRepository mocks = new MockRepository();
    IResourceFileLocator fileLocator = mocks.CreateMock<IResourceFileLocator>();
    Expect.Call(fileLocator.ReadTextFile("MyCompany.MyAssembly", _testDataFile)).Return(fileContents);
    mocks.ReplayAll();
    using (IDataFileReader reader = new DataFileReader(fileLocator))
    {
        reader.Open("MyCompany.MyAssembly", "Test", "DataFilePath");
        bool canRead = reader.Read();
        Assert.That(canRead, Is.EqualTo(true));
        Assert.That(reader.GetInteger("Column 1"), Is.EqualTo(80));
        Assert.That(reader.GetInteger("Column 2"), Is.EqualTo(85));
        // A second read must report that the single data row is exhausted.
        canRead = reader.Read();
        Assert.That(canRead, Is.EqualTo(false));
    }
    mocks.VerifyAll();
}
// Disabled due to long runtime
/// <summary>
/// Writes <paramref name="iterations"/> batches of records, forcing explicit
/// sync markers after two iterations, then reads the file back, collects every
/// distinct sync position seen, and verifies that Sync/Seek land on those
/// positions.
/// </summary>
[TestCase(specificSchema, Codec.Type.Deflate, 1000, 588, 998)]
public void TestSyncAndSeekPositions(string schemaStr, Codec.Type codecType, int iterations, int firstSyncPosition, int secondSyncPosition)
{
    // create and write out
    IList<Foo> records = MakeRecords(GetTestFooObject());
    MemoryStream dataFileOutputStream = new MemoryStream();
    Schema schema = Schema.Parse(schemaStr);
    DatumWriter<Foo> writer = new SpecificWriter<Foo>(schema);
    using (IFileWriter<Foo> dataFileWriter = DataFileWriter<Foo>.OpenWriter(writer, dataFileOutputStream, Codec.CreateCodec(codecType)))
    {
        for (int i = 0; i < iterations; ++i)
        {
            foreach (Foo foo in records)
            {
                dataFileWriter.Append(foo);
            }
            // write out block
            if (i == firstSyncPosition || i == secondSyncPosition)
            {
                dataFileWriter.Sync();
            }
        }
    }
    MemoryStream dataFileInputStream = new MemoryStream(dataFileOutputStream.ToArray());
    // read syncs
    IList<long> syncs = new List<long>();
    using (IFileReader<Foo> reader = DataFileReader<Foo>.OpenReader(dataFileInputStream))
    {
        long previousSync = -1;
        foreach (Foo foo in reader.NextEntries)
        {
            // Record each newly-seen sync position; the Tell()==PreviousSync()
            // case marks end-of-file and is skipped.
            if (reader.PreviousSync() != previousSync
                && reader.Tell() != reader.PreviousSync()) // EOF
            {
                previousSync = reader.PreviousSync();
                syncs.Add(previousSync);
            }
        }
        // verify syncs wth seeks
        reader.Sync(0); // first sync
        Assert.AreEqual(reader.PreviousSync(), syncs[0],
            string.Format("Error syncing reader to position: {0}", syncs[0]));
        foreach (long sync in syncs) // the rest
        {
            reader.Seek(sync);
            Foo foo = reader.Next();
            Assert.IsNotNull(foo, string.Format("Error seeking to sync position: {0}", sync));
        }
    }
}
/// <summary>
/// Reads an Event Hubs capture Avro stream and re-materializes each record as
/// an <see cref="EventData"/> with its body, system properties and user
/// properties restored. Lazily yields one event per Avro record.
/// </summary>
/// <param name="stream">Stream containing the Avro capture container.</param>
/// <param name="partitionKey">Partition key stamped onto each rebuilt event.</param>
internal static IEnumerable<EventData> ReadAvroStreamToEventHubData(this Stream stream, string partitionKey)
{
    using var reader = DataFileReader<GenericRecord>.OpenReader(stream);
    while (reader.HasNext())
    {
        GenericRecord genericAvroRecord = reader.Next();
        // Well-known capture fields, addressed via their EventData member names.
        var body = genericAvroRecord.GetValue<byte[]>(nameof(EventData.Body));
        var sequenceNumber = genericAvroRecord.GetValue<long>(nameof(EventData.SystemProperties.SequenceNumber));
        var enqueuedTimeUtc = genericAvroRecord.GetValue<string>(nameof(EventData.SystemProperties.EnqueuedTimeUtc)).ParseTime();
        var offset = genericAvroRecord.GetValue<string>(nameof(EventData.SystemProperties.Offset));
        var systemPropertiesCollection = new EventData.SystemPropertiesCollection(
            sequenceNumber: sequenceNumber,
            enqueuedTimeUtc: enqueuedTimeUtc,
            offset: offset,
            partitionKey: partitionKey);
        // Copy the capture's serialized system-properties map as-is.
        genericAvroRecord
            .GetValue<Dictionary<string, object>>(nameof(EventData.SystemProperties))
            .Foreach(x => systemPropertiesCollection.Add(x.Key, x.Value));
        // Any remaining top-level schema fields (beyond Body / Properties /
        // SystemProperties) are folded into the system-properties collection.
        IEnumerator<Field> avroSchemaField = genericAvroRecord.Schema.GetEnumerator();
        while (avroSchemaField.MoveNext())
        {
            var currentAvroSchemaField = avroSchemaField.Current;
            var currentFieldName = currentAvroSchemaField.Name;
            if (currentFieldName == nameof(EventData.Body)) { continue; }
            if (currentFieldName == nameof(EventData.Properties)) { continue; }
            if (currentFieldName == nameof(EventData.SystemProperties)) { continue; }
            if (genericAvroRecord.TryGetValue(currentFieldName, out object prop))
            {
                systemPropertiesCollection[currentFieldName] = prop;
            }
        }
        EventData eventData = new(body) { SystemProperties = systemPropertiesCollection };
        // Restore the user properties onto the rebuilt event.
        genericAvroRecord
            .GetValue<Dictionary<string, object>>(nameof(EventData.Properties))
            .Foreach(eventData.Properties.Add);
        yield return(eventData);
    }
}
// NOTE: the brace below closes a scope opened outside this chunk.
}
/// <summary>
/// Entry point: loads trajectories from .plt files, tags each with its set of
/// geohash (precision 7) cells, builds a common-subsequence cluster tree, and
/// renders each tree node (and its siblings) to PNGs in the img directory.
/// </summary>
static void Main(string[] args)
{
    var dataReader = new DataFileReader<GeoInfoModel>();
    var dir = Path.Combine(Directory.GetCurrentDirectory(), "../../../../../data/001/");
    var imgDir = Path.Combine(Directory.GetCurrentDirectory(), "img");
    var pltFiles = FileTools.GetAllFile(dir, "*.plt");

    // Fixed: materialize the sequence once. The original enumerated 'tracks'
    // four times (Parallel.ForEach, two Count() calls, ToArray()); if
    // GetTrajectories returns a lazy sequence, the GeoCodes assigned below
    // would be recomputed/lost on every re-enumeration.
    var tracks = dataReader.GetTrajectories(pltFiles).ToArray();

    // Pre-compute the deduplicated geohash cells per trajectory in parallel.
    Parallel.ForEach(tracks, t =>
    {
        t.GeoCodes = t.GeoPoints.Select(tt => GeoHash.Encode(tt.Latitude, tt.Longitude, 7))
            .Distinct()
            .ToList();
    });
    Console.WriteLine(tracks.Length);

    var cluster = new CommonSubsequenceCluster();
    var sh = new Stopwatch();
    sh.Start();
    var tree = cluster.BuildClusterTree(tracks, 0.7f, 0.4f);
    Console.WriteLine($"BuildClusterTree, count:{tracks.Length}, time:{sh.Elapsed}");

    // Start from a clean image directory.
    if (Directory.Exists(imgDir))
    {
        Directory.GetFiles(imgDir, "*.png")
            .ToList()
            .ForEach(File.Delete);
    }
    else
    {
        Directory.CreateDirectory(imgDir);
    }

    cluster.ForeachTree(tree, node =>
    {
        // Render the node itself, auto-fitted to its bounding box.
        var draw = new DrawBase(256, 256)
            .OpenAutoFit(node.MinLon, node.MinLat, node.MaxLon, node.MaxLat)
            .Draw(node, Color.Red, 3);
        draw.Image.Save($"{imgDir}/{node.LevelTag}.png");
        draw.Image.Dispose();

        // And each of its siblings.
        foreach (var sib in node.Siblings)
        {
            var drawSib = new DrawBase(256, 256)
                .OpenAutoFit(sib.MinLon, sib.MinLat, sib.MaxLon, sib.MaxLat)
                .Draw(sib, Color.Red, 3);
            drawSib.Image.Save($"{imgDir}/{sib.LevelTag}.png");
            drawSib.Image.Dispose();
        }
    });
}
/// <summary>
/// Advances to the next data row. Returns false once the underlying file is
/// exhausted; otherwise caches the row's column values and returns true.
/// </summary>
public bool Read()
{
    // Nothing left to consume once the underlying reader hits end-of-file.
    if (DataFileReader.IsEndOfFile)
    {
        return false;
    }

    _columnValues = DataFileReader.ReadLine(_tableDefinition.Columns);
    return true;
}
/// <summary>
/// Smoke test: parsing the desktop CSV test file through the CSV line reader
/// must yield a non-null result list.
/// </summary>
public void FileReaderTest()
{
    string csvPath = $"{IOHelpers.DesktopPath}/TestFile.csv";
    var csvReader = new DataFileReader<TestObject>(ParserUtil.CSVLineReader);

    List<TestObject> parsed = csvReader.Read(csvPath, null, true);

    Assert.IsNotNull(parsed);
}
/// <summary>
/// Creates the factory, storing the collaborators later used to construct
/// data-loader view models.
/// </summary>
public DataLoaderViewModelFactory(IEventAggregator eventAggregator, LoadingBarManager loadingBarManager, IWindowManager windowManager, DataConfigurationService dataConfigurationService, DataService dataService, DataFileReader dataFileReader)
{
    // Pure dependency capture — no work is performed at construction time.
    _eventAggregator = eventAggregator;
    _loadingBarManager = loadingBarManager;
    _windowManager = windowManager;
    _dataConfigurationService = dataConfigurationService;
    _dataService = dataService;
    _dataFileReader = dataFileReader;
}
/// <summary>
/// Reads every entry from the Avro stream and reports whether at least one
/// record was present.
/// </summary>
/// <param name="input">Stream positioned at an Avro container.</param>
/// <param name="value">Unused; kept to preserve the existing call signature.</param>
private bool ReadGeneric<T>(Stream input, T value)
{
    // Fixed: the reader was never disposed.
    using (IFileReader<T> reader = DataFileReader<T>.OpenReader(input))
    {
        IList<T> readFoos = new List<T>();
        foreach (T foo in reader.NextEntries)
        {
            readFoos.Add(foo);
        }
        // Fixed: removed the redundant 'readFoos != null' — the list is
        // constructed above and can never be null here.
        return readFoos.Count > 0;
    }
}
/// <summary>
/// Entry point: opens weather.avro, prints its header, then prints every
/// record in the container.
/// </summary>
static void Main(string[] args)
{
    using (var avroReader = DataFileReader<GenericRecord>.OpenReader(@"weather.avro"))
    {
        WriteHeader(avroReader);

        foreach (var record in avroReader.NextEntries)
        {
            Print(record);
        }
    }
}
/// <summary>
/// Entry point: exercises the reader against one valid data file and each of
/// the invalid-field fixtures.
/// </summary>
static void Main(string[] args)
{
    var testReader = new DataFileReader();

    string[] fixtures =
    {
        "MyFirstTextFile.txt",
        "InvalidTestFile.txt",
        "InvalidFirstName.txt",
        "InvalidLastName.txt",
        "InvalidDateOfBirth.txt",
        "InvalidPlanType.txt",
        "InvalidEffectiveDate.txt",
    };

    foreach (string fixture in fixtures)
    {
        testReader.GetRecordsFromDataFile(fixture);
    }
}
/// <summary>
/// Interop test: nested records written block-by-block with the Microsoft Avro
/// container writer (data-contract resolver) must be readable by the Apache
/// DataFileReader, including null and non-null nested references.
/// </summary>
public void Container_MicrosoftWriterApacherReaderOfNestedType()
{
    var expected = new List<NestedClass>();
    for (var i = 0; i < 7; i++)
    {
        expected.Add(NestedClass.Create(true));
    }
    using (var memoryStream = new MemoryStream())
    {
        var writer = AvroContainer.CreateWriter<NestedClass>(memoryStream, new AvroSerializerSettings { Resolver = new AvroDataContractResolver(true) }, Codec.Deflate);
        // Write the records two-at-a-time through explicit container blocks.
        var i = 0;
        while (i < expected.Count)
        {
            var block = writer.CreateBlockAsync().Result;
            for (var j = 0; j < 2; j++)
            {
                if (i >= expected.Count)
                {
                    break;
                }
                block.Write(expected[i]);
                i++;
            }
            writer.WriteBlockAsync(block).Wait();
        }
        // Flush/close the writer before handing the bytes to the reader.
        writer.Dispose();
        memoryStream.Seek(0, SeekOrigin.Begin);
        var reader = DataFileReader<GenericRecord>.OpenReader(memoryStream);
        var actual = new List<GenericRecord>(reader);
        for (var k = 0; k < expected.Count; ++k)
        {
            Assert.Equal(expected[k].PrimitiveInt, actual[k]["PrimitiveInt"]);
            // Null nested references must round-trip as null; non-null ones
            // must surface as a nested GenericRecord with the same value.
            if (expected[k].ClassOfIntReference == null)
            {
                Assert.Null(actual[k]["ClassOfIntReference"]);
            }
            else
            {
                Assert.Equal(expected[k].ClassOfIntReference.PrimitiveInt, (actual[k]["ClassOfIntReference"] as GenericRecord)["PrimitiveInt"]);
            }
        }
    }
}
/// <summary>
/// Interop test: dictionaries written block-by-block with the Microsoft Avro
/// container writer must be readable by the Apache DataFileReader with
/// identical key/value content.
/// </summary>
public void Container_MicrosoftWriterApacherReaderOfDictionary()
{
    // Arrange: seven records, each with a single distinctive key/value pair.
    var expected = new List<ContainingDictionaryClass<string, string>>();
    for (var i = 0; i < 7; i++)
    {
        expected.Add(ContainingDictionaryClass<string, string>.Create(
            new Dictionary<string, string> { { "testkey" + i, "testvalue" + i } }));
    }

    using (var memoryStream = new MemoryStream())
    {
        var writer = AvroContainer.CreateWriter<ContainingDictionaryClass<string, string>>(memoryStream, Codec.Deflate);
        // Write the records two-at-a-time through explicit container blocks.
        var i = 0;
        while (i < expected.Count)
        {
            var block = writer.CreateBlockAsync().Result;
            for (var j = 0; j < 2; j++)
            {
                if (i >= expected.Count)
                {
                    break;
                }
                block.Write(expected[i]);
                i++;
            }
            writer.WriteBlockAsync(block).Wait();
        }
        // Flush/close the writer before handing the bytes to the reader.
        writer.Dispose();

        memoryStream.Seek(0, SeekOrigin.Begin);
        var reader = DataFileReader<GenericRecord>.OpenReader(memoryStream);
        var actual = new List<GenericRecord>(reader);

        Assert.Equal(expected.Count, actual.Count);
        for (var i = 0; i < expected.Count; ++i)
        {
            var actualValue = actual[i]["Property"] as Dictionary<string, object>;
            // Added: guard against a failed cast before indexing.
            Assert.NotNull(actualValue);
            // Fixed: xUnit's Assert.Equal takes (expected, actual) — the
            // arguments were reversed, producing misleading failure output.
            Assert.Equal(expected[i].Property["testkey" + i], actualValue["testkey" + i] as string);
        }
    }
}
/// <summary>
/// A row that supplies a value only for the first column must surface the
/// missing second column as an empty string rather than failing.
/// </summary>
public void CanReadEmptyString()
{
    // Header declares two columns; the single data row fills only the first.
    string fileContents = "StringColumn1\tStringColumn2\r\nString 1";
    // Rhino Mocks record/replay: the locator must serve exactly this file.
    MockRepository mocks = new MockRepository();
    IResourceFileLocator fileLocator = mocks.CreateMock<IResourceFileLocator>();
    Expect.Call(fileLocator.ReadTextFile("MyCompany.MyAssembly", _testDataFile)).Return(fileContents);
    mocks.ReplayAll();
    using (IDataFileReader reader = new DataFileReader(fileLocator))
    {
        reader.Open("MyCompany.MyAssembly", "Test", "DataFilePath");
        bool canRead = reader.Read();
        Assert.That(canRead, Is.EqualTo(true));
        Assert.That(reader.GetString("StringColumn2"), Is.EqualTo(string.Empty));
        // A second read must report that the single data row is exhausted.
        canRead = reader.Read();
        Assert.That(canRead, Is.EqualTo(false));
    }
    mocks.VerifyAll();
}
/// <summary>
/// An enumeration column must be readable by numeric value: "1" maps to
/// PersistenceMode.Live and "2" to PersistenceMode.Archive, after which a
/// third Read reports end-of-data.
/// </summary>
public void CorrectlyReadsEnumeratedTextFileColumnByValue()
{
    // One "Persistence Mode" column with two data rows.
    const string contents = "Persistence Mode\r\n1\r\n2\r\n";
    var repository = new MockRepository();
    var locator = repository.CreateMock<IResourceFileLocator>();
    Expect.Call(locator.ReadTextFile("MyCompany.MyAssembly", _testDataFile)).Return(contents);
    repository.ReplayAll();

    using (IDataFileReader reader = new DataFileReader(locator))
    {
        reader.Open("MyCompany.MyAssembly", "Test", "DataFilePath");

        Assert.That(reader.Read(), Is.EqualTo(true));
        Assert.That(reader.GetEnumerationByValue<PersistenceMode>("Persistence Mode"), Is.EqualTo(PersistenceMode.Live));
        Assert.That(reader.Read(), Is.EqualTo(true));
        Assert.That(reader.GetEnumerationByValue<PersistenceMode>("Persistence Mode"), Is.EqualTo(PersistenceMode.Archive));
        Assert.That(reader.Read(), Is.EqualTo(false));
    }

    repository.VerifyAll();
}
/// <summary>
/// A reader that was never opened must refuse to expose column headers.
/// NOTE(review): no assertion here — presumably the expected exception is
/// declared via a test attribute outside this chunk; confirm.
/// </summary>
public void ThrowsExceptionIfColumnsAreRetrievedFromClosedReader()
{
    IDataFileReader unopenedReader = new DataFileReader(null);

    // Expected to throw: Open was never called.
    unopenedReader.GetColumnHeaders();
}
/// <summary>
/// Reading an integer from a column name that is not present in the header
/// row should throw.
/// </summary>
// NOTE(review): there is no Assert here — this test presumably relies on an
// [ExpectedException]-style attribute declared outside this chunk; confirm.
public void CorrectlyThrowsExceptionIfUnknownColumnSpecified()
{
    // Single known column with one data row.
    string fileContents = "My Good Column Name\r\nMy Data\r\n";
    MockRepository mocks = new MockRepository();
    IResourceFileLocator fileLocator = mocks.CreateMock<IResourceFileLocator>();
    Expect.Call(fileLocator.ReadTextFile("MyCompany.MyAssembly", _testDataFile)).Return(fileContents);
    mocks.ReplayAll();
    using (IDataFileReader reader = new DataFileReader(fileLocator))
    {
        reader.Open("MyCompany.MyAssembly", "Test", "DataFilePath");
        reader.Read();
        // Expected to throw: "My Bad Column Name" is not in the header row.
        reader.GetInteger("My Bad Column Name");
    }
}
/// <summary>
/// The header row of a tab-separated file must be parsed into the expected
/// array of column names.
/// </summary>
public void CorrectlyReadsColumnHeaders()
{
    // A header-only file: two tab-separated column names, no data rows.
    const string headerOnlyContents = "First Column\tSecond Column\r\n";
    var repository = new MockRepository();
    var locator = repository.CreateMock<IResourceFileLocator>();
    Expect.Call(locator.ReadTextFile("MyCompany.MyAssembly", _testDataFile)).Return(headerOnlyContents);
    repository.ReplayAll();

    using (IDataFileReader reader = new DataFileReader(locator))
    {
        reader.Open("MyCompany.MyAssembly", "Test", "DataFilePath");

        Assert.That(reader.GetColumnHeaders(), Is.EqualTo(new string[] { "First Column", "Second Column" }));
    }

    repository.VerifyAll();
}