public void TestInitialize()
{
    // Comma-separated file with a header row; columns we don't map are skipped.
    _description = new CsvFileDescription
    {
        SeparatorChar = ',',
        FirstLineHasColumnNames = true,
        IgnoreUnknownColumns = true,
    };

    _input = @"Id,Name,Last Name,Age,City
1,John,Doe,15,Washington
2,Jane,Doe,20,New York
";

    // The two Person records the CSV above should deserialize into.
    Expected = new[]
    {
        new Person { Name = "John", LastName = "Doe", Age = 15 },
        new Person { Name = "Jane", LastName = "Doe", Age = 20 }
    };

    // Wire up the reader pipeline over the in-memory input.
    _stream = _input.GetStreamReader();
    _dataAccess = new FileDataAccess(_stream, _description);
    _reader = _dataAccess.ReadDataPreparation<Person>(null);
    _dataAccess.Row = new DataRow();
}
public void TestLogRecord_FullFields_2()
{
    // The mapper must discover all 27 LogRecord columns, including the attributed "PCI".
    FieldMapperReading<LogRecord> fm =
        new FieldMapperReading<LogRecord>(_fileDescriptionNamesUs, null, false);
    Assert.IsNotNull(fm);
    Assert.IsNotNull(fm.FieldIndexInfo);
    Assert.IsNotNull(fm.NameToInfo);
    // FIX: Assert.AreEqual takes (expected, actual) — arguments were reversed,
    // which produces misleading failure messages. Swapped throughout.
    Assert.AreEqual(27, fm.NameToInfo.Count);
    Assert.IsNotNull(fm.NameToInfo["PCI"]);
    Assert.AreEqual(true, fm.NameToInfo["PCI"].HasColumnAttribute);

    // No fixed char lengths when a separator character is used.
    List<int> charLengths = fm.GetCharLengths();
    Assert.IsNull(charLengths);

    FileDataAccess dataAccess =
        new FileDataAccess(_testInput.GetStreamReader(), _fileDescriptionNamesUs);
    Assert.IsNotNull(dataAccess);
    RowReader<LogRecord> reader = dataAccess.ReadDataPreparation<LogRecord>(null);
    Assert.IsNotNull(reader);

    // Read every data row and check the single parsed record field-by-field.
    dataAccess.Row = new DataRow();
    List<LogRecord> records = dataAccess.ReadFieldDataRows(reader, null, fm, null).ToList();
    Assert.IsNotNull(records);
    Assert.AreEqual(1, records.Count);
    Assert.AreEqual(0, records[0].Id);
    Assert.AreEqual(359, records[0].Time.Millisecond);
    Assert.AreEqual(10.1, records[0].Sinr);
    Assert.AreEqual(17, records[0].UlMcs);
    Assert.AreEqual(10749096, records[0].DlThroughput);
}
public void Setup()
{
    // Ensure the test working directory exists before each test run.
    Directory.CreateDirectory(TEST_DIR_PATH);
    // NOTE(review): BinaryFormatter is obsolete and insecure (removed in .NET 9);
    // the serializer injected here should eventually migrate to a safe format
    // such as System.Text.Json — confirm what FileDataAccess<TestObj> requires.
    IFormatter formatter = new BinaryFormatter();
    this.dataAccess = new FileDataAccess <TestObj>(formatter);
}
public void TestInitialize()
{
    // CSV layout: comma separator, header present, extra columns ignored.
    _description = new CsvFileDescription
    {
        SeparatorChar = ',',
        FirstLineHasColumnNames = true,
        IgnoreUnknownColumns = true,
    };

    _input = @"Id,Name,Last Name,Age,City
1,John,Doe,15,Washington";

    // Expose the CSV text as a UTF-8 StreamReader.
    byte[] inputBytes = Encoding.UTF8.GetBytes(_input);
    Stream memoryStream = new MemoryStream(inputBytes);
    _sr = new StreamReader(memoryStream, Encoding.UTF8);

    // Build each piece of the reading pipeline that the tests poke at directly.
    _dataAccess = new FileDataAccess(_sr, _description);
    _cs = new CsvStream(_sr, null, _description.SeparatorChar,
                        _description.IgnoreTrailingSeparatorChar);
    _row = new DataRow();
    _fm = new FieldMapperReading<Person>(_description, null, false);
    _ae = new AggregatedException(typeof(Person).ToString(), null,
                                  _description.MaximumNbrExceptions);
    _reader = new RowReader<Person>(_description, _ae);
}
public void TestInitialize()
{
    // Describe the CSV shape: comma-separated with a header line,
    // silently skipping columns that have no mapped property.
    _description = new CsvFileDescription
    {
        SeparatorChar = ',',
        FirstLineHasColumnNames = true,
        IgnoreUnknownColumns = true,
    };

    _input = @"Id,Name,Last Name,Age,City
1,John,Doe,15,Washington
2,Jane,Doe,20,New York
";

    // Records we expect the two data lines above to produce.
    Expected = new[]
    {
        new Person { Name = "John", LastName = "Doe", Age = 15 },
        new Person { Name = "Jane", LastName = "Doe", Age = 20 }
    };

    _stream = _input.GetStreamReader();
    _dataAccess = new FileDataAccess(_stream, _description);
    _reader = _dataAccess.ReadDataPreparation<Person>(null);
    _dataAccess.Row = new DataRow();
}
public void ClearDatabase_Returns()
{
    // Arrange
    var reader = new StubIFileReader();
    var writer = new StubIFileWriter();
    var sut = new FileDataAccess<FakeBlogModel>(reader, writer);
    var filePath = "path/to/the/file.json";

    // Act — clearing must complete without throwing.
    sut.ClearDatabase(filePath);
}
public void WriteToDatabase_ListOfEntityIsEmpty_Returns()
{
    // Arrange
    var reader = new StubIFileReader();
    var writer = new StubIFileWriter();
    var sut = new FileDataAccess<FakeBlogModel>(reader, writer);
    var filePath = "path/to/the/file.json";
    var entities = new List<FakeBlogModel>();

    // Act — writing an empty list must complete without throwing.
    sut.WriteToDatabase(filePath, entities);
}
public void WriteToDatabase_EntityIsNull_Returns()
{
    // Arrange
    var reader = new StubIFileReader();
    var writer = new StubIFileWriter();
    var sut = new FileDataAccess<FakeBlogModel>(reader, writer);
    var filePath = "path/to/the/file.json";
    FakeBlogModel entity = null;

    // Act — a null entity must be tolerated without throwing.
    sut.WriteToDatabase(filePath, entity);
}
public void GoodFileNoSeparatorCharUSEnglish()
{
    // Arrange: fixed-width (no separator char) file parsed with US-English culture.
    CsvFileDescription fileDescription_namesUs = new CsvFileDescription
    {
        NoSeparatorChar = true,
        UseOutputFormatForParsingCsvValue = false,
        FirstLineHasColumnNames = false,
        EnforceCsvColumnAttribute = true, // default is false
        FileCultureName = "en-US"         // default is the current culture
    };

    const string testInput = @"AAAAAAAA34.18405/23/08
BBBBBBBB10.31105/12/12
CCCCCCCC12.00012/23/08";

    var expected = new[]
    {
        new ProductDataCharLength { Name = "AAAAAAAA", Weight = 34.184, StartDate = new DateTime(2008, 5, 23), },
        new ProductDataCharLength { Name = "BBBBBBBB", Weight = 10.311, StartDate = new DateTime(2012, 5, 12), },
        new ProductDataCharLength { Name = "CCCCCCCC", Weight = 12.000, StartDate = new DateTime(2008, 12, 23), }
    };

    // Act and Assert
    FileDataAccess dataAccess = new FileDataAccess(testInput.GetStreamReader(), fileDescription_namesUs);
    RowReader<ProductDataCharLength> reader = dataAccess.ReadDataPreparation<ProductDataCharLength>(null);
    dataAccess.Row = new DataRow();
    FieldMapperReading<ProductDataCharLength> fm =
        new FieldMapperReading<ProductDataCharLength>(fileDescription_namesUs, null, false);

    // Three fixed-width columns: Name, Weight, StartDate.
    List<int> charLengths = fm.GetCharLengths();
    // FIX: Assert.AreEqual takes (expected, actual); the arguments were reversed.
    Assert.AreEqual(3, charLengths.Count);

    bool firstRow = true;
    List<ProductDataCharLength> actual = new List<ProductDataCharLength>();
    while (dataAccess.Cs.ReadRow(dataAccess.Row, charLengths))
    {
        // Skip blank lines (a single empty/whitespace cell).
        if ((dataAccess.Row.Count == 1) &&
            ((dataAccess.Row[0].Value == null) ||
             (string.IsNullOrEmpty(dataAccess.Row[0].Value.Trim()))))
        {
            continue;
        }

        bool readingResult = reader.ReadingOneFieldRow(fm, dataAccess.Row, firstRow);
        if (readingResult)
        {
            actual.Add(reader.Obj);
        }
        firstRow = false;
    }

    AssertCollectionsEqual(actual, expected);
}
public void ClearDatabase_VerifyWriter()
{
    // Arrange
    var reader = new StubIFileReader();
    var mockWriter = new MockIFileWriter();
    var sut = new FileDataAccess<FakeBlogModel>(reader, mockWriter);
    var filePath = "path/to/the/file.json";

    // Act
    sut.ClearDatabase(filePath);

    // Assert — clearing overwrites (append=false) the file with an empty string.
    mockWriter.VerifyWrite(filePath, false, "");
}
public async Task <HttpResponseMessage> DownloadFile(string fileId)
{
    // Resolve the file's table entity first, then stream the blob
    // stored under the lower-cased RowKey.
    var dataAccess = new FileDataAccess();
    var blobAccess = new BlobAccess.BlobAccess();

    var fileEntity = await dataAccess.GetFileAsync(fileId);
    var blobName = fileEntity.RowKey.ToLowerInvariant();
    return await blobAccess.DownloadFileAsync(blobName);
}
public void WriteToDatabase_ListOfEntityIsNull_VerifyReader()
{
    // Arrange
    var mockReader = new MockIFileReader();
    var writer = new StubIFileWriter();
    var sut = new FileDataAccess<FakeBlogModel>(mockReader, writer);
    var filePath = "path/to/the/file.json";
    List<FakeBlogModel> entities = null;

    // Act
    sut.WriteToDatabase(filePath, entities);

    // Assert — a null list short-circuits before the file is ever read.
    mockReader.VerifyReadNeverCalled();
}
public void WriteToDatabase_EntityIsNull_VerifyWriter()
{
    // Arrange
    var reader = new StubIFileReader();
    var mockWriter = new MockIFileWriter();
    var sut = new FileDataAccess<FakeBlogModel>(reader, mockWriter);
    var filePath = "path/to/the/file.json";
    FakeBlogModel entity = null;

    // Act
    sut.WriteToDatabase(filePath, entity);

    // Assert — nothing should be written for a null entity.
    mockWriter.VerifyWriteNeverCalled();
}
public void OverwriteDatabase_ListOfEntityIsValid_Returns()
{
    // Arrange
    var reader = new StubIFileReader();
    var writer = new StubIFileWriter();
    var sut = new FileDataAccess<FakeBlogModel>(reader, writer);
    var filePath = "path/to/the/file.json";
    var entities = new List<FakeBlogModel> { new FakeBlogModel() };

    // Act — a valid list overwrites the database without throwing.
    sut.OverwriteDatabase(filePath, entities);
}
public void WriteToDatabase_EntityIsValidAndDatabaseIsEmpty_VerifyReader()
{
    // Arrange
    var mockReader = new MockIFileReader();
    var writer = new StubIFileWriter();
    var sut = new FileDataAccess<FakeBlogModel>(mockReader, writer);
    var filePath = "path/to/the/file.json";
    var entity = new FakeBlogModel();

    // Act
    sut.WriteToDatabase(filePath, entity);

    // Assert — the existing database file must be read before appending.
    mockReader.VerifyRead(filePath);
}
public void OverwriteToDatabase_ListOfEntityIsEmpty_VerifyWriter()
{
    // Arrange
    var reader = new StubIFileReader();
    var mockWriter = new MockIFileWriter();
    var sut = new FileDataAccess<FakeBlogModel>(reader, mockWriter);
    var filePath = "path/to/the/file.json";
    var entities = new List<FakeBlogModel>();

    // Act
    sut.OverwriteDatabase(filePath, entities);

    // Assert — an empty list must not trigger any write.
    mockWriter.VerifyWriteNeverCalled();
}
public void ReadDatabase_FileContentsAreNullOrEmpty_VerifyReader(string stub_fileContents)
{
    // Arrange — the stubbed file contents are supplied by the test case data.
    var mockReader = new MockIFileReader();
    var writer = new StubIFileWriter();
    var sut = new FileDataAccess<FakeBlogModel>(mockReader, writer);
    mockReader.StubRead(stub_fileContents);
    var filePath = "path/to/the/file.json";

    // Act
    sut.ReadDatabase(filePath);

    // Assert — the file is read exactly from the requested path.
    mockReader.VerifyRead(filePath);
}
public void WriteToDatabase_ListOfEntityIsValidAndDatabaseContainsOneObject_Returns()
{
    // Arrange — the database already contains the serialized list.
    var reader = new StubIFileReader();
    var writer = new StubIFileWriter();
    var sut = new FileDataAccess<FakeBlogModel>(reader, writer);
    var filePath = "path/to/the/file.json";
    var entities = new List<FakeBlogModel> { new FakeBlogModel() };
    reader.StubRead(JsonConvert.SerializeObject(entities));

    // Act — appending to a non-empty database must not throw.
    sut.WriteToDatabase(filePath, entities);
}
public void ReadDatabase_FileContentsAreNullOrEmpty_ReturnsEmptyList(string stub_fileContents)
{
    // Arrange — the file reader returns null/empty contents (theory data).
    var reader = new StubIFileReader();
    var writer = new StubIFileWriter();
    var sut = new FileDataAccess<FakeBlogModel>(reader, writer);
    reader.StubRead(stub_fileContents);
    var filePath = "path/to/the/file.json";

    // Act
    var actual = sut.ReadDatabase(filePath);

    // Assert — an empty database is represented as an empty list, never null.
    var expected = new List<FakeBlogModel>();
    Assert.Equal(expected, actual);
}
public void OverwriteDatabase_ListOfEntityIsValid_VerifyWriter()
{
    // Arrange
    var reader = new StubIFileReader();
    var mockWriter = new MockIFileWriter();
    var sut = new FileDataAccess<FakeBlogModel>(reader, mockWriter);
    var filePath = "path/to/the/file.json";
    var entities = new List<FakeBlogModel> { new FakeBlogModel() };

    // Act
    sut.OverwriteDatabase(filePath, entities);

    // Assert — the file is replaced (append=false) with the serialized list.
    mockWriter.VerifyWrite(filePath, false, JsonConvert.SerializeObject(entities));
}
public void WriteToDatabase_EntityIsValidAndDatabaseIsEmpty_VerifyWriter()
{
    // Arrange
    var reader = new StubIFileReader();
    var mockWriter = new MockIFileWriter();
    var sut = new FileDataAccess<FakeBlogModel>(reader, mockWriter);
    var filePath = "path/to/the/file.json";
    var entity = new FakeBlogModel();

    // Writing a single entity into an empty database yields a one-element list.
    var expectedList = new List<FakeBlogModel> { entity };

    // Act
    sut.WriteToDatabase(filePath, entity);

    // Assert
    mockWriter.VerifyWrite(filePath, false, JsonConvert.SerializeObject(expectedList));
}
public void ReadDatabase_FileContainsOneObject_VerifyReader()
{
    // Arrange — the stubbed file holds one serialized entity.
    var mockReader = new MockIFileReader();
    var writer = new StubIFileWriter();
    var sut = new FileDataAccess<FakeBlogModel>(mockReader, writer);
    var filePath = "path/to/the/file.json";
    var storedEntities = new List<FakeBlogModel> { new FakeBlogModel() };
    var fileContents = JsonConvert.SerializeObject(storedEntities);
    mockReader.StubRead(fileContents);

    // Act
    sut.ReadDatabase(filePath);

    // Assert — the read targeted the requested path.
    mockReader.VerifyRead(filePath);
}
public void TestLogRecord_FullFields()
{
    // The mapper must discover all 27 LogRecord columns, including the attributed "PCI".
    FieldMapperReading<LogRecord> fm =
        new FieldMapperReading<LogRecord>(_fileDescriptionNamesUs, null, false);
    Assert.IsNotNull(fm);
    Assert.IsNotNull(fm.FieldIndexInfo);
    Assert.IsNotNull(fm.NameToInfo);
    // FIX: Assert.AreEqual takes (expected, actual) — the arguments were reversed
    // throughout this test, producing misleading failure messages. Swapped.
    Assert.AreEqual(27, fm.NameToInfo.Count);
    Assert.IsNotNull(fm.NameToInfo["PCI"]);
    Assert.AreEqual(true, fm.NameToInfo["PCI"].HasColumnAttribute);

    // No fixed char lengths when a separator character is used.
    List<int> charLengths = fm.GetCharLengths();
    Assert.IsNull(charLengths);

    FileDataAccess dataAccess =
        new FileDataAccess(_testInput.GetStreamReader(), _fileDescriptionNamesUs);
    Assert.IsNotNull(dataAccess);
    RowReader<LogRecord> reader = dataAccess.ReadDataPreparation<LogRecord>(null);
    Assert.IsNotNull(reader);
    Assert.IsNotNull(dataAccess.Cs);

    // First row is the header; ReadingOneFieldRow returns false for it.
    dataAccess.Row = new DataRow();
    Assert.IsTrue(dataAccess.Cs.ReadRow(dataAccess.Row));
    bool readingResult = reader.ReadingOneFieldRow(fm, dataAccess.Row, true);
    Assert.IsFalse(readingResult);

    // Second row is data: spot-check the first two cells and row geometry.
    Assert.IsTrue(dataAccess.Cs.ReadRow(dataAccess.Row));
    Assert.AreEqual("0", dataAccess.Row[0].Value);
    Assert.AreEqual("13:58:08:359", dataAccess.Row[1].Value);
    Assert.AreEqual(18, dataAccess.Row.Count, "row count");
    Assert.AreEqual(27, fm.FieldIndexInfo.IndexToInfo.Length, "index to info");
    Assert.AreEqual(18, fm.FieldIndexInfo.GetMaxRowCount(18));

    // Column 1 (the time field) carries an explicit output format.
    TypeFieldInfo tfi = fm.FieldIndexInfo.QueryTypeFieldInfo(true, 1);
    Assert.IsNotNull(tfi);
    Assert.AreEqual("HH:mm:ss.fff", tfi.OutputFormat);
    string value = dataAccess.Row[1].Value;
    Assert.AreEqual("13:58:08:359", value);
}
private void button1_Click(object sender, EventArgs e)
{
    // Build a sample employee, persist it, then reload the list and show names.
    EmployeeModel emp = new EmployeeModel()
    {
        BirthDate = new DateTime(1998, 4, 8),
        Name = "asdasda"
    };

    IDataAccess<IEmployeeModel> dataAccess = new FileDataAccess<IEmployeeModel>();
    EmployeeController employeeController = new EmployeeController(dataAccess);
    employeeController.SaveEmployee(emp);

    // FIX: LoadEmployee() was previously called twice — once with the result
    // discarded, then again in the foreach. Load once.
    foreach (var item in employeeController.LoadEmployee())
    {
        listBox1.Items.Add(item.Name);
    }
}
public void ReadDatabase_FileContainsOneObject_ReturnsListWithTheOneObject()
{
    // Arrange — the stubbed file holds one serialized entity.
    var reader = new StubIFileReader();
    var writer = new StubIFileWriter();
    var sut = new FileDataAccess<FakeBlogModel>(reader, writer);
    var filePath = "path/to/the/file.json";
    var storedEntities = new List<FakeBlogModel> { new FakeBlogModel() };
    var fileContents = JsonConvert.SerializeObject(storedEntities);
    reader.StubRead(fileContents);

    // A round-trip through JSON gives the expected deserialized shape.
    var expected = JsonConvert.DeserializeObject<List<FakeBlogModel>>(fileContents);

    // Act
    var actual = sut.ReadDatabase(filePath);

    // Assert
    Assert.Equal(expected.Count, actual.Count);
    Assert.Equal(expected[0].FakeProperty, actual[0].FakeProperty);
}
public void WriteToDatabase_ListOfEntityIsValidAndDatabaseContainsOneObject_VerifyWriter()
{
    // Arrange — database already contains one object; we append one more.
    var reader = new StubIFileReader();
    var mockWriter = new MockIFileWriter();
    var sut = new FileDataAccess<FakeBlogModel>(reader, mockWriter);
    var filePath = "path/to/the/file.json";
    var newEntities = new List<FakeBlogModel> { new FakeBlogModel() };
    var existingEntities = new List<FakeBlogModel> { new FakeBlogModel() };
    reader.StubRead(JsonConvert.SerializeObject(existingEntities));

    // Expected write payload: existing entities followed by the new ones.
    existingEntities.AddRange(newEntities);

    // Act
    sut.WriteToDatabase(filePath, newEntities);

    // Assert
    mockWriter.VerifyWrite(filePath, false, JsonConvert.SerializeObject(existingEntities));
}
public async Task <string> UploadFile(HttpPostedFile file)
{
    // Record the file's metadata in table storage, then upload its bytes to blob storage.
    var dataAccess = new FileDataAccess();
    var blobAccess = new BlobAccess.BlobAccess();

    var fileName = file.FileName;
    var contentLength = file.ContentLength; // FIX: local was misspelled "contetnLength"
    var fileId = Guid.NewGuid().ToString();

    try
    {
        var writeResult = await dataAccess.WriteFileAsync(
            new FileEntity(fileName, fileId, contentLength.ToString()));
        await blobAccess.UploadFileAsync(file);
        return writeResult;
    }
    catch (StorageException exception)
    {
        // FIX: previously `new Exception(exception.Message)` discarded the original
        // exception and its stack trace; keep it as InnerException so callers that
        // catch Exception still work but diagnostics are preserved.
        throw new Exception(exception.Message, exception);
    }
}
public void TestLogRecord_FullFields()
{
    // Mapper discovery: all 27 LogRecord columns, "PCI" carries a column attribute.
    FieldMapperReading<LogRecord> fm =
        new FieldMapperReading<LogRecord>(_fileDescriptionNamesUs, null, false);
    Assert.IsNotNull(fm);
    Assert.IsNotNull(fm.FieldIndexInfo);
    Assert.IsNotNull(fm.NameToInfo);
    // FIX: Assert.AreEqual takes (expected, actual) — arguments were reversed
    // throughout this test; swapped for correct failure messages.
    Assert.AreEqual(27, fm.NameToInfo.Count);
    Assert.IsNotNull(fm.NameToInfo["PCI"]);
    Assert.AreEqual(true, fm.NameToInfo["PCI"].HasColumnAttribute);

    // Separator-based parsing: no fixed char lengths expected.
    List<int> charLengths = fm.GetCharLengths();
    Assert.IsNull(charLengths);

    FileDataAccess dataAccess =
        new FileDataAccess(_testInput.GetStreamReader(), _fileDescriptionNamesUs);
    Assert.IsNotNull(dataAccess);
    RowReader<LogRecord> reader = dataAccess.ReadDataPreparation<LogRecord>(null);
    Assert.IsNotNull(reader);
    Assert.IsNotNull(dataAccess.Cs);

    // Header row: ReadingOneFieldRow returns false on the first (name) row.
    dataAccess.Row = new DataRow();
    Assert.IsTrue(dataAccess.Cs.ReadRow(dataAccess.Row));
    bool readingResult = reader.ReadingOneFieldRow(fm, dataAccess.Row, true);
    Assert.IsFalse(readingResult);

    // First data row: spot-check the leading cells and row/index geometry.
    Assert.IsTrue(dataAccess.Cs.ReadRow(dataAccess.Row));
    Assert.AreEqual("0", dataAccess.Row[0].Value);
    Assert.AreEqual("13:58:08:359", dataAccess.Row[1].Value);
    Assert.AreEqual(18, dataAccess.Row.Count, "row count");
    Assert.AreEqual(27, fm.FieldIndexInfo.IndexToInfo.Length, "index to info");
    Assert.AreEqual(18, fm.FieldIndexInfo.GetMaxRowCount(18));

    // Column 1 (time) declares an explicit output format.
    TypeFieldInfo tfi = fm.FieldIndexInfo.QueryTypeFieldInfo(true, 1);
    Assert.IsNotNull(tfi);
    Assert.AreEqual("HH:mm:ss.fff", tfi.OutputFormat);
    string value = dataAccess.Row[1].Value;
    Assert.AreEqual("13:58:08:359", value);
}
public void TestInitialize()
{
    // One-row CSV with a header; unmapped columns are skipped.
    _description = new CsvFileDescription
    {
        SeparatorChar = ',',
        FirstLineHasColumnNames = true,
        IgnoreUnknownColumns = true,
    };

    _input = @"Id,Name,Last Name,Age,City
1,John,Doe,15,Washington";

    // Materialize the input as a UTF-8 stream for the reader under test.
    byte[] encoded = Encoding.UTF8.GetBytes(_input);
    Stream backingStream = new MemoryStream(encoded);
    _sr = new StreamReader(backingStream, Encoding.UTF8);

    // Construct every collaborator the individual tests exercise.
    _dataAccess = new FileDataAccess(_sr, _description);
    _cs = new CsvStream(_sr, null, _description.SeparatorChar,
                        _description.IgnoreTrailingSeparatorChar);
    _row = new DataRow();
    _fm = new FieldMapperReading<Person>(_description, null, false);
    _ae = new AggregatedException(typeof(Person).ToString(), null,
                                  _description.MaximumNbrExceptions);
    _reader = new RowReader<Person>(_description, _ae);
}
public async Task <IEnumerable <FileEntity> > GetAllFiles()
{
    // Thin pass-through to the table-storage data access layer.
    var dataAccess = new FileDataAccess();
    var files = await dataAccess.GetAllFilesAsync();
    return files;
}
public GnmaController(IOptions <AppSettings> appSettings)
{
    // Store injected configuration; the file data access is constructed directly.
    // NOTE(review): consider injecting FileDataAccess (behind an interface)
    // for testability instead of newing it up here.
    this.appSettings = appSettings; this.fileDataAccess = new FileDataAccess();
}
public void TestLogRecord_FullFields_2()
{
    // Mapper discovery: all 27 LogRecord columns, "PCI" carries a column attribute.
    FieldMapperReading<LogRecord> fm =
        new FieldMapperReading<LogRecord>(_fileDescriptionNamesUs, null, false);
    Assert.IsNotNull(fm);
    Assert.IsNotNull(fm.FieldIndexInfo);
    Assert.IsNotNull(fm.NameToInfo);
    // FIX: Assert.AreEqual takes (expected, actual) — arguments were reversed.
    Assert.AreEqual(27, fm.NameToInfo.Count);
    Assert.IsNotNull(fm.NameToInfo["PCI"]);
    Assert.AreEqual(true, fm.NameToInfo["PCI"].HasColumnAttribute);

    // Separator-based parsing: no fixed char lengths expected.
    List<int> charLengths = fm.GetCharLengths();
    Assert.IsNull(charLengths);

    FileDataAccess dataAccess =
        new FileDataAccess(_testInput.GetStreamReader(), _fileDescriptionNamesUs);
    Assert.IsNotNull(dataAccess);
    RowReader<LogRecord> reader = dataAccess.ReadDataPreparation<LogRecord>(null);
    Assert.IsNotNull(reader);

    // Read all data rows and verify the single parsed record field-by-field.
    dataAccess.Row = new DataRow();
    List<LogRecord> records = dataAccess.ReadFieldDataRows(reader, null, fm, null).ToList();
    Assert.IsNotNull(records);
    Assert.AreEqual(1, records.Count);
    Assert.AreEqual(0, records[0].Id);
    Assert.AreEqual(359, records[0].Time.Millisecond);
    Assert.AreEqual(10.1, records[0].Sinr);
    Assert.AreEqual(17, records[0].UlMcs);
    Assert.AreEqual(10749096, records[0].DlThroughput);
}
public void GoodFileNoSeparatorCharUSEnglish()
{
    // Arrange: fixed-width (no separator) input parsed with US-English culture.
    CsvFileDescription fileDescription_namesUs = new CsvFileDescription
    {
        NoSeparatorChar = true,
        UseOutputFormatForParsingCsvValue = false,
        FirstLineHasColumnNames = false,
        EnforceCsvColumnAttribute = true, // default is false
        FileCultureName = "en-US"         // default is the current culture
    };

    const string testInput = @"AAAAAAAA34.18405/23/08
BBBBBBBB10.31105/12/12
CCCCCCCC12.00012/23/08";

    var expected = new[]
    {
        new ProductDataCharLength { Name = "AAAAAAAA", Weight = 34.184, StartDate = new DateTime(2008, 5, 23), },
        new ProductDataCharLength { Name = "BBBBBBBB", Weight = 10.311, StartDate = new DateTime(2012, 5, 12), },
        new ProductDataCharLength { Name = "CCCCCCCC", Weight = 12.000, StartDate = new DateTime(2008, 12, 23), }
    };

    // Act and Assert
    FileDataAccess dataAccess = new FileDataAccess(testInput.GetStreamReader(), fileDescription_namesUs);
    RowReader<ProductDataCharLength> reader = dataAccess.ReadDataPreparation<ProductDataCharLength>(null);
    dataAccess.Row = new DataRow();
    FieldMapperReading<ProductDataCharLength> fm =
        new FieldMapperReading<ProductDataCharLength>(fileDescription_namesUs, null, false);

    // Three fixed-width columns: Name, Weight, StartDate.
    List<int> charLengths = fm.GetCharLengths();
    // FIX: Assert.AreEqual takes (expected, actual); the arguments were reversed.
    Assert.AreEqual(3, charLengths.Count);

    bool firstRow = true;
    List<ProductDataCharLength> actual = new List<ProductDataCharLength>();
    while (dataAccess.Cs.ReadRow(dataAccess.Row, charLengths))
    {
        // Skip blank lines (a single empty/whitespace cell).
        if ((dataAccess.Row.Count == 1) &&
            ((dataAccess.Row[0].Value == null) ||
             (string.IsNullOrEmpty(dataAccess.Row[0].Value.Trim()))))
        {
            continue;
        }

        bool readingResult = reader.ReadingOneFieldRow(fm, dataAccess.Row, firstRow);
        if (readingResult)
        {
            actual.Add(reader.Obj);
        }
        firstRow = false;
    }

    AssertCollectionsEqual(actual, expected);
}
public async Task <IHttpActionResult> PutValue([FromODataUri] string key)
{
    // Accepts one byte segment of a chunked upload identified by `key`.
    // Validates Content-Type, Content-Length and Content-Range headers,
    // forwards the segment to the upload session, and returns 200 with the
    // file metadata once the final segment arrives (202 otherwise).
    try
    {
        // A media type is required so the upload's content type can be recorded.
        var contentTypeHeader = Request.Content.Headers.ContentType;
        if (contentTypeHeader == null || contentTypeHeader.MediaType == null)
        {
            return(BadRequest());
        }

        // Content-Length must be present (411 Length Required) and positive.
        var contentLength = Request.Content.Headers.ContentLength;
        if (!contentLength.HasValue)
        {
            return(StatusCode(HttpStatusCode.LengthRequired));
        }
        if (contentLength.Value <= 0)
        {
            return(BadRequest());
        }

        // Content-Range must describe a valid "bytes from-to/length" segment:
        // 0 <= from <= to <= length-1, and the segment size (to - from + 1)
        // must equal Content-Length.
        var contentRange = Request.Content.Headers.ContentRange;
        if (contentRange == null ||
            !contentRange.HasRange ||
            !contentRange.HasLength ||
            !contentRange.From.HasValue ||
            !contentRange.To.HasValue ||
            !contentRange.Unit.Equals("bytes", StringComparison.Ordinal) ||
            contentRange.Length <= 0 ||
            contentRange.From.Value < 0 ||
            contentRange.From.Value > contentRange.Length - 1 ||
            contentRange.From.Value > contentRange.To.Value ||
            contentRange.To.Value < 0 ||
            contentRange.To.Value > contentRange.Length - 1 ||
            contentRange.To.Value - contentRange.From.Value + 1 != contentLength)
        {
            return(BadRequest());
        }

        var mediaType = contentTypeHeader.MediaType;
        var stream = await Request.Content.ReadAsStreamAsync();

        // Hand the segment to the session store; it tracks overall upload progress.
        var uploadSession = await UploadSessionDataAccess.UploadSegmentAsync(
            key, mediaType, contentRange.From.Value, contentRange.To.Value,
            contentRange.Length.Value, stream);

        if (uploadSession.Finished)
        {
            // All segments received: return the session plus the stored file metadata.
            var uploadedFile = await FileDataAccess.GetAsync(uploadSession.FileIdentifier);
            return(Ok(new UploadSession(uploadSession, uploadedFile)));
        }
        else
        {
            // More segments are still expected.
            return(StatusCode(HttpStatusCode.Accepted));
        }
    }
    catch (ResourceNotFoundException)
    {
        // Thrown by the data access layer when the session/file key is unknown.
        return(NotFound());
    }
}