public void ExtractDataMustSetTheStatusToExtracting()
{
    // Arrange
    var source = new FileDataSource
    {
        Id = newSourceId,
        InputStatus = SourceStatus.PendingExtraction,
        HandlerName = "HandlerName",
        CurrentFileName = "CurrentFileName"
    };
    SetupFileDataContextToReturnDataSource(source);
    mockContext
        .Setup(context => context.SourceContainsErrors(It.Is<Guid>(guid => guid == newSourceId)))
        .Returns(false);

    // Act
    var result = CreateService().ExtractData(newSourceId).Result;

    // Assert
    mockContext.Verify(context => context.UpdateDataSource(It.Is<FileDataSource>(
        dataSource => (dataSource.Id == newSourceId) &&
                      (dataSource.InputStatus == SourceStatus.Extracting))), Times.Once);
    mockStreamManager.Verify(
        context => context.PrepareForExtraction(
            It.IsAny<Guid>(), It.IsAny<string>(), It.IsAny<string>()),
        Times.Once);
    Assert.AreEqual(newSourceId, result.SourceId);
    Assert.AreEqual(0, result.ErrorMessages.Count);
    Assert.IsTrue(result.Succeeded);
}
public void ValidateSourceMustRemovePreviousErrorsInSource()
{
    // Arrange
    var source = new FileDataSource
    {
        Id = newSourceId,
        InputStatus = SourceStatus.PendingExtraction,
        CurrentFileName = "CurrentFileName"
    };
    SetupFileDataContextToReturnDataSource(source);
    CreateService();
    mockContext
        .Setup(context => context.UpdateDataSource(It.IsAny<FileDataSource>()))
        .Returns(source);
    mockService
        .Setup(service => service.ReportSourceError(
            It.IsAny<Guid>(), It.IsAny<SourceErrorType>(), It.IsAny<string>()))
        .Callback(() => { });

    // Act
    mockService.Object.ValidateSource(newSourceId);

    // Assert
    mockContext
        .Verify(context => context.RemoveSourceErrors(It.Is<Guid>(guid => guid == newSourceId)), Times.Once);
    mockStreamManager
        .Verify(manager => manager.RetrieveData(It.IsAny<Guid>(), It.IsAny<string>()), Times.Never);
}
public void UpsertFileDataSourceMustSetTheFileHashIfInserting()
{
    // Arrange
    var source = new FileDataSource { Id = newSourceId };
    mockContext
        .Setup(context => context.AddDataSource(It.IsAny<FileDataSource>()))
        .Returns(source)
        .Verifiable();
    var uploadContract = new FileDataContract
    {
        SourceId = newSourceId,
        OriginalFileName = "Original Name",
    };
    var fileStream = new MemoryStream(new byte[] { 100 });

    // Act
    CreateService().UpsertFileDataSource(uploadContract, fileStream);

    // Assert
    mockContext
        .Verify(context => context.AddDataSource(
            It.Is<FileDataSource>(
                dataSource => (dataSource.OriginalFileName == "Original Name") &&
                              (dataSource.FileHash == "<686ÏNffi¢]¢€¡†\\-(t"))));
}
public void UpsertFileDataSourceMustCallValidateIfUpdating()
{
    // Arrange
    var source = new FileDataSource
    {
        Id = newSourceId,
        InputStatus = SourceStatus.PendingExtraction
    };
    mockContext
        .Setup(context => context.GetFileDataSource(It.IsAny<Guid>(), It.IsAny<string>()))
        .Returns(source)
        .Verifiable();
    var uploadContract = new FileDataContract
    {
        SourceId = newSourceId,
        OriginalFileName = "Original Name"
    };
    var fileStream = new MemoryStream(new byte[] { 100 });

    // Act
    CreateService().UpsertFileDataSource(uploadContract, fileStream);

    // Assert
    mockService
        .Verify(service => service.ValidateSource(It.Is<Guid>(guid => guid == newSourceId)));
}
public void UpsertFileDataSourceMustCancelIfInsertingAndSourceIdNotNull()
{
    // Arrange
    var source = new FileDataSource { Id = newSourceId };
    mockContext
        .Setup(context => context.AddDataSource(It.IsAny<FileDataSource>()))
        .Returns(source)
        .Verifiable();
    var uploadContract = new FileDataContract
    {
        SourceId = newSourceId,
        OriginalFileName = "Original Name"
    };

    // Act
    CreateService().UpsertFileDataSource(uploadContract, new MemoryStream());

    // Assert
    mockContext.Verify();
    mockService
        .Verify(service => service.CancelFileSourceExtraction(It.Is<Guid>(guid => guid == newSourceId)));
}
public bool FileIsDuplicate(FileDataSource fileSource)
{
    var hash = fileSource.FileHash;
    var sourceId = fileSource.Id;
    return context.Set<FileDataSource>().Any(source =>
        (source.Id != sourceId) && (source.FileHash == hash));
}
public void TestExcludeDataSource1()
{
    var logFile1 = new InMemoryLogSource();
    var source1 = new FileDataSource(_taskScheduler, new DataSource { Id = DataSourceId.CreateNew() }, logFile1, TimeSpan.Zero);
    var logFile2 = new InMemoryLogSource();
    var source2 = new FileDataSource(_taskScheduler, new DataSource { Id = DataSourceId.CreateNew() }, logFile2, TimeSpan.Zero);

    _merged.SetDataSources(new[] { source1, source2 });
    GetMergedLogFile().Sources.Should().Equal(logFile1, logFile2);

    _merged.SetExcluded(source1, true);
    GetMergedLogFile().Sources.Should().NotContain(logFile1, "because we've just excluded the first source");

    _merged.SetExcluded(source1, false);
    GetMergedLogFile().Sources.Should().Equal(logFile1, logFile2);
}
static void Main(string[] args)
{
    // create source of jsoned customers and repository to load them
    var source = new FileDataSource(Path.Combine(Environment.CurrentDirectory, "data", "customers.json"));
    var customerRepository = new CustomerRepository(source);

    // create geo service for distance calculating
    var geoService = new GeoService();

    // create customer service as entry point and domain service
    var customerService = new CustomerService(customerRepository, geoService);

    // our office coordinate
    var centerPoint = new GeoCoordinate(53.339428, -6.257664);

    // radius in km
    double radiusKm = 100;

    var customers = customerService.Get(centerPoint, radiusKm);
    Console.WriteLine(string.Format("We found the following customers within {0} km of point ({1}, {2})",
        radiusKm, centerPoint.Latitude, centerPoint.Longitude));
    foreach (var customer in customers)
    {
        Console.WriteLine(string.Format("Name: {0}, Coordinates: ({1}, {2})",
            customer.Name, customer.Coordinate.Latitude, customer.Coordinate.Longitude));
    }
    Console.ReadLine();
}
public void TestReadOneLine3([Values(true, false)] bool isSingleLine)
{
    _settings.IsSingleLine = isSingleLine;
    using (var dataSource = new FileDataSource(_scheduler, _settings, _logSource, TimeSpan.Zero))
    {
        _writer.Write("A");
        _writer.Flush();
        _scheduler.Run(3);

        _writer.Write("B");
        _writer.Flush();
        _scheduler.Run(3);

        _writer.Write("C");
        _writer.Flush();
        _scheduler.Run(3);

        dataSource.FilteredLogSource.GetProperty(Properties.LogEntryCount)
            .Should().Be(1, "because only a single line has been written to disk");

        var line = dataSource.FilteredLogSource.GetEntry(0);
        line.Index.Should().Be(0);
        line.LogEntryIndex.Should().Be(0);
        line.RawContent.Should().Be("ABC");
        line.LogLevel.Should().Be(LevelFlags.Other);
        line.Timestamp.Should().Be(null);
    }
}
public void TestDispose1()
{
    LogSourceProxy permanentLogSource;
    LogSourceSearchProxy permanentSearch;
    LogSourceProxy permanentFindAllLogSource;
    LogSourceSearchProxy permanentFindAllSearch;

    FileDataSource source;
    using (source = new FileDataSource(_logSourceFactory, _scheduler,
        new DataSource(@"E:\somelogfile.txt") { Id = DataSourceId.CreateNew() }))
    {
        permanentLogSource = (LogSourceProxy)source.FilteredLogSource;
        permanentLogSource.IsDisposed.Should().BeFalse();

        permanentSearch = (LogSourceSearchProxy)source.Search;
        permanentSearch.IsDisposed.Should().BeFalse();

        permanentFindAllLogSource = (LogSourceProxy)source.FindAllLogSource;
        permanentFindAllLogSource.IsDisposed.Should().BeFalse();

        permanentFindAllSearch = (LogSourceSearchProxy)source.FindAllSearch;
        permanentFindAllSearch.IsDisposed.Should().BeFalse();
    }

    source.IsDisposed.Should().BeTrue();
    permanentLogSource.IsDisposed.Should().BeTrue();
    permanentSearch.IsDisposed.Should().BeTrue();
    permanentFindAllLogSource.IsDisposed.Should().BeTrue();
    permanentFindAllSearch.IsDisposed.Should().BeTrue();
}
public void TestDataSourceOrder()
{
    var logFile2 = new InMemoryLogSource();
    var source2 = new FileDataSource(_taskScheduler, new DataSource { Id = DataSourceId.CreateNew() }, logFile2, TimeSpan.Zero);
    var logFile1 = new InMemoryLogSource();
    var source1 = new FileDataSource(_taskScheduler, new DataSource { Id = DataSourceId.CreateNew() }, logFile1, TimeSpan.Zero);
    var logFile3 = new InMemoryLogSource();
    var source3 = new FileDataSource(_taskScheduler, new DataSource { Id = DataSourceId.CreateNew() }, logFile3, TimeSpan.Zero);

    _merged.SetDataSources(new[] { source1, source2, source3 });
    var mergedLogFile = GetMergedLogFile();
    mergedLogFile.Sources.Should().Equal(logFile1, logFile2, logFile3);
}
public async Task<SpriteResourceContext> DeserializeSprite(string fileName, MonsterMetadata metadata)
{
    var dataFile = new FileDataSource("monsterFile", fileName);

    var palEntries = metadata.ColorDepth == TileColorDepth.Bpp4 ? 16 : 8;
    var paletteSources = Enumerable.Range(0, palEntries)
        .Select(x => (IColorSource)new FileColorSource(new BitAddress(PaletteOffset + 16 * metadata.PaletteID + x * 2, 0), Endian.Little))
        .ToList();
    var pal = new Palette("monsterPalette", new ColorFactory(), ColorModel.Bgr15,
        paletteSources, true, PaletteStorageSource.Project);
    pal.DataSource = dataFile;
    Console.WriteLine(pal.GetNativeColor(0).ToString());

    int arrangerWidth = metadata.TileSetSize == TileSetSize.Small ? 8 : 16;
    int arrangerHeight = metadata.TileSetSize == TileSetSize.Small ? 8 : 16;

    var formData = new byte[arrangerWidth * arrangerHeight / 8];
    int formAddress = metadata.TileSetSize == TileSetSize.Small
        ? FormSmallOffset + 8 * metadata.FormID
        : FormLargeOffset + 32 * metadata.FormID;

    //dataFile.Stream.Seek(formAddress, SeekOrigin.Begin);
    //var length = await dataFile.Stream.ReadAsync(formData, 0, formData.Length);
    dataFile.Read(new BitAddress(formAddress, 0), formData.Length * 8, formData);

    if (metadata.TileSetSize == TileSetSize.Large) // Requires endian swapping the tile form
    {
        EndianSwapArray(formData);
    }

    var bitStream = BitStream.OpenRead(formData, formData.Length * 8);

    var arranger = ArrangerBuilder.WithTiledLayout()
        .WithArrangerElementSize(arrangerWidth, arrangerHeight)
        .WithElementPixelSize(8, 8)
        .WithPixelColorType(PixelColorType.Indexed)
        .WithName("monsterArranger")
        .AsScatteredArranger()
        .Build();

    int elementsStored = 0;
    int tileOffset = TileSetOffset + 8 * metadata.TileSetID;
    int tileSize = metadata.ColorDepth == TileColorDepth.Bpp4 ? 32 : 24;

    for (int y = 0; y < arrangerHeight; y++)
    {
        for (int x = 0; x < arrangerWidth; x++)
        {
            if (bitStream.ReadBit() == 1)
            {
                IGraphicsCodec codec = metadata.ColorDepth == TileColorDepth.Bpp4
                    ? new Snes4bppCodec(8, 8)
                    : new Snes3bppCodec(8, 8);
                var element = new ArrangerElement(x * 8, y * 8, dataFile, new BitAddress(tileOffset * 8), codec, pal);
                tileOffset += tileSize;
                arranger.SetElement(element, x, y);
                elementsStored++;
            }
        }
    }

    return new SpriteResourceContext(dataFile, pal, arranger);
}
public void TestReadSingleLine()
{
    _settings.IsSingleLine = true;
    using (var dataSource = new FileDataSource(_scheduler, _settings, _logSource, TimeSpan.Zero))
    {
        _writer.WriteLine("2015-10-07 19:50:58,981 INFO Starting");
        _writer.WriteLine("the application...");
        _writer.Flush();
        _scheduler.Run(3);

        dataSource.FilteredLogSource.GetProperty(Properties.LogEntryCount)
            .Should().Be(2, "because two lines have been written to the file");

        var t = new DateTime(2015, 10, 7, 19, 50, 58, 981);
        var line1 = dataSource.FilteredLogSource.GetEntry(0);
        line1.Index.Should().Be(0);
        line1.LogEntryIndex.Should().Be(0);
        line1.RawContent.Should().Be("2015-10-07 19:50:58,981 INFO Starting");
        line1.LogLevel.Should().Be(LevelFlags.Info);
        line1.Timestamp.Should().Be(t);

        var line2 = dataSource.FilteredLogSource.GetEntry(1);
        line2.Index.Should().Be(1);
        line2.LogEntryIndex.Should().Be(1);
        line2.RawContent.Should().Be("the application...");
        line2.LogLevel.Should().Be(LevelFlags.Other);
        line2.Timestamp.Should().Be(null);
    }
}
public void TestSearch1()
{
    var settings = new DataSource(AbstractTextLogSourceAcceptanceTest.File2Mb) { Id = DataSourceId.CreateNew() };
    using (var logFile = Create(AbstractTextLogSourceAcceptanceTest.File2Mb))
    using (var dataSource = new FileDataSource(_taskScheduler, settings, logFile, TimeSpan.Zero))
    {
        var model = CreateFileViewModel(dataSource);
        logFile.Property(x => x.GetProperty(Properties.PercentageProcessed)).ShouldEventually().Be(Percentage.HundredPercent);
        model.Property(x =>
        {
            x.Update();
            return x.TotalCount;
        }).ShouldEventually().Be(16114);
        //model.Update();
        //model.TotalCount.Should().Be(16114);

        model.Search.Term = "RPC #12";
        var search = dataSource.Search;
        search.Property(x => x.Count).ShouldEventually().Be(334);

        model.Update();
        model.Search.ResultCount.Should().Be(334);
        model.Search.CurrentResultIndex.Should().Be(0);
    }
}
public List<ISeries> GetSeriesList()
{
    FileDataSource fileSource = Factory.CreateFileSource();
    List<ISeries> allSeriesList = fileSource.GetAllSeries();
    return allSeriesList;
}
private void SetupFileDataContextToReturnDataSource(FileDataSource source)
{
    mockContext
        .Setup(context => context.GetDataSource<FileDataSource>(It.Is<Guid>(id => id == source.Id)))
        .Returns(source)
        .Verifiable();
}
static void Main(string[] args)
{
    var pump = new FileDataSource(new StreamReader(@"data\TestData.txt"));
    var shifter = new CircularShifter();
    var alphabetizer = new Alphabetizer();

    #region Modifying the requirement - add a 'noise' list to remove words from the index
    //var noiseWords = new FileDataSource(new StreamReader(@"data\noise.txt")).Begin();
    //var noiseRemover = new NoiseRemoval(noiseWords);
    //pump.Successor = noiseRemover;
    //noiseRemover.Successor = shifter;
    #endregion

    pump.Successor = shifter;
    shifter.Successor = alphabetizer;

    var pipeline = new Pipeline<string>(pump: pump, sink: new ConsoleWriter());

    Console.WriteLine("Begin Execution At:{0}", DateTime.UtcNow);
    pipeline.Execute();
    Console.WriteLine("Stop Execution At:{0}", DateTime.UtcNow);

    Console.WriteLine("Press any key to continue");
    Console.ReadKey();
}
public async Task<IActionResult> EditFileDataSource([FromBody] FileDataSource item, int id)
{
    if (item == null)
    {
        return this.BadRequest();
    }

    var fileDataSource = await this.unitOfWork.FileDataSourceRepository.GetSingleAsync(id, f => f.FileDataSourceFields);
    if (fileDataSource == null)
    {
        return this.NotFound();
    }

    fileDataSource.Name = item.Name;
    item.FileDataSourceFields?.ForEach(x => x.FileDataSource = fileDataSource);
    this.unitOfWork.FileDataSourceRepository.AddRemoveUpdateCollectionItems(
        fileDataSource.FileDataSourceFields, item.FileDataSourceFields, v => v.Id);
    this.unitOfWork.FileDataSourceRepository.Edit(fileDataSource);
    await this.unitOfWork.SaveAsync();

    return new NoContentResult();
}
public void TestSetDataSourcesOneLessSource()
{
    var logFile1 = new InMemoryLogSource();
    var source1 = new FileDataSource(_taskScheduler, new DataSource { Id = DataSourceId.CreateNew() }, logFile1, TimeSpan.Zero);
    var logFile2 = new InMemoryLogSource();
    var source2 = new FileDataSource(_taskScheduler, new DataSource { Id = DataSourceId.CreateNew() }, logFile2, TimeSpan.Zero);

    _merged.SetDataSources(new[] { source1, source2 });
    _merged.OriginalSources.Should().BeEquivalentTo(new object[] { source1, source2 });
    source1.ParentId.Should().Be(_merged.Id);
    source2.ParentId.Should().Be(_merged.Id);

    _merged.SetDataSources(new[] { source2 });
    _merged.OriginalSources.Should().BeEquivalentTo(new object[] { source2 });
    source1.ParentId.Should().Be(DataSourceId.Empty);
    source2.ParentId.Should().Be(_merged.Id);
}
private static void RunAlgoDemonstration()
{
    var fileSource = new FileDataSource(_databasePath);
    var ruleSet = new RuleSet<string>(s => s.Split(','), new[] { (object)'?' }, new[] { _clusterColumnNumber });
    var transactionStore = new TransactionStore<string>(fileSource, ruleSet);
    var clope = new clope.Clope(transactionStore, _repulsion);

    StartTimer();
    transactionStore.InitStore();
    StopTimerAndLogResult("Store initialization");

    StartTimer();
    var tableClusters = clope.Run();
    StopTimerAndLogResult("Algorithm job");

    ruleSet.Update(s => s.Split(','), new[] { (object)'?' }, new int[] { });
    var display = new Display(0, transactionStore, tableClusters);
    display.Out();

    if (_outputFileName == "")
    {
        return;
    }

    using (System.IO.StreamWriter file = new System.IO.StreamWriter(_outputFileName))
    {
        foreach (int num in tableClusters)
        {
            file.WriteLine(num.ToString());
        }
    }
}
public void FillFromString(string str)
{
    DataSource = new FileDataSource() { Path = str };
}
public void TestActivate1()
{
    _quickFilters.AddQuickFilter();
    var model = new QuickFiltersSidePanelViewModel(_settings, _quickFilters);
    model.QuickInfo.Should().BeNull();

    var dataSource = new FileDataSource(_logSourceFactory, _scheduler, new DataSource("daw") { Id = DataSourceId.CreateNew() });
    model.CurrentDataSource = CreateViewModel(dataSource);
    model.QuickFilters.ElementAt(0).IsActive = true;
    model.QuickInfo.Should().Be("1 active");

    model.AddQuickFilter();
    model.QuickFilters.ElementAt(1).IsActive = true;
    model.QuickInfo.Should().Be("2 active");

    model.QuickFilters.ElementAt(0).IsActive = false;
    model.QuickFilters.ElementAt(1).IsActive = false;
    model.QuickInfo.Should().BeNull();
}
/// <summary>Create an entry from a file</summary>
/// <param name="fileName"></param>
/// <param name="entryName"></param>
/// <param name="stored"></param>
/// <returns></returns>
public static ZipEntry Create(String fileName, String entryName = null, Boolean? stored = false)
{
    if (String.IsNullOrEmpty(entryName))
    {
        if (String.IsNullOrEmpty(fileName))
        {
            throw new ArgumentNullException("fileName");
        }

        entryName = Path.GetFileName(fileName);
    }

    IDataSource ds = null;
    if (!entryName.EndsWith(ZipFile.DirSeparator))
    {
        if (String.IsNullOrEmpty(fileName))
        {
            throw new ArgumentNullException("fileName");
        }

        ds = new FileDataSource(fileName);
    }

    var entry = Create(entryName, ds, stored);

    // Read the last modified time from the file
    if (entry.LastModified <= ZipFile.MinDateTime)
    {
        FileInfo fi = new FileInfo(fileName);
        entry.LastModified = fi.LastWriteTime;
    }

    return entry;
}
public void TestRemove2()
{
    var filter1 = _quickFilters.AddQuickFilter();
    var dataSource = new FileDataSource(_logSourceFactory, _scheduler, new DataSource("daw") { Id = DataSourceId.CreateNew() });
    dataSource.ActivateQuickFilter(filter1.Id);

    var model = new QuickFiltersSidePanelViewModel(_settings, _quickFilters)
    {
        CurrentDataSource = CreateViewModel(dataSource)
    };
    var filter1Model = model.QuickFilters.First();
    filter1Model.IsActive = false;

    var changed = 0;
    model.OnFiltersChanged += () => ++changed;
    filter1Model.RemoveCommand.Execute(null);

    model.QuickFilters.Should().BeEmpty("because we've just removed the only quick filter");
    changed.Should().Be(0, "because removing an inactive quick-filter should never fire the OnFiltersChanged event");
}
public void Initialize()
{
    _fileDataSource = TestHelpers.CreateFileDataSource<ParserContext10>("test-file-data.10.csv", false);
    _textDecoder = new TextDecoder { Pattern = @"*.", FailValidationResult = ValidationResultType.Critical };
    _fileProcessorDefinition = new FileProcessorDefinition10
    {
        HeaderRowProcessorDefinition = new RowProcessorDefinition
        {
            FieldProcessorDefinitions = new FieldProcessorDefinition[] { },
        },
        DataRowProcessorDefinition = new RowProcessorDefinition
        {
            FieldProcessorDefinitions = new FieldProcessorDefinition[]
            {
                new FieldProcessorDefinition { Decoder = _textDecoder, FieldName = "FieldA", Description = "Field A" },
                new FieldProcessorDefinition { Decoder = _textDecoder, FieldName = "FieldB", Description = "Field B" },
                new FieldProcessorDefinition { Decoder = _textDecoder, FieldName = "FieldC", Description = "Field C" }
            }
        },
        TrailerRowProcessorDefinition = new RowProcessorDefinition
        {
            FieldProcessorDefinitions = new FieldProcessorDefinition[] { }
        }
    };
}
public FileDataSourceTest()
{
    _mockFileDataParser = new Mock<IFileDataParser>(MockBehavior.Strict);
    _mockFileHelper = new Mock<IFileHelper>(MockBehavior.Strict);
    _fileConfig = new FileSourceConfig();
    _fileDataSource = new FileDataSource(_mockFileDataParser.Object, _mockFileHelper.Object, Options.Create(_fileConfig));
}
public void ExtractCompletedMustUpdateStatusToPendingCalculation()
{
    // Arrange
    var source = new FileDataSource
    {
        Id = newSourceId,
        InputStatus = SourceStatus.Extracting,
        UserName = "******"
    };
    SetupFileDataContextToReturnDataSource(source);
    mockContext
        .Setup(context => context.GetUserProfile(It.Is<string>(s => s == "user name")))
        .Returns(new UserProfile { Email = "test email" })
        .Verifiable();

    // Act
    CreateService().ExtractCompleted(newSourceId);

    // Assert
    mockContext.Verify(context => context.UpdateDataSource(It.Is<FileDataSource>(
        dataSource => (dataSource.Id == newSourceId) &&
                      (dataSource.InputStatus == SourceStatus.PendingCalculation))), Times.Once);
    mockContext.Verify();
}
public void Constructor_DataReaderAndFilePathIsProvided_FilePathPropertyIsSet()
{
    var dataReader = MockRepository.GenerateMock<IDataReader>();
    var filePath = @"C:\Temp\testfile.txt";

    var fileDataSource = new FileDataSource(dataReader, filePath);

    Assert.AreEqual(filePath, fileDataSource.FilePath);
}
public static DataFileModel MapToModel(this FileDataSource fileSource)
{
    return new DataFileModel()
    {
        Name = fileSource.Name,
        Location = fileSource.FileLocation
    };
}
private static void DecolatorDesignPattern()
{
    IDataSource dataSource = new FileDataSource("data.sql");
    IDataSource compressedDataSource = new ComporessionDecorator(dataSource);
    compressedDataSource.ReadData();
    compressedDataSource.WriteData(new object());
}
public void SetUp()
{
    _taskScheduler = new DefaultTaskScheduler();
    _logSourceFactory = new SimplePluginLogSourceFactory(_taskScheduler);
    _settings = new DataSource(AbstractTextLogSourceAcceptanceTest.File20Mb) { Id = DataSourceId.CreateNew() };
    _dataSource = new FileDataSource(_logSourceFactory, _taskScheduler, _settings, TimeSpan.FromMilliseconds(100));
}
static void Main(string[] args)
{
    const int bufferSize = 32;
    var rawDataBuffer = new BlockingCollection<string>(bufferSize);
    var shiftedDataBuffer = new BlockingCollection<string>(bufferSize);
    var alphabetizedDataBuffer = new BlockingCollection<string>(bufferSize);

    var pump = new FileDataSource(rawDataBuffer, new StreamReader(@"data\TestData.txt"));
    var shifter = new CircularShifter(rawDataBuffer, shiftedDataBuffer);
    var alphabetizer = new Alphabetizer(shiftedDataBuffer, alphabetizedDataBuffer);
    var writer = new ConsoleWriter(alphabetizedDataBuffer);

    var pipeline = new Pipeline<string>(pump: pump, sink: writer,
        filters: new List<IAmAFilter<string>> { shifter, alphabetizer });

    Console.WriteLine("Begin Execution At:{0}", DateTime.UtcNow);
    pipeline.Execute();
    Console.WriteLine("Stop Execution At:{0}", DateTime.UtcNow);

    Console.WriteLine("Press any key to continue");
    Console.ReadKey();
}
/// <summary>
/// Burns an existing ISO file
/// </summary>
/// <param name="iso_name">The iso's name</param>
public static void burn(string iso_name)
{
    bool burn = true;
    while (burn)
    {
        bool erase = false;

        // Check for initialization errors
        DiskOperationError status = Program.drive.Initialize();
        if (status != null)
        {
            MessageBox.Show(status.Message, "Error", MessageBoxButtons.OK, MessageBoxIcon.Error);
            return;
        }

        /* if (Program.drive.CurrentProfile == Drive.SupportedProfiles.DVD_RW_RestrictedOverwrite ||
               Program.drive.CurrentProfile == Drive.SupportedProfiles.DVD_RW_SequentialRecording ||
               Program.drive.CurrentProfile == Drive.SupportedProfiles.DVD_PLUS_R ||
               Program.drive.CurrentProfile == Drive.SupportedProfiles.DVD_PLUS_RW)
           {*/

        DiscInformation info;

        // Read the disk information
        if (Program.device.ReadDiscInformation(out info) == Device.CommandStatus.Success)
        {
            // If the disk is not empty, ask whether it should be erased
            if (info.DiscStatus != DiscInformation.DiscStatusType.EmptyDisc)
            {
                DialogResult res;
                res = MessageBox.Show("The disk in the drive is not erased. Do you wish to erase the disk?",
                    "Erase", MessageBoxButtons.YesNo, MessageBoxIcon.Question);
                if (res == DialogResult.No)
                {
                    return;
                }
                erase = true;
            }

            burn = false;

            // Erase if necessary
            if (erase)
                Program.drive.Erase(Drive.EraseType.Full, false);

            // Create the disk image to burn
            DiskBurnImage dsk = new DiskBurnImage(Program.logger);
            FileDataSource src = new FileDataSource(iso_name);
            TrackBurnImage t = new TrackBurnImage(TrackBurnImage.TrackType.Data_Mode_1, src);
            dsk.AddTrack(t);

            bool notburned = true;
            while (notburned)
            {
                // Burn the disk image onto the media in the drive
                status = Program.drive.BurnDisk(Drive.BurnType.DontCare, dsk, false, Program.burn_speed);

                // If we burned successfully, eject the disk
                if (status == null)
                {
                    Program.device.StartStopUnit(false, Device.PowerControl.NoChange, Device.StartState.EjectDisc);
                    MessageBox.Show("The process finished successfully. You can play the movie in your DVD player now.",
                        "Information", MessageBoxButtons.OK, MessageBoxIcon.Information);
                    notburned = false;
                }
                else
                {
                    string str = "The burn operation failed - " + status.Message;
                    MessageBox.Show(str, "Error", MessageBoxButtons.OK, MessageBoxIcon.Error);
                    DialogResult result = MessageBox.Show("Do you want to try burning the movie again?",
                        "Question", MessageBoxButtons.YesNo, MessageBoxIcon.Question);
                    if (result == DialogResult.No)
                        notburned = false;
                }
            }
        }
        else
        {
            MessageBox.Show("Can't read information from the disk.", "Error", MessageBoxButtons.OK, MessageBoxIcon.Error);
            DialogResult result = MessageBox.Show("Do you want to try again?", "Question", MessageBoxButtons.YesNo, MessageBoxIcon.Question);
            if (result == DialogResult.No)
                burn = false;
            else
                Program.drive.Initialize();
        }

        // }
        /* else
           {
               MessageBox.Show("The disk that's in the drive is not supported. Perhaps the disk isn't blank.", "Error", MessageBoxButtons.OK, MessageBoxIcon.Error);
               DialogResult result = MessageBox.Show("Do you want to try again with another disk? (if so replace it first)", "Question", MessageBoxButtons.YesNo, MessageBoxIcon.Question);
               if (result == DialogResult.No)
                   burn = false;
               else
                   Program.drive.Initialize();
           }*/
    }
}