/// <summary>
/// Deletes every row yielded by <paramref name="rows"/> that is currently in
/// this table's row index, and records each deletion in the transaction log.
/// </summary>
/// <param name="rows">Enumerator over candidate rows; null entries are skipped.</param>
/// <returns>The number of rows actually deleted.</returns>
public int Delete(IEnumerator <DbRow> rows) {
    int deleteCount = 0;
    while (rows.MoveNext()) {
        DbRow row = rows.Current;
        if (row == null) {
            continue;
        }
        long rowid = row.RowId;
        // The row index is re-opened on every iteration — presumably so the
        // membership check reflects removals made by earlier iterations.
        // NOTE(review): confirm SortedIndex does not cache stale state.
        IDataFile df = GetDataFile(rowIndexKey);
        SortedIndex rowsIndex = new SortedIndex(df);
        if (rowsIndex.ContainsSortKey(rowid)) {
            // Remove the row from the main index,
            RemoveRowFromRowSet(rowid);
            // Remove the row from any indexes defined on the table,
            RemoveRowFromIndexSet(rowid);
            // Delete the row file
            IDataFile rowFile = GetDataFile(GetRowIdKey(rowid));
            rowFile.Delete();
            // Add this event to the transaction log,
            AddTransactionEvent("deleteRow", rowid);
            deleteCount++;
        }
    }
    // The table version is bumped even when nothing was deleted —
    // NOTE(review): confirm this unconditional bump is intended.
    ++currentVersion;
    return(deleteCount);
}
/// <summary>
/// Converts each contour file in <paramref name="filesIn"/> from
/// <paramref name="formatIn"/> to <paramref name="formatOut"/>, writing the
/// results to the corresponding paths in <paramref name="filesOut"/>.
/// A failure on one file is logged and does not stop the batch.
/// </summary>
/// <param name="filesIn">Input file paths.</param>
/// <param name="filesOut">Output file paths (parallel to <paramref name="filesIn"/>).</param>
/// <param name="formatIn">Format of the input files.</param>
/// <param name="formatOut">Format to write.</param>
/// <param name="changeFileExtension">When true, each output path's extension
/// is replaced with the output format's default extension.</param>
public void Process(IList <string> filesIn, IList <string> filesOut, ContourFormat formatIn, ContourFormat formatOut, bool changeFileExtension)
{
    ContourFileFactory factory = new ContourFileFactory();
    IDataFile <Contour> dataFileIn = factory.GetFile(formatIn);
    IDataFile <Contour> dataFileOut = factory.GetFile(formatOut);
    int successCount = 0;
    // Use the IList.Count property (as the log messages below already do)
    // rather than the LINQ Count() extension, which was re-evaluated on
    // every loop iteration.
    for (int i = 0; i < filesIn.Count; i++)
    {
        try
        {
            Logger.AddMessage(String.Format("Обрабатывается файл '{0}'. {1} из {2}", filesIn[i], i + 1, filesIn.Count));
            Contour contour = dataFileIn.Read(filesIn[i]);
            if (changeFileExtension)
            {
                filesOut[i] = Path.ChangeExtension(filesOut[i], dataFileOut.DefaultExtension);
            }
            dataFileOut.Write(filesOut[i], contour);
            successCount++;
            Logger.AddMessage(String.Format("Файл успешно обработан."));
        }
        catch (Exception e)
        {
            // Log and continue: one bad file must not abort the whole batch.
            Logger.AddMessage(String.Format("Ошибка при обработке файла. {0}", e.Message));
        }
    }
    Logger.AddEmptyLine();
    Logger.AddMessage(String.Format("Успешно обработано файлов: {0} из {1}.", successCount, filesIn.Count));
}
/// <summary>
/// Returns a <see cref="DbIndex"/> over the given column. The column must
/// exist and be flagged as indexed in the table properties.
/// </summary>
/// <param name="columnName">Name of the indexed column.</param>
public DbIndex GetIndex(string columnName) {
    CheckColumnNameValid(columnName);
    StringDictionary p = TableProperties;
    long columnId = p.GetValue(columnName + ".id", -1);
    if (columnId == -1) {
        throw new ApplicationException("Column '" + columnName + "' not found");
    }
    bool indexed = p.GetValue(columnName + ".index", false);
    if (!indexed) {
        throw new ApplicationException("Column '" + columnName + "' is not indexed");
    }
    // Wrap the on-disk index data and hand it to a DbIndex snapshot.
    SortedIndex sortedList = new SortedIndex(GetDataFile(GetIndexIdKey(columnId)));
    IIndexedObjectComparer <string> comparer = GetIndexComparerFor(columnName, columnId);
    return new DbIndex(this, currentVersion, comparer, columnId, sortedList);
}
/// <summary>
/// Begins an update of the given row: verifies no other operation is in
/// progress and that the row is in the table, then seeds the row buffer
/// with the row's current (non-null) column values.
/// </summary>
/// <param name="row">The row to update.</param>
public void Update(DbRow row) {
    if (rowBufferId != 0) {
        throw new ApplicationException("State error: previous table operation not completed");
    }
    // Check row is currently indexed,
    long rowid = row.RowId;
    SortedIndex rowIndex = new SortedIndex(GetDataFile(rowIndexKey));
    if (!rowIndex.ContainsSortKey(rowid)) {
        throw new ApplicationException("Row being updated is not in the table");
    }
    if (rowBuffer == null) {
        rowBuffer = new Dictionary <string, string>();
    }
    rowBufferId = rowid;
    // Seed the buffer from the row's existing values, skipping null columns.
    foreach (string col in ColumnNames) {
        string val = row.GetValue(col);
        if (val != null) {
            rowBuffer[col] = val;
        }
    }
}
/// <summary>
/// Constructs the string, wrapped around the given <see cref="IDataFile"/>.
/// </summary>
/// <param name="file">The data file that wraps this string data container.</param>
public StringData(IDataFile file) {
    this.file = file;
    // Reader and writer each get their own stream over the same file, both
    // using UTF-16 (Encoding.Unicode) for string storage.
    var readerStream = new DataFileStream(file);
    var writerStream = new DataFileStream(file);
    fileReader = new BinaryReader(readerStream, Encoding.Unicode);
    fileWriter = new BinaryWriter(writerStream, Encoding.Unicode);
}
/// <summary>
/// Opens an existing data file, reads its 4-byte version header, and
/// constructs the version-specific implementation.
/// </summary>
/// <param name="file">The data file on disk.</param>
/// <param name="startupLog">Log that receives startup progress messages.</param>
/// <exception cref="UnsupportedVersionException">The file's version is not
/// supported by this software version.</exception>
public DataFile(FileInfo file, IStartupLog startupLog)
{
    _file = file;
    _startupLog = startupLog;

    startupLog.WriteLine("Opening existing data file " + file.FullName);

    var fileStream = file.Open(FileMode.Open, FileAccess.ReadWrite, FileShare.None);
    try
    {
        // Stream.Read may return fewer bytes than requested; loop until the
        // 4-byte header is fully read (a truncated file leaves trailing
        // zeroes, producing a version the check below rejects).
        var buffer = new byte[4];
        int offset = 0;
        while (offset < buffer.Length)
        {
            int read = fileStream.Read(buffer, offset, buffer.Length - offset);
            if (read == 0) break;
            offset += read;
        }
        var version = BitConverter.ToUInt32(buffer, 0);
        startupLog.WriteLine("Data file " + file.FullName + " is version " + version);

        if (version == 1)
        {
            // DataFileV1 takes ownership of the open stream.
            _versionDataFile = new DataFileV1(fileStream);
        }
        else
        {
            startupLog.WriteLine("Data file version " + version + " is not supported in this version of the software, please install the latest software", true);
            fileStream.Close();
            throw new UnsupportedVersionException(version, 1, "Data file", file.FullName);
        }
    }
    catch
    {
        // Do not leak the file handle if anything above throws
        // (Dispose is idempotent, so the Close() path above is safe too).
        fileStream.Dispose();
        throw;
    }
}
/// <summary>
/// Returns a cursor that walks the table's rows in reverse index order.
/// </summary>
public DbRowCursor GetReverseCursor() {
    SortedIndex rowIndex = new SortedIndex(GetDataFile(rowIndexKey));
    var reverse = new DbIndex.ReverseCursor(rowIndex.GetCursor());
    return new DbRowCursor(this, currentVersion, reverse);
}
/// <summary>
/// Registers a new named item in this directory and returns the key of the
/// data file that backs it.
/// </summary>
/// <param name="name">The item name; must not already exist.</param>
/// <returns>The key of the item's data file.</returns>
/// <exception cref="ApplicationException">The name is already stored, or an
/// I/O error occurred writing the item header.</exception>
public Key AddItem(string name) {
    ++directoryVersion;
    StringDictionary pset = new StringDictionary(GetDataFile(propertySetKey));
    // Assert the item isn't already stored,
    if (pset.GetValue <long>(name, -1) != -1) {
        throw new ApplicationException("Item already exists: " + name);
    }
    // Generate a unique identifier for the name,
    long id = GenerateId();
    pset.SetValue(name, id);
    SortedIndex iset = new SortedIndex(GetDataFile(indexSetKey));
    iset.Insert(name, id, collator);
    Key itemKey = GetItemKey(id);
    IDataFile df = GetDataFile(itemKey);
    try {
        // Write the item name as a header into the item's data file. The
        // writer is disposed so any buffered data reaches the file.
        using (BinaryWriter dout = new BinaryWriter(new DataFileStream(df), Encoding.Unicode)) {
            dout.Write(name);
        }
    } catch (IOException e) {
        // Preserve the original exception as the inner exception,
        // consistent with GetItemDataFile's error handling.
        throw new ApplicationException(e.Message, e);
    }
    return itemKey;
}
/// <summary>
/// Constructs a root binary collection over the given data file, using the
/// supplied element comparer.
/// </summary>
/// <param name="file">Backing data file.</param>
/// <param name="comparer">Comparer used to order elements.</param>
public BinaryCollection(IDataFile file, IComparer <Binary> comparer)
    : this(file, comparer, null, null) {
    // This instance is its own root; mark the root state for writing.
    root = this;
    version = 0;
    rootStateDirty = true;
}
// Builds the DataSet from the loaded data file, the configured
// training/test split ratio, and the dump-data command-line flag.
private static DataSet CreateDataSet() {
    IDataFile loadedData = LoadData();
    var splitRatio = GetDataSplit();
    bool dumpData = Args.Contains(FlagDumpData);
    return new DataSet(loadedData, splitRatio, dumpData);
}
// Intentionally a no-op: this reconciliation path needs no third-party data
// access for CredCard2 records. NOTE(review): presumably satisfies an
// interface contract implemented non-trivially by other loaders — confirm.
public void Do_actions_which_require_third_party_data_access( IDataFile <CredCard2Record> third_party_file, IDataFile <CredCard2InOutRecord> owned_file, ISpreadsheet spreadsheet, IInputOutput input_output) { }
// Intentionally a no-op: this reconciliation path needs no third-party data
// access for ActualBank records. NOTE(review): presumably satisfies an
// interface contract implemented non-trivially by other loaders — confirm.
public void Do_actions_which_require_third_party_data_access( IDataFile <ActualBankRecord> third_party_file, IDataFile <BankRecord> owned_file, ISpreadsheet spreadsheet, IInputOutput input_output) { }
// Builds a default test character and applies the given name and gender.
public static WonderCharacter Character(string name, string gender, IDataFile backingStore) {
    var subject = DefaultCharacter(backingStore);
    subject.Name = name;
    subject.Gender = gender;
    return subject;
}
/// <summary>
/// Removes the named item from the directory and deletes its backing data
/// file.
/// </summary>
/// <param name="name">The item name; must exist.</param>
/// <returns>The key of the deleted item's data file.</returns>
/// <exception cref="ApplicationException">The item is not stored.</exception>
public Key RemoveItem(String name) {
    ++directoryVersion;
    StringDictionary pset = new StringDictionary(GetDataFile(propertySetKey));
    long id = pset.GetValue <long>(name, -1);
    // Assert the item is stored,
    if (id == -1) {
        throw new ApplicationException("Item not found: " + name);
    }
    // Unregister the name from the property set and the sorted name index.
    pset.SetValue(name, null);
    SortedIndex iset = new SortedIndex(GetDataFile(indexSetKey));
    iset.Remove(name, id, collator);
    // Delete the associated datafile
    Key itemKey = GetItemKey(id);
    GetDataFile(itemKey).Delete();
    return itemKey;
}
/// <summary>
/// Get spreadsheet information from the imported file.
/// </summary>
/// <param name="fileUploadId">File upload Id.</param>
/// <param name="fileFormat">Imported file format (Excel or CSV).</param>
/// <returns>Spreadsheet info.</returns>
public SpreadsheetInfo GetSpreadsheetInfo(string fileUploadId, ImportFormat fileFormat) {
    // Resolve the reader service appropriate to the file format.
    IDataFileReaderService service = _readerActivator(fileFormat);
    // Load info about the sheets in the uploaded file.
    IReadOnlyList <SheetInfo> sheets;
    using (Stream stream = FileRepositoryHelper.GetTemporaryFileDataStream(fileUploadId)) {
        var settings = new DataFileReaderSettings { ImportFormat = fileFormat };
        // NOTE(review): the opened data file is not explicitly disposed —
        // confirm OpenDataFile does not return an owning IDisposable.
        IDataFile dataFile = service.OpenDataFile(stream, settings);
        sheets = dataFile.GetSheets();
    }
    return new SpreadsheetInfo {
        ImportFileFormat = fileFormat,
        SheetCollection = sheets
    };
}
/// <summary>
/// Returns the data file for the named item, positioned past the name
/// header so callers see only the item payload.
/// </summary>
/// <param name="name">The item name; must exist.</param>
/// <returns>A <see cref="SubDataFile"/> starting after the name header.</returns>
/// <exception cref="ApplicationException">The item is not stored, or an I/O
/// error occurred computing the header size.</exception>
public IDataFile GetItemDataFile(string name) {
    StringDictionary pset = new StringDictionary(GetDataFile(propertySetKey));
    long id = pset.GetValue <long>(name, -1);
    // Assert the item is stored,
    if (id == -1) {
        throw new ApplicationException("Item not found: " + name);
    }
    Key k = GetItemKey(id);
    IDataFile df = GetDataFile(k);
    // Find out how large the header is, without actually reading it. This is
    // an optimization to improve queries that want to only find the size of
    // the file without touching the data.
    int headerSize;
    try {
        // 'using' replaces the manual Close() so both objects are disposed
        // even if a write throws.
        using (MemoryStream stream = new MemoryStream(64))
        using (BinaryWriter writer = new BinaryWriter(stream, Encoding.Unicode)) {
            writer.Write(name);
            writer.Flush();
            headerSize = (int) stream.Length;
        }
    } catch (IOException e) {
        throw new ApplicationException(e.Message, e);
    }
    df.Position = headerSize;
    return new SubDataFile(df, headerSize);
}
/// <summary>
/// Copies the named item's data into the matching item of the destination
/// directory. The item must exist in both directories.
/// </summary>
/// <param name="name">The item name.</param>
/// <param name="destination">Directory that already contains the item.</param>
public void CopyTo(String name, Directory destination) {
    ++destination.directoryVersion;
    StringDictionary pset = new StringDictionary(GetDataFile(propertySetKey));
    long id = pset.GetValue <long>(name, -1);
    // Assert the item is stored,
    if (id == -1) {
        throw new ApplicationException("Item not found: " + name);
    }
    // Get the source data file item,
    IDataFile sourceDf = GetDataFile(GetItemKey(id));
    // The destination must already contain an item with this name.
    Key destK = destination.GetItem(name);
    if (destK == null) {
        throw new ApplicationException("Item not in destination: " + name);
    }
    // Copy the data,
    sourceDf.ReplicateTo(destination.GetDataFile(destK));
}
// Creates the fake data source and a query that filters on Mea1 == 5.
// (Method syntax compiles identically to the original query expression.)
public void FixtureSetup() {
    _source = new FakeDataSource();
    _query = _source.AsQueryable<FakeData>().Where(c => c.Mea1 == 5);
}
/// <summary>
/// Completes construction: wires a serializer so changes to PersistableData
/// are written back to the character's data file.
/// </summary>
/// <param name="characterData">The data file backing this character.</param>
/// <returns>This character, to allow call chaining.</returns>
protected Character FinishCreate(IDataFile characterData) {
    var serializer = new CharSerializer <TPersistableData>(this, characterData);
    // Subscribe the serializer to change notifications for the
    // PersistableData property so each change is persisted.
    // NOTE(review): handler is never unsubscribed — confirm lifetimes match.
    PropertyChanged += this.ForProperty(() => PersistableData, serializer.UpdateFile);
    return(this);
}
// Test fixture setup: builds the full storage stack (data + log file ->
// file set -> page cache -> page store -> accessor) over mocked logging,
// page-pool and database dependencies. Construction order matters: each
// layer consumes the one built before it.
public void Setup() {
    _startUpLog = SetupMock <IStartupLog>();
    _errorLog = SetupMock <IErrorLog>();
    _pagePoolFactory = SetupMock <IPagePoolFactory>();
    _pagePool = _pagePoolFactory.Create(_pageSize);
    // Backing files for the file set under test.
    _dataFileInfo = new FileInfo("C:\\temp\\test.mdf");
    _dataFile = new DataFile(_dataFileInfo, _pageSize, _startUpLog);
    _logFileInfo = new FileInfo("C:\\temp\\test.ldf");
    _logFile = new LogFile(_logFileInfo, true, _startUpLog);
    _fileSet = new FileSet(
        new[] { _dataFile },
        new[] { _logFile },
        _pagePoolFactory,
        _startUpLog);
    var databaseFactory = SetupMock <IDatabaseFactory>();
    _database = databaseFactory.Open(null);
    // Cache and store layered over the file set.
    var pageCache = new PageCache(_fileSet, _database, _pagePoolFactory, _startUpLog, _errorLog);
    _pageStore = new PageStore(pageCache, _startUpLog);
    _accessorFactory = new AccessorFactory();
    _accessor = _accessorFactory.SmallSequentialAccessor(_pageStore);
}
/// <summary>
/// Returns the ids of all servers that hold the given block.
/// </summary>
/// <param name="blockId">The block to look up.</param>
/// <returns>Array of server ids recorded for the block (possibly empty).</returns>
public long [] Get(long blockId) {
    // Binary-search for the first record with this block id; a negative
    // result encodes the insertion point, where matches would begin.
    long p = Search(new Record(blockId, 0));
    if (p < 0) {
        p = -(p + 1);
    }
    List <long> serverIdList = new List <long>();
    IDataFile dfile = DataFile;
    long size = dfile.Length;
    long pos = p * RecordSize;
    dfile.Position = pos;
    // Scan forward while records still match the requested block id.
    // NOTE(review): Input appears to read from dfile at its current
    // position — confirm Input is bound to DataFile.
    while (pos < size) {
        long readBlockId = Input.ReadInt64();
        long readServerId = Input.ReadInt64();
        if (readBlockId != blockId) {
            break;
        }
        serverIdList.Add(readServerId);
        pos += RecordSize;
    }
    return(serverIdList.ToArray());
}
/// <summary>
/// Opens a text reader over the transaction log's string contents.
/// </summary>
internal TextReader GetLogReader() {
    IDataFile logData = transaction.GetFile(TransactionLogKey, FileAccess.Read);
    return new StringDataReader(new StringData(logData));
}
/// <summary>
/// Constructs a root string collection over the given data file, using the
/// supplied string comparer.
/// </summary>
/// <param name="file">Backing data file.</param>
/// <param name="comparer">Comparer used to order strings.</param>
public StringCollection(IDataFile file, IComparer <string> comparer)
    : this(file, comparer, null, null) {
    // This instance is its own root; mark the root state for writing.
    root = this;
    version = 0;
    rootStateDirty = true;
}
/// <summary>
/// Unzip a zip file then use the inner reader to process it.
/// </summary>
/// <param name="zipStream">The zip file</param>
/// <param name="settings">Settings for the inner provider.</param>
/// <returns>Wrapped data file that can dispose both.</returns>
public IDataFile OpenDataFile(Stream zipStream, DataFileReaderSettings settings) {
    if (zipStream == null) {
        throw new ArgumentNullException(nameof(zipStream));
    }
    if (settings == null) {
        throw new ArgumentNullException(nameof(settings));
    }
    if (!zipStream.CanSeek) {
        throw new InvalidOperationException("Stream must be seekable");
    }
    var archive = new ZipArchive(zipStream);
    if (archive.Entries.Count != 1) {
        throw new FileFormatException("Zip file must contain exactly one data.");
    }
    // Open the single entry and hand it to the inner reader service.
    Stream dataStream = archive.Entries[0].Open();
    IDataFile innerFile = InnerReaderService.OpenDataFile(dataStream, settings);
    // Wrap so both get disposed
    return new ZipDataFile(innerFile, zipStream);
}
// Adds the standard elements, removes them all (even sizes ascending, then
// odd sizes descending), and verifies the collection ends up empty.
public void RemoveEmpties() {
    AddEmpties();
    IDataFile df = Transaction.GetFile(new Key(0, 0, 1), FileAccess.ReadWrite);
    BinaryCollection collection = new BinaryCollection(df);
    // Remove the even-sized byte arrays first...
    for (int i = 0; i < 500; i += 2) {
        Assert.IsTrue(collection.Remove(new Binary(new byte[i])));
    }
    // ...then the odd-sized arrays, walking backwards.
    for (int i = 499; i > 0; i -= 2) {
        Assert.IsTrue(collection.Remove(new Binary(new byte[i])));
    }
    // Check it's empty,
    Assert.IsTrue(collection.IsEmpty);
    // Check there's nothing to iterate,
    foreach (Binary arr in collection) {
        Assert.Fail("Erroneous elements found in set.");
    }
}
// Creates a character in a minimal game system, naming it after the data
// file's location.
private static Character _ParseByJustCreatingCharWithFilenameAsName([NotNull] IDataFile data) {
    var character = new Character <GameSystem>(new _SimplisticGameSystem());
    character.Name = data.Location.Name;
    return character;
}
// Reads the value of the given column from the row's backing data file.
internal string GetValue(long rowid, long columnid) {
    IDataFile rowFile = GetDataFile(GetRowIdKey(rowid));
    return new RowBuilder(rowFile).GetValue(columnid);
}
// Copies the contents of one data file to another via replication.
// (Previously commented-out manual delete/position/copy code removed.)
private static void CopyFile(IDataFile s, IDataFile d) {
    s.ReplicateTo(d);
}
/// <summary>
/// Replaces the destination file's contents with a copy of this file.
/// </summary>
/// <param name="destFile">The file to overwrite.</param>
public void ReplicateTo(IDataFile destFile) {
    // TODO: Placeholder implementation,
    // NOTE(review): destFile.Position is set before Delete() — confirm
    // Delete preserves/resets the position as this ordering assumes.
    destFile.Position = 0;
    destFile.Delete();
    Position = 0;
    CopyTo(destFile, Length);
}
/// <summary>
/// Replaces any existing file on disk with a fresh empty file.
/// </summary>
/// <param name="dataFile">Holder of the <see cref="FileInfo"/> to recreate.</param>
public static void SaveFile(IDataFile dataFile) {
    if (dataFile.File.Exists) {
        dataFile.File.Delete();
    }
    // FileInfo.Create() returns an open FileStream; dispose it immediately
    // so the new file is not left open and locked by this process.
    dataFile.File.Create().Dispose();
}
// constructors
/// <summary>
/// Initializes a new member of the SelectQuery class.
/// </summary>
/// <param name="source">The data source being queried.</param>
/// <param name="sourceType">The source type.</param>
public SelectQuery(IDataFile source, Type sourceType)
    : base(source, sourceType) {
    // Start with empty clause collections and an empty command stack.
    _columns = new List<ColumnDeclaration>();
    _sources = new List<SourceExpression>();
    _joins = new List<JoinExpression>();
    _commandStack = new Stack();
}
// Builds a default test character, then sets its gender and name.
public static WonderCharacter Character(string name, string gender, IDataFile backingStore) {
    WonderCharacter character = DefaultCharacter(backingStore);
    character.Gender = gender;
    character.Name = name;
    return character;
}
/// <summary>
/// Constructs an instance of the index, wrapped around the given
/// <see cref="IDataFile"/>.
/// </summary>
/// <param name="file">The underlying <see cref="IDataFile"/> where the
/// data of the index will be reflected.</param>
/// <param name="readOnly">If <b>true</b> this is a read-only instance;
/// if <b>false</b> the index may be modified.</param>
public SortedIndex(IDataFile file, bool readOnly) {
    this.readOnly = readOnly;
    this.file = file;
    // Reader and writer each get their own stream over the same file.
    fileReader = new BinaryReader(new DataFileStream(file));
    fileWriter = new BinaryWriter(new DataFileStream(file));
}
// Shows an Open File dialog; when the user confirms, creates the data file
// from the chosen path. Returns the raw dialog result.
public bool? OpenFileDialog() {
    var dialog = new OpenFileDialog();
    bool? result = dialog.ShowDialog();
    if (result == true) {
        _dataFile = _factory.CreateDataFile(dialog.FileName);
    }
    return result;
}
/// <summary>
/// Copies <paramref name="size"/> bytes to the destination file, logging a
/// change on the destination when it is another DbFile (in which case the
/// copy goes directly between the parent files).
/// </summary>
public void CopyTo(IDataFile destFile, long size) {
    transaction.CheckValid();
    var targetFile = destFile as DbFile;
    if (targetFile != null) {
        parent.CopyTo(targetFile.parent, size);
        targetFile.LogChange();
    } else {
        parent.CopyTo(destFile, size);
    }
}
/* Load new data file into the model. */
internal void Load(IDataFile dataFile) {
    //set the new data file
    _dataFile = dataFile;
    //raise mode events
    this.RaisePropertyChanged("WindowTitle");
    // abort old thread if alive
    // NOTE(review): Thread.Abort is deprecated (and unsupported on modern
    // .NET); consider a cooperative cancellation flag checked by
    // DiscoverThreadProc instead.
    if ((_discoveryThread != null) && (_discoveryThread.IsAlive)) {
        _discoveryThread.Abort();
        _discoveryThread.Join();
    }
    //launch the new discovery thread (skipped when the new file is null)
    if (_dataFile != null) {
        _discoveryThread = new Thread(new ThreadStart(DiscoverThreadProc));
        _discoveryThread.Start();
    }
}
// Parsing is not supported by this implementation; callers must use a
// different creation path.
public override Character Parse(IDataFile characterData) {
    throw new NotImplementedException();
}
// Builds a WonderCharacter with a fresh SenseOfWonder model over the given
// backing store.
public static WonderCharacter DefaultCharacter(IDataFile backingStore) {
    var model = new Model.SenseOfWonder();
    return WonderCharacter.Create(model, backingStore);
}
// Builds a WonderRulesCharacter over a fresh rules-editing system.
public static WonderRulesCharacter EmptyRulesetCharacter(IDataFile backingStore) {
    var system = new RulesEditingSystem();
    return WonderRulesCharacter.Create(system, backingStore);
}
// Loads a WonderCharacter from the given data file using this system.
public override Character Parse(IDataFile characterData) {
    var loaded = WonderCharacter.Load(this, characterData);
    return loaded;
}
// Creates a new WonderCharacter backed by the given data file.
public override Character CreateIn(IDataFile characterData) {
    var created = WonderCharacter.Create(this, characterData);
    return created;
}
// Fixed-width record list over the given data file; each record is 16 bytes
// wide. NOTE(review): width inferred from the base-constructor argument —
// confirm against the base class's record layout.
public UidList(IDataFile data) : base(data, 16) { }
/// <summary>
/// Replicates this file's contents into the destination, logging a change
/// on the destination when it is another DbFile (in which case replication
/// goes directly between the parent files).
/// </summary>
public void ReplicateTo(IDataFile destFile) {
    transaction.CheckValid();
    var targetFile = destFile as DbFile;
    if (targetFile != null) {
        parent.ReplicateTo(targetFile.parent);
        targetFile.LogChange();
    } else {
        parent.ReplicateTo(destFile);
    }
}
// Fixed-width record table over the given data file; each record is 16
// bytes wide (presumably two 8-byte ids, matching the two ReadInt64 calls
// in Get — confirm).
public BlockServerTable(IDataFile data) : base(data, 16) { }
// Copying from another data file is not supported by this implementation.
public void CopyFrom(IDataFile sourceFile, long size) {
    throw new NotImplementedException();
}
// Wraps a parent data file as a named, transaction-scoped DbFile.
internal DbFile(DbTransaction transaction, string fileName, IDataFile parent) {
    this.parent = parent;
    this.fileName = fileName;
    this.transaction = transaction;
}
// Fixed-width record list over the given data file; each record is 32
// bytes wide. NOTE(review): width inferred from the base-constructor
// argument — confirm against the base class's record layout.
public BlockIdUidList(IDataFile data) : base(data, 32) { }
// constructors
/// <summary>
/// Initializes a new member of the TranslatedQuery class.
/// </summary>
/// <param name="source">The data source being queried.</param>
/// <param name="sourceType">The source type being queried.</param>
protected TranslatedQuery(IDataFile source, Type sourceType) {
    _sourceType = sourceType;
    _source = source;
}
// Replicating from another data file is not supported by this
// implementation.
public void ReplicateFrom(IDataFile sourceFile) {
    throw new NotImplementedException();
}
// Stubs a data source with fixed name/path and builds the queryable
// FakeData over it.
public void Setup() {
    _source = MockRepository.GenerateStub<IDataFile>();
    _source.Stub(s => s.AbsolutePath).Return(PATH_NAME);
    _source.Stub(s => s.Name).Return(SOURCE_NAME);
    _fakeData = new Query<FakeData>(new QueryProvider(_source));
}
// Stubs the shared data source with a fixed name and path.
public virtual void Setup() {
    Source = MockRepository.GenerateStub<IDataFile>();
    Source.Stub(s => s.AbsolutePath).Return(PATH_NAME);
    Source.Stub(s => s.Name).Return(SOURCE_NAME);
}
// Exposes the path-root data file as a string dictionary.
public PathRootTable(IDataFile data) {
    this.dictionary = new StringDictionary(data);
}
// Creates a SillyCharacter bound to this game system in the given file.
public override Character CreateIn(IDataFile characterData) {
    var silly = new DescribeAGameSystem.SillyCharacter(characterData, this);
    return silly;
}
// Copies 'size' bytes from the source (at its current position) into the
// target, inserting at the target's current position (Shift makes room
// first), using a fixed 1 KiB intermediate buffer.
private static void ByteBufferCopyTo(IDataFile source, IDataFile target, long size) {
    long pos = target.Position;
    // Make room to insert the data
    target.Shift(size);
    target.Position = pos;
    // Set a 1k buffer
    byte[] buf = new byte[1024];
    // While there is data to copy,
    while (size > 0) {
        // Read an amount of data from the source
        int toRead = (int) Math.Min(buf.Length, size);
        // Read it into the buffer
        // NOTE(review): Read's return value is ignored — confirm
        // IDataFile.Read always fills the requested count.
        source.Read(buf, 0, toRead);
        // Write from the buffer out to the target
        target.Write(buf, 0, toRead);
        // Update the ref
        size = size - toRead;
    }
}
/// <summary>
/// Copies up to <paramref name="size"/> bytes from this file's current
/// position into <paramref name="destFile"/> at its current position.
/// Falls back to a byte-buffer copy unless both files share the same tree
/// system, in which case an optimized in-tree copy is used.
/// </summary>
/// <param name="destFile">Destination file.</param>
/// <param name="size">Maximum number of bytes to copy; clamped to the
/// remaining data in this file.</param>
public void CopyTo(IDataFile destFile, long size) {
    transaction.CheckErrorState();
    try {
        // The actual amount of data to really copy
        size = Math.Min(Length - Position, size);
        // Return if we aren't doing anything
        if (size <= 0) {
            return;
        }
        // If the target isn't a TranDataFile then use standard byte buffer copy.
        if (!(destFile is DataFile)) {
            ByteBufferCopyTo(this, destFile, size);
            return;
        }
        // If the tree systems are different, then byte buffer copy.
        DataFile target = (DataFile)destFile;
        if (TreeSystem != target.TreeSystem) {
            ByteBufferCopyTo(this, destFile, size);
            return;
        }
        // Fail condition (same key and same transaction),
        if (target.key.Equals(key) && target.Transaction == Transaction) {
            throw new ArgumentException("Can not use 'copyTo' to copy data within a file");
        }
        // initWrite on this and target. The reason we do this is because we
        // may change the root node on either source or target. We need to
        // initWrite on this object even though the data may not change,
        // because we may be writing out data from the heap as part of the
        // copy operation and the root node may change
        InitWrite();
        target.InitWrite();
        // Make sure internal vars are setup correctly
        EnsureCorrectBounds();
        target.EnsureCorrectBounds();
        // Remember the source and target positions
        long initSpos = Position;
        long initTpos = target.Position;
        // Ok, the target shares the same tree system, therefore we may be able
        // to optimize the copy.
        CopyDataTo(start + Position, target, target.start + target.Position, size);
        // Update the positions
        Position = initSpos + size;
        target.Position = initTpos + size;
        // Reset version to force a bound update
        version = -1;
        target.version = -1;
        target.UpdateLowestSizeChangedKey();
        target.Transaction.FlushCache();
    } catch (IOException e) {
        throw transaction.HandleIOException(e);
    } catch (OutOfMemoryException e) {
        throw transaction.HandleMemoryException(e);
    }
}
// Wraps the given data file as the reserved-slots store.
public ReservedSlotsDataFile(IDataFile dataFile) {
    DataFile = dataFile;
}