/// <summary>
/// Creates an exporter that writes the result of the <paramref name="source"/>
/// query into the <paramref name="destination"/> data file.
/// </summary>
/// <param name="source">Query describing the data to export.</param>
/// <param name="destination">Target file the results are written to.</param>
public DataFileExporter(SourceQueryParameters source, DataFileBase destination)
{
    InitializeMembers();

    this.source = source;
    this.destination = destination;
}
/// <summary>
/// Creates an importer that loads the contents of the <paramref name="source"/>
/// data file into the destination table described by <paramref name="destination"/>.
/// </summary>
/// <param name="source">File to read data from.</param>
/// <param name="destination">Destination table parameters, forwarded to the base class.</param>
public DataFileImporter(DataFileBase source, DestinationTableParameters destination)
    : base(destination)
{
    InitializeMembers();

    this.source = source;
}
/// <summary>
/// Creates a data reader over <paramref name="parent"/> and advances it
/// to the first result set by calling <c>NextResult()</c>.
/// </summary>
/// <param name="parent">The data file this reader iterates over.</param>
internal FileDataReader(DataFileBase parent)
{
    // NOTE(review): "InizializeMembers" is a typo for "InitializeMembers";
    // kept as-is because the private method is declared under that name.
    InizializeMembers();

    this.parent = parent;
    NextResult();
}
/// <summary>
/// Resets all reader state to its pre-read defaults.
/// </summary>
// NOTE(review): method name contains a typo ("Inizialize"); kept as-is
// because it is called by the constructor under this exact name.
private void InizializeMembers()
{
    this.parent = null;
    this.columnIndex = null;
    this.rowValues = null;
    this.isIdentity = null;

    // -1 marks "before the first block/row"
    this.blockCounter = -1;
    this.rowCounter = -1;
}
/// <summary>
/// Copies state from another block. Columns are cloned one by one so the
/// two blocks do not share column instances.
/// </summary>
/// <param name="old">Block to copy from.</param>
private void CopyMembers(DataFileBlockBase old)
{
    this.file = old.file;

    // Deep copy of the column list
    this.columns = new List<Column>();
    foreach (var column in old.columns)
    {
        this.columns.Add((Column)column.Clone());
    }
}
/// <summary>
/// Builds and returns a job instance that exports the given tables into a
/// single zip archive written under <paramref name="path"/> (or the job
/// settings' output directory when <paramref name="path"/> is null).
/// </summary>
/// <param name="federation">Federation supplying the file format and stream factories.</param>
/// <param name="sources">Tables or views to export; one output file is created per source.</param>
/// <param name="path">Output directory; falls back to the job settings default when null.</param>
/// <param name="format">File format used for every exported table.</param>
/// <param name="queueName">Queue the job is scheduled on.</param>
/// <param name="comments">Free-text comments attached to the job.</param>
/// <returns>The initialized job instance with its export parameters set.</returns>
public JobInstance ScheduleAsJob(Federation federation, TableOrView[] sources, string path, FileFormatDescription format, string queueName, string comments)
{
    var job = GetInitializedJobInstance(queueName, comments);
    var settings = new ExportTablesJobSettings(job.JobDefinition.Settings);

    // Archive name: <username>_<jobid>.zip inside the chosen directory
    path = path ?? settings.OutputDirectory;
    path = Path.Combine(path, String.Format("{0}_{1}{2}", Context.UserName, job.JobID, Jhu.Graywulf.IO.Constants.FileExtensionZip));

    // One destination file per source table, named after the table
    var destinations = new DataFileBase[sources.Length];
    for (int i = 0; i < sources.Length; i++)
    {
        var ff = FileFormatFactory.Create(federation.FileFormatFactory);
        var destination = ff.CreateFile(format);
        destination.Uri = Util.UriConverter.FromFilePath(String.Format("{0}{1}", sources[i].ObjectName, format.DefaultExtension));

        // special initialization in case of a text file
        // TODO: move this somewhere else, maybe web?
        if (destination is TextDataFileBase)
        {
            var tf = (TextDataFileBase)destination;
            tf.Encoding = Encoding.ASCII;
            tf.Culture = System.Globalization.CultureInfo.InvariantCulture;
            tf.GenerateIdentityColumn = false;
            tf.ColumnNamesInFirstLine = true;
        }

        destinations[i] = destination;
    }

    // All files are bundled into a single zip archive
    var et = new ExportTables()
    {
        Sources = sources,
        Destinations = destinations,
        Archival = DataFileArchival.Zip,
        Uri = Util.UriConverter.FromFilePath(path),
        FileFormatFactoryType = federation.FileFormatFactory,
        StreamFactoryType = federation.StreamFactory,
    };

    job.Parameters["Parameters"].Value = et;

    return job;
}
/// <summary>
/// Copies state from another data file. The stream is not shared: the copy
/// starts with no base stream and does not own one. Blocks are deep-copied.
/// </summary>
/// <param name="old">File to copy from.</param>
private void CopyMembers(DataFileBase old)
{
    // The copy never shares or owns the original's stream
    this.baseStream = null;
    this.ownsBaseStream = false;

    this.fileMode = old.fileMode;
    this.uri = old.uri;
    this.generateIdentityColumn = old.generateIdentityColumn;

    // Deep copy of blocks.
    // Fix: the original cloned each block twice and discarded the first
    // clone (unused local `nb`); clone exactly once per block.
    this.blocks = new List<DataFileBlockBase>();
    foreach (var b in old.blocks)
    {
        this.blocks.Add((DataFileBlockBase)b.Clone());
    }

    // -1 marks "before the first block"
    this.blockCounter = -1;
}
/// <summary>
/// Copy constructor; duplicates the state of <paramref name="old"/>.
/// </summary>
/// <param name="old">Instance to copy.</param>
protected DataFileBase(DataFileBase old)
{
    CopyMembers(old);
}
/// <summary>
/// Releases the references held by this reader. Only clears fields,
/// so calling it more than once is harmless.
/// </summary>
public void Dispose()
{
    this.parent = null;
    this.columnIndex = null;
}
/// <summary>
/// Copies state from another block, cloning every column so that the copy
/// holds its own column instances.
/// </summary>
/// <param name="old">Block to copy from.</param>
private void CopyMembers(DataFileBlockBase old)
{
    this.file = old.file;

    // Deep copy columns
    this.columns = new List<Column>();
    for (int i = 0; i < old.columns.Count; i++)
    {
        this.columns.Add((Column)old.columns[i].Clone());
    }
}
/// <summary>
/// Resets members to their default values.
/// </summary>
/// <param name="context">
/// Streaming context — presumably this is a serialization callback
/// (the parameter is otherwise unused); confirm the attribute at the
/// declaration site.
/// </param>
private void InitializeMembers(StreamingContext context)
{
    // Removed commented-out initializations of legacy fields
    // (databaseInstanceName, schemaName, datasetName, databaseName,
    // tableName) that are no longer members of this type.
    this.source = null;
    this.destination = null;
}
/// <summary>
/// Resets the block to its default state: no owning file and an
/// empty column list.
/// </summary>
private void InitializeMembers()
{
    this.file = null;
    this.columns = new List<Column>();
}
/// <summary>
/// Resets the command to its default state with no file and no reader.
/// </summary>
private void InitializeMembers()
{
    this.file = null;
    this.dataReader = null;
}
/// <summary>
/// Copies state from another block. The column list itself is a new list,
/// but the column objects are shared with <paramref name="old"/> (shallow copy).
/// </summary>
/// <param name="old">Block to copy from.</param>
private void CopyMembers(DataFileBlockBase old)
{
    this.file = old.file;

    this.columns = new List<DataFileColumn>();
    this.columns.AddRange(old.columns);
}
/// <summary>
/// Resets the importer to its default state with no source file.
/// </summary>
private void InitializeMembers()
{
    this.source = null;
}
/// <summary>
/// Creates a command that reads from the given data file.
/// </summary>
/// <param name="file">File the command will read.</param>
public FileCommand(DataFileBase file)
{
    InitializeMembers();

    this.file = file;
}
/// <summary>
/// Executes <paramref name="cmd"/> and streams its result set into
/// <paramref name="destination"/>. The command is registered as a
/// cancelable operation for the duration of the execution.
/// </summary>
/// <param name="cmd">Command that produces the data to export.</param>
/// <param name="destination">File the data reader's rows are written to.</param>
private void WriteTable(IDbCommand cmd, DataFileBase destination)
{
    // Wrap command into a cancellable task
    var guid = Guid.NewGuid();
    var ccmd = new CancelableDbCommand(cmd);

    RegisterCancelable(guid, ccmd);

    try
    {
        // Pass data reader to the file formatter
        ccmd.ExecuteReader(dr => { destination.WriteFromDataReader(dr); });
    }
    finally
    {
        // Fix: unregister even when ExecuteReader throws, otherwise the
        // cancelable registration leaks a stale entry.
        UnregisterCancelable(guid);
    }
}
/// <summary>
/// Opens a connection to the source table, executes the query inside a
/// read-uncommitted transaction, and writes the result into
/// <paramref name="destination"/>.
/// </summary>
/// <param name="source">Query describing the table to read.</param>
/// <param name="destination">File the results are written to.</param>
protected void WriteTable(SourceTableQuery source, DataFileBase destination)
{
    // Stacked usings: disposal order is transaction, connection, command,
    // same as the equivalent nested blocks.
    using (var cmd = source.CreateCommand())
    using (var cn = source.OpenConnection())
    using (var tn = cn.BeginTransaction(IsolationLevel.ReadUncommitted))
    {
        cmd.Connection = cn;
        cmd.Transaction = tn;
        cmd.CommandTimeout = Timeout;

        WriteTable(cmd, destination);
    }
}
/// <summary>
/// Imports the contents of <paramref name="source"/> into
/// <paramref name="destination"/> by wrapping the file in a
/// <see cref="FileCommand"/> and reusing the command-based import path.
/// </summary>
/// <param name="source">Data file to read.</param>
/// <param name="destination">Table the file contents are loaded into.</param>
protected void ReadTable(DataFileBase source, DestinationTable destination)
{
    // Import the file by wrapping it into a dummy command
    using (var cmd = new FileCommand(source))
    {
        ImportTable(cmd, destination);
    }
}
/// <summary>
/// Creates a block belonging to the given data file.
/// </summary>
/// <param name="file">File that owns this block.</param>
public DataFileBlockBase(DataFileBase file)
{
    InitializeMembers();

    this.file = file;
}
/// <summary>
/// Resets the task to its default state with no source and no destination.
/// </summary>
private void InitializeMembers()
{
    this.source = null;
    this.destination = null;
}
/// <summary>
/// Copies state from another data file. The stream is not shared: the copy
/// starts with no base stream and does not own one. Blocks are deep-copied.
/// </summary>
/// <param name="old">File to copy from.</param>
private void CopyMembers(DataFileBase old)
{
    // The copy never shares or owns the original's stream
    this.baseStream = null;
    this.ownsBaseStream = false;

    this.fileMode = old.fileMode;
    this.uri = old.uri;
    this.generateIdentityColumn = old.generateIdentityColumn;

    // Deep copy of blocks.
    // Fix: the original cloned each block twice and discarded the first
    // clone (unused local `nb`); clone exactly once per block.
    this.blocks = new List<DataFileBlockBase>();
    foreach (var b in old.blocks)
    {
        this.blocks.Add((DataFileBlockBase)b.Clone());
    }

    // -1 marks "before the first block"
    this.blockCounter = -1;
}
/// <summary>
/// Resets the block to its default state: no owning file and an
/// empty column list.
/// </summary>
private void InitializeMembers()
{
    this.file = null;
    this.columns = new List<DataFileColumn>();
}
/// <summary>
/// Copies the source and destination references from another task
/// (shallow copy).
/// </summary>
/// <param name="old">Task to copy from.</param>
private void CopyMembers(ExportTable old)
{
    this.source = old.source;
    this.destination = old.destination;
}