/// <summary>
/// Executes the command and maps the first result row to a record.
/// Returns null when the query produces no rows.
/// </summary>
protected T GetRecord(OdbcCommand command)
{
    T result = null;
    command.Connection = ConnectionInstance.Instance.GetConnection(_connectionString);
    command.Connection.Open();
    try
    {
        // Wrap the raw reader so columns are reachable as dynamic properties.
        dynamic dataReader = new DynamicDataReader(command.ExecuteReader());
        try
        {
            // Only the first row is materialized; any further rows are ignored.
            if (dataReader.Read())
            {
                result = PopulateRecord(dataReader);
            }
        }
        finally
        {
            // Always close the reader, even if PopulateRecord throws.
            dataReader.Close();
        }
    }
    finally
    {
        ConnectionInstance.CloseConnection(_connectionString);
    }
    return result;
}
/// <summary>
/// Runs the command as a stored procedure and maps every result row to a
/// record, skipping rows that do not map to a non-null record.
/// </summary>
protected IEnumerable <T> ExecuteStoredProc(OdbcCommand command)
{
    var results = new List <T>();
    command.Connection = ConnectionInstance.Instance.GetConnection(_connectionString);
    command.CommandType = CommandType.StoredProcedure;
    command.Connection.Open();
    try
    {
        // Wrap the raw reader so columns are reachable as dynamic properties.
        dynamic dataReader = new DynamicDataReader(command.ExecuteReader());
        try
        {
            while (dataReader.Read())
            {
                var mapped = PopulateRecord(dataReader);
                if (mapped != null)
                {
                    results.Add(mapped);
                }
            }
        }
        finally
        {
            // Always close the reader, even if record mapping throws.
            dataReader.Close();
        }
    }
    finally
    {
        ConnectionInstance.CloseConnection(_connectionString);
    }
    return results;
}
/// <summary>
/// Bulk-loads up to <paramref name="count"/> rows from Data\CsvData.csv
/// into the CsvData table, mapping CSV columns to table columns by name.
/// </summary>
private static async Task RunCsvDatasetDemoAsync(SqlConnection connection, int count, CancellationToken cancellationToken)
{
    using (var csvReader = new StreamReader(@"Data\CsvData.csv"))
    {
        // Split the CSV into a header row (Item1) and the data lines (Item2).
        var csvData = CsvParser.ParseHeadAndTail(csvReader, ',', '"');
        var csvLines = csvData.Item2;

        // Map each header field name to its column position.
        var csvHeader = new Dictionary<string, int>();
        var position = 0;
        foreach (var field in csvData.Item1)
        {
            csvHeader.Add(field, position++);
        }

        // Read the destination table's schema so columns can be mapped by name.
        var fields = await new TableSchemaProvider(connection, "CsvData").GetFieldsAsync();

        using (var bulkCopy = new SqlBulkCopy(connection))
        {
            bulkCopy.DestinationTableName = "CsvData";
            bulkCopy.BatchSize = 1000;
            bulkCopy.BulkCopyTimeout = (int)TimeSpan.FromMinutes(10).TotalSeconds;

            foreach (var field in fields)
            {
                bulkCopy.ColumnMappings.Add(field.FieldName, field.FieldName);
            }

            // Stream at most `count` CSV rows into the server; a column name
            // missing from the header resolves to index -1 and falls back to
            // the reader's default value.
            using (var reader = new DynamicDataReader <IList <string> >(
                       fields,
                       csvLines.Take(count),
                       (row, key) => row.GetValueOrDefault(csvHeader.GetValueOrDefault(key, -1))))
            {
                await bulkCopy.WriteToServerAsync(reader, cancellationToken);
            }
        }
    }
}
/// <summary>
/// Executes the command against a fresh OLE DB connection and maps every
/// result row to a record.
/// </summary>
protected IEnumerable <T> GetRecords(OleDbCommand command)
{
    var results = new List <T>();
    command.Connection = new OleDbConnectionInstance(_connectionString).GetConnection();
    command.Connection.Open();
    try
    {
        // Expose columns as dynamic properties for PopulateRecord.
        dynamic dataReader = new DynamicDataReader(command.ExecuteReader());
        try
        {
            while (dataReader.Read())
            {
                results.Add(PopulateRecord(dataReader));
            }
        }
        finally
        {
            // Close the reader even if record mapping throws.
            dataReader.Close();
        }
    }
    finally
    {
        command.Connection.Close();
    }
    return results;
}
/// <summary>
/// Queries all rows from [UserCenter].[dbo].[Members] and inserts one
/// Topic per row into the topic tree, replacing the first duplicate.
/// </summary>
private void InitTopics()
{
    DynamicDataReader reader = new DynamicDataReader();
    var result = reader.Query("Select * FROM [UserCenter].[dbo].[Members]");

    foreach (Object record in result)
    {
        // NOTE(review): naming the topic after record.GetHashCode() looks like a
        // placeholder — presumably a member name/id column was intended; verify.
        Topic temp = new Topic();
        temp.Name = record.GetHashCode().ToString();

        // The displaced duplicate is never used, so discard the out value
        // (the previous `Topic pre` local existed only to satisfy the out
        // parameter; an unused `List<Topic>` local was also removed).
        _topicTree.Insert(temp, DuplicatePolicy.ReplaceFirst, out _);
    }
}
/// <summary>
/// Executes the command and maps every result row to a record. When no
/// connection is supplied, one is opened from the shared pool and closed
/// afterwards; a caller-supplied connection is reused and left open.
/// </summary>
protected IEnumerable <T> GetRecords(OdbcCommand command, OdbcConnection connection = null)
{
    var results = new List <T>();

    // No caller-supplied connection means we own the connection lifecycle.
    bool ownsConnection = connection == null;
    if (ownsConnection)
    {
        command.Connection = ConnectionInstance.Instance.GetConnection(_connectionString);
        command.Connection.Open();
    }
    else
    {
        command.Connection = connection;
    }

    try
    {
        dynamic dataReader = new DynamicDataReader(command.ExecuteReader());
        try
        {
            while (dataReader.Read())
            {
                results.Add(PopulateRecord(dataReader));
            }
        }
        finally
        {
            // Always close the reader when done reading.
            dataReader.Close();
        }
    }
    finally
    {
        // Only close the connection we opened ourselves.
        if (ownsConnection)
        {
            ConnectionInstance.CloseConnection(_connectionString);
        }
    }
    return results;
}
/// <summary>
/// Fetches a single student by id via the usp_Alumno_get stored procedure.
/// </summary>
/// <param name="id">Primary key of the student to load.</param>
/// <returns>The populated <see cref="Alumno"/>, or null when no row matches.</returns>
public Alumno GetById(int id)
{
    // FIX: the previous `catch (Exception e) { throw new Exception(e.Message); }`
    // discarded the original exception type and stack trace; database exceptions
    // now propagate unmodified (still Exception-derived, so existing callers'
    // catch blocks keep working).
    Alumno alumno = null;
    using (var connection = dataBase.CreateOpenConnection())
    using (var command = dataBase.CreateStoredProcCommand(@"usp_Alumno_get", connection))
    {
        IDataParameter param = command.CreateParameter();
        param.ParameterName = "@idAlumno";
        param.Value = id;
        param.DbType = DbType.Int32;
        command.Parameters.Add(param);

        using (var reader = command.ExecuteReader())
        {
            // Wrap the reader so columns are reachable as dynamic properties.
            dynamic rd = new DynamicDataReader(reader);
            if (rd.Read())
            {
                alumno = new Alumno()
                {
                    IdAlumno = rd.IdAlumno,
                    Nombre = rd.Nombre,
                    Apellido = rd.Apellido,
                    Dni = rd.Dni,
                    NombreCurso = rd.Descripcion,
                    IdCurso = rd.IdCurso,
                    Edad = rd.Edad,
                    Promedio = rd.Promedio
                };
            }
        }
    }
    return alumno;
}
/// <summary>
/// Exercises positional (@0, @1) parameter binding on ExecuteReader and
/// dynamic column access via DynamicDataReader, timing the full read.
/// </summary>
public void NewParametersxecuteDynamicTest()
{
    using (var data = new SqlDataAccess(STR_ConnectionString))
    {
        var swatch = Stopwatch.StartNew();

        var reader = data.ExecuteReader(
            "select * from ApplicationLog where entered > @0 and entered < @1 order by Entered",
            DateTime.Now.AddYears(-115), DateTime.Now.AddYears(-1));

        // FIX: assert BEFORE wrapping/using the reader — a failed query now
        // reports data.ErrorMessage instead of surfacing as a
        // NullReferenceException from the dynamic wrapper.
        Assert.IsNotNull(reader, data.ErrorMessage);

        int readerCount = 0;
        // FIX: the reader was never disposed; ensure disposal even when an
        // exception interrupts the read loop.
        using (reader)
        {
            dynamic dreader = new DynamicDataReader(reader);
            while (reader.Read())
            {
                DateTime date = (DateTime)dreader.Entered;
                Console.WriteLine(date);
                readerCount++;
            }
        }
        swatch.Stop();

        Console.WriteLine(readerCount);
        Console.WriteLine(swatch.ElapsedMilliseconds + "ms");
    }
}
/// <summary>
/// Runs the SelectLast query and returns the `ms` column of its first row,
/// or 0 when the query yields no rows.
/// </summary>
public long Last()
{
    long result = 0;
    WithConnection(connection =>
    {
        using (var reader = new SelectLast().ExecuteReader(connection))
        {
            if (reader.Read())
            {
                // Dynamic wrapper exposes the `ms` column as a property.
                dynamic row = new DynamicDataReader(reader);
                result = row.ms;
            }
        }
    });
    return result;
}
/// <summary>
/// Generates <paramref name="count"/> random rows and bulk-loads them into
/// the DynamicData table, mapping dictionary keys to columns by name.
/// </summary>
private static async Task RunDynamicDatasetDemoAsync(SqlConnection connection, int count, CancellationToken cancellationToken)
{
    // Read the destination table's schema so columns can be mapped by name.
    var fields = await new TableSchemaProvider(connection, "DynamicData").GetFieldsAsync();

    using (var bulkCopy = new SqlBulkCopy(connection))
    {
        bulkCopy.DestinationTableName = "DynamicData";
        bulkCopy.BatchSize = 1000;
        bulkCopy.BulkCopyTimeout = (int)TimeSpan.FromMinutes(10).TotalSeconds;

        foreach (var field in fields)
        {
            bulkCopy.ColumnMappings.Add(field.FieldName, field.FieldName);
        }

        // Stream the generated rows into the server, resolving each column
        // value by its dictionary key.
        var rows = new RandomDataGenerator().GetDynamicData(count);
        using (var reader = new DynamicDataReader <IDictionary <string, object> >(
                   fields, rows, (row, key) => row.GetValueOrDefault(key)))
        {
            await bulkCopy.WriteToServerAsync(reader, cancellationToken);
        }
    }
}
// Computes overall issue-spidering progress from a two-result-set reader:
// result set 1 = a single row with HasRepoMetadata; result set 2 = one row
// per repo with issue-fetch counters. Returns null if either result set is
// missing its expected row(s).
// NOTE(review): reads are order-dependent — `ddr` is the same reader as
// `reader`, so each dynamic column access refers to the current row.
private SyncSpiderProgress ReadSpiderProgress(DynamicDataReader reader)
{
    dynamic ddr = reader;
    SyncSpiderProgress spiderProgress = null;

    // First result set: single row indicating whether the repo list is known.
    if (!reader.Read())
    {
        return (null);
    }
    var hasRepoList = ddr.HasRepoMetadata;

    // Second result set: per-repo issue-fetch state.
    if (!reader.NextResult())
    {
        return (null);
    }
    var repoSpiderProgress = new List <(bool SyncStarted, int MaxIssueNumber, int IssueCount)>();
    while (reader.Read())
    {
        bool hasIssueMetadata = ddr.HasIssueMetadata;
        bool issuesFullyImported = ddr.IssuesFullyImported;
        // Null counters (repo never fetched) collapse to 0.
        int maxNumber = ddr.MaxNumber ?? 0;
        int issueCount = ddr.IssueCount ?? 0;
        if (issuesFullyImported)
        {
            // realartists/shiphub-server#460 Handle spider progress for repos with gaps in issue numbering
            // workaround gaps in issues numbers that can occur in some rare scenarios
            maxNumber = issueCount;
        }
        // A repo counts as "sync started" if it has issue metadata or any issue number seen.
        repoSpiderProgress.Add((hasIssueMetadata || maxNumber != 0, maxNumber, issueCount));
    }

    var hasReposThatHaveNeverFetchedIssues = repoSpiderProgress.Exists(t => !t.SyncStarted);

    if (!hasRepoList || hasReposThatHaveNeverFetchedIssues)
    {
        // Still discovering repos: progress is indeterminate (-1).
        spiderProgress = new SyncSpiderProgress() { Summary = "Fetching Repo List", Progress = -1.0 };
    }
    else
    {
        // Sum expected (max issue number) vs. loaded (issue count) across repos.
        var(expected, loaded) = repoSpiderProgress.Aggregate((expected: 0, loaded: 0), (accum, t) =>
        {
            return (expected: t.MaxIssueNumber + accum.expected, loaded: t.IssueCount + accum.loaded);
        });
        if (loaded >= expected)
        {
            spiderProgress = new SyncSpiderProgress() { Summary = "Issues Fully Fetched", Progress = 1.0 };
        }
        else
        {
            spiderProgress = new SyncSpiderProgress() { Summary = "Fetching Issues", Progress = loaded / (double)expected };
        }
    }
    return (spiderProgress);
}