/// <summary>
/// Commits data buffered during recording to disk.
/// </summary>
/// <param name="streamID">ID of the stream in the database to be written to.</param>
/// <param name="indices">The indices of the new frames to be written.</param>
/// <param name="indicesCount">The number of the frames in the indices array to be written.</param>
/// <param name="buffer">The buffer where the data was buffered to.</param>
static internal void SaveBufferAndIndices( int streamID, Index[] indices, int indicesCount, byte[] buffer )
{
    #region Save Buffer (saves the raw bytes into the RawStream table in the DB)
    int offset;
    try
    {
        // using blocks guarantee the connection and command are disposed even
        // when the stored procedure throws; the original leaked an open
        // connection on the SqlException path (conn.Close was never reached).
        using (SqlConnection conn = new SqlConnection(Constants.SQLConnectionString))
        using (SqlCommand cmd = new SqlCommand("AppendBuffer", conn))
        {
            cmd.CommandType = CommandType.StoredProcedure;

            SqlParameter sqlStreamID = cmd.Parameters.Add("@stream_id", SqlDbType.Int);
            sqlStreamID.Direction = ParameterDirection.Input;
            sqlStreamID.Value = streamID;

            SqlParameter dataParam = cmd.Parameters.Add("@chunk", SqlDbType.Image);
            dataParam.Direction = ParameterDirection.Input;
            dataParam.Value = buffer;
            // Only send the bytes actually covered by the last index; 'end' is
            // an inclusive offset, hence the +1.
            dataParam.Size = indices[indicesCount - 1].end + 1;

            // The stored procedure returns the offset at which this chunk was
            // appended within the raw stream.
            SqlParameter offsetParam = cmd.Parameters.Add("@offset", SqlDbType.Int);
            offsetParam.Direction = ParameterDirection.ReturnValue;

            conn.Open();
            cmd.ExecuteNonQuery();

            offset = (int)offsetParam.Value;
        }

        // Track the running size of the stream (bytes appended this call).
        streamSizeDict[streamID] += (long)(indices[indicesCount - 1].end + 1);
    }
    catch (SqlException ex)
    {
        eventLog.WriteEntry(
            string.Format(CultureInfo.CurrentCulture, Strings.DatabaseOperationFailedError, ex.ToString()),
            EventLogEntryType.Error,
            ArchiveServiceEventLog.ID.DBOpFailed);
        throw;
    }
    #endregion

    #region Build the index inserts
    // lets do one big transaction to save on log writes etc.
    // NOTE: string-built SQL is injection-safe here because every appended
    // value is an internal numeric (int/long), never user-supplied text.
    StringBuilder sb = new StringBuilder("BEGIN TRANSACTION", 50 + 125 * indicesCount);
    for (int i = 0; i < indicesCount; i++)
    {
        sb.Append(" INSERT INTO Frame ( stream_id, raw_start, raw_end, frame_time ) VALUES (");
        sb.Append(streamID);
        sb.Append(" ,");
        // raw_start/raw_end are rebased by the chunk's offset inside the stream.
        sb.Append(indices[i].start + offset);
        sb.Append(",");
        sb.Append(indices[i].end + offset);
        sb.Append(",");
        sb.Append(indices[i].timestamp);
        sb.Append(") ");
    }
    sb.Append("COMMIT TRANSACTION");
    string indexString = sb.ToString();
    #endregion

    #region Save the index inserts
    int affected = ExecuteNonQuery(indexString);
    if (affected != indicesCount)
    {
        // Fewer rows inserted than frames supplied — log, but don't throw.
        eventLog.WriteEntry(
            string.Format(CultureInfo.CurrentCulture, Strings.FailedToSaveAllIndices, indicesCount, affected),
            EventLogEntryType.Error,
            ArchiveServiceEventLog.ID.IndiciesFailedToSave);
    }
    #endregion
}
/// <summary>
/// Writes a single frame straight to the database, bypassing the recording buffer.
/// </summary>
/// <param name="frame">The frame (raw bytes plus tick timestamp) to persist.</param>
/// <param name="streamID">ID of the stream in the database the frame belongs to.</param>
public static void DirectWrite( FrameWithTicks frame, int streamID )
{
    // Fixed "steamID" typo in the trace text and made the format
    // culture-invariant (machine-readable trace output, CA1305).
    Trace.WriteLine(String.Format(CultureInfo.InvariantCulture,
        "BufferRecorder::DirectWrite called with frame of size {0} bytes and streamID {1}.",
        frame.frame.Length, streamID));

    // Build a single index covering the whole frame buffer; 'end' is inclusive.
    Index ind = new Index();
    ind.start = 0;
    ind.end = frame.frame.Length - 1;
    ind.timestamp = frame.ticks;

    DBHelper.SaveBufferAndIndices(streamID, new Index[] { ind }, 1, (byte[])frame.frame);
}
/// <summary>
/// Loads the next block of indexes into an array of indices.
/// </summary>
/// <param name="indices">Destination array, filled from element 0.</param>
/// <param name="startingTick">Tick timestamp to start loading frames from.</param>
/// <param name="streamID">ID of the stream whose frames are loaded.</param>
/// <param name="maxBytes">Byte budget of the buffer the loaded frames must fit in.</param>
/// <param name="indexCount">Receives the number of usable entries placed in <paramref name="indices"/>.</param>
/// <returns>true if the stream ran out of data before the array or byte budget filled up.</returns>
internal static bool LoadIndices(Index[] indices, long startingTick, int streamID, int maxBytes, out int indexCount)
{
    int i = 0;
    int quitCase = 1; // keep up with _why_ we exit this loop... (1 = reader ran dry)

    // using blocks ensure the reader, command and connection are released even
    // if a SqlException is thrown mid-read; the original leaked them on error.
    using (SqlConnection conn = new SqlConnection(Constants.SQLConnectionString))
    using (SqlCommand cmd = new SqlCommand("LoadIndices", conn))
    {
        cmd.CommandType = CommandType.StoredProcedure;

        SqlParameter stream_id = cmd.Parameters.Add("@stream_id", SqlDbType.Int);
        stream_id.Direction = ParameterDirection.Input;
        stream_id.Value = streamID;

        SqlParameter starting_tick = cmd.Parameters.Add("@starting_tick", SqlDbType.BigInt);
        starting_tick.Direction = ParameterDirection.Input;
        starting_tick.Value = startingTick;

        // Maximum number of rows to fetch (the array capacity). The original
        // local was misleadingly named 'ending_tick'; the proc parameter is @count.
        SqlParameter countParam = cmd.Parameters.Add("@count", SqlDbType.BigInt);
        countParam.Direction = ParameterDirection.Input;
        countParam.Value = indices.Length;

        conn.Open();
        using (SqlDataReader dr = cmd.ExecuteReader(CommandBehavior.SequentialAccess))
        {
            while (dr.Read())
            {
                indices[i].start = dr.GetInt32(0);
                indices[i].end = dr.GetInt32(1);
                indices[i].timestamp = dr.GetInt64(2);

                // Check to make sure we have enough to fill our buffer
                if ((indices[i].end - indices[0].start + 1) >= maxBytes)
                {
                    quitCase = 0;
                    break;
                }

                i++;

                // Make sure indices isn't full
                if (i >= indices.Length)
                {
                    quitCase = 2;
                    break;
                }
            }
        }
    }

    bool streamOutOfData = false;
    indexCount = i;

    // Now we do the appropriate follow-up based why we exited the loop
    switch (quitCase)
    {
        case 0: // enough data to fill buffer. (note that 'i' is one less than it should be only in this case)
            if ((indices[i].end - indices[0].start + 1) <= maxBytes)
                indexCount = i + 1; // all the indices we parsed.
            // else the last frame won't fit
            Debug.Assert(indexCount > 0); // we can assert this because of the way maxBufferSize is calculated
            break;
        case 1: // we're out of data. Flag it.
            streamOutOfData = true;
            break;
        default: // we have enough indices to fill the array
            break;
    }

    return streamOutOfData;
}
/// <summary>
/// Writes a single frame straight to the database without buffering.
/// </summary>
/// <param name="frame">The frame (raw bytes plus tick timestamp) to persist.</param>
/// <param name="streamID">ID of the stream in the database the frame belongs to.</param>
public static void DirectWrite( FrameWithTicks frame, int streamID )
{
    // One index spanning the entire frame payload ('end' is inclusive).
    Index wholeFrame = new Index();
    wholeFrame.timestamp = frame.ticks;
    wholeFrame.start = 0;
    wholeFrame.end = frame.frame.Length - 1;

    DBHelper.SaveBufferAndIndices(streamID, new Index[] { wholeFrame }, 1, (byte[])frame.frame);
}