Code Example #1
File: AviFile.cs  Project: thaolt/screenrecorder
 public void Close()
 {
     if (this.pAviCompressedStream != IntPtr.Zero)
     {
         Avi32Interop.AVIStreamRelease(this.pAviCompressedStream);
         this.pAviCompressedStream = IntPtr.Zero;
     }
     if (this.pVideoStream != IntPtr.Zero)
     {
         Avi32Interop.AVIStreamRelease(this.pVideoStream);
         this.pVideoStream = IntPtr.Zero;
     }
     if (this.pAudioStream != IntPtr.Zero)
     {
         Avi32Interop.AVIStreamRelease(this.pAudioStream);
         this.pAudioStream = IntPtr.Zero;
     }
     if (this.audioEncoder != null)
     {
         this.audioEncoder.Close();
         this.audioEncoder = null;
     }
     if (this.pAviFile != IntPtr.Zero)
     {
         Avi32Interop.AVIFileRelease(this.pAviFile);
         this.pAviFile = IntPtr.Zero;
     }
     this.opened = false;
 }
Code Example #2
        public AcmEncoder GetEncoder()
        {
            bool        encoderNeeded;
            SoundFormat inputFormat = GetInputFormat(out encoderNeeded);

            if (!encoderNeeded)
            {
                return(null);
            }
            AcmEncoder encoder = new AcmEncoder();

            encoder.InputFormat  = inputFormat;
            encoder.OutputFormat = this.format;
            return(encoder);
        }
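
The null return above is meaningful: as Example #7 shows, an encoder is only created when the captured format actually has to be converted. A minimal caller sketch, assuming a SoundProvider instance like the one used in Example #7:

    // Sketch based on how Example #7 wires things together; "soundProvider" is
    // assumed to be the SoundProvider instance that exposes GetEncoder().
    AcmEncoder audioEncoder = soundProvider.GetEncoder();
    if (audioEncoder == null)
    {
        // The device already delivers the target format; no ACM conversion is needed.
    }
    else
    {
        // The encoder is handed to AviFile.Open (Example #3), which opens it in
        // SetupAudio (Example #5); Example #7 disposes it once recording ends.
    }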
Code Example #3
File: AviFile.cs  Project: thaolt/screenrecorder
        public void Open(string fileName, DisplayFormat videoFormat, int fps, VideoCompressor compressor,
                         SoundFormat audioFormat, AcmEncoder audioEncoder)
        {
            if (this.opened)
            {
                throw new InvalidOperationException();
            }
            if (string.IsNullOrEmpty(fileName))
            {
                throw new ArgumentNullException("fileName");
            }
            this.video = videoFormat != null;
            this.audio = audioFormat != null;
            if (!this.audio && !this.video)
            {
                // There is nothing to do!
                throw new InvalidOperationException();
            }
            // Open AVI File
            int hr = Avi32Interop.AVIFileOpen(out this.pAviFile, fileName, Avi32Interop.OF_CREATE, IntPtr.Zero);

            if (hr != 0)
            {
                throw new AviException("AVIFileOpen", hr);
            }
            try {
                if (this.video)
                {
                    this.SetupVideo(videoFormat, compressor, fps);
                }
                if (this.audio)
                {
                    this.SetupAudio(audioFormat, audioEncoder);
                }
                this.opened = true;
            }
            finally {
                if (!this.opened)
                {
                    this.Close();
                }
            }
        }
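
For orientation, here is a minimal sketch of the open/record/close lifecycle that Example #7 drives; fileName, the formats, fps, compressor, and encoder are placeholders for whatever the surrounding recorder code supplies:

    // Lifecycle sketch; fileName, videoFormat, fps, compressor, audioFormat and
    // audioEncoder are assumed to come from the providers, as in Example #7.
    AviFile aviFile = new AviFile();
    try
    {
        aviFile.Open(fileName, videoFormat, fps, compressor, audioFormat, audioEncoder);
        // ... aviFile.AddFrame(...) / aviFile.AddSound(...) for each captured frame or buffer ...
    }
    finally
    {
        aviFile.Dispose(); // Example #7 disposes the file when recording ends
    }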
Code Example #4
        public static SoundFormat[] GetFormats(string deviceId, bool queryEncoderFormats)
        {
            // Validate the device id
            if (string.IsNullOrEmpty(deviceId))
            {
                throw new ArgumentNullException("deviceId");
            }
            if (!queryEncoderFormats)
            {
                return(sharedWrapper.GetDeviceFormats(deviceId));
            }
            // Get device formats
            SoundFormat[] deviceFormats = sharedWrapper.GetDeviceFormats(deviceId);
            // Get the list of input formats that have not been queried yet
            List <SoundFormat> newInputFormats = new List <SoundFormat>();

            foreach (SoundFormat deviceFormat in deviceFormats)
            {
                // Does a conversion for this format already exist?
                if (!convertionMap.Contains(deviceFormat) && deviceFormat.Tag == preferredFormat.Tag)
                {
                    newInputFormats.Add(deviceFormat);
                }
            }
            // Is there any new format to query?
            if (newInputFormats.Count > 0)
            {
                var encoderConvertionMap = AcmEncoder.GetConvertionMap(newInputFormats.ToArray(), preferredFormat.Tag);
                // Add new map to the current map
                convertionMap.Add(encoderConvertionMap);
                // Add identity conversions (each input format mapped to itself) to the map
                foreach (SoundFormat newInputFormat in newInputFormats)
                {
                    convertionMap.Add(newInputFormat, newInputFormat);
                }
            }
            // Get all of the output formats matching device formats as input
            return(convertionMap.GetOutputs(deviceFormats));
        }
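
A hypothetical call site for reference; the class that declares GetFormats and the way deviceId is obtained are not shown in these examples, so both are assumptions here:

    // Hypothetical usage; "SoundProvider" as the declaring type and the origin of
    // deviceId are assumptions, since neither appears in these examples.
    SoundFormat[] formats = SoundProvider.GetFormats(deviceId, queryEncoderFormats: true);
    // Each entry is an output format reachable from some format the device supports,
    // i.e. what the recorder could write after optional ACM conversion.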
Code Example #5
File: AviFile.cs  Project: thaolt/screenrecorder
        private void SetupAudio(SoundFormat audioFormat, AcmEncoder audioEncoder)
        {
            IntPtr pwfx = audioFormat.ToPtr();

            try {
                Avi32Interop.AVISTREAMINFO asi = new Avi32Interop.AVISTREAMINFO();
                asi.fccType               = Avi32Interop.streamtypeAUDIO;
                asi.dwScale               = audioFormat.BlockAlign;
                asi.dwRate                = audioFormat.AverageBytesPerSecond;
                asi.dwStart               = 0;
                asi.dwLength              = -1;
                asi.dwInitialFrames       = 0;
                asi.dwSuggestedBufferSize = 0;
                asi.dwQuality             = -1;
                asi.dwSampleSize          = audioFormat.BlockAlign;
                int hr = Avi32Interop.AVIFileCreateStream(this.pAviFile, out this.pAudioStream, ref asi);
                if (hr != 0)
                {
                    throw new AviException("AVIFileCreateStream", hr);
                }
                hr = Avi32Interop.AVIStreamSetFormat(this.pAudioStream, 0, pwfx, audioFormat.ToalSize);
                if (hr != 0)
                {
                    throw new AviException("AVIStreamSetFormat", hr);
                }
                if (audioEncoder != null)
                {
                    audioEncoder.Open();
                }
                this.audioFormat  = audioFormat;
                this.audioEncoder = audioEncoder;
            }
            finally {
                Marshal.FreeHGlobal(pwfx);
            }
        }
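
A side note on the dwScale/dwRate pair above: with dwScale set to BlockAlign and dwRate to AverageBytesPerSecond, the stream rate dwRate/dwScale works out to the sample rate. A worked example with assumed values (44.1 kHz, 16-bit stereo PCM, not taken from the project):

    // Worked example of the dwScale/dwRate pairing used in SetupAudio, assuming
    // 44.1 kHz, 16-bit stereo PCM (illustrative values only).
    int channels = 2, bitsPerSample = 16, samplesPerSecond = 44100;
    int blockAlign = channels * (bitsPerSample / 8);           // 4 bytes per sample block
    int avgBytesPerSecond = samplesPerSecond * blockAlign;     // 176,400 bytes per second
    // dwScale = blockAlign and dwRate = avgBytesPerSecond, so dwRate / dwScale is
    // 44,100 sample blocks per second, i.e. exactly the audio sample rate.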
Code Example #6
        private SoundFormat GetInputFormat(out bool encoderNeeded)
        {
            int nMaxAvgBytesPerSec = 0;

            // Get device formats
            SoundFormat[]      deviceFormats    = this.wrapper.GetDeviceFormats(deviceId);
            List <SoundFormat> deviceFormatList = new List <SoundFormat>(deviceFormats);
            SoundFormat        inputFormat      = null;

            if (this.format == null)
            {
                // If format is not specified, find the format with maximum average bytes per second
                foreach (SoundFormat deviceFormat in deviceFormatList)
                {
                    if (inputFormat == null || nMaxAvgBytesPerSec < deviceFormat.AverageBytesPerSecond)
                    {
                        inputFormat        = deviceFormat;
                        nMaxAvgBytesPerSec = deviceFormat.AverageBytesPerSecond;
                    }
                }
                if (inputFormat == null)
                {
                    // This happens only if the device has no formats
                    throw new InvalidOperationException("Cannot find an appropriate input format.");
                }
                encoderNeeded = false;
                return(inputFormat);
            }

            // Check if device supports the format
            if (deviceFormatList.Contains(this.format))
            {
                encoderNeeded = false;
                return(this.format);
            }

            // Get available input formats for conversion
            SoundFormat[] availableInputs = convertionMap.GetInputs(this.format);
            if (availableInputs.Length == 0)
            {
                // Get the conversion map again
                // We currently use PCM format for output.
                convertionMap.Add(AcmEncoder.GetConvertionMap(deviceFormatList.ToArray(), preferredFormat.Tag));
                // Get available input formats for conversion
                availableInputs = convertionMap.GetInputs(this.format);
                if (availableInputs.Length == 0)
                {
                    throw new InvalidOperationException("Cannot find an appropriate input format.");
                }
            }

            // Find the input format that the device supports and has the
            // maximum average bytes per second
            foreach (SoundFormat input in availableInputs)
            {
                if (deviceFormatList.Contains(input))
                {
                    if (nMaxAvgBytesPerSec < input.AverageBytesPerSecond &&
                        (inputFormat == null ||
                         input.AverageBytesPerSecond == (input.BitsPerSample / 8) * input.Channels * input.SamplesPerSecond))
                    {
                        inputFormat        = input;
                        nMaxAvgBytesPerSec = (int)input.AverageBytesPerSecond;
                    }
                }
            }
            if (inputFormat == null)
            {
                throw new InvalidOperationException("Cannot find an appropriate input format.");
            }
            encoderNeeded = true;
            return(inputFormat);
        }
Code Example #7
        private void RecordPrivate(DisplayProvider displayProvider, SoundProvider soundProvider)
        {
            bool       recordDisplay = displayProvider != null;
            bool       recordSound   = soundProvider != null;
            AviFile    aviFile       = null;
            AcmEncoder audioEncoder  = null;

            this.duration = TimeSpan.Zero;
            try {
                DisplayFormat videoFormat = null;
                SoundFormat   audioFormat = null;

                int soundReadInterval = 0;
                if (recordDisplay)
                {
                    displayProvider.Open();
                    videoFormat = displayProvider.Format;
                }
                if (recordSound)
                {
                    soundProvider.Open();
                    soundReadInterval = (int)Math.Ceiling(soundProvider.BufferLength / 2.0); // ms
                    audioFormat       = soundProvider.Format;
                    audioEncoder      = soundProvider.GetEncoder();
                }
                // Open AVI file
                aviFile = new AviFile();
                aviFile.Open(fileName, videoFormat, fps, this.compressor, audioFormat, audioEncoder);

                // Initialize helper variables
                int      frameIndex         = 0;
                int      frameDuration      = recordDisplay ? (int)(msPerSecond / this.fps) : 0;
                int      frameBufferLength  = recordDisplay ? displayProvider.BitmapBytes : 0;
                int      startingFrameIndex = 0;
                int      soundSampleIndex   = 0;
                long     startTime          = DateTime.Now.Ticks;
                long     lastSoundRead      = DateTime.Now.Ticks;
                TimeSpan prevDuration       = TimeSpan.Zero;
                TimeSpan currentDuration    = TimeSpan.Zero;

                // Update state
                lock (syncRoot) {
                    this.state = RecordingState.Recording;
                }
                if (recordSound)
                {
                    // Start sound recording
                    soundProvider.Start();
                }
                // Recording loop; this is a long one huh?!
                do
                {
                    // Check if paused
                    if (this.state == RecordingState.Paused)
                    {
                        prevDuration = prevDuration.Add(currentDuration);
                        if (recordSound)
                        {
                            // Read remaining sound data and stop sound recording
                            byte[] soundData = soundProvider.Read(true);
                            soundSampleIndex += aviFile.AddSound(soundSampleIndex, soundData, true);
                            soundProvider.Stop();
                        }
                        // Let the thread executing Pause() know that pause is done
                        this.stateTransition.Set();
                        while (this.state == RecordingState.Paused)
                        {
                            Thread.Sleep(pauseDelay);
                        }

                        // State is changed, check new state
                        if (this.state == RecordingState.Idle)
                        {
                            return;
                        }

                        // Resume() is called
                        if (recordSound)
                        {
                            soundProvider.Start();
                            lastSoundRead = DateTime.Now.Ticks;
                        }
                        if (recordDisplay)
                        {
                            startingFrameIndex = frameIndex;
                        }

                        // Reset duration variables
                        startTime       = DateTime.Now.Ticks;
                        currentDuration = TimeSpan.Zero;

                        // Let the thread executing Resume() know that resume is done
                        this.stateTransition.Set();
                    }

                    // Add a video frame
                    if (recordDisplay)
                    {
                        // Render display and add rendered bitmap to the avi file
                        displayProvider.Render();
                        IntPtr pFrameData = displayProvider.Lock();
                        try {
                            aviFile.AddFrame(pFrameData, frameIndex, 1, frameBufferLength);
                        }
                        finally {
                            displayProvider.Unlock();
                        }
                        frameIndex++;
                    }

                    // Add sound
                    if (recordSound)
                    {
                        // Read recorded sound if it's time to do so
                        if ((DateTime.Now.Ticks - lastSoundRead) / ticksPerMs >= soundReadInterval)
                        {
                            // Read sound data
                            SoundFormat sourceFormat = soundProvider.SourceFormat;
                            byte[]      soundData    = soundProvider.Read();
                            int         samplesRead  = (int)(soundData.Length / sourceFormat.BlockAlign);

                            // Get number of out of sync samples
                            TimeSpan durationByNow     = prevDuration + new TimeSpan(DateTime.Now.Ticks - startTime);
                            int      nOutOfSyncSamples = GetOutOfSyncSamples(soundProvider, soundSampleIndex, durationByNow,
                                                                             samplesRead);
                            if (nOutOfSyncSamples > 0)
                            {
                                // Add silence samples if we have fewer samples than expected
                                soundSampleIndex += aviFile.AddSilence(soundSampleIndex, nOutOfSyncSamples);
                            }
                            else if (nOutOfSyncSamples < 0)
                            {
                                // Drop the excess read samples (as far as possible) if we have more samples than expected
                                int nSamplesToKeep = Math.Max(0, samplesRead + nOutOfSyncSamples);
                                if (nSamplesToKeep > 0)
                                {
                                    int    nBytesToKeep     = nSamplesToKeep * sourceFormat.BlockAlign;
                                    int    nBytesToDrop     = soundData.Length - nBytesToKeep;
                                    byte[] droppedSoundData = new byte[nBytesToKeep];
                                    Array.Copy(soundData, nBytesToDrop, droppedSoundData, 0, nBytesToKeep);
                                    soundData = droppedSoundData;
                                }
                                samplesRead = nSamplesToKeep;
                            }
                            // Add sound data to the avi file
                            if (samplesRead > 0)
                            {
                                soundSampleIndex += aviFile.AddSound(soundSampleIndex, soundData, false);
                            }
                            lastSoundRead = DateTime.Now.Ticks;
                        }
                    }

                    // Synchronize display
                    if (recordDisplay)
                    {
                        long delay = (DateTime.Now.Ticks - startTime) / ticksPerMs -
                                     frameDuration * ((frameIndex - startingFrameIndex) - 1);
                        if (delay < frameDuration)
                        {
                            // Extra delay to synchronize with fps
                            Thread.Sleep((int)(frameDuration - delay));
                        }
                        else
                        {
                            // Calculate how many frames are lost
                            int lostFrames = (int)Math.Floor((decimal)delay / frameDuration);
                            frameIndex += lostFrames;
                            // Extra delay to synchronize with fps
                            Thread.Sleep((int)(frameDuration - delay % frameDuration));
                        }
                    }
                    else /* No display recording, just sleep for a while so that sound buffers get filled  */
                    {
                        Thread.Sleep(1);
                    }

                    // Update duration
                    currentDuration = new TimeSpan(DateTime.Now.Ticks - startTime);
                    this.duration   = prevDuration + currentDuration;
                } while (this.state != RecordingState.Idle);

                // Read remaining sound data and stop sound recording
                if (recordSound)
                {
                    byte[] soundData = soundProvider.Read(true);
                    soundSampleIndex += aviFile.AddSound(soundSampleIndex, soundData, true);
                    soundProvider.Stop();
                }
            }
            finally {
                if (recordSound)
                {
                    soundProvider.Close();
                    if (audioEncoder != null)
                    {
                        audioEncoder.Dispose();
                    }
                }
                if (recordDisplay)
                {
                    displayProvider.Close();
                }
                if (aviFile != null)
                {
                    aviFile.Dispose();
                }
            }
        }
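
To make the frame-pacing arithmetic in the loop above easier to follow, here is the same math pulled out on its own with assumed numbers (10 fps, and an iteration that finishes 250 ms after the last written frame's scheduled start):

    // Standalone illustration of the pacing math in RecordPrivate, using assumed
    // numbers; msPerSecond mirrors the constant the recorder already uses.
    const int msPerSecond = 1000;
    int fps = 10;
    int frameDuration = msPerSecond / fps;                         // 100 ms per frame
    long delay = 250;                                              // ms since the last frame's scheduled start
    if (delay < frameDuration)
    {
        // Still inside the current slot: sleep out the remainder (not the case here).
    }
    else
    {
        int lostFrames = (int)(delay / frameDuration);             // 2 frame slots were missed
        int sleep = (int)(frameDuration - delay % frameDuration);  // sleep 50 ms to realign with the next slot
        // frameIndex is advanced by lostFrames so later frames keep their wall-clock timing.
    }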