public bool RetrieveNextFrame(out byte[] audioData, out byte[] videoData)
{
    // Advance to the next frame; fail without moving if we are already at the end.
    int nextId = this.CurrentFrameId + 1;
    if (nextId < 0 || nextId >= this.Frames.Count)
    {
        audioData = null;
        videoData = null;
        return false;
    }

    this.CurrentFrameId = nextId;
    var frame = this.Frames[nextId];

    if (frame.Audio == null)
    {
        audioData = null;
    }
    else
    {
        // VIMA audio decompresses to 16-bit samples: samples * channels * 2 bytes.
        int decompressedSize = frame.Audio.NumSamples * this.AudioHeader.NumChannels * 2;
        audioData = Imc.Vima.Decompress(frame.Audio.Data, decompressedSize);
    }

    if (frame.Video == null)
    {
        videoData = null;
    }
    else
    {
        videoData = Blocky16.Decompress(frame.Video.Data, frame.Video, this.CurrentFrameContext);
    }

    return true;
}
public static byte[] Compress(byte[] input, out byte subcodecId)
{
    if (input == null)
    {
        throw new ArgumentNullException(nameof(input));
    }

    // Encode the frame with the RLE subcodec (5). Each code byte stores the run length
    // in its upper seven bits and the run type in its lowest bit: 1 = repeat run, 0 = literal run.
    subcodecId = 5;

    // Worst case is one code byte per literal byte, i.e. twice the input size.
    byte[] destBuffer = new byte[input.Length * 2];
    int destIndex = 0;

    for (int inputIndex = 0; inputIndex < input.Length;)
    {
        int rleLength = Blocky16.RleLengthEqual(input, inputIndex);
        if (rleLength == 1)
        {
            rleLength = Blocky16.RleLengthDiff(input, inputIndex);
            if (rleLength == 1)
            {
                // Single literal byte: code 0 followed by the value.
                byte value = input[inputIndex++];
                destBuffer[destIndex++] = 0;
                destBuffer[destIndex++] = value;
            }
            else
            {
                // Literal run: emit chunks of at most 0x80 bytes, copied verbatim.
                while (rleLength > 0)
                {
                    byte length = (byte)Math.Min(rleLength, 0x80);
                    rleLength -= length;
                    destBuffer[destIndex++] = (byte)((length - 1) << 1);
                    Array.Copy(input, inputIndex, destBuffer, destIndex, length);
                    inputIndex += length;
                    destIndex += length;
                }
            }
        }
        else
        {
            // Repeat run: emit chunks of at most 0x80 repetitions of the same value.
            byte runValue = input[inputIndex];
            inputIndex += rleLength;
            while (rleLength > 0)
            {
                byte length = (byte)Math.Min(rleLength, 0x80);
                rleLength -= length;
                destBuffer[destIndex++] = (byte)(((length - 1) << 1) | 1);
                destBuffer[destIndex++] = runValue;
            }
        }
    }

    // Pad the result up to a multiple of four bytes.
    byte[] buffer = new byte[(destIndex + 3) & ~3];
    Array.Copy(destBuffer, buffer, destIndex);
    return buffer;
}
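// For reference, a minimal sketch of the matching decoder for the codec-5 stream produced by
// Compress above, derived purely from the encoder's code-byte layout (low bit 1 = repeat run,
// low bit 0 = literal run, run length = (code >> 1) + 1). The name DecompressCodec5 and the
// outputSize parameter are assumptions for illustration; this is not the shipped Blocky16 decoder.
public static byte[] DecompressCodec5(byte[] input, int outputSize)
{
    byte[] output = new byte[outputSize];
    int src = 0;
    int dst = 0;
    while (dst < outputSize)
    {
        byte code = input[src++];
        int length = (code >> 1) + 1;
        if ((code & 1) != 0)
        {
            // Repeat run: one value byte expanded 'length' times.
            byte value = input[src++];
            for (int i = 0; i < length; i++)
            {
                output[dst++] = value;
            }
        }
        else
        {
            // Literal run: copy 'length' bytes verbatim.
            Array.Copy(input, src, output, dst, length);
            src += length;
            dst += length;
        }
    }
    return output;
}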
public static SnmFile FromAviFile(string fileName)
{
    if (!File.Exists(fileName))
    {
        throw new FileNotFoundException();
    }

    var snm = new SnmFile();
    var aviManager = new AviManager(fileName);
    try
    {
        // Audio: require 16-bit stereo at 22050 Hz or 44100 Hz (44100 Hz is downsampled).
        AudioStream audioStream = aviManager.GetWaveStream();
        byte[] audioData = null;
        if (audioStream != null)
        {
            if (audioStream.ChannelsCount != 2 || audioStream.BitsPerSample != 16)
            {
                throw new InvalidDataException();
            }

            if (audioStream.SamplesPerSecond != 22050 && audioStream.SamplesPerSecond != 44100)
            {
                throw new NotSupportedException();
            }

            snm.AudioHeader = new SnmAudioHeader
            {
                Frequency = audioStream.SamplesPerSecond,
                NumChannels = audioStream.ChannelsCount
            };

            audioData = audioStream.GetStreamData();
            if (snm.AudioHeader.Frequency == 44100)
            {
                snm.AudioHeader.Frequency = 22050;
                audioData = SnmFile.ConvertAudio44100To22050(audioData);
            }
        }

        // Video: require 24 or 32 bits per pixel.
        VideoStream videoStream = aviManager.GetVideoStream();
        if (videoStream.BitsPerPixel != 24 && videoStream.BitsPerPixel != 32)
        {
            throw new NotSupportedException();
        }

        snm.Header.FrameDelay = (int)(1000000 / videoStream.FrameRate + 0.5);
        snm.Header.Width = (short)videoStream.Width;
        snm.Header.Height = (short)videoStream.Height;
        snm.Header.NumFrames = (short)videoStream.FramesCount;

        for (int i = 0; i < videoStream.FramesCount; i++)
        {
            snm.VideoHeaders.Add(new SnmVideoHeader
            {
                Width = snm.Header.Width,
                Height = snm.Header.Height
            });
        }

        videoStream.GetFrameOpen();
        try
        {
            // The audio must split evenly across frames: samplesPerFrame * fps == frequency.
            int fps = (1000000 + snm.Header.FrameDelay / 2) / snm.Header.FrameDelay;
            int samplesPerFrame = snm.AudioHeader.Frequency / fps;
            if (samplesPerFrame * fps != snm.AudioHeader.Frequency)
            {
                throw new InvalidDataException();
            }

            for (int i = 0; i < videoStream.FramesCount; i++)
            {
                byte[] videoData = videoStream.GetFrameData(i);
                var frame = new SnmFrame();

                if (audioData != null)
                {
                    // Slice the PCM stream into per-frame chunks (4 bytes per stereo 16-bit sample).
                    int audioPosition = i * samplesPerFrame * 4;
                    int audioLength = Math.Min(samplesPerFrame * 4, audioData.Length - audioPosition);
                    if (audioPosition < audioData.Length && audioLength != 0)
                    {
                        frame.Audio = new SnmAudioFrame
                        {
                            NumSamples = audioLength / 4
                        };

                        byte[] buffer = new byte[audioLength];
                        Array.Copy(audioData, audioPosition, buffer, 0, audioLength);

                        // Raw PCM for now; VIMA compression runs in the parallel pass below.
                        frame.Audio.Data = buffer;
                    }
                }

                if (videoData != null)
                {
                    // Store the raw frame and reuse SubcodecId to carry the source bit depth;
                    // Blocky16 compression runs in the parallel pass below.
                    frame.Video = new SnmVideoFrame
                    {
                        Width = snm.Header.Width,
                        Height = snm.Header.Height,
                        RleOutputSize = snm.Header.Width * snm.Header.Height * 2,
                        SubcodecId = (byte)videoStream.BitsPerPixel,
                        Data = videoData
                    };
                }

                snm.Frames.Add(frame);
            }
        }
        finally
        {
            videoStream.GetFrameClose();
        }
    }
    finally
    {
        aviManager.Close();
    }

    // Compress audio (VIMA) and video (Blocky16) in parallel.
    snm.Frames
        .AsParallel()
        .ForAll(frame =>
        {
            if (frame.Audio != null)
            {
                frame.Audio.Data = Imc.Vima.Compress(frame.Audio.Data, 2);
            }

            if (frame.Video != null)
            {
                byte[] buffer = frame.Video.SubcodecId == 24
                    ? SnmFile.Convert24BppTo16Bpp(frame.Video.Data)
                    : SnmFile.Convert32BppTo16Bpp(frame.Video.Data);

                frame.Video.Data = Blocky16.Compress(buffer, out byte subcodecId);
                frame.Video.SubcodecId = subcodecId;
            }
        });

    return snm;
}
public static SnmFile ConvertRead(string fileName)
{
    if (string.IsNullOrEmpty(fileName))
    {
        throw new ArgumentNullException(nameof(fileName));
    }

    var snm = new SnmFile();

    // Fall back to AVI audio extraction if Media Foundation does not deliver any audio samples.
    byte[] aviAudioDataBytes;
    try
    {
        aviAudioDataBytes = GetAviAudioBytes(fileName);
    }
    catch
    {
        aviAudioDataBytes = null;
    }

    InitializeSourceReader(fileName, out IMFSourceReader reader, out int videoStreamIndex, out int audioStreamIndex);
    var audioData = new MemoryStream();
    try
    {
        GetSourceReaderAttributes(reader, out int width, out int height, out int fps);

        snm.AudioHeader = new SnmAudioHeader
        {
            Frequency = 22050,
            NumChannels = 2
        };

        // Use floating-point division so the frame delay is rounded rather than truncated.
        snm.Header.FrameDelay = (int)(1000000.0 / fps + 0.5);
        snm.Header.Width = (short)width;
        snm.Header.Height = (short)height;
        snm.Header.NumFrames = 0;

        // Drain the audio stream into one contiguous PCM buffer first.
        while (true)
        {
            byte[] bytes = ReadSample(reader, audioStreamIndex, out int streamIndex, out long timestamp);
            if (bytes == null)
            {
                break;
            }

            audioData.Write(bytes, 0, bytes.Length);
        }

        if (audioData.Length == 0 && aviAudioDataBytes != null)
        {
            audioData.Write(aviAudioDataBytes, 0, aviAudioDataBytes.Length);
        }

        // Then read video samples one frame at a time and pair each with its slice of audio.
        while (true)
        {
            byte[] bytes = ReadSample(reader, videoStreamIndex, out int streamIndex, out long timestamp);
            if (bytes == null)
            {
                break;
            }

            snm.VideoHeaders.Add(new SnmVideoHeader
            {
                Width = snm.Header.Width,
                Height = snm.Header.Height
            });

            var frame = new SnmFrame();

            // 4 bytes per stereo 16-bit sample.
            int audioPosition = snm.Header.NumFrames * snm.AudioHeader.Frequency / fps * 4;
            int audioLength = Math.Min(snm.AudioHeader.Frequency / fps * 4, (int)audioData.Length - audioPosition);
            if (audioPosition < audioData.Length && audioLength != 0)
            {
                frame.Audio = new SnmAudioFrame
                {
                    NumSamples = audioLength / 4
                };

                byte[] buffer = new byte[audioLength];
                audioData.Seek(audioPosition, SeekOrigin.Begin);
                audioData.Read(buffer, 0, buffer.Length);
                frame.Audio.Data = buffer;
            }

            // Store the raw 32 bpp frame; Blocky16 compression runs in the parallel pass below.
            frame.Video = new SnmVideoFrame
            {
                Width = snm.Header.Width,
                Height = snm.Header.Height,
                RleOutputSize = snm.Header.Width * snm.Header.Height * 2,
                SubcodecId = 32,
                Data = bytes
            };

            snm.Frames.Add(frame);
            snm.Header.NumFrames++;
        }
    }
    finally
    {
        audioData.Dispose();
        Marshal.ReleaseComObject(reader);
    }

    // Compress audio (VIMA) and video (Blocky16) in parallel.
    snm.Frames
        .AsParallel()
        .ForAll(frame =>
        {
            if (frame.Audio != null)
            {
                frame.Audio.Data = Imc.Vima.Compress(frame.Audio.Data, 2);
            }

            if (frame.Video != null)
            {
                byte[] buffer = Convert32BppTo16Bpp(frame.Video.Data);
                frame.Video.Data = Blocky16.Compress(buffer, out byte subcodecId);
                frame.Video.SubcodecId = subcodecId;
            }
        });

    return snm;
}
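// Hypothetical end-to-end usage sketch, assuming RetrieveNextFrame is a member of SnmFile (as its
// use of Frames, AudioHeader and CurrentFrameContext suggests). "movie.avi" is a placeholder;
// only FromAviFile, ConvertRead and RetrieveNextFrame come from the code above.
var snm = SnmFile.FromAviFile("movie.avi");   // or SnmFile.ConvertRead("movie.avi") for the Media Foundation path
while (snm.RetrieveNextFrame(out byte[] audio, out byte[] video))
{
    // audio: decoded 16-bit PCM for this frame, or null if the frame carries no audio
    // video: decoded 16-bit frame pixels, or null if the frame carries no video
}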