/// <summary>Copies a piece of video and wave sound into a new file.</summary>
/// <param name="newFileName">File name of the new AVI file</param>
/// <param name="startAtSecond">Start copying at second x</param>
/// <param name="stopAtSecond">Stop copying at second y</param>
/// <returns>AviManager for the new video; the caller is responsible for closing it</returns>
public AviManager CopyTo(String newFileName, float startAtSecond, float stopAtSecond) {
    AviManager newFile = new AviManager(newFileName, false);
    try {
        // --- copy the requested frame range of the video stream ---
        VideoStream videoStream = GetVideoStream();
        int startFrameIndex = (int)(videoStream.FrameRate * startAtSecond);
        int stopFrameIndex = (int)(videoStream.FrameRate * stopAtSecond);
        videoStream.GetFrameOpen();
        // The first frame defines the size and format of the new video stream.
        Bitmap bmp = videoStream.GetBitmap(startFrameIndex);
        VideoStream newStream = newFile.AddVideoStream(false, videoStream.FrameRate, bmp);
        for (int n = startFrameIndex + 1; n <= stopFrameIndex; n++) {
            bmp = videoStream.GetBitmap(n);
            newStream.AddFrame(bmp);
        }
        videoStream.GetFrameClose();

        // --- copy the matching byte range of the audio stream ---
        AudioStream waveStream = GetWaveStream();
        Avi.AVISTREAMINFO streamInfo = new Avi.AVISTREAMINFO();
        Avi.PCMWAVEFORMAT streamFormat = new Avi.PCMWAVEFORMAT();
        int streamLength = 0;
        IntPtr ptrRawData = waveStream.GetStreamData(
            ref streamInfo,
            ref streamFormat,
            ref streamLength);

        // bytes per second = samples/sec * channels * bits-per-sample / 8
        int startByteIndex = (int)(startAtSecond *
            (float)(waveStream.CountSamplesPerSecond * streamFormat.nChannels * waveStream.CountBitsPerSample) / 8);
        int stopByteIndex = (int)(stopAtSecond *
            (float)(waveStream.CountSamplesPerSecond * streamFormat.nChannels * waveStream.CountBitsPerSample) / 8);

        // FIX: use 64-bit pointer arithmetic. ToInt32() throws OverflowException
        // for pointers above 2 GB when running as a 64-bit process.
        IntPtr ptrWavePart = new IntPtr(ptrRawData.ToInt64() + startByteIndex);

        byte[] rawData = new byte[stopByteIndex - startByteIndex];
        Marshal.Copy(ptrWavePart, rawData, 0, rawData.Length);
        Marshal.FreeHGlobal(ptrRawData);

        streamInfo.dwLength = rawData.Length;
        streamInfo.dwStart = 0;

        IntPtr unmanagedRawData = Marshal.AllocHGlobal(rawData.Length);
        try {
            Marshal.Copy(rawData, 0, unmanagedRawData, rawData.Length);
            newFile.AddAudioStream(unmanagedRawData, streamInfo, streamFormat, rawData.Length);
        } finally {
            // FIX: release the temporary buffer even if AddAudioStream throws.
            Marshal.FreeHGlobal(unmanagedRawData);
        }
    } catch (Exception) {
        newFile.Close();
        // FIX: rethrow with "throw;" to preserve the original stack trace
        // ("throw ex;" resets it to this frame).
        throw;
    }
    return newFile;
}
/// <summary>
/// Lets the user pick a *.wav file and adds it as an audio stream to the AVI file
/// named in txtAviFileName, starting at the frame selected in numInsertWavePosition.
/// Progress and errors are reported via txtReportSound and message boxes.
/// </summary>
private void btnAddSound_Click(object sender, System.EventArgs e) {
    String fileName = GetFileName("Sounds (*.wav)|*.wav");
    if (fileName == null) {
        return; // user cancelled the file dialog
    }
    // FIX: report the file actually being added instead of the hard-coded "sound.wav".
    txtReportSound.Text = "Adding " + fileName + " to " + txtAviFileName.Text + "...\r\n";
    AviManager aviManager = new AviManager(txtAviFileName.Text, true);
    try {
        int countFrames = aviManager.GetVideoStream().CountFrames;
        if (countFrames > numInsertWavePosition.Value) {
            aviManager.AddAudioStream(fileName, (int)numInsertWavePosition.Value);
        } else {
            // FIX: grammar in the user-facing message ("does not exist", "frames").
            MessageBox.Show(this,
                "Frame " + numInsertWavePosition.Value +
                " does not exist. The video stream contains frames from 0 to " +
                (countFrames - 1) + ".");
        }
    } catch (Exception ex) {
        MessageBox.Show(this,
            "The file does not accept the new wave audio stream.\r\n" + ex.ToString(),
            "Error");
    } finally {
        // FIX: guarantee the AVI file handle is released on every exit path.
        aviManager.Close();
    }
    txtReportSound.Text += "...finished.";
}
/// <summary>Copies a piece of video and wave sound into a new file.</summary>
/// <param name="newFileName">File name of the new AVI file</param>
/// <param name="startAtSecond">Start copying at second x</param>
/// <param name="stopAtSecond">Stop copying at second y</param>
/// <returns>AviManager for the new video; the caller is responsible for closing it</returns>
public AviManager CopyTo(String newFileName, float startAtSecond, float stopAtSecond) {
    AviManager newFile = new AviManager(newFileName, false);
    try {
        // --- copy the requested frame range of the video stream ---
        VideoStream videoStream = GetVideoStream();
        int startFrameIndex = (int)(videoStream.FrameRate * startAtSecond);
        int stopFrameIndex = (int)(videoStream.FrameRate * stopAtSecond);
        videoStream.GetFrameOpen();
        // The first frame defines the size and format of the new video stream.
        Bitmap bmp = videoStream.GetBitmap(startFrameIndex);
        VideoStream newStream = newFile.AddVideoStream(false, videoStream.FrameRate, bmp);
        for (int n = startFrameIndex + 1; n <= stopFrameIndex; n++) {
            bmp = videoStream.GetBitmap(n);
            newStream.AddFrame(bmp);
        }
        videoStream.GetFrameClose();

        // --- copy the matching byte range of the audio stream ---
        AudioStream waveStream = GetWaveStream();
        Avi.AVISTREAMINFO streamInfo = new Avi.AVISTREAMINFO();
        Avi.PCMWAVEFORMAT streamFormat = new Avi.PCMWAVEFORMAT();
        int streamLength = 0;
        IntPtr ptrRawData = waveStream.GetStreamData(
            ref streamInfo,
            ref streamFormat,
            ref streamLength);

        // bytes per second = samples/sec * channels * bits-per-sample / 8
        int startByteIndex = (int)(startAtSecond *
            (float)(waveStream.CountSamplesPerSecond * streamFormat.nChannels * waveStream.CountBitsPerSample) / 8);
        int stopByteIndex = (int)(stopAtSecond *
            (float)(waveStream.CountSamplesPerSecond * streamFormat.nChannels * waveStream.CountBitsPerSample) / 8);

        // FIX: use 64-bit pointer arithmetic. ToInt32() throws OverflowException
        // for pointers above 2 GB when running as a 64-bit process.
        IntPtr ptrWavePart = new IntPtr(ptrRawData.ToInt64() + startByteIndex);

        byte[] rawData = new byte[stopByteIndex - startByteIndex];
        Marshal.Copy(ptrWavePart, rawData, 0, rawData.Length);
        Marshal.FreeHGlobal(ptrRawData);

        streamInfo.dwLength = rawData.Length;
        streamInfo.dwStart = 0;

        IntPtr unmanagedRawData = Marshal.AllocHGlobal(rawData.Length);
        try {
            Marshal.Copy(rawData, 0, unmanagedRawData, rawData.Length);
            newFile.AddAudioStream(unmanagedRawData, streamInfo, streamFormat, rawData.Length);
        } finally {
            // FIX: release the temporary buffer even if AddAudioStream throws.
            Marshal.FreeHGlobal(unmanagedRawData);
        }
    } catch (Exception) {
        newFile.Close();
        // FIX: rethrow with "throw;" to preserve the original stack trace
        // ("throw ex;" resets it to this frame).
        throw;
    }
    return newFile;
}
/// <summary>
/// Command-line entry point. For "-d" it detects the container of the input file
/// by its 4-byte signature (moflex, mods or moc5) and converts video + audio into
/// an AVI file. "-e" is an empty stub; "-h" / anything else prints usage.
/// </summary>
/// <param name="args">args[0] = mode switch, args[1] = input file, optional args[2] = output file.</param>
static void Main(string[] args)
{
    Console.WriteLine("MobiConverter by Gericom");
    Console.WriteLine();
    if (args.Length == 0) { PrintUsage(); return; }
    switch (args[0])
    {
        case "-d":
            {
                // "-d" requires an input file and accepts an optional output name.
                if (args.Length != 2 && args.Length != 3) goto default;
                if (!File.Exists(args[1])) { Console.WriteLine("Error! File not found: " + args[1]); return; }
                // Default output name: input path with the extension replaced by "avi".
                String outfile = (args.Length == 3) ? args[2] : Path.ChangeExtension(args[1], "avi");
                // Peek the first four bytes to identify the container format.
                byte[] sig = new byte[4];
                Stream s = File.OpenRead(args[1]);
                s.Read(sig, 0, 4);
                s.Close();
                if (sig[0] == 0x4C && sig[1] == 0x32 && sig[2] == 0xAA && sig[3] == 0xAB)//moflex
                {
                    Console.WriteLine("Moflex container detected!");
                    Console.Write("Converting: ");
                    Console.CursorVisible = false;
                    MobiclipDecoder ddd = null;                   // video decoder; created on the first video chunk
                    AviManager m = new AviManager(outfile, false);
                    MemoryStream audio = null;                    // accumulates decoded PCM audio bytes
                    FastAudioDecoder[] mFastAudioDecoders = null; // per-channel decoders for codec 0
                    int audiorate = -1;                           // sample rate taken from the first audio chunk
                    int audiochannels = 0;                        // channel count taken from the first audio chunk
                    VideoStream vs = null;                        // AVI video stream; created on the first decoded frame
                    FileStream stream = File.OpenRead(args[1]);
                    var d = new MoLiveDemux(stream);
                    int PlayingVideoStream = -1;                  // stream index of the first video stream seen; others are ignored
                    // The demuxer pushes complete frames through this callback while ReadPacket() is pumped below.
                    d.OnCompleteFrameReceived += delegate(MoLiveChunk Chunk, byte[] Data)
                    {
                        // Video chunk belonging to the selected (first-seen) video stream.
                        if ((Chunk is MoLiveStreamVideo || Chunk is MoLiveStreamVideoWithLayout) && ((PlayingVideoStream == -1) || ((MoLiveStream)Chunk).StreamIndex == PlayingVideoStream))
                        {
                            if (ddd == null)
                            {
                                // Lazily create the decoder with the dimensions announced by the stream.
                                ddd = new MobiclipDecoder(((MoLiveStreamVideo)Chunk).Width, ((MoLiveStreamVideo)Chunk).Height, MobiclipDecoder.MobiclipVersion.Moflex3DS);
                                PlayingVideoStream = ((MoLiveStream)Chunk).StreamIndex;
                            }
                            ddd.Data = Data;
                            ddd.Offset = 0;
                            Bitmap b = ddd.DecodeFrame();
                            // First frame also fixes the AVI frame rate: FpsRate/FpsScale rounded to 3 decimals.
                            if (vs == null) vs = m.AddVideoStream(false, Math.Round(((double)((MoLiveStreamVideo)Chunk).FpsRate) / ((double)((MoLiveStreamVideo)Chunk).FpsScale), 3), b);
                            else vs.AddFrame(b);
                        }
                        else if (Chunk is MoLiveStreamAudio)
                        {
                            if (audio == null)
                            {
                                // First audio chunk determines the output channel count and sample rate.
                                audio = new MemoryStream();
                                audiochannels = (int)((MoLiveStreamAudio)Chunk).Channel;
                                audiorate = (int)((MoLiveStreamAudio)Chunk).Frequency;
                            }
                            switch ((int)((MoLiveStreamAudio)Chunk).CodecId)
                            {
                                case 0://fastaudio
                                    {
                                        if (mFastAudioDecoders == null)
                                        {
                                            // One stateful decoder per channel.
                                            mFastAudioDecoders = new FastAudioDecoder[(int)((MoLiveStreamAudio)Chunk).Channel];
                                            for (int i = 0; i < (int)((MoLiveStreamAudio)Chunk).Channel; i++)
                                            {
                                                mFastAudioDecoders[i] = new FastAudioDecoder();
                                            }
                                        }
                                        List<short>[] channels = new List<short>[(int)((MoLiveStreamAudio)Chunk).Channel];
                                        for (int i = 0; i < (int)((MoLiveStreamAudio)Chunk).Channel; i++)
                                        {
                                            channels[i] = new List<short>();
                                        }
                                        int offset = 0;
                                        // NOTE(review): 40 appears to be the minimum remaining size for one
                                        // round of per-channel packets — confirm against FastAudioDecoder.
                                        int size = 40;
                                        while (offset + size < Data.Length)
                                        {
                                            // Decode one packet per channel, round-robin; the decoder advances Offset.
                                            for (int i = 0; i < (int)((MoLiveStreamAudio)Chunk).Channel; i++)
                                            {
                                                mFastAudioDecoders[i].Data = Data;
                                                mFastAudioDecoders[i].Offset = offset;
                                                channels[i].AddRange(mFastAudioDecoders[i].Decode());
                                                offset = mFastAudioDecoders[i].Offset;
                                            }
                                        }
                                        short[][] channelsresult = new short[(int)((MoLiveStreamAudio)Chunk).Channel][];
                                        for (int i = 0; i < (int)((MoLiveStreamAudio)Chunk).Channel; i++)
                                        {
                                            channelsresult[i] = channels[i].ToArray();
                                        }
                                        // Interleave the planar channel data into PCM16 frames and buffer it.
                                        byte[] result = InterleaveChannels(channelsresult);
                                        audio.Write(result, 0, result.Length);
                                    }
                                    break;
                                case 1://IMA-ADPCM
                                    {
                                        IMAADPCMDecoder[] decoders = new IMAADPCMDecoder[(int)((MoLiveStreamAudio)Chunk).Channel];
                                        List<short>[] channels = new List<short>[(int)((MoLiveStreamAudio)Chunk).Channel];
                                        for (int i = 0; i < (int)((MoLiveStreamAudio)Chunk).Channel; i++)
                                        {
                                            decoders[i] = new IMAADPCMDecoder();
                                            // Prime each decoder with its 4-byte per-channel header.
                                            decoders[i].GetWaveData(Data, 4 * i, 4);
                                            channels[i] = new List<short>();
                                        }
                                        // Payload starts after the per-channel headers; data comes in
                                        // 128-byte blocks interleaved per channel.
                                        int offset = 4 * (int)((MoLiveStreamAudio)Chunk).Channel;
                                        int size = 128 * (int)((MoLiveStreamAudio)Chunk).Channel;
                                        while (offset + size < Data.Length)
                                        {
                                            for (int i = 0; i < (int)((MoLiveStreamAudio)Chunk).Channel; i++)
                                            {
                                                channels[i].AddRange(decoders[i].GetWaveData(Data, offset, 128));
                                                offset += 128;
                                            }
                                        }
                                        short[][] channelsresult = new short[(int)((MoLiveStreamAudio)Chunk).Channel][];
                                        for (int i = 0; i < (int)((MoLiveStreamAudio)Chunk).Channel; i++)
                                        {
                                            channelsresult[i] = channels[i].ToArray();
                                        }
                                        byte[] result = InterleaveChannels(channelsresult);
                                        audio.Write(result, 0, result.Length);
                                    }
                                    break;
                                case 2://PCM16
                                    {
                                        // Already PCM16: copy through, truncated to a whole number
                                        // of sample frames (channels * 2 bytes).
                                        audio.Write(Data, 0, Data.Length - (Data.Length % ((int)((MoLiveStreamAudio)Chunk).Channel * 2)));
                                    }
                                    break;
                            }
                        }
                    };
                    bool left = false; // NOTE(review): unused variable
                    int counter = 0;
                    // Pump the demuxer; frames arrive via the callback registered above.
                    while (true)
                    {
                        uint error = d.ReadPacket();
                        // NOTE(review): 73 is presumably the demuxer's end-of-stream code — confirm in MoLiveDemux.
                        if (error == 73) break;
                        //report progress
                        if (counter == 0)
                        {
                            // Print percentage and rewind the cursor so the next update overwrites it.
                            Console.Write("{0,3:D}%", stream.Position * 100 / stream.Length);
                            Console.CursorLeft -= 4;
                        }
                        counter++;
                        if (counter == 50) counter = 0; // refresh display every 50 packets
                    }
                    if (audio != null)
                    {
                        // Write the buffered PCM16 audio as a single AVI audio stream.
                        byte[] adata = audio.ToArray();
                        audio.Close();
                        var sinfo = new Avi.AVISTREAMINFO();
                        sinfo.fccType = Avi.streamtypeAUDIO;
                        sinfo.dwScale = audiochannels * 2;                  // block align (bytes per sample frame)
                        sinfo.dwRate = audiorate * audiochannels * 2;       // average bytes per second
                        sinfo.dwSampleSize = audiochannels * 2;
                        sinfo.dwQuality = -1;                               // default quality
                        var sinfo2 = new Avi.PCMWAVEFORMAT();
                        sinfo2.wFormatTag = 1;                              // WAVE_FORMAT_PCM
                        sinfo2.nChannels = (short)audiochannels;
                        sinfo2.nSamplesPerSec = audiorate;
                        sinfo2.nAvgBytesPerSec = audiorate * audiochannels * 2;
                        sinfo2.nBlockAlign = (short)(audiochannels * 2);
                        sinfo2.wBitsPerSample = 16;
                        unsafe
                        {
                            // Pin the managed buffer so the native AVI API can read it.
                            fixed (byte* pAData = &adata[0])
                            {
                                m.AddAudioStream((IntPtr)pAData, sinfo, sinfo2, adata.Length);
                            }
                        }
                    }
                    m.Close();
                    stream.Close();
                    Console.WriteLine("Done!");
                    Console.CursorVisible = true;
                }
                else if (sig[0] == 0x4D && sig[1] == 0x4F && sig[2] == 0x44 && sig[3] == 0x53)
                { //mods ("MODS" signature)
                    Console.WriteLine("Mods container detected!");
                    Console.Write("Converting: ");
                    Console.CursorVisible = false;
                    AviManager m = new AviManager(outfile, false);
                    FileStream stream = File.OpenRead(args[1]);
                    ModsDemuxer dm = new ModsDemuxer(stream);
                    MemoryStream audio = null;
                    // Only codecs 1 (Sx) and 3 (IMA-ADPCM) are decoded to PCM here;
                    // allocate the audio buffer only when a usable audio track exists.
                    if ((dm.Header.AudioCodec == 1 || dm.Header.AudioCodec == 3) && dm.Header.NbChannel > 0 && dm.Header.Frequency > 0)
                    {
                        audio = new MemoryStream();
                    }
                    MobiclipDecoder d = new MobiclipDecoder(dm.Header.Width, dm.Header.Height, MobiclipDecoder.MobiclipVersion.ModsDS);
                    VideoStream vs = null;
                    int CurChannel = 0; // audio packets are distributed round-robin over the channels
                    List<short>[] channels = new List<short>[dm.Header.NbChannel];
                    IMAADPCMDecoder[] decoders = new IMAADPCMDecoder[dm.Header.NbChannel];
                    SxDecoder[] sxd = new SxDecoder[dm.Header.NbChannel];
                    FastAudioDecoder[] fad = new FastAudioDecoder[dm.Header.NbChannel];
                    bool[] isinit = new bool[dm.Header.NbChannel]; // per-channel "decoder initialized" flags
                    for (int i = 0; i < dm.Header.NbChannel; i++)
                    {
                        channels[i] = new List<short>();
                        decoders[i] = new IMAADPCMDecoder();
                        sxd[i] = new SxDecoder();
                        fad[i] = new FastAudioDecoder();
                        isinit[i] = false;
                    }
                    int counter = 0;
                    while (true)
                    {
                        uint NrAudioPackets;
                        bool IsKeyFrame;
                        byte[] framedata = dm.ReadFrame(out NrAudioPackets, out IsKeyFrame);
                        if (framedata == null) break; // end of stream
                        d.Data = framedata;
                        d.Offset = 0;
                        Bitmap b = d.DecodeFrame();
                        // NOTE(review): Fps appears to be fixed-point with a 2^24 denominator — confirm in ModsDemuxer.
                        if (vs == null) vs = m.AddVideoStream(false, Math.Round(dm.Header.Fps / (double)0x01000000, 3), b);
                        else vs.AddFrame(b);
                        if (NrAudioPackets > 0 && audio != null)
                        {
                            // Audio data follows the video data inside the same frame;
                            // d.Offset points just past the consumed video bytes.
                            int Offset = d.Offset - 2;
                            // NOTE(review): extra 4-byte skip for TagId 0x334E ("N3") frames with the
                            // high bit set in the first word — purpose not visible here.
                            if (dm.Header.TagId == 0x334E && (IOUtil.ReadU16LE(framedata, 0) & 0x8000) != 0) Offset += 4;
                            if (dm.Header.AudioCodec == 3)
                            { // IMA-ADPCM
                                if (IsKeyFrame)
                                {
                                    // Keyframes reset all per-channel decoder state.
                                    for (int i = 0; i < dm.Header.NbChannel; i++)
                                    {
                                        channels[i] = new List<short>();
                                        decoders[i] = new IMAADPCMDecoder();
                                        sxd[i] = new SxDecoder();
                                        fad[i] = new FastAudioDecoder();
                                        isinit[i] = false;
                                    }
                                }
                                for (int i = 0; i < NrAudioPackets; i++)
                                {
                                    // First packet per channel carries a 4-byte extra header.
                                    channels[CurChannel].AddRange(decoders[CurChannel].GetWaveData(framedata, Offset, 128 + (!isinit[CurChannel] ? 4 : 0)));
                                    Offset += 128 + (!isinit[CurChannel] ? 4 : 0);
                                    isinit[CurChannel] = true;
                                    CurChannel++;
                                    if (CurChannel >= dm.Header.NbChannel) CurChannel = 0;
                                }
                            }
                            else if (dm.Header.AudioCodec == 1)
                            { // Sx codec
                                for (int i = 0; i < NrAudioPackets; i++)
                                {
                                    // Each channel needs its codebook installed once before decoding.
                                    if (!isinit[CurChannel]) sxd[CurChannel].Codebook = dm.AudioCodebooks[CurChannel];
                                    isinit[CurChannel] = true;
                                    sxd[CurChannel].Data = framedata;
                                    sxd[CurChannel].Offset = Offset;
                                    channels[CurChannel].AddRange(sxd[CurChannel].Decode());
                                    Offset = sxd[CurChannel].Offset;
                                    CurChannel++;
                                    if (CurChannel >= dm.Header.NbChannel) CurChannel = 0;
                                }
                            }
                            else if (dm.Header.AudioCodec == 2)
                            { // fastaudio (NOTE(review): unreachable for output — audio buffer is only
                              // allocated for codecs 1 and 3 above)
                                for (int i = 0; i < NrAudioPackets; i++)
                                {
                                    fad[CurChannel].Data = framedata;
                                    fad[CurChannel].Offset = Offset;
                                    channels[CurChannel].AddRange(fad[CurChannel].Decode());
                                    Offset = fad[CurChannel].Offset;
                                    CurChannel++;
                                    if (CurChannel >= dm.Header.NbChannel) CurChannel = 0;
                                }
                            }
                            // Flush only as many samples as every channel has available,
                            // keeping the channels aligned for interleaving.
                            int smallest = int.MaxValue;
                            for (int i = 0; i < dm.Header.NbChannel; i++)
                            {
                                if (channels[i].Count < smallest) smallest = channels[i].Count;
                            }
                            if (smallest > 0)
                            {
                                //Gather samples
                                short[][] samps = new short[dm.Header.NbChannel][];
                                for (int i = 0; i < dm.Header.NbChannel; i++)
                                {
                                    samps[i] = new short[smallest];
                                    channels[i].CopyTo(0, samps[i], 0, smallest);
                                    channels[i].RemoveRange(0, smallest);
                                }
                                byte[] result = InterleaveChannels(samps);
                                audio.Write(result, 0, result.Length);
                            }
                        }
                        //report progress
                        if (counter == 0)
                        {
                            // Print percentage and rewind the cursor so the next update overwrites it.
                            Console.Write("{0,3:D}%", stream.Position * 100 / stream.Length);
                            Console.CursorLeft -= 4;
                        }
                        counter++;
                        if (counter == 50) counter = 0; // refresh display every 50 frames
                    }
                    if (audio != null)
                    {
                        // Write the buffered PCM16 audio as a single AVI audio stream.
                        byte[] adata = audio.ToArray();
                        audio.Close();
                        var sinfo = new Avi.AVISTREAMINFO();
                        sinfo.fccType = Avi.streamtypeAUDIO;
                        sinfo.dwScale = dm.Header.NbChannel * 2;                                // block align (bytes per sample frame)
                        sinfo.dwRate = (int)dm.Header.Frequency * dm.Header.NbChannel * 2;      // average bytes per second
                        sinfo.dwSampleSize = dm.Header.NbChannel * 2;
                        sinfo.dwQuality = -1;                                                   // default quality
                        var sinfo2 = new Avi.PCMWAVEFORMAT();
                        sinfo2.wFormatTag = 1;                                                  // WAVE_FORMAT_PCM
                        sinfo2.nChannels = (short)dm.Header.NbChannel;
                        sinfo2.nSamplesPerSec = (int)dm.Header.Frequency;
                        sinfo2.nAvgBytesPerSec = (int)dm.Header.Frequency * dm.Header.NbChannel * 2;
                        sinfo2.nBlockAlign = (short)(dm.Header.NbChannel * 2);
                        sinfo2.wBitsPerSample = 16;
                        unsafe
                        {
                            // Pin the managed buffer so the native AVI API can read it.
                            fixed (byte* pAData = &adata[0])
                            {
                                m.AddAudioStream((IntPtr)pAData, sinfo, sinfo2, adata.Length);
                            }
                        }
                    }
                    m.Close();
                    stream.Close();
                    Console.WriteLine("Done!");
                    Console.CursorVisible = true;
                    return;
                }
                else if (sig[0] == 0x4D && sig[1] == 0x4F && sig[2] == 0x43 && sig[3] == 0x35)
                { //moc5 ("MOC5" signature)
                    Console.WriteLine("MOC5 container detected!");
                    Console.WriteLine("Error! Not supported yet!");
                    return;
                }
                else
                {
                    Console.WriteLine("Error! Unrecognized format!");
                    return;
                }
                break;
            }
        case "-e":
            {
                // Encoding mode: not implemented.
                break;
            }
        default:
        case "-h":
            PrintUsage();
            return;
    }
}