// Builds the AAC output socket for pull-mode transcoding.
// File and Stream are both left null so the encoded samples are
// pulled programmatically rather than written by the transcoder.
static MediaSocket CreateOutputSocket(Options opt)
{
    var audioInfo = new AudioStreamInfo
    {
        StreamType = StreamType.Aac,
        StreamSubType = StreamSubType.AacRaw
        // You can change the sampling rate and the number of the channels
        //Channels = 1,
        //SampleRate = 44100,
    };

    var outPin = new MediaPin { StreamInfo = audioInfo };

    var socket = new MediaSocket
    {
        File = null,
        Stream = null,
        StreamType = StreamType.Aac,
        StreamSubType = StreamSubType.AacRaw
    };
    socket.Pins.Add(outPin);

    return socket;
}
/// <summary>
/// Describes an elementary video stream: builds the stream info, pin and
/// socket for the given stream type and optional frame geometry.
/// </summary>
/// <param name="streamType">Elementary stream type (e.g. H264).</param>
/// <param name="width">Frame width in pixels; ignored when not positive.</param>
/// <param name="height">Frame height in pixels; ignored when not positive.</param>
/// <param name="fps">Frame rate; ignored when not positive.</param>
public ElementaryStream(StreamType streamType, int width, int height, double fps)
{
    vsi = new VideoStreamInfo();
    pin = new MediaPin();
    socket = new MediaSocket();

    // set video stream properties
    vsi.StreamType = streamType;
    if (width > 0)
    {
        vsi.FrameWidth = width;
    }
    if (height > 0)
    {
        vsi.FrameHeight = height;
    }
    if (fps > 0.0)
    {
        vsi.FrameRate = fps;
    }

    // provide pin with stream info
    pin.StreamInfo = vsi;

    // set socket properties
    // FIX: the socket stream type was hard-coded to StreamType.H264 and
    // ignored the streamType argument; use the caller-supplied type so the
    // socket and its stream info always agree.
    socket.StreamType = streamType;

    // set socket pin
    socket.Pins.Add(pin);
}
// Configures the output socket and attaches PNG watermark-overlay
// parameters (position, alpha, image buffer) to its video pin.
static MediaSocket ConfigureOutputSocket(Options opt)
{
    MediaSocket socket = new MediaSocket();
    socket.File = opt.OutputFile;

    MediaPin pin = new MediaPin();
    socket.Pins.Add(pin);

    VideoStreamInfo vsi = new VideoStreamInfo();
    pin.StreamInfo = vsi;

    // Format descriptor for the watermark image buffer.
    VideoStreamInfo overlayVsi = new VideoStreamInfo();
    overlayVsi.StreamType = StreamType.Png;

    pin.Params.Add(Param.Video.Overlay.Mode, AlphaCompositingMode.Atop);
    pin.Params.Add(Param.Video.Overlay.LocationX, opt.PositionX);
    pin.Params.Add(Param.Video.Overlay.LocationY, opt.PositionY);
    // FIX: alpha parameters are fractional values in [0.0, 1.0]; pass a
    // double (1.0) instead of a boxed int, consistent with how
    // BackgroundAlpha/ForegroundAlpha are set elsewhere in this file.
    pin.Params.Add(Param.Video.Overlay.BackgroundAlpha, 1.0);
    pin.Params.Add(Param.Video.Overlay.ForegroundAlpha, opt.Alpha);
    pin.Params.Add(Param.Video.Overlay.ForegroundBufferFormat, overlayVsi);
    pin.Params.Add(Param.Video.Overlay.ForegroundBuffer, new MediaBuffer(System.IO.File.ReadAllBytes(opt.Watermark)));

    return(socket);
}
// Describes the raw (uncompressed, progressive) video that will be fed
// to the transcoder; no file or stream is attached (push mode).
static MediaSocket CreateInputSocket(Options opt)
{
    var videoInfo = new VideoStreamInfo
    {
        StreamType = StreamType.UncompressedVideo,
        ScanType = ScanType.Progressive,
        FrameWidth = opt.Width,
        FrameHeight = opt.Height,
        ColorFormat = opt.Color.Id,
        FrameRate = opt.Fps
    };

    var inputPin = new MediaPin { StreamInfo = videoInfo };

    var socket = new MediaSocket
    {
        StreamType = StreamType.UncompressedVideo,
        File = null,
        Stream = null
    };
    socket.Pins.Add(inputPin);

    return socket;
}
// Encodes a raw YUV file into the output format selected by the preset.
// Returns true on success; diagnostic output goes through PrintError.
static bool Encode(Options opt)
{
    // Best-effort removal of a previous run's output; failure is ignored.
    try { File.Delete(opt.OutputFile); } catch { }

    using (var transcoder = new Transcoder())
    {
        // Transcoder demo mode must be enabled,
        // in order to use the OEM release for testing (without a valid license).
        transcoder.AllowDemoMode = true;

        // Configure input
        // The input stream frame rate determines the playback speed
        var yuvInfo = new VideoStreamInfo();
        yuvInfo.StreamType = PrimoSoftware.AVBlocks.StreamType.UncompressedVideo;
        yuvInfo.FrameRate = opt.YuvFps;
        yuvInfo.FrameWidth = opt.YuvWidth;
        yuvInfo.FrameHeight = opt.YuvHeight;
        yuvInfo.ColorFormat = opt.YuvColor.Id;
        yuvInfo.ScanType = ScanType.Progressive;

        var yuvPin = new MediaPin();
        yuvPin.StreamInfo = yuvInfo;

        var yuvSocket = new MediaSocket();
        yuvSocket.StreamType = PrimoSoftware.AVBlocks.StreamType.UncompressedVideo;
        yuvSocket.File = opt.YuvFile;
        yuvSocket.Pins.Add(yuvPin);
        transcoder.Inputs.Add(yuvSocket);

        // Configure output from the selected preset
        var presetSocket = MediaSocket.FromPreset(opt.OutputPreset.Name);
        presetSocket.File = opt.OutputFile;
        transcoder.Outputs.Add(presetSocket);

        bool ok = transcoder.Open();
        PrintError("Open Transcoder", transcoder.Error);
        if (!ok)
            return false;

        ok = transcoder.Run();
        PrintError("Run Transcoder", transcoder.Error);
        if (!ok)
            return false;

        transcoder.Close();
    }

    return true;
}
// Builds a transcoder that demuxes the input file, writing each audio
// stream to an "aac" elementary file and each video stream to an "h264"
// elementary file. Returns null when the input cannot be opened.
static Transcoder createTranscoder(Options opt)
{
    var transcoder = new Transcoder() { AllowDemoMode = true };
    using (MediaInfo info = new MediaInfo())
    {
        info.Inputs[0].File = opt.InputFile;
        if (!info.Open())
        {
            PrintError("MediaInfo open: ", info.Error);
            // FIX: dispose the transcoder before bailing out; previously it
            // leaked on this failure path.
            transcoder.Dispose();
            return(null);
        }

        MediaSocket inSocket = MediaSocket.FromMediaInfo(info);
        transcoder.Inputs.Add(inSocket);

        // Per-type counters used to number the output files.
        int vIndex = 0;
        int aIndex = 0;
        for (int i = 0; i < inSocket.Pins.Count; i++)
        {
            int fileIndex;
            string type;
            if (inSocket.Pins[i].StreamInfo.MediaType == MediaType.Audio)
            {
                fileIndex = ++aIndex;
                type = "aac";
            }
            else if (inSocket.Pins[i].StreamInfo.MediaType == MediaType.Video)
            {
                fileIndex = ++vIndex;
                type = "h264";
            }
            else
            {
                // Skip streams that are neither audio nor video.
                continue;
            }

            // One output socket per elementary stream, pin cloned from the input.
            MediaSocket outSocket = new MediaSocket();
            MediaPin pin = (MediaPin)inSocket.Pins[i].Clone();
            outSocket.Pins.Add(pin);
            outSocket.File = GenerateOutputFileName(opt.OutputFile, fileIndex, type);
            File.Delete(outSocket.File);
            transcoder.Outputs.Add(outSocket);
        }

        return(transcoder);
    }
}
// Wraps a single pin in a new socket, copying the pin's stream type,
// sub-type and parameter set onto the socket.
private static MediaSocket SocketFromPin(MediaPin pin)
{
    var clonedPin = (MediaPin)pin.Clone();

    var socket = new MediaSocket
    {
        StreamType = pin.StreamInfo.StreamType,
        StreamSubType = pin.StreamInfo.StreamSubType,
        Params = pin.Params
    };
    socket.Pins.Add(clonedPin);

    return socket;
}
// Attaches image-overlay parameters to the pin: the overlay is composited
// at the top-left corner, fully opaque, using the given image file.
private void SetOverlayParamsToPin(MediaPin pin, string imageOverlay, StreamType imageType)
{
    var overlayFormat = new VideoStreamInfo() { StreamType = imageType };
    var overlayData = new MediaBuffer(File.ReadAllBytes(imageOverlay));

    pin.Params[Param.Video.Overlay.Mode] = AlphaCompositingMode.Atop;
    pin.Params[Param.Video.Overlay.LocationX] = 0; // left
    pin.Params[Param.Video.Overlay.LocationY] = 0; // top
    pin.Params[Param.Video.Overlay.BackgroundAlpha] = 1.0;
    pin.Params[Param.Video.Overlay.ForegroundAlpha] = 1.0;
    pin.Params[Param.Video.Overlay.ForegroundBuffer] = overlayData;
    pin.Params[Param.Video.Overlay.ForegroundBufferFormat] = overlayFormat;
}
// Builds a minimal progressive-video output socket; no file or stream
// type is set here (pull-style usage).
static MediaSocket CreateOutputSocket(Options opt)
{
    var videoInfo = new VideoStreamInfo { ScanType = ScanType.Progressive };
    var outPin = new MediaPin { StreamInfo = videoInfo };

    var socket = new MediaSocket();
    socket.Pins.Add(outPin);

    return socket;
}
// Builds an output socket that writes uncompressed YUV420 video to
// the configured output file.
static MediaSocket CreateOutputSocket(Options opt)
{
    var videoInfo = new VideoStreamInfo
    {
        StreamType = StreamType.UncompressedVideo,
        ColorFormat = ColorFormat.YUV420
    };
    var outPin = new MediaPin { StreamInfo = videoInfo };

    var socket = new MediaSocket
    {
        File = opt.OutputFile,
        StreamType = StreamType.UncompressedVideo
    };
    socket.Pins.Add(outPin);

    return socket;
}
// Builds an output socket that writes an H.264 Annex B elementary
// stream to the configured output file.
static MediaSocket CreateOutputSocket(Options opt)
{
    var videoInfo = new VideoStreamInfo
    {
        StreamType = StreamType.H264,
        StreamSubType = StreamSubType.AvcAnnexB
    };
    var outPin = new MediaPin { StreamInfo = videoInfo };

    var socket = new MediaSocket
    {
        File = opt.OutputFile,
        StreamType = StreamType.H264,
        StreamSubType = StreamSubType.AvcAnnexB
    };
    socket.Pins.Add(outPin);

    return socket;
}
// Builds an output socket that writes a raw AAC (ADIF-free) stream to
// the configured output file.
static MediaSocket CreateOutputSocket(Options opt)
{
    var audioInfo = new AudioStreamInfo
    {
        StreamType = StreamType.Aac,
        StreamSubType = StreamSubType.AacRaw
    };
    var outPin = new MediaPin { StreamInfo = audioInfo };

    var socket = new MediaSocket
    {
        File = opt.OutputFile,
        StreamType = StreamType.Aac,
        StreamSubType = StreamSubType.AacRaw
    };
    socket.Pins.Add(outPin);

    return socket;
}
// Reads the negotiated audio format from the DirectShow sample grabber
// and registers a matching LPCM input socket with the transcoder.
// Throws COMException when the grabber's media type is not uncompressed
// WAVEFORMATEX audio.
static void ConfigureAudioInput(DSGraph graph, Transcoder transcoder)
{
    AMMediaType mt = new AMMediaType();
    int hr;
    try
    {
        hr = graph.audioGrabber.GetConnectedMediaType(mt);
        DsError.ThrowExceptionForHR(hr);

        // Only WAVEFORMATEX-described audio is supported here.
        if ((mt.majorType != DirectShowLib.MediaType.Audio) || (mt.formatType != DirectShowLib.FormatType.WaveEx))
        {
            throw new COMException("Unexpected format type");
        }

        // Marshal the unmanaged WAVEFORMATEX into a managed structure.
        WaveFormatEx wfx = (WaveFormatEx)Marshal.PtrToStructure(mt.formatPtr, typeof(WaveFormatEx));

        // Describe the captured audio to AVBlocks as LPCM.
        AudioStreamInfo audioInfo = new AudioStreamInfo();
        audioInfo.BitsPerSample = wfx.wBitsPerSample;
        audioInfo.Channels = wfx.nChannels;
        audioInfo.SampleRate = wfx.nSamplesPerSec;
        audioInfo.StreamType = StreamType.LPCM;

        MediaSocket inputSocket = new MediaSocket();
        MediaPin inputPin = new MediaPin();
        inputPin.StreamInfo = audioInfo;
        inputSocket.Pins.Add(inputPin);
        inputSocket.StreamType = StreamType.LPCM;

        // Bind the grabber callback to the transcoder input slot about to be added
        // (Inputs.Count is the index the socket will occupy).
        graph.audioGrabberCB.Init(transcoder, transcoder.Inputs.Count, graph.mediaControl);

        transcoder.Inputs.Add(inputSocket);
    }
    finally
    {
        // AMMediaType owns unmanaged memory; always release it.
        DsUtils.FreeAMMediaType(mt);
    }
}
// Describes a JPEG input of the given frame size; no file or stream is
// attached (data is supplied programmatically).
static MediaSocket createInputSocket(int frameWidth, int frameHeight)
{
    var imageInfo = new VideoStreamInfo
    {
        StreamType = StreamType.Jpeg,
        ScanType = ScanType.Progressive,
        FrameWidth = frameWidth,
        FrameHeight = frameHeight
    };
    var inputPin = new MediaPin { StreamInfo = imageInfo };

    var socket = new MediaSocket
    {
        StreamType = StreamType.Jpeg,
        Stream = null,
        File = null
    };
    socket.Pins.Add(inputPin);

    return socket;
}
// Builds an output socket that writes uncompressed progressive YUV420
// frames of the given size to outputFile.
static MediaSocket createOutputSocket(string outputFile, int frameWidth, int frameHeight)
{
    var videoInfo = new VideoStreamInfo
    {
        StreamType = StreamType.UncompressedVideo,
        ScanType = ScanType.Progressive,
        ColorFormat = ColorFormat.YUV420,
        FrameWidth = frameWidth,
        FrameHeight = frameHeight
    };
    var outPin = new MediaPin { StreamInfo = videoInfo };

    var socket = new MediaSocket
    {
        File = outputFile,
        StreamType = StreamType.UncompressedVideo
    };
    socket.Pins.Add(outPin);

    return socket;
}
// Describes a raw YUV file as an AVBlocks media socket. Unknown color
// falls back to YUV420; non-positive dimensions/fps are left unset.
public YUVFile(string file, ColorFormat color, int width, int height, double fps)
{
    // Video stream properties.
    vsi = new VideoStreamInfo
    {
        StreamType = StreamType.UncompressedVideo,
        ScanType = ScanType.Progressive,
        ColorFormat = (color == ColorFormat.Unknown) ? ColorFormat.YUV420 : color
    };

    if (width > 0)
        vsi.FrameWidth = width;
    if (height > 0)
        vsi.FrameHeight = height;
    if (fps > 0.0)
        vsi.FrameRate = fps;

    // Pin carrying the stream description.
    pin = new MediaPin { StreamInfo = vsi };

    // Socket pointing at the YUV file.
    socket = new MediaSocket
    {
        File = file,
        StreamType = StreamType.UncompressedVideo
    };
    socket.Pins.Add(pin);
}
// Encodes a fixed sequence of 250 images into a video ("cube.<ext>") by
// pushing one image per frame at 25 fps. The first image is probed with
// MediaInfo to obtain the input video format. Returns true on success.
static bool Encode(Options opt)
{
    string outFilename = "cube." + opt.FileExtension;
    const int imageCount = 250;
    const double inputFrameRate = 25.0;

    using (var transcoder = new Transcoder())
    {
        // In order to use the OEM release for testing (without a valid license),
        // the transcoder demo mode must be enabled.
        transcoder.AllowDemoMode = true;
        try
        {
            bool result;
            // Best-effort removal of a previous run's output.
            try { File.Delete(outFilename); } catch {}

            // Configure Input
            {
                using (MediaInfo medInfo = new MediaInfo())
                {
                    // Probe the first image to discover the frame format.
                    medInfo.Inputs[0].File = GetImagePath(0);
                    result = medInfo.Open();
                    PrintError("Open MediaInfo", medInfo.Error);
                    if (!result)
                    {
                        return(false);
                    }

                    // Clone the probed stream info and force the push frame rate.
                    VideoStreamInfo vidInfo = (VideoStreamInfo)medInfo.Outputs[0].Pins[0].StreamInfo.Clone();
                    vidInfo.FrameRate = inputFrameRate;

                    MediaPin pin = new MediaPin();
                    pin.StreamInfo = vidInfo;
                    MediaSocket socket = new MediaSocket();
                    socket.Pins.Add(pin);
                    transcoder.Inputs.Add(socket);
                }
            }

            // Configure Output
            {
                MediaSocket socket = MediaSocket.FromPreset(opt.PresetID);
                socket.File = outFilename;
                transcoder.Outputs.Add(socket);
            }

            // Encode Images
            result = transcoder.Open();
            PrintError("Open Transcoder", transcoder.Error);
            if (!result)
            {
                return(false);
            }

            // Push one image per frame; StartTime spaces frames at the input rate.
            for (int i = 0; i < imageCount; i++)
            {
                string imagePath = GetImagePath(i);
                MediaBuffer mediaBuffer = new MediaBuffer(File.ReadAllBytes(imagePath));
                MediaSample mediaSample = new MediaSample();
                mediaSample.StartTime = i / inputFrameRate;
                mediaSample.Buffer = mediaBuffer;
                if (!transcoder.Push(0, mediaSample))
                {
                    PrintError("Push Transcoder", transcoder.Error);
                    return(false);
                }
            }

            // Drain any buffered frames before closing.
            result = transcoder.Flush();
            PrintError("Flush Transcoder", transcoder.Error);
            if (!result)
            {
                return(false);
            }

            transcoder.Close();
            Console.WriteLine("Output video: \"{0}\"", outFilename);
        }
        catch (Exception ex)
        {
            Console.WriteLine(ex.ToString());
            return(false);
        }
    }
    return(true);
}
// Splits the input media file into ~10-second DVD (NTSC 4:3 PCM) parts.
// Pipeline: transcoder1 decodes the input to uncompressed video + LPCM
// audio; samples are pulled from it and pushed into a fresh transcoder2
// per part. Audio samples that straddle a split point are cut at the
// sample boundary and the remainder is queued for the next part.
// Returns true on success.
static bool SplitFile(string inputFile)
{
    string outputFileExt = ".mpg";
    string encodingPreset = Preset.Video.DVD.NTSC_4x3_PCM;
    const double splitPartDuration = 10; // seconds
    int audioStreamIndex = -1;
    int videoStreamIndex = -1;
    int audioFrameSize = 0;   // bytes per LPCM frame (all channels)
    int audioSampleRate = 0;  // LPCM samples per second
    using (var transcoder1 = new Transcoder())
    {
        // In order to use the OEM release for testing (without a valid license) the transcoder demo mode must be enabled.
        transcoder1.AllowDemoMode = true;
        using (var inputInfo = new MediaInfo())
        {
            inputInfo.Inputs[0].File = inputFile;
            if (!inputInfo.Open())
            {
                PrintError("Open MediaInfo", inputInfo.Error);
                return(false);
            }

            // Configure transcoder1 input and output
            var inputSocket = MediaSocket.FromMediaInfo(inputInfo);
            transcoder1.Inputs.Add(inputSocket);

            // Decode the first video stream and the first audio stream only.
            for (int i = 0; i < inputSocket.Pins.Count; i++)
            {
                StreamInfo inputStreamInfo = inputSocket.Pins[i].StreamInfo;
                if ((inputStreamInfo.MediaType == MediaType.Video) && videoStreamIndex < 0)
                {
                    // Decode video to uncompressed YUV420, keeping geometry/aspect.
                    var streamInfo = new VideoStreamInfo();
                    VideoStreamInfo inputVideoStreamInfo = inputStreamInfo as VideoStreamInfo;
                    streamInfo.ColorFormat = ColorFormat.YUV420;
                    streamInfo.StreamType = StreamType.UncompressedVideo;
                    streamInfo.ScanType = inputVideoStreamInfo.ScanType;
                    streamInfo.FrameWidth = inputVideoStreamInfo.FrameWidth;
                    streamInfo.FrameHeight = inputVideoStreamInfo.FrameHeight;
                    streamInfo.DisplayRatioWidth = inputVideoStreamInfo.DisplayRatioWidth;
                    streamInfo.DisplayRatioHeight = inputVideoStreamInfo.DisplayRatioHeight;
                    var outputPin = new MediaPin();
                    outputPin.StreamInfo = streamInfo;
                    var outputSocket = new MediaSocket();
                    outputSocket.Pins.Add(outputPin);
                    outputSocket.StreamType = streamInfo.StreamType;
                    videoStreamIndex = transcoder1.Outputs.Count;
                    transcoder1.Outputs.Add(outputSocket);
                }
                if ((inputStreamInfo.MediaType == MediaType.Audio) && audioStreamIndex < 0)
                {
                    // Decode audio to LPCM, keeping the source sample format.
                    var streamInfo = new AudioStreamInfo();
                    AudioStreamInfo inputAudioStreamInfo = inputStreamInfo as AudioStreamInfo;
                    streamInfo.StreamType = StreamType.LPCM;
                    streamInfo.PcmFlags = inputAudioStreamInfo.PcmFlags;
                    streamInfo.Channels = inputAudioStreamInfo.Channels;
                    streamInfo.SampleRate = inputAudioStreamInfo.SampleRate;
                    streamInfo.BitsPerSample = inputAudioStreamInfo.BitsPerSample;
                    var outputPin = new MediaPin();
                    outputPin.StreamInfo = streamInfo;
                    var outputSocket = new MediaSocket();
                    outputSocket.Pins.Add(outputPin);
                    outputSocket.StreamType = streamInfo.StreamType;
                    audioStreamIndex = transcoder1.Outputs.Count;
                    transcoder1.Outputs.Add(outputSocket);
                    // Cache values needed to compute sample durations and byte offsets.
                    audioFrameSize = inputAudioStreamInfo.Channels * inputAudioStreamInfo.BitsPerSample / 8;
                    audioSampleRate = inputAudioStreamInfo.SampleRate;
                }
            }
        }

        bool res = transcoder1.Open();
        PrintError("Open Transcoder1", transcoder1.Error);
        if (!res)
        {
            return(false);
        }

        var sample = new MediaSample();
        int outputIndex;
        int splitPartNum = 0;
        double splitTime = splitPartDuration;   // absolute time of the next split point
        double partStartTime = 0;               // absolute start time of the current part
        Transcoder transcoder2 = null;          // encoder for the current part
        List <SplitRecord> splitStats = new List <SplitRecord>();
        // Audio samples deferred to the next part (cut remainders and late samples).
        List <MediaSample> audioSamplesQueue = new List <MediaSample>();
        try
        {
            for (; ;)
            {
                // Prefer queued audio that belongs to the current part; otherwise
                // pull the next decoded sample from transcoder1.
                if ((audioSamplesQueue.Count > 0) && (audioSamplesQueue[0].StartTime < splitTime))
                {
                    outputIndex = audioStreamIndex;
                    sample = audioSamplesQueue[0];
                    audioSamplesQueue.RemoveAt(0);
                }
                else
                {
                    if (!transcoder1.Pull(out outputIndex, sample))
                    {
                        break;
                    }
                    if ((outputIndex != audioStreamIndex) && (outputIndex != videoStreamIndex))
                    {
                        continue;
                    }
                }

                if (outputIndex == audioStreamIndex)
                {
                    double sampleDuration = (double)(sample.Buffer.DataSize) / (double)(audioFrameSize * audioSampleRate);
                    if (sample.StartTime >= splitTime)
                    {
                        // Entirely in the next part: defer it.
                        audioSamplesQueue.Add(sample);
                        sample = new MediaSample();
                        continue;
                    }
                    else if ((sample.StartTime + sampleDuration) > splitTime)
                    {
                        // Straddles the split point: keep the first portion in this
                        // part and queue the remainder as a new sample.
                        double sample1Duration = splitTime - sample.StartTime;
                        int sample1BufferSize = (int)(sample1Duration * audioSampleRate) * audioFrameSize;
                        if (sample1BufferSize < sample.Buffer.DataSize)
                        {
                            int buffer2Size = sample.Buffer.DataSize - sample1BufferSize;
                            var buffer2 = new MediaBuffer(new byte[buffer2Size]);
                            buffer2.SetData(0, buffer2Size);
                            Array.Copy(sample.Buffer.Start, sample1BufferSize, buffer2.Start, 0, buffer2Size);
                            var sample2 = new MediaSample();
                            sample2.StartTime = sample.StartTime + sample1Duration;
                            sample2.Buffer = buffer2;
                            // Truncate the current sample to the first portion.
                            if (sample1BufferSize > 0)
                            {
                                sample.Buffer.SetData(sample.Buffer.DataOffset, sample1BufferSize);
                            }
                            else
                            {
                                sample.Buffer.SetData(0, 0);
                            }
                            audioSamplesQueue.Add(sample2);
                        }
                    }
                }

                // Start a new part on the first sample, or when a video sample
                // reaches the split point (epsilon guards float rounding).
                if ((transcoder2 == null) || ((sample.StartTime + 0.0001 >= splitTime) && (outputIndex == videoStreamIndex)))
                {
                    if (transcoder2 != null)
                    {
                        transcoder2.Flush();
                        transcoder2.Close();
                        transcoder2.Dispose();
                    }

                    SplitRecord splitStat = new SplitRecord();
                    splitStat.StartTime = splitTime;
                    splitStat.StartTimeActual = sample.StartTime;
                    splitPartNum += 1;
                    splitTime = splitPartNum * splitPartDuration;
                    partStartTime = sample.StartTime;

                    transcoder2 = new Transcoder();
                    transcoder2.AllowDemoMode = true;

                    // Configure transcoder2 input and output
                    {
                        // Inputs mirror transcoder1's decoded outputs one-to-one.
                        for (int i = 0; i < transcoder1.Outputs.Count; i++)
                        {
                            var streamInfo = transcoder1.Outputs[i].Pins[0].StreamInfo.Clone() as StreamInfo;
                            var pin = new MediaPin();
                            pin.StreamInfo = streamInfo;
                            var socket = new MediaSocket();
                            socket.Pins.Add(pin);
                            socket.StreamType = streamInfo.StreamType;
                            transcoder2.Inputs.Add(socket);
                        }
                        var outputSocket = MediaSocket.FromPreset(encodingPreset);
                        string fileName = GenerateOutputFileName(inputFile, splitPartNum) + outputFileExt;
                        string filePath = Path.Combine(GetExeDir(), fileName);
                        try { File.Delete(filePath); } catch { }
                        outputSocket.File = filePath;
                        transcoder2.Outputs.Add(outputSocket);
                        splitStat.FileName = fileName;
                    }

                    // Close out the previous part's record with this part's start.
                    if (splitStats.Count > 0)
                    {
                        SplitRecord lastRecord = splitStats[splitStats.Count - 1];
                        lastRecord.EndTime = splitStat.StartTime;
                        lastRecord.EndTimeActual = splitStat.StartTimeActual;
                    }
                    splitStats.Add(splitStat);

                    res = transcoder2.Open();
                    PrintError("Open Transcoder2", transcoder2.Error);
                    if (!res)
                    {
                        return(false);
                    }
                }

                // Track the running end time of the current part.
                if ((splitStats.Count > 0))
                {
                    SplitRecord lastRecord = splitStats[splitStats.Count - 1];
                    lastRecord.EndTime = sample.StartTime;
                    lastRecord.EndTimeActual = lastRecord.EndTime;
                }

                // Rebase the sample time to the current part before encoding.
                if (sample.StartTime >= 0)
                {
                    sample.StartTime = sample.StartTime - partStartTime;
                }

                res = transcoder2.Push(outputIndex, sample);
                if (!res)
                {
                    PrintError("Push Transcoder2", transcoder2.Error);
                    return(false);
                }
            }
        }
        finally
        {
            // Always finish and release the last part's encoder.
            if (transcoder2 != null)
            {
                transcoder2.Flush();
                transcoder2.Close();
                transcoder2.Dispose();
                transcoder2 = null;
            }
        }

        // Pull ends either at end-of-stream (expected) or on a real error.
        if ((transcoder1.Error.Facility != ErrorFacility.Codec) || (transcoder1.Error.Code != (int)CodecError.EOS))
        {
            PrintError("Pull Transcoder1", transcoder1.Error);
            return(false);
        }

        transcoder1.Close();

        // print split stats
        Console.WriteLine();
        foreach (var record in splitStats)
        {
            Console.WriteLine("{0} start: {1} end: {2} act. start: {3} act. end: {4}", record.FileName, FormatTime(record.StartTime), FormatTime(record.EndTime), FormatTime(record.StartTimeActual), FormatTime(record.EndTimeActual));
        }
        Console.WriteLine();
    }
    return(true);
}
// Selects the audio and video pins of the first output socket and fills
// in any unset output properties (zero / Unknown) from the matching
// input streams. Returns false when no output exists or either pin is
// missing; in that case the out parameters may be null.
private bool ConfigurePins(out MediaPin audioPin, out MediaPin videoPin)
{
    audioPin = null;
    videoPin = null;
    if (Outputs.Count == 0)
    {
        return(false);
    }

    audioPin = SelectPin(Outputs[0], MediaType.Audio);
    videoPin = SelectPin(Outputs[0], MediaType.Video);
    if (audioPin == null || videoPin == null)
    {
        return(false);
    }

    var audioIn = Inputs[_audioInputIndex].Pins[0].StreamInfo as AudioStreamInfo;
    var audioOut = audioPin.StreamInfo as AudioStreamInfo;
    var videoIn = Inputs[_videoInputIndex].Pins[0].StreamInfo as VideoStreamInfo;
    var videoOut = videoPin.StreamInfo as VideoStreamInfo;

    // Video defaults: copy from input whenever the output left them unset.
    // FIX: the ScanType fallback was duplicated (checked both before and
    // after the other video properties); check it once.
    if (videoOut.ScanType == ScanType.Unknown)
    {
        videoOut.ScanType = videoIn.ScanType;
    }
    if (videoOut.FrameHeight == 0)
    {
        videoOut.FrameHeight = videoIn.FrameHeight;
    }
    if (videoOut.FrameWidth == 0)
    {
        videoOut.FrameWidth = videoIn.FrameWidth;
    }
    if (videoOut.FrameRate == 0)
    {
        videoOut.FrameRate = videoIn.FrameRate;
    }
    if (videoOut.DisplayRatioHeight == 0)
    {
        videoOut.DisplayRatioHeight = videoIn.DisplayRatioHeight;
    }
    if (videoOut.DisplayRatioWidth == 0)
    {
        videoOut.DisplayRatioWidth = videoIn.DisplayRatioWidth;
    }

    // Audio defaults.
    if (audioOut.Channels == 0)
    {
        audioOut.Channels = audioIn.Channels;
    }
    if (audioOut.SampleRate == 0)
    {
        audioOut.SampleRate = audioIn.SampleRate;
    }
    if (audioOut.BitsPerSample == 0)
    {
        audioOut.BitsPerSample = audioIn.BitsPerSample;
    }
    return(true);
}
// Muxes the given AAC audio files and H.264 video files (each inside an
// MP4 container) into a single MP4 output without re-encoding.
// Returns true on success.
static bool MP4Mux(Options opt)
{
    // Best-effort removal of a previous run's output.
    try { File.Delete(opt.OutputFile); } catch (Exception) { }

    using (var transcoder = new Transcoder())
    {
        // Transcoder demo mode must be enabled,
        // in order to use the production release for testing (without a valid license)
        transcoder.AllowDemoMode = true;

        MediaSocket outputSocket = new MediaSocket();
        outputSocket.File = opt.OutputFile;
        outputSocket.StreamType = StreamType.Mp4;

        // audio
        // One output pin and one MP4 input socket per audio file.
        for (int i = 0; i < (int)opt.AudioFiles.Length; i++)
        {
            MediaPin outputPin = new MediaPin();
            AudioStreamInfo asi = new AudioStreamInfo();
            asi.StreamType = StreamType.Aac;
            outputPin.StreamInfo = asi;
            outputSocket.Pins.Add(outputPin);

            MediaSocket inputSocket = new MediaSocket();
            inputSocket.File = opt.AudioFiles[i];
            inputSocket.StreamType = StreamType.Mp4;
            transcoder.Inputs.Add(inputSocket);
            Console.WriteLine("Muxing audio input: {0}", opt.AudioFiles[i]);
        }

        // video
        // One output pin and one MP4 input socket per video file.
        for (int i = 0; i < (int)opt.VideoFiles.Length; i++)
        {
            MediaPin outputPin = new MediaPin();
            VideoStreamInfo vsi = new VideoStreamInfo();
            vsi.StreamType = StreamType.H264;
            outputPin.StreamInfo = vsi;
            outputSocket.Pins.Add(outputPin);

            MediaSocket inputSocket = new MediaSocket();
            inputSocket.File = opt.VideoFiles[i];
            inputSocket.StreamType = StreamType.Mp4;
            transcoder.Inputs.Add(inputSocket);
            Console.WriteLine("Muxing video input: {0}", opt.VideoFiles[i]);
        }

        transcoder.Outputs.Add(outputSocket);

        if (!transcoder.Open())
        {
            PrintError("Open Transcoder", transcoder.Error);
            return(false);
        }
        if (!transcoder.Run())
        {
            PrintError("Run Transcoder", transcoder.Error);
            return(false);
        }
        transcoder.Close();
        Console.WriteLine("Output file: {0}", opt.OutputFile);
        return(true);
    }
}
// Encodes a raw YUV file into the preset-selected output format using
// System.IO streams (rather than file paths) for both input and output.
// Returns true on success.
static bool Encode(Options opt)
{
    // Best-effort removal of a previous run's output.
    try { File.Delete(opt.OutputFile); } catch { }

    System.IO.Stream inputStream = null;
    System.IO.Stream outputStream = null;
    try
    {
        inputStream = new System.IO.FileStream(opt.YuvFile, FileMode.Open, FileAccess.Read, FileShare.Read);
        outputStream = new System.IO.FileStream(opt.OutputFile, FileMode.Create, FileAccess.Write, FileShare.None);

        using (var transcoder = new Transcoder())
        {
            // In order to use the OEM release for testing (without a valid license) the transcoder demo mode must be enabled.
            transcoder.AllowDemoMode = true;

            // Configure input
            var instream = new VideoStreamInfo
            {
                FrameRate = opt.YuvFps, // the input frame rate determines how fast the video is played
                FrameWidth = opt.YuvWidth,
                FrameHeight = opt.YuvHeight,
                ColorFormat = opt.YuvColor.Id,
                StreamType = PrimoSoftware.AVBlocks.StreamType.UncompressedVideo,
                ScanType = ScanType.Progressive
            };

            var inpin = new MediaPin
            {
                StreamInfo = instream
            };

            // Input is read from the open stream, not from a file path.
            var insocket = new MediaSocket
            {
                Stream = inputStream,
                StreamType = PrimoSoftware.AVBlocks.StreamType.UncompressedVideo
            };

            insocket.Pins.Add(inpin);
            transcoder.Inputs.Add(insocket);

            // Configure output: preset socket writing to the open output stream.
            var outsocket = MediaSocket.FromPreset(opt.OutputPreset.Name);
            outsocket.Stream = outputStream;
            transcoder.Outputs.Add(outsocket);

            bool res = transcoder.Open();
            PrintError("Open Transcoder", transcoder.Error);
            if (!res)
            {
                return(false);
            }

            res = transcoder.Run();
            PrintError("Run Transcoder", transcoder.Error);
            if (!res)
            {
                return(false);
            }

            transcoder.Close();
        }
    }
    finally
    {
        // Release both file streams regardless of outcome.
        if (inputStream != null)
        {
            inputStream.Dispose();
            inputStream = null;
        }
        if (outputStream != null)
        {
            outputStream.Dispose();
            outputStream = null;
        }
    }
    return(true);
}
// Reads the negotiated video format from the DirectShow sample grabber
// and registers a matching uncompressed-video input socket with the
// transcoder. Throws COMException when the grabber's media type is not
// VIDEOINFOHEADER-described video.
static void ConfigureVideoInput(DSGraph graph, Transcoder transcoder)
{
    AMMediaType mt = new AMMediaType();
    int hr;
    try
    {
        hr = graph.videoGrabber.GetConnectedMediaType(mt);
        DsError.ThrowExceptionForHR(hr);

        // Only VIDEOINFOHEADER-described video is supported here.
        if ((mt.majorType != DirectShowLib.MediaType.Video) || (mt.formatType != DirectShowLib.FormatType.VideoInfo))
        {
            throw new COMException("Unexpected format type");
        }

        // Marshal the unmanaged VIDEOINFOHEADER into a managed structure.
        VideoInfoHeader vih = (VideoInfoHeader)Marshal.PtrToStructure(mt.formatPtr, typeof(VideoInfoHeader));

        VideoStreamInfo videoInfo = new VideoStreamInfo();
        // AvgTimePerFrame is in 100-ns units; convert to frames per second.
        if (vih.AvgTimePerFrame > 0)
        {
            videoInfo.FrameRate = (double)10000000 / vih.AvgTimePerFrame;
        }

        videoInfo.Bitrate = 0;
        // BmiHeader.Height can be negative for top-down DIBs; use its magnitude.
        videoInfo.FrameHeight = Math.Abs(vih.BmiHeader.Height);
        videoInfo.FrameWidth = vih.BmiHeader.Width;
        videoInfo.DisplayRatioWidth = videoInfo.FrameWidth;
        videoInfo.DisplayRatioHeight = videoInfo.FrameHeight;
        videoInfo.ColorFormat = Util.GetColorFormat(ref mt.subType);
        videoInfo.Duration = 0;
        videoInfo.StreamType = StreamType.UncompressedVideo;
        videoInfo.ScanType = ScanType.Progressive;

        // RGB DIBs with positive height are stored bottom-up; flag that so
        // the transcoder reads the rows in the right order.
        switch (videoInfo.ColorFormat)
        {
            case ColorFormat.BGR32:
            case ColorFormat.BGRA32:
            case ColorFormat.BGR24:
            case ColorFormat.BGR444:
            case ColorFormat.BGR555:
            case ColorFormat.BGR565:
                videoInfo.FrameBottomUp = (vih.BmiHeader.Height > 0);
                break;
        }

        MediaSocket inputSocket = new MediaSocket();
        MediaPin inputPin = new MediaPin();
        inputPin.StreamInfo = videoInfo;
        inputSocket.Pins.Add(inputPin);
        inputSocket.StreamType = StreamType.UncompressedVideo;

        // Bind the grabber callback to the transcoder input slot about to be added.
        graph.videoGrabberCB.Init(transcoder, transcoder.Inputs.Count, graph.mediaControl);

        transcoder.Inputs.Add(inputSocket);
    }
    finally
    {
        // AMMediaType owns unmanaged memory; always release it.
        DsUtils.FreeAMMediaType(mt);
    }
}
// Remuxes the input file into a fresh MP4: the first H.264 video stream
// and the first AAC audio stream are copied (no re-encode); all other
// streams and duplicates are disabled. Optionally enables MP4 fast
// start (moov atom up front). Returns true on success.
static bool MP4Remux(Options opt)
{
    // Best-effort removal of a previous run's output.
    try { File.Delete(opt.OutputFile); } catch (Exception) { }

    bool audioStreamDetected = false;
    bool videoStreamDetected = false;

    using (var transcoder = new Transcoder())
    {
        // Transcoder demo mode must be enabled,
        // in order to use the production release for testing (without a valid license).
        transcoder.AllowDemoMode = true;

        // configure inputs
        using (MediaInfo info = new MediaInfo())
        {
            info.Inputs[0].File = opt.InputFile;
            if (!info.Open())
            {
                PrintError("mediaInfo.Open", info.Error);
                return(false);
            }

            MediaSocket inputSocket = MediaSocket.FromMediaInfo(info);
            info.Close();

            // Keep only the first H264 and the first AAC pin; disable the rest.
            for (int i = 0; i < inputSocket.Pins.Count; i++)
            {
                MediaPin pin = inputSocket.Pins[i];
                if (pin.StreamInfo.StreamType == StreamType.H264)
                {
                    if (videoStreamDetected)
                    {
                        pin.Connection = PinConnection.Disabled;
                    }
                    else
                    {
                        videoStreamDetected = true;
                        Console.WriteLine("Muxing video input: {0}", opt.InputFile);
                    }
                }
                else if (pin.StreamInfo.StreamType == StreamType.Aac)
                {
                    if (audioStreamDetected)
                    {
                        pin.Connection = PinConnection.Disabled;
                    }
                    else
                    {
                        audioStreamDetected = true;
                        Console.WriteLine("Muxing audio input: {0}", opt.InputFile);
                    }
                }
                else
                {
                    pin.Connection = PinConnection.Disabled;
                }
            }
            transcoder.Inputs.Add(inputSocket);
        }

        // Configure output
        {
            MediaSocket socket = new MediaSocket();
            socket.File = opt.OutputFile;
            socket.StreamType = StreamType.Mp4;

            // Output pins mirror whichever streams were detected above.
            if (videoStreamDetected)
            {
                VideoStreamInfo streamInfo = new VideoStreamInfo();
                streamInfo.StreamType = StreamType.H264;
                streamInfo.StreamSubType = StreamSubType.Avc1;
                MediaPin pin = new MediaPin();
                pin.StreamInfo = streamInfo;
                socket.Pins.Add(pin);
            }
            if (audioStreamDetected)
            {
                AudioStreamInfo streamInfo = new AudioStreamInfo();
                streamInfo.StreamType = StreamType.Aac;
                streamInfo.StreamSubType = StreamSubType.AacMp4;
                MediaPin pin = new MediaPin();
                pin.StreamInfo = streamInfo;
                socket.Pins.Add(pin);
            }
            // Fast start moves the moov atom to the file front for streaming.
            if (opt.FastStart)
            {
                socket.Params.Add(Param.Muxer.MP4.FastStart, 1);
            }
            transcoder.Outputs.Add(socket);
        }

        bool res = transcoder.Open();
        PrintError("Open Transcoder", transcoder.Error);
        if (!res)
        {
            return(false);
        }

        res = transcoder.Run();
        PrintError("Run Transcoder", transcoder.Error);
        if (!res)
        {
            return(false);
        }

        transcoder.Close();
    }
    return(true);
}
// Builds the uncompressed-video output socket for decoding. Missing
// options fall back to YUV420 color and, for the file name, to a
// "decoded_<WxH>.yuv" file next to the executable, sized either from
// the options or from the source stream info.
static public MediaSocket InitOutputSocket(Options opt, StreamInfo streamInfo)
{
    var outSocket = new MediaSocket();
    var outPin = new MediaPin();
    var outVsi = new VideoStreamInfo();

    // Color format: default to YUV420 when unspecified or unknown.
    if (opt.Color != null)
    {
        outVsi.ColorFormat = (opt.Color.Id == ColorFormat.Unknown) ? ColorFormat.YUV420 : opt.Color.Id;
    }
    else
    {
        outVsi.ColorFormat = ColorFormat.YUV420;
    }

    if (opt.Height > 0)
    {
        outVsi.FrameHeight = opt.Height;
    }
    if (opt.Width > 0)
    {
        outVsi.FrameWidth = opt.Width;
    }
    if (opt.Fps > 0)
    {
        outVsi.FrameRate = opt.Fps;
    }

    outVsi.StreamType = StreamType.UncompressedVideo;
    outVsi.ScanType = ScanType.Progressive;
    outPin.StreamInfo = outVsi;
    outSocket.Pins.Add(outPin);

    // Derive a default output file next to the executable when none was given.
    string exeDir = Path.GetDirectoryName(System.Reflection.Assembly.GetExecutingAssembly().Location);
    if (opt.OutputFile == null)
    {
        // FIX: build paths with Path.Combine instead of hard-coded "\\"
        // concatenation (portable and avoids separator bugs).
        if (opt.Height > 0 && opt.Width > 0)
        {
            opt.OutputFile = Path.Combine(exeDir, "decoded_" + opt.FrameSize + ".yuv");
        }
        else
        {
            // Fall back to the source stream's dimensions.
            VideoStreamInfo vsi = (VideoStreamInfo)streamInfo;
            opt.OutputFile = Path.Combine(exeDir, "decoded_" + vsi.FrameWidth + "x" + vsi.FrameHeight + ".yuv");
        }
    }

    outSocket.File = opt.OutputFile;
    outSocket.StreamType = StreamType.UncompressedVideo;
    return(outSocket);
}
// Encodes a folder of still images into MPEG-2 DVD video. Each image is
// shown for videoFramesPerImage frames; 'step' selects every Nth image.
// Only DVD MP2 presets are accepted (they determine the frame rate:
// 25 fps PAL or 29.97 fps NTSC). Returns true on success.
static bool EncodeImagesToMpeg2Video(string imagesFolder, string outputFile, int imagesCount, int videoFramesPerImage, int step, string encodingPreset)
{
    // Frame rate is dictated by the DVD preset; anything else is unsupported.
    double frameRate = -1;
    switch (encodingPreset)
    {
        case Preset.Video.DVD.PAL_4x3_MP2:
        case Preset.Video.DVD.PAL_16x9_MP2:
            frameRate = 25.0;
            break;
        case Preset.Video.DVD.NTSC_4x3_MP2:
        case Preset.Video.DVD.NTSC_16x9_MP2:
            frameRate = 30000.0 / 1001; // 29.97
            break;
        default:
            return(false);
    }

    Transcoder transcoder = new Transcoder();
    // In order to use the OEM release for testing (without a valid license) the transcoder demo mode must be enabled.
    transcoder.AllowDemoMode = true;
    try
    {
        File.Delete(outputFile);

        // Configure Input
        {
            // Probe the first image to discover the input video format.
            MediaInfo info = new MediaInfo();
            info.InputFile = GetImagePath(imagesFolder, 0);
            if (!info.Load())
            {
                PrintError("MediaInfo load", info.Error);
                return(false);
            }

            MediaPin pin = new MediaPin();
            VideoStreamInfo vInfo = (VideoStreamInfo)info.Streams[0];
            vInfo.FrameRate = frameRate;
            pin.StreamInfo = vInfo;

            MediaSocket socket = new MediaSocket();
            socket.Pins.Add(pin);
            transcoder.Inputs.Add(socket);
        }

        // Configure Output
        {
            MediaSocket socket = MediaSocket.FromPreset(encodingPreset);
            socket.File = outputFile;
            transcoder.Outputs.Add(socket);
        }

        // Encode Images
        if (!transcoder.Open())
        {
            PrintError("Transcoder open", transcoder.Error);
            return(false);
        }

        // Push each image repeatedly; StartTime places it on the frame grid.
        int totalVideoFrames = imagesCount * videoFramesPerImage;
        for (int i = 0; i < totalVideoFrames; i++)
        {
            string imagePath = GetImagePath(imagesFolder, i / videoFramesPerImage * step);
            MediaBuffer mediaBuffer = new MediaBuffer(File.ReadAllBytes(imagePath));
            MediaSample mediaSample = new MediaSample();
            mediaSample.Buffer = mediaBuffer;
            mediaSample.StartTime = i / frameRate;
            if (!transcoder.Push(0, mediaSample))
            {
                PrintError("Transcoder write", transcoder.Error);
                return(false);
            }
        }

        // Drain buffered frames before closing.
        if (!transcoder.Flush())
        {
            PrintError("Transcoder flush", transcoder.Error);
            return(false);
        }

        transcoder.Close();
    }
    catch (Exception ex)
    {
        Console.WriteLine(ex.ToString());
        return(false);
    }
    finally
    {
        transcoder.Dispose();
    }
    return(true);
}
// Opens the given media file and configures the transcoder to decode it to
// renderable streams: uncompressed BGR24 progressive video (downscaled to
// fit the primary screen if needed) and 16-bit LPCM audio (mono/stereo).
// Returns false if the file cannot be analyzed or the transcoder fails to open.
public bool Open(string filePath)
{
    Close();

    if (!ConfigureStreams(filePath))
    {
        Close();
        return (false);
    }

    _transcoder = new Transcoder();

    // In order to use the OEM release for testing (without a valid license) the transcoder demo mode must be enabled.
    _transcoder.AllowDemoMode = true;

    // Configure input
    {
        using (MediaInfo mediaInfo = new MediaInfo())
        {
            mediaInfo.Inputs[0].File = filePath;

            if (!(mediaInfo.Open()))
            {
                // FIX: release the transcoder created above, matching the
                // other failure paths in this method.
                Close();
                return (false);
            }

            MediaSocket socket = MediaSocket.FromMediaInfo(mediaInfo);
            _transcoder.Inputs.Add(socket);
        }
    }

    // Configure video output
    if (_videoStreamInfo != null)
    {
        _videoStreamInfo.ColorFormat = ColorFormat.BGR24;
        _videoStreamInfo.FrameBottomUp = true; // bottom-up rows, as GDI/DIB rendering expects
        _videoStreamInfo.StreamType = StreamType.UncompressedVideo;
        _videoStreamInfo.ScanType = ScanType.Progressive;

        MediaPin pin = new MediaPin();

        int displayWidth = Screen.PrimaryScreen.Bounds.Width;
        int displayHeight = Screen.PrimaryScreen.Bounds.Height;

        if ((_videoStreamInfo.FrameWidth > displayWidth) || ((_videoStreamInfo.FrameHeight > displayHeight)))
        {
            // The frame is larger than the screen: scale it down so that both
            // dimensions fit, preserving the video's display aspect ratio.
            double displayAspect = (double)displayWidth / (double)displayHeight;
            double videoAspect = (double)_videoStreamInfo.DisplayRatioWidth / (double)_videoStreamInfo.DisplayRatioHeight;

            int width;
            int height;

            // BUGFIX: the two branches were swapped. When the video is
            // relatively taller than the screen (videoAspect < displayAspect)
            // the HEIGHT must be the constrained dimension; the old code set
            // width = displayWidth, making height = displayWidth / videoAspect
            // exceed displayHeight (and vice versa for the other branch).
            if (videoAspect < displayAspect)
            {
                height = displayHeight;
                width = (int)(displayHeight * videoAspect);
            }
            else
            {
                width = displayWidth;
                height = (int)(displayWidth / videoAspect);
            }

            // Pad both dimensions to a multiple of 2 (calculatePadding
            // presumably returns the pixels needed to reach the next
            // multiple — TODO confirm against its definition).
            width += calculatePadding(width, 2);
            height += calculatePadding(height, 2);

            _videoStreamInfo.FrameWidth = width;
            _videoStreamInfo.FrameHeight = height;

            {
                pin.Params.Add(Param.Video.Resize.InterpolationMethod, PrimoSoftware.AVBlocks.InterpolationMethod.Linear);
            }
        }

        // provide pin with stream info
        pin.StreamInfo = _videoStreamInfo;

        MediaSocket socket = new MediaSocket();
        socket.StreamType = StreamType.UncompressedVideo;
        socket.Pins.Add(pin);

        // Remember which output index carries video so samples can be routed.
        _videoStreamIndex = _transcoder.Outputs.Count;
        _transcoder.Outputs.Add(socket);
    }

    // Configure audio output
    if (_audioStreamInfo != null)
    {
        _audioStreamInfo.BitsPerSample = 16;

        // WinMM audio render supports only mono and stereo
        if (_audioStreamInfo.Channels > 2)
        {
            _audioStreamInfo.Channels = 2;
        }

        _audioStreamInfo.StreamType = StreamType.LPCM;

        MediaPin pin = new MediaPin();
        pin.StreamInfo = _audioStreamInfo;

        MediaSocket socket = new MediaSocket();
        socket.StreamType = StreamType.LPCM;
        socket.Pins.Add(pin);

        _audioStreamIndex = _transcoder.Outputs.Count;
        _transcoder.Outputs.Add(socket);
    }

    if (!_transcoder.Open())
    {
        Close();
        return (false);
    }

    return (true);
}
// Builds the AVBlocks transcoder graph from the negotiated DirectShow capture
// media types: one LPCM audio input pin (if audio capture is enabled), one
// video input pin (MJPEG or uncompressed), and an output socket built from
// the preset selected in the UI. Returns false if any capture format is
// unsupported or the transcoder fails to open.
// NOTE(review): ms presumably holds the current capture-session state
// (transcoder, DirectShow media types) — confirm against its declaration.
private bool ConfigureTranscoder()
{
    ms.transcoder = new CompositeTranscoder();
    ms.transcoder.AllowDemoMode = true;

    // set audio input pin if audio is not disabled
    if (ms.audioInput != null)
    {
        // Only uncompressed WaveEx audio from the capture graph is accepted.
        if ((ms.audioType.majorType != DirectShowLib.MediaType.Audio) || (ms.audioType.formatType != DirectShowLib.FormatType.WaveEx))
        {
            return (false);
        }

        // Marshal the WAVEFORMATEX block referenced by the media type.
        WaveFormatEx wfx = new WaveFormatEx();
        Marshal.PtrToStructure(ms.audioType.formatPtr, wfx);

        if (wfx.wFormatTag != 1) // WAVE_FORMAT_PCM
        {
            return (false);
        }

        // Describe the raw PCM stream to AVBlocks.
        AudioStreamInfo audioInfo = new AudioStreamInfo();
        audioInfo.BitsPerSample = wfx.wBitsPerSample;
        audioInfo.Channels = wfx.nChannels;
        audioInfo.SampleRate = wfx.nSamplesPerSec;
        audioInfo.StreamType = StreamType.LPCM;

        MediaSocket inputSocket = new MediaSocket();
        MediaPin inputPin = new MediaPin();
        inputPin.StreamInfo = audioInfo;
        inputSocket.Pins.Add(inputPin);
        inputSocket.StreamType = StreamType.LPCM;

        // The audio callback must push samples to this input's index.
        m_audioCB.StreamNumber = ms.transcoder.Inputs.Count;
        ms.transcoder.Inputs.Add(inputSocket);
    }

    // set video input pin
    {
        // Only VIDEOINFOHEADER-formatted video from the capture graph is accepted.
        if ((ms.videoType.majorType != DirectShowLib.MediaType.Video) || (ms.videoType.formatType != DirectShowLib.FormatType.VideoInfo))
        {
            return (false);
        }

        VideoStreamInfo videoInfo = new VideoStreamInfo();

        // Marshal the VIDEOINFOHEADER block referenced by the media type.
        VideoInfoHeader vih = (VideoInfoHeader)Marshal.PtrToStructure(ms.videoType.formatPtr, typeof(VideoInfoHeader));

        if (vih.AvgTimePerFrame > 0)
        {
            // AvgTimePerFrame is in 100-nanosecond units; convert to fps.
            videoInfo.FrameRate = (double)10000000 / vih.AvgTimePerFrame;
        }

        videoInfo.Bitrate = 0; //vih.BitRate;
        // BITMAPINFOHEADER height may be negative for top-down frames.
        videoInfo.FrameHeight = Math.Abs(vih.BmiHeader.Height);
        videoInfo.FrameWidth = vih.BmiHeader.Width;
        // Assume square pixels: display ratio equals the frame dimensions.
        videoInfo.DisplayRatioWidth = videoInfo.FrameWidth;
        videoInfo.DisplayRatioHeight = videoInfo.FrameHeight;
        videoInfo.ScanType = ScanType.Progressive;
        videoInfo.Duration = 0; // live capture: unbounded

        if (ms.videoType.subType == MediaSubType.MJPG)
        {
            videoInfo.StreamType = StreamType.Mjpeg;
            videoInfo.ColorFormat = ColorFormat.YUV422;
        }
        else
        {
            videoInfo.StreamType = StreamType.UncompressedVideo;
            // Map the DirectShow subtype GUID to an AVBlocks color format.
            videoInfo.ColorFormat = Util.GetColorFormat(ref ms.videoType.subType);
        }

        // unsupported capture format
        if (videoInfo.ColorFormat == ColorFormat.Unknown)
        {
            return (false);
        }

        switch (videoInfo.ColorFormat)
        {
            // For RGB formats a positive BITMAPINFOHEADER height means the
            // rows are stored bottom-up (standard DIB layout).
            case ColorFormat.BGR32:
            case ColorFormat.BGRA32:
            case ColorFormat.BGR24:
            case ColorFormat.BGR444:
            case ColorFormat.BGR555:
            case ColorFormat.BGR565:
                videoInfo.FrameBottomUp = (vih.BmiHeader.Height > 0);
                break;
        }

        MediaSocket inputSocket = new MediaSocket();
        MediaPin inputPin = new MediaPin();
        inputPin.StreamInfo = videoInfo;
        inputSocket.Pins.Add(inputPin);
        inputSocket.StreamType = StreamType.UncompressedVideo;

        // The video callback must push samples to this input's index.
        m_videoCB.StreamNumber = ms.transcoder.Inputs.Count;
        ms.transcoder.Inputs.Add(inputSocket);
    }

    // Output is built entirely from the preset chosen in the UI combo box.
    PresetDescriptor preset = comboPresets.SelectedItem as PresetDescriptor;

    MediaSocket outputSocket;
    outputSocket = Util.MediaSocketFromPreset(preset.Name);
    outputSocket.File = txtOutput.Text;

    SetRealTimeVideoMode(outputSocket);

    ms.transcoder.Outputs.Add(outputSocket);

    ms.transcoder.AudioLog = txtAudioLog.Text;

    if (!ms.transcoder.Open())
    {
        MessageBox.Show(ms.transcoder.Error.Message + "\n" + ms.transcoder.Error.Hint, "Transcoder Open Error", MessageBoxButtons.OK, MessageBoxIcon.Error);
        return (false);
    }

    return (true);
}
static bool DecodeH264Stream(Options opt) { // Create an input socket from file MediaSocket inSocket = new MediaSocket(); inSocket.File = opt.InputFile; // Create an output socket with one YUV 4:2:0 video pin VideoStreamInfo outStreamInfo = new VideoStreamInfo(); outStreamInfo.StreamType = StreamType.UncompressedVideo; outStreamInfo.ColorFormat = ColorFormat.YUV420; outStreamInfo.ScanType = ScanType.Progressive; MediaPin outPin = new MediaPin(); outPin.StreamInfo = outStreamInfo; MediaSocket outSocket = new MediaSocket(); outSocket.StreamType = StreamType.UncompressedVideo; outSocket.Pins.Add(outPin); // Create Transcoder using (var transcoder = new Transcoder()) { transcoder.AllowDemoMode = true; transcoder.Inputs.Add(inSocket); transcoder.Outputs.Add(outSocket); if (transcoder.Open()) { DeleteFile(opt.OutputFile); int inputIndex; MediaSample yuvFrame = new MediaSample(); int frameCounter = 0; using (System.IO.FileStream outfile = System.IO.File.OpenWrite(opt.OutputFile)) { while (transcoder.Pull(out inputIndex, yuvFrame)) { // Each call to Transcoder::pull returns a raw YUV 4:2:0 frame. outfile.Write(yuvFrame.Buffer.Start, yuvFrame.Buffer.DataOffset, yuvFrame.Buffer.DataSize); ++frameCounter; } PrintError("Transcoder pull", transcoder.Error); Console.WriteLine("Frames decoded: {0}", frameCounter); Console.WriteLine("Output file: {0}", opt.OutputFile); outfile.Close(); } transcoder.Close(); return(true); } PrintError("Transcoder open", transcoder.Error); return(false); } }
static bool ReEncode(Options opt) { if (File.Exists(opt.OutputFile)) { File.Delete(opt.OutputFile); } using (var transcoder = new Transcoder()) { // In order to use the production release for testing (without a valid license), // the transcoder demo mode must be enabled. transcoder.AllowDemoMode = true; using (var mediaInfo = new MediaInfo()) { mediaInfo.Inputs[0].File = opt.InputFile; if (!mediaInfo.Open()) { PrintError("Open MediaInfo", mediaInfo.Error); return(false); } // Add Inputs { var socket = MediaSocket.FromMediaInfo(mediaInfo); transcoder.Inputs.Add(socket); } } // Add Outputs { // Create output socket var socket = new MediaSocket(); var inSocket = transcoder.Inputs[0]; socket.StreamType = inSocket.StreamType; socket.File = opt.OutputFile; // Add pins with ReEncode parameter set to Use.On foreach (var inPin in inSocket.Pins) { StreamInfo si = (StreamInfo)inPin.StreamInfo.Clone(); var pin = new MediaPin(); pin.StreamInfo = (StreamInfo)si.Clone(); if ((MediaType.Video == si.MediaType) && opt.ReEncodeVideo) { pin.Params.Add(Param.ReEncode, Use.On); } if ((MediaType.Audio == si.MediaType) && opt.ReEncodeAudio) { pin.Params.Add(Param.ReEncode, Use.On); } socket.Pins.Add(pin); } transcoder.Outputs.Add(socket); } bool result = transcoder.Open(); PrintError("Open Transcoder", transcoder.Error); if (!result) { return(false); } result = transcoder.Run(); PrintError("Run Transcoder", transcoder.Error); if (!result) { return(false); } transcoder.Close(); } return(true); }