// Transcodes a minimal payload (a single BT_STOP marker) through a writer of
// type W and verifies the writer was invoked; when the writer also reports the
// schema type, verifies it matches Foo.
void TypedTranscoderTest <W>() where W : IWriterTest, new()
{
    var transcoder = new Transcoder <CompactBinaryReader <InputBuffer>, W>(typeof(Foo));
    var payload = new[] { (byte)BondDataType.BT_STOP };
    var reader = new CompactBinaryReader <InputBuffer>(new InputBuffer(payload));
    var writer = new W();

    transcoder.Transcode(reader, writer);

    Assert.IsTrue(writer.WasCalled);
    if (writer is IWriterTypeTest typedWriter)
    {
        Assert.IsTrue(typedWriter.Type == typeof(Foo));
    }
}
// Shared RNG. Fix: the original created a new Random per call, which can yield
// identical seeds (and therefore identical codes) on rapid successive calls.
private static readonly Random _codeRandom = new Random();

/// <summary>
/// Generates one voucher code of the form XXXX-XXXX-XXXXX, derived from a
/// timestamp randomly offset 1..3600 minutes past DateTime.MinValue.
/// </summary>
/// <returns>The formatted voucher code.</returns>
public string GenerateCodes()
{
    // Add one to avoid an offset of 0 minutes.
    int rand = _codeRandom.Next(3600) + 1;
    DateTime timeValue = DateTime.MinValue.AddMinutes(rand);

    byte[] b = System.BitConverter.GetBytes(timeValue.Ticks);
    // NOTE(review): assumes Base32Encode always yields at least 13 characters — confirm.
    string voucherCode = Transcoder.Base32Encode(b);

    return string.Format("{0}-{1}-{2}",
        voucherCode.Substring(0, 4),
        voucherCode.Substring(4, 4),
        voucherCode.Substring(8, 5));
}
/// <summary>
/// Disposes the transcoder (if any) and resets all decoding state to defaults.
/// </summary>
public void Close()
{
    _transcoder?.Dispose();
    _transcoder = null;

    _decoderThread = null;
    _videoStreamInfo = null;
    _audioStreamInfo = null;
    _videoStreamIndex = -1;
    _audioStreamIndex = -1;
    _decoderEOS = false;
    _cancellationPending = false;
}
/// <summary>
/// Extracts the value from the GetCidByName response payload.
/// Returns 0 when the payload is empty or decoding fails.
/// </summary>
public override uint?GetValue()
{
    if (Data.Length > 0)
    {
        try
        {
            var buffer = Data;
            ReadExtras(buffer.Span);
            // NOTE(review): decodes 4 bytes at fixed offset 31 — presumably the
            // value's position within the GetCidByName response; confirm against
            // the protocol specification.
            return(Transcoder.Decode <uint>(buffer.Slice(31, 4), Flags, OpCode.GetCidByName));
        }
        catch (Exception e)
        {
            // Record the failure on the operation and report a client-side error.
            Exception = e;
            HandleClientError(e.Message, ResponseStatus.ClientFailure);
        }
    }
    return(0u);
}
/// <summary>
/// Dequeues the next transcode job and runs it. When the queue is empty,
/// resets shared state and returns.
/// </summary>
private void ProcessQueue()
{
    TranscodeContext context;
    // Only queue access and the `transcoding` flag are guarded by the lock;
    // the transcode itself runs outside it.
    lock (queue)
    {
        if (queue.Count == 0)
        {
            Reset();
            return;
        }
        context = queue.Dequeue();
        transcoding = true;
    }
    // NOTE(review): current_context is presumably read elsewhere while the
    // transcode runs — it is deliberately set before TranscodeTrack starts.
    current_context = context;
    UserJob.Status = String.Format("{0} - {1}", context.Track.ArtistName, context.Track.TrackTitle);
    Transcoder.TranscodeTrack(context.Track, context.OutUri, context.Config);
}
/// <summary>
/// Extracts an updated bucket configuration from the response body when the
/// server reports the vBucket belongs to another server; otherwise returns null.
/// </summary>
public virtual IBucketConfig GetConfig()
{
    IBucketConfig config = null;
    if (GetResponseStatus() == ResponseStatus.VBucketBelongsToAnotherServer)
    {
        // The config replaces the normal body: it starts right after the extras.
        var offset = HeaderLength + Header.ExtrasLength;
        var length = Header.BodyLength - Header.ExtrasLength;

        //Override any flags settings since the body of the response has changed to a config
        config = Transcoder.Decode <BucketConfig>(Data.ToArray(), offset, length,
                                                  new Flags
        {
            Compression = Compression.None,
            DataFormat = DataFormat.Json,
            TypeCode = TypeCode.Object
        });
    }
    return(config);
}
/// <summary>
/// Application entry point: configures the console and thread pool, subscribes
/// to transcoding events, then runs the transcoder until the Quit event fires.
/// </summary>
/// <param name="args">The args (unused).</param>
public static void Main(string[] args)
{
    Console.CancelKeyPress += new ConsoleCancelEventHandler(ConsoleClosingHandler);

    // Fix: resolve version info once instead of calling GetVersionInfo twice.
    var versionInfo = FileVersionInfo.GetVersionInfo(Assembly.GetExecutingAssembly().Location);
    Console.Title = string.Format("{0} {1}", versionInfo.ProductName, versionInfo.ProductVersion);

    // Hoist the invariant thread-count computation used for both pool limits.
    int maxThreads = Environment.ProcessorCount + Settings.Default.MaxThreadsOffset;
    ThreadPool.SetMaxThreads(maxThreads, maxThreads);

    EventManager.Instance.TranscodingChanged += TranscodingChangedHandler;
    EventManager.Instance.TranscodingStarted += TranscodingStartedHandler;
    EventManager.Instance.TranscodingFinished += TranscodingFinishedHandler;

    Console.WriteLine("Waiting for work...");
    using (Transcoder transcoder = new Transcoder())
    {
        transcoder.Start(Settings.Default.LogPath,
                         Settings.Default.InputPath,
                         Settings.Default.OutputPath,
                         Settings.Default.DeleteInputWhenFinished,
                         Settings.Default.ClearOutputOnStartup,
                         Settings.Default.LookForNewFilesOnStartup);
        // Block until the quit event is signaled (e.g. by the Ctrl+C handler).
        Program.Quit.WaitOne();
    }
}
/// <summary>
/// Decodes the operation body into a T; returns default(T) when the operation
/// failed, returned no data, or decoding threw.
/// </summary>
public virtual T GetValue()
{
    if (!Success || Data == null)
    {
        return default(T);
    }

    var result = default(T);
    try
    {
        var buffer = Data.ToArray();
        ReadExtras(buffer);
        result = Transcoder.Decode <T>(buffer, BodyOffset, TotalLength - BodyOffset, Flags);
    }
    catch (Exception e)
    {
        // Record the failure on the operation and report a client-side error.
        Exception = e;
        HandleClientError(e.Message);
    }
    return result;
}
/// <summary>
/// Decodes the response body into a T; returns default(T) on failure, empty
/// payload, or decode error.
/// </summary>
public virtual T GetValue()
{
    var result = default(T);
    if (Success && Data != null && Data.Length > 0)
    {
        try
        {
            var buffer = Data.ToArray();
            ReadExtras(buffer);
            // NOTE(review): 24 is presumably the fixed binary-protocol header
            // size; the body starts after the key and extras — confirm against
            // the protocol spec.
            var offset = 24 + Header.KeyLength + Header.ExtrasLength;
            result = Transcoder.Decode <T>(buffer, offset, TotalLength - offset, Flags, OperationCode);
        }
        catch (Exception e)
        {
            // Record the failure on the operation and report a client-side error.
            Exception = e;
            HandleClientError(e.Message, ResponseStatus.ClientFailure);
        }
    }
    return(result);
}
/// <summary>
/// Captures the probed details of a media stream along with the transcoder
/// that produced them and any error message recorded during probing.
/// </summary>
internal VideoDetails(int[] supportedAudioBitRates, int frameRate, Size frameSize,
                      int bitRate, int audioSamplesPerSecond, int audioAverageBytesPerSecond,
                      string videoEncoding, string audioEncoding,
                      Transcoder transcoder, string errorMessage)
{
    // Video characteristics.
    this.FrameRate = frameRate;
    this.FrameSize = frameSize;
    this.BitRate = bitRate;
    this.VideoEncoding = videoEncoding;

    // Audio characteristics.
    this.SupportedAudioBitRates = supportedAudioBitRates;
    this.AudioSamplesPerSecond = audioSamplesPerSecond;
    this.AudioAverageBytesPerSecond = audioAverageBytesPerSecond;
    this.AudioEncoding = audioEncoding;

    // Provenance.
    this.Transcoder = transcoder;
    this.ErrorMessage = errorMessage;
}
/// <summary>
/// Runs a transcode that applies the configured overlay to the input file and
/// writes the result to opt.OutputFile. Returns true on success.
/// </summary>
static bool ApplyOverlay(Options opt)
{
    DeleteFile(opt.OutputFile);

    // create Transcoder
    using (Transcoder transcoder = new Transcoder())
    {
        transcoder.AllowDemoMode = true;

        var inputSocket = new MediaSocket { File = opt.InputFile };
        transcoder.Inputs.Add(inputSocket);
        transcoder.Outputs.Add(ConfigureOutputSocket(opt));

        if (!transcoder.Open())
        {
            PrintError("Transcoder open", transcoder.Error);
            return false;
        }

        if (!transcoder.Run())
        {
            PrintError("Transcoder run", transcoder.Error);
            return false;
        }

        transcoder.Close();
    }

    Console.WriteLine("Output: " + opt.OutputFile);
    return true;
}
/// <summary>
/// Decodes the response body into a T; returns default(T) when the payload is
/// empty or decoding fails.
/// </summary>
public virtual T GetValue()
{
    var result = default(T);
    if (Data.Length > 0)
    {
        try
        {
            var buffer = Data;
            ReadExtras(buffer.Span);
            // The body is whatever follows the header/extras/key prefix.
            var bodyOffset = Header.BodyOffset;
            var bodyLength = Header.TotalLength - bodyOffset;
            result = Transcoder.Decode <T>(buffer.Slice(bodyOffset, bodyLength), Flags, OpCode);
        }
        catch (Exception e)
        {
            // Record the failure on the operation and report a client-side error.
            Exception = e;
            HandleClientError(e.Message, ResponseStatus.ClientFailure);
        }
    }
    return result;
}
/// <summary>
/// Reads the negotiated audio format from the DirectShow audio grabber and
/// registers a matching LPCM input socket with the transcoder.
/// </summary>
static void ConfigureAudioInput(DSGraph graph, Transcoder transcoder)
{
    AMMediaType mt = new AMMediaType();
    int hr;
    try
    {
        hr = graph.audioGrabber.GetConnectedMediaType(mt);
        DsError.ThrowExceptionForHR(hr);

        // Only uncompressed WaveEx-format audio is supported here.
        if ((mt.majorType != DirectShowLib.MediaType.Audio) ||
            (mt.formatType != DirectShowLib.FormatType.WaveEx))
        {
            throw new COMException("Unexpected format type");
        }

        WaveFormatEx wfx = (WaveFormatEx)Marshal.PtrToStructure(mt.formatPtr, typeof(WaveFormatEx));

        AudioStreamInfo audioInfo = new AudioStreamInfo();
        audioInfo.BitsPerSample = wfx.wBitsPerSample;
        audioInfo.Channels = wfx.nChannels;
        audioInfo.SampleRate = wfx.nSamplesPerSec;
        audioInfo.StreamType = StreamType.LPCM;

        MediaSocket inputSocket = new MediaSocket();
        MediaPin inputPin = new MediaPin();
        inputPin.StreamInfo = audioInfo;
        inputSocket.Pins.Add(inputPin);
        inputSocket.StreamType = StreamType.LPCM;

        // Wire the grabber callback to feed samples into this transcoder input index.
        graph.audioGrabberCB.Init(transcoder, transcoder.Inputs.Count, graph.mediaControl);
        transcoder.Inputs.Add(inputSocket);
    }
    finally
    {
        // AMMediaType holds unmanaged memory; always free it.
        DsUtils.FreeAMMediaType(mt);
    }
}
/// <summary>
/// Computes the size in bytes this fragment would occupy after transcoding to
/// the given encoding. Returns the cached Size when no transcoding is needed.
/// </summary>
/// <param name="dest_encoding">Target encoding constant (VTDNav.FORMAT_*).</param>
/// <returns>Output length in bytes.</returns>
public int getSize(int dest_encoding)
{
    if (vn.encoding == dest_encoding)
    {
        return(Size);
    }
    // NOTE(review): src_encoding is assigned but never used.
    int src_encoding = vn.encoding;
    byte[] ba = vn.getXML().getBytes();
    // l packs the fragment offset (low 32 bits) and length (high 32 bits),
    // mirroring the (int)l / (int)(l >> 32) split used in toBytes().
    int len = Transcoder.getOutLength(ba, (int)l, (int)(l >> 32), vn.encoding, dest_encoding);
    if (stLen != 0)
    {
        // Namespace compensation: add the transcoded size of each recorded
        // name/value token pair plus fixed separator overhead (4 bytes for
        // single-byte destination encodings, 8 for UTF-16 destinations).
        for (int i = 0; i < fib.size_Renamed_Field; i++)
        {
            int k = fib.intAt(i);
            if (vn.encoding < VTDNav.FORMAT_UTF_16BE)
            {
                len += Transcoder.getOutLength(ba, vn.getTokenOffset(k), (vn.getTokenLength(k) & 0xffff), vn.encoding, dest_encoding)
                     + Transcoder.getOutLength(ba, vn.getTokenOffset(k + 1), vn.getTokenLength(k + 1), vn.encoding, dest_encoding)
                     + ((dest_encoding < VTDNav.FORMAT_UTF_16BE) ? 4 : 8);
            }
            else
            {
                // UTF-16 source: token offsets/lengths are doubled to byte units.
                len += Transcoder.getOutLength(ba, vn.getTokenOffset(k) << 1, (vn.getTokenLength(k) & 0xffff) << 1, vn.encoding, dest_encoding)
                     + Transcoder.getOutLength(ba, vn.getTokenOffset(k + 1) << 1, vn.getTokenLength(k + 1) << 1, vn.encoding, dest_encoding)
                     + ((dest_encoding < VTDNav.FORMAT_UTF_16BE) ? 4 : 8);
            }
        }
    }
    return(len);
}
/// <summary>
/// CLI entry point: converts a .NET dll to JavaScript. Returns a non-zero exit
/// code on bad arguments or when the output files cannot be written.
/// </summary>
static int Main(string[] args)
{
    Environment.CurrentDirectory = Path.GetDirectoryName(Assembly.GetExecutingAssembly().Location);

    bool verbose = false;
    string inFilename = null;
    string outFilename = null;
    var p = new OptionSet
    {
        { "v", "Verbose", v => verbose = v != null },
        { "in=", "Input dll file", s => inFilename = s },
        { "out=", "Output JavaScript file. Will be overwritten if already exists", s => outFilename = s },
    };

    var r = p.Parse(args);
    // Unrecognized arguments or missing in/out: print usage and fail.
    if (inFilename == null || outFilename == null || r.Any())
    {
        Console.WriteLine("Cil2JsCon");
        Console.WriteLine("Convert .NET library to JavaScript");
        Console.WriteLine();
        Console.WriteLine("Options:");
        p.WriteOptionDescriptions(Console.Out);
        return(1);
    }

    var jsResult = Transcoder.ToJs(inFilename, verbose);
    var typeMapString = jsResult.TypeMap.ToString();
    try
    {
        File.WriteAllText(outFilename, jsResult.Js, Encoding.UTF8);
        File.WriteAllText(outFilename + ".typemap", typeMapString);
    }
    catch (Exception e)
    {
        Console.WriteLine("Error:");
        Console.WriteLine(e);
        // Fix: previously fell through to `return 0` (success) even when the
        // output could not be written.
        return(1);
    }
    return(0);
}
/// <summary>
/// Encodes a raw YUV input to H.264. Returns false (after printing the
/// transcoder error) if open or run fails.
/// </summary>
static bool EncodeYuvToH264(Options opt)
{
    DeleteFile(opt.OutputFile);

    MediaSocket inSocket = CreateInputSocket(opt);
    MediaSocket outSocket = CreateOutputSocket(opt);

    // create Transcoder
    using (Transcoder transcoder = new Transcoder())
    {
        transcoder.AllowDemoMode = true;
        transcoder.Inputs.Add(inSocket);
        transcoder.Outputs.Add(outSocket);

        bool res = transcoder.Open();
        PrintError("Transcoder open", transcoder.Error);
        if (!res)
        {
            return(false);
        }

        res = transcoder.Run();
        PrintError("Transcoder run", transcoder.Error);
        if (!res)
        {
            return(false);
        }

        transcoder.Close();
        PrintError("Transcoder close", transcoder.Error);
        // Fix: the original re-checked `res` after Close(), but `res` could not
        // have changed since the Run() check above — the branch was dead code.
    }
    return(true);
}
/// <summary>
/// Streams <paramref name="input"/> through <paramref name="transcoder"/> and
/// returns new content that reads the transcoded bytes, preserving headers.
/// </summary>
private static async Task <HttpContent> Transcode(HttpContent input, Transcoder transcoder)
{
    var pipe = new Pipe();
    var reader = PipeReader.Create(await input.ReadAsStreamAsync());

    // Fire-and-forget pump. Fix: the original let a transcoder exception escape
    // unobserved and never completed the pipe, leaving the consumer waiting
    // forever. Completing the reader/writer with the exception faults the
    // consumer's stream instead.
    _ = Task.Run(async() =>
    {
        Exception error = null;
        try
        {
            await transcoder(reader, pipe.Writer);
        }
        catch (Exception e)
        {
            error = e;
        }
        reader.Complete(error);
        pipe.Writer.Complete(error);
    });

    var output = new StreamContent(pipe.Reader.AsStream());
    foreach (var(key, value) in input.Headers)
    {
        output.Headers.Add(key, value);
    }
    return(output);
}
/// <summary>
/// Decodes the body of a successful response into a BucketConfig; returns null
/// on failure, empty payload, or decode error.
/// </summary>
public override BucketConfig GetValue()
{
    if (!Success || Data.Length == 0)
    {
        return null;
    }

    BucketConfig bucketConfig = null;
    try
    {
        var buffer = Data;
        ReadExtras(buffer.Span);
        // The config occupies everything after the header/extras prefix.
        var start = Header.BodyOffset;
        bucketConfig = Transcoder.Decode <BucketConfig>(buffer.Slice(start, TotalLength - start), Flags, OpCode);
    }
    catch (Exception e)
    {
        // Record the failure on the operation and report a client-side error.
        Exception = e;
        HandleClientError(e.Message, ResponseStatus.ClientFailure);
    }
    return bucketConfig;
}
/// <summary>
/// Transcodes a recorded session into a video with a leaderboard overlay.
/// </summary>
/// <param name="gameDataFile">Path to the recorded overlay/session data.</param>
/// <param name="videoBitRate">Target video bit rate.</param>
/// <param name="destFile">Destination video file.</param>
/// <param name="highlights">Whether to render the highlights variant.</param>
/// <param name="progressReporter">Receives (current, total) progress updates.</param>
/// <param name="isAborted">Polled to allow cancellation.</param>
/// <param name="pluginName">Name of the source plugin.</param>
public static void Apply(string gameDataFile, int videoBitRate, string destFile,
                         bool highlights, Action <long, long> progressReporter,
                         Func <bool> isAborted, string pluginName)
{
    try
    {
        var leaderBoard = new LeaderBoard
        {
            OverlayData = OverlayData.FromFile(gameDataFile),
            PluginName = pluginName
        };
        var transcoder = new Transcoder
        {
            VideoFiles = leaderBoard.OverlayData.VideoFiles.ToSourceReaderExtra(),
            DestinationFile = destFile,
            VideoBitRate = videoBitRate
        };
        new TranscodeAndOverlay(leaderBoard, progressReporter)
            .Process(transcoder, highlights, progressReporter, isAborted);
    }
    catch (Exception e)
    {
        TraceError.WriteLine(e.Message);
        TraceError.WriteLine(e.StackTrace);
        // Fix: `throw e;` reset the stack trace — use `throw;` to preserve it.
        throw;
    }
}
/// <summary>
/// Decodes an H.264 stream to raw YUV 4:2:0 frames, writing each pulled frame
/// sequentially to opt.OutputFile. Returns true on success.
/// </summary>
static bool DecodeH264Stream(Options opt)
{
    // Create an input socket from file
    MediaSocket inSocket = new MediaSocket();
    inSocket.File = opt.InputFile;

    // Create an output socket with one YUV 4:2:0 video pin
    VideoStreamInfo outStreamInfo = new VideoStreamInfo();
    outStreamInfo.StreamType = StreamType.UncompressedVideo;
    outStreamInfo.ColorFormat = ColorFormat.YUV420;
    outStreamInfo.ScanType = ScanType.Progressive;

    MediaPin outPin = new MediaPin();
    outPin.StreamInfo = outStreamInfo;

    MediaSocket outSocket = new MediaSocket();
    outSocket.StreamType = StreamType.UncompressedVideo;
    outSocket.Pins.Add(outPin);

    // Create Transcoder
    using (var transcoder = new Transcoder())
    {
        transcoder.AllowDemoMode = true;
        transcoder.Inputs.Add(inSocket);
        transcoder.Outputs.Add(outSocket);

        if (transcoder.Open())
        {
            DeleteFile(opt.OutputFile);

            int inputIndex;
            MediaSample yuvFrame = new MediaSample();
            int frameCounter = 0;

            using (System.IO.FileStream outfile = System.IO.File.OpenWrite(opt.OutputFile))
            {
                while (transcoder.Pull(out inputIndex, yuvFrame))
                {
                    // Each call to Transcoder::pull returns a raw YUV 4:2:0 frame.
                    outfile.Write(yuvFrame.Buffer.Start, yuvFrame.Buffer.DataOffset, yuvFrame.Buffer.DataSize);
                    ++frameCounter;
                }
                // NOTE(review): Pull presumably returns false at end-of-stream
                // as well as on error; PrintError reports whichever was recorded.
                PrintError("Transcoder pull", transcoder.Error);
                Console.WriteLine("Frames decoded: {0}", frameCounter);
                Console.WriteLine("Output file: {0}", opt.OutputFile);
                outfile.Close();
            }
            transcoder.Close();
            return(true);
        }
        PrintError("Transcoder open", transcoder.Error);
        return(false);
    }
}
/// <summary>
/// Computes the transcoder flags for the payload before the operation is sent.
/// </summary>
protected override void BeginSend()
{
    Flags = Transcoder.GetFormat(Content);
}
/// <summary>
/// Encodes a raw YUV file to the output preset named in the options.
/// Returns true on success.
/// </summary>
static bool Encode(Options opt)
{
    // Best-effort delete of any previous output; a locked file will surface
    // later when the output stream is created.
    try
    {
        File.Delete(opt.OutputFile);
    }
    catch { }

    System.IO.Stream inputStream = null;
    System.IO.Stream outputStream = null;
    try
    {
        inputStream = new System.IO.FileStream(opt.YuvFile, FileMode.Open, FileAccess.Read, FileShare.Read);
        outputStream = new System.IO.FileStream(opt.OutputFile, FileMode.Create, FileAccess.Write, FileShare.None);

        using (var transcoder = new Transcoder())
        {
            // In order to use the OEM release for testing (without a valid license) the transcoder demo mode must be enabled.
            transcoder.AllowDemoMode = true;

            // Configure input
            var instream = new VideoStreamInfo
            {
                FrameRate = opt.YuvFps, // the input frame rate determines how fast the video is played
                FrameWidth = opt.YuvWidth,
                FrameHeight = opt.YuvHeight,
                ColorFormat = opt.YuvColor.Id,
                StreamType = PrimoSoftware.AVBlocks.StreamType.UncompressedVideo,
                ScanType = ScanType.Progressive
            };

            var inpin = new MediaPin { StreamInfo = instream };
            var insocket = new MediaSocket
            {
                Stream = inputStream,
                StreamType = PrimoSoftware.AVBlocks.StreamType.UncompressedVideo
            };
            insocket.Pins.Add(inpin);
            transcoder.Inputs.Add(insocket);

            // Configure output from the named preset, writing to the open stream.
            var outsocket = MediaSocket.FromPreset(opt.OutputPreset.Name);
            outsocket.Stream = outputStream;
            transcoder.Outputs.Add(outsocket);

            bool res = transcoder.Open();
            PrintError("Open Transcoder", transcoder.Error);
            if (!res)
            {
                return(false);
            }

            res = transcoder.Run();
            PrintError("Run Transcoder", transcoder.Error);
            if (!res)
            {
                return(false);
            }

            transcoder.Close();
        }
    }
    finally
    {
        // Always dispose both streams, even when the transcode failed.
        if (inputStream != null)
        {
            inputStream.Dispose();
            inputStream = null;
        }
        if (outputStream != null)
        {
            outputStream.Dispose();
            outputStream = null;
        }
    }
    return(true);
}
// Verifies a Transcoder can be constructed without an id.
// NOTE(review): if Transcoder is IDisposable the instance leaks here —
// consider wrapping in `using`; confirm the type's dispose semantics.
public void noid()
{
    var t = new Transcoder();
}
/// <summary>
/// Returns the fragment transcoded to dest_encoding, delegating to toBytes()
/// when no transcoding is needed. When stLen != 0, recorded namespace
/// attribute tokens are interleaved into the start tag ("namespace
/// compensation") as the bytes are filled.
/// </summary>
/// <param name="dest_encoding">Target encoding constant (VTDNav.FORMAT_*).</param>
/// <returns>The transcoded fragment bytes.</returns>
public byte[] toBytes(int dest_encoding)
{
    if (dest_encoding == vn.encoding)
    {
        return(toBytes());
    }
    // find out how long the output bytes are
    byte[] ba = new byte[getSize(dest_encoding)];
    // l packs the fragment offset (low 32 bits) and length (high 32 bits).
    int os = (int)l;
    int len = (int)(l >> 32);
    int os1 = 0;
    byte[] xml = vn.getXML().getBytes();
    if (stLen == 0)
    {
        // No namespace compensation needed: transcode the fragment in one pass.
        Transcoder.transcodeAndFill(xml, ba, os, len, vn.encoding, dest_encoding);
        return(ba);
    }
    int enc = vn.getEncoding();
    int temp = 0;
    int outPosition = 0;
    // transcode and fill the bytes
    // Start-tag length in bytes (character count doubled for UTF-16 encodings).
    switch (enc)
    {
    case VTDNav.FORMAT_UTF_16BE:
    case VTDNav.FORMAT_UTF_16LE:
        temp = (stLen + 1) << 1;
        break;

    default:
        temp = stLen + 1;
        break;
    }
    // transcode starting length
    outPosition = Transcoder.transcodeAndFill2(outPosition, xml, ba, os, temp, vn.encoding, dest_encoding);
    //System.arraycopy(xml, os, ba, 0, temp);
    //namespace compensation
    os1 += temp;
    int tos = 0, tlen = 0;
    // For each recorded namespace attribute, emit separator, attribute name,
    // '=', then the quoted value; token offsets/lengths come from the VTD
    // index and are doubled to byte units for UTF-16 sources.
    for (int i = 0; i < fib.size_Renamed_Field; i++)
    {
        //System.out.println("i ==>"+fib.intAt(i));
        switch (enc)
        {
        case VTDNav.FORMAT_UTF_16BE:
            //write a 0 and ws
            //System.arraycopy(ws, 0, ba, os1, 2);
            outPosition = Transcoder.transcodeAndFill2(outPosition, ws, ba, 0, 2, vn.encoding, dest_encoding);
            os1 += 2;
            tos = vn.getTokenOffset(fib.intAt(i)) << 1;
            tlen = (vn.getTokenLength(fib.intAt(i)) & 0xffff) << 1;
            //System.arraycopy(xml, tos, ba, os1, tlen);
            outPosition = Transcoder.transcodeAndFill2(outPosition, xml, ba, tos, tlen, vn.encoding, dest_encoding);
            os1 += tlen;
            // write a 0 and =
            //System.arraycopy(ws, 2, ba, os1, 2);
            outPosition = Transcoder.transcodeAndFill2(outPosition, ws, ba, 2, 2, vn.encoding, dest_encoding);
            os1 += 2;
            tos = (vn.getTokenOffset(fib.intAt(i) + 1) - 1) << 1;
            tlen = ((vn.getTokenLength(fib.intAt(i) + 1) & 0xffff) + 2) << 1;
            //System.arraycopy(xml, tos, ba, os1, tlen);
            outPosition = Transcoder.transcodeAndFill2(outPosition, xml, ba, tos, tlen, vn.encoding, dest_encoding);
            os1 += tlen;
            break;

        case VTDNav.FORMAT_UTF_16LE:
            // write a ws and 0
            //System.arraycopy(ws, 1, ba, os1, 2);
            outPosition = Transcoder.transcodeAndFill2(outPosition, ws, ba, 1, 2, vn.encoding, dest_encoding);
            os1 += 2;
            tos = vn.getTokenOffset(fib.intAt(i)) << 1;
            tlen = (vn.getTokenLength(fib.intAt(i)) & 0xffff) << 1;
            //System.arraycopy(xml, tos, ba, os1, tlen);
            outPosition = Transcoder.transcodeAndFill2(outPosition, xml, ba, tos, tlen, vn.encoding, dest_encoding);
            os1 += tlen;
            // write a = and 0
            //System.arraycopy(ws, 3, ba, os1, 2);
            outPosition = Transcoder.transcodeAndFill2(outPosition, ws, ba, 3, 2, vn.encoding, dest_encoding);
            os1 += 2;
            tos = (vn.getTokenOffset(fib.intAt(i) + 1) - 1) << 1;
            tlen = ((vn.getTokenLength(fib.intAt(i) + 1) & 0xffff) + 2) << 1;
            // System.arraycopy(xml, tos, ba, os1, tlen);
            outPosition = Transcoder.transcodeAndFill2(outPosition, xml, ba, tos, tlen, vn.encoding, dest_encoding);
            os1 += tlen;
            break;

        default:
            // write a ws
            //System.arraycopy(ws, 1, ba, os1, 1);
            outPosition = Transcoder.transcodeAndFill2(outPosition, ws, ba, 1, 1, vn.encoding, dest_encoding);
            os1++;
            tos = vn.getTokenOffset(fib.intAt(i));
            tlen = (vn.getTokenLength(fib.intAt(i)) & 0xffff);
            //System.arraycopy(xml, tos, ba, os1, tlen);
            outPosition = Transcoder.transcodeAndFill2(outPosition, xml, ba, tos, tlen, vn.encoding, dest_encoding);
            os1 += tlen;
            // write a =
            //System.arraycopy(ws, 3, ba, os1, 1);
            outPosition = Transcoder.transcodeAndFill2(outPosition, ws, ba, 3, 1, vn.encoding, dest_encoding);
            os1++;
            tos = vn.getTokenOffset(fib.intAt(i) + 1) - 1;
            tlen = (vn.getTokenLength(fib.intAt(i) + 1) & 0xffff) + 2;
            //System.arraycopy(xml, tos, ba, os1, tlen);
            outPosition = Transcoder.transcodeAndFill2(outPosition, xml, ba, tos, tlen, vn.encoding, dest_encoding);
            os1 += tlen;
            break;
        }
    }
    //System.out.println(new String(ba));
    // Recompute the start-tag length, then transcode the remainder of the fragment.
    switch (enc)
    {
    case VTDNav.FORMAT_UTF_16BE:
    case VTDNav.FORMAT_UTF_16LE:
        temp = (stLen + 1) << 1;
        break;

    default:
        temp = stLen + 1;
        break;
    }
    //System.arraycopy(xml, os + temp, ba, os1, len - temp);
    outPosition = Transcoder.transcodeAndFill2(outPosition, xml, ba, os + temp, len - temp, vn.encoding, dest_encoding);
    //System.out.println(new String(ba));
    return(ba);
}
/// <summary>
/// Remuxes the first H.264 video stream and first AAC audio stream of the
/// input into an MP4 container without re-encoding. Returns true on success.
/// </summary>
static bool MP4Remux(Options opt)
{
    // Best-effort delete of any previous output.
    try
    {
        File.Delete(opt.OutputFile);
    }
    catch (Exception) { }

    bool audioStreamDetected = false;
    bool videoStreamDetected = false;

    using (var transcoder = new Transcoder())
    {
        // Transcoder demo mode must be enabled,
        // in order to use the production release for testing (without a valid license).
        transcoder.AllowDemoMode = true;

        // configure inputs
        using (MediaInfo info = new MediaInfo())
        {
            info.Inputs[0].File = opt.InputFile;

            if (!info.Open())
            {
                PrintError("mediaInfo.Open", info.Error);
                return(false);
            }

            MediaSocket inputSocket = MediaSocket.FromMediaInfo(info);
            info.Close();

            // Keep only the first H.264 and first AAC pins; disable all others.
            for (int i = 0; i < inputSocket.Pins.Count; i++)
            {
                MediaPin pin = inputSocket.Pins[i];
                if (pin.StreamInfo.StreamType == StreamType.H264)
                {
                    if (videoStreamDetected)
                    {
                        pin.Connection = PinConnection.Disabled;
                    }
                    else
                    {
                        videoStreamDetected = true;
                        Console.WriteLine("Muxing video input: {0}", opt.InputFile);
                    }
                }
                else if (pin.StreamInfo.StreamType == StreamType.Aac)
                {
                    if (audioStreamDetected)
                    {
                        pin.Connection = PinConnection.Disabled;
                    }
                    else
                    {
                        audioStreamDetected = true;
                        Console.WriteLine("Muxing audio input: {0}", opt.InputFile);
                    }
                }
                else
                {
                    pin.Connection = PinConnection.Disabled;
                }
            }
            transcoder.Inputs.Add(inputSocket);
        }

        // Configure output
        {
            MediaSocket socket = new MediaSocket();
            socket.File = opt.OutputFile;
            socket.StreamType = StreamType.Mp4;

            if (videoStreamDetected)
            {
                VideoStreamInfo streamInfo = new VideoStreamInfo();
                streamInfo.StreamType = StreamType.H264;
                streamInfo.StreamSubType = StreamSubType.Avc1;

                MediaPin pin = new MediaPin();
                pin.StreamInfo = streamInfo;
                socket.Pins.Add(pin);
            }

            if (audioStreamDetected)
            {
                AudioStreamInfo streamInfo = new AudioStreamInfo();
                streamInfo.StreamType = StreamType.Aac;
                streamInfo.StreamSubType = StreamSubType.AacMp4;

                MediaPin pin = new MediaPin();
                pin.StreamInfo = streamInfo;
                socket.Pins.Add(pin);
            }

            if (opt.FastStart)
            {
                // MP4 fast start: place the index up front so playback can begin
                // before the whole file is downloaded.
                socket.Params.Add(Param.Muxer.MP4.FastStart, 1);
            }

            transcoder.Outputs.Add(socket);
        }

        bool res = transcoder.Open();
        PrintError("Open Transcoder", transcoder.Error);
        if (!res)
        {
            return(false);
        }

        res = transcoder.Run();
        PrintError("Run Transcoder", transcoder.Error);
        if (!res)
        {
            return(false);
        }

        transcoder.Close();
    }
    return(true);
}
/// <summary>
/// Start the transcoding.
/// </summary>
public void StartTranscoding()
{
    transcoder = new Transcoder();
    transcoder.Source = this.Source;
    // NOTE(review): despite its name, this.Transcoder here supplies the
    // transcoder's *Profile* — confirm the property naming is intentional.
    transcoder.Profile = this.Transcoder;
    transcoder.WantLogStream = this.WantLogStream;
    transcoder.StartTranscoding();
    currentState = State.TranscodingStarted;
}
/// <summary>
/// Decodes a single JPEG image by pushing its bytes through the transcoder and
/// writing the converted frame to outputFile. Returns true on success.
/// </summary>
static bool DecodeJpeg(string inputFile, string outputFile)
{
    int frameWidth, frameHeight;
    if (!GetFrameSize(inputFile, out frameWidth, out frameHeight))
    {
        return(false);
    }
    Console.WriteLine("Input frame size: {0}x{1}", frameWidth, frameHeight);

    // read input bytes
    byte[] inputData;
    try
    {
        inputData = System.IO.File.ReadAllBytes(inputFile);
    }
    catch (System.Exception e)
    {
        Console.WriteLine(e.ToString());
        return(false);
    }

    DeleteFile(outputFile);

    MediaSocket inSocket = createInputSocket(frameWidth, frameHeight);
    MediaSocket outSocket = createOutputSocket(outputFile, frameWidth, frameHeight);

    // create Transcoder
    using (Transcoder transcoder = new Transcoder())
    {
        transcoder.AllowDemoMode = true;
        transcoder.Inputs.Add(inSocket);
        transcoder.Outputs.Add(outSocket);

        bool res = transcoder.Open();
        PrintError("Open Transcoder", transcoder.Error);
        if (!res)
        {
            return(false);
        }

        MediaBuffer buffer = new MediaBuffer();
        // NOTE(review): the second Attach argument presumably marks the buffer
        // as holding valid data — confirm against the MediaBuffer API.
        buffer.Attach(inputData, true);

        MediaSample sample = new MediaSample();
        sample.Buffer = buffer;

        // Push the single compressed frame, then flush to drain the pipeline.
        res = transcoder.Push(0, sample);
        PrintError("Push Transcoder", transcoder.Error);
        if (!res)
        {
            return(false);
        }

        transcoder.Flush();
        transcoder.Close();
    }
    return(true);
}
/*
 * uncompressed audio and video input
 */
/// <summary>
/// Muxes uncompressed video and/or audio inputs into a single output, pushing
/// frames from the two splitters in the order chosen by SelectMuxTrack.
/// Returns true on success.
/// </summary>
public static bool Run(MediaSocket vinput, string vfile, MediaSocket ainput, string afile, MediaSocket output)
{
    bool res;
    TrackState vtrack = new TrackState();
    TrackState atrack = new TrackState();

    using (UncompressedAVSplitter vsplit = new UncompressedAVSplitter())
    using (UncompressedAVSplitter asplit = new UncompressedAVSplitter())
    using (var transcoder = new Transcoder() { AllowDemoMode = true })
    {
        // Initialize whichever splitters have an input configured.
        try
        {
            if (vinput != null)
            {
                Console.WriteLine("video input file: \"{0}\"", vfile);
                vsplit.Init(vinput, vfile);
                Console.WriteLine("OK");
            }
            if (ainput != null)
            {
                Console.WriteLine("audio input file: \"{0}\"", afile);
                asplit.Init(ainput, afile);
                Console.WriteLine("OK");
            }
        }
        catch (Exception ex)
        {
            Console.WriteLine(ex.Message);
            return(false);
        }

        // setup transcoder
        int trackIndex = 0; // start index
        if (vinput != null)
        {
            transcoder.Inputs.Add(vinput);
            vtrack.Index = trackIndex++;
        }
        if (ainput != null)
        {
            transcoder.Inputs.Add(ainput);
            atrack.Index = trackIndex++;
        }
        transcoder.Outputs.Add(output);

        res = transcoder.Open();
        PrintError("transcoder open", transcoder.Error);
        if (!res)
        {
            return(false);
        }

        // transcoding loop
        for (;;)
        {
            // Refill each active track's pending frame from its splitter.
            if (vtrack.Index != TrackState.Disabled && vtrack.Frame == null)
            {
                vtrack.Frame = vsplit.GetFrame();
            }
            if (atrack.Index != TrackState.Disabled && atrack.Frame == null)
            {
                atrack.Frame = asplit.GetFrame();
            }

            // Pick the next track to mux; null means all tracks are finished.
            TrackState track = SelectMuxTrack(vtrack, atrack);
            if (track == null)
            {
                break;
            }

            // log — at most once per unit of media time per track
            if (track.Frame != null)
            {
                if (track.Frame.StartTime - track.Progress >= 1.0)
                {
                    track.Progress = track.Frame.StartTime;
                    Console.WriteLine("track {0} frame #{1} pts:{2}", track.Index, track.FrameCount, track.Frame.StartTime);
                }
            }
            else
            {
                Console.WriteLine("track {0} eos", track.Index);
            }

            if (track.Frame != null)
            {
                res = transcoder.Push(track.Index, track.Frame);
                if (!res)
                {
                    PrintError("transcoder push frame", transcoder.Error);
                    return(false);
                }
                track.Frame = null; // clear the muxed frame in order to read to the next one
                track.FrameCount++;
            }
            else
            {
                // A null frame signals end-of-stream for this track.
                res = transcoder.Push(track.Index, null);
                if (!res)
                {
                    PrintError("transcoder push eos", transcoder.Error);
                    return(false);
                }
                track.Index = TrackState.Disabled; // disable track
            }
        }

        res = transcoder.Flush();
        if (!res)
        {
            PrintError("transcoder flush", transcoder.Error);
            return(false);
        }
        transcoder.Close();
    }
    return(true);
}
/// <summary>
/// Returns a playable path for the given media file: the original path when the
/// extender plays it natively, otherwise the transcoder's buffer path.
/// </summary>
/// <exception cref="Exception">Thrown when the transcoding process cannot be started.</exception>
public static string GetTranscodedPath(string path)
{
    if (Helper.IsExtenderNativeVideo(path))
    {
        return path;
    }
    else
    {
        // Lazily create the shared transcoder instance.
        // NOTE(review): not thread-safe — confirm this is only called from one thread.
        if (_transcoder == null)
        {
            _transcoder = new MediaBrowser.Library.Transcoder();
        }

        string bufferpath = _transcoder.BeginTranscode(path);

        // if bufferpath comes back null, that means the transcoder i) failed to start or ii) they
        // don't even have it installed
        if (string.IsNullOrEmpty(bufferpath))
        {
            Application.DisplayDialog("Could not start transcoding process", "Transcode Error");
            throw new Exception("Could not start transcoding process");
        }
        return bufferpath;
    }
}
/// <summary>
/// Computes the transcoder flags for the payload and caches the resulting data
/// format and compression before the operation is sent.
/// </summary>
protected override void BeginSend()
{
    Flags = Transcoder.GetFormat(Content);
    Format = Flags.DataFormat;
    Compression = Flags.Compression;
}
/// <summary>
/// Wires the server's section, music, and transcoder providers.
/// Private constructor: instances are created elsewhere in this class.
/// </summary>
private Server(Section.IProvider sections, Music.IProvider music, Transcoder.IProvider transcoder)
{
    Sections = sections;
    Music = music;
    Transcoder = transcoder;
}
/// <summary>
/// Opens the given media file for playback: configures a transcoder with the
/// file as input and uncompressed video (BGR24) / audio (16-bit LPCM) outputs.
/// Returns false (resetting state) on failure.
/// </summary>
public bool Open(string filePath)
{
    Close();

    if (!ConfigureStreams(filePath))
    {
        Close();
        return(false);
    }

    _transcoder = new Transcoder();

    // In order to use the OEM release for testing (without a valid license) the transcoder demo mode must be enabled.
    _transcoder.AllowDemoMode = true;

    // Configure input
    {
        using (MediaInfo mediaInfo = new MediaInfo())
        {
            mediaInfo.Inputs[0].File = filePath;
            // NOTE(review): unlike the other failure paths, this return skips
            // Close(), leaving _transcoder allocated — confirm intended.
            if (!(mediaInfo.Open()))
            {
                return(false);
            }
            MediaSocket socket = MediaSocket.FromMediaInfo(mediaInfo);
            _transcoder.Inputs.Add(socket);
        }
    }

    // Configure video output
    if (_videoStreamInfo != null)
    {
        // Render-friendly raw video: bottom-up BGR24 progressive frames.
        _videoStreamInfo.ColorFormat = ColorFormat.BGR24;
        _videoStreamInfo.FrameBottomUp = true;
        _videoStreamInfo.StreamType = StreamType.UncompressedVideo;
        _videoStreamInfo.ScanType = ScanType.Progressive;

        MediaPin pin = new MediaPin();

        int displayWidth = Screen.PrimaryScreen.Bounds.Width;
        int displayHeight = Screen.PrimaryScreen.Bounds.Height;

        if ((_videoStreamInfo.FrameWidth > displayWidth) ||
            ((_videoStreamInfo.FrameHeight > displayHeight)))
        {
            // resize the video
            double displayAspect = (double)displayWidth / (double)displayHeight;
            double videoAspect = (double)_videoStreamInfo.DisplayRatioWidth / (double)_videoStreamInfo.DisplayRatioHeight;

            int width = 0;
            int height = 0;

            // NOTE(review): when videoAspect < displayAspect this yields a
            // height larger than displayHeight (fill rather than fit) —
            // confirm that is the intended behavior.
            if (videoAspect < displayAspect)
            {
                width = displayWidth;
                height = (int)(displayWidth / videoAspect);
            }
            else
            {
                width = (int)(displayHeight * videoAspect);
                height = displayHeight;
            }

            // Round dimensions up to a multiple of 2.
            width += calculatePadding(width, 2);
            height += calculatePadding(height, 2);

            _videoStreamInfo.FrameWidth = width;
            _videoStreamInfo.FrameHeight = height;

            {
                pin.Params.Add(Param.Video.Resize.InterpolationMethod, PrimoSoftware.AVBlocks.InterpolationMethod.Linear);
            }
        }

        pin.StreamInfo = _videoStreamInfo;

        MediaSocket socket = new MediaSocket();
        socket.StreamType = StreamType.UncompressedVideo;
        socket.Pins.Add(pin);

        // Remember which output index carries video before adding the socket.
        _videoStreamIndex = _transcoder.Outputs.Count;
        _transcoder.Outputs.Add(socket);
    }

    // Configure audio output
    if (_audioStreamInfo != null)
    {
        _audioStreamInfo.BitsPerSample = 16;

        // WinMM audio render supports only mono and stereo
        if (_audioStreamInfo.Channels > 2)
        {
            _audioStreamInfo.Channels = 2;
        }

        _audioStreamInfo.StreamType = StreamType.LPCM;

        MediaPin pin = new MediaPin();
        pin.StreamInfo = _audioStreamInfo;

        MediaSocket socket = new MediaSocket();
        socket.StreamType = StreamType.LPCM;
        socket.Pins.Add(pin);

        // Remember which output index carries audio before adding the socket.
        _audioStreamIndex = _transcoder.Outputs.Count;
        _transcoder.Outputs.Add(socket);
    }

    if (!_transcoder.Open())
    {
        Close();
        return(false);
    }

    return(true);
}
/// <summary>
/// Encodes media pulled from a DirectShow graph (built over inputFile) to
/// outputFile using the given AVBlocks preset.
/// </summary>
static void EncodeDirectShowInput(string inputFile, string outputFile, string preset)
{
    if (File.Exists(outputFile))
    {
        File.Delete(outputFile);
    }

    DSGraph dsGraph = new DSGraph();
    Transcoder transcoder = new Transcoder();

    // In order to use the OEM release for testing (without a valid license) the transcoder demo mode must be enabled.
    transcoder.AllowDemoMode = true;

    try
    {
        Console.WriteLine("Initializing DirectShow graph.");

        /*
         * If the source is a DirectShow filter instead of a file then:
         * 1) Create an instance of the source filter
         * 2) Configure source filter
         * 3) Call dsGraph.Init(sourceFilter);
         *
         * For example dsGraph.Init(inputFile) can be replaced with the following code:
         *
         * 1) Create an instance of the source filter
         *
         * // FileSourceAsync filter
         * IBaseFilter sourceFilter = Util.CreateFilter(new Guid("e436ebb5-524f-11ce-9f53-0020af0ba770"));
         *
         * // or WM ASF Reader filter
         * IBaseFilter sourceFilter = Util.CreateFilter(new Guid("187463A0-5BB7-11D3-ACBE-0080C75E246E"));
         *
         * 2) Configure source filter
         * IFileSourceFilter fileSourceFilter = sourceFilter as IFileSourceFilter;
         * fileSourceFilter.Load(inputFile, null);
         *
         * 3)
         * dsGraph.Init(sourceFilter);
         */
        dsGraph.Init(inputFile);

        // Register transcoder inputs for whichever grabbers the graph exposes.
        if (dsGraph.videoGrabber != null)
        {
            ConfigureVideoInput(dsGraph, transcoder);
        }

        if (dsGraph.audioGrabber != null)
        {
            ConfigureAudioInput(dsGraph, transcoder);
        }

        if ((dsGraph.videoGrabber == null) && (dsGraph.audioGrabber == null))
        {
            Console.WriteLine("No audio or video can be read from the DirectShow graph.");
            return;
        }

        // Configure output
        {
            MediaSocket outSocket = MediaSocket.FromPreset(preset);
            outSocket.File = outputFile;
            transcoder.Outputs.Add(outSocket);
        }

        bool res = transcoder.Open();
        PrintError("Open Transcoder", transcoder.Error);
        if (!res)
        {
            return;
        }

        //DBG
        //var rot = new DsROTEntry(dsGraph.graph);

        Console.WriteLine("Running DirectShow graph.");
        int hr = dsGraph.mediaControl.Run();
        DsError.ThrowExceptionForHR(hr);

        // Pump until the graph stops running or reports completion.
        while (true)
        {
            FilterState fs;
            dsGraph.mediaControl.GetState(-1, out fs);
            if (fs != FilterState.Running)
            {
                break;
            }

            EventCode ev;
            dsGraph.mediaEvent.WaitForCompletion(1000, out ev);
            if (EventCode.Complete == ev)
            {
                break;
            }
        }

        Console.WriteLine("DirectShow graph is stopped.");

        // Surface any error the grabber callbacks recorded during the run.
        if ((dsGraph.videoGrabberCB != null) && (dsGraph.videoGrabberCB.TranscoderError != null))
        {
            PrintError("Transcoder Error", transcoder.Error);
        }

        if ((dsGraph.audioGrabberCB != null) && (dsGraph.audioGrabberCB.TranscoderError != null))
        {
            PrintError("Transcoder Error", transcoder.Error);
        }

        Console.WriteLine("Closing transcoder.");
        if (!transcoder.Flush())
        {
            PrintError("Flush Transcoder", transcoder.Error);
        }
        transcoder.Close();
    }
    finally
    {
        // Tear down the graph and release the transcoder even on exceptions.
        dsGraph.Reset();
        transcoder.Dispose();
    }
}
/// <summary>
/// Reads the negotiated video format from the DirectShow video grabber and
/// registers a matching uncompressed-video input socket with the transcoder.
/// </summary>
static void ConfigureVideoInput(DSGraph graph, Transcoder transcoder)
{
    AMMediaType mt = new AMMediaType();
    int hr;
    try
    {
        hr = graph.videoGrabber.GetConnectedMediaType(mt);
        DsError.ThrowExceptionForHR(hr);

        // Only uncompressed VideoInfo-format video is supported here.
        if ((mt.majorType != DirectShowLib.MediaType.Video) ||
            (mt.formatType != DirectShowLib.FormatType.VideoInfo))
        {
            throw new COMException("Unexpected format type");
        }

        VideoInfoHeader vih = (VideoInfoHeader)Marshal.PtrToStructure(mt.formatPtr, typeof(VideoInfoHeader));

        VideoStreamInfo videoInfo = new VideoStreamInfo();

        // AvgTimePerFrame is in 100 ns units, hence 10,000,000 per second.
        if (vih.AvgTimePerFrame > 0)
        {
            videoInfo.FrameRate = (double)10000000 / vih.AvgTimePerFrame;
        }

        videoInfo.Bitrate = 0;
        // Height can be negative for top-down DIBs; use the magnitude.
        videoInfo.FrameHeight = Math.Abs(vih.BmiHeader.Height);
        videoInfo.FrameWidth = vih.BmiHeader.Width;
        videoInfo.DisplayRatioWidth = videoInfo.FrameWidth;
        videoInfo.DisplayRatioHeight = videoInfo.FrameHeight;
        videoInfo.ColorFormat = Util.GetColorFormat(ref mt.subType);
        videoInfo.Duration = 0;
        videoInfo.StreamType = StreamType.UncompressedVideo;
        videoInfo.ScanType = ScanType.Progressive;

        // RGB DIBs with positive height are stored bottom-up.
        switch (videoInfo.ColorFormat)
        {
        case ColorFormat.BGR32:
        case ColorFormat.BGRA32:
        case ColorFormat.BGR24:
        case ColorFormat.BGR444:
        case ColorFormat.BGR555:
        case ColorFormat.BGR565:
            videoInfo.FrameBottomUp = (vih.BmiHeader.Height > 0);
            break;
        }

        MediaSocket inputSocket = new MediaSocket();
        MediaPin inputPin = new MediaPin();
        inputPin.StreamInfo = videoInfo;
        inputSocket.Pins.Add(inputPin);
        inputSocket.StreamType = StreamType.UncompressedVideo;

        // Wire the grabber callback to feed frames into this transcoder input index.
        graph.videoGrabberCB.Init(transcoder, transcoder.Inputs.Count, graph.mediaControl);
        transcoder.Inputs.Add(inputSocket);
    }
    finally
    {
        // AMMediaType holds unmanaged memory; always free it.
        DsUtils.FreeAMMediaType(mt);
    }
}