/// <summary>
/// Yuv420P sample: encodes 10 seconds of synthetically filled YUV420P frames
/// into <paramref name="outputFile"/>, dumping each converted frame as a bitmap.
/// </summary>
/// <param name="outputFile">output file</param>
/// <param name="width">video width</param>
/// <param name="height">video height</param>
/// <param name="fps">video fps</param>
public FillYuv420PSample(string outputFile, int width, int height, int fps)
{
    // Bitmaps are dumped into a sibling directory named after the output file.
    var dir = Directory.CreateDirectory(Path.Combine(Path.GetDirectoryName(outputFile), Path.GetFileNameWithoutExtension(outputFile))).FullName;

    using (MediaWriter writer = new MediaWriter(outputFile))
    {
        writer.AddStream(MediaEncoder.CreateVideoEncode(writer.Format, width, height, fps));
        writer.Initialize();

        VideoFrame srcframe = new VideoFrame(width, height, FFmpeg.AutoGen.AVPixelFormat.AV_PIX_FMT_YUV420P);
        PixelConverter pixelConverter = new PixelConverter(writer[0].Codec);

        // FIX: removed an unused 'Random' local that was declared but never read.
        for (int i = 0; i < fps * 10; i++) // 10 seconds of video
        {
            // fill video frame
            FillYuv420P(srcframe, i);

            foreach (var dstframe in pixelConverter.Convert(srcframe))
            {
                dstframe.Pts = i; // pts in frame units; the encoder rescales by its time base
                SaveFrame(dstframe, Path.Combine(dir, $"{i}.bmp"));
                foreach (var packet in writer[0].WriteFrame(dstframe))
                {
                    writer.WritePacket(packet);
                }
            }
        }

        // flush cache
        writer.FlushMuxer();
    }
}
/// <summary>
/// Stops the active recording, tears down the encoder and input components,
/// and restores normal engine frame timing.
/// </summary>
public void EndRecording()
{
    // Dispose the encoder first so no further frames can be written.
    if (encoder != null)
    {
        encoder.Dispose();
        encoder = null;
    }

    if (videoInput != null)
    {
        videoInput.EndRecording();
        Destroy(videoInput);
    }

#if UNITY_2018_1_OR_NEWER
    if (audioInput != null)
    {
        // Only stop audio capture when it was actually recording.
        if (recorder.recordAudio)
        {
            audioInput.EndRecording();
        }
        Destroy(audioInput);
    }
#endif

    // Reset frame pacing and recording state.
    Time.captureFramerate = 0;
    currentFrame = 0;
    isRecording = false;
}
/// <summary>
/// Stops the active recording, tears down the encoder and inputs,
/// and restores normal engine frame timing.
/// </summary>
public void EndRecording()
{
    if (encoder != null)
    {
        encoder.Dispose();
        encoder = null;
    }

    if (videoInput != null)
    {
        videoInput.EndRecording();
        // FIX: guard against a missing audio input; previously audioInput was
        // dereferenced unconditionally whenever recorder.recordAudio was set.
        if (recorder.recordAudio && audioInput != null)
        {
            audioInput.EndRecording();
        }
    }

    Destroy(videoInput);

    Time.captureFramerate = 0;
    currentFrame = 0;
    isRecording = false;
}
/// <summary>
/// Gets the converted ass path, converting the text subtitle on demand.
/// </summary>
/// <param name="video">The video.</param>
/// <param name="subtitleStream">The subtitle stream.</param>
/// <param name="startTimeTicks">The start time ticks.</param>
/// <param name="performConversion">if set to <c>true</c> [perform conversion].</param>
/// <returns>The cache path, or null if conversion failed.</returns>
private string GetConvertedAssPath(Video video, MediaStream subtitleStream, long? startTimeTicks, bool performConversion)
{
    var offset = TimeSpan.FromTicks(startTimeTicks ?? 0);

    var path = Kernel.Instance.FFMpegManager.GetSubtitleCachePath(video, subtitleStream.Index, offset, ".ass");

    // Only convert when asked to and the cached file is not already present.
    if (performConversion && !File.Exists(path))
    {
        try
        {
            // IDIOM: Directory.CreateDirectory is a no-op when the directory
            // already exists, so the previous Directory.Exists pre-check was
            // redundant (and this now matches the sibling subtitle helpers).
            var parentPath = Path.GetDirectoryName(path);
            Directory.CreateDirectory(parentPath);

            var task = MediaEncoder.ConvertTextSubtitleToAss(subtitleStream.Path, path, subtitleStream.Language, offset, CancellationToken.None);
            Task.WaitAll(task);
        }
        catch
        {
            // Best effort: callers treat null as "no converted subtitle available".
            return null;
        }
    }

    return path;
}
/// <summary>
/// Stops recording: disposes the encoder, detaches and releases the camera
/// command buffer, frees the render buffer, and clears the recording flag.
/// </summary>
public void EndRecording()
{
    if (encoder != null)
    {
        encoder.Dispose();
        encoder = null;
    }

    if (commandBuffer != null)
    {
        // Detach from the camera before releasing the buffer.
        _camera.RemoveCommandBuffer(CameraEvent.AfterEverything, commandBuffer);
        commandBuffer.Release();
        commandBuffer = null;
    }

    if (renderBuffer != null)
    {
        renderBuffer.Release();
        renderBuffer = null;
    }

    // Log only when a recording was actually in progress.
    if (isRecording)
    {
        Debug.Log("VimeoRecorder: EndRecording()");
    }

    isRecording = false;
}
/// <summary>
/// Registers the media encoder: downloads/locates ffmpeg, then wires the
/// encoder into the DI container as a singleton.
/// </summary>
/// <returns>Task.</returns>
private async Task RegisterMediaEncoder(IProgress<double> progress)
{
    var downloader = new FFMpegDownloader(Logger, ApplicationPaths, HttpClient, ZipClient, FileSystemManager);
    var ffmpegInfo = await downloader.GetFFMpegInfo(_startupOptions, progress).ConfigureAwait(false);

    MediaEncoder = new MediaEncoder(
        LogManager.GetLogger("MediaEncoder"),
        JsonSerializer,
        ffmpegInfo.EncoderPath,
        ffmpegInfo.ProbePath,
        ffmpegInfo.Version);

    RegisterSingleInstance(MediaEncoder);
}
/// <summary>
/// Gets the extracted ass path, extracting the embedded text subtitle on demand.
/// </summary>
/// <param name="video">The video.</param>
/// <param name="subtitleStream">The subtitle stream.</param>
/// <param name="startTimeTicks">The start time ticks.</param>
/// <param name="performConversion">if set to <c>true</c> [perform conversion].</param>
/// <returns>The cache path of the extracted .ass file, or null when extraction fails.</returns>
private string GetExtractedAssPath(Video video, MediaStream subtitleStream, long? startTimeTicks, bool performConversion)
{
    var offset = TimeSpan.FromTicks(startTimeTicks ?? 0);
    var cachePath = Kernel.Instance.FFMpegManager.GetSubtitleCachePath(video, subtitleStream.Index, offset, ".ass");

    // Without conversion the caller only wants the cache location.
    if (!performConversion)
    {
        return cachePath;
    }

    InputType type;
    var inputPath = MediaEncoderHelpers.GetInputArgument(video, null, out type);

    try
    {
        Directory.CreateDirectory(Path.GetDirectoryName(cachePath));

        var extractTask = MediaEncoder.ExtractTextSubtitle(inputPath, type, subtitleStream.Index, offset, cachePath, CancellationToken.None);
        Task.WaitAll(extractTask);
    }
    catch
    {
        // Null tells the caller no extracted subtitle is available.
        return null;
    }

    return cachePath;
}
/// <summary>
/// transcode audio
/// </summary>
/// <param name="input">input audio file</param>
/// <param name="output">output audio file</param>
/// <param name="outChannels">output audio file channels</param>
/// <param name="outSampleRate">output audio file sample rate</param>
public AudioTranscode(string input, string output, int outChannels = 2, int outSampleRate = 44100)
{
    using (MediaWriter writer = new MediaWriter(output))
    using (MediaReader reader = new MediaReader(input))
    {
        // Locate the first audio stream in the source container.
        int audioIndex = reader.First(s => s.Codec.Type == AVMediaType.AVMEDIA_TYPE_AUDIO).Index;

        writer.AddStream(MediaEncoder.CreateAudioEncode(writer.Format, outChannels, outSampleRate));
        writer.Initialize();

        AudioFrame targetFrame = AudioFrame.CreateFrameByCodec(writer[0].Codec);
        SampleConverter resampler = new SampleConverter(targetFrame);

        long pts = 0;
        foreach (var packet in reader.ReadPacket())
        {
            foreach (var decoded in reader[audioIndex].ReadFrame(packet))
            {
                foreach (var converted in resampler.Convert(decoded))
                {
                    // audio's pts is total samples, pts can only increase.
                    pts += converted.AVFrame.nb_samples;
                    converted.Pts = pts;
                    foreach (var encoded in writer[0].WriteFrame(converted))
                    {
                        writer.WritePacket(encoded);
                    }
                }
            }
        }

        writer.FlushMuxer();
    }
}
/// <summary>
/// Configures video/audio track attributes from the record texture, creates a
/// MediaEncoder writing to a timestamped temp-file, and starts the capture coroutine.
/// </summary>
public void StartRecord()
{
    var videoAttr = new VideoTrackAttributes
    {
        frameRate = new MediaRational(30),
        width = (uint)recordTexture.width,
        height = (uint)recordTexture.height,
        includeAlpha = false
    };

    var audioAttr = new AudioTrackAttributes
    {
        sampleRate = new MediaRational(48000),
        channelCount = 2,
        language = "jp"
    };

    // FIX: use a zero-padded timestamp format. Concatenating unpadded
    // components (e.g. month "1" + day "12" vs month "11" + day "2")
    // produced ambiguous and potentially colliding file names.
    var encodedFilePath = Path.Combine(
        Path.GetTempPath(),
        DateTime.Now.ToString("yyyyMMddHHmmss") + ".mp4");
    Debug.Log(encodedFilePath);

    mediaEncoder = new MediaEncoder(encodedFilePath, videoAttr, audioAttr);
    isRecording = true;
    StartCoroutine(Record());
}
/// <summary>
/// Gets the converted ass path, converting the text subtitle on demand.
/// </summary>
/// <param name="video">The video.</param>
/// <param name="subtitleStream">The subtitle stream.</param>
/// <param name="startTimeTicks">The start time ticks.</param>
/// <param name="performConversion">if set to <c>true</c> [perform conversion].</param>
/// <returns>The cache path, or null if conversion failed.</returns>
private string GetConvertedAssPath(Video video, MediaStream subtitleStream, long? startTimeTicks, bool performConversion)
{
    var offset = TimeSpan.FromTicks(startTimeTicks ?? 0);

    var path = Kernel.Instance.FFMpegManager.GetSubtitleCachePath(video, subtitleStream.Index, offset, ".ass");

    // FIX: skip the conversion when the cached file already exists (the path
    // comes from the subtitle cache), matching the sibling implementation.
    // Also removed previously commented-out dead code.
    if (performConversion && !File.Exists(path))
    {
        try
        {
            var parentPath = Path.GetDirectoryName(path);
            Directory.CreateDirectory(parentPath);

            var task = MediaEncoder.ConvertTextSubtitleToAss(subtitleStream.Path, path, subtitleStream.Language, offset, CancellationToken.None);
            Task.WaitAll(task);
        }
        catch
        {
            // Best effort: callers treat null as "no converted subtitle available".
            return null;
        }
    }

    return path;
}
/// <summary>
/// Encodes 1000 audio frames generated from a Mat into the given output file
/// (stereo, 44.1 kHz).
/// </summary>
/// <param name="output">output audio file path</param>
public EncodeAudioByMat(string output)
{
    using (MediaWriter writer = new MediaWriter(output))
    {
        writer.AddStream(MediaEncoder.CreateAudioEncode(writer.Format, 2, 44100));
        writer.Initialize();

        AudioFrame codecFrame = AudioFrame.CreateFrameByCodec(writer[0].Codec);
        SampleConverter converter = new SampleConverter(codecFrame);

        using (Mat mat = CreateMat(writer[0].Codec.AVCodecContext.channels))
        {
            long pts = 0;
            for (int i = 0; i < 1000; i++)
            {
                foreach (var frame in converter.Convert(mat.ToAudioFrame(dstSampleRate: writer[0].Codec.AVCodecContext.sample_rate)))
                {
                    // Audio pts counts total samples and must be monotonic.
                    pts += frame.NbSamples;
                    frame.Pts = pts;
                    foreach (var packet in writer[0].WriteFrame(frame))
                    {
                        writer.WritePacket(packet);
                    }
                }
            }
        }

        writer.FlushMuxer();
    }
}
/// <summary>
/// Sets up the recording inputs, frame timing, and the media encoder
/// (with or without an audio track) for a new recording session.
/// </summary>
private void BeginMediaEncoderRecording()
{
    InitInputs();

    // Real-time capture paces the engine; offline capture fixes the frame rate.
    if (recorder.realTime)
    {
        Application.targetFrameRate = recorder.frameRate;
    }
    else
    {
        Time.captureFramerate = recorder.frameRate;
    }

    // Configure encoder
    AudioSpeakerMode speakerMode = AudioSettings.speakerMode;
    audioAttrs = new AudioTrackAttributes
    {
        sampleRate = new MediaRational
        {
            numerator = AudioSettings.outputSampleRate,
            denominator = 1
        },
        channelCount = (ushort)speakerMode,
        language = ""
    };

    videoInput.BeginRecording();

    videoAttrs = new VideoTrackAttributes
    {
        frameRate = new MediaRational(recorder.frameRate),
        width = (uint)videoInput.outputWidth,
        height = (uint)videoInput.outputHeight,
        includeAlpha = false
    };

    encodedFilePath = Path.Combine(outputPath, GetFileName());
    Debug.Log("[VimeoRecorder] Recording to " + GetFileName());

    // Audio capture is only supported for real-time recordings.
    if (!recorder.realTime)
    {
        recorder.recordAudio = false;
    }

    if (recorder.recordAudio)
    {
#if UNITY_2018_1_OR_NEWER
        audioInput.BeginRecording();
        encoder = new UnityEditor.Media.MediaEncoder(encodedFilePath, videoAttrs, audioAttrs);
#else
        encoder = new UnityEditor.Media.MediaEncoder(encodedFilePath, videoAttrs);
#endif
    }
    else
    {
        encoder = new UnityEditor.Media.MediaEncoder(encodedFilePath, videoAttrs);
    }
}
/// <summary>
/// Builds the full ffmpeg command line for an HLS transcode of <paramref name="state"/>.
/// </summary>
/// <param name="outputPath">Path of the output playlist file.</param>
/// <param name="state">The active stream state.</param>
/// <param name="isEncoding">If true we're actually starting ffmpeg (affects the start number).</param>
/// <returns>The ffmpeg argument string.</returns>
protected override string GetCommandLineArguments(string outputPath, StreamState state, bool isEncoding)
{
    var threads = GetNumberOfThreads(state, false);

    var inputModifier = GetInputModifier(state, false);

    // If isEncoding is true we're actually starting ffmpeg
    var startNumberParam = isEncoding ? GetStartNumber(state).ToString(UsCulture) : "0";

    var toTimeParam = string.Empty;
    var timestampOffsetParam = string.Empty;

    // Only apply an output timestamp offset for seeked video output that is
    // actually transcoded (not stream-copied) and not using copyts.
    if (state.IsOutputVideo && !EnableCopyTs(state) && !string.Equals(state.OutputVideoCodec, "copy", StringComparison.OrdinalIgnoreCase) && (state.Request.StartTimeTicks ?? 0) > 0)
    {
        timestampOffsetParam = " -output_ts_offset " + MediaEncoder.GetTimeParameter(state.Request.StartTimeTicks ?? 0).ToString(CultureInfo.InvariantCulture);
    }

    var mapArgs = state.IsOutputVideo ? GetMapArgs(state) : string.Empty;

    // NOTE(review): hard-coded off — kept as an experiment switch for ffmpeg's
    // generic segment muxer instead of the hls muxer.
    var enableGenericSegmenter = false;

    if (enableGenericSegmenter)
    {
        // Segment file name pattern: <playlist-name>%d<segment-extension>.
        var outputTsArg = Path.Combine(Path.GetDirectoryName(outputPath), Path.GetFileNameWithoutExtension(outputPath)) + "%d" + GetSegmentFileExtension(state);

        return(string.Format("{0} {10} {1} -map_metadata -1 -threads {2} {3} {4} {5} -f segment -max_delay 5000000 -avoid_negative_ts disabled -start_at_zero -segment_time {6} -segment_format mpegts -segment_list_type m3u8 -segment_start_number {7} -segment_list \"{8}\" -y \"{9}\"",
                             inputModifier,
                             GetInputArgument(state),
                             threads,
                             mapArgs,
                             GetVideoArguments(state),
                             GetAudioArguments(state),
                             state.SegmentLength.ToString(UsCulture),
                             startNumberParam,
                             outputPath,
                             outputTsArg,
                             toTimeParam
                             ).Trim());
    }

    // Default path: ffmpeg's hls muxer.
    return(string.Format("{0}{11} {1} -map_metadata -1 -threads {2} {3} {4}{5} {6} -max_delay 5000000 -avoid_negative_ts disabled -start_at_zero -hls_time {7} -start_number {8} -hls_list_size {9} -y \"{10}\"",
                         inputModifier,
                         GetInputArgument(state),
                         threads,
                         mapArgs,
                         GetVideoArguments(state),
                         timestampOffsetParam,
                         GetAudioArguments(state),
                         state.SegmentLength.ToString(UsCulture),
                         startNumberParam,
                         state.HlsListSize.ToString(UsCulture),
                         outputPath,
                         toTimeParam
                         ).Trim());
}
/// <summary>
/// Validates the form, then runs a simple H264/AAC encode of the selected
/// source file on a background thread. (async void is acceptable here: this
/// is a top-level UI event handler.)
/// </summary>
private async void RunSimpleButton_Click(object sender, RoutedEventArgs e)
{
    if (!Validate())
    {
        return;
    }

    ProcessStartOptions options = new ProcessStartOptions(FFmpegDisplayMode.Interface, "Encoding to H264/AAC (Simple)");

    // Capture UI values before leaving the UI thread.
    string source = SourceTextBox.Text;
    string destination = DestinationTextBox.Text;

    await Task.Run(() => MediaEncoder.Encode(source, "h264", "aac", null, destination, options));
}
/// <summary>
/// Adds an audio stream to the writer if none exists yet; subsequent calls are
/// no-ops, so the writer only ever carries a single audio track.
/// </summary>
/// <param name="dstChannels">output channel count</param>
/// <param name="dstSampleRate">output sample rate in Hz</param>
/// <returns>This writer, for fluent chaining.</returns>
public Mp4VideoWriter AddAudio(int dstChannels, int dstSampleRate)
{
    // IDIOM: Any() instead of Where(...).Count() == 0 — stops at the first match.
    if (!writer.Any(_ => _.Codec.Type == AVMediaType.AVMEDIA_TYPE_AUDIO))
    {
        Channels = dstChannels;
        SampleRate = dstSampleRate;
        var stream = writer.AddStream(MediaEncoder.CreateAudioEncode(writer.Format, dstChannels, dstSampleRate));
        audioIndex = writer.Count - 1; // the stream just appended
        sampleConverter = new SampleConverter(stream.Codec);
    }
    return this;
}
/// <summary>
/// Finds a front-facing webcam, starts it, shows it on the background
/// RawImage, and prepares the media encoder for capture.
/// </summary>
private void Start()
{
    _defaultBackground = background.texture;
    WebCamDevice[] devices = WebCamTexture.devices;

    if (devices.Length == 0)
    {
        Debug.Log("No camera could be found");
        _camAvailable = false;
        return;
    }

    // Loop over cameras and use the last front-facing one found.
    for (int i = 0; i < devices.Length; i++)
    {
        if (devices[i].isFrontFacing)
        {
            _frontCam = new WebCamTexture(devices[i].name, Screen.width, Screen.height);
        }
    }

    if (_frontCam == null)
    {
        Debug.Log("No front camera found");
        // FIX: mark the camera unavailable on this failure path as well,
        // consistent with the "no devices" branch above.
        _camAvailable = false;
        return;
    }

    // Startup webcam and save texture onto the RawImage GameObject.
    _frontCam.Play();
    background.texture = _frontCam;

    // Path for the finished mp4 file.
    _filename = string.Format("TestVideo_{0}.mp4", System.DateTime.Now.ToFileTime());
    _filepath = System.IO.Path.Combine(System.IO.Path.GetTempPath(), _filename);
    _filepath = _filepath.Replace("/", @"\");

    // Setup the Video Attributes for the Media Encoder.
    Debug.Log("setting up");
    _videoTrackAttributes = new VideoTrackAttributes();
    _videoTrackAttributes.width = (uint)_frontCam.width;
    _videoTrackAttributes.height = (uint)_frontCam.height;
    _currentTexture = new Texture2D(_frontCam.width, _frontCam.height);
    _videoTrackAttributes.frameRate = new MediaRational(30);
    _videoTrackAttributes.includeAlpha = false;
    _mediaEncoder = new MediaEncoder(_filepath, _videoTrackAttributes);
    Debug.Log("Camera is setup");

    _camAvailable = true;
}
/// <summary>
/// Adds a video stream to the writer if none exists yet; subsequent calls are
/// no-ops, so the writer only ever carries a single video track.
/// </summary>
/// <param name="width">video width in pixels</param>
/// <param name="height">video height in pixels</param>
/// <param name="fps">frames per second</param>
/// <returns>This writer, for fluent chaining.</returns>
public Mp4VideoWriter AddVideo(int width, int height, int fps)
{
    // IDIOM: Any() instead of Where(...).Count() == 0 — stops at the first match.
    if (!writer.Any(_ => _.Codec.Type == AVMediaType.AVMEDIA_TYPE_VIDEO))
    {
        Height = height;
        Width = width;
        FPS = fps;
        var st = writer.AddStream(MediaEncoder.CreateVideoEncode(writer.Format, width, height, fps));
        // Use the Count property (not the Count() extension) for consistency
        // with AddAudio and to avoid an enumeration.
        videoIndex = writer.Count - 1;
        pixelConverter = new PixelConverter(st.Codec);
    }
    return this;
}
/// <summary>
/// Builds the main window, creates the test encoder, and starts playback of
/// the sample clip at 1280x720.
/// </summary>
public MainWindow() : base(Gtk.WindowType.Toplevel)
{
    Build();

    encoder = new MediaEncoder("test.avi");

    vlcplayer.width = 1280;
    vlcplayer.height = 720;
    vlcplayer.Play("big_buck_bunny_480p_surround-fix.avi");
    // Alternative: play from an RTSP stream instead of a local file.
    //vlcplayer.PlayFromUri(new Uri("rtsp://*****:*****@192.168.1.2/1/"));
}
/// <summary>
/// Runs Deshaker pass 1 over a single segment of the input file and cleans up
/// the temporary script/output afterwards.
/// </summary>
/// <param name="settings">The encoder settings (provides script/temp paths).</param>
/// <param name="inputFile">The file to prescan.</param>
/// <param name="segment">Index of the segment being processed.</param>
/// <param name="jobStart">First frame of the whole job (for resume position).</param>
/// <param name="frameStart">First frame of this segment.</param>
/// <param name="frameEnd">Last frame of this segment.</param>
/// <param name="jobOptions">Process options reused across segments.</param>
/// <returns>The completion status of the pass.</returns>
private CompletionStatus GenerateDeshakerLogSegment(MediaEncoderSettings settings, string inputFile, int segment, long jobStart, long frameStart, long frameEnd, ProcessStartOptions jobOptions)
{
    // Write Deshaker Pass 1 script to file.
    string scriptText = MediaEncoderScript.GenerateDeshakerScript(settings, inputFile, segment, frameStart, frameEnd);
    File.WriteAllText(settings.DeshakerScript, scriptText);

    // Run pass.
    jobOptions.IsMainTask = true;
    jobOptions.Title = "Running Deshaker Prescan";
    jobOptions.ResumePos = frameStart - jobStart;
    CompletionStatus status = MediaEncoder.ConvertToAvi(settings.DeshakerScript, settings.DeshakerTempOut, false, jobOptions);

    // Remove temporary files regardless of outcome.
    File.Delete(settings.DeshakerScript);
    File.Delete(settings.DeshakerTempOut);

    return status;
}
/// <summary>
/// Encodes specified audio file according to settings. The script file must already be written.
/// </summary>
/// <param name="settings">An object containing the encoding settings.</param>
/// <returns>The encoding completion status.</returns>
public static CompletionStatus EncodeAudio(MediaEncoderSettings settings)
{
    CompletionStatus Result = CompletionStatus.Success;
    string WavFile = PathManager.GetAudioFile(settings.JobIndex, AudioActions.Wav);
    ProcessStartOptions Options = new ProcessStartOptions(settings.JobIndex, "Exporting Audio", false).TrackProcess(settings);

    // Step 1: export the source audio to WAV unless a previous run already produced one.
    if (!File.Exists(WavFile))
    {
        EncoderBusiness.SaveAudioToWav(settings, WavFile, Options);

        // A cancelled export leaves a partial WAV behind; delete it before bailing out.
        if (settings.CompletionStatus == CompletionStatus.Cancelled)
        {
            File.Delete(WavFile);
            return(CompletionStatus.Cancelled);
        }

        // No WAV produced and not cancelled: treat as an error and cancel the job.
        if (!File.Exists(WavFile))
        {
            settings.Cancel();
            return(CompletionStatus.Error);
        }
    }

    // Step 2: encode the WAV to the requested format unless the destination already exists.
    string DestFile = PathManager.GetAudioFile(settings.JobIndex, settings.AudioAction);
    if (!File.Exists(DestFile))
    {
        Options.Title = "Encoding Audio";
        if (settings.AudioAction == AudioActions.Opus)
        {
            // Opus is encoded with the standalone opusenc tool rather than through MediaEncoder.
            string Args = string.Format(@"--bitrate {0} ""{1}"" ""{2}""", settings.AudioQuality, WavFile, DestFile);
            FFmpegProcess Worker = new FFmpegProcess(Options);
            Result = Worker.Run("Encoder\\opusenc.exe", Args);
        }
        else if (settings.AudioAction == AudioActions.Aac || settings.AudioAction == AudioActions.Flac)
        {
            Result = MediaEncoder.Encode(WavFile, null, settings.AudioAction == AudioActions.Flac ? "flac" : "aac", string.Format("-b:a {0}k", settings.AudioQuality), DestFile, Options);
        }
    }

    // On failure (or missing output), remove any partial file and mark the job cancelled.
    if (Result != CompletionStatus.Success || !File.Exists(DestFile))
    {
        File.Delete(DestFile);
        settings.Cancel();
    }
    return(Result);
}
/// <summary>
/// Finalizes the recording session: releases the encoder and, when writing
/// into the project's Assets folder, refreshes the asset database so the new
/// file is detected.
/// </summary>
public override void EndRecording(RecordingSession session)
{
    base.EndRecording(session);

    if (m_Encoder != null)
    {
        m_Encoder.Dispose();
        m_Encoder = null;
    }

    // When adding a file to Unity's assets directory, trigger a refresh so it is detected.
    if (m_Settings.m_DestinationPath.root == OutputPath.ERoot.AssetsPath)
    {
        AssetDatabase.Refresh();
    }
}
/// <summary>
/// Finalizes the recording session: releases the encoder and, when the output
/// lands inside Assets or StreamingAssets, refreshes the asset database so the
/// new file is detected.
/// </summary>
public override void EndRecording(RecordingSession session)
{
    base.EndRecording(session);

    if (m_Encoder != null)
    {
        m_Encoder.Dispose();
        m_Encoder = null;
    }

    // When adding a file to Unity's assets directory, trigger a refresh so it is detected.
    var root = settings.fileNameGenerator.root;
    if (root == OutputPath.Root.AssetsFolder || root == OutputPath.Root.StreamingAssets)
    {
        AssetDatabase.Refresh();
    }
}
/// <summary>
/// Encodes video and audio in parallel: the video is encoded to H264 on a
/// background task while the audio track is extracted and encoded to AAC on
/// this thread; on success both are muxed into <paramref name="dst"/>.
/// </summary>
/// <param name="src">The source media file.</param>
/// <param name="dst">The destination file path.</param>
/// <returns>The completion status of the overall job.</returns>
private CompletionStatus ExecuteComplex(string src, string dst)
{
    // Intermediate artifact paths derived from the destination name.
    string DstEncode = GetPathWithoutExtension(dst) + "_.mp4";
    string DstExtract = GetPathWithoutExtension(dst) + "_.mkv";
    string DstAac = GetPathWithoutExtension(dst) + "_.aac";
    jobId++;
    CompletionStatus Result;

    FFmpegConfig.UserInterfaceManager.Start(jobId, "Encoding to H264/AAC (Complex)");

    // Kick off the video encode on a background task. Keep a handle on the
    // spawned ffmpeg process (via the Started event) so it can be cancelled
    // if the audio chain fails.
    ProcessStartOptions OptionsMain = new ProcessStartOptions(jobId, "", true);
    FFmpegProcess ProcessMain = null;
    OptionsMain.Started += (sender, e) => {
        ProcessMain = e.Process;
    };
    Task <CompletionStatus> TaskMain = Task.Run(() => MediaEncoder.Encode(src, "h264", null, "", DstEncode, OptionsMain));

    // Meanwhile extract and encode the audio on the current thread.
    ProcessStartOptions Options = new ProcessStartOptions(jobId, "Extracting Audio", false);
    Result = MediaMuxer.ExtractAudio(src, DstExtract, Options);
    if (Result == CompletionStatus.Success)
    {
        Options.Title = "Encoding Audio";
        Result = MediaEncoder.Encode(DstExtract, null, "aac", null, DstAac, Options);
    }

    // If the audio chain failed, abort the still-running video encode.
    if (Result != CompletionStatus.Success)
    {
        ProcessMain?.Cancel();
    }

    TaskMain.Wait();
    CompletionStatus Result2 = TaskMain.Result;

    // Mux only when both the audio chain and the video encode succeeded.
    if (Result == CompletionStatus.Success && Result2 == CompletionStatus.Success)
    {
        Options.Title = "Muxing Audio and Video";
        Result = MediaMuxer.Muxe(DstEncode, DstAac, dst, Options);
    }

    // Clean up intermediates regardless of outcome.
    File.Delete(DstEncode);
    File.Delete(DstExtract);
    File.Delete(DstAac);
    FFmpegConfig.UserInterfaceManager.Stop(jobId);
    return(Result);
}
/// <summary>
/// Gets the media info.
/// </summary>
/// <param name="item">The item.</param>
/// <param name="isoMount">The iso mount.</param>
/// <param name="cancellationToken">The cancellation token.</param>
/// <returns>Task{MediaInfoResult}.</returns>
/// <exception cref="System.ArgumentNullException">inputPath
/// or
/// cache</exception>
protected async Task<MediaInfoResult> GetMediaInfo(BaseItem item, IIsoMount isoMount, CancellationToken cancellationToken)
{
    cancellationToken.ThrowIfCancellationRequested();

    // Default: probe the item path (or the mounted ISO path) as a plain audio file.
    var type = InputType.AudioFile;
    var inputPath = isoMount == null ? new[] { item.Path } : new[] { isoMount.MountedPath };

    // Videos may need a different input form (resolved by the helper).
    var video = item as Video;
    if (video != null)
    {
        inputPath = MediaEncoderHelpers.GetInputArgument(video, isoMount, out type);
    }

    return await MediaEncoder.GetMediaInfo(inputPath, type, cancellationToken).ConfigureAwait(false);
}
/// <summary>
/// recording audio.
/// <para>
/// first set inputDeviceName = null, you will get inputDeviceName list in vs output,
/// </para>
/// <para>
/// then set inputDeviceName to your real device name and run again,you will get a audio output.
/// </para>
/// <para>
/// if you want stop record, exit console;
/// </para>
/// <para>ffmpeg </para>
/// </summary>
/// <param name="outputFile">destination audio file</param>
/// <param name="inputDeviceName">dshow audio device name; null just lists devices</param>
public RecordingAudio(string outputFile, string inputDeviceName = null)
{
    // console output
    FFmpegHelper.SetupLogging(logWrite: _ => Console.Write(_));
    // register all device
    FFmpegHelper.RegisterDevice();

    var dshowInput = InFormat.Get("dshow");
    // list all "dshow" device at console output, ffmpeg does not support direct reading of device names
    MediaDevice.PrintDeviceInfos(dshowInput, "list", MediaDevice.ListDevicesOptions);

    if (string.IsNullOrWhiteSpace(inputDeviceName))
    {
        return;
    }

    // get your audio input device name from console output
    // NOTE: DO NOT delete "audio="
    using (MediaReader reader = new MediaReader($"audio={inputDeviceName}", dshowInput))
    using (MediaWriter writer = new MediaWriter(outputFile))
    {
        var stream = reader.Where(_ => _.Codec.Type == AVMediaType.AVMEDIA_TYPE_AUDIO).First();

        writer.AddStream(MediaEncoder.CreateAudioEncode(writer.Format, stream.Codec.AVCodecContext.channels, stream.Codec.AVCodecContext.sample_rate));
        writer.Initialize();

        AudioFrame dstFrame = AudioFrame.CreateFrameByCodec(writer[0].Codec);
        SampleConverter converter = new SampleConverter(dstFrame);

        long pts = 0;
        foreach (var packet in reader.ReadPacket())
        {
            foreach (var frame in stream.ReadFrame(packet))
            {
                foreach (var dstframe in converter.Convert(frame))
                {
                    // FIX: operate on the frame yielded by Convert() ('dstframe');
                    // the original ignored the loop variable and touched the
                    // converter's backing 'dstFrame' instead.
                    pts += dstframe.AVFrame.nb_samples;
                    dstframe.Pts = pts;
                    foreach (var dstpacket in writer[0].WriteFrame(dstframe))
                    {
                        writer.WritePacket(dstpacket);
                    }
                }
            }
        }

        writer.FlushMuxer();
    }
}
/// <summary>
/// Encodes the captured frames plus the recorded audio clip into the output
/// file, then stops the microphone.
/// </summary>
public void MakeVideo()
{
    // Pull the whole recorded clip into a managed buffer once.
    float[] soundArray = new float[sounds.samples];
    sounds.GetData(soundArray, 0);

    using (var encoder = new MediaEncoder(encodedFilePath, videoAttrs, audioAttrs))
    using (var audioBuffer = new NativeArray<float>(sampleFramesPerVideoFrame, Allocator.Temp))
    {
        for (int i = 0; i < images.Count; ++i)
        {
            encoder.AddFrame(images[i]);

            // FIX: the old code called sounds.GetData(audioBuffer.ToArray(), 0),
            // which filled a throwaway copy and left the NativeArray zeroed, so
            // the same silent buffer was encoded for every frame. Copy this
            // frame's slice of the clip into the buffer instead (zero-padded
            // past the end of the clip).
            int offset = i * sampleFramesPerVideoFrame;
            for (int s = 0; s < sampleFramesPerVideoFrame; s++)
            {
                int src = offset + s;
                audioBuffer[s] = src < soundArray.Length ? soundArray[src] : 0f;
            }
            encoder.AddSamples(audioBuffer);
        }
    }

    Microphone.End("");
}
/// <summary>
/// Encodes the textures collected in VideoInfo into an .mp4 under
/// Resources/ConvertVideo.
/// </summary>
static public void EncodeVideo()
{
    Debug.Log("Enconde Start");
    VideoInfo info = VideoInfo.videoInfo;

    VideoTrackAttributes videoAttr = new VideoTrackAttributes
    {
        frameRate = new MediaRational(info.FrameRate),
        width = info.width,
        height = info.height,
        includeAlpha = info.includeAlpha
    };

    AudioTrackAttributes audioAttr = new AudioTrackAttributes
    {
        sampleRate = new MediaRational(48000),
        channelCount = 2,
        language = "fr"
    };

    // Audio samples per video frame (channels * sampleRate / frameRate).
    int sampleFramesPerVideoFrame = audioAttr.channelCount * audioAttr.sampleRate.numerator / videoAttr.frameRate.numerator;

    string encodedFilePath = Path.Combine(Application.dataPath + "/Resources/ConvertVideo", "my_movie.mp4");

    // FIX: removed an unused Texture2D allocation and dead commented-out code.
    // NOTE(review): the audio buffer is allocated but no samples are ever
    // added, so the audio track stays empty — confirm whether audio is
    // actually intended here.
    using (MediaEncoder encoder = new MediaEncoder(encodedFilePath, videoAttr, audioAttr))
    using (NativeArray<float> audioBuffer = new NativeArray<float>(sampleFramesPerVideoFrame, Allocator.Temp))
    {
        foreach (Texture2D frameTex in info.TexList)
        {
            encoder.AddFrame(frameTex);
        }
    }
}
/// <summary>
/// Reads the [Output] section of the config file and attaches/configures the
/// live media encoder component accordingly.
/// </summary>
public void InitFromConfig()
{
    LoadConfig();

    bufferMaterial = new Material(Shader.Find("RenderProcess/Output"));

    // Output geometry and timing from the config file.
    int width = int.Parse(config["Output"]["width"]);
    int height = int.Parse(config["Output"]["height"]);
    int frame = int.Parse(config["Output"]["frame"]);
    OutputConf = OutputConfig.BuildConfig(width, height, frame, config["Output"]["outputUrl"].ToString());

    // Attach and configure the encoder component for live streaming.
    encoder = gameObject.AddComponent<MediaEncoder>();
    encoder.videoCaptureType = NativeEncoder.VIDEO_CAPTURE_TYPE.LIVE;
    encoder.liveVideoWidth = width;
    encoder.liveVideoHeight = height;
    encoder.liveVideoBitRate = 400000; // NOTE(review): hard-coded bitrate — confirm whether this should come from config
    encoder.liveVideoFrameRate = OutputConf.FPS;
    encoder.liveStreamUrl = OutputConf.OutputPath;
}
/// <summary>
/// Gets the input argument.
/// </summary>
/// <param name="item">The item.</param>
/// <param name="isoMount">The iso mount.</param>
/// <returns>System.String.</returns>
protected string GetInputArgument(BaseItem item, IIsoMount isoMount)
{
    // Default: probe the item path as a plain audio file.
    var type = InputType.AudioFile;
    var inputPath = new[] { item.Path };

    // Videos may need a different input form, except for ISOs that are not mounted.
    var video = item as Video;
    if (video != null && !(video.VideoType == VideoType.Iso && isoMount == null))
    {
        inputPath = MediaEncoderHelpers.GetInputArgument(video, isoMount, out type);
    }

    return MediaEncoder.GetInputArgument(inputPath, type);
}
/// <summary>
/// Releases the encoder and, when the output lands inside the project's
/// Assets or StreamingAssets folder, refreshes the asset database so the new
/// file shows up.
/// </summary>
protected override void DisposeEncoder()
{
    base.DisposeEncoder();

    if (m_Encoder == null)
    {
        return;
    }

    m_Encoder.Dispose();
    m_Encoder = null;

    // When adding a file to Unity's assets directory, trigger a refresh so it is detected.
    var root = settings.fileNameGenerator.root;
    if (root == OutputPath.Root.AssetsFolder || root == OutputPath.Root.StreamingAssets)
    {
        AssetDatabase.Refresh();
    }
}