/// <inheritdoc />
        protected override IEnumerable<AssetItem> CreateAssets(AssetTemplateGeneratorParameters parameters)
        {
            var files = parameters.Tags.Get(SourceFilesPathKey);

            if (files == null)
            {
                return base.CreateAssets(parameters);
            }

            var importedAssets = new List<AssetItem>();

            foreach (var file in files)
            {
                using (var media = new FFmpegMedia())
                {
                    media.Open(file.ToWindowsPath());

                    var videoStream = media.Streams.OfType<VideoStream>().FirstOrDefault();
                    if (videoStream != null)
                    {
                        var videoItem = ImportVideo(file, videoStream);
                        importedAssets.Add(videoItem);
                    }
                }
            }

            return MakeUniqueNames(importedAssets);
        }
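
For reference, the probing step used by the importer above can be isolated into a small helper. This is only a sketch, assuming nothing beyond the FFmpegMedia calls already shown in these examples (Open, Streams) and the VideoStream properties used further below (Width, Height, Duration); the helper name is hypothetical.

        // Hypothetical helper, for illustration only: open a file and report its first video stream.
        private static void ProbeFirstVideoStream(string windowsPath)
        {
            using (var media = new FFmpegMedia())
            {
                media.Open(windowsPath);

                var videoStream = media.Streams.OfType<VideoStream>().FirstOrDefault();
                if (videoStream == null)
                {
                    Console.WriteLine("No video stream found.");
                    return;
                }

                Console.WriteLine($"Video: {videoStream.Width}x{videoStream.Height}, duration {videoStream.Duration}");
            }
        }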
Example #2
        /// <inheritdoc />
        protected override IEnumerable<AssetItem> CreateAssets(AssetTemplateGeneratorParameters parameters)
        {
            var importedAssets = new List<AssetItem>();

            foreach (var assetItem in base.CreateAssets(parameters))
            {
                if (assetItem.Asset is SoundAsset soundAsset)
                {
                    using (var media = new FFmpegMedia())
                    {
                        media.Open(soundAsset.Source.ToWindowsPath());
                        foreach (var audioTrack in media.Streams.OfType<AudioStream>().ToList())
                        {
                            var assetCopy = AssetCloner.Clone(soundAsset);
                            assetCopy.Index      = audioTrack.Index;
                            assetCopy.SampleRate = audioTrack.SampleRate;

                            importedAssets.Add(new AssetItem(assetItem.Location + (audioTrack.Index > 0 ? " track " + audioTrack.Index : ""), assetCopy));
                        }
                    }
                }
            }

            return MakeUniqueNames(importedAssets);
        }
Example #3
        /// <inheritdoc />
        protected override IEnumerable<AssetItem> CreateAssets(AssetTemplateGeneratorParameters parameters)
        {
            var importedAssets = new List<AssetItem>();

            foreach (var assetItem in base.CreateAssets(parameters))
            {
                if (assetItem.Asset is SoundAsset soundAsset)
                {
                    using (var media = new FFmpegMedia())
                    {
                        media.Open(soundAsset.Source.ToWindowsPath());
                        var audioStreams = media.Streams.OfType<AudioStream>().ToList();
                        foreach (var audioTrack in audioStreams)
                        {
                            var assetCopy = AssetCloner.Clone(soundAsset);
                            assetCopy.Index      = audioTrack.Index;
                            assetCopy.SampleRate = audioTrack.SampleRate;

                            // If there's more than one stream, append the track index to the asset name
                            var fileLocation = audioStreams.Count > 1
                                ? (UFile)(assetItem.Location + " track " + audioTrack.Index)
                                : assetItem.Location;
                            importedAssets.Add(new AssetItem(fileLocation, assetCopy));
                        }
                    }
                }
            }

            return MakeUniqueNames(importedAssets);
        }
Example #4
        public void Start()
        {
            Break = false;

            audioBuffer = new CircularBuffer<MediaData>();
            videoBuffer = new CircularBuffer<MediaData>();

            ffmpegMedia = new FFmpegMedia();

            FFmpegMedia.LogDataReceived += (log) => { Logger.Write(log, EnumLoggerType.LogFile); };

            inVideoParams = MediaDevice.ONVIF.GetInputCodecParams();

            outVideoParams = new CodecParams(CodecType.MPEG4, inVideoParams.Width, inVideoParams.Height);

            ffmpegMedia.VideoDecoderParams = inVideoParams;
            ffmpegMedia.VideoEncoderParams = outVideoParams;

            string path = Path.Combine(Environment.GetFolderPath(Environment.SpecialFolder.MyVideos), "PanasonicVideo");

            if (!Directory.Exists(path))
            {
                Directory.CreateDirectory(path);
            }

            string file = String.Format(@"{0}_{1}x{2}_to_{3}_{4}x{5}_{6:yyyy-MM-dd_HH-mm-ss}.{7}",
                                        inVideoParams.ID, inVideoParams.Width, inVideoParams.Height,
                                        outVideoParams.ID, outVideoParams.Width, outVideoParams.Height,
                                        DateTime.Now,
                                        "mkv");

            string filepath = Path.Combine(path, file);

            ffmpegMedia.OutputFilename = filepath;
            //ffmpegProcessor.Start();

            ffmpegMedia.VideoFrameReceived += ProcessVideoFrame;
            ffmpegMedia.AudioFrameReceived += ProcessAudioFrame;

            videoEvent = new AutoResetEvent(true);
            audioEvent = new AutoResetEvent(true);

            videoWorker = new Thread(ProcessVideo);
            audioWorker = new Thread(ProcessAudio);

            ffmpegMedia.Open();

            videoWorker.Start();
            audioWorker.Start();

            //videoWorker = Task.Factory.StartNew(() => ProcessVideo());
            //audioWorker = Task.Factory.StartNew(() => ProcessAudio());
        }
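
The ProcessVideoFrame and ProcessVideo members referenced above are not shown in these examples. A plausible producer/consumer pairing is sketched below; the handler signature and the Enqueue/TryDequeue methods on CircularBuffer<MediaData> are assumptions made for illustration, not the actual API.

        // Sketch only: the real bodies are not part of these examples.
        // The MediaData parameter and the Enqueue/TryDequeue calls are assumed.
        private void ProcessVideoFrame(MediaData frame)
        {
            videoBuffer.Enqueue(frame); // producer side: store the decoded frame
            videoEvent.Set();           // wake the consumer thread
        }

        private void ProcessVideo()
        {
            while (!Break)
            {
                videoEvent.WaitOne();   // wait until a frame arrives or Stop() signals the event

                MediaData frame;
                while (!Break && videoBuffer.TryDequeue(out frame))
                {
                    // consume the frame here (encode, display, write to the output file, ...)
                }
            }
        }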
Example #5
        public void Start()
        {
            FFmpegDecoder = new FFmpegMedia();

            FFmpegMedia.LogDataReceived += (log) => { Logger.Write(log, EnumLoggerType.LogFile); };

            DecoderParams = MediaDevice.ONVIF.GetInputCodecParams();

            FFmpegDecoder.VideoDecoderParams = DecoderParams;

            FFmpegDecoder.VideoFrameReceived += ProcessVideoFrame;
            FFmpegDecoder.AudioFrameReceived += ProcessAudioFrame;

            FFmpegDecoder.Open();
        }
Example #6
        public void Stop()
        {
            if (ffmpegMedia != null)
            {
                //audioBuffer.IsComplete = true;
                //videoBuffer.IsComplete = true;
                Break = true;

                videoEvent.Set();
                audioEvent.Set();

                //Task.WaitAll(videoWorker, audioWorker);

                //videoWorker.Join();
                //audioWorker.Join();

                videoWorker.Abort();
                audioWorker.Abort();

                ffmpegMedia.Close();
                ffmpegMedia = null;
            }
        }
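
Thread.Abort is brittle (and unsupported on modern .NET); the commented-out Join calls hint at the intended alternative. A cooperative variant is sketched below, assuming the worker loops check Break after every wait, as in the ProcessVideo sketch following the Start() example above.

        // Sketch of a cooperative shutdown: signal the workers and wait for them to exit,
        // instead of aborting the threads.
        public void Stop()
        {
            if (ffmpegMedia == null)
                return;

            Break = true;       // ask both worker loops to finish

            videoEvent.Set();   // wake the workers so they can observe Break
            audioEvent.Set();

            videoWorker.Join(); // wait for both workers to end cleanly
            audioWorker.Join();

            ffmpegMedia.Close();
            ffmpegMedia = null;
        }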
Example #7
        partial void InitializeMediaImpl(string url, long startPosition, long length, ref bool succeeded)
        {
            succeeded = false;

            if (media != null)
            {
                throw new InvalidOperationException();
            }

            try
            {
                // Create and open the media
                media = new FFmpegMedia(GraphicsDevice);
                media.DisposeBy(this);
                media.Open(url, startPosition, length);
                // Get the first video stream
                stream = media.Streams.OfType<VideoStream>().FirstOrDefault();
                if (stream == null)
                {
                    ReleaseMedia();
                    Duration = TimeSpan.Zero;
                    Logger.Warning("This media doesn't contain a video stream.");
                    return;
                }

                Duration = stream.Duration;
                AllocateVideoTexture(stream.Width, stream.Height);
            }
            catch
            {
                ReleaseMedia();
                return;
            }

            succeeded = true;
        }
Example #8
            /// <inheritdoc />
            protected override async Task<ResultStatus> DoCommandOverride(ICommandContext commandContext)
            {
                VideoAsset videoAsset = Parameters.Video;

                try
                {
                    // Get path to ffmpeg
                    var ffmpeg = ToolLocator.LocateTool("ffmpeg.exe")?.ToWindowsPath() ?? throw new AssetException("Failed to compile a video asset, ffmpeg was not found.");

                    // Get absolute path of asset source on disk
                    var assetDirectory = videoAsset.Source.GetParent();
                    var assetSource    = UPath.Combine(assetDirectory, videoAsset.Source);

                    //=====================================================================================
                    // Get the info from the video codec

                    // Check whether we need to re-encode the video
                    var mustReEncodeVideo    = false;
                    var sidedataStripCommand = "";

                    // check that the video file format is supported
                    if (Parameters.Platform == PlatformType.Windows && videoAsset.Source.GetFileExtension() != ".mp4")
                    {
                        mustReEncodeVideo = true;
                    }

                    // Use an FFmpegMedia object to inspect the source (more details need checking before it can be relied on fully)
                    VideoStream videoStream = null;
                    AudioStream audioStream = null;
                    FFmpegUtils.PreloadLibraries();
                    FFmpegUtils.Initialize();
                    using (var media = new FFmpegMedia())
                    {
                        media.Open(assetSource.ToWindowsPath());

                        // Get the first video stream
                        videoStream = media.Streams.OfType<VideoStream>().FirstOrDefault();
                        if (videoStream == null)
                        {
                            throw new AssetException("Failed to compile a video asset. Did not find the VideoStream from the media.");
                        }

                        // On Windows, the MediaEngineEx player only decodes the first video when the video is detected as stereoscopic,
                        // so we remove the tags inside the video in order to ensure the same behavior as on other platforms (side-by-side decoded texture).
                        // Unfortunately it does not seem possible to disable this behavior from the MediaEngineEx API.
                        if (Parameters.Platform == PlatformType.Windows && media.IsStereoscopicVideo(videoStream))
                        {
                            mustReEncodeVideo    = true;
                            sidedataStripCommand = "-vf sidedata=delete";
                        }

                        // Get the first audio stream
                        audioStream = media.Streams.OfType<AudioStream>().FirstOrDefault();
                    }
                    Size2 videoSize = new Size2(videoStream.Width, videoStream.Height);

                    //check the format
                    if (ListSupportedCodecNames != null)
                    {
                        if (Array.IndexOf(ListSupportedCodecNames, videoStream.Codec) < 0)
                        {
                            mustReEncodeVideo = true;
                        }
                    }

                    // check if the video needs to be trimmed
                    var videoDuration = videoAsset.VideoDuration;
                    if (videoDuration.Enabled && (videoDuration.StartTime != TimeSpan.Zero ||
                                                  videoDuration.EndTime.TotalSeconds < videoStream.Duration.TotalSeconds - MathUtil.ZeroToleranceDouble))
                    {
                        mustReEncodeVideo = true;
                    }

                    //check the video target and source resolution
                    Size2 targetSize;
                    if (videoAsset.IsSizeInPercentage)
                    {
                        targetSize = new Size2((int)(videoSize.Width * videoAsset.Width / 100.0f), (int)(videoSize.Height * videoAsset.Height / 100.0f));
                    }
                    else
                    {
                        targetSize = new Size2((int)(videoAsset.Width), (int)(videoAsset.Height));
                    }

                    // ensure that the size is a multiple of 2 (ffmpeg cannot output video not multiple of 2, at least with this codec)
                    if (targetSize.Width % 2 == 1)
                    {
                        targetSize.Width += 1;
                    }
                    if (targetSize.Height % 2 == 1)
                    {
                        targetSize.Height += 1;
                    }

                    if (targetSize.Width != videoSize.Width || targetSize.Height != videoSize.Height)
                    {
                        mustReEncodeVideo = true;
                    }

                    //check the audio settings
                    int  audioChannelsTarget       = audioStream == null ? 0 : audioStream.ChannelCount;
                    bool mustReEncodeAudioChannels = false;
                    if (videoAsset.IsAudioChannelMono)
                    {
                        audioChannelsTarget = 1;
                        if (audioStream != null && audioStream.ChannelCount != audioChannelsTarget)
                        {
                            mustReEncodeAudioChannels = true;
                            mustReEncodeVideo         = true;
                        }
                    }

                    // Execute ffmpeg to convert source to H.264
                    string tempFile = null;
                    try
                    {
                        if (mustReEncodeVideo)
                        {
                            string targetCodecFormat = "h264";  // hardcoded for now
                            commandContext.Logger.Info(string.Format("Video Asset Compiler: \"{0}\". Re-encode the Video. Format:{1}, Size:{2}x{3}. Audio Channels:{4}",
                                                                     videoAsset.Source.GetFileName(), targetCodecFormat, targetSize.Width, targetSize.Height, audioChannelsTarget));

                            tempFile = Path.GetTempFileName();
                            string channelFlag = "";
                            if (mustReEncodeAudioChannels)
                            {
                                channelFlag = string.Format(" -ac {0}", audioChannelsTarget);
                            }

                            var startTime       = videoDuration.StartTime;
                            var duration        = videoDuration.EndTime - videoDuration.StartTime;
                            var trimmingOptions = videoDuration.Enabled ?
                                                  $" -ss {startTime.Hours:D2}:{startTime.Minutes:D2}:{startTime.Seconds:D2}.{startTime.Milliseconds:D3}" +
                                                  $" -t {duration.Hours:D2}:{duration.Minutes:D2}:{duration.Seconds:D2}.{duration.Milliseconds:D3}":
                                                  "";

                            var commandLine = "  -hide_banner -loglevel error" +                       // hide most log output
                                              "  -nostdin" +                                           // no interaction (background process)
                                              $" -i \"{assetSource.ToWindowsPath()}\"" +               // input file
                                              $"{trimmingOptions}" +
                                              "  -f mp4 -vcodec " + targetCodecFormat +                // codec
                                              channelFlag +                                            // audio channels
                                              $"  -vf scale={targetSize.Width}:{targetSize.Height} " + // adjust the resolution
                                              sidedataStripCommand +                                   // strip the stereoscopic sidedata tag
                                                                                                       //" -an" + // no audio
                                                                                                       //" -pix_fmt yuv422p" + // pixel format (planar YUV 4:2:2, 16bpp, (1 Cr & Cb sample per 2x1 Y samples))
                                              $" -y \"{tempFile}\"";                                   // output file (always overwrite)
                            var ret = await ShellHelper.RunProcessAndGetOutputAsync(ffmpeg, commandLine, commandContext.Logger);

                            if (ret != 0 || commandContext.Logger.HasErrors)
                            {
                                throw new AssetException($"Failed to compile a video asset. ffmpeg failed to convert {assetSource}.");
                            }
                        }
                        else
                        {
                            commandContext.Logger.Info(string.Format("Video Asset Compiler: \"{0}\". No Re-encoding necessary",
                                                                     videoAsset.Source.GetFileName()));

                            // No re-encoding needed: use the source file as-is
                            tempFile = assetSource.ToWindowsPath();
                        }

                        var dataUrl = Url + "_Data";
                        var video   = new Video.Video
                        {
                            CompressedDataUrl = dataUrl,
                        };

                        // Make sure we don't compress h264 data
                        commandContext.AddTag(new ObjectUrl(UrlType.Content, dataUrl), Builder.DoNotCompressTag);

                        // Write the data
                        using (var reader = new BinaryReader(new FileStream(tempFile, FileMode.Open, FileAccess.Read)))
                            using (var outputStream = MicrothreadLocalDatabases.DatabaseFileProvider.OpenStream(dataUrl, VirtualFileMode.Create, VirtualFileAccess.Write, VirtualFileShare.Read, StreamFlags.Seekable))
                            {
                                // For now copy the whole file, 2 MB at a time
                                var length = reader.BaseStream.Length;
                                for (var position = 0L; position < length; position += 2 << 20)
                                {
                                    var buffer = reader.ReadBytes(2 << 20);
                                    outputStream.Write(buffer, 0, buffer.Length);
                                }
                            }

                        var assetManager = new ContentManager(MicrothreadLocalDatabases.ProviderService);
                        assetManager.Save(Url, video);

                        return ResultStatus.Successful;
                    }
                    finally
                    {
                        if (mustReEncodeVideo)
                        {
                            if (tempFile != null)
                            {
                                File.Delete(tempFile);
                            }
                        }
                    }
                }
                catch (AssetException)
                {
                    throw;
                }
                catch (Exception ex)
                {
                    throw new AssetException("Failed to compile a video asset. Unexpected exception.", ex);
                }
            }
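
For reference, with re-encoding, trimming, mono audio and the stereoscopic side-data strip all active, the arguments assembled above come out roughly as follows; the paths, times and sizes are placeholders, not values from the source. Note that ffmpeg normally honours only the last -vf option per stream, so in practice the scale and sidedata filters may need to be merged into a single filter chain.

            // Illustration only; actual values depend on the asset:
            // ffmpeg -hide_banner -loglevel error -nostdin -i "C:\assets\intro.avi"
            //        -ss 00:00:05.000 -t 00:00:30.000 -f mp4 -vcodec h264 -ac 1
            //        -vf scale=1280:720 -vf sidedata=delete -y "C:\Temp\tmpA1B2.tmp"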
Example #9
        partial void ReleaseMediaImpl()
        {
            media.RemoveDisposeBy(this);
            media.Dispose();
            media = null;
        }