        public void Setup()
        {
            _configManager     = Substitute.For <IConfigManager <FFmpegConfig> >();
            _argumentGenerator = Substitute.For <IFFmpegArgumentGenerator>();
            _jobRunner         = new MockJobRunner(_configManager, _argumentGenerator);

            _videoSource = new VideoStreamInfo()
            {
                Index = 0
            };
            _videoOutput = new VideoOutputStream()
            {
                SourceStreamIndex = 0
            };
            _transcodeJob = new TranscodeJob()
            {
                SourceInfo = new MediaInfo()
                {
                    FileName = "source",
                    Streams  = new List <StreamInfo>()
                    {
                        _videoSource
                    }
                },
                OutputFileName = "destination",
                Streams        = new List <OutputStream>()
                {
                    _videoOutput
                }
            };
        }
Example #2
        static MediaSocket ConfigureOutputSocket(Options opt)
        {
            MediaSocket socket = new MediaSocket();

            socket.File = opt.OutputFile;

            MediaPin pin = new MediaPin();

            socket.Pins.Add(pin);
            VideoStreamInfo vsi = new VideoStreamInfo();

            pin.StreamInfo = vsi;

            VideoStreamInfo overlayVsi = new VideoStreamInfo();

            overlayVsi.StreamType = StreamType.Png;

            pin.Params.Add(Param.Video.Overlay.Mode, AlphaCompositingMode.Atop);
            pin.Params.Add(Param.Video.Overlay.LocationX, opt.PositionX);
            pin.Params.Add(Param.Video.Overlay.LocationY, opt.PositionY);
            pin.Params.Add(Param.Video.Overlay.BackgroundAlpha, 1);
            pin.Params.Add(Param.Video.Overlay.ForegroundAlpha, opt.Alpha);

            pin.Params.Add(Param.Video.Overlay.ForegroundBufferFormat, overlayVsi);
            pin.Params.Add(Param.Video.Overlay.ForegroundBuffer, new MediaBuffer(System.IO.File.ReadAllBytes(opt.Watermark)));

            return(socket);
        }
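The socket built above only describes the output side; a caller still has to attach it to a transcoder. Below is a minimal, hypothetical sketch (not part of the original sample) that reuses the Transcoder, AllowDemoMode and PrintError pattern shown in Examples #9, #10 and #17; the method name and the way the input socket is supplied are assumptions, and any push/pull frame loop is out of scope here.

        // Hypothetical caller: attach the overlay output socket to a transcoder and open it.
        // The input socket configuration is left to the caller (see Example #7 for one option).
        static bool OpenOverlayTranscoder(Options opt, MediaSocket inputSocket)
        {
            var transcoder = new Transcoder { AllowDemoMode = true };

            transcoder.Inputs.Add(inputSocket);
            transcoder.Outputs.Add(ConfigureOutputSocket(opt));

            bool res = transcoder.Open();
            PrintError("Open Transcoder", transcoder.Error);

            return res;
        }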
Example #3
        /// <summary>
        /// Connects to a URL and starts waiting for MJPEG video frames
        /// </summary>
        public override void Connect(string url, LoadOptions loadOptions = null)
        {
                        #if !UNITY_WINRT
            if (loadOptions != null)
            {
                videoStreamInfo = loadOptions.videoStreamInfo != null ? loadOptions.videoStreamInfo : new VideoStreamInfo();
                timeout         = loadOptions.connectTimeout;
            }
            else
            {
                videoStreamInfo = new VideoStreamInfo();
                timeout         = 10;
            }
            videoStreamInfo.codecFourCC = MP.Decoder.VideoDecoderMJPEG.FOURCC_MJPG;
            videoStreamInfo.frameCount  = 0;
            videoStreamInfo.framerate   = 0;

            frameRingBuffer    = new byte[1][];
            receivedFrameCount = 0;

            shouldStop = false;
            thread     = new Thread(ThreadRun);
            thread.Start(url);
                        #else
            Status = "Streaming not supported on Windows Store build target";
            throw new NotSupportedException("Streaming is currently not possible on Windows Store build target");
                        #endif
        }
Example #4
        public ucVideoStreamDetail(VideoStreamInfo videoStreamInfo)
        {
            InitializeComponent();

            InitControls();
            RefreshControls(videoStreamInfo);
        }
Example #5
        public bool Open(string imageOverlay, StreamType imageType, VideoStreamInfo uncompressedVideo)
        {
            var inSocket = new MediaSocket()
            {
                File       = null, // Push
                StreamType = StreamType.UncompressedVideo,
            };

            inSocket.Pins.Add(new MediaPin()
            {
                StreamInfo = uncompressedVideo
            });
            t.Inputs.Add(inSocket);

            var outSocket = new MediaSocket()
            {
                File       = null, // Pull
                StreamType = StreamType.UncompressedVideo
            };

            outSocket.Pins.Add(new MediaPin()
            {
                StreamInfo = uncompressedVideo
            });

            SetOverlayParamsToPin(outSocket.Pins[0], imageOverlay, imageType);

            t.Outputs.Add(outSocket);

            return(t.Open());
        }
Example #6
        /// <summary>
        /// Downloads the given video stream
        /// </summary>
        public async Task <Stream> DownloadVideoAsync(VideoStreamInfo streamInfo)
        {
            if (streamInfo == null)
            {
                throw new ArgumentNullException(nameof(streamInfo));
            }
            if (streamInfo.Url.IsBlank())
            {
                throw new Exception("Given stream does not have a URL");
            }
            if (streamInfo.NeedsDeciphering)
            {
                throw new Exception("Given stream's signature needs to be deciphered first");
            }

            // Get stream
            var stream = await RequestService.GetStreamAsync(streamInfo.Url);

            if (stream == null)
            {
                throw new Exception("Could not get response stream");
            }

            return(stream);
        }
Example #7
        static MediaSocket CreateInputSocket(Options opt)
        {
            MediaSocket socket = new MediaSocket();

            socket.StreamType = StreamType.UncompressedVideo;
            socket.File       = null;
            socket.Stream     = null;

            MediaPin pin = new MediaPin();

            socket.Pins.Add(pin);
            VideoStreamInfo vsi = new VideoStreamInfo();

            pin.StreamInfo = vsi;

            vsi.StreamType = StreamType.UncompressedVideo;
            vsi.ScanType   = ScanType.Progressive;

            vsi.FrameWidth  = opt.Width;
            vsi.FrameHeight = opt.Height;
            vsi.ColorFormat = opt.Color.Id;
            vsi.FrameRate   = opt.Fps;

            return(socket);
        }
Example #8
        public ElementaryStream(StreamType streamType, int width, int height, double fps)
        {
            vsi    = new VideoStreamInfo();
            pin    = new MediaPin();
            socket = new MediaSocket();

            // set video stream properties
            vsi.StreamType = streamType;

            if (width > 0)
            {
                vsi.FrameWidth = width;
            }

            if (height > 0)
            {
                vsi.FrameHeight = height;
            }

            if (fps > 0.0)
            {
                vsi.FrameRate = fps;
            }

            // provide pin with stream info
            pin.StreamInfo = vsi;

            // set socket properties
            socket.StreamType = StreamType.H264;

            // set socket pin
            socket.Pins.Add(pin);
        }
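Constructing this wrapper is a one-liner; a small illustrative sketch (the values and variable name are hypothetical):

        // Hypothetical usage: describe a 1080p/30fps H.264 elementary stream.
        // Non-positive width/height/fps values simply leave those fields unset, per the constructor above.
        var h264Stream = new ElementaryStream(StreamType.H264, 1920, 1080, 30.0);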
Example #9
        static bool GetFrameSize(string inputFile, out int frameWidth, out int frameHeight)
        {
            frameWidth  = 0;
            frameHeight = 0;

            using (MediaInfo info = new MediaInfo())
            {
                info.Inputs[0].File = inputFile;

                bool res = info.Open();
                PrintError("Open MediaInfo", info.Error);
                if (!res)
                {
                    return(false);
                }

                foreach (var socket in info.Outputs)
                {
                    foreach (var pin in socket.Pins)
                    {
                        VideoStreamInfo vsi = (VideoStreamInfo)pin.StreamInfo;
                        frameWidth  = vsi.FrameWidth;
                        frameHeight = vsi.FrameHeight;
                        return(true);
                    }
                }
            }

            return(false);
        }
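A short usage sketch for the probe above (the method name and console output are illustrative, not from the original sample):

        // Hypothetical caller: probe a clip and report its frame size.
        static void ReportFrameSize(string inputFile)
        {
            int frameWidth, frameHeight;

            if (GetFrameSize(inputFile, out frameWidth, out frameHeight))
                Console.WriteLine("frame size: {0}x{1}", frameWidth, frameHeight);
            else
                Console.WriteLine("could not determine frame size");
        }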
Example #10
        static Transcoder CreateEncoder(VideoStreamInfo inputVideo, VideoStreamInfo outputVideo, string outputFile)
        {
            Transcoder transcoder = new Transcoder()
            {
                AllowDemoMode = true
            };


            var inSocket = new MediaSocket()
            {
                File       = null, //Push
                StreamType = StreamType.UncompressedVideo,
            };

            inSocket.Pins.Add(new MediaPin()
            {
                StreamInfo = inputVideo
            });
            transcoder.Inputs.Add(inSocket);

            var outSocket = new MediaSocket()
            {
                File = outputFile,
            };

            outSocket.Pins.Add(new MediaPin()
            {
                StreamInfo = outputVideo
            });

            transcoder.Outputs.Add(outSocket);

            return(transcoder);
        }
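A hedged sketch of how this encoder factory might be driven, following the Open/PrintError/Close pattern from Example #17. Because the input socket is push-mode (File = null), a real caller would also push uncompressed frames between Open and Close; that loop is omitted here, and the method name is an assumption.

        // Hypothetical caller (not part of the original sample).
        static bool EncodeUncompressed(VideoStreamInfo inputVideo, VideoStreamInfo outputVideo, string outputFile)
        {
            using (Transcoder transcoder = CreateEncoder(inputVideo, outputVideo, outputFile))
            {
                bool res = transcoder.Open();
                PrintError("Open Transcoder", transcoder.Error);
                if (!res)
                    return false;

                // Push uncompressed frames to the input socket here, then flush.

                transcoder.Close();
                return true;
            }
        }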
Example #11
        public override void Connect(string url, LoadOptions loadOptions = null)
        {
            if (!Application.HasUserAuthorization(UserAuthorization.WebCam))                // | UserAuthorization.Microphone
            {
                throw new MpException("Not authorized to use webcam. Use Application.RequestUserAuthorization before calling this");
            }

            if (loadOptions != null && loadOptions.videoStreamInfo != null)
            {
                videoStreamInfo = loadOptions.videoStreamInfo;
                webcam          = new WebCamTexture(url.Substring(9),
                                                    loadOptions.videoStreamInfo.width,
                                                    loadOptions.videoStreamInfo.height,
                                                    (int)loadOptions.videoStreamInfo.framerate);
            }
            else
            {
                webcam = new WebCamTexture(url.Substring(9));
            }
            webcam.Play();

            videoStreamInfo              = new VideoStreamInfo();
            videoStreamInfo.codecFourCC  = MP.Decoder.VideoDecoderRGB.FOURCC_NULL;
            videoStreamInfo.width        = webcam.width;
            videoStreamInfo.height       = webcam.height;
            videoStreamInfo.bitsPerPixel = 24;
            videoStreamInfo.framerate    = webcam.requestedFPS;

            colorBuffer = new Color32[webcam.width * webcam.height];
            rawBuffer   = new byte[colorBuffer.Length * 3];
        }
Example #12
        private static int WriteVideoStreamHeader(RiffWriter rw, VideoStreamInfo vsi)
        {
            rw.BeginChunk(AviDemux.ID_strh);
            int offset = (int)rw.binaryWriter.Seek(0, SeekOrigin.Current);
            var bw     = rw.binaryWriter;

            bw.Write(AviDemux.FCC_vids);
            bw.Write(vsi.codecFourCC);
            bw.Write((int)0);              // dwFlags
            bw.Write((short)0);            // wPriority
            bw.Write((short)0);            // wLanguage
            bw.Write((int)0);              // dwInitialFrames
            int scale, rate;

            FindScaleAndRate(out scale, out rate, vsi.framerate);
            bw.Write(scale);             // dwScale
            bw.Write(rate);              // dwRate
            bw.Write((int)0);            // dwStart
            bw.Write(vsi.frameCount);    // dwLength. that's how many frames will be in this RIFF element, written over later
            bw.Write((int)0);            // dwSuggestedBufferSize, not suggesting any value
            bw.Write((int)-1);           // dwQuality = -1 meaning "default quality"
            bw.Write((int)0);            // dwSampleSize = 0 for video
            bw.Write((short)0);
            bw.Write((short)0);
            bw.Write((short)vsi.width);
            bw.Write((short)vsi.height);
            rw.EndChunk();
            return(offset);
        }
Example #13
        /// <summary>
        /// Gets and populates the total file size of the video, streamed on the given endpoint
        /// </summary>
        /// <returns>The file size of the video (in bytes)</returns>
        public async Task <long> GetFileSizeAsync(VideoStreamInfo streamInfo)
        {
            if (streamInfo == null)
            {
                throw new ArgumentNullException(nameof(streamInfo));
            }
            if (streamInfo.Url.IsBlank())
            {
                throw new Exception("Given stream does not have a URL");
            }
            if (streamInfo.NeedsDeciphering)
            {
                throw new Exception("Given stream's signature needs to be deciphered first");
            }

            // Get the headers
            var headers = await RequestService.GetHeadersAsync(streamInfo.Url);

            if (headers == null)
            {
                throw new Exception("Could not obtain headers (HEAD request failed)");
            }

            // Get file size header
            if (!headers.ContainsKey("Content-Length"))
            {
                throw new Exception("Content-Length header not found");
            }

            return(streamInfo.FileSize = headers["Content-Length"].ParseLongOrDefault());
        }
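GetFileSizeAsync pairs naturally with DownloadVideoAsync from Example #6; a minimal sketch of composing the two on the same client (the method name and file handling are hypothetical):

        // Hypothetical composition of GetFileSizeAsync (above) and DownloadVideoAsync (Example #6).
        public async Task SaveVideoAsync(VideoStreamInfo streamInfo, string filePath)
        {
            long totalBytes = await GetFileSizeAsync(streamInfo);

            using (var input = await DownloadVideoAsync(streamInfo))
            using (var output = File.Create(filePath))
            {
                await input.CopyToAsync(output);
            }

            Console.WriteLine($"Saved {totalBytes} bytes to {filePath}");
        }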
Example #14
        public MediaStreamInfo ToStreamInfo()
        {
            MediaStreamInfo stream = null;

            if (CodecType == "audio")
            {
                stream = new AudioStreamInfo {
                    ChannelLayout = this.ChannelLayout,
                    ChannelCount  = this.ChannelCount,
                    SampleFormat  = FfmpegHelper.ParseSampleFormat(SampleFormat),
                    SampleRate    = SampleRate,
                };
            }
            else if (CodecType == "video")
            {
                stream = new VideoStreamInfo {
                    PixelFormat = PixelFormatHelper.Parse(PixelFormat),
                    Width       = this.Width,
                    Height      = this.Height
                };

                if (Tags != null && Tags.TryGetValue("rotate", out var rotate))
                {
                    ((VideoStreamInfo)stream).Rotate = (int)rotate;
                }
            }
            else if (CodecType == "subtitle")
            {
                stream = new SubtitleStreamInfo
                {
                };
            }
            else if (CodecType == "data")
            {
                stream = new DataStreamInfo
                {
                };
            }
            else
            {
                stream = new MediaStreamInfo();
            }

            stream.Codec = Profile != null ? CodecInfo.Create(CodecIdHelper.Parse(CodecName), Profile).Name : CodecName;

            if (TimeBase != null && Rational.TryParse(TimeBase, out var timeBase))
            {
                stream.TimeBase = timeBase;
            }


            stream.Duration  = Duration ?? TimeSpan.Zero;
            stream.StartTime = StartTime ?? TimeSpan.Zero;

            stream.FrameCount = FrameCount;


            return(stream);
        }
Example #15
 private void PrintStreamDetails(VideoStreamInfo stream)
 {
     _output.WriteLine("------------");
     _output.WriteLine($"Url: {stream.StreamUrl}");
     _output.WriteLine($"Quality label: {stream.QualityLabel}");
     _output.WriteLine($"Quality: {stream.Quality}");
     _output.WriteLine($"Height: {stream.Height}");
     _output.WriteLine($"Width: {stream.Width}");
 }
Example #16
 protected virtual MappedVideoStream MapVideoStream(FFmpegConfig config,
                                                    VideoStreamInfo sourceStream,
                                                    VideoOutputStream outputStream)
 {
     return(new MappedVideoStream()
     {
         Input = GetStreamInput(sourceStream),
     });
 }
Example #17
        static bool Encode(Options opt)
        {
            try { File.Delete(opt.OutputFile); }
            catch { }

            using (var transcoder = new Transcoder())
            {
                // Transcoder demo mode must be enabled, 
                // in order to use the OEM release for testing (without a valid license).
                transcoder.AllowDemoMode = true;

                // Configure input
                // The input stream frame rate determines the playback speed
                var instream = new VideoStreamInfo {
                    StreamType = PrimoSoftware.AVBlocks.StreamType.UncompressedVideo,
                    FrameRate = opt.YuvFps, 
                    FrameWidth = opt.YuvWidth,
                    FrameHeight = opt.YuvHeight,
                    ColorFormat = opt.YuvColor.Id,
                    ScanType = ScanType.Progressive
                };

                var inpin = new MediaPin {
                    StreamInfo = instream
                };

                var insocket = new MediaSocket {
                    StreamType = PrimoSoftware.AVBlocks.StreamType.UncompressedVideo,
                    File = opt.YuvFile
                };

                insocket.Pins.Add(inpin);

                transcoder.Inputs.Add(insocket);

                // Configure output
                var outsocket = MediaSocket.FromPreset(opt.OutputPreset.Name);

                outsocket.File = opt.OutputFile;

                transcoder.Outputs.Add(outsocket);

                bool res = transcoder.Open();
                PrintError("Open Transcoder", transcoder.Error);
                if (!res)
                    return false;

                res = transcoder.Run();
                PrintError("Run Transcoder", transcoder.Error);
                if (!res)
                    return false;

                transcoder.Close();
            }

            return true;
        }
Example #18
        private async void DownloadVideoAsync(VideoStreamInfo videoStreamInfo)
        {
            // Check params
            if (videoStreamInfo == null)
            {
                return;
            }
            if (VideoInfo == null)
            {
                return;
            }

            // Copy values
            string title = VideoInfo.Title;
            string ext   = videoStreamInfo.FileExtension;

            // Select destination
            var sfd = new SaveFileDialog
            {
                AddExtension = true,
                DefaultExt   = ext,
                FileName     = $"{title}.{ext}".Without(Path.GetInvalidFileNameChars()),
                Filter       = $"{ext.ToUpperInvariant()} Video Files|*.{ext}|All files|*.*"
            };

            if (sfd.ShowDialog() == false)
            {
                return;
            }
            string filePath = sfd.FileName;

            // Try download
            IsBusy   = true;
            Progress = 0;
            using (var output = File.Create(filePath))
                using (var input = await _client.DownloadVideoAsync(videoStreamInfo))
                {
                    // Read the response and copy it to output stream
                    var buffer = new byte[1024];
                    int bytesRead;
                    do
                    {
                        bytesRead = await input.ReadAsync(buffer, 0, buffer.Length);

                        await output.WriteAsync(buffer, 0, bytesRead);

                        if (videoStreamInfo.FileSize > 0)
                        {
                            Progress += 1.0 * bytesRead / videoStreamInfo.FileSize;
                        }
                    } while (bytesRead > 0);
                }

            Progress = 0;
            IsBusy   = false;
        }
Example #19
        private async Task ResolveAdaptiveStreamInfosAsync(PlayerContext context, string encodedData,
                                                           ICollection <AudioStreamInfo> audioStreamInfos, ICollection <VideoStreamInfo> videoStreamInfos)
        {
            foreach (var streamEncoded in encodedData.Split(","))
            {
                var streamInfoDic = UrlHelper.GetDictionaryFromUrlQuery(streamEncoded);

                var itag          = streamInfoDic.Get("itag").ParseInt();
                var url           = streamInfoDic.Get("url");
                var sig           = streamInfoDic.GetOrDefault("s");
                var contentLength = streamInfoDic.Get("clen").ParseLong();
                var bitrate       = streamInfoDic.Get("bitrate").ParseLong();

#if RELEASE
                if (!MediaStreamInfo.IsKnown(itag))
                {
                    continue;
                }
#endif

                // Decipher signature if needed
                if (sig.IsNotBlank())
                {
                    var playerSource = await GetPlayerSourceAsync(context.SourceUrl).ConfigureAwait(false);

                    sig = playerSource.Decipher(sig);
                    url = UrlHelper.SetUrlQueryParameter(url, "signature", sig);
                }

                // Set rate bypass
                url = UrlHelper.SetUrlQueryParameter(url, "ratebypass", "yes");

                // Check if audio
                var isAudio = streamInfoDic.Get("type").Contains("audio/");

                // If audio stream
                if (isAudio)
                {
                    var streamInfo = new AudioStreamInfo(itag, url, contentLength, bitrate);
                    audioStreamInfos.Add(streamInfo);
                }
                // If video stream
                else
                {
                    // Parse additional data
                    var size       = streamInfoDic.Get("size");
                    var width      = size.SubstringUntil("x").ParseInt();
                    var height     = size.SubstringAfter("x").ParseInt();
                    var resolution = new VideoResolution(width, height);
                    var framerate  = streamInfoDic.Get("fps").ParseInt();

                    var streamInfo = new VideoStreamInfo(itag, url, contentLength, bitrate, resolution, framerate);
                    videoStreamInfos.Add(streamInfo);
                }
            }
        }
Example #20
 public VideoStreamInfo(VideoStreamInfo vi)
 {
     codecFourCC = vi.codecFourCC;
     bitsPerPixel = vi.bitsPerPixel;
     frameCount = vi.frameCount;
     width = vi.width;
     height = vi.height;
     framerate = vi.framerate;
     lengthBytes = vi.lengthBytes;
 }
Example #21
 public VideoStreamInfo(VideoStreamInfo vi)
 {
     codecFourCC  = vi.codecFourCC;
     bitsPerPixel = vi.bitsPerPixel;
     frameCount   = vi.frameCount;
     width        = vi.width;
     height       = vi.height;
     framerate    = vi.framerate;
     lengthBytes  = vi.lengthBytes;
 }
Example #22
    IEnumerator CaptureWebcamToFileCoroutine()
    {
        // Open a webcam streamer. The url prefix for this is webcam://
        // Optionally a webcam device id can be added (to get a list, use WebCamTexture.devices)
        string   webcamStreamUrl = "webcam://";
        Streamer streamer        = Streamer.forUrl(webcamStreamUrl);

        streamer.Connect(webcamStreamUrl);

        // Set up a remux @ 15fps
        var vi = new VideoStreamInfo(streamer.videoStreamInfo);

        vi.framerate = 15;         // must be lower than framerate with this approach!
        AviRemux remux = new AviRemux();

        remux.Init(File.OpenWrite(outfile), vi, null);

        // Do fixed time capture, 10 seconds (150 frames @ 15fps)
        // The webcam framerate can be lower or higher than this. If it is lower then
        // a frame is written multiple times; if higher, then some frames are not written.
        float captureStartTime = Time.realtimeSinceStartup;
        int   realFrameNr, lastRealFrameNr = -1;

        do
        {
            // Read a frame from webcam. It returns a frame number, but we're not using it.
            byte[] buf;
            frame = streamer.VideoPosition;
            int bytesCnt = streamer.ReadVideoFrame(out buf);

            // Calculate the video frame number that we should be writing.
            realFrameNr = Mathf.RoundToInt((Time.realtimeSinceStartup - captureStartTime) * vi.framerate);

            // If the loop is being executed too seldom compared to vi.framerate, write a warning to console.
            if (realFrameNr - lastRealFrameNr > 1)
            {
                Debug.LogWarning("Output framerate too high, possibly just skipped " + (realFrameNr - lastRealFrameNr) + " frames");
            }

            // Write as many frames as we need. Normally this is 0 or 1, but can be higher (see the warning a few lines above)
            while (lastRealFrameNr < realFrameNr)
            {
                remux.WriteNextVideoFrame(buf, bytesCnt);
                lastRealFrameNr++;
            }

            // Give control back to Unity for one frame
            yield return(1);
        } while(realFrameNr < 150);

        // We're done. Close the remux and streamer
        remux.Shutdown();
        streamer.Shutdown();
        Debug.Log("Done capturing");
    }
Example #23
        protected virtual Codec GetVideoCodec(FFmpegConfig config,
                                              VideoStreamInfo sourceStream,
                                              VideoOutputStream outputStream)
        {
            VideoFormat format    = outputStream.Format;
            VideoCodec  codec     = config?.Video?.Codecs.GetValueOrDefault(format);
            string      codecName = GetVideoCodecName(format);
            X26xCodec   result    = format == VideoFormat.Hevc ? new X265Codec(codecName) : new X26xCodec(codecName);

            result.Preset = codec?.Preset;
            result.Crf    = outputStream.Quality;

            if (outputStream.DynamicRange == DynamicRange.High)
            {
                if (outputStream.Format != VideoFormat.Hevc)
                {
                    throw new NotSupportedException($"HDR is not supported with the video format {outputStream.Format}.");
                }

                var options = new List <Option>()
                {
                    new Option("colorprim", "bt2020"),
                    new Option("colormatrix", "bt2020nc"),
                    new Option("transfer", "smpte2084")
                };

                if (outputStream.CopyHdrMetadata)
                {
                    if (sourceStream.MasterDisplayProperties != null)
                    {
                        var properties = sourceStream.MasterDisplayProperties;
                        var value      = string.Format("\"G{0}B{1}R{2}WP{3}L({4},{5})\"",
                                                       properties.Green,
                                                       properties.Blue,
                                                       properties.Red,
                                                       properties.WhitePoint,
                                                       properties.Luminance.Max,
                                                       properties.Luminance.Min);

                        options.Add(new Option("master-display", value));
                    }

                    if (sourceStream.LightLevelProperties != null)
                    {
                        var properties = sourceStream.LightLevelProperties;

                        options.Add(new Option("max-cll", $"\"{properties.MaxCll},{properties.MaxFall}\""));
                    }
                }

                ((X265Codec)result).Options = options;
            }

            return(result);
        }
Example #24
        public override void Init(Stream dstStream, VideoStreamInfo videoStreamInfo, AudioStreamInfo audioStreamInfo)
        {
            if (dstStream == null || videoStreamInfo == null)
            {
                throw new ArgumentException("At least destination stream and video stream info is needed");
            }
            base.Init(dstStream, videoStreamInfo, audioStreamInfo);

            usingMultipleRiffs = false;
            totalFramesOld     = 0;
            totalFrames        = 0;
            totalSamples       = 0;

            writer = new RiffWriter(dstStream);
            writer.BeginRiff(AviDemux.ID_AVI_);
            writer.BeginList(AviDemux.ID_hdrl);

            // main header
            offsets.avih = WriteMainHeader(writer, videoStreamInfo, hasAudioStream);

            // video stream header
            writer.BeginList(AviDemux.ID_strl);
            offsets.videoStrh = WriteVideoStreamHeader(writer, videoStreamInfo);
            WriteVideoFormatHeader(writer, videoStreamInfo);
            offsets.videoIndx         = WriteDummySuperIndex(writer, AviDemux.ID_00dc, maxSuperindexEntries);
            videoSuperIndexEntryCount = 0;
            writer.EndList();              // end of strl

            videoIndex          = new AviStreamIndex();
            videoIndex.streamId = AviDemux.ID_00dc;

            if (hasAudioStream)
            {
                // audio stream header
                writer.BeginList(AviDemux.ID_strl);
                offsets.audioStrh = WriteAudioStreamHeader(writer, audioStreamInfo);
                WriteAudioFormatHeader(writer, audioStreamInfo);
                offsets.audioIndx         = WriteDummySuperIndex(writer, AviDemux.ID_01wb, maxSuperindexEntries);
                audioSuperIndexEntryCount = 0;
                writer.EndList();                  // end of strl

                audioIndex          = new AviStreamIndex();
                audioIndex.streamId = AviDemux.ID_01wb;
            }

            // odml header
            writer.BeginList(AviDemux.ID_odml);
            offsets.dmlh = WriteDmlhHeader(writer, videoStreamInfo.frameCount);
            writer.EndList();

            writer.EndList();              // end of hdrl

            writer.BeginList(AviDemux.ID_movi);
            offsets.indexBase = writer.binaryWriter.Seek(0, SeekOrigin.Current);
        }
Example #25
        protected override MappedVideoStream MapVideoStream(FFmpegConfig config,
                                                            VideoStreamInfo sourceStream,
                                                            VideoOutputStream outputStream)
        {
            var result = base.MapVideoStream(config, sourceStream, outputStream);

            result.Codec = GetVideoCodec(config, sourceStream, outputStream);
            result.Tag   = outputStream.Tag;

            return(result);
        }
Example #26
        public static VideoEncoding GetVideoEncoding(MediaStreamInfo stream)
        {
            VideoStreamInfo VInfo = stream as VideoStreamInfo;
            MuxedStreamInfo MInfo = stream as MuxedStreamInfo;

            if (VInfo == null && MInfo == null)
            {
                return(VideoEncoding.H264);
            }
            return(VInfo?.VideoEncoding ?? MInfo.VideoEncoding);
        }
Example #27
        public override void Init(Stream dstStream, VideoStreamInfo videoStreamInfo, AudioStreamInfo audioStreamInfo)
        {
            if (dstStream == null || videoStreamInfo == null) {
                throw new ArgumentException ("At least destination stream and video stream info is needed");
            }
            base.Init (dstStream, videoStreamInfo, audioStreamInfo);

            usingMultipleRiffs = false;
            totalFramesOld = 0;
            totalFrames = 0;
            totalSamples = 0;

            writer = new RiffWriter (dstStream);
            writer.BeginRiff (AviDemux.ID_AVI_);
            writer.BeginList (AviDemux.ID_hdrl);

            // main header
            offsets.avih = WriteMainHeader (writer, videoStreamInfo, hasAudioStream);

            // video stream header
            writer.BeginList (AviDemux.ID_strl);
            offsets.videoStrh = WriteVideoStreamHeader (writer, videoStreamInfo);
            WriteVideoFormatHeader (writer, videoStreamInfo);
            offsets.videoIndx = WriteDummySuperIndex (writer, AviDemux.ID_00dc, maxSuperindexEntries);
            videoSuperIndexEntryCount = 0;
            writer.EndList (); // end of strl

            videoIndex = new AviStreamIndex ();
            videoIndex.streamId = AviDemux.ID_00dc;

            if (hasAudioStream) {
                // audio stream header
                writer.BeginList (AviDemux.ID_strl);
                offsets.audioStrh = WriteAudioStreamHeader (writer, audioStreamInfo);
                WriteAudioFormatHeader (writer, audioStreamInfo);
                offsets.audioIndx = WriteDummySuperIndex (writer, AviDemux.ID_01wb, maxSuperindexEntries);
                audioSuperIndexEntryCount = 0;
                writer.EndList (); // end of strl

                audioIndex = new AviStreamIndex ();
                audioIndex.streamId = AviDemux.ID_01wb;
            }

            // odml header
            writer.BeginList (AviDemux.ID_odml);
            offsets.dmlh = WriteDmlhHeader (writer, videoStreamInfo.frameCount);
            writer.EndList ();

            writer.EndList (); // end of hdrl

            writer.BeginList (AviDemux.ID_movi);
            offsets.indexBase = writer.binaryWriter.Seek (0, SeekOrigin.Current);
        }
Example #28
        public static double GetVideoFrameRate(MediaStreamInfo stream)
        {
            VideoStreamInfo VInfo = stream as VideoStreamInfo;

            if (VInfo != null)
            {
                return(VInfo.Framerate);
            }
            else
            {
                return(0);
            }
        }
Example #29
        static void PrintVideo(VideoStreamInfo vsi)
        {
            Console.WriteLine("bitrate: {0} mode: {1}", vsi.Bitrate, vsi.BitrateMode);

            Console.WriteLine("color format: {0}", vsi.ColorFormat);

            Console.WriteLine("display ratio: {0}:{1}", vsi.DisplayRatioWidth, vsi.DisplayRatioHeight);

            Console.WriteLine("frame bottom up: {0}", vsi.FrameBottomUp);
            Console.WriteLine("frame size: {0}x{1}", vsi.FrameWidth, vsi.FrameHeight);
            Console.WriteLine("frame rate: {0:f3}", vsi.FrameRate);

            Console.WriteLine("scan type: {0}", vsi.ScanType);
        }
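A usage sketch for PrintVideo: open a MediaInfo on a file and walk its output pins, mirroring the enumeration pattern from Example #9 (the method name and minimal error handling are illustrative):

        // Hypothetical caller, following the MediaInfo enumeration pattern from Example #9.
        static void PrintAllVideoStreams(string inputFile)
        {
            using (MediaInfo info = new MediaInfo())
            {
                info.Inputs[0].File = inputFile;

                if (!info.Open())
                    return;

                foreach (var socket in info.Outputs)
                {
                    foreach (var pin in socket.Pins)
                    {
                        var vsi = pin.StreamInfo as VideoStreamInfo;
                        if (vsi != null)
                        {
                            PrintVideo(vsi);
                        }
                    }
                }
            }
        }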
Example #30
        private void SetOverlayParamsToPin(MediaPin pin, string imageOverlay, StreamType imageType)
        {
            var videoInfo = new VideoStreamInfo()
            {
                StreamType = imageType
            };

            pin.Params[Param.Video.Overlay.Mode]                   = AlphaCompositingMode.Atop;
            pin.Params[Param.Video.Overlay.LocationX]              = 0; // left
            pin.Params[Param.Video.Overlay.LocationY]              = 0; // top
            pin.Params[Param.Video.Overlay.BackgroundAlpha]        = 1.0;
            pin.Params[Param.Video.Overlay.ForegroundBuffer]       = new MediaBuffer(File.ReadAllBytes(imageOverlay));
            pin.Params[Param.Video.Overlay.ForegroundBufferFormat] = videoInfo;
            pin.Params[Param.Video.Overlay.ForegroundAlpha]        = 1.0;
        }
Example #31
        static MediaSocket CreateOutputSocket(Options opt)
        {
            MediaPin pin = new MediaPin();

            MediaSocket socket = new MediaSocket();

            socket.Pins.Add(pin);

            VideoStreamInfo vsi = new VideoStreamInfo();

            vsi.ScanType = ScanType.Progressive;

            pin.StreamInfo = vsi;

            return(socket);
        }
Example #32
        private void Init(VideoStreamInfo vinfo)
        {
            if (vinfo.StreamType == StreamType.UncompressedVideo)
            {
                // YUV 420
                uncompressedFrameSize = vinfo.FrameWidth * vinfo.FrameHeight * 3 / 2;
                inframe.Buffer        = new MediaBuffer(new byte[uncompressedFrameSize]);
                inframe.Buffer.SetData(0, 0); // needed because setting the buffer actually sets the data as well
            }
            else
            {
                throw new Exception("Unsupported: video input is compressed");
            }

            frameRate = vinfo.FrameRate;
        }
Example #33
 /// <summary>
 /// Constructor. It's always created for a stream, so you need to provide info about it here.
 /// </summary>
 public VideoDecoderMPNG(VideoStreamInfo streamInfo = null)
     : base(streamInfo)
 {
 }
Example #34
 /// <summary>
 /// Constructor. It's always created for a stream, so you need to provide info about it here.
 /// </summary>
 public VideoDecoderUnity(VideoStreamInfo streamInfo = null)
 {
     this.streamInfo = streamInfo;
 }
Example #35
 /// <summary>
 /// Constructor. It's always created for a stream, so you need to provide info about it here.
 /// </summary>
 public VideoDecoderRGB(VideoStreamInfo info = null)
 {
     this.info = info;
 }
Example #36
        public void GetMediaInfo(string fileName)
        {
            const NumberStyles numStyle = NumberStyles.Number;

            if (Processing.mediaInfo == null)
            {
                Processing.mediaInfo = new MediaInfo();
                Processing.mediaInfo.Option("Internet", "No");
            }

            Processing.mediaInfo.Open(fileName);

            _videoStreams = Processing.mediaInfo.Count_Get(StreamKind.Video);
            _audioStreams = Processing.mediaInfo.Count_Get(StreamKind.Audio);
            _imageStreams = Processing.mediaInfo.Count_Get(StreamKind.Image);
            _textStreams = Processing.mediaInfo.Count_Get(StreamKind.Text);
            _menuCount = Processing.mediaInfo.Count_Get(StreamKind.Menu);

            #region Get General Info
            General.CompleteName        = Processing.mediaInfo.Get(StreamKind.General, 0, "CompleteName");
            General.FileName            = Processing.mediaInfo.Get(StreamKind.General, 0, "FileName");
            General.FileExtension       = Processing.mediaInfo.Get(StreamKind.General, 0, "FileExtension");
            General.Format              = Processing.mediaInfo.Get(StreamKind.General, 0, "Format");
            General.FormatExtensions    = Processing.mediaInfo.Get(StreamKind.General, 0, "Format/Extensions");
            General.InternetMediaType   = Processing.mediaInfo.Get(StreamKind.General, 0, "InternetMediaType");
            DateTime.TryParse(Processing.mediaInfo.Get(StreamKind.General, 0, "Duration/String3"), AppSettings.CInfo, DateTimeStyles.AssumeLocal, out General.DurationTime);
            General.Title               = Processing.mediaInfo.Get(StreamKind.General, 0, "Title");
            General.EncodedApplication  = Processing.mediaInfo.Get(StreamKind.General, 0, "Encoded_Application");
            General.EncodedApplicationUrl = Processing.mediaInfo.Get(StreamKind.General, 0, "Encoded_Application/Url");
            General.EncodedLibrary      = Processing.mediaInfo.Get(StreamKind.General, 0, "Encoded_Library");
            General.EncodedLibraryName  = Processing.mediaInfo.Get(StreamKind.General, 0, "Encoded_Library/Name");
            General.EncodedLibraryVersion = Processing.mediaInfo.Get(StreamKind.General, 0, "Encoded_Library/Version");
            General.EncodedLibraryDate  = Processing.mediaInfo.Get(StreamKind.General, 0, "Encoded_Library/Date");
            General.EncodedLibrarySettings = Processing.mediaInfo.Get(StreamKind.General, 0, "Encoded_Library_Settings");
            #endregion

            #region Get Video Info

            for (int i = 0; i < _videoStreams; i++)
            {
                VideoStreamInfo videoStream = new VideoStreamInfo();

                Int32.TryParse(Processing.mediaInfo.Get(StreamKind.Video, i, "StreamKindID"), numStyle, AppSettings.CInfo, out videoStream.StreamKindID);
                Int32.TryParse(Processing.mediaInfo.Get(StreamKind.Video, i, "StreamKindPos"), numStyle, AppSettings.CInfo, out videoStream.StreamKindPos);
                Int32.TryParse(Processing.mediaInfo.Get(StreamKind.Video, i, "ID"), numStyle, AppSettings.CInfo, out videoStream.ID);
                videoStream.Format              = Processing.mediaInfo.Get(StreamKind.Video, i, "Format");
                videoStream.FormatInfo          = Processing.mediaInfo.Get(StreamKind.Video, i, "Format/Info");
                videoStream.FormatVersion       = Processing.mediaInfo.Get(StreamKind.Video, i, "Format_Version");
                videoStream.FormatProfile       = Processing.mediaInfo.Get(StreamKind.Video, i, "Format_Profile");
                videoStream.MultiViewBaseProfile = Processing.mediaInfo.Get(StreamKind.Video, i, "MultiView_BaseProfile");
                videoStream.MultiViewCount      = Processing.mediaInfo.Get(StreamKind.Video, i, "MultiView_Count");
                videoStream.InternetMediaType   = Processing.mediaInfo.Get(StreamKind.Video, i, "InternetMediaType");
                videoStream.CodecID             = Processing.mediaInfo.Get(StreamKind.Video, i, "CodecID");
                videoStream.CodecIDInfo         = Processing.mediaInfo.Get(StreamKind.Video, i, "CodecID/Info");
                videoStream.CodecIDUrl          = Processing.mediaInfo.Get(StreamKind.Video, i, "CodecID/Url");
                videoStream.CodecIDDescription  = Processing.mediaInfo.Get(StreamKind.Video, i, "CodecID_Description");
                DateTime.TryParse(Processing.mediaInfo.Get(StreamKind.Video, i, "Duration/String3"), AppSettings.CInfo, DateTimeStyles.AssumeLocal, out videoStream.DurationTime);
                videoStream.BitRateMode         = Processing.mediaInfo.Get(StreamKind.Video, i, "BitRate_Mode");
                Int32.TryParse(Processing.mediaInfo.Get(StreamKind.Video, i, "BitRate"), numStyle, AppSettings.CInfo, out videoStream.BitRate);
                Int32.TryParse(Processing.mediaInfo.Get(StreamKind.Video, i, "BitRate_Minimum"), numStyle, AppSettings.CInfo, out videoStream.BitRateMin);
                Int32.TryParse(Processing.mediaInfo.Get(StreamKind.Video, i, "BitRate_Nominal"), numStyle, AppSettings.CInfo, out videoStream.BitRateNom);
                Int32.TryParse(Processing.mediaInfo.Get(StreamKind.Video, i, "BitRate_Maximum"), numStyle, AppSettings.CInfo, out videoStream.BitRateMax);
                Int32.TryParse(Processing.mediaInfo.Get(StreamKind.Video, i, "Width"), numStyle, AppSettings.CInfo, out videoStream.Width);
                Int32.TryParse(Processing.mediaInfo.Get(StreamKind.Video, i, "Height"), numStyle, AppSettings.CInfo, out videoStream.Height);
                videoStream.PixelAspectRatio    = Processing.mediaInfo.Get(StreamKind.Video, i, "PixelAspectRatio");
                videoStream.DisplayAspectRatio  = Processing.mediaInfo.Get(StreamKind.Video, i, "DisplayAspectRatio");
                videoStream.FrameRateMode       = Processing.mediaInfo.Get(StreamKind.Video, i, "FrameRate_Mode");

                Single.TryParse(Processing.mediaInfo.Get(StreamKind.Video, i, "FrameRate"), numStyle, AppSettings.CInfo, out videoStream.FrameRate);
                Processing.GetFPSNumDenom(videoStream.FrameRate, out videoStream.FrameRateEnumerator, out videoStream.FrameRateDenominator);

                Single.TryParse(Processing.mediaInfo.Get(StreamKind.Video, i, "FrameRate_Minimum"), numStyle, AppSettings.CInfo, out videoStream.FrameRateMin);
                Single.TryParse(Processing.mediaInfo.Get(StreamKind.Video, i, "FrameRate_Nominal"), numStyle, AppSettings.CInfo, out videoStream.FrameRateNom);
                Single.TryParse(Processing.mediaInfo.Get(StreamKind.Video, i, "FrameRate_Maximum"), numStyle, AppSettings.CInfo, out videoStream.FrameRateMax);
                Int64.TryParse(Processing.mediaInfo.Get(StreamKind.Video, i, "FrameCount"), numStyle, AppSettings.CInfo, out videoStream.FrameCount);
                Int32.TryParse(Processing.mediaInfo.Get(StreamKind.Video, i, "BitDepth"), numStyle, AppSettings.CInfo, out videoStream.BitDepth);
                videoStream.ScanType            = Processing.mediaInfo.Get(StreamKind.Video, i, "ScanType");
                videoStream.ScanOrder           = Processing.mediaInfo.Get(StreamKind.Video, i, "ScanOrder");
                UInt64.TryParse(Processing.mediaInfo.Get(StreamKind.Video, i, "StreamSize"), numStyle, AppSettings.CInfo, out videoStream.StreamSize);
                videoStream.EncodedApplication  = Processing.mediaInfo.Get(StreamKind.Video, i, "Encoded_Application");
                videoStream.EncodedApplicationUrl = Processing.mediaInfo.Get(StreamKind.Video, i, "Encoded_Application/Url");
                videoStream.EncodedLibrary      = Processing.mediaInfo.Get(StreamKind.Video, i, "Encoded_Library");
                videoStream.EncodedLibraryName  = Processing.mediaInfo.Get(StreamKind.Video, i, "Encoded_Library/Name");
                videoStream.EncodedLibraryVersion = Processing.mediaInfo.Get(StreamKind.Video, i, "Encoded_Library/Version");
                videoStream.EncodedLibraryDate  = Processing.mediaInfo.Get(StreamKind.Video, i, "Encoded_Library/Date");
                videoStream.EncodedLibrarySettings = Processing.mediaInfo.Get(StreamKind.Video, i, "Encoded_Library_Settings");

                if (videoStream.Width > 1280)
                {
                    if ((videoStream.ScanType == "Progressive") || (videoStream.ScanType == ""))
                        videoStream.VideoSize = VideoFormat.Videoformat1080P;
                    else
                        videoStream.VideoSize = VideoFormat.Videoformat1080I;
                }
                else if (videoStream.Width > 720)
                {
                    videoStream.VideoSize = VideoFormat.Videoformat720P;
                }
                else if ((videoStream.Height > 480) && (videoStream.Height <= 576) && (videoStream.Width <= 720))
                {
                    if ((videoStream.ScanType == "Progressive") || (videoStream.ScanType == ""))
                        videoStream.VideoSize = VideoFormat.Videoformat576P;
                    else
                        videoStream.VideoSize = VideoFormat.Videoformat576I;
                }
                else
                {
                    if ((videoStream.ScanType == "Progressive") || (videoStream.ScanType == ""))
                        videoStream.VideoSize = VideoFormat.Videoformat480P;
                    else
                        videoStream.VideoSize = VideoFormat.Videoformat480I;
                }

                Video.Add(videoStream);
            }
            #endregion

            #region Get Audio Info
            for (int i = 0; i < _audioStreams; i++)
            {
                AudioStreamInfo audioStream = new AudioStreamInfo();

                Int32.TryParse(Processing.mediaInfo.Get(StreamKind.Audio, i, "StreamKindID"), numStyle, AppSettings.CInfo, out audioStream.StreamKindID);
                Int32.TryParse(Processing.mediaInfo.Get(StreamKind.Audio, i, "StreamKindPos"), numStyle, AppSettings.CInfo, out audioStream.StreamKindPos);
                Int32.TryParse(Processing.mediaInfo.Get(StreamKind.Audio, i, "ID"), numStyle, AppSettings.CInfo, out audioStream.ID);
                audioStream.Format              = Processing.mediaInfo.Get(StreamKind.Audio, i, "Format");
                audioStream.FormatInfo          = Processing.mediaInfo.Get(StreamKind.Audio, i, "Format/Info");
                audioStream.FormatVersion       = Processing.mediaInfo.Get(StreamKind.Audio, i, "Format_Version");
                audioStream.FormatProfile       = Processing.mediaInfo.Get(StreamKind.Audio, i, "Format_Profile");
                audioStream.CodecID             = Processing.mediaInfo.Get(StreamKind.Audio, i, "CodecID");
                audioStream.CodecIDInfo         = Processing.mediaInfo.Get(StreamKind.Audio, i, "CodecID/Info");
                audioStream.CodecIDUrl          = Processing.mediaInfo.Get(StreamKind.Audio, i, "CodecID/Url");
                audioStream.CodecIDDescription  = Processing.mediaInfo.Get(StreamKind.Audio, i, "CodecID_Description");
                Int64.TryParse(Processing.mediaInfo.Get(StreamKind.Audio, i, "Duration"), numStyle, AppSettings.CInfo, out audioStream.Duration);
                audioStream.BitRateMode         = Processing.mediaInfo.Get(StreamKind.Audio, i, "BitRate_Mode");
                Int32.TryParse(Processing.mediaInfo.Get(StreamKind.Audio, i, "BitRate"), numStyle, AppSettings.CInfo, out audioStream.BitRate);
                Int32.TryParse(Processing.mediaInfo.Get(StreamKind.Audio, i, "BitRate_Minimum"), numStyle, AppSettings.CInfo, out audioStream.BitRateMin);
                Int32.TryParse(Processing.mediaInfo.Get(StreamKind.Audio, i, "BitRate_Nominal"), numStyle, AppSettings.CInfo, out audioStream.BitRateNom);
                Int32.TryParse(Processing.mediaInfo.Get(StreamKind.Audio, i, "BitRate_Maximum"), numStyle, AppSettings.CInfo, out audioStream.BitRateMax);
                Int32.TryParse(Processing.mediaInfo.Get(StreamKind.Audio, i, "Channel(s)"), numStyle, AppSettings.CInfo, out audioStream.Channels);
                audioStream.ChannelsString      = Processing.mediaInfo.Get(StreamKind.Audio, i, "Channel(s)/String");
                audioStream.ChannelPositions    = Processing.mediaInfo.Get(StreamKind.Audio, i, "ChannelPositions");
                Int32.TryParse(Processing.mediaInfo.Get(StreamKind.Audio, i, "SamplingRate"), numStyle, AppSettings.CInfo, out audioStream.SamplingRate);
                Int32.TryParse(Processing.mediaInfo.Get(StreamKind.Audio, i, "BitDepth"), numStyle, AppSettings.CInfo, out audioStream.BitDepth);
                audioStream.CompressionMode     = Processing.mediaInfo.Get(StreamKind.Audio, i, "Compression_Mode");
                Int32.TryParse(Processing.mediaInfo.Get(StreamKind.Audio, i, "Delay"), numStyle, AppSettings.CInfo, out audioStream.Delay);
                UInt64.TryParse(Processing.mediaInfo.Get(StreamKind.Audio, i, "StreamSize"), numStyle, AppSettings.CInfo, out audioStream.StreamSize);
                audioStream.EncodedLibrary      = Processing.mediaInfo.Get(StreamKind.Audio, i, "Encoded_Library");
                audioStream.EncodedLibraryName  = Processing.mediaInfo.Get(StreamKind.Audio, i, "Encoded_Library/Name");
                audioStream.EncodedLibraryVersion = Processing.mediaInfo.Get(StreamKind.Audio, i, "Encoded_Library/Version");
                audioStream.EncodedLibraryDate  = Processing.mediaInfo.Get(StreamKind.Audio, i, "Encoded_Library/Date");
                audioStream.EncodedLibrarySettings = Processing.mediaInfo.Get(StreamKind.Audio, i, "Encoded_Library_Settings");
                audioStream.LanguageFull        = Processing.mediaInfo.Get(StreamKind.Audio, i, "Language/String1");
                audioStream.LanguageIso6391     = Processing.mediaInfo.Get(StreamKind.Audio, i, "Language/String2");
                audioStream.LanguageIso6392     = Processing.mediaInfo.Get(StreamKind.Audio, i, "Language/String3");

                Audio.Add(audioStream);
            }
            #endregion

            #region Get Image Info
            for (int i = 0; i < _imageStreams; i++)
            {
                ImageStreamInfo imageStream = new ImageStreamInfo();

                Int32.TryParse(Processing.mediaInfo.Get(StreamKind.Image, i, "StreamKindID"), numStyle, AppSettings.CInfo, out imageStream.StreamKindID);
                Int32.TryParse(Processing.mediaInfo.Get(StreamKind.Image, i, "ID"), numStyle, AppSettings.CInfo, out imageStream.ID);
                imageStream.Format                  = Processing.mediaInfo.Get(StreamKind.Image, i, "Format");
                imageStream.CodecIDInfo             = Processing.mediaInfo.Get(StreamKind.Image, i, "CodecID/Info");
                UInt64.TryParse(Processing.mediaInfo.Get(StreamKind.Image, i, "StreamSize"), numStyle, AppSettings.CInfo, out imageStream.StreamSize);
                imageStream.LanguageFull            = Processing.mediaInfo.Get(StreamKind.Image, i, "Language/String1");
                imageStream.LanguageIso6392         = Processing.mediaInfo.Get(StreamKind.Image, i, "Language/String3");

                Image.Add(imageStream);
            }
            #endregion

            #region Get Text Info
            for (int i = 0; i < _textStreams; i++)
            {
                TextStreamInfo textStream = new TextStreamInfo();

                Int32.TryParse(Processing.mediaInfo.Get(StreamKind.Text, i, "StreamKindID"), numStyle, AppSettings.CInfo, out textStream.StreamKindID);
                Int32.TryParse(Processing.mediaInfo.Get(StreamKind.Text, i, "ID"), numStyle, AppSettings.CInfo, out textStream.ID);
                textStream.Format                   = Processing.mediaInfo.Get(StreamKind.Text, i, "Format");
                textStream.CodecIDInfo              = Processing.mediaInfo.Get(StreamKind.Text, i, "CodecID/Info");
                Int32.TryParse(Processing.mediaInfo.Get(StreamKind.Text, i, "Delay"), numStyle, AppSettings.CInfo, out textStream.Delay);
                UInt64.TryParse(Processing.mediaInfo.Get(StreamKind.Text, i, "StreamSize"), numStyle, AppSettings.CInfo, out textStream.StreamSize);
                textStream.LanguageFull             = Processing.mediaInfo.Get(StreamKind.Text, i, "Language/String1");
                textStream.LanguageIso6392          = Processing.mediaInfo.Get(StreamKind.Text, i, "Language/String3");

                Text.Add(textStream);
            }
            #endregion

            #region Get Menu Info
            for (int i = 0; i < _menuCount; i++)
            {
                MenuStreamInfo menuStream = new MenuStreamInfo();

                Int32.TryParse(Processing.mediaInfo.Get(StreamKind.Menu, i, "Chapters_Pos_Begin"), numStyle, AppSettings.CInfo, out menuStream.ChaptersPosBegin);
                Int32.TryParse(Processing.mediaInfo.Get(StreamKind.Menu, i, "Chapters_Pos_End"), numStyle, AppSettings.CInfo, out menuStream.ChaptersPosEnd);

                for (int j = menuStream.ChaptersPosBegin; j < menuStream.ChaptersPosEnd; j++)
                {
                    DateTime tempTime;
                    DateTime.TryParse(Processing.mediaInfo.Get(StreamKind.Menu, i, j, InfoKind.Name), AppSettings.CInfo, DateTimeStyles.AssumeLocal, out tempTime);
                    Chapters.Add(tempTime.TimeOfDay);
                }
            }
            #endregion
            Processing.mediaInfo.Option("Complete");
            Processing.mediaInfo.Close();
        }
Example #37
 private static int WriteMainHeader(RiffWriter rw, VideoStreamInfo vsi, bool hasAudioStream)
 {
     rw.BeginChunk (AviDemux.ID_avih);
     int offset = (int)rw.binaryWriter.Seek (0, SeekOrigin.Current);
     var bw = rw.binaryWriter;
     bw.Write (Mathf.RoundToInt (1000000f / vsi.framerate)); // dwMicroSecPerFrame
     bw.Write ((int)0); // dwMaxBytesPerSec
     bw.Write ((int)0); // dwPaddingGranularity
     bw.Write ((int)(AVIMainHeader.AVIF_HASINDEX | AVIMainHeader.AVIF_MUSTUSEINDEX)); // dwFlags
     bw.Write (vsi.frameCount); // dwTotalFrames. this will be written over later!
     bw.Write ((int)0); // dwInitialFrames
     bw.Write (hasAudioStream ? 2 : 1); // dwStreams
     bw.Write ((int)0); // dwSuggestedBufferSize, not suggesting any value
     bw.Write (vsi.width);
     bw.Write (vsi.height);
     bw.Write ((long)0); // dwReserver0 and dwReserver1
     bw.Write ((long)0); // dwReserver2 and dwReserver3
     rw.EndChunk ();
     return offset;
 }
Example #38
 private static void WriteVideoFormatHeader(RiffWriter rw, VideoStreamInfo vsi)
 {
     rw.BeginChunk (AviDemux.ID_strf);
     var bw = rw.binaryWriter;
     bw.Write ((int)40); // biSize
     bw.Write (vsi.width); // biWidth
     bw.Write (vsi.height); // biHeight
     bw.Write ((short)1); // biPlanes
     bw.Write ((short)vsi.bitsPerPixel);
     bw.Write (vsi.codecFourCC); // biCompression
     bw.Write (vsi.width * vsi.height * vsi.bitsPerPixel / 8); // biSizeImage
     bw.Write ((int)0); // biXPelsPerMeter
     bw.Write ((int)0); // biYPelsPerMeter
     bw.Write ((int)0); // biClrUsed
     bw.Write ((int)0); // biClrImportant
     rw.EndChunk ();
 }
Example #39
        /// <summary>
        /// Connects to a URL and starts waiting for MJPEG video frames
        /// </summary>
        public override void Connect(string url, LoadOptions loadOptions = null)
        {
            #if !UNITY_WINRT

            if (loadOptions != null) {
                videoStreamInfo = loadOptions.videoStreamInfo != null ? loadOptions.videoStreamInfo : new VideoStreamInfo ();
                timeout = loadOptions.connectTimeout;
            } else {
                videoStreamInfo = new VideoStreamInfo ();
                timeout = 10;
            }
            videoStreamInfo.codecFourCC = MP.Decoder.VideoDecoderMJPEG.FOURCC_MJPG;
            videoStreamInfo.frameCount = 0;
            videoStreamInfo.framerate = 0;

            frameRingBuffer = new byte[1][];
            receivedFrameCount = 0;

            shouldStop = false;
            thread = new Thread (ThreadRun);
            thread.Start (url);

            #else
            Status = "Streaming not supported on Windows Store build target";
            throw new NotSupportedException("Streaming is currently not possible on Windows Store build target");
            #endif
        }
Example #40
 private static int WriteVideoStreamHeader(RiffWriter rw, VideoStreamInfo vsi)
 {
     rw.BeginChunk (AviDemux.ID_strh);
     int offset = (int)rw.binaryWriter.Seek (0, SeekOrigin.Current);
     var bw = rw.binaryWriter;
     bw.Write (AviDemux.FCC_vids);
     bw.Write (vsi.codecFourCC);
     bw.Write ((int)0); // dwFlags
     bw.Write ((short)0); // wPriority
     bw.Write ((short)0); // wLanguage
     bw.Write ((int)0); // dwInitialFrames
     int scale, rate;
     FindScaleAndRate (out scale, out rate, vsi.framerate);
     bw.Write (scale); // dwScale
     bw.Write (rate); // dwRate
     bw.Write ((int)0); // dwStart
     bw.Write (vsi.frameCount); // dwLength. that's how many frames will be in this RIFF element, written over later
     bw.Write ((int)0); // dwSuggestedBufferSize, not suggesting any value
     bw.Write ((int)-1); // dwQuality = -1 meaning "default quality"
     bw.Write ((int)0); // dwSampleSize = 0 for video
     bw.Write ((short)0);
     bw.Write ((short)0);
     bw.Write ((short)vsi.width);
     bw.Write ((short)vsi.height);
     rw.EndChunk ();
     return offset;
 }