Example 1
        protected override void OnClipInitializing(FilterInitializationEventArgs e)
        {
            base.OnClipInitializing(e);

            using (var stream = this.OpenStreamForFrame(0))
            {
                using (var decoder = new PNGDecoder())
                {
                    decoder.Bitstream = stream;
                    decoder.Initialize();
                    var frame = decoder.Decode();
                    if (frame == null)
                    {
                        return;
                    }
                    else if (frame.Video != null)
                    {
                        var bitmap = frame.Video[0];
                        if (bitmap != null)
                        {
                            var track = new VideoTrack();
                            //track.SampleCount = this.FrameCount;
                            track.Width           = bitmap.Width;
                            track.Height          = bitmap.Height;
                            track.Format          = bitmap.Format;
                            track.SamplesPerFrame = 1;

                            //this.Clip = new Clip(track);
                        }
                    }
                }
            }
        }
Example 2
//
// Adds a given media fileName to the current track at the specified cursorPosition
//
    void InsertFileAt(Vegas vegas, string fileName, Timecode cursorPosition)
    {
        PlugInNode plugIn = vegas.Transitions.FindChildByName("VEGAS Linear Wipe");

        VideoEvent videoEvent = null;

        Media      media      = new Media(fileName);
        VideoTrack videoTrack = FindSelectedVideoTrack(vegas.Project);

        videoEvent = videoTrack.AddVideoEvent(cursorPosition, Timecode.FromSeconds(stillLength));
        Take take = videoEvent.AddTake(media.GetVideoStreamByIndex(0));

        videoEvent.MaintainAspectRatio = false;

        VideoMotionKeyframe key1 = new VideoMotionKeyframe(Timecode.FromSeconds(stillLength));

        videoEvent.VideoMotion.Keyframes.Add(key1);
        VideoMotionKeyframe key0 = videoEvent.VideoMotion.Keyframes[0];

        key0.ScaleBy(new VideoMotionVertex(initialScale, initialScale));
        key0.RotateBy(initialRotationRadians);


        Effect fx = new Effect(plugIn);

        videoEvent.FadeIn.Transition = fx;
        fx.Preset = "Top-Down, Soft Edge";
    }
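The snippet above calls a FindSelectedVideoTrack helper that is not shown (it also relies on fields such as stillLength, initialScale and initialRotationRadians). A minimal sketch of that helper, assuming the standard Vegas scripting API, could look like this:

    // Sketch only: returns the first selected video track, or null if none.
    VideoTrack FindSelectedVideoTrack(Project project)
    {
        foreach (Track track in project.Tracks)
        {
            if (track.IsVideo() && track.Selected)
            {
                return (VideoTrack)track;
            }
        }
        return null;
    }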
Example 3
        // How to create an image track
        // https://www.youtube.com/watch?v=GdrXo_HiNZM
        TrackEvent AddMedia(Project project, string mediaPath, int trackIndex, Timecode start, Timecode length)
        {
#if false
            Media media = Media.CreateInstance(project, mediaPath);
            Track track = project.Tracks[trackIndex];
            if (track.MediaType == MediaType.Video)
            {
                VideoTrack videoTrack = (VideoTrack)track;
                VideoEvent videoEvent = videoTrack.AddVideoEvent(start, length);
                Take       take       = videoEvent.AddTake(media.GetVideoStreamByIndex(0));
                return(videoEvent);
            }

            // Sample: an image can be added like this
            //AddMediaImage(vegas.Project, @"C:\Users\Administrator\Desktop\mouse.png", 0, );
            {
                Media media = Media.CreateInstance(vegas.Project, @"C:\Users\Administrator\Desktop\mouse.png");
                Track track = vegas.Project.Tracks[0];
                if (track.MediaType == MediaType.Video)
                {
                    VideoTrack videoTrack = (VideoTrack)track;
                    VideoEvent videoEvent = videoTrack.AddVideoEvent(Timecode.FromSeconds(1), Timecode.FromSeconds(5));
                    Take       take       = videoEvent.AddTake(media.GetVideoStreamByIndex(0));
                    return(videoEvent);
                }
            }
#endif
            return(null);
        }
Example 4
        public void CheckDisplaySize(VideoTrack track)
        {
            if (!IsAspectRatioChecked)
            {
                // Bail out before computing the ratio if the SAR is undefined.
                if (track.SarNum == 0 || track.SarDen == 0)
                {
                    return;
                }

                var sar = 1.0 * track.SarNum / track.SarDen;

                Debug.WriteLine(String.Format("Video Size:{0}x{1}\r\nSAR:{2}/{3}", track.Width, track.Height,
                                              track.SarNum, track.SarDen));

                if (sar > 1)
                {
                    DisplayWidth  = sar * track.Width;
                    DisplayHeight = track.Height;
                }
                else
                {
                    DisplayWidth  = track.Width;
                    DisplayHeight = track.Height / sar;
                }
            }
        }
Example 5
        public void DidRemoveVideoTrack(LocalMedia media, VideoTrack videoTrack)
        {
            Console.WriteLine("Local video track removed: {0}", videoTrack);

            /* You do not need to call [videoTrack detach:] here; your view will be detached once this call returns. */
            camera = null;
        }
Example 6
        /// <summary>
        /// Helper method that creates a user friendly type from the internal interop structure.
        /// </summary>
        /// <param name="s">TrackDescriptionStructure from interop</param>
        /// <returns>public TrackDescription to be consumed by the user</returns>
        internal static MediaTrack Build(this MediaTrackStructure s)
        {
            AudioTrack    audioTrack    = default;
            VideoTrack    videoTrack    = default;
            SubtitleTrack subtitleTrack = default;

            switch (s.TrackType)
            {
            case TrackType.Audio:
                audioTrack = MarshalUtils.PtrToStructure <AudioTrack>(s.TrackData);
                break;

            case TrackType.Video:
                videoTrack = MarshalUtils.PtrToStructure <VideoTrack>(s.TrackData);
                break;

            case TrackType.Text:
                subtitleTrack = MarshalUtils.PtrToStructure <SubtitleTrackStructure>(s.TrackData).Build();
                break;

            case TrackType.Unknown:
                break;
            }

            return(new MediaTrack(s.Codec,
                                  s.OriginalFourcc,
                                  s.Id,
                                  s.TrackType,
                                  s.Profile,
                                  s.Level,
                                  new MediaTrackData(audioTrack, videoTrack, subtitleTrack), s.Bitrate,
                                  s.Language.FromUtf8(),
                                  s.Description.FromUtf8()));
        }
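For context, a consumer of the MediaTrack built here (assuming LibVLCSharp 3.x, where parsed tracks are exposed via Media.Tracks) might read the video dimensions like this:

        // Sketch only: "video.mp4" is a placeholder path.
        Core.Initialize();
        using var libvlc = new LibVLC();
        using var media  = new Media(libvlc, "video.mp4", FromType.FromPath);
        await media.Parse();

        foreach (var track in media.Tracks)
        {
            if (track.TrackType == TrackType.Video)
            {
                // track.Data.Video is the VideoTrack structure filled in above.
                Console.WriteLine($"{track.Data.Video.Width}x{track.Data.Video.Height}");
            }
        }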
Example 7
//
// Adds a given media fileName to the current track at the specified cursorPosition
//
    void InsertFileAt(Vegas vegas, string fileName, Timecode cursorPosition)
    {
        VideoEvent videoEvent = null;

        Media      media      = new Media(fileName);
        VideoTrack videoTrack = FindSelectedVideoTrack(vegas.Project);

        videoEvent = videoTrack.AddVideoEvent(cursorPosition, Timecode.FromSeconds(stillLength));
        Take take = videoEvent.AddTake(media.GetVideoStreamByIndex(0));

        videoEvent.MaintainAspectRatio = false;

        VideoMotionKeyframe key1 = new VideoMotionKeyframe(Timecode.FromSeconds(stillLength));

        videoEvent.VideoMotion.Keyframes.Add(key1);
        VideoMotionKeyframe key0 = videoEvent.VideoMotion.Keyframes[0];

        key0.ScaleBy(new VideoMotionVertex(initialScale, initialScale));
        // Random.Next's upper bound is exclusive, so (-1, 2) yields -1, 0 or 1.
        key0.RotateBy(initialRotationRadians * (double)rnd.Next(-1, 2));
        key0.MoveBy(new VideoMotionVertex((float)rnd.Next(-15, 15), (float)rnd.Next(-20, 20)));

        // Likewise, the exclusive upper bound must be Length for the last transition to be selectable.
        PlugInNode plugIn = vegas.Transitions.FindChildByName(desiredTransitions[rnd.Next(0, desiredTransitions.Length)]);

        Effect fx = new Effect(plugIn);

        videoEvent.FadeIn.Transition = fx;
    }
Example 8
        public VideoRenderer(VideoTrack videoTrack, RendererOptions options)
        {
            VideoTrack = videoTrack;

            VideoFrameWidth     = options.VideoFrameWidth;
            VideoFrameHeight    = options.VideoFrameHeight;
            VideoFrameQueueSize = options.VideoFrameQueueSize;

            videoTrack.LocalVideoFrameEncoded += OnLocalVideoFrameEncoded;

            // _onMissedFrame = options.OnMissedFrame ?? OnMissedFrame;

            bool debug = options.CreationFlags.HasFlag(D3D11.DeviceCreationFlags.Debug);

            FactoryDXGI = new DXGI.Factory2(debug);

            // Find the requested adapter.
            using (var adapters = FactoryDXGI.Adapters.ToDisposableList())
            {
                var adapter = adapters.First(a => a.Description.VendorId == options.AdapterVendorId);

                Device3D = new D3D11.Device(adapter, options.CreationFlags, options.FeatureLevels);

                DeviceDXGI = Device3D.QueryInterface <DXGI.Device>();

                // We need to access D3D11 on multiple threads, so enable multi-threading
                ThreadLock3D = Device3D.ImmediateContext.QueryInterface <D3D11.Multithread>();
                ThreadLock3D.SetMultithreadProtected(true);
            }
        }
Example 9
        private VideoTrack createTrackAbove(Project project, Track track)
        {
            VideoTrack tr = new VideoTrack(track.Index);

            project.Tracks.Add(tr);
            return(tr);
        }
Example 10
        private VideoEvent createText(Media media, VideoTrack track, TrackEvent eventBelow)
        {
            VideoEvent txtEvent = track.AddVideoEvent(eventBelow.Start, eventBelow.End - eventBelow.Start);
            Take       take     = txtEvent.AddTake(media.GetVideoStreamByIndex(0));

            return(txtEvent);
        }
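A plausible way to combine this with createTrackAbove from the previous example (vegas, textMedia and existingEvent are assumed to be in scope) is to overlay a text event on a fresh track directly above an existing event:

        // Hypothetical usage: textMedia is a generated text Media,
        // existingEvent is the event the text should sit above.
        VideoTrack overlay  = createTrackAbove(vegas.Project, existingEvent.Track);
        VideoEvent txtEvent = createText(textMedia, overlay, existingEvent);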
Example 11
        void AddMedias(Vegas vegas, string file)
        {
            // Add a track
            Track track = new VideoTrack(0, _trackName);

            vegas.Project.Tracks.Add(track);
            track.Selected = true;
            // Open the text file; one event is created per line
            using (System.IO.StreamReader sr = new System.IO.StreamReader(file, System.Text.Encoding.GetEncoding("shift_jis")))
            {
                string line = "";

                while ((line = sr.ReadLine()) != null)
                {
                    // Create a video event
                    VideoEvent videoEvent = new VideoEvent(Timecode.FromSeconds(_currentTime), Timecode.FromSeconds(_timeLength));
                    track.Events.Add(videoEvent);
                    // Create the Take; it carries the Media's text content
                    Take take = GenerateTakeText(vegas, line);
                    if (take != null)
                    {
                        videoEvent.Takes.Add(take); // register the text Take with the event
                    }
                    // Advance to the next start position
                    _currentTime += _timeLength + _timeInterval;
                }
            }
        }
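GenerateTakeText is not shown above. A rough sketch, assuming the "VEGAS Titles & Text" OFX generator and its "Text" parameter (both names vary between Vegas versions), might be:

        Take GenerateTakeText(Vegas vegas, string text)
        {
            // Assumed generator and parameter names; adjust for your Vegas version.
            PlugInNode generator = vegas.Generators.FindChildByName("VEGAS Titles & Text");
            Media      media     = new Media(generator);

            OFXEffect          ofx       = media.Generator.OFXEffect;
            OFXStringParameter textParam = (OFXStringParameter)ofx.FindParameterByName("Text");
            textParam.Value = text; // some versions expect RTF here

            return new Take(media.GetVideoStreamByIndex(0));
        }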
Example 12
 private IEnumerable <IVideoTrack> PrepareVideoTracks(FileInfo fileInfo, IEnumerable <TrackInfoOutput> trackModels)
 {
     foreach (var model in trackModels)
     {
         var track = new VideoTrack
         {
             ID                      = model.ID,
             FileInfo                = fileInfo,
             StreamOrder             = model.StreamOrder,
             Format                  = model.Format,
             FormatProfile           = model.Format_Profile,
             FormatLevel             = model.Format_Level,
             Width                   = model.Width,
             Height                  = model.Height,
             CodecId                 = model.CodecID,
             Duration                = model.Duration,
             Bitrate                 = model.Bitrate,
             PixelAspectRatio        = model.PixelAspectRatio,
             DisplayAspectRatio      = model.DisplayAspectRatio,
             FrameRateMode           = model.FrameRate_Mode,
             FrameRate               = model.FrameRate,
             ColorSpace              = model.ColorSpace,
             ChromaSubSampling       = model.ChromaSubSampling,
             BitDepth                = model.BitDepth,
             EncodedLibrarySetteings = model.Encoded_Library_Settings
         };
         yield return(track);
     }
 }
Example 13
        private async Task ProcessObjectDetectionInsightsAsync(ImageAnalyzer analyzer, int frameNumber)
        {
            this.detectedObjectsInFrame.Add(frameNumber, analyzer);

            foreach (var detectedObject in analyzer.DetectedObjects)
            {
                if (this.detectedObjectsInVideo.ContainsKey(detectedObject.ObjectProperty))
                {
                    this.detectedObjectsInVideo[detectedObject.ObjectProperty]++;
                }
                else
                {
                    this.detectedObjectsInVideo[detectedObject.ObjectProperty] = 1;

                    ImageSource croppedContent = await Util.GetCroppedBitmapAsync(analyzer.GetImageStreamCallback,
                                                                                  new Microsoft.Azure.CognitiveServices.Vision.Face.Models.FaceRectangle
                    {
                        Left   = detectedObject.Rectangle.X,
                        Top    = detectedObject.Rectangle.Y,
                        Width  = detectedObject.Rectangle.W,
                        Height = detectedObject.Rectangle.H
                    });

                    BitmapImage frameBitmap = new BitmapImage();
                    await frameBitmap.SetSourceAsync((await analyzer.GetImageStreamCallback()).AsRandomAccessStream());

                    VideoTrack videoTrack = new VideoTrack
                    {
                        Tag         = detectedObject.ObjectProperty,
                        CroppedFace = croppedContent,
                        DisplayText = detectedObject.ObjectProperty,
                        Duration    = (int)this.videoPlayer.NaturalDuration.TimeSpan.TotalSeconds,
                    };

                    videoTrack.Tapped += this.TimelineTapped;
                    this.detectedObjectsListView.Children.Insert(0, videoTrack);

                    this.FilterDetectedObjectTimeline();
                }

                // Update the timeline for this tag
                VideoTrack track = (VideoTrack)this.detectedObjectsListView.Children.FirstOrDefault(f => (string)((FrameworkElement)f).Tag == detectedObject.ObjectProperty);
                if (track != null)
                {
                    track.SetVideoFrameState(frameNumber, new Emotion {
                        Neutral = 1
                    }, analyzer);

                    uint childIndex = (uint)this.detectedObjectsListView.Children.IndexOf(track);
                    if (childIndex > 5)
                    {
                        // Bring towards the top so it becomes visible
                        this.detectedObjectsListView.Children.Move(childIndex, 5);
                    }
                }
            }

            this.UpdateObjectDetectionFilters();
        }
Example 14
        public virtual void Send(VideoTrack videoTrack)
        {
            var description = Texture.Description;

            videoTrack.SendVideoFrame(Texture.NativePointer,
                                      0, description.Width, description.Height,
                                      VideoFrameFormat.GpuTextureD3D11);
        }
Example 15
    public void FromVegas(Vegas vegas)
    {
        int       trackCount  = 10;
        int       eventCount  = 10;
        Timecode  eventLength = Timecode.FromSeconds(10);
        MediaType mediaType   = MediaType.Audio;

        for (int i = 0; i < trackCount; i++)
        {
            // create a track
            Track track;
            if (mediaType == MediaType.Audio)
            {
                track = new AudioTrack(i, "Audio " + ((i / 2) + 1));
            }
            else
            {
                track = new VideoTrack(i, "Video " + ((i / 2) + 1));
            }

            // add the track
            vegas.Project.Tracks.Add(track);

            Timecode startTime = Timecode.FromSeconds(0);

            for (int j = 0; j < eventCount; j++)
            {
                // create an event
                TrackEvent trackEvent;
                if (mediaType == MediaType.Audio)
                {
                    trackEvent = new AudioEvent(startTime, eventLength, "Audio Event " + (j + 1));
                }
                else
                {
                    trackEvent = new VideoEvent(startTime, eventLength, "Video Event " + (j + 1));
                }

                // add the event to the track
                track.Events.Add(trackEvent);

                // increment the start time
                startTime += eventLength;
            }

            // toggle the media type
            if (mediaType == MediaType.Audio)
            {
                mediaType = MediaType.Video;
            }
            else
            {
                mediaType = MediaType.Audio;
            }
        }
    }
Example 16
    // --------------------------------------------------------------------
    // Concatenate the parts
    // --------------------------------------------------------------------
    private void ConcatParts(List <VideoPart> oVideoParts, Double oAudioLen, Double oInterval)
    {
        // Create the track
        VideoTrack aVideoTrack = new VideoTrack(mVegas.Project, 0, TRACK_NAME);

        mVegas.Project.Tracks.Add(aVideoTrack);

        // Number of parts to use
        Int32        aUseNum = (Int32)Math.Ceiling(oAudioLen / oInterval);
        PartsUseMode aPartsUseMode;

        if (oVideoParts.Count >= aUseNum * 3)
        {
            aPartsUseMode = PartsUseMode.OnceGroup;
        }
        else if (oVideoParts.Count >= aUseNum)
        {
            aPartsUseMode = PartsUseMode.Once;
        }
        else
        {
            aPartsUseMode = PartsUseMode.Reuse;
        }

        // Add the parts
        Random aRandom = new Random();

        for (Int32 i = 0; i < aUseNum; i++)
        {
            // Part index
            Int32 aPartsIndex = aRandom.Next(0, oVideoParts.Count);

            // The part itself
            VideoEvent aVideoEvent = aVideoTrack.AddVideoEvent(new Timecode(i * oInterval), new Timecode(oInterval));
            Take       aTake       = aVideoEvent.AddTake(oVideoParts[aPartsIndex].VideoStream);
            aTake.Offset = oVideoParts[aPartsIndex].Offset;

            // Discard used parts
            switch (aPartsUseMode)
            {
            case PartsUseMode.Once:
                oVideoParts.RemoveAt(aPartsIndex);
                break;

            case PartsUseMode.OnceGroup:
                for (Int32 j = aPartsIndex + 1; j >= aPartsIndex - 1; j--)
                {
                    if (0 <= j && j < oVideoParts.Count)
                    {
                        oVideoParts.RemoveAt(j);
                    }
                }
                break;
            }
        }
    }
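The VideoPart type used above is project-specific and not shown; from its usage it needs at least a video stream and a take offset, roughly:

    // Hypothetical shape of VideoPart, inferred from its usage above.
    private class VideoPart
    {
        public VideoStream VideoStream { get; set; } // stream fed into AddTake
        public Timecode    Offset      { get; set; } // where the usable portion starts
    }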
Example 17
 public void OnVideoTrackRemoved(Participant participant, VideoTrack videoTrack)
 {
     //LogHelper.Call(GetType(), videoTrack.TrackId);
     if (RemoteVideoTrack.TrackId != videoTrack.TrackId)
     {
         return;
     }
     _listener?.RemoveRemoteVideoTrack(RemoteVideoTrack);
     RemoteVideoTrack = null;
 }
Example 18
        private static void SetTrackMotionKeyFrame(long currentFrame, VideoTrack selectedTrack, int width, PointInSpace point,
                                                   int height)
        {
            var mkf = GetOrCreateTrackMotionKeyframe(currentFrame, selectedTrack);

            mkf.Width     = width * point.Zoom;
            mkf.Height    = height * point.Zoom;
            mkf.PositionX = point.X - (width / 2d);
            mkf.PositionY = point.Y - (height / 2d);
        }
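GetOrCreateTrackMotionKeyframe is not shown. A minimal sketch, assuming the Vegas scripting API (InsertMotionKeyframe also appears in Example 30 below), could be:

        private static TrackMotionKeyframe GetOrCreateTrackMotionKeyframe(long currentFrame, VideoTrack track)
        {
            // Reuse an existing keyframe at this frame, if any.
            foreach (TrackMotionKeyframe kf in track.TrackMotion.MotionKeyframes)
            {
                if (kf.Position.FrameCount == currentFrame)
                {
                    return kf;
                }
            }
            // Otherwise insert a new one at that position.
            return track.TrackMotion.InsertMotionKeyframe(Timecode.FromFrames(currentFrame));
        }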
Example 19
 void DropRenderers(VideoTrack track)
 {
     if (track?.Renderers?.Any() == true)
     {
         foreach (var r in track.Renderers.ToArray())
         {
             track.RemoveRenderer(r);
         }
     }
 }
Example 21
        private async Task ProcessVisualFeaturesInsightsAsync(ImageAnalyzer analyzer, int frameNumber)
        {
            var tags = analyzer.AnalysisResult.Tags;

            if (!ShowAgeAndGender)
            {
                tags = tags.Where(t => !Util.ContainsGenderRelatedKeyword(t.Name)).ToList();
            }

            foreach (var tag in tags)
            {
                if (this.tagsInVideo.ContainsKey(tag.Name))
                {
                    this.tagsInVideo[tag.Name]++;
                }
                else
                {
                    this.tagsInVideo[tag.Name] = 1;

                    BitmapImage frameBitmap = new BitmapImage();
                    await frameBitmap.SetSourceAsync((await analyzer.GetImageStreamCallback()).AsRandomAccessStream());

                    VideoTrack videoTrack = new VideoTrack
                    {
                        Tag         = tag.Name,
                        CroppedFace = frameBitmap,
                        DisplayText = tag.Name,
                        Duration    = (int)this.videoPlayer.NaturalDuration.TimeSpan.TotalSeconds,
                    };

                    videoTrack.Tapped += this.TimelineTapped;
                    this.tagsListView.Children.Insert(0, videoTrack);

                    this.FilterFeatureTimeline();
                }

                // Update the timeline for this tag
                VideoTrack track = (VideoTrack)this.tagsListView.Children.FirstOrDefault(f => (string)((FrameworkElement)f).Tag == tag.Name);
                if (track != null)
                {
                    track.SetVideoFrameState(frameNumber, new Emotion {
                        Neutral = 1
                    });

                    uint childIndex = (uint)this.tagsListView.Children.IndexOf(track);
                    if (childIndex > 5)
                    {
                        // Bring towards the top so it becomes visible
                        this.tagsListView.Children.Move(childIndex, 5);
                    }
                }
            }

            this.UpdateTagFilters();
        }
Example 22
 public void RemoveRemoteVideoTrack(VideoTrack track)
 {
     try
     {
         MainUserViewProfile.Visibility = ViewStates.Visible;
     }
     catch (Exception e)
     {
         Console.WriteLine(e);
     }
 }
Example 23
 public void RemoveRemoteVideoTrack(VideoTrack track)
 {
     try
     {
         MainUserViewProfile.Visibility = ViewStates.Visible;
     }
     catch (Exception e)
     {
         Methods.DisplayReportResultTrack(e);
     }
 }
Example 24
        public void AddedVideoTrack(Participant participant, VideoTrack videoTrack)
        {
            Console.WriteLine("Video added for participant: {0}", participant.Identity);

            remoteVideoTrack = videoTrack;

            BeginInvokeOnMainThread(() => {
                remoteVideoTrack.Attach(this.remoteVideoContainer);
                remoteVideoTrack.Delegate = this;
            });
        }
Example 25
 public void OnVideoTrackAdded(Participant participant, VideoTrack videoTrack)
 {
     try
     {
         RemoteVideoTrack = videoTrack;
         _listener?.SetRemoteVideoTrack(RemoteVideoTrack);
     }
     catch (Exception e)
     {
         Console.WriteLine(e);
     }
 }
Example 26
        public LocalVideoLink(VideoRouter parent, VideoSource source, IPeerConnection target)
        {
            if (source is null)
            {
                throw new ArgumentNullException(nameof(source));
            }
            if (target is null)
            {
                throw new ArgumentNullException(nameof(target));
            }
            if (null == source.VideoTrackSource)
            {
                throw new InvalidProgramException("VideoTrackSource is NULL");
            }

            TargetPeerConnection = target;
            VideoSource          = source;

            _parent = parent
                      ?? throw new ArgumentNullException(nameof(parent));

            // Create track
            var trackId = Guid.NewGuid();

            _track = parent.PeerConnectionFactory.CreateVideoTrack(trackId.ToString(), source.VideoTrackSource);

            // Find the first available transceiver (or create it)
            GetOrCreateTransceiver(out var transceiver, out var isReusingTransceiver);
            Transceiver = transceiver;

            // Next, set/replace the track:
            Transceiver.ToBusyState(_track);

            // If we're re-using an existing transceiver, its metadata must be
            // sent so that clients can update their UIs. If we created a new
            // transceiver there is no need, since the PeerConnection will
            // re-negotiate automatically.
            if (isReusingTransceiver)
            {
                RaiseTransceiverMetadataUdatedEvent();
            }

            // If stream id has not been set, set it.
            // WebRTC does not allow us to change the stream id, but we don't care either,
            // we just want it to be unique.
            if (string.IsNullOrWhiteSpace(Transceiver.Sender.StreamId))
            {
                Transceiver.Sender.StreamId = Guid.NewGuid().ToString();
            }

            // The track is now attached to the peer connection; log it.
            _logger.Debug($"Local track created {_track}");
        }
Example 27
 public void OnVideoTrackAdded(Participant participant, VideoTrack videoTrack)
 {
     try
     {
         Console.WriteLine("ParticipantListener.OnVideoTrackAdded");
         this.mainActivity.primaryVideoView.SetMirror(true);
         videoTrack.AddRenderer(this.mainActivity.primaryVideoView);
     }
     catch (Exception e)
     {
         Console.WriteLine("ParticipantListener.OnVideoTrackAdded " + e.Message);
     }
 }
Example 28
        private VideoTrack CreateVideoTrack(IVideoCapturer videoCapturer)
        {
            _surfaceTextureHelper = SurfaceTextureHelper.Create("CaptureThread", _eglBase.EglBaseContext);

            _videoSource = _factory.CreateVideoSource(true);
            videoCapturer.Initialize(_surfaceTextureHelper, _context, _videoSource.CapturerObserver);
            videoCapturer.StartCapture(VideoWidth, VideoHeight, 30);

            _localVideoTrack = _factory.CreateVideoTrack("ARDAMSv0", _videoSource);
            _localVideoTrack.SetEnabled(true);
            _localVideoTrack.AddSink(_localVideoSink);
            return(_localVideoTrack);
        }
Example 29
        public OKEFile StartMuxing(string path, MediaFile mediaFile)
        {
            List <string> input             = new List <string>();
            string        videoFps          = "";
            string        videoName         = "";
            string        timeCodeFile      = null;
            List <string> audioLanguages    = new List <string>();
            List <string> audioNames        = new List <string>();
            List <string> subtitleLanguages = new List <string>();
            List <string> subtitleNames     = new List <string>();

            foreach (var track in mediaFile.Tracks)
            {
                if (track.Info.MuxOption != MuxOption.Default && track.Info.MuxOption != MuxOption.Mka)
                {
                    continue;
                }
                switch (track.TrackType)
                {
                case TrackType.Audio:
                    AudioTrack audioTrack = track as AudioTrack;
                    audioLanguages.Add(audioTrack.Info.Language);
                    audioNames.Add(audioTrack.Info.Name);
                    break;

                case TrackType.Video:
                    VideoTrack videoTrack = track as VideoTrack;
                    VideoInfo  videoInfo  = track.Info as VideoInfo;
                    videoFps     = $"{videoInfo.FpsNum}/{videoInfo.FpsDen}";
                    videoName    = videoInfo.Name;
                    timeCodeFile = videoInfo.TimeCodeFile;
                    break;

                case TrackType.Subtitle:
                    SubtitleTrack subtitleTrack = track as SubtitleTrack;
                    subtitleLanguages.Add(subtitleTrack.Info.Language);
                    subtitleNames.Add(subtitleTrack.Info.Name);
                    break;
                }

                input.Add(track.File.GetFullPath());
            }

            this.StartMerge(input, path, videoFps, videoName, timeCodeFile, audioLanguages, audioNames, subtitleLanguages, subtitleNames);

            OKEFile outFile = new OKEFile(path);

            outFile.AddCRC32();

            return(outFile.Exists() ? outFile : null);
        }
Example 30
        private static TrackMotionKeyframe SelectOrInsertKeyFrame(VideoTrack videoTrack, KeyFrameInfo kf)
        {
            if (kf.Time == 0)
            {
                var mkf = videoTrack.TrackMotion.MotionKeyframes[0];
                if (mkf.Position.FrameCount > 0)
                {
                    mkf.Position = new Timecode((double)kf.Time * 1000);
                }
                return(mkf);
            }

            return(videoTrack.TrackMotion.InsertMotionKeyframe(new Timecode((double)kf.Time * 1000)));
        }
Example 32
 //==========================================================================
 internal VideoStream(VideoTrack videoTrack)
   : base(videoTrack)
 { 
   // ...
 }
Example 33
 private void VideoTrack_OpenFailed(VideoTrack sender, VideoTrackOpenFailedEventArgs args)
 {
     Log($"VideoTrack.OpenFailed: ExtendedError:{args.ExtendedError} DecoderStatus:{sender.SupportInfo.DecoderStatus} MediaSourceStatus:{sender.SupportInfo.MediaSourceStatus}");
 }