Code Example #1
File: IfoParser.cs  Project: ItsJustSean/subtitleedit
 public VtsVobs()
 {
     VideoStream = new VideoStream();
     AudioStreams = new List<AudioStream>();
     Subtitles = new List<string>();
     SubtitleIDs = new List<string>();
     SubtitleTypes = new List<string>();
 }
Code Example #2
        public HttpResponseMessage Get(string filename, string ext)
        {
            var video = new VideoStream(filename, ext);

            var response = Request.CreateResponse();
            response.Content = new PushStreamContent((Action<Stream, HttpContent, TransportContext>)video.WriteToStream, new MediaTypeHeaderValue("video/" + ext));

            return response;
        }
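
Example #2 depends on a VideoStream helper class (not shown) whose WriteToStream method matches the Action<Stream, HttpContent, TransportContext> delegate that PushStreamContent expects. A minimal sketch under that assumption; the on-disk file layout and error handling here are hypothetical:

// Hypothetical companion to Example #2.
using System;
using System.IO;
using System.Net;
using System.Net.Http;

public class VideoStream
{
    private readonly string _filename;

    public VideoStream(string filename, string ext)
    {
        _filename = filename + "." + ext; // assumed on-disk layout
    }

    // PushStreamContent invokes this with the live response body stream.
    public async void WriteToStream(Stream outputStream, HttpContent content, TransportContext context)
    {
        try
        {
            using (var video = File.OpenRead(_filename))
            {
                await video.CopyToAsync(outputStream); // push bytes as the client reads
            }
        }
        catch (Exception)
        {
            // A client disconnect surfaces here; nothing to do but stop writing.
        }
        finally
        {
            outputStream.Close(); // signals end-of-stream to the client
        }
    }
}
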
Code Example #3
File: DisplayVideoCmd.cs  Project: TNOCS/csTouch
 public DisplayVideoCmd(ref string[] theParams)
 {
     if (theParams.Length < 3) throw new ArgumentException("Number of parameters doesn't match command");
     SecondScreenId = Convert.ToInt32(theParams[1]);
     Model = new VideoStream()
     {
         VideoUrl = theParams[2],
         Description = (theParams.Length >= 4) ? theParams[3] : ""
     };
 }
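
A hypothetical invocation of the command constructor above, showing the expected parameter layout (command name, screen id, video URL, optional description):

// Hypothetical usage of Example #3; values are illustrative only.
string[] theParams = { "DisplayVideo", "2", "http://example.com/clip.mp4", "Demo clip" };
var cmd = new DisplayVideoCmd(ref theParams);
// cmd.SecondScreenId == 2; cmd.Model.VideoUrl == "http://example.com/clip.mp4";
// cmd.Model.Description == "Demo clip" (or "" when only three parameters are given)
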
Code Example #4
        // Create video source object
        public IVideoSource CreateVideoSource(object config)
        {
            // Use a safe cast so a non-StreamConfiguration config yields null
            // instead of throwing InvalidCastException; the null check below
            // then guards both cases.
            StreamConfiguration cfg = config as StreamConfiguration;

            if (cfg != null)
            {
                VideoStream source = new VideoStream();

                source.VideoSource	= cfg.source;

                return (IVideoSource) source;
            }
            return null;
        }
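
A hypothetical caller for the factory above, assuming StreamConfiguration exposes a public source field as the assignment implies:

// Hypothetical usage of Example #4's factory (field name and URL are assumptions).
var cfg = new StreamConfiguration { source = "rtsp://camera.local/stream1" };
IVideoSource src = CreateVideoSource(cfg); // returns null for a mismatched or null config
if (src != null)
{
    // hand src to the host application to start capturing
}
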
Code Example #5
 private void _sensor_AllFramesReady(object sender, AllFramesReadyEventArgs e)
 {
     using (ColorImageFrame frame = e.OpenColorImageFrame())
     {
         if (frame != null)
         {
             byte[] pixelData = new byte[frame.PixelDataLength];
             frame.CopyPixelDataTo(pixelData);
             int stride = frame.Width * 4;
             BitmapSource source = BitmapSource.Create(frame.Width, frame.Height, 96.0, 96.0, PixelFormats.Bgr32, null, pixelData, stride);
             this.image1.Source = source;
             if (this._movieRecordingOn)
             {
                 Bitmap firstFrame = new Bitmap(source.PixelWidth, source.PixelHeight, System.Drawing.Imaging.PixelFormat.Format32bppRgb);
                 BitmapData bitmapdata = firstFrame.LockBits(new System.Drawing.Rectangle(System.Drawing.Point.Empty, firstFrame.Size), ImageLockMode.WriteOnly, System.Drawing.Imaging.PixelFormat.Format32bppRgb);
                 source.CopyPixels(Int32Rect.Empty, bitmapdata.Scan0, bitmapdata.Height * bitmapdata.Stride, bitmapdata.Stride);
                 firstFrame.UnlockBits(bitmapdata);
                 if (this._colorFrameCount == 0)
                 {
                     this._lastFrameTimestamp = frame.Timestamp;
                     this._colorStream = this._colorRecord.AddVideoStream(false, 25.0, firstFrame);
                     this._colorFrameCount++;
                 }
                 else
                 {
                     this._colorStream.AddFrame(firstFrame);
                     this._colorFrameCount++;
                 }
                 firstFrame.Dispose();
             }
         }
     }
 }
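
Example #5 presupposes recording state that is toggled elsewhere in the window. A minimal sketch of those assumed members, with names inferred from the handler (the start/stop methods are hypothetical):

// Hypothetical supporting members for Example #5.
private bool _movieRecordingOn;    // toggled by a Record button
private int _colorFrameCount;      // frames written so far
private long _lastFrameTimestamp;  // timestamp of the first recorded frame
private AviManager _colorRecord;   // AVI writer (same wrapper as Examples #6 and #20)
private VideoStream _colorStream;  // stream returned by AddVideoStream

private void StartRecording(string path)
{
    _colorRecord = new AviManager(path, false); // false = create a new file
    _colorFrameCount = 0;
    _movieRecordingOn = true;
}

private void StopRecording()
{
    _movieRecordingOn = false;
    _colorRecord.Close(); // finalizes the AVI index
    _colorStream = null;
}
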
Code Example #6
File: Program.cs  Project: venkatvishnu/vidplaycorder
        public static void StartRecording()
        {
            Console.WriteLine("Start Recording ...");

            var videoTransfert = new InterProcessCommunication.VideoTranfert();

            VideoStream aviStream  = null;
            AviManager  aviManager = null;

            bool   endOfReccord    = false;
            string lastFileReccord = "";

            do
            {
                // Fetch the next frame
                Console.Write("Reading frame");
                var frame = videoTransfert.ReadFrame();
                Console.WriteLine(" ...");

                // Check whether we have reached the end of the recording
                endOfReccord = frame.EndOfRecord;
                if (!endOfReccord)
                {
                    var bitmap = frame.Bitmap;
                    // If the recording file name has changed, a new video file must be created
                    if (lastFileReccord.Equals(frame.FileName) == false)
                    {
                        // Close the previous file if one is open
                        if (aviManager != null)
                        {
                            Console.WriteLine(@"Close video stream ""{0}"" ...", lastFileReccord);
                            aviManager.Close();
                        }

                        lastFileReccord = frame.FileName;

                        Console.Write(@"Creating video stream ""{0}"" ...", lastFileReccord);

                        // If the file already exists, delete it
                        if (System.IO.File.Exists(lastFileReccord))
                        {
                            System.IO.File.Delete(lastFileReccord);
                        }

                        aviManager = new AviManager(lastFileReccord, false);
                        aviStream  = aviManager.AddVideoStream(false, frame.FrameRate, bitmap); // bitmap is the first image; it sets the frame size of the output video
                        Console.WriteLine(" ...");
                    }
                    else
                    {
                        Console.Write("Add frame to stream");
                        aviStream.AddFrame(bitmap);
                        Console.WriteLine(" ...");
                    }
                    bitmap.Dispose();
                }
            } while (!endOfReccord);

            // Close the file if one is open
            if (aviManager != null)
            {
                Console.WriteLine(@"Close video stream ""{0}"" ...", lastFileReccord);
                aviManager.Close();
            }


            Console.WriteLine("End Recording ...");
        }
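
Example #6 reads frames from an InterProcessCommunication.VideoTranfert helper that is not shown. Judging by the members the loop touches, each frame record carries at least the following shape (a hypothetical reconstruction):

// Hypothetical shape of the frame object consumed in Example #6.
public class FrameRecord
{
    public bool EndOfRecord { get; set; }              // true once the producer stops
    public string FileName { get; set; }               // target AVI path; a change starts a new file
    public double FrameRate { get; set; }              // passed to AviManager.AddVideoStream
    public System.Drawing.Bitmap Bitmap { get; set; }  // the decoded frame image
}
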
Code Example #7
 public AutoEncodeWindow(VideoStream videoStream, List<AudioJob> audioStreams, MainForm mainForm, bool prerender, VideoInfo vInfo)
     : this()
 {
     this.vInfo = vInfo;
     mainForm.Log.Add(log);
     this.videoStream = videoStream;
     this.audioStreams = audioStreams;
     this.prerender = prerender;
     this.mainForm = mainForm;
     jobUtil = new JobUtil(mainForm);
     vUtil = new VideoUtil(mainForm);
     muxProvider = mainForm.MuxProvider;
     container.Items.AddRange(muxProvider.GetSupportedContainers().ToArray());
     splitting.MinimumFileSize = new FileSize(Unit.MB, 1);
 }
Code Example #8
File: CheeseInfoService.cs  Project: Loongtze/downkyi
        /// <summary>
        /// Gets the video stream information, returned from VideoPage
        /// </summary>
        /// <param name="page"></param>
        public void GetVideoStream(VideoPage page)
        {
            PlayUrl playUrl = VideoStream.GetCheesePlayUrl(page.Avid, page.Bvid, page.Cid, page.EpisodeId);

            Utils.VideoPageInfo(playUrl, page);
        }
Code Example #9
 public CsVideoStreamViewModel(AvaliableVideoStreamsViewModel avaliableVideoStream, VideoStream videoStream) : 
     base(videoStream)
 {
     AvaliableVideoStreamsVM = avaliableVideoStream;
 }
Code Example #10
File: VideoStream.cs  Project: apakian/rtmp-cpp
 // Constructor
 public Grabber(VideoStream parent)
 {
     this.parent = parent;
 }
Code Example #11
		// TODO: will move to dispose
		/// <summary>
		/// Stops the conference.
		/// </summary>
		private void StopConference()
		{
			try
			{
#if __ANDROID__
				// Stop echo canceller.
				OpusEchoCanceller.Stop();
				OpusEchoCanceller = null;
#endif
				conference.OnLinkInit -= LogLinkInit;
				conference.OnLinkUp -= LogLinkUp;
				conference.OnLinkDown -= LogLinkDown;

				conference.OnLinkOfferAnswer -= OnLinkSendOfferAnswer;
				conference.OnLinkCandidate -= OnLinkSendCandidate;
				conference = null;

				videoStream.OnLinkInit -= AddRemoteVideoControl;
				videoStream.OnLinkDown -= RemoveRemoteVideoControl;
				videoStream = null;

				audioStream = null;
			}
			catch (Exception ex)
			{
				FM.Log.Debug(ex.ToString());
			}
		}
Code Example #12
		/// <summary>
		/// Initializes the audio and video streams.
		/// </summary>
		private void InitAudioAndVideoStreams()
		{
			// Create a WebRTC audio stream description (requires a
			// reference to the local audio feed).
			audioStream = new AudioStream(LocalMedia.LocalMediaStream);

			// Create a WebRTC video stream description (requires a
			// reference to the local video feed). Whenever a P2P link
			// initializes using this description, position and display
			// the remote video control on-screen by passing it to the
			// layout manager created above. Whenever a P2P link goes
			// down, remove it.
			videoStream = new VideoStream(LocalMedia.LocalMediaStream);
			videoStream.OnLinkInit += AddRemoteVideoControl;
			videoStream.OnLinkDown += RemoveRemoteVideoControl;
		}
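
Examples #11 and #12 subscribe AddRemoteVideoControl and RemoveRemoteVideoControl handlers that are not shown; Example #13 below inlines equivalent logic. A minimal hypothetical pair, assuming IceLink 2.x argument types and a layoutManager field:

// Hypothetical handlers matching the subscriptions in Examples #11/#12.
private void AddRemoteVideoControl(StreamLinkInitArgs e)
{
    // Hand the remote feed's control to the layout manager for positioning.
    var remoteVideoControl = e.Link.GetRemoteVideoControl();
    layoutManager.AddRemoteVideoControl(e.PeerId, remoteVideoControl);
}

private void RemoveRemoteVideoControl(StreamLinkDownArgs e)
{
    layoutManager.RemoveRemoteVideoControl(e.PeerId);
}
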
Code Example #13
File: App.cs  Project: QuickBlox/quickblox-dotnet-sdk
        //Video Chat is the main form
        public void StartConference(MainPage videoWindow, Action<Exception> callback)
        {
            if (!SignallingExists())
            {
                callback(new Exception("Signalling must exist before starting a conference."));
            }
            else if (!LocalMediaExists())
            {
                callback(new Exception("Local media must exist before starting a conference."));
            }
            else if (ConferenceExists())
            {
                //trying to start a conference again
                callback(signalling.LastConferenceException);
            }
            else
            {
                try
                {
                    var localMediaStream = localMedia.LocalStream;
                    
                    // This is our local video control, a WinForms Control or
                    // WPF FrameworkElement. It is constantly updated with
                    // our live video feed since we requested video above.
                    // Add it directly to the UI or use the IceLink layout
                    // manager, which we do below.
                    var localVideoControl = localMedia.LocalVideoControl;

                    // Create an IceLink layout manager, which makes the task
                    // of arranging video controls easy. Give it a reference
                    // to a WinForms control that can be filled with video feeds.
                    // For WPF users, the WebRTC extension includes
                    // WpfLayoutManager, which accepts a Canvas.
                    var layoutManager = localMedia.LayoutManager;

                    // Create a WebRTC audio stream description (requires a
                    // reference to the local audio feed).
                    var audioStream = new AudioStream(localMediaStream);

                    // Create a WebRTC video stream description (requires a
                    // reference to the local video feed). Whenever a P2P link
                    // initializes using this description, position and display
                    // the remote video control on-screen by passing it to the
                    // layout manager created above. Whenever a P2P link goes
                    // down, remove it.
                    var videoStream = new VideoStream(localMediaStream);
                    videoStream.OnLinkInit += (e) =>
                    {
                        var remoteVideoControl = (FrameworkElement)e.Link.GetRemoteVideoControl();
                        layoutManager.AddRemoteVideoControl(e.PeerId, remoteVideoControl);

                        // On the UI thread, hook up a double-tap handler that mutes/unmutes the remote video.
                        videoWindow.Dispatcher.RunAsync(CoreDispatcherPriority.Normal, async () =>
                        {
                            // When double-tapped, mute/unmute the remote video.
                            remoteVideoControl.DoubleTapped += (sender, ce) =>
                            {
                                if (e.Link.RemoteVideoIsMuted())
                                {
                                    // Resume rendering incoming video.
                                    e.Link.UnmuteRemoteVideo();
                                }
                                else
                                {
                                    // Stop rendering incoming video.
                                    e.Link.MuteRemoteVideo();
                                }
                            };
                        });
                    };
                    videoStream.OnLinkDown += (e) =>
                    {
                        layoutManager.RemoveRemoteVideoControl(e.PeerId);
                    };

                    // Create a new IceLink conference.
                    conference = new FM.IceLink.Conference(IceLinkServerAddress, new Stream[] { audioStream, videoStream });

                    //Use our generated DTLS certificate.
                    conference.DtlsCertificate = Certificate;
                    
                    // Supply TURN relay credentials in case we are behind a
                    // highly restrictive firewall. These credentials will be
                    // verified by the TURN server.
                    conference.RelayUsername = "******";
                    conference.RelayPassword = "******";

                    // Add a few event handlers to the conference so we can see
                    // when a new P2P link is created or changes state.
                    conference.OnLinkInit += (e) =>
                    {
                        Log.Info("Link to peer initializing...");
                    };
                    conference.OnLinkUp += (e) =>
                    {
                        Log.Info("Link to peer is UP.");
                    };
                    conference.OnLinkDown += (e) =>
                    {
                        Log.InfoFormat("Link to peer is DOWN. {0}", e.Exception.Message);
                    };
                    callback(null);
                }
                catch (Exception ex)
                {
                    callback(ex);
                }
            }
        }
Code Example #14
 /// <summary>
 /// Initializes a new instance of the <see cref="VtsVobs"/> class.
 /// </summary>
 public VtsVobs()
 {
     this.VideoStream = new VideoStream();
     this.AudioStreams = new List<AudioStream>();
     this.Subtitles = new List<string>();
     this.SubtitleIDs = new List<string>();
     this.SubtitleTypes = new List<string>();
 }
Code Example #15
        public void Run(MainForm info)
        {
            // normal video verification
            string error = null;
            if ((error = info.Video.verifyVideoSettings()) != null)
            {
                MessageBox.Show(error, "Unsupported video configuration", MessageBoxButtons.OK, MessageBoxIcon.Stop);
                return;
            }
            if ((error = info.Audio.verifyAudioSettings()) != null && !error.Equals("No audio input defined."))
            {
                MessageBox.Show(error, "Unsupported audio configuration", MessageBoxButtons.OK, MessageBoxIcon.Stop);
                return;
            }

            if (info.Video.CurrentSettings.EncodingMode == 2 || info.Video.CurrentSettings.EncodingMode == 5)
            {
                MessageBox.Show("First pass encoding is not supported for automated encoding as no output is generated.\nPlease choose another encoding mode", "Improper configuration",
                    MessageBoxButtons.OK, MessageBoxIcon.Stop);
                return;
            }

            VideoCodecSettings vSettings = info.Video.CurrentSettings.Clone();
            Zone[] zones = info.Video.Info.Zones; // We can't simply modify the zones in place because that would reveal the final zones config to the user, including the credits/start zones
            bool cont = info.JobUtil.getFinalZoneConfiguration(vSettings, info.Video.Info.IntroEndFrame, info.Video.Info.CreditsStartFrame, ref zones);
            if (cont)
            {
                ulong length = 0;
                double framerate = 0.0;
                VideoStream myVideo = new VideoStream();
                JobUtil.getInputProperties(out length, out framerate, info.Video.VideoInput);
                myVideo.Input = info.Video.Info.VideoInput;
                myVideo.Output = info.Video.Info.VideoOutput;
                myVideo.NumberOfFrames = length;
                myVideo.Framerate = (decimal)framerate;
                myVideo.DAR = info.Video.Info.DAR;
                myVideo.VideoType = info.Video.CurrentMuxableVideoType;
                myVideo.Settings = vSettings;

                VideoInfo vInfo = info.Video.Info.Clone(); // so we don't modify the data on the main form
                vInfo.Zones = zones;

                using (AutoEncodeWindow aew = new AutoEncodeWindow(myVideo, info.Audio.AudioStreams, info, info.Video.PrerenderJob, vInfo))
                {
                    if (aew.init())
                    {
                        info.ClosePlayer();
                        aew.ShowDialog();
                    }
                    else
                        MessageBox.Show("The currently selected combination of video and audio output cannot be muxed", "Unsupported configuration", MessageBoxButtons.OK, MessageBoxIcon.Warning);
                }
            }
        }
Code Example #16
    public Job CreateJobWithSetNumberImagesSpritesheet(
        string projectId, string location, string inputUri, string outputUri)
    {
        // Create the client.
        TranscoderServiceClient client = TranscoderServiceClient.Create();

        // Build the parent location name.
        LocationName parent = new LocationName(projectId, location);

        // Build the job config.
        VideoStream videoStream0 = new VideoStream
        {
            H264 = new VideoStream.Types.H264CodecSettings
            {
                BitrateBps = 550000,
                FrameRate = 60,
                HeightPixels = 360,
                WidthPixels = 640
            }
        };

        AudioStream audioStream0 = new AudioStream
        {
            Codec = "aac",
            BitrateBps = 64000
        };

        // Generates a 10x10 spritesheet of small images from the input video.
        // To preserve the source aspect ratio, you should set the
        // SpriteWidthPixels field or the SpriteHeightPixels field, but not
        // both (the API will automatically calculate the missing field). For
        // this sample, we don't care about the aspect ratio so we set both
        // fields.
        SpriteSheet smallSpriteSheet = new SpriteSheet
        {
            FilePrefix = SmallSpritesheetFilePrefix,
            SpriteHeightPixels = 32,
            SpriteWidthPixels = 64,
            ColumnCount = 10,
            RowCount = 10,
            TotalCount = 100
        };

        // Generates a 10x10 spritesheet of larger images from the input
        // video. To preserve the source aspect ratio, you should set the
        // SpriteWidthPixels field or the SpriteHeightPixels field, but not
        // both (the API will automatically calculate the missing field). For
        // this sample, we don't care about the aspect ratio so we set both
        // fields.
        SpriteSheet largeSpriteSheet = new SpriteSheet
        {
            FilePrefix = LargeSpritesheetFilePrefix,
            SpriteHeightPixels = 72,
            SpriteWidthPixels = 128,
            ColumnCount = 10,
            RowCount = 10,
            TotalCount = 100
        };

        ElementaryStream elementaryStream0 = new ElementaryStream
        {
            Key = "video_stream0",
            VideoStream = videoStream0
        };

        ElementaryStream elementaryStream1 = new ElementaryStream
        {
            Key = "audio_stream0",
            AudioStream = audioStream0
        };

        MuxStream muxStream0 = new MuxStream
        {
            Key = "sd",
            Container = "mp4",
            ElementaryStreams = { "video_stream0", "audio_stream0" }
        };

        Input input = new Input
        {
            Key = "input0",
            Uri = inputUri
        };

        Output output = new Output
        {
            Uri = outputUri
        };

        JobConfig jobConfig = new JobConfig
        {
            Inputs = { input },
            Output = output,
            ElementaryStreams = { elementaryStream0, elementaryStream1 },
            MuxStreams = { muxStream0 },
            SpriteSheets = { smallSpriteSheet, largeSpriteSheet }
        };

        // Build the job.
        Job newJob = new Job();
        newJob.InputUri = inputUri;
        newJob.OutputUri = outputUri;
        newJob.Config = jobConfig;

        // Call the API.
        Job job = client.CreateJob(parent, newJob);

        // Return the result.
        return job;
    }
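
A hedged usage sketch for the Transcoder sample above, assuming the Google.Cloud.Video.Transcoder.V1 client library and placeholder project and bucket names:

// Hypothetical caller; project, location, and URIs are placeholders.
Job job = CreateJobWithSetNumberImagesSpritesheet(
    "my-project", "us-central1",
    "gs://my-bucket/input.mp4", "gs://my-bucket/output/");

// Transcoding runs server-side; poll the job by name until it finishes.
TranscoderServiceClient client = TranscoderServiceClient.Create();
Job current = client.GetJob(job.Name);
Console.WriteLine("State: " + current.State); // e.g. Pending, Running, Succeeded, Failed
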
Code Example #17
 public void Start()
 {
     vs      = new VideoStream(this);
     thVideo = new Thread(vs.Execute);
     thVideo.Start();
 }
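
A matching shutdown sketch for Example #17; the stop flag is hypothetical and assumes VideoStream.Execute polls it in its loop:

// Hypothetical counterpart to Start().
public void Stop()
{
    vs.RequestStop();                            // assumed flag checked inside Execute
    if (!thVideo.Join(TimeSpan.FromSeconds(5)))  // wait for the worker to drain
    {
        thVideo.Interrupt();                     // last resort if Execute blocks on a wait
    }
}
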
Code Example #18
File: App.cs  Project: QuickBlox/quickblox-dotnet-sdk
        //Video Chat is the main form
        public void StartConference(Action<string> callback)
        {
            // Create a WebRTC audio stream description (requires a
            // reference to the local audio feed).
            AudioStream = new AudioStream(LocalMedia.LocalMediaStream);

            // Create a WebRTC video stream description (requires a
            // reference to the local video feed). Whenever a P2P link
            // initializes using this description, position and display
            // the remote video control on-screen by passing it to the
            // layout manager created above. Whenever a P2P link goes
            // down, remove it.
            VideoStream = new VideoStream(LocalMedia.LocalMediaStream);
            VideoStream.OnLinkInit += AddRemoteVideoControl;
            VideoStream.OnLinkDown += RemoveRemoteVideoControl;

            // Create a new IceLink conference.
            Conference = new FM.IceLink.Conference(IceLinkServerAddress, new Stream[] { AudioStream, VideoStream });

            // Supply TURN relay credentials in case we are behind a
            // highly restrictive firewall. These credentials will be
            // verified by the TURN server.
            Conference.RelayUsername = "******";
            Conference.RelayPassword = "******";

            // Add a few event handlers to the conference so we can see
            // when a new P2P link is created or changes state.
            Conference.OnLinkInit += LogLinkInit;
            Conference.OnLinkUp += LogLinkUp;
            Conference.OnLinkDown += LogLinkDown;

            // Attach signalling to the conference.
            Signalling.Attach(Conference, SessionId, callback);
        }
Code Example #19
File: Conv.cs  Project: stackprobe/Kirara2
        private void convTh_main()
        {
            using (WorkingDir wd = new WorkingDir())
            {
                string rExt = Path.GetExtension(_rFile);

                if (Gnd.i.audioVideoExtensions.contains(rExt) == false)
                {
                    throw new Exception("再生可能なファイルではありません。(不明な拡張子)");
                }

                string midFile = wd.makePath() + rExt;

                try
                {
                    using (critSect.parallel())
                    {
                        File.Copy(_rFile, midFile);
                    }
                    if (File.Exists(midFile) == false)
                    {
                        throw null;
                    }
                }
                catch
                {
                    throw new Exception("ファイルにアクセス出来ません。");
                }
                string redirFile = wd.makePath();

                ProcessTools.runOnBatch("ffprobe.exe " + _rFile + " 2> " + redirFile, FFmpeg.getBDir(), critSect);

                foreach (string line in FileTools.readAllLines(redirFile, Encoding.ASCII))
                {
                    if (line.Contains("Duration:"))
                    {
                        _duration = new Duration();

                        List <string> tokens = StringTools.tokenize(line, " :.,", false, true);

                        if (tokens[1] == "N/A")
                        {
                            throw new Exception("再生可能なファイルではありません。(Duration)");
                        }

                        int h = int.Parse(tokens[1]);
                        int m = int.Parse(tokens[2]);
                        int s = int.Parse(tokens[3]);

                        int sec = h * 3600 + m * 60 + s;

                        if (sec < 1)
                        {
                            throw new Exception("The video or song is too short.");
                        }

                        if (IntTools.IMAX < sec)
                        {
                            throw new Exception("The video or song is too long.");
                        }

                        _duration.secLength = sec;
                    }
                    else if (_audioStream == null && line.Contains("Stream") && line.Contains("Audio:"))
                    {
                        _audioStream = new AudioStream();

                        List <string> tokens = StringTools.tokenize(line, StringTools.DIGIT, true, true);

                        _audioStream.mapIndex = int.Parse(tokens[1]);
                    }
                    else if (_videoStream == null && line.Contains("Stream") && line.Contains("Video:"))
                    {
                        _videoStream = new VideoStream();

                        {
                            List <string> tokens = StringTools.tokenize(line, StringTools.DIGIT, true, true);

                            _videoStream.mapIndex = int.Parse(tokens[1]);
                        }

                        {
                            List <string> tokens = StringTools.tokenize(line, " ,");

                            foreach (string fToken in tokens)
                            {
                                string token = fToken;

                                if (StringTools.toFormat(token, true) == "9x9")
                                {
                                    List <string> whTokens = StringTools.tokenize(token, "x");

                                    _videoStream.w = int.Parse(whTokens[0]);
                                    _videoStream.h = int.Parse(whTokens[1]);
                                }
                            }
                        }

                        if (_videoStream.w < Consts.VIDEO_W_MIN)
                        {
                            throw new Exception("The video width is too small.");
                        }

                        if (_videoStream.h < Consts.VIDEO_H_MIN)
                        {
                            throw new Exception("The video height is too small.");
                        }

                        if (IntTools.IMAX < _videoStream.w)
                        {
                            throw new Exception("The video width is too large.");
                        }

                        if (IntTools.IMAX < _videoStream.h)
                        {
                            throw new Exception("The video height is too large.");
                        }
                    }
                }
                if (_duration == null)
                {
                    throw null;
                }

                if (_audioStream == null)
                {
                    throw new Exception("再生可能なファイルではありません。(音声ストリームがありません)");
                }

                if (_videoStream == null)
                {
                    _type = Consts.MediaType_e.AUDIO;
                }
                else
                {
                    _type = Consts.MediaType_e.MOVIE;
                }

                string wFile = Utils.getOgxFile(_wIndex, _type);

                if (Gnd.i.convWavMastering)
                {
                    string wavFile = wd.makePath() + ".wav";

                    ProcessTools.runOnBatch(
                        "ffmpeg.exe -i " + _rFile + " -map 0:" + _audioStream.mapIndex + " -ac 2 " + wavFile,
                        FFmpeg.getBDir(),
                        critSect
                        );

                    if (File.Exists(wavFile) == false)
                    {
                        throw new Exception("音声ストリームの抽出に失敗しました。");
                    }

                    string wmDir      = wd.makePath();
                    string wmFile     = Path.Combine(wmDir, "Master.exe");
                    string wavFileNew = wd.makePath() + ".wav";

                    Directory.CreateDirectory(wmDir);
                    File.Copy(wavMasteringFile, wmFile);

                    ProcessTools.runOnBatch(
                        "Master.exe " + wavFile + " " + wavFileNew + " 0001.txt",
                        wmDir,
                        critSect
                        );

                    if (File.Exists(wavFileNew) == false)
                    {
                        throw new Exception("wavFileNew does not exist");
                    }

                    if (_type == Consts.MediaType_e.AUDIO)
                    {
                        ProcessTools.runOnBatch(
                            "ffmpeg.exe -i " + wavFileNew + " -map 0:" + _audioStream.mapIndex + " " + Gnd.i.ffmpegOptAudio + " " + wFile,
                            FFmpeg.getBDir(),
                            critSect
                            );
                    }
                    else
                    {
                        ProcessTools.runOnBatch(
                            "ffmpeg.exe -i " + _rFile + " -i " + wavFileNew + " -map 0:" + _videoStream.mapIndex + " -map 1:" + _audioStream.mapIndex + " " + Gnd.i.ffmpegOptVideo + " " + Gnd.i.ffmpegOptAudio + " " + wFile,
                            FFmpeg.getBDir(),
                            critSect
                            );
                    }
                }
                else
                {
                    if (_type == Consts.MediaType_e.AUDIO)
                    {
                        ProcessTools.runOnBatch(
                            "ffmpeg.exe -i " + _rFile + " -map 0:" + _audioStream.mapIndex + " " + Gnd.i.ffmpegOptAudio + " " + wFile,
                            FFmpeg.getBDir(),
                            critSect
                            );
                    }
                    else
                    {
                        ProcessTools.runOnBatch(
                            "ffmpeg.exe -i " + _rFile + " -map 0:" + _videoStream.mapIndex + " -map 0:" + _audioStream.mapIndex + " " + Gnd.i.ffmpegOptVideo + " " + Gnd.i.ffmpegOptAudio + " " + wFile,
                            FFmpeg.getBDir(),
                            critSect
                            );
                    }
                }
                if (File.Exists(wFile) == false)
                {
                    throw new Exception("wFile does not exist");
                }

                _wFile = wFile;
            }
        }
Code Example #20
        static void Main(string[] args)
        {
            Console.WriteLine("MobiConverter by Gericom");
            Console.WriteLine();
            if (args.Length == 0)
            {
                PrintUsage();
                return;
            }
            switch (args[0])
            {
            case "-d":
            {
                if (args.Length != 2 && args.Length != 3)
                {
                    goto default;
                }
                if (!File.Exists(args[1]))
                {
                    Console.WriteLine("Error! File not found: " + args[1]);
                    return;
                }
                String outfile = (args.Length == 3) ? args[2] : Path.ChangeExtension(args[1], "avi");
                byte[] sig     = new byte[4];
                Stream s       = File.OpenRead(args[1]);
                s.Read(sig, 0, 4);
                s.Close();
                if (sig[0] == 0x4C && sig[1] == 0x32 && sig[2] == 0xAA && sig[3] == 0xAB)        //moflex
                {
                    Console.WriteLine("Moflex container detected!");
                    Console.Write("Converting: ");
                    Console.CursorVisible = false;
                    MobiclipDecoder    ddd   = null;
                    AviManager         m     = new AviManager(outfile, false);
                    MemoryStream       audio = null;
                    FastAudioDecoder[] mFastAudioDecoders = null;
                    int         audiorate          = -1;
                    int         audiochannels      = 0;
                    VideoStream vs                 = null;
                    FileStream  stream             = File.OpenRead(args[1]);
                    var         d                  = new MoLiveDemux(stream);
                    int         PlayingVideoStream = -1;
                    d.OnCompleteFrameReceived += delegate(MoLiveChunk Chunk, byte[] Data)
                    {
                        if ((Chunk is MoLiveStreamVideo || Chunk is MoLiveStreamVideoWithLayout) && ((PlayingVideoStream == -1) || ((MoLiveStream)Chunk).StreamIndex == PlayingVideoStream))
                        {
                            if (ddd == null)
                            {
                                ddd = new MobiclipDecoder(((MoLiveStreamVideo)Chunk).Width, ((MoLiveStreamVideo)Chunk).Height, MobiclipDecoder.MobiclipVersion.Moflex3DS);
                                PlayingVideoStream = ((MoLiveStream)Chunk).StreamIndex;
                            }
                            ddd.Data   = Data;
                            ddd.Offset = 0;
                            Bitmap b = ddd.DecodeFrame();
                            if (vs == null)
                            {
                                vs = m.AddVideoStream(false, Math.Round(((double)((MoLiveStreamVideo)Chunk).FpsRate) / ((double)((MoLiveStreamVideo)Chunk).FpsScale), 3), b);
                            }
                            else
                            {
                                vs.AddFrame(b);
                            }
                        }
                        else if (Chunk is MoLiveStreamAudio)
                        {
                            if (audio == null)
                            {
                                audio         = new MemoryStream();
                                audiochannels = (int)((MoLiveStreamAudio)Chunk).Channel;
                                audiorate     = (int)((MoLiveStreamAudio)Chunk).Frequency;
                            }
                            switch ((int)((MoLiveStreamAudio)Chunk).CodecId)
                            {
                            case 0:            //fastaudio
                            {
                                if (mFastAudioDecoders == null)
                                {
                                    mFastAudioDecoders = new FastAudioDecoder[(int)((MoLiveStreamAudio)Chunk).Channel];
                                    for (int i = 0; i < (int)((MoLiveStreamAudio)Chunk).Channel; i++)
                                    {
                                        mFastAudioDecoders[i] = new FastAudioDecoder();
                                    }
                                }
                                List <short>[] channels = new List <short> [(int)((MoLiveStreamAudio)Chunk).Channel];
                                for (int i = 0; i < (int)((MoLiveStreamAudio)Chunk).Channel; i++)
                                {
                                    channels[i] = new List <short>();
                                }

                                int offset = 0;
                                int size   = 40;
                                while (offset + size < Data.Length)
                                {
                                    for (int i = 0; i < (int)((MoLiveStreamAudio)Chunk).Channel; i++)
                                    {
                                        mFastAudioDecoders[i].Data   = Data;
                                        mFastAudioDecoders[i].Offset = offset;
                                        channels[i].AddRange(mFastAudioDecoders[i].Decode());
                                        offset = mFastAudioDecoders[i].Offset;
                                    }
                                }
                                short[][] channelsresult = new short[(int)((MoLiveStreamAudio)Chunk).Channel][];
                                for (int i = 0; i < (int)((MoLiveStreamAudio)Chunk).Channel; i++)
                                {
                                    channelsresult[i] = channels[i].ToArray();
                                }
                                byte[] result = InterleaveChannels(channelsresult);
                                audio.Write(result, 0, result.Length);
                            }
                            break;

                            case 1:            //IMA-ADPCM
                            {
                                IMAADPCMDecoder[] decoders = new IMAADPCMDecoder[(int)((MoLiveStreamAudio)Chunk).Channel];
                                List <short>[]    channels = new List <short> [(int)((MoLiveStreamAudio)Chunk).Channel];
                                for (int i = 0; i < (int)((MoLiveStreamAudio)Chunk).Channel; i++)
                                {
                                    decoders[i] = new IMAADPCMDecoder();
                                    decoders[i].GetWaveData(Data, 4 * i, 4);
                                    channels[i] = new List <short>();
                                }

                                int offset = 4 * (int)((MoLiveStreamAudio)Chunk).Channel;
                                int size   = 128 * (int)((MoLiveStreamAudio)Chunk).Channel;
                                while (offset + size < Data.Length)
                                {
                                    for (int i = 0; i < (int)((MoLiveStreamAudio)Chunk).Channel; i++)
                                    {
                                        channels[i].AddRange(decoders[i].GetWaveData(Data, offset, 128));
                                        offset += 128;
                                    }
                                }
                                short[][] channelsresult = new short[(int)((MoLiveStreamAudio)Chunk).Channel][];
                                for (int i = 0; i < (int)((MoLiveStreamAudio)Chunk).Channel; i++)
                                {
                                    channelsresult[i] = channels[i].ToArray();
                                }
                                byte[] result = InterleaveChannels(channelsresult);
                                audio.Write(result, 0, result.Length);
                            }
                            break;

                            case 2:            //PCM16
                            {
                                audio.Write(Data, 0, Data.Length - (Data.Length % ((int)((MoLiveStreamAudio)Chunk).Channel * 2)));
                            }
                            break;
                            }
                        }
                    };
                    bool left    = false;
                    int  counter = 0;
                    while (true)
                    {
                        uint error = d.ReadPacket();
                        if (error == 73)
                        {
                            break;
                        }
                        //report progress
                        if (counter == 0)
                        {
                            Console.Write("{0,3:D}%", stream.Position * 100 / stream.Length);
                            Console.CursorLeft -= 4;
                        }
                        counter++;
                        if (counter == 50)
                        {
                            counter = 0;
                        }
                    }
                    if (audio != null)
                    {
                        byte[] adata = audio.ToArray();
                        audio.Close();
                        var sinfo = new Avi.AVISTREAMINFO();
                        sinfo.fccType      = Avi.streamtypeAUDIO;
                        sinfo.dwScale      = audiochannels * 2;
                        sinfo.dwRate       = audiorate * audiochannels * 2;
                        sinfo.dwSampleSize = audiochannels * 2;
                        sinfo.dwQuality    = -1;
                        var sinfo2 = new Avi.PCMWAVEFORMAT();
                        sinfo2.wFormatTag      = 1;
                        sinfo2.nChannels       = (short)audiochannels;
                        sinfo2.nSamplesPerSec  = audiorate;
                        sinfo2.nAvgBytesPerSec = audiorate * audiochannels * 2;
                        sinfo2.nBlockAlign     = (short)(audiochannels * 2);
                        sinfo2.wBitsPerSample  = 16;
                        unsafe
                        {
                            fixed(byte *pAData = &adata[0])
                            {
                                m.AddAudioStream((IntPtr)pAData, sinfo, sinfo2, adata.Length);
                            }
                        }
                    }
                    m.Close();
                    stream.Close();
                    Console.WriteLine("Done!");
                    Console.CursorVisible = true;
                }
                else if (sig[0] == 0x4D && sig[1] == 0x4F && sig[2] == 0x44 && sig[3] == 0x53)
                {
                    //mods
                    Console.WriteLine("Mods container detected!");
                    Console.Write("Converting: ");
                    Console.CursorVisible = false;
                    AviManager   m      = new AviManager(outfile, false);
                    FileStream   stream = File.OpenRead(args[1]);
                    ModsDemuxer  dm     = new ModsDemuxer(stream);
                    MemoryStream audio  = null;
                    if ((dm.Header.AudioCodec == 1 || dm.Header.AudioCodec == 3) && dm.Header.NbChannel > 0 && dm.Header.Frequency > 0)
                    {
                        audio = new MemoryStream();
                    }
                    MobiclipDecoder    d          = new MobiclipDecoder(dm.Header.Width, dm.Header.Height, MobiclipDecoder.MobiclipVersion.ModsDS);
                    VideoStream        vs         = null;
                    int                CurChannel = 0;
                    List <short>[]     channels   = new List <short> [dm.Header.NbChannel];
                    IMAADPCMDecoder[]  decoders   = new IMAADPCMDecoder[dm.Header.NbChannel];
                    SxDecoder[]        sxd        = new SxDecoder[dm.Header.NbChannel];
                    FastAudioDecoder[] fad        = new FastAudioDecoder[dm.Header.NbChannel];
                    bool[]             isinit     = new bool[dm.Header.NbChannel];
                    for (int i = 0; i < dm.Header.NbChannel; i++)
                    {
                        channels[i] = new List <short>();
                        decoders[i] = new IMAADPCMDecoder();
                        sxd[i]      = new SxDecoder();
                        fad[i]      = new FastAudioDecoder();
                        isinit[i]   = false;
                    }
                    int counter = 0;
                    while (true)
                    {
                        uint   NrAudioPackets;
                        bool   IsKeyFrame;
                        byte[] framedata = dm.ReadFrame(out NrAudioPackets, out IsKeyFrame);
                        if (framedata == null)
                        {
                            break;
                        }
                        d.Data   = framedata;
                        d.Offset = 0;
                        Bitmap b = d.DecodeFrame();
                        if (vs == null)
                        {
                            vs = m.AddVideoStream(false, Math.Round(dm.Header.Fps / (double)0x01000000, 3), b);
                        }
                        else
                        {
                            vs.AddFrame(b);
                        }
                        if (NrAudioPackets > 0 && audio != null)
                        {
                            int Offset = d.Offset - 2;
                            if (dm.Header.TagId == 0x334E && (IOUtil.ReadU16LE(framedata, 0) & 0x8000) != 0)
                            {
                                Offset += 4;
                            }
                            if (dm.Header.AudioCodec == 3)
                            {
                                if (IsKeyFrame)
                                {
                                    for (int i = 0; i < dm.Header.NbChannel; i++)
                                    {
                                        channels[i] = new List <short>();
                                        decoders[i] = new IMAADPCMDecoder();
                                        sxd[i]      = new SxDecoder();
                                        fad[i]      = new FastAudioDecoder();
                                        isinit[i]   = false;
                                    }
                                }
                                for (int i = 0; i < NrAudioPackets; i++)
                                {
                                    channels[CurChannel].AddRange(decoders[CurChannel].GetWaveData(framedata, Offset, 128 + (!isinit[CurChannel] ? 4 : 0)));
                                    Offset            += 128 + (!isinit[CurChannel] ? 4 : 0);
                                    isinit[CurChannel] = true;
                                    CurChannel++;
                                    if (CurChannel >= dm.Header.NbChannel)
                                    {
                                        CurChannel = 0;
                                    }
                                }
                            }
                            else if (dm.Header.AudioCodec == 1)
                            {
                                for (int i = 0; i < NrAudioPackets; i++)
                                {
                                    if (!isinit[CurChannel])
                                    {
                                        sxd[CurChannel].Codebook = dm.AudioCodebooks[CurChannel];
                                    }
                                    isinit[CurChannel]     = true;
                                    sxd[CurChannel].Data   = framedata;
                                    sxd[CurChannel].Offset = Offset;
                                    channels[CurChannel].AddRange(sxd[CurChannel].Decode());
                                    Offset = sxd[CurChannel].Offset;
                                    CurChannel++;
                                    if (CurChannel >= dm.Header.NbChannel)
                                    {
                                        CurChannel = 0;
                                    }
                                }
                            }
                            else if (dm.Header.AudioCodec == 2)
                            {
                                for (int i = 0; i < NrAudioPackets; i++)
                                {
                                    fad[CurChannel].Data   = framedata;
                                    fad[CurChannel].Offset = Offset;
                                    channels[CurChannel].AddRange(fad[CurChannel].Decode());
                                    Offset = fad[CurChannel].Offset;
                                    CurChannel++;
                                    if (CurChannel >= dm.Header.NbChannel)
                                    {
                                        CurChannel = 0;
                                    }
                                }
                            }
                            int smallest = int.MaxValue;
                            for (int i = 0; i < dm.Header.NbChannel; i++)
                            {
                                if (channels[i].Count < smallest)
                                {
                                    smallest = channels[i].Count;
                                }
                            }
                            if (smallest > 0)
                            {
                                //Gather samples
                                short[][] samps = new short[dm.Header.NbChannel][];
                                for (int i = 0; i < dm.Header.NbChannel; i++)
                                {
                                    samps[i] = new short[smallest];
                                    channels[i].CopyTo(0, samps[i], 0, smallest);
                                    channels[i].RemoveRange(0, smallest);
                                }
                                byte[] result = InterleaveChannels(samps);
                                audio.Write(result, 0, result.Length);
                            }
                        }
                        //report progress
                        if (counter == 0)
                        {
                            Console.Write("{0,3:D}%", stream.Position * 100 / stream.Length);
                            Console.CursorLeft -= 4;
                        }
                        counter++;
                        if (counter == 50)
                        {
                            counter = 0;
                        }
                    }
                    if (audio != null)
                    {
                        byte[] adata = audio.ToArray();
                        audio.Close();
                        var sinfo = new Avi.AVISTREAMINFO();
                        sinfo.fccType      = Avi.streamtypeAUDIO;
                        sinfo.dwScale      = dm.Header.NbChannel * 2;
                        sinfo.dwRate       = (int)dm.Header.Frequency * dm.Header.NbChannel * 2;
                        sinfo.dwSampleSize = dm.Header.NbChannel * 2;
                        sinfo.dwQuality    = -1;
                        var sinfo2 = new Avi.PCMWAVEFORMAT();
                        sinfo2.wFormatTag      = 1;
                        sinfo2.nChannels       = (short)dm.Header.NbChannel;
                        sinfo2.nSamplesPerSec  = (int)dm.Header.Frequency;
                        sinfo2.nAvgBytesPerSec = (int)dm.Header.Frequency * dm.Header.NbChannel * 2;
                        sinfo2.nBlockAlign     = (short)(dm.Header.NbChannel * 2);
                        sinfo2.wBitsPerSample  = 16;
                        unsafe
                        {
                            fixed(byte *pAData = &adata[0])
                            {
                                m.AddAudioStream((IntPtr)pAData, sinfo, sinfo2, adata.Length);
                            }
                        }
                    }
                    m.Close();
                    stream.Close();
                    Console.WriteLine("Done!");
                    Console.CursorVisible = true;
                    return;
                }
                else if (sig[0] == 0x4D && sig[1] == 0x4F && sig[2] == 0x43 && sig[3] == 0x35)
                {
                    //moc5
                    Console.WriteLine("MOC5 container detected!");
                    Console.WriteLine("Error! Not supported yet!");
                    return;
                }
                else if (Path.GetExtension(args[1]).ToLower() == ".vx2")
                {
                    //vx2
                    Console.WriteLine("VX2 container detected!");
                    Console.Write("Converting: ");
                    Console.CursorVisible = false;
                    AviManager      m         = new AviManager(outfile, false);
                    FileStream      fs        = File.OpenRead(args[1]);
                    MemoryStream    audio     = new MemoryStream();
                    MobiclipDecoder d         = new MobiclipDecoder(256, 192, MobiclipDecoder.MobiclipVersion.Moflex3DS);
                    VideoStream     vs        = null;
                    int             framerate = 20;
                    int             counter   = 0;
                    int             frame     = 0;
                    while (true)
                    {
                        if (fs.Position >= fs.Length)
                        {
                            break;
                        }
                        if ((frame % framerate) == 0)        //Audio
                        {
                            byte[] adata = new byte[32768 * 2];
                            fs.Read(adata, 0, 32768 * 2);
                            audio.Write(adata, 0, adata.Length);
                        }
                        int    length = (fs.ReadByte() << 0) | (fs.ReadByte() << 8) | (fs.ReadByte() << 16) | (fs.ReadByte() << 24);
                        byte[] data   = new byte[length];
                        fs.Read(data, 0, length);
                        d.Data   = data;
                        d.Offset = 0;
                        Bitmap b = d.DecodeFrame();
                        if (vs == null)
                        {
                            vs = m.AddVideoStream(false, framerate, b);
                        }
                        else
                        {
                            vs.AddFrame(b);
                        }
                        frame++;
                        //report progress
                        if (counter == 0)
                        {
                            Console.Write("{0,3:D}%", fs.Position * 100 / fs.Length);
                            Console.CursorLeft -= 4;
                        }
                        counter++;
                        if (counter == 50)
                        {
                            counter = 0;
                        }
                    }
                    if (audio != null)
                    {
                        byte[] adata = audio.ToArray();
                        audio.Close();
                        var sinfo = new Avi.AVISTREAMINFO();
                        sinfo.fccType      = Avi.streamtypeAUDIO;
                        sinfo.dwScale      = 1 * 2;
                        sinfo.dwRate       = (int)32768 * 1 * 2;
                        sinfo.dwSampleSize = 1 * 2;
                        sinfo.dwQuality    = -1;
                        var sinfo2 = new Avi.PCMWAVEFORMAT();
                        sinfo2.wFormatTag      = 1;
                        sinfo2.nChannels       = (short)1;
                        sinfo2.nSamplesPerSec  = (int)32768;
                        sinfo2.nAvgBytesPerSec = (int)32768 * 1 * 2;
                        sinfo2.nBlockAlign     = (short)(1 * 2);
                        sinfo2.wBitsPerSample  = 16;
                        unsafe
                        {
                            fixed(byte *pAData = &adata[0])
                            {
                                m.AddAudioStream((IntPtr)pAData, sinfo, sinfo2, adata.Length);
                            }
                        }
                    }
                    m.Close();
                    fs.Close();
                    Console.WriteLine("Done!");
                    Console.CursorVisible = true;
                    return;
                }
                else
                {
                    Console.WriteLine("Error! Unrecognized format!");
                    return;
                }
                break;
            }

            case "-e":
            {
                break;
            }

            default:
            case "-h":
                PrintUsage();
                return;
            }
        }
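
Example #20 calls an InterleaveChannels helper that is not shown. A plausible reconstruction, assuming 16-bit little-endian PCM samples and equal-length channel arrays:

// Hypothetical reconstruction of InterleaveChannels: interleaves per-channel
// 16-bit samples into the little-endian byte layout AVI PCM audio expects.
private static byte[] InterleaveChannels(short[][] channels)
{
    int nrChannels = channels.Length;
    int nrSamples = channels[0].Length; // the caller gathers equal-length channels
    byte[] result = new byte[nrChannels * nrSamples * 2];
    int offset = 0;
    for (int s = 0; s < nrSamples; s++)
    {
        for (int c = 0; c < nrChannels; c++)
        {
            short sample = channels[c][s];
            result[offset++] = (byte)(sample & 0xFF);        // low byte first
            result[offset++] = (byte)((sample >> 8) & 0xFF); // then high byte
        }
    }
    return result;
}
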
Code Example #21
File: JobUtil.cs  Project: RoDaniel/featurehouse
        public JobChain GenerateMuxJobs(VideoStream video, decimal? framerate, MuxStream[] audioStreamsArray, MuxableType[] audioTypes,
            MuxStream[] subtitleStreamsArray, MuxableType[] subTypes,
            string chapterFile, MuxableType chapterInputType, ContainerType container, string output, FileSize? splitSize, List<string> inputsToDelete, string deviceType, MuxableType deviceOutputType)
        {
            Debug.Assert(splitSize == null || splitSize.Value != FileSize.Empty);

            MuxProvider prov = mainForm.MuxProvider;
            List<MuxableType> allTypes = new List<MuxableType>();
            allTypes.Add(video.VideoType);
            allTypes.AddRange(audioTypes);
            allTypes.AddRange(subTypes);
            if (chapterInputType != null)
                allTypes.Add(chapterInputType);
            if (deviceOutputType != null)
                allTypes.Add(deviceOutputType);
            MuxPath muxPath = prov.GetMuxPath(container, splitSize.HasValue, allTypes.ToArray());
            List<MuxJob> jobs = new List<MuxJob>();
            List<MuxStream> subtitleStreams = new List<MuxStream>(subtitleStreamsArray);
            List<MuxStream> audioStreams = new List<MuxStream>(audioStreamsArray);
            int index = 0;
            int tempNumber = 1;
            string previousOutput = null;
            foreach (MuxPathLeg mpl in muxPath)
            {
                List<string> filesToDeleteThisJob = new List<string>();

                MuxJob mjob = new MuxJob();

                if (previousOutput != null)
                {
                    mjob.Settings.MuxedInput = previousOutput;
                    filesToDeleteThisJob.Add(previousOutput);
                }

                mjob.NbOfFrames = video.NumberOfFrames;
                mjob.NbOfBFrames = video.Settings.NbBframes;
                mjob.Codec = video.Settings.Codec.ToString();
                string fpsFormated = String.Format("{0:##.###}", framerate); // this formatting is required (for mkvmerge at least) to avoid fps rounding errors
                mjob.Settings.Framerate = Convert.ToDecimal(fpsFormated);
                mjob.Settings.VideoName = video.Settings.VideoName;

                string tempOutputName = Path.Combine(Path.GetDirectoryName(output),
                    Path.GetFileNameWithoutExtension(output) + tempNumber + ".");
                tempNumber++;
                foreach (MuxableType o in mpl.handledInputTypes)
                {
                    if (o.outputType is VideoType)
                    {
                        mjob.Settings.VideoInput = video.Output;
                        if (inputsToDelete.Contains(video.Output))
                            filesToDeleteThisJob.Add(video.Output);
                        mjob.Settings.DAR = video.DAR;
                    }
                    else if (o.outputType is AudioType)
                    {
                        MuxStream stream = audioStreams.Find(delegate(MuxStream m)
                        {
                            return (VideoUtil.guessAudioType(m.path) == o.outputType);
                        });

                        if (stream != null)
                        {
                            mjob.Settings.AudioStreams.Add(stream);
                            audioStreams.Remove(stream);

                            if (inputsToDelete.Contains(stream.path))
                                filesToDeleteThisJob.Add(stream.path);
                        }
                    }
                    else if (o.outputType is SubtitleType)
                    {
                        MuxStream stream = subtitleStreams.Find(delegate(MuxStream m)
                        {
                            return (VideoUtil.guessSubtitleType(m.path) == o.outputType);
                        });

                        if (stream != null)
                        {
                            mjob.Settings.SubtitleStreams.Add(stream);
                            subtitleStreams.Remove(stream);

                            if (inputsToDelete.Contains(stream.path))
                                filesToDeleteThisJob.Add(stream.path);
                        }
                    }
                    else if (o.outputType is ChapterType)
                    {
                        if ((VideoUtil.guessChapterType(chapterFile) == o.outputType))
                            mjob.Settings.ChapterFile = chapterFile;
                        if (inputsToDelete.Contains(chapterFile))
                            filesToDeleteThisJob.Add(chapterFile);
                    }
                    else if (o.outputType is DeviceType)
                    {
                        if ((VideoUtil.guessDeviceType(deviceType) == o.outputType))
                            mjob.Settings.DeviceType = deviceType;
                    }
                }
                foreach (MuxStream s in mjob.Settings.AudioStreams)
                {
                    audioStreams.Remove(s);
                }
                foreach (MuxStream s in mjob.Settings.SubtitleStreams)
                {
                    subtitleStreams.Remove(s);
                }
                mjob.FilesToDelete.AddRange(filesToDeleteThisJob);
                if (index == muxPath.Length - 1)
                {
                    mjob.Settings.MuxedOutput = output;
                    mjob.Settings.SplitSize = splitSize;
                    mjob.Settings.DAR = video.DAR;
                    mjob.ContainerType = container;
                }
                else
                {
                    ContainerType cot = mpl.muxerInterface.GetContainersInCommon(muxPath[index + 1].muxerInterface)[0];
                    mjob.Settings.MuxedOutput = tempOutputName + cot.Extension;
                    mjob.ContainerType = cot;
                }
                previousOutput = mjob.Settings.MuxedOutput;
                index++;
                jobs.Add(mjob);
                if (string.IsNullOrEmpty(mjob.Settings.VideoInput))
                    mjob.Input = mjob.Settings.MuxedInput;
                else
                    mjob.Input = mjob.Settings.VideoInput;
                mjob.Output = mjob.Settings.MuxedOutput;
                mjob.MuxType = mpl.muxerInterface.MuxerType;
            }

            return new SequentialChain(jobs.ToArray());
        }
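
The framerate-formatting comment above is worth unpacking: the "##.###" pattern rounds to at most three decimals and drops trailing zeros, so the muxer is never handed an over-precise fps value. A hedged illustration (the values are examples only):

        decimal framerate   = 24000m / 1001m;                         // 23.9760239760...
        string  fpsFormated = String.Format("{0:##.###}", framerate);
        // fpsFormated == "23.976" (with an invariant-style decimal separator)
        decimal rounded     = Convert.ToDecimal(fpsFormated);         // 23.976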
コード例 #22
0
ファイル: IfoParser.cs プロジェクト: rragu/subtitleedit
 public VtsVobs()
 {
     VideoStream  = new VideoStream();
     AudioStreams = new List <AudioStream>();
     Subtitles    = new List <string>();
 }
コード例 #23
0
        private VideoStream MakeVideoStream(int id, string line)
        {
            var stream = new VideoStream ();
            stream.Id = id;

            if (line.Contains ("h264"))
            {
                stream.Codec = CodecType.H264;
            }
            else if (line.Contains ("jpeg"))
            {
                stream.Codec = CodecType.JPEG;
            }
            else if (line.Contains ("png"))
            {
                stream.Codec = CodecType.PNG;
            }

            var dimensionsMatch = new Regex (@"(?<Width>\d{2,})x(?<Height>\d{2,})").Match (line);
            stream.Width = Int32.Parse (dimensionsMatch.Groups ["Width"].Value);
            stream.Height = Int32.Parse (dimensionsMatch.Groups ["Height"].Value);

            return stream;
        }
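
MakeVideoStream assumes the WxH token is always present; a line without it would make Int32.Parse throw on an empty group. A hedged usage sketch with a hypothetical ffmpeg-style stream line:

        var line   = "Stream #0:0: Video: h264 (High), yuv420p, 1920x1080, 25 fps";
        var stream = MakeVideoStream(0, line);
        // stream.Codec == CodecType.H264, stream.Width == 1920, stream.Height == 1080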
コード例 #24
0
	static VideoStream Stream(uint deviceIndex)
	{
		if (videostreams == null)
			videostreams = new VideoStream[OpenVR.k_unMaxTrackedDeviceCount];
		if (videostreams[deviceIndex] == null)
			videostreams[deviceIndex] = new VideoStream(deviceIndex);
		return videostreams[deviceIndex];
	}
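
Example #24 lazily allocates one VideoStream per tracked-device slot, up to OpenVR.k_unMaxTrackedDeviceCount entries. A hedged usage sketch (k_unTrackedDeviceIndex_Hmd is the OpenVR constant for the headset's device index):

	VideoStream hmdStream = Stream(OpenVR.k_unTrackedDeviceIndex_Hmd);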
コード例 #25
0
 private CsVideoStreamViewModel CreateVideoStreamViewModel(VideoStream videoStream)
 {
     var vm = new CsVideoStreamViewModel(this, videoStream);
     AvaliableVideoStreamsVM.Add(vm);
     return vm;
 }
コード例 #26
0
 public MovieBuilder AddVideoStream(VideoStream stream)
 {
     _movie.VideoStreams.Add(stream);
     return(this);
 }
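
Because AddVideoStream returns this, calls can be chained fluently. A hedged sketch (the Build() call and the stream variables are hypothetical; only AddVideoStream appears in the snippet):

 var movie = new MovieBuilder()
     .AddVideoStream(mainStream)        // hypothetical VideoStream instances
     .AddVideoStream(commentaryStream)
     .Build();                          // hypothetical finishing call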
コード例 #27
0
 private void RemoveVideoStreamViewModel(VideoStream videoStream)
 {
     var vm = AvaliableVideoStreamsVM.FirstOrDefault(x => Object.ReferenceEquals(x.Model , videoStream));
     if (vm != null) AvaliableVideoStreamsVM.Remove(vm);
 }
コード例 #28
0
 public void Close()
 {
     VideoStream.WriteFrame(true, lastBuffer, 0, lastBuffer.Length);
     Writer.Close();
 }
コード例 #29
0
        public LogItem postprocess()
        {
            audioFiles = vUtil.getAllDemuxedAudio(job.AudioTracks, job.Output, 8);

            fillInAudioInformation();

            log.LogValue("Desired size", job.PostprocessingProperties.OutputSize);
            log.LogValue("Split size", job.PostprocessingProperties.Splitting);

            VideoCodecSettings videoSettings = job.PostprocessingProperties.VideoSettings;

            string videoOutput = Path.Combine(Path.GetDirectoryName(job.Output),
                Path.GetFileNameWithoutExtension(job.Output) + "_Video");
            string muxedOutput = job.PostprocessingProperties.FinalOutput;

            //Open the video
            Dar? dar;
            string videoInput = openVideo(job.Output, job.PostprocessingProperties.DAR,
                job.PostprocessingProperties.HorizontalOutputResolution, job.PostprocessingProperties.SignalAR, log,
                job.PostprocessingProperties.AvsSettings, job.PostprocessingProperties.AutoDeinterlace, videoSettings, out dar,
                job.PostprocessingProperties.AutoCrop, job.PostprocessingProperties.KeepInputResolution,
                job.PostprocessingProperties.UseChaptersMarks);

            VideoStream myVideo = new VideoStream();
            ulong length;
            double framerate;
            JobUtil.getInputProperties(out length, out framerate, videoInput);
            myVideo.Input = videoInput;
            myVideo.Output = videoOutput;
            myVideo.NumberOfFrames = length;
            myVideo.Framerate = (decimal)framerate;
            myVideo.DAR = dar;
            myVideo.VideoType = new MuxableType((new VideoEncoderProvider().GetSupportedOutput(videoSettings.EncoderType))[0], videoSettings.Codec);
            myVideo.Settings = videoSettings;
            List<string> intermediateFiles = new List<string>();
            intermediateFiles.Add(videoInput);
            intermediateFiles.Add(job.Output);
            intermediateFiles.AddRange(audioFiles.Values);
            if (!string.IsNullOrEmpty(qpfile))
                intermediateFiles.Add(qpfile);

            if (!string.IsNullOrEmpty(videoInput))
            {
                //Create empty subtitles for muxing (subtitles not supported in one click mode)
                MuxStream[] subtitles = new MuxStream[0];
                JobChain c = vUtil.GenerateJobSeries(myVideo, muxedOutput, job.PostprocessingProperties.AudioJobs, subtitles,
                    job.PostprocessingProperties.ChapterFile, job.PostprocessingProperties.OutputSize,
                    job.PostprocessingProperties.Splitting, job.PostprocessingProperties.Container,
                    job.PostprocessingProperties.PrerenderJob, job.PostprocessingProperties.DirectMuxAudio, log, job.PostprocessingProperties.DeviceOutputType);
                if (c == null)
                {
                    log.Warn("Job creation aborted");
                    return log;
                }

                c = CleanupJob.AddAfter(c, intermediateFiles);
                mainForm.Jobs.addJobsWithDependencies(c);
            }
            return log;
        }
コード例 #30
0
ファイル: DisplayVideoCmd.cs プロジェクト: TNOCS/csTouch
 public DisplayVideoCmd(VideoStream model, int pSecondScreenId)
 {
     Model = model;
     SecondScreenId = pSecondScreenId;
 }
コード例 #31
0
        private void StartMedia()
        {
            // Get a video-based local media stream. This
            // conference will be send-only.
            // Get the machine's user media (audio and video devices)
            UserMedia.GetMedia(new GetMediaArgs(true, true)
            {
                AudioCaptureProvider = new NAudioCaptureProvider(),
                VideoCaptureProvider = new WpfAForgeVideoCaptureProvider(Dispatcher),

                CreateAudioRenderProvider = (e) =>
                {
                    return(new NAudioRenderProvider());
                },
                CreateVideoRenderProvider = (e) =>
                {
                    return(new ImageVideoRenderProvider(Dispatcher, LayoutScale.Contain));
                },

                // Set the video size to display, together with the video frame rate
                VideoWidth     = 352, // optional
                VideoHeight    = 288, // optional
                VideoFrameRate = 30,  // optional

                // If the machine's user media cannot be accessed, show an error message
                OnFailure = (e) =>
                {
                    Alert("Could not get media. {0}", e.Exception.Message);
                },

                // When the machine's user media can be accessed
                OnSuccess = (e) =>
                {
                    // Print a message confirming that the user media was accessed
                    Console.WriteLine("Get User media ok...");

                    // Set the local media (audio and video) in use
                    LocalMediaReceiver = e.LocalStream;

                    // Create a WebRTC audio stream description (requires a
                    // reference to the local audio feed).
                    // Set up the audio for the conversation
                    audioStream_Receiver = new AudioStream(LocalMediaReceiver);

                    // Create a WebRTC video stream description (requires a
                    // reference to the local video feed). Whenever a P2P link
                    // initializes using this description, position and display
                    // the remote video control on-screen by passing it to the
                    // layout manager created above. Whenever a P2P link goes
                    // down, remove it.
                    // Set up the video for the conversation
                    videoStream_Receiver = new VideoStream(LocalMediaReceiver);

                    // Show the video when a conversation starts and remove it when it ends
                    videoStream_Receiver.OnLinkInit += AddRemoteVideoControl;
                    videoStream_Receiver.OnLinkDown += RemoveRemoteVideoControl;

                    // List the accessible audio and video devices in the ComboBoxes
                    AudioDevices.ItemsSource = LocalMediaReceiver.GetAudioDeviceNames();
                    VideoDevices.ItemsSource = LocalMediaReceiver.GetVideoDeviceNames();

                    // Select the first audio and video devices to use
                    AudioDevices.SelectedIndex = LocalMediaReceiver.GetAudioDeviceNumber();
                    VideoDevices.SelectedIndex = LocalMediaReceiver.GetVideoDeviceNumber();

                    // Switch the local media when the audio or video device changes
                    AudioDevices.SelectionChanged += SwitchAudioDevice;
                    VideoDevices.SelectionChanged += SwitchVideoDevice;

                    // Update the displayed video device when a device is selected or changed
                    LocalMediaReceiver.OnAudioDeviceNumberChanged += UpdateSelectedAudioDevice;
                    LocalMediaReceiver.OnVideoDeviceNumberChanged += UpdateSelectedVideoDevice;

                    // This is our local video control, a WinForms control
                    // that displays video coming from the capture source.
                    // Set up the video device's capture source
                    var localVideoControl = (FrameworkElement)e.LocalVideoControl;

                    // Create an IceLink layout manager, which makes the task
                    // of arranging video controls easy. Give it a reference
                    // to a WinForms control that can be filled with video feeds.
                    // Use WpfLayoutManager for WPF-based applications.
                    // Have the video render to the canvas in the sender's UI
                    LayoutManager = new WpfLayoutManager(containerLocal);

                    // Display the video coming from the capture source
                    LayoutManager.SetLocalVideoControl(localVideoControl);

                    // Start the conference
                    StartConference();
                }
            });
        }
コード例 #32
0
        private void SplineChart(string filedirectory)
        {
            chart2.Series.Clear();
            var series1 = new System.Windows.Forms.DataVisualization.Charting.Series
            {
                Name              = "Series1",
                Color             = System.Drawing.Color.Green,
                IsVisibleInLegend = false,
                IsXValueIndexed   = true,
                ChartType         = SeriesChartType.Line
            };

            chart2.Series.Add(series1);

            //Set X and Y Axis Minimum and Maximum
            chart2.ChartAreas[0].AxisX.Minimum = 0;
            //chart2.ChartAreas[0].AxisX.Maximum = Convert.ToDouble(lines.Count() - 1);
            chart2.ChartAreas[0].AxisX.Interval          = 25;
            chart2.ChartAreas[0].AxisX.IntervalAutoMode  = IntervalAutoMode.VariableCount;
            chart2.ChartAreas[0].AxisX.IntervalType      = DateTimeIntervalType.Number;
            chart2.ChartAreas[0].AxisX.IsStartedFromZero = true;
            chart2.ChartAreas[0].AxisY.Minimum           = Math.Floor(Convert.ToDouble(lines[lines.Count() - 1][0]));
            chart2.ChartAreas[0].AxisY.Maximum           = Math.Ceiling(Convert.ToDouble(lines[1][0]));

            double        cost            = -1;
            double        previouscost    = double.MaxValue;
            int           lasti           = int.MinValue;
            List <Bitmap> designpics      = new List <Bitmap>();
            List <Bitmap> chartpics       = new List <Bitmap>();
            List <Bitmap> mergedpics      = new List <Bitmap>();
            MemoryStream  ms              = new MemoryStream();
            int           maxWidthdesign  = -1;
            int           maxHeightdesign = -1;

            //Create bmp pictures and save them for every iteration
            for (int i = 1; i < lines.Count(); i++)
            {
                cost = Convert.ToDouble(lines[i][0]);
                //There is a new design picture if this is true
                if (cost < previouscost)
                {
                    lasti = (i - 1);
                    designpics.Add(new Bitmap(filedirectory + "/" + (i - 1).ToString() + ".bmp"));
                    if (designpics[i - 1].Width > maxWidthdesign)
                    {
                        maxWidthdesign = designpics[i - 1].Width;
                    }
                    if (designpics[i - 1].Height > maxHeightdesign)
                    {
                        maxHeightdesign = designpics[i - 1].Height;
                    }
                    previouscost = cost;
                }
                else //No new picture, use the old picture
                {
                    designpics.Add(new Bitmap(filedirectory + "/" + lasti.ToString() + ".bmp"));
                }
                series1.Points.AddXY(Convert.ToDouble(i - 1), cost);
                chart2.Invalidate();
                //chart2.SaveImage(filedirectory + "/chart" + (i - 1).ToString() + ".bmp", System.Windows.Forms.DataVisualization.Charting.ChartImageFormat.Bmp);
                chart2.SaveImage(ms, ChartImageFormat.Bmp);
                chartpics.Add(new Bitmap(ms));
            }

            //Merge pictures
            for (int i = 0; i < lines.Count() - 1; i++)
            {
                mergedpics.Add(MergeTwoImages(chartpics[i], designpics[i], maxWidthdesign > chart2.Width ? maxWidthdesign : chart2.Width, maxHeightdesign + chart2.Height + 1));
            }

            //Create avi video from images
            AviManager avimanager = new AviManager(filedirectory + "/video.avi", false);
            //add a new video stream and one frame to the new file
            VideoStream aviStream = avimanager.AddVideoStream(false, 10, mergedpics[0]);

            for (int i = 1; i < lines.Count() - 1; i++)
            {
                //aviStream.AddFrame(chartpics[i]);
                aviStream.AddFrame(mergedpics[i]);
            }
            avimanager.Close();
        }
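
MergeTwoImages is called above but not shown. A minimal sketch, assuming it stacks the chart image above the design image on a canvas of the given size (hypothetical implementation using System.Drawing):

        private Bitmap MergeTwoImages(Bitmap top, Bitmap bottom, int width, int height)
        {
            var merged = new Bitmap(width, height);
            using (Graphics g = Graphics.FromImage(merged))
            {
                g.Clear(System.Drawing.Color.White);
                g.DrawImage(top, 0, 0);                 // chart on top
                g.DrawImage(bottom, 0, top.Height + 1); // design picture below it
            }
            return merged;
        }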
コード例 #33
0
 public override void OnInitialized()
 {
     base.OnInitialized();
     defaultVideo = new VideoStream();
 }
コード例 #34
0
 public VideoStream OpenVideo(VideoStream stream)
 {
     return(stream);
 }
コード例 #35
0
 public HttpContent VideoContent(VideoStream video, string extension)
 {
     return(new PushStreamContent((Action <Stream, HttpContent, TransportContext>)video.WriteToStream,
                                  MediaUtilities.GetMimeType(extension)));
 }
コード例 #36
0
ファイル: App.cs プロジェクト: QuickBlox/quickblox-dotnet-sdk
        public void StopConference(Action<string> callback)
        {
            // Detach signalling from the conference.
            Signalling.Detach((error) =>
            {
                Conference.OnLinkInit -= LogLinkInit;
                Conference.OnLinkUp   -= LogLinkUp;
                Conference.OnLinkDown -= LogLinkDown;
                Conference = null;

                VideoStream.OnLinkInit -= AddRemoteVideoControl;
                VideoStream.OnLinkDown -= RemoveRemoteVideoControl;
                VideoStream = null;

                AudioStream = null;
                
                callback(error);
            });
        }
コード例 #37
0
        public HttpResponseMessage Get(string p)
        {
            try
            {
                // Skip the login check because it hurts performance
                //if (!Common.CheckLogin()) return null;
                var response = Request.CreateResponse();
                response.Headers.TransferEncodingChunked = true;
                response.Headers.Add("Accept-Ranges", "bytes");
                response.Headers.Add("Keep-Alive", "timeout=10");

                //path.combine error??
                //var appPath = WebConfigurationManager.AppSettings["PhysicalPath"];
                var fileInfo = new FileInfo((appPath + p).Replace("\\", "/"));

                //check path
                if (string.IsNullOrEmpty(p))
                {
                    response.StatusCode = HttpStatusCode.NotFound;
                    return(response);
                }
                if (!File.Exists(appPath + p))
                {
                    response.StatusCode = HttpStatusCode.NotFound;
                    return(response);
                }
                var rangeHeader = Request.Headers.Range;
                var totalLength = fileInfo.Length;

                long start = 0, end = totalLength - 1;
                ContentRangeHeaderValue contentRange = null;
                // No Range header, or the range covers the whole file
                if (rangeHeader == null || !rangeHeader.Ranges.Any())
                {
                    Common.WriteDebug("StreammingController, line 49 ", "Request video range is null or all file! \r\n" + p);
                    response.Headers.AcceptRanges.Add("bytes");
                    response.StatusCode = HttpStatusCode.OK;
                }
                else
                {
                    if (rangeHeader.Unit != "bytes" || rangeHeader.Ranges.Count > 1 ||
                        !MediaUtilities.CheckRangeItem(rangeHeader.Ranges.First(), totalLength, out start, out end))
                    {
                        Common.WriteDebug("StreammingController, line 58", "Request video range is invalid! \r\n" + p);
                        response.StatusCode = HttpStatusCode.RequestedRangeNotSatisfiable;
                        response.Content    = new StreamContent(Stream.Null);
                        response.Content.Headers.ContentRange = new ContentRangeHeaderValue(totalLength);
                        response.Content.Headers.ContentType  =
                            new MediaTypeHeaderValue(MediaUtilities.GetMimeType(fileInfo.Extension));
                        // Return a null stream
                        return(response);
                    }
                    // The range is valid
                    Common.WriteDebug("StreammingController, line 68", "Request video range is valid, send content! \r\n" + p);
                    contentRange = new ContentRangeHeaderValue(start, end, totalLength);
                }

                // Status code is Partial Content.
                response.StatusCode = HttpStatusCode.PartialContent;
                response.Headers.TransferEncodingChunked = true;
                if (fileInfo.Extension.ToLower().Equals(".oga") ||
                    fileInfo.Extension.ToLower().Equals(".mp3") ||
                    fileInfo.Extension.ToLower().Equals(".flac"))
                {
                    response.Content = AudioContent(fileInfo, start, end);
                    response.Content.Headers.ContentLength   = end - start + 1;
                    response.Headers.TransferEncodingChunked = null;
                    response.Content.Headers.ContentType     = new MediaTypeHeaderValue(MediaUtilities.GetMimeType(fileInfo.Extension));
                }
                else
                {
                    var vid = new VideoStream(appPath + p, start, end);
                    response.Content = VideoContent(vid, fileInfo.Extension);
                    response.Content.Headers.ContentType = new MediaTypeHeaderValue("video/mp4");
                }
                if (contentRange != null)
                {
                    response.Content.Headers.ContentRange = contentRange;
                }
                //if (Request.Headers.UserAgent.ToString().Contains("Firefox"))  return response;
                response.Content.Headers.ContentLength = end - start + 1;
                return(response);
            }
            catch (Exception ex)
            {
                Common.WriteLog(MethodBase.GetCurrentMethod().Name, ex + ex.StackTrace);
                return(null);
            }
        }
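
MediaUtilities.CheckRangeItem is referenced above but not shown. A hedged sketch of what such a single-range validator typically does with a System.Net.Http.Headers.RangeItemHeaderValue (hypothetical implementation):

        public static bool CheckRangeItem(RangeItemHeaderValue range, long totalLength,
            out long start, out long end)
        {
            if (range.From.HasValue)
            {
                start = range.From.Value;
                end   = range.To ?? totalLength - 1;  // "bytes=100-" runs to EOF
            }
            else if (range.To.HasValue)
            {
                start = totalLength - range.To.Value; // "bytes=-500": the last 500 bytes
                end   = totalLength - 1;
            }
            else
            {
                start = end = 0;
                return false;
            }
            return start >= 0 && start <= end && end < totalLength;
        }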
コード例 #38
0
ファイル: VideoAssetCompiler.cs プロジェクト: Aggror/Stride
            /// <inheritdoc />
            protected override async Task <ResultStatus> DoCommandOverride(ICommandContext commandContext)
            {
                VideoAsset videoAsset = Parameters.Video;

                try
                {
                    // Get path to ffmpeg
                    var ffmpeg = ToolLocator.LocateTool("ffmpeg.exe")?.ToWindowsPath() ?? throw new AssetException("Failed to compile a video asset, ffmpeg was not found.");

                    // Get absolute path of asset source on disk
                    var assetDirectory = videoAsset.Source.GetParent();
                    var assetSource    = UPath.Combine(assetDirectory, videoAsset.Source);

                    //=====================================================================================
                    //Get the info from the video codec

                    //Check if we need to reencode the video
                    var mustReEncodeVideo    = false;
                    var sidedataStripCommand = "";

                    // check that the video file format is supported
                    if (Parameters.Platform == PlatformType.Windows && videoAsset.Source.GetFileExtension() != ".mp4")
                    {
                        mustReEncodeVideo = true;
                    }

                    //Use FFmpegMedia object (need to check more details first before I can use it)
                    VideoStream videoStream = null;
                    AudioStream audioStream = null;
                    FFmpegUtils.PreloadLibraries();
                    FFmpegUtils.Initialize();
                    using (var media = new FFmpegMedia())
                    {
                        media.Open(assetSource.ToWindowsPath());

                        // Get the first video stream
                        videoStream = media.Streams.OfType <VideoStream>().FirstOrDefault();
                        if (videoStream == null)
                        {
                            throw new AssetException("Failed to compile a video asset. Did not find the VideoStream from the media.");
                        }

                        // On windows MediaEngineEx player only decode the first video if the video is detected as a stereoscopic video,
                        // so we remove the tags inside the video in order to ensure the same behavior as on other platforms (side by side decoded texture)
                        // Unfortunately it does not seem possible to disable this behavior from the MediaEngineEx API.
                        if (Parameters.Platform == PlatformType.Windows && media.IsStereoscopicVideo(videoStream))
                        {
                            mustReEncodeVideo    = true;
                            sidedataStripCommand = "-vf sidedata=delete";
                        }

                        // Get the first audio stream
                        audioStream = media.Streams.OfType <AudioStream>().FirstOrDefault();
                    }
                    Size2 videoSize = new Size2(videoStream.Width, videoStream.Height);

                    //check the format
                    if (ListSupportedCodecNames != null)
                    {
                        if (Array.IndexOf(ListSupportedCodecNames, videoStream.Codec) < 0)
                        {
                            mustReEncodeVideo = true;
                        }
                    }

                    // check if video need to be trimmed
                    var videoDuration = videoAsset.VideoDuration;
                    if (videoDuration.Enabled && (videoDuration.StartTime != TimeSpan.Zero ||
                                                  videoDuration.EndTime.TotalSeconds < videoStream.Duration.TotalSeconds - MathUtil.ZeroToleranceDouble))
                    {
                        mustReEncodeVideo = true;
                    }

                    //check the video target and source resolution
                    Size2 targetSize;
                    if (videoAsset.IsSizeInPercentage)
                    {
                        targetSize = new Size2((int)(videoSize.Width * videoAsset.Width / 100.0f), (int)(videoSize.Height * videoAsset.Height / 100.0f));
                    }
                    else
                    {
                        targetSize = new Size2((int)(videoAsset.Width), (int)(videoAsset.Height));
                    }

                    // ensure that the size is a multiple of 2 (ffmpeg cannot output odd dimensions, at least with this codec)
                    if (targetSize.Width % 2 == 1)
                    {
                        targetSize.Width += 1;
                    }
                    if (targetSize.Height % 2 == 1)
                    {
                        targetSize.Height += 1;
                    }

                    if (targetSize.Width != videoSize.Width || targetSize.Height != videoSize.Height)
                    {
                        mustReEncodeVideo = true;
                    }

                    //check the audio settings
                    int  audioChannelsTarget       = audioStream == null ? 0 : audioStream.ChannelCount;
                    bool mustReEncodeAudioChannels = false;
                    if (videoAsset.IsAudioChannelMono)
                    {
                        audioChannelsTarget = 1;
                        if (audioStream != null && audioStream.ChannelCount != audioChannelsTarget)
                        {
                            mustReEncodeAudioChannels = true;
                            mustReEncodeVideo         = true;
                        }
                    }

                    // Execute ffmpeg to convert source to H.264
                    string tempFile = null;
                    try
                    {
                        if (mustReEncodeVideo)
                        {
                            string targetCodecFormat = "h264";  //hardcoded for now
                            commandContext.Logger.Info(string.Format("Video Asset Compiler: \"{0}\". Re-encode the Video. Format:{1}, Size:{2}x{3}. Audio Channels:{4}",
                                                                     videoAsset.Source.GetFileName(), targetCodecFormat, targetSize.Width, targetSize.Height, audioChannelsTarget));

                            tempFile = Path.GetTempFileName();
                            string channelFlag = "";
                            if (mustReEncodeAudioChannels)
                            {
                                channelFlag = string.Format(" -ac {0}", audioChannelsTarget);
                            }

                            var startTime       = videoDuration.StartTime;
                            var duration        = videoDuration.EndTime - videoDuration.StartTime;
                            var trimmingOptions = videoDuration.Enabled ?
                                                  $" -ss {startTime.Hours:D2}:{startTime.Minutes:D2}:{startTime.Seconds:D2}.{startTime.Milliseconds:D3}" +
                                                  $" -t {duration.Hours:D2}:{duration.Minutes:D2}:{duration.Seconds:D2}.{duration.Milliseconds:D3}":
                                                  "";

                            var commandLine = "  -hide_banner -loglevel error" +                       // hide most log output
                                              "  -nostdin" +                                           // no interaction (background process)
                                              $" -i \"{assetSource.ToWindowsPath()}\"" +               // input file
                                              $"{trimmingOptions}" +
                                              "  -f mp4 -vcodec " + targetCodecFormat +                // codec
                                              channelFlag +                                            // audio channels
                                              $"  -vf scale={targetSize.Width}:{targetSize.Height} " + // adjust the resolution
                                              sidedataStripCommand +                                   // strip of stereoscopic sidedata tag
                                                                                                       //" -an" + // no audio
                                                                                                       //" -pix_fmt yuv422p" + // pixel format (planar YUV 4:2:2, 16bpp, (1 Cr & Cb sample per 2x1 Y samples))
                                              $" -y \"{tempFile}\"";                                   // output file (always overwrite)
                            var ret = await ShellHelper.RunProcessAndGetOutputAsync(ffmpeg, commandLine, commandContext.Logger);

                            if (ret != 0 || commandContext.Logger.HasErrors)
                            {
                                throw new AssetException($"Failed to compile a video asset. ffmpeg failed to convert {assetSource}.");
                            }
                        }
                        else
                        {
                            commandContext.Logger.Info(string.Format("Video Asset Compiler: \"{0}\". No Re-encoding necessary",
                                                                     videoAsset.Source.GetFileName()));

                            // Use temporary file
                            tempFile = assetSource.ToWindowsPath();
                        }

                        var dataUrl = Url + "_Data";
                        var video   = new Video.Video
                        {
                            CompressedDataUrl = dataUrl,
                        };

                        // Make sure we don't compress h264 data
                        commandContext.AddTag(new ObjectUrl(UrlType.Content, dataUrl), Builder.DoNotCompressTag);

                        // Write the data
                        using (var reader = new BinaryReader(new FileStream(tempFile, FileMode.Open, FileAccess.Read)))
                            using (var outputStream = MicrothreadLocalDatabases.DatabaseFileProvider.OpenStream(dataUrl, VirtualFileMode.Create, VirtualFileAccess.Write, VirtualFileShare.Read, StreamFlags.Seekable))
                            {
                                // For now write everything sequentially, 2 MiB at a time
                                var length = reader.BaseStream.Length;
                                for (var position = 0L; position < length; position += 2 << 20)
                                {
                                    var buffer = reader.ReadBytes(2 << 20);
                                    outputStream.Write(buffer, 0, buffer.Length);
                                }
                            }

                        var assetManager = new ContentManager(MicrothreadLocalDatabases.ProviderService);
                        assetManager.Save(Url, video);

                        return(ResultStatus.Successful);
                    }
                    finally
                    {
                        if (mustReEncodeVideo)
                        {
                            if (tempFile != null)
                            {
                                File.Delete(tempFile);
                            }
                        }
                    }
                }
                catch (AssetException)
                {
                    throw;
                }
                catch (Exception ex)
                {
                    throw new AssetException("Failed to compile a video asset. Unexpected exception.", ex);
                }
            }
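
The -ss/-t trimming options above are formatted as HH:MM:SS.mmm. A hedged illustration with example values:

                var startTime = TimeSpan.FromSeconds(5.25);
                var duration  = TimeSpan.FromSeconds(90.5);
                var trimmingOptions =
                    $" -ss {startTime.Hours:D2}:{startTime.Minutes:D2}:{startTime.Seconds:D2}.{startTime.Milliseconds:D3}" +
                    $" -t {duration.Hours:D2}:{duration.Minutes:D2}:{duration.Seconds:D2}.{duration.Milliseconds:D3}";
                // trimmingOptions == " -ss 00:00:05.250 -t 00:01:30.500"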
コード例 #39
0
        private void loadFile()
        {
            string redirFile = _wd.makePath();

            Gnd.i.progressMessage.post("入力ファイルをコピーしています...");

            File.Copy(_origFile, _duplFile);

            Gnd.i.progressMessage.post("入力ファイルのフォーマットを調べています...");

            ProcessTools.runOnBatch(
                "ffprobe.exe " + _duplFile + " 2> " + redirFile,
                FFmpegBin.i.getBinDir()
                );

            foreach (string fLine in FileTools.readAllLines(redirFile, Encoding.ASCII))
            {
                string line = fLine.Trim();

                if (line.StartsWith("Stream"))
                {
                    List <string> sInts    = StringTools.tokenize(line, StringTools.DIGIT, true, true);
                    int           mapIndex = int.Parse(sInts[1]);

                    List <string> tokens = StringTools.tokenize(line, " ,", false, true);

                    if (line.Contains("Audio:"))
                    {
                        AudioStream stream = new AudioStream();

                        stream.mapIndex = mapIndex;

                        _audioStreams.Add(stream);
                    }
                    else if (line.Contains("Video:"))
                    {
                        VideoStream stream = new VideoStream();

                        stream.mapIndex = mapIndex;

                        {
                            int index = ArrayTools.indexOf <string>(tokens.ToArray(), "fps", StringTools.comp);

                            if (index == -1)
                            {
                                throw new Exception("映像ストリームの秒間フレーム数を取得出来ませんでした。");
                            }

                            stream.fps = IntTools.toInt(double.Parse(tokens[index - 1]));
                        }

                        {
                            string token = Utils.getTokenDigitFormat(tokens.ToArray(), "9x9");

                            if (token == null)
                            {
                                throw new Exception("映像ストリームの画面サイズを取得出来ませんでした。");
                            }

                            List <string> s_wh = StringTools.tokenize(token, StringTools.DIGIT, true, true);

                            stream.w = int.Parse(s_wh[0]);
                            stream.h = int.Parse(s_wh[1]);
                        }

                        if (IntTools.isRange(stream.fps, 1, IntTools.IMAX) == false)
                        {
                            throw new FailedOperation("映像ストリームの秒間フレーム数を認識出来ません。" + stream.fps);
                        }

                        if (IntTools.isRange(stream.w, 1, IntTools.IMAX) == false)
                        {
                            throw new FailedOperation("映像ストリームの画面の幅を認識出来ません。" + stream.w);
                        }

                        if (IntTools.isRange(stream.h, 1, IntTools.IMAX) == false)
                        {
                            throw new FailedOperation("映像ストリームの画面の高さを認識出来ません。" + stream.h);
                        }

                        _videoStreams.Add(stream);
                    }
                    else
                    {
                        // "Data:" とか
                    }
                }
            }

            if (_audioStreams.Count == 0)
            {
                throw new FailedOperation("音声ストリームがありません。");
            }

            if (_videoStreams.Count == 0)
            {
                throw new FailedOperation("映像ストリームがありません。");
            }

            _targetAudioStream = _audioStreams[0];
            _targetVideoStream = _videoStreams[0];

            // ---- Audio Stream ----

            Gnd.i.progressMessage.post("音声ストリームを取り出しています...");

            ProcessTools.runOnBatch(
                "ffmpeg.exe -i " + _duplFile + " -map 0:" + _targetAudioStream.mapIndex + " -ac 2 " + _wavFile + " 2> " + _wd.makePath("mk_wav_stderr.txt"),
                FFmpegBin.i.getBinDir()
                );

            Gnd.i.progressMessage.post("音声ストリームを展開しています...");

            _wavHz = CTools.wavFileToCsvFile(_wavFile, _wavCsvFile, _wd.makePath("mk_wav-csv_stdout.txt"));

            // 1 <= audio length < IMAX

            {
                long size = new FileInfo(_wavCsvFile).Length;

                if (size % 12L != 0)
                {
                    throw new Exception("wav-csv data size error");
                }

                long count = size / 12L;

                if (count == 0L)
                {
                    throw new FailedOperation("音声ストリームに最初のサンプリング値がありません。");
                }

                if (IntTools.IMAX <= count)
                {
                    throw new FailedOperation("音声ストリームが長過ぎます。");
                }
            }

            // ---- Video Stream ----

            Gnd.i.progressMessage.post("映像ストリームを展開しています...");

            ProcessTools.runOnBatch(
                "ffmpeg.exe -i " + _duplFile + " -map 0:" + _targetVideoStream.mapIndex + " -r " + _targetVideoStream.fps + " -f image2 -vcodec " + Consts.V_IMG_VCODEC + " " + _imgDir + "\\%%010d" + Consts.V_IMG_EXT + " 2> " + _wd.makePath("mk_img_stderr.txt"),
                FFmpegBin.i.getBinDir()
                );

            // 1 <= video length < IMAX

            if (File.Exists(_imgDir + "\\0000000001" + Consts.V_IMG_EXT) == false)
            {
                throw new FailedOperation("映像ストリームに最初のフレームがありません。");
            }

            if (File.Exists(_imgDir + "\\1000000001" + Consts.V_IMG_EXT))
            {
                throw new FailedOperation("映像ストリームが長過ぎます。");
            }

            // ----

            Gnd.i.progressMessage.post("");             // done

            ed = new EditData(this);

            // VIDEO_W/H_MIN/MAX
            {
                Image img = ed.v.getImage(0);

                if (img.Width < Consts.VIDEO_W_MIN)
                {
                    throw new FailedOperation("映像の幅が小さ過ぎます。" + img.Width);
                }

                if (Consts.VIDEO_W_MAX < img.Width)
                {
                    throw new FailedOperation("映像の幅が大き過ぎます。" + img.Width);
                }

                if (img.Height < Consts.VIDEO_H_MIN)
                {
                    throw new FailedOperation("映像の高さが小さ過ぎます。" + img.Height);
                }

                if (Consts.VIDEO_H_MAX < img.Height)
                {
                    throw new FailedOperation("映像の高さが大き過ぎます。" + img.Height);
                }
            }

            // AUDIO_HZ_MIN/MAX
            {
                int hz = this._wavHz;

                if (hz < Consts.AUDIO_HZ_MIN)
                {
                    throw new FailedOperation("音声ストリームのサンプリング周波数が小さ過ぎます。" + hz);
                }

                if (Consts.AUDIO_HZ_MAX < hz)
                {
                    throw new FailedOperation("音声ストリームのサンプリング周波数が大き過ぎます。" + hz);
                }
            }
        }
コード例 #40
0
 public OpenVideoResults(VideoStream stream)
 {
     this.stream = stream;
 }
コード例 #41
0
    public class EntryPoint {   //The usual stuff for a Vegas script, I'll explain it later (no)
        public void FromVegas(Vegas myVegas)
        {
            PlugInNode pipeffect = myVegas.VideoFX.GetChildByName("VEGAS Picture In Picture"); //Getting the PiP effect

            if (pipeffect == null)                                                             //if the effect doesn't exist, exit the script with an error message
            {
                MessageBox.Show("You don't have the VEGAS Picture In Picture effect. \n Please install it and try again!");
                return;
            }
            List <VideoEvent> videvents = new List <VideoEvent>();         //A list for the selected events

            foreach (Track myTrack in myVegas.Project.Tracks)              //going through every track and every event, adding the selected video events to the list
            {
                foreach (TrackEvent myEvent in myTrack.Events)
                {
                    if ((myEvent.MediaType == MediaType.Video) && (myEvent.Selected == true))
                    {
                        videvents.Add((VideoEvent)myEvent);
                    }
                }
            }
            double            proWidth  = myVegas.Project.Video.Width;                           //the project's width
            double            proHeight = myVegas.Project.Video.Height;                          //the project's height
            VideoMotionBounds newBound;                                                          //variable for the crop's size
            VideoMotionBounds newerBound;                                                        //variable for crop size if the first one doesn't fit the whole picture

            foreach (VideoEvent pipevent in videvents)                                           // for each video event in the list
            {
                Take                piptake   = pipevent.ActiveTake;                             //getting the width and height of the event's source
                VideoStream         pipstream = piptake.MediaStream as VideoStream;
                int                 myWidth   = pipstream.Width;                                 //the event's width
                int                 myHeight  = pipstream.Height;                                //the event's height
                double              proAspect = myWidth / (myHeight * (proWidth / proHeight));   //calculating the factor to multiply the width/height by later
                VideoMotionKeyframe reframe   = new VideoMotionKeyframe(Timecode.FromFrames(0)); //creating a new Pan/Crop keyframe at the beginning of the event
                pipevent.VideoMotion.Keyframes.Add(reframe);
                if (myWidth > myHeight)                                                          //calculating the size of the pan/crop keyframe with the help of the previously calculated value (proAspect) (EXTREMELY COMPLEX AND DANGEROUS, handle with care)
                {
                    newBound = new VideoMotionBounds(new VideoMotionVertex((float)(reframe.Center.X - (double)(myWidth / 2)), (float)(reframe.Center.Y - (double)(myHeight / 2) * proAspect)), new VideoMotionVertex((float)(reframe.Center.X + (double)(myWidth / 2)), (float)(reframe.Center.Y - (double)(myHeight / 2) * proAspect)), new VideoMotionVertex((float)(reframe.Center.X + (double)(myWidth / 2)), (float)(reframe.Center.Y + (double)(myHeight / 2) * proAspect)), new VideoMotionVertex((float)(reframe.Center.X - (double)(myWidth / 2)), (float)(reframe.Center.Y + (double)(myHeight / 2) * proAspect)));
                    if (Math.Abs(newBound.TopLeft.Y - newBound.BottomLeft.Y) < myHeight)                   //if the crop has the correct aspect ratio but still cuts out part of the image, this code computes a crop size that covers the whole picture with the correct ratio (MORE MATH)
                    {
                        float multiply = myHeight / Math.Abs(newBound.TopLeft.Y - newBound.BottomLeft.Y);
                        float actWidth = Math.Abs(newBound.TopRight.X - newBound.TopLeft.X) / 2;
                        float toHeight = myHeight / 2;
                        newerBound = new VideoMotionBounds(new VideoMotionVertex(reframe.Center.X - actWidth * multiply, reframe.Center.Y - toHeight), new VideoMotionVertex(reframe.Center.X + actWidth * multiply, reframe.Center.Y - toHeight), new VideoMotionVertex(reframe.Center.X + actWidth * multiply, reframe.Center.Y + toHeight), new VideoMotionVertex(reframe.Center.X - actWidth * multiply, reframe.Center.Y + toHeight));
                        newBound   = newerBound;
                    }
                }
                else                 //almost same as above, casual math
                {
                    newBound = new VideoMotionBounds(new VideoMotionVertex((float)(reframe.Center.X - (double)(myWidth / 2) / proAspect), (float)(reframe.Center.Y - (double)(myHeight / 2))), new VideoMotionVertex((float)(reframe.Center.X + (double)(myWidth / 2) / proAspect), (float)(reframe.Center.Y - (double)(myHeight / 2))), new VideoMotionVertex((float)(reframe.Center.X + (double)(myWidth / 2) / proAspect), (float)(reframe.Center.Y + (double)(myHeight / 2))), new VideoMotionVertex((float)(reframe.Center.X - (double)(myWidth / 2) / proAspect), (float)(reframe.Center.Y + (double)(myHeight / 2))));
                    if (Math.Abs(newBound.TopRight.X - newBound.TopLeft.X) < myWidth)
                    {
                        float multiply  = myWidth / Math.Abs(newBound.TopRight.X - newBound.TopLeft.X);
                        float toWidth   = myWidth / 2;
                        float actHeight = Math.Abs(newBound.TopLeft.Y - newBound.BottomLeft.Y) / 2;
                        newerBound = new VideoMotionBounds(new VideoMotionVertex(reframe.Center.X - toWidth, reframe.Center.Y - actHeight * multiply), new VideoMotionVertex(reframe.Center.X + toWidth, reframe.Center.Y - actHeight * multiply), new VideoMotionVertex(reframe.Center.X + toWidth, reframe.Center.Y + actHeight * multiply), new VideoMotionVertex(reframe.Center.X - toWidth, reframe.Center.Y + actHeight * multiply));
                        newBound   = newerBound;
                    }
                }
                reframe.Bounds = newBound;                //setting the keyframe's size
                pipevent.Effects.AddEffect(pipeffect);    //adding the PiP effect to the event
            }
        }
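
A worked example of the proAspect factor above (hedged; numbers are for illustration only): for a 640x480 event in a 1920x1080 (16:9) project,

            double proAspect = 640.0 / (480.0 * (1920.0 / 1080.0)); // = 0.75
            // The crop's half-height is scaled by 0.75, so the 4:3 source
            // is cropped to 640x360, i.e. exactly 16:9.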
コード例 #42
0
        public void perform()
        {
            using (WorkingDir wd = new WorkingDir())
            {
                string rExt = Path.GetExtension(Gnd.i.rFile);

                if (Gnd.i.audioVideoExtensions.contains(rExt) == false)
                {
                    throw new Exception("再生可能なファイルではありません。(不明な拡張子)");
                }

                string midRFile = wd.makePath() + rExt;

                try
                {
                    File.Copy(Gnd.i.rFile, midRFile);

                    if (File.Exists(midRFile) == false)
                    {
                        throw null;
                    }
                }
                catch
                {
                    throw new Exception("ファイルにアクセス出来ません。");
                }

                string redirFile = wd.makePath();

                ProcessTools.runOnBatch("ffprobe.exe " + midRFile + " 2> " + redirFile, Gnd.i.ffmpegBinDir);

                foreach (string line in FileTools.readAllLines(redirFile, Encoding.ASCII))
                {
                    if (line.Contains("Duration:"))
                    {
                        _duration = new Duration();

                        List <string> tokens = StringTools.tokenize(line, " :.,", false, true);

                        if (tokens[1] == "N/A")
                        {
                            throw new Exception("再生可能なファイルではありません。(Duration)");
                        }

                        int h = int.Parse(tokens[1]);
                        int m = int.Parse(tokens[2]);
                        int s = int.Parse(tokens[3]);

                        int sec = h * 3600 + m * 60 + s;

                        if (sec < 1)
                        {
                            throw new Exception("映像又は曲の長さが短すぎます。");
                        }

                        if (IntTools.IMAX < sec)
                        {
                            throw new Exception("映像又は曲の長さが長すぎます。");
                        }

                        _duration.secLength = sec;
                    }
                    else if (_audioStream == null && line.Contains("Stream") && line.Contains("Audio:"))
                    {
                        _audioStream = new AudioStream();

                        List <string> tokens = StringTools.tokenize(line, StringTools.DIGIT, true, true);

                        _audioStream.mapIndex = int.Parse(tokens[1]);
                    }
                    else if (_videoStream == null && line.Contains("Stream") && line.Contains("Video:"))
                    {
                        _videoStream = new VideoStream();

                        {
                            List <string> tokens = StringTools.tokenize(line, StringTools.DIGIT, true, true);

                            _videoStream.mapIndex = int.Parse(tokens[1]);
                        }

                        {
                            List <string> tokens = StringTools.tokenize(line, " ,");

                            foreach (string token in tokens)
                            {
                                if (StringTools.toDigitFormat(token, true) == "9x9")
                                {
                                    List <string> whTokens = StringTools.tokenize(token, "x");

                                    _videoStream.w = int.Parse(whTokens[0]);
                                    _videoStream.h = int.Parse(whTokens[1]);
                                }
                            }
                        }

                        if (_videoStream.w < Consts.VIDEO_W_MIN)
                        {
                            throw new Exception("映像の幅が小さすぎます。");
                        }

                        if (_videoStream.h < Consts.VIDEO_H_MIN)
                        {
                            throw new Exception("映像の高さが小さすぎます。");
                        }

                        if (IntTools.IMAX < _videoStream.w)
                        {
                            throw new Exception("映像の幅が大きすぎます。");
                        }

                        if (IntTools.IMAX < _videoStream.h)
                        {
                            throw new Exception("映像の高さが大きすぎます。");
                        }
                    }
                }
                if (_duration == null)
                {
                    throw null;
                }

                if (_audioStream == null)
                {
                    throw new Exception("再生可能なファイルではありません。(音声ストリームがありません)");
                }

                if (_videoStream == null)
                {
                    _type = Consts.MediaType_e.AUDIO;
                }
                else
                {
                    _type = Consts.MediaType_e.MOVIE;
                }

                string wFile = Utils.getFile(Gnd.i.wFileNoExt, _type);

                if (Utils.equalsExt(midRFile, wFile))
                {
                    try
                    {
                        File.Copy(midRFile, wFile, true);

                        if (File.Exists(wFile) == false)
                        {
                            throw null;
                        }
                    }
                    catch
                    {
                        throw new Exception("ファイルの出力に失敗しました。(単にコピー)");
                    }
                }
                else
                {
                    string midWFile = wd.makePath() + Utils.getExt(_type);

                    if (Gnd.i.convWavMastering)
                    {
                        string wavFile = wd.makePath() + ".wav";

                        ProcessTools.runOnBatch(
                            "ffmpeg.exe -i " + midRFile + " -map 0:" + _audioStream.mapIndex + " -ac 2 " + wavFile,
                            Gnd.i.ffmpegBinDir
                            );

                        if (File.Exists(wavFile) == false)
                        {
                            throw new Exception("音声ストリームの抽出に失敗しました。");
                        }

                        string wavFileNew = wd.makePath() + ".wav";

                        ProcessTools.runOnBatch(
                            "Master.exe " + wavFile + " " + wavFileNew + " " + wd.makePath() + "_DMY_REP.tmp > " + redirFile,
                            Gnd.i.wavMasterBinDir
                            );

                        Utils.textFileToLog(redirFile, StringTools.ENCODING_SJIS);

                        if (File.Exists(wavFileNew) == false)
                        {
                            throw new Exception("wavFileNew does not exist");
                        }

                        if (_type == Consts.MediaType_e.AUDIO)
                        {
                            ProcessTools.runOnBatch(
                                "ffmpeg.exe -i " + wavFileNew + " -map 0:0 " + Gnd.i.ffmpegOptAudio + " " + midWFile + " 2> " + redirFile,
                                Gnd.i.ffmpegBinDir
                                );
                        }
                        else
                        {
                            ProcessTools.runOnBatch(
                                "ffmpeg.exe -i " + midRFile + " -i " + wavFileNew + " -map 0:" + _videoStream.mapIndex + " -map 1:0 " + Gnd.i.ffmpegOptVideo + " " + Gnd.i.ffmpegOptAudio + " " + midWFile + " 2> " + redirFile,
                                Gnd.i.ffmpegBinDir
                                );
                        }
                    }
                    else
                    {
                        if (_type == Consts.MediaType_e.AUDIO)
                        {
                            ProcessTools.runOnBatch(
                                "ffmpeg.exe -i " + midRFile + " -map 0:" + _audioStream.mapIndex + " " + Gnd.i.ffmpegOptAudio + " " + midWFile + " 2> " + redirFile,
                                Gnd.i.ffmpegBinDir
                                );
                        }
                        else
                        {
                            ProcessTools.runOnBatch(
                                "ffmpeg.exe -i " + midRFile + " -map 0:" + _videoStream.mapIndex + " -map 0:" + _audioStream.mapIndex + " " + Gnd.i.ffmpegOptVideo + " " + Gnd.i.ffmpegOptAudio + " " + midWFile + " 2> " + redirFile,
                                Gnd.i.ffmpegBinDir
                                );
                        }
                    }
                    Utils.textFileToLog(redirFile, Encoding.ASCII);

                    if (File.Exists(midWFile) == false)
                    {
                        throw new Exception("midWFile does not exist");
                    }

                    try
                    {
                        File.Copy(midWFile, wFile, true);

                        if (File.Exists(wFile) == false)
                        {
                            throw new Exception();
                        }
                    }
                    catch
                    {
                        throw new Exception("ファイルの出力に失敗しました。(変換した)");
                    }
                }
                Gnd.i.convReturn.wFile = wFile;
            }
            if (_videoStream != null)
            {
                Gnd.i.convReturn.w = _videoStream.w;
                Gnd.i.convReturn.h = _videoStream.h;
            }
        }
コード例 #43
ファイル: frm_Main.cs プロジェクト: ob-tim-liu/NiWrapper.Net
        private void CurrentSensorOnNewFrame(VideoStream videoStream)
        {
            if (videoStream.IsValid && videoStream.IsFrameAvailable())
            {
                using (VideoFrameRef frame = videoStream.ReadFrame())
                {
                    if (frame.IsValid)
                    {
                        VideoFrameRef.CopyBitmapOptions options = VideoFrameRef.CopyBitmapOptions.Force24BitRgb
                                                                  | VideoFrameRef.CopyBitmapOptions.DepthFillShadow;
                        if (this.cb_invert.Checked)
                        {
                            options |= VideoFrameRef.CopyBitmapOptions.DepthInvert;
                        }

                        if (this.cb_equal.Checked)
                        {
                            options |= VideoFrameRef.CopyBitmapOptions.DepthHistogramEqualize;
                        }

                        if (this.cb_fill.Checked)
                        {
                            options |= videoStream.Mirroring
                                           ? VideoFrameRef.CopyBitmapOptions.DepthFillRigthBlack
                                           : VideoFrameRef.CopyBitmapOptions.DepthFillLeftBlack;
                        }

                        lock (this.bitmap)
                        {
                            /////////////////////// Instead of creating a new bitmap object for each frame, you can simply
                            /////////////////////// update the one you already have. Please note that you must be very
                            /////////////////////// careful in multi-threaded situations.
                            try
                            {
                                frame.UpdateBitmap(this.bitmap, options);
                            }
                            catch (Exception)
                            {
                                // Happens when our Bitmap object is not compatible with returned Frame
                                this.bitmap = frame.ToBitmap(options);
                            }

                            /////////////////////// END NOTE

                            /////////////////////// You can always use ToBitmap() if you don't want to
                            /////////////////////// clone the image later and want to stay safe in multi-threaded situations.
                            /////////////////////// This is a little slower, but easier to handle.
                            // bitmap = frame.ToBitmap(options);
                            /////////////////////// END NOTE
                            if (this.cb_mirrorSoft.Checked)
                            {
                                this.bitmap.RotateFlip(RotateFlipType.RotateNoneFlipX);
                            }
                        }

                        ///////////////////// You can simply pass the newly created/updated image to a
                        ///////////////////// PictureBox right here instead of drawing it with a Graphics object
                        // this.BeginInvoke(new MethodInvoker(delegate()
                        // {
                        // if (!pb_image.Visible)
                        // pb_image.Visible = true;
                        // if (bitmap == null)
                        // return;
                        // lock (bitmap) // this.BeginInvoke runs on the UI thread, so it is better to always keep this lock in place
                        // {
                        // if (pb_image.Image != null)
                        // pb_image.Image.Dispose();

                        // /////////////////////// If you want to use one bitmap object for all frames, the
                        // /////////////////////// best way to prevent multi-threaded access problems
                        // /////////////////////// is to clone the bitmap each time you send it to the PictureBox
                        // pb_image.Image = new Bitmap(bitmap, bitmap.Size);
                        // /////////////////////// END NOTE

                        // /////////////////////// If you only use the ToBitmap() method, you can simply skip the
                        // /////////////////////// cloning process. It is perfectly thread-safe.
                        // //pb_image.Image = bitmap;
                        // /////////////////////// END NOTE

                        // pb_image.Refresh();
                        // }
                        // }));
                        ///////////////////// END NOTE
                        if (!this.pb_image.Visible)
                        {
                            this.Invalidate();
                        }
                    }
                }
            }
        }
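
The handler above matches a per-stream new-frame callback. A minimal hookup sketch, assuming (not confirmed here) that NiWrapper.Net exposes a static VideoStream.Create factory and an OnNewFrame event with this handler's signature:

        // Hypothetical wiring; device and sensor setup are omitted.
        VideoStream depthStream = VideoStream.Create(currentDevice, Device.SensorType.Depth);
        depthStream.OnNewFrame += this.CurrentSensorOnNewFrame; // assumed event name
        depthStream.Start();                                    // frames then arrive on a worker thread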
コード例 #44
ファイル: SaveVideo.cs プロジェクト: marlonnn/Demo.Frames
 public SaveVideo(GrabFrames grabFrames, AviManager aviManager)
 {
     this._grabFrames = grabFrames;
     this._aviManager = aviManager;
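     // AddVideoStream(isCompressed, frameRate, firstFrame): the first grabbed frame
     // fixes the AVI stream's size and format; false presumably means uncompressed output.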
     this._aviStream  = aviManager.AddVideoStream(false, this._grabFrames.MediaInfo.FPS, (Bitmap)this._grabFrames.Frames[0].Image);
 }
コード例 #45
		public void StartConference(ViewGroup videoContainer, Action<Exception> callback)
		{           
			try {                   
				var localVideoControl = LocalMedia.LocalVideoControl;
				var layoutManager = new AndroidLayoutManager (videoContainer);					                   
				layoutManager.SetLocalVideoControl (localVideoControl);					                   
				var videoStream = new VideoStream (LocalMedia.LocalStream);
				videoStream.OnLinkInit += (e) => {
					var remoteVideoControl = e.Link.GetRemoteVideoControl ();
					layoutManager.AddRemoteVideoControl (e.PeerId, remoteVideoControl);
				};
				videoStream.OnLinkDown += (e) => {
					layoutManager.RemoveRemoteVideoControl (e.PeerId);
				};

				// Create a conference using our stream descriptions.
				Conference = new FM.IceLink.Conference (videoStream);                
				Conference.MaxLinks = 1;

				Signalling.Attach (Conference, (ex) => {
					if (ex != null) {
						ex = new Exception ("Could not attach signalling to conference.", ex);
						LastStartConferenceException = ex;
					}

					callback (ex);
				});
			} catch (Exception ex) {
				LastStartConferenceException = ex;
				callback (ex);
			}          
        
		}
コード例 #46
ファイル: Conv.cs プロジェクト: stackprobe/Kirara2
        private void doConv()
        {
            addOperation(delegate
            {
                rExt = Path.GetExtension(_rFile);

                if (Gnd.i.audioVideoExtensions.contains(rExt) == false)
                {
                    throw new Exception("再生可能なファイルではありません。(不明な拡張子)");
                }

                midRFile = _wd.makePath() + rExt;

                {
                    long rFileSize = -1;

                    try
                    {
                        rFileSize = new FileInfo(_rFile).Length;
                    }
                    catch
                    { }

                    if (Gnd.i.rFileSizeMax_MB * 1000000L < rFileSize)
                    {
                        throw new Exception("ファイルが大きすぎます。");
                    }
                }

                {
                    long diskFree = new DriveInfo(FileTools.getTMP().Substring(0, 1)).AvailableFreeSpace;

                    if (diskFree < Gnd.i.keepDiskFree_MB * 1000000L)
                    {
                        throw new Exception("ディスクの空き領域が不足しています。");
                    }
                }

                runCopyFile(_rFile, midRFile, "ファイルにアクセス出来ません。");
            });
            addOperation(delegate
            {
                redirFile = _wd.makePath();

                runCommand("ffprobe.exe " + midRFile + " 2> " + redirFile, Gnd.i.ffmpeg.binDir);
            });
            addOperation(delegate
            {
                foreach (string line in FileTools.readAllLines(redirFile, Encoding.ASCII))
                {
                    if (line.Contains("Duration:"))
                    {
                        _duration = new Duration();

                        List <string> tokens = StringTools.tokenize(line, " :.,", false, true);

                        if (tokens[1] == "N/A")
                        {
                            throw new Exception("再生可能なファイルではありません。(Duration)");
                        }

                        int h = int.Parse(tokens[1]);
                        int m = int.Parse(tokens[2]);
                        int s = int.Parse(tokens[3]);

                        int sec = h * 3600 + m * 60 + s;

                        if (sec < 1)
                        {
                            throw new Exception("映像又は曲の長さが短すぎます。");
                        }

                        if (IntTools.IMAX < sec)
                        {
                            throw new Exception("映像又は曲の長さが長すぎます。");
                        }

                        _duration.secLength = sec;
                    }
                    else if (_audioStream == null && line.Contains("Stream") && line.Contains("Audio:"))
                    {
                        _audioStream = new AudioStream();

                        List <string> tokens = StringTools.tokenize(line, StringTools.DIGIT, true, true);

                        _audioStream.mapIndex = int.Parse(tokens[1]);
                    }
                    else if (_videoStream == null && line.Contains("Stream") && line.Contains("Video:"))
                    {
                        _videoStream = new VideoStream();

                        {
                            List <string> tokens = StringTools.tokenize(line, StringTools.DIGIT, true, true);

                            _videoStream.mapIndex = int.Parse(tokens[1]);
                        }

                        {
                            List <string> tokens = StringTools.tokenize(line, " ,");

                            foreach (string token in tokens)
                            {
                                if (StringTools.toDigitFormat(token, true) == "9x9")
                                {
                                    List <string> whTokens = StringTools.tokenize(token, "x");

                                    _videoStream.w = int.Parse(whTokens[0]);
                                    _videoStream.h = int.Parse(whTokens[1]);
                                }
                            }
                        }

                        if (_videoStream.w < Consts.VIDEO_W_MIN)
                        {
                            throw new Exception("映像の幅が小さすぎます。");
                        }

                        if (_videoStream.h < Consts.VIDEO_H_MIN)
                        {
                            throw new Exception("映像の高さが小さすぎます。");
                        }

                        if (IntTools.IMAX < _videoStream.w)
                        {
                            throw new Exception("映像の幅が大きすぎます。");
                        }

                        if (IntTools.IMAX < _videoStream.h)
                        {
                            throw new Exception("映像の高さが大きすぎます。");
                        }
                    }
                }
                if (_duration == null)
                {
                    throw new Exception("fatal: ffprobe _duration null");
                }

                if (_audioStream == null)
                {
                    throw new Exception("再生可能なファイルではありません。(音声ストリームがありません)");
                }

                if (_duration.secLength < 3)
                {
                    throw new Exception("再生時間が短すぎます。");
                }

                if (_videoStream == null)
                {
                    _type = Consts.MediaType_e.AUDIO;
                }
                else
                {
                    _type = Consts.MediaType_e.MOVIE;
                }

                wFile = _wFileNoExt + (_type == Consts.MediaType_e.AUDIO ? ".ogg" : ".ogv");

                if (StringTools.equalsIgnoreCase(rExt, Path.GetExtension(wFile)))
                {
                    doConvMode = false;
                    midWFile   = midRFile;
                }
                else
                {
                    doConvMode = true;
                    midWFile   = _wd.makePath() + Path.GetExtension(wFile);
                }
            });
            if (Gnd.i.convWavMastering)
            {
                addOperation(delegate
                {
                    if (doConvMode)
                    {
                        wavFile = _wd.makePath() + ".wav";

                        runCommand(
                            "ffmpeg.exe -i " + midRFile + " -map 0:" + _audioStream.mapIndex + " -ac 2 " + wavFile,
                            Gnd.i.ffmpeg.binDir
                            );
                    }
                });
                addOperation(delegate
                {
                    if (doConvMode)
                    {
                        if (File.Exists(wavFile) == false)
                        {
                            throw new Exception("音声ストリームの抽出に失敗しました。");
                        }

                        wavFileNew = _wd.makePath() + ".wav";

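                        // Master.exe is the external WAV mastering tool configured under
                        // Gnd.i.wavMaster; "0001.txt" is presumably its parameter file.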
                        runCommand(
                            "Master.exe " + wavFile + " " + wavFileNew + " 0001.txt > " + redirFile,
                            Gnd.i.wavMaster.binDir
                            );
                    }
                });
                addOperation(delegate
                {
                    if (doConvMode)
                    {
                        reportToLog(redirFile, StringTools.ENCODING_SJIS);

                        if (File.Exists(wavFileNew) == false)                         // ? Volume equalization turned out to be unnecessary.
                        {
                            Gnd.i.logger.writeLine("wavFileNew <- wavFile");
                            wavFileNew = wavFile;
                        }
                        if (_type == Consts.MediaType_e.AUDIO)
                        {
                            runCommand(
                                "ffmpeg.exe -i " + wavFileNew + " -map 0:0 " + Gnd.i.ffmpegOptAudio + " " + midWFile + " 2> " + redirFile,
                                Gnd.i.ffmpeg.binDir
                                );
                        }
                        else
                        {
                            runCommand(
                                "ffmpeg.exe -i " + midRFile + " -i " + wavFileNew + " -map 0:" + _videoStream.mapIndex + " -map 1:0 " + Gnd.i.ffmpegOptVideo + " " + Gnd.i.ffmpegOptAudio + " " + midWFile + " 2> " + redirFile,
                                Gnd.i.ffmpeg.binDir
                                );
                        }
                    }
                });
            }
            else
            {
                addOperation(delegate
                {
                    if (doConvMode)
                    {
                        if (_type == Consts.MediaType_e.AUDIO)
                        {
                            runCommand(
                                "ffmpeg.exe -i " + midRFile + " -map 0:" + _audioStream.mapIndex + " " + Gnd.i.ffmpegOptAudio + " " + midWFile + " 2> " + redirFile,
                                Gnd.i.ffmpeg.binDir
                                );
                        }
                        else
                        {
                            runCommand(
                                "ffmpeg.exe -i " + midRFile + " -map 0:" + _videoStream.mapIndex + " -map 0:" + _audioStream.mapIndex + " " + Gnd.i.ffmpegOptVideo + " " + Gnd.i.ffmpegOptAudio + " " + midWFile + " 2> " + redirFile,
                                Gnd.i.ffmpeg.binDir
                                );
                        }
                    }
                });
            }
            addOperation(delegate
            {
                if (doConvMode)
                {
                    reportToLog(redirFile, Encoding.ASCII);
                }
            });
            addOperation(delegate
            {
                try
                {
                    string dir = Path.GetDirectoryName(wFile);

                    Gnd.i.logger.writeLine("dir: " + dir);

                    if (Directory.Exists(dir) == false)
                    {
                        Directory.CreateDirectory(dir);
                    }
                }
                catch
                {
                    throw new Exception("ファイルを出力できません。(フォルダ作成失敗)");
                }

                runCopyFile(midWFile, wFile, "ファイルを出力できません。");
            });
        }
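
doConv does not run the conversion inline; each stage is queued as a delegate via addOperation, so a worker can presumably execute, log, and abort the stages one at a time. A minimal sketch of such a staged-operation queue, with hypothetical names (not the project's actual type):

        using System;
        using System.Collections.Generic;

        class OperationQueue
        {
            private readonly Queue<Action> ops = new Queue<Action>();

            public void addOperation(Action op) => ops.Enqueue(op);

            // Drain the queue in order; a thrown exception aborts the remaining stages.
            public void runAll()
            {
                while (ops.Count > 0)
                    ops.Dequeue()();
            }
        }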
コード例 #47
    public Job CreateJobWithAnimatedOverlay(
        string projectId, string location, string inputUri, string overlayImageUri, string outputUri)
    {
        // Create the client.
        TranscoderServiceClient client = TranscoderServiceClient.Create();

        // Build the parent location name.
        LocationName parent = new LocationName(projectId, location);

        // Build the job config.
        VideoStream videoStream0 = new VideoStream
        {
            H264 = new VideoStream.Types.H264CodecSettings
            {
                BitrateBps   = 550000,
                FrameRate    = 60,
                HeightPixels = 360,
                WidthPixels  = 640
            }
        };

        AudioStream audioStream0 = new AudioStream
        {
            Codec      = "aac",
            BitrateBps = 64000
        };

        // Create the overlay image. Only JPEG is supported. Image resolution is based on output
        // video resolution. This example uses the values x: 0 and y: 0 to maintain the original
        // resolution of the overlay image.
        Overlay.Types.Image overlayImage = new Overlay.Types.Image
        {
            Uri        = overlayImageUri,
            Alpha      = 1,
            Resolution = new Overlay.Types.NormalizedCoordinate
            {
                X = 0,
                Y = 0
            }
        };

        // Create the starting animation (when the overlay starts to fade in). Use the values x: 0.5
        // and y: 0.5 to position the top-left corner of the overlay in the center of the output
        // video.
        Overlay.Types.Animation animationFadeIn = new Overlay.Types.Animation
        {
            AnimationFade = new Overlay.Types.AnimationFade
            {
                FadeType = Overlay.Types.FadeType.FadeIn,
                Xy       = new Overlay.Types.NormalizedCoordinate
                {
                    X = 0.5,
                    Y = 0.5
                },
                StartTimeOffset = Duration.FromTimeSpan(TimeSpan.FromSeconds(5)),
                EndTimeOffset   = Duration.FromTimeSpan(TimeSpan.FromSeconds(10))
            }
        };

        // Create the ending animation (when the overlay starts to fade out). The overlay will start
        // to fade out at the 12-second mark in the output video.
        Overlay.Types.Animation animationFadeOut = new Overlay.Types.Animation
        {
            AnimationFade = new Overlay.Types.AnimationFade
            {
                FadeType = Overlay.Types.FadeType.FadeOut,
                Xy       = new Overlay.Types.NormalizedCoordinate
                {
                    X = 0.5,
                    Y = 0.5
                },
                StartTimeOffset = Duration.FromTimeSpan(TimeSpan.FromSeconds(12)),
                EndTimeOffset   = Duration.FromTimeSpan(TimeSpan.FromSeconds(15))
            }
        };

        // Create the overlay and add the image and animations to it.
        Overlay overlay = new Overlay
        {
            Image      = overlayImage,
            Animations = { animationFadeIn, animationFadeOut }
        };

        ElementaryStream elementaryStream0 = new ElementaryStream
        {
            Key         = "video_stream0",
            VideoStream = videoStream0
        };

        ElementaryStream elementaryStream1 = new ElementaryStream
        {
            Key         = "audio_stream0",
            AudioStream = audioStream0
        };

        MuxStream muxStream0 = new MuxStream
        {
            Key               = "sd",
            Container         = "mp4",
            ElementaryStreams = { "video_stream0", "audio_stream0" }
        };

        Input input = new Input
        {
            Key = "input0",
            Uri = inputUri
        };

        Output output = new Output
        {
            Uri = outputUri
        };

        JobConfig jobConfig = new JobConfig
        {
            Inputs            = { input },
            Output            = output,
            ElementaryStreams = { elementaryStream0, elementaryStream1 },
            MuxStreams        = { muxStream0 },
            Overlays          = { overlay }
        };

        // Build the job.
        Job newJob = new Job
        {
            InputUri  = inputUri,
            OutputUri = outputUri,
            Config    = jobConfig
        };

        // Call the API.
        Job job = client.CreateJob(parent, newJob);

        // Return the result.
        return job;
    }
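
A call-site sketch for the method above; the project, location, and Cloud Storage URIs are placeholders:

        // Hypothetical invocation (all arguments are placeholders).
        Job job = CreateJobWithAnimatedOverlay(
            "my-project", "us-central1",
            "gs://my-bucket/input.mp4",
            "gs://my-bucket/overlay.jpg",
            "gs://my-bucket/output/");
        Console.WriteLine("Created job: " + job.Name); // the service assigns the job name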
コード例 #48
    public Job CreateJobWithStaticOverlay(
        string projectId, string location, string inputUri, string overlayImageUri, string outputUri)
    {
        // Create the client.
        TranscoderServiceClient client = TranscoderServiceClient.Create();

        // Build the parent location name.
        LocationName parent = new LocationName(projectId, location);

        // Build the job config.
        VideoStream videoStream0 = new VideoStream
        {
            H264 = new VideoStream.Types.H264CodecSettings
            {
                BitrateBps   = 550000,
                FrameRate    = 60,
                HeightPixels = 360,
                WidthPixels  = 640
            }
        };

        AudioStream audioStream0 = new AudioStream
        {
            Codec      = "aac",
            BitrateBps = 64000
        };

        // Create the overlay image. Only JPEG is supported. Image resolution is based on output
        // video resolution. To respect the original image aspect ratio, set either x or y to 0.0.
        // This example stretches the overlay image the full width and half of the height of the
        // output video.
        Overlay.Types.Image overlayImage = new Overlay.Types.Image
        {
            Uri        = overlayImageUri,
            Alpha      = 1,
            Resolution = new Overlay.Types.NormalizedCoordinate
            {
                X = 1,
                Y = 0.5
            }
        };

        // Create the starting animation (when the overlay appears). Use the values x: 0 and y: 0 to
        // position the top-left corner of the overlay in the top-left corner of the output video.
        Overlay.Types.Animation animationStart = new Overlay.Types.Animation
        {
            AnimationStatic = new Overlay.Types.AnimationStatic
            {
                Xy = new Overlay.Types.NormalizedCoordinate
                {
                    X = 0,
                    Y = 0
                },
                StartTimeOffset = Duration.FromTimeSpan(TimeSpan.FromSeconds(0))
            }
        };


        // Create the ending animation (when the overlay disappears). In this example, the overlay
        // disappears at the 10-second mark in the output video.
        Overlay.Types.Animation animationEnd = new Overlay.Types.Animation
        {
            AnimationEnd = new Overlay.Types.AnimationEnd
            {
                StartTimeOffset = Duration.FromTimeSpan(TimeSpan.FromSeconds(10))
            }
        };

        // Create the overlay and add the image and animations to it.
        Overlay overlay = new Overlay
        {
            Image      = overlayImage,
            Animations = { animationStart, animationEnd }
        };

        ElementaryStream elementaryStream0 = new ElementaryStream
        {
            Key         = "video_stream0",
            VideoStream = videoStream0
        };

        ElementaryStream elementaryStream1 = new ElementaryStream
        {
            Key         = "audio_stream0",
            AudioStream = audioStream0
        };

        MuxStream muxStream0 = new MuxStream
        {
            Key               = "sd",
            Container         = "mp4",
            ElementaryStreams = { "video_stream0", "audio_stream0" }
        };

        Input input = new Input
        {
            Key = "input0",
            Uri = inputUri
        };

        Output output = new Output
        {
            Uri = outputUri
        };

        JobConfig jobConfig = new JobConfig
        {
            Inputs            = { input },
            Output            = output,
            ElementaryStreams = { elementaryStream0, elementaryStream1 },
            MuxStreams        = { muxStream0 },
            Overlays          = { overlay }
        };

        // Build the job.
        Job newJob = new Job
        {
            InputUri  = inputUri,
            OutputUri = outputUri,
            Config    = jobConfig
        };

        // Call the API.
        Job job = client.CreateJob(parent, newJob);

        // Return the result.
        return job;
    }
コード例 #49
        /// <summary>
        /// Gets the video stream information and fills it into the given VideoPage.
        /// </summary>
        /// <param name="page"></param>
        public void GetVideoStream(VideoPage page)
        {
            PlayUrl playUrl = VideoStream.GetBangumiPlayUrl(page.Avid, page.Bvid, page.Cid);

            Utils.VideoPageInfo(playUrl, page);
        }
コード例 #50
        /// <summary> buffer callback, COULD BE FROM FOREIGN THREAD. </summary>
        int ISampleGrabberCB.BufferCB(double SampleTime, IntPtr pBuffer, int BufferLen)
        {
            Console.WriteLine("BufferCB " + DateTime.Now.Millisecond + " pbtime " + SampleTime);
            framecount++;

            videopos = startlogtime.AddSeconds(SampleTime);

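            // NOTE: per the summary above this callback can arrive on a foreign thread;
            // setting trackBar_mediapos.Value here is not thread-safe and would normally
            // be marshalled to the UI thread (e.g. via BeginInvoke).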
            trackBar_mediapos.Value = (int)SampleTime;

            // The buffer must be large enough to hold a complete frame
            if (BufferLen >= m_stride * m_videoHeight)
            {
                // Copy the frame to the buffer
                // CopyMemory(m_handle, pBuffer, m_stride * m_videoHeight);
                m_handle = pBuffer;
            }
            else
            {
                throw new Exception("Buffer is wrong size");
            }

            try
            {
                Console.WriteLine("1 " + DateTime.Now.Millisecond);
                //ip = this.GetBitMap();
                image = new Bitmap(m_videoWidth, m_videoHeight, m_stride, PixelFormat.Format24bppRgb, m_handle);
                Console.WriteLine("1a " + DateTime.Now.Millisecond);

                image.RotateFlip(RotateFlipType.RotateNoneFlipY);

                Console.WriteLine("1b " + DateTime.Now.Millisecond);


                hud1.HoldInvalidation = true;
                hud1.opengl           = true;
                hud1.bgimage          = image;
                hud1.streamjpgenable  = true;
                if (fullresolution)
                {
                    hud1.Width  = image.Width;
                    hud1.Height = image.Height;
                }

                Console.WriteLine("1c " + DateTime.Now.Millisecond);

                hud1.Refresh();

                Console.WriteLine("1d " + DateTime.Now.Millisecond);

                Bitmap bmp = (Bitmap)hud1.objBitmap.Clone();

                //  bmp.Save(framecount+".bmp");

                Console.WriteLine("1e " + DateTime.Now.Millisecond);

                if (newStream == null)
                {
                    //double frate = GetFrameRate(txtAviFileName.Text);

                    double frate = Math.Round(10000000.0 / m_avgtimeperframe, 0);

                    newStream = newManager.AddVideoStream(true, frate, bmp);
                }

                Console.WriteLine("2 " + DateTime.Now.Millisecond);

                addframe(bmp);
                lock (avienclock)
                {
                    //    System.Threading.ThreadPool.QueueUserWorkItem(addframe, bmp);
                }


                Console.WriteLine("3 " + DateTime.Now.Millisecond);
            }
            //System.Windows.Forms.CustomMessageBox.Show("Problem with capture device, grabbing frame took longer than 5 sec");
            catch (Exception ex)
            {
                Console.WriteLine("Grab bmp failed " + ex.ToString());
            }


            return 0;
        }
コード例 #51
0
        private void AssignDlnaMetadata(MetadataContainer info, int edition)
        {
            if (info == null)
            {
                return;
            }

            List <string> profileList = new List <string>();

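            // No transcoding requested: expose the edition's original stream metadata unchanged.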
            if (TranscodingParameter == null)
            {
                Metadata  = info.Metadata[edition];
                Video     = info.Video[edition];
                Audio     = info.Audio[edition];
                Image     = info.Image[edition];
                Subtitles = info.Subtitles[edition];
            }
            else
            {
                if (IsImage)
                {
                    ImageTranscoding        image    = (ImageTranscoding)TranscodingParameter;
                    TranscodedImageMetadata metadata = MediaConverter.GetTranscodedImageMetadata(image);
                    Metadata = new MetadataStream
                    {
                        Mime = info.Metadata[edition].Mime,
                        ImageContainerType = metadata.TargetImageCodec,
                        Size = Client.EstimateTransodedSize ? info.Metadata[edition].Size : 0,
                    };
                    Image = new ImageStream
                    {
                        Height          = metadata.TargetMaxHeight,
                        Orientation     = metadata.TargetOrientation,
                        PixelFormatType = metadata.TargetPixelFormat,
                        Width           = metadata.TargetMaxWidth
                    };
                }
                else if (IsAudio)
                {
                    AudioTranscoding        audio    = (AudioTranscoding)TranscodingParameter;
                    TranscodedAudioMetadata metadata = MediaConverter.GetTranscodedAudioMetadata(audio);
                    Metadata = new MetadataStream
                    {
                        Mime = info.Metadata[edition].Mime,
                        AudioContainerType = metadata.TargetAudioContainer,
                        Bitrate            = metadata.TargetAudioBitrate > 0 ? metadata.TargetAudioBitrate : null,
                        Duration           = info.Metadata[edition].Duration,
                        Size = Client.EstimateTransodedSize ? (metadata.TargetAudioBitrate > 0 ? Convert.ToInt64((metadata.TargetAudioBitrate * 1024 * info.Metadata[edition].Duration) / 8.0) : (long?)null) : null,
                    };
                    AudioStream audioStream = new AudioStream();
                    audioStream.Bitrate   = metadata.TargetAudioBitrate;
                    audioStream.Channels  = metadata.TargetAudioChannels;
                    audioStream.Codec     = metadata.TargetAudioCodec;
                    audioStream.Frequency = metadata.TargetAudioFrequency;
                    Audio = new List <AudioStream> {
                        audioStream
                    };
                }
                else if (IsVideo)
                {
                    VideoTranscoding        video    = (VideoTranscoding)TranscodingParameter;
                    TranscodedVideoMetadata metadata = MediaConverter.GetTranscodedVideoMetadata(video);
                    Metadata = new MetadataStream
                    {
                        Mime = info.Metadata[edition].Mime,
                        VideoContainerType = metadata.TargetVideoContainer,
                        Bitrate            = metadata.TargetAudioBitrate > 0 && metadata.TargetVideoBitrate > 0 ? metadata.TargetAudioBitrate + metadata.TargetVideoBitrate : null,
                        Duration           = info.Metadata[edition].Duration,
                        Size = Client.EstimateTransodedSize ? (metadata.TargetAudioBitrate > 0 && info.Metadata[edition].Duration > 0 ? Convert.ToInt64((metadata.TargetAudioBitrate * 1024 * info.Metadata[edition].Duration) / 8.0) : (long?)null) : null,
                    };
                    Video = new VideoStream
                    {
                        AspectRatio      = metadata.TargetVideoAspectRatio,
                        Bitrate          = metadata.TargetVideoBitrate,
                        Codec            = metadata.TargetVideoCodec,
                        Framerate        = metadata.TargetVideoFrameRate,
                        HeaderLevel      = metadata.TargetLevel,
                        ProfileType      = metadata.TargetProfile,
                        RefLevel         = metadata.TargetLevel,
                        Height           = metadata.TargetVideoMaxHeight,
                        PixelAspectRatio = metadata.TargetVideoPixelAspectRatio,
                        PixelFormatType  = metadata.TargetVideoPixelFormat,
                        TimestampType    = metadata.TargetVideoTimestamp,
                        Width            = metadata.TargetVideoMaxWidth,
                    };
                    AudioStream audioStream = new AudioStream();
                    audioStream.Bitrate   = metadata.TargetAudioBitrate;
                    audioStream.Channels  = metadata.TargetAudioChannels;
                    audioStream.Codec     = metadata.TargetAudioCodec;
                    audioStream.Frequency = metadata.TargetAudioFrequency;
                    Audio = new List <AudioStream> {
                        audioStream
                    };
                }
            }

            if (IsImage)
            {
                profileList = DlnaProfiles.ResolveImageProfile(Metadata.ImageContainerType, Image.Width, Image.Height);
            }
            else if (IsAudio)
            {
                var audio = Audio.FirstOrDefault();
                profileList = DlnaProfiles.ResolveAudioProfile(Metadata.AudioContainerType, audio?.Codec ?? AudioCodec.Unknown, audio?.Bitrate, audio?.Frequency, audio?.Channels);
            }
            else if (IsVideo)
            {
                var audio = Audio.FirstOrDefault();
                profileList = DlnaProfiles.ResolveVideoProfile(Metadata.VideoContainerType, Video.Codec, audio?.Codec ?? AudioCodec.Unknown, Video.ProfileType, Video.HeaderLevel,
                                                               Video.Framerate, Video.Width, Video.Height, Video.Bitrate, audio?.Bitrate, Video.TimestampType);
            }

            string profile = "";
            string mime    = info.Metadata[edition].Mime;

            if (DlnaProfiles.TryFindCompatibleProfile(Client, profileList, ref profile, ref mime))
            {
                DlnaMime    = mime;
                DlnaProfile = profile;
            }
        }
コード例 #52
 public static void LoadVideos(VideoStream stream)
 {
     stream.AddVideo(new Video
     {
         Id       = "001",
         Name     = "whatever",
         Category = { "friends", "chill", "sunday", "enthralling" },
         Genre    = { "action", "drama", "police" },
         Actors   = { "brad", "pitt", "johnny", "deep" }
     });
     stream.AddVideo(new Video
     {
         Id       = "002",
         Name     = "whatever2",
         Category = { "friends", "chill", "sunday", "enthralling" },
         Genre    = { "action", "comedy" },
         Actors   = { "brad", "johnny", "deep", "mother" }
     });
     stream.AddVideo(new Video
     {
         Id       = "003",
         Name     = "whatever4",
         Category = { "friends", "sunday", "enthralling" },
         Genre    = { "action", "drama", "police" },
         Actors   = { "brad", "pitt", "johnny", "deep" }
     });
     stream.AddVideo(new Video
     {
         Id       = "004",
         Name     = "whatever",
         Category = { "friends", "chill", "sunday", "enthralling" },
         Genre    = { "action", "drama", "police" },
         Actors   = { "brad", "pitt", "johnny", "deep" }
     });
     stream.AddVideo(new Video
     {
         Id       = "005",
         Name     = "whatever55",
         Category = { "some" },
         Genre    = { "action", "drama", "police" },
         Actors   = { "brad", "pitt", "johnny", "deep" }
     });
     stream.AddVideo(new Video
     {
         Id       = "006",
         Name     = "whatever9",
         Category = { "friends", "chill", "sunday", "enthralling" },
         Genre    = { "blah" },
         Actors   = { "brad", "pitt", "johnny", "deep" }
     });
     stream.AddVideo(new Video
     {
         Id       = "007",
         Name     = "whatever",
         Category = { "friends", "chill", "sunday", "enthralling" },
         Genre    = { "action", "drama", "police" },
         Actors   = { "brad", "pitt", "johnny", "deep" }
     });
     stream.AddVideo(new Video
     {
         Id       = "008",
         Name     = "whatever",
         Category = { "friends", "chill", "sunday", "enthralling" },
         Genre    = { "action", "drama", "police" },
         Actors   = { "brad", "pitt", "johnny", "deep" }
     });
 }
コード例 #53
ファイル: CallActivity.cs プロジェクト: clburns/MICE
        private void SetupCall()
        {
            Signalling signalling = new Signalling(Constants.WEB_SYNC_SERVER);
            signalling.Start((error) =>
            {
                if (error != null)
                {
                    // TODO: Handle Errors
                }
            });

            LocalMedia = new LocalMedia();
            LocalMedia.Start(Container, (error) =>
            {
                if (error != null)
                {
                    //TODO: Handle Errors
                }
            });

            var audioStream = new AudioStream(LocalMedia.LocalMediaStream);
            var videoStream = new VideoStream(LocalMedia.LocalMediaStream);
            var conference = new Conference(Constants.ICE_LINK_ADDRESS, new Stream[]
                    {
                        audioStream,
                        videoStream
                    });
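            // Relay credentials are redacted ("******") in the published source.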
            conference.RelayUsername = "******";
            conference.RelayPassword = "******";

            signalling.Attach(conference, Constants.SESSION_ID, (error) =>
            {
                if (error != null)
                {
                    // TODO: Handle Errors
                }
            });
        }
コード例 #54
        partial void InitializeMediaImpl(string url, long startPosition, long length, ref bool succeeded)
        {
            succeeded = false;

            if (media != null)
            {
                throw new InvalidOperationException();
            }

            try
            {
                // Create and open the media
                media = new FFmpegMedia(GraphicsDevice);
                media.DisposeBy(this);
                media.Open(url, startPosition, length);
                // Get the first video stream
                stream = media.Streams.OfType <VideoStream>().FirstOrDefault();
                if (stream == null)
                {
                    ReleaseMedia();
                    Duration = TimeSpan.Zero;
                    Logger.Warning("This media doesn't contain a video stream.");
                    return;
                }

                Duration = stream.Duration;
                AllocateVideoTexture(stream.Width, stream.Height);
            }
            catch
            {
                ReleaseMedia();
                return;
            }

            succeeded = true;
        }