Example #1
        private void okButton_Click(object sender, EventArgs e)
        {
            VideoCodec vid = new VideoCodec();
            vid.nCodec = Codec.NO_CODEC;

            if (info.videoFmt.picFourCC != FourCC.FOURCC_NONE)
            {
                vid.nCodec = Codec.WEBM_VP8_CODEC;
                vid.webm_vp8.nRcTargetBitrate = 0;
            }
            if (!ttclient.StartStreamingMediaFileToChannel(textBox1.Text,
                                                           vid))
                MessageBox.Show("Failed to stream media file. Ensure media file can be played in WMP!");
        }
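Example #1 assumes the dialog already holds a MediaFileInfo named info for the file entered in textBox1. A minimal sketch of how that field might be populated, assuming a hypothetical browseButton_Click handler (only TeamTalk.GetMediaFileInfo() is from the SDK; the handler name and field initialization are illustrative):

        private MediaFileInfo info = new MediaFileInfo();

        private void browseButton_Click(object sender, EventArgs e)
        {
            // Query the media file's audio/video properties up front, so
            // okButton_Click can decide whether a video codec is needed.
            if (!TeamTalk.GetMediaFileInfo(textBox1.Text, ref info))
                MessageBox.Show("Cannot read media file information");
        }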
Example #2
        private void button1_Click(object sender, EventArgs e)
        {
            ClientFlag flags = ttclient.GetFlags();

            //Audio-tab
            if (flags.HasFlag(ClientFlag.CLIENT_SNDINOUTPUT_DUPLEX))
                ttclient.CloseSoundDuplexDevices();
            else
            {
                ttclient.CloseSoundInputDevice();
                ttclient.CloseSoundOutputDevice();
            }

            ItemData inputItem = (ItemData)sndinputComboBox.SelectedItem;
            ItemData outputItem = (ItemData)sndoutputComboBox.SelectedItem;
            settings.sndinputid = inputItem.id;
            settings.sndoutputid = outputItem.id;

            if (duplexCheckBox.Checked)
            {
                if (!ttclient.InitSoundDuplexDevices(settings.sndinputid, settings.sndoutputid))
                    MessageBox.Show("Failed to init sound devices");

                SpeexDSP spxdsp = new SpeexDSP(false);
                ttclient.GetSoundInputPreprocess(ref spxdsp);
                spxdsp.nEchoSuppress = SpeexDSPConstants.DEFAULT_ECHO_SUPPRESS;
                spxdsp.nEchoSuppressActive = SpeexDSPConstants.DEFAULT_ECHO_SUPPRESS_ACTIVE;
                spxdsp.bEnableEchoCancellation = echocancelCheckBox.Checked;
                ttclient.SetSoundInputPreprocess(spxdsp);
            }
            else
            {
                if (!ttclient.InitSoundInputDevice(settings.sndinputid))
                    MessageBox.Show("Failed to init sound input device");

                if (!ttclient.InitSoundOutputDevice(settings.sndoutputid))
                    MessageBox.Show("Failed to init sound output device");
            }

            if (wasapiRadioButton.Checked)
                settings.soundsystem = SoundSystem.SOUNDSYSTEM_WASAPI;
            else if (dsoundRadioButton.Checked)
                settings.soundsystem = SoundSystem.SOUNDSYSTEM_DSOUND;
            else if(winmmRadioButton.Checked)
                settings.soundsystem = SoundSystem.SOUNDSYSTEM_WINMM;

            //Video-tab
            if (viddevComboBox.Items.Count > 0)
            {
                VideoCodec codec = new VideoCodec();
                codec.nCodec = Codec.WEBM_VP8_CODEC;
                codec.webm_vp8.nRcTargetBitrate = (int)vidbitrateNumericUpDown.Value;
                codec.webm_vp8.nEncodeDeadline = WebMVP8CodecConstants.WEBM_VPX_DL_REALTIME;

                VideoFormat capformat = videodevs[viddevComboBox.SelectedIndex].videoFormats[formatComboBox.SelectedIndex];

                if (ttclient.Flags.HasFlag(ClientFlag.CLIENT_VIDEOCAPTURE_READY) &&
                   !(settings.videoid == videodevs[viddevComboBox.SelectedIndex].szDeviceID &&
                    Util.Equals(codec, settings.codec) &&
                    Util.Equals(capformat, settings.capformat)))
                    ttclient.CloseVideoCaptureDevice();

                settings.codec.nCodec = Codec.WEBM_VP8_CODEC;
                settings.codec.webm_vp8.nRcTargetBitrate = (int)vidbitrateNumericUpDown.Value;

                settings.videoid = videodevs[viddevComboBox.SelectedIndex].szDeviceID;
                settings.capformat = capformat;
                if (!ttclient.Flags.HasFlag(ClientFlag.CLIENT_VIDEOCAPTURE_READY))
                {
                    if(!ttclient.InitVideoCaptureDevice(settings.videoid, settings.capformat))
                        MessageBox.Show("Failed to initialize video capture device");
                }
            }

            //Advanced-tab
            if (fwCheckBox.Checked != WindowsFirewall.AppExceptionExists(Application.ExecutablePath))
            {
                if (fwCheckBox.Checked)
                    WindowsFirewall.AddAppException(Application.ProductName, Application.ExecutablePath);
                else
                    WindowsFirewall.RemoveAppException(Application.ExecutablePath);
            }
        }
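The sound-device combo boxes above hold ItemData entries that pair a device ID with a display name. A minimal sketch of such a helper, assuming only the id field used in Example #2 plus a display string (the name field and constructor are illustrative):

        class ItemData
        {
            public int id;      // sound device ID passed to InitSound*Device()
            public string name; // text shown in the combo box

            public ItemData(int id, string name)
            {
                this.id = id;
                this.name = name;
            }

            // ComboBox displays the result of ToString().
            public override string ToString() { return name; }
        }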
Example #3
 private void vidtxCheckBox_CheckedChanged(object sender, EventArgs e)
 {
     if (ttclient.Flags.HasFlag(ClientFlag.CLIENT_VIDEOCAPTURE_READY))
     {
         if(vidtxCheckBox.Checked)
         {
             VideoCodec vidcodec = new VideoCodec();
             vidcodec.nCodec = Codec.WEBM_VP8_CODEC;
             vidcodec.webm_vp8.nRcTargetBitrate = 0;
             ttclient.StartVideoCaptureTransmission(vidcodec);
         }
         else
             ttclient.StopVideoCaptureTransmission();
     }
     else
     {
         MessageBox.Show("Video device hasn't been configured in Preferences");
         vidtxCheckBox.Checked = false;
     }
     UpdateControls();
 }
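Once transmission is running, frames from the local capture device arrive as CLIENTEVENT_USER_VIDEOCAPTURE events with nSource 0, as Example #9 shows. A minimal sketch of displaying them, assuming the application's own message loop dispatches the TTMessage here and that a videoPictureBox control exists (both assumptions are illustrative):

 private void OnUserVideoCaptureFrame(TTMessage msg)
 {
     Bitmap bmp;
     VideoFrame frm = ttclient.AcquireUserVideoCaptureFrame(msg.nSource, out bmp);
     if (frm.nWidth > 0)
     {
         // Copy the pixels before releasing the frame; Example #9 keeps the
         // frame acquired until it is done with the Bitmap.
         Image old = videoPictureBox.Image;
         videoPictureBox.Image = new Bitmap(bmp);
         if (old != null)
             old.Dispose();
         ttclient.ReleaseUserVideoCaptureFrame(frm);
     }
 }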
Example #4
 /**
  * @brief Start transmitting from the video capture device.
  *
  * The video capture device is initialized by calling
  * TeamTalk.InitVideoCaptureDevice(). After joining a channel and
  * calling this function, other users will see the video from
  * the capture device.
  *
  * Video capture data is transmitted with stream type
  * ::STREAMTYPE_VIDEOCAPTURE and is subscribed/unsubscribed using
  * ::SUBSCRIBE_VIDEOCAPTURE.
  *
  * To stop transmitting, call TeamTalk.StopVideoCaptureTransmission().
  *
  * User rights required:
  * - ::USERRIGHT_TRANSMIT_VIDEOCAPTURE.
  *
  * @param lpVideoCodec The video codec settings to use for
  * transmission.
  *
  * @see TeamTalk.StartStreamingMediaFileToChannel()
  * @see TeamTalk.EnableVoiceTransmission() */
 public bool StartVideoCaptureTransmission(VideoCodec lpVideoCodec)
 {
     return TTDLL.TT_StartVideoCaptureTransmission(m_ttInst, ref lpVideoCodec);
 }
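A minimal sketch of the call order this comment describes, using only calls that appear elsewhere in these examples (error handling omitted; connecting, logging in and joining a channel with USERRIGHT_TRANSMIT_VIDEOCAPTURE are abbreviated):

 VideoCaptureDevice[] devs;
 TeamTalk.GetVideoCaptureDevices(out devs);
 ttclient.InitVideoCaptureDevice(devs[0].szDeviceID, devs[0].videoFormats[0]);

 // ... connect, log in and join a channel ...

 VideoCodec vidcodec = new VideoCodec();
 vidcodec.nCodec = Codec.WEBM_VP8_CODEC;
 vidcodec.webm_vp8.nRcTargetBitrate = 0; // 0 as in Example #3; Example #2 takes the value from the UI
 ttclient.StartVideoCaptureTransmission(vidcodec);

 // ... transmit for a while ...

 ttclient.StopVideoCaptureTransmission();
 ttclient.CloseVideoCaptureDevice();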
Example #5
        /**
          * @brief Get the raw RGB32 bitmap data of a user's video frame.
         *
         * @param nUserID The user's ID. 0 for local client instance.
         * @param lpPicture Reference to a bitmap which will receive the video frame.
          * If the @a lpPicture reference is null or not the proper size, a new Bitmap object
          * will be allocated and returned instead. Be sure to ALWAYS call with the
          * same @a lpPicture reference for each @a nUserID; otherwise a new Bitmap
          * object will be allocated on every call, causing the application to use a
          * significant amount of memory.
         * @see PaintVideoFrame */
        /*
        public bool GetUserVideoFrame(int nUserID,
                               ref System.Drawing.Bitmap lpPicture)
        {
            CaptureFormat cap;
            if (!TTDLL.TT_GetUserVideoFrame(m_ttInst, nUserID, IntPtr.Zero, 0, out cap))
                return false;

            PixelFormat pixelformat = PixelFormat.Format32bppRgb;

            if (lpPicture == null ||
               lpPicture.Width != cap.nWidth ||
               lpPicture.Height != cap.nHeight
        #if PocketPC
                )
        #else
               || lpPicture.PixelFormat != pixelformat)
        #endif
            {
                lpPicture = new Bitmap(cap.nWidth, cap.nHeight,
                                       pixelformat);
            }

            // Lock the bitmap's bits.
            System.Drawing.Rectangle rect = new System.Drawing.Rectangle(0, 0, lpPicture.Width, lpPicture.Height);
            BitmapData bmpData = lpPicture.LockBits(rect, ImageLockMode.ReadWrite, pixelformat);

            // Get the address of the first line.
            IntPtr ptr = bmpData.Scan0;

            bool b = TTDLL.TT_GetUserVideoFrame(m_ttInst, nUserID, ptr,
                                              cap.nWidth * cap.nHeight * 4, out cap);
            // Unlock the bits.
            lpPicture.UnlockBits(bmpData);
            return b;
        }
         * */
        /** @} */
        /** @addtogroup mediastream
         * @{ */
        /**
          * @brief Stream a media file to a channel, e.g. an AVI, WAV or MP3 file.
         *
         * Call TeamTalk.GetMediaFileInfo() to get the properties of a media
         * file, i.e. audio and video format.
         *
          * The event ::CLIENTEVENT_STREAM_MEDIAFILE is posted when
         * the media file starts streaming. The flags ::CLIENT_STREAM_AUDIO
         * and/or ::CLIENT_STREAM_VIDEO will be set if the call is successful.
         *
         * A media file is streamed using ::STREAMTYPE_MEDIAFILE_AUDIO
         * and/or ::STREAMTYPE_MEDIAFILE_VIDEO. To subscribe/unsubscribe a
         * media file being streamed use ::SUBSCRIBE_MEDIAFILE.
         *
         * Streaming a media file requires
         * ::USERRIGHT_TRANSMIT_MEDIAFILE_VIDEO and/or
         * ::USERRIGHT_TRANSMIT_MEDIAFILE_AUDIO.
         *
         * @param szMediaFilePath File path to media file.
          * @param lpVideoCodec If streaming a video file, specify the output codec
          * properties here. Specify #Codec ::NO_CODEC if video should be ignored.
         *
         * @see TeamTalk.StopStreamingMediaFileToChannel() */
        public bool StartStreamingMediaFileToChannel(string szMediaFilePath,
            VideoCodec lpVideoCodec)
        {
            return TTDLL.TT_StartStreamingMediaFileToChannel(m_ttInst, szMediaFilePath,
                                                           ref lpVideoCodec);
        }
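The comment above states that ::CLIENT_STREAM_AUDIO and/or ::CLIENT_STREAM_VIDEO are set once streaming starts. A minimal sketch of checking them after ::CLIENTEVENT_STREAM_MEDIAFILE, assuming the .NET ClientEvent and ClientFlag members mirror those names (the GetMessage loop follows Example #7):

        TTMessage msg = new TTMessage();
        while (ttclient.GetMessage(ref msg, 2000))
        {
            if (msg.nClientEvent == ClientEvent.CLIENTEVENT_STREAM_MEDIAFILE)
            {
                // If neither flag is set, the media file failed to start streaming.
                bool audio = ttclient.Flags.HasFlag(ClientFlag.CLIENT_STREAM_AUDIO);
                bool video = ttclient.Flags.HasFlag(ClientFlag.CLIENT_STREAM_VIDEO);
                break;
            }
        }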
Example #6
 public static bool Equals(VideoCodec c1, VideoCodec c2)
 {
     return c1.nCodec == c2.nCodec &&
         c1.webm_vp8.nRcTargetBitrate == c2.webm_vp8.nRcTargetBitrate;
 }
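Example #2 also compares capture formats with Util.Equals(capformat, settings.capformat). A minimal sketch of that overload, assuming the VideoFormat fields shown in Example #9 (resolution, FPS fraction and FourCC) are the ones that matter:

 public static bool Equals(VideoFormat f1, VideoFormat f2)
 {
     return f1.nWidth == f2.nWidth &&
         f1.nHeight == f2.nHeight &&
         f1.nFPS_Numerator == f2.nFPS_Numerator &&
         f1.nFPS_Denominator == f2.nFPS_Denominator &&
         f1.picFourCC == f2.picFourCC;
 }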
Example #7
        public void TestMediaStreamVideo()
        {
            const string USERNAME = "******", PASSWORD = "******"; string NICKNAME = "TeamTalk.NET - " + GetCurrentMethod();
            const UserRight USERRIGHTS = UserRight.USERRIGHT_TRANSMIT_MEDIAFILE_AUDIO | UserRight.USERRIGHT_TRANSMIT_MEDIAFILE_VIDEO;
            MakeUserAccount(GetCurrentMethod(), USERNAME, PASSWORD, USERRIGHTS);
            TeamTalk ttclient = NewClientInstance();

            InitSound(ttclient);
            Connect(ttclient);
            Login(ttclient, NICKNAME, USERNAME, PASSWORD);
            JoinRoot(ttclient);

            MediaFileInfo mf = new MediaFileInfo();

            Assert.IsTrue(TeamTalk.GetMediaFileInfo(MEDIAFILE_VIDEO, ref mf), "get media file info");
            Assert.IsTrue(mf.uDurationMSec > 0, "media file time");
            Assert.IsTrue(mf.audioFmt.nSampleRate > 0, "sample rate");
            Assert.IsTrue(mf.audioFmt.nChannels > 0, "channels");
            Assert.IsTrue(mf.videoFmt.nWidth > 0, "has video");

            VideoCodec vidcodec = new VideoCodec();
            vidcodec.nCodec = Codec.WEBM_VP8_CODEC;
            vidcodec.webm_vp8.nRcTargetBitrate = 0;

            Assert.IsTrue(ttclient.StartStreamingMediaFileToChannel(MEDIAFILE_VIDEO, vidcodec), "start stream media file");

            TTMessage msg = new TTMessage();
            User user = new User();
            bool video = false, audio = false;
            int videoframes = 0;
            while (ttclient.GetMessage(ref msg, 2000))
            {
                switch (msg.nClientEvent)
                {
                    case ClientEvent.CLIENTEVENT_USER_STATECHANGE:
                        user = (User)msg.DataToObject();
                        audio |= user.uUserState.HasFlag(UserState.USERSTATE_MEDIAFILE_AUDIO);
                        video |= user.uUserState.HasFlag(UserState.USERSTATE_MEDIAFILE_VIDEO);
                        break;
                    case ClientEvent.CLIENTEVENT_USER_MEDIAFILE_VIDEO:
                        {
                            Bitmap bmp;
                            VideoFrame f = ttclient.AcquireUserMediaVideoFrame(msg.nSource, out bmp);
                            videoframes++;
                            Assert.IsTrue(f.nWidth > 0, "acquired video frame");
                            Assert.IsTrue(ttclient.ReleaseUserMediaVideoFrame(f), "release video frame");
                            break;
                        }
                }
            }
            
            Assert.IsTrue(audio, "media audio playback");
            Assert.IsTrue(video, "media video playback");
            Assert.IsTrue(videoframes>0, "got video frames");

            UserStatistics stats = new UserStatistics();
            Assert.IsTrue(ttclient.GetUserStatistics(ttclient.GetMyUserID(), ref stats));
            Assert.IsTrue(stats.nMediaFileAudioPacketsRecv > 0);
            Assert.IsTrue(stats.nMediaFileVideoFramesRecv > 0);
            Assert.IsTrue(stats.nMediaFileVideoPacketsRecv > 0);

            Assert.IsTrue(ttclient.StopStreamingMediaFileToChannel(), "stop stream");
        }
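Examples #8 and #9 rely on a WaitForEvent test helper. A minimal sketch of what such a helper might look like, built only on the GetMessage call shown in Example #7 (this is an assumption about the test harness, not an SDK method):

        static bool WaitForEvent(TeamTalk ttclient, ClientEvent ev, int timeoutMSec, ref TTMessage outMsg)
        {
            // Poll the event queue until the requested event arrives or the total timeout expires.
            System.Diagnostics.Stopwatch watch = System.Diagnostics.Stopwatch.StartNew();
            TTMessage msg = new TTMessage();
            int remain = timeoutMSec;
            while (remain >= 0 && ttclient.GetMessage(ref msg, remain))
            {
                if (msg.nClientEvent == ev)
                {
                    outMsg = msg;
                    return true;
                }
                remain = timeoutMSec - (int)watch.ElapsedMilliseconds;
            }
            return false;
        }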
Example #8
        public void TestMediaStreamAudio()
        {
            const string USERNAME = "******", PASSWORD = "******"; string NICKNAME = "TeamTalk.NET - " + GetCurrentMethod();
            const UserRight USERRIGHTS = UserRight.USERRIGHT_TRANSMIT_MEDIAFILE_AUDIO;
            MakeUserAccount(GetCurrentMethod(), USERNAME, PASSWORD, USERRIGHTS);
            TeamTalk ttclient = NewClientInstance();

            InitSound(ttclient);
            Connect(ttclient);
            Login(ttclient, NICKNAME, USERNAME, PASSWORD);
            JoinRoot(ttclient);

            MediaFileInfo mf = new MediaFileInfo();

            Assert.IsTrue(TeamTalk.GetMediaFileInfo(MEDIAFILE_AUDIO, ref mf), "get media file info");
            Assert.IsTrue(mf.uDurationMSec > 0, "media file time");
            Assert.IsTrue(mf.audioFmt.nSampleRate > 0, "sample rate");
            Assert.IsTrue(mf.audioFmt.nChannels > 0, "channels");
            
            VideoCodec vidcodec = new VideoCodec();
            vidcodec.nCodec = Codec.NO_CODEC;
            Assert.IsTrue(ttclient.StartStreamingMediaFileToChannel(MEDIAFILE_AUDIO, vidcodec), "start stream media file");

            TTMessage msg = new TTMessage();
            Assert.IsTrue(WaitForEvent(ttclient, ClientEvent.CLIENTEVENT_USER_STATECHANGE, DEF_WAIT, ref msg), "wait for state change, player launch");

            Assert.IsTrue(WaitForEvent(ttclient, ClientEvent.CLIENTEVENT_USER_STATECHANGE, DEF_WAIT, ref msg), "wait for state change, playback");
            User user = (User)msg.DataToObject();
            Assert.IsTrue(user.uUserState.HasFlag(UserState.USERSTATE_MEDIAFILE_AUDIO), "media playback");

            WaitForEvent(ttclient, ClientEvent.CLIENTEVENT_NONE, 5000);

            Assert.IsTrue(ttclient.SetUserAudioStreamBufferSize(ttclient.UserID, StreamType.STREAMTYPE_MEDIAFILE_AUDIO, 5000));

            Assert.IsTrue(ttclient.GetUser(ttclient.UserID, ref user));
            Assert.AreEqual(5000, user.nBufferMSecMediaFile);

            Assert.IsTrue(ttclient.StopStreamingMediaFileToChannel(), "stop stream");

            UserStatistics stats = new UserStatistics();
            Assert.IsTrue(ttclient.GetUserStatistics(ttclient.GetMyUserID(), ref stats));
            Assert.IsTrue(stats.nMediaFileAudioPacketsRecv > 0);

            Assert.IsFalse(ttclient.StartStreamingMediaFileToChannel("sdfsdf", vidcodec), "start stream invalid media file");
        }
Example #9
        public void TestVideoCapture()
        {
            const string USERNAME = "******", PASSWORD = "******"; string NICKNAME = "TeamTalk.NET - " + GetCurrentMethod();
            const UserRight USERRIGHTS = UserRight.USERRIGHT_TRANSMIT_VIDEOCAPTURE;
            MakeUserAccount(GetCurrentMethod(), USERNAME, PASSWORD, USERRIGHTS);
            TeamTalk ttclient = NewClientInstance();

            VideoCaptureDevice[] devs;
            Assert.IsTrue(TeamTalk.GetVideoCaptureDevices(out devs), "get video devs");

            Assert.IsTrue(devs.Length > 0, "Video devs available");

            VideoCaptureDevice dev = devs[0];

            Assert.IsTrue(ttclient.InitVideoCaptureDevice(dev.szDeviceID, dev.videoFormats[0]),
                          "Init video capture device");

            TTMessage msg = new TTMessage();
            Assert.IsTrue(WaitForEvent(ttclient, ClientEvent.CLIENTEVENT_USER_VIDEOCAPTURE, 10000, ref msg),
                          "Get Video capture frame");
            Assert.AreEqual(0, msg.nSource, "Video from self");

            Bitmap bmp = null;
            int counter = 0;

            VideoFrame frm = ttclient.AcquireUserVideoCaptureFrame(0, out bmp);
            Assert.AreNotEqual(0, frm.nHeight, "Valid video frame");
            Assert.AreEqual(dev.videoFormats[0].nWidth, frm.nWidth);
            Assert.AreEqual(dev.videoFormats[0].nHeight, frm.nHeight);

            bmp.Save(MEDIAFOLDER + "\\" + counter++ + ".bmp");
            
            Assert.IsTrue(ttclient.ReleaseUserVideoCaptureFrame(frm), "release video frame");

            Connect(ttclient);
            Login(ttclient, NICKNAME, USERNAME, PASSWORD);
            JoinRoot(ttclient);

            Assert.IsTrue(ttclient.CloseVideoCaptureDevice(), "Close vid dev");

            VideoFormat vidfmt = new VideoFormat();
            vidfmt.nFPS_Numerator = 10;
            vidfmt.nFPS_Denominator = 1;
            vidfmt.nWidth = 640;
            vidfmt.nHeight = 480;
            vidfmt.picFourCC = FourCC.FOURCC_RGB32;

            Assert.IsTrue(ttclient.InitVideoCaptureDevice(dev.szDeviceID, vidfmt),
                          "Init video capture device");
            Assert.IsTrue(ttclient.Flags.HasFlag(ClientFlag.CLIENT_VIDEOCAPTURE_READY), "vid cap ready");

            VideoCodec vidcodec = new VideoCodec();
            vidcodec.nCodec = Codec.WEBM_VP8_CODEC;
            vidcodec.webm_vp8.nRcTargetBitrate = 0;

            Assert.IsTrue(ttclient.StartVideoCaptureTransmission(vidcodec), "Start vid tx");

            int cmdid = ttclient.DoSubscribe(ttclient.GetMyUserID(), Subscription.SUBSCRIBE_VIDEOCAPTURE);
            Assert.IsTrue(WaitCmdComplete(ttclient, cmdid, DEF_WAIT), "sub vidcap");

            List<VideoFrame> vidframes = new List<VideoFrame>();

            while (WaitForEvent(ttclient, ClientEvent.CLIENTEVENT_USER_VIDEOCAPTURE, 10000, ref msg))
            {
                if (msg.nSource == ttclient.GetMyUserID())
                {
                    frm = ttclient.AcquireUserVideoCaptureFrame(msg.nSource, out bmp);
                    Assert.IsTrue(frm.nStreamID > 0, "got remote video frame");
                    bmp.Save(MEDIAFOLDER + "\\" + counter++ + ".bmp");

                    vidframes.Add(frm);
                }
                if (vidframes.Count == 10)
                    break;
            }

            foreach (VideoFrame v in vidframes)
                Assert.IsTrue(ttclient.ReleaseUserVideoCaptureFrame(v), "release vid frame");

            Assert.IsTrue(ttclient.Flags.HasFlag(ClientFlag.CLIENT_TX_VIDEOCAPTURE), "Tx'ing vidcap");

            Assert.IsTrue(ttclient.StopVideoCaptureTransmission(), "stop vidcap");

            Assert.IsTrue(ttclient.CloseVideoCaptureDevice(), "close vidcap");
        }