private static void ListBox_ItemsSourceChanged(object sender, EventArgs e)
 {
     // ItemsSource changed: the existing Capture (if any) tracks the old
     // collection, so dispose it before installing a fresh one.
     var listBox = (ListBox) sender;
     Capture oldCapture;
     // TryGetValue replaces the original ContainsKey + indexer double lookup;
     // the null guard tolerates entries that were stored as null elsewhere
     // (OnScrollOnNewItemChanged used to write null values).
     if (Associations.TryGetValue(listBox, out oldCapture) && oldCapture != null)
         oldCapture.Dispose();
     Associations[listBox] = new Capture(listBox);
 }
        /// <summary>
        /// Full constructor: binds this format to a capture device and validates
        /// the requested sample rate, sample size, and channel count against it.
        /// </summary>
        /// <param name="captureDevice">Device the format will be used with; must not be null.</param>
        /// <param name="rate">Samples per second.</param>
        /// <param name="size">Bits per sample.</param>
        /// <param name="channels">Number of audio channels.</param>
        /// <exception cref="ArgumentNullException">When <paramref name="captureDevice"/> is null.</exception>
        /// <exception cref="NotSupportedException">When the device rejects the format.</exception>
        internal SoundFormat(Capture captureDevice, SampleRate rate, SampleSize size, short channels)
        {
            if (captureDevice == null)
            {
                throw new ArgumentNullException(nameof(captureDevice));
            }

            this._captureDevice = captureDevice;

            try
            {
                // Probe the device with the requested characteristics;
                // ConstructFormat throws if the combination is unsupported.
                this._currentFormat = ConstructFormat((int)rate, (short)size, (short)channels);
            }
            catch (Exception ex)
            {
                // Wrap with a message describing exactly what was requested.
                // NotSupportedException (instead of the original bare Exception)
                // is more specific yet still caught by existing catch(Exception).
                string errMsg =
                    string.Format("Sound format not supported: {0} samples/sec, {1} bits/sample, {2} channels.",
                        (int)rate, (short)size, (short)channels);
                throw new NotSupportedException(errMsg, ex);
            }

            // Record the accepted format members only after validation succeeded.
            this._channels = channels;
            this._bitsPerSample = (short)size;
            this._samplesPerSecond = (int)rate;
        }
        /// <summary>
        /// Restarts audio capture at the given sample rate, optionally on a
        /// caller-supplied device (default device when null).
        /// </summary>
        public void StartCapture(int sampleRate, Capture captureDevice)
        {
            // Restart cleanly: stop any running capture and flush pending requests.
            StopCapture();
            EmptyRequest();

            // Reset all per-session recording state.
            this.sampleRate = sampleRate;
            readPos = 0;
            IsRecording = false;
            record = null;
            recordTime = 0;
            noRecordTime = 0;
            lastSample = null;
            lastSize = 0;

            // Null-coalescing is the idiomatic form of the original ternary.
            capture = captureDevice ?? new Capture();

            // 16-bit mono PCM at the requested rate (BlockAlign = 2 bytes/frame).
            WaveFormat waveFormat = new WaveFormat();
            waveFormat.BitsPerSample = 16;
            waveFormat.BlockAlign = 2;
            waveFormat.Channels = 1;
            waveFormat.AverageBytesPerSecond = sampleRate * 2;
            waveFormat.SamplesPerSecond = sampleRate;
            waveFormat.FormatTag = WaveFormatTag.Pcm;

            CaptureBufferDescription captureBuffDesc = new CaptureBufferDescription();
            captureBuffDesc.BufferBytes = bufferSize;
            captureBuffDesc.Format = waveFormat;

            // Start looping capture, then launch the reader thread plus a
            // one-shot thread that drains outstanding requests.
            captureBuffer = new CaptureBuffer(captureBuffDesc, capture);
            captureBuffer.Start(true);

            captureThread = new Thread(captureLoop);
            captureThread.Start();
            new Thread(EmptyRequest).Start();
        }
 static void ListBox_Loaded(object sender, RoutedEventArgs e) {
     // One-shot hook: attach a Capture the first time this ListBox loads,
     // but only when its Items collection raises change notifications.
     var box = (ListBox)sender;
     if (!(box.Items is INotifyCollectionChanged))
         return;
     box.Loaded -= ListBox_Loaded;
     Associations[box] = new Capture(box);
 }
Exemple #5
0
        /// <summary>
        /// Lazily creates the capture device on first call, then toggles frame
        /// processing on the application's Idle event.
        /// </summary>
        public void Init()
        {
            if (_capture == null)
            {
                try
                {
                    _capture = new Capture();
                }
                catch (NullReferenceException excpt)
                {
                    // Emgu CV throws NullReferenceException when no camera exists.
                    MessageBox.Show(excpt.Message);
                }
            }

            // Creation failed above: nothing to start or stop.
            if (_capture == null)
                return;

            // Subscribe to start processing, unsubscribe to stop.
            if (_captureInProgress)
                Application.Idle -= ProcessFrame;
            else
                Application.Idle += ProcessFrame;

            _captureInProgress = !_captureInProgress;
        }
 static void ListView_Loaded(object sender, RoutedEventArgs e)
 {
     // One-shot hook: attach a Capture once the ListView is loaded, provided
     // its Items collection raises change notifications.
     // Local renamed from "ListView" to camelCase "listView" so it no longer
     // shadows the ListView type name (matches the sibling Loaded handlers).
     var listView = (ListView)sender;
     var incc = listView.Items as INotifyCollectionChanged;
     if (incc == null) return;
     listView.Loaded -= ListView_Loaded;
     Associations[listView] = new Capture(listView);
 }
Exemple #7
0
        private static void DataGrid_Loaded(object sender, RoutedEventArgs e)
        {
            // Attach tracking only for item collections that publish change
            // notifications; detach the handler so this runs once per grid.
            var grid = (DataGrid) sender;
            if (!(grid.Items is INotifyCollectionChanged))
                return;

            grid.Loaded -= DataGrid_Loaded;
            _associations[grid] = new Capture(grid);
        }
Exemple #8
0
        // Lazily yields a Match for every shape whose full ordered stop
        // sequence is covered, in order, by the capture's GPS reads.
        // A shape matches when each of its trip stops is hit by one or more
        // consecutive reads, in the trip's stop order.
        private static IEnumerable<Match> MatchCapture(IEnumerable<Shape> shapes, Capture capture)
        {
            //O(n*log(n) * m)
            // Set of stop ids ever seen by this capture; used as a cheap
            // superset pre-filter before the expensive per-shape scan.
            HashSet<string> readSet = new HashSet<string>(capture.Reads.Select(x => x.ClosestStop));

            //O(?)
            foreach (Shape shape in shapes)
            {
                // Skip shapes containing any stop the capture never visited.
                if (!shape.StopSet.IsSubsetOf(readSet)) continue;

                // The ordered stops to match come from the shape's first trip.
                List<OrderedStop> stopsToMatch = shape.Trips.First().Stops.ToList();
                Dictionary<string, Stop> stopsMap = stopsToMatch.Select(x => x.Unordered).Distinct().ToDictionary(x => x.Id);
                int searchIndex = 0;

                var readsPerStop = new SortedDictionary<OrderedStop, List<MatchRead>>(new OrderedStopComparer());
                List<MatchRead> matchReads = new List<MatchRead>();
                // The stop whose consecutive run of reads is currently open.
                OrderedStop readingStop = null;

                //match shape to reads
                foreach (GpsRead read in capture.Reads)
                {
                    // Ignore reads near stops this shape does not contain.
                    if (!stopsMap.ContainsKey(read.ClosestStop)) continue;
                    Stop currentStop = stopsMap[read.ClosestStop];

                    if (searchIndex < stopsToMatch.Count && stopsToMatch[searchIndex].Equals(currentStop))
                    {
                        if (readingStop != null)
                        {
                            // we've finished a consecutive match of reads to the search stop
                            readsPerStop[readingStop] = matchReads;
                        }

                        // Open a new run for the next expected stop.
                        readingStop = stopsToMatch[searchIndex];
                        matchReads = new List<MatchRead> { new MatchRead(read) };

                        searchIndex++;
                    }
                    else if (currentStop.Equals(readingStop))
                    {
                        // we're in the middle of a consecutive match of reads to the search stop
                        matchReads.Add(new MatchRead(read));
                    }
                    else if (searchIndex == stopsToMatch.Count)
                    {
                        // we have a full match
                        // Flush the final run (date-ordered) and sentinel the
                        // index so this shape cannot yield twice.
                        readsPerStop[readingStop] = matchReads.OrderBy(x => x.Read.Date).ToList();
                        searchIndex = Int32.MaxValue;

                        yield return new Match(capture.Device, shape.Id, readsPerStop);
                    }
                }
            }
        }
        /// <summary>
        /// Opens the camera at the given device index and starts the capture
        /// and processing worker threads.
        /// </summary>
        public MainWindow(int device)
        {
            InitializeComponent();

            // Maximize when the window would not fit on the primary screen.
            if (SystemParameters.PrimaryScreenWidth <= Width || SystemParameters.PrimaryScreenHeight <= Height)
                WindowState = System.Windows.WindowState.Maximized;

            capture = new Capture(device);

            // One background thread reads frames, the other processes them.
            var captureWorker = new Thread(CaptureLoop);
            var processWorker = new Thread(ProcessLoop);
            captureWorker.Start();
            processWorker.Start();
        }
 // Records a captured substring: its text plus its position and extent
 // within the original input.
 private Capture(string value, int index, int length, bool createCaptures)
 {
     Value = value;
     Index = index;
     Length = length;
     
     // Prevent a StackOverflow recursion in the constructor: the nested
     // Capture is built with createCaptures == false, so it does not try
     // to populate its own Captures array in turn.
     if (createCaptures)
     {
         Captures = new Capture[] { new Capture(value, index, length, false) };
     }
 }
    // Opens the video source: a file/stream URL or a local webcam index,
    // depending on the configured capture type.
    void OpenCapture()
    {
        m_cap = m_captureType == VideoCaptureType.FileStream
            ? new Capture(m_streamUrl)
            : new Capture(m_webcamIndex);

        // Defensive: halt the run loop if no capture object was produced.
        if (m_cap == null) m_running = false;
    }
Exemple #12
0
        /// <summary>
        /// Opens the given video file, paces playback at the file's native
        /// frame rate, and starts the capture and processing threads.
        /// </summary>
        public MainWindow(string videoFile)
        {
            InitializeComponent();

            // Maximize when the window would not fit on the primary screen.
            if (SystemParameters.PrimaryScreenWidth <= Width || SystemParameters.PrimaryScreenHeight <= Height)
                WindowState = System.Windows.WindowState.Maximized;

            this.videoFile = videoFile;
            capture = new Capture(videoFile);
            fpsLimit = capture.GetFPS();

            // One background thread reads frames, the other processes them.
            var readerThread = new Thread(CaptureLoop);
            var processorThread = new Thread(ProcessLoop);
            readerThread.Start();
            processorThread.Start();
        }
        /// <summary>
        /// Configures and launches a screen-capture worker thread for the
        /// given window region, optionally attaching an audio device, and
        /// updates the toolbar button on success.
        /// </summary>
        public static void StartCapture(int width, int height, ToolStripButton button, uint handle)
        {
            OsirtVideoCapture.button = button;

            // Describe the capture region and encoding for the worker thread.
            captureThreadEntry = new Capture();
            captureThreadEntry.WindowHandle = handle;
            captureThreadEntry.WholeWindow = 1;
            captureThreadEntry.X = 0;
            captureThreadEntry.Y = 0;
            captureThreadEntry.Width = (uint)width;
            captureThreadEntry.Height = (uint)height;
            captureThreadEntry.BitRate = 20000000;

            // Load user settings once instead of re-reading them on every check.
            var settings = UserSettings.Load();
            captureThreadEntry.FrameRate = (uint)settings.FramesPerSecond;

            // 0xFFFFFFFF means "no audio device selected".
            uint audio = 0xFFFFFFFF;
            if ((HasStereoMix() && settings.UseStereoMix) || (HasMicrophone() && settings.UseMicrophone))
            {
                foreach (string line in AudioDevices())
                {
                    if ((line.Contains("Stereo Mix") && settings.UseStereoMix) || (line.Contains("Microphone") && settings.UseMicrophone))
                    {
                        // Device index is expected at a fixed offset in the listing line.
                        audio = Convert.ToUInt32(line.Trim()[7].ToString());
                        Console.WriteLine("AUDIO: " + audio);
                        break;
                    }
                }
            }
            captureThreadEntry.Audio = audio;
            captureThreadEntry.Filename = Constants.TempVideoFile;
            captureThread = new Thread(new ThreadStart(captureThreadEntry.Start));
            captureThread.Start();

            // Wait for the worker to come up and report whether capture started.
            while (!captureThread.IsAlive) Thread.Sleep(100);
            while (!captureThreadEntry.VideoCaptureAttempted) Thread.Sleep(100);

            if (captureThreadEntry.VideoCaptureStarted)
            {
                button.Image = Properties.Resources.stop_rec;
                button.ToolTipText = "Stop screen capture";
            }
            else
            {
                // Join replaces the original busy-wait spin
                // ("while (captureThread.IsAlive) ;"), which pegged a core.
                captureThread.Join();
                captureThread = null;
                captureThreadEntry = null;
            }
        }
        // Probes camera indices 0,1,2,... until one fails, adding a thumbnail
        // Image for each working camera to camerasPanel; clicking a thumbnail
        // selects that camera. Runs on a background thread and signals
        // doneListing when finished.
        private void EnumerateCameras()
        {
            Thread.CurrentThread.IsBackground = true;
            int i = 0;
            while (continueListing)
            {
                using (Capture c = new Capture(i))
                {
                    try
                    {
                        var frame = c.GetNextFrame();

                        // A zero-width frame is treated as "no more cameras".
                        if (frame.Width == 0)
                            break;

                        // Freeze the bitmap so it can cross to the UI thread.
                        var b = frame.CreateWriteableBitmap();
                        frame.UpdateWriteableBitmap(b);
                        b.Freeze();

                        Dispatcher.Invoke(() =>
                        {
                            // Copy the loop variable so the MouseDown closure
                            // (which fires much later) captures this index,
                            // not whatever i holds at that future time.
                            int idx = i;
                            Image img = new Image();
                            img.Source = b;
                            img.Margin = new Thickness(20);
                            camerasPanel.ColumnDefinitions.Add(new ColumnDefinition());
                            img.SetValue(Grid.ColumnProperty, i);
                            img.SetValue(Grid.RowProperty, 1);
                            camerasPanel.Children.Add(img);

                            img.MouseDown += (s, e) =>
                            {
                                ChooseCamera(idx);
                            };
                        });
                    }
                    catch (PsycheInterop.CaptureFailedException)
                    {
                        // Opening/reading this index failed: assume the list is exhausted.
                        break;
                    }

                }
                i++;
            }

            doneListing.Set();
        }
Exemple #15
0
    /// <summary>
    /// Reads every frame from the video at 'path', wraps each frame's bytes
    /// in an RTP packet appended to 'frames', and raises FrameLoaded per frame.
    /// </summary>
    public void startLoad()
    {
        Capture c = new Capture(path);

        while (true)
        {
            Image<Bgr, Byte> frame = c.QueryFrame();
            if (frame == null)
                break; // end of stream

            // Dispose the intermediate Bitmap promptly; the original leaked
            // one GDI bitmap per frame for the whole video.
            byte[] payload;
            using (Bitmap bmp = frame.ToBitmap())
            {
                payload = FrameHelper.getFrameBytes(bmp);
            }

            RTPModel pkg = new RTPModel(0, frames.Count, frames.Count, payload);
            frames.Add(pkg.toBytes());

            // Null-conditional invoke avoids a NullReferenceException when no
            // handler is subscribed (the original crashed in that case).
            FrameLoaded?.Invoke(this, frames.Count - 1);
        }
    }
Exemple #16
0
 // Enumerates the available audio capture devices and, when one exists,
 // builds the capture object from the first device's GUID.
 // Returns false (after logging) when no capture device is present.
 public bool CreateCaputerDevice()
 {
     // Enumerate the available capture devices first.
     CaptureDevicesCollection capturedev = new CaptureDevicesCollection();
     if (capturedev.Count <= 0)
     {
         Console.WriteLine("當前沒有可用於音頻捕捉的設備", "系统提示");
         return false;
     }

     // Index 0 is the sound card's own recording endpoint; use its GUID
     // to construct the capture device object.
     Guid devguid = capturedev[0].DriverGuid;
     capture = new Capture(devguid);
     return true;
 }
        /// <summary>
        /// Signals the capture worker to exit, waits for it to finish, clears
        /// the shared state, and raises VideoCaptureComplete.
        /// </summary>
        public static void StopCapture()
        {
            // Reset the toolbar button to its "start" state regardless of
            // whether a capture is actually running.
            button.Image = Properties.Resources.start_rec;
            button.ToolTipText = "Start screen capture";

            if (captureThreadEntry == null || captureThread == null)
            {
                MessageBox.Show("No capture in progress");
                return;
            }

            // Ask the worker to exit, then wait for it. Join replaces the
            // original busy-wait loop ("while (captureThread.IsAlive) ;"),
            // which spun a core at 100% until the thread exited.
            captureThreadEntry.ThreadExitSignaled = true;
            captureThread.Join();

            captureThread = null;
            captureThreadEntry = null;

            VideoCaptureComplete?.Invoke(null, new VideoCaptureCompleteEventArgs());
        }
Exemple #18
0
 /// <summary>
 /// Dependency-property change handler: starts tracking the ListBox when the
 /// property turns on, and disposes/removes the tracker when it turns off.
 /// </summary>
 public static void OnScrollOnNewItemChanged(
     DependencyObject d,
     DependencyPropertyChangedEventArgs e)
 {
     var listBox = d as ListBox;
     if (listBox == null) return;

     bool oldValue = (bool)e.OldValue, newValue = (bool)e.NewValue;
     if (newValue == oldValue) return;

     if (newValue)
     {
         // Enabled: start tracking this ListBox.
         Associations[listBox] = new Capture(listBox);
     }
     else
     {
         // Disabled: dispose and remove the entry. The original stored a null
         // value instead of removing the key, which made later
         // ContainsKey + Dispose consumers throw NullReferenceException.
         if (Associations.ContainsKey(listBox))
         {
             Associations[listBox].Dispose();
             Associations.Remove(listBox);
         }
     }
 }
Exemple #19
0
 // Shows a live Canny edge-detection view of the capture stream in a
 // window named "test", looping until a key is pressed or the stream ends.
 private static void GrayCapture(Capture capture)
 {
     using (var window = new NamedWindow("test"))
     {
         while (CV.WaitKey(10) < 0) // pumps UI events and polls for a keypress every 10 ms
         {
             using (var src = capture.QueryFrame())// grab one frame
             {
                 if (src == null)
                 {
                     break; // stream exhausted
                 }
                 using (var gray = new IplImage(src.Size, IplDepth.U8, 1)) // 8-bit single-channel scratch images
                 using (var dstCanny = new IplImage(src.Size, IplDepth.U8, 1))
                 {
                     CV.CvtColor(src, gray, ColorConversion.Bgr2Gray);
                     CV.Canny(gray, dstCanny, 50, 50);
                     window.ShowImage(dstCanny);
                 }
             }
         }
     }
 }
Exemple #20
0
        // Sets up the DirectSound device, capture source, wave format, and
        // matching capture/playback buffer descriptions (~200 ms of PCM each),
        // then creates the playback buffer and records the shared buffer size.
        private void InicializeCaptureSound()
        {
            device = new Device();
            //device.SetCooperativeLevel(this, CooperativeLevel.Normal);

            //CaptureDevicesCollection captureDeviceCollection = new CaptureDevicesCollection();
            //capture = new Capture(captureDeviceCollection[0].DriverGuid);
            //DeviceInformation deviceInfo = (DeviceInformation) cmbRecordDevices.SelectedItem;  //captureDeviceCollection[0];
            capture = new Capture(record_source);

            // Must run before the buffer descriptions below: they read waveFormat.
            SetWaveFormat();

            captureBufferDescription = new CaptureBufferDescription();
            captureBufferDescription.BufferBytes = waveFormat.AverageBytesPerSecond / 5;//approx 200 milliseconds of PCM data.
            captureBufferDescription.Format = waveFormat;

            // Playback buffer mirrors the capture buffer's size and format.
            playbackBufferDescription = new BufferDescription();
            playbackBufferDescription.BufferBytes = waveFormat.AverageBytesPerSecond / 5;
            playbackBufferDescription.Format = waveFormat;

            playbackBuffer = new SecondaryBuffer(playbackBufferDescription, device);
            bufferSize = captureBufferDescription.BufferBytes;
        }
Exemple #21
0
        /// <summary>
        /// Prepares 16-bit mono 44.1 kHz PCM recording: builds the wave format,
        /// sizes a 0.1 s capture buffer, opens the first recording device,
        /// starts looping capture, and enables the polling timer.
        /// </summary>
        private void Form1_Load(object sender, EventArgs e)
        {
            waveFormat.Channels = 1;                     // mono
            waveFormat.FormatTag = WaveFormatTag.Pcm;    // PCM encoding
            waveFormat.BitsPerSample = 16;               // 16-bit samples
            waveFormat.SamplesPerSecond = 44100;         // 44.1 kHz
            waveFormat.BlockAlign = (short)(waveFormat.Channels * (waveFormat.BitsPerSample / (short)8));// bytes per sample frame
            waveFormat.AverageBytesPerSecond = waveFormat.BlockAlign * waveFormat.SamplesPerSecond;// bytes per second

            // Reserve roughly 0.1 seconds of audio in the capture buffer.
            captureBufferDescription.BufferBytes = waveFormat.AverageBytesPerSecond / 10;
            captureBufferDescription.Format = waveFormat;

            // Open the first recording device.
            DeviceInformation deviceInfo = captureDevice[0];
            capture = new Capture(deviceInfo.DriverGuid);

            // Fix: the original checked 'capture' for null only AFTER passing
            // it to the CaptureBuffer constructor; validate before use instead.
            if (null == capture)
                throw new NullReferenceException();

            try
            {
                // Create the capture buffer for the chosen format and device.
                captureBuffer = new CaptureBuffer(captureBufferDescription, capture);
            }
            catch
            {
                MessageBox.Show("録音デバイスが無いか、録音フォーマットをサポートしていません。");
                Close();
                return;
            }

            // Begin looping capture and start polling via timer1.
            captureBuffer.Start(true);
            timer1.Enabled = true;
        }
 // Starts an animated-GIF screen capture. async void is acceptable here
 // only because this is a top-level UI event handler.
 private async void CaptureGIF(object sender, RoutedEventArgs e)
 {
     await Capture.Start(Models.CaptureMode.GIF);
 }
        /// <summary>
        /// Demonstrates repeated regex matching and retrieval of match data:
        /// iterating NextMatch, walking Groups and their Captures, validating
        /// e-mail addresses with IsMatch, and extracting page names from URLs.
        /// </summary>
        public void DemoRegexUse6()
        {
            string text = "One car red car blue car";

            // (one or more) word chars, (one or more) whitespace, then "car".
            string pat = @"(\w+)\s+(car)";


            Console.WriteLine("[06]字符串={0}", text);
            Console.WriteLine("[06]正则表达式={0}", pat);


            // Build the regex, ignoring case.
            Regex r = new Regex(pat, RegexOptions.IgnoreCase);

            // Find the first match of the pattern in the input string.
            Match m = r.Match(text);

            // Number of matches found so far.
            int matchCount = 0;


            // Match represents the result of a single regex match;
            // Success indicates whether the match succeeded.
            while (m.Success)
            {
                Console.WriteLine("[06]第{0}次匹配!", (++matchCount));

                for (int i = 1; i <= 2; i++)
                {
                    // Groups holds the collection of groups matched by the regex.
                    Group g = m.Groups[i];
                    Console.WriteLine("==Group[{0}]={1}", i, g);

                    // Captures holds every capture made by this group, ordered
                    // innermost-to-outermost, left-to-right; it may be empty.
                    CaptureCollection cc = g.Captures;
                    for (int j = 0; j < cc.Count; j++)
                    {
                        // Capture is the result of a single subexpression capture:
                        //   Index  - position of the captured substring in the input.
                        //   Length - length of the captured substring.
                        //   Value  - the captured substring itself.
                        Capture c = cc[j];
                        Console.WriteLine("====Capture[{0}] = {1}, Position={2}", j, c, c.Index);
                    }
                }
                m = m.NextMatch();
            }


            // E-mail address pattern.
            string emailPat = @"^(\w)+(\.\w+)*@(\w)+((\.\w+)+)$";

            string[] emailValueArray =
            {
                "*****@*****.**",
                "*****@*****.**",
                "*****@*****.**",
                "@sina.com",
                "zhang3@",
                "zhang3@com",
            };

            foreach (string value in emailValueArray)
            {
                Console.WriteLine("[06]使用正则表达式静态方法IsMatch({0}, {1})的结果为:{2}", value, emailPat, Regex.IsMatch(value, emailPat));
            }



            // Web-page matching.
            // NOTE(review): this section was corrupted in the original source
            // (the test-URL array literal was fused with the pattern
            // declaration); the sample URLs below are a reconstruction.
            String[] testArray =
            {
                "http://www.sina.com/index.htm",
                "http://www.sina.com/news/sports.htm",
            };
            // Capture the final path segment ending in ".htm".
            string patWeb = @"([^/]+).htm";
            // Build the regex, ignoring case.
            Regex rWeb = new Regex(patWeb, RegexOptions.IgnoreCase);

            foreach (string str in testArray)
            {
                // Find the first match of the pattern in each URL.
                Match mWeb = rWeb.Match(str);
                if (mWeb.Success)
                {
                    Group g = mWeb.Groups[0];
                    Console.WriteLine("[06] 原始数据:{0}, 解析后的结果:{1} ", str, g.Value);
                }
            }
        }
Exemple #24
0
 // Opens the default camera (index 0) via Emgu CV and begins grabbing frames.
 private void button1_Click(object sender, EventArgs e)
 {
     cap = new Emgu.CV.Capture(0);
     cap.Start();
 }
 // Runs once per ListBox: requires change-notifying items, detaches itself,
 // then registers a Capture tracker for the control.
 static void ListBox_Loaded(object sender, RoutedEventArgs e)
 {
     var source = (ListBox)sender;
     var notifying = source.Items as INotifyCollectionChanged;
     if (notifying == null)
         return;
     source.Loaded -= ListBox_Loaded;
     _associations[source] = new Capture(source);
 }
Exemple #26
0
        // Builds an RTSP URL for the camera (Hikvision DS-2CD2132F-IS style),
        // opens the stream, hooks frame retrieval onto Application.Idle,
        // starts recording, and registers the handler with CameraManager.
        // The commented-out blocks are preserved alternatives (HTTP streaming,
        // snapshot URLs, Emgu-based capture, recording thread).
        public Camera(CameraInfo cam)
        {
            InitializeComponent();
            info = cam;

            // getFrame is invoked on every idle tick to pull the next frame.
            eh = new EventHandler(getFrame);
            ////Process p = new Process();
            ////p.Start

            //url = "http://";
            //url = url + cam.IP;

            //For streaming
            //url = url + "/videofeed";



            //for hikvision DS-2CD2132F-IS
            // rtsp://user:password@ip/Streaming/channels/1
            url = "rtsp://";
            url = url + info.User + ":" + info.Password;
            url = url + "@" + info.IP + "/Streaming/channels/1";


            //url = url + @"/Streaming/channels/1/picture?snapShotImageType=JPEG";
            //url =  "http://*****:*****@" + url + "/Streaming/channels/1/preview";
            //url = url + @"/Streaming/channels/1/preview";


            //for Image
            //url = url + "/short.jpg";

            //for commented emgucv
            //eh = new EventHandler(getFrameEmgu);
            //url = cam.IP;


            cap = new Capture(url);
            //cap = new Capture(0);
            //cap = new Capture("rtsp://*****:*****@192.168.10.201/Streaming/channels/1");
            //cap = new Emgu.CV.Capture("http://*****:*****@192.168.10.201/Streaming/channels/102/httppreview");
            //Application.Idle += process;
            //fps = (int)cap.GetCaptureProperty(Emgu.CV.CvEnum.CapProp.Fps);


            groupBox1.Text    = cam.Name;
            Application.Idle += eh;


            //for Recording thread
            //recordingThread = new System.Threading.Thread(Record);
            //recordingThread.IsBackground = true;
            //recordingThread.Start();


            //for Recording Process
            Record();


            // Register this camera's idle handler so the manager can
            // detach/reattach it centrally.
            CameraManager cMan = CameraManager.Instance;

            cMan.addEH(info.name, eh);
        }
Exemple #27
0
        // Opens a radial context menu at the scene position hit by the active
        // device's ray. Pivot objects are tested first (they take priority),
        // then the general scene; the menu variant depends on what was hit.
        // Returns Capture.Ignore when input is already captured, a popup is
        // open, or nothing in the scene was hit.
        public override Capture BeginCapture(InputState input, CaptureSide eSide)
        {
            if (input.LeftCaptureActive || input.RightCaptureActive || popup != null)
            {
                DebugUtil.Warning("SceneRightClickBehavior.BeginCapture - we should not be here...");
                return(Capture.Ignore);
            }

            // Pick the world ray matching the device that triggered us.
            Ray3f useRay = new Ray3f(Vector3f.Zero, Vector3f.AxisY);

            if (input.IsForDevice(InputDevice.Mouse) || input.IsForDevice(InputDevice.Gamepad))
            {
                useRay = (input.bRightMousePressed) ? input.vMouseWorldRay : input.vGamepadWorldRay;
            }
            else if (input.IsForDevice(InputDevice.AnySpatialDevice))
            {
                useRay = (eSide == CaptureSide.Left) ? input.vLeftSpatialWorldRay : input.vRightSpatialWorldRay;
            }

            // raycast into scene to find hit object/position for menu. We try Pivots first,
            // because they are special and have priority (?? always ??). Then we cast into general scene.
            SORayHit  pivotHit;
            bool      bHitPivot = cockpit.Scene.FindSORayIntersection(useRay, out pivotHit, (x) => { return(x is PivotSO); });
            AnyRayHit rayHit;

            if (bHitPivot)
            {
                rayHit = new AnyRayHit(pivotHit);
            }
            else
            {
                if (cockpit.Scene.FindSceneRayIntersection(useRay, out rayHit) == false)
                {
                    // Nothing hit anywhere: no menu to show.
                    return(Capture.Ignore);
                }
            }

            // find center of menu in space
            Vector3f vHUDCenter   = cockpit.RootGameObject.GetPosition();
            Vector3f menuLocation = rayHit.hitPos;

            if (Placement == PlacementMode.OnHUDSphere)
            {
                // Project the hit onto the HUD sphere instead of the scene point.
                float fRayT;
                bool  bHit = RayIntersection.Sphere(useRay.Origin, useRay.Direction,
                                                    vHUDCenter, HUDRadius, out fRayT);
                Debug.Assert(bHit);
                menuLocation = useRay.Origin + fRayT * useRay.Direction;
            }

            // compute extents
            // Size the menu so it spans a constant visual angle from the camera.
            float fDiameter = VRUtil.GetVRRadiusForVisualAngle(
                menuLocation, cockpit.ActiveCamera.GetPosition(), VisualDiameter);

            // Choose the menu variant based on what the ray hit.
            if (rayHit.eType == HitType.SceneObjectHit && rayHit.hitSO is PivotSO)
            {
                popup = GeneratePivotRadialMenu(fDiameter, rayHit.hitSO as PivotSO);
            }
            else if (rayHit.eType == HitType.SceneObjectHit)
            {
                popup = GenerateSceneObjectRadialMenu(fDiameter, rayHit);
            }
            else
            {
                popup = GenerateDefaultRadialMenu(fDiameter, rayHit);
            }
            popup.Create();
            popup.Name = "popup_menu";

            if (Placement == PlacementMode.InScene)
            {
                HUDUtil.PlaceInScene(popup, vHUDCenter, menuLocation);
            }
            else
            {
                HUDUtil.PlaceInSphere(popup, HUDRadius, vHUDCenter, menuLocation);
            }

            // this is a bit of a hack...radial menu lives in-scene, if we attach it to
            //   cockpit then it will move with cockpit, which is wrong. So we want to
            //   stick it in the scene. But, at least for now, we still want it to be
            //   drawn in the cockpit layer, so we add to cockpit first, them remove and
            //   re-add to the scene
            cockpit.AddUIElement(popup, false);
            cockpit.RemoveUIElement(popup, false);
            cockpit.Scene.AddUIElement(popup, false);

            HUDUtil.AnimatedShow(popup, 0.2f);

            return(Capture.Begin(this, eSide));
        }
Exemple #28
0
 // Builds an HtmlContentFragment covering the span of the given regex capture.
 protected HtmlContentFragment CreateFragment(Capture capture) =>
     new HtmlContentFragment(this, capture.Index, capture.Length);
        /*
         * Initializes all the data members: DirectSound device and capture
         * source, the PCM wave format, matching ~200 ms capture/playback
         * buffers, call-state flags, and a UDP socket listening on port 1450
         * for incoming control messages (Invite, Bye, etc).
         * Any failure is reported via MessageBox rather than rethrown.
         */
        private void Initialize()
        {
            try
            {
                device = new Device();
                device.SetCooperativeLevel(this, CooperativeLevel.Normal);

                CaptureDevicesCollection captureDeviceCollection = new CaptureDevicesCollection();

                // Use the first available recording device.
                DeviceInformation deviceInfo = captureDeviceCollection[0];

                capture = new Capture(deviceInfo.DriverGuid);

                short channels = 1; //Stereo.
                short bitsPerSample = 16; //16Bit, alternatively use 8Bits.
                int samplesPerSecond = 22050; //11KHz use 11025 , 22KHz use 22050, 44KHz use 44100 etc.

                //Set up the wave format to be captured.
                waveFormat = new WaveFormat();
                waveFormat.Channels = channels;
                waveFormat.FormatTag = WaveFormatTag.Pcm;
                waveFormat.SamplesPerSecond = samplesPerSecond;
                waveFormat.BitsPerSample = bitsPerSample;
                waveFormat.BlockAlign = (short)(channels * (bitsPerSample / (short)8));
                waveFormat.AverageBytesPerSecond = waveFormat.BlockAlign * samplesPerSecond;

                captureBufferDescription = new CaptureBufferDescription();
                captureBufferDescription.BufferBytes = waveFormat.AverageBytesPerSecond / 5;//approx 200 milliseconds of PCM data.
                captureBufferDescription.Format = waveFormat;

                // Playback buffer mirrors the capture buffer's size and format.
                playbackBufferDescription = new BufferDescription();
                playbackBufferDescription.BufferBytes = waveFormat.AverageBytesPerSecond / 5;
                playbackBufferDescription.Format = waveFormat;
                playbackBuffer = new SecondaryBuffer(playbackBufferDescription, device);

                bufferSize = captureBufferDescription.BufferBytes;

                bIsCallActive = false;
                nUdpClientFlag = 0;

                //Using UDP sockets
                clientSocket = new Socket(AddressFamily.InterNetwork, SocketType.Dgram, ProtocolType.Udp);
                EndPoint ourEP = new IPEndPoint(IPAddress.Any, 1450);
                //Listen asynchronously on port 1450 for coming messages (Invite, Bye, etc).
                clientSocket.Bind(ourEP);

                //Receive data from any IP.
                EndPoint remoteEP = (EndPoint)(new IPEndPoint(IPAddress.Any, 0));

                byteData = new byte[1024];
                //Receive data asynchornously.
                clientSocket.BeginReceiveFrom(byteData,
                                           0, byteData.Length,
                                           SocketFlags.None,
                                           ref remoteEP,
                                           new AsyncCallback(OnReceive),
                                           null);
            }
            catch (Exception ex)
            {
                MessageBox.Show(ex.Message, "VoiceChat-Initialize ()", MessageBoxButtons.OK, MessageBoxIcon.Error);
            }
        }
Exemple #30
0
        /// <summary>
        /// Fetches the next pose from the active input source (websocket stream,
        /// recorded JSON file, or live Azure Kinect body tracking), updates
        /// <c>CurrentPose</c> as a side effect, and returns it.
        /// </summary>
        /// <returns>The latest pose; unchanged from the previous call when no new data arrived.</returns>
        public PoseData GetNextPose()
        {
            switch (CurrentPoseInputSource)
            {
            case PoseInputSource.WEBSOCKET:
#if !UNITY_WEBGL || UNITY_EDITOR
                // Outside WebGL the message queue must be pumped manually each frame.
                websocket.DispatchMessageQueue();
#endif
                // poseLiveWS is non-null if alternative is sending pose data over websocket
                if (poseLiveWS != null)
                {
                    // Assign last pose from websocket
                    CurrentPose = poseLiveWS;
                }
                else
                {
                    Debug.Log("No pose recieved from WebSocket!");
                }
                break;

            case PoseInputSource.FILE:

                // Advance the enumerator over the recorded frame sequence.
                if (SequenceEnum != null && SequenceEnum.MoveNext())
                {
                    _CurrentFilePoseNumber++;
                }
                else
                {
                    // Quick and dirty way to loop (by reloading file)
                    if (SequenceEnum != null && !loop)
                    {
                        break;
                    }
                    LoadData();
                    SequenceEnum.MoveNext();
                    _CurrentFilePoseNumber = 1;
                }


                string   frame_json     = SequenceEnum.Current;
                PoseData fake_live_data = PoseDataUtils.JSONstring2PoseData(frame_json);
                CurrentPose = fake_live_data;

                if (recording)     // recording: echo the frame back out to the write path
                {
                    File.AppendAllText(WriteDataPath, frame_json + Environment.NewLine);
                }
                break;

            case PoseInputSource.KINECT:
                if (device != null)
                {
                    using (Capture capture = device.GetCapture())
                    {
                        // Make tracker estimate body
                        tracker.EnqueueCapture(capture);

                        // Code for getting RGB image from camera

                        Microsoft.Azure.Kinect.Sensor.Image color = capture.Color;
                        if (color != null && color.WidthPixels > 0 && (streamCanvas != null || videoRenderer != null))
                        {
                            UnityEngine.Object.Destroy(tex);    // required to not keep old images in memory
                            tex = new Texture2D(color.WidthPixels, color.HeightPixels, TextureFormat.BGRA32, false);
                            tex.LoadRawTextureData(color.Memory.ToArray());
                            tex.Apply();

                            //Fetch the RawImage component from the GameObject
                            if (tex != null)
                            {
                                if (streamCanvas != null)
                                {
                                    m_RawImage         = streamCanvas.GetComponent <RawImage>();
                                    m_RawImage.texture = tex;
                                }
                                if (videoRenderer != null)
                                {
                                    videoRenderer.material.mainTexture = tex;
                                }
                            }
                        }
                    }

                    // Get pose estimate from tracker
                    using (Frame frame = tracker.PopResult())
                    {
                        //Debug.LogFormat("{0} bodies found.", frame.NumberOfBodies);

                        //  At least one body found by Body Tracking
                        if (frame.NumberOfBodies > 0)
                        {
                            // Use first estimated person, if mutiple are in the image
                            // !!! There are (probably) no guarantees on consisitent ordering between estimates
                            //var bodies = frame.Bodies;
                            var body = frame.GetBody(0);

                            // Apply pose to user avatar(s)
                            PoseData live_data = PoseDataUtils.Body2PoseData(body);

                            if (recording)     // recording
                            {
                                PoseDataJSON jdl = PoseDataUtils.Body2PoseDataJSON(body);
                                AppendRecordedFrame(jdl);
                            }
                            CurrentPose = live_data;
                        }
                    }
                }
                else
                {
                    Debug.Log("device is null!");
                }
                break;
            }
            return(CurrentPose);
        }
Exemple #31
0
 /// <summary>
 /// Starts grabbing frames from the default camera when the Capture button is clicked.
 /// </summary>
 private void btnCapture_Click(object sender, EventArgs e)
 {
     // Wire the grabber up locally, publish it to the field, then start it.
     var grabber = new Capture();
     grabber.ImageGrabbed += ProcessFrame;
     videoCapture = grabber;
     videoCapture.Start();
 }
Exemple #32
0
 /// <summary>
 /// Creates the Capture instance this form records from.
 /// </summary>
 private void InitializeCapture() => capture = new Capture();
 /// <inheritdoc />
 protected override void DecorateTest(IPatternScope scope, ICodeElementInfo codeElement)
 {
     // Arm the screen recording before each test instance's set-up runs.
     var setUpChain = scope.TestBuilder.TestInstanceActions.SetUpTestInstanceChain;
     setUpChain.Before(state => Capture.AutoEmbedRecording(TriggerEvent, AttachmentName, Parameters, FramesPerSecond));
 }
Exemple #34
0
        /// <summary>
        /// Maps the capture to its persistence model, inserts it through the async
        /// repository writer, then commits the unit of work.
        /// </summary>
        /// <param name="capture">The capture entity to persist.</param>
        private async Task _insertAsync(Capture capture)
        {
            var entity = CaptureMapper.Map(capture);

            await this._captureRepositoryWriterAsync.InsertAsync(entity).ConfigureAwait(false);
            await this._unitOfWork.SaveChangesAsync().ConfigureAwait(false);
        }
Exemple #35
0
 /// <summary>
 /// Stops sound-card capture and releases the DirectSound capture/playback devices.
 /// </summary>
 /// <param name="strFault">Receives an "HResult/message" pair describing the failure, if any.</param>
 /// <returns>True when the codec shut down cleanly; otherwise false.</returns>
 public bool StopCodec(ref string strFault)
 {
     bool functionReturnValue = false;
     // Stop the capture
     lock (objCodecLock)
     {
         try
         {
             if (MCB.DebugLog)
                 Logs.WriteDebug("[Main.StopCodec] Stop thrNotify with blnSCCapturing = False");
             // Clearing the flag should let the notify/wait thread exit on its own.
             blnSCCapturing = false;
             Thread.Sleep(200);
             // If the thread is still alive after the grace period, force it down.
             if (thrNotify != null && thrNotify.IsAlive)
             {
                 if (MCB.DebugLog)
                     Logs.WriteDebug("[Main.StopCodec] Aborting thrNotify");
                 thrNotify.Abort();
                 Thread.Sleep(100);
                 thrNotify.Join(3000);
             }
             thrNotify = null;
             // Stop the capture buffer and dispose all sound devices.
             if (objCapture != null)
             {
                 objCapture.Stop();
                 objCapture.Dispose();
             }
             objCapture = null;
             if (devCaptureDevice != null)
                 devCaptureDevice.Dispose();
             devCaptureDevice = null;
             if (devSelectedPlaybackDevice != null)
             {
                 devSelectedPlaybackDevice.Dispose();
             }
             devSelectedPlaybackDevice = null;
             if (MCB.DebugLog)
                 Logs.WriteDebug("[Main.StopCodec] = True");
             functionReturnValue = true;
             objProtocol.ARDOPProtocolState = ProtocolState.OFFLINE;
         }
         catch (Exception ex)
         {
             // BUG FIX: the original used the method group "ex.ToString" (no parentheses)
             // and the VB6 "Err" object, neither of which compiles in C#. Report the
             // exception's HResult and message in the same "number/description" shape.
             Logs.Exception("[Main.StopCodec] Err: " + ex.ToString());
             if (MCB.DebugLog)
                 Logs.WriteDebug("[Main.StopCodec] = False");
             strFault = ex.HResult.ToString() + "/" + ex.Message;
             functionReturnValue = false;
         }
     }
     return functionReturnValue;
 }
 /// <summary>
 /// Projects a regex <see cref="Capture"/> into a token by handing the capture's
 /// index, length and text to the supplied selector.
 /// </summary>
 static T Token <T>(Capture capture, Func <int, int, string, T> tokenSelector) =>
     tokenSelector(capture.Index, capture.Length, capture.Value);
Exemple #37
0
        /// <summary>
        /// Records sound data from the given audio input into <c>Data</c>.
        /// </summary>
        ///
        /// <remarks>
        /// Capture runs on a background thread started by this method; samples
        /// accumulate in <c>Data</c> until <c>Recording</c> is set to false.
        /// NOTE(review): GetAmibiguousType appears to be a reflection helper that
        /// instantiates DirectSound structs — its exact behavior is defined
        /// elsewhere; confirm before relying on it.
        /// </remarks>
        /// 
        /// <param name="capture">The input to record from.</param>
        /// <exception cref="Exception">If a recording is already in progress.</exception>
        /// <exception cref="IOException">If the capture buffer cannot be created or read.</exception>
        public void Record(Capture capture)
        {
            if (Recording) {
            throw new Exception("Already recording.");
              }

              // 96 kHz, 16-bit, mono PCM capture format.
              WaveFormat format = (WaveFormat) GetAmibiguousType(typeof(WaveFormat));
              format.SamplesPerSecond = 96000;
              format.BitsPerSample = 16;
              format.Channels = 1;
              format.FormatTag = WaveFormatTag.Pcm;
              format.BlockAlign = (Int16) (format.Channels * (format.BitsPerSample / 8));
              format.AverageBytesPerSecond = format.SamplesPerSecond * format.BlockAlign;

              // Notify roughly 16 times per second, rounded to a whole sample.
              int notifySize = Math.Max(4096, format.AverageBytesPerSecond / 16);
              notifySize -= notifySize % format.BlockAlign;

              // This is a fairly arbitrary choice.
              int inputSize = notifySize * 16;
              // Output is half of input, as every two bytes is a piece of sound data.
              int outputSize = inputSize / 2;

              CaptureBufferDescription description = (CaptureBufferDescription)
              GetAmibiguousType(typeof(CaptureBufferDescription));
              description.Format = format;
              description.BufferBytes = inputSize;

              CaptureBuffer buffer;
              try {
            buffer = new CaptureBuffer(description, capture);
              } catch {
            throw new IOException(
            "An error occurred attempting to set up a read buffer.");
              }

              // Signal the reader when the circular buffer is half full and full.
              AutoResetEvent reset = new AutoResetEvent(false);
              Notify notify = new Notify(buffer);

              BufferPositionNotify bpn1 = (BufferPositionNotify)
              GetAmibiguousType(typeof(BufferPositionNotify));

              bpn1.Offset = buffer.Caps.BufferBytes / 2 - 1;
              bpn1.EventNotifyHandle = reset.SafeWaitHandle.DangerousGetHandle();
              BufferPositionNotify bpn2 = (BufferPositionNotify)
              GetAmibiguousType(typeof(BufferPositionNotify));
              bpn2.Offset = buffer.Caps.BufferBytes - 1;
              bpn2.EventNotifyHandle = reset.SafeWaitHandle.DangerousGetHandle();

              notify.SetNotificationPositions(new BufferPositionNotify[] {
            bpn1, bpn2
              });

              int offset = 0;
              Data = new List<Int16>();

              Recording = true;
              // Background thread: drain the capture buffer each time a notification
              // fires, combining byte pairs (little-endian) into 16-bit samples.
              new Thread((ThreadStart) delegate {
            buffer.Start(true);

            while (Recording) {
              // Let the buffer fill up from the last read.
              reset.WaitOne();

              byte[] read;
              try {
            read = (byte[]) buffer.Read(offset, typeof(byte), LockFlag.None,
                               outputSize);
              } catch {
            throw new IOException(
                "An error occurred attempting to read the input data.");
              }
              offset = (offset + outputSize) % inputSize;

              // Pair up bytes: first byte is the low half, second the high half.
              bool written = false;
              Int16 old = 0;
              foreach (byte b in read) {
            if (!written) {
              old = (Int16) b;
            } else {
              old = (Int16) (old | (((Int16) (b << 8))));
              Data.Add(old);
            }
            written = !written;
              }
            }

            buffer.Stop();
              }).Start();
        }
        /// <summary>
        /// Verifies that the remote was asked for non-coherency updates, dependencies,
        /// and coherency updates, and that the asset list captured from those calls
        /// matches <paramref name="withBuild"/>'s assets.
        /// </summary>
        private void ThenGetRequiredUpdatesShouldHaveBeenCalled(Build withBuild)
        {
            // Moq's Capture.In records the actual arguments passed to the verified calls.
            var assets       = new List <IEnumerable <AssetData> >();
            var dependencies = new List <IEnumerable <DependencyDetail> >();

            DarcRemotes[TargetRepo]
            .Verify(r => r.GetRequiredNonCoherencyUpdatesAsync(SourceRepo, NewCommit, Capture.In(assets), Capture.In(dependencies)));
            DarcRemotes[TargetRepo]
            .Verify(r => r.GetDependenciesAsync(TargetRepo, TargetBranch, null, false));
            DarcRemotes[TargetRepo]
            .Verify(r => r.GetRequiredCoherencyUpdatesAsync(Capture.In(dependencies), RemoteFactory.Object, CoherencyMode.Legacy));
            // Each captured asset set must correspond to the build's assets.
            assets.Should()
            .BeEquivalentTo(
                new List <List <AssetData> >
            {
                withBuild.Assets.Select(
                    a => new AssetData(false)
                {
                    Name    = a.Name,
                    Version = a.Version
                })
                .ToList()
            });
        }
Exemple #39
0
 /// <summary>
 /// Parses the episode number from the "end episode" regex capture, falling back
 /// to <paramref name="episode"/> when the capture matched nothing.
 /// </summary>
 private static int ExtractEndEpisode(Capture endEpisodeGroup, int episode)
 {
     var text = endEpisodeGroup.Value;
     if (string.IsNullOrEmpty(text))
     {
         return episode;
     }
     return int.Parse(text);
 }
 /// <summary>
 /// Forwards the "captureOne" entity capture to the underlying implementation.
 /// </summary>
 public Task RegexOne(
     [Entity("captureOne")] Capture capture) => methods.RegexOne(capture);
Exemple #41
0
 /// <summary>
 /// Parses the captured FTP timestamp text (assumed local time) into a DateTime,
 /// or returns DateTime.MinValue when the capture is empty.
 /// </summary>
 private DateTime DetermineDateModified(Capture name)
 {
     var raw = name.Value;
     if (raw.Length == 0)
     {
         return DateTime.MinValue;
     }
     return raw.ExtractFtpDate(DateTimeStyles.AssumeLocal);
 }
Exemple #42
0
        /// <summary>
        /// Starts recording video from the camera into a file, showing a preview
        /// in the given control.
        /// </summary>
        /// <param name="videoControl">Control that hosts the camera preview.</param>
        /// <param name="filePath">Directory the video file is written to.</param>
        /// <param name="fileName">Name of the video file (e.g. "xxx.avi").</param>
        /// <returns>1 = recording started, 0 = not started, -1 = internal error,
        /// 2 = ffdshow video codec not installed, 3 = a recording is already in progress.</returns>
        public int StartRecording(Control videoControl, string filePath, string fileName)
        {
            var state = 0;

            // If the camera is already doing capture work, shut it down before starting.
            if (this.capture != null)
            {
                this.capture.Stop();
                this.capture.DisposeCapture();
                this.stauts = "NoThing";
            }
            CameraManage cameraManage = new CameraManage(logFilePath, logFileName);

            // Only start when we are not already in the "Recing" (recording) state.
            if (!this.stauts.Equals("Recing"))
            {
                // Look up the index of the ffdshow video compressor.
                var ffshowIndex = cameraManage.GetffshowIndex();
                if (ffshowIndex > 0)// codec present when the index is positive
                {
                    try
                    {
                        var Flie = filePath + fileName;
                        cameraManage.CreatFile(Flie);
                        this.capture = new Capture(new Filters().VideoInputDevices[0], null);      // instantiate the camera capture object
                        this.capture.PreviewWindow   = videoControl;                               // control that hosts the preview
                        this.capture.VideoCompressor = this.filters.VideoCompressors[ffshowIndex]; // video compressor to encode with
                        this.capture.Filename        = Flie;                                       // output path + file name, e.g. d:\\ssss.avi
                        this.capture.FrameRate       = 15;                                         // frames per second

                        this.capture.FrameSize = new Size(320, 240);                               // video resolution
                        this.capture.Start();                                                      // begin recording

                        // cover image state
                        this.stauts = "Recing";
                        // snapshot (frame capture) event
                        Capture.FrameCapHandler f = new Capture.FrameCapHandler(GetNewImage);
                        this.capture.FrameCaptureComplete += new DirectX.Capture.Capture.FrameCapHandler(f.Invoke);

                        state = 1;
                    }
                    catch (Exception ex)
                    {
                        cameraManage.RecordErrorLog(ex.Message + "当前状态:" + this.stauts);
                        this.stauts = "Error";
                        StopRecord();
                        state = -1;
                    }
                }
                else
                {
                    cameraManage.RecordErrorLog("ffshow视频解码器不存在,没安装,当前状态:" + this.stauts);
                    state = 2;
                }
            }
            else
            {
                cameraManage.RecordErrorLog("视像头录像正在录制,无法调用。当前状态:" + this.stauts);
                state = 3;
            }
            return(state);
        }
Exemple #43
0
 /// <summary>
 /// Determines whether <paramref name="smaller"/> lies entirely within
 /// <paramref name="larger"/> in the source text (bounds inclusive).
 /// </summary>
 public static bool Contains(this Capture larger, Capture smaller)
 {
     int largerEnd  = larger.Index + larger.Length;
     int smallerEnd = smaller.Index + smaller.Length;

     return larger.Index <= smaller.Index && smallerEnd <= largerEnd;
 }
Exemple #44
0
        /// <summary>
        /// Applies scraped key/value property pairs to this quest's fields.
        /// <paramref name="capKeys"/> and <paramref name="capVals"/> are parallel
        /// collections: capKeys[i] names the property whose value is capVals[i].
        /// </summary>
        /// <param name="capKeys">Captured property names.</param>
        /// <param name="capVals">Captured property values, parallel to <paramref name="capKeys"/>.</param>
        /// <param name="sid">Just for Logger</param>
        private void ParseFoundProperties(CaptureCollection capKeys, CaptureCollection capVals, string sid)
        {
            for (int i = 0; i < capKeys.Count; i++)
            {
                Capture capKey = capKeys[i];
                Capture capVal = capVals[i];

                switch (capKey.Value)
                {
                case "category":
                    _categoryId = Helper.ParseInt(capVal.Value);
                    break;

                case "category2":
                    _category2Id = Helper.ParseInt(capVal.Value);
                    break;

                case "level":
                    Level = Helper.ParseInt(capVal.Value);
                    break;

                case "name":
                    Name = Helper.ParseString(capVal.Value);
                    break;

                case "reqlevel":
                    RequiredLevel = Helper.ParseInt(capVal.Value);
                    break;

                case "side":
                    Side = (Side)Helper.ParseInt(capVal.Value);
                    break;

                case "wflags":
                    wflags = Helper.ParseInt(capVal.Value);
                    break;

                case "reqrace":
                    _races = Race.ParseFlags(Helper.ParseInt(capVal.Value));
                    break;

                case "reqclass":
                    _classes = wClass.ParseFlags(Helper.ParseInt(capVal.Value));
                    break;

                case "type":
                    // Only accept values that map to a known QuestType member.
                    int t = Helper.ParseInt(capVal.Value);
                    if (!Enum.IsDefined(typeof(QuestType), t))
                    {
                        Helper.LogDebug("Unknown Type [" + (Id == 0 ? sid : Id.ToString()) + "]:" + capVal.Value);
                    }
                    else
                    {
                        Type = (QuestType)t;
                    }

                    break;

                case "id":
                    // First id wins; later occurrences are ignored.
                    if (Id == 0)
                    {
                        Id = Helper.ParseInt(capVal.Value);
                    }
                    break;

                // Recognized but intentionally ignored keys.
                case "money":
                case "xp":
                case "reprewards":
                case "itemrewards":
                case "itemchoices":
                case "currencyrewards":
                case "race":                         // same as reqrace
                case "classs":                       // same as reqclass
                    break;

                default:
                    Helper.LogDebug("Unknown property[" + (Id == 0 ? sid : Id.ToString()) + "]:" + capKey.Value);
                    break;
                }
            }
        }
Exemple #45
0
 /// <summary>
 /// Creates a text range covering a regex <see cref="Capture"/>, positioned by
 /// offsetting the start of <paramref name="readRange"/> by the capture's index.
 /// </summary>
 internal TextRange(TextRange readRange, Capture capture, TextSource source)
     : this(readRange.From.OffsetColumn(capture.Index), capture.Length, source)
 {
 }
Exemple #46
0
        /// <summary>
        /// Parses one "quick info" key/line pair scraped from a quest infobox and
        /// applies it to this quest's fields.
        /// </summary>
        /// <param name="key">The infobox key (e.g. "level", "side", "race").</param>
        /// <param name="pos">Position of the key within <paramref name="line"/>; used by the "side" fallback parse.</param>
        /// <param name="line">The raw infobox line the key was found on.</param>
        /// <param name="force">When true, overwrite values that were already set.</param>
        private void ProcessQIKey(string key, int pos, string line, bool force = false)
        {
            Regex           myRegex = null;
            MatchCollection mc = null;
            Match           m = null;
            int             j, k;

            switch (key)
            {
            case "level":
                if (force || Level < 1)
                {
                    // The level is the trailing run of digits on the line.
                    k = line.Length - 1;
                    while (Char.IsDigit(line[k]))
                    {
                        k--;
                    }
                    Level = Helper.ParseInt(line.Substring(k + 1));
                }
                break;

            case "requires":
                if (force || RequiredLevel < 1)
                {
                    k = line.Length - 1;
                    while (Char.IsDigit(line[k]))
                    {
                        k--;
                    }
                    // BUG FIX: this previously assigned Level (copy-paste from the
                    // "level" case), clobbering the quest level instead of setting
                    // the required level that the guard above checks.
                    RequiredLevel = Helper.ParseInt(line.Substring(k + 1));
                }
                break;

            case "loremaster":
                Loremaster = true;
                break;

            case "side":
                if (force || (int)Side == -1)
                {
                    myRegex = new Regex(@"](?<side>\w+)\[", RegexOptions.Compiled);
                    mc      = myRegex.Matches(line);

                    m = mc.Count > 0 ? mc[0] : null;
                    if (m == null || !m.Success)
                    {
                        // No bracketed token: fall back to the text after the key.
                        line = line.Substring(pos + 1).Trim();
                    }
                    else
                    {
                        line = m.Groups["side"].Value;
                    }
                    Side _dside = (Wowhead.Side)(-1);

                    if (!Enum.TryParse(line, true, out _dside))
                    {
                        Helper.LogDebug("Can't parse QI Side[" + Id + "]:" + line);
                    }
                    else
                    {
                        Side = _dside;
                    }
                }
                break;

            case "[icon":
                myRegex = new Regex(@"](?<key>\w+):\s*\[(url=/)?(?<type>\w*)=(?<id>\d+)]", RegexOptions.Compiled);
                mc      = myRegex.Matches(line);

                m = mc.Count > 0 ? mc[0] : null;
                if (m == null || !m.Success)
                {
                    Helper.LogDebug("Can't parse QI Icon[" + Id + "]:" + line);
                    // BUG FIX: bail out here; the original fell through and
                    // dereferenced a null Match below, throwing NullReferenceException.
                    break;
                }

                Group             gKey     = m.Groups["key"];
                CaptureCollection gTypeCap = m.Groups["type"].Captures;
                CaptureCollection gIdCap   = m.Groups["id"].Captures;
                for (j = 0; j < gKey.Captures.Count; j++)
                {
                    Capture c = gKey.Captures[j];

                    switch (c.Value.ToLower())
                    {
                    case "start":
                        QuestGiver = Helper.GetInteractEntity(Helper.ParseInt(gIdCap[j].Value), gTypeCap[j].Value);
                        break;

                    case "end":
                        QuestTurnIn = Helper.GetInteractEntity(Helper.ParseInt(gIdCap[j].Value), gTypeCap[j].Value);
                        break;

                    default:
                        Helper.LogDebug("Unknown QI line[" + Id + "]:" + line);
                        break;
                    }
                }

                break;

            case "not":
                line = line.Substring(3).Trim().ToLower();
                if (line == "sharable")
                {
                    Sharable = false;
                }
                else
                {
                    Helper.LogDebug("Unknown sub 'not' key[" + Id + "]:" + line);
                }
                break;

            case "difficulty":
                myRegex = new Regex(@"r(?<d>\d)](?<level>\d+)\[", RegexOptions.Compiled);
                mc      = myRegex.Matches(line);

                for (j = 0; j < mc.Count; j++)
                {
                    if (!mc[j].Success)
                    {
                        Helper.LogDebug("Can't parse QI difficulty[" + Id + "]:" + line);
                    }

                    Group             gIdx      = mc[j].Groups["d"];
                    CaptureCollection gLevelCap = mc[j].Groups["level"].Captures;

                    // Difficulty slots are 1-based in the markup, 0-based in the array.
                    pos             = Helper.ParseInt(gIdx.Captures[0].Value) - 1;
                    Difficulty[pos] = Helper.ParseInt(gLevelCap[0].Value);
                }
                break;

            case "added":
                // The version string is the trailing run of digits and dots.
                k = line.Length - 1;
                while (Char.IsDigit(line[k]) || line[k] == '.')
                {
                    k--;
                }
                AddedIn = line.Substring(k + 1);
                break;

            case "type":
                string    lt = line.Substring("Type:".Length).Trim();
                QuestType qt = (QuestType)(-1);
                if (!Enum.TryParse(lt, out qt))
                {
                    Helper.LogDebug("Unknown QI Type [" + Id + "]:" + lt);
                }
                else
                if (force || Type == QuestType.Normal)
                {
                    Type = qt;
                }
                break;

            case "race":
            case "races":
                if (force || _races == null)
                {
                    myRegex = new Regex(@"=(?<id>\d+)", RegexOptions.Compiled);
                    mc      = myRegex.Matches(line);
                    _races  = new List <Race>();

                    for (j = 0; j < mc.Count; j++)
                    {
                        if (!mc[j].Success)
                        {
                            Helper.LogDebug("Can't parse QI Races[" + Id + "]:" + line);
                        }

                        _races.Add(Race.GetById(Helper.ParseInt(mc[j].Groups["id"].Captures[0].Value)));
                    }
                }
                break;

            case "class":
                if (force || _classes == null)
                {
                    myRegex  = new Regex(@"=(?<id>\d+)", RegexOptions.Compiled);
                    mc       = myRegex.Matches(line);
                    _classes = new List <wClass>();

                    for (j = 0; j < mc.Count; j++)
                    {
                        if (!mc[j].Success)
                        {
                            // BUG FIX: the original log message said "Races" here,
                            // copy-pasted from the race case.
                            Helper.LogDebug("Can't parse QI Classes[" + Id + "]:" + line);
                        }

                        _classes.Add(wClass.GetById(Helper.ParseInt(mc[j].Groups["id"].Captures[0].Value)));
                    }
                }
                break;

            case "sharable":     // by default - sharable
                break;

            default:
                Helper.LogDebug("Unknown key[" + Id + "]:" + key);
                break;
            }
        }
Exemple #47
0
        /// <summary>
        /// Records sound data from the given audio input into <c>SData</c>.
        /// </summary>
        ///
        /// <remarks>
        /// Capture runs on a background thread started by this method; the method
        /// itself returns once the thread is launched. Samples accumulate in
        /// <c>SData</c> until <c>recording</c> is set to false.
        /// NOTE(review): each raw byte is stored as its own Int16 (see the Parse
        /// loop below) rather than combining byte pairs into 16-bit samples —
        /// confirm this is intended before changing.
        /// </remarks>
        /// 
        /// <param name="cap">The input to record from.</param>
        /// <returns>True when setup succeeded and the capture thread was started;
        /// false when already recording or no capture device was supplied.</returns>
        public bool Record(Capture cap)
        {
            if(recording){
            return false;
              }
              // string captureDescriptor – string for eg “Mic”, “Input”
              // Control owner – maybe Window or Form would do for this – was Native.GetDesktopWindow()
              // if windowless application use desktop window as message broker
              // Returns true for setup done and thread started, false for problem

              // Choose a Wave format, calculating BlockAlign and AverageBytesPerSecond
              // (parameterless struct "constructor" invoked via reflection)
              ConstructorInfo nom = typeof(WaveFormat).GetConstructor(Type.EmptyTypes);
              format = (WaveFormat)nom.Invoke(null);
              format.SamplesPerSecond = 96000;
              format.BitsPerSample = 16;
              format.Channels = 1;
              format.FormatTag = WaveFormatTag.Pcm;

              SData = new List<Int16>();

              // Both of these are calculate for All channels
              // BlockAlign = BytesPerSampleAllChannels, AverageBytesPerSecond = BytesPerSecondAllChannels
              format.BlockAlign = (short)(format.Channels * (format.BitsPerSample / 8));
              format.AverageBytesPerSecond = format.SamplesPerSecond * format.BlockAlign;

              // Set the size of input and output buffers

              // Multiplier of both delay and minimum buffer size in units of 1/16th secs,
              int NUM_BUFFERS = 8;

              // Sets _dwNotifySize to enough bytes for 1/16th of a second, all channels
              // Note that this was 1/8th (ie line ended ‘/ 8);’), and output buffer size = capture size/2
              // But this was changed to allow output buffer size to be a multiple of BlockAlign
              int _dwNotifySize = Math.Max(4096, format.AverageBytesPerSecond / (8 * 2));
              // rounds _dwNotifySize to a multiple of BlockAlign (BytesPerSampleAllChannel)
              _dwNotifySize -= _dwNotifySize % format.BlockAlign;

              // Capture buffer is looped – when the end is reached, it starts from the beginning again.
              // Capturing one should be twice as large as output – so that when completed capture
              // is being read to output buffer there is still room to for the buffer to keep filling
              // without overwriting the output. I think.
              int _dwCaptureBufferSize = NUM_BUFFERS * _dwNotifySize * 2;
              int _dwOutputBufferSize = NUM_BUFFERS * _dwNotifySize;

              // Check a matching capture device was found
              if (cap == null)
            return false; // no matching sound card/capture device
              {

            // Make the description and create a CaptureBuffer accordingly
            ConstructorInfo capnom = typeof(CaptureBufferDescription).GetConstructor(Type.EmptyTypes);
            var capDesc = (CaptureBufferDescription)capnom.Invoke(null);

            capDesc.Format = format;
            capDesc.BufferBytes = _dwCaptureBufferSize;

            var _dwCapBuffer = new CaptureBuffer(capDesc, cap);

            // Create two output buffers – this seems to avoid the buffer being locked and written
            // to while it's still playing, helping to avoid a sound glitch on my machine.
            var _dwDevBuffers = new SecondaryBuffer[2];

            // Set autoResetEvent to be fired when it's filled and subscribe to buffer notifications

            var _resetEvent = new AutoResetEvent(false);
            var _notify = new Notify(_dwCapBuffer);
            // Half&half – one notification halfway through the output buffer, one at the end
            ConstructorInfo buffnom = typeof(BufferPositionNotify).GetConstructor(Type.EmptyTypes);
            var bpn1 = (BufferPositionNotify)buffnom.Invoke(null);
            bpn1.Offset = _dwCapBuffer.Caps.BufferBytes / 2 - 1;
            bpn1.EventNotifyHandle = _resetEvent.SafeWaitHandle.DangerousGetHandle();
            var bpn2 = (BufferPositionNotify)buffnom.Invoke(null);
            bpn2.Offset = _dwCapBuffer.Caps.BufferBytes - 1;
            bpn2.EventNotifyHandle = _resetEvent.SafeWaitHandle.DangerousGetHandle();

            _notify.SetNotificationPositions(new BufferPositionNotify[] { bpn1, bpn2 });

            recording = true; // ready to capture sound

            // Fire worker thread to take care of messages
            // Note that on a uniprocessor, the new thread may not get any processor time
            // until the main thread is preempted or yields, eg by ending button click event or
            // calling Thread.Sleep(0)

            // botch – not sure if these are thread safe for multiple threads
            int offset = 0;
            int devbuffer = 0;

            // Make a new thread – as countained in the { }
            Thread _dwCaptureThread = new Thread((ThreadStart)delegate
            {
              _dwCapBuffer.Start(true); // start capture

              // IsReady – This should be true while you wish to capture and then output the sound.
              while (recording)
              {
            _resetEvent.WaitOne(); // blocks thread until _dwCapBuffer is half/totally full
            // Read the capture buffer into an array, and output it to the next DevBuffer
            byte[] read = (byte[])_dwCapBuffer.Read(offset, typeof(byte), LockFlag.None, _dwOutputBufferSize);

            for (int i = 0; i < read.Length; i++)
            {
              SData.Add(Int16.Parse(read[i].ToString()));
            }

            //       _dwDevBuffers[devbuffer].Write(0, read, LockFlag.EntireBuffer);

            // Update offset
            offset = (offset + _dwOutputBufferSize) % _dwCaptureBufferSize;

            devbuffer = 1 - devbuffer; // toggle between 0 and 1

              }
              _dwCapBuffer.Stop(); // stop capture

            });

            _dwCaptureThread.Start(); // start the new Thread
            return true;
              }
        }
 /// <summary>
 /// Wraps a DirectShow capture of the given video file, hosting the video in
 /// one panel and keeping a reference to the panel that shows step markers.
 /// </summary>
 public DirectShowVideoWrapper(string videoPath, Panel videoOwnerPanel, Panel stepsOwnerPanel, VideoUserOptions pVideoUserOptions)
 {
     // Remember where step markers will be rendered.
     m_StepsOwnerPanel = stepsOwnerPanel;

     // Build the underlying capture around the supplied file and host panel.
     cam = new Capture(videoPath, "Test String", videoOwnerPanel, true, pVideoUserOptions);

     // Forward frame-milestone notifications to our local handler.
     cam.FrameMilestoneEvent += new Capture.OnFrameMilestoneEvent(OnFrameMilestoneEvent);
 }
 /// <summary>
 /// UI handler: starts a PNG capture and waits for it to complete.
 /// (async void is acceptable here because this is a top-level event handler.)
 /// </summary>
 private async void CaptureImage(object sender, RoutedEventArgs e)
 {
     // Kick off the capture, then await the pending operation.
     var pending = Capture.Start(Models.CaptureMode.PNG);
     await pending;
 }
 /// <summary>
 /// Verifies that Capture.Get rejects a null id with ArgumentNullException.
 /// </summary>
 public void CaptureNullIdTest()
 {
     var context = new APIContext("token");
     TestingUtil.AssertThrownException<System.ArgumentNullException>(() => Capture.Get(context, null));
 }
Example #51
0
        /// <summary>
        /// Starts the sound-card codec: locates the configured playback and capture
        /// devices, configures a 12 kHz / 16-bit / mono PCM capture buffer and starts
        /// capturing with looping enabled.
        /// </summary>
        /// <param name="strFault">On failure, receives a description of the error.</param>
        /// <returns>true when both devices were found and capture started.</returns>
        public bool StartCodec(ref string strFault)
        {
            bool functionReturnValue = false;
            // This delay is necessary for reliable startup following a StopCodec
            Thread.Sleep(100);
            lock (objCodecLock)
            {
                // 'Now' was a VB artifact; use DateTime.Now explicitly.
                dttLastSoundCardSample = DateTime.Now;
                // Suppress display updates while (re)starting; restored at the end.
                bool blnSpectrumSave = MCB.DisplaySpectrum;
                bool blnWaterfallSave = MCB.DisplayWaterfall;
                System.DateTime dttStartWait = DateTime.Now;
                MCB.DisplayWaterfall = false;
                MCB.DisplaySpectrum = false;
                string[] strCaptureDevices = EnumerateCaptureDevices();
                string[] strPlaybackDevices = EnumeratePlaybackDevices();
                functionReturnValue = false;
                int intPtr = 0;
                // Playback devices
                try {
                    cllPlaybackDevices = new Microsoft.DirectX.DirectSound.DevicesCollection();
                    if (devSelectedPlaybackDevice != null) {
                        devSelectedPlaybackDevice.Dispose();
                        devSelectedPlaybackDevice = null;
                    }

                    // Walk the enumerated devices in step with the name array
                    // (the unused outer 'objDI' declaration was removed — it
                    // collided with this loop variable and did not compile).
                    foreach (DeviceInformation objDI in cllPlaybackDevices) {
                        DeviceDescription objDD = new DeviceDescription(objDI);
                        // was VB-style indexing: strPlaybackDevices(intPtr)
                        if (strPlaybackDevices[intPtr] == MCB.PlaybackDevice) {
                            if (MCB.DebugLog)
                                Logs.WriteDebug("[Main.StartCodec] Setting SelectedPlaybackDevice = " + MCB.PlaybackDevice);
                            devSelectedPlaybackDevice = new Device(objDD.info.DriverGuid);
                            functionReturnValue = true;
                            break;
                        }
                        intPtr += 1;
                    }
                    if (!functionReturnValue) {
                        strFault = "Playback Device setup, Device " + MCB.PlaybackDevice + " not found in Windows enumerated Playback Devices";
                    }
                } catch (Exception ex) {
                    // VB's Err.Number/Err.Description do not exist in C#;
                    // report the exception's HRESULT and message instead.
                    strFault = ex.HResult.ToString() + "/" + ex.Message;
                    Logs.Exception("[StartCodec], Playback Device setup] Err: " + ex.ToString());
                    functionReturnValue = false;
                }
                if (functionReturnValue) {
                    // Capture Device
                    CaptureBufferDescription dscheckboxd = new CaptureBufferDescription();
                    try {
                        functionReturnValue = false;
                        cllCaptureDevices = new CaptureDevicesCollection();
                        for (int i = 0; i < cllCaptureDevices.Count; i++) {
                            // was VB-style indexing: strCaptureDevices(i) / cllCaptureDevices(i)
                            if (MCB.CaptureDevice == strCaptureDevices[i]) {
                                objCaptureDeviceGuid = cllCaptureDevices[i].DriverGuid;
                                devCaptureDevice = new Capture(objCaptureDeviceGuid);
                                // 12000 Hz sample rate, mono, 16-bit PCM
                                stcSCFormat.SamplesPerSecond = 12000;
                                stcSCFormat.Channels = 1;
                                stcSCFormat.BitsPerSample = 16;
                                stcSCFormat.BlockAlign = 2;
                                stcSCFormat.AverageBytesPerSecond = 2 * 12000;
                                stcSCFormat.FormatTag = WaveFormatTag.Pcm;
                                objApplicationNotify = null;
                                objCapture = null;
                                // Set the buffer sizes
                                intCaptureBufferSize = intNotifySize * intNumberRecordNotifications;
                                // Create the capture buffer description; the format
                                // must be set before the buffer is constructed.
                                dscheckboxd.BufferBytes = intCaptureBufferSize;
                                dscheckboxd.Format = stcSCFormat;
                                intNextCaptureOffset = 0;
                                WriteTextToSpectrum("CODEC Start OK", Brushes.LightGreen);
                                // Give the UI ~3 s to show the status message.
                                while (DateTime.Now.Subtract(dttStartWait).TotalSeconds < 3) {
                                    Application.DoEvents();
                                    Thread.Sleep(100);
                                }
                                objCapture = new CaptureBuffer(dscheckboxd, devCaptureDevice);
                                InititializeNotifications();
                                // start with looping
                                objCapture.Start(true);
                                InititializeSpectrum(Color.Black);

                                functionReturnValue = true;
                            }
                        }
                        if (!functionReturnValue) {
                            strFault = "Could not find DirectSound capture device " + MCB.CaptureDevice.ToUpper();
                        }
                    } catch (Exception ex) {
                        strFault = ex.HResult.ToString() + "/" + ex.Message;
                        Logs.Exception("[Main.StartCodec] Err: " + ex.ToString());
                        functionReturnValue = false;
                    }
                }

                if (functionReturnValue) {
                    if (MCB.DebugLog)
                        Logs.WriteDebug("[Main.StartCodec] Successful start of codec");
                    objProtocol.ARDOPProtocolState = ProtocolState.DISC;
                } else {
                    if (MCB.DebugLog)
                        Logs.WriteDebug("[Main.StartCodec] CODEC Start Failed");
                    WriteTextToSpectrum("CODEC Start Failed", Brushes.Red);
                    objProtocol.ARDOPProtocolState = ProtocolState.OFFLINE;
                    while (DateTime.Now.Subtract(dttStartWait).TotalSeconds < 3) {
                        Application.DoEvents();
                        Thread.Sleep(100);
                    }
                    // Retry the codec start in 5 s.
                    tmrStartCODEC.Interval = 5000;
                    tmrStartCODEC.Start();
                }
                InititializeSpectrum(Color.Black);
                MCB.DisplayWaterfall = blnWaterfallSave;
                MCB.DisplaySpectrum = blnSpectrumSave;
            }
            return functionReturnValue;
        }
        /// <summary>
        /// Skin-detection demo: reads frames from a camera (or a video file when
        /// options.Video is set), thresholds skin-coloured pixels in HSV space,
        /// cleans the mask morphologically, and displays the result until the
        /// 'q' key is pressed or the video ends.
        /// </summary>
        /// <param name="options">Options whose Video property optionally names a video file.</param>
        public static void Run(Options options)
        {
            //In order to playback video opencv_ffmpeg*.dll must be found, so copy
            //the architecture-appropriate native DLLs next to the executable.
            string includePath = Environment.Is64BitProcess ? @".\x64" : @".\x86";

            foreach (string file in Directory.EnumerateFiles(includePath, "*.dll"))
            {
                File.Copy(file, Path.GetFileName(file), true);
            }

            //define the upper and lower boundaries of the HSV pixel
            //intensities to be considered 'skin'
            var lower = new Hsv(0, 48, 80);
            var upper = new Hsv(20, 255, 255);

            //if a video path was not supplied, grab the reference to the default
            //camera; otherwise, load the video
            Capture capture = string.IsNullOrEmpty(options.Video)
                ? new Capture(CaptureType.Any)
                : new Capture(Path.GetFullPath(options.Video));

            using (capture)
            {
                //keep looping over the frames in the video
                while (true)
                {
                    //grab the current frame (QueryFrame returns null when no
                    //frame is available)
                    //bool grabbed = capture.Grab();
                    //capture.Retrieve()
                    using (Image <Bgr, byte> frame = capture.QueryFrame()?.ToImage <Bgr, byte>())
                    {
                        bool grabbed = frame != null;
                        //if we are viewing a video and we did not grab a
                        //frame, then we have reached the end of the video;
                        //for a live camera, just try again
                        if (!grabbed || frame.Width == 0 || frame.Height == 0)
                        {
                            if (!string.IsNullOrEmpty(options.Video))
                            {
                                break;
                            }
                            continue;
                        }

                        //resize the frame, convert it to the HSV color space,
                        //and determine the HSV pixel intensities that fall into
                        //the specified upper and lower boundaries
                        using (Image <Bgr, byte> resizedFrame = ImageUtil.Resize(frame, width: 400))
                            using (Image <Hsv, byte> converted = resizedFrame.Convert <Hsv, byte>())
                                using (Image <Gray, byte> skinMask = converted.InRange(lower, upper))
                                {
                                    //apply a series of erosions and dilations to the mask
                                    //using an elliptical kernel (both operate in place
                                    //on skinMask)
                                    using (Mat kernel = CvInvoke.GetStructuringElement(ElementShape.Ellipse, new Size(11, 11), Point.Empty))
                                    {
                                        CvInvoke.Erode(skinMask, skinMask, kernel, new Point(-1, -1), 2, BorderType.Constant,
                                                       CvInvoke.MorphologyDefaultBorderValue);
                                        CvInvoke.Dilate(skinMask, skinMask, kernel, new Point(-1, -1), 2, BorderType.Constant,
                                                        CvInvoke.MorphologyDefaultBorderValue);
                                    }
                                    //blur the mask to help remove noise, then apply the
                                    //mask to the frame
                                    CvInvoke.GaussianBlur(skinMask, skinMask, new Size(3, 3), 0);

                                    Image <Bgr, byte> skin = resizedFrame.And(resizedFrame, skinMask);

                                    //show the skin in the image along with the mask
                                    //NOTE(review): the "mask" window is given `skin`
                                    //(the masked colour image), not `skinMask` itself —
                                    //confirm the window labels/arguments are intended.
                                    CvInvoke.Imshow("images", resizedFrame);
                                    CvInvoke.Imshow("mask", skin);

                                    //if the 'q' key is pressed, stop the loop
                                    if ((CvInvoke.WaitKey(1) & 0xff) == 'q')
                                    {
                                        break;
                                    }
                                }
                    }
                }
            }

            CvInvoke.DestroyAllWindows();
        }
Example #53
0
 /// <summary>
 /// Creates the audio capture device from the first available DirectSound
 /// capture device. Shows a warning and returns false when none exists.
 /// (Method name typo "Captuer" kept — callers depend on it.)
 /// </summary>
 private bool CreateCaptuerDevice()
 {
     // First, enumerate the capture devices available on this machine.
     var availableDevices = new CaptureDevicesCollection();

     // No device present: warn the user and bail out.
     if (availableDevices.Count == 0)
     {
         MessageBox.Show("当前没有可用于音频捕捉的设备", "系统提示");
         return false;
     }

     // Build the capture object from the GUID of the first enumerated device.
     capture = new Capture(availableDevices[0].DriverGuid);
     return true;
 }
Example #54
0
 // Add a student: open the camera and process frames whenever the UI is idle.
 private void Add_Student2_Click(object sender, EventArgs e)
 {
     camera = new Capture();
     camera.QueryFrame(); // prime the device with an initial frame
     Application.Idle += FrameProcedure_2;
 }
 /// <summary>
 /// Unity lifecycle hook: builds the filtering parameters, then either opens
 /// the camera (polling a frame every 40 ms, capped at 25 fps) or, when no
 /// camera is detected, activates the warning object.
 /// </summary>
 private void Awake()
 {
     filteringParameters = new FilteringParameters(new UnityAppSettingsManager());

     // No camera available: surface the warning and stop here.
     if (!CamerasDetected())
     {
         NoCameraWarning.SetActive(true);
         return;
     }

     // Camera present: open it and start the frame-polling timer.
     capture = new Capture();
     newFrame = true;
     cameraTimer = new Timer(40);
     cameraTimer.Elapsed += OnCameraFrame;
     capture.SetCaptureProperty(CAP_PROP.CV_CAP_PROP_FPS, 25);
     ChangeEditedRange(0);
 }
Example #56
0
 // Start attendance: open the camera and process frames whenever the UI is idle.
 public void Attendance_Click(object sender, EventArgs e)
 {
     camera = new Capture();
     camera.QueryFrame(); // prime the device with an initial frame
     Application.Idle += FrameProcedure_1;
 }
Example #57
0
 /// <summary>
 /// Parses the text of a regex <see cref="Capture"/> as an integer.
 /// </summary>
 /// <param name="capture">Capture whose Value holds a decimal integer.</param>
 /// <returns>The captured text parsed as an int.</returns>
 /// <exception cref="FormatException">The captured text is not a valid integer.</exception>
 /// <exception cref="OverflowException">The value does not fit in an int.</exception>
 public static int ValueI(this Capture capture)
 {
     // Parse with the invariant culture so machine-matched text does not
     // depend on the current thread's regional settings (CA1305).
     return(int.Parse(capture.Value, System.Globalization.CultureInfo.InvariantCulture));
 }
Example #58
0
        /// <summary>
        /// Initialises the video capture device: first tries the device/compressor
        /// saved in Settings, then falls back to the first detected input device
        /// (persisting that choice), and finally hooks the frame callback and
        /// starts grabbing.
        /// </summary>
        /// <returns>true when a device was opened and frame grabbing was initialised.</returns>
        /// BUG FIX: the original declared two flags (isSuccess / isSucess), set
        /// only the misspelled one, and returned the never-assigned isSuccess —
        /// so it always returned false. A single flag is used throughout now.
        static bool initCamera()
        {
            Capture capture = null;
            Filters filters = null;
            bool    isSuccess = false;

            //Init with config param loaded from DB
            try
            {
                filters = new Filters();
                //Load config from DB
                string videoDevice     = Settings.Default.vDevice;
                string videoCompressor = Settings.Default.vCompress;
                if (!string.IsNullOrEmpty(videoDevice))
                {
                    capture = new Capture(new Filter(videoDevice));
                    if (!string.IsNullOrEmpty(videoCompressor))
                    {
                        capture.VideoCompressor = new Filter(videoCompressor);
                    }
                    isSuccess = true;
                }
            }
            catch (Exception ex)
            {
                Console.WriteLine(ex.ToString());
                isSuccess = false;
            }

            if (!isSuccess)
            {
                //Auto reinit camera with default param
                try
                {
                    if (filters != null)
                    {
                        capture = new Capture(filters.VideoInputDevices[0]);
                        //Save new config param so the next start finds the device
                        Settings.Default.vDevice = capture.VideoDevice.MonikerString;
                        if (capture.VideoCompressor != null)
                        {
                            Settings.Default.vCompress = capture.VideoCompressor.MonikerString;
                        }
                        Settings.Default.Save();
                        isSuccess = true;
                    }
                }
                catch (Exception ex)
                {
                    Console.WriteLine(ex.ToString());
                    isSuccess = false;
                }
            }

            //init for capture frame: hook the frame-completed callback and start grabbing
            if (isSuccess)
            {
                try
                {
                    if (capture != null)
                    {
                        capture.FrameEvent2 += new Capture.HeFrame(CaptureComplete);
                        capture.GrapImg();
                    }
                }
                catch (Exception ex)
                {
                    Console.WriteLine(ex.ToString());
                }
            }
            return(isSuccess);
        }
 /// <summary>
 /// One-shot Loaded handler: detaches itself, then registers a Capture for
 /// the TextBox that just finished loading.
 /// </summary>
 static void TextBoxLoaded(object sender, RoutedEventArgs routedEventArgs)
 {
     var box = (TextBox)sender;

     // Unsubscribe immediately so this runs at most once per TextBox.
     box.Loaded -= TextBoxLoaded;

     // Track (or replace) the capture associated with this TextBox.
     _associations[box] = new Capture(box);
 }
Example #60
0
 /// <summary>
 /// Advances the read position past the given capture.
 /// </summary>
 /// <param name="capture">The capture whose end marks the new read position.</param>
 protected void Ascend(Capture capture)
 {
     // The first character after the captured text becomes the new position.
     int endOfCapture = capture.Index + capture.Length;
     this.Position = endOfCapture;
 }