Example No. 1
        public AForgeCamera(string preferedCameraName = null, int preferedWidth = 640, int preferedHeight = 480, DownSample downSampleVideo = DownSample.None)
        {
            this.downSampleVideo = downSampleVideo;

            if (preferedCameraName != null)
            {
                FilterInfoCollection videoDevices = new FilterInfoCollection(FilterCategory.VideoInputDevice);
                foreach (FilterInfo info in videoDevices)
                {
                    if (info.Name.Contains(preferedCameraName))
                    {
                        videoCaptureDevice = new VideoCaptureDevice(info.MonikerString);
                        videoCaptureDevice.DesiredFrameSize = new Size(preferedWidth, preferedHeight);
                        break;
                    }
                }
            }

            if (videoCaptureDevice == null)
            {
                VideoCaptureDeviceForm form = new VideoCaptureDeviceForm();
                if (form.ShowDialog(null) == DialogResult.OK)
                {
                    // create video source
                    videoCaptureDevice = form.VideoDevice;
                }
            }

            if (videoCaptureDevice != null)
            {
                //videoCaptureDevice.DesiredFrameRate = 5;
                //videoCaptureDevice.ProvideSnapshots = true;
                //videoCaptureDevice.DesiredSnapshotSize = new Size(preferedWidth, preferedHeight);
                //videoCaptureDevice.SnapshotFrame += new NewFrameEventHandler(videoCaptureDevice_SnapshotFrame);

                asyncCopiedVideoImage = new ImageBuffer(videoCaptureDevice.DesiredFrameSize.Width, videoCaptureDevice.DesiredFrameSize.Height, 32, new BlenderBGRA());
                if (downSampleVideo == DownSample.HalfSize)
                {
                    imageForExternalUse = new ImageBuffer(videoCaptureDevice.DesiredFrameSize.Width / 2, videoCaptureDevice.DesiredFrameSize.Height / 2, 32, new BlenderBGRA());
                }
                else
                {
                    imageForExternalUse = new ImageBuffer(videoCaptureDevice.DesiredFrameSize.Width, videoCaptureDevice.DesiredFrameSize.Height, 32, new BlenderBGRA());
                }
                // Subscribe to NewFrame before starting so early frames are not missed.
                videoCaptureDevice.NewFrame += new NewFrameEventHandler(source_NewFrame);
                videoCaptureDevice.Start();
            }
        }
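A minimal caller, for context, might look like the sketch below. The device name, resolution, and down-sample value are illustrative only; the constructor signature is the one shown above, and everything else is an assumption.

    // Illustrative only: picks the first attached device whose name contains "USB"
    // and asks for 1280x720 frames, exposed externally at half size.
    AForgeCamera camera = new AForgeCamera("USB", 1280, 720, DownSample.HalfSize);

    // With no preferred name (the default), the constructor skips the search and
    // shows AForge's VideoCaptureDeviceForm so the user can pick a device.
    AForgeCamera pickedByUser = new AForgeCamera();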
Example No. 2
    /// <summary>
    /// Recreates the volumetric-light render targets at the camera's current
    /// resolution, sizing the half buffers by the selected down-sample factor.
    /// </summary>
    void ChangeResolution()
    {
        _currentResolution = downSample;
        int width  = _camera.pixelWidth;
        int height = _camera.pixelHeight;

        if (_volumeLightTexture != null)
        {
            Destroy(_volumeLightTexture);
        }

        _volumeLightTexture            = new RenderTexture(width, height, 0, RenderTextureFormat.ARGBHalf);
        _volumeLightTexture.name       = "VolumeLightBuffer";
        _volumeLightTexture.filterMode = FilterMode.Point;

        if (_halfDepthBuffer != null)
        {
            Destroy(_halfDepthBuffer);
        }
        if (_halfVolumeLightTexture != null)
        {
            Destroy(_halfVolumeLightTexture);
        }

        _halfVolumeLightTexture            = new RenderTexture(width / (int)_currentResolution, height / (int)_currentResolution, 0, RenderTextureFormat.ARGBHalf);
        _halfVolumeLightTexture.name       = "VolumeLightBufferHalf";
        _halfVolumeLightTexture.filterMode = FilterMode.Point;

        _halfDepthBuffer      = new RenderTexture(width / (int)_currentResolution, height / (int)_currentResolution, 0, RenderTextureFormat.RFloat);
        _halfDepthBuffer.name = "VolumeLightHalfDepth";
        _halfDepthBuffer.Create();
        _halfDepthBuffer.filterMode = FilterMode.Point;
        UpdateMacroKeyword();
    }
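The divisions by (int)_currentResolution only work out if the down-sample enum's underlying integer values are the divisors themselves. The enum is not part of the snippet, so the shape below is purely an assumption:

    // Assumed shape of the down-sample enum used above (values act as divisors);
    // the real definition may differ.
    public enum VolumetricResolution
    {
        Full    = 1,   // width / 1, height / 1
        Half    = 2,   // width / 2, height / 2
        Quarter = 4    // width / 4, height / 4
    }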
Example No. 3
 void Awake()
 {
     envelope   = GetComponent<Envelope>();
     oscillator = GetComponent<Oscillator>();
     downSample = GetComponent<DownSample>();
     for (int i = 0; i < NUM_DELAYS; i++)
     {
         int delayLength = (int)(sample_freq / 2f) * (i + 1);
         delays[i] = new Delay(delayLength);
     }
     envelope.amplifier = volume;
 }
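With sample_freq samples per second, each delay line is (i + 1) * 0.5 seconds long. The Delay type itself is not shown; a minimal fixed-length delay line consistent with new Delay(delayLength) could look like the sketch below, where the Process method name is a guess:

    // Hypothetical minimal delay line matching `new Delay(delayLength)`;
    // the real class may expose different members.
    public class Delay
    {
        private readonly float[] buffer;
        private int writeIndex;

        public Delay(int lengthInSamples)
        {
            buffer = new float[lengthInSamples];
        }

        // Writes one sample and returns the sample from lengthInSamples ago.
        public float Process(float input)
        {
            float delayed = buffer[writeIndex];
            buffer[writeIndex] = input;
            writeIndex = (writeIndex + 1) % buffer.Length;
            return delayed;
        }
    }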
Example No. 4
    void ReChangeResolution()
    {
        _currentResolution = downSample;
        int width  = _camera.pixelWidth;
        int height = _camera.pixelHeight;

        _halfVolumeLightTexture.Release();
        _halfVolumeLightTexture.width  = width / (int)_currentResolution;
        _halfVolumeLightTexture.height = height / (int)_currentResolution;

        _halfDepthBuffer.Release();
        _halfDepthBuffer.width  = width / (int)_currentResolution;
        _halfDepthBuffer.height = height / (int)_currentResolution;
        _halfDepthBuffer.Create();

        _volumeLightTexture.Release();
        _volumeLightTexture.width  = width;
        _volumeLightTexture.height = height;

        UpdateMacroKeyword();
    }
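Unlike ChangeResolution above, this version resizes the existing render textures in place: Release, set the new width and height, and let Unity recreate them (explicitly for _halfDepthBuffer, lazily on next use for the others). A caller would typically guard it with a per-frame check like the sketch below; the check itself is an assumption, not part of the snippet:

    // Hypothetical per-frame guard: resize only when the camera size or the
    // requested down-sample factor has actually changed.
    void UpdateRenderTargets()
    {
        bool factorChanged = _currentResolution != downSample;
        bool sizeChanged   = _volumeLightTexture != null &&
                             (_volumeLightTexture.width  != _camera.pixelWidth ||
                              _volumeLightTexture.height != _camera.pixelHeight);

        if (factorChanged || sizeChanged)
        {
            ReChangeResolution();
        }
    }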