Example No. 1
        public RoverRobot(ExpansionPlate expansionPlate, IFrameSource<Image<Rgb24>> camera, RoverRobotConfiguration configuration)
        {
            using var operation = Log.OnEnterAndExit();
            operation.Info("configuring platform ", configuration.LeftMotorPort, configuration.RightMotorPort, configuration.PanMotorPort, configuration.TiltMotorPort);

            ExpansionPlate = expansionPlate ?? throw new ArgumentNullException(nameof(expansionPlate));
            Camera         = camera;
            DashBoard      = new RoverDashboard(expansionPlate.PiTop4Board.Display);

            TiltController = new PanTiltController(
                ExpansionPlate.GetOrCreateServoMotor(configuration.PanMotorPort),
                ExpansionPlate.GetOrCreateServoMotor(configuration.TiltMotorPort)
                );

            MotionComponent = new SteeringMotorController(
                ExpansionPlate.GetOrCreateEncoderMotor(configuration.LeftMotorPort),
                ExpansionPlate.GetOrCreateEncoderMotor(configuration.RightMotorPort)
                );

            FrontRightLed = ExpansionPlate.GetOrCreateLed(DigitalPort.D3, Color.Green);
            FrontLeftLed  = ExpansionPlate.GetOrCreateLed(DigitalPort.D4, Color.Green);

            BackRightLed = ExpansionPlate.GetOrCreateLed(DigitalPort.D0, Color.Red);
            BackLeftLed  = ExpansionPlate.GetOrCreateLed(DigitalPort.D5, Color.Red);

            UltrasoundFront = ExpansionPlate.GetOrCreateUltrasonicSensor(DigitalPort.D7);
            UltrasoundBack  = ExpansionPlate.GetOrCreateUltrasonicSensor(DigitalPort.D6);

            Sound = ExpansionPlate.GetOrCreateSoundSensor(AnaloguePort.A3);

            FrontRightLed.Off();
            FrontLeftLed.Off();
            BackRightLed.Off();
            BackLeftLed.Off();
        }
Example No. 2
        /// <summary>Begins the decoding process.</summary>
        /// <param name="source">The source.</param>
        /// <exception cref="NotSupportedException">Only Layer 3 Audio is supported!.</exception>
        /// <exception cref="Exception">Decoding already started!.</exception>
        public override void BeginDecode(IFrameSource source)
        {
            if (m_FrameDecoder != null)
            {
                Close();
            }

            SourceName = source.Name;
            m_Source   = source;

            // get first audio frame
            MP3AudioFrame l_MP3Frame = ReadNextAudioFrame();

            if (l_MP3Frame.Header.Layer != MP3AudioFrameLayer.Layer3)
            {
                throw new NotSupportedException("Source " + SourceName + ": Only Layer 3 Audio is supported!");
            }

            // prepare decoder
            m_OutputChannels = l_MP3Frame.Header.ChannelCount;
            float[] isEqualizerFactors = m_Equalizer.GetFactors();
            m_Filter1 = new MP3AudioSynthesisFilter(0, 32000.0f, isEqualizerFactors);
            if (m_OutputChannels == 2)
            {
                m_Filter2 = new MP3AudioSynthesisFilter(1, 32000.0f, isEqualizerFactors);
            }

            m_SamplingRate = l_MP3Frame.Header.SamplingRate;
            m_OutputBuffer = new MP3AudioStereoBuffer(m_SamplingRate);
            m_FrameDecoder = new MP3AudioLayerIIIDecoder(l_MP3Frame.Header, m_Filter1, m_Filter2, m_OutputBuffer, (int)MP3AudioOutputMode.Both);

            DecodeFrame(l_MP3Frame);
        }
Example No. 3
        public FrameSourceSampleForm(IFrameSource source, IFrameData data)
        {
            InitializeComponent();
            toolStripLabelUsage.Text = "";

            frameSource                    = source;
            frameData                      = data;
            frameSource.OnNewFrame        += OnNewFrame;
            frameSource.OnWorkerException += FrameSource_OnWorkerException;
        }
Example No. 4
        public FrameToSampleConverter([NotNull] IFrameSource source)
        {
            if (source == null)
            {
                throw new ArgumentNullException("source");
            }

            _source = source;
            _temp   = new float[source.FrameSize * source.WaveFormat.Channels];
        }
Example No. 5
        public static IProducer<T> CreateComponent<T>(this IFrameSource<T> frameSource, Pipeline pipeline, TimeSpan samplingInterval)
        {
            var initialFrame = frameSource.GetFrame();

            return Generators.Sequence(pipeline, initialFrame, _ =>
            {
                var frame = frameSource.GetFrame();
                return frame;
            }, samplingInterval);
        }
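Example No. 5 adapts an IFrameSource<T> into a Microsoft.Psi producer by polling GetFrame() on a fixed interval through Generators.Sequence. The sketch below shows one way such a component could be consumed in a pipeline; it is illustrative only, and the CounterFrameSource type is hypothetical (it assumes IFrameSource<T> declares nothing beyond GetFrame(), and that the CreateComponent extension from Example No. 5 is in scope).

        using System;
        using Microsoft.Psi;

        // Hypothetical frame source, for illustration only: each "frame" is an incrementing integer.
        // Assumes IFrameSource<T> exposes just GetFrame(), as used by the extension method above.
        public class CounterFrameSource : IFrameSource<int>
        {
            private int _count;

            public int GetFrame() => _count++;
        }

        public static class FrameSourceDemo
        {
            public static void Main()
            {
                using (var pipeline = Pipeline.Create())
                {
                    // Poll the source roughly 30 times per second.
                    var frames = new CounterFrameSource().CreateComponent(pipeline, TimeSpan.FromMilliseconds(33));

                    // Print each frame as it is produced.
                    frames.Do(frame => Console.WriteLine($"frame = {frame}"));

                    pipeline.RunAsync();
                    Console.ReadLine(); // keep the pipeline alive until Enter is pressed
                }
            }
        }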
Example No. 6
        public void OnImageCaptured(IFrameSource frameSource, Frame frame, double fps)
        {
            _latestFrame = frame.Image;
            histograms   = BitmapConverting.getHistoGrams(_latestFrame, 8, 8);

            BitmapConverting.markRedSectors(_latestFrame, histograms, 8, 8);
            pictureBoxDisplay.Invalidate();

            frameIndex++;
        }
Example No. 7
        /// <summary>
        /// Encodes a specified range of frames obtained from the specified frame source.
        /// </summary>
        /// <param name="frameSource"></param>
        /// <param name="start"></param>
        /// <param name="count"></param>
        public void Encode(IFrameSource frameSource, long start, long count)
        {
            Contract.Requires(frameSource != null);

            var end = start + count;

            for (long i = start; i < end; ++i)
            {
                this.EncodeFrame(frameSource.GetFrame(i));
            }
        }
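Example No. 8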
        /// <summary>
        /// Initializes a new instance of the KinectFacialRecognitionEngine class
        /// </summary>
        public KinectFacialRecognitionEngine(KinectSensor kinect, IFrameSource frameSource)
        {
            this.Kinect = kinect;
            this.ProcessingMutex = new object();
            this.ProcessingEnabled = true;
            this.Processor = new FacialRecognitionProcessor();
            this.frameSource = frameSource;
            this.frameSource.FrameDataUpdated += this.FrameSource_FrameDataUpdated;

            this.recognizerWorker = new BackgroundWorker();
            this.recognizerWorker.DoWork += this.RecognizerWorker_DoWork;
            this.recognizerWorker.RunWorkerCompleted += this.RecognizerWorker_RunWorkerCompleted;
        }
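Example No. 9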
        /// <summary>
        /// Initializes a new instance of the KinectFacialRecognitionEngine class
        /// </summary>
        public KinectFacialRecognitionEngine(KinectSensor kinect, IFrameSource frameSource)
        {
            this.Kinect                        = kinect;
            this.ProcessingMutex               = new object();
            this.ProcessingEnabled             = true;
            this.Processor                     = new FacialRecognitionProcessor();
            this.frameSource                   = frameSource;
            this.frameSource.FrameDataUpdated += this.FrameSource_FrameDataUpdated;

            this.recognizerWorker                     = new BackgroundWorker();
            this.recognizerWorker.DoWork             += this.RecognizerWorker_DoWork;
            this.recognizerWorker.RunWorkerCompleted += this.RecognizerWorker_RunWorkerCompleted;
        }
Example No. 10
        /// <summary>
        /// Configure an IFrameSource from a StorageFile or MediaCapture instance, optionally producing frames in a specified format
        /// </summary>
        /// <param name="source"></param>
        /// <param name="inputImageDescriptor"></param>
        /// <returns></returns>
        private async Task ConfigureFrameSourceAsync(object source, ISkillFeatureImageDescriptor inputImageDescriptor = null)
        {
            await m_lock.WaitAsync();

            {
                // Reset bitmap rendering component
                UIProcessedPreview.Source = null;
                m_renderTargetFrame       = null;
                m_processedBitmapSource   = new SoftwareBitmapSource();
                UIProcessedPreview.Source = m_processedBitmapSource;

                // Clean up previous frame source
                if (m_frameSource != null)
                {
                    m_frameSource.FrameArrived -= FrameSource_FrameAvailable;
                    var disposableFrameSource = m_frameSource as IDisposable;
                    if (disposableFrameSource != null)
                    {
                        // Lock disposal based on frame source consumers
                        disposableFrameSource.Dispose();
                    }
                }

                // Create new frame source and register a callback if the source fails along the way
                m_frameSource = await FrameSourceFactory.CreateFrameSourceAsync(
                    source,
                    (sender, message) =>
                {
                    NotifyUser(message);
                },
                    inputImageDescriptor);

                // TODO: Workaround for a bug in ObjectDetectorBinding when binding consecutively VideoFrames with Direct3DSurface and SoftwareBitmap
                await m_skillWrappers[0].InitializeSkillAsync(m_skillWrappers[0].Skill.Device);

                // Set additional input features as exposed in the UI
                await m_skillWrappers[0].Binding["InputObjectKindFilterList"].SetFeatureValueAsync(m_objectKindFilterList);
                await m_skillWrappers[0].Binding["InputConfidenceThreshold"].SetFeatureValueAsync((float)UIConfidenceThresholdControl.Value);
            }
            m_lock.Release();

            // If we obtained a valid frame source, start it
            if (m_frameSource != null)
            {
                m_frameSource.FrameArrived += FrameSource_FrameAvailable;
                await m_frameSource.StartAsync();
            }
        }
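One detail worth noting in this example (and in the similar ConfigureFrameSourceAsync variants that follow): m_lock.Release() sits outside a try/finally, so an exception thrown while reconfiguring the frame source would leave the semaphore held. A defensive variant of the same locking pattern is sketched below; the FrameSourceHost type is hypothetical and stands in for the sample's page class.

        using System.Threading;
        using System.Threading.Tasks;

        public class FrameSourceHost
        {
            // Hypothetical host type, shown only to illustrate a try/finally variant of the locking pattern above.
            private readonly SemaphoreSlim m_lock = new SemaphoreSlim(1);

            private async Task ConfigureSafelyAsync()
            {
                await m_lock.WaitAsync();
                try
                {
                    // ... tear down the previous frame source and create the new one, as in the example above ...
                }
                finally
                {
                    // Released even if frame source creation throws.
                    m_lock.Release();
                }
            }
        }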
Example No. 11
        /// <summary>
        /// Configure an IFrameSource from a StorageFile or MediaCapture instance, optionally producing frames in a specified format
        /// </summary>
        /// <param name="source"></param>
        /// <param name="inputImageDescriptor"></param>
        /// <returns></returns>
        private async Task ConfigureFrameSourceAsync(object source, ISkillFeatureImageDescriptor inputImageDescriptor = null)
        {
            await m_lock.WaitAsync();

            {
                // Reset bitmap rendering component
                UIProcessedPreview.Source = null;
                m_renderTargetFrame       = null;
                m_processedBitmapSource   = new SoftwareBitmapSource();
                UIProcessedPreview.Source = m_processedBitmapSource;

                // Clean up previous frame source
                if (m_frameSource != null)
                {
                    m_frameSource.FrameArrived -= FrameSource_FrameAvailable;
                    var disposableFrameSource = m_frameSource as IDisposable;
                    if (disposableFrameSource != null)
                    {
                        // Lock disposal based on frame source consumers
                        disposableFrameSource.Dispose();
                    }
                }

                // Create new frame source and register a callback if the source fails along the way
                m_frameSource = await FrameSourceFactory.CreateFrameSourceAsync(
                    source,
                    (sender, message) =>
                {
                    NotifyUser(message);
                },
                    inputImageDescriptor);

                // Clear existing trackers
                m_frameCounter = 0;
                m_trackerBindings.Clear();
                m_trackerHistories.Clear();
            }
            m_lock.Release();

            // If we obtained a valid frame source, start it
            if (m_frameSource != null)
            {
                m_frameSource.FrameArrived += FrameSource_FrameAvailable;
                await m_frameSource.StartAsync();
            }
        }
Example No. 12
        /// <summary>
        /// Conditionally dispose old frame source and create new frame source
        /// </summary>
        /// <param name="source"></param>
        /// <returns></returns>
        private async Task ConfigureFrameSourceAsync(object source)
        {
            await m_lock.WaitAsync();

            {
                // Dispose old frame source
                if (m_frameSource != null)
                {
                    m_frameSource.FrameArrived -= FrameSource_FrameArrived;
                    var disposableFrameSource = m_frameSource as IDisposable;
                    if (disposableFrameSource != null)
                    {
                        disposableFrameSource.Dispose();
                    }
                }

                // Create new frame source
                m_frameSource = await FrameSourceFactory.CreateFrameSourceAsync(source, (sender, message) =>
                {
                    NotifyUser(message, NotifyType.ErrorMessage);
                });

                // If we obtained a valid frame source, hook a frame callback
                if (m_frameSource != null)
                {
                    m_frameSource.FrameArrived += FrameSource_FrameArrived;
                    await Dispatcher.RunAsync(Windows.UI.Core.CoreDispatcherPriority.Normal, () =>
                    {
                        UIPlayButton.IsEnabled = true;
                    });
                }
                else
                {
                    await Dispatcher.RunAsync(Windows.UI.Core.CoreDispatcherPriority.Normal, () =>
                    {
                        UIPlayButton.IsEnabled = false;
                    });
                }
            }
            m_lock.Release();

            // Update playback button state. Note: TogglePlaybackState acquires m_lock, so it must be called outside the lock
            await Dispatcher.RunAsync(Windows.UI.Core.CoreDispatcherPriority.Normal, () => TogglePlaybackState(false));

            NotifyUser("Frame source configured, ready to begin");
        }
Example No. 13
 // Awake is called when the script instance is being loaded
 void Awake()
 {
     if (_useCamera)
     {
         WebCamDevice[] cams = WebCamTexture.devices;
         if (cams.Length > 0)
         {
             WebCamTexture tmpWebCamTexture = new WebCamTexture(cams[0].name);
             _frameSource = new CameraFrame(tmpWebCamTexture);
             tmpWebCamTexture.Play();
             _cameraIsAvailable = true;
         }
     }
     else
     {
         _frameSource = new StaticImage(string.Concat(Application.dataPath, "/Resources/Images/frame.png"), 640, 512);
     }
 }
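Example No. 14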
        /// <summary>
        /// Configure an IFrameSource from a StorageFile or MediaCapture instance, optionally producing frames in a specified format
        /// </summary>
        /// <param name="source"></param>
        /// <param name="inputImageDescriptor"></param>
        /// <returns></returns>
        private async Task ConfigureFrameSourceAsync(object source, ISkillFeatureImageDescriptor inputImageDescriptor = null)
        {
            await m_lock.WaitAsync();

            {
                // Reset bitmap rendering component
                UIProcessedPreview.Source = null;
                m_renderTargetFrame       = null;
                m_processedBitmapSource   = new SoftwareBitmapSource();
                UIProcessedPreview.Source = m_processedBitmapSource;

                // Clean up previous frame source
                if (m_frameSource != null)
                {
                    m_frameSource.FrameArrived -= FrameSource_FrameAvailable;
                    var disposableFrameSource = m_frameSource as IDisposable;
                    if (disposableFrameSource != null)
                    {
                        // Lock disposal based on frame source consumers
                        disposableFrameSource.Dispose();
                    }
                }

                // Create new frame source and register a callback if the source fails along the way
                m_frameSource = await FrameSourceFactory.CreateFrameSourceAsync(
                    source,
                    (sender, message) =>
                {
                    NotifyUser(message);
                },
                    inputImageDescriptor);

                // TODO: Workaround for a bug in ObjectDetectorBinding when binding consecutively VideoFrames with Direct3DSurface and SoftwareBitmap
                m_binding = await m_skill.CreateSkillBindingAsync() as ObjectDetectorBinding;
            }
            m_lock.Release();

            // If we obtained a valid frame source, start it
            if (m_frameSource != null)
            {
                m_frameSource.FrameArrived += FrameSource_FrameAvailable;
                await m_frameSource.StartAsync();
            }
        }
Example No. 15
        /// <summary>Closes the underlying stream and calls Dispose.</summary>
        public void Close()
        {
            if (m_Disposed)
            {
                throw new ObjectDisposedException(LogSourceName);
            }

            if (m_Initialized)
            {
                M123.Deinitialize();
                m_Initialized = false;
            }
            if (m_Source != null)
            {
                m_Source.Close();
                m_Source           = null;
                m_DecodeFifoBuffer = null;
            }
        }
Example No. 16
        /// <summary>Starts the decoding process.</summary>
        /// <param name="source">The source.</param>
        /// <exception cref="InvalidOperationException">Source: Decoding already started!.</exception>
        public void BeginDecode(IFrameSource source)
        {
            if (disposed)
            {
                throw new ObjectDisposedException(LogSourceName);
            }

            if (initialized)
            {
                throw new InvalidOperationException(string.Format("Source {0}: Decoding already started!", SourceName));
            }

            if (SourceName != null)
            {
                SourceName = source.Name;
            }

            initialized = true;
            M123.Initialize();

            m_Source = source;

            // open new decoder handle
            M123.RESULT result;
            m_DecoderHandle = M123.SafeNativeMethods.mpg123_new(null, out result);
            M123.CheckResult(result);

            // reset formats
            M123.CheckResult(M123.SafeNativeMethods.mpg123_format_none(m_DecoderHandle));

            // allow all mp3 native samplerates
            var mode = useFloatingPoint ? M123.ENC.FLOAT_32 : M123.ENC.SIGNED_16;

            foreach (var sampleRate in M123.SafeNativeMethods.mpg123_rates())
            {
                M123.CheckResult(M123.SafeNativeMethods.mpg123_format(m_DecoderHandle, new IntPtr(sampleRate), M123.CHANNELCOUNT.STEREO, mode));
            }

            // open feed
            result = M123.SafeNativeMethods.mpg123_open_feed(m_DecoderHandle);
            M123.CheckResult(result);
            m_DecodeFifoBuffer = new FifoBuffer();
        }
Example No. 17
        /// <summary>
        /// Configure an IFrameSource from a StorageFile or MediaCapture instance
        /// </summary>
        /// <param name="source"></param>
        /// <returns></returns>
        private async Task ConfigureFrameSourceAsync(object source)
        {
            await m_lock.WaitAsync();

            {
                // Reset bitmap rendering component
                UIImageViewer.Source     = null;
                m_processedBitmapSource  = new SoftwareBitmapSource();
                UIImageViewer.Source     = m_processedBitmapSource;
                m_bodyRenderer.IsVisible = false;

                // Clean up previous frame source
                if (m_frameSource != null)
                {
                    m_frameSource.FrameArrived -= FrameSource_FrameAvailable;
                    var disposableFrameSource = m_frameSource as IDisposable;
                    if (disposableFrameSource != null)
                    {
                        // Lock disposal based on frame source consumers
                        disposableFrameSource.Dispose();
                    }
                }

                // Create new frame source and register a callback if the source fails along the way
                m_frameSource = await FrameSourceFactory.CreateFrameSourceAsync(source, (sender, message) =>
                {
                    NotifyUser(message);
                });
            }
            m_lock.Release();

            // If we obtained a valid frame source, start it
            if (m_frameSource != null)
            {
                m_frameSource.FrameArrived += FrameSource_FrameAvailable;
                await m_frameSource.StartAsync();
            }
        }
Example No. 18
 private void _frameSource_NewFrame(IFrameSource frameSource, Frame frame, double fps)
 {
     BitMaps.Add(frame.Image);
 }
Example No. 19
 /// <summary>
 /// Asynchronously encodes a specified range of frames obtained from the specified frame source.
 /// </summary>
 /// <param name="frameSource"></param>
 /// <param name="start"></param>
 /// <param name="count"></param>
 /// <param name="userState"></param>
 public void EncodeAsync(IFrameSource frameSource, long start, long count, object userState)
 {
 }
Example No. 20
        /// <summary>
        /// Encodes a specified range of frames obtained from the specified frame source.
        /// </summary>
        /// <param name="frameSource"></param>
        /// <param name="start"></param>
        /// <param name="count"></param>
        public void Encode(IFrameSource frameSource, long start, long count)
        {
            Contract.Requires(frameSource != null);

            var end = start + count;
            for (long i = start; i < end; ++i)
                this.EncodeFrame(frameSource.GetFrame(i));
        }
Example No. 21
 /// <summary>Starts the decoding process.</summary>
 /// <param name="source">The source.</param>
 /// <exception cref="InvalidOperationException">Source: Decoding already started!.</exception>
 public abstract void BeginDecode(IFrameSource source);
Example No. 22
 public void OnImageCaptured(IFrameSource frameSource,
     Frame frame, double fps)
 {
     ImgVideo.Dispatcher.BeginInvoke(
         (Action)(() => ImgVideo.Source = ImageDataConverter.BitmapToBitmapSource(frame.Image)));
 }
Example No. 23
 private void _frameSource_NewFrame(IFrameSource frameSource, Frame frame, double fps)
 {
     BitMaps.Add(frame.Image);
 }
Example No. 24
 public void OnImageCaptured(IFrameSource frameSource,
                             Frame frame, double fps)
 {
     ImgVideo.Dispatcher.BeginInvoke(
         (Action)(() => ImgVideo.Source = ImageDataConverter.BitmapToBitmapSource(frame.Image)));
 }
Example No. 25
 public VolumeRampedFrameSource(IFrameSource source, IVolumeProvider volumeProvider)
 {
     _source         = source;
     _volumeProvider = volumeProvider;
 }
Example No. 26
 public FrameToSampleConverter(IFrameSource source)
 {
     _source = source;
     _temp   = new float[source.FrameSize * source.WaveFormat.Channels];
 }
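Examples No. 4 and No. 26 size the scratch buffer as FrameSize * Channels, i.e. one float per channel per interleaved frame. The check below is illustrative only; the concrete numbers are hypothetical and not taken from any of the sources above.

        // Illustrative only: how the interleaved scratch buffer is sized.
        int frameSize = 480;                               // hypothetical samples per frame block
        int channels = 2;                                  // hypothetical stereo source
        var temp = new float[frameSize * channels];        // 480 * 2 = 960 floats, left/right interleaved
        System.Console.WriteLine(temp.Length);             // prints 960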
Example No. 27
 public FrameManipulator(IFrameSource frameSource, string filename)
 {
     _frameSource = frameSource;
     _filename    = filename;
 }
Example No. 28
        public void OnImageCaptured(IFrameSource frameSource, Frame frame, double fps)
        {
            _latestFrame = frame.Image;
            histograms = BitmapConverting.getHistoGrams(_latestFrame, 8, 8);

            BitmapConverting.markRedSectors(_latestFrame, histograms, 8, 8);
            pictureBoxDisplay.Invalidate();

            frameIndex++;
        }
Example No. 29
 /// <summary>
 /// Asynchronously encodes a specified range of frames obtained from the specified frame source.
 /// </summary>
 /// <param name="frameSource"></param>
 /// <param name="start"></param>
 /// <param name="count"></param>
 /// <param name="userState"></param>
 public void EncodeAsync(IFrameSource frameSource, long start, long count, object userState)
 {
 }
Example No. 30
 public void CaptureFromCamera(IFrameSource<Image> camera)
 {
     CaptureImage = camera.GetFrame;
     Threshold    = 0.9;
 }