public RoverRobot(ExpansionPlate expansionPlate, IFrameSource<Image<Rgb24>> camera, RoverRobotConfiguration configuration)
{
    using var operation = Log.OnEnterAndExit();
    operation.Info("configuring platform ", configuration.LeftMotorPort, configuration.RightMotorPort, configuration.PanMotorPort, configuration.TiltMotorPort);

    ExpansionPlate = expansionPlate ?? throw new ArgumentNullException(nameof(expansionPlate));
    Camera = camera;
    DashBoard = new RoverDashboard(expansionPlate.PiTop4Board.Display);

    TiltController = new PanTiltController(
        ExpansionPlate.GetOrCreateServoMotor(configuration.PanMotorPort),
        ExpansionPlate.GetOrCreateServoMotor(configuration.TiltMotorPort));

    MotionComponent = new SteeringMotorController(
        ExpansionPlate.GetOrCreateEncoderMotor(configuration.LeftMotorPort),
        ExpansionPlate.GetOrCreateEncoderMotor(configuration.RightMotorPort));

    FrontRightLed = ExpansionPlate.GetOrCreateLed(DigitalPort.D3, Color.Green);
    FrontLeftLed = ExpansionPlate.GetOrCreateLed(DigitalPort.D4, Color.Green);
    BackRightLed = ExpansionPlate.GetOrCreateLed(DigitalPort.D0, Color.Red);
    BackLeftLed = ExpansionPlate.GetOrCreateLed(DigitalPort.D5, Color.Red);

    UltrasoundFront = ExpansionPlate.GetOrCreateUltrasonicSensor(DigitalPort.D7);
    UltrasoundBack = ExpansionPlate.GetOrCreateUltrasonicSensor(DigitalPort.D6);

    Sound = ExpansionPlate.GetOrCreateSoundSensor(AnaloguePort.A3);

    FrontRightLed.Off();
    FrontLeftLed.Off();
    BackRightLed.Off();
    BackLeftLed.Off();
}
/// <summary>Begins the decoding process.</summary>
/// <param name="source">The source.</param>
/// <exception cref="NotSupportedException">Only Layer 3 Audio is supported!</exception>
/// <exception cref="Exception">Decoding already started!</exception>
public override void BeginDecode(IFrameSource source)
{
    if (m_FrameDecoder != null)
    {
        Close();
    }

    SourceName = source.Name;
    m_Source = source;

    // get first audio frame
    MP3AudioFrame l_MP3Frame = ReadNextAudioFrame();
    if (l_MP3Frame.Header.Layer != MP3AudioFrameLayer.Layer3)
    {
        throw new NotSupportedException("Source " + SourceName + ": Only Layer 3 Audio is supported!");
    }

    // prepare decoder
    m_OutputChannels = l_MP3Frame.Header.ChannelCount;
    float[] equalizerFactors = m_Equalizer.GetFactors();
    m_Filter1 = new MP3AudioSynthesisFilter(0, 32000.0f, equalizerFactors);
    if (m_OutputChannels == 2)
    {
        m_Filter2 = new MP3AudioSynthesisFilter(1, 32000.0f, equalizerFactors);
    }

    m_SamplingRate = l_MP3Frame.Header.SamplingRate;
    m_OutputBuffer = new MP3AudioStereoBuffer(m_SamplingRate);
    m_FrameDecoder = new MP3AudioLayerIIIDecoder(l_MP3Frame.Header, m_Filter1, m_Filter2, m_OutputBuffer, (int)MP3AudioOutputMode.Both);
    DecodeFrame(l_MP3Frame);
}
public FrameSourceSampleForm(IFrameSource source, IFrameData data)
{
    InitializeComponent();
    toolStripLabelUsage.Text = "";
    frameSource = source;
    frameData = data;
    frameSource.OnNewFrame += OnNewFrame;
    frameSource.OnWorkerException += FrameSource_OnWorkerException;
}
public FrameToSampleConverter([NotNull] IFrameSource source)
{
    if (source == null)
    {
        throw new ArgumentNullException(nameof(source));
    }

    _source = source;
    _temp = new float[source.FrameSize * source.WaveFormat.Channels];
}
public static IProducer<T> CreateComponent<T>(this IFrameSource<T> frameSource, Pipeline pipeline, TimeSpan samplingInterval)
{
    var initialFrame = frameSource.GetFrame();
    return Generators.Sequence(
        pipeline,
        initialFrame,
        _ => frameSource.GetFrame(),
        samplingInterval);
}
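A minimal usage sketch for this extension, assuming Microsoft \psi's usual pipeline API and that IFrameSource<T>'s only member of interest is the GetFrame() used above; FakeCounterSource is a hypothetical implementation for illustration only.

// Hypothetical IFrameSource<int> that just counts frames.
public sealed class FakeCounterSource : IFrameSource<int>
{
    private int n;
    public int GetFrame() => n++;
}

// Sample the source roughly every 33 ms and print each frame until Enter is pressed.
using (var pipeline = Pipeline.Create())
{
    var frames = new FakeCounterSource().CreateComponent(pipeline, TimeSpan.FromMilliseconds(33));
    frames.Do(frame => Console.WriteLine($"frame: {frame}"));
    pipeline.RunAsync();
    Console.ReadLine();
}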
public void OnImageCaptured(IFrameSource frameSource, Frame frame, double fps)
{
    _latestFrame = frame.Image;
    histograms = BitmapConverting.getHistoGrams(_latestFrame, 8, 8);
    BitmapConverting.markRedSectors(_latestFrame, histograms, 8, 8);
    pictureBoxDisplay.Invalidate();
    frameIndex++;
}
/// <summary>
/// Encodes a specified range of frames obtained from the specified frame source.
/// </summary>
/// <param name="frameSource">The frame source to read frames from.</param>
/// <param name="start">The index of the first frame to encode.</param>
/// <param name="count">The number of frames to encode.</param>
public void Encode(IFrameSource frameSource, long start, long count)
{
    Contract.Requires(frameSource != null);

    var end = start + count;
    for (long i = start; i < end; ++i)
    {
        this.EncodeFrame(frameSource.GetFrame(i));
    }
}
/// <summary>
/// Initializes a new instance of the KinectFacialRecognitionEngine class
/// </summary>
public KinectFacialRecognitionEngine(KinectSensor kinect, IFrameSource frameSource)
{
    this.Kinect = kinect;
    this.ProcessingMutex = new object();
    this.ProcessingEnabled = true;
    this.Processor = new FacialRecognitionProcessor();

    this.frameSource = frameSource;
    this.frameSource.FrameDataUpdated += this.FrameSource_FrameDataUpdated;

    this.recognizerWorker = new BackgroundWorker();
    this.recognizerWorker.DoWork += this.RecognizerWorker_DoWork;
    this.recognizerWorker.RunWorkerCompleted += this.RecognizerWorker_RunWorkerCompleted;
}
/// <summary>
/// Configures an IFrameSource from a StorageFile or MediaCapture instance, optionally producing frames in a specified format
/// </summary>
/// <param name="source">A StorageFile or MediaCapture instance to read frames from.</param>
/// <param name="inputImageDescriptor">An optional descriptor of the image format to produce.</param>
/// <returns>A Task that completes once the frame source is configured.</returns>
private async Task ConfigureFrameSourceAsync(object source, ISkillFeatureImageDescriptor inputImageDescriptor = null)
{
    await m_lock.WaitAsync();
    {
        // Reset bitmap rendering component
        UIProcessedPreview.Source = null;
        m_renderTargetFrame = null;
        m_processedBitmapSource = new SoftwareBitmapSource();
        UIProcessedPreview.Source = m_processedBitmapSource;

        // Clean up previous frame source
        if (m_frameSource != null)
        {
            m_frameSource.FrameArrived -= FrameSource_FrameAvailable;
            var disposableFrameSource = m_frameSource as IDisposable;
            if (disposableFrameSource != null)
            {
                // Lock disposal based on frame source consumers
                disposableFrameSource.Dispose();
            }
        }

        // Create new frame source and register a callback if the source fails along the way
        m_frameSource = await FrameSourceFactory.CreateFrameSourceAsync(
            source,
            (sender, message) =>
            {
                NotifyUser(message);
            },
            inputImageDescriptor);

        // TODO: Workaround for a bug in ObjectDetectorBinding when binding consecutive VideoFrames with Direct3DSurface and SoftwareBitmap
        await m_skillWrappers[0].InitializeSkillAsync(m_skillWrappers[0].Skill.Device);

        // Set additional input features as exposed in the UI
        await m_skillWrappers[0].Binding["InputObjectKindFilterList"].SetFeatureValueAsync(m_objectKindFilterList);
        await m_skillWrappers[0].Binding["InputConfidenceThreshold"].SetFeatureValueAsync((float)UIConfidenceThresholdControl.Value);
    }
    m_lock.Release();

    // If we obtained a valid frame source, start it
    if (m_frameSource != null)
    {
        m_frameSource.FrameArrived += FrameSource_FrameAvailable;
        await m_frameSource.StartAsync();
    }
}
/// <summary>
/// Configures an IFrameSource from a StorageFile or MediaCapture instance, optionally producing frames in a specified format
/// </summary>
/// <param name="source">A StorageFile or MediaCapture instance to read frames from.</param>
/// <param name="inputImageDescriptor">An optional descriptor of the image format to produce.</param>
/// <returns>A Task that completes once the frame source is configured.</returns>
private async Task ConfigureFrameSourceAsync(object source, ISkillFeatureImageDescriptor inputImageDescriptor = null)
{
    await m_lock.WaitAsync();
    {
        // Reset bitmap rendering component
        UIProcessedPreview.Source = null;
        m_renderTargetFrame = null;
        m_processedBitmapSource = new SoftwareBitmapSource();
        UIProcessedPreview.Source = m_processedBitmapSource;

        // Clean up previous frame source
        if (m_frameSource != null)
        {
            m_frameSource.FrameArrived -= FrameSource_FrameAvailable;
            var disposableFrameSource = m_frameSource as IDisposable;
            if (disposableFrameSource != null)
            {
                // Lock disposal based on frame source consumers
                disposableFrameSource.Dispose();
            }
        }

        // Create new frame source and register a callback if the source fails along the way
        m_frameSource = await FrameSourceFactory.CreateFrameSourceAsync(
            source,
            (sender, message) =>
            {
                NotifyUser(message);
            },
            inputImageDescriptor);

        // Clear existing trackers
        m_frameCounter = 0;
        m_trackerBindings.Clear();
        m_trackerHistories.Clear();
    }
    m_lock.Release();

    // If we obtained a valid frame source, start it
    if (m_frameSource != null)
    {
        m_frameSource.FrameArrived += FrameSource_FrameAvailable;
        await m_frameSource.StartAsync();
    }
}
/// <summary>
/// Conditionally disposes the old frame source and creates a new frame source
/// </summary>
/// <param name="source">A StorageFile or MediaCapture instance to read frames from.</param>
/// <returns>A Task that completes once the frame source is configured.</returns>
private async Task ConfigureFrameSourceAsync(object source)
{
    await m_lock.WaitAsync();
    {
        // Dispose old frame source
        if (m_frameSource != null)
        {
            m_frameSource.FrameArrived -= FrameSource_FrameArrived;
            var disposableFrameSource = m_frameSource as IDisposable;
            if (disposableFrameSource != null)
            {
                disposableFrameSource.Dispose();
            }
        }

        // Create new frame source
        m_frameSource = await FrameSourceFactory.CreateFrameSourceAsync(source, (sender, message) =>
        {
            NotifyUser(message, NotifyType.ErrorMessage);
        });

        // If we obtained a valid frame source, hook a frame callback
        if (m_frameSource != null)
        {
            m_frameSource.FrameArrived += FrameSource_FrameArrived;
            await Dispatcher.RunAsync(Windows.UI.Core.CoreDispatcherPriority.Normal, () => { UIPlayButton.IsEnabled = true; });
        }
        else
        {
            await Dispatcher.RunAsync(Windows.UI.Core.CoreDispatcherPriority.Normal, () => { UIPlayButton.IsEnabled = false; });
        }
    }
    m_lock.Release();

    // Update playback button state. Note: TogglePlaybackState acquires m_lock, so it must be called outside the lock
    await Dispatcher.RunAsync(Windows.UI.Core.CoreDispatcherPriority.Normal, () => TogglePlaybackState(false));

    NotifyUser("Frame source configured, ready to begin");
}
// Awake is called when the script instance is being loaded
void Awake()
{
    if (_useCamera)
    {
        WebCamDevice[] cams = WebCamTexture.devices;
        if (cams.Length > 0)
        {
            WebCamTexture tmpWebCamTexture = new WebCamTexture(cams[0].name);
            _frameSource = new CameraFrame(tmpWebCamTexture);
            tmpWebCamTexture.Play();
            _cameraIsAvailable = true;
        }
    }
    else
    {
        _frameSource = new StaticImage(string.Concat(Application.dataPath, "/Resources/Images/frame.png"), 640, 512);
    }
}
/// <summary>
/// Configures an IFrameSource from a StorageFile or MediaCapture instance, optionally producing frames in a specified format
/// </summary>
/// <param name="source">A StorageFile or MediaCapture instance to read frames from.</param>
/// <param name="inputImageDescriptor">An optional descriptor of the image format to produce.</param>
/// <returns>A Task that completes once the frame source is configured.</returns>
private async Task ConfigureFrameSourceAsync(object source, ISkillFeatureImageDescriptor inputImageDescriptor = null)
{
    await m_lock.WaitAsync();
    {
        // Reset bitmap rendering component
        UIProcessedPreview.Source = null;
        m_renderTargetFrame = null;
        m_processedBitmapSource = new SoftwareBitmapSource();
        UIProcessedPreview.Source = m_processedBitmapSource;

        // Clean up previous frame source
        if (m_frameSource != null)
        {
            m_frameSource.FrameArrived -= FrameSource_FrameAvailable;
            var disposableFrameSource = m_frameSource as IDisposable;
            if (disposableFrameSource != null)
            {
                // Lock disposal based on frame source consumers
                disposableFrameSource.Dispose();
            }
        }

        // Create new frame source and register a callback if the source fails along the way
        m_frameSource = await FrameSourceFactory.CreateFrameSourceAsync(
            source,
            (sender, message) =>
            {
                NotifyUser(message);
            },
            inputImageDescriptor);

        // TODO: Workaround for a bug in ObjectDetectorBinding when binding consecutive VideoFrames with Direct3DSurface and SoftwareBitmap
        m_binding = await m_skill.CreateSkillBindingAsync() as ObjectDetectorBinding;
    }
    m_lock.Release();

    // If we obtained a valid frame source, start it
    if (m_frameSource != null)
    {
        m_frameSource.FrameArrived += FrameSource_FrameAvailable;
        await m_frameSource.StartAsync();
    }
}
/// <summary>Closes the underlying stream and calls Dispose.</summary>
public void Close()
{
    if (m_Disposed)
    {
        throw new ObjectDisposedException(LogSourceName);
    }

    if (m_Initialized)
    {
        M123.Deinitialize();
        m_Initialized = false;
    }

    if (m_Source != null)
    {
        m_Source.Close();
        m_Source = null;
        m_DecodeFifoBuffer = null;
    }
}
/// <summary>Starts the decoding process.</summary>
/// <param name="source">The source.</param>
/// <exception cref="InvalidOperationException">Source: Decoding already started!</exception>
public void BeginDecode(IFrameSource source)
{
    if (disposed)
    {
        throw new ObjectDisposedException(LogSourceName);
    }

    if (initialized)
    {
        throw new InvalidOperationException(string.Format("Source {0}: Decoding already started!", SourceName));
    }

    SourceName = source.Name;
    initialized = true;
    M123.Initialize();
    m_Source = source;

    // open new decoder handle
    M123.RESULT result;
    m_DecoderHandle = M123.SafeNativeMethods.mpg123_new(null, out result);
    M123.CheckResult(result);

    // reset formats
    M123.CheckResult(M123.SafeNativeMethods.mpg123_format_none(m_DecoderHandle));

    // allow all mp3 native samplerates
    var mode = useFloatingPoint ? M123.ENC.FLOAT_32 : M123.ENC.SIGNED_16;
    foreach (var sampleRate in M123.SafeNativeMethods.mpg123_rates())
    {
        M123.CheckResult(M123.SafeNativeMethods.mpg123_format(m_DecoderHandle, new IntPtr(sampleRate), M123.CHANNELCOUNT.STEREO, mode));
    }

    // open feed
    result = M123.SafeNativeMethods.mpg123_open_feed(m_DecoderHandle);
    M123.CheckResult(result);
    m_DecodeFifoBuffer = new FifoBuffer();
}
/// <summary>
/// Configures an IFrameSource from a StorageFile or MediaCapture instance
/// </summary>
/// <param name="source">A StorageFile or MediaCapture instance to read frames from.</param>
/// <returns>A Task that completes once the frame source is configured.</returns>
private async Task ConfigureFrameSourceAsync(object source)
{
    await m_lock.WaitAsync();
    {
        // Reset bitmap rendering component
        UIImageViewer.Source = null;
        m_processedBitmapSource = new SoftwareBitmapSource();
        UIImageViewer.Source = m_processedBitmapSource;
        m_bodyRenderer.IsVisible = false;

        // Clean up previous frame source
        if (m_frameSource != null)
        {
            m_frameSource.FrameArrived -= FrameSource_FrameAvailable;
            var disposableFrameSource = m_frameSource as IDisposable;
            if (disposableFrameSource != null)
            {
                // Lock disposal based on frame source consumers
                disposableFrameSource.Dispose();
            }
        }

        // Create new frame source and register a callback if the source fails along the way
        m_frameSource = await FrameSourceFactory.CreateFrameSourceAsync(source, (sender, message) =>
        {
            NotifyUser(message);
        });
    }
    m_lock.Release();

    // If we obtained a valid frame source, start it
    if (m_frameSource != null)
    {
        m_frameSource.FrameArrived += FrameSource_FrameAvailable;
        await m_frameSource.StartAsync();
    }
}
private void _frameSource_NewFrame(IFrameSource frameSource, Frame frame, double fps)
{
    BitMaps.Add(frame.Image);
}
/// <summary>
/// Asynchronously encodes a specified range of frames obtained from the specified frame source.
/// </summary>
/// <param name="frameSource">The frame source to read frames from.</param>
/// <param name="start">The index of the first frame to encode.</param>
/// <param name="count">The number of frames to encode.</param>
/// <param name="userState">A user-supplied state object passed through to the completion callback.</param>
public void EncodeAsync(IFrameSource frameSource, long start, long count, object userState)
{
    // Not implemented.
}
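The body above is empty in the source. A sketch of one way it could be filled in, assuming the synchronous Encode overload shown in this listing and a hypothetical EncodeCompleted event in the classic event-based async pattern (requires System.ComponentModel and System.Threading.Tasks); this is not the project's actual implementation:

// Sketch only: run the synchronous Encode on a thread-pool thread and report
// completion through a hypothetical EncodeCompleted event, EAP-style.
public event EventHandler<AsyncCompletedEventArgs> EncodeCompleted;   // hypothetical

public void EncodeAsync(IFrameSource frameSource, long start, long count, object userState)
{
    Task.Run(() => this.Encode(frameSource, start, count))
        .ContinueWith(t => EncodeCompleted?.Invoke(
            this, new AsyncCompletedEventArgs(t.Exception, false, userState)));
}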
/// <summary>Starts the decoding process.</summary>
/// <param name="source">The source.</param>
/// <exception cref="InvalidOperationException">Source: Decoding already started!</exception>
public abstract void BeginDecode(IFrameSource source);
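For context, a hypothetical driver for this API, combining BeginDecode with the Close method shown earlier; Mp3Decoder and FileFrameSource are stand-in names, not types from the library:

// Hypothetical driver; concrete type names are stand-ins.
var decoder = new Mp3Decoder();                       // some subclass implementing BeginDecode
IFrameSource source = new FileFrameSource("in.mp3");  // some IFrameSource over a file
decoder.BeginDecode(source);   // throws InvalidOperationException if decoding already started
// ... read decoded audio from the decoder here ...
decoder.Close();               // closes the underlying source (see Close above)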
public void OnImageCaptured(IFrameSource frameSource, Frame frame, double fps)
{
    ImgVideo.Dispatcher.BeginInvoke(
        (Action)(() => ImgVideo.Source = ImageDataConverter.BitmapToBitmapSource(frame.Image)));
}
public VolumeRampedFrameSource(IFrameSource source, IVolumeProvider volumeProvider)
{
    _source = source;
    _volumeProvider = volumeProvider;
}
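The constructor only captures its two dependencies. A minimal sketch of how such a wrapper might apply the provider's volume with a per-frame ramp; the member names Read, TargetVolume and _lastVolume are assumptions for illustration, not the library's actual API:

// Sketch only: ramp from the previous volume to the provider's target across
// each frame so abrupt volume changes don't produce audible clicks.
// Read(), TargetVolume and _lastVolume are illustrative assumptions.
private float _lastVolume = 1f;

public bool Read(float[] frame)
{
    if (!_source.Read(frame))
        return false;

    var target = _volumeProvider.TargetVolume;
    for (var i = 0; i < frame.Length; i++)
    {
        var t = (i + 1f) / frame.Length;                       // 0..1 across the frame
        frame[i] *= _lastVolume + (target - _lastVolume) * t;  // linear interpolation
    }

    _lastVolume = target;
    return true;
}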
public FrameToSampleConverter(IFrameSource source)
{
    _source = source;
    _temp = new float[source.FrameSize * source.WaveFormat.Channels];
}
public FrameManipulator(IFrameSource frameSource, string filename)
{
    _frameSource = frameSource;
    _filename = filename;
}
public void CaptureFromCamera(IFrameSource<Image> camera)
{
    CaptureImage = camera.GetFrame;
    Threshold = 0.9;
}
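Note the method-group assignment: CaptureImage ends up holding a delegate to the source's GetFrame, so each invocation pulls a fresh frame. A hypothetical call site, assuming CaptureImage is a Func<Image> property on the declaring class (names here are stand-ins):

// Hypothetical usage: wire a consumer to a camera, then pull frames on demand.
var consumer = new FrameConsumer();      // stand-in for the class declaring CaptureFromCamera
consumer.CaptureFromCamera(camera);      // camera: any IFrameSource<Image>
Image latest = consumer.CaptureImage();  // invokes camera.GetFrame under the hood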