/// <summary>
/// Assigns <paramref name="source"/> to a freshly created media element on the
/// dispatcher thread, tearing down any previously created element first.
/// </summary>
public Task SetSourceAsync(IMediaStreamSource source)
{
    return Dispatch(() =>
    {
        source.ValidateEvent(MediaStreamFsm.MediaEvent.MediaStreamSourceAssigned);

        // Record that a source has been attached; this must be the first time.
        var previouslySet = Interlocked.Exchange(ref _sourceIsSet, 1);
        Debug.Assert(0 == previouslySet);

        if (null != _mediaElement)
        {
            // Clean up and destroy the old element before building a replacement.
            UiThreadCleanup();

            var oldElement = _mediaElement;
            _mediaElement = null;
            _destroyMediaElement(oldElement);
        }

        _mediaElement = _createMediaElement();

        if (null == _mediaElement)
            Debug.WriteLine("MediaElementManager.SetSourceAsync() null media element");
        else
            _mediaElement.SetSource((MediaStreamSource)source);
    });
}
/// <summary>
/// Builds a fresh MediaElement for this sound, attaches it to the visual tree
/// and starts playback at the requested volume.
/// </summary>
internal void Play(float volume)
{
    // Creating and setting the source within the constructor starts
    // playing the song immediately.
    song = new MediaElement();
    song.MediaEnded += song_MediaEnded;
    _graphics.Root.Children.Add(song);
    song.SetSource(resourceInfo.Stream);
    song.Volume = volume;
    song.Play();
}
/// <summary>
/// Loads a media resource embedded in the assembly and registers a hidden,
/// non-autoplaying MediaElement for it under the given key.
/// </summary>
public static void LoadMedia(string key)
{
    var uri = new Uri(string.Format("XamlTetris;component/{0}", key), UriKind.Relative);
    StreamResourceInfo sri = System.Windows.Application.GetResourceStream(uri);

    // Keep the element hidden and silent until explicitly played.
    var element = new MediaElement
    {
        AutoPlay = false,
        Visibility = Visibility.Collapsed
    };
    element.SetSource(sri.Stream);

    // The element must live in the tree for playback to work; index it by key.
    rootPanel.Children.Add(element);
    mediaElements.Add(key, element);
}
/// <summary>
/// Sets up the test-frequency generator, the media element that plays it,
/// the one-second tick timer and the start command.
/// </summary>
private void Initialize()
{
    _Frequencies = TestFrequencies.GetFrequencies();

    _generator = new OscillationSoundWave { SoundWaveData = _Frequencies };

    _MediaElement = new MediaElement { AutoPlay = false };
    _MediaElement.SetSource(_generator.Source);

    _currentIndex = 0;

    // Tick once per second to advance through the frequency list.
    _Timer = new DispatcherTimer { Interval = TimeSpan.FromSeconds(1) };
    _Timer.Tick += _Timer_Tick;
    //_Track.PropertyChanged += _Track_PropertyChanged;

    _StartCommand = new SimpleCommand { MayBeExecuted = true };
    _StartCommand.Executed += TestFrequenciesViewModel_Executed;
}
/// <summary>
/// Application entry point: on startup creates the root canvas, then once the
/// canvas has loaded creates an audio element for the embedded MP3 and drives
/// the music parts from a timer derived from the track tempo.
/// </summary>
public Application()
{
    this.Startup += delegate
    {
        this.RootVisual = new Canvas();
        this.Canvas.Loaded += delegate
        {
            MediaElement audioElement = new MediaElement();
            // Rebuild the canvas contents: the audio element plus a fresh drawing surface.
            this.Canvas.Children.Clear();
            this.Canvas.Children.Add(audioElement);
            this.Canvas.Children.Add(new Canvas());
            // The MP3 is embedded as a manifest resource in this assembly.
            audioElement.SetSource(this.GetType().Assembly.GetManifestResourceStream("Monotone.Monotone.mp3"));
            audioElement.MediaOpened += delegate
            {
                this.NextPart();
                // (60/140) s per beat * 4 * 4 * 2 beats, in ms — assumes a 140 BPM
                // track with 32-beat parts; TODO confirm against the audio file.
                this.timer.Interval = new TimeSpan(0, 0, 0, 0, (int)((60f / 140f) * 4 * 4 * 2 * 1000f));
                this.timer.Tick += delegate
                {
                    // Stop/restart around NextPart so a slow NextPart doesn't stack ticks.
                    this.timer.Stop();
                    this.NextPart();
                    this.timer.Start();
                };
                this.timer.Start();
            };
        };
    };
}
// =======================================================================
// Start a sound effect
// =======================================================================
// Picks a dynamic channel for (entnum, entchannel), spatializes it relative
// to 'origin', loads the sound's sample data and plays it through a new
// MediaElement attached to the page canvas. fvol is 0..1 and is scaled to
// the engine's 0..255 master-volume range.
public static void S_StartSound(int entnum, int entchannel, sfx_t sfx, double[] origin, double fvol, double attenuation)
{
    channel_t target_chan, check; // NOTE(review): 'check' is unused in this port
    sfxcache_t sc;
    int vol;
    int ch_idx;                   // NOTE(review): unused
    int skip;                     // NOTE(review): unused

    /*if (entnum != 195)
        return;*/

    // Bail out when the sound system is off, the sfx is missing or sound is disabled.
    if (sound_started == 0)
        return;
    if (sfx == null)
        return;
    if (nosound.value != 0)
        return;

    // Scale 0..1 volume to 0..255.
    vol = (int)(fvol * 255);

    // pick a channel to play on
    target_chan = SND_PickChannel(entnum, entchannel);
    if (target_chan == null)
        return;

    // spatialize
    mathlib.VectorCopy(origin, ref target_chan.origin);
    target_chan.dist_mult = attenuation / sound_nominal_clip_dist;
    target_chan.master_vol = vol;
    target_chan.entnum = entnum;
    target_chan.entchannel = entchannel;
    SND_Spatialize(target_chan);

    if (target_chan.leftvol == 0 && target_chan.rightvol == 0)
        return; // not audible at all

    // new channel
    sc = S_LoadSound(sfx);
    if (sc == null)
    {
        target_chan.sfx = null;
        return; // couldn't load the sound's data
    }

    target_chan.sfx = sfx;
    /*if (sc.loopstart != -1)
        console.Con_Printf(sfx.name + " " + entnum + " " + entchannel + "\n");*/

    // Play the raw sample data through a Silverlight MediaElement on the page canvas.
    MediaElement media = new MediaElement();
    target_chan.media = media;
    media.AutoPlay = true;
    media.SetSource(new MemoryStream(sc.data));
    // Tag carries the channel so the MediaEnded handler can find it.
    media.Tag = target_chan;
    /*if (sc.loopstart != -1)
    {
        media.MediaEnded += media_MediaEnded;
        target_chan.looping = 1;
    }
    else*/
    media.MediaEnded += media_MediaEnded2;
    SetVolume(target_chan);
    Page.thePage.parentCanvas.Children.Add(media);
}
/// <summary>
/// If camera has not been initialized when navigating to this page, initialization
/// will be started asynchronously in this method. Once initialization has been
/// completed the camera will be set as a source to the VideoBrush element
/// declared in XAML. On-screen controls are enabled when camera has been initialized.
/// </summary>
protected async override void OnNavigatedTo(NavigationEventArgs e)
{
    // Always start from a fresh camera instance.
    if (Camera != null)
    {
        Camera.Dispose();
        Camera = null;
    }

    ShowProgress(AppResources.InitializingCameraText);
    await InitializeCamera(PerfectCamera.DataContext.Instance.SensorLocation);
    HideProgress();

    InitEffectPanel();

    bool selfieMode =
        PerfectCamera.DataContext.Instance.CameraType == PerfectCameraType.Selfie;

    if (selfieMode)
    {
        // Selfie mode renders through the effect stream via an intermediate MediaElement.
        _mediaElement = new MediaElement
        {
            Stretch = Stretch.UniformToFill,
            BufferingTime = new TimeSpan(0)
        };
        _mediaElement.SetSource(_cameraStreamSource);
        BackgroundVideoBrush.SetSource(_mediaElement);

        EffectNameTextBlock.Text = _cameraEffect.EffectName;
        EffectNameFadeIn.Begin();
    }
    else
    {
        // Rear camera feeds the brush directly.
        BackgroundVideoBrush.SetSource(Camera);
    }

    SetScreenButtonsEnabled(true);
    SetCameraButtonsEnabled(true);

    ((Storyboard)Resources["CaptureAnimation"]).Stop();

    SetOrientation(this.Orientation);

    base.OnNavigatedTo(e);
}
/// <summary>
/// Opens the camera at its first available capture resolution, selects the
/// largest preview resolution the device accepts, and wires the preview
/// stream into the background VideoBrush via a MediaElement.
/// </summary>
private async Task Initialize()
{
    var resolution = PhotoCaptureDevice.GetAvailableCaptureResolutions(_cameraLocation).First();
    _photoCaptureDevice = await PhotoCaptureDevice.OpenAsync(_cameraLocation, resolution);

    // Try preview resolutions from largest to smallest until one is accepted.
    // BUGFIX: PreviewResolution was never assigned when every candidate threw,
    // leaving it undefined; initialize it so the fallback is well-defined.
    Windows.Foundation.Size PreviewResolution = new Windows.Foundation.Size(0, 0);
    foreach (var res in PhotoCaptureDevice.GetAvailablePreviewResolutions(_cameraLocation).ToArray().Reverse())
    {
        try
        {
            await _photoCaptureDevice.SetPreviewResolutionAsync(res);
            PreviewResolution = res;
            break;
        }
        catch (Exception)
        {
            // This resolution is unsupported by the device — try the next candidate.
        }
    }

    _cameraStreamSource = new CameraStreamSource(_photoCaptureDevice, PreviewResolution);

    _mediaElement = new MediaElement();
    _mediaElement.BufferingTime = new TimeSpan(0);
    _mediaElement.SetSource(_cameraStreamSource);

    // Using VideoBrush in XAML instead of MediaElement, because otherwise
    // CameraStreamSource.CloseMedia() does not seem to be called by the framework:/
    BackgroundVideoBrush.SetSource(_mediaElement);

    AdjustOrientation();
}
// Verifies that SetSource(Stream.Null) is accepted and resets the Source
// property back to null, both from the initial state and after a URI source
// has been assigned.
public void SetSource_StreamNull ()
{
    MediaElement media = new MediaElement ();
    // Swallow MediaFailed: the nonexistent URI below would otherwise surface an error.
    media.MediaFailed += delegate { /* do nothing */ };
    Assert.IsNull (media.Source, "Source-1");
    media.SetSource (Stream.Null);
    Assert.IsNull (media.Source, "Source-2");
    media.Source = new Uri ("thisfinedoesnotexist.wmv", UriKind.Relative);
    Assert.IsNotNull (media.Source, "Source-3");
    // Setting a (null) stream source clears the previously assigned URI Source.
    media.SetSource (Stream.Null);
    Assert.IsNull (media.Source, "Source-4");
}
// Checks that when SetSource is called from the test (UI) thread, the
// MediaStreamSource's OpenMediaAsync callback runs on that same thread
// (tracked by the OpenMediaOnSameThread flag set by MediaStreamSourceBase).
public void ThreadPool ()
{
    int tid = Thread.CurrentThread.ManagedThreadId;
    bool opened = false;
    // Reset the flags so a later true value can only come from this run.
    OpenMediaOnSameThread = false;
    CloseMediaOnSameThread = false;
    Enqueue (() => {
        // Sanity check: Enqueue runs on the same thread that queued the work.
        Assert.AreEqual (tid, Thread.CurrentThread.ManagedThreadId, "Different thread ids");
        MediaStreamSourceBase mss = new MediaStreamSourceBase ();
        mss.InitializeSource (true, 5000000);
        mss.AddVideoStream ();
        MediaElement mel = new MediaElement ();
        mel.MediaOpened += new RoutedEventHandler (delegate (object sender, RoutedEventArgs e) {
            opened = true;
            Assert.AreEqual (tid, Thread.CurrentThread.ManagedThreadId, "MediaOpened");
        });
        mel.SetSource (mss);
#if false
        Assert.Throws<InvalidOperationException> (delegate {
            mel.SetSource (mss); // 2nd SetSource to get a Close event
        }, "Close");
#endif
        // The media only opens once the element is part of the visual tree.
        TestPanel.Children.Add (mel);
    });
    EnqueueConditional (() => opened);
    Enqueue (delegate () {
        Assert.IsTrue (OpenMediaOnSameThread, "OpenMediaOnSameThread");
        // Assert.IsTrue (CloseMediaOnSameThread, "CloseMediaOnSameThread");
    });
    EnqueueTestComplete ();
}
/// <summary>
/// Creates a media player around the given stream. MediaElement must be
/// created and configured on the UI thread, so all setup is dispatched.
/// </summary>
// NOTE(review): the 'mime' parameter is ignored and 'video' is set to true
// unconditionally — confirm whether audio streams should set video = false.
public CN1Media(Stream s, string mime, java.lang.Runnable onComplete)
{
    System.Windows.Deployment.Current.Dispatcher.BeginInvoke(() =>
    {
        elem = new MediaElement();
        elem.SetSource(s);
        video = true;
        this.onComplete = onComplete;
        // Fire the completion callback when playback reaches the end.
        elem.MediaEnded += elem_MediaEnded;
    });
}
// Checks that when the MediaStreamSource is created on a user (worker) thread
// and SetSource is invoked via the dispatcher, the MediaStreamSource callbacks
// do NOT run on the original test thread (flags end up false).
public void UserThread ()
{
    int tid = Thread.CurrentThread.ManagedThreadId;
    bool opened = false;
    // set them to true to make sure we're not checking the default (false) value later
    OpenMediaOnSameThread = true;
    CloseMediaOnSameThread = true;
    Dispatcher dispatcher = TestPanel.Dispatcher;
    Thread t = new Thread (delegate () {
        // Confirm we really are on a different thread than the test thread.
        Assert.AreNotEqual (tid, Thread.CurrentThread.ManagedThreadId, "Same thread ids");
        MediaStreamSourceBase mss = new MediaStreamSourceBase ();
        mss.InitializeSource (false, 5000000);
        mss.AddVideoStream ();
        // UI objects must be created and used on the dispatcher thread.
        dispatcher.BeginInvoke (delegate {
            MediaElement mel = new MediaElement ();
            mel.MediaOpened += new RoutedEventHandler (delegate (object sender, RoutedEventArgs e) {
                opened = true;
                // MediaOpened still fires on the UI/test thread.
                Assert.AreEqual (tid, Thread.CurrentThread.ManagedThreadId, "MediaOpened");
            });
            mel.SetSource (mss);
#if false
            Assert.Throws<InvalidOperationException> (delegate {
                mel.SetSource (mss); // 2nd SetSource to get a Close event
            }, "Close");
#endif
            // The media only opens once the element is part of the visual tree.
            TestPanel.Children.Add (mel);
        });
    });
    t.Start ();
    EnqueueConditional (() => opened);
    Enqueue (delegate () {
        Assert.IsFalse (OpenMediaOnSameThread, "OpenMediaOnSameThread");
        // Assert.IsFalse (CloseMediaOnSameThread, "CloseMediaOnSameThread");
    });
    EnqueueTestComplete ();
}
/// <summary>
/// Opens and sets up the camera if not already. Creates a new
/// CameraStreamSource with an effect and shows it on the screen via
/// the media element.
/// </summary>
private async void Initialize()
{
    Size mediaElementSize = new Size(MediaElementWidth, MediaElementHeight);

    if (camera == null)
    {
        // Resolve the capture resolution and open the camera: largest
        // resolution whose aspect ratio matches the target (within 0.1).
        var captureResolutions =
            PhotoCaptureDevice.GetAvailableCaptureResolutions(CameraSensorLocation.Back);
        Size selectedCaptureResolution = captureResolutions.Where(
            resolution => Math.Abs(AspectRatio - resolution.Width / resolution.Height) <= 0.1)
            .OrderBy(resolution => resolution.Width).Last();

        camera = await PhotoCaptureDevice.OpenAsync(
            CameraSensorLocation.Back, selectedCaptureResolution);

        // Set the image orientation prior to encoding
        camera.SetProperty(KnownCameraGeneralProperties.EncodeWithOrientation,
            camera.SensorLocation == CameraSensorLocation.Back
                ? camera.SensorRotationInDegrees
                : -camera.SensorRotationInDegrees);

        // Resolve and set the preview resolution: smallest matching-aspect
        // resolution that still covers the media element.
        var previewResolutions =
            PhotoCaptureDevice.GetAvailablePreviewResolutions(CameraSensorLocation.Back);
        Size selectedPreviewResolution = previewResolutions.Where(
            resolution => Math.Abs(AspectRatio - resolution.Width / resolution.Height) <= 0.1)
            .Where(resolution => (resolution.Height >= mediaElementSize.Height)
                                 && (resolution.Width >= mediaElementSize.Width))
            .OrderBy(resolution => resolution.Width).First();

        await camera.SetPreviewResolutionAsync(selectedPreviewResolution);

        cameraEffect.CaptureDevice = camera;
    }

    if (mediaElement == null)
    {
        mediaElement = new MediaElement();
        mediaElement.Stretch = Stretch.UniformToFill;
        mediaElement.BufferingTime = new TimeSpan(0);
        mediaElement.Tap += OnMyCameraMediaElementTapped;
        source = new CameraStreamSource(cameraEffect, mediaElementSize);
        mediaElement.SetSource(source);
        MediaElementContainer.Children.Add(mediaElement);
    }

    // CLEANUP: the original ended by casting the effect to NokiaSketchEffect
    // into a local that was never used ("show the index and the name of the
    // current effect" was never implemented); that dead branch is removed.
}
/*
=================
S_StaticSound

Registers an ambient (static) sound: claims the next static channel slot,
spatializes it relative to 'origin', and plays the sample data through a
MediaElement on the page canvas. Non-looping sounds are rejected.
=================
*/
public static void S_StaticSound(sfx_t sfx, double[] origin, double vol, double attenuation)
{
    channel_t ss;
    sfxcache_t sc;

    if (sfx == null)
        return;

    // All static channel slots are taken.
    if (total_channels == MAX_CHANNELS)
    {
        console.Con_Printf ("total_channels == MAX_CHANNELS\n");
        return;
    }

    // Claim the next static channel.
    ss = channels[total_channels];
    total_channels++;

    sc = S_LoadSound (sfx);
    if (sc == null)
        return;

    // Static sounds must loop; refuse one-shot samples.
    if (sc.loopstart == -1)
    {
        console.Con_Printf ("Sound " + sfx.name + " not looped\n");
        return;
    }

    ss.sfx = sfx;
    mathlib.VectorCopy (origin, ref ss.origin);
    ss.master_vol = (int)vol;
    // Attenuation is scaled down by 64 — presumably so ambients have a much
    // larger audible radius than one-shot effects; confirm against the engine.
    ss.dist_mult = (attenuation/64) / sound_nominal_clip_dist;
    SND_Spatialize (ss);

    // Play through a Silverlight MediaElement hosted on the page canvas.
    MediaElement media = new MediaElement();
    ss.media = media;
    media.AutoPlay = true;
    media.SetSource(new MemoryStream(sc.data));
    // Tag carries the channel so the MediaEnded handler can find it.
    media.Tag = ss;
    media.MediaEnded += media_MediaEnded;
    SetVolume(ss);
    Page.thePage.parentCanvas.Children.Add(media);
}
// Helper: points 'mel' at a fresh MediaStreamSourceBase configured with
// 'value' and, once the media has opened or failed, asserts that
// MediaElement.CanSeek matches 'can_seek'.
private void Test (MediaElement mel, string value, bool can_seek)
{
    MediaStreamSourceBase mss;
    Enqueue (delegate () {
        // Reset the outcome flags for this run.
        mediafailed = false;
        mediaopened = false;
        mss = new MediaStreamSourceBase ();
        mss.InitializeSource (value, "5000000");
        mss.AddVideoStream ();
        // Stash the configuration on Tag so the assert message can identify the case.
        mel.Tag = value;
        mel.SetSource (mss);
    });
    EnqueueConditional (() => mediafailed || mediaopened);
    Enqueue (delegate () {
        Assert.AreEqual (can_seek, mel.CanSeek, "CanSeek: " + (string) mel.Tag);
    });
}
// A non-seekable MediaStreamSource should still open successfully: the
// pipeline issues OpenMediaAsync followed by an initial SeekAsync(0), and
// the element then reports CanSeek == false.
public void TestNonSeekable ()
{
    bool failed = false;
    bool opened = false;
    MediaStreamSourceBase mss = new MediaStreamSourceBase ();
    // 'false' marks the source as non-seekable; 5000000 is the duration value
    // (presumably 100 ns ticks — confirm in MediaStreamSourceBase).
    mss.InitializeSource (false, 5000000);
    mss.AddVideoStream ();
    MediaElement mel = new MediaElement ();
    mel.SetSource (mss);
    mel.MediaFailed += new EventHandler<ExceptionRoutedEventArgs> (delegate (object sender, ExceptionRoutedEventArgs e) {
        failed = true;
    });
    mel.MediaOpened += new RoutedEventHandler (delegate (object sender, RoutedEventArgs e) {
        opened = true;
    });
    // The media only opens once the element is part of the visual tree.
    TestPanel.Children.Add (mel);
    EnqueueConditional (() => failed || opened);
    Enqueue (delegate () {
        Assert.IsFalse (failed, "failed");
        // Verify the callback sequence recorded by the source.
        Assert.IsTrue (mss.Log.Count >= 2);
        Assert.AreEqual ("OpenMediaAsync", mss.Log [0].Name, "OpenMediaAsync");
        Assert.AreEqual ("SeekAsync", mss.Log [1].Name, "SeekAsync");
        Assert.AreEqual ((long) 0, (long) mss.Log [1].Value, "SeekAsync:Value");
        Assert.IsFalse (mel.CanSeek, "CanSeek");
    });
    EnqueueTestComplete ();
}
/// <summary>
/// Wires the maSound* syscalls to a Silverlight MediaElement backed by
/// Mp3MediaStreamSource. Only MP3 data (prefixed with an "audio/mpeg" MIME
/// string inside the resource) is supported. Returns 0 on success, -2 for a
/// truncated MIME prefix, -3 for an unsupported MIME type.
/// </summary>
public void Init(Syscalls mSyscalls, Core mCore, Runtime mRuntime)
{
    mSyscalls.maSoundPlay = delegate(int _data, int _offset, int _size)
    {
        // Stop whatever is currently playing before starting a new sound.
        mSyscalls.maSoundStop();
        Resource audiores = mRuntime.GetResource(MoSync.Constants.RT_BINARY, _data);
        BoundedStream s = new BoundedStream((Stream)audiores.GetInternalObject(), _offset, _size);

        // Read MIME type. Mp3MediaStreamSource is not clever enough to bypass it.
        StringBuilder sb = new StringBuilder();
        int b;
        while ((b = s.ReadByte()) > 0)
        {
            sb.Append((char)b);
        }
        if (b < 0)
        {
            // The MIME type was interrupted.
            // Bad stream. We don't want to play it.
            return -2;
        }
        if (sb.ToString() != "audio/mpeg")
        {
            // We can only play MP3 files.
            return -3;
        }
        Mp3MediaStreamSource source = new Mp3MediaStreamSource(s);
        // all Controls code must be Dispatched to the proper thread,
        // or you'll get a fatal Exception.
        Deployment.Current.Dispatcher.BeginInvoke(() =>
        {
            mElement = new MediaElement();
            mElement.Volume = mVolume;
            mElement.SetSource(source);
            mElement.Play();
        });
        return 0;
    };

    mSyscalls.maSoundStop = delegate()
    {
        // Stop must complete on the main thread before this syscall returns.
        MoSync.Util.RunActionOnMainThreadSync(() =>
        {
            if (mElement != null)
            {
                mElement.Stop();
            }
        });
    };

    mSyscalls.maSoundIsPlaying = delegate()
    {
        int result = 0;
        MoSync.Util.RunActionOnMainThreadSync(() =>
        {
            if (mElement != null)
            {
                MediaElementState s = mElement.CurrentState;
                // Buffering counts as "playing" from the caller's point of view.
                result = (s == MediaElementState.Buffering) || (s == MediaElementState.Playing) ? 1 : 0;
            }
        });
        return result;
    };

    mSyscalls.maSoundGetVolume = delegate()
    {
        return (int)mVolume;
    };

    mSyscalls.maSoundSetVolume = delegate(int _vol)
    {
        // Clamp the requested volume to the 0..100 range before applying it.
        mVolume = _vol;
        if (mVolume > 100) mVolume = 100;
        else if (mVolume < 0) mVolume = 0;
        Deployment.Current.Dispatcher.BeginInvoke(() =>
        {
            if (mElement != null)
            {
                mElement.Volume = mVolume;
            }
        });
    };
}
/// <summary>
/// Plays the next item of the current sound list on the UI thread. The
/// MediaEnded handler chains playback through the remaining streams and
/// tears the list down after the final one finishes.
/// </summary>
private void playNextItem()
{
    lock (sound_lists)
    {
        // Nothing queued, or the current list has already been released.
        if (current_list_id == -1)
            return;
        if (sound_lists[current_list_id] == null)
            return;
    }

    // Skip over missing (null) streams at the current position.
    while ((current_list_item < current_list.count) && (current_list.streams[current_list_item] == null))
    {
        current_list_item++;
    }

    try
    {
        // Snapshot the mutable state for capture by the lambdas below.
        int copy_index = current_list_item;
        int copy_count = current_list.count;
        int copy_list_id = current_list_id;

        System.Windows.Deployment.Current.Dispatcher.BeginInvoke(() =>
        {
            try
            {
                lock (sound_lists)
                {
                    // re-create media element each time in order to avoid multiple subscribers to MediaEnded event
                    mediaElement = new MediaElement();
                    FreeMapMainScreen.get().LayoutRoot.Children.Add(mediaElement);
                    mediaElement.SetSource(current_list.streams[copy_index]);
                    mediaElement.Volume = 1.0;//todomt (double)((double)sound_level / 100.0);
                    mediaElement.MediaEnded += delegate
                    {
                        lock (sound_lists)
                        {
                            // Release the stream that just finished playing.
                            if (current_list != null && current_list.streams != null && current_list.streams[copy_index] != null)
                            {
                                current_list.streams[copy_index].Close();
                            }
                            copy_index++;
                            if (copy_index == copy_count)
                            {
                                // Last item: dispose of the list and advance to the next one.
                                closeCurrentList();
                                FreeMapMainScreen.get().LayoutRoot.Children.Remove(mediaElement);
                                if ((current_list.flags & SoundList.SOUND_LIST_NO_FREE) == 0)
                                {
                                    listFree(sound_lists[copy_list_id]);
                                }
                                sound_lists[copy_list_id] = null;
                                current_list = null;
                                playNextList();
                            }
                            else
                            {
                                // More items: reuse the same element for the next stream.
                                mediaElement.SetSource(current_list.streams[copy_index]);
                                mediaElement.Volume = 1.0;//todomt (double)((double)sound_level / 100.0);
                                mediaElement.Play();
                            }
                        }
                    };
                    try
                    {
                        mediaElement.Play();
                    }
                    catch (Exception e)
                    {
                        Console.WriteLine(e);
                    }
                }
            }
            catch (Exception e)
            {
                Logger.log("Exception: " + e);
            }
        });
    }
    catch (Exception e)
    {
        // If scheduling playback failed, drop the current list and move on.
        Logger.log("Exception: " + e);
        closeCurrentList();
        playNextList();
        return;
    }
}
/// <summary>
/// Completion handler for the module download: reads the downloaded ASAP
/// module, selects the song and duration, and starts playback through a
/// new MediaElement.
/// </summary>
void WebClient_OpenReadCompleted(object sender, OpenReadCompletedEventArgs e)
{
    WebClient = null;
    if (e.Cancelled || e.Error != null)
        return;

    byte[] module = new byte[e.Result.Length];
    // BUGFIX: Stream.Read may return fewer bytes than requested; the original
    // issued a single Read call and could load a truncated module. Loop until
    // the whole module has been read (or the stream ends).
    int moduleLen = 0;
    int bytesRead;
    while (moduleLen < module.Length
           && (bytesRead = e.Result.Read(module, moduleLen, module.Length - moduleLen)) > 0)
    {
        moduleLen += bytesRead;
    }

    ASAP asap = new ASAP();
    asap.Load(Filename, module, moduleLen);
    ASAPInfo info = asap.GetInfo();
    if (Song < 0)
        Song = info.GetDefaultSong();
    // -1 duration means "play forever" for looping songs.
    int duration = info.GetLoop(Song) ? -1 : info.GetDuration(Song);
    asap.PlaySong(Song, duration);

    Stop();
    MediaElement = new MediaElement();
    MediaElement.Volume = 1;
    MediaElement.AutoPlay = true;
    MediaElement.SetSource(new ASAPMediaStreamSource(asap, duration));
}
/// <summary>
/// Builds the camera effect and stream source for the current texture, shows
/// positional-texture gesture hints when applicable, and routes the effect
/// stream into the background VideoBrush.
/// </summary>
private void Initialize()
{
    StatusTextBlock.Text = AppResources.MainPage_Status_InitializingCamera;

    _cameraEffect = new Effects { PhotoCaptureDevice = App.Camera, GlobalAlpha = 0.5 };
    _cameraEffect.SetTexture(App.Texture.File);

    if (App.Texture.IsPositional)
    {
        // Positional textures can be dragged and pinched: show the hints and
        // reset the transform state to its defaults.
        DragHintText.Visibility = Visibility.Visible;
        PinchHintText.Visibility = Visibility.Visible;

        _angle = 0;
        _initialAngle = 0;
        _scale = DefaultScale;
        _position = new Point(0.5, 0.5);
        _initialPosition = new Point(0.5, 0.5);

        RefreshTargetArea();
    }
    else
    {
        DragHintText.Visibility = Visibility.Collapsed;
        PinchHintText.Visibility = Visibility.Collapsed;
    }

    LevelSlider.Value = 0.5;

    _cameraStreamSource = new CameraStreamSource(_cameraEffect, App.Camera.CaptureResolution);
    _cameraStreamSource.FrameRateChanged += CameraStreamSource_FPSChanged;

    _mediaElement = new MediaElement
    {
        Stretch = Stretch.UniformToFill,
        BufferingTime = new TimeSpan(0)
    };
    _mediaElement.SetSource(_cameraStreamSource);

    // Using VideoBrush in XAML instead of MediaElement, because otherwise
    // CameraStreamSource.CloseMedia() does not seem to be called by the framework:/
    BackgroundVideoBrush.SetSource(_mediaElement);

    StatusTextBlock.Text = _cameraEffect.EffectName;
}
/// <summary>
/// Opens and sets up the camera if not already. Creates a new
/// CameraStreamSource with an effect and shows it on the screen via
/// the media element.
/// </summary>
private async void Initialize()
{
    Debug.WriteLine("MainPage.Initialize()");

    var mediaElementSize = new Size(MediaElementWidth, MediaElementHeight);

    if (_camera == null)
    {
        // Open the camera with the largest capture resolution whose aspect
        // ratio matches the target (within 0.1).
        var selectedCaptureResolution = PhotoCaptureDevice
            .GetAvailableCaptureResolutions(CameraSensorLocation.Back)
            .Where(r => Math.Abs(AspectRatio - r.Width/r.Height) <= 0.1)
            .OrderBy(r => r.Width)
            .Last();

        _camera = await PhotoCaptureDevice.OpenAsync(
            CameraSensorLocation.Back, selectedCaptureResolution);

        // Bake the sensor orientation into the encoded images.
        var rotation = _camera.SensorLocation == CameraSensorLocation.Back
            ? _camera.SensorRotationInDegrees
            : -_camera.SensorRotationInDegrees;
        _camera.SetProperty(KnownCameraGeneralProperties.EncodeWithOrientation, rotation);

        // Preview: smallest matching-aspect resolution that still covers the media element.
        Size selectedPreviewResolution = PhotoCaptureDevice
            .GetAvailablePreviewResolutions(CameraSensorLocation.Back)
            .Where(r => Math.Abs(AspectRatio - r.Width/r.Height) <= 0.1)
            .Where(r => (r.Height >= mediaElementSize.Height) && (r.Width >= mediaElementSize.Width))
            .OrderBy(r => r.Width)
            .First();

        await _camera.SetPreviewResolutionAsync(selectedPreviewResolution);

        _cameraEffect.CaptureDevice = _camera;
    }

    if (_mediaElement == null)
    {
        _mediaElement = new MediaElement
        {
            Stretch = Stretch.UniformToFill,
            BufferingTime = new TimeSpan(0)
        };
        _mediaElement.Tap += OnMyCameraMediaElementTapped;
        _source = new CameraStreamSource(_cameraEffect, mediaElementSize);
        _mediaElement.SetSource(_source);
        MediaElementContainer.Children.Add(_mediaElement);
        _source.FPSChanged += OnFPSChanged;
    }

    // Show the index and the name of the current effect
    if (_cameraEffect is NokiaImagingSDKEffects)
    {
        var sdkEffects = _cameraEffect as NokiaImagingSDKEffects;
        EffectNameTextBlock.Text =
            (sdkEffects.EffectIndex + 1) + "/" + NokiaImagingSDKEffects.NumberOfEffects +
            ": " + sdkEffects.EffectName;
    }
    else
    {
        EffectNameTextBlock.Text = _cameraEffect.EffectName;
    }
}
/// <summary>
/// Handles a loaded queue item: for "flicker video" items, creates a
/// MediaElement, registers it by index, feeds it the loaded stream and wires
/// up the flicker behaviour.
/// </summary>
private void QueueItemLoaded(object sender, LoadedEventArgs e)
{
    // Only flicker videos are handled here.
    if (e.Loaded.Category != "flicker video")
        return;

    var flickerElement = new MediaElement();
    this.flickerContainer.Children.Add(flickerElement);

    // The item's key encodes its slot index.
    _flickers[int.Parse(e.Key)] = flickerElement;

    flickerElement.SetSource(e.Loaded.Stream);
    InitialiseFlickerVideo(flickerElement);
    flickerElement.MediaEnded += FlickerMediaEnded;

    _loadedItems++;
}
/// <summary>
/// Creates the camera effect and stream source and routes the effect stream
/// into the background VideoBrush.
/// </summary>
// FIX: was declared 'async' with no awaits (compiler warning CS1998); the
// modifier is dropped. Callers are unaffected — the method still returns void.
private void Initialize()
{
    StatusTextBlock.Text = AppResources.MainPage_StatusTextBlock_StartingCamera;

    _cameraEffect = new Effects() {PhotoCaptureDevice = App.Camera};

    _cameraStreamSource = new CameraStreamSource(_cameraEffect, App.Camera.CaptureResolution);
    _cameraStreamSource.FrameRateChanged += CameraStreamSource_FPSChanged;

    _mediaElement = new MediaElement {Stretch = Stretch.UniformToFill, BufferingTime = new TimeSpan(0)};
    _mediaElement.SetSource(_cameraStreamSource);

    // Using VideoBrush in XAML instead of MediaElement, because otherwise
    // CameraStreamSource.CloseMedia() does not seem to be called by the framework:/
    BackgroundVideoBrush.SetSource(_mediaElement);

    StatusTextBlock.Text = _cameraEffect.EffectName;
}
/// <summary>
/// Opens the back camera at its largest preview resolution, attaches the
/// Nokia Imaging SDK effect pipeline and shows the preview via the
/// background VideoBrush.
/// </summary>
private async void Initialize()
{
    StatusTextBlock.Text = AppResources.MainPage_StatusTextBlock_StartingCamera;

    var resolution = PhotoCaptureDevice.GetAvailablePreviewResolutions(CameraSensorLocation.Back).Last();
    _photoCaptureDevice = await PhotoCaptureDevice.OpenAsync(CameraSensorLocation.Back, resolution);
    await _photoCaptureDevice.SetPreviewResolutionAsync(resolution);

    _cameraEffect = new NokiaImagingSDKEffects { PhotoCaptureDevice = _photoCaptureDevice };

    _cameraStreamSource = new CameraStreamSource(_cameraEffect, resolution);
    _cameraStreamSource.FrameRateChanged += CameraStreamSource_FPSChanged;

    _mediaElement = new MediaElement
    {
        Stretch = Stretch.UniformToFill,
        BufferingTime = new TimeSpan(0)
    };
    _mediaElement.SetSource(_cameraStreamSource);

    // Using VideoBrush in XAML instead of MediaElement, because otherwise
    // CameraStreamSource.CloseMedia() does not seem to be called by the framework:/
    BackgroundVideoBrush.SetSource(_mediaElement);

    // Step back once so the pipeline starts on the effect preceding the default.
    _cameraEffect.PreviousEffect();

    StatusTextBlock.Text = _cameraEffect.EffectName;
}
/// <summary>
/// Tears down and re-initializes the camera, then reconnects the preview —
/// through the effect stream for selfie mode, directly otherwise.
/// </summary>
private async Task ResetCamera()
{
    Uninitialize();

    if (Camera != null)
    {
        Camera.Dispose();
        Camera = null;
    }

    // Disable the UI while the camera is (re)initializing.
    SetScreenButtonsEnabled(false);
    SetCameraButtonsEnabled(false);

    ShowProgress(AppResources.InitializingCameraText);
    await InitializeCamera(PerfectCamera.DataContext.Instance.SensorLocation);
    HideProgress();

    InitEffectPanel();

    if (PerfectCamera.DataContext.Instance.CameraType == PerfectCameraType.Selfie)
    {
        // Selfie mode renders through the effect stream via an intermediate MediaElement.
        _mediaElement = new MediaElement
        {
            Stretch = Stretch.UniformToFill,
            BufferingTime = new TimeSpan(0)
        };
        _mediaElement.SetSource(_cameraStreamSource);
        BackgroundVideoBrush.SetSource(_mediaElement);

        EffectNameTextBlock.Text = _cameraEffect.EffectName;
        EffectNameFadeIn.Begin();
    }
    else
    {
        // Rear camera feeds the brush directly.
        BackgroundVideoBrush.SetSource(Camera);
    }

    SetScreenButtonsEnabled(true);
    SetCameraButtonsEnabled(true);

    SetOrientation(this.Orientation);
}
// Verifies that SetSource rejects a null MediaStreamSource with an
// ArgumentNullException and — unlike the Stream.Null case — leaves the
// current Source property untouched.
// NOTE(review): "Souce" in the name is a typo, kept to preserve the test's identity.
public void SetSource_MediaStreamSouce_Null ()
{
    MediaElement media = new MediaElement ();
    // Swallow MediaFailed: the nonexistent URI below would otherwise surface an error.
    media.MediaFailed += delegate { /* do nothing */ };
    Assert.IsNull (media.Source, "Source-1");
    Assert.Throws<ArgumentNullException> (delegate {
        media.SetSource ((MediaStreamSource) null);
    }, "null");
    Assert.IsNull (media.Source, "Source-2");
    media.Source = new Uri ("thisfinedoesnotexist.wmv", UriKind.Relative);
    Assert.IsNotNull (media.Source, "Source-3");
    Assert.Throws<ArgumentNullException> (delegate {
        media.SetSource ((MediaStreamSource) null);
    }, "null");
    // The rejected call must not clear the existing URI Source.
    Assert.IsNotNull (media.Source, "Source-4");
}
/// <summary>
/// Transfers this holder's state (license acquirer, markers, and stream or
/// URI source) onto the target MediaElement, clearing the transferred values
/// from this instance. (Name kept for compatibility despite the typo.)
/// </summary>
public void Dettach(MediaElement target)
{
    // Hand over the license acquirer, if one was captured.
    if (this.LicenseAcquirer != null)
    {
        target.LicenseAcquirer = this.LicenseAcquirer;
        this.LicenseAcquirer = null;
    }

    // Move any timeline markers across, then drop our copies.
    if (this.Markers.Count > 0)
    {
        foreach (var marker in this.Markers)
        {
            target.Markers.Add(marker);
        }
        this.Markers.Clear();
    }

    // A stream source takes precedence over a URI source.
    if (this.Stream != null)
    {
        target.SetSource(this.Stream);
        this.Stream = null;
    }
    else if (this.Source != null)
    {
        target.Source = this.Source;
    }
}