/// <summary>
/// Releases every resource created by <c>InitializeMediaImpl</c>: the media codec extractor,
/// the audio sound/instance and its emitter attachments, the Android surface objects and the
/// external video texture. Safe to call on a partially initialized instance (all accesses are
/// null-guarded) and leaves the instance ready for a fresh initialization.
/// </summary>
partial void ReleaseMediaImpl()
{
    IsInitialized = false;

    MediaSynchronizer = null;

    MediaCodecVideoExtractor?.Dispose();
    MediaCodecVideoExtractor = null;

    audioSoundInstance?.Stop();
    audioSoundInstance?.Dispose();
    audioSoundInstance = null;

    if (audioSound != null)
    {
        // Detach from every emitter before disposing, otherwise the emitters
        // would keep references to a disposed sound.
        foreach (var emitter in videoComponent.AudioEmitters)
        {
            emitter?.DetachSound(audioSound);
        }

        audioSound.Dispose();
        audioSound = null;
    }

    // FIX: release the Android surface objects created in InitializeMediaImpl.
    // Previously they were never released here, leaking native surface resources
    // on every play/release cycle.
    VideoSurface?.Release();
    VideoSurface = null;
    VideoSurfaceTexture?.Release();
    VideoSurfaceTexture = null;

    videoTexture.SetTargetContentToOriginalPlaceholder();

    TextureExternal?.ReleaseData();
    TextureExternal = null;
}
/// <summary>
/// Initializes the Android MediaCodec-based playback pipeline: creates the external OES
/// texture and the surface the codec decodes into, sets up the video extractor, and — when
/// the media has an audio track and <c>videoComponent.PlayAudio</c> is set — the audio decoder,
/// played either through the component's audio emitters (mono only) or as an unlocalized sound.
/// </summary>
/// <param name="url">Path or URL of the media asset.</param>
/// <param name="startPosition">Byte offset of the media data within the asset.</param>
/// <param name="length">Byte length of the media data.</param>
/// <param name="succeeded">Set to true once the whole pipeline is initialized.</param>
/// <exception cref="InvalidOperationException">Already initialized, or no audio engine is available.</exception>
/// <exception cref="ArgumentNullException">The owning video component is null.</exception>
partial void InitializeMediaImpl(string url, long startPosition, long length, ref bool succeeded)
{
    if (MediaSynchronizer != null || MediaCodecVideoExtractor != null)
    {
        throw new InvalidOperationException("mediaCodec has already been initialized");
    }
    if (videoComponent == null)
    {
        // FIX: the original passed the message "videoComponent is null" as the
        // paramName argument of ArgumentNullException; use nameof instead.
        throw new ArgumentNullException(nameof(videoComponent));
    }

    ReceivedNotificationToUpdateVideoTextureSurface = false;

    //==============================================================================================
    // Create the texture and surface the codec will extract the video into directly.
    // The texture is external (GlTextureExternalOes): the mediaCodec API will create and fill it.
    // We don't know its size and format (both depend on the media and on the device implementation).
    TextureExternal = Texture.NewExternalOES(GraphicsDevice); // TODO: Can we just allocate a mip mapped texture for this?
    var textureId = (int)TextureExternal.TextureId;
    VideoSurfaceTexture = new SurfaceTexture(textureId);
    VideoSurface = new Surface(VideoSurfaceTexture);

    //==============================================================================================
    // Initialize the mediaCodec
    MediaSynchronizer = new MediaSynchronizer();

    // Init the video extractor
    MediaCodecVideoExtractor = new MediaCodecVideoExtractor(this, MediaSynchronizer, VideoSurface);
    MediaCodecVideoExtractor.Initialize(services, url, startPosition, length);
    MediaSynchronizer.RegisterExtractor(MediaCodecVideoExtractor);
    MediaSynchronizer.RegisterPlayer(MediaCodecVideoExtractor);

    Duration = MediaCodecVideoExtractor.MediaDuration;

    if (MediaCodecVideoExtractor.HasAudioTrack && videoComponent.PlayAudio)
    {
        // Init the audio decoder
        var audioEngine = services.GetService<IAudioEngineProvider>()?.AudioEngine;
        if (audioEngine == null)
        {
            // FIX: was `throw new Exception(...)`; a specific exception type is
            // more appropriate and remains compatible with callers catching Exception.
            throw new InvalidOperationException("VideoInstance mediaCodec failed to get the AudioEngine");
        }

        var isSpatialized = videoComponent.AudioEmitters.Any(x => x != null);
        audioSound = new StreamedBufferSound(audioEngine, MediaSynchronizer, url, startPosition, length, isSpatialized);

        if (isSpatialized) // we play the audio through the emitters if any are set
        {
            if (audioSound.GetCountChannels() == 1)
            {
                // FIX: register the audio extractor only once we know the sound is
                // usable; previously a disposed sound stayed registered with the
                // synchronizer when the stereo error path below was taken.
                MediaSynchronizer.RegisterExtractor(audioSound);

                // Attach the sound to the audioEmitters
                foreach (var emitter in videoComponent.AudioEmitters)
                {
                    if (emitter == null)
                    {
                        continue;
                    }

                    var controller = emitter.AttachSound(audioSound);
                    MediaSynchronizer.RegisterPlayer(controller);
                    audioControllers.Add(controller);
                }
            }
            else
            {
                Logger.Error("Stereo sound tracks cannot be played through audio emitters. The sound track need to be a mono audio track");
                audioSound.Dispose();
                audioSound = null;
            }
        }
        else // otherwise we play the audio as an unlocalized sound.
        {
            MediaSynchronizer.RegisterExtractor(audioSound);
            audioSoundInstance = (SoundInstanceStreamedBuffer)audioSound.CreateInstance();
            MediaSynchronizer.RegisterPlayer(audioSoundInstance);
        }
    }

    var videoMetadata = MediaCodecVideoExtractor.MediaMetadata;
    AllocateVideoTexture(videoMetadata.Width, videoMetadata.Height);

    succeeded = IsInitialized = true;
}