/// <summary>
/// Stops and releases the media codec and the audio track, then interrupts the
/// worker threads. Each resource is cleaned up in its own try block so one
/// failure no longer aborts the rest of the teardown (the original single try
/// leaked the audio track and never interrupted the threads when the codec
/// shutdown threw).
/// </summary>
/// <returns>true when every cleanup step succeeded; false if any step threw.</returns>
public bool StopDecoder()
{
    bool ok = true;

    if (_mediaCodec != null)
    {
        try
        {
            _mediaCodec.Stop();
            _mediaCodec.Release();
        }
        catch (Exception)
        {
            ok = false;
        }
        _mediaCodec = null; // avoid re-releasing a dead codec on a repeated call
    }

    if (_audioTrack != null)
    {
        try
        {
            _audioTrack.Stop();
            _audioTrack.Release();
        }
        catch (Exception)
        {
            ok = false;
        }
        _audioTrack = null;
    }

    try
    {
        // Interrupt the threads even when codec/track cleanup failed.
        _encoderThread?.Interrupt();
        _decoderThread?.Interrupt();
    }
    catch (Exception)
    {
        ok = false;
    }

    return ok;
}
/// <summary>
/// Stops the stream: shuts down either the MediaRecorder pipeline or the
/// MediaCodec pipeline (depending on the active mode) together with its
/// packetizer, then marks the stream as stopped.
/// </summary>
public void stop()
{
    if (!mStreaming)
    {
        return;
    }

    try
    {
        if (mMode == MODE_MEDIARECORDER_API)
        {
            mMediaRecorder.Stop();
            mMediaRecorder.Release();
            mMediaRecorder = null;
            closeSockets();
            mPacketizer.stop();
        }
        else
        {
            mPacketizer.stop();
            mMediaCodec.Stop();
            mMediaCodec.Release();
            mMediaCodec = null;
        }
    }
    catch (Exception e)
    {
        // e.StackTrace is already a string and can be null: calling ToString()
        // on it risked a NullReferenceException and dropped the exception
        // message. Log the full exception instead.
        System.Diagnostics.Trace.WriteLine(e.ToString());
    }

    // Cleared even after a failed shutdown, matching the original behavior.
    mStreaming = false;
}
/// <summary>
/// Tears down the decoder: stops the callback thread and its Looper, disposes
/// the handler, then stops/releases/disposes the codec. All failures are
/// logged and swallowed so Close never throws.
/// </summary>
public void Close()
{
    Console.WriteLine("called Close at PlatformVideoDecoderAndroid Class.");
    try
    {
        if (callbackThread != null)
        {
            // Flag the callback first so it stops acting on codec events.
            myCallback.isClosed = true;
            callbackThread.Looper.Quit();
            callbackThread.Looper.Dispose();
            callbackThread.Interrupt();
            callbackThread.Dispose();
            callbackThread = null;
        }
        if (handler != null)
        {
            handler.Dispose();
            handler = null;
        }
        if (mDecoder != null)
        {
            mDecoder.Stop();
            mDecoder.Release();
            mDecoder.Dispose();
            // Was never cleared, so a second Close() re-released a dead codec.
            mDecoder = null;
            eosReceived = true;
        }
    }
    catch (Exception e)
    {
        Console.WriteLine(e.ToString());
    }
}
/// <summary>
/// Checks the video file to see if the contents match our expectations:
/// decodes the video to a GL-backed surface and verifies the pixels,
/// failing when bad frames are detected.
/// </summary>
private void checkVideoFile(VideoChunks inputData)
{
    OutputSurface renderSurface = null;
    MediaCodec videoDecoder = null;

    mLargestColorDelta = -1;
    if (AppSettings.Logging.SendToConsole)
    {
        Log.Debug(TAG, "checkVideoFile");
    }

    try
    {
        renderSurface = new OutputSurface(mWidth, mHeight);

        MediaFormat format = inputData.getMediaFormat();
        videoDecoder = MediaCodec.CreateDecoderByType(MIME_TYPE);
        videoDecoder.Configure(format, renderSurface.Surface, null, 0);
        videoDecoder.Start();

        int badFrames = checkVideoData(inputData, videoDecoder, renderSurface);
        if (badFrames != 0)
        {
            fail("Found " + badFrames + " bad frames");
        }
    }
    finally
    {
        if (renderSurface != null)
        {
            renderSurface.Release();
        }
        if (videoDecoder != null)
        {
            videoDecoder.Stop();
            videoDecoder.Release();
        }
        Log.Info(TAG, "Largest color delta: " + mLargestColorDelta);
    }
}
/**
 * Edits a video file, saving the contents to a new VideoChunks instance. This involves
 * decoding and re-encoding, not to mention conversions between YUV and RGB, and so may
 * be lossy.
 * <p>
 * If we recognize the decoded format we can do this in Java code using the ByteBuffer[]
 * output, but it's not practical to support all OEM formats. By using a SurfaceTexture
 * for output and a Surface for input, we can avoid issues with obscure formats and can
 * use a fragment shader to do transformations.
 * <p>
 * NOTE(review): the setup order below matters — the encoder's InputSurface must be
 * created and made current before the OutputSurface, because OutputSurface uses the
 * EGL context created by InputSurface. Do not reorder.
 */
private VideoChunks editVideoFile(VideoChunks inputData)
{
    if (AppSettings.Logging.SendToConsole)
    {
        Log.Debug(TAG, "editVideoFile " + mWidth + "x" + mHeight);
    }
    VideoChunks outputData = new VideoChunks();
    MediaCodec decoder = null;
    MediaCodec encoder = null;
    InputSurface inputSurface = null;
    OutputSurface outputSurface = null;
    try
    {
        MediaFormat inputFormat = inputData.getMediaFormat();
        // Create an encoder format that matches the input format. (Might be able to just
        // re-use the format used to generate the video, since we want it to be the same.)
        MediaFormat outputFormat = MediaFormat.CreateVideoFormat(MIME_TYPE, mWidth, mHeight);
        outputFormat.SetInteger(MediaFormat.KeyColorFormat,
                (int)MediaCodecInfo.CodecCapabilities.COLORFormatSurface);
        // Bit rate, frame rate and I-frame interval are copied from the input format.
        outputFormat.SetInteger(MediaFormat.KeyBitRate,
                inputFormat.GetInteger(MediaFormat.KeyBitRate));
        outputFormat.SetInteger(MediaFormat.KeyFrameRate,
                inputFormat.GetInteger(MediaFormat.KeyFrameRate));
        outputFormat.SetInteger(MediaFormat.KeyIFrameInterval,
                inputFormat.GetInteger(MediaFormat.KeyIFrameInterval));
        outputData.setMediaFormat(outputFormat);

        encoder = MediaCodec.CreateEncoderByType(MIME_TYPE);
        encoder.Configure(outputFormat, null, null, MediaCodecConfigFlags.Encode);
        inputSurface = new InputSurface(encoder.CreateInputSurface());
        inputSurface.MakeCurrent();
        encoder.Start();

        // OutputSurface uses the EGL context created by InputSurface.
        decoder = MediaCodec.CreateDecoderByType(MIME_TYPE);
        outputSurface = new OutputSurface();
        outputSurface.ChangeFragmentShader(FRAGMENT_SHADER);
        decoder.Configure(inputFormat, outputSurface.Surface, null, 0);
        decoder.Start();

        editVideoData(inputData, decoder, outputSurface, inputSurface, encoder, outputData);
    }
    finally
    {
        if (AppSettings.Logging.SendToConsole)
        {
            Log.Debug(TAG, "shutting down encoder, decoder");
        }
        if (outputSurface != null)
        {
            outputSurface.Release();
        }
        if (inputSurface != null)
        {
            inputSurface.Release();
        }
        if (encoder != null)
        {
            encoder.Stop();
            encoder.Release();
        }
        if (decoder != null)
        {
            decoder.Stop();
            decoder.Release();
        }
    }
    return(outputData);
}
/// <summary>
/// Stops the audio decoder: drops any queued frames and reusable buffers,
/// then stops, releases and disposes the codec (best-effort).
/// </summary>
public void stop()
{
    if (!running)
    {
        return;
    }
    running = false;

    lock (pendingFrames)
    {
        pendingFrames.Clear();
    }
    // availableBuffers was previously cleared while holding pendingFrames'
    // monitor; other code paths in this source lock availableBuffers itself,
    // so that clear was unsynchronized with respect to them. Lock each
    // collection on its own monitor.
    lock (availableBuffers)
    {
        availableBuffers.Clear();
    }

    if (audioDecoder != null)
    {
        try
        {
            audioDecoder.Stop();
            audioDecoder.Release();
        }
        catch (Exception)
        {
            // best-effort shutdown: the codec is discarded either way
        }
        audioDecoder.Dispose();
        audioDecoder = null;
    }
}
/// <summary>
/// Stops the audio encoder and clears any buffers still queued for reuse.
/// Codec shutdown errors are swallowed; the codec is always disposed.
/// </summary>
public void stop()
{
    if (!running)
    {
        return;
    }
    running = false;

    if (audioEncoder != null)
    {
        try
        {
            audioEncoder.Stop();
            audioEncoder.Release();
        }
        catch (Exception)
        {
            // ignore shutdown failures; the codec is being discarded anyway
        }

        audioEncoder.Dispose();
        audioEncoder = null;
    }

    lock (availableBuffers)
    {
        availableBuffers.Clear();
    }
}
/// <summary>
/// Decodes one clip through a MediaCodec/OutputSurface pair and feeds the
/// resampled frames to the encoder via ResampleVideo. Seeks to the clip's
/// start time when one is set, then snaps the clip start to the actual
/// sample time reached.
/// </summary>
private void FeedClipToEncoder(SamplerClip clip)
{
    mLastSampleTime = 0;

    MediaCodec decoder = null;
    MediaExtractor extractor = SetupExtractorForClip(clip);
    if (extractor == null)
    {
        return;
    }

    int trackIndex = GetVideoTrackIndex(extractor);
    extractor.SelectTrack(trackIndex);
    MediaFormat clipFormat = extractor.GetTrackFormat(trackIndex);

    if (clip.getStartTime() != -1)
    {
        // Clip time is scaled by 1000 for SeekTo, and the reached sample
        // time is scaled back — presumably ms <-> µs; confirm against callers.
        extractor.SeekTo(clip.getStartTime() * 1000, MediaExtractorSeekTo.PreviousSync);
        clip.setStartTime(extractor.SampleTime / 1000);
    }

    try
    {
        decoder = MediaCodec.CreateDecoderByType(MediaHelper.MIME_TYPE_AVC);
        mOutputSurface = new OutputSurface();
        decoder.Configure(clipFormat, mOutputSurface.Surface, null, 0);
        decoder.Start();
        ResampleVideo(extractor, decoder, clip);
    }
    catch (System.Exception e)
    {
        // Previously swallowed silently; at least surface the failure so
        // resampling problems can be diagnosed.
        System.Console.WriteLine(e);
    }
    finally
    {
        if (mOutputSurface != null)
        {
            mOutputSurface.Release();
        }
        if (decoder != null)
        {
            decoder.Stop();
            decoder.Release();
        }
        if (extractor != null)
        {
            extractor.Release();
            extractor = null;
        }
    }
}
/// <summary>
/// Releases the video codec and drops any queued frames. Safe to call more
/// than once: the codec field is cleared and shutdown errors are swallowed
/// (Dispose should never throw).
/// </summary>
public void Dispose()
{
    if (_videoCodec != null)
    {
        try
        {
            _videoCodec.Stop();
            _videoCodec.Release();
            _videoCodec.Dispose();
        }
        catch (System.Exception)
        {
            // Dispose must not throw; the codec is being discarded regardless.
        }
        _videoCodec = null; // make a second Dispose() a no-op
    }
    _videoFrameQueue.Clear();
}
/// <summary>
/// Releases the audio codec and drops any queued frames. Safe to call more
/// than once: the codec field is cleared and shutdown errors are swallowed
/// (Dispose should never throw).
/// </summary>
public void Dispose()
{
    if (_audioCodec != null)
    {
        try
        {
            _audioCodec.Stop();
            _audioCodec.Release();
            _audioCodec.Dispose();
        }
        catch (System.Exception)
        {
            // Dispose must not throw; the codec is being discarded regardless.
        }
        _audioCodec = null; // make a second Dispose() a no-op
    }
    _audioFrameQueue.Clear();
}
/// <summary>
/// Best-effort encoder shutdown: Stop and Release are attempted independently
/// so a failed Stop still lets the codec be released. The field is cleared so
/// repeated calls are no-ops (the original left it set, so a second call
/// re-stopped an already-released codec).
/// </summary>
private void releaseEncoder()
{
    if (mEncoder == null)
    {
        return;
    }
    try
    {
        mEncoder.Stop();
    }
    catch (Java.Lang.Exception ignore)
    {
    }
    try
    {
        mEncoder.Release();
    }
    catch (Java.Lang.Exception ignore)
    {
    }
    mEncoder = null;
}
/// <summary>
/// Tears down the audio decoding chain: the decoder, the extractor, and the
/// input file stream/handle. Each step is guarded so this is safe to call
/// when nothing was ever opened.
/// </summary>
partial void ReleaseMediaInternal()
{
    if (audioMediaDecoder != null)
    {
        audioMediaDecoder.Stop();
        audioMediaDecoder.Release();
        audioMediaDecoder = null;
    }

    if (audioMediaExtractor != null)
    {
        audioMediaExtractor.Release();
        audioMediaExtractor = null;
    }

    InputFileStream?.Dispose();
    InputFileStream = null;

    InputFile?.Dispose();
    InputFile = null;
}
/// <summary>
/// Releases the encoder and the muxer. The muxer is only Stop()ped when it
/// was actually started (stopping an unstarted MediaMuxer throws), but it is
/// now always Released — previously an unstarted muxer was never released
/// nor cleared, leaking it.
/// </summary>
private void releaseEncoder()
{
    if (_Encoder != null)
    {
        _Encoder.Stop();
        _Encoder.Release();
        _Encoder = null;
    }
    if (_Muxer != null)
    {
        if (_MuxerStarted)
        {
            _Muxer.Stop();
        }
        _Muxer.Release();
        _Muxer = null;
    }
}
/// <summary>
/// Shuts the decoder down and clears the codec reference. Stop() is
/// intentionally left commented out; only Release() is attempted, and any
/// failure is reported to the user.
/// </summary>
public void stop()
{
    bConfigured = false;

    if (codec != null)
    {
        try
        {
            //codec.Stop();
            codec.Release();
        }
        catch (Exception ex)
        {
            MainActivity.getActivity().notifyUser("VideoDecoder stop exception " + ex.Message, false);
        }
    }

    codec = null;
}
/// <summary>
/// Stops and releases the shared codec, ignoring any shutdown errors,
/// then clears the configured flag and the codec reference.
/// </summary>
static public void stop()
{
    if (codec == null)
    {
        return;
    }

    bConfigured = false;
    try
    {
        codec.Stop();
        codec.Release();
    }
    catch (Exception)
    {
        // best-effort: the codec is discarded either way
    }
    codec = null;
}
/// <summary>
/// Releases encoder resources: stops and releases the encoder, then releases
/// its input surface, clearing both fields.
/// </summary>
private void releaseEncoder()
{
    if (AppSettings.Logging.SendToConsole)
    {
        Log.Debug(TAG, "releasing encoder objects");
    }

    if (mEncoder != null)
    {
        mEncoder.Stop();
        mEncoder.Release();
        mEncoder = null;
    }

    if (_inputSurface != null)
    {
        _inputSurface.Release();
        _inputSurface = null;
    }
}
/// <summary>
/// Stops recording: tears down the audio recorder and the encoder
/// (best-effort, errors swallowed), resets the capture buffer, and clears
/// both buffer queues under their own locks.
/// </summary>
private void cleanUp()
{
    running = false;

    if (audioRecorder != null)
    {
        try
        {
            audioRecorder.Stop();
            audioRecorder.Release();
        }
        catch (Exception)
        {
            // best-effort shutdown
        }
        audioRecorder.Dispose();
        audioRecorder = null;
    }

    if (audioEncoder != null)
    {
        try
        {
            audioEncoder.Stop();
            audioEncoder.Release();
        }
        catch (Exception)
        {
            // best-effort shutdown
        }
        audioEncoder.Dispose();
        audioEncoder = null;
    }

    buffer = null;
    bufferSize = 0;

    lock (outputBuffers)
    {
        outputBuffers.Clear();
    }
    lock (availableBuffers)
    {
        availableBuffers.Clear();
    }
}
/// <summary>
/// Fully resets this extractor: unregisters from the scheduler first (to
/// avoid receiving any more events), then releases the decoder and the
/// extractor, closes the file descriptor, and clears all media state.
/// </summary>
private void Release()
{
    // Unregister first so the scheduler stops sending events mid-teardown.
    Scheduler.UnregisterExtractor(this);

    if (MediaDecoder != null)
    {
        MediaDecoder.Stop();
        MediaDecoder.Release();
        MediaDecoder = null;
    }

    if (mediaExtractor != null)
    {
        mediaExtractor.Release();
        mediaExtractor = null;
    }

    inputFile = null;
    MediaMetadata = null;
    MediaDuration = TimeSpan.Zero;

    inputFileDescriptor?.Close();
    inputFileDescriptor = null;

    isInitialized = false;
}
/// <summary>
/// Releases the input surface, encoder, and muxer, clearing each field so a
/// repeated call cannot act on already-released objects (previously only
/// mMuxer was cleared, leaving mInputSurface and mEncoder as dead references).
/// </summary>
private void ReleaseOutputResources()
{
    if (mInputSurface != null)
    {
        mInputSurface.Release();
        mInputSurface = null;
    }
    if (mEncoder != null)
    {
        mEncoder.Stop();
        mEncoder.Release();
        mEncoder = null;
    }
    if (mMuxer != null)
    {
        mMuxer.Stop();
        mMuxer.Release();
        mMuxer = null;
    }
}
/// <summary>
/// Disposes the decoder following the standard Dispose(bool) pattern.
/// Idempotent: guarded by _disposed, and the codec field is now cleared so
/// no dead reference is retained after disposal.
/// </summary>
/// <param name="disposing">True when called from Dispose(); managed codec state is torn down.</param>
private void DisposeVideoDecoder(bool disposing)
{
    if (_disposed)
    {
        return;
    }

    if (disposing)
    {
        if (_mediaCodec != null)
        {
            _mediaCodec.Stop();
            _mediaCodec.Release();
            _mediaCodec.Dispose();
            _mediaCodec = null; // drop the dead reference; _disposed guards re-entry
        }
    }

    Initialized = false;
    _disposed = true;
}
/// <summary>
/// Finishes (or aborts) an encoding session. Waits for the audio encoding
/// task, drains both encoders unless aborting, then releases the encoders,
/// shuts down EGL state, and finally stops/releases the muxer. The muxer is
/// handled last, after both encoders have been stopped.
/// </summary>
/// <param name="abort">True to abandon pending output instead of draining it.</param>
public void EndEncoding(bool abort)
{
    Debug.WriteLine("Releasing encoder objects");
    abortAudioEncoding = abort;
    // Let the background audio task observe the abort flag and finish.
    if (audioEncodingTask != null)
    {
        audioEncodingTask.Wait();
        audioEncodingTask = null;
    }
    if (!abortAudioEncoding)
    {
        // Final drain of both encoders before they are stopped.
        DrainEncoder(videoEncoder, videoBufferInfo, videoTrackIndex, false);
        DrainEncoder(audioEncoder, audioBufferInfo, audioTrackIndex, false);
    }
    if (videoEncoder != null)
    {
        videoEncoder.Stop();
        videoEncoder.Release();
        videoEncoder = null;
    }
    if (audioEncoder != null)
    {
        audioEncoder.Stop();
        audioEncoder.Release();
        audioEncoder = null;
    }
    ElgShutdown();
    if (muxer != null)
    {
        muxer.Stop();
        muxer.Release();
        muxer = null;
    }
}
/// <summary>
/// Releases encoder resources: the encoder itself, its input surface, and the
/// muxer, clearing each field after release.
/// </summary>
private void releaseEncoder()
{
    if (VERBOSE)
    {
        Log.Debug(TAG, "releasing encoder objects");
    }

    if (mEncoder != null)
    {
        mEncoder.Stop();
        mEncoder.Release();
        mEncoder = null;
    }

    if (_inputSurface != null)
    {
        _inputSurface.Release();
        _inputSurface = null;
    }

    if (mMuxer != null)
    {
        mMuxer.Stop();
        mMuxer.Release();
        mMuxer = null;
    }
}
/// <summary>
/// Stops whichever codecs are active (video and/or picture), swallowing
/// shutdown errors, and clears the configured flag and the shared codec
/// reference.
/// </summary>
static public void stop()
{
    bConfigured = false;
    codec = null;

    if (vidCodec != null)
    {
        try
        {
            vidCodec.Stop();
            vidCodec.Release();
        }
        catch
        {
            // ignore shutdown failures
        }
    }

    if (picCodec != null)
    {
        try
        {
            picCodec.Stop();
            picCodec.Release();
        }
        catch
        {
            // ignore shutdown failures
        }
    }
}
/// <summary>
/// Opens the media file at <paramref name="path"/>, selects its video track,
/// creates and starts a decoder configured from the extractor's track format
/// (no output surface), and runs Decode() over it. The decoder and extractor
/// are released in the finally block. Errors are logged and rethrown.
/// </summary>
/// <param name="path">Absolute path of the input media file.</param>
/// <param name="_downloaddir">Directory stored in _downloadsfilesdir for later use.</param>
public void PrepareEncoder(string path, File _downloaddir)
{
    MediaCodec _Decoder = null;
    MediaExtractor extractor = null;
    _downloadsfilesdir = _downloaddir;
    try
    {
        //for (int i = 0; i < extractor.TrackCount; i++)
        //{
        //    MediaFormat Format = extractor.GetTrackFormat(i);
        //    //MediaFormat format = MediaFormat.CreateVideoFormat(MIME_TYPE, 640, 360);
        //    String mime = Format.GetString(MediaFormat.KeyMime);
        //    if (mime.StartsWith("video/"))
        //    {
        //        extractor.SelectTrack(i);
        //        _Decoder = MediaCodec.CreateEncoderByType(mime);
        //        _Decoder.Configure(Format, null, null, 0);
        //        break;
        //    }
        //}
        extractor = new MediaExtractor();
        extractor.SetDataSource(path);
        int trackIndex = selectTrack(extractor);
        //if (trackIndex < 0)
        //{
        //    throw new RuntimeException("No video track found in " + inputFile);
        //}
        // NOTE(review): a negative trackIndex is not rejected here (check is
        // commented out); SelectTrack would then fail — confirm selectTrack
        // always succeeds for the inputs used.
        extractor.SelectTrack(trackIndex);
        MediaFormat format = extractor.GetTrackFormat(trackIndex);
        _Width = format.GetInteger(MediaFormat.KeyWidth);
        _Height = format.GetInteger(MediaFormat.KeyHeight);
        // Could use width/height from the MediaFormat to get full-size frames.
        //outputSurface = new CodecOutputSurface(saveWidth, saveHeight);

        // Create a MediaCodec decoder, and configure it with the MediaFormat from the
        // extractor. It's very important to use the format from the extractor because
        // it contains a copy of the CSD-0/CSD-1 codec-specific data chunks.
        String mime = format.GetString(MediaFormat.KeyMime);
        _Decoder = MediaCodec.CreateDecoderByType(mime);
        _Decoder.Configure(format, null, null, 0);
        _Decoder.Start();
        Decode(_Decoder, extractor);
    }
    catch (Exception e)
    {
        Log.Error(TAG, e.Message, e);
        throw;
    }
    finally
    {
        // release everything we grabbed
        //if (outputSurface != null)
        //{
        //    outputSurface.release();
        //    outputSurface = null;
        //}
        if (_Decoder != null)
        {
            _Decoder.Stop();
            _Decoder.Release();
            _Decoder = null;
        }
        if (extractor != null)
        {
            extractor.Release();
            extractor = null;
        }
    }
    _TrackIndex = -1;
    //_MuxerStarted = false;
}
/**
 * Generates a test video file, saving it as VideoChunks. We generate frames with GL to
 * avoid having to deal with multiple YUV formats.
 *
 * @param output receives the generated format and encoded chunks
 * @return true on success, false on "soft" failure (no suitable AVC codec found)
 */
private bool generateVideoFile(VideoChunks output)
{
    if (AppSettings.Logging.SendToConsole)
    {
        Log.Debug(TAG, "generateVideoFile " + mWidth + "x" + mHeight);
    }
    MediaCodec encoder = null;
    InputSurface inputSurface = null;
    try
    {
        MediaCodecInfo codecInfo = selectCodec(MIME_TYPE);
        if (codecInfo == null)
        {
            // Don't fail CTS if they don't have an AVC codec (not here, anyway).
            Log.Error(TAG, "Unable to find an appropriate codec for " + MIME_TYPE);
            return(false);
        }
        if (AppSettings.Logging.SendToConsole)
        {
            Log.Debug(TAG, "found codec: " + codecInfo.Name);
        }
        // We avoid the device-specific limitations on width and height by using values that
        // are multiples of 16, which all tested devices seem to be able to handle.
        MediaFormat format = MediaFormat.CreateVideoFormat(MIME_TYPE, mWidth, mHeight);
        // Set some properties. Failing to specify some of these can cause the MediaCodec
        // configure() call to throw an unhelpful exception.
        format.SetInteger(MediaFormat.KeyColorFormat, (int)MediaCodecCapabilities.Formatsurface);
        format.SetInteger(MediaFormat.KeyBitRate, mBitRate);
        format.SetInteger(MediaFormat.KeyFrameRate, FRAME_RATE);
        format.SetInteger(MediaFormat.KeyIFrameInterval, IFRAME_INTERVAL);
        if (AppSettings.Logging.SendToConsole)
        {
            Log.Debug(TAG, "format: " + format);
        }
        output.setMediaFormat(format);

        // Create a MediaCodec for the desired codec, then configure it as an encoder with
        // our desired properties. The input surface must be created after Configure and
        // made current before Start.
        encoder = MediaCodec.CreateByCodecName(codecInfo.Name);
        encoder.Configure(format, null, null, MediaCodecConfigFlags.Encode);
        inputSurface = new InputSurface(encoder.CreateInputSurface());
        inputSurface.MakeCurrent();
        encoder.Start();
        generateVideoData(encoder, inputSurface, output);
    }
    finally
    {
        if (encoder != null)
        {
            if (AppSettings.Logging.SendToConsole)
            {
                Log.Debug(TAG, "releasing encoder");
            }
            encoder.Stop();
            encoder.Release();
            if (AppSettings.Logging.SendToConsole)
            {
                Log.Debug(TAG, "released encoder");
            }
        }
        if (inputSurface != null)
        {
            inputSurface.Release();
        }
    }
    return(true);
}
/**
 * Tests extraction from an MP4 to a series of PNG files.
 * <p>
 * We scale the video to 640x480 for the PNG just to demonstrate that we can scale the
 * video with the GPU. If the input video has a different aspect ratio, we could preserve
 * it by adjusting the GL viewport to get letterboxing or pillarboxing, but generally if
 * you're extracting frames you don't want black bars.
 *
 * @param saveWidth  width of the scaled output surface
 * @param saveHeight height of the scaled output surface
 */
public void extractMpegFrames(int saveWidth, int saveHeight)
{
    MediaCodec decoder = null;
    CodecOutputSurface outputSurface = null;
    MediaExtractor extractor = null;
    try
    {
        // Must be an absolute path. The MediaExtractor error messages aren't very
        // useful, so check that the input file exists up front and throw a better
        // exception if it's not there.
        File inputFile = new File(_filesdir, INPUT_FILE);
        if (!inputFile.CanRead())
        {
            throw new FileNotFoundException("Unable to read " + inputFile);
        }
        extractor = new MediaExtractor();
        extractor.SetDataSource(inputFile.ToString());
        int trackIndex = selectTrack(extractor);
        if (trackIndex < 0)
        {
            throw new RuntimeException("No video track found in " + inputFile);
        }
        extractor.SelectTrack(trackIndex);
        MediaFormat format = extractor.GetTrackFormat(trackIndex);
        if (VERBOSE)
        {
            Log.Info(TAG, "Video size is " + format.GetInteger(MediaFormat.KeyWidth) + "x" +
                    format.GetInteger(MediaFormat.KeyHeight));
        }
        // Could use width/height from the MediaFormat to get full-size frames.
        outputSurface = new CodecOutputSurface(saveWidth, saveHeight);

        // Create a MediaCodec decoder, and configure it with the MediaFormat from the
        // extractor. It's very important to use the format from the extractor because
        // it contains a copy of the CSD-0/CSD-1 codec-specific data chunks.
        String mime = format.GetString(MediaFormat.KeyMime);
        decoder = MediaCodec.CreateDecoderByType(mime);
        decoder.Configure(format, outputSurface.getSurface(), null, 0);
        decoder.Start();
        doExtract(extractor, trackIndex, decoder, outputSurface);
    }
    finally
    {
        // release everything we grabbed
        if (outputSurface != null)
        {
            outputSurface.release();
            outputSurface = null;
        }
        if (decoder != null)
        {
            decoder.Stop();
            decoder.Release();
            decoder = null;
        }
        if (extractor != null)
        {
            extractor.Release();
            extractor = null;
        }
    }
}