public override void onDrawFrame(GL10 gl)
{
    mFrameLock.@lock();
    if (mCurrentFrame != null && !mVideoDisabled)
    {
        GLES20.glUseProgram(mProgram);

        // Reallocate the textures whenever the incoming frame size changes.
        if (mTextureWidth != mCurrentFrame.Width || mTextureHeight != mCurrentFrame.Height)
        {
            setupTextures(mCurrentFrame);
        }
        updateTextures(mCurrentFrame);

        // Compute the scale that maps the frame's aspect ratio onto the viewport:
        // "fit" letterboxes (whole frame visible); the default fills and crops.
        Matrix.setIdentityM(mScaleMatrix, 0);
        float scaleX = 1.0f, scaleY = 1.0f;
        float ratio = (float)mCurrentFrame.Width / mCurrentFrame.Height;
        float vratio = (float)mViewportWidth / mViewportHeight;

        if (mVideoFitEnabled)
        {
            if (ratio > vratio)
            {
                scaleY = vratio / ratio;
            }
            else
            {
                scaleX = ratio / vratio;
            }
        }
        else
        {
            if (ratio < vratio)
            {
                scaleY = vratio / ratio;
            }
            else
            {
                scaleX = ratio / vratio;
            }
        }

        // Negate X to mirror the frame horizontally when requested.
        Matrix.scaleM(mScaleMatrix, 0, scaleX * (mCurrentFrame.MirroredX ? -1.0f : 1.0f), scaleY, 1);

        int mvpMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uMVPMatrix");
        GLES20.glUniformMatrix4fv(mvpMatrixHandle, 1, false, mScaleMatrix, 0);

        GLES20.glDrawElements(GLES20.GL_TRIANGLES, mVertexIndex.Length, GLES20.GL_UNSIGNED_SHORT, mDrawListBuffer);
    }
    else
    {
        // Render a black frame while video is disabled.
        GLES20.glClearColor(0, 0, 0, 1);
        GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
    }
    mFrameLock.unlock();
}
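A minimal, self-contained sketch of the fit/fill aspect-ratio math above; AspectScale, ComputeScale, and the parameter names are hypothetical stand-ins, only the arithmetic mirrors onDrawFrame:

static class AspectScale
{
    // fit == true: letterbox, the whole frame stays visible (scale <= 1 on one axis).
    // fit == false: fill, the frame covers the viewport and overflows one axis (scale >= 1).
    public static void ComputeScale(int frameW, int frameH, int viewW, int viewH,
                                    bool fit, out float scaleX, out float scaleY)
    {
        scaleX = 1.0f;
        scaleY = 1.0f;
        float ratio = (float)frameW / frameH;   // frame aspect
        float vratio = (float)viewW / viewH;    // viewport aspect

        if (fit ? ratio > vratio : ratio < vratio)
        {
            scaleY = vratio / ratio;   // < 1 letterboxes, > 1 crops top/bottom
        }
        else
        {
            scaleX = ratio / vratio;   // < 1 pillarboxes, > 1 crops left/right
        }
    }
}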
public override bool destroyCapturer()
{
    m_captureLock.@lock();
    // Release the AudioRecord object and signal the capture thread to shut down.
    m_audioRecord.release();
    m_audioRecord = null;
    m_shutdownCaptureThread = true;
    m_captureEvent.signal();
    m_captureLock.unlock();
    return true;
}
/// <inheritdoc/>
public override void Drop()
{
    _commitCloseLock.@lock();
    try
    {
        CommitLockedDrop();
    }
    finally
    {
        _commitCloseLock.unlock();
    }
}
public override void Close()
{
    _idContainerLock.@lock();
    try
    {
        _idContainer.close(_highId);
    }
    finally
    {
        _idContainerLock.unlock();
    }
}
internal virtual void Create()
{
    PopulatorLock.@lock();
    try
    {
        if (PopulationOngoing)
        {
            Populator.create();
        }
    }
    finally
    {
        PopulatorLock.unlock();
    }
}
public override bool destroyRenderer()
{
    m_rendererLock.@lock();
    // Release the AudioTrack object and signal the render thread to shut down.
    m_audioTrack.release();
    m_audioTrack = null;
    m_shutdownRenderThread = true;
    m_renderEvent.signal();
    m_rendererLock.unlock();

    // Restore the audio routing state changed at startup.
    unregisterHeadsetReceiver();
    m_audioManager.SpeakerphoneOn = false;
    m_audioManager.Mode = AudioManager.MODE_NORMAL;
    return true;
}
public override void Close()
{
    _lockClientCleanupLock.@lock();
    try
    {
        if (_locks != null)
        {
            _locks.close();
            _locks = null;
            _active = false;
        }
    }
    finally
    {
        _lockClientCleanupLock.unlock();
    }
}
private void reload()
{
    // Non-blocking: only one thread rebuilds the mapping; concurrent callers
    // skip the reload and keep reading the previous mapping.
    if (reloadLock.tryLock())
    {
        try
        {
            longToString = buildMapping();
        }
        catch (IOException ioe)
        {
            throw new InvalidOperationException("Failed to rebuild mapping", ioe);
        }
        finally
        {
            reloadLock.unlock();
        }
    }
}
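The same try-lock guard in plain C# threading primitives, for comparison; MappingCache, BuildMapping, and the placeholder data are hypothetical, a minimal sketch assuming Monitor.TryEnter as the non-blocking gate:

using System.Threading;

class MappingCache
{
    private readonly object _reloadGate = new object();
    private volatile string[] _longToString = new string[0];

    // Non-blocking reload: the first caller rebuilds, concurrent callers
    // return immediately and keep reading the previous mapping.
    public void Reload()
    {
        if (!Monitor.TryEnter(_reloadGate))
        {
            return;
        }
        try
        {
            _longToString = BuildMapping();  // stand-in for the real loader
        }
        finally
        {
            Monitor.Exit(_reloadGate);
        }
    }

    private string[] BuildMapping()
    {
        return new[] { "0", "1" };  // placeholder data
    }
}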
public virtual void dispose()
{
    @lock.unlock();
}
public override int startCapture()
{
    if (isCaptureStarted)
    {
        return -1;
    }

    // Set the preferred capturing size
    configureCaptureSize(PREFERRED_CAPTURE_WIDTH, PREFERRED_CAPTURE_HEIGHT);

    // Set the capture parameters
    Camera.Parameters parameters = mCamera.Parameters;
    parameters.setPreviewSize(mCaptureWidth, mCaptureHeight);
    parameters.PreviewFormat = PIXEL_FORMAT;
    parameters.PreviewFrameRate = mCaptureFPS;
    try
    {
        mCamera.Parameters = parameters;
    }
    catch (Exception e)
    {
        Log.e(LOGTAG, "setParameters failed", e);
        return -1;
    }

    // Create capture buffers sized to one full frame and queue them with the camera.
    PixelFormat pixelFormat = new PixelFormat();
    PixelFormat.getPixelFormatInfo(PIXEL_FORMAT, pixelFormat);
    int bufSize = mCaptureWidth * mCaptureHeight * pixelFormat.bitsPerPixel / 8;
    for (int i = 0; i < mNumCaptureBuffers; i++)
    {
        mCamera.addCallbackBuffer(new sbyte[bufSize]);
    }

    // The camera needs a preview surface even though frames are consumed
    // through the buffer callback; a placeholder SurfaceTexture satisfies that.
    try
    {
        mSurfaceTexture = new SurfaceTexture(42);
        mCamera.PreviewTexture = mSurfaceTexture;
    }
    catch (Exception e)
    {
        Log.e(LOGTAG, "setPreviewTexture failed", e);
    }

    // Start preview
    mCamera.PreviewCallbackWithBuffer = this;
    mCamera.startPreview();

    mPreviewBufferLock.@lock();
    mExpectedFrameSize = bufSize;
    isCaptureRunning = true;
    mPreviewBufferLock.unlock();

    isCaptureStarted = true;
    return 0;
}
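startCapture queues mNumCaptureBuffers buffers and registers this object as the buffer callback, so each delivered frame must be handed back via addCallbackBuffer or the camera runs out of free buffers and stops delivering frames. A sketch of that consuming side, assuming the same converted Camera API; the body and the provideCameraFrame delivery hook are hypothetical, not taken from the source:

// Hypothetical sketch: each buffer queued in startCapture comes back here
// filled with one frame, and must be re-queued with addCallbackBuffer.
public void onPreviewFrame(sbyte[] data, Camera camera)
{
    mPreviewBufferLock.@lock();
    if (isCaptureRunning && data.Length == mExpectedFrameSize)
    {
        // Hand the frame to the consumer (hypothetical hook), then recycle
        // the buffer so the camera can fill it with a later frame.
        provideCameraFrame(data, mExpectedFrameSize);
        camera.addCallbackBuffer(data);
    }
    mPreviewBufferLock.unlock();
}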