// Callbacks for ISurfaceTextureListener
// Invoked when the TextureView's SurfaceTexture is ready; sizes its buffer and
// hands it to the frame processor as the output surface.
public void OnSurfaceTextureAvailable(SurfaceTexture texture, int width, int height)
{
    // We configure the size of default buffer to be the size of camera preview we want.
    // NOTE(review): Height and Width are passed swapped relative to the other
    // snippets in this file (which use Width, Height) — presumably compensating
    // for sensor rotation, but confirm this is intentional and not a transposition.
    texture.SetDefaultBufferSize(mPreviewSize.Height, mPreviewSize.Width);
    // Route decoded frames from this texture into the processor.
    mProcessor.SetOutputSurface(new Surface(texture));
}
/// <summary>
/// Starts the camera preview in record mode, wiring both the on-screen
/// TextureView surface and the MediaRecorder surface into a single capture
/// session. Silently returns when the camera, texture view, or preview size
/// is not yet available.
/// </summary>
public void startPreview()
{
    // All three prerequisites must be in place before a session can be built.
    if (cameraDevice == null || !activity.textureView.IsAvailable || previewSize == null)
    {
        return;
    }

    try
    {
        SetUpRecoder();

        SurfaceTexture surfaceTexture = activity.textureView.SurfaceTexture;
        surfaceTexture.SetDefaultBufferSize(previewSize.Width, previewSize.Height);

        previewBuilder = cameraDevice.CreateCaptureRequest(CameraTemplate.Record);

        var displaySurface = new Surface(surfaceTexture);
        previewBuilder.AddTarget(displaySurface);

        var recordingSurface = mediaRecorder.Surface;
        previewBuilder.AddTarget(recordingSurface);

        // Both surfaces receive frames from the same repeating request.
        var sessionSurfaces = new List<Surface> { displaySurface, recordingSurface };
        cameraDevice.CreateCaptureSession(
            sessionSurfaces,
            new PreviewCaptureStateCallback(activity, this),
            backgroundHandler);
    }
    catch (CameraAccessException e)
    {
        e.PrintStackTrace();
    }
    catch (Java.IO.IOException e)
    {
        e.PrintStackTrace();
    }
}
/// <summary>
/// Creates the preview capture session: validates the SurfaceTexture, sizes it
/// to the chosen preview resolution, and opens a session targeting both the
/// on-screen preview surface and the ImageReader's surface.
/// </summary>
public void CreateCameraPreviewSession()
{
    try
    {
        SurfaceTexture surfaceTexture = mTextureView.SurfaceTexture;
        if (surfaceTexture == null)
        {
            throw new IllegalStateException("texture is null");
        }

        // Match the default buffer to the preview resolution we selected.
        surfaceTexture.SetDefaultBufferSize(mPreviewSize.Width, mPreviewSize.Height);

        // Output surface that the preview frames are rendered into.
        var previewSurface = new Surface(surfaceTexture);

        // Build a Preview-template request aimed at that surface.
        PreviewRequestBuilder = Device.CreateCaptureRequest(CameraTemplate.Preview);
        PreviewRequestBuilder.AddTarget(previewSurface);

        // The session serves both the preview and still-capture outputs.
        var sessionSurfaces = new List<Surface> { previewSurface, mImageReader.Surface };
        Device.CreateCaptureSession(sessionSurfaces, new CameraCaptureSessionCallback(this), null);
    }
    catch (CameraAccessException e)
    {
        e.PrintStackTrace();
    }
}
/// <summary>
/// Starts the still-image preview: sizes the camera texture's buffer, targets
/// it with a Preview-template request alongside the ImageReader surface, and
/// begins the repeating request once the session is configured.
/// </summary>
public void StartPreview()
{
    // Nothing to do until camera, texture and preview size are all ready.
    if (CameraDevice == null || !cameraTexture.IsAvailable || previewSize == null)
    {
        return;
    }

    SurfaceTexture surfaceTexture = cameraTexture.SurfaceTexture;
    surfaceTexture.SetDefaultBufferSize(previewSize.Width, previewSize.Height);
    var previewSurface = new Surface(surfaceTexture);

    previewBuilder = CameraDevice.CreateCaptureRequest(CameraTemplate.Preview);
    previewBuilder.AddTarget(previewSurface);

    var sessionSurfaces = new List<Surface> { previewSurface, imageReader.Surface };

    // On success remember the session and kick off the repeating preview;
    // configuration failures are deliberately ignored here.
    var stateListener = new CameraCaptureStateListener
    {
        OnConfigureFailedAction = session =>
        {
        },
        OnConfiguredAction = session =>
        {
            previewSession = session;
            UpdatePreview();
        }
    };

    CameraDevice.CreateCaptureSession(sessionSurfaces, stateListener, backgroundHandler);
}
// Start the camera preview (record template) with optional torch flash,
// digital zoom via sensor crop, and autofocus disabled.
public void startPreview()
{
    // Bail out until the camera, the on-screen texture and the preview size are all ready.
    if (null == cameraDevice || !textureView.IsAvailable || null == previewSize)
    {
        return;
    }
    try
    {
        // Tear down any previous session before configuring a new one.
        closePreviewSession();
        SurfaceTexture texture = textureView.SurfaceTexture;
        //Assert.IsNotNull(texture);
        texture.SetDefaultBufferSize(previewSize.Width, previewSize.Height);
        previewBuilder = cameraDevice.CreateCaptureRequest(CameraTemplate.Record);
        surfaces = new List <Surface>();
        var previewSurface = new Surface(texture);
        surfaces.Add(previewSurface);
        previewBuilder.AddTarget(previewSurface);
        // previewBuilder was just assigned above, so this null check always
        // passes; kept as-is to preserve behavior.
        if (previewBuilder != null)
        {
            if (flashon)
            {
                // Torch keeps the flash LED on continuously while previewing.
                previewBuilder.Set(CaptureRequest.FlashMode, (int)FlashMode.Torch);
            }
            // Digital zoom: build a crop rectangle centered on the sensor whose
            // half-extents shrink as zoomlev grows (zoomlev == 1 -> full sensor).
            // 'max' is assumed to be the sensor's active-array rectangle — TODO confirm.
            Rect sensor_rect = max;
            int left = sensor_rect.Width() / 2;
            int right = left;
            int top = sensor_rect.Height() / 2;
            int bottom = top;
            int hwidth = (int)(sensor_rect.Width() / (2.0 * zoomlev));
            int hheight = (int)(sensor_rect.Height() / (2.0 * zoomlev));
            left -= hwidth;
            right += hwidth;
            top -= hheight;
            bottom += hheight;
            previewBuilder.Set(CaptureRequest.ScalerCropRegion, new Rect(left, top, right, bottom));
        }
        // Manual focus: turn AF off and cancel any in-flight focus trigger.
        previewBuilder.Set(CaptureRequest.ControlAfMode, (int)ControlAFMode.Off);
        previewBuilder.Set(CaptureRequest.ControlAfTrigger, (int)ControlAFTrigger.Cancel);
        // Recorder-surface wiring is currently disabled; this is a preview-only session.
        //var recorderSurface = mediaRecorder.Surface;
        //surfaces.Add(recorderSurface);
        //previewBuilder.AddTarget(recorderSurface);
        mPreviewSession = new PreviewCaptureStateCallback(this, false);
        cameraDevice.CreateCaptureSession(surfaces, mPreviewSession, backgroundHandler);
    }
    catch (CameraAccessException e)
    {
        e.PrintStackTrace();
    }
    catch (IOException e)
    {
        e.PrintStackTrace();
    }
}
/** Creates a new {@link CameraCaptureSession} for camera preview. */
private void createCameraPreviewSession()
{
    try
    {
        SurfaceTexture texture = textureView.SurfaceTexture;
        // assert texture != null;

        // We configure the size of default buffer to be the size of camera preview we want.
        texture.SetDefaultBufferSize(previewSize.Width, previewSize.Height);

        // This is the output Surface we need to start preview.
        Surface surface = new Surface(texture);

        // We set up a CaptureRequest.Builder with the output Surface.
        previewRequestBuilder = cameraDevice.CreateCaptureRequest(CameraTemplate.Preview);
        previewRequestBuilder.AddTarget(surface);

        StateCallback call = new StateCallback();
        call.OnConfiguredHandler += (sender, e) =>
        {
            // The camera may have been closed while the session was configuring.
            if (null == cameraDevice)
            {
                return;
            }

            // When the session is ready, we start displaying the preview.
            captureSession = e; // the event argument carries the configured session
            try
            {
                // Auto focus should be continuous for camera preview.
                // BUGFIX: the original wrote this AF value into ControlAeMode;
                // the continuous-picture constant belongs to ControlAfMode.
                previewRequestBuilder.Set(
                    CaptureRequest.ControlAfMode,
                    (int)ControlAFMode.ContinuousPicture);

                // Finally, we start displaying the camera preview.
                previewRequest = previewRequestBuilder.Build();
                CaptureCallback captureCallback = new CaptureCallback();
                captureSession.SetRepeatingRequest(previewRequest, captureCallback, backgroundHandler);
            }
            catch (CameraAccessException ex)
            {
                // BUGFIX: log the caught exception 'ex' — the original logged
                // the outer lambda argument 'e' (the session event) instead.
                Log.Error(TAG, "Failed to set up config to capture Camera", ex);
            }
        };

        // Here, we create a CameraCaptureSession for camera preview.
        cameraDevice.CreateCaptureSession(
            new System.Collections.Generic.List<Surface>() { surface }, call, null);
    }
    catch (CameraAccessException e)
    {
        Log.Error(TAG, "Failed to preview Camera", e);
    }
}
/// <summary>
/// Builds the preview session: sizes the TextureView's buffer to the chosen
/// preview resolution, targets its surface with a Preview-template request,
/// and hands session callbacks to a CameraCaptureStateListener.
/// </summary>
private void StartPreview()
{
    // Skip silently until camera, texture view and preview size are all available.
    if (this.cameraDevice == null || !this.autoFitTextureView.IsAvailable || this.previewSize == null)
    {
        return;
    }

    this.tracer.Debug("StartPreview");
    try
    {
        SurfaceTexture surfaceTexture = this.autoFitTextureView.SurfaceTexture;
        Debug.Assert(surfaceTexture != null);

        // Match the buffer to the chosen preview resolution.
        surfaceTexture.SetDefaultBufferSize(this.previewSize.Width, this.previewSize.Height);
        Surface previewSurface = new Surface(surfaceTexture);

        this.previewBuilder = this.cameraDevice.CreateCaptureRequest(CameraTemplate.Preview);
        this.previewBuilder.AddTarget(previewSurface);

        // Session callbacks: log failures; on success remember the session and
        // start the repeating preview request.
        var sessionListener = new CameraCaptureStateListener
        {
            OnConfigureFailedAction = (CameraCaptureSession session) =>
            {
                Activity activity = this.Activity;
                if (activity != null)
                {
                    this.tracer.Error("OnConfigureFailedAction");
                }
            },
            OnConfiguredAction = (CameraCaptureSession session) =>
            {
                this.previewSession = session;
                this.UpdatePreview();
            }
        };

        this.cameraDevice.CreateCaptureSession(
            new List<Surface> { previewSurface },
            sessionListener,
            this.mBackgroundHandler);
    }
    catch (CameraAccessException ex)
    {
        this.tracer.Exception(ex, "Failed to start preview.");
    }
}
#pragma warning restore CS0618 // Type or member is obsolete
#endregion

/// <summary>
/// Starts the still-image preview: sizes the TextureView buffer, targets its
/// surface with a Preview-template request, and enables the capture button
/// once the session has been requested. Shows a toast if configuration fails.
/// </summary>
public void StartPreview()
{
    // All prerequisites must be ready before the session can be configured.
    if (mCameraDevice == null || !textureView.IsAvailable || mPreviewSize == null)
    {
        return;
    }
    try
    {
        SurfaceTexture texture = textureView.SurfaceTexture;
        System.Diagnostics.Debug.Assert(texture != null);

        // We configure the size of the default buffer to be the size of the camera preview we want
        texture.SetDefaultBufferSize(mPreviewSize.Width, mPreviewSize.Height);

        // This is the output Surface we need to start the preview
        previewSurface = new Surface(texture);

        // We set up a CaptureRequest.Builder with the output Surface
        mPreviewBuilder = mCameraDevice.CreateCaptureRequest(CameraTemplate.Preview);
        mPreviewBuilder.AddTarget(previewSurface);

        // Here, we create a CameraCaptureSession for camera preview.
        mCameraDevice.CreateCaptureSession(new List <Surface>() { previewSurface },
            new CameraCaptureStateListener()
            {
                // Configuration failed: surface the error to the user.
                OnConfigureFailedAction = (CameraCaptureSession session) =>
                {
                    Activity activity = Activity;
                    if (activity != null)
                    {
                        Toast.MakeText(activity, "Failed", ToastLength.Short).Show();
                    }
                },
                // Configured: remember the session and start the repeating request.
                OnConfiguredAction = (CameraCaptureSession session) =>
                {
                    mPreviewSession = session;
                    UpdatePreview();
                }
            }, null);
        // The capture button is only usable once a preview session exists.
        takePhotoButton.Enabled = true;
    }
    catch (CameraAccessException ex)
    {
        Log.WriteLine(LogPriority.Info, "Camera2BasicFragment", ex.StackTrace);
    }
}
/// <summary>
/// Builds a preview capture request for the camera and opens a capture
/// session over the shared 'outputs' surface list.
/// </summary>
public void createpreview()
{
    //Toast.MakeText(Application.Context, "Iam there", ToastLength.Long).Show();
    SurfaceTexture surfaceTexture = tview.SurfaceTexture;
    surfaceTexture.SetDefaultBufferSize(imagedimension.Width, imagedimension.Height);

    var previewSurface = new Surface(surfaceTexture);
    Builder = cameradevice.CreateCaptureRequest(CameraTemplate.Preview);
    Builder.AddTarget(previewSurface);

    // NOTE(review): each call appends another surface to the shared 'outputs'
    // list — confirm this method runs only once per camera open.
    outputs.Add(previewSurface);
    cameradevice.CreateCaptureSession(outputs, capturecallback, null);
}
// Creates a new {@link CameraCaptureSession} for camera preview.
// When forVideo is true (and a MediaRecorder exists) the recorder surface is
// wired into the session instead of the still-capture ImageReader surface.
public void CreateCameraPreviewSession(bool forVideo = false)
{
    //surfacetexture available -> camera opened -> preview for still images starts
    try
    {
        SurfaceTexture texture = mTextureView.SurfaceTexture;
        if (texture == null)
        {
            throw new IllegalStateException("texture is null");
        }
        // We configure the size of default buffer to be the size of camera preview we want.
        texture.SetDefaultBufferSize(mPreviewSize.Width, mPreviewSize.Height);
        // This is the output Surface we need to start preview.
        Surface surface = new Surface(texture);
        // We set up a CaptureRequest.Builder with the output Surface.
        // NOTE(review): the Record-template branch below is deliberately disabled;
        // the Preview template is used even for video — confirm this is intended.
        // if (forVideo)
        //     mPreviewRequestBuilder = mCameraDevice.CreateCaptureRequest(CameraTemplate.Record);
        // else
        mPreviewRequestBuilder = mCameraDevice.CreateCaptureRequest(CameraTemplate.Preview);
        mPreviewRequestBuilder.AddTarget(surface);
        // Here, we create a CameraCaptureSession for camera preview.
        List <Surface> surfaces = new List <Surface> { surface };
        if (forVideo && mediaRecorder != null)
        {
            // Video path: frames also feed the MediaRecorder.
            surfaces.Add(mediaRecorder.Surface);
        }
        else
        {
            // Still path: frames also feed the ImageReader for capture.
            surfaces.Add(mImageReader.Surface);
        }
        // prepares the camera (focus/flash) and shows the preview for the camera
        // also sets the mCaptureSession for trigerring the capture on takephoto
        //todo: change to continueVideo for video preview
        mCameraDevice.CreateCaptureSession(surfaces, new CameraCaptureSessionCallback(this), null);
    }
    catch (CameraAccessException e)
    {
        e.PrintStackTrace();
    }
}
// Creates the camera preview session, first initializing the region of
// interest (roi) to the full display and applying scale/ROI settings.
public void CreateCameraPreviewSession()
{
    try
    {
        SurfaceTexture texture = mTextureView.SurfaceTexture;
        if (texture == null)
        {
            throw new IllegalStateException("texture is null");
        }
        // We configure the size of default buffer to be the size of camera preview we want.
        texture.SetDefaultBufferSize(mPreviewSize.Width, mPreviewSize.Height);
        // This is the output Surface we need to start preview.
        Surface surface = new Surface(texture);
        // Initialize the region of interest to cover the whole display.
        // NOTE(review): RunOnUiThread posts asynchronously when called from a
        // non-UI thread, so ChangeScale()/ChangeRoi() below may execute before
        // 'roi' has been updated here — confirm the intended ordering.
        Activity.RunOnUiThread(new Runnable(() =>
        {
            DisplayMetrics displayMetrics = new DisplayMetrics();
            Activity.WindowManager.DefaultDisplay.GetMetrics(displayMetrics);
            int height = displayMetrics.HeightPixels;
            int width = displayMetrics.WidthPixels;
            roi.Left = 0;
            roi.Top = 0;
            roi.Right = width;
            roi.Bottom = height;
        }));
        ChangeScale();
        ChangeRoi();
        // We set up a CaptureRequest.Builder with the output Surface.
        mPreviewRequestBuilder = mCameraDevice.CreateCaptureRequest(CameraTemplate.Preview);
        mPreviewRequestBuilder.AddTarget(surface);
        // Here, we create a CameraCaptureSession for camera preview.
        List <Surface> surfaces = new List <Surface>();
        surfaces.Add(surface);
        surfaces.Add(mImageReader.Surface);
        mCameraDevice.CreateCaptureSession(surfaces, new CameraCaptureSessionCallback(this), null);
    }
    catch (CameraAccessException e)
    {
        e.PrintStackTrace();
    }
}
/// <summary>
/// Creates a minimal camera preview: one Surface from the TextureView,
/// targeted by a Preview-template request, with state handling delegated to
/// CameraPreviewStateCallback.
/// </summary>
private void CreateCameraPreview()
{
    SurfaceTexture texture = TextureView.SurfaceTexture;
    texture.SetDefaultBufferSize(_imageDimension.Width, _imageDimension.Height);

    var previewSurface = new Surface(texture);
    CaptureRequest.Builder builder = _cameraDevice.CreateCaptureRequest(CameraTemplate.Preview);
    builder.AddTarget(previewSurface);

    Surface[] sessionOutputs = { previewSurface };
    _cameraDevice.CreateCaptureSession(
        sessionOutputs,
        new CameraPreviewStateCallback(_cameraDevice, builder, _backgroundHandler),
        null);
}
/// <summary>
/// Creates a record-mode capture session that feeds both the preview surface
/// and the MediaRecorder surface from one capture request.
/// </summary>
private void CreateCaptureSession()
{
    //SurfaceTexture texture = _textureView.SurfaceTexture; // (translated) this causes an error
    // (translated) Set the buffer size to the preview size
    // (any reasonable value such as the screen size will do).
    _surfaceTexture.SetDefaultBufferSize(_previewSize.Width, _previewSize.Height);
    Surface surface = new Surface(_surfaceTexture);
    List <Surface> list = new List <Surface>();
    list.Add(surface);
    list.Add(_recorder.Surface);
    // The Record template optimizes the pipeline for a steady frame rate.
    CaptureRequest.Builder captureRequest = _cameraDevice.CreateCaptureRequest(CameraTemplate.Record);
    captureRequest.AddTarget(surface);
    captureRequest.AddTarget(_recorder.Surface);
    _captureRequest = captureRequest.Build();
    _cameraDevice.CreateCaptureSession(list, new CameraCaputureSessionCallBack(_captureRequest), null);
}
/// <summary>
/// Starts the camera preview: targets the TextureView's surface with a
/// Preview-template request and starts the repeating request once the session
/// is configured; shows a toast when configuration fails.
/// </summary>
public void StartPreview()
{
    // Guard: camera, texture and preview size must all be ready.
    if (ActiveCameraDevice == null || !textureView.IsAvailable || previewSize == null)
    {
        return;
    }
    try
    {
        SurfaceTexture surfaceTexture = textureView.SurfaceTexture;
        surfaceTexture.SetDefaultBufferSize(previewSize.Width, previewSize.Height);
        var previewSurface = new Surface(surfaceTexture);

        previewBuilder = ActiveCameraDevice.CreateCaptureRequest(CameraTemplate.Preview);
        previewBuilder.AddTarget(previewSurface);

        // Session callbacks: toast on failure; otherwise remember the session
        // and begin the repeating preview request.
        var stateCallback = new CameraCaptureSessionStateCallback(this)
        {
            OnConfigureFailedAction = session =>
            {
                if (Context != null)
                {
                    Toast.MakeText(Context, "Failed", ToastLength.Short).Show();
                }
            },
            OnConfiguredAction = session =>
            {
                ActiveCaptureSession = session;
                UpdatePreview();
            }
        };

        ActiveCameraDevice.CreateCaptureSession(
            new List<Surface> { previewSurface }, stateCallback, null);
    }
    catch (CameraAccessException ex)
    {
        Log.WriteLine(LogPriority.Info, "Camera2BasicFragment", ex.StackTrace);
    }
}
// Start the camera preview: a single-surface, preview-template session driven
// by PreviewCaptureStateListener. Returns silently when prerequisites are missing.
public void startPreview()
{
    if (camera_device == null || !texture_view.IsAvailable || preview_size == null)
    {
        return;
    }
    try
    {
        SurfaceTexture surfaceTexture = texture_view.SurfaceTexture;
        //Assert.IsNotNull(texture);
        surfaceTexture.SetDefaultBufferSize(preview_size.Width, preview_size.Height);

        preview_builder = camera_device.CreateCaptureRequest(CameraTemplate.Preview);

        var previewSurface = new Surface(surfaceTexture);
        preview_builder.AddTarget(previewSurface);

        var sessionOutputs = new List<Surface> { previewSurface };
        camera_device.CreateCaptureSession(sessionOutputs, new PreviewCaptureStateListener(this), null);
    }
    catch (CameraAccessException e)
    {
        e.PrintStackTrace();
    }
}
/// <summary>
/// Opens the back-facing camera (translated from Japanese). Enumerates camera
/// IDs, picks the first back-facing one, sizes the preview TextureView and
/// SurfaceTexture accordingly, then opens the camera; CameraStateListener
/// receives the callbacks. Throws System.Exception when no back camera exists.
/// </summary>
private void OpenBackCamera()
{
    System.Diagnostics.Debug.WriteLine("CameraPageRenderer.OpenBackCamera");
    string selectedCameraId = string.Empty;
    CameraManager manager = (CameraManager)activity.GetSystemService(Context.CameraService);
    // Look for a back-facing camera.
    foreach (var cameraId in manager.GetCameraIdList())
    {
        var characteristics = manager.GetCameraCharacteristics(cameraId);
        var facing = (Integer)characteristics.Get(CameraCharacteristics.LensFacing);
        // NOTE(review): '==' between Java.Lang.Integer wrappers compares
        // references in C#; prefer facing.IntValue() == (int)LensFacing.Back.
        // This presumably works via Java's small-integer cache — confirm.
        if (facing != null && facing == Integer.ValueOf((int)LensFacing.Back))
        {
            selectedCameraId = cameraId;
            break;
        }
    }
    if (!string.IsNullOrEmpty(selectedCameraId))
    {
        // Get the preview size.
        PreviewSize = this.GetPreviewSize(selectedCameraId);
        System.Diagnostics.Debug.WriteLine($"previewSize: w={PreviewSize.Width},h={PreviewSize.Height}");
        // (translated) "Is this right?" — author's doubt about sizing the layout directly.
        TextureView.LayoutParameters = new FrameLayout.LayoutParams(PreviewSize.Width, PreviewSize.Height);
        SurfaceTexture.SetDefaultBufferSize(PreviewSize.Width, PreviewSize.Height);
        // Open the camera (the callback fires on success).
        manager.OpenCamera(selectedCameraId, new CameraStateListener(this), null);
    }
    else
    {
        // Runtime message left untranslated ("could not find a back camera").
        string msg = "背面カメラが見つけられなかった";
        System.Diagnostics.Debug.WriteLine(msg);
        throw new System.Exception(msg);
    }
}
/// <summary>
/// Start preview: sizes the surface-texture buffer to the preview resolution
/// and opens a capture session over whichever of the preview / VGA / depth
/// surfaces are currently configured.
/// </summary>
public void StartPreview()
{
    if (mSurfaceTexture == null)
    {
        Log.Debug(TAG, "mSurfaceTexture is null!");
        return;
    }
    Log.Debug(TAG, "StartPreview!");
    // Assumes mPreviewSize has been chosen before this call — TODO confirm.
    mSurfaceTexture.SetDefaultBufferSize(mPreviewSize.Width, mPreviewSize.Height);
    if (mCameraDevice == null)
    {
        Log.Debug(TAG, "mCameraDevice is null!");
        return;
    }
    try
    {
        mCaptureRequestBuilder = mCameraDevice.CreateCaptureRequest(CameraTemplate.Preview);
        // Collect only the surfaces that have actually been set up.
        List <Surface> surfaces = new List <Surface>();
        if (mPreViewSurface != null)
        {
            surfaces.Add(mPreViewSurface);
        }
        if (mVgaSurface != null)
        {
            surfaces.Add(mVgaSurface);
        }
        if (mDepthSurface != null)
        {
            surfaces.Add(mDepthSurface);
        }
        CaptureSession(surfaces);
    }
    catch (CameraAccessException e)
    {
        // BUGFIX: the caught exception was previously unused and the failure
        // was logged at Debug with no detail; log it at Error with the cause.
        Log.Error(TAG, "StartPreview error: " + e);
    }
}
/// <summary>
/// Opens the back-facing camera and starts a repeating preview request into
/// the given SurfaceTexture. Returns 0 on success, 1 when no back camera is
/// found, 2 when the camera failed to open, 3 when session configuration
/// failed. NOTE(review): blocks the calling thread on semaphores while the
/// camera opens and the session configures — do not call on the UI thread.
/// </summary>
public static int StartCapture(SurfaceTexture previewSurfaceTexture, Size previewSize)
{
    // Find the optimal camera and size
    string optimalCamId = null;
    string[] availableCameras = _cameraManager.GetCameraIdList();
    for (int i = 0; i < availableCameras.Length; i++)
    {
        CameraCharacteristics characteristics = _cameraManager.GetCameraCharacteristics(availableCameras[i]);
        var camFace = (Java.Lang.Integer)characteristics.Get(CameraCharacteristics.LensFacing);
        if (camFace != null && camFace.IntValue() == (int)LensFacing.Back)
        {
            optimalCamId = availableCameras[i];
            StreamConfigurationMap streamConfig = (StreamConfigurationMap)characteristics.Get(CameraCharacteristics.ScalerStreamConfigurationMap);
            List <Size> availableSizes = new List <Size>(streamConfig.GetOutputSizes(Java.Lang.Class.FromType(typeof(SurfaceTexture))));
            // Exact match: use the requested size directly.
            if (availableSizes.Exists(s => s.Width == previewSize.Width && s.Height == previewSize.Height))
            {
                previewSurfaceTexture.SetDefaultBufferSize(previewSize.Width, previewSize.Height);
                break;
            }
            // No exact match: keep only sizes matching the largest size's aspect
            // ratio, insert the requested size, then pick its nearest larger
            // neighbor by area (or nearest smaller when it is already largest).
            availableSizes.Sort((s1, s2) => s1.Width * s1.Height - s2.Width * s2.Height);
            Size maxSize = availableSizes[availableSizes.Count - 1];
            availableSizes.RemoveAll(s => (s.Height != s.Width * maxSize.Height / maxSize.Width));
            availableSizes.Add(previewSize);
            availableSizes.Sort((s1, s2) => s1.Width * s1.Height - s2.Width * s2.Height);
            int previewSizeIndex = availableSizes.IndexOf(previewSize);
            if (previewSizeIndex < availableSizes.Count - 1)
            {
                previewSurfaceTexture.SetDefaultBufferSize(availableSizes[previewSizeIndex + 1].Width, availableSizes[previewSizeIndex + 1].Height);
            }
            else
            {
                // NOTE(review): assumes at least one other size survived the
                // aspect-ratio filter; previewSizeIndex - 1 underflows otherwise.
                previewSurfaceTexture.SetDefaultBufferSize(availableSizes[previewSizeIndex - 1].Width, availableSizes[previewSizeIndex - 1].Height);
            }
            break;
        }
    }
    if (optimalCamId == null)
    {
        return(1);
    }
    // Open the camera
    // We use the semaphore to wait for the 'onOpen' function to be called, so that we can aquire the camera device.
    Semaphore utilSemaphore = new Semaphore(0, 1);
    void onOpen(CameraDevice camera)
    {
        _camera = camera;
        utilSemaphore.Release();
    }
    void onDisconnected(CameraDevice camera)
    {
        camera.Close();
        _camera = null;
    }
    void onError(CameraDevice camera, CameraError error)
    {
        onDisconnected(camera);
    }
    // All camera callbacks run on a dedicated background looper thread.
    _backgroundThread = new HandlerThread("CameraBackgroundWorker");
    _backgroundThread.Start();
    _backgroundHandler = new Handler(_backgroundThread.Looper);
    _cameraManager.OpenCamera(optimalCamId, new FunctionalCameraStateListener(onOpen, onDisconnected, onError), _backgroundHandler);
    utilSemaphore.WaitOne();
    if (_camera == null)
    {
        return(2);
    }
    // Start the capture session
    bool configureFailed = false;
    Surface previewSurface = new Surface(previewSurfaceTexture);
    void onConfigured(CameraCaptureSession session)
    {
        _captureSession = session;
        var captureRequestBuilder = _camera.CreateCaptureRequest(CameraTemplate.Preview);
        captureRequestBuilder.AddTarget(previewSurface);
        // Continuous-video AF keeps focus tracking smooth during preview.
        captureRequestBuilder.Set(CaptureRequest.ControlAfMode, (int)ControlAFMode.ContinuousVideo);
        var captureRequest = captureRequestBuilder.Build();
        _captureSession.SetRepeatingRequest(captureRequest, null, _backgroundHandler);
        utilSemaphore.Release();
    }
    void onConfigureFailed(CameraCaptureSession session)
    {
        configureFailed = true;
        session.Close();
        utilSemaphore.Release();
    }
    _camera.CreateCaptureSession(new Surface[] { previewSurface }, new FunctionalCameraCaptureSessionListener(onConfigured, onConfigureFailed), _backgroundHandler);
    utilSemaphore.WaitOne();
    if (configureFailed)
    {
        onDisconnected(_camera);
        return(3);
    }
    return(0);
}
// Ported Java device-state callback, kept commented for reference:
// private CameraDevice.StateCallback deviceStateCallback = new CameraDevice.StateCallback()
// {
//     public void onOpened(@NonNull CameraDevice cameraDevice)
//     {
//         cameraLock.release();
//         Camera2Control.this.cameraDevice = cameraDevice;
//         createCameraPreviewSession();
//     }
//     public void onDisconnected(@NonNull CameraDevice cameraDevice)
//     {
//         cameraLock.release();
//         cameraDevice.close();
//         Camera2Control.this.cameraDevice = null;
//     }
//     public void onError(@NonNull CameraDevice cameraDevice, int error)
//     {
//         cameraLock.release();
//         cameraDevice.close();
//         Camera2Control.this.cameraDevice = null;
//     }
// };

// Builds the preview request targeting the display surface plus a YUV
// ImageReader surface.
// NOTE(review): this port is incomplete — the CreateCaptureSession call at the
// bottom is still commented out, so no capture session is actually started.
private void createCameraPreviewSession()
{
    try
    {
        if (surfaceTexture == null)
        {
            surfaceTexture = new SurfaceTexture(11); // TODO — 11 looks like a placeholder GL texture name; confirm
        }
        if (textureView != null)
        {
            // Pending port: re-attach the SurfaceTexture to the TextureView on the handler thread.
            // handler.post(new Runnable()
            // {
            //     public void run()
            //     {
            //         try
            //         {
            //             surfaceTexture.detachFromGLContext();
            //         }
            //         catch (Exception e)
            //         {
            //             e.printStackTrace();
            //         }
            //         if (textureView.getSurfaceTexture() != surfaceTexture)
            //         {
            //             textureView.setSurfaceTexture(surfaceTexture);
            //         }
            //     }
            // });
        }
        Surface surface = new Surface(surfaceTexture);
        int rotation = ORIENTATIONS.Get(orientation);
        // NOTE(review): this swap looks inverted — at 90/270 degrees one would
        // normally pass (height, width); confirm against the semantics of
        // preferredWidth/preferredHeight before relying on it.
        if (rotation % 180 == 90)
        {
            surfaceTexture.SetDefaultBufferSize(preferredWidth, preferredHeight);
        }
        else
        {
            surfaceTexture.SetDefaultBufferSize(preferredHeight, preferredWidth);
        }
        previewRequestBuilder = cameraDevice.CreateCaptureRequest(CameraTemplate.Preview);
        previewRequestBuilder.AddTarget(surface);
        // Two YUV buffers: enough for a latest-frame-only analysis pipeline.
        imageReader = ImageReader.NewInstance(preferredWidth, preferredHeight, ImageFormat.Yuv420888, 2);
        // Pending port: frame-available listener.
        // imageReader.SetOnImageAvailableListener(new OnImageAvailableListener()
        // {
        //     public void onImageAvailable(ImageReader reader)
        //     {
        //     }
        // }, backgroundHandler);
        previewRequestBuilder.AddTarget(imageReader.Surface);
        updateFlashMode(this.flashMode, previewRequestBuilder);
        // Pending port: create the session and start the repeating preview request.
        // cameraDevice.createCaptureSession(Arrays.asList(surface, imageReader.getSurface()),
        //     new CameraCaptureSession.StateCallback()
        //     {
        //         public void onConfigured(@NonNull CameraCaptureSession cameraCaptureSession)
        //         {
        //             // The camera is already closed
        //             if (null == cameraDevice)
        //             {
        //                 return;
        //             }
        //             captureSession = cameraCaptureSession;
        //             try
        //             {
        //                 previewRequestBuilder.set(CaptureRequest.CONTROL_AF_MODE,
        //                     CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
        //                 previewRequest = previewRequestBuilder.build();
        //                 captureSession.setRepeatingRequest(previewRequest,
        //                     captureCallback, backgroundHandler);
        //             }
        //             catch (CameraAccessException e)
        //             {
        //                 e.printStackTrace();
        //             }
        //         }
        //         public void onConfigureFailed(@NonNull CameraCaptureSession session)
        //         {
        //             Log.e("xx", "onConfigureFailed" + session);
        //         }
        //     }, backgroundHandler);
    }
    catch (CameraAccessException e)
    {
        e.PrintStackTrace();
    }
}
/// <summary>
/// Worker loop for the external-video-input thread. Each iteration: swaps in a
/// newly selected video input if one was requested, samples a frame from the
/// shared SurfaceTexture, and pushes the OES texture downstream to the
/// consumer. Exits when mStopped is set, stopping the current input and
/// releasing EGL/GL resources.
/// </summary>
public override void Run()
{
    Prepare();
    while (!mStopped)
    {
        // Input switch requested: stop the old source, start the new one.
        if (_externalVideoInputManager._curVideoInput != _externalVideoInputManager._newVideoInput)
        {
            Log.Info(TAG, "New video input selected");
            // Current video input is running, but we now
            // introducing a new video type.
            // The new video input type may be null, referring
            // that we are not using any video.
            if (_externalVideoInputManager._curVideoInput != null)
            {
                _externalVideoInputManager._curVideoInput.OnVideoStopped(mThreadContext);
                Log.Info(TAG, "recycle stopped input");
            }
            _externalVideoInputManager._curVideoInput = _externalVideoInputManager._newVideoInput;
            if (_externalVideoInputManager._curVideoInput != null)
            {
                _externalVideoInputManager._curVideoInput.OnVideoInitialized(mSurface);
                Log.Info(TAG, "initialize new input");
            }
            if (_externalVideoInputManager._curVideoInput == null)
            {
                continue;
            }
            // Resize the shared texture buffer to the new source's frame size.
            Size size = _externalVideoInputManager._curVideoInput.OnGetFrameSize();
            mVideoWidth = size.Width;
            mVideoHeight = size.Height;
            mSurfaceTexture.SetDefaultBufferSize(mVideoWidth, mVideoHeight);
            if (mPaused)
            {
                // If current thread is in pause state, it must be paused
                // because of switching external video sources.
                mPaused = false;
            }
        }
        else if (_externalVideoInputManager._curVideoInput != null && !_externalVideoInputManager._curVideoInput.IsRunning)
        {
            // Current video source has been stopped by other
            // mechanisms (video playing has completed, etc).
            // A callback method is invoked to do some collect
            // or release work.
            // Note that we also set the new video source null,
            // meaning at meantime, we are not introducing new
            // video types.
            Log.Info(TAG, "current video input is not running");
            _externalVideoInputManager._curVideoInput.OnVideoStopped(mThreadContext);
            _externalVideoInputManager._curVideoInput = null;
            _externalVideoInputManager._newVideoInput = null;
        }
        // Idle-wait while paused or when no source is active.
        if (mPaused || _externalVideoInputManager._curVideoInput == null)
        {
            WaitForTime(DEFAULT_WAIT_TIME);
            continue;
        }
        // Latch the latest frame into the GL texture; failures are non-fatal.
        try
        {
            mSurfaceTexture.UpdateTexImage();
            mSurfaceTexture.GetTransformMatrix(mTransform);
        }
        catch (Java.Lang.Exception e)
        {
            e.PrintStackTrace();
        }
        if (_externalVideoInputManager._curVideoInput != null)
        {
            _externalVideoInputManager._curVideoInput.OnFrameAvailable(mThreadContext, mTextureId, mTransform);
        }
        mEglCore.MakeCurrent(mEglSurface);
        GLES20.GlViewport(0, 0, mVideoWidth, mVideoHeight);
        // Forward the frame to the consumer (e.g. an RTC engine) as an OES texture.
        if (_externalVideoInputManager._consumer != null)
        {
            Log.Error(TAG, "publish stream with ->width:" + mVideoWidth + ",height:" + mVideoHeight);
            /**Receives the video frame in texture,and push it out
             * @param textureId ID of the texture
             * @param format Pixel format of the video frame
             * @param width Width of the video frame
             * @param height Height of the video frame
             * @param rotation Clockwise rotating angle (0, 90, 180, and 270 degrees) of the video frame
             * @param timestamp Timestamp of the video frame. For each video frame, you need to set a timestamp
             * @param matrix Matrix of the texture. The float value is between 0 and 1, such as 0.1, 0.2, and so on*/
            _externalVideoInputManager._textureTransformer.Copy(mTextureId, MediaIO.PixelFormat.TextureOes.IntValue(), mVideoWidth, mVideoHeight);
            // NOTE(review): DateTime.Now.Millisecond is only the 0-999 ms
            // component, not a monotonic timestamp — confirm this is intended.
            _externalVideoInputManager._consumer.ConsumeTextureFrame(mTextureId, MediaIO.PixelFormat.TextureOes.IntValue(), mVideoWidth, mVideoHeight, 0, DateTime.Now.Millisecond, mTransform);
        }
        // The pace at which the output Surface is sampled
        // for video frames is controlled by the waiting
        // time returned from the external video source.
        WaitForNextFrame();
    }
    // Loop exited: stop whatever source is still attached, then free resources.
    if (_externalVideoInputManager._curVideoInput != null)
    {
        // The manager will cause the current
        // video source to be stopped.
        _externalVideoInputManager._curVideoInput.OnVideoStopped(mThreadContext);
    }
    Release();
}