/// <summary>
        /// Create a <seealso cref="com.samsung.android.sdk.camera.SCameraCaptureSession"/> for preview.
        /// </summary>
        private void createPreviewSession()
        {
            if (null == mSCamera || null == mSCameraDevice || null == mSCameraManager || null == mPreviewSize || !mTextureView.Available)
            {
                return;
            }

            try
            {
                SurfaceTexture texture = mTextureView.SurfaceTexture;
                texture.setDefaultBufferSize(mPreviewSize.Width, mPreviewSize.Height);

                Surface surface = new Surface(texture);

                mPreviewBuilder = mSCameraDevice.createCaptureRequest(SCameraDevice.TEMPLATE_PREVIEW);
                mPreviewBuilder.set(SCaptureRequest.CONTROL_AF_MODE, SCaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
                mPreviewBuilder.addTarget(surface);
                mPreviewBuilder.addTarget(mProcessor.InputSurface);

                // HAL Workaround
                mPreviewBuilder.set(SCaptureRequest.METERING_MODE, SCaptureRequest.METERING_MODE_MATRIX);

                IList <Surface> outputSurface = Arrays.asList(surface, mProcessor.InputSurface);
                mSCameraDevice.createCaptureSession(outputSurface, new StateCallbackAnonymousInnerClassHelper(this), mBackgroundHandler);
            }
            catch (CameraAccessException e)
            {
                showAlertDialog("Failed to create session. " + e.Message, true);
            }
        }
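
The StateCallbackAnonymousInnerClassHelper passed to createCaptureSession is not part of this listing (the later examples reference similarly named helpers). A minimal sketch of what such a helper typically looks like, assuming the converted class derives from SCameraCaptureSession.StateCallback and that the outer class (called OuterActivity here purely as a placeholder) exposes mSCameraSession, mPreviewBuilder, mBackgroundHandler and showAlertDialog as in the snippet above:

        // Illustrative sketch only; the real helper in the sample may differ.
        private class StateCallbackAnonymousInnerClassHelper : SCameraCaptureSession.StateCallback
        {
            private readonly OuterActivity outerInstance; // placeholder outer type name

            public StateCallbackAnonymousInnerClassHelper(OuterActivity outerInstance)
            {
                this.outerInstance = outerInstance;
            }

            public override void onConfigured(SCameraCaptureSession session)
            {
                // Keep the session and start the repeating preview request built in createPreviewSession().
                outerInstance.mSCameraSession = session;
                try
                {
                    session.setRepeatingRequest(outerInstance.mPreviewBuilder.build(), null, outerInstance.mBackgroundHandler);
                }
                catch (CameraAccessException e)
                {
                    outerInstance.showAlertDialog("Failed to start preview. " + e.Message, true);
                }
            }

            public override void onConfigureFailed(SCameraCaptureSession session)
            {
                outerInstance.showAlertDialog("Failed to configure the capture session.", true);
            }
        }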
Example #2
            public override void onImageAvailable(ImageReader reader)
            {
                STotalCaptureResult result = null;
                Image image = reader.acquireNextImage();

                try
                {
                    result = outerInstance.mCaptureResultQueue.take();
                }
                catch (InterruptedException e)
                {
                    Console.WriteLine(e.ToString());
                    Console.Write(e.StackTrace);

                    // Without a matching capture result we cannot build a reprocess request.
                    image.close();
                    return;
                }

                {
                    // Simple YUV processing that quantizes the luma (Y) values to steps of 10.
                    ByteBuffer y_buffer     = image.Planes[0].Buffer;
                    sbyte[]    y_byte_array = new sbyte[y_buffer.capacity()];
                    y_buffer.get(y_byte_array);

                    int size = image.Width * image.Height;
                    for (int i = 0; i < size; i++)
                    {
                        y_byte_array[i] = (sbyte)(y_byte_array[i] / 10 * 10);
                    }

                    y_buffer.rewind();
                    y_buffer.put(y_byte_array);
                }

                try
                {
                    SCaptureRequest.Builder builder = outerInstance.mSCameraDevice.createReprocessCaptureRequest(result);
                    builder.addTarget(outerInstance.mJpegReader.Surface);

                    // Option #1. Put Image obtained from ImageReader directly to ImageWriter
                    outerInstance.mReprocessWriter.queueInputImage(image);

                    /* Option #2. Obtain input Image from ImageWriter and copy to it. Then push back to ImageWriter. potentially with zero copy
                     * Image inputImage = mReprocessWriter.dequeueInputImage();
                     * //copy image to inputImage here
                     * mReprocessWriter.queueInputImage(inputImage);
                     */


                    outerInstance.mSCameraSession.capture(builder.build(), null, outerInstance.mBackgroundHandler);
                }
                catch (CameraAccessException e)
                {
                    Console.WriteLine(e.ToString());
                    Console.Write(e.StackTrace);
                }
            }
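
Option #2 from the comment above is only described, not implemented. A minimal sketch of that path, assuming the writer-dequeued image has the same format, size and plane strides as the processed image, and that the converted Image type exposes Timestamp as a read/write property (both are assumptions, not confirmed by the sample):

                    // Illustrative sketch of Option #2 (it would replace the queueInputImage(image) call above).
                    Image inputImage = outerInstance.mReprocessWriter.dequeueInputImage();

                    // Copy every plane of the processed image into the writer-owned input image.
                    for (int i = 0; i < image.Planes.Length; i++)
                    {
                        ByteBuffer src = image.Planes[i].Buffer;
                        ByteBuffer dst = inputImage.Planes[i].Buffer;
                        src.rewind();
                        dst.rewind();
                        dst.put(src); // assumes matching row and pixel strides
                    }

                    // Reprocessing pairs the input image with its capture result by timestamp.
                    inputImage.Timestamp = image.Timestamp;

                    outerInstance.mReprocessWriter.queueInputImage(inputImage);
                    image.close(); // the original image is not consumed by the writer in this path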
Example #3
        private void takePicture()
        {
            try
            {
                SCaptureRequest.Builder builder = mSCameraDevice.createCaptureRequest(SCameraDevice.TEMPLATE_STILL_CAPTURE);
                builder.addTarget(mYUVReader.Surface);

                mSCameraSession.capture(builder.build(), new CaptureCallbackAnonymousInnerClassHelper(this), mBackgroundHandler);
            }
            catch (CameraAccessException e)
            {
                Console.WriteLine(e.ToString());
                Console.Write(e.StackTrace);
            }
        }
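
The CaptureCallbackAnonymousInnerClassHelper used here is not shown either. Since the onImageAvailable handler in Example #2 blocks on mCaptureResultQueue.take(), this callback presumably enqueues the completed capture result so it can be paired with the YUV frame; a minimal sketch under that assumption (the outer type name is again a placeholder):

        // Illustrative sketch only; assumes mCaptureResultQueue is a blocking queue on the outer class.
        private class CaptureCallbackAnonymousInnerClassHelper : SCameraCaptureSession.CaptureCallback
        {
            private readonly OuterActivity outerInstance; // placeholder outer type name

            public CaptureCallbackAnonymousInnerClassHelper(OuterActivity outerInstance)
            {
                this.outerInstance = outerInstance;
            }

            public override void onCaptureCompleted(SCameraCaptureSession session, SCaptureRequest request, STotalCaptureResult result)
            {
                try
                {
                    // Hand the result to the image-available handler, which pairs it with the YUV image.
                    outerInstance.mCaptureResultQueue.put(result);
                }
                catch (InterruptedException e)
                {
                    Console.WriteLine(e.ToString());
                }
            }
        }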
Example #4
        private void takePicture()
        {
            ImageManager.CaptureImage image = mImageManager.get();
            try
            {
                SCaptureRequest.Builder builder = mSCameraDevice.createReprocessCaptureRequest(image.mResult);
                builder.addTarget(mJpegReader.Surface);
                mReprocessWriter.queueInputImage(image.mImage);

                mSCameraSession.capture(builder.build(), null, mBackgroundHandler);
            }
            catch (CameraAccessException e)
            {
                Console.WriteLine(e.ToString());
                Console.Write(e.StackTrace);
            }
        }
Example #5
        private void createPreviewSession()
        {
            lock (this)
            {
                if (null == mSCameraDevice || null == mSCameraManager || null == mPreviewSize || !mTextureView.Available)
                {
                    return;
                }

                try
                {
                    SurfaceTexture texture = mTextureView.SurfaceTexture;

                    // Set default buffer size to camera preview size.
                    texture.setDefaultBufferSize(mPreviewSize.Width, mPreviewSize.Height);

                    Surface surface = new Surface(texture);

                    // Creates CaptureRequest.Builder for preview with output target.
                    mZslBuilder = mSCameraDevice.createCaptureRequest(SCameraDevice.TEMPLATE_ZERO_SHUTTER_LAG);
                    mZslBuilder.addTarget(surface);
                    mZslBuilder.addTarget(mZslReader.Surface);

                    // Creates a CameraCaptureSession here.
                    IList <Surface> outputSurface = new List <Surface>();
                    outputSurface.Add(mZslReader.Surface);
                    outputSurface.Add(surface);
                    outputSurface.Add(mJpegReader.Surface);

                    mSCameraDevice.createReprocessableCaptureSession(new InputConfiguration(mZslReader.Width, mZslReader.Height, mZslReader.ImageFormat), outputSurface, new StateCallbackAnonymousInnerClassHelper(this), mBackgroundHandler);
                }
                catch (CameraAccessException)
                {
                    showAlertDialog("Failed to create camera capture session.", true);
                }
            }
        }
Example #6
        /// <summary>
        /// Create a <seealso cref="com.samsung.android.sdk.camera.SCameraCaptureSession"/> for preview.
        /// </summary>
        // The original Java method was annotated @SuppressWarnings("deprecation").
        private void createPreviewSession()
        {
            if (null == mSCamera || null == mSCameraDevice || null == mSCameraManager || !mTextureView.Available)
            {
                return;
            }

            try
            {
                mPreviewSize = mVideoParameter.mVideoSize;

                // Android Camera HAL3.2
                // Note that for the use case of multiple output streams, application must select one
                // unique size from this metadata to use. Otherwise a request error might occur.
                // The camera device will only support up to 2 output high speed streams
                // (processed non-stalling format defined in android.request.maxNumOutputStreams) in this mode.
                // This control will be effective only if all of below conditions are true:
                //
                // The application created no more than maxNumHighSpeedStreams processed non-stalling format output streams,
                // where maxNumHighSpeedStreams is calculated as min(2, android.request.maxNumOutputStreams[Processed (but not-stalling)]).
                // The stream sizes are selected from the sizes reported by android.control.availableHighSpeedVideoConfigurations.
                // No processed non-stalling or raw streams are configured.

                Log.e(TAG, "Preview size: " + mPreviewSize + " Video size: " + mVideoParameter.mVideoSize);

                // Set the aspect ratio on the TextureView.
                int orientation = Resources.Configuration.orientation;
                if (orientation == Configuration.ORIENTATION_LANDSCAPE)
                {
                    mTextureView.setAspectRatio(mPreviewSize.Width, mPreviewSize.Height);
                }
                else
                {
                    mTextureView.setAspectRatio(mPreviewSize.Height, mPreviewSize.Width);
                }

                prepareMediaRecorder();

                SurfaceTexture texture = mTextureView.SurfaceTexture;

                // Set default buffer size to camera preview size.
                texture.setDefaultBufferSize(mPreviewSize.Width, mPreviewSize.Height);

                Surface previewSurface  = new Surface(texture);
                Surface recorderSurface = mMediaRecorder.Surface;

                // Creates SCaptureRequest.Builder for preview and recording with output target.
                mPreviewBuilder = mSCameraDevice.createCaptureRequest(SCameraDevice.TEMPLATE_RECORD);

                // com.samsung.android.sdk.camera.processor.SCameraEffectProcessor supports only 24 fps.
                mPreviewBuilder.set(SCaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, mVideoParameter.mFpsRange);
                mPreviewBuilder.set(SCaptureRequest.CONTROL_AF_MODE, SCaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
                mPreviewBuilder.set(SCaptureRequest.CONTROL_SCENE_MODE, SCaptureRequest.CONTROL_SCENE_MODE_HIGH_SPEED_VIDEO);
                mPreviewBuilder.addTarget(previewSurface);
                mPreviewBuilder.addTarget(recorderSurface);

                // Limit the preview frame rate to at most 30 fps.
                int requestListSize = mVideoParameter.mFpsRange.Upper > 30 ? mVideoParameter.mFpsRange.Upper / 30 : 1;

                mRepeatingList = new List<SCaptureRequest>();
                mRepeatingList.Add(mPreviewBuilder.build());
                mPreviewBuilder.removeTarget(previewSurface);

                for (int i = 0; i < requestListSize - 1; i++)
                {
                    mRepeatingList.Add(mPreviewBuilder.build());
                }

                Log.e(TAG, "Request size: " + mRepeatingList.Count);

                // Creates a SCameraCaptureSession here.
                IList <Surface> outputSurface = Arrays.asList(previewSurface, recorderSurface);
                mSCameraDevice.createCaptureSession(outputSurface, new StateCallbackAnonymousInnerClassHelper(this), mBackgroundHandler);
            }
            catch (CameraAccessException e)
            {
                Console.WriteLine(e.ToString());
                Console.Write(e.StackTrace);
            }
            catch (IOException e)
            {
                Console.WriteLine(e.ToString());
                Console.Write(e.StackTrace);
            }
        }
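
For the high-speed recording case the session state callback is also omitted. Because createPreviewSession builds mRepeatingList (one request that targets both surfaces plus requestListSize - 1 recorder-only requests), the onConfigured handler most likely submits the whole list as a repeating burst, so the recorder receives every frame while the preview surface is fed at most about 30 fps; a minimal sketch under that assumption:

            // Illustrative sketch of the onConfigured handler for the high-speed recording session.
            public override void onConfigured(SCameraCaptureSession session)
            {
                outerInstance.mSCameraSession = session;
                try
                {
                    // Submitting the list as a repeating burst interleaves recorder-only requests
                    // with requests that also target the preview surface.
                    session.setRepeatingBurst(outerInstance.mRepeatingList, null, outerInstance.mBackgroundHandler);
                }
                catch (CameraAccessException e)
                {
                    Console.WriteLine(e.ToString());
                    Console.Write(e.StackTrace);
                }
            }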
Example #7
		private void createPreviewSession()
		{
			lock (this)
			{
        
				if (null == mSCameraDevice || null == mSCameraManager || null == mPreviewSize || !mTextureView.Available)
				{
					return;
				}
        
				try
				{
					SurfaceTexture texture = mTextureView.SurfaceTexture;
        
					// Set default buffer size to camera preview size.
					texture.setDefaultBufferSize(mPreviewSize.Width, mPreviewSize.Height);
        
					Surface surface = new Surface(texture);
        
					// Creates CaptureRequest.Builder for preview with output target.
					mRequestBuilder = mSCameraDevice.createCaptureRequest(SCameraDevice.TEMPLATE_PREVIEW);
					mRequestBuilder.addTarget(surface);
        
					// Creates a CameraCaptureSession here.
					IList<Surface> outputSurface = new List<Surface>();
					outputSurface.Add(mYUVReader.Surface);
					outputSurface.Add(surface);
					outputSurface.Add(mJpegReader.Surface);
        
					mSCameraDevice.createReprocessableCaptureSession(new InputConfiguration(mYUVReader.Width, mYUVReader.Height, mYUVReader.ImageFormat), outputSurface, new StateCallbackAnonymousInnerClassHelper(this), mBackgroundHandler);
				}
				catch (CameraAccessException)
				{
					showAlertDialog("Failed to create camera capture session.", true);
				}
			}
		}
Example #8
		/// <summary>
		/// Create a <seealso cref="com.samsung.android.sdk.camera.SCameraCaptureSession"/> for preview.
		/// </summary>
		private void createPreviewSession()
		{
			if (null == mSCamera || null == mSCameraDevice || null == mSCameraManager || null == mPreviewSize || !mTextureView.Available)
			{
				return;
			}

			try
			{
				SurfaceTexture texture = mTextureView.SurfaceTexture;

				// Set default buffer size to camera preview size.
				texture.setDefaultBufferSize(mPreviewSize.Width, mPreviewSize.Height);

				Surface surface = new Surface(texture);

				// Creates SCaptureRequest.Builder for preview with output target.
				mPreviewBuilder = mSCameraDevice.createCaptureRequest(SCameraDevice.TEMPLATE_PREVIEW);
				mPreviewBuilder.addTarget(surface);

				mCaptureBuilder = mSCameraDevice.createCaptureRequest(SCameraDevice.TEMPLATE_STILL_CAPTURE);
				mCaptureBuilder.addTarget(mImageReader.Surface);

				// Clear the default lens focus distance so AF controls the focus.
				mPreviewBuilder.set(SCaptureRequest.LENS_FOCUS_DISTANCE, null);
				mCaptureBuilder.set(SCaptureRequest.LENS_FOCUS_DISTANCE, null);

				// set default AF trigger
				mPreviewBuilder.set(SCaptureRequest.CONTROL_AF_TRIGGER, SCameraMetadata.CONTROL_AF_TRIGGER_IDLE);
				mCaptureBuilder.set(SCaptureRequest.CONTROL_AF_TRIGGER, SCameraMetadata.CONTROL_AF_TRIGGER_IDLE);

				// Creates a SCameraCaptureSession here.
				IList<Surface> outputSurface = Arrays.asList(surface, mImageReader.Surface);
				mSCameraDevice.createCaptureSession(outputSurface, new StateCallbackAnonymousInnerClassHelper(this), mBackgroundHandler);

			}
			catch (CameraAccessException e)
			{
				showAlertDialog("Failed to create session. " + e.Message, true);
			}
		}
Example #9
		/// <summary>
		/// Create a <seealso cref="com.samsung.android.sdk.camera.SCameraCaptureSession"/> for preview.
		/// </summary>
		private void createPreviewSession()
		{
			if (null == mSCamera || null == mSCameraDevice || null == mSCameraManager || null == mPreviewSize || !mTextureView.Available)
			{
				return;
			}

			try
			{
				SurfaceTexture texture = mTextureView.SurfaceTexture;

				// Set default buffer size to camera preview size.
				texture.setDefaultBufferSize(mPreviewSize.Width, mPreviewSize.Height);

				Surface surface = new Surface(texture);

				// Creates SCaptureRequest.Builder for preview with output target.
				mPreviewBuilder = mSCameraDevice.createCaptureRequest(SCameraDevice.TEMPLATE_PREVIEW);
				mPreviewBuilder.set(SCaptureRequest.CONTROL_AF_MODE, SCaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
				mPreviewBuilder.addTarget(surface);

				mCaptureBuilder = mSCameraDevice.createCaptureRequest(SCameraDevice.TEMPLATE_STILL_CAPTURE);
				mCaptureBuilder.set(SCaptureRequest.CONTROL_AF_MODE, SCaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
				mCaptureBuilder.addTarget(mImageReader.Surface);

				// HAL Workaround
				mPreviewBuilder.set(SCaptureRequest.METERING_MODE, SCaptureRequest.METERING_MODE_MATRIX);
				mCaptureBuilder.set(SCaptureRequest.METERING_MODE, SCaptureRequest.METERING_MODE_MATRIX);

				// Creates a SCameraCaptureSession here.
				IList<Surface> outputSurface = Arrays.asList(surface, mImageReader.Surface);
				mSCameraDevice.createCaptureSession(outputSurface, new StateCallbackAnonymousInnerClassHelper(this), mBackgroundHandler);

			}
			catch (CameraAccessException e)
			{
				showAlertDialog("Failed to create session. " + e.Message, true);
			}
		}
Example #10
		/// <summary>
		/// Create a <seealso cref="com.samsung.android.sdk.camera.SCameraCaptureSession"/> for preview.
		/// </summary>
		private void createPreviewSession()
		{
			lock (this)
			{
        
				if (null == mSCamera || null == mSCameraDevice || null == mSCameraManager || null == mPreviewSize || !mTextureView.Available)
				{
					return;
				}
        
				try
				{
					SurfaceTexture texture = mTextureView.SurfaceTexture;
        
					// Set default buffer size to camera preview size.
					texture.setDefaultBufferSize(mPreviewSize.Width, mPreviewSize.Height);
        
					Surface surface = new Surface(texture);
        
					// Creates SCaptureRequest.Builder for preview with output target.
					mPreviewBuilder = mSCameraDevice.createCaptureRequest(SCameraDevice.TEMPLATE_PREVIEW);
					mPreviewBuilder.addTarget(surface);
        
					// Creates SCaptureRequest.Builder for still capture with output target.
					mCaptureBuilder = mSCameraDevice.createCaptureRequest(SCameraDevice.TEMPLATE_STILL_CAPTURE);
        
					// Set up SettingDialog. SettingDialog adds setting items depending on the camera characteristics
					// and updates the builders as setting values change.
					runOnUiThread(() =>
					{
						mSettingDialog.configure(mCharacteristics, mLensFacing, mLensFacingList, mImageFormat, mImageFormatList, mPreviewBuilder, mCaptureBuilder);
					});
        
					// Creates a SCameraCaptureSession here.
					IList<Surface> outputSurface = new List<Surface>();
					outputSurface.Add(surface);
					outputSurface.Add(mJpegReader.Surface);
					if (mRawReader != null)
					{
						outputSurface.Add(mRawReader.Surface);
					}
        
					mSCameraDevice.createCaptureSession(outputSurface, new StateCallbackAnonymousInnerClassHelper(this), mBackgroundHandler);
				}
				catch (CameraAccessException)
				{
					showAlertDialog("Failed to create camera capture session.", true);
				}
			}
		}