public static int Main(string[] args)
{
    // Loads the DICOM file named on the command line and copies its pixel data
    // into a managed array whose element type matches the image's scalar type.
    // Returns 0 on success, 1 on read failure or unsupported dimensionality.
    string inputFile = args[0];

    ImageReader reader = new ImageReader();
    reader.SetFileName(inputFile);
    if (!reader.Read())
    {
        return 1;
    }

    Image image = reader.GetImage();
    PixelFormat pixeltype = image.GetPixelFormat();

    // For the purpose of the test, exit early on anything that is not 2D.
    if (image.GetNumberOfDimensions() != 2)
    {
        return 1;
    }

    uint dimx = image.GetDimension(0);
    uint dimy = image.GetDimension(1);
    uint npixels = dimx * dimy;

    // Dispatch on the scalar type; each handled type gets a matching array.
    switch (pixeltype.GetScalarType())
    {
        case PixelFormat.ScalarType.UINT8:
        {
            System.Console.WriteLine("Processing UINT8 image type");
            byte[] pixels = new byte[npixels];
            image.GetArray(pixels);
            break;
        }
        case PixelFormat.ScalarType.INT16:
        {
            System.Console.WriteLine("Processing INT16 image type");
            short[] pixels = new short[npixels];
            image.GetArray(pixels);
            break;
        }
        case PixelFormat.ScalarType.UINT16:
        {
            System.Console.WriteLine("Processing UINT16 image type");
            ushort[] pixels = new ushort[npixels];
            image.GetArray(pixels);
            break;
        }
        default:
        {
            System.Console.WriteLine("Default (unhandled pixel format): " + pixeltype.GetScalarTypeAsString());
            // Fall back to the raw byte buffer for formats not handled above.
            byte[] raw = new byte[image.GetBufferLength()];
            image.GetBuffer(raw);
            break;
        }
    }
    return 0;
}
public void CurrentPixelType__origin_of_coordinate_system_is_correct()
{
    // A freshly constructed reader must start at the origin (column 0, row 0),
    // where the test image holds a Water pixel.
    // Fix: replace the manual try/finally Dispose with an idiomatic 'using'
    // statement — same disposal guarantee, less boilerplate.
    using (var bmp = new Bitmap("Testimages/testimage1.png"))
    {
        var imageRead = new ImageReader(bmp);

        var result = imageRead.CurrentPixelType();

        imageRead.CurrentColumn.Should().Be(0);
        imageRead.CurrentRow.Should().Be(0);
        result.Should().Be(PixelType.Water);
    }
}
public void CurrentPixelType__receives_pixel_from_image_typed_as_undefined()
{
    // Moving the frame off the top-left edge puts the cursor outside the image,
    // which the reader reports as PixelType.undefined.
    // Fix: replace the manual try/finally Dispose with an idiomatic 'using'.
    using (var bmp = new Bitmap("Testimages/testimage1.png"))
    {
        var imageRead = new ImageReader(bmp);
        imageRead.FrameX--;
        imageRead.FrameY--;

        var result = imageRead.CurrentPixelType();

        result.Should().Be(PixelType.undefined);
    }
}
public void CurrentPixelType__receives_pixel_from_image_typed_as_ground()
{
    // After advancing 42 rows the cursor lies in a Ground region of the test image.
    // Fix: replace the manual try/finally Dispose with an idiomatic 'using'.
    using (var bmp = new Bitmap("Testimages/testimage1.png"))
    {
        var imageRead = new ImageReader(bmp);
        int i = 0;
        while (i++ < 42)
        {
            imageRead.NextRow();
        }

        var result = imageRead.CurrentPixelType();

        imageRead.CurrentRow.Should().BeGreaterThan(40);
        result.Should().Be(PixelType.Ground);
    }
}
public void CurrentPixelType__receives_pixel_from_image_typed_as_water()
{
    // After advancing 50 columns the cursor lies in a Water region of the test image.
    // Fix: replace the manual try/finally Dispose with an idiomatic 'using'.
    using (var bmp = new Bitmap("Testimages/testimage1.png"))
    {
        var imageRead = new ImageReader(bmp);
        int i = 0;
        while (i++ < 50)
        {
            imageRead.NextColumn();
        }

        var result = imageRead.CurrentPixelType();

        imageRead.CurrentColumn.Should().BeGreaterThan(30);
        result.Should().Be(PixelType.Water);
    }
}
public static int Main(string[] args)
{
    // Demonstrates gdcm.Rescaler: applies an intercept/slope transform to the
    // 16-bit pixel data of a DICOM file and prints the pixel formats plus the
    // first input/output sample. Returns 0 on success, 1 on read failure.
    string inputFile = args[0];

    ImageReader reader = new ImageReader();
    reader.SetFileName(inputFile);
    if (!reader.Read())
    {
        return 1;
    }

    Image image = reader.GetImage();
    PixelFormat pixeltype = image.GetPixelFormat();

    Rescaler rescaler = new Rescaler();
    rescaler.SetIntercept(0);
    rescaler.SetSlope(1.2);
    rescaler.SetPixelFormat(pixeltype);

    // The rescaler computes the output pixel type able to hold rescaled values.
    PixelFormat outputpt = new PixelFormat(rescaler.ComputeInterceptSlopePixelType());
    System.Console.WriteLine("pixeltype");
    System.Console.WriteLine(pixeltype.toString());
    System.Console.WriteLine("outputpt");
    System.Console.WriteLine(outputpt.toString());

    uint len = image.GetBufferLength();
    short[] input = new short[len / 2]; // sizeof(short) == 2
    image.GetArray(input);

    double[] output = new double[len / 2];
    rescaler.Rescale(output, input, len);

    // Show the first pixel before and after rescaling.
    System.Console.WriteLine("Input:");
    System.Console.WriteLine(input[0]);
    System.Console.WriteLine("Output:");
    System.Console.WriteLine(output[0]);
    return 0;
}
protected override IEnumerator<MachineInstruction> CreateDisassembler(IProcessorArchitecture arch, ImageReader rdr)
{
    // Build a Thumb-mode disassembler over the reader and hand back its
    // instruction enumerator.
    var dasm = new ThumbDisassembler(arch, rdr);
    return dasm.GetEnumerator();
}
private IEnumerable<SparcInstruction> CreateDisassemblyStream(ImageReader rdr)
{
    // The SPARC disassembler is itself an IEnumerable of decoded instructions;
    // expose it directly as the disassembly stream.
    var dasm = new SparcDisassembler(arch, rdr);
    return dasm;
}
/// <summary>
/// Reads a code address of <paramref name="byteSize"/> bytes from the image reader.
/// </summary>
/// <param name="byteSize">Number of bytes that encode the address.</param>
/// <param name="rdr">Reader positioned at the encoded address.</param>
/// <param name="state">Current processor state; implementations for segmented
/// architectures may consult it to form the full address.</param>
/// <returns>The decoded <see cref="Address"/>.</returns>
public abstract Address ReadCodeAddress(int byteSize, ImageReader rdr, ProcessorState state);
public override void Apply(ref Bitmap bitmap, out Highlighter[] highlightersOut)
{
    // Detects edges along grid lines: scans the image and a Difference-filtered
    // copy of it at every grid interval, then emits debug highlighters for the
    // edge points found on the vertical passes.

    // Make a copy of the bitmap filtered with the Difference filter
    Bitmap differenceBitmap = new Bitmap(bitmap);
    var filter = new Images.Filters.FilterDifference();
    Highlighter[] temp;
    filter.Apply(ref differenceBitmap, out temp);

    // Analyse gridlines
    ImageReader reader = new ImageReader(ref bitmap);
    ImageReader differenceReader = new ImageReader(ref differenceBitmap);
    List<Highlighter> highlighters = new List<Highlighter>();

    int numHorizontalDivisions = bitmap.Width / gridInterval,
        numVerticalDivisions = bitmap.Height / gridInterval;
    List<EdgePoint>[] horizontalEdges = new List<EdgePoint>[numVerticalDivisions];
    List<EdgePoint>[] verticalEdges = new List<EdgePoint>[numHorizontalDivisions];

    try
    {
        // Verticals: walk down each grid column.
        for (int tileX = 0; tileX < bitmap.Width / gridInterval * gridInterval; tileX += gridInterval)
        {
            FindEdgesOnLine(ref reader, ref differenceReader, out verticalEdges[tileX / gridInterval], new Point(tileX, 0), 0, 1);
        }

        // Horizontals
        // NOTE(review): the trailing direction arguments are (0, 1), identical to
        // the vertical pass — this looks like a copy-paste slip and was probably
        // meant to be (1, 0). Left unchanged pending confirmation of
        // FindEdgesOnLine's parameter semantics.
        for (int tileY = 0; tileY < bitmap.Height / gridInterval * gridInterval; tileY += gridInterval)
        {
            FindEdgesOnLine(ref reader, ref differenceReader, out horizontalEdges[tileY / gridInterval], new Point(0, tileY), 0, 1);
        }

        // Debug: highlight the edges (only the vertical passes are visualised).
        foreach (List<EdgePoint> edgeList in verticalEdges)
        {
            foreach (EdgePoint edge in edgeList)
            {
                highlighters.Add(new PointHighlighter(edge.X, edge.Y));
                highlighters.Add(new PointHighlighter(edge.SegmentCentre.X, edge.SegmentCentre.Y) { Pen = new Pen(Color.Orange) });

                // add a line too
                if (edge.LastPoint != null)
                {
                    highlighters.Add(new EdgeHighlighter(new Point(edge.LastPoint.X, edge.LastPoint.Y), new Point(edge.X, edge.Y)) { Pen = new Pen(edge.AverageSegmentColour, 2.0f) });
                }
            }
        }

        // Connect the edge points
        ConnectEdgePoints(verticalEdges, highlighters);

        // Add a grid highlighter representing the grid we checked
        GridHighlighter grid = new GridHighlighter(new Rectangle(0, 0, bitmap.Width, bitmap.Height), gridInterval);
        grid.Pen.Width = 1;
        grid.Pen.Color = Color.Gray;
        //highlighters.Add(grid);

        // Complete the highlighter array
        highlightersOut = highlighters.ToArray();
    }
    finally
    {
        // Release resources. Fix: the original disposed only 'reader', leaking
        // the filtered bitmap copy and its reader.
        reader.Dispose();
        differenceReader.Dispose();
        differenceBitmap.Dispose();
    }
}
private void openCamera()
{
    // Opens the first camera in the id list, configuring a YUV ImageReader for
    // camera input and a JPEG ImageReader for reprocessed output, then sizes
    // the preview surface and opens the device.
    // NOTE(review): lock(this) is a C# anti-pattern (external code can lock the
    // same object); a private lock object would be safer — not changed here.
    lock (this)
    {
        try
        {
            // Guard against concurrent open/close; give up after 3 seconds.
            // NOTE(review): on timeout this only shows a dialog and then falls
            // through to open anyway — confirm that is intentional.
            if (!mCameraOpenCloseLock.tryAcquire(3000, TimeUnit.MILLISECONDS))
            {
                showAlertDialog("Time out waiting to lock camera opening.", true);
            }

            // acquires camera characteristics (first camera in the id list)
            mCharacteristics = mSCameraManager.getCameraCharacteristics(mSCameraManager.CameraIdList[0]);

            StreamConfigurationMap streamConfigurationMap = mCharacteristics.get(SCameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);

            // Acquires supported preview size list that supports SurfaceTexture
            mPreviewSize = streamConfigurationMap.getOutputSizes(typeof(SurfaceTexture))[0];
            foreach (Size option in streamConfigurationMap.getOutputSizes(typeof(SurfaceTexture)))
            {
                // Find maximum preview size that is not larger than MAX_PREVIEW_WIDTH/MAX_PREVIEW_HEIGHT
                int areaCurrent = Math.Abs((mPreviewSize.Width * mPreviewSize.Height) - (MAX_PREVIEW_WIDTH * MAX_PREVIEW_HEIGHT));
                int areaNext = Math.Abs((option.Width * option.Height) - (MAX_PREVIEW_WIDTH * MAX_PREVIEW_HEIGHT));
                if (areaCurrent > areaNext)
                {
                    mPreviewSize = option;
                }
            }

            // Acquires supported input size for YUV_420_888 format.
            Size yuvSize = streamConfigurationMap.getInputSizes(ImageFormat.YUV_420_888)[0];

            // Configures an ImageReader pair (YUV in, JPEG out), both delivering
            // frames on the reader handler thread, with up to 2 buffered images.
            mYUVReader = ImageReader.newInstance(yuvSize.Width, yuvSize.Height, ImageFormat.YUV_420_888, 2);
            mJpegReader = ImageReader.newInstance(mYUVReader.Width, mYUVReader.Height, ImageFormat.JPEG, 2);
            mYUVReader.setOnImageAvailableListener(mYUVImageListener, mReaderHandler);
            mJpegReader.setOnImageAvailableListener(mJpegImageListener, mReaderHandler);

            // Set the aspect ratio to TextureView (width/height swapped in portrait)
            int orientation = Resources.Configuration.orientation;
            if (orientation == Configuration.ORIENTATION_LANDSCAPE)
            {
                mTextureView.setAspectRatio(mPreviewSize.Width, mPreviewSize.Height);
            }
            else
            {
                mTextureView.setAspectRatio(mPreviewSize.Height, mPreviewSize.Width);
            }

            // Opening the camera device here
            mSCameraManager.openCamera(mCameraId, new StateCallbackAnonymousInnerClassHelper(this), mBackgroundHandler);
        }
        catch (CameraAccessException e)
        {
            showAlertDialog("Cannot open the camera.", true);
            Log.e(TAG, "Cannot open the camera.", e);
        }
        catch (InterruptedException e)
        {
            throw new Exception("Interrupted while trying to lock camera opening.", e);
        }
    }
}
public static int Main(string[] args)
{
    // Wraps a raw MPEG2 stream into a DICOM Video Endoscopic Image object:
    // probes the stream with Mpeg2VideoInfo, stores the raw bytes as a single
    // encapsulated pixel-data fragment, and writes "dummy.dcm".
    // Returns 0 on success, 1 on write failure.
    string file1 = args[0];

    Mpeg2VideoInfo info = new Mpeg2VideoInfo(file1);
    System.Console.WriteLine(info.StartTime);
    System.Console.WriteLine(info.EndTime);
    System.Console.WriteLine(info.Duration);
    System.Console.WriteLine(info.AspectRatio);
    System.Console.WriteLine(info.FrameRate);
    System.Console.WriteLine(info.PictureWidth);
    System.Console.WriteLine(info.PictureHeight);

    ImageReader r = new ImageReader();
    Image image = r.GetImage();
    image.SetNumberOfDimensions(3);

    // Pixel Data element (7FE0,0010) that will hold the encapsulated stream.
    DataElement pixeldata = new DataElement(new gdcm.Tag(0x7fe0, 0x0010));

    uint fsize = gdcm.PosixEmulation.FileSize(file1);
    byte[] jstream = new byte[fsize];
    // Fix: the original ignored Stream.Read's return value (which may read fewer
    // bytes than requested) and never closed the stream. Read in a loop until
    // the buffer is full and dispose the stream with 'using'.
    using (System.IO.FileStream infile = new System.IO.FileStream(file1, System.IO.FileMode.Open, System.IO.FileAccess.Read))
    {
        int total = 0;
        while (total < jstream.Length)
        {
            int nread = infile.Read(jstream, total, jstream.Length - total);
            if (nread == 0)
            {
                break; // unexpected EOF — keep whatever was read
            }
            total += nread;
        }
    }

    // Encapsulate the whole stream as one fragment.
    SmartPtrFrag sq = SequenceOfFragments.New();
    Fragment frag = new Fragment();
    frag.SetByteValue(jstream, new gdcm.VL((uint)jstream.Length));
    sq.AddFragment(frag);
    pixeldata.SetValue(sq.__ref__());

    // insert:
    image.SetDataElement(pixeldata);

    PhotometricInterpretation pi = new PhotometricInterpretation(PhotometricInterpretation.PIType.YBR_PARTIAL_420);
    image.SetPhotometricInterpretation(pi);
    // FIXME hardcoded:
    PixelFormat pixeltype = new PixelFormat(3, 8, 8, 7);
    image.SetPixelFormat(pixeltype);
    // FIXME hardcoded:
    TransferSyntax ts = new TransferSyntax(TransferSyntax.TSType.MPEG2MainProfile);
    image.SetTransferSyntax(ts);
    image.SetDimension(0, (uint)info.PictureWidth);
    image.SetDimension(1, (uint)info.PictureHeight);
    image.SetDimension(2, 721); // FIXME hardcoded frame count

    ImageWriter writer = new ImageWriter();
    gdcm.File file = writer.GetFile();
    file.GetHeader().SetDataSetTransferSyntax(ts);

    // Fill in the video-specific attributes via the Anonymizer helper.
    Anonymizer anon = new Anonymizer();
    anon.SetFile(file);
    MediaStorage ms = new MediaStorage(MediaStorage.MSType.VideoEndoscopicImageStorage);
    UIDGenerator gen = new UIDGenerator();
    anon.Replace(new Tag(0x0008, 0x16), ms.GetString());
    anon.Replace(new Tag(0x0018, 0x40), "25");
    anon.Replace(new Tag(0x0018, 0x1063), "40.000000");
    anon.Replace(new Tag(0x0028, 0x34), "4\\3");
    anon.Replace(new Tag(0x0028, 0x2110), "01");

    writer.SetImage(image);
    writer.SetFileName("dummy.dcm");
    if (!writer.Write())
    {
        System.Console.WriteLine("Could not write");
        return 1;
    }
    return 0;
}
public override Address ReadCodeAddress(int byteSize, ImageReader rdr, ProcessorState state)
{
    // Flat 32-bit address space: a code address is a little-endian 32-bit word.
    uint uAddr = rdr.ReadLeUInt32();
    return Address.Ptr32(uAddr);
}
// Apply relocations to a segment.
//
// Walks the NE relocation records for 'seg', resolving each target (imported
// ordinal, imported name, or internal reference) to an Address, then patches
// the segment memory either additively or by following the target-chain list.
// Returns false on a malformed or unknown record.
bool ApplyRelocations(ImageReader rdr, int cRelocations, NeSegment seg)
{
    string module = "";
    Address address = null;
    NeRelocationEntry rep = null;
    for (int i = 0; i < cRelocations; i++)
    {
        // Each relocation record: address type, relocation type, source offset,
        // and two target words whose meaning depends on the relocation type.
        rep = new NeRelocationEntry
        {
            address_type = rdr.ReadByte(),
            relocation_type = rdr.ReadByte(),
            offset = rdr.ReadLeUInt16(),
            target1 = rdr.ReadLeUInt16(),
            target2 = rdr.ReadLeUInt16(),
        };
        // Get the target address corresponding to this entry.
        // If additive, there is no target chain list. Instead, add source
        // and target.
        bool additive = (rep.relocation_type & NE_RELFLAG_ADDITIVE) != 0;
        Tuple<Address, ImportReference> impRef;
        uint lp;
        switch (rep.relocation_type & 3)
        {
        case NE_RELTYPE_ORDINAL:
            module = moduleNames[rep.target1 - 1];
            // Synthesize an import stub, keyed by (module index << 16) | ordinal.
            lp = ((uint)rep.target1 << 16) | rep.target2;
            if (importStubs.TryGetValue(lp, out impRef))
            {
                address = impRef.Item1;
            }
            else
            {
                address = addrImportStubs;
                importStubs.Add(lp, new Tuple<Address, ImportReference>(
                    address,
                    new OrdinalImportReference(address, module, rep.target2)));
                addrImportStubs += 8;
            }
            break;
        case NE_RELTYPE_NAME:
            module = moduleNames[rep.target1 - 1];
            // target2 is an offset into the imported-names table, which holds a
            // length-prefixed function name.
            uint offName = lfaNew + this.offImportedNamesTable + rep.target2;
            var nameRdr = new LeImageReader(RawImage, offName);
            byte fnNameLength = nameRdr.ReadByte();
            var abFnName = nameRdr.ReadBytes(fnNameLength);
            lp = ((uint)rep.target1 << 16) | rep.target2;
            if (importStubs.TryGetValue(lp, out impRef))
            {
                address = impRef.Item1;
            }
            else
            {
                address = addrImportStubs;
                string fnName = Encoding.ASCII.GetString(abFnName);
                importStubs.Add(lp, new Tuple<Address, ImportReference>(
                    address,
                    new NamedImportReference(address, module, fnName)));
                // NOTE(review): unlike the ordinal case above, addrImportStubs is
                // not advanced here — confirm whether that is intentional.
            }
            break;
        case NE_RELTYPE_INTERNAL:
            if ((rep.target1 & 0xff) == 0xff)
            {
                throw new NotImplementedException();
            }
            else
            {
                // target1 is a 1-based segment index; target2 is the offset within it.
                address = segments[rep.target1 - 1].Address + rep.target2;
            }
            // NOTE(review): Selector.Value is printed twice; the second argument
            // was probably meant to be the offset.
            Debug.Print("{0}: {1:X4}:{2:X4} {3}", i + 1, address.Selector.Value, address.Selector.Value, "");
            break;
        case NE_RELTYPE_OSFIXUP:
            /* Relocation type 7:
             *
             * These appear to be used as fixups for the Windows
             * floating point emulator. Let's just ignore them and
             * try to use the hardware floating point. Linux should
             * successfully emulate the coprocessor if it doesn't
             * exist.
             */
            /*
             * TRACE("%d: TYPE %d, OFFSET %04x, TARGET %04x %04x %s\n",
             * i + 1, rep->relocation_type, rep->offset,
             * rep->target1, rep->target2,
             * NE_GetRelocAddrName( rep->address_type, additive ) );
             */
            continue;
        }
        ushort offset = rep.offset;

        // Apparently, high bit of address_type is sometimes set;
        // we ignore it for now.
        if (rep.address_type > NE_RADDR_OFFSET32)
        {
            diags.Error(
                string.Format(
                    "Module {0}: unknown relocation address type {1:X2}. Please report",
                    module, rep.address_type));
            return (false);
        }

        if (additive)
        {
            // Additive fixup: add the target to whatever is already stored at
            // the single source location.
            var sp = seg.Address + offset;
            Debug.Print(" {0:X4}:{0:X4}", offset, offset);
            byte b;
            ushort w;
            switch (rep.address_type & 0x7f)
            {
            case NE_RADDR_LOWBYTE:
                b = mem.ReadByte(sp);
                mem.WriteByte(sp, (byte)(b + address.Offset));
                break;
            case NE_RADDR_OFFSET16:
                w = mem.ReadLeUInt16(sp);
                mem.WriteLeUInt16(sp, (ushort)(w + address.Offset));
                break;
            case NE_RADDR_POINTER32:
                w = mem.ReadLeUInt16(sp);
                mem.WriteLeUInt16(sp, (ushort)(w + address.Offset));
                mem.WriteLeUInt16(sp + 2, address.Selector.Value);
                break;
            case NE_RADDR_SELECTOR:
                // Borland creates additive records with offset zero. Strange, but OK.
                w = mem.ReadLeUInt16(sp);
                if (w != 0)
                {
                    diags.Error(string.Format("Additive selector to {0:X4}. Please report.", w));
                }
                else
                {
                    mem.WriteLeUInt16(sp, address.Selector.Value);
                }
                break;
            default:
                goto unknown;
            }
        }
        else
        {
            // Non-additive fixup. Each patched location holds the offset of the
            // next location in the chain; the chain ends at 0xFFFF (or on a
            // self-reference / out-of-range offset, guarded below).
            do
            {
                var sp = seg.Address + offset;
                ushort next_offset = mem.ReadLeUInt16(sp);
                Debug.Print(" {0:X4}:{0:X4}", offset, next_offset);
                switch (rep.address_type & 0x7f)
                {
                case NE_RADDR_LOWBYTE:
                    mem.WriteByte(sp, (byte)address.Offset);
                    break;
                case NE_RADDR_OFFSET16:
                    mem.WriteLeUInt16(sp, (ushort)address.Offset);
                    break;
                case NE_RADDR_POINTER32:
                    mem.WriteLeUInt16(sp, (ushort)address.Offset);
                    mem.WriteLeUInt16(sp + 2, address.Selector.Value);
                    break;
                case NE_RADDR_SELECTOR:
                    mem.WriteLeUInt16(sp, address.Selector.Value);
                    break;
                default:
                    goto unknown;
                }
                if (next_offset == offset)
                {
                    break; // avoid infinite loop
                }
                if (next_offset >= seg.Alloc)
                {
                    break;
                }
                offset = next_offset;
            } while (offset != 0xffff);
        }
    }
    return (true);

unknown:
    // Unknown address type inside a record: report the full record and give up.
    var svc = Services.RequireService<IDiagnosticsService>();
    svc.Warn("{0}: unknown ADDR TYPE {1}, " +
        "TYPE {2}, OFFSET {3:X4}, TARGET {4:X4} {5:X4}",
        seg.Address.Selector, rep.address_type, rep.relocation_type,
        rep.offset, rep.target1, rep.target2);
    return (false);
}
ISymbolReader GetSymbolReader(ModuleDefinition module, Disposable<Stream> symbolStream, string fileName)
{
    // Parse the portable PDB stream, then wrap it in a reader bound to the module.
    var pdbImage = ImageReader.ReadPortablePdb(symbolStream, fileName);
    return new PortablePdbReader(pdbImage, module);
}
public override void onImageAvailable(ImageReader reader)
{
    // Save the next available frame; only the file extension depends on the
    // configured capture format.
    // Fix: the original duplicated the reader.acquireNextImage() call in both
    // branches — hoist it so the frame is acquired exactly once in one place.
    var image = reader.acquireNextImage();
    string extension = outerInstance.mImageFormat == ImageFormat.JPEG ? ".jpg" : ".dng";
    outerInstance.mImageSaver.save(image, outerInstance.createFileName() + extension);
}
/// <summary>
/// Opens a <seealso cref="com.samsung.android.sdk.camera.SCameraDevice"/>.
/// </summary>
/// <remarks>
/// Finds a camera with the requested lens facing, configures a JPEG
/// ImageReader (plus a RAW one when the device advertises the RAW capability),
/// sizes the preview and face-rect views, then opens the device.
/// NOTE(review): lock(this) is a C# anti-pattern (external code can lock the
/// same object); a private lock object would be safer — not changed here.
/// </remarks>
private void openCamera(int facing)
{
    lock (this)
    {
        try
        {
            // Guard against concurrent open/close; give up after 3 seconds.
            if (!mCameraOpenCloseLock.tryAcquire(3000, TimeUnit.MILLISECONDS))
            {
                showAlertDialog("Time out waiting to lock camera opening.", true);
            }

            mSCameraManager = mSCamera.SCameraManager;
            mCameraId = null;

            // Find camera device that facing to given facing parameter.
            foreach (string id in mSCamera.SCameraManager.CameraIdList)
            {
                SCameraCharacteristics cameraCharacteristics = mSCamera.SCameraManager.getCameraCharacteristics(id);
                if (cameraCharacteristics.get(SCameraCharacteristics.LENS_FACING) == facing)
                {
                    mCameraId = id;
                    break;
                }
            }

            if (mCameraId == null)
            {
                showAlertDialog("No camera exist with given facing: " + facing, true);
                return;
            }

            // acquires camera characteristics
            mCharacteristics = mSCamera.SCameraManager.getCameraCharacteristics(mCameraId);

            StreamConfigurationMap streamConfigurationMap = mCharacteristics.get(SCameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);

            // Acquires supported preview size list that supports SurfaceTexture
            mPreviewSize = streamConfigurationMap.getOutputSizes(typeof(SurfaceTexture))[0];
            foreach (Size option in streamConfigurationMap.getOutputSizes(typeof(SurfaceTexture)))
            {
                // Find maximum preview size that is not larger than MAX_PREVIEW_WIDTH/MAX_PREVIEW_HEIGHT
                int areaCurrent = Math.Abs((mPreviewSize.Width * mPreviewSize.Height) - (MAX_PREVIEW_WIDTH * MAX_PREVIEW_HEIGHT));
                int areaNext = Math.Abs((option.Width * option.Height) - (MAX_PREVIEW_WIDTH * MAX_PREVIEW_HEIGHT));
                if (areaCurrent > areaNext)
                {
                    mPreviewSize = option;
                }
            }

            // Acquires supported size for JPEG format
            Size[] jpegSizeList = null;
            jpegSizeList = streamConfigurationMap.getOutputSizes(ImageFormat.JPEG);
            if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M && 0 == jpegSizeList.Length)
            {
                // If device has 'SCameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE' getOutputSizes can return zero size list
                // for a format value in getOutputFormats.
                jpegSizeList = streamConfigurationMap.getHighResolutionOutputSizes(ImageFormat.JPEG);
            }
            Size jpegSize = jpegSizeList[0];

            // Configures an ImageReader
            mJpegReader = ImageReader.newInstance(jpegSize.Width, jpegSize.Height, ImageFormat.JPEG, 1);
            mJpegReader.setOnImageAvailableListener(mImageCallback, mImageSavingHandler);

            // RAW capture is optional: only configure a RAW reader when the
            // device advertises the RAW capability; otherwise tear down any
            // previously configured RAW reader.
            if (contains(mCharacteristics.get(SCameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES), SCameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_RAW))
            {
                Size[] rawSizeList = streamConfigurationMap.getOutputSizes(ImageFormat.RAW_SENSOR);
                if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M && 0 == rawSizeList.Length)
                {
                    rawSizeList = streamConfigurationMap.getHighResolutionOutputSizes(ImageFormat.RAW_SENSOR);
                }
                Size rawSize = rawSizeList[0];
                mRawReader = ImageReader.newInstance(rawSize.Width, rawSize.Height, ImageFormat.RAW_SENSOR, 1);
                mRawReader.setOnImageAvailableListener(mImageCallback, mImageSavingHandler);
                mImageFormatList = Arrays.asList(ImageFormat.JPEG, ImageFormat.RAW_SENSOR);
            }
            else
            {
                if (mRawReader != null)
                {
                    mRawReader.close();
                    mRawReader = null;
                }
                mImageFormatList = Arrays.asList(ImageFormat.JPEG);
            }
            mImageFormat = ImageFormat.JPEG;

            // Set the aspect ratio to TextureView (width/height swapped in portrait)
            int orientation = Resources.Configuration.orientation;
            if (orientation == Configuration.ORIENTATION_LANDSCAPE)
            {
                mTextureView.setAspectRatio(mPreviewSize.Width, mPreviewSize.Height);
                mFaceRectView.setAspectRatio(mPreviewSize.Width, mPreviewSize.Height);
            }
            else
            {
                mTextureView.setAspectRatio(mPreviewSize.Height, mPreviewSize.Width);
                mFaceRectView.setAspectRatio(mPreviewSize.Height, mPreviewSize.Width);
            }

            // calculate transform matrix for face rect view
            configureFaceRectTransform();

            // Opening the camera device here
            mSCameraManager.openCamera(mCameraId, new StateCallbackAnonymousInnerClassHelper(this), mBackgroundHandler);
        }
        catch (CameraAccessException e)
        {
            showAlertDialog("Cannot open the camera.", true);
            Log.e(TAG, "Cannot open the camera.", e);
        }
        catch (InterruptedException e)
        {
            throw new Exception("Interrupted while trying to lock camera opening.", e);
        }
    }
}
void LoadTextures(bool member)
{
    // Members get the member-specific texture; everyone else gets the base one.
    string textureName = member ? memberTextureName : baseTextureName;
    baseTexture = ImageReader.GetTexture(textureName);
}
public override void onImageAvailable(ImageReader reader)
{
    // Drop frames that race with camera shutdown.
    if (outerInstance.State == CAMERA_STATE.CLOSING)
    {
        return;
    }

    outerInstance.mReceivedImgCnt++;
    Image frame = reader.acquireNextImage();
    outerInstance.mInputImageList.Add(frame);

    // Once the required number of input frames has arrived, reset the counter
    // and hand the batch to the depth-of-field processor.
    if (outerInstance.mReceivedImgCnt == outerInstance.mDeviceDofInputCnt)
    {
        outerInstance.mReceivedImgCnt = 0;
        outerInstance.mProcessor.requestMultiProcess(outerInstance.mInputImageList);
    }
}
public override Address ReadCodeAddress(int byteSize, ImageReader rdr, ProcessorState state)
{
    // Segmented addressing: defer to the shared segmented-address reader.
    var addr = ReadSegmentedCodeAddress(byteSize, rdr, state);
    return addr;
}
/// <summary>
/// Closes a camera and release resources.
/// </summary>
private void closeCamera()
{
    try
    {
        // Wait until no open/close operation is in flight before tearing down.
        mCameraOpenCloseLock.acquire();

        // Close in dependency order: session, then device, then image reader;
        // each field is nulled so a repeat call is a no-op.
        if (mSCameraSession != null)
        {
            mSCameraSession.close();
            mSCameraSession = null;
        }

        if (mSCameraDevice != null)
        {
            mSCameraDevice.close();
            mSCameraDevice = null;
        }

        if (mImageReader != null)
        {
            mImageReader.close();
            mImageReader = null;
        }

        mSCameraManager = null;
        mSCamera = null;
    }
    catch (InterruptedException e)
    {
        // Interrupted while waiting for the lock: log and fall through to release.
        Log.e(TAG, "Interrupted while trying to lock camera closing.", e);
    }
    finally
    {
        mCameraOpenCloseLock.release();
    }
}
public override void onImageAvailable(ImageReader reader)
{
    // A YUV frame arrived: pair it with its capture result, quantize the luma
    // plane in place, then submit the modified frame for JPEG reprocessing.
    STotalCaptureResult result = null;
    Image image = reader.acquireNextImage();
    try
    {
        // Blocks until the matching capture result has been queued elsewhere.
        result = outerInstance.mCaptureResultQueue.take();
    }
    catch (InterruptedException e)
    {
        Console.WriteLine(e.ToString());
        Console.Write(e.StackTrace);
    }

    {
        // Simple YUV processing that makes brightness value be quantized by 10.
        ByteBuffer y_buffer = image.Planes[0].Buffer;
        sbyte[] y_byte_array = new sbyte[y_buffer.capacity()];
        y_buffer.get(y_byte_array);
        int size = image.Width * image.Height;
        for (int i = 0; i < size; i++)
        {
            // Integer division truncates, snapping each sample to a multiple of 10.
            y_byte_array[i] = (sbyte)(y_byte_array[i] / 10 * 10);
        }
        y_buffer.rewind();
        y_buffer.put(y_byte_array);
    }

    try
    {
        // Build a reprocess request from the original capture result and route
        // its output to the JPEG reader.
        SCaptureRequest.Builder builder = outerInstance.mSCameraDevice.createReprocessCaptureRequest(result);
        builder.addTarget(outerInstance.mJpegReader.Surface);

        // Option #1. Put Image obtained from ImageReader directly to ImageWriter
        outerInstance.mReprocessWriter.queueInputImage(image);

        /* Option #2. Obtain input Image from ImageWriter and copy to it. Then push back to ImageWriter. potentially with zero copy
        Image inputImage = mReprocessWriter.dequeueInputImage();
        //copy image to inputImage here
        mReprocessWriter.queueInputImage(inputImage);
        */

        outerInstance.mSCameraSession.capture(builder.build(), null, outerInstance.mBackgroundHandler);
    }
    catch (CameraAccessException e)
    {
        Console.WriteLine(e.ToString());
        Console.Write(e.StackTrace);
    }
}
/// <summary>
/// Opens a <seealso cref="com.samsung.android.sdk.camera.SCameraDevice"/>.
/// </summary>
/// <remarks>
/// Configures a JPEG ImageReader whose buffer count matches the depth-of-field
/// processor's maximum multi-shot input count, sizes the preview, initializes
/// the processor, then opens the camera device.
/// </remarks>
private void openCamera()
{
    try
    {
        // Guard against concurrent open/close; give up after 3 seconds.
        if (!mCameraOpenCloseLock.tryAcquire(3000, TimeUnit.MILLISECONDS))
        {
            showAlertDialog("Time out waiting to lock camera opening.", true);
        }

        mSCameraManager = mSCamera.SCameraManager;

        // acquires camera characteristics
        SCameraCharacteristics characteristics = mSCameraManager.getCameraCharacteristics(mCameraId);

        StreamConfigurationMap streamConfigurationMap = characteristics.get(SCameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);

        // Acquires supported preview size list that supports SurfaceTexture
        mPreviewSize = streamConfigurationMap.getOutputSizes(typeof(SurfaceTexture))[0];
        foreach (Size option in streamConfigurationMap.getOutputSizes(typeof(SurfaceTexture)))
        {
            // Find maximum preview size that is not larger than MAX_PREVIEW_WIDTH/MAX_PREVIEW_HEIGHT
            int areaCurrent = Math.Abs((mPreviewSize.Width * mPreviewSize.Height) - (MAX_PREVIEW_WIDTH * MAX_PREVIEW_HEIGHT));
            int areaNext = Math.Abs((option.Width * option.Height) - (MAX_PREVIEW_WIDTH * MAX_PREVIEW_HEIGHT));
            if (areaCurrent > areaNext)
            {
                mPreviewSize = option;
            }
        }

        // Acquires supported size for JPEG format
        Size[] jpegSizeList = null;
        jpegSizeList = streamConfigurationMap.getOutputSizes(ImageFormat.JPEG);
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M && 0 == jpegSizeList.Length)
        {
            // If device has 'SCameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE' getOutputSizes can return zero size list
            // for a format value in getOutputFormats.
            jpegSizeList = streamConfigurationMap.getHighResolutionOutputSizes(ImageFormat.JPEG);
        }
        Size jpegSize = jpegSizeList[0];

        // Configures an ImageReader; maxImages is the processor's maximum
        // multi-shot input count so a full burst can be buffered.
        mImageReader = ImageReader.newInstance(jpegSize.Width, jpegSize.Height, ImageFormat.JPEG, mProcessor.Parameters.get(SCameraDepthOfFieldProcessor.MULTI_INPUT_COUNT_RANGE).Upper);
        mImageReader.setOnImageAvailableListener(mImageCallback, mBackgroundHandler);

        // Set the aspect ratio to TextureView (width/height swapped in portrait)
        int orientation = Resources.Configuration.orientation;
        if (orientation == Configuration.ORIENTATION_LANDSCAPE)
        {
            mTextureView.setAspectRatio(mPreviewSize.Width, mPreviewSize.Height);
        }
        else
        {
            mTextureView.setAspectRatio(mPreviewSize.Height, mPreviewSize.Width);
        }

        // Initialize depth of field processor
        initProcessor();

        // Opening the camera device here
        mSCameraManager.openCamera(mCameraId, new StateCallbackAnonymousInnerClassHelper(this), mBackgroundHandler);
    }
    catch (CameraAccessException e)
    {
        showAlertDialog("Cannot open the camera.", true);
        Log.e(TAG, "Cannot open the camera.", e);
    }
    catch (InterruptedException e)
    {
        throw new Exception("Interrupted while trying to lock camera opening.", e);
    }
}
/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
/// <summary>
/// Loads a TGA from a file
/// </summary>
/// <param name="filename">Path of the TGA file to load.</param>
/// <remarks>
/// Reads the raw bytes via LoadData, decodes the 18-byte TGA header into
/// _tgaData, then constructs an ImageReader per image type.
/// </remarks>
public void Load(string filename)
{
    LoadData(filename);

    // rawData[0] is the image-ID field length; the ID bytes follow the 18-byte header.
    byte[] ImageID = new byte[_tgaData.rawData[0]];
    for (int i = 0; i < ImageID.Length; i++)
    {
        ImageID[i] = _tgaData.rawData[18 + i];
    }

    _tgaData.imagetype = (ImageTypes)_tgaData.rawData[2];

    // it's possible for rawData[7] to have 15 bytes, but we still want it to return 2 in this case.
    int cTableBytes = (int)Math.Ceiling(Convert.ToDouble(_tgaData.rawData[7]) / 8.0);
    // Colour table size = entry count (rawData[5..6], little-endian) * bytes per entry,
    // present only when the colour-map flag (rawData[1]) is set.
    byte[] colorTable = new byte[_tgaData.rawData[1] * (_tgaData.rawData[5] + _tgaData.rawData[6] * 256) * cTableBytes];

    // Width/height are 16-bit little-endian values at header offsets 12 and 14.
    _tgaData.width = _tgaData.rawData[12] + _tgaData.rawData[13] * 256;
    _tgaData.height = _tgaData.rawData[14] + _tgaData.rawData[15] * 256;
    int _xOrig = _tgaData.rawData[8] + _tgaData.rawData[9] * 256;
    int _yOrig = _tgaData.rawData[10] + _tgaData.rawData[11] * 256;

    // set up the orientation for later
    _tgaData.rightCorner = (byte)Origin.rightCorner & _tgaData.rawData[17];
    _tgaData.topCorner = (byte)Origin.topCorner & _tgaData.rawData[17];

    // where the actual bytes per channel is stored depends on if we're color mapped or not.
    _tgaData.bytesPerChannel = _tgaData.rawData[1] != 0 ? cTableBytes : _tgaData.rawData[16] / 8;
    _tgaData.imageData = new byte[_tgaData.bytesPerChannel * _tgaData.width * _tgaData.height];

    // record the number of alpha bits. It's either 1 or 8 if it's anything.
    _tgaData.bitsPerAlpha = _tgaData.rawData[17] & 0x09;

    // all of the image types above 8 are RLE images
    bool RLE = ((int)_tgaData.imagetype > 8);
    bool trueColor = (((int)_tgaData.imagetype & 0x02) == 0x02);
    // NOTE(review): RLE and trueColor are computed but never used below.

    // We've gone through the header. Now to prep all the info and
    // parse it into an image

    // imageOffset is the point in the file the actual image data starts
    int imageOffset = ImageID.Length + colorTable.Length + 18;
    // offset is used to keep track of our position reading the image data
    int offset = imageOffset;
    // value to make sure we don't read past the image data into the footer
    int numPixels = _tgaData.width * _tgaData.height;

    ImageReader imageReader;
    // NOTE(review): the switch has no case for the RLE black-and-white or RLE
    // colour-mapped types, and imageReader is never read after assignment —
    // confirm whether the ImageReader constructor performs the decode as a
    // side effect (it receives _tgaData and the data offset).
    switch (_tgaData.imagetype)
    {
    case ImageTypes.UncompressedColorMap:
        // where the color table starts (rawData[3..4] is the first-entry index field)
        int cMapOffset = 18 + (_tgaData.rawData[3] + _tgaData.rawData[4] * 256);
        // populate the colorTable
        for (int i = 0; i < colorTable.Length; i++)
            colorTable[i] = _tgaData.rawData[i + cMapOffset];
        int cMapSize = colorTable.Length;
        _tgaData.colorTable = colorTable;
        // load the image data from the colorTable
        imageReader = new ImageReader(_tgaData, offset);
        break;
    case ImageTypes.UncompressedTrueColor:
        imageReader = new ImageReader(_tgaData, offset);
        break;
    case ImageTypes.UncompressedBlackAndWhite:
        imageReader = new ImageReader(_tgaData, offset);
        break;
    case ImageTypes.RLETrueColor:
        imageReader = new ImageReader(_tgaData, offset);
        break;
    } // end switch

    // we don't need the raw data anymore.
    _tgaData.rawData = new byte[0];
}
public static int Main(string[] args)
{
    // Recompresses a DICOM image to JPEG Baseline (Process 1) at quality 50,
    // using an explicitly configured JPEG codec. Returns 0 on success.
    if (args.Length < 2)
    {
        System.Console.WriteLine(" input.dcm output.dcm");
        return 1;
    }
    string filename = args[0];
    string outfilename = args[1];

    ImageReader reader = new ImageReader();
    reader.SetFileName(filename);
    if (!reader.Read())
    {
        System.Console.WriteLine("Could not read: " + filename);
        return 1;
    }

    // The output of gdcm::Reader is a gdcm::File; the dataset is the set of
    // elements we are interested in.
    File file = reader.GetFile();
    DataSet ds = file.GetDataSet();
    Image image = reader.GetImage();

    ImageChangeTransferSyntax change = new ImageChangeTransferSyntax();
    TransferSyntax targetts = new TransferSyntax(TransferSyntax.TSType.JPEGBaselineProcess1);
    change.SetTransferSyntax(targetts);

    // Set up the JPEG codec; it must be able to encode JPEGBaselineProcess1.
    JPEGCodec jpegcodec = new JPEGCodec();
    if (!jpegcodec.CanCode(targetts))
    {
        System.Console.WriteLine("Something went really wrong, JPEGCodec cannot handle JPEGBaselineProcess1");
        return 1;
    }
    jpegcodec.SetLossless(false);
    jpegcodec.SetQuality(50); // poor quality !
    change.SetUserCodec(jpegcodec); // specify the codec to use to the ImageChangeTransferSyntax

    change.SetInput(image);
    if (!change.Change())
    {
        System.Console.WriteLine("Could not change the Transfer Syntax");
        return 1;
    }

    ImageWriter writer = new ImageWriter();
    writer.SetImage(change.GetOutput());
    writer.SetFile(reader.GetFile());
    writer.SetFileName(outfilename);
    if (!writer.Write())
    {
        System.Console.WriteLine("Could not write: " + outfilename);
        return 1;
    }
    return 0;
}
/// <summary>
/// Creates an x86 disassembler that decodes instructions from <paramref name="rdr"/>.
/// </summary>
/// <param name="rdr">Image reader positioned at the code to disassemble.</param>
/// <param name="options">x86-specific disassembly options.</param>
/// <returns>A configured <see cref="X86Disassembler"/>.</returns>
public abstract X86Disassembler CreateDisassembler(ImageReader rdr, X86Options options);
public override void onImageAvailable(ImageReader reader)
{
    // Pair the incoming frame with its capture result pulled from the queue,
    // then hand both to addImage.
    STotalCaptureResult captureResult = null;
    try
    {
        captureResult = mCaptureResultQueue.take();
    }
    catch (InterruptedException e)
    {
        Console.WriteLine(e.ToString());
        Console.Write(e.StackTrace);
    }
    addImage(reader.acquireNextImage(), captureResult);
}
// Sets up member variables related to camera.
/// <summary>
/// Selects a non-front-facing camera, configures the JPEG still-capture
/// <c>ImageReader</c>, and computes a preview size appropriate for the current
/// display rotation and sensor orientation.
/// </summary>
/// <param name="width">Available width for the preview, in pixels.</param>
/// <param name="height">Available height for the preview, in pixels.</param>
private void SetUpCameraOutputs(int width, int height)
{
    var activity = Activity;
    var manager = (CameraManager)activity.GetSystemService(Context.CameraService);
    try
    {
        for (var i = 0; i < manager.GetCameraIdList().Length; i++)
        {
            var cameraId = manager.GetCameraIdList()[i];
            CameraCharacteristics characteristics = manager.GetCameraCharacteristics(cameraId);

            // We don't use a front facing camera in this sample.
            var facing = (Integer)characteristics.Get(CameraCharacteristics.LensFacing);
            if (facing != null && facing == (Integer.ValueOf((int)LensFacing.Front)))
            {
                continue;
            }

            // Cameras without a stream configuration map cannot be used.
            var map = (StreamConfigurationMap)characteristics.Get(CameraCharacteristics.ScalerStreamConfigurationMap);
            if (map == null)
            {
                continue;
            }

            // For still image captures, we use the largest available size.
            Size largest = (Size)Collections.Max(Arrays.AsList(map.GetOutputSizes((int)ImageFormatType.Jpeg)), new CompareSizesByArea());
            mImageReader = ImageReader.NewInstance(largest.Width, largest.Height, ImageFormatType.Jpeg, /*maxImages*/ 2);
            mImageReader.SetOnImageAvailableListener(mOnImageAvailableListener, mBackgroundHandler);

            // Find out if we need to swap dimension to get the preview size relative to sensor
            // coordinate.
            var displayRotation = activity.WindowManager.DefaultDisplay.Rotation;
            //noinspection ConstantConditions
            mSensorOrientation = (int)characteristics.Get(CameraCharacteristics.SensorOrientation);
            bool swappedDimensions = false;
            switch (displayRotation)
            {
                case SurfaceOrientation.Rotation0:
                case SurfaceOrientation.Rotation180:
                    // Natural orientation: a sensor mounted at 90/270 degrees delivers
                    // frames rotated relative to the display.
                    if (mSensorOrientation == 90 || mSensorOrientation == 270)
                    {
                        swappedDimensions = true;
                    }
                    break;
                case SurfaceOrientation.Rotation90:
                case SurfaceOrientation.Rotation270:
                    if (mSensorOrientation == 0 || mSensorOrientation == 180)
                    {
                        swappedDimensions = true;
                    }
                    break;
                default:
                    Log.Error(TAG, "Display rotation is invalid: " + displayRotation);
                    break;
            }

            Point displaySize = new Point();
            activity.WindowManager.DefaultDisplay.GetSize(displaySize);
            var rotatedPreviewWidth = width;
            var rotatedPreviewHeight = height;
            var maxPreviewWidth = displaySize.X;
            var maxPreviewHeight = displaySize.Y;
            if (swappedDimensions)
            {
                // Express the requested preview bounds in sensor coordinates.
                rotatedPreviewWidth = height;
                rotatedPreviewHeight = width;
                maxPreviewWidth = displaySize.Y;
                maxPreviewHeight = displaySize.X;
            }
            if (maxPreviewWidth > MAX_PREVIEW_WIDTH)
            {
                maxPreviewWidth = MAX_PREVIEW_WIDTH;
            }
            if (maxPreviewHeight > MAX_PREVIEW_HEIGHT)
            {
                maxPreviewHeight = MAX_PREVIEW_HEIGHT;
            }

            // Danger, W.R.! Attempting to use too large a preview size could exceed the camera
            // bus' bandwidth limitation, resulting in gorgeous previews but the storage of
            // garbage capture data.
            mPreviewSize = ChooseOptimalSize(map.GetOutputSizes(Class.FromType(typeof(SurfaceTexture))), rotatedPreviewWidth, rotatedPreviewHeight, maxPreviewWidth, maxPreviewHeight, largest);

            // We fit the aspect ratio of TextureView to the size of preview we picked.
            var orientation = Resources.Configuration.Orientation;
            if (orientation == Orientation.Landscape)
            {
                mTextureView.SetAspectRatio(mPreviewSize.Width, mPreviewSize.Height);
            }
            else
            {
                mTextureView.SetAspectRatio(mPreviewSize.Height, mPreviewSize.Width);
            }

            // Check if the flash is supported.
            var available = (Boolean)characteristics.Get(CameraCharacteristics.FlashInfoAvailable);
            if (available == null)
            {
                mFlashSupported = false;
            }
            else
            {
                mFlashSupported = (bool)available;
            }

            // Use the first suitable camera found.
            mCameraId = cameraId;
            return;
        }
    }
    catch (CameraAccessException e)
    {
        e.PrintStackTrace();
    }
    catch (NullPointerException e)
    {
        // Currently an NPE is thrown when the Camera2API is used but not supported on the
        // device this code runs.
        ErrorDialog.NewInstance(GetString(Resource.String.camera_error)).Show(ChildFragmentManager, FRAGMENT_DIALOG);
    }
}
/// <summary>
/// Reads the PE optional header and the data-directory entries that follow it.
/// The reads mirror the on-disk field layout exactly, so their order must not change.
/// </summary>
/// <param name="rdr">Reader positioned at the start of the optional header.</param>
/// <param name="expectedMagic">Expected magic value for this PE flavor.</param>
/// <exception cref="BadImageFormatException">
/// Thrown when the declared optional-header size is non-positive or the magic does not match.
/// </exception>
public void ReadOptionalHeader(ImageReader rdr, short expectedMagic)
{
    if (optionalHeaderSize <= 0)
    {
        throw new BadImageFormatException("Optional header size should be larger than 0 in a PE executable image file.");
    }
    short magic = rdr.ReadLeInt16();
    if (magic != expectedMagic)
    {
        throw new BadImageFormatException("Not a valid PE Header.");
    }
    // Fields read but not retained are consumed only to advance the reader.
    rdr.ReadByte();     // Linker major version
    rdr.ReadByte();     // Linker minor version
    rdr.ReadLeUInt32(); // code size (== .text section size)
    rdr.ReadLeUInt32(); // size of initialized data
    rdr.ReadLeUInt32(); // size of uninitialized data
    rvaStartAddress = rdr.ReadLeUInt32();
    uint rvaBaseOfCode = rdr.ReadLeUInt32();
    // Image-base width varies by PE flavor; the inner loader knows how much to read.
    preferredBaseOfImage = innerLoader.ReadPreferredImageBase(rdr);
    rdr.ReadLeUInt32(); // section alignment
    rdr.ReadLeUInt32(); // file alignment
    rdr.ReadLeUInt16(); // OS major version
    rdr.ReadLeUInt16(); // OS minor version
    rdr.ReadLeUInt16(); // Image major version
    rdr.ReadLeUInt16(); // Image minor version
    rdr.ReadLeUInt16(); // Subsystem major version
    rdr.ReadLeUInt16(); // Subsystem minor version
    rdr.ReadLeUInt32(); // reserved
    uint sizeOfImage = rdr.ReadLeUInt32();
    uint sizeOfHeaders = rdr.ReadLeUInt32();
    uint checksum = rdr.ReadLeUInt32();
    ushort subsystem = rdr.ReadLeUInt16();
    ushort dllFlags = rdr.ReadLeUInt16();
    // Stack/heap sizes are read at the architecture's word width.
    var stackReserve = rdr.Read(arch.WordWidth);
    var stackCommit = rdr.Read(arch.WordWidth);
    var heapReserve = rdr.Read(arch.WordWidth);
    var heapCommit = rdr.Read(arch.WordWidth);
    rdr.ReadLeUInt32(); // loader flags
    uint dictionaryCount = rdr.ReadLeUInt32();
    // Each directory entry below is an (RVA, size) pair; dictionaryCount says how
    // many are present, so return as soon as the remaining count hits zero.
    if (dictionaryCount == 0)
    {
        return;
    }
    this.rvaExportTable = rdr.ReadLeUInt32();
    this.sizeExportTable = rdr.ReadLeUInt32();
    if (--dictionaryCount == 0)
    {
        return;
    }
    this.rvaImportTable = rdr.ReadLeUInt32();
    uint importTableSize = rdr.ReadLeUInt32();
    if (--dictionaryCount == 0)
    {
        return;
    }
    this.rvaResources = rdr.ReadLeUInt32(); // resource address
    rdr.ReadLeUInt32();                     // resource size
    if (--dictionaryCount == 0)
    {
        return;
    }
    this.rvaExceptionTable = rdr.ReadLeUInt32();  // exception address
    this.sizeExceptionTable = rdr.ReadLeUInt32(); // exception size
    if (--dictionaryCount == 0)
    {
        return;
    }
    rdr.ReadLeUInt32(); // certificate address
    rdr.ReadLeUInt32(); // certificate size
    if (--dictionaryCount == 0)
    {
        return;
    }
    this.rvaBaseRelocationTable = rdr.ReadLeUInt32();
    this.sizeBaseRelocationTable = rdr.ReadLeUInt32();
    if (--dictionaryCount == 0)
    {
        return;
    }
    uint rvaDebug = rdr.ReadLeUInt32();
    uint cbDebug = rdr.ReadLeUInt32();
    if (--dictionaryCount == 0)
    {
        return;
    }
    uint rvaArchitecture = rdr.ReadLeUInt32();
    uint cbArchitecture = rdr.ReadLeUInt32();
    if (--dictionaryCount == 0)
    {
        return;
    }
    uint rvaGlobalPointer = rdr.ReadLeUInt32();
    uint cbGlobalPointer = rdr.ReadLeUInt32();
    if (--dictionaryCount == 0)
    {
        return;
    }
    uint rvaTls = rdr.ReadLeUInt32();
    uint cbTls = rdr.ReadLeUInt32();
    if (--dictionaryCount == 0)
    {
        return;
    }
    uint rvaLoadConfig = rdr.ReadLeUInt32();
    uint cbLoadConfig = rdr.ReadLeUInt32();
    if (--dictionaryCount == 0)
    {
        return;
    }
    uint rvaBoundImport = rdr.ReadLeUInt32();
    uint cbBoundImport = rdr.ReadLeUInt32();
    if (--dictionaryCount == 0)
    {
        return;
    }
    uint rvaIat = rdr.ReadLeUInt32();
    uint cbIat = rdr.ReadLeUInt32();
    if (--dictionaryCount == 0)
    {
        return;
    }
    this.rvaDelayImportDescriptor = rdr.ReadLeUInt32();
    uint cbDelayImportDescriptor = rdr.ReadLeUInt32();
}
/// <summary>
/// Traverses a line from <paramref name="start"/> in steps of (xStep, yStep) across the
/// difference image, derives a brightness threshold near the 90th percentile of the pixels
/// on that line, then records an EdgePoint at each rising crossing of that threshold.
/// </summary>
/// <param name="reader">Source image, handed to each EdgePoint constructor.</param>
/// <param name="diffReader">Difference image whose per-pixel low byte is the brightness examined.</param>
/// <param name="edgePoints">Receives the detected edge points in traversal order.</param>
/// <param name="start">Starting pixel of the traversal.</param>
/// <param name="xStep">Horizontal increment per step.</param>
/// <param name="yStep">Vertical increment per step.</param>
private void FindEdgesOnLine(ref ImageReader reader, ref ImageReader diffReader, out List <EdgePoint> edgePoints, Point start, int xStep, int yStep)
{
    edgePoints = new List <EdgePoint>();
    unsafe
    {
        // Determine the popularity of brightnesses (we'll grab the pixels brighter than a percentile)
        int x = start.X, y = start.Y;
        int[] counts = new int[256];
        int numPixelsAlongLine = 0;
        Array.Clear(counts, 0, 256); // redundant with array initialization; kept as-is
        while (x >= 0 && y >= 0 && x < diffReader.Width && y < diffReader.Height)
        {
            ++counts[diffReader.PixelRows[y][x] & 0xFF];
            ++numPixelsAlongLine;
            x += xStep;
            y += yStep;
        }

        // Considering the averages, determine the brightness threshold to qualify a pixel:
        // the smallest brightness at which at least 90% of the line's pixels are darker.
        int expectedCount = 90 * numPixelsAlongLine / 100;
        int brightnessThreshold = 0;
        for (int i = 0, currentCount = 0; i < 256; ++i)
        {
            if (currentCount >= expectedCount)
            {
                brightnessThreshold = i;
                break;
            }
            currentCount += counts[i];
        }
        // NOTE(review): if the percentile falls in bin 255 the loop ends without assigning,
        // leaving brightnessThreshold at 0 — confirm whether that is intended.

        // Step along again, this time marking pixels that exceed the brightness threshold
        bool wasAboveThreshold = false;
        EdgePoint lastEdgePoint = null;
        x = start.X;
        y = start.Y;
        while (x >= 0 && y >= 0 && x < diffReader.Width && y < diffReader.Height)
        {
            // Register the pixel if it's just gone above the threshold (rising edges only).
            bool isAboveThreshold = ((diffReader.PixelRows[y][x] & 0xFF) > brightnessThreshold);
            if (isAboveThreshold && !wasAboveThreshold)
            {
                // EdgePoint receives the previous point by ref — presumably to link points
                // into a chain; verify against the EdgePoint constructor.
                lastEdgePoint = new EdgePoint(x, y, ref reader, ref lastEdgePoint);
                edgePoints.Add(lastEdgePoint);
            }
            // Advance
            wasAboveThreshold = isAboveThreshold;
            x += xStep;
            y += yStep;
        }
    }
}
/// <summary>
/// Resolves one paired entry from the Import Lookup Table and Import Address Table.
/// </summary>
/// <param name="dllName">Name of the DLL the entry belongs to.</param>
/// <param name="rdrIlt">Reader positioned at the current ILT entry.</param>
/// <param name="rdrIat">Reader positioned at the corresponding IAT entry.</param>
/// <returns>True if an entry was resolved; false at the table's terminating null entry.</returns>
public abstract bool ResolveImportDescriptorEntry(string dllName, ImageReader rdrIlt, ImageReader rdrIat);
/// <summary>
/// Constructs a TLCS-900 disassembler over the given image reader.
/// </summary>
/// <param name="arch">Owning architecture instance.</param>
/// <param name="rdr">Reader positioned at the code to disassemble.</param>
public Tlcs900Disassembler(Tlcs900Architecture arch, ImageReader rdr)
{
    this.rdr = rdr;
    this.arch = arch;
}
/// <summary>
/// Reads the preferred image base from the optional header; the field width
/// depends on the PE flavor the concrete loader handles.
/// </summary>
/// <param name="rdr">Reader positioned at the image-base field.</param>
public abstract Address ReadPreferredImageBase(ImageReader rdr);
/// <summary>
/// Obtains an instruction enumerator by asking the architecture for its disassembler.
/// </summary>
/// <param name="arch">Architecture that supplies the disassembler.</param>
/// <param name="rdr">Reader positioned at the code to disassemble.</param>
protected virtual IEnumerator <MachineInstruction> CreateDisassembler(IProcessorArchitecture arch, ImageReader rdr)
{
    var dasm = arch.CreateDisassembler(rdr);
    return dasm.GetEnumerator();
}
/// <summary>
/// Reads the preferred image base as a 64-bit little-endian value.
/// </summary>
/// <param name="rdr">Reader positioned at the image-base field.</param>
public override Address ReadPreferredImageBase(ImageReader rdr)
{
    ulong uBase = rdr.ReadLeUInt64();
    return Address64.Ptr64(uBase);
}
/// <summary>
/// Hands the newly captured image off to the background thread for saving.
/// </summary>
public void OnImageAvailable(ImageReader reader)
{
    var image = reader.AcquireNextImage();
    owner.mBackgroundHandler.Post(new ImageSaver(image, file, owner));
}
/// <summary>
/// Resolves one 64-bit ILT/IAT entry pair, registering an import reference at
/// the IAT slot's address.
/// </summary>
/// <returns>False at the terminating null ILT entry; true otherwise.</returns>
public override bool ResolveImportDescriptorEntry(string dllName, ImageReader rdrIlt, ImageReader rdrIat)
{
    // Record the IAT slot address before either reader advances.
    Address addrThunk = rdrIat.Address;
    ulong iatEntry = rdrIat.ReadLeUInt64();
    ulong iltEntry = rdrIlt.ReadLeUInt64();

    // A zero ILT entry terminates the import list.
    if (iltEntry == 0)
    {
        return false;
    }

    var reference = ResolveImportedFunction(dllName, iltEntry, addrThunk);
    outer.importReferences.Add(addrThunk, reference);
    Debug.Print("{0}: {1}", addrThunk, outer.importReferences[addrThunk]);
    return true;
}
/// <summary>
/// Parses the NE (New Executable) header field by field, in on-disk order,
/// then loads the module and segment tables and computes the entry address.
/// </summary>
/// <param name="rdr">Reader positioned at the start of the NE header.</param>
/// <returns>True on success; false if any field cannot be read.</returns>
/// <exception cref="BadImageFormatException">Thrown when the NE magic is wrong.</exception>
private bool LoadNeHeader(ImageReader rdr)
{
    ushort magic;
    // 0x454E is "NE" when the two bytes are read little-endian.
    // NOTE(review): a bad magic throws while every later failure returns false —
    // confirm this asymmetry is intended.
    if (!rdr.TryReadLeUInt16(out magic) || magic != 0x454E)
    {
        throw new BadImageFormatException("Not a valid NE header.");
    }
    ushort linker;
    if (!rdr.TryReadLeUInt16(out linker)) { return(false); }
    if (!rdr.TryReadLeUInt16(out offEntryTable)) { return(false); }
    ushort cbEntryTable;
    if (!rdr.TryReadLeUInt16(out cbEntryTable)) { return(false); }
    uint crc;
    if (!rdr.TryReadLeUInt32(out crc)) { return(false); }
    byte bProgramFlags;
    if (!rdr.TryReadByte(out bProgramFlags)) { return(false); }
    byte bAppFlags;
    if (!rdr.TryReadByte(out bAppFlags)) { return(false); }
    ushort iSegAutoData;
    if (!rdr.TryReadUInt16(out iSegAutoData)) { return(false); }
    ushort cbHeapSize;
    if (!rdr.TryReadUInt16(out cbHeapSize)) { return(false); }
    ushort cbStackSize;
    if (!rdr.TryReadUInt16(out cbStackSize)) { return(false); }
    // Initial CS:IP and SS:SP register values.
    ushort cs, ip;
    if (!rdr.TryReadUInt16(out ip) || !rdr.TryReadUInt16(out cs)) { return(false); }
    ushort ss, sp;
    if (!rdr.TryReadUInt16(out sp) || !rdr.TryReadUInt16(out ss)) { return(false); }
    if (!rdr.TryReadUInt16(out cSeg)) { return(false); }
    ushort cModules;
    if (!rdr.TryReadUInt16(out cModules)) { return(false); }
    ushort cbNonResidentNames;
    if (!rdr.TryReadUInt16(out cbNonResidentNames)) { return(false); }
    // Table offsets are relative to the start of the NE header (lfaNew).
    ushort offSegTable;
    if (!rdr.TryReadUInt16(out offSegTable)) { return(false); }
    if (!rdr.TryReadUInt16(out offRsrcTable)) { return(false); }
    if (!rdr.TryReadUInt16(out offResidentNameTable)) { return(false); }
    ushort offModuleReferenceTable;
    if (!rdr.TryReadUInt16(out offModuleReferenceTable)) { return(false); }
    if (!rdr.TryReadUInt16(out offImportedNamesTable)) { return(false); }
    uint offNonResidentNameTable;
    if (!rdr.TryReadUInt32(out offNonResidentNameTable)) { return(false); }
    ushort cMoveableEntryPoints;
    if (!rdr.TryReadUInt16(out cMoveableEntryPoints)) { return(false); }
    if (!rdr.TryReadUInt16(out cbFileAlignmentShift)) { return(false); }
    ushort cResourceTableEntries;
    if (!rdr.TryReadUInt16(out cResourceTableEntries)) { return(false); }
    byte bTargetOs;
    if (!rdr.TryReadByte(out bTargetOs)) { return(false); }
    byte bOsExeFlags;
    if (!rdr.TryReadByte(out bOsExeFlags)) { return(false); }
    ushort offGanglands;
    if (!rdr.TryReadUInt16(out offGanglands)) { return(false); }
    ushort cbGanglands;
    if (!rdr.TryReadUInt16(out cbGanglands)) { return(false); }
    ushort cbMinCodeSwapArea;
    if (!rdr.TryReadUInt16(out cbMinCodeSwapArea)) { return(false); }
    ushort wWindowsVersion;
    if (!rdr.TryReadUInt16(out wWindowsVersion)) { return(false); }
    LoadModuleTable(this.lfaNew + offModuleReferenceTable, cModules);
    LoadSegments(this.lfaNew + offSegTable);
    // Entry point: segment index is 1-based.
    // NOTE(review): assumes cs >= 1 and that LoadSegments populated 'segments';
    // cs == 0 would index out of range — confirm upstream guarantees.
    this.addrEntry = segments[cs - 1].Address + ip;
    return(true);
}
/// <summary>
/// Reads a code address of the given size from the image.
/// </summary>
/// <exception cref="NotImplementedException">Always; not yet supported by this architecture.</exception>
public override Address ReadCodeAddress(int size, ImageReader rdr, ProcessorState state)
{
    throw new NotImplementedException();
}
/// <summary>
/// Switches the player's transport mode: stops riding audio, sets up
/// horse/cart riding sounds and textures, and for ships teleports the player
/// between their recorded boarding position and the ship's location.
/// </summary>
/// <param name="transportMode">Mode to switch into.</param>
private void UpdateMode(TransportModes transportMode)
{
    // Update the transport mode and stop any riding sounds playing.
    mode = transportMode;
    if (ridingAudioSource.isPlaying)
    {
        ridingAudioSource.Stop();
    }

    if (mode == TransportModes.Horse || mode == TransportModes.Cart)
    {
        // Tell player motor we're riding.
        playerMotor.IsRiding = true;

        // Setup appropriate riding sounds.
        SoundClips sound = (mode == TransportModes.Horse) ? horseRidingSound2 : cartRidingSound;
        ridingAudioSource.clip = dfAudioSource.GetAudioClip((int)sound);

        // Setup appropriate riding textures (4 animation frames).
        string textureName = (mode == TransportModes.Horse) ? horseTextureName : cartTextureName;
        for (int i = 0; i < 4; i++)
        {
            ridingTexures[i] = ImageReader.GetImageData(textureName, 0, i, true, true);
        }
        ridingTexture = ridingTexures[0];

        // Initialise neighing timer.
        neighTime = Time.time + Random.Range(1, 5);
    }
    else
    {
        // Tell player motor we're not riding.
        playerMotor.IsRiding = false;
    }

    if (mode == TransportModes.Ship)
    {
        GameManager.Instance.PlayerMotor.CancelMovement = true;
        SerializablePlayer serializablePlayer = GetComponent <SerializablePlayer>();
        DaggerfallUI.Instance.FadeBehaviour.SmashHUDToBlack();
        StreamingWorld world = GameManager.Instance.StreamingWorld;
        DFPosition shipCoords = DaggerfallBankManager.GetShipCoords();

        // Is there recorded position before boarding and is player on the ship?
        if (IsOnShip())
        {
            // Check for terrain sampler changes. (so don't fall through floor)
            StreamingWorld.RepositionMethods reposition = StreamingWorld.RepositionMethods.None;
            if (boardShipPosition.terrainSamplerName != DaggerfallUnity.Instance.TerrainSampler.ToString() ||
                boardShipPosition.terrainSamplerVersion != DaggerfallUnity.Instance.TerrainSampler.Version)
            {
                reposition = StreamingWorld.RepositionMethods.RandomStartMarker;
                if (DaggerfallUI.Instance.DaggerfallHUD != null)
                {
                    DaggerfallUI.Instance.DaggerfallHUD.PopupText.AddText("Terrain sampler changed. Repositioning player.");
                }
            }

            // Restore player position from before boarding ship, caching ship scene first.
            SaveLoadManager.CacheScene(world.SceneName);
            // TODO: Should this should move into teleport to support other teleports? Issue only if inside. (e.g. recall)
            DFPosition mapPixel = MapsFile.WorldCoordToMapPixel(boardShipPosition.worldPosX, boardShipPosition.worldPosZ);
            world.TeleportToCoordinates(mapPixel.X, mapPixel.Y, reposition);
            serializablePlayer.RestorePosition(boardShipPosition);
            boardShipPosition = null;
            // Restore cached scene (ship is special case, cache will not be cleared)
            SaveLoadManager.RestoreCachedScene(world.SceneName);
        }
        else
        {
            // Record current player position before boarding ship, and cache scene. (ship is special case, cache will not be cleared)
            boardShipPosition = serializablePlayer.GetPlayerPositionData();
            SaveLoadManager.CacheScene(world.SceneName);
            // Teleport to the players ship, restoring cached scene.
            world.TeleportToCoordinates(shipCoords.X, shipCoords.Y, StreamingWorld.RepositionMethods.RandomStartMarker);
            SaveLoadManager.RestoreCachedScene(world.SceneName);
        }
        DaggerfallUI.Instance.FadeBehaviour.FadeHUDFromBlack();
        // Ship teleport is instantaneous; drop back to foot travel immediately.
        mode = TransportModes.Foot;
    }
}
/// <summary>
/// Creates the MIPS disassembler for the given code reader.
/// </summary>
/// <param name="imageReader">Reader positioned at the code to disassemble.</param>
public override IEnumerable <MachineInstruction> CreateDisassembler(ImageReader imageReader)
{
    var dasm = new MipsDisassembler(this, imageReader);
    return dasm;
}
/// <summary>
/// Creates an x86 disassembler with 32-bit word and address sizes and no REX support.
/// </summary>
/// <param name="rdr">Reader positioned at the code to disassemble.</param>
public override X86Disassembler CreateDisassembler(ImageReader rdr)
{
    var dasm = new X86Disassembler(this, rdr, PrimitiveType.Word32, PrimitiveType.Word32, false);
    return dasm;
}
/// <summary>
/// Scans the image for 32-bit pointers to any of the known addresses,
/// yielding the addresses where such pointers occur.
/// </summary>
public override IEnumerable <Address> CreatePointerScanner(ImageMap map, ImageReader rdr, IEnumerable <Address> knownAddresses, PointerScannerFlags flags)
{
    // The scanner compares raw 32-bit linear addresses, so project the known set first.
    var knownLinAddresses = knownAddresses
        .Select(a => (uint)a.ToLinear())
        .ToHashSet();
    var scanner = new MipsPointerScanner32(rdr, knownLinAddresses, flags);
    return scanner.Select(l => Address.Ptr32(l));
}
/// <summary>
/// Scans the image for x86-64 instructions referencing any of the known
/// addresses, mapping each hit's linear address back to an <see cref="Address"/>.
/// </summary>
public override IEnumerable <Address> CreateInstructionScanner(ImageMap map, ImageReader rdr, IEnumerable <Address> knownAddresses, PointerScannerFlags flags)
{
    // Work in 64-bit linear address space for the scanner's comparisons.
    var knownLinAddresses = knownAddresses
        .Select(a => (ulong)a.ToLinear())
        .ToHashSet();
    var scanner = new X86PointerScanner64(rdr, knownLinAddresses, flags);
    return scanner.Select(li => map.MapLinearAddressToAddress(li));
}
/// <summary>
/// Load and validate an image file: verifies a path was supplied, that it is an
/// existing ".iso" file, and that its contents open as an ISO image; on success
/// advances the UI to the media-type screen.
/// </summary>
/// <param name="filePath">The path to the file.</param>
public void LoadImageFile(string filePath)
{
    // Check image selected
    if (String.IsNullOrEmpty(filePath))
    {
        this.logging.Write("File not specified");
        this.view.DisplayMessage(Properties.Resources.IsoNotSelectedMessage, Properties.Resources.IsoNotSelectedCaption);
        return;
    }

    // Check image extension is ".iso"
    FileInfo image = null;
    try
    {
        image = new FileInfo(filePath);
    }
    catch (ArgumentException)
    {
        // Invalid path characters: leave image null and report via the check below.
    }
    if (image == null || !image.Exists || !image.Extension.Equals(".iso", StringComparison.OrdinalIgnoreCase))
    {
        this.logging.Write("Invalid file specified");
        this.view.DisplayMessage(Properties.Resources.IsoInvalidMessage, Properties.Resources.IsoInvalidCaption);
        return;
    }

    // Check image contents can be read.
    this.imageReader = new ImageReader(image);
    if (!this.imageReader.Open())
    {
        this.logging.Write("File contents is not an ISO image.");
        this.view.DisplayMessage(Properties.Resources.IsoInvalidMessage, Properties.Resources.IsoInvalidCaption);
        return;
    }

    // Valid image: move the wizard to the media-type selection screen.
    this.view.DisplayMediaTypeScreen();
    this.view.ScreenTitle = Properties.Resources.TitleMediaType;
    return;
}
/// <summary>
/// Saves the newest available frame as a JPEG file.
/// </summary>
public override void onImageAvailable(ImageReader reader)
{
    var image = reader.acquireNextImage();
    var fileName = outerInstance.createFileName() + ".jpg";
    outerInstance.mImageSaver.save(image, fileName);
}
/// <summary>
/// Constructs a RISC-V disassembler over the given image reader.
/// </summary>
/// <param name="arch">Owning architecture instance.</param>
/// <param name="rdr">Reader positioned at the code to disassemble.</param>
public RiscVDisassembler(RiscVArchitecture arch, ImageReader rdr)
{
    this.rdr = rdr;
    this.arch = arch;
}
/// <summary>
/// Example: reads a DICOM image from args[0], copies its pixel data into a new
/// Pixel Data element with the X/Y dimensions deliberately swapped, and writes
/// the result to args[1]. Returns 0 on success, 1 on failure.
/// </summary>
public static int Main(string[] args)
{
    string file1 = args[0];
    string file2 = args[1];
    ImageReader reader = new ImageReader();
    reader.SetFileName( file1 );
    bool ret = reader.Read();
    if( !ret )
    {
        return 1;
    }
    Image image = new Image();
    Image ir = reader.GetImage();
    image.SetNumberOfDimensions( ir.GetNumberOfDimensions() );
    // Just for fun:
    //int dircos = ir.GetDirectionCosines();
    //t = gdcm.Orientation.GetType(dircos);
    //int l = gdcm.Orientation.GetLabel(t);
    //System.Console.WriteLine( "Orientation label:" + l );

    // Set the dimensions,
    // 1. either one at a time
    //image.SetDimension(0, ir.GetDimension(0) );
    //image.SetDimension(1, ir.GetDimension(1) );
    // 2. the array at once
    uint[] dims = {0, 0};
    // Just for fun let's invert the dimensions:
    dims[0] = ir.GetDimension(1);
    dims[1] = ir.GetDimension(0);
    ir.SetDimensions( dims );
    PixelFormat pixeltype = ir.GetPixelFormat();
    image.SetPixelFormat( pixeltype );
    PhotometricInterpretation pi = ir.GetPhotometricInterpretation();
    image.SetPhotometricInterpretation( pi );
    // Copy the raw pixel bytes into a fresh Pixel Data element (7FE0,0010).
    DataElement pixeldata = new DataElement( new Tag(0x7fe0,0x0010) );
    byte[] str1 = new byte[ ir.GetBufferLength()];
    ir.GetBuffer( str1 );
    //System.Console.WriteLine( ir.GetBufferLength() );
    pixeldata.SetByteValue( str1, new VL( (uint)str1.Length ) );
    // NOTE(review): the element is attached to the reader's image (ir), not to the
    // locally configured 'image', which ends up unused — confirm this is intended.
    //image.SetDataElement( pixeldata );
    ir.SetDataElement( pixeldata );
    ImageWriter writer = new ImageWriter();
    writer.SetFileName( file2 );
    writer.SetFile( reader.GetFile() );
    writer.SetImage( ir );
    ret = writer.Write();
    if( !ret )
    {
        return 1;
    }
    return 0;
}
/// <summary>
/// Creates an x86 disassembler configured with a 32-bit default word size,
/// 64-bit addresses, and REX prefix decoding enabled.
/// </summary>
/// <param name="rdr">Reader positioned at the code to disassemble.</param>
/// <param name="options">Mode-specific disassembler options.</param>
public override X86Disassembler CreateDisassembler(ImageReader rdr, X86Options options)
{
    var dasm = new X86Disassembler(this, rdr, PrimitiveType.Word32, PrimitiveType.Word64, true);
    return dasm;
}
/// <summary>
/// Creates a scanner that searches the image for instructions referencing any of
/// the known addresses, yielding the addresses of candidate hits.
/// </summary>
/// <param name="map">Image map used to translate scan results to addresses.</param>
/// <param name="rdr">Reader over the bytes to scan.</param>
/// <param name="knownAddresses">Addresses whose references are being sought.</param>
/// <param name="flags">Options controlling which reference kinds to look for.</param>
public abstract IEnumerable <Address> CreateInstructionScanner(ImageMap map, ImageReader rdr, IEnumerable <Address> knownAddresses, PointerScannerFlags flags);
/// <summary>
/// Queues the newly available frame on the input image list for later processing.
/// </summary>
public override void onImageAvailable(ImageReader reader)
{
    // add image to input image list
    var frame = reader.acquireNextImage();
    outerInstance.mInputImageList.Add(frame);
}