/* Setup/Teardown */

/// <summary>
/// Creates a ONE archive given the bytes of a Heroes ONE file.
/// </summary>
/// <param name="data">Raw bytes of the ONE archive. The array is pinned so the
/// internal pointers handed to <c>SetupPointers</c> remain stable.</param>
public OneArchive(byte[] data)
{
    // Pin the managed array; the archive reads it through raw pointers.
    _handle = GCHandle.Alloc(data, GCHandleType.Pinned);

    // The handle was assigned just above, so dereferencing it here is safe.
    var archiveStart = (byte*)_handle?.AddrOfPinnedObject();
    SetupPointers(archiveStart);
}
// Per-frame driver for the Android OpenCV camera plugin.
// State machine: 0 = waiting for the Java OpenCV loader, 1 = opening the
// native camera, 2 = streaming frames, -1 = camera failed to open.
void Update()
{
    if (state == 0)
    {
        // Poll the Java-side loader until OpenCV reports a successful init.
        AndroidJavaClass UnityOpenCVLoaderJava = new AndroidJavaClass(UNITY_OPENCV_LOADER);
        var b = UnityOpenCVLoaderJava.CallStatic<Boolean>("isSuccess");
        if (b)
        {
            state = 1;
        }
    }
    else if (state == 1)
    {
        // Create the native camera instance once, then try to open it.
        if (cameraInstance == IntPtr.Zero)
            cameraInstance = CreateCameraInstance();
        if (Open(cameraInstance, 0, width, height))
        {
            texture = new Texture2D(width, height, TextureFormat.ARGB32, false);
            // GetPixels32 returns a copy; pin it so native code can write frames
            // into it without marshaling.
            pixels = texture.GetPixels32();
            pixelsHandle = GCHandle.Alloc(pixels, GCHandleType.Pinned);
            pixelsPtr = pixelsHandle.AddrOfPinnedObject();
            GetComponent<Renderer>().material.mainTexture = texture;
            state = 2;
        }
        else
        {
            // Camera could not be opened; give up (no retry).
            state = -1;
        }
    }
    else if (state == 2)
    {
        // Pull the latest frame from native code and upload it to the texture.
        getCameraTexture(cameraInstance, pixelsPtr, width, height);
        texture.SetPixels32(pixels);
        texture.Apply();
    }
    // NOTE(review): pixelsHandle is never freed in this method — presumably
    // released on teardown elsewhere; confirm to avoid a pinned-memory leak.
}
// Update is called once per frame.
// Renders myCamera off-screen into a temporary RenderTexture, copies the pixels
// into a pinned Color32 buffer, and passes that buffer to the native
// fullWindow() display call. All per-frame resources are released before return.
void Update()
{
    // off-screen rendering
    var camtex = RenderTexture.GetTemporary(camWidth, camHeight, 24, RenderTextureFormat.Default, RenderTextureReadWrite.Default, 1);
    myCamera.targetTexture = camtex;
    myCamera.Render();
    RenderTexture.active = camtex;
    // ReadPixels copies from the currently active render target into tex.
    tex.ReadPixels(new Rect(0, 0, camtex.width, camtex.height), 0, 0);
    tex.Apply();

    // Convert texture to ptr: pin the pixel copy so native code can read it.
    texturePixels_ = tex.GetPixels32();
    texturePixelsHandle_ = GCHandle.Alloc(texturePixels_, GCHandleType.Pinned);
    texturePixelsPtr_ = texturePixelsHandle_.AddrOfPinnedObject();

    // Show a window. The handle is freed immediately after, so this assumes
    // fullWindow consumes the pixels during the call and does not retain the
    // pointer — TODO confirm against the native implementation.
    fullWindow(windowName, displayNum, texturePixelsPtr_, camWidth, camHeight);
    texturePixelsHandle_.Free();

    // Restore render state and hand the temporary texture back to the pool.
    RenderTexture.active = null;
    RenderTexture.ReleaseTemporary(camtex);
    myCamera.targetTexture = null;
}
// Synchronously downloads a Wwise sound bank from in_bankPath via WWW, pins the
// downloaded bytes, and hands the pinned memory to the sound engine.
// NOTE(review): the busy-wait below blocks the calling thread until the WWW
// download completes — presumably this runs off the main thread (WWW progresses
// on the main thread); confirm at the call site.
private static AKRESULT DoLoadBank(string in_bankPath)
{
    ms_www = new WWW(in_bankPath);

    while (!ms_www.isDone)
    {
#if ! UNITY_METRO
        // Sleep between polls to avoid spinning at 100% CPU.
        System.Threading.Thread.Sleep(WaitMs);
#endif // #if ! UNITY_METRO
    }

    uint in_uInMemoryBankSize = 0;
    try
    {
        // Pin the bank bytes; the sound engine reads the bank in place, so the
        // pinned array must stay alive until the bank is unloaded.
        ms_pinnedArray = GCHandle.Alloc(ms_www.bytes, GCHandleType.Pinned);
        ms_pInMemoryBankPtr = ms_pinnedArray.AddrOfPinnedObject();
        in_uInMemoryBankSize = (uint)ms_www.bytes.Length;
    }
    catch
    {
        // Download failed or bytes unavailable — report a generic failure.
        return AKRESULT.AK_Fail;
    }

    AKRESULT result = AkSoundEngine.LoadBank(ms_pInMemoryBankPtr, in_uInMemoryBankSize, out ms_bankID);
    return result;
}
/// <summary>
/// Ensures the pinned frame buffer can hold a frame of the given dimensions,
/// reallocating (and re-pinning) it when the requested size has grown.
/// </summary>
/// <param name="width">Requested frame width in pixels.</param>
/// <param name="height">Requested frame height in pixels.</param>
private void CreateBuffer(int width, int height)
{
    // Free the existing buffer if it cannot hold a frame of the REQUESTED size.
    // BUG FIX: the original compared against the old _frameWidth * _frameHeight,
    // which the existing buffer always satisfies by construction, so a larger
    // incoming frame never triggered a reallocation and the buffer stayed too
    // small. Compare against the requested dimensions instead.
    if (_frameHandle.IsAllocated && _frameData != null)
    {
        if (_frameData.Length < width * height)
        {
            FreeBuffer();
        }
    }

    if (_frameData == null)
    {
        _frameWidth = width;
        _frameHeight = height;
        _frameData = new Color32[_frameWidth * _frameHeight];
        // Pin the pixel array so native code can write frames into it directly.
        _frameHandle = GCHandle.Alloc(_frameData, GCHandleType.Pinned);
        _framePointer = _frameHandle.AddrOfPinnedObject();
#if TEXTURETEST
        _testTexture = new Texture2D(_frameWidth, _frameHeight, TextureFormat.ARGB32, false, false);
        _testTexture.Apply(false, false);
#endif
    }
}
// Use this for initialization.
// Opens the native camera, configures it, and binds a pinned pixel buffer that
// native code can fill directly; the texture backs this object's renderer.
void Start()
{
    camera_ = getCamera(device);
    setCameraProp(camera_, width, height, fps);
    texture_ = new Texture2D(width, height, TextureFormat.ARGB32, false);
    // GetPixels32 returns a copy of the texture's pixels; pin it so the native
    // side can write frames into it without marshaling.
    pixels_ = texture_.GetPixels32();
    pixels_handle_ = GCHandle.Alloc(pixels_, GCHandleType.Pinned);
    pixels_ptr_ = pixels_handle_.AddrOfPinnedObject();
    GetComponent<Renderer>().material.mainTexture = texture_;
    // NOTE(review): pixels_handle_ is not freed here — confirm it is released
    // (e.g. in OnDestroy) to avoid leaking a pinned array.
}
/// <summary>
/// Creates a square matrix of the given edge length, backed by a pinned
/// <c>int</c> array so the raw pointer stays valid for the matrix lifetime.
/// </summary>
/// <param name="size">Edge length; must be one of the sizes in <c>_validSizes</c>.</param>
/// <exception cref="ArgumentOutOfRangeException">Thrown when <paramref name="size"/> is not supported.</exception>
public Matrix(int size)
{
    if (!_validSizes.Contains(size))
        throw new ArgumentOutOfRangeException(nameof(size)); // nameof is refactor-safe

    _size = size;
    _data = new int[size * size];
    // Pin the backing store; _dataPtr aliases it for unsafe element access.
    _dataPtrHandle = GCHandle.Alloc(_data, GCHandleType.Pinned);
    _dataPtr = (int*)_dataPtrHandle.AddrOfPinnedObject().ToPointer();
}
/// <summary>
/// Update the view. This method triggers the view to be rendered to the
/// underlaying texture.
/// </summary>
/// <returns>true if the view was actually updated</returns>
public bool UpdateView()
{
    if (View != null)
    {
        // BUG FIX: the previous version allocated a fresh pin on every call
        // without releasing the old one, leaking one GCHandle per update.
        // Release the prior pin (if any) before re-pinning m_Data.
        if (m_DataPin.IsAllocated)
        {
            m_DataPin.Free();
        }
        m_DataPin = GCHandle.Alloc(m_Data, System.Runtime.InteropServices.GCHandleType.Pinned);
        // Width * Height * 4: one BGRA/RGBA byte quad per pixel.
        return View.GetAsBitmap(m_DataPin.AddrOfPinnedObject(), Width * Height * 4);
    }
    return false;
}
/// <summary>
/// Adds sample data to a "push" stream.
/// </summary>
/// <param name="buffer">
/// Pointer to the sample data... NULL = allocate space in the queue buffer so that there is at least
/// length bytes of free space. Empty array can be used to just check how much data is queued.
/// </param>
/// <returns>The number of bytes read from buffer.</returns>
/// <exception cref="BassErrorException">
/// Some error occur to call a Bass function, check the error code and error message
/// to get more error information.
/// </exception>
/// <exception cref="BassNotLoadedException">
/// Bass DLL not loaded, you must use <see cref="BassManager.Initialize" /> to
/// load Bass DLL first.
/// </exception>
/// <exception cref="NotAvailableException">Channel object is no longer available.</exception>
public virtual int PutData(byte[] buffer)
{
    CheckAvailable();

    // Pin only when there is actual data; null/empty maps to IntPtr.Zero below.
    GCHandle? bufferHandle = (buffer != null && buffer.Length != 0)
        ? GCHandle.Alloc(buffer, GCHandleType.Pinned)
        : (GCHandle?)null;
    try
    {
        return AudioStreamModule.StreamPutDataFunction.CheckResult(
            AudioStreamModule.StreamPutDataFunction.Delegate(
                Handle,
                bufferHandle?.AddrOfPinnedObject() ?? IntPtr.Zero,
                buffer?.Length ?? 0));
    }
    finally
    {
        // BUG FIX: the pinned handle was never released, leaking one pin per
        // call. BASS_StreamPutData copies the data into the stream's queue
        // during the call, so it is safe to unpin immediately afterwards.
        bufferHandle?.Free();
    }
}
// Opens the native camera and prepares a pinned 640x480 RGBA pixel buffer that
// the native side can write into; the texture is shown on this renderer.
void Start()
{
    camera_ = get_camera();
    if (camera_ == IntPtr.Zero)
    {
        // Native open failed; leave the component inert.
        Debug.LogError("camera cannot be opened.");
        return;
    }
    texture_ = new Texture2D(640, 480, TextureFormat.RGBA32, false);
    // Pin a copy of the pixels so its address stays valid for native writes.
    pixels_ = texture_.GetPixels32();
    handle_ = GCHandle.Alloc(pixels_, GCHandleType.Pinned);
    ptr_ = handle_.AddrOfPinnedObject();
    GetComponent<Renderer>().material.mainTexture = texture_;
    // NOTE(review): handle_ is not freed here — confirm it is released on
    // teardown to avoid leaking the pinned array.
}
// Coroutine: downloads the bank file, pins its bytes — re-copying into an
// over-allocated array when the platform requires a specific memory alignment —
// and loads the bank into the Wwise sound engine.
IEnumerator LoadFile()
{
    ms_www = new WWW(m_bankPath);

    yield return ms_www;

    uint in_uInMemoryBankSize = 0;

    // Allocate an aligned buffer
    try
    {
        ms_pinnedArray = GCHandle.Alloc(ms_www.bytes, GCHandleType.Pinned);
        ms_pInMemoryBankPtr = ms_pinnedArray.AddrOfPinnedObject();
        in_uInMemoryBankSize = (uint)ms_www.bytes.Length;

        // Array inside the WWW object is not aligned. Allocate a new array for which we can guarantee the alignment.
        if ((ms_pInMemoryBankPtr.ToInt64() & AK_BANK_PLATFORM_DATA_ALIGNMENT_MASK) != 0)
        {
            // Over-allocate by one alignment unit so an aligned offset always exists.
            byte[] alignedBytes = new byte[ms_www.bytes.Length + AK_BANK_PLATFORM_DATA_ALIGNMENT];
            GCHandle new_pinnedArray = GCHandle.Alloc(alignedBytes, GCHandleType.Pinned);
            IntPtr new_pInMemoryBankPtr = new_pinnedArray.AddrOfPinnedObject();

            int alignedOffset = 0;

            // New array is not aligned, so we will need to use an offset inside it to align our data.
            if ((new_pInMemoryBankPtr.ToInt64() & AK_BANK_PLATFORM_DATA_ALIGNMENT_MASK) != 0)
            {
                // Round the pointer up to the next alignment boundary.
                Int64 alignedPtr = (new_pInMemoryBankPtr.ToInt64() + AK_BANK_PLATFORM_DATA_ALIGNMENT_MASK) & ~AK_BANK_PLATFORM_DATA_ALIGNMENT_MASK;
                alignedOffset = (int)(alignedPtr - new_pInMemoryBankPtr.ToInt64());
                new_pInMemoryBankPtr = new IntPtr(alignedPtr);
            }

            // Copy the bank's bytes in our new array, at the correct aligned offset.
            Array.Copy(ms_www.bytes, 0, alignedBytes, alignedOffset, ms_www.bytes.Length);

            ms_pInMemoryBankPtr = new_pInMemoryBankPtr;

            // Drop the pin on the original WWW bytes; keep the aligned copy pinned
            // instead (it must stay alive until the bank is unloaded).
            ms_pinnedArray.Free();
            ms_pinnedArray = new_pinnedArray;
        }
    }
    catch
    {
        // Download or pinning failed; abort the coroutine quietly.
        yield break;
    }

    AKRESULT result = AkSoundEngine.LoadBank(ms_pInMemoryBankPtr, in_uInMemoryBankSize, out ms_bankID);

    if (result != AKRESULT.AK_Success)
    {
        Debug.LogError("AkMemBankLoader: bank loading failed with result " + result.ToString());
    }
}
// Loads an image through the native plugin: queries its size, pins a pixel
// buffer for native code to fill, then uploads the pixels and unpins.
void Start()
{
    var path = GetFilePath(imagePath);

    int width, height;
    if (!get_image_size(path, out width, out height))
    {
        Debug.LogFormat("{0} was not found", path);
        return;
    }

    texture_ = new Texture2D(width, height, TextureFormat.RGB24, false);
    // Nearest-neighbor filtering keeps the image crisp when scaled.
    texture_.filterMode = FilterMode.Point;
    pixels_ = texture_.GetPixels32();
    // Pin only for the duration of the native read below.
    pixels_handle_ = GCHandle.Alloc(pixels_, GCHandleType.Pinned);
    pixels_ptr_ = pixels_handle_.AddrOfPinnedObject();
    GetComponent<Renderer>().material.mainTexture = texture_;

    // Native side decodes the file straight into the pinned buffer.
    read_image(path, pixels_ptr_);
    texture_.SetPixels32(pixels_);
    texture_.Apply();
    pixels_handle_.Free();
}
// Builds a 10x10 checker texture: a pinned pixel buffer is handed to the native
// plugin, which fills in the pattern; the result is uploaded and the pin freed.
void Start()
{
    // Create the texture.
    texture_ = new Texture2D(10, 10, TextureFormat.RGB24, false);
    // Use nearest-neighbor magnification so the checker stays sharp.
    texture_.filterMode = FilterMode.Point;
    // Get the texture contents as a Color32 array (a copy).
    pixels_ = texture_.GetPixels32();
    // Pin the array so the GC cannot move or collect it.
    pixels_handle_ = GCHandle.Alloc(pixels_, GCHandleType.Pinned);
    // Get the address of the pinned pixel array.
    pixels_ptr_ = pixels_handle_.AddrOfPinnedObject();
    // Assign the texture to the object this script is attached to.
    GetComponent<Renderer>().material.mainTexture = texture_;
    // Have the native side generate the checker pattern (cell size 4).
    create_check_texture(pixels_ptr_, texture_.width, texture_.height, 4);
    // Upload the pixels; Apply(false, true) marks the texture no longer readable.
    texture_.SetPixels32(pixels_);
    texture_.Apply(false, true);
    // Make the array eligible for GC again.
    pixels_handle_.Free();
}
// Initializes the Awesomium web view: creates a texture of the requested size,
// pins its pixel array for the native wrapper to paint into, and registers the
// SetPixels/ApplyTexture callbacks. Marks the browser interactive when done.
public void InitAwesomium(int width, int height)
{
    Debug.Log("init awsommium");
    this.width = width;
    this.height = height;

    m_texture = new Texture2D(width, height, TextureFormat.ARGB32, true);
    //Get Color[] (pixels) from texture
    m_pixels = m_texture.GetPixels(0);
    // Create window handle id - future usage
    m_TextureID = m_texture.GetInstanceID();
    Debug.Log("textID : " + m_TextureID);
    // assign m_texture to this GUITexture texture
    gameObject.renderer.material.mainTexture = m_texture;
    // Create GCHandle - pin m_pixels so the native web view can write into it.
    // NOTE(review): the handle is not freed in this method — confirm it is
    // released when the web view is destroyed.
    m_pixelsHandler = GCHandle.Alloc(m_pixels, GCHandleType.Pinned);

    AwesomiumWrapper.Init();
    // The native view paints into the pinned buffer and invokes the two
    // callbacks to push the result into the Unity texture.
    AwesomiumWrapper.CreateAwesomiumWebView(m_TextureID, m_pixelsHandler.AddrOfPinnedObject(), width, height, this.SetPixels, this.ApplyTexture);
    isAwesomiumInit = true;
    GetComponent<BrowserGUIEvents>().interactive = true;
    Debug.Log("done init awsommium");
}
/// <summary>
/// Loads a movie from a TextAsset resource, pinning its bytes and handing them
/// to the native player.
/// </summary>
/// <param name="autoPlay">Whether playback should start immediately.</param>
/// <param name="path">Resource path of the movie TextAsset.</param>
/// <returns>true when the movie was loaded successfully.</returns>
public bool LoadMovieFromResource(bool autoPlay, string path)
{
    bool result = false;

    UnloadMovie();

    _textAsset = Resources.Load(path, typeof(TextAsset)) as TextAsset;
    if (_textAsset != null)
    {
        // Read TextAsset.bytes exactly once: each access can materialize a
        // fresh managed copy, so caching guarantees the array we pin is the
        // same array whose length we pass to the native loader (and avoids
        // redundant allocations of the whole movie).
        byte[] bytes = _textAsset.bytes;
        if (bytes != null && bytes.Length > 0)
        {
            // Pinned for the lifetime of the loaded movie; released by UnloadMovie.
            _bytesHandle = GCHandle.Alloc(bytes, GCHandleType.Pinned);
            result = LoadMovieFromMemory(autoPlay, path, _bytesHandle.AddrOfPinnedObject(),
                                         (uint)bytes.Length, FilterMode.Bilinear, TextureWrapMode.Clamp);
        }
    }

    if (!result)
    {
        Debug.LogError("[AVProWindowsMedia] Unable to load resource " + path);
    }

    return result;
}
// Update is called once per frame.
// Grabs the current Kinect IR frame and depth data, pins the IR pixels, and
// runs native template matching against the known block templates.
// Intended pipeline (from the original notes): fill `blocks` ->
// IR background subtraction -> template matching.
void Update()
{
    // Get the IR image from the Kinect.
    tex = infraredSourceManagerScript.GetInfraredTexture();

    // Convert texture to ptr
    texturePixels_ = tex.GetPixels32();
    texturePixelsHandle_ = GCHandle.Alloc(texturePixels_, GCHandleType.Pinned);
    texturePixelsPtr_ = texturePixelsHandle_.AddrOfPinnedObject();
    // NOTE(review): a new pin is allocated every frame and never freed here —
    // confirm it is released elsewhere, otherwise pins accumulate per frame.

    // get new depth data from DepthSourceManager.
    ushort[] rawdata = multiSourceManagerScript.GetDepthData();
    int depthWidth = multiSourceManagerScript.GetdepthWidth();
    int depthHeight = multiSourceManagerScript.GetdepthHeight();

    // Background subtraction (currently disabled):
    //backGroundDifference(rawdata, texturePixelsPtr_, depthWidth, depthHeight, noise, humanNoise, texturePixelsPtr2_);

    // Template matching — the background-subtracted variant is disabled; the raw
    // IR pixels are used directly instead:
    //templateMatchingManager(texturePixelsPtr2_, rawdata, blocks, blocknum, desk_pos_y, desk_pos_z);
    templateMatchingManager(texturePixelsPtr_, rawdata, blocks, blocknum, desk_pos_y, desk_pos_z);
}
// Applies a statics patch ("dif") to the tile matrix: for every patched block,
// reads its static tiles from the data file (located via the lookup file) and
// rebuilds the block's 8x8 grid of hued tiles in StaticBlocks.
// Returns the number of patched blocks processed.
private int PatchStatics(TileMatrix matrix, string dataPath, string indexPath, string lookupPath)
{
    using (
        FileStream fsData = new FileStream(dataPath, FileMode.Open, FileAccess.Read, FileShare.Read),
                   fsIndex = new FileStream(indexPath, FileMode.Open, FileAccess.Read, FileShare.Read),
                   fsLookup = new FileStream(lookupPath, FileMode.Open, FileAccess.Read, FileShare.Read))
    {
        using (BinaryReader indexReader = new BinaryReader(fsIndex), lookupReader = new BinaryReader(fsLookup))
        {
            // Index entries are 4 bytes (block ID); lookup entries are 12 bytes
            // (offset, length, extra). Process only as many as both files hold.
            int count = Math.Min((int)(indexReader.BaseStream.Length / 4), (int)(lookupReader.BaseStream.Length / 12));

            // One tile list per cell of the 8x8 block grid, reused across blocks.
            var lists = new HuedTileList[8][];

            for (int x = 0; x < 8; ++x)
            {
                lists[x] = new HuedTileList[8];

                for (int y = 0; y < 8; ++y)
                {
                    lists[x][y] = new HuedTileList();
                }
            }

            for (int i = 0; i < count; ++i)
            {
                int blockID = indexReader.ReadInt32();
                int blockX = blockID / matrix.BlockHeight;
                int blockY = blockID % matrix.BlockHeight;

                int offset = lookupReader.ReadInt32();
                int length = lookupReader.ReadInt32();
                lookupReader.ReadInt32(); // Extra

                // No data for this block: install the shared empty block marker.
                if (offset < 0 || length <= 0)
                {
                    if (StaticBlocks[blockX] == null)
                    {
                        StaticBlocks[blockX] = new HuedTile[matrix.BlockHeight][][][];
                    }
                    StaticBlocks[blockX][blockY] = TileMatrix.EmptyStaticBlock;
                    continue;
                }

                fsData.Seek(offset, SeekOrigin.Begin);

                // Each raw static tile record is 7 bytes on disk.
                int tileCount = length / 7;

                // Grow the reusable tile buffer if this block is the largest so far.
                if (m_TileBuffer.Length < tileCount)
                {
                    m_TileBuffer = new StaticTile[tileCount];
                }

                StaticTile[] staTiles = m_TileBuffer;

                // Pin the struct array so the raw bytes can be blitted onto it.
                GCHandle gc = GCHandle.Alloc(staTiles, GCHandleType.Pinned);
                try
                {
                    if (m_Buffer == null || m_Buffer.Length < length)
                    {
                        m_Buffer = new byte[length];
                    }

                    // NOTE(review): the Read return value is ignored — a short
                    // read would silently leave stale bytes; confirm acceptable.
                    fsData.Read(m_Buffer, 0, length);

                    // Reinterpret the raw bytes as StaticTile structs in place.
                    Marshal.Copy(m_Buffer, 0, gc.AddrOfPinnedObject(), length);

                    // Bucket each tile into its 8x8 cell within the block.
                    for (int j = 0; j < tileCount; ++j)
                    {
                        StaticTile cur = staTiles[j];
                        lists[cur.m_X & 0x7][cur.m_Y & 0x7].Add(_Art.GetLegalItemID(cur.m_ID), cur.m_Hue, cur.m_Z);
                    }

                    var tiles = new HuedTile[8][][];

                    for (int x = 0; x < 8; ++x)
                    {
                        tiles[x] = new HuedTile[8][];

                        for (int y = 0; y < 8; ++y)
                        {
                            // presumably ToArray also resets the list for reuse
                            // on the next block — TODO confirm in HuedTileList.
                            tiles[x][y] = lists[x][y].ToArray();
                        }
                    }

                    if (StaticBlocks[blockX] == null)
                    {
                        StaticBlocks[blockX] = new HuedTile[matrix.BlockHeight][][][];
                    }

                    StaticBlocks[blockX][blockY] = tiles;
                }
                finally
                {
                    gc.Free();
                }
            }

            return (count);
        }
    }
}
// Initializes the embedded Berkelium browser: creates the backing texture,
// pins its pixel array for the native renderer, creates the web window, and
// wires up the paint and external-host callbacks.
void Start()
{
    // Initialize Berkelium
    UnityBerkelium.init();

    // Create the texture that will represent the website (with optional transparency and without mipmaps)
    TextureFormat texFormat = transparency ? TextureFormat.ARGB32 : TextureFormat.RGB24;
    m_Texture = new Texture2D(width, height, texFormat, false);

    // Create the pixel array for the plugin to write into at startup
    m_Pixels = m_Texture.GetPixels(0);

    // "pin" the array in memory, so we can pass direct pointer to it's data to the plugin,
    // without costly marshaling of array of structures.
    m_PixelsHandle = GCHandle.Alloc(m_Pixels, GCHandleType.Pinned);

    // Save the texture ID
    m_TextureID = m_Texture.GetInstanceID();

    // Improve rendering at shallow angles
    m_Texture.filterMode = FilterMode.Trilinear;
    m_Texture.anisoLevel = 2;

    // Assign texture to the renderer
    if (renderer)
    {
        renderer.material.mainTexture = m_Texture;

        // Transparency?
        if (transparency)
            renderer.material.shader = Shader.Find("Transparent/Diffuse");
        else
            renderer.material.shader = Shader.Find("Diffuse");

        // The texture has to be flipped
        renderer.material.mainTextureScale = new Vector2(1, -1);
    }
    // or gui texture
    else if (GetComponent(typeof(GUITexture)))
    {
        GUITexture gui = GetComponent(typeof(GUITexture)) as GUITexture;
        gui.texture = m_Texture;
    }
    else
    {
        Debug.Log("Game object has no renderer or gui texture to assign the generated texture to!");
    }

    // Create new web window; the native side paints into the pinned pixel array.
    UnityBerkelium.Window.create(m_TextureID, m_PixelsHandle.AddrOfPinnedObject(), transparency, width, height, url);
    print("Created new web window: " + m_TextureID);

    // Paint callbacks — delegates are stored in fields so the GC cannot collect
    // them while native code still holds the function pointers.
    m_setPixelsFunc = new UnityBerkelium.SetPixelsFunc(this.SetPixels);
    m_applyTextureFunc = new UnityBerkelium.ApplyTextureFunc(this.ApplyTexture);
    UnityBerkelium.Window.setPaintFunctions(m_TextureID, m_setPixelsFunc, m_applyTextureFunc);

    // Set the external host callback (for calling Unity functions from javascript)
    m_externalHostFunc = new UnityBerkelium.ExternalHostFunc(this.onExternalHost);
    UnityBerkelium.Window.setExternalHostCallback(m_TextureID, m_externalHostFunc);
}
// Creates a self-signed certificate via the Win32 CryptoAPI and returns it as
// PFX (PKCS#12) bytes, including the exportable private key.
//
// x500          - subject distinguished name (e.g. "CN=example"); null -> "".
// startTime/endTime - validity window.
// password      - optional PFX password; zeroed from native memory afterwards.
//
// The method runs inside a constrained-execution region so the finally block
// reliably frees every native handle, including deleting the temporary key
// container created for the certificate.
public static byte[] CreateSelfSignCertificatePfx(
    string x500,
    DateTime startTime,
    DateTime endTime,
    SecureString password)
{
    byte[] pfxData;

    if (x500 == null)
    {
        x500 = "";
    }

    SystemTime startSystemTime = ToSystemTime(startTime);
    SystemTime endSystemTime = ToSystemTime(endTime);
    // Fresh GUID container name avoids colliding with existing key sets.
    string containerName = Guid.NewGuid().ToString();

    GCHandle dataHandle = new GCHandle();
    IntPtr providerContext = IntPtr.Zero;
    IntPtr cryptKey = IntPtr.Zero;
    IntPtr certContext = IntPtr.Zero;
    IntPtr certStore = IntPtr.Zero;
    IntPtr storeCertContext = IntPtr.Zero;
    IntPtr passwordPtr = IntPtr.Zero;

    // Guarantee the finally block runs even under thread abort / OOM.
    RuntimeHelpers.PrepareConstrainedRegions();
    try
    {
        // Acquire a new key container and generate an exportable exchange key.
        Check(NativeMethods.CryptAcquireContextW(
            out providerContext,
            containerName,
            null,
            1,      // PROV_RSA_FULL
            8));    // CRYPT_NEWKEYSET

        Check(NativeMethods.CryptGenKey(
            providerContext,
            1,      // AT_KEYEXCHANGE
            1,      // CRYPT_EXPORTABLE
            out cryptKey));

        IntPtr errorStringPtr;
        int nameDataLength = 0;
        byte[] nameData;

        // errorStringPtr gets a pointer into the middle of the x500 string,
        // so x500 needs to be pinned until after we've copied the value
        // of errorStringPtr.
        dataHandle = GCHandle.Alloc(x500, GCHandleType.Pinned);

        // First call: query the required encoded-name buffer size.
        if (!NativeMethods.CertStrToNameW(
            0x00010001, // X509_ASN_ENCODING | PKCS_7_ASN_ENCODING
            dataHandle.AddrOfPinnedObject(),
            3,          // CERT_X500_NAME_STR = 3
            IntPtr.Zero,
            null,
            ref nameDataLength,
            out errorStringPtr))
        {
            string error = Marshal.PtrToStringUni(errorStringPtr);
            throw new ArgumentException(error);
        }

        nameData = new byte[nameDataLength];

        // Second call: actually encode the X.500 name.
        if (!NativeMethods.CertStrToNameW(
            0x00010001, // X509_ASN_ENCODING | PKCS_7_ASN_ENCODING
            dataHandle.AddrOfPinnedObject(),
            3,          // CERT_X500_NAME_STR = 3
            IntPtr.Zero,
            nameData,
            ref nameDataLength,
            out errorStringPtr))
        {
            string error = Marshal.PtrToStringUni(errorStringPtr);
            throw new ArgumentException(error);
        }

        dataHandle.Free();

        // Re-pin: the encoded name must stay fixed while the blob references it.
        dataHandle = GCHandle.Alloc(nameData, GCHandleType.Pinned);
        CryptoApiBlob nameBlob = new CryptoApiBlob(
            nameData.Length,
            dataHandle.AddrOfPinnedObject());

        // Tie the certificate to the key container created above.
        CryptKeyProviderInformation kpi = new CryptKeyProviderInformation();
        kpi.ContainerName = containerName;
        kpi.ProviderType = 1; // PROV_RSA_FULL
        kpi.KeySpec = 1;      // AT_KEYEXCHANGE

        certContext = NativeMethods.CertCreateSelfSignCertificate(
            providerContext,
            ref nameBlob,
            0,
            ref kpi,
            IntPtr.Zero, // default = SHA1RSA
            ref startSystemTime,
            ref endSystemTime,
            IntPtr.Zero);
        Check(certContext != IntPtr.Zero);
        dataHandle.Free();

        // Stage the certificate in an in-memory store so it can be PFX-exported.
        certStore = NativeMethods.CertOpenStore(
            "Memory", // sz_CERT_STORE_PROV_MEMORY
            0,
            IntPtr.Zero,
            0x2000,   // CERT_STORE_CREATE_NEW_FLAG
            IntPtr.Zero);
        Check(certStore != IntPtr.Zero);

        Check(NativeMethods.CertAddCertificateContextToStore(
            certStore,
            certContext,
            1, // CERT_STORE_ADD_NEW
            out storeCertContext));

        // Attach the key-provider info so the private key exports with the cert.
        NativeMethods.CertSetCertificateContextProperty(
            storeCertContext,
            2, // CERT_KEY_PROV_INFO_PROP_ID
            0,
            ref kpi);

        if (password != null)
        {
            passwordPtr = Marshal.SecureStringToCoTaskMemUnicode(password);
        }

        // First export call sizes the PFX blob; second fills it.
        CryptoApiBlob pfxBlob = new CryptoApiBlob();
        Check(NativeMethods.PFXExportCertStoreEx(
            certStore,
            ref pfxBlob,
            passwordPtr,
            IntPtr.Zero,
            7)); // EXPORT_PRIVATE_KEYS | REPORT_NO_PRIVATE_KEY | REPORT_NOT_ABLE_TO_EXPORT_PRIVATE_KEY

        pfxData = new byte[pfxBlob.DataLength];
        dataHandle = GCHandle.Alloc(pfxData, GCHandleType.Pinned);
        pfxBlob.Data = dataHandle.AddrOfPinnedObject();
        Check(NativeMethods.PFXExportCertStoreEx(
            certStore,
            ref pfxBlob,
            passwordPtr,
            IntPtr.Zero,
            7)); // EXPORT_PRIVATE_KEYS | REPORT_NO_PRIVATE_KEY | REPORT_NOT_ABLE_TO_EXPORT_PRIVATE_KEY
        dataHandle.Free();
    }
    finally
    {
        // Zero and free the unmanaged copy of the password first.
        if (passwordPtr != IntPtr.Zero)
        {
            Marshal.ZeroFreeCoTaskMemUnicode(passwordPtr);
        }

        if (dataHandle.IsAllocated)
        {
            dataHandle.Free();
        }

        if (certContext != IntPtr.Zero)
        {
            NativeMethods.CertFreeCertificateContext(certContext);
        }

        if (storeCertContext != IntPtr.Zero)
        {
            NativeMethods.CertFreeCertificateContext(storeCertContext);
        }

        if (certStore != IntPtr.Zero)
        {
            NativeMethods.CertCloseStore(certStore, 0);
        }

        if (cryptKey != IntPtr.Zero)
        {
            NativeMethods.CryptDestroyKey(cryptKey);
        }

        if (providerContext != IntPtr.Zero)
        {
            NativeMethods.CryptReleaseContext(providerContext, 0);
            // Delete the temporary key container now that the key is in the PFX.
            NativeMethods.CryptAcquireContextW(
                out providerContext,
                containerName,
                null,
                1,      // PROV_RSA_FULL
                0x10);  // CRYPT_DELETEKEYSET
        }
    }

    return pfxData;
}
/// <summary>
/// Converts every band of <paramref name="srcDataProvider"/> to a new data type,
/// streaming the raster through fixed-size row blocks and applying
/// <paramref name="converter"/> to each sample.
/// </summary>
/// <returns>The destination provider, or null when the inputs are invalid.</returns>
public IRasterDataProvider ConvertDataType<TSrc, TDst>(IRasterDataProvider srcDataProvider, enumDataType dstDataType, string dstFileName, Func<TSrc, TDst> converter)
{
    if (srcDataProvider == null || string.IsNullOrEmpty(dstFileName))
    {
        return null;
    }

    IRasterDataProvider dstDataProvider = null;
    try
    {
        dstDataProvider = CreateDstDataProvider(dstDataType, srcDataProvider, dstFileName);
        if (dstDataProvider == null)
        {
            // BUG FIX: the exception was previously constructed but never
            // thrown, so a null provider escaped this guard and crashed later
            // with a NullReferenceException.
            throw new Exception("创建目标数据提供者时发生未知错误!");
        }
    }
    catch (Exception ex)
    {
        throw new Exception("创建目标数据提供者时发生错误!", ex);
    }

    // Process the raster in blocks of rows to bound memory usage.
    int blockRows = 100;
    int blockCount = (int)Math.Ceiling((float)srcDataProvider.Height / blockRows); // total number of blocks
    int bRow = 0, eRow = 0;
    int height = srcDataProvider.Height;
    int width = srcDataProvider.Width;
    int bufferRowCount = 0;
    TSrc[] srcBuffer = new TSrc[blockRows * srcDataProvider.Width];
    TDst[] dstBuffer = new TDst[blockRows * srcDataProvider.Width];
    // Pin both buffers so their addresses can be handed to the band I/O layer.
    GCHandle srcHandle = GCHandle.Alloc(srcBuffer, GCHandleType.Pinned);
    GCHandle dstHandle = GCHandle.Alloc(dstBuffer, GCHandleType.Pinned);
    try
    {
        for (int b = 1; b <= srcDataProvider.BandCount; b++)
        {
            IRasterBand srcBand = srcDataProvider.GetRasterBand(b);
            IRasterBand dstBand = dstDataProvider.GetRasterBand(b);
            bRow = 0;
            for (int blocki = 0; blocki < blockCount; blocki++, bRow += blockRows)
            {
                // The last block may cover fewer rows than blockRows.
                eRow = Math.Min(height, bRow + blockRows);
                bufferRowCount = eRow - bRow;
                srcBand.Read(0, bRow, width, bufferRowCount, srcHandle.AddrOfPinnedObject(), srcDataProvider.DataType, width, bufferRowCount);

                // Convert the samples actually read in this block.
                int count = bufferRowCount * width;
                for (int i = 0; i < count; i++)
                {
                    dstBuffer[i] = converter(srcBuffer[i]);
                }

                dstBand.Write(0, bRow, width, bufferRowCount, dstHandle.AddrOfPinnedObject(), dstDataType, width, bufferRowCount);
            }
        }
    }
    finally
    {
        srcHandle.Free();
        dstHandle.Free();
    }
    return dstDataProvider;
}
/// <summary>
/// Creates a new string from the contents of a given byte buffer.
/// </summary>
/// <param name="buffer">Source bytes; pinned for the duration of the copy.</param>
/// <param name="charLength">Number of characters the result will contain.</param>
/// <param name="needs16BitSupport">
/// When true the buffer holds two bytes per character; otherwise one byte per
/// character (written into the low byte of each UTF-16 code unit).
/// </param>
public static unsafe string StringFromBytes(byte[] buffer, int charLength, bool needs16BitSupport)
{
    int byteCount = needs16BitSupport ? charLength * 2 : charLength;
    if (buffer.Length < byteCount)
    {
        throw new ArgumentException("Buffer is not large enough to contain the given string; a size of at least " + byteCount + " is required.");
    }

    GCHandle pin = default(GCHandle);
    // Allocate a zero-filled string of the final length and fill it in place.
    string result = new string(default(char), charLength);
    try
    {
        pin = GCHandle.Alloc(buffer, GCHandleType.Pinned);
        byte* srcBase = (byte*)pin.AddrOfPinnedObject().ToPointer();

        if (needs16BitSupport)
        {
            fixed (char* dstChars = result)
            {
                if (BitConverter.IsLittleEndian)
                {
                    // Buffer layout already matches UTF-16LE: straight 16-bit copy.
                    ushort* src16 = (ushort*)srcBase;
                    ushort* dst16 = (ushort*)dstChars;
                    for (int i = 0; i < byteCount; i += sizeof(ushort))
                    {
                        *dst16++ = *src16++;
                    }
                }
                else
                {
                    // Swap the two bytes of every 16-bit unit.
                    byte* src8 = srcBase;
                    byte* dst8 = (byte*)dstChars;
                    for (int i = 0; i < byteCount; i += sizeof(ushort))
                    {
                        *dst8 = *(src8 + 1);
                        *(dst8 + 1) = *src8;
                        src8 += 2;
                        dst8 += 2;
                    }
                }
            }
        }
        else
        {
            fixed (char* dstChars = result)
            {
                byte* src8 = srcBase;
                byte* dst8 = (byte*)dstChars;
                if (BitConverter.IsLittleEndian)
                {
                    for (int i = 0; i < byteCount; i += sizeof(byte))
                    {
                        *dst8++ = *src8++;
                        dst8++; // Skip every other string byte
                    }
                }
                else
                {
                    for (int i = 0; i < byteCount; i += sizeof(byte))
                    {
                        dst8++; // Skip every other string byte
                        *dst8++ = *src8++;
                    }
                }
            }
        }
    }
    finally
    {
        if (pin.IsAllocated)
        {
            pin.Free();
        }
    }

    // An earlier revision interned the result here; removed because the slight
    // memory saving was not worth the intern-lookup cost and extra garbage.
    // Might eventually become a global config option, if this is considered necessary.
    //result = string.Intern(result);
    return result;
}
/// <summary>
/// Eliminate the matched features whose scale and rotation do not aggree with the majority's scale and rotation.
/// </summary>
/// <param name="rotationBins">The numbers of bins for rotation, a good value might be 20 (which means each bin covers 18 degree)</param>
/// <param name="scaleIncrement">This determins the different in scale for neighbour hood bins, a good value might be 1.5 (which means matched features in bin i+1 is scaled 1.5 times larger than matched features in bin i</param>
/// <param name="matchedFeatures">The matched feature that will be participated in the voting. For each matchedFeatures, only the zero indexed ModelFeature will be considered.</param>
public static MatchedSURFFeature[] VoteForSizeAndOrientation(MatchedSURFFeature[] matchedFeatures, double scaleIncrement, int rotationBins)
{
    int elementsCount = matchedFeatures.Length;
    float[] scales = new float[elementsCount];
    float[] rotations = new float[elementsCount];
    float[] flags = new float[elementsCount];
    float minScale = float.MaxValue;
    float maxScale = float.MinValue;

    // Collect log-scale ratios and rotation deltas for every matched pair.
    for (int i = 0; i < matchedFeatures.Length; i++)
    {
        // Log10 turns multiplicative scale ratios into additive bin offsets.
        float scale = (float)matchedFeatures[i].ObservedFeature.Point.size / (float)matchedFeatures[i].SimilarFeatures[0].Feature.Point.size;
        scale = (float)Math.Log10(scale);
        scales[i] = scale;
        if (scale < minScale)
        {
            minScale = scale;
        }
        if (scale > maxScale)
        {
            maxScale = scale;
        }

        // Normalize the rotation delta into [0, 360).
        float rotation = matchedFeatures[i].ObservedFeature.Point.dir - matchedFeatures[i].SimilarFeatures[0].Feature.Point.dir;
        rotations[i] = rotation < 0.0 ? rotation + 360 : rotation;
    }

    // One scale bin per scaleIncrement step across the observed range (at least 1).
    int scaleBinSize = (int)Math.Max(((maxScale - minScale) / Math.Log10(scaleIncrement)), 1);

    int count;

    // Vote in a 2D (scale, rotation) histogram, keep only the samples that land
    // near the dominant peak (>= 50% of the max vote), and back-project to flag them.
    using (DenseHistogram h = new DenseHistogram(new int[] { scaleBinSize, rotationBins }, new RangeF[] { new RangeF(minScale, maxScale), new RangeF(0, 360) }))
    {
        // Pin the managed arrays so the OpenCV matrices can wrap them in place.
        // NOTE(review): the handles are freed below but not in a finally block,
        // so an exception inside the using would leak the pins.
        GCHandle scaleHandle = GCHandle.Alloc(scales, GCHandleType.Pinned);
        GCHandle rotationHandle = GCHandle.Alloc(rotations, GCHandleType.Pinned);
        GCHandle flagsHandle = GCHandle.Alloc(flags, GCHandleType.Pinned);

        using (Matrix <float> flagsMat = new Matrix <float>(1, elementsCount, flagsHandle.AddrOfPinnedObject()))
        using (Matrix <float> scalesMat = new Matrix <float>(1, elementsCount, scaleHandle.AddrOfPinnedObject()))
        using (Matrix <float> rotationsMat = new Matrix <float>(1, elementsCount, rotationHandle.AddrOfPinnedObject()))
        {
            h.Calculate(new Matrix <float>[] { scalesMat, rotationsMat }, true, null);

            float minVal, maxVal;
            int[] minLoc, maxLoc;
            h.MinMax(out minVal, out maxVal, out minLoc, out maxLoc);
            // Zero every bin with fewer than half the peak's votes.
            h.Threshold(maxVal * 0.5);

            // flags[i] becomes non-zero iff sample i falls in a surviving bin.
            CvInvoke.cvCalcBackProject(new IntPtr[] { scalesMat.Ptr, rotationsMat.Ptr }, flagsMat.Ptr, h.Ptr);
            count = CvInvoke.cvCountNonZero(flagsMat);
        }

        scaleHandle.Free();
        rotationHandle.Free();
        flagsHandle.Free();

        // Compact the surviving matches into the result array.
        MatchedSURFFeature[] matchedGoodFeatures = new MatchedSURFFeature[count];
        int index = 0;
        for (int i = 0; i < matchedFeatures.Length; i++)
        {
            if (flags[i] != 0)
            {
                matchedGoodFeatures[index++] = matchedFeatures[i];
            }
        }

        return (matchedGoodFeatures);
    }
}
// Use this for initialization.
// Sets up the stereo (left/right) camera planes and orthographic cameras,
// creates the Ovrvision camera textures, picks the display shader, pins both
// pixel buffers for the native capture thread, and starts that thread.
void Start()
{
    // Initialize camera plane object (Left).
    go_cameraLeft = transform.FindChild("DeviceCameraLeft").camera;
    go_cameraPlaneLeft = transform.FindChild("DeviceCameraLeft").FindChild("CameraPlane").gameObject;
    go_cameraPlaneLeft.transform.localPosition = new Vector3(1.25f, 0.0f, 1.0f); //Default
    // Negative X scale flips the image horizontally; 4:3 aspect.
    go_cameraPlaneLeft.transform.localScale = new Vector3(-1.3333333f, 1.0f, 1.0f);

    // initialize camera plane object(Right)
    go_cameraRight = transform.FindChild("DeviceCameraRight").camera;
    go_cameraPlaneRight = transform.FindChild("DeviceCameraRight").FindChild("CameraPlane").gameObject;
    go_cameraPlaneRight.transform.localPosition = new Vector3(-1.25f, 0.0f, 1.0f);
    go_cameraPlaneRight.transform.localScale = new Vector3(-1.3333333f, 1.0f, 1.0f);

    //Setting cameras
    go_cameraLeft.transform.position = Vector3.zero;
    go_cameraLeft.transform.rotation = Quaternion.identity;
    go_cameraLeft.orthographicSize = (5.0f / 0.6f);
    go_cameraRight.transform.position = Vector3.zero;
    go_cameraRight.transform.rotation = Quaternion.identity;
    go_cameraRight.orthographicSize = (5.0f / 0.6f);

    //Create cam texture sized to the native Ovrvision image.
    go_CamTexLeft = new Texture2D(ovGetImageWidth(), ovGetImageHeight(), TextureFormat.RGB24, false);
    go_CamTexRight = new Texture2D(ovGetImageWidth(), ovGetImageHeight(), TextureFormat.RGB24, false);

    //Cam setting
    go_CamTexLeft.wrapMode = TextureWrapMode.Clamp;
    go_CamTexRight.wrapMode = TextureWrapMode.Clamp;

    if (camViewShader == 0)
    {
        //Normal shader
        go_cameraPlaneLeft.renderer.material.shader = Shader.Find("Ovrvision/ovTexture");
        go_cameraPlaneRight.renderer.material.shader = Shader.Find("Ovrvision/ovTexture");
    }
    else if (camViewShader == 1)
    {
        //Chroma-key shader: keying thresholds come from the HSV vectors
        //(x = max, y = min for hue / saturation / brightness).
        go_cameraPlaneLeft.renderer.material.shader = Shader.Find("Ovrvision/ovChromaticMask");
        go_cameraPlaneRight.renderer.material.shader = Shader.Find("Ovrvision/ovChromaticMask");
        go_cameraPlaneLeft.renderer.material.SetFloat("_Color_maxh", chroma_hue.x);
        go_cameraPlaneLeft.renderer.material.SetFloat("_Color_minh", chroma_hue.y);
        go_cameraPlaneLeft.renderer.material.SetFloat("_Color_maxs", chroma_saturation.x);
        go_cameraPlaneLeft.renderer.material.SetFloat("_Color_mins", chroma_saturation.y);
        go_cameraPlaneLeft.renderer.material.SetFloat("_Color_maxv", chroma_brightness.x);
        go_cameraPlaneLeft.renderer.material.SetFloat("_Color_minv", chroma_brightness.y);
        go_cameraPlaneRight.renderer.material.SetFloat("_Color_maxh", chroma_hue.x);
        go_cameraPlaneRight.renderer.material.SetFloat("_Color_minh", chroma_hue.y);
        go_cameraPlaneRight.renderer.material.SetFloat("_Color_maxs", chroma_saturation.x);
        go_cameraPlaneRight.renderer.material.SetFloat("_Color_mins", chroma_saturation.y);
        go_cameraPlaneRight.renderer.material.SetFloat("_Color_maxv", chroma_brightness.x);
        go_cameraPlaneRight.renderer.material.SetFloat("_Color_minv", chroma_brightness.y);
    }

    if (!camStatus)
        return; //Camera open only

    //Get texture pointer: pin both eye buffers so the capture thread can write
    //frames into them directly.
    go_pixelsColorLeft = go_CamTexLeft.GetPixels32();
    go_pixelsColorRight = go_CamTexRight.GetPixels32();
    go_pixelsHandleLeft = GCHandle.Alloc(go_pixelsColorLeft, GCHandleType.Pinned);
    go_pixelsHandleRight = GCHandle.Alloc(go_pixelsColorRight, GCHandleType.Pinned);
    go_pixelsPointerLeft = go_pixelsHandleLeft.AddrOfPinnedObject();
    go_pixelsPointerRight = go_pixelsHandleRight.AddrOfPinnedObject();

    go_cameraPlaneLeft.renderer.material.mainTexture = go_CamTexLeft;
    go_cameraPlaneRight.renderer.material.mainTexture = go_CamTexRight;

    Test_SetOculusIPD(); //v0.8 test

    // Create thread: the mutex guards the shared frame buffers between this
    // component and the capture thread.
    ovrvisionTextureThreadMutex = new Mutex(true);
    ovrvisionTextureThread = new Thread(GetImageThreadFunc);
    ovrvisionTextureThread.Start();
}
/// <summary>Starts the process using the supplied start info.</summary>
/// <param name="startInfo">The start info with which to start the process.</param>
/// <returns>true when a valid process handle was obtained; otherwise false.</returns>
private bool StartWithCreateProcess(ProcessStartInfo startInfo)
{
    // See knowledge base article Q190351 for an explanation of the following code. Noteworthy tricky points:
    //    * The handles are duplicated as non-inheritable before they are passed to CreateProcess so
    //      that the child process can not close them
    //    * CreateProcess allows you to redirect all or none of the standard IO handles, so we use
    //      GetStdHandle for the handles that are not being redirected
    StringBuilder commandLine = BuildCommandLine(startInfo.FileName, startInfo.Arguments);

    Interop.Kernel32.STARTUPINFO startupInfo = new Interop.Kernel32.STARTUPINFO();
    Interop.Kernel32.PROCESS_INFORMATION processInfo = new Interop.Kernel32.PROCESS_INFORMATION();
    Interop.Kernel32.SECURITY_ATTRIBUTES unused_SecAttrs = new Interop.Kernel32.SECURITY_ATTRIBUTES();
    SafeProcessHandle procSH = new SafeProcessHandle();
    SafeThreadHandle threadSH = new SafeThreadHandle();
    bool retVal;
    int errorCode = 0;
    // handles used in parent process
    SafeFileHandle standardInputWritePipeHandle = null;
    SafeFileHandle standardOutputReadPipeHandle = null;
    SafeFileHandle standardErrorReadPipeHandle = null;
    // Pin for the environment block; only allocated when custom variables are set.
    GCHandle environmentHandle = new GCHandle();
    lock (s_createProcessLock)
    {
        try
        {
            // set up the streams: create pipes for redirected handles, pass the
            // current std handles through for the rest.
            if (startInfo.RedirectStandardInput || startInfo.RedirectStandardOutput || startInfo.RedirectStandardError)
            {
                if (startInfo.RedirectStandardInput)
                {
                    CreatePipe(out standardInputWritePipeHandle, out startupInfo.hStdInput, true);
                }
                else
                {
                    startupInfo.hStdInput = new SafeFileHandle(Interop.Kernel32.GetStdHandle(Interop.Kernel32.HandleTypes.STD_INPUT_HANDLE), false);
                }
                if (startInfo.RedirectStandardOutput)
                {
                    CreatePipe(out standardOutputReadPipeHandle, out startupInfo.hStdOutput, false);
                }
                else
                {
                    startupInfo.hStdOutput = new SafeFileHandle(Interop.Kernel32.GetStdHandle(Interop.Kernel32.HandleTypes.STD_OUTPUT_HANDLE), false);
                }
                if (startInfo.RedirectStandardError)
                {
                    CreatePipe(out standardErrorReadPipeHandle, out startupInfo.hStdError, false);
                }
                else
                {
                    startupInfo.hStdError = new SafeFileHandle(Interop.Kernel32.GetStdHandle(Interop.Kernel32.HandleTypes.STD_ERROR_HANDLE), false);
                }
                startupInfo.dwFlags = Interop.Advapi32.StartupInfoOptions.STARTF_USESTDHANDLES;
            }

            // set up the creation flags parameter
            int creationFlags = 0;
            if (startInfo.CreateNoWindow)
            {
                creationFlags |= Interop.Advapi32.StartupInfoOptions.CREATE_NO_WINDOW;
            }

            // set up the environment block parameter: a pinned, Unicode-encoded
            // block built from the caller-supplied variables.
            IntPtr environmentPtr = (IntPtr)0;
            if (startInfo._environmentVariables != null)
            {
                creationFlags |= Interop.Advapi32.StartupInfoOptions.CREATE_UNICODE_ENVIRONMENT;
                byte[] environmentBytes = EnvironmentVariablesToByteArray(startInfo._environmentVariables);
                environmentHandle = GCHandle.Alloc(environmentBytes, GCHandleType.Pinned);
                environmentPtr = environmentHandle.AddrOfPinnedObject();
            }

            string workingDirectory = startInfo.WorkingDirectory;
            if (workingDirectory == string.Empty)
            {
                workingDirectory = Directory.GetCurrentDirectory();
            }

            if (startInfo.UserName.Length != 0)
            {
                // Run-as-user path: CreateProcessWithLogonW with either a
                // SecureString password or a clear-text one (never both).
                if (startInfo.Password != null && startInfo.PasswordInClearText != null)
                {
                    throw new ArgumentException(SR.CantSetDuplicatePassword);
                }
                Interop.Advapi32.LogonFlags logonFlags = (Interop.Advapi32.LogonFlags) 0;
                if (startInfo.LoadUserProfile)
                {
                    logonFlags = Interop.Advapi32.LogonFlags.LOGON_WITH_PROFILE;
                }
                if (startInfo.Password != null)
                {
                    IntPtr passwordPtr = Marshal.SecureStringToGlobalAllocUnicode(startInfo.Password);
                    try
                    {
                        retVal = Interop.Advapi32.CreateProcessWithLogonW(
                            startInfo.UserName,
                            startInfo.Domain,
                            passwordPtr,
                            logonFlags,
                            null,       // we don't need this since all the info is in commandLine
                            commandLine,
                            creationFlags,
                            environmentPtr,
                            workingDirectory,
                            startupInfo,   // pointer to STARTUPINFO
                            processInfo    // pointer to PROCESS_INFORMATION
                            );
                        if (!retVal)
                        {
                            errorCode = Marshal.GetLastWin32Error();
                        }
                    }
                    finally
                    {
                        // Zero and free the unmanaged password copy regardless of outcome.
                        Marshal.ZeroFreeGlobalAllocUnicode(passwordPtr);
                    }
                }
                else
                {
                    unsafe
                    {
                        // Fixing the string pins it so the native call can read it in place.
                        fixed(char *passwordPtr = startInfo.PasswordInClearText ?? string.Empty)
                        {
                            retVal = Interop.Advapi32.CreateProcessWithLogonW(
                                startInfo.UserName,
                                startInfo.Domain,
                                (IntPtr)passwordPtr,
                                logonFlags,
                                null,       // we don't need this since all the info is in commandLine
                                commandLine,
                                creationFlags,
                                environmentPtr,
                                workingDirectory,
                                startupInfo,   // pointer to STARTUPINFO
                                processInfo    // pointer to PROCESS_INFORMATION
                                );
                        }
                    }
                    if (!retVal)
                    {
                        errorCode = Marshal.GetLastWin32Error();
                    }
                }
                // Capture the returned handles in SafeHandles before any throw,
                // so they cannot leak.
                if (processInfo.hProcess != IntPtr.Zero && processInfo.hProcess != (IntPtr)INVALID_HANDLE_VALUE)
                {
                    procSH.InitialSetHandle(processInfo.hProcess);
                }
                if (processInfo.hThread != IntPtr.Zero && processInfo.hThread != (IntPtr)INVALID_HANDLE_VALUE)
                {
                    threadSH.InitialSetHandle(processInfo.hThread);
                }
                if (!retVal)
                {
                    if (errorCode == Interop.Errors.ERROR_BAD_EXE_FORMAT || errorCode == Interop.Errors.ERROR_EXE_MACHINE_TYPE_MISMATCH)
                    {
                        throw new Win32Exception(errorCode, SR.InvalidApplication);
                    }
                    throw new Win32Exception(errorCode);
                }
            }
            else
            {
                // Plain CreateProcess path (current user).
                retVal = Interop.Kernel32.CreateProcess(
                    null,                // we don't need this since all the info is in commandLine
                    commandLine,         // pointer to the command line string
                    ref unused_SecAttrs, // address to process security attributes, we don't need to inherit the handle
                    ref unused_SecAttrs, // address to thread security attributes.
                    true,                // handle inheritance flag
                    creationFlags,       // creation flags
                    environmentPtr,      // pointer to new environment block
                    workingDirectory,    // pointer to current directory name
                    startupInfo,         // pointer to STARTUPINFO
                    processInfo          // pointer to PROCESS_INFORMATION
                    );
                if (!retVal)
                {
                    errorCode = Marshal.GetLastWin32Error();
                }
                if (processInfo.hProcess != (IntPtr)0 && processInfo.hProcess != (IntPtr)INVALID_HANDLE_VALUE)
                {
                    procSH.InitialSetHandle(processInfo.hProcess);
                }
                if (processInfo.hThread != (IntPtr)0 && processInfo.hThread != (IntPtr)INVALID_HANDLE_VALUE)
                {
                    threadSH.InitialSetHandle(processInfo.hThread);
                }
                if (!retVal)
                {
                    if (errorCode == Interop.Errors.ERROR_BAD_EXE_FORMAT || errorCode == Interop.Errors.ERROR_EXE_MACHINE_TYPE_MISMATCH)
                    {
                        throw new Win32Exception(errorCode, SR.InvalidApplication);
                    }
                    throw new Win32Exception(errorCode);
                }
            }
        }
        finally
        {
            // free environment block
            if (environmentHandle.IsAllocated)
            {
                environmentHandle.Free();
            }
            // Releases the duplicated/created std handles held by STARTUPINFO.
            startupInfo.Dispose();
        }
    }

    // Wrap the parent ends of the redirection pipes in readers/writers using
    // the caller's encoding or the console code page.
    if (startInfo.RedirectStandardInput)
    {
        Encoding enc = startInfo.StandardInputEncoding ?? GetEncoding((int)Interop.Kernel32.GetConsoleCP());
        _standardInput = new StreamWriter(new FileStream(standardInputWritePipeHandle, FileAccess.Write, 4096, false), enc, 4096);
        _standardInput.AutoFlush = true;
    }
    if (startInfo.RedirectStandardOutput)
    {
        Encoding enc = startInfo.StandardOutputEncoding ?? GetEncoding((int)Interop.Kernel32.GetConsoleOutputCP());
        _standardOutput = new StreamReader(new FileStream(standardOutputReadPipeHandle, FileAccess.Read, 4096, false), enc, true, 4096);
    }
    if (startInfo.RedirectStandardError)
    {
        Encoding enc = startInfo.StandardErrorEncoding ?? GetEncoding((int)Interop.Kernel32.GetConsoleOutputCP());
        _standardError = new StreamReader(new FileStream(standardErrorReadPipeHandle, FileAccess.Read, 4096, false), enc, true, 4096);
    }

    bool ret = false;
    if (!procSH.IsInvalid)
    {
        // Keep the process handle/id; the thread handle is not needed past startup.
        SetProcessHandle(procSH);
        SetProcessId((int)processInfo.dwProcessId);
        threadSH.Dispose();
        ret = true;
    }
    return(ret);
}
private void SendMeshBuffersToPlugin() { var filter = GetComponent <MeshFilter> (); var mesh = filter.mesh; // The plugin will want to modify the vertex buffer -- on many platforms // for that to work we have to mark mesh as "dynamic" (which makes the buffers CPU writable -- // by default they are immutable and only GPU-readable). mesh.MarkDynamic(); // However, mesh being dynamic also means that the CPU on most platforms can not // read from the vertex buffer. Our plugin also wants original mesh data, // so let's pass it as pointers to regular C# arrays. // This bit shows how to pass array pointers to native plugins without doing an expensive // copy: you have to get a GCHandle, and get raw address of that. var vertices = mesh.vertices; var normals = mesh.normals; var uvs = mesh.uv; GCHandle gcVertices = GCHandle.Alloc(vertices, GCHandleType.Pinned); GCHandle gcNormals = GCHandle.Alloc(normals, GCHandleType.Pinned); GCHandle gcUV = GCHandle.Alloc(uvs, GCHandleType.Pinned); SetMeshBuffersFromUnity(mesh.GetNativeVertexBufferPtr(0), mesh.vertexCount, gcVertices.AddrOfPinnedObject(), gcNormals.AddrOfPinnedObject(), gcUV.AddrOfPinnedObject()); gcVertices.Free(); gcNormals.Free(); gcUV.Free(); }
/// <summary>
/// Writes the specified value.
/// </summary>
/// <typeparam name="T">The type.</typeparam>
/// <param name="value">The value.</param>
/// <remarks>
/// Known primitive, string and math types dispatch to the matching
/// BinaryWriter overload; any other type is blitted through
/// Marshal.StructureToPtr using its unmanaged layout.
/// </remarks>
public void Write <T>(T value)
{
    if (typeof(T) == typeof(string))
    {
        // Raw bytes in the configured encoding (no length prefix).
        binaryWriter.Write(encoding.GetBytes((string)((object)value)));
    }
    else if (typeof(T) == typeof(BString))
    {
        // Length-prefixed string via BinaryWriter.Write(string).
        binaryWriter.Write((string)((BString)((object)value)));
    }
    else if (typeof(T) == typeof(BaseString))
    {
        binaryWriter.Write(((BaseString)((object)value)).ToCharArray());
    }
    else if (typeof(T) == typeof(byte))
    {
        binaryWriter.Write((byte)((object)value));
    }
    else if (typeof(T) == typeof(byte[]))
    {
        binaryWriter.Write((byte[])((object)value));
    }
    else if (typeof(T) == typeof(sbyte))
    {
        binaryWriter.Write((sbyte)((object)value));
    }
    else if (typeof(T) == typeof(char))
    {
        binaryWriter.Write((char)((object)value));
    }
    else if (typeof(T) == typeof(short))
    {
        binaryWriter.Write((short)((object)value));
    }
    else if (typeof(T) == typeof(ushort))
    {
        binaryWriter.Write((ushort)((object)value));
    }
    else if (typeof(T) == typeof(int))
    {
        binaryWriter.Write((int)((object)value));
    }
    else if (typeof(T) == typeof(uint))
    {
        binaryWriter.Write((uint)((object)value));
    }
    else if (typeof(T) == typeof(long))
    {
        binaryWriter.Write((long)((object)value));
    }
    else if (typeof(T) == typeof(ulong))
    {
        binaryWriter.Write((ulong)((object)value));
    }
    else if (typeof(T) == typeof(float))
    {
        binaryWriter.Write((float)((object)value));
    }
    else if (typeof(T) == typeof(double))
    {
        binaryWriter.Write((double)((object)value));
    }
    else if (typeof(T) == typeof(decimal))
    {
        binaryWriter.Write((decimal)((object)value));
    }
    else if (typeof(T) == typeof(Vector2))
    {
        // Component-wise float writes, x then y.
        binaryWriter.Write(((Vector2)((object)value)).x);
        binaryWriter.Write(((Vector2)((object)value)).y);
    }
    else if (typeof(T) == typeof(Vector3))
    {
        binaryWriter.Write(((Vector3)((object)value)).x);
        binaryWriter.Write(((Vector3)((object)value)).y);
        binaryWriter.Write(((Vector3)((object)value)).z);
    }
    else if (typeof(T) == typeof(Quaternion))
    {
        binaryWriter.Write(((Quaternion)((object)value)).x);
        binaryWriter.Write(((Quaternion)((object)value)).y);
        binaryWriter.Write(((Quaternion)((object)value)).z);
        binaryWriter.Write(((Quaternion)((object)value)).w);
    }
    else
    {
        // Fallback for arbitrary structs: marshal the value into a pinned
        // managed buffer, then write the raw bytes.
        int unmanagedSize = Marshal.SizeOf(typeof(T));
        byte[] buffer = new byte[unmanagedSize];
        GCHandle handle = GCHandle.Alloc(buffer, GCHandleType.Pinned);
        try
        {
            // StructureToPtr writes directly into the pinned buffer's memory,
            // so no copy back into 'buffer' is needed. (The original code also
            // did a Marshal.Copy from the pinned address back into the same
            // buffer — a self-copy — which has been removed.)
            Marshal.StructureToPtr(value, handle.AddrOfPinnedObject(), false);
        }
        finally
        {
            handle.Free();
        }
        binaryWriter.Write(buffer);
    }
}
/// <summary>
/// Reads a value.
/// </summary>
/// <typeparam name="T">The type.</typeparam>
/// <returns>The value.</returns>
/// <remarks>
/// Counterpart of Write&lt;T&gt;: known types dispatch to BinaryReader
/// overloads; anything else (e.g. Vector2 or other blittable structs) falls
/// through to the Marshal.PtrToStructure path at the bottom.
/// </remarks>
public T Read <T>()
{
    if (typeof(T) == typeof(byte))
    {
        return((T)((object)binaryReader.ReadByte()));
    }
    if (typeof(T) == typeof(sbyte))
    {
        return((T)((object)binaryReader.ReadSByte()));
    }
    if (typeof(T) == typeof(char))
    {
        return((T)((object)binaryReader.ReadChar()));
    }
    if (typeof(T) == typeof(short))
    {
        return((T)((object)binaryReader.ReadInt16()));
    }
    if (typeof(T) == typeof(ushort))
    {
        return((T)((object)binaryReader.ReadUInt16()));
    }
    if (typeof(T) == typeof(int))
    {
        return((T)((object)binaryReader.ReadInt32()));
    }
    if (typeof(T) == typeof(uint))
    {
        return((T)((object)binaryReader.ReadUInt32()));
    }
    if (typeof(T) == typeof(long))
    {
        return((T)((object)binaryReader.ReadInt64()));
    }
    if (typeof(T) == typeof(ulong))
    {
        return((T)((object)binaryReader.ReadUInt64()));
    }
    if (typeof(T) == typeof(float))
    {
        return((T)((object)binaryReader.ReadSingle()));
    }
    if (typeof(T) == typeof(double))
    {
        return((T)((object)binaryReader.ReadDouble()));
    }
    if (typeof(T) == typeof(decimal))
    {
        return((T)((object)binaryReader.ReadDecimal()));
    }
    if (typeof(T) == typeof(Vector3))
    {
        // Three floats: x, y, z — matches the order written by Write<Vector3>.
        return((T)((object)new Vector3()
        {
            x = binaryReader.ReadSingle(),
            y = binaryReader.ReadSingle(),
            z = binaryReader.ReadSingle()
        }));
    }
    if (typeof(T) == typeof(Quaternion))
    {
        // Four floats: x, y, z, w — matches the order written by Write<Quaternion>.
        return((T)((object)new Quaternion()
        {
            x = binaryReader.ReadSingle(),
            y = binaryReader.ReadSingle(),
            z = binaryReader.ReadSingle(),
            w = binaryReader.ReadSingle()
        }));
    }
    if (typeof(T) == typeof(BString))
    {
        // Length-prefixed string via BinaryReader.ReadString.
        return((T)((object)((BString)binaryReader.ReadString())));
    }
    if (typeof(T) == typeof(ZString))
    {
        // Zero-terminated string: accumulate chars until a NUL is read.
        ZString returnString = string.Empty;
        while (true)
        {
            char character = binaryReader.ReadChar();
            if (character == 0)
            {
                break;
            }
            returnString += character;
        }
        return((T)((object)(returnString)));
    }
    // Fallback: read the struct's unmanaged size in bytes, pin them, and
    // reinterpret via PtrToStructure.
    GCHandle handle = GCHandle.Alloc(binaryReader.ReadBytes(Marshal.SizeOf(typeof(T))), GCHandleType.Pinned);
    T value;
    try
    {
        value = (T)Marshal.PtrToStructure(handle.AddrOfPinnedObject(), typeof(T));
    }
    finally
    {
        handle.Free();
    }
    return(value);
}
private static void EncodeImage(ImagingFactory imagingFactory, Image image, WicFlags flags, Guid containerFormat, BitmapFrameEncode frame) { Guid pfGuid = ToWic(image.Format, false); frame.Initialize(); frame.SetSize(image.Width, image.Height); frame.SetResolution(72, 72); Guid targetGuid = pfGuid; frame.SetPixelFormat(ref targetGuid); EncodeMetadata(frame, containerFormat, image.Format); if (targetGuid != pfGuid) { // Conversion required to write. GCHandle handle = GCHandle.Alloc(image.Data, GCHandleType.Pinned); using (var source = new Bitmap(imagingFactory, image.Width, image.Height, pfGuid, new DataRectangle(handle.AddrOfPinnedObject(), image.RowPitch), image.Data.Length)) { using (var converter = new FormatConverter(imagingFactory)) { if (!converter.CanConvert(pfGuid, targetGuid)) throw new NotSupportedException("Format conversion is not supported."); converter.Initialize(source, targetGuid, GetWicDither(flags), null, 0, BitmapPaletteType.Custom); frame.WriteSource(converter, new Rectangle(0, 0, image.Width, image.Height)); } } handle.Free(); } else { // No conversion required. frame.WritePixels(image.Height, image.RowPitch, image.Data); } frame.Commit(); }
public void *GetBuffer() { return((void *)_handle?.AddrOfPinnedObject()); }
/// <summary> /// Blindly mem-copies a given number of bytes from the memory location of one object to another. WARNING: This method is ridiculously dangerous. Only use if you know what you're doing. /// </summary> public static unsafe void MemoryCopy(object from, object to, int byteCount, int fromByteOffset, int toByteOffset) { GCHandle fromHandle = default(GCHandle); GCHandle toHandle = default(GCHandle); if (fromByteOffset % sizeof(ulong) != 0 || toByteOffset % sizeof(ulong) != 0) { throw new ArgumentException("Byte offset must be divisible by " + sizeof(ulong) + " (IE, sizeof(ulong))"); } try { int restBytes = byteCount % sizeof(ulong); int ulongCount = (byteCount - restBytes) / sizeof(ulong); int fromOffsetCount = fromByteOffset / sizeof(ulong); int toOffsetCount = toByteOffset / sizeof(ulong); fromHandle = GCHandle.Alloc(from, GCHandleType.Pinned); toHandle = GCHandle.Alloc(to, GCHandleType.Pinned); ulong *fromUlongPtr = (ulong *)fromHandle.AddrOfPinnedObject().ToPointer(); ulong *toUlongPtr = (ulong *)toHandle.AddrOfPinnedObject().ToPointer(); if (fromOffsetCount > 0) { fromUlongPtr += fromOffsetCount; } if (toOffsetCount > 0) { toUlongPtr += toOffsetCount; } for (int i = 0; i < ulongCount; i++) { *toUlongPtr++ = *fromUlongPtr++; } if (restBytes > 0) { byte *fromBytePtr = (byte *)fromUlongPtr; byte *toBytePtr = (byte *)toUlongPtr; for (int i = 0; i < restBytes; i++) { *toBytePtr++ = *fromBytePtr++; } } } finally { if (fromHandle.IsAllocated) { fromHandle.Free(); } if (toHandle.IsAllocated) { toHandle.Free(); } } }
IntPtr Malloc(IntPtr size) { bufhandle = GCHandle.Alloc(bufinstance, GCHandleType.Pinned); return(bufhandle.AddrOfPinnedObject()); }
/// <summary> /// Writes the contents of a string into a given byte buffer. /// </summary> public static unsafe int StringToBytes(byte[] buffer, string value, bool needs16BitSupport) { int byteCount = needs16BitSupport ? value.Length * 2 : value.Length; if (buffer.Length < byteCount) { throw new ArgumentException("Buffer is not large enough to contain the given string; a size of at least " + byteCount + " is required."); } GCHandle toHandle = default(GCHandle); try { toHandle = GCHandle.Alloc(buffer, GCHandleType.Pinned); if (needs16BitSupport) { if (BitConverter.IsLittleEndian) { fixed(char *charPtr1 = value) { ushort *fromPtr1 = (ushort *)charPtr1; ushort *toPtr1 = (ushort *)toHandle.AddrOfPinnedObject().ToPointer(); for (int i = 0; i < byteCount; i += sizeof(ushort)) { *toPtr1++ = *fromPtr1++; } } } else { fixed(char *charPtr2 = value) { byte *fromPtr2 = (byte *)charPtr2; byte *toPtr2 = (byte *)toHandle.AddrOfPinnedObject().ToPointer(); for (int i = 0; i < byteCount; i += sizeof(ushort)) { *toPtr2 = *(fromPtr2 + 1); *(toPtr2 + 1) = *fromPtr2; fromPtr2 += 2; toPtr2 += 2; } } } } else { if (BitConverter.IsLittleEndian) { fixed(char *charPtr3 = value) { byte *fromPtr3 = (byte *)charPtr3; byte *toPtr3 = (byte *)toHandle.AddrOfPinnedObject().ToPointer(); for (int i = 0; i < byteCount; i += sizeof(byte)) { fromPtr3++; // Skip every other string byte *toPtr3++ = *fromPtr3++; } } } else { fixed(char *charPtr4 = value) { byte *fromPtr4 = (byte *)charPtr4; byte *toPtr4 = (byte *)toHandle.AddrOfPinnedObject().ToPointer(); for (int i = 0; i < byteCount; i += sizeof(byte)) { *toPtr4++ = *fromPtr4++; fromPtr4++; // Skip every other string byte } } } } } finally { if (toHandle.IsAllocated) { toHandle.Free(); } } return(byteCount); }
private Tile[][][] ReadStaticBlock(int x, int y) { try { m_IndexReader.BaseStream.Seek(((x * m_BlockHeight) + y) * 12, SeekOrigin.Begin); int lookup = m_IndexReader.ReadInt32(); int length = m_IndexReader.ReadInt32(); if (lookup < 0 || length <= 0) { return(m_EmptyStaticBlock); } else { int count = length / 7; m_Statics.Seek(lookup, SeekOrigin.Begin); if (m_Buffer == null || length > m_Buffer.Length) { m_Buffer = new byte[length]; } GCHandle handle = GCHandle.Alloc(m_Buffer, GCHandleType.Pinned); try { m_Statics.Read(m_Buffer, 0, length); if (m_Lists == null) { m_Lists = new TileList[8][]; for (int i = 0; i < 8; ++i) { m_Lists[i] = new TileList[8]; for (int j = 0; j < 8; ++j) { m_Lists[i][j] = new TileList(); } } } TileList[][] lists = m_Lists; for (int i = 0; i < count; i++) { IntPtr ptr = new IntPtr((long)handle.AddrOfPinnedObject() + i * 7); StaticTile cur = (StaticTile)Marshal.PtrToStructure(ptr, typeof(StaticTile)); lists[cur.m_X & 0x7][cur.m_Y & 0x7].Add((short)((cur.m_ID & 0x3FFF) + 0x4000), cur.m_Z); } Tile[][][] tiles = new Tile[8][][]; for (int i = 0; i < 8; ++i) { tiles[i] = new Tile[8][]; for (int j = 0; j < 8; ++j) { tiles[i][j] = lists[i][j].ToArray(); } } return(tiles); } finally { handle.Free(); } } } catch (EndOfStreamException) { if (Core.Now >= m_NextStaticWarning) { log.WarnFormat("Static EOS for {0} ({1}, {2})", m_Owner, x, y); m_NextStaticWarning = Core.Now + TimeSpan.FromMinutes(1.0); } return(m_EmptyStaticBlock); } }
IntPtr PinByteArray(ref GCHandle handle, byte[] array) { handle.Free(); handle = GCHandle.Alloc(array, GCHandleType.Pinned); return(handle.AddrOfPinnedObject()); }
// Use this for initialization
// Unity Start hook for the Oculus-mounted Ovrvision variant: positions the two
// camera planes, parents them to the Rift eye anchors when present, creates
// and configures the camera textures, and pins their pixel buffers so native
// code can be handed raw pointers.
void Start()
{
    // Initialize camera plane object(Left)
    go_cameraPlaneLeft = this.transform.FindChild("CameraPlaneLeft").gameObject;
    go_cameraPlaneLeft.transform.localPosition = new Vector3(0.0f, 0.0f, 5.0f); //Default
    // Initialize camera plane object(Right)
    go_cameraPlaneRight = this.transform.FindChild("CameraPlaneRight").gameObject;
    go_cameraPlaneRight.transform.localPosition = new Vector3(0.0f, 0.0f, 5.0f);
    //Create cam texture sized to the native camera image (RGB24, no mipmaps).
    go_CamTexLeft = new Texture2D(ovGetImageWidth(), ovGetImageHeight(), TextureFormat.RGB24, false);
    go_CamTexRight = new Texture2D(ovGetImageWidth(), ovGetImageHeight(), TextureFormat.RGB24, false);
    //Cam setting
    go_CamTexLeft.wrapMode = TextureWrapMode.Clamp;
    go_CamTexRight.wrapMode = TextureWrapMode.Clamp;
    //in Oculus Rift camera: re-parent each plane under its eye anchor when the
    //OVR rig is present in the scene.
    if (GameObject.Find("LeftEyeAnchor"))
        go_cameraPlaneLeft.transform.parent = GameObject.Find("LeftEyeAnchor").transform;
    if (GameObject.Find("RightEyeAnchor"))
        go_cameraPlaneRight.transform.parent = GameObject.Find("RightEyeAnchor").transform;
    if (camViewShader == 0)
    {
        //Normal shader
        go_cameraPlaneLeft.renderer.material.shader = Shader.Find("Ovrvision/ovTexture");
        go_cameraPlaneRight.renderer.material.shader = Shader.Find("Ovrvision/ovTexture");
    }
    else if (camViewShader == 1)
    {
        //Chroma-key shader: push the configured HSV keying ranges (x = max, y = min)
        //to both plane materials.
        go_cameraPlaneLeft.renderer.material.shader = Shader.Find("Ovrvision/ovChromaticMask");
        go_cameraPlaneRight.renderer.material.shader = Shader.Find("Ovrvision/ovChromaticMask");
        go_cameraPlaneLeft.renderer.material.SetFloat("_Color_maxh", chroma_hue.x);
        go_cameraPlaneLeft.renderer.material.SetFloat("_Color_minh", chroma_hue.y);
        go_cameraPlaneLeft.renderer.material.SetFloat("_Color_maxs", chroma_saturation.x);
        go_cameraPlaneLeft.renderer.material.SetFloat("_Color_mins", chroma_saturation.y);
        go_cameraPlaneLeft.renderer.material.SetFloat("_Color_maxv", chroma_brightness.x);
        go_cameraPlaneLeft.renderer.material.SetFloat("_Color_minv", chroma_brightness.y);
        go_cameraPlaneRight.renderer.material.SetFloat("_Color_maxh", chroma_hue.x);
        go_cameraPlaneRight.renderer.material.SetFloat("_Color_minh", chroma_hue.y);
        go_cameraPlaneRight.renderer.material.SetFloat("_Color_maxs", chroma_saturation.x);
        go_cameraPlaneRight.renderer.material.SetFloat("_Color_mins", chroma_saturation.y);
        go_cameraPlaneRight.renderer.material.SetFloat("_Color_maxv", chroma_brightness.x);
        go_cameraPlaneRight.renderer.material.SetFloat("_Color_minv", chroma_brightness.y);
    }
    if (!camStatus)
        return; //Camera open only: without an open camera, skip buffer pinning.
    //Get texture pointer: pin the Color32 arrays and store their addresses so
    //native code can write pixels; the pins are kept allocated past this method
    //(freed elsewhere, presumably on teardown — TODO confirm).
    go_pixelsColorLeft = go_CamTexLeft.GetPixels32();
    go_pixelsColorRight = go_CamTexRight.GetPixels32();
    go_pixelsHandleLeft = GCHandle.Alloc(go_pixelsColorLeft, GCHandleType.Pinned);
    go_pixelsHandleRight = GCHandle.Alloc(go_pixelsColorRight, GCHandleType.Pinned);
    go_pixelsPointerLeft = go_pixelsHandleLeft.AddrOfPinnedObject();
    go_pixelsPointerRight = go_pixelsHandleRight.AddrOfPinnedObject();
    go_cameraPlaneLeft.renderer.material.mainTexture = go_CamTexLeft;
    go_cameraPlaneRight.renderer.material.mainTexture = go_CamTexRight;
}
public void SaveBinary(GraphicsContext ctx) { if (ctx == null) { throw new ArgumentNullException("ctx"); } if (!ctx.Caps.GlExtensions.GetProgramBinary_ARB) { throw new NotSupportedException("get_program_binary not supported"); } if (!IsLinked) { throw new InvalidOperationException("not linked"); } int programCacheLength; Gl.GetProgram(ObjectName, Gl.PROGRAM_BINARY_LENGTH, out programCacheLength); byte[] programCache = new byte[programCacheLength]; int programCacheFormat; GCHandle programCacheBuffer = GCHandle.Alloc(programCache); try { Gl.GetProgramBinary(ObjectName, programCache.Length, out programCacheLength, out programCacheFormat, programCacheBuffer.AddrOfPinnedObject()); } finally { programCacheBuffer.Free(); } string cachePath = Environment.GetFolderPath(Environment.SpecialFolder.LocalApplicationData); cachePath = Path.Combine(cachePath, CompiledHash + ".glsl"); using (FileStream fs = new FileStream(cachePath, FileMode.Create, FileAccess.Write)) { fs.Write(programCache, 0, programCache.Length); } }
/// <summary>
/// Core implementation for self-signed certificate creation.
/// When <paramref name="rawData"/> is null, a new self-signed certificate is
/// created; otherwise the supplied existing certificate (PFX) is renewed.
/// </summary>
/// <param name="rawData">Existing certificate (PFX) bytes when renewing, or null to create anew.</param>
/// <param name="x500">Subject name (e.g. CN="XXXXX").</param>
/// <param name="startTime">Validity start date.</param>
/// <param name="endTime">Validity end date.</param>
/// <param name="password">Password protecting the PFX (may be null).</param>
/// <returns>The exported PFX certificate data.</returns>
public static byte[] CreateSelfSignCertificatePfx(
    byte[] rawData,
    string x500,
    DateTime startTime,
    DateTime endTime,
    SecureString password)
{
    byte[] pfxData;
    if (x500 == null)
    {
        x500 = "";
    }
    SystemTime startSystemTime = ToSystemTime(startTime);
    SystemTime endSystemTime = ToSystemTime(endTime);
    string containerName = Guid.NewGuid().ToString();
    GCHandle dataHandle = new GCHandle();
    IntPtr providerContext = IntPtr.Zero;
    IntPtr cryptKey = IntPtr.Zero;
    IntPtr certContext = IntPtr.Zero;
    IntPtr certStore = IntPtr.Zero;
    IntPtr storeCertContext = IntPtr.Zero;
    IntPtr passwordPtr = IntPtr.Zero;
    IntPtr callerFreeProvOrNCryptKey = IntPtr.Zero;
    // Mark the body as a constrained execution region (CER) so the cleanup in
    // the finally block is as reliable as possible.
    RuntimeHelpers.PrepareConstrainedRegions();
    try
    {
        if (password != null)
        {
            // Copy the managed SecureString into unmanaged COM task memory.
            passwordPtr = Marshal.SecureStringToCoTaskMemUnicode(password);
        }
        if (rawData == null)
        {
            // New certificate: acquire a fresh key-container handle (HCRYPTPROV)
            // within the cryptographic service provider (CSP).
            Check(NativeMethods.CryptAcquireContextW(
                out providerContext,
                containerName,
                null, //MS_ENHANCED_PROV,
                PROV_RSA_FULL,
                CRYPT_NEWKEYSET));
            // Generate an exportable key-exchange key pair (HCRYPTKEY).
            Check(NativeMethods.CryptGenKey(
                providerContext,
                AT_KEYEXCHANGE,
                //RSA2048BIT_KEY | CRYPT_EXPORTABLE, // 2048-bit variant; default is 1024-bit
                RSA1024BIT_KEY | CRYPT_EXPORTABLE,
                out cryptKey));
        }
        else
        {
            // Renewal: import the existing certificate data as a PFX blob and
            // reacquire the key-container handle (HCRYPTPROV) from it.
            CryptoApiBlob certBlob = new CryptoApiBlob();
            certBlob.DataLength = rawData.Length;
            // Copy the certificate bytes into the PFX blob.
            certBlob.Data = Marshal.AllocHGlobal(certBlob.DataLength);
            Marshal.Copy(rawData, 0, certBlob.Data, rawData.Length);
            Check(NativeMethods.PFXIsPFXBlob(ref certBlob)); // sanity-check the PFX blob
            certStore = NativeMethods.PFXImportCertStore(
                ref certBlob,
                passwordPtr,
                CRYPT_EXPORTABLE | CRYPT_USER_KEYSET);
            Check(certStore != IntPtr.Zero);
            // Fetch the (first) certificate from the imported store.
            certContext = NativeMethods.CertEnumCertificatesInStore(
                certStore,
                IntPtr.Zero);
            Check(certContext != IntPtr.Zero);
            // Acquire the key-container handle holding the certificate's private key.
            IntPtr keySpec = IntPtr.Zero;
            Check(NativeMethods.CryptAcquireCertificatePrivateKey(
                certContext,
                0,
                IntPtr.Zero,
                out providerContext,
                out keySpec,
                out callerFreeProvOrNCryptKey));
            // Release handles that are no longer needed.
            Check(NativeMethods.CertCloseStore(certStore, 0));
            Check(NativeMethods.CertFreeCertificateContext(certContext));
            // BUG FIX: clear the fields after freeing so the finally block
            // cannot double-free these handles if a later call throws before
            // they are reassigned below.
            certStore = IntPtr.Zero;
            certContext = IntPtr.Zero;
            Marshal.FreeHGlobal(certBlob.Data);
            // NOTE(review): keySpec is documented as an out DWORD from
            // CryptAcquireCertificatePrivateKey, not an allocated pointer, so
            // FreeHGlobal on it looks wrong — verify the NativeMethods signature.
            if (keySpec != IntPtr.Zero)
                Marshal.FreeHGlobal(keySpec);
        }
        IntPtr errorStringPtr;
        int nameDataLength = 0;
        byte[] nameData;
        // CertStrToNameW reads the x500 string through a raw pointer, so the
        // string must stay pinned until both calls have completed.
        dataHandle = GCHandle.Alloc(x500, GCHandleType.Pinned);
        // First call: measure the encoded name length.
        if (!NativeMethods.CertStrToNameW(
            X509_ASN_ENCODING | PKCS_7_ASN_ENCODING,
            dataHandle.AddrOfPinnedObject(),
            CERT_X500_NAME_STR,
            IntPtr.Zero,
            null,
            ref nameDataLength,
            out errorStringPtr))
        {
            string error = Marshal.PtrToStringUni(errorStringPtr);
            throw new ArgumentException(error);
        }
        nameData = new byte[nameDataLength];
        // Second call: encode the X.500 string into the certificate name.
        if (!NativeMethods.CertStrToNameW(
            X509_ASN_ENCODING | PKCS_7_ASN_ENCODING,
            dataHandle.AddrOfPinnedObject(),
            CERT_X500_NAME_STR,
            IntPtr.Zero,
            nameData,
            ref nameDataLength,
            out errorStringPtr))
        {
            string error = Marshal.PtrToStringUni(errorStringPtr);
            throw new ArgumentException(error);
        }
        dataHandle.Free();
        // Pin the encoded name for the self-sign call below.
        dataHandle = GCHandle.Alloc(nameData, GCHandleType.Pinned);
        CryptoApiBlob nameBlob = new CryptoApiBlob(
            nameData.Length,
            dataHandle.AddrOfPinnedObject());
        // Key provider information for the new certificate.
        CryptKeyProviderInformation kpi = new CryptKeyProviderInformation();
        kpi.ContainerName = containerName;
        kpi.ProviderType = PROV_RSA_FULL;
        kpi.KeySpec = AT_KEYEXCHANGE;
        // Create the self-signed certificate; returns a CERT_CONTEXT pointer.
        certContext = NativeMethods.CertCreateSelfSignCertificate(
            providerContext,
            ref nameBlob,
            0,
            ref kpi,
            IntPtr.Zero, // default = SHA1RSA
            ref startSystemTime,
            ref endSystemTime,
            IntPtr.Zero);
        Check(certContext != IntPtr.Zero);
        dataHandle.Free();
        // Open an in-memory certificate store.
        certStore = NativeMethods.CertOpenStore(
            sz_CERT_STORE_PROV_MEMORY,
            0,
            IntPtr.Zero,
            CERT_STORE_CREATE_NEW_FLAG,
            IntPtr.Zero);
        Check(certStore != IntPtr.Zero);
        // Add the certificate context to the store.
        Check(NativeMethods.CertAddCertificateContextToStore(
            certStore,
            certContext,
            CERT_STORE_ADD_NEW,
            out storeCertContext));
        // Attach the key-provider info as an extended property.
        NativeMethods.CertSetCertificateContextProperty(
            storeCertContext,
            CERT_KEY_PROV_INFO_PROP_ID,
            0,
            ref kpi);
        CryptoApiBlob pfxBlob = new CryptoApiBlob();
        // Export the store (including the private key) as PFX data.
        // First call obtains pfxBlob.DataLength; second call fills pfxData
        // through the pinned pointer.
        Check(NativeMethods.PFXExportCertStoreEx(
            certStore,
            ref pfxBlob,
            passwordPtr,
            IntPtr.Zero,
            EXPORT_PRIVATE_KEYS | REPORT_NO_PRIVATE_KEY | REPORT_NOT_ABLE_TO_EXPORT_PRIVATE_KEY));
        pfxData = new byte[pfxBlob.DataLength];
        dataHandle = GCHandle.Alloc(pfxData, GCHandleType.Pinned);
        pfxBlob.Data = dataHandle.AddrOfPinnedObject();
        Check(NativeMethods.PFXExportCertStoreEx(
            certStore,
            ref pfxBlob,
            passwordPtr,
            IntPtr.Zero,
            EXPORT_PRIVATE_KEYS | REPORT_NO_PRIVATE_KEY | REPORT_NOT_ABLE_TO_EXPORT_PRIVATE_KEY));
        dataHandle.Free();
    }
    // BUG FIX: removed "catch (Exception e) { throw e; }" — rethrowing via
    // "throw e" resets the stack trace and the catch added no other value.
    finally
    {
        if (passwordPtr != IntPtr.Zero)
        {
            // Zero and free the unmanaged password copy.
            Marshal.ZeroFreeCoTaskMemUnicode(passwordPtr);
        }
        if (dataHandle.IsAllocated)
        {
            dataHandle.Free();
        }
        if (certContext != IntPtr.Zero)
        {
            // Release the certificate context.
            NativeMethods.CertFreeCertificateContext(certContext);
        }
        if (storeCertContext != IntPtr.Zero)
        {
            // Release the store's certificate context.
            NativeMethods.CertFreeCertificateContext(storeCertContext);
        }
        if (certStore != IntPtr.Zero)
        {
            // Close the certificate store.
            NativeMethods.CertCloseStore(certStore, 0);
        }
        if (cryptKey != IntPtr.Zero)
        {
            // Destroy the generated key handle.
            NativeMethods.CryptDestroyKey(cryptKey);
        }
        if (providerContext != IntPtr.Zero)
        {
            // Release the CSP handle ...
            NativeMethods.CryptReleaseContext(providerContext, 0);
            // ... then delete the temporary key container.
            NativeMethods.CryptAcquireContextW(
                out providerContext,
                containerName,
                null,
                PROV_RSA_FULL,
                CRYPT_DELETEKEYSET);
        }
        // NOTE(review): callerFreeProvOrNCryptKey is an out BOOL-style flag in
        // the CryptAcquireCertificatePrivateKey API; FreeHGlobal on it looks
        // suspicious — verify the NativeMethods signature.
        if (callerFreeProvOrNCryptKey != IntPtr.Zero)
            Marshal.FreeHGlobal(callerFreeProvOrNCryptKey);
    }
    return pfxData;
}
//private struct FlipStruct
//{
//    public int[] flip;
//}

/// <summary>
/// Configures the native SpecHPSDR analyzer for the panadapter display: computes
/// overlap, clip counts, span clipping and the displayed low/high frequency limits
/// from the current FFT / sample-rate / zoom / pan settings, publishes the limits to
/// the Display class, then pushes everything to the DLL via SetAnalyzer.
/// </summary>
public void initAnalyzer()
{
    //no spur elimination => only one spur_elim_fft and it's spectrum is not flipped
    int[] flip = { 0 };
    // Pin the one-element flip array so the native SetAnalyzer call can read it.
    // NOTE(review): this GCHandle is never freed, so the array stays pinned for the
    // life of the process — confirm whether the native side retains the pointer
    // before adding a Free().
    GCHandle handle = GCHandle.Alloc(flip, GCHandleType.Pinned);
    IntPtr h_flip = handle.AddrOfPinnedObject();
    //PinnedObject<FlipStruct> h_flip = new PinnedObject<FlipStruct>();
    //FlipStruct fs = new FlipStruct();
    //fs.flip = new int[] { 0 };
    //h_flip.ManangedObject = fs;
    int low = 0;
    int high = 0;
    double bw_per_subspan = 0.0;

    switch (data_type)
    {
        case 0: //real fft - in case we want to use for wideband data in the future
            {
                break;
            }
        case 1: //complex fft
            {
                //fraction of the spectrum to clip off each side of each sub-span
                const double CLIP_FRACTION = 0.04;
                //set overlap as needed to achieve the desired frame_rate
                overlap = (int)Math.Max(0.0, Math.Ceiling(fft_size - (double)sample_rate / (double)frame_rate));
                //clip is the number of bins to clip off each side of each sub-span
                clip = (int)Math.Floor(CLIP_FRACTION * fft_size);
                //the amount of frequency in each fft bin (for complex samples) is given by:
                double bin_width = (double)sample_rate / (double)fft_size;
                double bin_width_tx = 96000.0 / (double)fft_size; // NOTE(review): unused in this method
                //the number of useable bins per subspan is
                int bins_per_subspan = fft_size - 2 * clip;
                //the amount of useable bandwidth we get from each subspan is:
                bw_per_subspan = bins_per_subspan * bin_width;
                //the total number of bins available to display is:
                int bins = stitches * bins_per_subspan;
                //apply log function to zoom slider value
                double zoom_slider = Math.Log10(9.0 * z_slider + 1.0);
                //limits how much you can zoom in; higher value means you zoom more
                const double zoom_limit = 100;
                int width = (int)(bins * (1.0 - (1.0 - 1.0 / zoom_limit) * zoom_slider));
                //FSCLIPL is 0 if pan_slider is 0; it's bins-width if pan_slider is 1
                //FSCLIPH is bins-width if pan_slider is 0; it's 0 if pan_slider is 1
                span_clip_l = (int)Math.Floor(pan_slider * (bins - width));
                span_clip_h = bins - width - span_clip_l;
                if (Display.RX1DSPMode == DSPMode.DRM)
                {
                    //Apply any desired frequency offset
                    int bin_offset = (int)(freq_offset / bin_width);
                    if ((span_clip_h -= bin_offset) < 0)
                    {
                        span_clip_h = 0;
                    }
                    span_clip_l = bins - width - span_clip_h;
                }
                //As for the low and high frequencies that are being displayed:
                low = -(int)((double)stitches / 2.0 * bw_per_subspan - (double)span_clip_l * bin_width + bin_width / 2.0);
                high = +(int)((double)stitches / 2.0 * bw_per_subspan - (double)span_clip_h * bin_width - bin_width / 2.0);
                //Note that the bin_width/2.0 factors are included because the complex FFT has one more negative output bin
                //  than positive output bin.
                max_w = fft_size + (int)Math.Min(KEEP_TIME * sample_rate, KEEP_TIME * fft_size * frame_rate);
                break;
            }
    }

    // Route the computed display limits to whichever display this instance drives.
    switch (disp)
    {
        case 0:
            Display.RXDisplayLow = low;
            Display.RXDisplayHigh = high;
            break;
        case 1:
            Display.RX2DisplayLow = low;
            Display.RX2DisplayHigh = high;
            break;
        case 2:
        case 3:
        case 4:
        case 5:
            Display.TXDisplayLow = low;
            Display.TXDisplayHigh = high;
            break;
    }

    NetworkIO.LowFreqOffset = bw_per_subspan;
    NetworkIO.HighFreqOffset = bw_per_subspan;

    // For the main receiver, only push settings to the analyzer when a
    // panadapter-style display mode is active.
    if (disp == 0)
    {
        if (Display.CurrentDisplayMode != DisplayMode.PANADAPTER &&
            Display.CurrentDisplayMode != DisplayMode.WATERFALL &&
            Display.CurrentDisplayMode != DisplayMode.PANAFALL &&
            Display.CurrentDisplayMode != DisplayMode.PANASCOPE)
        {
            return;
        }
    }

    SpecHPSDRDLL.SetAnalyzer(
        disp,
        2,
        spur_eliminationtion_ffts,
        data_type,
        h_flip,
        fft_size,
        blocksize,
        window_type,
        kaiser_pi,
        overlap,
        clip,
        span_clip_l,
        span_clip_h,
        pixels,
        stitches,
        calibration_data_set,
        span_min_freq,
        span_max_freq,
        max_w);
}
void Awake () { Initialize(); Application.runInBackground = true; if(m_configString.Equals("")) { m_configString = "qt: 0 320 30 rgb 0"; } //open the camera Debug.Log("Using Config String \"" + m_configString + "\""); if(VIDEO_openVideo(m_configString,ref m_cameraHandle)!=0) { Debug.Log("VIDEO_openVideo failed"); } //start the camera if(VIDEO_startVideo(m_cameraHandle)!=0) { Debug.Log("VIDEO_startVideo failed"); } VIDEO_getWidth(m_cameraHandle, ref m_width); VIDEO_getHeight(m_cameraHandle, ref m_height); VIDEO_getDepth(m_cameraHandle, ref m_depth); VIDEO_getPixelFormat(m_cameraHandle, ref m_pixelFormat); Debug.Log("Video Depth = " + m_depth + " Format = " + m_pixelFormat); m_vidframe_byte = new Byte[m_width * m_height * (int)(m_depth/8.0f)]; m_vidframeHandle_byte = GCHandle.Alloc(m_vidframe_byte, GCHandleType.Pinned); m_vidpointer_byte = m_vidframeHandle_byte.AddrOfPinnedObject(); m_vidframe = new Color[m_width * m_height]; //this buffer will contain the frame of video m_vidframeHandle = GCHandle.Alloc(m_vidframe, GCHandleType.Pinned); }
/// <summary>
/// Configures the native SpecHPSDR analyzer for the spectrum display: clips the FFT
/// output to the current filter passband, publishes the displayed limits to the
/// Display class, and pushes the settings to the DLL via SetAnalyzer.
/// </summary>
/// <param name="filter_low">Low frequency edge of the filter, in Hz.</param>
/// <param name="filter_high">High frequency edge of the filter, in Hz.</param>
/// <param name="spec_blocksize">Block size passed through to the analyzer.</param>
/// <param name="sample_rate">Current sample rate in Hz.</param>
public void CalcSpectrum(int filter_low, int filter_high, int spec_blocksize, int sample_rate)
{
    //filter_low is the low frequency setting for the filter
    //filter_high is the high frequency setting for the filter
    //samplerate is the current samplerate
    //fft_size is the current FFT size

    //no spur elimination => only one spur_elim_fft and it's spectrum is not flipped
    int[] flip = { 0 };
    // Pin the one-element flip array so the native SetAnalyzer call can read it.
    // NOTE(review): this GCHandle is never freed, so the array stays pinned for the
    // life of the process — confirm whether the native side retains the pointer
    // before adding a Free().
    GCHandle handle = GCHandle.Alloc(flip, GCHandleType.Pinned);
    IntPtr h_flip = handle.AddrOfPinnedObject();
    //PinnedObject<FlipStruct> h_flip = new PinnedObject<FlipStruct>();
    //FlipStruct fs = new FlipStruct();
    //fs.flip = new int[] { 0 };
    //h_flip.ManangedObject = fs;

    // const int extra = 1000;
    //if we allow a little extra spectrum to be displayed on each side of
    //  the filter settings, then, you can look at filter rolloff.  This
    //  seems to happen at least some of the time with the old spectrum display.
    //  "extra" is the amount extra to leave on each side of the filter bandwidth
    //  and is in Hertz.

    //the upper and lower limits of the displayed spectrum would be
    int upper_freq = filter_high; // +extra;
    int lower_freq = filter_low; // -extra;

    //bandwidth to clip off on the high and low sides
    double high_clip_bw = 0.5 * sample_rate - upper_freq;
    double low_clip_bw = 0.5 * sample_rate + lower_freq;

    //calculate the width of each frequency bin
    double bin_width = (double)sample_rate / fft_size;

    //calculate span clip parameters
    int fsclipH = (int)Math.Floor(high_clip_bw / bin_width);
    int fsclipL = (int)Math.Ceiling(low_clip_bw / bin_width);

    //no need for any symmetrical clipping
    int sclip = 0;
    int stitch = 1;

    max_w = fft_size + (int)Math.Min(KEEP_TIME * sample_rate, KEEP_TIME * fft_size * frame_rate);

    Display.RXSpectrumDisplayLow = lower_freq;
    Display.RXSpectrumDisplayHigh = upper_freq;

    // set overlap as needed to achieve the desired frame rate
    overlap = (int)Math.Max(0.0, Math.Ceiling(fft_size - (double)sample_rate / (double)frame_rate));

    SpecHPSDRDLL.SetAnalyzer(
        disp,
        2,
        spur_eliminationtion_ffts,
        data_type,
        h_flip,
        fft_size,
        spec_blocksize,
        window_type,
        kaiser_pi,
        overlap,
        sclip,
        fsclipL,
        fsclipH,
        pixels,
        stitch,
        calibration_data_set,
        span_min_freq,
        span_max_freq,
        max_w);
}
// Use this for initialization
/// <summary>
/// Sets up the Ovrvision stereo camera planes: creates the left/right camera
/// textures, parents the planes to the Oculus eye anchors, configures the selected
/// view shader, pins the pixel buffers handed to the native camera thread, and
/// starts the image update thread.
/// </summary>
void Start()
{
    // Initialize camera plane object(Left)
    go_cameraPlaneLeft = this.transform.FindChild("CameraPlaneLeft").gameObject;
    // Initialize camera plane object(Right)
    go_cameraPlaneRight = this.transform.FindChild("CameraPlaneRight").gameObject;

    //Create cam texture
    go_CamTexLeft = new Texture2D(ovGetImageWidth(), ovGetImageHeight(), TextureFormat.RGB24, false);
    go_CamTexRight = new Texture2D(ovGetImageWidth(), ovGetImageHeight(), TextureFormat.RGB24, false);
    //Cam setting
    go_CamTexLeft.wrapMode = TextureWrapMode.Clamp;
    go_CamTexRight.wrapMode = TextureWrapMode.Clamp;

    //in Oculus Rift camera
    // Cache the anchor lookups instead of repeating GameObject.Find for every use.
    GameObject leftAnchor = GameObject.Find("LeftEyeAnchor");
    if (leftAnchor)
    {
        go_cameraPlaneLeft.transform.parent = leftAnchor.transform;
        go_cameraPlaneLeft.transform.localPosition = new Vector3(0.0f, 0.0f, 5.0f); //Default
        go_cameraPlaneLeft.transform.localRotation = Quaternion.Euler(270.0f, 0.0f, 0.0f);
        Camera cam_L = leftAnchor.GetComponent<Camera>();
        // Hide the right-eye plane from the left-eye camera.
        cam_L.cullingMask = ~(1 << go_cameraPlaneRight.layer);
    }
    GameObject rightAnchor = GameObject.Find("RightEyeAnchor");
    if (rightAnchor)
    {
        go_cameraPlaneRight.transform.parent = rightAnchor.transform;
        go_cameraPlaneRight.transform.localPosition = new Vector3(0.0f, 0.0f, 5.0f);
        go_cameraPlaneRight.transform.localRotation = Quaternion.Euler(270.0f, 0.0f, 0.0f);
        Camera cam_R = rightAnchor.GetComponent<Camera>();
        // Hide the left-eye plane from the right-eye camera.
        cam_R.cullingMask = ~(1 << go_cameraPlaneLeft.layer);
    }

    //Set right eye gap
    const float scale = 0.001f; // 1/1000 [m]:[mm]
    GameObject rig = GameObject.Find("OVRCameraRig");
    if (rig)
        rig.GetComponent<OVRCameraRig>().ovrvisionRightEyeGap =
            scale * new Vector3(ovGetOculusRightGap(0), ovGetOculusRightGap(1), ovGetOculusRightGap(2));

    // Cache the plane materials once; repeated GetComponent<Renderer>().material
    // lookups return the same instanced material and are needlessly expensive.
    Material matLeft = go_cameraPlaneLeft.GetComponent<Renderer>().material;
    Material matRight = go_cameraPlaneRight.GetComponent<Renderer>().material;

    if (camViewShader == 0)
    {
        //Normal shader
        matLeft.shader = Shader.Find("Ovrvision/ovTexture");
        matRight.shader = Shader.Find("Ovrvision/ovTexture");
    }
    else if (camViewShader == 1)
    {
        //Chroma-key shader
        matLeft.shader = Shader.Find("Ovrvision/ovChromaticMask");
        matRight.shader = Shader.Find("Ovrvision/ovChromaticMask");

        matLeft.SetFloat("_Color_maxh", chroma_hue.x);
        matLeft.SetFloat("_Color_minh", chroma_hue.y);
        matLeft.SetFloat("_Color_maxs", chroma_saturation.x);
        matLeft.SetFloat("_Color_mins", chroma_saturation.y);
        matLeft.SetFloat("_Color_maxv", chroma_brightness.x);
        matLeft.SetFloat("_Color_minv", chroma_brightness.y);

        matRight.SetFloat("_Color_maxh", chroma_hue.x);
        matRight.SetFloat("_Color_minh", chroma_hue.y);
        matRight.SetFloat("_Color_maxs", chroma_saturation.x);
        matRight.SetFloat("_Color_mins", chroma_saturation.y);
        matRight.SetFloat("_Color_maxv", chroma_brightness.x);
        matRight.SetFloat("_Color_minv", chroma_brightness.y);
    }

    if (!camStatus)
        return; //Camera open only

    //Get texture pointer
    go_pixelsColorLeft = go_CamTexLeft.GetPixels32();
    go_pixelsColorRight = go_CamTexRight.GetPixels32();
    go_pixelsHandleLeft = GCHandle.Alloc(go_pixelsColorLeft, GCHandleType.Pinned);
    go_pixelsHandleRight = GCHandle.Alloc(go_pixelsColorRight, GCHandleType.Pinned);
    go_pixelsPointerLeft = go_pixelsHandleLeft.AddrOfPinnedObject();
    go_pixelsPointerRight = go_pixelsHandleRight.AddrOfPinnedObject();

    matLeft.mainTexture = go_CamTexLeft;
    matRight.mainTexture = go_CamTexRight;

    // start the image update thread
    ThreadStart();
}
/// <summary> /// Estimates extrinsic camera parameters using known intrinsic parameters and extrinsic parameters for each view. The coordinates of 3D object points and their correspondent 2D projections must be specified. This function also minimizes back-projection error. /// </summary> /// <param name="objectPoints">The array of object points</param> /// <param name="imagePoints">The array of corresponding image points</param> /// <param name="intrin">The intrinsic parameters</param> /// <returns>The extrinsic parameters</returns> public static ExtrinsicCameraParameters FindExtrinsicCameraParams2( MCvPoint3D32f[] objectPoints, PointF[] imagePoints, IntrinsicCameraParameters intrin) { ExtrinsicCameraParameters p = new ExtrinsicCameraParameters(); GCHandle handle1 = GCHandle.Alloc(objectPoints, GCHandleType.Pinned); GCHandle handle2 = GCHandle.Alloc(imagePoints, GCHandleType.Pinned); using (Matrix <float> objectPointMatrix = new Matrix <float>(objectPoints.Length, 3, handle1.AddrOfPinnedObject())) using (Matrix <float> imagePointMatrix = new Matrix <float>(imagePoints.Length, 2, handle2.AddrOfPinnedObject())) CvInvoke.cvFindExtrinsicCameraParams2(objectPointMatrix, imagePointMatrix, intrin.IntrinsicMatrix.Ptr, intrin.DistortionCoeffs.Ptr, p.RotationVector.Ptr, p.TranslationVector.Ptr, 0); handle1.Free(); handle2.Free(); return(p); }
/// <summary>
/// Computes projections of 3D points to the image plane given intrinsic and extrinsic camera parameters.
/// Optionally, the function computes jacobians - matrices of partial derivatives of image points as functions of all the input parameters w.r.t. the particular parameters, intrinsic and/or extrinsic.
/// The jacobians are used during the global optimization in cvCalibrateCamera2 and cvFindExtrinsicCameraParams2.
/// The function itself is also used to compute back-projection error for with current intrinsic and extrinsic parameters.
/// </summary>
/// <remarks>Note, that with intrinsic and/or extrinsic parameters set to special values, the function can be used to compute just extrinsic transformation or just intrinsic transformation (i.e. distortion of a sparse set of points) </remarks>
/// <param name="objectPoints">The array of object points.</param>
/// <param name="extrin">Extrinsic parameters</param>
/// <param name="intrin">Intrinsic parameters</param>
/// <param name="mats">Optional matrix supplied in the following order: dpdrot, dpdt, dpdf, dpdc, dpddist</param>
/// <returns>The array of image points which is the projection of <paramref name="objectPoints"/></returns>
public static PointF[] ProjectPoints(
   MCvPoint3D32f[] objectPoints,
   ExtrinsicCameraParameters extrin,
   IntrinsicCameraParameters intrin,
   params Matrix<float>[] mats)
{
   PointF[] imagePoints = new PointF[objectPoints.Length];

   int matsLength = mats.Length;

   // Pin the managed arrays so the native matrices can alias them directly.
   GCHandle handle1 = GCHandle.Alloc(objectPoints, GCHandleType.Pinned);
   GCHandle handle2 = GCHandle.Alloc(imagePoints, GCHandleType.Pinned);
   try
   {
      using (Matrix<float> pointMatrix = new Matrix<float>(objectPoints.Length, 1, 3, handle1.AddrOfPinnedObject(), 3 * sizeof(float)))
      using (Matrix<float> imagePointMatrix = new Matrix<float>(imagePoints.Length, 1, 2, handle2.AddrOfPinnedObject(), 2 * sizeof(float)))
         CvInvoke.cvProjectPoints2(
            pointMatrix,
            extrin.RotationVector.Ptr,
            extrin.TranslationVector.Ptr,
            intrin.IntrinsicMatrix.Ptr,
            intrin.DistortionCoeffs.Ptr,
            imagePointMatrix,
            matsLength > 0 ? mats[0] : IntPtr.Zero,
            matsLength > 1 ? mats[1] : IntPtr.Zero,
            matsLength > 2 ? mats[2] : IntPtr.Zero,
            matsLength > 3 ? mats[3] : IntPtr.Zero,
            matsLength > 4 ? mats[4] : IntPtr.Zero,
            0.0);
   }
   finally
   {
      // Previously the handles were only freed on the success path, leaking the
      // pinned arrays if cvProjectPoints2 threw.
      handle1.Free();
      handle2.Free();
   }

   return imagePoints;
}
/// <summary> /// Finds perspective transformation H=||h_ij|| between the source and the destination planes /// </summary> /// <param name="srcPoints">Point coordinates in the original plane</param> /// <param name="dstPoints">Point coordinates in the destination plane</param> /// <param name="method">FindHomography method</param> /// <param name="ransacReprojThreshold"> /// The maximum allowed reprojection error to treat a point pair as an inlier. /// The parameter is only used in RANSAC-based homography estimation. /// E.g. if dst_points coordinates are measured in pixels with pixel-accurate precision, it makes sense to set this parameter somewhere in the range ~1..3 /// </param> /// <returns>The 3x3 homography matrix if found. Null if not found.</returns> public static HomographyMatrix FindHomography( PointF[] srcPoints, PointF[] dstPoints, CvEnum.HOMOGRAPHY_METHOD method, double ransacReprojThreshold) { HomographyMatrix homography; GCHandle srcHandle = GCHandle.Alloc(srcPoints, GCHandleType.Pinned); GCHandle dstHandle = GCHandle.Alloc(dstPoints, GCHandleType.Pinned); using (Matrix <float> srcPointMatrix = new Matrix <float>(srcPoints.Length, 2, srcHandle.AddrOfPinnedObject())) using (Matrix <float> dstPointMatrix = new Matrix <float>(dstPoints.Length, 2, dstHandle.AddrOfPinnedObject())) homography = FindHomography(srcPointMatrix, dstPointMatrix, method, ransacReprojThreshold); srcHandle.Free(); dstHandle.Free(); return(homography); }
//send a notification to the already existing instance that a new instance was started private bool NotifyPreviousInstance(object message) { //First, find the window of the previous instance IntPtr handle = NativeMethods.FindWindow(null, _id); if (handle != IntPtr.Zero) { //create a GCHandle to hold the serialized object. GCHandle bufferHandle = new GCHandle(); try { byte[] buffer; NativeMethods.COPYDATASTRUCT data = new NativeMethods.COPYDATASTRUCT(); if (message != null) { //serialize the object into a byte array buffer = Serialize(message); //pin the byte array in memory bufferHandle = GCHandle.Alloc(buffer, GCHandleType.Pinned); data.dwData = 0; data.cbData = buffer.Length; //get the address of the pinned buffer data.lpData = bufferHandle.AddrOfPinnedObject(); } GCHandle dataHandle = GCHandle.Alloc(data, GCHandleType.Pinned); try { NativeMethods.SendMessage(handle, NativeMethods.WM_COPYDATA, IntPtr.Zero, dataHandle.AddrOfPinnedObject()); return(true); } finally { dataHandle.Free(); } } finally { if (bufferHandle.IsAllocated) { bufferHandle.Free(); } } } return(false); }
/// <summary> /// Default constructor. /// </summary> /// <param name="wavDevHandle">Wave in device handle.</param> /// <param name="dataSize">Data buffer size in bytes.</param> public BufferItem(IntPtr wavDevHandle,int dataSize) { m_WavDevHandle = wavDevHandle; m_ThisHandle = GCHandle.Alloc(this); m_pBuffer = new byte[dataSize]; m_DataHandle = GCHandle.Alloc(m_pBuffer,GCHandleType.Pinned); m_Header = new WAVEHDR(); m_Header.lpData = m_DataHandle.AddrOfPinnedObject(); m_Header.dwBufferLength = (uint)dataSize; m_Header.dwBytesRecorded = 0; m_Header.dwUser = (IntPtr)m_ThisHandle; m_Header.dwFlags = 0; m_Header.dwLoops = 0; m_Header.lpNext = IntPtr.Zero; m_Header.reserved = 0; m_HeaderHandle = GCHandle.Alloc(m_Header,GCHandleType.Pinned); m_pEventArgs = new EventArgs<byte[]>(m_pBuffer); }
public bool StartFromMemory(byte[] movieData, string filename, bool loop, bool allowYUV, bool yuvHD, bool ignoreFlips) { Dispose(); Close(); if (movieData == null || movieData.Length < 8) return false; Filename = filename.Trim(); _movieSource = AVProQuickTimePlugin.MovieSource.Memory; _movieMemoryHandle = GCHandle.Alloc(movieData, GCHandleType.Pinned); _movieMemoryPtr = _movieMemoryHandle.AddrOfPinnedObject(); _movieMemoryLength = (UInt32)(movieData.Length); _yuvHD = yuvHD; _ignoreFlips = ignoreFlips; return StartMovie(loop, allowYUV); }
/// <summary>
/// Creates a Direct3D texture resource (1D/2D/3D, optionally a cube map) from the
/// pixel data of a parsed DDS file and returns a shader-resource view over it.
/// </summary>
/// <param name="d3dDevice">Device used to create the resource and the view.</param>
/// <param name="context">Device context (unused here; kept for signature compatibility).</param>
/// <param name="header">Parsed legacy DDS header.</param>
/// <param name="header10">Optional DX10 extension header; read when the FourCC is 'DX10'.</param>
/// <param name="bitData">Raw file bytes containing the pixel data.</param>
/// <param name="offset">Offset into <paramref name="bitData"/> where the pixel data starts.</param>
/// <param name="maxsize">Maximum texture size (unused here; kept for signature compatibility).</param>
/// <param name="isCubeMap">Set to true when the data describes a cube map.</param>
/// <returns>A shader-resource view for the created texture.</returns>
private static ShaderResourceView InitTextureFromData(Device d3dDevice, DeviceContext context, DDS_HEADER header, DDS_HEADER_DXT10? header10, byte[] bitData, int offset, int maxsize, out bool isCubeMap)
{
    int width = header.width;
    int height = header.height;
    int depth = header.depth;

    ResourceDimension resDim = ResourceDimension.Unknown;
    int arraySize = 1;
    Format format = Format.Unknown;
    isCubeMap = false;

    // A mip count of 0 in the header means "just the top level".
    int mipCount = header.mipMapCount;
    if (mipCount == 0)
    {
        mipCount = 1;
    }

    if (((header.ddspf.flags & DDS_FOURCC) > 0) && (MAKEFOURCC('D', 'X', '1', '0') == header.ddspf.fourCC))
    {
        // DX10 extension header carries the format and resource dimension directly.
        DDS_HEADER_DXT10 d3d10ext = header10.Value;

        arraySize = d3d10ext.arraySize;
        if (arraySize == 0)
        {
            throw new ArgumentException("DDS DX10 header has an array size of zero.");
        }

        if (BitsPerPixel(d3d10ext.dxgiFormat) == 0)
        {
            throw new NotSupportedException("DDS DX10 header specifies an unsupported DXGI format.");
        }
        format = d3d10ext.dxgiFormat;

        switch ((ResourceDimension)d3d10ext.resourceDimension)
        {
            case ResourceDimension.Texture1D:
                // D3DX writes 1D textures with a fixed Height of 1
                if ((header.flags & DDS_HEIGHT) > 0 && height != 1)
                {
                    throw new ArgumentException("1D DDS texture must have a height of 1.");
                }
                height = depth = 1;
                break;

            case ResourceDimension.Texture2D:
                //D3D11_RESOURCE_MISC_TEXTURECUBE
                if ((d3d10ext.miscFlag & 0x4) > 0)
                {
                    arraySize *= 6;
                    isCubeMap = true;
                }
                depth = 1;
                break;

            case ResourceDimension.Texture3D:
                if (!((header.flags & DDS_HEADER_FLAGS_VOLUME) > 0))
                {
                    throw new ArgumentException("3D DDS texture is missing the volume flag.");
                }
                if (arraySize > 1)
                {
                    throw new NotSupportedException("Arrays of 3D DDS textures are not supported.");
                }
                break;

            default:
                throw new NotSupportedException("Unknown DDS resource dimension.");
        }

        resDim = (ResourceDimension)d3d10ext.resourceDimension;
    }
    else
    {
        // Legacy header: derive the DXGI format from the pixel-format block.
        format = GetDXGIFormat(header.ddspf);
        if (format == Format.Unknown)
        {
            throw new NotSupportedException("DDS pixel format could not be mapped to a DXGI format.");
        }

        if ((header.flags & DDS_HEADER_FLAGS_VOLUME) > 0)
        {
            resDim = ResourceDimension.Texture3D;
        }
        else
        {
            if ((header.caps2 & DDS_CUBEMAP) > 0)
            {
                // We require all six faces to be defined
                if ((header.caps2 & DDS_CUBEMAP_ALLFACES) != DDS_CUBEMAP_ALLFACES)
                {
                    throw new NotSupportedException("Partial DDS cube maps are not supported; all six faces are required.");
                }
                arraySize = 6;
                isCubeMap = true;
            }
            depth = 1;
            resDim = ResourceDimension.Texture2D;
        }
    }

    Resource resource = null;

    // Pin the file bytes so the subresource boxes can point into them; always
    // unpin, even when resource creation throws (the original leaked the pin on
    // any exception above or inside the constructors).
    GCHandle pinnedArray = GCHandle.Alloc(bitData, GCHandleType.Pinned);
    try
    {
        IntPtr pointer = pinnedArray.AddrOfPinnedObject();
        var boxes = FillInitData(pointer, width, height, depth, mipCount, arraySize, format, 0, 0, offset);

        switch (resDim)
        {
            case ResourceDimension.Texture1D:
                resource = new Texture1D(d3dDevice, new Texture1DDescription()
                {
                    BindFlags = BindFlags.ShaderResource,
                    Format = format,
                    ArraySize = arraySize,
                    Width = width,
                    CpuAccessFlags = CpuAccessFlags.None,
                    MipLevels = mipCount,
                    OptionFlags = ResourceOptionFlags.None,
                    Usage = ResourceUsage.Default,
                }, boxes.ToArray());
                break;

            case ResourceDimension.Texture2D:
                resource = new Texture2D(d3dDevice, new Texture2DDescription()
                {
                    ArraySize = arraySize,
                    BindFlags = BindFlags.ShaderResource,
                    Format = format,
                    Height = height,
                    Width = width,
                    CpuAccessFlags = CpuAccessFlags.None,
                    MipLevels = mipCount,
                    OptionFlags = ResourceOptionFlags.None,
                    SampleDescription = new SampleDescription(1, 0),
                    Usage = ResourceUsage.Default
                }, boxes.ToArray());
                break;

            case ResourceDimension.Texture3D:
                resource = new Texture3D(d3dDevice, new Texture3DDescription()
                {
                    Depth = depth,
                    BindFlags = BindFlags.ShaderResource,
                    Format = format,
                    Height = height,
                    Width = width,
                    CpuAccessFlags = CpuAccessFlags.None,
                    MipLevels = mipCount,
                    OptionFlags = ResourceOptionFlags.None,
                    Usage = ResourceUsage.Default
                }, boxes.ToArray());
                break;
        }
    }
    finally
    {
        pinnedArray.Free();
    }

    if (resource == null)
    {
        // The original fell through here and passed null to ShaderResourceView,
        // which failed with a far less helpful error.
        throw new NotSupportedException("DDS resource dimension does not describe a texture.");
    }

    var resourceView = new ShaderResourceView(d3dDevice, resource);
    return resourceView;
}
public StreamReader( Stream source ) { this.source = source; this.buffer = new byte[2048]; this.hBuffer = GCHandle.Alloc( buffer, GCHandleType.Pinned ); this.pBuffer = hBuffer.AddrOfPinnedObject(); }
public DirectBitmap(int width, int height) { Width = width; Height = height; Bits = new Int32[width * height]; BitsHandle = GCHandle.Alloc(Bits, GCHandleType.Pinned); Bitmap = new Bitmap(width, height, width * 4, PixelFormat.Format32bppPArgb, BitsHandle.AddrOfPinnedObject()); }
public static IntPtr AddrOfPinnedObject(GCHandle gh) { return gh.AddrOfPinnedObject(); }
/// <summary>
/// Initializes an output device.
/// </summary>
/// <param name="device">
/// The device to use... -1 = default device, 0 = no sound, 1 = first real output device.
/// <see cref="GetDeviceInfo" /> can be used to enumerate the available devices.
/// </param>
/// <param name="freq">Output sample rate. </param>
/// <param name="configs">Configures of initialize Bass.</param>
/// <param name="windowHandle">The application's main window... 0 = the desktop window (use this for console applications). </param>
/// <param name="dSoundGuid">
/// Class identifier of the object to create, that will be used to initialize DirectSound... NULL
/// = use default.
/// </param>
/// <remarks>
/// This function must be successfully called before using any sample, stream or MOD music functions. The recording
/// functions may be used without having called this function.
/// <para />
/// Playback is not possible with the "no sound" device, but it does allow the use of "decoding channels", eg. to
/// decode files.
/// <para />
/// Simultaneously using multiple devices is supported in the Bass API via a context switching system; instead of there
/// being an extra "device" parameter in the function calls, the device to be used is set prior to calling the
/// functions. <see cref="SetDevice" /> is used to switch the current device. When successful,
/// <see cref="InitializeBass" /> automatically sets the current thread's device to the one that was just initialized.
/// <para />
/// When using the default device (device = -1), <see cref="GetDevice" /> can be used to find out which device it was
/// mapped to.
/// </remarks>
/// <exception cref="BassNotLoadedException">
/// Bass DLL not loaded, you must use <see cref="Interop.Core.Initialize" /> to
/// load Bass DLL first.
/// </exception>
/// <exception cref="BassErrorException">
/// Some error occur to call a Bass function, check the error code and error message
/// to get more error information.
/// </exception>
public static void InitializeBass(int device, uint freq, InitializationConfig configs, IntPtr windowHandle, Guid? dSoundGuid)
{
    GCHandle? guidHandle = null;
    try
    {
        // Pin the GUID (when supplied) so its address can be handed to the
        // native BASS_Init call.
        if (dSoundGuid != null)
        {
            guidHandle = GCHandle.Alloc(dSoundGuid.Value, GCHandleType.Pinned);
        }

        BassCoreModule.InitializeFunction.CheckResult(
            BassCoreModule.InitializeFunction.Delegate(
                device, freq, configs, windowHandle,
                guidHandle?.AddrOfPinnedObject() ?? IntPtr.Zero));
    }
    finally
    {
        // Previously the handle leaked (stayed pinned forever) whenever
        // CheckResult threw; free it on every path.
        guidHandle?.Free();
    }
}
// Unity audio callback public void OnAudioFilterRead(float[] data, int channels) { if(dataPtr == IntPtr.Zero) { dataHandle = GCHandle.Alloc(data,GCHandleType.Pinned); dataPtr = dataHandle.AddrOfPinnedObject(); } if (islibpdready) { LibPD.Process(numberOfTicks, dataPtr, dataPtr); } }
void InitDepthTexture(GameObject infraPlane) { // confidence (= infrared) image int confi_w, confi_h; getIntelCameraInfraImage(out confi_w, out confi_h); // data handling mTextureInfra = new Texture2D(confi_w, confi_h, TextureFormat.RGB24, false); pixelsInfra = mTextureInfra.GetPixels32(); pixelsHandleInfra = GCHandle.Alloc(pixelsInfra, GCHandleType.Pinned); pixelsPointerInfra = pixelsHandleInfra.AddrOfPinnedObject(); // game object properties infraPlane.GetComponent<Renderer>().material.mainTexture = mTextureInfra; }
private void LoadFileToMemory(string folder, string filename) { string filePath = Path.Combine(folder, filename); // If we're running outside of the editor we may need to resolve the relative path // as the working-directory may not be that of the application EXE. if (!Application.isEditor && !Path.IsPathRooted(filePath)) { string rootPath = Path.GetFullPath(Path.Combine(Application.dataPath, "..")); filePath = Path.Combine(rootPath, filePath); } ReleaseMemoryFile(); if (File.Exists(filePath)) { byte[] bytes = System.IO.File.ReadAllBytes(filePath); if (bytes.Length > 0) { _bytesHandle = GCHandle.Alloc(bytes, GCHandleType.Pinned); _moviePtr = _bytesHandle.AddrOfPinnedObject(); _movieLength = (uint)bytes.Length; _movie.LoadMovieFromMemory(true, filename, _moviePtr, _movieLength); } } }
private void InitMeshVisualizer(GameObject meshVisualizer, Material meshMaterial) { // make array & get pointer floatArray = new float[WIDTH * HEIGHT * 3]; floatArrayHandle = GCHandle.Alloc(floatArray, GCHandleType.Pinned); floatArrayPtr = floatArrayHandle.AddrOfPinnedObject(); // add & set Mesh for visualization MeshFilter meshFilter = meshVisualizer.AddComponent<MeshFilter>(); depthMesh = new Mesh(); depthMesh.name = "rawdepth"; // initial mesh vertices... depthMesh.vertices = new Vector3[WIDTH * HEIGHT]; Vector3[] vertices = depthMesh.vertices; for (int j = 0; j < HEIGHT; j++) for (int i = 0; i < WIDTH; i++) { float x = (i - WIDTH / 2); float y = -(j - HEIGHT / 2); float z = 0.0f; vertices[i + j * WIDTH] = new Vector3(x, y, z); } depthMesh.vertices = vertices; // initial mesh triangles... depthMesh.triangles = new int[(WIDTH - 1) * (HEIGHT - 1) * 6]; int[] triangles = depthMesh.triangles; for (int j = 0; j < HEIGHT - 1; j++) for (int i = 0; i < WIDTH - 1; i++) { int idx = i + j * WIDTH; triangles[6 * (i + j * (WIDTH - 1)) + 0] = idx; triangles[6 * (i + j * (WIDTH - 1)) + 1] = idx + 1; triangles[6 * (i + j * (WIDTH - 1)) + 2] = idx + WIDTH; triangles[6 * (i + j * (WIDTH - 1)) + 3] = idx + 1; triangles[6 * (i + j * (WIDTH - 1)) + 4] = idx + 1 + WIDTH; triangles[6 * (i + j * (WIDTH - 1)) + 5] = idx + WIDTH; } depthMesh.triangles = triangles; // uvs & normals for mesh depthMesh.uv = new Vector2[depthMesh.vertices.Length]; depthMesh.normals = new Vector3[depthMesh.vertices.Length]; // recalculate normals & bounds [DEPRECATED by speed issue] /* depthMesh.Optimize(); depthMesh.RecalculateNormals(); depthMesh.RecalculateBounds(); //*/ // create MeshFilter for GameObject meshFilter.mesh = depthMesh; // add & set Mesh for visualization MeshRenderer meshRenderer = meshVisualizer.AddComponent<MeshRenderer>(); meshRenderer.material = meshMaterial; init = true; }
/// <summary>
/// Loads and parses "tiledata.mul": fills the land-tile and item-tile tables
/// (<c>m_LandData</c>, <c>m_ItemData</c>, <c>m_HeightTable</c>) and the per-group
/// header arrays. Supports both the old and the post-UOAHS record layouts.
/// Does nothing when the file cannot be located.
/// </summary>
public unsafe static void Initialize()
{
    string filePath = Files.GetFilePath("tiledata.mul");
    if (filePath != null)
    {
        using (FileStream fs = new FileStream(filePath, FileMode.Open, FileAccess.Read, FileShare.Read))
        {
            // UOAHS-era clients use the larger tile record layout.
            bool useNeWTileDataFormat = Art.IsUOAHS();
            landheader = new int[512];
            int j = 0;
            m_LandData = new LandData[0x4000];

            byte[] buffer = new byte[fs.Length];
            GCHandle gc = GCHandle.Alloc(buffer, GCHandleType.Pinned);
            long currpos = 0;
            try
            {
                // Stream.Read may return fewer bytes than requested, so loop
                // until the whole file is in memory. The original single
                // fs.Read call ignored the return value and could silently
                // leave the tail of the buffer zeroed.
                int totalRead = 0;
                while (totalRead < buffer.Length)
                {
                    int bytesRead = fs.Read(buffer, totalRead, buffer.Length - totalRead);
                    if (bytesRead <= 0)
                    {
                        throw new EndOfStreamException("Unexpected end of file while reading tiledata.mul");
                    }
                    totalRead += bytesRead;
                }

                // Land tiles: 0x4000 entries in groups of 32, each group
                // preceded by a 4-byte header.
                for (int i = 0; i < 0x4000; i += 32)
                {
                    IntPtr ptrheader = new IntPtr((long)gc.AddrOfPinnedObject() + currpos);
                    currpos += 4;
                    landheader[j++] = (int)Marshal.PtrToStructure(ptrheader, typeof(int));
                    for (int count = 0; count < 32; ++count)
                    {
                        IntPtr ptr = new IntPtr((long)gc.AddrOfPinnedObject() + currpos);
                        if (useNeWTileDataFormat)
                        {
                            currpos += sizeof(NewLandTileDataMul);
                            NewLandTileDataMul cur = (NewLandTileDataMul)Marshal.PtrToStructure(ptr, typeof(NewLandTileDataMul));
                            m_LandData[i + count] = new LandData(cur);
                        }
                        else
                        {
                            currpos += sizeof(OldLandTileDataMul);
                            OldLandTileDataMul cur = (OldLandTileDataMul)Marshal.PtrToStructure(ptr, typeof(OldLandTileDataMul));
                            m_LandData[i + count] = new LandData(cur);
                        }
                    }
                }

                // Item tiles fill the remainder of the file, again in 32-entry
                // groups, each with a 4-byte group header.
                long remaining = buffer.Length - currpos;
                int structsize = useNeWTileDataFormat ? sizeof(NewItemTileDataMul) : sizeof(OldItemTileDataMul);

                itemheader = new int[(remaining / ((structsize * 32) + 4))];
                int itemlength = itemheader.Length * 32;

                m_ItemData = new ItemData[itemlength];
                m_HeightTable = new int[itemlength];

                j = 0;
                for (int i = 0; i < itemlength; i += 32)
                {
                    IntPtr ptrheader = new IntPtr((long)gc.AddrOfPinnedObject() + currpos);
                    currpos += 4;
                    itemheader[j++] = (int)Marshal.PtrToStructure(ptrheader, typeof(int));
                    for (int count = 0; count < 32; ++count)
                    {
                        IntPtr ptr = new IntPtr((long)gc.AddrOfPinnedObject() + currpos);
                        if (useNeWTileDataFormat)
                        {
                            currpos += sizeof(NewItemTileDataMul);
                            NewItemTileDataMul cur = (NewItemTileDataMul)Marshal.PtrToStructure(ptr, typeof(NewItemTileDataMul));
                            m_ItemData[i + count] = new ItemData(cur);
                            m_HeightTable[i + count] = cur.height;
                        }
                        else
                        {
                            currpos += sizeof(OldItemTileDataMul);
                            OldItemTileDataMul cur = (OldItemTileDataMul)Marshal.PtrToStructure(ptr, typeof(OldItemTileDataMul));
                            m_ItemData[i + count] = new ItemData(cur);
                            m_HeightTable[i + count] = cur.height;
                        }
                    }
                }
            }
            finally
            {
                gc.Free();
            }
        }
    }
}
/// <summary>
/// One-time setup for the "VeryClassic" embedded renderer: allocates and pins
/// a 320x200 RGBA32 screen buffer, disables post-processing on the UI camera,
/// swaps the main camera for an orthographic "ClassicCam", attaches audio
/// sources and the controller component to <paramref name="gameObject"/>,
/// loads VeryClassic.dll, and hands the pinned buffer + sound callback to the
/// native side. NOTE(review): the pinned m_screen_handle is never freed here —
/// presumably released in a matching shutdown path; confirm one exists.
/// </summary>
public static void Init(GameObject gameObject)
{
    IsDone = false;
    // Native code renders into a fixed 320x200 32-bit framebuffer.
    m_main_texture = new Texture2D(320, 200, TextureFormat.RGBA32, false);
    m_main_texture.filterMode = FilterMode.Point; // keep the retro pixel look
    m_screen_data = new byte[320 * 200 * 4];
    // Pin so the native DLL can write into the managed array by raw pointer.
    m_screen_handle = GCHandle.Alloc(m_screen_data, GCHandleType.Pinned);
    var bCam = GameObject.Find("ui_camera");
    Debug.Log("bCam: " + bCam);
    if (bCam == null)
    {
        Debug.Log("briefing cam not found???");
    }
    else
    {
        // Post effects would distort the low-res classic output; turn them off.
        var bloom = bCam.gameObject.GetComponent<SENaturalBloomAndDirtyLens>();
        if (bloom != null)
        {
            bloom.enabled = false;
        }
        var post = bCam.gameObject.GetComponent<PostProcessingBehaviour>();
        if (post != null)
        {
            post.enabled = false;
        }
    }
    // Replace the active main camera with a dedicated orthographic camera;
    // the original is kept (deactivated) so it can be restored later.
    Camera mainCam = Camera.main;
    orgCamGO = mainCam.gameObject;
    orgCamGO.SetActive(false);
    camGO = new GameObject("ClassicCam");
    //camGO.transform.position = mainCam.transform.position;
    camGO.transform.parent = orgCamGO.transform.parent;
    // add distance for backside of cockpit
    camGO.transform.localPosition = mainCam.transform.localPosition;
    camGO.transform.localRotation = mainCam.transform.localRotation;
    //camGO.transform.localScale = mainCam.transform.localScale;
    var cam = camGO.AddComponent<Camera>();
    //cam.nearClipPlane = 0.01f;
    //cam.fieldOfView = mainCam.fieldOfView;
    //cam.depth = mainCam.depth;
    cam.orthographic = true;
    cam.orthographicSize = 0.44f; // tuned to frame the 320x200 quad — TODO confirm
    cam.allowHDR = false;
    cam.allowMSAA = false;
    // One AudioSource per mixing channel used by the native sound callback
    // (channel count is asrcs.Length, declared elsewhere).
    for (var i = 0; i < asrcs.Length; i++)
    {
        asrcs[i] = gameObject.AddComponent<AudioSource>();
    }
    gameObject.AddComponent<VeryClassicController>();
    // Keep a reference to the delegate so the GC cannot collect it while the
    // native side holds the function pointer.
    play = new SoundPlay3DHandler(SoundPlay3D);
    IntPtr x = LoadLibrary("VeryClassic.dll");
    Debug.Log("VeryClassicLib: " + x);
    // NOTE(review): x == IntPtr.Zero (load failure) is only logged, not handled.
    VeryClassicSettings(GameplayManager.DifficultyLevel, MenuManager.opt_auto_leveling >= 2 ? 1 : 0, MenuManager.opt_volume_sfx, MenuManager.opt_volume_music, GameplayManager.Level.LevelNum + 1);
    int ret = VeryClassicInit(m_screen_handle.AddrOfPinnedObject(), play);
    Debug.Log("VeryClassicInit: " + ret);
#if false
    m_main_texture = new Texture2D(320, 200, TextureFormat.RGBA32, false);
    m_main_texture.filterMode = FilterMode.Point;
    //Debug.Log("name1: " + gameObject.GetComponent<MeshRenderer>().sharedMaterial.GetTexture("_MainTex").name);
    gameObject.GetComponent<MeshRenderer>().sharedMaterial.SetTexture("_MainTex", m_main_texture);
    Debug.Log("name2: " + gameObject.GetComponent<MeshRenderer>().sharedMaterial.GetTexture("_MainTex").name);
#endif
    GameManager.MaybeLockCursor();
}
public unsafe static bool CheckBytes (GCHandle h, int pos, byte val) { IntPtr p = h.AddrOfPinnedObject (); byte* a = (byte*) p.ToPointer (); return a [pos] != val; }
/// <summary> /// Pins a byte[] in memory and reads the SecureString's unmanaged content into that byte[] /// </summary> private void SecureStringToBytes() { ClearMemory(); unsafe { if (SecureString != null) { _bytes = new byte[_secureString.Length * 2]; //Unicode, so two bytes per char _gcHandleBytes = new GCHandle(); RuntimeHelpers.PrepareConstrainedRegions(); try { } finally { _gcHandleBytes = GCHandle.Alloc(_bytes, GCHandleType.Pinned); } IntPtr bytePtr = IntPtr.Zero; RuntimeHelpers.ExecuteCodeWithGuaranteedCleanup( delegate { RuntimeHelpers.PrepareConstrainedRegions(); try { } finally { bytePtr = Marshal.SecureStringToGlobalAllocUnicode(_secureString); //ensure this finishes, only to ensure we can clean up } RuntimeHelpers.PrepareConstrainedRegions(); try { byte *pByteArray = (byte *)bytePtr; byte *pInsecureByteArray = (byte *)_gcHandleBytes.AddrOfPinnedObject(); for (int i = 0; i < _secureString.Length * 2; i = i + 2) { pInsecureByteArray[i] = pByteArray[i]; pInsecureByteArray[i + 1] = pByteArray[i + 1]; //throw new Exception("ups"); the ClearMemory is triggered and the CER-finally enrues possible already written bytes are cleared } } catch { ClearMemory(); } }, delegate { if (bytePtr != IntPtr.Zero) { Marshal.ZeroFreeGlobalAllocUnicode(bytePtr); } }, null); } } }