/// <inheritdoc />
/// <remarks>
/// Connects to the BLE anemometer via BlueZ over D-Bus and subscribes to its
/// wind-data GATT characteristic. Each stage is retried up to 3 times with a
/// 3-second pause between attempts.
/// </remarks>
/// <exception cref="InvalidOperationException">
/// Thrown when connecting or configuring the listener still fails after the
/// final retry. (Previously a bare <see cref="Exception"/> was thrown.)
/// </exception>
public void Start()
{
    _device = _connection.System.GetObject<Device1>(
        BlueZPath.Service, BlueZPath.Device(_adapterName, _deviceAddress));

    const int retries = 3;

    // Stage 1: connect to the device, retrying on failure.
    for (int i = 0; i < retries; i++)
    {
        try
        {
            _logger.Info("Connecting...");
            _device.Connect();
            _logger.Info("Connected");
            // Give BlueZ time to settle/resolve services before we query them below.
            System.Threading.Thread.Sleep(3000);
            break;
        }
        catch (Exception ex)
        {
            _logger.Warn("Failed", ex);
            // We can't really do much other than try again.
            if (i == retries - 1)
            {
                throw new InvalidOperationException("Failed to connect to BLE Anemometer", ex);
            }
            System.Threading.Thread.Sleep(3000);
        }
    }

    // Reading a property forces the remote object proxy to resolve; the value
    // itself is not used further.
    string name = _device.Name;

    // Stage 2: subscribe to the read characteristic, retrying on failure.
    for (int i = 0; i < retries; i++)
    {
        try
        {
            var readCharPath = BlueZPath.GattCharacteristic(_adapterName, _deviceAddress, _serviceId, _readCharId);
            _readChar = _connection.System.GetObject<GattCharacteristic1>(BlueZPath.Service, readCharPath);
            _properties = _connection.System.GetObject<Properties>(BlueZPath.Service, readCharPath);
            _readChar.StartNotify();
            InitializePropertyListener();
            _logger.Info("Now listening for wind data");
            break;
        }
        catch (Exception ex)
        {
            _logger.Warn("Failed to configure listener", ex);
            if (i == retries - 1)
            {
                throw new InvalidOperationException("Are you sure BlueZ is running in experimental mode?", ex);
            }
            System.Threading.Thread.Sleep(3000);
        }
    }
}
/// <summary>
/// Creates the D3D wrapper over the given DXGI reference; all device-side
/// members start out unset until the device is actually created.
/// </summary>
public D3D(Ref<Dxgi> dxgi)
{
    // Device resources are populated later; only the DXGI handle is kept now.
    Device = default;
    Context = default;
    RenderTargetView = default;
    Dxgi = dxgi;
}
// Construction and teardown

/// <summary>
/// Constructor.
/// Builds a font image from the specified sprite-sheet image file and a YAML
/// file listing the glyph rectangles.
/// </summary>
/// <param name="文字幅補正dpx">Horizontal spacing between characters (dpx); unaffected by scaling. Negative values are allowed.</param>
/// <param name="透明度">Opacity: 0 = transparent … 1 = opaque.</param>
public フォント画像(Device1 d3dDevice1, VariablePath 文字盤の画像ファイルパス, VariablePath 文字盤の矩形リストファイルパス, float 文字幅補正dpx = 0f, float 透明度 = 1f)
{
    this._文字盤 = new 画像(d3dDevice1, 文字盤の画像ファイルパス);
    this._矩形リスト = new 矩形リスト(文字盤の矩形リストファイルパス);
    this.文字幅補正dpx = 文字幅補正dpx;
    this.透明度 = 透明度;
}
/// <summary>
/// Arranges two mocked devices, a factory that resolves them by id, and the
/// device manager under test.
/// </summary>
protected override void GivenThat()
{
    base.GivenThat();

    // NOTE: .NET4 does not play well with Rhino when mocking COM interfaces:
    // https://stackoverflow.com/questions/3444581/mocking-com-interfaces-using-rhino-mocks
    // If .NET4 is required, uncomment this line:
    //Castle.DynamicProxy.Generators.AttributesToAvoidReplicating.Add(typeof(TypeIdentifierAttribute));

    PortableDeviceManager = new MockPortableDeviceManager();
    PortableDevice = GenerateMock<IPortableDevice>();

    Device1 = GenerateMock<IDevice>();
    Device1.Stub(d => d.Name).Return("Device 1");
    Device1.Stub(d => d.Id).Return("Device_Id_1");

    Device2 = GenerateMock<IDevice>();
    Device2.Stub(d => d.Name).Return("Device 2");
    Device2.Stub(d => d.Id).Return("Device_Id_2");

    DeviceFactory = GenerateMock<IDeviceFactory>();
    DeviceFactory.Stub(f => f.CreateDevice("Device_Id_1")).Return(Device1);
    DeviceFactory.Stub(f => f.CreateDevice("Device_Id_2")).Return(Device2);

    DeviceManager = new DeviceManager(PortableDeviceManager, DeviceFactory);
}
/// <summary>
/// Arranges the mocked portable-device infrastructure: two named devices, a
/// factory that maps ids to them, and the device manager under test.
/// </summary>
protected override void GivenThat()
{
    base.GivenThat();

    PortableDeviceManager = new MockPortableDeviceManager();
    PortableDevice = GenerateMock<IPortableDevice>();

    Device1 = GenerateMock<IDevice>();
    Device1.Stub(d => d.Name).Return("Device 1");
    Device1.Stub(d => d.Id).Return("Device_Id_1");

    Device2 = GenerateMock<IDevice>();
    Device2.Stub(d => d.Name).Return("Device 2");
    Device2.Stub(d => d.Id).Return("Device_Id_2");

    DeviceFactory = GenerateMock<IDeviceFactory>();
    DeviceFactory.Stub(f => f.CreateDevice("Device_Id_1")).Return(Device1);
    DeviceFactory.Stub(f => f.CreateDevice("Device_Id_2")).Return(Device2);

    DeviceManager = new DeviceManager(PortableDeviceManager, DeviceFactory);
}
/// <summary>
/// Builds the GPU-side geometry (vertex and index buffers) for this model
/// and marks the model as ready for rendering.
/// </summary>
/// <param name="device">Direct3D device used to allocate the buffers.</param>
public void Create(Device1 device)
{
    // Triangle list is the only topology this model uses.
    PrimitiveTopology = PrimitiveTopology.TriangleList;

    CreateVertexBuffer(device);
    CreateIndexBuffer(device);

    IsCreated = true;
}
/// <summary>
/// Creates the default device via the factory and stores its
/// <see cref="Device1"/> interface. Must not be called twice.
/// </summary>
protected void Initialize()
{
    Requires.State(!IsInitialized);
    var defaultDevice = DeviceFactory.CreateDevice();
    // NOTE(review): defaultDevice is never disposed here; if QueryInterface
    // adds a COM reference (SharpDX semantics) the base device may leak — confirm.
    _device = defaultDevice.QueryInterface <Device1>();
}
/// <summary>
/// Stores the Direct3D device and pushes <c>ContentValue</c> into the
/// effect's shader-resource variable identified by <c>ShaderName</c>.
/// </summary>
public void SetDevice(Device1 d)
{
    _device = d;

    var resourceVariable = ShaderViewPort.Shader.Effect
        .GetVariableByName(ShaderName)
        .AsShaderResource();
    resourceVariable.SetResource(new ShaderResourceView(_device, ContentValue));
}
/// <summary>
/// Loads the model file via Assimp (triangulated, with smoothed normals and
/// tangent space) and builds immutable vertex/index buffers from it.
/// </summary>
/// <param name="device">Direct3D 10 device used to allocate the buffers.</param>
public void Create(Device1 device) { PrimitiveTopology = PrimitiveTopology.TriangleList; VertexStride = Marshal.SizeOf <VertexPosColNormTex>(); var verts = new List <VertexPosColNormTex>(); var importer = new AssimpContext();
    // NOTE(review): `s` is unused — likely left over from debugging.
    var s = importer.GetSupportedImportFormats();
    // NOTE(review): the "{1}" in this message is never substituted — it prints literally.
    if (!importer.IsImportFormatSupported(Path.GetExtension(_FileName))) { throw new ArgumentException( "Model format " + Path.GetExtension(_FileName) + " is not supported! Cannot load {1}", "filename"); } var model = importer.ImportFile(_FileName, PostProcessSteps.GenerateSmoothNormals | PostProcessSteps.CalculateTangentSpace | PostProcessSteps.Triangulate);
    foreach (var mesh in model.Meshes) { for (int i = 0; i < mesh.VertexCount; i++) { var pos = new Vector3(mesh.Vertices[i].X, mesh.Vertices[i].Y, mesh.Vertices[i].Z);
        // Flat placeholder color for every vertex.
        var color = Color.AliceBlue; var norm = new Vector3(mesh.Normals[i].X, mesh.Normals[i].Y, mesh.Normals[i].Z);
        // V is negated — presumably to flip the texture-coordinate origin between Assimp and D3D; confirm.
        var tx = new Vector2(mesh.TextureCoordinateChannels[0][i].X, -mesh.TextureCoordinateChannels[0][i].Y); verts.Add(new VertexPosColNormTex(pos, color, norm, tx)); }
        // NOTE(review): both buffers are re-created on EVERY mesh iteration. For
        // multi-mesh models only the last mesh's index buffer survives (earlier
        // Buffer instances leak, undisposed) while `verts` keeps accumulating
        // vertices from all meshes — confirm models loaded here are single-mesh.
        var bufferDesc = new BufferDescription { Usage = ResourceUsage.Immutable, BindFlags = BindFlags.VertexBuffer, SizeInBytes = VertexStride * verts.Count, CpuAccessFlags = CpuAccessFlags.None, OptionFlags = ResourceOptionFlags.None }; VertexBuffer = new Buffer(device, DataStream.Create(verts.ToArray(), false, false), bufferDesc); IndexCount = mesh.GetIndices().Length; //buffer creations
        bufferDesc = new BufferDescription { Usage = ResourceUsage.Immutable, BindFlags = BindFlags.IndexBuffer, SizeInBytes = sizeof(uint) * IndexCount, CpuAccessFlags = CpuAccessFlags.None, OptionFlags = ResourceOptionFlags.None }; IndexBuffer = new Buffer(device, DataStream.Create(mesh.GetIndices(), false, false), bufferDesc); } }
/// <summary>
/// Compiles the fx_4_0 effect from Resources\PosNormTex3D.fx and builds the
/// input layout from the signature of the first pass of its first technique.
/// </summary>
public void Create(Device1 device)
{
    var shaderBytecode = ShaderBytecode.CompileFromFile(
        "Resources\\PosNormTex3D.fx", "fx_4_0", shaderFlags: ShaderFlags.None);

    Effect = new Effect(device, shaderBytecode);
    Technique = Effect.GetTechniqueByIndex(0);

    var firstPass = Technique.GetPassByIndex(0);
    InputLayout = new InputLayout(device, firstPass.Description.Signature, InputLayouts.PosNormColTex);
}
/// <summary>
/// Creates the Direct2D factory and a BGRA-capable hardware Direct3D device.
/// On Windows Store (NETFX_CORE) builds the <c>Device1</c> interface is
/// queried from a base device; otherwise it is created directly at feature
/// level 10.0.
/// </summary>
public DrawingBackend() { Factory = new Factory(FactoryType.SingleThreaded, DebugLevel.None);
#if NETFX_CORE
    using (var device = new Device(DriverType.Hardware, DeviceCreationFlags.BgraSupport)) { Device = device.QueryInterface <Device1>(); }
#else
    Device = new Device1(DriverType.Hardware, DeviceCreationFlags.BgraSupport, FeatureLevel.Level_10_0);
#endif
}
/// <summary>
/// Loads and compiles the skinned-vertex effect from source, then creates the
/// matching input layout from the first technique/pass signature.
/// </summary>
public void Create(Device1 device)
{
    // Compile the effect from its HLSL source file.
    var shaderSource = File.ReadAllText("Resources/Effects/PosColNorm3DSkinned.fx");
    var shaderByteCode = ShaderBytecode.Compile(shaderSource, "fx_4_0", ShaderFlags.None, EffectFlags.None);
    Effect = new SharpDX.Direct3D10.Effect(device, shaderByteCode);
    Technique = Effect.GetTechniqueByIndex(0);

    // Build the input layout from the pass signature.
    var firstPass = Technique.GetPassByIndex(0);
    InputLayout = new InputLayout(device, firstPass.Description.Signature, InputLayouts.SkinnedVertex);
}
/// <summary>
/// Compiles the effect file, validates the compilation result, and builds the
/// effect, technique, input parameters and input layout for rendering.
/// </summary>
/// <param name="device">Direct3D device used to create the GPU objects.</param>
/// <exception cref="InvalidOperationException">Thrown when shader compilation fails.</exception>
public void Create(Device1 device)
{
    CompilationResult shaderByteCode = ShaderBytecode.CompileFromFile(FxFileName, "fx_4_0", ShaderFlags.None);
    if (shaderByteCode.Bytecode == null)
    {
        // Surface the compiler diagnostics instead of a null-reference later on.
        // (Was a bare `Exception`; InvalidOperationException is more specific and
        // still caught by existing catch (Exception) handlers.)
        throw new InvalidOperationException($"Failed to compile shader, message: {shaderByteCode.Message}");
    }

    InputParameters = CreateInputLayout(device);
    Effect = new Effect(device, shaderByteCode);
    Technique = Effect.GetTechniqueByIndex(0);

    var pass = Technique.GetPassByIndex(0);
    InputLayout = new InputLayout(device, pass.Description.Signature, InputParameters);
}
/// <summary>
/// Creates an empty image (texture) of the specified size.
/// </summary>
public 画像(Device1 d3dDevice1, Size2F サイズ, BindFlags bindFlags = BindFlags.ShaderResource)
{
    //using var _ = new LogBlock( Log.現在のメソッド名 );

    #region " Validation "
    //----------------
    if (0f >= サイズ.Width || 0f >= サイズ.Height)
    {
        // Invalid texture size: log and bail out without creating anything.
        Log.ERROR($"テクスチャサイズが不正です。0 より大きい正の値を指定してください。[{サイズ}]");
        return;
    }
    //----------------
    #endregion

    this._空のテクスチャとそのシェーダーリソースビューを作成して返す(d3dDevice1, bindFlags, サイズ);
}
/// <summary>
/// Creates the D3D11 device and immediate context, then hands the DXGI device
/// to the swap-chain layer for the given window and composition mode.
/// </summary>
/// <param name="hwnd">Native window handle the swap chain targets.</param>
/// <param name="size">Initial back-buffer size.</param>
/// <param name="compositionType">How the output is composed onto the window.</param>
public void Create( IntPtr hwnd, Size2 size, CompositionType compositionType)
{
    // Picks Debug flags in debug builds, None otherwise.
    DebugSelect(DeviceCreationFlags.Debug, DeviceCreationFlags.None, out DeviceCreationFlags creationFlags);
    // BgraSupport is presumably required for Direct2D interop — confirm;
    // SingleThreaded skips internal locking.
    creationFlags |= DeviceCreationFlags.BgraSupport | DeviceCreationFlags.SingleThreaded;
    Device = CreateDevice(creationFlags);
    Context = Device.ImmediateContext1;
    Dxgi().Initialize(Device.QueryInterface <DXGI.Device2>(), hwnd, compositionType, size);
    Connect();
}
/// <summary>
/// Stores the rendering objects, loads the Minecraft OBJ model from the
/// application's Resources folder, and creates the shader used to draw it.
/// </summary>
public void Initialize(Device1 device, RenderTargetView renderTarget, DX10RenderCanvas canvasControl)
{
    _device = device;
    _renderTargetView = renderTarget;
    _renderControl = canvasControl;

    //Set Model (IModel)
    // Model = new BoxModel();
    //Model.Create(_device);

    // The model file lives next to the executable.
    var filepath = System.AppDomain.CurrentDomain.BaseDirectory;
    filepath += "\\Resources\\Model\\MinecraftModel.obj";
    Model = new AssimpModel(filepath);
    Model.Create(_device);

    //Set Shader (IEffect)
    Shader = new PosNormTex();
    Shader.Create(device);
    //Shader.SetTexture("C:\\Users\\NielsR\\Documents\\DAE 2016-2017\\TOOL DEVELOPMENT\\Week_11_-_DirectX_Image_Control\\d\\DirextXFirstProject\\ Minecraft_skin_editor\\scichart-surface-mesh-8x8-3d-300x187.png", _device);
}
/// <summary>
/// Sets up the shared graphics context, an orbit camera, the reference grid
/// and the particle emitter for this viewport.
/// </summary>
public void Initialize(Device1 device, RenderTargetView renderTarget, DX10RenderCanvas canvasControl)
{
    // Populate the shared graphics context.
    GraphicsContext.Device = device;
    GraphicsContext.RenderTargetView = renderTarget;
    GraphicsContext.RenderControl = canvasControl;

    // Orbit camera, reset to its default -45°/-45° orientation.
    var camera = new OrbitCamera(canvasControl)
    {
        ResetAngles = new Vector3(-MathUtil.PiOverFour, -MathUtil.PiOverFour, 0)
    };
    camera.Reset();
    GraphicsContext.Camera = camera;

    // Reference grid.
    _grid = new Grid();
    _grid.Initialize(GraphicsContext);

    // Particle emitter.
    ParticleEmitter = new ParticleEmitter(GraphicsContext);
    ParticleEmitter.Intialize();

    DebugLog.Log("Initialized", "Direct3D");
}
// Construction and teardown

/// <summary>
/// Creates an image from the specified image file.
/// </summary>
public 画像(Device1 d3dDevice1, VariablePath 画像ファイルパス, BindFlags bindFlags = BindFlags.ShaderResource)
{
    //using var _ = new LogBlock( Log.現在のメソッド名 );

    #region " Validation "
    //----------------
    if (string.IsNullOrEmpty(画像ファイルパス.数なしパス))
    {
        // No path given: log and leave the image uncreated.
        Log.ERROR($"画像ファイルパスの指定がありません。");
        return;
    }
    if (!File.Exists(画像ファイルパス.数なしパス))
    {
        // File missing: log and leave the image uncreated.
        Log.ERROR($"画像ファイルが存在しません。[{画像ファイルパス.変数付きパス}]");
        return;
    }
    //----------------
    #endregion

    this._画像ファイルからシェーダリソースビューを作成して返す(d3dDevice1, bindFlags, 画像ファイルパス);
}
/// <summary>
/// Creates the client and initializes the members that will request a Twitter
/// authorization and perform search tasks.
/// </summary>
/// <param name="device">DirectX device.</param>
/// <param name="factory">Imaging factory.</param>
public TwitterClient(Device1 device, ImagingFactory2 factory)
{
    this.authorizer = new WinRtAuthorizer()
    {
        Credentials = new LocalDataCredentials()
        {
            // Deliberate compile-time stop: real keys must be filled in before building.
#error Please create an application in Twitter and set your own consumer key and secret.
            ConsumerKey = null,
            ConsumerSecret = null,
        },
        UseCompression = true,
        Callback = new Uri("http://followercatcher.codeplex.com/"),
    };
    this.device = device;
    this.factory = factory;
    this.AvatarTextures = new List <Texture>();
    this.Tweets = new Dictionary <string, TweetInfo>();
    // Warm the avatar cache from disk so previously seen users render immediately.
    LoadTexturesFromCache();
}
/// <summary>
/// Updates the stored record for the reporting device (matched by MAC) with
/// its latest sensor readings.
/// </summary>
/// <param name="arduino">Posted payload; <c>MAC</c> identifies the row to update.</param>
/// <returns>200 on success, 404 when the MAC is unknown, 500 when saving fails.</returns>
public async Task <IActionResult> Set1([FromBody] Device1 arduino)
{
    using var _context = new WebAppContext();
    var device = await _context.Devices1.FindAsync(arduino.MAC);
    if (device == null)
    {
        // BUGFIX: an unknown MAC previously fell through to a NullReferenceException.
        return NotFound();
    }
    // NOTE(review): stores server-local time — consider DateTime.UtcNow.
    device.LastUpdate = DateTime.Now;
    device.ERROR = arduino.ERROR;
    device.Temperature = arduino.Temperature;
    device.Humidity = arduino.Humidity;
    try
    {
        await _context.SaveChangesAsync();
    }
    catch (DbUpdateException)
    {
        return StatusCode(500); //Internal server error
    }
    return Ok();
}
/// <summary>
/// Initializes the window and binds each device TextBlock to the matching
/// InfoN property on <c>DataList</c>.
/// </summary>
public BusWindow()
{
    InitializeComponent();

    // Device1..Device5 bind to Info1..Info5 respectively.
    var targets = new[] { Device1, Device2, Device3, Device4, Device5 };
    for (int i = 0; i < targets.Length; i++)
    {
        targets[i].SetBinding(
            TextBlock.TextProperty,
            new Binding($"Info{i + 1}") { Source = DataList });
    }
}
/// <summary>Wraps the given native <see cref="Device1"/> proxy.</summary>
public Device(Device1 native) => this.native = native;
/// <summary>
/// Creates a <see cref="T:SharpDX.Direct3D11.BlendState1"/> from the given
/// description on the supplied device.
/// </summary>
/// <param name="device">Device that owns the new state object.</param>
/// <param name="description">Description of the blend state to create.</param>
public BlendState1(Device1 device, SharpDX.Direct3D11.BlendStateDescription1 description)
    : base(IntPtr.Zero)
{
    // The native call fills in this wrapper's pointer.
    device.CreateBlendState1(ref description, this);
}
/// <summary>Wraps the given native <see cref="Device1"/> proxy as a peripheral.</summary>
public Peripheral(Device1 native) => this.native = native;
/* DEVICE MANAGER METHODS */

/// <summary>
/// Initialize resources and trigger an initialization event for all registered listeners.
/// NOTE(review): no listener event is raised in this visible body — confirm
/// whether the event fires elsewhere or the summary is stale.
/// </summary>
public void Initialize(Tesseract gameEngine)
{
    // Release any pre-existing references
    ReleaseResources();

    // Retrieve the Direct3D 11.1 device (base device is disposed once queried)
    using (var device = new Device(DriverType.Hardware, DeviceCreationFlags.BgraSupport, FeatureLevel))
    {
        Device3D = ToDispose(device.QueryInterface<Device1>());
    }

    // Get the Direct3D 11.1 context
    Context3D = ToDispose(Device3D.ImmediateContext.QueryInterface<DeviceContext1>());

    // Create the remaining factories: Direct2D, DirectWrite, WIC
    Factory2D = ToDispose(new SharpDX.Direct2D1.Factory1(SharpDX.Direct2D1.FactoryType.SingleThreaded));
    FactoryDW = ToDispose(new SharpDX.DirectWrite.Factory(SharpDX.DirectWrite.FactoryType.Shared));
    FactoryWIC = ToDispose(new SharpDX.WIC.ImagingFactory2());

    // Create the Direct2D device on top of the DXGI device
    using (var device = Device3D.QueryInterface<SharpDX.DXGI.Device>())
    {
        Device2D = ToDispose(new SharpDX.Direct2D1.Device(Factory2D, device));
    }

    // Create the Direct2D context
    Context2D = ToDispose(new SharpDX.Direct2D1.DeviceContext(Device2D, SharpDX.Direct2D1.DeviceContextOptions.None));
}
/// <summary>
/// Creates all D3D resources for the sprite batch (shaders, input layout,
/// sampler, blend state, constant buffers, unit-quad vertex buffer) and binds
/// them to the immediate context's pipeline stages.
/// </summary>
/// <param name="d3dDevice">Direct3D 11.1 device used to create resources.</param>
/// <param name="d3dContext">Immediate context the pipeline is configured on.</param>
/// <param name="capacity">Maximum number of sprites per batch (default 1024).</param>
public void Initialize(Device1 d3dDevice, DeviceContext1 d3dContext, int capacity = 1024)
{
    m_d3dDevice = d3dDevice;
    m_d3dContext = d3dContext;
    m_capacity = capacity;

    // Load the precompiled shader objects from the installed app package.
    var path = Windows.ApplicationModel.Package.Current.InstalledLocation.Path;
    var vertexShaderByteCode = NativeFile.ReadAllBytes(path + "\\Assets\\SpriteBatch.vs.cso");
    m_vertexShader = new VertexShader(m_d3dDevice, vertexShaderByteCode);
    m_pixelShader = new PixelShader(d3dDevice, NativeFile.ReadAllBytes(path + "\\Assets\\SpriteBatch.ps.cso"));

    // Layout from VertexShader input signature
    m_layout = new InputLayout(d3dDevice, vertexShaderByteCode, new[]
    {
        new InputElement("POSITION", 0, SharpDX.DXGI.Format.R32G32B32A32_Float, 0, 0),
        new InputElement("TEXCOORD", 0, SharpDX.DXGI.Format.R32G32_Float, 16, 0),
        new InputElement("COLOR", 0, SharpDX.DXGI.Format.R32G32B32A32_Float, 24, 0),
    });

    // Default sampler (see SamplerStateDescription.Default for the settings).
    SamplerStateDescription samplerDesc = SharpDX.Direct3D11.SamplerStateDescription.Default();
    m_sampler = new SamplerState(d3dDevice, samplerDesc);

    // Earlier blend-state experiment, kept for reference:
    //BlendStateDescription1 blendDesc = new BlendStateDescription1();
    //blendDesc.AlphaToCoverageEnable = true; //set to true to get nice blending betweent sprites
    //blendDesc.IndependentBlendEnable = false;
    //blendDesc.RenderTarget[0].IsBlendEnabled = true;
    //blendDesc.RenderTarget[0].IsLogicOperationEnabled = false;
    //blendDesc.RenderTarget[0].SourceBlend = BlendOption.SourceColor;
    //blendDesc.RenderTarget[0].DestinationBlend = BlendOption.SourceAlphaSaturate;
    //blendDesc.RenderTarget[0].BlendOperation = BlendOperation.Add;
    //blendDesc.RenderTarget[0].SourceAlphaBlend = BlendOption.One;
    //blendDesc.RenderTarget[0].DestinationAlphaBlend = BlendOption.One
    //blendDesc.RenderTarget[0].AlphaBlendOperation = BlendOperation.Maximum; // set to maximum to blend 2 sprites nicely over each other
    //blendDesc.RenderTarget[0].RenderTargetWriteMask = ColorWriteMaskFlags.All;
    //m_blendStateAlpha = new BlendState1(d3dDevice, blendDesc);

    // Active blend state: additive alpha with Maximum alpha-op.
    var description = BlendStateDescription1.Default();
    description.RenderTarget[0].IsBlendEnabled = true;
    description.RenderTarget[0].SourceBlend = BlendOption.SourceAlpha;
    description.RenderTarget[0].DestinationBlend = BlendOption.One;
    description.RenderTarget[0].SourceAlphaBlend = BlendOption.SourceAlpha;
    description.RenderTarget[0].DestinationAlphaBlend = BlendOption.One;
    description.RenderTarget[0].BlendOperation = BlendOperation.Add;
    description.RenderTarget[0].IsLogicOperationEnabled = false;
    description.RenderTarget[0].AlphaBlendOperation = BlendOperation.Maximum;
    description.RenderTarget[0].RenderTargetWriteMask = ColorWriteMaskFlags.All;
    description.AlphaToCoverageEnable = true; //<==RT DOES NOT WORK
    description.IndependentBlendEnable = false;
    m_blendStateAlpha = new BlendState1(d3dDevice, description);

    //[BELOW] Windows RT this does not work
    //var description = BlendStateDescription1.Default();
    //description.RenderTarget[0].IsBlendEnabled = true;
    //description.RenderTarget[0].SourceBlend = BlendOption.SourceColor;
    //description.RenderTarget[0].DestinationBlend = BlendOption.SourceAlphaSaturate;
    //description.RenderTarget[0].SourceAlphaBlend = BlendOption.One;
    //description.RenderTarget[0].DestinationAlphaBlend = BlendOption.One;
    //description.RenderTarget[0].BlendOperation = BlendOperation.Add;
    //description.RenderTarget[0].IsLogicOperationEnabled = false;
    //description.RenderTarget[0].AlphaBlendOperation = BlendOperation.Maximum;
    //description.AlphaToCoverageEnable = true;
    //description.IndependentBlendEnable = false;
    //description.RenderTarget[0].RenderTargetWriteMask = ColorWriteMaskFlags.All;
    //m_blendStateAlpha = new BlendState1(d3dDevice, description);

    // One Matrix-sized constant buffer each for the VS and PS stages.
    m_constantBufferVS = ToDispose(new SharpDX.Direct3D11.Buffer(d3dDevice, Utilities.SizeOf <Matrix>(), ResourceUsage.Default, BindFlags.ConstantBuffer, CpuAccessFlags.None, ResourceOptionFlags.None, 0));
    m_constantBufferPS = ToDispose(new SharpDX.Direct3D11.Buffer(d3dDevice, Utilities.SizeOf <Matrix>(), ResourceUsage.Default, BindFlags.ConstantBuffer, CpuAccessFlags.None, ResourceOptionFlags.None, 0));

    //=======================
    // Setup the pipeline
    //=======================
    // Unit quad (10 floats per vertex, matching the input layout above).
    m_vertices = ToDispose(BuildVerticesBuffer(d3dDevice, 1.0f, new Vector2(0, 1), new Vector2(0, 0), new Vector2(1, 0), new Vector2(1, 1)));
    m_vertexBufferBinding = new VertexBufferBinding(m_vertices, sizeof(float) * 10, 0);
    d3dContext.InputAssembler.SetVertexBuffers(0, m_vertexBufferBinding);
    d3dContext.InputAssembler.InputLayout = m_layout;
    d3dContext.InputAssembler.PrimitiveTopology = SharpDX.Direct3D.PrimitiveTopology.TriangleList;
    d3dContext.VertexShader.SetConstantBuffer(0, m_constantBufferVS);
    d3dContext.VertexShader.Set(m_vertexShader);
    d3dContext.PixelShader.SetConstantBuffer(0, m_constantBufferPS);
    d3dContext.PixelShader.SetSampler(0, m_sampler);
    d3dContext.PixelShader.Set(m_pixelShader);
    d3dContext.OutputMerger.BlendState = m_blendStateAlpha; // m_blendStateAlpha, m_blendStateAdditive;
}
/// <summary>Creates a Pebble wrapper around the given BlueZ 5 device proxy.</summary>
internal BlueZ5Pebble(PebbleBluetoothConnection connection, Device1 device, string pebbleId)
    : base(connection, pebbleId)
    => _device = device;
/// <summary>
/// Creates the Direct3D 11 graphics device: selects the first non-software
/// DXGI adapter, creates the device (retrying without the debug layer when it
/// is unavailable), queries the optional 11.1 interfaces, detects
/// multithreading support, and creates the command queue and main swapchain.
/// </summary>
/// <param name="validation">When true, requests the D3D debug layer.</param>
/// <param name="presentationParameters">Swapchain/presentation settings.</param>
public D3D11GraphicsDevice(bool validation, PresentationParameters presentationParameters)
    : base(GraphicsBackend.Direct3D11, presentationParameters)
{
#if DEBUG
    SharpDX.Configuration.EnableObjectTracking = true;
    SharpDX.Configuration.ThrowOnShaderCompileError = false;
#endif
    // Create factory first.
    using (var dxgifactory = new DXGI.Factory1())
    {
        var adapterCount = dxgifactory.GetAdapterCount1();
        for (var i = 0; i < adapterCount; i++)
        {
            var adapter = dxgifactory.GetAdapter1(i);
            var desc = adapter.Description1;

            // Don't select the Basic Render Driver adapter.
            if ((desc.Flags & DXGI.AdapterFlags.Software) != DXGI.AdapterFlags.None)
            {
                continue;
            }

            var creationFlags = DeviceCreationFlags.BgraSupport /* | DeviceCreationFlags.VideoSupport*/;
            if (validation)
            {
                creationFlags |= DeviceCreationFlags.Debug;
            }

            try
            {
                D3DDevice = new Device(adapter, creationFlags, s_featureLevels);
            }
            catch (SharpDXException)
            {
                // Remove debug flag not being supported.
                creationFlags &= ~DeviceCreationFlags.Debug;
                D3DDevice = new Device(adapter, creationFlags, s_featureLevels);
            }

            Features.VendorId = desc.VendorId;
            Features.DeviceId = desc.DeviceId;
            Features.DeviceName = desc.Description;
            Log.Debug($"Direct3D Adapter ({i}): VID:{desc.VendorId}, PID:{desc.DeviceId} - {desc.Description}");
            break;
        }
    }

    FeatureLevel = D3DDevice.FeatureLevel;
    D3DContext = D3DDevice.ImmediateContext;
    D3DDevice1 = D3DDevice.QueryInterfaceOrNull<Device1>();
    if (D3DDevice1 != null)
    {
        D3DContext1 = D3DContext.QueryInterface<DeviceContext1>();
        D3DAnnotation = D3DContext.QueryInterface<UserDefinedAnnotation>();

        // BUGFIX: CheckThreadingSupport previously ran unconditionally and
        // threw NullReferenceException when the 11.1 interface was unavailable.
        // Detect multithreading
        D3DDevice1.CheckThreadingSupport(out _supportsConcurrentResources, out _supportsCommandLists);
        if (_supportsConcurrentResources && _supportsCommandLists)
        {
            Features.Multithreading = true;
        }
    }

    // Create queue's
    GraphicsQueue = new D3D11CommandQueue(this);
    //ImmediateContext = new D3D11CommandContext(this, Device.ImmediateContext1);

    _mainSwapchain = new D3D11Swapchain(this, presentationParameters);
}
/// <summary>
/// Creates a <see cref="T:SharpDX.Direct3D11.BlendState1"/> from the given
/// description (passed by reference) on the supplied device.
/// </summary>
/// <param name="device">Device that owns the new state object.</param>
/// <param name="description">Description of the blend state to create.</param>
public BlendState1(Device1 device, ref BlendStateDescription1 description)
    : base(IntPtr.Zero)
{
    // The native call fills in this wrapper's pointer.
    device.CreateBlendState1(ref description, this);
}
/// <summary>
/// Builds the input-element layout for this effect by reflecting over the
/// vertex shader entry point "VS" in <c>FxFileName</c>. Also accumulates
/// <c>VertexStride</c> as the summed byte size of all input components.
/// </summary>
/// <param name="device">Device used to validate the compiled vertex shader.</param>
/// <returns>Input elements matching the vertex shader's input signature.</returns>
/// <exception cref="Exception">
/// Thrown when the "VS" entry point is missing, when an input parameter does
/// not use its X component, or when a component type is unsupported.
/// </exception>
public InputElement[] CreateInputLayout(Device1 device)
{
    VertexStride = 0;

    var vertexShaderByteCode = ShaderBytecode.CompileFromFile(FxFileName, "VS", "vs_4_0", ShaderFlags.None, EffectFlags.None);
    if (vertexShaderByteCode.Bytecode == null)
    {
        throw new Exception("Could not find vertex shader, please rename it to \"VS\"");
    }

    // NOTE(review): created only to validate the bytecode against the device;
    // never used afterwards or disposed.
    var vs = new VertexShader(device, vertexShaderByteCode);

    var inputElements = new List<InputElement>();
    ShaderReflection reflection = new ShaderReflection(vertexShaderByteCode);
    var description = reflection.Description;
    for (int i = 0; i < description.InputParameters; ++i)
    {
        var ipd = reflection.GetInputParameterDescription(i);
        // Refactored: the format choice was a 4-level nested if with four
        // near-identical switches; the helper below is behavior-identical.
        Format frmt = _ResolveFormat(ipd);
        inputElements.Add(new InputElement(ipd.SemanticName, ipd.SemanticIndex, frmt, InputElement.AppendAligned, 0, InputClassification.PerVertexData, 0));
    }
    return inputElements.ToArray();
}

/// <summary>
/// Maps one reflected input parameter to its DXGI format, based on how many
/// consecutive xyzw components it uses and its register component type, and
/// adds the parameter's byte size to <c>VertexStride</c>.
/// </summary>
private Format _ResolveFormat(ShaderParameterDescription ipd)
{
    // The X component must always be present (same contract as before).
    if ((ipd.UsageMask & RegisterComponentMaskFlags.ComponentX) == 0)
    {
        throw new Exception("Invalid format for input parameter");
    }

    // Count consecutive components starting at X (Y implies 2, then Z, then W).
    int components = 1;
    if ((ipd.UsageMask & RegisterComponentMaskFlags.ComponentY) != 0)
    {
        components = 2;
        if ((ipd.UsageMask & RegisterComponentMaskFlags.ComponentZ) != 0)
        {
            components = 3;
            if ((ipd.UsageMask & RegisterComponentMaskFlags.ComponentW) != 0)
            {
                components = 4;
            }
        }
    }

    // Format lookup tables indexed by (components - 1).
    Format[] uintFormats = { Format.R32_UInt, Format.R32G32_UInt, Format.R32G32B32_UInt, Format.R32G32B32A32_UInt };
    Format[] sintFormats = { Format.R32_SInt, Format.R32G32_SInt, Format.R32G32B32_SInt, Format.R32G32B32A32_SInt };
    Format[] floatFormats = { Format.R32_Float, Format.R32G32_Float, Format.R32G32B32_Float, Format.R32G32B32A32_Float };

    switch (ipd.ComponentType)
    {
        case RegisterComponentType.UInt32:
            VertexStride += components * sizeof(uint);
            return uintFormats[components - 1];
        case RegisterComponentType.SInt32:
            VertexStride += components * sizeof(int);
            return sintFormats[components - 1];
        case RegisterComponentType.Float32:
            VertexStride += components * sizeof(float);
            return floatFormats[components - 1];
        default:
            throw new Exception("Invalid Component Type when creating input layout");
    }
}
/// <summary>
/// Creates a <see cref="T:SharpDX.Direct3D11.RasterizerState1"/> from the
/// given description on the supplied device.
/// </summary>
/// <param name="device">Device that owns the new state object.</param>
/// <param name="description">Description of the rasterizer state to create.</param>
public RasterizerState1(Device1 device, RasterizerStateDescription1 description)
    : base(IntPtr.Zero)
{
    // The native call fills in this wrapper's pointer.
    device.CreateRasterizerState1(ref description, this);
}
/// <summary>
/// Initializes the factory instance with empty texture/model caches.
/// </summary>
/// <param name="device">DirectX device used when loading resources.</param>
public EntityFactory(Device1 device)
{
    this.device = device;
    this.cachedTextures = new Dictionary<string, Texture>();
    this.cachedModels = new Dictionary<string, Model>();
}
/* RESOURCE MANIPULATION PROCEDURES */

/// <summary>
/// Binds the device-dependent resources to their respective buffers once the
/// game engine initialization procedure is complete.
/// <para>This superclass implementation automatically binds the texture
/// sampler state and the vertex buffer.</para>
/// </summary>
/// <param name="gameEngine">An instance of the <see cref="Tesseract"/> game engine.</param>
protected virtual void BindDeviceResources(Tesseract gameEngine)
{
    // Grab the now-initialized Direct3D device and context.
    var deviceManager = gameEngine.DeviceManager;
    Context3D = deviceManager.Context3D;
    Device3D = deviceManager.Device3D;

    // Sampler state from the configured description.
    TextureSamplerState = ToDispose(new SamplerState(Device3D, TextureSamplerStateDescription));

    // Vertex buffer plus its pipeline binding.
    VertexBuffer = ToDispose(Buffer.Create(Device3D, BindFlags.VertexBuffer, VertexArray));
    VertexBufferBinding = new VertexBufferBinding(VertexBuffer, Utilities.SizeOf<Vertex>(), 0);
}
/// <summary>
/// Creates the D3D resources shared by all instances: vertex/pixel shaders,
/// normal and additive blend states, rasterizer state, sampler state, and
/// the constant buffer.
/// </summary>
public static void 全インスタンスで共有するリソースを作成する(Device1 d3dDevice1, VariablePath 頂点シェーダCSOパス, VariablePath ピクセルシェーダCSOパス)
{
    using var _ = new LogBlock(Log.現在のメソッド名);

    #region " Create the vertex shader. "
    //----------------
    {
        var byteCode = File.ReadAllBytes(頂点シェーダCSOパス.数なしパス);
        _VertexShader = new VertexShader(d3dDevice1, byteCode);
    }
    //----------------
    #endregion

    #region " Create the pixel shader. "
    //----------------
    {
        var byteCode = File.ReadAllBytes(ピクセルシェーダCSOパス.数なしパス);
        _PixelShader = new PixelShader(d3dDevice1, byteCode);
    }
    //----------------
    #endregion

    #region " Create the normal (alpha-compositing) blend state. "
    //----------------
    {
        var BlendStateNorm = new BlendStateDescription()
        {
            AlphaToCoverageEnable = false,  // alpha-mask transparency (requires a Z buffer when enabled)
            IndependentBlendEnable = false, // per-target settings; false means only RenderTarget[0] applies, [1-7] are ignored
        };
        BlendStateNorm.RenderTarget[0].IsBlendEnabled = true;                           // true enables blending
        BlendStateNorm.RenderTarget[0].RenderTargetWriteMask = ColorWriteMaskFlags.All; // RGBA write mask

        // Alpha-channel blending ... nothing special
        BlendStateNorm.RenderTarget[0].SourceAlphaBlend = BlendOption.One;
        BlendStateNorm.RenderTarget[0].DestinationAlphaBlend = BlendOption.Zero;
        BlendStateNorm.RenderTarget[0].AlphaBlendOperation = BlendOperation.Add;

        // Color blending ... alpha-weighted transparency (texture alpha = texture
        // alpha x overall alpha applied in the pixel shader; see the HLSL)
        BlendStateNorm.RenderTarget[0].SourceBlend = BlendOption.SourceAlpha;
        BlendStateNorm.RenderTarget[0].DestinationBlend = BlendOption.InverseSourceAlpha;
        BlendStateNorm.RenderTarget[0].BlendOperation = BlendOperation.Add;

        // Create the blend state.
        _BlendState通常合成 = new BlendState(d3dDevice1, BlendStateNorm);
    }
    //----------------
    #endregion

    #region " Create the additive blend state. "
    //----------------
    {
        var BlendStateAdd = new BlendStateDescription()
        {
            AlphaToCoverageEnable = false,  // alpha-mask transparency (requires a Z buffer when enabled)
            IndependentBlendEnable = false, // per-target settings; false means only RenderTarget[0] applies, [1-7] are ignored
        };
        BlendStateAdd.RenderTarget[0].IsBlendEnabled = true;                           // true enables blending
        BlendStateAdd.RenderTarget[0].RenderTargetWriteMask = ColorWriteMaskFlags.All; // RGBA write mask

        // Alpha-channel blending ... nothing special
        BlendStateAdd.RenderTarget[0].SourceAlphaBlend = BlendOption.One;
        BlendStateAdd.RenderTarget[0].DestinationAlphaBlend = BlendOption.Zero;
        BlendStateAdd.RenderTarget[0].AlphaBlendOperation = BlendOperation.Add;

        // Color blending ... additive compositing
        BlendStateAdd.RenderTarget[0].SourceBlend = BlendOption.SourceAlpha;
        BlendStateAdd.RenderTarget[0].DestinationBlend = BlendOption.One;
        BlendStateAdd.RenderTarget[0].BlendOperation = BlendOperation.Add;

        // Create the blend state.
        _BlendState加算合成 = new BlendState(d3dDevice1, BlendStateAdd);
    }
    //----------------
    #endregion

    #region " Create the rasterizer state. "
    //----------------
    _RasterizerState = new RasterizerState(d3dDevice1, new RasterizerStateDescription()
    {
        FillMode = FillMode.Solid,       // draw solid (normal rendering)
        CullMode = CullMode.None,        // draw both faces
        IsFrontCounterClockwise = false, // clockwise winding is the front face
        DepthBias = 0,
        DepthBiasClamp = 0,
        SlopeScaledDepthBias = 0,
        IsDepthClipEnabled = true,
        IsScissorEnabled = false,
        IsMultisampleEnabled = false,
        IsAntialiasedLineEnabled = false,
    });
    //----------------
    #endregion

    #region " Create the sampler state. "
    //----------------
    _SamplerState = new SamplerState(d3dDevice1, new SamplerStateDescription()
    {
        Filter = Filter.Anisotropic,
        AddressU = TextureAddressMode.Border,
        AddressV = TextureAddressMode.Border,
        AddressW = TextureAddressMode.Border,
        MipLodBias = 0.0f,
        MaximumAnisotropy = 2,
        ComparisonFunction = Comparison.Never,
        BorderColor = new RawColor4(0f, 0f, 0f, 0f),
        MinimumLod = float.MinValue,
        MaximumLod = float.MaxValue,
    });
    //----------------
    #endregion

    #region " Create the constant buffer. "
    //----------------
    _ConstantBuffer = new SharpDX.Direct3D11.Buffer(d3dDevice1, new BufferDescription()
    {
        Usage = ResourceUsage.Dynamic,        // dynamic usage
        BindFlags = BindFlags.ConstantBuffer, // constant buffer
        CpuAccessFlags = CpuAccessFlags.Write, // written from the CPU
        OptionFlags = ResourceOptionFlags.None,
        SizeInBytes = SharpDX.Utilities.SizeOf <ST定数バッファの転送元データ>(), // buffer size
        StructureByteStride = 0,
    });
    //----------------
    #endregion
}
/// <summary>
/// Creates the Direct3D 11 device, swap chain, render/depth targets and default
/// pipeline state for the given control. If device creation fails while
/// multisampling is enabled, it retries once with MultiSampleCount = 1
/// (persisting that setting) before giving up.
/// </summary>
/// <param name="control">WinForms control whose handle and client size back the swap chain.</param>
/// <exception cref="Exception">Thrown when the device cannot be created even without multisampling.</exception>
public RenderContext11(Control control)
{
    var failed = true;
    while (failed)
    {
        try
        {
            // SwapChain description
            desc = new SwapChainDescription()
            {
                BufferCount = 1,
                ModeDescription = new ModeDescription(control.ClientSize.Width, control.ClientSize.Height, new Rational(60, 1), DefaultColorFormat),
                IsWindowed = true,
                OutputHandle = control.Handle,
                SampleDescription = new SampleDescription(MultiSampleCount, 0),
                SwapEffect = SwapEffect.Discard,
                Usage = Usage.RenderTargetOutput
            };

            FeatureLevel[] featureLevels =
            {
                FeatureLevel.Level_11_0,
                FeatureLevel.Level_10_1,
                FeatureLevel.Level_10_0,
                FeatureLevel.Level_9_3
            };
            //Enable this instead for downlevel testing.
            //featureLevels = new FeatureLevel[] { FeatureLevel.Level_9_3 };

            // Create Device and SwapChain
            Device.CreateWithSwapChain(DriverType.Hardware, DeviceCreationFlags.None, featureLevels, desc, out device, out swapChain);
            failed = false;
        }
        catch (Exception ex)
        {
            if (MultiSampleCount != 1)
            {
                // First failure: drop multisampling, persist the setting, and retry.
                MultiSampleCount = 1;
                AppSettings.SettingsBase["MultiSampling"] = 1;
            }
            else
            {
                // FIX: preserve the original exception as InnerException instead of
                // discarding it — the old code threw a bare Exception and lost all
                // diagnostic information about why device creation failed.
                throw new Exception("DX Init failed", ex);
            }
            // failed is still true here, so the loop retries with the reduced sample count.
            failed = true;
        }
    }

    devContext = device.ImmediateContext;
    PrepDevice = device;

    // Pick downlevel shader profiles for 9.x feature levels. The request list
    // above bottoms out at 9_3, so the 9_1 branch only fires if the driver
    // hands back a lower level than requested.
    if (device.FeatureLevel == FeatureLevel.Level_9_3)
    {
        PixelProfile = "ps_4_0_level_9_3";
        VertexProfile = "vs_4_0_level_9_3";
        Downlevel = true;
    }
    else if (device.FeatureLevel == FeatureLevel.Level_9_1)
    {
        PixelProfile = "ps_4_0_level_9_1";
        VertexProfile = "vs_4_0_level_9_1";
        Downlevel = true;
    }

    if (!Downlevel)
    {
        sRGB = true;
        dv1 = device.QueryInterface<Device1>();
        dv1.MaximumFrameLatency = 1;
    }

    // Ignore all windows events
    factory = swapChain.GetParent<Factory>();
    factory.MakeWindowAssociation(control.Handle, WindowAssociationFlags.IgnoreAll);

    // New RenderTargetView from the backbuffer
    backBuffer = Texture2D.FromSwapChain<Texture2D>(swapChain, 0);
    renderView = new RenderTargetView(device, backBuffer);

    // Create Depth Buffer & View (sample count must match the swap chain's).
    depthBuffer = new Texture2D(device, new Texture2DDescription()
    {
        Format = DefaultDepthStencilFormat,
        ArraySize = 1,
        MipLevels = 1,
        Width = control.ClientSize.Width,
        Height = control.ClientSize.Height,
        SampleDescription = new SampleDescription(MultiSampleCount, 0),
        Usage = ResourceUsage.Default,
        BindFlags = BindFlags.DepthStencil,
        CpuAccessFlags = CpuAccessFlags.None,
        OptionFlags = ResourceOptionFlags.None
    });
    depthView = new DepthStencilView(device, depthBuffer);

    // Default anisotropic sampler. The two branches differ only in MaximumLod:
    // unbounded on downlevel hardware, capped at 16 otherwise.
    if (Downlevel)
    {
        sampler = new SamplerState(device, new SamplerStateDescription()
        {
            Filter = Filter.Anisotropic,
            AddressU = TextureAddressMode.Clamp,
            AddressV = TextureAddressMode.Clamp,
            AddressW = TextureAddressMode.Wrap,
            BorderColor = SharpDX.Color.Black,
            ComparisonFunction = Comparison.Never,
            MaximumAnisotropy = 16,
            MipLodBias = 0,
            MinimumLod = 0,
            MaximumLod = float.MaxValue,
        });
    }
    else
    {
        sampler = new SamplerState(device, new SamplerStateDescription()
        {
            Filter = Filter.Anisotropic,
            AddressU = TextureAddressMode.Clamp,
            AddressV = TextureAddressMode.Clamp,
            AddressW = TextureAddressMode.Wrap,
            BorderColor = SharpDX.Color.Black,
            ComparisonFunction = Comparison.Never,
            MaximumAnisotropy = 16,
            MipLodBias = 0,
            MinimumLod = 0,
            MaximumLod = 16,
        });
    }
    devContext.PixelShader.SetSampler(0, sampler);

    // Prepare All the stages
    displayViewPort = new Viewport(0, 0, control.ClientSize.Width, control.ClientSize.Height, 0.0f, 1.0f);
    ViewPort = displayViewPort;
    devContext.OutputMerger.SetTargets(depthView, renderView);
    initializeStates();
}
/// <summary>
/// Loads the model file via Assimp, interleaves its vertex attributes according
/// to the shader's input signature, and uploads immutable vertex/index buffers
/// to the GPU.
/// </summary>
/// <param name="device">Direct3D device used to create the GPU buffers.</param>
/// <param name="effect">Shader whose input parameters dictate the interleaved vertex format.</param>
/// <exception cref="Exception">Thrown when the file extension is not a supported import format.</exception>
public void Create(Device1 device, GenericShader effect)
{
    PrimitiveTopology = PrimitiveTopology.TriangleList;
    VertexStride = effect.VertexStride;
    List<int> indices = new List<int>();

    // FIX: AssimpContext is IDisposable — the original leaked it. Also validate
    // the file extension BEFORE the (expensive) import; the original checked
    // support only after ImportFile, by which point an unsupported format had
    // already thrown from inside Assimp.
    Scene scene;
    using (var importer = new AssimpContext())
    {
        if (!importer.IsImportFormatSupported(Path.GetExtension(_fileName)))
        {
            throw new Exception("File Format not supported");
        }
        scene = importer.ImportFile(_fileName,
            PostProcessSteps.GenerateSmoothNormals |
            PostProcessSteps.CalculateTangentSpace |
            PostProcessSteps.Triangulate);
    }

    long vertCount = 0;
    foreach (var model in scene.Meshes)
    {
        vertCount += model.VertexCount;
    }

    // NOTE(review): VertexStride appears to be a byte stride (it is divided by
    // sizeof(float) when indexing below), so this array holds bytes-worth of
    // float slots — 4x the actual float count. Kept as-is; see the buffer-size
    // note further down before changing either.
    var verts = new float[VertexStride * vertCount];
    int meshOffset = 0;
    foreach (var model in scene.Meshes)
    {
        for (var i = 0; i < model.VertexCount; ++i)
        {
            var pos = model.Vertices[i];
            var nor = model.Normals[i];
            var uv = model.TextureCoordinateChannels[0][i];
            uv.Y = -uv.Y; // flip V to Direct3D texture-coordinate orientation
            var col = model.HasVertexColors(0) ? model.VertexColorChannels[0][i] : new Color4D(1, 0, 1);
            var tan = model.Tangents[i];
            var inputOffset = 0;
            // Copy each attribute into the interleaved array in the exact order
            // the shader's input signature declares its semantics.
            foreach (var inputParam in effect.InputParameters)
            {
                if (inputParam.SemanticName == "POSITION")
                {
                    Array.Copy(pos.ToArray(), 0, verts, i * (VertexStride / sizeof(float)) + inputOffset + meshOffset, 3);
                    inputOffset += 3;
                }
                else if (inputParam.SemanticName == "NORMAL")
                {
                    Array.Copy(nor.ToArray(), 0, verts, i * (VertexStride / sizeof(float)) + inputOffset + meshOffset, 3);
                    inputOffset += 3;
                }
                else if (inputParam.SemanticName == "COLOR")
                {
                    Array.Copy(col.ToArray(), 0, verts, i * (VertexStride / sizeof(float)) + inputOffset + meshOffset, 4);
                    inputOffset += 4;
                }
                else if (inputParam.SemanticName == "TEXCOORD" || inputParam.SemanticName == "TEXCOORD0")
                {
                    Array.Copy(uv.ToArray(), 0, verts, i * (VertexStride / sizeof(float)) + inputOffset + meshOffset, 2);
                    inputOffset += 2;
                }
                else if (inputParam.SemanticName == "TANGENT")
                {
                    Array.Copy(tan.ToArray(), 0, verts, i * (VertexStride / sizeof(float)) + inputOffset + meshOffset, 3);
                    inputOffset += 3;
                }
                else
                {
                    // FIX: the original literal was missing the '$' prefix, so the
                    // placeholder "{inputParam.SemanticName}" was shown verbatim.
                    MessageBox.Show($"AssimpModel::Create() > Unsupported Semantic type! ({inputParam.SemanticName})");
                }
            }
        }
        // NOTE(review): this advances by bytes (VertexCount * byte stride) while
        // the indexing above is in float elements; the consistent value would be
        // model.VertexCount * (VertexStride / sizeof(float)). As written it can
        // only be correct for single-mesh scenes — verify before changing.
        meshOffset += model.VertexCount * VertexStride;
        indices.AddRange(model.GetIndices().ToList());
    }

    IndexCount = indices.Count;

    // NOTE(review): SizeInBytes here is verts.Length (a float count), which equals
    // the true byte size only because the array above is over-allocated by
    // sizeof(float). The two quirks cancel out — do not "fix" one without the other.
    BufferDescription bd = new BufferDescription(
        (int)(verts.Length),
        ResourceUsage.Immutable,
        BindFlags.VertexBuffer,
        CpuAccessFlags.None,
        ResourceOptionFlags.None);
    VertexBuffer = new Buffer(device, DataStream.Create(verts, false, false), bd);

    bd = new BufferDescription(
        sizeof(int) * IndexCount,
        ResourceUsage.Immutable,
        BindFlags.IndexBuffer,
        CpuAccessFlags.None,
        ResourceOptionFlags.None);
    IndexBuffer = new Buffer(device, DataStream.Create(indices.ToArray(), false, false), bd);
}
/// <summary>
/// Initializes a new deferred context instance of <see cref="SharpDX.Direct3D11.DeviceContext1"/> class.
/// </summary>
/// <param name="device">The device on which to create the deferred context; the native
/// context pointer is produced by <c>CreateDeferredContext1</c> and attached to this wrapper.</param>
public DeviceContext1(Device1 device) : base(IntPtr.Zero)
{
    // Start with a null native pointer; CreateDeferredContext1 fills it in.
    // The contextFlags argument is reserved by D3D11 and must be 0.
    device.CreateDeferredContext1(0, this);
}
/// <summary>
/// Creates the GPU resources the sprite batch needs — shaders, input layout,
/// sampler, alpha blend state, constant buffers and the shared quad vertex
/// buffer — then binds them all onto the supplied pipeline context.
/// </summary>
/// <param name="d3dDevice">Device used to create every resource.</param>
/// <param name="d3dContext">Context whose pipeline stages are configured.</param>
/// <param name="capacity">Batch capacity; stored in <c>m_capacity</c> (not otherwise used here).</param>
public void Initialize(Device1 d3dDevice, DeviceContext1 d3dContext, int capacity = 1024)
{
    m_d3dDevice = d3dDevice;
    m_d3dContext = d3dContext;
    m_capacity = capacity;

    // Compiled shader blobs ship in the app package's Assets folder.
    var path = Windows.ApplicationModel.Package.Current.InstalledLocation.Path;
    var vertexShaderByteCode = NativeFile.ReadAllBytes(path + "\\Assets\\SpriteBatch.vs.cso");
    // FIX(consistency): the original mixed m_d3dDevice and the d3dDevice parameter
    // at random (they are the same reference); the parameter is used uniformly here.
    m_vertexShader = new VertexShader(d3dDevice, vertexShaderByteCode);
    m_pixelShader = new PixelShader(d3dDevice, NativeFile.ReadAllBytes(path + "\\Assets\\SpriteBatch.ps.cso"));

    // Layout from VertexShader input signature: float4 position (offset 0),
    // float2 uv (offset 16), float4 color (offset 24) = 10 floats per vertex,
    // matching the stride bound further down.
    m_layout = new InputLayout(d3dDevice, vertexShaderByteCode, new[]
    {
        new InputElement("POSITION", 0, SharpDX.DXGI.Format.R32G32B32A32_Float, 0, 0),
        new InputElement("TEXCOORD", 0, SharpDX.DXGI.Format.R32G32_Float, 16, 0),
        new InputElement("COLOR", 0, SharpDX.DXGI.Format.R32G32B32A32_Float, 24, 0),
    });

    SamplerStateDescription samplerDesc = SharpDX.Direct3D11.SamplerStateDescription.Default();
    m_sampler = new SamplerState(d3dDevice, samplerDesc);

    // Alpha blend state: source-alpha over destination with additive destination
    // weight, AlphaBlendOperation = Maximum so overlapping sprites blend cleanly.
    // (Two earlier, commented-out variants of this setup were dead code and have
    // been removed.)
    var description = BlendStateDescription1.Default();
    description.RenderTarget[0].IsBlendEnabled = true;
    description.RenderTarget[0].SourceBlend = BlendOption.SourceAlpha;
    description.RenderTarget[0].DestinationBlend = BlendOption.One;
    description.RenderTarget[0].SourceAlphaBlend = BlendOption.SourceAlpha;
    description.RenderTarget[0].DestinationAlphaBlend = BlendOption.One;
    description.RenderTarget[0].BlendOperation = BlendOperation.Add;
    description.RenderTarget[0].IsLogicOperationEnabled = false;
    description.RenderTarget[0].AlphaBlendOperation = BlendOperation.Maximum;
    description.RenderTarget[0].RenderTargetWriteMask = ColorWriteMaskFlags.All;
    // NOTE(review): the original flagged this setting as not working on Windows RT.
    description.AlphaToCoverageEnable = true;
    description.IndependentBlendEnable = false;
    m_blendStateAlpha = new BlendState1(d3dDevice, description);

    // One Matrix-sized constant buffer each for the vertex and pixel stages.
    m_constantBufferVS = ToDispose(new SharpDX.Direct3D11.Buffer(d3dDevice,
        Utilities.SizeOf<Matrix>(),
        ResourceUsage.Default,
        BindFlags.ConstantBuffer,
        CpuAccessFlags.None,
        ResourceOptionFlags.None,
        0));
    m_constantBufferPS = ToDispose(new SharpDX.Direct3D11.Buffer(d3dDevice,
        Utilities.SizeOf<Matrix>(),
        ResourceUsage.Default,
        BindFlags.ConstantBuffer,
        CpuAccessFlags.None,
        ResourceOptionFlags.None,
        0));

    //=======================
    // Setup the pipeline
    //=======================
    m_vertices = ToDispose(BuildVerticesBuffer(d3dDevice, 1.0f, new Vector2(0, 1), new Vector2(0, 0), new Vector2(1, 0), new Vector2(1, 1)));
    // Stride = 10 floats: float4 position + float2 uv + float4 color (see layout above).
    m_vertexBufferBinding = new VertexBufferBinding(m_vertices, sizeof(float) * 10, 0);
    d3dContext.InputAssembler.SetVertexBuffers(0, m_vertexBufferBinding);
    d3dContext.InputAssembler.InputLayout = m_layout;
    d3dContext.InputAssembler.PrimitiveTopology = SharpDX.Direct3D.PrimitiveTopology.TriangleList;
    d3dContext.VertexShader.SetConstantBuffer(0, m_constantBufferVS);
    d3dContext.VertexShader.Set(m_vertexShader);
    d3dContext.PixelShader.SetConstantBuffer(0, m_constantBufferPS);
    d3dContext.PixelShader.SetSampler(0, m_sampler);
    d3dContext.PixelShader.Set(m_pixelShader);
    d3dContext.OutputMerger.BlendState = m_blendStateAlpha; // m_blendStateAlpha, m_blendStateAdditive;
}