// Generate texture containing text.
// For text rendering we will use UI.
private async Task<ITextureResource> ClientGenerateProceduralTextureForTradingStationContent(
    TPublicState publicState,
    ProceduralTextureRequest request)
{
    var rendering = Client.Rendering;
    var renderingTag = request.TextureName;

    var qualityScaleCoef = rendering.CalculateCurrentQualityScaleCoefWithOffset(-1);
    var scale = 1.0 / qualityScaleCoef;

    var controlWidth = (int)Math.Floor(90 * (this.LotsCount / 2.0));
    var controlHeight = 268;

    // create and prepare UI renderer for the sign text to render
    var control = new ObjectTradingStationDisplayControl
    {
        IsBuyMode = publicState.Mode == TradingStationMode.StationBuying,
        TradingLots = publicState.Lots
                                 .Select(l => new TradingStationsMapMarksSystem.TradingStationLotInfo(l))
                                 .ToArray(),
        Width = controlWidth,
        Height = controlHeight,
        LayoutTransform = new ScaleTransform(scale, scale)
    };

    var textureSize = new Vector2Ushort((ushort)(scale * controlWidth),
                                        (ushort)(scale * controlHeight));

    // create camera and render texture
    var renderTexture = rendering.CreateRenderTexture(renderingTag, textureSize.X, textureSize.Y);
    var cameraObject = Client.Scene.CreateSceneObject(renderingTag);
    var camera = rendering.CreateCamera(cameraObject,
                                        renderingTag,
                                        drawOrder: -200);
    camera.RenderTarget = renderTexture;
    camera.SetOrthographicProjection(textureSize.X, textureSize.Y);

    rendering.CreateUIElementRenderer(
        cameraObject,
        control,
        size: textureSize,
        renderingTag: renderingTag);

    await camera.DrawAsync();
    cameraObject.Destroy();

    request.ThrowIfCancelled();
    var generatedTexture = await renderTexture.SaveToTexture(isTransparent: true,
                                                             qualityScaleCoef: qualityScaleCoef);

    renderTexture.Dispose();
    request.ThrowIfCancelled();
    return generatedTexture;
}
private static async Task<ITextureResource> GenerateChunkProceduralTexture(
    ITempList<Tile> tiles,
    Vector2Ushort chunkStartPosition,
    ProceduralTextureRequest request)
{
    var renderingService = Api.Client.Rendering;
    var renderingTag = request.TextureName;
    var textureSize = new Vector2Ushort(WorldChunkMapTextureSize, WorldChunkMapTextureSize);

    // create camera and render texture
    var renderTexture = renderingService.CreateRenderTexture(renderingTag, textureSize.X, textureSize.Y);
    var cameraObject = Api.Client.Scene.CreateSceneObject(renderingTag);
    var camera = renderingService.CreateCamera(cameraObject,
                                               renderingTag,
                                               drawOrder: -100);
    camera.RenderTarget = renderTexture;
    camera.ClearColor = Colors.Magenta; // clear with magenta to make potential issues visible
    camera.SetOrthographicProjection(textureSize.X, textureSize.Y);

    // create tile renderers
    foreach (var tile in tiles)
    {
        var drawPosition = tile.Position.ToVector2D() - chunkStartPosition.ToVector2D();
        drawPosition = (drawPosition.X * WorldTileTextureSize,
                        // Y is reversed
                        (drawPosition.Y - ScriptingConstants.WorldChunkSize + 1) * WorldTileTextureSize);

        renderingService.CreateSpriteRenderer(
            cameraObject,
            tile.ProtoTile.GetWorldMapTexture(tile),
            positionOffset: drawPosition,
            // draw down
            spritePivotPoint: (0, 1),
            renderingTag: renderingTag);
    }

    tiles.Dispose();

    await camera.DrawAsync();
    cameraObject.Destroy();

    request.ThrowIfCancelled();
    var generatedTexture = await renderTexture.SaveToTexture(isTransparent: false);

    renderTexture.Dispose();
    request.ThrowIfCancelled();
    return generatedTexture;
}
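// Every generator in this listing repeats the same render-to-texture sequence:
// create an off-screen render texture, attach a manual camera, add renderers,
// draw once, save, and dispose. A minimal sketch of that shared sequence is shown
// below for illustration only; the helper name ClientRenderToTextureAsync, the
// setupRenderers callback, and the IClientSceneObject parameter typing are
// assumptions, not part of the original code or API surface shown here.
private static async Task<ITextureResource> ClientRenderToTextureAsync(
    ProceduralTextureRequest request,
    Vector2Ushort textureSize,
    Action<IClientSceneObject, string> setupRenderers,
    bool isTransparent = true)
{
    var rendering = Api.Client.Rendering;
    var renderingTag = request.TextureName;

    // off-screen render target and a camera drawing into it
    var renderTexture = rendering.CreateRenderTexture(renderingTag, textureSize.X, textureSize.Y);
    var cameraObject = Api.Client.Scene.CreateSceneObject(renderingTag);
    var camera = rendering.CreateCamera(cameraObject,
                                        renderingTag,
                                        drawOrder: -100);
    camera.RenderTarget = renderTexture;
    camera.SetOrthographicProjection(textureSize.X, textureSize.Y);

    // the caller attaches sprite/UI renderers to the camera's scene object
    setupRenderers(cameraObject, renderingTag);

    await camera.DrawAsync();
    cameraObject.Destroy();
    request.ThrowIfCancelled();

    var generatedTexture = await renderTexture.SaveToTexture(isTransparent: isTransparent);
    renderTexture.Dispose();
    request.ThrowIfCancelled();
    return generatedTexture;
}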
// Generate texture containing text.
// For text rendering we will use UI.
private async Task<ITextureResource> ClientGenerateProceduralTextureForText(
    string text,
    ProceduralTextureRequest request)
{
    var rendering = Client.Rendering;
    var renderingTag = request.TextureName;

    var qualityScaleCoef = rendering.CalculateCurrentQualityScaleCoefWithOffset(-1);
    var scale = 1.0 / qualityScaleCoef;

    var control = new ObjectSignControl()
    {
        Text = text,
        RenderTransformOrigin = new Point(0.5, 0.5),
        RenderTransform = new ScaleTransform(scale, scale)
    };

    var controlWidth = control.Width;
    var controlHeight = control.Height;

    var textureSize = new Vector2Ushort((ushort)(scale * controlWidth),
                                        (ushort)(scale * controlHeight));

    // create camera and render texture
    var renderTexture = rendering.CreateRenderTexture(renderingTag, textureSize.X, textureSize.Y);
    var cameraObject = Client.Scene.CreateSceneObject(renderingTag);
    var camera = rendering.CreateCamera(cameraObject,
                                        renderingTag,
                                        drawOrder: -200);
    camera.RenderTarget = renderTexture;
    camera.SetOrthographicProjection(textureSize.X, textureSize.Y);

    // create and prepare UI renderer for this text to render
    rendering.CreateUIElementRenderer(
        cameraObject,
        control,
        size: textureSize,
        renderingTag: renderingTag);

    await camera.DrawAsync();
    cameraObject.Destroy();

    request.ThrowIfCancelled();
    var generatedTexture = await renderTexture.SaveToTexture(isTransparent: true,
                                                             qualityScaleCoef: qualityScaleCoef);

    renderTexture.Dispose();
    request.ThrowIfCancelled();
    return generatedTexture;
}
/// <summary>
/// Used in texture procedural generation.
/// </summary>
/// <param name="request">Request from ProceduralTexture generator</param>
/// <param name="textureWidth">Texture width</param>
/// <param name="textureHeight">Texture height</param>
/// <param name="spriteQualityOffset">Sprite quality modifier (0 = full size, 1 = x0.5, 2 = x0.25)</param>
/// <returns>Generated icon texture.</returns>
public override async Task<ITextureResource> GenerateIcon(
    ProceduralTextureRequest request,
    ushort textureWidth = 512,
    ushort textureHeight = 512,
    sbyte spriteQualityOffset = 0)
{
    if (!(ProtoEntity is IProtoCharacterMob creature)) // Can't be via constructor rule
    {
        Api.Logger.Error("CNEI: creature is not IProtoCharacterMob, but this shouldn't be possible!");
        return DefaultIcon;
    }

    creature.SharedGetSkeletonProto(null, out var creatureSkeleton, out double _);
    var worldScale = 1.0;
    if (creatureSkeleton is ProtoCharacterSkeletonAnimal animalSkeleton)
    {
        worldScale = animalSkeleton.WorldScale * 2;
    }

    string renderingTag = request.TextureName;
    var renderTarget = Api.Client.Rendering.CreateRenderTexture(renderingTag, textureWidth, textureHeight);
    var cameraObject = Api.Client.Scene.CreateSceneObject(renderingTag);
    var camera = Api.Client.Rendering.CreateCamera(cameraObject,
                                                   renderingTag: renderingTag,
                                                   drawOrder: -10);
    camera.RenderTarget = renderTarget;
    camera.ClearColor = Color.FromArgb(0, 0, 0, 0);
    camera.SetOrthographicProjection(textureWidth, textureHeight);

    var currentSkeleton = ClientCharacterEquipmentHelper.CreateCharacterSkeleton(
        cameraObject,
        creatureSkeleton,
        worldScale: worldScale,
        spriteQualityOffset: spriteQualityOffset);
    currentSkeleton.PositionOffset = (textureWidth / 2d, -textureHeight * 0.70);
    currentSkeleton.RenderingTag = renderingTag;

    await camera.DrawAsync();
    cameraObject.Destroy();

    request.ThrowIfCancelled();
    var generatedTexture = await renderTarget.SaveToTexture(
                               isTransparent: true,
                               qualityScaleCoef: Api.Client.Rendering.CalculateCurrentQualityScaleCoefWithOffset(
                                   spriteQualityOffset));

    currentSkeleton.Destroy();
    renderTarget.Dispose();
    request.ThrowIfCancelled();
    return generatedTexture;
}
protected static async Task<ITextureResource> ClientComposeHorizontalDoor(
    ProceduralTextureRequest request,
    params ITextureResource[] textureResources)
{
    var rendering = Api.Client.Rendering;
    var renderingTag = request.TextureName;

    var textureSize = await rendering.GetTextureSize(textureResources[1]);
    var textureSizeBase = await rendering.GetTextureSize(textureResources[0]);
    request.ThrowIfCancelled();

    // create camera and render texture
    var renderTexture = rendering.CreateRenderTexture(renderingTag, textureSize.X, textureSize.Y);
    var cameraObject = Api.Client.Scene.CreateSceneObject(renderingTag);
    var camera = rendering.CreateCamera(cameraObject,
                                        renderingTag,
                                        drawOrder: -100);
    camera.RenderTarget = renderTexture;
    camera.SetOrthographicProjection(textureSize.X, textureSize.Y);

    // create and prepare sprite renderers
    rendering.CreateSpriteRenderer(
        cameraObject,
        textureResources[0],
        positionOffset: (0, -textureSize.Y + textureSizeBase.Y),
        // draw down
        spritePivotPoint: (0, 1),
        renderingTag: renderingTag);

    rendering.CreateSpriteRenderer(
        cameraObject,
        textureResources[1],
        positionOffset: (0, 0),
        // draw down
        spritePivotPoint: (0, 1),
        renderingTag: renderingTag);

    await camera.DrawAsync();
    cameraObject.Destroy();

    request.ThrowIfCancelled();
    var generatedTexture = await renderTexture.SaveToTexture(isTransparent: true);

    renderTexture.Dispose();
    request.ThrowIfCancelled();
    return generatedTexture;
}
public static async Task<ITextureResource> CreateIconForLockedEntry(
    ProceduralTextureRequest request,
    ITextureResource originalIcon)
{
    var size = await Rendering.GetTextureSize(originalIcon);
    var originalIconSize = size;

    // expand size a bit to fit the outline
    var paddingFraction = 0.0;
    var padding = (int)Math.Round(size.X * paddingFraction, MidpointRounding.AwayFromZero);
    size = new Vector2Ushort((ushort)(size.X + padding),
                             (ushort)(size.Y + padding));

    request.ThrowIfCancelled();

    // create camera and render texture
    var renderingTag = request.TextureName;
    var renderTexture = Rendering.CreateRenderTexture(renderingTag, size.X, size.Y);
    var cameraObject = Api.Client.Scene.CreateSceneObject(renderingTag);
    var camera = Rendering.CreateCamera(cameraObject,
                                        renderingTag,
                                        drawOrder: -100);
    camera.RenderTarget = renderTexture;
    camera.SetOrthographicProjection(size.X, size.Y);

    var spriteRenderer = Rendering.CreateSpriteRenderer(
        cameraObject,
        originalIcon,
        // draw at the center
        positionOffset: (size.X / 2, -size.Y / 2),
        spritePivotPoint: (0.5, 0.5),
        renderingTag: renderingTag,
        scale: 1 + paddingFraction);

    spriteRenderer.RenderingMaterial = GetRenderingMaterial(originalIconSize);

    await camera.DrawAsync();
    cameraObject.Destroy();

    request.ThrowIfCancelled();
    var generatedTexture = await renderTexture.SaveToTexture(isTransparent: true);

    renderTexture.Dispose();
    request.ThrowIfCancelled();
    return generatedTexture;
}
public static async Task<ITextureResource> CreateIcon(
    ITextureResource texture,
    ProceduralTextureRequest request)
{
    var textureSize = await Rendering.GetTextureSize(texture);
    request.ThrowIfCancelled();

    if (!ClampTextureSize(ref textureSize, out var scale)
        && texture is TextureResource)
    {
        // can use original texture as icon for itself
        return texture;
    }

    // create camera and render texture
    var renderingTag = request.TextureName;
    var renderTexture = Rendering.CreateRenderTexture(renderingTag, textureSize.X, textureSize.Y);
    var cameraObject = Api.Client.Scene.CreateSceneObject(renderingTag);
    var camera = Rendering.CreateCamera(cameraObject,
                                        renderingTag,
                                        drawOrder: -100);
    camera.RenderTarget = renderTexture;
    camera.SetOrthographicProjection(textureSize.X, textureSize.Y);

    // create and prepare renderer for icon (attach it to camera object)
    Rendering.CreateSpriteRenderer(
        cameraObject,
        texture,
        positionOffset: (0, 0),
        // draw down
        spritePivotPoint: (0, 1),
        renderingTag: renderingTag,
        scale: scale);

    await camera.DrawAsync();
    cameraObject.Destroy();

    request.ThrowIfCancelled();
    var generatedTexture = await renderTexture.SaveToTexture(isTransparent: true);

    renderTexture.Dispose();
    request.ThrowIfCancelled();
    return generatedTexture;
}
private async Task<ITextureResource> ClientGenerateProceduralTextureForPicture(
    TextureResource pictureTextureResource,
    ProceduralTextureRequest request)
{
    var rendering = Client.Rendering;
    var renderingTag = request.TextureName;

    var originalTextureSize = await rendering.GetTextureSize(pictureTextureResource);
    request.ThrowIfCancelled();

    var scale = 3;
    var scaledTextureSize = new Vector2Ushort((ushort)(originalTextureSize.X * scale),
                                              (ushort)(originalTextureSize.Y * scale));

    var renderTexture = rendering.CreateRenderTexture(renderingTag, scaledTextureSize.X, scaledTextureSize.Y);
    var cameraObject = Client.Scene.CreateSceneObject(renderingTag);
    var camera = rendering.CreateCamera(cameraObject,
                                        renderingTag,
                                        drawOrder: -200);
    camera.ClearColor = Color.FromArgb(0, 0, 0, 0);
    camera.RenderTarget = renderTexture;
    camera.TextureFilter = TextureFilter.Point;
    camera.SetOrthographicProjection(scaledTextureSize.X, scaledTextureSize.Y);

    // draw sprite with the required scale
    rendering.CreateSpriteRenderer(
        cameraObject,
        pictureTextureResource,
        renderingTag: renderingTag,
        // draw down
        spritePivotPoint: (0, 1),
        scale: scale);

    await camera.DrawAsync();
    cameraObject.Destroy();

    request.ThrowIfCancelled();
    var generatedTexture = await renderTexture.SaveToTexture(isTransparent: true);

    renderTexture.Dispose();
    request.ThrowIfCancelled();
    return generatedTexture;
}
public async Task<ITextureResource> GenerateIcon(
    ProceduralTextureRequest request,
    ushort textureWidth = 384,
    ushort textureHeight = 384,
    sbyte spriteQualityOffset = 0)
{
    var protoSkeleton = this;
    var scale = protoSkeleton.IconScale * textureWidth / 256.0;

    var renderingTag = request.TextureName;
    var renderTarget = Api.Client.Rendering.CreateRenderTexture(renderingTag, textureWidth, textureHeight);
    var cameraObject = Api.Client.Scene.CreateSceneObject(renderingTag);
    var camera = Api.Client.Rendering.CreateCamera(cameraObject,
                                                   renderingTag: renderingTag,
                                                   drawOrder: -100);
    camera.RenderTarget = renderTarget;
    camera.ClearColor = Color.FromArgb(0, 0, 0, 0);
    camera.SetOrthographicProjection(textureWidth, textureHeight);

    var currentSkeleton = ClientCharacterEquipmentHelper.CreateCharacterSkeleton(
        cameraObject,
        protoSkeleton,
        worldScale: scale,
        spriteQualityOffset: spriteQualityOffset);
    currentSkeleton.PositionOffset = (textureWidth / 2.0 + this.IconOffset.X * scale,
                                      -textureHeight * 0.7 + this.IconOffset.Y * scale);
    currentSkeleton.RenderingTag = renderingTag;

    await camera.DrawAsync();
    cameraObject.Destroy();

    request.ThrowIfCancelled();
    var generatedTexture = await renderTarget.SaveToTexture(
                               isTransparent: true,
                               qualityScaleCoef: Api.Client.Rendering.CalculateCurrentQualityScaleCoefWithOffset(
                                   spriteQualityOffset));

    currentSkeleton.Destroy();
    renderTarget.Dispose();
    request.ThrowIfCancelled();
    return generatedTexture;
}
private static async Task<ITextureResource> GenerateIcon(
    ProceduralTextureRequest request,
    ITextureResource originalIcon)
{
    Vector2Ushort size = (128, 128); // expand size a bit to fit the outline

    request.ThrowIfCancelled();

    // create camera and render texture
    var renderingTag = request.TextureName;
    var renderTexture = Rendering.CreateRenderTexture(renderingTag, size.X, size.Y);
    var cameraObject = Api.Client.Scene.CreateSceneObject(renderingTag);
    var camera = Rendering.CreateCamera(cameraObject,
                                        renderingTag,
                                        drawOrder: -100);
    camera.RenderTarget = renderTexture;
    camera.SetOrthographicProjection(size.X, size.Y);

    var spriteRenderer = Rendering.CreateSpriteRenderer(
        cameraObject,
        originalIcon,
        // draw at the center
        positionOffset: (size.X / 2, -size.Y / 2),
        spritePivotPoint: (0.5, 0.5),
        renderingTag: renderingTag,
        // as most icons are 256x256 and we need a 128x128 icon, scale it this way
        scale: 0.5);

    spriteRenderer.RenderingMaterial = RenderingMaterialCrateIcon;

    await camera.DrawAsync();
    cameraObject.Destroy();

    request.ThrowIfCancelled();
    var generatedTexture = await renderTexture.SaveToTexture(isTransparent: true);

    renderTexture.Dispose();
    request.ThrowIfCancelled();
    return generatedTexture;
}
private static async ValueTask<Vector2Ushort> PrepareLayers(
    ProceduralTextureRequest request,
    List<ComposeLayer> spritesToCombine)
{
    var textureDataTasks = spritesToCombine.Select(
                                               t => Rendering
                                                   .GetTextureSizeWithMagentaPixelPosition(t.TextureResource))
                                           .ToList();

    foreach (var textureDataTask in textureDataTasks)
    {
        await textureDataTask;
        request.ThrowIfCancelled();
    }

    var extendX = 0f;
    var extendY = 0f;

    for (var index = 0; index < textureDataTasks.Count; index++)
    {
        var result = textureDataTasks[index].Result;
        var pivotPos = result.MagentaPixelPosition;

        var composeItem = spritesToCombine[index];
        composeItem.PivotPos = pivotPos;
        spritesToCombine[index] = composeItem;

        var itemExtendX = Math.Max(pivotPos.X, result.Size.X - pivotPos.X);
        var itemExtendY = Math.Max(pivotPos.Y, result.Size.Y - pivotPos.Y);

        if (itemExtendX > extendX)
        {
            extendX = itemExtendX;
        }

        if (itemExtendY > extendY)
        {
            extendY = itemExtendY;
        }
    }

    var resultTextureSize = new Vector2Ushort((ushort)Math.Floor(extendX * 2),
                                              (ushort)Math.Floor(extendY * 2));
    return resultTextureSize;
}
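// Worked example for PrepareLayers (illustrative numbers, not taken from game data):
// the result size doubles the largest pivot-to-edge distance across all layers, so
// every layer fits once its magenta pivot pixel is aligned with the texture center.
// - layer A: 200x160 sprite, pivot (80, 90) -> extends max(80, 120) = 120 px in X, max(90, 70) = 90 px in Y
// - layer B: 100x100 sprite, pivot (50, 50) -> extends 50 px in X, 50 px in Y
// => extendX = 120, extendY = 90, so resultTextureSize = (240, 180).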
public static async Task<IRenderTarget2D> ApplyColorizerLut(
    ProceduralTextureRequest request,
    ITextureResource sourceTextureResource,
    string lutTextureFilePath)
{
    var lutTextureResource = new TextureResource3D(lutTextureFilePath,
                                                   depth: 24,
                                                   isTransparent: false);
    var lutTexture3D = await Renderer.LoadTexture3D(lutTextureResource);

    var textureSize = await Renderer.GetTextureSize(sourceTextureResource);
    var textureWidth = textureSize.X;
    var textureHeight = textureSize.Y;

    var renderingMaterial = RenderingMaterial.Create(new EffectResource("ColorLutRemap"));
    renderingMaterial.EffectParameters.Set("TextureLut", lutTexture3D);

    var renderingTag = "Colorizer camera for procedural texture: " + request.TextureName;
    var cameraObject = Api.Client.Scene.CreateSceneObject(renderingTag);
    var camera = Renderer.CreateCamera(cameraObject,
                                       renderingTag: renderingTag,
                                       drawOrder: -10,
                                       drawMode: CameraDrawMode.Manual);

    var renderTarget = Renderer.CreateRenderTexture(renderingTag, textureWidth, textureHeight);
    camera.RenderTarget = renderTarget;
    camera.ClearColor = Color.FromArgb(0, 0, 0, 0);
    camera.SetOrthographicProjection(textureWidth, textureHeight);

    Renderer.CreateSpriteRenderer(cameraObject,
                                  sourceTextureResource,
                                  renderingTag: renderingTag,
                                  // draw down
                                  spritePivotPoint: (0, 1))
            .RenderingMaterial = renderingMaterial;

    await camera.DrawAsync();
    cameraObject.Destroy();

    request.ThrowIfCancelled();
    return renderTarget;
}
private async Task<IGeneratedTexture2D> GenerateProceduralTextureInternal(
    string textureName,
    TextureAtlasResource sourceTextureAtlas,
    ProceduralTextureRequest request,
    bool isAtlas)
{
    var renderingTag = string.Format("Procedural texture \"{0}\" camera \"{1}\"",
                                     textureName,
                                     sourceTextureAtlas.TextureResource);

    var client = Api.Client;
    var cameraObject = client.Scene.CreateSceneObject(renderingTag);
    var camera = client.Rendering.CreateCamera(cameraObject, renderingTag, -10);

    var wallChunkTypes = FloorChunkPresets;
    var atlasSize = AtlasSize;
    var textureWidth = this.AtlasTextureWidth;
    var textureHeight = this.AtlasTextureHeight;

    var rendering = Api.Client.Rendering;
    var renderTexture = rendering.CreateRenderTexture(renderingTag, textureWidth, textureHeight);
    camera.RenderTarget = renderTexture;
    // TODO: we cannot use Colors.Transparent because RGB=FFFFFF in that case.
    camera.ClearColor = Color.FromArgb(0, 0, 0, 0);
    camera.SetOrthographicProjection(textureWidth, textureHeight);
    camera.TextureFilter = TextureFilter.Point;

    foreach (var variant in wallChunkTypes.Values)
    {
        if (variant.Layers == null)
        {
            // reusing layers from another chunk
            continue;
        }

        var targetRow = variant.TargetRow;
        var targetColumn = variant.TargetColumn;

        if (targetRow >= atlasSize.RowsCount)
        {
            Api.Logger.Error(
                $"Floor chunk target row exceeds rows count: {targetRow} >= {atlasSize.RowsCount}");
        }

        if (targetColumn >= atlasSize.ColumnsCount)
        {
            Api.Logger.Error(
                $"Floor chunk target column exceeds columns count: {targetColumn} >= {atlasSize.ColumnsCount}");
        }

        foreach (var layer in variant.Layers)
        {
            rendering.CreateSpriteRenderer(
                cameraObject,
                sourceTextureAtlas.Chunk(layer.Column, layer.Row),
                positionOffset: (this.TileTextureSize * targetColumn,
                                 -this.TileTextureSize * targetRow),
                // draw down
                spritePivotPoint: (0, 1),
                renderingTag: renderingTag);
        }
    }

    await camera.DrawAsync();
    cameraObject.Destroy();

    request.ThrowIfCancelled();

    IGeneratedTexture2D generatedTexture;
    if (isAtlas)
    {
        generatedTexture = await renderTexture.SaveToTextureAtlas(atlasSize, isTransparent: true);
        //Api.Logger.Write($"Texture atlas generated: {renderingTag} atlas size: {atlasSize}");
    }
    else
    {
        generatedTexture = await renderTexture.SaveToTexture(isTransparent: true);
    }

    renderTexture.Dispose();
    request.ThrowIfCancelled();
    return generatedTexture;
}
private static async Task<ITextureResource> Compose(
    ProceduralTextureRequest request,
    Vector2Ushort? customSize,
    params TextureResourceWithOffset[] textureResources)
{
    var rendering = Api.Client.Rendering;
    var renderingTag = request.TextureName;

    var qualityScaleCoef = rendering.CalculateCurrentQualityScaleCoefWithOffset(0);

    Vector2Ushort size;
    if (customSize is not null)
    {
        size = customSize.Value;
        if (qualityScaleCoef > 1)
        {
            size = new Vector2Ushort((ushort)(size.X / qualityScaleCoef),
                                     (ushort)(size.Y / qualityScaleCoef));
        }
    }
    else
    {
        size = await rendering.GetTextureSize(textureResources[0].TextureResource);
    }

    request.ThrowIfCancelled();

    // create camera and render texture
    var renderTexture = rendering.CreateRenderTexture(renderingTag, size.X, size.Y);
    var cameraObject = Api.Client.Scene.CreateSceneObject(renderingTag);
    var camera = rendering.CreateCamera(cameraObject,
                                        renderingTag,
                                        drawOrder: -100);
    camera.RenderTarget = renderTexture;
    camera.SetOrthographicProjection(size.X, size.Y);

    // create and prepare sprite renderers
    foreach (var entry in textureResources)
    {
        var positionOffset = entry.Offset;
        if (positionOffset.HasValue
            && qualityScaleCoef > 1)
        {
            positionOffset /= qualityScaleCoef;
        }

        rendering.CreateSpriteRenderer(
            cameraObject,
            entry.TextureResource,
            positionOffset: positionOffset,
            spritePivotPoint: entry.PivotPoint ?? (0, 1), // draw down by default
            renderingTag: renderingTag);
    }

    await camera.DrawAsync();
    cameraObject.Destroy();

    request.ThrowIfCancelled();
    var generatedTexture = await renderTexture.SaveToTexture(isTransparent: true);

    renderTexture.Dispose();
    request.ThrowIfCancelled();
    return generatedTexture;
}
public static async Task<ITextureResource> GenerateHeadSprite(
    CharacterHeadSpriteData data,
    ProceduralTextureRequest request,
    bool isMale,
    bool isFrontFace,
    Vector2Ushort? customTextureSize = null,
    sbyte spriteQualityOffset = 0)
{
    var renderingTag = request.TextureName;
    var side = isFrontFace ? "Front" : "Back";
    var style = data.FaceStyle;

    var faceStylesProvider = SharedCharacterFaceStylesProvider.GetForGender(isMale);
    var facePath = $"{faceStylesProvider.FacesFolderPath}{style.FaceId}/{side}";
    var faceShapePath = facePath + ".png";
    if (!IsFileExists(faceShapePath))
    {
        Api.Logger.Error("Face sprite not found: " + faceShapePath);
        // try fallback
        facePath = faceStylesProvider.FacesFolderPath + "Face01/" + side;
        faceShapePath = facePath;
        if (!IsFileExists(faceShapePath))
        {
            // no fallback
            return TextureResource.NoTexture;
        }
    }

    var faceTopPath = $"{facePath}Top{style.TopId}.png";
    var faceBottomPath = $"{facePath}Bottom{style.BottomId}.png";

    if (isFrontFace)
    {
        if (!IsFileExists(faceTopPath))
        {
            Api.Logger.Error("Face top sprite not found: " + faceTopPath);
            // try fallback
            faceTopPath = $"{facePath}Top01.png";
            if (!IsFileExists(faceTopPath))
            {
                // no fallback
                return TextureResource.NoTexture;
            }
        }

        if (!IsFileExists(faceBottomPath))
        {
            Api.Logger.Error("Face bottom sprite not found: " + faceBottomPath);
            // try fallback
            faceBottomPath = $"{facePath}Bottom01.png";
            if (!IsFileExists(faceBottomPath))
            {
                // no fallback
                return TextureResource.NoTexture;
            }
        }
    }

    var itemHeadEquipment = data.HeadEquipment;
    var protoItemHeadEquipment = (IProtoItemEquipmentHead)itemHeadEquipment?.ProtoItem;

    var isHairVisible = protoItemHeadEquipment?.IsHairVisible ?? true;
    isHairVisible &= style.HairId != null;

    string hair = null, hairBehind = null;
    if (isHairVisible)
    {
        var hairBase = faceStylesProvider.HairFolderPath + $"{style.HairId}/{side}";
        hair = hairBase + ".png";
        hairBehind = hairBase + "Behind.png";
    }

    string helmetFront = null, helmetBehind = null;
    if (protoItemHeadEquipment != null)
    {
        protoItemHeadEquipment.ClientGetHeadSlotSprites(
            itemHeadEquipment,
            isMale,
            data.SkeletonResource,
            isFrontFace,
            out helmetFront,
            out helmetBehind);

        if (helmetFront == null)
        {
            throw new Exception("Helmet attachment is not available for " + protoItemHeadEquipment);
        }
    }

    var isHeadVisible = protoItemHeadEquipment?.IsHeadVisible ?? true;

    // let's combine all the layers (if some elements are null - they will not be rendered)
    List<ComposeLayer> layers;
    if (isHeadVisible)
    {
        layers = new List<ComposeLayer>()
        {
            new ComposeLayer(helmetBehind, spriteQualityOffset),
            new ComposeLayer(hairBehind, spriteQualityOffset),
            new ComposeLayer(faceShapePath, spriteQualityOffset),
            new ComposeLayer(faceTopPath, spriteQualityOffset),
            new ComposeLayer(faceBottomPath, spriteQualityOffset),
            new ComposeLayer(hair, spriteQualityOffset),
            new ComposeLayer(helmetFront, spriteQualityOffset)
        };
    }
    else // if head is not visible (defined by head equipment item)
    {
        layers = new List<ComposeLayer>()
        {
            new ComposeLayer(helmetBehind, spriteQualityOffset),
            new ComposeLayer(helmetFront, spriteQualityOffset)
        };
    }

    // keep only those layers which have the corresponding file
    layers.RemoveAll(
        t => t.TextureResource == null
             || !IsFileExists(t.TextureResource.FullPath));

    if (layers.Count == 0)
    {
        Api.Logger.Error("No sprites for face rendering: " + request.TextureName);
        return TextureResource.NoTexture;
    }

    // load all the layers data
    var resultTextureSize = await PrepareLayers(request, layers);
    if (customTextureSize.HasValue)
    {
        resultTextureSize = customTextureSize.Value;
    }

    var referencePivotPos = new Vector2Ushort(
        (ushort)(resultTextureSize.X / 2),
        (ushort)(resultTextureSize.Y / 2));

    // create camera and render texture
    var renderTexture = Rendering.CreateRenderTexture(renderingTag, resultTextureSize.X, resultTextureSize.Y);
    var cameraObject = Api.Client.Scene.CreateSceneObject(renderingTag);
    var camera = Rendering.CreateCamera(cameraObject,
                                        renderingTag,
                                        drawOrder: -100);
    camera.RenderTarget = renderTexture;
    camera.SetOrthographicProjection(resultTextureSize.X, resultTextureSize.Y);

    // create and prepare renderer for each layer
    foreach (var layer in layers)
    {
        var pivotPos = layer.PivotPos;
        var offsetX = referencePivotPos.X - pivotPos.X;
        var offsetY = pivotPos.Y - referencePivotPos.Y;
        var offset = (offsetX, offsetY);

        Rendering.CreateSpriteRenderer(
            cameraObject,
            layer.TextureResource,
            positionOffset: offset,
            // draw down
            spritePivotPoint: (0, 1),
            renderingTag: renderingTag);
    }

    // ReSharper disable once CoVariantArrayConversion
    request.ChangeDependencies(layers.Select(l => l.TextureResource).ToArray());

    await camera.DrawAsync();
    cameraObject.Destroy();

    request.ThrowIfCancelled();
    var generatedTexture = await renderTexture.SaveToTexture(
                               isTransparent: true,
                               qualityScaleCoef: Rendering.CalculateCurrentQualityScaleCoefWithOffset(
                                   spriteQualityOffset));

    renderTexture.Dispose();
    request.ThrowIfCancelled();
    return generatedTexture;
}
public static async Task<IRenderTarget2D> ApplyMaskToRenderTargetAsync(
    ProceduralTextureRequest request,
    IRenderTarget2D sourceRenderTarget,
    TextureResource maskResource)
{
    var textureWidth = (ushort)sourceRenderTarget.Width;
    var textureHeight = (ushort)sourceRenderTarget.Height;

    var maskSizeAbsolute = await Renderer.GetTextureSize(maskResource);
    var maskScale = new Vector2F((float)(textureWidth / (double)maskSizeAbsolute.X),
                                 (float)(textureHeight / (double)maskSizeAbsolute.Y));
    var maskOffset = new Vector2F((float)((maskSizeAbsolute.X - textureWidth) / (2.0 * textureWidth)),
                                  (float)((maskSizeAbsolute.Y - textureHeight) / (2.0 * textureHeight)));

    /*Api.Logger.Dev(
        string.Format("Texture size: X={0} Y={1}"
                      + "{2}Mask size: X={3} Y={4}"
                      + "{2}Mask scale: X={5:F2} Y={6:F2}"
                      + "{2}Mask offset: X={7:F2} Y={8:F2}",
                      textureWidth,
                      textureHeight,
                      Environment.NewLine,
                      maskSizeAbsolute.X,
                      maskSizeAbsolute.Y,
                      maskScale.X,
                      maskScale.Y,
                      maskOffset.X,
                      maskOffset.Y));*/

    var renderingMaterial = RenderingMaterial.Create(new EffectResource("DrawWithMaskOffset"));
    renderingMaterial.EffectParameters
                     .Set("MaskTextureArray", maskResource)
                     .Set("MaskScale", maskScale)
                     .Set("MaskOffset", maskOffset);

    var renderingTag = "Mask camera for procedural texture: " + request.TextureName;
    var cameraObject = Api.Client.Scene.CreateSceneObject(renderingTag);
    var camera = Renderer.CreateCamera(cameraObject,
                                       renderingTag: renderingTag,
                                       drawOrder: -10,
                                       drawMode: CameraDrawMode.Manual);

    var renderTarget = Renderer.CreateRenderTexture(renderingTag, textureWidth, textureHeight);
    camera.RenderTarget = renderTarget;
    camera.ClearColor = Color.FromArgb(0, 0, 0, 0);
    camera.SetOrthographicProjection(textureWidth, textureHeight);

    Renderer.CreateSpriteRenderer(cameraObject,
                                  sourceRenderTarget,
                                  renderingTag: renderingTag,
                                  // draw down
                                  spritePivotPoint: (0, 1))
            .RenderingMaterial = renderingMaterial;

    await camera.DrawAsync();
    cameraObject.Destroy();

    request.ThrowIfCancelled();
    return renderTarget;
}
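// A minimal usage sketch (assumption, not part of the original sources): because
// ApplyColorizerLut returns an IRenderTarget2D and ApplyMaskToRenderTargetAsync
// accepts one as its source, the two passes can be chained inside a procedural
// texture generator. The method name and the lutPath/maskResource arguments are
// illustrative placeholders; only the calls shown above are taken from the code.
private static async Task<ITextureResource> GenerateColorizedMaskedTexture(
    ProceduralTextureRequest request,
    ITextureResource sourceTexture,
    string lutPath,
    TextureResource maskResource)
{
    // pass 1: remap colors through the 3D LUT
    var colorized = await ApplyColorizerLut(request, sourceTexture, lutPath);

    // pass 2: cut the colorized result by the mask
    var masked = await ApplyMaskToRenderTargetAsync(request, colorized, maskResource);
    colorized.Dispose();
    request.ThrowIfCancelled();

    // save the final render target into a regular texture
    var result = await masked.SaveToTexture(isTransparent: true);
    masked.Dispose();
    request.ThrowIfCancelled();
    return result;
}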