/// <summary>
/// Verifies that color keying replaces every pixel matching ColorKeyColor
/// with Color.Transparent while leaving size and format untouched.
/// </summary>
public void ColorKey()
{
    var context = new TestProcessorContext(TargetPlatform.Windows, "dummy.xnb");
    var processor = new TextureProcessor
    {
        ColorKeyColor = Color.Red,
        ColorKeyEnabled = true,
        GenerateMipmaps = false,
        PremultiplyAlpha = false,
        ResizeToPowerOfTwo = false,
        TextureFormat = TextureProcessorOutputFormat.Color
    };

    // Source texture: an 8x8 face filled entirely with the key color.
    var sourceFace = new PixelBitmapContent<Color>(8, 8);
    Fill(sourceFace, Color.Red);
    var input = new Texture2DContent();
    input.Faces[0] = sourceFace;

    var output = processor.Process(input, context);

    Assert.NotNull(output);
    Assert.AreEqual(1, output.Faces.Count);
    Assert.AreEqual(1, output.Faces[0].Count);
    Assert.IsAssignableFrom<PixelBitmapContent<Color>>(output.Faces[0][0]);

    var resultFace = (PixelBitmapContent<Color>)output.Faces[0][0];
    Assert.AreEqual(8, resultFace.Width);
    Assert.AreEqual(8, resultFace.Height);

    // Every keyed pixel must now be fully transparent.
    for (var row = 0; row < resultFace.Height; row++)
    {
        for (var col = 0; col < resultFace.Width; col++)
        {
            Assert.AreEqual(Color.Transparent, resultFace.GetPixel(col, row));
        }
    }
}
/// <summary>
/// Wires this input to an optional upstream processor and prepares the
/// fallback empty-texture processor used when nothing is connected.
/// </summary>
public InputHandler(TextureProcessor processor = null, EmptyTextureType emptyTextureType = EmptyTextureType.White)
{
    connectedProcessor = processor;
    this.emptyTextureType = emptyTextureType;
    emptyProcessor = new EmptyTextureProcessor();
    UpdateEmptyProcessorColor();
}
/// <summary>
/// Processes a solid-color 16x16 texture with compression enabled (no
/// mipmaps) and asserts the single output face has bitmap type T.
/// </summary>
void CompressDefault <T>(TargetPlatform platform, Color color)
{
    var context = new TestProcessorContext(platform, "dummy.xnb");
    var processor = new TextureProcessor
    {
        ColorKeyEnabled = false,
        GenerateMipmaps = false,
        PremultiplyAlpha = false,
        ResizeToPowerOfTwo = false,
        TextureFormat = TextureProcessorOutputFormat.Compressed
    };

    var sourceFace = new PixelBitmapContent <Color>(16, 16);
    Fill(sourceFace, color);
    var input = new Texture2DContent();
    input.Faces[0] = sourceFace;

    var output = processor.Process(input, context);

    Assert.NotNull(output);
    Assert.AreEqual(1, output.Faces.Count);
    Assert.AreEqual(1, output.Faces[0].Count);
    Assert.IsAssignableFrom <T>(output.Faces[0][0]);
}
/// <summary>
/// Builds a TextureImporter for one surface of a figure, restricting
/// texture work to that surface via a UV-set mask.
/// </summary>
public static TextureImporter Make(TextureProcessor textureProcessor, Figure figure, string uvSetName, int surfaceIdx)
{
    var uvSet = figure.UvSets[uvSetName];
    var surfaceMask = TextureMask.Make(uvSet, figure.Geometry.SurfaceMap, surfaceIdx);
    return new TextureImporter(textureProcessor, surfaceMask);
}
/// <summary>
/// Imports every non-base material-set configuration for a figure, skipping
/// sets excluded by the import settings. A shared texture processor is used
/// when the figure's surface properties request texture sharing.
/// </summary>
public static void DumpAllForFigure(ImportSettings settings, Device device, ShaderCache shaderCache, ContentFileLocator fileLocator, DsonObjectLocator objectLocator, Figure figure, TextureProcessorSharer textureProcessorSharer)
{
    MaterialSetImportConfiguration[] configurations = MaterialSetImportConfiguration.Load(figure.Name);
    var baseConfiguration = configurations.Single(c => c.name == "Base");

    var surfaceProperties = SurfacePropertiesJson.Load(figure);
    TextureProcessor sharedTextureProcessor = null;
    if (surfaceProperties.ShareTextures != null)
    {
        sharedTextureProcessor = textureProcessorSharer.GetSharedProcessor(surfaceProperties.ShareTextures);
    }

    foreach (var configuration in configurations)
    {
        // The base configuration is only a template for the others.
        if (configuration == baseConfiguration)
        {
            continue;
        }
        if (!settings.ShouldImportMaterialSet(figure.Name, configuration.name))
        {
            continue;
        }
        DumpMaterialSetAndScattering(settings, device, shaderCache, fileLocator, objectLocator, figure, baseConfiguration, configuration, sharedTextureProcessor);
    }
}
/// <summary>
/// Caches the item, its attachment module, and the parent firearm, then
/// sets up the ammo-counter display when the module references a counter
/// mesh. Finally pushes the current ammo count to the counter, if any.
/// </summary>
protected void Awake()
{
    item = this.GetComponent <Item>();
    module = item.data.GetModule <Shared.AttachmentModule>();
    //item.OnHeldActionEvent += this.OnHeldAction;
    parentFirearm = this.GetComponent <Weapons.BaseFirearmGenerator>();

    if (!String.IsNullOrEmpty(module.ammoCounterRef))
    {
        // Fix: resolve the custom reference once and reuse it — the original
        // performed the same GetCustomReference/GetComponent lookup twice and
        // dereferenced .material without checking the renderer was found.
        ammoCounterMesh = item.GetCustomReference(module.ammoCounterRef).GetComponent <MeshRenderer>();
        if (ammoCounterMesh != null)
        {
            digitsGridTexture = (Texture2D)ammoCounterMesh.material.mainTexture;
        }
    }

    if ((digitsGridTexture != null) && (ammoCounterMesh != null))
    {
        ammoCounter = new TextureProcessor();
        ammoCounter.SetGridTexture(digitsGridTexture);
        ammoCounter.SetTargetRenderer(ammoCounterMesh);
    }
    if (ammoCounter != null)
    {
        ammoCounter.DisplayUpdate(newAmmoCount);
    }
}
/// <summary>
/// Processes a solid-color texture with compression and mipmaps enabled,
/// asserting a single face carrying a 5-level mip chain of type T.
/// </summary>
void CompressDefault <T>(TargetPlatform platform, Color color, int width = 16, int height = 16)
{
    var context = new TestProcessorContext(platform, "dummy.xnb");
    var processor = new TextureProcessor
    {
        ColorKeyEnabled = false,
        GenerateMipmaps = true,
        PremultiplyAlpha = false,
        ResizeToPowerOfTwo = false,
        TextureFormat = TextureProcessorOutputFormat.Compressed
    };

    var sourceFace = new PixelBitmapContent <Color>(width, height);
    Fill(sourceFace, color);
    var input = new Texture2DContent();
    input.Faces[0] = sourceFace;

    var output = processor.Process(input, context);

    Assert.NotNull(output);
    Assert.AreEqual(1, output.Faces.Count, "Expected number of faces");
    Assert.AreEqual(5, output.Faces[0].Count, "Expected number of mipmaps");
    Assert.IsAssignableFrom <T>(output.Faces[0][0], "Incorrect pixel format");
}
/// <summary>
/// Builds a textured model from the native-american MDL mesh using front
/// and back photographs, smooths it with 4 Paul Bourke passes, and saves
/// both the textured model and a reference MDL conversion as XAML.
/// Fix: removed a stray empty statement (double semicolon).
/// </summary>
public void TestNativeAmerican()
{
    // Front camera/photo pair.
    var camPos1 = new Point3D(0.000000, 0.000000, 1239.995361);
    var lookingAtPt1 = new Point3D(-0.810811, 0.000000, 0.000000);
    var image1 = (Bitmap)Image.FromFile(_inputPath + @"\native_american_front.jpg");
    var frontTexImageInfo = new AddTexImageInfo { CameraLocation = camPos1, ImageBitmap = image1, LookingAt = lookingAtPt1 };

    // Back camera/photo pair.
    var camPos2 = new Point3D(-54.005825, 0.000000, -1238.818726);
    var lookingAtPt2 = new Point3D(0.810041, 0.000000, -0.035313);
    var image2 = (Bitmap)Image.FromFile(_inputPath + @"\native_american_back.jpg");
    var backTexImageInfo = new AddTexImageInfo { CameraLocation = camPos2, ImageBitmap = image2, LookingAt = lookingAtPt2 };

    var cameraRatio = Te();
    var addTextureInfo = new AddTextureInfo { CameraRatio = cameraRatio, ImageInfos = new[] { frontTexImageInfo, backTexImageInfo } };

    var mdlFilePath = _inputPath + @"\native_american.mdl";
    var mdlReader = new MdlFilePolygonDataReader(mdlFilePath);
    var triangles = Triangle.GetTrianglesFromPts(mdlReader.Points);
    var meshGeometryModel = PaulBourkeSmoother.CreateMeshGeometry3DFromTriangles(triangles);
    Assert.AreEqual(0, meshGeometryModel.Normals.Count);

    // Make the model smoother: 4 smoothing iterations over the positions.
    var currentPositions = meshGeometryModel.Positions;
    var positionNeighbors = PaulBourkeSmoother.GetPositionNeighbors(currentPositions.Count, meshGeometryModel.TriangleIndices);
    for (var ctr = 1; ctr <= 4; ctr++)
    {
        var newPositions = PaulBourkeSmoother.GetSmoothenedPositions(currentPositions, meshGeometryModel.TriangleIndices, positionNeighbors);
        currentPositions = newPositions;
    }
    meshGeometryModel.Positions = currentPositions;

    var result = TextureProcessor.GenerateTexture(addTextureInfo, meshGeometryModel, _outputPath + @"\log.txt");
    var textureImageName = _outputPath + @"\" + "native_american.bmp";
    result.Bitmap.Save(textureImageName);
    meshGeometryModel.TextureCoordinates = result.TextureCoordinates;

    var geometryModel3D = new GeometryModel3D
    {
        Geometry = meshGeometryModel,
        Material = new DiffuseMaterial
        {
            Brush = new ImageBrush
            {
                ImageSource = new BitmapImage(new Uri(textureImageName, UriKind.Relative)),
                ViewportUnits = BrushMappingMode.Absolute
            }
        }
    };
    XamlWriter.SaveGeometryModel3D(_outputPath + @"\native_american_ModelWithTexture.xaml", geometryModel3D);
    MdlToXamlConverter.SaveAsGeometryModel3D(mdlFilePath, _outputPath + @"\Orig_native_american_Model_WithTexture.xaml");
}
// Converts a ParticleDesigner (.pex) emitter into a processed result.
// This version handles only the embedded-texture case: the base64 field
// holds a gzip-compressed TIFF, which is inflated in 4 KB chunks, written
// to a fixed temp file, re-imported via TextureImporter, and finally run
// through a TextureProcessor (no mipmaps, Color output format).
// NOTE(review): the fixed temp filename is not unique — concurrent builds
// would collide; consider a per-build random name. Also `as Texture2DContent`
// can yield null before `.Name` is assigned — TODO confirm the importer
// always returns Texture2DContent for TIFF input.
public override ParticleDesignerProcessorResult Process(ParticleDesignerContent input, ContentProcessorContext context) { logger = context.Logger; var result = new ParticleDesignerProcessorResult(); // check for an embedded tiff texture if (input.emitterConfig.texture.data != null) { context.Logger.LogMessage("pex file has an embedded tiff. Extracting now."); using (var memoryStream = new MemoryStream(Convert.FromBase64String(input.emitterConfig.texture.data), writable: false)) { using (var stream = new GZipStream(memoryStream, CompressionMode.Decompress)) { const int size = 4096; byte[] buffer = new byte[size]; using (var memory = new MemoryStream()) { int count = 0; do { count = stream.Read(buffer, 0, size); if (count > 0) { memory.Write(buffer, 0, count); } } while(count > 0); result.textureTiffData = memory.ToArray(); } } } var tempFile = Path.Combine(Path.GetTempPath(), "tempParticleTexture.tif"); File.WriteAllBytes(tempFile, result.textureTiffData); context.Logger.LogMessage("writing tiff to temp file: {0}", tempFile); context.Logger.LogMessage("running TextureImportor on tiff"); var textureImporter = new TextureImporter(); result.texture = textureImporter.Import(tempFile, input.context) as Texture2DContent; result.texture.Name = input.emitterConfig.texture.name; context.Logger.LogMessage("deleting temp file"); File.Delete(tempFile); // process context.Logger.LogMessage("processing TextureContent"); var textureProcessor = new TextureProcessor { GenerateMipmaps = false, TextureFormat = TextureProcessorOutputFormat.Color }; result.texture = (Texture2DContent)textureProcessor.Process(result.texture, context); context.Logger.LogMessage("TextureContent processed"); } result.particleEmitterConfig = input.emitterConfig; return(result); }
/// <summary>
/// Perlin-noise processor: wraps a simple-perlin source behind an additive
/// mix shader and reseeds the source every time "Generate" fires.
/// </summary>
public ProcessorPerlinNoise()
{
    matAdd = new Material(Shader.Find("ProTeGe/Processors/Mix/Add"));

    perlin = new ProcessorSimplePerlin();
    perlin.cacheON = false;

    // Reseed on every generate so successive outputs differ.
    AddPropertyHook("Generate", delegate
    {
        perlin["Seed"] = Time.time;
    });
}
/// <summary>
/// Dumps one material-set configuration (including scattering) and, when
/// the set defines custom occlusion, its occlusion data as well.
/// </summary>
public void DumpMaterialSet(ImportSettings importSettings, TextureProcessor textureProcessor, DirectoryInfo figureDestDir, MaterialSetImportConfiguration conf)
{
    MaterialSetDumper.DumpMaterialSetAndScattering(importSettings, device, shaderCache, fileLocator, objectLocator, figure, surfaceProperties, baseMaterialSetImportConfiguration, textureProcessor, figureDestDir, conf);

    if (conf.useCustomOcclusion)
    {
        shapeDumper.DumpOcclusionForMaterialSet(figureDestDir, conf.name);
    }
}
/// <summary>
/// Mipmapping a 23x5 (non-square, non-power-of-two) texture must yield a
/// 5-level chain whose dimensions halve per level (integer division,
/// clamped at 1), with every level still solid red.
/// </summary>
public void MipmapNonSquareNonPowerOfTwo()
{
    var context = new TestProcessorContext(TargetPlatform.Windows, "dummy.xnb");
    var processor = new TextureProcessor
    {
        ColorKeyEnabled = false,
        GenerateMipmaps = true,
        PremultiplyAlpha = false,
        ResizeToPowerOfTwo = false,
        TextureFormat = TextureProcessorOutputFormat.Color
    };

    var expectedWidth = 23;
    var expectedHeight = 5;
    var face = new PixelBitmapContent <Color>(expectedWidth, expectedHeight);
    Fill(face, Color.Red);
    var input = new Texture2DContent();
    input.Faces[0] = face;

    var output = processor.Process(input, context);
    Assert.NotNull(output);
    Assert.AreEqual(1, output.Faces.Count);

    var mipChain = output.Faces[0];
    Assert.AreEqual(5, mipChain.Count);

    foreach (var mip in mipChain)
    {
        Assert.AreEqual(expectedWidth, mip.Width);
        Assert.AreEqual(expectedHeight, mip.Height);

        var bitmap = (PixelBitmapContent <Color>)mip;
        for (var y = 0; y < expectedHeight; y++)
        {
            for (var x = 0; x < expectedWidth; x++)
            {
                Assert.AreEqual(Color.Red, bitmap.GetPixel(x, y));
            }
        }

        // Next level halves each dimension, never dropping below 1.
        if (expectedWidth > 1)
        {
            expectedWidth /= 2;
        }
        if (expectedHeight > 1)
        {
            expectedHeight /= 2;
        }
    }
}
// Builds a textured dinosaur model: loads front/back photos with their
// camera positions, reads the mesh (with normals) from XAML, applies 21
// Paul Bourke smoothing passes to the positions, generates a texture from
// the two photos, and saves the textured model as XAML plus a reference
// conversion of the original MDL file.
public void TestDinosaurModel() { var camPos1 = new Point3D(0.000000, 0.000000, 40.000000); var lookingAtPt1 = new Point3D(0.449719, 0.000000, 0.000000); var image1 = (Bitmap)Image.FromFile(_inputPath + @"\dinosaur_front.bmp"); var frontTexImageInfo = new AddTexImageInfo { CameraLocation = camPos1, ImageBitmap = image1, LookingAt = lookingAtPt1 }; var camPos2 = new Point3D(9.568636, 0.000000, -38.838657); var lookingAtPt2 = new Point3D(-0.436662, 0.000000, -0.107580); var image2 = (Bitmap)Image.FromFile(_inputPath + @"\dinosaur_back.bmp"); var backTexImageInfo = new AddTexImageInfo { CameraLocation = camPos2, ImageBitmap = image2, LookingAt = lookingAtPt2 }; var cameraRatio = new CameraRatio { XRangeAtInfinity = 30.000000, YRangeAtInfinity = 22.504684 }; var addTextureInfo = new AddTextureInfo { CameraRatio = cameraRatio, ImageInfos = new[] { frontTexImageInfo, backTexImageInfo } }; var models = XamlFormatModelReader.GetModelsFromFile(_inputPath + @"\dinosaur_with_normals.xaml"); var meshGeometryModel = (MeshGeometry3D)models[0].Geometry; //make the model smoother var currentPositions = meshGeometryModel.Positions; var positionNeighbors = PaulBourkeSmoother.GetPositionNeighbors(currentPositions.Count, meshGeometryModel.TriangleIndices); for (var ctr = 1; ctr <= 21; ctr++) { var newPositions = PaulBourkeSmoother.GetSmoothenedPositions(currentPositions, meshGeometryModel.TriangleIndices, positionNeighbors); currentPositions = newPositions; } meshGeometryModel.Positions = currentPositions; var result = TextureProcessor.GenerateTexture(addTextureInfo, meshGeometryModel, _outputPath + @"\log.txt"); var textureImageName = _outputPath + @"\" + "dinosaur_texture.bmp"; result.Bitmap.Save(textureImageName); meshGeometryModel.TextureCoordinates = result.TextureCoordinates; var geometryModel3D = new GeometryModel3D { Geometry = meshGeometryModel, Material = new DiffuseMaterial { Brush = new ImageBrush { ImageSource = new BitmapImage(new Uri(textureImageName, 
UriKind.Relative)), ViewportUnits = BrushMappingMode.Absolute } } }; XamlWriter.SaveGeometryModel3D(_outputPath + @"\ModelWithTexture.xaml", geometryModel3D); MdlToXamlConverter.SaveAsGeometryModel3D(_inputPath + @"\dinosaur.mdl", _outputPath + @"\Orig_dinosaur_Model_WithTexture.xaml"); }
/// <summary>
/// Returns the texture processor registered under <paramref name="shareName"/>,
/// creating and caching one (with its own destination folder) on first use.
/// </summary>
public TextureProcessor GetSharedProcessor(string shareName)
{
    if (processors.TryGetValue(shareName, out var existing))
    {
        return existing;
    }

    var destinationFolder = CommonPaths.WorkDir.Subdirectory("textures").Subdirectory(shareName);
    var created = new TextureProcessor(device, shaderCache, destinationFolder, compress);
    processors.Add(shareName, created);
    return created;
}
/// <summary>
/// A freshly constructed TextureProcessor must expose the documented
/// defaults: magenta color key, keying on, no mipmaps, premultiply on,
/// no power-of-two resize, Color output format.
/// </summary>
public void ValidateDefaults()
{
    var processor = new TextureProcessor();

    Assert.AreEqual(new Color(255, 0, 255, 255), processor.ColorKeyColor);
    Assert.AreEqual(true, processor.ColorKeyEnabled);
    Assert.AreEqual(false, processor.GenerateMipmaps);
    Assert.AreEqual(true, processor.PremultiplyAlpha);
    Assert.AreEqual(false, processor.ResizeToPowerOfTwo);
    Assert.AreEqual(TextureProcessorOutputFormat.Color, processor.TextureFormat);
}
/// <summary>
/// Checks every default property value of a newly created TextureProcessor
/// against the expected out-of-the-box configuration.
/// </summary>
public void ValidateDefaults()
{
    var defaultProcessor = new TextureProcessor();

    Assert.AreEqual(new Color(255, 0, 255, 255), defaultProcessor.ColorKeyColor);
    Assert.AreEqual(true, defaultProcessor.ColorKeyEnabled);
    Assert.AreEqual(false, defaultProcessor.GenerateMipmaps);
    Assert.AreEqual(true, defaultProcessor.PremultiplyAlpha);
    Assert.AreEqual(false, defaultProcessor.ResizeToPowerOfTwo);
    Assert.AreEqual(TextureProcessorOutputFormat.Color, defaultProcessor.TextureFormat);
}
/// <summary>
/// Registers a processor in the static id map (created lazily on first
/// use) and returns the freshly assigned id.
/// </summary>
private static int registerTP(TextureProcessor tp)
{
    if (tpIDMap == null)
    {
        tpIDMap = new Dictionary <int, TextureProcessor>();
    }

    int assignedId = lastTPId++;
    tpIDMap.Add(assignedId, tp);
    return assignedId;
}
// Builds a complete model (mesh + texture) from mold images:
// 1) ApplyImages produces a mold-data stream, 2) PointsToPolygons turns the
// point grid (bounded by PtDensity on each axis) into a triangle mesh,
// 3) optional Paul Bourke smoothing when SmoothingIterationCount > 0,
// 4) TextureProcessor.GenerateTexture paints the mesh from the front/back
// images. Returns the mesh plus the texture bitmap (bitmap is null when
// texture generation produced nothing).
public static ModelMeshAndTexture CreateDefaultModel(CreateModelContract inputParams) { var logger = new Logger(inputParams.LogFilePath); Stream moldDataStream; ApplyImages(inputParams, out moldDataStream); var createModelInfo = new CreateModelInfo { MoldData = moldDataStream, Minx = 1, Maxx = inputParams.PtDensity, Miny = 1, Maxy = inputParams.PtDensity, Minz = 1, Maxz = inputParams.PtDensity }; logger.Log("Start model creation from mold points"); var ptsToPolygons = new PointsToPolygons(createModelInfo); var model = ptsToPolygons.Process(); logger.Log("End model creation from mold points."); if (moldDataStream != null) { moldDataStream.Close(); } if (inputParams.SmoothingIterationCount > 0) { logger.Log(string.Format("Start smoothening {0} times", inputParams.SmoothingIterationCount)); model.Positions = PaulBourkeSmoother.GetSmoothenedPositions(model.Positions, model.TriangleIndices, inputParams.SmoothingIterationCount); logger.Log("End smoothening."); } var addTextureInfo = GetAddTextureInfoForFrontAndBackImage(inputParams); var texture = TextureProcessor.GenerateTexture(addTextureInfo, model, inputParams.LogFilePath); logger.Log("Returning model mesh and texture"); if (texture != null) { model.TextureCoordinates = texture.TextureCoordinates; } return(new ModelMeshAndTexture { MeshGeometry = model, TextureBitmap = (texture == null? null : texture.Bitmap) }); }
/// <summary>
/// Ambient-occlusion processor: an internal AO pass followed by a Gaussian
/// blur. Input 0 defaults to white so a disconnected input yields no
/// occlusion.
/// </summary>
public ProcessorAmbientOcclusion()
{
    blur = new ProcessorGaussianBlur();
    AO = new ProcessorAOInternal();
    inputs[0].emptyTextureType = InputHandler.EmptyTextureType.White;

    // User-tweakable parameters (name, default, max).
    AddProperty(new ProcessorProperty_float("Range", 16, 32));
    AddProperty(new ProcessorProperty_float("Quality", 4, 32));
    AddProperty(new ProcessorProperty_float("Intensity", 0.5f, 1));
    AddProperty(new ProcessorProperty_float("Range Blur", 0f, 4f));
    AddProperty(new ProcessorProperty_fixed("Quality Blur", 0f));
    AddProperty(new ProcessorProperty_fixed("Undersampling Blur", 0.75f));
}
/// <summary>
/// Replaces the preview material's texture slot <paramref name="name"/>
/// with a freshly generated texture, releasing the previous temporary
/// render texture first.
/// </summary>
void UpdateTexture(string name, TextureProcessor processor)
{
    // Detach the old temporary render texture before releasing it.
    var previous = Globals.instance.components.previewRenderer.material.GetTexture(name) as RenderTexture;
    Globals.instance.components.previewRenderer.material.SetTexture(name, null);
    if (previous != null)
    {
        RenderTexture.ReleaseTemporary(previous);
    }

    var generated = processor.Generate(Globals.instance.textureSize_preview).renderTexture;
    Globals.instance.components.previewRenderer.material.SetTexture(name, generated);
}
/// <summary>
/// Generates a texture at the requested resolution via the connected
/// processor, recording its cache id for later reuse.
/// Fix: the original checked for null BEFORE clearing a dead processor,
/// then dereferenced the now-null field — guaranteed NullReferenceException
/// whenever connectedProcessor.isDead was true. A dead processor is now
/// dropped first so the null check reports it properly.
/// </summary>
/// <exception cref="System.Exception">No live processor is connected.</exception>
public ProTeGe_Texture Generate(int resolution)
{
    // Drop a dead processor first so the null check below catches it.
    if (connectedProcessor != null && connectedProcessor.isDead)
    {
        connectedProcessor = null;
    }
    if (connectedProcessor == null)
    {
        throw new System.Exception("somehow TextureProcessorInputHandler has connectedProcessor null");
    }

    System.Tuple <ProTeGe_Texture, long> generated = connectedProcessor.Generate_with_cacheID(resolution);
    cacheID = generated.Item2;
    return generated.Item1;
}
/// <summary>
/// Billow-noise processor: cellular noise driven by a dedicated seeder
/// (order 0) behind an additive mix shader; both stages are reseeded
/// whenever "Generate" fires.
/// </summary>
public ProcessorBillowNoise()
{
    matAdd = new Material(Shader.Find("ProTeGe/Processors/Mix/Add"));

    seeder = new ProcessorCellularNoiseSeeder();
    seeder["Order"] = 0;
    seeder.cacheON = false;

    cellular = new ProcessorCellularNoise();
    cellular.cacheON = false;
    cellular.inputs[0].connectedProcessor = seeder;

    // Reseed both stages on every generate so successive outputs differ.
    AddPropertyHook("Generate", delegate
    {
        seeder["Seed"] = Time.time;
        cellular["Seed"] = Time.time;
    });
}
// Handler for the "Adjust Skin" dialog — four-corner texture mode only.
// Lets the user override the X texture-coordinate ranges for the four
// corner photos (front/right/back/left), regenerates the four-corner
// texture with those user-supplied limits, and re-applies it to the
// current model. Early exits: wrong texture type, dialog cancelled or no
// return value, no existing four-corner texture, or the photo indices for
// the four corners cannot be identified.
// NOTE(review): quickProcessingWindow is created after the indices check —
// presumably intentional so the overlay only shows during generation.
private void AdjustSkin(object sender, RoutedEventArgs e) { if (_currentTextureType != TextureTypeEnum.FourCorners) { return; } var adjustSkinWindow = new AdjustSkin(_xCoodRangesFor4ImageTexture, _userXCoodRangesFor4ImageTexture); var result = adjustSkinWindow.ShowDialog(); if (!result.HasValue || !result.Value || adjustSkinWindow.ReturnValue == null) { return; } var returnValue = adjustSkinWindow.ReturnValue; if (_4CornerTextureAndBitmap == null) { return; } var indices = MainProcessor.GetIndicesFor4CornerTexture(_createMeshContract.ClickInputs.Angles); if (indices == null) { MessageBox.Show("Error in generating textures: Cannot identify photos for four corners.\nPlease check if photos are available for a full 360 degree view."); return; } var quickProcessingWindow = new QuickProcessingWindowHelper(ParentGrid); var add4CornerTexture = MainProcessor.GetAddTextureInfoForIndexCollection(_createMeshContract, indices); if (add4CornerTexture.ImageInfos != null) { SetUserSuppliedLimit(0, add4CornerTexture, returnValue.FrontPhotoTexCoodValueLimits); SetUserSuppliedLimit(1, add4CornerTexture, returnValue.RightPhotoTexCoodValueLimits); SetUserSuppliedLimit(2, add4CornerTexture, returnValue.BackPhotoTexCoodValueLimits); SetUserSuppliedLimit(3, add4CornerTexture, returnValue.LeftPhotoTexCoodValueLimits); } _userXCoodRangesFor4ImageTexture = returnValue; _4CornerTextureAndBitmap = TextureProcessor.GenerateTexture(add4CornerTexture, (MeshGeometry3D)_currentModel.Geometry, ""); quickProcessingWindow.Close(); ApplyTextureOnCurrentModel(_4CornerTextureAndBitmap); }
/// <summary>
/// Lazily builds the two-photo (front/back) texture for the current model,
/// showing a brief processing overlay while generating.
/// </summary>
private void Generate2CornerTexture()
{
    if (_2CornerTextureAndBitmap != null)
    {
        // Already generated; reuse the cached texture.
        return;
    }

    var quickProcessingWindow = new QuickProcessingWindowHelper(ParentGrid);
    var addTextureInfoForFrontAndBack = MainProcessor.GetAddTextureInfoForFrontAndBackImage(_createMeshContract);
    if (addTextureInfoForFrontAndBack == null)
    {
        MessageBox.Show("Error in generating textures: Cannot identify front and back photos.\nPlease check if photos are available from front to back.");
        quickProcessingWindow.Close();
        return;
    }

    _2CornerTextureAndBitmap = TextureProcessor.GenerateTexture(addTextureInfoForFrontAndBack, (MeshGeometry3D)_currentModel.Geometry, "");
    quickProcessingWindow.Close();
}
/// <summary>
/// Creates one alpha-stripping processor per material channel and assigns
/// each channel's fallback empty-texture type (neutral normal map, mid
/// grey for smoothness, black for metallic/parallax/emission), then
/// requests an update.
/// </summary>
void Start()
{
    processor_albedo = new ProTeGe.TextureProcessors.Other.ProcessorRemoveAlpha();
    processor_normalMap = new ProTeGe.TextureProcessors.Other.ProcessorRemoveAlpha();
    processor_smoothness = new ProTeGe.TextureProcessors.Other.ProcessorRemoveAlpha();
    processor_metallic = new ProTeGe.TextureProcessors.Other.ProcessorRemoveAlpha();
    processor_occlusion = new ProTeGe.TextureProcessors.Other.ProcessorRemoveAlpha();
    processor_parallax = new ProTeGe.TextureProcessors.Other.ProcessorRemoveAlpha();
    processor_emission = new ProTeGe.TextureProcessors.Other.ProcessorRemoveAlpha();

    // Channel-appropriate defaults for disconnected inputs
    // (albedo and occlusion keep the handler's default).
    processor_normalMap.inputs[0].emptyTextureType = InputHandler.EmptyTextureType.NormalMap;
    processor_smoothness.inputs[0].emptyTextureType = InputHandler.EmptyTextureType.Grey;
    processor_metallic.inputs[0].emptyTextureType = InputHandler.EmptyTextureType.Black;
    processor_parallax.inputs[0].emptyTextureType = InputHandler.EmptyTextureType.Black;
    processor_emission.inputs[0].emptyTextureType = InputHandler.EmptyTextureType.Black;

    WantUpdate = true;
}
/// <summary>
/// Mipmapping an 8x8 texture must produce a 4-level chain (8, 4, 2, 1)
/// with every level still solid red.
/// </summary>
public void Mipmap()
{
    var context = new TestProcessorContext(TargetPlatform.Windows, "dummy.xnb");
    var processor = new TextureProcessor
    {
        ColorKeyEnabled = false,
        GenerateMipmaps = true,
        PremultiplyAlpha = false,
        ResizeToPowerOfTwo = false,
        TextureFormat = TextureProcessorOutputFormat.Color
    };

    var face = new PixelBitmapContent<Color>(8, 8);
    Fill(face, Color.Red);
    var input = new Texture2DContent();
    input.Faces[0] = face;

    var output = processor.Process(input, context);
    Assert.NotNull(output);
    Assert.AreEqual(1, output.Faces.Count);

    var mipChain = output.Faces[0];
    Assert.AreEqual(4, mipChain.Count);

    // The source is square, so both dimensions halve in lockstep.
    var expectedSize = 8;
    foreach (var mip in mipChain)
    {
        Assert.AreEqual(expectedSize, mip.Width);
        Assert.AreEqual(expectedSize, mip.Height);

        var bitmap = (PixelBitmapContent<Color>)mip;
        for (var y = 0; y < expectedSize; y++)
        {
            for (var x = 0; x < expectedSize; x++)
            {
                Assert.AreEqual(Color.Red, bitmap.GetPixel(x, y));
            }
        }
        expectedSize /= 2;
    }
}
/// <summary>
/// Color keying must turn every red (key-colored) pixel of an 8x8 source
/// into Color.Transparent without altering the face dimensions.
/// </summary>
public void ColorKey()
{
    var context = new TestProcessorContext(TargetPlatform.Windows, "dummy.xnb");
    var processor = new TextureProcessor
    {
        ColorKeyColor = Color.Red,
        ColorKeyEnabled = true,
        GenerateMipmaps = false,
        PremultiplyAlpha = false,
        ResizeToPowerOfTwo = false,
        TextureFormat = TextureProcessorOutputFormat.Color
    };

    var keyedFace = new PixelBitmapContent <Color>(8, 8);
    Fill(keyedFace, Color.Red);
    var input = new Texture2DContent();
    input.Faces[0] = keyedFace;

    var output = processor.Process(input, context);
    Assert.NotNull(output);
    Assert.AreEqual(1, output.Faces.Count);
    Assert.AreEqual(1, output.Faces[0].Count);
    Assert.IsAssignableFrom <PixelBitmapContent <Color> >(output.Faces[0][0]);

    var processedFace = (PixelBitmapContent <Color>)output.Faces[0][0];
    Assert.AreEqual(8, processedFace.Width);
    Assert.AreEqual(8, processedFace.Height);
    for (var row = 0; row < processedFace.Height; row++)
    {
        for (var col = 0; col < processedFace.Width; col++)
        {
            Assert.AreEqual(Color.Transparent, processedFace.GetPixel(col, row));
        }
    }
}
/// <summary>
/// Lazily builds the eight-photo texture for the current model, warning
/// when fewer than eight usable photos were identified.
/// </summary>
private void Generate8CornerTexture()
{
    if (_8CornerTextureAndBitmap != null)
    {
        // Cached from a previous run.
        return;
    }

    var indices = MainProcessor.GetIndicesFor8CornerTexture(_createMeshContract.ClickInputs.Angles);
    if (indices == null)
    {
        MessageBox.Show("Error in generating textures: Cannot identify photos for eight corners.\nPlease check if photos are available for a full 360 degree view.");
        return;
    }

    var quickProcessingWindow = new QuickProcessingWindowHelper(ParentGrid);
    var add8CornerTexture = MainProcessor.GetAddTextureInfoForIndexCollection(_createMeshContract, indices);
    _8CornerTextureAndBitmap = TextureProcessor.GenerateTexture(add8CornerTexture, (MeshGeometry3D)_currentModel.Geometry, "");
    quickProcessingWindow.Close();

    if (indices.Length < 8)
    {
        MessageBox.Show(string.Format("Texture generated for {0} images as only {0} out of 8 images could be identified.", indices.Length));
    }
}
/// <summary>
/// Dumps a single shape configuration: normals, inputs, parent overrides,
/// then occlusion — full occluder parameters when this figure is the
/// parent, simple occlusion otherwise.
/// </summary>
public void DumpShape(TextureProcessor textureProcessor, DirectoryInfo figureDestDir, ShapeImportConfiguration shapeImportConfiguration)
{
    DirectoryInfo shapeDirectory = figureDestDir.Subdirectory("shapes").Subdirectory(shapeImportConfiguration.name);

    // Generate inputs first; the dumps below depend on them.
    var shapeInputs = MakeShapeInputs(shapeImportConfiguration);

    DumpNormals(textureProcessor, shapeDirectory, shapeImportConfiguration, shapeInputs);
    DumpInputs(shapeDirectory, shapeInputs);
    DumpParentOverrides(shapeDirectory, shapeImportConfiguration);

    var faceTransparencies = FaceTransparencies.For(figure, surfaceProperties, figureDestDir);
    if (figure == parentFigure)
    {
        DumpOccluderParameters(shapeDirectory, shapeInputs, faceTransparencies);
    }
    else
    {
        DumpSimpleOcclusion(shapeDirectory, shapeInputs, faceTransparencies);
    }
}
// Sanity test: texture generation on a plain 20-unit box centred at the
// origin (cameras looking straight at it, fixed 2.0 camera ratio), using
// the native-american front/back photos. The textured cube mesh is saved
// as XAML for manual inspection.
public void TestWithABlankCube() { var camPos1 = new Point3D(0.000000, 0.000000, 1239.995361); var lookingAtPt1 = new Point3D(0.0, 0.000000, 0.000000); var image1 = (Bitmap)Image.FromFile(_inputPath + @"\native_american_front.jpg"); var frontTexImageInfo = new AddTexImageInfo { CameraLocation = camPos1, ImageBitmap = image1, LookingAt = lookingAtPt1 }; var camPos2 = new Point3D(0, 0.000000, -1238.818726); var lookingAtPt2 = new Point3D(0.0, 0.000000, 0); var image2 = (Bitmap)Image.FromFile(_inputPath + @"\native_american_back.jpg"); var backTexImageInfo = new AddTexImageInfo { CameraLocation = camPos2, ImageBitmap = image2, LookingAt = lookingAtPt2 }; var cameraRatio = new CameraRatio { XRangeAtInfinity = 2.0, YRangeAtInfinity = 2.0 }; var addTextureInfo = new AddTextureInfo { CameraRatio = cameraRatio, ImageInfos = new[] { frontTexImageInfo, backTexImageInfo } }; var points = PolygonsGetter.GetBoxPolygonsAroundAPoint(new Point3D(0, 0, 0), 20); var triangles = Triangle.GetTrianglesFromPts(points); var meshGeometryModel = PaulBourkeSmoother.CreateMeshGeometry3DFromTriangles(triangles); Assert.AreEqual(0, meshGeometryModel.Normals.Count); var result = TextureProcessor.GenerateTexture(addTextureInfo, meshGeometryModel, _outputPath + @"\log.txt"); var textureImageName = _outputPath + @"\" + "native_american.bmp"; result.Bitmap.Save(textureImageName); meshGeometryModel.TextureCoordinates = result.TextureCoordinates; XamlWriter.SaveMeshGeometryModel(_outputPath + @"\blank_cube_model_with_native_american_texture.xaml", meshGeometryModel, result.Bitmap); }
// Runs transparency-profile generation for the current source image on a
// background task behind a modal dialog, then wraps the result in a new
// Texture and redraws the output.
// Guard: bails out when no transparency color is chosen or a dialog is
// already open (dialogViewModel non-null).
// NOTE(review): textureProfile may still be null here if the dialog task
// is cancelled before completion — TODO confirm Texture tolerates a null
// profile.
public void GenerateTextureProfile() { if (this.transparencyColor == null || this.dialogViewModel != null) { return; } var textureProcessor = new TextureProcessor(); this.dialogViewModel = new DialogViewModel(); var pixelColors = this.sourceImage.CopyPixels(); TextureProfile textureProfile = null; this.dialogViewModel.AddTask ( async(taskContext) => { textureProfile = await textureProcessor.GenerateTextureProfileAsync(pixelColors, this.transparencyColor, this.dialogViewModel, taskContext); } ); this.windowManager.ShowDialog(this.dialogViewModel); this.dialogViewModel = null; this.Texture = new Texture(this.originalSource, textureProfile); this.DrawTextureOnOutput(); }
/// <summary>
/// Editor command: formats every model and texture asset in the current
/// selection, driving a progress bar across the combined importer count.
/// </summary>
private static void FormatModel()
{
    var selectedObjects = Selection.objects;

    List <ModelImporter> modelImporters = new List <ModelImporter>();
    foreach (var selected in selectedObjects)
    {
        string assetPath = AssetDatabase.GetAssetPath(selected);
        modelImporters.AddRange(GetImporterByPath <ModelImporter>(assetPath));
    }

    List <TextureImporter> textureImporters = new List <TextureImporter>();
    foreach (var selected in selectedObjects)
    {
        string assetPath = AssetDatabase.GetAssetPath(selected);
        textureImporters.AddRange(GetImporterByPath <TextureImporter>(assetPath));
    }

    int totalCount = modelImporters.Count + textureImporters.Count;
    float processedCount = 0;
    foreach (var modelImporter in modelImporters)
    {
        processedCount++;
        EditorUtility.DisplayProgressBar("正在格式化资源", modelImporter.assetPath, processedCount / totalCount);
        ModelProcessor.FormatModel(modelImporter);
    }
    foreach (var textureImporter in textureImporters)
    {
        processedCount++;
        EditorUtility.DisplayProgressBar("正在格式化资源", textureImporter.assetPath, processedCount / totalCount);
        TextureProcessor.FormatTexture(textureImporter);
    }
    EditorUtility.ClearProgressBar();
}
// Full .pex processing entry point. When the emitter config carries an
// embedded base64 gzip-compressed TIFF, the texture is inflated in 4 KB
// chunks, round-tripped through a fixed temp file and TextureImporter,
// then processed (no mipmaps, Color output). Without embedded data the
// texture is built from a file named in the config, located next to the
// .pex, via context.BuildAndLoadAsset with the "TextureProcessor".
// NOTE(review): the fixed temp filename is shared between builds — TODO
// confirm the pipeline never processes two .pex files concurrently.
public override ParticleDesignerProcessorResult Process(ParticleDesignerContent input, ContentProcessorContext context) { logger = context.Logger; var result = new ParticleDesignerProcessorResult(); // check for an embedded tiff texture if (input.emitterConfig.texture.data != null) { context.Logger.LogMessage("pex file has an embedded tiff. Extracting now."); using (var memoryStream = new MemoryStream(Convert.FromBase64String(input.emitterConfig.texture.data), writable: false)) { using (var stream = new GZipStream(memoryStream, CompressionMode.Decompress)) { const int size = 4096; byte[] buffer = new byte[size]; using (var memory = new MemoryStream()) { int count = 0; do { count = stream.Read(buffer, 0, size); if (count > 0) { memory.Write(buffer, 0, count); } } while(count > 0); result.textureTiffData = memory.ToArray(); } } } var tempFile = Path.Combine(Path.GetTempPath(), "tempParticleTexture.tif"); File.WriteAllBytes(tempFile, result.textureTiffData); context.Logger.LogMessage("writing tiff to temp file: {0}", tempFile); context.Logger.LogMessage("running TextureImportor on tiff"); var textureImporter = new TextureImporter(); result.texture = textureImporter.Import(tempFile, input.context) as Texture2DContent; result.texture.Name = input.emitterConfig.texture.name; context.Logger.LogMessage("deleting temp file"); File.Delete(tempFile); // process context.Logger.LogMessage("processing TextureContent"); var textureProcessor = new TextureProcessor { GenerateMipmaps = false, TextureFormat = TextureProcessorOutputFormat.Color }; result.texture = (Texture2DContent)textureProcessor.Process(result.texture, context); context.Logger.LogMessage("TextureContent processed"); } else // no tiff data, so let's try loading the texture with the texture name, from the same directory as the particle file { string fileDirectory = Path.GetDirectoryName(input.path); string fullPath = Path.Combine(fileDirectory, input.emitterConfig.texture.name); context.Logger.LogMessage("Looking for 
texture file at {0}", fullPath); result.texture = context.BuildAndLoadAsset <string, Texture2DContent>(new ExternalReference <string>(fullPath), "TextureProcessor"); context.Logger.LogMessage("Texture file loaded."); } result.particleEmitterConfig = input.emitterConfig; context.Logger.LogMessage("Emitter configuration loaded."); return(result); }