public LogPipe(LogStore logStore) { _logStore = logStore; _stream = new LogStream(LogStreamCallback); _stream.Attach(); }
static Config() { int level; FileInputStream fis = null; _log = LogStream.GetInstance(); try { string filename = Runtime.GetProperty("jcifs.properties"); if (filename != null && filename.Length > 1) { fis = new FileInputStream(filename); } Load(fis); if (fis != null) { fis.Close(); } } catch (IOException ioe) { if (_log.Level > 0) { Runtime.PrintStackTrace(ioe, _log); } } if ((level = GetInt("jcifs.util.loglevel", -1)) != -1) { _log.SetLevel(level); } try { Runtime.GetBytesForString(string.Empty, DefaultOemEncoding); } catch (Exception ex) { if (_log.Level >= 2) { _log.WriteLine("WARNING: The default OEM encoding " + DefaultOemEncoding + " does not appear to be supported by this JRE. The default encoding will be US-ASCII." ); } //DEFAULT_OEM_ENCODING = "US-ASCII"; } if (_log.Level >= 4) { try { _prp.Store(_log); } catch (IOException) { } } }
public static MeshResource Load(string filePath) { AssimpImporter importer = new AssimpImporter(); importer.SetConfig(new NormalizeVertexComponentsConfig(true)); importer.SetConfig(new MultithreadingConfig(-1)); LogStream logStream = new LogStream(delegate(String msg, String userdata) { Log.Message(msg); Log.Message(userdata); }); importer.AttachLogStream(logStream); Assimp.Scene model = importer.ImportFile(filePath, PostProcessPreset.TargetRealTimeMaximumQuality | PostProcessSteps.FlipUVs); MeshResource meshResource = new MeshResource(); meshResource.Materials = model.Materials.Select(m => (BaseMaterial)new TexturedMaterial(m, Directory.GetParent(filePath).FullName)).ToList(); foreach (var modelMesh in model.Meshes) { EntityBuffer buffer = new EntityBuffer(); List<ushort> indices = new List<ushort>(); List<Vertex> vertices = new List<Vertex>(); Vector3D[] texCoords = modelMesh.HasTextureCoords(0) ? modelMesh.GetTextureCoords(0) : null; foreach (var face in modelMesh.Faces) { for (int i = 0; i < face.IndexCount; i++) { indices.Add((ushort)face.Indices[i]); } } var material = model.Materials[modelMesh.MaterialIndex]; TexturedMaterial texMat = new TexturedMaterial(material, Directory.GetParent(filePath).FullName); for (int i = 0; i < modelMesh.VertexCount; i++) { var vertex = modelMesh.Vertices[i]; var texCoord = texCoords != null ? texCoords[i] : new Vector3D(); /* guard: texCoords is null when the mesh has no UV channel */ var normal = modelMesh.Normals[i]; vertices.Add(new Vertex(new Position3(vertex.X, vertex.Y, vertex.Z), new Position2(texCoord.X, texCoord.Y), new Position3(normal.X, normal.Y, normal.Z))); } buffer.SetVertices(vertices, indices); Mesh mesh = new Mesh(buffer); meshResource.Meshes.Add(modelMesh.MaterialIndex, mesh); } importer.Dispose(); return meshResource; }
public static HistoryNode FromLog (LogStream l, string username) { if(l.Position == null) return null; HistoryNode n = new HistoryNode (); n.Username = username; n.Dimension = l.Position.Dimension; n.Timestamp = l.Timestamp; if (l.Digging != null) { if (l.Digging.Status != PlayerDigging.StatusEnum.FinishedDigging) return null; n.Position = l.Digging.Position.CloneDouble (); n.Color = Color.Red; return n; } if (l.PlaceBlock != null) { n.Position = l.PlaceBlock.BlockPosition.CloneDouble (); if (l.PlaceBlock.Item == null || l.PlaceBlock.Item.ItemID <= 0) { n.Color = Color.Transparent; return n; } n.Color = Color.GreenYellow; //All other blocks n.Item = l.PlaceBlock.Item; if (l.PlaceBlock.Item.ItemID == BlockID.Waterbucket) n.Color = Color.Blue; if (l.PlaceBlock.Item.ItemID == BlockID.FlintandSteel) n.Color = Color.Orange; if (l.PlaceBlock.Item.ItemID == BlockID.Lavabucket) n.Color = Color.Orange; if (l.PlaceBlock.Item.ItemID == BlockID.TNT) n.Color = Color.Red; return n; } if (l.Window != null && l.Window.WindowID >= 0) { n.Position = l.Position.Position; n.Color = Color.Yellow; n.Item = l.Window.Item; return n; } return null; }
private static void ImportGenericModel(String path, out Scene scene) { try { using (LogStream logStream = new LogStream(delegate(String msg, String userData) { Console.WriteLine(msg); })) { logStream.Attach(); using (AssimpContext importer = new AssimpContext()) scene = importer.ImportFile(path, PostProcessPreset.TargetRealTimeMaximumQuality); logStream.Detach(); } } catch (Exception exception) { scene = null; System.Windows.MessageBox.Show(exception.Message); } }
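For context, a minimal sketch (not part of the original source) of how a helper like ImportGenericModel above might be called; the file path and the follow-up check are illustrative assumptions.
// Hypothetical caller for the ImportGenericModel(String, out Scene) helper shown above.
Scene scene;
ImportGenericModel(@"Assets\Models\example.fbx", out scene); // path is illustrative
if (scene == null)
{
    return; // the helper has already reported the error via MessageBox
}
Console.WriteLine($"Imported {scene.MeshCount} meshes.");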
/// <summary> /// Reads all events in the log file and processes them in parallel /// </summary> /// <returns>If all events are read successfully, <see cref="BinaryLogReader.EventReadResult.EndOfStream"/> will be returned</returns> public EventReadResult ReadAllEvents() { int numPathEventConsumers = 2; int numNotPathEventConsumers = 5; // Create a boolean array to decide which events to process bool[] shouldProcessEvent = new bool[((int)BinaryLogger.LogSupportEventId.Max) + m_handlers.Length]; for (int i = 0; i < shouldProcessEvent.Length; ++i) { if (i < (int)BinaryLogger.LogSupportEventId.Max) { // Always process internal events shouldProcessEvent[i] = true; } else if (m_handlers[i - (int)BinaryLogger.LogSupportEventId.Max] != null) { // Only process event if handler is defined shouldProcessEvent[i] = true; } else { shouldProcessEvent[i] = false; } } BlockingCollection <long>[] addPathEventsToDeserialize = new BlockingCollection <long> [numPathEventConsumers]; BlockingCollection <long>[] notAddPathEventsToDeserialize = new BlockingCollection <long> [numNotPathEventConsumers]; try { // Initialize the queues for (int i = 0; i < addPathEventsToDeserialize.Length; ++i) { addPathEventsToDeserialize[i] = new BlockingCollection <long>(); } for (int i = 0; i < notAddPathEventsToDeserialize.Length; ++i) { notAddPathEventsToDeserialize[i] = new BlockingCollection <long>(); } // Start the event consumers Task[] pathEventConsumers = new Task[numPathEventConsumers]; for (int i = 0; i < pathEventConsumers.Length; ++i) { pathEventConsumers[i] = CreatePathEventConsumerTask(addPathEventsToDeserialize[i]); } Task[] notPathEventConsumers = new Task[numNotPathEventConsumers]; for (int i = 0; i < notPathEventConsumers.Length; ++i) { notPathEventConsumers[i] = CreateNonPathEventConsumerTask(notAddPathEventsToDeserialize[i]); } // Event positions are added to the queues in a round robin manner // These variables indicate which queue to put the next event position in int pathEventQueueToAddTo = 0; int notPathEventQueueToAddTo = 0; EventReadResult result; try { while (true) { if (m_nextReadPosition != null) { LogStream.Seek(m_nextReadPosition.Value, SeekOrigin.Begin); } var position = LogStream.Position; if (position == LogLength) { result = EventReadResult.EndOfStream; break; } // Read the header EventHeader header = EventHeader.ReadFrom(m_logStreamReader); if (shouldProcessEvent[header.EventId]) { // Add event to appropriate queue if ((BinaryLogger.LogSupportEventId)header.EventId == BinaryLogger.LogSupportEventId.AddPath) { addPathEventsToDeserialize[pathEventQueueToAddTo].Add(position); pathEventQueueToAddTo++; pathEventQueueToAddTo %= numPathEventConsumers; } else { notAddPathEventsToDeserialize[notPathEventQueueToAddTo].Add(position); notPathEventQueueToAddTo++; notPathEventQueueToAddTo %= numNotPathEventConsumers; } } m_currentEventPayloadSize = header.EventPayloadSize; position = LogStream.Position; // There are fewer bytes than specified by the payload // The file is corrupted or truncated if (position + header.EventPayloadSize > LogLength) { result = EventReadResult.UnexpectedEndOfStream; break; } m_nextReadPosition = position + header.EventPayloadSize; } } catch (EndOfStreamException) { result = EventReadResult.UnexpectedEndOfStream; } // We are done adding events to the queues so mark all the queues as complete for adding foreach (var q in addPathEventsToDeserialize) { q.CompleteAdding(); } foreach (var q in notAddPathEventsToDeserialize) { q.CompleteAdding(); } // Wait for all events to be processed 
Task.WaitAll(pathEventConsumers); Task.WaitAll(notPathEventConsumers); return(result); } finally { // Dispose the queues foreach (var q in addPathEventsToDeserialize) { q.Dispose(); } foreach (var q in notAddPathEventsToDeserialize) { q.Dispose(); } } }
/// <summary> /// Attaches a logging stream to the importer. /// </summary> /// <param name="logstream">LogStream to attach</param> public void AttachLogStream(LogStream logstream) { if(logstream == null || m_logStreams.Contains(logstream)) { return; } m_logStreams.Add(logstream); }
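As a hedged illustration, attaching and detaching a stream around a single import with the older AssimpImporter API might look like the sketch below; the importer instance, file name, and post-process flags are placeholders rather than part of the snippet above.
// Sketch: attach a console-backed LogStream only for the duration of one import.
var importer = new AssimpImporter();
var logStream = new LogStream(delegate(String msg, String userData) { Console.WriteLine(msg); });
importer.AttachLogStream(logStream);   // uses the method shown above
try
{
    Scene scene = importer.ImportFile("model.dae", PostProcessSteps.Triangulate); // placeholders
}
finally
{
    importer.DetachLogStream(logStream); // counterpart shown further down
    importer.Dispose();
}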
internal static void FlushAndClose() { Verbose("FlushAndClose called. Finished processing ICM"); LogStream?.Flush(); }
public EncoderUnit(string transcoder, string arguments, TransportMethod inputMethod, TransportMethod outputMethod, LogStream logStream, StreamContext context) : this(transcoder, arguments, inputMethod, outputMethod, logStream) { this.context = context; }
public override bool Execute(List<string> args) { if (args.Count < 1 || args.Count > 3) return false; ResourceLocation location = ResourceLocation.Resources; TagInstance destination = Info.Cache.Tags[0x3317]; if (args.Count == 3) { var value = args[0]; switch (value) { case "resources": location = ResourceLocation.Resources; break; case "textures": location = ResourceLocation.Textures; break; case "textures_b": location = ResourceLocation.TexturesB; break; case "audio": location = ResourceLocation.Audio; break; case "video": location = ResourceLocation.Video; break; case "render_models": location = ResourceLocation.RenderModels; break; case "lightmaps": location = ResourceLocation.Lightmaps; break; default: Console.WriteLine("Invalid resource location: " + value); return false; } args.RemoveAt(0); } if (args.Count == 2) { destination = ArgumentParser.ParseTagIndex(Info, args[0]); if (!destination.IsInGroup("mode")) { Console.WriteLine("Specified tag is not a render_model: " + args[0]); return false; } args.RemoveAt(0); } var builder = new RenderModelBuilder(Info.Version); // Add a root node var node = builder.AddNode(new RenderModel.Node { Name = Info.StringIDs.GetStringID("street_cone"), ParentNode = -1, FirstChildNode = -1, NextSiblingNode = -1, DefaultRotation = new Vector4(0, 0, 0, -1), DefaultScale = 1, InverseForward = new Vector3(1, 0, 0), InverseLeft = new Vector3(0, 1, 0), InverseUp = new Vector3(0, 0, 1), }); // Begin building the default region and permutation builder.BeginRegion(Info.StringIDs.GetStringID("default")); builder.BeginPermutation(Info.StringIDs.GetStringID("default")); using (var importer = new AssimpContext()) { Scene model; using (var logStream = new LogStream((msg, userData) => Console.WriteLine(msg))) { logStream.Attach(); model = importer.ImportFile(args[0], PostProcessSteps.CalculateTangentSpace | PostProcessSteps.GenerateNormals | PostProcessSteps.JoinIdenticalVertices | PostProcessSteps.SortByPrimitiveType | PostProcessSteps.PreTransformVertices | PostProcessSteps.Triangulate); logStream.Detach(); } Console.WriteLine("Assembling vertices..."); // Build a multipart mesh from the model data, // with each model mesh mapping to a part of one large mesh and having its own material builder.BeginMesh(); ushort partStartVertex = 0; ushort partStartIndex = 0; var vertices = new List<RigidVertex>(); var indices = new List<ushort>(); foreach (var mesh in model.Meshes) { for (var i = 0; i < mesh.VertexCount; i++) { var position = mesh.Vertices[i]; var normal = mesh.Normals[i]; var uv = mesh.TextureCoordinateChannels[0][i]; var tangent = mesh.Tangents[i]; var bitangent = mesh.BiTangents[i]; vertices.Add(new RigidVertex { Position = new Vector4(position.X, position.Y, position.Z, 1), Normal = new Vector3(normal.X, normal.Y, normal.Z), Texcoord = new Vector2(uv.X, uv.Y), Tangent = new Vector4(tangent.X, tangent.Y, tangent.Z, 1), Binormal = new Vector3(bitangent.X, bitangent.Y, bitangent.Z), }); } // Build the index buffer var meshIndices = mesh.GetIndices(); indices.AddRange(meshIndices.Select(i => (ushort)(i + partStartVertex))); // Define a material and part for this mesh var material = builder.AddMaterial(new RenderMaterial { RenderMethod = Info.Cache.Tags[0x101F], }); builder.BeginPart(material, partStartIndex, (ushort)meshIndices.Length, (ushort)mesh.VertexCount); builder.DefineSubPart(partStartIndex, (ushort)meshIndices.Length, (ushort)mesh.VertexCount); builder.EndPart(); // Move to the next part partStartVertex += (ushort)mesh.VertexCount; partStartIndex += 
(ushort)meshIndices.Length; } // Bind the vertex and index buffers builder.BindRigidVertexBuffer(vertices, node); builder.BindIndexBuffer(indices, PrimitiveType.TriangleList); builder.EndMesh(); } builder.EndPermutation(); builder.EndRegion(); Console.WriteLine("Building Blam mesh data..."); var resourceStream = new MemoryStream(); var renderModel = builder.Build(Info.Serializer, resourceStream); Console.WriteLine("Writing resource data..."); // Add a new resource for the model data var resources = new ResourceDataManager(); resources.LoadCachesFromDirectory(Info.CacheFile.DirectoryName); resourceStream.Position = 0; resources.Add(renderModel.Geometry.Resource, location, resourceStream); Console.WriteLine("Writing tag data..."); using (var cacheStream = Info.OpenCacheReadWrite()) { var tag = destination; var context = new TagSerializationContext(cacheStream, Info.Cache, Info.StringIDs, tag); Info.Serializer.Serialize(context, renderModel); } Console.WriteLine("Model imported successfully!"); return true; }
public override object Execute(List <string> args) { if (args.Count < 1 || args.Count > 2) { return(false); } var stringIdCount = Cache.StringTable.Count; var destinationTag = Cache.TagCache.GetTag(@"objects\gear\human\industrial\street_cone\street_cone", "mode"); string vertexType = "rigid"; if (args[0] == "skinned" || args[0] == "rigid") { vertexType = args[0]; args.RemoveAt(0); } if (args.Count == 2) { if (!Cache.TryGetTag(args[0], out destinationTag) || !destinationTag.IsInGroup("mode")) { Console.WriteLine("Specified tag is not a render_model: " + args[0]); return(false); } args.RemoveAt(0); } RenderModel edMode = null; using (var cacheStream = Cache.OpenCacheReadWrite()) edMode = Cache.Deserialize <RenderModel>(cacheStream, destinationTag); // Get a list of the original nodes var nodeIndices = new Dictionary <string, int>(); foreach (var a in edMode.Nodes) { nodeIndices.Add(Cache.StringTable.GetString(a.Name), edMode.Nodes.IndexOf(a)); } // Read the custom model file if (!File.Exists(args[0])) { return(false); } Console.WriteLine($"File date: {File.GetLastWriteTime(args[0])}"); var builder = new RenderModelBuilder(Cache); using (var importer = new AssimpContext()) { Scene model; if (vertexType == "skinned") { using (var logStream = new LogStream((msg, userData) => Console.WriteLine(msg))) { logStream.Attach(); model = importer.ImportFile(args[0], PostProcessSteps.CalculateTangentSpace | PostProcessSteps.GenerateNormals | PostProcessSteps.SortByPrimitiveType | PostProcessSteps.Triangulate); logStream.Detach(); } for (var i = 0; i < model.Meshes.Count; i++) { if (!model.Meshes[i].HasBones) { throw new Exception($"Mesh \"{model.Meshes[i].Name}\" has no bones!"); } } } else { using (var logStream = new LogStream((msg, userData) => Console.WriteLine(msg))) { logStream.Attach(); model = importer.ImportFile(args[0], PostProcessSteps.CalculateTangentSpace | PostProcessSteps.GenerateNormals | PostProcessSteps.JoinIdenticalVertices | PostProcessSteps.SortByPrimitiveType | PostProcessSteps.PreTransformVertices | PostProcessSteps.Triangulate); logStream.Detach(); } } Console.WriteLine("Assembling vertices..."); // Add nodes var rigidNode = builder.AddNode(new RenderModel.Node { Name = Cache.StringTable.GetStringId("street_cone"), ParentNode = -1, FirstChildNode = -1, NextSiblingNode = -1, DefaultRotation = new RealQuaternion(0, 0, 0, -1), DefaultScale = 1, InverseForward = new RealVector3d(1, 0, 0), InverseLeft = new RealVector3d(0, 1, 0), InverseUp = new RealVector3d(0, 0, 1), }); // Build a multipart mesh from the model data, // with each model mesh mapping to a part of one large mesh and having its own material ushort partStartVertex = 0; ushort partStartIndex = 0; var rigidVertices = new List <RigidVertex>(); var skinnedVertices = new List <SkinnedVertex>(); var indices = new List <ushort>(); Dictionary <string, int> newNodes = new Dictionary <string, int>(); foreach (var mesh in model.Meshes) { var meshIndex = model.Meshes.IndexOf(mesh); Console.Write($"Enter a region name for '{mesh.Name}' (mesh index {meshIndex}): "); var regionName = Console.ReadLine(); var regionStringId = Cache.StringTable.GetStringId(regionName); if (regionStringId == StringId.Invalid) { regionStringId = Cache.StringTable.AddString(regionName); } // Begin building the default region and permutation builder.BeginRegion(regionStringId); builder.BeginPermutation(Cache.StringTable.GetStringId("default")); builder.BeginMesh(); for (var i = 0; i < mesh.VertexCount; i++) { var position = mesh.Vertices[i]; var normal = 
mesh.Normals[i]; var uv = mesh.TextureCoordinateChannels[0][i]; var tangent = mesh.Tangents.Count != 0 ? mesh.Tangents[i] : new Vector3D(); var bitangent = mesh.BiTangents.Count != 0 ? mesh.BiTangents[i] : new Vector3D(); if (vertexType == "skinned") { var blendIndicesList = new List <byte>(); var blendWeightsList = new List <float>(); var bonesList = new List <string>(); foreach (var bone in mesh.Bones) { foreach (var vertexInfo in bone.VertexWeights) { if (vertexInfo.VertexID == i) { bonesList.Add(bone.Name); blendIndicesList.Add((byte)nodeIndices[bone.Name]); blendWeightsList.Add(vertexInfo.Weight); } } } var blendIndices = new byte[4]; var blendWeights = new float[4]; for (int j = 0; j < blendIndicesList.Count; j++) { if (j < 4) { blendIndices[j] = blendIndicesList[j]; } } for (int j = 0; j < blendWeightsList.Count; j++) { if (j < 4) { blendWeights[j] = blendWeightsList[j]; } } skinnedVertices.Add(new SkinnedVertex { Position = new RealQuaternion(position.X, position.Y, position.Z, 1), Texcoord = new RealVector2d(uv.X, -uv.Y), Normal = new RealVector3d(normal.X, normal.Y, normal.Z), Tangent = new RealQuaternion(tangent.X, tangent.Y, tangent.Z, 1), Binormal = new RealVector3d(bitangent.X, bitangent.Y, bitangent.Z), BlendIndices = blendIndices, BlendWeights = blendWeights }); } else { rigidVertices.Add(new RigidVertex { Position = new RealQuaternion(position.X, position.Y, position.Z, 1), Texcoord = new RealVector2d(uv.X, -uv.Y), Normal = new RealVector3d(normal.X, normal.Y, normal.Z), Tangent = new RealQuaternion(tangent.X, tangent.Y, tangent.Z, 1), Binormal = new RealVector3d(bitangent.X, bitangent.Y, bitangent.Z), }); } } // Build the index buffer var meshIndices = mesh.GetIndices(); indices.AddRange(meshIndices.Select(i => (ushort)(i + partStartVertex))); // Define a material and part for this mesh var material = builder.AddMaterial(new RenderMaterial { RenderMethod = Cache.TagCache.GetTag(@"shaders\invalid", "rmsh"), }); builder.BeginPart(material, partStartIndex, (ushort)meshIndices.Length, (ushort)mesh.VertexCount); builder.DefineSubPart(partStartIndex, (ushort)meshIndices.Length, (ushort)mesh.VertexCount); builder.EndPart(); // Move to the next part partStartVertex += (ushort)mesh.VertexCount; partStartIndex += (ushort)meshIndices.Length; // Bind the vertex and index buffers if (vertexType == "skinned") { builder.BindSkinnedVertexBuffer(skinnedVertices); } else { builder.BindRigidVertexBuffer(rigidVertices, rigidNode); } builder.BindIndexBuffer(indices, IndexBufferFormat.TriangleList); builder.EndMesh(); builder.EndPermutation(); builder.EndRegion(); } } Console.Write("Building render_geometry..."); var resourceStream = new MemoryStream(); var renderModel = builder.Build(Cache.Serializer, resourceStream); if (vertexType == "skinned") { // Copy required data from the original render_model tag renderModel.Nodes = edMode.Nodes; renderModel.MarkerGroups = edMode.MarkerGroups; renderModel.RuntimeNodeOrientations = edMode.RuntimeNodeOrientations; } Console.WriteLine("done."); // // Serialize the new render_model tag // Console.Write("Writing render_model tag data..."); using (var cacheStream = Cache.OpenCacheReadWrite()) Cache.Serialize(cacheStream, destinationTag, renderModel); Console.WriteLine("done."); // // Save any new string ids // if (stringIdCount != Cache.StringTable.Count) { Console.Write("Saving string ids..."); Cache.SaveStrings(); Console.WriteLine("done"); } Console.WriteLine("Model imported successfully!"); return(true); }
public override bool Execute(List<string> args) { if (args.Count != 1) return false; var builder = new RenderModelBuilder(_info.Version); // Add a root node var node = builder.AddNode(new RenderModel.Node { Name = _stringIds.GetStringId("street_cone"), ParentNode = -1, FirstChildNode = -1, NextSiblingNode = -1, DefaultRotation = new Vector4(0, 0, 0, -1), DefaultScale = 1, InverseForward = new Vector3(1, 0, 0), InverseLeft = new Vector3(0, 1, 0), InverseUp = new Vector3(0, 0, 1), }); // Begin building the default region and permutation builder.BeginRegion(_stringIds.GetStringId("default")); builder.BeginPermutation(_stringIds.GetStringId("default")); using (var importer = new AssimpContext()) { Scene model; using (var logStream = new LogStream((msg, userData) => Console.WriteLine(msg))) { logStream.Attach(); model = importer.ImportFile(args[0], PostProcessSteps.CalculateTangentSpace | PostProcessSteps.GenerateNormals | PostProcessSteps.JoinIdenticalVertices | PostProcessSteps.SortByPrimitiveType | PostProcessSteps.PreTransformVertices | PostProcessSteps.Triangulate); logStream.Detach(); } Console.WriteLine("Assembling vertices..."); // Build a multipart mesh from the model data, // with each model mesh mapping to a part of one large mesh and having its own material builder.BeginMesh(); ushort partStartVertex = 0; ushort partStartIndex = 0; var vertices = new List<RigidVertex>(); var indices = new List<ushort>(); foreach (var mesh in model.Meshes) { for (var i = 0; i < mesh.VertexCount; i++) { var position = mesh.Vertices[i]; var normal = mesh.Normals[i]; var uv = mesh.TextureCoordinateChannels[0][i]; var tangent = mesh.Tangents[i]; var bitangent = mesh.BiTangents[i]; vertices.Add(new RigidVertex { Position = new Vector4(position.X, position.Y, position.Z, 1), Normal = new Vector3(normal.X, normal.Y, normal.Z), Texcoord = new Vector2(uv.X, uv.Y), Tangent = new Vector4(tangent.X, tangent.Y, tangent.Z, 1), Binormal = new Vector3(bitangent.X, bitangent.Y, bitangent.Z), }); } // Build the index buffer var meshIndices = mesh.GetIndices(); indices.AddRange(meshIndices.Select(i => (ushort)(i + partStartVertex))); // Define a material and part for this mesh var material = builder.AddMaterial(new RenderMaterial { RenderMethod = _cache.Tags[0x101F], }); builder.DefinePart(material, partStartIndex, (ushort)meshIndices.Length, (ushort)mesh.VertexCount); // Move to the next part partStartVertex += (ushort)mesh.VertexCount; partStartIndex += (ushort)meshIndices.Length; } // Bind the vertex and index buffers builder.BindRigidVertexBuffer(vertices, node); builder.BindIndexBuffer(indices, PrimitiveType.TriangleList); builder.EndMesh(); } builder.EndPermutation(); builder.EndRegion(); Console.WriteLine("Building Blam mesh data..."); var resourceStream = new MemoryStream(); var renderModel = builder.Build(_info.Serializer, resourceStream); Console.WriteLine("Writing resource data..."); // Add a new resource for the model data var resources = new ResourceDataManager(); resources.LoadCachesFromDirectory(_fileInfo.DirectoryName); resourceStream.Position = 0; resources.Add(renderModel.Geometry.Resource, ResourceLocation.Resources, resourceStream); Console.WriteLine("Writing tag data..."); using (var cacheStream = _fileInfo.Open(FileMode.Open, FileAccess.ReadWrite)) { var tag = _cache.Tags[0x3317]; var context = new TagSerializationContext(cacheStream, _cache, _stringIds, tag); _info.Serializer.Serialize(context, renderModel); } Console.WriteLine("Model imported successfully!"); return true; }
/// <summary> /// Reads an event /// </summary> /// <returns>the result of reading the next event</returns> public EventReadResult ReadEvent() { try { while (true) { if (m_nextReadPosition != null && LogStream.Position != m_nextReadPosition.Value) { LogStream.Seek(m_nextReadPosition.Value, SeekOrigin.Begin); } var position = LogStream.Position; if (position == LogLength) { return(EventReadResult.EndOfStream); } // Read the header EventHeader header = EventHeader.ReadFrom(m_logStreamReader); m_currentEventPayloadSize = header.EventPayloadSize; position = LogStream.Position; // There are less bytes than specified by the payload // The file is corrupted or truncated if (position + header.EventPayloadSize > LogLength) { return(EventReadResult.UnexpectedEndOfStream); } m_nextReadPosition = position + header.EventPayloadSize; // Handle the internal events if (header.EventId < (uint)BinaryLogger.LogSupportEventId.Max) { switch ((BinaryLogger.LogSupportEventId)header.EventId) { case BinaryLogger.LogSupportEventId.StartTime: ReadStartTimeEvent(m_logStreamReader); break; case BinaryLogger.LogSupportEventId.AddPath: ReadPathEvent(m_logStreamReader); break; } Contract.Assert(LogStream.Position == (position + header.EventPayloadSize)); continue; } else { header.EventId -= (uint)BinaryLogger.LogSupportEventId.Max; } EventHandler handler; if ((m_handlers.Length > header.EventId) && ((handler = m_handlers[header.EventId]) != null)) { handler(header.EventId, header.WorkerId, header.Timestamp, m_logStreamReader); Contract.Assert(LogStream.Position <= (position + header.EventPayloadSize), "Event handler read beyond the event payload"); } m_logStreamReader.ReadBytes((int)(m_nextReadPosition.Value - LogStream.Position)); return(EventReadResult.Success); } } catch (EndOfStreamException) { return(EventReadResult.UnexpectedEndOfStream); } }
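A minimal hedged sketch of driving the ReadEvent method above in a loop; the reader variable is assumed to be an already-constructed instance exposing this API and is not defined in the original snippet.
// Sketch: pump events until the log is exhausted or found to be truncated.
EventReadResult result;
while ((result = reader.ReadEvent()) == EventReadResult.Success)
{
    // Registered handlers have already been invoked inside ReadEvent.
}
if (result == EventReadResult.UnexpectedEndOfStream)
{
    Console.WriteLine("Log file appears to be truncated or corrupted.");
}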
/// <summary> /// Handles cleaning up on close /// </summary> private void MainWindowClosing(object sender, CancelEventArgs e) { LogStream?.Flush(); LogStream?.Dispose(); Instance.Settings.Save("Settings.hcfg"); }
static int nullBonesC = 0; //TODO remove this counter public static void Main(string[] args) { //Dir if (Directory.Exists("./obj")) { Console.WriteLine("Directory Found"); } else { printError("Creating Dir"); Directory.CreateDirectory("./obj"); } //File Input string fileName = "./obj/" + getUserInput("File name"); if (fileName == "./obj/") { string[] possibleFiles = Directory.GetFiles("./obj"); foreach (string currFileName in possibleFiles) { if (!currFileName.EndsWith(".txt") && !currFileName.EndsWith(".js")) { fileName = currFileName; break; } } } else { string[] possibleFiles = Directory.GetFiles("./obj"); foreach (string currFileName in possibleFiles) { if (!currFileName.Contains("fileName")) { fileName = currFileName; break; } } } Console.WriteLine("Files found, starting to read them"); try { File.Delete("./obj/output.txt"); } catch (Exception e) { printError("No file to delete, ignore this error" + e); } //Create a new importer AssimpContext importer = new AssimpContext(); importer.SetConfig(new IFCUseCustomTriangulationConfig(true)); importer.SetConfig(new SortByPrimitiveTypeConfig(PrimitiveType.Line | PrimitiveType.Point)); importer.SetConfig(new VertexBoneWeightLimitConfig(4)); //This is how we add a configuration (each config is its own class) //NormalSmoothingAngleConfig config = new NormalSmoothingAngleConfig(66.0f); //importer.SetConfig(config); //This is how we add a logging callback LogStream logstream = new LogStream(delegate(String msg, String userData) { Console.WriteLine(msg); }); logstream.Attach(); //Import the model. All configs are set. The model //is imported, loaded into managed memory. Then the unmanaged memory is released, and everything is reset. //Triangulating is already being done //TODO aiProcess_JoinIdenticalVertices (Index buffer objects) scene = importer.ImportFile(fileName, PostProcessPreset.TargetRealTimeMaximumQuality | PostProcessSteps.FlipUVs | PostProcessSteps.OptimizeMeshes | PostProcessSteps.OptimizeGraph | PostProcessSteps.SortByPrimitiveType | PostProcessSteps.LimitBoneWeights); extractBones(scene.RootNode); createBoneTree(scene.RootNode, -1, Matrix4x4.Identity); parseNode(scene.RootNode); //End of example importer.Dispose(); adjVert = (Lookup <Vector3D, triAndVertIndex>)toLookup.ToLookup((item) => item.Key, (item) => item.Value); //First 3 => Point, Second 3 => Line //TODO Make this a bit better //For each triangle, store some bary coords Bary[] bary = new Bary[normals.Count]; //Filled with: default( int ) //Edit #region Bary coords and bones //Lines: for (int j = 0; j < toLookup.Count; j += 3) { Vector3D v0 = toLookup[j + 2].Key - toLookup[j + 1].Key; Vector3D v1 = toLookup[j + 2].Key - toLookup[j].Key; Vector3D v2 = toLookup[j + 1].Key - toLookup[j].Key; double area = Math.Abs(Vector3D.Cross(v1, v2).Length()) / 2; //Determinant of a 2D matrix, used to calculate the area of a parallelogram IEnumerable <triAndVertIndex> matchingVertices0 = adjVert[toLookup[j].Key]; IEnumerable <triAndVertIndex> matchingVertices1 = adjVert[toLookup[j + 1].Key]; IEnumerable <triAndVertIndex> matchingVertices2 = adjVert[toLookup[j + 2].Key]; //2 Matching points //TriIndex = triangle index of the adjacent triangle foreach (triAndVertIndex index in matchingVertices0) { //Oh, yeah! It's working! (Magic!) 
//TODO turn this into a function as well foreach (triAndVertIndex otherIndex in matchingVertices1) { //If it is part of the same line if (otherIndex.triIndex == index.triIndex) { double angleBetweenTriangles = (Vector3D.Dot(faceNormals[j / 3], faceNormals[otherIndex.triIndex])); if (angleBetweenTriangles < THRESHOLD) { //area = 1/2*base*height //2*area / base = height /* * dist = vec3(area / v0.Length(), 0, 0); * gl_Position = gl_PositionIn[0]; * EmitVertex(); * dist = vec3(0, area / v1.Length(), 0); * gl_Position = gl_PositionIn[1]; * EmitVertex(); * dist = vec3(0, 0, area / v2.Length()); * gl_Position = gl_PositionIn[2]; * EmitVertex();*/ bary[j / 3].l2 = area / v2.Length(); // 1;// angleBetweenTriangles + addTo; } //If we found the adjacent triangle, we can go to the next one break; } } } foreach (triAndVertIndex index in matchingVertices1) { foreach (triAndVertIndex otherIndex in matchingVertices2) { if (otherIndex.triIndex == index.triIndex) { double angleBetweenTriangles = (Vector3D.Dot(faceNormals[j / 3], faceNormals[otherIndex.triIndex])); if (angleBetweenTriangles < THRESHOLD) { bary[j / 3].l0 = area / v0.Length(); // TODO angleBetweenTriangles + addTo; } break; } } } foreach (triAndVertIndex index in matchingVertices2) { foreach (triAndVertIndex otherIndex in matchingVertices0) { if (otherIndex.triIndex == index.triIndex) { double angleBetweenTriangles = (Vector3D.Dot(faceNormals[j / 3], faceNormals[otherIndex.triIndex])); if (angleBetweenTriangles < THRESHOLD) { bary[j / 3].l1 = area / v1.Length(); // TODO angleBetweenTriangles + addTo; } break; } } } } //Draw the points as well for (int j = 0; j < toLookup.Count; j += 3) { Vector3D v0 = toLookup[j + 2].Key - toLookup[j + 1].Key; Vector3D v1 = toLookup[j + 2].Key - toLookup[j].Key; Vector3D v2 = toLookup[j + 1].Key - toLookup[j].Key; double area = Math.Abs(Vector3D.Cross(v1, v2).Length()) / 2; //Determinant of a 2D matrix, used to calculate the area of a parallelogram IEnumerable <triAndVertIndex> matchingVertices0 = adjVert[toLookup[j].Key]; IEnumerable <triAndVertIndex> matchingVertices1 = adjVert[toLookup[j + 1].Key]; IEnumerable <triAndVertIndex> matchingVertices2 = adjVert[toLookup[j + 2].Key]; /*int numberOfAdjBary = 0; * * //Index of the adjacent triangle * foreach (triAndVertIndex index in matchingVertices0) * { * //TODO turn this into a function as well * if ((bary[index.triIndex], ((index.vertIndex + 1) % 3) + 3] > 0 || bary[index.triIndex, ((index.vertIndex + 2) % 3) + 3] > 0) * && index.triIndex != j / 3) * { * numberOfAdjBary++; * } * } * //Every line is actually 2 lines * if (numberOfAdjBary >= 4) * { * //Now, we need to do the point calculations * double dist0 = area / v0.Length(); * //bary[j / 3, 0] = ; * } * numberOfAdjBary = 0; * foreach (triAndVertIndex index in matchingVertices1) * { * if ((bary[index.triIndex, ((index.vertIndex + 1) % 3) + 3] > 0 || bary[index.triIndex, ((index.vertIndex + 2) % 3) + 3] > 0) * && index.triIndex != j / 3) * { * numberOfAdjBary++; * } * } * if (numberOfAdjBary >= 4) * { * bary[j / 3, 1] = area / v1.Length(); * } * numberOfAdjBary = 0; * foreach (triAndVertIndex index in matchingVertices2) * { * if ((bary[index.triIndex, ((index.vertIndex + 1) % 3) + 3] > 0 || bary[index.triIndex, ((index.vertIndex + 2) % 3) + 3] > 0) * && index.triIndex != j / 3) * { * numberOfAdjBary++; * } * } * if (numberOfAdjBary >= 4) * { * bary[j / 3, 2] = area / v2.Length(); * }***/ } #endregion #if true //Create the output file StreamWriter JSONFile = File.CreateText("./obj/output.txt"); //Write to file 
JSONFile.Write("model = ["); bool firstTime = true; for (int j = 0, texCount = 0; j < vertices.Count; j++) { var index = j - texCount; Vector3D[] currVert = Program.vertices[j]; if (currVert.Length == 1) { if (firstTime) { JSONFile.Write("{"); firstTime = false; } else { JSONFile.Write("]},\n{"); } JSONFile.Write("name:\"" + texNames[(int)currVert[0].X] + "\",model:["); Console.Write(texNames[(int)currVert[0].X] + "---"); texCount++; } else { //Edit string[] baryCoordsOfTri = toBary(bary[index]); //Triangle for (int i = 0; i < 3; i++) { JSONFile.Write(Vec3DToString(currVert[i])); JSONFile.Write(UVToString(uvs[index][i])); JSONFile.Write(Vec3DToString(normals[index][i])); JSONFile.Write(baryCoordsOfTri[i]); bones[index][i].Weights[1] = 0; if (bones[index][i].BoneIDs[1] == -1) { bones[index][i].Weights[0] = 1; } if (bones[index][i].Weights[0] == 0.5) { bones[index][i].Weights[0] = 0.49f; } JSONFile.Write((bones[index][i].BoneIDs[0] + 0.5) + "," + (bones[index][i].BoneIDs[1] + 0.5) + "," + bones[index][i].Weights[0] + ","); if (bones[index][i].Weights[0] > 0.45 && bones[index][i].Weights[0] < 0.55) { //printError("W1: " + string.Join(",.,", bones[index][i].Weights)); } if (bones[index][i].Weights[0] > 0.9) { //printError("W1: " + string.Join(",.,", bones[index][i].Weights)); } } } } JSONFile.Write("]}];"); JSONFile.Close(); #endif StreamWriter bonesFile = File.CreateText("./obj/outputBones.txt"); //You are going to have to reorder the parts manually bonesFile.Write("bones = ["); foreach (BoneInfo boneNode in boneNodesList) { //TODO Number of bones (To nearest power of 2) //TODO Max number of influencing bones per vertex Quaternion rot, offsetRot; Vector3D translation, offsetTranslation; Vector3D scale, offsetScale; boneNode.localMat.Decompose(out scale, out rot, out translation); boneNode.BoneOffsetMatrix.Decompose(out offsetScale, out offsetRot, out offsetTranslation); //Console.WriteLine(QuaternionToString(rot) + "->\n" + MatToString1(rot.GetMatrix())); //Console.WriteLine(QuaternionToString(rot) + "->\n" + Matrix4x4.FromTranslation(translation)); //Don't use .ToString(), use a custom function! bonesFile.WriteLine( "{name:\"" + boneNode.Name + "\",parent:" + boneNode.Parent + ",pos:[" + Vec3DToString(translation, false) + "],qRot:[" + QuaternionToString(rot) + "],offsetPos:[" + Vec3DToString(offsetTranslation, false) + "],offsetRot:[" + QuaternionToString(offsetRot) + "]},"); } bonesFile.Write("];"); bonesFile.WriteLine("\nvar animations = [];"); bonesFile.Close(); try { File.Delete("./obj/output.js"); } catch (Exception) { }; try { File.Delete("./obj/outputBones.js"); } catch (Exception) { }; try { File.Move("./obj/output.txt", Path.ChangeExtension("./obj/output.txt", ".js")); } catch (Exception) { }; try { File.Move("./obj/outputBones.txt", Path.ChangeExtension("./obj/outputBones.txt", ".js")); } catch (Exception) { }; Console.WriteLine("Info: {0} Bones {1} vertices without bones", boneNodesList.Count, nullBonesC); Console.WriteLine("DONE!"); Console.Read(); }
public LogStream info() { LogStream ret = new LogStream(yarpPINVOKE.Log_info__SWIG_1(swigCPtr), true); return(ret); }
public LogStream debug() { LogStream ret = new LogStream(yarpPINVOKE.Log_debug__SWIG_1(swigCPtr), true); return(ret); }
public LogStream trace() { LogStream ret = new LogStream(yarpPINVOKE.Log_trace__SWIG_1(swigCPtr), true); return(ret); }
public abstract void detachStream(LogStream stream, int severity);
/// <summary> /// Creates A Log Stream based on a UnityTextWriter. /// </summary> /// <param name="utw">TextWriter</param> /// <param name="param">Parameters to create the Log Stream.</param> /// <returns></returns> public static LogStream ToUnityConsoleLogStream(UnityTextWriter utw, LogStreamParams param) { var ls = new LogStream(utw, param.Mask, param.MatchType, param.SetTimeStamp); return(ls); }
public LogStream fatal() { LogStream ret = new LogStream(yarpPINVOKE.Log_fatal__SWIG_1(swigCPtr), true); return(ret); }
public override void detachStream(LogStream stream, int severity) { throw new NotImplementedException(); }
public override bool Execute(List <string> args) { if (args.Count < 1 || args.Count > 2) { return(false); } TagInstance destination = Info.Cache.Tags[0x3317]; if (args.Count == 2) { destination = ArgumentParser.ParseTagIndex(Info, args[0]); if (!destination.IsInGroup("mode")) { Console.WriteLine("Specified tag is not a render_model: " + args[0]); return(false); } args = args.Skip(1).ToList(); } var builder = new RenderModelBuilder(Info.Version); // Add a root node var node = builder.AddNode(new RenderModel.Node { Name = Info.StringIDs.GetStringID("street_cone"), ParentNode = -1, FirstChildNode = -1, NextSiblingNode = -1, DefaultRotation = new Vector4(0, 0, 0, -1), DefaultScale = 1, InverseForward = new Vector3(1, 0, 0), InverseLeft = new Vector3(0, 1, 0), InverseUp = new Vector3(0, 0, 1), }); // Begin building the default region and permutation builder.BeginRegion(Info.StringIDs.GetStringID("default")); builder.BeginPermutation(Info.StringIDs.GetStringID("default")); using (var importer = new AssimpContext()) { Scene model; using (var logStream = new LogStream((msg, userData) => Console.WriteLine(msg))) { logStream.Attach(); model = importer.ImportFile(args[0], PostProcessSteps.CalculateTangentSpace | PostProcessSteps.GenerateNormals | PostProcessSteps.JoinIdenticalVertices | PostProcessSteps.SortByPrimitiveType | PostProcessSteps.PreTransformVertices | PostProcessSteps.Triangulate); logStream.Detach(); } Console.WriteLine("Assembling vertices..."); // Build a multipart mesh from the model data, // with each model mesh mapping to a part of one large mesh and having its own material builder.BeginMesh(); ushort partStartVertex = 0; ushort partStartIndex = 0; var vertices = new List <RigidVertex>(); var indices = new List <ushort>(); foreach (var mesh in model.Meshes) { for (var i = 0; i < mesh.VertexCount; i++) { var position = mesh.Vertices[i]; var normal = mesh.Normals[i]; var uv = mesh.TextureCoordinateChannels[0][i]; var tangent = mesh.Tangents[i]; var bitangent = mesh.BiTangents[i]; vertices.Add(new RigidVertex { Position = new Vector4(position.X, position.Y, position.Z, 1), Normal = new Vector3(normal.X, normal.Y, normal.Z), Texcoord = new Vector2(uv.X, uv.Y), Tangent = new Vector4(tangent.X, tangent.Y, tangent.Z, 1), Binormal = new Vector3(bitangent.X, bitangent.Y, bitangent.Z), }); } // Build the index buffer var meshIndices = mesh.GetIndices(); indices.AddRange(meshIndices.Select(i => (ushort)(i + partStartVertex))); // Define a material and part for this mesh var material = builder.AddMaterial(new RenderMaterial { RenderMethod = Info.Cache.Tags[0x101F], }); builder.BeginPart(material, partStartIndex, (ushort)meshIndices.Length, (ushort)mesh.VertexCount); builder.DefineSubPart(partStartIndex, (ushort)meshIndices.Length, (ushort)mesh.VertexCount); builder.EndPart(); // Move to the next part partStartVertex += (ushort)mesh.VertexCount; partStartIndex += (ushort)meshIndices.Length; } // Bind the vertex and index buffers builder.BindRigidVertexBuffer(vertices, node); builder.BindIndexBuffer(indices, PrimitiveType.TriangleList); builder.EndMesh(); } builder.EndPermutation(); builder.EndRegion(); Console.WriteLine("Building Blam mesh data..."); var resourceStream = new MemoryStream(); var renderModel = builder.Build(Info.Serializer, resourceStream); Console.WriteLine("Writing resource data..."); // Add a new resource for the model data var resources = new ResourceDataManager(); resources.LoadCachesFromDirectory(Info.CacheFile.DirectoryName); resourceStream.Position = 0; 
resources.Add(renderModel.Geometry.Resource, ResourceLocation.Resources, resourceStream); Console.WriteLine("Writing tag data..."); using (var cacheStream = Info.OpenCacheReadWrite()) { var tag = destination; var context = new TagSerializationContext(cacheStream, Info.Cache, Info.StringIDs, tag); Info.Serializer.Serialize(context, renderModel); } Console.WriteLine("Model imported successfully!"); return(true); }
public static void InitCause(this Exception ex, Exception cause) { LogStream.GetInstance().WriteLine(cause); }
public EncoderUnit(string transcoder, string arguments, TransportMethod inputMethod, TransportMethod outputMethod, LogStream logStream) { this.transcoderPath = transcoder; this.arguments = arguments; this.inputMethod = inputMethod; this.outputMethod = outputMethod; this.logStream = logStream; }
public static void PrintStackTrace(Exception ex) { LogStream.GetInstance().WriteLine(ex); }
public static void PrintStackTrace(Exception ex, LogStream tw) { tw.WriteLine(ex); }
public LogStream warning() { LogStream ret = new LogStream(yarpPINVOKE.Log_warning__SWIG_1(swigCPtr), true); return(ret); }
/// <summary> /// Dumps stored log into log file. /// </summary> public static void DumpLog() { Log.ToString().Split('\n').ToList().ForEach(x => LogStream.WriteLine(x)); LogStream.Flush(); }
static public AssimpVolume LoadFromFile(string filename) { string path = Path.Combine("Assets", "Models", filename); AssimpContext importer = new AssimpContext(); NormalSmoothingAngleConfig normalSmoothing = new NormalSmoothingAngleConfig(66.0f); importer.SetConfig(normalSmoothing); LogStream logStream = new LogStream ( delegate(string message, string userData) { Console.Write(message); } ); logStream.Attach(); Scene model = importer.ImportFile(path, PostProcessPreset.TargetRealTimeMaximumQuality); Mesh mesh = model.Meshes[0]; AssimpVolume v = new AssimpVolume(); List<Vector3> newVertices = new List<Vector3>(); foreach (Assimp.Vector3D vert in mesh.Vertices) { newVertices.Add(new Vector3(vert.X, vert.Y, vert.Z)); } v.vertices = newVertices.ToArray(); v.indices = mesh.GetIndices(); if (mesh.HasNormals) { v.generateNormals = false; List<Vector3> newNormals = new List<Vector3>(); foreach (Assimp.Vector3D n in mesh.Normals) { newNormals.Add(new Vector3(n.X, n.Y, n.Z)); } v.normals = newNormals.ToArray(); } if (mesh.HasTextureCoords(0)) { List<Vector2> newTextureCoords = new List<Vector2>(); foreach (Assimp.Vector3D tc in mesh.TextureCoordinateChannels[0]) { newTextureCoords.Add(new Vector2(tc.X, tc.Y)); } v.textureCoords = newTextureCoords.ToArray(); } if (mesh.HasVertexColors(0)) { List<Vector3> newColors = new List<Vector3>(); foreach (Assimp.Color4D c in mesh.VertexColorChannels[0]) { newColors.Add(new Vector3(c.R, c.G, c.B)); } v.colors = newColors.ToArray(); } importer.Dispose(); return v; }
/// <summary> /// Detaches a logging stream from the importer. /// </summary> /// <param name="logStream">LogStream to detach</param> public void DetachLogStream(LogStream logStream) { if(logStream == null) { return; } m_logStreams.Remove(logStream); }
public void WriteLog(object value) { LogStream.Write(value); }
internal static global::System.Runtime.InteropServices.HandleRef getCPtr(LogStream obj) { return((obj == null) ? new global::System.Runtime.InteropServices.HandleRef(null, global::System.IntPtr.Zero) : obj.swigCPtr); }
private bool ImportGenericModel() { LogStream logStream = new LogStream(delegate (String msg, String userData) { Console.WriteLine(msg); }); try { logStream.Attach(); using (AssimpContext importer = new AssimpContext()) Scene = importer.ImportFile(ModelPath, PostProcessPreset.TargetRealTimeMaximumQuality); logStream.Detach(); logStream.Dispose(); if (Scene == null) return false; return true; } catch (Exception exception) { logStream.Detach(); logStream.Dispose(); System.Windows.MessageBox.Show("An error occurred: " + exception.Message); return false; } }
internal NameServiceClient(int lport, IPAddress laddr) { this._lport = lport; this.laddr = laddr; try { Baddr = Config.GetInetAddress("jcifs.netbios.baddr", Extensions.GetAddressByName("255.255.255.255")); } catch (Exception e) { LogStream.GetInstance().WriteLine(e); } _sndBuf = new byte[SndBufSize]; _rcvBuf = new byte[RcvBufSize]; if (string.IsNullOrEmpty(Ro)) { if (NbtAddress.GetWinsAddress() == null) { _resolveOrder = new int[2]; _resolveOrder[0] = ResolverLmhosts; _resolveOrder[1] = ResolverBcast; } else { _resolveOrder = new int[3]; _resolveOrder[0] = ResolverLmhosts; _resolveOrder[1] = ResolverWins; _resolveOrder[2] = ResolverBcast; } } else { int[] tmp = new int[3]; StringTokenizer st = new StringTokenizer(Ro, ","); int i = 0; while (st.HasMoreTokens()) { string s = st.NextToken().Trim(); if (Runtime.EqualsIgnoreCase(s, "LMHOSTS")) { tmp[i++] = ResolverLmhosts; } else { if (Runtime.EqualsIgnoreCase(s, "WINS")) { if (NbtAddress.GetWinsAddress() == null) { if (_log.Level > 1) { _log.WriteLine("NetBIOS resolveOrder specifies WINS however the " + "jcifs.netbios.wins property has not been set" ); } continue; } tmp[i++] = ResolverWins; } else { if (Runtime.EqualsIgnoreCase(s, "BCAST")) { tmp[i++] = ResolverBcast; } else { if (Runtime.EqualsIgnoreCase(s, "DNS")) { } else { // skip if (_log.Level > 1) { _log.WriteLine("unknown resolver method: " + s); } } } } } } _resolveOrder = new int[i]; Array.Copy(tmp, 0, _resolveOrder, 0, i); } }
// // IMPORTER // private void InitLog() { LogStream logstream = new LogStream(delegate(String msg, String userData) { Console.WriteLine(msg); }); logstream.Attach(); }
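The InitLog helper above attaches a LogStream that is never detached or disposed; a hedged alternative that scopes the stream to a single import could look like the following sketch (the helper name, importer, and path parameter are illustrative assumptions).
// Sketch: keep the logging stream attached only while it is actually needed.
private static Scene ImportWithLogging(string path) // hypothetical helper
{
    using (var logStream = new LogStream(delegate(String msg, String userData) { Console.WriteLine(msg); }))
    using (var importer = new AssimpContext())
    {
        logStream.Attach();
        try
        {
            return importer.ImportFile(path, PostProcessPreset.TargetRealTimeMaximumQuality);
        }
        finally
        {
            logStream.Detach();
        }
    }
}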
/// <summary> /// This must be called before <tt>getInstance</tt> is called or /// it will have no effect. /// </summary> /// <remarks> /// This must be called before <tt>getInstance</tt> is called or /// it will have no effect. /// </remarks> public static void SetInstance(TextWriter other) { //inst = new Jcifs.Util.LogStream(); _inst = new LogStream(other); }
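A hedged sketch of using SetInstance above to redirect the shared jCIFS log to a file; the file name is illustrative, and per the remarks this must happen before the first GetInstance call.
// Sketch: route all subsequent LogStream output to a file instead of the default writer.
var writer = new StreamWriter("jcifs.log") { AutoFlush = true }; // path is illustrative
LogStream.SetInstance(writer);
LogStream.GetInstance().WriteLine("jCIFS logging redirected"); // now written to jcifs.log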
public virtual bool detatchStream(LogStream pStream, Logger.ErrorSeverity severity) { bool ret = assimp_swigPINVOKE.Logger_detatchStream__SWIG_0(swigCPtr, LogStream.getCPtr(pStream), (uint)severity); return(ret); }
public static HistoryNode FromLog(LogStream l, string username) { if (l.Position == null) { return(null); } HistoryNode n = new HistoryNode(); n.Username = username; n.Dimension = l.Position.Dimension; n.Timestamp = l.Timestamp; if (l.Digging != null) { if (l.Digging.Status != PlayerDigging.StatusEnum.FinishedDigging) { return(null); } n.Position = l.Digging.Position.CloneDouble(); n.Color = Color.Red; return(n); } if (l.PlaceBlock != null) { n.Position = l.PlaceBlock.BlockPosition.CloneDouble(); if (l.PlaceBlock.Item == null || l.PlaceBlock.Item.ItemID <= 0) { n.Color = Color.Transparent; return(n); } n.Color = Color.GreenYellow; //All other blocks n.Item = l.PlaceBlock.Item; if (l.PlaceBlock.Item.ItemID == BlockID.Waterbucket) { n.Color = Color.Blue; } if (l.PlaceBlock.Item.ItemID == BlockID.FlintandSteel) { n.Color = Color.Orange; } if (l.PlaceBlock.Item.ItemID == BlockID.Lavabucket) { n.Color = Color.Orange; } if (l.PlaceBlock.Item.ItemID == BlockID.TNT) { n.Color = Color.Red; } return(n); } if (l.Window != null && l.Window.WindowID >= 0) { n.Position = l.Position.Position; n.Color = Color.Yellow; n.Item = l.Window.Item; return(n); } return(null); }
public LogStream error() { LogStream ret = new LogStream(yarpPINVOKE.Log_error__SWIG_1(swigCPtr), true); return(ret); }
public abstract bool DetatchStream(LogStream stream, ErrorSeverity severity = ErrorSeverity.Debugging | ErrorSeverity.Err | ErrorSeverity.Warn | ErrorSeverity.Info);
private static void BuildMesh(D3D10.Device d, string meshArgs) { string name = meshArgs.Split(',')[0]; InputElement[] layout = (InputElement[])typeof(MeshInputElements10).GetField(meshArgs.Split(',')[1]).GetValue(typeof(MeshInputElements10)); string fileName = ConfigurationSettings.AppSettings["ExportsFolder"] + name; AssimpImporter importer = new AssimpImporter(); LogStream logstream = new LogStream(delegate(String msg, String userData) { Console.WriteLine(msg); }); importer.AttachLogStream(logstream); importer.Scale = 0; importer.XAxisRotation = 0; importer.YAxisRotation = 0; importer.ZAxisRotation = 0; importer.VerboseLoggingEnabled = true; importer.RemoveConfigs(); Scene model = importer.ImportFile(fileName, PostProcessSteps.CalculateTangentSpace // calculate tangents and bitangents if possible | PostProcessSteps.JoinIdenticalVertices // join identical vertices/ optimize indexing CAUSES A PROBLEM //| PostProcessSteps.ValidateDataStructure // perform a full validation of the loader's output | PostProcessSteps.ImproveCacheLocality // improve the cache locality of the output vertices | PostProcessSteps.RemoveRedundantMaterials // remove redundant materials //| PostProcessSteps.FindDegenerates // remove degenerated polygons from the import CAUSES A PROBLEM //| PostProcessSteps.FindInvalidData // detect invalid model data, such as invalid normal vectors | PostProcessSteps.GenerateUVCoords // convert spherical, cylindrical, box and planar mapping to proper UVs | PostProcessSteps.TransformUVCoords // preprocess UV transformations (scaling, translation ...) //| PostProcessSteps.FindInstances // search for instanced meshes and remove them by references to one master //| PostProcessSteps.LimitBoneWeights // limit bone weights to 4 per vertex | PostProcessSteps.OptimizeMeshes // join small meshes, if possible; | PostProcessSteps.GenerateSmoothNormals // generate smooth normal vectors if not existing | PostProcessSteps.Triangulate // triangulate polygons with more than 3 edges | PostProcessSteps.SortByPrimitiveType // make 'clean' meshes which consist of a single typ of primitives | PostProcessSteps.FlipUVs // common DirectX issue (Xna also) | PostProcessSteps.FixInFacingNormals | PostProcessSteps.MakeLeftHanded | PostProcessSteps.FlipWindingOrder ); MeshHelper.BuildMeshTextures(d, model); Mesh3D m = MeshHelper.LoadFromFile(d, model, layout); importer.Dispose(); Stream stream = File.Open(ConfigurationSettings.AppSettings["ExportsFolder"] + name.Replace(".X", ".mesh"), FileMode.Create); BinaryFormatter bFormatter = new BinaryFormatter(); if (m != null) { bFormatter.Serialize(stream, m); } stream.Close(); }
/// <summary> /// Loads the scene data from the corresponding file. /// </summary> private void LoadScene() { // Instantiate the class used to load the scene data. AssimpImporter importer = new AssimpImporter(); // Define the callback delegate for logging messages while the scene data is being loaded. LogStream logstream = new LogStream(delegate(String msg, String userData) { Console.WriteLine(msg); }); importer.AttachLogStream(logstream); // Load the scene data from the corresponding file. m_scene = importer.ImportFile(Path.Combine(m_scenePath, m_sceneFileName), PostProcessPreset.TargetRealTimeMaximumQuality); // Release the resources used for loading the scene data. importer.Dispose(); }
public virtual bool attachStream(LogStream pStream) { bool ret = assimp_swigPINVOKE.Logger_attachStream__SWIG_1(swigCPtr, LogStream.getCPtr(pStream)); return(ret); }