private Switch setupPosViewBone(PosViewReader pos, int boneIndex)
{
    // Builds the scene-graph switch holding one separator per animation frame
    // for the given bone; the switch starts on the first frame.
    Switch frameSwitch = new Switch();
    frameSwitch.reference();

    _bones[boneIndex] = new Separator();

    // Each bone gets its own material; the palette wraps around so more bones
    // than defined colors cannot index past the end.
    _boneMaterials[boneIndex] = new Material();
    float[][] palette = PosViewSettings.PosViewColors;
    int paletteIndex = boneIndex % palette.Length;
    _boneMaterials[boneIndex].setColor(palette[paletteIndex][0], palette[paletteIndex][1], palette[paletteIndex][2]);
    _boneMaterials[boneIndex].setOverride(true); // force this material onto everything below it

    // Load the bone geometry once; every frame separator below shares it.
    _bones[boneIndex].addFile(pos.IvFileNames[boneIndex], false);

    // Only insert the material when the config file asks for coloring.
    if (pos.SetColor)
    {
        _bones[boneIndex].insertNode(_boneMaterials[boneIndex], 0);
    }

    Transform[] frameTransforms = DatParser.parsePosViewRTFileToTransforms(pos.RTFileNames[boneIndex]);
    _numPositions = frameTransforms.Length;
    foreach (Transform frameTransform in frameTransforms)
    {
        Separator frame = new Separator();
        frame.addNode(frameTransform);
        frame.addChild(_bones[boneIndex]);
        frameSwitch.addChild(frame);
    }

    frameSwitch.whichChild(0); // start at the first frame
    frameSwitch.unrefNoDelete();
    return (frameSwitch);
}
public async Task Flush()
{
    // Finish writing the temp file before re-reading it.
    await writer.FlushAsync();
    writer.Dispose();

    // Re-parse every line of the temp file into filter entries.
    var filters = new List <FilterEntry>();
    using (var input = new StreamReader(temp.OpenShareableRead()))
    {
        for (var line = await input.ReadLineAsync(); line != null; line = await input.ReadLineAsync())
        {
            var filter = DatParser.ParseEntry(line);
            if (filter != null)
            {
                filters.Add(filter);
            }
        }
    }

    // Sort and merge the entries into the final list.
    var list = FilterCollection.Merge(filters);

    // Write the merged list out in eMule format; share read access so other
    // readers can open the file while it is being produced.
    using (var stream = file.Open(FileMode.Create, FileAccess.Write, FileShare.Read))
    using (var listWriter = new EmuleWriter(stream))
    {
        await listWriter.Write(list, null);
    }
}
public async Task Flush()
{
    // Finish writing the temp file before re-reading it.
    await writer.FlushAsync();
    writer.Dispose();

    // Re-parse every line of the temp file into filter entries.
    var filters = new List <FilterEntry>();
    using (var input = new StreamReader(temp.OpenShareableRead()))
    {
        string line;
        while ((line = await input.ReadLineAsync()) != null)
        {
            var filter = DatParser.ParseEntry(line);
            if (filter != null)
            {
                filters.Add(filter);
            }
        }
    }

    // Sort and merge the list
    var list = FilterCollection.Merge(filters);

    // Flush the list out. The output is always written in P2P format; the old
    // extension-based format detection (.p2p vs eMule) was computed but never
    // used, so the dead code was removed.
    // TODO: choose the writer from file.Extension once multiple formats are supported.
    using var stream = file.Open(FileMode.Create, FileAccess.Write, FileShare.Read);
    using var listWriter = new P2pWriter(stream);
    await listWriter.Write(list, null);
}
public EditForm(String title, DatParser d)
{
    // Wires up the list view, property grid and search controls, then creates
    // one column header (and one search choice) per writable definition property.
    InitializeComponent();
    _parser = d;

    _listView.BackColor = ConvertUtilities.HtmlColorToColor(RConfig.Instance.EditorBackColor);
    _listView.ColumnWidthChanged += _listView_ColumnWidthChanged;
    _listView.RetrieveVirtualItem += _listView_RetrieveVirtualItem;
    _listView.ItemSelectionChanged += _listView_ItemSelectionChanged;
    _listView.ColumnClick += _listView_ColumnClick;
    _propertyGid.PropertyValueChanged += _propertyGid_PropertyValueChanged;
    searchColumn.SelectedIndexChanged += _searchColumn_SelectedIndexChanged;
    searchText.KeyUp += _seatchText_KeyUp;
    KeyUp += EditForm_KeyUp;

    Text = Localizate.getMessage(Word.EDITOR) + ": " + title;

    Definition definition = d.getDefinition();
    foreach (PropertyInfo property in definition.GetType().GetProperties())
    {
        // Read-only properties are neither listed nor searchable.
        if (!property.CanWrite)
        {
            continue;
        }
        addHeader(property.Name);
        searchColumn.Items.Add(property.Name);
    }
}
static async Task SilentMain()
{
    // Headless update path: downloads the default filter list, parses it into
    // entries, and applies it to every installed application, tracing progress
    // instead of showing any UI.
    var detector = new ApplicationEnumerator();
    var apps = (await detector.GetInstalledApplications()).ToList();
    var cancellationSource = new CancellationTokenSource();

    // Download the filter
    var downloader = new FilterDownloader();
    var progressValue = 0;
    var progress = new Progress <ProgressModel>(delegate(ProgressModel model)
    {
        progressValue = model.Value;
        Trace.TraceInformation("{0}", model.Caption);
    });
    using (var filter = await downloader.DownloadFilter(null, cancellationSource.Token, progress))
    {
        if (filter.Exception != null)
        {
            throw filter.Exception;
        }
        Trace.TraceInformation("Parsing filter (" + filter.Length + " bytes)");
        // Rewind and parse the downloaded stream line by line; the final `true`
        // argument leaves the stream open so `filter` keeps ownership of it.
        filter.Stream.Seek(0, SeekOrigin.Begin);
        using (var reader = new StreamReader(filter.Stream, Encoding.Default, false, 65535, true))
        {
            var line = await reader.ReadLineAsync();
            while (line != null)
            {
                var entry = DatParser.ParseEntry(line);
                if (entry != null)
                {
                    filter.Entries.Add(entry);
                }
                // NOTE(review): percent is computed but the throttled progress
                // report below is commented out, so it is currently unused.
                var percent = (int)Math.Floor((double)filter.Stream.Position / filter.Stream.Length * 100);
                await Task.Yield();
                //if( percent > progressValue) progress.Report(UpdateState.Decompressing, "Parsed " + filter.Entries.Count + " entries", percent);
                line = await reader.ReadLineAsync();
            }
            Trace.TraceInformation("Parsed " + filter.Entries.Count + " entries");
        }
        // Push the parsed filter to every detected application.
        foreach (var application in apps)
        {
            Trace.TraceInformation("Updating app {0} {1}", application.Description, application.Version);
            await application.Application.UpdateFilterAsync(filter, cancellationSource.Token, progress);
        }
    }
    Trace.TraceInformation("Done.");
}
public void ParseEntry()
{
    // A standard .dat line (with leading zeroes in each octet) must yield the
    // packed range endpoints, the access level, and the trailing description.
    var parsed = DatParser.ParseEntry("001.002.003.001 - 001.002.003.254 , 000 , Description text");

    Assert.AreEqual((uint)0x01020301, parsed.From);
    Assert.AreEqual((uint)0x010203FE, parsed.To);
    Assert.AreEqual(0, parsed.Level);
    Assert.AreEqual("Description text", parsed.Description);
}
public DocumentCollection GetDocs()
{
    // Builds a test DocumentCollection: two documents parsed from Concordance
    // DAT lines plus three image documents built from OPT records; docs[1] is
    // re-parented under docs[0] before returning.
    var datLines = new List <string>(new string[] {
        "þDOCIDþþBEGATTþþVOLUMEþþDOCTYPEþþNATIVEþ",
        "þDOC000001þþDOC000001þþVOL001þþEMAILþþX:\\VOL001\\NATIVE\\0001\\DOC000001.XLSXþ",
        "þDOC000002þþDOC000001þþVOL001þþPDFþþþ",
        null, // trailing nulls let the mocked ReadLine signal end-of-file
        null
    });
    var optLines = new List <string[]> {
        new string[] { "DOC000001", "VOL001", "X:\\VOL001\\IMAGES\\0001\\DOC000001.jpg", "Y", "", "", "1" },
        new string[] { "DOC000002", "VOL001", "X:\\VOL001\\IMAGES\\0001\\DOC000002.tif", "Y", "", "", "2" },
        new string[] { "DOC000003", "VOL001", "X:\\VOL001\\IMAGES\\0001\\DOC000003.tif", "", "", "", "" }
    };
    var mockReader = new Mock <TextReader>();
    int calls = 0;
    mockReader
        .Setup(r => r.ReadLine())
        .Returns(() => datLines[calls])
        .Callback(() => calls++);
    FileInfo infile = new FileInfo(@"X:\VOL001\infile.dat");
    bool hasHeader = true;
    string keyColName = "DOCID";
    string parentColName = "BEGATT";
    string childColName = String.Empty;
    string childColDelim = ";";
    RepresentativeBuilder repSetting = new RepresentativeBuilder("NATIVE", Representative.FileType.Native);
    List <RepresentativeBuilder> reps = new List <RepresentativeBuilder>();
    reps.Add(repSetting);
    var builder = new DatBuilder();
    IParser parser = new DatParser(Delimiters.CONCORDANCE);
    List <string[]> records = parser.Parse(mockReader.Object);
    builder.HasHeader = hasHeader;
    builder.KeyColumnName = keyColName;
    builder.ParentColumnName = parentColName;
    builder.ChildColumnName = childColName;
    builder.ChildSeparator = childColDelim;
    builder.RepresentativeBuilders = reps;
    // BUGFIX: this previously re-assigned ParentColumnName to the directory
    // path, clobbering "BEGATT"; the path belongs in PathPrefix (see the other
    // builder setups in this file).
    builder.PathPrefix = infile.Directory.FullName;
    List <Document> documents = builder.Build(records);
    var docs = new DocumentCollection(documents);
    var optBuilder = new OptBuilder();
    optBuilder.PathPrefix = String.Empty;
    optBuilder.TextBuilder = null;
    List <Document> optDocs = optBuilder.Build(optLines);
    docs.AddRange(optDocs);
    docs[1].SetParent(docs[0]);
    return (docs);
}
public void Exporters_DatExporter_FromCsvTest()
{
    // Arrange: a reader that replays datLines and a writer that captures output.
    var readerMock = new Mock <TextReader>();
    var writerMock = new Mock <TextWriter>();
    int lineIndex = 0;
    readerMock
        .Setup(r => r.ReadLine())
        .Returns(() => datLines[lineIndex])
        .Callback(() => lineIndex++);
    var written = new List <string>();
    writerMock
        .Setup(w => w.WriteLine(It.IsAny <string>()))
        .Callback((string s) => written.Add(s));

    var infile = new FileInfo(@"X:\VOL001\infile.dat");
    var reps = new List <RepresentativeBuilder>
    {
        new RepresentativeBuilder("NATIVE", Representative.FileType.Native)
    };
    IParser parser = new DatParser(Delimiters.CONCORDANCE);
    var builder = new DatBuilder
    {
        HasHeader = true,
        KeyColumnName = "DOCID",
        ParentColumnName = "BEGATT",
        ChildColumnName = String.Empty,
        ChildSeparator = ";",
        RepresentativeBuilders = reps,
        PathPrefix = infile.Directory.FullName
    };

    // act: parse the DAT records, build documents, export pipe/caret delimited.
    List <string[]> records = parser.Parse(readerMock.Object);
    List <Document> documents = builder.Build(records);
    DocumentCollection docs = new DocumentCollection(documents);
    string[] fields = new string[] { "DOCID", "BEGATT", "VOLUME", "NATIVE" };
    var exporter = DatExporter.Builder
        .Start(writerMock.Object, fields)
        .SetDelimiters(Delimiters.PIPE_CARET)
        .Build();
    exporter.Export(docs);

    // assert: header row followed by one row per document.
    Assert.AreEqual("^DOCID^|^BEGATT^|^VOLUME^|^NATIVE^", written[0]);
    Assert.AreEqual("^DOC000001^|^DOC000001^|^VOL001^|^X:\\VOL001\\NATIVE\\0001\\DOC000001.XLSX^", written[1]);
    Assert.AreEqual("^DOC000002^|^DOC000001^|^VOL001^|^^", written[2]);
}
public async Task WriteLineAsync(string line)
{
    // Normalize the raw .dat line; lines the parser rejects are logged and dropped.
    var normalized = DatParser.ParseLine(line);
    if (normalized != null)
    {
        await writer.WriteLineAsync(normalized);
        return;
    }
    Trace.TraceWarning("Invalid line: " + line);
}
public MergeForm(DatParser dat)
{
    InitializeComponent();

    // Offer every field of the source DAT in all three selection lists.
    foreach (String fieldName in dat.getFieldNames())
    {
        _fieldNames.Items.Add(fieldName);
        _updateFieldNames.Items.Add(fieldName);
        _secondKeyList.Items.Add(fieldName);
    }

    // Default each list to the first field.
    _fieldNames.SelectedIndex = 0;
    _updateFieldNames.SelectedIndex = 0;
    _secondKeyList.SelectedIndex = 0;
}
public void Exporters_OptExporter_FromCsvTest()
{
    // Arrange: replay datLines through a mocked reader; capture writer output.
    var readerMock = new Mock <TextReader>();
    var writerMock = new Mock <TextWriter>();
    int lineIndex = 0;
    readerMock
        .Setup(r => r.ReadLine())
        .Returns(() => datLines[lineIndex])
        .Callback(() => lineIndex++);
    var written = new List <string>();
    writerMock
        .Setup(w => w.WriteLine(It.IsAny <string>()))
        .Callback((string s) => written.Add(s));

    var infile = new FileInfo(@"X:\VOL001\infile.dat");
    var reps = new List <RepresentativeBuilder>
    {
        new RepresentativeBuilder("NATIVE", Representative.FileType.Native)
    };
    IParser parser = new DatParser(Delimiters.CONCORDANCE);
    var builder = new DatBuilder
    {
        HasHeader = true,
        KeyColumnName = "DOCID",
        ParentColumnName = "BEGATT",
        ChildColumnName = String.Empty,
        ChildSeparator = ";",
        RepresentativeBuilders = reps,
        PathPrefix = infile.Directory.FullName
    };

    // act: documents built from a DAT file carry no images, so exporting them
    // in OPT format should write nothing.
    List <string[]> records = parser.Parse(readerMock.Object);
    List <Document> documents = builder.Build(records);
    DocumentCollection docs = new DocumentCollection(documents);
    var exporter = OptExporter.Builder.Start(writerMock.Object).SetVolumeName("TEST001").Build();
    exporter.Export(docs);

    // assert
    Assert.IsTrue(written.Count == 0);
}
public static Separator test()
{
    // Smoke test: load the average scaphoid model, attach it to a separator,
    // and apply the first frame of its distance-visualization color map.
    // (Removed dead locals `uint a; int bb` and a redundant int[] cast.)
    Separator level1 = new Separator();
    ColoredBone b = new ColoredBone(@"P:\WORKING_OI_CODE\distv\data\model\scaAVG.iv");
    int numPositions = 90; // frame count expected in the color file
    int numVertices = b.getNumberVertices();
    int[][] colors = DatParser.parseDistvColorFile(@"P:\WORKING_OI_CODE\distv\data\color\scacolor.dat", numPositions, numVertices);
    level1.addNode(b);
    b.setColorMap(colors[0]);
    return (level1);
}
public void ParseLine()
{
    // Comment lines in any accepted style are dropped entirely.
    Assert.IsNull(DatParser.ParseLine("// C style comment"));
    Assert.IsNull(DatParser.ParseLine("/ Invalid comment"));
    Assert.IsNull(DatParser.ParseLine("# Comment"));

    // A valid entry is reduced to just its IP range, regardless of access level
    // (as long as that level is allowed).
    const string expectedRange = "192.168.1.1 - 192.168.1.254";
    Assert.AreEqual(expectedRange, DatParser.ParseLine(expectedRange + " , 000 , Some organization"));
    Assert.AreEqual(expectedRange, DatParser.ParseLine(expectedRange + " , 123 , Some organization"));

    // Access is > 127 so ignored
    Assert.IsNull(DatParser.ParseLine(expectedRange + " , 128 , Some organization"));

    // Leading zeroes
    Assert.AreEqual("12.28.15.152 - 12.28.15.159", DatParser.ParseLine("012.028.015.152 - 012.028.015.159 , 000 , HILTON HOTEL CORPORATION"));
}
public Task WriteLineAsync(string line)
{
    // Normalize the raw .dat line; valid lines are written through synchronously.
    var normalized = DatParser.ParseLine(line);
    if (normalized != null)
    {
        writer.WriteLine(normalized);
        return (success);
    }
    // '#' comment lines are expected noise — only warn on genuinely bad input.
    if (!line.StartsWith("#"))
    {
        Trace.TraceWarning("Invalid line: " + line);
    }
    return (success);
}
public void Exporters_DatExporter_FromLfpTest()
{
    // Arrange: replay optLines through a mocked reader; capture writer output.
    var readerMock = new Mock <TextReader>();
    var writerMock = new Mock <TextWriter>();
    int lineIndex = 0;
    readerMock
        .Setup(r => r.ReadLine())
        .Returns(() => optLines[lineIndex])
        .Callback(() => lineIndex++);
    var written = new List <string>();
    writerMock
        .Setup(w => w.WriteLine(It.IsAny <string>()))
        .Callback((string s) => written.Add(s));

    FileInfo infile = new FileInfo(@"X:\VOL001\infile.opt");
    TextBuilder rep = new TextBuilder(
        TextBuilder.TextLevel.None,
        TextBuilder.TextLocation.None,
        null, null);
    IParser parser = new DatParser(Delimiters.COMMA_DELIMITED);
    var builder = new OptBuilder
    {
        PathPrefix = String.Empty,
        TextBuilder = rep
    };

    // act: parse OPT records, build image documents, export comma/quote DAT.
    List <string[]> records = parser.Parse(readerMock.Object);
    List <Document> documents = builder.Build(records);
    DocumentCollection docs = new DocumentCollection(documents);
    string[] fields = new string[] { "DocID", "Page Count" };
    var exporter = DatExporter.Builder
        .Start(writerMock.Object, fields)
        .SetDelimiters(Delimiters.COMMA_QUOTE)
        .Build();
    exporter.Export(docs);

    // assert: header row plus one row per document with its page count.
    Assert.AreEqual("\"DocID\",\"Page Count\"", written[0]);
    Assert.AreEqual("\"000000001\",\"1\"", written[1]);
    Assert.AreEqual("\"000000002\",\"2\"", written[2]);
}
private void setupDistv(string rootPath)
{
    // Builds the full distance-visualization scene graph: one separator per
    // bone, each holding a switch of per-frame transforms plus the colored bone
    // geometry; inverse transforms are kept in a parallel switch.
    _bones = new ColoredBone[NUM_BONES];
    _boneSeparators = new Separator[NUM_BONES];
    _transformsSwitch = new Switch[NUM_BONES];
    _inverseTransformsSwitch = new Switch[NUM_BONES];
    _colorData = new int[NUM_BONES][][];
    for (int i = 0; i < NUM_BONES; i++)
    {
        // Per-bone data files are found by substituting the short bone name
        // into the configured file-name patterns.
        string bonePath = Path.Combine(rootPath, String.Format(BONE_FILE_PATTERN, WristFilesystem.ShortBoneNames[i]));
        string transformPath = Path.Combine(rootPath, String.Format(RT_FILE_PATTERN, WristFilesystem.ShortBoneNames[i]));
        string colorPath = Path.Combine(rootPath, String.Format(COLOR_FILE_PATTERN, WristFilesystem.ShortBoneNames[i]));
        _boneSeparators[i] = new Separator();
        _bones[i] = new ColoredBone(bonePath);
        _transformsSwitch[i] = new Switch();
        _inverseTransformsSwitch[i] = new Switch();
        TransformRT[] tfrm = DatParser.parseRTFileWithHeaderToRT(transformPath);
        for (int j = 0; j < tfrm.Length; j++)
        {
            // Forward transform for frame j.
            Transform t1 = new Transform();
            DatParser.addRTtoTransform(tfrm[j], t1);
            _transformsSwitch[i].addChild(t1);
            //now the inverse to allow us to fix another bone
            Transform t2 = new Transform();
            DatParser.addRTtoTransform(tfrm[j], t2);
            t2.invert();
            _inverseTransformsSwitch[i].addChild(t2);
        }
        // NOTE(review): only the last bone's frame count is kept — this assumes
        // every bone has the same number of frames.
        _numPositions = tfrm.Length; //TODO: Check that all are the same...
        _colorData[i] = DatParser.parseDistvColorFile(colorPath, tfrm.Length, _bones[i].getNumberVertices());
        _bones[i].setupFullColorMap(_colorData[i]);
        //TODO: Add swich in with transforms....
        _boneSeparators[i].addNode(_transformsSwitch[i]);
        _boneSeparators[i].addNode(_bones[i]);
        _root.addChild(_boneSeparators[i]);
    }
    // Start every bone on the first frame's color map.
    setAllColorMaps(0);
}
public void Exporters_LfpExporter_FromOptTest()
{
    // Arrange: replay optLines through a mocked reader; capture writer output.
    var readerMock = new Mock <TextReader>();
    var writerMock = new Mock <TextWriter>();
    int lineIndex = 0;
    readerMock
        .Setup(r => r.ReadLine())
        .Returns(() => optLines[lineIndex])
        .Callback(() => lineIndex++);
    var written = new List <string>();
    writerMock
        .Setup(w => w.WriteLine(It.IsAny <string>()))
        .Callback((string s) => written.Add(s));

    FileInfo infile = new FileInfo(@"X:\VOL001\infile.opt");
    TextBuilder rep = new TextBuilder(
        TextBuilder.TextLevel.None,
        TextBuilder.TextLocation.None,
        null, null);
    IParser parser = new DatParser(Delimiters.COMMA_DELIMITED);
    var builder = new OptBuilder
    {
        PathPrefix = String.Empty,
        TextBuilder = rep
    };

    // act: parse OPT records, build image documents, export as LFP records.
    List <string[]> records = parser.Parse(readerMock.Object);
    List <Document> documents = builder.Build(records);
    DocumentCollection docs = new DocumentCollection(documents);
    var exporter = LfpExporter.Builder
        .Start(writerMock.Object)
        .SetVolumeName("TEST001")
        .Build();
    exporter.Export(docs);

    // assert: IM records with the requested volume label substituted in.
    Assert.AreEqual("IM,000000001,D,0,@TEST001;X:\\VOL001\\IMAGES\\0001;000000001.jpg;4,0", written[0]);
    Assert.AreEqual("IM,000000002,D,0,@TEST001;X:\\VOL001\\IMAGES\\0001;000000002.tif;2,0", written[1]);
    Assert.AreEqual("IM,000000003,,0,@TEST001;X:\\VOL001\\IMAGES\\0001;000000003.tif;2,0", written[2]);
}
public void TestSetup()
{
    // Shared fixture: parses two documents out of Concordance DAT lines and
    // stores them in the `docs` field for the tests in this class.
    datLines = new List <string>(new string[] {
        "þDOCIDþþBEGATTþþVOLUMEþþNATIVEþ",
        "þDOC000001þþDOC000001þþVOL001þþX:\\VOL001\\NATIVE\\0001\\DOC000001.XLSXþ",
        "þDOC000002þþDOC000001þþVOL001þþþ",
        null, // trailing nulls let the mocked ReadLine signal end-of-file
        null
    });
    var mockReader = new Mock <TextReader>();
    int calls = 0;
    mockReader
        .Setup(r => r.ReadLine())
        .Returns(() => datLines[calls])
        .Callback(() => calls++);
    FileInfo infile = new FileInfo(@"X:\VOL001\infile.dat");
    bool hasHeader = true;
    string keyColName = "DOCID";
    string parentColName = "BEGATT";
    string childColName = String.Empty;
    string childColDelim = ";";
    RepresentativeBuilder repSetting = new RepresentativeBuilder("NATIVE", Representative.FileType.Native);
    List <RepresentativeBuilder> reps = new List <RepresentativeBuilder>();
    reps.Add(repSetting);
    var builder = new DatBuilder();
    IParser parser = new DatParser(Delimiters.CONCORDANCE);
    List <string[]> records = parser.Parse(mockReader.Object);
    builder.HasHeader = hasHeader;
    builder.KeyColumnName = keyColName;
    builder.ParentColumnName = parentColName;
    builder.ChildColumnName = childColName;
    builder.ChildSeparator = childColDelim;
    builder.RepresentativeBuilders = reps;
    // BUGFIX: this previously re-assigned ParentColumnName to the directory
    // path, clobbering "BEGATT"; the path belongs in PathPrefix (see the other
    // builder setups in this file).
    builder.PathPrefix = infile.Directory.FullName;
    List <Document> documents = builder.Build(records);
    docs = new DocumentCollection(documents);
}
private Switch setupPosViewHAMs(PosViewReader pos, int boneIndex)
{
    // Builds a switch with one HAM axis per frame for the given bone, colored
    // with the bone's palette color.
    Switch s = new Switch();
    if (!pos.ShowHams)
    {
        return (s); // HAM display disabled: return an empty, unreferenced switch
    }
    s.reference();
    double[][] HAMdata = DatParser.parsePosViewHAMFile(pos.HAMFileNames[boneIndex]);
    float[][] hamColors = PosViewSettings.PosViewColors;
    // BUGFIX: wrap the color index so more bones than defined colors no longer
    // throws IndexOutOfRangeException (same guard as setupPosViewBone).
    int colorIndex = boneIndex % hamColors.Length;
    for (int i = 0; i < HAMdata.Length; i++)
    {
        Separator sepPosition = new Separator();
        s.addChild(sepPosition);
        Material color = new Material();
        color.setColor(hamColors[colorIndex][0], hamColors[colorIndex][1], hamColors[colorIndex][2]);
        color.setOverride(true);
        sepPosition.addNode(color);
        // Columns 1-3 and 5-7 of each HAM row feed the axis — confirm the exact
        // column semantics against parsePosViewHAMFile.
        HamAxis axis = new HamAxis(HAMdata[i][1], HAMdata[i][2], HAMdata[i][3], HAMdata[i][5], HAMdata[i][6], HAMdata[i][7]);
        sepPosition.addNode(axis);
        // Negative settings mean "use the axis default".
        if (_reader.HamLength > -1)
        {
            axis.SetHamLength(_reader.HamLength);
        }
        if (_reader.HamRadius > -1)
        {
            axis.SetHamRadius(_reader.HamRadius);
        }
    }
    s.whichChild(0); //set it to start at the first frame
    s.unrefNoDelete();
    return (s);
}
private void DatInfo_init()
{
    try
    {
        // Resolve the definition class matching the currently selected file and
        // instantiate it reflectively; fall back to a plain DatParser when the
        // created instance is null.
        Type definitionType = Type.GetType("com.jds.PathEditor.classes.client.definitions." + SelectedFiles);
        object parserInstance = definitionType.InvokeMember(null, BindingFlags.CreateInstance, null, null, new object[] { });
        DatInfo = parserInstance != null ? (DatParser)parserInstance : new DatParser();
        DatDatas = new List <Definition>();
    }
    catch (Exception e)
    {
        // Resolution/instantiation failures (including an unknown type name)
        // are logged and leave the previous state untouched.
        _log.Info(e.StackTrace, e);
    }
}
private CT run()
{
    // Loads (or would reuse — see disabled cache below) the CT volume, builds a
    // point-cloud object per bone (optionally positioned by kinematic
    // transforms), and attaches two textured drag planes to the scene root.
    // Returns the volume so the caller can build a volume-rendering texture.
    _bones = new Separator[TextureSettings.ShortBNames.Length];
    _subjectPath = textBoxSubjectDirectory.Text.Trim();
    //TODO: Figure out the image type....
    parseCropValues();
    CT mri;
    //check if we have this MRI saved!!!, dirty cache
    //TODO: Check if the crop values are compatable!!!
    // NOTE(review): the cache branch is disabled by the "false &&" guard, so
    // the image is always reloaded.
    if (false && _LastImagePath.ToLower().Equals(textBoxImageFile.Text.Trim().ToLower()))
    {
        mri = _LastMRI;
    }
    else
    {
        //pass crop values now, for faster read :)
        mri = CT.SmartLoad(textBoxImageFile.Text);
        mri.setCrop(_minX, _maxX, _minY, _maxY, _minZ, _maxZ);
        if (mri.Layers == 1) //the default case, we want to load the only layer, echo 0
        {
            mri.loadImageData();
        }
        else //for other cases, we should try and load layer 5, the layer used by the Wrist Registration Code.
        {
            mri.loadImageData(5); //TODO: Option for loading different image layers, check for at least 6, etc.
        }
        _LastMRI = mri;
        _LastImagePath = textBoxImageFile.Text.Trim(); //save filename, to use in cache
    }
    // Same layer choice as above: layer 0 for single-layer images, else layer 5.
    Byte[][] voxels = mri.getCroppedRegionScaledToBytes((mri.Layers == 1) ? 0 : 5);
    // Scan layer 0 for its min/max byte values.
    // NOTE(review): min/max are never read afterwards — confirm whether this
    // scan can be removed.
    int min = 1000;
    int max = -10;
    for (int i = 0; i < voxels[0].Length; i++)
    {
        if (voxels[0][i] < min)
        {
            min = voxels[0][i];
        }
        if (voxels[0][i] > max)
        {
            max = voxels[0][i];
        }
    }
    Hashtable transforms = null;
    _transformParser = null;
    // Optional kinematics: only the auto-registration format is implemented.
    if (File.Exists(textBoxKinematicFilename.Text))
    {
        switch (_kinematicFileType)
        {
        case KinematicFileTypes.AUTO_REGISTR:
            _transformParser = new TransformParser(textBoxKinematicFilename.Text);
            transforms = _transformParser.getFinalTransforms();
            break;

        case KinematicFileTypes.OUT_RT:
            throw new NotImplementedException("Can't yet read OutRT files");

        case KinematicFileTypes.MOTION:
            throw new NotImplementedException("Can't yet read Motion files");
        }
    }
    //lets load each bone
    for (int i = 0; i < TextureSettings.ShortBNames.Length; i++)
    {
        double[][] pts = DatParser.parseDatFile(getBoneFileName(TextureSettings.ShortBNames[i]));
        _bones[i] = Texture.createPointsFileObject(pts, TextureSettings.BoneColors[i]);
        //try and load transforms
        if (transforms != null && transforms.ContainsKey(TextureSettings.TransformBNames[i]))
        {
            Transform tfrm = new Transform();
            TransformParser.addTfmMatrixtoTransform((TransformMatrix)transforms[TextureSettings.TransformBNames[i]], tfrm);
            _bones[i].addTransform(tfrm);
        }
        _root.addChild(_bones[i]);
    }
    // Texture geometry uses the cropped volume dimensions and voxel spacing.
    _texture = new Texture(_side == WristFilesystem.Sides.LEFT ? Texture.Sides.LEFT : Texture.Sides.RIGHT,
        mri.Cropped_SizeX, mri.Cropped_SizeY, mri.Cropped_SizeZ,
        mri.voxelSizeX, mri.voxelSizeY, mri.voxelSizeZ);
    Separator plane1 = _texture.makeDragerAndTexture(voxels, Texture.Planes.XY_PLANE);
    Separator plane2 = _texture.makeDragerAndTexture(voxels, Texture.Planes.YZ_PLANE);
    _root.addChild(plane1);
    _root.addChild(plane2);
    _root.addChild(_texture.createKeyboardCallbackObject(_viewer.Parent_HWND));
    //returning mri in order to pass it into the texture controller, to contrust a texture for volume rendering
    return(mri);
}
/// <summary>
/// Initializes a new instance of <see cref="DatImporter"/>.
/// </summary>
/// <param name="parser">The parser used to import the DAT file.</param>
public DatImporter(DatParser parser)
{
    // Keep the caller-supplied parser; the builder always starts as a fresh
    // default DatBuilder.  NOTE(review): a null parser is accepted here and
    // would only fail later when used — confirm whether a guard is wanted.
    this.parser = parser;
    this.builder = new DatBuilder();
}
public void OnExecute()
{
    // CLI entry point: picks a file parser from --FileType and runs it in
    // import or export mode; optionally prints the game-list / opcode help text.
    if (Debug)
    {
        Program.debug = true;
    }
    if (!string.IsNullOrEmpty(FileType))
    {
        AbstractFileParser selclass = null;
        switch (FileType.ToLower())
        {
        case "cz0": selclass = new CZ0Parser(); break;
        case "cz1": selclass = new CZ1Parser(); break;
        case "cz1_4bit": selclass = new CZ1_4bitParser(); break;
        case "cz2": selclass = new CZ2Parser(); break;
        case "cz3": selclass = new CZ3Parser(); break;
        case "cz4": selclass = new CZ4Parser(); break;
        case "dat": selclass = new DatParser(); break;
        case "pak":
            // PAK archives optionally take an explicit text encoding.
            if (!string.IsNullOrEmpty(PakCoding))
            {
                selclass = new PAKManager(PakCoding);
            }
            else
            {
                selclass = new PAKManager();
            }
            break;
        case "psb": selclass = new PsbScript(); break;
        case "info": selclass = new FontInfoParser(); break;
        case "scr":
            // Script parsing needs an opcode table: either a custom file or a
            // named built-in game profile.
            if (!string.IsNullOrEmpty(CustomOpcodePath))
            {
                selclass = new ScriptParser(GameScript.CUSTOM, CustomOpcodePath);
            }
            else if (!string.IsNullOrEmpty(OpcodePath))
            {
                if (OpcodePath != "CUSTOM")
                {
                    selclass = new ScriptParser((GameScript)Enum.Parse(typeof(GameScript), OpcodePath, true));
                }
            }
            else
            {
                throw new Exception("Need Input OpcodePath or CustomOpcodePath!");
            }
            // Optional character table for custom text encodings.
            if (!string.IsNullOrEmpty(TBLFile))
            {
                CustomEncoding.LoadTbl(TBLFile);
            }
            break;
        default: break;
        }
        // NOTE(review): an unrecognized FileType leaves selclass null, so the
        // calls below would throw NullReferenceException — confirm intended.
        if (ParserMode.ToLower() == "import" || ParserMode.ToLower() == "i")
        {
            selclass.FileImport(FileName);
        }
        else if (ParserMode.ToLower() == "export" || ParserMode.ToLower() == "e")
        {
            selclass.FileExport(FileName);
        }
        else
        {
            throw new Exception("Need Input ParserMode!");
        }
    }
    if (GameList)
    {
        Console.WriteLine(@"Supported Games: Opcode name Game name & Platform SP 《Summer Pocket》Nintendo Switch CL 《Clannad》Nintendo Switch TAWL 《Tomoyo After Its a Wonderful Life CS Edition》Nintendo Switch FLOWERS 《Flowers - Shiki》 ISALND 《ISLAND》Psvita CUSTOM Read custom Opcode file. Path: OPCODE/{CUSTOM}.txt ");
    }
    if (OpcodeHelp)
    {
        Console.WriteLine(@" Byte, Byte2, Byte3, Byte4, UInt16, UInt32, StringUnicode, StringSJIS, StringUTF8, LenStringUnicode, LenStringSJIS, Opcode, Skip 待完善... ");
    }
}
public static void Main(string[] args)
{
    // Extracts a single file from the game's sqpack archives: resolves the
    // requested path to folder/file CRC hashes, finds the matching index entry,
    // then reads the file data out of the corresponding .dat file.
    Encoding.RegisterProvider(CodePagesEncodingProvider.Instance);
    LoadConfig();
    try
    {
        // Split args into positional values and "--name value" option pairs.
        var argValues = new List <string>(2);
        var argOptions = new Dictionary <string, string>(2);
        for (var i = 0; i < args.Length; i++)
        {
            var arg = args[i];
            if (arg.StartsWith("--"))
            {
                argOptions.Add(arg.Substring(2), args[++i]);
            }
            else
            {
                argValues.Add(arg);
            }
        }
        var fileName = argValues.FirstOrDefault();
        if (fileName == null)
        {
            Console.Error.WriteLine("Usage: dotnet Sqpack.CLI.dll <extract_file_name> [--game <game_dir>] [--output <output_path>]");
            return;
        }
        // An optional "index:path" prefix selects a specific index file.
        string indexName = null;
        if (fileName.Contains(":"))
        {
            var index = fileName.IndexOf(":", StringComparison.Ordinal);
            indexName = fileName.Substring(0, index);
            fileName = fileName.Substring(index + 1);
        }
        // Game directory: a --game option wins and is persisted to config;
        // otherwise fall back to the saved config value.
        if (argOptions.TryGetValue("game", out var gameDir))
        {
            config[GameDirKey] = gameDir;
        }
        else
        {
            gameDir = config.ContainsKey(GameDirKey) ? config[GameDirKey] : null;
        }
        if (gameDir == null)
        {
            Console.Error.WriteLine("Please specify game location using --game option.");
            return;
        }
        // ReSharper disable once ConvertIfStatementToConditionalTernaryExpression
        if (argOptions.TryGetValue("output", out var output))
        {
            output = Path.GetFullPath(output);
        }
        else
        {
            output = Directory.GetCurrentDirectory();
        }
        // Hash the file name and its folder path separately; the index stores
        // both hashes per entry.
        const string sep = "/";
        var fileParts = fileName.Split(new[] { sep }, StringSplitOptions.RemoveEmptyEntries);
        var fileHash = FFCrc.Compute(fileParts.Last());
        var folderHash = FFCrc.Compute(string.Join(sep, fileParts.Take(fileParts.Length - 1)));
        // Mirror the archive folder structure underneath the output directory.
        var outputPath = Path.Combine((new[] { output }).Concat(fileParts.Take(fileParts.Length - 1)).ToArray());
        var outputFile = Path.Combine(outputPath, fileParts.Last());
        var offset = 0;
        string datFile = null;
        // Search each folder table for the entry; the matching index file name
        // is rewritten to a .datN extension.  NOTE(review): confirm the
        // "(offset & 0xF) % 2" dat-number mapping against the sqpack format.
        foreach (var kv in GetFolders(indexName))
        {
            var result = SearchFile(kv.Value, folderHash, fileHash);
            if (result == null)
            {
                continue;
            }
            offset = result.Value;
            datFile = Path.ChangeExtension(kv.Key, string.Format(".dat{0}", (offset & 0xF) % 2));
            break;
        }
        if (datFile == null)
        {
            Console.Error.WriteLine("Can not find file \"{0}\".", fileName);
            return;
        }
        using (var stream = File.OpenRead(datFile))
        {
            var data = new DatParser(stream).GetFileData(offset);
            if (!Directory.Exists(outputPath))
            {
                Directory.CreateDirectory(outputPath);
            }
            File.WriteAllBytes(Path.Combine(outputFile), data);
            Console.WriteLine(outputFile);
        }
    }
    finally
    {
        // Persist config (e.g. a newly supplied game directory) on every exit path.
        SaveConfig();
    }
}
public void OnExecute()
{
    // CLI entry point: chooses a parser from --FileType, then imports or
    // exports FileName (a single file or every file in a directory) to
    // OutFileName; optionally prints the game-list / opcode help text.
    if (Debug)
    {
        Program.debug = true;
    }
    if (!string.IsNullOrEmpty(FileType))
    {
        AbstractFileParser selclass = null;
        switch (FileType.ToLower())
        {
        case "cz0": selclass = new CZ0Parser(); break;
        case "cz1": selclass = new CZ1Parser(); break;
        case "cz1_4bit": selclass = new CZ1_4bitParser(); break;
        case "cz2": selclass = new CZ2Parser(); break;
        case "cz3": selclass = new CZ3Parser(); break;
        case "cz4": selclass = new CZ4Parser(); break;
        case "dat": selclass = new DatParser(); break;
        case "pak":
            // PAK archives optionally take an explicit text encoding.
            if (!string.IsNullOrEmpty(PakCoding))
            {
                selclass = new PAKManager(PakCoding);
            }
            else
            {
                selclass = new PAKManager();
            }
            break;
        case "psb": selclass = new PsbScript(); break;
        case "info": selclass = new FontInfoParser(); break;
        case "scr":
            // Default to JSON when no script output format flag was given.
            if (!FormatOld && !FormatLua && !FormatLuaE && !FormatJson)
            {
                FormatJson = true;
            }
            if (FormatLua && FormatLuaE)
            {
                FormatLuaE = false; // prefer the re-importable Lua format
            }
            // Script parsing needs an opcode table: either a custom file or a
            // named built-in game profile.
            if (!string.IsNullOrEmpty(CustomOpcodePath))
            {
                selclass = new ScriptParser(GameScript.CUSTOM, CustomOpcodePath, FormatOld, FormatLua, FormatLuaE, FormatJson, OnlyText);
            }
            else if (!string.IsNullOrEmpty(OpcodePath))
            {
                if (OpcodePath != "CUSTOM")
                {
                    selclass = new ScriptParser((GameScript)Enum.Parse(typeof(GameScript), OpcodePath, true), "", FormatOld, FormatLua, FormatLuaE, FormatJson, OnlyText);
                }
            }
            else
            {
                throw new Exception("Need Input OpcodePath or CustomOpcodePath!");
            }
            // Optional character table for custom text encodings.
            if (!string.IsNullOrEmpty(TBLFile))
            {
                CustomEncoding.LoadTbl(TBLFile);
            }
            break;
        default: break;
        }
        // NOTE(review): an unrecognized FileType leaves selclass null, so the
        // calls below would throw NullReferenceException — confirm intended.
        if (ParserMode.ToLower() == "import" || ParserMode.ToLower() == "i")
        {
            if (File.Exists(FileName))
            {
                // input is a single file
                selclass.FileImport(FileName, OutFileName);
            }
            else if (Directory.Exists(FileName))
            {
                // input is a directory: import every file it contains
                string outFolder = OutFileName;
                if (!Directory.Exists(OutFileName))
                {
                    Console.WriteLine("输出目录不是文件夹,默认输出目录已更改");
                    outFolder = Path.GetDirectoryName(OutFileName);
                }
                var files = Directory.GetFiles(FileName, "*");
                foreach (var file in files)
                {
                    selclass.FileImport(file, Path.Combine(outFolder, Path.GetFileNameWithoutExtension(file)));
                }
            }
        }
        else if (ParserMode.ToLower() == "export" || ParserMode.ToLower() == "e")
        {
            if (File.Exists(FileName))
            {
                // input is a single file
                selclass.FileExport(FileName, OutFileName);
            }
            else if (Directory.Exists(FileName))
            {
                // input is a directory: export every file it contains
                string outFolder = OutFileName;
                if (!Directory.Exists(OutFileName))
                {
                    Console.WriteLine("输出目录不是文件夹,默认输出目录已更改");
                    outFolder = Path.GetDirectoryName(OutFileName);
                }
                var files = Directory.GetFiles(FileName, "*");
                foreach (var file in files)
                {
                    selclass.FileExport(file, Path.Combine(outFolder, Path.GetFileNameWithoutExtension(file)));
                }
            }
        }
        else
        {
            throw new Exception("Need Input ParserMode!");
        }
    }
    if (GameList)
    {
        Console.WriteLine(@"Supported Games: Opcode name Game name & Platform SP 《Summer Pocket》Nintendo Switch CL 《Clannad》Nintendo Switch TAWL 《Tomoyo After Its a Wonderful Life CS Edition》Nintendo Switch FLOWERS 《Flowers - Shiki》 ISALND 《ISLAND》Psvita CUSTOM Read custom Opcode file. Path: OPCODE/{CUSTOM}.txt ");
    }
    if (OpcodeHelp)
    {
        Console.WriteLine(@" Byte, Byte2, Byte3, Byte4, UInt16, UInt32, StringUnicode, StringSJIS, StringUTF8, LenStringUnicode, LenStringSJIS, Opcode, Skip 待完善... ");
    }
}
internal async Task StartAsync()
{
    // Downloads the selected mirror's filter list, parses it into entries while
    // reporting progress, pushes the result to every detected application, then
    // reports completion (or cancellation/failure) through `progress`.
    var message = "Done";
    progress.Report(UpdateState.Downloading, "Starting...");
    try
    {
        if (SelectedMirrorProvider == null)
        {
            progress.Report(new ProgressModel(UpdateState.Cancelled, "Please select a filter source", 0));
            return;
        }
        var uri = SelectedMirrorProvider.GetUrlForMirror();
        using (var filter = await downloader.DownloadFilter(new Uri(uri), cancellationToken.Token, progress))
        {
            cancellationToken.Token.ThrowIfCancellationRequested();
            if (filter == null)
            {
                progress.Report(new ProgressModel(UpdateState.Cancelled, "A filter wasn't downloaded successfully.", 0));
            }
            else if (filter.Exception != null)
            {
                // Cancellation is rethrown so the outer catch reports it uniformly.
                if (filter.Exception is OperationCanceledException)
                {
                    throw filter.Exception;
                }
                Trace.TraceError("Problem when downloading: " + filter.Exception);
                progress.Report(new ProgressModel(UpdateState.Cancelled, "Problem when downloading: " + filter.Exception.Message, 0));
                return;
            }
            else
            {
                // Rewind and parse the downloaded stream line by line; the
                // final `true` leaves the stream open for `filter` to own.
                filter.Stream.Seek(0, SeekOrigin.Begin);
                using (var reader = new StreamReader(filter.Stream, Encoding.Default, false, 65535, true))
                {
                    var line = await reader.ReadLineAsync();
                    while (line != null)
                    {
                        var entry = DatParser.ParseEntry(line);
                        if (entry != null)
                        {
                            filter.Entries.Add(entry);
                        }
                        var percent = (int)Math.Floor((double)filter.Stream.Position / filter.Stream.Length * 100);
                        await Task.Yield();
                        // Only report when the percentage advances, to throttle updates.
                        if (percent > ProgressValue)
                        {
                            progress.Report(new ProgressModel(UpdateState.Decompressing, "Parsed " + filter.Entries.Count + " entries", percent));
                        }
                        line = await reader.ReadLineAsync();
                    }
                }
                // Push the parsed filter to every detected application.
                foreach (var application in apps)
                {
                    Trace.TraceInformation("Updating app {0} {1}", application.Description, application.Version);
                    await application.Application.UpdateFilterAsync(filter, cancellationToken.Token, progress);
                }
            }
            if (filter?.FilterTimestamp != null)
            {
                message = $"Done. List timestamp: {filter.FilterTimestamp.Value.ToLocalTime()}";
            }
        }
    }
    catch (OperationCanceledException)
    {
        Trace.TraceWarning("Update was cancelled.");
        progress.Report(new ProgressModel(UpdateState.Cancelled, "Update was cancelled.", 0));
        return;
    }
    catch (Exception ex)
    {
        Trace.TraceError("Problem when updating: " + ex);
        progress.Report(new ProgressModel(UpdateState.Cancelled, "Problem when updating: " + ex.Message, 0));
        return;
    }
    progress.Report(UpdateState.Decompressing, "Cleaning up...", -1);
    // Force a full collection to release the (potentially large) filter buffers.
    GC.Collect();
    GC.WaitForPendingFinalizers();
    GC.Collect();
    Trace.TraceInformation(message);
    progress.Report(new ProgressModel(UpdateState.Done, message, 100));
    ShowNotification("Updated IP Filter", message, ToolTipIcon.Info);
}
/// <summary>
/// Initializes a new instance of <see cref="DatImporter"/> using a
/// <see cref="DatParser"/> configured with the supplied delimiters and a
/// default <see cref="DatBuilder"/>.
/// </summary>
/// <param name="delimiters">The delimiter set used to split DAT records.</param>
public DatImporter(Delimiters delimiters)
{
    this.parser = new DatParser(delimiters);
    this.builder = new DatBuilder();
}
private Separator readStackfile(string filename)
{
    // Parse the stack file's point data and wrap it in a renderable node.
    double[][] points = DatParser.parseDatFile(filename);
    return (Texture.createPointsFileObject(points));
}
static void Main(string[] args)
{
    // Builds shiragame.db: gathers ROM/serial/MAME data from OpenVGDB plus any
    // per-platform DAT files found under PlatformDats, then commits everything
    // to an on-disk SQLite database.
    var stone = new StoneProvider();
    Console.WriteLine("Using Stone Platforms v" + stone.StoneVersion);
    IEnumerable <RomInfo> datInfos = new List <RomInfo>();
    IEnumerable <SerialInfo> serialInfos = new List <SerialInfo>();
    IEnumerable <string> mameFilenames = new List <string>();
    // First run: scaffold one sub-folder per known platform.
    if (!Directory.Exists("PlatformDats"))
    {
        Console.WriteLine("PlatformDats folder does not exist.. Creating Directory Structure");
        Directory.CreateDirectory("PlatformDats");
        foreach (var platform in stone.Platforms)
        {
            Directory.CreateDirectory(Path.Combine("PlatformDats", platform.Key));
        }
    }
    if (File.Exists(Path.Combine("PlatformDats", "openvgdb.sqlite")))
    {
        Console.WriteLine("OpenVGDB Found. Parsing...");
        var openvgdb = new OpenVgdb(Path.Combine("PlatformDats", "openvgdb.sqlite"));
        serialInfos = serialInfos.Concat(openvgdb.GetSerialInfos().ToList());
        datInfos = datInfos.Concat(openvgdb.GetDatInfos().ToList());
        mameFilenames = mameFilenames.Concat(openvgdb.GetMameFiles().ToList());
    }
    foreach (string platformId in stone.Platforms.Select(p => p.Key))
    {
        if (!Directory.Exists(Path.Combine("PlatformDats", platformId)))
        {
            continue;
        }
        foreach (string file in Directory.EnumerateFiles(Path.Combine("PlatformDats", platformId)))
        {
            Console.Write(platformId + " found: " + Path.GetFileName(file));
            // .idlist files are serial lists; everything else is sniffed from
            // the file's first line.
            if (Path.GetExtension(file) == ".idlist")
            {
                Console.WriteLine(" is type of ID List");
                serialInfos = serialInfos.Concat(IdlistParser.ParseSerials(file, platformId));
                continue;
            }
            switch (DatParser.GetParser(File.ReadLines(file).First()))
            {
            case ParserClass.Cmp:
                Console.WriteLine(" is type of ClrMamePro");
                serialInfos = serialInfos.Concat(CmpParser.ParseSerials(file, platformId));
                datInfos = datInfos.Concat(CmpParser.Parse(file, platformId));
                continue;

            case ParserClass.Tdb:
                Console.WriteLine(" is type of GameTDB");
                serialInfos = serialInfos.Concat(GameTdbParser.ParseSerials(file, platformId));
                continue;

            case ParserClass.Xml:
                Console.WriteLine(" is type of Logiqix XML");
                datInfos = datInfos.Concat(XmlParser.Parse(file, platformId));
                continue;

            default:
                Console.WriteLine(" is invalid.");
                continue;
            }
        }
    }
    Console.WriteLine("Generating shiragame.db ...");
    var memoryDb = new ShiragameDb();
    if (!Directory.Exists("out"))
    {
        Directory.CreateDirectory("out");
    }
    var diskDb = new SqliteDatabase("out\\shiragame.db");
    memoryDb.Commit(datInfos.ToList());
    // De-duplicate serials per (platform, serial) pair before committing.
    memoryDb.Commit(serialInfos.DistinctBy(x => new { x.PlatformId, x.Serial }).ToList());
    memoryDb.Commit(mameFilenames.ToList());
    memoryDb.SaveTo(diskDb);
    // todo fix online backup API
}