/// <summary>
/// Verifies that FileWriter writes its contents to the stream obtained from
/// the mocked file system, and that the stream is closed afterwards.
/// </summary>
public void Contents()
{
    // Arrange: mock the file system so FileWriter writes into an inspectable stream.
    var filePath = "c:\file.txt";
    var expected = "testContents";
    var backingStream = new UnclosableMemoryStream();
    var fileSystemMock = new Mock<IFileSystem>();
    var fileMock = new Mock<IFile>();
    fileMock.Setup(f => f.Open(System.IO.FileMode.Create, System.IO.FileAccess.Write, System.IO.FileShare.Write)).Returns(backingStream);
    fileSystemMock.Setup(f => f.GetFile(filePath)).Returns(fileMock.Object);
    IoCContainer.Clear();
    IoCContainer.RegisterInstance(typeof(IFileSystem), fileSystemMock.Object);

    // Act: write through the writer under test.
    var sut = new FileWriter(filePath, expected);
    sut.Write();

    // Assert: the stream was closed and holds exactly the expected text.
    Assert.IsTrue(backingStream.ClosedCalled);
    backingStream.Position = 0;
    Assert.AreEqual(expected, new StreamReader(backingStream).ReadToEnd());
}
/// <summary>
/// Collects the module file names of every process that has a main window and
/// writes them to a results file; processes that cannot be inspected are
/// reported to an errors file instead.
/// </summary>
public void WriteFileNamesToConsole()
{
    // Only processes with a visible main window are of interest.
    var procs = from cal in Process.GetProcesses()
                where (IntPtr.Zero != (cal.MainWindowHandle))
                orderby cal.ProcessName
                select cal;
    var testresultsLocation = @"C:\Z_Temp\installers\{0}";
    var notePadsLocations = string.Format(testresultsLocation, "TestResults.txt");
    var errorPageLoc = string.Format(testresultsLocation, "TestResultsErrors.txt");
    var writer = new FileWriter();
    var listOfProcesses = new List<JFlowProcessActivator>(8);
    // StringBuilder avoids repeated string reallocation while accumulating errors in the loop.
    var errorMessage = new System.Text.StringBuilder("Current errors:");
    foreach (var proc in procs)
    {
        try
        {
            var pa = new JFlowProcessActivator { FileNames = new List<string>(proc.Modules.Count) };
            for (var i = 0; i < proc.Modules.Count; i++)
            {
                pa.FileNames.Add(proc.Modules[i].FileName);
            }
            pa.ProcessName = proc.ProcessName;
            listOfProcesses.Add(pa);
        }
        catch
        {
            // Reading Modules of system/elevated processes can throw; record the failure and continue.
            // Fixed typo in the log prefix ("Errror" -> "Error").
            "Error at".P(postText: proc.ProcessName);
            errorMessage.AppendFormat("\nCouldn't process : {0}", proc.ProcessName);
        }
    }
    writer.WriteApplicationToFile(listOfProcesses, notePadsLocations);
    var errorText = errorMessage.ToString();
    errorText.P();
    writer.WriteErrorsToFile(errorPageLoc, errorText);
}
/// <summary>Saving the board must produce the SavedBoard.bin file on disk.</summary>
public void file_is_written()
{
    var writer = new FileWriter();
    writer.SaveGame(Board.Access());
    Assert.IsTrue(File.Exists("SavedBoard.bin"));
}
/// <summary>
/// The entry point of the program, where the program control starts and ends.
/// Wires a pipeline of active objects (sentence reader -> vowel filter ->
/// vowel counter -> file writer) via channels, then loops printing the count.
/// </summary>
/// <param name="args">The command-line arguments.</param>
public static void Main(String[] args)
{
    Console.Write("Where would you like the output file?: ");
    String path = Console.ReadLine();
    // Semaphore used to pace the sentence reader from the main loop below.
    Semaphore askWait = new Semaphore(1);
    FileWriter fileWriter = new FileWriter(path);
    VowelCount vowelCount = new VowelCount();
    // Plug all the correct channels to each active object
    vowelCount.outputChannel = fileWriter.inputChannel;
    VowelFilter vowelFilter = new VowelFilter(vowelCount.inputChannel);
    SentenceReader sentenceReader = new SentenceReader(askWait);
    sentenceReader.outputChannel = vowelFilter.inputChannel;
    // Start all the active objects
    fileWriter.Start();
    vowelFilter.Start();
    vowelCount.Start();
    sentenceReader.Start();
    // Main loop never exits: print the current count, then release the
    // semaphore so the reader can process the next sentence.
    while (true)
    {
        vowelCount.PrintCount();
        askWait.Release();
    }
}
/// <summary>
/// A simple constructor that initializes the object with the given values.
/// </summary>
/// <param name="p_fdrFileDownloader">The <see cref="FileDownloader"/> from which to retrieve the block this
/// downloader will download.</param>
/// <param name="p_fmdInfo">The metadata of the file to be downloaded.</param>
/// <param name="p_fwrWriter">The writer to use to write the file to the disk.</param>
/// <param name="p_intBufferSize">The size of the buffer to send to the file writer.</param>
/// <param name="p_strUserAgent">The current User Agent.</param>
public BlockDownloader(FileDownloader p_fdrFileDownloader, FileMetadata p_fmdInfo, FileWriter p_fwrWriter, Int32 p_intBufferSize, string p_strUserAgent)
{
    m_strUserAgent = p_strUserAgent;
    m_intBufferSize = p_intBufferSize;
    m_fmdInfo = p_fmdInfo;
    m_fdrFileDownloader = p_fdrFileDownloader;
    m_fwrWriter = p_fwrWriter;
    // Method-group subscription; equivalent to new EventHandler(FileWriter_UnableToWrite).
    m_fwrWriter.UnableToWrite += FileWriter_UnableToWrite;
}
/// <summary>Application entry point: builds the mail pipeline and sends the message.</summary>
static void Main(string[] args)
{
    var logger = new FileWriter();
    var paramsReader = new ParamsReader(args, logger);

    // Pipeline: checker validates files, composer builds the message, sender ships it.
    var fileChecker = new FileChecker(paramsReader);
    var mailComposer = new MailMessageComposer(paramsReader);
    var mailSender = new MailClientSender(fileChecker, paramsReader, logger);
    mailSender.SetData(mailComposer);
}
/// <summary>Round-trip check: text written with WriteAllText must be read back unchanged.</summary>
public void WriteAllTextShouldWriteAllTextToTheFile()
{
    string targetPath = "../../DataManagers/answer-to-the-universe.txt";
    string expectedText = "42";
    var writer = new FileWriter();
    var reader = new FileReader();

    writer.WriteAllText(targetPath, expectedText);

    Assert.AreEqual(expectedText, reader.ReadAllText(targetPath));
}
/// <summary>
/// Creates the report implementation matching the requested type; file names
/// ending in ".xml" get an XML report, everything else falls back to HTML.
/// </summary>
/// <param name="reportType">The kind of report requested.</param>
/// <param name="reportName">The output name; its extension selects XML output.</param>
/// <returns>The resolved <see cref="IReport"/> instance.</returns>
public IReport ResolveReport(ReportType reportType, string reportName)
{
    var fileWriter = new FileWriter();
    if (reportType == ReportType.Failing)
        return new HtmlFailedReport(fileWriter, reportName);
    if (reportType == ReportType.TopTen)
        return new HtmlTopTenReport(fileWriter, reportName);
    // Ordinal comparison: file extensions are not culture-sensitive text
    // (the parameterless EndsWith overload is culture-dependent).
    if (reportName.EndsWith(".xml", StringComparison.Ordinal))
        return new XmlReport(fileWriter, reportName);
    return new HtmlReport(fileWriter, reportName);
}
/// <summary>
/// Test fixture setup: recreates a clean test directory, seeds account test
/// data, and constructs the FileWriter under test with substituted repositories.
/// </summary>
public void SetUp()
{
    // Start from an empty directory so leftovers from a previous run cannot
    // affect assertions.
    if (Directory.Exists(TestDirectory))
    {
        Directory.Delete(TestDirectory, true);
    }
    Directory.CreateDirectory(TestDirectory);
    SetUpAccountRepositoryTestData();
    _accountTagRepository = Substitute.For<IAccountTagRepository>();
    _journalRepository = Substitute.For<IJournalRepository>();
    _templateRepository = Substitute.For<ITemplateRepository>();
    _fileWriter = new FileWriter(_accountRepository, _accountTagRepository, _templateRepository, _journalRepository);
}
/// <summary>
/// Builds unigram and bigram tables (raw and smoothed) for each language from
/// the training database and writes them through a single FileWriter.
/// </summary>
private void btn_buildNgramfromDB_Click(object sender, EventArgs e)
{
    String[] languageCodes = "'eu';,'ca';,'gl';,'es';,'en';,'pt';".Split(',');
    FileWriter gramWriter = new FileWriter();
    for (int langIndex = 0; langIndex < languageCodes.Length; langIndex++)
    {
        // Pull the raw training rows for this language and clean them up.
        FetchFromDB fetcher = new FetchFromDB();
        DataTable rawTable = fetcher.getTrainingDataFor(languageCodes[langIndex]);
        fetcher.closeConnection();
        DataParser parser = new DataParser();
        DataTable cleanTable = parser.getCleanTable(rawTable);

        NgramBuilder builder = new NgramBuilder();

        // Unigrams: raw counts, then smoothing with delta = 0.1.
        DataTable uniGram = builder.GetGram(cleanTable, 1);
        double uniGramTotal = builder.getTotalFrequency();
        DataTable smoothedUniGram = builder.applySmoothing(uniGram, 0.1);
        double smoothedUniGramTotal = builder.getTotalFrequency();

        // Bigrams: same raw/smoothed pair.
        DataTable biGram = builder.GetGram(cleanTable, 2);
        double biGramTotal = builder.getTotalFrequency();
        DataTable smoothedBiGram = builder.applySmoothing(biGram, 0.1);
        double smoothedBiGramTotal = builder.getTotalFrequency();

        gramWriter.writeUniGram(uniGram, languageCodes[langIndex], "False", uniGramTotal);
        gramWriter.writeUniGram(smoothedUniGram, languageCodes[langIndex], "True", smoothedUniGramTotal);
        gramWriter.writeBiGram(biGram, languageCodes[langIndex], "False", biGramTotal);
        gramWriter.writeBiGram(smoothedBiGram, languageCodes[langIndex], "True", smoothedBiGramTotal);
        MessageBox.Show("Done " + languageCodes[langIndex]);
    }
    gramWriter.closeWriter();
}
/// <summary>
/// Spawns seven worker threads that each write a file, waits for them to
/// finish, and reports the total elapsed time.
/// </summary>
static void Main(string[] args)
{
    // start a timer
    var watch = Stopwatch.StartNew();
    var workers = new System.Collections.Generic.List<Thread>();
    for (int i = 0; i < 7; i++)
    {
        FileWriter fw = new FileWriter();
        fw.iteration = i;
        // Bug fix: the delegate previously referenced an undefined 'w'
        // instead of the writer created for this iteration.
        threadDelegate = new ThreadStart(fw.WriteFile);
        newThread = new Thread(threadDelegate);
        workers.Add(newThread);
        newThread.Start();
    }
    // Bug fix: join the workers so the timer measures the actual work,
    // not just thread start-up.
    foreach (var worker in workers)
    {
        worker.Join();
    }
    // stop timer
    watch.Stop();
    Console.WriteLine("\n\nExecution Time: " + watch.ElapsedMilliseconds + " ms");
}
/// <summary>
/// Writes a small text file into the app's private storage and launches the
/// system share chooser, exposing the file through SharingSupportProvider.
/// </summary>
public void OnShareFileClick(View v)
{
    try
    {
        // This file will be accessed by the target of the share through
        // the ContentProvider SharingSupportProvider.
        FileWriter fw = new FileWriter(GetFilesDir() + "/foo.txt");
        fw.Write("This is a file share");
        fw.Close();
        // Hand the file off via a share intent; the receiver resolves the
        // content URI against our provider.
        ShareCompat.IntentBuilder.From(this)
            .SetType("text/plain")
            .SetStream(Uri.Parse(SharingSupportProvider.CONTENT_URI + "/foo.txt"))
            .StartChooser();
    }
    catch (FileNotFoundException e)
    {
        e.PrintStackTrace();
    }
    catch (IOException e)
    {
        e.PrintStackTrace();
    }
}
/// <summary>
/// Used to create a list of processed hash items
/// </summary>
/// <param name="file_path">file where hash items are listed; a path without a
/// drive separator is resolved relative to Log.WorkDir</param>
internal HashItems(string file_path)
{
    // NOTE(review): lock(this) in a constructor has no practical effect (the
    // instance is not yet visible to other threads), and locking on 'this'
    // is discouraged — consider a private gate object.
    lock (this)
    {
        try
        {
            file_abs_path = file_path;
            if (!file_abs_path.Contains(":"))
                file_abs_path = Log.WorkDir + "\\" + file_abs_path;
            string data_string = "";
            if (!File.Exists(file_abs_path))
            {
                // Missing file: offer to create it; refusal aborts the app.
                if (!LogMessage.AskYesNo(file_abs_path + " does not exists.\nDo you want it to be created?\nIf you run the app in first time, press OK.", false))
                    Log.Exit(file_abs_path + " does not exists");
            }
            else
                data_string = File.ReadAllText(file_abs_path);
            // Keep a writer open on the hash file for the session
            // (second argument presumably selects append mode — confirm).
            fw = new FileWriter(file_abs_path, true, false, -1);
            if (string.IsNullOrEmpty(data_string))
                return;
            data_string = data_string.Trim();
            // Each line is "<key><separator><value>"; load all pairs into the map.
            Match m = Regex.Match(data_string, @"^(.*?)" + Config.Output.OutputFieldSeparator + "(.*?)$", RegexOptions.Compiled | RegexOptions.Multiline);
            while (m.Success)
            {
                hash_items[m.Groups[1].Value.Trim()] = m.Groups[2].Value.Trim();
                m = m.NextMatch();
            }
        }
        catch (Exception e)
        {
            LogMessage.Exit(e);
        }
    }
}
/// <summary>
/// Bounces the specified tags: scans each file character by character and
/// removes every markup tag whose text contains one of the given tags,
/// then writes the filtered content back over the file.
/// </summary>
/// <param name="tags">The tags.</param>
/// <param name="files">The files.</param>
public void Bounce(List<string> tags, List<string> files)
{
    foreach (var file in files)
    {
        using (var fileReader = new FileReader(file))
        {
            _length = fileReader.Length();
            // Accumulates the filtered file content.
            var output = string.Empty;
            // Current tag text collected since the last '<'.
            var trigger = string.Empty;
            var bounce = false;
            var close = false;
            _currentPosition = 0;
            // Scan the file one character at a time.
            while (_length > _currentPosition)
            {
                var s = fileReader.ReadString(1);
                _currentPosition++;
                if (s == ">")
                    close = true;
                if (s == "<")
                {
                    // A new tag starts: reset the collection state.
                    trigger = string.Empty;
                    bounce = false;
                    close = false;
                }
                if (!close)
                {
                    trigger = trigger + s;
                }
                if (close)
                {
                    if (s == ">")
                    {
                        trigger = trigger + s;
                        // Drop the whole tag if it matches any bounce tag.
                        foreach (var tag in tags)
                        {
                            if (trigger.Contains(tag))
                                bounce = true;
                        }
                        if (!bounce)
                        {
                            output = output + trigger;
                        }
                    }
                    else
                    {
                        output = output + s;
                    }
                }
            }
            fileReader.Close();
            // Rewrite the file with the filtered content.
            using (var fileWriter = new FileWriter())
            {
                fileWriter.WriteFile(output, file);
            }
        }
    }
}
/// <summary>
/// Write all given text to a file with given path.
/// </summary>
public static void WriteAllText(string path, string text)
{
    if (path == null)
        throw new ArgumentNullException("path");
    if (text == null)
        throw new ArgumentNullException("text");
    var writer = new FileWriter(new JFile(path));
    try
    {
        writer.Write(text, 0, text.Length);
    }
    finally
    {
        // Close the writer even when Write throws.
        writer.Close();
    }
}
/// <summary>
/// Wires up the facade's collaborators: a reader for input, the cipher
/// engine, and a writer for output.
/// </summary>
public EncryptFacade()
{
    writer = new FileWriter();
    cipher = new NewCipherMachine();
    reader = new FileReader();
}
// Flushes any remaining buffered bytes to the file and releases the write
// buffer. Returns the first recorded write error (RC value of p.FWErr);
// 'eof' receives the offset one past the last byte written.
static RC FileWriterFinish(Context ctx, FileWriter p, ref long eof)
{
    // Only flush when no prior error occurred and the buffer holds data.
    if (p.FWErr == 0 && C._ALWAYS(p.Buffer) && p.BufEnd > p.BufStart)
        p.FWErr = p.File.Write(p.Buffer[p.BufStart], p.BufEnd - p.BufStart, p.WriteOffset + p.BufStart);
    eof = (p.WriteOffset + p.BufEnd);
    C._tagfree(ctx, ref p.Buffer.data);
    RC rc = (RC)p.FWErr;
    // Clear the writer's fields so the structure can be reused.
    p._memset();
    return rc;
}
/// <summary>Emits the generated Create method for far-branch instructions into <paramref name="writer"/>.</summary>
protected abstract void GenCreateFarBranch(FileWriter writer, CreateMethod method);
/// <summary>Emits the generated Create method for 64-bit memory-operand instructions into <paramref name="writer"/>.</summary>
protected abstract void GenCreateMemory64(FileWriter writer, CreateMethod method);
/// <summary>Finishes this statement by recording it with the generic "other" kind.</summary>
public void Done(FileWriter writer)
{
    SetKind(writer, StmtKind.Other);
}
/// <summary>
/// Serializes this surface's fields to a byte array in a fixed order:
/// scalar fields, 13 mip offsets, trailing fields, 4 component-selector
/// bytes, and 5 register values; missing array slots are written as zero.
/// </summary>
public byte[] Write()
{
    MemoryStream mem = new MemoryStream();
    FileWriter writer = new FileWriter(mem);
    // Output is big-endian.
    writer.ByteOrder = Syroot.BinaryData.ByteOrder.BigEndian;
    writer.Write(dim);
    writer.Write(width);
    writer.Write(height);
    writer.Write(depth);
    writer.Write(numMips);
    writer.Write(format);
    writer.Write(aa);
    writer.Write(use);
    writer.Write(imageSize);
    writer.Write(imagePtr);
    writer.Write(mipSize);
    writer.Write(mipPtr);
    writer.Write(tileMode);
    writer.Write(swizzle);
    writer.Write(alignment);
    writer.Write(pitch);
    // Always 13 mip-offset slots; unused slots are zero-padded.
    for (int i = 0; i < 13; i++)
    {
        if (mipOffset.Length > i)
        {
            writer.Write(mipOffset[i]);
        }
        else
        {
            writer.Write(0);
        }
    }
    writer.Write(firstMip);
    writer.Write(imageCount);
    writer.Write(firstSlice);
    writer.Write(numSlices);
    // 4 component-selector bytes, zero when missing.
    for (int i = 0; i < 4; i++)
    {
        if (compSel != null && compSel.Length > i)
        {
            writer.Write(compSel[i]);
        }
        else
        {
            writer.Write((byte)0);
        }
    }
    // 5 register values, zero when missing.
    for (int i = 0; i < 5; i++)
    {
        if (texRegs != null && texRegs.Length > i)
        {
            writer.Write(texRegs[i]);
        }
        else
        {
            writer.Write(0);
        }
    }
    return (mem.ToArray());
}
/// <summary>Serializes this entry: the ID followed by the two flag values, in that order.</summary>
public void Write(FileWriter writer)
{
    writer.Write(ID);
    writer.Write(flag1);
    writer.Write(flag2);
}
/// <summary>Writes this element; defers entirely to the base layout serialization.</summary>
public override void Write(FileWriter writer, LayoutHeader header)
{
    base.Write(writer, header);
}
/// <summary>Serializes the transform: translation, rotation, then scale.</summary>
public void Write(FileWriter writer)
{
    writer.Write(Translate);
    writer.Write(Rotate);
    writer.Write(Scale);
}
/// <summary>Writes the material count as a ushort, then advances 2 bytes
/// (presumably padding/reserved bytes — confirm against the format spec).</summary>
public override void Write(FileWriter writer, LayoutHeader header)
{
    writer.Write((ushort)Materials.Count);
    writer.Seek(2);
}
// Intentionally a no-op: this type writes no data of its own.
// NOTE(review): the parameter is named 'reader' but its type is FileWriter —
// rename to 'writer' once callers using named arguments are confirmed absent.
public void Write(FileWriter reader) { }
// Appends dataLength bytes from 'data' to the writer's buffer, flushing the
// buffer to the file each time it fills. Stops early if a write error has
// already been recorded in p.FWErr.
static void FileWriterWrite(FileWriter p, byte[] data, int dataLength)
{
    int remain = dataLength;
    while (remain > 0 && p.FWErr == 0)
    {
        // Copy as much as fits in the remaining buffer space.
        int copy = remain;
        if (copy > (p.Buffer.length - p.BufEnd))
            copy = p.Buffer.length - p.BufEnd;
        C._memcpy(p.Buffer[p.BufEnd], data[dataLength - remain], copy);
        p.BufEnd += copy;
        if (p.BufEnd == p.Buffer.length)
        {
            // Buffer full: flush it and advance the file offset.
            p.FWErr = p.File.Write(p.Buffer[p.BufStart], p.BufEnd - p.BufStart, p.WriteOffset + p.BufStart);
            p.BufStart = p.BufEnd = 0;
            p.WriteOffset += p.Buffer.length;
        }
        Debug.Assert(p.BufEnd < p.Buffer.length);
        remain -= copy;
    }
}
/// <summary>Records this statement's kind after translating the accessor-statement kind.</summary>
public void SetKind(FileWriter writer, ImplAccStatementKind kind)
{
    SetKind(writer, GetStmtKind(kind));
}
/// <summary>
/// Serializes the fast-formatter table: per instruction, one flags byte
/// followed — only when it differs from the previous entry — by the
/// compressed mnemonic string index.
/// </summary>
/// <param name="genTypes">Registry of generated enum types.</param>
/// <param name="writer">Destination for the emitted table.</param>
/// <param name="stringsTable">Interned mnemonic strings.</param>
protected void SerializeTable(GenTypes genTypes, FileWriter writer, StringsTable stringsTable)
{
    var fastFmtFlags = genTypes[TypeIds.FastFmtFlags];
    var hasVPrefixEnum = fastFmtFlags[nameof(FastFmtFlags.HasVPrefix)];
    var sameAsPrevEnum = fastFmtFlags[nameof(FastFmtFlags.SameAsPrev)];
    var flagsValues = new List<EnumValue>();
    int index = -1;
    uint prevMnemonicStringIndex = uint.MaxValue;
    foreach (var def in defs)
    {
        index++;
        var code = def.Code;
        // The table is indexed by Code value, so defs must be in Code order.
        if (code.Value != (uint)index)
        {
            throw new InvalidOperationException();
        }
        flagsValues.Clear();
        if (index != 0)
        {
            writer.WriteLine();
        }
        writer.WriteCommentLine(code.ToStringValue(idConverter));
        var mnemonic = def.Mnemonic;
        uint mnemonicStringIndex = stringsTable.GetIndex(mnemonic, ignoreVPrefix: true, out var hasVPrefix);
        // (Removed an unused local that copied def.Flags.)
        if (hasVPrefix)
        {
            flagsValues.Add(hasVPrefixEnum);
        }
        bool isSame = false;
        if (mnemonicStringIndex == prevMnemonicStringIndex)
        {
            isSame = true;
            flagsValues.Add(sameAsPrevEnum);
        }
        if (def.Flags is EnumValue flags2)
        {
            flagsValues.Add(flags2);
        }
        else if (def.Flags is OrEnumValue flags3)
        {
            flagsValues.AddRange(flags3.Values);
        }
        else
        {
            throw new InvalidOperationException();
        }
        uint flagsValue = 0;
        foreach (var enumValue in flagsValues)
        {
            flagsValue |= enumValue.Value;
        }
        // All flags must fit in the single byte written below.
        if (flagsValue > byte.MaxValue)
        {
            throw new InvalidOperationException();
        }
        writer.WriteByte((byte)flagsValue);
        string comment = flagsValues.Count switch
        {
            0 => "No flags set",
            1 => flagsValues[0].ToStringValue(idConverter),
            _ => new OrEnumValue(fastFmtFlags, flagsValues.ToArray()).ToStringValue(idConverter),
        };
        writer.WriteCommentLine(comment);
        // We save 4KB (11,595 -> 7,435 bytes)
        if (!isSame)
        {
            writer.WriteCompressedUInt32(mnemonicStringIndex);
            writer.WriteCommentLine($"{mnemonicStringIndex} = \"{mnemonic}\"");
        }
        prevMnemonicStringIndex = mnemonicStringIndex;
    }
}
/// <summary>Creates a generator over the given file system with the supplied settings and output writer.</summary>
public Generator(IFileSystem fileSystem, Settings settings, FileWriter fileWriter)
{
    _fileWriter = fileWriter;
    _settings = settings;
    _fs = fileSystem;
}
/// <summary>Serializes the model as a "gfbmdl" flat buffer and writes it to the stream.</summary>
public void SaveFile(System.IO.Stream stream)
{
    using (var writer = new FileWriter(stream))
    {
        var payload = FlatBufferConverter.SerializeFrom<Model>(Model, "gfbmdl");
        writer.Write(payload);
    }
}
/// <summary>Saves the base state first, then persists whether this instance is broken.</summary>
public override void Save(FileWriter stream)
{
    base.Save(stream);
    stream.WriteBoolean(Broken);
}
/// <summary>Emits the generated Create method for the xbegin instruction into <paramref name="writer"/>.</summary>
protected abstract void GenCreateXbegin(FileWriter writer, CreateMethod method);
// Intentionally a no-op: this type has no data of its own to serialize.
public void Write(FileWriter writer) { }
/// <summary>Emits the generated Create method for string instructions using ES:RDI and Seg:RSI operands.</summary>
protected abstract void GenCreateString_ESRDI_SegRSI(FileWriter writer, CreateMethod method, StringMethodKind kind, string methodBaseName, EnumValue code);
// Intentionally a no-op: nothing is serialized for any version.
public void Write(FileWriter writer, uint version) { }
/// <summary>Act phase of the test fixture: constructs the FileWriter under test.</summary>
protected override void Act()
{
    Sut = new FileWriter(FileImpl);
}
/// <summary>Serializes this entry: hash, index, then the unknown field, in that order.</summary>
public void Write(FileWriter writer)
{
    writer.Write(hash);
    writer.Write(Index);
    writer.Write(unknown);
}
/// <summary>
/// Save this document to the file with given path.
/// </summary>
public void Save(string path, SaveOptions options)
{
    var output = new FileWriter(path);
    try
    {
        ToXml(output, options);
    }
    finally
    {
        // Flush and close even when serialization throws.
        output.Flush();
        output.Close();
    }
}
/// <summary>Writes this block 16-byte aligned: the 64-bit data offset entry
/// (WriteUint64Offset presumably records/back-patches DataOffset — confirm)
/// followed by the compressed payload.</summary>
public void WriteBlock(FileWriter writer)
{
    writer.Align(16);
    writer.WriteUint64Offset(DataOffset);
    writer.Write(CompressedData);
}
/// <summary>
/// Write all lines to a file with given path.
/// </summary>
public static void WriteAllLines(string path, IEnumerable<string> lines)
{
    if (path == null)
        throw new ArgumentNullException("path");
    if (lines == null)
        throw new ArgumentNullException("lines");
    var writer = new FileWriter(new JFile(path));
    try
    {
        // Platform line separator, resolved once outside the loop.
        var separator = JSystem.GetProperty("line.separator");
        foreach (var line in lines)
        {
            if (line != null)
                writer.Write(line, 0, line.Length);
            writer.Write(separator, 0, separator.Length);
        }
    }
    finally
    {
        writer.Close();
    }
}
/// <summary>
/// Generates C# source for every STU instance and enum in the structured-data
/// dump. Types with a recovered real name get their own file; the rest are
/// collected into shared "Misc" files for types and enums respectively.
/// </summary>
/// <param name="args">args[1] = output directory; args[2] (optional) = data
/// directory; args[3..] = extra data sets to load.</param>
public ModeResult Run(string[] args)
{
    if (args.Length < 2)
    {
        Console.Out.WriteLine("Missing required arg: \"output\"");
        return (ModeResult.Fail);
    }
    string outDirectory = args[1];
    string dataDirectory;
    if (args.Length >= 3)
    {
        dataDirectory = args[2];
    }
    else
    {
        dataDirectory = StructuredDataInfo.GetDefaultDirectory();
    }
    string[] extraData = args.Skip(3).ToArray();
    _info = new StructuredDataInfo(dataDirectory);
    foreach (string extra in extraData)
    {
        _info.LoadExtra(extra);
    }
    const string stuTypeNamespace = "TankLib.STU.Types";
    const string stuEnumNamespace = "TankLib.STU.Types.Enums";
    string generatedDirectory = Path.Combine(outDirectory, "Generated");
    string generatedEnumsDirectory = Path.Combine(outDirectory, "Generated", "Enums");
    // Catch-all files for types/enums without a recovered name.
    var genericTypeFile = new FileWriter(Path.Combine(generatedDirectory, "Misc.cs"), stuTypeNamespace);
    var genericEnumsFile = new FileWriter(Path.Combine(generatedEnumsDirectory, "Misc.cs"), stuEnumNamespace);
    List<FileWriter> extraFileWriters = new List<FileWriter>();
    Directory.CreateDirectory(generatedDirectory);
    Directory.CreateDirectory(generatedEnumsDirectory);
    // Routes a builder into the shared Misc file, or into its own per-type
    // file when the type's real name is known.
    void Build(ClassBuilder classBuilder, bool isEnum)
    {
        FileWriter fileWriter = isEnum ? genericEnumsFile : genericTypeFile;
        if (classBuilder.HasRealName)
        {
            fileWriter = new FileWriter(Path.Combine(isEnum ? generatedEnumsDirectory : generatedDirectory, classBuilder.Name + ".cs"), isEnum ? stuEnumNamespace : stuTypeNamespace);
            extraFileWriters.Add(fileWriter);
        }
        classBuilder.Build(fileWriter);
    }
    // First field seen for each enum type hash; consulted when generating enums.
    Dictionary<uint, FieldNew> enumFields = new Dictionary<uint, FieldNew>();
    foreach (KeyValuePair<uint, InstanceNew> instance in _info.Instances.OrderBy(x => x.Value.Hash2))
    {
        //if (_info.BrokenInstances.Contains(instance.Key)) {
        //    continue;
        //}
        //if (instance.Key == 0x440233A5) { // for generating the mirror types with oldhash
        //    continue;
        //}
        if (instance.Key == 0x2BB2C217)
        {
            continue; // references mirror data. todo: handle better
        }
        var tree = DumpHashes.GetParentTree(_info, instance.Value);
        if (tree.Contains(0x54D6A5F9u))
        {
            continue; // ignore MirrorData (thx tim)
        }
        InstanceBuilder instanceBuilder = new InstanceBuilder(_info, instance.Value);
        Build(instanceBuilder, false);
        // Remember the enum types referenced by fields (serialization types
        // 8 and 9 — presumably the enum field kinds; confirm).
        foreach (var field in instance.Value.m_fields)
        {
            if (field.m_serializationType != 8 && field.m_serializationType != 9)
            {
                continue;
            }
            var enumType = field.TypeHash2;
            if (!enumFields.ContainsKey(enumType))
            {
                enumFields[enumType] = field;
            }
        }
    }
    foreach (KeyValuePair<uint, EnumNew> enumData in _info.Enums.OrderBy(x => x.Value.Hash2))
    {
        FieldNew field;
        if (!enumFields.TryGetValue(enumData.Key, out field))
        {
            // Unreferenced enums still get generated, with a default 4-byte size.
            field = new FieldNew
            {
                m_typeHash = enumData.Key.ToString("X8"),
                m_size = 4
            };
            Logger.Warn("Enum", $"Enum {enumData.Value.Hash2:X8} is not referenced by a field");
        }
        EnumBuilder enumBuilder = new EnumBuilder(_info, field);
        Build(enumBuilder, true);
    }
    // Flush the shared files and every per-type file.
    genericTypeFile.Finish();
    genericEnumsFile.Finish();
    foreach (FileWriter writer in extraFileWriters)
    {
        writer.Finish();
    }
    return (ModeResult.Success);
}
/// <summary>Default generation sequence: emit the Create methods (group 0), then the remaining output.</summary>
protected virtual void Generate(FileWriter writer)
{
    GenCreateMethods(writer, 0);
    GenTheRest(writer);
}
/// <summary>
/// Writes accuracy analysis files for the unigram and bigram classifiers:
/// overall accuracy, per-language accuracy, and a row-normalized confusion
/// matrix for each model. The original heavily copy-pasted per-index code is
/// replaced by local helper functions producing identical output.
/// </summary>
private void btn_generateAccuracyData_Click(object sender, EventArgs e)
{
    FileWriter uniGramResultWriter = new FileWriter("analysis-unigram");
    FileWriter biGramResultWriter = new FileWriter("analysis-bigram");
    StringBuilder builderUniAnalysis = new StringBuilder();
    StringBuilder builderBiAnalysis = new StringBuilder();

    // Sum of one confusion-matrix row = number of tweets whose true label is i.
    double RowSum(double[,] m, int i)
    {
        double sum = 0.0;
        for (int j = 0; j < 6; j++) sum += m[i, j];
        return sum;
    }
    // Sum of the diagonal = number of correctly labelled tweets.
    double DiagonalSum(double[,] m)
    {
        double sum = 0.0;
        for (int i = 0; i < 6; i++) sum += m[i, i];
        return sum;
    }
    // Per-language accuracy lines; the last language is followed by two
    // extra blank lines, matching the original layout.
    void AppendLanguageAccuracies(StringBuilder builder, double[,] m)
    {
        for (int i = 0; i < 6; i++)
        {
            builder.Append("Accuracy of Language = " + LanguagesFull[i] + " " + Math.Round((m[i, i] * 100) / RowSum(m, i), 2) + "%").Append("\n");
            if (i == 5) builder.Append("\n").Append("\n");
        }
    }
    // Confusion matrix rendered as percentages of each row total.
    string BuildMatrix(double[,] m)
    {
        string text = " ";
        for (int i = 0; i < 6; i++) text = text + LanguagesFull[i] + " ";
        text = text + "\n";
        for (int i = 0; i < 6; i++)
        {
            text = text + LanguagesFull[i] + " ";
            for (int j = 0; j < 6; j++) text = text + Math.Round((m[i, j] * 100) / RowSum(m, i), 2) + "%" + " ";
            text = text + "\n\n\n\n";
        }
        return text;
    }

    // NOTE(review): Math.Round is applied BEFORE dividing by totalTweet, so
    // the overall figure is rounded oddly — preserved as-is to keep output
    // identical to the original.
    builderUniAnalysis.Append("Overall Accuracy of Unigram = " + Math.Round((DiagonalSum(labelingUniMatrixFrequency) * 100), 2) / totalTweet + "%");
    builderBiAnalysis.Append("Overall Accuracy of Bigram = " + Math.Round((DiagonalSum(labelingBiMatrixFrequency) * 100), 2) / totalTweet + "%");
    builderBiAnalysis.Append("\n").Append("\n").Append("\n").Append("\n");
    builderUniAnalysis.Append("\n").Append("\n").Append("\n").Append("\n");

    // Unigram report: per-language accuracies, then the confusion matrix.
    AppendLanguageAccuracies(builderUniAnalysis, labelingUniMatrixFrequency);
    builderUniAnalysis.Append("Confusion Matrix for Unigram:").Append("\n");
    builderUniAnalysis.Append(BuildMatrix(labelingUniMatrixFrequency));
    uniGramResultWriter.resultsWriter(builderUniAnalysis.ToString());
    uniGramResultWriter.closeAnalysisWriter();

    // Bigram report, same structure.
    AppendLanguageAccuracies(builderBiAnalysis, labelingBiMatrixFrequency);
    builderBiAnalysis.Append("Confusion Matrix for Bigram:").Append("\n");
    builderBiAnalysis.Append(BuildMatrix(labelingBiMatrixFrequency));
    biGramResultWriter.resultsWriter(builderBiAnalysis.ToString());
    biGramResultWriter.closeAnalysisWriter();
    MessageBox.Show("Done");
}
// Initializes the incremental file writer 'p' over 'file', starting at byte
// offset 'start'. Allocates a page-sized buffer from 'ctx'; on allocation
// failure p.FWErr is set to RC.NOMEM and the writer is unusable.
static void FileWriterInit(Context ctx, VFile file, FileWriter p, long start)
{
    p._memset();
    int pageSize = ctx.DBs[0].Bt.GetPageSize();
    p.Buffer.data = (byte[])C._tagalloc(ctx, pageSize);
    if (p.Buffer.data == null)
        p.FWErr = RC.NOMEM;
    else
    {
        // Align the buffer window so WriteOffset is page-aligned; writing
        // begins mid-buffer at (start % pageSize).
        p.BufEnd = p.BufStart = (start % pageSize);
        p.WriteOffset = start - p.BufStart;
        p.Buffer.length = pageSize;
        p.File = file;
    }
}
/// <summary>
/// Classifies every test tweet with the Naive Bayes models (unigram and
/// bigram), accumulating predictions into the confusion-matrix frequency
/// tables, and writes per-tweet result files for both models.
/// </summary>
private void btn_testDatafromFiles_Click(object sender, EventArgs e)
{
    // Training tweet counts per language, passed to the classifier
    // (presumably used as class priors — confirm in NaiveBayesClassifier).
    int[] countOfTrainingTweetforLanguage = { 374, 1493, 456, 12855, 971, 2169 };
    FileWriter uniGramResultWriter = new FileWriter("results-unigram");
    FileWriter biGramResultWriter = new FileWriter("results-bigram");
    StringBuilder builderUniResult = new StringBuilder();
    StringBuilder builderBiResult = new StringBuilder();
    builderUniResult.Append("TweetID" + " " + "Likely Language").Append("\n");
    builderBiResult.Append("TweetID" + " " + "Likely Language").Append("\n");
    // Reset both confusion matrices before the run.
    for (int i = 0; i < 6; i++)
        for (int j = 0; j < 6; j++)
        {
            labelingUniMatrixFrequency[i, j] = 0.0;
            labelingBiMatrixFrequency[i, j] = 0.0;
        }
    for (int i = 0; i < Languages.Length; i++)
    {
        FetchFromFolderFiles fetchFromFolder = new FetchFromFolderFiles("Testingnlp");
        Hashtable languageTweetsClean = new Hashtable();
        languageTweetsClean = fetchFromFolder.getTestingDataFor(Languages[i]);
        NaiveBayesClassifier NBC = new NaiveBayesClassifier(countOfTrainingTweetforLanguage, totalTweet, gramDictionary);
        foreach (DictionaryEntry entry in languageTweetsClean)
        {
            // Unigram prediction: row = true language i, column = predicted language.
            Double[] uniConfidence = NBC.ApplyBayesOnUnigram(entry.Value.ToString());
            int IndexOfMaxUniConfidence = NBC.getMaxConfidence(uniConfidence);
            labelingUniMatrixFrequency[i, IndexOfMaxUniConfidence] = labelingUniMatrixFrequency[i, IndexOfMaxUniConfidence] + 1;
            builderUniResult.Append(entry.Key.ToString() + " " + Languages[IndexOfMaxUniConfidence]);
            builderUniResult.Append("\n");
            // Bigram prediction, recorded the same way.
            Double[] biConfidence = NBC.ApplyBayesOnBigram(entry.Value.ToString());
            int IndexOfMaxBiiConfidence = NBC.getMaxConfidence(biConfidence);
            labelingBiMatrixFrequency[i, IndexOfMaxBiiConfidence] = labelingBiMatrixFrequency[i, IndexOfMaxBiiConfidence] + 1;
            builderBiResult.Append(entry.Key.ToString() + " " + Languages[IndexOfMaxBiiConfidence]);
            builderBiResult.Append("\n");
        }
    }
    uniGramResultWriter.resultsWriter(builderUniResult.ToString());
    biGramResultWriter.resultsWriter(builderBiResult.ToString());
    uniGramResultWriter.closeAnalysisWriter();
    biGramResultWriter.closeAnalysisWriter();
    MessageBox.Show("Done");
}
/// <summary>
/// Ends the hash-item session: closes every pooled writer, rewrites each hash
/// file from its in-memory entries (keeping a ".back" copy of the old file),
/// and empties the pool.
/// </summary>
static internal void ClearSession()
{
    lock (HashItemsPool)
    {
        foreach (string file_path in HashItemsPool.Keys)
        {
            HashItems his = (HashItems)HashItemsPool[file_path];
            his.fw.Close();
            //rewrite the hash item file to remove old values
            string file_abs_path = file_path;
            if (!file_abs_path.Contains(":"))
                file_abs_path = Log.WorkDir + "\\" + file_abs_path;
            string file_abs_path_old = file_abs_path + ".back";
            File.Delete(file_abs_path_old);
            File.Move(file_abs_path, file_abs_path_old);
            FileWriter fw = new FileWriter(file_abs_path, false, false, -1);
            try
            {
                foreach (string key in his.Keys)
                    fw.WriteLine(key, (string)his[key]);
            }
            finally
            {
                // Bug fix: the rewrite writer was never closed, leaking the
                // file handle and risking unflushed data.
                fw.Close();
            }
        }
        HashItemsPool.Clear();
    }
}
/// <summary>
/// Saves the container big-endian: header first, then each block. Surface
/// blocks get freshly serialized surface info; image and mip data blocks are
/// preceded by newly computed alignment blocks (existing AlignData blocks
/// are skipped). Block type IDs depend on the file version.
/// </summary>
public void Save(System.IO.Stream stream)
{
    using (FileWriter writer = new FileWriter(stream, true))
    {
        writer.ByteOrder = Syroot.BinaryData.ByteOrder.BigEndian;
        header.Write(writer);
        // Block type ids differ between container versions.
        uint surfBlockType;
        uint dataBlockType;
        uint mipBlockType;
        if (header.MajorVersion == 6 && header.MinorVersion == 0)
        {
            surfBlockType = 0x0A;
            dataBlockType = 0x0B;
            mipBlockType = 0x0C;
        }
        else if (header.MajorVersion == 6 || header.MajorVersion == 7)
        {
            surfBlockType = 0x0B;
            dataBlockType = 0x0C;
            mipBlockType = 0x0D;
        }
        else
        {
            throw new Exception($"Unsupported GTX version {header.MajorVersion}");
        }
        // Each block kind advances its own texture index.
        int imageInfoIndex = -1;
        int imageBlockIndex = -1;
        int imageMipBlockIndex = -1;
        writer.Seek(header.HeaderSize, System.IO.SeekOrigin.Begin);
        foreach (var block in blocks)
        {
            if ((uint)block.BlockType == surfBlockType)
            {
                imageInfoIndex++;
                imageBlockIndex++;
                imageMipBlockIndex++;
                // Refresh the serialized surface info before writing it out.
                block.data = textures[imageInfoIndex].surface.Write();
                block.Write(writer);
            }
            else if ((uint)block.BlockType == dataBlockType)
            {
                var tex = textures[imageBlockIndex];
                var pos = writer.Position;
                uint Alignment = tex.surface.alignment;
                //Create alignment block first
                uint dataAlignment = GetAlignBlockSize((uint)pos + 32, Alignment);
                GTXDataBlock dataAlignBlock = new GTXDataBlock(BlockType.AlignData, dataAlignment, 0, 0);
                dataAlignBlock.Write(writer);
                block.data = tex.surface.data;
                block.Write(writer);
            }
            else if ((uint)block.BlockType == mipBlockType)
            {
                var tex = textures[imageMipBlockIndex];
                var pos = writer.Position;
                uint Alignment = tex.surface.alignment;
                //Create alignment block first
                uint dataAlignment = GetAlignBlockSize((uint)pos + 32, Alignment);
                GTXDataBlock dataAlignBlock = new GTXDataBlock(BlockType.AlignData, dataAlignment, 0, 0);
                dataAlignBlock.Write(writer);
                if (tex.surface.mipData == null || tex.surface.mipData.Length <= 0)
                {
                    throw new Exception("Invalid mip data!");
                }
                block.data = tex.surface.mipData;
                block.Write(writer);
            }
            else if (block.BlockType != BlockType.AlignData)
            {
                // Pass every other block through unchanged; original AlignData
                // blocks are dropped because new ones were generated above.
                block.Write(writer);
            }
        }
    }
}
/// <summary>
/// Trains unigram and bigram language models — raw and add-0.1 smoothed — for every
/// configured language from the "Trainingnlp" folder, stores the resulting probability
/// tables in <c>gramDictionary</c>, and writes them out through one shared FileWriter.
/// </summary>
private void btn_probabilityNgramfromFile_Click(object sender, EventArgs e)
{
    FileWriter FW = new FileWriter();
    for (int i = 0; i < Languages.Length; i++)
    {
        gramDictionary.Add(Languages[i], new LanguageObject());

        // Load and clean the raw training data for this language.
        FetchFromFolderFiles fetchFromFolder = new FetchFromFolderFiles("Trainingnlp");
        DataTable dataTable = fetchFromFolder.getTrainingDataFor(Languages[i]);
        DataParser DP = new DataParser();
        DataTable cleanTable = new DataTable();
        cleanTable = DP.getCleanTable(dataTable);

        NgramBuilder NB = new NgramBuilder();

        // --- Unigram, unsmoothed ---
        DataTable uniGram = new DataTable();
        uniGram = NB.GetGram(cleanTable, 1);
        // NOTE(review): getTotalFrequency() appears to report the total from the
        // most recent GetGram/applySmoothing call, so call order matters throughout.
        double uniGramN = NB.getTotalFrequency();
        DataTable unSmoothedProbabilityUnigramDataTable = new DataTable();
        unSmoothedProbabilityUnigramDataTable = NB.ConvertTableToProbabilityTable(uniGram, uniGramN);
        Hashtable unSmoothedProbabilityUnigram = NB.ConvertProbTabletoHashTable(unSmoothedProbabilityUnigramDataTable);
        gramDictionary[Languages[i]].setProbabilityUnigram(unSmoothedProbabilityUnigram,uniGramN);

        // --- Unigram, add-0.1 smoothing ---
        DataTable smoothedUniGram = new DataTable();
        smoothedUniGram = NB.applySmoothing(uniGram, 0.1);
        double uniGramSmoothedN = NB.getTotalFrequency();
        DataTable SmoothedProbabilityUnigramDataTable = new DataTable();
        SmoothedProbabilityUnigramDataTable = NB.ConvertTableToProbabilityTable(smoothedUniGram, uniGramSmoothedN);
        Hashtable SmoothedProbabilityUnigram = NB.ConvertProbTabletoHashTable(SmoothedProbabilityUnigramDataTable);
        gramDictionary[Languages[i]].setSmoothedProbabilityUnigram(SmoothedProbabilityUnigram, uniGramSmoothedN);

        // --- Bigram, unsmoothed ---
        DataTable biGram = new DataTable();
        biGram = NB.GetGram(cleanTable, 2);
        double biGramN = NB.getTotalFrequency();
        DataTable UnSmoothedProbabilityBigramDataTable = new DataTable();
        UnSmoothedProbabilityBigramDataTable = NB.ConvertTableToProbabilityTable(biGram, biGramN);
        Hashtable UnSmoothedProbabilityBigram = NB.ConvertProbTabletoHashTable(UnSmoothedProbabilityBigramDataTable);
        gramDictionary[Languages[i]].setProbabilityBigram(UnSmoothedProbabilityBigram, biGramN);

        // --- Bigram, add-0.1 smoothing ---
        DataTable smoothedBiGram = new DataTable();
        smoothedBiGram = NB.applySmoothing(biGram, 0.1);
        double BiGramSmoothedN = NB.getTotalFrequency();
        DataTable SmoothedProbabilityBigramDataTable = new DataTable();
        SmoothedProbabilityBigramDataTable = NB.ConvertTableToProbabilityTable(smoothedBiGram, BiGramSmoothedN);
        Hashtable SmoothedProbabilityBigram = NB.ConvertProbTabletoHashTable(SmoothedProbabilityBigramDataTable);
        gramDictionary[Languages[i]].setSmoothedProbabilityBigram(SmoothedProbabilityBigram, BiGramSmoothedN);

        // Persist all four tables for this language.
        FW.writeUniGram(unSmoothedProbabilityUnigramDataTable, Languages[i], "False", uniGramN);
        FW.writeUniGram(SmoothedProbabilityUnigramDataTable, Languages[i], "True", uniGramSmoothedN);
        FW.writeNGram(UnSmoothedProbabilityBigram,Languages[i],"False",biGramN,"BiGram");
        FW.writeNGram(SmoothedProbabilityBigram, Languages[i], "True", BiGramSmoothedN, "BiGram");
        // If you want matrix representation include this and remove the upper 2 lines
        // FW.writeBiGram(UnSmoothedProbabilityBigramDataTable, Languages[i], "False", biGramN);
        // FW.writeBiGram(SmoothedProbabilityBigramDataTable, Languages[i], "True", BiGramSmoothedN);
    }
    FW.closeWriter();
    MessageBox.Show("Done ");
}
/// <summary>
/// Starts the file download: determines how many connections may be used,
/// computes the byte ranges still missing (resuming from the metadata file when
/// present), splits them into blocks, and spawns one downloader per connection.
/// </summary>
/// <exception cref="FileNotFoundException">The remote file does not exist.</exception>
/// <exception cref="Exception">The connection limit allows fewer than one connection.</exception>
public void StartDownload()
{
    Trace.TraceInformation(String.Format("[{0}] Downloading.", m_uriURL.ToString()));
    if (!FileExists)
        throw new FileNotFoundException("The file to download does not exist.", m_uriURL.ToString());

    // Multiple connections only make sense when the server supports ranged resume.
    Int32 intConnectionsToUse = m_fmdInfo.SupportsResume ? m_intMaxConnections : 1;
    if (ServicePointManager.DefaultConnectionLimit < 1)
        throw new Exception(String.Format("Only {0} connections can be created to the same file; {1} are wanted.", ServicePointManager.DefaultConnectionLimit, 1));
    else if (ServicePointManager.DefaultConnectionLimit < intConnectionsToUse)
        intConnectionsToUse = ServicePointManager.DefaultConnectionLimit;

    //get the list of ranges we have not already downloaded
    RangeSet rgsMissingRanges = new RangeSet();
    rgsMissingRanges.AddRange(new Range(0, m_fmdInfo.Length - 1));
    if (File.Exists(m_strFileMetadataPath))
    {
        // Each metadata line records an already-downloaded range; subtract them.
        string[] strRanges = File.ReadAllLines(m_strFileMetadataPath);
        foreach (string strRange in strRanges)
        {
            string strCleanRange = strRange.Trim().Trim('\0');
            if (String.IsNullOrEmpty(strCleanRange))
                continue;
            rgsMissingRanges.RemoveRange(Range.Parse(strCleanRange));
        }
    }
    else if (File.Exists(m_strSavePath))
        // No metadata → a stale partial file cannot be trusted; start over.
        File.Delete(m_strSavePath);

    Int32 intMinBlockSize = (Int32)Math.Min((UInt64)m_intMinBlockSize, rgsMissingRanges.TotalSize);
    Int32 intBaseBlockSize = (Int32)Math.Max(rgsMissingRanges.TotalSize / (UInt64)intConnectionsToUse, (UInt64)intMinBlockSize);
    if (intConnectionsToUse > 1)
        intBaseBlockSize = Math.Min(intBaseBlockSize, m_intMaxBlockSize);

    //break the ranges into blocks to be downloaded
    foreach (Range rngNeeded in rgsMissingRanges)
    {
        //find out how many blocks will fit into the range
        Int32 intBlockCount = (Int32)(rngNeeded.Size / (UInt64)intBaseBlockSize);
        if (intBlockCount == 0)
            intBlockCount = 1;
        //there is likely to be some remainder (there are likely a fractional number of blocks
        // in the range), so lets distribute the remainder amongst all of the blocks
        // we do this by enlarging our blocksize
        UInt64 intBlockSize = (UInt64)Math.Ceiling(rngNeeded.Size / (double)intBlockCount);
        UInt64 intBlockStart = rngNeeded.StartByte;
        for (; intBlockStart + intBlockSize < rngNeeded.EndByte; intBlockStart += intBlockSize)
            m_queRequiredBlocks.Enqueue(new Range(intBlockStart, intBlockStart + intBlockSize - 1));
        // The final (possibly short) block runs to the end of the range.
        m_queRequiredBlocks.Enqueue(new Range(intBlockStart, rngNeeded.EndByte));
    }

    m_fwrWriter = new FileWriter(m_strSavePath, m_strFileMetadataPath);
    m_dteStartTime = DateTime.Now;

    //spawn the downloading threads (never more downloaders than blocks)
    Int32 intRequiredBlocks = m_queRequiredBlocks.Count;
    lock (m_lstDownloaders)
    {
        for (Int32 i = 0; i < (intRequiredBlocks < intConnectionsToUse ? intRequiredBlocks : intConnectionsToUse); i++)
        {
            BlockDownloader bdrDownloader = new BlockDownloader(this, m_fmdInfo, m_fwrWriter, m_intWriteBufferSize, m_strUserAgent);
            bdrDownloader.FinishedDownloading += new EventHandler(Downloader_FinishedDownloading);
            bdrDownloader.Start();
            m_lstDownloaders.Add(bdrDownloader);
        }
    }
}
/// <summary>
/// Writes this instance's serialized form through <paramref name="writer"/>.
/// <paramref name="stringsTable"/> is presumably used to intern/reference strings —
/// see concrete implementations for the exact contract.
/// </summary>
public abstract void Serialize(GenTypes genTypes, FileWriter writer, StringsTable stringsTable);
/// <summary>
/// Serializes the GTX container to a byte array: writes the header, then every
/// block in big-endian order, refreshing surf/data/mip payloads from the textures.
/// Mirrors <c>Save(Stream)</c> but targets an in-memory buffer.
/// </summary>
/// <returns>The complete file contents.</returns>
/// <exception cref="Exception">Unsupported GTX version, or a texture has no mip data.</exception>
public byte[] Save()
{
    //Get each block type for saving except alignment
    // var TextureInfoBlocks = blocks.Where(i => i.BlockType == BlockType.ImageInfo);
    // var TextureDataBlocks = blocks.Where(i => i.BlockType == BlockType.ImageData);
    // var TextureMipDataBlocks = blocks.Where(i => i.BlockType == BlockType.MipData);
    System.IO.MemoryStream mem = new System.IO.MemoryStream();
    using (FileWriter writer = new FileWriter(mem))
    {
        writer.ByteOrder = Syroot.BinaryData.ByteOrder.BigEndian;
        header.Write(writer);

        // Block-type IDs shift by one between GTX v6.0 and v6.x/v7.
        uint surfBlockType;
        uint dataBlockType;
        uint mipBlockType;
        if (header.MajorVersion == 6 && header.MinorVersion == 0)
        {
            surfBlockType = 0x0A;
            dataBlockType = 0x0B;
            mipBlockType = 0x0C;
        }
        else if (header.MajorVersion == 6 || header.MajorVersion == 7)
        {
            surfBlockType = 0x0B;
            dataBlockType = 0x0C;
            mipBlockType = 0x0D;
        }
        else
        {
            throw new Exception($"Unsupported GTX version {header.MajorVersion}");
        }

        // All three indices advance together on each surface-info block, pairing
        // the following data/mip blocks with the same texture.
        int imageInfoIndex = -1;
        int imageBlockIndex = -1;
        int imageMipBlockIndex = -1;
        writer.Seek(header.HeaderSize, System.IO.SeekOrigin.Begin);
        foreach (var block in blocks)
        {
            if ((uint)block.BlockType == surfBlockType)
            {
                imageInfoIndex++;
                imageBlockIndex++;
                imageMipBlockIndex++;
                block.data = textures[imageInfoIndex].surface.Write();
                block.Write(writer);
            }
            else if ((uint)block.BlockType == dataBlockType)
            {
                var tex = textures[imageBlockIndex];
                var pos = writer.Position;
                uint Alignment = tex.surface.alignment;
                //Create alignment block first so the image data starts on the
                //surface's required boundary (+32 presumably covers the block
                //header size — TODO confirm against GTXDataBlock.Write).
                uint dataAlignment = GetAlignBlockSize((uint)pos + 32, Alignment);
                GTXDataBlock dataAlignBlock = new GTXDataBlock(BlockType.AlignData, dataAlignment, 0, 0);
                dataAlignBlock.Write(writer);
                block.data = tex.surface.data;
                block.Write(writer);
            }
            else if ((uint)block.BlockType == mipBlockType)
            {
                var tex = textures[imageMipBlockIndex];
                var pos = writer.Position;
                uint Alignment = tex.surface.alignment;
                //Create alignment block first (same treatment as image data).
                uint dataAlignment = GetAlignBlockSize((uint)pos + 32, Alignment);
                GTXDataBlock dataAlignBlock = new GTXDataBlock(BlockType.AlignData, dataAlignment, 0, 0);
                dataAlignBlock.Write(writer);
                if (tex.surface.mipData == null || tex.surface.mipData.Length <= 0)
                {
                    throw new Exception("Invalid mip data!");
                }
                block.data = tex.surface.mipData;
                block.Write(writer);
            }
            else if (block.BlockType != BlockType.AlignData)
            {
                // Other blocks pass through; stale alignment blocks are dropped
                // because fresh ones were emitted above.
                block.Write(writer);
            }
        }
    }
    return (mem.ToArray());
}
/// <summary>
/// Builds the conversion filter chain: Xbox ADPCM decoder, ACM Wrapper,
/// Wave Parser, WAV Dest, plus the file source and the file-writer sink.
/// </summary>
/// <param name="sourceFileName">Path of the input file to convert.</param>
/// <param name="outputFileName">Path the file writer sink will write to.</param>
/// <exception cref="ApplicationException">Wraps any failure while building the chain.</exception>
private void InitFilters(string sourceFileName, string outputFileName)
{
    try
    {
        // Guard clauses replace the original deeply nested if/else ladder; each
        // filter must be created before the next is attempted, same as before.

        // create an instance of the xbox filter and add it to the graph
        _xboxFilter = CreateAndAddFilterByName("Xbox ADPCM", _graphBuilder, FilterCategory.AudioCompressorCategory);
        if (_xboxFilter == null)
            throw new Exception("Could not load Xbox Filter. Is it installed?");

        // create an instance of the ACM Wrapper filter
        _acmWrapper = CreateAndAddFilterByName("ACM Wrapper", _graphBuilder, FilterCategory.LegacyAmFilterCategory);
        if (_acmWrapper == null)
            throw new Exception("Could not load ACM Wrapper");

        // Add WavParser
        _wavParser = CreateAndAddFilterByName("Wave Parser", _graphBuilder, FilterCategory.LegacyAmFilterCategory);
        if (_wavParser == null)
            throw new Exception("Could not load wave parser");

        // Add WavDest filter
        _wavDest = CreateAndAddFilterByName("WAV Dest", _graphBuilder, FilterCategory.LegacyAmFilterCategory);
        if (_wavDest == null)
            throw new Exception("Could not load Wav Dest filter. Is it installed?");

        // Add the file source
        hr = _graphBuilder.AddSourceFilter(sourceFileName, "source", out _source);
        DsError.ThrowExceptionForHR(hr);

        // we want to add a filewriter filter to the filter graph;
        // access its IFileSinkFilter interface to set the file name
        _fileWriter = new FileWriter();
        IFileSinkFilter fs = (IFileSinkFilter)_fileWriter;
        hr = fs.SetFileName(outputFileName, null);
        DsError.ThrowExceptionForHR(hr);

        // Add the file writer to the graph
        hr = _graphBuilder.AddFilter((IBaseFilter)_fileWriter, "output");
        DsError.ThrowExceptionForHR(hr);
    }
    catch (Exception ex)
    {
        // BUGFIX: keep the original exception as InnerException so the stack
        // trace is not lost when the failure is rewrapped.
        throw new ApplicationException("Error initialising filters: " + ex.Message, ex);
    }
}
/// <summary>
/// Emits the code for <paramref name="method"/> for the instruction
/// <paramref name="group"/> into <paramref name="writer"/>; each generator
/// backend supplies its own implementation.
/// </summary>
protected abstract void GenCreate(FileWriter writer, CreateMethod method, InstructionGroup group);
/// <summary>
/// Emits the maskmov-style create method named via <paramref name="methodBaseName"/>
/// for instruction <paramref name="code"/> into <paramref name="writer"/>; each
/// generator backend supplies its own implementation.
/// </summary>
protected abstract void GenCreateMaskmov(FileWriter writer, CreateMethod method, string methodBaseName, EnumValue code);
// Encodes 'value' as a varint and writes exactly the encoded bytes to 'p'.
static void FileWriterWriteVarint(FileWriter p, ulong value)
{
    // 10 bytes is the maximum varint encoding length for a 64-bit value.
    var buffer = new byte[10];
    var encodedLength = ConvertEx.PutVarint(buffer, value);
    FileWriterWrite(p, buffer, encodedLength);
}
/// <summary>
/// Sets up the filter graph: renders the source via intelligent connect, then
/// replaces the Default DirectSound Device sink with a WAV Dest → File Writer
/// chain writing to <paramref name="destinationFileName"/>.
/// </summary>
/// <param name="sourceFileName">Input media file to render.</param>
/// <param name="destinationFileName">Output WAV file path.</param>
/// <exception cref="ApplicationException">Wraps any failure while building the graph.</exception>
private void InitGraph(string sourceFileName, string destinationFileName)
{
    try
    {
        // Add the file source
        hr = _graphBuilder.AddSourceFilter(sourceFileName, "source", out _source);
        DsError.ThrowExceptionForHR(hr);

        // get the output pin from the source filter and let intelligent connect
        // build the rest of the playback chain
        IPin sourceOutputPin = DsFindPin.ByDirection(_source, PinDirection.Output, 0);
        hr = _graphBuilder.Render(sourceOutputPin);
        DsError.ThrowExceptionForHR(hr);

        // check for the Default DirectSound Device filter. If it exists we can assume the
        // intelligent connect worked.
        // NOTE(review): hr is NOT assigned from FindFilterByName here — the
        // following ThrowExceptionForHR re-checks the previous Render result.
        IBaseFilter DefaultOutputDevice;
        _graphBuilder.FindFilterByName("Default DirectSound Device", out DefaultOutputDevice);
        DsError.ThrowExceptionForHR(hr);

        // get the previous filter in the chain (the one connected to the default
        // direct sound device); we loop through the graph's filters because we
        // don't know what it might be called.
        // NOTE(review): same stale-hr pattern here — EnumFilters' result is discarded.
        IEnumFilters filters;
        _graphBuilder.EnumFilters(out filters);
        DsError.ThrowExceptionForHR(hr);
        IBaseFilter[] outputFilter = new IBaseFilter[1];
        IntPtr fetched = IntPtr.Zero;   // NOTE(review): declared but never passed to Next below
        int filterNo = 0;
        // filters enum seems to list the filters in reverse order (starting with the
        // default direct sound device), so the second filter should be the one we're after.
        // NOTE(review): IntPtr.Zero is passed as the fetched-count pointer — confirm
        // the DirectShow.NET overload tolerates this.
        do
        {
            filters.Next(1, outputFilter, IntPtr.Zero);
            filterNo += 1;
        } while (filterNo < 2);

        // get the output pin from the filter connected to the default direct sound
        // device; we will use this later to connect to wav dest
        IPin outPin = DsFindPin.ByDirection(outputFilter[0], PinDirection.Output, 0);

        // add wav dest filter
        _wavDest = CreateAndAddFilterByName("WAV Dest", _graphBuilder, FilterCategory.LegacyAmFilterCategory);

        // add the file writer sink and point it at the destination file via IFileSinkFilter
        _fileWriter = new FileWriter();
        IFileSinkFilter fs = (IFileSinkFilter)_fileWriter;
        hr = fs.SetFileName(destinationFileName, null);
        DsError.ThrowExceptionForHR(hr);

        // Add the file writer to the graph
        hr = _graphBuilder.AddFilter((IBaseFilter)_fileWriter, "output");
        DsError.ThrowExceptionForHR(hr);

        // disconnect and remove the default direct sound device filter (we don't need it)
        _graphBuilder.Disconnect(outPin);
        _graphBuilder.RemoveFilter(DefaultOutputDevice);

        // connect the output filter output pin to the wav dest input pin
        IPin wavDestInputPin = DsFindPin.ByDirection(_wavDest, PinDirection.Input, 0);
        hr = _graphBuilder.ConnectDirect(outPin, wavDestInputPin, new AMMediaType());
        DsError.ThrowExceptionForHR(hr);

        // connect the WAV Dest filter to the File Writer
        IPin wavDestOutputPin = DsFindPin.ByDirection(_wavDest, PinDirection.Output, 0);
        IPin fileWriterInputPin = DsFindPin.ByDirection((IBaseFilter)_fileWriter, PinDirection.Input, 0);
        hr = _graphBuilder.Connect(wavDestOutputPin, fileWriterInputPin);
        DsError.ThrowExceptionForHR(hr);
        // DONE! the graph should be complete
    }
    catch (Exception ex)
    {
        throw new ApplicationException("Error initialising filter graph: " + ex.Message);
    }
}
/// <summary>
/// Emits the declare-data create method for <paramref name="method"/> and data
/// <paramref name="kind"/> into <paramref name="writer"/>; each generator backend
/// supplies its own implementation.
/// </summary>
protected abstract void GenCreateDeclareData(FileWriter writer, CreateMethod method, DeclareDataKind kind);
/// <summary>
/// Builds and starts a DirectShow playback/grab graph for the AVI file named in
/// <c>txtAviFileName</c>: file source → ffdshow decoder → sample grabber → video
/// renderer, then allocates the frame buffer, starts the timer thread, wires up
/// seeking interfaces, and begins playback. Shows a message box and returns if
/// the file is missing or ffdshow is not installed.
/// </summary>
private void StartCapture()
{
    int hr;
    ISampleGrabber sampGrabber = null;
    IBaseFilter capFilter = null;
    ICaptureGraphBuilder2 capGraph = null;
    if (System.IO.File.Exists(txtAviFileName.Text))
    {
        // Get the graphbuilder object
        m_FilterGraph = (IFilterGraph2) new FilterGraph();
        m_mediaCtrl = m_FilterGraph as IMediaControl;

        // Get the ICaptureGraphBuilder2
        capGraph = (ICaptureGraphBuilder2) new CaptureGraphBuilder2();

        // Get the SampleGrabber interface
        sampGrabber = (ISampleGrabber) new SampleGrabber();

        // Start building the graph
        hr = capGraph.SetFiltergraph(m_FilterGraph);
        DsError.ThrowExceptionForHR(hr);

        // Add the video source
        hr = m_FilterGraph.AddSourceFilter(txtAviFileName.Text, "File Source (Async.)", out capFilter);
        DsError.ThrowExceptionForHR(hr);

        //add AVI Decompressor
        IBaseFilter pAVIDecompressor = (IBaseFilter) new AVIDec();
        hr = m_FilterGraph.AddFilter(pAVIDecompressor, "AVI Decompressor");
        DsError.ThrowExceptionForHR(hr);

        IBaseFilter ffdshow;
        try
        {
            // Create Decoder filter COM object (ffdshow video decoder) by CLSID;
            // bail out with a user message if ffdshow is not installed.
            Type comtype = Type.GetTypeFromCLSID(new Guid("{04FE9017-F873-410E-871E-AB91661A4EF7}"));
            if (comtype == null)
                throw new NotSupportedException("Creating ffdshow video decoder COM object fails.");
            object comobj = Activator.CreateInstance(comtype);
            ffdshow = (IBaseFilter) comobj; // error occurs / raises when ffdshow is absent
            comobj = null;
        }
        catch
        {
            CustomMessageBox.Show("Please install/reinstall ffdshow");
            return;
        }

        hr = m_FilterGraph.AddFilter(ffdshow, "ffdshow");
        DsError.ThrowExceptionForHR(hr);

        IBaseFilter baseGrabFlt = (IBaseFilter) sampGrabber;
        ConfigureSampleGrabber(sampGrabber);

        // Add the frame grabber to the graph
        hr = m_FilterGraph.AddFilter(baseGrabFlt, "Ds.NET Grabber");
        DsError.ThrowExceptionForHR(hr);

        IBaseFilter vidrender = (IBaseFilter) new VideoRenderer();
        hr = m_FilterGraph.AddFilter(vidrender, "Render");
        DsError.ThrowExceptionForHR(hr);

        // Wire: source → ffdshow → sample grabber (renderer left for Connect to attach).
        IPin captpin = DsFindPin.ByDirection(capFilter, PinDirection.Output, 0);
        IPin ffdpinin = DsFindPin.ByName(ffdshow, "In");
        IPin ffdpinout = DsFindPin.ByName(ffdshow, "Out");
        IPin samppin = DsFindPin.ByName(baseGrabFlt, "Input");
        hr = m_FilterGraph.Connect(captpin, ffdpinin);
        DsError.ThrowExceptionForHR(hr);
        hr = m_FilterGraph.Connect(ffdpinout, samppin);
        DsError.ThrowExceptionForHR(hr);

        // NOTE(review): this FileWriter is created but never configured or added
        // to the graph — the SetFileName/RenderStream calls below are commented out.
        FileWriter filewritter = new FileWriter();
        IFileSinkFilter filemux = (IFileSinkFilter) filewritter;
        //filemux.SetFileName("test.avi",);
        //hr = capGraph.RenderStream(null, MediaType.Video, capFilter, null, vidrender);
        // DsError.ThrowExceptionForHR(hr);

        SaveSizeInfo(sampGrabber);

        // setup buffer for one decoded frame (stride × height bytes)
        if (m_handle == IntPtr.Zero)
            m_handle = Marshal.AllocCoTaskMem(m_stride*m_videoHeight);

        // tell the callback to ignore new images
        m_PictureReady = new ManualResetEvent(false);
        m_bGotOne = false;
        m_bRunning = false;

        timer1 = new Thread(timer);
        timer1.IsBackground = true;
        timer1.Start();

        // Seeking/position interfaces and track bar range from media duration.
        m_mediaextseek = m_FilterGraph as IAMExtendedSeeking;
        m_mediapos = m_FilterGraph as IMediaPosition;
        m_mediaseek = m_FilterGraph as IMediaSeeking;
        double length = 0;
        m_mediapos.get_Duration(out length);
        trackBar_mediapos.Minimum = 0;
        trackBar_mediapos.Maximum = (int) length;

        Start();
    }
    else
    {
        MessageBox.Show("File does not exist");
    }
}
/// <summary>
/// Emits the declare-data create method variant that takes an array length, for
/// the given data <paramref name="kind"/> and <paramref name="arrayType"/>; each
/// generator backend supplies its own implementation.
/// </summary>
protected abstract void GenCreateDeclareDataArrayLength(FileWriter writer, CreateMethod method, DeclareDataKind kind, ArrayType arrayType);