// Round-trips a Customer through GetBuffer / non-generic Deserializer and
// verifies the deserialized object is of the expected type.
public void GetBufferTest()
{
    // Arrange
    var sut = new BinarySerializer();

    // Act
    var payload = sut.GetBuffer(new TestHelpers.Customer(21));
    var roundTripped = sut.Deserializer(payload, typeof(Customer));

    // Assert
    Assert.IsInstanceOfType(roundTripped, typeof(Customer));
}
// Saves a dependency fingerprint through the registered protocols and reads it
// back, asserting the round-tripped fingerprint equals the original.
public void SerializeAndReadBack()
{
    var protocolRegistry = new DependencyFingerprintProtocolRegistry();
    protocolRegistry.Register<CombinedFingerprintProtocol>();
    protocolRegistry.Register<SourceSetFingerprintProtocol>();
    var serializer = new BinarySerializer(protocolRegistry);

    var dependency = CreateDependencyObject();
    var original = dependency.Fingerprint;

    // Serialize the fingerprint into an in-memory buffer.
    byte[] payload;
    using (var writeStream = new MemoryStream())
    {
        original.Save(serializer, writeStream);
        payload = writeStream.ToArray();
    }

    // Deserialize from the captured buffer.
    CombinedFingerprint restored;
    using (var readStream = new MemoryStream(payload))
    {
        restored = new CombinedFingerprint(serializer, readStream);
    }

    original.Should().Be(restored);
}
// Entry point: deserializes an ISO-9660 image given as args[0] (logging each
// member as it is read), then re-serializes the object graph as XML next to
// the input file (same base name, .xml extension).
private static void Main(string[] args)
{
    var file = args[0];
    using (var stream = new FileStream(file, FileMode.Open, FileAccess.Read))
    {
        var serializer = new BinarySerializer();
        // Trace every deserialized member to the console for diagnostics.
        serializer.MemberDeserialized += (sender, eventArgs) => Console.WriteLine(eventArgs.MemberName + " : " + eventArgs.Value);
        var iso = serializer.Deserialize<Iso9660.Iso9660>(stream);
        stream.Position = 0; // rewind the input after reading
        var xmlSerializer = new XmlSerializer(typeof (Iso9660.Iso9660));
        // Output file: same directory, same name, ".xml" extension.
        var outfilePath = Path.GetDirectoryName(file);
        var outfileName = Path.GetFileNameWithoutExtension(file) + ".xml";
        var outfile = Path.Combine(outfilePath, outfileName);
        using (var xmlStream = new FileStream(outfile, FileMode.Create, FileAccess.Write))
        {
            xmlSerializer.Serialize(xmlStream, iso);
        }
        Console.ReadKey(); // keep the console window open until a key is pressed
    }
}
// Restores this result object from the serializer stream.
// NOTE: read order must mirror Save() exactly (long, int, string, double).
public void Load(BinarySerializer reader)
{
    TotalHits = reader.ReadLong();
    ActualSize = reader.ReadInt();
    ResultSetXml = reader.ReadString();
    // TimeStamp is persisted as an OLE Automation date (double) — see Save().
    TimeStamp = DateTime.FromOADate(reader.ReadDouble());
}
/// <summary>Reads or writes this mesh-part's data from/to the given binary serializer.</summary>
/// <param name="serializer">The binary serializer; its mode determines read vs. write.</param>
// NOTE: field order defines the wire format — do not reorder.
void IDataSerializable.Serialize(BinarySerializer serializer)
{
    serializer.Serialize(ref MaterialIndex);
    serializer.Serialize(ref IndexBufferRange);
    serializer.Serialize(ref VertexBufferRange);
    serializer.Serialize(ref Properties);
}
// Restores window size/position and all [StateItem]-tagged properties from the
// serialized state file. Best-effort: any failure (missing/corrupt file,
// unexpected keys) is swallowed and the defaults are kept.
private void LoadState()
{
    try
    {
        var ser = new BinarySerializer();
        var stat = ser.Deserialize(StateFileName) as Dictionary<String, Object>;
        if (stat != null)
        {
            var w = (Int32)stat["Width"];
            var h = (Int32)stat["Height"];
            // Only apply sizes above the minimum to avoid an unusable window.
            if (w > MinimumSize.Width) Width = w;
            if (h > MinimumSize.Height) Height = h;
            var x = (Int32)stat["X"];
            var y = (Int32)stat["Y"];
            // Non-positive coordinates would place the window off-screen.
            if (x > 0 && y > 0) Location = new Point(x, y);
            // Restore every property opted in via [StateItem].
            foreach (var p in GetType().GetProperties())
            {
                if (Attribute.IsDefined(p, typeof(StateItemAttribute)))
                    p.SetValue(this, stat[p.Name], null);
            }
        }
    }
    catch { } // intentional: restoring state must never prevent startup
}
/// <inheritdoc/>
// NOTE: field order defines the wire format — do not reorder.
// Arguments may legitimately be absent, hence SerializeFlags.Nullable.
void IDataSerializable.Serialize(BinarySerializer serializer)
{
    serializer.Serialize(ref Name);
    serializer.Serialize(ref ShareConstantBuffers);
    serializer.Serialize(ref Techniques);
    serializer.Serialize(ref Arguments, SerializeFlags.Nullable);
}
// Custom (de)serialization for the texture dictionary: the serializer does not
// handle Dictionary<string, List<MaterialTexture>> natively, so the entry count
// is written followed by each key/value pair.
void IDataSerializable.Serialize(BinarySerializer serializer)
{
    if (serializer.Mode == SerializerMode.Write)
    {
        // Write count, then each name/list pair in dictionary order.
        serializer.Writer.Write(Textures.Count);
        foreach (var texture in Textures)
        {
            // Copy to locals: ref parameters cannot bind to KeyValuePair members.
            var name = texture.Key;
            var list = texture.Value;
            serializer.Serialize(ref name);
            serializer.Serialize(ref list);
        }
    }
    else
    {
        // Read count, pre-size the dictionary, then rebuild each pair.
        var count = serializer.Reader.ReadInt32();
        Textures = new Dictionary<string, List<MaterialTexture>>(count);
        for(int i = 0; i < count; i++)
        {
            string name = null;
            List<MaterialTexture> list = null;
            serializer.Serialize(ref name);
            serializer.Serialize(ref list);
            Textures.Add(name, list);
        }
    }
    // Properties are handled symmetrically by the serializer in both modes.
    serializer.Serialize(ref Properties);
}
// Creates a document categorizer whose model is loaded from the given file.
// NOTE(review): the reader is not disposed here — presumably LoadModel or the
// serializer's finalization handles the underlying file; confirm against
// BinarySerializer's ownership contract.
public DocumentCategorizerComponent(string modelFileName) : base(typeof(DocumentCategorizerComponent))
{
    mBlockSelector = "TextBlock";
    BinarySerializer reader = new BinarySerializer(modelFileName, FileMode.Open);
    LoadModel(reader);
}
// *** ISerializable interface implementation ***
// Restores the language id and recreates the stemmer for it.
public void Load(BinarySerializer reader)
{
    Utils.ThrowException(reader == null ? new ArgumentNullException("reader") : null);
    // the following statements throw serialization-related exceptions
    mLanguage = (Language)reader.ReadInt();
    CreateStemmer();
}
// Round-trips a Customer through GetBuffer / generic Deserializer<T> and
// verifies the deserialized object is of the expected type.
public void GetBuffer_UseGeneric()
{
    // Arrange
    var sut = new BinarySerializer();

    // Act
    var payload = sut.GetBuffer(new TestHelpers.Customer(32));
    var roundTripped = sut.Deserializer<Customer>(payload);

    // Assert
    Assert.IsInstanceOfType(roundTripped, typeof(Customer));
}
// Serializing a simple string list must yield a non-null stream.
public void SerializeTest()
{
    var sut = new BinarySerializer();
    var items = new List<string>() { "f", "s" };

    MemoryStream result = sut.Serialize(items);

    Assert.IsNotNull(result);
}
// *** ISerializable interface implementation ***
// Persists this result object. NOTE: write order must mirror Load() exactly
// (long, int, string, double).
public void Save(BinarySerializer writer)
{
    writer.WriteLong(TotalHits);
    writer.WriteInt(ActualSize);
    writer.WriteString(ResultSetXml);
    // TimeStamp is stored as an OLE Automation date (double).
    writer.WriteDouble(TimeStamp.ToOADate());
}
// (De)serializes compiler-input state. NOTE: field order defines the wire
// format — do not reorder. Macros and the include list may be absent, hence
// SerializeFlags.Nullable; the include list delegates element handling back to
// the serializer.
void IDataSerializable.Serialize(BinarySerializer serializer)
{
    serializer.Serialize(ref FilePath);
    serializer.Serialize(ref DependencyFilePath);
    serializer.SerializeEnum(ref CompilerFlags);
    serializer.Serialize(ref Macros, SerializeFlags.Nullable);
    serializer.Serialize(ref IncludeDirectoryList, serializer.Serialize, SerializeFlags.Nullable);
}
/// <summary>
/// Builds a <see cref="BinarySerializer"/> around the configured formatter,
/// validates it, and returns it.
/// </summary>
public override ISerializer ApplyInternal()
{
    var result = new BinarySerializer(_binaryFormatter);
    VerifyErrors(result);
    return result;
}
// (De)serializes this node of the hierarchy. NOTE: field order defines the
// wire format — do not reorder. Name and Children may be absent
// (SerializeFlags.Nullable); Children delegates element handling back to the
// serializer.
void IDataSerializable.Serialize(BinarySerializer serializer)
{
    serializer.Serialize(ref Index);
    serializer.Serialize(ref ParentIndex);
    serializer.Serialize(ref Transform);
    serializer.Serialize(ref Name, false, SerializeFlags.Nullable);
    serializer.Serialize(ref Children, serializer.Serialize, SerializeFlags.Nullable);
}
/// <inheritdoc/>
// NOTE: field order defines the wire format — do not reorder.
void IDataSerializable.Serialize(BinarySerializer serializer)
{
    serializer.Serialize(ref Name, SerializeFlags.Nullable); // pass name is optional
    serializer.Serialize(ref IsSubPass);
    serializer.Serialize(ref Attributes);
    serializer.Serialize(ref Pipeline);
}
// (De)serializes this bone/mesh entry. NOTE: field order defines the wire
// format — do not reorder. Name may be absent (SerializeFlags.Nullable).
public void Serialize(BinarySerializer serializer)
{
    serializer.Serialize(ref Index);
    serializer.Serialize(ref ParentBoneIndex);
    serializer.Serialize(ref Name, false, SerializeFlags.Nullable);
    serializer.Serialize(ref MeshParts);
    serializer.Serialize(ref Attributes);
}
// (De)serializes a single font glyph record. NOTE: field order defines the
// wire format — do not reorder.
void IDataSerializable.Serialize(BinarySerializer serializer)
{
    serializer.Serialize(ref Character);
    serializer.Serialize(ref Subrect);
    serializer.Serialize(ref Offset);
    serializer.Serialize(ref XAdvance);
    serializer.Serialize(ref BitmapIndex);
}
// Deserializing a null byte array must throw — the expected exception is
// presumably declared via an attribute on this test method; the result itself
// is irrelevant, so it is discarded.
public void Should_throw_exception_if_byte_array_is_null()
{
    // Arrange
    var sut = new BinarySerializer();

    // Action — this call is expected to throw.
    _ = sut.Deserialize<Customer>(null);
}
// Publishes a persistent message to the exchange under the given routing key,
// encoding the payload with the binary string serializer.
public void Publish(string message, string routingKey)
{
    var properties = _channel.CreateBasicProperties();
    properties.DeliveryMode = Persistent;

    var payloadSerializer = new BinarySerializer<string>();
    var body = payloadSerializer.Serialize(message);

    _channel.BasicPublish(ExchangeName, routingKey, true, false, properties, body);
}
// (De)serializes a stream-output declaration entry. NOTE: field order defines
// the wire format — do not reorder.
void IDataSerializable.Serialize(BinarySerializer serializer)
{
    serializer.Serialize(ref Stream);
    serializer.Serialize(ref SemanticName);
    serializer.Serialize(ref SemanticIndex);
    serializer.Serialize(ref StartComponent);
    serializer.Serialize(ref ComponentCount);
    serializer.Serialize(ref OutputSlot);
}
// Serializing a null object must throw — the expected exception is presumably
// declared via an attribute on this test method; the result is discarded.
public void Should_throw_exception_if_msg_is_null()
{
    // Arrange
    var sut = new BinarySerializer();

    // Action — this call is expected to throw.
    _ = sut.Serialize<Customer>(null);
}
// Round-trips a string list through Serialize/Deserializer and verifies the
// first element survives intact.
public void DeserializeTest()
{
    var sut = new BinarySerializer();
    var original = new List<string>() { "f", "s" };

    MemoryStream stream = sut.Serialize(original);
    var restored = sut.Deserializer<List<string>>(stream);

    Assert.AreEqual(original[0], restored[0]);
}
// Builds the system under test: a persistence engine backed by an in-memory
// Simple.Data adapter with the key/auto-increment columns configured.
protected SimpleDataPersistenceEngine CreateSut()
{
    var inMemoryAdapter = new InMemoryAdapter();
    inMemoryAdapter.SetKeyColumn("Aggregates", "AggregateId");
    inMemoryAdapter.SetAutoIncrementColumn("EventsLog", "Sequence");
    Database.UseMockAdapter(inMemoryAdapter);

    var serializer = new BinarySerializer();
    return new SimpleDataPersistenceEngine(string.Empty, serializer);
}
// A known GZip-compressed, base64-encoded payload must decompress and
// deserialize (via the wrapped binary serializer) back to the original string.
public void DecompressBaseSerializerOutput()
{
    var innerSerializer = new BinarySerializer();
    var sut = new GZipSerializer(innerSerializer);

    using (var compressedStream = new MemoryStream(Convert.FromBase64String("H4sIAAAAAAAEAGNgZGBg+A8EIBoE2EAMTt9KBf+krNTkEm4ASBYMlCEAAAA=")))
    {
        Assert.Equal("My Object", sut.Deserialize(compressedStream, typeof(String)));
    }
}
// (De)serializes this mesh. NOTE: field order defines the wire format — do not
// reorder. Name may be absent (SerializeFlags.Nullable).
public void Serialize(BinarySerializer serializer)
{
    serializer.Serialize(ref Name, false, SerializeFlags.Nullable);
    serializer.Serialize(ref ParentBoneIndex);
    serializer.Serialize(ref BoundingSphere);
    serializer.Serialize(ref VertexBuffers);
    serializer.Serialize(ref IndexBuffers);
    serializer.Serialize(ref MeshParts);
    serializer.Serialize(ref Properties);
}
// A known Deflate-compressed, base64-encoded payload must decompress and
// deserialize (via the wrapped binary serializer) back to the original string.
public void DecompressBaseSerializerOutput()
{
    var baseSerializer = new BinarySerializer();
    // Fixed: local was misleadingly named "gzipSerializer" although it wraps a DeflateSerializer.
    var deflateSerializer = new DeflateSerializer(baseSerializer);
    using (var memoryStream = new MemoryStream(Convert.FromBase64String("Y2BkYGD4DwQgGgTYQAxO30oF/6Ss1OQSbgA=")))
    {
        Assert.Equal("My Object", deflateSerializer.Deserialize(memoryStream, typeof(String)));
    }
}
// (De)serializes a material-texture reference. NOTE: field order defines the
// wire format — do not reorder; enums go through SerializeEnum.
public void Serialize(BinarySerializer serializer)
{
    serializer.Serialize(ref FilePath);
    serializer.Serialize(ref Index);
    serializer.Serialize(ref UVIndex);
    serializer.Serialize(ref BlendFactor);
    serializer.SerializeEnum(ref Operation);
    serializer.SerializeEnum(ref WrapMode);
    serializer.SerializeEnum(ref Flags);
}
// (De)serializes this message. Base-class state goes first so the wire layout
// matches the base implementation; the references collection delegates each
// EngineReference element back to the serializer via the ref-lambda.
public override void Serialize(BinarySerializer serializer)
{
    base.Serialize(serializer);
    serializer.SerializeEnum(ref updateType);
    serializer.Serialize(ref references, serializer.Serialize, (ref EngineReference er) => serializer.Serialize(ref er));
}
// Restores the item collection from the serializer stream.
public void Load(BinarySerializer reader)
{
    Utils.ThrowException(reader == null ? new ArgumentNullException("reader") : null);
    mItems.Load(reader); // throws serialization-related exceptions
}
// Deserialization constructor: state is restored entirely by Load.
public Annotation(BinarySerializer reader) => Load(reader);
/// <summary>Creates a deep copy of this model via a binary-serialization round trip.</summary>
public override IModel Copy()
{
    var snapshot = BinarySerializer.Serialize(this);
    return (IModel)BinarySerializer.Deserialize <FileTypeModel>(snapshot);
}
// Replaces the current settings with the persisted ones.
public void LoadSettings() => Settings = BinarySerializer.LoadSettings();
/// <summary>
/// Writes the current mediation tree data to disk.
/// </summary>
public void SaveTree() =>
    BinarySerializer.SerializeObject <MediationTreeData>(path + "mediationtree", data);
/// <summary>
/// Reads a node object from disk.
/// </summary>
/// <param name="nodeID">The node's ID.</param>
/// <returns>The node object.</returns>
public MediationTreeNode GetNode(int nodeID) =>
    // The node's on-disk location is the tree path plus its numeric ID.
    BinarySerializer.DeSerializeObject <MediationTreeNode>(path + nodeID);
// Writes this instance's binary representation to the given stream.
public void Serialize(Stream stream) => BinarySerializer.Serialize(this, stream);
// Deserialization constructor: state is restored entirely by Load.
// Load throws ArgumentNullException and serialization-related exceptions.
public MajorityClassifier(BinarySerializer reader) => Load(reader);
// Pre-populates the serializer's type cache for the test assembly and the
// Block assembly so later (de)serialization calls skip reflection warm-up.
public void WarmSerializer()
{
    BinarySerializer.CacheTypesOf(typeof(UtBinarySerializer).Assembly);
    BinarySerializer.CacheTypesOf(typeof(Block).Assembly);
}
/// <summary>
/// Extension point for derived types to serialize method-related state.
/// The base implementation intentionally writes nothing.
/// </summary>
protected virtual void SerializeMethods(BinarySerializer serializer) { }
// Writes Str as a null-terminated UTF-16 little-endian string; always reports
// success to the custom-serialization pipeline.
bool ICustomSerialization.Serialize(BinarySerializer Serializer)
{
    Serializer.Writer.WriteNullTerminatedStringUtf16LE(Str);
    return(true);
}
/// <summary>Deserializes an <c>Answer</c> instance from its binary representation.</summary>
public new static Answer FromArray(byte[] Bytes)
{
    var serializer = new BinarySerializer();
    return serializer.Deserialize <Answer>(Bytes);
}
// Deserialization constructor: label and example are defaulted first so the
// instance is well-defined even if Load throws part-way through.
public LabeledExample(BinarySerializer reader)
{
    m_lbl = default(LblT);
    m_ex = default(ExT);
    Load(reader); // throws ArgumentNullException, serialization-related exceptions
}
// Benchmark: writes `count` JSON-serialized TestValues into a 100 MB buffer
// and reads them back, `rounds` times each, checking SizeOf/Write agreement
// and value equality on read-back.
public void CouldPackWithHeaderBenchmark()
{
    Settings.DoAdditionalCorrectnessChecks = false; // measure raw throughput
    var silent = true; // NOTE(review): unused in this method
    var rm = BufferPool.Retain(100_000_000);
    var db = new DirectBuffer(rm.Span);
#if !DEBUG
    var count = 200_000;
#else
    var count = 200; // smaller workload for debug builds
#endif
    var rounds = 10;
    var rng = new Random(0); // fixed seed => reproducible values
    var values = new TestValue[count];
    for (int i = 0; i < count; i++)
    {
        var value = TestValue.Create(rng, false); // TODO with true, currently doubles are not equal in some cases, see upstream issue 83
        values[i] = value;
    }
    var dest = db;
    int payloadSize = 0;
    // Write phase: SizeOf must agree with what Write actually produced.
    for (int r = 0; r < rounds; r++)
    {
        using (Benchmark.Run("Write", count))
        {
            // Restart at the buffer's beginning each round.
            dest = db;
            payloadSize = 0;
            for (int i = 0; i < count; i++)
            {
                var ts = TimeService.Default.CurrentTime; // NOTE(review): unused
                var value = values[i];
                var size = BinarySerializer.SizeOf(in value, out var tmpBuffer, SerializationFormat.Json);
                var written = BinarySerializer.Write(in value, dest, tmpBuffer, SerializationFormat.Json);
                if (size != written)
                {
                    Assert.Fail($"size {size} != written {written}");
                }
                dest = dest.Slice(written);
                payloadSize += written;
            }
        }
    }
    // Read phase: every value must round-trip equal.
    for (int r = 0; r < rounds; r++)
    {
        using (Benchmark.Run("Read", count))
        {
            var readSource = db.Slice(0, payloadSize);
            for (int i = 0; i < count; i++)
            {
                var read = BinarySerializer.Read(readSource, out TestValue value1);
                readSource = readSource.Slice(read);
                var expected = values[i];
                if (!value1.Equals(expected))
                {
                    Assert.Fail($"value1 != values[i]");
                }
            }
        }
    }
    Benchmark.Dump();
    rm.Dispose();
}
// Snapshots the current key bindings into a new Settings object and persists
// it in binary format.
public void SaveSettings() //saves settings into a file in binary format
{
    Settings = new Settings(InputManager.Instance.KeyBindings);
    BinarySerializer.SaveSettings(Settings);
}
// Serializes `count` TestValues into a payload buffer, then for each supported
// compression method (GZip, LZ4, Zstd) verifies that Compress/Decompress
// reproduces the exact payload and every value reads back equal.
// NOTE(review): local `decomrpessed` (and its failure message) is a typo for
// "decompressed".
public void CouldCompressWithHeader(int seed, bool silent = false)
{
    // Three 1 MB buffers: source payload, compressed output, decompressed output.
    var rm = BufferPool.Retain(1000000);
    var db = new DirectBuffer(rm.Span);
    var rm1 = BufferPool.Retain(1000000);
    var db1 = new DirectBuffer(rm1.Span);
    var rm2 = BufferPool.Retain(1000000);
    var db2 = new DirectBuffer(rm2.Span);
    var count = 1000;
    var rng = new Random(seed);
    var values = new TestValue[count];
    var dest = db;
    var payloadSize = 0;
    // Write phase: SizeOf must agree with what Write actually produced.
    for (int i = 0; i < count; i++)
    {
        var ts = TimeService.Default.CurrentTime; // NOTE(review): unused
        var value = TestValue.Create(rng, false); // TODO with true, currently doubles are not equal in some cases, see upstream issue 83
        values[i] = value;
        var size = BinarySerializer.SizeOf(in value, out var tmpBuffer, SerializationFormat.Json);
        var written = BinarySerializer.Write(in value, dest, in tmpBuffer, SerializationFormat.Json);
        if (size != written)
        {
            Assert.Fail($"size {size} != written {written}");
        }
        dest = dest.Slice(written);
        payloadSize += written;
    }
    if (!silent)
    {
        Console.WriteLine($"Payload size: {payloadSize}");
    }
    //Settings.ZlibCompressionLevel = 9;
    //Settings.LZ4CompressionLevel = 9;
    //Settings.ZstdCompressionLevel = 9;
    foreach (var method in new[] { CompressionMethod.GZip, CompressionMethod.Lz4, CompressionMethod.Zstd })
    {
        var compressed = BinarySerializer.Compress(db.Slice(0, payloadSize).Span, db1.Span, method);
        if (!silent)
        {
            Console.WriteLine(
                $"{method}: compressed size: {compressed}, ratio: {Math.Round(payloadSize / (compressed * 1.0), 2)}");
        }
        var decomrpessed = BinarySerializer.Decompress(db1.Slice(0, compressed).Span, db2.Span, method);
        if (decomrpessed != payloadSize)
        {
            Assert.Fail($"decomrpessed {decomrpessed} != payloadSize {payloadSize}");
        }
        // Read back every value from the decompressed buffer.
        // NOTE(review): db2 is advanced by this loop and never reset, so each
        // subsequent method decompresses into a fresh region of the 1 MB
        // buffer — verify the buffer stays large enough for all methods.
        for (int i = 0; i < count; i++)
        {
            var read = BinarySerializer.Read(db2, out TestValue value1);
            db2 = db2.Slice(read);
            var expected = values[i];
            if (!value1.Equals(expected))
            {
                Assert.Fail($"value1 != values[i]");
            }
        }
    }
    rm.Dispose();
    rm1.Dispose();
    rm2.Dispose();
}
// Persists the given request to a binary file at the supplied path.
private void SerializeRequest(RestRequest restRequest, string filePath) =>
    BinarySerializer.WriteToBinaryFile(filePath, restRequest);
/// <summary>Saves the current instance to file.</summary>
protected void Save() => BinarySerializer.Save(className, this);
/// <inheritdoc/>
void IDataSerializable.Serialize(BinarySerializer serializer) =>
    serializer.Serialize(ref _code);
// Deserialization constructor: state is restored entirely by Load.
public CentroidData(BinarySerializer reader) => Load(reader);
// Configures the underlying binary serializer for big-endian wire format.
public MessageSerializer()
{
    var serializer = new BinarySerializer();
    serializer.Endianness = BinarySerialization.Endianness.Big;
    _binSerializer = serializer;
}
// Serializes an H3D scene to disk: sets up the section layout (contents,
// strings, commands, raw data/ext, relocation), serializes the scene, fills in
// the header's address/length table, converts absolute pointers to relative
// ones, and finally rewrites the header at the start of the file.
public static void Save(string FileName, H3D Scene)
{
    using (FileStream FS = new FileStream(FileName, FileMode.Create))
    {
        H3DHeader Header = new H3DHeader();
        H3DRelocator Relocator = new H3DRelocator(FS, Header);
        BinarySerializer Serializer = new BinarySerializer(FS, GetSerializationOptions());
        Section Contents = Serializer.Sections[(uint)H3DSectionId.Contents];
        Contents.Header = Header;
        /*
         * Those comparisons are used to sort Strings and data buffers.
         * Strings are sorted in alphabetical order (like on the original file),
         * while buffers places textures first, and then vertex/index data after.
         * It's unknown why textures needs to come first, but placing the textures
         * at the end or at random order causes issues on the game.
         * It's most likely an alignment issue.
         */
        Comparison <RefValue> CompStr = H3DComparers.GetComparisonStr();
        Comparison <RefValue> CompRaw = H3DComparers.GetComparisonRaw();
        // Section alignments: strings at 0x10, everything else at 0x80.
        Section Strings = new Section(0x10, CompStr);
        Section Commands = new Section(0x80);
        Section RawData = new Section(0x80, CompRaw);
        Section RawExt = new Section(0x80, CompRaw);
        Section Relocation = new Section();
        // Both string types share the Strings section.
        Serializer.AddSection((uint)H3DSectionId.Strings, Strings, typeof(string));
        Serializer.AddSection((uint)H3DSectionId.Strings, Strings, typeof(H3DStringUtf16));
        Serializer.AddSection((uint)H3DSectionId.Commands, Commands, typeof(uint[]));
        Serializer.AddSection((uint)H3DSectionId.RawData, RawData);
        Serializer.AddSection((uint)H3DSectionId.RawExt, RawExt);
        Serializer.AddSection((uint)H3DSectionId.Relocation, Relocation);
        // Copy scene-level versioning/flags into the header.
        Header.BackwardCompatibility = Scene.BackwardCompatibility;
        Header.ForwardCompatibility = Scene.ForwardCompatibility;
        Header.ConverterVersion = Scene.ConverterVersion;
        Header.Flags = Scene.Flags;
        Serializer.Serialize(Scene);
        // Address table covers both raw-data and raw-ext pointers (4 bytes each).
        Header.AddressCount = (ushort)RawData.Values.Count;
        Header.AddressCount += (ushort)RawExt.Values.Count;
        Header.UnInitDataLength = Header.AddressCount * 4;
        // Record where each section ended up and how large it is.
        Header.ContentsAddress = Contents.Position;
        Header.StringsAddress = Strings.Position;
        Header.CommandsAddress = Commands.Position;
        Header.RawDataAddress = RawData.Position;
        Header.RawExtAddress = RawExt.Position;
        Header.RelocationAddress = Relocation.Position;
        Header.ContentsLength = Contents.Length;
        Header.StringsLength = Strings.Length;
        Header.CommandsLength = Commands.Length;
        Header.RawDataLength = RawData.Length;
        Header.RawExtLength = RawExt.Length;
        // Convert absolute file offsets to section-relative ones, then rewrite
        // the (now complete) header at the start of the file.
        Relocator.ToRelative(Serializer);
        FS.Seek(0, SeekOrigin.Begin);
        Serializer.WriteValue(Header);
    }
}
// Deserialization constructor: state is restored entirely by Load.
// Load throws ArgumentNullException and serialization-related exceptions.
public LabeledDataset(BinarySerializer reader) => Load(reader);
// *** ISerializable interface implementation ***
// Intentionally a no-op: this type has no state of its own to persist.
public void Save(BinarySerializer writer)
{
}
// Serialization is not supported for this type.
public void Save(BinarySerializer dummy) => throw new NotImplementedException();
// Entry point for the blog-authorship feature-evaluation pipeline:
//  1. parses each blog XML file, computing stylometric features plus
//     chunk-based features M04-M13;
//  2. exports the dataset to Weka ARFF and Orange TAB files;
//  3. evaluates each feature in isolation with several classifiers using
//     10-fold cross-validation;
//  4. trains and saves full models and writes a performance report.
// Console messages are in Slovene (the corpus language).
static void Main(string[] args)
{
    Random rnd = new Random(1); // fixed seed => reproducible shuffles/folds
    string[] featureNames = "ttr,brunet,honore,hl,ttrLemma,brunetLemma,honoreLemma,hlLemma,ari,flesch,fog,rWords,rChars,rSyllables,rComplex,M04,M05,M06,M07,M08,M09,M10,M11,M12,M13".Split(',');
    LabeledDataset <BlogMetaData, SparseVector <double> > dataset = new LabeledDataset <BlogMetaData, SparseVector <double> >();
    Console.WriteLine("Analiziram besedila...");
    foreach (string fileName in Directory.GetFiles(Config.DataFolder, "*.xml"))
    {
        // load XML
        Console.WriteLine("Datoteka {0}...", fileName);
        XmlDocument doc = new XmlDocument();
        // Strip the TEI namespace so the plain XPath queries below work.
        doc.LoadXml(File.ReadAllText(fileName).Replace("xmlns=\"http://www.tei-c.org/ns/1.0\"", ""));
        Corpus corpus = new Corpus();
        corpus.LoadFromXmlFile(fileName, /*tagLen=*/ int.MaxValue);
#if TEST_CHUNKER
        Text text = null;
#else
        Text text = new Text(corpus, doc.SelectSingleNode("//header/naslov").InnerText, doc.SelectSingleNode("//header/blog").InnerText /*blog identifier is used as author identifier*/);
        text.ComputeFeatures(); // compute Detextive features
#endif
        // run chunker
        Console.WriteLine("Racunam znacilke...");
        ArrayList <Chunk> chunks = Chunker.GetChunks(doc);
        chunks = new ArrayList <Chunk>(chunks.Where(x => !x.mInner)); // get non-inner chunks only
        chunks.ForEach(x => x.mType = MapChunkType(x.mType)); // move chunks from Other_* to main categories
#if TEST_CHUNKER
        return;
#endif
        // get blog meta-data
        BlogMetaData metaData = new BlogMetaData();
        metaData.mAuthorAge = doc.SelectSingleNode("//header/avtorStarost").InnerText;
        metaData.mAuthorEducation = doc.SelectSingleNode("//header/avtorIzobrazba").InnerText;
        metaData.mAuthorGender = doc.SelectSingleNode("//header/avtorSpol").InnerText;
        metaData.mAuthorLocation = doc.SelectSingleNode("//header/avtorRegija").InnerText;
        metaData.mBlog = doc.SelectSingleNode("//header/blog").InnerText;
        // compute features M04-M13 from Stamatatos et al.: Automatic Text Categorization in Terms of Genre and Author (2000)
        double totalChunks = chunks.Count;
        double[] M = new double[10];
        double numNP = chunks.Count(x => x.mType == ChunkType.NP);
        double numVP = chunks.Count(x => x.mType == ChunkType.VP);
        double numAP = chunks.Count(x => x.mType == ChunkType.AP);
        double numPP = chunks.Count(x => x.mType == ChunkType.PP);
        double numCON = chunks.Count(x => x.mType == ChunkType.CON);
        // M04-M08: relative frequency of each chunk type.
        if (totalChunks > 0)
        {
            M[0] = numNP / totalChunks;
            M[1] = numVP / totalChunks;
            M[2] = numAP / totalChunks;
            M[3] = numPP / totalChunks;
            M[4] = numCON / totalChunks;
        }
        // M09-M13: average words per chunk, by chunk type (0 when no chunks of that type).
        double numWordsNP = chunks.Where(x => x.mType == ChunkType.NP).Select(x => x.mItems.Count).Sum();
        M[5] = numNP == 0 ? 0 : (numWordsNP / numNP);
        double numWordsVP = chunks.Where(x => x.mType == ChunkType.VP).Select(x => x.mItems.Count).Sum();
        M[6] = numVP == 0 ? 0 : (numWordsVP / numVP);
        double numWordsAP = chunks.Where(x => x.mType == ChunkType.AP).Select(x => x.mItems.Count).Sum();
        M[7] = numAP == 0 ? 0 : (numWordsAP / numAP);
        double numWordsPP = chunks.Where(x => x.mType == ChunkType.PP).Select(x => x.mItems.Count).Sum();
        M[8] = numPP == 0 ? 0 : (numWordsPP / numPP);
        double numWordsCON = chunks.Where(x => x.mType == ChunkType.CON).Select(x => x.mItems.Count).Sum();
        M[9] = numCON == 0 ? 0 : (numWordsCON / numCON);
        // create dataset
        SparseVector <double> vec = new SparseVector <double>();
        int i = 0;
        foreach (string featureName in "ttr,brunet,honore,hl,ttrLemma,brunetLemma,honoreLemma,hlLemma,ari,flesch,fog,rWords,rChars,rSyllables,rComplex".Split(','))
        {
            // NaN/Inf feature values (e.g. from degenerate texts) are zeroed out.
            if (double.IsNaN(text.mFeatures[featureName]) || double.IsInfinity(text.mFeatures[featureName]))
            {
                vec[i++] = 0;
            }
            else
            {
                vec[i++] = text.mFeatures[featureName];
            }
        }
        foreach (double val in M)
        {
            vec[i++] = val;
        }
        dataset.Add(new LabeledExample <BlogMetaData, SparseVector <double> >(metaData, vec));
        string htmlFileName = Config.HtmlFolder + "\\" + Path.GetFileNameWithoutExtension(fileName) + ".html";
        Output.SaveHtml(featureNames, vec, doc, chunks, htmlFileName);
    }
    // save as Orange and Weka file
    Console.WriteLine("Zapisujem datoteke Weka ARFF in Orange TAB...");
    foreach (ClassType classType in new ClassType[] { ClassType.AuthorName, ClassType.AuthorAge, ClassType.AuthorGender, ClassType.AuthorEducation, ClassType.AuthorLocation })
    {
        Output.SaveArff(featureNames, dataset, classType, Config.OutputFolder + "\\" + string.Format("OPA-{0}.arff", classType));
        Output.SaveTab(featureNames, dataset, classType, Config.OutputFolder + "\\" + string.Format("OPA-{0}.tab", classType));
    }
    // evaluate features via classification
    Console.WriteLine("Evalviram znacilke s klasifikacijskimi modeli...");
    PerfData <string> perfData = new PerfData <string>();
    ArrayList <Pair <string, IModel <string> > > models = new ArrayList <Pair <string, IModel <string> > >();
    // create classifiers
    NearestCentroidClassifier <string> ncc = new NearestCentroidClassifier <string>();
    ncc.Similarity = new SingleFeatureSimilarity();
    models.Add(new Pair <string, IModel <string> >("NCC", ncc));
    //KnnClassifier<string, SparseVector<double>> knn = new KnnClassifier<string, SparseVector<double>>(new SingleFeatureSimilarity());
    //models.Add(new Pair<string, IModel<string>>("kNN", knn)); // *** kNN is too slow
    SvmMulticlassClassifier <string> svm = new SvmMulticlassClassifier <string>();
    models.Add(new Pair <string, IModel <string> >("SVM", svm));
    MajorityClassifier <string, SparseVector <double> > maj = new MajorityClassifier <string, SparseVector <double> >();
    models.Add(new Pair <string, IModel <string> >("Majority", maj));
    // Fallback classifier used when a fold's train set contains a single class.
    MajorityClassifier <string, SparseVector <double> > backupCfy = new MajorityClassifier <string, SparseVector <double> >();
    foreach (Pair <string, IModel <string> > modelInfo in models) // iterate over different classifiers
    {
        Console.WriteLine("Kasifikacijski model: {0}...", modelInfo.First);
        foreach (ClassType classType in new ClassType[] { ClassType.AuthorName, ClassType.AuthorAge, ClassType.AuthorEducation, ClassType.AuthorGender, ClassType.AuthorLocation }) // iterate over different class types
        {
            Console.WriteLine("Ciljni razred: {0}...", classType);
            for (int fIdx = 0; fIdx < featureNames.Count(); fIdx++) // iterate over different features
            {
                Console.WriteLine("Znacilka: {0}...", featureNames[fIdx]);
                LabeledDataset <string, SparseVector <double> > datasetWithSingleFeature = CreateSingleFeatureDataset(dataset, classType, fIdx);
                datasetWithSingleFeature.Shuffle(rnd);
                LabeledDataset <string, SparseVector <double> > trainSet, testSet;
                for (int foldNum = 1; foldNum <= 10; foldNum++)
                {
                    Console.WriteLine("Sklop " + foldNum + " / 10...");
                    datasetWithSingleFeature.SplitForCrossValidation(/*numFolds=*/ 10, foldNum, out trainSet, out testSet);
                    IModel <string> model = modelInfo.Second;
                    backupCfy.Train(trainSet);
                    // if there is only one class in trainSet, switch to MajorityClassifier
                    if (((IEnumerable <LabeledExample <string, SparseVector <double> > >)trainSet).Select(x => x.Label).Distinct().Count() == 1)
                    {
                        model = backupCfy;
                    }
                    else
                    {
                        // Trained SVM folds are cached on disk to avoid retraining.
                        string cacheFileName = Config.OutputFolder + "\\svm-" + classType + "-" + featureNames[fIdx] + "-" + foldNum + ".bin";
                        if (model is SvmMulticlassClassifier <string> && File.Exists(cacheFileName))
                        {
                            using (BinarySerializer bs = new BinarySerializer(cacheFileName, FileMode.Open))
                            {
                                ((SvmMulticlassClassifier <string>)model).Load(bs);
                            }
                        }
                        else
                        {
                            model.Train(trainSet);
                        }
#if CACHE_MODELS
                        if (model is SvmMulticlassFast <string> )
                        {
                            using (BinarySerializer bs = new BinarySerializer(cacheFileName, FileMode.Create))
                            {
                                model.Save(bs);
                            }
                        }
#endif
                    }
                    foreach (LabeledExample <string, SparseVector <double> > lblEx in testSet)
                    {
                        Prediction <string> pred = model.Predict(lblEx.Example);
                        if (pred.Count == 0) { pred = backupCfy.Predict(lblEx.Example); } // if the model is unable to make a prediction, use MajorityClassifier instead
                        perfData.GetPerfMatrix(classType.ToString(), modelInfo.First + "\t" + featureNames[fIdx], foldNum).AddCount(lblEx.Label, pred.BestClassLabel);
                    }
                }
            }
        }
    }
    // train full models
    Console.WriteLine("Treniram klasifikacijske modele...");
    models.Clear();
    SvmMulticlassClassifier <string> svmFull = new SvmMulticlassClassifier <string>();
    models.Add(new Pair <string, IModel <string> >("SVM", svmFull));
    //NearestCentroidClassifier<string> nccFull = new NearestCentroidClassifier<string>();
    //nccFull.Similarity = new ManhattanSimilarity();
    //models.Add(new Pair<string, IModel<string>>("NCC", nccFull));
    foreach (Pair <string, IModel <string> > modelInfo in models) // iterate over different classifiers
    {
        Console.WriteLine("Kasifikacijski model: {0}...", modelInfo.First);
        IModel <string> model = modelInfo.Second;
        foreach (ClassType classType in new ClassType[] { ClassType.AuthorName, ClassType.AuthorAge, ClassType.AuthorEducation, ClassType.AuthorGender, ClassType.AuthorLocation }) // iterate over different class types
        {
            Console.WriteLine("Ciljni razred: {0}...", classType);
            LabeledDataset <string, SparseVector <double> > nrmDataset = CreateNormalizedDataset(dataset, classType);
            nrmDataset.Shuffle(rnd);
            LabeledDataset <string, SparseVector <double> > trainSet, testSet;
            for (int foldNum = 1; foldNum <= 10; foldNum++)
            {
                Console.WriteLine("Sklop " + foldNum + " / 10...");
                nrmDataset.SplitForCrossValidation(/*numFolds=*/ 10, foldNum, out trainSet, out testSet);
                backupCfy.Train(trainSet);
                // if there is only one class in trainSet, switch to MajorityClassifier
                if (((IEnumerable <LabeledExample <string, SparseVector <double> > >)trainSet).Select(x => x.Label).Distinct().Count() == 1)
                {
                    model = backupCfy;
                }
                else
                {
                    string cacheFileName = Config.OutputFolder + "\\svm-" + classType + "-full-" + foldNum + ".bin";
                    if (model is SvmMulticlassClassifier <string> && File.Exists(cacheFileName))
                    {
                        using (BinarySerializer bs = new BinarySerializer(cacheFileName, FileMode.Open))
                        {
                            ((SvmMulticlassClassifier <string>)model).Load(bs);
                        }
                    }
                    else
                    {
                        model.Train(trainSet);
                    }
#if CACHE_MODELS
                    if (model is SvmMulticlassFast <string> )
                    {
                        using (BinarySerializer bs = new BinarySerializer(cacheFileName, FileMode.Create))
                        {
                            model.Save(bs);
                        }
                    }
#endif
                }
                foreach (LabeledExample <string, SparseVector <double> > lblEx in testSet)
                {
                    Prediction <string> pred = model.Predict(lblEx.Example);
                    if (pred.Count == 0) { pred = backupCfy.Predict(lblEx.Example); } // if the model is unable to make a prediction, use MajorityClassifier instead
                    perfData.GetPerfMatrix(classType.ToString(), modelInfo.First + "\tfull", foldNum).AddCount(lblEx.Label, pred.BestClassLabel);
                }
            }
            // save model
            string modelFileName = Config.OutputFolder + "\\" + modelInfo.First + "-" + classType + ".model";
            if (!File.Exists(modelFileName))
            {
                using (BinarySerializer bs = new BinarySerializer(modelFileName, FileMode.Create))
                {
                    model.Train(nrmDataset);
                    model.Save(bs);
                }
            }
        }
    }
    // Write the aggregated performance report.
    using (StreamWriter w = new StreamWriter(Config.OutputFolder + "\\ClassifierEval.txt"))
    {
        w.WriteLine("*** Macro F1 ***");
        w.WriteLine();
        w.WriteLine("\t" + perfData.ToString(null, PerfMetric.MacroF1));
        w.WriteLine();
        w.WriteLine("*** Micro F1 ***");
        w.WriteLine();
        w.WriteLine("\t" + perfData.ToString(null, PerfMetric.MicroF1));
        w.WriteLine();
        w.WriteLine("*** Macro accuracy ***");
        w.WriteLine();
        w.WriteLine("\t" + perfData.ToString(null, PerfMetric.MacroAccuracy));
        w.WriteLine();
        w.WriteLine("*** Micro accuracy ***");
        w.WriteLine();
        w.WriteLine("\t" + perfData.ToString(null, PerfMetric.MicroAccuracy));
    }
    // all done
    Console.WriteLine("Koncano.");
}
/// <summary>Fetches the raw snapshot for the topic and deserializes it into quotes.</summary>
public IEnumerable <Quote> GetAndParseSnapshot(string topic)
{
    var rawSnapshot = base.GetSnapshot(topic);
    return BinarySerializer <IEnumerable <Quote> > .DeSerializeFromByteArray(rawSnapshot);
}
// *** ISerializable interface implementation ***
// Persists the item collection; Load() reads it back in the same order.
public void Save(BinarySerializer writer)
{
    Utils.ThrowException(writer == null ? new ArgumentNullException("writer") : null);
    mItems.Save(writer); // throws serialization-related exceptions
}
/// <inheritdoc/>
// NOTE: field order defines the wire format — do not reorder.
void IDataSerializable.Serialize(BinarySerializer serializer)
{
    serializer.Serialize(ref Name);
    serializer.Serialize(ref Duration);
    serializer.Serialize(ref Channels);
}
// Handles delivery of a raw AMQP message: decodes the body with the binary
// string serializer and raises the message-received notification.
private void ConsumerReceived(IBasicConsumer sender, BasicDeliverEventArgs args)
{
    var bodySerializer = new BinarySerializer<string>();
    var decodedMessage = bodySerializer.DeSerialize(args.Body);
    OnMessageReceived(decodedMessage);
}