/// <summary>
/// Same as GetHistoryOrders, but the result is packed with MarketOrderSerializer
/// into a byte buffer.
/// </summary>
public RequestStatus GetHistoryOrdersCompressed(int? accountId, DateTime? startDate, out byte[] buffer)
{
    buffer = null;
    List<MarketOrder> orders;
    var retVal = GetHistoryOrders(accountId, startDate, out orders);

    // Nothing to serialize — hand back the underlying request status as-is.
    if (orders == null || orders.Count == 0)
        return retVal;

    try
    {
        using (var writer = new SerializationWriter())
        {
            writer.Write(orders);
            writer.Flush();
            buffer = writer.ToArray();
        }
    }
    catch (Exception ex)
    {
        Logger.Error("GetHistoryOrdersCompressed() - ошибка сериализации", ex);
        return RequestStatus.SerializationError;
    }

    return retVal;
}
/// <summary>
/// Round-trips every single char below 55296 (0xD800, the first surrogate
/// code unit) through a UTF-16 encoded writer/reader pair.
/// </summary>
public void TestUTF16Chars()
{
    using (var stream = new MemoryStream())
    using (var writer = new SerializationWriter(stream, new UnicodeEncoding()))
    using (var reader = new SerializationReader(stream, new UnicodeEncoding()))
    {
        const int limit = 55296;

        for (var c = 0; c < limit; c++)
            writer.Write((char)c);
        writer.Flush();

        stream.Position = 0;
        for (var c = 0; c < limit; c++)
            Assert.AreEqual((char)c, reader.Read<char>());
    }
}
/// <summary>
/// Round-trips one large string containing every char below 55296 (0xD800)
/// through a UTF-16 encoded writer/reader pair.
/// </summary>
public void TestUTF16String()
{
    using (var stream = new MemoryStream())
    using (var writer = new SerializationWriter(stream, new UnicodeEncoding()))
    using (var reader = new SerializationReader(stream, new UnicodeEncoding()))
    {
        const int limit = 55296;
        var builder = new StringBuilder(limit);
        for (var c = 0; c < limit; c++)
            builder.Append((char)c);
        var expected = builder.ToString();

        writer.Write(expected);
        writer.Flush();

        stream.Position = 0;
        Assert.AreEqual(expected, reader.Read<string>());
    }
}
/// <summary>
/// Persists a single index into its own file.
/// </summary>
/// <returns> The filename (without directory) of the persisted index. </returns>
/// <param name='indexName'> Index name. </param>
/// <param name='index'> Index. </param>
/// <param name='path'> Save point path prefix. </param>
private static String SaveIndex(string indexName, IIndex index, string path)
{
    var fileName = path + Constants.IndexSaveString + indexName;

    using (var stream = File.Create(fileName, Constants.BufferSize, FileOptions.SequentialScan))
    {
        var writer = new SerializationWriter(stream);
        // Name and plugin name first, so the loader knows which plugin to instantiate.
        writer.Write(indexName);
        writer.Write(index.PluginName);
        index.Save(writer);
        writer.UpdateHeader();
        writer.Flush();
        stream.Flush();
    }

    return Path.GetFileName(fileName);
}
/// <summary>
/// Persists a single service into its own file.
/// </summary>
/// <param name="serviceName">Service name.</param>
/// <param name="service">Service.</param>
/// <param name="path">Save point path prefix.</param>
/// <returns>The filename (without directory) of the persisted service.</returns>
private static String SaveService(string serviceName, IService service, string path)
{
    var fileName = path + Constants.ServiceSaveString + serviceName;

    using (var stream = File.Create(fileName, Constants.BufferSize, FileOptions.SequentialScan))
    {
        var writer = new SerializationWriter(stream);
        // Name and plugin name first, so the loader knows which plugin to instantiate.
        writer.Write(serviceName);
        writer.Write(service.PluginName);
        service.Save(writer);
        writer.UpdateHeader();
        writer.Flush();
        stream.Flush();
    }

    return Path.GetFileName(fileName);
}
/// <summary>
/// Same as GetHistoryOrders, but the orders are serialized into a byte buffer.
/// Leaves <paramref name="buffer"/> null when there are no orders.
/// </summary>
public override RequestStatus GetHistoryOrdersCompressed(int? accountId, DateTime? startDate, out byte[] buffer)
{
    buffer = null;
    List<MarketOrder> ordlist;
    var status = GetHistoryOrders(accountId, startDate, out ordlist);
    if (ordlist == null || ordlist.Count == 0)
    {
        return status;
    }
    using (var writer = new SerializationWriter())
    {
        // BUG FIX: previously wrote the undeclared identifier "orders"
        // (a copy-paste from the non-override sibling); the local here is "ordlist".
        writer.Write(ordlist);
        writer.Flush();
        buffer = writer.ToArray();
    }
    return status;
}
/// <summary>
/// Writes MULTI_TEST_COUNT random bytes one at a time and verifies the round trip.
/// </summary>
public void TestBytes()
{
    var rng = new Random();
    using (var stream = new MemoryStream())
    using (var writer = new SerializationWriter(stream))
    using (var reader = new SerializationReader(stream))
    {
        var expected = new byte[Config.MULTI_TEST_COUNT];
        rng.NextBytes(expected);

        foreach (var value in expected)
            writer.Write(value);
        writer.Flush();

        stream.Position = 0;
        foreach (var value in expected)
            Assert.AreEqual(value, reader.Read<byte>());
    }
}
/// <summary>
/// Serializes a List&lt;int&gt; of random values as a whole and verifies the round trip.
/// </summary>
public void TestBasicList()
{
    var rng = new Random();
    using (var stream = new MemoryStream())
    using (var writer = new SerializationWriter(stream))
    using (var reader = new SerializationReader(stream))
    {
        var expected = new List<int>(Config.MULTI_TEST_COUNT);
        for (var i = 0; i < Config.MULTI_TEST_COUNT; i++)
            expected.Add(rng.Next(int.MinValue, int.MaxValue));

        writer.Write(expected);
        writer.Flush();

        stream.Position = 0;
        var actual = reader.Read<List<int>>();
        for (var i = 0; i < expected.Count; i++)
            Assert.AreEqual(expected[i], actual[i]);
    }
}
/// <summary>
/// Serializes a Dictionary&lt;int, float&gt; of random entries as a whole
/// and verifies every entry survives the round trip.
/// </summary>
public void TestBasicDictionary()
{
    var rng = new Random();
    using (var stream = new MemoryStream())
    using (var writer = new SerializationWriter(stream))
    using (var reader = new SerializationReader(stream))
    {
        var expected = new Dictionary<int, float>(Config.MULTI_TEST_COUNT);
        for (var i = 0; i < Config.MULTI_TEST_COUNT; i++)
            expected[rng.Next(int.MinValue, int.MaxValue)] = (float)rng.NextDouble();

        writer.Write(expected);
        writer.Flush();

        stream.Position = 0;
        var actual = reader.Read<Dictionary<int, float>>();
        foreach (var pair in expected)
            Assert.AreEqual(pair.Value, actual[pair.Key]);
    }
}
/// <summary>
/// Serializes an int[] of random values as a whole and verifies the round trip.
/// </summary>
public void TestBasicArray()
{
    var rng = new Random();
    using (var stream = new MemoryStream())
    using (var writer = new SerializationWriter(stream))
    using (var reader = new SerializationReader(stream))
    {
        var expected = new int[Config.MULTI_TEST_COUNT];
        for (var i = 0; i < expected.Length; i++)
            expected[i] = rng.Next(int.MinValue, int.MaxValue);

        writer.Write(expected);
        writer.Flush();

        stream.Position = 0;
        var actual = reader.Read<int[]>();
        for (var i = 0; i < actual.Length; i++)
            Assert.AreEqual(expected[i], actual[i]);
    }
}
/// <summary>
/// Persists one bunch (partition) of graph elements into its own file.
/// </summary>
/// <returns> The filename (without directory) of the graph element bunch. </returns>
/// <param name='range'> Half-open id range [Item1, Item2). </param>
/// <param name='graphElements'> Graph elements. </param>
/// <param name='pathToSavePoint'> Path to save point basis. </param>
private static String SaveBunch(Tuple<Int32, Int32> range, BigList<AGraphElement> graphElements, String pathToSavePoint)
{
    var bunchFileName = pathToSavePoint + Constants.GraphElementsSaveString + range.Item1 + "_to_" + range.Item2;

    using (var bunchFile = File.Create(bunchFileName, Constants.BufferSize, FileOptions.SequentialScan))
    {
        var writer = new SerializationWriter(bunchFile);
        // The covered range is written first so the loader knows what follows.
        writer.Write(range.Item1);
        writer.Write(range.Item2);

        for (var elementId = range.Item1; elementId < range.Item2; elementId++)
        {
            AGraphElement element;
            // Gaps in the id space are legal; persist the null marker for them.
            if (!graphElements.TryGetElementOrDefault(out element, elementId))
            {
                writer.WriteOptimized(SerializedNull); // 2 for null
                continue;
            }

            // Dispatch on the concrete kind: vertex or edge.
            var vertex = element as VertexModel;
            if (vertex != null)
            {
                WriteVertex(vertex, writer);
            }
            else
            {
                WriteEdge((EdgeModel)element, writer);
            }
        }

        writer.UpdateHeader();
        writer.Flush();
        bunchFile.Flush();
    }

    return Path.GetFileName(bunchFileName);
}
/// <summary>
/// Writes MULTI_TEST_COUNT random booleans one at a time and verifies the round trip.
/// </summary>
public void TestBools()
{
    var rng = new Random();
    using (var stream = new MemoryStream())
    using (var writer = new SerializationWriter(stream))
    using (var reader = new SerializationReader(stream))
    {
        var expected = new bool[Config.MULTI_TEST_COUNT];
        for (var i = 0; i < expected.Length; i++)
        {
            expected[i] = rng.Next(0, 2) == 1;
            writer.Write(expected[i]);
        }
        writer.Flush();

        stream.Position = 0;
        for (var i = 0; i < expected.Length; i++)
            Assert.AreEqual(expected[i], reader.Read<bool>());
    }
}
/// <summary>
/// Writes MULTI_TEST_COUNT random UTC DateTimes one at a time and verifies the round trip.
/// </summary>
public void TestDateTimes()
{
    var rng = new Random();
    using (var stream = new MemoryStream())
    using (var writer = new SerializationWriter(stream))
    using (var reader = new SerializationReader(stream))
    {
        var expected = new DateTime[Config.MULTI_TEST_COUNT];
        for (var i = 0; i < expected.Length; i++)
        {
            // Day capped at 28 so every random month/year combination is valid.
            expected[i] = new DateTime(
                rng.Next(1970, 5623), rng.Next(1, 13), rng.Next(1, 29),
                rng.Next(0, 24), rng.Next(0, 60), rng.Next(0, 60),
                DateTimeKind.Utc);
            writer.Write(expected[i]);
        }
        writer.Flush();

        stream.Position = 0;
        for (var i = 0; i < expected.Length; i++)
            Assert.AreEqual(expected[i], reader.Read<DateTime>());
    }
}
/// <summary>
/// Writes a handful of boxed values through the object overload and verifies
/// each one comes back with the right runtime type and value.
/// </summary>
public void TestObjects()
{
    var rng = new Random();
    using (var stream = new MemoryStream())
    using (var writer = new SerializationWriter(stream))
    using (var reader = new SerializationReader(stream))
    {
        var expected = new object[]
        {
            rng.Next(int.MinValue, int.MaxValue),
            "test",
            EnumTests.ByteEnum.h,
            0.0002f,
            0.1234d,
            (decimal)1000
        };

        // Static type is object, so the object overload of Write is used for every element.
        foreach (var value in expected)
            writer.Write(value);
        writer.Flush();

        stream.Position = 0;
        for (var i = 0; i < expected.Length; i++)
        {
            // Index 2 holds the enum; it is cast back before comparison,
            // mirroring how the value was written.
            if (i == 2)
                Assert.AreEqual(expected[i], (EnumTests.ByteEnum)reader.Read<object>());
            else
                Assert.AreEqual(expected[i], reader.Read<object>());
        }
    }
}
/// <summary>
/// Writes MULTI_TEST_COUNT random shorts (built from random byte patterns)
/// one at a time and verifies the round trip.
/// </summary>
public void TestShorts()
{
    var rng = new Random();
    using (var stream = new MemoryStream())
    using (var writer = new SerializationWriter(stream))
    using (var reader = new SerializationReader(stream))
    {
        var expected = new short[Config.MULTI_TEST_COUNT];
        var scratch = new byte[2];
        for (var i = 0; i < expected.Length; i++)
        {
            rng.NextBytes(scratch);
            expected[i] = BitConverter.ToInt16(scratch, 0);
            writer.Write(expected[i]);
        }
        writer.Flush();

        stream.Position = 0;
        for (var i = 0; i < expected.Length; i++)
            Assert.AreEqual(expected[i], reader.Read<short>());
    }
}
/// <summary>
/// Save the specified graphElements, indices and pathToSavePoint.
/// Partitions the graph elements and saves each partition, every index and
/// every service in its own parallel task/file; the main save file records
/// the current id and the produced partition filenames.
/// </summary>
/// <param name='fallen8'> Fallen-8. </param>
/// <param name='graphElements'> Graph elements. </param>
/// <param name='path'> Path. </param>
/// <param name='savePartitions'> The number of save partitions for the graph elements. </param>
/// <param name="currentId">The current graph elemement identifier.</param>
internal static void Save(Fallen8 fallen8, BigList<AGraphElement> graphElements, String path, UInt32 savePartitions, Int32 currentId)
{
    // Create the new, empty data file.
    if (File.Exists(path))
    {
        //the newer save gets an timestamp
        path = path + Constants.VersionSeparator + DateTime.Now.ToBinary().ToString(CultureInfo.InvariantCulture);
    }

    using (var file = File.Create(path, Constants.BufferSize, FileOptions.SequentialScan))
    {
        var writer = new SerializationWriter(file, true);
        writer.Write(currentId);

        //create some futures to save as much as possible in parallel
        const TaskCreationOptions options = TaskCreationOptions.LongRunning;
        var f = new TaskFactory(CancellationToken.None, options, TaskContinuationOptions.None, TaskScheduler.Default);

        #region graph elements

        var graphElementCount = fallen8.VertexCount + fallen8.EdgeCount;
        Task<string>[] graphElementSaver;
        if (graphElementCount > 0)
        {
            var graphElementPartitions = CreatePartitions(graphElementCount, savePartitions);
            graphElementSaver = new Task<string>[graphElementPartitions.Count];
            for (var i = 0; i < graphElementPartitions.Count; i++)
            {
                // Loop-local copy so each task's closure captures its own
                // partition, not the loop variable.
                var partition = graphElementPartitions[i];
                graphElementSaver[i] = f.StartNew(() => SaveBunch(partition, graphElements, path));
            }
        }
        else
        {
            graphElementSaver = new Task<string>[0];
        }

        #endregion

        #region indices

        var indexSaver = new Task<string>[fallen8.IndexFactory.Indices.Count];
        var counter = 0;
        foreach (var aIndex in fallen8.IndexFactory.Indices)
        {
            // Loop-local copies for safe closure capture.
            var indexName = aIndex.Key;
            var index = aIndex.Value;
            indexSaver[counter] = f.StartNew(() => SaveIndex(indexName, index, path));
            counter++;
        }

        #endregion

        #region services

        var serviceSaver = new Task<string>[fallen8.ServiceFactory.Services.Count];
        counter = 0;
        foreach (var aService in fallen8.ServiceFactory.Services)
        {
            // Loop-local copies for safe closure capture.
            var serviceName = aService.Key;
            var service = aService.Value;
            serviceSaver[counter] = f.StartNew(() => SaveService(serviceName, service, path));
            counter++;
        }

        #endregion

        // Reading Task.Result below blocks until each save task has finished;
        // the main file then lists how many partition files exist and their names.
        writer.WriteOptimized(graphElementSaver.Length);
        foreach (var aFileStreamName in graphElementSaver)
        {
            writer.WriteOptimized(aFileStreamName.Result);
        }

        writer.WriteOptimized(indexSaver.Length);
        foreach (var aIndexFileName in indexSaver)
        {
            writer.WriteOptimized(aIndexFileName.Result);
        }

        writer.WriteOptimized(serviceSaver.Length);
        foreach (var aServiceFileName in serviceSaver)
        {
            writer.WriteOptimized(aServiceFileName.Result);
        }

        writer.UpdateHeader();
        writer.Flush();
        file.Flush();
    }
}
/// <summary>
/// Writes one service to a dedicated file below the save point.
/// </summary>
/// <param name="serviceName">Service name.</param>
/// <param name="service">Service.</param>
/// <param name="path">Path.</param>
/// <returns>The filename of the persisted service.</returns>
private static String SaveService(string serviceName, IService service, string path)
{
    var serviceFileName = string.Concat(path, Constants.ServiceSaveString, serviceName);

    using (var serviceFile = File.Create(serviceFileName, Constants.BufferSize, FileOptions.SequentialScan))
    {
        var serviceWriter = new SerializationWriter(serviceFile);
        // Header: service name, then the plugin that can re-create it.
        serviceWriter.Write(serviceName);
        serviceWriter.Write(service.PluginName);

        // The service serializes its own payload.
        service.Save(serviceWriter);

        serviceWriter.UpdateHeader();
        serviceWriter.Flush();
        serviceFile.Flush();
    }

    return Path.GetFileName(serviceFileName);
}
/// <summary>
/// Serializes a dictionary of composite objects as a whole and verifies
/// every nested field survives the round trip.
/// </summary>
public void TestComplexDictionary()
{
    var rng = new Random();
    using (var stream = new MemoryStream())
    using (var writer = new SerializationWriter(stream))
    using (var reader = new SerializationReader(stream))
    {
        var expected = new Dictionary<int, SIBFClass>(Config.MULTI_TEST_COUNT);
        for (var i = 0; i < Config.MULTI_TEST_COUNT; i++)
        {
            var item = new SIBFClass()
            {
                S = "test" + i,
                IBF = new IBFClass()
                {
                    I = rng.Next(int.MinValue, int.MaxValue),
                    B = rng.Next(2) == 1,
                    F = (float)rng.NextDouble()
                }
            };
            expected[rng.Next(int.MinValue, int.MaxValue)] = item;
        }

        writer.Write(expected);
        writer.Flush();

        stream.Position = 0;
        var actual = reader.Read<Dictionary<int, SIBFClass>>();
        foreach (var pair in expected)
        {
            var roundTripped = actual[pair.Key];
            Assert.AreEqual(pair.Value.S, roundTripped.S);
            Assert.AreEqual(pair.Value.IBF.I, roundTripped.IBF.I);
            Assert.AreEqual(pair.Value.IBF.B, roundTripped.IBF.B);
            Assert.AreEqual(pair.Value.IBF.F, roundTripped.IBF.F);
        }
    }
}
/// <summary>
/// Persists one bunch (partition) of graph elements into its own file.
/// </summary>
/// <returns> The filename (without directory) of the graph element bunch. </returns>
/// <param name='range'> Half-open id range [Item1, Item2). </param>
/// <param name='graphElements'> Graph elements. </param>
/// <param name='pathToSavePoint'> Path to save point basis. </param>
private static String SaveBunch(Tuple<Int32, Int32> range, BigList<AGraphElement> graphElements, String pathToSavePoint)
{
    var bunchFileName = pathToSavePoint + Constants.GraphElementsSaveString + range.Item1 + "_to_" + range.Item2;

    using (var bunchFile = File.Create(bunchFileName, Constants.BufferSize, FileOptions.SequentialScan))
    {
        var writer = new SerializationWriter(bunchFile);
        // The covered range is written first so the loader knows what follows.
        writer.Write(range.Item1);
        writer.Write(range.Item2);

        for (var elementId = range.Item1; elementId < range.Item2; elementId++)
        {
            var element = graphElements.GetElement(elementId);
            // Gaps in the id space are legal; persist the null marker for them.
            if (element == null)
            {
                writer.Write(SerializedNull); // 2 for null
                continue;
            }

            // Dispatch on the concrete kind: vertex or edge.
            var vertex = element as VertexModel;
            if (vertex != null)
            {
                WriteVertex(vertex, writer);
            }
            else
            {
                WriteEdge((EdgeModel)element, writer);
            }
        }

        writer.UpdateHeader();
        writer.Flush();
        bunchFile.Flush();
    }

    return Path.GetFileName(bunchFileName);
}
/// <summary>
/// Save the specified graphElements, indices and pathToSavePoint.
/// Partitions the graph elements and saves each partition, every index and
/// every service in its own parallel task/file; the main save file records
/// the current id and the produced partition filenames.
/// </summary>
/// <param name='fallen8'> Fallen-8. </param>
/// <param name='graphElements'> Graph elements. </param>
/// <param name='path'> Path. </param>
/// <param name='savePartitions'> The number of save partitions for the graph elements. </param>
/// <param name="currentId">The current graph elemement identifier.</param>
internal static void Save(Fallen8 fallen8, BigList<AGraphElement> graphElements, String path, UInt32 savePartitions, Int32 currentId)
{
    // Create the new, empty data file.
    if (File.Exists(path))
    {
        //the newer save gets an timestamp
        path = path + Constants.VersionSeparator + DateTime.Now.ToBinary().ToString(CultureInfo.InvariantCulture);
    }

    using (var file = File.Create(path, Constants.BufferSize, FileOptions.SequentialScan))
    {
        var writer = new SerializationWriter(file, true);
        writer.Write(currentId);

        //create some futures to save as much as possible in parallel
        const TaskCreationOptions options = TaskCreationOptions.LongRunning;
        var f = new TaskFactory(CancellationToken.None, options, TaskContinuationOptions.None, TaskScheduler.Default);

        #region graph elements

        var graphElementCount = Convert.ToUInt32(currentId);
        Task<string>[] graphElementSaver;
        if (graphElementCount > 0)
        {
            // NOTE(review): the increment widens the range by one so the last
            // id is covered by a partition — marked "Hack" by the author.
            graphElementCount++; //Hack
            var graphElementPartitions = CreatePartitions(graphElementCount, savePartitions);
            graphElementSaver = new Task<string>[graphElementPartitions.Count];
            for (var i = 0; i < graphElementPartitions.Count; i++)
            {
                // Loop-local copy so each task's closure captures its own
                // partition, not the loop variable.
                var partition = graphElementPartitions[i];
                graphElementSaver[i] = f.StartNew(() => SaveBunch(partition, graphElements, path));
            }
        }
        else
        {
            graphElementSaver = new Task<string>[0];
        }

        #endregion

        #region indices

        var indexSaver = new Task<string>[fallen8.IndexFactory.Indices.Count];
        var counter = 0;
        foreach (var aIndex in fallen8.IndexFactory.Indices)
        {
            // Loop-local copies for safe closure capture.
            var indexName = aIndex.Key;
            var index = aIndex.Value;
            indexSaver[counter] = f.StartNew(() => SaveIndex(indexName, index, path));
            counter++;
        }

        #endregion

        #region services

        var serviceSaver = new Task<string>[fallen8.ServiceFactory.Services.Count];
        counter = 0;
        foreach (var aService in fallen8.ServiceFactory.Services)
        {
            // Loop-local copies for safe closure capture.
            var serviceName = aService.Key;
            var service = aService.Value;
            serviceSaver[counter] = f.StartNew(() => SaveService(serviceName, service, path));
            counter++;
        }

        #endregion

        // Reading Task.Result below blocks until each save task has finished;
        // the main file then lists how many partition files exist and their names.
        writer.Write(graphElementSaver.Length);
        foreach (var aFileStreamName in graphElementSaver)
        {
            writer.Write(aFileStreamName.Result);
        }

        writer.Write(indexSaver.Length);
        foreach (var aIndexFileName in indexSaver)
        {
            writer.Write(aIndexFileName.Result);
        }

        writer.Write(serviceSaver.Length);
        foreach (var aServiceFileName in serviceSaver)
        {
            writer.Write(aServiceFileName.Result);
        }

        writer.UpdateHeader();
        writer.Flush();
        file.Flush();
    }
}
/// <summary>
/// Serializes an array of composite objects as a whole and verifies
/// every nested field survives the round trip.
/// </summary>
public void TestComplexArray()
{
    var rng = new Random();
    using (var stream = new MemoryStream())
    using (var writer = new SerializationWriter(stream))
    using (var reader = new SerializationReader(stream))
    {
        var expected = new SIBFClass[Config.MULTI_TEST_COUNT];
        for (var i = 0; i < expected.Length; i++)
        {
            expected[i] = new SIBFClass()
            {
                S = "test" + i,
                IBF = new IBFClass()
                {
                    I = rng.Next(int.MinValue, int.MaxValue),
                    B = rng.Next(2) == 1,
                    F = (float)rng.NextDouble()
                }
            };
        }

        writer.Write(expected);
        writer.Flush();

        stream.Position = 0;
        var actual = reader.Read<SIBFClass[]>();
        for (var i = 0; i < actual.Length; i++)
        {
            Assert.AreEqual(expected[i].S, actual[i].S);
            Assert.AreEqual(expected[i].IBF.I, actual[i].IBF.I);
            Assert.AreEqual(expected[i].IBF.B, actual[i].IBF.B);
            Assert.AreEqual(expected[i].IBF.F, actual[i].IBF.F);
        }
    }
}
/// <summary>
/// Writes MULTI_TEST_COUNT floats built from random bit patterns one at a time
/// and verifies the round trip, tolerating NaN inputs.
/// </summary>
public void TestFloats()
{
    var rng = new Random();
    using (var stream = new MemoryStream())
    using (var writer = new SerializationWriter(stream))
    using (var reader = new SerializationReader(stream))
    {
        var expected = new float[Config.MULTI_TEST_COUNT];
        var scratch = new byte[4];
        for (var i = 0; i < expected.Length; i++)
        {
            rng.NextBytes(scratch);
            expected[i] = BitConverter.ToSingle(scratch, 0);
            writer.Write(expected[i]);
        }
        writer.Flush();

        stream.Position = 0;
        for (var i = 0; i < expected.Length; i++)
        {
            var actual = reader.Read<float>();
            // Random bit patterns can be NaN, and NaN never compares equal to
            // itself — accept NaN inputs, otherwise require exact equality.
            Assert.IsTrue(float.IsNaN(expected[i]) || actual == expected[i]);
        }
    }
}
/// <summary>
/// Serializes a list of composite objects as a whole and verifies
/// every nested field survives the round trip.
/// </summary>
public void TestComplexList()
{
    var rng = new Random();
    using (var stream = new MemoryStream())
    using (var writer = new SerializationWriter(stream))
    using (var reader = new SerializationReader(stream))
    {
        var expected = new List<SIBFClass>(Config.MULTI_TEST_COUNT);
        for (var i = 0; i < Config.MULTI_TEST_COUNT; i++)
        {
            expected.Add(new SIBFClass()
            {
                S = "test" + i,
                IBF = new IBFClass()
                {
                    I = rng.Next(int.MinValue, int.MaxValue),
                    B = rng.Next(2) == 1,
                    F = (float)rng.NextDouble()
                }
            });
        }

        writer.Write(expected);
        writer.Flush();

        stream.Position = 0;
        var actual = reader.Read<List<SIBFClass>>();
        for (var i = 0; i < expected.Count; i++)
        {
            Assert.AreEqual(expected[i].S, actual[i].S);
            Assert.AreEqual(expected[i].IBF.I, actual[i].IBF.I);
            Assert.AreEqual(expected[i].IBF.B, actual[i].IBF.B);
            Assert.AreEqual(expected[i].IBF.F, actual[i].IBF.F);
        }
    }
}
/// <summary>
/// Writes one index to a dedicated file below the save point.
/// </summary>
/// <returns> The filename of the persisted index. </returns>
/// <param name='indexName'> Index name. </param>
/// <param name='index'> Index. </param>
/// <param name='path'> Path. </param>
private static String SaveIndex(string indexName, IIndex index, string path)
{
    var indexFileName = string.Concat(path, Constants.IndexSaveString, indexName);

    using (var indexFile = File.Create(indexFileName, Constants.BufferSize, FileOptions.SequentialScan))
    {
        var indexWriter = new SerializationWriter(indexFile);
        // Header: index name, then the plugin that can re-create it.
        indexWriter.Write(indexName);
        indexWriter.Write(index.PluginName);

        // The index serializes its own payload.
        index.Save(indexWriter);

        indexWriter.UpdateHeader();
        indexWriter.Flush();
        indexFile.Flush();
    }

    return Path.GetFileName(indexFileName);
}
/// <summary>
/// Same as GetHistoryOrders, but the orders are packed via MarketOrderSerializer
/// into a byte buffer; buffer stays null when there is nothing to pack.
/// </summary>
public RequestStatus GetHistoryOrdersCompressed(int? accountId, DateTime? startDate, out byte[] buffer)
{
    buffer = null;
    List<MarketOrder> orders;
    var status = GetHistoryOrders(accountId, startDate, out orders);

    var hasOrders = orders != null && orders.Count > 0;
    if (hasOrders)
    {
        try
        {
            using (var writer = new SerializationWriter())
            {
                writer.Write(orders);
                writer.Flush();
                buffer = writer.ToArray();
            }
        }
        catch (Exception ex)
        {
            Logger.Error("GetHistoryOrdersCompressed() - ошибка сериализации", ex);
            return RequestStatus.SerializationError;
        }
    }

    return status;
}
/// <summary>
/// Writes MULTI_TEST_COUNT random decimals (random lo/mid/hi parts, sign and
/// scale) one at a time and verifies the round trip.
/// </summary>
public void TestDecimals()
{
    var rng = new Random();
    using (var stream = new MemoryStream())
    using (var writer = new SerializationWriter(stream))
    using (var reader = new SerializationReader(stream))
    {
        var expected = new decimal[Config.MULTI_TEST_COUNT];
        for (var i = 0; i < expected.Length; i++)
        {
            expected[i] = new decimal(
                rng.Next(0, int.MaxValue),
                rng.Next(0, int.MaxValue),
                rng.Next(0, int.MaxValue),
                rng.Next(0, 2) == 1,
                (byte)rng.Next(0, 29)); // scale must be in [0, 28]
            writer.Write(expected[i]);
        }
        writer.Flush();

        stream.Position = 0;
        for (var i = 0; i < expected.Length; i++)
            Assert.AreEqual(expected[i], reader.Read<decimal>());
    }
}