public NaivePreHeat_remoteread(string dataFilePath, int K, string outputFilePath, int chunkSize, string mdserver, LocationInfo li, int endSlotIndex)
{
    this.dataFilePath = dataFilePath;
    occupancyKey = new StrKey("occupancy");
    random = new Random();
    this.constK = K;
    this.outputFilePath = outputFilePath;
    this.chunkSize = chunkSize;
    this.mdserver = mdserver;
    this.li = li;

    int slotIndex = 0;
    StreamFactory streamFactory = StreamFactory.Instance;
    FqStreamID fq_sid = new FqStreamID(fqsidprefix + chunkSize, "A", "TestBS");
    CallerInfo ci = new CallerInfo(null, "A", "A", 1);
    streamFactory.deleteStream(fq_sid, ci);
    IStream occupancyGroundTruthStream = streamFactory.openValueDataStream<StrKey, ByteValue>(fq_sid, ci, li,
        StreamFactory.StreamSecurityType.Plain, CompressionType.None, StreamFactory.StreamOp.Write,
        mdserver, chunkSize, 1, new Logger());

    while (true)
    {
        occupancyGroundTruthStream.Append(occupancyKey, new ByteValue(BitConverter.GetBytes(random.Next(2))), slotIndexBase + slotIndex);
        slotIndex++;
        if (slotIndex == endSlotIndex)
        {
            break;
        }
    }
    occupancyGroundTruthStream.Close();
}
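// Hedged sketch (not in the original source): one plausible way to read back the ground-truth
// stream seeded by the constructor above. It reuses only calls that appear elsewhere in this
// code (openValueDataStream, GetAll, GetTimestamp, NowUtc); opening with StreamOp.Read and the
// same trailing arguments is an assumption.
public void ReadOccupancyGroundTruth()
{
    StreamFactory streamFactory = StreamFactory.Instance;
    FqStreamID fq_sid = new FqStreamID(fqsidprefix + chunkSize, "A", "TestBS");
    CallerInfo ci = new CallerInfo(null, "A", "A", 1);
    IStream stream = streamFactory.openValueDataStream<StrKey, ByteValue>(fq_sid, ci, li,
        StreamFactory.StreamSecurityType.Plain, CompressionType.None, StreamFactory.StreamOp.Read,
        mdserver, chunkSize, 1, new Logger());
    foreach (IDataItem item in stream.GetAll(occupancyKey, 0, StreamFactory.NowUtc()))
    {
        Console.WriteLine(item.GetTimestamp() + ": " + item.GetVal());
    }
    stream.Close();
}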
public void WriteToStream()
{
    StrKey key = new StrKey("DummyKey2");
    datastream.Append(key, new StrValue("2"));
    logger.Log("Writing {0} to stream", datastream.Get(key).ToString());
}
public void WriteToStream()
{
    StrKey key = new StrKey("DigitalMediaKey");
    datastream.Append(key, new StrValue("DigitalMediaVal"));
    logger.Log("Writing {0} to stream", datastream.Get(key).ToString());
}
public void Setup()
{
    StreamFactory sf = StreamFactory.Instance;
    dfs_str_val = sf.openValueDataStream<StrKey, StrValue>(new FqStreamID("99-2729", "A0", "TestRange"),
        new CallerInfo(null, "A0", "A0", 1), null,
        StreamFactory.StreamSecurityType.Plain, CompressionType.None, StreamFactory.StreamOp.Write);
    keys = new List<IKey>();
    for (int i = 0; i < 10; i++)
    {
        keys.Add(new StrKey("k" + i));
        for (int j = 0; j < 100; j++)
        {
            dfs_str_val.Append(keys[i], new StrValue("k" + i + "_value" + j));
        }
        // dfs_str_val.Seal(null);
    }
    // IEnumerable<IDataItem> iterator = dfs_str_val.GetAll(new StrKey("k0"));
    // foreach (IDataItem data in iterator)
    //     Console.WriteLine("data is: " + data.GetTimestamp());
}
private static void Write(IStream stream)
{
    try
    {
        int i = 1;
        StrKey k1 = new StrKey("k1");
        while (true)
        {
            stream.Append(k1, new ByteValue(StreamFactory.GetBytes("k1-value" + i)));
            Console.WriteLine("Written " + i + " values");
            i++;
            if (i % 10 == 0)
            {
                stream.Seal(false);
            }
            if (isWriting)
            {
                System.Threading.Thread.Sleep(1000);
            }
            else
            {
                break;
            }
        }
    }
    catch (Exception e)
    {
        Console.WriteLine("Exception in write: " + e);
    }
}
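// Hedged sketch (not in the original source): a reader loop that could pair with Write()
// above, polling the latest value of k1 until the shared isWriting flag is cleared. Only
// Get(), which appears elsewhere in this code, is used.
private static void Read(IStream stream)
{
    try
    {
        StrKey k1 = new StrKey("k1");
        while (isWriting)
        {
            Console.WriteLine("Latest k1: " + stream.Get(k1));
            System.Threading.Thread.Sleep(1000);
        }
    }
    catch (Exception e)
    {
        Console.WriteLine("Exception in read: " + e);
    }
}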
public void WriteToStream(string tag, string data)
{
    StrKey key = new StrKey(tag);
    if (datastream != null)
    {
        datastream.Append(key, new StrValue(data));
    }
}
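// Hedged sketch (not in the original source): a read-side counterpart to WriteToStream,
// using the same defensive null check; Get(key).ToString() mirrors the logging calls above.
public string ReadFromStream(string tag)
{
    StrKey key = new StrKey(tag);
    if (datastream != null)
    {
        return datastream.Get(key).ToString();
    }
    return null;
}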
private static void UploadDataAsStreams(int UploadCount)
{
    string directory = @"..\..\data\meter-data";
    int count = 0;

    // Build a timestamp -> temperature map from the weather trace.
    Dictionary<DateTime, double> ts_temperature = new Dictionary<DateTime, double>();
    StreamReader wfile = new System.IO.StreamReader(@"..\..\data\weather.txt");
    string wline;
    while ((wline = wfile.ReadLine()) != null)
    {
        string[] words = wline.Split('\t');
        DateTime date = Convert.ToDateTime(words[4]);
        date = date.AddHours(Int32.Parse(words[5]));
        double temperature = Double.Parse(words[0]);
        ts_temperature[date] = temperature;
    }
    wfile.Close();

    foreach (string filePath in Directory.GetFiles(directory))
    {
        Console.WriteLine("file name:" + filePath);
        string line;
        System.IO.StreamReader file = new System.IO.StreamReader(filePath);

        LocationInfo li = new LocationInfo(AzureaccountName, AzureaccountKey, SynchronizerType.Azure);
        FqStreamID fq_sid = new FqStreamID("crrealhome" + count, "A", "TestBS");
        CallerInfo ci = new CallerInfo(null, "A", "A", 1);
        StreamFactory sf = StreamFactory.Instance;
        sf.deleteStream(fq_sid, ci);
        IStream dfs_byte_val = sf.openValueDataStream<DoubleKey, ByteValue>(fq_sid, ci, li,
            StreamFactory.StreamSecurityType.Plain, CompressionType.None, StreamFactory.StreamOp.Write,
            mdserveraddress: mdServer, ChunkSizeForUpload: 4 * 1024 * 1024, ThreadPoolSize: 1, log: new Logger());

        // Key each energy reading by the (integer) temperature at that hour.
        while ((line = file.ReadLine()) != null)
        {
            string[] words = line.Split('\t');
            DateTime date = Convert.ToDateTime(words[0]);
            date = date.AddHours(int.Parse(words[1]) / 100);
            DoubleKey key = new DoubleKey((int)(ts_temperature[date]));
            dfs_byte_val.Append(key, new ByteValue(BitConverter.GetBytes(Double.Parse(words[2]))), DateTimeToUnixTimestamp(date));
            // Console.WriteLine(DateTimeToUnixTimestamp(date) + "," + words[2]);
        }
        dfs_byte_val.Close();
        count++;
        if (count == UploadCount)
        {
            break;
        }
    }
}
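// Hedged sketch (not in the original source): reading one uploaded meter-data stream back.
// Opening with StreamOp.Read and the same named arguments is an assumption, and the
// DoubleKey value (20) is an arbitrary example temperature.
private static void ReadUploadedStream(int index)
{
    LocationInfo li = new LocationInfo(AzureaccountName, AzureaccountKey, SynchronizerType.Azure);
    FqStreamID fq_sid = new FqStreamID("crrealhome" + index, "A", "TestBS");
    CallerInfo ci = new CallerInfo(null, "A", "A", 1);
    IStream stream = StreamFactory.Instance.openValueDataStream<DoubleKey, ByteValue>(fq_sid, ci, li,
        StreamFactory.StreamSecurityType.Plain, CompressionType.None, StreamFactory.StreamOp.Read,
        mdserveraddress: mdServer, ChunkSizeForUpload: 4 * 1024 * 1024, ThreadPoolSize: 1, log: new Logger());
    foreach (IDataItem item in stream.GetAll(new DoubleKey(20), 0, StreamFactory.NowUtc()))
    {
        Console.WriteLine(item.GetTimestamp() + "," + item.GetVal());
    }
    stream.Close();
}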
public void ReadHourIntoStream()
{
    long start = DateTime.Now.Ticks;
    Tuple<DateTime, double, double> tuple = timestamp_energy_temperature.ElementAt(count);
    DoubleKey key = new DoubleKey(Math.Round(tuple.Item3));
    dataStream.Append(key, new ByteValue(BitConverter.GetBytes(tuple.Item2)), (long)count);
    count++;
    long end = DateTime.Now.Ticks;
    Console.WriteLine(count + "," + (end - start));
    using (results = File.AppendText(".\\result-width-" + width + ".txt"))
        results.WriteLine(count + "," + (end - start));
}
public void SyncFileStreamTest_TestRepeatedClose()
{
    for (int i = 0; i < 10; ++i)
    {
        StreamFactory sf = StreamFactory.Instance;
        IStream dfs_byte_val = sf.openValueDataStream<StrKey, ByteValue>(new FqStreamID("99-2729", "A0", "TestMultiClose"),
            new CallerInfo(null, "A0", "A0", 1), locationInfo,
            StreamFactory.StreamSecurityType.Plain, CompressionType.None, StreamFactory.StreamOp.Write);
        dfs_byte_val.Append(k1, new ByteValue(StreamFactory.GetBytes("k1-cmu-" + i)));
        dfs_byte_val.Close();
        Thread.Sleep(5000);
    }
}
private void AddPicDataStream(string key, byte[] imageBytes)
{
    StrKey strKey = new StrKey(key);
    ByteValue byteVal = new ByteValue(imageBytes);
    try
    {
        lock (picStreamLock)
        {
            picStream.Append(strKey, byteVal);
        }
    }
    catch (Exception e)
    {
        logger.Log("Error while writing images to dir stream: {0}", e.ToString());
    }
}
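// Hedged sketch (not in the original source): retrieving the most recent picture entry
// under the same lock, logged the way the writers in this module log their results.
private void LogPicFromStream(string key)
{
    try
    {
        lock (picStreamLock)
        {
            logger.Log("Read picture for {0}: {1}", key, picStream.Get(new StrKey(key)).ToString());
        }
    }
    catch (Exception e)
    {
        logger.Log("Error while reading images from dir stream: {0}", e.ToString());
    }
}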
static void Main(string[] args)
{
    string AzureaccountName = ConfigurationManager.AppSettings.Get("AccountName");
    string AzureaccountKey = ConfigurationManager.AppSettings.Get("AccountSharedKey");
    LocationInfo li = new LocationInfo(AzureaccountName, AzureaccountKey, SynchronizerType.Azure);

    /*
    string dataFile = "D:\\b";
    int KB = 1024;
    int[] chunk_sizes = { 4 * 1024 * KB, 8 * 1024 * KB };

    for (int i = 1; i <= 1; i++)
    {
        for (int threads = 1; threads <= 1; threads++)
        {
            foreach (int csize in chunk_sizes)
            {
                Console.Write(">");
                File.Copy(dataFile, dataFile + threads + "," + csize);

                AzureHelper helper = new AzureHelper(AzureaccountName, AzureaccountKey, "foo123123", CompressionType.None, EncryptionType.None, null, null, new Logger(), csize, threads);
                long start = DateTime.Now.Ticks;
                helper.UploadFileAsChunks(dataFile + threads + "," + csize);
                long end = DateTime.Now.Ticks;
                Console.WriteLine(threads + "," + csize + "," + ((double)(end - start) / (double)10000000));
            }
        }
    }
    */

    li = null; // run against local storage; the Azure LocationInfo above is deliberately ignored

    FqStreamID fq_sid = new FqStreamID("1299-2716", "A", "TestBS");
    CallerInfo ci = new CallerInfo(null, "A", "A", 1);
    StreamFactory sf = StreamFactory.Instance;
    sf.deleteStream(fq_sid, ci);
    IStream dfs_byte_val = sf.openValueDataStream<StrKey, ByteValue>(fq_sid, ci, li,
        StreamFactory.StreamSecurityType.Plain, CompressionType.None, StreamFactory.StreamOp.Write);

    StrKey k1 = new StrKey("k1");
    dfs_byte_val.Append(k1, new ByteValue(StreamFactory.GetBytes("k1-cmu")));
    dfs_byte_val.Append(k1, new ByteValue(StreamFactory.GetBytes("k1-msr")));
    dfs_byte_val.Seal(false);
    dfs_byte_val.Append(k1, new ByteValue(StreamFactory.GetBytes("k1-uw")));
    dfs_byte_val.Close();
    Console.ReadKey();

    dfs_byte_val = sf.openValueDataStream<StrKey, ByteValue>(fq_sid, ci, li,
        StreamFactory.StreamSecurityType.Plain, CompressionType.None, StreamFactory.StreamOp.Write);
    Console.WriteLine("Get in read : " + dfs_byte_val.Get(k1));
    IEnumerable<IDataItem> data = dfs_byte_val.GetAll(k1, 0, StreamFactory.NowUtc());
    foreach (IDataItem dataItem in data)
    {
        Console.WriteLine(dataItem.GetVal().ToString());
    }
    dfs_byte_val.Close();
    Console.ReadKey();

    /*
    ValueSerializerBase<StrKey> vsb = new ValueSerializerBase<StrKey>();
    Byte[] buffer1 = vsb.SerializeToByteStream().ToArray();
    Byte[] buffer2 = SerializerHelper<StrKey>.SerializeToProtoStream(k1).ToArray();

    FileStream fout = new FileStream("tmp.txt", FileMode.OpenOrCreate, FileAccess.Write, FileShare.ReadWrite);
    BinaryWriter fs_bw = new BinaryWriter(fout);
    fs_bw.Write(buffer1);
    fs_bw.Write("-----W00t!-----");
    fs_bw.Write(buffer2);
    fs_bw.Write("-----W00t!-----");
    fs_bw.Close();
    fout.Close();
    */
}
public void ValueDataStreamTest_TestAppendStrValue()
{
    dfs_str_val.Append(k1, new StrValue("k1-msr-1"));
    dfs_str_val.Append(k1, new StrValue("k1-msr-2"));
}
new public List<RetVal> PredictOccupancy(long startSlotIndex, long endSlotIndex)
{
    List<RetVal> retVal = new List<RetVal>();
    System.IO.StreamReader datafile = null;
    if (dataFilePath != null) // assuming the data file has one occupancy value per line; skip ahead to startSlotIndex
    {
        string line;
        int counter = 0;
        datafile = new System.IO.StreamReader(this.dataFilePath);
        if (startSlotIndex != 0)
        {
            while ((line = datafile.ReadLine()) != null)
            {
                if (counter == startSlotIndex)
                {
                    break;
                }
                counter++;
            }
        }
    }

    StreamFactory streamFactory = StreamFactory.Instance;
    FqStreamID fq_sid = new FqStreamID("smartpreheat", "A", "TestBS");
    CallerInfo ci = new CallerInfo(null, "A", "A", 1);
    streamFactory.deleteStream(fq_sid, ci);
    IStream occupancyGroundTruthStream = streamFactory.openValueDataStream<StrKey, ByteValue>(fq_sid, ci, null,
        StreamFactory.StreamSecurityType.Plain, CompressionType.None, StreamFactory.StreamOp.Write,
        null, 4 * 1024 * 1024, 1, new Logger());

    int slotIndex = 0;
    long startTime, retrievalTime, computeTime, insertTime;
    while (true)
    {
        startTime = DateTime.Now.Ticks;
        List<int> currentPOV = SmartConstructCurrentPOV(occupancyGroundTruthStream, slotIndex);
        List<List<int>> previousDaysPOV = SmartConstructPreviousPOV(occupancyGroundTruthStream, slotIndex);
        retrievalTime = DateTime.Now.Ticks - startTime;

        startTime = DateTime.Now.Ticks;
        int predictedOccupancy = Predict(currentPOV, previousDaysPOV);
        computeTime = DateTime.Now.Ticks - startTime;

        startTime = DateTime.Now.Ticks;
        int groundTruth;
        if (datafile == null) // no data file to read the ground truth from; append a random value instead
        {
            groundTruth = random.Next(2);
        }
        else
        {
            string line = datafile.ReadLine();
            groundTruth = int.Parse(line);
        }
        currentPOV.Add(groundTruth);

        List<int> temp = new List<int>();
        foreach (List<int> previousPOV in previousDaysPOV)
        {
            temp = temp.Concat(previousPOV).ToList();
        }
        temp = temp.Concat(currentPOV).ToList();
        occupancyGroundTruthStream.Append(occupancyKey, new ByteValue(temp.SelectMany(BitConverter.GetBytes).ToArray()), slotIndexBase + slotIndex);
        insertTime = DateTime.Now.Ticks - startTime;

        Console.WriteLine("Slot number {0} {1} {2} {3}", slotIndex, retrievalTime, computeTime, insertTime);
        using (results = File.AppendText(outputFilePath))
            results.WriteLine("Slot number {0} {1} {2} {3}", slotIndex, retrievalTime, computeTime, insertTime);
        slotIndex++;
        // retVal.Add(new RetVal(endTime - startTime, predictedOccupancy));
        if (slotIndex == endSlotIndex)
        {
            break;
        }
    }
    occupancyGroundTruthStream.Close();
    return retVal;
}
private void AddPicDataStream(string key, byte[] imageBytes)
{
    StrKey strKey = new StrKey(key);
    ByteValue byteVal = new ByteValue(imageBytes);
    try
    {
        picStream = base.CreateDirStream<StrKey, ByteValue>("H2OAlertsPics", true);
        picStream.Append(strKey, byteVal);
        // logger.Log("WaterAlert picture has been written to {0}.", picStream.Get(strKey).ToString());
        picStream.Close();
    }
    catch (Exception e)
    {
        logger.Log("Error while writing images to dir stream: {0}", e.ToString());
    }
}
public void LocalListedValueDataStreamTest_TestUpdateByteValue()
{
    vds.Append(k1, new ByteValue(StreamFactory.GetBytes("k1-cmu")));
    vds.Append(k2, new ByteValue(StreamFactory.GetBytes("k2-msr")));
    vds.Append(k1, new ByteValue(StreamFactory.GetBytes("k1-msr")));
}
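// Hedged sketch (not in the original source): a follow-up check for the update test above.
// Get(k1) should return the latest appended value, while GetAll(k1), as used elsewhere in
// this suite, still exposes both versions.
public void LocalListedValueDataStreamTest_TestGetAfterUpdate()
{
    Console.WriteLine("Latest k1: " + vds.Get(k1));
    foreach (IDataItem item in vds.GetAll(k1))
    {
        Console.WriteLine(item.GetVal().ToString());
    }
}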
public void Run(string CallerName, string HomeName, string AppName, string StreamName, string RandName,
                long stime, long etime, StreamType stream_type, StreamOperation stream_op,
                StreamFactory.StreamDataType ptype, CompressionType ctype, int ChunkSize, int ThreadPoolSize,
                Byte[] value, int num_operations, SynchronizerType synctype,
                int max_key = 0, string address = null, bool doCosts = false, bool doRaw = false)
{
    // Set experiment directory
    CallerInfo ci = new CallerInfo(null, CallerName, CallerName, 1);
    exp_directory = Path.GetFullPath((null != ci.workingDir) ? ci.workingDir : Directory.GetCurrentDirectory());
    exp_directory = exp_directory + "/" + HomeName + "/" + AppName + "/" + StreamName;

    if (max_key == 0)
    {
        max_key = num_operations;
    }

    // Set a description/tag for the experiment
    this.exp_id = "Directory: " + HomeName + "/" + AppName + "/" + StreamName +
        " Caller:" + CallerName + " Stream Type:" + stream_type + " Stream Op: " + stream_op +
        " Stream Ptype: " + ptype + " Compression Type: " + ctype + " Value size: " + value.Length +
        " num_operations: " + max_key + " actual_num_ops: " + num_operations + " Sync type: " + synctype +
        " Do costs? " + doCosts + " Chunk Size: " + ChunkSize + " ThreadPool Size:" + ThreadPoolSize;
    this.compressed_exp_id = " ST:" + stream_type + " OP: " + stream_op + " PT: " + ptype +
        " CT: " + ctype + " VS: " + value.Length + " I:" + num_operations + " MK:" + max_key +
        " SYNC: " + synctype + " chsize: " + ChunkSize + " nThreads: " + ThreadPoolSize;

    // Set remote storage server account info
    string AzureaccountName = ConfigurationManager.AppSettings.Get("AccountName");
    string AzureaccountKey = ConfigurationManager.AppSettings.Get("AccountSharedKey");
    string S3accountName = ConfigurationManager.AppSettings.Get("S3AccountName");
    string S3accountKey = ConfigurationManager.AppSettings.Get("S3AccountSharedKey");

    LocationInfo Li;
    if (synctype == SynchronizerType.Azure)
    {
        Li = new LocationInfo(AzureaccountName, AzureaccountKey, SynchronizerType.Azure);
    }
    else if (synctype == SynchronizerType.AmazonS3)
    {
        Li = new LocationInfo(S3accountName, S3accountKey, SynchronizerType.AmazonS3);
    }
    else
    {
        Li = null;
    }

    StreamFactory sf = StreamFactory.Instance;
    IStream stream = null;
    FqStreamID streamid = new FqStreamID(HomeName, AppName, StreamName);

    // Set op : R/W
    StreamFactory.StreamOp rw;
    if (stream_op == StreamOperation.RandomKeyRandomValueAppend ||
        stream_op == StreamOperation.RandomKeySameValueAppend ||
        stream_op == StreamOperation.SameKeyRandomValueAppend ||
        stream_op == StreamOperation.SameKeySameValueAppend)
    {
        rw = StreamFactory.StreamOp.Write;
    }
    else
    {
        rw = StreamFactory.StreamOp.Read;
    }

    // Initialize costs
    CostsHelper costhelper = null;
    double baselineStorageKV = 0;
    if (doCosts)
    {
        costhelper = new CostsHelper();
        costhelper.getCurrentCpuUsage();
        costhelper.getNetworkUsage();
    }

    if (stream_type == StreamType.CloudRaw)
    {
        if (!Directory.Exists(exp_directory))
        {
            Directory.CreateDirectory(exp_directory);
        }
        Logger logger = new Logger();
        Byte[] val = new Byte[value.Length * num_operations];
        // DateTime Date = new DateTime(DateTime.UtcNow.Ticks);
        // string cname = String.Format("CloudRaw-{0}", Date.ToString("yyyy-MM-dd"));
        // string bname = String.Format("{0}", Date.ToString("HH-mm-ss"));
        // string cname = String.Format("cloudraw-{0}", RandomString(4));
        // string bname = String.Format("{0}", RandomString(4));
        string cname = String.Format("cloudraw-{0}", RandName);
        string bname = String.Format("{0}", RandName);

        if (stream_op == StreamOperation.RandomKeyGet ||
            stream_op == StreamOperation.RandomKeyGetMultipleSegments ||
            stream_op == StreamOperation.RandomKeyGetAll)
        {
            doRawCloudPerf(val, SynchronizerType.Azure, SynchronizeDirection.Download, exp_directory, logger, containerName: cname, blobName: bname);
            logger.Dump(exp_directory + "/log");
        }
        else
        {
            doRawCloudPerf(val, SynchronizerType.Azure, SynchronizeDirection.Upload, exp_directory, logger, containerName: cname, blobName: bname);
            logger.Dump(exp_directory + "/log");
        }
        return;
    }

    if (stream_type == StreamType.DiskRaw)
    {
        if (!Directory.Exists(exp_directory))
        {
            Directory.CreateDirectory(exp_directory);
        }
        Logger logger = doDiskRaw(stream_op, num_operations, value.Length, ptype, exp_directory);
        logger.Dump(exp_directory + "/log");
        return;
    }

    // Are we getting raw disk throughput?
    if (stream_type == StreamType.Raw)
    {
        string ret = doDiskSpeed((value.Length * num_operations) / 1000 + "K", value.Length / 1000 + "K", rw);
        if (!Directory.Exists(exp_directory))
        {
            Directory.CreateDirectory(exp_directory);
        }
        File.WriteAllText(exp_directory + "/log", ret);
        return;
    }

    // Populate the keys and the values
    Random random = new Random(DateTime.Now.Millisecond);
    StrKey[] keys = new StrKey[max_key];
    for (int i = 0; i < max_key; ++i)
    {
        keys[i] = new StrKey("" + i);
    }
    /*
    List<ByteValue> vals = new List<ByteValue>(num_operations);
    Byte[][] tmp = new Byte[num_operations][];
    for (int i = 0; i < num_operations; ++i)
    {
        tmp[i] = new Byte[value.Length];
        random.NextBytes(tmp[i]);
    }

    for (int i = 0; i < num_operations; ++i)
    {
        keys[i] = new StrKey("" + i);
        vals.Add(new ByteValue(tmp[i]));
        // vals[i] = new ByteValue(tmp);
    }
    */

    Logger log = new Logger();

    // Open stream for different types of experiments
    if (stream_type == StreamType.Local && ptype == StreamFactory.StreamDataType.Values)
    {
        stream = sf.openValueDataStream<StrKey, ByteValue>(streamid, ci, null, StreamFactory.StreamSecurityType.Plain, ctype, rw, address, ChunkSize, ThreadPoolSize, log);
    }
    else if (stream_type == StreamType.LocalEnc && ptype == StreamFactory.StreamDataType.Values)
    {
        stream = sf.openValueDataStream<StrKey, ByteValue>(streamid, ci, null, StreamFactory.StreamSecurityType.Secure, ctype, rw, address, ChunkSize, ThreadPoolSize, log);
    }
    else if (stream_type == StreamType.Remote && ptype == StreamFactory.StreamDataType.Values)
    {
        stream = sf.openValueDataStream<StrKey, ByteValue>(streamid, ci, Li, StreamFactory.StreamSecurityType.Plain, ctype, rw, address, ChunkSize, ThreadPoolSize, log);
    }
    else if (stream_type == StreamType.RemoteEnc && ptype == StreamFactory.StreamDataType.Values)
    {
        stream = sf.openValueDataStream<StrKey, ByteValue>(streamid, ci, Li, StreamFactory.StreamSecurityType.Secure, ctype, rw, address, ChunkSize, ThreadPoolSize, log);
    }
    else if (stream_type == StreamType.Local && ptype == StreamFactory.StreamDataType.Files)
    {
        stream = sf.openFileDataStream<StrKey>(streamid, ci, null, StreamFactory.StreamSecurityType.Plain, ctype, rw, address, ChunkSize, ThreadPoolSize, log);
    }
    else if (stream_type == StreamType.LocalEnc && ptype == StreamFactory.StreamDataType.Files)
    {
        stream = sf.openFileDataStream<StrKey>(streamid, ci, null, StreamFactory.StreamSecurityType.Secure, ctype, rw, address, ChunkSize, ThreadPoolSize, log);
    }
    else if (stream_type == StreamType.Remote && ptype == StreamFactory.StreamDataType.Files)
    {
        stream = sf.openFileDataStream<StrKey>(streamid, ci, Li, StreamFactory.StreamSecurityType.Plain, ctype, rw, address, ChunkSize, ThreadPoolSize, log);
    }
    else if (stream_type == StreamType.RemoteEnc && ptype == StreamFactory.StreamDataType.Files)
    {
        stream = sf.openFileDataStream<StrKey>(streamid, ci, Li, StreamFactory.StreamSecurityType.Secure, ctype, rw, address, ChunkSize, ThreadPoolSize, log);
    }
    else
    {
        return;
    }

    if (stream_op == StreamOperation.RandomKeyRandomValueAppend)
    {
        List<ByteValue> vals = new List<ByteValue>(num_operations);
        Byte[][] tmp = new Byte[num_operations][];
        for (int i = 0; i < num_operations; ++i)
        {
            tmp[i] = new Byte[value.Length];
            random.NextBytes(tmp[i]);
        }
        for (int i = 0; i < num_operations; ++i)
        {
            vals.Add(new ByteValue(tmp[i]));
        }
        for (int i = 0; i < num_operations; ++i)
        {
            baselineStorageKV += keys[i].Size();
            baselineStorageKV += vals[i].Size();
            stream.Append(keys[i], vals[i]);
        }
        stream.Close();
    }
    else if (stream_op == StreamOperation.RandomKeySameValueAppend)
    {
        Byte[] singleval = new Byte[value.Length];
        random.NextBytes(singleval);
        ByteValue singlebv = new ByteValue(singleval);
        for (int i = 0; i < num_operations; ++i)
        {
            baselineStorageKV += keys[i].Size();
            baselineStorageKV += value.Length;
            stream.Append(keys[i], singlebv);
        }
        stream.Close();
    }
    else if (stream_op == StreamOperation.SameKeySameValueAppend)
    {
        StrKey key = new StrKey("ExpKey");
        Byte[] singleval = new Byte[value.Length];
        random.NextBytes(singleval);
        ByteValue singlebv = new ByteValue(singleval);
        for (int i = 0; i < num_operations; ++i)
        {
            stream.Append(key, singlebv);
            // System.Threading.Thread.Sleep(10);
        }
        stream.Close();
    }
    else if (stream_op == StreamOperation.RandomKeyGet || stream_op == StreamOperation.RandomKeyGetMultipleSegments)
    {
        for (int i = 0; i < num_operations; ++i)
        {
            stream.Get(keys[random.Next(0, max_key)]);
        }
        stream.Close();
    }
    else if (stream_op == StreamOperation.RandomKeyGetAll)
    {
        StrKey key = new StrKey("ExpKey");
        for (int i = 0; i < num_operations;)
        {
            long st = 0;
            long et = -1;
            Console.WriteLine(stime + ":" + etime);
            while (et < st)
            {
                st = RandomLong(stime, etime, random);
                // et = RandomLong(stime, etime, random);
                et = st + (10 * 10 * TimeSpan.TicksPerMillisecond);
            }
            Console.WriteLine(st + ":" + et);
            IEnumerable<IDataItem> iterator = stream.GetAll(key, st, et);
            foreach (IDataItem data in iterator)
            {
                data.GetVal();
                ++i;
                if (i == num_operations)
                {
                    break;
                }
            }
        }
        stream.Close();
    }
    else if (stream_op == StreamOperation.SameKeyRandomValueAppend)
    {
        StrKey key = new StrKey("ExpKey");
        for (int i = 0; i < num_operations; ++i)
        {
            baselineStorageKV += key.Size();
            // baselineStorageKV += vals[i].Size();
            // stream.Append(key, vals[i]);
        }
        stream.Close();
    }
    else
    {
        for (int i = 0; i < num_operations; ++i)
        {
            stream.Get(new StrKey("" + random.Next(0, num_operations - 1)));
        }
        stream.Close();
    }

    // Dump the instrumentation logs
    stream.DumpLogs(exp_directory + "/log");

    // Collect costs usage
    List<string> costs = new List<string>();
    if (doCosts)
    {
        costs.Add(DateTime.UtcNow.Ticks + ": CPU: " + costhelper.getCurrentCpuUsage());
        costs.Add(DateTime.UtcNow.Ticks + ": Network: " + costhelper.getNetworkUsage());
        costs.Add(DateTime.UtcNow.Ticks + ": DataRelated Storage: " + costhelper.getStorageUsage(this.exp_directory, dataRelated: true) / 1000.0f);
        costs.Add(DateTime.UtcNow.Ticks + ": Constant Storage: " + costhelper.getStorageUsage(this.exp_directory, dataRelated: false) / 1000.0f);
        costs.Add(DateTime.UtcNow.Ticks + ": Baseline Storage: " + baselineStorageKV / 1000.0f);
    }
    File.AppendAllLines(exp_directory + "/log", costs);
    // sf.deleteStream(streamid, ci);
}
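// Hedged usage sketch (not in the original source): one plausible invocation of Run() for a
// local value-stream append experiment. The receiver variable `experiment` and all parameter
// values here are illustrative only; the enclosing class is not shown in this code.
/*
Byte[] value = new Byte[4096];
experiment.Run("A0", "TestHome", "TestApp", "TestStream", "r1",
    0, StreamFactory.NowUtc(),
    StreamType.Local, StreamOperation.SameKeySameValueAppend,
    StreamFactory.StreamDataType.Values, CompressionType.None,
    4 * 1024 * 1024, 1, value, 1000, SynchronizerType.Azure);
*/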