/// <summary>
/// Generates a synthetic occupancy ground-truth stream: deletes any previous stream with
/// the same id, then appends one random 0/1 occupancy sample per slot for
/// <paramref name="endSlotIndex"/> slots, timestamped from slotIndexBase.
/// </summary>
/// <param name="dataFilePath">Path recorded for later use by the analysis (not read here).</param>
/// <param name="K">Number of previous days consulted by the predictor (stored in constK).</param>
/// <param name="outputFilePath">File that per-slot results are appended to later.</param>
/// <param name="chunkSize">Upload chunk size; also becomes part of the stream's HomeId.</param>
/// <param name="mdserver">Metadata server address for the value data stream.</param>
/// <param name="li">Remote storage location (account/credentials/synchronizer).</param>
/// <param name="endSlotIndex">Number of ground-truth slots to write.</param>
public NaivePreHeat_remoteread(string dataFilePath, int K, string outputFilePath, int chunkSize, string mdserver, LocationInfo li, int endSlotIndex)
{
    this.dataFilePath = dataFilePath;
    occupancyKey = new StrKey("occupancy");
    random = new Random();
    this.constK = K;
    this.outputFilePath = outputFilePath;
    this.chunkSize = chunkSize;
    this.mdserver = mdserver;
    this.li = li;

    StreamFactory streamFactory = StreamFactory.Instance;
    FqStreamID fq_sid = new FqStreamID(fqsidprefix + chunkSize, "A", "TestBS");
    CallerInfo ci = new CallerInfo(null, "A", "A", 1);

    // Start from a clean stream so the ground truth holds exactly endSlotIndex samples.
    streamFactory.deleteStream(fq_sid, ci);
    IStream occupancyGroundTruthStream = streamFactory.openValueDataStream<StrKey, ByteValue>(fq_sid, ci, li,
        StreamFactory.StreamSecurityType.Plain, CompressionType.None, StreamFactory.StreamOp.Write,
        mdserver, chunkSize, 1, new Logger());

    // BUGFIX: the original while(true)/post-increment-compare loop never terminated when
    // endSlotIndex <= 0; a bounded for-loop appends exactly endSlotIndex samples.
    for (int slotIndex = 0; slotIndex < endSlotIndex; slotIndex++)
    {
        occupancyGroundTruthStream.Append(occupancyKey,
            new ByteValue(BitConverter.GetBytes(random.Next(2))),
            slotIndexBase + slotIndex);
    }
    occupancyGroundTruthStream.Close();
}
private int width = 7*24 ; // hours — default analysis window (one week of hourly samples)
#endregion Fields

#region Constructors
/// <summary>
/// Prepares a Rowlands-style analysis: resets the per-width result file, opens a fresh
/// file stream for writing, and loads (timestamp, energy, temperature) triples from the
/// comma-separated input file into memory.
/// </summary>
/// <param name="dataFilePath">CSV file with lines of "timestamp,energy,temperature".</param>
/// <param name="width">Analysis window in hours; also part of the stream/result-file name.</param>
public RowlandsAnalysis(string dataFilePath, int width)
{
    this.width = width;
    this.dataFilePath = dataFilePath;
    count = 0 ;
    timestamp_energy_temperature = new List<Tuple<DateTime, double, double>>();

    // Remove results from any previous run with the same width.
    File.Delete(".\\result-width-" + width + ".txt");

    StreamFactory streamFactory = StreamFactory.Instance;
    FqStreamID fq_sid = new FqStreamID("RowlandsAnalysis"+ width, "A", "TestBS");
    CallerInfo ci = new CallerInfo(null, "A", "A", 1);
    streamFactory.deleteStream(fq_sid, ci);
    dataStream = streamFactory.openFileStream<DoubleKey, ByteValue>(fq_sid, ci, null,
        StreamFactory.StreamSecurityType.Plain, CompressionType.None, StreamFactory.StreamOp.Write,
        null, 4*1024*1024, 1, new Logger());

    // BUGFIX: the reader previously leaked its file handle if Convert/Parse threw
    // mid-file; a using block guarantees disposal on every exit path.
    using (System.IO.StreamReader file = new System.IO.StreamReader(dataFilePath))
    {
        string line;
        while ((line = file.ReadLine()) != null)
        {
            string[] words = line.Split(',');
            DateTime date = Convert.ToDateTime(words[0]);
            double energy = Double.Parse(words[1]);
            double temperature = Double.Parse(words[2]);
            timestamp_energy_temperature.Add(new Tuple<DateTime, double, double>(date, energy, temperature));
        }
    }
}
/// <summary>
/// Test setup: builds a unique stream identity from the current UTC date/time plus a
/// random suffix, clears any stale stream with that id, and opens a plain value data
/// stream for writing against the local metadata server.
/// </summary>
public void Setup()
{
    StreamFactory factory = StreamFactory.Instance;
    DateTime now = new DateTime(DateTime.UtcNow.Ticks);
    string home = String.Format("TestHome-{0}", now.ToString("yyyy-MM-dd"));
    string caller = String.Format("{0}", now.ToString("HH-mm-ss"));
    Random rng = new Random();
    string streamName = String.Format("{0}", rng.Next());

    // App name and caller name are deliberately the same (time-of-day string).
    FqStreamID streamId = new FqStreamID(home, caller, streamName);
    CallerInfo callerInfo = new CallerInfo(null, caller, caller, 1);

    // Remove leftovers from a previous run before opening a fresh writer.
    factory.deleteStream(streamId, callerInfo);
    vds = factory.openValueDataStream<StrKey, ByteValue>(streamId, callerInfo, null,
        StreamFactory.StreamSecurityType.Plain, CompressionType.None, StreamFactory.StreamOp.Write,
        mdserveraddress: "http://localhost:23456/MetadataServer");

    k1 = new StrKey("k1");
    k2 = new StrKey("k2");
}
/// <summary>
/// Opens the "new5" stream read-only with a periodic refresh interval and polls it from a
/// background thread until the operator presses enter. Any failure (e.g. metadata server
/// unreachable) is printed and the program exits.
/// </summary>
static void Main(string[] args)
{
    StreamFactory factory = StreamFactory.Instance;
    FqStreamID streamId = new FqStreamID("new5", "A0", "Test");
    CallerInfo caller = new CallerInfo(null, "A0", "A0", 1);
    String mdserver = "http://localhost:23456/MetaDataServer/";
    //sf.deleteStream(fqsid, callerinfo, mdserver);
    try
    {
        IStream stream = factory.openValueDataStream<StrKey, ByteValue>(streamId, caller, locationInfo,
            StreamFactory.StreamSecurityType.Plain, CompressionType.None, StreamFactory.StreamOp.Read,
            mdserver, 4 * 1024 * 1024, 1, null, false, ReadFrequencySeconds);

        Thread readerThread = new Thread(() => Read(stream));
        isReading = true;
        Console.WriteLine("Starting Reader .... (press enter to stop)");
        readerThread.Start();

        Console.ReadLine();   // block until the operator hits enter
        isReading = false;    // signal the reader loop to wind down
        stream.Close();
    }
    catch (Exception e)
    {
        Console.WriteLine(e);
    }
}
/// <summary>
/// Exports every (key, timestamp, value) triple in the configured stream whose timestamp
/// falls in [dtbegin, dtend] (converted to UTC) by appending CSV-ish lines to
/// <paramref name="outputFileName"/>. Stream identity and credentials come from app settings.
/// </summary>
/// <param name="remote">True to sideload from Azure; false to read the local copy.</param>
/// <param name="dtbegin">Inclusive range start (local time; converted to UTC).</param>
/// <param name="dtend">Inclusive range end (local time; converted to UTC).</param>
/// <param name="outputFileName">File appended to; timestamps are written in local time.</param>
public void ExportData(bool remote, DateTime dtbegin, DateTime dtend, String outputFileName)
{
    //read the settings
    string accountName = ConfigurationManager.AppSettings.Get("AccountName");
    string accountKey = ConfigurationManager.AppSettings.Get("AccountSharedKey");
    string homeId = ConfigurationManager.AppSettings.Get("HomeId");
    string appId = ConfigurationManager.AppSettings.Get("AppId");
    string streamId = ConfigurationManager.AppSettings.Get("StreamId");

    IStream datastream;
    StreamFactory sf = StreamFactory.Instance;
    CallerInfo ci = new CallerInfo(null, appId, appId, 0);
    FqStreamID fq_sid = new FqStreamID(homeId, appId, streamId);

    // BUGFIX: the FileStream/StreamWriter pair leaked if anything below threw; the using
    // block disposes the writer (and its underlying FileStream) on every exit path.
    using (StreamWriter swOut = new StreamWriter(new FileStream(outputFileName, FileMode.Append)))
    {
        if (remote)
        {
            LocationInfo li = new LocationInfo(accountName, accountKey, SynchronizerType.Azure);
            // Last arg (true) = sideload the data down from remote storage.
            datastream = sf.openValueDataStream<StrKey, StrValue>(fq_sid, ci, li,
                StreamFactory.StreamSecurityType.Plain, CompressionType.None, StreamFactory.StreamOp.Read,
                null, 4*1024*1024, 1, null, true);
        }
        else
        {
            datastream = sf.openValueDataStream<StrKey, StrValue>(fq_sid, ci, null,
                StreamFactory.StreamSecurityType.Plain, CompressionType.None, StreamFactory.StreamOp.Read,
                null, 4*1024*1024, 1, null);
        }

        DateTime dtbeginutc = dtbegin.ToUniversalTime();
        DateTime dtendutc = dtend.ToUniversalTime();

        // StrKey tmpKey = new StrKey("envih1:sensormultilevel:");
        HashSet<IKey> keys = datastream.GetKeys(null, null);
        foreach (IKey key in keys)
        {
            IEnumerable<IDataItem> dataItemEnum = datastream.GetAll(key, dtbeginutc.Ticks, dtendutc.Ticks);
            if (dataItemEnum != null)
            {
                foreach (IDataItem di in dataItemEnum)
                {
                    DateTime ts = new DateTime(di.GetTimestamp());
                    swOut.WriteLine(key + ", " + ts.ToLocalTime() + ", " + di.GetVal().ToString());
                }
            }
        }
        datastream.Close();
    }
}
/// <summary>
/// Opens the stream named in app settings (remotely from Azure or from the local copy)
/// and dumps every (key, timestamp, value) triple to stdout. Per-item failures print a
/// stack trace to stderr but do not abort the dump.
/// </summary>
/// <param name="remote">True to sideload from Azure; false to read locally.</param>
public Export(bool remote)
{
    // Pull stream identity and storage credentials from app settings.
    string accountName = ConfigurationManager.AppSettings.Get("AccountName");
    string accountKey = ConfigurationManager.AppSettings.Get("AccountSharedKey");
    string homeId = ConfigurationManager.AppSettings.Get("HomeId");
    string appId = ConfigurationManager.AppSettings.Get("AppId");
    string streamId = ConfigurationManager.AppSettings.Get("StreamId");

    StreamFactory factory = StreamFactory.Instance;
    CallerInfo caller = new CallerInfo(null, appId, appId, 0);
    FqStreamID streamAddress = new FqStreamID(homeId, appId, streamId);

    IStream datastream;
    if (remote)
    {
        LocationInfo location = new LocationInfo(accountName, accountKey, SynchronizerType.Azure);
        // Final argument (true) requests sideloading from remote storage.
        datastream = factory.openValueDataStream<StrKey, StrValue>(streamAddress, caller, location,
            StreamFactory.StreamSecurityType.Plain, CompressionType.None, StreamFactory.StreamOp.Read,
            null, 4*1024*1024, 1, null, true);
    }
    else
    {
        datastream = factory.openValueDataStream<StrKey, StrValue>(streamAddress, caller, null,
            StreamFactory.StreamSecurityType.Plain, CompressionType.None, StreamFactory.StreamOp.Read,
            null, 4*1024*1024, 1, null);
    }

    foreach (IKey key in datastream.GetKeys(null, null))
    {
        foreach (IDataItem item in datastream.GetAll(key))
        {
            try
            {
                DateTime ts = new DateTime(item.GetTimestamp());
                Console.WriteLine(key + ", " + ts + ", " + item.GetVal().ToString());
            }
            catch (Exception e)
            {
                Console.Error.Write(e.StackTrace);
            }
        }
    }
    datastream.Close();
}
/// <summary>
/// Test setup: initializes the two standard keys, the Azure location info, the fixed
/// stream identity ("99-a2000"/"A0"/"TestDS"), caller info, plain security type, and the
/// shared StreamFactory instance used by the tests.
/// </summary>
public void Setup()
{
    k1 = new StrKey("k1");
    k2 = new StrKey("k2");
    // SECURITY NOTE(review): storage account credentials are hard-coded in source;
    // they should be moved to configuration or a secret store and this key rotated.
    string AzureaccountName = "testdrive";
    string AzureaccountKey = "zRTT++dVryOWXJyAM7NM0TuQcu0Y23BgCQfkt7xh2f/Mm+r6c8/XtPTY0xxaF6tPSACJiuACsjotDeNIVyXM8Q==";
    locationInfo = new LocationInfo(AzureaccountName, AzureaccountKey, SynchronizerType.Azure);
    streamID = new FqStreamID("99-a2000", "A0", "TestDS");
    callerInfo = new CallerInfo(null, "A0", "A0", 1);
    streamSecurityType = StreamFactory.StreamSecurityType.Plain;
    streamFactory = StreamFactory.Instance;
}
/// <summary>
/// Test setup (msrlot account variant): initializes the two standard keys, the Azure
/// location info, the fixed stream identity ("99-a2000"/"A0"/"TestDS"), caller info,
/// plain security type, and the shared StreamFactory instance.
/// </summary>
public void Setup()
{
    k1 = new StrKey("k1");
    k2 = new StrKey("k2");
    // SECURITY NOTE(review): storage account credentials are hard-coded in source;
    // they should be moved to configuration or a secret store and this key rotated.
    string AzureaccountName = "msrlot";
    string AzureaccountKey = "wC3ou+VLBTu8ryKbsiSMtsIIHIpxGSIAWA0NtK58da2wELQ+USgYQwmVMtyq/p8ILsuZc8TiLeHBjaPI+a3A2Q==";
    locationInfo = new LocationInfo(AzureaccountName, AzureaccountKey, SynchronizerType.Azure);
    streamID = new FqStreamID("99-a2000", "A0", "TestDS");
    callerInfo = new CallerInfo(null, "A0", "A0", 1);
    streamSecurityType = StreamFactory.StreamSecurityType.Plain;
    streamFactory = StreamFactory.Instance;
}
/// <summary>
/// Replays the occupancy ground-truth stream slot by slot from startSlotIndex to
/// endSlotIndex, building the current day's partial occupancy vector and the matching
/// vectors from previous days, predicting the slot's occupancy, and logging per-slot
/// retrieval latency both to the console and to outputFilePath.
/// </summary>
/// <returns>Single-element list holding the average retrieval time per slot (ticks).</returns>
public List<RetVal> PredictOccupancy(long startSlotIndex, long endSlotIndex)
{
    long average = 0;
    List<RetVal> retVal = new List<RetVal>();
    StreamFactory streamFactory = StreamFactory.Instance;
    FqStreamID fq_sid = new FqStreamID(fqsidprefix + chunkSize, "A", "TestBS");
    CallerInfo ci = new CallerInfo(null, "A", "A", 1);

    // BUGFIX: Directory.Delete throws DirectoryNotFoundException when the local cache
    // directory does not exist yet (e.g. first run); guard with Exists.
    if (Directory.Exists(fq_sid.HomeId))
        Directory.Delete(fq_sid.HomeId, true);

    int slotIndex = Convert.ToInt32(startSlotIndex);
    long startTime = 0, retrievelTime = 0, computeTime = 0;
    IStream occupancyGroundTruthStream = streamFactory.openValueDataStream<StrKey, ByteValue>(fq_sid, ci, li,
        StreamFactory.StreamSecurityType.Plain, CompressionType.None, StreamFactory.StreamOp.Read,
        mdserver, chunkSize, 1, new Logger());

    while (true)
    {
        List<int> currentPOV = new List<int>();
        List<List<int>> previousDaysPOV = new List<List<int>>();
        try
        {
            startTime = DateTime.Now.Ticks;
            currentPOV = ConstructCurrentPOV(occupancyGroundTruthStream, slotIndex);
            previousDaysPOV = ConstructPreviousPOV(occupancyGroundTruthStream, slotIndex);
            retrievelTime = DateTime.Now.Ticks - startTime;
            startTime = DateTime.Now.Ticks;
            int predictedOccupancy = Predict(currentPOV, previousDaysPOV);
            computeTime = DateTime.Now.Ticks - startTime;
        }
        catch (Exception e)
        {
            // NOTE(review): on failure the slot is still logged with the previous
            // iteration's retrievelTime; kept as-is to preserve the experiment's
            // output format, but consider skipping failed slots instead.
            Console.WriteLine(e.Message);
        }
        Console.WriteLine("Slot number {0} {1} ", slotIndex, retrievelTime);
        using (results = File.AppendText(outputFilePath))
            results.WriteLine("Slot number {0} {1}", slotIndex, retrievelTime);
        average += retrievelTime;
        slotIndex++;
        if (slotIndex == endSlotIndex)
            break;
    }
    occupancyGroundTruthStream.Close();

    // NOTE(review): the loop runs (endSlotIndex - startSlotIndex) iterations but the
    // divisor is one larger — confirm whether the +1 is intentional.
    average = average / (endSlotIndex - startSlotIndex + 1);
    retVal.Add(new RetVal(0, Convert.ToInt32(average)));
    return retVal;
}
/// <summary>
/// Uploads up to <paramref name="UploadCount"/> per-home meter-data files as value data
/// streams keyed by the outdoor temperature at the reading's hour. The weather file
/// provides the hour-resolution timestamp-to-temperature map used for the keys.
/// </summary>
/// <param name="UploadCount">Maximum number of home files to upload.</param>
private static void UploadDataAsStreams(int UploadCount)
{
    string directory = @"..\\..\\data\\meter-data";
    int count = 0;

    // Build the hour-resolution timestamp -> outdoor temperature map.
    Dictionary<DateTime, double> ts_temperature = new Dictionary<DateTime, double>();
    // BUGFIX: readers are now wrapped in using blocks so file handles are released
    // even when a malformed line makes Convert/Parse throw.
    using (StreamReader wfile = new System.IO.StreamReader(@"..\\..\\data\\weather.txt"))
    {
        string wline;
        while ((wline = wfile.ReadLine()) != null)
        {
            string[] words = wline.Split('\t');
            DateTime date = Convert.ToDateTime(words[4]);
            date = date.AddHours(Int32.Parse(words[5]));
            double temperature = Double.Parse(words[0]);
            ts_temperature[date] = temperature;
        }
    }

    foreach (string filePath in Directory.GetFiles(directory))
    {
        Console.WriteLine("file name:" + filePath);
        LocationInfo li = new LocationInfo(AzureaccountName, AzureaccountKey, SynchronizerType.Azure);
        FqStreamID fq_sid = new FqStreamID("crrealhome" + count, "A", "TestBS");
        CallerInfo ci = new CallerInfo(null, "A", "A", 1);
        StreamFactory sf = StreamFactory.Instance;
        sf.deleteStream(fq_sid, ci);
        IStream dfs_byte_val = sf.openValueDataStream<DoubleKey, ByteValue>(fq_sid, ci, li,
            StreamFactory.StreamSecurityType.Plain, CompressionType.None, StreamFactory.StreamOp.Write,
            mdserveraddress: mdServer, ChunkSizeForUpload: 4 * 1024 * 1024, ThreadPoolSize: 1, log: new Logger());

        using (System.IO.StreamReader file = new System.IO.StreamReader(filePath))
        {
            string line;
            while ((line = file.ReadLine()) != null)
            {
                string[] words = line.Split('\t');
                DateTime date = Convert.ToDateTime(words[0]);
                // words[1] is an HHMM-style hour field, e.g. 1300 -> +13h.
                date = date.AddHours(int.Parse(words[1]) / 100);
                DoubleKey key = new DoubleKey(((int)(ts_temperature[date])));
                dfs_byte_val.Append(key,
                    new ByteValue(BitConverter.GetBytes(Double.Parse(words[2]))),
                    DateTimeToUnixTimestamp(date));
            }
        }
        dfs_byte_val.Close();
        count++;
        if (count == UploadCount)
            break;
    }
}
/// <summary>
/// Local-file variant: deletes any previous "dnw-{window}-{numberOfStreams}" stream and
/// opens <paramref name="numberOfStreams"/> plain file-stream writers against it.
/// </summary>
public DNW(int numberOfStreams, int window, string outputFilePath)
{
    this.numberOfStreams = numberOfStreams;
    this.window = window;
    this.count = 0;
    this.outputFilePath = outputFilePath;

    StreamFactory factory = StreamFactory.Instance;
    FqStreamID streamId = new FqStreamID("dnw-" + window + "-" + numberOfStreams, "A", "TestBS");
    CallerInfo caller = new CallerInfo(null, "A", "A", 1);

    // All writers share the same stream id; clear any previous run's data first.
    factory.deleteStream(streamId, caller);
    dataStreams = new List<IStream>();
    for (int streamNo = 1; streamNo <= numberOfStreams; streamNo++)
    {
        dataStreams.Add(factory.openFileStream<StrKey, ByteValue>(streamId, caller, null,
            StreamFactory.StreamSecurityType.Plain, CompressionType.None, StreamFactory.StreamOp.Write,
            null, 4 * 1024 * 1024, 1, new Logger()));
    }
}
/// <summary>
/// Remote variant: for each of the <paramref name="numberOfStreams"/> streams, builds a
/// distinct per-index stream id, deletes any previous instance, and opens a plain value
/// data stream writer against the given metadata server and chunk size.
/// </summary>
public DNW(int numberOfStreams, int window, LocationInfo li, string mdserver, int chunkSize)
{
    this.li = li;
    this.numberOfStreams = numberOfStreams;
    this.window = window;
    this.count = 0;
    this.mdServer = mdserver;
    this.chunkSize = chunkSize;

    StreamFactory factory = StreamFactory.Instance;
    CallerInfo caller = new CallerInfo(null, "A", "A", 1);
    dataStreams = new List<IStream>();

    for (int streamNo = 1; streamNo <= numberOfStreams; streamNo++)
    {
        // Each stream gets its own id encoding window, index/total, and chunk size.
        FqStreamID streamId = new FqStreamID(
            fqprefix + "-" + window + "-" + streamNo + "/" + numberOfStreams + "-" + chunkSize,
            "A", "TestBS");
        factory.deleteStream(streamId, caller);
        dataStreams.Add(factory.openValueDataStream<StrKey, ByteValue>(streamId, caller, li,
            StreamFactory.StreamSecurityType.Plain, CompressionType.None, StreamFactory.StreamOp.Write,
            mdserveraddress: mdServer, ChunkSizeForUpload: chunkSize, ThreadPoolSize: 1, log: new Logger()));
    }
}
/// <summary>
/// Opens the data stream off the calling thread (via Task.Run) and stores it in the
/// datastream field. Remote mode sideloads from Azure using the supplied credentials;
/// local mode reads the on-disk copy.
/// </summary>
public async Task SetupDataStream(bool remote, string accountName, string accountKey, string homeId, string appId, string streamId)
{
    StreamFactory factory = StreamFactory.Instance;
    CallerInfo caller = new CallerInfo(null, appId, appId, 0);
    FqStreamID streamAddress = new FqStreamID(homeId, appId, streamId);

    if (remote)
    {
        LocationInfo location = new LocationInfo(accountName, accountKey, SynchronizerType.Azure);
        // Final argument (true) requests sideloading from remote storage.
        datastream = await Task.Run(() => factory.openValueDataStream<StrKey, StrValue>(
            streamAddress, caller, location,
            StreamFactory.StreamSecurityType.Plain, CompressionType.None, StreamFactory.StreamOp.Read,
            null, 4 * 1024 * 1024, 1, null, true));
    }
    else
    {
        datastream = await Task.Run(() => factory.openValueDataStream<StrKey, StrValue>(
            streamAddress, caller, null,
            StreamFactory.StreamSecurityType.Plain, CompressionType.None, StreamFactory.StreamOp.Read,
            null, 4 * 1024 * 1024, 1, null));
    }
}
/* syncIntervalSec:
 *   -ve ==> don't sync on writes; only sync on close.
 *    0  ==> sync on every write
 *   +ve ==> sync every x seconds
 *
 * Throws System.Exception e.g., on network disconnection for remote streams. Catch in Caller.
 */
public IStream openFileDataStream<KeyType>(FqStreamID FQSID, CallerInfo Ci, LocationInfo Li,
    StreamFactory.StreamSecurityType type, CompressionType ctype, StreamFactory.StreamOp op,
    string mdserveraddress = null, int ChunkSizeForUpload = 4 * 1024 * 1024, int ThreadPoolSize = 1,
    Logger log = null, bool sideload = false, int syncIntervalSec = -1)
    where KeyType : IKey, new()
{
    // A null location means purely local storage: substitute an empty, no-sync LocationInfo.
    LocationInfo location = Li ?? new LocationInfo("", "", SynchronizerType.None);
    return new MetaStream<KeyType, ByteValue>(FQSID, Ci, location, op, type, ctype,
        StreamDataType.Files, syncIntervalSec, mdserveraddress, ChunkSizeForUpload,
        ThreadPoolSize, log, sideload);
}
/// <summary>
/// Runs a single experiment configuration end-to-end: derives the experiment directory
/// from HomeName/AppName/StreamName, builds descriptive experiment ids, then either
/// (a) measures raw cloud/disk throughput (CloudRaw/DiskRaw/Raw stream types, which
/// return early), or (b) opens a file/dir stream of the requested type and performs
/// num_operations appends or gets according to stream_op, finally dumping
/// instrumentation logs and optional CPU/network/storage cost figures.
/// NOTE(review): logic left byte-identical; formatting restored and comments added only.
/// </summary>
public void Run(string CallerName, string HomeName, string AppName, string StreamName, string RandName,
    long stime, long etime, StreamType stream_type, StreamOperation stream_op,
    StreamFactory.StreamPhysicalType ptype, CompressionType ctype, int ChunkSize , int ThreadPoolSize,
    Byte[] value, int num_operations, SynchronizerType synctype,
    int max_key = 0, string address = null, bool doCosts= false, bool doRaw = false)
{
    // Set experiment directory
    CallerInfo ci = new CallerInfo(null, CallerName, CallerName, 1);
    exp_directory = Path.GetFullPath((null != ci.workingDir) ? ci.workingDir : Directory.GetCurrentDirectory());
    exp_directory = exp_directory + "/" + HomeName + "/" + AppName + "/" + StreamName;

    if (max_key == 0)
        max_key = num_operations;

    // Set a description/tag for the experiment
    this.exp_id = "Directory: " + HomeName + "/" + AppName + "/" + StreamName +
        " Caller:" + CallerName + " Stream Type:" + stream_type + " Stream Op: " + stream_op +
        " Stream Ptype: " + ptype + " Compression Type: " + ctype + " Value size: " + value.Length +
        " num_operations: " + max_key + " actual_num_ops: " + num_operations +
        " Sync type: " + synctype + " Do costs? " + doCosts +
        "Chunk Size: " + ChunkSize + " ThreadPool Size:" + ThreadPoolSize;

    this.compressed_exp_id = " ST:" + stream_type + " OP: " + stream_op + " PT: " + ptype +
        " CT: " + ctype + " VS: " + value.Length + " I:" + num_operations + " MK:" + max_key +
        " SYNC: " + synctype + " chsize: " + ChunkSize + " nThreads: " + ThreadPoolSize ;

    // Set remote storage server account info
    string AzureaccountName = ConfigurationManager.AppSettings.Get("AccountName");
    string AzureaccountKey = ConfigurationManager.AppSettings.Get("AccountSharedKey");
    string S3accountName = ConfigurationManager.AppSettings.Get("S3AccountName");
    string S3accountKey = ConfigurationManager.AppSettings.Get("S3AccountSharedKey");

    LocationInfo Li;
    if (synctype == SynchronizerType.Azure)
        Li = new LocationInfo(AzureaccountName, AzureaccountKey, SynchronizerType.Azure);
    else if (synctype == SynchronizerType.AmazonS3)
        Li = new LocationInfo(S3accountName, S3accountKey, SynchronizerType.AmazonS3);
    else
        Li = null;

    StreamFactory sf = StreamFactory.Instance;
    IStream stream = null;
    FqStreamID streamid = new FqStreamID(HomeName, AppName, StreamName);

    // Set op : R/W — all four *Append operations open for write; everything else reads.
    StreamFactory.StreamOp rw;
    if (stream_op == StreamOperation.RandomKeyRandomValueAppend ||
        stream_op == StreamOperation.RandomKeySameValueAppend ||
        stream_op == StreamOperation.SameKeyRandomValueAppend ||
        stream_op == StreamOperation.SameKeySameValueAppend)
    {
        rw = StreamFactory.StreamOp.Write;
    }
    else
    {
        rw = StreamFactory.StreamOp.Read;
    }

    // Initialize costs — priming reads so later deltas reflect only the experiment.
    CostsHelper costhelper = null;
    double baselineStorageKV = 0;
    if (doCosts)
    {
        costhelper = new CostsHelper();
        costhelper.getCurrentCpuUsage();
        costhelper.getNetworkUsage();
    }

    // Raw cloud throughput: upload/download one big blob and return (no stream layer).
    if (stream_type == StreamType.CloudRaw)
    {
        if (!Directory.Exists(exp_directory))
        {
            Directory.CreateDirectory(exp_directory);
        }
        Logger logger = new Logger();
        Byte[] val = new Byte[value.Length * num_operations];
        // DateTime Date = new DateTime(DateTime.UtcNow.Ticks);
        // string cname = String.Format("CloudRaw-{0}", Date.ToString("yyyy-MM-dd"));
        // string bname = String.Format("{0}", Date.ToString("HH-mm-ss"));
        // string cname = String.Format("cloudraw-{0}", RandomString(4));
        // string bname = String.Format("{0}", RandomString(4));
        string cname = String.Format("cloudraw-{0}", RandName);
        string bname = String.Format("{0}", RandName);
        if (stream_op == StreamOperation.RandomKeyGet ||
            stream_op == StreamOperation.RandomKeyGetMultipleSegments ||
            stream_op == StreamOperation.RandomKeyGetAll)
        {
            doRawCloudPerf(val, SynchronizerType.Azure, SynchronizeDirection.Download,
                exp_directory, logger, containerName: cname, blobName: bname);
            logger.Dump(exp_directory + "/log");
        }
        else
        {
            doRawCloudPerf(val, SynchronizerType.Azure, SynchronizeDirection.Upload,
                exp_directory, logger, containerName: cname, blobName: bname);
            logger.Dump(exp_directory + "/log");
        }
        return;
    }

    // Raw disk (unbuffered) throughput via the experiment helper, then return.
    if (stream_type == StreamType.DiskRaw)
    {
        if (!Directory.Exists(exp_directory))
        {
            Directory.CreateDirectory(exp_directory);
        }
        Logger logger = doDiskRaw(stream_op, num_operations, value.Length, ptype, exp_directory);
        logger.Dump(exp_directory + "/log");
        return;
    }

    // Are we getting raw disk throughput?
    if (stream_type == StreamType.Raw)
    {
        string ret = doDiskSpeed((value.Length * num_operations)/1000 + "K", value.Length/1000 + "K", rw);
        if (!Directory.Exists(exp_directory))
        {
            Directory.CreateDirectory(exp_directory);
        }
        File.WriteAllText(exp_directory + "/log", ret);
        return;
    }

    // Populate the keys and the values
    Random random = new Random(DateTime.Now.Millisecond);
    StrKey[] keys = new StrKey[max_key];
    for (int i = 0; i < max_key; ++i)
    {
        keys[i] = new StrKey("" + i);
    }
    /*
    List<ByteValue> vals = new List<ByteValue>(num_operations);
    Byte[][] tmp = new Byte[num_operations][];
    for (int i = 0; i < num_operations; ++i)
    {
        tmp[i] = new Byte[value.Length];
        random.NextBytes(tmp[i]);
    }
    for (int i = 0; i < num_operations; ++i)
    {
        keys[i] = new StrKey("" + i);
        vals.Add(new ByteValue(tmp[i]));
        // vals[i] = new ByteValue(tmp);
    }
    */
    Logger log = new Logger();

    // Open stream for different types of experiments
    if (stream_type == StreamType.Local && ptype == StreamFactory.StreamPhysicalType.File)
    {
        stream = sf.openFileStream<StrKey, ByteValue>(streamid, ci, null,
            StreamFactory.StreamSecurityType.Plain, ctype, rw, address, ChunkSize, ThreadPoolSize, log);
    }
    else if (stream_type == StreamType.LocalEnc && ptype == StreamFactory.StreamPhysicalType.File)
    {
        stream = sf.openFileStream<StrKey, ByteValue>(streamid, ci, null,
            StreamFactory.StreamSecurityType.Secure, ctype, rw, address, ChunkSize, ThreadPoolSize, log);
    }
    else if (stream_type == StreamType.Remote && ptype == StreamFactory.StreamPhysicalType.File)
    {
        stream = sf.openFileStream<StrKey, ByteValue>(streamid, ci, Li,
            StreamFactory.StreamSecurityType.Plain, ctype, rw, address, ChunkSize, ThreadPoolSize, log);
    }
    else if (stream_type == StreamType.RemoteEnc && ptype == StreamFactory.StreamPhysicalType.File)
    {
        stream = sf.openFileStream<StrKey, ByteValue>(streamid, ci, Li,
            StreamFactory.StreamSecurityType.Secure, ctype, rw, address, ChunkSize, ThreadPoolSize, log);
    }
    else if (stream_type == StreamType.Local && ptype == StreamFactory.StreamPhysicalType.Directory)
    {
        stream = sf.openDirStream<StrKey>(streamid, ci, null,
            StreamFactory.StreamSecurityType.Plain, ctype, rw, address, ChunkSize, ThreadPoolSize, log);
    }
    else if (stream_type == StreamType.LocalEnc && ptype == StreamFactory.StreamPhysicalType.Directory)
    {
        stream = sf.openDirStream<StrKey>(streamid, ci, null,
            StreamFactory.StreamSecurityType.Secure, ctype, rw, address, ChunkSize, ThreadPoolSize, log);
    }
    else if (stream_type == StreamType.Remote && ptype == StreamFactory.StreamPhysicalType.Directory)
    {
        stream = sf.openDirStream<StrKey>(streamid, ci, Li,
            StreamFactory.StreamSecurityType.Plain, ctype, rw, address, ChunkSize, ThreadPoolSize, log);
    }
    else if (stream_type == StreamType.RemoteEnc && ptype == StreamFactory.StreamPhysicalType.Directory)
    {
        stream = sf.openDirStream<StrKey>(streamid, ci, Li,
            StreamFactory.StreamSecurityType.Secure, ctype, rw, address, ChunkSize, ThreadPoolSize, log);
    }
    else
    {
        return;
    }

    // Perform the operation mix requested by stream_op.
    if (stream_op == StreamOperation.RandomKeyRandomValueAppend)
    {
        List<ByteValue> vals = new List<ByteValue>(num_operations);
        Byte[][] tmp = new Byte[num_operations][];
        for (int i = 0; i < num_operations; ++i)
        {
            tmp[i] = new Byte[value.Length];
            random.NextBytes(tmp[i]);
        }
        for (int i = 0; i < num_operations; ++i)
        {
            vals.Add(new ByteValue(tmp[i]));
        }
        for (int i = 0; i < num_operations; ++i)
        {
            baselineStorageKV += keys[i].Size();
            baselineStorageKV += vals[i].Size();
            stream.Append(keys[i], vals[i]);
        }
        stream.Close();
    }
    else if (stream_op == StreamOperation.RandomKeySameValueAppend)
    {
        Byte[] singleval = new Byte[value.Length];
        random.NextBytes(singleval);
        ByteValue singlebv = new ByteValue(singleval);
        for (int i = 0; i < num_operations; ++i)
        {
            baselineStorageKV += keys[i].Size();
            baselineStorageKV += value.Length;
            stream.Append(keys[i], singlebv);
        }
        stream.Close();
    }
    else if (stream_op == StreamOperation.SameKeySameValueAppend)
    {
        StrKey key = new StrKey("ExpKey");
        Byte[] singleval = new Byte[value.Length];
        random.NextBytes(singleval);
        ByteValue singlebv = new ByteValue(singleval);
        for (int i = 0; i < num_operations; ++i)
        {
            stream.Append(key, singlebv);
            // System.Threading.Thread.Sleep(10);
        }
        stream.Close();
    }
    else if (stream_op == StreamOperation.RandomKeyGet ||
             stream_op == StreamOperation.RandomKeyGetMultipleSegments)
    {
        for (int i = 0; i < num_operations; ++i)
        {
            stream.Get(keys[random.Next(0, max_key)]);
        }
        stream.Close();
    }
    else if (stream_op == StreamOperation.RandomKeyGetAll)
    {
        // Range reads over a random window [st, et]; i advances per returned item.
        StrKey key = new StrKey("ExpKey");
        for (int i = 0; i < num_operations; )
        {
            long st = 0;
            long et = -1;
            Console.WriteLine(stime + ":" + etime);
            while (et < st)
            {
                st = RandomLong(stime, etime, random);
                // et = RandomLong(stime, etime, random);
                et = st + (10 * 10 * TimeSpan.TicksPerMillisecond);
            }
            Console.WriteLine(st + ":" + et);
            IEnumerable<IDataItem> iterator = stream.GetAll(key, st, et);
            foreach (IDataItem data in iterator)
            {
                data.GetVal();
                ++i;
                if (i == num_operations)
                    break;
            }
        }
        stream.Close();
    }
    else if (stream_op == StreamOperation.SameKeyRandomValueAppend)
    {
        StrKey key = new StrKey("ExpKey");
        for (int i = 0; i < num_operations; ++i)
        {
            baselineStorageKV += key.Size();
            // baselineStorageKV += vals[i].Size();
            // stream.Append(key, vals[i]);
        }
        stream.Close();
    }
    else
    {
        for (int i = 0; i < num_operations; ++i)
        {
            stream.Get(new StrKey("" + random.Next(0,num_operations - 1)));
        }
        stream.Close();
    }

    // Dump the instrumentation logs
    stream.DumpLogs(exp_directory + "/log");

    // Collect costs usage
    List<string> costs = new List<string>();
    if (doCosts)
    {
        costs.Add(DateTime.UtcNow.Ticks + ": CPU: " + costhelper.getCurrentCpuUsage());
        costs.Add(DateTime.UtcNow.Ticks + ": Network: " + costhelper.getNetworkUsage());
        costs.Add(DateTime.UtcNow.Ticks + ": DataRelated Storage: " + costhelper.getStorageUsage(this.exp_directory, dataRelated:true)/1000.0f);
        costs.Add(DateTime.UtcNow.Ticks + ": Constant Storage: " + costhelper.getStorageUsage(this.exp_directory, dataRelated:false)/1000.0f);
        costs.Add(DateTime.UtcNow.Ticks + ": Baseline Storage: " + baselineStorageKV/1000.0f);
    }
    File.AppendAllLines(exp_directory + "/log", costs);
    // sf.deleteStream(streamid, ci);
}
/// <summary>
/// Uploads up to 300 per-home meter-data files from the fixed data directory as file
/// streams keyed by the outdoor temperature at the reading's hour (temperature map is
/// built from the weather file).
/// </summary>
private static void UploadDataAsStreams()
{
    string directory = "D:\\data-hds\\data-hds";
    int count = 0;
    int UploadCount = 300;

    // Build the hour-resolution timestamp -> outdoor temperature map.
    Dictionary<DateTime, double> ts_temperature = new Dictionary<DateTime, double>();
    // BUGFIX: readers are now wrapped in using blocks so file handles are released
    // even when a malformed line makes Convert/Parse throw.
    using (StreamReader wfile = new System.IO.StreamReader(@"..\\..\\data\\weather.txt"))
    {
        string wline;
        while ((wline = wfile.ReadLine()) != null)
        {
            string[] words = wline.Split('\t');
            DateTime date = Convert.ToDateTime(words[4]);
            date = date.AddHours(Int32.Parse(words[5]));
            double temperature = Double.Parse(words[0]);
            ts_temperature[date] = temperature;
        }
    }

    foreach (string filePath in Directory.GetFiles(directory))
    {
        Console.WriteLine("file name:" + filePath);
        // SECURITY NOTE(review): storage credentials are hard-coded in source; move to
        // configuration or a secret store and rotate this key.
        string AzureaccountName = "testdrive";
        string AzureaccountKey = "zRTT++dVryOWXJyAM7NM0TuQcu0Y23BgCQfkt7xh2f/Mm+r6c8/XtPTY0xxaF6tPSACJiuACsjotDeNIVyXM8Q==";
        LocationInfo li = new LocationInfo(AzureaccountName, AzureaccountKey, SynchronizerType.Azure);
        FqStreamID fq_sid = new FqStreamID("realhome" + count, "A", "TestBS");
        CallerInfo ci = new CallerInfo(null, "A", "A", 1);
        StreamFactory sf = StreamFactory.Instance;
        sf.deleteStream(fq_sid, ci);
        IStream dfs_byte_val = sf.openFileStream<DoubleKey, ByteValue>(fq_sid, ci, li,
            StreamFactory.StreamSecurityType.Plain, CompressionType.None, StreamFactory.StreamOp.Write,
            mdServer, 4*1024*1024, 1, new Logger());

        using (System.IO.StreamReader file = new System.IO.StreamReader(filePath))
        {
            string line;
            while ((line = file.ReadLine()) != null)
            {
                string[] words = line.Split('\t');
                DateTime date = Convert.ToDateTime(words[0]);
                // words[1] is an HHMM-style hour field, e.g. 1300 -> +13h.
                date = date.AddHours(int.Parse(words[1]) / 100);
                DoubleKey key = new DoubleKey(((int)(ts_temperature[date])));
                dfs_byte_val.Append(key,
                    new ByteValue(BitConverter.GetBytes(Double.Parse(words[2]))),
                    DateTimeToUnixTimestamp(date));
            }
        }
        dfs_byte_val.Close();
        count++;
        if (count == UploadCount)
            break;
    }
}
/// <summary>
/// For each home, reads energy samples for every temperature bucket in [-30, 40] over the
/// given time window from the remote stream, accumulating per-home and all-home
/// temperature-to-energy lists and writing them to result files.
/// </summary>
/// <returns>Total retrieval time across all homes, in ticks.</returns>
private static long RemoteRead(int numberOfHomes, DateTime start, DateTime end, string tag)
{
    Dictionary<int, List<double>> temp_energy_allhomes = new Dictionary<int, List<double>>();
    Dictionary<int, List<double>> temp_energy_home;
    long retVal = 0;
    for (int i = 0; i < numberOfHomes; i++)
    {
        temp_energy_home = new Dictionary<int, List<double>>();
        long start_ticks = DateTime.Now.Ticks;
        for (int temp = -30; temp <= 40; temp++)
        {
            // SECURITY NOTE(review): storage credentials are hard-coded in source; move
            // to configuration or a secret store and rotate this key.
            string AzureaccountName = "testdrive";
            string AzureaccountKey = "zRTT++dVryOWXJyAM7NM0TuQcu0Y23BgCQfkt7xh2f/Mm+r6c8/XtPTY0xxaF6tPSACJiuACsjotDeNIVyXM8Q==";
            LocationInfo li = new LocationInfo(AzureaccountName, AzureaccountKey, SynchronizerType.Azure);
            FqStreamID fq_sid = new FqStreamID("realhome" + i, "A", "TestBS");
            CallerInfo ci = new CallerInfo(null, "A", "A", 1);
            StreamFactory sf = StreamFactory.Instance;
            IStream dfs_byte_val = sf.openFileStream<DoubleKey, ByteValue>(fq_sid, ci, li,
                StreamFactory.StreamSecurityType.Plain, CompressionType.None, StreamFactory.StreamOp.Read,
                mdServer, 4*1024*1024, 1, new Logger());
            IEnumerable<IDataItem> vals = dfs_byte_val.GetAll(new DoubleKey(temp),
                DateTimeToUnixTimestamp(start), DateTimeToUnixTimestamp(end));

            // BUGFIX: the per-temperature lists were never created before the first Add,
            // so any non-empty result threw KeyNotFoundException; initialize them here.
            if (!temp_energy_home.ContainsKey(temp))
                temp_energy_home[temp] = new List<double>();
            if (!temp_energy_allhomes.ContainsKey(temp))
                temp_energy_allhomes[temp] = new List<double>();

            foreach (IDataItem val in vals)
            {
                double energy = BitConverter.ToDouble(val.GetVal().GetBytes(), 0);
                temp_energy_home[temp].Add(energy);
                temp_energy_allhomes[temp].Add(energy);
            }

            // BUGFIX: the opened read stream was never closed, leaking a handle per
            // (home, temperature) pair.
            dfs_byte_val.Close();
        }
        long end_ticks = DateTime.Now.Ticks;
        retVal += end_ticks - start_ticks;
        WriteToFile(".\\result-realhome-" + i + "-n-" + numberOfHomes + "-" + tag, temp_energy_home);
    }
    WriteToFile(".\\result-allhomes-n-" + numberOfHomes + "-" + tag, temp_energy_allhomes);
    return retVal;
}
/// <summary>
/// Experiment sweep driver: iterates the full cross-product of stream types, addresses,
/// value sizes, physical types, compression, sync types, operations, chunk sizes, thread
/// pool sizes and operation counts, running each configuration n_iterations times. Read
/// operations first run a write pass to create the stream, then clean its logs. Results
/// are summarized into a timestamped Results directory.
/// NOTE(review): logic left byte-identical; formatting restored and comments added only.
/// </summary>
static void Main(string[] args)
{
    string dir = "";
    List<string> summary = new List<string>();
    foreach (StreamType type in types) {
    foreach (String address in streamListType) {
    foreach (int size in data_size) {
    foreach (StreamFactory.StreamPhysicalType ptype in ptypes) {
    foreach (CompressionType ctype in ctypes) {
    foreach (SynchronizerType synctype in synctypes) {
    foreach (StreamOperation op in ops) {
    foreach (int chunksize in ChunkSize) {
    foreach (int ThreadPoolSize in num_threads) {
    foreach (int num_operations in n_operations) {
    for (int i = 0; i < n_iterations; ++i)
    {
        byte[] data = new byte[size];
        Random rnd = new Random(DateTime.Now.Millisecond);
        rnd.NextBytes(data);
        Experiment e = new Experiment();
        DateTime Date = new DateTime(DateTime.UtcNow.Ticks);
        string HomeName = String.Format("ExpHome-{0}", Date.ToString("yyyy-MM-dd"));
        string Caller = String.Format("{0}", Date.ToString("HH-mm-ss"));
        string AppName = Caller;
        string StreamName = String.Format("{0}", rnd.Next());
        Byte[] value = data;
        // int num_operations = n_operations;
        string RandString = Experiment.RandomString(4);
        long stime = 0;
        long etime = 0;

        // Run the experiment
        if (op == StreamOperation.RandomKeyGet ||
            op == StreamOperation.RandomKeyGetMultipleSegments ||
            op == StreamOperation.RandomKeyGetAll)
        {
            // Need to create the stream before reading it
            stime = StreamFactory.NowUtc();
            Console.WriteLine("Stime: " + stime);
            StreamOperation tmp = StreamOperation.RandomKeySameValueAppend;
            if (op == StreamOperation.RandomKeyGetAll)
                tmp = StreamOperation.SameKeySameValueAppend;
            e.Run(Caller, HomeName, AppName, StreamName, RandString, stime, etime, type, tmp,
                ptype, ctype, chunksize, ThreadPoolSize, value, num_operations, synctype,
                doCosts: true, address: address);
            etime = StreamFactory.NowUtc();
            Console.WriteLine("Etime: " + etime);

            // Clean the write pass's logs so the read pass starts from a clean slate.
            CallerInfo ci = new CallerInfo(null, AppName, AppName, 1);
            string exp_directory = Path.GetFullPath((null != ci.workingDir) ? ci.workingDir : Directory.GetCurrentDirectory());
            exp_directory = exp_directory + "/" + HomeName + "/" + AppName + "/" + StreamName;
            File.Delete(exp_directory + "/log");
            try
            {
                File.Delete(exp_directory + "/results");
                File.Delete(exp_directory + "/exp");
            }
            catch { }
            if (type == StreamType.Remote || type == StreamType.RemoteEnc)
            {
                Directory.Delete(exp_directory + "/0", true);
                File.Delete(exp_directory + "/index_md.dat");
            }
        }

        TimeSpan span = DateTime.Now.Subtract(new DateTime(1970, 1, 1, 0, 0, 0));
        Console.WriteLine("Time: " + span.TotalSeconds);

        if (op == StreamOperation.RandomKeyGetMultipleSegments || op == StreamOperation.RandomKeyGet)
        {
            e.Run(Caller, HomeName, AppName, StreamName, RandString, stime, etime, type, op,
                ptype, ctype, chunksize, ThreadPoolSize, value, num_multi_segment_operations,
                synctype, max_key: num_operations, doCosts: true, address: address);
        }
        else
        {
            e.Run(Caller, HomeName, AppName, StreamName, RandString, stime, etime, type, op,
                ptype, ctype, chunksize, ThreadPoolSize, value, num_operations, synctype,
                doCosts: true, address: address);
        }

        dir = e.exp_directory + "/../../../Results/";
        Console.WriteLine("Completed: " + e.ToString());
        span = DateTime.Now.Subtract(new DateTime(1970, 1, 1, 0, 0, 0));
        Console.WriteLine("Time: " + span.TotalSeconds);
        Console.WriteLine("====================================");

        // Dump raw data from the experiment
        e.Dump(Caller, HomeName, AppName, StreamName, type, op, ptype, value, num_operations, synctype);

        // Get parsed data of the experiment
        // if (i != 0)
        //{ // Ignore the first iteration as warmup
        summary.Add(e.exp_id);
        List<string> ret = e.Analyze(type, value, num_operations);
        summary.AddRange(ret);
        //}
        System.Threading.Thread.Sleep(2000);
        e.Destroy();
    } } } } } } } } } } }

    dir = dir + StreamFactory.PrettyNowUtc();
    if (!Directory.Exists(dir))
    {
        try
        {
            Directory.CreateDirectory(dir);
        }
        catch (Exception e)
        {
            Console.WriteLine(e.ToString());
        }
    }
    dir = dir + "/";
    File.WriteAllLines(dir + "Summary", summary);

    // Dump .dat files
    SummaryToDat(dir);

    // Thats it!
    Console.WriteLine("Done!");
    Console.ReadKey();
}
/// <summary>
/// Manual smoke-test driver for the value data stream API: creates a stream,
/// appends values under one key, seals, appends again, then reopens the stream
/// and reads the values back. Pauses on ReadKey between phases so the operator
/// can inspect on-disk/remote state.
/// </summary>
static void Main(string[] args)
{
    // Azure credentials come from App.config; li is built but nulled below,
    // so this run actually exercises the local (non-Azure) path.
    string AzureaccountName = ConfigurationManager.AppSettings.Get("AccountName");
    string AzureaccountKey = ConfigurationManager.AppSettings.Get("AccountSharedKey");
    LocationInfo li = new LocationInfo(AzureaccountName, AzureaccountKey, SynchronizerType.Azure);

    /* string dataFile = "D:\\b"; int KB = 1024; int[] chunk_sizes = { 4*1024*KB , 8*1024*KB }; for (int i = 1; i <= 1; i++) { for (int threads = 1; threads <= 1; threads++) { foreach (int csize in chunk_sizes) { Console.Write(">"); File.Copy(dataFile, dataFile + threads + "," + csize); AzureHelper helper = new AzureHelper(AzureaccountName, AzureaccountKey, "foo123123", CompressionType.None, EncryptionType.None, null, null, new Logger(), csize, threads); long start = DateTime.Now.Ticks; helper.UploadFileAsChunks(dataFile + threads + "," + csize); long end = DateTime.Now.Ticks; Console.WriteLine(threads + "," + csize + "," + (((double)(end - start) / (double)10000000)) ); } } } */

    li = null; // force local storage for this test run

    FqStreamID fq_sid = new FqStreamID("1299-2716", "A", "TestBS");
    CallerInfo ci = new CallerInfo(null, "A", "A", 1);
    StreamFactory sf = StreamFactory.Instance;

    // Start from a clean slate: remove any leftover stream with this id.
    sf.deleteStream(fq_sid, ci);
    IStream dfs_byte_val = sf.openValueDataStream<StrKey, ByteValue>(fq_sid, ci, li,
        StreamFactory.StreamSecurityType.Plain, CompressionType.None, StreamFactory.StreamOp.Write);

    // Two appends, a seal, then a third append — exercises append-after-seal.
    StrKey k1 = new StrKey("k1");
    dfs_byte_val.Append(k1, new ByteValue(StreamFactory.GetBytes("k1-cmu")));
    dfs_byte_val.Append(k1, new ByteValue(StreamFactory.GetBytes("k1-msr")));
    dfs_byte_val.Seal(false);
    dfs_byte_val.Append(k1, new ByteValue(StreamFactory.GetBytes("k1-uw")));
    dfs_byte_val.Close();
    Console.ReadKey();

    // Reopen and read everything back for k1.
    // NOTE(review): opened with StreamOp.Write although only Get/GetAll follow —
    // looks like it should be StreamOp.Read; confirm against openValueDataStream semantics.
    dfs_byte_val = sf.openValueDataStream<StrKey, ByteValue>(fq_sid, ci, li,
        StreamFactory.StreamSecurityType.Plain, CompressionType.None, StreamFactory.StreamOp.Write);
    Console.WriteLine("Get in read : " + dfs_byte_val.Get(k1));
    IEnumerable<IDataItem> data = dfs_byte_val.GetAll(k1, 0, StreamFactory.NowUtc());
    foreach (IDataItem dataItem in data)
        Console.WriteLine(dataItem.GetVal().ToString());
    dfs_byte_val.Close();
    Console.ReadKey();

    /* ValueSerializerBase<StrKey> vsb = new ValueSerializerBase<StrKey>(); Byte[] buffer1 = vsb.SerializeToByteStream().ToArray(); Byte[] buffer2 = SerializerHelper<StrKey>.SerializeToProtoStream(k1).ToArray(); FileStream fout = new FileStream("tmp.txt", FileMode.OpenOrCreate, FileAccess.Write, FileShare.ReadWrite); BinaryWriter fs_bw = new BinaryWriter(fout); fs_bw.Write(buffer1); fs_bw.Write("-----W00t!-----"); fs_bw.Write(buffer2); fs_bw.Write("-----W00t!-----"); fs_bw.Close(); fout.Close(); */
}
public long RemoteMatch(List<int> targetSMPCvector) { dataStreams.Clear(); StreamFactory sf = StreamFactory.Instance; for (int i = 1; i <= numberOfStreams; i++) { CallerInfo ci = new CallerInfo(null, "A", "A", 1); FqStreamID fq_sid = new FqStreamID(fqprefix +"-"+ window + "-" + i + "/" + numberOfStreams + "-" + chunkSize, "A", "TestBS"); IStream dfs_byte_val = sf.openValueDataStream<StrKey, ByteValue>(fq_sid, ci, li, StreamFactory.StreamSecurityType.Plain, CompressionType.None, StreamFactory.StreamOp.Read, mdserveraddress: mdServer, ChunkSizeForUpload: chunkSize, ThreadPoolSize: 1, log: new Logger()); dataStreams.Add(dfs_byte_val); } long start = DateTime.Now.Ticks; NumberOfMatches(targetSMPCvector); long time = DateTime.Now.Ticks - start; return time; }
/// <summary>
/// Deletes a stream: its local on-disk data, its per-segment cloud containers,
/// and its metadata — either via a remote metadata service (when
/// <paramref name="mdserveraddress"/> is given) or via the local metadata file.
/// </summary>
/// <param name="streamId">Fully-qualified id of the stream to delete.</param>
/// <param name="Ci">Caller info; its workingDir (or the current directory) roots the local stream data.</param>
/// <param name="mdserveraddress">Optional HTTP address of the remote metadata service.</param>
/// <returns>false when the stream is unknown to the remote service or the service call fails; true otherwise.</returns>
public bool deleteStream(FqStreamID streamId, CallerInfo Ci, string mdserveraddress = null)
{
    Logger log = new Logger();
    string BaseDir = Path.GetFullPath((null != Ci.workingDir) ? Ci.workingDir : Directory.GetCurrentDirectory());
    string targetDir = BaseDir + "/" + streamId.ToString();

    // string address = ConfigurationManager.AppSettings.Get("MDServerAddress");
    string address = mdserveraddress;
    MetaDataService.IMetaDataService clnt = null;
    if (address != null)
    {
        // NOTE(review): this channel/factory is never closed or aborted — WCF client
        // channels should be cleaned up when done. Left untouched pending a wider fix.
        BasicHttpBinding binding = new BasicHttpBinding();
        ChannelFactory<MetaDataService.IMetaDataService> factory =
            new ChannelFactory<MetaDataService.IMetaDataService>(binding, address);
        clnt = factory.CreateChannel();
    }

    Dictionary<int, MetaDataService.AccountInfo> accounts = null;
    string containerName;
    ISync synchronizerForDelete;

    if (address != null)
    {
        try
        {
            MetaDataService.FQStreamID stream = new MetaDataService.FQStreamID();
            stream.HomeId = streamId.HomeId;
            stream.AppId = streamId.AppId;
            stream.StreamId = streamId.StreamId;
            accounts = clnt.GetAllAccounts(stream);
            if (accounts == null)
            {
                // no stream of this name on the metadata server
                return (false);
            }
            // TODO: Authentication for this update call
            clnt.RemoveAllInfo(stream);
        }
        catch (Exception e)
        {
            Console.WriteLine("Exception caught {0}", e);
            return (false);
        }

        foreach (MetaDataService.AccountInfo account in accounts.Values)
        {
            // Remove the local segment directory, then the matching cloud container.
            Boom(targetDir + "/" + account.num);
            containerName = streamId.ToString().Replace('/', '-').ToLower() + "-" + account.num;
            synchronizerForDelete = CreateSyncForAccount(account, containerName, log);
            // Fix: guard against a failed sync creation (the local branch already did this).
            if (synchronizerForDelete != null)
            {
                synchronizerForDelete.Delete();
            }
        }

        // Metadata container: account info for meta stream = that of seg 0. strange?
        // Fix: ElementAt(0) threw InvalidOperationException when the account set was empty.
        if (accounts.Count > 0)
        {
            containerName = streamId.ToString().Replace('/', '-').ToLower();
            synchronizerForDelete = CreateSyncForAccount(accounts.ElementAt(0).Value, containerName, log);
            if (synchronizerForDelete != null)
            {
                synchronizerForDelete.Delete();
            }
        }
    }
    else
    {
        // Local path: read account info from the stream's on-disk metadata file.
        LocalMetaDataServer localMdServer = new LocalMetaDataServer(targetDir + "/" + MetaStream<StrKey, StrValue>.StreamMDFileName, log);
        localMdServer.LoadMetaDataServer();
        Dictionary<int, AccountInfo> tmp = localMdServer.GetAllAccounts(new FQStreamID(streamId.HomeId, streamId.AppId, streamId.StreamId));
        if (tmp != null)
        {
            MetaDataService.AccountInfo ai = null;
            MetaDataService.AccountInfo segment0Ai = null;
            foreach (AccountInfo account in tmp.Values)
            {
                Boom(targetDir + "/" + account.num);
                containerName = streamId.ToString().Replace('/', '-').ToLower() + "-" + account.num;
                // Copy into the service-side AccountInfo shape expected by CreateSyncForAccount.
                ai = new MetaDataService.AccountInfo();
                ai.accountKey = account.accountKey;
                ai.accountName = account.accountName;
                ai.location = account.location;
                ai.keyVersion = account.keyVersion;
                ai.num = account.num;
                synchronizerForDelete = CreateSyncForAccount(ai, containerName, log);
                if (synchronizerForDelete != null)
                {
                    synchronizerForDelete.Delete();
                }
                if (segment0Ai == null)
                {
                    segment0Ai = ai;
                }
            }
            // TODO account info for meta stream = that of seg 0? something is wrong.
            // Fix: segment0Ai stays null when the account set is empty — skip rather than pass null.
            if (segment0Ai != null)
            {
                containerName = streamId.ToString().Replace('/', '-').ToLower();
                synchronizerForDelete = CreateSyncForAccount(segment0Ai, containerName, log);
                if (synchronizerForDelete != null)
                {
                    synchronizerForDelete.Delete();
                }
            }
        }
    }

    // Finally remove the stream's root directory itself.
    Boom(targetDir);
    return (true);
}
/// <summary>
/// Runs the naive preheat prediction loop: for each slot, reconstructs the
/// current and previous days' occupancy vectors from the stream, predicts the
/// next occupancy, appends the ground truth (from the data file, or random when
/// none was given), and logs per-slot retrieve/compute/append timings.
/// </summary>
/// <param name="startSlotIndex">Slot to start from; lines of the data file before it are skipped.</param>
/// <param name="endSlotIndex">Loop stops once slotIndex reaches this value (or at EOF of the data file).</param>
/// <returns>Currently always empty — the per-slot RetVal accumulation is commented out.</returns>
public List<RetVal> PredictOccupancy(long startSlotIndex, long endSlotIndex)
{
    List<RetVal> retVal = new List<RetVal>();
    System.IO.StreamReader datafile = null;
    if (dataFilePath != null) // assuming datafile has one occupancy value per line: read up to startSlotIndex
    {
        string line;
        int counter = 0;
        datafile = new System.IO.StreamReader(this.dataFilePath);
        if (startSlotIndex != 0)
        {
            while ((line = datafile.ReadLine()) != null)
            {
                if (counter == startSlotIndex)
                    break;
                counter++;
            }
        }
    }

    // Recreate the ground-truth stream from scratch (local storage, 4 MB chunks).
    StreamFactory streamFactory = StreamFactory.Instance;
    FqStreamID fq_sid = new FqStreamID("preheatnaive", "A", "TestBS");
    CallerInfo ci = new CallerInfo(null, "A", "A", 1);
    streamFactory.deleteStream(fq_sid, ci);
    IStream occupancyGroundTruthStream = streamFactory.openValueDataStream<StrKey, ByteValue>(fq_sid, ci, null,
        StreamFactory.StreamSecurityType.Plain, CompressionType.None, StreamFactory.StreamOp.Write,
        null, 4 * 1024 * 1024, 1, new Logger());

    int slotIndex = 0;
    long startTime, retrievelTime, computeTime, appendTime;
    try
    {
        while (true)
        {
            // Phase 1: retrieve the POV vectors for this slot from the stream.
            startTime = DateTime.Now.Ticks;
            List<int> currentPOV = ConstructCurrentPOV(occupancyGroundTruthStream, slotIndex);
            List<List<int>> previousDaysPOV = ConstructPreviousPOV(occupancyGroundTruthStream, slotIndex);
            retrievelTime = DateTime.Now.Ticks - startTime;

            // Phase 2: run the prediction (result only used by the commented-out RetVal line).
            startTime = DateTime.Now.Ticks;
            int predictedOccupancy = Predict(currentPOV, previousDaysPOV);
            computeTime = DateTime.Now.Ticks - startTime;

            // Phase 3: append this slot's ground truth.
            startTime = DateTime.Now.Ticks;
            if (datafile == null) // if no datafile to read the ground truth from just append randomly
            {
                occupancyGroundTruthStream.Append(occupancyKey, new ByteValue(BitConverter.GetBytes(random.Next(2))), slotIndexBase + slotIndex);
            }
            else
            {
                string line = datafile.ReadLine();
                if (line == null)
                {
                    Console.WriteLine("reached the end of datafile");
                    break;
                }
                occupancyGroundTruthStream.Append(occupancyKey, new ByteValue(StreamFactory.GetBytes(line)), slotIndexBase + slotIndex);
            }
            slotIndex++;
            appendTime = DateTime.Now.Ticks - startTime;

            Console.WriteLine("Slot number {0} {1} {2} {3}", slotIndex, retrievelTime, computeTime, appendTime);
            using (results = File.AppendText(outputFilePath))
                results.WriteLine("Slot number {0} {1} {2} {3}", slotIndex, retrievelTime, computeTime, appendTime);
            //retVal.Add(new RetVal(endTime - startTime, predictedOccupancy));

            if (slotIndex == endSlotIndex)
                break;
        }
    }
    finally
    {
        // Fix: the data-file reader was never closed on any exit path (resource leak),
        // and the stream stayed open if an exception escaped the loop.
        if (datafile != null)
            datafile.Close();
        occupancyGroundTruthStream.Close();
    }
    return retVal;
}
private static long RemoteRead(int numberOfHomes, DateTime start, DateTime end, string tag) { Dictionary<int, List<double>> temp_energy_allhomes= new Dictionary<int,List<double>>(); Dictionary<int, List<double>> temp_energy_home; long retVal=0; for(int i = 0 ; i <numberOfHomes ; i++) { temp_energy_home = new Dictionary<int,List<double>>(); LocationInfo li = new LocationInfo(AzureaccountName, AzureaccountKey, SynchronizerType.Azure); FqStreamID fq_sid = new FqStreamID("crrealhome" + i, "A", "TestBS"); CallerInfo ci = new CallerInfo(null, "A", "A", 1); StreamFactory sf = StreamFactory.Instance; IStream dfs_byte_val = sf.openValueDataStream<DoubleKey, ByteValue>(fq_sid, ci, li, StreamFactory.StreamSecurityType.Plain, CompressionType.None, StreamFactory.StreamOp.Read, mdServer, 4 * 1024 * 1024, 1, new Logger()); long start_ticks = DateTime.Now.Ticks; for (int temp = -30; temp <= 40; temp++) { IEnumerable<IDataItem> vals = dfs_byte_val.GetAll(new DoubleKey(temp), DateTimeToUnixTimestamp(start), DateTimeToUnixTimestamp(end)); if (vals != null) { foreach (IDataItem val in vals) { if (!temp_energy_home.ContainsKey(temp)) temp_energy_home[temp] = new List<double>(); if (!temp_energy_allhomes.ContainsKey(temp)) temp_energy_allhomes[temp] = new List<double>(); temp_energy_home[temp].Add(BitConverter.ToDouble(val.GetVal().GetBytes(), 0)); temp_energy_allhomes[temp].Add(BitConverter.ToDouble(val.GetVal().GetBytes(), 0)); } } } dfs_byte_val.Close(); long end_ticks = DateTime.Now.Ticks; retVal+=end_ticks - start_ticks; WriteToFile(".\\result-realhome-" + i + "-n-" + numberOfHomes + "-" + tag, temp_energy_home); } WriteToFile(".\\result-allhomes-n-" + numberOfHomes + "-" + tag, temp_energy_allhomes); return retVal; }
/// <summary>
/// Azure-backed variant of the preheat prediction loop: per slot, reconstructs
/// POV vectors via the Simple* helpers, predicts occupancy, appends the whole
/// current-day vector (ints flattened to bytes) to the stream, and logs per-slot
/// retrieve/compute/insert timings.
/// </summary>
/// <param name="startSlotIndex">Slot to start from; lines of the data file before it are skipped.</param>
/// <param name="endSlotIndex">Loop stops once slotIndex reaches this value (or at EOF of the data file).</param>
/// <returns>Currently always empty — the per-slot RetVal accumulation is commented out.</returns>
new public List<RetVal> PredictOccupancy(long startSlotIndex, long endSlotIndex)
{
    List<RetVal> retVal = new List<RetVal>();
    System.IO.StreamReader datafile = null;
    if (dataFilePath != null) // assuming datafile has one occupancy value per line: read up to startSlotIndex
    {
        string line;
        int counter = 0;
        datafile = new System.IO.StreamReader(this.dataFilePath);
        if (startSlotIndex != 0)
        {
            while ((line = datafile.ReadLine()) != null)
            {
                if (counter == startSlotIndex)
                    break;
                counter++;
            }
        }
    }

    // SECURITY(review): storage credentials hard-coded in source; move them to
    // configuration like Main's ConfigurationManager lookups.
    string AzureaccountName = "testdrive";
    string AzureaccountKey = "zRTT++dVryOWXJyAM7NM0TuQcu0Y23BgCQfkt7xh2f/Mm+r6c8/XtPTY0xxaF6tPSACJiuACsjotDeNIVyXM8Q==";
    LocationInfo li = new LocationInfo(AzureaccountName, AzureaccountKey, SynchronizerType.Azure);

    // Recreate the ground-truth stream from scratch (Azure-backed, 4 MB chunks).
    StreamFactory streamFactory = StreamFactory.Instance;
    FqStreamID fq_sid = new FqStreamID("simplepreheat", "A", "TestBS");
    CallerInfo ci = new CallerInfo(null, "A", "A", 1);
    streamFactory.deleteStream(fq_sid, ci);
    IStream occupancyGroundTruthStream = streamFactory.openValueDataStream<StrKey, ByteValue>(fq_sid, ci, li,
        StreamFactory.StreamSecurityType.Plain, CompressionType.None, StreamFactory.StreamOp.Write,
        null, 4 * 1024 * 1024, 1, new Logger());
    // NOTE(review): the stream is closed here yet still handed to the Simple* helpers
    // and Append below — presumably those reopen/flush it; confirm IStream semantics.
    occupancyGroundTruthStream.Close();

    int slotIndex = 0;
    long startTime, retrievelTime, computeTime, insertTime;
    try
    {
        while (true)
        {
            // Phase 1: retrieve the POV vectors for this slot.
            startTime = DateTime.Now.Ticks;
            List<int> currentPOV = SimpleConstructCurrentPOV(occupancyGroundTruthStream, slotIndex);
            List<List<int>> previousDaysPOV = SimpleConstructPreviousPOV(occupancyGroundTruthStream, slotIndex);
            retrievelTime = DateTime.Now.Ticks - startTime;

            // Phase 2: run the prediction (result only used by the commented-out RetVal line).
            startTime = DateTime.Now.Ticks;
            int predictedOccupancy = Predict(currentPOV, previousDaysPOV);
            computeTime = DateTime.Now.Ticks - startTime;

            // Phase 3: obtain the ground truth and append the extended vector.
            startTime = DateTime.Now.Ticks;
            int groundTruth;
            if (datafile == null) // if no datafile to read the ground truth from just append randomly
            {
                groundTruth = random.Next(2);
            }
            else
            {
                string line = datafile.ReadLine();
                if (line == null) // fix: was int.Parse(null) -> ArgumentNullException at EOF
                {
                    Console.WriteLine("reached the end of datafile");
                    break;
                }
                groundTruth = int.Parse(line);
            }
            currentPOV.Add(groundTruth);

            occupancyGroundTruthStream.Append(occupancyKey, new ByteValue(currentPOV.SelectMany(BitConverter.GetBytes).ToArray()), slotIndexBase + slotIndex);
            insertTime = DateTime.Now.Ticks - startTime;

            Console.WriteLine("Slot number {0} {1} {2} {3}", slotIndex, retrievelTime, computeTime, insertTime);
            using (results = File.AppendText(outputFilePath))
                results.WriteLine("Slot number {0} {1} {2} {3}", slotIndex, retrievelTime, computeTime, insertTime);

            slotIndex++;
            //retVal.Add(new RetVal(endTime - startTime, predictedOccupancy));
            if (slotIndex == endSlotIndex)
                break;
        }
    }
    finally
    {
        // Fix: the data-file reader was never closed on any exit path (resource leak),
        // and the stream stayed open if an exception escaped the loop.
        if (datafile != null)
            datafile.Close();
        occupancyGroundTruthStream.Close();
    }
    return retVal;
}