static void Main(string[] args)
{
    int len_hours = 1000; // 24 * 60 * 30;
    int[] windows = { 60 }; // { 600, 24 * 60, 7 * 24 * 60 };
    int[] chunkSizes = { 4 * 1024 * 1024, 1024 * 1024, 100 * 1024, 10 * 1024, 1024 };
    LocationInfo li = new LocationInfo("testdrive", "zRTT++dVryOWXJyAM7NM0TuQcu0Y23BgCQfkt7xh2f/Mm+r6c8/XtPTY0xxaF6tPSACJiuACsjotDeNIVyXM8Q==", SynchronizerType.Azure);
    string mdServer = "http://scspc417.cs.uwaterloo.ca:23456/TrustedServer/";
    int numberOfStreams = 1; // 10 homes
    int numberOfExperimentRepetitions = 1;

    foreach (int window in windows)
    {
        foreach (int chunkSize in chunkSizes)
        {
            DNW dnwt = new DNW(numberOfStreams, window, li, mdServer, chunkSize);
            for (int i = 1; i <= len_hours; i++)
            {
                dnwt.ReadObject();
            }
            dnwt.Finish();

            List<long> timeTakenForRemoteRead = new List<long>();
            long dataDownloaded = 0;
            for (int repeat = 1; repeat <= numberOfExperimentRepetitions; repeat++)
            {
                // Clean up everything, as if we are a reader located in a different home.
                for (int i = 1; i <= numberOfStreams; i++)
                {
                    Directory.Delete(dnwt.fqprefix + "-" + window + "-" + i, true);
                }
                timeTakenForRemoteRead.Add(dnwt.RemoteMatch(null));
                for (int i = 1; i <= numberOfStreams; i++)
                {
                    dataDownloaded += GetDirectorySize(dnwt.fqprefix + "-" + window + "-" + i + "/");
                }
            }

            // Of the data downloaded, the amount used for answering the query is 29689 per home,
            // i.e., the size of the stream on disk when len_hours = window size = 60;
            // for a window size of 600 it is 293207 per home.
            long dataused = 0;
            if (window == 60) dataused = 29689;
            if (window == 600) dataused = 293207;

            using (StreamWriter writer = File.AppendText("results.txt"))
                writer.Write(window + "," + numberOfStreams + "," + chunkSize + "," +
                    ListExtensions.Mean(timeTakenForRemoteRead) + "," +
                    ListExtensions.StandardDeviation(timeTakenForRemoteRead) + "," +
                    dataDownloaded + "," + dataused + "\n");
            Console.WriteLine(window + "," + numberOfStreams + "," + chunkSize + "," +
                ListExtensions.Mean(timeTakenForRemoteRead) + "," +
                ListExtensions.StandardDeviation(timeTakenForRemoteRead) + "," +
                dataDownloaded + "," + dataused + "\n");
        }
    }
}
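// Main totals the bytes fetched per stream with GetDirectorySize, whose body is not part
// of this snippet; the recursive walk below is a minimal sketch of what such a helper
// could look like (name and signature assumed from the call site above):
static long GetDirectorySize(string path)
{
    long size = 0;
    // Sum the sizes of all files under the directory, including subdirectories.
    foreach (string file in Directory.GetFiles(path, "*", SearchOption.AllDirectories))
    {
        size += new FileInfo(file).Length;
    }
    return size;
}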
public NaivePreHeat_remoteread(string dataFilePath, int K, string outputFilePath, int chunkSize,
                               string mdserver, LocationInfo li, int endSlotIndex)
{
    this.dataFilePath = dataFilePath;
    occupancyKey = new StrKey("occupancy");
    random = new Random();
    this.constK = K;
    this.outputFilePath = outputFilePath;
    this.chunkSize = chunkSize;
    this.mdserver = mdserver;
    this.li = li;

    int slotIndex = 0;
    StreamFactory streamFactory = StreamFactory.Instance;
    FqStreamID fq_sid = new FqStreamID(fqsidprefix + chunkSize, "A", "TestBS");
    CallerInfo ci = new CallerInfo(null, "A", "A", 1);
    streamFactory.deleteStream(fq_sid, ci);
    IStream occupancyGroundTruthStream = streamFactory.openValueDataStream<StrKey, ByteValue>(
        fq_sid, ci, li, StreamFactory.StreamSecurityType.Plain, CompressionType.None,
        StreamFactory.StreamOp.Write, mdserver, chunkSize, 1, new Logger());

    while (true)
    {
        occupancyGroundTruthStream.Append(occupancyKey,
            new ByteValue(BitConverter.GetBytes(random.Next(2))), slotIndexBase + slotIndex);
        slotIndex++;
        if (slotIndex == endSlotIndex)
            break;
    }
    occupancyGroundTruthStream.Close();
}
public void Setup() { k1 = new StrKey("k1"); k2 = new StrKey("k2"); string AzureaccountName = "testdrive"; string AzureaccountKey = "zRTT++dVryOWXJyAM7NM0TuQcu0Y23BgCQfkt7xh2f/Mm+r6c8/XtPTY0xxaF6tPSACJiuACsjotDeNIVyXM8Q=="; locationInfo = new LocationInfo(AzureaccountName, AzureaccountKey, SynchronizerType.Azure); }
public void Setup() { k1 = new StrKey("k1"); k2 = new StrKey("k2"); string AzureaccountName = "msrlot"; string AzureaccountKey = "wC3ou+VLBTu8ryKbsiSMtsIIHIpxGSIAWA0NtK58da2wELQ+USgYQwmVMtyq/p8ILsuZc8TiLeHBjaPI+a3A2Q=="; locationInfo = new LocationInfo(AzureaccountName, AzureaccountKey, SynchronizerType.Azure); }
public void ExportData(bool remote, DateTime dtbegin, DateTime dtend, String outputFileName)
{
    // Read the settings.
    string accountName = ConfigurationManager.AppSettings.Get("AccountName");
    string accountKey = ConfigurationManager.AppSettings.Get("AccountSharedKey");
    string homeId = ConfigurationManager.AppSettings.Get("HomeId");
    string appId = ConfigurationManager.AppSettings.Get("AppId");
    string streamId = ConfigurationManager.AppSettings.Get("StreamId");

    IStream datastream;
    FileStream fs = new FileStream(outputFileName, FileMode.Append);
    StreamWriter swOut = new StreamWriter(fs);

    StreamFactory sf = StreamFactory.Instance;
    CallerInfo ci = new CallerInfo(null, appId, appId, 0);
    FqStreamID fq_sid = new FqStreamID(homeId, appId, streamId);

    if (remote)
    {
        LocationInfo li = new LocationInfo(accountName, accountKey, SynchronizerType.Azure);
        datastream = sf.openValueDataStream<StrKey, StrValue>(fq_sid, ci, li,
            StreamFactory.StreamSecurityType.Plain, CompressionType.None,
            StreamFactory.StreamOp.Read, null, 4 * 1024 * 1024, 1, null, true);
    }
    else
    {
        datastream = sf.openValueDataStream<StrKey, StrValue>(fq_sid, ci, null,
            StreamFactory.StreamSecurityType.Plain, CompressionType.None,
            StreamFactory.StreamOp.Read, null, 4 * 1024 * 1024, 1, null);
    }

    DateTime dtbeginutc = dtbegin.ToUniversalTime();
    DateTime dtendutc = dtend.ToUniversalTime();
    // StrKey tmpKey = new StrKey("envih1:sensormultilevel:");

    HashSet<IKey> keys = datastream.GetKeys(null, null);
    foreach (IKey key in keys)
    {
        // IEnumerable<IDataItem> dataItemEnum = datastream.GetAll(key);
        IEnumerable<IDataItem> dataItemEnum = datastream.GetAll(key, dtbeginutc.Ticks, dtendutc.Ticks);
        if (dataItemEnum != null)
        {
            foreach (IDataItem di in dataItemEnum)
            {
                DateTime ts = new DateTime(di.GetTimestamp());
                swOut.WriteLine(key + ", " + ts.ToLocalTime() + ", " + di.GetVal().ToString());
            }
        }
    }
    datastream.Close();
    swOut.Close();
}
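// A minimal caller sketch for ExportData. The helper below is hypothetical (it assumes it
// lives on the same class) and the output file name is a placeholder: export the last
// 24 hours of the configured stream from the remote copy.
public void ExportLastDay()
{
    // remote: true pulls chunks from Azure rather than the local store.
    ExportData(true, DateTime.Now.AddDays(-1), DateTime.Now, "export.txt");
}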
public Export(bool remote)
{
    IStream datastream;
    string accountName = ConfigurationManager.AppSettings.Get("AccountName");
    string accountKey = ConfigurationManager.AppSettings.Get("AccountSharedKey");
    string homeId = ConfigurationManager.AppSettings.Get("HomeId");
    string appId = ConfigurationManager.AppSettings.Get("AppId");
    string streamId = ConfigurationManager.AppSettings.Get("StreamId");

    StreamFactory sf = StreamFactory.Instance;
    CallerInfo ci = new CallerInfo(null, appId, appId, 0);
    FqStreamID fq_sid = new FqStreamID(homeId, appId, streamId);

    if (remote)
    {
        LocationInfo li = new LocationInfo(accountName, accountKey, SynchronizerType.Azure);
        datastream = sf.openValueDataStream<StrKey, StrValue>(fq_sid, ci, li,
            StreamFactory.StreamSecurityType.Plain, CompressionType.None,
            StreamFactory.StreamOp.Read, null, 4 * 1024 * 1024, 1, null, true);
    }
    else
    {
        datastream = sf.openValueDataStream<StrKey, StrValue>(fq_sid, ci, null,
            StreamFactory.StreamSecurityType.Plain, CompressionType.None,
            StreamFactory.StreamOp.Read, null, 4 * 1024 * 1024, 1, null);
    }

    /*
    StrKey key = new StrKey("foo");
    if (datastream != null)
    {
        datastream.Append(key, new StrValue("bar"));
        datastream.Append(key, new StrValue("baz"));
    }
    */

    HashSet<IKey> keys = datastream.GetKeys(null, null);
    foreach (IKey key in keys)
    {
        IEnumerable<IDataItem> dataItemEnum = datastream.GetAll(key);
        foreach (IDataItem di in dataItemEnum)
        {
            try
            {
                DateTime ts = new DateTime(di.GetTimestamp());
                Console.WriteLine(key + ", " + ts + ", " + di.GetVal().ToString());
            }
            catch (Exception e)
            {
                Console.Error.Write(e.StackTrace);
            }
        }
    }
    datastream.Close();
}
public void Setup() { k1 = new StrKey("k1"); k2 = new StrKey("k2"); string AzureaccountName = "testdrive"; string AzureaccountKey = "zRTT++dVryOWXJyAM7NM0TuQcu0Y23BgCQfkt7xh2f/Mm+r6c8/XtPTY0xxaF6tPSACJiuACsjotDeNIVyXM8Q=="; locationInfo = new LocationInfo(AzureaccountName, AzureaccountKey, SynchronizerType.Azure); streamID = new FqStreamID("99-a2000", "A0", "TestDS"); callerInfo = new CallerInfo(null, "A0", "A0", 1); streamSecurityType = StreamFactory.StreamSecurityType.Plain; streamFactory = StreamFactory.Instance; }
public void Setup() { k1 = new StrKey("k1"); k2 = new StrKey("k2"); string AzureaccountName = "msrlot"; string AzureaccountKey = "wC3ou+VLBTu8ryKbsiSMtsIIHIpxGSIAWA0NtK58da2wELQ+USgYQwmVMtyq/p8ILsuZc8TiLeHBjaPI+a3A2Q=="; locationInfo = new LocationInfo(AzureaccountName, AzureaccountKey, SynchronizerType.Azure); streamID = new FqStreamID("99-a2000", "A0", "TestDS"); callerInfo = new CallerInfo(null, "A0", "A0", 1); streamSecurityType = StreamFactory.StreamSecurityType.Plain; streamFactory = StreamFactory.Instance; }
private static void UploadDataAsStreams(int UploadCount)
{
    string directory = @"..\..\data\meter-data";
    int count = 0;

    // Load the hourly temperature time series from the weather file.
    Dictionary<DateTime, double> ts_temperature = new Dictionary<DateTime, double>();
    StreamReader wfile = new System.IO.StreamReader(@"..\..\data\weather.txt");
    string wline;
    while ((wline = wfile.ReadLine()) != null)
    {
        string[] words = wline.Split('\t');
        DateTime date = Convert.ToDateTime(words[4]);
        date = date.AddHours(Int32.Parse(words[5]));
        double temperature = Double.Parse(words[0]);
        ts_temperature[date] = temperature;
    }
    wfile.Close();

    foreach (string filePath in Directory.GetFiles(directory))
    {
        Console.WriteLine("file name:" + filePath);
        string line;
        System.IO.StreamReader file = new System.IO.StreamReader(filePath);

        LocationInfo li = new LocationInfo(AzureaccountName, AzureaccountKey, SynchronizerType.Azure);
        FqStreamID fq_sid = new FqStreamID("crrealhome" + count, "A", "TestBS");
        CallerInfo ci = new CallerInfo(null, "A", "A", 1);
        StreamFactory sf = StreamFactory.Instance;
        sf.deleteStream(fq_sid, ci);
        IStream dfs_byte_val = sf.openValueDataStream<DoubleKey, ByteValue>(fq_sid, ci, li,
            StreamFactory.StreamSecurityType.Plain, CompressionType.None,
            StreamFactory.StreamOp.Write, mdserveraddress: mdServer,
            ChunkSizeForUpload: 4 * 1024 * 1024, ThreadPoolSize: 1, log: new Logger());

        // Key each energy reading by the (integer) temperature at that hour.
        while ((line = file.ReadLine()) != null)
        {
            string[] words = line.Split('\t');
            DateTime date = Convert.ToDateTime(words[0]);
            date = date.AddHours(int.Parse(words[1]) / 100);
            DoubleKey key = new DoubleKey(((int)(ts_temperature[date])));
            dfs_byte_val.Append(key, new ByteValue(BitConverter.GetBytes(Double.Parse(words[2]))),
                DateTimeToUnixTimestamp(date));
            // Console.WriteLine(DateTimeToUnixTimestamp(date) + "," + words[2]);
        }
        dfs_byte_val.Close();
        count++;
        if (count == UploadCount)
            break;
    }
}
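// The upload routines timestamp every Append with DateTimeToUnixTimestamp, whose body is
// not part of this snippet; a minimal sketch, assuming the usual seconds-since-epoch
// convention (the exact unit the real helper returns is not confirmed here):
static long DateTimeToUnixTimestamp(DateTime dateTime)
{
    // Elapsed seconds since the Unix epoch, computed in UTC.
    DateTime epoch = new DateTime(1970, 1, 1, 0, 0, 0, DateTimeKind.Utc);
    return (long)(dateTime.ToUniversalTime() - epoch).TotalSeconds;
}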
public ISync CreateSynchronizer(LocationInfo Li, string container, Logger log,
    SynchronizeDirection syncDirection = SynchronizeDirection.Upload,
    CompressionType compressionType = CompressionType.None,
    int ChunkSizeForUpload = 4 * 1024 * 1024, int ThreadPoolSize = 1,
    EncryptionType encryptionType = EncryptionType.None,
    byte[] encryptionKey = null, byte[] initializationVector = null)
{
    ISync isync = null;
    switch (Li.st)
    {
        case SynchronizerType.Azure:
            isync = CreateAzureSynchronizer(new RemoteInfo(Li.accountName, Li.accountKey),
                container, log, syncDirection, compressionType, ChunkSizeForUpload,
                ThreadPoolSize, encryptionType, encryptionKey, initializationVector);
            break;
        case SynchronizerType.AmazonS3:
            isync = CreateAmazonS3Synchronizer(new RemoteInfo(Li.accountName, Li.accountKey),
                container, log, syncDirection, compressionType, ChunkSizeForUpload,
                ThreadPoolSize, encryptionType, encryptionKey, initializationVector);
            break;
        default:
            isync = null;
            break;
    }
    return isync;
}
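// A minimal usage sketch for CreateSynchronizer with the default upload direction and no
// compression or encryption; the account name, key, container name, and local path below
// are placeholders, not values from the source:
public static void SyncOnce()
{
    LocationInfo li = new LocationInfo("myaccount", "mykey", SynchronizerType.Azure);
    ISync sync = SyncFactory.Instance.CreateSynchronizer(li, "my-container", new Logger());
    sync.SetLocalSource(@"C:\data\to\sync");  // directory whose contents get uploaded
    sync.Sync();
}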
public DNW(int numberOfStreams, int window, LocationInfo li, string mdserver, int chunkSize)
{
    this.li = li;
    this.numberOfStreams = numberOfStreams;
    this.window = window;
    this.count = 0;
    this.mdServer = mdserver;
    this.chunkSize = chunkSize;

    StreamFactory streamFactory = StreamFactory.Instance;
    CallerInfo ci = new CallerInfo(null, "A", "A", 1);
    dataStreams = new List<IStream>();
    for (int i = 1; i <= numberOfStreams; i++)
    {
        FqStreamID fq_sid = new FqStreamID(
            fqprefix + "-" + window + "-" + i + "/" + numberOfStreams + "-" + chunkSize, "A", "TestBS");
        streamFactory.deleteStream(fq_sid, ci);
        dataStreams.Add(streamFactory.openValueDataStream<StrKey, ByteValue>(fq_sid, ci, li,
            StreamFactory.StreamSecurityType.Plain, CompressionType.None,
            StreamFactory.StreamOp.Write, mdserveraddress: mdServer,
            ChunkSizeForUpload: chunkSize, ThreadPoolSize: 1, log: new Logger()));
    }
}
/// <summary>
/// This function will throw an exception if the log is non-rotating or if the container
/// name does not meet the following constraints:
/// 1. Container names must start with a letter or number, and can contain only letters, numbers, and the dash (-) character.
/// 2. Every dash (-) character must be immediately preceded and followed by a letter or number; consecutive dashes are not permitted in container names.
/// 3. All letters in a container name must be lowercase.
/// 4. Container names must be from 3 through 63 characters long.
/// </summary>
/// <param name="accountName"></param>
/// <param name="accountKey"></param>
/// <param name="containerName"></param>
public void InitSyncing(string accountName, string accountKey, string containerName)
{
    if (!IsRotatingLog)
    {
        throw new Exception("Cannot sync a non-rotating log");
    }

    // The code below could throw an exception if containerName does not meet the restrictions.
    var locationInfo = new Bolt.DataStore.LocationInfo(accountName, accountKey, Bolt.DataStore.SynchronizerType.Azure);
    try
    {
        synchronizer = Bolt.DataStore.SyncFactory.Instance.CreateLogSynchronizer(locationInfo, containerName);
        synchronizer.SetLocalSource(archivingDirectory);

        // Sync right away, in case there are leftover logs from last time.
        SafeThread worker = new SafeThread(delegate() { synchronizer.Sync(); }, "init log syncing", this);
        worker.Start();
    }
    catch (System.FormatException ex1)
    {
        Log("ERROR: Could not start log syncing. The Azure account key may be wrong.\n {0}", ex1.ToString());
    }
    catch (System.Runtime.InteropServices.COMException ex2)
    {
        Log("ERROR: Could not start log syncing. It appears that the Sync Framework v2.1 x86 version is not installed. Make sure that no other version is present.\n {0}", ex2.ToString());
    }
    catch (Microsoft.WindowsAzure.StorageClient.StorageServerException ex3)
    {
        Log("ERROR: Could not start log syncing. The Azure account name may be wrong.\n {0}", ex3.ToString());
    }
    catch (Microsoft.WindowsAzure.StorageClient.StorageClientException ex4)
    {
        Log("ERROR: Could not start log syncing. The Azure account key may be wrong.\n {0}", ex4.ToString());
    }
    catch (Exception ex5)
    {
        Log("Got unknown exception while starting log syncing.\n {0}", ex5.ToString());
    }
}
public async Task SetupDataStream(bool remote, string accountName, string accountKey,
                                  string homeId, string appId, string streamId)
{
    StreamFactory sf = StreamFactory.Instance;
    CallerInfo ci = new CallerInfo(null, appId, appId, 0);
    FqStreamID fq_sid = new FqStreamID(homeId, appId, streamId);

    if (remote)
    {
        LocationInfo li = new LocationInfo(accountName, accountKey, SynchronizerType.Azure);
        datastream = await Task.Run(() => sf.openValueDataStream<StrKey, StrValue>(fq_sid, ci, li,
            StreamFactory.StreamSecurityType.Plain, CompressionType.None,
            StreamFactory.StreamOp.Read, null, 4 * 1024 * 1024, 1, null, true));
    }
    else
    {
        datastream = await Task.Run(() => sf.openValueDataStream<StrKey, StrValue>(fq_sid, ci, null,
            StreamFactory.StreamSecurityType.Plain, CompressionType.None,
            StreamFactory.StreamOp.Read, null, 4 * 1024 * 1024, 1, null));
    }
}
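// A minimal caller sketch for SetupDataStream. The helper is hypothetical (it assumes it
// is added to the same class as SetupDataStream) and the home/app/stream names are
// placeholders:
public async Task OpenRemoteStreamAsync(string accountName, string accountKey)
{
    await SetupDataStream(true, accountName, accountKey, "myhome", "myapp", "mystream");
    // datastream is now open for remote reads; Close() it when finished.
}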
protected void CreateSync(SynchronizeDirection dir)
{
    // Create synchronizer.
    if (account.location == "None")
    {
        synchronizer = null;
    }
    else if (streamtype == StreamFactory.StreamSecurityType.Secure)
    {
        LocationInfo Li = new LocationInfo(account.accountName, account.accountKey,
            SyncFactory.GetSynchronizerType(account.location));
        synchronizer = SyncFactory.Instance.CreateSynchronizer(Li,
            streamid.ToString().Replace('/', '-').ToLower() + "-" + seq_num, logger, dir,
            streamcompressiontype, this.StreamChunkSizeForUpload, this.StreamThreadPoolSize,
            EncryptionType.AES, acl_md.encKey, acl_md.IV);
        synchronizer.SetLocalSource(targetDir);
        synchronizer.SetIndexFileName(IndexFileName);
        synchronizer.SetDataFileName(DataLogFileName);
    }
    else
    {
        LocationInfo Li = new LocationInfo(account.accountName, account.accountKey,
            SyncFactory.GetSynchronizerType(account.location));
        synchronizer = SyncFactory.Instance.CreateSynchronizer(Li,
            streamid.ToString().Replace('/', '-').ToLower() + "-" + seq_num, logger, dir,
            streamcompressiontype, this.StreamChunkSizeForUpload, this.StreamThreadPoolSize);
        synchronizer.SetLocalSource(targetDir);
        synchronizer.SetIndexFileName(IndexFileName);
        synchronizer.SetDataFileName(DataLogFileName);
    }
}
/*
 * syncIntervalSec:
 *   -ve ==> don't sync on writes; only sync on close.
 *    0  ==> sync on every write.
 *   +ve ==> sync every x seconds.
 *
 * Throws System.Exception e.g., on network disconnection for remote streams. Catch in caller.
 */
public IStream openFileDataStream<KeyType>(FqStreamID FQSID, CallerInfo Ci, LocationInfo Li,
    StreamFactory.StreamSecurityType type, CompressionType ctype, StreamFactory.StreamOp op,
    string mdserveraddress = null, int ChunkSizeForUpload = 4 * 1024 * 1024,
    int ThreadPoolSize = 1, Logger log = null, bool sideload = false, int syncIntervalSec = -1)
    where KeyType : IKey, new()
{
    if (Li == null)
    {
        Li = new LocationInfo("", "", SynchronizerType.None);
    }
    return (new MetaStream<KeyType, ByteValue>(FQSID, Ci, Li, op, type, ctype,
        StreamDataType.Files, syncIntervalSec, mdserveraddress, ChunkSizeForUpload,
        ThreadPoolSize, log, sideload));
}
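// A minimal usage sketch for openFileDataStream: a local (Li == null), plain, uncompressed
// file stream opened for writing that syncs only on close (syncIntervalSec defaults to -1).
// The stream and caller identifiers below are placeholders:
public static IStream OpenLocalFileStream()
{
    FqStreamID fqsid = new FqStreamID("myhome", "myapp", "myfilestream");
    CallerInfo ci = new CallerInfo(null, "myapp", "myapp", 1);
    return StreamFactory.Instance.openFileDataStream<StrKey>(fqsid, ci, null,
        StreamFactory.StreamSecurityType.Plain, CompressionType.None, StreamFactory.StreamOp.Write);
}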
static void Main(string[] args)
{
    int len = 960 + 95; // 11 days
    string outputFile;
    string mdServer = "http://scspc417.cs.uwaterloo.ca:23456/TrustedServer/";
    outputFile = ".\\optimal-remoteread";
    LocationInfo li = new LocationInfo("testdrive",
        "zRTT++dVryOWXJyAM7NM0TuQcu0Y23BgCQfkt7xh2f/Mm+r6c8/XtPTY0xxaF6tPSACJiuACsjotDeNIVyXM8Q==",
        SynchronizerType.Azure);
    int[] chunkSizes = { 960, 480, 320, 240, 160, 120, 60, 30, 20, 10 };

    foreach (int chunk in chunkSizes)
    {
        NaivePreHeat_remoteread preheat = new NaivePreHeat_remoteread(null, 5, outputFile, chunk, mdServer, li, len);
        List<long> r = new List<long>();
        for (int i = 1; i <= 10; i++)
        {
            r.Add(preheat.PredictOccupancy(960, len).ElementAt(0).getVal()); // 11th day
        }
        long mean = ListExtensions.Mean(r);
        double std = ListExtensions.StandardDeviation(r);
        StreamWriter results;
        using (results = File.AppendText("avg-ret-time-chunksize.txt"))
            results.WriteLine("{0} {1} {2}", chunk, mean, std);
    }

    /*
    outputFile = ".\\smart";
    SmartPreHeat spreheat = new SmartPreHeat(null, 5, outputFile);
    spreheat.PredictOccupancy(0, len);
    CreateDayMax(outputFile, 3);
    CreateDayMax(outputFile, 4);
    CreateDayMax(outputFile, 5);

    outputFile = ".\\optimal";
    OptimalPreHeat preheat = new OptimalPreHeat(null, 5, outputFile);
    preheat.PredictOccupancy(0, len);
    CreateDayMax(outputFile, 3);
    CreateDayMax(outputFile, 4);
    CreateDayMax(outputFile, 5);

    outputFile = ".\\naive";
    NaivePreHeat npreheat = new NaivePreHeat(null, 5, outputFile);
    npreheat.PredictOccupancy(0, len);
    CreateDayMax(outputFile, 3);
    CreateDayMax(outputFile, 4);
    CreateDayMax(outputFile, 5);
    */
}
////////////////////////////////////////////////////////////////////////////////
// Legacy Compatibility Calls
//  - deprecated (uses the Sync Framework)
//  - do not use unless you know what you are doing
////////////////////////////////////////////////////////////////////////////////
public ISync CreateLogSynchronizer(LocationInfo Li, string container)
{
    ISync isync = null;
    switch (Li.st)
    {
        case SynchronizerType.Azure:
            isync = new HDS.AzureSynchronizer(new RemoteInfo(Li.accountName, Li.accountKey),
                container, SynchronizeDirection.Upload);
            break;
        default:
            isync = null;
            break;
    }
    return isync;
}
private static long RemoteRead(int numberOfHomes, DateTime start, DateTime end, string tag)
{
    Dictionary<int, List<double>> temp_energy_allhomes = new Dictionary<int, List<double>>();
    Dictionary<int, List<double>> temp_energy_home;
    long retVal = 0;

    for (int i = 0; i < numberOfHomes; i++)
    {
        temp_energy_home = new Dictionary<int, List<double>>();
        long start_ticks = DateTime.Now.Ticks;
        for (int temp = -30; temp <= 40; temp++)
        {
            string AzureaccountName = "testdrive";
            string AzureaccountKey = "zRTT++dVryOWXJyAM7NM0TuQcu0Y23BgCQfkt7xh2f/Mm+r6c8/XtPTY0xxaF6tPSACJiuACsjotDeNIVyXM8Q==";
            LocationInfo li = new LocationInfo(AzureaccountName, AzureaccountKey, SynchronizerType.Azure);
            FqStreamID fq_sid = new FqStreamID("realhome" + i, "A", "TestBS");
            CallerInfo ci = new CallerInfo(null, "A", "A", 1);
            StreamFactory sf = StreamFactory.Instance;
            IStream dfs_byte_val = sf.openFileStream<DoubleKey, ByteValue>(fq_sid, ci, li,
                StreamFactory.StreamSecurityType.Plain, CompressionType.None,
                StreamFactory.StreamOp.Read, mdServer, 4 * 1024 * 1024, 1, new Logger());
            IEnumerable<IDataItem> vals = dfs_byte_val.GetAll(new DoubleKey(temp),
                DateTimeToUnixTimestamp(start), DateTimeToUnixTimestamp(end));
            foreach (IDataItem val in vals)
            {
                // Initialize the per-temperature lists on first use; indexing an
                // absent key directly would throw KeyNotFoundException.
                if (!temp_energy_home.ContainsKey(temp))
                    temp_energy_home[temp] = new List<double>();
                if (!temp_energy_allhomes.ContainsKey(temp))
                    temp_energy_allhomes[temp] = new List<double>();
                temp_energy_home[temp].Add(BitConverter.ToDouble(val.GetVal().GetBytes(), 0));
                temp_energy_allhomes[temp].Add(BitConverter.ToDouble(val.GetVal().GetBytes(), 0));
            }
            dfs_byte_val.Close(); // close the per-temperature stream
        }
        long end_ticks = DateTime.Now.Ticks;
        retVal += end_ticks - start_ticks;
        WriteToFile(".\\result-realhome-" + i + "-n-" + numberOfHomes + "-" + tag, temp_energy_home);
    }
    WriteToFile(".\\result-allhomes-n-" + numberOfHomes + "-" + tag, temp_energy_allhomes);
    return retVal;
}
private static void UploadDataAsStreams()
{
    string directory = "D:\\data-hds\\data-hds";
    int count = 0;
    int UploadCount = 300;

    // Load the hourly temperature time series from the weather file.
    Dictionary<DateTime, double> ts_temperature = new Dictionary<DateTime, double>();
    StreamReader wfile = new System.IO.StreamReader(@"..\..\data\weather.txt");
    string wline;
    while ((wline = wfile.ReadLine()) != null)
    {
        string[] words = wline.Split('\t');
        DateTime date = Convert.ToDateTime(words[4]);
        date = date.AddHours(Int32.Parse(words[5]));
        double temperature = Double.Parse(words[0]);
        ts_temperature[date] = temperature;
    }
    wfile.Close();

    foreach (string filePath in Directory.GetFiles(directory))
    {
        Console.WriteLine("file name:" + filePath);
        string line;
        System.IO.StreamReader file = new System.IO.StreamReader(filePath);

        string AzureaccountName = "testdrive";
        string AzureaccountKey = "zRTT++dVryOWXJyAM7NM0TuQcu0Y23BgCQfkt7xh2f/Mm+r6c8/XtPTY0xxaF6tPSACJiuACsjotDeNIVyXM8Q==";
        LocationInfo li = new LocationInfo(AzureaccountName, AzureaccountKey, SynchronizerType.Azure);
        FqStreamID fq_sid = new FqStreamID("realhome" + count, "A", "TestBS");
        CallerInfo ci = new CallerInfo(null, "A", "A", 1);
        StreamFactory sf = StreamFactory.Instance;
        sf.deleteStream(fq_sid, ci);
        IStream dfs_byte_val = sf.openFileStream<DoubleKey, ByteValue>(fq_sid, ci, li,
            StreamFactory.StreamSecurityType.Plain, CompressionType.None,
            StreamFactory.StreamOp.Write, mdServer, 4 * 1024 * 1024, 1, new Logger());

        // Key each energy reading by the (integer) temperature at that hour.
        while ((line = file.ReadLine()) != null)
        {
            string[] words = line.Split('\t');
            DateTime date = Convert.ToDateTime(words[0]);
            date = date.AddHours(int.Parse(words[1]) / 100);
            DoubleKey key = new DoubleKey(((int)(ts_temperature[date])));
            dfs_byte_val.Append(key, new ByteValue(BitConverter.GetBytes(Double.Parse(words[2]))),
                DateTimeToUnixTimestamp(date));
            // Console.WriteLine(DateTimeToUnixTimestamp(date) + "," + words[2]);
        }
        dfs_byte_val.Close();
        count++;
        if (count == UploadCount)
            break;
    }
}
static void Main(string[] args)
{
    string AzureaccountName = ConfigurationManager.AppSettings.Get("AccountName");
    string AzureaccountKey = ConfigurationManager.AppSettings.Get("AccountSharedKey");
    LocationInfo li = new LocationInfo(AzureaccountName, AzureaccountKey, SynchronizerType.Azure);

    /*
    string dataFile = "D:\\b";
    int KB = 1024;
    int[] chunk_sizes = { 4 * 1024 * KB, 8 * 1024 * KB };
    for (int i = 1; i <= 1; i++)
    {
        for (int threads = 1; threads <= 1; threads++)
        {
            foreach (int csize in chunk_sizes)
            {
                Console.Write(">");
                File.Copy(dataFile, dataFile + threads + "," + csize);
                AzureHelper helper = new AzureHelper(AzureaccountName, AzureaccountKey, "foo123123",
                    CompressionType.None, EncryptionType.None, null, null, new Logger(), csize, threads);
                long start = DateTime.Now.Ticks;
                helper.UploadFileAsChunks(dataFile + threads + "," + csize);
                long end = DateTime.Now.Ticks;
                Console.WriteLine(threads + "," + csize + "," + (((double)(end - start) / (double)10000000)));
            }
        }
    }
    */

    li = null;
    FqStreamID fq_sid = new FqStreamID("1299-2716", "A", "TestBS");
    CallerInfo ci = new CallerInfo(null, "A", "A", 1);
    StreamFactory sf = StreamFactory.Instance;
    sf.deleteStream(fq_sid, ci);

    // Write a few values, sealing the segment partway through.
    IStream dfs_byte_val = sf.openValueDataStream<StrKey, ByteValue>(fq_sid, ci, li,
        StreamFactory.StreamSecurityType.Plain, CompressionType.None, StreamFactory.StreamOp.Write);
    StrKey k1 = new StrKey("k1");
    dfs_byte_val.Append(k1, new ByteValue(StreamFactory.GetBytes("k1-cmu")));
    dfs_byte_val.Append(k1, new ByteValue(StreamFactory.GetBytes("k1-msr")));
    dfs_byte_val.Seal(false);
    dfs_byte_val.Append(k1, new ByteValue(StreamFactory.GetBytes("k1-uw")));
    dfs_byte_val.Close();
    Console.ReadKey();

    // Reopen and read the values back.
    dfs_byte_val = sf.openValueDataStream<StrKey, ByteValue>(fq_sid, ci, li,
        StreamFactory.StreamSecurityType.Plain, CompressionType.None, StreamFactory.StreamOp.Write);
    Console.WriteLine("Get in read : " + dfs_byte_val.Get(k1));
    IEnumerable<IDataItem> data = dfs_byte_val.GetAll(k1, 0, StreamFactory.NowUtc());
    foreach (IDataItem dataItem in data)
        Console.WriteLine(dataItem.GetVal().ToString());
    dfs_byte_val.Close();
    Console.ReadKey();

    /*
    ValueSerializerBase<StrKey> vsb = new ValueSerializerBase<StrKey>();
    Byte[] buffer1 = vsb.SerializeToByteStream().ToArray();
    Byte[] buffer2 = SerializerHelper<StrKey>.SerializeToProtoStream(k1).ToArray();
    FileStream fout = new FileStream("tmp.txt", FileMode.OpenOrCreate, FileAccess.Write, FileShare.ReadWrite);
    BinaryWriter fs_bw = new BinaryWriter(fout);
    fs_bw.Write(buffer1);
    fs_bw.Write("-----W00t!-----");
    fs_bw.Write(buffer2);
    fs_bw.Write("-----W00t!-----");
    fs_bw.Close();
    fout.Close();
    */
}
public void Run(string CallerName, string HomeName, string AppName, string StreamName,
    string RandName, long stime, long etime, StreamType stream_type, StreamOperation stream_op,
    StreamFactory.StreamPhysicalType ptype, CompressionType ctype, int ChunkSize,
    int ThreadPoolSize, Byte[] value, int num_operations, SynchronizerType synctype,
    int max_key = 0, string address = null, bool doCosts = false, bool doRaw = false)
{
    // Set experiment directory.
    CallerInfo ci = new CallerInfo(null, CallerName, CallerName, 1);
    exp_directory = Path.GetFullPath((null != ci.workingDir) ? ci.workingDir : Directory.GetCurrentDirectory());
    exp_directory = exp_directory + "/" + HomeName + "/" + AppName + "/" + StreamName;

    if (max_key == 0)
        max_key = num_operations;

    // Set a description/tag for the experiment.
    this.exp_id = "Directory: " + HomeName + "/" + AppName + "/" + StreamName +
        " Caller:" + CallerName + " Stream Type:" + stream_type + " Stream Op: " + stream_op +
        " Stream Ptype: " + ptype + " Compression Type: " + ctype + " Value size: " + value.Length +
        " num_operations: " + max_key + " actual_num_ops: " + num_operations +
        " Sync type: " + synctype + " Do costs? " + doCosts + " Chunk Size: " + ChunkSize +
        " ThreadPool Size:" + ThreadPoolSize;
    this.compressed_exp_id = " ST:" + stream_type + " OP: " + stream_op + " PT: " + ptype +
        " CT: " + ctype + " VS: " + value.Length + " I:" + num_operations + " MK:" + max_key +
        " SYNC: " + synctype + " chsize: " + ChunkSize + " nThreads: " + ThreadPoolSize;

    // Set remote storage server account info.
    string AzureaccountName = ConfigurationManager.AppSettings.Get("AccountName");
    string AzureaccountKey = ConfigurationManager.AppSettings.Get("AccountSharedKey");
    string S3accountName = ConfigurationManager.AppSettings.Get("S3AccountName");
    string S3accountKey = ConfigurationManager.AppSettings.Get("S3AccountSharedKey");

    LocationInfo Li;
    if (synctype == SynchronizerType.Azure)
        Li = new LocationInfo(AzureaccountName, AzureaccountKey, SynchronizerType.Azure);
    else if (synctype == SynchronizerType.AmazonS3)
        Li = new LocationInfo(S3accountName, S3accountKey, SynchronizerType.AmazonS3);
    else
        Li = null;

    StreamFactory sf = StreamFactory.Instance;
    IStream stream = null;
    FqStreamID streamid = new FqStreamID(HomeName, AppName, StreamName);

    // Set op: R/W.
    StreamFactory.StreamOp rw;
    if (stream_op == StreamOperation.RandomKeyRandomValueAppend ||
        stream_op == StreamOperation.RandomKeySameValueAppend ||
        stream_op == StreamOperation.SameKeyRandomValueAppend ||
        stream_op == StreamOperation.SameKeySameValueAppend)
    {
        rw = StreamFactory.StreamOp.Write;
    }
    else
    {
        rw = StreamFactory.StreamOp.Read;
    }

    // Initialize costs.
    CostsHelper costhelper = null;
    double baselineStorageKV = 0;
    if (doCosts)
    {
        costhelper = new CostsHelper();
        costhelper.getCurrentCpuUsage();
        costhelper.getNetworkUsage();
    }

    if (stream_type == StreamType.CloudRaw)
    {
        if (!Directory.Exists(exp_directory))
        {
            Directory.CreateDirectory(exp_directory);
        }
        Logger logger = new Logger();
        Byte[] val = new Byte[value.Length * num_operations];
        // DateTime Date = new DateTime(DateTime.UtcNow.Ticks);
        // string cname = String.Format("CloudRaw-{0}", Date.ToString("yyyy-MM-dd"));
        // string bname = String.Format("{0}", Date.ToString("HH-mm-ss"));
        // string cname = String.Format("cloudraw-{0}", RandomString(4));
        // string bname = String.Format("{0}", RandomString(4));
        string cname = String.Format("cloudraw-{0}", RandName);
        string bname = String.Format("{0}", RandName);

        if (stream_op == StreamOperation.RandomKeyGet ||
            stream_op == StreamOperation.RandomKeyGetMultipleSegments ||
            stream_op == StreamOperation.RandomKeyGetAll)
        {
            doRawCloudPerf(val, SynchronizerType.Azure, SynchronizeDirection.Download,
                exp_directory, logger, containerName: cname, blobName: bname);
            logger.Dump(exp_directory + "/log");
        }
        else
        {
            doRawCloudPerf(val, SynchronizerType.Azure, SynchronizeDirection.Upload,
                exp_directory, logger, containerName: cname, blobName: bname);
            logger.Dump(exp_directory + "/log");
        }
        return;
    }

    if (stream_type == StreamType.DiskRaw)
    {
        if (!Directory.Exists(exp_directory))
        {
            Directory.CreateDirectory(exp_directory);
        }
        Logger logger = doDiskRaw(stream_op, num_operations, value.Length, ptype, exp_directory);
        logger.Dump(exp_directory + "/log");
        return;
    }

    // Are we getting raw disk throughput?
    if (stream_type == StreamType.Raw)
    {
        string ret = doDiskSpeed((value.Length * num_operations) / 1000 + "K", value.Length / 1000 + "K", rw);
        if (!Directory.Exists(exp_directory))
        {
            Directory.CreateDirectory(exp_directory);
        }
        File.WriteAllText(exp_directory + "/log", ret);
        return;
    }

    // Populate the keys and the values.
    Random random = new Random(DateTime.Now.Millisecond);
    StrKey[] keys = new StrKey[max_key];
    for (int i = 0; i < max_key; ++i)
    {
        keys[i] = new StrKey("" + i);
    }
    /*
    List<ByteValue> vals = new List<ByteValue>(num_operations);
    Byte[][] tmp = new Byte[num_operations][];
    for (int i = 0; i < num_operations; ++i)
    {
        tmp[i] = new Byte[value.Length];
        random.NextBytes(tmp[i]);
    }
    for (int i = 0; i < num_operations; ++i)
    {
        keys[i] = new StrKey("" + i);
        vals.Add(new ByteValue(tmp[i]));
        // vals[i] = new ByteValue(tmp);
    }
    */

    Logger log = new Logger();

    // Open stream for different types of experiments.
    if (stream_type == StreamType.Local && ptype == StreamFactory.StreamPhysicalType.File)
    {
        stream = sf.openFileStream<StrKey, ByteValue>(streamid, ci, null,
            StreamFactory.StreamSecurityType.Plain, ctype, rw, address, ChunkSize, ThreadPoolSize, log);
    }
    else if (stream_type == StreamType.LocalEnc && ptype == StreamFactory.StreamPhysicalType.File)
    {
        stream = sf.openFileStream<StrKey, ByteValue>(streamid, ci, null,
            StreamFactory.StreamSecurityType.Secure, ctype, rw, address, ChunkSize, ThreadPoolSize, log);
    }
    else if (stream_type == StreamType.Remote && ptype == StreamFactory.StreamPhysicalType.File)
    {
        stream = sf.openFileStream<StrKey, ByteValue>(streamid, ci, Li,
            StreamFactory.StreamSecurityType.Plain, ctype, rw, address, ChunkSize, ThreadPoolSize, log);
    }
    else if (stream_type == StreamType.RemoteEnc && ptype == StreamFactory.StreamPhysicalType.File)
    {
        stream = sf.openFileStream<StrKey, ByteValue>(streamid, ci, Li,
            StreamFactory.StreamSecurityType.Secure, ctype, rw, address, ChunkSize, ThreadPoolSize, log);
    }
    else if (stream_type == StreamType.Local && ptype == StreamFactory.StreamPhysicalType.Directory)
    {
        stream = sf.openDirStream<StrKey>(streamid, ci, null,
            StreamFactory.StreamSecurityType.Plain, ctype, rw, address, ChunkSize, ThreadPoolSize, log);
    }
    else if (stream_type == StreamType.LocalEnc && ptype == StreamFactory.StreamPhysicalType.Directory)
    {
        stream = sf.openDirStream<StrKey>(streamid, ci, null,
            StreamFactory.StreamSecurityType.Secure, ctype, rw, address, ChunkSize, ThreadPoolSize, log);
    }
    else if (stream_type == StreamType.Remote && ptype == StreamFactory.StreamPhysicalType.Directory)
    {
        stream = sf.openDirStream<StrKey>(streamid, ci, Li,
            StreamFactory.StreamSecurityType.Plain, ctype, rw, address, ChunkSize, ThreadPoolSize, log);
    }
    else if (stream_type == StreamType.RemoteEnc && ptype == StreamFactory.StreamPhysicalType.Directory)
    {
        stream = sf.openDirStream<StrKey>(streamid, ci, Li,
            StreamFactory.StreamSecurityType.Secure, ctype, rw, address, ChunkSize, ThreadPoolSize, log);
    }
    else
    {
        return;
    }

    if (stream_op == StreamOperation.RandomKeyRandomValueAppend)
    {
        List<ByteValue> vals = new List<ByteValue>(num_operations);
        Byte[][] tmp = new Byte[num_operations][];
        for (int i = 0; i < num_operations; ++i)
        {
            tmp[i] = new Byte[value.Length];
            random.NextBytes(tmp[i]);
        }
        for (int i = 0; i < num_operations; ++i)
        {
            vals.Add(new ByteValue(tmp[i]));
        }
        for (int i = 0; i < num_operations; ++i)
        {
            baselineStorageKV += keys[i].Size();
            baselineStorageKV += vals[i].Size();
            stream.Append(keys[i], vals[i]);
        }
        stream.Close();
    }
    else if (stream_op == StreamOperation.RandomKeySameValueAppend)
    {
        Byte[] singleval = new Byte[value.Length];
        random.NextBytes(singleval);
        ByteValue singlebv = new ByteValue(singleval);
        for (int i = 0; i < num_operations; ++i)
        {
            baselineStorageKV += keys[i].Size();
            baselineStorageKV += value.Length;
            stream.Append(keys[i], singlebv);
        }
        stream.Close();
    }
    else if (stream_op == StreamOperation.SameKeySameValueAppend)
    {
        StrKey key = new StrKey("ExpKey");
        Byte[] singleval = new Byte[value.Length];
        random.NextBytes(singleval);
        ByteValue singlebv = new ByteValue(singleval);
        for (int i = 0; i < num_operations; ++i)
        {
            stream.Append(key, singlebv);
            // System.Threading.Thread.Sleep(10);
        }
        stream.Close();
    }
    else if (stream_op == StreamOperation.RandomKeyGet ||
             stream_op == StreamOperation.RandomKeyGetMultipleSegments)
    {
        for (int i = 0; i < num_operations; ++i)
        {
            stream.Get(keys[random.Next(0, max_key)]);
        }
        stream.Close();
    }
    else if (stream_op == StreamOperation.RandomKeyGetAll)
    {
        StrKey key = new StrKey("ExpKey");
        for (int i = 0; i < num_operations; )
        {
            long st = 0;
            long et = -1;
            Console.WriteLine(stime + ":" + etime);
            while (et < st)
            {
                st = RandomLong(stime, etime, random);
                // et = RandomLong(stime, etime, random);
                et = st + (10 * 10 * TimeSpan.TicksPerMillisecond);
            }
            Console.WriteLine(st + ":" + et);
            IEnumerable<IDataItem> iterator = stream.GetAll(key, st, et);
            foreach (IDataItem data in iterator)
            {
                data.GetVal();
                ++i;
                if (i == num_operations)
                    break;
            }
        }
        stream.Close();
    }
    else if (stream_op == StreamOperation.SameKeyRandomValueAppend)
    {
        StrKey key = new StrKey("ExpKey");
        for (int i = 0; i < num_operations; ++i)
        {
            baselineStorageKV += key.Size();
            // baselineStorageKV += vals[i].Size();
            // stream.Append(key, vals[i]);
        }
        stream.Close();
    }
    else
    {
        for (int i = 0; i < num_operations; ++i)
        {
            stream.Get(new StrKey("" + random.Next(0, num_operations - 1)));
        }
        stream.Close();
    }

    // Dump the instrumentation logs.
    stream.DumpLogs(exp_directory + "/log");

    // Collect costs usage.
    List<string> costs = new List<string>();
    if (doCosts)
    {
        costs.Add(DateTime.UtcNow.Ticks + ": CPU: " + costhelper.getCurrentCpuUsage());
        costs.Add(DateTime.UtcNow.Ticks + ": Network: " + costhelper.getNetworkUsage());
        costs.Add(DateTime.UtcNow.Ticks + ": DataRelated Storage: " + costhelper.getStorageUsage(this.exp_directory, dataRelated: true) / 1000.0f);
        costs.Add(DateTime.UtcNow.Ticks + ": Constant Storage: " + costhelper.getStorageUsage(this.exp_directory, dataRelated: false) / 1000.0f);
        costs.Add(DateTime.UtcNow.Ticks + ": Baseline Storage: " + baselineStorageKV / 1000.0f);
    }
    File.AppendAllLines(exp_directory + "/log", costs);

    // sf.deleteStream(streamid, ci);
}
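// A minimal invocation sketch for Run: append 100 random 4 KB values to a local, plain
// file stream with no cloud synchronizer. All argument values below are placeholders
// chosen for illustration, not values from the source:
public void RunLocalAppendExperiment()
{
    byte[] payload = new byte[4 * 1024];
    Run("caller", "myhome", "myapp", "mystream", "r1", 0, StreamFactory.NowUtc(),
        StreamType.Local, StreamOperation.RandomKeyRandomValueAppend,
        StreamFactory.StreamPhysicalType.File, CompressionType.None,
        4 * 1024 * 1024, 1, payload, 100, SynchronizerType.None);
}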
private static long RemoteRead(int numberOfHomes, DateTime start, DateTime end, string tag)
{
    Dictionary<int, List<double>> temp_energy_allhomes = new Dictionary<int, List<double>>();
    Dictionary<int, List<double>> temp_energy_home;
    long retVal = 0;

    for (int i = 0; i < numberOfHomes; i++)
    {
        temp_energy_home = new Dictionary<int, List<double>>();
        LocationInfo li = new LocationInfo(AzureaccountName, AzureaccountKey, SynchronizerType.Azure);
        FqStreamID fq_sid = new FqStreamID("crrealhome" + i, "A", "TestBS");
        CallerInfo ci = new CallerInfo(null, "A", "A", 1);
        StreamFactory sf = StreamFactory.Instance;
        IStream dfs_byte_val = sf.openValueDataStream<DoubleKey, ByteValue>(fq_sid, ci, li,
            StreamFactory.StreamSecurityType.Plain, CompressionType.None,
            StreamFactory.StreamOp.Read, mdServer, 4 * 1024 * 1024, 1, new Logger());

        long start_ticks = DateTime.Now.Ticks;
        for (int temp = -30; temp <= 40; temp++)
        {
            IEnumerable<IDataItem> vals = dfs_byte_val.GetAll(new DoubleKey(temp),
                DateTimeToUnixTimestamp(start), DateTimeToUnixTimestamp(end));
            if (vals != null)
            {
                foreach (IDataItem val in vals)
                {
                    if (!temp_energy_home.ContainsKey(temp))
                        temp_energy_home[temp] = new List<double>();
                    if (!temp_energy_allhomes.ContainsKey(temp))
                        temp_energy_allhomes[temp] = new List<double>();
                    temp_energy_home[temp].Add(BitConverter.ToDouble(val.GetVal().GetBytes(), 0));
                    temp_energy_allhomes[temp].Add(BitConverter.ToDouble(val.GetVal().GetBytes(), 0));
                }
            }
        }
        dfs_byte_val.Close();
        long end_ticks = DateTime.Now.Ticks;
        retVal += end_ticks - start_ticks;
        WriteToFile(".\\result-realhome-" + i + "-n-" + numberOfHomes + "-" + tag, temp_energy_home);
    }
    WriteToFile(".\\result-allhomes-n-" + numberOfHomes + "-" + tag, temp_energy_allhomes);
    return retVal;
}
new public List<RetVal> PredictOccupancy(long startSlotIndex, long endSlotIndex)
{
    List<RetVal> retVal = new List<RetVal>();
    System.IO.StreamReader datafile = null;
    if (dataFilePath != null)
    {
        // Assuming the data file has one occupancy value per line, skip ahead to startSlotIndex.
        string line;
        int counter = 0;
        datafile = new System.IO.StreamReader(this.dataFilePath);
        if (startSlotIndex != 0)
        {
            while ((line = datafile.ReadLine()) != null)
            {
                if (counter == startSlotIndex)
                    break;
                counter++;
            }
        }
    }

    string AzureaccountName = "testdrive";
    string AzureaccountKey = "zRTT++dVryOWXJyAM7NM0TuQcu0Y23BgCQfkt7xh2f/Mm+r6c8/XtPTY0xxaF6tPSACJiuACsjotDeNIVyXM8Q==";
    LocationInfo li = new LocationInfo(AzureaccountName, AzureaccountKey, SynchronizerType.Azure);
    StreamFactory streamFactory = StreamFactory.Instance;
    FqStreamID fq_sid = new FqStreamID("simplepreheat", "A", "TestBS");
    CallerInfo ci = new CallerInfo(null, "A", "A", 1);
    streamFactory.deleteStream(fq_sid, ci);
    IStream occupancyGroundTruthStream = streamFactory.openValueDataStream<StrKey, ByteValue>(
        fq_sid, ci, li, StreamFactory.StreamSecurityType.Plain, CompressionType.None,
        StreamFactory.StreamOp.Write, null, 4 * 1024 * 1024, 1, new Logger());
    occupancyGroundTruthStream.Close();

    int slotIndex = 0;
    long startTime, retrievalTime, computeTime, insertTime;
    while (true)
    {
        startTime = DateTime.Now.Ticks;
        List<int> currentPOV = SimpleConstructCurrentPOV(occupancyGroundTruthStream, slotIndex);
        List<List<int>> previousDaysPOV = SimpleConstructPreviousPOV(occupancyGroundTruthStream, slotIndex);
        retrievalTime = DateTime.Now.Ticks - startTime;

        startTime = DateTime.Now.Ticks;
        int predictedOccupancy = Predict(currentPOV, previousDaysPOV);
        computeTime = DateTime.Now.Ticks - startTime;

        startTime = DateTime.Now.Ticks;
        int groundTruth;
        if (datafile == null) // no data file to read the ground truth from; append a random value
            groundTruth = random.Next(2);
        else
        {
            string line = datafile.ReadLine();
            groundTruth = int.Parse(line);
        }
        currentPOV.Add(groundTruth);
        /*
        List<int> temp = new List<int>();
        foreach (List<int> previousPOV in previousDaysPOV)
        {
            temp = temp.Concat(previousPOV).ToList();
        }
        temp = temp.Concat(currentPOV).ToList();
        */
        occupancyGroundTruthStream.Append(occupancyKey,
            new ByteValue(currentPOV.SelectMany(BitConverter.GetBytes).ToArray()),
            slotIndexBase + slotIndex);
        insertTime = DateTime.Now.Ticks - startTime;

        Console.WriteLine("Slot number {0} {1} {2} {3}", slotIndex, retrievalTime, computeTime, insertTime);
        using (results = File.AppendText(outputFilePath))
            results.WriteLine("Slot number {0} {1} {2} {3}", slotIndex, retrievalTime, computeTime, insertTime);

        slotIndex++;
        // retVal.Add(new RetVal(endTime - startTime, predictedOccupancy));
        if (slotIndex == endSlotIndex)
            break;
    }
    occupancyGroundTruthStream.Close();
    return retVal;
}