public RowlandsAnalysis(string dataFilePath, int width)
{
    this.width = width;
    this.dataFilePath = dataFilePath;
    count = 0;
    timestamp_energy_temperature = new List<Tuple<DateTime, double, double>>();

    File.Delete(".\\result-width-" + width + ".txt");

    StreamFactory streamFactory = StreamFactory.Instance;
    FqStreamID fq_sid = new FqStreamID("RowlandsAnalysis" + width, "A", "TestBS");
    CallerInfo ci = new CallerInfo(null, "A", "A", 1);
    streamFactory.deleteStream(fq_sid, ci);
    dataStream = streamFactory.openValueDataStream<DoubleKey, ByteValue>(fq_sid, ci, null,
        StreamFactory.StreamSecurityType.Plain, CompressionType.None, StreamFactory.StreamOp.Write,
        mdserveraddress: null, ChunkSizeForUpload: 4 * 1024 * 1024, ThreadPoolSize: 1, log: new Logger());

    // Load the input file: each line is "timestamp,energy,temperature".
    string line;
    System.IO.StreamReader file = new System.IO.StreamReader(dataFilePath);
    while ((line = file.ReadLine()) != null)
    {
        string[] words = line.Split(',');
        DateTime date = Convert.ToDateTime(words[0]);
        double energy = Double.Parse(words[1]);
        double temperature = Double.Parse(words[2]);
        timestamp_energy_temperature.Add(new Tuple<DateTime, double, double>(date, energy, temperature));
    }
    file.Close();
}
public void Setup()
{
    StreamFactory sf = StreamFactory.Instance;

    DateTime Date = new DateTime(DateTime.UtcNow.Ticks);
    string HomeName = String.Format("TestHome-{0}", Date.ToString("yyyy-MM-dd"));
    string Caller = String.Format("{0}", Date.ToString("HH-mm-ss"));
    string AppName = Caller;
    Random rnd = new Random();
    string StreamName = String.Format("{0}", rnd.Next());

    FqStreamID fqstreamid = new FqStreamID(HomeName, AppName, StreamName);
    CallerInfo ci = new CallerInfo(null, Caller, Caller, 1);
    sf.deleteStream(fqstreamid, ci);

    vds = sf.openValueDataStream<StrKey, ByteValue>(fqstreamid, ci, null,
        StreamFactory.StreamSecurityType.Plain, CompressionType.None, StreamFactory.StreamOp.Write,
        mdserveraddress: "http://localhost:23456/MetadataServer");

    k1 = new StrKey("k1");
    k2 = new StrKey("k2");
}
public NaivePreHeat_remoteread(string dataFilePath, int K, string outputFilePath, int chunkSize,
                               string mdserver, LocationInfo li, int endSlotIndex)
{
    this.dataFilePath = dataFilePath;
    occupancyKey = new StrKey("occupancy");
    random = new Random();
    this.constK = K;
    this.outputFilePath = outputFilePath;
    this.chunkSize = chunkSize;
    this.mdserver = mdserver;
    this.li = li;

    int slotIndex = 0;
    StreamFactory streamFactory = StreamFactory.Instance;
    FqStreamID fq_sid = new FqStreamID(fqsidprefix + chunkSize, "A", "TestBS");
    CallerInfo ci = new CallerInfo(null, "A", "A", 1);
    streamFactory.deleteStream(fq_sid, ci);

    IStream occupancyGroundTruthStream = streamFactory.openValueDataStream<StrKey, ByteValue>(fq_sid, ci, li,
        StreamFactory.StreamSecurityType.Plain, CompressionType.None, StreamFactory.StreamOp.Write,
        mdserver, chunkSize, 1, new Logger());

    // Populate the ground-truth stream with random 0/1 occupancy values, one per slot.
    while (true)
    {
        occupancyGroundTruthStream.Append(occupancyKey,
            new ByteValue(BitConverter.GetBytes(random.Next(2))), slotIndexBase + slotIndex);
        slotIndex++;
        if (slotIndex == endSlotIndex)
        {
            break;
        }
    }
    occupancyGroundTruthStream.Close();
}
static void Main(string[] args)
{
    try
    {
        StreamFactory sf = StreamFactory.Instance;
        FqStreamID fqsid = new FqStreamID("new5", "A0", "Test");
        CallerInfo callerinfo = new CallerInfo(null, "A0", "A0", 1);
        String mdserver = "http://localhost:23456/MetaDataServer/";
        // sf.deleteStream(fqsid, callerinfo, mdserver);

        IStream stream = sf.openValueDataStream<StrKey, ByteValue>(fqsid, callerinfo, locationInfo,
            StreamFactory.StreamSecurityType.Plain, CompressionType.None, StreamFactory.StreamOp.Write,
            mdserver, 4 * 1024 * 1024, 1, null, false, WriteFrequencySeconds);

        // Write on a background thread until the user presses Enter.
        Thread writerthread = new Thread(() => Write(stream));
        isWriting = true;
        writerthread.Start();
        Console.WriteLine("Starting Writer .... (press enter to stop) ");
        Console.ReadLine();
        isWriting = false;
        stream.Close();
    }
    catch (Exception e)
    {
        Console.WriteLine(e);
    }
}
public void ExportData(bool remote, DateTime dtbegin, DateTime dtend, String outputFileName)
{
    // Read the settings.
    string accountName = ConfigurationManager.AppSettings.Get("AccountName");
    string accountKey = ConfigurationManager.AppSettings.Get("AccountSharedKey");
    string homeId = ConfigurationManager.AppSettings.Get("HomeId");
    string appId = ConfigurationManager.AppSettings.Get("AppId");
    string streamId = ConfigurationManager.AppSettings.Get("StreamId");

    IStream datastream;
    FileStream fs = new FileStream(outputFileName, FileMode.Append);
    StreamWriter swOut = new StreamWriter(fs);

    StreamFactory sf = StreamFactory.Instance;
    CallerInfo ci = new CallerInfo(null, appId, appId, 0);
    FqStreamID fq_sid = new FqStreamID(homeId, appId, streamId);

    if (remote)
    {
        LocationInfo li = new LocationInfo(accountName, accountKey, SynchronizerType.Azure);
        datastream = sf.openValueDataStream<StrKey, StrValue>(fq_sid, ci, li,
            StreamFactory.StreamSecurityType.Plain, CompressionType.None, StreamFactory.StreamOp.Read,
            null, 4 * 1024 * 1024, 1, null, true);
    }
    else
    {
        datastream = sf.openValueDataStream<StrKey, StrValue>(fq_sid, ci, null,
            StreamFactory.StreamSecurityType.Plain, CompressionType.None, StreamFactory.StreamOp.Read,
            null, 4 * 1024 * 1024, 1, null);
    }

    DateTime dtbeginutc = dtbegin.ToUniversalTime();
    DateTime dtendutc = dtend.ToUniversalTime();

    // StrKey tmpKey = new StrKey("envih1:sensormultilevel:");

    // Dump every (key, timestamp, value) tuple that falls in the requested time range.
    HashSet<IKey> keys = datastream.GetKeys(null, null);
    foreach (IKey key in keys)
    {
        // IEnumerable<IDataItem> dataItemEnum = datastream.GetAll(key);
        IEnumerable<IDataItem> dataItemEnum = datastream.GetAll(key, dtbeginutc.Ticks, dtendutc.Ticks);
        if (dataItemEnum != null)
        {
            foreach (IDataItem di in dataItemEnum)
            {
                DateTime ts = new DateTime(di.GetTimestamp());
                swOut.WriteLine(key + ", " + ts.ToLocalTime() + ", " + di.GetVal().ToString());
            }
        }
    }

    datastream.Close();
    swOut.Close();
}
public Export(bool remote)
{
    IStream datastream;
    string accountName = ConfigurationManager.AppSettings.Get("AccountName");
    string accountKey = ConfigurationManager.AppSettings.Get("AccountSharedKey");
    string homeId = ConfigurationManager.AppSettings.Get("HomeId");
    string appId = ConfigurationManager.AppSettings.Get("AppId");
    string streamId = ConfigurationManager.AppSettings.Get("StreamId");

    StreamFactory sf = StreamFactory.Instance;
    CallerInfo ci = new CallerInfo(null, appId, appId, 0);
    FqStreamID fq_sid = new FqStreamID(homeId, appId, streamId);

    if (remote)
    {
        LocationInfo li = new LocationInfo(accountName, accountKey, SynchronizerType.Azure);
        datastream = sf.openValueDataStream<StrKey, StrValue>(fq_sid, ci, li,
            StreamFactory.StreamSecurityType.Plain, CompressionType.None, StreamFactory.StreamOp.Read,
            null, 4 * 1024 * 1024, 1, null, true);
    }
    else
    {
        datastream = sf.openValueDataStream<StrKey, StrValue>(fq_sid, ci, null,
            StreamFactory.StreamSecurityType.Plain, CompressionType.None, StreamFactory.StreamOp.Read,
            null, 4 * 1024 * 1024, 1, null);
    }

    /*
     * StrKey key = new StrKey("foo");
     * if (datastream != null)
     * {
     *     datastream.Append(key, new StrValue("bar"));
     *     datastream.Append(key, new StrValue("baz"));
     * }
     */

    HashSet<IKey> keys = datastream.GetKeys(null, null);
    foreach (IKey key in keys)
    {
        IEnumerable<IDataItem> dataItemEnum = datastream.GetAll(key);
        foreach (IDataItem di in dataItemEnum)
        {
            try
            {
                DateTime ts = new DateTime(di.GetTimestamp());
                Console.WriteLine(key + ", " + ts + ", " + di.GetVal().ToString());
            }
            catch (Exception e)
            {
                Console.Error.Write(e.StackTrace);
            }
        }
    }
    datastream.Close();
}
public OldDataDirStream(FqStreamID FQSID, StreamFactory.StreamOp Op, CallerInfo Ci, ISync sync)
    : base(FQSID, Op, Ci, sync)
{
    if (!typeof(IValue).IsAssignableFrom(typeof(ValType)))
    {
        throw new InvalidDataException("ValType must implement IValue");
    }
}
public List<RetVal> PredictOccupancy(long startSlotIndex, long endSlotIndex)
{
    long average = 0;
    List<RetVal> retVal = new List<RetVal>();

    StreamFactory streamFactory = StreamFactory.Instance;
    FqStreamID fq_sid = new FqStreamID(fqsidprefix + chunkSize, "A", "TestBS");
    CallerInfo ci = new CallerInfo(null, "A", "A", 1);
    Directory.Delete(fq_sid.HomeId, true);

    int slotIndex = Convert.ToInt32(startSlotIndex);
    long startTime = 0, retrievelTime = 0, computeTime = 0;

    IStream occupancyGroundTruthStream = streamFactory.openValueDataStream<StrKey, ByteValue>(fq_sid, ci, li,
        StreamFactory.StreamSecurityType.Plain, CompressionType.None, StreamFactory.StreamOp.Read,
        mdserver, chunkSize, 1, new Logger());

    while (true)
    {
        List<int> currentPOV = new List<int>();
        List<List<int>> previousDaysPOV = new List<List<int>>();
        try
        {
            startTime = DateTime.Now.Ticks;
            currentPOV = ConstructCurrentPOV(occupancyGroundTruthStream, slotIndex);
            previousDaysPOV = ConstructPreviousPOV(occupancyGroundTruthStream, slotIndex);
            retrievelTime = DateTime.Now.Ticks - startTime;

            startTime = DateTime.Now.Ticks;
            int predictedOccupancy = Predict(currentPOV, previousDaysPOV);
            computeTime = DateTime.Now.Ticks - startTime;
        }
        catch (Exception e)
        {
            Console.WriteLine(e.Message);
        }

        Console.WriteLine("Slot number {0} {1} ", slotIndex, retrievelTime);
        using (results = File.AppendText(outputFilePath))
            results.WriteLine("Slot number {0} {1}", slotIndex, retrievelTime);

        average += retrievelTime;
        slotIndex++;
        if (slotIndex == endSlotIndex)
        {
            break;
        }
    }
    occupancyGroundTruthStream.Close();

    average = average / (endSlotIndex - startSlotIndex + 1);
    retVal.Add(new RetVal(0, Convert.ToInt32(average)));
    return retVal;
}
private static void UploadDataAsStreams(int UploadCount)
{
    string directory = @"..\..\data\meter-data";
    int count = 0;

    // Load hourly temperature readings keyed by timestamp.
    Dictionary<DateTime, double> ts_temperature = new Dictionary<DateTime, double>();
    StreamReader wfile = new System.IO.StreamReader(@"..\..\data\weather.txt");
    string wline;
    while ((wline = wfile.ReadLine()) != null)
    {
        string[] words = wline.Split('\t');
        DateTime date = Convert.ToDateTime(words[4]);
        date = date.AddHours(Int32.Parse(words[5]));
        double temperature = Double.Parse(words[0]);
        ts_temperature[date] = temperature;
    }
    wfile.Close();

    // Upload each meter-data file as its own stream, keyed by the temperature at that hour.
    foreach (string filePath in Directory.GetFiles(directory))
    {
        Console.WriteLine("file name:" + filePath);
        string line;
        System.IO.StreamReader file = new System.IO.StreamReader(filePath);

        LocationInfo li = new LocationInfo(AzureaccountName, AzureaccountKey, SynchronizerType.Azure);
        FqStreamID fq_sid = new FqStreamID("crrealhome" + count, "A", "TestBS");
        CallerInfo ci = new CallerInfo(null, "A", "A", 1);
        StreamFactory sf = StreamFactory.Instance;
        sf.deleteStream(fq_sid, ci);
        IStream dfs_byte_val = sf.openValueDataStream<DoubleKey, ByteValue>(fq_sid, ci, li,
            StreamFactory.StreamSecurityType.Plain, CompressionType.None, StreamFactory.StreamOp.Write,
            mdserveraddress: mdServer, ChunkSizeForUpload: 4 * 1024 * 1024, ThreadPoolSize: 1, log: new Logger());

        while ((line = file.ReadLine()) != null)
        {
            string[] words = line.Split('\t');
            DateTime date = Convert.ToDateTime(words[0]);
            date = date.AddHours(int.Parse(words[1]) / 100);
            DoubleKey key = new DoubleKey((int)ts_temperature[date]);
            dfs_byte_val.Append(key, new ByteValue(BitConverter.GetBytes(Double.Parse(words[2]))),
                DateTimeToUnixTimestamp(date));
            // Console.WriteLine(DateTimeToUnixTimestamp(date) + "," + words[2]);
        }
        file.Close();

        dfs_byte_val.Close();
        count++;
        if (count == UploadCount)
        {
            break;
        }
    }
}
private static long RemoteRead(int numberOfHomes, DateTime start, DateTime end, string tag)
{
    Dictionary<int, List<double>> temp_energy_allhomes = new Dictionary<int, List<double>>();
    Dictionary<int, List<double>> temp_energy_home;
    long retVal = 0;

    for (int i = 0; i < numberOfHomes; i++)
    {
        temp_energy_home = new Dictionary<int, List<double>>();
        LocationInfo li = new LocationInfo(AzureaccountName, AzureaccountKey, SynchronizerType.Azure);
        FqStreamID fq_sid = new FqStreamID("crrealhome" + i, "A", "TestBS");
        CallerInfo ci = new CallerInfo(null, "A", "A", 1);
        StreamFactory sf = StreamFactory.Instance;
        IStream dfs_byte_val = sf.openValueDataStream<DoubleKey, ByteValue>(fq_sid, ci, li,
            StreamFactory.StreamSecurityType.Plain, CompressionType.None, StreamFactory.StreamOp.Read,
            mdServer, 4 * 1024 * 1024, 1, new Logger());

        long start_ticks = DateTime.Now.Ticks;

        // For each temperature bucket, fetch all energy readings in the time window.
        for (int temp = -30; temp <= 40; temp++)
        {
            IEnumerable<IDataItem> vals = dfs_byte_val.GetAll(new DoubleKey(temp),
                DateTimeToUnixTimestamp(start), DateTimeToUnixTimestamp(end));
            if (vals != null)
            {
                foreach (IDataItem val in vals)
                {
                    if (!temp_energy_home.ContainsKey(temp))
                    {
                        temp_energy_home[temp] = new List<double>();
                    }
                    if (!temp_energy_allhomes.ContainsKey(temp))
                    {
                        temp_energy_allhomes[temp] = new List<double>();
                    }
                    temp_energy_home[temp].Add(BitConverter.ToDouble(val.GetVal().GetBytes(), 0));
                    temp_energy_allhomes[temp].Add(BitConverter.ToDouble(val.GetVal().GetBytes(), 0));
                }
            }
        }

        dfs_byte_val.Close();
        long end_ticks = DateTime.Now.Ticks;
        retVal += end_ticks - start_ticks;

        WriteToFile(".\\result-realhome-" + i + "-n-" + numberOfHomes + "-" + tag, temp_energy_home);
    }

    WriteToFile(".\\result-allhomes-n-" + numberOfHomes + "-" + tag, temp_energy_allhomes);
    return retVal;
}
public void Setup()
{
    k1 = new StrKey("k1");
    k2 = new StrKey("k2");

    string AzureaccountName = "testdrive";
    string AzureaccountKey = "zRTT++dVryOWXJyAM7NM0TuQcu0Y23BgCQfkt7xh2f/Mm+r6c8/XtPTY0xxaF6tPSACJiuACsjotDeNIVyXM8Q==";

    locationInfo = new LocationInfo(AzureaccountName, AzureaccountKey, SynchronizerType.Azure);
    streamID = new FqStreamID("99-a2000", "A0", "TestDS");
    callerInfo = new CallerInfo(null, "A0", "A0", 1);
    streamSecurityType = StreamFactory.StreamSecurityType.Plain;
    streamFactory = StreamFactory.Instance;
}
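// Illustrative sketch only (not part of the original source): one way the fields initialized in
// Setup() above might be exercised, using the same deleteStream/openValueDataStream/Append/Get
// calls that appear in the other snippets in this file. The method name and the written values
// are assumptions for illustration.
public void WriteAndReadBack()
{
    streamFactory.deleteStream(streamID, callerInfo);
    IStream stream = streamFactory.openValueDataStream<StrKey, StrValue>(streamID, callerInfo,
        locationInfo, streamSecurityType, CompressionType.None, StreamFactory.StreamOp.Write);

    stream.Append(k1, new StrValue("k1-value"));
    stream.Append(k2, new StrValue("k2-value"));

    // Get() returns the latest value appended under the key.
    Console.WriteLine("k1 -> " + stream.Get(k1));
    stream.Close();
}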
protected IStream CreateFileDataStream<KeyType, ValType>(string streamId, bool remoteSync, int syncIntervalSec = -1)
    where KeyType : IKey, new()
{
    CallerInfo ci = new CallerInfo(this.moduleInfo.WorkingDir(), this.moduleInfo.FriendlyName(),
                                   this.moduleInfo.AppName(), this.Secret());
    FqStreamID fq_sid = new FqStreamID(GetConfSetting("HomeId"), this.moduleInfo.FriendlyName(), streamId);

    if (remoteSync)
    {
        LocationInfo Li = new LocationInfo(GetConfSetting("DataStoreAccountName"),
                                           GetConfSetting("DataStoreAccountKey"), SynchronizerType.Azure);
        return this.streamFactory.openFileDataStream<KeyType>(fq_sid, ci, Li,
            StreamFactory.StreamSecurityType.Plain, CompressionType.None, StreamFactory.StreamOp.Write,
            syncIntervalSec: syncIntervalSec);
    }
    else
    {
        return this.streamFactory.openFileDataStream<KeyType>(fq_sid, ci, null,
            StreamFactory.StreamSecurityType.Plain, CompressionType.None, StreamFactory.StreamOp.Write,
            syncIntervalSec: syncIntervalSec);
    }
}
public DNW_legacy(int numberOfStreams, int window, string outputFilePath)
{
    this.numberOfStreams = numberOfStreams;
    this.window = window;
    this.count = 0;
    this.outputFilePath = outputFilePath;

    StreamFactory streamFactory = StreamFactory.Instance;
    FqStreamID fq_sid = new FqStreamID("dnw-" + window + "-" + numberOfStreams, "A", "TestBS");
    CallerInfo ci = new CallerInfo(null, "A", "A", 1);
    streamFactory.deleteStream(fq_sid, ci);

    dataStreams = new List<IStream>();
    for (int i = 1; i <= numberOfStreams; i++)
    {
        dataStreams.Add(streamFactory.openValueDataStream<StrKey, ByteValue>(fq_sid, ci, null,
            StreamFactory.StreamSecurityType.Plain, CompressionType.None, StreamFactory.StreamOp.Write,
            null, 4 * 1024 * 1024, 1, new Logger()));
    }
}
public DNW(int numberOfStreams, int window, LocationInfo li, string mdserver, int chunkSize)
{
    this.li = li;
    this.numberOfStreams = numberOfStreams;
    this.window = window;
    this.count = 0;
    this.mdServer = mdserver;
    this.chunkSize = chunkSize;

    StreamFactory streamFactory = StreamFactory.Instance;
    CallerInfo ci = new CallerInfo(null, "A", "A", 1);

    dataStreams = new List<IStream>();
    for (int i = 1; i <= numberOfStreams; i++)
    {
        FqStreamID fq_sid = new FqStreamID(fqprefix + "-" + window + "-" + i + "/" + numberOfStreams + "-" + chunkSize,
                                           "A", "TestBS");
        streamFactory.deleteStream(fq_sid, ci);
        dataStreams.Add(streamFactory.openValueDataStream<StrKey, ByteValue>(fq_sid, ci, li,
            StreamFactory.StreamSecurityType.Plain, CompressionType.None, StreamFactory.StreamOp.Write,
            mdserveraddress: mdServer, ChunkSizeForUpload: chunkSize, ThreadPoolSize: 1, log: new Logger()));
    }
}
public long RemoteMatch(List<int> targetSMPCvector)
{
    dataStreams.Clear();
    StreamFactory sf = StreamFactory.Instance;

    for (int i = 1; i <= numberOfStreams; i++)
    {
        CallerInfo ci = new CallerInfo(null, "A", "A", 1);
        FqStreamID fq_sid = new FqStreamID(fqprefix + "-" + window + "-" + i + "/" + numberOfStreams + "-" + chunkSize,
                                           "A", "TestBS");
        IStream dfs_byte_val = sf.openValueDataStream<StrKey, ByteValue>(fq_sid, ci, li,
            StreamFactory.StreamSecurityType.Plain, CompressionType.None, StreamFactory.StreamOp.Read,
            mdserveraddress: mdServer, ChunkSizeForUpload: chunkSize, ThreadPoolSize: 1, log: new Logger());
        dataStreams.Add(dfs_byte_val);
    }

    long start = DateTime.Now.Ticks;
    NumberOfMatches(targetSMPCvector);
    long time = DateTime.Now.Ticks - start;
    return time;
}
public async Task SetupDataStream(bool remote, string accountName, string accountKey,
                                  string homeId, string appId, string streamId)
{
    StreamFactory sf = StreamFactory.Instance;
    CallerInfo ci = new CallerInfo(null, appId, appId, 0);
    FqStreamID fq_sid = new FqStreamID(homeId, appId, streamId);

    if (remote)
    {
        LocationInfo li = new LocationInfo(accountName, accountKey, SynchronizerType.Azure);
        datastream = await Task.Run(() => sf.openValueDataStream<StrKey, StrValue>(fq_sid, ci, li,
            StreamFactory.StreamSecurityType.Plain, CompressionType.None, StreamFactory.StreamOp.Read,
            null, 4 * 1024 * 1024, 1, null, true));
    }
    else
    {
        datastream = await Task.Run(() => sf.openValueDataStream<StrKey, StrValue>(fq_sid, ci, null,
            StreamFactory.StreamSecurityType.Plain, CompressionType.None, StreamFactory.StreamOp.Read,
            null, 4 * 1024 * 1024, 1, null));
    }
}
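// Illustrative sketch only (not part of the original source): once SetupDataStream has assigned
// `datastream`, it can be enumerated with the same GetKeys/GetAll/GetTimestamp calls used by the
// Export methods above. The method name is an assumption.
public void DumpDataStream()
{
    HashSet<IKey> keys = datastream.GetKeys(null, null);
    foreach (IKey key in keys)
    {
        foreach (IDataItem di in datastream.GetAll(key))
        {
            // Timestamps are stored as ticks; convert back to DateTime for display.
            DateTime ts = new DateTime(di.GetTimestamp());
            Console.WriteLine(key + ", " + ts + ", " + di.GetVal());
        }
    }
    datastream.Close();
}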
new public List<RetVal> PredictOccupancy(long startSlotIndex, long endSlotIndex)
{
    List<RetVal> retVal = new List<RetVal>();
    System.IO.StreamReader datafile = null;

    // Assuming the data file has one occupancy value per line, skip ahead to startSlotIndex.
    if (dataFilePath != null)
    {
        string line;
        int counter = 0;
        datafile = new System.IO.StreamReader(this.dataFilePath);
        if (startSlotIndex != 0)
        {
            while ((line = datafile.ReadLine()) != null)
            {
                if (counter == startSlotIndex)
                {
                    break;
                }
                counter++;
            }
        }
    }

    StreamFactory streamFactory = StreamFactory.Instance;
    FqStreamID fq_sid = new FqStreamID("smartpreheat", "A", "TestBS");
    CallerInfo ci = new CallerInfo(null, "A", "A", 1);
    streamFactory.deleteStream(fq_sid, ci);
    IStream occupancyGroundTruthStream = streamFactory.openValueDataStream<StrKey, ByteValue>(fq_sid, ci, null,
        StreamFactory.StreamSecurityType.Plain, CompressionType.None, StreamFactory.StreamOp.Write,
        null, 4 * 1024 * 1024, 1, new Logger());

    int slotIndex = 0;
    long startTime, retrievelTime, computeTime, insertTime;

    while (true)
    {
        // Retrieve the current-day and previous-day occupancy vectors.
        startTime = DateTime.Now.Ticks;
        List<int> currentPOV = SmartConstructCurrentPOV(occupancyGroundTruthStream, slotIndex);
        List<List<int>> previousDaysPOV = SmartConstructPreviousPOV(occupancyGroundTruthStream, slotIndex);
        retrievelTime = DateTime.Now.Ticks - startTime;

        // Predict occupancy for this slot.
        startTime = DateTime.Now.Ticks;
        int predictedOccupancy = Predict(currentPOV, previousDaysPOV);
        computeTime = DateTime.Now.Ticks - startTime;

        // Append the ground truth: read it from the data file, or pick randomly if none was supplied.
        startTime = DateTime.Now.Ticks;
        int groundTruth;
        if (datafile == null)
        {
            groundTruth = random.Next(2);
        }
        else
        {
            string line = datafile.ReadLine();
            groundTruth = int.Parse(line);
        }
        currentPOV.Add(groundTruth);

        List<int> temp = new List<int>();
        foreach (List<int> previousPOV in previousDaysPOV)
        {
            temp = temp.Concat(previousPOV).ToList();
        }
        temp = temp.Concat(currentPOV).ToList();
        occupancyGroundTruthStream.Append(occupancyKey,
            new ByteValue(temp.SelectMany(BitConverter.GetBytes).ToArray()), slotIndexBase + slotIndex);
        insertTime = DateTime.Now.Ticks - startTime;

        Console.WriteLine("Slot number {0} {1} {2} {3}", slotIndex, retrievelTime, computeTime, insertTime);
        using (results = File.AppendText(outputFilePath))
            results.WriteLine("Slot number {0} {1} {2} {3}", slotIndex, retrievelTime, computeTime, insertTime);

        slotIndex++;
        // retVal.Add(new RetVal(endTime - startTime, predictedOccupancy));
        if (slotIndex == endSlotIndex)
        {
            break;
        }
    }
    occupancyGroundTruthStream.Close();
    return retVal;
}
public OldDataFileStream(FqStreamID FQSID, StreamFactory.StreamOp Op, CallerInfo Ci, ISync sync)
{
    if (!typeof(IKey).IsAssignableFrom(typeof(KeyType)))
    {
        throw new InvalidDataException("KeyType must implement IKey");
    }
    if (!typeof(IValue).IsAssignableFrom(typeof(ValType)))
    {
        throw new InvalidDataException("ValType must implement IValue");
    }

    callerId = Ci.friendlyName;
    callerSecret = Ci.secret;
    synchronizer = sync;
    isClosed = false;
    disposed = false;
    sha1 = new SHA1CryptoServiceProvider();

    index = new Dictionary<IKey, List<TS_Offset>>();
    /* ts_index = new List<TS_Offset>(); */
    latest_tso = new TS_Offset(0, 0);

    // Get the current directory.
    string BaseDir = Path.GetFullPath((null != Ci.workingDir) ? Ci.workingDir : Directory.GetCurrentDirectory());
    targetDir = BaseDir + "/" + FQSID.ToString();
    if (!Directory.Exists(targetDir))
    {
        Directory.CreateDirectory(targetDir);
    }
    if (synchronizer != null)
    {
        synchronizer.SetLocalSource(targetDir);
    }

    md = new OldMetaData(targetDir, ".md");

    // Check if stream has to be CREATED
    if (!md.load)
    {
        if (FQSID.AppId == callerId)
        {
            md.setOwner(FQSID.AppId);
            md.SetReadAccess(FQSID.AppId);
            md.SetWriteAccess(FQSID.AppId);
            md.FlushMetaData();
            Console.WriteLine("Created stream " + targetDir + " for " + callerId);
        }
        else
        {
            throw new InvalidOperationException(callerId + " not permitted to create stream for " + FQSID.AppId);
        }
    }

    // Open stream for read or write
    if (Op == StreamFactory.StreamOp.Read)
    {
        if (!OpenForRead())
        {
            throw new InvalidDataException("Couldn't open stream for reading");
        }
    }
    else
    {
        if (!OpenForWrite())
        {
            throw new InvalidDataException("Couldn't open stream for writing");
        }
    }

    // Build index
    try
    {
        // permission checks succeeded
        // load index from file if present
        // TODO: if not and stream.dat is present: recreate index from stream.dat
        string IndexFQN = targetDir + "/index.dat";
        if (File.Exists(IndexFQN))
        {
            FileStream iout = new FileStream(IndexFQN, FileMode.Open, FileAccess.Read, FileShare.ReadWrite);
            BinaryReader index_br = new BinaryReader(iout);
            try
            {
                while (true)
                {
                    string key = index_br.ReadString();
                    IKey ikey = SerializerHelper<KeyType>.DeserializeFromJsonStream(key) as IKey;
                    int num_offsets = index_br.ReadInt32();
                    List<TS_Offset> offsets = new List<TS_Offset>(num_offsets);
                    for (int i = 0; i < num_offsets; i++)
                    {
                        long ts = index_br.ReadInt64();
                        long offset = index_br.ReadInt64();
                        TS_Offset tso = new TS_Offset(ts, offset);
                        offsets.Add(tso);
                        if (ts > latest_tso.ts)
                        {
                            latest_tso = tso;
                        }
                    }
                    index[ikey] = offsets;
                }
            }
            catch (EndOfStreamException)
            {
                // done
            }
            finally
            {
                index_br.Close();
            }
        }

        /*
         * // load ts_index
         * string TsIndexFQN = targetDir + "/ts_index.dat";
         * if (File.Exists(TsIndexFQN))
         * {
         *     FileStream iout = new FileStream(TsIndexFQN, FileMode.Open,
         *                                      FileAccess.Read, FileShare.ReadWrite);
         *     BinaryReader index_br = new BinaryReader(iout);
         *     try
         *     {
         *         while (true)
         *         {
         *             long ts = index_br.ReadInt64();
         *             long offset = index_br.ReadInt64();
         *             ts_index.Add(new TS_Offset(ts, offset));
         *         }
         *     }
         *     catch (EndOfStreamException)
         *     {
         *         // done
         *     }
         *     finally
         *     {
         *         index_br.Close();
         *     }
         * }
         */

        // create the FileStream
        fout = new FileStream(targetDir + "/stream.dat", FileMode.OpenOrCreate, FileAccess.Write, FileShare.ReadWrite);
        fout.Seek(0, SeekOrigin.End);
        fs_bw = new BinaryWriter(fout);
        fin = new FileStream(targetDir + "/stream.dat", FileMode.Open, FileAccess.Read, FileShare.ReadWrite);
        fs_br = new BinaryReader(fin);
    }
    catch (Exception e)
    {
        Console.WriteLine("Failed to open file: " + targetDir + "/stream.dat");
        Console.WriteLine("{0} Exception caught.", e);
    }
}
public void Run(string CallerName, string HomeName, string AppName, string StreamName, string RandName,
                long stime, long etime, StreamType stream_type, StreamOperation stream_op,
                StreamFactory.StreamDataType ptype, CompressionType ctype, int ChunkSize, int ThreadPoolSize,
                Byte[] value, int num_operations, SynchronizerType synctype,
                int max_key = 0, string address = null, bool doCosts = false, bool doRaw = false)
{
    // Set experiment directory
    CallerInfo ci = new CallerInfo(null, CallerName, CallerName, 1);
    exp_directory = Path.GetFullPath((null != ci.workingDir) ? ci.workingDir : Directory.GetCurrentDirectory());
    exp_directory = exp_directory + "/" + HomeName + "/" + AppName + "/" + StreamName;

    if (max_key == 0)
    {
        max_key = num_operations;
    }

    // Set a description/tag for the experiment
    this.exp_id = "Directory: " + HomeName + "/" + AppName + "/" + StreamName +
                  " Caller:" + CallerName +
                  " Stream Type:" + stream_type +
                  " Stream Op: " + stream_op +
                  " Stream Ptype: " + ptype +
                  " Compression Type: " + ctype +
                  " Value size: " + value.Length +
                  " num_operations: " + max_key +
                  " actual_num_ops: " + num_operations +
                  " Sync type: " + synctype +
                  " Do costs? " + doCosts +
                  " Chunk Size: " + ChunkSize +
                  " ThreadPool Size:" + ThreadPoolSize;

    this.compressed_exp_id = " ST:" + stream_type + " OP: " + stream_op + " PT: " + ptype + " CT: " + ctype +
                             " VS: " + value.Length + " I:" + num_operations + " MK:" + max_key +
                             " SYNC: " + synctype + " chsize: " + ChunkSize + " nThreads: " + ThreadPoolSize;

    // Set remote storage server account info
    string AzureaccountName = ConfigurationManager.AppSettings.Get("AccountName");
    string AzureaccountKey = ConfigurationManager.AppSettings.Get("AccountSharedKey");
    string S3accountName = ConfigurationManager.AppSettings.Get("S3AccountName");
    string S3accountKey = ConfigurationManager.AppSettings.Get("S3AccountSharedKey");

    LocationInfo Li;
    if (synctype == SynchronizerType.Azure)
    {
        Li = new LocationInfo(AzureaccountName, AzureaccountKey, SynchronizerType.Azure);
    }
    else if (synctype == SynchronizerType.AmazonS3)
    {
        Li = new LocationInfo(S3accountName, S3accountKey, SynchronizerType.AmazonS3);
    }
    else
    {
        Li = null;
    }

    StreamFactory sf = StreamFactory.Instance;
    IStream stream = null;
    FqStreamID streamid = new FqStreamID(HomeName, AppName, StreamName);

    // Set op : R/W
    StreamFactory.StreamOp rw;
    if (stream_op == StreamOperation.RandomKeyRandomValueAppend ||
        stream_op == StreamOperation.RandomKeySameValueAppend ||
        stream_op == StreamOperation.SameKeyRandomValueAppend ||
        stream_op == StreamOperation.SameKeySameValueAppend)
    {
        rw = StreamFactory.StreamOp.Write;
    }
    else
    {
        rw = StreamFactory.StreamOp.Read;
    }

    // Initialize costs
    CostsHelper costhelper = null;
    double baselineStorageKV = 0;
    if (doCosts)
    {
        costhelper = new CostsHelper();
        costhelper.getCurrentCpuUsage();
        costhelper.getNetworkUsage();
    }

    if (stream_type == StreamType.CloudRaw)
    {
        if (!Directory.Exists(exp_directory))
        {
            Directory.CreateDirectory(exp_directory);
        }
        Logger logger = new Logger();
        Byte[] val = new Byte[value.Length * num_operations];

        // DateTime Date = new DateTime(DateTime.UtcNow.Ticks);
        // string cname = String.Format("CloudRaw-{0}", Date.ToString("yyyy-MM-dd"));
        // string bname = String.Format("{0}", Date.ToString("HH-mm-ss"));
        // string cname = String.Format("cloudraw-{0}", RandomString(4));
        // string bname = String.Format("{0}", RandomString(4));
        string cname = String.Format("cloudraw-{0}", RandName);
        string bname = String.Format("{0}", RandName);

        if (stream_op == StreamOperation.RandomKeyGet ||
            stream_op == StreamOperation.RandomKeyGetMultipleSegments ||
            stream_op == StreamOperation.RandomKeyGetAll)
        {
            doRawCloudPerf(val, SynchronizerType.Azure, SynchronizeDirection.Download,
                           exp_directory, logger, containerName: cname, blobName: bname);
            logger.Dump(exp_directory + "/log");
        }
        else
        {
            doRawCloudPerf(val, SynchronizerType.Azure, SynchronizeDirection.Upload,
                           exp_directory, logger, containerName: cname, blobName: bname);
            logger.Dump(exp_directory + "/log");
        }
        return;
    }

    if (stream_type == StreamType.DiskRaw)
    {
        if (!Directory.Exists(exp_directory))
        {
            Directory.CreateDirectory(exp_directory);
        }
        Logger logger = doDiskRaw(stream_op, num_operations, value.Length, ptype, exp_directory);
        logger.Dump(exp_directory + "/log");
        return;
    }

    // Are we getting raw disk throughput?
    if (stream_type == StreamType.Raw)
    {
        string ret = doDiskSpeed((value.Length * num_operations) / 1000 + "K", value.Length / 1000 + "K", rw);
        if (!Directory.Exists(exp_directory))
        {
            Directory.CreateDirectory(exp_directory);
        }
        File.WriteAllText(exp_directory + "/log", ret);
        return;
    }

    // Populate the keys and the values
    Random random = new Random(DateTime.Now.Millisecond);
    StrKey[] keys = new StrKey[max_key];
    for (int i = 0; i < max_key; ++i)
    {
        keys[i] = new StrKey("" + i);
    }

    /*
     * List<ByteValue> vals = new List<ByteValue>(num_operations);
     * Byte[][] tmp = new Byte[num_operations][];
     * for (int i = 0; i < num_operations; ++i)
     * {
     *     tmp[i] = new Byte[value.Length];
     *     random.NextBytes(tmp[i]);
     * }
     *
     * for (int i = 0; i < num_operations; ++i)
     * {
     *     keys[i] = new StrKey("" + i);
     *     vals.Add(new ByteValue(tmp[i]));
     *     // vals[i] = new ByteValue(tmp);
     * }
     */

    Logger log = new Logger();

    // Open stream for different types of experiments
    if (stream_type == StreamType.Local && ptype == StreamFactory.StreamDataType.Values)
    {
        stream = sf.openValueDataStream<StrKey, ByteValue>(streamid, ci, null,
            StreamFactory.StreamSecurityType.Plain, ctype, rw, address, ChunkSize, ThreadPoolSize, log);
    }
    else if (stream_type == StreamType.LocalEnc && ptype == StreamFactory.StreamDataType.Values)
    {
        stream = sf.openValueDataStream<StrKey, ByteValue>(streamid, ci, null,
            StreamFactory.StreamSecurityType.Secure, ctype, rw, address, ChunkSize, ThreadPoolSize, log);
    }
    else if (stream_type == StreamType.Remote && ptype == StreamFactory.StreamDataType.Values)
    {
        stream = sf.openValueDataStream<StrKey, ByteValue>(streamid, ci, Li,
            StreamFactory.StreamSecurityType.Plain, ctype, rw, address, ChunkSize, ThreadPoolSize, log);
    }
    else if (stream_type == StreamType.RemoteEnc && ptype == StreamFactory.StreamDataType.Values)
    {
        stream = sf.openValueDataStream<StrKey, ByteValue>(streamid, ci, Li,
            StreamFactory.StreamSecurityType.Secure, ctype, rw, address, ChunkSize, ThreadPoolSize, log);
    }
    else if (stream_type == StreamType.Local && ptype == StreamFactory.StreamDataType.Files)
    {
        stream = sf.openFileDataStream<StrKey>(streamid, ci, null,
            StreamFactory.StreamSecurityType.Plain, ctype, rw, address, ChunkSize, ThreadPoolSize, log);
    }
    else if (stream_type == StreamType.LocalEnc && ptype == StreamFactory.StreamDataType.Files)
    {
        stream = sf.openFileDataStream<StrKey>(streamid, ci, null,
            StreamFactory.StreamSecurityType.Secure, ctype, rw, address, ChunkSize, ThreadPoolSize, log);
    }
    else if (stream_type == StreamType.Remote && ptype == StreamFactory.StreamDataType.Files)
    {
        stream = sf.openFileDataStream<StrKey>(streamid, ci, Li,
            StreamFactory.StreamSecurityType.Plain, ctype, rw, address, ChunkSize, ThreadPoolSize, log);
    }
    else if (stream_type == StreamType.RemoteEnc && ptype == StreamFactory.StreamDataType.Files)
    {
        stream = sf.openFileDataStream<StrKey>(streamid, ci, Li,
            StreamFactory.StreamSecurityType.Secure, ctype, rw, address, ChunkSize, ThreadPoolSize, log);
    }
    else
    {
        return;
    }

    if (stream_op == StreamOperation.RandomKeyRandomValueAppend)
    {
        List<ByteValue> vals = new List<ByteValue>(num_operations);
        Byte[][] tmp = new Byte[num_operations][];
        for (int i = 0; i < num_operations; ++i)
        {
            tmp[i] = new Byte[value.Length];
            random.NextBytes(tmp[i]);
        }
        for (int i = 0; i < num_operations; ++i)
        {
            vals.Add(new ByteValue(tmp[i]));
        }
        for (int i = 0; i < num_operations; ++i)
        {
            baselineStorageKV += keys[i].Size();
            baselineStorageKV += vals[i].Size();
            stream.Append(keys[i], vals[i]);
        }
        stream.Close();
    }
    else if (stream_op == StreamOperation.RandomKeySameValueAppend)
    {
        Byte[] singleval = new Byte[value.Length];
        random.NextBytes(singleval);
        ByteValue singlebv = new ByteValue(singleval);
        for (int i = 0; i < num_operations; ++i)
        {
            baselineStorageKV += keys[i].Size();
            baselineStorageKV += value.Length;
            stream.Append(keys[i], singlebv);
        }
        stream.Close();
    }
    else if (stream_op == StreamOperation.SameKeySameValueAppend)
    {
        StrKey key = new StrKey("ExpKey");
        Byte[] singleval = new Byte[value.Length];
        random.NextBytes(singleval);
        ByteValue singlebv = new ByteValue(singleval);
        for (int i = 0; i < num_operations; ++i)
        {
            stream.Append(key, singlebv);
            // System.Threading.Thread.Sleep(10);
        }
        stream.Close();
    }
    else if (stream_op == StreamOperation.RandomKeyGet ||
             stream_op == StreamOperation.RandomKeyGetMultipleSegments)
    {
        for (int i = 0; i < num_operations; ++i)
        {
            stream.Get(keys[random.Next(0, max_key)]);
        }
        stream.Close();
    }
    else if (stream_op == StreamOperation.RandomKeyGetAll)
    {
        StrKey key = new StrKey("ExpKey");
        for (int i = 0; i < num_operations;)
        {
            long st = 0;
            long et = -1;
            Console.WriteLine(stime + ":" + etime);
            while (et < st)
            {
                st = RandomLong(stime, etime, random);
                // et = RandomLong(stime, etime, random);
                et = st + (10 * 10 * TimeSpan.TicksPerMillisecond);
            }
            Console.WriteLine(st + ":" + et);
            IEnumerable<IDataItem> iterator = stream.GetAll(key, st, et);
            foreach (IDataItem data in iterator)
            {
                data.GetVal();
                ++i;
                if (i == num_operations)
                {
                    break;
                }
            }
        }
        stream.Close();
    }
    else if (stream_op == StreamOperation.SameKeyRandomValueAppend)
    {
        StrKey key = new StrKey("ExpKey");
        for (int i = 0; i < num_operations; ++i)
        {
            baselineStorageKV += key.Size();
            // baselineStorageKV += vals[i].Size();
            // stream.Append(key, vals[i]);
        }
        stream.Close();
    }
    else
    {
        for (int i = 0; i < num_operations; ++i)
        {
            stream.Get(new StrKey("" + random.Next(0, num_operations - 1)));
        }
        stream.Close();
    }

    // Dump the instrumentation logs
    stream.DumpLogs(exp_directory + "/log");

    // Collect costs usage
    List<string> costs = new List<string>();
    if (doCosts)
    {
        costs.Add(DateTime.UtcNow.Ticks + ": CPU: " + costhelper.getCurrentCpuUsage());
        costs.Add(DateTime.UtcNow.Ticks + ": Network: " + costhelper.getNetworkUsage());
        costs.Add(DateTime.UtcNow.Ticks + ": DataRelated Storage: " +
                  costhelper.getStorageUsage(this.exp_directory, dataRelated: true) / 1000.0f);
        costs.Add(DateTime.UtcNow.Ticks + ": Constant Storage: " +
                  costhelper.getStorageUsage(this.exp_directory, dataRelated: false) / 1000.0f);
        costs.Add(DateTime.UtcNow.Ticks + ": Baseline Storage: " + baselineStorageKV / 1000.0f);
    }
    File.AppendAllLines(exp_directory + "/log", costs);

    // sf.deleteStream(streamid, ci);
}
static void Main(string[] args)
{
    string AzureaccountName = ConfigurationManager.AppSettings.Get("AccountName");
    string AzureaccountKey = ConfigurationManager.AppSettings.Get("AccountSharedKey");
    LocationInfo li = new LocationInfo(AzureaccountName, AzureaccountKey, SynchronizerType.Azure);

    /*
     * string dataFile = "D:\\b";
     * int KB = 1024;
     * int[] chunk_sizes = { 4*1024*KB , 8*1024*KB };
     *
     * for (int i = 1; i <= 1; i++)
     * {
     *     for (int threads = 1; threads <= 1; threads++)
     *     {
     *         foreach (int csize in chunk_sizes)
     *         {
     *             Console.Write(">");
     *             File.Copy(dataFile, dataFile + threads + "," + csize);
     *
     *             AzureHelper helper = new AzureHelper(AzureaccountName, AzureaccountKey, "foo123123",
     *                 CompressionType.None, EncryptionType.None, null, null, new Logger(), csize, threads);
     *             long start = DateTime.Now.Ticks;
     *             helper.UploadFileAsChunks(dataFile + threads + "," + csize);
     *             long end = DateTime.Now.Ticks;
     *             Console.WriteLine(threads + "," + csize + "," + (((double)(end - start) / (double)10000000)));
     *         }
     *     }
     * }
     */

    li = null;

    FqStreamID fq_sid = new FqStreamID("1299-2716", "A", "TestBS");
    CallerInfo ci = new CallerInfo(null, "A", "A", 1);
    StreamFactory sf = StreamFactory.Instance;
    sf.deleteStream(fq_sid, ci);

    // Write a few values under k1, sealing the segment partway through.
    IStream dfs_byte_val = sf.openValueDataStream<StrKey, ByteValue>(fq_sid, ci, li,
        StreamFactory.StreamSecurityType.Plain, CompressionType.None, StreamFactory.StreamOp.Write);
    StrKey k1 = new StrKey("k1");
    dfs_byte_val.Append(k1, new ByteValue(StreamFactory.GetBytes("k1-cmu")));
    dfs_byte_val.Append(k1, new ByteValue(StreamFactory.GetBytes("k1-msr")));
    dfs_byte_val.Seal(false);
    dfs_byte_val.Append(k1, new ByteValue(StreamFactory.GetBytes("k1-uw")));
    dfs_byte_val.Close();
    Console.ReadKey();

    // Reopen the stream and read the values back.
    dfs_byte_val = sf.openValueDataStream<StrKey, ByteValue>(fq_sid, ci, li,
        StreamFactory.StreamSecurityType.Plain, CompressionType.None, StreamFactory.StreamOp.Write);
    Console.WriteLine("Get in read : " + dfs_byte_val.Get(k1));
    IEnumerable<IDataItem> data = dfs_byte_val.GetAll(k1, 0, StreamFactory.NowUtc());
    foreach (IDataItem dataItem in data)
    {
        Console.WriteLine(dataItem.GetVal().ToString());
    }
    dfs_byte_val.Close();
    Console.ReadKey();

    /*
     * ValueSerializerBase<StrKey> vsb = new ValueSerializerBase<StrKey>();
     * Byte[] buffer1 = vsb.SerializeToByteStream().ToArray();
     * Byte[] buffer2 = SerializerHelper<StrKey>.SerializeToProtoStream(k1).ToArray();
     *
     * FileStream fout = new FileStream("tmp.txt", FileMode.OpenOrCreate, FileAccess.Write, FileShare.ReadWrite);
     * BinaryWriter fs_bw = new BinaryWriter(fout);
     * fs_bw.Write(buffer1);
     * fs_bw.Write("-----W00t!-----");
     * fs_bw.Write(buffer2);
     * fs_bw.Write("-----W00t!-----");
     * fs_bw.Close();
     * fout.Close();
     */
}