public static void Main( string[] args )
{
   // Show the object's state immediately after construction.
   var data = new InternalData();
   Console.WriteLine( "After instantiation:\n{0}", data );

   // Mutate the internal-access members, then show the state again.
   data.number = 77;
   data.message = "Goodbye";
   Console.WriteLine( "\nAfter changing values:\n{0}", data );
}
/// <summary>
/// ByDoctor calculation over doctors that have no patients: included
/// patient-less doctors must score 0, excluded ones must disappear entirely.
/// </summary>
public void Calculator_Calculate_OneDoctorWithoutPacients()
{
    var data = new InternalData();
    var doc = new InternalDataDoctor() { DoctorId = 1 };
    data.Doctors.Add(doc);

    // One doctor, no patients: when not excluded -> one zero-valued result.
    var res0 = Calc.Calculate(data, CalculationType.ByDoctor,
        excludeDoctorsWithoutPacients: false, excludePacientsWithoutMeasurements: false);
    Assert.AreEqual(1, res0.Count(), "res0 must contains 1 result");
    Assert.AreEqual(0d, res0.First().Result);

    // Same doctor, but excluded because he has no patients.
    var res1 = Calc.Calculate(data, CalculationType.ByDoctor,
        excludeDoctorsWithoutPacients: true, excludePacientsWithoutMeasurements: false);
    Assert.AreEqual(0, res1.Count(), "res1 must contains no result");

    // A second patient-less doctor behaves the same way.
    data.Doctors.Add(new InternalDataDoctor() { DoctorId = 2 });
    var res2 = Calc.Calculate(data, CalculationType.ByDoctor,
        excludeDoctorsWithoutPacients: false, excludePacientsWithoutMeasurements: false);
    Assert.AreEqual(2, res2.Count(), "res2 must contains 2 result");
    var res3 = Calc.Calculate(data, CalculationType.ByDoctor,
        excludeDoctorsWithoutPacients: true, excludePacientsWithoutMeasurements: false);
    Assert.AreEqual(0, res3.Count(), "res3 must contains 0 result");
}
/// <summary>
/// One doctor, one patient, 100 weekly measurements with a constant height
/// step: the growth rate must scale correctly across the day/week/month/year
/// time bases (week rate = start_height, day = /7, year = *365.25, month = /12).
/// </summary>
public void Calculator_Calculate_OneDoctorWithOnePacientsAndMeasurements()
{
    var data = new InternalData();
    var doc = new InternalDataDoctor() { DoctorId = 1 };
    var pac0 = new InternalDataPacient() { DoctorId = doc.DoctorId, PacientId = 1 };
    data.Doctors.Add(doc);
    data.Pacients.Add(pac0);

    // Height grows by start_height every 7 days, so the per-week rate is exactly start_height.
    double start_height = 100;
    double height = start_height;
    var dt = DateTime.Now;
    for (int i = 0; i < 100; i++)
    {
        var me = new InternalDataMeasurement() { PacientId = pac0.PacientId, MeasurementId = i };
        var hc = new InternalDataHeighComponent() { MeasurementId = me.MeasurementId, Height = height };
        var ts = new InternalDataTimestamp() { MeasurementId = me.MeasurementId, Timestamp = dt };
        data.Measurements.Add(me);
        data.HeighComponent.Add(hc);
        data.Timestamps.Add(ts);
        height += start_height;
        dt = dt.AddDays(7);
    }

    var res0 = Calc.Calculate(data, CalculationType.ByDoctor, CalculationTimeType.PerWeek,
        excludeDoctorsWithoutPacients: false, excludePacientsWithoutMeasurements: false);
    Assert.AreEqual(start_height, res0.First().Result,
        string.Format("result for res0 must equals {0} (per week)", start_height));

    var res1 = Calc.Calculate(data, CalculationType.ByDoctor, CalculationTimeType.PerDay,
        excludeDoctorsWithoutPacients: false, excludePacientsWithoutMeasurements: false);
    Assert.AreEqual(start_height / 7d, res1.First().Result.Value, 0.01,
        string.Format("result for res1 must equals {0}/7 (per day)", start_height));

    var res2 = Calc.Calculate(data, CalculationType.ByDoctor, CalculationTimeType.PerYear,
        excludeDoctorsWithoutPacients: false, excludePacientsWithoutMeasurements: false);
    Assert.AreEqual(start_height / 7d * 365.25, res2.First().Result.Value, 0.01,
        string.Format("result for res2 must equals {0}/7*365.25 (per year)", start_height));

    var res3 = Calc.Calculate(data, CalculationType.ByDoctor, CalculationTimeType.PerMonth,
        excludeDoctorsWithoutPacients: false, excludePacientsWithoutMeasurements: false);
    Assert.AreEqual(start_height / 7d * 365.25 / 12d, res3.First().Result.Value, 0.01,
        string.Format("result for res3 must equals {0}/7*365.25/12 (per month)", start_height));
}
/// <summary>
/// Starts fetching the broadcast information asynchronously.
/// </summary>
private static void BeginGetLiveInfo(long liveId, InternalData internalData, AutoResetEvent ev)
{
    WebUtil.RequestHttpTextAsync(
        NicoString.GetLiveUrl(liveId),
        null,
        internalData.Cookie,
        Encoding.UTF8,
        (result, text) =>
        {
            try
            {
                // Treat it as an error if the broadcast info cannot be parsed.
                var v = LiveInfo.CreateFromHtml(
                    string.Format("lv{0}", liveId), text);

                internalData.LiveStreamInfo.LiveInfo = v;
            }
            catch (Exception ex)
            {
                // Stash the failure; the waiting caller rethrows it.
                internalData.Exception = ex;
            }

            // Signal completion whether we succeeded or failed.
            ev.Set();
        });
}
/// <summary>
/// Starts fetching playerstatus asynchronously.
/// </summary>
private static void BeginGetPlayerStatus(long liveId, InternalData internalData, AutoResetEvent ev)
{
    WebUtil.RequestHttpAsync(
        NicoString.GetPlayerStatusUrl(liveId),
        null,
        internalData.Cookie,
        (result, data) =>
        {
            try
            {
                // Treat an invalid status document as an error.
                var v = PlayerStatus.CreateFromXml(liveId, GetXml(data));

                internalData.LiveStreamInfo.PlayerStatus = v;
            }
            catch (Exception ex)
            {
                // Stash the failure; the waiting caller rethrows it.
                internalData.Exception = ex;
            }

            // Signal completion whether we succeeded or failed.
            ev.Set();
        });
}
/// <summary>
/// Persists settings whenever a property value is edited in the grid.
/// </summary>
private void settingsGrid_PropertyValueChanged(object s, PropertyValueChangedEventArgs e) => InternalData.SaveSettings();
/* Method: ReadTrainFromFile
 * Reads a file that stores training data.
 *
 * The file must be formatted like:
 * >TrainDataLength InputCount OutputCount
 * >inputdata seperated by space
 * >outputdata seperated by space
 * > (repeated for each pattern)
 *
 * Return:
 *   True on success, false on failure.
 *
 * See also:
 *   <NeuralNet::TrainOnData>, <SaveTrain>, <fann_read_train_from_file at http://libfann.github.io/fann/docs/files/fann_train-h.html#fann_read_train_from_file>
 *
 * This function appears in FANN >= 1.0.0
 */
public bool ReadTrainFromFile(string filename) => InternalData.read_train_from_file(filename);
/// <summary>
/// One doctor with two measurement-less patients: checks result counts for
/// ByDoctor/ByPacient calculations under every exclusion-flag combination.
/// </summary>
public void Calculator_Calculate_OneDoctorWithPacients()
{
    var data = new InternalData();
    var doc = new InternalDataDoctor() { DoctorId = 1 };
    var pac0 = new InternalDataPacient() { DoctorId = doc.DoctorId, PacientId = 1 };
    var pac1 = new InternalDataPacient() { DoctorId = doc.DoctorId, PacientId = 2 };
    data.Doctors.Add(doc);
    data.Pacients.Add(pac0);
    data.Pacients.Add(pac1);

    // One doctor aggregate; no measurements means a 0 growth rate.
    var res0 = Calc.Calculate(data, CalculationType.ByDoctor, excludeDoctorsWithoutPacients: false, excludePacientsWithoutMeasurements: false);
    Assert.AreEqual(1, res0.Count(), "res0 must contains 1 result");
    Assert.AreEqual(0d, res0.First().Result, "result for res0 must equals 0");

    // Per-patient: one zero result per patient.
    var res1 = Calc.Calculate(data, CalculationType.ByPacient, excludeDoctorsWithoutPacients: false, excludePacientsWithoutMeasurements: false);
    Assert.AreEqual(2, res1.Count(), "res1 must contains 2 result");
    Assert.AreEqual(0d, res1.Distinct().First().Result, "result for res1 must equals 0");

    // Neither patient has measurements, so excluding them empties the result set.
    var res2 = Calc.Calculate(data, CalculationType.ByPacient, excludeDoctorsWithoutPacients: false, excludePacientsWithoutMeasurements: true);
    Assert.AreEqual(0, res2.Count(), "res2 must contains 0 result");
    var res3 = Calc.Calculate(data, CalculationType.ByPacient, excludeDoctorsWithoutPacients: true, excludePacientsWithoutMeasurements: true);
    // FIX: message previously claimed "2 result" while the assertion expects 0.
    Assert.AreEqual(0, res3.Count(), "res3 must contains 0 result");

    // A second doctor without patients is counted only when not excluded.
    data.Doctors.Add(new InternalDataDoctor() { DoctorId = 2 });
    var res4 = Calc.Calculate(data, CalculationType.ByDoctor, excludeDoctorsWithoutPacients: false, excludePacientsWithoutMeasurements: false);
    Assert.AreEqual(2, res4.Count(), "res4 must contains 2 result");
    var res5 = Calc.Calculate(data, CalculationType.ByDoctor, excludeDoctorsWithoutPacients: true, excludePacientsWithoutMeasurements: false);
    Assert.AreEqual(1, res5.Count(), "res5 must contains 1 result");
}
/// <summary>
/// Removes the entry stored under <paramref name="key"/>, if any.
/// </summary>
public void Remove(object key) => InternalData.Remove(key);
/// <summary>
/// Exposes the wrapped native FANN training-data handle.
/// </summary>
internal SWIGTYPE_p_fann_train_data ToFannTrainData() => InternalData.to_fann_train_data();
/// <summary>
/// Async receive callback: accumulates socket reads into fixed 1024-byte
/// frames, forwards each completed frame, then re-arms the receive with
/// however many bytes are still missing from the current frame.
/// </summary>
private void onReceive(IAsyncResult ar)
{
    bool read_complete = false;
    int next_read_size = 1024;
    try
    {
        int bytesRead = clientSocket.EndReceive(ar);
        if (bytesRead < 1024)
        {
            // Partial read: copy just the bytes that arrived into the accumulator.
            byte[] this_read = new byte[bytesRead];
            Array.Copy(byteData, 0, this_read, 0, bytesRead);
            parts.AddRange(this_read);
            if (parts.Count >= 1024)
            {
                if (parts.Count > 1024)
                {
                    // Overshoot should never happen with next_read_size capped; log and continue with the first 1024 bytes.
                    LogEvent($"Warning: Read {parts.Count} bytes, expected 1024");
                }
                Array.Copy(parts.ToArray(), 0, byteData, 0, 1024);
                parts = new List<byte>();
                read_complete = true;
            }
            else
            {
                //Not done, set the next read size to the remaining
                next_read_size = 1024 - parts.Count;
            }
        }
        else if (bytesRead == 1024)
        {
            //next_read_size = 1024; //set above
            //bytedata is fine
            read_complete = true;
        }

        //we have the 1024 to send forward
        if (read_complete)
        {
            byte[] internalData = new byte[0];
            // NOTE(review): ReceiveData presumably unwraps/validates the frame — confirm NetData semantics.
            if (NetData.ReceiveData("server", byteData, out internalData))
            {
                InternalData ido = new InternalData(internalData);
                ReceiveEvent(ido);
            }
        }

        // Continue to listen
        byteData = new byte[1024];
        clientSocket.BeginReceive(byteData, 0, next_read_size, SocketFlags.None, new AsyncCallback(onReceive), null); //should be socket?
    }
    catch (ObjectDisposedException)
    {
        // Chances are the object was disposed by Disconnect on another thread
        Disconnect("Socket disposed"); //Just in case
    }
    catch (Exception ex)
    {
        LogEvent($"Error: Unable to receive message (OnReceive).\r\n{ex}");
        Disconnect("Error recieving message. See log for details.");
    }
}
/// <summary>
/// Marks the instance as disposed, then releases the held entries.
/// </summary>
public void Dispose()
{
    // Flag first so concurrent readers observe the disposed state
    // before the backing data is emptied.
    IsDisposed = true;
    InternalData.Clear();
}
/// <inheritdoc cref="TableWorker{T}.NotifyGlobalDataChanged"/>
public override void NotifyGlobalDataChanged()
{
    var wasDirty = false;

    // Add a table row for every global event that has no matching row yet.
    foreach (EventItem item in ToolkitUtils.Data.Events.Select(item => new { item, existing = InternalData.Find(i => i.Data.Equals(item)) })
        .Where(t => t.existing == null)
        .Select(t => t !.item))
    {
        InternalData.Add(new TableSettingsItem <EventItem> { Data = item });
        wasDirty = true;
    }

    // Re-sort only when at least one new row was inserted.
    if (wasDirty)
    {
        NotifySortRequested();
    }
}
/* Method: ScaleTrainData
 *
 * Scales the inputs and outputs in the training data to the specified range.
 *
 * A simplified scaling method, mostly useful when all the data is known to be
 * in one range and should be transformed to another range.
 *
 * Not recommended on subsets of data, as the complete input range might not
 * be available in that subset. For more powerful scaling, consider
 * <NeuralNet::ScaleTrain>.
 *
 * See also:
 *   <ScaleOutputTrainData>, <ScaleInputTrainData>, <fann_scale_train_data at http://libfann.github.io/fann/docs/files/fann_train-h.html#fann_scale_train_data>
 *
 * This function appears in FANN >= 2.0.0.
 */
public void ScaleTrainData(int new_min, int new_max) => InternalData.scale_train_data(new_min, new_max);
/// <summary>
/// Builds a debug string of the form "THTokenInfo(Field: value, ...)",
/// listing only fields flagged as set in __isset (and, for reference
/// types, non-null).
/// </summary>
public override string ToString()
{
    StringBuilder __sb = new StringBuilder("THTokenInfo(");
    // Tracks whether a ", " separator is needed before the next field.
    bool __first = true;
    if (ReferrerUserId != null && __isset.referrerUserId)
    {
        if (!__first) { __sb.Append(", "); }
        __first = false;
        __sb.Append("ReferrerUserId: ");
        __sb.Append(ReferrerUserId);
    }
    if (Token != null && __isset.token)
    {
        if (!__first) { __sb.Append(", "); }
        __first = false;
        __sb.Append("Token: ");
        __sb.Append(Token);
    }
    if (Provider != null && __isset.provider)
    {
        if (!__first) { __sb.Append(", "); }
        __first = false;
        __sb.Append("Provider: ");
        __sb.Append(Provider);
    }
    // Value-type field: only the __isset flag guards it.
    if (__isset.firstMatch)
    {
        if (!__first) { __sb.Append(", "); }
        __first = false;
        __sb.Append("FirstMatch: ");
        __sb.Append(FirstMatch);
    }
    if (LinkParams != null && __isset.linkParams)
    {
        if (!__first) { __sb.Append(", "); }
        __first = false;
        __sb.Append("LinkParams: ");
        __sb.Append(LinkParams.ToDebugString());
    }
    if (InternalData != null && __isset.internalData)
    {
        if (!__first) { __sb.Append(", "); }
        __first = false;
        __sb.Append("InternalData: ");
        __sb.Append(InternalData.ToDebugString());
    }
    if (__isset.guaranteedMatch)
    {
        if (!__first) { __sb.Append(", "); }
        __first = false;
        __sb.Append("GuaranteedMatch: ");
        __sb.Append(GuaranteedMatch);
    }
    if (OriginalData != null && __isset.originalData)
    {
        if (!__first) { __sb.Append(", "); }
        __first = false;
        __sb.Append("OriginalData: ");
        __sb.Append(OriginalData.ToDebugString());
    }
    if (Debug != null && __isset.debug)
    {
        if (!__first) { __sb.Append(", "); }
        __first = false;
        __sb.Append("Debug: ");
        __sb.Append(Debug);
    }
    __sb.Append(")");
    return(__sb.ToString());
}
/// <summary>
/// Binds a rectangle's Opacity to a nested property of an internal type.
/// </summary>
public void BindInternalClass ()
{
    InternalData source = new InternalData ();
    source.InnerData = new Data { Opacity = 1.0f };

    Rectangle target = new Rectangle { Opacity = 0f };
    target.DataContext = source;
    target.SetBinding (Shape.OpacityProperty, new Binding ("InnerData.Opacity"));
}
/// <summary>
/// Attempts to fetch the cache entry stored under <paramref name="key"/>.
/// </summary>
public bool TryGetEntry(object key, out ICacheEntry entry) => InternalData.TryGetValue(key, out entry);
/// <summary>
/// Renders a level-0 voltage-balance result (220/330 kV interval data) into an
/// Excel workbook: a title block, a header/footer with the time periods, then
/// one balance-part block per section containing nested PS and TI blocks.
/// </summary>
public ExtendedXlsFile TestExecuteBalansHierLev0_Valtage_220_330_Interval(BalansHierLev0Result HierLev0, TExportExcelAdapterType ExportType, IVisualDataRequestObjectsNames getNameInterface)
{
    Dictionary <ID_TypeHierarchy, string> dictionaryOfNames = HierLev0.DictionaryOfNames;
    InternalData internalData = new InternalData(ExportType, HierLev0.NumbersValues);
    InitBlock initBlock = new InitBlock(internalData);

    // Title: balance name plus the reporting period.
    Classes.TitleInfo titleData = new Classes.TitleInfo(getNameInterface.GetBalanceNameForHierLev0(HierLev0.BalanceId), HierLev0.DTStart, HierLev0.DTEnd);
    TitleBlock titleBlock = new TitleBlock(titleData);
    initBlock.AddBlock(titleBlock);

    // Build consecutive [point i, point i+1] periods from the discrete time axis.
    List <DateTime> dateTimeListForPeriod = getNameInterface.GetDateTimeListForPeriod(HierLev0.DTStart, HierLev0.DTEnd, HierLev0.DiscreteType, HierLev0.IsSummerOrWinter);
    List <DateTimePeriod> dateTimePeriods = new List <DateTimePeriod>();
    for (int i = 0; i < dateTimeListForPeriod.Count; i += 1)
    {
        var dateList = dateTimeListForPeriod.GetRange(i, Math.Min(2, dateTimeListForPeriod.Count - i));
        if (dateList.Count > 1)
        {
            dateTimePeriods.Add(new DateTimePeriod(dateList[0], dateList[1]));
        }
    }
    // Closing period: last axis point up to DTEnd plus 30 minutes.
    dateTimePeriods.Add(new DateTimePeriod(dateTimeListForPeriod.Max(), HierLev0.DTEnd.AddMinutes(30)));

    HeaderFooterBlock headerFooter = new HeaderFooterBlock(new Data.HeaderFooterData(HierLev0.VoltageClass, dateTimePeriods));
    initBlock.AddBlock(headerFooter);

    foreach (TIntegral_HierLev0_Values balanceSection in HierLev0.Result_Values)
    {
        // Section flags: does the part participate in the general balance / is it RSK.
        BalansHierLev0.Data.Full.BalancePartData balData = new BalansHierLev0.Data.Full.BalancePartData(getNameInterface.GetBalanceSectionName(balanceSection.HierLev0Group_Name),
            HierLev0.BalPartList.Where(x => x.IsUseInGeneralBalance).Select(x => x.Name).Contains(balanceSection.HierLev0Group_Name),
            HierLev0.BalPartList.Where(x => x.IsRsk).Select(x => x.Name).Contains(balanceSection.HierLev0Group_Name));
        BalancePartBlock balPartBlock = new BalancePartBlock(balData);

        foreach (KeyValuePair <ID_IsOurSide, TIntegral_PS_ValuesForHierLev0> psBalSect in balanceSection.HierLev0DetailGroupResult)
        {
            string psName;
            TIntegral_PS_ValuesForHierLev0 psBalSectData = psBalSect.Value;
            ID_IsOurSide side = psBalSect.Key;
            // Resolve the PS display name: try the preloaded dictionary first,
            // falling back to the name service. "Our side" picks the hierarchy branch.
            ID_TypeHierarchy key = new ID_TypeHierarchy(enumTypeHierarchy.Dict_PS, -1);
            key.TypeHierarchy = side.IsOurSide ? enumTypeHierarchy.Dict_PS : enumTypeHierarchy.Dict_Contr_PS;
            key.ID = side.ID;
            if (!dictionaryOfNames.TryGetValue(key, out psName))
            {
                psName = getNameInterface.GetPSName(side.ID, !side.IsOurSide);
            }

            PsBlock psBlock = new PsBlock(psName);
            foreach (TI_Integral_ValuesForHierLev0 tiPsBalSect in psBalSectData.TI_List)
            {
                string tIName = string.Empty;
                // Same lookup pattern for the TI name, keyed by its hierarchy type.
                key.TypeHierarchy = tiPsBalSect.TypeHierarchy;
                key.ID = tiPsBalSect.ID;
                if (!dictionaryOfNames.TryGetValue(key, out tIName))
                {
                    switch (tiPsBalSect.TypeHierarchy)
                    {
                        case enumTypeHierarchy.Dict_PS:
                            tIName = getNameInterface.GetPSName(tiPsBalSect.ID, !side.IsOurSide);
                            break;
                        case enumTypeHierarchy.Info_TI:
                            tIName = getNameInterface.GetTIName(tiPsBalSect.ID, false);
                            break;
                        case enumTypeHierarchy.Info_ContrTI:
                            tIName = getNameInterface.GetTIName(tiPsBalSect.ID, true);
                            break;
                        case enumTypeHierarchy.Info_TP:
                            tIName = getNameInterface.GetTPName(tiPsBalSect.ID);
                            break;
                    }
                }

                List <TVALUES_DB> inputValues;
                List <TVALUES_DB> outputValues;
                Data.TiData tiData = new Data.TiData(tIName);
                // Channel 1 = input interval, channel 2 = output interval;
                // values divided by 1000 (unit scaling — presumably kWh to MWh, confirm).
                if (tiPsBalSect.Val_List.TryGetValue(1, out inputValues))
                {
                    tiData.InputInterval = inputValues.Select(x => new TVALUES_DB(x.F_FLAG, x.F_VALUE / 1000)).ToList();
                }
                if (tiPsBalSect.Val_List.TryGetValue(2, out outputValues))
                {
                    tiData.OutputInterval = outputValues.Select(x => new TVALUES_DB(x.F_FLAG, x.F_VALUE / 1000)).ToList();
                }

                TiBlock tiBlock = new TiBlock(tiData);
                psBlock.AddBlock(tiBlock);
            }
            balPartBlock.AddBlock(psBlock);
        }
        headerFooter.AddBlock(balPartBlock);
    }

    ExtendedXlsFile xls = new ExtendedXlsFile(ExportType);
    initBlock.Render(xls);
    return xls;
}
/* Method: SaveTrain
 *
 * Save the training structure to a file, with the format as specified in <ReadTrainFromFile>
 *
 * Return:
 *   True on success, false on failure.
 *
 * See also:
 *   <ReadTrainFromFile>, <SaveTrainToFixed>, <fann_save_train at http://libfann.github.io/fann/docs/files/fann_train-h.html#fann_save_train>
 *
 * This function appears in FANN >= 1.0.0.
 */
public bool SaveTrain(string filename) => InternalData.save_train(filename);
/* Method: Dispose
 *
 * Releases the native training data held by this instance.
 */
public void Dispose() => InternalData.Dispose();
/* Method: SaveTrainToFixed
 *
 * Saves the training structure to a fixed point data file.
 *
 * Very useful for testing the quality of a fixed point network.
 *
 * Return:
 *   True on success, false on failure.
 *
 * See also:
 *   <SaveTrain>, <fann_save_train_to_fixed at http://libfann.github.io/fann/docs/files/fann_train-h.html#fann_save_train_to_fixed>
 *
 * This function appears in FANN >= 1.0.0.
 */
public bool SaveTrainToFixed(string filename, uint decimalPoint) => InternalData.save_train_to_fixed(filename, decimalPoint);
/// <summary>
/// Value equality: instances match when both CVBucketed and Area match.
/// </summary>
protected bool Equals(InternalData other) => CVBucketed.Equals(other.CVBucketed) && Area.Equals(other.Area);
/// <summary>
/// Enqueues an item for checking; high-priority items go to the front.
/// </summary>
private static void PushData(InternalData data, bool highPriority)
{
    if (data == null)
    {
        return;
    }

    lock (targetList)
    {
        // Front of the list is consumed first, so priority items are inserted there.
        if (highPriority)
        {
            targetList.Insert(0, data);
        }
        else
        {
            targetList.Add(data);
        }

        // Wake every worker waiting on the queue.
        Monitor.PulseAll(targetList);
    }
}
/// <summary>
/// One doctor, two patients where only the first has measurements: verifies
/// the measurement-less-patient exclusion flag and the averaging across all
/// included patients (100 and 0 average to 50).
/// </summary>
public void Calculator_Calculate_OneDoctorWithTwoPacientsAndMeasurementsForOnePacient()
{
    var data = new InternalData();
    var doc = new InternalDataDoctor() { DoctorId = 1 };
    var pac0 = new InternalDataPacient() { DoctorId = doc.DoctorId, PacientId = 1 };
    var pac1 = new InternalDataPacient() { DoctorId = doc.DoctorId, PacientId = 2 };
    var me00 = new InternalDataMeasurement() { PacientId = pac0.PacientId, MeasurementId = 1 };
    var me01 = new InternalDataMeasurement() { PacientId = pac0.PacientId, MeasurementId = 2 };
    data.Doctors.Add(doc);
    data.Pacients.Add(pac0);
    data.Measurements.Add(me00);
    data.Measurements.Add(me01);

    // Measurements exist but carry no height/timestamp components yet.
    var res0 = Calc.Calculate(data, CalculationType.ByDoctor, excludeDoctorsWithoutPacients: false,
        excludePacientsWithoutMeasurements: true);
    //pacient 0 with bad measurement (result should equals 0)
    Assert.AreEqual(1, res0.Count(), "res0 must contains 1 result");
    Assert.AreEqual(0d, res0.First().Result, "result for res0 must equals 0");

    // Attach components: +100 height over 7 days => growth rate 100 per week.
    var hc00 = new InternalDataHeighComponent() { MeasurementId = me00.MeasurementId, Height = 100 };
    var hc01 = new InternalDataHeighComponent() { MeasurementId = me01.MeasurementId, Height = 200 };
    var dt = DateTime.Now;
    var ts00 = new InternalDataTimestamp() { MeasurementId = me00.MeasurementId, Timestamp = dt };
    var ts01 = new InternalDataTimestamp() { MeasurementId = me01.MeasurementId, Timestamp = dt.AddDays(7) };
    data.HeighComponent.Add(hc00);
    data.HeighComponent.Add(hc01);
    data.Timestamps.Add(ts00);
    data.Timestamps.Add(ts01);

    var res1 = Calc.Calculate(data, CalculationType.ByDoctor, excludeDoctorsWithoutPacients: false,
        excludePacientsWithoutMeasurements: true);
    //pacient 0 with bad measurement (result should equals 0)
    Assert.AreEqual(1, res1.Count(), "res1 must contains 1 result");
    Assert.AreEqual(100d, res1.First().Result, "result for res1 must equals 100");

    //add one pacient without measurement
    data.Pacients.Add(pac1);
    var res2 = Calc.Calculate(data, CalculationType.ByDoctor, excludeDoctorsWithoutPacients: false,
        excludePacientsWithoutMeasurements: true);
    Assert.AreEqual(1, res2.Count(), "res2 must contains 1 result");
    Assert.AreEqual(100d, res2.First().Result, "result for res2 must equals 100");

    //if we calc 2 pacients (one without measurements) then result must be (100 - 0) / 2 = 50
    var res3 = Calc.Calculate(data, CalculationType.ByDoctor, excludeDoctorsWithoutPacients: false,
        excludePacientsWithoutMeasurements: false);
    Assert.AreEqual(1, res3.Count(), "res3 must contains 1 result");
    Assert.AreEqual(50d, res3.First().Result, "result for res3 must equals 50");
}
/// <summary>
/// Coroutine that answers a Tick with a Tock RPC and finishes on the next frame.
/// </summary>
private IEnumerator Tick(InternalData internalData)
{
    SendInternalRpc(internalData.remoteId, InternalMethod.Tock);
    yield return null;
}
/* Method: ShuffleTrainData
 *
 * Shuffles training data, randomizing the order.
 * Recommended for incremental training; has no influence during batch training.
 *
 * This function appears in FANN >= 1.1.0.
 */
public void ShuffleTrainData() => InternalData.shuffle_train_data();
/// <summary>
/// Records the grid's final dimensions and persists settings once resizing ends.
/// </summary>
private void Controller_ResizeEnd(object sender, EventArgs e)
{
    InternalData.SettingsGrid.Width = Width;
    InternalData.SettingsGrid.Height = Height;
    InternalData.SaveSettings();
}
/// <summary>
/// Pulls doctors, patients, measurements and their timestamp/height components
/// out of the data store into a flat InternalData snapshot (stored in `data`).
/// </summary>
public void FetchData(DataStore.MainInterface dataStore)
{
    if (dataStore == null)
        throw new ArgumentNullException("dataStore");

    var d = new InternalData();
    // Doctors are plain entities; patient and measurement links come from relations.
    d.Doctors = dataStore.Entities
        .Where(e => IsDoctor(e))
        .Select(e => new InternalDataDoctor() { DoctorId = e.Id })
        .ToList();
    d.Pacients = dataStore.Relations
        .Where(r => IsDoctor(r.Item1) && IsPacient(r.Item2))
        .Select(r => new InternalDataPacient() { DoctorId = r.Item1.Id, PacientId = r.Item2.Id })
        .ToList();
    d.Measurements = dataStore.Relations
        .Where(r => IsPacient(r.Item1) && IsMeasurement(r.Item2))
        .Select(r => new InternalDataMeasurement() { PacientId = r.Item1.Id, MeasurementId = r.Item2.Id })
        .ToList();

    //var dmIds = d.Measurements.Select(m => m.MeasurementId).ToArray();
    // Dictionary used as a hash set for O(1) membership checks below.
    var dmIds = d.Measurements.Select(m => m.MeasurementId).ToDictionary(m => m);
    d.Timestamps = dataStore.TimestampComponents
        .Where(ts => !ts.IsMissing)
        //.Join(dmIds, tc => tc.EntityId, i => i, (ts, i) => ts)
        // Keep only components that belong to a known measurement.
        .Where(ts => dmIds.ContainsKey(ts.EntityId))
        .Select(ts => new InternalDataTimestamp() { MeasurementId = ts.EntityId, Timestamp = ts.Timestamp })
        .ToArray();
    d.HeighComponent = dataStore.HeightComponents
        .Where(h => !h.IsMissing)
        //.Join(dmIds, hc => hc.EntityId, i => i, (hs, i) => hs)
        .Where(h => dmIds.ContainsKey(h.EntityId))
        .Select(h => new InternalDataHeighComponent()
        {
            MeasurementId = h.EntityId,
            Height = (h.Unit == DataStore.LengthUnit.Inch ? h.Value : h.Value * 39.37) //Meters to inches
        })
        .ToArray();

    data = d;
}
/* Method: MergeTrainData
 *
 * Merges the given data into the data contained in the <TrainingData>.
 *
 * This function appears in FANN >= 1.1.0.
 */
public void MergeTrainData(TrainingData data) => InternalData.merge_train_data(data.InternalData);
/// <summary>
/// Asynchronously gathers all information required for a live stream and
/// returns it once every fetch has completed (or throws on timeout/error).
/// </summary>
public static LiveStreamInfo GetLiveStreamInfo(PlayerStatus playerStatus, CookieContainer cc)
{
    if (playerStatus == null)
    {
        throw new ArgumentNullException("playerStatus");
    }

    if (cc == null)
    {
        throw new ArgumentNullException("cc");
    }

    // Object that accumulates the results (and any callback exception).
    var internalData = new InternalData()
    {
        Cookie = cc,
    };
    // One event per fetch; all three must be signalled before returning.
    var eventList = new AutoResetEvent[3]
    {
        new AutoResetEvent(false),
        new AutoResetEvent(false),
        new AutoResetEvent(false),
    };

    // playerStatus was already supplied by the caller, so signal immediately.
    internalData.LiveStreamInfo.PlayerStatus = playerStatus;
    eventList[0].Set();

    // Fetch publishstatus — only the broadcast owner can obtain it.
    if (playerStatus.Stream.IsOwner)
    {
        BeginGetPublishStatus(
            playerStatus.Stream.Id,
            internalData,
            eventList[1]);
    }
    else
    {
        eventList[1].Set();
    }

    // Fetch the broadcast page info: titles etc. get truncated to a fixed
    // length in playerstatus, so the page is the authoritative source.
    BeginGetLiveInfo(
        playerStatus.Stream.Id,
        internalData,
        eventList[2]);

    // Determine the timeout (30 seconds by default).
    var timeout = WebUtil.DefaultTimeout;
    if (timeout < 0)
    {
        timeout = 30 * 1000;
    }

    // Wait for each async operation to finish.
    foreach (var ev in eventList)
    {
        if (!ev.WaitOne(TimeSpan.FromMilliseconds(timeout)))
        {
            throw new TimeoutException(
                "放送情報の取得がタイムアウトしました。");
        }

        // Rethrow any exception captured by the async callbacks.
        var ex = internalData.Exception;
        if (ex != null)
        {
            throw new NicoLiveException(
                "GetLiveStreamInfoでエラーが発生しました。", ex);
        }
    }

    return internalData.LiveStreamInfo;
}
/* Method: GetTrainInput
 * Gets the training input data at the given position
 *
 * Returns:
 *   An array of input training data at the given position
 *
 * See also:
 *   <GetTrainOutput>, <SetTrainData>
 *
 * This function appears in FANN >= 2.3.0.
 */
public DataAccessor GetTrainInput(uint position) => DataAccessor.FromPointer(InternalData.get_train_input(position), (int)InputCount);
/// <summary>
/// Starts fetching publishstatus asynchronously.
/// </summary>
private static void BeginGetPublishStatus(long liveId, InternalData internalData, AutoResetEvent ev)
{
    WebUtil.RequestHttpAsync(
        NicoString.GetPublishStatusUrl(liveId),
        null,
        internalData.Cookie,
        (result, data) =>
        {
            try
            {
                // publishstatus can only be obtained by the broadcast owner.
                var v = PublishStatus.CreateFromXml(liveId, GetXml(data));

                internalData.LiveStreamInfo.PublishStatus = v;
            }
            catch (Exception ex)
            {
                // Stash the failure; the waiting caller rethrows it.
                internalData.Exception = ex;
            }

            // Signal completion whether we succeeded or failed.
            ev.Set();
        });
}
/* Method: GetTrainOutput
 * Gets the training output data at the given position
 *
 * Returns:
 *   An array of output training data at the given position
 *
 * See also:
 *   <GetTrainInput>
 *
 * This function appears in FANN >= 2.3.0.
 */
public DataAccessor GetTrainOutput(uint position)
{
    // Wrap the native output pointer in a managed accessor of OutputCount elements.
    DataAccessor accessor = DataAccessor.FromPointer(InternalData.get_train_output(position), (int)OutputCount);
    return accessor;
}
/* Method: ScaleTrainData
 *
 * Scales the inputs and outputs in the training data to the specified range.
 *
 * A simplified scaling method, mostly useful when all the data is known to be
 * in one range and should be transformed to another range.
 *
 * Not recommended on subsets of data, as the complete input range might not
 * be available in that subset. For more powerful scaling, consider
 * <NeuralNet::ScaleTrain>.
 *
 * See also:
 *   <ScaleOutputTrainData>, <ScaleInputTrainData>, <fann_scale_train_data at http://libfann.github.io/fann/docs/files/fann_train-h.html#fann_scale_train_data>
 *
 * This function appears in FANN >= 2.0.0.
 */
public void ScaleTrainData(double new_min, double new_max) => InternalData.scale_train_data(new_min, new_max);
/* Method: SubsetTrainData
 *
 * Changes the training data to a subset, starting at position *pos*
 * and *length* elements forward. Use the copy constructor to work
 * on a new copy of the training data.
 *
 * >TrainingData fullDataSet = new TrainingData();
 * >fullDataSet.ReadTrainFromFile("somefile.train");
 * >TrainingData smallDataSet = new TrainingData(fullDataSet);
 * >smallDataSet->SubsetTrainData(0, 2); // Only use first two
 * >// Use smallDataSet ...
 * >small_data_set.Dispose();
 *
 * See also:
 *   <fann_subset_train_data http://libfann.github.io/fann/docs/files/fann_train-h.html#fann_subset_train_data>
 *
 * This function appears in FANN >= 2.0.0.
 */
public void SubsetTrainData(uint pos, uint length) => InternalData.subset_train_data(pos, length);
/// <summary>
/// Calculate stored data: builds a flat doctor/pacient/measurement table via
/// left outer joins, derives each pacient's mean growth rate from consecutive
/// measurement pairs, then aggregates per doctor or per pacient.
/// </summary>
/// <param name="data">Internal data storage</param>
/// <param name="calcType">Calculate average data by each doctor or by each doctor's pacient</param>
/// <param name="calcTimeType">Calculate results by any part of year (year/month/week/day)</param>
/// <param name="excludeDoctorsWithoutPacients">Excludes from result doctors with no one pacient</param>
/// <param name="excludePacientsWithoutMeasurements">Excludes from result pacients with one or no one measuremtns</param>
/// <returns>Calculation result</returns>
public IEnumerable<AnalysisModule.SimpleCalculationResult> Calculate(
    InternalData data,
    CalculationType calcType = CalculationType.ByDoctor,
    CalculationTimeType calcTimeType = CalculationTimeType.PerWeek,
    bool excludeDoctorsWithoutPacients = false,
    bool excludePacientsWithoutMeasurements = false)
{
    // Every collection must be populated before calculating.
    if (data == null)
        throw new ArgumentNullException("data");
    if (data.Doctors == null)
        throw new ArgumentNullException("data.Doctors", "Feel 'Doctors' array before calling Calculation");
    if (data.Pacients == null)
        throw new ArgumentNullException("data.Pacients", "Feel 'Pacients' array before calling Calculation");
    if (data.Timestamps == null)
        throw new ArgumentNullException("data.Timestamps", "Feel 'Timestamps' array before calling Calculation");
    if (data.Measurements == null)
        throw new ArgumentNullException("data.Measurements", "Feel 'Measurements' array before calling Calculation");
    if (data.HeighComponent == null)
        throw new ArgumentNullException("data.HeighComponent", "Feel 'HeighComponent' array before calling Calculation");
    if (!data.Doctors.Any())
        throw new ArgumentException("You must feel doctors array before calling Calculation", "data.Doctors");

    var res = new List<AnalysisModule.SimpleCalculationResult>();

    // Flatten the hierarchy with left outer joins; missing links become nulls.
    // Height components are pre-filtered to positive, finite, non-NaN values.
    var bigTableData = data.Doctors
        .LeftOuterJoin(data.Pacients, d => d.DoctorId, p => p.DoctorId,
            (d, p) => new { DoctorId = d.DoctorId, PacientId = p == null ? (long?)null : p.PacientId })
        .LeftOuterJoin(data.Measurements, i => i.PacientId, m => m.PacientId,
            (i, m) => new { i.DoctorId, i.PacientId, MeasurementId = m == null ? (long?)null : m.MeasurementId })
        .LeftOuterJoin(data.Timestamps, i => i.MeasurementId, t => t.MeasurementId,
            (i, t) => new { i.DoctorId, i.PacientId, i.MeasurementId, Timestamp = t == null ? (DateTime?)null : t.Timestamp })
        .LeftOuterJoin(data.HeighComponent
            .Where(h => h.Height > 0
                && !double.IsInfinity(h.Height)
                && !double.IsNegativeInfinity(h.Height)
                && !double.IsPositiveInfinity(h.Height)
                && !double.IsNaN(h.Height)
            ),
            i => i.MeasurementId, hc => hc.MeasurementId,
            (i, hc) => new { i.DoctorId, i.PacientId, i.MeasurementId, i.Timestamp, Height = hc == null ? (double?)null : hc.Height })
        .ToArray();

    //Table generated. Group data by Doctor, Pacient and Measurement
    var doctors = bigTableData
        .GroupBy(i => i.DoctorId)
        .Select(g => new
        {
            g.FirstOrDefault().DoctorId,
            //Group data by pacient
            Pacients = g.Where(p => p != null)
                .GroupBy(g2 => g2.PacientId)
                .Select(g2 => new
                {
                    g2.FirstOrDefault().PacientId,
                    // Only measurements with both a height and a timestamp count;
                    // sorted chronologically so consecutive pairs are adjacent in time.
                    Measurements = g2
                        .Where(i => i.Height != null && i.Timestamp != null)
                        .OrderBy(m => m.Timestamp)
                        .ToArray()
                })
                .Select(g2 => new
                {
                    g2.PacientId,
                    // A pacient needs at least two valid measurements to have a rate.
                    MeasurementsExists = g2.Measurements.Count() > 1,
                    Measurements = g2.Measurements.Count() > 1
                        ? Enumerable.Range(0, g2.Measurements.Count() - 1)
                            .Select(i => new //Get measurement pairs for pacient
                            {
                                Start = g2.Measurements.ElementAt(i),
                                End = g2.Measurements.ElementAt(i + 1)
                            })
                            .Select(i => new //Calc start and end measure data
                            {
                                HeightStart = i.Start.Height.Value,
                                HeightEnd = i.End.Height.Value,
                                TimestampStart = i.Start.Timestamp.Value,
                                TimestampEnd = i.End.Timestamp.Value,
                            })
                            .Select(i => new //get height change by part of time
                            {
                                HeightChange = i.HeightEnd - i.HeightStart,
                                TimeParts = GetTimePart((i.TimestampEnd - i.TimestampStart).TotalDays, calcTimeType),
                            })
                            .Select(i => i.HeightChange / i.TimeParts) //calc growth per week
                            .ToArray()
                        : Enumerable.Empty<double>().ToArray() //return empty array if no one or one only measurement
                })
                .Select(g2 => new
                {
                    g2.PacientId,
                    g2.MeasurementsExists,
                    //g2.Measurements //!!! enable to debug
                    MeanRateGrowhPerTimePart = g2.MeasurementsExists ? g2.Measurements.Average() : 0
                })
        })
        //Exclude from data pacients with one or less measurements (is setted)
        .Select(i => new
        {
            i.DoctorId,
            Pacients = i.Pacients
                .Where(p => p.PacientId != null)
                .Where(n => !excludePacientsWithoutMeasurements || n.MeasurementsExists)
                .ToArray()
        })
        //Exclude from data doctors without pacients (is setted)
        .Where(i => !excludeDoctorsWithoutPacients || i.Pacients.Any())
        .ToArray()
        ;

    //Get enum description attribute (e.g. "per week") for the result labels.
    var perName = calcTimeType.ToString();
    var memInfo = calcTimeType.GetType().GetMember(perName).FirstOrDefault();
    if (memInfo != null)
    {
        var descrAttr = memInfo.GetCustomAttributes(typeof(DescriptionAttribute), false).FirstOrDefault() as DescriptionAttribute;
        if (descrAttr != null)
            perName = "per " + descrAttr.Description;
    }

    //generate result from data
    foreach (var doc in doctors)
    {
        switch (calcType)
        {
            case CalculationType.ByPacient:
                // One result row per pacient.
                res.AddRange(
                    doc.Pacients.Select(i => new AnalysisModule.SimpleCalculationResult()
                    {
                        Result = i.MeanRateGrowhPerTimePart,
                        AnalysisName = string.Format("{0} {3} for pacient (id:{1}) of doctor (id:{2})", CalcTaskName,
                            i.PacientId, doc.DoctorId, perName)
                    }));
                break;
            case CalculationType.ByDoctor:
                // One result row per doctor: average over his pacients (0 if none).
                res.Add(new AnalysisModule.SimpleCalculationResult()
                {
                    Result = doc.Pacients.Any() ? doc.Pacients.Average(p => p.MeanRateGrowhPerTimePart) : 0,
                    AnalysisName = string.Format("{0} {2} for doctor (id:{1})", CalcTaskName, doc.DoctorId, perName)
                });
                break;
        }
    }

    return res;
}
/* Method: ScaleOutputTrainData
 *
 * Scales the outputs in the training data to the specified range.
 *
 * A simplified scaling method, mostly useful when all the data is known to be
 * in one range and should be transformed to another range.
 *
 * Not recommended on subsets of data, as the complete input range might not
 * be available in that subset. For more powerful scaling, consider
 * <NeuralNet::ScaleTrain>.
 *
 * See also:
 *   <ScaleInputTrainData>, <ScaleTrainData>, <fann_scale_output_train_data at http://libfann.github.io/fann/docs/files/fann_train-h.html#fann_scale_output_train_data>
 *
 * This function appears in FANN >= 2.0.0.
 */
public void ScaleOutputTrainData(float new_min, float new_max) => InternalData.scale_output_train_data(new_min, new_max);