// Thread-safe trace sink: serializes concurrent OnTrace calls on lockObj, prepares
// the output target, writes the entry, and flushes immediately so entries are not
// lost in a buffered stream if the process dies.
// NOTE(review): Prepare(), Write() and `file` are declared elsewhere in this class;
// presumably Prepare() opens/rotates the underlying file — confirm against the full type.
public override void OnTrace(Trace t) { lock (lockObj) { Prepare(); Write(t); file.Flush(); } }
// Exception-flow regression test: a try/catch nested inside a finally block must run
// in order. Expected trace "0123456": 0 before, 1 in outer try, 2 on finally entry,
// 3 before throwing inside finally, 4 in the inner catch, 5 after the inner
// try/catch, 6 after the outer try/finally. Returns _trace.Match() (0 presumably
// meaning the observed sequence equals the expected one — project convention).
public int Run() { _trace = new Trace("TryCatchInFinallyTest", "0123456"); _trace.Write("0"); try { _trace.Write("1"); } finally { _trace.Write("2"); try { _trace.Write("3"); throw new InvalidProgramException(); } catch(InvalidProgramException e) { Console.WriteLine(e); _trace.Write("4"); } _trace.Write("5"); } _trace.Write("6"); return _trace.Match(); }
// Records an HTTP response into the supplied Trace entry for a bounded diagnostics list.
// Reads the body synchronously via .Result (NOTE(review): deadlock risk on a
// synchronization context — confirm this only runs on thread-pool threads).
// Skips empty bodies and 429 (rate-limited) responses. Resolves a "friendly" URI by
// exact case-insensitive match against APIUriList, falling back to a prefix match,
// and records controller/action from the request's action descriptor when available.
// TraceList is capped at 64 entries; the oldest is evicted first.
// All exceptions are deliberately swallowed — tracing must never break request handling.
public static void ParseResponse(Trace trace, HttpResponseMessage response) { try { var responseBody = response.Content; trace.Response = responseBody.ReadAsStringAsync().Result; trace.Status = ((int)response.StatusCode).ToString(); if (trace.Response.Length > 0 && !trace.Status.Equals("429")) { trace.ResponseBodyLength = responseBody.Headers.ContentLength; var ad = response.RequestMessage.GetActionDescriptor(); if (ad != null) { var action = new ApiDescription { HttpMethod = response.RequestMessage.Method, RelativePath = response.RequestMessage.RequestUri.PathAndQuery.Substring(1) }; var fid = action.GetFriendlyId(); trace.FriendlyURI = APIUriList.FirstOrDefault(x => x.Equals(fid, StringComparison.OrdinalIgnoreCase)); if (string.IsNullOrWhiteSpace(trace.FriendlyURI)) trace.FriendlyURI = APIUriList.FirstOrDefault(x => x.StartsWith(fid, StringComparison.OrdinalIgnoreCase)); trace.Action = ad.ControllerDescriptor.ControllerName + " / " + ad.ActionName; } if (TraceList.Count >= 64) TraceList.RemoveAt(0); TraceList.Add(trace); } } catch (Exception) { } }
// Exception-flow regression test: the same throwing helper f1() is invoked twice and
// each failure is caught independently. Expected trace "0123401235" — presumably f1()
// writes "0123" each time (TODO confirm against f1's definition), followed by "4"
// from the first catch and "5" from the second. Returns _trace.Match().
public int Run() { _trace = new Trace("PendingTest", "0123401235"); try { f1(); } catch(Exception e) { Console.WriteLine(e); _trace.Write("4"); } try { f1(); } catch(Exception e) { Console.WriteLine(e); _trace.Write("5"); } return _trace.Match(); }
// Exception-flow regression test for rethrow (`throw;`) through a nested catch:
// the inner catch writes "4" and rethrows the ORIGINAL exception (stack preserved);
// the outer catch writes "5" and then the exception message. Expected "0123456" —
// presumably foo() writes "123" and throws with Message "6" (defined elsewhere;
// TODO confirm). Returns _trace.Match().
public int Run() { _trace = new Trace("GoryNativePastTest", "0123456"); _trace.Write("0"); try { try { foo(); } catch(Exception e) { Console.WriteLine(e); _trace.Write("4"); throw; } } catch(Exception e) { _trace.Write("5"); _trace.Write(e.Message); } return _trace.Match(); }
// Sets the direction of a newly created refraction trace using Snell's law in vector
// form. The surface normal is flipped if needed so it opposes the incoming ray; the
// incoming direction is decomposed into a normal projection and a tangential part L1,
// and the refracted tangential magnitude is scaled by n1/n2 (L2). Normal incidence
// passes the direction through unchanged. If the tangential component cannot be
// normalized (zero vector), the inverted surface normal is used as a fallback.
// NOTE(review): total internal reflection (L2 > 1 => Sqrt of a negative) is not
// handled here — presumably fullInternalReflects (referenced in the original comment)
// is checked by the caller; confirm.
public void initCreatedRefractTrace(Trace refractTrace, UnitVector surfaceNormal, Scientrace.Object3d fromObject3d, Scientrace.Object3d toObject3d) { double oldrefindex = fromObject3d.materialproperties.refractiveindex(this); double newrefindex = toObject3d.materialproperties.refractiveindex(this); //for definitions on the parameters below, check fullInternalReflects function. UnitVector incoming_trace_direction = this.traceline.direction; if (incoming_trace_direction.dotProduct(surfaceNormal) > 0) { surfaceNormal = surfaceNormal.negative(); } Scientrace.UnitVector nnorm = surfaceNormal.negative(); Scientrace.Vector incoming_normal_projection = nnorm*(incoming_trace_direction.dotProduct(nnorm)); Vector L1 = incoming_trace_direction-incoming_normal_projection; double L2 = (oldrefindex/newrefindex)*L1.length; if (incoming_trace_direction == incoming_normal_projection) { //in case of normal incident light: do not refract. refractTrace.traceline.direction = incoming_trace_direction; return; } try { refractTrace.traceline.direction = ((nnorm*Math.Sqrt(1 - Math.Pow(L2,2))) +(L1.tryToUnitVector()*L2)).tryToUnitVector(); } catch (ZeroNonzeroVectorException) { Console.WriteLine("WARNING: cannot define direction for refraction trace. Using surface normal instead. (L1: "+incoming_trace_direction.trico()+", L2:"+incoming_normal_projection.trico()+")."); refractTrace.traceline.direction = nnorm; } //end try/catch }
// Exception-flow regression test: throwing a NEW exception from inside a catch block
// must propagate to the enclosing handler. Expected "0123456": 0,1 before/inside the
// outer try; 2 inside the inner try before throwing; 3 in the inner catch, which then
// throws Exception("5"); 4 in the outer catch followed by that message ("5"); 6 after
// the try/catch. Returns _trace.Match().
public int Run() { _trace = new Trace("ThrowInCatchTest", "0123456"); _trace.Write("0"); try { _trace.Write("1"); try { _trace.Write("2"); throw new Exception("....."); } catch(Exception e) { Console.WriteLine(e); _trace.Write("3"); throw new Exception("5"); } } catch(Exception e) { Console.WriteLine(e); _trace.Write("4"); _trace.Write(e.Message); } _trace.Write("6"); return _trace.Match(); }
/// <summary>
/// Creates a per-assembly trace node under <paramref name="parent"/> for
/// <paramref name="assembly"/>. The assembly starts excluded from the trace
/// and with an empty type map.
/// </summary>
public AssemblyTrace(Trace parent, CST.AssemblyDef assembly)
{
    TypeMap = new Map<CST.TypeName, TypeTrace>();
    IncludeAssembly = false;
    Assembly = assembly;
    Parent = parent;
}
/// <summary>
/// Computes the polyline length of <paramref name="trace"/> between two point
/// indices by summing the Euclidean distances between consecutive sample points.
/// </summary>
/// <param name="trace">Trace whose ChannelX/ChannelY hold the coordinates.</param>
/// <param name="fromPoint">Inclusive start index in [0, Points.Count - 1].</param>
/// <param name="toPoint">Inclusive end index in [0, Points.Count - 1].</param>
/// <returns>The accumulated length; 0 when fromPoint >= toPoint.</returns>
/// <exception cref="ArgumentOutOfRangeException">Either index is out of range.</exception>
public float ComputeTraceLength(Trace trace, int fromPoint, int toPoint)
{
    var noOfPoints = trace.Points.Count;
    // BUGFIX: the original detected invalid indices but had an empty "//error"
    // placeholder branch, so bad arguments fell through to the loop and surfaced
    // as an opaque IndexOutOfRangeException (or silently returned 0). Fail fast
    // with explicit argument exceptions instead.
    if (fromPoint < 0 || fromPoint > (noOfPoints - 1))
        throw new ArgumentOutOfRangeException(nameof(fromPoint), fromPoint, "Index must lie within the trace's points.");
    if (toPoint < 0 || toPoint > (noOfPoints - 1))
        throw new ArgumentOutOfRangeException(nameof(toPoint), toPoint, "Index must lie within the trace's points.");
    IList<float> xVec = trace.ChannelX;
    IList<float> yVec = trace.ChannelY;
    var outLength = 0f;
    for (var i = fromPoint; i < toPoint; i++)
    {
        float xDiff = xVec[i] - xVec[i + 1];
        float yDiff = yVec[i] - yVec[i + 1];
        // Distance between two consecutive points.
        outLength += (float)Math.Sqrt(xDiff * xDiff + yDiff * yDiff);
    }
    return outLength;
}
public void TraceConstructorTest()
{
    // Arrange: a fresh identifier and an arbitrary context string.
    var expectedId = Guid.NewGuid();
    const string expectedContext = "ContextID";

    // Act: construct the trace under test.
    var trace = new Trace(expectedId, expectedContext);

    // Assert: both constructor arguments are exposed unchanged.
    Assert.AreEqual(expectedId, trace.Id);
    Assert.AreEqual(expectedContext, trace.ContextId);
}
/// <summary>
/// Parses a UNIPEN-style ink file into a <c>TraceGroup</c>: reads the X/Y
/// points-per-inch header fields into the capture-device metadata, then collects
/// every ".PEN_DOWN" .. ".PEN_UP" run of "x y" lines as one trace.
/// </summary>
/// <param name="path">Path of the ink file to read.</param>
/// <returns>The parsed trace group with its capture-device metadata attached.</returns>
public TraceGroup ReadFromInkFile(string path)
{
    var outTraceGroup = new TraceGroup();
    char[] splitter = { ' ' };
    var captureDevice = new CaptureDevice { SamplingRate = 100, Latency = 0f };
    // FIX: the original opened the FileStream/StreamReader without using-blocks and
    // only closed them on the success path, leaking both handles whenever parsing
    // threw (e.g. Convert.ToInt32 on a malformed line). using guarantees disposal.
    using (var input = new FileStream(path, FileMode.Open, FileAccess.Read))
    using (var fileReader = new StreamReader(input))
    {
        while (!fileReader.EndOfStream)
        {
            var strLine = fileReader.ReadLine();
            if (strLine == null) continue;
            // Header info: device resolution in points per inch.
            if (strLine.Contains(".X_POINTS_PER_INCH"))
            {
                var strToken = strLine.Split(splitter, StringSplitOptions.None);
                captureDevice.XDpi = Convert.ToInt32(strToken[1]);
            }
            if (strLine.Contains(".Y_POINTS_PER_INCH"))
            {
                var strToken = strLine.Split(splitter, StringSplitOptions.None);
                captureDevice.YDpi = Convert.ToInt32(strToken[1]);
            }
            // TODO: .POINTS_PER_SECOND is recognized but unused (the original split
            // the line here and discarded the result; dead code removed).
            if (!strLine.Trim().Equals(".PEN_DOWN")) continue;
            // Pen-down section: accumulate points until .PEN_UP or end of file.
            var trace = new Trace();
            var strCoord = "";
            while (!(strCoord = fileReader.ReadLine() ?? "").Trim().Equals(".PEN_UP") && !fileReader.EndOfStream)
            {
                var strToken = strCoord.Split(splitter, StringSplitOptions.None);
                var x = Convert.ToInt32(strToken[0]);
                var y = Convert.ToInt32(strToken[1]);
                trace.Points.Add(new PenPoint { X = x, Y = y });
            }
            outTraceGroup.Traces.Add(trace);
        }
    }
    outTraceGroup.CaptureDevice = captureDevice;
    return outTraceGroup;
}
/// <summary>
/// Intersects a trace with this object's two circular end faces: one at the base
/// location and one offset by the object's length vector.
/// TODO (carried over from original): the connecting side surface is not yet added.
/// </summary>
public override Intersection intersects(Trace trace)
{
    var endCaps = new List<Scientrace.FlatShape2d>
    {
        new Scientrace.Circle2d(this.loc, this.height, this.width, this.width.length),
        new Scientrace.Circle2d(this.loc + this.length.toLocation(), this.height, this.width, this.width.length),
    };
    return this.intersectFlatShapes(trace, endCaps);
}
/// <summary>
/// Initializes a new instance of the <see cref="WordAnalysis"/> class. The
/// morphological-rule and realizational-feature collections start empty; the root
/// allomorph and non-head analysis are left unset.
/// </summary>
/// <param name="shape">The phonetic shape.</param>
/// <param name="stratum">The stratum this analysis currently belongs to.</param>
/// <param name="curTrace">The current trace record.</param>
internal WordAnalysis(PhoneticShape shape, Stratum stratum, Trace curTrace) { m_shape = shape; m_pos = new HCObjectSet<PartOfSpeech>(); m_mrules = new List<MorphologicalRule>(); m_mrulesUnapplied = new Dictionary<MorphologicalRule, int>(); m_rzFeatures = new FeatureValues(); m_stratum = stratum; m_curTrace = curTrace; }
/// <summary>
/// Copy constructor: duplicates a spot's location (as a new Location instance),
/// intensity values, and its object/trace references, then rebuilds the
/// polarization vectors. Deliberately does NOT call object3d.spotted(..) again —
/// the source spot already registered its intensity with the object.
/// </summary>
public Spot(Scientrace.Spot copyFromSpot) { this.loc = new Scientrace.Location(copyFromSpot.loc); this.object3d = copyFromSpot.object3d; // do not spot again, so don't recall spotted from object3d this.intensity = copyFromSpot.intensity; this.trace = copyFromSpot.trace; this.intensityFraction = copyFromSpot.intensityFraction; this.fillPolVecs(); }
/// <summary>
/// Creates a new spot at <paramref name="loc"/> on <paramref name="object3d"/>.
/// Side effect: registers the intensity with the object via object3d.spotted(..)
/// before storing the values, then fills the polarization vectors.
/// (Contrast with the copy constructor, which skips the spotted(..) call.)
/// </summary>
public Spot(Scientrace.Location loc, Object3d object3d, double intensity, double intensityFraction, Scientrace.Trace trace) { this.loc = loc; this.object3d = object3d; this.object3d.spotted(intensity); this.intensity = intensity; this.intensityFraction = intensityFraction; this.trace = trace; this.fillPolVecs(); }
/// <summary>
/// Recursively prints each trace (formatted per <paramref name="style"/>) and then
/// its children, depth-first.
/// </summary>
private static void Dump(Trace[] traces, int style)
{
    for (int i = 0; i < traces.Length; i++)
    {
        var node = traces[i];
        Cout.WriteLine(node.ToString(style));
        // Only recurse into non-empty child arrays.
        if (node.Children.Length != 0)
        {
            Dump(node.Children, style);
        }
    }
}
// One-time scene setup: creates the move trace, resolves the AudioManager component
// from the "AudioManager" GameObject, zeroes both player scores, then loads the board.
// NOTE(review): the commented-out UI lookups (score texts / turn indicators) are kept
// from the original — confirm whether they can be deleted.
public void initialize() { moveTrace = new Trace (); audio = GameObject.Find("AudioManager").GetComponent<AudioManager>(); p1Score = p2Score = 0; /* p1ScoreText = GameObject.Find("P1ScoreText").GetComponent<Text>(); p2ScoreText = GameObject.Find("P2ScoreText").GetComponent<Text>(); defenderTurnIndicator = GameObject.Find("DefenderIndicator").GetComponent<TurnIndicator>(); attackerTurnIndicator = GameObject.Find("AttackerIndicator").GetComponent<TurnIndicator>(); */ loadBoard (); }
/// <summary>
/// Copy constructor. The shape, non-head analysis (when present) and realizational
/// features are cloned; the part-of-speech set, rule list and unapplied-rule counts
/// are copied into new collections; the root allomorph, current trace and stratum
/// are shared by reference.
/// </summary>
/// <param name="wa">The word analysis to copy.</param>
public WordAnalysis(WordAnalysis wa) { m_shape = wa.m_shape.Clone(); m_pos = new HCObjectSet<PartOfSpeech>(wa.m_pos); m_rootAllomorph = wa.m_rootAllomorph; if (wa.m_nonHead != null) m_nonHead = wa.m_nonHead.Clone(); m_mrules = new List<MorphologicalRule>(wa.m_mrules); m_mrulesUnapplied = new Dictionary<MorphologicalRule, int>(wa.m_mrulesUnapplied); m_rzFeatures = wa.m_rzFeatures.Clone(); m_curTrace = wa.m_curTrace; m_stratum = wa.m_stratum; }
// Exception-flow regression test. Expected trace "1234" — presumably DoStuff()
// writes "123" and throws (defined elsewhere; TODO confirm); the catch writes "4".
// Returns _trace.Match().
public int Run() { _trace = new Trace("BaadbaadTest", "1234"); try { DoStuff(); } catch (Exception e) { Console.WriteLine(e); _trace.Write("4"); } return _trace.Match(); }
// Exception-flow regression test: each recursion level catches and throws a NEW
// exception. Expected "210C0(eX)C1(e0)C2(e1)CM(e2)" — presumably LoveToRecurse(2)
// writes the depth on the way down (2,1,0) and each level's catch writes
// "C{depth}({message})" before throwing Exception("e{depth}") (defined elsewhere;
// TODO confirm). The final handler here records "CM(<message>)". Returns _trace.Match().
public int Run() { _trace = new Trace("RecursiveThrowNew", "210C0(eX)C1(e0)C2(e1)CM(e2)"); try { LoveToRecurse(2); } catch (Exception e) { _trace.Write("CM(" + e.Message + ")"); Console.WriteLine(e); } return _trace.Match(); }
/// <summary>
/// Intersects a trace with the six parallelogram faces of this box-like volume.
/// NOT TESTED YET — implementation copied from the (tested) triangular variant.
/// </summary>
public override Scientrace.Intersection intersects(Trace trace)
{
    // Three faces meet at this.loc; the other three are the parallel faces offset
    // by the length, width and height vectors respectively.
    var faces = new List<Scientrace.FlatShape2d>
    {
        new Scientrace.Parallelogram(this.loc, this.height, this.length),
        new Scientrace.Parallelogram(this.loc, this.width, this.length),
        new Scientrace.Parallelogram(this.loc, this.height, this.width),
        new Scientrace.Parallelogram(this.loc + this.length.toLocation(), this.height, this.width),
        new Scientrace.Parallelogram(this.loc + this.width.toLocation(), this.height, this.length),
        new Scientrace.Parallelogram(this.loc + this.height.toLocation(), this.width, this.length),
    };
    return this.intersectFlatShapes(trace, faces);
}
// Exception-flow regression test: each recursion level catches and RETHROWS the same
// exception (`throw;`), preserving its message. Expected "210C0C1C2RecursionIsFun" —
// presumably LoveToRecurse(2) writes 2,1,0 descending and each catch writes
// "C{depth}" before rethrowing an exception whose Message is "RecursionIsFun"
// (defined elsewhere; TODO confirm). Returns _trace.Match().
public int Run() { _trace = new Trace("RecursiveRethrow", "210C0C1C2RecursionIsFun"); try { LoveToRecurse(2); } catch (Exception e) { Console.WriteLine(e); _trace.Write(e.Message); } return _trace.Match(); }
// Exception-flow regression test: catching a derived exception via its base class.
// Expected "0121" — presumably f1() writes "01" and throws an exception whose
// Message is "1" (defined elsewhere; TODO confirm); the catch writes "2" + message.
// Returns _trace.Match().
public int Run() { _trace = new Trace("BaseClassTest", "0121"); try { f1(); } catch(Exception e) { Console.WriteLine(e); _trace.Write("2" + e.Message); } return _trace.Match(); }
// Exception-flow regression test. Expected "0123456": writes "0", then foo(1234) —
// presumably writing "12345" before throwing (defined elsewhere; TODO confirm).
// The "%%%%" sentinel must never be reached (it would break the match); the catch
// writes "6". Returns _trace.Match().
public int Run() { _trace = new Trace("GoryManagedPresentTest", "0123456"); try { _trace.Write("0"); foo(1234); _trace.Write("%%%%"); } catch(Exception e) { Console.WriteLine(e); _trace.Write("6"); } return _trace.Match(); }
public InnerFinallyTest()
{
    // Expected console transcript for a finally block that contains its own
    // try/finally which throws: both inner finally blocks must execute before
    // the outer catch and finally run.
    var expectedOut = new System.IO.StringWriter();
    string[] transcript =
    {
        " try 1",
        "\t try 1.1",
        "\t finally 1.1",
        "\t\t try 1.1.1",
        "\t\t Throwing an exception here!",
        "\t\t finally 1.1.1",
        " catch 1",
        " finally 1",
    };
    foreach (var line in transcript)
    {
        expectedOut.WriteLine(line);
    }
    _trace = new Trace("InnerFinallyTest", expectedOut.ToString());
}
// Exception-flow regression test for colliding unwinds (an exception thrown while
// another unwind is in progress). Expected "0123456789ABCDE" — writes "0", then
// Foo() presumably produces "123456789ABCD" (defined elsewhere; TODO confirm)
// before an ExType2 escapes; the catch writes "E". Returns _trace.Match().
public int Run() { _trace = new Trace("CollidedUnwindTest", "0123456789ABCDE"); try { _trace.Write("0"); Foo(); } catch (ExType2 e) { Console.WriteLine(e); _trace.Write("E"); } return _trace.Match(); }
public int Run()
{
    // Expected trace: the digits n..0 concatenated ("876543210" for n = 8);
    // each recursion level is expected to record its counter on the way down.
    int n = 8;
    var expected = "";
    int digit = n;
    while (digit >= 0)
    {
        expected += digit.ToString();
        digit--;
    }
    _trace = new Trace("RecurseTest", expected);
    DoTest(n);
    return _trace.Match();
}
// Smooths every trace with a TRAILING moving-average filter of length 5: each output
// point is the mean of the current sample and the four preceding ones, with indices
// clamped to the first sample near the start of a trace. (The >= numPoints clamp can
// never fire for a trailing window since pointIndex - loopIdex <= pointIndex; kept
// from the original.) Returns a new TraceGroup; the input group is not modified.
// NOTE(review): only point coordinates are copied to the new traces — any other
// per-trace metadata is not carried over; confirm that is intended.
public TraceGroup SmoothenTraceGroup(TraceGroup traceGroup) { var filterLength = 5; var newTraceGroup = new TraceGroup(); var sumX = 0f; var sumY = 0f; var actualIndex = 0; foreach (var trace in traceGroup.Traces) { var newTrace = new Trace(); var numPoints = trace.Points.Count; var channelX = trace.ChannelX; var channelY = trace.ChannelY; for(var pointIndex=0; pointIndex<numPoints; pointIndex++) { sumX = sumY = 0f; for(var loopIdex=0;loopIdex<filterLength;loopIdex++) { actualIndex = (pointIndex - loopIdex); if(actualIndex<0) { actualIndex = 0; }else if(actualIndex>=numPoints) { actualIndex = numPoints - 1; } //accumulate sum sumX += channelX[actualIndex]; sumY += channelY[actualIndex]; } sumX /= filterLength; sumY /= filterLength; newTrace.Points.Add(new PenPoint{X=sumX, Y=sumY}); } newTraceGroup.Traces.Add(newTrace); } return newTraceGroup; }
// Exception-flow regression test: throwing out of a finally block. Expected
// "0123456789Ca" — writes "0","1", then Dumb() presumably produces "2".."8" and
// lets an exception with Message "C" escape a finally (defined elsewhere; TODO
// confirm); the catch writes "9" plus the message, then "a" after the try/catch.
// Returns _trace.Match().
public int Run() { _trace = new Trace("ThrowInFinallyTest", "0123456789Ca"); _trace.Write("0"); try { _trace.Write("1"); Dumb(); } catch(Exception e) { Console.WriteLine(e); _trace.Write("9"); _trace.Write(e.Message); } _trace.Write("a"); return _trace.Match(); }
// Advances the generative melody by one step: random-walks the current note within
// +/- SPREAD scale degrees, wraps with % and reflects a negative index back positive,
// and plays the resulting cue only while trace.player1Notes is non-zero. Also toggles
// a transition window: 12 trace steps in transition, then 48 steps out of it.
// NOTE(review): the scale-change calls inside both toggles are commented out, so
// inTransition currently has no audible effect — confirm whether that is intended.
// NOTE(review): the commented block for players 2-4 is kept from the original.
public void Next(Trace trace) { traceCounter++; int scaleLength = currentScale.notes.Length; int noteModifier = random.Next(2*SPREAD + 1) - SPREAD; currentNote += noteModifier; currentNote = currentNote % scaleLength; if (currentNote < 0) { currentNote = (-1) * currentNote; } if (trace.player1Notes != 0) { melodySoundBank.PlayCue(currentScale.notes[currentNote].ToString() + "s"); } /*noteValue = random.Next(24); if (trace.player2Notes != 0) melodySoundBank.PlayCue(currentScale.notes[noteValue % scaleLength].ToString() + "l"); noteValue = random.Next(24); if (trace.player3Notes != 0) melodySoundBank.PlayCue(currentScale.notes[noteValue % scaleLength].ToString() + "l"); noteValue = random.Next(24); if (trace.player4Notes != 0) melodySoundBank.PlayCue(currentScale.notes[noteValue % scaleLength].ToString() + "l"); */ if (inTransition) { if (traceCounter % 12 == 0) { inTransition = false; //currentScale = CreateScale(6, ScaleType.NatMinor); } } else { if (traceCounter % 48 == 0) { inTransition = true; //currentScale = CreateTransitionScale(currentScale, CreateScale(6,ScaleType.NatMinor)); } } }
// Decides whether a failed push should trigger an automatic pull-and-retry.
// Returns true only when the push was actually retried (either as a force push or
// after a completed auto-pull); false in every bail-out case: no error, pushing a
// branch other than the current one, pushing to a raw URL instead of a named remote,
// output not matching the "! [rejected]" pattern for the current branch, user
// cancellation, pull action unusable (None / Fetch), or a rebase that would touch a
// merge commit. When no remembered decision exists, a task dialog offers the last
// pull action / rebase / merge / force-push / cancel, and the choice can be
// persisted via AppSettings.AutoPullOnPushRejectedAction. A force push inserts
// --force-with-lease (or -f on older git) into the existing "push ..." arguments.
private bool HandlePushOnExit(ref bool isError, FormProcess form) { if (!isError) { return(false); } // there is no way to pull to not current branch if (_selectedBranch != _currentBranchName) { return(false); } // auto pull from URL not supported. See https://github.com/gitextensions/gitextensions/issues/1887 if (!PushToRemote.Checked) { return(false); } // auto pull only if current branch was rejected Regex isRejected = new Regex(Regex.Escape("! [rejected] ") + ".*" + Regex.Escape(_currentBranchName) + ".*", RegexOptions.Compiled); if (isRejected.IsMatch(form.GetOutputString()) && !Module.IsBareRepository()) { bool forcePush = false; IWin32Window owner = form.Owner; if (AppSettings.AutoPullOnPushRejectedAction == null) { bool cancel = false; string destination = _NO_TRANSLATE_Remotes.Text; string buttons = _pullRepositoryButtons.Text; switch (Module.LastPullAction) { case AppSettings.PullAction.Fetch: case AppSettings.PullAction.FetchAll: buttons = string.Format(buttons, _pullActionFetch.Text); break; case AppSettings.PullAction.Merge: buttons = string.Format(buttons, _pullActionMerge.Text); break; case AppSettings.PullAction.Rebase: buttons = string.Format(buttons, _pullActionRebase.Text); break; default: buttons = string.Format(buttons, _pullActionNone.Text); break; } int idx = PSTaskDialog.cTaskDialog.ShowCommandBox(owner, string.Format(_pullRepositoryCaption.Text, destination), _pullRepositoryMainInstruction.Text, _pullRepository.Text, "", "", _dontShowAgain.Text, buttons, true, 0, 0); bool rememberDecision = PSTaskDialog.cTaskDialog.VerificationChecked; switch (idx) { case 0: if (rememberDecision) { AppSettings.AutoPullOnPushRejectedAction = AppSettings.PullAction.Default; } break; case 1: AppSettings.FormPullAction = AppSettings.PullAction.Rebase; if (rememberDecision) { AppSettings.AutoPullOnPushRejectedAction = AppSettings.FormPullAction; } break; case 2: AppSettings.FormPullAction = AppSettings.PullAction.Merge; if (rememberDecision) { 
AppSettings.AutoPullOnPushRejectedAction = AppSettings.FormPullAction; } break; case 3: forcePush = true; break; default: cancel = true; if (rememberDecision) { AppSettings.AutoPullOnPushRejectedAction = AppSettings.PullAction.None; } break; } if (cancel) { return(false); } } if (forcePush) { if (!form.ProcessArguments.Contains(" -f ") && !form.ProcessArguments.Contains(" --force")) { Trace.Assert(form.ProcessArguments.StartsWith("push "), "Arguments should start with 'push' command"); string forceArg = GitCommandHelpers.VersionInUse.SupportPushForceWithLease ? " --force-with-lease" : " -f"; form.ProcessArguments = form.ProcessArguments.Insert("push".Length, forceArg); } form.Retry(); return(true); } if (AppSettings.AutoPullOnPushRejectedAction == AppSettings.PullAction.None) { return(false); } if (AppSettings.AutoPullOnPushRejectedAction == AppSettings.PullAction.Default) { if (Module.LastPullAction == AppSettings.PullAction.None) { return(false); } Module.LastPullActionToFormPullAction(); } if (AppSettings.FormPullAction == AppSettings.PullAction.Fetch) { form.AppendOutput(Environment.NewLine + "Can not perform auto pull, when merge option is set to fetch."); return(false); } if (IsRebasingMergeCommit()) { form.AppendOutput(Environment.NewLine + "Can not perform auto pull, when merge option is set to rebase " + Environment.NewLine + "and one of the commits that are about to be rebased is a merge."); return(false); } UICommands.StartPullDialog(owner, true, _selectedRemoteBranchName, _selectedRemote.Name, out var pullCompleted, false); if (pullCompleted) { form.Retry(); return(true); } } return(false); }
/// <summary>
/// Writes one client message to the diagnostic trace under the "FtpServerMessage"
/// category, prefixed with the numeric connection/session id.
/// </summary>
public void TraceMessage(int nId, string sMessage)
{
    Trace.WriteLine($"{nId}: {sMessage}", "FtpServerMessage");
}
// Default constructor: emits an (empty) verbose trace marker when TraceVerbose is
// enabled, then registers the provider property with this location's property set.
// NOTE(review): _provider and Properties are declared elsewhere in this class —
// presumably _provider describes the service provider; confirm against the full type.
public ServiceLocation() { Trace.WriteIf(Tracing.Is.TraceVerbose, string.Empty); Properties.Add(_provider); }
// Completes the round-trip from an external OpenAuth provider:
//  1. bails to the login page when the request names no provider;
//  2. verifies the provider payload against ~/Account/RegisterExternalLogin,
//     preserving any ReturnUrl query parameter;
//  3. on failure, hides the user-name form, adds a model error and traces a warning
//     (visible via page tracing / ~/Trace.axd);
//  4. on success: logs straight in when the external id is already registered;
//     otherwise stores the provider details, and either attaches the external
//     account to the already-authenticated user or pre-fills the desired
//     membership-name form for a new user.
private void ProcessProviderResult() { // Process the result from an auth provider in the request ProviderName = OpenAuth.GetProviderNameFromCurrentRequest(); if (String.IsNullOrEmpty(ProviderName)) { Response.Redirect(FormsAuthentication.LoginUrl); } // Build the redirect url for OpenAuth verification var redirectUrl = "~/Account/RegisterExternalLogin"; var returnUrl = Request.QueryString["ReturnUrl"]; if (!String.IsNullOrEmpty(returnUrl)) { redirectUrl += "?ReturnUrl=" + HttpUtility.UrlEncode(returnUrl); } // Verify the OpenAuth payload var authResult = OpenAuth.VerifyAuthentication(redirectUrl); ProviderDisplayName = OpenAuth.GetProviderDisplayName(ProviderName); if (!authResult.IsSuccessful) { Title = "External login failed"; userNameForm.Visible = false; ModelState.AddModelError("Provider", String.Format("External login {0} failed.", ProviderDisplayName)); // To view this error, enable page tracing in web.config (<system.web><trace enabled="true"/></system.web>) and visit ~/Trace.axd Trace.Warn("OpenAuth", String.Format("There was an error verifying authentication with {0})", ProviderDisplayName), authResult.Error); return; } // User has logged in with provider successfully // Check if user is already registered locally if (OpenAuth.Login(authResult.Provider, authResult.ProviderUserId, createPersistentCookie: false)) { RedirectToReturnUrl(); } // Store the provider details in ViewState ProviderName = authResult.Provider; ProviderUserId = authResult.ProviderUserId; ProviderUserName = authResult.UserName; // Strip the query string from action Form.Action = ResolveUrl(redirectUrl); if (User.Identity.IsAuthenticated) { // User is already authenticated, add the external login and redirect to return url OpenAuth.AddAccountToExistingUser(ProviderName, ProviderUserId, ProviderUserName, User.Identity.Name); RedirectToReturnUrl(); } else { // User is new, ask for their desired membership name userName.Text = authResult.UserName; } }
/// <summary>
/// Log message with 'error' log level (thin wrapper over
/// <see cref="Trace.TraceError(string, object[])"/>).
/// </summary>
/// <param name="format">Composite format string.</param>
/// <param name="args">Arguments for the format string.</param>
private static void LogError(string format, params object[] args) => Trace.TraceError(format, args);
/// <summary>
/// Callback raised when a scan produces a barcode: logs the decoded value through
/// the optional trace sink and completes synchronously.
/// </summary>
private Task OnResult(string barcode)
{
    var sink = Trace;
    if (sink != null)
    {
        sink.Log($"{Localizer["ScanCodeLog"]} {barcode}");
    }
    return Task.CompletedTask;
}
/// <summary>
/// Callback raised when scanning fails: logs the error text through the optional
/// trace sink and completes synchronously.
/// </summary>
private Task OnError(string error)
{
    var sink = Trace;
    if (sink != null)
    {
        sink.Log($"{Localizer["ErrorLog"]} {error}");
    }
    return Task.CompletedTask;
}
// t-SNE driver (C# port of the exact / Barnes-Hut reference implementation).
// Validates perplexity against N (requires N - 1 >= 3 * perplexity), centers X and
// scales it by its max, then builds input similarities: a dense symmetrized P when
// theta == 0 (exact), otherwise a sparse symmetrized (row_P, col_P, val_P) from an
// approximate Gaussian-perplexity search. P is exaggerated by 12x for the first 250
// iterations ("lying"), Y is optionally initialized with small random values, and
// 1000 gradient-descent iterations follow with per-element gain adaptation
// (clamped at 0.01), a momentum switch from 0.5 to 0.8 at iteration 250, zero-mean
// recentering of Y each step, and KL-divergence error reported every 50 iterations.
// Results are written into Y in place.
// NOTE(review): the lone Trace.Write("Exact?") is inconsistent with the Debug.Write
// logging used everywhere else in this method — likely leftover diagnostics.
internal static void run(double[][] X, double[][] Y, double perplexity, double theta, bool skip_random_init = false) { int N = X.Rows(); int D = X.Columns(); int no_dims = Y.Columns(); // Determine whether we are using an exact algorithm if (N - 1 < 3 * perplexity) { throw new Exception(String.Format("Perplexity too large for the number of data points. For {0} points, should be less than {1}", N, (N - 1) / 3.0)); } Debug.Write(String.Format("Using no_dims = {0}, perplexity = {1}, and theta = {2}", no_dims, perplexity, theta)); bool exact = (theta == 0.0); // Set learning parameters TimeSpan total_time = TimeSpan.Zero; Stopwatch start; TimeSpan end; int max_iter = 1000; int stop_lying_iter = 250; int mom_switch_iter = 250; double momentum = 0.5; double final_momentum = 0.8; double eta = 200.0; // Allocate some memory double[][] dY = Jagged.Create <double>(N, no_dims); double[][] uY = Jagged.Create <double>(N, no_dims); double[][] gains = Jagged.Ones <double>(N, no_dims); // Normalize input data (to prevent numerical problems) Debug.Write("Computing input similarities..."); start = Stopwatch.StartNew(); Accord.Statistics.Tools.Center(X, inPlace: true); X.Divide(X.Max(), result: X); // Compute input similarities for exact t-SNE double[][] P = null; int[] row_P = null; int[] col_P = null; double[] val_P = null; if (exact) { Trace.Write("Exact?"); // Compute similarities P = Jagged.Create <double>(N, N); computeGaussianPerplexity(X, N, D, ref P, perplexity); // Symmetrize input similarities Debug.Write("Symmetrizing..."); for (int n = 0; n < N; n++) { for (int m = n + 1; m < N; m++) { P[n][m] += P[m][n]; P[m][n] = P[n][m]; } } P.Divide(P.Sum(), result: P); } // Compute input similarities for approximate t-SNE else { // Compute asymmetric pairwise input similarities computeGaussianPerplexity(X, N, D, ref row_P, ref col_P, ref val_P, perplexity, (int)(3 * perplexity)); // Symmetrize input similarities symmetrizeMatrix(ref row_P, ref col_P, ref val_P, N); double sum_P = 
0.0; for (int i = 0; i < row_P[N]; i++) { sum_P += val_P[i]; } for (int i = 0; i < row_P[N]; i++) { val_P[i] /= sum_P; } } end = start.Elapsed; // Lie about the P-values if (exact) { P.Multiply(12.0, result: P); } else { for (int i = 0; i < row_P[N]; i++) { val_P[i] *= 12.0; } } if (!skip_random_init) { // Initialize solution (randomly) for (int i = 0; i < Y.Length; i++) { for (int j = 0; j < Y[i].Length; j++) { Y[i][j] = randn() * 0.0001; } } } // Perform main training loop if (exact) { Debug.Write(String.Format("Input similarities computed in {0} seconds!", end)); Debug.Write("Learning embedding..."); } else { Debug.Write(String.Format("Input similarities computed in {0} seconds (sparsity = {1})!", end, (double)row_P[N] / ((double)N * (double)N))); Debug.Write("Learning embedding..."); } start = Stopwatch.StartNew(); for (int iter = 0; iter < max_iter; iter++) { // Compute (approximate) gradient if (exact) { computeExactGradient(P, Y, N, no_dims, dY); } else { computeGradient(P, row_P, col_P, val_P, Y, N, no_dims, dY, theta); } // Update gains for (int i = 0; i < gains.Length; i++) { for (int j = 0; j < gains[i].Length; j++) { gains[i][j] = (System.Math.Sign(dY[i][j]) != System.Math.Sign(uY[i][j])) ? 
(gains[i][j] + 0.2) : (gains[i][j] * 0.8); } } for (int i = 0; i < gains.Length; i++) { for (int j = 0; j < gains[i].Length; j++) { if (gains[i][j] < 0.01) { gains[i][j] = 0.01; } } } // Perform gradient update (with momentum and gains) for (int i = 0; i < uY.Length; i++) { for (int j = 0; j < uY[i].Length; j++) { uY[i][j] = momentum * uY[i][j] - eta * gains[i][j] * dY[i][j]; } } for (int i = 0; i < Y.Length; i++) { for (int j = 0; j < Y[i].Length; j++) { Y[i][j] = Y[i][j] + uY[i][j]; } } // Make solution zero-mean Accord.Statistics.Tools.Center(Y, inPlace: true); // Stop lying about the P-values after a while, and switch momentum if (iter == stop_lying_iter) { if (exact) { P.Divide(12.0, result: P); } else { for (int i = 0; i < row_P[N]; i++) { val_P[i] /= 12.0; } } } if (iter == mom_switch_iter) { momentum = final_momentum; } // Print out progress if (iter > 0 && (iter % 50 == 0 || iter == max_iter - 1)) { end = start.Elapsed; double C = 0.0; if (exact) { C = evaluateError(P, Y, N, no_dims); } else { C = evaluateError(row_P, col_P, val_P, Y, N, no_dims, theta); // doing approximate computation here! } if (iter == 0) { Debug.WriteLine(String.Format("Iteration {0}: error is {1}", iter + 1, C)); } else { total_time += end; Debug.WriteLine(String.Format("Iteration {0}: error is {1} (50 iterations in {2} seconds)", iter, C, end)); } start = Stopwatch.StartNew(); } } end = start.Elapsed; total_time += end; Debug.WriteLine(String.Format("Fitting performed in {0} seconds.", total_time)); }
/// <inheritdoc/>
/// <remarks>
/// Simulated user "think time" test step: waits <c>Timespan</c> milliseconds (the
/// property is cast to int for Task.Delay) before letting the turn end, then records
/// the delay via Trace.TraceInformation. The adapter/callback parameters are unused.
/// NOTE(review): the trace text's '[' brackets are unbalanced, but log output is
/// runtime behavior and is deliberately left untouched here.
/// </remarks>
public async override Task ExecuteAsync(TestAdapter adapter, BotCallbackHandler callback) { await Task.Delay((int)Timespan).ConfigureAwait(false); Trace.TraceInformation($"[Turn Ended => {Timespan} ms processing UserDelay[{Timespan}]"); }
// I/O completion callback for an asynchronous single-record read from the hybrid log.
// Flow, based on what the buffer contains:
//  - complete record whose key matches the lookup key: the request is done, the
//    context is queued for the caller (NOTE(review): the commented-out
//    "ctx.record = result.record" assignment is kept from the original);
//  - complete record with a DIFFERENT key (hash-chain collision): free any
//    serialized key/value objects, return the buffer, and follow the record's
//    PreviousAddress with a new async read — or queue the context when the chain
//    ends at kInvalidAddress;
//  - incomplete record: return the buffer and reissue the read with requiredBytes.
// Always decrements numPendingReads and frees the overlapped structure on exit.
private void AsyncGetFromDiskCallback( uint errorCode, uint numBytes, NativeOverlapped* overlap) { if (errorCode != 0) { Trace.TraceError("OverlappedStream GetQueuedCompletionStatus error: {0}", errorCode); } var result = (AsyncGetFromDiskResult<AsyncIOContext>)Overlapped.Unpack(overlap).AsyncResult; Interlocked.Decrement(ref numPendingReads); var ctx = result.context; var record = ctx.record.GetValidPointer(); if (Layout.HasTotalRecord(record, ctx.record.available_bytes, out int requiredBytes)) { //We have the complete record. if (RetrievedObjects(record, ctx)) { if (Key.Equals(ctx.key, Layout.GetKey((long)record))) { //The keys are same, so I/O is complete // ctx.record = result.record; ctx.callbackQueue.Add(ctx); } else { var oldAddress = ctx.logicalAddress; //keys are not same. I/O is not complete ctx.logicalAddress = ((RecordInfo*)record)->PreviousAddress; if (ctx.logicalAddress != Constants.kInvalidAddress) { // Delete key, value, record if (Key.HasObjectsToSerialize()) { var physicalAddress = (long)ctx.record.GetValidPointer(); Key.Free(Layout.GetKey(physicalAddress)); } if (Value.HasObjectsToSerialize()) { var physicalAddress = (long)ctx.record.GetValidPointer(); Value.Free(Layout.GetValue(physicalAddress)); } ctx.record.Return(); AsyncGetFromDisk(ctx.logicalAddress, requiredBytes, AsyncGetFromDiskCallback, ctx); } else { ctx.callbackQueue.Add(ctx); } } } } else { ctx.record.Return(); AsyncGetFromDisk(ctx.logicalAddress, requiredBytes, AsyncGetFromDiskCallback, ctx); } Overlapped.Free(overlap); }
// Two-phase completion callback for reading one hybrid-log page plus its object log.
// Phase 1 (result.count decremented to 1, i.e. the page read finished): scan every
// valid record on the page, collect the [min, max) address range of all serialized
// key/value objects, rent a buffer from ioBufferPool, and issue a second read
// against the object-log device with this same method as its callback.
// Phase 2 (the object-log read finished): walk the page again, deserializing keys
// and values from the rented buffer via a MemoryStream, release the result, and
// finally invoke the caller's original page-read callback.
// The Debug.Assert documents that the object-log fragment start is expected to be
// sector-aligned by construction.
private void AsyncReadPageCallback<TContext>(uint errorCode, uint numBytes, NativeOverlapped* overlap) { if (errorCode != 0) { Trace.TraceError("OverlappedStream GetQueuedCompletionStatus error: {0}", errorCode); } PageAsyncReadResult<TContext> result = (PageAsyncReadResult<TContext>)Overlapped.Unpack(overlap).AsyncResult; if (Interlocked.Decrement(ref result.count) == 1) { // We will be issuing another I/O, so free this overlap Overlapped.Free(overlap); long ptr = (long)pointers[result.page % BufferSize]; // Correct for page 0 of HLOG if (result.page == 0) ptr += Constants.kFirstValidAddress; long minObjAddress = long.MaxValue; long maxObjAddress = long.MinValue; while (ptr < (long)pointers[result.page % BufferSize] + PageSize) { if (!Layout.GetInfo(ptr)->Invalid) { if (Key.HasObjectsToSerialize()) { Key* key = Layout.GetKey(ptr); var addr = ((AddressInfo*)key)->Address; if (addr < minObjAddress) minObjAddress = addr; addr += ((AddressInfo*)key)->Size; if (addr > maxObjAddress) maxObjAddress = addr; } if (Value.HasObjectsToSerialize()) { Value* value = Layout.GetValue(ptr); var addr = ((AddressInfo*)value)->Address; if (addr < minObjAddress) minObjAddress = addr; addr += ((AddressInfo*)value)->Size; if (addr > maxObjAddress) maxObjAddress = addr; } } ptr += Layout.GetPhysicalSize(ptr); } // Object log fragment should be aligned by construction Debug.Assert(minObjAddress % sectorSize == 0); var to_read = (int)(maxObjAddress - minObjAddress); var objBuffer = ioBufferPool.Get(to_read); result.freeBuffer1 = objBuffer; var alignedLength = (to_read + (sectorSize - 1)) & ~(sectorSize - 1); // Request objects from objlog result.objlogDevice.ReadAsync( (int)(result.page >> (LogSegmentSizeBits-LogPageSizeBits)), (ulong)minObjAddress, (IntPtr)objBuffer.aligned_pointer, (uint)alignedLength, AsyncReadPageCallback<TContext>, result); } else { // Load objects from buffer into memory long ptr = (long)pointers[result.page % BufferSize]; // Correct for page 0 of HLOG if 
(result.page == 0) ptr += Constants.kFirstValidAddress; MemoryStream ms = new MemoryStream(result.freeBuffer1.buffer); ms.Seek(result.freeBuffer1.offset + result.freeBuffer1.valid_offset, SeekOrigin.Begin); while (ptr < (long)pointers[result.page % BufferSize] + PageSize) { if (!Layout.GetInfo(ptr)->Invalid) { if (Key.HasObjectsToSerialize()) { Key.Deserialize(Layout.GetKey(ptr), ms); } if (Value.HasObjectsToSerialize()) { Value.Deserialize(Layout.GetValue(ptr), ms); } } ptr += Layout.GetPhysicalSize(ptr); } ms.Dispose(); result.Free(); // Call the "real" page read callback result.callback(errorCode, numBytes, overlap); } }
// Polls cluster metadata up to maxTry times (sleeping DefaultSleepIterationMs between
// attempts) until nodeHost reaches the expected state: UP when waitForUp, otherwise
// DOWN or absent from the metadata host list. Terminal conditions that return early:
// the whole cluster is unreachable (RefreshSchema fails), the host is observed in the
// expected state, the host is missing while waiting for DOWN, or the "none of the
// hosts tried for query are available" exception while waiting for DOWN. Other
// exceptions are logged and the loop continues. Logs an error if the expected state
// is never reached within maxTry attempts.
private static void WaitForMeta(string nodeHost, Cluster cluster, int maxTry, bool waitForUp) { string expectedFinalNodeState = "UP"; if (!waitForUp) { expectedFinalNodeState = "DOWN"; } for (int i = 0; i < maxTry; ++i) { try { // Are all nodes in the cluster accounted for? bool disconnected = !cluster.RefreshSchema(); if (disconnected) { string warnStr = "While waiting for host " + nodeHost + " to be " + expectedFinalNodeState + ", the cluster is now totally down, returning now ... "; Trace.TraceWarning(warnStr); return; } Metadata metadata = cluster.Metadata; foreach (Host host in metadata.AllHosts()) { bool hostFound = false; if (host.Address.ToString() == nodeHost) { hostFound = true; if (host.IsUp && waitForUp) { Trace.TraceInformation("Verified according to cluster meta that host " + nodeHost + " is " + expectedFinalNodeState + ", returning now ... "); return; } Trace.TraceWarning("We're waiting for host " + nodeHost + " to be " + expectedFinalNodeState); } // Is the host even in the meta list? if (!hostFound) { if (!waitForUp) { Trace.TraceInformation("Verified according to cluster meta that host " + host.Address + " is not available in the MetaData hosts list, returning now ... "); return; } else { Trace.TraceWarning("We're waiting for host " + nodeHost + " to be " + expectedFinalNodeState + ", but this host was not found in the MetaData hosts list!"); } } } } catch (Exception e) { if (e.Message.Contains("None of the hosts tried for query are available") && !waitForUp) { Trace.TraceInformation("Verified according to cluster meta that host " + nodeHost + " is not available in the MetaData hosts list, returning now ... "); return; } Trace.TraceInformation("Exception caught while waiting for meta data: " + e.Message); } Trace.TraceWarning("Waiting for node host: " + nodeHost + " to be " + expectedFinalNodeState); Thread.Sleep(DefaultSleepIterationMs); } string errStr = "Node host should have been " + expectedFinalNodeState + " but was not after " + maxTry + " tries!"; Trace.TraceError(errStr); }
// End-to-end toolchain test: builds a .resx with one resource carrying a message id,
// converts it to a message-text (.mc) file, compiles that with mc.exe (expecting the
// generated .rc, .h and MSG00409.bin outputs), compiles the .rc with rc.exe (looked
// up next to mc.exe first, then on the path), and finally verifies the resulting
// .res contains the original resource value in its UTF-16 encoded form
// ("\0t\0e\0s\0t..."). Tool stdout/stderr is mirrored to the trace listeners, and
// all temp files/directories are scoped by using-blocks.
public void TestBuildMcFromResX() { TestResourceBuilder builder1 = new TestResourceBuilder("TestNamespace", "TestResXClass1"); builder1.Add("Testing", "test value 1", "#MessageId=42"); using (TempDirectory intermediateFiles = new TempDirectory()) using (TempFile mctxt = TempFile.FromExtension(".mc")) { using (TempFile resx1 = TempFile.FromExtension(".resx")) { builder1.BuildResX(resx1.TempPath); Commands.ResXtoMc(mctxt.TempPath, new string[] { resx1.TempPath }); } string mcexe = TestResourceBuilder.FindExe("mc.exe"); using (ProcessRunner mc = new ProcessRunner(mcexe, "-U", "{0}", "-r", "{1}", "-h", "{1}")) { mc.OutputReceived += delegate(object o, ProcessOutputEventArgs e) { Trace.WriteLine(e.Data, mcexe); }; Assert.AreEqual(0, mc.RunFormatArgs(mctxt.TempPath, intermediateFiles.TempPath), "mc.exe failed."); } string rcfile = Path.Combine(intermediateFiles.TempPath, Path.GetFileNameWithoutExtension(mctxt.TempPath) + ".rc"); Assert.IsTrue(File.Exists(rcfile)); Assert.IsTrue(File.Exists(Path.ChangeExtension(rcfile, ".h"))); Assert.IsTrue(File.Exists(Path.Combine(intermediateFiles.TempPath, "MSG00409.bin"))); string rcexe = Path.Combine(Path.GetDirectoryName(mcexe), "rc.exe"); if (!File.Exists(rcexe)) { rcexe = TestResourceBuilder.FindExe("rc.exe"); } using (ProcessRunner rc = new ProcessRunner(rcexe, "{0}")) { rc.OutputReceived += delegate(object o, ProcessOutputEventArgs e) { Trace.WriteLine(e.Data, rcexe); }; Assert.AreEqual(0, rc.RunFormatArgs(rcfile), "rc.exe failed."); } string resfile = Path.ChangeExtension(rcfile, ".res"); Assert.IsTrue(File.Exists(resfile)); Assert.IsTrue(File.ReadAllText(resfile).Contains("\0t\0e\0s\0t\0 \0v\0a\0l\0u\0e\0 \01")); } }
/// <summary>
/// Writes the "open camera" log entry (when a trace sink is attached) and
/// completes synchronously.
/// </summary>
private Task OnStart()
{
    var sink = Trace;
    if (sink != null)
    {
        sink.Log(Localizer["OpenCameraLog"]);
    }
    return Task.CompletedTask;
}
/// <summary>
/// Executes an in-process HTTP request against the hosted web application and tracks the
/// host's cookie jar across responses.
/// </summary>
/// <param name="webHost">Host to run the request in; its <c>Cookies</c> string is read and updated.</param>
/// <param name="urlPath">Request path, optionally with a query string; "/" means the homepage.</param>
/// <param name="postData">Form fields to send; used as the POST body when <paramref name="requestMethod"/> is "POST", otherwise appended as the query string.</param>
/// <param name="requestMethod">"POST" to send <paramref name="postData"/> as a body; any other value sends it as a query.</param>
/// <returns>The populated <see cref="RequestDetails"/> including the response text.</returns>
public static RequestDetails SendRequest(this WebHost webHost, string urlPath, IDictionary<string, IEnumerable<string>> postData, string requestMethod = null)
{
    var physicalPath = Bleroy.FluentPath.Path.Get(webHost.PhysicalDirectory);
    bool isHomepage = urlPath == "/";
    if (!isHomepage) urlPath = StripVDir(urlPath, webHost.VirtualDirectory);
    var details = new RequestDetails
    {
        HostName = webHost.HostName,
        UrlPath = urlPath.Replace('\\', '/'),
    };
    // Split the query string off the path, if present.
    int queryIndex = urlPath.IndexOf('?');
    if (queryIndex >= 0)
    {
        details.UrlPath = urlPath.Substring(0, queryIndex).Replace('\\', '/');
        details.Query = urlPath.Substring(queryIndex + 1);
    }
    // Map the URL path to a physical file; for missing files keep a URL-style page path.
    var physicalFilePath = physicalPath.Combine(details.UrlPath.TrimStart('/', '\\'));
    details.Page = (isHomepage ? "" : physicalFilePath.GetRelativePath(physicalPath).ToString());
    if (!File.Exists(physicalFilePath)) details.Page = details.Page.Replace('\\', '/');
    if (!string.IsNullOrEmpty(webHost.Cookies))
    {
        details.RequestHeaders.Add("Cookie", webHost.Cookies);
    }
    details.RequestHeaders.Add("Accept-Charset", "utf-8");
    if (postData != null)
    {
        // Flatten key -> many-values into "k=v&k=v" form-encoded text.
        var requestBodyText = postData
            .SelectMany(kv => kv.Value.Select(v => new { k = kv.Key, v }))
            .Select((kv, n) => new { p = HttpUtility.UrlEncode(kv.k) + "=" + HttpUtility.UrlEncode(kv.v), n })
            .Aggregate("", (a, x) => a + (x.n == 0 ? "" : "&") + x.p);
        if (requestMethod == "POST")
            details.PostData = Encoding.Default.GetBytes(requestBodyText);
        else
            details.Query = requestBodyText;
    }
    // Run the request through the ASP.NET pipeline inside the host's AppDomain.
    webHost.Execute(() =>
    {
        var output = new StringWriter();
        var worker = new Worker(details, output);
        HttpRuntime.ProcessRequest(worker);
        details.ResponseText = output.ToString();
    });
    // Maintain the cookie jar from any Set-Cookie response header.
    string setCookie;
    if (details.ResponseHeaders.TryGetValue("Set-Cookie", out setCookie))
    {
        Trace.WriteLine(string.Format("Set-Cookie: {0}", setCookie));
        var cookieName = setCookie.Split(';')[0].Split('=')[0];
        // Escape the name before embedding it in regex patterns so metacharacters
        // in cookie names cannot corrupt the match.
        var escapedName = Regex.Escape(cookieName);
        DateTime expires;
        if (!string.IsNullOrEmpty(webHost.Cookies)
            && setCookie.Contains("expires=")
            && DateTime.TryParse(setCookie.Split(new[] { "expires=" }, 2, StringSplitOptions.None)[1].Split(';')[0], out expires)
            && expires < DateTime.Now)
        {
            // remove: the server expired the cookie, so drop it from the jar.
            Trace.WriteLine(string.Format("Removing cookie: {0}", cookieName));
            webHost.Cookies = Regex.Replace(webHost.Cookies, string.Format("{0}=[^;]*;?", escapedName), "");
        }
        // BUG FIX: the pattern was "\b{0}=" in a regular string literal, which embeds a
        // literal backspace character (U+0008) instead of the regex word boundary \b,
        // so this branch could never match. A verbatim string preserves the intended \b.
        else if (!string.IsNullOrEmpty(webHost.Cookies) && Regex.IsMatch(webHost.Cookies, string.Format(@"\b{0}=", escapedName)))
        {
            // replace: cookie already present — swap in the new value.
            Trace.WriteLine(string.Format("Replacing cookie: {0}", cookieName));
            webHost.Cookies = Regex.Replace(webHost.Cookies, string.Format("{0}=[^;]*(;?)", escapedName), string.Format("{0}$1", setCookie.Split(';')[0]));
        }
        else
        {
            // add: first time we see this cookie.
            Trace.WriteLine(string.Format("Adding cookie: {0}", cookieName));
            webHost.Cookies = (webHost.Cookies + ';' + setCookie.Split(';').FirstOrDefault()).Trim(';');
        }
        Trace.WriteLine(string.Format("Cookie jar: {0}", webHost.Cookies));
    }
    return details;
}
/// <summary>Forwards the given text to the diagnostic trace listeners.</summary>
/// <param name="trace">Text to emit.</param>
public void WriteTrace(string trace) => Trace.WriteLine(trace);
/// <summary>
/// Log message with 'error' log level.
/// </summary>
/// <param name="message">Message text to log.</param>
private static void LogError(string message) => Trace.TraceError(message);
/// <summary>
/// Worker role entry point: ensures the "photogallery" blob container and the
/// "thumbnailmaker" queue exist (retrying while storage is unreachable), then loops
/// forever turning queued blob paths into JPEG thumbnails stored under "thumbnails/".
/// This method never returns.
/// </summary>
public override void Run()
{
    var storageAccount = CloudStorageAccount.Parse(RoleEnvironment.GetConfigurationSettingValue("DataConnectionString"));
    CloudBlobClient blobStorage = storageAccount.CreateCloudBlobClient();
    CloudBlobContainer container = blobStorage.GetContainerReference("photogallery");
    CloudQueueClient queueStorage = storageAccount.CreateCloudQueueClient();
    CloudQueue queue = queueStorage.GetQueueReference("thumbnailmaker");
    Trace.TraceInformation("Creating container and queue...");
    // If the Start() method throws an exception, the role recycles. When this sample runs
    // locally without the development storage tool started, that causes repeated role
    // restarts — so retry here until the storage services are actually reachable.
    bool containerAndQueueCreated = false;
    while (!containerAndQueueCreated)
    {
        try
        {
            container.CreateIfNotExist();
            // Make the container publicly readable so gallery images can be served directly.
            var permissions = container.GetPermissions();
            permissions.PublicAccess = BlobContainerPublicAccessType.Container;
            container.SetPermissions(permissions);
            // (A redundant re-fetch of the permissions was removed here: its result was never used.)
            queue.CreateIfNotExist();
            containerAndQueueCreated = true;
        }
        catch (StorageClientException e)
        {
            if (e.ErrorCode == StorageErrorCode.TransportError)
            {
                Trace.TraceError(string.Format("Connect failure! The most likely reason is that the local " +
                    "Development Storage tool is not running or your storage account configuration is incorrect. " +
                    "Message: '{0}'", e.Message));
                System.Threading.Thread.Sleep(5000);
            }
            else
            {
                throw;
            }
        }
    }
    Trace.TraceInformation("Listening for queue messages...");
    // Main processing loop: dequeue a blob path, render its thumbnail, delete the message.
    while (true)
    {
        try
        {
            CloudQueueMessage msg = queue.GetMessage();
            if (msg != null)
            {
                string path = msg.AsString;
                string thumbnailName = System.IO.Path.GetFileNameWithoutExtension(path) + ".jpg";
                Trace.TraceInformation(string.Format("Dequeued '{0}'", path));
                CloudBlockBlob content = container.GetBlockBlobReference(path);
                CloudBlockBlob thumbnail = container.GetBlockBlobReference("thumbnails/" + thumbnailName);
                MemoryStream image = new MemoryStream();
                content.DownloadToStream(image);
                image.Seek(0, SeekOrigin.Begin);
                thumbnail.Properties.ContentType = "image/jpeg";
                thumbnail.UploadFromStream(CreateThumbnail(image));
                Trace.TraceInformation(string.Format("Done with '{0}'", path));
                // Delete only after a successful upload: if anything above throws, the
                // message becomes visible again later and the work is retried.
                queue.DeleteMessage(msg);
            }
            else
            {
                System.Threading.Thread.Sleep(1000);
            }
        }
        catch (Exception e)
        {
            // Catch everything (not only storage exceptions): the queue message that caused
            // the failure will become visible again, so the loop can recover on redelivery.
            System.Threading.Thread.Sleep(5000);
            Trace.TraceError(string.Format("Exception when processing queue item. Message: '{0}'", e.Message));
        }
    }
}
/// <summary>
/// Log message with 'trace' log level.
/// </summary>
/// <param name="message">Message text to log.</param>
private static void LogTrace(string message) => Trace.TraceInformation(message);
/// <summary>
/// Parses a newline-separated "Command=Value" settings text and applies each recognized
/// setting to the corresponding field. Lines starting with ';' are comments; malformed
/// lines are skipped (any parse exception is traced and the loop continues).
/// </summary>
private void t文字列から読み込み(string strAllSettings) // 2011.4.13 yyagi; refactored to make initial KeyConfig easier.
{
    string[] delimiter = { "\n" };
    string[] strSingleLine = strAllSettings.Split(delimiter, StringSplitOptions.RemoveEmptyEntries);
    foreach (string s in strSingleLine)
    {
        // Normalize tabs to spaces and strip leading whitespace before parsing.
        string str = s.Replace('\t', ' ').TrimStart(new char[] { '\t', ' ' });
        // Skip blank lines and ';' comment lines.
        if ((str.Length != 0) && (str[0] != ';'))
        {
            try
            {
                string strCommand;
                string strParam;
                string[] strArray = str.Split(new char[] { '=' });
                // Only lines of the exact form "name=value" are processed.
                if (strArray.Length == 2)
                {
                    strCommand = strArray[0].Trim();
                    strParam = strArray[1].Trim();
                    #region [ 演奏 ]
                    //-----------------------------
                    // Play-screen layout settings. The second argument of each C変換 call is
                    // the fallback used when the value fails to parse.
                    if (strCommand == "ScrollFieldP1Y") { this.nScrollFieldY[0] = C変換.n値を文字列から取得して返す(strParam, 192); }
                    else if (strCommand == "ScrollFieldP2Y") { this.nScrollFieldY[1] = C変換.n値を文字列から取得して返す(strParam, 192); }
                    else if (strCommand == "SENotesP1Y") { this.nSENotesY[0] = C変換.n値を文字列から取得して返す(strParam, this.nSENotesY[0]); }
                    else if (strCommand == "SENotesP2Y") { this.nSENotesY[1] = C変換.n値を文字列から取得して返す(strParam, this.nSENotesY[1]); }
                    else if (strCommand == "JudgePointP1Y") { this.nJudgePointY[0] = C変換.n値を文字列から取得して返す(strParam, this.nJudgePointY[0]); }
                    else if (strCommand == "JudgePointP2Y") { this.nJudgePointY[1] = C変換.n値を文字列から取得して返す(strParam, this.nJudgePointY[1]); }
                    else if (strCommand == "PlayerCharacterP1X") { this.nPlayerCharacterX[0] = C変換.n値を文字列から取得して返す(strParam, 0); }
                    else if (strCommand == "PlayerCharacterP1Y") { this.nPlayerCharacterY[0] = C変換.n値を文字列から取得して返す(strParam, 0); }
                    else if (strCommand == "CourseSymbolP1X") { this.nCourseSymbolX[0] = C変換.n値を文字列から取得して返す(strParam, this.nCourseSymbolX[0]); }
                    else if (strCommand == "CourseSymbolP1Y") { this.nCourseSymbolY[0] = C変換.n値を文字列から取得して返す(strParam, this.nCourseSymbolY[0]); }
                    // Difficulty display mode is clamped to the range [0, 2].
                    else if (strCommand == "DiffDispMode") { this.eDiffDispMode = (E難易度表示タイプ)C変換.n値を文字列から取得して範囲内に丸めて返す(strParam, 0, 2, (int)this.eDiffDispMode); }
                    // Only the first character of the value is inspected (ON/OFF flag);
                    // an empty value here throws and is swallowed by the catch below.
                    else if (strCommand == "NowStageDisp") { this.b現在のステージ数を表示しない = C変換.bONorOFF(strParam[0]); }
                    else
                    // Score colors are "R,G,B" triples; silently ignored unless exactly 3 components.
                    if (strCommand == "AddScoreColorP1") { string[] arColor = strParam.Split(','); if (arColor.Length == 3) { this.cScoreColor1P = C変換.n255ToColor4(Convert.ToInt16(arColor[0]), Convert.ToInt16(arColor[1]), Convert.ToInt16(arColor[2])); } }
                    else if (strCommand == "AddScoreColorP2") { string[] arColor = strParam.Split(','); if (arColor.Length == 3) { this.cScoreColor2P = C変換.n255ToColor4(Convert.ToInt16(arColor[0]), Convert.ToInt16(arColor[1]), Convert.ToInt16(arColor[2])); } }
                    //-----------------------------
                    #endregion
                    #region [ 成績発表 ]
                    //-----------------------------
                    // Result-screen panel/score positions with hard-coded default coordinates.
                    else if (strCommand == "ResultPanelP1X") { this.nResultPanelP1X = C変換.n値を文字列から取得して返す(strParam, 515); }
                    else if (strCommand == "ResultPanelP1Y") { this.nResultPanelP1Y = C変換.n値を文字列から取得して返す(strParam, 75); }
                    else if (strCommand == "ResultPanelP2X") { this.nResultPanelP2X = C変換.n値を文字列から取得して返す(strParam, 515); }
                    else if (strCommand == "ResultPanelP2Y") { this.nResultPanelP2Y = C変換.n値を文字列から取得して返す(strParam, 75); }
                    else if (strCommand == "ResultScoreP1X") { this.nResultScoreP1X = C変換.n値を文字列から取得して返す(strParam, 582); }
                    else if (strCommand == "ResultScoreP1Y") { this.nResultScoreP1Y = C変換.n値を文字列から取得して返す(strParam, 252); }
                    //-----------------------------
                    #endregion
                    #region [ その他 ]
                    #endregion
                }
                continue;
            }
            catch (Exception exception)
            {
                // Malformed lines must not abort the whole load; trace and move on.
                Trace.TraceError(exception.Message);
                continue;
            }
        }
    }
}
/// <summary>
/// Log message with 'trace' log level.
/// </summary>
/// <param name="format">Composite format string.</param>
/// <param name="args">Arguments for the format string.</param>
private static void LogTrace(string format, params object[] args) => Trace.TraceInformation(format, args);
/// <summary>
/// Computes a probability-of-no-ruin (PNR) value via backward dynamic programming over
/// discretized reserve buckets, processing each timepoint's buckets in parallel.
/// </summary>
/// <param name="prms">Model parameters consumed by <c>Funcs</c>.</param>
/// <param name="a">Per-year equity ratios; clamped in place to [mva+0.0001, 1.00].</param>
/// <param name="fxTD">Number of timepoints (time horizon).</param>
/// <param name="rf0">Initial reserve factor; selects the bucket whose value is returned.</param>
/// <param name="nbuckets">Number of reserve buckets.</param>
/// <param name="partls">Partial-derivative specification (4 entries, each expected &lt; fxTD).</param>
/// <param name="plproc">Number of concurrent tasks per timepoint.</param>
/// <param name="prec">Bucket precision (buckets per unit of reserve).</param>
/// <returns>1.00 minus the derived ruin probability at the rf0 bucket.</returns>
public static double Run(double[] prms, double[] a, int fxTD, double rf0, int nbuckets, int[] partls, int plproc, int prec)
{
    // Declare local variables.
    double[] vp = new double[nbuckets]; // prior timepoint's probabilities
    double[] v = new double[nbuckets];  // current timepoint's probabilities
    double[] mts = new double[7];       // moments for the current timepoint
    List<int> uBkts = new List<int> { 1, nbuckets };
    List<double> c;
    List<GammaDistribution> g;
    // Initiate prior timepoint's probabilities with zeros.
    for (int b = 1; b <= nbuckets; ++b) { vp[b - 1] = 0.00; }
    // Reject partial-derivative indices at or beyond the time horizon.
    // NOTE(review): the >= test sits in the loop *condition*, so scanning stops at the
    // first entry < fxTD — entries after a valid one are never checked. Confirm intent.
    for (int i = 0; i < 4 && partls[i] >= fxTD; ++i)
    {
        Trace.Write("ERROR: Invalid (>=TD) partial derivative specification partls[");
        Trace.Write(i);
        Trace.Write("]=");
        Trace.Write(partls[i]);
        Trace.WriteLine(".");
        Trace.WriteLine("EXITING...PNRsim()...");
        Console.Read();
        Environment.Exit(1);
    }
    // Clamp all equity ratios into (MVA + 0.0001, 1.00].
    for (int y = 0; y < fxTD; ++y)
    {
        if (a[y] < Funcs.mva(prms) + 0.0001 || a[y] > 1.00)
        {
            if (a[y] > 1.00) { a[y] = 1.00; }
            else if (a[y] < Funcs.mva(prms) + 0.0001) { a[y] = Funcs.mva(prms) + 0.0001; }
        }
    }
    // Iterate backward over timepoints, processing equal-sized collections of buckets
    // concurrently within each timepoint.
    for (int y = fxTD - 1; y >= 1; y--)
    {
        // Populate the moments array for this timepoint.
        mts[0] = Funcs.m(prms, a[y]);
        mts[1] = Funcs.mp(prms);
        mts[2] = Funcs.v(prms, a[y]);
        mts[3] = Funcs.vp(prms, a[y]);
        mts[4] = Funcs.vpp(prms);
        mts[5] = Funcs.mva(prms);
        mts[6] = Funcs.kh1(prms, a[y]);
        var t = new Task[plproc];
        c = GetConst.Run(mts, partls, y);  // constants for this timepoint
        g = GetGamma.Run(partls, y);       // needed gamma distributions
        // Probe for prnbkt, the highest bucket index that still needs full processing
        // (buckets above it have the trivial value 1.00). Math.Pow(-1, pwr) selects the
        // probe direction: pwr==1 scans downward from nbuckets, pwr==2 upward from 1.
        var cont = 1;
        var pwr = 1;
        int prnbkt = nbuckets;
        if (y < fxTD - fxTD / 6) { prnbkt = 1; pwr = 2; }
        for (int b = prnbkt + (int)Math.Pow(-1.00, pwr) * 2 * plproc; cont == 1 && b >= 1 && b <= nbuckets; b = b + (int)Math.Pow(-1.00, pwr) * (2 * plproc))
        {
            int[] prnbkts = { b, b, nbuckets };
            PNRdyn.Run(mts, prec, prnbkts, vp, v, uBkts, c, g);
            if (pwr == 1 && v[b - 1] >= 1.00) { prnbkt = b; }
            else if (pwr == 2 && v[b - 1] < 1.00) { prnbkt = Math.Min(b + (int)Math.Pow(-1.00, pwr) * 2 * plproc, nbuckets); }
            else { cont = 0; }
        }
        // The value of prnbkt should be >= plproc and <= nbuckets.
        if (prnbkt > nbuckets) { prnbkt = nbuckets; }
        else if (prnbkt < plproc) { prnbkt = plproc; }
        int bktsprun = prnbkt / plproc + 1;
        if (bktsprun * plproc > nbuckets) { bktsprun = nbuckets / plproc; }
        // If the buckets-per-run figure is still inconsistent, exit with diagnostics.
        if (bktsprun * plproc > nbuckets || bktsprun < 1)
        {
            Trace.WriteLine("");
            Trace.WriteLine("The # of buckets per run is not being derived correctly, must fix:");
            Trace.Write("# of buckets per run = ");
            Trace.WriteLine(bktsprun);
            Trace.Write("Total # of buckets = ");
            Trace.WriteLine(nbuckets);
            Trace.Write("# of concurrent processes being used = ");
            Trace.WriteLine(plproc);
            Trace.WriteLine("EXITING...ThrdPNRdyn()...");
            Console.Read();
            Environment.Exit(1);
        }
        // Each thread receives a 3-element array: start bucket, end bucket, total buckets.
        var bktarys = new int[plproc][];
        for (int i = 0; i < plproc; ++i)
        {
            bktarys[i] = new int[3];
            bktarys[i][0] = bktsprun * i + 1;
            bktarys[i][1] = Math.Min(bktsprun * (i + 1), nbuckets);
            bktarys[i][2] = nbuckets;
            // Capture loop-local copies so each task closes over stable values.
            var j = i;
            var cLocal = c.ToList();
            var gLocal = g.ToList();
            t[i] = Task.Run(() => { PNRdyn.Run(mts, prec, bktarys[j], vp, v, uBkts, cLocal, gLocal); });
        }
        // Buckets above the processed range have the trivial (known) value 1.00.
        if (bktarys[plproc - 1][1] < nbuckets)
        {
            for (int b = bktsprun * plproc + 1; b <= nbuckets; ++b) { v[b - 1] = 1.00; }
        }
        // Wait for all threads to finish, then proceed.
        Task.WaitAll(t);
        // Free temporary memory allocations and reused containers.
        // (A duplicated "bktarys[i] = null;" statement was removed here.)
        for (int i = 0; i < plproc; ++i) { bktarys[i] = null; }
        uBkts.Clear();
        c.Clear();
        g.Clear();
        // Copy this timepoint's probabilities into vp[] for the next (earlier) iteration,
        // verifying monotonicity and the [0, ~1] range as we go.
        cont = 1;
        var prevprob = 0.00;
        for (int b = 1; b <= nbuckets; ++b)
        {
            vp[b - 1] = v[b - 1];
            // Probabilities must be non-decreasing (within 1e-15) and not exceed ~1.00.
            if (vp[b - 1] < prevprob - 1e-15 || vp[b - 1] > 1.00 + 2.00 * Math.Pow(0.1, 16))
            {
                Trace.WriteLine("");
                Trace.WriteLine($"There is an issue with the probabilities derived at this timepoint (t={y}), see below:");
                if (b > 1)
                {
                    Trace.Write("Vp[");
                    Trace.Write(b - 2);
                    Trace.Write("] = ");
                    Trace.WriteLine(vp[b - 2]);
                }
                Trace.Write("Vp[");
                Trace.Write(b - 1);
                Trace.Write("] = ");
                Trace.WriteLine(vp[b - 1]);
                Trace.WriteLine("EXITING...ThrdPNRdyn()...");
                Console.Read();
                Environment.Exit(1);
            }
            prevprob = vp[b - 1];
            // Record bucket numbers whose probabilities are unique (plus the endpoints).
            if (b == 1 || b != nbuckets && Math.Abs(v[b - 1] - v[b]) > 1e-15 && cont == 1 || (b == nbuckets))
            {
                uBkts.Add(b);
            }
            // Once PRuin=1 stop collecting unique buckets.
            if (cont == 1 && vp[b - 1] >= 1.00) { cont = 0; }
        }
        // The last bucket at this timepoint must carry a ruin probability of 1.00;
        // otherwise the reserve-factor grid (RFMax) is too small.
        if (vp[nbuckets - 1] < 1.00)
        {
            Trace.Write("Timepoint (t=");
            Trace.Write(y);
            Trace.Write("), has V[");
            Trace.Write(nbuckets - 1);
            Trace.Write("]=");
            Trace.Write(v[nbuckets - 1]);
            Trace.WriteLine(", which is < 1.00.");
            Trace.WriteLine("(Increase RFMax.)");
            Trace.WriteLine("EXITING...ThrdPNRdyn()...");
            Console.Read();
            Environment.Exit(1);
        }
    }
    // Process the final timepoint only for the given RF0 bucket.
    mts[0] = Funcs.m(prms, a[0]);
    mts[1] = Funcs.mp(prms);
    mts[2] = Funcs.v(prms, a[0]);
    mts[3] = Funcs.vp(prms, a[0]);
    mts[4] = Funcs.vpp(prms);
    mts[5] = Funcs.mva(prms);
    mts[6] = Funcs.kh1(prms, a[0]);
    int rf0Bkt = (int)(rf0 * prec + 0.5); // round rf0 to its bucket index
    c = GetConst.Run(mts, partls, 0);
    g = GetGamma.Run(partls, 0);
    int[] fnlbkts = { rf0Bkt, rf0Bkt, nbuckets };
    PNRdyn.Run(mts, prec, fnlbkts, vp, v, uBkts, c, g);
    // Retrieve the probability to return: the single PNR derived by the DP.
    double rtprob = 1.00 - v[rf0Bkt - 1];
    return (rtprob);
}
/// <summary>
/// Unloads the test AppDomain, tracing the current domain's friendly name
/// before and after the unload.
/// </summary>
private void Cleanup()
{
    Action traceCurrentDomain = () => Trace.WriteLine(AppDomain.CurrentDomain.FriendlyName);
    traceCurrentDomain();
    AppDomain.Unload(this.testDomain);
    traceCurrentDomain();
}
/// <summary>
/// Builds an SVG document tree of type <typeparamref name="T"/> from an XML reader.
/// Elements are assembled with an explicit stack; collected &lt;style&gt; contents are
/// parsed as CSS afterward and applied to matching elements, then styles are flushed.
/// Per-node parse errors are traced and skipped rather than aborting the whole load.
/// </summary>
private static T Open<T>(XmlReader reader) where T : SvgDocument, new()
{
    if (!SkipGdiPlusCapabilityCheck)
    {
        EnsureSystemIsGdiPlusCapable(); //Validate whether the GDI+ can be loaded, this will yield an exception if not
    }
    var elementStack = new Stack<SvgElement>();
    bool elementEmpty;
    SvgElement element = null;
    SvgElement parent;
    T svgDocument = null;
    var elementFactory = new SvgElementFactory();
    var styles = new List<ISvgNode>();
    while (reader.Read())
    {
        try
        {
            switch (reader.NodeType)
            {
                case XmlNodeType.Element:
                    // Does this element have a value or children
                    // (Must do this check here before we progress to another node)
                    elementEmpty = reader.IsEmptyElement;
                    // Create element: the first element encountered becomes the document root.
                    if (elementStack.Count > 0)
                    {
                        element = elementFactory.CreateElement(reader, svgDocument);
                    }
                    else
                    {
                        svgDocument = elementFactory.CreateDocument<T>(reader);
                        element = svgDocument;
                    }
                    // Add to the parents children
                    if (elementStack.Count > 0)
                    {
                        parent = elementStack.Peek();
                        if (parent != null && element != null)
                        {
                            parent.Children.Add(element);
                            parent.Nodes.Add(element);
                        }
                    }
                    // Push element into stack
                    elementStack.Push(element);
                    // Need to process if the element is empty: fall through to the
                    // EndElement handling so self-closing tags are popped immediately.
                    if (elementEmpty)
                    {
                        goto case XmlNodeType.EndElement;
                    }
                    break;
                case XmlNodeType.EndElement:
                    // Pop the element out of the stack
                    element = elementStack.Pop();
                    // Concatenate any text/CDATA children into the element's Content.
                    if (element.Nodes.OfType<SvgContentNode>().Any())
                    {
                        element.Content = (from e in element.Nodes select e.Content).Aggregate((p, c) => p + c);
                    }
                    else
                    {
                        element.Nodes.Clear(); // No sense wasting the space where it isn't needed
                    }
                    // Remember <style> elements (parsed as unknown) for CSS processing below.
                    var unknown = element as SvgUnknownElement;
                    if (unknown != null && unknown.ElementName == "style")
                    {
                        styles.Add(unknown);
                    }
                    break;
                case XmlNodeType.CDATA:
                case XmlNodeType.Text:
                    // Attach text content to the element currently open on the stack.
                    element = elementStack.Peek();
                    element.Nodes.Add(new SvgContentNode() { Content = reader.Value });
                    break;
                case XmlNodeType.EntityReference:
                    reader.ResolveEntity();
                    element = elementStack.Peek();
                    element.Nodes.Add(new SvgContentNode() { Content = reader.Value });
                    break;
            }
        }
        catch (Exception exc)
        {
            // Best-effort parse: trace the failure for this node and keep reading.
            Trace.TraceError(exc.Message);
        }
    }
    if (styles.Any())
    {
        // Combine all collected <style> bodies and parse them as one stylesheet.
        var cssTotal = styles.Select((s) => s.Content).Aggregate((p, c) => p + Environment.NewLine + c);
        var cssParser = new Parser();
        var sheet = cssParser.Parse(cssTotal);
        foreach (var rule in sheet.StyleRules)
        {
            try
            {
                // Wrap the document in a non-SVG root so selectors can match the root element too.
                var rootNode = new NonSvgElement();
                rootNode.Children.Add(svgDocument);
                var elemsToStyle = rootNode.QuerySelectorAll(rule.Selector.ToString(), elementFactory);
                foreach (var elem in elemsToStyle)
                {
                    foreach (var decl in rule.Declarations)
                    {
                        elem.AddStyle(decl.Name, decl.Term.ToString(), rule.Selector.GetSpecificity());
                    }
                }
            }
            catch (Exception ex)
            {
                // A bad selector/declaration only skips that rule, not the whole sheet.
                Trace.TraceWarning(ex.Message);
            }
        }
    }
    svgDocument?.FlushStyles(true);
    return svgDocument;
}
/// <summary>
/// Instantiate new job with a data source(id OR name to be passed)
/// </summary>
/// <param name="dataSourceId">The data source id (id OR name is required)</param>
/// <param name="dataSourceName">The data source name (id OR name is required)</param>
/// <param name="processingBy">Processing by user name</param>
public Job(int dataSourceId, string dataSourceName, string processingBy)
{
    this.ProcessingBy = processingBy;
    this.Rows = new List<Row>(); //these list are in this case thread OK, because we add from individual splitted job return items
    this.Errors = new List<string>();
    this.Warnings = new List<string>();
    this.BadDataInCSVFormat = new List<string>();
    this.StartedAt = DateTime.Now;
    this.JobIdentifier = ShortGuid.NewGuid();
    this.ContainerData = new ConcurrentDictionary<string, object>();
    this.CSVRows = new List<string>();
    this.PerformanceCounter = new PerformanceCounter();
    if ((dataSourceId == 0) && (string.IsNullOrEmpty(dataSourceName)))
    {
        return; //Invalid dummy job
    }
    this.DataSource = new DataSource(dataSourceId, dataSourceName);
    if (!this.DataSource.IsValid)
    {
        string errorMessage = string.Format("Could not create job as data source was not valid. Data source id was {0} and name was {1}",
            dataSourceId, string.IsNullOrEmpty(dataSourceName) ? "<Unknown>" : dataSourceName);
        Trace.TraceError(errorMessage);
        return; //we need not to do anything here as job is automatically invalid
    }
    // The three pluggable components follow the same configured-type-or-default pattern;
    // the instantiation logic lives in one helper instead of three copy-pasted blocks.
    this.DataSource.OutputWriter = (OutputWriter)CreateConfiguredOrDefaultInstance(
        this.DataSource.OutputWriterType, "Symplus.RuleEngine.Services.OutputWriterGeneric");
    this.DataSource.DataContainerValidator = (DataContainerValidator)CreateConfiguredOrDefaultInstance(
        this.DataSource.DataContainerValidatorType, "Symplus.RuleEngine.Services.DataContainerValidatorGeneric");
    this.DataSource.PlugIns = (PlugIns)CreateConfiguredOrDefaultInstance(
        this.DataSource.PlugInsType, "Symplus.RuleEngine.Services.PlugInsGeneric");
    this.SqlClientManager = new SQLClientManager(this.DefaultConnectionString, this.DefaultConnectionType);
    this.Parameters = new Parameters(this.DataSource.Id, this.DataSource.Name,
        this.DataSource.Key(SreKeyTypes.GenerateParametersFromDatabase), this.SqlClientManager);
    Trace.TraceInformation(Environment.NewLine);
    Trace.TraceInformation("New job '{0}' created.", this.JobIdentifier);
}

/// <summary>
/// Creates an instance of the configured component type, falling back to the given
/// default type name when no type is configured or the configured type cannot be
/// resolved. The new instance is constructed with this job as its single ctor argument.
/// </summary>
/// <param name="configuredTypeName">Assembly-qualified type name from the data source config; may be null/empty.</param>
/// <param name="defaultTypeName">Type name of the generic fallback implementation.</param>
/// <returns>The created component instance.</returns>
private object CreateConfiguredOrDefaultInstance(string configuredTypeName, string defaultTypeName)
{
    bool configuredTypeUsable = !string.IsNullOrEmpty(configuredTypeName) && Type.GetType(configuredTypeName) != null;
    string typeName = configuredTypeUsable ? configuredTypeName : defaultTypeName;
    return Activator.CreateInstance(Type.GetType(typeName), this);
}
/// <summary>
/// Rebuilds this card's UI: a two-row grid whose bottom row holds the center-frequency,
/// name and stream-direction labels, and whose top row is a custom canvas drawing one
/// rectangle per channel, positioned proportionally within a 200-unit-wide frequency
/// window centered on the card's center frequency. Guarded by the _rects lock because
/// the rectangle list is shared with the canvas callbacks.
/// </summary>
private void RefreshUi()
{
    lock (_rects)
    {
        _rects.Clear();
        var grid = new Grid();
        grid.CreateRows("50*", "50*");
        // Bottom row: center frequency (centered), card name (left), direction glyph (right).
        grid.Cell().Row(1)
            .AddUi(new Label()
            {
                Content = _uiCard.CenterFrequency,
                FontWeight = FontWeights.Black,
                HorizontalAlignment = HorizontalAlignment.Center,
                Foreground = Application.Current.Resources["SpectrumCardTextBrush"] as Brush
            })
            .AddUi(new Label()
            {
                Content = _uiCard.Name,
                HorizontalAlignment = HorizontalAlignment.Left,
                Foreground = Application.Current.Resources["SpectrumCardNameBrush"] as Brush
            })
            .AddUi(new Label()
            {
                //This is a shortcut to display the stream direction without picture using a webding character
                FontFamily = new FontFamily("Webdings"),
                FontSize = 16,
                Content = (_uiCard.StreamDirection == StreamDirectionEnum.Down) ? "6" : "5",
                HorizontalAlignment = HorizontalAlignment.Right,
                Foreground = Application.Current.Resources["SpectrumCardNameBrush"] as Brush
            });
        // Top row: canvas with two callbacks — the first creates one rectangle per channel,
        // the second (re-run on layout) sizes and positions each rectangle.
        grid.Cell().Row(0).AddUi(new CustomCanvas(
            canvas =>
            {
                int i = 0;
                Trace.WriteLine(StringExt.Format("Create: {0} - {1}", _uiCard.Name, _uiCard.Channels.Count));
                foreach (var channel in _uiCard.Channels)
                {
                    var rect = new Rectangle()
                    {
                        Fill = ChannelColors.GetChannelColor(channel),
                        Margin = new Thickness(0, 3, 0, 3),
                        Stroke = Application.Current.Resources["SpectrumCardChannelStateBorderBrush"] as Brush
                    };
                    _rects.Add(rect);
                    canvas.Children.Add(rect);
                    i += 1;
                }
                return (true);
            },
            (c) =>
            {
                int i = 0;
                foreach (var channel in _uiCard.Channels)
                {
                    Trace.WriteLine(StringExt.Format("Positioning : {0} - {1}", _uiCard.Name, _uiCard.Channels.Count));
                    // Map frequency to pixels: the canvas spans 200 frequency units starting
                    // at (CenterFrequency - 100); px is the channel's left edge, size its width.
                    var px = c.ActualWidth / 200 * (channel.CenterFreq - (_uiCard.CenterFrequency - 100) - (channel.Bandwidth / 2));
                    var size = c.ActualWidth / 200 * channel.Bandwidth;
                    (c.Children[i] as Rectangle).Width = size;
                    (c.Children[i] as Rectangle).Height = c.ActualHeight;
                    Canvas.SetLeft(c.Children[i], px);
                    Canvas.SetTop(c.Children[i], 0);
                    i += 1;
                    Trace.WriteLine(StringExt.Format("Positioning : {0} - {1}", _uiCard.Name, px));
                }
            }));
        Trace.WriteLine(StringExt.Format("{0} - {1}", _uiCard.Name, _uiCard.Channels.Count));
        Child = grid;
        // Collapse the canvas row when the card is rendered too small to show channels.
        if (ActualHeight < 50)
        {
            grid.RowDefinitions[0].Height = new GridLength(0);
        }
        InvalidateVisual();
    }
}
/// <summary>
/// Re-enumerates the list of skins. Assumes a folder layout of System/*****/Graphics (and Sounds/).
/// If, after re-enumeration, the currently selected skin path (strSystemSkinSubfolderFullName)
/// no longer exists, it is reset to the first existing candidate in this priority order:
/// 1. System/Default/
/// 2. the first System/*****/ folder enumerated
/// 3. System/ (legacy-compatible)
/// </summary>
// NOTE(review): the '*'-prefixed lines below appear to sit inside a block comment whose
// /* and */ delimiters are embedded in the surrounding region-label text — confirm
// against the original file before editing this body; it is reproduced verbatim here.
public void ReloadSkinPaths()
{
    #region [ まず System /*** をenumerateする ]
    * string[] tempSkinSubfolders = System.IO.Directory.GetDirectories( strSystemSkinRoot, "*" );
    * strSystemSkinSubfolders = new string[ tempSkinSubfolders.Length ];
    * int size = 0;
    * for ( int i = 0; i < tempSkinSubfolders.Length; i++ )
    * {
    #region [ 検出したフォルダがスキンフォルダかどうか確認する]
    * if ( !bIsValid( tempSkinSubfolders[ i ] ) )
    * continue;
    #endregion
    #region [ スキンフォルダと確認できたものを、strSkinSubfoldersに入れる ]
    * // フォルダ名末尾に必ず\をつけておくこと。さもないとConfig読み出し側(必ず\をつける)とマッチできない
    * if ( tempSkinSubfolders[ i ][ tempSkinSubfolders[ i ].Length - 1 ] != System.IO.Path.DirectorySeparatorChar )
    * {
    * tempSkinSubfolders[ i ] += System.IO.Path.DirectorySeparatorChar;
    * }
    * strSystemSkinSubfolders[ size ] = tempSkinSubfolders[ i ];
    * Trace.TraceInformation( "SkinPath検出: {0}", strSystemSkinSubfolders[ size ] );
    * size++;
    #endregion
    * }
    * Trace.TraceInformation( "SkinPath入力: {0}", strSystemSkinSubfolderFullName );
    * Array.Resize( ref strSystemSkinSubfolders, size );
    * Array.Sort( strSystemSkinSubfolders ); // BinarySearch実行前にSortが必要
    #endregion
    *
    #region [ 現在のSkinパスがbox.defスキンをCONFIG指定していた場合のために、最初にこれが有効かチェックする。有効ならこれを使う。 ]
    * if ( bIsValid( strSystemSkinSubfolderFullName ) &&
    * Array.BinarySearch( strSystemSkinSubfolders, strSystemSkinSubfolderFullName,
    * StringComparer.InvariantCultureIgnoreCase ) < 0 )
    * {
    * strBoxDefSkinSubfolders = new string[ 1 ]{ strSystemSkinSubfolderFullName };
    * return;
    * }
    #endregion
    *
    #region [ 次に、現在のSkinパスが存在するか調べる。あれば終了。]
    * if ( Array.BinarySearch( strSystemSkinSubfolders, strSystemSkinSubfolderFullName,
    * StringComparer.InvariantCultureIgnoreCase ) >= 0 )
    * return;
    #endregion
    #region [ 
    カレントのSkinパスが消滅しているので、以下で再設定する。]
    * /// 以下の優先順位で現在使用中のSkinパスを再設定する。
    * /// 1. System/Default/
    * /// 2. System/*****/で最初にenumerateされたもの /// 3. System/ (従来互換)
    #region [ System/Default/ があるなら、そこにカレントSkinパスを設定する]
    string tempSkinPath_default = System.IO.Path.Combine(strSystemSkinRoot, "Default" + System.IO.Path.DirectorySeparatorChar);
    if (Array.BinarySearch(strSystemSkinSubfolders, tempSkinPath_default, StringComparer.InvariantCultureIgnoreCase) >= 0)
    {
        strSystemSkinSubfolderFullName = tempSkinPath_default;
        return;
    }
    #endregion
    #region [ System/SkinFiles.*****/ で最初にenumerateされたものを、カレントSkinパスに再設定する ]
    if (strSystemSkinSubfolders.Length > 0)
    {
        strSystemSkinSubfolderFullName = strSystemSkinSubfolders[0];
        return;
    }
    #endregion
    #region [ System/ に、カレントSkinパスを再設定する。]
    strSystemSkinSubfolderFullName = strSystemSkinRoot;
    strSystemSkinSubfolders = new string[1] { strSystemSkinSubfolderFullName };
    #endregion
    #endregion
}
/// <summary>Writes the given message to the diagnostic trace listeners.</summary>
/// <param name="msg">Message text to emit.</param>
public void SendMessage(string msg) => Trace.WriteLine(msg);
/// <summary>
/// Traces the message of any exception that reaches the WPF dispatcher unhandled.
/// The event is deliberately not marked handled, so default propagation still occurs.
/// </summary>
void App_DispatcherUnhandledException(object sender, DispatcherUnhandledExceptionEventArgs e)
{
    var message = e.Exception.Message;
    Trace.WriteLine(message);
}
/// <summary>
/// The main thread of the ftp server.
/// Listens on the port configured by the "FTP" app setting and accepts clients in a loop,
/// creating a handler for each connection. Connections beyond the client limit are
/// rejected; the loop ends when the listener socket fails.
/// </summary>
private void ThreadRun()
{
    FtpServerMessageHandler.Message += TraceMessage;
    // listen at the port given by the "FTP" endpoint setting
    int port = int.Parse(ConfigurationManager.AppSettings["FTP"]);
    System.Net.IPAddress ipaddr = SocketHelpers.GetLocalAddress();
    System.Net.IPEndPoint ipEndPoint = new System.Net.IPEndPoint(ipaddr.Address, port);
    FtpServer.m_ftpIpAddr = ipaddr.ToString();
    m_socketListen = SocketHelpers.CreateTcpListener(ipEndPoint);
    if (m_socketListen != null)
    {
        // (Typo fixes: missing space after the period, and "Warnning" -> "Warning" below.)
        string msg = string.Format("FTP Server started. Listening to: {0}", ipEndPoint);
        FtpServer.LogWrite(msg);
        Trace.TraceInformation(msg);
        m_socketListen.Start();
        bool fContinue = true;
        while (fContinue)
        {
            TcpClient socket = null;
            try
            {
                socket = m_socketListen.AcceptTcpClient();
            }
            catch (SocketException)
            {
                fContinue = false;
            }
            finally
            {
                // NOTE: the per-connection handling lives in this finally block, so it also
                // runs (with socket == null, stopping the loop) if Accept throws something
                // other than SocketException before that exception propagates.
                if (socket == null)
                {
                    fContinue = false;
                }
                else if (m_apConnections.Count >= m_maxClients)
                {
                    Trace.WriteLine("Too many clients, won't handle this connection", "Warning");
                    SendRejectMessage(socket);
                    socket.Close();
                }
                else
                {
                    socket.NoDelay = false;
                    m_nId++;
                    FtpServerMessageHandler.SendMessage(m_nId, "New connection");
                    SendAcceptMessage(socket);
                    // 2015-11-25 cljung : under stress testing, this happens. Don't know why yet, but let's keep it from crashing
                    try
                    {
                        InitialiseSocketHandler(socket);
                    }
                    catch (System.ObjectDisposedException ode)
                    {
                        Trace.TraceError(string.Format("ObjectDisposedException initializing client socket:\r\n{0}", ode));
                        m_nId--;
                        // can't fail
                        try { socket.Close(); }
                        catch { }
                    }
                }
            }
        }
    }
    else
    {
        FtpServerMessageHandler.SendMessage(0, "Error in starting FTP server");
    }
}