/// <summary>
/// Format the given object in the invariant culture. This static method may be
/// imported in C# by
/// <code>
/// using static System.FormattableString;
/// </code>.
/// Within the scope of that import directive an interpolated string may be
/// formatted in the invariant culture by writing, for example,
/// <code>
/// Invariant($"{{ lat = {latitude}; lon = {longitude} }}")
/// </code>
/// </summary>
/// <param name="formattable">The interpolated string to format; must not be null.</param>
/// <returns>The string produced by formatting with the invariant culture.</returns>
/// <exception cref="ArgumentNullException"><paramref name="formattable"/> is null.</exception>
public static string Invariant(FormattableString formattable)
{
    if (formattable == null)
    {
        // nameof keeps the reported parameter name in sync if it is ever renamed.
        throw new ArgumentNullException(nameof(formattable));
    }

    return formattable.ToString(Globalization.CultureInfo.InvariantCulture);
}
/// <summary>
/// Renders the interpolated string through the lower-casing format provider.
/// </summary>
private static string Script(FormattableString formattable)
{
    var provider = new LowerCaseFormatProvider();
    return formattable.ToString(provider);
}
/// <summary> /// Creates a new QueryBuilder over current connection /// </summary>
/// <param name="cnn"></param>
/// <param name="query">You can use "{where}" or "/**where**/" in your query, and it will be replaced by "WHERE + filters" (if any filter is defined). <br />
/// You can use "{filters}" or "/**filters**/" in your query, and it will be replaced by "filters" (without where) (if any filter is defined).
/// </param>
public static QueryBuilder QueryBuilder(this IDbConnection cnn, FormattableString query)
    => new QueryBuilder(cnn, query);
/// <summary>
/// Writes the legacy (osu!stable) text encoding of a slider path to <paramref name="writer"/>:
/// segment-type markers ("B|", "C|", "P|", "L|"), "x:y" control points, then the repeat count,
/// expected distance, and per-node hit sounds / sample banks when the path has repeats.
/// </summary>
/// <param name="writer">Destination for the encoded text.</param>
/// <param name="pathData">Object whose path is being encoded.</param>
/// <param name="position">Offset added to each control point to get absolute coordinates.</param>
private void addPathData(TextWriter writer, IHasPath pathData, Vector2 position)
{
    // Type of the most recently emitted explicit segment; null until the first one.
    PathType? lastType = null;

    for (int i = 0; i < pathData.Path.ControlPoints.Count; i++)
    {
        PathControlPoint point = pathData.Path.ControlPoints[i];

        if (point.Type != null)
        {
            // We've reached a new (explicit) segment!
            // Explicit segments have a new format in which the type is injected into the middle of the control point string.
            // To preserve compatibility with osu-stable as much as possible, explicit segments with the same type are converted
            // to use implicit segments by duplicating the control point.
            // One exception are consecutive perfect curves, which aren't supported in osu!stable and can lead to decoding issues
            // if encoded as implicit segments.
            bool needsExplicitSegment = point.Type != lastType || point.Type == PathType.PerfectCurve;

            // Another exception to this is when the last two control points of the last segment were duplicated.
            // This is not a scenario supported by osu!stable. Lazer does not add implicit segments for the last two control
            // points of _any_ explicit segment, so an explicit segment is forced in order to maintain consistency with the decoder.
            if (i > 1)
            {
                // We need to use the absolute control point position to determine equality, otherwise floating point issues may arise.
                Vector2 p1 = position + pathData.Path.ControlPoints[i - 1].Position;
                Vector2 p2 = position + pathData.Path.ControlPoints[i - 2].Position;

                // Integer truncation mirrors the precision the legacy format stores.
                if ((int)p1.X == (int)p2.X && (int)p1.Y == (int)p2.Y)
                {
                    needsExplicitSegment = true;
                }
            }

            if (needsExplicitSegment)
            {
                switch (point.Type)
                {
                    case PathType.Bezier:
                        writer.Write("B|");
                        break;

                    case PathType.Catmull:
                        writer.Write("C|");
                        break;

                    case PathType.PerfectCurve:
                        writer.Write("P|");
                        break;

                    case PathType.Linear:
                        writer.Write("L|");
                        break;
                }

                lastType = point.Type;
            }
            else
            {
                // New segment with the same type - duplicate the control point
                writer.Write(FormattableString.Invariant($"{position.X + point.Position.X}:{position.Y + point.Position.Y}|"));
            }
        }

        // The first point is the slider head and is encoded elsewhere, so it is skipped here.
        if (i != 0)
        {
            writer.Write(FormattableString.Invariant($"{position.X + point.Position.X}:{position.Y + point.Position.Y}"));
            writer.Write(i != pathData.Path.ControlPoints.Count - 1 ? "|" : ",");
        }
    }

    var curveData = pathData as IHasPathWithRepeats;

    // Legacy format stores "span count" = repeats + 1.
    writer.Write(FormattableString.Invariant($"{(curveData?.RepeatCount ?? 0) + 1},"));
    // Prefer the author-specified distance; fall back to the computed path distance.
    writer.Write(FormattableString.Invariant($"{pathData.Path.ExpectedDistance.Value ?? pathData.Path.Distance},"));

    if (curveData != null)
    {
        // Per-node hit sound bitmasks, pipe-separated, terminated with a comma.
        for (int i = 0; i < curveData.NodeSamples.Count; i++)
        {
            writer.Write(FormattableString.Invariant($"{(int)toLegacyHitSoundType(curveData.NodeSamples[i])}"));
            writer.Write(i != curveData.NodeSamples.Count - 1 ? "|" : ",");
        }

        // Per-node sample banks in the same pipe-separated layout.
        for (int i = 0; i < curveData.NodeSamples.Count; i++)
        {
            writer.Write(getSampleBank(curveData.NodeSamples[i], true));
            writer.Write(i != curveData.NodeSamples.Count - 1 ? "|" : ",");
        }
    }
}
/// <summary>
/// Adds the Cache-Control and Pragma HTTP headers by applying the specified cache profile to the HTTP context.
/// </summary>
/// <param name="context">The HTTP context.</param>
/// <param name="cacheProfile">The cache profile.</param>
/// <returns>The same HTTP context.</returns>
/// <exception cref="System.ArgumentNullException">context or cacheProfile.</exception>
public static HttpContext ApplyCacheProfile(this HttpContext context, CacheProfile cacheProfile)
{
    if (context is null)
    {
        throw new ArgumentNullException(nameof(context));
    }

    if (cacheProfile is null)
    {
        throw new ArgumentNullException(nameof(cacheProfile));
    }

    var responseHeaders = context.Response.Headers;

    if (!string.IsNullOrEmpty(cacheProfile.VaryByHeader))
    {
        responseHeaders[HeaderNames.Vary] = cacheProfile.VaryByHeader;
    }

    if (cacheProfile.NoStore == true)
    {
        if (cacheProfile.Location == ResponseCacheLocation.None)
        {
            // Cache-control: no-store, no-cache is valid.
            responseHeaders[HeaderNames.CacheControl] = NoStoreNoCache;
            responseHeaders[HeaderNames.Pragma] = NoCache;
        }
        else
        {
            responseHeaders[HeaderNames.CacheControl] = NoStore;
        }

        return context;
    }

    // Cacheable response: build the max-age directive from the profile duration.
    var maxAgeSeconds = cacheProfile.Duration.GetValueOrDefault().ToString(CultureInfo.InvariantCulture);
    string headerValue;

    switch (cacheProfile.Location)
    {
        case ResponseCacheLocation.Any:
            headerValue = PublicMaxAge + maxAgeSeconds;
            break;

        case ResponseCacheLocation.Client:
            headerValue = PrivateMaxAge + maxAgeSeconds;
            break;

        case ResponseCacheLocation.None:
            headerValue = NoCacheMaxAge + maxAgeSeconds;
            responseHeaders[HeaderNames.Pragma] = NoCache;
            break;

        default:
            var exception = new NotImplementedException(FormattableString.Invariant($"Unknown {nameof(ResponseCacheLocation)}: {cacheProfile.Location}"));
            Debug.Fail(exception.ToString());
            throw exception;
    }

    responseHeaders[HeaderNames.CacheControl] = headerValue;
    return context;
}
// A null formattable must be rejected with ArgumentNullException naming the parameter.
public static void Invariant_Null_ThrowsArgumentNullException()
{
    Action act = () => FormattableString.Invariant(null);
    AssertExtensions.Throws<ArgumentNullException>("formattable", act);
}
/// <summary>
/// Builds the 100k-scale sheet name from the south-west corner coordinates.
/// NOTE(review): assumes inputs land on the half-degree grid so the arithmetic
/// yields whole numbers — confirm with callers.
/// </summary>
private static string Get100kSheetName(double minLongitude, double minLatitude)
{
    var columnPart = minLongitude * 2 - 40;
    var rowPart = minLatitude * 2 - 10;
    return FormattableString.Invariant($"{columnPart}{rowPart}");
}
/// <inheritdoc />
public override string ToString()
{
    var v = this.ToVector4();
    return FormattableString.Invariant($"BGR32_UINT({v.Z:#0.##}, {v.Y:#0.##}, {v.X:#0.##})");
}
// Emits the message as an Error-level trace event, formatted with the invariant culture.
// A null message degrades to a null payload rather than throwing.
private static void WriteLine(this TraceSource trace, FormattableString message)
{
    Contract.Requires(trace != null);

    var text = message?.ToString(CultureInfo.InvariantCulture);
    trace.TraceEvent(TraceEventType.Error, 0, text);
}
/// <summary>Raw-SQL querying is not implemented yet.</summary>
public IQueryable<T> Query(FormattableString command)
{
    // Placeholder: intentionally unimplemented.
    throw new NotImplementedException();
}
/// <inheritdoc/>
public override string ToString()
{
    return FormattableString.Invariant($"CieLchuv({this.L:#0.##}, {this.C:#0.##}, {this.H:#0.##})");
}
/// <inheritdoc />
public override string ToString() =>
    FormattableString.Invariant($"R32G32_UINT({this.R}, {this.G})");
/// <summary>
/// Copies well-known AI context tags from the raw <paramref name="telemetryItem"/> onto the
/// strongly-typed context of <paramref name="telemetry"/> (component, cloud, device, location,
/// operation, session, internal and user sections). Unknown tags are logged and skipped, and a
/// default SDK version is applied when no tag supplied one.
/// </summary>
private static void PopulateContext(Telemetry telemetryItem, ITelemetry telemetry)
{
    foreach (var tag in telemetryItem.Tags)
    {
        if (string.Equals(tag.Key, AiTelemetryConverter.TagKeys.ApplicationVersion, StringComparison.InvariantCultureIgnoreCase))
        {
            telemetry.Context.Component.Version = tag.Value;
        }
        else if (string.Equals(tag.Key, AiTelemetryConverter.TagKeys.CloudRole, StringComparison.InvariantCultureIgnoreCase))
        {
            telemetry.Context.Cloud.RoleName = tag.Value;
        }
        else if (string.Equals(tag.Key, AiTelemetryConverter.TagKeys.CloudRoleInstance, StringComparison.InvariantCultureIgnoreCase))
        {
            telemetry.Context.Cloud.RoleInstance = tag.Value;
        }
        else if (string.Equals(tag.Key, AiTelemetryConverter.TagKeys.DeviceId, StringComparison.InvariantCultureIgnoreCase))
        {
            telemetry.Context.Device.Id = tag.Value;
        }
        else if (string.Equals(tag.Key, AiTelemetryConverter.TagKeys.DeviceLocale, StringComparison.InvariantCultureIgnoreCase))
        {
            // Recognized but intentionally dropped — no matching target field is written here.
        }
        else if (string.Equals(tag.Key, AiTelemetryConverter.TagKeys.DeviceModel, StringComparison.InvariantCultureIgnoreCase))
        {
            telemetry.Context.Device.Model = tag.Value;
        }
        else if (string.Equals(tag.Key, AiTelemetryConverter.TagKeys.DeviceOEMName, StringComparison.InvariantCultureIgnoreCase))
        {
            telemetry.Context.Device.OemName = tag.Value;
        }
        else if (string.Equals(tag.Key, AiTelemetryConverter.TagKeys.DeviceOSVersion, StringComparison.InvariantCultureIgnoreCase))
        {
            telemetry.Context.Device.OperatingSystem = tag.Value;
        }
        else if (string.Equals(tag.Key, AiTelemetryConverter.TagKeys.DeviceType, StringComparison.InvariantCultureIgnoreCase))
        {
            telemetry.Context.Device.Type = tag.Value;
        }
        else if (string.Equals(tag.Key, AiTelemetryConverter.TagKeys.InternalAgentVersion, StringComparison.InvariantCultureIgnoreCase))
        {
            telemetry.Context.GetInternalContext().AgentVersion = tag.Value;
        }
        else if (string.Equals(tag.Key, AiTelemetryConverter.TagKeys.InternalNodeName, StringComparison.InvariantCultureIgnoreCase))
        {
            telemetry.Context.GetInternalContext().NodeName = tag.Value;
        }
        else if (string.Equals(tag.Key, AiTelemetryConverter.TagKeys.InternalSdkVersion, StringComparison.InvariantCultureIgnoreCase))
        {
            // Prefixes the incoming SDK version with "lf_" — presumably marking converted
            // telemetry; TODO confirm the prefix semantics with the ingestion pipeline.
            telemetry.Context.GetInternalContext().SdkVersion = string.Concat("lf_", tag.Value);
        }
        else if (string.Equals(tag.Key, AiTelemetryConverter.TagKeys.LocationIp, StringComparison.InvariantCultureIgnoreCase))
        {
            telemetry.Context.Location.Ip = tag.Value;
        }
        else if (string.Equals(tag.Key, AiTelemetryConverter.TagKeys.OperationCorrelationVector, StringComparison.InvariantCultureIgnoreCase))
        {
            // Recognized but intentionally dropped — no field on the context receives it.
        }
        else if (string.Equals(tag.Key, AiTelemetryConverter.TagKeys.OperationId, StringComparison.InvariantCultureIgnoreCase))
        {
            telemetry.Context.Operation.Id = tag.Value;
        }
        else if (string.Equals(tag.Key, AiTelemetryConverter.TagKeys.OperationName, StringComparison.InvariantCultureIgnoreCase))
        {
            telemetry.Context.Operation.Name = tag.Value;
        }
        else if (string.Equals(tag.Key, AiTelemetryConverter.TagKeys.OperationParentId, StringComparison.InvariantCultureIgnoreCase))
        {
            telemetry.Context.Operation.ParentId = tag.Value;
        }
        else if (string.Equals(tag.Key, AiTelemetryConverter.TagKeys.OperationSyntheticSource, StringComparison.InvariantCultureIgnoreCase))
        {
            telemetry.Context.Operation.SyntheticSource = tag.Value;
        }
        else if (string.Equals(tag.Key, AiTelemetryConverter.TagKeys.SessionId, StringComparison.InvariantCultureIgnoreCase))
        {
            telemetry.Context.Session.Id = tag.Value;
        }
        else if (string.Equals(tag.Key, AiTelemetryConverter.TagKeys.SessionIsFirst, StringComparison.InvariantCultureIgnoreCase))
        {
            // NOTE(review): IsFirst is forced to false regardless of tag.Value — this looks
            // like it should parse the tag value instead; confirm whether this is intentional.
            telemetry.Context.Session.IsFirst = false;
        }
        else if (string.Equals(tag.Key, AiTelemetryConverter.TagKeys.UserAccountId, StringComparison.InvariantCultureIgnoreCase))
        {
            telemetry.Context.User.AccountId = tag.Value;
        }
        else if (string.Equals(tag.Key, AiTelemetryConverter.TagKeys.UserAgent, StringComparison.InvariantCultureIgnoreCase))
        {
            telemetry.Context.User.UserAgent = tag.Value; //TODO: Deprecated???
        }
        else if (string.Equals(tag.Key, AiTelemetryConverter.TagKeys.UserAuthUserId, StringComparison.InvariantCultureIgnoreCase))
        {
            telemetry.Context.User.AuthenticatedUserId = tag.Value;
        }
        else if (string.Equals(tag.Key, AiTelemetryConverter.TagKeys.UserId, StringComparison.InvariantCultureIgnoreCase))
        {
            telemetry.Context.User.Id = tag.Value;
        }
        else
        {
            // unknown tag, log and ignore
            Diagnostics.LogTrace(FormattableString.Invariant($"Unknown tag. Ignoring. {tag.Key}"));
        }
    }

    // Fall back to the default SDK version when no InternalSdkVersion tag populated it.
    if (string.IsNullOrEmpty(telemetry.Context.GetInternalContext().SdkVersion))
    {
        telemetry.Context.GetInternalContext().SdkVersion = DefaultSdkVersion;
    }
}
/// <summary>Formats the interpolated string using the invariant culture.</summary>
public static string AsInvariantString(this FormattableString formattable)
{
    return System.FormattableString.Invariant(formattable);
}
/// <inheritdoc />
public override readonly string ToString()
{
    var components = this.ToVector2();
    return FormattableString.Invariant($"Rg32({components.X:#0.##}, {components.Y:#0.##})");
}
// Handles the server's channel-encryption request: validates the handshake parameters,
// generates a temporary session key, encrypts it (plus the server's random challenge when
// one is supplied) with the universe's public RSA key, installs the matching net filter,
// and sends the encryption response back to the server.
void HandleEncryptRequest( IPacketMsg packetMsg )
{
    var request = new Msg<MsgChannelEncryptRequest>( packetMsg );

    var connectedUniverse = request.Body.Universe;
    var protoVersion = request.Body.ProtocolVersion;

    log.LogDebug( nameof(EnvelopeEncryptedConnection), "Got encryption request. Universe: {0} Protocol ver: {1}", connectedUniverse, protoVersion );
    DebugLog.Assert( protoVersion == 1, nameof(EnvelopeEncryptedConnection), "Encryption handshake protocol version mismatch!" );
    DebugLog.Assert( connectedUniverse == universe, nameof(EnvelopeEncryptedConnection), FormattableString.Invariant( $"Expected universe {universe} but server reported universe {connectedUniverse}" ) );

    // A payload of at least 16 bytes carries a random challenge; its presence decides
    // whether the HMAC-capable filter is used later.
    byte[]? randomChallenge;
    if ( request.Payload.Length >= 16 )
    {
        randomChallenge = request.Payload.ToArray();
    }
    else
    {
        randomChallenge = null;
    }

    var publicKey = KeyDictionary.GetPublicKey( connectedUniverse );

    if ( publicKey == null )
    {
        // No key known for this universe: abort the handshake and drop the connection.
        log.LogDebug( nameof(EnvelopeEncryptedConnection), "HandleEncryptRequest got request for invalid universe! Universe: {0} Protocol ver: {1}", connectedUniverse, protoVersion );
        Disconnect( userInitiated: false );
        return;
    }

    var response = new Msg<MsgChannelEncryptResponse>();

    // 32-byte ephemeral session key for this connection.
    var tempSessionKey = CryptoHelper.GenerateRandomBlock( 32 );
    byte[] encryptedHandshakeBlob;

    using ( var rsa = new RSACrypto( publicKey ) )
    {
        if ( randomChallenge != null )
        {
            // Challenge present: encrypt session key concatenated with the challenge.
            var blobToEncrypt = new byte[ tempSessionKey.Length + randomChallenge.Length ];
            Array.Copy( tempSessionKey, blobToEncrypt, tempSessionKey.Length );
            Array.Copy( randomChallenge, 0, blobToEncrypt, tempSessionKey.Length, randomChallenge.Length );

            encryptedHandshakeBlob = rsa.Encrypt( blobToEncrypt );
        }
        else
        {
            // Legacy path: encrypt the bare session key.
            encryptedHandshakeBlob = rsa.Encrypt( tempSessionKey );
        }
    }

    var keyCrc = CryptoHelper.CRCHash( encryptedHandshakeBlob );

    // Response layout: encrypted blob, CRC of the blob, trailing zero terminator.
    response.Write( encryptedHandshakeBlob );
    response.Write( keyCrc );
    response.Write( ( uint )0 );

    // The filter must match what the server expects: HMAC variant only when a challenge was sent.
    if (randomChallenge != null)
    {
        encryption = new NetFilterEncryptionWithHMAC( tempSessionKey, log );
    }
    else
    {
        encryption = new NetFilterEncryption( tempSessionKey, log );
    }

    var serialized = response.Serialize();

    try
    {
        // Debug listener is best-effort; its failures must not break the handshake.
        debugNetworkListener?.OnOutgoingNetworkMessage( response.MsgType, serialized );
    }
    catch ( Exception e )
    {
        log.LogDebug( nameof( EnvelopeEncryptedConnection ), "DebugNetworkListener threw an exception: {0}", e );
    }

    // Mark the handshake as challenged before the response goes out.
    state = EncryptionState.Challenged;
    Send( serialized );
}
/// <summary>
/// Sends all tracked new and changed work items to Azure DevOps in a single
/// work-item batch request. In dry-run mode (<paramref name="commit"/> false)
/// the request body is only logged, not sent.
/// </summary>
/// <param name="commit">True to actually POST the batch; false for dry-run.</param>
/// <param name="cancellationToken">Propagated to the HTTP calls.</param>
/// <returns>Counts of work items queued for creation and for update.</returns>
/// <exception cref="InvalidOperationException">Any batch element or the HTTP call fails.</exception>
private async Task <(int created, int updated)> SaveChanges_Batch(bool commit, CancellationToken cancellationToken)
{
    // see https://github.com/redarrowlabs/vsts-restapi-samplecode/blob/master/VSTSRestApiSamples/WorkItemTracking/Batch.cs
    // and https://docs.microsoft.com/en-us/rest/api/vsts/wit/workitembatchupdate?view=vsts-rest-4.1
    // BUG this code won't work if there is a relation between a new (id<0) work item and an existing one (id>0): it is an API limit
    const string ApiVersion = "api-version=4.1";

    int created = _context.Tracker.NewWorkItems.Count();
    int updated = _context.Tracker.ChangedWorkItems.Count();

    string baseUriString = _context.Client.BaseAddress.AbsoluteUri;
    BatchRequest[] batchRequests = new BatchRequest[created + updated];
    Dictionary <string, string> headers = new Dictionary <string, string> {
        { "Content-Type", "application/json-patch+json" }
    };
    // PAT is sent via HTTP Basic auth with an empty user name.
    string credentials = Convert.ToBase64String(Encoding.ASCII.GetBytes($":{_context.PersonalAccessToken}"));

    int index = 0;

    // Creations first: POST-style PATCH against the work item type endpoint.
    foreach (var item in _context.Tracker.NewWorkItems)
    {
        _context.Logger.WriteInfo($"Found a request for a new {item.WorkItemType} workitem in {item.TeamProject}");

        batchRequests[index++] = new BatchRequest
        {
            method = "PATCH",
            uri = $"/{item.TeamProject}/_apis/wit/workitems/${item.WorkItemType}?{ApiVersion}",
            headers = headers,
            // Test operations are local-only assertions and must not be sent.
            body = item.Changes
                   .Where(c => c.Operation != Microsoft.VisualStudio.Services.WebApi.Patch.Operation.Test)
                   .ToArray()
        };
    }

    // Then updates, addressed by existing work item id.
    foreach (var item in _context.Tracker.ChangedWorkItems)
    {
        _context.Logger.WriteInfo($"Found a request to update workitem {item.Id.Value} in {item.TeamProject}");

        batchRequests[index++] = new BatchRequest
        {
            method = "PATCH",
            uri = FormattableString.Invariant($"/_apis/wit/workitems/{item.Id.Value}?{ApiVersion}"),
            headers = headers,
            body = item.Changes
                   .Where(c => c.Operation != Microsoft.VisualStudio.Services.WebApi.Patch.Operation.Test)
                   .ToArray()
        };
    }

    var converters = new JsonConverter[] { new JsonPatchOperationConverter() };
    string requestBody = JsonConvert.SerializeObject(batchRequests, Formatting.Indented, converters);
    _context.Logger.WriteVerbose(requestBody);

    if (commit)
    {
        using (var client = new HttpClient())
        {
            client.DefaultRequestHeaders.Accept.Clear();
            client.DefaultRequestHeaders.Accept.Add(new MediaTypeWithQualityHeaderValue("application/json"));
            client.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue("Basic", credentials);

            var batchRequest = new StringContent(requestBody, Encoding.UTF8, "application/json");
            var method = new HttpMethod("POST");

            // send the request
            var request = new HttpRequestMessage(method, $"{baseUriString}/_apis/wit/$batch?{ApiVersion}") { Content = batchRequest };
            var response = await client.SendAsync(request, cancellationToken);

            if (response.IsSuccessStatusCode)
            {
                WorkItemBatchPostResponse batchResponse = await response.Content.ReadAsAsync <WorkItemBatchPostResponse>(cancellationToken);
                string stringResponse = JsonConvert.SerializeObject(batchResponse, Formatting.Indented);
                _context.Logger.WriteVerbose(stringResponse);

                // A 200 on the envelope does not guarantee each element succeeded:
                // inspect every element's status code individually.
                bool succeeded = true;
                foreach (var batchElement in batchResponse.values)
                {
                    if (batchElement.code != 200)
                    {
                        _context.Logger.WriteError($"Save failed: {batchElement.body}");
                        succeeded = false;
                    }
                }

                if (!succeeded)
                {
                    throw new InvalidOperationException("Save failed.");
                }
            }
            else
            {
                string stringResponse = await response.Content.ReadAsStringAsync();
                _context.Logger.WriteError($"Save failed: {stringResponse}");
                throw new InvalidOperationException($"Save failed: {response.ReasonPhrase}.");
            }
        }//using
    }
    else
    {
        _context.Logger.WriteWarning($"Dry-run mode: no updates sent to Azure DevOps.");
    }//if

    return(created, updated);
}
/// <summary>
/// Exports the kill log info.
/// Builds a plain-text report: kill time, victim details, each attacker ordered by
/// damage dealt (with extra fields for player attackers), then destroyed and dropped items.
/// </summary>
/// <param name="killLog">The kill log to export; null yields an empty string.</param>
/// <returns>The formatted report text.</returns>
private static string ExportKillLogInfo(KillLog killLog)
{
    if (killLog == null)
    {
        return(String.Empty);
    }

    StringBuilder sb = new StringBuilder();

    // Victim header. Numeric fields use the invariant culture so the export is locale-stable.
    sb
        .AppendLine(killLog.KillTime.DateTimeToDotFormattedString())
        .AppendLine()
        .AppendLine($"Victim: {killLog.Victim.Name}")
        .AppendLine($"Corp: {killLog.Victim.CorporationName}")
        .AppendLine($"Alliance: {killLog.Victim.AllianceName}")
        .AppendLine($"Faction: {killLog.Victim.FactionName}")
        .AppendLine($"Destroyed: {killLog.Victim.ShipTypeName}")
        .AppendLine($"System: {killLog.SolarSystem?.Name}")
        .AppendLine(FormattableString.Invariant($"Security: {killLog.SolarSystem?.SecurityLevel:N1}"))
        .AppendLine(FormattableString.Invariant($"Damage Taken: {killLog.Victim.DamageTaken:N}"));

    sb.AppendLine();
    sb.AppendLine("Involved parties:");
    sb.AppendLine();

    // Attackers are listed from highest to lowest damage dealt.
    foreach (SerializableKillLogAttackersListItem attacker in killLog.Attackers.OrderByDescending(x => x.DamageDone))
    {
        // Append info for NPC or player entities
        // An empty name indicates an NPC; identify it by ship/corp instead.
        if (String.IsNullOrEmpty(attacker.Name))
        {
            sb.Append($"Name: {attacker.ShipTypeName} / {attacker.CorporationName}");
        }
        else
        {
            sb.Append($"Name: {attacker.Name}");
        }

        if (attacker.FinalBlow)
        {
            sb.Append(" (laid the final blow)");
        }

        sb.AppendLine();

        // Append info only for player entities
        if (!String.IsNullOrEmpty(attacker.Name))
        {
            sb
                .AppendLine(FormattableString.Invariant($"Security: {attacker.SecurityStatus:N1}"))
                .AppendLine($"Corp: {attacker.CorporationName}")
                .AppendLine(
                    $"Alliance: {(attacker.AllianceName == EveMonConstants.UnknownText ? "None" : attacker.AllianceName)}")
                .AppendLine(
                    $"Faction: {(attacker.FactionName == EveMonConstants.UnknownText ? "None" : attacker.FactionName)}")
                .AppendLine($"Ship: {attacker.ShipTypeName}")
                .AppendLine($"Weapon: {attacker.WeaponTypeName}");
        }

        sb
            .AppendLine(FormattableString.Invariant($"Damage Done: {attacker.DamageDone:N}"))
            .AppendLine();
    }

    // Item sections appear only when at least one item matches.
    if (killLog.Items.Any(x => x.QtyDestroyed != 0))
    {
        sb.AppendLine("Destroyed items:");
        sb.AppendLine();
        AppendDestroyedItems(sb, killLog.Items.Where(x => x.QtyDestroyed != 0));
        sb.AppendLine();
    }

    if (killLog.Items.Any(x => x.QtyDropped != 0))
    {
        sb.AppendLine("Dropped items:");
        sb.AppendLine();
        AppendDroppedItems(sb, killLog.Items.Where(x => x.QtyDropped != 0));
        sb.AppendLine();
    }

    sb.AppendLine("<-- Generated by EVEMon -->");

    return(sb.ToString());
}
/// <summary>
/// Saves tracked work items in two batch calls: phase one creates the new work items
/// (with relation operations stripped), then the temporary negative ids are remapped to
/// the server-assigned ids; phase two sends the remaining changes (including relations)
/// for all work items. This works around the batch API failing on relations between a
/// new (id&lt;0) and an existing (id&gt;0) work item.
/// </summary>
/// <param name="commit">True to actually send the batches; false for dry-run (handled by the proxy).</param>
/// <param name="cancellationToken">Propagated to the batch calls.</param>
/// <returns>Counts of work items queued for creation and for update.</returns>
private async Task <(int created, int updated)> SaveChanges_TwoPhases(bool commit, CancellationToken cancellationToken)
{
    // see https://github.com/redarrowlabs/vsts-restapi-samplecode/blob/master/VSTSRestApiSamples/WorkItemTracking/Batch.cs
    // and https://docs.microsoft.com/en-us/rest/api/vsts/wit/workitembatchupdate?view=vsts-rest-4.1
    // The workitembatchupdate API has a huge limit:
    // it fails adding a relation between a new (id<0) work item and an existing one (id>0)
    var proxy = new BatchProxy(_context, commit);

    Dictionary <string, string> headers = new Dictionary <string, string> {
        { "Content-Type", "application/json-patch+json" }
    };

    int created = _context.Tracker.NewWorkItems.Count();
    int updated = _context.Tracker.ChangedWorkItems.Count();

    // Phase 1: create new work items without relations.
    BatchRequest[] newWorkItemsBatchRequests = new BatchRequest[created];
    int index = 0;

    foreach (var item in _context.Tracker.NewWorkItems)
    {
        _context.Logger.WriteInfo($"Found a request for a new {item.WorkItemType} workitem in {item.TeamProject}");

        newWorkItemsBatchRequests[index++] = new BatchRequest
        {
            method = "PATCH",
            uri = $"/{item.TeamProject}/_apis/wit/workitems/${item.WorkItemType}?{proxy.ApiVersion}",
            headers = headers,
            body = item.Changes
                   .Where(c => c.Operation != Microsoft.VisualStudio.Services.WebApi.Patch.Operation.Test)
                   // remove relations as we might incour in API failure
                   .Where(c => !string.Equals(c.Path, "/relations/-", StringComparison.Ordinal))
                   .ToArray()
        };
    }

    var batchResponse = await proxy.InvokeAsync(newWorkItemsBatchRequests, cancellationToken);

    if (batchResponse != null)
    {
        _context.Logger.WriteVerbose($"Updating work item ids...");

        // Fix back
        // Map the temporary (negative) ids to the ids the server assigned.
        var realIds = new Dictionary <int, int>();
        index = 0;

        foreach (var item in _context.Tracker.NewWorkItems)
        {
            int oldId = item.Id.Value;
            // the response order matches the request order
            string createdWorkitemJson = batchResponse.values[index++].body;
            dynamic createdWorkitemResult = JsonConvert.DeserializeObject(createdWorkitemJson);
            int newId = createdWorkitemResult.id;
            item.ReplaceIdAndResetChanges(item.Id.Value, newId);
            realIds.Add(oldId, newId);
        }

        // Changed work items may reference the temporary ids in relations: rewrite them.
        foreach (var item in _context.Tracker.ChangedWorkItems)
        {
            item.RemapIdReferences(realIds);
        }
    }

    // Phase 2: send remaining changes (including relations) for all tracked items.
    var batchRequests = new List <BatchRequest>();
    var allWorkItems = _context.Tracker.NewWorkItems.Concat(_context.Tracker.ChangedWorkItems);

    foreach (var item in allWorkItems)
    {
        var changes = item.Changes
                      .Where(c => c.Operation != Microsoft.VisualStudio.Services.WebApi.Patch.Operation.Test);

        if (changes.Any())
        {
            _context.Logger.WriteInfo($"Found a request to update workitem {item.Id.Value} in {_context.ProjectName}");

            batchRequests.Add(new BatchRequest
            {
                method = "PATCH",
                uri = FormattableString.Invariant($"/_apis/wit/workitems/{item.Id.Value}?{proxy.ApiVersion}"),
                headers = headers,
                body = changes.ToArray()
            });
        }
    }

    // return value not used, we are fine if no exception is thrown
    await proxy.InvokeAsync(batchRequests.ToArray(), cancellationToken);

    return(created, updated);
}
/// <summary>
/// Identity helper that forces an interpolated string literal to bind as
/// <see cref="FormattableString"/> rather than <see cref="string"/>.
/// </summary>
public static FormattableString FS(FormattableString formattableString)
    => formattableString;
/// <inheritdoc />
public override string ToString()
{
    var xy = this.ToVector2();
    return FormattableString.Invariant($"NormalizedShort2({xy.X:#0.##}, {xy.Y:#0.##})");
}
// A null formattable must be rejected with ArgumentNullException naming the parameter.
public static void CurrentCulture_Null_ThrowsArgumentNullException()
{
    Action act = () => FormattableString.CurrentCulture(null);
    AssertExtensions.Throws<ArgumentNullException>("formattable", act);
}
/// <summary>
/// Interactive menu loop: shows the sample options, runs the chosen presentation
/// sample (Prepare / Run / Cleanup), reports the elapsed time of the Run phase,
/// and repeats until the user enters "Q".
/// </summary>
/// <param name="args">Unused command-line arguments.</param>
public static async Task Main(string[] args)
{
    do
    {
        Console.Clear();
        Console.WriteLine(Resources.OptionsToChoose);

        var chosenOption = ReadChosenOption();

        if ("Q" == chosenOption)
        {
            return;
        }

        // Map the menu code to its sample; null means the input was not recognized.
        IPresentationSample presentationSample = chosenOption switch
        {
            "1A" => new PresentaionSample1Async(),
            "1B" => new PresentaionSample1NoAsync(),
            "2A" => new PresentationSample2(false),
            "2B" => new PresentationSample2(true),
            "3A" => new PresentaionSample3A(),
            "3B" => new PresentaionSample3B(),
            "3C" => new PresentaionSample3C(),
            "3D" => new PresentaionSample3D(),
            "4A" => new PresentationSample4A(),
            "4B" => new PresentationSample4B(),
            "5A" => new PresentationSample5A(),
            "5B" => new PresentationSample5B(),
            _ => null
        };

        if (presentationSample == null)
        {
            Console.ForegroundColor = ConsoleColor.Yellow;
            Console.WriteLine($"Unrecognized option {chosenOption}");
            // Short pause so the warning is visible before the menu is redrawn.
            await Task.Delay(1000);
            Console.ResetColor();
            continue;
        }

        try
        {
            Console.Clear();
            Console.ForegroundColor = ConsoleColor.Green;
            Console.WriteLine($"Chosen Option {chosenOption}");
            Console.ResetColor();

            Stopwatch stopwatch = new Stopwatch();

            Console.WriteLine("**** Prepare ****");
            await presentationSample.Prepare();

            // Only the Run phase is timed; Prepare/Cleanup are excluded.
            Console.WriteLine("**** Run ****");
            stopwatch.Start();
            await presentationSample.Run();
            stopwatch.Stop();

            Console.WriteLine("**** Cleanup ****");
            await presentationSample.Cleanup();

            Console.WriteLine();
            Console.ForegroundColor = ConsoleColor.Green;
            Console.WriteLine(FormattableString.Invariant($"DONE in {stopwatch.Elapsed}"));
        }
        catch (Exception ex)
        {
            Console.WriteLine();
            Console.ForegroundColor = ConsoleColor.Red;
            Console.WriteLine(ex);
        }
        finally
        {
            Console.ResetColor();
        }

        // Fixed prompt: the original said "... or Q and ENTER to continue."
        Console.WriteLine("Press ENTER to continue or Q and ENTER to quit.");
        chosenOption = ReadChosenOption();

        if ("Q" == chosenOption)
        {
            return;
        }
    } while (true);
}
/// <summary>
/// Writes the [TimingPoints] section in the legacy beatmap format. Timing groups produce a
/// primary (uninherited) line plus a secondary (inherited) line; difficulty and sample control
/// points attached to hit objects are first lifted into the control point list, and for
/// non-osu! rulesets scroll speed is re-encoded as legacy "slider velocity".
/// </summary>
private void handleControlPoints(TextWriter writer)
{
    if (beatmap.ControlPointInfo.Groups.Count == 0)
    {
        return;
    }

    // Work on a deep-cloned copy so the source beatmap is not mutated.
    var legacyControlPoints = new LegacyControlPointInfo();
    foreach (var point in beatmap.ControlPointInfo.AllControlPoints)
    {
        legacyControlPoints.Add(point.Time, point.DeepClone());
    }

    writer.WriteLine("[TimingPoints]");

    SampleControlPoint lastRelevantSamplePoint = null;
    DifficultyControlPoint lastRelevantDifficultyPoint = null;

    bool isOsuRuleset = beatmap.BeatmapInfo.RulesetID == 0;

    // iterate over hitobjects and pull out all required sample and difficulty changes
    extractDifficultyControlPoints(beatmap.HitObjects);
    extractSampleControlPoints(beatmap.HitObjects);

    // handle scroll speed, which is stored as "slider velocity" in legacy formats.
    // this is relevant for scrolling ruleset beatmaps.
    if (!isOsuRuleset)
    {
        foreach (var point in legacyControlPoints.EffectPoints)
        {
            legacyControlPoints.Add(point.Time, new DifficultyControlPoint { SliderVelocity = point.ScrollSpeed });
        }
    }

    foreach (var group in legacyControlPoints.Groups)
    {
        var groupTimingPoint = group.ControlPoints.OfType <TimingControlPoint>().FirstOrDefault();

        // If the group contains a timing control point, it needs to be output separately.
        if (groupTimingPoint != null)
        {
            writer.Write(FormattableString.Invariant($"{groupTimingPoint.Time},"));
            writer.Write(FormattableString.Invariant($"{groupTimingPoint.BeatLength},"));
            outputControlPointAt(groupTimingPoint.Time, true);
        }

        // Output any remaining effects as secondary non-timing control point.
        var difficultyPoint = legacyControlPoints.DifficultyPointAt(group.Time);
        writer.Write(FormattableString.Invariant($"{group.Time},"));
        // Legacy inherited points encode velocity as a negative percentage of -100.
        writer.Write(FormattableString.Invariant($"{-100 / difficultyPoint.SliderVelocity},"));
        outputControlPointAt(group.Time, false);
    }

    // Writes the shared tail of a timing point line: time signature, sample bank,
    // custom bank index, volume, uninherited flag and effect flags.
    void outputControlPointAt(double time, bool isTimingPoint)
    {
        var samplePoint = legacyControlPoints.SamplePointAt(time);
        var effectPoint = legacyControlPoints.EffectPointAt(time);

        // Apply the control point to a hit sample to uncover legacy properties (e.g. suffix)
        HitSampleInfo tempHitSample = samplePoint.ApplyTo(new ConvertHitObjectParser.LegacyHitSampleInfo(string.Empty));

        // Convert effect flags to the legacy format
        LegacyEffectFlags effectFlags = LegacyEffectFlags.None;
        if (effectPoint.KiaiMode)
        {
            effectFlags |= LegacyEffectFlags.Kiai;
        }
        if (effectPoint.OmitFirstBarLine)
        {
            effectFlags |= LegacyEffectFlags.OmitFirstBarLine;
        }

        writer.Write(FormattableString.Invariant($"{(int)legacyControlPoints.TimingPointAt(time).TimeSignature},"));
        writer.Write(FormattableString.Invariant($"{(int)toLegacySampleBank(tempHitSample.Bank)},"));
        writer.Write(FormattableString.Invariant($"{toLegacyCustomSampleBank(tempHitSample)},"));
        writer.Write(FormattableString.Invariant($"{tempHitSample.Volume},"));
        writer.Write(FormattableString.Invariant($"{(isTimingPoint ? '1' : '0')},"));
        writer.Write(FormattableString.Invariant($"{(int)effectFlags}"));
        writer.WriteLine();
    }

    // Recursively yields difficulty points from hit objects (osu! ruleset only).
    IEnumerable <DifficultyControlPoint> collectDifficultyControlPoints(IEnumerable <HitObject> hitObjects)
    {
        if (!isOsuRuleset)
        {
            yield break;
        }

        foreach (var hitObject in hitObjects)
        {
            yield return(hitObject.DifficultyControlPoint);

            foreach (var nested in collectDifficultyControlPoints(hitObject.NestedHitObjects))
            {
                yield return(nested);
            }
        }
    }

    // Adds non-redundant difficulty points, in time order, to the legacy list.
    void extractDifficultyControlPoints(IEnumerable <HitObject> hitObjects)
    {
        foreach (var hDifficultyPoint in collectDifficultyControlPoints(hitObjects).OrderBy(dp => dp.Time))
        {
            if (!hDifficultyPoint.IsRedundant(lastRelevantDifficultyPoint))
            {
                legacyControlPoints.Add(hDifficultyPoint.Time, hDifficultyPoint);
                lastRelevantDifficultyPoint = hDifficultyPoint;
            }
        }
    }

    // Recursively yields sample points from hit objects (all rulesets).
    IEnumerable <SampleControlPoint> collectSampleControlPoints(IEnumerable <HitObject> hitObjects)
    {
        foreach (var hitObject in hitObjects)
        {
            yield return(hitObject.SampleControlPoint);

            foreach (var nested in collectSampleControlPoints(hitObject.NestedHitObjects))
            {
                yield return(nested);
            }
        }
    }

    // Adds non-redundant sample points, in time order, to the legacy list.
    void extractSampleControlPoints(IEnumerable <HitObject> hitObject)
    {
        foreach (var hSamplePoint in collectSampleControlPoints(hitObject).OrderBy(sp => sp.Time))
        {
            if (!hSamplePoint.IsRedundant(lastRelevantSamplePoint))
            {
                legacyControlPoints.Add(hSamplePoint.Time, hSamplePoint);
                lastRelevantSamplePoint = hSamplePoint;
            }
        }
    }
}
/// <summary>
/// Validates and enqueues a subscribe-contact message on the bus, then logs the dispatch
/// with the identifying ids of the message.
/// </summary>
/// <param name="message">The message to enqueue; must not be null.</param>
public void Subscribe(SubscribeContactMessage message)
{
    Condition.Requires(message, nameof(message)).IsNotNull();

    _subscribeContactMessagesBus.Send(message, null);

    // Fixed doubled punctuation in the original log template ("message. . ManagerRootId").
    _logger.LogDebug(FormattableString.Invariant(FormattableStringFactory.Create(
        "[BUS] Queued subscribe contact message. ManagerRootId '{0}', RecipientListId '{1}', ContactIdentifier '{2}'.",
        message.ManagerRootId, message.RecipientListId, message.ContactIdentifier?.Identifier)));
}
// Creates a ride storage backed by a raw interpolated SQL query. FromSqlInterpolated
// turns the interpolation holes into DB parameters, so callers can embed values safely.
public RawSqlRideStorage(DbContext context, FormattableString sql) : base(context, context.Set <TDataModel>().FromSqlInterpolated(sql)) { }
// Server-side SQL marker: must only appear inside translated queries and is never
// executed locally, hence the unconditional throw.
public static T Expr <T>( [DataExtensions.SqlFormattableComparer] FormattableString sql )
    => throw new LinqToDBException("'Sql.Expr' is server side only method and used only for generating custom SQL parts");
/// <summary> /// Creates a new CommandBuilder over current connection /// </summary>
/// <param name="cnn"></param>
/// <param name="command">SQL command</param>
public static CommandBuilder CommandBuilder(this IDbConnection cnn, FormattableString command)
    => new CommandBuilder(cnn, command);
/// <summary>
/// Serializes this definition to a key/value pair: the key is the name followed by
/// "Name-Type" segments for each parameter, joined with "--"; the value is the description.
/// </summary>
public KeyValuePair <string, string> Serialize()
{
    var segments = new List <string> { Name };
    segments.AddRange(Parameters.Select(p => FormattableString.Invariant($"{p.Name}-{p.Type}")));
    return new KeyValuePair <string, string>(string.Join("--", segments), Description);
}
// Requests a driving route from the current position to the destination via Azure Maps,
// fills the static `path` array (start + route points + destination) and `timeOnPath`
// (seconds per section at the route's average speed), then transitions to `newState`.
// NOTE(review): `.Result` blocks synchronously on the async call — acceptable in this
// sample's static context, but a deadlock risk if moved into a sync-context app.
static void GetRoute(StateEnum newState)
{
    // Set the state to ready, until the new route arrives.
    state = StateEnum.ready;

    // Query format: "startLat,startLon:endLat,endLon" (invariant so '.' is the decimal separator).
    var req = new RouteRequestDirections
    {
        Query = FormattableString.Invariant($"{currentLat},{currentLon}:{destinationLat},{destinationLon}")
    };
    var directions = azureMapsServices.GetRouteDirections(req).Result;

    if (directions.Error != null || directions.Result == null)
    {
        // Handle any error.
        redMessage("Failed to find map route");
    }
    else
    {
        int nPoints = directions.Result.Routes[0].Legs[0].Points.Length;
        greenMessage($"Route found. Number of points = {nPoints}");

        // Clear the path. Add two points for the start point and destination.
        path = new double[nPoints + 2, 2];
        int c = 0;

        // Start with the current location.
        path[c, 0] = currentLat;
        path[c, 1] = currentLon;
        ++c;

        // Retrieve the route and push the points onto the array.
        for (var n = 0; n < nPoints; n++)
        {
            var x = directions.Result.Routes[0].Legs[0].Points[n].Latitude;
            var y = directions.Result.Routes[0].Legs[0].Points[n].Longitude;
            path[c, 0] = x;
            path[c, 1] = y;
            ++c;
        }

        // Finish with the destination.
        path[c, 0] = destinationLat;
        path[c, 1] = destinationLon;

        // Store the path length and time taken, to calculate the average speed.
        var meters = directions.Result.Routes[0].Summary.LengthInMeters;
        var seconds = directions.Result.Routes[0].Summary.TravelTimeInSeconds;
        var pathSpeed = meters / seconds;

        double distanceApartInMeters;
        double timeForOneSection;

        // Clear the time on path array. The path array is 1 less than the points array.
        timeOnPath = new double[nPoints + 1];

        // Calculate how much time is required for each section of the path.
        for (var t = 0; t < nPoints + 1; t++)
        {
            // Calculate distance between the two path points, in meters.
            distanceApartInMeters = DistanceInMeters(path[t, 0], path[t, 1], path[t + 1, 0], path[t + 1, 1]);

            // Calculate the time for each section of the path.
            timeForOneSection = distanceApartInMeters / pathSpeed;
            timeOnPath[t] = timeForOneSection;
        }

        // Reset progress tracking for the new trip.
        truckOnSection = 0;
        truckSectionsCompletedTime = 0;
        timeOnCurrentTask = 0;

        // Update the state now the route has arrived. One of: enroute or returning.
        state = newState;
    }
}
/// <summary>
/// Returns a string that describes the type, dimensions and shape of this matrix.
/// </summary>
public virtual string ToTypeString()
{
    var matrixTypeName = GetType().Name;
    var elementTypeName = typeof(T).Name;
    return FormattableString.Invariant($"{matrixTypeName} {RowCount}x{ColumnCount}-{elementTypeName}");
}