/// <summary> /// Update stream name. /// </summary> /// <param name="streamName">Stream name</param>
public void UpdateName(string streamName)
{
  // NOTE(review): fire-and-forget — the returned Task is neither awaited nor observed,
  // so server-side failures and exceptions are silently dropped. Confirm this is intentional.
  apiClient.StreamUpdateAsync(apiClient.StreamId, new SpeckleStream() { Name = streamName });
  // Optimistically update the locally cached stream regardless of whether the server call succeeds.
  apiClient.Stream.Name = streamName;
}
// Timer callback: pushes a metadata-only update (stream name + layers, no objects) to the server,
// then broadcasts an "update-meta" event so other clients on this stream can refresh.
private void MetadataSender_Elapsed(object sender, ElapsedEventArgs e)
{
  //if (ManualMode)
  //{
  //  return;
  //}
  // we do not need to enque another metadata sending event as the data update superseeds the metadata one.
  if (DataSender.Enabled)
  {
    return;
  }
  ; // NOTE(review): stray empty statement — harmless, can be removed.
  SpeckleStream updateStream = new SpeckleStream() { Name = BucketName, Layers = BucketLayers };
  // NOTE(review): sync-over-async (.Result) inside a timer callback; deadlock risk if a
  // synchronization context is ever captured — consider going async all the way.
  var updateResult = Client.StreamUpdateAsync(Client.StreamId, updateStream).Result;
  //Log += updateResult.Message;
  Client.BroadcastMessage("stream", Client.StreamId, new { eventType = "update-meta" });
}
/// <summary>
/// Renames a stream on the given Speckle server.
/// </summary>
/// <param name="restApi">Base URL of the Speckle REST API.</param>
/// <param name="apiToken">Auth token used for the request.</param>
/// <param name="streamId">Id of the stream to rename.</param>
/// <param name="streamName">New stream name.</param>
/// <param name="messenger">Optional sink for user-facing and technical-log messages; may be null.</param>
/// <returns>True when the server explicitly reports success; false on any failure
/// (failures are reported via <paramref name="messenger"/> when one is supplied).</returns>
public static async Task <bool> UpdateStreamName(string restApi, string apiToken, string streamId, string streamName, ISpeckleAppMessenger messenger)
{
  SpeckleApiClient myClient = new SpeckleApiClient() { BaseUrl = restApi, AuthToken = apiToken };
  try
  {
    var response = await myClient.StreamUpdateAsync(streamId, new SpeckleStream() { Name = streamName });
    // Success is a nullable bool; treat null the same as false.
    return response.Success == true;
  }
  catch (SpeckleException se)
  {
    if (messenger != null)
    {
      messenger.Message(MessageIntent.Display, MessageLevel.Error, "Unable to update stream name for " + streamId);
      var context = new List <string>()
      {
        "Unable to update stream name for " + streamId,
        "StatusCode=" + se.StatusCode,
        "ResponseData=" + se.Response,
        "Message=" + se.Message,
        "BaseUrl=" + restApi
      };
      // Typed Speckle exceptions carry a structured response body worth logging.
      // Pattern match instead of the double cast used previously.
      if (se is SpeckleException <ResponseBase> typedSe && typedSe.Result != null)
      {
        context.Add("ResponseJson=" + typedSe.Result.ToJson());
      }
      messenger.Message(MessageIntent.TechnicalLog, MessageLevel.Error, se, context.ToArray());
    }
    return false;
  }
  catch (Exception ex)
  {
    if (messenger != null)
    {
      messenger.Message(MessageIntent.Display, MessageLevel.Error, "Unable to update stream name for " + streamId);
      messenger.Message(MessageIntent.TechnicalLog, MessageLevel.Error, ex, "Unable to rename stream", "StreamId=" + streamId, "BaseUrl=" + restApi, "StreamName=" + streamName);
    }
    return false;
  }
}
// Timer callback: serialises the current bucket of objects, chunks them into ~500kb payloads,
// uploads each payload, then updates the stream with placeholder references and broadcasts
// an "update-global" event.
private void DataSender_Elapsed(object sender, ElapsedEventArgs e)
{
  if (MetadataSender.Enabled)
  {
    // start the timer again, as we need to make sure we're updating
    DataSender.Start();
    return;
  }
  this.Message = String.Format("Converting {0} \n objects", BucketObjects.Count);
  // Substitute lightweight placeholders for objects whose hash is already in the cache
  // (the server already has them — no need to re-upload).
  var convertedObjects = Converter.Serialise(BucketObjects).Select(obj =>
  {
    if (ObjectCache.ContainsKey(obj.Hash))
    {
      return new SpecklePlaceholder() { Hash = obj.Hash, _id = ObjectCache[obj.Hash]._id };
    }
    return (obj);
  }).ToList();
  this.Message = String.Format("Creating payloads");
  long totalBucketSize = 0;
  long currentBucketSize = 0;
  List <List <SpeckleObject> > objectUpdatePayloads = new List <List <SpeckleObject> >();
  List <SpeckleObject> currentBucketObjects = new List <SpeckleObject>();
  List <SpeckleObject> allObjects = new List <SpeckleObject>(); // NOTE(review): never used below.
  foreach (SpeckleObject convertedObject in convertedObjects)
  {
    long size = Converter.getBytes(convertedObject).Length;
    currentBucketSize += size;
    totalBucketSize += size;
    currentBucketObjects.Add(convertedObject);
    if (currentBucketSize > 5e5) // restrict max to ~500kb; should it be user config? anyway these functions should go into core. at one point.
    {
      Debug.WriteLine("Reached payload limit. Making a new one, current #: " + objectUpdatePayloads.Count);
      objectUpdatePayloads.Add(currentBucketObjects);
      currentBucketObjects = new List <SpeckleObject>();
      currentBucketSize = 0;
    }
  }
  // add the last bucket
  if (currentBucketObjects.Count > 0)
  {
    objectUpdatePayloads.Add(currentBucketObjects);
  }
  Debug.WriteLine("Finished, payload object update count is: " + objectUpdatePayloads.Count + " total bucket size is (kb) " + totalBucketSize / 1000);
  if (objectUpdatePayloads.Count > 100)
  {
    this.AddRuntimeMessage(GH_RuntimeMessageLevel.Error, "This is a humongous update, in the range of ~50mb. For now, create more streams instead of just one massive one! Updates will be faster and snappier, and you can combine them back together at the other end easier.");
    return;
  }
  int k = 0;
  List <ResponseObject> responses = new List <ResponseObject>();
  foreach (var payload in objectUpdatePayloads)
  {
    this.Message = String.Format("Sending payload\n{0} / {1}", k++, objectUpdatePayloads.Count);
    // NOTE(review): sync-over-async upload inside a timer callback; payloads are sent
    // sequentially, blocking this thread for the duration of each request.
    responses.Add(mySender.ObjectCreateAsync(payload).GetAwaiter().GetResult());
  }
  this.Message = "Updating stream...";
  // create placeholders for stream update payload
  List <SpeckleObject> placeholders = new List <SpeckleObject>();
  foreach (var myResponse in responses)
  {
    foreach (var obj in myResponse.Resources)
    {
      placeholders.Add(new SpecklePlaceholder() { _id = obj._id });
    }
  }
  SpeckleStream updateStream = new SpeckleStream() { Layers = BucketLayers, Name = BucketName, Objects = placeholders };
  // set some base properties (will be overwritten)
  var baseProps = new Dictionary <string, object>();
  baseProps["units"] = Rhino.RhinoDoc.ActiveDoc.ModelUnitSystem.ToString();
  baseProps["tolerance"] = Rhino.RhinoDoc.ActiveDoc.ModelAbsoluteTolerance;
  baseProps["angleTolerance"] = Rhino.RhinoDoc.ActiveDoc.ModelAngleToleranceRadians;
  updateStream.BaseProperties = baseProps;
  var response = mySender.StreamUpdateAsync(mySender.StreamId, updateStream);
  mySender.BroadcastMessage(new { eventType = "update-global" });
  // put the objects in the cache
  int l = 0;
  foreach (var obj in placeholders)
  {
    // Keyed by the original object's hash so the next send can substitute placeholders.
    ObjectCache[convertedObjects[l].Hash] = placeholders[l];
    l++;
  }
  // Blocks here until the stream update completes (response.Result).
  Log += response.Result.Message;
  AddRuntimeMessage(GH_RuntimeMessageLevel.Remark, "Data sent at " + DateTime.Now);
  Message = "Data sent\n@" + DateTime.Now.ToString("hh:mm:ss");
}
// NOTE: This is actually triggered when clicking "Push!"
// TODO: Orchestration
// Create buckets, send sequentially, notify ui re upload progress
// NOTE: Problems with local context and cache: we seem to not sucesffuly pass through it
// perhaps we're not storing the right sent object (localcontext.addsentobject)
// Converts the filtered Revit elements, uploads them in ~500kb chunks, then updates the
// stream with placeholders and reports progress/errors to the UI client.
public override void PushSender(string args)
{
  var client = JsonConvert.DeserializeObject <dynamic>(args);
  //if it's a category or property filter we need to refresh the list of objects
  //if it's a selection filter just use the objects that were stored previously
  ISelectionFilter filter = JsonConvert.DeserializeObject(JsonConvert.SerializeObject(client.filter), GetFilterType(client.filter.Type.ToString()));
  IEnumerable <SpeckleObject> objects = new List <SpeckleObject>();
  objects = GetSelectionFilterObjects(filter, client._id.ToString(), client.streamId.ToString());
  var apiClient = new SpeckleApiClient((string)client.account.RestApi) { AuthToken = (string)client.account.Token };
  var convertedObjects = new List <SpeckleObject>();
  var placeholders = new List <SpeckleObject>();
  // e.g. "DUT_MILLIMETERS" -> "millimeters"
  var units = CurrentDoc.Document.GetUnits().GetFormatOptions(UnitType.UT_Length).DisplayUnits.ToString().ToLowerInvariant().Replace("dut_", "");
  InjectScaleInKits(GetScale(units)); // this is used for feet to sane units conversion.
  int i = 0;
  long currentBucketSize = 0;
  var errorMsg = "";
  var failedToConvert = 0;
  var errors = new List <SpeckleError>();
  foreach (var obj in objects)
  {
    // Progress update for the UI: percentage plus a human-readable blurb.
    NotifyUi("update-client", JsonConvert.SerializeObject(new { _id = (string)client._id, loading = true, isLoadingIndeterminate = false, loadingProgress = 1f * i++ / objects.Count() * 100, loadingBlurb = string.Format("Converting and uploading objects: {0} / {1}", i, objects.Count()) }));
    var id = 0;
    Element revitElement = null;
    try
    {
      revitElement = CurrentDoc.Document.GetElement((string)obj.Properties["revitUniqueId"]);
      id = revitElement.Id.IntegerValue;
    }
    catch (Exception e)
    {
      errors.Add(new SpeckleError { Message = "Could not retrieve element", Details = e.Message });
      continue;
    }
    try
    {
      var conversionResult = SpeckleCore.Converter.Serialise(new List <object>() { revitElement });
      var byteCount = Converter.getBytes(conversionResult).Length;
      currentBucketSize += byteCount;
      // Oversized single elements are skipped entirely rather than sent.
      if (byteCount > 2e6)
      {
        errors.Add(new SpeckleError { Message = "Element is too big to be sent", Details = $"Element {id} is bigger than 2MB, it will be skipped" });
        continue;
      }
      convertedObjects.AddRange(conversionResult);
      if (currentBucketSize > 5e5 || i >= objects.Count()) // aim for roughly 500kb uncompressed
      {
        // Drop objects the local cache says the server already has.
        LocalContext.PruneExistingObjects(convertedObjects, apiClient.BaseUrl);
        try
        {
          var chunkResponse = apiClient.ObjectCreateAsync(convertedObjects).Result.Resources;
          int m = 0;
          foreach (var objConverted in convertedObjects)
          {
            objConverted._id = chunkResponse[m++]._id;
            placeholders.Add(new SpecklePlaceholder() { _id = objConverted._id });
            // Remember fully-sent (non-placeholder) objects in the local cache.
            if (objConverted.Type != "Placeholder") { LocalContext.AddSentObject(objConverted, apiClient.BaseUrl); }
          }
        }
        catch (Exception e)
        {
          errors.Add(new SpeckleError { Message = $"Failed to send {convertedObjects.Count} objects", Details = e.Message });
        }
        currentBucketSize = 0;
        convertedObjects = new List <SpeckleObject>(); // reset the chunkness
      }
    }
    catch (Exception e)
    {
      failedToConvert++;
      errors.Add(new SpeckleError { Message = $"Failed to convert {revitElement.Name}", Details = $"Element id: {id}" });
      //NotifyUi("update-client", JsonConvert.SerializeObject(new
      //{
      //  _id = (string)client._id,
      //  errors = "Failed to convert " + failedConvert + " objects."
      //}));
    }
  }
  if (errors.Any())
  {
    if (failedToConvert > 0)
    {
      // NOTE(review): broken composite format — the third argument ("" / "s") is never
      // used because the format string has no {1}; should read "Failed to convert {0} object{1} ".
      errorMsg += string.Format("Failed to convert {0} objects ", failedToConvert, failedToConvert == 1 ? "" : "s");
    }
    else
    {
      errorMsg += string.Format("There {0} {1} error{2} ", errors.Count() == 1 ? "is" : "are", errors.Count(), errors.Count() == 1 ? "" : "s");
    }
    errorMsg += "<nobr>" + Globals.GetRandomSadFace() + "</nobr>";
  }
  var myStream = new SpeckleStream() { Objects = placeholders };
  var ug = UnitUtils.GetUnitGroup(UnitType.UT_Length); // NOTE(review): unused local.
  var baseProps = new Dictionary <string, object>();
  baseProps["units"] = units;
  baseProps["unitsDictionary"] = GetAndClearUnitDictionary();
  myStream.BaseProperties = baseProps;
  //myStream.BaseProperties = JsonConvert.SerializeObject(baseProps);
  NotifyUi("update-client", JsonConvert.SerializeObject(new { _id = (string)client._id, loading = true, isLoadingIndeterminate = true, loadingBlurb = "Updating stream." }));
  // NOTE(review): sync-over-async (.Result) on the UI path.
  var response = apiClient.StreamUpdateAsync((string)client.streamId, myStream).Result;
  var plural = objects.Count() == 1 ? "" : "s";
  NotifyUi("update-client", JsonConvert.SerializeObject(new { _id = (string)client._id, loading = false, loadingBlurb = "", message = $"Done sending {objects.Count()} object{plural}.", errorMsg, errors }));
  SpeckleTelemetry.RecordStreamUpdated("Revit");
}
// Grasshopper solve entry point: outputs log/stream id, decides whether to auto-push,
// wait for a manual push, or (remote-control mode) service the job queue.
protected override void SolveInstance(IGH_DataAccess DA)
{
  if (mySender == null) { return; }
  if (this.EnableRemoteControl) { this.Message = "JobQueue: " + JobQueue.Count; }
  StreamId = mySender.StreamId;
  DA.SetData(0, Log);
  DA.SetData(1, mySender.StreamId);
  if (!mySender.IsConnected) { return; }
  // Skip the very first solve after deserialisation so we don't immediately re-send.
  if (WasSerialised && FirstSendUpdate)
  {
    FirstSendUpdate = false;
    return;
  }
  this.State = "Expired";
  // All flags are good to start an update
  if (!this.EnableRemoteControl && !this.ManualMode)
  {
    UpdateData();
    return;
  }
  // else
  if (!this.EnableRemoteControl && this.ManualMode)
  {
    AddRuntimeMessage(GH_RuntimeMessageLevel.Warning, "State is expired, update push is required.");
    return;
  }
  #region RemoteControl
  // Code below deals with the remote control functionality.
  // Proceed at your own risk.
  if (JobQueue.Count == 0)
  {
    SetDefaultState();
    AddRuntimeMessage(GH_RuntimeMessageLevel.Remark, "Updated default state for remote control.");
    return;
  }
  // prepare solution and exit
  if (!SolutionPrepared && JobQueue.Count != 0)
  {
    System.Collections.DictionaryEntry t = JobQueue.Cast <DictionaryEntry>().ElementAt(0);
    Document.ScheduleSolution(1, PrepareSolution);
    return;
  }
  // send out solution and exit
  if (SolutionPrepared)
  {
    SolutionPrepared = false;
    var BucketObjects = GetData();
    var BucketLayers = GetLayers();
    // Substitute placeholders for objects the server already has (known hashes).
    var convertedObjects = Converter.Serialise(BucketObjects).Select(obj =>
    {
      if (ObjectCache.ContainsKey(obj.Hash))
      {
        return new SpecklePlaceholder() { Hash = obj.Hash, _id = ObjectCache[obj.Hash]._id };
      }
      return (obj);
    });
    // theoretically this should go through the same flow as in DataSenderElapsed(), ie creating
    // buckets for staggered updates, etc. but we're lazy to untangle that logic for now
    // NOTE(review): blocking .Result on the clone call.
    var responseClone = mySender.StreamCloneAsync(this.StreamId).Result;
    var responseStream = new SpeckleStream();
    responseStream.IsComputedResult = true;
    responseStream.Objects = convertedObjects.ToList();
    responseStream.Layers = BucketLayers;
    List <SpeckleInput> speckleInputs = null;
    List <SpeckleOutput> speckleOutputs = null;
    GetSpeckleParams(ref speckleInputs, ref speckleOutputs);
    responseStream.GlobalMeasures = new { input = speckleInputs, output = speckleOutputs };
    // go unblocking
    // Fire-and-continue: once the cloned stream is updated, tell the requesting client.
    var responseCloneUpdate = mySender.StreamUpdateAsync(responseClone.Clone.StreamId, responseStream).ContinueWith(tres =>
    {
      mySender.SendMessage(CurrentJobClient, new { eventType = "compute-response", streamId = responseClone.Clone.StreamId });
    });
    JobQueue.RemoveAt(0);
    this.Message = "JobQueue: " + JobQueue.Count;
    // More jobs pending: expire the component on the UI thread to trigger another solve.
    if (JobQueue.Count != 0) { Rhino.RhinoApp.MainApplicationWindow.Invoke(ExpireComponentAction); }
  }
  #endregion
}
/// <summary>
/// Smoke-tests the stream endpoints against a live server: create, diff, get, get objects,
/// update, get fields, list, clone and delete. Failures are printed, never thrown.
/// </summary>
static async Task TestStreams(SpeckleApiClient myClient)
{
  // Fallback ids, overwritten once the create calls succeed.
  string streamId = "lol";
  string secondStreamId = "hai";

  // Build a couple of nested test objects.
  var firstPoint = new SpecklePoint() { Value = new List <double>() { 1, 2, 3 } };
  var otherPoint = new SpecklePoint() { Value = new List <double>() { 23, 33, 12 } };
  var circle = new SpeckleCircle() { Radius = 21 };

  firstPoint.Properties = new Dictionary <string, object> { { "Really", otherPoint } };
  circle.Properties = new Dictionary <string, object>
  {
    { "a property", "Hello!" },
    { "point", firstPoint }
  };

  SpeckleStream firstStream = new SpeckleStream()
  {
    Name = "Hello World My Little Stream",
    Objects = new List <SpeckleObject>() { circle, firstPoint }
  };
  SpeckleStream secondStream = new SpeckleStream()
  {
    Name = "Second Little Stream",
    Objects = new List <SpeckleObject>() { circle, otherPoint }
  };

  Console.WriteLine();
  try
  {
    Console.WriteLine("Creating a stream.");
    var response = await myClient.StreamCreateAsync(firstStream);
    Console.WriteLine("OK: " + response.Resource.ToJson());
    streamId = response.Resource.StreamId;
  }
  catch (Exception e) { Console.WriteLine(e.Message); }

  Console.WriteLine();
  try
  {
    Console.WriteLine("Creating a second stream.");
    var response = await myClient.StreamCreateAsync(secondStream);
    Console.WriteLine("OK: " + response.Resource.ToJson());
    secondStreamId = response.Resource.StreamId;
  }
  catch (Exception e) { Console.WriteLine(e.Message); }

  Console.WriteLine();
  try
  {
    Console.WriteLine("Diffing two streams!");
    var response = await myClient.StreamDiffAsync(streamId, secondStreamId);
    Console.WriteLine("OK: " + response.Message);
  }
  catch (Exception e) { Console.WriteLine(e.Message); }

  Console.WriteLine();
  try
  {
    Console.WriteLine("Getting a stream.");
    var response = await myClient.StreamGetAsync(streamId, null);
    Console.WriteLine("OK: " + response.Resource.ToJson());
  }
  catch (Exception e) { Console.WriteLine(e.Message); }

  Console.WriteLine();
  try
  {
    Console.WriteLine("Getting a stream's objects.");
    var response = await myClient.StreamGetObjectsAsync(streamId, null);
    Console.WriteLine("OK: " + response.Resources.Count);
  }
  catch (Exception e) { Console.WriteLine(e.Message); }

  Console.WriteLine();
  try
  {
    Console.WriteLine("Updating a stream.");
    var response = await myClient.StreamUpdateAsync(streamId, new SpeckleStream() { Name = "I hate api testing", ViewerLayers = new List <object>() { new { test = "test" } } });
    Console.WriteLine("OK: " + response.Message);
  }
  catch (Exception e) { Console.WriteLine(e.Message); }

  Console.WriteLine();
  try
  {
    Console.WriteLine("Getting a stream field.");
    var response = await myClient.StreamGetAsync(streamId, "fields=viewerLayers,name,owner");
    Console.WriteLine("OK: " + response.Resource.ToJson());
  }
  catch (Exception e) { Console.WriteLine(e.Message); }

  Console.WriteLine();
  try
  {
    Console.WriteLine("Getting all users's streams.");
    var response = await myClient.StreamsGetAllAsync();
    Console.WriteLine("OK: " + response.Resources.Count);
  }
  catch (Exception e) { Console.WriteLine(e.Message); }

  Console.WriteLine();
  try
  {
    Console.WriteLine("Cloning a stream.");
    var response = await myClient.StreamCloneAsync(streamId);
    Console.WriteLine("OK: " + response.Message);
  }
  catch (Exception e) { Console.WriteLine(e.Message); }

  Console.WriteLine();
  try
  {
    Console.WriteLine("Deleting a stream: " + streamId);
    var response = await myClient.StreamDeleteAsync(streamId);
    Console.WriteLine("OK: " + response.Message);
  }
  catch (Exception e) { Console.WriteLine(e.Message); }
}
// TODO: Orchestration
// Create buckets, send sequentially, notify ui re upload progress
// NOTE: Problems with local context and cache: we seem to not sucesffuly pass through it
// perhaps we're not storing the right sent object (localcontext.addsentobject)
// Re-sends the client's previously-stored objects to its stream, chunked into ~500kb buckets,
// reporting progress and error counts to the UI.
public override void UpdateSender(string args)
{
  var client = JsonConvert.DeserializeObject <dynamic>(args);
  var apiClient = new SpeckleApiClient((string)client.account.RestApi) { AuthToken = (string)client.account.Token };
  var convertedObjects = new List <SpeckleObject>();
  var placeholders = new List <SpeckleObject>();
  // e.g. "DUT_MILLIMETERS" -> "millimeters"
  var units = CurrentDoc.Document.GetUnits().GetFormatOptions(UnitType.UT_Length).DisplayUnits.ToString().ToLower().Replace("dut_", "");
  InjectScaleInKits(GetScale(units)); // this is used for feet to sane units conversion.
  int i = 0;
  long currentBucketSize = 0;
  var errors = "";
  var failedSend = 0;
  var failedConvert = 0;
  foreach (var obj in client.objects)
  {
    // Progress update for the UI: percentage plus a human-readable blurb.
    NotifyUi("update-client", JsonConvert.SerializeObject(new { _id = (string)client._id, loading = true, isLoadingIndeterminate = false, loadingProgress = 1f * i++ / client.objects.Count * 100, loadingBlurb = string.Format("Converting and uploading objects: {0} / {1}", i, client.objects.Count) }));
    try
    {
      var revitElement = CurrentDoc.Document.GetElement((string)obj.properties["revitUniqueId"]);
      var conversionResult = SpeckleCore.Converter.Serialise(new List <object>() { revitElement });
      var byteCount = Converter.getBytes(conversionResult).Length;
      currentBucketSize += byteCount;
      if (byteCount > 2e6)
      {
        // NOTE(review): dead code — problemId is never used, and unlike PushSender the
        // oversized element is NOT skipped (no continue). Confirm whether that is intended.
        var problemId = revitElement.Id;
      }
      convertedObjects.AddRange(conversionResult);
      if (currentBucketSize > 5e5 || i >= client.objects.Count) // aim for roughly 500kb uncompressed
      {
        // Drop objects the local cache says the server already has.
        LocalContext.PruneExistingObjects(convertedObjects, apiClient.BaseUrl);
        try
        {
          var chunkResponse = apiClient.ObjectCreateAsync(convertedObjects).Result.Resources;
          int m = 0;
          foreach (var objConverted in convertedObjects)
          {
            objConverted._id = chunkResponse[m++]._id;
            placeholders.Add(new SpecklePlaceholder() { _id = objConverted._id });
            // Remember fully-sent (non-placeholder) objects in the local cache.
            if (objConverted.Type != "Placeholder") { LocalContext.AddSentObject(objConverted, apiClient.BaseUrl); }
          }
        }
        catch (Exception e)
        {
          failedSend += convertedObjects.Count;
          NotifyUi("update-client", JsonConvert.SerializeObject(new { _id = (string)client._id, errors = "Failed to send " + failedSend + " objects." }));
        }
        currentBucketSize = 0;
        convertedObjects = new List <SpeckleObject>(); // reset the chunkness
      }
    }
    catch (Exception e)
    {
      failedConvert++;
      NotifyUi("update-client", JsonConvert.SerializeObject(new { _id = (string)client._id, errors = "Failed to convert " + failedConvert + " objects." }));
    }
  }
  if (failedConvert > 0) { errors += String.Format("Failed to convert a total of {0} objects. ", failedConvert); }
  if (failedSend > 0) { errors += String.Format("Failed to send a total of {0} objects. ", failedSend); }
  var myStream = new SpeckleStream() { Objects = placeholders };
  var ug = UnitUtils.GetUnitGroup(UnitType.UT_Length); // NOTE(review): unused local.
  var baseProps = new Dictionary <string, object>();
  baseProps["units"] = units;
  baseProps["unitsDictionary"] = GetAndClearUnitDictionary();
  myStream.BaseProperties = baseProps;
  //myStream.BaseProperties = JsonConvert.SerializeObject(baseProps);
  NotifyUi("update-client", JsonConvert.SerializeObject(new { _id = (string)client._id, loading = true, isLoadingIndeterminate = true, loadingBlurb = "Updating stream." }));
  // NOTE(review): sync-over-async (.Result) on the UI path.
  var response = apiClient.StreamUpdateAsync((string)client.streamId, myStream).Result;
  NotifyUi("update-client", JsonConvert.SerializeObject(new { _id = (string)client._id, loading = false, loadingBlurb = "Done sending.", errors }));
}
// Send objects to Speckle server. Triggered on "Push!".
// Create buckets, send sequentially, notify ui re upload progress
// AutoCAD variant: fetches filtered objects, uploads them in ~500kb chunks, then updates
// the stream with placeholders and reports progress/errors to the UI client.
public override void PushSender(string args)
{
  var client = JsonConvert.DeserializeObject <dynamic>(args);
  //if it's a category or property filter we need to refresh the list of objects
  //if it's a selection filter just use the objects that were stored previously
  ISelectionFilter filter = JsonConvert.DeserializeObject(JsonConvert.SerializeObject(client.filter), GetFilterType(client.filter.Type.ToString()));
  IEnumerable <SpeckleObject> objects = new List <SpeckleObject>();
  objects = GetSelectionFilterObjects(filter, client._id.ToString(), client.streamId.ToString());
  var apiClient = new SpeckleApiClient((string)client.account.RestApi) { AuthToken = (string)client.account.Token };
  // NOTE(review): sync-over-async (Task.Run + Wait) to initialise the user; deadlock-safe
  // only because the work is pushed to the thread pool — consider making the caller async.
  var task = Task.Run(async() => { await apiClient.IntializeUser(); });
  task.Wait();
  apiClient.ClientType = "AutoCAD";
  var convertedObjects = new List <SpeckleObject>();
  var placeholders = new List <SpeckleObject>();
  var units = AutocadDataService.GetLengthUnit();
  //InjectScaleInKits(GetScale(units)); // this is used for feet to sane units conversion.
  int i = 0;
  long currentBucketSize = 0;
  var errorMsg = "";
  var failedToConvert = 0;
  var errors = new List <SpeckleError>();
  foreach (var obj in objects)
  {
    // Progress update for the UI: percentage plus a human-readable blurb.
    NotifyUi("update-client", JsonConvert.SerializeObject(new { _id = (string)client._id, loading = true, isLoadingIndeterminate = false, loadingProgress = 1f * i++ / objects.Count() * 100, loadingBlurb = string.Format("Converting and uploading objects: {0} / {1}", i, objects.Count()) }));
    long handle = 0;
    SpeckleObject speckleObject = null;
    try
    {
      handle = (long)obj.Properties["autocadhandle"];
      speckleObject = AutocadDataService.GetObject(handle);
      if (speckleObject == null)
      {
        errors.Add(new SpeckleError { Message = "Could not retrieve element", Details = string.Empty });
        continue;
      }
    }
    catch (Exception e)
    {
      errors.Add(new SpeckleError { Message = "Could not retrieve element", Details = e.Message });
      continue;
    }
    try
    {
      var conversionResult = new List <SpeckleObject> { speckleObject };
      var byteCount = Converter.getBytes(conversionResult).Length;
      currentBucketSize += byteCount;
      // Oversized single elements are skipped entirely rather than sent.
      if (byteCount > 2e6)
      {
        errors.Add(new SpeckleError { Message = "Element is too big to be sent", Details = $"Element {handle} is bigger than 2MB, it will be skipped" });
        continue;
      }
      convertedObjects.AddRange(conversionResult);
      if (currentBucketSize > 5e5 || i >= objects.Count()) // aim for roughly 500kb uncompressed
      {
        // Drop objects the local cache says the server already has.
        LocalContext.PruneExistingObjects(convertedObjects, apiClient.BaseUrl);
        try
        {
          var chunkResponse = apiClient.ObjectCreateAsync(convertedObjects).Result.Resources;
          int m = 0;
          foreach (var objConverted in convertedObjects)
          {
            objConverted._id = chunkResponse[m++]._id;
            placeholders.Add(new SpecklePlaceholder() { _id = objConverted._id });
            // Remember fully-sent (non-placeholder) objects in the local cache.
            if (objConverted.Type != "Placeholder") { LocalContext.AddSentObject(objConverted, apiClient.BaseUrl); }
          }
        }
        catch (Exception e)
        {
          errors.Add(new SpeckleError { Message = $"Failed to send {convertedObjects.Count} objects", Details = e.Message });
        }
        currentBucketSize = 0;
        convertedObjects = new List <SpeckleObject>(); // reset the chunkness
      }
    }
    catch (Exception e)
    {
      failedToConvert++;
      errors.Add(new SpeckleError { Message = $"Failed to convert element", Details = $"Element handle: {handle}" });
      //NotifyUi("update-client", JsonConvert.SerializeObject(new
      //{
      //  _id = (string)client._id,
      //  errors = "Failed to convert " + failedConvert + " objects."
      //}));
    }
  }
  if (errors.Any())
  {
    if (failedToConvert > 0)
    {
      // NOTE(review): broken composite format — the third argument ("" / "s") is never
      // used because the format string has no {1}; should read "Failed to convert {0} object{1} ".
      errorMsg += string.Format("Failed to convert {0} objects ", failedToConvert, failedToConvert == 1 ? "" : "s");
    }
    else
    {
      errorMsg += string.Format("There {0} {1} error{2} ", errors.Count() == 1 ? "is" : "are", errors.Count(), errors.Count() == 1 ? "" : "s");
    }
  }
  var myStream = new SpeckleStream() { Objects = placeholders };
  var baseProps = new Dictionary <string, object>();
  baseProps["units"] = units;
  //baseProps["unitsDictionary"] = GetAndClearUnitDictionary();
  myStream.BaseProperties = baseProps;
  NotifyUi("update-client", JsonConvert.SerializeObject(new { _id = (string)client._id, loading = true, isLoadingIndeterminate = true, loadingBlurb = "Updating stream." }));
  apiClient.Stream = myStream;
  // NOTE(review): sync-over-async (.Result) on the UI path.
  var response = apiClient.StreamUpdateAsync((string)client.streamId, myStream).Result;
  var plural = objects.Count() == 1 ? "" : "s";
  NotifyUi("update-client", JsonConvert.SerializeObject(new { _id = (string)client._id, loading = false, loadingBlurb = "", message = $"Done sending {objects.Count()} object{plural}.", errorMsg, errors }));
}