private void CreateObjectsOnServer(List<SpeckleObject> bucketObjects, string baseUrl, ref int numErrors)
{
  // Separate objects into sizeable payloads
  var payloads = CreatePayloads(bucketObjects);

  if (bucketObjects.Count(o => o.Type == "Placeholder") == bucketObjects.Count)
  {
    numErrors = 0;
    return;
  }

  var payloadTasks = payloads.Select(p => apiClient.ObjectCreateAsync(p, 30000)).ToArray();

  // Send objects which are in payload and add to local DB with updated IDs
  //foreach (List<SpeckleObject> payload in payloads)
  for (var j = 0; j < payloads.Count(); j++)
  {
    ResponseObject res = null;

    // Capture a local reference so the task below doesn't close over the loop variable j,
    // which could have changed (or run past the end) by the time the task actually runs
    var payload = payloads[j];

    try
    {
      res = payloadTasks[j].Result;
    }
    catch (Exception ex)
    {
      numErrors++;
      var speckleExceptionContext = ExtractSpeckleExceptionContext(ex);
      var errContext = speckleExceptionContext.Concat(new[] { "StreamId=" + StreamID,
        "Error in updating the server with a payload of " + payload.Count() + " objects" });
      GSA.GsaApp.gsaMessenger.Message(MessageIntent.TechnicalLog, MessageLevel.Error, ex, errContext.ToArray());
    }

    if (res != null && res.Resources.Count() > 0)
    {
      for (int i = 0; i < payload.Count(); i++)
      {
        payload[i]._id = res.Resources[i]._id;
      }
    }

    Task.Run(() =>
    {
      foreach (SpeckleObject obj in payload.Where(o => o.Hash != null && o._id != null))
      {
        HelperFunctions.tryCatchWithEvents(() => LocalContext.AddSentObject(obj, baseUrl), "", "Error in updating local db");
      }
    });
  }

  int successfulPayloads = payloads.Count() - numErrors;
  GSA.GsaApp.gsaMessenger.Message(MessageIntent.Display, MessageLevel.Information,
    "Successfully sent " + successfulPayloads + "/" + payloads.Count() + " payloads to the server");
  GSA.GsaApp.gsaMessenger.CacheMessage(MessageIntent.TechnicalLog, MessageLevel.Information, "Sent payloads to server",
    "NumSuccessful=" + successfulPayloads, "NumErrored=" + numErrors);
}
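// CreatePayloads is called above but not shown here. A minimal sketch of what it is
// assumed to do, based on the bucketing logic repeated in the other senders below
// (objects over ~2MB are dropped, buckets flushed once past ~500kB uncompressed);
// this is an illustrative reconstruction, not the actual GSA helper.
private static List<List<SpeckleObject>> CreatePayloads(List<SpeckleObject> bucketObjects)
{
  var payloads = new List<List<SpeckleObject>>();
  var currentPayload = new List<SpeckleObject>();
  long currentPayloadSize = 0;

  foreach (var obj in bucketObjects)
  {
    long size = Converter.getBytes(obj).Length;
    if (size > 2e6)
    {
      continue; // too big for the current Speckle limitations; skipped, mirroring the other senders
    }

    currentPayload.Add(obj);
    currentPayloadSize += size;

    if (currentPayloadSize > 5e5) // aim for roughly 500kb uncompressed per payload
    {
      payloads.Add(currentPayload);
      currentPayload = new List<SpeckleObject>();
      currentPayloadSize = 0;
    }
  }

  // add in the last, partially filled bucket
  if (currentPayload.Count > 0)
  {
    payloads.Add(currentPayload);
  }

  return payloads;
}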
// NOTE: This is actually triggered when clicking "Push!"
// TODO: Orchestration
// Create buckets, send sequentially, notify ui re upload progress
// NOTE: Problems with local context and cache: we seem to not successfully pass through it
// perhaps we're not storing the right sent object (localcontext.addsentobject)
public override void PushSender(string args)
{
  var client = JsonConvert.DeserializeObject<dynamic>(args);

  // if it's a category or property filter we need to refresh the list of objects
  // if it's a selection filter just use the objects that were stored previously
  ISelectionFilter filter = JsonConvert.DeserializeObject(JsonConvert.SerializeObject(client.filter), GetFilterType(client.filter.Type.ToString()));
  IEnumerable<SpeckleObject> objects = new List<SpeckleObject>();
  objects = GetSelectionFilterObjects(filter, client._id.ToString(), client.streamId.ToString());

  var apiClient = new SpeckleApiClient((string)client.account.RestApi) { AuthToken = (string)client.account.Token };

  var convertedObjects = new List<SpeckleObject>();
  var placeholders = new List<SpeckleObject>();

  var units = CurrentDoc.Document.GetUnits().GetFormatOptions(UnitType.UT_Length).DisplayUnits.ToString().ToLowerInvariant().Replace("dut_", "");
  InjectScaleInKits(GetScale(units)); // this is used for feet to sane units conversion

  int i = 0;
  long currentBucketSize = 0;
  var errorMsg = "";
  var failedToConvert = 0;
  var errors = new List<SpeckleError>();

  foreach (var obj in objects)
  {
    NotifyUi("update-client", JsonConvert.SerializeObject(new
    {
      _id = (string)client._id,
      loading = true,
      isLoadingIndeterminate = false,
      loadingProgress = 1f * i++ / objects.Count() * 100,
      loadingBlurb = string.Format("Converting and uploading objects: {0} / {1}", i, objects.Count())
    }));

    var id = 0;
    Element revitElement = null;
    try
    {
      revitElement = CurrentDoc.Document.GetElement((string)obj.Properties["revitUniqueId"]);
      id = revitElement.Id.IntegerValue;
    }
    catch (Exception e)
    {
      errors.Add(new SpeckleError { Message = "Could not retrieve element", Details = e.Message });
      continue;
    }

    try
    {
      var conversionResult = SpeckleCore.Converter.Serialise(new List<object>() { revitElement });
      var byteCount = Converter.getBytes(conversionResult).Length;
      currentBucketSize += byteCount;

      if (byteCount > 2e6)
      {
        errors.Add(new SpeckleError { Message = "Element is too big to be sent", Details = $"Element {id} is bigger than 2MB, it will be skipped" });
        continue;
      }

      convertedObjects.AddRange(conversionResult);

      if (currentBucketSize > 5e5 || i >= objects.Count()) // aim for roughly 500kb uncompressed
      {
        LocalContext.PruneExistingObjects(convertedObjects, apiClient.BaseUrl);
        try
        {
          var chunkResponse = apiClient.ObjectCreateAsync(convertedObjects).Result.Resources;
          int m = 0;
          foreach (var objConverted in convertedObjects)
          {
            objConverted._id = chunkResponse[m++]._id;
            placeholders.Add(new SpecklePlaceholder() { _id = objConverted._id });
            if (objConverted.Type != "Placeholder")
            {
              LocalContext.AddSentObject(objConverted, apiClient.BaseUrl);
            }
          }
        }
        catch (Exception e)
        {
          errors.Add(new SpeckleError { Message = $"Failed to send {convertedObjects.Count} objects", Details = e.Message });
        }
        currentBucketSize = 0;
        convertedObjects = new List<SpeckleObject>(); // reset the chunkness
      }
    }
    catch (Exception e)
    {
      failedToConvert++;
      errors.Add(new SpeckleError { Message = $"Failed to convert {revitElement.Name}", Details = $"Element id: {id}" });
      //NotifyUi("update-client", JsonConvert.SerializeObject(new
      //{
      //  _id = (string)client._id,
      //  errors = "Failed to convert " + failedToConvert + " objects."
      //}));
    }
  }

  if (errors.Any())
  {
    if (failedToConvert > 0)
    {
      errorMsg += string.Format("Failed to convert {0} object{1} ", failedToConvert, failedToConvert == 1 ? "" : "s");
    }
    else
    {
      errorMsg += string.Format("There {0} {1} error{2} ", errors.Count() == 1 ? "is" : "are", errors.Count(), errors.Count() == 1 ? "" : "s");
    }
    errorMsg += "<nobr>" + Globals.GetRandomSadFace() + "</nobr>";
  }

  var myStream = new SpeckleStream() { Objects = placeholders };
  var ug = UnitUtils.GetUnitGroup(UnitType.UT_Length);
  var baseProps = new Dictionary<string, object>();
  baseProps["units"] = units;
  baseProps["unitsDictionary"] = GetAndClearUnitDictionary();
  myStream.BaseProperties = baseProps;
  //myStream.BaseProperties = JsonConvert.SerializeObject(baseProps);

  NotifyUi("update-client", JsonConvert.SerializeObject(new
  {
    _id = (string)client._id,
    loading = true,
    isLoadingIndeterminate = true,
    loadingBlurb = "Updating stream."
  }));

  var response = apiClient.StreamUpdateAsync((string)client.streamId, myStream).Result;

  var plural = objects.Count() == 1 ? "" : "s";
  NotifyUi("update-client", JsonConvert.SerializeObject(new
  {
    _id = (string)client._id,
    loading = false,
    loadingBlurb = "",
    message = $"Done sending {objects.Count()} object{plural}.",
    errorMsg,
    errors
  }));

  SpeckleTelemetry.RecordStreamUpdated("Revit");
}
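// GetFilterType, used above to resolve the concrete ISelectionFilter type for
// deserialization, is not shown. A plausible minimal sketch, assuming the filter's
// Type field carries the concrete type name (e.g. "ElementsSelectionFilter"); the
// actual connector implementation may differ.
private Type GetFilterType(string typeName)
{
  // scan loaded assemblies for a matching ISelectionFilter implementation
  return AppDomain.CurrentDomain.GetAssemblies()
    .SelectMany(a => { try { return a.GetTypes(); } catch { return new Type[0]; } })
    .FirstOrDefault(t => typeof(ISelectionFilter).IsAssignableFrom(t) && t.Name == typeName);
}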
public void BatchUpdateStream(SpecklePushConfig pushConfig)
{
  List<SpeckleObject> convertedObjects = SpeckleClient.Stream.Objects;

  SpeckleCore.SpeckleInitializer.Initialize();
  SpeckleCore.LocalContext.Init();

  LocalContext.PruneExistingObjects(convertedObjects, SpeckleClient.BaseUrl);

  List<SpeckleObject> persistedObjects = new List<SpeckleObject>();
  OrderedDictionary JobQueue = new OrderedDictionary();

  if (convertedObjects.Count(obj => obj.Type == "Placeholder") != convertedObjects.Count)
  {
    // create the update payloads
    int count = 0;
    var objectUpdatePayloads = new List<List<SpeckleObject>>();
    long totalBucketSize = 0;
    long currentBucketSize = 0;
    var currentBucketObjects = new List<SpeckleObject>();
    var allObjects = new List<SpeckleObject>();

    foreach (SpeckleObject convertedObject in convertedObjects)
    {
      if (count++ % 100 == 0)
      {
        //Message = "Converted " + count + " objects out of " + convertedObjects.Count() + ".";
      }

      // size checking & bulk object creation payloads creation
      long size = Converter.getBytes(convertedObject).Length;
      currentBucketSize += size;
      totalBucketSize += size;
      currentBucketObjects.Add(convertedObject);

      // Object is too big?
      if (size > 2e6)
      {
        BH.Engine.Reflection.Compute.RecordWarning("An object is too big for the current Speckle limitations.");
        currentBucketObjects.Remove(convertedObject);
      }

      if (currentBucketSize > 3e5) // restrict max to ~300kb
      {
        //BH.Engine.Reflection.Compute.RecordNote("Reached payload limit. Making a new one, current #: " + objectUpdatePayloads.Count);
        objectUpdatePayloads.Add(currentBucketObjects);
        currentBucketObjects = new List<SpeckleObject>();
        currentBucketSize = 0;
      }
    }

    // add in the last bucket
    if (currentBucketObjects.Count > 0)
    {
      objectUpdatePayloads.Add(currentBucketObjects);
    }

    if (objectUpdatePayloads.Count > 1)
    {
      BH.Engine.Reflection.Compute.RecordNote($"Payload has been split in {objectUpdatePayloads.Count} batches. Total size is {totalBucketSize / 1024} kB.");
    }

    // create bulk object creation tasks
    List<ResponseObject> responses = new List<ResponseObject>();
    foreach (var payload in objectUpdatePayloads)
    {
      //Message = String.Format("{0}/{1}", k++, objectUpdatePayloads.Count);
      try
      {
        var objResponse = SpeckleClient.ObjectCreateAsync(payload).Result;
        responses.Add(objResponse);
        persistedObjects.AddRange(objResponse.Resources);

        int m = 0;
        foreach (var oL in payload)
        {
          oL._id = objResponse.Resources[m++]._id;
        }

        // push sent objects in the cache non-blocking
        Task.Run(() =>
        {
          foreach (var oL in payload)
          {
            if (oL.Type != "Placeholder")
            {
              LocalContext.AddSentObject(oL, SpeckleClient.BaseUrl);
            }
          }
        });
      }
      catch (Exception err)
      {
        BH.Engine.Reflection.Compute.RecordWarning(err.Message);
        continue;
      }
    }
  }
  else
  {
    persistedObjects = convertedObjects;
  }

  // create placeholders for stream update payload
  List<SpeckleObject> placeholders = new List<SpeckleObject>();
  //foreach ( var myResponse in responses )
  foreach (var obj in persistedObjects)
  {
    placeholders.Add(new SpecklePlaceholder() { _id = obj._id });
  }

  SpeckleClient.Stream.Objects = placeholders;

  // set some base properties (will be overwritten)
  var baseProps = new Dictionary<string, object>();
  baseProps["units"] = "m";
  baseProps["tolerance"] = "0.001";
  baseProps["angleTolerance"] = "0.01";
  SpeckleClient.Stream.BaseProperties = baseProps;

  var response = SpeckleClient.StreamUpdateAsync(SpeckleClient.StreamId, SpeckleClient.Stream).Result;

  SpeckleClient.BroadcastMessage("stream", SpeckleClient.StreamId, new { eventType = "update-global" });
}
private void CreateObjectsOnServer(List<SpeckleObject> bucketObjects, string baseUrl, ref int numErrors)
{
  // Separate objects into sizeable payloads
  var payloads = CreatePayloads(bucketObjects);

  if (totalProgress != null)
  {
    totalProgress.Report(payloads.Count());
  }

  if (bucketObjects.Count(o => o.Type == "Placeholder") == bucketObjects.Count)
  {
    numErrors = 0;
    return;
  }

  // Send objects which are in payload and add to local DB with updated IDs
  for (var j = 0; j < payloads.Count(); j++)
  {
    ResponseObject res = null;

    // Make a copy so that the task below doesn't use the j variable, which could change (by looping) by the time the task is run
    var payload = payloads[j].ToList();

    try
    {
      res = apiClient.ObjectCreateAsync(payload, apiTimeoutOverride).Result;
      if (incrementProgress != null)
      {
        incrementProgress.Report(1);
      }
    }
    catch (Exception ex)
    {
      numErrors++;
      var speckleExceptionContext = ExtractSpeckleExceptionContext(ex);
      var errContext = speckleExceptionContext.Concat(new[] { "StreamId=" + StreamId, "Endpoint=ObjectCreateAsync",
        "PayloadBytes=" + Converter.getBytes(payload).Length,
        "Error in updating the server with a payload of " + payload.Count() + " objects" });
      messenger.Message(MessageIntent.TechnicalLog, MessageLevel.Error, ex, errContext.ToArray());
    }

    if (res != null && res.Resources.Count() > 0)
    {
      for (int i = 0; i < payload.Count(); i++)
      {
        payload[i]._id = res.Resources[i]._id;
      }
    }

    Task.Run(() =>
    {
      // Don't save results to the hard disk as they take up a huge amount of space and are likely to change very often
      foreach (SpeckleObject obj in payload.Where(o => o.Hash != null && o._id != null && !o.Type.Contains("Result")))
      {
        tryCatchWithEvents(() => LocalContext.AddSentObject(obj, baseUrl), "", "Error in updating local db");
      }
    });
  }

  int successfulPayloads = payloads.Count() - numErrors;
  messenger.Message(MessageIntent.Display, MessageLevel.Information,
    "Successfully sent " + successfulPayloads + "/" + payloads.Count() + " payloads to the server");
  messenger.Message(MessageIntent.TechnicalLog, MessageLevel.Information, "Sent payloads to server",
    "NumSuccessful=" + successfulPayloads, "NumErrored=" + numErrors);
}
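// Two helpers used above are not shown. Sketches under stated assumptions follow:
// tryCatchWithEvents is assumed to run an action and route success/failure messages
// to the messenger instead of throwing, and ExtractSpeckleExceptionContext is assumed
// to flatten an (Aggregate)Exception chain into loggable context strings. Both are
// illustrative, not the actual GSA implementations.
private void tryCatchWithEvents(Action action, string msgSuccess, string msgFailure)
{
  try
  {
    action();
    if (!string.IsNullOrEmpty(msgSuccess))
    {
      messenger.Message(MessageIntent.TechnicalLog, MessageLevel.Information, msgSuccess);
    }
  }
  catch (Exception ex)
  {
    if (!string.IsNullOrEmpty(msgFailure))
    {
      messenger.Message(MessageIntent.TechnicalLog, MessageLevel.Error, ex, msgFailure);
    }
  }
}

private static List<string> ExtractSpeckleExceptionContext(Exception ex)
{
  // unwrap AggregateException (thrown by .Result) and collect each message in the chain
  var context = new List<string>();
  if (ex is AggregateException aggEx)
  {
    foreach (var inner in aggEx.Flatten().InnerExceptions)
    {
      context.Add("Message=" + inner.Message);
    }
  }
  else
  {
    for (var e = ex; e != null; e = e.InnerException)
    {
      context.Add("Message=" + e.Message);
    }
  }
  return context;
}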
public async void SendStaggeredUpdate(bool force = false)
{
  if (Paused && !force)
  {
    Context.NotifySpeckleFrame("client-expired", StreamId, "");
    return;
  }
  else
  {
    // create a clone
    var cloneResult = Client.StreamCloneAsync(StreamId).Result;
    Client.Stream.Children.Add(cloneResult.Clone.StreamId);
    Client.BroadcastMessage("stream", StreamId, new { eventType = "update-children" });
  }

  if (IsSendingUpdate)
  {
    Expired = true;
    return;
  }

  IsSendingUpdate = true;
  Context.NotifySpeckleFrame("client-is-loading", StreamId, "");

  var objs = RhinoDoc.ActiveDoc.Objects.FindByUserString("spk_" + this.StreamId, "*", false).OrderBy(obj => obj.Attributes.LayerIndex);

  Context.NotifySpeckleFrame("client-progress-message", StreamId, "Converting " + objs.Count() + " objects...");

  // layer list creation
  var pLayers = new List<SpeckleCore.Layer>();
  int lindex = -1, count = 0, orderIndex = 0;
  foreach (RhinoObject obj in objs)
  {
    Rhino.DocObjects.Layer layer = RhinoDoc.ActiveDoc.Layers[obj.Attributes.LayerIndex];
    if (lindex != obj.Attributes.LayerIndex)
    {
      var spkLayer = new SpeckleCore.Layer()
      {
        Name = layer.FullPath,
        Guid = layer.Id.ToString(),
        ObjectCount = 1,
        StartIndex = count,
        OrderIndex = orderIndex++,
        Properties = new LayerProperties()
        {
          Color = new SpeckleCore.SpeckleBaseColor() { A = 1, Hex = System.Drawing.ColorTranslator.ToHtml(layer.Color) },
        }
      };

      pLayers.Add(spkLayer);
      lindex = obj.Attributes.LayerIndex;
    }
    else
    {
      var spkl = pLayers.FirstOrDefault(pl => pl.Name == layer.FullPath);
      spkl.ObjectCount++;
    }

    count++;
  }

  // convert objects
  var convertedObjects = new List<SpeckleObject>();
  foreach (RhinoObject obj in objs)
  {
    var myObj = Converter.Serialise(obj.Geometry);
    myObj.ApplicationId = obj.Id.ToString();
    convertedObjects.Add(myObj);
  }

  LocalContext.PruneExistingObjects(convertedObjects, Client.BaseUrl);

  List<SpeckleObject> persistedObjects = new List<SpeckleObject>();

  if (convertedObjects.Count(obj => obj.Type == "Placeholder") != convertedObjects.Count)
  {
    // create the update payloads
    count = 0;
    var objectUpdatePayloads = new List<List<SpeckleObject>>();
    long totalBucketSize = 0;
    long currentBucketSize = 0;
    var currentBucketObjects = new List<SpeckleObject>();
    var allObjects = new List<SpeckleObject>();
    foreach (SpeckleObject convertedObject in convertedObjects)
    {
      if (count++ % 100 == 0)
      {
        Context.NotifySpeckleFrame("client-progress-message", StreamId, "Converted " + count + " objects out of " + objs.Count() + ".");
      }

      // size checking & bulk object creation payloads creation
      long size = Converter.getBytes(convertedObject).Length;
      currentBucketSize += size;
      totalBucketSize += size;
      currentBucketObjects.Add(convertedObject);

      // Object is too big?
      if (size > 2e6)
      {
        Context.NotifySpeckleFrame("client-error", StreamId, JsonConvert.SerializeObject("This stream contains a super big object. These will fail. Sorry for the bad error message - we're working on improving this."));
        currentBucketObjects.Remove(convertedObject);
      }

      if (currentBucketSize > 5e5) // restrict max to ~500kb; should it be user config? anyway these functions should go into core. at one point.
      {
        Debug.WriteLine("Reached payload limit. Making a new one, current #: " + objectUpdatePayloads.Count);
        objectUpdatePayloads.Add(currentBucketObjects);
        currentBucketObjects = new List<SpeckleObject>();
        currentBucketSize = 0;
      }
    }

    // add in the last bucket
    if (currentBucketObjects.Count > 0)
    {
      objectUpdatePayloads.Add(currentBucketObjects);
    }

    Debug.WriteLine("Finished, payload object update count is: " + objectUpdatePayloads.Count + " total bucket size is (kb) " + totalBucketSize / 1000);

    // create bulk object creation tasks
    int k = 0;
    List<ResponseObject> responses = new List<ResponseObject>();
    foreach (var payload in objectUpdatePayloads)
    {
      Context.NotifySpeckleFrame("client-progress-message", StreamId, String.Format("Sending payload {0} out of {1}", k++, objectUpdatePayloads.Count));
      try
      {
        var objResponse = await Client.ObjectCreateAsync(payload);
        responses.Add(objResponse);
        persistedObjects.AddRange(objResponse.Resources);

        int m = 0;
        foreach (var oL in payload)
        {
          oL._id = objResponse.Resources[m++]._id;
        }

        // push sent objects in the cache non-blocking
        Task.Run(() =>
        {
          foreach (var oL in payload)
          {
            if (oL.Type != "Placeholder")
            {
              LocalContext.AddSentObject(oL, Client.BaseUrl);
            }
          }
        });
      }
      catch (Exception err)
      {
        Context.NotifySpeckleFrame("client-error", Client.Stream.StreamId, JsonConvert.SerializeObject(err.Message));
        Context.NotifySpeckleFrame("client-done-loading", StreamId, "");
        IsSendingUpdate = false;
        return;
      }
    }
  }
  else
  {
    persistedObjects = convertedObjects;
  }

  Context.NotifySpeckleFrame("client-progress-message", StreamId, "Updating stream...");

  // finalise layer creation
  foreach (var layer in pLayers)
  {
    layer.Topology = "0-" + layer.ObjectCount + " ";
  }

  // create placeholders for stream update payload
  List<SpeckleObject> placeholders = new List<SpeckleObject>();
  //foreach ( var myResponse in responses )
  foreach (var obj in persistedObjects)
  {
    placeholders.Add(new SpecklePlaceholder() { _id = obj._id });
  }

  // create stream update payload
  SpeckleStream streamUpdatePayload = new SpeckleStream();
  streamUpdatePayload.Layers = pLayers;
  streamUpdatePayload.Objects = placeholders;
  streamUpdatePayload.Name = Client.Stream.Name;

  // set some base properties (will be overwritten)
  var baseProps = new Dictionary<string, object>();
  baseProps["units"] = RhinoDoc.ActiveDoc.ModelUnitSystem.ToString();
  baseProps["tolerance"] = RhinoDoc.ActiveDoc.ModelAbsoluteTolerance;
  baseProps["angleTolerance"] = RhinoDoc.ActiveDoc.ModelAngleToleranceRadians;
  streamUpdatePayload.BaseProperties = baseProps;

  // update the stream
  ResponseBase response = null;
  try
  {
    response = await Client.StreamUpdateAsync(Client.Stream.StreamId, streamUpdatePayload);
  }
  catch (Exception err)
  {
    Context.NotifySpeckleFrame("client-error", Client.Stream.StreamId, JsonConvert.SerializeObject(err.Message));
    IsSendingUpdate = false;
    return;
  }

  // emit events, etc.
  Client.Stream.Layers = streamUpdatePayload.Layers.ToList();
  Client.Stream.Objects = placeholders;

  Context.NotifySpeckleFrame("client-metadata-update", StreamId, Client.Stream.ToJson());
  Context.NotifySpeckleFrame("client-done-loading", StreamId, "");

  Client.BroadcastMessage("stream", StreamId, new { eventType = "update-global" });

  IsSendingUpdate = false;
  if (Expired)
  {
    DataSender.Start();
  }
  Expired = false;
}
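// The Expired/DataSender pair above implements a debounce: if changes arrive while a
// send is in flight, Expired is set and exactly one follow-up send is scheduled once
// the current one finishes. A minimal sketch of the assumed timer wiring (the actual
// field initialisation lives elsewhere in the client class):
private System.Timers.Timer DataSender;

private void SetupDataSender()
{
  // one-shot timer; restarted explicitly whenever another send is needed
  DataSender = new System.Timers.Timer(2000) { AutoReset = false, Enabled = false };
  DataSender.Elapsed += (sender, e) => SendStaggeredUpdate();
}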
/// <summary>
/// Sends the update to the server.
/// </summary>
private void SendUpdate()
{
  if (MetadataSender.Enabled)
  {
    // start the timer again, as we need to make sure we're updating
    DataSender.Start();
    return;
  }

  if (IsSendingUpdate)
  {
    return;
  }

  IsSendingUpdate = true;

  Message = String.Format("Converting {0} \n objects", BucketObjects.Count);

  var convertedObjects = Converter.Serialise(BucketObjects).ToList();

  Message = String.Format("Creating payloads");

  LocalContext.PruneExistingObjects(convertedObjects, Client.BaseUrl);

  List<SpeckleObject> persistedObjects = new List<SpeckleObject>();

  if (convertedObjects.Count(obj => obj.Type == "Placeholder") != convertedObjects.Count)
  {
    // create the update payloads
    int count = 0;
    var objectUpdatePayloads = new List<List<SpeckleObject>>();
    long totalBucketSize = 0;
    long currentBucketSize = 0;
    var currentBucketObjects = new List<SpeckleObject>();
    var allObjects = new List<SpeckleObject>();
    foreach (SpeckleObject convertedObject in convertedObjects)
    {
      if (count++ % 100 == 0)
      {
        Message = "Converted " + count + " objects out of " + convertedObjects.Count() + ".";
      }

      // size checking & bulk object creation payloads creation
      long size = Converter.getBytes(convertedObject).Length;
      currentBucketSize += size;
      totalBucketSize += size;
      currentBucketObjects.Add(convertedObject);

      // Object is too big?
      if (size > 2e6)
      {
        AddRuntimeMessage(GH_RuntimeMessageLevel.Warning, "This stream contains a super big object. These will fail. Sorry for the bad error message - we're working on improving this.");
        currentBucketObjects.Remove(convertedObject);
      }

      if (currentBucketSize > 5e5) // restrict max to ~500kb; should it be user config? anyway these functions should go into core. at one point.
      {
        Debug.WriteLine("Reached payload limit. Making a new one, current #: " + objectUpdatePayloads.Count);
        objectUpdatePayloads.Add(currentBucketObjects);
        currentBucketObjects = new List<SpeckleObject>();
        currentBucketSize = 0;
      }
    }

    // add in the last bucket
    if (currentBucketObjects.Count > 0)
    {
      objectUpdatePayloads.Add(currentBucketObjects);
    }

    Debug.WriteLine("Finished, payload object update count is: " + objectUpdatePayloads.Count + " total bucket size is (kb) " + totalBucketSize / 1000);

    // create bulk object creation tasks
    int k = 0;
    List<ResponseObject> responses = new List<ResponseObject>();
    foreach (var payload in objectUpdatePayloads)
    {
      Message = String.Format("{0}/{1}", k++, objectUpdatePayloads.Count);
      try
      {
        var objResponse = Client.ObjectCreateAsync(payload).Result;
        responses.Add(objResponse);
        persistedObjects.AddRange(objResponse.Resources);

        int m = 0;
        foreach (var oL in payload)
        {
          oL._id = objResponse.Resources[m++]._id;
        }

        // push sent objects in the cache non-blocking
        Task.Run(() =>
        {
          foreach (var oL in payload)
          {
            if (oL.Type != "Placeholder")
            {
              LocalContext.AddSentObject(oL, Client.BaseUrl);
            }
          }
        });
      }
      catch (Exception err)
      {
        AddRuntimeMessage(GH_RuntimeMessageLevel.Error, err.Message);
        IsSendingUpdate = false; // release the send lock before bailing out, otherwise no further updates can be sent
        return;
      }
    }
  }
  else
  {
    persistedObjects = convertedObjects;
  }

  // create placeholders for stream update payload
  List<SpeckleObject> placeholders = new List<SpeckleObject>();
  //foreach ( var myResponse in responses )
  foreach (var obj in persistedObjects)
  {
    placeholders.Add(new SpecklePlaceholder() { _id = obj._id });
  }

  SpeckleStream updateStream = new SpeckleStream()
  {
    Layers = BucketLayers,
    Name = BucketName,
    Objects = placeholders
  };

  // set some base properties (will be overwritten)
  var baseProps = new Dictionary<string, object>();
  baseProps["units"] = Rhino.RhinoDoc.ActiveDoc.ModelUnitSystem.ToString();
  baseProps["tolerance"] = Rhino.RhinoDoc.ActiveDoc.ModelAbsoluteTolerance;
  baseProps["angleTolerance"] = Rhino.RhinoDoc.ActiveDoc.ModelAngleToleranceRadians;
  updateStream.BaseProperties = baseProps;

  var response = Client.StreamUpdateAsync(Client.StreamId, updateStream).Result;

  Client.BroadcastMessage("stream", Client.StreamId, new { eventType = "update-global" });

  Log += response.Message;
  AddRuntimeMessage(GH_RuntimeMessageLevel.Remark, "Data sent at " + DateTime.Now);
  Message = "Data sent\n@" + DateTime.Now.ToString("hh:mm:ss");

  IsSendingUpdate = false;
  State = "Ok";
}
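// The same size thresholds recur in every sender above, with one divergence: the
// BHoM BatchUpdateStream flushes at ~300kB where the others use ~500kB. If these
// routines are consolidated, named constants would make the intent explicit
// (values taken from the code above; the names are illustrative):
private const long MaxSingleObjectBytes = 2000000; // objects over ~2MB are rejected/skipped
private const long TargetPayloadBytes = 500000;    // flush a bucket once it passes ~500kB uncompressed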
// TODO: Orchestration
// Create buckets, send sequentially, notify ui re upload progress
// NOTE: Problems with local context and cache: we seem to not successfully pass through it
// perhaps we're not storing the right sent object (localcontext.addsentobject)
public override void UpdateSender(string args)
{
  var client = JsonConvert.DeserializeObject<dynamic>(args);
  var apiClient = new SpeckleApiClient((string)client.account.RestApi) { AuthToken = (string)client.account.Token };

  var convertedObjects = new List<SpeckleObject>();
  var placeholders = new List<SpeckleObject>();

  var units = CurrentDoc.Document.GetUnits().GetFormatOptions(UnitType.UT_Length).DisplayUnits.ToString().ToLower().Replace("dut_", "");
  InjectScaleInKits(GetScale(units)); // this is used for feet to sane units conversion

  int i = 0;
  long currentBucketSize = 0;
  var errors = "";
  var failedSend = 0;
  var failedConvert = 0;

  foreach (var obj in client.objects)
  {
    NotifyUi("update-client", JsonConvert.SerializeObject(new
    {
      _id = (string)client._id,
      loading = true,
      isLoadingIndeterminate = false,
      loadingProgress = 1f * i++ / client.objects.Count * 100,
      loadingBlurb = string.Format("Converting and uploading objects: {0} / {1}", i, client.objects.Count)
    }));

    try
    {
      var revitElement = CurrentDoc.Document.GetElement((string)obj.properties["revitUniqueId"]);
      var conversionResult = SpeckleCore.Converter.Serialise(new List<object>() { revitElement });
      var byteCount = Converter.getBytes(conversionResult).Length;
      currentBucketSize += byteCount;

      if (byteCount > 2e6)
      {
        // TODO: oversized elements are only identified here; they are neither skipped nor reported
        var problemId = revitElement.Id;
      }

      convertedObjects.AddRange(conversionResult);

      if (currentBucketSize > 5e5 || i >= client.objects.Count) // aim for roughly 500kb uncompressed
      {
        LocalContext.PruneExistingObjects(convertedObjects, apiClient.BaseUrl);

        try
        {
          var chunkResponse = apiClient.ObjectCreateAsync(convertedObjects).Result.Resources;
          int m = 0;
          foreach (var objConverted in convertedObjects)
          {
            objConverted._id = chunkResponse[m++]._id;
            placeholders.Add(new SpecklePlaceholder() { _id = objConverted._id });
            if (objConverted.Type != "Placeholder")
            {
              LocalContext.AddSentObject(objConverted, apiClient.BaseUrl);
            }
          }
        }
        catch (Exception e)
        {
          failedSend += convertedObjects.Count;
          NotifyUi("update-client", JsonConvert.SerializeObject(new
          {
            _id = (string)client._id,
            errors = "Failed to send " + failedSend + " objects."
          }));
        }
        currentBucketSize = 0;
        convertedObjects = new List<SpeckleObject>(); // reset the chunkness
      }
    }
    catch (Exception e)
    {
      failedConvert++;
      NotifyUi("update-client", JsonConvert.SerializeObject(new
      {
        _id = (string)client._id,
        errors = "Failed to convert " + failedConvert + " objects."
      }));
    }
  }

  if (failedConvert > 0)
  {
    errors += String.Format("Failed to convert a total of {0} objects. ", failedConvert);
  }
  if (failedSend > 0)
  {
    errors += String.Format("Failed to send a total of {0} objects. ", failedSend);
  }

  var myStream = new SpeckleStream() { Objects = placeholders };
  var ug = UnitUtils.GetUnitGroup(UnitType.UT_Length);
  var baseProps = new Dictionary<string, object>();
  baseProps["units"] = units;
  baseProps["unitsDictionary"] = GetAndClearUnitDictionary();
  myStream.BaseProperties = baseProps;
  //myStream.BaseProperties = JsonConvert.SerializeObject(baseProps);

  NotifyUi("update-client", JsonConvert.SerializeObject(new
  {
    _id = (string)client._id,
    loading = true,
    isLoadingIndeterminate = true,
    loadingBlurb = "Updating stream."
  }));

  var response = apiClient.StreamUpdateAsync((string)client.streamId, myStream).Result;

  NotifyUi("update-client", JsonConvert.SerializeObject(new
  {
    _id = (string)client._id,
    loading = false,
    loadingBlurb = "Done sending.",
    errors
  }));
}
// Send objects to Speckle server. Triggered on "Push!".
// Create buckets, send sequentially, notify ui re upload progress
public override void PushSender(string args)
{
  var client = JsonConvert.DeserializeObject<dynamic>(args);

  // if it's a category or property filter we need to refresh the list of objects
  // if it's a selection filter just use the objects that were stored previously
  ISelectionFilter filter = JsonConvert.DeserializeObject(JsonConvert.SerializeObject(client.filter), GetFilterType(client.filter.Type.ToString()));
  IEnumerable<SpeckleObject> objects = new List<SpeckleObject>();
  objects = GetSelectionFilterObjects(filter, client._id.ToString(), client.streamId.ToString());

  var apiClient = new SpeckleApiClient((string)client.account.RestApi) { AuthToken = (string)client.account.Token };
  var task = Task.Run(async () => { await apiClient.IntializeUser(); });
  task.Wait();
  apiClient.ClientType = "AutoCAD";

  var convertedObjects = new List<SpeckleObject>();
  var placeholders = new List<SpeckleObject>();

  var units = AutocadDataService.GetLengthUnit();
  //InjectScaleInKits(GetScale(units)); // this is used for feet to sane units conversion

  int i = 0;
  long currentBucketSize = 0;
  var errorMsg = "";
  var failedToConvert = 0;
  var errors = new List<SpeckleError>();

  foreach (var obj in objects)
  {
    NotifyUi("update-client", JsonConvert.SerializeObject(new
    {
      _id = (string)client._id,
      loading = true,
      isLoadingIndeterminate = false,
      loadingProgress = 1f * i++ / objects.Count() * 100,
      loadingBlurb = string.Format("Converting and uploading objects: {0} / {1}", i, objects.Count())
    }));

    long handle = 0;
    SpeckleObject speckleObject = null;
    try
    {
      handle = (long)obj.Properties["autocadhandle"];
      speckleObject = AutocadDataService.GetObject(handle);
      if (speckleObject == null)
      {
        errors.Add(new SpeckleError { Message = "Could not retrieve element", Details = string.Empty });
        continue;
      }
    }
    catch (Exception e)
    {
      errors.Add(new SpeckleError { Message = "Could not retrieve element", Details = e.Message });
      continue;
    }

    try
    {
      var conversionResult = new List<SpeckleObject> { speckleObject };
      var byteCount = Converter.getBytes(conversionResult).Length;
      currentBucketSize += byteCount;

      if (byteCount > 2e6)
      {
        errors.Add(new SpeckleError { Message = "Element is too big to be sent", Details = $"Element {handle} is bigger than 2MB, it will be skipped" });
        continue;
      }

      convertedObjects.AddRange(conversionResult);

      if (currentBucketSize > 5e5 || i >= objects.Count()) // aim for roughly 500kb uncompressed
      {
        LocalContext.PruneExistingObjects(convertedObjects, apiClient.BaseUrl);
        try
        {
          var chunkResponse = apiClient.ObjectCreateAsync(convertedObjects).Result.Resources;
          int m = 0;
          foreach (var objConverted in convertedObjects)
          {
            objConverted._id = chunkResponse[m++]._id;
            placeholders.Add(new SpecklePlaceholder() { _id = objConverted._id });
            if (objConverted.Type != "Placeholder")
            {
              LocalContext.AddSentObject(objConverted, apiClient.BaseUrl);
            }
          }
        }
        catch (Exception e)
        {
          errors.Add(new SpeckleError { Message = $"Failed to send {convertedObjects.Count} objects", Details = e.Message });
        }
        currentBucketSize = 0;
        convertedObjects = new List<SpeckleObject>(); // reset the chunkness
      }
    }
    catch (Exception e)
    {
      failedToConvert++;
      errors.Add(new SpeckleError { Message = "Failed to convert element", Details = $"Element handle: {handle}" });
      //NotifyUi("update-client", JsonConvert.SerializeObject(new
      //{
      //  _id = (string)client._id,
      //  errors = "Failed to convert " + failedToConvert + " objects."
      //}));
    }
  }

  if (errors.Any())
  {
    if (failedToConvert > 0)
    {
      errorMsg += string.Format("Failed to convert {0} object{1} ", failedToConvert, failedToConvert == 1 ? "" : "s");
    }
    else
    {
      errorMsg += string.Format("There {0} {1} error{2} ", errors.Count() == 1 ? "is" : "are", errors.Count(), errors.Count() == 1 ? "" : "s");
    }
  }

  var myStream = new SpeckleStream() { Objects = placeholders };
  var baseProps = new Dictionary<string, object>();
  baseProps["units"] = units;
  //baseProps["unitsDictionary"] = GetAndClearUnitDictionary();
  myStream.BaseProperties = baseProps;

  NotifyUi("update-client", JsonConvert.SerializeObject(new
  {
    _id = (string)client._id,
    loading = true,
    isLoadingIndeterminate = true,
    loadingBlurb = "Updating stream."
  }));

  apiClient.Stream = myStream;
  var response = apiClient.StreamUpdateAsync((string)client.streamId, myStream).Result;

  var plural = objects.Count() == 1 ? "" : "s";
  NotifyUi("update-client", JsonConvert.SerializeObject(new
  {
    _id = (string)client._id,
    loading = false,
    loadingBlurb = "",
    message = $"Done sending {objects.Count()} object{plural}.",
    errorMsg,
    errors
  }));
}
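// Every sender above re-implements the same pipeline: prune against the local cache,
// bucket by serialized size, ObjectCreateAsync per bucket, cache sent objects, then
// StreamUpdateAsync with placeholders. As one comment above puts it, "these functions
// should go into core". A sketch of what a shared helper could look like; the name
// SendInBucketsAsync and its signature are illustrative, not existing API.
private static async Task<List<SpeckleObject>> SendInBucketsAsync(
  SpeckleApiClient apiClient, List<SpeckleObject> convertedObjects, Action<string> onError)
{
  LocalContext.PruneExistingObjects(convertedObjects, apiClient.BaseUrl);

  // everything already on the server: nothing to send
  if (convertedObjects.All(o => o.Type == "Placeholder"))
  {
    return convertedObjects;
  }

  var persisted = new List<SpeckleObject>();
  var bucket = new List<SpeckleObject>();
  long bucketSize = 0;

  async Task FlushAsync()
  {
    if (bucket.Count == 0) return;
    try
    {
      var res = await apiClient.ObjectCreateAsync(bucket);
      int m = 0;
      foreach (var o in bucket)
      {
        o._id = res.Resources[m++]._id;
        if (o.Type != "Placeholder")
        {
          LocalContext.AddSentObject(o, apiClient.BaseUrl); // cache what was actually sent
        }
      }
      persisted.AddRange(res.Resources);
    }
    catch (Exception e)
    {
      onError(e.Message);
    }
    bucket = new List<SpeckleObject>();
    bucketSize = 0;
  }

  foreach (var obj in convertedObjects)
  {
    long size = Converter.getBytes(obj).Length;
    if (size > 2e6)
    {
      onError("An object is bigger than 2MB and was skipped.");
      continue;
    }
    bucket.Add(obj);
    bucketSize += size;
    if (bucketSize > 5e5) // flush once past ~500kB uncompressed
    {
      await FlushAsync();
    }
  }
  await FlushAsync(); // send the last, partially filled bucket

  return persisted;
}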