/// <summary>
/// Moves files in a single batch request (both files must belong to the same account).
/// </summary>
/// <param name="moveFileParamList">Move operations: source key, target bucket/key, force flag.</param>
/// <param name="persistentOps">Storage strategy used to resolve the source bucket.</param>
/// <returns>One result per input, in batch-response order.</returns>
private IEnumerable<MoveFileResultDto> MoveMulti(List<MoveFileRangeParam.MoveFileParam> moveFileParamList, BasePersistentOps persistentOps)
{
    var bucketManager = GetBucketManager(persistentOps);
    // Resolve the source bucket once instead of once per element.
    string sourceBucket = Core.Tools.GetBucket(QiNiuConfig, persistentOps.Bucket);
    List<string> ops = moveFileParamList
        .Select(x => bucketManager.MoveOp(sourceBucket, x.SourceKey, x.OptBucket, x.OptKey, x.IsForce))
        .ToList();
    BatchResult ret = bucketManager.Batch(ops);
    var index = 0;
    foreach (BatchInfo info in ret.Result)
    {
        index++;
        // Index the list directly; the original re-materialized it with ToList() every iteration.
        var param = moveFileParamList[index - 1];
        if (info.Code == (int)HttpCode.OK)
        {
            // Fixed message: this is a move, not a copy (was "复制成功").
            yield return new MoveFileResultDto(true, param.SourceKey, "移动成功");
        }
        else
        {
            yield return new MoveFileResultDto(false, param.SourceKey, info.Data.Error);
        }
    }
}
/// <summary>
/// Changes the storage type of files in a single batch request.
/// </summary>
/// <param name="keys">File keys.</param>
/// <param name="type">0 = standard storage, 1 = infrequent-access (low frequency) storage.</param>
/// <param name="persistentOps">Storage strategy used to resolve the bucket.</param>
/// <returns>One result per key, in batch-response order.</returns>
private IEnumerable<ChangeTypeResultDto> ChangeTypeMulti(string[] keys, int type, BasePersistentOps persistentOps)
{
    var bucketManager = base.GetBucketManager(persistentOps);
    // Resolve the bucket once instead of once per key.
    string bucket = Core.Tools.GetBucket(QiNiuConfig, persistentOps.Bucket);
    List<string> ops = new List<string>();
    foreach (string key in keys)
    {
        ops.Add(bucketManager.ChangeTypeOp(bucket, key, type));
    }
    BatchResult ret = bucketManager.Batch(ops);
    var index = 0;
    foreach (BatchInfo info in ret.Result)
    {
        index++;
        // Index the array directly; the original allocated a new list (keys.ToList()) per iteration.
        if (info.Code == (int)HttpCode.OK)
        {
            yield return new ChangeTypeResultDto(true, keys[index - 1], "success");
        }
        else
        {
            yield return new ChangeTypeResultDto(false, keys[index - 1], "lose");
        }
    }
}
/// <summary>
/// Copies files to a (possibly different) bucket in a single batch request.
/// </summary>
/// <param name="copyFileParam">Copy operations: source key, target bucket/key, force flag.</param>
/// <param name="persistentOps">Storage strategy used to resolve the source bucket.</param>
/// <returns>One result per input, in batch-response order.</returns>
private IEnumerable<CopyFileResultDto> CopyToMulti(ICollection<CopyFileRangeParam.CopyFileParam> copyFileParam, BasePersistentOps persistentOps)
{
    // Materialize once; the original called ToList() on every loop iteration.
    var paramList = copyFileParam.ToList();
    // Reuse one bucket manager; the original created one per element and again for Batch.
    var bucketManager = GetBucketManager(persistentOps);
    string sourceBucket = Core.Tools.GetBucket(QiNiuConfig, persistentOps.Bucket);
    List<string> ops = paramList
        .Select(x => bucketManager.CopyOp(sourceBucket, x.SourceKey,
                                          Core.Tools.GetBucket(QiNiuConfig, persistentOps.Bucket, x.OptBucket),
                                          x.OptKey, x.IsForce))
        .ToList();
    BatchResult ret = bucketManager.Batch(ops);
    var index = 0;
    foreach (BatchInfo info in ret.Result)
    {
        index++;
        var param = paramList[index - 1];
        if (info.Code == (int)HttpCode.OK)
        {
            yield return new CopyFileResultDto(true, param.SourceKey, "复制成功");
        }
        else
        {
            yield return new CopyFileResultDto(false, param.SourceKey, info.Data.Error);
        }
    }
}
/// <summary>
/// Moves files in a single batch request (both files must belong to the same account).
/// </summary>
/// <param name="moveFileParamList">Move operations to perform.</param>
/// <returns>One result per input, in batch-response order.</returns>
private IEnumerable<MoveFileResultDto> MoveMulti(List<MoveFileParam> moveFileParamList)
{
    // Validate every parameter up front; Check throws on failure.
    // (moveFileParamList is already a List, so the original ToList() copy was redundant.)
    moveFileParamList.ForEach(item =>
    {
        new MoveFileParamValidator().Validate(item).Check(HttpStatus.Err.Name);
    });
    // Reuse one bucket manager; the original created one per element and again for Batch.
    var bucketManager = GetBucketManager();
    List<string> ops = moveFileParamList
        .Select(x => bucketManager.MoveOp(x.SourceBucket, x.SourceKey, x.OptBucket, x.OptKey, x.IsForce))
        .ToList();
    BatchResult ret = bucketManager.Batch(ops);
    var index = 0;
    foreach (BatchInfo info in ret.Result)
    {
        index++;
        var param = moveFileParamList[index - 1];
        if (info.Code == (int)HttpCode.OK)
        {
            // Fixed message: this is a move, not a copy (was "复制成功").
            yield return new MoveFileResultDto(true, param.FileId, "移动成功");
        }
        else
        {
            yield return new MoveFileResultDto(false, param.FileId, info.Data.Error);
        }
    }
}
/// <summary>
/// Waits until the batch's last etag has replicated to the requested number of
/// destinations, optionally throwing when the timeout elapses first.
/// writeAssurance format: "&lt;replicas&gt;;&lt;timeout&gt;;&lt;throwOnTimeout&gt;;&lt;majority|count&gt;".
/// </summary>
private async Task WaitForReplicationAsync(string writeAssurance, BatchResult lastResultWithEtag)
{
    var segments = writeAssurance.Split(';');
    var replicaCount = int.Parse(segments[0]);
    var waitTimeout = TimeSpan.Parse(segments[1]);
    var shouldThrowOnTimeout = bool.Parse(segments[2]);
    var useMajority = segments[3] == "majority";

    var replication = Database.StartupTasks.OfType<ReplicationTask>().FirstOrDefault();
    if (replication == null)
    {
        if (Log.IsDebugEnabled)
        {
            Log.Debug("Was asked to get write assurance on a database without replication, ignoring the request");
        }
        return;
    }

    // Without an etag from the batch there is nothing we can wait on.
    if (lastResultWithEtag == null)
    {
        return;
    }

    var requiredReplicas = useMajority
        ? replication.GetSizeOfMajorityFromActiveReplicationDestination(replicaCount)
        : replicaCount;
    var reachedReplicas = await replication.WaitForReplicationAsync(lastResultWithEtag.Etag, waitTimeout, requiredReplicas).ConfigureAwait(false);
    if (reachedReplicas < requiredReplicas && shouldThrowOnTimeout)
    {
        throw new TimeoutException(
            $"Could not verify that etag {lastResultWithEtag.Etag} was replicated to {requiredReplicas} servers in {waitTimeout}. So far, it only replicated to {reachedReplicas}");
    }
    // Otherwise we either tolerate the timeout or replication completed in time.
}
/// <summary>
/// Retrieves file metadata for a set of keys in a single batch request.
/// </summary>
/// <param name="keyList">File keys.</param>
/// <param name="persistentOps">Storage strategy used to resolve the bucket.</param>
/// <returns>One FileInfoDto per key, in batch-response order.</returns>
private IEnumerable<FileInfoDto> GetMulti(string[] keyList, BasePersistentOps persistentOps)
{
    // Reuse one bucket manager and resolve the bucket once; the original
    // called GetBucketManager(persistentOps) per key and again for Batch.
    var bucketManager = GetBucketManager(persistentOps);
    string bucket = Core.Tools.GetBucket(QiNiuConfig, persistentOps.Bucket);
    List<string> ops = keyList.Select(key => bucketManager.StatOp(bucket, key)).ToList();
    BatchResult ret = bucketManager.Batch(ops);
    var index = 0;
    foreach (var item in ret.Result)
    {
        index++;
        if (item.Code == (int)HttpCode.OK)
        {
            yield return new FileInfoDto(true, "success")
            {
                Size = item.Data.Fsize,
                Hash = item.Data.Hash,
                MimeType = item.Data.MimeType,
                PutTime = item.Data.PutTime,
                // Map the SDK's storage-class value onto the plugin enumeration.
                FileType = Configuration.Ioc.Plugs.Storage.Enumerations.StorageClass
                    .FromValue<EInfrastructure.Core.Configuration.Ioc.Plugs.Storage.Enumerations.StorageClass>(
                        item.Data.FileType),
                Key = keyList[index - 1]
            };
        }
        else
        {
            yield return new FileInfoDto(false, item.Data.Error)
            {
                Key = keyList[index - 1]
            };
        }
    }
}
/// <summary>
/// Averages the sentiment scores of all documents and maps the average to a
/// mood message (above / exactly / below 0.5).
/// </summary>
/// <param name="sentiment">Sentiment analysis result whose documents carry a score.</param>
/// <param name="moodLevel">Receives the average score (0 when there are no documents).</param>
/// <returns>Mood message, or an empty string when there are no documents.</returns>
private string WhatIsYourMood(BatchResult sentiment, out double moodLevel)
{
    string answer = string.Empty;
    // Guard: averaging zero documents would divide 0 by 0 and hand the caller NaN.
    if (sentiment.documents.Count == 0)
    {
        moodLevel = 0;
        return answer;
    }
    double average = 0;
    foreach (var res in sentiment.documents)
    {
        average += res.score;
    }
    average = average / sentiment.documents.Count;
    moodLevel = average;
    if (average > 0.5)
    {
        answer = "Cool that you're feeling quit good :D Let's keep on like this!";
    }
    else if (average == 0.5)
    {
        answer = "Mmmmmmmhhhhhhhhh ... cosi cosi .....";
    }
    else
    {
        answer = ".... not cool .. what is keeping you down ... :/";
    }
    return answer;
}
/// <summary>
/// Inserts a batch of serialized messages into "topicmessages", storing each payload
/// as jsonb. Acknowledge events (MessageEventId != 0) are written with status
/// "acknowledged" using the content of the referenced message from the same batch;
/// new events are written with status "new".
/// </summary>
/// <param name="result">Batch of (id, serialized ChannelMessagesJson) pairs.</param>
/// <returns>True when the batch was processed (including the empty-batch no-op).</returns>
public bool InsertJsonBatchIntoTable(BatchResult<long, string> result)
{
    // Guard: an empty batch would build "INSERT ... VALUES ;" — invalid SQL.
    if (result.Messages.Count == 0)
    {
        return true;
    }
    // One "(@p1N, @p2N :: jsonb)" values tuple per message.
    var valuesTableSql = string.Join(",", Enumerable.Range(0, result.Messages.Count).Select(i => $"(@p1{i}, @p2{i} :: jsonb )"));
    using (var cmd = new NpgsqlCommand($"INSERT INTO \"topicmessages\" (\"id\", \"content\") VALUES {valuesTableSql};", conn))
    {
        for (int i = 0; i < result.Messages.Count; ++i)
        {
            // Hoisted: the original called result.Messages.ElementAt(i) three times per iteration.
            var message = result.Messages.ElementAt(i);
            var deserializedMessage = JsonSerializer.Deserialize<ChannelMessagesJson>(message.Value);
            cmd.Parameters.AddWithValue($"p1{i}", message.Key);
            // NOTE(review): the jsonb payload is built by string interpolation; if Content
            // can contain quotes/backslashes this yields invalid JSON — consider
            // serializing a DTO with JsonSerializer instead.
            if (deserializedMessage.MessageEventId != 0)
            {
                // Acknowledge event: pull the referenced message's content from the same batch.
                var Content = result.Messages.FirstOrDefault(m => m.Key == deserializedMessage.MessageEventId);
                cmd.Parameters.AddWithValue($"p2{i}", $"{{ \"id\":{deserializedMessage.MessageEventId}, \"content\": \"{JsonSerializer.Deserialize<ChannelMessagesJson>(Content.Value).Content}\" ,\"status\":\"acknowledged\", \"isreceived\":true,\"receivedtimestamp\":\"{deserializedMessage.ReceivedTimestamp}\"}}");
            }
            else
            {
                cmd.Parameters.AddWithValue($"p2{i}", $"{{ \"id\":{deserializedMessage.Id}, \"content\": \"{deserializedMessage.Content}\" ,\"status\":\"new\",\"created\":\"{deserializedMessage.Created}\"}}");
            }
        }
        cmd.ExecuteNonQuery();
    }
    return true;
}
/// <summary>
/// Collects the non-empty key phrases from all documents and formats a reply
/// describing them as mood-lifting or mood-lowering topics; with no topics,
/// only a smiley reflecting the mood level is returned.
/// </summary>
/// <param name="keyPhrases">Key-phrase extraction result.</param>
/// <param name="moodLevel">Average sentiment score; > 0.5 counts as happy.</param>
/// <returns>The formatted reply.</returns>
private string WhatAreYourTopics(BatchResult keyPhrases, double moodLevel)
{
    // Fixed: original had a redundant double assignment (answer = answer = "so ... ")
    // and an unused local "keys".
    string answer = "so ... ";
    bool haveTopic = false;
    List<string> topics = new List<string>();
    foreach (var res in keyPhrases.documents)
    {
        foreach (var key in res.keyPhrases)
        {
            if (!string.IsNullOrEmpty(key))
            {
                haveTopic = true;
                topics.Add(key);
            }
        }
    }
    answer += $" ... ";
    if (haveTopic)
    {
        answer += $"I feel like ";
        topics.ForEach(x => answer += $"{x} ");
        if (topics.Count > 1)
        {
            answer += $"are topics";
        }
        else
        {
            answer += $"is a topic";
        }
        if (moodLevel > 0.5)
        {
            answer += $" that make you happy :)";
        }
        else
        {
            answer += $" that takes you down ...)";
        }
    }
    else
    {
        // No topics found: reply with just a smiley matching the mood.
        answer = string.Empty;
        if (moodLevel > 0.5)
        {
            answer += $":)";
        }
        else
        {
            answer += $":(";
        }
    }
    return answer;
}
/// <summary>
/// Optimized variant: inserts a batch of already-deserialized messages into
/// "topicmessages" as jsonb. Acknowledge events (MessageEventId != 0) are resolved
/// against the database first, falling back to the current batch when the created
/// event arrived in the same batch; new events are stored with status "new".
/// </summary>
/// <param name="result">Batch of (id, ChannelMessagesJson) pairs.</param>
/// <returns>True when the batch was processed (empty batches are a no-op).</returns>
public bool InsertJsonBatchDesirializedIntoTableOpt(BatchResult <long, ChannelMessagesJson> result)
{
    // One "(@p1N, @p2N :: jsonb)" values tuple per message.
    var valuesTableSql = string.Join(",", Enumerable.Range(0, result.Messages.Count).Select(i => $"(@p1{i}, @p2{i} :: jsonb )"));
    // NOTE(review): 'options' is never used below.
    var options = new JsonSerializerOptions { AllowTrailingCommas = true };
    if (result.Messages.Count > 0)
    {
        // Collect the referenced message ids of all acknowledge events, to query the database.
        var queryIds = result.Messages.Where(me => me.Value.MessageEventId != 0).Select(me => me.Value.MessageEventId).ToList();
        List <Message <long, string> > values = new List <Message <long, string> >();
        if (queryIds.Count() != 0)
        {
            values = SelectResultsFromTableString(queryIds);
        }
        using (var cmd = new NpgsqlCommand($"INSERT INTO \"topicmessages\" (\"id\", \"content\") VALUES {valuesTableSql};", conn))
        {
            // NOTE(review): executeQuery is never set to false, so the INSERT always runs.
            bool executeQuery = true;
            for (int i = 0; i < result.Messages.Count; ++i)
            {
                executeQuery = true;
                var deserializedMessage = result.Messages.ElementAt(i).Value;
                cmd.Parameters.AddWithValue($"p1{i}", result.Messages.ElementAt(i).Key);
                if (deserializedMessage.MessageEventId != 0)
                {
                    // Acknowledge event: prefer the stored message fetched from the database.
                    Message <long, string> Content = values.FirstOrDefault(m => m.Key == deserializedMessage.MessageEventId);
                    if (Content == null)
                    {
                        // In this rare event, the acknowledge is probably in the same batch as the created event.
                        var ContentBatch = result.Messages.FirstOrDefault(m => m.Key == deserializedMessage.MessageEventId);
                        cmd.Parameters.AddWithValue($"p2{i}", $"{{ \"id\":{deserializedMessage.MessageEventId}, \"content\": \"{ContentBatch.Value}\" ,\"status\":\"acknowledged\", \"isreceived\":true,\"receivedtimestamp\":\"{deserializedMessage.ReceivedTimestamp}\"}}");
                    }
                    else
                    {
                        cmd.Parameters.AddWithValue($"p2{i}", $"{{ \"id\":{deserializedMessage.MessageEventId}, \"content\": \"{Content.Value}\" ,\"status\":\"acknowledged\", \"isreceived\":true,\"receivedtimestamp\":\"{deserializedMessage.ReceivedTimestamp}\"}}");
                    }
                }
                else
                {
                    // New event: store as status "new".
                    cmd.Parameters.AddWithValue($"p2{i}", $"{{ \"id\":{deserializedMessage.Id}, \"content\": \"{deserializedMessage.Content}\" ,\"status\":\"new\",\"created\":\"{deserializedMessage.Created}\"}}");
                }
            }
            if (executeQuery == true)
            {
                cmd.ExecuteNonQuery();
            }
        }
    }
    return(true);
}
/// <summary>
/// Deletes the rows currently selected in the grid: asks for confirmation,
/// batch-deletes the corresponding keys from the bucket, then refreshes the view.
/// </summary>
private void Delete()
{
    // 1. Nothing to do when the grid has no data or no selection.
    // (Fixed: the original used '&&', which only returned when BOTH held.)
    if (dgResult.ItemsSource == null || dgResult.SelectedItems.Count <= 0)
    {
        return;
    }
    List<QiNiuFileInfo> list = new List<QiNiuFileInfo>();
    foreach (var item in dgResult.SelectedItems)
    {
        QiNiuFileInfo info = (QiNiuFileInfo)item;
        if (info != null)
        {
            list.Add(info);
        }
    }
    if (list.Count > 0)
    {
        // 2. Confirm with the user, listing the file names about to be removed.
        string msg = string.Join(",\r\n", list.Select(q => q.FileName));
        MessageBoxResult confirmToDel = MessageBox.Show("确认要删除所选行吗?\r\n" + msg, "提示", MessageBoxButton.YesNo, MessageBoxImage.Question);
        if (confirmToDel != MessageBoxResult.Yes)
        {
            return;
        }
        // 3. Batch-delete the selected keys. (Removed unused StringBuilder.)
        List<string> ops = new List<string>();
        foreach (var key in list)
        {
            ops.Add(bucketManager.DeleteOp(bucket, key.FileName));
        }
        BatchResult ret = bucketManager.Batch(ops);
        if (ret.Code / 100 != 2)
        {
            MessageBox.Show("批量删除error: " + ret.ToString());
            return;
        }
        MessageBox.Show("批量删除成功!");
        // 4. Refresh the grid.
        Search();
        Thread.Sleep(10);
    }
}
/// <summary>
/// Executes one batch command against the database and returns a BatchResult
/// describing its outcome. The command's identifying fields are copied onto the
/// result AFTER execution, since Execute mutates Key/Etag (e.g. a PUT fills in
/// the generated key).
/// </summary>
/// <param name="self">The command to execute.</param>
/// <param name="database">Target database.</param>
/// <param name="participatingIds">Keys of all documents taking part in this batch.</param>
/// <returns>The populated batch result.</returns>
public static BatchResult ExecuteBatch(this ICommandData self, DocumentDatabase database, IEnumerable <string> participatingIds = null)
{
    var result = new BatchResult();
    Execute(self, database, result, participatingIds);
    result.Etag = self.Etag;
    result.Key = self.Key;
    result.Method = self.Method;
    result.Metadata = self.Metadata;
    result.AdditionalData = self.AdditionalData;
    return result;
}
/// <summary>
/// Executes one batch command against the database and returns a BatchResult
/// describing its outcome. The command's identifying fields are copied onto the
/// result AFTER execution, since Execute mutates Key/Etag (e.g. a PUT fills in
/// the generated key).
/// </summary>
/// <param name="self">The command to execute.</param>
/// <param name="database">Target database.</param>
/// <returns>The populated batch result.</returns>
public static BatchResult ExecuteBatch(this ICommandData self, DocumentDatabase database)
{
    var result = new BatchResult();
    Execute(self, database, result);
    result.Etag = self.Etag;
    result.Key = self.Key;
    result.Method = self.Method;
    result.Metadata = self.Metadata;
    result.AdditionalData = self.AdditionalData;
    return result;
}
/// <summary>
/// Logs a one-line-per-statement summary of the batch results (SQL flattened to a
/// single line plus its update counts).
/// </summary>
/// <param name="results">Batch results to summarize; an empty list logs nothing.</param>
public virtual void printBatchResults(IList<BatchResult> results)
{
    if (results.Count > 0)
    {
        StringBuilder sb = new StringBuilder();
        sb.Append("Batch summary:\n");
        for (int i = 0; i < results.Count; i++)
        {
            BatchResult result = results[i];
            sb.Append("Result ").Append(i).Append(":\t");
            // Collapse every whitespace run so the SQL fits on one log line.
            // (Fixed: the original called Java's String.replaceAll, which does
            // not exist on System.String.)
            string flattenedSql = string.Join(" ", result.Sql.Split((char[])null, StringSplitOptions.RemoveEmptyEntries));
            sb.Append(flattenedSql).Append("\t");
            // Java's Arrays.toString format: "[a, b, c]".
            sb.Append("Update counts: ").Append("[").Append(string.Join(", ", result.UpdateCounts)).Append("]").Append("\n");
        }
        logDebug("082", sb.ToString());
    }
}
/// <summary>
/// Submits a batch containing two valid ops plus one deliberately undefined op,
/// and accepts any of OK / BAD_REQUEST / PARTLY_OK as a valid outcome.
/// </summary>
public void BatchTest()
{
    Mac mac = new Mac(AccessKey, SecretKey);
    BucketManager target = new BucketManager(mac);
    string statOp = target.StatOp(Bucket1, FileKey1);
    string chgmOp = target.ChgmOp(Bucket2, FileKey2, "MimeType");
    string[] ops = { statOp, chgmOp, "OP-UNDEF" };
    BatchResult result = target.Batch(ops);
    bool acceptable =
        result.Code == (int)HttpCode.OK ||
        result.Code == (int)HttpCode.BAD_REQUEST ||
        result.Code == (int)HttpCode.PARTLY_OK;
    Assert.IsTrue(acceptable);
}
/// <summary>
/// Stats three known keys in one batch and dumps each file's metadata
/// (or the per-item error) to the console.
/// </summary>
public void BatchStatTest()
{
    // Ensure the target files exist first.
    BatchCopyTest();
    Config config = new Config();
    config.Zone = Zone.ZONE_CN_East;
    Mac mac = new Mac(AccessKey, SecretKey);
    BucketManager bucketManager = new BucketManager(mac, config);
    string[] keys = { "qiniu-0.png", "qiniu-1.png", "qiniu-2.png" };
    List<string> ops = new List<string>();
    for (int i = 0; i < keys.Length; i++)
    {
        ops.Add(bucketManager.StatOp(Bucket, keys[i]));
    }
    BatchResult ret = bucketManager.Batch(ops);
    if (ret.Code / 100 != 2)
    {
        Assert.Fail("batch error: " + ret.ToString());
    }
    foreach (BatchInfo info in ret.Result)
    {
        if (info.Code == (int)HttpCode.OK)
        {
            Console.WriteLine("{0}, {1}, {2}, {3}, {4}", info.Data.MimeType, info.Data.PutTime, info.Data.Hash, info.Data.Fsize, info.Data.FileType);
        }
        else
        {
            Console.WriteLine(info.Data.Error);
        }
    }
}
/// <summary>
/// Schedules three known keys for deletion after 7 days in one batch and
/// reports the per-item outcome on the console.
/// </summary>
public void BatchDeleteAfterDaysTest()
{
    // Ensure the target files exist first.
    BatchCopyTest();
    Config config = new Config();
    config.Zone = Zone.ZONE_CN_East;
    Mac mac = new Mac(AccessKey, SecretKey);
    BucketManager bucketManager = new BucketManager(mac, config);
    string[] keys = { "qiniu-0.png", "qiniu-1.png", "qiniu-2.png" };
    List<string> ops = new List<string>();
    for (int i = 0; i < keys.Length; i++)
    {
        ops.Add(bucketManager.DeleteAfterDaysOp(Bucket, keys[i], 7));
    }
    BatchResult ret = bucketManager.Batch(ops);
    if (ret.Code / 100 != 2)
    {
        Assert.Fail("batch error: " + ret.ToString());
    }
    foreach (BatchInfo info in ret.Result)
    {
        if (info.Code == (int)HttpCode.OK)
        {
            Console.WriteLine("deleteAfterDays success");
        }
        else
        {
            Console.WriteLine(info.Data.Error);
        }
    }
}
/// <summary>
/// Deletes the rows currently selected in the grid without asking for
/// confirmation: batch-deletes the corresponding keys, then refreshes the view.
/// </summary>
private void Delete()
{
    // 1. Nothing to do when the grid has no data or no selection.
    // (Fixed: the original used '&&', which only returned when BOTH held.)
    if (dgResult.ItemsSource == null || dgResult.SelectedItems.Count <= 0)
    {
        return;
    }
    List<QiNiuFileInfo> list = new List<QiNiuFileInfo>();
    foreach (var item in dgResult.SelectedItems)
    {
        QiNiuFileInfo info = (QiNiuFileInfo)item;
        if (info != null)
        {
            list.Add(info);
        }
    }
    if (list.Count > 0)
    {
        // 2. Batch-delete the selected keys. (Removed unused StringBuilder.)
        List<string> ops = new List<string>();
        foreach (var key in list)
        {
            ops.Add(bucketManager.DeleteOp(bucket, key.FileName));
        }
        BatchResult ret = bucketManager.Batch(ops);
        if (ret.Code / 100 != 2)
        {
            MessageBox.Show("批量删除error: " + ret.ToString());
            return;
        }
        MessageBox.Show("批量删除成功!");
        // 3. Refresh the grid.
        Search();
        Thread.Sleep(10);
    }
}
/// <summary>
/// Deletes files by key in a single batch request.
/// </summary>
/// <param name="keyList">File keys to delete.</param>
/// <returns>One result per key, in batch-response order.</returns>
private IEnumerable<DeleteResultDto> DelMulti(IEnumerable<string> keyList)
{
    // Materialize once to avoid multiple enumeration of the caller's sequence.
    var keys = keyList as string[] ?? keyList.ToArray();
    // Reuse one bucket manager; the original created one per key and again for Batch.
    var bucketManager = GetBucketManager();
    List<string> ops = keys.Select(key => bucketManager.DeleteOp(QiNiuConfig.Bucket, key)).ToList();
    BatchResult ret = bucketManager.Batch(ops);
    var index = 0;
    foreach (var item in ret.Result)
    {
        index++;
        // Index the array directly; the original allocated a new list per iteration.
        string key = keys[index - 1];
        if (item.Code == (int)HttpCode.OK)
        {
            yield return new DeleteResultDto(true, key, "删除成功");
        }
        else
        {
            yield return new DeleteResultDto(false, key, item.Data.Error);
        }
    }
}
/// <summary>
/// Waits for the batch's last etag to replicate, delegating timeout/majority
/// handling to the replication task itself.
/// writeAssurance format: "&lt;replicas&gt;;&lt;timeout&gt;;&lt;throwOnTimeout&gt;;&lt;majority|count&gt;".
/// </summary>
private async Task WaitForReplicationAsync(string writeAssurance, BatchResult lastResultWithEtag)
{
    var segments = writeAssurance.Split(';');
    var replicaCount = int.Parse(segments[0]);
    var waitTimeout = TimeSpan.Parse(segments[1]);
    var shouldThrowOnTimeout = bool.Parse(segments[2]);
    var useMajority = segments[3] == "majority";

    var replication = Database.StartupTasks.OfType<ReplicationTask>().FirstOrDefault();
    if (replication == null)
    {
        if (Log.IsDebugEnabled)
        {
            Log.Debug("Was asked to get write assurance on a database without replication, ignoring the request");
        }
        return;
    }

    // Without an etag from the batch there is nothing to wait for.
    if (lastResultWithEtag == null)
    {
        return;
    }

    await replication.WaitForReplicationAsync(lastResultWithEtag.Etag, waitTimeout, replicaCount, useMajority, shouldThrowOnTimeout).ConfigureAwait(false);
}
/// <summary>
/// Builds an IHttpCommunication mock whose PostAsync always returns a serialized
/// sentiment payload containing a single document with the requested score.
/// </summary>
/// <param name="score">Sentiment score of the single fake document.</param>
/// <returns>The configured mock.</returns>
public static Mock <IHttpCommunication> CreateSentimentHttpMock(double score)
{
    var payload = new BatchResult
    {
        Documents = new List <DocumentResult>
        {
            new DocumentResult { Score = score }
        }
    };
    var serialized = JsonConvert.SerializeObject(payload);
    var mock = new Mock <IHttpCommunication>();
    mock.Setup(x => x.PostAsync(
            It.IsAny <string>(),
            It.IsAny <string>(),
            It.IsAny <Dictionary <string, string> >(),
            It.IsAny <byte[]>()))
        .Returns(Task.FromResult(serialized));
    return mock;
}
/// <summary>
/// Builds a SentimentManager wired to a mocked IHttpCommunication whose PostAsync
/// always returns a single-document sentiment payload with score 60.
/// </summary>
/// <returns>The SentimentManager under test.</returns>
private static SentimentManager GetSentimentManager()
{
    var payload = new BatchResult
    {
        Documents = new List <DocumentResult>
        {
            new DocumentResult { Score = 60 }
        }
    };
    var serialized = JsonConvert.SerializeObject(payload);
    var mock = new Mock <IHttpCommunication>();
    mock.Setup(x => x.PostAsync(
            It.IsAny <string>(),
            It.IsAny <string>(),
            It.IsAny <Dictionary <string, string> >(),
            It.IsAny <byte[]>()))
        .Returns(Task.FromResult(serialized));
    return new SentimentManager("text analytics api key", "", "http://localhost", mock.Object);
}
/// <summary>
/// Retrieves file metadata for a set of keys in a single batch request.
/// </summary>
/// <param name="keyList">File keys.</param>
/// <returns>One FileInfoDto per key, in batch-response order.</returns>
private IEnumerable<FileInfoDto> GetMulti(IEnumerable<string> keyList)
{
    // Materialize once to avoid multiple enumeration of the caller's sequence.
    var keys = keyList as string[] ?? keyList.ToArray();
    // Reuse one bucket manager; the original created one per key and again for Batch.
    var bucketManager = GetBucketManager();
    List<string> ops = keys.Select(key => bucketManager.StatOp(QiNiuConfig.Bucket, key)).ToList();
    BatchResult ret = bucketManager.Batch(ops);
    var index = 0;
    foreach (var item in ret.Result)
    {
        index++;
        // Index the array directly; the original allocated a new list per iteration.
        string path = keys[index - 1];
        if (item.Code == (int)HttpCode.OK)
        {
            yield return new FileInfoDto()
            {
                Size = item.Data.Fsize,
                Hash = item.Data.Hash,
                MimeType = item.Data.MimeType,
                PutTime = item.Data.PutTime,
                FileType = item.Data.FileType,
                Success = true,
                Host = QiNiuConfig.Host,
                Path = path,
                Msg = "成功"
            };
        }
        else
        {
            yield return new FileInfoDto()
            {
                Success = false,
                Msg = item.Data.Error,
                Host = QiNiuConfig.Host,
                Path = path
            };
        }
    }
}
/// <summary>
/// Processes the given documents in parallel, collecting one error message per
/// failed document into a single aggregated BatchResult.
/// </summary>
/// <param name="processObjects">Documents to process.</param>
/// <returns>Aggregated result; empty when every document processed cleanly.</returns>
public BatchResult BulkProcessing(IEnumerable <BlobProcessMetadataContext> processObjects)
{
    var batchResult = new BatchResult();
    var syncRoot = new object();
    Parallel.ForEach(processObjects, context =>
    {
        try
        {
            ProcessFile(context);
        }
        catch (Exception e)
        {
            // Serialize writers: AddError is shared across the parallel workers.
            lock (syncRoot)
            {
                batchResult.AddError($"Processing document {context.FileName} failed: {e.Message}");
            }
        }
    });
    return batchResult;
}
/// <summary>
/// Exception raised for a failed batch request: the failing result is serialized
/// (indented JSON) into the base exception alongside its status code, and kept on
/// <c>Result</c> for programmatic inspection.
/// </summary>
/// <param name="result">The batch result that triggered the failure.</param>
public NeoBatchException(BatchResult result) : base(result.StatusCode, JObject.FromObject(result).ToString(Formatting.Indented))
{
    Result = result;
}
/// <summary>
/// Executes each command of the batch in order, collecting one result per command.
/// </summary>
/// <param name="commands">Commands to execute.</param>
/// <returns>Results positionally matching <paramref name="commands"/>.</returns>
private BatchResult[] ProcessBatch(IList<ICommandData> commands)
{
    var batchResults = new BatchResult[commands.Count];
    for (var i = 0; i < commands.Count; i++)
    {
        batchResults[i] = commands[i].ExecuteBatch(this);
    }
    return batchResults;
}
/// <summary>
/// Executes a single batch command (delete / put / patch / scripted patch) against
/// the database, recording outcome details on <paramref name="batchResult"/> when
/// one is supplied. Commands of unrecognized types fall through and do nothing.
/// </summary>
/// <param name="self">The command to execute.</param>
/// <param name="database">Target database.</param>
/// <param name="batchResult">Optional result to populate (may be null).</param>
private static void Execute(ICommandData self, DocumentDatabase database, BatchResult batchResult)
{
    // DELETE: remove the document and record whether anything was actually deleted.
    var deleteCommandData = self as DeleteCommandData;
    if (deleteCommandData != null)
    {
        var result = database.Documents.Delete(deleteCommandData.Key, deleteCommandData.Etag, deleteCommandData.TransactionInformation);
        if (batchResult != null)
        {
            batchResult.Deleted = result;
        }
        return;
    }
    // PUT: store the document. Key/Etag are written back onto the command so the
    // caller can copy the generated values onto the batch result afterwards.
    var putCommandData = self as PutCommandData;
    if (putCommandData != null)
    {
        var putResult = database.Documents.Put(putCommandData.Key, putCommandData.Etag, putCommandData.Document, putCommandData.Metadata, putCommandData.TransactionInformation);
        putCommandData.Etag = putResult.ETag;
        putCommandData.Key = putResult.Key;
        return;
    }
    // PATCH (structured patch requests): apply, record the patch result, then sync
    // the command's Metadata/Etag from the stored document once storage settles.
    var patchCommandData = self as PatchCommandData;
    if (patchCommandData != null)
    {
        var result = database.Patches.ApplyPatch(patchCommandData.Key, patchCommandData.Etag,
                                                 patchCommandData.Patches, patchCommandData.PatchesIfMissing, patchCommandData.Metadata,
                                                 patchCommandData.TransactionInformation);
        if (batchResult != null)
        {
            batchResult.PatchResult = result.PatchResult;
        }
        var doc = database.Documents.Get(patchCommandData.Key, patchCommandData.TransactionInformation);
        if (doc != null)
        {
            // Defer the read-back until the transactional storage synchronizes.
            database.TransactionalStorage.ExecuteImmediatelyOrRegisterForSynchronization(() =>
            {
                patchCommandData.Metadata = doc.Metadata;
                patchCommandData.Etag = doc.Etag;
            });
        }
        return;
    }
    // SCRIPTED PATCH: apply the script, attach debug output to AdditionalData.
    var advPatchCommandData = self as ScriptedPatchCommandData;
    if (advPatchCommandData != null)
    {
        var result = database.Patches.ApplyPatch(advPatchCommandData.Key, advPatchCommandData.Etag,
                                                 advPatchCommandData.Patch, advPatchCommandData.PatchIfMissing, advPatchCommandData.Metadata,
                                                 advPatchCommandData.TransactionInformation, advPatchCommandData.DebugMode);
        if (batchResult != null)
        {
            batchResult.PatchResult = result.Item1.PatchResult;
        }
        advPatchCommandData.AdditionalData = new RavenJObject { { "Debug", new RavenJArray(result.Item2) } };
        if (advPatchCommandData.DebugMode)
        {
            // Debug mode: return the patched document in AdditionalData and skip
            // the Metadata/Etag sync below.
            advPatchCommandData.AdditionalData["Document"] = result.Item1.Document;
            return;
        }
        var doc = database.Documents.Get(advPatchCommandData.Key, advPatchCommandData.TransactionInformation);
        if (doc != null)
        {
            database.TransactionalStorage.ExecuteImmediatelyOrRegisterForSynchronization(() =>
            {
                advPatchCommandData.Metadata = doc.Metadata;
                advPatchCommandData.Etag = doc.Etag;
            });
        }
        return;
    }
}
/// <summary>
/// Copies string values from <paramref name="propertyValues"/> onto matching public
/// properties of <paramref name="objTarget"/>, converting each raw string to the
/// property's type (primitives, DateTime, nullable wrappers, and enums). Conversion
/// failures are collected in the returned result's Errors instead of being thrown.
/// </summary>
/// <param name="objTarget">Object whose properties are populated (may be null).</param>
/// <param name="propertyValues">Property-name to raw string value map.</param>
/// <returns>Result whose Errors list records every property that could not be set.</returns>
private static BatchResult PopulateObject(object objTarget, Dictionary <string, string> propertyValues)
{
    var result = new BatchResult();
    // Nothing to do without a target object or any incoming values.
    if (objTarget == null || propertyValues == null || propertyValues.Count == 0)
    {
        return(result);
    }
    var targetProperties = new List <PropertyInfo>(objTarget.GetType().GetProperties());
    // Only visit properties that actually have an incoming value.
    foreach (var targetProperty in targetProperties.Where(targetProperty => propertyValues.ContainsKey(targetProperty.Name)))
    {
        try
        {
            var rawValue = propertyValues[targetProperty.Name];
            object convertedValue;
            var typeToCheck = targetProperty.PropertyType;
            if (targetProperty.PropertyType.IsGenericType && targetProperty.PropertyType.GetGenericTypeDefinition() == typeof(Nullable <>))
            {
                // If we have a nullable type and a blank raw value, we'll just set the property to null.
                if (string.IsNullOrWhiteSpace(rawValue))
                {
                    targetProperty.SetValue(objTarget, null, null);
                    continue;
                }
                // If we get here, we actually want to check the underlying type.
                typeToCheck = Nullable.GetUnderlyingType(targetProperty.PropertyType);
            }
            // Dispatch on the (possibly unwrapped) target type.
            if (typeToCheck == typeof(string)) { convertedValue = rawValue; }
            else if (typeToCheck == typeof(short)) { convertedValue = Convert.ToInt16(rawValue); }
            else if (typeToCheck == typeof(int)) { convertedValue = Convert.ToInt32(rawValue); }
            else if (typeToCheck == typeof(long)) { convertedValue = Convert.ToInt64(rawValue); }
            else if (typeToCheck == typeof(decimal)) { convertedValue = Convert.ToDecimal(rawValue); }
            else if (typeToCheck == typeof(bool)) { convertedValue = Convert.ToBoolean(rawValue); }
            else if (typeToCheck == typeof(byte)) { convertedValue = Convert.ToByte(rawValue); }
            else if (typeToCheck == typeof(char)) { convertedValue = Convert.ToChar(rawValue); }
            else if (typeToCheck == typeof(DateTime)) { convertedValue = Convert.ToDateTime(rawValue); }
            else if (typeToCheck == typeof(double)) { convertedValue = Convert.ToDouble(rawValue); }
            else if (typeToCheck == typeof(sbyte)) { convertedValue = Convert.ToSByte(rawValue); }
            else if (typeToCheck == typeof(float)) { convertedValue = Convert.ToSingle(rawValue); }
            else if (typeToCheck == typeof(ushort)) { convertedValue = Convert.ToUInt16(rawValue); }
            else if (typeToCheck == typeof(uint)) { convertedValue = Convert.ToUInt32(rawValue); }
            else if (typeToCheck == typeof(ulong)) { convertedValue = Convert.ToUInt64(rawValue); }
            else if (typeToCheck.IsEnum)
            {
                // Convert to the enum's underlying numeric type first, then verify
                // the value is actually defined before boxing it as the enum.
                convertedValue = Convert.ChangeType(rawValue, Enum.GetUnderlyingType(typeToCheck));
                if (Enum.IsDefined(typeToCheck, convertedValue))
                {
                    convertedValue = Enum.ToObject(typeToCheck, convertedValue);
                }
                else
                {
                    throw new ObjectTransformationException("The enumeration '" + typeToCheck + "' does not contain the value '" + convertedValue + "'. Skipping this property.");
                }
            }
            else
            {
                throw new NotSupportedException("The type '" + typeToCheck + "' is not currently supported.");
            }
            targetProperty.SetValue(objTarget, convertedValue, null);
        }
        catch (Exception ex)
        {
            // Record the failure and continue with the next property.
            result.Errors.Add(new ObjectTransformationException("Failed to set property '" + targetProperty.Name + "' on object of type '" + objTarget.GetType() + "' to the value '" + propertyValues[targetProperty.Name] + "'.", ex));
        }
    }
    return(result);
}
/// <summary>
/// Executes each command of the batch in order, passing every key in the batch
/// as the participating-id set, and collecting one result per command.
/// </summary>
/// <param name="commands">Commands to execute.</param>
/// <param name="token">Cancellation token checked before each command.</param>
/// <returns>Results positionally matching <paramref name="commands"/>.</returns>
private BatchResult[] ProcessBatch(IList<ICommandData> commands, CancellationToken token)
{
    var participatingIds = commands.Select(c => c.Key).ToArray();
    var batchResults = new BatchResult[commands.Count];
    for (var i = 0; i < commands.Count; i++)
    {
        // Bail out promptly if the caller cancelled mid-batch.
        token.ThrowIfCancellationRequested();
        batchResults[i] = commands[i].ExecuteBatch(this, participatingIds);
    }
    return batchResults;
}
/// <summary>
/// Uploads a transaction batch file to Authorize.Net via a scripted browser flow:
/// logs on, scrapes the hidden form keys from each page, posts the file, and
/// parses the response. Failure at any stage populates the result's debug field
/// with the raw HTML of the failing page.
/// </summary>
/// <param name="username">Merchant login.</param>
/// <param name="password">Merchant password.</param>
/// <param name="batch">Batch payload handed to postUpload.</param>
/// <param name="sandbox">True to target the sandbox environment.</param>
/// <returns>BatchResult with success flag, upload id/count, result text and raw debug HTML.</returns>
public BatchResult BatchUpload(string username, string password, string batch, bool sandbox = true)
{
    // Pages the module uploads information to.
    string logon;
    string uploadPage;
    string uploadPostPage;
    if (sandbox)
    {
        logon = "https://sandbox.authorize.net/UI/themes/sandbox/logon.aspx";
        uploadPage = "https://sandbox.authorize.net/UI/themes/sandbox/popup.aspx?page=batchupload&sub=newfile";
        uploadPostPage = "https://test.authorize.net/batchprocessing/batchupload.dll?page=batchupload&sub=newfile&SessionToken=";
    }
    else
    {
        // NOTE(review): these production URLs were guessed by the original author and
        // have not been verified — confirm them before using outside the sandbox.
        logon = "https://account.authorize.net/ui/themes/anet/logon.aspx";
        uploadPage = "https://account.authorize.net/ui/themes/sandbox/popup.aspx?page=batchupload&sub=newfile";
        uploadPostPage = "https://account.authorize.net/batchprocessing/batchupload.dll?page=batchupload&sub=newfile&SessionToken=";
    }
    // Initialize the result.
    BatchResult output = new BatchResult();
    // Fetch the logon page to obtain the hidden keys needed by the logon POST.
    string logonGet = getRequest(logon);
    hiddenValues logonKeys = getLogonValues(logonGet);
    if (logonKeys.success)
    {
        // Build the logon form: credentials plus the scraped VIEWSTATE/page keys.
        NameValueCollection logonAuthData = new NameValueCollection();
        logonAuthData["MerchantLogin"] = username;
        logonAuthData["Password"] = password;
        logonAuthData["__LOGIN_PAGE_KEY"] = logonKeys.value2;
        logonAuthData["__VIEWSTATE"] = logonKeys.value1;
        logonAuthData["__VIEWSTATEENCRYPTED"] = "";
        logonAuthData["__PAGE_KEY"] = "";
        string logonPost = postRequest(logon, logonAuthData, logon);
        bool loggedIn = checkLoggedIn(logonPost);
        if (loggedIn)
        {
            // Logged on: fetch the pre-upload page that holds the next set of keys.
            string upPage = getRequest(uploadPage);
            hiddenValues uploadKeys = getUploadValues(upPage);
            if (uploadKeys.success)
            {
                NameValueCollection uploadFileData = new NameValueCollection();
                uploadFileData["__PAGE_KEY"] = uploadKeys.value2;
                // Upload the batch file; the scraped session token is appended to the URL.
                string uploadFile = postUpload(uploadPostPage + uploadKeys.value1, uploadFileData, batch);
                hiddenValues uploadVerify = verifyUpload(uploadFile);
                // Now that we uploaded the file, check whether there was an error.
                if (uploadVerify.success)
                {
                    output.success = true;
                    output.id = uploadVerify.value1;
                    output.count = uploadVerify.value2;
                    output.result = "Uploaded transaction details successfuly";
                    output.debug = uploadFile;
                }
                else
                {
                    output.success = false;
                    output.debug = uploadFile;
                    output.result = postUploadError(uploadFile);
                }
            }
            else
            {
                output.success = false;
                output.debug = upPage;
                output.result = "Failue finding upload keys";
            }
        }
        else
        {
            output.success = false;
            output.debug = logonPost;
            output.result = "Failure logging in";
        }
    }
    else
    {
        output.success = false;
        output.debug = logonGet;
        output.result = "Failure finding login keys";
    }
    return(output);
}
/// <summary>
/// Executes each command of the batch in order, collecting one result per command.
/// </summary>
/// <param name="commands">Commands to execute.</param>
/// <param name="token">Cancellation token checked before each command.</param>
/// <returns>Results positionally matching <paramref name="commands"/>.</returns>
private BatchResult[] ProcessBatch(IList<ICommandData> commands, CancellationToken token)
{
    var batchResults = new BatchResult[commands.Count];
    for (var i = 0; i < commands.Count; i++)
    {
        // Bail out promptly if the caller cancelled mid-batch.
        token.ThrowIfCancellationRequested();
        batchResults[i] = commands[i].ExecuteBatch(this);
    }
    return batchResults;
}
/// <summary>
/// Uploads a transaction batch file to Authorize.Net via a scripted browser flow:
/// logs on, scrapes the hidden form keys from each page, posts the file, and
/// parses the response. Failure at any stage populates the result's debug field
/// with the raw HTML of the failing page.
/// </summary>
/// <param name="username">Merchant login.</param>
/// <param name="password">Merchant password.</param>
/// <param name="batch">Batch payload handed to postUpload.</param>
/// <param name="sandbox">True to target the sandbox environment.</param>
/// <returns>BatchResult with success flag, upload id/count, result text and raw debug HTML.</returns>
public BatchResult BatchUpload(string username, string password, string batch, bool sandbox = true)
{
    // Pages the module uploads information to.
    string logon;
    string uploadPage;
    string uploadPostPage;
    if (sandbox)
    {
        logon = "https://sandbox.authorize.net/UI/themes/sandbox/logon.aspx";
        uploadPage = "https://sandbox.authorize.net/UI/themes/sandbox/popup.aspx?page=batchupload&sub=newfile";
        uploadPostPage = "https://test.authorize.net/batchprocessing/batchupload.dll?page=batchupload&sub=newfile&SessionToken=";
    }
    else
    {
        // NOTE(review): these production URLs were guessed by the original author and
        // have not been verified — confirm them before using outside the sandbox.
        logon = "https://account.authorize.net/ui/themes/anet/logon.aspx";
        uploadPage = "https://account.authorize.net/ui/themes/sandbox/popup.aspx?page=batchupload&sub=newfile";
        uploadPostPage = "https://account.authorize.net/batchprocessing/batchupload.dll?page=batchupload&sub=newfile&SessionToken=";
    }
    // Initialize the result.
    BatchResult output = new BatchResult();
    // Fetch the logon page to obtain the hidden keys needed by the logon POST.
    string logonGet = getRequest(logon);
    hiddenValues logonKeys = getLogonValues(logonGet);
    if (logonKeys.success)
    {
        // Build the logon form: credentials plus the scraped VIEWSTATE/page keys.
        NameValueCollection logonAuthData = new NameValueCollection();
        logonAuthData["MerchantLogin"] = username;
        logonAuthData["Password"] = password;
        logonAuthData["__LOGIN_PAGE_KEY"] = logonKeys.value2;
        logonAuthData["__VIEWSTATE"] = logonKeys.value1;
        logonAuthData["__VIEWSTATEENCRYPTED"] = "";
        logonAuthData["__PAGE_KEY"] = "";
        string logonPost = postRequest(logon, logonAuthData, logon);
        bool loggedIn = checkLoggedIn(logonPost);
        if (loggedIn)
        {
            // Logged on: fetch the pre-upload page that holds the next set of keys.
            string upPage = getRequest(uploadPage);
            hiddenValues uploadKeys = getUploadValues(upPage);
            if (uploadKeys.success)
            {
                NameValueCollection uploadFileData = new NameValueCollection();
                uploadFileData["__PAGE_KEY"] = uploadKeys.value2;
                // Upload the batch file; the scraped session token is appended to the URL.
                string uploadFile = postUpload(uploadPostPage + uploadKeys.value1, uploadFileData, batch);
                hiddenValues uploadVerify = verifyUpload(uploadFile);
                // Now that we uploaded the file, check whether there was an error.
                if (uploadVerify.success)
                {
                    output.success = true;
                    output.id = uploadVerify.value1;
                    output.count = uploadVerify.value2;
                    output.result = "Uploaded transaction details successfuly";
                    output.debug = uploadFile;
                }
                else
                {
                    output.success = false;
                    output.debug = uploadFile;
                    output.result = postUploadError(uploadFile);
                }
            }
            else
            {
                output.success = false;
                output.debug = upPage;
                output.result = "Failue finding upload keys";
            }
        }
        else
        {
            output.success = false;
            output.debug = logonPost;
            output.result = "Failure logging in";
        }
    }
    else
    {
        output.success = false;
        output.debug = logonGet;
        output.result = "Failure finding login keys";
    }
    return output;
}