private static Result <bool> AddFoundVars(SingleResult singleResult, string varName, EqSide eqSide, int topBranchindex, int depth, IList <Foundlocation> foundVars) { if (singleResult is SingleValue) { if ((singleResult is Literal) || (singleResult is Constant)) { } else if ((singleResult is Variable var)) { if (var.Name.Equals(varName, StringComparison.Ordinal)) // Case sensitive { foundVars.Add(new Foundlocation() { Depth = depth, EqSide = eqSide, TopBranchindex = topBranchindex, }); } ; } else { return(Result <bool> .Bad(ErrorUtils.UnspecifiedErrorMsg2("Unknown SingleValue sub-type"))); } }
/// <summary>
/// Sets up the WCF binding and endpoint address for this controller from the
/// configured service URI. Wraps any failure in the project's connect error.
/// </summary>
protected RemoteControllerBase()
{
    try
    {
        // BUG FIX: the original tested `ServiceUri == null` and then immediately
        // dereferenced ServiceUri inside that branch (guaranteed NullReferenceException),
        // and both branches of the if/else were byte-identical. Collapse to a single
        // path and fail fast with a clear message when the URI is missing.
        string serviceUri = App.Configs.ServiceUri;
        if (serviceUri == null)
        {
            throw new InvalidOperationException("ServiceUri is not configured.");
        }

        // The scheme prefix decides the binding kind (the two prefixes are mutually
        // exclusive, so else-if is equivalent to the original pair of ifs).
        if (serviceUri.StartsWith("net.tcp", StringComparison.Ordinal))
        {
            m_Binding = CommonUtils.CreateTcpBinding();
        }
        else if (serviceUri.StartsWith("http", StringComparison.Ordinal))
        {
            m_Binding = CommonUtils.CreateHttpBinding();
        }

        // Each controller gets its own endpoint, suffixed with the concrete type name.
        m_EndpointAddress = new EndpointAddress(serviceUri + "/" + GetType().Name);
    }
    catch (Exception ex)
    {
        // Surface any setup problem as the standard "connect to server failed" error.
        throw ErrorUtils.CreateErrorWithSubMessage(ERR_SYSTEM.ERR_SYSTEM_CONNECT_TO_SERVER_FAIL, ex.Message);
    }
}
/// <summary>
/// Creates a glow effect description.
/// </summary>
/// <param name="glowType">The kind of glow to render; must not be null.</param>
/// <param name="fadeInTime">
/// Fade-in duration. Values below <see cref="MinimumFadeTime"/> are raised to
/// <see cref="MinimumFadeTime"/> (see <see cref="MinimumFadeTime"/> for why);
/// negative values cause an error.
/// </param>
/// <param name="fadeOutTime">
/// Fade-out duration. Values below <see cref="MinimumFadeTime"/> are raised to
/// <see cref="MinimumFadeTime"/> (see <see cref="MinimumFadeTime"/> for why);
/// negative values cause an error.
/// </param>
public Glow(
    GlowType glowType,
    double fadeInTime = 0.05,
    double fadeOutTime = 0.2,
    double x = 0.5,
    double y = 0.5,
    double scaleX = 1,
    double scaleY = 1,
    double rotateAngle = 0,
    FFmpegColorChannelMixer colorMixer = null)
{
    ErrorUtils.ThrowIfArgNull(glowType, nameof(glowType));
    ErrorUtils.ThrowIfArgLessThan(fadeInTime, 0, nameof(fadeInTime));
    ErrorUtils.ThrowIfArgLessThan(fadeOutTime, 0, nameof(fadeOutTime));

    this.GlowType = glowType;

    // Clamp both fades up to the minimum the fade filter can handle.
    this.FadeInTime = Math.Max(fadeInTime, MinimumFadeTime);
    this.FadeOutTime = Math.Max(fadeOutTime, MinimumFadeTime);

    this.X = x;
    this.Y = y;
    this.ScaleX = scaleX;
    this.ScaleY = scaleY;
    this.RotateAngle = rotateAngle;
    this.ColorMixer = colorMixer;
}
/// <summary>An ffmpeg command-line option: a name plus an optional value.</summary>
/// <param name="name">Option name; must not be null.</param>
/// <param name="value">Option value; null is normalized to the empty string.</param>
public FFmpegOption(string name, string value = "")
{
    ErrorUtils.ThrowIfArgNull(name, nameof(name));

    this.Name = name;
    this.Value = value ?? "";
}
/// <summary>
/// Filter that catches all exceptions and wraps them into a common 400 response.
/// </summary>
/// <param name="context">Context of the action that threw.</param>
public override void OnException(HttpActionExecutedContext context)
{
    var message = ErrorUtils.GetErrorMessage(context.Exception, "Service error");
    context.Response = context.Request.CreateResponse(HttpStatusCode.BadRequest, message);

    base.OnException(context);
}
/// <summary>Handles an exception when processing a batch request.</summary>
/// <param name='service'>Data service doing the processing.</param>
/// <param name='exception'>Exception thrown.</param>
/// <param name='batchWriter'>Output writer for the batch.</param>
/// <param name="responseStream">Underlying response stream.</param>
internal static void HandleBatchInStreamError(IDataService service, Exception exception, ODataBatchWriter batchWriter, Stream responseStream)
{
    Debug.Assert(service != null, "service != null");
    Debug.Assert(exception != null, "exception != null");
    Debug.Assert(responseStream != null, "responseStream != null");
    Debug.Assert(service.Configuration != null, "service.Configuration != null - it should have been initialized by now");
    Debug.Assert(CommonUtil.IsCatchableExceptionType(exception), "CommonUtil.IsCatchableExceptionType(exception) - ");

    // The operation context (and therefore the request message) may not exist if we
    // failed very early; CreateHandler tolerates a null request message.
    AstoriaRequestMessage requestMessage = service.OperationContext == null ? null : service.OperationContext.RequestMessage;

    // Build the error handler and give the service a chance to inspect/translate the exception.
    ErrorHandler handler = CreateHandler(service, requestMessage, exception, VersionUtil.DataServiceDefaultResponseVersion);
    service.InternalHandleException(handler.exceptionArgs);

    // Make sure to flush the batch writer before we write anything to the underlying stream
    batchWriter.Flush();

    // Note the OData protocol spec did not defined the behavior when an exception is encountered outside of a batch operation.
    // The batch writer in ODataLib doesn't allow WriteError in this case.
    // Unfortunately the shipped behavior on the server is we serialize out an error payload in XML format. We need to keep the
    // existing behavior. The batch client doesn't know how to deserialize an error payload outside of a batch operation however.
    using (XmlWriter xmlWriter = XmlUtil.CreateXmlWriterAndWriteProcessingInstruction(responseStream, handler.encoding))
    {
        ODataError error = handler.exceptionArgs.CreateODataError();
        ErrorUtils.WriteXmlError(xmlWriter, error, handler.exceptionArgs.UseVerboseErrors, MaxInnerErrorDepth);
    }
}
/// <summary>
/// When the add-files action reports an error, the create-update-package use case
/// should surface that error.
/// </summary>
public async Task Handle_AddFilesFail_ShouldFail()
{
    // arrange
    var projectRepository = new Mock <IProjectRepository>();
    projectRepository.Setup(x => x.GetById(It.IsAny <int>()))
        .ReturnsAsync(new Project("test", new AsymmetricKey("PUBLIC KEY")));

    var addFilesAction = new Mock <IAddUpdatePackageFilesAction>();
    addFilesAction
        .Setup(x => x.AddFiles(It.IsAny <UpdatePackage>(), It.IsAny <IEnumerable <UpdateFileInfo> >(), It.IsAny <string>()))
        .Returns(Task.CompletedTask);
    addFilesAction.SetupGet(x => x.HasError).Returns(true);
    addFilesAction.SetupGet(x => x.Error)
        .Returns(new Error(ErrorType.ValidationError.ToString(), "Test error", -1));

    var useCase = new CreateUpdatePackageUseCase(projectRepository.Object, null, addFilesAction.Object);

    var fileInfos = new List <UpdateFileInfo> { new UpdateFileInfo("asd", Hash.Parse("FF")) };
    var packageInfo = new UpdatePackageInfo("1.0.0", null, null, fileInfos, null, null);
    var request = new CreateUpdatePackageRequest(1, packageInfo);

    // act
    await useCase.Handle(request);

    // assert
    ErrorUtils.AssertError(useCase, ErrorType.ValidationError);
}
/// <summary>
/// Builds the series of ffmpeg commands for the glow overlay, one command per
/// beat group derived from <paramref name="taggedBeats"/>.
/// </summary>
public static IEnumerable <FFmpegCommand> BuildFFmpegCommandSeries(
    FFmpegInput inputVideo,
    FFmpegOutput outputFile,
    DirectoryInfo tempFilesFolder,
    IEnumerable <TaggedBeat> taggedBeats,
    BeatPositioner beatPositioner,
    int approxGlowsPerStage)
{
    // Validate eagerly, before the (lazy) Select below is enumerated.
    ErrorUtils.ThrowIfArgNull(inputVideo, nameof(inputVideo));
    ErrorUtils.ThrowIfArgNull(outputFile, nameof(outputFile));
    ErrorUtils.ThrowIfArgNull(taggedBeats, nameof(taggedBeats));
    ErrorUtils.ThrowIfArgNull(beatPositioner, nameof(beatPositioner));
    ErrorUtils.ThrowIfArgLessThan(approxGlowsPerStage, 1, nameof(approxGlowsPerStage));

    var groupInfos = GetBeatGroupInfos(inputVideo, outputFile, tempFilesFolder, taggedBeats, approxGlowsPerStage);

    // One command per group; each group info carries its own input/output files.
    return groupInfos.Select(info => GlowOverlayCommandBuilder.BuildFFmpegCommand(
        info.InputFile,
        info.OutputFile,
        info.BeatGroup,
        beatPositioner));
}
/// <summary>
/// Writes an SLA error response for the given Hystrix event on a synchronous request.
/// </summary>
public virtual void SendSyncSLAErrorResponse(IHttpRequest request, IHttpResponse response, HystrixEventType hystrixEvent)
{
    // Resolve the response type registered for this operation so the error payload is typed.
    var responseType = EndpointHost.Config.MetadataMap[ServicePath].GetResponseTypeByOpName(request.OperationName);

    var errorResponse = ErrorUtils.CreateSLAErrorResponse(hystrixEvent, responseType);
    response.WriteToResponse(request, errorResponse);
}
/// <summary>
/// Asynchronously writes an error message.
/// </summary>
/// <param name="jsonWriter">The JSON writer to write the error.</param>
/// <param name="writeInstanceAnnotationsDelegate">Delegate to write the instance annotations.</param>
/// <param name="error">The error instance to write.</param>
/// <param name="includeDebugInformation">A flag indicating whether error details should be written (in debug mode only) or not.</param>
/// <param name="maxInnerErrorDepth">The maximum number of nested inner errors to allow.</param>
/// <returns>A task that represents the asynchronous write operation.</returns>
internal static Task WriteErrorAsync(
    IJsonWriterAsync jsonWriter,
    Func <ICollection <ODataInstanceAnnotation>, Task> writeInstanceAnnotationsDelegate,
    ODataError error,
    bool includeDebugInformation,
    int maxInnerErrorDepth)
{
    Debug.Assert(jsonWriter != null, "jsonWriter != null");
    Debug.Assert(error != null, "error != null");
    ExceptionUtils.CheckArgumentNotNull(writeInstanceAnnotationsDelegate, "writeInstanceAnnotationsDelegate");

    ErrorUtils.GetErrorDetails(error, out string code, out string message);

    // The inner (debug) error is only emitted when debug information was requested.
    ODataInnerError debugError = includeDebugInformation ? error.InnerError : null;

    return WriteErrorAsync(
        jsonWriter,
        code,
        message,
        error.Target,
        error.Details,
        debugError,
        error.GetInstanceAnnotations(),
        writeInstanceAnnotationsDelegate,
        maxInnerErrorDepth);
}
/// <summary>
/// Reads a topup-billing file inside a transaction and records the import.
/// On success the file is backed up as SUCCESS; on any failure as FAIL.
/// </summary>
/// <param name="filename">Full path of the billing file to import.</param>
private static void ExecuteReadFileBilling(string filename)
{
    try
    {
        Console.WriteLine("Start read file :" + filename);

        using (TransactionScope scope = new TransactionScope())
        {
            var listBillingInfo = ReadFileBilling.ReadFile(filename);
            string DataLogID = string.Empty;

            // File name must be in DDMMYYYY format; the first 6 characters are the period key.
            DataLogController.FinalReadFile(Constants.FileTopupBilling, Path.GetFileNameWithoutExtension(filename).Substring(0, 6), Path.GetFileName(filename), out DataLogID);

            if (!string.IsNullOrEmpty(DataLogID))
            {
                ReadFileBillingController.ReadFileBilling(listBillingInfo, DataLogID, Path.GetFileNameWithoutExtension(filename).Substring(0, 6), Path.GetFileName(filename));
                scope.Complete();

                // BUG FIX: the SUCCESS backup used to run in a `finally` block, so it also
                // executed after a failure — right after the catch had backed the file up
                // as FAIL. Back up as SUCCESS only on the success path, matching the
                // ExecuteReadFileExcel/ExecuteJRN siblings.
                BackUpFile(filename, "Billing", Constants.READFILE_SUCCESS);
                Console.WriteLine("End read file Success:" + filename);
            }
        }
    }
    catch (Exception ex)
    {
        BackUpFile(filename, "Billing", Constants.READFILE_FAIL);
        Console.WriteLine(filename + "-" + ex.Message);
        ErrorUtils.WriteLog(filename + "-" + ex.Message);
    }
}
/// <summary>
/// Pastes the clipboard XML as a child of the currently selected node, undoing
/// the paste (and showing an error) if it would exceed the node layer limit.
/// </summary>
/// <returns>true if a node was selected (regardless of clipboard validity); otherwise false.</returns>
public static bool Paste()
{
    // FIX: the original checked Core.SelectedNode for null twice (once directly,
    // once via the local copy). A single guard clause on one read is sufficient.
    var selected = Core.SelectedNode;
    if (selected == null)
    {
        return false;
    }

    var data = Manager.NativeManager.GetClipboardText();

    if (Core.IsValidXml(data))
    {
        // Group the add + paste so a single Undo reverts both together.
        Command.CommandManager.StartCollection();
        var node = selected.AddChild();
        Core.Paste(node, data);
        Command.CommandManager.EndCollection();

        // Enforce the maximum node depth: roll the whole paste back if exceeded.
        if (Core.Root.GetDeepestLayerNumberInChildren() > Constant.NodeLayerLimit)
        {
            Command.CommandManager.Undo(true);
            ErrorUtils.ShowErrorByNodeLayerLimit();
        }
    }

    return true;
}
/// <summary>
/// GET a single trip by id: 400 on invalid model state or a database failure,
/// 404 when no trip exists, otherwise 200 with the trip.
/// </summary>
public async Task <IActionResult> GetTrip([FromRoute] int id)
{
    if (!ModelState.IsValid)
    {
        return BadRequest(ModelState);
    }

    TripModel model;
    try
    {
        model = await new TripDao(_db).getTripAsync(id);
    }
    catch (Exception e)
    {
        // Database failures are reported as 400 with a descriptive message.
        return BadRequest(new { message = ErrorUtils.dbErrorMessage($"Can't get Trip with id={id}", e) });
    }

    if (model == null)
    {
        return NotFound();
    }

    return Ok(model);
}
/// <summary>
/// PUT (update) an existing trip. The route id must match the payload's tripId.
/// Returns 400 for invalid input or a mismatched id, 404 when the trip does not
/// exist, 204 on success.
/// </summary>
public async Task <IActionResult> PutTrip([FromRoute] int id, [FromBody] TripModel tripModel)
{
    if (!ModelState.IsValid)
    {
        return(BadRequest(ModelState));
    }

    if (id != tripModel.tripId)
    {
        return(BadRequest());
    }

    if (!TripExists(id))
    {
        return(NotFound());
    }

    try
    {
        var dao = new TripDao(_db);
        tripModel = await dao.saveTripAsync(tripModel);
    }
    catch (Exception e)
    {
        // Re-check existence: the save may have failed because the trip was deleted
        // concurrently, which should surface as 404 rather than 400.
        if (!TripExists(id))
        {
            return(NotFound());
        }

        return(BadRequest(new { message = ErrorUtils.dbErrorMessage($"Can't save Trip with id={id}", e) }));
    }

    return(NoContent()); // Or Ok(tripModel);
}
/// <summary>
/// Runs every queued batch through the ProcessComplete callback, reporting the
/// first failure to the callback and stopping; raises EndBatch when all batches
/// were processed without error.
/// </summary>
private void Process()
{
    foreach (var batch in m_Batchs)
    {
        try
        {
            // FIX (idiom): null-conditional invocation replaces the explicit null
            // check. The catch blocks below can only be reached when the handler is
            // non-null, because invoking it is the only statement in the try.
            ProcessComplete?.Invoke(batch, true, null);
        }
        catch (FaultException ex)
        {
            // Known service faults are passed through unchanged.
            ProcessComplete(batch, false, ex);
            return;
        }
        catch (Exception ex)
        {
            // Anything else is wrapped in the project's "unknown" system error.
            ProcessComplete(batch, false, ErrorUtils.CreateErrorWithSubMessage(ERR_SYSTEM.ERR_SYSTEM_UNKNOWN, ex.Message));
            return;
        }
    }

    EndBatch?.Invoke(this, EventArgs.Empty);
}
/// <summary>
/// Reads an Excel data file inside a transaction and records the import.
/// Backed up as SUCCESS only after the transaction completes; any failure
/// (including a transaction abort) backs it up as FAIL.
/// </summary>
/// <param name="filename">Full path of the Excel file to import.</param>
private static void ExecuteReadFileExcel(string filename)
{
    try
    {
        Console.WriteLine("Start read file :" + filename);

        using (TransactionScope scope = new TransactionScope())
        {
            var listTransInfo = ReadFileExcel.ReadExcel(filename);
            string DataLogID = string.Empty;

            // FIX: the last-8-characters date key (DDMMYYYY) was computed twice with a
            // duplicated Substring expression — hoist it into locals.
            string nameWithoutExtension = Path.GetFileNameWithoutExtension(filename);
            string dateKey = nameWithoutExtension.Substring(nameWithoutExtension.Length - 8, 8);

            DataLogController.FinalReadFile(Constants.FileData_HT, dateKey, Path.GetFileName(filename), out DataLogID);

            if (!string.IsNullOrEmpty(DataLogID))
            {
                ReadFileExcelController.ReadFileExcel(listTransInfo, DataLogID, dateKey);
                scope.Complete();

                BackUpFile(filename, "Excels", Constants.READFILE_SUCCESS);
                Console.WriteLine("End read file Success:" + filename);
            }
        }
    }
    catch (TransactionAbortedException ex)
    {
        BackUpFile(filename, "Excels", Constants.READFILE_FAIL);
        ErrorUtils.WriteLog("ReadFileExcel : TransactionAbortedException File: " + filename + " MsgErr:" + ex.Message);
        //throw ex;
    }
    catch (Exception ex)
    {
        BackUpFile(filename, "Excels", Constants.READFILE_FAIL);
        Console.WriteLine(filename + "-" + ex.Message);
        ErrorUtils.WriteLog(filename + "-" + ex.Message);
    }
}
/// <summary>
/// Populates <paramref name="comm"/>'s parameter collection, using a cache keyed by
/// CommandText so DeriveParameters (a server round trip) runs at most once per procedure.
/// </summary>
/// <param name="comm">Command whose parameters should be discovered; its CommandText is the cache key.</param>
public static void DiscoveryParameters(NpgsqlCommand comm)
{
    try
    {
        var cachedKey = comm.CommandText;

        // FIX: a single TryGetValue replaces the ContainsKey + indexer double lookup.
        if (m_CachedNpgParameters.TryGetValue(cachedKey, out var cached))
        {
            // Clone each cached parameter — parameters must not be shared across commands.
            foreach (var param in cached)
            {
                comm.Parameters.Add((NpgsqlParameter)((ICloneable)param).Clone());
            }
        }
        else
        {
            NpgsqlCommandBuilder.DeriveParameters(comm);

            // Restore the original text in case DeriveParameters changed it, so the
            // cache key matches on the next call.
            comm.CommandText = cachedKey;

            var source = new NpgsqlParameter[comm.Parameters.Count];
            for (var i = 0; i < comm.Parameters.Count; i++)
            {
                source[i] = (NpgsqlParameter)((ICloneable)comm.Parameters[i]).Clone();
            }

            m_CachedNpgParameters.Add(cachedKey, source);
        }
    }
    catch (Exception ex)
    {
        throw ErrorUtils.CreateErrorWithSubMessage(ERR_SQL.ERR_SQL_DISCOVERY_PARAMS_FAIL, ex.Message, comm.CommandText);
    }
}
/// <summary>
/// A patch whose files fail the add-files action should surface the action's
/// error on the use case.
/// </summary>
public async Task Handle_GivenInvalidFiles_ShouldFail()
{
    // arrange
    var packageRepository = new Mock <IUpdatePackageRepository>();
    packageRepository.Setup(x => x.GetFirstOrDefaultBySpecs(It.IsAny <ISpecification <UpdatePackage>[]>()))
        .ReturnsAsync(new UpdatePackage("1.0.0"));

    var addFilesAction = new Mock <IAddUpdatePackageFilesAction>();
    addFilesAction
        .Setup(x => x.AddFiles(It.IsAny <UpdatePackage>(), It.IsAny <IEnumerable <UpdateFileInfo> >(), It.IsAny <string>()))
        .Returns(Task.CompletedTask);
    addFilesAction.SetupGet(x => x.HasError).Returns(true);
    addFilesAction.SetupGet(x => x.Error)
        .Returns(new Error(ErrorType.InvalidOperation.ToString(), "Test error", -1));

    var useCase = new PatchUpdatePackageUseCase(packageRepository.Object, addFilesAction.Object);

    var fileInfos = new List <UpdateFileInfo> { new UpdateFileInfo("asd", Hash.Parse("FF")) };
    var updatePackageInfo = new UpdatePackageInfo("1.0.0", "", null, fileInfos, null, null);

    // act
    await useCase.Handle(new PatchUpdatePackageRequest(1, "1.0.0", updatePackageInfo, null));

    // assert
    ErrorUtils.AssertError(useCase, ErrorType.InvalidOperation);
}
/// <summary>
/// When a file signature does not verify against the project's public key (the
/// private key is not stored on the server), adding files should report a
/// validation error.
/// </summary>
public async Task Handle_GivenFilesWithInvalidSignature_PrivateKeyNotOnServer_ShouldFail()
{
    // arrange
    var projectRepository = new Mock <IProjectRepository>();
    projectRepository.Setup(x => x.GetById(It.IsAny <int>()))
        .ReturnsAsync(new Project("test", new AsymmetricKey("PUBLIC KEY")));

    var storedFileRepository = new Mock <IStoredFileRepository>();
    storedFileRepository.Setup(x => x.FindByHash(It.IsAny <Hash>()))
        .ReturnsAsync(new StoredFile("HASH", 100, 80));

    // Signature verification fails for this file's signature + key combination.
    var cryptoHandler = new Mock <IAsymmetricCryptoHandler>();
    cryptoHandler.Setup(x => x.VerifyHash(It.IsAny <Hash>(), "SIGNATURE", "PUBLIC KEY")).Returns(false);

    var action = new AddUpdatePackageFilesAction(storedFileRepository.Object, projectRepository.Object, cryptoHandler.Object, null);

    var fileInfos = new List <UpdateFileInfo> { new UpdateFileInfo("asd", Hash.Parse("FF"), "SIGNATURE") };
    var package = new UpdatePackage("1.0.0");

    // act
    await action.AddFiles(package, fileInfos, null);

    // assert
    ErrorUtils.AssertError(action, ErrorType.ValidationError);
}
/// <summary>
/// When the server-stored private key is encrypted and decrypting it with the
/// supplied key password throws, adding files should report a validation error.
/// </summary>
public async Task Handle_GivenInvalidKeyPassword_PrivateKeyEncryptedOnServer_ShouldFail()
{
    // arrange
    var projectRepository = new Mock <IProjectRepository>();
    projectRepository.Setup(x => x.GetById(It.IsAny <int>()))
        .ReturnsAsync(new Project("test", new AsymmetricKey("PUBLIC KEY", "ENCRYPTED KEY", true)));

    var storedFileRepository = new Mock <IStoredFileRepository>();
    storedFileRepository.Setup(x => x.FindByHash(It.IsAny <Hash>()))
        .ReturnsAsync(new StoredFile("HASH", 100, 80));

    // Decryption of the stored key fails for the supplied (wrong) password.
    var symmetricEncryption = new Mock <ISymmetricEncryption>();
    symmetricEncryption.Setup(x => x.DecryptString("ENCRYPTED KEY", "Invalid Password"))
        .Throws(new Exception("Invalid Password"));

    var action = new AddUpdatePackageFilesAction(storedFileRepository.Object, projectRepository.Object, null, symmetricEncryption.Object);

    var fileInfos = new List <UpdateFileInfo> { new UpdateFileInfo("asd", Hash.Parse("FF"), "") };
    var package = new UpdatePackage("1.0.0");

    // act
    await action.AddFiles(package, fileInfos, "Invalid Password");

    // assert
    ErrorUtils.AssertError(action, ErrorType.ValidationError);
}
/// <summary>
/// Builds the list of configured modules from the database. Only the
/// search-master and Mod-ESB module types are currently loaded; the other
/// module types below are commented out.
/// </summary>
/// <returns>The combined list of module definitions.</returns>
public List <ModuleInfo> BuildModulesInfo()
{
    try
    {
        var moduleInfos = new List <ModuleInfo>();

        // NOTE(review): the following loaders are disabled — confirm whether these
        // module types are loaded elsewhere before deleting these lines.
        //moduleInfos.AddRange(SQLHelper.ExecuteStoreProcedure<ModuleInfo>(ConnectionString, SYSTEM_STORE_PROCEDURES.LIST_STATIC_MODULE).ToArray());
        //moduleInfos.AddRange(SQLHelper.ExecuteStoreProcedure<ModuleInfo>(ConnectionString, SYSTEM_STORE_PROCEDURES.LIST_BATCH_MODULE).ToArray());
        //moduleInfos.AddRange(SQLHelper.ExecuteStoreProcedure<StatisticsModuleInfo>(ConnectionString, SYSTEM_STORE_PROCEDURES.LIST_STATISTICS_MODULE).ToArray());
        //moduleInfos.AddRange(SQLHelper.ExecuteStoreProcedure<MaintainModuleInfo>(ConnectionString, SYSTEM_STORE_PROCEDURES.LIST_MAINTAIN_MODULE).ToArray());
        //moduleInfos.AddRange(SQLHelper.ExecuteStoreProcedure<ChartModuleInfo>(ConnectionString, SYSTEM_STORE_PROCEDURES.LIST_CHART_MODULE).ToArray());

        moduleInfos.AddRange(SQLHelper.ExecuteStoreProcedure <SearchModuleInfo>(ConnectionString, SYSTEM_STORE_PROCEDURES.LIST_SEARCHMASTER_MODULE).ToArray());
        moduleInfos.AddRange(SQLHelper.ExecuteStoreProcedure <ModESBInfo>(ConnectionString, SYSTEM_STORE_PROCEDURES.LIST_MODESB_MODULE).ToArray());

        //moduleInfos.AddRange(SQLHelper.ExecuteStoreProcedure<SwitchModuleInfo>(ConnectionString, SYSTEM_STORE_PROCEDURES.LIST_SWITCH_MODULE).ToArray());
        //moduleInfos.AddRange(SQLHelper.ExecuteStoreProcedure<ImportModuleInfo>(ConnectionString, SYSTEM_STORE_PROCEDURES.LIST_IMPORT_MODULE).ToArray());
        //moduleInfos.AddRange(SQLHelper.ExecuteStoreProcedure<ExecProcModuleInfo>(ConnectionString, SYSTEM_STORE_PROCEDURES.LIST_EXECUTEPROC_MODULE).ToArray());
        //moduleInfos.AddRange(SQLHelper.ExecuteStoreProcedure<AlertModuleInfo>(ConnectionString, SYSTEM_STORE_PROCEDURES.LIST_ALERT_MODULE).ToArray());
        //moduleInfos.AddRange(SQLHelper.ExecuteStoreProcedure<ReportModuleInfo>(ConnectionString, SYSTEM_STORE_PROCEDURES.LIST_REPORT_MODULE).ToArray());
        //moduleInfos.AddRange(SQLHelper.ExecuteStoreProcedure<ModuleInfo>(ConnectionString, SYSTEM_STORE_PROCEDURES.LIST_TREE_MODULE).ToArray());
        //moduleInfos.AddRange(SQLHelper.ExecuteStoreProcedure<ModuleInfo>(ConnectionString, SYSTEM_STORE_PROCEDURES.LIST_EXP_MODULE).ToArray());
        //moduleInfos.AddRange(SQLHelper.ExecuteStoreProcedure<MaintainModuleInfo>(ConnectionString, SYSTEM_STORE_PROCEDURES.LIST_WORKFLOW_MODULE).ToArray());
        //moduleInfos.AddRange(SQLHelper.ExecuteStoreProcedure<DashboardInfo>(ConnectionString, SYSTEM_STORE_PROCEDURES.LIST_DASHBOARD_MODULE).ToArray());

        return(moduleInfos);
    }
    catch (Exception ex)
    {
        // Any database failure is wrapped in the project's standard error type.
        throw ErrorUtils.CreateError(ex);
    }
}
/// <summary>
/// Write an error message.
/// </summary>
/// <param name="jsonWriter">The JSON writer to write the error.</param>
/// <param name="writeInstanceAnnotationsDelegate">Action to write the instance annotations.</param>
/// <param name="error">The error instance to write.</param>
/// <param name="includeDebugInformation">A flag indicating whether error details should be written (in debug mode only) or not.</param>
/// <param name="maxInnerErrorDepth">The maximum number of nested inner errors to allow.</param>
/// <param name="writingJsonLight">true if we're writing JSON lite, false if we're writing verbose JSON.</param>
internal static void WriteError(
    IJsonWriter jsonWriter,
    Action <ICollection <ODataInstanceAnnotation> > writeInstanceAnnotationsDelegate,
    ODataError error,
    bool includeDebugInformation,
    int maxInnerErrorDepth,
    bool writingJsonLight)
{
    Debug.Assert(jsonWriter != null, "jsonWriter != null");
    Debug.Assert(error != null, "error != null");

    ErrorUtils.GetErrorDetails(error, out string code, out string message);

    // The inner (debug) error is only emitted when debug information was requested.
    ODataInnerError debugError = includeDebugInformation ? error.InnerError : null;

    WriteError(
        jsonWriter,
        code,
        message,
        error.Target,
        error.Details,
        debugError,
        error.GetInstanceAnnotations(),
        writeInstanceAnnotationsDelegate,
        maxInnerErrorDepth,
        writingJsonLight);
}
/// <summary>
/// Force-loads a module's definition from the database: module rows (static and
/// maintain types) plus field, button, button-parameter and language lists for
/// the given module id.
/// </summary>
/// <param name="oracleParamsInfo">
/// NOTE(review): always returned as null — the store-procedure list queried into
/// <c>stores</c> below is never assigned to this out parameter. Looks like an
/// unfinished feature; confirm intent before relying on it.
/// </param>
/// <param name="moduleID">Id of the module to load.</param>
public void ForceLoadModule(
    out List <ModuleInfo> modulesInfo,
    out List <ModuleFieldInfo> fieldsInfo,
    out List <ButtonInfo> buttonsInfo,
    out List <ButtonParamInfo> buttonParamsInfo,
    out List <LanguageInfo> languageInfo,
    out List <OracleParam> oracleParamsInfo,
    string moduleID)
{
    // Default every out parameter so all are assigned even if loading throws.
    modulesInfo = null;
    fieldsInfo = null;
    buttonsInfo = null;
    buttonParamsInfo = null;
    languageInfo = null;
    oracleParamsInfo = null;

    try
    {
        modulesInfo = new List <ModuleInfo>();
        modulesInfo.AddRange(SQLHelper.ExecuteStoreProcedure <ModuleInfo>(ConnectionString, SYSTEM_STORE_PROCEDURES.GET_STATIC_MODULE, moduleID).ToArray());
        modulesInfo.AddRange(SQLHelper.ExecuteStoreProcedure <MaintainModuleInfo>(ConnectionString, SYSTEM_STORE_PROCEDURES.GET_MAINTAIN_MODULE, moduleID).ToArray());

        fieldsInfo = SQLHelper.ExecuteStoreProcedure <ModuleFieldInfo>(ConnectionString, SYSTEM_STORE_PROCEDURES.LIST_FIELD_INFO_BY_MODID, moduleID);
        buttonsInfo = SQLHelper.ExecuteStoreProcedure <ButtonInfo>(ConnectionString, SYSTEM_STORE_PROCEDURES.LIST_BUTTON_BY_MODID, moduleID);
        buttonParamsInfo = SQLHelper.ExecuteStoreProcedure <ButtonParamInfo>(ConnectionString, SYSTEM_STORE_PROCEDURES.LIST_BUTTON_PARAM_BY_MODID, moduleID);
        languageInfo = SQLHelper.ExecuteStoreProcedure <LanguageInfo>(ConnectionString, SYSTEM_STORE_PROCEDURES.LIST_LANGUAGE_BY_MODID, moduleID);

        //SQLHelper SQLHelper = new SQLHelper();
        // NOTE(review): `stores` is queried but never used, and oracleParamsInfo stays null.
        var stores = SQLHelper.ExecuteStoreProcedure <OracleStore>(ConnectionString, SYSTEM_STORE_PROCEDURES.LIST_STOREPROC_BY_MODID, moduleID);
    }
    catch (Exception ex)
    {
        throw ErrorUtils.CreateError(ex);
    }
}
/// <summary>
/// Reads an ATM journal (JRN) file inside a transaction and records the import.
/// The file is backed up as SUCCESS only after the transaction completes; any
/// failure backs it up as FAIL instead.
/// </summary>
/// <param name="filename">Full path of the JRN file to import.</param>
/// <param name="machineCode">ATM machine code the file belongs to; also used as the backup folder.</param>
private static void ExecuteJRN(string filename, string machineCode)
{
    try
    {
        Console.WriteLine("Start read file :" + filename);

        using (TransactionScope scope = new TransactionScope())
        {
            string DataLogID = string.Empty;
            var listTransInfo = ReadFileATM.ReadFileJRN(filename);

            // The last 8 characters of the file name form the date key (DDMMYYYY,
            // per the sibling file readers in this class).
            DataLogController.FinalReadFile(Constants.FileData_JRN, Path.GetFileNameWithoutExtension(filename).Substring(Path.GetFileNameWithoutExtension(filename).Length - 8, 8), machineCode + Path.GetFileName(filename), out DataLogID);

            if (!string.IsNullOrEmpty(DataLogID))
            {
                ReadFileATMController.ReadFileJRN(listTransInfo, machineCode, DataLogID);
                scope.Complete();
                Console.WriteLine("End read file Success:" + filename);
                BackUpFile(filename, machineCode, Constants.READFILE_SUCCESS);
            }
        }
    }
    catch (TransactionAbortedException ex)
    {
        BackUpFile(filename, machineCode, Constants.READFILE_FAIL);
        ErrorUtils.WriteLog("ReadFileJDATA : TransactionAbortedException File: " + filename + " MsgErr:" + ex.Message);
        //throw ex;
    }
    catch (Exception ex)
    {
        BackUpFile(filename, machineCode, Constants.READFILE_FAIL);
        Console.WriteLine(filename + "-" + ex.Message);
        ErrorUtils.WriteLog(filename + "-" + ex.Message);
    }
}
/// <summary>An ffmpeg input file together with its per-input options.</summary>
/// <param name="file">The input media file; must not be null.</param>
/// <param name="modifiers">Per-input options; null is treated as an empty list.</param>
public FFmpegInput(FileInfo file, ImmutableList <FFmpegOption> modifiers = null)
{
    ErrorUtils.ThrowIfArgNull(file, nameof(file));

    this.File = file;

    // ImmutableList<T>.Empty is the same instance ImmutableList.Create<T>() returns.
    this.Modifiers = modifiers ?? ImmutableList <FFmpegOption> .Empty;
}
/// <summary>
/// Rejects requests whose identity fails white-list validation. The check is
/// skipped when white-listing is disabled for the service path, for excluded
/// path controllers, and for the check-health operation. On refusal, writes a
/// 403 response and ends the request.
/// </summary>
public virtual void FilterWhiteList(IHttpRequest request, IHttpResponse response)
{
    // White-list checking can be disabled per service path.
    if (!WhiteListSettings[request.ServicePath].Enabled)
    {
        return;
    }

    string pathInfo = request.PathInfo;
    if (!string.IsNullOrWhiteSpace(pathInfo))
    {
        pathInfo = pathInfo.Trim().ToLower();
        string[] pathParts = pathInfo.TrimStart('/').Split(new char[] { '/' }, StringSplitOptions.RemoveEmptyEntries);
        if (pathParts.Length > 0)
        {
            // The first path segment names the controller; some controllers are
            // exempt globally, others per service path.
            string pathController = pathParts[0].Trim();
            if (ExcludedPathControllers.Contains(pathController))
            {
                return;
            }

            if (ServiceSpecificExcludedPathControllers.ContainsKey(request.ServicePath) && ServiceSpecificExcludedPathControllers[request.ServicePath].Contains(pathController))
            {
                return;
            }
        }
    }

    // Health-check calls are never filtered.
    if (request.OperationName != null && request.OperationName.Trim().ToLower() == CheckHealthOperationName)
    {
        return;
    }

    string requestIdentity;
    if (ValidateRequest(request, out requestIdentity))
    {
        return;
    }

    // Request refused: log it, flag the execution result, and answer 403 Forbidden.
    string message = string.Format("{0} Check refused a request. Request Identity: {1}", Name, requestIdentity);
    ErrorUtils.LogError(message, request, default(Exception), false, "FXD300012");

    if (response.ExecutionResult != null)
    {
        response.ExecutionResult.ValidationExceptionThrown = true;
    }

    response.StatusCode = (int)HttpStatusCode.Forbidden;
    response.StatusDescription = message;
    response.AddHeader(ServiceUtils.ResponseStatusHttpHeaderKey, AckCodeType.Failure.ToString());

    // Echo the trace id back (when present) so the refusal can be correlated.
    string traceIdString = request.Headers[ServiceUtils.TRACE_ID_HTTP_HEADER];
    if (!string.IsNullOrWhiteSpace(traceIdString))
    {
        response.AddHeader(ServiceUtils.TRACE_ID_HTTP_HEADER, traceIdString);
    }

    response.LogRequest(request);
    response.EndRequest();
}
/// <summary>
/// Note: In order for FFMpeg to fade the glow in and out correctly, the image
/// must have at least some transparency somewhere (and this might need to be
/// within the region that ends up in the resulting video). Otherwise, FFMpeg
/// adds in a black background to the fade effects or something.
/// </summary>
/// <param name="file">The glow image file; must not be null.</param>
public GlowType(FileInfo file)
{
    ErrorUtils.ThrowIfArgNull(file, nameof(file));

    this.File = file;
}
/// <summary>
/// Stores the beats and the glow start/end times for this series.
/// </summary>
/// <param name="beats">The beats in this series; must not be null.</param>
private IndependantGlowSeries(ImmutableList <TaggedBeat> beats, double glowStartTime, double glowEndTime)
{
    ErrorUtils.ThrowIfArgNull(beats, nameof(beats));

    _beats = beats;
    this.GlowStartTime = glowStartTime;
    this.GlowEndTime = glowEndTime;
}
/// <summary>
/// Writes the error with fallback logic for XML cases where the writer is in an error state and a new writer must be created.
/// </summary>
/// <param name="messageWriter">The message writer.</param>
/// <param name="encoding">The encoding to use for the error if we have to fallback.</param>
/// <param name="responseStream">The response stream to write to in the fallback case.</param>
/// <param name="args">The args for the error.</param>
/// <param name="error">The error to write.</param>
/// <param name="messageWriterBuilder">MessageWriterBuilder to use if a new ODataMessageWriter needs to be constructed.</param>
private static void WriteErrorWithFallbackForXml(ODataMessageWriter messageWriter, Encoding encoding, Stream responseStream, HandleExceptionArgs args, ODataError error, MessageWriterBuilder messageWriterBuilder)
{
    Debug.Assert(args != null, "args != null");
#if DEBUG
    Debug.Assert(args.ProcessExceptionWasCalled, "ProcessException was not called by the time we tried to serialze this error message with ODataLib.");
#endif
    if (messageWriter != null)
    {
        try
        {
            // If the XmlWriter inside the ODataMessageWriter had entered Error state, ODataMessageWriter.WriteError would throw an InvalidOperationException
            // when we try to write to it. Note that XmlWriter doesn't always throw an XmlException when it enters Error state.
            // The right thing to do is we don't write any more because at this point we don't know what's been written to the underlying
            // stream. However we still should flush the writer to make sure that all the content that was written but is sitting in the buffers actually appears
            // in the stream before writing the instream error. Otherwise the buffer will be flushed when disposing the writer later and we would end up with
            // either content written after the instream error (this would also result in having the Xml declaration in the middle of the payload -
            // [Astoria-ODataLib-Integration] In-stream errors due to XmlExceptions are written out backwards (error before partial valid payload)) or,
            // hypothetically, the instream error in the middle of the other content that was already partially written. For example we can end up with a payload that
            // looks like <element attr="val<m:error... The XmlReader would not be able to parse the error payload in this case. Disposing the writer will flush the buffer.
            // It is fine to do it since the writer is not usable at this point anyways. Also note that the writer will be disposed more than once (e.g. in finally block
            // in ResponseBodySerializer) but only the first call will have any effect.
            // However since in the versions we shipped we always create a new XmlWriter to serialize the error payload when the existing
            // one is in error state, we will continue to do the same to avoid introducing any breaking change here.
            messageWriter.WriteError(error, args.UseVerboseErrors);
        }
        catch (ODataException e)
        {
            // Yikes, ODataLib threw while writing the error. This tends to happen if the service author did something invalid during custom
            // error handling, such as add an custom instance annotation to the error payload. In this dire case, we treat it almost like
            // an in-stream error, and abort the previous writing. We write out the new error. Note that this will produce an invalid payload like
            // the situation noted above with XmlWriter errors.
            WebUtil.Dispose(messageWriter);
            messageWriterBuilder.SetMessageForErrorInError();
            var newErrorWriter = messageWriterBuilder.CreateWriter();
            ODataError errorWhileWritingOtherError = new ODataError()
            {
                ErrorCode = "500",
                InnerError = new ODataInnerError(e),
                Message = Strings.ErrorHandler_ErrorWhileWritingError
            };
            newErrorWriter.WriteError(errorWhileWritingOtherError, args.UseVerboseErrors);
        }
        catch (InvalidOperationException)
        {
            Debug.Assert(ContentTypeUtil.IsNotJson(args.ResponseContentType), "Should never get here for JSON responses");
            WebUtil.Dispose(messageWriter);

            // if either an InvalidOperationException was encountered (see comment above) or the message writer was null, write the error out manually.
            Debug.Assert(responseStream != null, "responseStream != null");
            using (XmlWriter xmlWriter = XmlWriter.Create(responseStream, XmlUtil.CreateXmlWriterSettings(encoding)))
            {
                ErrorUtils.WriteXmlError(xmlWriter, error, args.UseVerboseErrors, MaxInnerErrorDepth);
            }
        }
    }
}
/// <summary>
/// Opens a connection, executes the named stored procedure, and materializes the
/// result set into a list of <typeparamref name="T"/>.
/// </summary>
/// <param name="connectionString">Database connection string.</param>
/// <param name="commandText">Stored-procedure name (also used in error messages).</param>
/// <param name="values">Positional parameter values, assigned via AssignParameters.</param>
/// <exception cref="FaultException">Re-thrown untouched; all other failures are wrapped via ErrorUtils.</exception>
public static List <T> ExecuteStoreProcedure <T>(string connectionString, string commandText, params object[] values) where T : class, new()
{
    using (var conn = new SqlConnection(connectionString))
    {
        try
        {
            conn.Open();
        }
        catch (Exception ex)
        {
            // Connection failures get a distinct error code from execution failures.
            throw ErrorUtils.CreateErrorWithSubMessage(
                ERR_SQL.ERR_SQL_OPEN_CONNECTION_FAIL,
                ex.Message,
                commandText);
        }

        using (var comm = new SqlCommand(commandText, conn))
        {
            try
            {
                comm.CommandType = CommandType.StoredProcedure;
                AssignParameters(comm, values);

                using (var dr = comm.ExecuteReader())
                {
                    // NOTE(review): output parameters are normally populated only after
                    // the reader is closed; this reads the Oracle error parameter while
                    // the reader is still open — confirm this behaves as intended.
                    if (
                        comm.Parameters.Contains(CONSTANTS.ORACLE_EXCEPTION_PARAMETER_NAME)
                        &&
                        comm.Parameters[CONSTANTS.ORACLE_EXCEPTION_PARAMETER_NAME].Value != DBNull.Value
                        )
                    {
                        var errCode = int.Parse(comm.Parameters[CONSTANTS.ORACLE_EXCEPTION_PARAMETER_NAME].Value.ToString());
                        if (errCode != 0)
                        {
                            // Non-zero procedure error code is surfaced as a typed error.
                            throw ErrorUtils.CreateError(errCode, commandText, values);
                        }
                    }

                    return(dr.ToList <T>());
                }
            }
            catch (SqlException ex)
            {
                throw ThrowSqlUserException(ex, commandText);
            }
            catch (FaultException)
            {
                // Already a service fault — pass through unchanged.
                throw;
            }
            catch (Exception ex)
            {
                throw ErrorUtils.CreateErrorWithSubMessage(
                    ERR_SQL.ERR_SQL_EXECUTE_COMMAND_FAIL,
                    ex.Message,
                    commandText);
            }
            finally
            {
                // Redundant with the enclosing using, but harmless.
                conn.Close();
            }
        }
    }
}