        /// <summary>
        /// All requests are processed through EvaluateScript. In the context of this connector, the script is a JSON string containing the metadata required to correctly process the attached data.
        /// </summary>
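        /// <remarks>
        /// The script is deserialized into a <see cref="RequestSpecification"/>. The JSON below is an illustrative
        /// sketch of a deployment prediction request, not an exhaustive schema; the exact set of supported fields
        /// and values is defined by <see cref="RequestSpecification"/>.
        /// <code>
        /// {
        ///   "request_type": "predictapi",
        ///   "auth_config": { "endpoint": "https://...", "api_token": "...", "datarobot_key": "..." },
        ///   "deployment_id": "...",
        ///   "keyfield": "id",
        ///   "should_cache": true,
        ///   "inc_details": false,
        ///   "explain": { "max_codes": 3, "threshold_high": 0.8, "threshold_low": 0.2, "return_raw": false }
        /// }
        /// </code>
        /// </remarks>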
        public override async Task EvaluateScript(IAsyncStreamReader<global::Qlik.Sse.BundledRows> requestStream, IServerStreamWriter<global::Qlik.Sse.BundledRows> responseStream, ServerCallContext context)
        {
            ScriptRequestHeader scriptHeader;
            CommonRequestHeader commonHeader;

            Qlik2DataRobotMetrics.RequestCounter.Inc();
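            // The request stream's hash code is used as a correlation id to tie together the log entries for this request.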
            int reqHash = requestStream.GetHashCode();

            try
            {
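                // The Qlik engine supplies the script and common request headers as binary entries in the gRPC request metadata.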
                var header = GetHeader(context.RequestHeaders, "qlik-scriptrequestheader-bin");
                scriptHeader = ScriptRequestHeader.Parser.ParseFrom(header);

                var commonRequestHeader = GetHeader(context.RequestHeaders, "qlik-commonrequestheader-bin");
                commonHeader = CommonRequestHeader.Parser.ParseFrom(commonRequestHeader);

                Logger.Info($"{reqHash} - EvaluateScript called from client ({context.Peer}), hashid ({reqHash})");
                Logger.Debug($"{reqHash} - EvaluateScript header info: AppId ({commonHeader.AppId}), UserId ({commonHeader.UserId}), Cardinality ({commonHeader.Cardinality} rows)");
            }
            catch (Exception e)
            {
                Logger.Error($"EvaluateScript with hashid ({reqHash}) failed: {e.Message}");
                throw new RpcException(new Status(StatusCode.DataLoss, e.Message));
            }

            try
            {
                var stopwatch = new Stopwatch();
                stopwatch.Start();

                var paramnames = $"{reqHash} - EvaluateScript call with hashid({reqHash}) got Param names: ";

                foreach (var param in scriptHeader.Params)
                {
                    paramnames += $" {param.Name}";
                }
                Logger.Trace("{0}", paramnames);

                Logger.Trace(scriptHeader.Script);
                RequestSpecification config = JsonConvert.DeserializeObject<RequestSpecification>(scriptHeader.Script);

                var Params = GetParams(scriptHeader.Params.ToArray());

                string keyname = null;
                if (config.keyfield != null)
                {
                    keyname = config.keyfield;
                }


                ResultDataColumn keyField = new ResultDataColumn();
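                // keyField is filled in by ConvertBundledRowsToCSV and passed to GenerateResult so the key column can be re-attached to the results.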
                var rowdatastream         = await ConvertBundledRowsToCSV(Params, requestStream, context, keyField, keyname, config.timestamp_field, config.timestamp_format);

                Logger.Debug($"{reqHash} - Input Data Size: {rowdatastream.Length}");

                var outData = await SelectFunction(config, rowdatastream, reqHash);

                rowdatastream = null;

                bool shouldCache = config.should_cache;

                bool inc_details   = config.inc_details;
                bool rawExplain    = false;
                bool shouldExplain = false;
                int  max_codes     = 0;

                if (config.explain != null)
                {
                    shouldExplain = true;
                    rawExplain    = config.explain.return_raw;
                    max_codes     = config.explain.max_codes;
                }

                string request_type = config.request_type;

                await GenerateResult(request_type, outData, responseStream, context, reqHash, cacheResultInQlik : shouldCache, keyField : keyField, keyname : keyname, includeDetail : inc_details, shouldExplain : shouldExplain, rawExplain : rawExplain, explain_max : max_codes);

                outData = null;
                stopwatch.Stop();
                Logger.Debug($"{reqHash} - Took {stopwatch.ElapsedMilliseconds} ms, hashid ({reqHash})");
                Qlik2DataRobotMetrics.DurHist.Observe(stopwatch.ElapsedMilliseconds / 1000.0);
            }
            catch (Exception e)
            {
                Logger.Error($"{reqHash} - ERROR: {e.Message}");
                throw new RpcException(new Status(StatusCode.InvalidArgument, $"{e.Message}"));
            }

            GC.Collect();
        }
        /// <summary>
        /// Select the function to call based upon the request specification.
        /// </summary>
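        /// <remarks>
        /// Supported request_type values are "createproject", "predictapi", and "timeseries"; any other value falls through and returns an empty stream.
        /// </remarks>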
        private async Task<MemoryStream> SelectFunction(RequestSpecification config, MemoryStream rowdatastream, int reqHash)
        {
            Logger.Info($"{reqHash} - Start DataRobot");
            DataRobotRestRequest dr = new DataRobotRestRequest(reqHash);

            string api_token     = Convert.ToString(config.auth_config.api_token);
            string datarobot_key = config.auth_config.datarobot_key;

            string host          = config.auth_config.endpoint;
            string project_id    = config.project_id;
            string model_id      = config.model_id;
            string deployment_id = config.deployment_id;
            string keyField      = config.keyfield;

            MemoryStream result = new MemoryStream();
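            // Default to an empty result stream; unrecognized request types fall through the switch and return it unchanged.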

            switch (config.request_type)
            {
            case "createproject":
                Logger.Info($"{reqHash} - Create Project");
                string project_name = Convert.ToString(config.project_name);

                var zippedstream = await CompressStream(rowdatastream, project_name, reqHash);

                Logger.Info($"{reqHash} - Zipped Data Size: {zippedstream.Length}");

                string endpoint = Convert.ToString(config.auth_config.endpoint);
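                // Normalize the endpoint so it always ends with a trailing slash before the project-creation request is built.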
                if (!endpoint.EndsWith("/"))
                {
                    endpoint = endpoint + "/";
                }

                result = await dr.CreateProjectsAsync(endpoint, api_token, zippedstream, project_name, project_name + ".zip");

                break;

            case "predictapi":
                Logger.Info($"{reqHash} - Predict API");



                int    maxCodes      = 0;
                double thresholdHigh = 0;
                double thresholdLow  = 0;
                bool   explain       = false;

                if (config.explain != null)
                {
                    maxCodes      = config.explain.max_codes;
                    thresholdHigh = config.explain.threshold_high;
                    thresholdLow  = config.explain.threshold_low;
                    explain       = true;
                }
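                // When the request has no explain block, explain stays false and prediction explanations are not requested below.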

                result = await dr.PredictApiAsync(rowdatastream, api_token, datarobot_key, host, deployment_id : deployment_id, project_id : project_id, model_id : model_id, keyField : keyField, explain : explain, maxCodes : maxCodes, thresholdHigh : thresholdHigh, thresholdLow : thresholdLow);

                break;

            case "timeseries":
                Logger.Info($"{reqHash} - Time Series Prediction API");

                string forecast_point = null;

                if (config.forecast_point != null)
                {
                    forecast_point = Convert.ToString(config.forecast_point);
                    //forecast_point = config.forecast_point.ToString("s");
                }
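                // When no forecast point is supplied, forecast_point stays null and TimeSeriesAsync is called without an explicit forecast point.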


                result = await dr.TimeSeriesAsync(rowdatastream, api_token, datarobot_key, host, deployment_id : deployment_id, project_id : project_id, model_id : model_id, forecast_point : forecast_point);

                break;

            default:
                break;
            }

            Logger.Info($"{reqHash} - DataRobot Finish");
            return result;
        }