Example #1
0
        /// <summary>
        /// Streams the stored record batches for the flight identified by the ticket.
        /// The ticket bytes are read as a UTF-8 string and used as a path descriptor;
        /// an unknown ticket completes the stream without writing any batches.
        /// </summary>
        public override async Task DoGet(FlightTicket ticket, FlightServerRecordBatchStreamWriter responseStream, ServerCallContext context)
        {
            var descriptor = FlightDescriptor.CreatePathDescriptor(ticket.Ticket.ToStringUtf8());

            // Guard clause: nothing registered for this descriptor — end the stream empty.
            if (!_flightStore.Flights.TryGetValue(descriptor, out var holder))
            {
                return;
            }

            foreach (var entry in holder.GetRecordBatches())
            {
                await responseStream.WriteAsync(entry.RecordBatch, entry.Metadata);
            }
        }
        /// <summary>
        /// Streams every record batch of the table registered under the given ticket.
        /// </summary>
        /// <exception cref="RpcException">
        /// <see cref="StatusCode.NotFound"/> when no table exists for the ticket.
        /// </exception>
        public override async Task DoGet(
            FlightTicket ticket,
            FlightServerRecordBatchStreamWriter responseStream,
            ServerCallContext context
            )
        {
            // Single dictionary lookup via TryGetValue instead of ContainsKey + indexer.
            if (!_flightData.Tables.TryGetValue(ticket, out var table))
            {
                throw new RpcException(new Status(StatusCode.NotFound, "Flight not found."));
            }

            foreach (var batch in table)
            {
                await responseStream.WriteAsync(batch);
            }
        }
        /// <summary>
        /// Executes the SQL statement carried in the ticket and streams the result
        /// as Arrow record batches. A batch is flushed once its encoded size exceeds
        /// the client-supplied "max-batch-size" request header (falling back to
        /// <c>DefaultMaxBatchSize</c>). A final batch is always written — even when
        /// empty — so the client receives the schema for zero-row results.
        /// </summary>
        /// <exception cref="RpcException">
        /// InvalidArgument for SQL errors, Unauthenticated for authorization
        /// failures, Internal for any other failure.
        /// </exception>
        public override async Task DoGet(FlightTicket ticket, FlightServerRecordBatchStreamWriter responseStream, ServerCallContext context)
        {
            try
            {
                int maxBatchSize         = DefaultMaxBatchSize;
                var maxBatchSizeMetadata = context.RequestHeaders.Get("max-batch-size");

                if (maxBatchSizeMetadata != null && int.TryParse(maxBatchSizeMetadata.Value, out var parsedMaxBatchSize))
                {
                    maxBatchSize = parsedMaxBatchSize;
                }

                var queryResult = await _koraliumTransportService.Execute(ticket.Ticket.ToStringUtf8(), new Shared.SqlParameters(), context.GetHttpContext());

                // Get the resulting schema and one column encoder per result column.
                var schema   = GetSchema(queryResult.Columns);
                var encoders = queryResult.Columns.Select(x => EncoderHelper.GetEncoder(x)).ToArray();

                foreach (var encoder in encoders)
                {
                    encoder.NewBatch();
                }

                int count = 0;

                foreach (var obj in queryResult.Result)
                {
                    count++;
                    for (int i = 0; i < encoders.Length; i++)
                    {
                        encoders[i].Encode(obj);
                    }

                    // Plain loop instead of LINQ Select().Sum(): this runs once per row.
                    long batchSize = 0;
                    for (int i = 0; i < encoders.Length; i++)
                    {
                        batchSize += encoders[i].Size();
                    }

                    if (batchSize > maxBatchSize)
                    {
                        await responseStream.WriteAsync(new RecordBatch(schema, encoders.Select(x => x.BuildArray()), count));

                        foreach (var encoder in encoders)
                        {
                            encoder.NewBatch();
                        }
                        count = 0;
                    }
                }

                // Always emit the trailing batch (possibly empty) so the schema reaches the client.
                var batch = new RecordBatch(schema, encoders.Select(x => x.BuildArray()), count);
                await responseStream.WriteAsync(batch);
            }
            catch (SqlErrorException error)
            {
                throw new RpcException(new Status(StatusCode.InvalidArgument, error.Message));
            }
            catch (AuthorizationFailedException authFailed)
            {
                throw new RpcException(new Status(StatusCode.Unauthenticated, authFailed.Message));
            }
            catch (Exception)
            {
                // Deliberately generic: do not leak internal details to the client.
                throw new RpcException(new Status(StatusCode.Internal, "Internal error"));
            }
        }