/// <summary>
///     Asynchronously advances the cursor to the next row. Rows remaining in the
///     current chunk are consumed first; when the chunk is exhausted, the next
///     chunk (if any) is awaited from the downloader.
/// </summary>
/// <returns>true if a row is available; false when the result set is exhausted.</returns>
/// <exception cref="SnowflakeDbException">Thrown when the reader has already been closed.</exception>
internal override async Task<bool> NextAsync()
        {
            if (isClosed)
            {
                throw new SnowflakeDbException(SFError.DATA_READER_ALREADY_CLOSED);
            }

            _currentChunkRowIdx++;
            if (_currentChunkRowIdx < _currentChunkRowCount)
            {
                return true;
            }

            if (_chunkDownloader != null)
            {
                // GetNextChunkAsync may not complete until the download finishes;
                // await it directly (ConfigureAwait(false): library code, no need
                // to resume on the caller's synchronization context).
                Logger.Info("Get next chunk from chunk downloader");
                IResultChunk nextChunk = await _chunkDownloader.GetNextChunkAsync().ConfigureAwait(false);

                if (nextChunk != null)
                {
                    resetChunkInfo(nextChunk);
                    return true;
                }
            }

            return false;
        }
        /// <summary>
        ///     Synchronously advances the cursor to the next row. Rows remaining
        ///     in the current chunk are consumed first; when the chunk is
        ///     exhausted, the next chunk (if any) is fetched from the downloader.
        /// </summary>
        /// <returns>true if a row is available; false when the result set is exhausted.</returns>
        /// <exception cref="SnowflakeDbException">Thrown when the reader has already been closed.</exception>
        internal override bool Next()
        {
            if (isClosed)
            {
                throw new SnowflakeDbException(SFError.DATA_READER_ALREADY_CLOSED);
            }

            _currentChunkRowIdx++;
            if (_currentChunkRowIdx < _currentChunkRowCount)
            {
                return true;
            }

            if (_chunkDownloader != null)
            {
                Logger.Info("Get next chunk from chunk downloader");
                // Offload to the thread pool to avoid deadlocking on a caller's
                // synchronization context. GetAwaiter().GetResult() surfaces the
                // original exception instead of wrapping it in AggregateException
                // the way Task.Result does.
                IResultChunk nextChunk =
                    Task.Run(() => _chunkDownloader.GetNextChunkAsync()).GetAwaiter().GetResult();
                if (nextChunk != null)
                {
                    resetChunkInfo(nextChunk);
                    return true;
                }
            }
            return false;
        }
        /// <summary>
        ///     Streams JSON tokens from <c>stream</c> and appends each string or
        ///     null value as a cell on the reusable chunk. Array structure tokens
        ///     are skipped; any other token type indicates corrupt input.
        /// </summary>
        /// <param name="chunk">Reusable chunk that receives the parsed cells.</param>
        public void ParseChunk(IResultChunk chunk)
        {
            SFReusableChunk reusableChunk = (SFReusableChunk)chunk;

            // parse results row by row
            using (StreamReader textReader = new StreamReader(stream))
                using (JsonTextReader jsonReader = new JsonTextReader(textReader))
                {
                    while (jsonReader.Read())
                    {
                        JsonToken token = jsonReader.TokenType;

                        if (token == JsonToken.String)
                        {
                            reusableChunk.AddCell((string)jsonReader.Value);
                        }
                        else if (token == JsonToken.Null)
                        {
                            reusableChunk.AddCell(null);
                        }
                        else if (token != JsonToken.StartArray &&
                                 token != JsonToken.EndArray &&
                                 token != JsonToken.None)
                        {
                            throw new SnowflakeDbException(SFError.INTERNAL_ERROR, $"Unexpected token type: {token}");
                        }
                    }
                }
        }
 /// <summary>
 ///     Deserializes the entire chunk stream as a two-dimensional string array
 ///     and stores it as the result chunk's row set.
 /// </summary>
 /// <param name="chunk">Result chunk whose <c>rowSet</c> is populated.</param>
 public void ParseChunk(IResultChunk chunk)
 {
     SFResultChunk resultChunk = (SFResultChunk)chunk;

     // parse results row by row
     using (var reader = new StreamReader(stream))
     using (var jsonReader = new JsonTextReader(reader))
     {
         resultChunk.rowSet = JsonSerializer.Deserialize<string[,]>(jsonReader);
     }
 }
 /// <summary>
 ///     Makes <paramref name="nextChunk"/> the current chunk and resets the row
 ///     cursor. The outgoing chunk's row set is released so it can be garbage
 ///     collected while later chunks are processed.
 /// </summary>
 /// <param name="nextChunk">The freshly downloaded chunk to read from.</param>
 internal void resetChunkInfo(IResultChunk nextChunk)
 {
     // Fixed typo in log message: "Recieved" -> "Received".
     Logger.Debug($"Received chunk #{nextChunk.GetChunkIndex() + 1} of {_totalChunkCount}");
     if (_currentChunk is SFResultChunk)
     {
         // Drop the parsed row set of the outgoing chunk to free memory early.
         ((SFResultChunk)_currentChunk).rowSet = null;
     }
     _currentChunk         = nextChunk;
     _currentChunkRowIdx   = 0;
     _currentChunkRowCount = _currentChunk.GetRowCount();
 }
// Exemple #6
// 0
        /// <summary>
        ///     Content from s3 in format of
        ///     ["val1", "val2", null, ...],
        ///     ["val3", "val4", null, ...],
        ///     ...
        ///     To parse it as a single JSON array, we need to prepend '[' and append ']' to the stream.
        /// </summary>
        /// <param name="content">Raw chunk stream downloaded from the stage (rows without enclosing brackets).</param>
        /// <param name="resultChunk">Chunk object to populate with the parsed rows.</param>
        private void ParseStreamIntoChunk(Stream content, IResultChunk resultChunk)
        {
            // Bracket streams that, concatenated around the content, yield one
            // syntactically valid JSON array.
            Stream openBracket  = new MemoryStream(Encoding.UTF8.GetBytes("["));
            Stream closeBracket = new MemoryStream(Encoding.UTF8.GetBytes("]"));

            Stream concatStream = new ConcatenatedStream(new Stream[3] {
                openBracket, content, closeBracket
            });

            // NOTE(review): none of these streams are disposed here; presumably the
            // parser or chunk lifecycle owns them — confirm against ReusableChunkParser.
            IChunkParser parser = new ReusableChunkParser(concatStream);

            parser.ParseChunk(resultChunk);
        }
// Exemple #7
// 0
        /// <summary>
        ///     Parses the chunk's JSON token stream into a fixed-size
        ///     [rowCount, colCount] string matrix and assigns it to the chunk's
        ///     row set. Date parsing is disabled so timestamp-like strings are
        ///     kept verbatim.
        /// </summary>
        /// <param name="chunk">Result chunk whose <c>rowSet</c> is populated.</param>
        public void ParseChunk(IResultChunk chunk)
        {
            // parse results row by row
            using (StreamReader textReader = new StreamReader(stream))
                using (JsonTextReader jsonReader = new JsonTextReader(textReader)
                {
                    DateParseHandling = DateParseHandling.None
                })
                {
                    int rowIdx = 0;
                    int colIdx = 0;

                    var cells = new string[chunk.GetRowCount(), ((SFResultChunk)chunk).colCount];

                    while (jsonReader.Read())
                    {
                        JsonToken token = jsonReader.TokenType;

                        if (token == JsonToken.String)
                        {
                            cells[rowIdx, colIdx++] = (string)jsonReader.Value;
                        }
                        else if (token == JsonToken.Null)
                        {
                            cells[rowIdx, colIdx++] = null;
                        }
                        else if (token == JsonToken.EndArray)
                        {
                            // End of a row array: move to the next row. The
                            // outermost array's EndArray arrives with colIdx == 0
                            // and is ignored.
                            if (colIdx > 0)
                            {
                                colIdx = 0;
                                rowIdx++;
                            }
                        }
                        else if (token != JsonToken.StartArray && token != JsonToken.None)
                        {
                            throw new SnowflakeDbException(SFError.INTERNAL_ERROR, $"Unexpected token type: {token}");
                        }
                    }

                    ((SFResultChunk)chunk).rowSet = cells;
                }
        }
        /// <summary>
        ///     Builds a result set from a query response: the first batch of rows
        ///     arrives inlined in <paramref name="responseData"/>.rowSet; any
        ///     additional chunks are fetched lazily via a chunk downloader.
        /// </summary>
        /// <param name="responseData">Query execution response; its rowSet reference is taken over and nulled out here.</param>
        /// <param name="sfStatement">Statement that produced this result set.</param>
        /// <param name="cancellationToken">Token passed on to the chunk downloader.</param>
        public SFResultSet(QueryExecResponseData responseData, SFStatement sfStatement, CancellationToken cancellationToken) : base()
        {
            columnCount           = responseData.rowType.Count;
            _currentChunkRowIdx   = -1;  // positioned before the first row; Next()/NextAsync() pre-increment
            _currentChunkRowCount = responseData.rowSet.GetLength(0);

            this.sfStatement = sfStatement;
            updateSessionStatus(responseData);

            if (responseData.chunks != null)
            {
                // counting the first chunk
                _totalChunkCount = responseData.chunks.Count;
                _chunkDownloader = ChunkDownloaderFactory.GetDownloader(responseData, this, cancellationToken);
            }

            _currentChunk       = new SFResultChunk(responseData.rowSet);
            // Hand ownership of the row set to the chunk; clearing the response's
            // reference lets it be collected independently of responseData.
            responseData.rowSet = null;

            sfResultSetMetaData = new SFResultSetMetaData(responseData);

            isClosed = false;
        }
        /// <summary>
        ///     Fast hand-rolled scanner for chunk data: reads the stream byte by
        ///     byte, treating 'n' outside a string as a null cell and double
        ///     quotes as string delimiters. Escape sequences inside strings are
        ///     unescaped before the bytes are appended to the chunk.
        /// </summary>
        /// <param name="chunk">Reusable chunk that receives the parsed cells.</param>
        /// <exception cref="SnowflakeDbException">Thrown when the stream ends mid-string or mid-escape.</exception>
        public void ParseChunk(IResultChunk chunk)
        {
            SFReusableChunk rc = (SFReusableChunk)chunk;

            bool inString = false;
            int  c;
            var  input = new FastStreamWrapper(stream);
            var  ms    = new FastMemoryStream();

            while ((c = input.ReadByte()) >= 0)
            {
                if (!inString)
                {
                    // n means null
                    // " quote means begin string
                    // all else are ignored
                    if (c == '"')
                    {
                        inString = true;
                    }
                    else if (c == 'n')
                    {
                        rc.AddCell(null, 0);
                    }
                    // ignore anything else
                }
                else
                {
                    // Inside a string, look for end string
                    // Anything else is saved in the buffer
                    if (c == '"')
                    {
                        rc.AddCell(ms.GetBuffer(), ms.Length);
                        ms.Clear();
                        inString = false;
                    }
                    else if (c == '\\')
                    {
                        // Process next character. Escapes not listed below fall
                        // through and the escaped character is written as-is
                        // (covers \" \\ and \/).
                        // NOTE(review): \uXXXX escapes are not decoded here and
                        // would emit a literal 'u' — confirm the server never
                        // sends them in this result format.
                        c = input.ReadByte();
                        switch (c)
                        {
                        case 'n':
                            c = '\n';
                            break;

                        case 'r':
                            c = '\r';
                            break;

                        case 'b':
                            c = '\b';
                            break;

                        case 't':
                            c = '\t';
                            break;

                        case 'f':
                            // Fix: \f (form feed) is a valid JSON escape and was
                            // previously written out as a literal 'f'.
                            c = '\f';
                            break;

                        case -1:
                            throw new SnowflakeDbException(SFError.INTERNAL_ERROR, $"Unexpected end of stream in escape sequence");
                        }
                        ms.WriteByte((byte)c);
                    }
                    else
                    {
                        ms.WriteByte((byte)c);
                    }
                }
            }
            if (inString)
            {
                throw new SnowflakeDbException(SFError.INTERNAL_ERROR, $"Unexpected end of stream in string");
            }
        }
        /// <summary>
        ///     Content from s3 in format of
        ///     ["val1", "val2", null, ...],
        ///     ["val3", "val4", null, ...],
        ///     ...
        ///     Unlike the JSON-array variant, the reusable chunk parser consumes
        ///     the raw stream directly, so no '[' / ']' wrapping is done here.
        /// </summary>
        /// <param name="content">Raw chunk stream downloaded from the stage.</param>
        /// <param name="resultChunk">Chunk object to populate with the parsed rows.</param>
        private void ParseStreamIntoChunk(Stream content, IResultChunk resultChunk)
        {
            IChunkParser parser = new ReusableChunkParser(content);

            parser.ParseChunk(resultChunk);
        }