/// <summary>
/// Ctr. Captures everything needed to compile a single parsed PQL request:
/// the parsed tree, request parameters and the container schema.
/// </summary>
public PqlCompilerState(
    IExpressionEvaluatorRuntime parentRuntime,
    ParsedRequest parsedRequest,
    DataRequestParams requestParams,
    DataContainerDescriptor containerDescriptor,
    Type contextType,
    Type returnType)
    : base(parentRuntime, contextType, returnType, null)
{
    // validate in declaration order so the first offending argument is reported
    if (parsedRequest == null)
    {
        throw new ArgumentNullException("parsedRequest");
    }

    if (containerDescriptor == null)
    {
        throw new ArgumentNullException("containerDescriptor");
    }

    if (requestParams == null)
    {
        throw new ArgumentNullException("requestParams");
    }

    ParsedRequest = parsedRequest;
    RequestParameters = requestParams;
    ContainerDescriptor = containerDescriptor;

    // field/parameter reference maps: ordinal -> (local variable, initializer expression),
    // consumed later when the clause body is wrapped into a block
    FieldRefs = new Dictionary<int, Tuple<ParameterExpression, Expression>>();
    ParamRefs = new Dictionary<int, Tuple<ParameterExpression, Expression>>();
}
/// <summary>
/// Replaces the container descriptor held by this instance.
/// </summary>
/// <param name="containerDescriptor">New descriptor; must not be null.</param>
public void AttachContainerDescriptor(DataContainerDescriptor containerDescriptor)
{
    if (containerDescriptor == null)
    {
        throw new ArgumentNullException("containerDescriptor");
    }

    ContainerDescriptor = containerDescriptor;
}
/// <summary>
/// Ctr. Binds the preprocessor to a schema and prepares a case-insensitive
/// cache of identifier-alias parse nodes.
/// </summary>
public QueryPreprocessor(DataContainerDescriptor containerDescriptor)
{
    if (containerDescriptor == null)
    {
        throw new ArgumentNullException("containerDescriptor");
    }

    m_containerDescriptor = containerDescriptor;

    // alias lookup must be case-insensitive, same as PQL identifiers
    m_identifierAliasParts = new ConcurrentDictionary<string, ParseTreeNode>(StringComparer.OrdinalIgnoreCase);
}
/// <summary>
/// Compiles the SET clauses (for UPDATE) or value-insert clauses (for INSERT) of a
/// modification request into Action delegates that write values into the change buffer.
/// Results are appended to <c>cacheInfo.ParsedRequest.Modify.UpdateAssignments</c>.
/// </summary>
/// <param name="storageDriver">Driver used to verify that a field is updatable.</param>
/// <param name="containerDescriptor">Schema of the container.</param>
/// <param name="cacheInfo">Cached parsed request and compilation artifacts.</param>
/// <param name="changeType">Whether this is an INSERT or UPDATE compilation.</param>
/// <exception cref="InvalidOperationException">
/// When clause/field counts are inconsistent or a non-updatable field is being updated.
/// </exception>
private static void CompileInsertUpdateClauses(
    IStorageDriver storageDriver, DataContainerDescriptor containerDescriptor, RequestExecutionContextCacheInfo cacheInfo, DriverChangeType changeType)
{
    var updates = cacheInfo.ParsedRequest.Modify.UpdateAssignments;
    var clauses = cacheInfo.ParsedRequest.Modify.InsertUpdateSetClauses;
    var fields = cacheInfo.ParsedRequest.Modify.ModifiedFields;

    if (clauses.Count != fields.Count)
    {
        // was: new Exception(...) - use the specific BCL type; still caught by catch(Exception) callers
        throw new InvalidOperationException(string.Format(
            "Internal error: insert/update clauses count ({0}) does not match count of modified fields ({1})",
            clauses.Count, fields.Count));
    }

    // compile field assignment clauses (SET clauses or value inserts)
    for (var ordinal = 0; ordinal < clauses.Count; ordinal++)
    {
        var clause = clauses[ordinal];
        var field = fields[ordinal];

        // for bulk requests, primary key is there but it is only used to lookup the record
        // for non-bulk requests, primary key should not be in the list of UPDATE clauses
        if (changeType == DriverChangeType.Update && !cacheInfo.ParsedRequest.IsBulk)
        {
            if (!storageDriver.CanUpdateField(field.FieldId))
            {
                throw new InvalidOperationException(string.Format(
                    "Cannot update field {0}/{1} on entity {2}",
                    field.FieldId, field.Name, cacheInfo.ParsedRequest.TargetEntity.Name));
            }
        }

        // prepare Action compilation context
        var compilerState = QueryParser.PrepareCompilerState(containerDescriptor, cacheInfo, null);
        compilerState.CompileToAction = true;

        // extractor has signature like Func<ClauseEvaluationContext, T>;
        // a null clause means "copy the field value straight from the input row" (bulk path)
        var extractor = clause == null
            ? QueryParser.CompileFieldValueExtractorClause(compilerState, field, containerDescriptor, cacheInfo, MakeNullableType(field.DbType))
            : QueryParser.CompileClause(compilerState, clause, containerDescriptor, cacheInfo, MakeNullableType(field.DbType));

        // get the value into local variable, to prevent multiple invokations when row writer checks for null
        var extractedValue = Expression.Variable(extractor.Type);

        // now take the extractor and create another method, that will take the value and then put it into the changebuffer's data
        var changeBufferData = Expression.Field(Expression.Field(compilerState.Context, "ChangeBuffer"), "Data");
        var blockBody = Expression.Block(
            new[] { extractedValue },
            Expression.Assign(extractedValue, extractor),
            DriverRowData.CreateWriteAccessor(extractedValue, changeBufferData, field.DbType, ordinal));

        updates.Add(
            new ParsedRequest.FieldAssignment
            {
                Field = field,
                CompiledExpression = (Action<ClauseEvaluationContext>)QueryParser.CompileClause(blockBody, compilerState)
            });
    }
}
/// <summary>
/// Hands off the built descriptor to the caller. This consumes the builder:
/// a second call throws.
/// </summary>
/// <exception cref="InvalidOperationException">When already committed.</exception>
public DataContainerDescriptor Commit()
{
    var result = m_descriptor;
    if (result == null)
    {
        throw new InvalidOperationException("Cannot invoke Commit more than once");
    }

    // clear the field so any later Commit call fails fast
    m_descriptor = null;
    return result;
}
/// <summary>
/// Ctr. Builds the in-memory column-store structures for one document type:
/// a column store per field, key/index maps, validity bitmap, sort-index manager
/// and the structure lock.
/// </summary>
/// <param name="dataContainerDescriptor">Schema of the whole container.</param>
/// <param name="documentTypeDescriptor">Schema of this particular document type.</param>
/// <param name="allocator">Unmanaged memory allocator used by all per-field stores.</param>
/// <param name="tracer">Logger sink.</param>
public DocumentDataContainer(
    DataContainerDescriptor dataContainerDescriptor,
    DocumentTypeDescriptor documentTypeDescriptor,
    IUnmanagedAllocator allocator,
    ITracer tracer)
{
    if (tracer == null)
    {
        throw new ArgumentNullException("tracer");
    }

    if (dataContainerDescriptor == null)
    {
        throw new ArgumentNullException("dataContainerDescriptor");
    }

    if (documentTypeDescriptor == null)
    {
        throw new ArgumentNullException("documentTypeDescriptor");
    }

    if (allocator == null)
    {
        throw new ArgumentNullException("allocator");
    }

    m_logger = tracer;
    m_allocator = allocator;
    DocDesc = documentTypeDescriptor;
    DataContainerDescriptor = dataContainerDescriptor;

    // one column store per field of this document type
    ColumnStores = new ColumnDataBase[DocDesc.Fields.Length];
    DocumentKeys = new ExpandableArrayOfKeys(m_allocator);
    // sized at 2x field count to keep the hash load factor low
    FieldIdToColumnStore = new Dictionary<int, int>(ColumnStores.Length * 2);
    // throws if the primary key field is not declared on this document type
    PrimaryKeyFieldId = dataContainerDescriptor.RequireField(
        documentTypeDescriptor.DocumentType, documentTypeDescriptor.PrimaryKeyFieldName).FieldId;

    // materialize a typed store for every field and remember fieldId -> store ordinal
    for (var i = 0; i < DocDesc.Fields.Length; i++)
    {
        var field = dataContainerDescriptor.RequireField(DocDesc.Fields[i]);
        ColumnStores[i] = CreateColumnStore(field.DbType, m_allocator, null);
        FieldIdToColumnStore.Add(field.FieldId, i);
    }

    DocumentIdToIndex = new ConcurrentHashmapOfKeys(m_allocator);
    ValidDocumentsBitmap = new BitVector(m_allocator);
    SortIndexManager = new SortIndexManager(this);
    // recursion is allowed because internal operations may re-enter the lock
    StructureLock = new ReaderWriterLockSlim(LockRecursionPolicy.SupportsRecursion);
}
/// <summary>
/// Compiles the SELECT output column list into <c>parsedRequest.Select.OutputColumns</c>.
/// Bulk requests get one plain field extractor per selected field; regular requests
/// compile each select clause expression and derive label and DbType from it.
/// </summary>
private static void CompileSelectClauses(DataContainerDescriptor containerDescriptor, RequestExecutionContextCacheInfo cacheInfo)
{
    var parsedRequest = cacheInfo.ParsedRequest;
    if (parsedRequest.IsBulk)
    {
        // bulk path: columns map 1:1 to fields, no expressions to analyze
        foreach (var field in parsedRequest.Select.SelectFields)
        {
            var exprType = MakeNullableType(field.DbType);
            parsedRequest.Select.OutputColumns.Add(
                new ParsedRequest.SelectOutputColumn
                {
                    Label = field.Name,
                    CompiledExpression = QueryParser.CompileFieldValueExtractorClause(field, containerDescriptor, cacheInfo, exprType),
                    DbType = field.DbType,
                    IsNullable = exprType.IsNullableType()
                });
        }
    }
    else
    {
        foreach (var clause in parsedRequest.Select.SelectClauses)
        {
            // under column item, there is a "columnSource" element (with a single child), and optional "Id" element for alias
            var columnExpressionNode = clause.RequireChild("columnSource", 0).RequireChild(null, 0);
            var compiled = QueryParser.CompileClause(columnExpressionNode, containerDescriptor, cacheInfo, null);

            // compiled is a Func<ClauseEvaluationContext, T>; generic argument [1] is T, the result type
            var returnType = compiled.GetType().GetGenericArguments()[1];
            var dbType = DriverRowData.DeriveDataType(returnType.GetUnderlyingType());

            // get alias; falls back to the source text of the expression itself
            var aliasNode = clause.TryGetChild("Id", 1) ?? columnExpressionNode;
            var span = aliasNode.Span;
            var label = span.Length > 0
                ? cacheInfo.CommandText.Substring(span.Location.Position, span.Length)
                : aliasNode.FindTokenAndGetText();

            parsedRequest.Select.OutputColumns.Add(
                new ParsedRequest.SelectOutputColumn
                {
                    Label = label,
                    CompiledExpression = compiled,
                    DbType = dbType,
                    IsNullable = returnType.IsNullableType()
                });
        }
    }
}
/// <summary>
/// Builds a test container descriptor with two repetitions of nine differently-typed
/// fields (ids 1..18), a primary key field "id", one document type "testDoc"
/// and an identifier alias ALIAS1 -> id.
/// </summary>
public static DataContainerDescriptor BuildContainerDescriptor()
{
    var result = new DataContainerDescriptor();
    result.AddDocumentTypeName("testDoc");
    var testDocId = result.RequireDocumentTypeName("testDoc");

    var fieldId = 1;
    for (var i = 0; i < 2; i++)
    {
        result.AddField(new FieldMetadata(fieldId, "FieldByte" + fieldId, "Byte" + fieldId, DbType.Byte, testDocId));
        fieldId++;
        result.AddField(new FieldMetadata(fieldId, "FieldGuid" + fieldId, "Guid" + fieldId, DbType.Guid, testDocId));
        fieldId++;
        result.AddField(new FieldMetadata(fieldId, "FieldString" + fieldId, "String" + fieldId, DbType.String, testDocId));
        fieldId++;
        result.AddField(new FieldMetadata(fieldId, "FieldBinary" + fieldId, "Binary" + fieldId, DbType.Binary, testDocId));
        fieldId++;
        result.AddField(new FieldMetadata(fieldId, "FieldDecimal" + fieldId, "Decimal" + fieldId, DbType.Decimal, testDocId));
        fieldId++;
        result.AddField(new FieldMetadata(fieldId, "FieldBool" + fieldId, "Bool" + fieldId, DbType.Boolean, testDocId));
        fieldId++;
        result.AddField(new FieldMetadata(fieldId, "FieldDate" + fieldId, "Date" + fieldId, DbType.DateTime, testDocId));
        fieldId++;
        result.AddField(new FieldMetadata(fieldId, "FieldTime" + fieldId, "Time" + fieldId, DbType.Time, testDocId));
        fieldId++;
        result.AddField(new FieldMetadata(fieldId, "FieldDateTimeOffset" + fieldId, "DateTimeOffset" + fieldId, DbType.DateTimeOffset, testDocId));
        fieldId++;
    }

    // NOTE(review): "fieldId + 1" leaves a gap in the id sequence (loop ends at 19, pk gets 20);
    // the analogous demo builder uses the next contiguous id - confirm whether the gap is intentional
    result.AddField(new FieldMetadata(fieldId + 1, "id", "primary key", DbType.Int64, testDocId));

    var fieldIds = result.EnumerateFields().Select(x => x.FieldId).ToArray();
    result.AddDocumentTypeDescriptor(new DocumentTypeDescriptor("testDoc", "testDoc", 1, "id", fieldIds));
    result.AddIdentifierAlias("testDoc", new List<string> { "id", "ALIAS1" }, new[] { "id" });
    return result;
}
/// <summary>
/// Serializes the container descriptor to "descriptor.json" under the given
/// storage root, creating the directory if needed. Overwrites an existing file.
/// </summary>
/// <param name="descriptor">Descriptor to persist; must not be null.</param>
/// <param name="storageRoot">Target directory; must be non-empty.</param>
public static void WriteDescriptorToStore(DataContainerDescriptor descriptor, string storageRoot)
{
    if (string.IsNullOrEmpty(storageRoot))
    {
        // BUG FIX: previously passed the storageRoot VALUE (null or empty) as the
        // parameter-name argument of ArgumentNullException, producing a useless message
        throw new ArgumentNullException("storageRoot");
    }

    if (descriptor == null)
    {
        throw new ArgumentNullException("descriptor");
    }

    Directory.CreateDirectory(storageRoot);

    // wrap with the current store version so readers can validate compatibility
    var file = new DataContainerDescriptorFile(RamDriverFactory.CurrentStoreVersion(), descriptor);

    using (var writer = new StreamWriter(
        new FileStream(Path.Combine(storageRoot, "descriptor.json"), FileMode.Create, FileAccess.ReadWrite)))
    {
        var serializer = new JsonSerializer();
        serializer.Serialize(writer, file);
        writer.Flush();
    }
}
/// <summary>
/// Convenience overload: prepares a fresh compiler state, compiles the parse tree
/// to an expression body and returns the compiled delegate.
/// </summary>
public static object CompileClause(ParseTreeNode parseTreeNode, DataContainerDescriptor containerDescriptor, RequestExecutionContextCacheInfo cacheInfo, Type returnType)
{
    var state = PrepareCompilerState(containerDescriptor, cacheInfo, returnType);
    var body = CompileClause(state, parseTreeNode, containerDescriptor, cacheInfo, returnType);
    return s_expressionRuntime.Compile(body, state);
}
/// <summary>
/// Ctr. Starts a new, empty container descriptor, to be populated and then Commit()-ed.
/// </summary>
public SchemaBuilder()
{
    m_descriptor = new DataContainerDescriptor();
}
/// <summary>
/// Analyzes a parse tree into an expression body and, when the analysis recorded
/// field or parameter references, wraps the body in a block that declares and
/// initializes one local per reference before evaluating the clause itself.
/// </summary>
public static Expression CompileClause(PqlCompilerState compilerState, ParseTreeNode parseTreeNode, DataContainerDescriptor containerDescriptor, RequestExecutionContextCacheInfo cacheInfo, Type returnType)
{
    var body = s_expressionRuntime.Analyze(parseTreeNode, compilerState);

    var refCount = compilerState.FieldRefs.Count + compilerState.ParamRefs.Count;
    if (refCount > 0)
    {
        var locals = new List<ParameterExpression>(refCount);
        var statements = new List<Expression>(refCount + 1);

        // field references first, then parameter references - each becomes
        // "local = initializer" ahead of the clause body
        foreach (var pair in compilerState.FieldRefs)
        {
            locals.Add(pair.Value.Item1);
            statements.Add(Expression.Assign(pair.Value.Item1, pair.Value.Item2));
        }

        foreach (var pair in compilerState.ParamRefs)
        {
            locals.Add(pair.Value.Item1);
            statements.Add(Expression.Assign(pair.Value.Item1, pair.Value.Item2));
        }

        // the clause expression itself comes last, supplying the block's value
        statements.Add(body);
        body = Expression.Block(locals, statements);
    }

    return s_expressionRuntime.AdjustReturnType(body, returnType);
}
/// <summary>
/// Convenience overload: builds a compiler state, produces the field read-accessor
/// expression and compiles it to a delegate.
/// </summary>
public static object CompileFieldValueExtractorClause(FieldMetadata field, DataContainerDescriptor containerDescriptor, RequestExecutionContextCacheInfo cacheInfo, Type returnType)
{
    var state = PrepareCompilerState(containerDescriptor, cacheInfo, returnType);
    var accessor = CompileFieldValueExtractorClause(state, field, containerDescriptor, cacheInfo, returnType);
    return s_expressionRuntime.Compile(accessor, state);
}
/// <summary>
/// Builds an expression reading one field from the evaluation context's InputRow,
/// coerced to the requested return type.
/// </summary>
public static Expression CompileFieldValueExtractorClause(PqlCompilerState compilerState, FieldMetadata field, DataContainerDescriptor containerDescriptor, RequestExecutionContextCacheInfo cacheInfo, Type returnType)
{
    // locate the field's position among the columns the driver actually fetches
    var inputRow = Expression.Field(compilerState.Context, "InputRow");
    var ordinal = GetFieldOrdinalInDriverFetchFields(cacheInfo.ParsedRequest, field);
    var accessor = DriverRowData.CreateReadAccessor(inputRow, field.DbType, ordinal);
    return s_expressionRuntime.AdjustReturnType(accessor, returnType);
}
/// <summary>
/// Compiles all executable clauses of a parsed request: the WHERE predicate (if any)
/// and the statement-type-specific clauses (SELECT output columns, or
/// INSERT/UPDATE assignments). Special commands and DELETE need no clause compilation.
/// </summary>
/// <exception cref="InvalidOperationException">On an unrecognized statement type.</exception>
private void CompileClauses(DataContainerDescriptor containerDescriptor, RequestExecutionContextCacheInfo cacheInfo)
{
    if (cacheInfo.ParsedRequest.SpecialCommand.IsSpecialCommand)
    {
        return;
    }

    if (cacheInfo.ParsedRequest.BaseDataset.WhereClauseRoot != null)
    {
        cacheInfo.ParsedRequest.BaseDataset.WhereClauseProcessor =
            (Func<ClauseEvaluationContext, bool>)QueryParser.CompileWhereClause(
                containerDescriptor, cacheInfo.ParsedRequest.BaseDataset.WhereClauseRoot, cacheInfo);
    }

    switch (cacheInfo.ParsedRequest.StatementType)
    {
        case StatementType.Select:
            CompileSelectClauses(containerDescriptor, cacheInfo);
            break;
        case StatementType.Update:
            CompileInsertUpdateClauses(m_storageDriver, containerDescriptor, cacheInfo, DriverChangeType.Update);
            break;
        case StatementType.Insert:
            CompileInsertUpdateClauses(m_storageDriver, containerDescriptor, cacheInfo, DriverChangeType.Insert);
            break;
        case StatementType.Delete:
            // DELETE only needs the WHERE predicate compiled above
            break;
        default:
            // was: new Exception(...) - use the specific BCL type; still caught by catch(Exception) callers
            throw new InvalidOperationException("Invalid statement type: " + cacheInfo.ParsedRequest.StatementType);
    }
}
/// <summary>
/// Compiles a WHERE clause; the result type is pinned to boolean.
/// </summary>
public static object CompileWhereClause(DataContainerDescriptor containerDescriptor, ParseTreeNode parseTreeNode, RequestExecutionContextCacheInfo cacheInfo)
{
    // a predicate must evaluate to bool, so force the return type here
    return CompileClause(parseTreeNode, containerDescriptor, cacheInfo, typeof(bool));
}
/// <summary>
/// Returns the built descriptor and invalidates the builder; a second call throws.
/// </summary>
/// <exception cref="InvalidOperationException">When already committed.</exception>
public DataContainerDescriptor Commit()
{
    if (m_descriptor == null)
    {
        throw new InvalidOperationException("Cannot invoke Commit more than once");
    }

    var committed = m_descriptor;
    m_descriptor = null;
    return committed;
}
/// <summary>
/// Ctr. Copy-constructor; a null source leaves all settings at their defaults.
/// </summary>
public RamDriverSettings(RamDriverSettings settings)
{
    if (settings == null)
    {
        return;
    }

    InitializationCommand = settings.InitializationCommand;
    StorageRoot = settings.StorageRoot;
    Descriptor = settings.Descriptor;
}
/// <summary>
/// Builds the demo container descriptor: twelve fields cycling through six DbTypes
/// (ids 1..12), plus an Int64 primary key field "id" (id 13), on document type "testDoc".
/// </summary>
private static DataContainerDescriptor BuildDemoContainerDescriptor()
{
    var result = new DataContainerDescriptor();
    result.AddDocumentTypeName("testDoc");
    var testDocId = result.RequireDocumentTypeName("testDoc");

    // the repeating column pattern: (DbType, description prefix) per slot
    var dbTypes = new[] { DbType.Byte, DbType.Guid, DbType.String, DbType.Binary, DbType.Decimal, DbType.Boolean };
    var prefixes = new[] { "Byte", "Guid", "String", "Binary", "Decimal", "Bool" };

    var count = 12;
    for (var fieldId = 1; fieldId <= count; fieldId++)
    {
        var slot = (fieldId - 1) % dbTypes.Length;
        result.AddField(new FieldMetadata(fieldId, "Field" + fieldId, prefixes[slot] + fieldId, dbTypes[slot], testDocId));
    }

    result.AddField(new FieldMetadata(count + 1, "id", "primary key", DbType.Int64, testDocId));

    var fieldIds = result.EnumerateFields().Select(x => x.FieldId).ToArray();
    result.AddDocumentTypeDescriptor(new DocumentTypeDescriptor("testDoc", "testDoc", 1, "id", fieldIds));
    return result;
}
/// <summary>
/// Ctr. Creates a pool of Irony parsers (one per concurrency slot), the query
/// preprocessor, and a cache of prefabricated parse-tree nodes used to expand
/// "select * from ..." without re-allocating nodes per request.
/// </summary>
/// <param name="containerDescriptor">Schema of the container; must not be null.</param>
/// <param name="maxConcurrency">Number of pooled parser instances.</param>
public QueryParser(DataContainerDescriptor containerDescriptor, int maxConcurrency)
{
    if (containerDescriptor == null)
    {
        throw new ArgumentNullException("containerDescriptor");
    }

    // pre-fill the pool so no parser is constructed on the request path
    m_parsers = new ObjectPool<Irony.Parsing.Parser>(maxConcurrency, null);
    for (var i = 0; i < maxConcurrency; i++)
    {
        m_parsers.Return(new Irony.Parsing.Parser(LangData, PqlNonTerminal));
    }

    m_containerDescriptor = containerDescriptor;
    m_preprocessor = new QueryPreprocessor(containerDescriptor);

    // these predefined instances of ParseTreeNode are substituted when parsing "select * from .." statement,
    // in order to avoid allocating them every time
    m_simpleFieldAccessorNodes = new Dictionary<int, ParseTreeNode>();
    foreach (var field in m_containerDescriptor.EnumerateFields())
    {
        // generate columnItem -> columnSource -> Id -> id_simple hierarchy, exactly same structure as it comes out of grammar-based parser
        var idNode = new ParseTreeNode(new NonTerminal("Id"), new SourceSpan());
        idNode.ChildNodes.Add(new ParseTreeNode(new Token(new Terminal("id_simple"), new SourceLocation(), field.Name, field.Name)));

        var columnSourceNode = new ParseTreeNode(new NonTerminal("columnSource"), new SourceSpan());
        columnSourceNode.ChildNodes.Add(idNode);

        var columnItemNode = new ParseTreeNode(new NonTerminal("columnItem"), new SourceSpan());
        columnItemNode.ChildNodes.Add(columnSourceNode);

        m_simpleFieldAccessorNodes.Add(field.FieldId, columnItemNode);
    }
}
/// <summary>
/// Resolves an identifier parse node to a field of the given document type.
/// For dot-separated identifiers the field name is the part before the first dot.
/// Returns null when the node is not a simple identifier or no such field exists.
/// </summary>
/// <exception cref="CompilationException">
/// When the identifier resolves to a field but uses dotted notation, which is
/// not allowed in the Pql server context.
/// </exception>
private static FieldMetadata TryGetFieldByIdentifierNode(ParseTreeNode node, DataContainerDescriptor containerDescriptor, int docType)
{
    if (node.ChildNodes.Count < 1)
    {
        return null;
    }

    var idsimple = node.ChildNodes[0];
    if (0 != StringComparer.Ordinal.Compare(idsimple.Term.Name, "id_simple"))
    {
        return null;
    }

    var field = containerDescriptor.TryGetField(docType, idsimple.Token.ValueString);
    if (field != null && node.ChildNodes.Count > 1)
    {
        // grammar accepts dotted (object-path) access, but this server context rejects it
        throw new CompilationException("Dotted notation is supported, but not allowed in Pql server context");
    }

    return field;
}
/// <summary>
/// Packages the parsed request and the container descriptor into a context
/// object for tree iteration.
/// </summary>
private static TreeIteratorContext GetTreeIteratorContext(ParsedRequest parsedRequest, DataContainerDescriptor containerDescriptor)
{
    return new TreeIteratorContext
    {
        ContainerDescriptor = containerDescriptor,
        ParsedRequest = parsedRequest
    };
}
/// <summary>
/// Builds a fresh compiler state for one clause compilation, bound to the
/// shared runtime and the ClauseEvaluationContext context type.
/// </summary>
public static PqlCompilerState PrepareCompilerState(DataContainerDescriptor containerDescriptor, RequestExecutionContextCacheInfo cacheInfo, Type returnType)
{
    var parsedRequest = cacheInfo.ParsedRequest;
    var requestParams = cacheInfo.RequestParams;

    return new PqlCompilerState(
        s_expressionRuntime,
        parsedRequest,
        requestParams,
        containerDescriptor,
        typeof(ClauseEvaluationContext),
        returnType);
}
/// <summary>
/// Lazily initializes the driver's descriptor and data container, exactly once.
/// Uses double-checked locking: the unlocked read is a fast path; the check is
/// repeated under the lock before doing any work.
/// </summary>
private void CheckInitialized()
{
    if (m_initialized)
    {
        return;
    }

    lock (m_thisLock)
    {
        // re-check under the lock: another thread may have finished initialization
        if (m_initialized)
        {
            return;
        }

        // descriptor source priority: "demo" command > explicit settings > persisted store
        if (0 == StringComparer.OrdinalIgnoreCase.Compare("demo", m_settings.InitializationCommand))
        {
            m_descriptor = BuildDemoContainerDescriptor();
        }
        else if (m_settings.Descriptor != null)
        {
            m_descriptor = m_settings.Descriptor;
        }
        else
        {
            m_descriptor = GetDescriptorFromStore();
        }

        // only flag as initialized when a descriptor was actually obtained
        if (m_descriptor != null)
        {
            m_dataContainer = new DataContainer(m_tracer, m_descriptor, m_settings.StorageRoot);
            m_initialized = true;
        }
    }
}
/// <summary>
/// Ctr. Wires the engine together: request cache, storage driver, query parser
/// pool and the table of in-flight request processors.
/// </summary>
/// <param name="tracer">Logger sink; must not be null.</param>
/// <param name="instanceName">Name used to scope the parsed-request cache.</param>
/// <param name="maxConcurrency">Allowed range is 1..10000.</param>
/// <param name="storageDriver">Storage backend; must not be null.</param>
/// <param name="containerDescriptor">Container schema; must not be null.</param>
public DataEngine(ITracer tracer, string instanceName, int maxConcurrency, IStorageDriver storageDriver, DataContainerDescriptor containerDescriptor)
{
    if (tracer == null)
    {
        throw new ArgumentNullException("tracer");
    }

    if (maxConcurrency <= 0 || maxConcurrency > 10000)
    {
        throw new ArgumentOutOfRangeException("maxConcurrency");
    }

    if (storageDriver == null)
    {
        throw new ArgumentNullException("storageDriver");
    }

    if (containerDescriptor == null)
    {
        throw new ArgumentNullException("containerDescriptor");
    }

    m_tracer = tracer;
    m_containerDescriptor = containerDescriptor;
    m_maxConcurrency = maxConcurrency;
    m_parsedRequestCache = new ParsedRequestCache(instanceName);
    m_storageDriver = storageDriver;
    m_parser = new QueryParser(containerDescriptor, maxConcurrency);
    m_activeProcessors = new ConcurrentDictionary<RequestExecutionContext, Task>(m_maxConcurrency, m_maxConcurrency);
    m_utcLastUsedAt = DateTime.UtcNow;
}
/// <summary>
/// Ctr. Sets up the container-wide memory pool and pre-creates one lock object
/// per document type; the per-type data containers themselves are created lazily.
/// </summary>
/// <param name="tracer">Logger sink; must not be null.</param>
/// <param name="dataContainerDescriptor">Container schema; must not be null.</param>
/// <param name="storageRoot">May be null when persistence is not desired.</param>
public DataContainer(ITracer tracer, DataContainerDescriptor dataContainerDescriptor, string storageRoot)
{
    if (tracer == null)
    {
        throw new ArgumentNullException("tracer");
    }

    if (dataContainerDescriptor == null)
    {
        throw new ArgumentNullException("dataContainerDescriptor");
    }

    // intentionally allowed to be null - in case if we don't want to use any persistence
    m_storageRoot = storageRoot;

    m_tracer = tracer;
    m_memoryPool = new DynamicMemoryPool();
    m_dataContainerDescriptor = dataContainerDescriptor;
    m_documentDataContainers = new Dictionary<int, DocumentDataContainer>(50);
    m_documentDataContainerLocks = new Dictionary<int, object>(50);

    foreach (var docType in dataContainerDescriptor.EnumerateDocumentTypes())
    {
        m_documentDataContainerLocks.Add(docType.DocumentType, new object());
    }
}