/// <summary>
/// Initializes a new change buffer for the given target entity.
/// </summary>
/// <param name="targetEntity">Identifier of the entity whose changes this buffer carries.</param>
/// <param name="ordinalOfPrimaryKey">Ordinal of the primary key field within <paramref name="fields"/>.</param>
/// <param name="fields">Metadata of all buffered fields; their DbTypes define the layout of the row data.</param>
public DriverChangeBuffer(int targetEntity, int ordinalOfPrimaryKey, FieldMetadata[] fields)
{
    Fields = fields;
    TargetEntity = targetEntity;
    // Row data layout is driven by the DbType of each field, in field order.
    Data = new DriverRowData(fields.Select(x => x.DbType).ToArray());
    OrdinalOfPrimaryKey = ordinalOfPrimaryKey;
}
/// <summary>
/// Registers a field in the container, enforcing a globally unique field id and a
/// case-insensitively unique field name within the owning document type.
/// </summary>
/// <param name="field">Field metadata to register; must not be null.</param>
public void AddField(FieldMetadata field)
{
    if (field == null)
    {
        throw new ArgumentNullException("field");
    }

    if (Fields.ContainsKey(field.FieldId))
    {
        throw new ArgumentException("Duplicate field id: " + field.FieldId, "field");
    }

    var docType = field.OwnerDocumentType;

    // Lazily create the per-document-type name-to-id index on first use.
    Dictionary<string, int> nameToId;
    if (!DocTypeFieldNameToFieldId.TryGetValue(docType, out nameToId))
    {
        nameToId = new Dictionary<string, int>(StringComparer.OrdinalIgnoreCase);
        DocTypeFieldNameToFieldId.Add(docType, nameToId);
    }

    if (nameToId.ContainsKey(field.Name))
    {
        throw new ArgumentException(
            string.Format("Entity {0} already has another field with name {1}", docType, field.Name));
    }

    // Both checks passed: register in the global map and the per-doc-type name index.
    Fields.Add(field.FieldId, field);
    nameToId.Add(field.Name, field.FieldId);
}
/// <summary>
/// Defines a new document type with a primary key and registers it, together with its
/// fields, in the container descriptor currently being built.
/// </summary>
/// <param name="docTypeName">Name of the new document type; must not be null or empty.</param>
/// <param name="baseDatasetName">Underlying dataset name; falls back to <paramref name="docTypeName"/> when null.</param>
/// <param name="primaryKeyFieldName">Name of the field that acts as the primary key.</param>
/// <param name="data">Flat pairs of (field name, DbType); length must be even and non-zero.</param>
/// <returns>The descriptor created for the new document type.</returns>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="docTypeName"/> or <paramref name="data"/> is null/empty.</exception>
/// <exception cref="ArgumentException">Thrown when <paramref name="data"/> has an odd or zero length.</exception>
/// <exception cref="InvalidOperationException">Thrown when called outside the BeginDefineDocumentTypes..Commit window.</exception>
public DocumentTypeDescriptor AddDocumentTypeDescriptorWithPrimaryKey(string docTypeName, string baseDatasetName, string primaryKeyFieldName, params object[] data)
{
    if (string.IsNullOrEmpty(docTypeName))
    {
        // BUGFIX: previously passed the argument VALUE (null or "") as the parameter name.
        throw new ArgumentNullException("docTypeName");
    }

    if (data == null)
    {
        throw new ArgumentNullException("data");
    }

    if (data.Length == 0 || 0 != data.Length % 2)
    {
        throw new ArgumentException("Invalid data array length: " + data.Length, "data");
    }

    if (m_fieldsMap == null)
    {
        throw new InvalidOperationException("Cannot invoke AddDocumentTypeDescriptor before BeginDefineDocumentTypes");
    }

    if (m_descriptor == null)
    {
        throw new InvalidOperationException("Cannot invoke AddDocumentTypeDescriptor after Commit was called");
    }

    var docType = m_descriptor.RequireDocumentTypeName(docTypeName);

    // Each (name, DbType) pair becomes one field with a freshly allocated id.
    var fields = new FieldMetadata[data.Length / 2];
    for (var i = 0; i < fields.Length; i++)
    {
        // NOTE(review): data[i * 2] is passed for BOTH the second and third ctor arguments
        // (presumably name and display name are intentionally the same) — confirm the third
        // argument was not meant to be a separate display-name entry.
        fields[i] = new FieldMetadata(++m_lastFieldId, (string)data[i * 2], (string)data[i * 2], (DbType)data[i * 2 + 1], docType);
    }

    var result = new DocumentTypeDescriptor(
        docTypeName, baseDatasetName ?? docTypeName, docType, primaryKeyFieldName, fields.Select(x => x.FieldId).ToArray());

    m_descriptor.AddDocumentTypeDescriptor(result);
    foreach (var field in fields)
    {
        m_descriptor.AddField(field);
    }

    return result;
}
/// <summary>
/// Defines a new document type with a primary key and registers it, together with its
/// fields, in the container descriptor currently being built.
/// </summary>
/// <param name="docTypeName">Name of the new document type; must not be null or empty.</param>
/// <param name="baseDatasetName">Underlying dataset name; falls back to <paramref name="docTypeName"/> when null.</param>
/// <param name="primaryKeyFieldName">Name of the field that acts as the primary key.</param>
/// <param name="data">Flat pairs of (field name, DbType); length must be even and non-zero.</param>
/// <returns>The descriptor created for the new document type.</returns>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="docTypeName"/> or <paramref name="data"/> is null/empty.</exception>
/// <exception cref="ArgumentException">Thrown when <paramref name="data"/> has an odd or zero length.</exception>
/// <exception cref="InvalidOperationException">Thrown when called outside the BeginDefineDocumentTypes..Commit window.</exception>
public DocumentTypeDescriptor AddDocumentTypeDescriptorWithPrimaryKey(string docTypeName, string baseDatasetName, string primaryKeyFieldName, params object[] data)
{
    if (string.IsNullOrEmpty(docTypeName))
    {
        // BUGFIX: previously passed the argument VALUE (null or "") as the parameter name.
        throw new ArgumentNullException("docTypeName");
    }

    if (data == null)
    {
        throw new ArgumentNullException("data");
    }

    if (data.Length == 0 || 0 != data.Length % 2)
    {
        throw new ArgumentException("Invalid data array length: " + data.Length, "data");
    }

    if (m_fieldsMap == null)
    {
        throw new InvalidOperationException("Cannot invoke AddDocumentTypeDescriptor before BeginDefineDocumentTypes");
    }

    if (m_descriptor == null)
    {
        throw new InvalidOperationException("Cannot invoke AddDocumentTypeDescriptor after Commit was called");
    }

    var docType = m_descriptor.RequireDocumentTypeName(docTypeName);

    // Each (name, DbType) pair becomes one field with a freshly allocated id.
    var fields = new FieldMetadata[data.Length / 2];
    for (var i = 0; i < fields.Length; i++)
    {
        // NOTE(review): data[i*2] is passed for BOTH the second and third ctor arguments
        // (presumably name and display name are intentionally the same) — confirm the third
        // argument was not meant to be a separate display-name entry.
        fields[i] = new FieldMetadata(++m_lastFieldId, (string)data[i * 2], (string)data[i * 2], (DbType)data[i * 2 + 1], docType);
    }

    var result = new DocumentTypeDescriptor(
        docTypeName, baseDatasetName ?? docTypeName, docType, primaryKeyFieldName, fields.Select(x => x.FieldId).ToArray());

    m_descriptor.AddDocumentTypeDescriptor(result);
    foreach (var field in fields)
    {
        m_descriptor.AddField(field);
    }

    return result;
}
/// <summary>
/// Resets every piece of request state to its initial, empty configuration so this
/// instance can be pooled and reused for the next request.
/// </summary>
public void Clear()
{
    // Scalar flags and identity; -1 / invalid-cast values mark "not set".
    IsBulk = false;
    HaveParametersDataInput = false;
    OrdinalOfPrimaryKey = -1;
    StatementType = (StatementType)(-1);
    TargetEntity = null;
    TargetEntityPkField = null;

    // Release any attached bulk input source before clearing dependent state.
    if (Bulk != null)
    {
        Bulk.Detach();
    }

    // SELECT projection state.
    Select.SelectClauses.Clear();
    Select.SelectFields.Clear();
    Select.OutputColumns.Clear();

    // INSERT/UPDATE modification state.
    Modify.InsertUpdateSetClauses.Clear();
    Modify.ModifiedFields.Clear();
    Modify.UpdateAssignments.Clear();

    // Base dataset: fetched fields, WHERE, ORDER BY and paging (restored to defaults).
    BaseDataset.BaseFieldsMainCount = 0;
    BaseDataset.BaseFields.Clear();
    BaseDataset.WhereClauseFields.Clear();
    BaseDataset.WhereClauseRoot = null;
    BaseDataset.WhereClauseProcessor = null;
    BaseDataset.OrderClauseFields.Clear();
    BaseDataset.OrderClause = null;
    BaseDataset.Paging.Offset = PagingOptions.DefaultPagingOffsetFunc;
    BaseDataset.Paging.PageSize = PagingOptions.DefaultPagingPageSizeFunc;

    // Bulk-input field list.
    BulkInput.BulkInputFields.Clear();

    // Parameter arrays are dropped entirely (re-allocated on next use).
    Params.Names = null;
    Params.InputValues = null;
    Params.InputCollections = null;
    Params.OrdinalToLocalOrdinal = null;
    Params.DataTypes = null;

    // Special-command marker.
    SpecialCommand.IsSpecialCommand = false;
    SpecialCommand.CommandType = SpecialCommandData.SpecialCommandType.InvalidValue;
}
/// <summary>
/// Resets every piece of request state to its initial, empty configuration so this
/// instance can be pooled and reused for the next request.
/// </summary>
public void Clear()
{
    // Scalar flags and identity; -1 / invalid-cast values mark "not set".
    IsBulk = false;
    HaveParametersDataInput = false;
    OrdinalOfPrimaryKey = -1;
    StatementType = (StatementType)(-1);
    TargetEntity = null;
    TargetEntityPkField = null;

    // Release any attached bulk input source before clearing dependent state.
    if (Bulk != null)
    {
        Bulk.Detach();
    }

    // SELECT projection state.
    Select.SelectClauses.Clear();
    Select.SelectFields.Clear();
    Select.OutputColumns.Clear();

    // INSERT/UPDATE modification state.
    Modify.InsertUpdateSetClauses.Clear();
    Modify.ModifiedFields.Clear();
    Modify.UpdateAssignments.Clear();

    // Base dataset: fetched fields, WHERE, ORDER BY and paging (restored to defaults).
    BaseDataset.BaseFieldsMainCount = 0;
    BaseDataset.BaseFields.Clear();
    BaseDataset.WhereClauseFields.Clear();
    BaseDataset.WhereClauseRoot = null;
    BaseDataset.WhereClauseProcessor = null;
    BaseDataset.OrderClauseFields.Clear();
    BaseDataset.OrderClause = null;
    BaseDataset.Paging.Offset = PagingOptions.DefaultPagingOffsetFunc;
    BaseDataset.Paging.PageSize = PagingOptions.DefaultPagingPageSizeFunc;

    // Bulk-input field list.
    BulkInput.BulkInputFields.Clear();

    // Parameter arrays are dropped entirely (re-allocated on next use).
    Params.Names = null;
    Params.InputValues = null;
    Params.InputCollections = null;
    Params.OrdinalToLocalOrdinal = null;
    Params.DataTypes = null;

    // Special-command marker.
    SpecialCommand.IsSpecialCommand = false;
    SpecialCommand.CommandType = SpecialCommandData.SpecialCommandType.InvalidValue;
}
/// <summary>
/// Builds the file name used for a column's not-nulls data: "{name}-{fieldId}-{dbType}.fnn".
/// </summary>
/// <param name="field">Field whose file name is being constructed.</param>
private string GetColumnNotNullsFileName(FieldMetadata field)
{
    return field.Name + "-" + field.FieldId + "-" + field.DbType + ".fnn";
}
/// <summary>
/// Returns an expression that refers to the given field's value in the driver input row.
/// The accessor variable is created on first use and cached in the compiler state, so
/// subsequent requests for the same field id reuse the same variable.
/// </summary>
/// <param name="parsedRequest">Parsed request, used to locate the field's driver-fetch ordinal.</param>
/// <param name="compilerState">Compilation context holding the field-reference cache.</param>
/// <param name="field">Field whose reference expression is requested.</param>
private static Expression GetOrAddFieldRefToCompilationContext(ParsedRequest parsedRequest, PqlCompilerState compilerState, FieldMetadata field)
{
    // Fast path: this field was already referenced during compilation.
    Tuple<ParameterExpression, Expression> existing;
    if (compilerState.FieldRefs.TryGetValue(field.FieldId, out existing))
    {
        return existing.Item1;
    }

    // First reference: build a typed read accessor into InputRow and cache it
    // together with a variable of the accessor's type.
    var ordinal = GetFieldOrdinalInDriverFetchFields(parsedRequest, field);
    var inputRow = Expression.Field(compilerState.Context, "InputRow");
    var accessor = DriverRowData.CreateReadAccessor(inputRow, field.DbType, ordinal);
    var variable = Expression.Variable(accessor.Type);

    compilerState.FieldRefs.Add(field.FieldId, new Tuple<ParameterExpression, Expression>(variable, accessor));
    return variable;
}
/// <summary>
/// Returns the ordinal of the given field within the dataset returned by the storage driver.
/// </summary>
/// <param name="parsedRequest">Parsed request whose base-fields list defines driver-fetch order.</param>
/// <param name="field">Field to locate.</param>
/// <returns>Zero-based ordinal of <paramref name="field"/> in the driver fetch fields.</returns>
/// <exception cref="Exception">Thrown when the field is not part of the driver fetch fields (internal error).</exception>
private static int GetFieldOrdinalInDriverFetchFields(ParsedRequest parsedRequest, FieldMetadata field)
{
    // Get ordinal of this field in the dataset returned by storage driver.
    var ordinal = parsedRequest.BaseDataset.BaseFields.IndexOf(field);
    if (ordinal < 0)
    {
        // BUGFIX: message previously read "does have have field".
        // NOTE(review): throwing bare Exception is kept for caller compatibility;
        // consider InvalidOperationException in a follow-up.
        throw new Exception(
            string.Format(
                "Internal error: driver fetch fields does not have field {0}, id {1} of entity {2}",
                field.Name, field.FieldId, parsedRequest.TargetEntity.Name));
    }

    return ordinal;
}
/// <summary>
/// Builds an expression that reads the given field's value from the driver input row
/// and adjusts it to the requested return type.
/// </summary>
/// <param name="compilerState">Compilation context providing access to the input row.</param>
/// <param name="field">Field to extract.</param>
/// <param name="containerDescriptor">Container descriptor (part of the overload contract).</param>
/// <param name="cacheInfo">Cache entry holding the parsed request used to resolve the field ordinal.</param>
/// <param name="returnType">Type the extracted value must be adjusted to.</param>
public static Expression CompileFieldValueExtractorClause(PqlCompilerState compilerState, FieldMetadata field, DataContainerDescriptor containerDescriptor, RequestExecutionContextCacheInfo cacheInfo, Type returnType)
{
    // Resolve ordinal first (may throw for unknown fields), then read from InputRow.
    var ordinal = GetFieldOrdinalInDriverFetchFields(cacheInfo.ParsedRequest, field);
    var inputRow = Expression.Field(compilerState.Context, "InputRow");
    var rawAccessor = DriverRowData.CreateReadAccessor(inputRow, field.DbType, ordinal);
    return s_expressionRuntime.AdjustReturnType(rawAccessor, returnType);
}
/// <summary>
/// Compiles an executable field-value extractor for the given field: prepares a fresh
/// compiler state, builds the extraction expression, and compiles it to a callable.
/// </summary>
/// <param name="field">Field to extract.</param>
/// <param name="containerDescriptor">Container descriptor used to prepare the compiler state.</param>
/// <param name="cacheInfo">Cache entry for the request being executed.</param>
/// <param name="returnType">Type the extracted value must be adjusted to.</param>
/// <returns>The compiled extractor produced by the expression runtime.</returns>
public static object CompileFieldValueExtractorClause(FieldMetadata field, DataContainerDescriptor containerDescriptor, RequestExecutionContextCacheInfo cacheInfo, Type returnType)
{
    var state = PrepareCompilerState(containerDescriptor, cacheInfo, returnType);
    var extractor = CompileFieldValueExtractorClause(state, field, containerDescriptor, cacheInfo, returnType);
    return s_expressionRuntime.Compile(extractor, state);
}