private void ParseBulkRequest(DataRequestBulk requestBulk, ParsedRequest parsedRequest)
{
    // Only INSERT and UPDATE are valid bulk statement types.
    switch (requestBulk.DbStatementType)
    {
        case StatementType.Insert:
        case StatementType.Update:
            parsedRequest.StatementType = requestBulk.DbStatementType;
            break;
        default:
            throw new ArgumentOutOfRangeException("requestBulk", requestBulk.DbStatementType, "Invalid bulk statement type");
    }

    parsedRequest.TargetEntity = m_containerDescriptor.RequireDocumentType(
        m_containerDescriptor.RequireDocumentTypeName(requestBulk.EntityName));

    if (string.IsNullOrEmpty(parsedRequest.TargetEntity.PrimaryKeyFieldName))
    {
        throw new Exception("Target entity does not have a primary key, cannot perform bulk operations on it");
    }

    parsedRequest.TargetEntityPkField = m_containerDescriptor.RequireField(
        parsedRequest.TargetEntity.DocumentType, parsedRequest.TargetEntity.PrimaryKeyFieldName);

    // For bulk requests, the primary key value is always expected at the first position of the driver row data.
    parsedRequest.OrdinalOfPrimaryKey = 0;
    if (0 != StringComparer.OrdinalIgnoreCase.Compare(requestBulk.FieldNames[0], parsedRequest.TargetEntityPkField.Name))
    {
        throw new Exception("First field in bulk request input schema on this entity must be the primary key field");
    }

    // Validate every field in the input schema: the primary key may appear only at position zero,
    // and no field may be assigned more than once.
    for (var ordinal = 0; ordinal < requestBulk.FieldNames.Length; ordinal++)
    {
        var fieldName = requestBulk.FieldNames[ordinal];
        var field = m_containerDescriptor.RequireField(parsedRequest.TargetEntity.DocumentType, fieldName);

        if (ordinal != 0 && ReferenceEquals(parsedRequest.TargetEntityPkField, field))
        {
            throw new Exception("Primary key field may only be used in first position");
        }

        if (parsedRequest.Modify.ModifiedFields.Contains(field))
        {
            throw new CompilationException("A field can be assigned only once: " + field.Name, null);
        }

        parsedRequest.BulkInput.BulkInputFields.Add(field);
        parsedRequest.Modify.ModifiedFields.Add(field);
        parsedRequest.Modify.InsertUpdateSetClauses.Add(null);
    }
}
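// Caller-side sketch (illustrative only, not from the original source): a bulk
// request that passes the checks above must carry an Insert or Update statement
// type, name a known entity, and put the primary key first in the input schema.
// The entity and field names below ("testDoc", "id", "fieldString1") are
// hypothetical, and the object-initializer shape of DataRequestBulk is an
// assumption about the protocol contract, not taken from this file.
//
//     var requestBulk = new DataRequestBulk
//     {
//         DbStatementType = StatementType.Update,        // Insert or Update only
//         EntityName = "testDoc",                        // must resolve via RequireDocumentTypeName
//         FieldNames = new[] { "id", "fieldString1" }    // primary key must come first
//     };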
public DocumentDataContainer(
    DataContainerDescriptor dataContainerDescriptor,
    DocumentTypeDescriptor documentTypeDescriptor,
    IUnmanagedAllocator allocator,
    ITracer tracer)
{
    if (tracer == null)
    {
        throw new ArgumentNullException("tracer");
    }
    if (dataContainerDescriptor == null)
    {
        throw new ArgumentNullException("dataContainerDescriptor");
    }
    if (documentTypeDescriptor == null)
    {
        throw new ArgumentNullException("documentTypeDescriptor");
    }
    if (allocator == null)
    {
        throw new ArgumentNullException("allocator");
    }

    m_logger = tracer;
    m_allocator = allocator;
    DocDesc = documentTypeDescriptor;
    DataContainerDescriptor = dataContainerDescriptor;

    ColumnStores = new ColumnDataBase[DocDesc.Fields.Length];
    DocumentKeys = new ExpandableArrayOfKeys(m_allocator);
    FieldIdToColumnStore = new Dictionary<int, int>(ColumnStores.Length * 2);
    PrimaryKeyFieldId = dataContainerDescriptor.RequireField(
        documentTypeDescriptor.DocumentType, documentTypeDescriptor.PrimaryKeyFieldName).FieldId;

    // Create one column store per field and remember its position by field id.
    for (var i = 0; i < DocDesc.Fields.Length; i++)
    {
        var field = dataContainerDescriptor.RequireField(DocDesc.Fields[i]);
        ColumnStores[i] = CreateColumnStore(field.DbType, m_allocator, null);
        FieldIdToColumnStore.Add(field.FieldId, i);
    }

    DocumentIdToIndex = new ConcurrentHashmapOfKeys(m_allocator);
    ValidDocumentsBitmap = new BitVector(m_allocator);
    SortIndexManager = new SortIndexManager(this);
    StructureLock = new ReaderWriterLockSlim(LockRecursionPolicy.SupportsRecursion);
}
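// Lookup sketch (hypothetical helper, not in the original source): the map built
// in the constructor lets a field id resolve to its column store in O(1). A
// guarded accessor over those same two structures might look like this.
private ColumnDataBase RequireColumnStore(int fieldId)
{
    int index;
    if (!FieldIdToColumnStore.TryGetValue(fieldId, out index))
    {
        throw new ArgumentException("Unknown field id: " + fieldId, "fieldId");
    }

    return ColumnStores[index];
}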
public void FlushDataToStore(string docRootPath)
{
    CheckState();

    if (docRootPath == null || !Directory.Exists(docRootPath))
    {
        throw new ArgumentException("Storage root is invalid: " + docRootPath);
    }

    StructureLock.EnterWriteLock();
    try
    {
        // Two tasks for the document keys plus a not-nulls/data pair per column.
        var tasks = new Task[2 + FieldIdToColumnStore.Count * 2];
        var count = 0;

        // First task writes the validity bitmap; its continuation writes the keys.
        tasks[count] = new Task(
            () =>
            {
                using (var writer = new BinaryWriter(
                    new FileStream(
                        Path.Combine(docRootPath, "_keysvalid.dat"),
                        FileMode.Create, FileAccess.ReadWrite, FileShare.None, 1 << 22, FileOptions.None)))
                {
                    //WriteBitVectorToStore(writer, ValidDocumentsBitmap, m_untrimmedDocumentCount);
                    ValidDocumentsBitmap.Write(writer, (ulong)m_untrimmedDocumentCount);
                }
            },
            TaskCreationOptions.LongRunning);
        count++;

        tasks[count] = tasks[count - 1].ContinueWith(
            prev =>
            {
                using (var writer = new BinaryWriter(
                    new FileStream(
                        Path.Combine(docRootPath, "_keys.dat"),
                        FileMode.Create, FileAccess.ReadWrite, FileShare.None, 1 << 22, FileOptions.None)))
                {
                    DocumentKeys.Write(writer, (ulong)m_untrimmedDocumentCount, ValidDocumentsBitmap);
                }
            },
            CancellationToken.None, TaskContinuationOptions.LongRunning, TaskScheduler.Default);
        count++;

        // Start the antecedent only after its continuation is attached.
        tasks[count - 2].Start();

        foreach (var pair in FieldIdToColumnStore)
        {
            var field = DataContainerDescriptor.RequireField(pair.Key);
            var colStore = ColumnStores[FieldIdToColumnStore[pair.Key]];
            var colDataPath = Path.Combine(docRootPath, GetColumnDataFileName(field));
            var colNotNullsPath = Path.Combine(docRootPath, GetColumnNotNullsFileName(field));

            // For each column: write the not-nulls bitmap first, then the column data.
            tasks[count] = new Task(
                () =>
                {
                    using (var writer = new BinaryWriter(new FileStream(
                        colNotNullsPath, FileMode.Create, FileAccess.ReadWrite, FileShare.None, 1 << 22, FileOptions.None)))
                    {
                        colStore.NotNulls.Write(writer, (ulong)m_untrimmedDocumentCount);
                        writer.Flush();
                    }
                },
                TaskCreationOptions.LongRunning);
            count++;

            tasks[count] = tasks[count - 1].ContinueWith(
                prev =>
                {
                    using (var writer = new BinaryWriter(new FileStream(
                        colDataPath, FileMode.Create, FileAccess.ReadWrite, FileShare.None, 1 << 22, FileOptions.None)))
                    {
                        colStore.WriteData(writer, m_untrimmedDocumentCount);
                        writer.Flush();
                    }
                },
                CancellationToken.None, TaskContinuationOptions.LongRunning, TaskScheduler.Default);
            count++;

            tasks[count - 2].Start();
        }

        Task.WaitAll(tasks);
    }
    finally
    {
        StructureLock.ExitWriteLock();
    }
}
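// Pattern sketch (illustrative, not from the original source): FlushDataToStore
// builds each antecedent as an unstarted Task, attaches its continuation, and
// only then calls Start(), so the continuation is guaranteed to be registered
// before the first write can complete. A self-contained demo of that scheduling
// shape, using only System.Threading.Tasks APIs already referenced above:
private static void ChainedWriteDemo()
{
    var first = new Task(
        () => Console.WriteLine("write not-nulls bitmap"),
        TaskCreationOptions.LongRunning);

    var second = first.ContinueWith(
        prev => Console.WriteLine("write column data"),
        CancellationToken.None, TaskContinuationOptions.LongRunning, TaskScheduler.Default);

    first.Start(); // safe: continuation already attached
    Task.WaitAll(first, second);
}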
public void BeginPrepareColumnData(int fieldId)
{
    // Resolve the owning document type of this field, then schedule its column store load.
    var docType = m_descriptor.RequireField(fieldId).OwnerDocumentType;
    m_dataContainer.RequireDocumentContainer(docType).BeginLoadColumnStore(fieldId);
}
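// Usage sketch (hypothetical helper, not in the original source): warm up all
// columns of one document type by scheduling each field's load. Relies on
// DocumentTypeDescriptor.Fields being the array of field ids, as iterated in
// the DocumentDataContainer constructor above.
public void BeginPrepareAllColumnsData(DocumentTypeDescriptor docTypeDescriptor)
{
    foreach (var fieldId in docTypeDescriptor.Fields)
    {
        BeginPrepareColumnData(fieldId);
    }
}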