Пример #1
0
        /// <summary>
        /// Builds the parameter list that matches the generated update SQL.
        /// Non-primary-key columns come first (LOB columns only when UpdateLOB is set);
        /// the primary-key parameter is appended last to line up with the WHERE clause.
        /// </summary>
        /// <param name="meta">Batch metadata holding the table definition and database accessor.</param>
        /// <param name="entities">Entities whose property values are read into the batch parameters.</param>
        /// <returns>The parameters, ordered to match the update statement.</returns>
        private IDbDataParameter[] GenerateUpdateParameters(EntityBatch meta, IList <Entity> entities)
        {
            var accesser   = meta.DBA.RawAccesser;
            var tableMeta  = meta.Table;
            var includeLob = this.UpdateLOB;

            // Each parameter's value is the array of one column's values across all entities.
            var result = new List <IDbDataParameter>();

            foreach (var column in tableMeta.Columns)
            {
                if (column.Info.IsPrimaryKey) { continue; }
                if (!includeLob && column.IsLOB) { continue; }

                result.Add(ReadIntoBatchParameter(entities, column, accesser));
            }

            // The primary-key parameter goes last.
            result.Add(ReadIntoBatchParameter(entities, tableMeta.PKColumn, accesser));

            return result.ToArray();
        }
Пример #2
0
        /// <summary>
        /// Builds and executes an UPDATE statement for each record in the batch,
        /// recording the generated SQL and any execution error on the record itself.
        /// Every record is marked as processed, even on failure.
        /// </summary>
        /// <param name="recordsToUpdate">Records to update.</param>
        /// <param name="entityBatch">Batch whose entity definition supplies the primary key column name.</param>
        /// <param name="tableName">Name of the table the UPDATE statements target.</param>
        private void ProcessUpdates(BindingList <RecordToUpdate> recordsToUpdate, EntityBatch entityBatch, string tableName)
        {
            foreach (var record in recordsToUpdate)
            {
                // NOTE(review): only the first primary key column/value is used here —
                // multi-column keys are presumably rejected upstream; confirm against ProcessDeletions.
                var factory = new SqlUpdateFactory(tableName, entityBatch.EntityDefinition.PrimaryKeyColumnNames[0],
                                                   record.PrimaryKeyValues[0].ToString());

                factory.FieldValuePairs = record.FieldValuePairs;

                var commandText = factory.GetSQL();

                record.CommandText = commandText;

                try
                {
                    ExecuteCommand(commandText);
                }
                catch (Exception ex)
                {
                    // Failures are recorded on the record rather than aborting the batch.
                    record.HasError     = true;
                    record.ErrorMessage = ExceptionFormatter.Format(ex);
                    record.Exception    = ex;
                }

                record.HasBeenProcessed = true;
            }
        }
Пример #3
0
        /// <summary>
        /// Executes a DELETE statement for each record in the batch, recording the SQL and any
        /// execution error on the record itself. Only single-column primary keys are supported.
        /// Every record is marked as processed, even on failure.
        /// </summary>
        /// <param name="recordsToDelete">Records to delete.</param>
        /// <param name="entityBatch">Batch whose entity definition supplies the primary key column name.</param>
        /// <param name="tableName">Name of the table the DELETE statements target.</param>
        private void ProcessDeletions(BindingList <RecordToDelete> recordsToDelete, EntityBatch entityBatch, string tableName)
        {
            if (recordsToDelete.Count > 0 && entityBatch.EntityDefinition.PrimaryKeyColumnNames.Count > 1)
            {
                throw new NotImplementedException("Support for multiple primary keys is not implemented.");
            }

            // Two-stage template: the outer Format fills in table/column now; the inner "{0}"
            // survives to be filled with each record's key value inside the loop.
            // NOTE(review): the key value is interpolated directly into the SQL string — if it can
            // ever contain a quote or originate from untrusted input this is a SQL injection risk;
            // a parameterized command would be safer. Flagged, not changed.
            var deleteSql = string.Format(@"DELETE FROM {0} WHERE {1}", tableName, entityBatch.EntityDefinition.PrimaryKeyColumnNames[0]) + " = '{0}'";

            for (int i = 0; i < recordsToDelete.Count; i++)
            {
                var sql = string.Format(deleteSql, recordsToDelete[i].PrimaryKeyValues[0]);

                try
                {
                    // Deletions carrying field/value pairs have no defined semantics here.
                    if (recordsToDelete[i].FieldValuePairs.Count > 0)
                    {
                        throw new Exception("RecordToDelete with field/value pairs is not implemented.");
                    }

                    recordsToDelete[i].CommandText = sql;
                    ExecuteCommand(sql);
                }
                catch (Exception ex)
                {
                    // Failures are recorded on the record rather than aborting the batch.
                    recordsToDelete[i].HasError     = true;
                    recordsToDelete[i].ErrorMessage = ExceptionFormatter.Format(ex);
                    recordsToDelete[i].Exception    = ex;
                }

                recordsToDelete[i].HasBeenProcessed = true;
            }
        }
Пример #4
0
        /// <summary>
        /// Builds an update record for the given rows and files it on the batch:
        /// a record with actual field changes goes into RecordsToUpdate; one carrying
        /// only data-only values is downgraded to a no-change record; otherwise nothing
        /// is tracked and null is returned.
        /// </summary>
        private static EntityRecord AddUpdateToBatch(OneToMany_OneWayDataMap map, EntityBatch batch, Dictionary <string, DataOnlyField> dataOnlyFields,
                                                     DataRow oneSideRow, IEnumerable <DataRow> manySideFilteredRows,
                                                     TransposeResult transposeResult, Dictionary <string, string> transposeDataOnlyValues)
        {
            // False sorts before True, so set-fields that apply unconditionally come first.
            var applicableSetFields = map.CustomSetFields
                                      .Where(f => f.AppliesTo.HasFlag(SyncOperation.Updates) || f.AppliesTo.HasFlag(SyncOperation.All))
                                      .OrderBy(f => f.OnlyApplyWithOtherChanges);

            var updateRecord = GetUpdateRecord(map, batch, dataOnlyFields, oneSideRow, manySideFilteredRows, transposeResult, transposeDataOnlyValues, applicableSetFields);

            if (updateRecord.FieldValuePairs.Count > 0)
            {
                // Real field changes: track a full update.
                batch.RecordsToUpdate.Add(updateRecord);
                return updateRecord;
            }

            if (updateRecord.DataOnlyValues.Count > 0)
            {
                // No field changes, but data-only values must still be carried along.
                var noChangeRecord = new EntityRecordWithoutChange(batch, updateRecord.PrimaryKeyValues.ToList());

                noChangeRecord.DataOnlyValues     = updateRecord.DataOnlyValues;
                noChangeRecord.SecondaryKeyValues = updateRecord.SecondaryKeyValues;

                batch.RecordsWithoutChange.Add(noChangeRecord);
                return noChangeRecord;
            }

            return null;
        }
Пример #5
0
        /// <summary>
        /// Processes an entity batch: logs a summary, validates the primary key
        /// generation type, then runs inserts, updates and deletions against the
        /// entity's table and marks the batch as processed.
        /// </summary>
        /// <param name="entityBatch">The batch to process.</param>
        public void ProcessBatch(EntityBatch entityBatch)
        {
            SyncEngineLogger.WriteByParallelTaskContext(LogEntryType.Info, this.AssociatedDataSource, () =>
            {
                var definition = entityBatch.EntityDefinition;

                // Append the user-friendly name only when it differs from the technical one.
                if (definition.TechnicalEntityName == definition.UserFriendlyEntityName)
                {
                    return string.Format("Processing {0} insert(s), {1} update(s), and {2} deletion(s) for entity batch for entity '{3}'.",
                                         entityBatch.RecordsToAdd.Count, entityBatch.RecordsToUpdate.Count, entityBatch.RecordsToDelete.Count,
                                         definition.TechnicalEntityName);
                }

                return string.Format("Processing {0} insert(s), {1} update(s), and {2} deletion(s) for entity batch for entity '{3}' ({4}).",
                                     entityBatch.RecordsToAdd.Count, entityBatch.RecordsToUpdate.Count, entityBatch.RecordsToDelete.Count,
                                     definition.TechnicalEntityName, definition.UserFriendlyEntityName);
            });

            var keyGenerationType = entityBatch.EntityDefinition.PrimaryKeyGenerationType;

            // Only the three known generation strategies are supported.
            if (keyGenerationType != Data.PrimaryKeyGenerationType.AutoGenerate &&
                keyGenerationType != Data.PrimaryKeyGenerationType.Manual &&
                keyGenerationType != Data.PrimaryKeyGenerationType.Custom)
            {
                throw new EnumValueNotImplementedException <PrimaryKeyGenerationType>(keyGenerationType);
            }

            var tableName = entityBatch.EntityDefinition.TechnicalEntityName;

            ProcessInserts(entityBatch.RecordsToAdd, entityBatch, tableName);
            ProcessUpdates(entityBatch.RecordsToUpdate, entityBatch, tableName);
            ProcessDeletions(entityBatch.RecordsToDelete, entityBatch, tableName);

            entityBatch.HasBeenProcessed = true;
        }
Пример #6
0
        /// <summary>
        /// Groups the given entities by partition key and executes a batch save per partition.
        /// All partitions run concurrently and are awaited before the method completes.
        /// </summary>
        /// <param name="entities">Entities to save; must not be null.</param>
        /// <param name="batchMethodName">The table operation to apply to each batch.</param>
        /// <exception cref="ArgumentNullException">Thrown when <paramref name="entities"/> is null.</exception>
        private async Task ExecuteBatchOperationAsync(IEnumerable <TAzureTableEntity> entities, SaveType batchMethodName)
        {
            if (entities == null)
            {
                throw new ArgumentNullException("entities");
            }
            // Creating a dictionary to group partitions together since a batch can only represent one partition.
            var batchPartitionPairs = new ConcurrentDictionary <string, List <TAzureTableEntity> >();

            foreach (var entity in entities)
            {
                var entity1 = entity;
                batchPartitionPairs.AddOrUpdate(entity.PartitionKey, new List <TAzureTableEntity> {
                    entity
                }, (s, list) =>
                {
                    list.Add(entity1);
                    return(list);
                });
            }

            // BUG FIX: the original ran `Parallel.ForEach(..., async pair => ...)` inside Task.Run.
            // Parallel.ForEach does not await async delegates, so each partition's work became
            // fire-and-forget: the method could complete before the writes finished and any
            // exception was unobserved. Starting one task per partition and awaiting them all
            // keeps the concurrency while actually awaiting completion.
            var partitionTasks = batchPartitionPairs.Select(async pair =>
            {
                var entityBatch = new EntityBatch(pair.Value.ToArray(), batchMethodName);
                var batchTasks  = entityBatch.BatchList.Select(batchOp => _table.ExecuteBatchAsync(batchOp));
                await Task.WhenAll(batchTasks).ConfigureAwait(false);
            });

            await Task.WhenAll(partitionTasks).ConfigureAwait(false);
        }
Пример #7
0
        /// <summary>
        /// Groups the given entities by partition key and executes a batch save per
        /// partition, processing the partitions in parallel.
        /// </summary>
        /// <param name="entities">Entities to save; must not be null.</param>
        /// <param name="batchMethodName">The table operation to apply to each batch.</param>
        /// <exception cref="ArgumentNullException">Thrown when <paramref name="entities"/> is null.</exception>
        private void ExecuteBatchOperation(IEnumerable <TAzureTableEntity> entities, SaveType batchMethodName)
        {
            if (entities == null)
            {
                throw new ArgumentNullException("entities");
            }

            // A batch can only target a single partition, so bucket the entities by partition key first.
            var partitionBuckets = new ConcurrentDictionary <string, List <TAzureTableEntity> >();

            foreach (var entity in entities)
            {
                var captured = entity;
                partitionBuckets.AddOrUpdate(
                    entity.PartitionKey,
                    new List <TAzureTableEntity> { captured },
                    (key, bucket) =>
                {
                    bucket.Add(captured);
                    return bucket;
                });
            }

            // Execute each partition's batches; partitions run in parallel.
            Parallel.ForEach(partitionBuckets, pair =>
            {
                var entityBatch = new EntityBatch(pair.Value.ToArray(), batchMethodName);
                entityBatch.BatchList.ForEach(batchOp => _table.ExecuteBatch(batchOp));
            });
        }
Пример #8
0
        /// <summary>
        /// Generates Ids for every entity in the insert batch from the table's Oracle sequence.
        /// </summary>
        /// <param name="batch">The batch whose insert entities need Ids.</param>
        /// <exception cref="InvalidOperationException">
        /// Thrown when the sequence's INCREMENT BY is below 100, which batch generation requires.
        /// </exception>
        private void GenerateId(EntityBatch batch)
        {
            var dba = batch.DBA;

            // Reject sequences whose increment is too small for batch generation.
            var seqName     = _oracleRunGenerator.SequenceName(batch.Table.Name, batch.Table.IdentityColumn.Name);
            var incrementBy = Convert.ToInt32(dba.QueryValue(
                                                  "SELECT INCREMENT_BY FROM ALL_SEQUENCES WHERE SEQUENCE_NAME = {0} AND SEQUENCE_OWNER = {1}",
                                                  seqName,
                                                  _oracleRunGenerator.IdentifierQuoter.Prepare(DbConnectionSchema.GetOracleUserId(dba.ConnectionSchema))
                                                  ));

            if (incrementBy < 100)
            {
                throw new InvalidOperationException(string.Format("使用批量保存,表 {0} 的序列 {1} 的每次递增数不能少于 100。建议在数据库生成完成后使用 Rafy.Domain.ORM.BatchSubmit.Oracle.OracleBatchImporter.EnableBatchSequence() 来变更序列的每次递增数以批量生成实体聚合中的所有 Id 标识。", batch.Table.Name, seqName));
            }

            // Each NEXTVAL call reserves a block of `incrementBy` ids, so the entities
            // are processed in sections of that size.
            var nextSeqValueSql = string.Format("SELECT {0}.NEXTVAL FROM DUAL", seqName);

            foreach (var section in EnumerateAllBatches(batch.InsertBatch, incrementBy))
            {
                var sequenceValue = Convert.ToInt64(dba.QueryValue(nextSeqValueSql));
                var nextId        = sequenceValue - incrementBy + 1;

                foreach (var item in section)
                {
                    // Only assign ids to entities that do not already carry a usable one.
                    if (!((IEntityWithId)item).IdProvider.IsAvailable(item.Id))
                    {
                        item.Id = nextId++;
                    }
                }
            }
        }
Пример #9
0
        /// <summary>
        /// Generates the batch-update SQL statement: a MySQL REPLACE INTO that covers
        /// every column of the table, followed by a VALUES clause for all entities.
        /// </summary>
        /// <param name="meta">Batch metadata holding the table definition.</param>
        /// <param name="entities">Entities whose values are appended as the VALUES clause.</param>
        /// <returns>The assembled REPLACE INTO statement.</returns>
        private string GenerateBatchUpdateStatement(EntityBatch meta, IList <Entity> entities)
        {
            // FIX: removed the unused local `dba` (meta.DBA.RawAccesser was never referenced).
            var table = meta.Table;

            var sql = new StringWriter();

            sql.Write("REPLACE INTO ");
            sql.AppendQuote(table, table.Name).Write("(");

            // Emit the quoted column list, comma-separated.
            var columns = table.Columns;

            for (int i = 0, c = columns.Count; i < c; i++)
            {
                if (i != 0)
                {
                    sql.Write(',');
                }

                sql.AppendQuote(table, columns[i].Name);
            }
            sql.Write(")VALUES");

            this.GenerateValuesSql(sql, entities, table);

            return sql.ToString();
        }
Пример #10
0
        /// <summary>
        /// Generates Ids for every entity in the insert batch from the table's Oracle sequence.
        /// </summary>
        /// <param name="batch">The batch whose insert entities need Ids.</param>
        /// <exception cref="InvalidOperationException">
        /// Thrown when the sequence's INCREMENT BY is below 100, which batch generation requires.
        /// </exception>
        private void GenerateId(EntityBatch batch)
        {
            var dba      = batch.DBA;
            var entities = batch.InsertBatch;

            // Reject sequences whose increment is too small for batch generation.
            var seqName     = OracleMigrationProvider.SequenceName(batch.Table.Name, batch.Table.IdentityColumn.Name);
            var incrementBy = Convert.ToInt32(dba.QueryValue("SELECT INCREMENT_BY FROM ALL_SEQUENCES WHERE SEQUENCE_NAME = {0}", seqName));

            if (incrementBy < 100)
            {
                throw new InvalidOperationException(string.Format("使用批量保存,表 {0} 的序列 {1} 的每次递增数不能少于 100。建议在数据库生成完成后使用 Rafy.Domain.ORM.BatchSubmit.Oracle.OracleBatchImporter.EnableBatchSequence() 来变更序列的每次递增数以启用聚合的批量生成。", batch.Table.Name, seqName));
            }

            // Each NEXTVAL call reserves a block of `incrementBy` ids, so the entities
            // are processed in sections of that size.
            var nextSeqValueSql = string.Format("SELECT {0}.NEXTVAL FROM DUAL", seqName);

            // FIX: the local `entities` was previously declared but never used; it now feeds
            // EnumerateAllBatches (same value as batch.InsertBatch).
            foreach (var section in EnumerateAllBatches(entities, incrementBy))
            {
                // FIX: read the sequence value as Int64 — Oracle sequences can exceed
                // Int32.MaxValue, and the sibling overload of this method already reads Int64.
                var nextValue = Convert.ToInt64(dba.QueryValue(nextSeqValueSql));
                var startId   = nextValue - incrementBy + 1;
                for (int i = 0, c = section.Count; i < c; i++)
                {
                    var item = section[i];
                    item.Id = startId++;
                }
            }
        }
Пример #11
0
        /// <summary>
        /// Builds the parameter list that matches the generated update SQL.
        /// Non-primary-key columns come first (LOB columns only when UpdateLOB is set);
        /// the primary-key parameter is appended last to line up with the WHERE clause.
        /// </summary>
        /// <param name="meta">The meta.</param>
        /// <returns>The parameters, ordered to match the update statement.</returns>
        private IDbDataParameter[] GenerateUpdateParameters(EntityBatch meta)
        {
            var accesser = meta.DBA.RawAccesser;
            var table    = meta.Table;

            var includeLob = this.UpdateLOB;
            var parameters = new List <IDbDataParameter>();

            foreach (var column in table.Columns)
            {
                // Skip the primary key (added last) and, unless requested, LOB columns.
                if (column.Info.IsPrimaryKey || (!includeLob && column.IsLOB)) { continue; }

                var parameter = accesser.ParameterFactory.CreateParameter();
                parameter.ParameterName = '@' + column.Name;
                parameter.SourceColumn  = column.Name; // SourceColumn must be set explicitly for batch updates.
                parameter.DbType        = column.Info.ColumnMeta.DataType ?? DbTypeHelper.ConvertFromCLRType(column.Info.DataType);
                parameters.Add(parameter);
            }

            // The primary-key parameter goes last to match the WHERE clause.
            var pk          = table.PKColumn;
            var pkParameter = accesser.ParameterFactory.CreateParameter();

            pkParameter.ParameterName = '@' + pk.Name;
            pkParameter.SourceColumn  = pk.Name;
            pkParameter.DbType        = pk.Info.ColumnMeta.DataType ?? DbTypeHelper.ConvertFromCLRType(pk.Info.DataType);
            parameters.Add(pkParameter);

            return parameters.ToArray();
        }
Пример #12
0
        /// <summary>
        /// Tests every destructible tile chunk in the batch against all buffered
        /// explosions and destroys the tiles inside any intersecting blast radius.
        /// </summary>
        private void DetectExplosion(EntityBatch batch)
        {
            var chunks    = batch.GetComponentDataReadWrite <TileChunk>();
            var positions = batch.GetComponentData <Position>();

            for (int chunkIndex = 0; chunkIndex < batch.length; ++chunkIndex)
            {
                var chunkPos = positions[chunkIndex];

                // Indestructible chunks never react to explosions.
                if (chunks[chunkIndex].flags.HasFlag(TileChunkFlags.NoneDestructible))
                {
                    continue;
                }

                for (int explosionIndex = 0; explosionIndex < this.m_ExplosionCount; ++explosionIndex)
                {
                    var explosionPos  = this.m_ExplosionPositionBuffer[explosionIndex];
                    var explosionData = this.m_ExplosionDataBuffer[explosionIndex];

                    if (TileChunk.Intersects(chunkPos, explosionPos, explosionData.radius))
                    {
                        DestroyTilesInChunk(ref chunks[chunkIndex], chunkPos, explosionPos, explosionData);
                    }
                }
            }
        }
Пример #13
0
        /// <summary>
        /// Saves the data table to the database via SqlBulkCopy, keeping identity values
        /// and checking constraints, inside the batch's current connection/transaction.
        /// </summary>
        /// <param name="table">The table.</param>
        /// <param name="meta">The meta.</param>
        private void SaveBulk(DataTable table, EntityBatch meta)
        {
            // FIX: wrap the bulk copy in `using` so it is disposed (and closed) on every
            // path. The original only called Close() in a finally block, guarded by a
            // null check that could never be false, and never disposed the instance.
            using (var bulkCopy = new SqlBulkCopy(
                       meta.DBA.Connection as SqlConnection,
                       SqlBulkCopyOptions.KeepIdentity | SqlBulkCopyOptions.CheckConstraints,
                       meta.DBA.RawAccesser.Transaction as SqlTransaction
                       ))
            {
                bulkCopy.DestinationTableName = meta.Table.Name;
                // One batch covers all the rows we have.
                bulkCopy.BatchSize            = table.Rows.Count;

                this.SetMappings(bulkCopy.ColumnMappings, meta.Table);

                bulkCopy.WriteToServer(table);
            }
        }
Пример #14
0
        /*********************** 代码块解释 *********************************
         * 由于 ORACLE 中使用 DDL 语句会隐式提交事务,所以下面的方法不再使用。
         **********************************************************************/

        ///// <summary>
        ///// 为所有的实体生成 Id。
        ///// </summary>
        ///// <param name="batch"></param>
        //private void GenerateId(EntityBatch batch)
        //{
        //    var entities = batch.InsertBatch;

        //    var startId = GetBatchIDs(batch.DBA, batch.Table, entities.Count);

        //    for (int i = 0, c = entities.Count; i < c; i++)
        //    {
        //        var item = entities[i];
        //        item.Id = startId++;
        //    }
        //}

        ///// <summary>
        ///// 获取指定大小批量的连续的 Id 号。返回第一个 Id 号的值。
        ///// </summary>
        ///// <param name="dba">The dba.</param>
        ///// <param name="table">The table.</param>
        ///// <param name="size">需要连续的多少个 Id 号。</param>
        ///// <returns></returns>
        //private static int GetBatchIDs(IDbAccesser dba, RdbTable table, int size)
        //{
        //    var nextValue = 0;

        //    var seqName = OracleMigrationProvider.SequenceName(table.Name, table.IdentityColumn.Name);
        //    try
        //    {
        //        //先把 STEP 改为 10 万,再获取下一个加了 10 万的值,这样不会有并发问题。
        //        dba.ExecuteText(string.Format("ALTER SEQUENCE {0} INCREMENT BY {1} NOCACHE", seqName, size));
        //        nextValue = Convert.ToInt32(dba.QueryValue(string.Format("SELECT {0}.NEXTVAL FROM DUAL", seqName)));
        //    }
        //    finally
        //    {
        //        dba.ExecuteText(string.Format("ALTER SEQUENCE {0} INCREMENT BY 1 NOCACHE", seqName));
        //    }

        //    return nextValue - size + 1;
        //}

        #endregion

        #endregion

        #region ImportUpdate

        /// <summary>
        /// Batch-update implementation: submits the update batch in sections
        /// (each section is at most one hundred thousand rows).
        /// </summary>
        /// <param name="batch"></param>
        protected override void ImportUpdate(EntityBatch batch)
        {
            var sections = this.EnumerateAllBatches(batch.UpdateBatch);

            foreach (var section in sections)
            {
                this.ImportUpdate(batch, section);
            }
        }
Пример #15
0
        /// <summary>
        /// Assigns a contiguous block of ids to the given entities, starting at the
        /// first id reserved from the table's id source.
        /// </summary>
        /// <param name="meta">Batch metadata holding the table definition and database accessor.</param>
        /// <param name="entities">Entities that receive consecutive ids.</param>
        private void GenerateId(EntityBatch meta, IList <Entity> entities)
        {
            var nextId = GetBatchIDs(meta.DBA, meta.Table, entities.Count);

            foreach (var entity in entities)
            {
                entity.Id = nextId++;
            }
        }
Пример #16
0
        /// <summary>
        /// Builds a deletion record from the transpose result, fills its data-only values,
        /// adds it to the batch, and — when the map defines custom set-fields that apply to
        /// deletions — additionally queues an update record carrying those set-field changes.
        /// Returns the deletion record.
        /// </summary>
        private static EntityRecord AddDeletionToBatch(OneToMany_OneWayDataMap map, EntityBatch batch, Dictionary <string, DataOnlyField> dataOnlyFields,
                                                       DataRow oneSideRow, IEnumerable <DataRow> manySideFilteredRows,
                                                       TransposeResult transposeResult, Dictionary <string, string> transposeDataOnlyValues)
        {
            var transposeRecordToDelete = (TransposeResult_DeleteRecord)transposeResult;

            var recordToDelete = new RecordToDelete(batch, transposeRecordToDelete.PrimaryKeyValues);

            // Transpose-supplied data-only values win; they are copied first.
            foreach (var transposeDataOnlyValue in transposeDataOnlyValues)
            {
                recordToDelete.DataOnlyValues.Add(transposeDataOnlyValue.Key, transposeDataOnlyValue.Value);
            }

            // Remaining data-only fields are filled from the many-side row, or from the
            // field's populate callback (fed the one-side row) when one is configured.
            foreach (var dataOnlyField in dataOnlyFields)
            {
                if (!recordToDelete.DataOnlyValues.ContainsKey(dataOnlyField.Key))
                {
                    if (dataOnlyField.Value.MethodToPopulateValue == null)
                    {
                        recordToDelete.DataOnlyValues.Add(dataOnlyField.Key, (transposeRecordToDelete.AssociatedManySideDataRow[dataOnlyField.Key] ?? "").ToString());
                    }
                    else if (dataOnlyField.Value.MethodToPopulateValue != null)
                    {
                        recordToDelete.DataOnlyValues.Add(dataOnlyField.Key, (dataOnlyField.Value.MethodToPopulateValue(oneSideRow) ?? "").ToString());
                    }
                }
            }

            // add primary keys, if not auto-generate or not custom
            // NOTE(review): this appends key values taken from DataOnlyValues to the list the
            // record was constructed with — presumably the constructor left it empty in the
            // manual-key case; confirm against RecordToDelete.
            if (EntityToUpdateDefinition.HasManualGeneratedPrimaryKey(map, batch.EntityDefinition.SyncSide))
            {
                foreach (var primarKeyColumnName in batch.EntityDefinition.PrimaryKeyColumnNames)
                {
                    recordToDelete.PrimaryKeyValues.Add(recordToDelete.DataOnlyValues[primarKeyColumnName]);
                }
            }

            batch.RecordsToDelete.Add(recordToDelete);

            // Custom set-fields that apply to deletions produce an extra update record
            // (e.g. to stamp a field on the row being removed).
            var customSetFieldsForDelete = map.CustomSetFields.Where(d => d.AppliesTo.HasFlag(SyncOperation.Deletes) || d.AppliesTo.HasFlag(SyncOperation.All));

            if (customSetFieldsForDelete.Count() > 0)
            {
                var transposeResultForUpdate = new TransposeResult_UpdateRecord(transposeRecordToDelete.AssociatedManySideDataRow, transposeRecordToDelete.PrimaryKeyValues, null);

                var recordToUpdate = GetUpdateRecord(map, batch, dataOnlyFields, oneSideRow, manySideFilteredRows, transposeResultForUpdate, transposeDataOnlyValues, customSetFieldsForUpdate: customSetFieldsForDelete);

                if (recordToUpdate.FieldValuePairs.Count > 0)
                {
                    batch.RecordsToUpdate.Add(recordToUpdate);
                }
            }

            return(recordToDelete);
        }
Пример #17
0
        /// <summary>
        /// Renders every tile chunk in the batch whose bounds intersect the current view rectangle.
        /// </summary>
        private void RenderTiles(EntityBatch batch)
        {
            var chunks    = batch.GetComponentData <TileChunk>();
            var positions = batch.GetComponentData <Position>();

            for (int index = 0; index < batch.length; ++index)
            {
                // Skip chunks that are entirely off-screen.
                if (!TileChunk.Intersects(positions[index], this.m_ViewRect))
                {
                    continue;
                }

                RenderTileChunk(in chunks[index], positions[index]);
            }
        }
Пример #18
0
        /// <summary>
        /// Bulk-imports the batch's insert entities, one section at a time,
        /// letting the server generate identity values.
        /// </summary>
        /// <param name="batch"></param>
        /// <exception cref="System.NotImplementedException"></exception>
        protected override void ImportInsert(EntityBatch batch)
        {
            foreach (var section in this.EnumerateAllBatches(batch.InsertBatch))
            {
                var dataTable = this.ToDataTable(batch.Table, section);

                // keepIdentity: false — identity values are produced by the server.
                this.SaveBulk(dataTable, batch, false);

                //this.ReadIdFromTable(entities, table);
            }
        }
Пример #19
0
        /// <summary>
        /// Assigns ids from a reserved contiguous block to every entity that does not
        /// already carry a usable id.
        /// </summary>
        /// <param name="meta">Batch metadata holding the table definition and database accessor.</param>
        /// <param name="entities">Entities that may receive ids.</param>
        internal virtual void GenerateId(EntityBatch meta, IList <Entity> entities)
        {
            var nextId = GetBatchIDs(meta.DBA, meta.Table, entities.Count);

            foreach (var entity in entities)
            {
                // Ids the id provider already considers valid are left untouched.
                if (!((IEntityWithId)entity).IdProvider.IsAvailable(entity.Id))
                {
                    entity.Id = nextId++;
                }
            }
        }
Пример #20
0
        /// <summary>
        /// Batch-insert implementation: generates ids when the table has an identity
        /// column, then imports the insert batch in sections (each section is at most
        /// one hundred thousand rows).
        /// </summary>
        /// <param name="batch"></param>
        protected override void ImportInsert(EntityBatch batch)
        {
            // Ids must exist before any rows are written.
            if (batch.Table.IdentityColumn != null)
            {
                this.GenerateId(batch);
            }

            foreach (var section in this.EnumerateAllBatches(batch.InsertBatch))
            {
                this.ImportBatch(batch, section);
            }
        }
Пример #21
0
        /// <summary>
        /// Batch-insert implementation: generates ids when the table has an identity
        /// column, then imports each section of the insert batch.
        /// </summary>
        /// <param name="batch">The batch whose insert entities are imported.</param>
        protected override void ImportInsert(EntityBatch batch)
        {
            var insertList = batch.InsertBatch;

            // Ids must exist before any rows are written.
            if (batch.Table.IdentityColumn != null)
            {
                this.GenerateId(batch, insertList);
            }

            foreach (var section in this.EnumerateAllBatches(insertList))
            {
                this.ImportBatch(batch, section);
            }
        }
Пример #22
0
        /// <summary>
        /// Performs the batch update for one section of entities via Oracle array binding.
        /// </summary>
        /// <param name="meta">Batch metadata holding the table definition and database accessor.</param>
        /// <param name="entities">The section of entities to update.</param>
        private void ImportUpdate(EntityBatch meta, IList <Entity> entities)
        {
            // Build the update statement and its matching array-bound parameters.
            var sql = this.GenerateUpdateSQL(meta.Table);

            var parameters = this.GenerateUpdateParameters(meta, entities);

            var command = meta.DBA.RawAccesser.CommandFactory.CreateCommand(sql, CommandType.Text, parameters);

            // FIX: direct cast instead of `as` — the previous `(command as OracleCommand).X`
            // would throw a bare NullReferenceException if the factory ever produced a
            // non-OracleCommand; an InvalidCastException names the actual type mismatch.
            // Setting ArrayBindCount is what turns this single statement into a batch import.
            ((OracleCommand)command).ArrayBindCount = entities.Count;

            command.ExecuteNonQuery();
        }
Пример #23
0
        /// <summary>
        /// Performs the batch insert for one section of entities via Oracle array binding.
        /// </summary>
        /// <param name="meta">Batch metadata holding the table definition and database accessor.</param>
        /// <param name="entities">The section of entities to insert.</param>
        private void ImportBatch(EntityBatch meta, IList <Entity> entities)
        {
            var rowsCount = entities.Count;

            var sql = GenerateInsertSQL(meta.Table);

            var parameters = GenerateInsertParameters(meta, entities);

            var command = meta.DBA.RawAccesser.CommandFactory.CreateCommand(sql, CommandType.Text, parameters);

            // FIX: direct cast instead of `as` — the previous `(command as OracleCommand).X`
            // would throw a bare NullReferenceException if the factory ever produced a
            // non-OracleCommand; an InvalidCastException names the actual type mismatch.
            // Setting ArrayBindCount is what turns this single statement into a batch import.
            ((OracleCommand)command).ArrayBindCount = rowsCount;

            command.ExecuteNonQuery();
        }
Пример #24
0
        /// <summary>
        /// Scans every destructible tile chunk in the batch for a target tile.
        /// </summary>
        private void FindTargetTile(EntityBatch batch)
        {
            var chunks    = batch.GetComponentData <TileChunk>();
            var positions = batch.GetComponentData <Position>();

            for (int index = 0; index < batch.length; ++index)
            {
                // Indestructible chunks cannot contain a target.
                if (chunks[index].flags.HasFlag(TileChunkFlags.NoneDestructible))
                {
                    continue;
                }

                FindTargetInChunk(in chunks[index], positions[index]);
            }
        }
Пример #25
0
        /// <summary>
        /// Builds the parameter list matching the generated INSERT statement —
        /// one batch parameter per table column, in column order.
        /// </summary>
        /// <param name="meta"></param>
        /// <param name="entities"></param>
        /// <returns></returns>
        private IDbDataParameter[] GenerateInsertParameters(EntityBatch meta, IList <Entity> entities)
        {
            var accesser = meta.DBA.RawAccesser;

            // Each parameter's value is the array of one column's values across all entities.
            var parameters = new List <IDbDataParameter>();

            foreach (var column in meta.Table.Columns)
            {
                parameters.Add(this.ReadIntoBatchParameter(entities, column, accesser));
            }

            return parameters.ToArray();
        }
Пример #26
0
        /// <summary>
        /// Bulk-import implementation: generates ids when the table has an identity
        /// column, then bulk-copies each section of the insert batch into the table.
        /// </summary>
        /// <param name="batch"></param>
        /// <exception cref="System.NotImplementedException"></exception>
        protected override void ImportInsert(EntityBatch batch)
        {
            var insertList = batch.InsertBatch;

            // Ids must exist before any rows are written.
            if (batch.Table.IdentityColumn != null)
            {
                this.GenerateId(batch, insertList);
            }

            foreach (var section in this.EnumerateAllBatches(insertList))
            {
                var dataTable = this.ToDataTable(batch.Table, section);

                this.SaveBulk(dataTable, batch);
            }
        }
Пример #27
0
        /// <summary>
        /// Groups the given entities by partition key and executes a batch save per partition,
        /// one partition at a time. Partitions whose storage request fails with 404 (NotFound)
        /// are skipped; all other errors propagate.
        /// </summary>
        /// <param name="entities">Entities to save; must not be null.</param>
        /// <param name="batchMethodName">Name of the table operation to apply; must not be null or empty.</param>
        /// <exception cref="ArgumentNullException">Thrown when either argument is null or empty.</exception>
        private async Task ExecuteBatchOperationAsync(IEnumerable <TAzureTableEntity> entities, string batchMethodName)
        {
            if (entities == null)
            {
                throw new ArgumentNullException(nameof(entities));
            }
            if (string.IsNullOrEmpty(batchMethodName))
            {
                throw new ArgumentNullException(nameof(batchMethodName));
            }
            // Creating a dictionary to group partitions together since a batch can only represent one partition.
            var batchPartitionPairs = new ConcurrentDictionary <string, List <TAzureTableEntity> >();

            foreach (var entity in entities)
            {
                var entity1 = entity;
                batchPartitionPairs.AddOrUpdate(entity.PartitionKey, new List <TAzureTableEntity> {
                    entity
                }, (s, list) =>
                {
                    list.Add(entity1);
                    return(list);
                });
            }
            // Iterating through the batch key-value pairs and executing the batch one partition at a time.
            // FIX: removed the no-op `catch (Exception) { throw; }` (it rethrows unchanged) and
            // moved the 404 status check into an exception filter, so non-404 storage errors
            // propagate with their original stack trace.
            foreach (var pair in batchPartitionPairs)
            {
                try
                {
                    var entityBatch = new EntityBatch(pair.Value.ToArray(), batchMethodName, _encoder);
                    var batchTasks  = entityBatch.BatchList.Select(batchOp => Table.ExecuteBatchAsync(batchOp));
                    await Task.WhenAll(batchTasks).ConfigureAwait(false);
                }
                catch (StorageException e) when (e.RequestInformation.HttpStatusCode == (int)HttpStatusCode.NotFound)
                {
                    // A missing table for this partition is tolerated; move on to the next one.
                    continue;
                }
            }
        }
Пример #28
0
        /// <summary>
        /// Batch-update implementation: binds the update statement and its parameters
        /// to a SqlDataAdapter and pushes the whole update table through it in batches.
        /// </summary>
        /// <param name="batch"></param>
        protected override void ImportUpdate(EntityBatch batch)
        {
            var sql = this.GenerateUpdateSQL(batch.Table);

            // Build the parameter list matching the statement.
            var parameters = this.GenerateUpdateParameters(batch);

            var table = ToDataTable(batch.Table, batch.UpdateBatch, true);

            var command = batch.DBA.RawAccesser.CommandFactory.CreateCommand(sql, CommandType.Text, parameters);

            // FIX: dispose the adapter, and use a direct cast — the previous
            // `command as SqlCommand` produced a null UpdateCommand (and a later
            // NullReferenceException) if the factory returned a non-SqlCommand;
            // an InvalidCastException names the actual problem.
            using (var adapter = new SqlDataAdapter())
            {
                adapter.UpdateCommand   = (SqlCommand)command;
                adapter.UpdateBatchSize = this.BatchSize;
                // No result rows need to be mapped back into the DataTable.
                adapter.UpdateCommand.UpdatedRowSource = UpdateRowSource.None;

                adapter.Update(table);
            }
        }
Пример #29
0
        /// <summary>
        /// Batch-update implementation: binds the update statement and its parameters
        /// to a SqlDataAdapter and pushes the whole update table through it in batches.
        /// </summary>
        /// <param name="batch"></param>
        protected override void ImportUpdate(EntityBatch batch)
        {
            var sql = this.GenerateUpdateSQL(batch.Table);

            // Build the parameter list matching the statement.
            var parameters = this.GenerateUpdateParameters(batch);

            var table = ToDataTable(batch.Table, batch.UpdateBatch, true);

            var command = batch.DBA.RawAccesser.CommandFactory.CreateCommand(sql, CommandType.Text, parameters);

            // FIX: dispose the adapter, and use a direct cast — the previous
            // `command as SqlCommand` produced a null UpdateCommand (and a later
            // NullReferenceException) if the factory returned a non-SqlCommand;
            // an InvalidCastException names the actual problem.
            using (var adapter = new SqlDataAdapter())
            {
                adapter.UpdateCommand   = (SqlCommand)command;
                adapter.UpdateBatchSize = this.BatchSize;
                // No result rows need to be mapped back into the DataTable.
                adapter.UpdateCommand.UpdatedRowSource = UpdateRowSource.None;

                adapter.Update(table);
            }
        }
Пример #30
0
        /// <summary>
        /// Saves the data table to the database via SqlBulkCopy inside the batch's
        /// current connection/transaction.
        /// </summary>
        /// <param name="table">The table.</param>
        /// <param name="meta">The meta.</param>
        /// <param name="keepIdentity">
        /// When true (the default), identity values from <paramref name="table"/> are kept;
        /// when false, the server generates them.
        /// </param>
        internal void SaveBulk(DataTable table, EntityBatch meta, bool keepIdentity = true)
        {
            var opt = SqlBulkCopyOptions.CheckConstraints;

            if (keepIdentity)
            {
                opt |= SqlBulkCopyOptions.KeepIdentity;
            }

            // FIX: removed the redundant `finally { if (bulkCopy != null) bulkCopy.Close(); }` —
            // `using` already disposes (and thereby closes) the bulk copy on every path, and
            // the reference can never be null inside the block. Also fixed the copy-pasted
            // XML doc for keepIdentity ("The meta.").
            using (var bulkCopy = new SqlBulkCopy(
                       meta.DBA.Connection as SqlConnection,
                       opt,
                       meta.DBA.RawAccesser.Transaction as SqlTransaction
                       ))
            {
                bulkCopy.DestinationTableName = meta.Table.Name;
                bulkCopy.BatchSize            = table.Rows.Count;
                bulkCopy.BulkCopyTimeout      = 10 * 60; // ten minutes

                this.SetMappings(bulkCopy.ColumnMappings, meta.Table);

#if NET45
                bulkCopy.WriteToServer(table);
#endif
#if NETSTANDARD2_0
                // netstandard2.0's SqlBulkCopy lacks the DataTable overload; go through a reader.
                var reader = new DataTableReader(table);
                bulkCopy.WriteToServer(reader);
#endif
            }
        }
Пример #31
0
        /// <summary>
        /// Steers every satellite in the batch toward its target in three phases:
        /// Flight (full thrust), SlowApproach (reduced thrust), Hover (at rest on target).
        /// All movement is applied along +Y only.
        /// </summary>
        /// <param name="batch">The entity batch exposing Satelite and Position component data.</param>
        private void MoveSateliteToPosition(EntityBatch batch)
        {
            // NOTE(review): assumes the indexers of these component views write back to the
            // underlying component storage (i.e. 'satelites[i].state = ...' persists) — confirm.
            var satelites = batch.GetComponentDataReadWrite <Satelite>();
            var positions = batch.GetComponentDataReadWrite <Position>();

            for (int i = 0; i < batch.length; ++i)
            {
                var pos = positions[i].value;

                switch (satelites[i].state)
                {
                // Full thrust; switch to SlowApproach once within LockDistanceSqr (squared distance).
                case SateliteState.Flight:
                    pos += new Vector2(0f, ThrusterAcc);
                    if ((pos - satelites[i].target).LengthSquared() < LockDistanceSqr)
                    {
                        satelites[i].state = SateliteState.SlowApproach;
                    }
                    positions[i] = new Position(pos);
                    break;

                // 30% thrust; once within a squared distance of 150, snap exactly onto the
                // target and start hovering, otherwise keep approaching.
                case SateliteState.SlowApproach:
                    pos += new Vector2(0f, ThrusterAcc * 0.3f);
                    if ((pos - satelites[i].target).LengthSquared() < 150f)
                    {
                        positions[i]       = new Position(satelites[i].target);
                        satelites[i].state = SateliteState.Hover;
                    }
                    else
                    {
                        positions[i] = new Position(pos);
                    }
                    break;

                // Already on target; nothing to update.
                case SateliteState.Hover:
                    break;
                }
            }
        }
Пример #32
0
        /// <summary>
        /// Reserves a contiguous range of ids for the batch and assigns them
        /// to the entities in list order.
        /// </summary>
        /// <param name="meta">The batch metadata providing the database accessor and table.</param>
        /// <param name="entities">The entities that receive the newly reserved ids.</param>
        private void GenerateId(EntityBatch meta, IList<Entity> entities)
        {
            // Reserve entities.Count consecutive ids; GetBatchIDs returns the first one.
            var nextId = GetBatchIDs(meta.DBA, meta.Table, entities.Count);

            foreach (var entity in entities)
            {
                entity.Id = nextId;
                nextId++;
            }
        }
Пример #33
0
        /// <summary>
        /// Generates the parameter list that matches the generated UPDATE statement.
        /// </summary>
        /// <param name="meta">The batch metadata describing the target table.</param>
        /// <returns>
        /// One parameter per updatable column, with the primary-key parameter appended last.
        /// </returns>
        private IDbDataParameter[] GenerateUpdateParameters(EntityBatch meta)
        {
            var accesser = meta.DBA.RawAccesser;
            var table = meta.Table;
            var includeLobColumns = this.UpdateLOB;

            var result = new List<IDbDataParameter>();

            // One parameter per updatable column; SourceColumn binds each parameter
            // to the corresponding DataTable column for batched execution.
            var columns = table.Columns;
            var count = columns.Count;
            for (var index = 0; index < count; index++)
            {
                var column = columns[index];

                // The primary key is handled separately below.
                if (column.Info.IsPrimaryKey) { continue; }

                // Skip LOB columns unless LOB updates are enabled.
                if (column.IsLOB && !includeLobColumns) { continue; }

                var parameter = accesser.ParameterFactory.CreateParameter();
                parameter.ParameterName = '@' + column.Name;
                parameter.SourceColumn = column.Name; // SourceColumn must also be set.
                parameter.DbType = DbTypeHelper.ConvertFromCLRType(column.Info.DataType);
                result.Add(parameter);
            }

            // The primary-key parameter goes last, matching its position in the SQL.
            var pkColumn = table.PKColumn;
            var pkParameter = accesser.ParameterFactory.CreateParameter();
            pkParameter.ParameterName = '@' + pkColumn.Name;
            pkParameter.SourceColumn = pkColumn.Name;
            pkParameter.DbType = DbTypeHelper.ConvertFromCLRType(pkColumn.Info.DataType);
            result.Add(pkParameter);

            return result.ToArray();
        }
Пример #34
0
        /// <summary>
        /// Bulk-copies the rows of the given table into the database,
        /// keeping identity values and checking constraints.
        /// </summary>
        /// <param name="table">The data to write.</param>
        /// <param name="meta">The batch metadata providing the connection, transaction and target table.</param>
        private void SaveBulk(DataTable table, EntityBatch meta)
        {
            // Wrapped in 'using' so the bulk copy is closed/disposed on every path; previously
            // the property setters ran between 'new' and 'try', so an exception there (or a
            // faulted construction) escaped the finally block. The null-check in the old
            // finally was also dead — the reference could never be null there.
            using (var bulkCopy = new SqlBulkCopy(
                       meta.DBA.Connection as SqlConnection,
                       SqlBulkCopyOptions.KeepIdentity | SqlBulkCopyOptions.CheckConstraints,
                       meta.DBA.RawAccesser.Transaction as SqlTransaction
                       ))
            {
                bulkCopy.DestinationTableName = meta.Table.Name;

                // Write all rows in a single batch.
                bulkCopy.BatchSize = table.Rows.Count;

                this.SetMappings(bulkCopy.ColumnMappings, meta.Table);

                bulkCopy.WriteToServer(table);
            }
        }
Пример #35
0
        /// <summary>
        /// Locates, instantiates and executes the job step described by
        /// <paramref name="currentJobStepInstance"/>. Depending on the step kind
        /// (custom action, data map, entity batch or job batch), it compares data,
        /// builds source/target job batches, optionally defers submission to the next
        /// step, and otherwise submits the batches and logs the results.
        /// </summary>
        /// <param name="jobInstance">The running job that owns the step.</param>
        /// <param name="previousJobStepInstance">The previously executed step, or null for the first step.</param>
        /// <param name="currentJobStepInstance">The step to execute; receives the resulting batches and error state.</param>
        /// <param name="configurator">The engine configurator passed through to the step's Initialize method.</param>
        /// <exception cref="Exception">
        /// Thrown when the step class cannot be found, does not derive from
        /// <see cref="JobStepInvocation"/>, or a step returns no output.
        /// </exception>
        public static void ExecuteStep(JobInstance jobInstance, JobStepInstance previousJobStepInstance,
                                       JobStepInstance currentJobStepInstance, ISyncEngineConfigurator configurator)
        {
            // Resolve the step class from the integration's package assembly by fully qualified name.
            Type jobStepType = jobInstance.Integration.PackageAssembly.GetType(currentJobStepInstance.JobStep.FullyQualifiedName);

            if (jobStepType == null)
            {
                throw new Exception(string.Format("Job step with fully qualified name '{0}' was not found in assembly '{1}'.", currentJobStepInstance.JobStep.FullyQualifiedName, jobInstance.Integration.PackageAssembly.Location));
            }

            // Ensure the step class inherits from the proper base class so the Initialize method is available.
            bool hasCorrectBaseType = false;

            var baseType = jobStepType.BaseType;

            while (baseType != typeof(Object))
            {
                if (baseType == typeof(JobStepInvocation))
                {
                    hasCorrectBaseType = true;
                    break;
                }

                baseType = baseType.BaseType;
            }

            if (!hasCorrectBaseType)
            {
                throw new Exception(string.Format("Job step class '{0}' must derive from '{1}'.", jobStepType.Name, typeof(JobStepInvocation).FullName));
            }

            var jobStepInvocation = Activator.CreateInstance(jobStepType);

            if (jobStepInvocation is CustomActionStep)
            {
                var jobStepObj = (CustomActionStep)jobStepInvocation;

                jobStepObj.Initialize(jobInstance.Integration, jobInstance, previousJobStepInstance, currentJobStepInstance, configurator);

                jobStepObj.Process();

                // Carry the previous step's batches forward when the previous step deferred execution.
                // NOTE(review): 'previousJobStepInstance.GetType() is CustomActionStep' compares a
                // System.Type instance against CustomActionStep and is therefore ALWAYS false
                // (compiler warning CS0184). The intended check was presumably
                // 'previousJobStepInstance is CustomActionStep' or a check on the previous step's
                // invocation type — confirm the intent before changing it; left as-is here.
                if (previousJobStepInstance != null &&
                    ((previousJobStepInstance.HasDeferredExecutionUntilNextStep.HasValue &&
                      previousJobStepInstance.HasDeferredExecutionUntilNextStep.Value == true) ||
                     (previousJobStepInstance.GetType() is CustomActionStep)))
                {
                    currentJobStepInstance.SourceJobBatch = previousJobStepInstance.SourceJobBatch;
                    currentJobStepInstance.TargetJobBatch = previousJobStepInstance.TargetJobBatch;
                    currentJobStepInstance.HasDeferredExecutionUntilNextStep = true;
                }
                else
                {
                    currentJobStepInstance.HasDeferredExecutionUntilNextStep = false;
                }
            }
            else if (jobStepInvocation is DataMapStep)
            {
                var jobStepObj = (DataMapStep)jobStepInvocation;

                jobStepObj.Initialize(jobInstance.Integration, jobInstance, previousJobStepInstance, currentJobStepInstance, configurator);

                var dataMapOutput = jobStepObj.Process();

                if (dataMapOutput == null)
                {
                    throw new Exception("Job step must return a value.");
                }

                currentJobStepInstance.HasDeferredExecutionUntilNextStep = dataMapOutput.DeferExecutionUntilNextStep;

                JobBatch sourceJobBatch = null;
                JobBatch targetJobBatch = null;
                EntityBatch sourceEntityBatch = null;
                EntityBatch targetEntityBatch = null;
                EntityBatch oneWayEntityBatch = null;
                IOneWayDataMap oneWayDataMap = null;
                TwoWayDataMap twoWayDataMap = null;

                if (dataMapOutput.DataMap is IOneWayDataMap)
                {
                    oneWayDataMap = (IOneWayDataMap)dataMapOutput.DataMap;

                    if (dataMapOutput.DataMap is OneWayDataMap)
                    {
                        oneWayEntityBatch = OneToOneDataMapProcessor.Compare((OneWayDataMap)dataMapOutput.DataMap,
                                                                             dataMapOutput.SourceData, dataMapOutput.SourceDataDuplicateRowBehavior,
                                                                             dataMapOutput.TargetData, dataMapOutput.TargetDataDuplicateRowBehavior,
                                                                             dataMapOutput.RowsToProcess);
                    }
                    else if (dataMapOutput.DataMap is OneToMany_OneWayDataMap)
                    {
                        if (dataMapOutput.RowsToProcess != null)
                        {
                            throw new Exception("Rows to process Func is not supported for one-to-many data maps.");
                        }

                        oneWayEntityBatch = OneToManyDataMapProcessor.Compare((OneToMany_OneWayDataMap)dataMapOutput.DataMap,
                                                                              dataMapOutput.SourceData, dataMapOutput.SourceDataDuplicateRowBehavior,
                                                                              dataMapOutput.TargetData, dataMapOutput.TargetDataDuplicateRowBehavior);
                    }

                    // NOTE(review): if DataMap implements IOneWayDataMap but is neither OneWayDataMap
                    // nor OneToMany_OneWayDataMap, oneWayEntityBatch stays null and the check below
                    // throws NullReferenceException — confirm whether that case can occur.
                    if (oneWayEntityBatch.EntityDefinition.SyncSide == SyncSide.Source)
                    {
                        oneWayEntityBatch.LoggingBehavior = dataMapOutput.SourceSideLoggingBehavior;
                    }
                    else if (oneWayEntityBatch.EntityDefinition.SyncSide == SyncSide.Target)
                    {
                        oneWayEntityBatch.LoggingBehavior = dataMapOutput.TargetSideLoggingBehavior;
                    }
                }
                else if (dataMapOutput.DataMap is TwoWayDataMap)
                {
                    twoWayDataMap = (TwoWayDataMap)dataMapOutput.DataMap;

                    OneToOneDataMapProcessor.Compare(twoWayDataMap, dataMapOutput.SourceData, dataMapOutput.SourceDataDuplicateRowBehavior,
                                                     dataMapOutput.TargetData, dataMapOutput.TargetDataDuplicateRowBehavior,
                                                     dataMapOutput.RowsToProcess, out sourceEntityBatch, out targetEntityBatch);

                    sourceEntityBatch.LoggingBehavior = dataMapOutput.SourceSideLoggingBehavior;
                    targetEntityBatch.LoggingBehavior = dataMapOutput.TargetSideLoggingBehavior;
                }
                else
                {
                    throw new DerivedClassNotImplementedException <OneToOneDataMap>(dataMapOutput.DataMap);
                }

                // Reuse the previous step's job batches when it deferred execution; otherwise start fresh.
                if (previousJobStepInstance != null &&
                    previousJobStepInstance.HasDeferredExecutionUntilNextStep.HasValue &&
                    previousJobStepInstance.HasDeferredExecutionUntilNextStep.Value == true)
                {
                    sourceJobBatch = previousJobStepInstance.SourceJobBatch;
                    targetJobBatch = previousJobStepInstance.TargetJobBatch;
                }
                else
                {
                    sourceJobBatch = new JobBatch(SyncSide.Source, jobInstance.SourceDataSource);
                    targetJobBatch = new JobBatch(SyncSide.Target, jobInstance.TargetDataSource);
                }

                // Route the entity batch(es) to the side(s) they apply to.
                if (dataMapOutput.DataMap is IOneWayDataMap)
                {
                    if (oneWayDataMap.SyncDirection == SyncDirection.SourceToTarget)
                    {
                        targetJobBatch.EntityBatches.Add(oneWayEntityBatch);
                    }
                    else if (oneWayDataMap.SyncDirection == SyncDirection.TargetToSource)
                    {
                        sourceJobBatch.EntityBatches.Add(oneWayEntityBatch);
                    }
                    else
                    {
                        throw new EnumValueNotImplementedException <SyncDirection>(oneWayDataMap.SyncDirection);
                    }
                }
                else if (dataMapOutput.DataMap is TwoWayDataMap)
                {
                    sourceJobBatch.EntityBatches.Add(sourceEntityBatch);
                    targetJobBatch.EntityBatches.Add(targetEntityBatch);
                }
                else
                {
                    throw new DerivedClassNotImplementedException <OneToOneDataMap>(dataMapOutput.DataMap);
                }

                currentJobStepInstance.SourceJobBatch = sourceJobBatch;
                currentJobStepInstance.TargetJobBatch = targetJobBatch;

                if (!currentJobStepInstance.HasDeferredExecutionUntilNextStep.Value)
                {
                    SubmitBatchAndLog(jobInstance, currentJobStepInstance, sourceJobBatch);
                    SubmitBatchAndLog(jobInstance, currentJobStepInstance, targetJobBatch);
                }
            }
            else if (jobStepInvocation is EntityBatchStep)
            {
                var jobStepObj = (EntityBatchStep)jobStepInvocation;

                jobStepObj.Initialize(jobInstance.Integration, jobInstance, previousJobStepInstance, currentJobStepInstance, configurator);

                var entityBatchOutput = jobStepObj.Process();

                if (entityBatchOutput == null)
                {
                    throw new Exception("Job step must return a value.");
                }

                currentJobStepInstance.HasDeferredExecutionUntilNextStep = entityBatchOutput.DeferExecutionUntilNextStep;

                JobBatch sourceJobBatch;
                JobBatch targetJobBatch;

                // Fixed: check HasValue before reading .Value (consistent with the DataMapStep branch
                // above); previously this threw InvalidOperationException when the previous step left
                // HasDeferredExecutionUntilNextStep null.
                if (previousJobStepInstance != null &&
                    previousJobStepInstance.HasDeferredExecutionUntilNextStep.HasValue &&
                    previousJobStepInstance.HasDeferredExecutionUntilNextStep.Value == true)
                {
                    sourceJobBatch = previousJobStepInstance.SourceJobBatch;
                    targetJobBatch = previousJobStepInstance.TargetJobBatch;
                }
                else
                {
                    sourceJobBatch = new JobBatch(SyncSide.Source, jobInstance.SourceDataSource);
                    targetJobBatch = new JobBatch(SyncSide.Target, jobInstance.TargetDataSource);
                }

                if (entityBatchOutput.SourceSideEntityBatches != null)
                {
                    foreach (var sourceSideEntityBatch in entityBatchOutput.SourceSideEntityBatches)
                    {
                        sourceJobBatch.EntityBatches.Add(sourceSideEntityBatch);
                    }
                }

                if (entityBatchOutput.TargetSideEntityBatches != null)
                {
                    foreach (var targetSideEntityBatch in entityBatchOutput.TargetSideEntityBatches)
                    {
                        targetJobBatch.EntityBatches.Add(targetSideEntityBatch);
                    }
                }

                currentJobStepInstance.SourceJobBatch = sourceJobBatch;
                currentJobStepInstance.TargetJobBatch = targetJobBatch;

                if (!currentJobStepInstance.HasDeferredExecutionUntilNextStep.Value)
                {
                    SubmitBatchAndLog(jobInstance, currentJobStepInstance, sourceJobBatch);
                    SubmitBatchAndLog(jobInstance, currentJobStepInstance, targetJobBatch);
                }
            }
            else if (jobStepInvocation is JobBatchStep)
            {
                var jobStepObj = (JobBatchStep)jobStepInvocation;

                jobStepObj.Initialize(jobInstance.Integration, jobInstance, previousJobStepInstance, currentJobStepInstance, configurator);

                var jobBatchOutput = jobStepObj.Process();

                if (jobBatchOutput == null)
                {
                    throw new Exception("Job step must return a value.");
                }

                if (jobBatchOutput.SourceSideJobBatch != null)
                {
                    SubmitBatchAndLog(jobInstance, currentJobStepInstance, jobBatchOutput.SourceSideJobBatch);
                }
                else
                {
                    SyncEngineLogger.WriteToLog(LogEntryType.Info, jobInstance, currentJobStepInstance, jobInstance.SourceDataSource.DataSource,
                                                "Job step '{0}' for job '{1}' returned a null job batch for source-side data source '{2}'.",
                                                currentJobStepInstance.JobStep.Name, jobInstance.Job.Name, jobInstance.SourceDataSource.DataSource.Name);
                }

                if (jobBatchOutput.TargetSideJobBatch != null)
                {
                    SubmitBatchAndLog(jobInstance, currentJobStepInstance, jobBatchOutput.TargetSideJobBatch);
                }
                else
                {
                    SyncEngineLogger.WriteToLog(LogEntryType.Info, jobInstance, currentJobStepInstance, jobInstance.TargetDataSource.DataSource,
                                                "Job step '{0}' for job '{1}' returned a null job batch for target-side data source '{2}'.",
                                                currentJobStepInstance.JobStep.Name, jobInstance.Job.Name, jobInstance.TargetDataSource.DataSource.Name);
                }
            }
            else
            {
                throw new DerivedClassNotImplementedException <JobStepInvocation>(jobStepInvocation);
            }
        }

        /// <summary>
        /// Submits the batch to its data source, records any record-level errors on the
        /// current step instance, and writes the batch result to the log.
        /// (Extracted from four identical copies of this sequence in ExecuteStep.)
        /// </summary>
        /// <param name="jobInstance">The running job, used for logging context.</param>
        /// <param name="currentJobStepInstance">The step instance that receives error state.</param>
        /// <param name="jobBatch">The batch to submit and log.</param>
        private static void SubmitBatchAndLog(JobInstance jobInstance, JobStepInstance currentJobStepInstance, JobBatch jobBatch)
        {
            jobBatch.SubmitToDataSource();

            if (jobBatch.HasRecordErrors)
            {
                currentJobStepInstance.HasRecordErrors = true;
                // Matches the original behavior: a later batch with errors overwrites Exceptions.
                currentJobStepInstance.Exceptions = jobBatch.GetExceptions();
            }

            SyncEngineLogger.WriteToLog(jobInstance, currentJobStepInstance, jobBatch);
        }