Example #1
        public void Process()
        {
            try
            {
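                // Open a channel to the vocabulary service over the local named pipe endpoint.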
                var channelFactory = ServicesManager.GetChannelFactory<IVocabularyService>("net.pipe://localhost/VocabularyServiceEndpoint");
                vocabularyService = channelFactory.CreateChannel();

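                // Chunk state is tracked in the builder database; this example processes the chunk as a single part ("0").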
                var dbChunk = new DbChunk(Settings.Current.Building.BuilderConnectionString);
                var part    = new ChunkPart(chunkId, createPersonBuilder, "0");

                var timer = new Stopwatch();
                timer.Start();

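                // Load the source data for this part and record the chunk as loaded.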
                part.Load();
                dbChunk.ChunkLoaded(chunkId);
                Logger.Write(chunkId, LogMessageTypes.Info, string.Format("Loaded - {0} ms | {1} Mb", timer.ElapsedMilliseconds, (GC.GetTotalMemory(false) / 1024f) / 1024f));

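                // Build the part using the vocabulary service, then persist it.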
                part.Build(vocabularyService);
                part.Save();

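                // For a Redshift destination the chunk is queued for saving; otherwise it is marked complete immediately.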
                if (Settings.Current.Building.DestinationEngine.Database == Database.Redshift)
                {
                    ServicesManager.AddToSaveQueue(chunkId);
                    Logger.Write(null, LogMessageTypes.Debug, "AddToSaveQueue " + chunkId);
                }
                else
                {
                    dbChunk.ChunkComplete(chunkId);
                }

                channelFactory.Close();
            }
            catch (Exception e)
            {
                Logger.WriteError(chunkId, e);

                throw;
            }
        }

Example #2
        public void Process()
        {
            try
            {
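                // Create a channel to the vocabulary service over the local named pipe endpoint.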
                var channelFactory = ServicesManager.GetChannelFactory<IVocabularyService>("net.pipe://localhost/VocabularyServiceEndpoint");
                vocabularyService = channelFactory.CreateChannel();

                var dbChunk = new DbChunk(Settings.Current.Building.BuilderConnectionString);
                var timer   = new Stopwatch();
                timer.Start();

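                // S3 folder holding the raw data and metadata for this build: {Vendor}/{Id}/raw.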
                var folder = string.Format("{0}/{1}/raw", Settings.Current.Building.Vendor,
                                           Settings.Current.Building.Id);

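                // Restore each source query's field headers from its metadata file on S3; queries for providers, locations,
                // or care sites, or without SQL for the source database, are skipped.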
                Parallel.ForEach(Settings.Current.Building.SourceQueryDefinitions, qd =>
                {
                    if (qd.Providers != null)
                    {
                        return;
                    }
                    if (qd.Locations != null)
                    {
                        return;
                    }
                    if (qd.CareSites != null)
                    {
                        return;
                    }

                    var sql = qd.GetSql(Settings.Current.Building.SourceEngine.Database,
                                        Settings.Current.Building.Vendor, Settings.Current.Building.SourceSchemaName);

                    if (string.IsNullOrEmpty(sql))
                    {
                        return;
                    }

                    qd.FieldHeaders = new Dictionary<string, int>(StringComparer.OrdinalIgnoreCase);

                    var metadataKey = string.Format("{0}/metadata/{1}", folder, qd.FileName + ".txt");

                    using (var client = new AmazonS3Client(Settings.Current.S3AwsAccessKeyId, Settings.Current.S3AwsSecretAccessKey, Amazon.RegionEndpoint.USEast1))
                    using (var stream = new MemoryStream())
                    using (var sr = new StreamReader(stream))
                    {
                        // Download the metadata file for this query definition from S3 into memory.
                        var request = new GetObjectRequest
                        {
                            BucketName = Settings.Current.Bucket,
                            Key = metadataKey
                        };

                        using (var response = client.GetObject(request))
                        {
                            response.ResponseStream.CopyTo(stream);
                        }
                        stream.Position = 0;

                        // Map each field name from the header line to its column index; a duplicate name is a fatal error.
                        var index = 0;
                        foreach (var fieldName in sr.ReadLine().Split(new[] { ',' }, StringSplitOptions.RemoveEmptyEntries))
                        {
                            try
                            {
                                qd.FieldHeaders.Add(fieldName, index);
                                index++;
                            }
                            catch (Exception)
                            {
                                throw new Exception("[RestoreMetadataFromS3] fieldName duplication: " + fieldName + " - " + qd.FileName);
                            }
                        }
                    }
                });

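                // Process the chunk parts in parallel (at most two at a time): load, build against the vocabulary service, then save.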
                Parallel.ForEach(GetParts(), new ParallelOptions { MaxDegreeOfParallelism = 2 }, p =>
                {
                    Logger.Write(chunkId, LogMessageTypes.Info, "load part=" + p);
                    var part = new ChunkPart(chunkId, createPersonBuilder, p);

                    LoadPart(part, p);

                    part.Build(vocabularyService);

                    SavePart(part, p);
                });

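                // Mark the chunk as loaded and log elapsed time and current memory usage.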
                dbChunk.ChunkLoaded(chunkId);

                Logger.Write(chunkId, LogMessageTypes.Info, string.Format("Loaded - {0} ms | {1} Mb", timer.ElapsedMilliseconds, (GC.GetTotalMemory(false) / 1024f) / 1024f));

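                // Queue the chunk for saving when the destination is Redshift; otherwise mark it complete.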
                if (Settings.Current.Building.DestinationEngine.Database == Database.Redshift)
                {
                    ServicesManager.AddToSaveQueue(chunkId);
                    Logger.Write(null, LogMessageTypes.Debug, "AddToSaveQueue " + chunkId);
                }
                else
                {
                    dbChunk.ChunkComplete(chunkId);
                }

                channelFactory.Close();
            }
            catch (Exception e)
            {
                Logger.WriteError(chunkId, e);

                throw;
            }
        }