Example #1
        /// <summary>
        /// Runs a direct-method job followed by a twin-update job against the given device
        /// and prints the status of each job.
        /// </summary>
        /// <param name="deviceIdentity">Device identity that supplies the IoT hub connection string.</param>
        /// <param name="deviceId">Identifier of the target device.</param>
        private static async Task ManageDeviceJob(DeviceIdentity deviceIdentity, string deviceId)
        {
            var    jobClient   = JobClient.CreateFromConnectionString(deviceIdentity.GetConnectionString());
            string methodJobId = Guid.NewGuid().ToString();


            await deviceIdentity.StartMethodJob(methodJobId, jobClient, deviceId);

            Console.WriteLine("Started Twin Update Job");

            var result1 = deviceIdentity.MonitorJob(methodJobId, jobClient).Result;

            Console.WriteLine("Job Status : " + result1.Status);

            string twinUpdateJobId = Guid.NewGuid().ToString();

            await deviceIdentity.StartTwinUpdateJob(twinUpdateJobId, jobClient, deviceId);

            result1 = await deviceIdentity.MonitorJob(twinUpdateJobId, jobClient);
            Console.WriteLine("Job Status : " + result1.Status);
            Console.WriteLine("Press ENTER to exit.");
            Console.ReadLine();
        }
Example #2
    public async Task OnlyJobProcessorShouldBeCalled_IfOutOfBandAttributeHasBeenApplied_ToNotificationHandler()
    {
        var outOfBandHandled = false;

        OutOfBandNotificationHandler.Handled += (_, __) => outOfBandHandled = true;

        var syncHandled = false;

        OutOfBandNotificationHandlerWithoutAttribute.Handled += (_, __) => syncHandled = true;

        var handlers = Container.GetAllInstances <INotificationHandler <OutOfBandNotification> >().ToList();

        Assert.Equal(2, handlers.Count);

        foreach (var handler in handlers)
        {
            await handler.Handle(new OutOfBandNotification(), CancellationToken.None);
        }

        JobClient.ReceivedWithAnyArgs(1).Enqueue <JobProcessor>(x => x.ProcessAsync(null !, null !));
        Assert.False(outOfBandHandled, "Handle must not be called if handler is marked for out of band processing.");
        Assert.True(syncHandled);
    }
Example #3
        public RedisJobServerAsyncTest()
        {
            //Configure storage connection
            var clientConfig = new ClientConfig();

            clientConfig.DBConnectionString = "localhost:6379";
            clientConfig.StorageMode        = "redis";
            jobClient = new JobClient(clientConfig);

            var serverConfig = new ServerConfig();

            serverConfig.DBConnectionString = "localhost:6379";
            serverConfig.StorageMode        = "redis";
            serverConfig.ProcessID          = "JobServerAsyncTest";
            serverConfig.Workers            = 1;
            serverConfig.MaxRunnableJobs    = 1;

            serverConfig.ProgressDBInterval = new TimeSpan(0);
            serverConfig.AutoDeletePeriod   = null;
            serverConfig.ForceStopServer    = true;
            serverConfig.StopServerDelay    = 3000;
            jobServer = new JobServer(serverConfig);
        }
Example #4
 /// <exception cref="System.Exception"/>
 private void EncryptedShuffleWithCerts(bool useClientCerts)
 {
     try
     {
         Configuration conf         = new Configuration();
         string        keystoresDir = new FilePath(Basedir).GetAbsolutePath();
         string        sslConfsDir  = KeyStoreTestUtil.GetClasspathDir(typeof(TestEncryptedShuffle));
         KeyStoreTestUtil.SetupSSLConfig(keystoresDir, sslConfsDir, conf, useClientCerts);
         conf.SetBoolean(MRConfig.ShuffleSslEnabledKey, true);
         StartCluster(conf);
         FileSystem fs       = FileSystem.Get(GetJobConf());
         Path       inputDir = new Path("input");
         fs.Mkdirs(inputDir);
         TextWriter writer = new OutputStreamWriter(fs.Create(new Path(inputDir, "data.txt")));
         writer.Write("hello");
         writer.Close();
         Path    outputDir = new Path("output", "output");
         JobConf jobConf   = new JobConf(GetJobConf());
         jobConf.SetInt("mapred.map.tasks", 1);
         jobConf.SetInt("mapred.map.max.attempts", 1);
         jobConf.SetInt("mapred.reduce.max.attempts", 1);
         jobConf.Set("mapred.input.dir", inputDir.ToString());
         jobConf.Set("mapred.output.dir", outputDir.ToString());
         JobClient  jobClient = new JobClient(jobConf);
         RunningJob runJob    = jobClient.SubmitJob(jobConf);
         runJob.WaitForCompletion();
         NUnit.Framework.Assert.IsTrue(runJob.IsComplete());
         NUnit.Framework.Assert.IsTrue(runJob.IsSuccessful());
     }
     finally
     {
         StopCluster();
     }
 }
Example #5
        /// <exception cref="System.Exception"/>
        private long GetTaskCounterUsage(JobClient client, JobID id, int numReports, int taskId, TaskType type)
        {
            TaskReport[] reports = null;
            if (TaskType.Map.Equals(type))
            {
                reports = client.GetMapTaskReports(id);
            }
            else if (TaskType.Reduce.Equals(type))
            {
                reports = client.GetReduceTaskReports(id);
            }
            NUnit.Framework.Assert.IsNotNull("No reports found for task type '" + type.ToString() + "' in job " + id, reports);
            // make sure that the total number of reports match the expected
            NUnit.Framework.Assert.AreEqual("Mismatch in task id", numReports, reports.Length);
            Counters counters = reports[taskId].GetCounters();

            return(counters.GetCounter(TaskCounter.CommittedHeapBytes));
        }
Example #6
        private void DownloadImageRootProduct(ImageProductInfo imageProductInfo, JobClient updateImagePathProductJobClient, ProducerBasic thumbImageJobClient)
        {
            var fileName = Common.UnicodeToKoDauAndGach(imageProductInfo.Name);

            if (fileName.Length > 100)
            {
                fileName = fileName.Substring(0, 99);
            }
            fileName += ".jpg";
            try
            {
                var folder = Common.GetFolderSaveImageRootProduct(fileName);
                Common.SaveImageProduct(imageProductInfo.ImageUrls, _pathImageRootProduct + folder, fileName);
                imageProductInfo.ImagePath      = Common.GetImagePathRootProduct(folder, fileName);
                imageProductInfo.DownloadedTime = DateTime.Now;
                SendMessageToServiceUpdateImagePath(imageProductInfo, updateImagePathProductJobClient);
                var fulldirectory  = _pathImageProduct.Replace("\\", @"\") + folder.Replace("\\", @"\") + fileName;
                var thumbImageInfo = new ThumbImageProductInfo()
                {
                    ProductId          = imageProductInfo.Id,
                    FileNameImage      = fileName,
                    FolderImage        = folder.Replace("\\", @"\"),
                    FullDirectoryImage = fulldirectory,
                    SizeImage          = widthHeightImages,
                    TypeProduct        = 2
                };
                SendMessageToServiceThumbImage(thumbImageInfo, thumbImageJobClient);
                Log.Info(string.Format("RootProduct: ID = {0} download image success!", imageProductInfo.Id));
            }
            catch (Exception exception)
            {
                Log.Error(string.Format("RootProduct: ID = {0}. ImageUrl: {1} . DetailUrl: {2}", imageProductInfo.Id, imageProductInfo.ImageUrls, imageProductInfo.DetailUrl), exception);
                imageProductInfo.ErrorMessage = exception.ToString();
                SendErrorDownloadImageToService(imageProductInfo);
            }
        }
Example #7
 public FFMpegService(string uri, int threadsPerMachine, IAudioPresetProvider audioPresetProvider, ITimeProvider timeProvider)
 {
     _httpClient = new HttpClient {
         Timeout = TimeSpan.FromSeconds(60)
     };
     _serviceUri  = uri;
     _audioClient = new AudioJobClient(_httpClient)
     {
         BaseUrl = _serviceUri
     };
     _audioMuxClient = new MuxJobClient(_httpClient)
     {
         BaseUrl = _serviceUri
     };
     _hardSubtitlesClient = new HardSubtitlesJobClient(_httpClient)
     {
         BaseUrl = _serviceUri
     };
     _statusClient = new StatusClient(_httpClient)
     {
         BaseUrl = _serviceUri
     };
     _healthClient = new HealthCheckClient(_httpClient)
     {
         BaseUrl = _serviceUri
     };
     _jobClient = new JobClient(_httpClient)
     {
         BaseUrl = _serviceUri
     };
     _timeProvider      = timeProvider;
     _threadsPerMachine = threadsPerMachine;
     _tasksPerAudioJob  = audioPresetProvider.AsDictionary()[StateFormat.audio_od_standard].Count;
     RetryCount         = 3;
     RetrySleepMs       = 10 * 1000;
 }
Example #8
        /// <exception cref="System.IO.IOException"/>
        public virtual void TestCollect()
        {
            JobConf conf = new JobConf();

            Configure(conf);
            try
            {
                JobClient.RunJob(conf);
                // check if all the values were seen by the reducer
                if (TestCollect.Reduce.numSeen != (NumCollectsPerThread * NumFeeders))
                {
                    throw new IOException("Collect test failed!! Total does not match.");
                }
            }
            catch (IOException)
            {
                // Rethrow so the failure is reported; cleanup still runs in the finally block.
                throw;
            }
            finally
            {
                FileSystem fs = FileSystem.Get(conf);
                fs.Delete(OutputDir, true);
            }
        }
Example #9
        /// <exception cref="System.Exception"/>
        public virtual void TestChain()
        {
            Path inDir  = new Path("testing/chain/input");
            Path outDir = new Path("testing/chain/output");

            // Hack for local FS that does not have the concept of a 'mounting point'
            if (IsLocalFS())
            {
                string localPathRoot = Runtime.GetProperty("test.build.data", "/tmp").Replace(' ', '+');
                inDir  = new Path(localPathRoot, inDir);
                outDir = new Path(localPathRoot, outDir);
            }
            JobConf conf = CreateJobConf();

            conf.SetBoolean("localFS", IsLocalFS());
            conf.SetInt("mapreduce.job.maps", 1);
            CleanFlags(conf);
            FileSystem fs = FileSystem.Get(conf);

            fs.Delete(outDir, true);
            if (!fs.Mkdirs(inDir))
            {
                throw new IOException("Mkdirs failed to create " + inDir.ToString());
            }
            DataOutputStream file = fs.Create(new Path(inDir, "part-0"));

            file.WriteBytes("1\n2\n");
            file.Close();
            conf.SetJobName("chain");
            conf.SetInputFormat(typeof(TextInputFormat));
            conf.SetOutputFormat(typeof(TextOutputFormat));
            conf.Set("a", "X");
            JobConf mapAConf = new JobConf(false);

            mapAConf.Set("a", "A");
            ChainMapper.AddMapper(conf, typeof(TestChainMapReduce.AMap), typeof(LongWritable),
                                  typeof(Text), typeof(LongWritable), typeof(Text), true, mapAConf);
            ChainMapper.AddMapper(conf, typeof(TestChainMapReduce.BMap), typeof(LongWritable),
                                  typeof(Text), typeof(LongWritable), typeof(Text), false, null);
            JobConf reduceConf = new JobConf(false);

            reduceConf.Set("a", "C");
            ChainReducer.SetReducer(conf, typeof(TestChainMapReduce.CReduce), typeof(LongWritable),
                                    typeof(Text), typeof(LongWritable), typeof(Text), true, reduceConf);
            ChainReducer.AddMapper(conf, typeof(TestChainMapReduce.DMap), typeof(LongWritable),
                                   typeof(Text), typeof(LongWritable), typeof(Text), false, null);
            JobConf mapEConf = new JobConf(false);

            mapEConf.Set("a", "E");
            ChainReducer.AddMapper(conf, typeof(TestChainMapReduce.EMap), typeof(LongWritable),
                                   typeof(Text), typeof(LongWritable), typeof(Text), true, mapEConf);
            FileInputFormat.SetInputPaths(conf, inDir);
            FileOutputFormat.SetOutputPath(conf, outDir);
            JobClient  jc  = new JobClient(conf);
            RunningJob job = jc.SubmitJob(conf);

            while (!job.IsComplete())
            {
                Sharpen.Thread.Sleep(100);
            }
            NUnit.Framework.Assert.IsTrue(GetFlag(conf, "configure.A"));
            NUnit.Framework.Assert.IsTrue(GetFlag(conf, "configure.B"));
            NUnit.Framework.Assert.IsTrue(GetFlag(conf, "configure.C"));
            NUnit.Framework.Assert.IsTrue(GetFlag(conf, "configure.D"));
            NUnit.Framework.Assert.IsTrue(GetFlag(conf, "configure.E"));
            NUnit.Framework.Assert.IsTrue(GetFlag(conf, "map.A.value.1"));
            NUnit.Framework.Assert.IsTrue(GetFlag(conf, "map.A.value.2"));
            NUnit.Framework.Assert.IsTrue(GetFlag(conf, "map.B.value.1"));
            NUnit.Framework.Assert.IsTrue(GetFlag(conf, "map.B.value.2"));
            NUnit.Framework.Assert.IsTrue(GetFlag(conf, "reduce.C.value.2"));
            NUnit.Framework.Assert.IsTrue(GetFlag(conf, "reduce.C.value.1"));
            NUnit.Framework.Assert.IsTrue(GetFlag(conf, "map.D.value.1"));
            NUnit.Framework.Assert.IsTrue(GetFlag(conf, "map.D.value.2"));
            NUnit.Framework.Assert.IsTrue(GetFlag(conf, "map.E.value.1"));
            NUnit.Framework.Assert.IsTrue(GetFlag(conf, "map.E.value.2"));
            NUnit.Framework.Assert.IsTrue(GetFlag(conf, "close.A"));
            NUnit.Framework.Assert.IsTrue(GetFlag(conf, "close.B"));
            NUnit.Framework.Assert.IsTrue(GetFlag(conf, "close.C"));
            NUnit.Framework.Assert.IsTrue(GetFlag(conf, "close.D"));
            NUnit.Framework.Assert.IsTrue(GetFlag(conf, "close.E"));
        }
Example #10
        public virtual void TestHeapUsageCounter()
        {
            JobConf conf = new JobConf();
            // create a local filesystem handle
            FileSystem fileSystem = FileSystem.GetLocal(conf);
            // define test root directories
            Path rootDir     = new Path(Runtime.GetProperty("test.build.data", "/tmp"));
            Path testRootDir = new Path(rootDir, "testHeapUsageCounter");

            // cleanup the test root directory
            fileSystem.Delete(testRootDir, true);
            // set the current working directory
            fileSystem.SetWorkingDirectory(testRootDir);
            fileSystem.DeleteOnExit(testRootDir);
            // create a mini cluster using the local file system
            MiniMRCluster mrCluster = new MiniMRCluster(1, fileSystem.GetUri().ToString(), 1);

            try
            {
                conf = mrCluster.CreateJobConf();
                JobClient jobClient = new JobClient(conf);
                // define job input
                Path inDir = new Path(testRootDir, "in");
                // create input data
                CreateWordsFile(inDir, conf);
                // configure and run a low memory job which will run without loading the
                // jvm's heap
                RunningJob lowMemJob = RunHeapUsageTestJob(conf, testRootDir, "-Xms32m -Xmx1G",
                                                           0, 0, fileSystem, jobClient, inDir);
                JobID lowMemJobID           = lowMemJob.GetID();
                long  lowMemJobMapHeapUsage = GetTaskCounterUsage(jobClient, lowMemJobID, 1, 0, TaskType.Map);
                System.Console.Out.WriteLine("Job1 (low memory job) map task heap usage: " + lowMemJobMapHeapUsage);
                long lowMemJobReduceHeapUsage = GetTaskCounterUsage(jobClient, lowMemJobID, 1, 0, TaskType.Reduce);
                System.Console.Out.WriteLine("Job1 (low memory job) reduce task heap usage: " + lowMemJobReduceHeapUsage);
                // configure and run a high memory job which will load the jvm's heap
                RunningJob highMemJob = RunHeapUsageTestJob(conf, testRootDir, "-Xms32m -Xmx1G",
                                                            lowMemJobMapHeapUsage + 256 * 1024 * 1024,
                                                            lowMemJobReduceHeapUsage + 256 * 1024 * 1024,
                                                            fileSystem, jobClient, inDir);
                JobID highMemJobID           = highMemJob.GetID();
                long  highMemJobMapHeapUsage = GetTaskCounterUsage(jobClient, highMemJobID, 1, 0, TaskType.Map);
                System.Console.Out.WriteLine("Job2 (high memory job) map task heap usage: " + highMemJobMapHeapUsage);
                long highMemJobReduceHeapUsage = GetTaskCounterUsage(jobClient, highMemJobID, 1, 0, TaskType.Reduce);
                System.Console.Out.WriteLine("Job2 (high memory job) reduce task heap usage: " + highMemJobReduceHeapUsage);
                NUnit.Framework.Assert.IsTrue("Incorrect map heap usage reported by the map task",
                                              lowMemJobMapHeapUsage < highMemJobMapHeapUsage);
                NUnit.Framework.Assert.IsTrue("Incorrect reduce heap usage reported by the reduce task",
                                              lowMemJobReduceHeapUsage < highMemJobReduceHeapUsage);
            }
            finally
            {
                // shutdown the mr cluster
                mrCluster.Shutdown();
                try
                {
                    fileSystem.Delete(testRootDir, true);
                }
                catch (IOException)
                {
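                    // best-effort cleanup; ignore failures while deleting the test directory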
                }
            }
        }
Example #11
 private void UpdateImagePathProduct(ImageProductInfo imageProductInfo, ProductTableAdapter productAdapter, JobClient historyJobClient, JobClient updateRedisJobClient)
 {
     while (_isRunning)
     {
         try
         {
             if (productAdapter.Connection.State == ConnectionState.Closed)
             {
                 productAdapter.Connection.Open();
             }
             productAdapter.UpdateImagePathAndValidAndIsNews(imageProductInfo.ImagePath, imageProductInfo.Id);
             Log.Info(string.Format("ProductId {0} : Update ImagePath And Valid success.", imageProductInfo.Id));
             break;
         }
         catch (Exception exception)
         {
             Log.Error(string.Format("ProductId {0} : Update ImagePath And Valid error.", imageProductInfo.Id), exception);
             Thread.Sleep(60000);
         }
     }
     SendMessageToServiceInsertHistoryDownload(new LogHistoryImageProduct
     {
         DateLog      = imageProductInfo.DownloadedTime,
         IsDownloaded = true,
         ErrorName    = string.Empty,
         ProductId    = imageProductInfo.Id,
         NewsToValid  = imageProductInfo.IsNew
     }, historyJobClient);
     if (imageProductInfo.IsNew)
     {
         SendMessageToServiceUpdateSolrAndRedis(imageProductInfo.Id);
     }
     else
     {
         SendMessageToServiceUpdateRedis(imageProductInfo.Id, updateRedisJobClient);
     }
 }
Example #12
        /// <summary>create and run an Aggregate based map/reduce job.</summary>
        /// <param name="args">the arguments used for job creation</param>
        /// <exception cref="System.IO.IOException"/>
        public static void Main(string[] args)
        {
            JobConf job = ValueAggregatorJob.CreateValueAggregatorJob(args);

            JobClient.RunJob(job);
        }
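
A note on the arguments: CreateValueAggregatorJob builds the job entirely from the args passed to Main, so the command line drives the whole configuration. In the Hadoop aggregate examples those arguments typically take roughly the form sketched below, though the exact order is an assumption here rather than something shown in the snippet:

            //   <inputDirs> <outDir> [numOfReducers [textinputformat|seq [aggregator spec file [jobName]]]]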
Example #13
 public TrackCrawler()
 {
     rabbitMQServer = RabbitMQManager.GetRabbitMQServer("rabbitMQ177");
     updateProductJobClient_TrackCrawlerProduct = new JobClient("TrackCrawler", GroupType.Topic, "TrackCrawler_TrackInfo", true, rabbitMQServer);
 }
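
The same JobClient type is used later in this listing (Example #16) to publish Job messages via PublishJob, so a follow-up call on the client created above would plausibly look like the sketch below; the payload and job type are placeholders, not values taken from the original code:

     // Hypothetical publish, mirroring the pattern in Example #16:
     // var job = new Job { Data = BitConverter.GetBytes(trackedItemId), Type = 2 };
     // updateProductJobClient_TrackCrawlerProduct.PublishJob(job);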
Example #14
        /// <summary>The main driver for sort program.</summary>
        /// <remarks>
        /// The main driver for sort program.
        /// Invoke this method to submit the map/reduce job.
        /// </remarks>
        /// <exception cref="System.IO.IOException">
        /// When there is communication problems with the
        /// job tracker.
        /// </exception>
        /// <exception cref="System.Exception"/>
        public virtual int Run(string[] args)
        {
            Configuration conf         = GetConf();
            JobClient     client       = new JobClient(conf);
            ClusterStatus cluster      = client.GetClusterStatus();
            int           num_reduces  = (int)(cluster.GetMaxReduceTasks() * 0.9);
            string        join_reduces = conf.Get(ReducesPerHost);

            if (join_reduces != null)
            {
                num_reduces = cluster.GetTaskTrackers() * System.Convert.ToInt32(join_reduces);
            }
            Job job = Job.GetInstance(conf);

            job.SetJobName("join");
            job.SetJarByClass(typeof(Sort));
            job.SetMapperClass(typeof(Mapper));
            job.SetReducerClass(typeof(Reducer));
            Type           inputFormatClass  = typeof(SequenceFileInputFormat);
            Type           outputFormatClass = typeof(SequenceFileOutputFormat);
            Type           outputKeyClass    = typeof(BytesWritable);
            Type           outputValueClass  = typeof(TupleWritable);
            string         op        = "inner";
            IList <string> otherArgs = new AList <string>();

            for (int i = 0; i < args.Length; ++i)
            {
                try
                {
                    if ("-r".Equals(args[i]))
                    {
                        num_reduces = System.Convert.ToInt32(args[++i]);
                    }
                    else if ("-inFormat".Equals(args[i]))
                    {
                        inputFormatClass = Sharpen.Runtime.GetType(args[++i]).AsSubclass <InputFormat>();
                    }
                    else if ("-outFormat".Equals(args[i]))
                    {
                        outputFormatClass = Sharpen.Runtime.GetType(args[++i]).AsSubclass <OutputFormat>();
                    }
                    else if ("-outKey".Equals(args[i]))
                    {
                        outputKeyClass = Sharpen.Runtime.GetType(args[++i]).AsSubclass <WritableComparable>();
                    }
                    else if ("-outValue".Equals(args[i]))
                    {
                        outputValueClass = Sharpen.Runtime.GetType(args[++i]).AsSubclass <Writable>();
                    }
                    else if ("-joinOp".Equals(args[i]))
                    {
                        op = args[++i];
                    }
                    else
                    {
                        otherArgs.AddItem(args[i]);
                    }
                }
                catch (FormatException)
                {
                    System.Console.Out.WriteLine("ERROR: Integer expected instead of " + args[i]);
                    return(PrintUsage());
                }
                catch (IndexOutOfRangeException)
                {
                    System.Console.Out.WriteLine("ERROR: Required parameter missing from " + args[i - 1]);
                    return(PrintUsage()); // exits
                }
            }
            // Set user-supplied (possibly default) job configs
            job.SetNumReduceTasks(num_reduces);
            if (otherArgs.Count < 2)
            {
                System.Console.Out.WriteLine("ERROR: Wrong number of parameters: ");
                return(PrintUsage());
            }
            FileOutputFormat.SetOutputPath(job, new Path(otherArgs.Remove(otherArgs.Count - 1)));
            IList <Path> plist = new AList <Path>(otherArgs.Count);

            foreach (string s in otherArgs)
            {
                plist.AddItem(new Path(s));
            }
            job.SetInputFormatClass(typeof(CompositeInputFormat));
            job.GetConfiguration().Set(CompositeInputFormat.JoinExpr,
                                       CompositeInputFormat.Compose(op, inputFormatClass, Sharpen.Collections.ToArray(plist, new Path[0])));
            job.SetOutputFormatClass(outputFormatClass);
            job.SetOutputKeyClass(outputKeyClass);
            job.SetOutputValueClass(outputValueClass);
            DateTime startTime = new DateTime();

            System.Console.Out.WriteLine("Job started: " + startTime);
            int      ret      = job.WaitForCompletion(true) ? 0 : 1;
            DateTime end_time = new DateTime();

            System.Console.Out.WriteLine("Job ended: " + end_time);
            System.Console.Out.WriteLine("The job took " + (end_time.GetTime() - startTime.GetTime
                                                                ()) / 1000 + " seconds.");
            return(ret);
        }
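
Judging purely from the option parsing in Run above, a plausible command line for this join driver is sketched below; class names and paths are placeholders, and the option list is inferred from the code rather than quoted from separate documentation:

            //   join [-r <reduces>] [-inFormat <class>] [-outFormat <class>] [-outKey <class>]
            //        [-outValue <class>] [-joinOp <inner|outer|...>] <input path>... <output path>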
Example #15
 public JobsSample(JobClient jobClient)
 {
     _jobClient = jobClient ?? throw new ArgumentNullException(nameof(jobClient));
 }
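
The guard clause above surfaces a missing dependency at construction time instead of as a later NullReferenceException; for instance, the following call would fail immediately:

     // Throws ArgumentNullException with the parameter name "jobClient".
     // var sample = new JobsSample(null);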
Example #16
        private void DownloadImageSPGoc(long companyId, bool reloadall = false)
        {
            rabbitMQServer = RabbitMQManager.GetRabbitMQServer(rabbitMQServerName);
            // JobClient for updating Solr and Redis
            var updateProductJobClient = new JobClient(updateProductExchangeGroupName, GroupType.Topic, updateProductqueueJobName, true, rabbitMQServer);
            // JobClient for products whose image download failed
            //JobClient downloadImageProductJobClient = new JobClient(updateProductImageGroupName, GroupType.Topic, updateProductImageProductJobName, true, rabbitMQServer);
            // JobClient for deleting thumbnail images
            var deleteThumbJobClient = new JobClient(updateProductImageGroupName, GroupType.Topic, deleteThumbImageProductJobName, true, rabbitMQServer);

            WSS.DownloadImageByHand.DBTableAdapters.ProductTableAdapter productAdapter = new WSS.DownloadImageByHand.DBTableAdapters.ProductTableAdapter();
            productAdapter.Connection.ConnectionString = connectionString;
            WSS.DownloadImageByHand.DB.ProductDataTable productTable = new WSS.DownloadImageByHand.DB.ProductDataTable();
            int demgetproduct = 0;

            while (true)
            {
                try
                {
                    if (reloadall)
                    {
                        productAdapter.FillBy_AllSPGoc(productTable, companyId);
                    }
                    else
                    {
                        // Get the list of products with valid = 1
                        productAdapter.FillBy_SPGocAndValid(productTable, companyId);
                    }
                    break;
                }
                catch (Exception ex)
                {
                    demgetproduct++;
                    if (demgetproduct == 1)
                    {
                        Log.Error(string.Format("CompanyID = {0} Get Product of Company in SQL Error 1", companyId), ex);
                    }
                    else if (demgetproduct == numbererror)
                    {
                        Log.Error(string.Format("CompanyID = {0} Get Product of Company in SQL Error {1}", companyId, numbererror), ex);
                        break;
                    }
                    Thread.Sleep(60000);
                }
            }
            try
            {
                if (productTable.Rows.Count > 0)
                {
                    this.Invoke(new Action(() =>
                    {
                        rbsuccess.AppendText(companyId + ".Get " + productTable.Rows.Count + " Product from SQL..." + System.Environment.NewLine);
                    }));
                    string path         = pathImageSPGOC;
                    int    countsuccess = 0;
                    int    countproduct = productTable.Rows.Count;
                    for (int i = 0; i < productTable.Rows.Count; i++)
                    {
                        long   productId   = Common.Obj2Int64(productTable.Rows[i]["ID"].ToString());
                        string nameProduct = productTable.Rows[i]["Name"].ToString();
                        string imageUrl    = productTable.Rows[i]["ImageUrls"].ToString();
                        if (string.IsNullOrEmpty(nameProduct) || string.IsNullOrEmpty(imageUrl))
                        {
                            Log.Error(string.Format("CompanyID = {0} ProductID = {1} Name or ImageUrl is null or empty.", companyId, productId));
                            continue;
                        }
                        string filename = Common.UnicodeToKoDauAndGach(nameProduct);
                        string tempf    = filename.Replace("-", "");
                        if (tempf.Length < 3)
                        {
                            continue;
                        }
                        string folder = tempf.Substring(0, 3);
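                        // The renames below appear to avoid reserved Windows device names
                        // (con, aux, prn, nul) and the special "bin" folder name.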
                        if (folder == "bin")
                        {
                            folder = "bin1";
                        }
                        if (folder == "con")
                        {
                            folder = "con1";
                        }
                        if (folder == "aux")
                        {
                            folder = "aux1";
                        }
                        if (folder == "prn")
                        {
                            folder = "prn1";
                        }
                        if (folder == "nul")
                        {
                            folder = "nul1";
                        }
                        if (filename.Length > 100)
                        {
                            filename = filename.Substring(0, 99);
                        }
                        bool fileSaved = true;
                        try
                        {
                            //fileSaved = Common.SaveFileDownloadImage(imageUrl, path + folder, filename + ".jpg", productId, companyId);
                        }
                        catch (Exception)
                        {
                        }
                        if (fileSaved)
                        {
                            int    w = 0, h = 0;
                            string pathsave = "Store/images/" + folder + "/" + filename + ".jpg";
                            while (checkstop)
                            {
                                try
                                {
                                    productAdapter.UpdateQuery(pathsave, w, h, productId);
                                    Log.InfoFormat("{0}/{1}. CompanyID = {2} ProductID = {3} Download Image success.", i, countproduct, companyId, productId);
                                    countsuccess++;
                                    #region Send Message Update solr and redis
                                    Job job = new Job();
                                    job.Data = BitConverter.GetBytes(productId);
                                    job.Type = 2;
                                    updateProductJobClient.PublishJob(job, updateProductJobExpirationMS);
                                    //Log.InfoFormat("Send message To RabbitMq {0} with ID = {1}", rabbitMQServerName, productId);
                                    #endregion

                                    #region Delete the thumbnail image if one exists
                                    // push a message to the service that deletes the thumbnail image
                                    Job deletejob          = new Job();
                                    MqThumbImageInfo thumb = new MqThumbImageInfo();
                                    thumb.ProductId   = productId;
                                    thumb.FolderImage = folder;
                                    thumb.ImageName   = filename;
                                    thumb.TypeProduct = 2;
                                    deletejob.Data    = MqThumbImageInfo.GetMess(thumb);
                                    deleteThumbJobClient.PublishJob(deletejob);
                                    #endregion
                                    this.Invoke(new Action(() =>
                                    {
                                        rbsuccess.AppendText(string.Format("{0}...{1}: {2}.{3} success.", 1, companyId, i, productId) + System.Environment.NewLine);
                                    }));
                                    break;
                                }
                                catch (Exception ex)
                                {
                                    this.Invoke(new Action(() =>
                                    {
                                        rbfail.AppendText(i + "." + productId + ": Update Solr + Push service Delete Thumb fails." + ex.ToString() + System.Environment.NewLine);
                                    }));
                                    break;
                                    //Thread.Sleep(10000);
                                    //dem++;
                                    //if (dem == 1)
                                    //    Log.Error(string.Format("CompanyID = {0} Product = {1}, Update ImagePath Error 1", companyId, productId), ex);
                                    //else if (dem == numbererror)
                                    //{
                                    //    Log.Error(string.Format("CompanyID = {0} Product = {1}, Update ImagePath Error {2}", companyId, productId, numbererror), ex);
                                    //    break;
                                    //}
                                }
                            }
                        }
                        else
                        {
                            this.Invoke(new Action(() =>
                            {
                                //lbFails.Text = countfail.ToString();
                                rbsuccess.AppendText(string.Format("{0}...{1}: {2}.{3} fails.", 1, companyId, i, productId) + System.Environment.NewLine);
                                rbfail.AppendText(i + "." + productId + ": Download fails." + imageUrl + " ...Name: " + nameProduct + System.Environment.NewLine);
                            }));
                        }
                    }
                    Log.InfoFormat("CompanyID = {0} Download Image Success {1}/{2}", companyId, countsuccess, productTable.Rows.Count);
                }
                else
                {
                    Log.InfoFormat("CompanyID = {0} 0 product (valid = 1 and ImagePath is Null or empty)", companyId);
                }
            }
            catch (Exception ex)
            {
                Log.Error(string.Format("CompanyID = {0} ERROR~~~", companyId), ex);
            }
        }
Example #17
        static void Main(string[] args)
        {
            string jobId = "DHCMD" + Guid.NewGuid().ToString();

            // The query condition can also be on a single device Id or on a list of device Ids.
            // https://docs.microsoft.com/en-us/azure/iot-hub/iot-hub-devguide-query-language covers
            //   IoT Hub query language in additional detail.
            string query = $"DeviceId IN ['{deviceId}']";
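            // A tag-based condition can also be used here, e.g. (hypothetical tag and value):
            //   string query = "tags.location.plant = 'Redmond43'";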

            Twin twin = new Twin(deviceId);

            twin.Tags = new TwinCollection();
            twin.Tags[TestTagName] = TestTagValue;

            // *************************************** Start JobClient ***************************************
            Console.WriteLine($"Create JobClient from the connectionString...");
            JobClient jobClient = JobClient.CreateFromConnectionString(connectionString);

            Console.WriteLine($"JobClient created with success");
            Console.WriteLine();

            // *************************************** Schedule twin job ***************************************
            Console.WriteLine($"Schedule twin job {jobId} for {deviceId}...");
            JobResponse createJobResponse = jobClient.ScheduleTwinUpdateAsync(
                jobId, query, twin, DateTime.UtcNow, (long)TimeSpan.FromMinutes(2).TotalSeconds).Result;

            Console.WriteLine($"Schedule response");
            Console.WriteLine(JsonConvert.SerializeObject(createJobResponse, Formatting.Indented));
            Console.WriteLine();

            // *************************************** Get all Jobs ***************************************
            IEnumerable <JobResponse> queryResults = jobClient.CreateQuery().GetNextAsJobResponseAsync().Result;
            var getJobs = queryResults.ToList();

            Console.WriteLine($"getJobs return {getJobs.Count} result(s)");

            foreach (JobResponse job in getJobs)
            {
                Console.WriteLine(JsonConvert.SerializeObject(job, Formatting.Indented));

                if (job.Status != JobStatus.Completed)
                {
                    Console.WriteLine($"Incorrect query jobs result");
                    return;
                }
            }
            Console.WriteLine();

            // *************************************** Check completion ***************************************
            Console.WriteLine($"Monitoring jobClient for job completion...");
            JobResponse jobResponse = jobClient.GetJobAsync(jobId).Result;

            Console.WriteLine($"First result");
            Console.WriteLine(JsonConvert.SerializeObject(jobResponse, Formatting.Indented));
            while (jobResponse.Status != JobStatus.Completed)
            {
                Task.Delay(TimeSpan.FromMilliseconds(100)).Wait();
                jobResponse = jobClient.GetJobAsync(jobId).Result;
            }
            Console.WriteLine($"Job ends with status {jobResponse.Status}");
        }
Example #18
 public TransportationJobClient(string connectionString)
 {
     jobClient = JobClient.CreateFromConnectionString(connectionString);
 }
Example #19
 private void InitData()
 {
     _productAdapter = new ProductAdapter(new SqlDb(ConfigCrawler.ConnectProduct));
     _jobClient      = new JobClient(ConfigImages.ImboExchangeImages, GroupType.Topic, ConfigImages.ImboRoutingKeyDownloadImageProduct, true, RabbitMQManager.GetRabbitMQServer(ConfigImages.RabbitMqServerName));
 }
Example #20
 public void Setup()
 {
     httpClientHelperMock = new Mock <IHttpClientHelper>();
     jobClient            = new JobClient(httpClientHelperMock.Object);
 }
Example #21
        private void SendMessageToServiceInsertHistoryDownload(LogHistoryImageProduct historyImageProduct, JobClient historyJobClient)
        {
            var job = new Job
            {
                Data = LogHistoryImageProduct.GetMessage(historyImageProduct)
            };

            while (_isRunning)
            {
                try
                {
                    historyJobClient.PublishJob(job);
                    Log.Info(string.Format("Push message to services insert history download image productid = {0}",
                                           historyImageProduct.ProductId));
                    break;
                }
                catch (Exception ex)
                {
                    Log.Error(string.Format("ProductId = {0} Push message to services insert history download image error.", historyImageProduct.ProductId), ex);
                    Thread.Sleep(60000);
                }
            }
        }
Example #22
        public async Task Execute(long ciProjectId, long ciBuildId, long ciJobId, long serverId, bool serverIsExternal,
                                  int retries, CancellationToken cancellationToken)
        {
            // Includes are needed here to provide fully populated data for update notifications
            var job = await Database.CiJobs.Include(j => j.Build !).ThenInclude(b => b.CiProject)
                      .FirstOrDefaultAsync(
                j => j.CiProjectId == ciProjectId && j.CiBuildId == ciBuildId && j.CiJobId == ciJobId,
                cancellationToken);

            BaseServer? server;

            if (serverIsExternal)
            {
                server = await Database.ExternalServers.FindAsync(new object[] { serverId }, cancellationToken);
            }
            else
            {
                server = await Database.ControlledServers.FindAsync(new object[] { serverId }, cancellationToken);
            }

            if (server == null)
            {
                throw new ArgumentException($"Could not find server ({serverId}, external: {serverIsExternal}) " +
                                            "to run build on");
            }

            if (job == null)
            {
                Logger.LogWarning("Skipping CI job as it doesn't exist");
                ReleaseServerReservation(server);
                return;
            }

            if (job.State != CIJobState.WaitingForServer)
            {
                Logger.LogWarning(
                    "CI job is not in waiting for server status, refusing to start running it on server: {ServerId}",
                    serverId);
                ReleaseServerReservation(server);
                return;
            }

            if (server.ReservedFor != job.CiJobId)
            {
                Logger.LogWarning(
                    "CI job id doesn't match reservation on server, refusing to start it on server: {ServerId}",
                    serverId);
                ReleaseServerReservation(server);
                return;
            }

            // Get the CI image for the job
            var imageFileName       = job.GetImageFileName();
            var serverSideImagePath = Path.Join("CI/Images", imageFileName);

            StorageItem? imageItem;

            try
            {
                imageItem = await StorageItem.FindByPath(Database, serverSideImagePath);
            }
            catch (Exception e)
            {
                // ReSharper disable once ExceptionPassedAsTemplateArgumentProblem
                Logger.LogError("Invalid image specified for CI job: {Image}, path parse exception: {@E}", job.Image,
                                e);
                job.SetFinishSuccess(false);
                await job.CreateFailureSection(Database, "Invalid image specified for job (invalid path)");
                await OnJobEnded(server, job);

                return;
            }

            if (string.IsNullOrEmpty(job.Image) || imageItem == null)
            {
                Logger.LogError("Invalid image specified for CI job: {Image}", job.Image);
                job.SetFinishSuccess(false);
                await job.CreateFailureSection(Database, "Invalid image specified for job (not found)");
                await OnJobEnded(server, job);

                return;
            }

            // The CI system uses the first valid image version. For future updates a different file name is needed
            // For example bumping the ":v1" to a ":v2" suffix
            var version = await imageItem.GetLowestUploadedVersion(Database);

            if (version == null || version.StorageFile == null)
            {
                Logger.LogError("Image with no uploaded version specified for CI job: {Image}", job.Image);
                job.SetFinishSuccess(false);
                await job.CreateFailureSection(Database, "Invalid image specified for job (not uploaded version)");
                await OnJobEnded(server, job);

                return;
            }

            // Queue a job to lock writing to the CI image if it isn't write protected yet
            if (imageItem.WriteAccess != FileAccess.Nobody)
            {
                Logger.LogInformation(
                    "Storage item {Id} used as CI image is not write locked, queuing a job to lock it", imageItem.Id);

                // To ensure the upload time is expired, this is upload time + 5 minutes
                JobClient.Schedule <LockCIImageItemJob>(x => x.Execute(imageItem.Id, CancellationToken.None),
                                                        AppInfo.RemoteStorageUploadExpireTime + TimeSpan.FromMinutes(5));
            }

            Logger.LogInformation("Trying to start job {CIProjectId}-{CIBuildId}-{CIJobId} on reserved " +
                                  "server ({Id}, {ServerIsExternal})", ciProjectId, ciBuildId, ciJobId, server.Id, serverIsExternal);

            if (server.PublicAddress == null || server.PublicAddress.Equals(IPAddress.None))
            {
                throw new Exception($"Server ({server.Id}, {serverIsExternal}) doesn't have a public address set");
            }

            // Try to start running the job, this can fail if the server is not actually really up yet
            IBaseSSHAccess sshAccess;

            try
            {
                if (serverIsExternal)
                {
                    externalSSHAccess.ConnectTo(server.PublicAddress.ToString(),
                                                ((ExternalServer)server).SSHKeyFileName);
                    sshAccess = externalSSHAccess;
                }
                else
                {
                    controlledSSHAccess.ConnectTo(server.PublicAddress.ToString());
                    sshAccess = controlledSSHAccess;
                }
            }
            catch (SocketException)
            {
                Logger.LogInformation("Connection failed (socket exception), server is probably not up (yet)");
                await Requeue(job, retries - 1, server, serverIsExternal);

                return;
            }
            catch (SshOperationTimeoutException)
            {
                Logger.LogInformation("Connection failed (ssh timed out), server is probably not up (yet)");
                await Requeue(job, retries - 1, server, serverIsExternal);

                return;
            }

            var imageDownloadUrl =
                remoteDownloadUrls.CreateDownloadFor(version.StorageFile, AppInfo.RemoteStorageDownloadExpireTime);

            // Connection success, so now we can run the job starting on the server
            job.RunningOnServerId         = serverId;
            job.RunningOnServerIsExternal = server.IsExternal;

            // TODO: permanently store on which server this job was ran on and how long since creation it took to get
            // here

            if (job.Build == null)
            {
                throw new NotLoadedModelNavigationException();
            }

            CISecretType jobSpecificSecretType = job.Build.IsSafe ? CISecretType.SafeOnly : CISecretType.UnsafeOnly;

            var secrets = await Database.CiSecrets.Where(s => s.CiProjectId == job.CiProjectId &&
                                                         (s.UsedForBuildTypes == jobSpecificSecretType || s.UsedForBuildTypes == CISecretType.All))
                          .ToListAsync(cancellationToken);

            await PerformServerCleanUpIfNeeded(server, sshAccess);

            // Then move on to the build starting, first thing is to download the CI executor script
            // TODO: is there a possibility that this is not secure? Someone would need to do HTTPS MItM attack...

            var executorDownload         = GetUrlToDownloadCIExecutor();
            var executorResourceDownload = GetUrlToDownloadCIExecutorResource();
            var executorHash             = await hashCalculator.Sha256(executorDownload, cancellationToken);

            var posixHelperHash = await hashCalculator.Sha256(executorResourceDownload, cancellationToken);

            // TODO: using async would be nice for the run commands when supported
            var result1 = sshAccess
                          .RunCommand("set -e\n" +
                                      CreateDownloadCommand("~/CIExecutor", executorHash, executorDownload) +
                                      CreateDownloadCommand("~/libMonoPosixHelper.so", posixHelperHash,
                                                            executorResourceDownload) + "chmod +x ~/CIExecutor");

            if (!result1.Success)
            {
                throw new Exception($"Failed to run executor download step: {result1.Result}, error: {result1.Error}");
            }

            // This save is done here as the build status might get reported back to us before we finish with the ssh
            // commands
            job.State                = CIJobState.Running;
            job.RanOnServer          = serverIsExternal ? $"External server {serverId}" : $"Controlled server {serverId}";
            job.TimeWaitingForServer = DateTime.UtcNow - job.CreatedAt;
            await Database.SaveChangesAsync(cancellationToken);

            // and then run it with environment variables for this build

            // Remove all type secrets if there is one with the same name that is build specific
            var cleanedSecrets = secrets
                                 .Where(s => s.UsedForBuildTypes != CISecretType.All ||
                                             !secrets.Any(s2 => s2.SecretName == s.SecretName &&
                                                                s2.UsedForBuildTypes != s.UsedForBuildTypes))
                                 .Select(s => s.ToExecutorData());

            if (job.Build.CiProject == null)
            {
                throw new NotLoadedModelNavigationException();
            }

            var env = new StringBuilder(250);

            env.Append("export CI_REF=\"");
            env.Append(BashEscape.EscapeForBash(job.Build.RemoteRef));
            env.Append("\"; export CI_COMMIT_HASH=\"");
            env.Append(BashEscape.EscapeForBash(job.Build.CommitHash));
            env.Append("\"; export CI_EARLIER_COMMIT=\"");
            env.Append(BashEscape.EscapeForBash(job.Build.PreviousCommit ?? AppInfo.NoCommitHash));
            env.Append("\"; export CI_BRANCH=\"");
            env.Append(BashEscape.EscapeForBash(job.Build.Branch ?? "unknown_branch"));
            env.Append("\"; export CI_DEFAULT_BRANCH=\"");
            env.Append(BashEscape.EscapeForBash(job.Build.CiProject.DefaultBranch));
            env.Append("\"; export CI_TRUSTED=\"");
            env.Append(job.Build.IsSafe);
            env.Append("\"; export CI_ORIGIN=\"");
            env.Append(BashEscape.EscapeForBash(job.Build.CiProject.RepositoryCloneUrl));
            env.Append("\"; export CI_IMAGE_DL_URL=\"");
            env.Append(BashEscape.EscapeForBash(imageDownloadUrl));
            env.Append("\"; export CI_IMAGE_NAME=\"");
            env.Append(BashEscape.EscapeForBash(job.Image));
            env.Append("\"; export CI_IMAGE_FILENAME=\"");
            env.Append(BashEscape.EscapeForBash(imageFileName));
            env.Append("\"; export CI_CACHE_OPTIONS=\"");
            env.Append(BashEscape.EscapeForBash(job.CacheSettingsJson ?? "{}"));
            env.Append("\"; export CI_SECRETS=\"");
            env.Append(BashEscape.EscapeForBash(JsonSerializer.Serialize(cleanedSecrets)));
            env.Append("\"; export CI_JOB_NAME=\"");
            env.Append(BashEscape.EscapeForBash(job.JobName));
            env.Append("\";");

            var result2 =
                sshAccess.RunCommand($"{env} nohup ~/CIExecutor {GetConnectToUrl(job)} > " +
                                     "build_script_output.txt 2>&1 &");

            if (!result2.Success)
            {
                throw new Exception($"Failed to start running CI executor: {result2.Result}, error: {result2.Error}");
            }

            JobClient.Schedule <CheckCIJobOutputHasConnectedJob>(
                x => x.Execute(ciProjectId, ciBuildId, ciJobId, serverId, CancellationToken.None),
                TimeSpan.FromMinutes(5));

            JobClient.Schedule <CancelCIBuildIfStuckJob>(
                x => x.Execute(ciProjectId, ciBuildId, ciJobId, serverId, server.IsExternal, CancellationToken.None),
                TimeSpan.FromMinutes(61));

            Logger.LogInformation(
                "CI job startup succeeded, now it's up to the executor to contact us with updates");
        }
Example #23
        /// <exception cref="System.Exception"/>
        public virtual int Run(string[] args)
        {
            if (args.Length < 4)
            {
                //input-dir should contain a huge file ( > 2GB)
                Usage();
            }
            Path bigMapInput  = null;
            Path outputPath   = null;
            bool createInput  = false;
            long fileSizeInMB = 3 * 1024; // default of 3GB (>2GB)

            for (int i = 0; i < args.Length; ++i)
            {
                if ("-input".Equals(args[i]))
                {
                    bigMapInput = new Path(args[++i]);
                }
                else
                {
                    if ("-output".Equals(args[i]))
                    {
                        outputPath = new Path(args[++i]);
                    }
                    else
                    {
                        if ("-create".Equals(args[i]))
                        {
                            createInput  = true;
                            fileSizeInMB = long.Parse(args[++i]);
                        }
                        else
                        {
                            Usage();
                        }
                    }
                }
            }
            FileSystem fs      = FileSystem.Get(GetConf());
            JobConf    jobConf = new JobConf(GetConf(), typeof(BigMapOutput));

            jobConf.SetJobName("BigMapOutput");
            jobConf.SetInputFormat(typeof(SortValidator.RecordStatsChecker.NonSplitableSequenceFileInputFormat));
            jobConf.SetOutputFormat(typeof(SequenceFileOutputFormat));
            FileInputFormat.SetInputPaths(jobConf, bigMapInput);
            if (fs.Exists(outputPath))
            {
                fs.Delete(outputPath, true);
            }
            FileOutputFormat.SetOutputPath(jobConf, outputPath);
            jobConf.SetMapperClass(typeof(IdentityMapper));
            jobConf.SetReducerClass(typeof(IdentityReducer));
            jobConf.SetOutputKeyClass(typeof(BytesWritable));
            jobConf.SetOutputValueClass(typeof(BytesWritable));
            if (createInput)
            {
                CreateBigMapInputFile(jobConf, fs, bigMapInput, fileSizeInMB);
            }
            DateTime startTime = new DateTime();

            System.Console.Out.WriteLine("Job started: " + startTime);
            JobClient.RunJob(jobConf);
            DateTime end_time = new DateTime();

            System.Console.Out.WriteLine("Job ended: " + end_time);
            return(0);
        }
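
Based on the argument parsing at the top of Run, the tool is invoked roughly as follows (directory names are placeholders):

            //   BigMapOutput -input <input-dir> -output <output-dir> [-create <file size in MB>]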
Example #24
        // The following methods are only used when LoadGenerator is run a MR job
        /// <summary>Based on args we submit the LoadGenerator as MR job.</summary>
        /// <remarks>
        /// Based on args we submit the LoadGenerator as MR job.
        /// Number of MapTasks is numMapTasks
        /// </remarks>
        /// <returns>exitCode for job submission</returns>
        private int SubmitAsMapReduce()
        {
            System.Console.Out.WriteLine("Running as a MapReduce job with " + numMapTasks + " mapTasks;  Output to file "
                                         + mrOutDir);
            Configuration conf = new Configuration(GetConf());

            // First set all the args of LoadGenerator as Conf vars to pass to MR tasks
            conf.Set(LgRoot, root.ToString());
            conf.SetInt(LgMaxdelaybetweenops, maxDelayBetweenOps);
            conf.SetInt(LgNumofthreads, numOfThreads);
            conf.Set(LgReadpr, readProbs[0] + string.Empty);
            //Pass Double as string
            conf.Set(LgWritepr, writeProbs[0] + string.Empty);
            //Pass Double as string
            conf.SetLong(LgSeed, seed);
            //No idea what this is
            conf.SetInt(LgNummaptasks, numMapTasks);
            if (scriptFile == null && durations[0] <= 0)
            {
                System.Console.Error.WriteLine("When run as a MapReduce job, elapsed Time or ScriptFile must be specified"
                                               );
                System.Environment.Exit(-1);
            }
            conf.SetLong(LgElapsedtime, durations[0]);
            conf.SetLong(LgStarttime, startTime);
            if (scriptFile != null)
            {
                conf.Set(LgScriptfile, scriptFile);
            }
            conf.Set(LgFlagfile, flagFile.ToString());
            // Now set the necessary conf variables that apply to run MR itself.
            JobConf jobConf = new JobConf(conf, typeof(Org.Apache.Hadoop.FS.LoadGenerator.LoadGenerator
                                                       ));

            jobConf.SetJobName("NNLoadGeneratorViaMR");
            jobConf.SetNumMapTasks(numMapTasks);
            jobConf.SetNumReduceTasks(1);
            // 1 reducer to collect the results
            jobConf.SetOutputKeyClass(typeof(Text));
            jobConf.SetOutputValueClass(typeof(IntWritable));
            jobConf.SetMapperClass(typeof(LoadGeneratorMR.MapperThatRunsNNLoadGenerator));
            jobConf.SetReducerClass(typeof(LoadGeneratorMR.ReducerThatCollectsLGdata));
            jobConf.SetInputFormat(typeof(LoadGeneratorMR.DummyInputFormat));
            jobConf.SetOutputFormat(typeof(TextOutputFormat));
            // Explicitly set number of max map attempts to 1.
            jobConf.SetMaxMapAttempts(1);
            // Explicitly turn off speculative execution
            jobConf.SetSpeculativeExecution(false);
            // This mapReduce job has no input but has output
            FileOutputFormat.SetOutputPath(jobConf, new Path(mrOutDir));
            try
            {
                JobClient.RunJob(jobConf);
            }
            catch (IOException e)
            {
                System.Console.Error.WriteLine("Failed to run job: " + e.Message);
                return(-1);
            }
            return(0);
        }
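Everything the map tasks need is carried in the job Configuration via the Lg* keys set above; a mapper on the other side would read them back roughly as sketched here (illustrative only, the real reader is MapperThatRunsNNLoadGenerator):

        // Sketch: recovering the LoadGenerator parameters inside a map task (illustrative).
        private void ReadLoadGeneratorConf(JobConf conf)
        {
            string rootDir  = conf.Get(LgRoot);
            int    maxDelay = conf.GetInt(LgMaxdelaybetweenops, 0);
            int    threads  = conf.GetInt(LgNumofthreads, 1);
            double readPr   = double.Parse(conf.Get(LgReadpr));   // doubles were passed as strings
            double writePr  = double.Parse(conf.Get(LgWritepr));
            long   elapsed  = conf.GetLong(LgElapsedtime, 0);
        }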
Пример #25
0
        // set up heap options, target value for memory loader and the output
        // directory before running the job
        /// <exception cref="System.IO.IOException"/>
        private static RunningJob RunHeapUsageTestJob(JobConf conf, Path testRootDir, string
                                                      heapOptions, long targetMapValue, long targetReduceValue, FileSystem fs, JobClient
                                                      client, Path inDir)
        {
            // define a job
            JobConf jobConf = new JobConf(conf);

            // configure the jobs
            jobConf.SetNumMapTasks(1);
            jobConf.SetNumReduceTasks(1);
            jobConf.SetMapperClass(typeof(TestJobCounters.MemoryLoaderMapper));
            jobConf.SetReducerClass(typeof(TestJobCounters.MemoryLoaderReducer));
            jobConf.SetInputFormat(typeof(TextInputFormat));
            jobConf.SetOutputKeyClass(typeof(LongWritable));
            jobConf.SetOutputValueClass(typeof(Org.Apache.Hadoop.IO.Text));
            jobConf.SetMaxMapAttempts(1);
            jobConf.SetMaxReduceAttempts(1);
            jobConf.Set(JobConf.MapredTaskJavaOpts, heapOptions);
            // set the targets
            jobConf.SetLong(TestJobCounters.MemoryLoaderMapper.TargetValue, targetMapValue);
            jobConf.SetLong(TestJobCounters.MemoryLoaderReducer.TargetValue, targetReduceValue
                            );
            // set the input directory for the job
            FileInputFormat.SetInputPaths(jobConf, inDir);
            // define job output folder
            Path outDir = new Path(testRootDir, "out");

            fs.Delete(outDir, true);
            FileOutputFormat.SetOutputPath(jobConf, outDir);
            // run the job
            RunningJob job = client.SubmitJob(jobConf);

            job.WaitForCompletion();
            JobID jobID = job.GetID();

            NUnit.Framework.Assert.IsTrue("Job " + jobID + " failed!", job.IsSuccessful());
            return(job);
        }
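Note that client.SubmitJob, unlike the blocking JobClient.RunJob used in the other examples, returns immediately with a RunningJob handle, which is why the helper calls WaitForCompletion itself. The same handle can also be polled for progress; a sketch (method names follow the converted Java RunningJob API and are used here under that assumption):

        // Sketch: polling the RunningJob handle instead of blocking on WaitForCompletion.
        RunningJob handle = client.SubmitJob(jobConf);
        while (!handle.IsComplete())
        {
            System.Console.Out.WriteLine("map: " + handle.MapProgress() + " reduce: " + handle.ReduceProgress());
            Thread.Sleep(2000);
        }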
Пример #26
0
        private void ResendToServiceDownloadImageProduct(ImageProductInfo imageProductInfo, JobClient downloadImageProductJobClient)
        {
            var job = new Job {
                Data = ImageProductInfo.GetMessage(imageProductInfo)
            };

            while (_isRunning)
            {
                try
                {
                    downloadImageProductJobClient.PublishJob(job);
                    Log.Info(string.Format("Resend to services download image productid {0}", imageProductInfo.Id));
                    break;
                }
                catch (Exception ex)
                {
                    Log.Error(string.Format("ProductId = {0} resend message to service downloadimageproduct.", imageProductInfo.Id), ex);
                    Thread.Sleep(120000);
                }
            }
        }
Пример #27
0
        // generates the input for the benchmark
        /// <summary>This is the main routine for launching the benchmark.</summary>
        /// <remarks>
        /// This is the main routine for launching the benchmark. It generates random
        /// input data. The input is non-splittable. Sort is used for benchmarking.
        /// This benchmark reports the effect of having multiple sort and spill
        /// cycles over a single sort and spill.
        /// </remarks>
        /// <exception cref="System.IO.IOException"></exception>
        /// <exception cref="System.Exception"/>
        public virtual int Run(string[] args)
        {
            Log.Info("Starting the benchmark for threaded spills");
            string version = "ThreadedMapBenchmark.0.0.1";

            System.Console.Out.WriteLine(version);
            string usage = "Usage: threadedmapbenchmark " + "[-dataSizePerMap <data size (in mb) per map, default is 128 mb>] "
                           + "[-numSpillsPerMap <number of spills per map, default is 2>] " + "[-numMapsPerHost <number of maps per host, default is 1>]";
            int dataSizePerMap = 128;
            // in mb
            int     numSpillsPerMap = 2;
            int     numMapsPerHost  = 1;
            JobConf masterConf      = new JobConf(GetConf());

            for (int i = 0; i < args.Length; i++)
            {
                // parse command line
                if (args[i].Equals("-dataSizePerMap"))
                {
                    dataSizePerMap = System.Convert.ToInt32(args[++i]);
                }
                else if (args[i].Equals("-numSpillsPerMap"))
                {
                    numSpillsPerMap = System.Convert.ToInt32(args[++i]);
                }
                else if (args[i].Equals("-numMapsPerHost"))
                {
                    numMapsPerHost = System.Convert.ToInt32(args[++i]);
                }
                else
                {
                    System.Console.Error.WriteLine(usage);
                    System.Environment.Exit(-1);
                }
            }
            if (dataSizePerMap < 1 || numSpillsPerMap < 1 || numMapsPerHost < 1)
            {
                // verify arguments
                System.Console.Error.WriteLine(usage);
                System.Environment.Exit(-1);
            }
            FileSystem fs = null;

            try
            {
                // using random-writer to generate the input data
                GenerateInputData(dataSizePerMap, numSpillsPerMap, numMapsPerHost, masterConf);
                // configure job for sorting
                JobConf job = new JobConf(masterConf, typeof(ThreadedMapBenchmark));
                job.SetJobName("threaded-map-benchmark-unspilled");
                job.SetJarByClass(typeof(ThreadedMapBenchmark));
                job.SetInputFormat(typeof(SortValidator.RecordStatsChecker.NonSplitableSequenceFileInputFormat
                                          ));
                job.SetOutputFormat(typeof(SequenceFileOutputFormat));
                job.SetOutputKeyClass(typeof(BytesWritable));
                job.SetOutputValueClass(typeof(BytesWritable));
                job.SetMapperClass(typeof(IdentityMapper));
                job.SetReducerClass(typeof(IdentityReducer));
                FileInputFormat.AddInputPath(job, InputDir);
                FileOutputFormat.SetOutputPath(job, OutputDir);
                JobClient     client  = new JobClient(job);
                ClusterStatus cluster = client.GetClusterStatus();
                job.SetNumMapTasks(numMapsPerHost * cluster.GetTaskTrackers());
                job.SetNumReduceTasks(1);
                // set mapreduce.task.io.sort.mb to avoid spill
                int ioSortMb = (int)Math.Ceil(Factor * dataSizePerMap);
                job.Set(JobContext.IoSortMb, ioSortMb.ToString());
                fs = FileSystem.Get(job);
                Log.Info("Running sort with 1 spill per map");
                long startTime = Runtime.CurrentTimeMillis();
                JobClient.RunJob(job);
                long endTime = Runtime.CurrentTimeMillis();
                Log.Info("Total time taken : " + (endTime - startTime).ToString() + " millisec");
                fs.Delete(OutputDir, true);
                // set mapreduce.task.io.sort.mb to have multiple spills
                JobConf spilledJob = new JobConf(job, typeof(ThreadedMapBenchmark));
                ioSortMb = (int)Math.Ceil(Factor * Math.Ceil((double)dataSizePerMap / numSpillsPerMap
                                                             ));
                spilledJob.Set(JobContext.IoSortMb, ioSortMb.ToString());
                spilledJob.SetJobName("threaded-map-benchmark-spilled");
                spilledJob.SetJarByClass(typeof(ThreadedMapBenchmark));
                Log.Info("Running sort with " + numSpillsPerMap + " spills per map");
                startTime = Runtime.CurrentTimeMillis();
                JobClient.RunJob(spilledJob);
                endTime = Runtime.CurrentTimeMillis();
                Log.Info("Total time taken : " + (endTime - startTime).ToString() + " millisec");
            }
            finally
            {
                if (fs != null)
                {
                    fs.Delete(BaseDir, true);
                }
            }
            return(0);
        }
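The two mapreduce.task.io.sort.mb settings are the whole point of the comparison: the first run sizes the sort buffer to hold the entire map output (a single sort-and-spill), the second shrinks it so each map spills roughly numSpillsPerMap times. With the defaults the arithmetic works out as sketched below; Factor's real value is defined elsewhere in the benchmark, so 1.5 is only an illustrative assumption.

        // Illustrative buffer-sizing arithmetic for the two runs above (Factor = 1.5 is an assumption).
        double factor          = 1.5;
        int    dataSizePerMap  = 128;  // mb, default
        int    numSpillsPerMap = 2;    // default
        int    unspilledSortMb = (int)Math.Ceiling(factor * dataSizePerMap);  // 192 mb: whole output fits, single spill
        int    spilledSortMb   = (int)Math.Ceiling(factor * Math.Ceiling(dataSizePerMap / (double)numSpillsPerMap));  // 96 mb: ~2 spills per map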
Пример #28
0
        private void CheckErrorDownloadImageProduct(ImageProductInfo imageProductInfo, JobClient downloadImageProductJobClient, JobClient historyJobClient)
        {
            // Check how many failed downloads Redis has recorded for this product
            var errordownload = RedisErrorDownloadImageProductAdapter.GetErrorDownloadImage(imageProductInfo.Id);

            // Error count still within the retry limit: resend the message to the download-image-product service
            if (errordownload <= 5)
            {
                errordownload++;
                RedisErrorDownloadImageProductAdapter.SetErrorDownloadImage(imageProductInfo.Id, errordownload);
                ResendToServiceDownloadImageProduct(imageProductInfo, downloadImageProductJobClient);
            }
            else // retry limit exceeded: record the failed download via the history service
            {
                SendMessageToServiceInsertHistoryDownload(imageProductInfo, historyJobClient);
            }
        }
Пример #29
0
        /// <summary>This is the main routine for launching a distributed random write job.</summary>
        /// <remarks>
        /// This is the main routine for launching a distributed random write job.
        /// It runs 10 maps/node and each node writes 1 gig of data to a DFS file.
        /// The reduce doesn't do anything.
        /// </remarks>
        /// <exception cref="System.IO.IOException"></exception>
        /// <exception cref="System.Exception"/>
        public virtual int Run(string[] args)
        {
            if (args.Length == 0)
            {
                return(PrintUsage());
            }
            Configuration conf                  = GetConf();
            JobClient     client                = new JobClient(conf);
            ClusterStatus cluster               = client.GetClusterStatus();
            int           numMapsPerHost        = conf.GetInt(MapsPerHost, 10);
            long          numBytesToWritePerMap = conf.GetLong(BytesPerMap, 1 * 1024 * 1024 * 1024);

            if (numBytesToWritePerMap == 0)
            {
                System.Console.Error.WriteLine("Cannot have " + BytesPerMap + " set to 0");
                return(-2);
            }
            long totalBytesToWrite = conf.GetLong(TotalBytes, numMapsPerHost * numBytesToWritePerMap
                                                  * cluster.GetTaskTrackers());
            int numMaps = (int)(totalBytesToWrite / numBytesToWritePerMap);

            if (numMaps == 0 && totalBytesToWrite > 0)
            {
                numMaps = 1;
                conf.SetLong(BytesPerMap, totalBytesToWrite);
            }
            conf.SetInt(MRJobConfig.NumMaps, numMaps);
            Job job = Job.GetInstance(conf);

            job.SetJarByClass(typeof(RandomTextWriter));
            job.SetJobName("random-text-writer");
            job.SetOutputKeyClass(typeof(Org.Apache.Hadoop.IO.Text));
            job.SetOutputValueClass(typeof(Org.Apache.Hadoop.IO.Text));
            job.SetInputFormatClass(typeof(RandomWriter.RandomInputFormat));
            job.SetMapperClass(typeof(RandomTextWriter.RandomTextMapper));
            Type           outputFormatClass = typeof(SequenceFileOutputFormat);
            IList <string> otherArgs         = new AList <string>();

            for (int i = 0; i < args.Length; ++i)
            {
                try
                {
                    if ("-outFormat".Equals(args[i]))
                    {
                        outputFormatClass = Sharpen.Runtime.GetType(args[++i]).AsSubclass <OutputFormat>();
                    }
                    else
                    {
                        otherArgs.AddItem(args[i]);
                    }
                }
                catch (IndexOutOfRangeException)
                {
                    System.Console.Out.WriteLine("ERROR: Required parameter missing from " + args[i -
                                                                                                  1]);
                    return(PrintUsage());
                }
            }
            // exits
            job.SetOutputFormatClass(outputFormatClass);
            FileOutputFormat.SetOutputPath(job, new Path(otherArgs[0]));
            System.Console.Out.WriteLine("Running " + numMaps + " maps.");
            // reducer NONE
            job.SetNumReduceTasks(0);
            DateTime startTime = new DateTime();

            System.Console.Out.WriteLine("Job started: " + startTime);
            int      ret     = job.WaitForCompletion(true) ? 0 : 1;
            DateTime endTime = new DateTime();

            System.Console.Out.WriteLine("Job ended: " + endTime);
            System.Console.Out.WriteLine("The job took " + (endTime.GetTime() - startTime.GetTime
                                                                ()) / 1000 + " seconds.");
            return(ret);
        }
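The map count is derived from the cluster size and the per-map quota rather than passed directly; with the defaults the calculation runs as below, where the four task trackers are an illustrative assumption:

        // Illustrative map-count arithmetic for the defaults above (cluster size is assumed).
        int  mapsPerHost     = 10;                               // MapsPerHost default
        long bytesPerMap     = 1L * 1024 * 1024 * 1024;          // 1 GB per map, BytesPerMap default
        int  taskTrackers    = 4;                                // assumed cluster size
        long totalBytes      = mapsPerHost * bytesPerMap * taskTrackers;  // 40 GB in total
        int  numMapsComputed = (int)(totalBytes / bytesPerMap);           // 40 map tasks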
Пример #30
0
        private void SendMessageToServiceInsertHistoryDownload(ImageProductInfo imageProductInfo, JobClient historyJobClient)
        {
            var job = new Job
            {
                Data = LogHistoryImageProduct.GetMessage(new LogHistoryImageProduct()
                {
                    ProductId    = imageProductInfo.Id,
                    DateLog      = DateTime.Now,
                    ErrorName    = imageProductInfo.ErrorMessage,
                    IsDownloaded = false,
                    NewsToValid  = false
                })
            };

            while (_isRunning)
            {
                try
                {
                    historyJobClient.PublishJob(job);
                    Log.Info(string.Format("Push message to services insert history download image productid = {0}",
                                           imageProductInfo.Id));
                    break;
                }
                catch (Exception ex)
                {
                    Log.Error(string.Format("ProductId = {0} Push message to services insert history download image.", imageProductInfo.Id), ex);
                    Thread.Sleep(60000);
                }
            }
        }
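Both retry loops above spin on _isRunning, so the hosting service needs a way to flip that flag on shutdown; one common shape for this, sketched under the assumption that _isRunning is a field of the same service class, is:

        // Sketch: managing the _isRunning flag used by the retry loops above
        // (the field name matches the snippets; the Stop method itself is an assumption).
        private volatile bool _isRunning = true;

        public void Stop()
        {
            // Lets any in-flight retry loop exit its while (_isRunning) body
            // instead of sleeping and retrying forever during shutdown.
            _isRunning = false;
        }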