Example #1
        public void TestSparkContextProxy()
        {
            // Smoke test: exercise each proxy-backed SparkContext method with placeholder arguments
            var sparkContext = new SparkContext("masterUrl", "appName");

            sparkContext.AddFile(null);
            sparkContext.BinaryFiles(null, null);
            sparkContext.CancelAllJobs();
            sparkContext.CancelJobGroup(null);
            sparkContext.EmptyRDD<string>();
            sparkContext.GetLocalProperty(null);
            sparkContext.HadoopFile(null, null, null, null);
            sparkContext.HadoopRDD(null, null, null);
            sparkContext.NewAPIHadoopFile(null, null, null, null);
            sparkContext.NewAPIHadoopRDD(null, null, null);
            sparkContext.Parallelize<int>(new int[] { 1, 2, 3, 4, 5 });
            sparkContext.SequenceFile(null, null, null, null, null, null);
            sparkContext.SetCheckpointDir(null);
            sparkContext.SetJobGroup(null, null);
            sparkContext.SetLocalProperty(null, null);
            sparkContext.SetLogLevel(null);
            sparkContext.TextFile(null);
            sparkContext.WholeTextFiles(null);
            sparkContext.Stop();
            sparkContext.Union<string>(null);
        }
Example #2
        public void TestCancelAllJobs()
        {
            // Arrange
            Mock<ISparkContextProxy> sparkContextProxy = new Mock<ISparkContextProxy>();

            sparkContextProxy.Setup(m => m.CancelAllJobs());
            SparkContext sc = new SparkContext(sparkContextProxy.Object, null);

            // Act
            sc.CancelAllJobs();

            // Assert
            sparkContextProxy.Verify(m => m.CancelAllJobs(), Times.Once);
        }
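The same Arrange/Act/Assert pattern from Example #2 can be reused for other proxy-backed methods. Below is a minimal sketch for CancelJobGroup, assuming ISparkContextProxy exposes a matching CancelJobGroup(string) method that SparkContext delegates to (as the calls in Example #1 suggest); the group id "test-group" is purely illustrative.

        public void TestCancelJobGroup()
        {
            // Arrange: mock the proxy so no real Spark backend is needed
            // (assumes ISparkContextProxy declares CancelJobGroup(string))
            Mock<ISparkContextProxy> sparkContextProxy = new Mock<ISparkContextProxy>();
            sparkContextProxy.Setup(m => m.CancelJobGroup(It.IsAny<string>()));
            SparkContext sc = new SparkContext(sparkContextProxy.Object, null);

            // Act: call the public API under test
            sc.CancelJobGroup("test-group");

            // Assert: the call was forwarded to the proxy exactly once
            sparkContextProxy.Verify(m => m.CancelJobGroup("test-group"), Times.Once);
        }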