public void TestSignaturesV2_3_X()
{
    SparkContext sc = SparkContext.GetOrCreate(new SparkConf());

    Assert.IsType<SparkConf>(sc.GetConf());
    Assert.IsType<int>(sc.DefaultParallelism);

    sc.SetJobDescription("job description");

    // Log level names are case-insensitive; an unrecognized name throws.
    sc.SetLogLevel("ALL");
    sc.SetLogLevel("debug");
    Assert.Throws<Exception>(() => sc.SetLogLevel("INVALID"));

    sc.SetJobGroup("group id", "description");
    sc.SetJobGroup("group id", "description", true);

    sc.ClearJobGroup();

    string filePath = $"{TestEnvironment.ResourceDirectory}people.txt";
    sc.AddFile(filePath);
    sc.AddFile(filePath, true);

    // Checkpoint into the temporary directory so it is actually used and
    // cleaned up when the test ends.
    using var tempDir = new TemporaryDirectory();
    sc.SetCheckpointDir(tempDir.Path);
}
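// A minimal usage sketch of the same API surface outside the test harness,
// assuming a local master; SetMaster/SetAppName are SparkConf builders, and
// the "smoke" app name is a placeholder. Illustrative only, nothing asserted.
public void ExampleGetOrCreateUsage()
{
    SparkConf conf = new SparkConf().SetMaster("local[*]").SetAppName("smoke");
    SparkContext sc = SparkContext.GetOrCreate(conf);
    sc.SetLogLevel("WARN"); // valid levels: ALL, DEBUG, ERROR, FATAL, INFO, OFF, TRACE, WARN
}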
public void TestSparkContextProxy()
{
    // Smoke test: every SparkContext member below should forward to the
    // underlying proxy without throwing, even when passed null arguments.
    var sparkContext = new SparkContext("masterUrl", "appName");
    sparkContext.AddFile(null);
    sparkContext.BinaryFiles(null, null);
    sparkContext.CancelAllJobs();
    sparkContext.CancelJobGroup(null);
    sparkContext.EmptyRDD<string>();
    sparkContext.GetLocalProperty(null);
    sparkContext.HadoopFile(null, null, null, null);
    sparkContext.HadoopRDD(null, null, null);
    sparkContext.NewAPIHadoopFile(null, null, null, null);
    sparkContext.NewAPIHadoopRDD(null, null, null);
    sparkContext.Parallelize<int>(new int[] { 1, 2, 3, 4, 5 });
    sparkContext.SequenceFile(null, null, null, null, null, null);
    sparkContext.SetCheckpointDir(null);
    sparkContext.SetJobGroup(null, null);
    sparkContext.SetLocalProperty(null, null);
    sparkContext.SetLogLevel(null);
    sparkContext.TextFile(null);
    sparkContext.WholeTextFiles(null);
    sparkContext.Stop();
    sparkContext.Union<string>(null);
}
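// Parallelize is the one call above given real data; a hedged sketch of a
// full round trip. The numSlices argument and the Collect() call are
// assumptions about this API and are illustrative only.
public void ExampleParallelizeRoundTrip()
{
    var sc = new SparkContext("local", "parallelize-example");
    RDD<int> rdd = sc.Parallelize(new[] { 1, 2, 3, 4, 5 }, 2); // assumed numSlices overload
    int[] collected = rdd.Collect(); // expected to yield 1, 2, 3, 4, 5
    sc.Stop();
}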
public void TestSetLogLevel()
{
    // Arrange
    Mock<ISparkContextProxy> sparkContextProxy = new Mock<ISparkContextProxy>();
    sparkContextProxy.Setup(m => m.SetLogLevel(It.IsAny<string>()));
    SparkContext sc = new SparkContext(sparkContextProxy.Object, null);

    // Act
    const string logLevel = "INFO";
    sc.SetLogLevel(logLevel);

    // Assert
    sparkContextProxy.Verify(m => m.SetLogLevel(logLevel), Times.Once);
}
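// The same arrange/act/assert mock pattern extends to any forwarded call; a
// sketch below, assuming ISparkContextProxy exposes the parameterless Stop()
// that SparkContext.Stop() (exercised in TestSparkContextProxy) delegates to.
public void TestStop()
{
    // Arrange
    var sparkContextProxy = new Mock<ISparkContextProxy>();
    SparkContext sc = new SparkContext(sparkContextProxy.Object, null);

    // Act
    sc.Stop();

    // Assert: the call is forwarded to the proxy exactly once.
    sparkContextProxy.Verify(m => m.Stop(), Times.Once);
}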