public void TestSignaturesV2_3_X()
{
    SparkContext sc = SparkContext.GetOrCreate(new SparkConf());

    Assert.IsType<SparkConf>(sc.GetConf());
    Assert.IsType<int>(sc.DefaultParallelism);

    sc.SetJobDescription("job description");

    // Valid levels are accepted case-insensitively; unknown levels throw.
    sc.SetLogLevel("ALL");
    sc.SetLogLevel("debug");
    Assert.Throws<Exception>(() => sc.SetLogLevel("INVALID"));

    sc.SetJobGroup("group id", "description");
    sc.SetJobGroup("group id", "description", true);

    sc.ClearJobGroup();

    string filePath = $"{TestEnvironment.ResourceDirectory}people.txt";
    sc.AddFile(filePath);
    sc.AddFile(filePath, true);

    // Checkpoint into the scoped temporary directory so the test cleans
    // up after itself.
    using var tempDir = new TemporaryDirectory();
    sc.SetCheckpointDir(tempDir.Path);
}
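// A minimal sketch of reading back a file distributed with AddFile,
// assuming Microsoft.Spark's SparkFiles.Get is available to resolve the
// staged local copy. "people.txt" mirrors the resource used above; the
// method name here is illustrative.
public static void ReadDistributedFile(SparkContext sc)
{
    sc.AddFile($"{TestEnvironment.ResourceDirectory}people.txt");

    // SparkFiles.Get maps a bare file name to the path where Spark
    // staged that file for the current process.
    string localPath = SparkFiles.Get("people.txt");
    foreach (string line in File.ReadAllLines(localPath))
    {
        Console.WriteLine(line);
    }
}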
public bool AddReference(string localPath)
{
    if (!File.Exists(localPath))
    {
        return false;
    }

    // Load the assembly into the REPL session on the driver, then ship
    // the same file to the cluster so worker-side code can resolve it.
    Execute("#r \"" + localPath + "\"");
    sc.AddFile(new Uri(localPath).ToString());
    return true;
}
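// Hypothetical driver-side usage of AddReference; the host type and DLL
// path below are illustrative only, not part of the snippet above. The
// two-step design matters: "#r" makes the assembly visible to the REPL
// compiler, while sc.AddFile distributes the file to executors.
var engine = new ReplEngine();                    // hypothetical REPL host
if (!engine.AddReference(@"C:\libs\MyUdfs.dll"))  // illustrative path
{
    Console.WriteLine("Assembly not found; nothing was loaded.");
}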
public void TestSparkContextProxy() { var sparkContext = new SparkContext("masterUrl", "appName"); sparkContext.AddFile(null); sparkContext.BinaryFiles(null, null); sparkContext.CancelAllJobs(); sparkContext.CancelJobGroup(null); sparkContext.EmptyRDD <string>(); sparkContext.GetLocalProperty(null); sparkContext.HadoopFile(null, null, null, null); sparkContext.HadoopRDD(null, null, null); sparkContext.NewAPIHadoopFile(null, null, null, null); sparkContext.NewAPIHadoopRDD(null, null, null); sparkContext.Parallelize <int>(new int[] { 1, 2, 3, 4, 5 }); sparkContext.SequenceFile(null, null, null, null, null, null); sparkContext.SetCheckpointDir(null); sparkContext.SetJobGroup(null, null); sparkContext.SetLocalProperty(null, null); sparkContext.SetLogLevel(null); sparkContext.TextFile(null); sparkContext.WholeTextFiles(null); sparkContext.Stop(); sparkContext.Union <string>(null); }
public void TestSignaturesV2_3_X()
{
    SparkContext sc = SparkContext.GetOrCreate(new SparkConf());

    _ = sc.GetConf();
    _ = sc.DefaultParallelism;

    sc.SetJobDescription("job description");

    sc.SetJobGroup("group id", "description");
    sc.SetJobGroup("group id", "description", true);

    sc.ClearJobGroup();

    string filePath = TestEnvironment.ResourceDirectory + "people.txt";
    sc.AddFile(filePath);
    sc.AddFile(filePath, true);

    sc.SetCheckpointDir(TestEnvironment.ResourceDirectory);
}
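// A minimal sketch of seeding GetOrCreate with an explicit configuration
// instead of the empty SparkConf used above; the master URL and app name
// are placeholders.
public static SparkContext CreateConfiguredContext()
{
    SparkConf conf = new SparkConf()
        .SetMaster("local[*]")
        .SetAppName("signature-check");
    return SparkContext.GetOrCreate(conf);
}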
public void TestAddFile()
{
    // Arrange
    const string path = @"D:\tmp";
    var sparkContextProxy = new Mock<ISparkContextProxy>();
    sparkContextProxy.Setup(m => m.AddFile(It.IsAny<string>()));
    var sc = new SparkContext(sparkContextProxy.Object, null);

    // Act
    sc.AddFile(path);

    // Assert
    sparkContextProxy.Verify(m => m.AddFile(path), Times.Once);
}
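// The same mock-and-verify pattern applied to another proxy call. This
// assumes ISparkContextProxy also declares SetCheckpointDir(string),
// mirroring the SparkContext method exercised in the tests above; the
// directory path is a placeholder.
public void TestSetCheckpointDir()
{
    // Arrange
    const string dir = @"D:\checkpoints";
    var proxy = new Mock<ISparkContextProxy>();
    proxy.Setup(m => m.SetCheckpointDir(It.IsAny<string>()));
    var sc = new SparkContext(proxy.Object, null);

    // Act
    sc.SetCheckpointDir(dir);

    // Assert
    proxy.Verify(m => m.SetCheckpointDir(dir), Times.Once);
}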
public void TestSparkContextProxy() { var sparkContext = new SparkContext("masterUrl", "appName"); sparkContext.AddFile(null); sparkContext.BinaryFiles(null, null); sparkContext.CancelAllJobs(); sparkContext.CancelJobGroup(null); sparkContext.EmptyRDD<string>(); sparkContext.GetLocalProperty(null); sparkContext.HadoopFile(null, null, null, null); sparkContext.HadoopRDD(null, null, null); sparkContext.NewAPIHadoopFile(null, null, null, null); sparkContext.NewAPIHadoopRDD(null, null, null); sparkContext.Parallelize<int>(new int[] { 1, 2, 3, 4, 5 }); sparkContext.SequenceFile(null, null, null, null, null, null); sparkContext.SetCheckpointDir(null); sparkContext.SetJobGroup(null, null); sparkContext.SetLocalProperty(null, null); sparkContext.SetLogLevel(null); sparkContext.TextFile(null); sparkContext.WholeTextFiles(null); sparkContext.Stop(); sparkContext.Union<string>(null); }