/// <summary>
/// Exercises the SparkContext API surface available since Spark 2.3.x; asserts only
/// types and thrown exceptions, not runtime effects.
/// </summary>
public void TestSignaturesV2_3_X()
{
    SparkContext sc = SparkContext.GetOrCreate(new SparkConf());

    Assert.IsType<SparkConf>(sc.GetConf());
    Assert.IsType<int>(sc.DefaultParallelism);

    sc.SetJobDescription("job description");

    // Valid log levels are accepted regardless of case; an unknown level throws.
    sc.SetLogLevel("ALL");
    sc.SetLogLevel("debug");
    Assert.Throws<Exception>(() => sc.SetLogLevel("INVALID"));

    sc.SetJobGroup("group id", "description");
    sc.SetJobGroup("group id", "description", true);
    sc.ClearJobGroup();

    string filePath = $"{TestEnvironment.ResourceDirectory}people.txt";
    sc.AddFile(filePath);
    sc.AddFile(filePath, true);

    // NOTE(review): tempDir is created but the checkpoint dir points at the resource
    // directory instead — confirm whether tempDir.Path was meant to be used here.
    using var tempDir = new TemporaryDirectory();
    sc.SetCheckpointDir(TestEnvironment.ResourceDirectory);
}
/// <summary>
/// Exercises the SparkContext archive APIs added in Spark 3.1.x.
/// </summary>
public void TestSignaturesV3_1_X()
{
    SparkContext sc = SparkContext.GetOrCreate(new SparkConf());

    string archivePath = $"{TestEnvironment.ResourceDirectory}archive.zip";
    sc.AddArchive(archivePath);

    // ListArchives should yield a string sequence; materialize it to check the type.
    Assert.IsType<string[]>(sc.ListArchives().ToArray());
}
/// <summary>
/// Gets an existing [[SparkSession]] or, if there is no existing one, creates a new
/// one based on the options set in this builder.
/// </summary>
/// <returns>The existing or newly created <see cref="SparkSession"/>.</returns>
public SparkSession GetOrCreate()
{
    // Copy every builder option into a fresh SparkConf before creating the context.
    var sparkConf = new SparkConf();
    foreach (var option in options)
    {
        sparkConf.Set(option.Key, option.Value);
    }

    SparkContext sparkContext = SparkContext.GetOrCreate(sparkConf);
    return SqlContext.GetOrCreate(sparkContext).SparkSession;
}
/// <summary>
/// Exercises the SparkContext API surface available since Spark 2.3.x; results of
/// the getter calls are intentionally discarded — only the calls themselves matter.
/// </summary>
public void TestSignaturesV2_3_X()
{
    SparkContext sc = SparkContext.GetOrCreate(new SparkConf());

    _ = sc.GetConf();
    _ = sc.DefaultParallelism;

    sc.SetJobDescription("job description");

    sc.SetJobGroup("group id", "description");
    sc.SetJobGroup("group id", "description", true);
    sc.ClearJobGroup();

    string filePath = TestEnvironment.ResourceDirectory + "people.txt";
    sc.AddFile(filePath);
    sc.AddFile(filePath, true);

    sc.SetCheckpointDir(TestEnvironment.ResourceDirectory);
}
// Shares a single SparkContext across all tests in this class.
public RDDTests() => _sc = SparkContext.GetOrCreate(new SparkConf());
// Shares a single SparkContext across all tests in this class.
public PairRDDFunctionsTests() => _sc = SparkContext.GetOrCreate(new SparkConf());