// Smoke test: exercises the full surface of SparkContext so that every call
// is forwarded to the underlying context proxy without throwing.
public void TestSparkContextProxy()
{
    var sc = new SparkContext("masterUrl", "appName");

    // File / data-source entry points.
    sc.AddFile(null);
    sc.BinaryFiles(null, null);

    // Job-control operations.
    sc.CancelAllJobs();
    sc.CancelJobGroup(null);

    // RDD creation paths (generic and Hadoop-backed).
    sc.EmptyRDD<string>();
    sc.GetLocalProperty(null);
    sc.HadoopFile(null, null, null, null);
    sc.HadoopRDD(null, null, null);
    sc.NewAPIHadoopFile(null, null, null, null);
    sc.NewAPIHadoopRDD(null, null, null);
    sc.Parallelize<int>(new int[] { 1, 2, 3, 4, 5 });
    sc.SequenceFile(null, null, null, null, null, null);

    // Context configuration setters.
    sc.SetCheckpointDir(null);
    sc.SetJobGroup(null, null);
    sc.SetLocalProperty(null, null);
    sc.SetLogLevel(null);

    // Text-based readers and shutdown.
    sc.TextFile(null);
    sc.WholeTextFiles(null);
    sc.Stop();
    sc.Union<string>(null);
}
// Verifies that SparkContext.BinaryFiles delegates to the proxy, falls back to
// DefaultMinPartitions when the caller passes null, and wraps the returned
// proxy in an RDD configured with Pair serialization.
public void TestBinaryFiles()
{
    // Arrange: mock the proxy layer so no real cluster is needed.
    const string filePath = @"d:\data";
    const int minPartitions = 10;

    Mock<IRDDProxy> mockRddProxy = new Mock<IRDDProxy>();
    Mock<ISparkContextProxy> mockContextProxy = new Mock<ISparkContextProxy>();
    mockContextProxy
        .Setup(m => m.BinaryFiles(filePath, minPartitions))
        .Returns(mockRddProxy.Object);
    mockContextProxy
        .Setup(m => m.DefaultMinPartitions)
        .Returns(minPartitions);

    SparkContext context = new SparkContext(mockContextProxy.Object, null);

    // Act: null minPartitions should be resolved via DefaultMinPartitions.
    RDD<KeyValuePair<byte[], byte[]>> result = context.BinaryFiles(filePath, null);

    // Assert: the RDD wraps the mocked proxy and uses Pair serialization.
    Assert.IsNotNull(result);
    Assert.AreEqual(mockRddProxy.Object, result.RddProxy);
    Assert.AreEqual(context, result.sparkContext);
    Assert.AreEqual(SerializedMode.Pair, result.serializedMode);
}
// Smoke test that walks through each public member of SparkContext, ensuring
// none of the proxy-delegating calls throws when invoked.
public void TestSparkContextProxy()
{
    var context = new SparkContext("masterUrl", "appName");

    context.AddFile(null);
    context.BinaryFiles(null, null);
    context.CancelAllJobs();
    context.CancelJobGroup(null);
    context.EmptyRDD<string>();
    context.GetLocalProperty(null);
    context.HadoopFile(null, null, null, null);
    context.HadoopRDD(null, null, null);
    context.NewAPIHadoopFile(null, null, null, null);
    context.NewAPIHadoopRDD(null, null, null);
    context.Parallelize<int>(new int[] { 1, 2, 3, 4, 5 });
    context.SequenceFile(null, null, null, null, null, null);
    context.SetCheckpointDir(null);
    context.SetJobGroup(null, null);
    context.SetLocalProperty(null, null);
    context.SetLogLevel(null);
    context.TextFile(null);
    context.WholeTextFiles(null);

    // Stop the context before the final Union call, matching the original
    // call order of this smoke test.
    context.Stop();
    context.Union<string>(null);
}