// Smoke test: exercises the full SparkContext API surface against the proxy
// layer, verifying that each call is forwarded without throwing.
public void TestSparkContextProxy()
{
    var sparkContext = new SparkContext("masterUrl", "appName");
    sparkContext.AddFile(null);
    sparkContext.BinaryFiles(null, null);
    sparkContext.CancelAllJobs();
    sparkContext.CancelJobGroup(null);
    sparkContext.EmptyRDD<string>();
    sparkContext.GetLocalProperty(null);
    sparkContext.HadoopFile(null, null, null, null);
    sparkContext.HadoopRDD(null, null, null);
    sparkContext.NewAPIHadoopFile(null, null, null, null);
    sparkContext.NewAPIHadoopRDD(null, null, null);
    sparkContext.Parallelize<int>(new int[] { 1, 2, 3, 4, 5 });
    sparkContext.SequenceFile(null, null, null, null, null, null);
    sparkContext.SetCheckpointDir(null);
    sparkContext.SetJobGroup(null, null);
    sparkContext.SetLocalProperty(null, null);
    sparkContext.SetLogLevel(null);
    sparkContext.TextFile(null);
    sparkContext.WholeTextFiles(null);
    sparkContext.Stop();
    sparkContext.Union<string>(null);
}
// Verifies that HadoopFile forwards its arguments to the proxy and wraps
// the returned IRDDProxy in an RDD with the expected serialization mode.
public void TestHadoopFile()
{
    // Arrange
    const string filePath = @"hdfs://path/to/files";
    const string keyClass = "java.lang.Long";
    const string valueClass = "java.lang.String";
    const string keyConverterClass = "xyz.KeyConverter";
    const string valueConverterClass = "xyz.ValueConverter";
    Mock<IRDDProxy> rddProxy = new Mock<IRDDProxy>();
    Mock<ISparkContextProxy> sparkContextProxy = new Mock<ISparkContextProxy>();
    sparkContextProxy.Setup(m => m.HadoopFile(filePath, It.IsAny<string>(), keyClass, valueClass, keyConverterClass, valueConverterClass,
            It.IsAny<IEnumerable<KeyValuePair<string, string>>>(), It.IsAny<int>()))
        .Returns(rddProxy.Object);
    SparkContext sc = new SparkContext(sparkContextProxy.Object, null);
    const string inputFormatClass = "org.apache.hadoop.mapreduce.lib.input.TextInputFormat";

    // Act
    RDD<byte[]> rdd = sc.HadoopFile(filePath, inputFormatClass, keyClass, valueClass, keyConverterClass, valueConverterClass);

    // Assert
    Assert.IsNotNull(rdd);
    Assert.AreEqual(rddProxy.Object, rdd.RddProxy);
    Assert.AreEqual(sc, rdd.sparkContext);
    Assert.AreEqual(SerializedMode.None, rdd.serializedMode);
}
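// A minimal companion sketch showing the same Moq pattern applied to TextFile:
// stub the proxy call, construct a SparkContext over the mocked proxy, and
// assert the returned RDD wraps the stubbed IRDDProxy. The
// ISparkContextProxy.TextFile(string, int) signature is an assumption here,
// inferred by analogy with HadoopFile above; adjust to the actual interface.
public void TestTextFileWithMockedProxy()
{
    // Arrange: stub the proxy so TextFile returns a known IRDDProxy
    Mock<IRDDProxy> rddProxy = new Mock<IRDDProxy>();
    Mock<ISparkContextProxy> sparkContextProxy = new Mock<ISparkContextProxy>();
    sparkContextProxy.Setup(m => m.TextFile(It.IsAny<string>(), It.IsAny<int>())) // assumed signature: TextFile(filePath, minPartitions)
        .Returns(rddProxy.Object);
    SparkContext sc = new SparkContext(sparkContextProxy.Object, null);

    // Act
    RDD<string> rdd = sc.TextFile(@"hdfs://path/to/textfile");

    // Assert: the RDD wraps the proxy object handed back by the stub
    Assert.IsNotNull(rdd);
    Assert.AreEqual(rddProxy.Object, rdd.RddProxy);
    Assert.AreEqual(sc, rdd.sparkContext);
}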