// Example 1
        public void TestSparkContextProxy()
        {
            // Smoke test: touch every public SparkContext API once so each call
            // is routed through the context proxy. No assertions — the test
            // passes as long as no call throws.
            var context = new SparkContext("masterUrl", "appName");

            context.AddFile(null);
            context.BinaryFiles(null, null);
            context.CancelAllJobs();
            context.CancelJobGroup(null);
            context.EmptyRDD<string>();
            context.GetLocalProperty(null);
            context.HadoopFile(null, null, null, null);
            context.HadoopRDD(null, null, null);
            context.NewAPIHadoopFile(null, null, null, null);
            context.NewAPIHadoopRDD(null, null, null);
            context.Parallelize<int>(new int[] { 1, 2, 3, 4, 5 });
            context.SequenceFile(null, null, null, null, null, null);
            context.SetCheckpointDir(null);
            context.SetJobGroup(null, null);
            context.SetLocalProperty(null, null);
            context.SetLogLevel(null);
            context.TextFile(null);
            context.WholeTextFiles(null);
            // Stop before Union, matching the original call sequence.
            context.Stop();
            context.Union<string>(null);
        }
// Example 2
        public void TestNewAPIHadoopRDD()
        {
            // Arrange: stub the context proxy so any NewAPIHadoopRDD call with the
            // expected key/value/converter classes yields a known RDD proxy.
            const string keyClass            = "java.lang.Long";
            const string valueClass          = "java.lang.String";
            const string keyConverterClass   = "xyz.KeyConveter";
            const string valueConverterClass = "xyz.valueConveter";

            var rddProxyMock     = new Mock<IRDDProxy>();
            var contextProxyMock = new Mock<ISparkContextProxy>();

            contextProxyMock
                .Setup(m => m.NewAPIHadoopRDD(
                    It.IsAny<string>(),
                    keyClass,
                    valueClass,
                    keyConverterClass,
                    valueConverterClass,
                    It.IsAny<IEnumerable<KeyValuePair<string, string>>>(),
                    It.IsAny<int>()))
                .Returns(rddProxyMock.Object);

            var sc = new SparkContext(contextProxyMock.Object, null);

            const string inputFormatClass = "org.apache.hadoop.mapreduce.lib.input.TextInputFormat";
            var conf = new KeyValuePair<string, string>[] { };

            // Act
            RDD<byte[]> rdd = sc.NewAPIHadoopRDD(inputFormatClass, keyClass, valueClass, keyConverterClass, valueConverterClass, conf);

            // Assert: the RDD wraps the stubbed proxy, belongs to this context,
            // and defaults to SerializedMode.None.
            Assert.IsNotNull(rdd);
            Assert.AreEqual(rddProxyMock.Object, rdd.RddProxy);
            Assert.AreEqual(sc, rdd.sparkContext);
            Assert.AreEqual(SerializedMode.None, rdd.serializedMode);
        }
// Example 3
 public void TestSparkContextProxy()
 {
     // Smoke test: invoke each SparkContext API exactly once so every call path
     // reaches the underlying proxy. Success criterion is simply "no exception".
     var context = new SparkContext("masterUrl", "appName");

     context.AddFile(null);
     context.BinaryFiles(null, null);
     context.CancelAllJobs();
     context.CancelJobGroup(null);
     context.EmptyRDD<string>();
     context.GetLocalProperty(null);
     context.HadoopFile(null, null, null, null);
     context.HadoopRDD(null, null, null);
     context.NewAPIHadoopFile(null, null, null, null);
     context.NewAPIHadoopRDD(null, null, null);
     context.Parallelize<int>(new int[] { 1, 2, 3, 4, 5 });
     context.SequenceFile(null, null, null, null, null, null);
     context.SetCheckpointDir(null);
     context.SetJobGroup(null, null);
     context.SetLocalProperty(null, null);
     context.SetLogLevel(null);
     context.TextFile(null);
     context.WholeTextFiles(null);
     // Stop precedes Union, preserving the original call sequence.
     context.Stop();
     context.Union<string>(null);
 }