Example #1
        public void TestSparkContextConstructor()
        {
            var sparkContext = new SparkContext("masterUrl", "appName");

            Assert.IsNotNull((sparkContext.SparkContextProxy as MockSparkContextProxy).mockSparkContextReference);
            var paramValuesToConstructor = (sparkContext.SparkContextProxy as MockSparkContextProxy).mockSparkContextReference as object[];

            Assert.AreEqual("masterUrl", (paramValuesToConstructor[0] as MockSparkConfProxy).stringConfDictionary["mockmaster"]);
            Assert.AreEqual("appName", (paramValuesToConstructor[0] as MockSparkConfProxy).stringConfDictionary["mockappName"]);
            Assert.AreEqual(sparkContext, SparkContext.GetActiveSparkContext());

            sparkContext = new SparkContext("masterUrl", "appName", "sparkhome");
            Assert.IsNotNull((sparkContext.SparkContextProxy as MockSparkContextProxy).mockSparkContextReference);
            paramValuesToConstructor = (sparkContext.SparkContextProxy as MockSparkContextProxy).mockSparkContextReference as object[];
            Assert.AreEqual("masterUrl", (paramValuesToConstructor[0] as MockSparkConfProxy).stringConfDictionary["mockmaster"]);
            Assert.AreEqual("appName", (paramValuesToConstructor[0] as MockSparkConfProxy).stringConfDictionary["mockappName"]);
            Assert.AreEqual("sparkhome", (paramValuesToConstructor[0] as MockSparkConfProxy).stringConfDictionary["mockhome"]);
            Assert.AreEqual(sparkContext, SparkContext.GetActiveSparkContext());

            // A null SparkConf should still yield a usable context
            sparkContext = new SparkContext(null);
            Assert.IsNotNull((sparkContext.SparkContextProxy as MockSparkContextProxy).mockSparkContextReference);
            paramValuesToConstructor = (sparkContext.SparkContextProxy as MockSparkContextProxy).mockSparkContextReference as object[];
            Assert.IsNotNull(paramValuesToConstructor[0]); // the SparkContext constructor creates a default SparkConf when none is supplied
            Assert.AreEqual(sparkContext, SparkContext.GetActiveSparkContext());
        }
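For context, a minimal usage sketch (not part of the test above): constructing a SparkContext registers it as the active context, so SparkContext.GetActiveSparkContext() returns that same instance later. The master URL, app name and data below are illustrative; only calls that already appear in these examples are assumed.

        // Minimal sketch: the constructor registers the new context as the active one.
        var sc = new SparkContext("local", "IllustrativeApp");

        // Later code can fetch the same instance without passing the reference around.
        var active = SparkContext.GetActiveSparkContext();

        // Illustrative follow-up using Parallelize, as seen in Example #2
        // (AsEnumerable requires System.Linq).
        var numbers = new[] { 1, 2, 3, 4 };
        var rdd = active.Parallelize(numbers.AsEnumerable(), 2);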
Example #2
        internal RDD<T> Execute(double t)
        {
            var sc = SparkContext.GetActiveSparkContext();

            // Parallelize a placeholder array so the RDD has numPartitions partitions,
            // then run the user-supplied function once per partition via the helper.
            int[] array = new int[numPartitions];
            var initialRdd = sc.Parallelize(array.AsEnumerable(), numPartitions);

            return initialRdd.MapPartitionsWithIndex<T>(new CSharpInputDStreamMapPartitionWithIndexHelper<T>(t, func).Execute, true);
        }
Example #3
 /// <summary>
 /// Converts this strongly typed collection of data to a generic DataFrame. In contrast to the
 /// strongly typed objects that Dataset operations work on, a DataFrame returns generic Row
 /// objects that allow fields to be accessed by ordinal or name.
 /// </summary>
 /// <returns>DataFrame created from the Dataset</returns>
 public DataFrame ToDF()
 {
     return dataFrame ?? (dataFrame = new DataFrame(datasetProxy.ToDF(), SparkContext.GetActiveSparkContext()));
 }
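A hedged usage sketch for ToDF(): "dataset" below is a hypothetical existing Dataset instance; the call simply exposes the same data through the untyped DataFrame API.

     // Illustrative only: assumes an existing Dataset instance named "dataset".
     DataFrame df = dataset.ToDF(); // untyped view backed by the same data
     df.Show();                     // print a sample of rows via the DataFrame API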
Example #4
 public DataFrame CreateExternalTable(string tableName, string path, string source)
 {
     return new DataFrame(
         new DataFrameIpcProxy(
             new JvmObjectReference(
                 SparkCLRIpcProxy.JvmBridge.CallNonStaticJavaMethod(
                     jvmCatalogReference, "createExternalTable",
                     new object[] { tableName, path, source }).ToString()),
             sqlContextProxy),
         SparkContext.GetActiveSparkContext());
 }