Example 1
        public void TestSparkContextConstructor()
        {
            // Overload 1: master URL and app name only.
            var sparkContext = new SparkContext("masterUrl", "appName");

            var contextProxy = sparkContext.SparkContextProxy as MockSparkContextProxy;
            Assert.IsNotNull(contextProxy.mockSparkContextReference);
            var paramValuesToConstructor = contextProxy.mockSparkContextReference as object[];

            var confProxy = paramValuesToConstructor[0] as MockSparkConfProxy;
            Assert.AreEqual("masterUrl", confProxy.stringConfDictionary["mockmaster"]);
            Assert.AreEqual("appName", confProxy.stringConfDictionary["mockappName"]);
            Assert.AreEqual(sparkContext, SparkContext.GetActiveSparkContext());

            // Overload 2: master URL, app name, and Spark home directory.
            sparkContext = new SparkContext("masterUrl", "appName", "sparkhome");
            contextProxy = sparkContext.SparkContextProxy as MockSparkContextProxy;
            Assert.IsNotNull(contextProxy.mockSparkContextReference);
            paramValuesToConstructor = contextProxy.mockSparkContextReference as object[];
            confProxy = paramValuesToConstructor[0] as MockSparkConfProxy;
            Assert.AreEqual("masterUrl", confProxy.stringConfDictionary["mockmaster"]);
            Assert.AreEqual("appName", confProxy.stringConfDictionary["mockappName"]);
            Assert.AreEqual("sparkhome", confProxy.stringConfDictionary["mockhome"]);
            Assert.AreEqual(sparkContext, SparkContext.GetActiveSparkContext());

            // Overload 3: null SparkConf.
            sparkContext = new SparkContext(null);
            contextProxy = sparkContext.SparkContextProxy as MockSparkContextProxy;
            Assert.IsNotNull(contextProxy.mockSparkContextReference);
            paramValuesToConstructor = contextProxy.mockSparkContextReference as object[];
            Assert.IsNotNull(paramValuesToConstructor[0]); // the SparkContext constructor creates a default SparkConf
            Assert.AreEqual(sparkContext, SparkContext.GetActiveSparkContext());
        }
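The test above drives three constructor overloads and, in each case, asserts that the newly built context is what GetActiveSparkContext() returns. A minimal sketch of that last-constructed-wins registration, with hypothetical member names and the real per-overload work elided:

    // Sketch only: illustrates the active-context registration the test
    // asserts; the actual SparkContext also builds a SparkConf and a proxy.
    public class ActiveContextSketch
    {
        private static ActiveContextSketch activeContext;

        public ActiveContextSketch(string master, string appName)
        {
            // ...configuration and proxy creation would happen here...
            activeContext = this; // register this instance as the active context
        }

        public static ActiveContextSketch GetActiveSparkContext() => activeContext;
    }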
Example 2
        internal RDD<T> Execute(double t)
        {
            var sc = SparkContext.GetActiveSparkContext();

            // Seed an RDD with one placeholder element per partition so that
            // MapPartitionsWithIndex invokes the helper once per partition.
            int[] array = new int[numPartitions];
            var initialRdd = sc.Parallelize(array.AsEnumerable(), numPartitions);

            return initialRdd.MapPartitionsWithIndex<T>(
                new CSharpInputDStreamMapPartitionWithIndexHelper<T>(t, func).Execute, true);
        }
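Execute parallelizes a throwaway array only to obtain numPartitions partitions; the helper's Execute method then generates the real per-partition data for batch time t. A hedged sketch of why the per-partition function lives on a serializable helper object rather than in a closure (names here are illustrative, not the Mobius API):

    using System;
    using System.Collections.Generic;

    // Illustrative helper: the state needed on the workers (batch time and
    // user function) is held by a [Serializable] object so it ships with the
    // delegate, instead of being captured in an ad-hoc closure.
    [Serializable]
    internal class MapPartitionHelperSketch<T>
    {
        private readonly double time;
        private readonly Func<double, int, IEnumerable<T>> func;

        internal MapPartitionHelperSketch(double time, Func<double, int, IEnumerable<T>> func)
        {
            this.time = time;
            this.func = func;
        }

        // Assumed callback shape: (partition index, partition elements) ->
        // output elements; the placeholder seed elements are ignored.
        internal IEnumerable<T> Execute(int partitionIndex, IEnumerable<int> input)
        {
            return func(time, partitionIndex);
        }
    }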
Example 3
 /// <summary>
 /// Converts this strongly typed collection of data to a generic DataFrame. In contrast to the
 /// strongly typed objects that Dataset operations work on, a DataFrame returns generic
 /// <see cref="Row"/> objects that allow fields to be accessed by ordinal or name.
 /// </summary>
 /// <returns>DataFrame created from the Dataset</returns>
 public DataFrame ToDF()
 {
     // Cache the conversion: build the DataFrame on the first call, reuse it afterwards.
     return dataFrame ?? (dataFrame = new DataFrame(datasetProxy.ToDF(), SparkContext.GetActiveSparkContext()));
 }
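The null-coalescing return memoizes the converted DataFrame on the instance, so repeated ToDF() calls share one wrapper. A minimal, self-contained sketch of the same pattern in plain C#, with no Spark types:

    public class CachedConversionSketch
    {
        private string converted; // null until the first ToText() call

        private string ExpensiveConvert() => "result"; // stands in for datasetProxy.ToDF()

        public string ToText()
        {
            // ?? runs ExpensiveConvert() only while 'converted' is still null,
            // assigns the result, and returns the cached value thereafter.
            return converted ?? (converted = ExpensiveConvert());
        }
    }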
Example 4
 public DataFrame CreateExternalTable(string tableName, string path, string source)
 {
     // Ask the JVM-side catalog to create the external table, wrap the returned
     // JVM object reference in an IPC proxy, and expose it as a DataFrame.
     return new DataFrame(
         new DataFrameIpcProxy(
             new JvmObjectReference(
                 SparkCLRIpcProxy.JvmBridge.CallNonStaticJavaMethod(
                     jvmCatalogReference, "createExternalTable",
                     new object[] { tableName, path, source }).ToString()),
             sqlContextProxy),
         SparkContext.GetActiveSparkContext());
 }
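A hedged usage sketch: the receiver name and the argument values below are illustrative assumptions, not part of the snippet above.

    // Illustrative only: "catalog" stands for whatever wrapper exposes
    // CreateExternalTable; table name, path, and source format are made up.
    DataFrame people = catalog.CreateExternalTable(
        "people",                  // name to register in the metastore
        "/data/people.parquet",    // location of the external data
        "parquet");                // data source format
    people.Show();                 // assuming the DataFrame exposes Show()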