Code example #1
0
        /// <summary>
        /// Wraps <paramref name="self"/> in a pipelined RDD whose worker function re-types each
        /// KeyValuePair{K, V} element as KeyValuePair{K, dynamic}, because the two pair types are
        /// incompatible even though V itself converts to dynamic.
        /// </summary>
        /// <typeparam name="K">Key type of the pairs.</typeparam>
        /// <typeparam name="V">Original value type of the pairs.</typeparam>
        /// <typeparam name="W1">Additional value type forwarded to the dynamic-typing wrapper.</typeparam>
        /// <typeparam name="W2">Additional value type forwarded to the dynamic-typing wrapper.</typeparam>
        /// <typeparam name="W3">Additional value type forwarded to the dynamic-typing wrapper.</typeparam>
        /// <param name="self">The RDD whose pair elements should be re-typed.</param>
        /// <returns>A pipelined RDD producing KeyValuePair{K, dynamic} elements.</returns>
        private static RDD <KeyValuePair <K, dynamic> > MapPartitionsWithIndex <K, V, W1, W2, W3>(this RDD <KeyValuePair <K, dynamic> > self)
        {
            // The wrapper performs the per-element KeyValuePair re-typing on the worker side.
            var wrapper = new DynamicTypingWrapper <K, V, W1, W2, W3>();
            var worker  = new CSharpWorkerFunc(wrapper.Execute);

            var result = new PipelinedRDD <KeyValuePair <K, dynamic> >();
            result.workerFunc            = worker;
            // Re-typing never moves elements between partitions, so partitioning is preserved
            // and the source partitioner is carried forward unchanged.
            result.preservesPartitioning = true;
            result.previousRddProxy      = self.rddProxy;
            result.prevSerializedMode    = self.serializedMode;
            result.sparkContext         = self.sparkContext;
            result.rddProxy             = null;  // materialized lazily
            result.serializedMode       = SerializedMode.Byte;
            result.partitioner          = self.partitioner;

            return result;
        }
Code example #2
0
File: PipelinedRDD.cs — Project: hhland/SparkCLR
        //TODO - give generic types a better id
        /// <summary>
        /// Applies <paramref name="newFunc"/> to every partition (together with its index) of this RDD.
        /// While the RDD is still pipelinable, the new function is fused with the already-pipelined
        /// one so both run in a single stage; otherwise this falls back to the base implementation.
        /// </summary>
        /// <typeparam name="U1">Element type of the resulting RDD.</typeparam>
        /// <param name="newFunc">Maps (partition index, partition elements) to the new elements.</param>
        /// <param name="preservesPartitioningParam">True if <paramref name="newFunc"/> keeps every element in its current partition.</param>
        /// <returns>The transformed RDD.</returns>
        public override RDD <U1> MapPartitionsWithIndex <U1>(Func <int, IEnumerable <U>, IEnumerable <U1> > newFunc, bool preservesPartitioningParam = false)
        {
            if (IsPipelinable())
            {
                // Partitioning survives only if every stage fused so far preserved it.
                bool stillPartitioned = preservesPartitioning && preservesPartitioningParam;

                var pipelinedRDD = new PipelinedRDD <U1>
                {
                    // Compose newFunc with the existing pipelined func so both execute in one pass.
                    func = new MapPartitionsWithIndexHelper <U, U1>(newFunc, func).Execute,
                    preservesPartitioning = stillPartitioned,
                    previousRddProxy      = this.previousRddProxy,
                    prevSerializedMode    = this.prevSerializedMode,

                    sparkContext   = this.sparkContext,
                    rddProxy       = null,
                    serializedMode = SerializedMode.Byte,
                    // BUGFIX: carry the partitioner forward when partitioning is preserved.
                    // Previously the partitioner was never set, so downstream operations lost
                    // partition information even when preservesPartitioning was true.
                    partitioner    = stillPartitioned ? this.partitioner : null
                };
                return(pipelinedRDD);
            }

            return(base.MapPartitionsWithIndex(newFunc, preservesPartitioningParam));
        }