Code example #1
File: DStreamSamples.cs Project: outifaout/Mobius
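        // Word count over a text-file stream: the StreamingContext is recovered from
        // (or created at) the checkpoint path, the stream is unioned with itself, and
        // per-word state is maintained with UpdateStateByKey using a broadcast variable.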
        internal static void DStreamTextFileSamples()
        {
            count = 0;

            string directory      = SparkCLRSamples.Configuration.SampleDataLocation;
            string checkpointPath = Path.Combine(directory, "checkpoint");

            SparkContext sc = SparkCLRSamples.SparkContext;
            var          b  = sc.Broadcast <int>(0);

            StreamingContext ssc = StreamingContext.GetOrCreate(checkpointPath,
                                                                () =>
            {
                StreamingContext context = new StreamingContext(sc, 2000);
                context.Checkpoint(checkpointPath);

                var lines = context.TextFileStream(Path.Combine(directory, "test"));
                lines     = context.Union(lines, lines);
                var words = lines.FlatMap(l => l.Split(' '));
                var pairs = words.Map(w => new KeyValuePair <string, int>(w, 1));

                // since operations like ReduceByKey, Join and UpdateStateByKey are
                // separate DStream transformations defined in CSharpDStream.scala,
                // an extra CSharpRDD is introduced between these operations
                var wordCounts = pairs.ReduceByKey((x, y) => x + y);
                var join       = wordCounts.Join(wordCounts, 2);
                var state      = join.UpdateStateByKey <string, Tuple <int, int>, int>(new UpdateStateHelper(b).Execute);

                state.ForeachRDD((time, rdd) =>
                {
                    // there's a chance rdd.Take conflicts with ssc.Stop
                    if (stopFileServer)
                    {
                        return;
                    }

                    object[] taken = rdd.Take(10);
                    Console.WriteLine("-------------------------------------------");
                    Console.WriteLine("Time: {0}", time);
                    Console.WriteLine("-------------------------------------------");
                    foreach (object record in taken)
                    {
                        Console.WriteLine(record);
                    }
                    Console.WriteLine();

                    stopFileServer = count++ > 100;
                });

                return context;
            });

            ssc.Start();

            StartFileServer(directory, "words.txt", 100);

            ssc.AwaitTermination();
            ssc.Stop();
        }
Code example #2
File: DStreamSamples.cs Project: zwffff2015/Mobius
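        // Direct Kafka stream whose partition count is raised through the
        // spark.mobius.streaming.kafka.numPartitions.<topic> setting; asserts that only
        // the first batch carries data and that it lands in the expected partitions.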
        internal static void DStreamDirectKafkaWithRepartitionSample()
        {
            count = 0;

            string directory      = SparkCLRSamples.Configuration.SampleDataLocation;
            string checkpointPath = Path.Combine(directory, "checkpoint");

            StreamingContext ssc = StreamingContext.GetOrCreate(checkpointPath,
                                                                () =>
            {
                var conf                 = new SparkConf();
                SparkContext sc          = new SparkContext(conf);
                StreamingContext context = new StreamingContext(sc, 2000L);
                context.Checkpoint(checkpointPath);

                var kafkaParams = new List <Tuple <string, string> > {
                    new Tuple <string, string>("metadata.broker.list", brokers),
                    new Tuple <string, string>("auto.offset.reset", "smallest")
                };

                conf.Set("spark.mobius.streaming.kafka.numPartitions." + topic, partitions.ToString());
                var dstream = KafkaUtils.CreateDirectStream(context, new List <string> {
                    topic
                }, kafkaParams, Enumerable.Empty <Tuple <string, long> >());

                dstream.ForeachRDD((time, rdd) =>
                {
                    long batchCount   = rdd.Count();
                    int numPartitions = rdd.GetNumPartitions();

                    Console.WriteLine("-------------------------------------------");
                    Console.WriteLine("Time: {0}", time);
                    Console.WriteLine("-------------------------------------------");
                    Console.WriteLine("Count: " + batchCount);
                    Console.WriteLine("Partitions: " + numPartitions);

                    // only the first batch has data; it is repartitioned to at least the configured partition count
                    if (count++ == 0)
                    {
                        Assert.AreEqual(messages, batchCount);
                        Assert.IsTrue(numPartitions >= partitions);
                    }
                    else
                    {
                        Assert.AreEqual(0, batchCount);
                        Assert.IsTrue(numPartitions == 0);
                    }
                });

                return context;
            });

            ssc.Start();
            ssc.AwaitTermination();
        }
Code example #3
File: Program.cs Project: zwffff2015/Mobius
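        // Reads an Azure Event Hubs union stream and computes a sliding-window count
        // per log level, printing each windowed count to the console.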
        static void Main(string[] args)
        {
            var sparkContext    = new SparkContext(new SparkConf().SetAppName("SparkCLREventHub Example"));
            var eventhubsParams = new Dictionary <string, string>()
            {
                { "eventhubs.policyname", "<policyname>" },
                { "eventhubs.policykey", "<policykey>" },
                { "eventhubs.namespace", "<namespace>" },
                { "eventhubs.name", "<name>" },
                { "eventhubs.partition.count", "<partitioncount>" },
                { "eventhubs.consumergroup", "$default" },
                { "eventhubs.checkpoint.dir", "<hdfs path to eventhub checkpoint dir>" },
                { "eventhubs.checkpoint.interval", "<interval>" },
            };
            const int    windowDurationInSecs = 5;
            const int    slideDurationInSecs  = 5;
            const string checkpointPath       = "<hdfs path to spark checkpoint dir>";
            //const string outputPath = "<hdfs path to output dir>";

            const long       slideDurationInMillis = 5000;
            StreamingContext sparkStreamingContext = StreamingContext.GetOrCreate(checkpointPath,
                                                                                  () =>
            {
                var ssc = new StreamingContext(sparkContext, slideDurationInMillis);
                ssc.Checkpoint(checkpointPath);

                var stream = EventHubsUtils.CreateUnionStream(ssc, eventhubsParams.Select(v => new Tuple <string, string>(v.Key, v.Value)));
                var countByLogLevelAndTime = stream
                                             .Map(bytes => Encoding.UTF8.GetString(bytes))
                                             .Filter(line => line.Contains(","))
                                             .Map(line => line.Split(','))
                                             .Map(columns => new Tuple <string, int>(string.Format("{0},{1}", columns[0], columns[1]), 1))
                                             .ReduceByKeyAndWindow((x, y) => x + y, (x, y) => x - y, windowDurationInSecs, slideDurationInSecs, 3)
                                             .Map(logLevelCountPair => string.Format("{0},{1}", logLevelCountPair.Item1, logLevelCountPair.Item2));

                countByLogLevelAndTime.ForeachRDD(countByLogLevel =>
                {
                    //dimensionalCount.SaveAsTextFile(string.Format("{0}/{1}", outputPath, Guid.NewGuid()));
                    var dimensionalCountCollection = countByLogLevel.Collect();
                    foreach (var dimensionalCountItem in dimensionalCountCollection)
                    {
                        Console.WriteLine(dimensionalCountItem);
                    }
                });

                return ssc;
            });

            sparkStreamingContext.Start();
            sparkStreamingContext.AwaitTermination();
        }
Code example #4
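        // Direct Kafka variant of the windowed log-level count: each batch of counts is
        // saved to HDFS under a fresh GUID and echoed to the console.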
        static void Main(string[] args)
        {
            var          sparkContext = new SparkContext(new SparkConf().SetAppName("SparkCLRKafka Example"));
            const string topicName    = "<topicName>";
            var          topicList    = new List <string> {
                topicName
            };
            var kafkaParams = new Dictionary <string, string> // refer to http://kafka.apache.org/documentation.html#configuration
            {
                { "metadata.broker.list", "<kafka brokers list>" },
                { "auto.offset.reset", "smallest" }
            };
            var          perTopicPartitionKafkaOffsets = new Dictionary <string, long>();
            const int    windowDurationInSecs          = 5;
            const int    slideDurationInSecs           = 5;
            const string checkpointPath = "<hdfs path to spark checkpoint directory>";
            const string appOutputPath  = "<hdfs path to app output directory>";


            const long       slideDurationInMillis = 5000;
            StreamingContext sparkStreamingContext = StreamingContext.GetOrCreate(checkpointPath,
                                                                                  () =>
            {
                var ssc = new StreamingContext(sparkContext, slideDurationInMillis);
                ssc.Checkpoint(checkpointPath);

                var stream = KafkaUtils.CreateDirectStream(ssc, topicList, kafkaParams.Select(v => new Tuple <string, string>(v.Key, v.Value)), perTopicPartitionKafkaOffsets.Select(v => new Tuple <string, long>(v.Key, v.Value)));
                var countByLogLevelAndTime = stream
                                             .Map(tuple => Encoding.UTF8.GetString(tuple.Item2))
                                             .Filter(line => line.Contains(","))
                                             .Map(line => line.Split(','))
                                             .Map(columns => new Tuple <string, int>(string.Format("{0},{1}", columns[0], columns[1]), 1))
                                             .ReduceByKeyAndWindow((x, y) => x + y, (x, y) => x - y, windowDurationInSecs, slideDurationInSecs, 3)
                                             .Map(logLevelCountPair => string.Format("{0},{1}", logLevelCountPair.Item1, logLevelCountPair.Item2));

                countByLogLevelAndTime.ForeachRDD(countByLogLevel =>
                {
                    countByLogLevel.SaveAsTextFile(string.Format("{0}/{1}", appOutputPath, Guid.NewGuid()));
                    foreach (var logCount in countByLogLevel.Collect())
                    {
                        Console.WriteLine(logCount);
                    }
                });

                return ssc;
            });

            sparkStreamingContext.Start();
            sparkStreamingContext.AwaitTermination();
        }
Code example #5
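        // Generic Kafka consumer skeleton: each record is decoded as UTF-8, parsed as
        // JSON with Json.NET, and handed to a per-message processing block (left as a TODO).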
        public static void Process(string AppName, string CheckpointPath, Dictionary <string, string> kafkaParams)
        {
            var sparkContext = new SparkContext(new SparkConf().SetAppName(AppName));
            var topicList    = new List <string> {
                kafkaParams["topic"]
            };
            var        perTopicPartitionKafkaOffsets = new Dictionary <string, long>();
            const long slideDurationInMillis         = 1000;

            StreamingContext sparkStreamingContext = StreamingContext.GetOrCreate(CheckpointPath,
                                                                                  () =>
            {
                var ssc = new StreamingContext(sparkContext, slideDurationInMillis);

                var stream = KafkaUtils.CreateDirectStream(ssc, topicList, kafkaParams, perTopicPartitionKafkaOffsets);

                stream.Map(kvp => kvp.Value != null ? Encoding.UTF8.GetString(kvp.Value) : null)
                .ForeachRDD(rdd =>
                {
                    foreach (string line in rdd.Collect())
                    {
                        var message = JObject.Parse(line);
                        var _id     = message.SelectToken("docid").ToString();
                        // =======================
                        //  TODO: Process message
                        // =======================
                    }
                });
                ssc.Checkpoint(CheckpointPath);

                return ssc;
            });

            sparkStreamingContext.Start();
            sparkStreamingContext.AwaitTermination();
        }
Code example #6
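        // Classic HdfsWordCount: streams text files from an input directory on a
        // 30-second batch interval and prints the first ten word counts of each batch.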
        static void Main(string[] args)
        {
            if (args.Length < 2)
            {
                Console.WriteLine("Usage: HdfsWordCount <checkpointDirectory> <inputDirectory>");
                return;
            }

            string checkpointPath = args[0];
            string inputDir       = args[1];

            StreamingContext ssc = StreamingContext.GetOrCreate(checkpointPath,
                                                                () =>
            {
                var sparkConf = new SparkConf();
                sparkConf.SetAppName("HdfsWordCount");
                var sc = new SparkContext(sparkConf);
                StreamingContext context = new StreamingContext(sc, 30000);
                context.Checkpoint(checkpointPath);

                var lines      = context.TextFileStream(inputDir);
                var words      = lines.FlatMap(l => l.Split(' '));
                var pairs      = words.Map(w => new KeyValuePair <string, int>(w, 1));
                var wordCounts = pairs.ReduceByKey((x, y) => x + y);

                wordCounts.ForeachRDD((time, rdd) =>
                {
                    Console.WriteLine("-------------------------------------------");
                    Console.WriteLine("Time: {0}", time);
                    Console.WriteLine("-------------------------------------------");
                    object[] taken = rdd.Take(10);
                    foreach (object record in taken)
                    {
                        Console.WriteLine(record);
                    }
                    Console.WriteLine();
                });

                return context;
            });

            ssc.Start();
            ssc.AwaitTermination();
            ssc.Stop();
        }
Code example #7
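        // Unions two direct Kafka streams of RowIdCountTime records, optionally
        // repartitions the result, and logs every element of each batch.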
        public override void Run(Lazy <SparkContext> sparkContext, int currentTimes, int totalTimes)
        {
            DeleteCheckPointDirectory(currentTimes);

            var options = Options as UnionTopicTestOptions;

            var streamingContext = StreamingContext.GetOrCreate(options.CheckPointDirectory,
                                                                () =>
            {
                var ssc = new StreamingContext(sparkContext.Value, options.BatchSeconds * 1000L);
                ssc.Checkpoint(options.CheckPointDirectory);

                var stream1 = KafkaUtils.CreateDirectStream(ssc, new List <string> {
                    options.Topic1
                }, kafkaParams, offsetsRange)
                              .Map(line => new RowIdCountTime().Deserialize(line.Value));
                var stream2 = KafkaUtils.CreateDirectStream(ssc, new List <string> {
                    options.Topic2
                }, kafkaParams, offsetsRange)
                              .Map(line => new RowIdCountTime().Deserialize(line.Value));
                var stream = stream1.Union(stream2);

                if (options.RePartition > 0)
                {
                    stream = stream.Repartition(options.RePartition);
                }

                stream.ForeachRDD(rdd =>
                {
                    rdd.Foreach(idCount =>
                    {
                        Console.WriteLine($"{NowMilli} {this.GetType().Name} : {idCount}");
                    });
                });

                SaveStreamToFile(stream.Map(it => it.ToString()));
                return ssc;
            });

            streamingContext.Start();

            WaitTerminationOrTimeout(streamingContext);
        }
Code example #8
File: WindowSlideTest.cs Project: lqm678/testMobius
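        // Repeatedly runs a ReduceByKeyAndWindow test over a direct Kafka stream,
        // summing and inverse-summing values over the configured window and slide durations.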
        public override void Run(Lazy <SparkContext> sparkContext, int currentTimes, int totalTimes)
        {
            DeleteCheckPointDirectory(currentTimes);

            var options      = Options as WindowSlideTestOptions;
            var allBeginTime = DateTime.Now;

            var topicList = new List <string>(options.Topics.Split(";,".ToArray()));

            ParseKafkaParameters();

            for (var k = 0; options.TestTimes <= 0 || k < options.TestTimes; k++)
            {
                var beginTime = DateTime.Now;
                //Logger.LogInfo("begin test[{0}]-{1} , sparkContext = {2}", k + 1, options.TestTimes > 0 ? options.TestTimes.ToString() : "infinite", sparkContext.Value);
                var streamingContext = StreamingContext.GetOrCreate(options.CheckPointDirectory,
                                                                    () =>
                {
                    var ssc = new StreamingContext(sparkContext.Value, options.BatchSeconds * 1000L);
                    ssc.Checkpoint(options.CheckPointDirectory);

                    var stream = KafkaUtils.CreateDirectStream(ssc, topicList, kafkaParams, offsetsRange)
                                 .Map(line => Encoding.UTF8.GetString(line.Value));

                    var pairs = stream.Map(new ParseKeyValueArray(options.ElementCount, options.ShowReceivedLines).Parse);

                    var reducedStream = pairs.ReduceByKeyAndWindow(
                        new ReduceHelper(options.CheckArrayAtFirst).Sum,
                        new ReduceHelper(options.CheckArrayAtFirst).InverseSum,
                        options.WindowSeconds,
                        options.SlideSeconds
                        );

                    reducedStream.ForeachRDD(new SumCountStatic().ForeachRDD <int[]>);
                    SaveStreamToFile(reducedStream);
                    return ssc;
                });

                streamingContext.Start();
                WaitTerminationOrTimeout(streamingContext);
            }
        }
Code example #9
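        // Minimal end-to-end skeleton of the windowed log-level count; the checkpoint
        // path, topics, and Kafka parameters are left empty and must be filled in before running.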
        static void Main(string[] args)
        {
            var checkpointPath        = "";
            var sparkContext          = new SparkContext(new SparkConf());
            var slideDurationInMillis = 10;
            var topics      = new List <string>();
            var kafkaParams = new List <Tuple <string, string> >();
            var perTopicPartitionKafkaOffsets = new List <Tuple <string, long> >();
            var windowDurationInSecs          = 10;
            var slideDurationInSecs           = 10;

            StreamingContext sparkStreamingContext = StreamingContext.GetOrCreate(checkpointPath, () =>
            {
                var ssc = new StreamingContext(sparkContext, slideDurationInMillis);
                ssc.Checkpoint(checkpointPath);
                var stream = KafkaUtils.CreateDirectStream(ssc, topics, kafkaParams, perTopicPartitionKafkaOffsets);

                var countByLogLevelAndTime = stream
                                             .Map(kvp => Encoding.UTF8.GetString(kvp.Item2))
                                             .Filter(line => line.Contains(","))
                                             .Map(line => line.Split(','))
                                             .Map(columns => new Tuple <string, int>(
                                                      string.Format("{0},{1}", columns[0], columns[1]), 1))
                                             .ReduceByKeyAndWindow((x, y) => x + y, (x, y) => x - y,
                                                                   windowDurationInSecs, slideDurationInSecs, 3)
                                             .Map(logLevelCountPair => string.Format("{0},{1}",
                                                                                     logLevelCountPair.Item1, logLevelCountPair.Item2));
                countByLogLevelAndTime.ForeachRDD(countByLogLevel =>
                {
                    foreach (var logCount in countByLogLevel.Collect())
                    {
                        Console.WriteLine(logCount);
                    }
                });
                return ssc;
            });

            sparkStreamingContext.Start();
            sparkStreamingContext.AwaitTermination();

            Console.WriteLine("Hello World!");
        }
Code example #10
File: DStreamSamples.cs Project: zwffff2015/Mobius
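        // Variant of the text-file word count that joins a windowed stream with itself,
        // seeds UpdateStateByKey with an initial RDD, and asserts the expected counts.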
        internal static void DStreamTextFileSample()
        {
            count = 0;

            string directory      = SparkCLRSamples.Configuration.SampleDataLocation;
            string checkpointPath = Path.Combine(directory, "checkpoint");

            SparkContext sc = SparkCLRSamples.SparkContext;
            var          b  = sc.Broadcast <int>(0);

            StreamingContext ssc = StreamingContext.GetOrCreate(checkpointPath,
                                                                () =>
            {
                StreamingContext context = new StreamingContext(sc, 2000L);     // batch interval is in milliseconds
                context.Checkpoint(checkpointPath);

                var lines = context.TextFileStream(Path.Combine(directory, "test"));
                lines     = context.Union(lines, lines);
                var words = lines.FlatMap(l => l.Split(' '));
                var pairs = words.Map(w => new Tuple <string, int>(w, 1));

                // since operations like ReduceByKey, Join and UpdateStateByKey are
                // separate DStream transformations defined in CSharpDStream.scala,
                // an extra CSharpRDD is introduced between these operations
                var wordCounts      = pairs.ReduceByKey((x, y) => x + y);
                var join            = wordCounts.Window(2, 2).Join(wordCounts, 2);
                var initialStateRdd = sc.Parallelize(new[] { new Tuple <string, int>("AAA", 88), new Tuple <string, int>("BBB", 88) });
                var state           = join.UpdateStateByKey(new UpdateStateHelper(b).Execute, initialStateRdd);

                state.ForeachRDD((time, rdd) =>
                {
                    // there's a chance rdd.Take conflicts with ssc.Stop
                    if (stopFileServer)
                    {
                        return;
                    }

                    object[] taken = rdd.Take(10);
                    Console.WriteLine("-------------------------------------------");
                    Console.WriteLine("Time: {0}", time);
                    Console.WriteLine("-------------------------------------------");
                    foreach (object record in taken)
                    {
                        Console.WriteLine(record);

                        var countByWord = (Tuple <string, int>)record;
                        Assert.AreEqual(countByWord.Item2, countByWord.Item1 == "The" || countByWord.Item1 == "lazy" || countByWord.Item1 == "dog" ? 92 : 88);
                    }
                    Console.WriteLine();

                    stopFileServer = true;
                });

                return context;
            });

            StartFileServer(ssc, directory, "words.txt");

            ssc.Start();

            ssc.AwaitTermination();
        }
Code example #11
File: DStreamStateSample.cs Project: valmac/Mobius
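        // MapWithState version of the word count: a StateSpec with an initial state RDD,
        // a single partition, and a 30-second timeout produces per-word state snapshots each batch.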
        internal static void DStreamMapWithStateSample()
        {
            string directory      = SparkCLRSamples.Configuration.SampleDataLocation;
            string checkpointPath = Path.Combine(directory, "checkpoint");

            StreamingContext ssc = StreamingContext.GetOrCreate(checkpointPath,
                                                                () =>
            {
                SparkContext sc          = SparkCLRSamples.SparkContext;
                StreamingContext context = new StreamingContext(sc, 10000L);     // batch interval is in milliseconds
                context.Checkpoint(checkpointPath);

                var lines = context.TextFileStream(Path.Combine(directory, "test1"));
                lines     = context.Union(lines, lines);
                var words = lines.FlatMap(l => l.Split(' '));
                var pairs = words.Map(w => new KeyValuePair <string, int>(w, 1));

                var wordCounts   = pairs.ReduceByKey((x, y) => x + y);
                var initialState = sc.Parallelize(new[] { new KeyValuePair <string, int>("NOT_A_WORD", 1024), new KeyValuePair <string, int>("dog", 10000), }, 1);
                var stateSpec    = new StateSpec <string, int, int, KeyValuePair <string, int> >((word, count, state) =>
                {
                    if (state.IsTimingOut())
                    {
                        Console.WriteLine("Found timing out word: {0}", word);
                        return new KeyValuePair <string, int>(word, state.Get());
                    }

                    var sum = 0;
                    if (state.Exists())
                    {
                        sum = state.Get();
                    }
                    state.Update(sum + count);
                    Console.WriteLine("word: {0}, count: {1}", word, sum + count);
                    return new KeyValuePair <string, int>(word, sum + count);
                }).NumPartitions(1).InitialState(initialState).Timeout(TimeSpan.FromSeconds(30));

                var snapshots = wordCounts.MapWithState(stateSpec).StateSnapshots();
                snapshots.ForeachRDD((double time, RDD <dynamic> rdd) =>
                {
                    Console.WriteLine("-------------------------------------------");
                    Console.WriteLine("Snapshots @ Time: {0}", time);
                    Console.WriteLine("-------------------------------------------");

                    foreach (KeyValuePair <string, int> record in rdd.Collect())
                    {
                        Console.WriteLine("[{0}, {1}]", record.Key, record.Value);
                    }
                    Console.WriteLine();
                });

                return context;
            });

            ssc.Start();

            StartFileServer(directory, "words.txt", 100);

            ssc.AwaitTermination();
            ssc.Stop();
        }