Example No. 1
        public void Batch()
        {
            List<Document> list = new List<Document>();
            for (int i = 0; i < 5; i++)
            {
                Document task = new Document();
                task.Id = System.Guid.NewGuid().ToString();
                task.Text = _message;
                list.Add(task);
            }

            // Queue the batch; the service acknowledges with HTTP 200 or 202
            int res = _session.QueueBatchOfDocuments(list, _configId);
            Assert.IsTrue((res == 200 || res == 202), "RESULT: " + res);

            // Give the service a few seconds to process the queued batch
            System.Threading.Thread.Sleep(5000);

            // Retrieve whatever has been processed so far
            IList<DocAnalyticData> data = _session.GetProcessedDocuments(_configId);
            Assert.IsNotNull(data);
        }
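This test, like the similar test methods in Examples No. 4, 8, and 9, relies on fixture fields (_session, _configId, _docId, _id, _message) that are set up outside the snippet. A hedged sketch of what that setup might look like, reusing only the SDK calls shown on this page; the key values, the _message text, and the way a configuration ID is obtained are assumptions:

        // Hypothetical fixture setup for the test methods on this page.
        // Replace the key/secret placeholders and _configId with real values;
        // how you obtain a configuration ID is outside the scope of this sketch.
        private Session _session;
        private string _configId;                                   // a valid Semantria configuration ID, if required
        private string _docId = System.Guid.NewGuid().ToString();
        private string _id = System.Guid.NewGuid().ToString();
        private string _message = "Sample text to be analyzed by the Semantria service.";

        public void Setup()
        {
            ISerializer serializer = new JsonSerializer();
            _session = Session.CreateSession("consumerKey", "consumerSecret", serializer);
        }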
Example No. 2
        public static List<TweetModel> AddScore(List<TweetModel> tweetList)
        {
            // Creates JSON serializer instance
            ISerializer serializer = new JsonSerializer();

            // Initializes new session with the serializer object and the keys.
            using (Session session = Session.CreateSession(consumerKey, consumerSecret, serializer))
            {
                // Error callback handler. This event is raised in case of a server-side error
                session.Error += new Session.ErrorHandler(delegate(object sender, ResponseErrorEventArgs ea)
                {
                    Console.WriteLine(string.Format("{0}: {1}", (int)ea.Status, ea.Message));
                });

                // Obtain the subscription object to get the user limits enforced on the server side
                Subscription subscription = session.GetSubscription();
                Dictionary<string, Semantria.Com.TaskStatus> docsTracker = new Dictionary<string, Semantria.Com.TaskStatus>();

                List<Document> outgoingBatch = new List<Document>(subscription.BasicSettings.BatchLimit);

                foreach (var item in tweetList)
                {
                    if (docsTracker.ContainsKey(item.TweetIdStr))
                        continue;

                    docsTracker.Add(item.TweetIdStr, Semantria.Com.TaskStatus.QUEUED);

                    Document doc = new Document()
                    {
                        Id = item.TweetIdStr,
                        Text = item.Text
                    };

                    outgoingBatch.Add(doc);

                    if (outgoingBatch.Count == subscription.BasicSettings.BatchLimit)
                    {
                        break;
                    }

                }

                if (outgoingBatch.Count > 0)
                {
                    // Queues batch of documents for processing on Semantria service
                    if (session.QueueBatchOfDocuments(outgoingBatch) != -1)
                    {
                        Console.WriteLine(string.Format("{0} documents queued successfully.", outgoingBatch.Count));
                    }
                }

                List<DocAnalyticData> results = new List<DocAnalyticData>();
                int count = 0;
                while (docsTracker.Any(item => item.Value == Semantria.Com.TaskStatus.QUEUED) && count <= 200)
                {
                    count++;
                    System.Threading.Thread.Sleep(500);

                    // Requests processed results from Semantria service
                    Console.WriteLine("Retrieving your processed results...");
                    IList<DocAnalyticData> incomingBatch = session.GetProcessedDocuments();

                    foreach (DocAnalyticData item in incomingBatch)
                    {
                        if (docsTracker.ContainsKey(item.Id))
                        {
                            docsTracker[item.Id] = item.Status;
                            results.Add(item);
                        }
                    }
                }

                foreach (DocAnalyticData data in results)
                {
                    var currentTweet = tweetList.First(res => res.TweetIdStr == data.Id);
                    float score = 0;
                    // Average the sentiment scores of the document's phrases
                    if (data.Phrases != null && data.Phrases.Count > 0)
                    {
                        foreach (var phrase in data.Phrases)
                        {
                            score += phrase.SentimentScore;
                        }
                        score = score / data.Phrases.Count;
                    }

                    currentTweet.Score = score;

                }

            }
            return tweetList;
        }
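A minimal usage sketch for the AddScore helper above. It assumes TweetModel has a parameterless constructor and settable TweetIdStr, Text, and Score members (only those members are referenced by the original code), and that consumerKey/consumerSecret are populated elsewhere in the class:

        // Hypothetical caller: score a couple of tweets and print the results.
        // TweetModel's shape is assumed from the members used in AddScore above.
        public static void PrintScores()
        {
            List<TweetModel> tweets = new List<TweetModel>
            {
                new TweetModel { TweetIdStr = "1", Text = "The ice cream was fantastic." },
                new TweetModel { TweetIdStr = "2", Text = "The service was slow and disappointing." }
            };

            foreach (TweetModel tweet in AddScore(tweets))
            {
                Console.WriteLine("Tweet {0} scored {1}", tweet.TweetIdStr, tweet.Score);
            }
        }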
Example No. 3
        static void Main(string[] args)
        {
            // Use correct Semantria API credentials here
            string consumerKey = "";
            string consumerSecret = "";

            // A dictionary that keeps the IDs of sent documents and their statuses, so we can match the processed documents returned by the API.
            Dictionary<string, TaskStatus> docsTracker = new Dictionary<string, TaskStatus>(4);
            List<string> initialTexts = new List<string>();

            Console.WriteLine("Semantria Detailed processing mode demo.");

            string path = Path.Combine(AppDomain.CurrentDomain.BaseDirectory, "source.txt");
            if (!File.Exists(path))
            {
                Console.WriteLine("Source file isn't available.");
                return;
            }

            // Reads the document collection from the source file
            Console.WriteLine("Reading dataset from file...");
            using (StreamReader stream = new StreamReader(path))
            {
                while (!stream.EndOfStream)
                {
                    string line = stream.ReadLine();
                    if (string.IsNullOrEmpty(line) || line.Length < 3)
                        continue;

                    initialTexts.Add(line);
                }
            }

            // Creates JSON serializer instance
            ISerializer serializer = new JsonSerializer();

            // Initializes new session with the serializer object and the keys.
            using (Session session = Session.CreateSession(consumerKey, consumerSecret, serializer))
            {
                // Error callback handler. This event is raised in case of a server-side error
                session.Error += new Session.ErrorHandler(delegate(object sender, ResponseErrorEventArgs ea)
                {
                    Console.WriteLine(string.Format("{0}: {1}", (int)ea.Status, ea.Message));
                });

                // Obtain the subscription object to get the user limits enforced on the server side
                Subscription subscription = session.GetSubscription();

                List<Document> outgoingBatch = new List<Document>(subscription.BasicSettings.IncomingBatchLimit);
                IEnumerator<string> iterator = initialTexts.GetEnumerator();
                while (iterator.MoveNext())
                {
                    string docId = Guid.NewGuid().ToString();
                    Document doc = new Document()
                    {
                        Id = docId,
                        Text = iterator.Current
                    };

                    outgoingBatch.Add(doc);
                    docsTracker.Add(docId, TaskStatus.QUEUED);

                    if (outgoingBatch.Count == subscription.BasicSettings.IncomingBatchLimit)
                    {
                        // Queues batch of documents for processing on Semantria service
                        if (session.QueueBatchOfDocuments(outgoingBatch) != -1)
                        {
                            Console.WriteLine(string.Format("{0} documents queued successfully.", outgoingBatch.Count));
                            outgoingBatch.Clear();
                        }
                    }
                }

                if (outgoingBatch.Count > 0)
                {
                    // Queues batch of documents for processing on Semantria service
                    if (session.QueueBatchOfDocuments(outgoingBatch) != -1)
                    {
                        Console.WriteLine(string.Format("{0} documents queued successfully.", outgoingBatch.Count));
                    }
                }

                Console.WriteLine();

                // Semantria isn't a real-time solution, so you need to wait before retrieving the processed results.
                // In a real application this could be split into two jobs: one that queues source data and one that retrieves results.
                // Poll every half second until all queued documents have been processed.
                List<DocAnalyticData> results = new List<DocAnalyticData>();
                while (docsTracker.Any(item => item.Value == TaskStatus.QUEUED))
                {
                    System.Threading.Thread.Sleep(500);

                    // Requests processed results from Semantria service
                    Console.WriteLine("Retrieving your processed results...");
                    IList<DocAnalyticData> incomingBatch = session.GetProcessedDocuments();

                    foreach (DocAnalyticData item in incomingBatch)
                    {
                        if (docsTracker.ContainsKey(item.Id))
                        {
                            docsTracker[item.Id] = item.Status;
                            results.Add(item);
                        }
                    }
                }
                Console.WriteLine();

                foreach (DocAnalyticData data in results)
                {
                    // Printing of document sentiment score
                    Console.WriteLine(string.Format("Document {0}. Sentiment score: {1}", data.Id, data.SentimentScore));

                    // Printing of document topics
                    if (data.Topics != null && data.Topics.Count > 0)
                    {
                        Console.WriteLine("Document topics:");
                        foreach (Topic topic in data.Topics)
                            Console.WriteLine(string.Format("\t{0} (type: {1}) (strength: {2})", topic.Title, topic.Type, topic.SentimentScore));
                    }

                    // Printing of document auto-categories
                    if (data.AutoCategories != null && data.AutoCategories.Count > 0)
                    {
                        Console.WriteLine("Document categories:");
                        foreach (Topic category in data.AutoCategories)
                            Console.WriteLine(string.Format("\t{0} (strength: {1})", category.Title, category.StrengthScore));
                    }

                    // Printing of document themes
                    if (data.Themes != null && data.Themes.Count > 0)
                    {
                        Console.WriteLine("Document themes:");
                        foreach (DocTheme theme in data.Themes)
                            Console.WriteLine(string.Format("\t{0} (sentiment: {1})", theme.Title, theme.SentimentScore));
                    }

                    // Printing of document entities
                    if (data.Entities != null && data.Entities.Count > 0)
                    {
                        Console.WriteLine("Entities:");
                        foreach (DocEntity entity in data.Entities)
                            Console.WriteLine(string.Format("\t{0} : {1} (sentiment: {2})", entity.Title, entity.EntityType, entity.SentimentScore));
                    }

                    Console.WriteLine();
                }
            }

            Console.ReadKey(false);
        }
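The core of the loop above is the status-tracking dictionary: results are merged in and polling continues while any tracked document is still QUEUED. A condensed sketch of that update-and-check step, using the same Semantria SDK types as above (MergeProcessedBatch is a hypothetical helper name; it needs System.Linq for Any):

        // Hypothetical helper: records incoming statuses and reports whether any
        // tracked document is still queued, so the caller knows whether to keep polling.
        static bool MergeProcessedBatch(Dictionary<string, TaskStatus> docsTracker,
                                        IList<DocAnalyticData> incomingBatch,
                                        List<DocAnalyticData> results)
        {
            foreach (DocAnalyticData item in incomingBatch)
            {
                if (docsTracker.ContainsKey(item.Id))
                {
                    docsTracker[item.Id] = item.Status;
                    results.Add(item);
                }
            }

            // Keep polling while at least one tracked document is still QUEUED
            return docsTracker.Any(pair => pair.Value == TaskStatus.QUEUED);
        }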
Example No. 4
        public void Document()
        {
            Document task = new Document();
            task.Id = _docId;
            task.Text = _message;

            // Queue the document; the service acknowledges with HTTP 200 or 202
            int res = _session.QueueDocument(task, _configId);
            Assert.IsTrue((res == 200 || res == 202), "RESULT: " + res);

            // Retrieve the document's analysis results
            DocAnalyticData data = _session.GetDocument(_docId, _configId);
            Assert.IsNotNull(data);

            // Cancel the document; 404 is acceptable if it has already been processed
            int r = _session.CancelDocument(_docId, _configId);
            Assert.IsTrue((r == 200 || r == 202 || r == 404), "RESULT: " + r);
        }
Example No. 5
        private IList<DocAnalyticData> _getSemanticDataForTweets(List<ITweet> tweets)
        {
            ISerializer serializer = new JsonSerializer();
            IList<DocAnalyticData> results = new List<DocAnalyticData>();
            // Initializes new session with the serializer object and the keys.
            using (Session session = Session.CreateSession(ConsumerKey, ConsumerSecret, serializer))
            {
                // Error callback handler. This event is raised in case of a server-side error
                session.Error += new Session.ErrorHandler(delegate(object sender, ResponseErrorEventArgs ea)
                                                          {
                                                              Console.WriteLine(string.Format("{0}: {1}",
                                                                  (int)ea.Status, ea.Message));
                                                          });

                foreach (ITweet tweet in tweets)
                {
                    string docId = Guid.NewGuid().ToString();
                    // Creates a sample document to be processed by Semantria
                    Document doc = new Document()
                    {
                        // Unique document ID
                        Id = docId,
                        // Source text to be processed
                        Text = tweet.Text,
                        // Tag carries the original tweet ID for later correlation
                        Tag = tweet.IdStr
                    };

                    // Queues document for processing on Semantria service
                    if (session.QueueDocument(doc) != -1)
                        Console.WriteLine(string.Format("\"{0}\" document queued successfully.", docId));
                }
                // Console.WriteLine();

                // Semantria isn't a real-time solution, so you need to wait before retrieving the processed results.
                // In a real application this could be split into two jobs: one that queues source data and one that retrieves results.
                // Poll until as many results have arrived as documents were queued.
                int resCount = tweets.Count;

                while (resCount > 0)
                {
                    System.Threading.Thread.Sleep(2000);

                    // Requests processed results from Semantria service
                    Console.WriteLine("Retrieving your processed results...");
                    IList<DocAnalyticData> incomingBatch = session.GetProcessedDocuments();
                    ((List<DocAnalyticData>)results).AddRange(incomingBatch);

                    // Subtract only the documents received in this poll, not the running total
                    resCount -= incomingBatch.Count;
                }

            }

            return results;
        }
Example No. 6
        static void Main(string[] args)
        {
            // Use correct Semantria API credentials here
            string consumerKey = string.Empty;
            string consumerSecret = string.Empty;

            // null - send every single document separately
            // false - send uniqueJobIdCount batches
            // true - send all documents in single batch
            bool? dataSendingMode = true;
            int uniqueJobIdCount = 4;

            Dictionary<string, int> jobIds = new Dictionary<string, int>(uniqueJobIdCount);
            Dictionary<string, List<Document>> documents = new Dictionary<string, List<Document>>();

            Console.WriteLine("Semantria JobId feature demo.");

            string path = Path.Combine(AppDomain.CurrentDomain.BaseDirectory, "source.txt");
            if (!File.Exists(path))
            {
                Console.WriteLine("Source file isn't available.");
                return;
            }

            // Generates N unique jobId values
            for (int index = 0; index < uniqueJobIdCount; index++)
            {
                string id = Guid.NewGuid().ToString();

                jobIds.Add(id, 0);
                documents.Add(id, new List<Document>());
            }

            // Reads documents from the source file
            Console.WriteLine("Reading documents from file...");
            Console.WriteLine();
            using (StreamReader stream = new StreamReader(path))
            {
                Random rnd = new Random();

                while (!stream.EndOfStream)
                {
                    string line = stream.ReadLine();
                    if (string.IsNullOrEmpty(line) || line.Length < 3)
                        continue;

                    string jobId = jobIds.ElementAt(rnd.Next(uniqueJobIdCount)).Key;
                    jobIds[jobId]++;

                    Document doc = new Document()
                    {
                        Id = Guid.NewGuid().ToString(),
                        Text = line,
                        JobId = jobId
                    };

                    documents[jobId].Add(doc);
                }
            }

            // Creates JSON serializer instance
            ISerializer serializer = new JsonSerializer();

            // Initializes new session with the serializer object and the keys.
            using (Session session = Session.CreateSession(consumerKey, consumerSecret, serializer))
            {
                // Error callback handler. This event is raised in case of a server-side error
                session.Error += new Session.ErrorHandler(delegate(object sender, ResponseErrorEventArgs ea)
                {
                    if ((int)ea.Status < 500)
                    {
                        Console.WriteLine(string.Format("{0}: {1}", (int)ea.Status, ea.Message));
                    }
                    else
                    {
                        Console.WriteLine(string.Format("{0}: {1}", (int)ea.Status, "Unhandled server error happened."));
                    }
                });

                if (dataSendingMode == null)
                {
                    foreach (KeyValuePair<string, List<Document>> pair in documents)
                    {
                        foreach (Document doc in pair.Value)
                        {
                            // Queues document for processing on Semantria service
                            session.QueueDocument(doc);
                        }

                        Console.WriteLine("{0} documents queued for {1} job ID", pair.Value.Count, pair.Key);
                    }
                }
                else if (!dataSendingMode.Value)
                {
                    foreach (KeyValuePair<string, List<Document>> pair in documents)
                    {
                        // Queues batch of documents for processing on Semantria service
                        if (session.QueueBatchOfDocuments(pair.Value) != -1)
                        {
                            Console.WriteLine("{0} documents queued for {1} job ID", pair.Value.Count, pair.Key);
                        }
                    }
                }
                else
                {
                    List<Document> aBatch = new List<Document>();

                    foreach (KeyValuePair<string, List<Document>> pair in documents)
                    {
                        aBatch.AddRange(pair.Value);
                    }

                    if (session.QueueBatchOfDocuments(aBatch) != -1)
                    {
                        Console.WriteLine("{0} documents queued in single batch", aBatch.Count);
                    }
                }

                Console.WriteLine();
                Dictionary<string, int> aCopy = new Dictionary<string, int>(jobIds);
                foreach (KeyValuePair<string, int> pair in aCopy)
                {
                    int count = 0;
                    while (jobIds[pair.Key] > 0)
                    {
                        // Waits half a second while Semantria processes the queued documents
                        System.Threading.Thread.Sleep(500);

                        // Requests processed results from Semantria service
                        List<DocAnalyticData> results = new List<DocAnalyticData>(pair.Value);
                        results.AddRange(session.GetProcessedDocumentsByJobId(pair.Key));
                        jobIds[pair.Key] -= results.Count;
                        count += results.Count;
                    }

                    Console.WriteLine("{0} documents received for {1} Job ID.", count, pair.Key);
                }
            }

            Console.WriteLine();
            Console.WriteLine("Done!");
            Console.ReadKey(false);
        }
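A condensed sketch of the JobId round-trip demonstrated above, assuming an already-created session; it uses the same SDK calls as the example, while the sample texts and the helper name are placeholders:

        // Hypothetical condensed round-trip for the JobId feature shown above.
        static void QueueAndDrainOneJob(Session session)
        {
            string jobId = Guid.NewGuid().ToString();

            // Tag each queued document with the job ID
            List<Document> batch = new List<Document>
            {
                new Document { Id = Guid.NewGuid().ToString(), Text = "Sample text one.", JobId = jobId },
                new Document { Id = Guid.NewGuid().ToString(), Text = "Sample text two.", JobId = jobId }
            };

            int pending = batch.Count;
            if (session.QueueBatchOfDocuments(batch) != -1)
            {
                // Retrieve results for this job only, polling until all documents arrive
                while (pending > 0)
                {
                    System.Threading.Thread.Sleep(500);
                    IList<DocAnalyticData> received = session.GetProcessedDocumentsByJobId(jobId);
                    pending -= received.Count;
                }
            }
        }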
Example No. 7
        static void Main(string[] args)
        {
            // Use correct Semantria API credentials here
            string consumerKey = "";
            string consumerSecret = "";

            // Initial texts for processing
            List<string> initialTexts = new List<string>() {
                @"Lisa - there's 2 Skinny cow coupons available $5 skinny cow ice cream coupons on special k boxes and Printable FPC from facebook - a teeny tiny cup of ice cream. I printed off 2 (1 from my account and 1 from dh's). I couldn't find them instore and i'm not going to walmart before the 19th. Oh well sounds like i'm not missing much ...lol",
                @"In Lake Louise - a guided walk for the family with Great Divide Nature Tours  rent a canoe on Lake Louise or Moraine Lake  go for a hike to the Lake Agnes Tea House. In between Lake Louise and Banff - visit Marble Canyon or Johnson Canyon or both for family friendly short walks. In Banff  a picnic at Johnson Lake  rent a boat at Lake Minnewanka  hike up Tunnel Mountain  walk to the Bow Falls and the Fairmont Banff Springs Hotel  visit the Banff Park Museum. The ""must-do"" in Banff is a visit to the Banff Gondola and some time spent on Banff Avenue - think candy shops and ice cream.",
                @"On this day in 1786 - In New York City  commercial ice cream was manufactured for the first time."
            };

            System.Console.WriteLine("Semantria service demo.");

            // Creates XML serializer instance
            ISerializer serializer = new XmlSerializer();

            // Initializes new session with the serializer object and the keys.
            using (Session session = Session.CreateSession(consumerKey, consumerSecret, serializer))
            {
                // Error callback handler. This event is raised in case of a server-side error
                session.Error += new Session.ErrorHandler(delegate(object sender, ResponseEventArgs ea)
                    {
                        System.Console.WriteLine(string.Format("{0}: {1}", (int)ea.Status, ea.Message));
                    });

                foreach (string text in initialTexts)
                {
                    string docId = Guid.NewGuid().ToString();
                    // Creates a sample document to be processed by Semantria
                    Document doc = new Document()
                    {
                        // Unique document ID
                        Id = docId,
                        // Source text to be processed
                        Text = text
                    };

                    // Queues document for processing on Semantria service
                    if (session.QueueDocument(doc) != -1)
                        System.Console.WriteLine(string.Format("\"{0}\" document queued successfully.", docId));
                }
                System.Console.WriteLine();

                // Semantria isn't a real-time solution, so you need to wait before retrieving the processed results.
                // In a real application this could be split into two jobs: one that queues source data and one that retrieves results.
                // Poll until as many results have arrived as documents were queued.
                int resCount = initialTexts.Count;
                IList<DocAnalyticData> results = new List<DocAnalyticData>();
                while (resCount > 0)
                {
                    System.Threading.Thread.Sleep(2000);

                    // Requests processed results from Semantria service
                    System.Console.WriteLine("Retrieving your processed results...");
                    IList<DocAnalyticData> incomingBatch = session.GetProcessedDocuments();
                    ((List<DocAnalyticData>)results).AddRange(incomingBatch);

                    // Subtract only the documents received in this poll, not the running total
                    resCount -= incomingBatch.Count;
                }

                foreach (DocAnalyticData data in results)
                {
                    // Printing of document sentiment score
                    System.Console.WriteLine(string.Format("Document {0}. Sentiment score: {1}", data.Id, data.SentimentScore));

                    // Printing of document themes
                    if (data.Themes != null && data.Themes.Count > 0)
                    {
                        System.Console.WriteLine("Document themes:");
                        foreach (DocTheme theme in data.Themes)
                            System.Console.WriteLine(string.Format("\t{0} (sentiment: {1})", theme.Title, theme.SentimentScore));
                    }

                    // Printing of document entities
                    if (data.Entities != null && data.Entities.Count > 0)
                    {
                        System.Console.WriteLine("Entities:");
                        foreach (DocEntity entity in data.Entities)
                            System.Console.WriteLine(string.Format("\t{0} : {1} (sentiment: {2})", entity.Title, entity.EntityType, entity.SentimentScore));
                    }

                    System.Console.WriteLine();
                }
            }

            System.Console.ReadKey(false);
        }
Example No. 8
        public void CreateDocument()
        {
            Document task = new Document();
            task.Id = _id;
            task.Text = _message;

            // Queue the document; the service acknowledges with HTTP 200 or 202
            int res = _session.QueueDocument(task);
            Assert.IsTrue((res == 200 || res == 202), "RESULT: " + res);
        }
Example No. 9
        public void CreateBatch()
        {
            List<Document> list = new List<Document>();
            for (int i = 0; i < 5; i++)
            {
                Document task = new Document();
                task.Id = System.Guid.NewGuid().ToString();
                task.Text = _message;
                list.Add(task);
            }

            // Queue the batch; the service acknowledges with HTTP 200 or 202
            int res = _session.QueueBatchOfDocuments(list);
            Assert.IsTrue((res == 200 || res == 202), "RESULT: " + res);
        }