Esempio n. 1
0
        /// <summary>
        /// Updates the beginning (opening) balances. The incoming JSON is a tree of
        /// balance nodes; the tree is flattened into a single flat list before being
        /// handed to the reporting service for the current company.
        /// </summary>
        /// <param name="data">JSON-serialized list of <c>T_BeginningBalance</c> trees.</param>
        /// <returns>A JSON string of the form <c>{"Result":true|false,"Msg":"..."}</c>.</returns>
        public string UpdBeginningBalance(string data)
        {
            string strFmt = "{{\"Result\":{0},\"Msg\":\"{1}\"}}";
            List <T_BeginningBalance> beginningBalance =
                new JavaScriptSerializer().Deserialize <List <T_BeginningBalance> >(data);

            // Flatten the tree: repeatedly hoist children to the top-level list until
            // no node has children left. Null children are treated as empty (the
            // original code threw NullReferenceException on a null children list).
            while (beginningBalance.Any(i => i.children != null && i.children.Count > 0))
            {
                T_BeginningBalance currItem =
                    beginningBalance.First(i => i.children != null && i.children.Count > 0);
                beginningBalance.AddRange(currItem.children);
                currItem.children = new List <T_BeginningBalance>();
            }

            // NOTE(review): the original wrapped this call in `if (true)` with an
            // unreachable validation-error branch; that dead branch has been removed.
            // If validation was intended here, it needs to be reinstated explicitly.
            bool result = new ReportSvc().UpdBeginningBalance(beginningBalance, Session["CurrentCompanyGuid"].ToString());
            string msg  = result ? General.Resource.Common.Success : General.Resource.Common.Failed;

            return string.Format(strFmt, result.ToString().ToLower(), msg);
        }
Esempio n. 2
0
        /// <summary>
        /// Recursively fetches tweets for the inclusive range [start_date, end_date].
        /// The remote endpoint caps a single response at 100 items, so when a response
        /// hits the cap the date range is split in half and each half is fetched
        /// recursively, until every sub-range fits under the limit (or a single day
        /// still exceeds it, in which case that day's truncated data is returned as-is).
        /// </summary>
        /// <param name="start_date">Inclusive start of the search range.</param>
        /// <param name="end_date">Inclusive end of the search range.</param>
        /// <returns>All tweets found in the range, without duplicates.</returns>
        public List <TweetDict> RecTweetGrabber(DateTime start_date, DateTime end_date)
        {
            string s_date = start_date.ToString("yyyy-MM-dd");
            string e_date = end_date.ToString("yyyy-MM-dd");

            Debug.WriteLine("Searching with start_date:[{0}] end_date:[{1}]", s_date, e_date);

            WebPageGrabber url_grabber = new WebPageGrabber();
            string         response    = url_grabber.GET(String.Format(base_url, s_date, e_date));

            // Convert response into a List<TweetDict> object.
            var response_list = new JavaScriptSerializer().Deserialize <List <TweetDict> >(response);

            if (response_list.Count < 100)
            {
                // Under the response cap: this range's data is complete.
                return response_list;
            }

            Debug.WriteLine("Total List size is:[{0}]", response_list.Count);

            // BUG FIX: compare the formatted dates rather than the raw DateTimes. The
            // API only sees whole days, so two distinct times on the same calendar day
            // must stop the recursion here; comparing ticks could recurse forever on a
            // capped single day (fractional AddDays never makes them exactly equal).
            if (s_date == e_date)
            {
                // Too much data on a single day; can't split any deeper.
                return response_list;
            }

            Debug.WriteLine("Splitting Date range: start_date:[{0}] end_date:[{1}]", start_date, end_date);

            // Split the range in half; right_start is the day after left_end so the
            // two halves never cover the same day (which would duplicate data).
            double   half_days   = (end_date - start_date).TotalDays / 2;
            DateTime left_end    = start_date.AddDays(half_days);
            DateTime right_start = left_end.AddDays(1);

            Debug.WriteLine("Left Half is: start_date:[{0}] left_end:[{1}]", start_date, left_end);
            Debug.WriteLine("Right Half is: right_start:[{0}] end_date:[{1}]", right_start, end_date);

            // BUG FIX: the original appended the recursive halves onto the already-
            // truncated 100-item response, so those 100 tweets were returned twice.
            // The two halves together cover the whole range; return only them.
            var combined = new List <TweetDict>();
            combined.AddRange(RecTweetGrabber(start_date, left_end));
            combined.AddRange(RecTweetGrabber(right_start, end_date));
            return combined;
        }