private async Task<string> ObtainLatestRadiometersParameters()
{
    string retStr = "";

    if (Directory.Exists(RadiometersDataPath))
    {
        List<FileInfo> lCSVdataFilesInfoList =
            ((new DirectoryInfo(RadiometersDataPath)).GetFiles("*.csv", SearchOption.AllDirectories)).ToList();
        lCSVdataFilesInfoList.Sort((finfo1, finfo2) => finfo1.CreationTimeUtc.CompareTo(finfo2.CreationTimeUtc));
        FileInfo lastCSVdataFileInfo = lCSVdataFilesInfoList.Last();

        List<List<string>> Contents = ServiceTools.ReadDataFromCSV(lastCSVdataFileInfo.FullName, 2, true, ";");
        List<string> lastRdDataStrings = Contents.Last();

        retStr += "Radiometers:" + Environment.NewLine +
                  "Date,time: " + lastRdDataStrings[0] + Environment.NewLine +
                  "SW ave.(W/m2): " + lastRdDataStrings[7] + Environment.NewLine +
                  "LW downward ave.[W/m2]: " + lastRdDataStrings[15] + Environment.NewLine + Environment.NewLine;
    }
    else
    {
        retStr = "";
    }

    return retStr;
}
private void ReadObservationsReportCSVfile()
{
    if (File.Exists(strObservationsReportCSVfile))
    {
        List<List<string>> ObservationsReportCSVfileContents =
            ServiceTools.ReadDataFromCSV(strObservationsReportCSVfile, 1, true, ";", "\n");
        lMissionObservedData =
            ObservationsReportCSVfileContents.ConvertAll(strList => new MissionsObservedData(strList));
    }
}
static async Task<ImageStatsDataCalculationResult> ConvertImageTask(object inputArgs)
{
    ImageStatsDataCalculationResult Result = new ImageStatsDataCalculationResult();
    Dictionary<string, object> ParametersPassed = inputArgs as Dictionary<string, object>;

    string logFileName = "";
    string currentFullFileName = null;
    Stopwatch stopwatch = null;
    List<Tuple<string, string>> lImagesRoundMasksMappingFiles = null;

    if (ParametersPassed != null)
    {
        currentFullFileName = ParametersPassed["currentFullFileName"] as string;

        if (ParametersPassed.ContainsKey("ImagesRoundMasksXMLfilesMappingList"))
        {
            string ImagesRoundMasksXMLfilesMappingListPassed =
                (string)ParametersPassed["ImagesRoundMasksXMLfilesMappingList"];
            if (File.Exists(ImagesRoundMasksXMLfilesMappingListPassed))
            {
                List<List<string>> llImagesRoundMasksMappingFiles = ServiceTools.ReadDataFromCSV(
                    ImagesRoundMasksXMLfilesMappingListPassed, 0, true, ";", Environment.NewLine);
                lImagesRoundMasksMappingFiles = llImagesRoundMasksMappingFiles.ConvertAll(
                    list => new Tuple<string, string>(list[0], list[1]));
            }
        }

        if (ParametersPassed.ContainsKey("Stopwatch"))
        {
            stopwatch = ParametersPassed["Stopwatch"] as Stopwatch;
        }

        if (ParametersPassed.ContainsKey("logFileName"))
        {
            logFileName = ParametersPassed["logFileName"] as string;
        }
    }

    TimeSpan procStart = Process.GetCurrentProcess().TotalProcessorTime;

    try
    {
        // pick the predefined circular mask (if any) whose filename wildcard matches the current image
        RoundData predefinedRoundedMask = null;
        if (lImagesRoundMasksMappingFiles != null)
        {
            if (lImagesRoundMasksMappingFiles.Any())
            {
                if (lImagesRoundMasksMappingFiles.Find(
                        tpl => (new WildcardPattern(tpl.Item1)).IsMatch(currentFullFileName)) != null)
                {
                    string strFoundPredefinedRoundedMaskParametersXMLfile = lImagesRoundMasksMappingFiles.Find(
                        tpl => (new WildcardPattern(tpl.Item1)).IsMatch(currentFullFileName)).Item2;
                    strFoundPredefinedRoundedMaskParametersXMLfile =
                        strFoundPredefinedRoundedMaskParametersXMLfile.Substring(0,
                            strFoundPredefinedRoundedMaskParametersXMLfile.IndexOf(".xml") + 4);
                    predefinedRoundedMask = ServiceTools.ReadObjectFromXML(
                        strFoundPredefinedRoundedMaskParametersXMLfile, typeof(RoundData)) as RoundData;
                }
            }
        }

        Image<Bgr, byte> currImg = new Image<Bgr, byte>(currentFullFileName);
        ImageProcessing imgP = new ImageProcessing(currImg, predefinedRoundedMask);
        Image<Bgr, byte> maskImage = new Image<Bgr, byte>(new Image<Gray, byte>[]
        {
            imgP.significantMaskImageCircled, imgP.significantMaskImageCircled, imgP.significantMaskImageCircled
        });
        Image<Bgr, byte> img = imgP.tmpImage.Mul(maskImage);

        string ncFileName = ConventionalTransitions.NetCDFimageBareChannelsDataFilename(currentFullFileName,
            ParametersPassed["outputNetCDFfilesDirectory"] as string, true,
            ParametersPassed["ImagesBasePath"] as string);
        Dictionary<string, object> dataToNCwrite = new Dictionary<string, object>();
        dataToNCwrite.Add("ColorChannels", img.Data);
        NetCDFoperations.SaveVariousDataToFile(dataToNCwrite, ncFileName);

        TimeSpan procEnd = Process.GetCurrentProcess().TotalProcessorTime;
        Result = new ImageStatsDataCalculationResult()
        {
            calcResult = true,
            imgFilename = currentFullFileName,
            mp5Result = null,
            grixyrgbStatsData = null,
            stopwatch = stopwatch,
            exception = null,
            procTotalProcessorTimeEnd = procEnd,
            procTotalProcessorTimeStart = procStart
        };
        return Result;
    }
    catch (Exception ex)
    {
        TimeSpan procEnd = Process.GetCurrentProcess().TotalProcessorTime;
        Result = new ImageStatsDataCalculationResult()
        {
            calcResult = false,
            imgFilename = currentFullFileName,
            stopwatch = stopwatch,
            exception = ex,
            procTotalProcessorTimeEnd = procEnd,
            procTotalProcessorTimeStart = procStart
        };
        return Result;
    }
}
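// A minimal usage sketch (assumed calling context, not part of the original sources):
// ConvertImageTask() expects its arguments packed into a Dictionary<string, object> with the keys
// referenced above ("currentFullFileName", "outputNetCDFfilesDirectory", "ImagesBasePath", and
// optionally "ImagesRoundMasksXMLfilesMappingList", "Stopwatch", "logFileName"). All paths below
// are hypothetical.
//
// Dictionary<string, object> taskArgs = new Dictionary<string, object>()
// {
//     { "currentFullFileName", @"D:\SkyImages\some-sky-image-devID1.jpg" },
//     { "outputNetCDFfilesDirectory", @"D:\SkyImages\nc" },
//     { "ImagesBasePath", @"D:\SkyImages" },
//     { "ImagesRoundMasksXMLfilesMappingList", @"D:\SkyImages\masks-mapping.csv" },
//     { "Stopwatch", Stopwatch.StartNew() },
//     { "logFileName", "ConvertImageTask.log" }
// };
// ImageStatsDataCalculationResult res = await ConvertImageTask(taskArgs);   // from an async context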
//private void ImageProcessing(ImagesProcessingData ipdt)
//{
//}

private void EnumerateFilesToProcess()
{
    string directory = Path.GetDirectoryName(inputBasePath);
    string filemask = "*.jpg";
    List<string> filesList = new List<string>(Directory.EnumerateFiles(directory, filemask,
        bEnumerateFilesRecursively ? SearchOption.AllDirectories : SearchOption.TopDirectoryOnly));

    #region filter by camID
    //...devID1.jpg
    string ptrnCamID = "devid" + CamIDtoProcess + ".jpg";
    filesList = filesList.Where(fname => fname.ToLower().Contains(ptrnCamID)).ToList();
    #endregion

    Console.WriteLine("found " + filesList.Count + " images.");

    #region list, read and map image stats files
    Console.WriteLine("filtering by ready-to-use GrIxYRGB XML files...");
    List<string> statsFilesList = new List<string>(Directory.EnumerateFiles(imageYRGBstatsXMLdataFilesDirectory,
        ConventionalTransitions.ImageGrIxYRGBstatsFileNamesPattern(),
        bEnumerateFilesRecursively ? SearchOption.AllDirectories : SearchOption.TopDirectoryOnly));
    List<string> statsFilesListWOpath = statsFilesList.ConvertAll(Path.GetFileName);
    Console.WriteLine("found " + statsFilesList.Count + " XML stats files in directory " + Environment.NewLine +
                      imageYRGBstatsXMLdataFilesDirectory + Environment.NewLine + "by mask " + Environment.NewLine +
                      ConventionalTransitions.ImageGrIxYRGBstatsFileNamesPattern());
    int removed = filesList.RemoveAll(
        fname => !statsFilesListWOpath.Contains(ConventionalTransitions.ImageGrIxYRGBstatsDataFileName(fname, "", false)));
    Console.WriteLine("removed " + removed + " items (couldn't find stats data files). " + filesList.Count +
                      " remain to process.");
    if (!filesList.Any())
    {
        Console.WriteLine("There are no " + filemask + " files that satisfy the specified settings. Processing will not be started.");
        return;
    }
    lStatsProcessing = filesList.ConvertAll(strImgFname =>
    {
        ImagesProcessingData retVal = new ImagesProcessingData() { filename = strImgFname };
        return retVal;
    });
#if DEBUG
    //lStatsProcessing = lStatsProcessing.Where((ipd, ind) => ind < 10).ToList();
#endif
    Console.WriteLine("started reading and mapping stats data");
    int totalFilesCountToRead = lStatsProcessing.Count;
    int filesRead = 0;
    int currProgressPerc = 0;
    foreach (ImagesProcessingData ipdt in lStatsProcessing)
    {
        ipdt.grixyrgbStatsXMLfile = statsFilesList.First(
            statsFname => statsFname.Contains(ConventionalTransitions.ImageGrIxYRGBstatsDataFileName(ipdt.filename, "", false)));
        ipdt.grixyrgbStats = ServiceTools.ReadObjectFromXML(ipdt.grixyrgbStatsXMLfile,
            typeof(SkyImageIndexesStatsData)) as SkyImageIndexesStatsData;

        #region calculate and report progress
        filesRead++;
        double progress = 100.0d * (double)filesRead / (double)totalFilesCountToRead;
        if (progress - (double)currProgressPerc > 1.0d)
        {
            currProgressPerc = Convert.ToInt32(progress);
            Console.WriteLine("read " + currProgressPerc + "%");
        }
        #endregion calculate and report progress
    }
    #endregion

    #region list, read and map concurrent data
    List<string> concurrentDataFilesList = Directory.EnumerateFiles(ConcurrentDataXMLfilesDirectory,
        ConventionalTransitions.ImageConcurrentDataFilesNamesPattern(),
        bEnumerateFilesRecursively ? SearchOption.AllDirectories : SearchOption.TopDirectoryOnly).ToList();
    List<ConcurrentData> lConcurrentData = null;

    #region reading
    Console.WriteLine("started concurrent data reading");
    totalFilesCountToRead = concurrentDataFilesList.Count;
    filesRead = 0;
    currProgressPerc = 0;
    List<Dictionary<string, object>> lDictionariesConcurrentData = new List<Dictionary<string, object>>();
    foreach (string strConcDataXMLFile in concurrentDataFilesList)
    {
        Dictionary<string, object> currDict = ServiceTools.ReadDictionaryFromXML(strConcDataXMLFile);
        currDict.Add("XMLfileName", Path.GetFileName(strConcDataXMLFile));
        lDictionariesConcurrentData.Add(currDict);

        #region calculate and report progress
        filesRead++;
        double progress = 100.0d * (double)filesRead / (double)totalFilesCountToRead;
        if (progress - (double)currProgressPerc > 1.0d)
        {
            currProgressPerc = Convert.ToInt32(progress);
            Console.WriteLine("read " + currProgressPerc + "%");
        }
        #endregion calculate and report progress
    }
    lDictionariesConcurrentData.RemoveAll(dict => dict == null);
    lConcurrentData = lDictionariesConcurrentData.ConvertAll<ConcurrentData>(dict =>
    {
        ConcurrentData retVal = null;
        try
        {
            retVal = new ConcurrentData(dict);
        }
        catch (Exception ex)
        {
            string strError = "couldn't parse XML file " + dict["XMLfileName"] + " : " + Environment.NewLine + ex.Message;
            Console.WriteLine(strError);
        }
        return retVal;
    });
    lConcurrentData.RemoveAll(val => val == null);
    #endregion reading

    #region mapping
    // map the obtained concurrent data to images by their date-time
    Console.WriteLine("concurrent data mapping started");
    lStatsProcessing = lStatsProcessing.ConvertAll(ipdt =>
    {
        string currImgFilename = ipdt.filename;
        currImgFilename = Path.GetFileNameWithoutExtension(currImgFilename);
        string ptrn = @"(devID\d)";
        Regex rgxp = new Regex(ptrn, RegexOptions.IgnoreCase);
        string strCurrImgDT = rgxp.Replace(currImgFilename.Substring(4), "");
        //2015-12-16T06-01-38
        strCurrImgDT = strCurrImgDT.Substring(0, 11) + strCurrImgDT.Substring(11).Replace("-", ":");
        DateTime currImgDT = DateTime.Parse(strCurrImgDT, null, System.Globalization.DateTimeStyles.AdjustToUniversal);
        ConcurrentData nearestConcurrentData = lConcurrentData.Aggregate((cDt1, cDt2) =>
        {
            TimeSpan tspan1 = new TimeSpan(Math.Abs((cDt1.datetimeUTC - currImgDT).Ticks));
            TimeSpan tspan2 = new TimeSpan(Math.Abs((cDt2.datetimeUTC - currImgDT).Ticks));
            return (tspan1 <= tspan2) ? cDt1 : cDt2;
        });
        if (new TimeSpan(Math.Abs((nearestConcurrentData.datetimeUTC - currImgDT).Ticks)) >=
            TimeSpanForConcurrentDataMappingTolerance)
        {
            string strError = "couldn't find close enough concurrent data file for image:" + Environment.NewLine +
                              currImgFilename + Environment.NewLine + "closest concurrent data file is:" +
                              Environment.NewLine + nearestConcurrentData.filename + Environment.NewLine +
                              "with date-time value " + nearestConcurrentData.datetimeUTC.ToString("o");
            Console.WriteLine(strError);
            nearestConcurrentData = null;
        }
        ipdt.concurrentData = nearestConcurrentData;
        if (nearestConcurrentData != null)
        {
            ipdt.concurrentDataXMLfile = nearestConcurrentData.filename;
        }
        return ipdt;
    });
    #endregion mapping

    removed = lStatsProcessing.RemoveAll(ipdt => ipdt.concurrentData == null);
    Console.WriteLine("removed " + removed + " items (couldn't find concurrent data). " + lStatsProcessing.Count +
                      " files remain to process.");
    #endregion list, read and map concurrent data

    if (!lStatsProcessing.Any())
    {
        Console.WriteLine("There are no files that satisfy the specified settings and have all required concurrent data (stats or GPS etc.). Processing will not proceed.");
        return;
    }

    #region Predict SDC values using pre-trained NN parameters
    string csvHeader = lStatsProcessing[0].grixyrgbStats.CSVHeader() + ",SunElevationDeg,SunAzimuthDeg,sunDiskCondition";
    List<string> lCSVheader = csvHeader.Split(',').ToList();
    List<int> columnsToDelete = lCSVheader.Select((str, idx) => new Tuple<int, string>(idx, str))
        .Where(tpl => tpl.Item2.ToLower().Contains("filename")).ToList().ConvertAll(tpl => tpl.Item1);
    List<List<string>> lCalculatedData = lStatsProcessing.ConvertAll(dt =>
    {
        string currImageALLstatsDataCSVWithConcurrentData = dt.grixyrgbStats.ToCSV() + "," +
            dt.concurrentData.gps.SunZenithAzimuth().ElevationAngle.ToString().Replace(",", ".") + "," +
            dt.concurrentData.gps.SunZenithAzimuth().Azimuth.ToString().Replace(",", ".");
        List<string> retVal = currImageALLstatsDataCSVWithConcurrentData.Split(',').ToList();
        retVal = retVal.Where((str, idx) => !columnsToDelete.Contains(idx)).ToList();
        return retVal;
    });
    List<DenseVector> lDV_objects_features = lCalculatedData.ConvertAll(
        list => DenseVector.OfEnumerable(list.ConvertAll<double>(str => Convert.ToDouble(str.Replace(".", ",")))));
    DenseVector dvMeans = (DenseVector)((DenseMatrix)ServiceTools.ReadDataFromCSV(NormMeansFile, 0, ",")).Row(0);
    DenseVector dvRanges = (DenseVector)((DenseMatrix)ServiceTools.ReadDataFromCSV(NormRangeFile, 0, ",")).Row(0);
    lDV_objects_features = lDV_objects_features.ConvertAll(dv =>
    {
        DenseVector dvShifted = dv - dvMeans;
        DenseVector dvNormed = (DenseVector)dvShifted.PointwiseDivide(dvRanges);
        return dvNormed;
    });
    DenseMatrix dmObjectsFeatures = DenseMatrix.OfRowVectors(lDV_objects_features);
    DenseVector dvThetaValues = (DenseVector)ServiceTools.ReadDataFromCSV(NNtrainedParametersFile, 0, ",");
    List<int> NNlayersConfig =
        new List<double>(((DenseMatrix)ServiceTools.ReadDataFromCSV(NNconfigFile, 0, ",")).Row(0)).ConvertAll(
            dVal => Convert.ToInt32(dVal));
    List<List<double>> lDecisionProbabilities = null;
    List<SunDiskCondition> predictedSDClist = NNclassificatorPredictor<SunDiskCondition>.NNpredict(
        dmObjectsFeatures, dvThetaValues, NNlayersConfig, out lDecisionProbabilities,
        SunDiskConditionData.MatlabEnumeratedSDCorderedList()).ToList();

    //List<SunDiskCondition> predictedSDClist = predictedSDC.ConvertAll(sdcInt =>
    //{
    //    switch (sdcInt)
    //    {
    //        case 4: return SunDiskCondition.NoSun;
    //        case 1: return SunDiskCondition.Sun0;
    //        case 2: return SunDiskCondition.Sun1;
    //        case 3: return SunDiskCondition.Sun2;
    //        default: return SunDiskCondition.Defect;
    //    }
    //});

    string strToShow = "SDC values probabilities: " + Environment.NewLine +
                       "| No Sun | Sun_0 | Sun_1 | Sun_2 | Detected |" + Environment.NewLine;
    foreach (List<double> lDecisionProbability in lDecisionProbabilities)
    {
        strToShow += "| " + lDecisionProbability[3].ToString("F4") + " | " + lDecisionProbability[0].ToString("F4") +
                     " | " + lDecisionProbability[1].ToString("F4") + " | " + lDecisionProbability[2].ToString("F4") +
                     " |" + predictedSDClist[lDecisionProbabilities.IndexOf(lDecisionProbability)] + "|" +
                     Environment.NewLine;
    }
    ServiceTools.logToTextFile(errorLogFilename, strToShow, true, false);
    #endregion

    //lStatsProcessing =
    //    lStatsProcessing.Where((ipd, idx) => predictedSDClist[idx] == SunDiskCondition.Sun2).ToList();
    lStatsProcessing = lStatsProcessing.Where((ipd, idx) => predictedSDClist[idx] == sdcFilter).ToList();
    Console.WriteLine("Detected " + lStatsProcessing.Count + " images with SDC = " + sdcFilter.ToString());
    if (!lStatsProcessing.Any())
    {
        Console.WriteLine("There are no files with SDC = " + sdcFilter.ToString() + ". Processing will not proceed.");
        return;
    }
    Console.WriteLine("finished enumerating and filtering files. Files to process: " + lStatsProcessing.Count);
}
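// A small illustration (hypothetical helper, not part of the original sources) of the
// filename-to-timestamp convention used in the mapping region above: the first 4 characters of
// the extensionless filename are skipped, the "devIDn" token is stripped, and the "-" separators
// in the time part are turned back into ":" before parsing as UTC. Assuming a hypothetical
// 4-character prefix such as "img-", "img-2015-12-16T06-01-38devID1" -> "2015-12-16T06:01:38".
//
// static DateTime SkyImageDateTimeFromFilename(string fileNameWithoutExtension)
// {
//     string s = new Regex(@"(devID\d)", RegexOptions.IgnoreCase)
//         .Replace(fileNameWithoutExtension.Substring(4), "");
//     s = s.Substring(0, 11) + s.Substring(11).Replace("-", ":");
//     return DateTime.Parse(s, null, System.Globalization.DateTimeStyles.AdjustToUniversal);
// }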
public void Start(string[] args)
{
    readDefaultProperties();
    List<string> argsList = new List<string>(args);
    if (argsList.Find(str => str == "--recursive") != null)
    {
        bEnumerateFilesRecursively = true;
    }
    if (argsList.Find(str => str == "-y") != null)
    {
        bStartWithoutConfirmation = true;
    }

    // --filter-by-observed-cloud-cover-records
    // bFilterByObservedCloudCoverRecords
    if (argsList.Find(str => str == "--filter-by-observed-cloud-cover-records") != null)
    {
        bFilterByObservedCloudCoverRecords = true;
    }

    // sdcFilter
    if (argsList.Where(str => str.Contains("--sdc=")).Count() > 0)
    {
        string foundArg = argsList.Where(str => str.Contains("--sdc=")).ToList()[0];
        string strValue = foundArg.Replace("--sdc=", "");
        //sdcFilter
        if (strValue == "none")
        {
            sdcFilter = SunDiskCondition.NoSun;
        }
        else if (strValue == "0")
        {
            sdcFilter = SunDiskCondition.Sun0;
        }
        else if (strValue == "1")
        {
            sdcFilter = SunDiskCondition.Sun1;
        }
        else if (strValue == "2")
        {
            sdcFilter = SunDiskCondition.Sun2;
        }
        else
        {
            sdcFilter = SunDiskCondition.Sun2;
        }
    }
    else
    {
        Console.WriteLine("SDC filter is not specified. Filtering by SDC will not be applied.");
        sdcFilter = SunDiskCondition.Undefined; // do not apply SDC filtering
    }

    if (argsList.Where(str => str.Contains("--camera-id=")).Count() > 0)
    {
        string foundArg = argsList.Where(str => str.Contains("--camera-id=")).ToList()[0];
        string strValue = foundArg.Replace("--camera-id=", "");
        CamIDtoProcess = Convert.ToInt32(strValue);
        if ((CamIDtoProcess != 1) && (CamIDtoProcess != 2))
        {
            Console.WriteLine("camera ID out of range detected. I will not filter by camera ID.");
            CamIDtoProcess = 0;
        }
    }
    else
    {
        Console.WriteLine("camera ID is not specified. I will not filter by camera ID.");
        CamIDtoProcess = 0; // will not filter
    }

    if (!bStartWithoutConfirmation)
    {
        Console.Write("Start with the mentioned properties? [y/n] ");
        string strReply = Console.ReadLine();
        if (strReply.ToLower().First() != 'y')
        {
            Console.WriteLine("\nWill not proceed due to user interruption.");
            Console.WriteLine("===FINISHED===");
            Console.ReadKey();
            return;
        }
    }

    string outputDataFileBaseName = Path.GetFileNameWithoutExtension(outputDataFile);
    outputDataFile = strOutputDirectory + outputDataFileBaseName + "-" + sdcFilter.ToString() + "-camID" +
                     CamIDtoProcess + ".xml";
    string outputCSVfile = strOutputDirectory + outputDataFileBaseName + "-" + sdcFilter.ToString() + "-camID" +
                           CamIDtoProcess + ".csv";
    Console.WriteLine("getting files list");

    #region Enumerating files
    string directory = Path.GetDirectoryName(inputBasePath);
    string filemask = "*.jpg";
    List<string> filesList = new List<string>(Directory.EnumerateFiles(directory, filemask,
        bEnumerateFilesRecursively ? SearchOption.AllDirectories : SearchOption.TopDirectoryOnly));

    #region filter by camID
    //...devID1.jpg
    if (CamIDtoProcess > 0)
    {
        string ptrnCamID = "devid" + CamIDtoProcess + ".jpg";
        filesList = filesList.Where(fname => fname.ToLower().Contains(ptrnCamID)).ToList();
    }
    #endregion

    Console.WriteLine("found " + filesList.Count + " images.");

    #region try to find concurrent and stats data already assembled into a small set of files
    List<string> assembledDataFilesList =
        Directory.EnumerateFiles(ConcurrentDataXMLfilesDirectory, "*.xml", SearchOption.TopDirectoryOnly).ToList();
    List<ImagesProcessingData> lReadAssembledData = new List<ImagesProcessingData>();
    foreach (string strAssembledDataXMlfName in assembledDataFilesList)
    {
        try
        {
            List<ImagesProcessingData> currFileContent = ServiceTools.ReadObjectFromXML(strAssembledDataXMlfName,
                typeof(List<ImagesProcessingData>)) as List<ImagesProcessingData>;
            lReadAssembledData.AddRange(currFileContent);
        }
        catch (Exception ex)
        {
            continue;
        }
    }
    if (lReadAssembledData.Any())
    {
        Console.WriteLine("Found pre-assembled ImagesProcessingData XML files: ");
        foreach (string s in assembledDataFilesList)
        {
            Console.WriteLine(s);
        }
        Console.WriteLine("Read records from this set: " + lReadAssembledData.Count);
        Console.WriteLine("Images to process originally: " + filesList.Count);
        Console.WriteLine("Should I use these pre-assembled data? (y/n): ");
        string ans = Console.ReadKey().KeyChar.ToString();
        if (ans == "y")
        {
            lStatsProcessing = lReadAssembledData;
        }
    }
    #endregion try to find data already compiled into a small set of files

    if (!lStatsProcessing.Any())
    {
        #region list, read and map image stats files
        Console.WriteLine("filtering by ready-to-use GrIxYRGB XML files...");
        List<string> statsFilesList = new List<string>(Directory.EnumerateFiles(imageYRGBstatsXMLdataFilesDirectory,
            ConventionalTransitions.ImageGrIxYRGBstatsFileNamesPattern(),
            bEnumerateFilesRecursively ? SearchOption.AllDirectories : SearchOption.TopDirectoryOnly));
        List<string> statsFilesListWOpath = statsFilesList.ConvertAll(Path.GetFileName);
        Console.WriteLine("found " + statsFilesList.Count + " XML stats files in directory " + Environment.NewLine +
                          imageYRGBstatsXMLdataFilesDirectory + Environment.NewLine + "by mask " + Environment.NewLine +
                          ConventionalTransitions.ImageGrIxYRGBstatsFileNamesPattern());
        int removed = filesList.RemoveAll(
            fname => !statsFilesListWOpath.Contains(ConventionalTransitions.ImageGrIxYRGBstatsDataFileName(fname, "", false)));
        Console.WriteLine("removed " + removed + " items (couldn't find stats data files). " + filesList.Count +
                          " remain to process.");
        if (!filesList.Any())
        {
            Console.WriteLine("There are no " + filemask + " files that satisfy the specified settings. Processing will not be started.");
            return;
        }
        lStatsProcessing = filesList.ConvertAll(strImgFname =>
        {
            ImagesProcessingData retVal = new ImagesProcessingData() { filename = strImgFname };
            return retVal;
        });
        //#if DEBUG
        //        lStatsProcessing = lStatsProcessing.Where((ipd, ind) => ind < 10).ToList();
        //#endif
        Console.WriteLine("started reading and mapping stats data");
        int totalFilesCountToRead = lStatsProcessing.Count;
        int filesRead = 0;
        int currProgressPerc = 0;
        foreach (ImagesProcessingData ipdt in lStatsProcessing)
        {
            ipdt.grixyrgbStatsXMLfile = statsFilesList.First(
                statsFname => statsFname.Contains(ConventionalTransitions.ImageGrIxYRGBstatsDataFileName(ipdt.filename, "", false)));
            ipdt.grixyrgbStats = ServiceTools.ReadObjectFromXML(ipdt.grixyrgbStatsXMLfile,
                typeof(SkyImageIndexesStatsData)) as SkyImageIndexesStatsData;

            #region calculate and report progress
            filesRead++;
            double progress = 100.0d * (double)filesRead / (double)totalFilesCountToRead;
            if (progress - (double)currProgressPerc > 1.0d)
            {
                currProgressPerc = Convert.ToInt32(progress);
                Console.WriteLine("read " + currProgressPerc + "%");
            }
            #endregion calculate and report progress
        }
        #endregion

        #region list, read and map concurrent data
        List<ConcurrentData> lConcurrentData = null;
        List<string> concurrentDataFilesList = Directory.EnumerateFiles(ConcurrentDataXMLfilesDirectory,
            ConventionalTransitions.ImageConcurrentDataFilesNamesPattern(),
            bEnumerateFilesRecursively ? SearchOption.AllDirectories : SearchOption.TopDirectoryOnly).ToList();

        #region reading
        Console.WriteLine("started concurrent data reading");
        totalFilesCountToRead = concurrentDataFilesList.Count;
        filesRead = 0;
        currProgressPerc = 0;
        List<Dictionary<string, object>> lDictionariesConcurrentData = new List<Dictionary<string, object>>();
        foreach (string strConcDataXMLFile in concurrentDataFilesList)
        {
            Dictionary<string, object> currDict = ServiceTools.ReadDictionaryFromXML(strConcDataXMLFile);
            currDict.Add("XMLfileName", Path.GetFileName(strConcDataXMLFile));
            lDictionariesConcurrentData.Add(currDict);

            #region calculate and report progress
            filesRead++;
            double progress = 100.0d * (double)filesRead / (double)totalFilesCountToRead;
            if (progress - (double)currProgressPerc > 1.0d)
            {
                currProgressPerc = Convert.ToInt32(progress);
                Console.WriteLine("read " + currProgressPerc + "%");
            }
            #endregion calculate and report progress
        }
        lDictionariesConcurrentData.RemoveAll(dict => dict == null);
        lConcurrentData = lDictionariesConcurrentData.ConvertAll<ConcurrentData>(dict =>
        {
            ConcurrentData retVal = null;
            try
            {
                retVal = new ConcurrentData(dict);
            }
            catch (Exception ex)
            {
                string strError = "couldn't parse XML file " + dict["XMLfileName"] + " : " + Environment.NewLine + ex.Message;
                Console.WriteLine(strError);
            }
            return retVal;
        });
        lConcurrentData.RemoveAll(val => val == null);
        #endregion reading

        #region mapping
        // map the obtained concurrent data to images by their date-time
        Console.WriteLine("concurrent data mapping started");
        lStatsProcessing = lStatsProcessing.ConvertAll(ipdt =>
        {
            string currImgFilename = ipdt.filename;
            currImgFilename = Path.GetFileNameWithoutExtension(currImgFilename);
            DateTime currImgDT = ConventionalTransitions.DateTimeOfSkyImageFilename(currImgFilename);
            ConcurrentData nearestConcurrentData = lConcurrentData.Aggregate((cDt1, cDt2) =>
            {
                TimeSpan tspan1 = new TimeSpan(Math.Abs((cDt1.datetimeUTC - currImgDT).Ticks));
                TimeSpan tspan2 = new TimeSpan(Math.Abs((cDt2.datetimeUTC - currImgDT).Ticks));
                return (tspan1 <= tspan2) ? cDt1 : cDt2;
            });
            if (new TimeSpan(Math.Abs((nearestConcurrentData.datetimeUTC - currImgDT).Ticks)) >=
                TimeSpanForConcurrentDataMappingTolerance)
            {
                string strError = "couldn't find close enough concurrent data file for image:" + Environment.NewLine +
                                  currImgFilename + Environment.NewLine + "closest concurrent data file is:" +
                                  Environment.NewLine + nearestConcurrentData.filename + Environment.NewLine +
                                  "with date-time value " + nearestConcurrentData.datetimeUTC.ToString("o");
                Console.WriteLine(strError);
                nearestConcurrentData = null;
            }
            ipdt.concurrentData = nearestConcurrentData;
            if (nearestConcurrentData != null)
            {
                ipdt.concurrentDataXMLfile = nearestConcurrentData.filename;
            }
            return ipdt;
        });
        #endregion mapping

        removed = lStatsProcessing.RemoveAll(ipdt => ipdt.concurrentData == null);
        Console.WriteLine("removed " + removed + " items (couldn't find concurrent data). " + lStatsProcessing.Count +
                          " files remain to process.");
        #endregion list, read and map concurrent data
    }

    if (!lStatsProcessing.Any())
    {
        Console.WriteLine("There are no files that satisfy the specified settings and have all required concurrent data (stats or GPS etc.). Processing will not proceed.");
        return;
    }

    #region Filter by SDC values predicting it using pre-trained NN parameters

    #region
    //string csvHeader = lStatsProcessing[0].grixyrgbStats.CSVHeader() +
    //                   ",SunElevationDeg,SunAzimuthDeg,sunDiskCondition";
    //List<string> lCSVheader = csvHeader.Split(',').ToList();
    //List<int> columnsToDelete =
    //    lCSVheader.Select((str, idx) => new Tuple<int, string>(idx, str))
    //        .Where(tpl => tpl.Item2.ToLower().Contains("filename")).ToList().ConvertAll(tpl => tpl.Item1);
    //List<List<string>> lCalculatedData = lStatsProcessing.ConvertAll(dt =>
    //{
    //    string currImageALLstatsDataCSVWithConcurrentData = dt.grixyrgbStats.ToCSV() + "," +
    //        dt.concurrentData.gps.SunZenithAzimuth().ElevationAngle.ToString().Replace(",", ".") + "," +
    //        dt.concurrentData.gps.SunZenithAzimuth().Azimuth.ToString().Replace(",", ".");
    //    List<string> retVal = currImageALLstatsDataCSVWithConcurrentData.Split(',').ToList();
    //    retVal = retVal.Where((str, idx) => !columnsToDelete.Contains(idx)).ToList();
    //    return retVal;
    //});
    //List<DenseVector> lDV_objects_features =
    //    lCalculatedData.ConvertAll(
    //        list =>
    //            DenseVector.OfEnumerable(list.ConvertAll<double>(str => Convert.ToDouble(str.Replace(".", ",")))));
    #endregion

    DenseVector dvMeans = (DenseVector)((DenseMatrix)ServiceTools.ReadDataFromCSV(NormMeansFile, 0, ",")).Row(0);
    DenseVector dvRanges = (DenseVector)((DenseMatrix)ServiceTools.ReadDataFromCSV(NormRangeFile, 0, ",")).Row(0);

    #region
    //lDV_objects_features = lDV_objects_features.ConvertAll(dv =>
    //{
    //    DenseVector dvShifted = dv - dvMeans;
    //    DenseVector dvNormed = (DenseVector)dvShifted.PointwiseDivide(dvRanges);
    //    return dvNormed;
    //});
    //DenseMatrix dmObjectsFeatures = DenseMatrix.OfRowVectors(lDV_objects_features);
    #endregion

    DenseVector dvThetaValues = (DenseVector)ServiceTools.ReadDataFromCSV(NNtrainedParametersFile, 0, ",");
    List<int> NNlayersConfig =
        new List<double>(((DenseMatrix)ServiceTools.ReadDataFromCSV(NNconfigFile, 0, ",")).Row(0)).ConvertAll(
            dVal => Convert.ToInt32(dVal));

    #region
    // List<List<double>> lDecisionProbabilities = null;
    #endregion

    List<Tuple<ImagesProcessingData, List<SDCdecisionProbability>, SunDiskCondition>> lTplsPredictedSDClist =
        new List<Tuple<ImagesProcessingData, List<SDCdecisionProbability>, SunDiskCondition>>();
    List<List<double>> SDCdecisionProbabilitiesListDoubles = new List<List<double>>();
    List<SunDiskCondition> imagesSDCpredicted = SDCpredictorNN.PredictSDC_NN(lStatsProcessing, NNlayersConfig,
        dvThetaValues, dvMeans, dvRanges, out SDCdecisionProbabilitiesListDoubles);
    List<List<SDCdecisionProbability>> SDCdecisionProbabilitiesLists = SDCdecisionProbabilitiesListDoubles.ConvertAll(
        currSDCdecisionProbabilities => currSDCdecisionProbabilities.Select((dProb, idx) => new SDCdecisionProbability()
        {
            sdc = SunDiskConditionData.MatlabSDCenum(idx + 1),
            sdcDecisionProbability = dProb
        }).ToList());
    lTplsPredictedSDClist = lStatsProcessing.Zip(
        SDCdecisionProbabilitiesLists.Zip(imagesSDCpredicted,
            (lDecProb, sdcPredicted) =>
                new Tuple<List<SDCdecisionProbability>, SunDiskCondition>(lDecProb, sdcPredicted)).ToList(),
        (ipd, tpl) =>
            new Tuple<ImagesProcessingData, List<SDCdecisionProbability>, SunDiskCondition>(ipd, tpl.Item1, tpl.Item2)).ToList();

    #region
    //foreach (ImagesProcessingData dt in lStatsProcessing)
    //{
    //    List<double> currSDCdecisionProbabilities = new List<double>();
    //    SunDiskCondition currSDC = SDCpredictorNN.PredictSDC_NN(dt.grixyrgbStats, dt.concurrentData,
    //        NNlayersConfig, dvThetaValues, dvMeans, dvRanges, out currSDCdecisionProbabilities);
    //    List<SDCdecisionProbability> currSDCdecisionProbabilitiesList =
    //        currSDCdecisionProbabilities.Select((dProb, idx) => new SDCdecisionProbability()
    //        {
    //            sdc = SunDiskConditionData.MatlabSDCenum(idx + 1),
    //            sdcDecisionProbability = dProb
    //        }).ToList();
    //    lTplsPredictedSDClist.Add(
    //        new Tuple<ImagesProcessingData, List<SDCdecisionProbability>, SunDiskCondition>(dt,
    //            currSDCdecisionProbabilitiesList, currSDC));
    //}
    #endregion

    #region
    //List<int> predictedSDC =
    //    NNclassificatorPredictor.NNpredict(dmObjectsFeatures, dvThetaValues, NNlayersConfig,
    //        out lDecisionProbabilities).ToList();
    //List<SunDiskCondition> predictedSDClist = predictedSDC.ConvertAll(sdcInt =>
    //{
    //    switch (sdcInt)
    //    {
    //        case 4: return SunDiskCondition.NoSun;
    //        case 1: return SunDiskCondition.Sun0;
    //        case 2: return SunDiskCondition.Sun1;
    //        case 3: return SunDiskCondition.Sun2;
    //        default: return SunDiskCondition.Defect;
    //    }
    //});
    //List<Tuple<ImagesProcessingData, SunDiskCondition>> lTplsPredictedSDClist =
    //    predictedSDClist.Zip(lStatsProcessing,
    //        (sdc, ipd) => new Tuple<ImagesProcessingData, SunDiskCondition>(ipd, sdc)).ToList();
    #endregion

    #region output obtained SDC data to log file
    string strToShow = "SDC values probabilities: " + Environment.NewLine +
                       "| NoSun | Sun0 | Sun1 | Sun2 |" + Environment.NewLine;
    foreach (Tuple<ImagesProcessingData, List<SDCdecisionProbability>, SunDiskCondition> tpl in lTplsPredictedSDClist)
    {
        List<SDCdecisionProbability> currSDCdecisionProbabilitiesList = tpl.Item2;
        strToShow += "|" +
                     String.Format("{0,9}",
                         (currSDCdecisionProbabilitiesList.First(prob => prob.sdc == SunDiskCondition.NoSun)
                              .sdcDecisionProbability * 100.0d).ToString("F2") + "%") + "|" +
                     String.Format("{0,9}",
                         (currSDCdecisionProbabilitiesList.First(prob => prob.sdc == SunDiskCondition.Sun0)
                              .sdcDecisionProbability * 100.0d).ToString("F2") + "%") + "|" +
                     String.Format("{0,9}",
                         (currSDCdecisionProbabilitiesList.First(prob => prob.sdc == SunDiskCondition.Sun1)
                              .sdcDecisionProbability * 100.0d).ToString("F2") + "%") + "|" +
                     String.Format("{0,9}",
                         (currSDCdecisionProbabilitiesList.First(prob => prob.sdc == SunDiskCondition.Sun2)
                              .sdcDecisionProbability * 100.0d).ToString("F2") + "%") + "|" + Environment.NewLine;
    }
    ServiceTools.logToTextFile(errorLogFilename, strToShow, true, false);
    #endregion output obtained SDC data to log file

    #region filter by SDC value if needed
    if (sdcFilter != SunDiskCondition.Undefined)
    {
        lStatsProcessing = lStatsProcessing.Where((ipd, idx) => lTplsPredictedSDClist[idx].Item3 == sdcFilter).ToList();
        Console.WriteLine("Detected " + lStatsProcessing.Count + " images with SDC = " + sdcFilter.ToString());
    }
    #endregion filter by SDC value if needed
    #endregion Filter by SDC values predicting it using pre-trained NN parameters

    #region ObservedCloudCoverDataCSVfile
    if (bFilterByObservedCloudCoverRecords)
    {
        Console.WriteLine("Reading observed cloud cover data CSV file...");
        if (!File.Exists(ObservedCloudCoverDataCSVfile))
        {
            Console.WriteLine("Unable to read observed data CSV file: " + ObservedCloudCoverDataCSVfile);
            return;
        }
        List<List<string>> lCSVfileContents = ServiceTools.ReadDataFromCSV(ObservedCloudCoverDataCSVfile, 1, true);
        if (!lCSVfileContents.Any())
        {
            Console.WriteLine("The observed cloud cover CSV file seems to be empty: " + ObservedCloudCoverDataCSVfile);
            return;
        }
        List<ObservedClCoverData> lObservedData = lCSVfileContents.ConvertAll(lStr => new ObservedClCoverData(lStr));
        List<SkyImagesDataWith_Concurrent_Stats_CloudCover_SDC> lImagesFilteredByAvailableObservedData =
            new List<SkyImagesDataWith_Concurrent_Stats_CloudCover_SDC>();

        #region filter images by available observed data using DateTimeFilterTolerance
        Console.WriteLine("Filtering by observed data available...");
        foreach (ObservedClCoverData observedData in lObservedData)
        {
            DateTime currObservedDatumDateTime = observedData.dt;
            List<SkyImagesDataWith_Concurrent_Stats_CloudCover_SDC> lImagesCloseToCurrObservedDatum =
                lStatsProcessing
                    .Where(ipd =>
                    {
                        TimeSpan tspan = new TimeSpan(
                            Math.Abs((ConventionalTransitions.DateTimeOfSkyImageFilename(ipd.filename) -
                                      currObservedDatumDateTime).Ticks));
                        return tspan <= DateTimeFilterTolerance;
                    })
                    .ToList()
                    .ConvertAll(ifd => new SkyImagesDataWith_Concurrent_Stats_CloudCover_SDC()
                    {
                        // observedData
                        // ifd
                        skyImageFullFileName = ifd.filename,
                        skyImageFileName = Path.GetFileName(ifd.filename),
                        currImageDateTime = ConventionalTransitions.DateTimeOfSkyImageFilename(ifd.filename),
                        observedCloudCoverData = observedData,
                        concurrentDataXMLfile = ifd.concurrentDataXMLfile,
                        concurrentData = ifd.concurrentData,
                        grixyrgbStatsXMLfile = ifd.grixyrgbStatsXMLfile,
                        grixyrgbStats = ifd.grixyrgbStats,
                        SDCvalue = lTplsPredictedSDClist.First(tpl => tpl.Item1 == ifd).Item3,
                        SDCprobabilities = lTplsPredictedSDClist.First(tpl => tpl.Item1 == ifd).Item2
                    });
            lImagesFilteredByAvailableObservedData.AddRange(lImagesCloseToCurrObservedDatum);
        }
        #endregion filter images by available observed data using DateTimeFilterTolerance

        if (!lImagesFilteredByAvailableObservedData.Any())
        {
            Console.WriteLine("There are no images remaining after filtering using all available data. Output will be empty.");
        }
        ServiceTools.WriteObjectToXML(lImagesFilteredByAvailableObservedData, outputDataFile);

        #region Assemble and write the data to a CSV file
        // the observed CloudCover data is available here
        string csvHeader = lImagesFilteredByAvailableObservedData[0].grixyrgbStats.CSVHeader() +
                           ",SunElevationDeg,SunAzimuthDeg,ObservedTotalCloudCover,ObservedLowerCloudCover,SDC,SDCprobabilityNoSun,SDCprobabilitySun0,SDCprobabilitySun1,SDCprobabilitySun2";
        List<string> lCSVoutputData = lImagesFilteredByAvailableObservedData.ConvertAll(ifd =>
        {
            // all the statistical predictors - the same ones as for SDC
            // plus the observed CloudCover data
            string retVal = "";
            //ImagesProcessingData dt = tpl.Item2;
            //ObservedClCoverData clCov = tpl.Item1;
            retVal = ifd.grixyrgbStats.ToCSV() + "," +
                     ifd.concurrentData.gps.SunZenithAzimuth().ElevationAngle.ToString().Replace(",", ".") + "," +
                     ifd.concurrentData.gps.SunZenithAzimuth().Azimuth.ToString().Replace(",", ".") + "," +
                     ifd.observedCloudCoverData.CloudCoverTotal.ToString() + "," +
                     ifd.observedCloudCoverData.CloudCoverLower.ToString() + "," +
                     ifd.SDCvalue.ToString() + "," +
                     ifd.SDCprobabilities.First(prob => prob.sdc == SunDiskCondition.NoSun)
                         .sdcDecisionProbability.ToString().Replace(",", ".") + "," +
                     ifd.SDCprobabilities.First(prob => prob.sdc == SunDiskCondition.Sun0)
                         .sdcDecisionProbability.ToString().Replace(",", ".") + "," +
                     ifd.SDCprobabilities.First(prob => prob.sdc == SunDiskCondition.Sun1)
                         .sdcDecisionProbability.ToString().Replace(",", ".") + "," +
                     ifd.SDCprobabilities.First(prob => prob.sdc == SunDiskCondition.Sun2)
                         .sdcDecisionProbability.ToString().Replace(",", ".");
            return retVal;
        });
        string strToOutputToCSVfile = string.Join(Environment.NewLine, lCSVoutputData);
        ServiceTools.logToTextFile(outputCSVfile, csvHeader + Environment.NewLine, true, false);
        ServiceTools.logToTextFile(outputCSVfile, strToOutputToCSVfile, true, false);
        #endregion Assemble and write the data to a CSV file
    }
    else
    {
        ServiceTools.WriteObjectToXML(lStatsProcessing, outputDataFile);

        #region Assemble and write the data to a CSV file
        // there is no observed Cloud Cover data here;
        // such an output is not needed - keep all of this as an XML file only.
        #endregion Assemble and write the data to a CSV file
    }
    #endregion ObservedCloudCoverDataCSVfile
    #endregion Enumerating files and output to XML and CSV file

    Console.WriteLine("saved output data to file: " + Environment.NewLine + outputDataFile + Environment.NewLine +
                      Environment.NewLine);
    Console.WriteLine("===FINISHED===");
    Console.ReadKey();
}
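// A usage sketch (assumed executable name, not part of the original sources). The Start() method
// above recognizes the following command line switches:
//   --recursive                               enumerate *.jpg files in subdirectories as well
//   -y                                        start without the interactive confirmation
//   --filter-by-observed-cloud-cover-records  keep only images that have matching observed cloud cover records
//   --sdc=<none|0|1|2>                        filter images by the predicted Sun disk condition
//   --camera-id=<1|2>                         process only images of the given camera
//
// Example (hypothetical executable name):
//   SkyImagesSDCprocessing.exe --recursive --sdc=2 --camera-id=1 -y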
private async Task<string> ObtainLatestMeteoParameters()
{
    string retStr = "";

    // WSLufftUMBappPath
    // Date time ; Temperature [°C] ; Abs. air pressure [hPa] ; Relative humidity [%] ; Abs. humidity [g/m³]
    if (Directory.Exists(WSLufftUMBappPath))
    {
        List<FileInfo> lTXTdataFilesInfoList =
            ((new DirectoryInfo(WSLufftUMBappPath)).GetFiles("????-??-??Values.Txt", SearchOption.AllDirectories)).ToList();
        lTXTdataFilesInfoList.Sort((finfo1, finfo2) => finfo1.CreationTimeUtc.CompareTo(finfo2.CreationTimeUtc));
        FileInfo lastTXTdataFileInfo = lTXTdataFilesInfoList.Last();
        List<List<string>> Contents = ServiceTools.ReadDataFromCSV(lastTXTdataFileInfo.FullName, 2, true, ";");
        List<string> lastWSdataStrings = Contents.Last();
        retStr += "WS:" + Environment.NewLine +
                  "Date time: " + lastWSdataStrings[0] + Environment.NewLine +
                  "Temperature [°C]: " + lastWSdataStrings[1] + Environment.NewLine +
                  "Abs. air pressure [hPa]: " + lastWSdataStrings[2] + Environment.NewLine +
                  "Relative humidity [%]: " + lastWSdataStrings[3] + Environment.NewLine +
                  "Abs. humidity [g/m³]: " + lastWSdataStrings[4] + Environment.NewLine + Environment.NewLine;
        // retStr += string.Join(" ; ", Contents.Last()) + Environment.NewLine;
    }

    // R2SLufftUMBappPath
    if (Directory.Exists(R2SLufftUMBappPath))
    {
        List<FileInfo> lTXTdataFilesInfoList =
            ((new DirectoryInfo(R2SLufftUMBappPath)).GetFiles("????-??-??Values.Txt", SearchOption.AllDirectories)).ToList();
        lTXTdataFilesInfoList.Sort((finfo1, finfo2) => finfo1.CreationTimeUtc.CompareTo(finfo2.CreationTimeUtc));
        FileInfo lastTXTdataFileInfo = lTXTdataFilesInfoList.Last();
        List<List<string>> Contents = ServiceTools.ReadDataFromCSV(lastTXTdataFileInfo.FullName, 2, true, ";");
        List<string> lastR2SdataStrings = Contents.Last();
        retStr += "R2S:" + Environment.NewLine +
                  "Date,time: " + lastR2SdataStrings[0] + Environment.NewLine +
                  "Precipitation absol. [mm]: " + lastR2SdataStrings[1] + Environment.NewLine +
                  "Precipitation type: " + lastR2SdataStrings[2] + Environment.NewLine +
                  "Ambient temperature [°C]: " + lastR2SdataStrings[3] + Environment.NewLine +
                  "Precipitat.intensity [mil/h]: " + lastR2SdataStrings[4] + Environment.NewLine + Environment.NewLine;
        //retStr += string.Join(" ; ", Contents.Last()) + Environment.NewLine;
    }

    // VentusLufftUMBappPath
    // Date time ; Virtual temperature [°C] ; Wind speed [m/s] ; Wind speed [m/s] Vect. ; Wind direction [°] ;
    // Wind direction [°] Vect. ; Abs. air pressure [hPa] ; Wind value quality [%]
    if (Directory.Exists(VentusLufftUMBappPath))
    {
        List<FileInfo> lTXTdataFilesInfoList =
            ((new DirectoryInfo(VentusLufftUMBappPath)).GetFiles("????-??-??Values.Txt", SearchOption.AllDirectories)).ToList();
        lTXTdataFilesInfoList.Sort((finfo1, finfo2) => finfo1.CreationTimeUtc.CompareTo(finfo2.CreationTimeUtc));
        FileInfo lastTXTdataFileInfo = lTXTdataFilesInfoList.Last();
        List<List<string>> Contents = ServiceTools.ReadDataFromCSV(lastTXTdataFileInfo.FullName, 2, true, ";");
        List<string> lastVentusdataStrings = Contents.Last();
        retStr += "Ventus:" + Environment.NewLine +
                  "Date,time: " + lastVentusdataStrings[0] + Environment.NewLine +
                  "Virtual temperature [°C]: " + lastVentusdataStrings[1] + Environment.NewLine +
                  "Wind speed [m/s]: " + lastVentusdataStrings[2] + Environment.NewLine +
                  "Wind speed [m/s] Vect.: " + lastVentusdataStrings[3] + Environment.NewLine +
                  "Wind direction [°]: " + lastVentusdataStrings[4] + Environment.NewLine +
                  "Wind direction [°] Vect.: " + lastVentusdataStrings[5] + Environment.NewLine +
                  "Abs. air pressure [hPa]: " + lastVentusdataStrings[6] + Environment.NewLine +
                  "Wind value quality [%]: " + lastVentusdataStrings[7] + Environment.NewLine + Environment.NewLine;
        //retStr += string.Join(" ; ", Contents.Last()) + Environment.NewLine;
    }

    return retStr;
}
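// For illustration only (a made-up sample; the real "????-??-??Values.Txt" files are produced by
// the Lufft UMB software): each file is read as a ";"-separated table, two leading rows are
// skipped, and only the last row is reported. For the WS station the columns follow the comment
// above, e.g. a line could look like:
//   2016-03-01 12:35:00 ; 1.2 ; 1013.4 ; 87.0 ; 4.5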
private void CurrFileProcessing(ImagesConvertingData dat)
{
    string currentFullFileName = dat.filename;
    try
    {
        List<List<string>> csvFileContents =
            ServiceTools.ReadDataFromCSV(currentFullFileName, 3, true, ";", Environment.NewLine);
        List<double> lCenterXValues = csvFileContents.ConvertAll<double>(lStr => Convert.ToDouble(lStr[1]));
        List<double> lCenterYValues = csvFileContents.ConvertAll<double>(lStr => Convert.ToDouble(lStr[2]));
        List<double> lRadiiValues = csvFileContents.ConvertAll<double>(lStr => Convert.ToDouble(lStr[3]));

        // filter out default values
        // filteringMarks = the rows that we keep
        List<bool> filteringMarks = csvFileContents.ConvertAll<bool>(lStr => true);
        if (filterDefaultDoubleValueCenterX > 0.0d)
        {
            List<bool> filteringMarksAdd =
                lCenterXValues.ConvertAll<bool>(dVal => dVal != filterDefaultDoubleValueCenterX);
            filteringMarks =
                (filteringMarks.Zip<bool, bool, bool>(filteringMarksAdd, (bVal1, bVal2) => bVal1 && bVal2)).ToList();
        }
        if (filterDefaultDoubleValueCenterY > 0.0d)
        {
            List<bool> filteringMarksAdd =
                lCenterYValues.ConvertAll<bool>(dVal => dVal != filterDefaultDoubleValueCenterY);
            filteringMarks =
                (filteringMarks.Zip<bool, bool, bool>(filteringMarksAdd, (bVal1, bVal2) => bVal1 && bVal2)).ToList();
        }
        if (filterDefaultDoubleValueRadius > 0.0d)
        {
            List<bool> filteringMarksAdd =
                lRadiiValues.ConvertAll<bool>(dVal => dVal != filterDefaultDoubleValueRadius);
            filteringMarks =
                (filteringMarks.Zip<bool, bool, bool>(filteringMarksAdd, (bVal1, bVal2) => bVal1 && bVal2)).ToList();
        }
        List<int> indexes = filteringMarks
            .Select((val, idx) => new { val, idx })
            .Where(x => x.val)
            .Select(x => x.idx)
            .ToList();
        lCenterXValues = lCenterXValues
            .Select((dVal, idx) => new { dVal, idx })
            .Where(x => indexes.Contains(x.idx))
            .Select(x => x.dVal).ToList();
        lCenterYValues = lCenterYValues
            .Select((dVal, idx) => new { dVal, idx })
            .Where(x => indexes.Contains(x.idx))
            .Select(x => x.dVal).ToList();
        lRadiiValues = lRadiiValues
            .Select((dVal, idx) => new { dVal, idx })
            .Where(x => indexes.Contains(x.idx))
            .Select(x => x.dVal).ToList();

        DescriptiveStatistics stats = new DescriptiveStatistics(lCenterXValues, true);
        double dCenterXvalue = stats.Mean;
        stats = new DescriptiveStatistics(lCenterYValues, true);
        double dCenterYvalue = stats.Mean;
        stats = new DescriptiveStatistics(lRadiiValues, true);
        double dRadiusvalue = stats.Mean;

        RoundData rd = new RoundData(dCenterXvalue, dCenterYvalue, dRadiusvalue);
        string xmlFilename = Path.GetDirectoryName(currentFullFileName);
        xmlFilename += (xmlFilename.Last() == Path.DirectorySeparatorChar)
            ? ""
            : Path.DirectorySeparatorChar.ToString();
        xmlFilename += Path.GetFileNameWithoutExtension(currentFullFileName) + "-RoundImagemask.xml";
        ServiceTools.WriteObjectToXML(rd, xmlFilename);
        Console.WriteLine("finished processing file " + Environment.NewLine + currentFullFileName);
    }
    catch (Exception ex)
    {
        #region report
#if DEBUG
        Console.WriteLine("exception has been thrown: " + ex.Message + Environment.NewLine +
                          ServiceTools.CurrentCodeLineDescription());
#else
        ServiceTools.logToTextFile(errorLogFilename,
            "exception has been thrown: " + ex.Message + Environment.NewLine +
            ServiceTools.CurrentCodeLineDescription(), true, true);
#endif
        #endregion report
    }
}
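// For illustration (made-up values): the input CSV is read apparently skipping three leading
// rows; column 0 is not used by this method, while columns 1..3 are expected to hold the detected
// circle center X, center Y and radius, e.g.
//   <ignored> ; 1296.5 ; 972.3 ; 1104.8
// The averaged circle (after discarding rows that equal the configured default values) is written
// next to the CSV as <csv-file-name-without-extension>-RoundImagemask.xml.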
public void Start(string[] args)
{
    readDefaultProperties();
    List<string> argsList = new List<string>(args);
    string filename = argsList.Last();
    if (!File.Exists(filename))
    {
        Console.WriteLine("Couldn't find input file \"" + filename + "\"");
        return;
    }
    CommonTools.PrintDictionaryToConsole(defaultProperties,
        "Default settings specified in file \"" + defaultPropertiesXMLfileName + "\"");
    if (!File.Exists(NNconfigFile) || !File.Exists(NNtrainedParametersFile) || !File.Exists(NormMeansFile) ||
        !File.Exists(NormRangeFile))
    {
        Console.WriteLine("couldn't find at least one of the pre-calculated NN parameters files specified in settings: ");
        return;
    }

    List<List<string>> csvFileContentStrings = ServiceTools.ReadDataFromCSV(filename, 0, true, ",");
    List<string> csvFileHeader = csvFileContentStrings[0];
    csvFileContentStrings = csvFileContentStrings.Where((list, idx) => idx > 0).ToList();
    List<int> columnsToDelete = csvFileHeader.Select((str, idx) => new Tuple<int, string>(idx, str))
        .Where(tpl => tpl.Item2.ToLower().Contains("filename")).ToList().ConvertAll(tpl => tpl.Item1);
    List<List<string>> csvFileContentStringsFiltered = new List<List<string>>();
    foreach (List<string> listDataStrings in csvFileContentStrings)
    {
        csvFileContentStringsFiltered.Add(
            listDataStrings.Where((str, idx) => !columnsToDelete.Contains(idx)).ToList());
    }
    List<SunDiskCondition> trueAnswers = csvFileContentStringsFiltered.ConvertAll(
        lstr => (SunDiskCondition)Enum.Parse(typeof(SunDiskCondition), lstr.Last()));
    //List<int> trueAnswersInt = trueAnswers.ConvertAll(sdc => SunDiskConditionData.MatlabNumeralSDC(sdc));
    List<List<string>> csvFileContentStringsFiltered_wo_sdc =
        csvFileContentStringsFiltered.ConvertAll(list => list.Where((val, idx) => idx < list.Count - 1).ToList());
    List<DenseVector> lDV_objects_features = csvFileContentStringsFiltered_wo_sdc.ConvertAll(
        list => DenseVector.OfEnumerable(list.ConvertAll<double>(str => Convert.ToDouble(str.Replace(".", ",")))));
    DenseVector dvMeans = (DenseVector)((DenseMatrix)ServiceTools.ReadDataFromCSV(NormMeansFile, 0, ",")).Row(0);
    DenseVector dvRanges = (DenseVector)((DenseMatrix)ServiceTools.ReadDataFromCSV(NormRangeFile, 0, ",")).Row(0);
    lDV_objects_features = lDV_objects_features.ConvertAll(dv =>
    {
        DenseVector dvShifted = dv - dvMeans;
        DenseVector dvNormed = (DenseVector)dvShifted.PointwiseDivide(dvRanges);
        return dvNormed;
    });
    DenseMatrix dmObjectsFeatures = DenseMatrix.OfRowVectors(lDV_objects_features);
    DenseVector dvThetaValues = (DenseVector)ServiceTools.ReadDataFromCSV(NNtrainedParametersFile, 0, ",");
    List<int> NNlayersConfig =
        new List<double>(((DenseMatrix)ServiceTools.ReadDataFromCSV(NNconfigFile, 0, ",")).Row(0)).ConvertAll(
            dVal => Convert.ToInt32(dVal));
    List<SunDiskCondition> predictedSDC = NNclassificatorPredictor<SunDiskCondition>.NNpredict(dmObjectsFeatures,
        dvThetaValues, NNlayersConfig, SunDiskConditionData.MatlabEnumeratedSDCorderedList()).ToList();
    List<Tuple<SunDiskCondition, SunDiskCondition>> PredictedVStrue = predictedSDC.Zip(trueAnswers,
        (predVal, trueVal) => new Tuple<SunDiskCondition, SunDiskCondition>(predVal, trueVal)).ToList();

    Console.WriteLine("=== Prediction result vs true ===");
    foreach (Tuple<SunDiskCondition, SunDiskCondition> tpl in PredictedVStrue)
    {
        Console.WriteLine("pred: " + tpl.Item1.ToString() + ":" + tpl.Item2.ToString() + " :true");
    }
    double accuracy = 100.0d * ((double)PredictedVStrue.Count(tpl => tpl.Item1 == tpl.Item2)) /
                      (double)PredictedVStrue.Count();
    Console.WriteLine("accuracy: " + accuracy);
    Console.WriteLine("Finished. Press any key...");
    Console.ReadKey();
}
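// A hypothetical extension (not part of the original sources): a per-class accuracy breakdown can
// be derived from the same PredictedVStrue tuples with plain LINQ, which may be more informative
// than the single overall accuracy figure printed above.
//
// var perClassAccuracy = PredictedVStrue
//     .GroupBy(tpl => tpl.Item2)   // group by the true SDC label
//     .ToDictionary(g => g.Key,
//                   g => 100.0d * g.Count(t => t.Item1 == t.Item2) / g.Count());
// foreach (var kv in perClassAccuracy)
// {
//     Console.WriteLine("accuracy for " + kv.Key + ": " + kv.Value.ToString("F2") + "%");
// }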
private void btnCreateList_Click(object sender, EventArgs e)
{
    theLogWindow = ServiceTools.LogAText(theLogWindow, "Started processing...");
    BackgroundWorker bgwCreateList = new BackgroundWorker();
    bgwCreateList.DoWork += (bgwSender, bgwArgs) =>
    {
        List<ImageFileDescription> lImagesFilesList = new List<ImageFileDescription>();
        List<ImagesProcessingData> lImagesAllConcurrentData = new List<ImagesProcessingData>();
        List<ObservedClCoverData> lObservedData = new List<ObservedClCoverData>();

        #region check available data
        //ImagesBaseSourcePath = "";
        //ConcurrentAndStatsXMLfilesDir = "";
        //ObservedDataCSVfile = "";
        //DestinationPath = "";
        //DateTimeFilterTolerance = new TimeSpan(0, 5, 0);

        #region ImagesBaseSourcePath
        theLogWindow = ServiceTools.LogAText(theLogWindow, "Started enumerating images...");
        if (!Directory.Exists(ImagesBaseSourcePath))
        {
            theLogWindow = ServiceTools.LogAText(theLogWindow,
                "Unable to find the sky-images base source path. Please check that it exists and contains at least one sky-image *.jpg file.");
            FINISHED();
            return;
        }
        lImagesFilesList = Directory.GetFiles(ImagesBaseSourcePath, "*.jpg", SearchOption.AllDirectories)
            .ToList()
            .ConvertAll(str => new ImageFileDescription(str));
        if (!lImagesFilesList.Any())
        {
            theLogWindow = ServiceTools.LogAText(theLogWindow,
                "Unable to find any sky-images. Please check that at least one sky-image exists in the base source path.");
            FINISHED();
            return;
        }
        #endregion ImagesBaseSourcePath

        #region ConcurrentAndStatsXMLfilesDir
        theLogWindow = ServiceTools.LogAText(theLogWindow,
            "Started reading concurrent data and stats data using existing XML files...");
        if (!Directory.Exists(ConcurrentAndStatsXMLfilesDir))
        {
            theLogWindow = ServiceTools.LogAText(theLogWindow,
                "Unable to find the directory of pre-calculated sky-images concurrent data XML files: " +
                Environment.NewLine + ConcurrentAndStatsXMLfilesDir + Environment.NewLine +
                ". Please check that it exists.");
            FINISHED();
            return;
        }

        #region read all concurrent data using all-included XML files
        List<string> lXMLfiles = Directory.GetFiles(ConcurrentAndStatsXMLfilesDir,
            "ImagesCameraPositioning-stats-*-camID?.xml", SearchOption.AllDirectories).ToList();
        if (!lXMLfiles.Any())
        {
            theLogWindow = ServiceTools.LogAText(theLogWindow,
                "Unable to find any XML file satisfying the following mask: \"ImagesCameraPositioning-stats-*-camID?.xml\". Please check that at least one XML file of that kind exists in directory " +
                ConcurrentAndStatsXMLfilesDir);
            FINISHED();
            return;
        }
        foreach (string xmlFile in lXMLfiles)
        {
            theLogWindow = ServiceTools.LogAText(theLogWindow, "trying to read and parse file " + xmlFile);
            try
            {
                List<ImagesProcessingData> currFileContents = ServiceTools.ReadObjectFromXML(xmlFile,
                    typeof(List<ImagesProcessingData>)) as List<ImagesProcessingData>;
                if (currFileContents != null)
                {
                    lImagesAllConcurrentData.AddRange(currFileContents);
                }
            }
            catch (Exception ex)
            {
                theLogWindow = ServiceTools.LogAText(theLogWindow,
                    "ERROR! Unable to read data from file: " + xmlFile + Environment.NewLine + ex.Message);
                continue;
            }
        }
        if (!lImagesAllConcurrentData.Any())
        {
            theLogWindow = ServiceTools.LogAText(theLogWindow,
                "Unable to read any concurrent data. Please check the directory for valid concurrent data XML files: " +
                Environment.NewLine + "directory: " + ConcurrentAndStatsXMLfilesDir + Environment.NewLine +
                "XML files mask: \"ImagesCameraPositioning-stats-*-camID?.xml\"");
            FINISHED();
            return;
        }
        #endregion read all concurrent data using all-included XML files
        #endregion ConcurrentAndStatsXMLfilesDir

        #region ObservedDataCSVfile
        theLogWindow = ServiceTools.LogAText(theLogWindow, "Started reading observed data CSV file...");
        if (!File.Exists(ObservedDataCSVfile))
        {
            theLogWindow = ServiceTools.LogAText(theLogWindow,
                "Unable to read observed data CSV file: " + ObservedDataCSVfile);
            FINISHED();
            return;
        }
        List<List<string>> lCSVfileContents = ServiceTools.ReadDataFromCSV(ObservedDataCSVfile, 1, true);
        if (!lCSVfileContents.Any())
        {
            theLogWindow = ServiceTools.LogAText(theLogWindow,
                "The observed data CSV file seems to be empty: " + ObservedDataCSVfile);
            FINISHED();
            return;
        }
        lObservedData = lCSVfileContents.ConvertAll(lStr => new ObservedClCoverData(lStr));
        #endregion ObservedDataCSVfile

        #region DestinationPath
        if (!Directory.Exists(DestinationPath))
        {
            theLogWindow = ServiceTools.LogAText(theLogWindow,
                "Unable to find the output directory: " + DestinationPath);
            FINISHED();
            return;
        }
        #endregion DestinationPath
        #endregion check available data

        List<Tuple<ObservedClCoverData, ImageFileDescription>> lImagesFilteredByAvailableObservedData =
            new List<Tuple<ObservedClCoverData, ImageFileDescription>>();

        #region filter images by available observed data using DateTimeFilterTolerance
        theLogWindow = ServiceTools.LogAText(theLogWindow, "Filtering by observed data available...");
        foreach (ObservedClCoverData observedData in lObservedData)
        {
            DateTime currObservedDatumDateTime = observedData.dt;
            List<Tuple<ObservedClCoverData, ImageFileDescription>> lImagesCloseToCurrObservedDatum =
                lImagesFilesList
                    .Where(ifd =>
                    {
                        TimeSpan tspan = new TimeSpan(Math.Abs((ifd.currImageDateTime - currObservedDatumDateTime).Ticks));
                        return tspan <= DateTimeFilterTolerance;
                    })
                    .ToList()
                    .ConvertAll(ifd => new Tuple<ObservedClCoverData, ImageFileDescription>(observedData, ifd));
            lImagesFilteredByAvailableObservedData.AddRange(lImagesCloseToCurrObservedDatum);
        }
        #endregion filter images by available observed data using DateTimeFilterTolerance

        List<SkyImagesDataWith_Concurrent_Stats_CloudCover> lImagesFilteredByAnyAvailableData =
            new List<SkyImagesDataWith_Concurrent_Stats_CloudCover>();

        #region map available stats data using image filename
        theLogWindow = ServiceTools.LogAText(theLogWindow, "Mapping concurrent and stats data...");
        lImagesFilteredByAnyAvailableData = lImagesFilteredByAvailableObservedData.ConvertAll(tpl =>
        {
            SkyImagesDataWith_Concurrent_Stats_CloudCover retVal = null;
            try
            {
                ImagesProcessingData foundConcurrentData =
                    lImagesAllConcurrentData.Where(ipd => Path.GetFileName(ipd.filename) == tpl.Item2.fileName)
                        .ElementAt(0);
                retVal = new SkyImagesDataWith_Concurrent_Stats_CloudCover()
                {
                    skyImageFullFileName = tpl.Item2.fullFileName,
                    skyImageFileName = tpl.Item2.fileName,
                    currImageDateTime = tpl.Item2.currImageDateTime,
                    observedCloudCoverData = tpl.Item1,
                    concurrentDataXMLfile = foundConcurrentData.concurrentDataXMLfile,
                    concurrentData = foundConcurrentData.concurrentData,
                    grixyrgbStatsXMLfile = foundConcurrentData.grixyrgbStatsXMLfile,
                    grixyrgbStats = foundConcurrentData.grixyrgbStats
                };
                //(tpl.Item1, tpl.Item2, foundConcurrentData);
            }
            catch (Exception ex)
            {
                theLogWindow = ServiceTools.LogAText(theLogWindow,
                    "ERROR! Couldn't find concurrent data for file " + Path.GetFileName(tpl.Item2.fileName) +
                    Environment.NewLine + ex.Message);
            }
            return retVal;
        });
        lImagesFilteredByAnyAvailableData.RemoveAll(tpl => tpl == null);
        if (!lImagesFilteredByAnyAvailableData.Any())
        {
            theLogWindow = ServiceTools.LogAText(theLogWindow,
                "There are no images remaining after filtering using all available data. Output will be empty.");
        }
        #endregion map available stats data using image filename

        theLogWindow = ServiceTools.LogAText(theLogWindow, "Writing output list to file...");
        string strOutputXMLfileName = DestinationPath +
                                      ((DestinationPath.Last() == Path.DirectorySeparatorChar)
                                          ? ""
                                          : Path.DirectorySeparatorChar.ToString()) +
                                      "FilesListToDetectCloudCover.xml";
        ServiceTools.WriteObjectToXML(lImagesFilteredByAnyAvailableData, strOutputXMLfileName);
        theLogWindow = ServiceTools.LogAText(theLogWindow, "images list written to file: " + strOutputXMLfileName);
    };
    bgwCreateList.RunWorkerAsync();
}
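// A minimal consumption sketch (assumed, not part of the original sources): the list written
// above can be read back with the same XML helper used elsewhere in these sources, e.g.
//
// List<SkyImagesDataWith_Concurrent_Stats_CloudCover> lRestored =
//     ServiceTools.ReadObjectFromXML(strOutputXMLfileName,
//         typeof(List<SkyImagesDataWith_Concurrent_Stats_CloudCover>))
//         as List<SkyImagesDataWith_Concurrent_Stats_CloudCover>;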