/// <summary>
/// Builds a human-readable status report from the most recent concurrent-data XML file:
/// server time, snapshot time, elapsed time, GPS fix, and sunrise/sunset converted to
/// the server's local time zone.
/// </summary>
/// <returns>Multi-line report string, or a "please wait" message when no data exists yet.</returns>
private string ReadLastConcurrentInfo()
{
    DirectoryInfo dir = new DirectoryInfo(DataStoreDirectory);
    List<FileInfo> lXMLFilesInfo =
        dir.GetFiles(ConventionalTransitions.ImageConcurrentDataFilesNamesPattern(),
            SearchOption.AllDirectories).ToList();

    // FIX: the original called .Last() unconditionally and threw InvalidOperationException
    // when no concurrent data files had been written yet (sibling ObtainLastImageCC already
    // guards the same situation with a friendly message).
    if (!lXMLFilesInfo.Any())
    {
        return "No concurrent data files found yet. Please wait for a couple of minutes.";
    }

    // oldest -> newest by creation time (UTC); the newest file ends up last
    lXMLFilesInfo.Sort((finfo1, finfo2) => finfo1.CreationTimeUtc.CompareTo(finfo2.CreationTimeUtc));
    Dictionary<string, object> dictReadXMLfileData =
        ServiceTools.ReadDictionaryFromXML(lXMLFilesInfo.Last().FullName);

    DateTime utcNow = DateTime.UtcNow;
    // NOTE(review): DateTime.Parse is culture-sensitive; assumes the stored "DateTime" string
    // matches the server culture — TODO confirm and consider an explicit format/culture.
    DateTime concurrentDataDateTime = DateTime.Parse(dictReadXMLfileData["DateTime"] as string);
    concurrentDataDateTime = concurrentDataDateTime.ToUniversalTime();

    string strDataToReport = "Current server date/time UTC: " +
                             utcNow.ToString("dd.MM.yyyy HH:mm:ss") + Environment.NewLine;
    strDataToReport += "Concurrent data date/time UTC: " +
                       concurrentDataDateTime.ToString("dd.MM.yyyy HH:mm:ss") + Environment.NewLine;
    strDataToReport += "time elapsed since last shot: " +
                       Math.Round((utcNow - concurrentDataDateTime).TotalSeconds) + "s" +
                       Environment.NewLine;

    GPSdata gps = new GPSdata((string)dictReadXMLfileData["GPSdata"],
        GPSdatasources.CloudCamArduinoGPS, concurrentDataDateTime);
    strDataToReport += "GPS: " + gps.HRString(2) + Environment.NewLine;

    // sun ephemeris for the current GPS position; spaCalcObject carries sunrise/sunset
    SPA spaCalcObject = null;
    AzimuthZenithAngle angle = gps.SunZenithAzimuth(out spaCalcObject);

    DateTime dtSunriseUTC = utcNow;
    TimeOfDay todSunriseUTC = new TimeOfDay(spaCalcObject.spa.sunrise);
    dtSunriseUTC = new DateTime(dtSunriseUTC.Year, dtSunriseUTC.Month, dtSunriseUTC.Day,
        todSunriseUTC.hour, todSunriseUTC.minute, todSunriseUTC.second, DateTimeKind.Utc);
    // TimeZoneInfo mowTimeZone = TimeZoneInfo.FindSystemTimeZoneById("Russian Standard Time");
    // NOTE(review): despite the "Moscow time" labels below, the server's LOCAL time zone is
    // used here — confirm this is intended on non-Moscow deployments.
    TimeZoneInfo mowTimeZone = TimeZoneInfo.Local;
    DateTime dtSunriseMOW = TimeZoneInfo.ConvertTimeFromUtc(dtSunriseUTC, mowTimeZone);
    strDataToReport += "Sunrise Moscow time: " + (dtSunriseMOW.TimeOfDay.ToString()) +
                       Environment.NewLine;

    DateTime dtSunsetUTC = utcNow;
    TimeOfDay todSunsetUTC = new TimeOfDay(spaCalcObject.spa.sunset);
    dtSunsetUTC = new DateTime(dtSunsetUTC.Year, dtSunsetUTC.Month, dtSunsetUTC.Day,
        todSunsetUTC.hour, todSunsetUTC.minute, todSunsetUTC.second, DateTimeKind.Utc);
    DateTime dtSunsetMOW = TimeZoneInfo.ConvertTimeFromUtc(dtSunsetUTC, mowTimeZone);
    strDataToReport += "Sunset Moscow time: " + (dtSunsetMOW.TimeOfDay.ToString()) +
                       Environment.NewLine;

    return strDataToReport;
}
/// <summary>
/// Enumerates *.jpg images next to currPath2Process, keeps only those for which a
/// GrIxYRGB stats XML file already exists, and queues them into lStatsCalculation.
/// </summary>
private void EnumerateFilesToProcess()
{
    string directory = Path.GetDirectoryName(currPath2Process);
    string filemask = "*.jpg";
    List<string> filesList = new List<string>(Directory.EnumerateFiles(directory, filemask,
        bSearchImagesTopDirectoryOnly ? SearchOption.TopDirectoryOnly : SearchOption.AllDirectories));
    Console.WriteLine("found " + filesList.Count + " files.");

    Console.WriteLine("filtering...");
    List<string> statsFilesList = new List<string>(Directory.EnumerateFiles(
        imageYRGBstatsXMLdataFilesDirectory,
        ConventionalTransitions.ImageGrIxYRGBstatsFileNamesPattern(),
        bSearchImagesTopDirectoryOnly ? SearchOption.TopDirectoryOnly : SearchOption.AllDirectories));
    statsFilesList = statsFilesList.ConvertAll(strFileName => Path.GetFileName(strFileName));
    // keep only images whose stats file is already present (note the negated Contains)
    int removed = filesList.RemoveAll(
        fname => !statsFilesList.Contains(ConventionalTransitions.ImageGrIxYRGBstatsDataFileName(fname, "", false)));
    Console.WriteLine("removed " + removed + " items. Remains " + filesList.Count + " to process.");

    if (!filesList.Any())
    {
        // FIX: "sutisfy" -> "satisfy" in the user-facing message
        Console.WriteLine("There is no " + filemask +
                          " files that satisfy settings specified. Processing will not be started.");
        return;
    }

    // FIX: removed unused locals (intFinalIndex, idx, currPerc, prevPerc)
    int consoleWidth = Console.WindowWidth;
    foreach (string fName in filesList)
    {
        // right-align the message to the console width
        string strToShow = String.Format("{0," + consoleWidth + "}", "adding: " + fName);
        Console.WriteLine(strToShow);
        //Console.SetCursorPosition(0, Console.CursorTop - 1);
        lStatsCalculation.Add(new ImageStatsCollectingData()
        {
            filename = fName,
            State = ImageStatsCollectingState.Queued
        });
    }
    Console.WriteLine("finished enumerating files. Files to process: " + lStatsCalculation.Count);
}
/// <summary>
/// Reads the newest SkyImagesProcessedAndPredictedData XML file and formats a short
/// human-readable summary (snapshot date, sun disk condition, total cloud cover).
/// </summary>
/// <returns>Summary string, or a "please wait" message when nothing has been analyzed yet.</returns>
/// <exception cref="DirectoryNotFoundException">When ConcurrentDataXMLfilesBasePath does not exist.</exception>
private async Task<string> ObtainLastImageCC()
{
    string retStr = "";
    if (!Directory.Exists(ConcurrentDataXMLfilesBasePath))
    {
        throw new DirectoryNotFoundException("unable to locate directory: " + ConcurrentDataXMLfilesBasePath);
    }

    DirectoryInfo dir = new DirectoryInfo(ConcurrentDataXMLfilesBasePath);
    List<FileInfo> lXMLFilesInfo =
        dir.GetFiles(ConventionalTransitions.ImageProcessedAndPredictedDataFileNamesPattern(),
            SearchOption.AllDirectories).ToList();
    if (lXMLFilesInfo.Count == 0)
    {
        return "No snapshots has been analyzed yet. Please wait for a couple of minutes.";
    }

    // newest file ends up last after sorting by creation time (UTC)
    lXMLFilesInfo.Sort((finfo1, finfo2) => finfo1.CreationTimeUtc.CompareTo(finfo2.CreationTimeUtc));

    // FIX: the original wrapped this call in "catch (Exception ex) { throw ex; }", which only
    // destroyed the original stack trace (CA2200); let exceptions propagate unchanged.
    SkyImagesProcessedAndPredictedData data = (SkyImagesProcessedAndPredictedData)
        ServiceTools.ReadObjectFromXML(lXMLFilesInfo.Last().FullName,
            typeof(SkyImagesProcessedAndPredictedData));

    if (data != null)
    {
        // FIX: "finction" -> "function" in the user-facing message
        retStr += "Please note that this function is still in BETA version!" + Environment.NewLine +
                  "date of snapshot analyzed (UTC): " + data.imageShootingDateTimeUTC.ToString("u") +
                  Environment.NewLine +
                  "Sun disk condition: " + data.PredictedSDC.ToString() + Environment.NewLine +
                  "Total cloud cover: " + data.PredictedCC.CloudCoverTotal + " (of 8)";
    }
    return retStr;
}
/// <summary>
/// Processes a single image: applies the (optionally predefined) round mask, multiplies the
/// image by it, and stores the resulting color channels into a NetCDF file.
/// Expects inputArgs to be a Dictionary&lt;string, object&gt; with at least
/// "currentFullFileName", "outputNetCDFfilesDirectory" and "ImagesBasePath"; optional keys:
/// "ImagesRoundMasksXMLfilesMappingList", "Stopwatch", "logFileName".
/// </summary>
/// <returns>ImageStatsDataCalculationResult describing success/failure and CPU time spent.</returns>
static async Task<ImageStatsDataCalculationResult> ConvertImageTask(object inputArgs)
{
    ImageStatsDataCalculationResult Result = new ImageStatsDataCalculationResult();
    Dictionary<string, object> ParametersPassed = inputArgs as Dictionary<string, object>;
    string logFileName = "";
    string currentFullFileName = null;
    Stopwatch stopwatch = null;
    List<Tuple<string, string>> lImagesRoundMasksMappingFiles = null;
    if (ParametersPassed != null)
    {
        // FIX: the original read ParametersPassed["currentFullFileName"] BEFORE this null
        // check, which made the guard useless (NullReferenceException on a null argument).
        currentFullFileName = ParametersPassed["currentFullFileName"] as string;

        if (ParametersPassed.ContainsKey("ImagesRoundMasksXMLfilesMappingList"))
        {
            string ImagesRoundMasksXMLfilesMappingListPassed =
                (string)ParametersPassed["ImagesRoundMasksXMLfilesMappingList"];
            if (File.Exists(ImagesRoundMasksXMLfilesMappingListPassed))
            {
                // CSV rows: <image filename wildcard>;<round mask parameters XML file>
                List<List<string>> llImagesRoundMasksMappingFiles =
                    ServiceTools.ReadDataFromCSV(ImagesRoundMasksXMLfilesMappingListPassed, 0, true, ";",
                        Environment.NewLine);
                lImagesRoundMasksMappingFiles = llImagesRoundMasksMappingFiles.ConvertAll(
                    list => new Tuple<string, string>(list[0], list[1]));
            }
        }
        if (ParametersPassed.ContainsKey("Stopwatch"))
        {
            stopwatch = ParametersPassed["Stopwatch"] as Stopwatch;
        }
        if (ParametersPassed.ContainsKey("logFileName"))
        {
            logFileName = ParametersPassed["logFileName"] as string;
        }
    }

    TimeSpan procStart = Process.GetCurrentProcess().TotalProcessorTime;
    try
    {
        RoundData predefinedRoundedMask = null;
        if (lImagesRoundMasksMappingFiles != null && lImagesRoundMasksMappingFiles.Any())
        {
            // FIX: run the wildcard search ONCE instead of two identical Find calls
            Tuple<string, string> foundMapping = lImagesRoundMasksMappingFiles.Find(
                tpl => (new WildcardPattern(tpl.Item1)).IsMatch(currentFullFileName));
            if (foundMapping != null)
            {
                string strFoundPredefinedRoundedMaskParametersXMLfile = foundMapping.Item2;
                // trim anything appended after the ".xml" extension;
                // FIX: guard IndexOf == -1 (the original produced a garbled Substring then)
                int xmlExtIdx = strFoundPredefinedRoundedMaskParametersXMLfile.IndexOf(".xml");
                if (xmlExtIdx >= 0)
                {
                    strFoundPredefinedRoundedMaskParametersXMLfile =
                        strFoundPredefinedRoundedMaskParametersXMLfile.Substring(0, xmlExtIdx + 4);
                }
                predefinedRoundedMask =
                    ServiceTools.ReadObjectFromXML(strFoundPredefinedRoundedMaskParametersXMLfile,
                        typeof(RoundData)) as RoundData;
            }
        }

        Image<Bgr, byte> currImg = new Image<Bgr, byte>(currentFullFileName);
        ImageProcessing imgP = new ImageProcessing(currImg, predefinedRoundedMask);
        // replicate the single-channel significant mask into all three BGR channels
        Image<Bgr, byte> maskImage = new Image<Bgr, byte>(new Image<Gray, byte>[]
        {
            imgP.significantMaskImageCircled,
            imgP.significantMaskImageCircled,
            imgP.significantMaskImageCircled
        });
        Image<Bgr, byte> img = imgP.tmpImage.Mul(maskImage);

        string ncFileName = ConventionalTransitions.NetCDFimageBareChannelsDataFilename(
            currentFullFileName,
            ParametersPassed["outputNetCDFfilesDirectory"] as string, true,
            ParametersPassed["ImagesBasePath"] as string);
        Dictionary<string, object> dataToNCwrite = new Dictionary<string, object>();
        dataToNCwrite.Add("ColorChannels", img.Data);
        NetCDFoperations.SaveVariousDataToFile(dataToNCwrite, ncFileName);

        TimeSpan procEnd = Process.GetCurrentProcess().TotalProcessorTime;
        Result = new ImageStatsDataCalculationResult()
        {
            calcResult = true,
            imgFilename = currentFullFileName,
            mp5Result = null,
            grixyrgbStatsData = null,
            stopwatch = stopwatch,
            exception = null,
            procTotalProcessorTimeEnd = procEnd,
            procTotalProcessorTimeStart = procStart
        };
        return Result;
    }
    catch (Exception ex)
    {
        // any failure is reported back through the result object rather than thrown
        TimeSpan procEnd = Process.GetCurrentProcess().TotalProcessorTime;
        Result = new ImageStatsDataCalculationResult()
        {
            calcResult = false,
            imgFilename = currentFullFileName,
            stopwatch = stopwatch,
            exception = ex,
            procTotalProcessorTimeEnd = procEnd,
            procTotalProcessorTimeStart = procStart
        };
        return Result;
    }
}
/// <summary>
/// Enumerates *.jpg images under ImagesBasePath and EXCLUDES those whose GrIxYRGB stats
/// have already been calculated, then queues the remainder into lStatsCalculation.
/// Interactive: pauses for a key press after filtering and after enumeration.
/// </summary>
private void EnumerateFilesToProcess()
{
    string directory = Path.GetDirectoryName(ImagesBasePath);
    string filemask = "*.jpg";
    List<string> filesList = new List<string>(Directory.EnumerateFiles(directory, filemask,
        bSearchImagesTopDirectoryOnly ? SearchOption.TopDirectoryOnly : SearchOption.AllDirectories));
    Console.WriteLine("found " + filesList.Count + " files.");

    #region filtering
    Console.WriteLine("filtering (if an image stats has been already calculated - it should be excluded) ...");
    List<string> statsFilesList = new List<string>(Directory.EnumerateFiles(
        imageYRGBstatsXMLdataFilesDirectory,
        ConventionalTransitions.ImageGrIxYRGBstatsFileNamesPattern(),
        bSearchImagesTopDirectoryOnly ? SearchOption.TopDirectoryOnly : SearchOption.AllDirectories));
    statsFilesList = statsFilesList.ConvertAll(strFileName => Path.GetFileName(strFileName));
    // drop images that ALREADY have a stats file (opposite filter to the sibling variant)
    int removed = filesList.RemoveAll(
        fname => statsFilesList.Contains(ConventionalTransitions.ImageGrIxYRGBstatsDataFileName(fname, "", false)));
    Console.WriteLine("removed " + removed + " items. Remains " + filesList.Count + " to process.");
    Console.WriteLine("press any key...");
    Console.ReadKey();
    #endregion filtering

    if (!filesList.Any())
    {
        // FIX: "sutisfy" -> "satisfy" in the user-facing message
        Console.WriteLine("There is no " + filemask +
                          " files that satisfy settings specified. Processing will not be started.");
        return;
    }

    #region compiling files processing list lStatsCalculation
    // FIX: removed unused locals (idx, currPerc, prevPerc) and the fully commented-out
    // DEBUG-only early-exit block
    int consoleWidth = Console.WindowWidth;
    foreach (string fName in filesList)
    {
        // right-align the message to the console width
        string strToShow = String.Format("{0," + consoleWidth + "}", "adding: " + fName);
        Console.WriteLine(strToShow);
        //Console.SetCursorPosition(0, Console.CursorTop - 1);
        lStatsCalculation.Add(new ImageStatsCollectingData()
        {
            filename = fName,
            State = ImageStatsCollectingState.Queued
        });
    }
    #endregion compiling files processing list
    Console.WriteLine("finished enumerating files. Files to process: " + lStatsCalculation.Count);
    Console.WriteLine("press any key...");
    Console.ReadKey();
}
/// <summary>
/// Processes a single sky image: persists its GPS data, maps the closest observed
/// meteorological record (SDC + cloud cover) within a 10-minute tolerance, locates its
/// pre-calculated stats XML file, and writes the combined per-image data XML.
/// May be invoked concurrently — totalFilesProcessed is incremented atomically and the
/// shared lMissionObservedData list is no longer mutated here.
/// </summary>
private void ProcessImage(ImageStatsCollectingData srcData)
{
    Interlocked.Increment(ref totalFilesProcessed);
    int perc = Convert.ToInt32(100.0d * (double)totalFilesProcessed / (double)totalFilesCountToProcess);
    Console.WriteLine(DateTime.Now.ToString("s") + " : " + perc + "% : started processing file " +
                      Environment.NewLine + srcData.filename);

    Dictionary<string, object> optionalParameters = new Dictionary<string, object>();
    optionalParameters.Add("logFileName", errorLogFilename);

    // найти и записать данные GPS
    GPSdata currimageGPS = ServiceTools.FindProperGPSdataForImage(srcData.filename, null, defaultProperties,
        ref NVdataFilesAlreadyReadDateTimeSpans, ref NVdataFilesAlreadyReadData);
    if (currimageGPS != null)
    {
        ServiceTools.WriteObjectToXML(currimageGPS,
            ConventionalTransitions.ConcurrentGPSdataFileName(srcData.filename, strConcurrentGPSdataXMLfilesPath));
    }

    // найти и записать данные SDC и Cloud Cover
    DateTime curDateTime = GetImageDateTime(srcData.filename);
    if (!lMissionObservedData.Any())
    {
        return;
    }

    // FIX: the original Sort()ed the SHARED lMissionObservedData list on every call with a
    // comparator that never returned 0 (violates the Comparison<T> contract and can throw
    // "inconsistent results"); mutating a shared list is also unsafe under concurrency.
    // Find the closest-in-time record without mutating anything instead.
    MissionsObservedData closestObservedDatum = lMissionObservedData.Aggregate((obs1, obs2) =>
    {
        double dev1 = Math.Abs((obs1.dateTime - curDateTime).TotalMilliseconds);
        double dev2 = Math.Abs((obs2.dateTime - curDateTime).TotalMilliseconds);
        return (dev1 <= dev2) ? obs1 : obs2;
    });

    // FIX: compare the ABSOLUTE deviation against the 10-minute tolerance; the original
    // signed comparison accepted records arbitrarily far in the PAST.
    if (Math.Abs((closestObservedDatum.dateTime - curDateTime).TotalSeconds) > 600)
    {
        return;
    }

    SunDiskConditionData currImageSDC = new SunDiskConditionData()
    {
        filename = srcData.filename,
        sunDiskCondition = closestObservedDatum.SDC
    };
    ServiceTools.WriteObjectToXML(currImageSDC,
        ConventionalTransitions.SunDiskConditionFileName(srcData.filename, SunDiskConditionXMLdataFilesDirectory));

    // find grixyrgbStatsXMLfile
    SkyImageIndexesStatsData currImageStatsData = null;
    string currImageStatsDataXMLfile = "";
    if (Directory.Exists(imageYRGBstatsXMLdataFilesDirectory))
    {
        List<string> foundXMLfiles = Directory.EnumerateFiles(imageYRGBstatsXMLdataFilesDirectory,
            ConventionalTransitions.ImageGrIxYRGBstatsDataFileName(srcData.filename, "", false),
            SearchOption.AllDirectories).ToList();
        if (foundXMLfiles.Any())
        {
            // возьмем первый попавшийся (take the first match found)
            currImageStatsDataXMLfile = foundXMLfiles[0];
            currImageStatsData = (SkyImageIndexesStatsData)
                ServiceTools.ReadObjectFromXML(currImageStatsDataXMLfile, typeof(SkyImageIndexesStatsData));
        }
    }

    SkyImagesDataWith_Concurrent_Stats_CloudCover_SDC currImageData =
        new SkyImagesDataWith_Concurrent_Stats_CloudCover_SDC()
        {
            skyImageFullFileName = srcData.filename,
            skyImageFileName = Path.GetFileName(srcData.filename),
            currImageDateTime = curDateTime,
            observedCloudCoverData = new ObservedClCoverData()
            {
                dt = closestObservedDatum.dateTime,
                CloudCoverTotal = closestObservedDatum.CloudCoverTotal,
                CloudCoverLower = closestObservedDatum.CloudCoverLower
            },
            concurrentDataXMLfile = "",
            // FIX: the original dereferenced currimageGPS here unconditionally and crashed
            // with NullReferenceException when no GPS data was found for the image.
            concurrentData = (currimageGPS == null)
                ? null
                : new ConcurrentData()
                {
                    filename = "",
                    datetimeUTC = currimageGPS.DateTimeUTC,
                    GPSdata = "",
                    GPSLat = currimageGPS.Lat,
                    GPSLon = currimageGPS.Lon,
                    GPSDateTimeUTC = currimageGPS.DateTimeUTC,
                    PressurePa = closestObservedDatum.pressure,
                    gps = currimageGPS
                },
            grixyrgbStatsXMLfile = currImageStatsDataXMLfile,
            grixyrgbStats = currImageStatsData,
            SDCvalue = closestObservedDatum.SDC
        };
    ServiceTools.WriteObjectToXML(currImageData,
        ConventionalTransitions.SkyImagesDataWithConcurrentStatsCloudCoverAndSDC_FileName(srcData.filename,
            strSkyImagesDataWithConcurrentStatsCloudCoverAndSDCDirectory));
}
//private void ImageProcessing(ImagesProcessingData ipdt)
//{
//}

/// <summary>
/// Enumerates *.jpg images under inputBasePath for the configured camera ID, maps each image
/// to its pre-calculated GrIxYRGB stats XML file and to the nearest-in-time concurrent data
/// record, predicts the Sun Disk Condition (SDC) for every image with a pre-trained neural
/// network, and finally keeps in lStatsProcessing only the images matching sdcFilter.
/// </summary>
private void EnumerateFilesToProcess()
{
    string directory = Path.GetDirectoryName(inputBasePath);
    string filemask = "*.jpg";
    List<string> filesList = new List<string>(Directory.EnumerateFiles(directory, filemask,
        bEnumerateFilesRecursively ? SearchOption.AllDirectories : SearchOption.TopDirectoryOnly));

    #region filter by camID
    //...devID1.jpg
    // file names are expected to end with "devID<N>.jpg"; matched lower-cased
    string ptrnCamID = "devid" + CamIDtoProcess + ".jpg";
    filesList = filesList.Where(fname => fname.ToLower().Contains(ptrnCamID)).ToList();
    #endregion

    Console.WriteLine("found " + filesList.Count + " images.");

    #region list, read and map image stats files
    Console.WriteLine("filtering by ready-to-use GrIxYRGB XML files...");
    List<string> statsFilesList = new List<string>(Directory.EnumerateFiles(
        imageYRGBstatsXMLdataFilesDirectory,
        ConventionalTransitions.ImageGrIxYRGBstatsFileNamesPattern(),
        bEnumerateFilesRecursively ? SearchOption.AllDirectories : SearchOption.TopDirectoryOnly));
    List<string> statsFilesListWOpath = statsFilesList.ConvertAll(Path.GetFileName);
    Console.WriteLine("found " + statsFilesList.Count + " XML stats files in directory " +
                      Environment.NewLine + imageYRGBstatsXMLdataFilesDirectory + Environment.NewLine +
                      "by mask " + Environment.NewLine +
                      ConventionalTransitions.ImageGrIxYRGBstatsFileNamesPattern());
    // keep only images that already have a stats XML file
    int removed = filesList.RemoveAll(
        fname => !statsFilesListWOpath.Contains(ConventionalTransitions.ImageGrIxYRGBstatsDataFileName(fname, "", false)));
    Console.WriteLine("removed " + removed + " items (couldn`t find stats data files). Remains " +
                      filesList.Count + " to process.");
    if (!filesList.Any())
    {
        Console.WriteLine("There is no " + filemask + " files that sutisfy settings specified. Processing will not be started.");
        return;
    }

    // wrap each remaining image file into a processing-data record
    lStatsProcessing = filesList.ConvertAll(strImgFname =>
    {
        ImagesProcessingData retVal = new ImagesProcessingData() { filename = strImgFname };
        return (retVal);
    });
#if DEBUG
    //lStatsProcessing = lStatsProcessing.Where((ipd, ind) => ind < 10).ToList();
#endif

    Console.WriteLine("started reading and mapping stats data");
    int totalFilesCountToRead = lStatsProcessing.Count;
    int filesRead = 0;
    int currProgressPerc = 0;
    foreach (ImagesProcessingData ipdt in lStatsProcessing)
    {
        // locate this image's stats XML file and deserialize its contents
        ipdt.grixyrgbStatsXMLfile = statsFilesList.First(
            statsFname => statsFname.Contains(ConventionalTransitions.ImageGrIxYRGBstatsDataFileName(ipdt.filename, "", false)));
        ipdt.grixyrgbStats =
            ServiceTools.ReadObjectFromXML(ipdt.grixyrgbStatsXMLfile, typeof(SkyImageIndexesStatsData)) as
                SkyImageIndexesStatsData;

        #region calculate and report progress
        filesRead++;
        double progress = 100.0d * (double)filesRead / (double)totalFilesCountToRead;
        if (progress - (double)currProgressPerc > 1.0d)
        {
            currProgressPerc = Convert.ToInt32(progress);
            Console.WriteLine("read " + currProgressPerc + "%");
        }
        #endregion calculate and report progress
    }
    #endregion

    #region list, read and map concurrent data
    List<string> concurrentDataFilesList = Directory.EnumerateFiles(ConcurrentDataXMLfilesDirectory,
        ConventionalTransitions.ImageConcurrentDataFilesNamesPattern(),
        bEnumerateFilesRecursively ? SearchOption.AllDirectories : SearchOption.TopDirectoryOnly).ToList();
    List<ConcurrentData> lConcurrentData = null;

    #region reading
    Console.WriteLine("started concurrent data reading");
    totalFilesCountToRead = concurrentDataFilesList.Count;
    filesRead = 0;
    currProgressPerc = 0;
    List<Dictionary<string, object>> lDictionariesConcurrentData = new List<Dictionary<string, object>>();
    foreach (string strConcDataXMLFile in concurrentDataFilesList)
    {
        Dictionary<string, object> currDict = ServiceTools.ReadDictionaryFromXML(strConcDataXMLFile);
        // remember the source file name for error reporting below
        currDict.Add("XMLfileName", Path.GetFileName(strConcDataXMLFile));
        lDictionariesConcurrentData.Add(currDict);

        #region calculate and report progress
        filesRead++;
        double progress = 100.0d * (double)filesRead / (double)totalFilesCountToRead;
        if (progress - (double)currProgressPerc > 1.0d)
        {
            currProgressPerc = Convert.ToInt32(progress);
            Console.WriteLine("read " + currProgressPerc + "%");
        }
        #endregion calculate and report progress
    }
    lDictionariesConcurrentData.RemoveAll(dict => dict == null);
    // parse each dictionary into a ConcurrentData record; unparsable files are logged and dropped
    lConcurrentData = lDictionariesConcurrentData.ConvertAll<ConcurrentData>(dict =>
    {
        ConcurrentData retVal = null;
        try
        {
            retVal = new ConcurrentData(dict);
        }
        catch (Exception ex)
        {
            string strError = "couldn`t parse XML file " + dict["XMLfileName"] + " : " +
                              Environment.NewLine + ex.Message;
            Console.WriteLine(strError);
        }
        return (retVal);
    });
    lConcurrentData.RemoveAll(val => val == null);
    #endregion reading

    #region mapping
    // map obtained concurrent data to images by its datetime
    Console.WriteLine("concurrent data mapping started");
    lStatsProcessing = lStatsProcessing.ConvertAll(ipdt =>
    {
        // extract the shooting date-time from the image file name: drop the first 4
        // characters and the "devID<N>" tag, then turn "2015-12-16T06-01-38" into a
        // parseable "2015-12-16T06:01:38" (only the time part separators are replaced)
        string currImgFilename = ipdt.filename;
        currImgFilename = Path.GetFileNameWithoutExtension(currImgFilename);
        string ptrn = @"(devID\d)";
        Regex rgxp = new Regex(ptrn, RegexOptions.IgnoreCase);
        string strCurrImgDT = rgxp.Replace(currImgFilename.Substring(4), "");
        //2015-12-16T06-01-38
        strCurrImgDT = strCurrImgDT.Substring(0, 11) + strCurrImgDT.Substring(11).Replace("-", ":");
        DateTime currImgDT = DateTime.Parse(strCurrImgDT, null,
            System.Globalization.DateTimeStyles.AdjustToUniversal);

        // pick the concurrent-data record closest in time to the image
        ConcurrentData nearestConcurrentData = lConcurrentData.Aggregate((cDt1, cDt2) =>
        {
            TimeSpan tspan1 = new TimeSpan(Math.Abs((cDt1.datetimeUTC - currImgDT).Ticks));
            TimeSpan tspan2 = new TimeSpan(Math.Abs((cDt2.datetimeUTC - currImgDT).Ticks));
            return ((tspan1 <= tspan2) ? (cDt1) : (cDt2));
        });
        // reject the match if it is not within the configured time tolerance
        if (new TimeSpan(Math.Abs((nearestConcurrentData.datetimeUTC - currImgDT).Ticks)) >=
            TimeSpanForConcurrentDataMappingTolerance)
        {
            string strError = "couldn`t find close enough concurrent data file for image:" +
                              Environment.NewLine + currImgFilename + Environment.NewLine +
                              "closest concurrent data file is:" + Environment.NewLine +
                              nearestConcurrentData.filename + Environment.NewLine +
                              "with date-time value " + nearestConcurrentData.datetimeUTC.ToString("o");
            Console.WriteLine(strError);
            nearestConcurrentData = null;
        }
        ipdt.concurrentData = nearestConcurrentData;
        if (nearestConcurrentData != null)
        {
            ipdt.concurrentDataXMLfile = nearestConcurrentData.filename;
        }
        return (ipdt);
    });
    #endregion mapping

    removed = lStatsProcessing.RemoveAll(ipdt => ipdt.concurrentData == null);
    Console.WriteLine("removed " + removed + " items (couldn`t find concurrent data). " +
                      lStatsProcessing.Count + " files remains to process.");
    #endregion list, read and map concurrent data

    if (!lStatsProcessing.Any())
    {
        Console.WriteLine("There is no files that sutisfy settings specified and have all required concurrent data (stats or GPS etc.). Processing will not be proceeded.");
        return;
    }

    #region Predict SDC values using pre-trained NN parameters
    // assemble the per-image feature CSV row: image stats + sun elevation/azimuth,
    // then drop any "filename" columns (they are not numeric features)
    string csvHeader = lStatsProcessing[0].grixyrgbStats.CSVHeader() +
                       ",SunElevationDeg,SunAzimuthDeg,sunDiskCondition";
    List<string> lCSVheader = csvHeader.Split(',').ToList();
    List<int> columnsToDelete = lCSVheader.Select((str, idx) => new Tuple<int, string>(idx, str))
        .Where(tpl => tpl.Item2.ToLower().Contains("filename")).ToList().ConvertAll(tpl => tpl.Item1);
    List<List<string>> lCalculatedData = lStatsProcessing.ConvertAll(dt =>
    {
        // NOTE(review): the Replace(",", ".") here and Replace(".", ",") below convert
        // between invariant-style and current-culture decimal separators — presumably the
        // host culture uses a decimal comma; confirm before changing.
        string currImageALLstatsDataCSVWithConcurrentData = dt.grixyrgbStats.ToCSV() + "," +
            dt.concurrentData.gps.SunZenithAzimuth().ElevationAngle.ToString().Replace(",", ".") + "," +
            dt.concurrentData.gps.SunZenithAzimuth().Azimuth.ToString().Replace(",", ".");
        List<string> retVal = currImageALLstatsDataCSVWithConcurrentData.Split(',').ToList();
        retVal = retVal.Where((str, idx) => !columnsToDelete.Contains(idx)).ToList();
        return (retVal);
    });
    List<DenseVector> lDV_objects_features = lCalculatedData.ConvertAll(
        list => DenseVector.OfEnumerable(list.ConvertAll<double>(str => Convert.ToDouble(str.Replace(".", ",")))));

    // normalize features with the same means/ranges the NN was trained with
    DenseVector dvMeans = (DenseVector)((DenseMatrix)ServiceTools.ReadDataFromCSV(NormMeansFile, 0, ",")).Row(0);
    DenseVector dvRanges = (DenseVector)((DenseMatrix)ServiceTools.ReadDataFromCSV(NormRangeFile, 0, ",")).Row(0);
    lDV_objects_features = lDV_objects_features.ConvertAll(dv =>
    {
        DenseVector dvShifted = dv - dvMeans;
        DenseVector dvNormed = (DenseVector)dvShifted.PointwiseDivide(dvRanges);
        return (dvNormed);
    });
    DenseMatrix dmObjectsFeatures = DenseMatrix.OfRowVectors(lDV_objects_features);

    // load the trained NN parameters and layer configuration, then predict SDC per image
    DenseVector dvThetaValues = (DenseVector)ServiceTools.ReadDataFromCSV(NNtrainedParametersFile, 0, ",");
    List<int> NNlayersConfig =
        new List<double>(((DenseMatrix)ServiceTools.ReadDataFromCSV(NNconfigFile, 0, ",")).Row(0)).ConvertAll(
            dVal => Convert.ToInt32(dVal));
    List<List<double>> lDecisionProbabilities = null;
    List<SunDiskCondition> predictedSDClist = NNclassificatorPredictor<SunDiskCondition>.NNpredict(
        dmObjectsFeatures, dvThetaValues, NNlayersConfig, out lDecisionProbabilities,
        SunDiskConditionData.MatlabEnumeratedSDCorderedList()).ToList();

    //List<SunDiskCondition> predictedSDClist = predictedSDC.ConvertAll(sdcInt =>
    //{
    //    switch (sdcInt)
    //    {
    //        case 4:
    //            return SunDiskCondition.NoSun;
    //            break;
    //        case 1:
    //            return SunDiskCondition.Sun0;
    //            break;
    //        case 2:
    //            return SunDiskCondition.Sun1;
    //            break;
    //        case 3:
    //            return SunDiskCondition.Sun2;
    //            break;
    //        default:
    //            return SunDiskCondition.Defect;
    //    }
    //});

    // log the per-image SDC probability table
    string strToShow = "SDC values probabilities: " + Environment.NewLine +
                       "| No Sun | Sun_0 | Sun_1 | Sun_2 | Detected |" + Environment.NewLine;
    foreach (List<double> lDecisionProbability in lDecisionProbabilities)
    {
        strToShow += "| " + lDecisionProbability[3].ToString("F4") + " | " +
                     lDecisionProbability[0].ToString("F4") + " | " +
                     lDecisionProbability[1].ToString("F4") + " | " +
                     lDecisionProbability[2].ToString("F4") + " |" +
                     predictedSDClist[lDecisionProbabilities.IndexOf(lDecisionProbability)] + "|" +
                     Environment.NewLine;
    }
    ServiceTools.logToTextFile(errorLogFilename, strToShow, true, false);
    #endregion

    //lStatsProcessing =
    //    lStatsProcessing.Where((ipd, idx) => predictedSDClist[idx] == SunDiskCondition.Sun2).ToList();
    // keep only images whose predicted SDC matches the requested filter
    lStatsProcessing = lStatsProcessing.Where((ipd, idx) => predictedSDClist[idx] == sdcFilter).ToList();
    Console.WriteLine("Detected " + lStatsProcessing.Count + " images with SDC = " + sdcFilter.ToString());
    if (!lStatsProcessing.Any())
    {
        Console.WriteLine("There is no files with SDC = Sun2. Processing will not be proceeded.");
        return;
    }
    Console.WriteLine("finished enumerating and filtering files. Files to process: " + lStatsProcessing.Count);
}
public void Start(string[] args) { readDefaultProperties(); List <string> argsList = new List <string>(args); if (argsList.Find(str => str == "--recursive") != null) { bEnumerateFilesRecursively = true; } if (argsList.Find(str => str == "-y") != null) { bStartWithoutConfirmation = true; } // --filter-by-observed-cloud-cover-records // bFilterByObservedCloudCoverRecords if (argsList.Find(str => str == "--filter-by-observed-cloud-cover-records") != null) { bFilterByObservedCloudCoverRecords = true; } // sdcFilter if (argsList.Where(str => str.Contains("--sdc=")).Count() > 0) { string foundArg = argsList.Where(str => str.Contains("--sdc=")).ToList()[0]; string strValue = foundArg.Replace("--sdc=", ""); //sdcFilter if (strValue == "none") { sdcFilter = SunDiskCondition.NoSun; } else if (strValue == "0") { sdcFilter = SunDiskCondition.Sun0; } else if (strValue == "1") { sdcFilter = SunDiskCondition.Sun1; } else if (strValue == "2") { sdcFilter = SunDiskCondition.Sun2; } else { sdcFilter = SunDiskCondition.Sun2; } } else { Console.WriteLine("SDC filter is not specified. Filtering by SDC will not applied."); sdcFilter = SunDiskCondition.Undefined; // Не применять фильтрацию } if (argsList.Where(str => str.Contains("--camera-id=")).Count() > 0) { string foundArg = argsList.Where(str => str.Contains("--camera-id=")).ToList()[0]; string strValue = foundArg.Replace("--camera-id=", ""); CamIDtoProcess = Convert.ToInt32(strValue); if ((CamIDtoProcess != 1) && (CamIDtoProcess != 2)) { Console.WriteLine("camera ID out of range detected. I will not filter by camera ID."); CamIDtoProcess = 0; } } else { Console.WriteLine("camera ID out of range detected. I will not filter by camera ID"); CamIDtoProcess = 0; // will not filter } if (!bStartWithoutConfirmation) { Console.Write("Start with the mentioned properties? 
[y/n] "); string strReply = Console.ReadLine(); if (strReply.ToLower().First() != 'y') { Console.WriteLine("\nWill not proceed due to user interruprion."); Console.WriteLine("===FINISHED==="); Console.ReadKey(); return; } } outputDataFile = strOutputDirectory + Path.GetFileNameWithoutExtension(outputDataFile) + "-" + sdcFilter.ToString() + "-camID" + CamIDtoProcess + ".xml"; string outputCSVfile = strOutputDirectory + Path.GetFileNameWithoutExtension(outputDataFile) + "-" + sdcFilter.ToString() + "-camID" + CamIDtoProcess + ".csv"; Console.WriteLine("getting files list"); #region Enumerating files string directory = Path.GetDirectoryName(inputBasePath); string filemask = "*.jpg"; List <string> filesList = new List <string>(Directory.EnumerateFiles(directory, filemask, bEnumerateFilesRecursively ? SearchOption.AllDirectories : SearchOption.TopDirectoryOnly)); #region filter by camID //...devID1.jpg if (CamIDtoProcess > 0) { string ptrnCamID = "devid" + CamIDtoProcess + ".jpg"; filesList = filesList.Where(fname => fname.ToLower().Contains(ptrnCamID)).ToList(); } #endregion Console.WriteLine("found " + filesList.Count + " images."); #region try to find concurrent and stats data already assembled into a small set of files List <string> assembledDataFilesList = Directory.EnumerateFiles(ConcurrentDataXMLfilesDirectory, "*.xml", SearchOption.TopDirectoryOnly).ToList(); List <ImagesProcessingData> lReadAssembledData = new List <ImagesProcessingData>(); foreach (string strAssembledDataXMlfName in assembledDataFilesList) { try { List <ImagesProcessingData> currFileContent = ServiceTools.ReadObjectFromXML(strAssembledDataXMlfName, typeof(List <ImagesProcessingData>)) as List <ImagesProcessingData>; lReadAssembledData.AddRange(currFileContent); } catch (Exception ex) { continue; } } if (lReadAssembledData.Any()) { Console.WriteLine("Found pre-assembled ImagesProcessingData XML files: "); foreach (string s in assembledDataFilesList) { Console.WriteLine(s); } 
Console.WriteLine("Read records from this set: " + lReadAssembledData.Count); Console.WriteLine("Images to process originally: " + filesList.Count); Console.WriteLine("Should I use these pre-assembled data? (y/n): "); string ans = Console.ReadKey().KeyChar.ToString(); if (ans == "y") { lStatsProcessing = lReadAssembledData; } } #endregion try to find data already compiled into a small set of files if (!lStatsProcessing.Any()) { #region list, read and map image stats files Console.WriteLine("filtering by ready-to-use GrIxYRGB XML files..."); List <string> statsFilesList = new List <string>(Directory.EnumerateFiles(imageYRGBstatsXMLdataFilesDirectory, ConventionalTransitions.ImageGrIxYRGBstatsFileNamesPattern(), bEnumerateFilesRecursively ? SearchOption.AllDirectories : SearchOption.TopDirectoryOnly)); List <string> statsFilesListWOpath = statsFilesList.ConvertAll(Path.GetFileName); Console.WriteLine("found " + statsFilesList.Count + " XML stats files in directory " + Environment.NewLine + imageYRGBstatsXMLdataFilesDirectory + Environment.NewLine + "by mask " + Environment.NewLine + ConventionalTransitions.ImageGrIxYRGBstatsFileNamesPattern()); int removed = filesList.RemoveAll( fname => !statsFilesListWOpath.Contains(ConventionalTransitions.ImageGrIxYRGBstatsDataFileName(fname, "", false))); Console.WriteLine("removed " + removed + " items (couldn`t find stats data files). Remains " + filesList.Count + " to process."); if (!filesList.Any()) { Console.WriteLine("There is no " + filemask + " files that sutisfy settings specified. 
Processing will not be started."); return; } lStatsProcessing = filesList.ConvertAll(strImgFname => { ImagesProcessingData retVal = new ImagesProcessingData() { filename = strImgFname }; return(retVal); }); //#if DEBUG // lStatsProcessing = lStatsProcessing.Where((ipd, ind) => ind < 10).ToList(); //#endif Console.WriteLine("started reading and mapping stats data"); int totalFilesCountToRead = lStatsProcessing.Count; int filesRead = 0; int currProgressPerc = 0; foreach (ImagesProcessingData ipdt in lStatsProcessing) { ipdt.grixyrgbStatsXMLfile = statsFilesList.First( statsFname => statsFname.Contains(ConventionalTransitions.ImageGrIxYRGBstatsDataFileName(ipdt.filename, "", false))); ipdt.grixyrgbStats = ServiceTools.ReadObjectFromXML(ipdt.grixyrgbStatsXMLfile, typeof(SkyImageIndexesStatsData)) as SkyImageIndexesStatsData; #region calculate and report progress filesRead++; double progress = 100.0d * (double)filesRead / (double)totalFilesCountToRead; if (progress - (double)currProgressPerc > 1.0d) { currProgressPerc = Convert.ToInt32(progress); Console.WriteLine("read " + currProgressPerc + "%"); } #endregion calculate and report progress } #endregion #region list, read and map concurrent data List <ConcurrentData> lConcurrentData = null; List <string> concurrentDataFilesList = Directory.EnumerateFiles(ConcurrentDataXMLfilesDirectory, ConventionalTransitions.ImageConcurrentDataFilesNamesPattern(), bEnumerateFilesRecursively ? 
SearchOption.AllDirectories : SearchOption.TopDirectoryOnly).ToList(); #region reading Console.WriteLine("started concurrent data reading"); totalFilesCountToRead = concurrentDataFilesList.Count; filesRead = 0; currProgressPerc = 0; List <Dictionary <string, object> > lDictionariesConcurrentData = new List <Dictionary <string, object> >(); foreach (string strConcDataXMLFile in concurrentDataFilesList) { Dictionary <string, object> currDict = ServiceTools.ReadDictionaryFromXML(strConcDataXMLFile); currDict.Add("XMLfileName", Path.GetFileName(strConcDataXMLFile)); lDictionariesConcurrentData.Add(currDict); #region calculate and report progress filesRead++; double progress = 100.0d * (double)filesRead / (double)totalFilesCountToRead; if (progress - (double)currProgressPerc > 1.0d) { currProgressPerc = Convert.ToInt32(progress); Console.WriteLine("read " + currProgressPerc + "%"); } #endregion calculate and report progress } lDictionariesConcurrentData.RemoveAll(dict => dict == null); lConcurrentData = lDictionariesConcurrentData.ConvertAll <ConcurrentData>(dict => { ConcurrentData retVal = null; try { retVal = new ConcurrentData(dict); } catch (Exception ex) { string strError = "couldn`t parse XML file " + dict["XMLfileName"] + " : " + Environment.NewLine + ex.Message; Console.WriteLine(strError); } return(retVal); }); lConcurrentData.RemoveAll(val => val == null); #endregion reading #region mapping // map obtained concurrent data to images by its datetime Console.WriteLine("concurrent data mapping started"); lStatsProcessing = lStatsProcessing.ConvertAll(ipdt => { string currImgFilename = ipdt.filename; currImgFilename = Path.GetFileNameWithoutExtension(currImgFilename); DateTime currImgDT = ConventionalTransitions.DateTimeOfSkyImageFilename(currImgFilename); ConcurrentData nearestConcurrentData = lConcurrentData.Aggregate((cDt1, cDt2) => { TimeSpan tspan1 = new TimeSpan(Math.Abs((cDt1.datetimeUTC - currImgDT).Ticks)); TimeSpan tspan2 = new 
TimeSpan(Math.Abs((cDt2.datetimeUTC - currImgDT).Ticks)); return((tspan1 <= tspan2) ? (cDt1) : (cDt2)); }); if (new TimeSpan(Math.Abs((nearestConcurrentData.datetimeUTC - currImgDT).Ticks)) >= TimeSpanForConcurrentDataMappingTolerance) { string strError = "couldn`t find close enough concurrent data file for image:" + Environment.NewLine + currImgFilename + Environment.NewLine + "closest concurrent data file is:" + Environment.NewLine + nearestConcurrentData.filename + Environment.NewLine + "with date-time value " + nearestConcurrentData.datetimeUTC.ToString("o"); Console.WriteLine(strError); nearestConcurrentData = null; } ipdt.concurrentData = nearestConcurrentData; if (nearestConcurrentData != null) { ipdt.concurrentDataXMLfile = nearestConcurrentData.filename; } return(ipdt); }); #endregion mapping removed = lStatsProcessing.RemoveAll(ipdt => ipdt.concurrentData == null); Console.WriteLine("removed " + removed + " items (couldn`t find concurrent data). " + lStatsProcessing.Count + " files remains to process."); #endregion list, read and map concurrent data } if (!lStatsProcessing.Any()) { Console.WriteLine("There is no files that sutisfy settings specified and have all required concurrent data (stats or GPS etc.). 
Processing will not be proceeded."); return; } #region Filter by SDC values predicting it using pre-trained NN parameters #region // //string csvHeader = lStatsProcessing[0].grixyrgbStats.CSVHeader() + // ",SunElevationDeg,SunAzimuthDeg,sunDiskCondition"; //List<string> lCSVheader = csvHeader.Split(',').ToList(); //List<int> columnsToDelete = // lCSVheader.Select((str, idx) => new Tuple<int, string>(idx, str)) // .Where(tpl => tpl.Item2.ToLower().Contains("filename")).ToList().ConvertAll(tpl => tpl.Item1); //List<List<string>> lCalculatedData = lStatsProcessing.ConvertAll(dt => //{ // string currImageALLstatsDataCSVWithConcurrentData = dt.grixyrgbStats.ToCSV() + "," + // dt.concurrentData.gps.SunZenithAzimuth() // .ElevationAngle.ToString() // .Replace(",", ".") + "," + // dt.concurrentData.gps.SunZenithAzimuth() // .Azimuth.ToString() // .Replace(",", "."); // List<string> retVal = currImageALLstatsDataCSVWithConcurrentData.Split(',').ToList(); // retVal = retVal.Where((str, idx) => !columnsToDelete.Contains(idx)).ToList(); // return retVal; //}); //List<DenseVector> lDV_objects_features = // lCalculatedData.ConvertAll( // list => // DenseVector.OfEnumerable(list.ConvertAll<double>(str => Convert.ToDouble(str.Replace(".", ","))))); #endregion DenseVector dvMeans = (DenseVector)((DenseMatrix)ServiceTools.ReadDataFromCSV(NormMeansFile, 0, ",")).Row(0); DenseVector dvRanges = (DenseVector)((DenseMatrix)ServiceTools.ReadDataFromCSV(NormRangeFile, 0, ",")).Row(0); #region // //lDV_objects_features = lDV_objects_features.ConvertAll(dv => //{ // DenseVector dvShifted = dv - dvMeans; // DenseVector dvNormed = (DenseVector)dvShifted.PointwiseDivide(dvRanges); // return dvNormed; //}); //DenseMatrix dmObjectsFeatures = DenseMatrix.OfRowVectors(lDV_objects_features); #endregion DenseVector dvThetaValues = (DenseVector)ServiceTools.ReadDataFromCSV(NNtrainedParametersFile, 0, ","); List <int> NNlayersConfig = new List 
<double>(((DenseMatrix)ServiceTools.ReadDataFromCSV(NNconfigFile, 0, ",")).Row(0)).ConvertAll (dVal => Convert.ToInt32(dVal)); #region // // List<List<double>> lDecisionProbabilities = null; #endregion List <Tuple <ImagesProcessingData, List <SDCdecisionProbability>, SunDiskCondition> > lTplsPredictedSDClist = new List <Tuple <ImagesProcessingData, List <SDCdecisionProbability>, SunDiskCondition> >(); List <List <double> > SDCdecisionProbabilitiesListDoubles = new List <List <double> >(); List <SunDiskCondition> imagesSDCpredicted = SDCpredictorNN.PredictSDC_NN(lStatsProcessing, NNlayersConfig, dvThetaValues, dvMeans, dvRanges, out SDCdecisionProbabilitiesListDoubles); List <List <SDCdecisionProbability> > SDCdecisionProbabilitiesLists = SDCdecisionProbabilitiesListDoubles.ConvertAll( currSDCdecisionProbabilities => currSDCdecisionProbabilities.Select((dProb, idx) => new SDCdecisionProbability() { sdc = SunDiskConditionData.MatlabSDCenum(idx + 1), sdcDecisionProbability = dProb }).ToList()); lTplsPredictedSDClist = lStatsProcessing.Zip( SDCdecisionProbabilitiesLists.Zip(imagesSDCpredicted, (lDecProb, sdcPredicted) => new Tuple <List <SDCdecisionProbability>, SunDiskCondition>(lDecProb, sdcPredicted)).ToList(), (ipd, tpl) => new Tuple <ImagesProcessingData, List <SDCdecisionProbability>, SunDiskCondition>(ipd, tpl.Item1, tpl.Item2)).ToList(); #region // //foreach (ImagesProcessingData dt in lStatsProcessing) //{ // List<double> currSDCdecisionProbabilities = new List<double>(); // SunDiskCondition currSDC = SDCpredictorNN.PredictSDC_NN(dt.grixyrgbStats, dt.concurrentData, // NNlayersConfig, dvThetaValues, dvMeans, dvRanges, out currSDCdecisionProbabilities); // List<SDCdecisionProbability> currSDCdecisionProbabilitiesList = currSDCdecisionProbabilities.Select((dProb, idx) => new SDCdecisionProbability() // { // sdc = SunDiskConditionData.MatlabSDCenum(idx + 1), // sdcDecisionProbability = dProb // }).ToList(); // lTplsPredictedSDClist.Add( // new 
Tuple<ImagesProcessingData, List<SDCdecisionProbability>, SunDiskCondition>(dt, // currSDCdecisionProbabilitiesList, currSDC)); //} #endregion #region // //List<int> predictedSDC = // NNclassificatorPredictor.NNpredict(dmObjectsFeatures, dvThetaValues, NNlayersConfig, // out lDecisionProbabilities).ToList(); //List<SunDiskCondition> predictedSDClist = predictedSDC.ConvertAll(sdcInt => //{ // switch (sdcInt) // { // case 4: // return SunDiskCondition.NoSun; // break; // case 1: // return SunDiskCondition.Sun0; // break; // case 2: // return SunDiskCondition.Sun1; // break; // case 3: // return SunDiskCondition.Sun2; // break; // default: // return SunDiskCondition.Defect; // } //}); //List<Tuple<ImagesProcessingData, SunDiskCondition>> lTplsPredictedSDClist = // predictedSDClist.Zip(lStatsProcessing, // (sdc, ipd) => new Tuple<ImagesProcessingData, SunDiskCondition>(ipd, sdc)).ToList(); #endregion #region output obtained SDC data to log file string strToShow = "SDC values probabilities: " + Environment.NewLine + "| NoSun | Sun0 | Sun1 | Sun2 |" + Environment.NewLine; foreach (Tuple <ImagesProcessingData, List <SDCdecisionProbability>, SunDiskCondition> tpl in lTplsPredictedSDClist) { List <SDCdecisionProbability> currSDCdecisionProbabilitiesList = tpl.Item2; strToShow += "|" + String.Format("{0,9}", (currSDCdecisionProbabilitiesList.First( prob => prob.sdc == SunDiskCondition.NoSun).sdcDecisionProbability * 100.0d) .ToString("F2") + "%") + "|" + String.Format("{0,9}", (currSDCdecisionProbabilitiesList.First( prob => prob.sdc == SunDiskCondition.Sun0).sdcDecisionProbability * 100.0d) .ToString("F2") + "%") + "|" + String.Format("{0,9}", (currSDCdecisionProbabilitiesList.First( prob => prob.sdc == SunDiskCondition.Sun1).sdcDecisionProbability * 100.0d) .ToString("F2") + "%") + "|" + String.Format("{0,9}", (currSDCdecisionProbabilitiesList.First( prob => prob.sdc == SunDiskCondition.Sun2).sdcDecisionProbability * 100.0d) .ToString("F2") + "%") + "|" + 
Environment.NewLine; } ServiceTools.logToTextFile(errorLogFilename, strToShow, true, false); #endregion output obtained SDC data to log file #region filter by SDC value if needed if (sdcFilter != SunDiskCondition.Undefined) { lStatsProcessing = lStatsProcessing.Where((ipd, idx) => lTplsPredictedSDClist[idx].Item3 == sdcFilter).ToList(); Console.WriteLine("Detected " + lStatsProcessing.Count + " images with SDC = " + sdcFilter.ToString()); } #endregion filter by SDC value if needed #endregion Filter by SDC values predicting it using pre-trained NN parameters #region ObservedCloudCoverDataCSVfile if (bFilterByObservedCloudCoverRecords) { Console.WriteLine("Reading observed cloud cover data CSV file..."); if (!File.Exists(ObservedCloudCoverDataCSVfile)) { Console.WriteLine("Unable to read observed data CSV file: " + ObservedCloudCoverDataCSVfile); return; } List <List <string> > lCSVfileContents = ServiceTools.ReadDataFromCSV(ObservedCloudCoverDataCSVfile, 1, true); if (!lCSVfileContents.Any()) { Console.WriteLine("The observed cloud cover CSV file seems to be empty: " + ObservedCloudCoverDataCSVfile); return; } List <ObservedClCoverData> lObservedData = lCSVfileContents.ConvertAll(lStr => new ObservedClCoverData(lStr)); List <SkyImagesDataWith_Concurrent_Stats_CloudCover_SDC> lImagesFilteredByAvailableObservedData = new List <SkyImagesDataWith_Concurrent_Stats_CloudCover_SDC>(); #region filter images by available observed data using DateTimeFilterTolerance Console.WriteLine("Filtering by observed data available..."); foreach (ObservedClCoverData observedData in lObservedData) { DateTime currObservedDatumDateTime = observedData.dt; List <SkyImagesDataWith_Concurrent_Stats_CloudCover_SDC> lImagesCloseToCurrObservedDatum = lStatsProcessing .Where(ipd => { TimeSpan tspan = new TimeSpan( Math.Abs( (ConventionalTransitions.DateTimeOfSkyImageFilename(ipd.filename) - currObservedDatumDateTime).Ticks)); return(tspan <= DateTimeFilterTolerance); }) .ToList() .ConvertAll(ifd => 
new SkyImagesDataWith_Concurrent_Stats_CloudCover_SDC() { // observedData // ifd skyImageFullFileName = ifd.filename, skyImageFileName = Path.GetFileName(ifd.filename), currImageDateTime = ConventionalTransitions.DateTimeOfSkyImageFilename(ifd.filename), observedCloudCoverData = observedData, concurrentDataXMLfile = ifd.concurrentDataXMLfile, concurrentData = ifd.concurrentData, grixyrgbStatsXMLfile = ifd.grixyrgbStatsXMLfile, grixyrgbStats = ifd.grixyrgbStats, SDCvalue = lTplsPredictedSDClist.First(tpl => tpl.Item1 == ifd).Item3, SDCprobabilities = lTplsPredictedSDClist.First(tpl => tpl.Item1 == ifd).Item2 }); lImagesFilteredByAvailableObservedData.AddRange(lImagesCloseToCurrObservedDatum); } #endregion filter images by available observed data using DateTimeFilterTolerance if (!lImagesFilteredByAvailableObservedData.Any()) { Console.WriteLine( "There is no images remain after filtering using all available data. Output will be empty."); } ServiceTools.WriteObjectToXML(lImagesFilteredByAvailableObservedData, outputDataFile); #region Сформируем и запишем данные в CSV-файл // Здесь есть данные по наблюдаемому CloudCover string csvHeader = lImagesFilteredByAvailableObservedData[0].grixyrgbStats.CSVHeader() + ",SunElevationDeg,SunAzimuthDeg,ObservedTotalCloudCover,ObservedLowerCloudCover,SDC,SDCprobabilityNoSun,SDCprobabilitySun0,SDCprobabilitySun1,SDCprobabilitySun2"; List <string> lCSVoutputData = lImagesFilteredByAvailableObservedData.ConvertAll(ifd => { // все стат. 
предикторы - как для SDC // данные CloudCover string retVal = ""; //ImagesProcessingData dt = tpl.Item2; //ObservedClCoverData clCov = tpl.Item1; retVal = ifd.grixyrgbStats.ToCSV() + "," + ifd.concurrentData.gps.SunZenithAzimuth().ElevationAngle.ToString().Replace(",", ".") + "," + ifd.concurrentData.gps.SunZenithAzimuth().Azimuth.ToString().Replace(",", ".") + "," + ifd.observedCloudCoverData.CloudCoverTotal.ToString() + "," + ifd.observedCloudCoverData.CloudCoverLower.ToString() + "," + ifd.SDCvalue.ToString() + "," + ifd.SDCprobabilities.First(prob => prob.sdc == SunDiskCondition.NoSun) .sdcDecisionProbability.ToString().Replace(",", ".") + "," + ifd.SDCprobabilities.First(prob => prob.sdc == SunDiskCondition.Sun0) .sdcDecisionProbability.ToString().Replace(",", ".") + "," + ifd.SDCprobabilities.First(prob => prob.sdc == SunDiskCondition.Sun1) .sdcDecisionProbability.ToString().Replace(",", ".") + "," + ifd.SDCprobabilities.First(prob => prob.sdc == SunDiskCondition.Sun2) .sdcDecisionProbability.ToString().Replace(",", "."); return(retVal); }); string strToOutputToCSVfile = string.Join(Environment.NewLine, lCSVoutputData); ServiceTools.logToTextFile(outputCSVfile, csvHeader + Environment.NewLine, true, false); ServiceTools.logToTextFile(outputCSVfile, strToOutputToCSVfile, true, false); #endregion Сформируем и запишем данные в CSV-файл } else { ServiceTools.WriteObjectToXML(lStatsProcessing, outputDataFile); #region Сформируем и запишем данные в CSV-файл // здесь нет данных по наблюдаемому Cloud Cover // не надо нам такое. Оставим все это только в виде XML-файла. #endregion Сформируем и запишем данные в CSV-файл } #endregion ObservedCloudCoverDataCSVfile #endregion Enumerating files and output to XML and CSV file Console.WriteLine("saved output data to file: " + Environment.NewLine + outputDataFile + Environment.NewLine + Environment.NewLine); Console.WriteLine("===FINISHED==="); Console.ReadKey(); }
/// <summary>
/// Processes a single sky image: runs ImageProcessing.CalculateImageStatsData on it,
/// persists the per-file timing data and the GrIx/Y/RGB stats XML, and updates the
/// matching entry in lStatsCalculation with either the computed median/5th-percentile
/// values (on success) or the Error state (on failure).
/// Safe for concurrent invocation: the processed-files counter is incremented via Interlocked.
/// </summary>
/// <param name="srcData">Descriptor of the image to process; only its filename is read here.</param>
private void ProcessImage(ImageStatsCollectingData srcData)
{
    Interlocked.Increment(ref totalFilesProcessed);
    int perc = Convert.ToInt32(100.0d * (double)totalFilesProcessed / (double)totalFilesCountToProcess);
    Console.WriteLine(DateTime.Now.ToString("s") + " : " + perc + "% : started processing file " +
                      Environment.NewLine + srcData.filename);

    // Optional parameters handed down to the stats calculation routine.
    Dictionary<string, object> optionalParameters = new Dictionary<string, object>();
    optionalParameters.Add("ImagesRoundMasksXMLfilesMappingList", ImagesRoundMasksXMLfilesMappingList);
    Stopwatch sw = new Stopwatch();
    sw.Start();
    optionalParameters.Add("Stopwatch", sw);
    optionalParameters.Add("logFileName", errorLogFilename);

    ImageStatsDataCalculationResult currImageProcessingResult =
        ImageProcessing.CalculateImageStatsData(srcData.filename, optionalParameters);
    currImageProcessingResult.stopwatch.Stop();

    if (currImageProcessingResult.calcResult)
    {
        string currentFullFileName = currImageProcessingResult.imgFilename;

        // Performance record: "<file>;<wall-clock ms>;<processor-time delta ms>".
        string strPerfCountersData = currentFullFileName + ";" +
                                     currImageProcessingResult.stopwatch.ElapsedMilliseconds + ";" +
                                     (currImageProcessingResult.procTotalProcessorTimeEnd -
                                      currImageProcessingResult.procTotalProcessorTimeStart).TotalMilliseconds +
                                     Environment.NewLine;
        ServiceTools.logToTextFile(strPerformanceCountersStatsFile, strPerfCountersData, true);

        string strImageGrIxYRGBDataFileName = ConventionalTransitions.ImageGrIxYRGBstatsDataFileName(
            currentFullFileName, imageYRGBstatsXMLdataFilesDirectory, true, currPath2Process);
        ServiceTools.WriteObjectToXML(currImageProcessingResult.grixyrgbStatsData, strImageGrIxYRGBDataFileName);

        ImageStatsCollectingData foundDataObj =
            lStatsCalculation.Find(obj => obj.filename == currentFullFileName);
        foundDataObj.State = ImageStatsCollectingState.Finished;
        foundDataObj.GrIxMedianValue = currImageProcessingResult.mp5Result.GrIxStatsMedian;
        foundDataObj.GrIxPerc5Value = currImageProcessingResult.mp5Result.GrIxStatsPerc5;
        Console.WriteLine(DateTime.Now.ToString("s") + " : finished processing file " +
                          Environment.NewLine + currentFullFileName);
    }
    else
    {
        string currentFullFileName = currImageProcessingResult.imgFilename;
        ImageStatsCollectingData foundDataObj =
            lStatsCalculation.Find(obj => obj.filename == currentFullFileName);
        foundDataObj.State = ImageStatsCollectingState.Error;
        Console.WriteLine("ERROR processing file " + Path.GetFileName(currentFullFileName));
        try
        {
            // Report the full error to the error log file.
            // FIX: the original "#if (DEBUG && MONO)" and nested "#if MONO" branches had
            // byte-identical bodies, so they collapse to a single "#if MONO" check.
            #region report error
#if MONO
            ServiceTools.logToTextFile(errorLogFilename,
                "Error processing file: " + Environment.NewLine + currentFullFileName + Environment.NewLine +
                "messages: " + ServiceTools.GetExceptionMessages(currImageProcessingResult.exception) +
                Environment.NewLine +
                "Stack trace: " + Environment.NewLine + Environment.StackTrace +
                Environment.NewLine + Environment.NewLine, true, true);
#else
            ServiceTools.logToTextFile(errorLogFilename,
                "Error processing file: " + Environment.NewLine + currentFullFileName + Environment.NewLine +
                "message: " + ServiceTools.GetExceptionMessages(currImageProcessingResult.exception) +
                Environment.NewLine + ServiceTools.CurrentCodeLineDescription() + Environment.NewLine +
                "Stack trace: " + Environment.NewLine + Environment.StackTrace +
                Environment.NewLine + Environment.NewLine, true, true);
#endif
            #endregion report error
        }
        catch (Exception)
        {
            // Logging must never take down the processing loop; the Error state
            // has already been recorded on foundDataObj above.
            return;
        }
    }
}
/// <summary>
/// Computes the deviation between the astronomically predicted sun position (SPA,
/// derived from GPS data found for the image) and the sun disk actually detected on
/// the image. Detection results are cached next to the image in a "sun disk info"
/// XML file and reused on subsequent runs (rescaled when the working image size differs
/// from the size the cached circle was measured on).
/// </summary>
/// <param name="finfo">Image file to analyze.</param>
/// <param name="defaultProperties">Application settings ("DefaultMaxImageSize",
/// "DefaultDataFilesLocation", "GrIxDefaultSkyCloudMarginWithoutSun", ...).</param>
/// <param name="currImageLogWindow">Log window for per-image progress messages.</param>
/// <param name="showOnlyErrors">When true, the informational SPA sun-position message is suppressed.</param>
/// <returns>Calculation result; on handled failures calculationSucceeded is false and
/// resultMessage carries the reason.</returns>
/// <exception cref="Exception">Thrown when the sun disk cannot be detected.
/// NOTE(review): inconsistent with the other failure paths, which return a failed
/// result instead of throwing — callers must handle both contracts.</exception>
private AngleSunDeviationCalcResult CalculateDevDataForImage(FileInfo finfo,
    Dictionary<string, object> defaultProperties, LogWindow currImageLogWindow, bool showOnlyErrors = false)
{
    FileInfo currFileInfo = finfo;
    Dictionary<string, object> defaultProps = defaultProperties;
    AngleSunDeviationCalcResult retRes = new AngleSunDeviationCalcResult()
    {
        fileName = currFileInfo.FullName,
    };

    // Locate GPS data matching this image (nav-data caches are passed by ref so
    // repeated calls reuse already-read files).
    GPSdata gps = ServiceTools.FindProperGPSdataForImage(currFileInfo.FullName, theLogWindow,
        defaultProperties, ref NVdataFilesAlreadyReadDateTimeSpans, ref NVdataFilesAlreadyReadData);
    if (gps == null)
    {
        theLogWindow = ServiceTools.LogAText(theLogWindow, "Couldn`t find GPS data for this image.");
        retRes.calculationSucceeded = false;
        retRes.resultMessage = "Couldn`t find GPS data for this image.";
        return (retRes);
    }
    // (Obsolete commented-out implementation removed: EXIF/file-name date-time parsing
    //  and manual *.nv2 nav-file scanning are superseded by FindProperGPSdataForImage,
    //  and the manual SPA calculation by gps.SunZenithAzimuth().)

    retRes.gpsData = gps;

    AzimuthZenithAngle sunPositionSPAext = gps.SunZenithAzimuth();
    if (!showOnlyErrors)
    {
        currImageLogWindow = ServiceTools.LogAText(currImageLogWindow,
            "SPA ext sun position for " + gps.dateTimeUTC.ToString("s") + ": " + sunPositionSPAext);
    }
    retRes.sunSPAcomputedPosition = sunPositionSPAext;

    Image<Bgr, Byte> img2process = new Image<Bgr, byte>(currFileInfo.FullName);
    img2process = ImageProcessing.ImageResizer(img2process,
        Convert.ToInt32(defaultProps["DefaultMaxImageSize"]));
    Image<Bgr, Byte> LocalProcessingImage = ImageProcessing.SquareImageDimensions(img2process);

    RoundData sunRoundData = RoundData.nullRoundData();

    // Check whether sun disk position/size data has already been computed for this image.
    string sunDiskInfoFileName = ConventionalTransitions.SunDiskInfoFileName(currFileInfo.FullName);
    RoundData existingRoundData = RoundData.nullRoundData();
    Size imgSizeUnderExistingRoundData = LocalProcessingImage.Bitmap.Size;
    object existingRoundDataObj =
        ServiceTools.ReadObjectFromXML(sunDiskInfoFileName, typeof(RoundDataWithUnderlyingImgSize));
    if (existingRoundDataObj != null)
    {
        existingRoundData = ((RoundDataWithUnderlyingImgSize)existingRoundDataObj).circle;
        imgSizeUnderExistingRoundData = ((RoundDataWithUnderlyingImgSize)existingRoundDataObj).imgSize;
    }

    // Rescale the cached circle when the current working image size differs from
    // the size it was detected on.
    double currScale = (double)LocalProcessingImage.Width / (double)imgSizeUnderExistingRoundData.Width;
    if (currScale != 1.0d)
    {
        existingRoundData.DCenterX *= currScale;
        existingRoundData.DCenterY *= currScale;
        existingRoundData.DRadius *= currScale;
    }
    if (!existingRoundData.IsNull)
    {
        sunRoundData = existingRoundData;
    }

    ImageProcessing imgP = new ImageProcessing(LocalProcessingImage, true);

    if (sunRoundData.IsNull)
    {
        // No cached sun disk — detect it via the GrIx classification pipeline.
        SkyCloudClassification classificator = new SkyCloudClassification(img2process, defaultProperties);
        classificator.verbosityLevel = 0;
        classificator.ParentForm = ParentForm;
        classificator.theLogWindow = currImageLogWindow;
        classificator.ClassificationMethod = ClassificationMethods.GrIx;
        classificator.isCalculatingUsingBgWorker = false;
        classificator.defaultOutputDataDirectory = (string)defaultProps["DefaultDataFilesLocation"];
        classificator.theStdDevMarginValueDefiningSkyCloudSeparation =
            Convert.ToDouble(defaultProps["GrIxDefaultSkyCloudMarginWithoutSun"]);
        classificator.sourceImageFileName = currFileInfo.FullName;

        retRes.imageEdgesDetected = new RoundDataWithUnderlyingImgSize()
        {
            circle = imgP.imageRD,
            imgSize = LocalProcessingImage.Size,
        };

        DenseMatrix dmProcessingData = (DenseMatrix)imgP.eval("grix").Clone();
        try
        {
            sunRoundData = classificator.DetectSunWithSerieOfArcs(imgP, dmProcessingData);
            if (!sunRoundData.IsNull)
            {
                // Cache the freshly detected sun disk for future runs.
                RoundDataWithUnderlyingImgSize infoToSave = new RoundDataWithUnderlyingImgSize()
                {
                    circle = sunRoundData,
                    imgSize = LocalProcessingImage.Size,
                };
                ServiceTools.WriteObjectToXML(infoToSave, sunDiskInfoFileName);
            }
        }
        catch (Exception ex)
        {
            retRes.calculationSucceeded = false;
            retRes.resultMessage = ex.Message;
            return (retRes);
        }
        ServiceTools.FlushMemory();
    }

    if (sunRoundData.IsNull)
    {
        throw new Exception(finfo.Name + ": couldn`t detect sun position");
    }
    retRes.sunDiskDetectedPosition = new RoundDataWithUnderlyingImgSize()
    {
        circle = sunRoundData,
        imgSize = LocalProcessingImage.Size,
    };

    RoundData imageDetectedRound = imgP.imageRD.Copy();
    retRes.imageEdgesDetected = new RoundDataWithUnderlyingImgSize()
    {
        circle = imageDetectedRound,
        imgSize = LocalProcessingImage.Size,
    };

    try
    {
        // Force evaluation of the deviation; the property may throw when the
        // underlying data is inconsistent, which we translate into a failed result.
        double dev = retRes.computedAzimuthDeviation;
        retRes.calculationSucceeded = true;
    }
    catch (Exception ex)
    {
        retRes.calculationSucceeded = false;
        retRes.resultMessage = ex.Message;
        return (retRes);
    }
    return (retRes);
}
/// <summary>
/// Describes a sky-image file: its full path, bare file name, and the shot
/// date/time parsed from the conventionally formed file name.
/// </summary>
/// <param name="fName">Full path of the sky image file.</param>
public ImageFileDescription(string fName)
{
    string sourcePath = fName;
    fullFileName = sourcePath;
    fileName = Path.GetFileName(sourcePath);
    currImageDateTime = ConventionalTransitions.DateTimeOfSkyImageFilename(sourcePath);
}
/// <summary>
/// BackgroundWorker handler that filters snapshots by sun elevation: maps each *.jpg
/// under the base path to its nearest-in-time concurrent data record, then moves to the
/// target directory (preserving relative paths) every snapshot that either has no
/// close-enough concurrent data or whose computed sun zenith angle is >= 85 degrees.
/// e.Argument is object[] { snapshots base path, concurrent data files path, directory to move files to }.
/// </summary>
private void BgwSnapshotsFilteringWithSunElevation_DoWork(object sender, DoWorkEventArgs e)
{
    BackgroundWorker selfWorker = sender as BackgroundWorker;
    object[] bgwArgs = e.Argument as object[];
    string SnapshotsBasePath = bgwArgs[0] as string;
    string concurrentDataFilesPath = bgwArgs[1] as string;
    string directoryToMoveFilesTo = bgwArgs[2] as string;

    DirectoryInfo srcDir = new DirectoryInfo(SnapshotsBasePath);
    if (!srcDir.Exists)
    {
        theLogWindow = ServiceTools.LogAText(theLogWindow,
            "Операция не выполнена. Не найдена директория:" + Environment.NewLine + SnapshotsBasePath +
            Environment.NewLine);
        return;
    }

    List<FileInfo> lFileList2Process = srcDir.GetFiles("*.jpg", SearchOption.AllDirectories).ToList();
    List<FileInfoWithSnapshotDateTime> lSnapshotsInfos =
        lFileList2Process.ConvertAll(finfo => new FileInfoWithSnapshotDateTime(finfo));

    #region read concurrent data from XML files

    theLogWindow = ServiceTools.LogAText(theLogWindow, "started concurrent data reading");
    // FIX: removed unused local lImagesConcurrentData (declared but never touched).
    List<string> filesListConcurrentData = new List<string>(Directory.EnumerateFiles(concurrentDataFilesPath,
        ConventionalTransitions.ImageConcurrentDataFilesNamesPattern(), SearchOption.AllDirectories));
    int totalFilesCountToRead = filesListConcurrentData.Count;
    int filesRead = 0;
    int currProgressPerc = 0;
    selfWorker.ReportProgress(0);

    List<Dictionary<string, object>> lDictionariesConcurrentData = new List<Dictionary<string, object>>();
    foreach (string strConcDataXMLFile in filesListConcurrentData)
    {
        Dictionary<string, object> currDict = ServiceTools.ReadDictionaryFromXML(strConcDataXMLFile);
        currDict.Add("XMLfileName", Path.GetFileName(strConcDataXMLFile));
        lDictionariesConcurrentData.Add(currDict);

        #region calculate and report progress
        filesRead++;
        double progress = 100.0d * (double)filesRead / (double)totalFilesCountToRead;
        if (progress - (double)currProgressPerc > 1.0d)
        {
            currProgressPerc = Convert.ToInt32(progress);
            selfWorker.ReportProgress(currProgressPerc);
        }
        if (selfWorker.CancellationPending)
        {
            return;
        }
        #endregion calculate and report progress
    }

    lDictionariesConcurrentData.RemoveAll(dict => dict == null);
    List<ConcurrentData> lConcurrentData = lDictionariesConcurrentData.ConvertAll<ConcurrentData>(dict =>
    {
        ConcurrentData retVal = null;
        try
        {
            retVal = new ConcurrentData(dict);
            GPSdata gpsOfGPSstring = new GPSdata((string)dict["GPSdata"],
                GPSdatasources.CloudCamArduinoGPS, retVal.datetimeUTC.Date);
            if (!gpsOfGPSstring.validGPSdata)
            {
                throw new Exception("invalid GPS data");
            }
        }
        catch (Exception ex)
        {
            // FIX: this message used to be built and then silently discarded;
            // now it is written to the log window like the other diagnostics here.
            string strError = "couldn`t parse XML file " + dict["XMLfileName"] + " : " +
                              Environment.NewLine + ex.Message;
            theLogWindow = ServiceTools.LogAText(theLogWindow, strError);
            return (null);
        }
        // Keep only records whose parsed GPS data is valid.
        if (retVal.gps.validGPSdata)
        {
            return (retVal);
        }
        else
        {
            return (null);
        }
    });
    lConcurrentData.RemoveAll(val => val == null);

    // FIX: Aggregate below throws InvalidOperationException on an empty sequence;
    // bail out early when no usable concurrent data was found.
    if (!lConcurrentData.Any())
    {
        theLogWindow = ServiceTools.LogAText(theLogWindow,
            "no valid concurrent data found - nothing to map");
        selfWorker.ReportProgress(0);
        return;
    }

    // map obtained concurrent data to images by its datetime
    theLogWindow = ServiceTools.LogAText(theLogWindow, "started concurrent data mapping");
    int totalrecordsToMap = lSnapshotsInfos.Count;
    int recordsMapped = 0;
    currProgressPerc = 0;
    selfWorker.ReportProgress(0);
    foreach (FileInfoWithSnapshotDateTime info in lSnapshotsInfos)
    {
        DateTime currImgDT = info.dateTime;
        // Pick the concurrent data record closest in time to the snapshot.
        ConcurrentData nearestConcurrentData = lConcurrentData.Aggregate((cDt1, cDt2) =>
        {
            TimeSpan tspan1 = new TimeSpan(Math.Abs((cDt1.datetimeUTC - currImgDT).Ticks));
            TimeSpan tspan2 = new TimeSpan(Math.Abs((cDt2.datetimeUTC - currImgDT).Ticks));
            return ((tspan1 <= tspan2) ? (cDt1) : (cDt2));
        });
        // Accept the mapping only within the configured tolerance; otherwise
        // info.concurrentData stays null and the file is moved out below.
        if (new TimeSpan(Math.Abs((nearestConcurrentData.datetimeUTC - currImgDT).Ticks)) <=
            TimeSpanForConcurrentDataMappingTolerance)
        {
            info.concurrentData = nearestConcurrentData;
        }

        #region calculate and report progress
        recordsMapped++;
        double progress = 100.0d * (double)recordsMapped / (double)totalrecordsToMap;
        if (progress - (double)currProgressPerc > 1.0d)
        {
            currProgressPerc = Convert.ToInt32(progress);
            selfWorker.ReportProgress(currProgressPerc);
        }
        if (selfWorker.CancellationPending)
        {
            return;
        }
        #endregion calculate and report progress
    }

    #endregion read concurrent data from XML files

    // Collect files to move: (a) snapshots without mapped concurrent data,
    // (b) snapshots whose sun zenith angle is >= 85 deg (i.e. sun near/below horizon).
    List<FileInfo> filesToMove = new List<FileInfo>();
    filesToMove.AddRange(
        lSnapshotsInfos.Where(inf => inf.concurrentData == null).ToList().ConvertAll(inf => inf.finfo));
    lSnapshotsInfos.RemoveAll(inf => inf.concurrentData == null);
    filesToMove.AddRange(lSnapshotsInfos.Where(inf =>
    {
        GPSdata currGPS = new GPSdata(inf.concurrentData.GPSdata,
            GPSdatasources.CloudCamArduinoGPS, inf.dateTime.Date);
        var spa = currGPS.SunZenithAzimuth();
        return (spa.ZenithAngle >= 85.0);
    }).ToList().ConvertAll(inf => inf.finfo));

    totalFilesCountToRead = filesToMove.Count;
    filesRead = 0;
    currProgressPerc = 0;
    selfWorker.ReportProgress(0);
    foreach (FileInfo inf in filesToMove)
    {
        #region calculate and report progress
        filesRead++;
        double progress = 100.0d * (double)filesRead / (double)totalFilesCountToRead;
        if (progress - (double)currProgressPerc > 1.0d)
        {
            currProgressPerc = Convert.ToInt32(progress);
            selfWorker.ReportProgress(currProgressPerc);
        }
        if (selfWorker.CancellationPending)
        {
            return;
        }
        #endregion calculate and report progress

        // Build the destination path, preserving the file's path relative to the base dir.
        string strFilenameMoveTo = directoryToMoveFilesTo +
            ((directoryToMoveFilesTo.Last() == Path.DirectorySeparatorChar)
                ? ("")
                : (Path.DirectorySeparatorChar.ToString()));
        string currImgFilenameRelPath = ConventionalTransitions.MakeRelativePath(inf.FullName, SnapshotsBasePath);
        strFilenameMoveTo += currImgFilenameRelPath;
        theLogWindow = ServiceTools.LogAText(theLogWindow, "moving " + inf.FullName);
        // NOTE(review): CheckIfDirectoryExists is given the full destination FILE path —
        // presumably it ensures/creates the containing directory; confirm in ServiceTools.
        if (!ServiceTools.CheckIfDirectoryExists(strFilenameMoveTo))
        {
            theLogWindow = ServiceTools.LogAText(theLogWindow,
                "Unable to move file " + Environment.NewLine + inf.FullName + Environment.NewLine +
                "to:" + Environment.NewLine + strFilenameMoveTo + Environment.NewLine +
                "Directory couldn`t be located or created");
            continue;
        }
        File.Move(inf.FullName, strFilenameMoveTo);
    }
    selfWorker.ReportProgress(0);
}
/// <summary>
/// Couples a snapshot file with the shot date/time parsed from its
/// conventionally formed file name.
/// </summary>
/// <param name="_fInfo">Snapshot file descriptor.</param>
public FileInfoWithSnapshotDateTime(FileInfo _fInfo)
{
    finfo = _fInfo;
    dateTime = ConventionalTransitions.DateTimeOfSkyImageFilename(_fInfo.Name);
}
/// <summary>
/// BackgroundWorker entry point: walks a source directory tree of sky-camera JPG
/// snapshots and copies every snapshot shot within <c>filterTolerance</c> of a round
/// hour to the destination directory (into a per-date subdirectory when it can be
/// created), together with its GrIx/YRGB stats XML file and the nearest-in-time
/// concurrent-data XML file that contains valid GPS data.
/// </summary>
/// <param name="sender">The <see cref="BackgroundWorker"/> running this handler.</param>
/// <param name="e">
/// <c>e.Argument</c> is <c>object[4]</c>:
/// { fromPath, concurrentDataFilesPath, toPath, imagesStatsXMLfilesDir }.
/// </param>
private void bgwCopier_DoWork(object sender, DoWorkEventArgs e)
{
    BackgroundWorker SelfWorker = sender as BackgroundWorker;
    object[] bgwArgs = e.Argument as object[];
    string fromPath = bgwArgs[0] as string;
    string concurrentDataFilesPath = bgwArgs[1] as string;
    string toPath = bgwArgs[2] as string;
    string imagesStatsXMLfilesDir = bgwArgs[3] as string;

    DirectoryInfo dir = new DirectoryInfo(fromPath);
    // Ensure the destination path ends with a separator so file names can be appended.
    String destDirectory = toPath +
        ((toPath.Last() == Path.DirectorySeparatorChar) ? "" : Path.DirectorySeparatorChar.ToString());
    if (!dir.Exists)
    {
        theLogWindow = ServiceTools.LogAText(theLogWindow,
            "Операция не выполнена. Не найдена директория:" + Environment.NewLine + fromPath +
            Environment.NewLine);
        return;
    }

    FileInfo[] FileList2Process = dir.GetFiles("*.jpg", SearchOption.AllDirectories);

    // (file name, full path) of every available GrIx/YRGB stats XML file.
    List<Tuple<string, string>> imagesStatsXMLfiles = new List<Tuple<string, string>>();
    if (Directory.Exists(imagesStatsXMLfilesDir))
    {
        imagesStatsXMLfiles = (new DirectoryInfo(imagesStatsXMLfilesDir)).EnumerateFiles(
                ConventionalTransitions.ImageGrIxYRGBstatsFileNamesPattern(),
                SearchOption.AllDirectories)
            .ToList()
            .ConvertAll(fInfo => new Tuple<string, string>(fInfo.Name, fInfo.FullName));
    }

    // (full path, UTC timestamp) of every concurrent-data XML file. The timestamp is
    // parsed from the file name, e.g. "data-2015-12-15T06-12-56.0590302Z.xml".
    DirectoryInfo dirConcurrentDataFiles = new DirectoryInfo(concurrentDataFilesPath);
    List<Tuple<string, DateTime>> lConcurrentDataFiles = dirConcurrentDataFiles
        .EnumerateFiles(ConventionalTransitions.ImageConcurrentDataFilesNamesPattern(),
            SearchOption.AllDirectories)
        .ToList()
        .ConvertAll(fInfo =>
        {
            string strDateTimeOfFile = Path.GetFileNameWithoutExtension(fInfo.Name).Substring(5, 28);
            // The time-of-day part uses '-' instead of ':' (illegal in file names) — restore it.
            strDateTimeOfFile = strDateTimeOfFile.Substring(0, 11) +
                                strDateTimeOfFile.Substring(11).Replace('-', ':');
            DateTime currFileDT = DateTime.Parse(strDateTimeOfFile, null,
                System.Globalization.DateTimeStyles.AdjustToUniversal);
            currFileDT = DateTime.SpecifyKind(currFileDT, DateTimeKind.Utc);
            return new Tuple<string, DateTime>(fInfo.FullName, currFileDT);
        });

    int filesCount = FileList2Process.Length;
    theLogWindow = ServiceTools.LogAText(theLogWindow,
        "searching in directory: " + dir.FullName + Environment.NewLine);
    theLogWindow = ServiceTools.LogAText(theLogWindow,
        "files found count: " + filesCount + Environment.NewLine);

    List<DateTime> listUsedHours = new List<DateTime>();
    int counter = 0;
    foreach (FileInfo fileInfo in FileList2Process)
    {
        if (SelfWorker.CancellationPending)
        {
            break;
        }
        counter++;
        // Progress is reported in tenths of a percent (0..1000) — the ProgressChanged
        // handler is presumably scaled accordingly; TODO confirm.
        double percCounter = (double)counter * 1000.0d / (double)filesCount;
        SelfWorker.ReportProgress(Convert.ToInt32(percCounter));

        // NOTE(review): the original code loaded every image via Image.FromFile() to read
        // its EXIF date, but that value was never used (the date is taken from the file
        // name below) and the Image was never disposed — a handle/memory leak that also
        // kept each file locked. The dead image loading has been removed.

        DateTime curImgDateTime;
        DateTime theHour;
        try
        {
            curImgDateTime = ConventionalTransitions.DateTimeOfSkyImageFilename(fileInfo.Name);
            theHour = RoundToHour(curImgDateTime);
        }
        catch (Exception)
        {
            // File name does not follow the snapshot naming convention — skip this file.
            continue;
        }

        // Only snapshots shot within filterTolerance of a round hour are copied.
        if (new TimeSpan(Math.Abs((theHour - curImgDateTime).Ticks)) > filterTolerance)
        {
            continue;
        }

        #region copy the image file itself
        listUsedHours.Add(theHour);
        string dateDirectorySuffix = curImgDateTime.ToString("yyyy-MM-dd");
        string currDateDestDirectory = destDirectory + dateDirectorySuffix + Path.DirectorySeparatorChar;
        if (!ServiceTools.CheckIfDirectoryExists(currDateDestDirectory))
        {
            // Per-date subdirectory could not be located or created — fall back to the root.
            currDateDestDirectory = destDirectory;
        }
        String newFileName = currDateDestDirectory + fileInfo.Name;
        File.Copy(fileInfo.FullName, newFileName);
        theLogWindow = ServiceTools.LogAText(theLogWindow,
            "COPY: " + fileInfo.FullName + " >>> " + newFileName + Environment.NewLine);
        #endregion copy the image file itself

        #region find and copy the GrIx,YRGB stats data file
        if (imagesStatsXMLfiles.Any())
        {
            string xmlStatsFileName =
                ConventionalTransitions.ImageGrIxYRGBstatsDataFileName(fileInfo.FullName, "", false);
            Tuple<string, string> foundXMLfile =
                imagesStatsXMLfiles.Find(tpl => tpl.Item1 == xmlStatsFileName);
            if (foundXMLfile != null)
            {
                string sStatsXMLfilename = foundXMLfile.Item2;
                string newStatsXMLfilename = currDateDestDirectory + foundXMLfile.Item1;
                File.Copy(sStatsXMLfilename, newStatsXMLfilename);
                theLogWindow = ServiceTools.LogAText(theLogWindow,
                    "COPY: " + sStatsXMLfilename + " >>> " + newStatsXMLfilename + Environment.NewLine);
            }
            else
            {
                theLogWindow = ServiceTools.LogAText(theLogWindow,
                    "========== ERROR: couldn`t find GrIx,YRGB stats XML file" + Environment.NewLine);
            }
        }
        #endregion find and copy the GrIx,YRGB stats data file

        #region find and copy concurrent data XML file
        if (lConcurrentDataFiles.Any())
        {
            // Order all concurrent-data files by time distance to this snapshot.
            // (The null-tolerant comparison of the original was unnecessary: ConvertAll
            // over a non-null source never yields null tuples.)
            List<Tuple<string, TimeSpan>> lCurrFileConcurrentDataNearest =
                lConcurrentDataFiles.ConvertAll(tpl => new Tuple<string, TimeSpan>(tpl.Item1,
                    new TimeSpan(Math.Abs((tpl.Item2 - curImgDateTime).Ticks))));
            lCurrFileConcurrentDataNearest.Sort((tpl1, tpl2) => tpl1.Item2.CompareTo(tpl2.Item2));

            // Walk from the nearest file outward until one with valid GPS data is found.
            // BUGFIX: the original loop had no bounds check and threw
            // ArgumentOutOfRangeException when no file contained valid GPS data.
            GPSdata gps = new GPSdata();
            Tuple<string, TimeSpan> nearestConcurrentDataFile = null;
            int concurrentDataFileIdx = 0;
            while (!gps.validGPSdata && concurrentDataFileIdx < lCurrFileConcurrentDataNearest.Count)
            {
                nearestConcurrentDataFile = lCurrFileConcurrentDataNearest[concurrentDataFileIdx];
                Dictionary<string, object> dictSavedData =
                    ServiceTools.ReadDictionaryFromXML(nearestConcurrentDataFile.Item1);
                gps = new GPSdata((string)dictSavedData["GPSdata"],
                    GPSdatasources.CloudCamArduinoGPS,
                    DateTime.Parse((string)dictSavedData["GPSDateTimeUTC"], null,
                        System.Globalization.DateTimeStyles.RoundtripKind));
                concurrentDataFileIdx++;
            }

            if (gps.validGPSdata && nearestConcurrentDataFile != null)
            {
                string currValidConcurrentDataFile = nearestConcurrentDataFile.Item1;
                string currValidConcurrentDataFileToCopyTo = currDateDestDirectory + "data-" +
                    Path.GetFileNameWithoutExtension(fileInfo.FullName) + ".xml";
                File.Copy(currValidConcurrentDataFile, currValidConcurrentDataFileToCopyTo);
                theLogWindow = ServiceTools.LogAText(theLogWindow,
                    "COPY: " + currValidConcurrentDataFile + " >>> " +
                    currValidConcurrentDataFileToCopyTo + Environment.NewLine);
            }
            else
            {
                theLogWindow = ServiceTools.LogAText(theLogWindow,
                    "========== ERROR: no concurrent data file with valid GPS data found for " +
                    fileInfo.FullName + Environment.NewLine);
            }
        }
        else
        {
            theLogWindow = ServiceTools.LogAText(theLogWindow,
                "========== ERROR: couldn`t find concurrent data file for " + fileInfo.FullName +
                Environment.NewLine);
        }
        #endregion find and copy concurrent data XML file

        theLogWindow.ClearLog();
    }
}
/// <summary>
/// Reads the most recent weather-station (WS), road-surface (R2S) and wind (Ventus)
/// sensor XML files found under <c>DataStoreDirectory</c> and renders them as one
/// human-readable report. "Most recent" is by file creation time (UTC).
/// </summary>
/// <returns>
/// The combined sensor report, or a "no data yet" notice when any of the three
/// sensor file sets is missing or its newest file cannot be deserialized.
/// </returns>
/// <exception cref="DirectoryNotFoundException">DataStoreDirectory does not exist.</exception>
private async Task<string> ObtainLatestMeteoParameters()
{
    // NOTE: method is async with no awaits (CS1998), kept to preserve the exact
    // Task-based exception semantics callers may rely on.
    const string noDataMessage = "No data processed yet. Please wait for a couple of minutes.";

    if (!Directory.Exists(DataStoreDirectory))
    {
        throw new DirectoryNotFoundException("unable to locate directory: " + DataStoreDirectory);
    }

    DirectoryInfo dir = new DirectoryInfo(DataStoreDirectory);
    string retStr = "";

    // The three sensors are handled identically: locate the newest matching XML file,
    // deserialize it, and append its textual representation to the report.
    string wsReport;
    if (!TryReadLatestSensorReport<LufftWSdata>(dir,
            ConventionalTransitions.WSUMBdataFileNamePattern(), out wsReport))
    {
        return noDataMessage;
    }
    if (wsReport != null)
    {
        retStr += wsReport + Environment.NewLine + Environment.NewLine;
    }

    string r2sReport;
    if (!TryReadLatestSensorReport<LufftR2Sdata>(dir,
            ConventionalTransitions.R2SUMBdataFileNamePattern(), out r2sReport))
    {
        return noDataMessage;
    }
    if (r2sReport != null)
    {
        retStr += r2sReport + Environment.NewLine + Environment.NewLine;
    }

    string ventusReport;
    if (!TryReadLatestSensorReport<LufftVentusdata>(dir,
            ConventionalTransitions.VentusUMBdataFileNamePattern(), out ventusReport))
    {
        return noDataMessage;
    }
    if (ventusReport != null)
    {
        retStr += ventusReport + Environment.NewLine + Environment.NewLine;
    }

    return retStr;
}

/// <summary>
/// Finds the newest file (by creation time UTC) matching <paramref name="fileNamePattern"/>
/// anywhere under <paramref name="dir"/> and deserializes it as <typeparamref name="TData"/>.
/// </summary>
/// <param name="dir">Root directory to search recursively.</param>
/// <param name="fileNamePattern">Search pattern for the sensor's XML files.</param>
/// <param name="report">
/// On success, the deserialized object's ToString() — or null when the file
/// deserialized to null; undefined (null) on failure.
/// </param>
/// <returns>false when no matching file exists or deserialization throws; true otherwise.</returns>
private static bool TryReadLatestSensorReport<TData>(DirectoryInfo dir, string fileNamePattern,
    out string report) where TData : class
{
    report = null;
    List<FileInfo> xmlFilesInfo = dir.GetFiles(fileNamePattern, SearchOption.AllDirectories).ToList();
    if (xmlFilesInfo.Count == 0)
    {
        return false;
    }
    // Sort ascending so the newest file is last.
    xmlFilesInfo.Sort((finfo1, finfo2) => finfo1.CreationTimeUtc.CompareTo(finfo2.CreationTimeUtc));
    try
    {
        TData data = (TData)ServiceTools.ReadObjectFromXML(xmlFilesInfo.Last().FullName, typeof(TData));
        report = (data == null) ? null : data.ToString();
        return true;
    }
    catch (Exception)
    {
        // Corrupt or partially-written XML — the caller reports "no data yet".
        return false;
    }
}
/// <summary>
/// Reads the latest processed-and-predicted sky image data XML from
/// <c>DataStoreDirectory</c> (newest by creation time UTC) and formats a human-readable
/// summary: snapshot time, predicted sun disk condition (SDC), total cloud cover, and a
/// table of SDC class probabilities.
/// </summary>
/// <returns>The formatted report, or a "please wait" notice when nothing has been analyzed yet.</returns>
/// <exception cref="DirectoryNotFoundException">DataStoreDirectory does not exist.</exception>
private async Task<string> ReadCurrentCCinfo()
{
    // NOTE: async with no awaits (CS1998), kept to preserve Task-based exception semantics.
    string retStr = "";
    if (!Directory.Exists(DataStoreDirectory))
    {
        throw new DirectoryNotFoundException("unable to locate directory: " + DataStoreDirectory);
    }

    DirectoryInfo dir = new DirectoryInfo(DataStoreDirectory);
    List<FileInfo> lXMLFilesInfo = dir.GetFiles(
        ConventionalTransitions.ImageProcessedAndPredictedDataFileNamesPattern(),
        SearchOption.AllDirectories).ToList();
    if (lXMLFilesInfo.Count == 0)
    {
        return "No snapshots has been analyzed yet. Please wait for a couple of minutes.";
    }

    // Sort ascending so the newest file is last.
    lXMLFilesInfo.Sort((finfo1, finfo2) => finfo1.CreationTimeUtc.CompareTo(finfo2.CreationTimeUtc));

    // BUGFIX: the original wrapped this call in "catch (Exception ex) { throw ex; }",
    // which rethrows but destroys the stack trace. The pointless try/catch is removed,
    // which is equivalent to the correct "throw;".
    SkyImagesProcessedAndPredictedData data = (SkyImagesProcessedAndPredictedData)
        ServiceTools.ReadObjectFromXML(lXMLFilesInfo.Last().FullName,
            typeof(SkyImagesProcessedAndPredictedData));

    if (data != null)
    {
        // Formats the probability of one SDC class as a right-aligned percentage cell.
        Func<SunDiskCondition, string> sdcCell = sdc =>
            String.Format("{0,9}",
                (data.sdcDecisionProbabilities.First(prob => prob.sdc == sdc)
                     .sdcDecisionProbability * 100.0d).ToString("F2") + "%");

        // BUGFIX: typo "finction" -> "function" in the user-facing message.
        retStr += "Please note that this function is still in BETA version!" + Environment.NewLine +
                  "date of snapshot analyzed (UTC): " + data.imageShootingDateTimeUTC.ToString("u") +
                  Environment.NewLine +
                  "Sun disk condition: " + data.PredictedSDC.ToString() + Environment.NewLine +
                  "Total cloud cover: " + data.PredictedCC.CloudCoverTotal + " (of 8)" +
                  Environment.NewLine + Environment.NewLine +
                  "SDC predictions probabilities:" + Environment.NewLine;

        string strToShowSDCs = Environment.NewLine +
            "| NoSun | Sun0 | Sun1 | Sun2 |" + Environment.NewLine +
            "|" + sdcCell(SunDiskCondition.NoSun) +
            "|" + sdcCell(SunDiskCondition.Sun0) +
            "|" + sdcCell(SunDiskCondition.Sun1) +
            "|" + sdcCell(SunDiskCondition.Sun2) + "|";
        retStr += strToShowSDCs;
    }
    return retStr;
}