/// <summary>
/// Recursively compares two directory trees for structural equality: same number of
/// entries at each level, matching names, and (via the FileInfo[] overload defined
/// elsewhere in this class) matching files.
/// </summary>
/// <param name="source">Source directories; when null, defaults to the subdirectories of badMP3BasicPath.</param>
/// <param name="target">Target directories; when null, defaults to the subdirectories of goodMP3BasicPath.</param>
/// <returns>true when the two trees match, false otherwise.</returns>
public static bool compareSourceAndTarget(DirectoryInfo[] source, DirectoryInfo[] target)
{
    if (source == null)
        source = new DirectoryInfo(badMP3BasicPath).GetDirectories();
    if (target == null)
        // BUG FIX: the original assigned the default to `source` here, leaving
        // `target` null and crashing on target.Count() below.
        target = new DirectoryInfo(goodMP3BasicPath).GetDirectories();

    if (source.Length != target.Length)
        return false;

    for (int i = 0; i < target.Length; i++)
    {
        // BUG FIX: this name check was unreachable in the original (it followed
        // an unconditional return inside the loop). Compare names first.
        if (source[i].Name != target[i].Name)
            return false;

        // BUG FIX: the original returned the result of the FIRST pair's recursion,
        // so sibling directories after index 0 were never compared. Accumulate
        // instead: fail fast on a mismatch, otherwise keep going.
        if (source[i].GetDirectories().Length > 0 && target[i].GetDirectories().Length > 0)
        {
            if (!compareSourceAndTarget(source[i].GetDirectories(), target[i].GetDirectories()))
                return false;
        }
        else
        {
            if (!compareSourceAndTarget(source[i].GetFiles(), target[i].GetFiles()))
                return false;
        }
    }
    return true;
}
/// <summary>
/// Paged index of blog posts. Posts are the *.html files in the configured DNX
/// folder, newest first; each post's body is fetched over HTTP from the
/// configured domain (or the current request host).
/// </summary>
/// <param name="Page">1-based page number; clamped to [1, PageCount].</param>
public IActionResult Index(int? Page)
{
    ViewBag.Title = "Index";

    // Materialize once: the original re-enumerated the ordered sequence for
    // every Count()/Skip/Take below.
    var files = new DirectoryInfo(_appsetting.DNXFolder)
        .GetFileSystemInfos("*.html")
        .OrderByDescending(f => f.CreationTime)
        .ToList();

    //---Pagenavi start here ---
    // Calculate the number of pages (ceiling division).
    var pnum = Page ?? 1;
    var PageCount = files.Count / _appsetting.PageNaviNum + (files.Count % _appsetting.PageNaviNum == 0 ? 0 : 1);
    pnum = pnum > PageCount ? PageCount : (pnum < 1 ? 1 : pnum);
    ViewBag.Page = pnum;
    ViewBag.PageCount = PageCount;
    // Pick up the number of posts which assigned by appsetting.json file.
    var qf = files.Skip((pnum - 1) * _appsetting.PageNaviNum).Take(_appsetting.PageNaviNum);
    //---Pagenavi end here ---

    var blogs = new List<Blog>();
    // BUG FIX: the original created a new, never-disposed HttpClient per file,
    // leaking sockets under load. One client serves the whole request.
    using (var client = new System.Net.Http.HttpClient())
    {
        foreach (var item in qf)
        {
            var tblog = new Blog();
            tblog.Id = item.Name.Split('.').FirstOrDefault();
            tblog.CreateTime = item.CreationTime;
            // NOTE(review): .Result blocks the request thread; consider making this
            // action `async Task<IActionResult>` and awaiting GetStringAsync.
            tblog.Content = client.GetStringAsync(
                "http://" + (_appsetting.RequestDomain == "" ? Request.Host.ToString() : _appsetting.RequestDomain)
                + "/" + _appsetting.FilePath + "/" + item.Name).Result;
            blogs.Add(tblog);
        }
    }
    return View(blogs);
}
/// <summary>
/// Tracks a blob through a sequence of *.jpg frames, starting at the given image
/// and walking first forward then backward through the folder, saving a .png
/// copy of every frame in which the blob was found.
/// </summary>
/// <param name="path">Folder containing the .jpg frame sequence.</param>
/// <param name="image">Full path of the starting frame (must match a file in <paramref name="path"/>).</param>
/// <param name="resultPath">Folder that receives the "<index>.png" output frames.</param>
public void doTracking(String path, String image, String resultPath)
{
    FileInfo[] list = new DirectoryInfo(path).GetFiles("*.jpg");

    // Locate the index of the starting frame within the folder listing.
    int posicaoImage = 0;
    for (int i = 0; i < list.Length; i++)
    {
        if (image.Equals(list[i].FullName))
        {
            posicaoImage = i;
            break;
        }
    }

    Blob primeiroBlob = getBlobMaisAlto(image);
    Blob trackBlob = primeiroBlob;

    // Track forward from the start frame until the blob is lost.
    for (int i = posicaoImage; i < list.Length; i++)
    {
        try
        {
            trackBlob = tracking(trackBlob, list[i].FullName);
            if (trackBlob == null)
                break;
            // BUG FIX: dispose the Bitmap — the original leaked a GDI handle per frame.
            using (var frame = new Bitmap(list[i].FullName))
            {
                frame.Save(resultPath + @"\" + i + ".png");
            }
        }
        catch (Exception)
        {
            // Best-effort: a frame that fails to load or track is skipped,
            // matching the original behavior.
        }
    }

    // Restart from the selected frame and track backward.
    // NOTE(review): the loop condition `i > 0` never processes frame 0 — confirm intended.
    trackBlob = getBlobMaisAlto(image);
    for (int i = posicaoImage; i > 0; i--)
    {
        try
        {
            trackBlob = tracking(trackBlob, list[i].FullName);
            if (trackBlob == null)
                break;
            using (var frame = new Bitmap(list[i].FullName))
            {
                frame.Save(resultPath + @"\" + i + ".png");
            }
        }
        catch (Exception)
        {
            // Best-effort: skip unreadable/untrackable frames.
        }
    }
}
/// <summary>
/// Runs "npm pack" (via cmd.exe, in mPackModulesDest) over every directory in
/// <paramref name="dirs"/>, blocking until the pack completes, and adds the
/// directory count to mNumOfPackedPackages.
/// </summary>
/// <param name="dirs">Module directories to pack; a no-op when empty.</param>
private static void Pack(DirectoryInfo[] dirs)
{
    int numOfPackagesToPack = dirs.Length;
    if (numOfPackagesToPack == 0)
    {
        return;
    }
    mNumOfPackedPackages += numOfPackagesToPack;

    var startInfo = new System.Diagnostics.ProcessStartInfo();
    startInfo.WorkingDirectory = mPackModulesDest;
    startInfo.WindowStyle = System.Diagnostics.ProcessWindowStyle.Hidden;
    startInfo.FileName = "cmd.exe";

    StringBuilder listOfModules = new StringBuilder();
    foreach (var d in dirs)
    {
        // BUG FIX: quote each path so directories whose names contain spaces
        // survive cmd.exe argument splitting (the original passed them bare).
        listOfModules.Append("\"" + d.FullName + "\" ");
        Console.WriteLine("packing " + d.FullName);
    }
    startInfo.Arguments = "/C npm pack " + listOfModules;

    // Process is IDisposable; the original never disposed it. (An unused
    // DirUtils local was also removed.)
    using (var process = new System.Diagnostics.Process())
    {
        process.StartInfo = startInfo;
        process.Start();
        process.WaitForExit();
    }
}
/// <summary>
/// Rebuilds the sites list view from the Apache vhosts config: a vhost is listed
/// as "OK" when it has either a hosts-file entry or a folder under htdocs that
/// matches its ServerName.
/// </summary>
public void BuildExistingSiteList()
{
    listViewSites.Items.Clear();

    _XAMPPInstallDir = ConfigManager.GetXAMPPInstallDir();
    _XAMPPWebRootPath = Path.Combine(_XAMPPInstallDir, "htdocs");
    _XAMPPApacheVhostsConfigFile = Path.Combine(_XAMPPInstallDir, "apache\\conf\\extra\\httpd-vhosts.conf");
    if (!CheckSettings())
    {
        return;
    }

    _vhostsManager = new ApacheVhostsConfigManager(_XAMPPApacheVhostsConfigFile, _XAMPPWebRootPath);
    // (The original cleared the list a second time here — redundant, removed.)

    List<VhostsEntry> vhostEntries = _vhostsManager.GetVHostEntries();
    List<HostsEntry> hostsEntries = _hostsManager.GetHostsEntries();
    List<DirectoryInfo> sites = new DirectoryInfo(_XAMPPWebRootPath)
        .GetDirectories("*", SearchOption.TopDirectoryOnly).ToList();

    foreach (VhostsEntry vHostEntry in vhostEntries)
    {
        // Any() instead of Count(...) > 0: stops at the first match.
        if (hostsEntries.Any(p => p.DomainName == vHostEntry.ServerName) ||
            sites.Any(p => p.Name == vHostEntry.ServerName))
        {
            ListViewItem lvi = new ListViewItem(vHostEntry.ServerName);
            lvi.SubItems.Add("OK");
            listViewSites.Items.Add(lvi);
        }
    }
}
/// <summary>
/// Scans a path recursively and hands every file with a valid extension to
/// ProcessFile, registering each new file in the shared ParserStatus map.
/// </summary>
/// <param name="pathId">Identifier of the configured path, forwarded to ProcessFile.</param>
/// <param name="path">Root directory to scan (all subdirectories included).</param>
/// <param name="validFileExtensions">Extensions (including the dot) that are accepted; others are skipped.</param>
private static void ParsePath(int pathId, string path, IList<string> validFileExtensions)
{
    Logger.Info("Scanning Path {0} for files", path);

    var files = new DirectoryInfo(path).GetFiles("*", SearchOption.AllDirectories);
    Logger.Debug("Got {0} files in path {1}", files.Length, path);

    foreach (var file in files)
    {
        if (!validFileExtensions.Contains(file.Extension))
        {
            Logger.Trace("Invalid file extension: {0} on file {1}. Skipping", file.Extension, file.FullName);
            continue;
        }

        lock (ParserStatusLockObject)
        {
            // BUG FIX: the original checked ContainsKey(file.ToString()) but added
            // under file.FullName. FileSystemInfo.ToString() is not guaranteed to
            // equal FullName, so entries could be re-added or missed. One key form.
            if (!ParserStatus.ContainsKey(file.FullName))
            {
                ParserStatus.Add(file.FullName, new ParseStatus(file.FullName));
            }
        }

        ProcessFile(pathId, path, file.FullName);
    }
}
/// <summary>
/// Integration test: runs the shipper alongside a dummy service that rolls its
/// log files, then asserts that all 6 rolled files exist — i.e. the shipper's
/// file handles did not block the rolling.
/// </summary>
public void ShippingShouldNotBlockLogFileRolling()
{
    Process shipper = null;
    Process processWithLogFileRolling = null;

    // Swap in the DummyService config, keeping a .bak of the original so the
    // finally block below can restore it.
    DeleteOldLogFiles();
    File.Delete("LogSearchShipper.exe.config.bak");
    File.Move("LogSearchShipper.exe.config", "LogSearchShipper.exe.config.bak");
    File.Move("LogSearchShipper.exe.config.ShipDummyService", "LogSearchShipper.exe.config");
    try
    {
        shipper = ProcessUtils.StartProcess(Environment.CurrentDirectory + @"\LogSearchShipper.exe", "-instance:integrationtest001");
        processWithLogFileRolling = ProcessUtils.StartProcess(Environment.CurrentDirectory + @"\DummyServiceWithLogRolling.exe", "");

        // Give the dummy service time to roll its logs while the shipper runs.
        System.Threading.Thread.Sleep(TimeSpan.FromSeconds(10));

        //There should be 6 DummyServiceWithLogRolling.log.* files, unless the shipper has blocked file rolling
        var logFiles = new DirectoryInfo(Environment.CurrentDirectory).GetFiles("DummyServiceWithLogRolling.log.*");
        Assert.AreEqual(6, logFiles.Length);
    }
    finally
    {
        // Always shut the processes down and restore the original config.
        Utils.ShutdownProcess(shipper);
        Utils.ShutdownProcess(processWithLogFileRolling);
        File.Delete("LogSearchShipper.exe.config");
        File.Move("LogSearchShipper.exe.config.bak", "LogSearchShipper.exe.config");
    }
}
/// <summary>
/// Copies all "DataBackup*" trace directories — plus any "ConsumptionTemp*" /
/// "Roslyn-*" files — from the source folder into a new timestamped
/// "PerfResults-..." folder under the destination.
/// </summary>
/// <param name="sourceFolderPath">Folder containing the trace output; logged and skipped when missing.</param>
/// <param name="destinationFolderPath">Folder under which the PerfResults folder is created.</param>
public static void UploadTraces(string sourceFolderPath, string destinationFolderPath)
{
    if (!Directory.Exists(sourceFolderPath))
    {
        Log($"sourceFolderPath: {sourceFolderPath} does not exist");
        return;
    }

    var directoriesToUpload = new DirectoryInfo(sourceFolderPath).GetDirectories("DataBackup*");
    if (directoriesToUpload.Length == 0)
    {
        Log($"There are no trace directory starting with DataBackup in {sourceFolderPath}");
        return;
    }

    // NOTE(review): "hh" is the 12-hour clock; two runs 12 hours apart can
    // produce the same folder name — confirm "HH" isn't intended.
    var perfResultDestinationFolderName = string.Format("PerfResults-{0:yyyy-MM-dd_hh-mm-ss-tt}", DateTime.Now);
    var destination = Path.Combine(destinationFolderPath, perfResultDestinationFolderName);

    // BUG FIX: ensure the destination root exists — the original never created it,
    // so the File.Copy calls at the bottom could fail on a missing directory.
    Directory.CreateDirectory(destination);

    foreach (var directoryToUpload in directoriesToUpload)
    {
        var destinationDataBackupDirectory = Path.Combine(destination, directoryToUpload.Name);
        // BUG FIX: the original check was inverted — it called CreateDirectory only
        // when the directory already existed. Create it when it is missing.
        if (!Directory.Exists(destinationDataBackupDirectory))
        {
            Directory.CreateDirectory(destinationDataBackupDirectory);
        }
        CopyDirectory(directoryToUpload.FullName, destinationDataBackupDirectory);
    }

    foreach (var file in new DirectoryInfo(sourceFolderPath).GetFiles().Where(
        f => f.Name.StartsWith("ConsumptionTemp", StringComparison.OrdinalIgnoreCase) ||
             f.Name.StartsWith("Roslyn-", StringComparison.OrdinalIgnoreCase)))
    {
        File.Copy(file.FullName, Path.Combine(destination, file.Name));
    }
}
/// <summary>
/// Returns true when the directory at <paramref name="path"/> contains at least
/// one non-hidden file or non-hidden subdirectory; false when it is empty,
/// contains only hidden entries, or does not exist.
/// </summary>
/// <param name="path">Directory to inspect.</param>
public static Boolean HasNonHidden(String path)
{
    if (!Directory.Exists(path))
    {
        return false;
    }
    var dir = new DirectoryInfo(path);
    // Any() instead of Count() > 0: stops at the first visible entry instead of
    // counting them all; one DirectoryInfo instead of two.
    return dir.GetFiles().Any(x => (x.Attributes & FileAttributes.Hidden) == 0)
        || dir.GetDirectories().Any(x => (x.Attributes & FileAttributes.Hidden) == 0);
}
/// <summary>
/// GetItems must return exactly one item per directory plus one per file in the
/// target path.
/// </summary>
public void NavigationOnDiskService_GetItems_Check_Return_Count()
{
    //arrange
    INavigationOnDiskService navigationOnDiskService = new NavigationOnDiskService();
    // NOTE(review): depends on C:\ being present and readable on the test
    // machine; a temp folder fixture would make this deterministic.
    const string path = @"C:\";

    //act
    var items = navigationOnDiskService.GetItems(path);
    // One DirectoryInfo (the original built two) and the array Length property
    // instead of the Enumerable.Count() extension.
    var root = new DirectoryInfo(path);
    var expectedCount = root.GetDirectories().Length + root.GetFiles().Length;

    //assert
    Assert.AreEqual(expectedCount, items.Count);
}
/// <summary>
/// Prepares the log folder: creates it when missing and deletes the oldest log
/// files until the count drops below the configured maximum (leaving room for
/// the log file of this run).
/// </summary>
public static void InitLogger()
{
    if (!Directory.Exists(LogFolder))
        Directory.CreateDirectory(LogFolder);

    // Oldest first, so deletion always removes from the front of the list.
    var logs = new DirectoryInfo(LogFolder).GetFiles().OrderBy(s => s.LastWriteTime).ToList();

    // Count property instead of the Enumerable.Count() extension on a List.
    while (logs.Count >= ServerMain.settings.maximumLogs)
    {
        var oldest = logs[0];
        oldest.Delete();
        logs.RemoveAt(0);
    }
}
/// <summary>
/// Finds the *.save game with the most recent Date Modified.
/// This is usually the latest save game, but can be used to force a specific save to load instead
/// </summary>
/// <param name="saveFolder">The folder to search for the latest .save file</param>
/// <exception cref="FileNotFoundException">Thrown when the folder contains no .save files.</exception>
static public void LoadLatestSaveFile(string saveFolder)
{
    // Sort newest-first and take the head: the original sorted ascending and then
    // enumerated the ordered sequence twice (Count() followed by Last()).
    var newestSave = new System.IO.DirectoryInfo(saveFolder)
        .GetFileSystemInfos("*.save")
        .OrderByDescending(f => f.LastWriteTime)
        .FirstOrDefault();
    if (newestSave != null)
    {
        LoadSaveFile(newestSave.FullName);
    }
    else
    {
        var errorMessage = String.Format("Cannot find the save file in folder: {0}", saveFolder);
        throw new FileNotFoundException(errorMessage);
    }
}
/// <summary>
/// Check the node libraries defined in the package json file are valid and have a valid certificate
/// </summary>
/// <param name="packageDirectoryPath">path to package location</param>
/// <param name="discoveredPkg">package object to check</param>
/// <exception cref="LibraryLoadFailedException">
/// Thrown when dlls ship without any declared node library, when a node library
/// name is malformed, or when a node library's certificate is invalid.
/// </exception>
private static void CheckPackageNodeLibraryCertificates(string packageDirectoryPath, Package discoveredPkg)
{
    var dllfiles = new System.IO.DirectoryInfo(discoveredPkg.BinaryDirectory).EnumerateFiles("*.dll");
    // A package that ships dlls must declare at least one node library.
    // Any() instead of Count() so the lazy dll enumeration stops at the first hit.
    if (!discoveredPkg.Header.node_libraries.Any() && dllfiles.Any())
    {
        throw new LibraryLoadFailedException(packageDirectoryPath, String.Format(
            Resources.InvalidPackageNoNodeLibrariesDefinedInPackageJson,
            discoveredPkg.Name, discoveredPkg.RootDirectory));
    }

    foreach (var nodeLibraryAssembly in discoveredPkg.Header.node_libraries)
    {
        //Try to get the assembly name from the manifest file
        string filename;
        try
        {
            filename = new AssemblyName(nodeLibraryAssembly).Name + ".dll";
        }
        catch
        {
            throw new LibraryLoadFailedException(packageDirectoryPath, String.Format(
                Resources.InvalidPackageMalformedNodeLibraryDefinition,
                discoveredPkg.Name, discoveredPkg.RootDirectory));
        }

        //Verify the node library exists in the package bin directory and has a valid certificate
        var filepath = Path.Combine(discoveredPkg.BinaryDirectory, filename);
        try
        {
            CertificateVerification.CheckAssemblyForValidCertificate(filepath);
        }
        catch (Exception e)
        {
            throw new LibraryLoadFailedException(packageDirectoryPath, String.Format(
                Resources.InvalidPackageNodeLibraryIsNotSigned,
                discoveredPkg.Name, discoveredPkg.RootDirectory, e.Message));
        }
    }

    // Every declared node library passed verification.
    discoveredPkg.RequiresSignedEntryPoints = true;
}
/// <summary>
/// Entry point: prints size statistics for the local test folder, then runs two
/// interruptible passes — building the local Repository and syncing it with the
/// master folder.
/// </summary>
static void Main(string[] args)
{
    InitalizeLog4Net();

    string localDirPath = @"C:\Outils\FileTest\Local";
    string masterDirPath = @"C:\Users\Laurent\Pictures\Photos LIGER\";

    // BUG FIX: EnumerateFiles is deferred, so the original walked the whole
    // directory tree twice (once for Count(), once for Sum). Snapshot once.
    var files = new DirectoryInfo(localDirPath).EnumerateFiles("*", SearchOption.AllDirectories).ToList();
    int count = files.Count;
    double totalLength = files.Sum(f => f.Length) / 1024.0 / 1024.0;
    // Guard the average against division by zero when the folder is empty.
    if (count > 0)
    {
        Console.WriteLine("{0} fichiers. Taille totale : {1:0.00} Go, Taille moy. : {2:0.00} mo",
            count, totalLength / 1024, totalLength / count);
    }

    // First pass: build the local repository, cancellable from the keyboard.
    CancellationTokenSource cts = new CancellationTokenSource();
    var breakTask = Task.Factory.StartNew(() => { BreakExecution(cts); }, cts.Token);
    Repository syncDir = new Repository(localDirPath, cts);

    Console.WriteLine();

    // Second pass: same cancellation setup, this time syncing to the master folder.
    cts = new CancellationTokenSource();
    breakTask = Task.Factory.StartNew(() => { BreakExecution(cts); }, cts.Token);
    syncDir.SyncWithMaster(masterDirPath, new SyncOptions() { MaxLocalStorage = FileLength.FromGigaBytes(7) }, cts);

    // (A long commented-out duplicate-reporting block was removed here.)

    Console.WriteLine("Appuyer sur une touche pour fermer cette fenetre.");
    Console.Read();
}
/// <summary>
/// Lazily yields every file under <paramref name="path"/> (breadth-first over
/// subdirectories). Directories or listings that throw (e.g. access denied) are
/// logged to stderr and skipped rather than aborting the walk.
/// </summary>
/// <param name="path">Root directory to walk.</param>
/// <returns>FileInfo for every readable file in the tree.</returns>
public static IEnumerable<FileInfo> GetFileInfos(string path)
{
    Queue<string> queue = new Queue<string>();
    queue.Enqueue(path);
    while (queue.Count > 0)
    {
        path = queue.Dequeue();
        try
        {
            foreach (string subDir in Directory.GetDirectories(path))
            {
                queue.Enqueue(subDir);
            }
        }
        catch (Exception ex)
        {
            // Best-effort: an unreadable directory is reported and skipped.
            Console.Error.WriteLine(ex);
        }

        FileInfo[] files = null;
        try
        {
            files = new DirectoryInfo(path).GetFiles();
        }
        catch (Exception ex)
        {
            Console.Error.WriteLine(ex);
        }

        if (files != null)
        {
            // foreach instead of an index loop using the Count() extension on an array.
            foreach (FileInfo file in files)
            {
                yield return file;
            }
        }
    }
}
// Background worker body: drains the shared fileQueue, shelling out to the cr2hdr
// tool for each Dual ISO image, then files the processed images and their logs
// into per-folder subdirectories ("Dual ISO DNG", "Dual ISO CR2",
// "Dual ISO Original CR2", "logs"). The last worker to finish also performs a
// final log sweep and signals completion.
// NOTE(review): state shared across workers (threadCount, fileQueue,
// subFolderFileQueue, currentDirectory, errorCount, imagesProcessed, formClosed,
// workCompleted) is mutated here with no explicit locking beyond the concurrent
// queues — confirm the remaining races are benign.
private void BwBusiness_DoWork(object sender, DoWorkEventArgs e) {
    Stopwatch directoryStopwatch = new Stopwatch();
    threadCount++;
    do {
        //Get available memory
        var computerInfo = new ComputerInfo();
        var availablePhysicalMemory = (decimal)(computerInfo.AvailablePhysicalMemory);
        var totalPhysicalMemory = (decimal)(computerInfo.TotalPhysicalMemory);
        var freeMemRatio = availablePhysicalMemory / totalPhysicalMemory;
        //Memory protection check
        // (Only proceed while more than 9% of physical memory is free.)
        if (freeMemRatio > (decimal).09) {
            bool errorFlag = false;
            if (!directoryStopwatch.IsRunning) { directoryStopwatch.Start(); }
            //Get file info so that any necessary logging can be written to the
            //appropriate folder
            FileInfo fiPeek;
            fileQueue.TryPeek(out fiPeek);
            try {
                Stopwatch swFile = new Stopwatch();
                swFile.Start();
                bool isCR2Processing = false;
                //Keep trying to get a file from the queue
                FileInfo fi;
                while (!fileQueue.TryDequeue(out fi) && fileQueue.Count() != 0) { }
                if (fi != null && File.Exists(fi.FullName)) {
                    string filePath = fi.FullName;
                    string directoryPath = fi.DirectoryName;
                    currentDirectory = directoryPath;
                    //Create Subfolders for Processed DNGs and Logs
                    if (fi.Name.ToLower().Contains(".dng")) { fi.Directory.CreateSubdirectory("Dual ISO DNG"); }
                    if (fi.Name.ToLower().Contains(".cr2")) {
                        isCR2Processing = true;
                        fi.Directory.CreateSubdirectory("Dual ISO CR2");
                        fi.Directory.CreateSubdirectory("Dual ISO Original CR2");
                    }
                    fi.Directory.CreateSubdirectory("logs");
                    //Handle condition where application exited before files moved
                    if (File.Exists(fi.FullName.ToLower().Replace(".dng", ".cr2"))) { isCR2Processing = true; }
                    //Get initial image path pre-move
                    string initialImagePath = fi.FullName;
                    //Get initial log path pre-move
                    string initialLogPath = initialImagePath + ".log";
                    //Get log directory path
                    string logDirectoryPath = fi.Directory.EnumerateDirectories().Where(
                        d => d.Name.Contains("logs")).FirstOrDefault().FullName;
                    //Get destination log path
                    string destLogPath = fi.Directory.EnumerateDirectories().Where(
                        d => d.Name.Contains("logs")).FirstOrDefault().FullName + "\\" + fi.Name + ".log";
                    //Get destination image path
                    // (DNGs go to "Dual ISO DNG" unless a sibling CR2 exists; originals
                    // of processed CR2s go to "Dual ISO Original CR2".)
                    string destImagePath = string.Empty;
                    if (fi.Name.ToLower().Contains(".dng")) {
                        if (!isCR2Processing) {
                            destImagePath = fi.Directory.EnumerateDirectories().Where(
                                d => d.Name.Contains("Dual ISO DNG")).FirstOrDefault().FullName + "\\" + fi.Name;
                        } else {
                            destImagePath = fi.Directory.EnumerateDirectories().Where(
                                d => d.Name.Contains("Dual ISO CR2")).FirstOrDefault().FullName + "\\" + fi.Name;
                        }
                    } else if (fi.Name.ToLower().Contains(".cr2")) {
                        destImagePath = fi.Directory.EnumerateDirectories().Where(
                            d => d.Name.Contains("Dual ISO Original CR2")).FirstOrDefault().FullName + "\\" + fi.Name;
                    }
                    string initialDNGPath = string.Empty;
                    string destDNGPath = string.Empty;
                    if (isCR2Processing) {
                        initialDNGPath = initialImagePath.ToLower().Replace(".cr2", ".dng").ToUpper();
                        destDNGPath = destImagePath.Replace("Dual ISO Original CR2", "Dual ISO CR2").ToLower().Replace(".cr2", ".dng").ToUpper();
                    }
                    string output = string.Empty;
                    string errorOutput = string.Empty;
                    //Get available memory
                    availablePhysicalMemory = (decimal)(computerInfo.AvailablePhysicalMemory);
                    totalPhysicalMemory = (decimal)(computerInfo.TotalPhysicalMemory);
                    freeMemRatio = availablePhysicalMemory / totalPhysicalMemory;
                    //Memory protection check
                    if (!formClosed && freeMemRatio > (decimal).09) {
                        // Use ProcessStartInfo class.
                        // (Launch cr2hdr against this file, capturing stdout/stderr.)
                        ProcessStartInfo startInfo = new ProcessStartInfo();
                        startInfo.CreateNoWindow = true;
                        startInfo.FileName = "\"" + txtCr2hdrPath.Text + "\"";
                        startInfo.WorkingDirectory = txtCr2hdrPath.Text.Length > 0 ?
                            txtCr2hdrPath.Text.Substring(0, txtCr2hdrPath.Text.IndexOf(".exe")).Substring(0, txtCr2hdrPath.Text.LastIndexOf("\\")) : startInfo.WorkingDirectory;
                        startInfo.WindowStyle = ProcessWindowStyle.Hidden;
                        startInfo.Arguments = "\"" + filePath + "\"";
                        startInfo.RedirectStandardOutput = true;
                        startInfo.RedirectStandardError = true;
                        startInfo.UseShellExecute = false;
                        Process exeProcess = Process.Start(startInfo);
                        // Start the process with the info we specified.
                        // Call WaitForExit and then the using-statement will close.
                        using (exeProcess) {
                            output = exeProcess.StandardOutput.ReadToEnd();
                            errorOutput = exeProcess.StandardError.ReadToEnd();
                            exeProcess.WaitForExit();
                            output += "\r\nProcess Errored?: ";
                            //Check exitcode to see if process errored or not
                            if (exeProcess.ExitCode == 0) {
                                output += "No";
                            } else {
                                output += "Yes";
                                errorCount++;
                                errorFlag = true;
                            }
                            output += "\r\n" + errorOutput;
                            output += "\r\n\r\n" + exeProcess.TotalProcessorTime.ToString() + " process duration.";
                            string processDuration = exeProcess.TotalProcessorTime.ToString();
                        }
                        //Move Image if Processing was Successful
                        // (On name collision, a GUID is injected into the filename.)
                        if (!errorFlag) {
                            if (File.Exists(initialImagePath)) {
                                if (!File.Exists(destImagePath)) {
                                    fi.MoveTo(destImagePath);
                                } else {
                                    if (destImagePath.ToLower().Contains(".dng")) {
                                        fi.MoveTo(destImagePath.ToLower().Replace(".dng", "." + Guid.NewGuid().ToString() + ".dng"));
                                    } else if (destImagePath.ToLower().Contains(".cr2")) {
                                        fi.MoveTo(destImagePath.ToLower().Replace(".cr2", "." + Guid.NewGuid().ToString() + ".cr2"));
                                    }
                                }
                                imagesProcessed++;
                                FileInfo subfolderFileInfoDequeued;
                                subFolderFileQueue.TryDequeue(out subfolderFileInfoDequeued);
                            }
                        }
                        swFile.Stop();
                        output += "\r\n" + swFile.Elapsed.ToString() + " file activity duration.";
                        //Write appropriate flavor of output log
                        if (errorFlag) {
                            File.WriteAllText(filePath + ".ProcessErrored.log", output);
                        } else {
                            File.WriteAllText(filePath + ".log", output);
                        }
                        //Move Log and perform cleanup if Processing was Successful
                        if (!errorFlag) {
                            if (File.Exists(initialLogPath)) {
                                if (!File.Exists(destLogPath)) {
                                    File.Move(initialLogPath, destLogPath);
                                } else {
                                    File.Move(initialLogPath, destLogPath.Replace(".log", "." + Guid.NewGuid().ToString() + ".log"));
                                }
                            }
                            if (isCR2Processing) {
                                //Move resultant DNG if CR2 was processed
                                if (File.Exists(initialDNGPath)) {
                                    if (!File.Exists(destDNGPath)) {
                                        File.Move(initialDNGPath, destDNGPath);
                                    } else {
                                        File.Move(initialDNGPath, (destDNGPath.ToLower().Replace(".dng", "." + Guid.NewGuid().ToString() + ".dng")));
                                    }
                                } else //Resultant DNG leftover while CR2 still in folder
                                {
                                    DirectoryInfo dirInfo = new DirectoryInfo(currentDirectory);
                                    dirInfo.CreateSubdirectory("Dual ISO Original CR2");
                                    string leftoverInitialCR2Path = initialImagePath.ToLower().Replace(".dng", ".cr2").ToUpper();
                                    string leftoverDestCR2Path = destImagePath.Replace(
                                        "Dual ISO CR2", "Dual ISO Original CR2").ToLower().Replace(".dng", ".cr2").ToUpper();
                                    if (File.Exists(leftoverInitialCR2Path)) {
                                        if (!File.Exists(leftoverDestCR2Path)) {
                                            File.Move(leftoverInitialCR2Path, leftoverDestCR2Path);
                                        } else {
                                            File.Move(leftoverInitialCR2Path, leftoverDestCR2Path.ToLower().Replace(".cr2", "." + Guid.NewGuid().ToString() + ".cr2"));
                                        }
                                    }
                                }
                            }
                            string tempDurationPerImage = durationPerImage.ToString();
                            //Anything else left in the queue?
                            if (fileQueue.Count() > 0) {
                                FileInfo fiPeekNext;
                                while (!fileQueue.TryPeek(out fiPeekNext) && fileQueue.Count() != 0) { }
                                //Switching to a new folder? Clean up logs after making sure there are no images left in folder.
                                // NOTE(review): fiPeekNext is dereferenced before the
                                // `|| fiPeekNext == null` arm — that null check can never
                                // save it from a NullReferenceException; confirm ordering.
                                if ( (fiPeekNext.DirectoryName != directoryPath && Directory.EnumerateFiles( directoryPath, "*.dng", SearchOption.TopDirectoryOnly).Union(Directory.EnumerateFiles( directoryPath, "*.cr2", SearchOption.TopDirectoryOnly)).Count() == 0) || fiPeekNext == null) {
                                    directoryStopwatch.Stop();
                                    subFolderFileQueue = new ConcurrentQueue<FileInfo>( fiPeekNext.Directory.EnumerateFiles("*.dng", SearchOption.TopDirectoryOnly).Union( fiPeekNext.Directory.EnumerateFiles("*.cr2", SearchOption.TopDirectoryOnly)).ToList() );
                                    maxSubfolderFileQueueCount = subFolderFileQueue.Count();
                                    string directoryDuration = directoryStopwatch.Elapsed.ToString() + " Directory Duration.";
                                    directoryDuration += "\r\n" + tempDurationPerImage + " Duration Per Image.";
                                    if (!File.Exists(directoryPath + "\\directoryDuration.log")) {
                                        File.WriteAllText(directoryPath + "\\directoryDuration.log", directoryDuration);
                                    }
                                    directoryStopwatch.Reset();
                                    IEnumerable<FileInfo> logs = new DirectoryInfo(directoryPath).EnumerateFiles("*.log", SearchOption.TopDirectoryOnly);
                                    if (logs.Count() > 0) {
                                        foreach (FileInfo log in logs) {
                                            if (Directory.Exists(log.DirectoryName + "\\logs")) {
                                                log.MoveTo(log.DirectoryName + "\\logs\\" + log.Name);
                                            }
                                        }
                                    }
                                }
                            } else {
                                //No more files in queue. Clean up logs for the final time after making sure there are no image files remaining
                                if (Directory.EnumerateFiles(directoryPath, "*.dng", SearchOption.TopDirectoryOnly).Union( Directory.EnumerateFiles(directoryPath, "*.cr2", SearchOption.TopDirectoryOnly)).Count() == 0) {
                                    directoryStopwatch.Stop();
                                    string directoryDuration = directoryStopwatch.Elapsed.ToString() + " Directory Duration.";
                                    directoryDuration += "\r\n" + tempDurationPerImage + " Duration Per Image.";
                                    if (!File.Exists(directoryPath + "\\directoryDuration.log")) {
                                        File.WriteAllText(directoryPath + "\\directoryDuration.log", directoryDuration);
                                    }
                                    directoryStopwatch.Reset();
                                    IEnumerable<FileInfo> logs = new DirectoryInfo(directoryPath).EnumerateFiles("*.log", SearchOption.TopDirectoryOnly);
                                    if (logs.Count() > 0) {
                                        foreach (FileInfo log in logs) {
                                            if (Directory.Exists(log.DirectoryName + "\\logs")) {
                                                if (File.Exists(log.FullName)) {
                                                    log.MoveTo(log.DirectoryName + "\\logs\\" + log.Name);
                                                }
                                            }
                                        }
                                    }
                                }
                            }
                        }
                    } else {
                        //Memory protection check failed? Put file back into queue
                        fileQueue.Enqueue(fi);
                    }
                }
                if (isCR2Processing) {
                    //If processing CR2, sleep thread for between 5 and 10 seconds
                    Random r = new Random();
                    Thread.Sleep(r.Next(5000, 10000));
                }
            } catch (Exception ex) {
                // Per-file failure: write an exception log next to the file and keep draining the queue.
                if (fiPeek != null) {
                    File.WriteAllText(fiPeek.DirectoryName + "\\ExceptionLog." + fiPeek.Name + "." + Guid.NewGuid().ToString() + ".log", ex.Message + "\r\n" + ex.StackTrace);
                }
            }
        }
    } while (fileQueue.Count() > 0 && !((BackgroundWorker)(sender)).CancellationPending && !formClosed);
    //Queue empty and last worker? Time to clean up logs and signal work completed.
    if (fileQueue.Count() == 0 && threadCount == 1) {
        workCompleted = true;
        if (!bwCancel.IsBusy) {
            bwCancel.RunWorkerAsync();
            Cancel();
            //Clean up logs
            DirectoryInfo di = new DirectoryInfo(txtImageFolderPath.Text);
            if (di.Exists) {
                try {
                    IEnumerable<DirectoryInfo> diList = di.EnumerateDirectories("*", SearchOption.AllDirectories).Where( d => d.EnumerateFiles("*.log", SearchOption.TopDirectoryOnly).Count() > 0 && !d.Name.Contains("Dual ISO DNG") && !d.Name.Contains("Dual ISO CR2") && !d.Name.Contains("Dual ISO Original CR2") && !d.Name.Contains("logs"));
                    List<FileInfo> logList = new List<FileInfo>();
                    //We don't want to clean up the process errored logs,
                    //since we want to keep those next to the files that need to be reprocessed.
                    logList.AddRange(di.EnumerateFiles("*.log", SearchOption.TopDirectoryOnly).Where( lgFile => !lgFile.Name.Contains("ProcessErrored")).ToList());
                    foreach (DirectoryInfo dInfo in diList) {
                        string path = dInfo.FullName;
                        logList.AddRange(dInfo.EnumerateFiles("*.log", SearchOption.TopDirectoryOnly).Where( lgFile => !lgFile.Name.Contains("ProcessErrored")).ToList());
                    }
                    foreach (FileInfo log in logList) {
                        if (Directory.Exists(log.DirectoryName + "\\logs")) {
                            log.MoveTo(log.DirectoryName + "\\logs\\" + log.Name);
                        }
                    }
                } catch (Exception ex) {
                    File.WriteAllText(di.FullName + "\\ExceptionLog.WorkComplete." + Guid.NewGuid().ToString() + ".log", ex.Message + "\r\n" + ex.StackTrace);
                }
            }
        }
    }
    threadCount--;
}
/// <summary>
/// Convert LEAN data to custom data
/// </summary>
/// <param name="OutputResolution">Output resolution for intraday cases (hour/minute/second)</param>
/// <returns></returns>
private static async Task Convert2CustomCSV(string OutputResolution)
{
    // One subdirectory per symbol under the configured output tree.
    var symbols = new DirectoryInfo(_settings.OutputFolder + @"/" + _settings.SecurityType + @"/bra/" + OutputResolution + @"/")
        .GetDirectories().ToList();
    // Optional whitelist: keep only the configured instruments.
    if (_settings.InstrumentList.Length > 0) symbols.RemoveAll(s => !_settings.InstrumentList.Contains(s.Name.ToUpper()));
    // NOTE(review): empty guard body — with no symbols, execution falls through and
    // the progress math below divides by zero (total == 0). An early return here
    // looks intended; confirm before changing.
    if (symbols.Count == 0) { }
    var sum = 0.0;
    var total = 0.0;
    // Total byte size across all symbol directories, used for the progress display.
    symbols.ForEach(d => total += d.GetFiles().Sum(f => f.Length));
    var roottime = DateTime.Now;
    Console.WriteLine("\r" + symbols.Count + " symbol directories to read (" + (total / 1024 / 1024).ToString("000.00") + " MB).\t" + roottime);
    foreach (var symbol in symbols)
    {
        var starttime = DateTime.Now;
        // Price-adjustment factors keyed by date for this ticker.
        var factors = GetTickerFactors(symbol.Name);
        var zipfiles = symbol.GetFiles("*.zip").ToList();
        // Output CSV is rebuilt from scratch for each symbol.
        var csvFile = new FileInfo(symbol.Name.ToUpper() + "_" + _settings.OutputResolution + ".csv");
        if (csvFile.Exists) csvFile.Delete();
        foreach (var zipfile in zipfiles)
        {
            var date = zipfile.Name.ToDateTime();
            var factor = factors.FirstOrDefault(kvp => kvp.Key >= date);
            // Each zip line: millis-offset,O,H,L,C[,V[,extra]] — rewritten as
            // "SYMBOL;dd/MM/yyyy;HH:mm:ss;O;H;L;C;..." with factor-adjusted prices.
            File.AppendAllLines(csvFile.FullName, (await ReadAsyncZipFile(zipfile)).Select(l =>
            {
                var data = l.Split(',');
                return symbol.Name.ToUpper() + ";"
                    + date.AddMilliseconds(data[0].ToInt64()).ToString(@"dd/MM/yyyy;HH\:mm\:ss") + ";"
                    + Math.Round(data[1].ToDecimal() * factor.Value / 10000, 2).ToString("0.00", _ptBR) + ";"
                    + Math.Round(data[2].ToDecimal() * factor.Value / 10000, 2).ToString("0.00", _ptBR) + ";"
                    + Math.Round(data[3].ToDecimal() * factor.Value / 10000, 2).ToString("0.00", _ptBR) + ";"
                    + Math.Round(data[4].ToDecimal() * factor.Value / 10000, 2).ToString("0.00", _ptBR) + ";"
                    + (data.Length == 6 ? data[5] : data[6] + ";" + data[5]);
            }).ToArray());
            sum += zipfile.Length;
        }
        Console.Write("\r" + (sum / total).ToString("0.00%") + "\t" + symbol.Name.ToUpper() + ": \t" + zipfiles.Count + " days were read/written in " + (DateTime.Now - starttime).ToString(@"ss\.ff") + " secs.\t");
    }
    // For options and futures
    var csvFiles = new DirectoryInfo(Environment.CurrentDirectory).GetFiles("*_" + _settings.OutputResolution + ".csv");
    if (csvFiles.Count() > 0)
    {
        sum = 0.0;
        total = csvFiles.Sum(f => f.Length);
        Console.WriteLine("Zipping " + (total / 1024 / 1024).ToString("000.00") + " MB");
        // NOTE(review): the GroupBy result is discarded — the ForEach that zipped
        // each group is commented out below, so this statement currently does no
        // work beyond building the grouping.
        csvFiles.GroupBy(g =>
        {
            if (_settings.SecurityType == "equity") return g.Name.Replace(".csv", ".zip");
            if (_settings.SecurityType == "futures") return g.Name.Substring(0, 3).ToUpper() + "FUT" + "_" + _settings.OutputResolution + ".zip";
            // Options: 5th character encodes call (A-L) vs put.
            var type = ("ABCDEFGHIJKL".Contains(g.Name[4]) ? "_C" : "_P") + "_" + _settings.OutputResolution + ".zip";
            return g.Name.Substring(0, 4) + type;
        })
        //.ToList().ForEach(f =>
        //{
        //    var outputfile = new FileInfo(f.Key);
        //    if (outputfile.Exists) outputfile.Delete();
        //    Compression.Zip(outputfile.FullName);
        //    using (var z = new FileStream(outputfile.FullName, FileMode.Create))
        //    using (var a = new ZipArchive(z, ZipArchiveMode.Create, true))
        //    f.ToList().ForEach(csvFile =>
        //    {
        //        a.CreateEntryFromFile(csvFile.FullName, csvFile.Name, CompressionLevel.Optimal);
        //        csvFile.Delete();
        //        sum += csvFile.Length;
        //        Console.Write("\r" + (sum / total).ToString("0.00%") + "\tLast zippped file:\t" + csvFile.Name.ToUpper());
        //    });
        //})
        ;
    }
    Console.WriteLine("\r\n... exiting routine at " + DateTime.Now);
}
/// <summary>
/// Reports whether the "Current" capture set is complete: the number of image
/// files in the folder must equal the configured grid size (Width x Height).
/// </summary>
private static bool IsComplete()
{
    var currentFolder = RootPath + "Current/";
    var config = ConfigService.Generate(currentFolder);
    var imageFiles = new DirectoryInfo(currentFolder).GetFiles().OnlyImages();
    var expectedCount = config.Width * config.Height;
    return imageFiles.Count() == expectedCount;
}
/// <summary>
/// Converting every non-MSI file in the working directory to assemblies — and
/// enumerating the result — must not surface load exceptions, even for files
/// that are not loadable assemblies.
/// </summary>
public void ToAssembliesDoesNotLeakLoadExceptions()
{
    var candidatePaths = new DirectoryInfo(".")
        .GetFiles()
        .Where(file => file.Extension != ".msi")
        .Select(file => file.FullName);
    var assemblies = candidatePaths.ToAssemblies();
    // Force enumeration; any leaked load exception would throw here.
    var loadedCount = assemblies.Count();
}
/// <summary>
/// ThreadPool callback: recursively lists every file under the given location,
/// splits the listing into one chunk per worker, converts files to Songs in
/// parallel, and appends the results to the shared Songs list under _locker.
/// Blocks until all chunks have completed.
/// </summary>
/// <param name="stateInfo">The root path to scan (boxed string).</param>
private void slurpLocation(object stateInfo)
{
    string location = (string)stateInfo;

    // Get every file underneath the path given
    FileInfo[] files = new DirectoryInfo(location).GetFiles("*", SearchOption.AllDirectories);
    Trace.WriteLine(string.Format("Got {0} files in {1}", files.Length, location));

    // Divide the list up into chunks, one per work item.
    // (Length property instead of the Count() extension on the array.)
    int workItems = Environment.ProcessorCount * ThreadsPerProcessor;
    int chunkSize = Math.Max(files.Length / workItems, 1);
    int count = workItems;

    // Use an event to wait for all work items
    using (var manualResetEvent = new ManualResetEvent(false))
    {
        // Each work item processes approx. 1/Nth of the data items
        WaitCallback callback = state =>
        {
            int iteration = (int)state;
            int from = chunkSize * iteration;
            // The last chunk also absorbs the remainder of the integer division.
            int to = iteration == workItems - 1 ? files.Length : chunkSize * (iteration + 1);
            Trace.WriteLine(string.Format("Thread {1}: Sub-tasked {0} files.", to - from, iteration));

            // Create a per-thread list to add to the main list at the end.
            List<Song> songs = new List<Song>();

            // Iterate through our thread's slice of the whole list.
            while (from < to)
            {
                Song song = songFromFileSystemInfo(files[from++]);
                if (song != null)
                {
                    songs.Add(song);
                }
            }
            Trace.WriteLine(string.Format("Thread {1}: Filled {0} songs.", songs.Count, iteration));

            lock (_locker)
            {
                Songs.AddRange(songs);
            }

            // The last chunk to finish releases the waiting thread.
            if (Interlocked.Decrement(ref count) == 0)
            {
                manualResetEvent.Set();
            }
        };

        // The ThreadPool is used to process all but one of the
        // chunks; the current thread is used for that chunk,
        // rather than just blocking.
        for (int i = 0; i < workItems; i++)
        {
            if (i < workItems - 1)
            {
                ThreadPool.QueueUserWorkItem(callback, i);
            }
            else
            {
                callback(i);
            }
        }

        // Wait for all work to complete
        manualResetEvent.WaitOne();
    }
}
/// <summary>
/// Builds a one-line human-readable summary of the last 24 hours of log
/// activity: the number of logs restored (from the RsisLogBackups table) and,
/// when the download folder exists, the count and total size of downloaded logs.
/// </summary>
/// <param name="folderPath">Folder containing downloaded log files; skipped when it does not exist.</param>
/// <returns>The summary sentence(s).</returns>
public string GetLogSummary( string folderPath ) {
	var summary = "";
	ExecuteDbMethod(
		cn => {
			// Only activity within the last 24 hours counts.
			var cutOffDateTime = DateTime.Now.AddHours( -24 );

			// SELECT count(*) FROM RsisLogBackups WHERE DateAndTimeSaved > cutoff
			var command = new InlineSelect( "count( * )".ToSingleElementArray(), "from RsisLogBackups", false );
			command.AddCondition(
				new InequalityCondition(
					InequalityCondition.Operator.GreaterThan,
					new InlineDbCommandColumnValue( "DateAndTimeSaved", new DbParameterValue( cutOffDateTime, "DateTime2" ) ) ) );
			var numberOfLogsRestored = 0;
			command.Execute(
				cn,
				r => {
					r.Read();
					numberOfLogsRestored = r.GetInt32( 0 );
				} );
			summary = "In the last 24 hours, " + numberOfLogsRestored + " logs were successfully restored.";

			if( Directory.Exists( folderPath ) ) {
				var logsDownloaded = new DirectoryInfo( folderPath ).GetFiles().Where( f => f.LastWriteTime > cutOffDateTime ).ToList();
				var totalSizeInBytes = logsDownloaded.Sum( f => f.Length );
				// Count property instead of the Enumerable.Count() extension on the materialized list.
				summary += " " + logsDownloaded.Count + " logs were downloaded, with a total size of " +
				           FormattingMethods.GetFormattedBytes( totalSizeInBytes ) + ".";
			}
		} );
	return summary;
}
/// <summary>
/// Entry point for the IIS log analyzer. Parses W3C-format IIS log files from a
/// folder, aggregates hit/response-time statistics (per period, per day, per hour,
/// concurrency windows, HTTP status codes, unique IPs), and writes a multi-sheet
/// Excel report via Office interop.
/// </summary>
/// <param name="args">Switch/value pairs; run with --help for the list.</param>
/// <returns>0 on success (or nothing to do), 1 when the given folder is inaccessible.</returns>
static int Main (string[] args) {
    #region Command Line argument processing
    if ( args.Contains ("--help") ) {
        Console.WriteLine ("This tool depends on Microsoft Office 2010+");
        Console.WriteLine ("Valid switches are");
        Console.WriteLine ("-ignore <comma separated list of file extn> : Ignore line with pattern");
        Console.WriteLine ("-include <comma separated list of file extn> : Filter for pattern");
        Console.WriteLine ("-concurrency <No of minutes> : Concurrency Window in minutes");
        Console.WriteLine ("-toppages <No of pages> : No of Top Pages/day");
        Console.WriteLine ("-peaks <No of peaks> : No of Peak Hours to consider");
        Console.WriteLine ("-param <comma separated list of patterns> : Summarize specific URL parameters");
        Console.WriteLine ("-export <export filename> : Excel file report name, default will be with time stamp");
        Console.WriteLine ("-folder <log file folder path> : Current folder will be defaulted. All .log files in this folder will be processed.");
        Console.WriteLine ("Add a space after the pattern if you want extension mapping (e.g. .aspx ,.jpg)");
        return 0;
    }

    // Switches come in "-name value" pairs, so an odd count is always invalid.
    if ( args.Length % 2 != 0 ) {
        throw new ArgumentException ("Command line arguments not valid, try --help to see valid ones!");
    }
    Dictionary<string, string> cmdArgs = new Dictionary<string, string> ();
    for ( int i = 0; i < args.Length; i += 2 ) {
        cmdArgs.Add (args[i].ToLower (), args[i + 1]);
    }

    List<string> ignoredTypes = new List<string> (), filterTypes = new List<string> (), hitsPerURLParams = new List<string> ();
    if ( cmdArgs.ContainsKey (IgnoreSwitch) ) {
        ignoredTypes = cmdArgs[IgnoreSwitch].ToLower ().Split (',').ToList ();
    }
    if ( cmdArgs.ContainsKey (FilterSwitch) ) {
        filterTypes = cmdArgs[FilterSwitch].ToLower ().Split (',').ToList ();
    }
    if ( cmdArgs.ContainsKey (URLParamsSwitch) ) {
        hitsPerURLParams = cmdArgs[URLParamsSwitch].ToLower ().Split (',').ToList ();
    }

    // Defaults are written back into cmdArgs so the Summary sheet shows effective values.
    float concurrencyWindow = 5;
    if ( cmdArgs.ContainsKey (ConcurrencySwitch) ) {
        concurrencyWindow = float.Parse (cmdArgs[ConcurrencySwitch]);
    } else
        cmdArgs.Add (ConcurrencySwitch, concurrencyWindow.ToString ());

    int topPagesPerDay = 10;
    if ( cmdArgs.ContainsKey (TopPagesSwitch) ) {
        topPagesPerDay = int.Parse (cmdArgs[TopPagesSwitch]);
    } else
        cmdArgs.Add (TopPagesSwitch, topPagesPerDay.ToString ());

    int peakHoursCount = 3;
    if ( cmdArgs.ContainsKey (PeakHoursSwitch) ) {
        peakHoursCount = int.Parse (cmdArgs[PeakHoursSwitch]);
    } else
        cmdArgs.Add (PeakHoursSwitch, peakHoursCount.ToString ());

    string exportFileName = null;
    if ( cmdArgs.ContainsKey (ExportFileSwitch) ) {
        try {
            exportFileName = Path.GetFullPath (cmdArgs[ExportFileSwitch]);
        }
        catch ( Exception e ) {
            Console.WriteLine ("Error creating report file:{0},{1}", e.GetType ().Name, e.Message);
        }
    }
    if ( exportFileName == null ) {
        exportFileName = Path.GetFullPath ("Processing results_" + DateTime.Now.ToString ("dd_hh_mm") + ".xlsx");
        Console.WriteLine ("Writing output to {0}", exportFileName);
    }

    string currentPath;
    if ( cmdArgs.ContainsKey (FolderSwitch) ) {
        try {
            currentPath = Path.GetFullPath (cmdArgs[FolderSwitch]);
        }
        catch ( Exception e ) {
            Console.WriteLine ("Error accessing folder {0}:{1},{2}", cmdArgs[FolderSwitch], e.GetType ().Name, e.Message);
            return 1;
        }
    } else {
        currentPath = Directory.GetCurrentDirectory ();
        Console.WriteLine ("Working on IIS logs from current folder {0}", currentPath);
    }
    #endregion

    Stopwatch stopWatch = new Stopwatch ();
    stopWatch.Start ();

    // Process files oldest-first so the time series comes out in order.
    var files = new DirectoryInfo (currentPath)
        .GetFiles ("*.log")
        .OrderBy (f => f.LastWriteTime)
        .Select (f => f.FullName)
        .ToArray ();
    var totalFile = files.Length;
    if ( totalFile == 0 ) {
        Console.WriteLine ("No log files found!!");
        return 0;
    }
    Console.WriteLine ("Found {0} log files", totalFile);
    var tmpFile = System.IO.Path.GetTempFileName ();

    int fileCount = 0;
    List<IISLogEntry> processingList = new List<IISLogEntry> ();
    DateTime nextTime = DateTime.MinValue;                       // end of the current concurrency window
    long TotalHits = 0, ServedRequests = 0;
    List<ConcurrentRequest> requests = new List<ConcurrentRequest> ();
    HashSet<string> uniqueIPs = new HashSet<string> ();
    Dictionary<int, int> httpStatus = new Dictionary<int, int> ();
    Dictionary<string, MethodInfo> pageViewsForPeriod = new Dictionary<string, MethodInfo> ();
    int totalDays = 0, totalHours = 0;
    Dictionary<string, MethodInfo> pageViewsDaily = new Dictionary<string, MethodInfo> ();
    HashSet<MethodInfo> dailyPages = new HashSet<MethodInfo> ();
    Dictionary<string, MethodInfo> pageViewsHourly = new Dictionary<string, MethodInfo> ();
    HashSet<MethodInfo> hourlyPages = new HashSet<MethodInfo> ();
    // Hits for key URL parameters.
    Dictionary<string, MethodInfo> urlParamHits = new Dictionary<string, MethodInfo> ();
    DateTime firstEntry = DateTime.MinValue, lastEntry = DateTime.MinValue;
    // Scratch set reused by the daily/hourly top-page calculations below.
    HashSet<MethodInfo> filteredEntries = new HashSet<MethodInfo> ();
    int startRow = 1, startCol = 1;
    int reportRow = 2, reportCol = 1;
    Console.WriteLine ("Preparing to Process..");

    foreach ( var f in files ) {
        try {
            ++fileCount;
            var progress = fileCount * 100 / totalFile;
            IEnumerable<string> matchedEntries = null;
            var contents = File.ReadLines (f);
            Dictionary<string, int> fieldIndex = new Dictionary<string, int> ();

            #region Content filter
            // '#' header lines are always kept so the field mapping stays intact.
            if ( filterTypes.Any () && ignoredTypes.Any () )
                matchedEntries = contents.Where (s => s.StartsWith ("#") || ( filterTypes.Any (x => s.ToLower ().Contains (x)) && !ignoredTypes.Any (x => s.ToLower ().Contains (x)) ));
            else if ( filterTypes.Any () )
                matchedEntries = contents.Where (s => s.StartsWith ("#") || filterTypes.Any (x => s.ToLower ().Contains (x)));
            else if ( ignoredTypes.Any () )
                matchedEntries = contents.Where (s => s.StartsWith ("#") || !ignoredTypes.Any (x => s.ToLower ().Contains (x)));
            else
                matchedEntries = contents;

            foreach ( var rawLogEntry in matchedEntries ) {
                IISLogEntry logEntry;
                if ( rawLogEntry.StartsWith ("#") ) {
                    // "#Fields:" headers (re)define the column layout for subsequent rows.
                    if ( rawLogEntry.StartsWith ("#Fields:") )
                        fieldIndex = ParseHeaderFields (rawLogEntry);
                } else {
                    Console.Write ("\r{0} File {1} of {2} files ({3}%), processing {4} ", stopWatch.Elapsed.ToString (@"hh\:mm\:ss"), fileCount, totalFile, progress, ++TotalHits);
                    var columns = rawLogEntry.Split (' ');
                    logEntry = new IISLogEntry () {
                        TimeStamp = DateTime.Parse (columns[0] + " " + columns[1]),
                        ClientIPAddress = fieldIndex.ContainsKey (IISLogEntry.propClientIPAddress) ? columns[fieldIndex[IISLogEntry.propClientIPAddress]] : String.Empty,
                        UserName = fieldIndex.ContainsKey (IISLogEntry.propUserName) ? columns[fieldIndex[IISLogEntry.propUserName]] : String.Empty,
                        ServiceNameandInstanceNumber = fieldIndex.ContainsKey (IISLogEntry.propServiceNameandInstanceNumber) ? columns[fieldIndex[IISLogEntry.propServiceNameandInstanceNumber]] : String.Empty,
                        ServerName = fieldIndex.ContainsKey (IISLogEntry.propServerName) ? columns[fieldIndex[IISLogEntry.propServerName]] : String.Empty,
                        ServerIPAddress = fieldIndex.ContainsKey (IISLogEntry.propServerIPAddress) ? columns[fieldIndex[IISLogEntry.propServerIPAddress]] : String.Empty,
                        // BUG FIX: the guard previously checked propClientIPAddress (copy/paste)
                        // while indexing with propServerPort, which could throw or misparse
                        // when only one of the two fields is present in the log.
                        ServerPort = fieldIndex.ContainsKey (IISLogEntry.propServerPort) ? Int32.Parse (columns[fieldIndex[IISLogEntry.propServerPort]]) : 0,
                        Method = fieldIndex.ContainsKey (IISLogEntry.propMethod) ? columns[fieldIndex[IISLogEntry.propMethod]] : String.Empty,
                        URIStem = fieldIndex.ContainsKey (IISLogEntry.propURIStem) ? columns[fieldIndex[IISLogEntry.propURIStem]] : String.Empty,
                        URIQuery = fieldIndex.ContainsKey (IISLogEntry.propURIQuery) ? columns[fieldIndex[IISLogEntry.propURIQuery]] : String.Empty,
                        HTTPStatus = fieldIndex.ContainsKey (IISLogEntry.propHTTPStatus) ? Int32.Parse (columns[fieldIndex[IISLogEntry.propHTTPStatus]]) : 0,
                        BytesSent = fieldIndex.ContainsKey (IISLogEntry.propBytesSent) ? Int32.Parse (columns[fieldIndex[IISLogEntry.propBytesSent]]) : 0,
                        BytesReceived = fieldIndex.ContainsKey (IISLogEntry.propBytesReceived) ? Int32.Parse (columns[fieldIndex[IISLogEntry.propBytesReceived]]) : 0,
                        TimeTaken = fieldIndex.ContainsKey (IISLogEntry.propTimeTaken) ? Int32.Parse (columns[fieldIndex[IISLogEntry.propTimeTaken]]) : 0,
                        ProtocolVersion = fieldIndex.ContainsKey (IISLogEntry.propProtocolVersion) ? columns[fieldIndex[IISLogEntry.propProtocolVersion]] : String.Empty,
                        Host = fieldIndex.ContainsKey (IISLogEntry.propHost) ? columns[fieldIndex[IISLogEntry.propHost]] : String.Empty,
                        UserAgent = fieldIndex.ContainsKey (IISLogEntry.propUserAgent) ? columns[fieldIndex[IISLogEntry.propUserAgent]] : String.Empty,
                        Cookie = fieldIndex.ContainsKey (IISLogEntry.propCookie) ? columns[fieldIndex[IISLogEntry.propCookie]] : String.Empty,
                        Referrer = fieldIndex.ContainsKey (IISLogEntry.propReferrer) ? columns[fieldIndex[IISLogEntry.propReferrer]] : String.Empty,
                        ProtocolSubstatus = fieldIndex.ContainsKey (IISLogEntry.propProtocolSubstatus) ? columns[fieldIndex[IISLogEntry.propProtocolSubstatus]] : String.Empty
                    };
                    #endregion

                    #region entry processing
                    var url = logEntry.URIStem.ToLower ();

                    #region HTTP status codes & IP
                    if ( httpStatus.ContainsKey (logEntry.HTTPStatus) )
                        httpStatus[logEntry.HTTPStatus]++;
                    else
                        httpStatus.Add (logEntry.HTTPStatus, 1);
                    // HashSet.Add is already a no-op for duplicates; no Contains check needed.
                    uniqueIPs.Add (logEntry.ClientIPAddress);
                    #endregion

                    // First entry seen: anchor the concurrency window to its minute.
                    if ( nextTime == DateTime.MinValue ) {
                        firstEntry = logEntry.TimeStamp;
                        lastEntry = logEntry.TimeStamp;
                        nextTime = logEntry.TimeStamp.Date.
                            AddHours (logEntry.TimeStamp.Hour).
                            AddMinutes (logEntry.TimeStamp.Minute).
                            AddMinutes (concurrencyWindow);
                    }

                    // Window boundary crossed: flush the window's stats (or an empty
                    // placeholder so the timeline has no gaps) and advance the window.
                    if ( logEntry.TimeStamp > nextTime ) {
                        if ( processingList.Any () ) {
                            requests.Add (new ConcurrentRequest (concurrencyWindow) {
                                TimeStamp = nextTime,
                                Transactions = processingList.Count,
                                AverageResponseTime = processingList.Average (p => p.TimeTaken),
                                BytesSent = processingList.Sum (t => t.BytesSent)
                            });
                            processingList.Clear ();
                        } else {
                            requests.Add (new ConcurrentRequest (concurrencyWindow) { TimeStamp = nextTime, Transactions = 0, AverageResponseTime = 0, BytesSent = 0 });
                        }
                        nextTime = nextTime.AddMinutes (concurrencyWindow);
                    }

                    if ( lastEntry.Hour != logEntry.TimeStamp.Hour ) {
                        totalHours++;
                        AddHourlyPages (pageViewsHourly, hourlyPages, lastEntry);
                    }
                    if ( lastEntry.Date != logEntry.TimeStamp.Date ) {
                        totalDays++;
                        AddDailyPages (pageViewsDaily, dailyPages, lastEntry);
                    }

                    // Add the current entry to future processing; otherwise one on the
                    // borderline would be missing. Only 200s count as served pages.
                    if ( logEntry.HTTPStatus == 200 ) {
                        processingList.Add (logEntry);
                        ServedRequests++;
                        if ( pageViewsForPeriod.ContainsKey (url) )
                            pageViewsForPeriod[url].Hit (logEntry.TimeTaken);
                        else
                            pageViewsForPeriod.Add (url, new MethodInfo (logEntry.URIStem, logEntry.TimeTaken));
                        if ( lastEntry.Hour == logEntry.TimeStamp.Hour ) {
                            if ( pageViewsHourly.ContainsKey (url) )
                                pageViewsHourly[url].Hit (logEntry.TimeTaken);
                            else
                                pageViewsHourly.Add (url, new MethodInfo (logEntry.URIStem, logEntry.TimeTaken));
                        }
                        if ( lastEntry.Date == logEntry.TimeStamp.Date ) {
                            if ( pageViewsDaily.ContainsKey (url) )
                                pageViewsDaily[url].Hit (logEntry.TimeTaken);
                            else
                                pageViewsDaily.Add (url, new MethodInfo (logEntry.URIStem, logEntry.TimeTaken));
                        }
                        if ( hitsPerURLParams.Any () ) {
                            var urlParam = hitsPerURLParams.Where (p => logEntry.URIQuery.Contains (p)).FirstOrDefault ();
                            if ( urlParam != null && urlParam != String.Empty ) {
                                if ( urlParamHits.ContainsKey (url) )
                                    urlParamHits[url].Hit (logEntry.TimeTaken);
                                else
                                    urlParamHits.Add (url, new MethodInfo (urlParam, logEntry.TimeTaken));
                            }
                        }
                    }
                    lastEntry = logEntry.TimeStamp;
                }
            }

            // Flush the final (partial) concurrency window of this file.
            if ( processingList.Any () ) {
                requests.Add (new ConcurrentRequest (concurrencyWindow) {
                    TimeStamp = nextTime,
                    Transactions = processingList.Count,
                    AverageResponseTime = processingList.Average (p => p.TimeTaken),
                    BytesSent = processingList.Sum (t => t.BytesSent)
                });
                processingList.Clear ();
            }
            AddHourlyPages (pageViewsHourly, hourlyPages, lastEntry);
            AddDailyPages (pageViewsDaily, dailyPages, lastEntry);
            #endregion
        }
        catch ( Exception e ) {
            // A bad file should not abort the whole run; log and continue with the next one.
            Console.WriteLine ("Error!! {0}:{1} - {2}", e.GetType ().Name, e.Message, e.StackTrace);
            Debug.WriteLine ("Error!! {0}:{1}", e.GetType ().Name, e.Message);
        }
    }

    Console.WriteLine ("\nGenerating Statistics");

    #region resultprocessing
    IEnumerable<MethodInfo> topPages;
    IEnumerable<IGrouping<DateTime, MethodInfo>> hourlyHits = null;
    long peakHits;
    IEnumerable<IGrouping<DateTime, MethodInfo>> peakHourPages = null;
    try {
        excelApp = new Application ();
        excelApp.Visible = false;
        reportSpreadsheet = excelApp.Workbooks.Add ();
        // Manual calculation while bulk-writing cells is dramatically faster.
        excelApp.Calculation = XlCalculation.xlCalculationManual;
        reportSheet = reportSpreadsheet.ActiveSheet;

        #region Concurrent Users
        if ( requests.Any () ) {
            Console.WriteLine ("{0} Calculating Concurrent User Count", stopWatch.Elapsed.ToString (@"hh\:mm\:ss"));
            reportSheet.Name = "Concurrent Users";
            reportSheet.Cells[reportRow, reportCol++] = "Timestamp";
            reportSheet.Cells[reportRow, reportCol++] = "Requests";
            reportSheet.Cells[reportRow, reportCol++] = "TPS";
            reportSheet.Cells[reportRow, reportCol++] = "Average Response Time";
            reportSheet.Cells[reportRow, reportCol++] = "Concurrent Users (based on Little's Law)";
            reportSheet.Cells[reportRow, reportCol++] = "Bytes Sent";
            reportSheet.Cells[reportRow, reportCol++] = "Network Speed (Mbps)";
            foreach ( var p in requests ) {
                reportCol = 1;
                reportRow++;
                reportSheet.Cells[reportRow, reportCol++] = p.TimeStamp;
                reportSheet.Cells[reportRow, reportCol++] = p.Transactions;
                reportSheet.Cells[reportRow, reportCol++] = p.Tps;
                reportSheet.Cells[reportRow, reportCol++] = p.AverageResponseTime;
                reportSheet.Cells[reportRow, reportCol++] = p.ConcurrentUsers;
                reportSheet.Cells[reportRow, reportCol++] = p.BytesSent;
                reportSheet.Cells[reportRow, reportCol++] = p.NetworkSpeed;
            }
        }
        #endregion

        reportSpreadsheet.Application.DisplayAlerts = false;
        reportSpreadsheet.SaveAs (exportFileName, ConflictResolution: XlSaveConflictResolution.xlLocalSessionChanges);

        #region Page visit Summary
        if ( pageViewsForPeriod.Any () ) {
            Console.WriteLine ("{0} Generating Page visit Summary", stopWatch.Elapsed.ToString (@"hh\:mm\:ss"));
            reportSheet = reportSpreadsheet.Worksheets.Add (Type.Missing, reportSheet, 1);
            reportSheet.Name = "Page visit Summary";
            startRow = startCol = 1;
            startRow = CollectionToTable (pageViewsForPeriod.Values, startRow, startCol, "Page visit Summary (for the period)");
            reportSheet.Shapes.AddChart (XlChartType.xlLine).Select ();
            excelApp.ActiveChart.SetSourceData (Source: reportSheet.get_Range ("A1:B" + startRow));
            reportSheet.Shapes.AddChart (XlChartType.xlPie).Select ();
            excelApp.ActiveChart.SetSourceData (Source: reportSheet.get_Range ("A1:B" + startRow));
            excelApp.ActiveChart.ClearToMatchStyle ();
            // Best effort: these chart styles are not available in every Office build.
            try { excelApp.ActiveChart.ChartStyle = 256; } catch ( Exception ) { }
            excelApp.ActiveChart.SetElement (Microsoft.Office.Core.MsoChartElementType.msoElementChartTitleAboveChart);
            excelApp.ActiveChart.ChartTitle.Text = "Page visit Summary (for the period) Most Visited Pages";
            reportSheet.Shapes.AddChart (XlChartType.xlBarClustered).Select ();
            excelApp.ActiveChart.SetSourceData (Source: reportSheet.get_Range ("A1:D" + startRow));
            excelApp.ActiveChart.ClearToMatchStyle ();
            try { excelApp.ActiveChart.ChartStyle = 222; } catch ( Exception ) { }
            excelApp.ActiveChart.SetElement (Microsoft.Office.Core.MsoChartElementType.msoElementChartTitleAboveChart);
            excelApp.ActiveChart.ChartTitle.Text = "Page visit Summary (for the period) Average Response Time";
            SpreadCharts (reportSheet);
        }
        #endregion

        #region Daily Analysis
        if ( dailyPages.Any () ) {
            Console.WriteLine ("{0} Generating Daily Statistics", stopWatch.Elapsed.ToString (@"hh\:mm\:ss"));
            reportSheet = reportSpreadsheet.Worksheets.Add (Type.Missing, reportSheet, 1);
            reportSheet.Name = "Daily Analysis";
            foreach ( var d in dailyPages.Select (p => p.Timestamp).Distinct () ) {
                filteredEntries.UnionWith (dailyPages.Where (p => p.Timestamp == d.Date)
                    .OrderByDescending (p => p.Hits).Take (topPagesPerDay));
            }
            // Keep only pages that made the daily top list on more than half the days.
            topPages = filteredEntries.Where (p => filteredEntries.Count (q => q.Url == p.Url) > totalDays / 2);
            startRow = startCol = 1;
            AddChartFromSeries (startRow, startCol, "Daily Top Pages - Visits Trend", topPages, p => p.Hits, d => d.ToString (DateTimeFormatInfo.CurrentInfo.ShortDatePattern));
            startRow = reportRow + 10;
            startCol = 1;
            AddChartFromSeries (startRow, startCol, "Daily Top Pages - Response Time(Average) Trend", topPages, p => p.AvgResponseTime, d => d.ToString (DateTimeFormatInfo.CurrentInfo.ShortDatePattern));
            startRow = reportRow + 10;
            startCol = 1;
            AddChartFromSeries (startRow, startCol, "Daily Top Pages - Response Time(90%tile) Trend", topPages, p => p.NinetiethPercentile, d => d.ToString (DateTimeFormatInfo.CurrentInfo.ShortDatePattern));

            startRow = 1;
            startCol = 30;
            filteredEntries.Clear ();
            foreach ( var d in dailyPages.Select (p => p.Timestamp).Distinct () ) {
                filteredEntries.UnionWith (dailyPages.Where (p => p.Timestamp == d.Date)
                    .OrderByDescending (p => p.NinetiethPercentile).Take (topPagesPerDay));
            }
            topPages = filteredEntries.Where (p => filteredEntries.Count (q => q.Url == p.Url) > totalDays / 2);
            AddChartFromSeries (startRow, startCol, "Daily Slow Pages - Response Time(90%tile) Trend", topPages, p => p.NinetiethPercentile, d => d.ToString (DateTimeFormatInfo.CurrentInfo.ShortDatePattern));

            startRow = reportRow + 10;
            startCol = 30;
            filteredEntries.Clear ();
            foreach ( var d in dailyPages.Select (p => p.Timestamp).Distinct () ) {
                filteredEntries.UnionWith (dailyPages.Where (p => p.Timestamp == d.Date)
                    .OrderByDescending (p => p.AvgResponseTime).Take (topPagesPerDay));
            }
            topPages = filteredEntries.Where (p => filteredEntries.Count (q => q.Url == p.Url) > totalDays / 2);
            AddChartFromSeries (startRow, startCol, "Daily Slow Pages - Response Time(Average) Trend", topPages, p => p.AvgResponseTime, d => d.ToString (DateTimeFormatInfo.CurrentInfo.ShortDatePattern));
            SpreadCharts (reportSheet);
        }
        #endregion

        #region Hourly analysis
        if ( hourlyPages.Any () ) {
            Console.WriteLine ("{0} Generating Hourly Statistics", stopWatch.Elapsed.ToString (@"hh\:mm\:ss"));
            reportSheet = reportSpreadsheet.Worksheets.Add (Type.Missing, reportSheet, 1);
            reportSheet.Name = "Hourly Analysis";
            startRow = 1;
            startCol = 1;
            filteredEntries.Clear ();
            foreach ( var d in hourlyPages.Select (p => p.Timestamp).Distinct () ) {
                filteredEntries.UnionWith (hourlyPages.Where (p => p.Timestamp == d.Date.AddHours (d.Hour))
                    .OrderByDescending (p => p.Hits).Take (topPagesPerDay));
            }
            var totalHits = hourlyPages.Sum (p => p.Hits);
            // Keep pages that are top for at least 10% of the hours or carry 2%+ of traffic.
            topPages = filteredEntries.Where (p => filteredEntries.Count (q => q.Url == p.Url) > totalHours / 10 || p.Hits > totalHits * 2 / 100);
            startRow += AddChartFromSeries (startRow, startCol, "Hourly Top Pages Summary (By Hits)", topPages, p => p.Hits, d => d.ToString ());
            excelApp.ActiveChart.Axes (XlAxisType.xlCategory).CategoryType = XlCategoryType.xlCategoryScale;
            hourlyHits = hourlyPages.GroupBy (p => p.Timestamp, q => q);
            // The smallest total among the top peakHoursCount hours is the peak threshold.
            peakHits = hourlyHits.Select (p => p.Sum (q => q.Hits)).OrderByDescending (p => p).Take (peakHoursCount).Min ();
            peakHourPages = hourlyHits.Where (p => p.Sum (q => q.Hits) >= peakHits);
            startRow += 10;
            startCol = 1;
            startRow += AddChartFromSeries (startRow, startCol, "Peak Hour Top Pages Summary (By Hits)", peakHourPages.SelectMany (g => g.Where (p => p.Hits > peakHits * 2 / 100)), p => p.Hits, d => d.ToString ());
            excelApp.ActiveChart.Axes (XlAxisType.xlCategory).CategoryType = XlCategoryType.xlCategoryScale;
            CollectionToTable (peakHourPages.SelectMany (g => g), startRow + 10, 1, "Peak Hour Pages", true);
            SpreadCharts (reportSheet);
        }
        #endregion

        #region URL Param Hits Summary
        if ( hitsPerURLParams.Any () ) {
            Console.WriteLine ("{0} Generating URL parameter statistics", stopWatch.Elapsed.ToString (@"hh\:mm\:ss"));
            reportSheet = reportSpreadsheet.Worksheets.Add (Type.Missing, reportSheet, 1);
            startRow = startCol = 1;
            reportSheet.Name = "URL Parameters";
            CollectionToTable (urlParamHits.Values, startRow, startCol, "URL Parameters Summary (for the period)");
        }
        #endregion

        #region Summary
        Console.WriteLine ("{0} Generating Summary", stopWatch.Elapsed.ToString (@"hh\:mm\:ss"));
        reportSheet = reportSpreadsheet.Worksheets.Add (reportSheet, Type.Missing, 1);
        reportRow = reportCol = 1;
        reportSheet.Name = "Summary";
        reportSheet.Cells[reportRow, 1] = "Running From";
        reportSheet.Cells[reportRow++, 2] = currentPath;
        reportSheet.Cells[reportRow, 1] = "Commandline Argument";
        reportSheet.Cells[reportRow++, 2] = string.Join (";", cmdArgs.Select (x => x.Key + "=" + x.Value));
        reportSheet.Cells[reportRow, 1] = "Files Processed";
        reportSheet.Cells[reportRow++, 2] = fileCount;
        reportSheet.Cells[reportRow, 1] = "From";
        reportSheet.Cells[reportRow++, 2] = firstEntry;
        reportSheet.Cells[reportRow, 1] = "To";
        reportSheet.Cells[reportRow++, 2] = lastEntry;
        reportSheet.Cells[reportRow, 1] = "TotalHits";
        reportSheet.Cells[reportRow++, 2] = TotalHits;
        // ROBUSTNESS FIX: Average() throws on an empty sequence when no windows were recorded.
        if ( requests.Any () ) {
            reportSheet.Cells[reportRow, 1] = "Average Transactions/Sec";
            reportSheet.Cells[reportRow++, 2] = requests.Average (p => p.Tps);
        }
        if ( hourlyHits != null ) {
            reportSheet.Cells[reportRow, 1] = "Average Transactions/Hour";
            reportSheet.Cells[reportRow++, 2] = hourlyHits.Average (p => p.Sum (q => q.Hits));
        }
        if ( peakHourPages != null ) {
            reportSheet.Cells[reportRow, 1] = "Peak Hour Transactions/Hour";
            reportSheet.Cells[reportRow++, 2] = peakHourPages.Average (p => p.Sum (q => q.Hits));
            reportSheet.Cells[reportRow, 1] = "Peak Hour Transactions/Sec";
            reportSheet.Cells[reportRow++, 2] = peakHourPages.Average (p => p.Sum (q => q.Hits) / 3600);
        }
        reportSheet.Cells[reportRow, 1] = "UniqueIPs";
        reportSheet.Cells[reportRow++, 2] = uniqueIPs.Count;
        reportSheet.Cells[reportRow, 1] = "ServedRequests";
        reportSheet.Cells[reportRow++, 2] = ServedRequests;
        reportRow += 10;
        reportSheet.Cells[reportRow++, 1] = "Http Status code summary";
        reportSheet.Cells[reportRow, 1] = "HTTP Code";
        reportSheet.Cells[reportRow++, 2] = "Count";
        foreach ( var i in httpStatus ) {
            reportSheet.Cells[reportRow, reportCol++] = i.Key;
            reportSheet.Cells[reportRow++, reportCol--] = ( i.Value );
        }
        #endregion
    }
    catch ( Exception e ) {
        Console.WriteLine ("Error!! {0}:{1} - {2}", e.GetType ().Name, e.Message, e.StackTrace);
        Debug.WriteLine ("Error!! {0}:{1}", e.GetType ().Name, e.Message);
    }
    finally {
        // Restore Excel state and release the workbook even when reporting failed part-way.
        if ( excelApp != null ) {
            excelApp.Calculation = XlCalculation.xlCalculationAutomatic;
            if ( reportSpreadsheet != null ) {
                reportSpreadsheet.Save ();
                reportSpreadsheet.Close ();
                excelApp.Quit ();
            }
        }
        File.Delete (tmpFile);
        stopWatch.Stop ();
        Console.WriteLine ("Done, Final time : {0}", stopWatch.Elapsed.ToString (@"hh\:mm\:ss"));
    }
    #endregion
    return 0;
}
/// <summary>
/// Checks whether a .jpg with the given file name already exists in the
/// Content/Uploads/Images upload folder.
/// </summary>
/// <param name="viewModel">Unused; kept so the existing call sites keep compiling.</param>
/// <param name="fileName">File name (including extension) to look for; compared case-sensitively, as before.</param>
/// <returns>True when a matching .jpg exists; otherwise false.</returns>
private bool IsImageExist(ImageViewModel viewModel, string fileName)
{
    string mapPath = Server.MapPath("/Content/");
    string imageDir = "Uploads/Images/";
    var imageFiles = new DirectoryInfo(mapPath + imageDir).GetFiles("*.jpg");
    // Direct Any() over the files replaces the old Count() > 0 check plus
    // build-a-List-then-Contains detour; the name comparison is unchanged.
    return imageFiles.Any(f => f.Name == fileName);
}
/// <summary>
/// Resolves a media id to a playable path or URL: absolute http ids and http
/// media directories pass straight through; otherwise a random file matching
/// the id (which may be a wildcard pattern) is picked from the media directory.
/// </summary>
/// <param name="media">Media item whose Id is a URL, file name, or file pattern.</param>
/// <returns>A URL or full file path, or null when no local file matches.</returns>
public string GetFileName(TeaseMedia media)
{
    // NOTE(review): only "http://" is recognized here — confirm whether
    // "https://" ids/directories should also pass through.
    if (media.Id.StartsWith("http://"))
    {
        return media.Id;
    }
    if (FullMediaDirectoryPath.StartsWith("http://"))
    {
        return FullMediaDirectoryPath + media.Id;
    }
    var matchingFiles = new DirectoryInfo(FullMediaDirectoryPath).GetFiles(media.Id);
    if (matchingFiles.Length > 0)
    {
        // Several files can match a pattern id; choose one at random.
        return matchingFiles[random.Next(matchingFiles.Length)].FullName;
    }
    return null;
}
/// <summary>
/// Entry point: reads every Outlook .msg file in the given folder, walks any
/// attached .msg messages, and appends each recipient's e-mail (or display
/// name when no e-mail is present) to the given output file.
/// </summary>
/// <param name="args">[0] folder to read; [1] file to append results to.</param>
static void Main(string[] args)
{
    if (args.Length != 2)
    {
        Console.WriteLine("Expected 2 command line arguments!");
        Console.WriteLine("For example: EmailExtractor.exe \"c:\\<folder to read>\" \"c:\\filetowriteto.txt\"");
        return;
    }

    var folderIn = args[0];
    if (!Directory.Exists(folderIn))
    {
        Console.WriteLine("The directory '" + folderIn + "' does not exist");
        return;
    }

    var toFile = args[1];

    // Get all the .msg files from the folder
    var files = new DirectoryInfo(folderIn).GetFiles("*.msg");
    Console.WriteLine("Found '" + files.Length + "' files to process");

    // Loop through all the files
    foreach (var file in files)
    {
        Console.WriteLine("Checking file '" + file.FullName + "'");

        // Load the msg file
        using (var message = new Storage.Message(file.FullName))
        {
            Console.WriteLine("Found '" + message.Attachments.Count + "' attachments");

            // Loop through all the attachments
            foreach (var attachment in message.Attachments)
            {
                // Try to cast the attachment to a Message file
                var msg = attachment as Storage.Message;

                // If the file not is null then we have an msg file
                if (msg != null)
                {
                    using (msg)
                    {
                        Console.WriteLine("Found msg file '" + msg.Subject + "'");
                        if (!string.IsNullOrWhiteSpace(msg.MailingListSubscribe))
                            Console.WriteLine("Mailing list subscribe page: '" + msg.MailingListSubscribe + "'");

                        foreach (var recipient in msg.Recipients)
                        {
                            // Prefer the address; fall back to the display name.
                            if (!string.IsNullOrWhiteSpace(recipient.Email))
                            {
                                Console.WriteLine("Recipient E-mail: '" + recipient.Email + "'");
                                File.AppendAllText(toFile, recipient.Email + Environment.NewLine);
                            }
                            else if (!string.IsNullOrWhiteSpace(recipient.DisplayName))
                            {
                                Console.WriteLine("Recipient display name: '" + recipient.DisplayName + "'");
                                File.AppendAllText(toFile, recipient.DisplayName + Environment.NewLine);
                            }
                        }
                    }
                }
            }
        }
    }
}
/// <summary>
/// Primary entry point to the program. Converts zipped AlgoSeek options tick
/// files into LEAN-format CSVs at the configured resolution, flushing each
/// completed time bar as it is read, then compresses the output per input zip.
/// </summary>
/// <param name="args">[0] resolution, [1] source directory, [2] destination directory (all optional).</param>
static void Main(string[] args)
{
    //Document the process:
    Console.WriteLine("QuantConnect.ToolBox: AlgoSeek Options Processor ");
    Console.WriteLine("=================================================");
    Console.WriteLine("The AlgoSeek Options Processor transforms AlgoSeek options ticks into the LEAN Algorithmic Trading Engine Data Format.");
    Console.WriteLine("Three parameters are required: ");
    Console.WriteLine("   1> Output resolution of the LEAN data. (either minute, second or hour)");
    Console.WriteLine("   2> Source Directory of Zipped AlgoSeek Options Data.");
    Console.WriteLine("   3> Destination Directory of LEAN Data Folder. (Typically located under Lean/Data)");
    Console.WriteLine(" ");
    Console.WriteLine("NOTE: THIS WILL OVERWRITE ANY EXISTING FILES.");
    Console.WriteLine("Press any key to continue...");
    Console.ReadKey();

    // Positional arguments override the configured defaults.
    if (args.Length > 0) _resolution = args[0];
    if (args.Length > 1) _sourceDirectory = args[1];
    if (args.Length > 2) _destinationDirectory = args[2];

    //Validate the user input:
    Validate(_sourceDirectory, _destinationDirectory, _resolution);

    //Remove the final slash to make the path building easier:
    _sourceDirectory = StripFinalSlash(_sourceDirectory);
    _destinationDirectory = StripFinalSlash(_destinationDirectory);

    //Get all the zip files to process
    Console.WriteLine("Counting Files...");
    var zipFiles = new DirectoryInfo(_sourceDirectory).GetFiles("*.zip");
    Console.WriteLine("Processing {0} Files ...", zipFiles.Length);

    // Dummy zip handle; Compression.Unzip assigns the real one via the out parameter.
    var zipOut = new Ionic.Zip.ZipFile();

    foreach (var zipFile in zipFiles)
    {
        var starttime = DateTime.Now;
        Console.WriteLine("Reading " + zipFile.Name);
        var listTick = new List<TmpTick>();

        using (var reader = Compression.Unzip(zipFile.FullName, out zipOut))
        {
            while (!reader.EndOfStream)
            {
                // File naming convention puts the symbol in the second dot-separated segment.
                var tick = new TmpTick(zipFile.Name.Split('.')[1], reader.ReadLine());
                if (tick.IsInvalid) continue;

                if (listTick.Count == 0)
                {
                    listTick.Add(tick);
                    continue;
                }

                // Group ticks into bars of _span; keep accumulating while the
                // rounded-down time matches the current bar.
                var lastRoundTime = listTick.Last().Time.RoundDown(_span);
                if (lastRoundTime == tick.Time.RoundDown(_span))
                {
                    listTick.Add(tick);
                    continue;
                }

                Console.Write("\r" + lastRoundTime.ToString(@"HH\:mm\:ss") + "\t" + listTick.Count + "\t" + (DateTime.Now - starttime).ToString(@"hh\:mm\:ss\.fff"));
                // NOTE(review): listTick is not cleared here before the next bar starts —
                // presumably WriteLeanCsvFiles empties or consumes it; confirm, otherwise
                // ticks would accumulate across bars.
                WriteLeanCsvFiles(listTick);
                listTick.Add(tick);
            }
        }

        // Flush the final partial bar of this file.
        WriteLeanCsvFiles(listTick);
        CompressLeanCSV(zipFile);
        Console.WriteLine("... done!");
    }

    Console.WriteLine("All done! Press any key to exit.");
    Console.ReadKey();
}
/// <summary>
/// Parses every .eml file in the working directory into a YahooHeatData record
/// (heat headers, from/to addresses, folder, SPF/DomainKeys/DKIM results,
/// received-chain EHLO info, list-unsubscribe base-26 ids), publishes it to the
/// queue, and archives the file. Files that fail to parse are moved to the
/// error directory; processing continues with the next file.
/// </summary>
/// <param name="token">Cancellation token checked before each file.</param>
public void ParseEmlFiles(CancellationToken token)
{
    var emails = new DirectoryInfo(Settings.WorkingDirectory).GetFiles();
    if (emails.Any())
    {
        // Cap the batch to the configured maximum, if one is set.
        if (Settings.ParserMaxBatchSize > 0 && emails.Count() > Settings.ParserMaxBatchSize)
        {
            emails = emails.Take(Settings.ParserMaxBatchSize).ToArray();
        }
        foreach (var eml in emails)
        {
            if (token.IsCancellationRequested)
            {
                break;
            }
            var yahooHeatData = new YahooHeatData() {IsFirst = true, ServerId = Settings.ServerID};
            // Archive folder is dated per-day (yyyyMMdd) and created on demand.
            var archiveFolder = Path.Combine(Settings.ArchiveDirectory, DateTime.Now.ToString("yyyyMMdd"));
            if (!Directory.Exists(archiveFolder))
            {
                log.Info("Creating archive folder: " + archiveFolder);
                Directory.CreateDirectory(archiveFolder);
            }
            try
            {
                log.Info("Parsing email " + eml.FullName);
                var emailMessage = new MailBuilder().CreateFromEmlFile(eml.FullName);
                yahooHeatData.RawText = emailMessage.RenderEml(true);
                yahooHeatData.HeatTime = DateTime.Parse(emailMessage.Document.Root.Headers["x-heat-time"]);
                yahooHeatData.heat_ip = emailMessage.Document.Root.Headers["x-heat-ip"];
                // Split `"Display Name" <address>` into named groups.
                const string fromRegex = @"\""?(?<fromname>.+)""?\s+<(?<email>.+)>";
                var match = Regex.Match(emailMessage.Document.Root.Headers["from"], fromRegex);
                var fromAddress = new MailAddress(match.Groups["email"].ToString(), match.Groups["fromname"].ToString());

                // FROM
                //TODO: Where do these go?
                //yahooHeatData.from = emailMessage.Document.Root.Headers["from"].Replace("\"", string.Empty);
                yahooHeatData.fromnamedisplay = fromAddress.DisplayName.Replace("\"", "");
                //yahooHeatData.FromAddress = fromAddress.Address;
                yahooHeatData.fromemailaddress_user = fromAddress.User;
                yahooHeatData.fromemailaddress_host = fromAddress.Host;

                // TO
                yahooHeatData.toemailaddress = emailMessage.Document.Root.Headers["to"];
                var recipient = new MailAddress(emailMessage.Document.Root.Headers["to"]);
                string recipientDomain = recipient.Host;

                // BULK/INBOX — Yahoo-family domains flag bulk via x-yahoofilteredbulk;
                // AOL uses x-heat-folder. Other domains leave foldername unset.
                if (recipientDomain == "yahoo.com" || recipientDomain == "ymail.com" || recipientDomain == "rocketmail.com" || recipientDomain == "yahoo.co.uk" || recipientDomain == "att.net")
                {
                    yahooHeatData.foldername = string.IsNullOrEmpty(emailMessage.Document.Root.Headers["x-yahoofilteredbulk"]) ? "INBOX" : "BULK";
                }
                else if (recipientDomain == "aol.com")
                {
                    yahooHeatData.foldername = emailMessage.Document.Root.Headers["x-heat-folder"].ToLower() == "inbox" ? "INBOX" : "BULK";
                }
                yahooHeatData.subject = emailMessage.Document.Root.Headers["subject"];

                // SENT TIME
                yahooHeatData.DateSent = DateTime.Parse(emailMessage.Document.Root.Headers["date"]);

                // Received chain: provider-specific regex extracts EHLO domain/ip,
                // receiving MX and the receive timestamp. First matching header wins.
                var receivedRegex = string.Empty;
                if (recipientDomain == "yahoo.com" || recipientDomain == "ymail.com" || recipientDomain == "rocketmail.com" || recipientDomain == "yahoo.co.uk" || recipientDomain == "att.net")
                {
                    receivedRegex = @"\(EHLO\s(?<ehlodomain>[a-zA-Z0-9\-\.]+)\)\s+\((?<ehloip>(?:[0-9]{1,3}.?){4})\)\s+by\s+(?<receivedmx>[a-zA-Z0-9\.\-]+)\s+with\s+SMTP;\s+(?<receiveddate>.+)";
                }
                else if (recipientDomain == "aol.com")
                {
                    receivedRegex = @"\((?<ehlodomain>[a-zA-Z0-9\-\.]+)\s+\[(?<ehloip>(?:[0-9]{1,3}\.?){4})\]\)\s+by\s+(?<receivedmx>[a-zA-Z0-9\.\-]+)\s.+;\s(?<receiveddate>.+)\s\(";
                }
                bool matched = false;
                foreach (var headerValue in emailMessage.Document.Root.Headers.GetValues("received"))
                {
                    match = Regex.Match(headerValue, receivedRegex, RegexOptions.ExplicitCapture | RegexOptions.IgnoreCase);
                    if (match.Success)
                    {
                        yahooHeatData.ehlodomain = match.Groups["ehlodomain"].ToString(); // ehlo domain
                        yahooHeatData.ehloip = match.Groups["ehloip"].ToString(); // ehlo ip
                        yahooHeatData.receivedmx = match.Groups["receivedmx"].ToString(); // received mx
                        yahooHeatData.DateReceived = DateTime.Parse(match.Groups["receiveddate"].ToString());
                        matched = true;
                        break;
                    }
                }
                if (!matched)
                {
                    throw new Exception("Failed to find EHLO info from \"received\" headers");
                }

                // SPF — header name and result pattern differ per provider.
                string spfHeader = string.Empty;
                string spfRegex = string.Empty;
                if (recipientDomain == "yahoo.com" || recipientDomain == "ymail.com" || recipientDomain == "rocketmail.com" || recipientDomain == "yahoo.co.uk" || recipientDomain == "att.net")
                {
                    spfRegex = @"^(\w+)";
                    spfHeader = "received-spf";
                }
                else if (recipientDomain == "aol.com")
                {
                    spfRegex = @"SPF : (\w+)";
                    spfHeader = "x-aol-spf";
                }
                if (!string.IsNullOrWhiteSpace(emailMessage.Document.Root.Headers[spfHeader]))
                {
                    match = Regex.Match(emailMessage.Document.Root.Headers[spfHeader], spfRegex);
                    yahooHeatData.spfheader = match.Groups[1].ToString();
                }

                // DomainKey — only extracted for the Yahoo family.
                string domainkeysHeader;
                string domainkeysRegex;
                if (recipientDomain == "yahoo.com" || recipientDomain == "ymail.com" || recipientDomain == "rocketmail.com" || recipientDomain == "yahoo.co.uk" || recipientDomain == "att.net")
                {
                    domainkeysHeader = "Authentication-Results";
                    domainkeysRegex = @"domainkeys=(\w+)";
                    match = Regex.Match(emailMessage.Document.Root.Headers[domainkeysHeader], domainkeysRegex);
                    yahooHeatData.domainkeysAuthenticationResults = match.Groups[1].ToString();
                }

                // DKIM — provider-specific header/pattern, same extraction shape as SPF.
                string dkimHeader = string.Empty;
                string dkimRegex = string.Empty;
                if (recipientDomain == "yahoo.com" || recipientDomain == "ymail.com" || recipientDomain == "rocketmail.com" || recipientDomain == "yahoo.co.uk" || recipientDomain == "att.net")
                {
                    dkimHeader = "Authentication-Results";
                    dkimRegex = @"dkim=(\w+)";
                }
                else if (recipientDomain == "aol.com")
                {
                    dkimHeader = "x-aol-scoll-authentication";
                    dkimRegex = @"DKIM\s:\s(\w+)";
                }
                if (!string.IsNullOrWhiteSpace(emailMessage.Document.Root.Headers[dkimHeader]))
                {
                    match = Regex.Match(emailMessage.Document.Root.Headers[dkimHeader], dkimRegex);
                    yahooHeatData.dkimAuthenticationResults = match.Groups[1].ToString();
                }

                // Recipient domain
                yahooHeatData.recipientDomain = recipientDomain;
                const string unsubscribeHeader = "list-unsubscribe";

                // Base 26 Info
                if (!string.IsNullOrWhiteSpace(emailMessage.Document.Root.Headers[unsubscribeHeader]))
                {
                    // Batch-Subscriber-List Id
                    IDictionary<string, int> base26Values;
                    MailingSystem mailingSystem = DetermineMailingSystemFromListUnsubscribe(emailMessage.Document.Root.Headers[unsubscribeHeader]);
                    switch (mailingSystem)
                    {
                        case MailingSystem.WhiteDelivery:
                            base26Values = GetWdBase26Info(emailMessage.Document.Root.Headers[unsubscribeHeader]);
                            yahooHeatData.BatchId = base26Values["batch_id"];
                            yahooHeatData.SubscriberID = base26Values["subscriber_id"];
                            yahooHeatData.ListID = base26Values["list_id"];
                            break;
                        case MailingSystem.Avenlo:
                            base26Values = GetAvenloBase26Info(emailMessage.Document.Root.Headers[unsubscribeHeader]);
                            yahooHeatData.AvenloDeploymentId = base26Values["action_id"];
                            yahooHeatData.AvenloRecipientId = base26Values["redirect_id"];
                            break;
                    }
                }

                // x-apparently-to carries "<email> via <ip>; <date>"; keep the ip.
                const string apperentlyToHeader = "x-apparently-to";
                if (!string.IsNullOrWhiteSpace(emailMessage.Document.Root.Headers[apperentlyToHeader]))
                {
                    const string apparentlyToRegex = @"(?<apparentFromEmail>.+)\s+via\s+(?<apparentReceivedIp>.+);\s+(?<apparentReceivedDate>.+)";
                    match = Regex.Match(emailMessage.Document.Root.Headers[apperentlyToHeader], apparentlyToRegex);
                    yahooHeatData.received_ip = match.Groups["apparentReceivedIp"].ToString();
                }

                // Publish to the queue; on failure back off and abandon the batch
                // (the file stays in the working directory for the next run).
                if (!PublishMessage(yahooHeatData))
                {
                    // NOTE(review): Thread.Sleep takes milliseconds, but the constant
                    // name says "Seconds" and the subtrahend is ParserProcessingDelay,
                    // not ParserBatchWaitTime as the comparison suggests — confirm units.
                    if (Settings.ParserBatchWaitTime < MinimumWaitSecondsOnQueueConnectionFailure)
                    {
                        System.Threading.Thread.Sleep(MinimumWaitSecondsOnQueueConnectionFailure - Settings.ParserProcessingDelay);
                    }
                    return;
                }

                // archive file
                string archiveFile = Path.Combine(archiveFolder, eml.Name);
                eml.MoveTo(archiveFile);
                log.Debug("Archived file: " + archiveFile);
            }
            catch (Exception ex)
            {
                // Parse failure: quarantine the file and continue with the rest.
                log.Error("Failed to parse: " + eml.FullName);
                log.Error(ex.ToString());
                eml.MoveTo(Path.Combine(Settings.ErrorDirectory, eml.Name));
                continue;
            }
            // Optional throttle between files.
            if (Settings.ParserProcessingDelay > 0)
            {
                System.Threading.Thread.Sleep(Settings.ParserProcessingDelay);
            }
        }
    }
    else
    {
        log.Info("No emails to parse");
    }
}
public async Task DataForAamrq() { FileInfo aamqrFileInfo = new FileInfo(@"I:\Dropbox\JJ\badsymbols.csv"); AllDataDownloader dl = new AllDataDownloader(aamqrFileInfo.FullName, null) { ZipOutput = true, OutputDirectory = aamqrFileInfo.DirectoryName + @"\data\" }; dl.SymbolList = aamqrFileInfo.FullName; // // Puts it in I:\Dropbox\JJ\equity\daily instead of I:\Dropbox\JJ\data\equity\daily // await dl.DownloadDataFromListAsync(); var files = new DirectoryInfo(dl.OutputDirectory).GetFiles("aamrq.*", SearchOption.AllDirectories); int count = files.Count(); Assert.IsTrue(count > 0); }
/// <summary> /// Die dispatcherTimer Methode; ruft die Backup oder die Restore Klasse auf. /// </summary> /// <param name="sender"></param> /// <param name="e"></param> private void dispatcherTimer_Tick(object sender, EventArgs e) { changeLayout("startDispatcherTimer"); this._Counter += _ADD_COUNT_FACTOR; if (_Counter == _TIME_FACTOR_TO_ENABLED_BACKUP_CANCEL_BUTTON || this._DbLevel == "restore") { btCancelProcess.Visibility = System.Windows.Visibility.Visible; } try { if (_Cusb.checkUSB()) { KöTaf.Utils.BackupRestore.DriveLetter drivelet = _Cusb._DriveL; String dl = drivelet._DriveLetter; switch (this._DbLevel) { case "backupExist": break; case "backup": dispatcherTimer.Stop(); try { br.StartBR("backupDB", dl + _USB_FOLDER_NAME); } catch (Exception ex) { textBlock1.Text = IniParser.GetSetting("ERRORMSG", "backupRuntimeError") + ex.ToString(); } changeLayout("backup"); Timer timer = new Timer(); timer.writeToXML(Environment.CurrentDirectory + @"\TimeStamp.xml", "True", DateTime.Now.ToShortDateString(), "timeStamp/backup/date", "timeStamp/backup/iswrite"); break; case "restore": string[] FileList; dispatcherTimer.Stop(); hList= drivelet.CheckSavedFiles(drivelet._DriveLetter); //Rufe alle Backup Dateien von USB - Datenspeicher ab. var Files = new DirectoryInfo(dl + _USB_FOLDER_NAME).GetFiles() .OrderBy(f => f.CreationTime) .ToArray(); for (int i = Files.Count() -1; i >= 0; i--) { try { FileList = Files[i].ToString().Split(new Char[] { '.' }); if (FileList[2].ToString() == this._FILENAME_DB_EXTENSION.ToString()) listBox1.Items.Add(Files[i]); } catch { throw new Exception(IniParser.GetSetting("ERRORMSG", "falseFilesInFolder")); } } changeLayout("restore"); break; } return; } decide_DbLevel(this._DbLevel); } catch (Exception ex) { isNotCorrectUSB(ex.Message); } }
private void Refresh(CancellationTokenSource cts) { var snapshotTask = Task.Factory.StartNew(() => { while (true) { SaveSnapshot(); if (cts.Token.IsCancellationRequested) { break; } Thread.Sleep(TimeSpan.FromSeconds(5)); } }, cts.Token); var hashTask = Task.Factory.StartNew(() => { Stopwatch watch = Stopwatch.StartNew(); var filesEnumerator = new DirectoryInfo(this.RootPath).EnumerateFiles("*", SearchOption.AllDirectories); long repoSize = filesEnumerator.Sum(f => f.Length); int repoFileCount = filesEnumerator.Count(); _logger.InfoFormat("Refresh du repository '{0}' : {1} fichiers ({2:0.00} go).", this.RootPath, repoFileCount, FileLength.ToMegaBytes(repoSize)); Stopwatch logWatch = Stopwatch.StartNew(); int index = 0; List<FileInfo> lastBatch = new List<FileInfo>(); foreach (var fi in filesEnumerator) { lastBatch.Add(fi); if (fi.Name == SnapshotFileName) { // on saute le fichier de snapshot continue; } string relativeFileName = fi.GetRelativePath(this.RootPath); Map.AddOrUpdate(relativeFileName, _ => new CustomInfo(relativeFileName, fi), (_, info) => info.Update(fi)); if (cts.Token.IsCancellationRequested) { break; } if (logWatch.ElapsedMilliseconds >= 5000) { long lastBatchSize = lastBatch.Sum(f => f.Length); long remaining = repoSize - lastBatchSize; double bytesPerSec = Convert.ToDouble(lastBatchSize / (logWatch.ElapsedMilliseconds / 1000)); TimeSpan etaSec = TimeSpan.FromSeconds(remaining / bytesPerSec); _logger.InfoFormat("Refresh : {0:P2} - {1:0.00} Mo/s - eta : {2}", Convert.ToDouble(index) / repoFileCount, FileLength.ToMegaBytes(bytesPerSec), etaSec); lastBatch.Clear(); logWatch.Reset(); logWatch.Start(); } index++; } cts.Cancel(); return watch.Elapsed; }, cts.Token); Task.WaitAll(snapshotTask, hashTask); }
void _bgwLoader_DoWork(object sender, DoWorkEventArgs e) { _areReadyToClose.Reset(); if (_bgwUpdater != null) if (_bgwUpdater.IsBusy) { _bgwUpdater.CancelAsync(); _areReadyToClose.WaitOne(); } Logger.LogDisp.NewMessage(LogType.Info,"Started player DB initialization..."); if (StateChanged != null) StateChanged(DBStates.Initiating); using (var cmd = new SQLiteCommand("DELETE FROM Accounts;", _conn)) cmd.ExecuteNonQuery(); var accDirs = new DirectoryInfo(_db.AccPath).GetDirectories("??-????????").OrderByDescending(d => d.LastAccessTime); //Directory.GetDirectories(path, "??-????????").OrderByDescending(d => d.La); var i = 0; var count = accDirs.Count(); if ((bool)e.Argument) Parallel.ForEach(accDirs, account => { if (_bgwLoader.CancellationPending) { _areReadyToClose.Set(); e.Cancel = true; return; } LoadAccountDirectory(account.FullName); _bgwLoader.ReportProgress((i / count) * 100); i++; } ); else { foreach (var acc in accDirs) { if (_bgwLoader.CancellationPending) { _areReadyToClose.Set(); e.Cancel = true; return; } LoadAccountDirectory(acc.FullName); _bgwLoader.ReportProgress( (int)( ((double)i / count) * 100 ), _db.Queue.Count ); i++; } } _areReadyToClose.Set(); Logger.LogDisp.NewMessage(LogType.Info, "Player DB initialized."); }
private void RellenarTreeViewMp3(TreeNode nodoSeleccionado) { // Cargamos todos los directorios colgantes de ese directorio DirectoryInfo[] info2 = new DirectoryInfo(nodoSeleccionado.Tag.ToString()).GetDirectories(); // Ordenamos alfabeticamente el directoryinfo Array.Sort(info2, delegate(DirectoryInfo f1, DirectoryInfo f2) { return f1.Name.CompareTo(f2.Name); }); if (info2.Count() > 0) { // Pasamos por cada directorio colgante y cogemos su informacion foreach (DirectoryInfo Directorio in info2) { if (!Directorio.Attributes.ToString().Contains("Hidden")) { // Creamos un nodo con esa informacion TreeNode nuevoNodo = new TreeNode(); nuevoNodo.Text = Directorio.Name; nuevoNodo.Tag = Directorio.FullName; nodoSeleccionado.Nodes.Add(nuevoNodo); } } nodoSeleccionado.Expand(); } }