/// <summary>
/// Entry point: prints size statistics for the local directory, refreshes the local
/// repository, then synchronizes it against the master directory. Each long-running
/// phase gets its own CancellationTokenSource wired to a BreakExecution watcher task.
/// </summary>
static void Main(string[] args)
{
    InitalizeLog4Net();

    string localDirPath = @"C:\Outils\FileTest\Local";
    string masterDirPath = @"C:\Users\Laurent\Pictures\Photos LIGER\";

    // BUGFIX: EnumerateFiles is lazy; calling Count() and then Sum() on the raw
    // enumerable scanned the whole directory tree twice (and could even see two
    // different file sets). Materialize once.
    var files = new DirectoryInfo(localDirPath)
        .EnumerateFiles("*", SearchOption.AllDirectories)
        .ToList();
    int count = files.Count;
    double totalLength = files.Sum(f => f.Length) / 1024.0 / 1024.0; // megabytes

    // BUGFIX: guard against an empty directory (division by zero in the average).
    double averageMb = count > 0 ? totalLength / count : 0.0;
    Console.WriteLine("{0} fichiers. Taille totale : {1:0.00} Go, Taille moy. : {2:0.00} mo",
        count, totalLength / 1024, averageMb);

    // Watcher task lets the user interrupt the refresh via BreakExecution.
    CancellationTokenSource cts = new CancellationTokenSource();
    var breakTask = Task.Factory.StartNew(() => { BreakExecution(cts); }, cts.Token);
    Repository syncDir = new Repository(localDirPath, cts);

    Console.WriteLine();

    // Start over, this time for the synchronization towards another directory.
    cts = new CancellationTokenSource();
    breakTask = Task.Factory.StartNew(() => { BreakExecution(cts); }, cts.Token);
    syncDir.SyncWithMaster(masterDirPath,
        new SyncOptions() { MaxLocalStorage = FileLength.FromGigaBytes(7) }, cts);

    Console.WriteLine("Appuyer sur une touche pour fermer cette fenetre.");
    Console.Read();
}
/// <summary>
/// Converts Bovespa zip files with trade/bid/ask data for equities, options and futures into Lean format.
/// Pass 1 expands each input zip into per-date/per-ticker tick CSV files; pass 2 re-zips those CSVs
/// into one archive per ticker and deletes the intermediate CSVs.
/// </summary>
/// <returns>Task that completes when all input zips have been converted and re-zipped.</returns>
private static async Task IntradayFilesConverter()
{
    // Input prefix depends on the configured data type: bid -> OFER_CPA, ask -> OFER_VDA, otherwise NEG (trades).
    var zipfiles = new DirectoryInfo(_settings.InputFolder + @"\" + _settings.SecurityType)
        .GetFiles((_settings.InputDataType == "bid" ? "OFER_CPA" : _settings.InputDataType == "ask" ? "OFER_VDA" : "NEG") + "*zip")
        // Assumes file names end "...yyyyMMdd.zip": 8-char date starting 12 chars from the end — TODO confirm.
        .Where(f => f.Name.Substring(f.Name.Length - 12, 8).ToDateTime() > _settings.StartDate).ToList();

    if (zipfiles.Count == 0)
    {
        Console.WriteLine("No zip files with {0} Bovespa data.", _settings.InputDataType);
        return;
    }

    ZipFile zip;
    var sum = 0.0;
    var total = zipfiles.Sum(z => z.Length);
    var destinationFolder = Directory.CreateDirectory(_settings.OutputFolder + @"\" + _settings.SecurityType + @"\bra\" +
        (_settings.InputDataType == "trade" ? "tick" : _settings.InputDataType) + @"\");

    // Delete existing output so AppendAllText below starts from clean files.
    var csvFiles = destinationFolder.GetFiles("*_" + _settings.InputDataType + "_Tick.csv").ToList();
    csvFiles.ForEach(c => c.Delete());

    var starttime = DateTime.Now;
    Console.WriteLine(zipfiles.Count + " zip files with Bovespa {0} data (" + (total / Math.Pow(1024, 3)).ToString("0.00 GB") + ") \t" + starttime, _settings.InputDataType);

    foreach (var zipfile in zipfiles)
    {
        // NOTE(review): 'zip' is never disposed; if ZipFile is IDisposable this leaks
        // file handles across iterations — confirm and wrap in using if so.
        Compression.Unzip(zipfile.FullName, out zip);

        for (var i = 0; i < zip.Entries.Count; i++)
        {
            using (var reader = new StreamReader(zip[i].OpenReader()))
            {
                while (!reader.EndOfStream)
                {
                    var csv = (await reader.ReadLineAsync()).Split(';');
                    // BUGFIX: the body reads up to csv[5]; the old guard (< 5) let
                    // 5-field lines through and crashed with IndexOutOfRangeException.
                    if (csv.Length < 6) continue;

                    // Ticker (trimmed in place so the filename below uses the clean value).
                    if (!ValidateSymbol(csv[1] = csv[1].Trim())) continue;

                    // Date field: strip dashes ("yyyy-MM-dd" -> "yyyyMMdd") for the filename.
                    csv[0] = csv[0].Replace("-", "");

                    // Output record: milliseconds-of-day, price scaled by 10000, quantity.
                    // BUGFIX: previously written back into csv[2]/csv[6], which threw when the
                    // split produced fewer than 7 fields; plain locals avoid that entirely.
                    var line = TimeSpan.Parse(csv[5]).TotalMilliseconds.ToString("F0") + ","
                        + (10000 * decimal.Parse(csv[3])).ToString("F0") + ","
                        + csv[4].ToInt64() + "\r\n";

                    // One CSV file per date/ticker pair.
                    var path = destinationFolder.FullName + @"\" + csv[0] + "_" + csv[1].ToLower() + "_Trade_Tick.csv";
                    File.AppendAllText(path, line);
                }
            }
        }

        // BUGFIX: zipfile.Length / total was long/long integer division — always 0,
        // so the progress display never moved. Cast to double for a real percentage.
        Console.Write("\r" + (sum += (double)zipfile.Length / total).ToString("0.00%") + "\t" + (DateTime.Now - starttime).ToString(@"hh\:mm\:ss") + " Last file: " + zipfile.Name.ToUpper());
    }

    // Pass 2: zip the intermediate CSVs, one sub-directory (and archive) per ticker.
    var subdirs = new Dictionary<string, string>();
    csvFiles = destinationFolder.GetFiles("*_" + _settings.InputDataType + "_Tick.csv").ToList();
    sum = 0.0;
    total = csvFiles.Sum(z => z.Length);
    starttime = DateTime.Now;
    Console.WriteLine(csvFiles.Count + " csv files with {0} data (" + (total / Math.Pow(1024, 3)).ToString("0.00 GB") + ") to zip. \t" + starttime, _settings.InputDataType);

    foreach (var csvFile in csvFiles)
    {
        // File name convention from pass 1: "<date>_<ticker>_..._Tick.csv".
        var data = csvFile.Name.Split('_');
        var date = data[0];
        var ticker = data[1];

        // Create each ticker's sub-directory once and cache its full path.
        if (!subdirs.ContainsKey(ticker))
        {
            var subdir = Directory.CreateDirectory(destinationFolder.FullName + @"\" + ticker + @"\");
            subdirs.Add(ticker, subdir.FullName);
        }

        Compression.Zip(subdirs[ticker] + date + "_trade.zip",
            new Dictionary<string, string> { { csvFile.Name, File.ReadAllText(csvFile.FullName) } });

        // BUGFIX: same long/long integer-division progress bug as in pass 1.
        Console.Write("\r" + (sum += (double)csvFile.Length / total).ToString("0.00%") + "\t" + (DateTime.Now - starttime).ToString(@"hh\:mm\:ss") + " Last file: " + csvFile.Name.ToUpper());
        csvFile.Delete();
    }

    Console.WriteLine("\r... exiting routine at " + DateTime.Now);
}
/// <summary>
/// Converts Bovespa zip files with daily data for equities into Lean format (input-data-type = "daily").
/// Reads COTAHIST_A* yearly archives (1998 onward), groups fixed-width records per ticker into
/// per-symbol CSVs, then zips each CSV.
/// </summary>
/// <returns>Task that completes when all daily files have been converted and zipped.</returns>
private static async Task DailyFilesConverter()
{
    // COTAHIST_Ayyyy.zip: the 4 digits before ".zip" are the year; only 1998+ is converted.
    var zipfiles = new DirectoryInfo(_settings.InputFolder + @"\equity").GetFiles("COTAHIST_A*zip")
        .Where(f => f.Name.Substring(f.Name.Length - 8, 4).ToInt32() > 1997).ToList();

    if (zipfiles.Count == 0)
    {
        Console.WriteLine("No zip files with daily Bovespa data.");
        return;
    }

    var sum = 0.0;
    // BUGFIX: removed the dead "(long)zipfiles.Count" assignment that was immediately overwritten.
    var total = zipfiles.Sum(z => z.Length);

    //FolderCleanUp("equity", "daily");
    var outputdir = Directory.CreateDirectory(_settings.OutputFolder + @"\equity\bra\daily\");
    var starttime = DateTime.Now;
    Console.WriteLine(zipfiles.Count + " zip files with Bovespa daily data (" + (total / Math.Pow(1024, 2)).ToString("0.00 MB") + ") \t" + starttime);

    foreach (var zipfile in zipfiles)
    {
        starttime = DateTime.Now;
        var data = await ReadAsyncZipFile(zipfile);
        data.RemoveAll(d => Filter(d));

        // Fixed-width COTAHIST records: ticker at [12,12], date at [2,8]; the slices at
        // [56,13]/[69,13]/[82,13]/[108,13]/[152,18] are presumably O/H/L/C/volume —
        // TODO confirm against the official COTAHIST layout.
        data.GroupBy(d => d.Substring(12, 12).Trim().ToLower() + ".csv").ToList().ForEach(d =>
        {
            File.AppendAllLines(outputdir.FullName + d.Key, d.Select(l =>
            {
                return l.Substring(2, 8) + "," +
                    100 * Convert.ToInt64(l.Substring(56, 13)) + "," +
                    100 * Convert.ToInt64(l.Substring(69, 13)) + "," +
                    100 * Convert.ToInt64(l.Substring(82, 13)) + "," +
                    100 * Convert.ToInt64(l.Substring(108, 13)) + "," +
                    100 * Convert.ToInt64(l.Substring(152, 18));
            }).OrderBy(l => l));
        });

        // BUGFIX: zipfile.Length / total was long/long integer division (always 0), so the
        // reported progress never moved; cast to double for a real percentage.
        Console.WriteLine((sum += (double)zipfile.Length / total).ToString("0.00%\t") + zipfile.Name.ToUpper() + " read in " + (DateTime.Now - starttime).ToString(@"ss\.ff") + " secs");
    }

    var csvFiles = outputdir.GetFiles("*.csv").ToDictionary(x => x.Name.Replace(".csv", "").Trim(), y => y);
    Console.WriteLine(csvFiles.Count + " csv files to zip.\t" + DateTime.Now);

    sum = 0.0;
    total = csvFiles.Values.Sum(c => c.Length);

    foreach (var kvp in csvFiles)
    {
        await Task.Factory.StartNew(() => Compression.Zip(kvp.Value.FullName));
        // BUGFIX: same long/long integer-division progress bug as above.
        Console.Write("\r" + (sum += (double)kvp.Value.Length / total).ToString("0.00%\t") + kvp.Key.ToUpper() + "\t");
    }

    Console.WriteLine("\r... exiting routine at " + DateTime.Now);
}
/// <summary>
/// Convert LEAN data to custom data: reads per-symbol daily zip files at the given
/// intraday resolution, applies ticker adjustment factors, and writes one semicolon-
/// separated CSV per symbol into the current working directory.
/// </summary>
/// <param name="OutputResolution">Output resolution for intraday cases (hour/minute/second)</param>
/// <returns>Task that completes when every symbol directory has been converted.</returns>
private static async Task Convert2CustomCSV(string OutputResolution)
{
    var symbols = new DirectoryInfo(_settings.OutputFolder + @"/" + _settings.SecurityType + @"/bra/" + OutputResolution + @"/")
        .GetDirectories().ToList();

    // Restrict to the configured instrument list, when one is provided.
    if (_settings.InstrumentList.Length > 0) symbols.RemoveAll(s => !_settings.InstrumentList.Contains(s.Name.ToUpper()));

    // BUGFIX: this guard was an empty block; without an early return the routine
    // fell through and printed misleading progress output for zero symbols.
    if (symbols.Count == 0)
    {
        Console.WriteLine("No symbol directories with {0} data.", OutputResolution);
        return;
    }

    var sum = 0.0;
    var total = 0.0;
    symbols.ForEach(d => total += d.GetFiles().Sum(f => f.Length));

    var roottime = DateTime.Now;
    Console.WriteLine("\r" + symbols.Count + " symbol directories to read (" + (total / 1024 / 1024).ToString("000.00") + " MB).\t" + roottime);

    foreach (var symbol in symbols)
    {
        var starttime = DateTime.Now;
        var factors = GetTickerFactors(symbol.Name);
        var zipfiles = symbol.GetFiles("*.zip").ToList();

        // Output CSV lands in the current working directory — the zipping pass below reads it from there.
        var csvFile = new FileInfo(symbol.Name.ToUpper() + "_" + _settings.OutputResolution + ".csv");
        if (csvFile.Exists) csvFile.Delete();

        foreach (var zipfile in zipfiles)
        {
            var date = zipfile.Name.ToDateTime();
            // First factor whose key date is on/after this file's date is applied to all prices.
            var factor = factors.FirstOrDefault(kvp => kvp.Key >= date);

            File.AppendAllLines(csvFile.FullName, (await ReadAsyncZipFile(zipfile)).Select(l =>
            {
                var data = l.Split(',');
                // LEAN tick line: [0] = milliseconds of day, [1..4] = prices scaled by 10000;
                // 6-field lines carry one trailing value, 7-field lines carry two (swapped order).
                return symbol.Name.ToUpper() + ";" +
                    date.AddMilliseconds(data[0].ToInt64()).ToString(@"dd/MM/yyyy;HH\:mm\:ss") + ";" +
                    Math.Round(data[1].ToDecimal() * factor.Value / 10000, 2).ToString("0.00", _ptBR) + ";" +
                    Math.Round(data[2].ToDecimal() * factor.Value / 10000, 2).ToString("0.00", _ptBR) + ";" +
                    Math.Round(data[3].ToDecimal() * factor.Value / 10000, 2).ToString("0.00", _ptBR) + ";" +
                    Math.Round(data[4].ToDecimal() * factor.Value / 10000, 2).ToString("0.00", _ptBR) + ";" +
                    (data.Length == 6 ? data[5] : data[6] + ";" + data[5]);
            }).ToArray());

            sum += zipfile.Length;
        }

        Console.Write("\r" + (sum / total).ToString("0.00%") + "\t" + symbol.Name.ToUpper() + ": \t" + zipfiles.Count + " days were read/written in " + (DateTime.Now - starttime).ToString(@"ss\.ff") + " secs.\t");
    }

    // For options and futures
    var csvFiles = new DirectoryInfo(Environment.CurrentDirectory).GetFiles("*_" + _settings.OutputResolution + ".csv");

    if (csvFiles.Length > 0)
    {
        sum = 0.0;
        total = csvFiles.Sum(f => f.Length);
        Console.WriteLine("Zipping " + (total / 1024 / 1024).ToString("000.00") + " MB");

        // NOTE(review): the zipping pass is currently disabled — the GroupBy below is a
        // deferred LINQ query that is never enumerated, so it has no effect until the
        // commented ForEach consumer underneath is restored.
        csvFiles.GroupBy(g =>
        {
            if (_settings.SecurityType == "equity") return g.Name.Replace(".csv", ".zip");
            if (_settings.SecurityType == "futures") return g.Name.Substring(0, 3).ToUpper() + "FUT" + "_" + _settings.OutputResolution + ".zip";
            // Options: presumably the 5th character encodes the series, A..L = call, otherwise put — TODO confirm.
            var type = ("ABCDEFGHIJKL".Contains(g.Name[4]) ? "_C" : "_P") + "_" + _settings.OutputResolution + ".zip";
            return g.Name.Substring(0, 4) + type;
        });
        //.ToList().ForEach(f =>
        //{
        //    var outputfile = new FileInfo(f.Key);
        //    if (outputfile.Exists) outputfile.Delete();
        //    Compression.Zip(outputfile.FullName);
        //    using (var z = new FileStream(outputfile.FullName, FileMode.Create))
        //    using (var a = new ZipArchive(z, ZipArchiveMode.Create, true))
        //    f.ToList().ForEach(csvFile =>
        //    {
        //        a.CreateEntryFromFile(csvFile.FullName, csvFile.Name, CompressionLevel.Optimal);
        //        csvFile.Delete();
        //        sum += csvFile.Length;
        //        Console.Write("\r" + (sum / total).ToString("0.00%") + "\tLast zippped file:\t" + csvFile.Name.ToUpper());
        //    });
        //});
    }

    Console.WriteLine("\r\n... exiting routine at " + DateTime.Now);
}
/// <summary>
/// Convert LEAN data to custom data. Handles the daily resolution here; intraday
/// resolutions are delegated to the string overload. No-op when the configured
/// output format is already "lean".
/// </summary>
/// <returns>Task that completes when all daily zip files have been converted.</returns>
private static async Task Convert2CustomCSV()
{
    // Nothing to do when the requested output is already LEAN format.
    if (_settings.OutputFormat == "lean") return;

    // Intraday resolutions are handled by the overload; this method only converts daily data.
    if (_settings.OutputResolution != "daily")
    {
        await Convert2CustomCSV(_settings.OutputResolution);
        return;
    }

    var zipfiles = new DirectoryInfo(_settings.OutputFolder + @"/" + _settings.SecurityType + @"/bra/daily/").GetFiles("*.zip").ToList();

    // BUGFIX: this guard was an empty block; return early instead of printing
    // progress output for zero files.
    if (zipfiles.Count == 0)
    {
        Console.WriteLine("No zip files with daily data to convert.");
        return;
    }

    var sum = 0.0;
    var total = zipfiles.Sum(f => f.Length);
    var roottime = DateTime.Now;
    // BUGFIX: total / 1024 / 1024 was pure integer division (whole megabytes only).
    Console.WriteLine(zipfiles.Count + " symbol to read.(" + (total / 1024.0 / 1024.0).ToString("000.00") + "MB)\t" + roottime);

    foreach (var zipfile in zipfiles)
    {
        var starttime = DateTime.Now;
        var symbol = zipfile.Name.Replace(".zip", "");

        // Output CSV goes to the current working directory.
        var csvFile = new FileInfo(symbol.ToUpper() + "_Diário.csv");
        if (csvFile.Exists) csvFile.Delete();

        var factors = GetTickerFactors(symbol);

        File.WriteAllLines(csvFile.FullName, (await ReadAsyncZipFile(zipfile)).Select(l =>
        {
            var data = l.Split(',');
            var date = data[0].ToDateTime();
            // First factor whose key date is on/after this row's date is applied to all prices.
            var factor = factors.FirstOrDefault(kvp => kvp.Key >= date);
            // Fields [1..4] are prices scaled by 10000; 6-field lines carry one trailing
            // value, 7-field lines carry two (swapped order).
            return symbol.ToUpper() + ";" + date.ToString("dd/MM/yyyy") + ";" +
                Math.Round(data[1].ToDecimal() * factor.Value / 10000, 2).ToString("0.00", _ptBR) + ";" +
                Math.Round(data[2].ToDecimal() * factor.Value / 10000, 2).ToString("0.00", _ptBR) + ";" +
                Math.Round(data[3].ToDecimal() * factor.Value / 10000, 2).ToString("0.00", _ptBR) + ";" +
                Math.Round(data[4].ToDecimal() * factor.Value / 10000, 2).ToString("0.00", _ptBR) + ";" +
                (data.Length == 6 ? data[5] : data[6] + ";" + data[5]);
        }).ToArray());

        Console.Write("\r" + ((sum += zipfile.Length) / total).ToString("0.00%\t") + symbol.ToUpper() + "\twas read/written in " + (DateTime.Now - starttime).ToString(@"ss\.ff") + " secs\t");
    }

    Console.WriteLine("\r... exiting routine at " + DateTime.Now + ". Took " + (DateTime.Now - roottime).ToString(@"mm\:ss"));
}
/// <summary>
/// Loads command plugin assemblies matching <paramref name="searchPattern"/> from the
/// current directory (recursively) and reports the total number of commands loaded.
/// </summary>
/// <param name="searchPattern">File search pattern for command libraries; defaults to "*.cmd.dll".</param>
public static void LoadPlugin(string searchPattern = "*.cmd.dll")
{
    var files = new DirectoryInfo(Directory.GetCurrentDirectory()).GetFiles(searchPattern, SearchOption.AllDirectories);

    if (files.Length == 0)
    {
        // BUGFIX: corrected typos in the user-facing message ("Commant" -> "Command",
        // "whith" -> "with"); also dropped the redundant string.Format — Console.WriteLine
        // has a composite-format overload.
        Console.WriteLine("No Command Library Files Found with Search Pattern '{0}'", searchPattern);
        return;
    }

    Console.WriteLine("\r\nTotal {0} Commands Loaded.", files.Sum(fileInfo => LoadPlugin(fileInfo.FullName, false)));

    // Reset help and IntelliSense caches so the newly loaded commands are picked up.
    ResetHelp();
    ResetIntelliSense();
}
/// <summary>
/// Deletes the oldest files in the archive log directory, one at a time, until the
/// directory's total size is at or below the given limit.
/// </summary>
/// <param name="archiveLogDir">Directory containing archived log files.</param>
/// <param name="maxMegabytes">Maximum allowed total size of the directory, in megabytes.</param>
private void TrimOlgLogFiles(string archiveLogDir, int maxMegabytes)
{
    var maxBytes = ((long)maxMegabytes) * 1024L * 1024L;

    // Loop replaces the original tail recursion: re-list and re-check after each delete.
    while (true)
    {
        var files = new DirectoryInfo(archiveLogDir).GetFiles();
        if (!files.Any())
        {
            return;
        }

        var totalSize = files.Sum(info => info.Length);
        if (totalSize <= maxBytes)
        {
            return;
        }

        // BUGFIX: the previous code used files.Single(file => file.CreationTime == files.Min(...)),
        // which throws InvalidOperationException whenever two files share the minimum creation
        // time, and re-evaluated Min inside the predicate (O(n^2)). Sorting and taking the
        // first is safe and picks a deterministic victim.
        var fileToDelete = files.OrderBy(info => info.CreationTime).First();
        fileToDelete.Delete();
    }
}
/// <summary>
/// Refreshes the repository map: one background task periodically saves a snapshot
/// while another walks every file under RootPath, updates the hash map, and logs
/// throughput/ETA. Blocks until both tasks complete; the hashing task cancels the
/// token when it finishes, which stops the snapshot loop.
/// </summary>
/// <param name="cts">Cancellation source shared by both tasks; cancelled by this method once hashing is done.</param>
private void Refresh(CancellationTokenSource cts)
{
    // Background task: persist a snapshot every 5 seconds until cancellation.
    var snapshotTask = Task.Factory.StartNew(() =>
    {
        while (true)
        {
            SaveSnapshot();
            if (cts.Token.IsCancellationRequested)
            {
                break;
            }
            Thread.Sleep(TimeSpan.FromSeconds(5));
        }
    }, cts.Token);

    var hashTask = Task.Factory.StartNew(() =>
    {
        Stopwatch watch = Stopwatch.StartNew();

        // BUGFIX: the lazy enumeration was previously consumed three times (Sum, Count,
        // then the foreach), scanning the directory tree three times and possibly seeing
        // a different file set each pass. Materialize once.
        var files = new DirectoryInfo(this.RootPath).EnumerateFiles("*", SearchOption.AllDirectories).ToList();
        long repoSize = files.Sum(f => f.Length);
        int repoFileCount = files.Count;

        // NOTE(review): the message says "go" (GB) but FileLength.ToMegaBytes is used —
        // confirm which unit is intended.
        _logger.InfoFormat("Refresh du repository '{0}' : {1} fichiers ({2:0.00} go).", this.RootPath, repoFileCount, FileLength.ToMegaBytes(repoSize));

        Stopwatch logWatch = Stopwatch.StartNew();
        int index = 0;
        long processedBytes = 0; // cumulative bytes covered by completed log batches
        List<FileInfo> lastBatch = new List<FileInfo>();

        foreach (var fi in files)
        {
            lastBatch.Add(fi);
            if (fi.Name == SnapshotFileName)
            {
                // Skip the snapshot file itself.
                continue;
            }

            string relativeFileName = fi.GetRelativePath(this.RootPath);
            Map.AddOrUpdate(relativeFileName, _ => new CustomInfo(relativeFileName, fi), (_, info) => info.Update(fi));

            if (cts.Token.IsCancellationRequested)
            {
                break;
            }

            // Log throughput and ETA roughly every 5 seconds.
            if (logWatch.ElapsedMilliseconds >= 5000)
            {
                long lastBatchSize = lastBatch.Sum(f => f.Length);
                // BUGFIX: "remaining" was computed as repoSize minus only the LAST batch,
                // so the ETA barely moved; track the cumulative processed size instead.
                processedBytes += lastBatchSize;
                long remaining = repoSize - processedBytes;
                // BUGFIX: was long / (long / long) — double integer truncation (whole
                // seconds, whole bytes); compute in floating point.
                double bytesPerSec = lastBatchSize / (logWatch.ElapsedMilliseconds / 1000.0);
                if (bytesPerSec > 0)
                {
                    // Guard: TimeSpan.FromSeconds(Infinity) would throw if the batch had zero bytes.
                    TimeSpan etaSec = TimeSpan.FromSeconds(remaining / bytesPerSec);
                    _logger.InfoFormat("Refresh : {0:P2} - {1:0.00} Mo/s - eta : {2}", Convert.ToDouble(index) / repoFileCount, FileLength.ToMegaBytes(bytesPerSec), etaSec);
                }
                lastBatch.Clear();
                logWatch.Restart(); // idiom: Restart = Reset + Start
            }
            index++;
        }

        // Stop the snapshot task once hashing is finished (or was itself cancelled).
        cts.Cancel();
        return watch.Elapsed;
    }, cts.Token);

    Task.WaitAll(snapshotTask, hashTask);
}
/// <summary>
/// Builds a human-readable summary of log-restore and log-download activity over the
/// last 24 hours: restores are counted from the RsisLogBackups table, downloads from
/// the files in <paramref name="folderPath"/>.
/// </summary>
/// <param name="folderPath">Folder containing downloaded log files; the download part is skipped when it does not exist.</param>
/// <returns>The summary sentence(s).</returns>
public string GetLogSummary( string folderPath ) {
    var result = "";
    ExecuteDbMethod(
        cn => {
            var cutoff = DateTime.Now.AddHours( -24 );

            // Count the log backups saved since the cutoff.
            var selectCommand = new InlineSelect( "count( * )".ToSingleElementArray(), "from RsisLogBackups", false );
            selectCommand.AddCondition(
                new InequalityCondition(
                    InequalityCondition.Operator.GreaterThan,
                    new InlineDbCommandColumnValue( "DateAndTimeSaved", new DbParameterValue( cutoff, "DateTime2" ) ) ) );
            var restoredCount = 0;
            selectCommand.Execute(
                cn,
                r => {
                    r.Read();
                    restoredCount = r.GetInt32( 0 );
                } );
            result = "In the last 24 hours, " + restoredCount + " logs were successfully restored.";

            if( !Directory.Exists( folderPath ) )
                return;

            // Report files downloaded into the folder since the cutoff, with their combined size.
            var downloaded = new DirectoryInfo( folderPath ).GetFiles().Where( f => f.LastWriteTime > cutoff ).ToList();
            var byteTotal = downloaded.Sum( f => f.Length );
            result += " " + downloaded.Count + " logs were downloaded, with a total size of " +
                      FormattingMethods.GetFormattedBytes( byteTotal ) + ".";
        } );
    return result;
}