/// <summary>
/// FileSystemWatcher change handler. For a file not yet copied, copies it from
/// the watch directory to the deploy directory and records its last-write time;
/// for a file already copied, queues it for reprocessing when its last-write
/// time has advanced since the recorded copy.
/// </summary>
/// <param name="source">The watcher that raised the event (unused).</param>
/// <param name="e">Event data carrying the changed file's name.</param>
public void OnChanged(object source, FileSystemEventArgs e)
{
    // Single TryGetValue replaces the original ContainsKey + TryGetValue +
    // redundant second ContainsKey (always true on that branch).
    if (ProcessedFiles.TryGetValue(e.Name, out DateTime lastCopied))
    {
        // Already copied once: re-queue only if the file changed since then.
        if (File.GetLastWriteTime(WatchLocation + e.Name) > lastCopied)
        {
            AddToBeProcessed(e.Name);
        }
        return;
    }

    Console.WriteLine("{0} : Copying {1}", DateTime.Now, e.Name);

    // Paths built once instead of repeated string.Format("{0}{1}", ...) calls.
    string deployPath = DeployLocation + e.Name;
    string watchPath = WatchLocation + e.Name;

    if (File.Exists(deployPath))
    {
        File.Delete(deployPath);
    }

    // Brief pause so the writer can release its handle before we copy.
    Thread.Sleep(100);

    if (File.Exists(watchPath))
    {
        File.Copy(watchPath, deployPath);
        ProcessedFiles.Add(e.Name, File.GetLastWriteTime(watchPath));
    }
    else
    {
        Console.WriteLine("{0} : The file {1} does not exist. We cannot copy it.", DateTime.Now, e.Name);
    }
}
/// <summary>
/// Resets all tracked file state: zeroes the counters and empties both
/// file collections.
/// </summary>
public void ClearFiles()
{
    FileCount = 0;
    DroppedFileSize = 0d;
    UploadFileSize = 0d;
    Files.Clear();
    ProcessedFiles.Clear();
}
/// <summary>
/// Test helper: asserts that the version matches the expected value, that the
/// new processed set contains "hello.html", and that both the processed-files
/// snapshot and the files map changed from their originals.
/// </summary>
void ValidateFiles(
    ulong expectedVersion,
    ulong actualVersion,
    ProcessedFiles originalProcessedFiles,
    ProcessedFiles newProcessedFiles,
    string originalFilesMap,
    string newFilesMap)
{
    Assert.AreEqual(expectedVersion, actualVersion);
    // The processed set must contain the file the test just added.
    Assert.NotNull(newProcessedFiles.Files.Find(q => q.FileName.Equals("hello.html")));
    // TODO: fix this; incorrect way of testing equality of this struct
    // NOTE(review): AreNotEqual on these values likely compares references /
    // default struct equality, not contents — confirm against the type's
    // Equals implementation before relying on this assertion.
    Assert.AreNotEqual(originalProcessedFiles, newProcessedFiles);
    Assert.AreNotEqual(originalFilesMap, newFilesMap);
}
/// <summary>
/// Reads the named CSV file from the C:\ root, inserts each row as a meter
/// reading, then records the file itself as processed.
/// </summary>
/// <param name="Filename">CSV file name, resolved relative to C:\.</param>
/// <returns>The default view.</returns>
public ActionResult ProcessFile(string Filename)
{
    Console.WriteLine("**** Read in the CSV file *****");
    var path = @"C:\" + Filename;

    using (var reader = new StreamReader(path, Encoding.Default))
    // BUG FIX: "en-UK" is not a valid culture name; the United Kingdom
    // culture is "en-GB", so CreateSpecificCulture could not resolve it.
    using (var csv = new CsvReader(reader, System.Globalization.CultureInfo.CreateSpecificCulture("en-GB")))
    {
        csv.Configuration.RegisterClassMap<MeterMap>();
        var records = csv.GetRecords<MtrReading>().ToList();

        foreach (var reading in records)
        {
            Console.WriteLine("Processing " + reading.AccountId);
            //MtrReadingValidityCheck(reading.AccountId);
            var meterReadingToAdd = new MtrReading()
            {
                AccountId = reading.AccountId,
                MeterReadingDateTime = reading.MeterReadingDateTime,
                MeterReadValue = reading.MeterReadValue
            };
            _context.MtrReadings.Add(meterReadingToAdd);
        }

        var fileToAdd = new ProcessedFiles() { ProcessedFileName = Filename };
        _context.ProcessedFiles.Add(fileToAdd);

        // One SaveChanges for the whole batch instead of a database
        // round-trip per row inside the loop.
        _context.SaveChanges();
    }

    return View();
}
/// <summary>
/// Clear the processing list and process the files that are queued
/// </summary>
public virtual void ClearProcessedList()
{
    // Guard clause: nothing to do if a clear is in flight or the list is empty.
    if (CurrentlyProcessing || ProcessedFiles.Count == 0)
    {
        return;
    }

    //We are now processing
    CurrentlyProcessing = true;
    try
    {
        Console.WriteLine("{0} : Clearing the List", DateTime.Now);
        ProcessedFiles.Clear();
        Console.WriteLine("{0} : Cleared the List", DateTime.Now);
    }
    finally
    {
        // BUG FIX: reset the flag even if clearing throws; otherwise the
        // watcher would be stuck reporting "processing" forever.
        CurrentlyProcessing = false;
    }

    if (ToBeProcessed.Count > 0)
    {
        Console.WriteLine("{0} : Copying files that have had changes since the last copy", DateTime.Now);
        // Replay a synthetic Changed event for every queued file.
        ToBeProcessed.ForEach(x => OnChanged(new object(), new FileSystemEventArgs(WatcherChangeTypes.Changed, WatchLocation, x.ToString(CultureInfo.DefaultThreadCurrentUICulture))));
        ToBeProcessed.Clear();
    }
}
/// <summary>
/// Parses the file at <paramref name="FilePath"/> into <paramref name="files"/>
/// and recursively processes every client-side reference it contains. Paths
/// already present in <paramref name="files"/> are skipped, so reference
/// cycles terminate.
/// </summary>
/// <param name="files">Accumulator keyed by file path; updated in place.</param>
/// <param name="FilePath">Path of the file to parse.</param>
/// <exception cref="FileNotFoundException">The file does not exist.</exception>
private static void ProcessFile(ProcessedFiles files, string FilePath)
{
    if (files.ContainsKey(FilePath))
        return;

    var fileInfo = new FileInfo(FilePath);
    if (!fileInfo.Exists)
    {
        // BUG FIX: throw the specific exception type instead of bare Exception
        // so callers can catch file-missing distinctly. Backward compatible:
        // FileNotFoundException derives from Exception.
        throw new FileNotFoundException(FilePath + " not found.", FilePath);
    }

    var pf = new ProcessedFile
    {
        FilePath = FilePath,
    };
    // Register before recursing so cyclic references hit the early return.
    files[FilePath] = pf;

    // File.ReadAllText replaces the manual StreamReader + ReadToEnd dance.
    List<ILine> list = Parse(File.ReadAllText(FilePath));

    pf.Lines = list.GetOutline(pf);

    foreach (var referenceLine in pf.References)
    {
        string newPath = FilePath.NavigateTo(referenceLine.Value.ClientSideReference);
        ProcessFile(files, newPath);
    }
}
/// <summary>
/// Entry point: parses <paramref name="FilePath"/> and everything it
/// references into a fresh <see cref="ProcessedFiles"/> collection.
/// </summary>
/// <param name="FilePath">Path of the root file to parse.</param>
/// <returns>The populated collection of processed files.</returns>
public static ProcessedFiles ParseFile(string FilePath)
{
    var result = new ProcessedFiles();
    ProcessFile(result, FilePath);
    return result;
}
/// <summary>
/// Inserts the given record via the repository and returns the stored entity.
/// </summary>
/// <param name="processedFiles">Record to insert.</param>
/// <returns>The entity as returned by the repository after insertion.</returns>
public async Task<ProcessedFiles> InsertOnSubmit(ProcessedFiles processedFiles)
{
    var inserted = await _processedFilesRepository.InsertAsync(processedFiles);
    return inserted;
}
/// <summary>
/// Queues a root VSS project for analysis. The queued work item recurses
/// through every subproject and file, skipping paths matched by the
/// ExcludeFiles patterns, processes each item, and logs summary counts.
/// Shared files are processed only once (tracked by physical name).
/// </summary>
/// <param name="project">Root project to analyze; must belong to this database.</param>
/// <exception cref="ArgumentNullException">project is null.</exception>
/// <exception cref="ArgumentException">project belongs to another database.</exception>
public void AddItem(VssProject project)
{
    if (project == null)
    {
        throw new ArgumentNullException(nameof(project));
    }
    else if (project.Database != Database)
    {
        throw new ArgumentException("Project database mismatch", nameof(project));
    }

    rootProjects.AddLast(project);

    // Build the exclusion matcher up front, outside the queued delegate.
    PathMatcher exclusionMatcher = null;
    if (!string.IsNullOrEmpty(ExcludeFiles))
    {
        var excludeFileArray = ExcludeFiles.Split(
            new char[] { ';' }, StringSplitOptions.RemoveEmptyEntries);
        exclusionMatcher = new PathMatcher(excludeFileArray);
    }

    workQueue.AddLast(delegate(object work)
    {
        logger.WriteSectionSeparator();
        LogStatus(work, "Building revision list");
        logger.WriteLine("Root project: {0}", project.Path);
        logger.WriteLine("Excluded files: {0}", ExcludeFiles);

        var excludedProjects = 0;
        var excludedFiles = 0;
        var stopwatch = Stopwatch.StartNew();
        VssUtil.RecurseItems(project,
            delegate(VssProject subproject)
            {
                if (workQueue.IsAborting)
                {
                    return RecursionStatus.Abort;
                }
                var path = subproject.Path;
                if (exclusionMatcher != null && exclusionMatcher.Matches(path))
                {
                    logger.WriteLine("Excluding project {0}", path);
                    ++excludedProjects;
                    return RecursionStatus.Skip;
                }
                ProcessItem(subproject, path, exclusionMatcher);
                ++projectCount;
                return RecursionStatus.Continue;
            },
            delegate(VssProject subproject, VssFile file)
            {
                if (workQueue.IsAborting)
                {
                    return RecursionStatus.Abort;
                }
                var path = file.GetPath(subproject);
                if (exclusionMatcher != null && exclusionMatcher.Matches(path))
                {
                    logger.WriteLine("Excluding file {0}", path);
                    ++excludedFiles;
                    return RecursionStatus.Skip;
                }
                // only process shared files once (projects are never shared)
                if (!ProcessedFiles.Contains(file.PhysicalName))
                {
                    ProcessedFiles.Add(file.PhysicalName);
                    ProcessItem(file, path, exclusionMatcher);
                    ++fileCount;
                }
                return RecursionStatus.Continue;
            });
        stopwatch.Stop();

        logger.WriteSectionSeparator();
        // BUG FIX: Stopwatch.ElapsedTicks is measured in Stopwatch.Frequency
        // units, NOT DateTime ticks, so new DateTime(stopwatch.ElapsedTicks)
        // reported a wrong duration. Format the TimeSpan directly instead.
        logger.WriteLine("Analysis complete in {0:hh\\:mm\\:ss}", stopwatch.Elapsed);
        logger.WriteLine("Projects: {0} ({1} excluded)", projectCount, excludedProjects);
        logger.WriteLine("Files: {0} ({1} excluded)", fileCount, excludedFiles);
        logger.WriteLine("Revisions: {0}", revisionCount);
    });
}
/// <summary>
/// Initiates loading of world objects.
/// </summary>
public void Init(bool withSprites)
{
    WithSprites = withSprites;

    // FAR archive providers for object IFF data and tuning tables.
    Iffs = new FAR1Provider<IffFile>(ContentManager, new IffCodec(), "objectdata/objects/objiff.far");
    TuningTables = new FAR1Provider<OTFFile>(ContentManager, new OTFCodec(), new Regex(".*/objotf.*\\.far"));
    Iffs.Init();
    TuningTables.Init();

    if (withSprites)
    {
        // Sprite archives are only loaded when requested by the caller.
        Sprites = new FAR1Provider<IffFile>(ContentManager, new IffCodec(), new Regex(".*/objspf.*\\.far"));
        Sprites.Init();
    }

    /** Load packingslip **/
    Entries = new Dictionary<ulong, GameObjectReference>();
    Cache = new TimedReferenceCache<ulong, GameObject>();

    var packingslip = new XmlDocument();
    packingslip.Load(ContentManager.GetPath("packingslips/objecttable.xml"));
    var objectInfos = packingslip.GetElementsByTagName("I");

    // One Entries record per <I> element in the packing slip.
    foreach (XmlNode objectInfo in objectInfos)
    {
        // "g" holds the object id in hex. NOTE(review): parsed with
        // Convert.ToUInt32 then widened to ulong, so ids wider than 32 bits
        // would throw — confirm the packingslip only contains 32-bit ids.
        ulong FileID = Convert.ToUInt32(objectInfo.Attributes["g"].Value, 16);
        Entries.Add(FileID, new GameObjectReference(this)
        {
            ID = FileID,
            FileName = objectInfo.Attributes["n"].Value,
            Source = GameObjectSource.Far,
            Name = objectInfo.Attributes["o"].Value,
            Group = Convert.ToInt16(objectInfo.Attributes["m"].Value),
            SubIndex = Convert.ToInt16(objectInfo.Attributes["i"].Value)
        });
    }

    //init local objects, piff clones
    //Directory.CreateDirectory(Path.Combine(FSOEnvironment.ContentDir, "Objects"));
    string[] paths = Directory.GetFiles(Path.Combine(FSOEnvironment.ContentDir, "Objects"), "*.iff", SearchOption.AllDirectories);
    for (int i = 0; i < paths.Length; i++)
    {
        string entry = paths[i];
        string filename = Path.GetFileName(entry);
        IffFile iffFile = new IffFile(entry);
        var objs = iffFile.List<OBJD>();
        // Skip IFFs that contain no OBJD chunks.
        if (objs == null)
        {
            continue;
        }
        foreach (var obj in objs)
        {
            // NOTE(review): Entries.Add throws on a GUID already registered
            // from the packingslip above — confirm duplicates cannot occur
            // between standalone IFFs and FAR entries.
            Entries.Add(obj.GUID, new GameObjectReference(this)
            {
                ID = obj.GUID,
                FileName = entry,
                Source = GameObjectSource.Standalone,
                Name = obj.ChunkLabel,
                Group = (short)obj.MasterID,
                SubIndex = obj.SubIndex
            });
        }
    }

    // Pre-generate resources for objects rewritten by PIFF patches so the
    // patched versions are cached up front.
    var piffModified = PIFFRegistry.GetOBJDRewriteNames();
    foreach (var name in piffModified)
    {
        // Strips the last 4 characters of the name (presumably a ".iff"-style
        // extension — confirm against GetOBJDRewriteNames output).
        ProcessedFiles.GetOrAdd(name, GenerateResource(new GameObjectReference(this)
        {
            FileName = name.Substring(0, name.Length - 4),
            Source = GameObjectSource.Far
        }));
    }
}
/// <summary>Records the given file as processed.</summary>
/// <param name="file">File identifier to record.</param>
public void AddProcessedFile(string file) => ProcessedFiles.Add(file);