/// <summary>
/// Gets a specific file's last write time, in UTC.
/// </summary>
/// <param name="path">The path of the file.</param>
/// <returns>
/// A <see cref="DateTime" /> in UTC.
/// </returns>
/// <exception cref="ArgumentNullException">
/// <paramref name="path" /> is <see langword="null" /> (<see langword="Nothing" /> in Visual Basic).
/// </exception>
public DateTime GetLastWriteTime(string path)
{
    _ = Requires.NotNullOrWhiteSpace(path, nameof(path));

    return FSFile.GetLastWriteTimeUtc(path);
}
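A minimal self-contained sketch (not part of the original wrapper) of what the underlying BCL call returns; the path is hypothetical, and the missing-file behavior described in the comment is standard System.IO.File behavior.

using System;
using System.IO;

static class LastWriteTimeDemo
{
    static void Main()
    {
        string path = @"C:\temp\example.txt";   // hypothetical path, for illustration only

        // File.GetLastWriteTimeUtc returns the last write time in UTC. For a file that
        // does not exist it does not throw; it returns 1601-01-01T00:00:00Z, so callers
        // that care should check File.Exists first, as several snippets below do.
        DateTime lastWriteUtc = File.GetLastWriteTimeUtc(path);
        Console.WriteLine($"{path} last written (UTC): {lastWriteUtc:O}");
    }
}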
public string GetCurrentFileVersionFast(IOConnectionInfo ioc)
{
    if (ioc.IsLocalFile())
    {
        return File.GetLastWriteTimeUtc(ioc.Path).ToString(CultureInfo.InvariantCulture);
    }
    else
    {
        return DateTime.MinValue.ToString(CultureInfo.InvariantCulture);
    }
}
public LangFile(string path)
{
    Doc = XDocument.Load(path);
    Name = Doc.Descendants("language").First().Attribute("name")?.Value;
    Base = Doc.Descendants("language").First().Attribute("base")?.Value;
    Variant = Doc.Descendants("language").First().Attribute("variant")?.Value;
    IsDefault = Doc.Descendants("language").First().Attribute("isDefault")?.Value == "true";
    LangCode = Doc.Descendants("language").First().Attribute("code")?.Value;
    FilePath = path;
    FileName = Path.GetFileNameWithoutExtension(path);
    LatestUpdate = File.GetLastWriteTimeUtc(path);
}
public List<Issue> GetAllIssues(bool pulls, DateTime? since = null)
{
    var issues = new List<Issue>();

    foreach (var directory in Directory.EnumerateDirectories(IssuesBaseDirectory))
    {
        var directoryName = directory.Split('\\').Last();
        var repositoryName = directoryName.Substring(directoryName.LastIndexOf("__", StringComparison.Ordinal) + 2);

        var issuesDirectory = directory + "\\issues\\";
        if (pulls)
        {
            issuesDirectory = issuesDirectory + "\\pulls\\";
        }

        if (Directory.Exists(issuesDirectory) == false)
        {
            continue;
        }

        var issueFiles = Directory.EnumerateFiles(issuesDirectory, "*.combined.json");
        foreach (var file in issueFiles)
        {
            // Skip the file if older than the specified timestamp
            if (since != null && File.GetLastWriteTimeUtc(file) < since.Value)
            {
                continue;
            }

            Issue item = null;
            Retry.Do(() =>
            {
                var fileContent = File.ReadAllText(file);
                item = JsonConvert.DeserializeObject<Issue>(fileContent);
            }, TimeSpan.FromMilliseconds(200), 3);

            if (item == null)
            {
                continue;
            }

            item.RepositoryName = repositoryName;
            issues.Add(item);
        }
    }

    return issues;
}
public bool CheckForFileChangeFast(IOConnectionInfo ioc, string previousFileVersion)
{
    if (!ioc.IsLocalFile())
    {
        return false;
    }

    if (previousFileVersion == null)
    {
        return false;
    }

    DateTime previousDate;
    if (!DateTime.TryParse(previousFileVersion, CultureInfo.InvariantCulture, DateTimeStyles.None, out previousDate))
    {
        return false;
    }

    DateTime currentModificationDate = File.GetLastWriteTimeUtc(ioc.Path);
    TimeSpan diff = currentModificationDate - previousDate;
    return diff > TimeSpan.FromSeconds(1);

    //don't use > operator because milliseconds are truncated
    //return File.GetLastWriteTimeUtc(ioc.Path) - previousDate >= TimeSpan.FromSeconds(1);
}
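The one-second tolerance above exists because previousFileVersion comes from GetCurrentFileVersionFast, which formats the timestamp with whole-second precision. A minimal sketch (not from the original source) of the precision lost on that round trip:

using System;
using System.Globalization;

static class FileVersionRoundTripDemo
{
    static void Main()
    {
        // ToString(CultureInfo.InvariantCulture) uses the general "G" pattern
        // ("MM/dd/yyyy HH:mm:ss"), so sub-second precision is dropped on the way out.
        DateTime original = new DateTime(2024, 1, 2, 3, 4, 5, 678, DateTimeKind.Utc);
        string stored = original.ToString(CultureInfo.InvariantCulture);   // "01/02/2024 03:04:05"

        DateTime parsed = DateTime.Parse(stored, CultureInfo.InvariantCulture, DateTimeStyles.None);
        Console.WriteLine(original - parsed);   // 00:00:00.6780000 - the milliseconds are gone,
                                                // hence the one-second tolerance in the comparison above
    }
}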
public override DateTimeOffset GetLastWriteTime() => new DateTimeOffset(F.GetLastWriteTimeUtc(this.FullName), TimeSpan.Zero);
public List<Issue> GetAllCommunityIssues(bool pulls, DateTime? since = null)
{
    var issues = new List<Issue>();

    var pullRequestService = new GitHubService();
    var hqMembers = pullRequestService.GetHqMembers();
    var teamMembers = pullRequestService.GetTeamMembers();

    foreach (var directory in Directory.EnumerateDirectories(IssuesBaseDirectory))
    {
        var directoryName = directory.Split('\\').Last();
        var repositoryName = directoryName.Substring(directoryName.LastIndexOf("__", StringComparison.Ordinal) + 2);

        var issuesDirectory = directory + "\\issues\\";
        if (pulls)
        {
            issuesDirectory = issuesDirectory + "\\pulls\\";
        }

        if (Directory.Exists(issuesDirectory) == false)
        {
            continue;
        }

        var issueFiles = Directory.EnumerateFiles(issuesDirectory, "*.combined.json");

        var reviewers = new List<string>();
        reviewers.AddRange(hqMembers);

        var team = teamMembers.FirstOrDefault(x => x.TeamName == repositoryName);
        if (team != null)
        {
            reviewers.AddRange(team.Members);
        }

        foreach (var file in issueFiles)
        {
            // Skip the file if older than the specified timestamp
            if (since != null && File.GetLastWriteTimeUtc(file) < since.Value)
            {
                continue;
            }

            var fileContent = File.ReadAllText(file);
            var item = JsonConvert.DeserializeObject<Issue>(fileContent);

            // Exclude issues created by HQ
            if (hqMembers.Contains(item.User.Login.ToLowerInvariant()))
            {
                continue;
            }

            item.RepositoryName = repositoryName;

            foreach (var comment in item.Comments)
            {
                var commenter = comment.User.Login.ToLowerInvariant();
                if (item.FirstPrTeamOrHqComment == null && reviewers.Contains(commenter))
                {
                    item.FirstPrTeamOrHqComment = comment.CreateDateTime.ToLocalTime();
                }
            }

            issues.Add(item);
        }
    }

    return issues;
}
public void PopulateCatalog_WhenPluginXmlContainsPreprocessorInstructions_AppliesThem()
{
    string pluginContents = "<plugin pluginId=\"pluginId\" xmlns=\"http://www.gallio.org/\"><traits><?ifdef A?><name>A</name><?endif?><?ifdef B?><property>B</property><?endif?></traits></plugin>";

    PluginLoaderTest.RunWithTemporaryPluginFile((pluginDir, pluginFile) =>
    {
        Guid installationId = Guid.NewGuid();
        var loader = new CachingPluginLoader();
        loader.InstallationId = installationId;
        loader.AddPluginPath(pluginFile);
        loader.DefinePreprocessorConstant("A");

        Hash64 hash = new Hash64().Add(pluginFile).Add("A").Add(installationId.ToString());
        var cacheDir = CachingPluginLoader.GetCurrentUserPluginCacheDir();
        string cacheFilePath = Path.Combine(cacheDir, hash + ".xml");
        if (System.IO.File.Exists(cacheFilePath))
        {
            System.IO.File.Delete(cacheFilePath);
        }

        // First pass.
        {
            Plugin plugin = null;
            var catalog = MockRepository.GenerateMock<IPluginCatalog>();
            catalog.Expect(x => x.AddPlugin(null, null)).IgnoreArguments()
                .Do((Gallio.Common.Action<Plugin, DirectoryInfo>)delegate(Plugin pluginArg, DirectoryInfo baseDirectoryArg)
                {
                    plugin = pluginArg;
                });

            loader.PopulateCatalog(catalog, NullProgressMonitor.CreateInstance());

            catalog.VerifyAllExpectations(); // added one plugin
            Assert.AreEqual(new PropertySet() { { "name", "A" } }, plugin.Traits.PropertySet);
        }

        // Check cache file.
        {
            Assert.IsTrue(File.Exists(cacheFilePath));
            Cache cache = Assert.XmlDeserialize<Cache>(File.ReadAllText(cacheFilePath));
            Assert.AreEqual(installationId.ToString(), cache.InstallationId);
            Assert.AreEqual(1, cache.PluginInfos.Count);
            Assert.AreEqual(pluginDir, cache.PluginInfos[0].BaseDirectory);
            Assert.AreEqual("pluginId", cache.PluginInfos[0].Plugin.PluginId);
            Assert.AreEqual(pluginFile, cache.PluginInfos[0].PluginFile);
            Assert.AreEqual(File.GetLastWriteTimeUtc(pluginFile), cache.PluginInfos[0].PluginFileModificationTime);
        }

        // Second pass should restore from cache.
        {
            Plugin plugin = null;
            var catalog = MockRepository.GenerateMock<IPluginCatalog>();
            catalog.Expect(x => x.AddPlugin(null, null)).IgnoreArguments()
                .Do((Gallio.Common.Action<Plugin, DirectoryInfo>)delegate(Plugin pluginArg, DirectoryInfo baseDirectoryArg)
                {
                    plugin = pluginArg;
                });

            loader.PopulateCatalog(catalog, NullProgressMonitor.CreateInstance());

            catalog.VerifyAllExpectations(); // added one plugin
            Assert.AreEqual(new PropertySet() { { "name", "A" } }, plugin.Traits.PropertySet);
        }
    }, pluginContents);
}
/// <inheritdoc />
protected override void LoadPlugins(PluginCallback pluginCallback, IProgressMonitor progressMonitor)
{
    // Attempt to read the old cache.
    string cacheFilePath;
    try
    {
        Hash64 hash = new Hash64();
        foreach (string pluginPath in PluginPaths)
        {
            hash = hash.Add(pluginPath);
        }
        foreach (string constant in InitialPreprocessorConstants)
        {
            hash = hash.Add(constant);
        }
        hash = hash.Add(InstallationId.ToString());

        string cacheDirPath = GetCurrentUserPluginCacheDir();
        string cacheFileName = hash + ".xml";
        cacheFilePath = Path.Combine(cacheDirPath, cacheFileName);

        if (Directory.Exists(cacheDirPath))
        {
            if (File.Exists(cacheFilePath))
            {
                Cache oldCache = ReadCacheFile(cacheFilePath);
                if (oldCache != null)
                {
                    foreach (var pluginInfo in oldCache.PluginInfos)
                    {
                        pluginCallback(pluginInfo.Plugin, new DirectoryInfo(pluginInfo.BaseDirectory), pluginInfo.PluginFile);
                    }

                    return;
                }
            }
        }
        else
        {
            Directory.CreateDirectory(cacheDirPath);
        }
    }
    catch (Exception)
    {
        // Fallback on any failure.
        // There can be all sorts of weird security exceptions that will prevent
        // us from manipulating the local application data directory.
        base.LoadPlugins(pluginCallback, progressMonitor);
        return;
    }

    // Load plugin metadata.
    var newCache = new Cache { InstallationId = InstallationId.ToString() };
    base.LoadPlugins((plugin, baseDirectory, pluginFile) =>
    {
        newCache.PluginInfos.Add(new CachePluginInfo
        {
            Plugin = plugin,
            BaseDirectory = baseDirectory.FullName,
            PluginFile = pluginFile,
            PluginFileModificationTime = File.GetLastWriteTimeUtc(pluginFile)
        });

        pluginCallback(plugin, baseDirectory, pluginFile);
    }, progressMonitor);

    // Attempt to store it in the cache.
    try
    {
        WriteCacheFile(cacheFilePath, newCache);
    }
    catch (Exception)
    {
        // Ignore any failure.
    }
}
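A minimal sketch (not Gallio's actual implementation) of the cache-key idea in CachingPluginLoader: fold every input that affects the result into one hash so the cache file name changes whenever the inputs do, and record each plugin file's UTC last write time alongside the cached metadata, presumably so a later run can recognize stale entries. SHA-256 stands in for Gallio's Hash64, and the paths are hypothetical.

using System;
using System.IO;
using System.Security.Cryptography;
using System.Text;

static class PluginCacheKeyDemo
{
    static void Main()
    {
        string[] pluginPaths = { @"C:\plugins\a.plugin", @"C:\plugins\b.plugin" };   // hypothetical
        string[] constants = { "A" };
        Guid installationId = Guid.NewGuid();

        // Combine plugin paths, preprocessor constants, and the installation id into one key.
        string keySource = string.Join("|", pluginPaths) + "|" + string.Join("|", constants) + "|" + installationId;

        using (var sha = SHA256.Create())
        {
            byte[] hashBytes = sha.ComputeHash(Encoding.UTF8.GetBytes(keySource));
            string cacheFileName = BitConverter.ToString(hashBytes).Replace("-", "") + ".xml";
            Console.WriteLine(cacheFileName);
        }

        // Recording each plugin file's last write time (UTC) next to the cached metadata
        // gives a later run something to compare against when deciding whether to trust the cache.
        foreach (string pluginPath in pluginPaths)
        {
            Console.WriteLine($"{pluginPath}: {File.GetLastWriteTimeUtc(pluginPath):O}");
        }
    }
}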
public override void OnActionExecuted(HttpActionExecutedContext actionExecutedContext)
{
    base.OnActionExecuted(actionExecutedContext);

    var tempFolders = new List<string>();

    if (_incomingModel)
    {
        if (actionExecutedContext.ActionContext.ActionArguments.Any())
        {
            var contentItem = actionExecutedContext.ActionContext.ActionArguments.First().Value as IHaveUploadedFiles;
            if (contentItem != null)
            {
                //cleanup any files associated
                foreach (var f in contentItem.UploadedFiles)
                {
                    //track all temp folders so we can remove old files afterwards
                    var dir = Path.GetDirectoryName(f.TempFilePath);
                    if (tempFolders.Contains(dir) == false)
                    {
                        tempFolders.Add(dir);
                    }

                    try
                    {
                        File.Delete(f.TempFilePath);
                    }
                    catch (System.Exception ex)
                    {
                        LogHelper.Error<FileUploadCleanupFilterAttribute>("Could not delete temp file " + f.TempFilePath, ex);
                    }
                }
            }
        }
    }
    else
    {
        if (actionExecutedContext == null)
        {
            LogHelper.Warn<FileUploadCleanupFilterAttribute>("The actionExecutedContext is null!!??");
            return;
        }
        if (actionExecutedContext.Request == null)
        {
            LogHelper.Warn<FileUploadCleanupFilterAttribute>("The actionExecutedContext.Request is null!!??");
            return;
        }
        if (actionExecutedContext.Request.Content == null)
        {
            LogHelper.Warn<FileUploadCleanupFilterAttribute>("The actionExecutedContext.Request.Content is null!!??");
            return;
        }

        ObjectContent objectContent;
        try
        {
            objectContent = actionExecutedContext.Response.Content as ObjectContent;
        }
        catch (System.Exception ex)
        {
            LogHelper.Error<FileUploadCleanupFilterAttribute>("Could not acquire actionExecutedContext.Response.Content", ex);
            return;
        }

        if (objectContent != null)
        {
            var uploadedFiles = objectContent.Value as IHaveUploadedFiles;
            if (uploadedFiles != null)
            {
                if (uploadedFiles.UploadedFiles != null)
                {
                    //cleanup any files associated
                    foreach (var f in uploadedFiles.UploadedFiles)
                    {
                        if (f.TempFilePath.IsNullOrWhiteSpace() == false)
                        {
                            //track all temp folders so we can remove old files afterwards
                            var dir = Path.GetDirectoryName(f.TempFilePath);
                            if (tempFolders.Contains(dir) == false)
                            {
                                tempFolders.Add(dir);
                            }

                            LogHelper.Debug<FileUploadCleanupFilterAttribute>("Removing temp file " + f.TempFilePath);

                            try
                            {
                                File.Delete(f.TempFilePath);
                            }
                            catch (System.Exception ex)
                            {
                                LogHelper.Error<FileUploadCleanupFilterAttribute>("Could not delete temp file " + f.TempFilePath, ex);
                            }

                            //clear out the temp path so it's not returned in the response
                            f.TempFilePath = "";
                        }
                        else
                        {
                            LogHelper.Warn<FileUploadCleanupFilterAttribute>("The f.TempFilePath is null or whitespace!!??");
                        }
                    }
                }
                else
                {
                    LogHelper.Warn<FileUploadCleanupFilterAttribute>("The uploadedFiles.UploadedFiles is null!!??");
                }
            }
            else
            {
                LogHelper.Warn<FileUploadCleanupFilterAttribute>("The actionExecutedContext.Request.Content.Value is not IHaveUploadedFiles, it is " + objectContent.Value.GetType());
            }
        }
        else
        {
            LogHelper.Warn<FileUploadCleanupFilterAttribute>("The actionExecutedContext.Request.Content is not ObjectContent, it is " + actionExecutedContext.Request.Content.GetType());
        }
    }

    //Now remove all old files so that the temp folder(s) never grow
    foreach (var tempFolder in tempFolders.Distinct())
    {
        var files = Directory.GetFiles(tempFolder);
        foreach (var file in files)
        {
            if (DateTime.UtcNow - File.GetLastWriteTimeUtc(file) > TimeSpan.FromDays(1))
            {
                try
                {
                    File.Delete(file);
                }
                catch (System.Exception ex)
                {
                    LogHelper.Error<FileUploadCleanupFilterAttribute>("Could not delete temp file " + file, ex);
                }
            }
        }
    }
}
public Task<DateTime> GetLastWriteTimeUtcAsync(DriveItem driveItem)
{
    return Task.FromResult(File.GetLastWriteTimeUtc(driveItem.GetAbsolutePath(this.BasePath)));
}
public static System.DateTime GetLastWriteTimeUtc(string path) => MSIOF.GetLastWriteTimeUtc(path);
private void FilterFile(string inputFilePath, string outputFilePath)
{
    // nothing to do if the output file already exists and is newer than both the input file
    // and Cross Time DSP's app.config file
    if (File.Exists(outputFilePath))
    {
        DateTime inputLastWriteTimeUtc = File.GetLastWriteTimeUtc(inputFilePath);
        DateTime outputLastWriteTimeUtc = File.GetLastWriteTimeUtc(outputFilePath);
        if ((outputLastWriteTimeUtc > inputLastWriteTimeUtc) && (outputLastWriteTimeUtc > this.Configuration.LastWriteTimeUtc))
        {
            this.log.ReportVerbose("'{0}': skipped as '{1}' is newer.", Path.GetFileName(inputFilePath), Path.GetFileName(outputFilePath));
            return;
        }
    }

    // get input
    DateTime processingStartedUtc = DateTime.UtcNow;
    MediaFoundationReader inputStream = new MediaFoundationReader(inputFilePath);
    if (Constant.SampleBlockSizeInBytes % inputStream.WaveFormat.BlockAlign != 0)
    {
        this.log.ReportError("'{0}': cannot be processed as sample block size of {1} bytes is not an exact multiple of the input block alignment of {2} bytes.", Path.GetFileName(inputFilePath), Constant.SampleBlockSizeInBytes, inputStream.WaveFormat.BlockAlign);
        return;
    }

    // ensure output directory exists so that output file write succeeds
    string outputDirectoryPath = Path.GetDirectoryName(outputFilePath);
    if (String.IsNullOrEmpty(outputDirectoryPath) == false && Directory.Exists(outputDirectoryPath) == false)
    {
        Directory.CreateDirectory(outputDirectoryPath);
    }

    // do the filtering
    StreamPerformance streamMetrics;
    using (WaveStream outputStream = this.FilterStream(inputStream, out streamMetrics))
    {
        if (this.Stopping)
        {
            // if the stop flag was set during filtering outputStream will be null
            return;
        }

        // write output file
        MediaType outputMediaType;
        if (this.Configuration.Output.Encoding == Encoding.Wave)
        {
            // work around NAudio bug: MediaFoundationEncoder supports Wave files but GetOutputMediaTypes() fails on Wave
            outputMediaType = new MediaType(outputStream.WaveFormat);
        }
        else
        {
            List<MediaType> outputMediaTypes = MediaFoundationEncoder.GetOutputMediaTypes(Constant.EncodingGuids[this.Configuration.Output.Encoding]).Where(mediaType => mediaType.BitsPerSample == outputStream.WaveFormat.BitsPerSample && mediaType.ChannelCount == outputStream.WaveFormat.Channels && mediaType.SampleRate == outputStream.WaveFormat.SampleRate).ToList();
            if ((outputMediaTypes == null) || (outputMediaTypes.Count < 1))
            {
                this.log.ReportError("'{0}': no media type found for {1} bits per sample, {2} channels, at {3} kHz.", Path.GetFileName(inputFilePath), outputStream.WaveFormat.BitsPerSample, outputStream.WaveFormat.Channels, outputStream.WaveFormat.SampleRate);
                return;
            }
            outputMediaType = outputMediaTypes[0];
        }

        MediaFoundationEncoder outputEncoder = new MediaFoundationEncoder(outputMediaType);
        streamMetrics.EncodingStartedUtc = DateTime.UtcNow;
        outputEncoder.Encode(outputFilePath, outputStream);
        streamMetrics.EncodingStoppedUtc = DateTime.UtcNow;
    }

    // copy metadata
    Tag inputMetadata;
    using (FileStream inputMetadataStream = new FileStream(inputFilePath, FileMode.Open, FileAccess.Read, FileShare.Read))
    {
        using (TagFile inputTagFile = TagFile.Create(new StreamFileAbstraction(inputMetadataStream.Name, inputMetadataStream, inputMetadataStream)))
        {
            inputMetadata = inputTagFile.Tag;
        }
    }
    using (FileStream outputMetadataStream = new FileStream(outputFilePath, FileMode.Open, FileAccess.ReadWrite, FileShare.Read))
    {
        using (TagFile outputTagFile = TagFile.Create(new StreamFileAbstraction(outputMetadataStream.Name, outputMetadataStream, outputMetadataStream)))
        {
            if (this.TryApplyMetadata(inputMetadata, outputTagFile.Tag))
            {
                outputTagFile.Save();
            }
        }
    }

    DateTime processingStoppedUtc = DateTime.UtcNow;
    TimeSpan encodingTime = streamMetrics.EncodingStoppedUtc - streamMetrics.EncodingStartedUtc;
    TimeSpan processingTime = processingStoppedUtc - processingStartedUtc;
    if (streamMetrics.HasReverseTime)
    {
        TimeSpan reverseBufferTime = streamMetrics.ReverseBufferCompleteUtc - streamMetrics.StartTimeUtc;
        TimeSpan reverseProcessingTime = streamMetrics.ReverseTimeCompleteUtc - streamMetrics.ReverseBufferCompleteUtc;
        if (streamMetrics.HasForwardTime)
        {
            TimeSpan forwardProcessingTime = streamMetrics.CompleteTimeUtc - streamMetrics.ReverseTimeCompleteUtc;
            this.log.ReportVerbose("'{0}' to '{1}' in {2} (buffer {3}, reverse {4}, forward {5}, encode {6}).", Path.GetFileName(inputFilePath), Path.GetFileName(outputFilePath), processingTime.ToString(Constant.ElapsedTimeFormat), reverseBufferTime.ToString(Constant.ElapsedTimeFormat), reverseProcessingTime.ToString(Constant.ElapsedTimeFormat), forwardProcessingTime.ToString(Constant.ElapsedTimeFormat), encodingTime.ToString(Constant.ElapsedTimeFormat));
        }
        else
        {
            this.log.ReportVerbose("'{0}' to '{1}' in {2} (buffer {3}, reverse {4}, encode {5}).", Path.GetFileName(inputFilePath), Path.GetFileName(outputFilePath), processingTime.ToString(Constant.ElapsedTimeFormat), reverseBufferTime.ToString(Constant.ElapsedTimeFormat), reverseProcessingTime.ToString(Constant.ElapsedTimeFormat), encodingTime.ToString(Constant.ElapsedTimeFormat));
        }
    }
    else
    {
        TimeSpan filteringTime = streamMetrics.CompleteTimeUtc - streamMetrics.StartTimeUtc;
        this.log.ReportVerbose("'{0}' to '{1}' in {2} (load+filter {3}, encode {4}).", Path.GetFileName(inputFilePath), Path.GetFileName(outputFilePath), processingTime.ToString(Constant.ElapsedTimeFormat), filteringTime.ToString(Constant.ElapsedTimeFormat), encodingTime.ToString(Constant.ElapsedTimeFormat));
    }
}
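The guard at the top of FilterFile is a general incremental-processing pattern; a minimal standalone sketch (hypothetical paths, not tied to Cross Time DSP's configuration check):

using System;
using System.IO;

static class UpToDateCheckDemo
{
    static void Main()
    {
        string inputPath = @"C:\audio\track.flac";      // hypothetical
        string outputPath = @"C:\audio\out\track.m4a";  // hypothetical

        // Skip work when the output already exists and is newer than the input it was
        // produced from; both timestamps are taken in UTC to avoid offset surprises.
        bool upToDate = File.Exists(outputPath) &&
                        File.GetLastWriteTimeUtc(outputPath) > File.GetLastWriteTimeUtc(inputPath);

        Console.WriteLine(upToDate ? "Output is up to date; skipping." : "Output is stale; reprocessing.");
    }
}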