/// <summary>
/// Creates a media file wrapper populated from an existing <see cref="IMediaFileData"/> record.
/// </summary>
/// <param name="file">Backing data record whose fields are copied onto this instance.</param>
/// <param name="storeId">Id of the media store the file belongs to.</param>
/// <param name="dataSourceId">Data source id identifying the backing record.</param>
/// <param name="filePath">Physical path of the file on disk.</param>
public MediaFile(IMediaFileData file, string storeId, DataSourceId dataSourceId, string filePath)
{
    _dataSourceId = dataSourceId;
    StoreId = storeId;

    // Copy every field from the data record; files created this way are writable.
    Id = file.Id;
    FileName = file.FileName;
    FolderPath = file.FolderPath;
    Title = file.Title;
    Description = file.Description;
    Tags = file.Tags;
    MimeType = file.MimeType;
    Length = file.Length;
    IsReadOnly = false;
    Culture = file.CultureInfo;
    CreationTime = file.CreationTime;
    LastWriteTime = file.LastWriteTime;
    SystemPath = filePath;
}
public void TestInsertAt2()
{
    // Local factory so both view models are created the same way.
    SingleDataSourceViewModel CreateViewModel(string fileName)
    {
        var dataSource = new SingleDataSource(_logFileFactory, _scheduler,
            new DataSource(fileName) { Id = DataSourceId.CreateNew() });
        return new SingleDataSourceViewModel(dataSource, _actionCenter.Object);
    }

    var viewModels = new ObservableCollection<SingleDataSourceViewModel>
    {
        CreateViewModel("test.log")
    };
    _control.ItemsSource = viewModels;

    // Inserting at the end must be reflected by the filtered view as well.
    viewModels.Insert(1, CreateViewModel("test2.log"));

    _control.FilteredItemsSource.Should().Equal(viewModels);
}
public void TestAddDataSourceDuplicateBookmarks()
{
    // Persist two bookmarks that reference the SAME data source and the SAME line.
    var dataSourceId = DataSourceId.CreateNew();
    _bookmarks.Setup(x => x.All).Returns(new[]
    {
        new BookmarkSettings(dataSourceId, new LogLineIndex(42)),
        new BookmarkSettings(dataSourceId, new LogLineIndex(42)),
    });
    var collection = new BookmarkCollection(_bookmarks.Object, TimeSpan.Zero);

    var dataSource = new Mock<IDataSource>();
    dataSource.Setup(x => x.UnfilteredLogFile).Returns(new InMemoryLogFile());
    dataSource.Setup(x => x.Id).Returns(dataSourceId);

    // Adding the data source restores its bookmarks; the duplicate must be
    // collapsed silently rather than causing an exception.
    new Action(() => collection.AddDataSource(dataSource.Object)).Should().NotThrow();
    collection.Bookmarks.Should().Equal(new object[]
    {
        new Bookmark(dataSource.Object, new LogLineIndex(42))
    }, "because even though there are two bookmarks in the settings object, they describe the same log line and thus only one bookmark should have been added in the end");
}
public void TestDataSourceId()
{
    // Round-trip a data source through Save/Restore and verify its id survives.
    string fileName = Path.GetTempFileName();
    var original = new DataSource("foo") { Id = DataSourceId.CreateNew() };

    var settings = new ApplicationSettings(fileName);
    settings.DataSources.Add(original);
    settings.Save();

    var restored = new ApplicationSettings(fileName);
    restored.Restore(out var neededPatching);
    neededPatching.Should().BeFalse();

    restored.DataSources.Count.Should().Be(1);
    var roundTripped = restored.DataSources[0];
    roundTripped.File.Should().Be("foo");
    roundTripped.Id.Should().Be(original.Id);
}
/// <summary>
/// Recomputes the data-control values for the bound control and refreshes it:
/// the stale range entry is evicted from the cache, fresh DC values are built
/// locally and then applied to the control.
/// </summary>
/// <returns>A successful <see cref="ReturnResult"/>; this command does not report failure.</returns>
internal override ReturnResultBase Execute()
{
    LocalDataviewManager localDataViewManager = Task.DataviewManager.LocalDataviewManager;
    RuntimeReadOnlyView dataControlView = localDataViewManager.TaskViews.GetDataControlViewByBoundControlId(control.getDitIdx());
    DataSourceId dataSourceId = dataControlView.DataSourceViewDefinition.TaskDataSource.DataSourceDefinition.Id;
    // The range key identifies the cached DC values for this control's data source.
    DataControlRangeDataCollection rangeData = new DataControlRangeDataCollection(dataSourceId, dataControlView.RangeBuilder, control.getDitIdx());
    //remove old DataControlRangeDataCollection entry from map.
    localDataViewManager.rangeToDcValuesMap.Remove(rangeData);
    //build new DC values for control
    LocallyComputedDcValuesBuilder dcValuesBuilder = new LocallyComputedDcValuesBuilder(dataControlView);
    var dataControlValues = dcValuesBuilder.Build();
    // Push the freshly computed values into the map and refresh the UI control.
    localDataViewManager.DataviewSynchronizer.ApplyDCValuesAndRefreshControl(dataControlValues, rangeData, control);
    return (new ReturnResult());
}
/// <summary>
/// Creates and registers a new custom data source backed by the plugin with the given id.
/// </summary>
/// <param name="id">Id of a registered custom data source plugin; throws if unknown.</param>
/// <returns>The newly created data source.</returns>
public CustomDataSource AddCustom(CustomDataSourceId id)
{
    // First() intentionally throws when no plugin with this id is registered.
    var plugin = _logSourceFactory.CustomDataSources.First(x => x.Id == id);

    lock (_syncRoot)
    {
        var settings = new DataSource
        {
            Id = DataSourceId.CreateNew(),
            DisplayName = plugin.DisplayName,
            CustomDataSourceId = plugin.Id,
            CustomDataSourceConfiguration = plugin.CreateConfiguration(null)
        };
        _settings.Add(settings);
        return (CustomDataSource)AddDataSource(settings);
    }
}
public void TestDispose1()
{
    SingleDataSource dataSource;
    LogFileProxy logFileProxy;
    LogFileSearchProxy searchProxy;

    using (dataSource = new SingleDataSource(_logFileFactory, _scheduler,
        new DataSource(@"E:\somelogfile.txt") { Id = DataSourceId.CreateNew() }))
    {
        logFileProxy = (LogFileProxy)dataSource.FilteredLogFile;
        searchProxy = (LogFileSearchProxy)dataSource.Search;

        // While the data source is alive, the proxies it handed out must be alive.
        logFileProxy.IsDisposed.Should().BeFalse();
        searchProxy.IsDisposed.Should().BeFalse();
    }

    // Disposing the data source must cascade to both proxies.
    dataSource.IsDisposed.Should().BeTrue();
    logFileProxy.IsDisposed.Should().BeTrue();
    searchProxy.IsDisposed.Should().BeTrue();
}
public void TestChangeFilterType1()
{
    var currentDataSource = new SingleDataSourceViewModel(
        new SingleDataSource(_logFileFactory, _scheduler,
            new DataSource("adw") { Id = DataSourceId.CreateNew() }),
        _actionCenter.Object);
    var model = new QuickFiltersSidePanelViewModel(_settings, _quickFilters)
    {
        CurrentDataSource = currentDataSource
    };

    var changeCount = 0;
    var filter = model.AddQuickFilter();
    filter.MatchType.Should().Be(FilterMatchType.SubstringFilter);
    filter.Value = "Foobar";
    filter.IsActive = true;

    // Only changes made after subscribing may be counted.
    model.OnFiltersChanged += () => ++changeCount;

    // Switching the match type must raise exactly one change notification.
    filter.MatchType = FilterMatchType.WildcardFilter;
    changeCount.Should().Be(1);
}
public void TestCtor2()
{
    // Three file-backed sources plus one file-less "merged" group; the first
    // two files are parented to the group before the settings are loaded.
    var settings = new DataSourceSettings
    {
        new DataSource("test1.log") { Id = DataSourceId.CreateNew() },
        new DataSource("test2.log") { Id = DataSourceId.CreateNew() },
        new DataSource("test.log") { Id = DataSourceId.CreateNew() }
    };
    var merged = new DataSource { Id = DataSourceId.CreateNew() };
    settings.Add(merged);
    settings[0].ParentId = merged.Id;
    settings[1].ParentId = merged.Id;

    using (var dataSources = new Tailviewer.BusinessLogic.DataSources.DataSources(_logSourceFactory, _scheduler, _filesystem, settings, _bookmarks.Object))
    {
        dataSources.Count.Should().Be(4, "Because we've loaded 4 data sources");

        // The merged group keeps its settings position (index 3) and exposes its two children.
        var mergedDataSource = dataSources[3] as MergedDataSource;
        mergedDataSource.Should().NotBeNull();
        mergedDataSource.DataSourceCount.Should().Be(2, "Because 2 of the data sources are part of this group");

        IDataSource dataSource1 = dataSources[0];
        IDataSource dataSource2 = dataSources[1];
        mergedDataSource.OriginalSources.Should().Equal(new object[] { dataSource1, dataSource2 });
        dataSource1.ParentId.Should().Be(merged.Id);
        dataSource2.ParentId.Should().Be(merged.Id);
    }
}
/// <summary>
/// Creates a media file directly from individual field values (used when a file
/// is materialized without an <see cref="IMediaFileData"/> record).
/// </summary>
private MediaFile(Guid id, string fileName, string folderPath, string title, string description, string tags, string mimeType, int? length, bool isReadOnly, string culture, DateTime creationTime, DateTime lastWriteTime, string storeId, DataSourceId dataSourceId, string filePath)
{
    DataSourceId = dataSourceId;
    StoreId = storeId;

    Id = id;
    FileName = fileName;
    FolderPath = folderPath;
    Title = title;
    Description = description;
    Tags = tags;
    MimeType = mimeType;
    Length = length;
    IsReadOnly = isReadOnly;
    Culture = culture;
    CreationTime = creationTime;
    LastWriteTime = lastWriteTime;
    SystemPath = filePath;
}
public void TestRemove2()
{
    // A merged group with three children; removing one child through its
    // view model must update the group, the business layer AND the settings.
    _settings = new ApplicationSettings("foobar");
    var group = new DataSource { Id = DataSourceId.CreateNew() };
    var source1 = new DataSource("foo") { Id = DataSourceId.CreateNew(), ParentId = group.Id };
    var source2 = new DataSource("bar") { Id = DataSourceId.CreateNew(), ParentId = group.Id };
    var source3 = new DataSource("clondyke") { Id = DataSourceId.CreateNew(), ParentId = group.Id };
    _settings.DataSources.Add(source1);
    _settings.DataSources.Add(source2);
    _settings.DataSources.Add(source3);
    _settings.DataSources.Add(group);
    _dataSources = new DataSources(_logFileFactory, _scheduler, _settings.DataSources);
    _model = new DataSourcesViewModel(_settings, _dataSources, _actionCenter.Object);

    var merged = (MergedDataSourceViewModel)_model.Observable[0];
    var viewModel1 = merged.Observable.ElementAt(0);
    var viewModel2 = merged.Observable.ElementAt(1);
    var viewModel3 = merged.Observable.ElementAt(2);

    // Remove the first child via its command.
    viewModel1.RemoveCommand.Execute(null);

    merged.ChildCount.Should().Be(2);
    merged.Observable.Should().NotContain(viewModel1);
    _model.Observable.Should().Equal(new object[] { merged });
    _dataSources.Sources.Should().Equal(new object[] { viewModel2.DataSource, viewModel3.DataSource, merged.DataSource });
    _settings.DataSources.Should().Equal(new object[] { source2, source3, group });
}
public void TestRemoveLogFile()
{
    var engine = new Mock<IDataSourceAnalyserEngine>();
    var analyser = new Mock<IDataSourceAnalyser>();
    engine.Setup(x => x.CreateAnalyser(It.IsAny<ILogFile>(), It.IsAny<AnalyserTemplate>()))
          .Returns(analyser.Object);
    var activeAnalysis = new ActiveAnalysis(AnalysisId.CreateNew(), _template, _taskScheduler, engine.Object, TimeSpan.Zero);
    activeAnalysis.Add(AnalyserPluginId.Empty, new TestLogAnalyserConfiguration());

    // Sanity: creating the analysis alone must not trigger a removal notification.
    analyser.Verify(x => x.OnLogFileRemoved(It.IsAny<DataSourceId>(), It.IsAny<ILogFile>()), Times.Never,
                    "because we haven't removed any log file from analysis just yet");

    var id = DataSourceId.CreateNew();
    var logFile = new Mock<ILogFile>();
    activeAnalysis.Add(id, logFile.Object);
    // Adding a log file must still not be reported as a removal.
    analyser.Verify(x => x.OnLogFileRemoved(id, It.IsAny<ILogFile>()), Times.Never,
                    "because we haven't removed any log file from analysis just yet");

    activeAnalysis.Remove(id, logFile.Object);
    analyser.Verify(x => x.OnLogFileRemoved(id, logFile.Object), Times.Once,
                    "because we've just removed a log file from analysis and thus the analyser should have been notified");
}
public void TestRemove2()
{
    // Add two file data sources to the merged data source...
    var fooSettings = new DataSource("foo") { Id = DataSourceId.CreateNew() };
    var fooSource = new FileDataSource(_logSourceFactory, _taskScheduler, fooSettings);
    _merged.Add(fooSource);

    var barSettings = new DataSource("bar") { Id = DataSourceId.CreateNew() };
    var barSource = new FileDataSource(_logSourceFactory, _taskScheduler, barSettings);
    _merged.Add(barSource);

    // ...then remove the second again: only the first may remain in the merged log file.
    _merged.Remove(barSource);

    var mergedLogFile = GetMergedLogFile();
    mergedLogFile.Sources.Should().Equal(new object[] { fooSource.OriginalLogSource });
}
/// <summary>
/// Returns the data source monitoring the given file, creating and registering
/// a new one if the file isn't being monitored yet.
/// </summary>
/// <param name="fileName">Path of the file to monitor; normalized via <c>GetKey</c>.</param>
/// <returns>The existing or newly created data source for that file.</returns>
public SingleDataSource AddFile(string fileName)
{
    var key = GetKey(fileName, out var fullFileName);

    lock (_syncRoot)
    {
        // Reuse the existing data source for the same file (case-insensitive path match).
        var existing = (SingleDataSource)_dataSources.FirstOrDefault(
            x => string.Equals(x.FullFileName, key, StringComparison.InvariantCultureIgnoreCase));
        if (existing != null)
            return existing;

        var settings = new DataSource(fullFileName) { Id = DataSourceId.CreateNew() };
        _settings.Add(settings);
        return (SingleDataSource)AddDataSource(settings);
    }
}
public void TestRemove2()
{
    // Add two data sources to the merged data source...
    var fooSettings = new DataSource("foo") { Id = DataSourceId.CreateNew() };
    var fooSource = new SingleDataSource(_logFileFactory, _taskScheduler, fooSettings);
    _merged.Add(fooSource);

    var barSettings = new DataSource("bar") { Id = DataSourceId.CreateNew() };
    var barSource = new SingleDataSource(_logFileFactory, _taskScheduler, barSettings);
    _merged.Add(barSource);

    // ...then remove the second: the merged log file must only contain the first.
    _merged.Remove(barSource);

    _merged.UnfilteredLogFile.Should().NotBeNull();
    _merged.UnfilteredLogFile.Should().BeOfType<MergedLogFile>();
    ((MergedLogFile)_merged.UnfilteredLogFile).Sources.Should().Equal(new object[] { fooSource.OriginalLogFile });
}
public void TestChangeShowElapsedTime([Values(true, false)] bool showElapsedTime)
{
    using (var source = new SingleDataSource(_scheduler,
        new DataSource { Id = DataSourceId.CreateNew(), File = @"C:\temp\foo.txt", ShowElapsedTime = showElapsedTime },
        new Mock<ILogFile>().Object, TimeSpan.Zero))
    {
        var model = new SingleDataSourceViewModel(source, _actionCenter.Object);
        var changes = new List<string>();
        model.PropertyChanged += (sender, args) => changes.Add(args.PropertyName);

        // Toggling the value must raise exactly one PropertyChanged.
        model.ShowElapsedTime = !showElapsedTime;
        changes.Should().Equal(new object[] { "ShowElapsedTime" }, "because the property should've changed once");

        // Assigning the same value again must NOT raise another notification.
        model.ShowElapsedTime = !showElapsedTime;
        changes.Should().Equal(new object[] { "ShowElapsedTime" }, "because the property didn't change");

        // Toggling back raises a second notification.
        model.ShowElapsedTime = showElapsedTime;
        changes.Should().Equal(new object[] { "ShowElapsedTime", "ShowElapsedTime" }, "because the property changed a 2nd time");
    }
}
/// <summary>
/// Prepares the versioning-history page: shows the toolbar matching the data
/// kind (media vs. regular data) and either exports the activity log to Excel
/// or renders it as a report table.
/// </summary>
protected void Page_PreRender(object sender, EventArgs e)
{
    var dataSourceId = DataSourceId.Deserialize(Request.QueryString["DataSourceId"]);
    IData data = DataFacade.GetDataFromDataSourceId(dataSourceId);

    // Media files get their own toolbar/context menu; everything else the generic one.
    if (data is IMediaFile)
    {
        ViewMediaToolbar.Visible = true;
        ViewMediaContextMenu.Visible = true;
    }
    else
    {
        ViewDataToolbar.Visible = true;
        ViewDataContextMenu.Visible = true;
    }

    using (var dc = new DataConnection())
    {
        var activities = VersioningFacade.GetActivities(dc, dataSourceId);
        // Drop empty Edit/Rollback groups — they carry no activity rows worth showing.
        activities = activities.Where(item => !(item.FirstOrDefault() == null && (item.Key.TaskType == "Edit" || item.Key.TaskType == "Rollback")));

        // "__EVENTTARGET" carries the postback target; "export" requests an Excel download.
        string eventTarget = Context.Request.Form["__EVENTTARGET"];
        if (eventTarget == "export")
        {
            GenerateXlsDocument(activities);
        }
        else
        {
            var entityToken = data.GetDataEntityToken();
            BuildReportTable(activities, entityToken);
        }
    }
}
public void TestCtor()
{
    var dataSource = new SingleDataSourceViewModel(
        new SingleDataSource(_logFileFactory, _scheduler,
            new DataSource("Foobar") { Id = DataSourceId.CreateNew() }),
        _actionCenter.Object);
    dataSource.LevelsFilter = LevelFlags.All;

    var control = new LogViewerControl
    {
        DataSource = dataSource
    };

    // With LevelFlags.All every level toggle must come up enabled.
    control.ShowTrace.Should().BeTrue();
    control.ShowDebug.Should().BeTrue();
    control.ShowInfo.Should().BeTrue();
    control.ShowWarning.Should().BeTrue();
    control.ShowError.Should().BeTrue();
    control.ShowFatal.Should().BeTrue();
}
/// <inheritdoc />
public void WriteAttribute(string name, DataSourceId value)
    => WriteAttribute(name, value.ToString());
/// <summary>
/// Re-serializes legacy EntityToken / DataSourceId values stored as strings so
/// they use the current serialization format. Scans every data type that has a
/// field whose name contains "EntityToken" or "DataSourceId", deserializes and
/// re-serializes each value, and writes the row back once if anything changed.
/// Failures are logged per property and do not abort the upgrade.
/// </summary>
private static void UpgradeStoredData()
{
    const string _ET = "EntityToken";
    const string _DSI = "DataSourceId";

    List<string> magicPropertyNames = new List<string> { _ET, _DSI };
    // A field is "serialized" if its name contains one of the magic markers.
    Func<DataFieldDescriptor, bool> isSerializedFieldFunc = g => magicPropertyNames.Any(s => g.Name.Contains(s));

    var descriptors = DataMetaDataFacade.AllDataTypeDescriptors.Where(f => f.Fields.Any(isSerializedFieldFunc));
    foreach (var descriptor in descriptors)
    {
        Type dataType = descriptor.GetInterfaceType();
        if (dataType == null)
        {
            continue;
        }

        var propertiesToUpdate = new List<PropertyInfo>();
        foreach (var tokenField in descriptor.Fields.Where(isSerializedFieldFunc))
        {
            var tokenProperty = dataType.GetProperty(tokenField.Name);
            propertiesToUpdate.Add(tokenProperty);
        }

        using (new DataConnection(PublicationScope.Unpublished))
        {
            var allRows = DataFacade.GetData(dataType).ToDataList();
            foreach (var rowItem in allRows)
            {
                bool rowChange = false;
                foreach (var tokenProperty in propertiesToUpdate)
                {
                    string token = tokenProperty.GetValue(rowItem) as string;
                    if (tokenProperty.Name.Contains(_ET))
                    {
                        try
                        {
                            var entityToken = EntityTokenSerializer.Deserialize(token);
                            var tokenReserialized = EntityTokenSerializer.Serialize(entityToken);
                            if (tokenReserialized != token)
                            {
                                tokenProperty.SetValue(rowItem, tokenReserialized);
                                rowChange = true;
                            }
                        }
                        catch (Exception ex)
                        {
                            _log.LogError(nameof(LegacySerializedEntityTokenUpgrader), "Failed to upgrade old token {0} from data type {1} as EntityToken.\n{2}", token, dataType.FullName, ex);
                        }
                    }
                    if (tokenProperty.Name.Contains(_DSI))
                    {
                        try
                        {
                            token = EnsureValidDataSourceId(token);
                            var dataSourceId = DataSourceId.Deserialize(token);
                            var dataSourceIdReserialized = dataSourceId.Serialize();
                            if (dataSourceIdReserialized != token)
                            {
                                tokenProperty.SetValue(rowItem, dataSourceIdReserialized);
                                rowChange = true;
                            }
                        }
                        catch (Exception ex)
                        {
                            _log.LogError(nameof(LegacySerializedEntityTokenUpgrader), "Failed to upgrade old token {0} from data type {1} as DataSourceId.\n{2}", token, dataType.FullName, ex);
                        }
                    }
                }

                // Bug fix: this update used to live INSIDE the property loop, so a row
                // with several token properties was written back once per remaining
                // property after the first change. Update each changed row exactly once.
                if (rowChange)
                {
                    DataFacade.Update(rowItem);
                }
            }
        }
    }
}
/// <summary>
/// Tests whether the given data source id is part of this collection.
/// </summary>
public bool Contains(DataSourceId id)
    => _dataSourceIds.Contains(id);
/// <summary>
/// Retrieves the list-item properties for a field list and reports the result
/// through the <c>GetListItemPropertiesForFieldListCompleted</c> event.
/// NOTE(review): the service call itself runs synchronously; only the
/// completion is event-based.
/// </summary>
public void GetListItemPropertiesForFieldListAsync(DataSourceId dataSourceId, string dataMember, object asyncState)
{
    var result = ReportService.GetListItemPropertiesForFieldList(dataSourceId, dataMember);
    RaiseScalarOperationCompletedEvent(GetListItemPropertiesForFieldListCompleted, result, asyncState);
}
/// <summary>
/// Finds the view model for the data source with the given id,
/// or null if no such data source is known.
/// </summary>
public IDataSourceViewModel TryGet(DataSourceId id)
    => _observable.FirstOrDefault(x => x.DataSource.Id == id);
/// <inheritdoc />
public bool TryReadAttribute(string name, out DataSourceId value)
    => _documentReader.TryReadAttribute(name, out value);
/// <summary>
/// Initializes a new instance of the <see cref="BayesFeatures"/> class.
/// </summary>
/// <param name="id">The source of data.</param>
/// <param name="words">The collection of words extracted from the data.</param>
/// <exception cref="ArgumentNullException">
/// <para><paramref name="id"/> is <b>null</b>.</para>
/// <para>-or-</para>
/// <para><paramref name="words"/> is <b>null</b>.</para>
/// </exception>
public BayesFeatures(DataSourceId id, IEnumerable<string> words)
    : base(id)
{
    // Copy the words into the instance's word list (presumably declared on this
    // class or a base — not visible here; AddRange throws if 'words' is null).
    this.words.AddRange(words);
}
/// <summary>
/// Archives (deletes) stale versioning activities: activities older than one
/// month that have been superseded either by a later activity before the next
/// publish (for publish-controlled data) or by another activity within one
/// hour (for everything else). Activities whose target id can no longer be
/// deserialized are deleted outright.
/// </summary>
public static void Clean()
{
    // Activity id -> deserialized target (null when the target could not be parsed).
    var archiveList = new Dictionary<Guid, DataSourceId>();
    using (var conn = new DataConnection(PublicationScope.Unpublished))
    {
        var activities = from a in conn.Get<IActivity>()
                         join t in conn.Get<ITask>() on a.TaskId equals t.Id
                         join tt in conn.Get<ITaskTarget>() on t.TaskTargetId equals tt.Id
                         select new { a.Id, a.TaskId, a.ActivityTime, tt.TargetDataSourceId };

        // All activities per target, oldest first.
        var activityByTargets = (from a in activities
                                 group a by a.TargetDataSourceId).ToDictionary(d => d.Key, d => d.OrderBy(a => a.ActivityTime).ToList());

        // All publish tasks per target, earliest first.
        var publishingByTargets = (from t in conn.Get<ITask>()
                                   join tt in conn.Get<ITaskTarget>() on t.TaskTargetId equals tt.Id
                                   where t.TaskType == "Publish"
                                   group t by tt.TargetDataSourceId).ToDictionary(d => d.Key, d => d.OrderBy(a => a.StartTime).ToList());

        //var targets = conn.Get<ITaskTarget>().ToDictionary(d => d.Id, d => );
        //Response.Write(targets.Count);
        Log.LogVerbose(CleanerFacade.Title, activities.Count() + "<br />");

        // Only consider activities older than one month.
        foreach (var activity in activities.Where(d => d.ActivityTime < DateTime.Now.AddMonths(-1)).OrderBy(d => d.ActivityTime).ToList())
        {
            var targetDataSourceId = activity.TargetDataSourceId;
            DataSourceId dataSourceId = null;
            if (DataSourceId.TryDeserialize(targetDataSourceId, out dataSourceId))
            {
                if (dataSourceId.InterfaceType.GetInterfaces().Contains(typeof(IPublishControlled)))
                {
                    // Publish-controlled: archivable only when another activity
                    // happened before the next publish of the same target.
                    if (publishingByTargets.ContainsKey(targetDataSourceId))
                    {
                        var nextPublishing = publishingByTargets[targetDataSourceId].Where(d => d.StartTime > activity.ActivityTime).FirstOrDefault();
                        var nextActivity = activityByTargets[targetDataSourceId].Where(d => d.ActivityTime > activity.ActivityTime).FirstOrDefault();
                        if (nextPublishing != null && nextActivity != null && nextActivity.ActivityTime < nextPublishing.StartTime)
                        {
                            // NOTE(review): Dictionary.Add throws on a duplicate activity id — verify ids are unique here.
                            archiveList.Add(activity.Id, dataSourceId);
                        }
                    }
                }
                else
                {
                    // Not publish-controlled: archivable when a follow-up activity
                    // occurred within one hour.
                    if (activityByTargets[activity.TargetDataSourceId].Any(d => d.ActivityTime > activity.ActivityTime && d.ActivityTime < activity.ActivityTime.AddHours(1)))
                    {
                        archiveList.Add(activity.Id, dataSourceId);
                    }
                }
            }
            else
            {
                // Target id no longer parseable — schedule unconditionally.
                archiveList.Add(activity.Id, null);
            }
            //var data = targets.First().DataSourceId.
        }

        Log.LogVerbose(CleanerFacade.Title, archiveList.Count().ToString());
        foreach (var item in archiveList)
        {
            conn.DeleteActivity(item.Key, item.Value);
        }
    }
}
/// <summary>
/// Brings the given line of the given data source into view,
/// if a view model for that data source exists.
/// </summary>
/// <returns>Whatever the view-model overload reports (false when the data source is unknown).</returns>
public bool RequestBringIntoView(DataSourceId dataSource, LogLineIndex index)
{
    var viewModel = _dataSources.DataSources.FirstOrDefault(x => x.DataSource.Id == dataSource);
    return RequestBringIntoView(viewModel, index);
}
/// <summary>
/// Re-serializes legacy EntityToken / DataSourceId values stored as strings so
/// they use the current serialization format. Scans every data type that has a
/// field whose name contains "EntityToken" or "DataSourceId", re-serializes
/// each value and writes changed rows back in batches of 1000. Per-property
/// failures are counted and logged (capped at <c>MaxErrorMessagesPerType</c>)
/// without aborting the upgrade.
/// </summary>
private static void UpgradeStoredData()
{
    const string _ET = "EntityToken";
    const string _DSI = "DataSourceId";

    List<string> magicPropertyNames = new List<string> { _ET, _DSI };
    // A field is "serialized" if its name contains one of the magic markers.
    Func<DataFieldDescriptor, bool> isSerializedFieldFunc = g => magicPropertyNames.Any(s => g.Name.Contains(s));

    var descriptors = DataMetaDataFacade.AllDataTypeDescriptors.Where(f => f.Fields.Any(isSerializedFieldFunc));
    foreach (var descriptor in descriptors)
    {
        Type dataType = descriptor.GetInterfaceType();
        if (dataType == null)
        {
            continue;
        }

        var propertiesToUpdate = new List<PropertyInfo>();
        foreach (var tokenField in descriptor.Fields.Where(isSerializedFieldFunc))
        {
            var tokenProperty = dataType.GetProperty(tokenField.Name);
            propertiesToUpdate.Add(tokenProperty);
        }

        using (new DataConnection(PublicationScope.Unpublished))
        {
            var allRows = DataFacade.GetData(dataType).ToDataList();
            var toUpdate = new List<IData>();
            int errors = 0, updated = 0;

            foreach (var rowItem in allRows)
            {
                bool rowChange = false;
                foreach (var tokenProperty in propertiesToUpdate)
                {
                    string token = tokenProperty.GetValue(rowItem) as string;
                    // Which kind of token this property holds; used for both the
                    // upgrade path and an accurate error message.
                    bool isEntityToken = tokenProperty.Name.Contains(_ET);
                    try
                    {
                        string tokenReserialized;
                        if (isEntityToken)
                        {
                            var entityToken = EntityTokenSerializer.Deserialize(token);
                            tokenReserialized = EntityTokenSerializer.Serialize(entityToken);
                        }
                        else if (tokenProperty.Name.Contains(_DSI))
                        {
                            token = EnsureValidDataSourceId(token);
                            var dataSourceId = DataSourceId.Deserialize(token);
                            tokenReserialized = dataSourceId.Serialize();
                        }
                        else
                        {
                            throw new InvalidOperationException("This line should not be reachable");
                        }

                        if (tokenReserialized != token)
                        {
                            tokenProperty.SetValue(rowItem, tokenReserialized);
                            rowChange = true;
                        }
                    }
                    catch (Exception ex)
                    {
                        errors++;
                        if (errors <= MaxErrorMessagesPerType)
                        {
                            // Bug fix: the message used to claim "as EntityToken" even
                            // when a DataSourceId property failed to upgrade.
                            string tokenKind = isEntityToken ? _ET : _DSI;
                            _log.LogError(LogTitle, $"Failed to upgrade old token '{token}' from data type '{dataType.FullName}' as {tokenKind}.\n{ex}");
                        }
                    }
                }

                if (rowChange)
                {
                    updated++;
                    toUpdate.Add(rowItem);
                    // Flush in batches to keep memory and transaction size bounded.
                    if (toUpdate.Count >= 1000)
                    {
                        DataFacade.Update(toUpdate, true, false, false);
                        toUpdate.Clear();
                    }
                }
            }

            if (toUpdate.Count > 0)
            {
                DataFacade.Update(toUpdate, true, false, false);
                toUpdate.Clear();
            }

            _log.LogInformation(LogTitle, $"Finished updating serialized tokens for data type '{dataType.FullName}'. Rows: {allRows.Count}, Updated: {updated}, Errors: {errors}");
        }
    }
}
/// <summary>
/// Resolves the data-source keys and the output folder/file name for the
/// current input file, applies special handling for archives, WCF-originated
/// jobs and unzipped files, and enforces the configured watch-filter.
/// </summary>
/// <returns>false when the file extension is rejected by the watch filter (a warning
/// is traced and mailed); true otherwise.</returns>
public bool PrepareInput()
{
    //int dataSourceId = 0;
    //int.TryParse(e.ApplicationParameters["DataSourceId"].ToString(), out dataSourceId);
    //if (dataSourceId == 0)
    //    return;
    InputFileNameOnly = Path.GetFileNameWithoutExtension(InputFileName);
    //string InputFileExtension = Path.GetExtension(InputFileName);

    // Keys are cached per data source; fall back to loading them from the store.
    Keys = Cache.Instance.Bag[DataSourceId + ".keys"] as List<IdpeKey>;
    if (Keys == null)
    {
        Keys = DataSource.LoadKeys(DataSourceId);
    }
    OutputFolder = DataSource.GetOutputFolder(DataSourceId, Keys);
    ActualOutputFolder = OutputFolder;
    OutputFileName = DataSource.GetOutputFileName(DataSourceId, Keys, OutputFolder, InputFileNameOnly);
    string appWatchFilter = Keys.GetKeyValue(IdpeKeyTypes.WatchFilter);
    ZipInterfaceName = Keys.GetKeyValue(IdpeKeyTypes.ZipInterfaceName);

    // Archives get their output redirected to a dated temp folder per data source.
    if ((InputFileExtension.ToLower() == ".zip") || (InputFileExtension.ToLower() == ".rar") || (InputFileExtension.ToLower() == ".tar"))
    {
        OutputFolder = Path.Combine(EyediaCoreConfigurationSection.CurrentConfig.TempDirectory, Constants.IdpeBaseFolderName);
        OutputFolder = Path.Combine(OutputFolder, "RedirectedOutput");
        OutputFolder = Path.Combine(OutputFolder, DateTime.Now.ToDBDateFormat());
        OutputFolder = Path.Combine(OutputFolder, DataSourceId.ToString());
    }

    // Enforce the watch filter (a "|"-separated extension list) unless it allows everything.
    if ((!string.IsNullOrEmpty(appWatchFilter)) && (appWatchFilter != Pullers.FileExtensionSupportAll))
    {
        List<string> filters = new List<string>();
        if (appWatchFilter.Contains("|"))
        {
            filters.AddRange(appWatchFilter.ToLower().Split("|".ToCharArray()));
        }
        else
        {
            filters.Add(appWatchFilter.ToLower());
        }
        var filterOrNot = (from f in filters where f == InputFileExtension.ToLower() select f).SingleOrDefault();
        if (filterOrNot == null)
        {
            // Files extracted from an archive are exempt from the filter.
            if (!InputFileNameOnly.StartsWith(Constants.UnzippedFilePrefix))
            {
                IdpeMessage warn = new IdpeMessage(IdpeMessageCodes.IDPE_FILE_TYPE_NOT_SUPPORTED);
                DataSource dataSource = new DataSource(DataSourceId, string.Empty);
                WithWarning = string.Format(warn.Message, dataSource.Name, appWatchFilter, Path.GetFileName(InputFileName));
                ExtensionMethods.TraceInformation(WithWarning);
                // Notify via mail that the file was ignored, then reject it.
                new PostMan(dataSource).Send(PostMan.__warningStartTag + WithWarning + PostMan.__warningEndTag, "File Ignored");
                return (false);
            }
        }
    }

    if (InputFileNameOnly.StartsWith(Constants.WCFFilePrefix))
    {
        // WCF-submitted files encode the job id in the file name.
        IsRequestFromWCF = true;
        JobId = InputFileNameOnly.Replace(Constants.WCFFilePrefix, "");
        JobId = JobId.Replace(InputFileExtension, "");
    }
    else if (InputFileNameOnly.StartsWith(Constants.UnzippedFilePrefix))
    {
        // Unzipped files get a per-archive subfolder and their original name restored.
        ZipUniuqeId = ZipFileWatcher.ExtractUniqueId(InputFileNameOnly);
        OutputFolder = Path.Combine(OutputFolder, ZipUniuqeId);
        if (!Directory.Exists(OutputFolder))
        {
            Directory.CreateDirectory(OutputFolder);
        }
        OutputFileName = Path.Combine(OutputFolder, InputFileNameOnly + Path.GetExtension(OutputFileName));
        OutputFileName = ZipFileWatcher.ExtractActualFileName(OutputFileName);
    }
    return (true);
}
/// <summary>
/// Synchronizes the internal data-source map with the given snapshot of files:
/// data sources for vanished files are disposed and removed, new files get a
/// fresh data source (up to <c>LogLineSourceId.MaxSources</c>).
/// </summary>
/// <param name="files">The current set of files that should be monitored.</param>
/// <returns>The data sources corresponding to <paramref name="files"/> (in file order,
/// possibly truncated when the source limit is hit).</returns>
private IReadOnlyList<IDataSource> SynchronizeDataSources(IReadOnlyList<IFileInfo> files)
{
    var newFiles = new List<IFileInfo>();
    var oldFiles = new List<IFileInfo>();
    var dataSources = new List<IDataSource>();
    try
    {
        lock (_syncRoot)
        {
            // Collect files that are no longer present...
            foreach (var file in _dataSources.Keys)
            {
                if (!files.Contains(file))
                {
                    oldFiles.Add(file);
                }
            }
            // ...and dispose/remove their data sources.
            foreach (var file in oldFiles)
            {
                _dataSources.TryGetValue(file, out var dataSource);
                _dataSources.Remove(file);
                dataSource?.Dispose();
            }
            // Create data sources for files we aren't tracking yet.
            foreach (var file in files)
            {
                if (!_dataSources.TryGetValue(file, out var dataSource))
                {
                    // We'll print a nice warning to the user if this happens
                    if (_dataSources.Count >= LogLineSourceId.MaxSources)
                    {
                        break;
                    }
                    var settings = new DataSource(file.FullPath) { Id = DataSourceId.CreateNew() };
                    dataSource = new SingleDataSource(_logFileFactory, _taskScheduler, settings);
                    _dataSources.Add(file, dataSource);
                    newFiles.Add(file);
                }
                dataSources.Add(dataSource);
            }
        }
    }
    catch (Exception)
    {
        // On failure, dispose everything gathered so far before propagating.
        // NOTE(review): this also disposes pre-existing data sources that were added
        // to the result list — confirm that is intended.
        foreach (var dataSource in dataSources)
        {
            dataSource.Dispose();
        }
        throw;
    }
    if (Log.IsDebugEnabled)
    {
        Log.DebugFormat("Adding #{0} files ({1}), removing #{2} files ({3})", newFiles.Count, string.Join(", ", newFiles.Select(x => x.FullPath)), oldFiles.Count, string.Join(", ", oldFiles.Select(x => x.FullPath)));
    }
    return (dataSources);
}