/// <summary>
/// End-to-end check of category handling: a category (name="boo", cat="self", dstfield="cat")
/// with a select expression matching "weerd" in the "name" field writes its value "self"
/// into the "cat" field, and appends with ";" when the same record is handled again.
/// </summary>
public void TestCategories()
{
    using (ImportEngine eng = new ImportEngine())
    {
        // Build the category definition:
        // <category name="boo" cat="self" dstfield="cat"><select field="name" expr="weerd"/></category>
        XmlHelper xml = new XmlHelper();
        xml.LoadXml("<category/>");
        xml.WriteVal("@name", "boo");
        xml.WriteVal("@cat", "self");
        xml.WriteVal("@dstfield", "cat");
        var sel = xml.DocumentElement.AddElement("select");
        sel.SetAttribute("field", "name");
        sel.SetAttribute("expr", "weerd");
        Category cat = Category.Create(xml.DocumentElement);

        PipelineContext ctx = new PipelineContext(eng);
        EndpointWrapper ep = new EndpointWrapper(eng, xml.DocumentElement);
        IDataEndpoint dep = ep.CreateDataEndpoint(ctx, "abc");

        // First match: category value "self" is written into "cat".
        dep.SetField("name", "peter weerd");
        bool handled = cat.HandleRecord(ctx, dep, (JObject)dep.GetField(null));
        Assert.IsTrue(handled);
        Assert.AreEqual("self", dep.GetFieldAsStr("cat"));

        // Handling again on the same endpoint appends, yielding "self;self".
        dep.SetField("name", "peter weerd peter");
        handled = cat.HandleRecord(ctx, dep, (JObject)dep.GetField(null));
        Assert.IsTrue(handled);
        Assert.AreEqual("self;self", dep.GetFieldAsStr("cat"));
    }
}
/// <summary>
/// Creates a named synchronization action: a predicate deciding whether the action
/// applies to a resource, plus the action to execute against the endpoint.
/// </summary>
/// <param name="applies">Predicate deciding whether this action applies</param>
/// <param name="executeAction">Action executed when the predicate matches</param>
/// <param name="dataEndpoint">Endpoint the action operates on</param>
/// <param name="name">Diagnostic name for the action</param>
public OldSynchronizationAction(Func<IDataEndpoint<T>, T, bool> applies, Action<IDataEndpoint<T>, T> executeAction, IDataEndpoint<T> dataEndpoint, string name = "Unnamed")
{
    _name = name;
    _dataEndpoint = dataEndpoint;
    _executeAction = executeAction;
    _applies = applies;
}
/// <summary>
/// Creates a synchronization channel between a source and a target endpoint and
/// sets up its dispatcher.
/// </summary>
/// <param name="source">Endpoint changes originate from</param>
/// <param name="target">Endpoint changes are applied to</param>
/// <param name="isBatchChannel">Whether the channel operates in batch mode</param>
public SynchronizationChannel(IDataEndpoint<T> source, IDataEndpoint<T> target, bool isBatchChannel)
{
    _isBatchChannel = isBatchChannel;
    _target = target;
    _source = source;
    CreateDispatcher();
}
/// <summary>
/// Copy-constructor used when cloning a post-processor into a chain.
/// Copies the name from the prototype and wires the next stage.
/// </summary>
/// <param name="other">Prototype to copy from</param>
/// <param name="epOrnextProcessor">Next stage: a plain endpoint or another post-processor</param>
public PostProcessorBase(PostProcessorBase other, IDataEndpoint epOrnextProcessor)
{
    this.name = other.name;
    this.nextEndpoint = epOrnextProcessor;
    // nextProcessor stays null when the next stage is a plain endpoint (end of chain).
    this.nextProcessor = epOrnextProcessor as IPostProcessor;
    // NOTE(review): this increments the counter on the prototype itself, so successive
    // clones of the same prototype receive increasing instance numbers — presumably
    // intentional (see instanceId remark in wrapPostProcessors); confirm.
    instanceNo = ++other.instanceNo;
}
/// <summary>
/// Create the data endpoint
/// </summary>
/// <param name="logger">The logger to use</param>
/// <param name="globalMeta">Global meta dictionary</param>
/// <param name="meta">Meta dictionary</param>
/// <returns>The configured data endpoint instance</returns>
/// <exception cref="NetServiceException">When the container instance is not an IDataEndpoint</exception>
public override IDataEndpoint Create(Logger logger, MetaDictionary meta, MetaDictionary globalMeta)
{
    // Pattern match instead of as-cast + null check (also avoids the later redundant
    // 'is' + 'as' double cast for IPersistNode).
    if (!(Script.Container.GetInstance(ClassName) is IDataEndpoint ret))
    {
        throw new NetServiceException(CANAPE.Documents.Properties.Resources.ScriptDataEndpointFactory_InvalidType);
    }
    ServerConfig config = (ServerConfig)Config;
    // Copy configured properties into the per-endpoint meta dictionary.
    foreach (KeyValuePair<string, string> pair in config.Properties)
    {
        meta[pair.Key] = pair.Value;
    }
    ret.Meta = meta;
    ret.GlobalMeta = globalMeta;
    // Restore persisted state when the endpoint supports persistence.
    if (ret is IPersistNode persist)
    {
        persist.SetState(config.GetConfig(), logger);
    }
    return ret;
}
/// <summary>
/// Optionally wraps an existing endpoint with a chain of post-processors.
/// Returns the endpoint unchanged when the processors list is null or empty.
/// </summary>
/// <param name="ctx">Pipeline context; its PostProcessor property is used as a scratch slot while cloning</param>
/// <param name="ep">Endpoint to wrap</param>
/// <param name="processors">Separated list of post-processor names (split via SplitStandard)</param>
/// <returns>The outermost wrapper, or the original endpoint when no processors apply</returns>
private IDataEndpoint wrapPostProcessors(PipelineContext ctx, IDataEndpoint ep, String processors)
{
    if (String.IsNullOrEmpty(processors)) { return (ep); }
    String[] arr = processors.SplitStandard();
    if (arr.Length == 0) { return (ep); }
    IDataEndpoint wrapped = ep;
    // Warning: always wrap from the back to the front! This leads to strange results in
    // case of duplicate post-processors: the last one gets the lowest instanceId.
    // But it is the right way!
    for (int i = arr.Length - 1; i >= 0; i--)
    {
        ctx.PostProcessor = ctx.ImportEngine.PostProcessors.GetPostProcessor(arr[i]);
        wrapped = (IDataEndpoint)ctx.PostProcessor.Clone(ctx, wrapped);
    }
    // Clear the scratch slot so later pipeline code does not see a stale processor.
    ctx.PostProcessor = null;
    return (wrapped);
}
/// <summary>
/// Arranges a controller whose endpoint returns an empty SearchResult for any query,
/// then executes a search with a null argument.
/// </summary>
public void GivenSearchForNull()
{
    _endpoint = Mock.Of<IDataEndpoint>();
    var endpointMock = Mock.Get(_endpoint);
    endpointMock.Setup(m => m.Search(It.IsAny<string>())).Returns(new SearchResult());

    _controller = new SearchController(_endpoint);
    _result = _controller.ExecuteSearch(null);
}
/// <summary>
/// Decorates an endpoint with batch-list cleanup behavior.
/// </summary>
/// <param name="decorated">The endpoint being decorated; must not be null</param>
/// <exception cref="ArgumentNullException">When <paramref name="decorated"/> is null</exception>
public BatchListCleanupEndpointDecorator(IDataEndpoint<T> decorated)
{
    _decorated = decorated ?? throw new ArgumentNullException(nameof(decorated));
}
/// <summary>
/// Creates a search controller backed by the given data endpoint.
/// </summary>
/// <param name="dataEndpoint">Endpoint used to execute searches; must not be null</param>
/// <exception cref="ArgumentNullException">When <paramref name="dataEndpoint"/> is null</exception>
public SearchController(IDataEndpoint dataEndpoint)
{
    // Fix: a null argument should raise ArgumentNullException. The previous
    // ArgumentException("dataEndpoint") treated the parameter name as the *message*
    // (the single-string overload), losing the param name entirely.
    // ArgumentNullException derives from ArgumentException, so existing catch blocks still match.
    if (dataEndpoint == null)
    {
        throw new ArgumentNullException(nameof(dataEndpoint));
    }
    _dataEndpoint = dataEndpoint;
}
/// <summary>
/// Clone-constructor: copies the top-N configuration from another instance and allocates
/// a fresh priority queue sized to the configured top-count.
/// </summary>
public TopProcessor(PipelineContext ctx, TopProcessor other, IDataEndpoint epOrnextProcessor)
    : base(other, epOrnextProcessor)
{
    topCount = other.topCount;
    reverse = other.reverse;
    sorter = other.sorter;
    prique = new FixedPriorityQueue<JObject>(topCount, ComparisonWrappers.Create(sorter, reverse));
}
/// <summary>
/// Derives a sortable subject for mail records by stripping leading prefix words and
/// stores it in "sort_subject"; stripped prefixes go into "sort_subject_prefixes".
/// A prefix word must be non-empty, start with an uppercase letter and be purely alphabetic.
/// Non-mail records and null subjects are ignored; an empty result is stored as " ".
/// </summary>
/// <param name="ep">Endpoint receiving the computed fields</param>
/// <param name="subject">Raw subject; null is ignored</param>
/// <param name="type">Record type; only "Mail" is processed</param>
private static void setSortSubject(IDataEndpoint ep, String subject, String type)
{
    if (subject == null || type != "Mail") return;

    String sortSubject = subject;
    String[] arr = subject.Split(SEPS, StringSplitOptions.None);
    if (arr.Length > 1)
    {
        // Count leading parts that qualify as prefixes.
        int i;
        for (i = 0; i < arr.Length - 1; i++)
        {
            String part = arr[i];
            if (part.Length == 0) break;
            if (!char.IsUpper(part[0])) break;
            if (!onlyAlpha(part)) break;
        }
        if (i > 0)
        {
            // The first non-prefix part heads the sort subject; the rest are
            // re-joined with ": ". The prefixes themselves are joined with spaces.
            sortSubject = arr[i];
            String words = arr[0];
            for (int j = 1; j < i; j++) words = words + " " + arr[j];
            for (int j = i + 1; j < arr.Length; j++) sortSubject = sortSubject + ": " + arr[j];
            ep.SetField("sort_subject_prefixes", words);
        }
    }

    // Common tail (originally reached via 'goto EXIT_RTN'; now structured flow —
    // the two goto sites simply skipped the prefix-stripping work above).
    if (String.IsNullOrEmpty(sortSubject)) sortSubject = " ";
    ep.SetField("sort_subject", sortSubject);
}
/// <summary>
/// Constructor: wraps a data endpoint and prepares a background thread to run it.
/// </summary>
/// <param name="server">Data endpoint object</param>
/// <param name="logger">Logger object</param>
public DataEndpointAdapter(IDataEndpoint server, Logger logger)
{
    _logger = logger;
    _endpoint = server;
    Description = server.Description;
    // Background thread so the endpoint never keeps the process alive on its own.
    _thread = new Thread(StartEndpoint) { IsBackground = true };
}
/// <summary>
/// Constructor: wraps a data endpoint and prepares a background thread to run it,
/// propagating the current UI culture to the worker thread.
/// </summary>
/// <param name="server">Data endpoint object</param>
/// <param name="logger">Logger object</param>
public DataEndpointAdapter(IDataEndpoint server, Logger logger)
{
    _logger = logger;
    _endpoint = server;
    Description = server.Description;
    _thread = new Thread(StartEndpoint)
    {
        // Inherit the creating thread's UI culture so localized messages stay consistent.
        CurrentUICulture = Thread.CurrentThread.CurrentUICulture,
        IsBackground = true
    };
}
/// <summary>
/// Forwards the record to every sub-category, if any are configured.
/// </summary>
protected void HandleSubcats(PipelineContext ctx, IDataEndpoint ep, JObject rec)
{
    if (SubCats == null) return;
    foreach (var subCat in SubCats)
    {
        subCat.HandleRecord(ctx, ep, rec);
    }
}
/// <summary>
/// Clone-constructor: copies the sort/undup configuration from another instance,
/// cloning the undup actions for the new pipeline context.
/// </summary>
public SortProcessor(PipelineContext ctx, SortProcessor other, IDataEndpoint epOrnextProcessor)
    : base(other, epOrnextProcessor)
{
    beforeSort = other.beforeSort;
    afterSort = other.afterSort;
    Undupper = other.Undupper;
    Sorter = other.Sorter;
    if (other.undupActions != null)
    {
        undupActions = other.undupActions.Clone(ctx);
    }
}
/// <summary>
/// Runs the current record through all categories; in CategoryMode.One, the first
/// category that handles the record stops further processing.
/// </summary>
public void HandleRecord(PipelineContext ctx)
{
    IDataEndpoint ep = ctx.Action.Endpoint;
    var rec = (JObject)ep.GetField(null);
    foreach (var category in Categories)
    {
        if (category.HandleRecord(ctx, ep, rec) && mode == CategoryMode.One)
        {
            break;
        }
    }
}
/// <summary>
/// Creates an endpoint for the named doc-type: the administrative types
/// "_mapping" and "_settings" get a command endpoint, everything else a data endpoint.
/// Returns null when the doc-type cannot be resolved (and mustExcept is false).
/// </summary>
protected override IDataEndpoint CreateDataEndpoint(PipelineContext ctx, string dataName, bool mustExcept)
{
    var dt = getDocType(dataName, mustExcept);
    if (dt == null) return null;
    if (dt.TypeName == "_mapping" || dt.TypeName == "_settings")
    {
        return new ESCmdEndPoint(this, dt);
    }
    return new ESDataEndpoint(this, dt);
}
/// <summary>
/// Starts this action for a pipeline run: resolves converters and the endpoint,
/// and pre-compiles the ordered array of script delegates applied to each value.
/// </summary>
/// <param name="ctx">Pipeline context used to resolve converters, endpoints and scripts</param>
public virtual void Start(PipelineContext ctx)
{
    converters = ctx.ImportEngine.Converters.ToConverters(convertersName);
    endPoint = ctx.Pipeline.CreateOrGetDataEndpoint(ctx, endpointName, postProcessors);
    if (ConvertAndCallScriptNeeded)
    {
        // Build the delegate chain in invocation order; ConvertersFirst decides whether
        // converters run before or after the scripted steps. Order of the remaining
        // steps is fixed: value source, condition expr, value expr, named script, body.
        var list = new List<ScriptDelegate>(4);
        if (valueSource != null) { list.Add(valueSource.GetValue); }
        if (ConvertersFirst) { addConverters(list); }
        if (condExpr != null) { list.Add(pipeline.CreateScriptExprDelegate<ScriptDelegate>(condExprFunc, node)); }
        if (valExpr != null) { list.Add(pipeline.CreateScriptExprDelegate<ScriptDelegate>(valExprFunc, node)); }
        if (scriptName != null) { list.Add(pipeline.CreateScriptDelegate<ScriptDelegate>(scriptName, node)); }
        if (bodyFunc != null) { list.Add(pipeline.CreateScriptExprDelegate<ScriptDelegate>(bodyFunc, node)); }
        if (!ConvertersFirst) { addConverters(list); }
        scriptDelegates = list.ToArray();
    }
}
/// <summary>
/// Handles one record: returns false when the selector rejects it. Otherwise
/// sub-categories run first, then the category value is emitted — either as a
/// pipeline event (FieldIsEvent) or written directly into the target field.
/// </summary>
/// <returns>True when the record was selected and handled</returns>
public override bool HandleRecord(PipelineContext ctx, IDataEndpoint ep, JObject rec)
{
    if (!Selector.IsSelected(rec)) return false;

    if (SubCats != null)
    {
        HandleSubcats(ctx, ep, rec);
    }

    if (FieldIsEvent)
    {
        ctx.Pipeline.HandleValue(ctx, Field, Value);
    }
    else
    {
        ep.SetField(Field, Value, FieldFlags.OverWrite);
    }
    return true;
}
/// <summary>
/// The target's Delete sync action always throws; publishing of later resources
/// through the channel must keep working regardless.
/// </summary>
public void ShouldNotBreakPublishing()
{
    _testTarget = new InMemoryDataEndpoint<TestResource>(t => t.Id);
    _testTarget.AddSyncAction(t => t.Deleted, (ds, t) => { throw new Exception("Ooops"); }, "Delete");
    _testTarget.AddSyncAction(t => string.IsNullOrEmpty(t.CorrelationId), (ds, r) => ds.Create(r), "Create");

    using (var channel = new SynchronizationChannel<TestResource>(_testSource, _testTarget))
    {
        channel.Open();

        // First resource: deleting it triggers the failing Delete action.
        _testResource = new TestResource(1);
        _testSource.Create(_testResource);
        _testSource.Delete(_testResource);

        // A later resource must still be synchronized despite that failure.
        var secondResource = new TestResource(2);
        _testSource.Create(secondResource);
        Assert.AreEqual(secondResource, _testTarget.Get(2));
    }
}
/// <summary>
/// Builds a source/target endpoint pair with Delete/Create/Update sync actions
/// and a synchronization channel between them.
/// </summary>
public void SetUpTest()
{
    _testSource = new InMemoryDataEndpoint<TestResource>(t => t.Id);
    _testSource.ResourceDeleted.Subscribe(t => t.Deleted = true);

    _testTarget = new InMemoryDataEndpoint<TestResource>(t => t.Id);
    _testTarget.AddSyncAction(t => t.Deleted, (ds, t) => ds.Delete(t), "Delete");
    _testTarget.AddSyncAction(t => string.IsNullOrEmpty(t.CorrelationId), (ds, r) => ds.Create(r), "Create");
    _testTarget.AddSyncAction(
        t => !string.IsNullOrEmpty(t.CorrelationId),
        (ds, r) =>
        {
            // Only update resources the target already knows about.
            var existing = ds.Get(r.Id);
            if (existing == null) return;
            existing.Update(r);
            ds.Update(existing);
        },
        "Update");

    _testChannel = new SynchronizationChannel<TestResource>(_testSource, _testTarget);
}
/// <summary>
/// Wires up a source endpoint (marking deleted resources), a target endpoint with
/// Delete/Create/Update synchronization actions, and the channel connecting them.
/// </summary>
public void SetUpTest()
{
    _testSource = new InMemoryDataEndpoint<TestResource>(t => t.Id);
    _testSource.ResourceDeleted.Subscribe(t => t.Deleted = true);

    _testTarget = new InMemoryDataEndpoint<TestResource>(t => t.Id);
    _testTarget.AddSyncAction(t => t.Deleted, (ds, t) => ds.Delete(t), "Delete");
    _testTarget.AddSyncAction(t => string.IsNullOrEmpty(t.CorrelationId), (ds, r) => ds.Create(r), "Create");
    _testTarget.AddSyncAction(
        t => !string.IsNullOrEmpty(t.CorrelationId),
        (ds, r) =>
        {
            // Ignore updates for resources the target has never seen.
            var known = ds.Get(r.Id);
            if (known == null) return;
            known.Update(r);
            ds.Update(known);
        },
        "Update");

    _testChannel = new SynchronizationChannel<TestResource>(_testSource, _testTarget);
}
/// <summary>
/// Gets an existing endpoint from the cache or creates and initializes a new one.
/// If a new endpoint is instantiated, it is optionally wrapped by a list of post-processors.
/// </summary>
/// <param name="ctx">Pipeline context</param>
/// <param name="name">Endpoint name; resolved against the datasource admin</param>
/// <param name="postProcessors">Post-processor list; null falls back to DefaultPostProcessors</param>
/// <exception cref="BMException">When a cached endpoint was created with different post-processors</exception>
public IDataEndpoint CreateOrGetDataEndpoint(PipelineContext ctx, String name, String postProcessors)
{
    String endpointName = getEndpointName(name, ctx.DatasourceAdmin);
    EndpointCacheEntry epEntry;
    if (endPointCache == null) endPointCache = new StringDict<EndpointCacheEntry>();
    if (endPointCache.TryGetValue(endpointName, out epEntry))
    {
        // Cached entry is only reusable when requested with the same (or unspecified) post-processors.
        if (postProcessors == null || String.Equals(postProcessors, epEntry.PostProcessors, StringComparison.OrdinalIgnoreCase))
        {
            return epEntry.Endpoint;
        }
        // Fix: message typo "adn" -> "and".
        throw new BMException("Endpoint [{0}] is used with different post-processors [{1}] and [{2}].", epEntry.Name, epEntry.PostProcessors, postProcessors);
    }

    IDataEndpoint ep = this.ImportEngine.Endpoints.GetDataEndpoint(ctx, endpointName);
    if (postProcessors == null) postProcessors = DefaultPostProcessors;
    if (postProcessors != null) ep = wrapPostProcessors(ctx, ep, postProcessors);

    epEntry = new EndpointCacheEntry(endpointName, ep, postProcessors);
    endPointCache.Add(endpointName, epEntry);
    // Late creation: when the pipeline already started, start the new endpoint immediately.
    if (started) epEntry.Endpoint.Start(ctx);
    return epEntry.Endpoint;
}
/// <summary>
/// Clone-constructor: copies the map/reduce configuration from another instance.
/// When buffering is enabled (bufferSize > 0) an in-memory buffer plus a single-slot
/// async request queue are allocated, and the mapping setup is logged.
/// </summary>
/// <param name="ctx">Pipeline context (used for cloning undup actions and logging)</param>
/// <param name="other">Prototype processor to copy configuration from</param>
/// <param name="epOrnextProcessor">Next stage in the post-processor chain</param>
public MapReduceProcessor(PipelineContext ctx, MapReduceProcessor other, IDataEndpoint epOrnextProcessor)
    : base(other, epOrnextProcessor)
{
    directory = other.directory;
    hasher = other.hasher;
    sorter = other.sorter;
    undupper = other.undupper;
    keepFiles = other.keepFiles;
    fanOut = other.fanOut;
    compress = other.compress;
    maxNullIndex = other.maxNullIndex;
    bufferSize = other.bufferSize;
    readMaxParallel = other.readMaxParallel;
    if (other.undupActions != null) { undupActions = other.undupActions.Clone(ctx); }
    if (bufferSize > 0)
    {
        buffer = new List<JObject>(bufferSize);
        asyncQ = AsyncRequestQueue.Create(1);
    }
    // A null directory is reported as "<memory>": the map phase then runs fully in memory.
    ctx.ImportLog.Log("Postprocessor [{0}]: mapping to {1}. Fan-out={2}.", Name, directory == null ? "<memory>" : directory, fanOut);
}
/// <summary>
/// Method to create the data endpoint
/// </summary>
/// <param name="logger">The logger to use</param>
/// <param name="globalMeta">Global meta dictionary</param>
/// <param name="meta">Meta dictionary</param>
/// <returns>The data endpoint</returns>
/// <exception cref="NetServiceException">When the type has neither a (Logger) nor a parameterless constructor</exception>
public override IDataEndpoint Create(Logger logger, MetaDictionary meta, MetaDictionary globalMeta)
{
    IDataEndpoint server;
    // Prefer a constructor taking a Logger; fall back to the parameterless one.
    ConstructorInfo ci = _type.GetConstructor(new[] { typeof(Logger) });
    if (ci != null)
    {
        server = (IDataEndpoint)ci.Invoke(new object[] { logger });
    }
    else
    {
        // Idiom: Type.EmptyTypes instead of allocating 'new Type[0]'.
        ci = _type.GetConstructor(Type.EmptyTypes);
        if (ci == null)
        {
            throw new NetServiceException("Can not find an appropriate constructor for endpoint");
        }
        server = (IDataEndpoint)ci.Invoke(new object[0]);
    }
    server.Meta = meta;
    server.GlobalMeta = globalMeta;
    // Restore persisted configuration when present and the endpoint supports it
    // (pattern match replaces the as-cast + null check).
    if (Config != null && server is IPersistNode persist)
    {
        persist.SetState(Config, logger);
    }
    return server;
}
/// <summary>
/// Creates a clone of this processor wired to the given next stage.
/// </summary>
public override IPostProcessor Clone(PipelineContext ctx, IDataEndpoint epOrnextProcessor)
    => new RepeatProcessor(ctx, this, epOrnextProcessor);
/// <summary>
/// Clone-constructor: copies the repeat count from another instance.
/// </summary>
public RepeatProcessor(PipelineContext ctx, RepeatProcessor other, IDataEndpoint epOrnextProcessor)
    : base(other, epOrnextProcessor)
{
    this.repeatCount = other.repeatCount;
}
/// <summary>
/// Cache entry tying an endpoint to the post-processor list it was created with.
/// </summary>
/// <param name="name">Resolved endpoint name (cache key)</param>
/// <param name="endpoint">The (possibly wrapped) endpoint instance</param>
/// <param name="postProcessors">Post-processor list used at creation time</param>
public EndpointCacheEntry(String name, IDataEndpoint endpoint, String postProcessors)
{
    this.PostProcessors = postProcessors;
    this.Endpoint = endpoint;
    this.Name = name;
}
/// <summary>
/// Creates a clone of this post-processor wired to the given next stage
/// (either a plain endpoint or the next post-processor in the chain).
/// </summary>
/// <param name="ctx">Pipeline context for the clone</param>
/// <param name="epOrnextProcessor">Next stage in the chain</param>
public abstract IPostProcessor Clone(PipelineContext ctx, IDataEndpoint epOrnextProcessor);
/// <summary>
/// Creates a SearchController, substituting a mock endpoint when none is supplied.
/// </summary>
private SearchController NewSearchController(IDataEndpoint dataEndpoint = null)
{
    var endpoint = dataEndpoint ?? MockDataEndpoint();
    return new SearchController(endpoint);
}
/// <summary>
/// Even when the target's Delete action throws, subsequent resources published on the
/// channel must still reach the target.
/// </summary>
public void ShouldNotBreakPublishing()
{
    _testTarget = new InMemoryDataEndpoint<TestResource>(t => t.Id);
    _testTarget.AddSyncAction(t => t.Deleted, (ds, t) => { throw new Exception("Ooops"); }, "Delete");
    _testTarget.AddSyncAction(t => string.IsNullOrEmpty(t.CorrelationId), (ds, r) => ds.Create(r), "Create");

    using (var channel = new SynchronizationChannel<TestResource>(_testSource, _testTarget))
    {
        channel.Open();

        // Trigger the failing Delete action with the first resource.
        _testResource = new TestResource(1);
        _testSource.Create(_testResource);
        _testSource.Delete(_testResource);

        // The channel must still deliver the next resource.
        var followUpResource = new TestResource(2);
        _testSource.Create(followUpResource);
        Assert.AreEqual(followUpResource, _testTarget.Get(2));
    }
}
/// <summary>
/// Creates a non-batch synchronization channel between a source and a target endpoint
/// (delegates to the main constructor with isBatchChannel = false).
/// </summary>
/// <param name="source">Endpoint changes originate from</param>
/// <param name="target">Endpoint changes are applied to</param>
public SynchronizationChannel(IDataEndpoint<T> source, IDataEndpoint<T> target)
    : this(source, target, false)
{
}
/// <summary>
/// Clone-constructor: copies the undupper from another instance.
/// </summary>
public UniqueProcessor(PipelineContext ctx, UniqueProcessor other, IDataEndpoint epOrnextProcessor)
    : base(other, epOrnextProcessor)
{
    this.undupper = other.undupper;
}