/// <summary>
/// Loads the run-administration records from ElasticSearch, sorted on adm_date (newest first).
/// On a full import the previous index is read; otherwise the current one.
/// Returns an (possibly empty) RunAdministrations object; it never returns null.
/// Individual malformed records are logged and skipped. A failing load is swallowed
/// (and logged) during a full import, but rethrown for an incremental import.
/// </summary>
public RunAdministrations LoadAdministration(PipelineContext ctx)
{
   JObject sortCmd = JObject.Parse("{ 'sort': [{'adm_date': 'desc'}]}");
   var result = new RunAdministrations(ctx.ImportEngine.RunAdminSettings);
   try
   {
      // No admin index at all: nothing to load
      if (!DocType.IndexExists) return result;

      // Full import reads the previous index; incremental import reads the current one
      String url = ((ctx.ImportFlags & _ImportFlags.FullImport) == 0)
                 ? DocType.UrlPart
                 : DocType.UrlPartForPreviousIndex;
      if (!Connection.Exists(url))
      {
         ctx.ErrorLog.Log("Cannot load previous administration: '{0}' not exists", url);
         return result;
      }

      var records = new ESRecordEnum(Connection, url, sortCmd, result.Settings.Capacity, "5m", false);
      foreach (var doc in records)
      {
         RunAdministration ra;
         try
         {
            ra = new RunAdministration(doc._Source);
         }
         catch (Exception err)
         {
            // A single bad record must not abort the whole load: log and skip it
            String msg = String.Format("Invalid record in run administration. Skipped.\nRecord={0}.", doc);
            ctx.ImportLog.Log(_LogType.ltWarning, msg);
            ctx.ErrorLog.Log(_LogType.ltWarning, msg);
            ctx.ErrorLog.Log(err);
            continue;
         }
         result.Add(ra);
         // Safety cap on the number of loaded admin records
         if (result.Count >= 500) break;
      }
      return result.Dump("loaded");
   }
   catch (Exception err)
   {
      // Incremental imports need the previous administration: propagate the error.
      // A full import can proceed without it, so only log.
      if ((ctx.ImportFlags & _ImportFlags.FullImport) == 0) throw;
      ctx.ErrorLog.Log("Cannot load previous administration:");
      ctx.ErrorLog.Log(err);
      return result;
   }
}
/// <summary>
/// Imports from an ElasticSearch url: either executes a raw @command (POST) and emits the
/// JSON response, or enumerates the records of @index (optionally via scan/scroll) and emits
/// each record to the sink, splitting nested tokens down to @splituntil levels.
/// Exceptions are routed to ctx.HandleException rather than propagated.
/// </summary>
private void importUrl(PipelineContext ctx, IDatasourceSink sink, IStreamProvider elt)
{
   // Per-element attributes override the datasource-level defaults
   int maxParallel = elt.ContextNode.ReadInt("@maxparallel", this.maxParallel);
   int splitUntil = elt.ContextNode.ReadInt("@splituntil", this.splitUntil);
   if (splitUntil < 0) splitUntil = int.MaxValue; // negative means: split without limit
   bool scan = elt.ContextNode.ReadBool("@scan", this.scan);

   String url = elt.ToString();
   ctx.SendItemStart(elt);
   String command = elt.ContextNode.ReadStr("@command", null);
   String index = command != null ? null : elt.ContextNode.ReadStr("@index"); //mutual exclusive with command
   String reqBody = elt.ContextNode.ReadStr("request", this.requestBody);
   JObject req = null;
   if (reqBody != null) req = JObject.Parse(reqBody);
   ctx.DebugLog.Log("Request scan={1}, body={0}", reqBody, scan);

   try
   {
      // Validates the url early: the Uri constructor throws on a malformed url
      Uri uri = new Uri(url);
      ESConnection conn = ESHelper.CreateConnection(ctx, url);
      ContextCallback cb = new ContextCallback(ctx, this, elt);
      conn.Timeout = timeoutInMs; //Same timeout as what we send to ES
      conn.OnPrepareRequest = cb.OnPrepareRequest;

      if (command != null)
      {
         // Command mode: POST the raw body and emit the JSON response
         var resp = conn.SendCmd("POST", command, reqBody);
         resp.ThrowIfError();
         Pipeline.EmitToken(ctx, sink, resp.JObject, "response", splitUntil);
      }
      else
      {
         // Enumeration mode: stream the index records (scan/scroll when requested)
         ESRecordEnum e = new ESRecordEnum(conn, index, req, numRecords, timeout, scan);
         if (maxParallel > 0) e.Async = true;
         // FIX: this format string was split across a raw newline, which is invalid in a
         // regular C# string literal; rejoined with an explicit \n escape instead.
         ctx.ImportLog.Log("Starting scan of {0} records.\nIndex={1}, connection={2}, async={3}, buffersize={4} requestbody={5}, splituntil={6}, scan={7}.",
            e.Count, index, url, e.Async, numRecords, req != null, splitUntil, scan);
         foreach (var doc in e)
         {
            ctx.IncrementEmitted();
            sink.HandleValue(ctx, "record/_sort", doc.Sort);
            sink.HandleValue(ctx, "record/_type", doc.Type);
            if (splitUntil != 0)
            {
               // Emit each top-level field; recurse into nested tokens until the split budget is spent
               foreach (var kvp in doc)
               {
                  String pfx = "record/" + kvp.Key;
                  if (splitUntil == 1)
                  {
                     sink.HandleValue(ctx, pfx, kvp.Value);
                     continue;
                  }
                  Pipeline.EmitToken(ctx, sink, kvp.Value, pfx, splitUntil - 1);
               }
            }
            sink.HandleValue(ctx, "record", doc);
         }
      }
      ctx.SendItemStop();
   }
   catch (Exception e)
   {
      ctx.HandleException(e);
   }
}