protected void EmitQuery(PipelineContext ctx, DbConnection connection, Query q)
{
   var cmd = q.CreateCommand(connection);
   ctx.DebugLog.Log("Exec SQL command [{0}], timeout={1} (sec).", cmd.CommandText, cmd.CommandTimeout);

   ctx.SendItemStart(cmd);
   using (DbDataReader rdr = executeReader(ctx, cmd))
   {
      EmitRecords(ctx, rdr, q);
   }
   ctx.SendItemStop(cmd);
}
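//NOTE: illustrative sketch only. The executeReader() helper used above is not shown in
//this fragment; the version below is an assumption of what a minimal implementation could
//look like (wrap DbCommand.ExecuteReader and log failures before propagating them), not
//the actual code, which may add retries or error translation.
private DbDataReader executeReader(PipelineContext ctx, DbCommand cmd)
{
   try
   {
      return cmd.ExecuteReader();
   }
   catch (Exception e)
   {
      ctx.DebugLog.Log("SQL command failed: {0}", e.Message);
      throw;
   }
}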
protected virtual void ImportUrl(PipelineContext ctx, IDatasourceSink sink, IStreamProvider elt)
{
   int orgEmitted = ctx.Emitted;
   if (addEmitted) ctx.IncrementEmitted();

   DateTime dtFile = elt.LastModified;
   ctx.SendItemStart(elt);
   //TODO if ((ctx.ActionFlags & _ActionFlags.Skip) != 0

   //Check if we need to import this file
   if ((ctx.ImportFlags & _ImportFlags.ImportFull) == 0) //Not a full import
   {
      if (!CheckNeedImport(ctx, sink, elt)) goto SKIPPED;
   }
   if (ctx.SkipUntilKey == "record") goto SKIPPED;

   using (Stream fs = _CreateStream(ctx, elt))
   {
      ImportStream(ctx, sink, elt, fs);
   }

   //Make sure the file counts as at least one emitted item, even if the stream produced nothing
   if (!addEmitted && orgEmitted == ctx.Emitted) ctx.IncrementEmitted();
   ctx.OptSendItemStop();
   return;

SKIPPED:
   ctx.Skipped++;
   if (!addEmitted && orgEmitted == ctx.Emitted) ctx.IncrementEmitted();
   if (logSkips) ctx.DebugLog.Log("Skipped: {0}. Date={1}", elt.FullName, dtFile);
}
private void importUrl(PipelineContext ctx, IDatasourceSink sink, IStreamProvider elt)
{
   int splitUntil = elt.ContextNode.ReadInt("@splituntil", this.splitUntil);
   bool objectPerLine = elt.ContextNode.ReadBool("@objectperline", this.objectPerLine);

   ctx.SendItemStart(elt);
   if ((ctx.ActionFlags & _ActionFlags.Skip) != 0) return;

   ExistState existState = ExistState.NotExist;
   if ((ctx.ImportFlags & _ImportFlags.ImportFull) == 0) //Not a full import
      existState = toExistState(sink.HandleValue(ctx, "record/_checkexist", null));

   //Check if we need to import this file
   if ((existState & (ExistState.ExistSame | ExistState.ExistNewer | ExistState.Exist)) != 0)
   {
      ctx.Skipped++;
      ctx.ImportLog.Log("Skipped: {0}. Date={1}", elt, elt.LastModified);
      return;
   }

   Stream fs = null;
   try
   {
      fs = elt.CreateStream(ctx);
      if (!objectPerLine)
      {
         //The whole stream is one object
         importRecord(ctx, sink, fs, splitUntil);
      }
      else
      {
         //One object per line: split the stream on '\n' and import every non-empty line
         byte[] buf = new byte[4096];
         int offset = 0;               //Bytes in buf that belong to the current (partial) line
         var tmp = new MemoryStream(); //Holds lines that span buffer boundaries
         while (true)
         {
            int read = fs.Read(buf, offset, buf.Length - offset);
            if (read == 0) break;
            int len = offset + read;

            int start = 0;
            for (int i = 0; i < len; i++)
            {
               if (buf[i] != '\n') continue;

               //Complete line: emit what was carried over plus buf[start..i)
               tmp.Write(buf, start, i - start);
               if (tmp.Length > 0)
               {
                  tmp.Position = 0;
                  importRecord(ctx, sink, tmp, splitUntil);
                  tmp.SetLength(0);
               }
               start = i + 1;
            }

            //Carry the trailing partial line over to the next read
            offset = len - start;
            if (offset == buf.Length)
            {
               //Line is longer than the buffer: park it in tmp and refill the buffer
               tmp.Write(buf, 0, offset);
               offset = 0;
            }
            else if (offset > 0 && start > 0)
               Array.Copy(buf, start, buf, 0, offset);
         }

         //Flush the last, unterminated line (if any)
         tmp.Write(buf, 0, offset);
         if (tmp.Length > 0)
         {
            tmp.Position = 0;
            importRecord(ctx, sink, tmp, splitUntil);
         }
      }
      ctx.OptSendItemStop();
   }
   catch (Exception e)
   {
      ctx.HandleException(e);
   }
   finally
   {
      if (fs != null) fs.Dispose();
   }
}
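//NOTE: illustrative sketch only. toExistState() is referenced above but not part of this
//fragment. It presumably maps whatever the sink returns for "record/_checkexist" onto the
//ExistState flags; the mapping below is an assumption, not the actual implementation.
private static ExistState toExistState(Object value)
{
   if (value == null) return ExistState.NotExist;
   if (value is ExistState) return (ExistState)value;
   if (value is bool) return (bool)value ? ExistState.Exist : ExistState.NotExist;
   return ExistState.NotExist;
}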
private void importUrl(PipelineContext ctx, IDatasourceSink sink, IStreamProvider elt)
{
   int maxParallel = elt.ContextNode.ReadInt("@maxparallel", this.maxParallel);
   int splitUntil = elt.ContextNode.ReadInt("@splituntil", this.splitUntil);
   if (splitUntil < 0) splitUntil = int.MaxValue;
   bool scan = elt.ContextNode.ReadBool("@scan", this.scan);

   String url = elt.ToString();
   ctx.SendItemStart(elt);

   String command = elt.ContextNode.ReadStr("@command", null);
   String index = command != null ? null : elt.ContextNode.ReadStr("@index"); //Mutually exclusive with command
   String reqBody = elt.ContextNode.ReadStr("request", this.requestBody);
   JObject req = null;
   if (reqBody != null) req = JObject.Parse(reqBody);
   ctx.DebugLog.Log("Request scan={0}, body={1}", scan, reqBody);

   try
   {
      Uri uri = new Uri(url); //Fails early on an invalid url
      ESConnection conn = ESHelper.CreateConnection(ctx, url);
      ContextCallback cb = new ContextCallback(ctx, this, elt);
      conn.Timeout = timeoutInMs; //Same timeout as what we send to ES
      conn.OnPrepareRequest = cb.OnPrepareRequest;

      if (command != null)
      {
         //Execute a single command and emit its response
         var resp = conn.SendCmd("POST", command, reqBody);
         resp.ThrowIfError();
         Pipeline.EmitToken(ctx, sink, resp.JObject, "response", splitUntil);
      }
      else
      {
         //Enumerate the records from the index
         ESRecordEnum e = new ESRecordEnum(conn, index, req, numRecords, timeout, scan);
         if (maxParallel > 0) e.Async = true;
         ctx.ImportLog.Log("Starting scan of {0} records. Index={1}, connection={2}, async={3}, buffersize={4}, requestbody={5}, splituntil={6}, scan={7}.",
                           e.Count, index, url, e.Async, numRecords, req != null, splitUntil, scan);
         foreach (var doc in e)
         {
            ctx.IncrementEmitted();
            sink.HandleValue(ctx, "record/_sort", doc.Sort);
            sink.HandleValue(ctx, "record/_type", doc.Type);
            if (splitUntil != 0)
            {
               //Emit the individual fields, optionally splitting nested values further
               foreach (var kvp in doc)
               {
                  String pfx = "record/" + kvp.Key;
                  if (splitUntil == 1)
                  {
                     sink.HandleValue(ctx, pfx, kvp.Value);
                     continue;
                  }
                  Pipeline.EmitToken(ctx, sink, kvp.Value, pfx, splitUntil - 1);
               }
            }
            sink.HandleValue(ctx, "record", doc);
         }
      }
      ctx.SendItemStop();
   }
   catch (Exception e)
   {
      ctx.HandleException(e);
   }
}