/// <summary>
/// Exports every document matched by <paramref name="queryRequest"/> to a
/// CSV load file under <paramref name="outDirectory"/>, pulling documents
/// from Object Manager in blocks of <paramref name="batchSize"/>.
/// Any pre-existing output directory is deleted first.
/// </summary>
/// <param name="objMgr">Object Manager proxy used for the export calls.</param>
/// <param name="workspaceId">Artifact ID of the workspace to export from.</param>
/// <param name="batchSize">Maximum number of documents fetched per block.</param>
/// <param name="fields">Fields to export; their names become the CSV column headings.</param>
/// <param name="queryRequest">Query selecting the documents; its Fields property is overwritten here.</param>
/// <param name="outDirectory">Directory that will receive the load file.</param>
private static void Export(
    IObjectManager objMgr,
    int workspaceId,
    int batchSize,
    List<Field> fields,
    QueryRequest queryRequest,
    string outDirectory)
{
    // Start from a clean output directory.
    if (Directory.Exists(outDirectory))
    {
        Console.WriteLine("Deleting " + outDirectory);
        Directory.Delete(outDirectory, true);
    }
    Directory.CreateDirectory(outDirectory);

    // Path.Combine instead of a hard-coded '\' so the path is built with the
    // platform's separator and tolerates a trailing separator on outDirectory.
    string loadFile = Path.Combine(outDirectory, "load.csv");

    // Write the field names as the CSV column headings.
    // NOTE(review): no line terminator is written after the heading row; this
    // assumes the row writer (Common.AppendToLoadFileAsync) prefixes each
    // record with a newline — confirm against Common.
    IEnumerable<string> fieldNames = fields.Select(x => x.Name);
    File.AppendAllText(loadFile, string.Join(",", fieldNames));

    // The query wants FieldRefs, not Fields.
    queryRequest.Fields = fields.Select(x => new FieldRef { ArtifactID = x.ArtifactID });

    const int startPage = 0; // index of starting page

    // Initialize the export so we know how many documents we are exporting in
    // total. GetAwaiter().GetResult() (rather than .Result) so a failure
    // surfaces as the original exception instead of an AggregateException.
    ExportInitializationResults initResults =
        objMgr.InitializeExportAsync(workspaceId, queryRequest, startPage)
            .GetAwaiter()
            .GetResult();
    long totalCount = initResults.RecordCount;

    // Remember how many base-10 digits the total needs (used elsewhere for padding).
    Common.MaxDigits = Common.CountBase10Digits(totalCount);

    // Ceiling division: a partial final batch still counts as a batch.
    long batchCount = totalCount / batchSize;
    if (totalCount % batchSize != 0)
    {
        batchCount++;
    }

    long currBatchCount = 1;
    while (true)
    {
        RelativityObjectSlim[] docBatch =
            objMgr.RetrieveNextResultsBlockFromExportAsync(workspaceId, initResults.RunID, batchSize)
                .GetAwaiter()
                .GetResult();

        // A null or empty block signals that the export run is exhausted.
        if (docBatch == null || docBatch.Length == 0)
        {
            break;
        }

        Console.WriteLine($"Exporting batch {currBatchCount} of {batchCount} (size {docBatch.Length}).");

        foreach (RelativityObjectSlim obj in docBatch)
        {
            // NOTE(review): the Task returned by AppendToLoadFileAsync is not
            // observed, so a failure inside it would be silently lost —
            // confirm that the queue drained by CompleteAddingBatches surfaces
            // errors some other way.
            Common.AppendToLoadFileAsync(workspaceId, obj.ArtifactID, fields, obj.Values, loadFile);
        }

        currBatchCount++;
    }

    // Signal the writer queue that no more batches will arrive.
    Common.CompleteAddingBatches();
}
/// <summary>
/// Drives a multi-threaded export pipeline: initializes an Object Manager
/// export run, then starts three groups of worker threads (block consumers,
/// long-text streamers, handler callers) and shuts the pipeline's queues
/// down stage by stage as each group finishes.
/// </summary>
/// <remarks>
/// Shutdown ordering is load-bearing: each queue's CompleteAdding must only
/// be called after every thread that adds to it has been joined, otherwise
/// producers would throw on a completed queue.
/// </remarks>
private void Run()
{
    // Create lists for the various thread types so they can be joined later.
    List<Thread> handerCallerThreads = new List<Thread>();
    List<Thread> textStreamerThreads = new List<Thread>();
    List<Thread> blockingConsumerThreads = new List<Thread>();

    // Lets catch all exceptions and report them through the handler
    try
    {
        // Get Object Manager; a proxy failure is reported and aborts the run.
        try
        {
            _objectManager = GetKeplerServiceFactory()
                .CreateProxy<Relativity.Services.Objects.IObjectManager>();
        }
        catch (Exception exception)
        {
            _userHandler.Error("GetKeplerServiceFactory().CreateProxy failed", exception);
            return;
        }

        // Initialize Query (page index 0); failure is reported and aborts the run.
        ExportInitializationResults exportInitializationResults = null;
        try
        {
            // NOTE(review): .Result blocks the calling thread and wraps
            // failures in AggregateException — presumably acceptable here
            // since this runs on a dedicated thread; confirm.
            exportInitializationResults =
                _objectManager.InitializeExportAsync(_workspaceId, _queryRequest, 0).Result;
        }
        catch (Exception exception)
        {
            _userHandler.Error("InitializeExportAsync failed", exception);
            return;
        }

        // Save run info for the worker threads to use.
        _runId = exportInitializationResults.RunID;
        _recordCount = exportInitializationResults.RecordCount;

        // Find the positional indexes of all long text fields; these are the
        // fields whose values must be fetched via streams by the streamer threads.
        List<int> longTextIds = new List<int>();
        for (int i = 0; i < exportInitializationResults.FieldData.Count; i++)
        {
            if (exportInitializationResults.FieldData[i].FieldType == FieldType.LongText)
            {
                longTextIds.Add(i);
            }
        }
        _longTextIds = longTextIds.ToArray();

        // Call the handler's Before method, giving the user code a chance to
        // set up before any documents flow.
        _userHandler.Before(exportInitializationResults);

        // Create threads that read blocks of documents.
        // NOTE(review): "BlockConcumer" is a typo for "BlockConsumer"; it is a
        // runtime thread name (diagnostics only), left as-is here.
        for (int i = 0; i < _scaleFactor; i++)
        {
            Thread t = new Thread(BlockConsumer) { Name = "BlockConcumer" + i };
            t.Start();
            blockingConsumerThreads.Add(t);
        }

        // Create threads that open long text streams.
        for (int i = 0; i < _scaleFactor; i++)
        {
            Thread t = new Thread(TextStreamer) { Name = "TextStreamer" + i };
            t.Start();
            textStreamerThreads.Add(t);
        }

        // Create threads that call the handler's Item method.
        // Use only a single thread if the handler has not declared itself
        // as thread safe.
        int handlerCallerThreadCount = _userHandler.ThreadSafe ? _scaleFactor : 1;
        for (int i = 0; i < handlerCallerThreadCount; i++)
        {
            Thread t = new Thread(HandlerCaller) { Name = "HandlerCaller" + i };
            t.Start();
            handerCallerThreads.Add(t);
        }
    }
    catch (Exception exception)
    {
        // Report and request cancellation; the joins below still run so any
        // threads that did start are drained cleanly.
        SendErrorAndCancel("Unexpected exception", exception);
    }

    // Wait for the threads reading blocks of documents to complete.
    foreach (Thread t in blockingConsumerThreads)
    {
        t.Join();
    }

    // Indicate that we will add no more documents to the standard and
    // stream queues (their only producers have now exited).
    _standardQueue.CompleteAdding();
    _streamQueue.CompleteAdding();

    // Wait for the threads opening streams to complete.
    foreach (Thread t in textStreamerThreads)
    {
        t.Join();
    }

    // Indicate that no more documents with open streams will be added
    // to that queue.
    _openStreamQueue.CompleteAdding();

    // Wait for all the documents remaining in the standard and open stream
    // queues to be sent to the handler.
    foreach (Thread t in handerCallerThreads)
    {
        t.Join();
    }

    // Call the handler's After method; the argument is true only if the run
    // completed without a cancellation being requested.
    _userHandler.After(!_cancellationToken.IsCancellationRequested);
}