public List<Error> FinishUpload(TaskManager taskManager)
{
    List<Error> temp = new List<Error>();
    DatasetManager dm = new DatasetManager();
    DatasetVersion workingCopy = new DatasetVersion();

    // datatuple list
    List<DataTuple> rows = new List<DataTuple>();
    Dataset ds = null;
    bool inputWasAltered = false;

    if (TaskManager.Bus.ContainsKey(TaskManager.DATASET_ID) && TaskManager.Bus.ContainsKey(TaskManager.DATASTRUCTURE_ID))
    {
        long id = Convert.ToInt32(TaskManager.Bus[TaskManager.DATASET_ID]);
        DataStructureManager dsm = new DataStructureManager();
        long iddsd = Convert.ToInt32(TaskManager.Bus[TaskManager.DATASTRUCTURE_ID]);

        ds = dm.GetDataset(id);
        // Javad: Please check if the dataset does exist!!

        #region Progress Information

        if (TaskManager.Bus.ContainsKey(TaskManager.CURRENTPACKAGESIZE))
            TaskManager.Bus[TaskManager.CURRENTPACKAGESIZE] = 0;
        else
            TaskManager.Bus.Add(TaskManager.CURRENTPACKAGESIZE, 0);

        if (TaskManager.Bus.ContainsKey(TaskManager.CURRENTPACKAGE))
            TaskManager.Bus[TaskManager.CURRENTPACKAGE] = 0;
        else
            TaskManager.Bus.Add(TaskManager.CURRENTPACKAGE, 0);

        #endregion

        #region structured data

        if (TaskManager.Bus.ContainsKey(TaskManager.DATASTRUCTURE_TYPE) && TaskManager.Bus[TaskManager.DATASTRUCTURE_TYPE].Equals(DataStructureType.Structured))
        {
            try
            {
                List<long> datatupleFromDatabaseIds = dm.GetDatasetVersionEffectiveTupleIds(dm.GetDatasetLatestVersion(ds.Id));

                StructuredDataStructure sds = dsm.StructuredDataStructureRepo.Get(iddsd);
                dsm.StructuredDataStructureRepo.LoadIfNot(sds.Variables);

                #region excel reader

                if (TaskManager.Bus[TaskManager.EXTENTION].ToString().Equals(".xlsm"))
                {
                    int packageSize = 10000;
                    TaskManager.Bus[TaskManager.CURRENTPACKAGESIZE] = packageSize;

                    int counter = 0;
                    ExcelReader reader = new ExcelReader();

                    // read and store the file package by package
                    dm.CheckOutDatasetIfNot(ds.Id, GetUsernameOrDefault()); // there are cases where the dataset does not get checked out!!
                    if (!dm.IsDatasetCheckedOutFor(ds.Id, GetUsernameOrDefault()))
                        throw new Exception(string.Format("Not able to checkout dataset '{0}' for user '{1}'!", ds.Id, GetUsernameOrDefault()));

                    workingCopy = dm.GetDatasetWorkingCopy(ds.Id);

                    do
                    {
                        counter++;
                        TaskManager.Bus[TaskManager.CURRENTPACKAGE] = counter;

                        // open file
                        Stream = reader.Open(TaskManager.Bus[TaskManager.FILEPATH].ToString());

                        Stopwatch upload = Stopwatch.StartNew();
                        rows = reader.ReadFile(Stream, TaskManager.Bus[TaskManager.FILENAME].ToString(), sds, (int)id, packageSize);
                        upload.Stop();
                        Debug.WriteLine("ReadFile: " + counter + " Time " + upload.Elapsed.TotalSeconds.ToString());

                        if (reader.ErrorMessages.Count > 0)
                        {
                            // reader errors are currently not reported back to the model here
                        }
                        else
                        {
                            //XXX Add packagesize to excel read function
                            if (TaskManager.Bus.ContainsKey(TaskManager.DATASET_STATUS))
                            {
                                if (TaskManager.Bus[TaskManager.DATASET_STATUS].ToString().Equals("new"))
                                {
                                    upload = Stopwatch.StartNew();
                                    dm.EditDatasetVersion(workingCopy, rows, null, null);
                                    upload.Stop();
                                    Debug.WriteLine("EditDatasetVersion: " + counter + " Time " + upload.Elapsed.TotalSeconds.ToString());
                                }

                                if (TaskManager.Bus[TaskManager.DATASET_STATUS].ToString().Equals("edit"))
                                {
                                    if (rows.Count() > 0)
                                    {
                                        Dictionary<string, List<DataTuple>> splittedDatatuples =
                                            UploadWizardHelper.GetSplitDatatuples(rows, (List<long>)TaskManager.Bus[TaskManager.PRIMARY_KEYS], workingCopy, ref datatupleFromDatabaseIds);

                                        dm.EditDatasetVersion(workingCopy, splittedDatatuples["new"], splittedDatatuples["edit"], null);
                                    }
                                }
                            }
                        }

                        Stream.Close();
                    } while (rows.Count() > 0);
                }

                #endregion

                #region ascii reader

                if (TaskManager.Bus[TaskManager.EXTENTION].ToString().Equals(".csv") || TaskManager.Bus[TaskManager.EXTENTION].ToString().Equals(".txt"))
                {
                    // open file
                    AsciiReader reader = new AsciiReader();

                    Stopwatch totalTime = Stopwatch.StartNew();

                    if (dm.IsDatasetCheckedOutFor(ds.Id, GetUsernameOrDefault()) || dm.CheckOutDataset(ds.Id, GetUsernameOrDefault()))
                    {
                        workingCopy = dm.GetDatasetWorkingCopy(ds.Id);

                        int packageSize = 100000;
                        TaskManager.Bus[TaskManager.CURRENTPACKAGESIZE] = packageSize;

                        // read and store the file package by package
                        int counter = 0;

                        do
                        {
                            counter++;
                            inputWasAltered = false;
                            TaskManager.Bus[TaskManager.CURRENTPACKAGE] = counter;

                            Stream = reader.Open(TaskManager.Bus[TaskManager.FILEPATH].ToString());
                            rows = reader.ReadFile(Stream, TaskManager.Bus[TaskManager.FILENAME].ToString(), (AsciiFileReaderInfo)TaskManager.Bus[TaskManager.FILE_READER_INFO], sds, id, packageSize);
                            Stream.Close();

                            if (reader.ErrorMessages.Count > 0)
                            {
                                foreach (var err in reader.ErrorMessages)
                                {
                                    temp.Add(new Error(ErrorType.Dataset, err.GetMessage()));
                                }
                            }

                            Stopwatch dbTimer = Stopwatch.StartNew();

                            if (TaskManager.Bus.ContainsKey(TaskManager.DATASET_STATUS))
                            {
                                if (TaskManager.Bus[TaskManager.DATASET_STATUS].ToString().Equals("new"))
                                {
                                    dm.EditDatasetVersion(workingCopy, rows, null, null);
                                }

                                if (TaskManager.Bus[TaskManager.DATASET_STATUS].ToString().Equals("edit"))
                                {
                                    if (rows.Count() > 0)
                                    {
                                        var splittedDatatuples = UploadWizardHelper.GetSplitDatatuples(rows, (List<long>)TaskManager.Bus[TaskManager.PRIMARY_KEYS], workingCopy, ref datatupleFromDatabaseIds);
                                        dm.EditDatasetVersion(workingCopy, splittedDatatuples["new"], splittedDatatuples["edit"], null);
                                        inputWasAltered = true;
                                    }
                                }
                            }
                            else
                            {
                                if (rows.Count() > 0)
                                {
                                    Dictionary<string, List<DataTuple>> splittedDatatuples =
                                        UploadWizardHelper.GetSplitDatatuples(rows, (List<long>)TaskManager.Bus[TaskManager.PRIMARY_KEYS], workingCopy, ref datatupleFromDatabaseIds);

                                    dm.EditDatasetVersion(workingCopy, splittedDatatuples["new"], splittedDatatuples["edit"], null);
                                    inputWasAltered = true;
                                }
                            }

                            dbTimer.Stop();
                            Debug.WriteLine(" db time" + dbTimer.Elapsed.TotalSeconds.ToString());
                        } while (rows.Count() > 0 || inputWasAltered == true);

                        totalTime.Stop();
                        Debug.WriteLine(" Total Time " + totalTime.Elapsed.TotalSeconds.ToString());
                    }
                }

                #endregion

                #region content descriptors

                // remove the generated download files of the previous version (generatedTXT, generatedCSV, generated)
                foreach (string descriptorName in new[] { "generatedTXT", "generatedCSV", "generated" })
                {
                    ContentDescriptor tmp = workingCopy.ContentDescriptors.Where(c => c.Name.Equals(descriptorName)).FirstOrDefault();
                    if (tmp != null)
                        dm.DeleteContentDescriptor(tmp);
                }

                #endregion

                // ToDo: Get comment from UI and users
                MoveAndSaveOriginalFileInContentDiscriptor(workingCopy);
                dm.CheckInDataset(ds.Id, "upload data from upload wizard", GetUsernameOrDefault());
            }
            catch (Exception e)
            {
                temp.Add(new Error(ErrorType.Other, "Cannot upload: " + e.Message));
            }
        }

        #endregion

        #region unstructured data

        if (TaskManager.Bus.ContainsKey(TaskManager.DATASTRUCTURE_TYPE) && TaskManager.Bus[TaskManager.DATASTRUCTURE_TYPE].Equals(DataStructureType.Unstructured))
        {
            // check out the dataset, apply the changes, and check it in
            if (dm.IsDatasetCheckedOutFor(ds.Id, GetUsernameOrDefault()) || dm.CheckOutDataset(ds.Id, GetUsernameOrDefault()))
            {
                workingCopy = dm.GetDatasetWorkingCopy(ds.Id);
                SaveFileInContentDiscriptor(workingCopy);
                dm.EditDatasetVersion(workingCopy, null, null, null);

                // ToDo: Get comment from UI and users
                dm.CheckInDataset(ds.Id, "upload unstructured data", GetUsernameOrDefault());
            }
        }

        #endregion
    }
    else
    {
        temp.Add(new Error(ErrorType.Dataset, "Dataset is not selected."));
    }

    // ds is null when no dataset was selected; guard the final check-in/undo against a NullReferenceException
    if (ds != null)
    {
        if (temp.Count <= 0)
            dm.CheckInDataset(ds.Id, "checked in but no update on data tuples", GetUsernameOrDefault());
        else
            dm.UndoCheckoutDataset(ds.Id, GetUsernameOrDefault());
    }

    return temp;
}
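// NOTE: sketch only, not part of the original controller. FinishUpload and FinishUpload2 both
// repeat the same "check out -> edit working copy -> check in (or undo)" sequence. A wrapper
// like this hypothetical EditCheckedOutDataset could centralise that pattern; it uses only
// DatasetManager members that already appear above (IsDatasetCheckedOutFor, CheckOutDataset,
// GetDatasetWorkingCopy, CheckInDataset, UndoCheckoutDataset). Name and signature are illustrative.
private void EditCheckedOutDataset(DatasetManager dm, long datasetId, string comment, Action<DatasetVersion> edit)
{
    string user = GetUsernameOrDefault();

    // make sure the dataset is checked out for the current user
    if (!dm.IsDatasetCheckedOutFor(datasetId, user) && !dm.CheckOutDataset(datasetId, user))
        throw new Exception(string.Format("Not able to checkout dataset '{0}' for user '{1}'!", datasetId, user));

    try
    {
        DatasetVersion workingCopy = dm.GetDatasetWorkingCopy(datasetId);
        edit(workingCopy); // the caller applies EditDatasetVersion(...) and related changes here
        dm.CheckInDataset(datasetId, comment, user);
    }
    catch
    {
        // roll the checkout back if the edit or the check-in fails
        dm.UndoCheckoutDataset(datasetId, user);
        throw;
    }
}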
public List<Error> FinishUpload2(TaskManager taskManager)
{
    List<Error> temp = new List<Error>();

    if (TaskManager.Bus.ContainsKey(TaskManager.DATASET_ID) && TaskManager.Bus.ContainsKey(TaskManager.DATASTRUCTURE_ID))
    {
        long id = Convert.ToInt32(TaskManager.Bus[TaskManager.DATASET_ID]);
        DataStructureManager dsm = new DataStructureManager();
        long iddsd = Convert.ToInt32(TaskManager.Bus[TaskManager.DATASTRUCTURE_ID]);

        // datatuple list
        List<DataTuple> rows;

        DatasetManager dm = new DatasetManager();
        Dataset ds = dm.GetDataset(id);
        DatasetVersion workingCopy = new DatasetVersion();

        #region Progress Information

        if (TaskManager.Bus.ContainsKey(TaskManager.CURRENTPACKAGESIZE))
            TaskManager.Bus[TaskManager.CURRENTPACKAGESIZE] = 0;
        else
            TaskManager.Bus.Add(TaskManager.CURRENTPACKAGESIZE, 0);

        if (TaskManager.Bus.ContainsKey(TaskManager.CURRENTPACKAGE))
            TaskManager.Bus[TaskManager.CURRENTPACKAGE] = 0;
        else
            TaskManager.Bus.Add(TaskManager.CURRENTPACKAGE, 0);

        #endregion

        #region structured data

        if (TaskManager.Bus.ContainsKey(TaskManager.DATASTRUCTURE_TYPE) && TaskManager.Bus[TaskManager.DATASTRUCTURE_TYPE].Equals(DataStructureType.Structured))
        {
            try
            {
                List<AbstractTuple> datatupleFromDatabase = dm.GetDatasetVersionEffectiveTuples(dm.GetDatasetLatestVersion(ds.Id));

                StructuredDataStructure sds = dsm.StructuredDataStructureRepo.Get(iddsd);
                dsm.StructuredDataStructureRepo.LoadIfNot(sds.Variables);

                #region excel reader

                if (TaskManager.Bus[TaskManager.EXTENTION].ToString().Equals(".xlsm"))
                {
                    int packageSize = 10000;
                    TaskManager.Bus[TaskManager.CURRENTPACKAGESIZE] = packageSize;

                    int counter = 0;
                    ExcelReader reader = new ExcelReader();

                    // read and store the file package by package
                    dm.CheckOutDatasetIfNot(ds.Id, GetUsernameOrDefault()); // there are cases where the dataset does not get checked out!!
                    if (!dm.IsDatasetCheckedOutFor(ds.Id, GetUsernameOrDefault()))
                        throw new Exception(string.Format("Not able to checkout dataset '{0}' for user '{1}'!", ds.Id, GetUsernameOrDefault()));

                    workingCopy = dm.GetDatasetWorkingCopy(ds.Id);

                    do
                    {
                        counter++;
                        TaskManager.Bus[TaskManager.CURRENTPACKAGE] = counter;

                        // open file
                        Stream = reader.Open(TaskManager.Bus[TaskManager.FILEPATH].ToString());
                        rows = reader.ReadFile(Stream, TaskManager.Bus[TaskManager.FILENAME].ToString(), sds, (int)id, packageSize);

                        if (reader.ErrorMessages.Count > 0)
                        {
                            // reader errors are currently not reported back to the model here
                        }
                        else
                        {
                            //XXX Add packagesize to excel read function
                            if (TaskManager.Bus.ContainsKey(TaskManager.DATASET_STATUS))
                            {
                                if (TaskManager.Bus[TaskManager.DATASET_STATUS].ToString().Equals("new"))
                                {
                                    dm.EditDatasetVersion(workingCopy, rows, null, null);
                                }

                                if (TaskManager.Bus[TaskManager.DATASET_STATUS].ToString().Equals("edit"))
                                {
                                    if (rows.Count() > 0)
                                    {
                                        Dictionary<string, List<DataTuple>> splittedDatatuples =
                                            UploadWizardHelper.GetSplitDatatuples2(rows, (List<long>)TaskManager.Bus[TaskManager.PRIMARY_KEYS], workingCopy, ref datatupleFromDatabase);

                                        dm.EditDatasetVersion(workingCopy, splittedDatatuples["new"], splittedDatatuples["edit"], null);
                                    }
                                }
                            }
                        }

                        Stream.Close();
                    } while (rows.Count() > 0);
                }

                #endregion

                #region ascii reader

                if (TaskManager.Bus[TaskManager.EXTENTION].ToString().Equals(".csv") || TaskManager.Bus[TaskManager.EXTENTION].ToString().Equals(".txt"))
                {
                    // open file
                    AsciiReader reader = new AsciiReader();

                    Stopwatch totalTime = Stopwatch.StartNew();

                    if (dm.IsDatasetCheckedOutFor(ds.Id, GetUsernameOrDefault()) || dm.CheckOutDataset(ds.Id, GetUsernameOrDefault()))
                    {
                        workingCopy = dm.GetDatasetWorkingCopy(ds.Id);

                        int packageSize = 100000;
                        TaskManager.Bus[TaskManager.CURRENTPACKAGESIZE] = packageSize;

                        // read and store the file package by package
                        int counter = 0;

                        do
                        {
                            counter++;
                            TaskManager.Bus[TaskManager.CURRENTPACKAGE] = counter;

                            Stream = reader.Open(TaskManager.Bus[TaskManager.FILEPATH].ToString());
                            rows = reader.ReadFile(Stream, TaskManager.Bus[TaskManager.FILENAME].ToString(), (AsciiFileReaderInfo)TaskManager.Bus[TaskManager.FILE_READER_INFO], sds, id, packageSize);
                            Stream.Close();

                            if (reader.ErrorMessages.Count > 0)
                            {
                                // reader errors are currently not reported back to the model here
                            }
                            else
                            {
                                Stopwatch dbTimer = Stopwatch.StartNew();

                                if (TaskManager.Bus.ContainsKey(TaskManager.DATASET_STATUS))
                                {
                                    if (TaskManager.Bus[TaskManager.DATASET_STATUS].ToString().Equals("new"))
                                    {
                                        dm.EditDatasetVersion(workingCopy, rows, null, null);
                                    }

                                    if (TaskManager.Bus[TaskManager.DATASET_STATUS].ToString().Equals("edit"))
                                    {
                                        if (rows.Count() > 0)
                                        {
                                            Dictionary<string, List<DataTuple>> splittedDatatuples =
                                                UploadWizardHelper.GetSplitDatatuples2(rows, (List<long>)TaskManager.Bus[TaskManager.PRIMARY_KEYS], workingCopy, ref datatupleFromDatabase);

                                            dm.EditDatasetVersion(workingCopy, splittedDatatuples["new"], splittedDatatuples["edit"], null);
                                        }
                                    }
                                }
                                else
                                {
                                    if (rows.Count() > 0)
                                    {
                                        Dictionary<string, List<DataTuple>> splittedDatatuples =
                                            UploadWizardHelper.GetSplitDatatuples2(rows, (List<long>)TaskManager.Bus[TaskManager.PRIMARY_KEYS], workingCopy, ref datatupleFromDatabase);

                                        dm.EditDatasetVersion(workingCopy, splittedDatatuples["new"], splittedDatatuples["edit"], null);
                                    }
                                }

                                dbTimer.Stop();
                                Debug.WriteLine(" db time" + dbTimer.Elapsed.TotalSeconds.ToString());
                            }
                        } while (rows.Count() > 0);

                        totalTime.Stop();
                        Debug.WriteLine(" Total Time " + totalTime.Elapsed.TotalSeconds.ToString());
                    }
                }

                #endregion

                // start download generator
                //string path = "";
                //if (workingCopy != null)
                //{
                //    path = GenerateDownloadFile(workingCopy);
                //    dm.EditDatasetVersion(workingCopy, null, null, null);
                //}

                // ToDo: Get comment from UI and users
                dm.CheckInDataset(ds.Id, "upload data from upload wizard", GetUsernameOrDefault());

                LoggerFactory.LogData(id.ToString(), typeof(Dataset).Name, Vaiona.Entities.Logging.CrudState.Updated);
            }
            catch (Exception e)
            {
                temp.Add(new Error(ErrorType.Other, "Cannot upload: " + e.Message));
                dm.CheckInDataset(ds.Id, "checked in but no update on data tuples", GetUsernameOrDefault());
            }
        }

        #endregion

        #region unstructured data

        if (TaskManager.Bus.ContainsKey(TaskManager.DATASTRUCTURE_TYPE) && TaskManager.Bus[TaskManager.DATASTRUCTURE_TYPE].Equals(DataStructureType.Unstructured))
        {
            workingCopy = dm.GetDatasetLatestVersion(ds.Id);
            SaveFileInContentDiscriptor(workingCopy);
            dm.EditDatasetVersion(workingCopy, null, null, null);

            // ToDo: Get comment from UI and users
            dm.CheckInDataset(ds.Id, "upload unstructured data", GetUsernameOrDefault());
        }

        #endregion
    }
    else
    {
        temp.Add(new Error(ErrorType.Dataset, "Dataset is not selected."));
    }

    return temp;
}
public ActionResult ValidateFile()
{
    TaskManager TaskManager = (TaskManager)Session["TaskManager"];

    ValidationModel model = new ValidationModel();
    model.StepInfo = TaskManager.Current();

    if (TaskManager.Bus.ContainsKey(TaskManager.DATASET_ID) && TaskManager.Bus.ContainsKey(TaskManager.DATASTRUCTURE_ID))
    {
        try
        {
            long id = (long)Convert.ToInt32(TaskManager.Bus[TaskManager.DATASET_ID]);
            DataStructureManager dsm = new DataStructureManager();
            long iddsd = (long)Convert.ToInt32(TaskManager.Bus[TaskManager.DATASTRUCTURE_ID]);

            StructuredDataStructure sds = dsm.StructuredDataStructureRepo.Get(iddsd);
            dsm.StructuredDataStructureRepo.LoadIfNot(sds.Variables);

            if (TaskManager.Bus[TaskManager.EXTENTION].ToString().Equals(".xlsm"))
            {
                // open file
                ExcelReader reader = new ExcelReader();
                Stream = reader.Open(TaskManager.Bus[TaskManager.FILEPATH].ToString());

                reader.ValidateFile(Stream, TaskManager.Bus[TaskManager.FILENAME].ToString(), sds, id);
                model.ErrorList = reader.ErrorMessages;

                if (TaskManager.Bus.ContainsKey(TaskManager.NUMBERSOFROWS))
                    TaskManager.Bus[TaskManager.NUMBERSOFROWS] = reader.NumberOfRows;
                else
                    TaskManager.Bus.Add(TaskManager.NUMBERSOFROWS, reader.NumberOfRows);
            }

            if (TaskManager.Bus[TaskManager.EXTENTION].ToString().Equals(".csv") || TaskManager.Bus[TaskManager.EXTENTION].ToString().Equals(".txt"))
            {
                // open file
                AsciiReader reader = new AsciiReader();
                Stream = reader.Open(TaskManager.Bus[TaskManager.FILEPATH].ToString());

                reader.ValidateFile(Stream, TaskManager.Bus[TaskManager.FILENAME].ToString(), (AsciiFileReaderInfo)TaskManager.Bus[TaskManager.FILE_READER_INFO], sds, id);
                model.ErrorList = reader.ErrorMessages;

                if (TaskManager.Bus.ContainsKey(TaskManager.NUMBERSOFROWS))
                    TaskManager.Bus[TaskManager.NUMBERSOFROWS] = reader.NumberOfRows;
            }
        }
        catch (Exception ex)
        {
            model.ErrorList.Add(new Error(ErrorType.Other, "Cannot validate: " + ex.Message));
            TaskManager.AddToBus(TaskManager.VALID, false);
        }
        finally
        {
            // the stream may never have been opened if an exception occurred before reader.Open(...)
            if (Stream != null)
                Stream.Close();
        }
    }
    else
    {
        model.ErrorList.Add(new Error(ErrorType.Dataset, "Dataset is not selected."));
        TaskManager.AddToBus(TaskManager.VALID, false);
    }

    if (model.ErrorList.Count() == 0)
    {
        model.Validated = true;
        TaskManager.AddToBus(TaskManager.VALID, true);
    }

    return PartialView(TaskManager.Current().GetActionInfo.ActionName, model);
}
public ActionResult SelectAFile(object[] data)
{
    SelectFileViewModel model = new SelectFileViewModel();
    TaskManager = (TaskManager)Session["TaskManager"];

    if (data != null) TaskManager.AddToBus(data);

    model.StepInfo = TaskManager.Current();
    TaskManager.Current().SetValid(false);

    if (TaskManager != null)
    {
        // check whether a file path was submitted
        if (TaskManager.Bus.ContainsKey(TaskManager.FILEPATH))
        {
            if (IsSupportedExtention(TaskManager))
            {
                try
                {
                    if (GetDataStructureType().Equals(DataStructureType.Structured))
                    {
                        #region structured datastructure

                        string filePath = TaskManager.Bus[TaskManager.FILEPATH].ToString();

                        // macro-enabled excel file (template)
                        if (TaskManager.Bus[TaskManager.EXTENTION].ToString().Equals(".xlsm"))
                        {
                            // open file
                            ExcelReader reader = new ExcelReader();
                            Stream = reader.Open(filePath);

                            // check whether the file is a template
                            if (reader.IsTemplate(Stream))
                            {
                                TaskManager.Current().SetValid(true);
                                TaskManager.AddToBus(TaskManager.IS_TEMPLATE, "true");
                            }
                            else
                            {
                                model.ErrorList.Add(new Error(ErrorType.Other, "File is not a template."));
                                TaskManager.AddToBus(TaskManager.IS_TEMPLATE, "false");
                            }

                            Stream.Close();
                        }
                        else
                        {
                            TaskManager.AddToBus(TaskManager.IS_TEMPLATE, "false");

                            // excel file
                            if (TaskManager.Bus[TaskManager.EXTENTION].ToString().Equals(".xls"))
                            {
                                // open file
                                ExcelReader reader = new ExcelReader();
                                Stream = reader.Open(filePath);
                                TaskManager.Current().SetValid(true);
                                Stream.Close();
                            }
                            // text or csv file
                            else if (TaskManager.Bus[TaskManager.EXTENTION].ToString().Equals(".csv") || TaskManager.Bus[TaskManager.EXTENTION].ToString().Equals(".txt"))
                            {
                                // open file
                                AsciiReader reader = new AsciiReader();
                                Stream = reader.Open(filePath);
                                TaskManager.Current().SetValid(true);
                                Stream.Close();
                            }
                        }

                        #endregion
                    }

                    if (GetDataStructureType().Equals(DataStructureType.Unstructured))
                    {
                        #region unstructured datastructure

                        string filePath = TaskManager.Bus[TaskManager.FILEPATH].ToString();

                        if (FileHelper.FileExist(filePath))
                        {
                            TaskManager.Current().SetValid(true);
                        }

                        #endregion
                    }
                }
                catch
                {
                    model.ErrorList.Add(new Error(ErrorType.Other, "Cannot access file on server."));
                }
            }
            else
            {
                model.ErrorList.Add(new Error(ErrorType.Other, "File is not supported."));
            }
        }
        else
        {
            model.ErrorList.Add(new Error(ErrorType.Other, "No file selected or submitted."));
        }

        if (TaskManager.Current().IsValid())
        {
            TaskManager.AddExecutedStep(TaskManager.Current());
            TaskManager.GoToNext();
            Session["TaskManager"] = TaskManager;

            ActionInfo actionInfo = TaskManager.Current().GetActionInfo;
            return RedirectToAction(actionInfo.ActionName, actionInfo.ControllerName, new RouteValueDictionary { { "area", actionInfo.AreaName }, { "index", TaskManager.GetCurrentStepInfoIndex() } });
        }
    }

    model.serverFileList = GetServerFileList();

    // get the data structure type
    model.DataStructureType = GetDataStructureType();
    model.SupportedFileExtentions = UploadWizardHelper.GetExtentionList(model.DataStructureType);

    return PartialView(model);
}
/// <summary>
/// Tests whether the primary key values in a file are unique.
/// </summary>
/// <remarks></remarks>
/// <seealso cref=""/>
/// <param name="taskManager"></param>
/// <param name="datasetId"></param>
/// <param name="primaryKeys"></param>
/// <param name="ext"></param>
/// <param name="filename"></param>
/// <returns></returns>
public static bool IsUnique(TaskManager taskManager, long datasetId, List<long> primaryKeys, string ext, string filename)
{
    Hashtable hashtable = new Hashtable();
    List<string> primaryValuesAsOneString = new List<string>();

    TaskManager TaskManager = taskManager;

    int packageSize = 1000;
    int position = 1;

    if (ext.Equals(".txt") || ext.Equals(".csv"))
    {
        #region csv

        do
        {
            primaryValuesAsOneString = new List<string>();

            AsciiReader reader = new AsciiReader();
            reader.Position = position;
            Stream stream = reader.Open(TaskManager.Bus["FilePath"].ToString());
            AsciiFileReaderInfo afri = (AsciiFileReaderInfo)TaskManager.Bus["FileReaderInfo"];

            DataStructureManager datastructureManager = new DataStructureManager();
            StructuredDataStructure sds = datastructureManager.StructuredDataStructureRepo.Get(Convert.ToInt64(TaskManager.Bus["DataStructureId"].ToString()));

            // get a list of values for each row, e.g.
            // primary keys: id, name
            // 1 [1][David]
            // 2 [2][Javad]
            List<List<string>> tempList = reader.ReadValuesFromFile(stream, filename, afri, sds, datasetId, primaryKeys, packageSize);

            // convert the list of lists into a list of strings
            // 1 [1][David] = 1David
            // 2 [2][Javad] = 2Javad
            foreach (List<string> l in tempList)
            {
                string tempString = "";
                foreach (string s in l)
                {
                    tempString += s;
                }

                if (!String.IsNullOrEmpty(tempString)) primaryValuesAsOneString.Add(tempString);
            }

            // add every primary key combination to the hashtable;
            // Hashtable.Add throws if the key already exists, i.e. the keys are not unique
            foreach (string pKey in primaryValuesAsOneString)
            {
                if (pKey != "")
                {
                    try
                    {
                        hashtable.Add(Utility.ComputeKey(pKey), "pKey");
                    }
                    catch
                    {
                        // close the stream before reporting the duplicate
                        stream.Close();
                        return false;
                    }
                }
            }

            position = reader.Position + 1;
            stream.Close();
        } while (primaryValuesAsOneString.Count > 0);

        #endregion
    }

    if (ext.Equals(".xlsm"))
    {
        #region excel template

        do
        {
            // reset
            primaryValuesAsOneString = new List<string>();

            ExcelReader reader = new ExcelReader();
            reader.Position = position;
            Stream stream = reader.Open(TaskManager.Bus["FilePath"].ToString());

            DataStructureManager datastructureManager = new DataStructureManager();
            StructuredDataStructure sds = datastructureManager.StructuredDataStructureRepo.Get(Convert.ToInt64(TaskManager.Bus["DataStructureId"].ToString()));

            // get a list of values for each row, e.g.
            // primary keys: id, name
            // 1 [1][David]
            // 2 [2][Javad]
            List<List<string>> tempList = reader.ReadValuesFromFile(stream, filename, sds, datasetId, primaryKeys, packageSize);

            // convert the list of lists into a list of strings
            // 1 [1][David] = 1David
            // 2 [2][Javad] = 2Javad
            foreach (List<string> l in tempList)
            {
                string tempString = "";
                foreach (string s in l)
                {
                    tempString += s;
                }

                if (!String.IsNullOrEmpty(tempString)) primaryValuesAsOneString.Add(tempString);
            }

            // add every primary key combination to the hashtable;
            // Hashtable.Add throws if the key already exists, i.e. the keys are not unique
            foreach (string pKey in primaryValuesAsOneString)
            {
                if (pKey != "")
                {
                    try
                    {
                        hashtable.Add(Utility.ComputeKey(pKey), pKey);
                    }
                    catch
                    {
                        stream.Close();
                        return false;
                    }
                }
            }

            position = reader.Position + 1;
            stream.Close();
        } while (primaryValuesAsOneString.Count > 0);

        #endregion
    }

    return true;
}
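// NOTE: sketch only, not part of the original code. IsUnique detects duplicates by calling
// Hashtable.Add and treating the resulting exception as "duplicate found". A HashSet<string>
// (System.Collections.Generic) expresses the same check without relying on exceptions, because
// Add returns false when the composite key has been seen before. The helper name and signature
// below are illustrative and assume the caller has already read the key values per row.
private static bool AreCompositeKeysUnique(IEnumerable<List<string>> rowKeyValues)
{
    HashSet<string> seen = new HashSet<string>();

    foreach (List<string> row in rowKeyValues)
    {
        // build the same composite key the original code builds by concatenating the values
        string composite = string.Concat(row);

        if (string.IsNullOrEmpty(composite))
            continue;

        // Add returns false for a duplicate -> the primary key combination is not unique
        if (!seen.Add(composite))
            return false;
    }

    return true;
}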