GetDatasetLatestVersion(Dataset dataset)

| Parameter | Type    | Description          |
|-----------|---------|----------------------|
| dataset   | Dataset | The dataset instance |

Returns: the latest version of the given dataset.
/// <summary>
/// Generates a text file with JSON for the data structure of a dataset,
/// stores this file on the server and records its path in a content descriptor.
/// </summary>
/// <param name="datasetId">Id of the dataset whose structure is exported.</param>
/// <returns>The path of the generated file.</returns>
public static string GenerateDataStructure(long datasetId)
{
    string path = "";
    try
    {
        DatasetManager datasetManager = new DatasetManager();
        DatasetVersion datasetVersion = datasetManager.GetDatasetLatestVersion(datasetId);

        DataStructureManager dataStructureManager = new DataStructureManager();
        long dataStructureId = datasetVersion.Dataset.DataStructure.Id;
        DataStructure dataStructure = dataStructureManager.StructuredDataStructureRepo.Get(dataStructureId);

        // NOTE(review): the original contained an empty `if (dataStructure != null) { }` block;
        // the loaded structure is currently unused — presumably the JSON serialization of the
        // structure is still to be implemented here. TODO confirm.

        // store in content descriptor
        path = storeGeneratedFilePathToContentDiscriptor(datasetId, datasetVersion, "datastructure", ".txt");
    }
    catch (Exception)
    {
        // rethrow preserving the original stack trace (was `throw ex;`)
        throw;
    }
    return path;
}
// GET: api/data/5
/// <summary>
/// Returns the primary data of the latest version of dataset <paramref name="id"/> as CSV.
/// </summary>
/// <param name="id">Dataset Id</param>
/// <returns>A CSV response for structured datasets; an empty response otherwise.</returns>
/// <remarks>
/// The action accepts additional parameters via the query string:
/// 1: "header" — a comma separated list of ids that determines which variables of the
///    dataset version tuples take part in the result set.
/// 2: "filter" — a logical expression over the dataset's variables that filters the tuples;
///    logical operators, nesting, precedence, and some functions should be supported.
/// </remarks>
public HttpResponseMessage Get(int id)
{
    var queryPairs = this.Request.GetQueryNameValuePairs();
    string projection = queryPairs.FirstOrDefault(p => "header".Equals(p.Key, StringComparison.InvariantCultureIgnoreCase)).Value;
    string selection = queryPairs.FirstOrDefault(p => "filter".Equals(p.Key, StringComparison.InvariantCultureIgnoreCase)).Value;

    OutputDataManager ioOutputDataManager = new OutputDataManager();
    DatasetManager datasetManager = new DatasetManager();
    DatasetVersion latestVersion = datasetManager.GetDatasetLatestVersion(id);
    string datasetTitle = XmlDatasetHelper.GetInformation(latestVersion, NameAttributeValues.title);

    // only structured data structures can be converted into a data table
    if (!(latestVersion.Dataset.DataStructure.Self is StructuredDataStructure))
    {
        return Request.CreateResponse();
    }

    // apply selection and projection
    DataTable table = OutputDataManager.ConvertPrimaryDataToDatatable(datasetManager, latestVersion, datasetTitle, true);

    if (!string.IsNullOrEmpty(selection))
    {
        table = OutputDataManager.SelectionOnDataTable(table, selection);
    }

    if (!string.IsNullOrEmpty(projection))
    {
        // make the header names upper case to make them case insensitive
        table = OutputDataManager.ProjectionOnDataTable(table, projection.ToUpper().Split(','));
    }

    DatasetModel model = new DatasetModel();
    model.DataTable = table;

    var response = Request.CreateResponse();
    response.Content = new ObjectContent(typeof(DatasetModel), model, new DatasetModelCsvFormatter(model.DataTable.TableName));
    return response;
}
// GET: api/Metadata/5
/// <summary>
/// Returns the metadata of the latest version of dataset <paramref name="id"/> as XML.
/// The optional query string parameter "format" selects a mapping used to convert
/// the metadata into an external schema.
/// </summary>
/// <param name="id">Dataset Id</param>
/// <returns>An "application/xml" response; a 500 response with the error message if conversion fails.</returns>
public HttpResponseMessage Get(int id)
{
    string convertTo = "";
    try
    {
        convertTo = this.Request.GetQueryNameValuePairs().FirstOrDefault(p => "format".Equals(p.Key, StringComparison.InvariantCultureIgnoreCase)).Value;
    }
    catch (Exception)
    {
        // best effort: a missing/unreadable "format" parameter simply means no conversion
    }

    DatasetManager dm = new DatasetManager();
    DatasetVersion dsv = dm.GetDatasetLatestVersion(id);
    XmlDocument xmldoc = dsv.Metadata;

    if (string.IsNullOrEmpty(convertTo))
    {
        // no conversion requested: return the stored metadata as-is
        return new HttpResponseMessage
        {
            Content = new StringContent(xmldoc.InnerXml, Encoding.UTF8, "application/xml")
        };
    }

    try
    {
        XmlDocument newXmlDoc = OutputMetadataManager.GetConvertedMetadata(id, TransmissionType.mappingFileExport, convertTo);
        return new HttpResponseMessage
        {
            Content = new StringContent(newXmlDoc.InnerXml, Encoding.UTF8, "application/xml")
        };
    }
    catch (Exception ex)
    {
        // was `return null;`, which surfaced as an opaque failure — return an explicit 500 instead
        return new HttpResponseMessage(System.Net.HttpStatusCode.InternalServerError)
        {
            Content = new StringContent(ex.Message)
        };
    }
}
/// <summary>
/// Builds the path of the metadata schema directory (DCM workspace / "Metadata" / structure name)
/// for the metadata structure of the given dataset.
/// </summary>
/// <param name="datasetId">Id of the dataset.</param>
/// <returns>The directory path if it exists on disk; otherwise an empty string.</returns>
public static string GetSchemaDirectoryPath(long datasetId)
{
    try
    {
        DatasetManager datasetManager = new DatasetManager();
        DatasetVersion datasetVersion = datasetManager.GetDatasetLatestVersion(datasetId);

        MetadataStructureManager metadataStructureManager = new MetadataStructureManager();
        MetadataStructure metadataStructure = metadataStructureManager.Repo.Get(datasetVersion.Dataset.MetadataStructure.Id);

        string path = Path.Combine(AppConfiguration.GetModuleWorkspacePath("DCM"), "Metadata", metadataStructure.Name);

        // only return the path when the directory actually exists
        if (!String.IsNullOrEmpty(path) && Directory.Exists(path))
            return path;
    }
    catch (Exception)
    {
        // rethrow preserving the original stack trace (was `throw ex;`)
        throw;
    }
    return String.Empty;
}
/// <summary>
/// Converts the metadata of the latest version of a dataset into an external schema using
/// the mapping file registered for the given transmission type and mapping name.
/// Optionally stores the generated file path in a content descriptor.
/// </summary>
/// <param name="datasetId">Id of the dataset.</param>
/// <param name="type">Transmission type used to look up the mapping file.</param>
/// <param name="mappingName">Mapping name; empty or "generic" selects the default descriptor name "metadata".</param>
/// <param name="storing">If true, the generated file path is stored in a content descriptor.</param>
/// <returns>The converted metadata document.</returns>
public static XmlDocument GetConvertedMetadata(long datasetId, TransmissionType type, string mappingName, bool storing = true)
{
    XmlDocument newXml;
    try
    {
        DatasetManager datasetManager = new DatasetManager();
        DatasetVersion datasetVersion = datasetManager.GetDatasetLatestVersion(datasetId);

        string mappingFileName = XmlDatasetHelper.GetTransmissionInformation(datasetVersion, type, mappingName);
        string pathMappingFile = Path.Combine(AppConfiguration.GetModuleWorkspacePath("DIM"), mappingFileName);

        XmlMapperManager xmlMapperManager = new XmlMapperManager(TransactionDirection.InternToExtern);
        xmlMapperManager.Load(pathMappingFile, "exporttest");

        newXml = xmlMapperManager.Export(datasetVersion.Metadata, datasetVersion.Id, mappingName, true);

        string title = XmlDatasetHelper.GetInformation(datasetVersion, NameAttributeValues.title);

        // store in content descriptor
        if (storing)
        {
            if (String.IsNullOrEmpty(mappingName) || mappingName.ToLower() == "generic")
                storeGeneratedFilePathToContentDiscriptor(datasetId, datasetVersion, "metadata", ".xml");
            else
                storeGeneratedFilePathToContentDiscriptor(datasetId, datasetVersion, "metadata_" + mappingName, ".xml");
        }
    }
    catch (Exception)
    {
        // rethrow preserving the original stack trace (was `throw ex;`)
        throw;
    }
    return newXml;
}
/// <summary>
/// Returns the ids of all data tuples that are effective in the latest version of the given dataset.
/// </summary>
/// <param name="datasetId">Id of the dataset.</param>
/// <returns>The effective data tuple ids.</returns>
private List<long> GetDataTuples(long datasetId)
{
    var manager = new DatasetManager();
    var latestVersion = manager.GetDatasetLatestVersion(datasetId);
    return manager.GetDatasetVersionEffectiveTupleIds(latestVersion);
}
/// <summary>
/// Appends the effective data tuples of the latest version of a dataset to an Excel template file.
/// </summary>
/// <param name="datasetId">Id of the dataset whose tuples are written.</param>
/// <param name="dataStructureId">Id of the data structure the template is based on.</param>
/// <param name="path">Path of the Excel template file.</param>
private void AddDatatuplesToFile(long datasetId, long dataStructureId, string path)
{
    var manager = new DatasetManager();
    var latestVersion = manager.GetDatasetLatestVersion(datasetId);
    var tupleIds = manager.GetDatasetVersionEffectiveTupleIds(latestVersion);

    var writer = new ExcelWriter();
    writer.AddDataTuplesToTemplate(manager, tupleIds, path, dataStructureId);
}
/// <summary>
/// Final step of the upload wizard: reads the uploaded file (xlsm/csv/txt) package-wise,
/// writes the tuples into a new dataset version and checks the dataset in again.
/// Collects and returns all errors that occurred.
/// </summary>
/// <param name="taskManager">Not used; all state is read from the static TaskManager.Bus.</param>
/// <returns>List of errors; empty on success.</returns>
public List<Error> FinishUpload(TaskManager taskManager)
{
    List<Error> temp = new List<Error>();
    DatasetManager dm = new DatasetManager();
    DatasetVersion workingCopy = new DatasetVersion();

    //datatuple list
    List<DataTuple> rows = new List<DataTuple>();
    Dataset ds = null;
    bool inputWasAltered = false;

    if (TaskManager.Bus.ContainsKey(TaskManager.DATASET_ID) && TaskManager.Bus.ContainsKey(TaskManager.DATASTRUCTURE_ID))
    {
        long id = Convert.ToInt32(TaskManager.Bus[TaskManager.DATASET_ID]);
        DataStructureManager dsm = new DataStructureManager();
        long iddsd = Convert.ToInt32(TaskManager.Bus[TaskManager.DATASTRUCTURE_ID]);

        ds = dm.GetDataset(id);
        // Javad: Please check if the dataset does exists!!

        #region Progress Informations

        // reset the progress counters on the bus for this run
        if (TaskManager.Bus.ContainsKey(TaskManager.CURRENTPACKAGESIZE))
        {
            TaskManager.Bus[TaskManager.CURRENTPACKAGESIZE] = 0;
        }
        else
        {
            TaskManager.Bus.Add(TaskManager.CURRENTPACKAGESIZE, 0);
        }

        if (TaskManager.Bus.ContainsKey(TaskManager.CURRENTPACKAGE))
        {
            TaskManager.Bus[TaskManager.CURRENTPACKAGE] = 0;
        }
        else
        {
            TaskManager.Bus.Add(TaskManager.CURRENTPACKAGE, 0);
        }

        #endregion

        #region structured data

        if (TaskManager.Bus.ContainsKey(TaskManager.DATASTRUCTURE_TYPE) && TaskManager.Bus[TaskManager.DATASTRUCTURE_TYPE].Equals(DataStructureType.Structured))
        {
            try
            {
                //Stopwatch fullTime = Stopwatch.StartNew();
                //Stopwatch loadDT = Stopwatch.StartNew();

                // ids of the tuples currently effective in the latest version;
                // used (by ref) to split incoming rows into "new" vs. "edit"
                List<long> datatupleFromDatabaseIds = dm.GetDatasetVersionEffectiveTupleIds(dm.GetDatasetLatestVersion(ds.Id));

                //loadDT.Stop();
                //Debug.WriteLine("Load DT From Db Time " + loadDT.Elapsed.TotalSeconds.ToString());

                StructuredDataStructure sds = dsm.StructuredDataStructureRepo.Get(iddsd);
                dsm.StructuredDataStructureRepo.LoadIfNot(sds.Variables);

                #region excel reader

                if (TaskManager.Bus[TaskManager.EXTENTION].ToString().Equals(".xlsm"))
                {
                    int packageSize = 10000;
                    TaskManager.Bus[TaskManager.CURRENTPACKAGESIZE] = packageSize;

                    int counter = 0;

                    ExcelReader reader = new ExcelReader();

                    // loop over packages
                    dm.CheckOutDatasetIfNot(ds.Id, GetUsernameOrDefault()); // there are cases, the dataset does not get checked out!!
                    if (!dm.IsDatasetCheckedOutFor(ds.Id, GetUsernameOrDefault()))
                        throw new Exception(string.Format("Not able to checkout dataset '{0}' for user '{1}'!", ds.Id, GetUsernameOrDefault()));

                    workingCopy = dm.GetDatasetWorkingCopy(ds.Id);
                    //workingCopy.ContentDescriptors = new List<ContentDescriptor>();

                    do
                    {
                        //Stopwatch packageTime = Stopwatch.StartNew();

                        counter++;
                        TaskManager.Bus[TaskManager.CURRENTPACKAGE] = counter;

                        // open file
                        Stream = reader.Open(TaskManager.Bus[TaskManager.FILEPATH].ToString());

                        Stopwatch upload = Stopwatch.StartNew();
                        rows = reader.ReadFile(Stream, TaskManager.Bus[TaskManager.FILENAME].ToString(), sds, (int)id, packageSize);
                        upload.Stop();
                        Debug.WriteLine("ReadFile: " + counter + " Time " + upload.Elapsed.TotalSeconds.ToString());

                        if (reader.ErrorMessages.Count > 0)
                        {
                            //model.ErrorList = reader.errorMessages;
                        }
                        else
                        {
                            //XXX Add packagesize to excel read function
                            if (TaskManager.Bus.ContainsKey(TaskManager.DATASET_STATUS))
                            {
                                // a "new" dataset gets all rows appended as-is
                                if (TaskManager.Bus[TaskManager.DATASET_STATUS].ToString().Equals("new"))
                                {
                                    upload = Stopwatch.StartNew();
                                    dm.EditDatasetVersion(workingCopy, rows, null, null);
                                    upload.Stop();
                                    Debug.WriteLine("EditDatasetVersion: " + counter + " Time " + upload.Elapsed.TotalSeconds.ToString());
                                    //Debug.WriteLine("----");
                                }

                                // an "edit" splits rows by primary keys into new vs. changed tuples
                                if (TaskManager.Bus[TaskManager.DATASET_STATUS].ToString().Equals("edit"))
                                {
                                    if (rows.Count() > 0)
                                    {
                                        //Stopwatch split = Stopwatch.StartNew();
                                        Dictionary<string, List<DataTuple>> splittedDatatuples = new Dictionary<string, List<DataTuple>>();
                                        splittedDatatuples = UploadWizardHelper.GetSplitDatatuples(rows, (List<long>)TaskManager.Bus[TaskManager.PRIMARY_KEYS], workingCopy, ref datatupleFromDatabaseIds);
                                        //split.Stop();
                                        //Debug.WriteLine("Split : " + counter + " Time " + split.Elapsed.TotalSeconds.ToString());

                                        //Stopwatch upload = Stopwatch.StartNew();
                                        dm.EditDatasetVersion(workingCopy, splittedDatatuples["new"], splittedDatatuples["edit"], null);
                                        // upload.Stop();
                                        // Debug.WriteLine("Upload : " + counter + " Time " + upload.Elapsed.TotalSeconds.ToString());
                                        // Debug.WriteLine("----");
                                    }
                                }
                            }
                            else
                            {
                            }
                        }

                        Stream.Close();

                        //packageTime.Stop();
                        //Debug.WriteLine("Package : " + counter + " packageTime Time " + packageTime.Elapsed.TotalSeconds.ToString());
                    } while (rows.Count() > 0);

                    //fullTime.Stop();
                    //Debug.WriteLine("FullTime " + fullTime.Elapsed.TotalSeconds.ToString());
                }

                #endregion

                #region ascii reader

                if (TaskManager.Bus[TaskManager.EXTENTION].ToString().Equals(".csv") || TaskManager.Bus[TaskManager.EXTENTION].ToString().Equals(".txt"))
                {
                    // open file
                    AsciiReader reader = new AsciiReader();
                    //Stream = reader.Open(TaskManager.Bus[TaskManager.FILEPATH].ToString());

                    //DatasetManager dm = new DatasetManager();
                    //Dataset ds = dm.GetDataset(id);

                    Stopwatch totalTime = Stopwatch.StartNew();

                    if (dm.IsDatasetCheckedOutFor(ds.Id, GetUsernameOrDefault()) || dm.CheckOutDataset(ds.Id, GetUsernameOrDefault()))
                    {
                        workingCopy = dm.GetDatasetWorkingCopy(ds.Id);

                        int packageSize = 100000;
                        TaskManager.Bus[TaskManager.CURRENTPACKAGESIZE] = packageSize;

                        // loop over packages
                        int counter = 0;

                        do
                        {
                            counter++;
                            inputWasAltered = false;
                            TaskManager.Bus[TaskManager.CURRENTPACKAGE] = counter;

                            Stream = reader.Open(TaskManager.Bus[TaskManager.FILEPATH].ToString());
                            rows = reader.ReadFile(Stream, TaskManager.Bus[TaskManager.FILENAME].ToString(), (AsciiFileReaderInfo)TaskManager.Bus[TaskManager.FILE_READER_INFO], sds, id, packageSize);
                            Stream.Close();

                            if (reader.ErrorMessages.Count > 0)
                            {
                                foreach (var err in reader.ErrorMessages)
                                {
                                    temp.Add(new Error(ErrorType.Dataset, err.GetMessage()));
                                }
                                //return temp;
                            }

                            //model.Validated = true;
                            Stopwatch dbTimer = Stopwatch.StartNew();

                            if (TaskManager.Bus.ContainsKey(TaskManager.DATASET_STATUS))
                            {
                                if (TaskManager.Bus[TaskManager.DATASET_STATUS].ToString().Equals("new"))
                                {
                                    dm.EditDatasetVersion(workingCopy, rows, null, null);
                                }

                                if (TaskManager.Bus[TaskManager.DATASET_STATUS].ToString().Equals("edit"))
                                {
                                    if (rows.Count() > 0)
                                    {
                                        //Dictionary<string, List<DataTuple>> splittedDatatuples = new Dictionary<string, List<AbstractTuple>>();
                                        var splittedDatatuples = UploadWizardHelper.GetSplitDatatuples(rows, (List<long>)TaskManager.Bus[TaskManager.PRIMARY_KEYS], workingCopy, ref datatupleFromDatabaseIds);
                                        dm.EditDatasetVersion(workingCopy, splittedDatatuples["new"], splittedDatatuples["edit"], null);
                                        inputWasAltered = true;
                                    }
                                }
                            }
                            else
                            {
                                // no status on the bus: treat as edit against the known tuple ids
                                if (rows.Count() > 0)
                                {
                                    Dictionary<string, List<DataTuple>> splittedDatatuples = new Dictionary<string, List<DataTuple>>();
                                    splittedDatatuples = UploadWizardHelper.GetSplitDatatuples(rows, (List<long>)TaskManager.Bus[TaskManager.PRIMARY_KEYS], workingCopy, ref datatupleFromDatabaseIds);
                                    dm.EditDatasetVersion(workingCopy, splittedDatatuples["new"], splittedDatatuples["edit"], null);
                                    inputWasAltered = true;
                                }
                            }

                            dbTimer.Stop();
                            Debug.WriteLine(" db time" + dbTimer.Elapsed.TotalSeconds.ToString());
                        } while (rows.Count() > 0 || inputWasAltered == true);

                        totalTime.Stop();
                        Debug.WriteLine(" Total Time " + totalTime.Elapsed.TotalSeconds.ToString());
                    }

                    //Stream.Close();
                }

                #endregion

                #region contentdescriptors

                //remove all contentdescriptors from the old version
                //generatedTXT
                if (workingCopy.ContentDescriptors.Any(c => c.Name.Equals("generatedTXT")))
                {
                    ContentDescriptor tmp = workingCopy.ContentDescriptors.Where(c => c.Name.Equals("generatedTXT")).FirstOrDefault();
                    dm.DeleteContentDescriptor(tmp);
                }

                //generatedCSV
                if (workingCopy.ContentDescriptors.Any(c => c.Name.Equals("generatedCSV")))
                {
                    ContentDescriptor tmp = workingCopy.ContentDescriptors.Where(c => c.Name.Equals("generatedCSV")).FirstOrDefault();
                    dm.DeleteContentDescriptor(tmp);
                }

                //generated
                if (workingCopy.ContentDescriptors.Any(c => c.Name.Equals("generated")))
                {
                    ContentDescriptor tmp = workingCopy.ContentDescriptors.Where(c => c.Name.Equals("generated")).FirstOrDefault();
                    dm.DeleteContentDescriptor(tmp);
                }

                #endregion

                // ToDo: Get Comment from ui and users
                MoveAndSaveOriginalFileInContentDiscriptor(workingCopy);
                dm.CheckInDataset(ds.Id, "upload data from upload wizard", GetUsernameOrDefault());
            }
            catch (Exception e)
            {
                temp.Add(new Error(ErrorType.Other, "Can not upload. : " + e.Message));
            }
            finally
            {
            }
        }

        #endregion

        #region unstructured data

        if (TaskManager.Bus.ContainsKey(TaskManager.DATASTRUCTURE_TYPE) && TaskManager.Bus[TaskManager.DATASTRUCTURE_TYPE].Equals(DataStructureType.Unstructured))
        {
            // checkout the dataset, apply the changes, and check it in.
            if (dm.IsDatasetCheckedOutFor(ds.Id, GetUsernameOrDefault()) || dm.CheckOutDataset(ds.Id, GetUsernameOrDefault()))
            {
                workingCopy = dm.GetDatasetWorkingCopy(ds.Id);
                SaveFileInContentDiscriptor(workingCopy);
                dm.EditDatasetVersion(workingCopy, null, null, null);

                // ToDo: Get Comment from ui and users
                dm.CheckInDataset(ds.Id, "upload unstructured data", GetUsernameOrDefault());
            }
        }

        #endregion
    }
    else
    {
        temp.Add(new Error(ErrorType.Dataset, "Dataset is not selected."));
    }

    // no errors: finalize with a check-in; otherwise roll the checkout back
    if (temp.Count <= 0)
    {
        dm.CheckInDataset(ds.Id, "checked in but no update on data tuples", GetUsernameOrDefault());
    }
    else
    {
        dm.UndoCheckoutDataset(ds.Id, GetUsernameOrDefault());
    }

    return temp;
}
/// <summary>
/// Variant of FinishUpload: reads the uploaded file (xlsm/csv/txt) package-wise, writes
/// the tuples into a new dataset version and checks the dataset in.
/// Collects and returns all errors that occurred.
/// </summary>
/// <param name="taskManager">Not used; all state is read from the static TaskManager.Bus.</param>
/// <returns>List of errors; empty on success.</returns>
public List<Error> FinishUpload2(TaskManager taskManager)
{
    List<Error> temp = new List<Error>();

    if (TaskManager.Bus.ContainsKey(TaskManager.DATASET_ID) && TaskManager.Bus.ContainsKey(TaskManager.DATASTRUCTURE_ID))
    {
        long id = Convert.ToInt32(TaskManager.Bus[TaskManager.DATASET_ID]);
        DataStructureManager dsm = new DataStructureManager();
        long iddsd = Convert.ToInt32(TaskManager.Bus[TaskManager.DATASTRUCTURE_ID]);

        //datatuple list
        List<DataTuple> rows;
        DatasetManager dm = new DatasetManager();
        Dataset ds = dm.GetDataset(id);
        DatasetVersion workingCopy = new DatasetVersion();

        #region Progress Informations

        // reset the progress counters on the bus for this run
        if (TaskManager.Bus.ContainsKey(TaskManager.CURRENTPACKAGESIZE))
        {
            TaskManager.Bus[TaskManager.CURRENTPACKAGESIZE] = 0;
        }
        else
        {
            TaskManager.Bus.Add(TaskManager.CURRENTPACKAGESIZE, 0);
        }

        if (TaskManager.Bus.ContainsKey(TaskManager.CURRENTPACKAGE))
        {
            TaskManager.Bus[TaskManager.CURRENTPACKAGE] = 0;
        }
        else
        {
            TaskManager.Bus.Add(TaskManager.CURRENTPACKAGE, 0);
        }

        #endregion

        #region structured data

        if (TaskManager.Bus.ContainsKey(TaskManager.DATASTRUCTURE_TYPE) && TaskManager.Bus[TaskManager.DATASTRUCTURE_TYPE].Equals(DataStructureType.Structured))
        {
            try
            {
                //Stopwatch fullTime = Stopwatch.StartNew();
                //Stopwatch loadDT = Stopwatch.StartNew();

                // tuples currently effective in the latest version; used (by ref)
                // to split incoming rows into "new" vs. "edit"
                List<AbstractTuple> datatupleFromDatabase = dm.GetDatasetVersionEffectiveTuples(dm.GetDatasetLatestVersion(ds.Id));

                //loadDT.Stop();
                //Debug.WriteLine("Load DT From Db Time " + loadDT.Elapsed.TotalSeconds.ToString());

                StructuredDataStructure sds = dsm.StructuredDataStructureRepo.Get(iddsd);
                dsm.StructuredDataStructureRepo.LoadIfNot(sds.Variables);

                #region excel reader

                if (TaskManager.Bus[TaskManager.EXTENTION].ToString().Equals(".xlsm"))
                {
                    int packageSize = 10000;
                    TaskManager.Bus[TaskManager.CURRENTPACKAGESIZE] = packageSize;

                    int counter = 0;

                    ExcelReader reader = new ExcelReader();

                    // loop over packages
                    dm.CheckOutDatasetIfNot(ds.Id, GetUsernameOrDefault()); // there are cases, the dataset does not get checked out!!
                    if (!dm.IsDatasetCheckedOutFor(ds.Id, GetUsernameOrDefault()))
                        throw new Exception(string.Format("Not able to checkout dataset '{0}' for user '{1}'!", ds.Id, GetUsernameOrDefault()));

                    workingCopy = dm.GetDatasetWorkingCopy(ds.Id);

                    do
                    {
                        //Stopwatch packageTime = Stopwatch.StartNew();

                        counter++;
                        TaskManager.Bus[TaskManager.CURRENTPACKAGE] = counter;

                        // open file
                        Stream = reader.Open(TaskManager.Bus[TaskManager.FILEPATH].ToString());
                        rows = reader.ReadFile(Stream, TaskManager.Bus[TaskManager.FILENAME].ToString(), sds, (int)id, packageSize);

                        if (reader.ErrorMessages.Count > 0)
                        {
                            //model.ErrorList = reader.errorMessages;
                        }
                        else
                        {
                            //XXX Add packagesize to excel read function
                            if (TaskManager.Bus.ContainsKey(TaskManager.DATASET_STATUS))
                            {
                                // a "new" dataset gets all rows appended as-is
                                if (TaskManager.Bus[TaskManager.DATASET_STATUS].ToString().Equals("new"))
                                {
                                    //Stopwatch upload = Stopwatch.StartNew();
                                    dm.EditDatasetVersion(workingCopy, rows, null, null);
                                    //Debug.WriteLine("Upload : " + counter + " Time " + upload.Elapsed.TotalSeconds.ToString());
                                    //Debug.WriteLine("----");
                                }

                                // an "edit" splits rows by primary keys into new vs. changed tuples
                                if (TaskManager.Bus[TaskManager.DATASET_STATUS].ToString().Equals("edit"))
                                {
                                    if (rows.Count() > 0)
                                    {
                                        //Stopwatch split = Stopwatch.StartNew();
                                        Dictionary<string, List<DataTuple>> splittedDatatuples = new Dictionary<string, List<DataTuple>>();
                                        splittedDatatuples = UploadWizardHelper.GetSplitDatatuples2(rows, (List<long>)TaskManager.Bus[TaskManager.PRIMARY_KEYS], workingCopy, ref datatupleFromDatabase);
                                        //split.Stop();
                                        //Debug.WriteLine("Split : " + counter + " Time " + split.Elapsed.TotalSeconds.ToString());

                                        //Stopwatch upload = Stopwatch.StartNew();
                                        dm.EditDatasetVersion(workingCopy, splittedDatatuples["new"], splittedDatatuples["edit"], null);
                                        // upload.Stop();
                                        // Debug.WriteLine("Upload : " + counter + " Time " + upload.Elapsed.TotalSeconds.ToString());
                                        // Debug.WriteLine("----");
                                    }
                                }
                            }
                            else
                            {
                            }
                        }

                        Stream.Close();

                        //packageTime.Stop();
                        //Debug.WriteLine("Package : " + counter + " packageTime Time " + packageTime.Elapsed.TotalSeconds.ToString());
                    } while (rows.Count() > 0);

                    //fullTime.Stop();
                    //Debug.WriteLine("FullTime " + fullTime.Elapsed.TotalSeconds.ToString());
                }

                #endregion

                #region ascii reader

                if (TaskManager.Bus[TaskManager.EXTENTION].ToString().Equals(".csv") || TaskManager.Bus[TaskManager.EXTENTION].ToString().Equals(".txt"))
                {
                    // open file
                    AsciiReader reader = new AsciiReader();
                    //Stream = reader.Open(TaskManager.Bus[TaskManager.FILEPATH].ToString());

                    //DatasetManager dm = new DatasetManager();
                    //Dataset ds = dm.GetDataset(id);

                    Stopwatch totalTime = Stopwatch.StartNew();

                    if (dm.IsDatasetCheckedOutFor(ds.Id, GetUsernameOrDefault()) || dm.CheckOutDataset(ds.Id, GetUsernameOrDefault()))
                    {
                        workingCopy = dm.GetDatasetWorkingCopy(ds.Id);

                        int packageSize = 100000;
                        TaskManager.Bus[TaskManager.CURRENTPACKAGESIZE] = packageSize;

                        // loop over packages
                        int counter = 0;

                        do
                        {
                            counter++;
                            TaskManager.Bus[TaskManager.CURRENTPACKAGE] = counter;

                            Stream = reader.Open(TaskManager.Bus[TaskManager.FILEPATH].ToString());
                            rows = reader.ReadFile(Stream, TaskManager.Bus[TaskManager.FILENAME].ToString(), (AsciiFileReaderInfo)TaskManager.Bus[TaskManager.FILE_READER_INFO], sds, id, packageSize);
                            Stream.Close();

                            if (reader.ErrorMessages.Count > 0)
                            {
                                //model.ErrorList = reader.errorMessages;
                            }
                            else
                            {
                                //model.Validated = true;
                                Stopwatch dbTimer = Stopwatch.StartNew();

                                if (TaskManager.Bus.ContainsKey(TaskManager.DATASET_STATUS))
                                {
                                    if (TaskManager.Bus[TaskManager.DATASET_STATUS].ToString().Equals("new"))
                                    {
                                        dm.EditDatasetVersion(workingCopy, rows, null, null);
                                    }

                                    if (TaskManager.Bus[TaskManager.DATASET_STATUS].ToString().Equals("edit"))
                                    {
                                        if (rows.Count() > 0)
                                        {
                                            Dictionary<string, List<DataTuple>> splittedDatatuples = new Dictionary<string, List<DataTuple>>();
                                            splittedDatatuples = UploadWizardHelper.GetSplitDatatuples2(rows, (List<long>)TaskManager.Bus[TaskManager.PRIMARY_KEYS], workingCopy, ref datatupleFromDatabase);
                                            dm.EditDatasetVersion(workingCopy, splittedDatatuples["new"], splittedDatatuples["edit"], null);
                                        }
                                    }
                                }
                                else
                                {
                                    // no status on the bus: treat as edit against the known tuples
                                    if (rows.Count() > 0)
                                    {
                                        Dictionary<string, List<DataTuple>> splittedDatatuples = new Dictionary<string, List<DataTuple>>();
                                        splittedDatatuples = UploadWizardHelper.GetSplitDatatuples2(rows, (List<long>)TaskManager.Bus[TaskManager.PRIMARY_KEYS], workingCopy, ref datatupleFromDatabase);
                                        dm.EditDatasetVersion(workingCopy, splittedDatatuples["new"], splittedDatatuples["edit"], null);
                                    }
                                }

                                dbTimer.Stop();
                                Debug.WriteLine(" db time" + dbTimer.Elapsed.TotalSeconds.ToString());
                            }
                        } while (rows.Count() > 0);

                        totalTime.Stop();
                        Debug.WriteLine(" Total Time " + totalTime.Elapsed.TotalSeconds.ToString());
                    }

                    //Stream.Close();
                }

                #endregion

                // start download generator
                // filepath
                //string path = "";
                //if (workingCopy != null)
                //{
                //    path = GenerateDownloadFile(workingCopy);
                //    dm.EditDatasetVersion(workingCopy, null, null, null);
                //}

                // ToDo: Get Comment from ui and users
                dm.CheckInDataset(ds.Id, "upload data from upload wizard", GetUsernameOrDefault());
                LoggerFactory.LogData(id.ToString(), typeof(Dataset).Name, Vaiona.Entities.Logging.CrudState.Updated);
            }
            catch (Exception e)
            {
                temp.Add(new Error(ErrorType.Other, "Can not upload. : " + e.Message));
                dm.CheckInDataset(ds.Id, "checked in but no update on data tuples", GetUsernameOrDefault());
            }
            finally
            {
            }
        }

        #endregion

        #region unstructured data

        if (TaskManager.Bus.ContainsKey(TaskManager.DATASTRUCTURE_TYPE) && TaskManager.Bus[TaskManager.DATASTRUCTURE_TYPE].Equals(DataStructureType.Unstructured))
        {
            // NOTE(review): unlike FinishUpload, this edits the latest version without an
            // explicit checkout of a working copy — TODO confirm this is intended.
            workingCopy = dm.GetDatasetLatestVersion(ds.Id);
            SaveFileInContentDiscriptor(workingCopy);
            dm.EditDatasetVersion(workingCopy, null, null, null);

            // ToDo: Get Comment from ui and users
            dm.CheckInDataset(ds.Id, "upload unstructured data", GetUsernameOrDefault());
        }

        #endregion
    }
    else
    {
        temp.Add(new Error(ErrorType.Dataset, "Dataset is not selected."));
    }

    return temp;
}
/// <summary>
/// Builds a GFBIO/Pangaea formular object for the given dataset.
/// </summary>
/// <param name="datasetId">Id of the dataset.</param>
/// <returns>A new formular object. NOTE: it is currently returned unpopulated.</returns>
public static GFBIOPangaeaFormularObject GetGFBIOPangaeaFormularObject(long datasetId)
{
    DatasetManager datasetManager = new DatasetManager();
    // removed a duplicate GetDataset(datasetId) call whose result was discarded
    Dataset dataset = datasetManager.GetDataset(datasetId);
    DatasetVersion datasetVersion = datasetManager.GetDatasetLatestVersion(datasetId);

    MetadataStructureManager metadataStructureManager = new MetadataStructureManager();
    MetadataStructure metadataStructure = metadataStructureManager.Repo.Get(dataset.MetadataStructure.Id);

    GFBIOPangaeaFormularObject gfbioPangaeaFormularObject = new GFBIOPangaeaFormularObject();

    string title = XmlDatasetHelper.GetInformation(datasetId, NameAttributeValues.title);
    string description = XmlDatasetHelper.GetInformation(datasetId, NameAttributeValues.description);

    // NOTE(review): title, description and metadataStructure are loaded but never copied into
    // the returned object — the mapping appears unfinished. TODO confirm intended behavior.
    return gfbioPangaeaFormularObject;
}
/// <summary>
/// Builds a GFBIO data center formular object for the given dataset.
/// Project and user related fields are still hard-coded placeholders.
/// </summary>
/// <param name="datasetId">Id of the dataset.</param>
/// <returns>The populated formular object.</returns>
public static GFBIODataCenterFormularObject GetGFBIODataCenterFormularObject(long datasetId)
{
    DatasetManager datasetManager = new DatasetManager();
    // removed a duplicate GetDataset(datasetId) call whose result was discarded
    Dataset dataset = datasetManager.GetDataset(datasetId);
    DatasetVersion datasetVersion = datasetManager.GetDatasetLatestVersion(datasetId);

    GFBIODataCenterFormularObject gfbioDataCenterFormularObject = new GFBIODataCenterFormularObject();

    // TODO: replace these hard-coded placeholder values with real project/user information
    gfbioDataCenterFormularObject.ProjectId = 1;
    gfbioDataCenterFormularObject.ProjectTitle = "Test Poject title";
    gfbioDataCenterFormularObject.ProjectLabel = "Test Poject label";
    gfbioDataCenterFormularObject.ProjectAbstract = "";

    gfbioDataCenterFormularObject.UserId = 1;
    gfbioDataCenterFormularObject.UserName = "******";
    gfbioDataCenterFormularObject.UserEmail = "testEmail";
    gfbioDataCenterFormularObject.DatasetAuthor = "TestAuthor";

    gfbioDataCenterFormularObject.DatasetId = datasetId;
    gfbioDataCenterFormularObject.DatasetVersion = datasetVersion.Id;
    gfbioDataCenterFormularObject.DatasetTitle = XmlDatasetHelper.GetInformation(datasetId, NameAttributeValues.title);
    // NOTE(review): the label is filled with the title attribute as well — TODO confirm intended
    gfbioDataCenterFormularObject.DatasetLabel = XmlDatasetHelper.GetInformation(datasetId, NameAttributeValues.title);
    gfbioDataCenterFormularObject.DatasetDescription = XmlDatasetHelper.GetInformation(datasetId, NameAttributeValues.description);
    gfbioDataCenterFormularObject.DatasetCollectionDate = datasetVersion.Dataset.LastCheckIOTimestamp;

    MetadataStructureManager metadataStructureManager = new MetadataStructureManager();
    MetadataStructure metadataStructure = metadataStructureManager.Repo.Get(dataset.MetadataStructure.Id);
    gfbioDataCenterFormularObject.MetadataSchemaName = metadataStructure.Name;

    return gfbioDataCenterFormularObject;
}
/// <summary>
/// Puts the selected dataset id and — if the dataset is checked in — its title onto the
/// TaskManager bus. Does nothing when <paramref name="datasetId"/> is not positive.
/// </summary>
/// <param name="datasetId">Id of the selected dataset.</param>
private void SetParametersToTaskmanager(long datasetId)
{
    if (TaskManager == null)
    {
        TaskManager = (TaskManager)Session["TaskManager"];
    }

    #region set dataset id & dataset title

    if (datasetId > 0)
    {
        try
        {
            // (removed a redundant Convert.ToInt64 on an already-long value)
            TaskManager.AddToBus(TaskManager.DATASET_ID, datasetId);

            // get title
            DatasetManager dm = new DatasetManager();
            string title = "";

            // the latest version is only read while the dataset is checked in
            if (dm.IsDatasetCheckedIn(datasetId))
            {
                title = XmlDatasetHelper.GetInformation(dm.GetDatasetLatestVersion(datasetId), NameAttributeValues.title);
            }

            TaskManager.AddToBus(TaskManager.DATASET_TITLE, title);
        }
        catch (Exception)
        {
            // rethrow preserving the original stack trace (was `throw ex;`)
            throw;
        }
    }

    #endregion
}
/// <summary>
/// Tests whether the combination of the given primary key variables is unique over all
/// effective data tuples of the latest version of a dataset.
/// </summary>
/// <remarks>
/// BUGFIX: the original paged over an always-empty local list ("dataTupleIds") instead of the
/// fetched effective tuple ids, so no tuple was ever inspected and the method always returned
/// true. The page-loop condition (`currentIds.Count() >= size * counter`) also terminated
/// after at most two pages; it now continues while a full page was returned.
/// </remarks>
/// <param name="datasetId">Id of the dataset to check.</param>
/// <param name="primaryKeys">Ids of the variables forming the primary key.</param>
/// <returns>True if all primary key combinations are unique; otherwise false.</returns>
/// <exception cref="Exception">Thrown when the dataset is not checked in.</exception>
////[MeasurePerformance]
public static Boolean IsUnique2(long datasetId, List<long> primaryKeys)
{
    Hashtable hashtable = new Hashtable();

    // load data
    DatasetManager datasetManager = new DatasetManager();
    DatasetVersion datasetVersion;

    if (!datasetManager.IsDatasetCheckedIn(datasetId))
    {
        throw new Exception("Dataset is not checked in.");
    }

    datasetVersion = datasetManager.GetDatasetLatestVersion(datasetId);

    #region load all datatuples first

    int size = 10000;
    int counter = 0;

    // the ids actually being paged over (see BUGFIX note above)
    List<long> dataTupleIds = datasetManager.GetDatasetVersionEffectiveTupleIds(datasetVersion).ToList();

    List<long> currentIds;
    DataTuple dt;
    do
    {
        currentIds = dataTupleIds.Skip(counter * size).Take(size).ToList();

        string pKey;
        foreach (long dtId in currentIds)
        {
            dt = datasetManager.DataTupleRepo.Query(d => d.Id.Equals(dtId)).FirstOrDefault();
            //pKey = getPrimaryKeysAsByteArray(dt, primaryKeys);
            pKey = getPrimaryKeysAsStringFromXml(dt, primaryKeys);

            if (pKey.Count() > 0)
            {
                try
                {
                    // Hashtable.Add throws on a duplicate key -> key combination is not unique
                    hashtable.Add(pKey, "");
                }
                catch
                {
                    return false;
                }
            }
        }

        counter++;
        // continue while a full page was returned (a partial page means we reached the end)
    } while (currentIds.Count == size);

    #endregion

    return true;
}
/// <summary>
/// CAUTION !!!!!!!!!!!!!!!!!
/// Upload currently only supports data with a single block (one table);
/// handling of multiple blocks is not yet decided
/// (for one value there is a further table).
/// Migrates observations from an old (B1) database into the matching dataset,
/// replaying them insert-date by insert-date as dataset versions.
/// </summary>
/// <param name="dataSetID">Old B1 dataset id used to locate the matching dataset.</param>
/// <param name="DataBase">Name of the source database to query.</param>
public void uploadData(string dataSetID, string DataBase)
{
    DatasetManager datasetManager = new DatasetManager();
    DataStructureManager dataStructureManager = new DataStructureManager();
    XmlDataReader xmlDataReader = new XmlDataReader();
    User user = new User();
    string variableNames = "";

    // query metadataAuthor and variable names from explorer.datasets
    queryAuthorAndVariables(ref user, ref variableNames, dataSetID, DataBase);
    List<string> varNames = variableNames.Split(',').ToList();

    // get all dataStructures with equal variables count
    List<StructuredDataStructure> dataStructures = dataStructureManager.StructuredDataStructureRepo.Get(s => varNames.Count().Equals(s.Variables.Count)).ToList();

    // get all Ids of dataStructures with equal variables
    List<long> dataStructureIds = new List<long>();
    foreach (StructuredDataStructure dataStructure in dataStructures)
    {
        bool isSimilarStructure = true;
        foreach (Variable variable in dataStructure.Variables)
        {
            // one unknown variable label disqualifies the structure
            if (!varNames.Contains(variable.Label))
            {
                isSimilarStructure &= false;
                break;
            }
        }
        if (isSimilarStructure)
            dataStructureIds.Add(dataStructure.Id);
    }

    // get the wanted dataset by comparing the old B1 datasetId out of the datasets with similar dataStructure
    Dataset dataset = null;
    List<Dataset> datasets = datasetManager.DatasetRepo.Get(d => dataStructureIds.Contains(d.DataStructure.Id)).ToList();
    foreach (Dataset ds in datasets)
    {
        string oldDatasetId = "";
        try
        {
            // the old id is stored inside the first version's metadata document
            XmlNode extraID = ds.Versions.FirstOrDefault().Metadata.SelectSingleNode("Metadata/general/general/id/id");
            oldDatasetId = extraID.InnerText;
        }
        catch
        {
            // best effort: datasets without the expected metadata node are skipped
        }
        if (oldDatasetId == dataSetID)
        {
            dataset = ds;
            break;
        }
    }

    if (dataset != null)
    {
        // get distinct and ascending ordered insertdates from DB
        List<DB2TimeStamp> distInsertDates = queryDistInsertDates(dataSetID, DataBase);
        bool checkObsIds = false;

        foreach (DB2TimeStamp insertDate in distInsertDates)
        {
            List<DataTuple> createdDataTuples = new List<DataTuple>();
            List<DataTuple> editedDataTuples = new List<DataTuple>();
            List<DataTuple> deletedDataTuples = new List<DataTuple>();

            DatasetVersion workingCopy = datasetManager.GetDatasetLatestVersion(dataset.Id); // get dataset

            // get obsid, data, deleted and newest for each observation from DB
            List<Observation> observations = queryObservation(dataSetID, DataBase, insertDate.ToString());

            Dictionary<long, long> obsIdMapsToTupleId = new Dictionary<long, long>();
            if (checkObsIds)
            {
                // get all EffectiveTupleIds
                List<long> datasetTupleIds = datasetManager.GetDatasetVersionEffectiveTupleIds(workingCopy).ToList();
                // id-mapping-list key=obsId, value=TupleId foreach EffectiveTupleId and obsId from Tuple.Extra
                obsIdMapsToTupleId = idMapping(datasetTupleIds, ref datasetManager);
            }

            // observation counter
            int observationIndex = 0;
            bool isNewest = true;

            foreach (Observation observation in observations) //////////////parallel
            {
                isNewest = (isNewest && observation.newest != 'Y') ? false : isNewest;

                // create dataTuple with xmlDataReader
                // split xml to string list and use DataReader.ReadRow
                DataTuple dataRow = xmlDataReader.XmlRowReader(observation.data, dataset.DataStructure.Id, observationIndex);

                // check if observation.obsid is in id-mapping-list
                long TupleId;
                if (checkObsIds && obsIdMapsToTupleId.TryGetValue(observation.obsid, out TupleId))
                {
                    DataTuple dataTuple = datasetManager.DataTupleRepo.Get(TupleId);
                    if (observation.deleted != 'Y')
                    {
                        dataTuple.VariableValues = dataRow.VariableValues;
                        // edit tuple if observation exists as tuple in EffectiveTuple and observation is not deleted
                        editedDataTuples.Add(dataTuple);
                    }
                    else
                    {
                        // delete tuple if observation exists as tuple in EffectiveTuple and observ. is deleted
                        deletedDataTuples.Add(dataTuple);
                    }
                }
                else
                {
                    // write the obsId and oldBExISdatasetId in Extra: <extra><obsid>[obsid]</obsid><oldBExISdatasetId>[dataSetID]</oldBExISdatasetId></extra>
                    dataRow.Extra = oldIdsIntoExtra(observation.obsid, dataSetID, dataRow.Extra);
                    // create tuple if observation exists not in EffectiveTuple
                    createdDataTuples.Add(dataRow);
                }

                observationIndex++; // observation counter
            }

            // once a non-newest batch is seen, later batches must be matched against existing tuples
            checkObsIds = (!checkObsIds && !isNewest) ? true : checkObsIds;

            // checkOut
            if (datasetManager.IsDatasetCheckedOutFor(dataset.Id, user.Name) || datasetManager.CheckOutDataset(dataset.Id, user.Name))
            {
                workingCopy = datasetManager.GetDatasetWorkingCopy(dataset.Id); // get dataset
                datasetManager.EditDatasetVersion(workingCopy, createdDataTuples, editedDataTuples, deletedDataTuples); // edit dataset
                datasetManager.CheckInDataset(dataset.Id, "Primary data row was submited.", user.Name); // checkIn
            }
        }
    }
}
/// <summary>
/// Puts the selected dataset's id, status, title and research-plan information on the session task bus.
/// </summary>
/// <param name="datasetId">id of the selected dataset</param>
private void addSelectedDatasetToBus(long datasetId)
{
    TaskManager = (TaskManager)Session["TaskManager"];
    DatasetManager datasetManager = new DatasetManager();

    // Fetch the latest version once; previously it was loaded twice (once for the tuple
    // count and once for the metadata), doubling the DB round-trips.
    DatasetVersion datasetVersion = datasetManager.GetDatasetLatestVersion(datasetId);

    // A dataset that already has tuples is in "edit" mode, an empty one is "new".
    if (datasetManager.GetDatasetVersionEffectiveTupleCount(datasetVersion) > 0)
    {
        TaskManager.AddToBus("DatasetStatus", "edit");
    }
    else
        TaskManager.AddToBus("DatasetStatus", "new");

    TaskManager.AddToBus(TaskManager.DATASET_ID, datasetId);

    //Add Metadata to Bus
    //TITLE
    TaskManager.AddToBus(TaskManager.DATASET_TITLE, XmlDatasetHelper.GetInformation(datasetVersion, NameAttributeValues.title));

    ResearchPlanManager rpm = new ResearchPlanManager();
    ResearchPlan rp = rpm.Repo.Get(datasetVersion.Dataset.ResearchPlan.Id);
    TaskManager.AddToBus(TaskManager.RESEARCHPLAN_ID, rp.Id);
    TaskManager.AddToBus(TaskManager.RESEARCHPLAN_TITLE, rp.Title);
}
/// <summary>
/// Builds one Lucene document for a dataset and adds it to the index writer.
/// Four groups of index fields are written, each driven by a configuration node list:
/// facets, properties (typed: string/date/integer/double), categories (either primary-data
/// values read from the dataset tuples, or metadata values), and general fields.
/// </summary>
/// <param name="id">dataset id; also used as the Lucene "doc_id" field</param>
/// <param name="metadataDoc">the dataset's metadata XML, queried via XPath from the config nodes</param>
private void writeBexisIndex(long id, XmlDocument metadataDoc)
{
    String docId = id.ToString();//metadataDoc.GetElementsByTagName("bgc:id")[0].InnerText;
    var dataset = new Document();

    // --- facet fields: stored, not analyzed; every non-empty metadata hit is indexed ---
    List<XmlNode> facetNodes = facetXmlNodeList;
    dataset.Add(new Field("doc_id", docId, Lucene.Net.Documents.Field.Store.YES, Lucene.Net.Documents.Field.Index.NOT_ANALYZED));
    foreach (XmlNode facet in facetNodes)
    {
        String multivalued = facet.Attributes.GetNamedItem("multivalued").Value;
        string[] metadataElementNames = facet.Attributes.GetNamedItem("metadata_name").Value.Split(',');
        String lucene_name = facet.Attributes.GetNamedItem("lucene_name").Value;
        foreach (string metadataElementName in metadataElementNames)
        {
            XmlNodeList elemList = metadataDoc.SelectNodes(metadataElementName);
            if (elemList != null)
            {
                for (int i = 0; i < elemList.Count; i++)
                {
                    string eleme = elemList[i].InnerText;
                    if (!elemList[i].InnerText.Trim().Equals(""))
                    {
                        dataset.Add(new Field("facet_" + lucene_name, elemList[i].InnerText, Lucene.Net.Documents.Field.Store.YES, Field.Index.NOT_ANALYZED));
                        dataset.Add(new Field("ng_all", elemList[i].InnerText, Lucene.Net.Documents.Field.Store.YES, Field.Index.ANALYZED));
                        writeAutoCompleteIndex(docId, lucene_name, elemList[i].InnerText);
                        writeAutoCompleteIndex(docId, "ng_all", elemList[i].InnerText);
                    }
                }
            }
        }
    }

    // --- property fields: only the FIRST metadata hit is indexed, typed by "primitive_type" ---
    List<XmlNode> propertyNodes = propertyXmlNodeList;
    foreach (XmlNode property in propertyNodes)
    {
        String multivalued = property.Attributes.GetNamedItem("multivalued").Value;
        String lucene_name = property.Attributes.GetNamedItem("lucene_name").Value;
        string[] metadataElementNames = property.Attributes.GetNamedItem("metadata_name").Value.Split(',');
        foreach (string metadataElementName in metadataElementNames)
        {
            XmlNodeList elemList = metadataDoc.SelectNodes(metadataElementName);
            if (elemList != null)
            {
                String primitiveType = property.Attributes.GetNamedItem("primitive_type").Value;
                if (elemList[0] != null)
                {
                    if (primitiveType.ToLower().Equals("string"))
                    {
                        dataset.Add(new Field("property_" + lucene_name, elemList[0].InnerText, Lucene.Net.Documents.Field.Store.YES, Field.Index.NOT_ANALYZED));
                        dataset.Add(new Field("ng_all", elemList[0].InnerText, Lucene.Net.Documents.Field.Store.YES, Field.Index.ANALYZED));
                        writeAutoCompleteIndex(docId, lucene_name, elemList[0].InnerText);
                        writeAutoCompleteIndex(docId, "ng_all", elemList[0].InnerText);
                    }
                    else if (primitiveType.ToLower().Equals("date"))
                    {
                        //DateTime MyDateTime = DateTime.Now;
                        DateTime MyDateTime = new DateTime();
                        /*String dTFormatElementName = property.Attributes.GetNamedItem("date_format").Value;
                        XmlNodeList dtFormatElements = metadataDoc.GetElementsByTagName(dTFormatElementName);
                        String dateTimeFormat = dtFormatElements[0].InnerText;*/
                        // Unparseable dates are silently skipped (TryParse returns false).
                        if (DateTime.TryParse(elemList[0].InnerText, out MyDateTime))
                        {
                            //MyDateTime = DateTime.ParseExact(elemList[0].InnerText, dateTimeFormat,
                            //    CultureInfo.InvariantCulture);
                            long t = MyDateTime.Ticks;
                            // Dates are indexed numerically as ticks for range queries.
                            NumericField xyz = new NumericField("property_numeric_" + lucene_name).SetLongValue(MyDateTime.Ticks);
                            String dateToString = MyDateTime.Date.ToString("d", CultureInfo.CreateSpecificCulture("en-US"));
                            dataset.Add(xyz);
                            dataset.Add(new Field("property_" + lucene_name, dateToString, Lucene.Net.Documents.Field.Store.NO, Field.Index.NOT_ANALYZED));
                            writeAutoCompleteIndex(docId, lucene_name, MyDateTime.Date.ToString());
                            writeAutoCompleteIndex(docId, "ng_all", MyDateTime.Date.ToString());
                        }
                    }
                    else if (primitiveType.ToLower().Equals("integer"))
                    {
                        // NOTE(review): field name is "property_numeric" + name here (no underscore),
                        // but "property_numeric_" + name in the date branch — looks inconsistent; confirm.
                        dataset.Add(new NumericField("property_numeric" + lucene_name).SetIntValue(Convert.ToInt32(elemList[0].InnerText)));
                        dataset.Add(new Field("property_" + lucene_name, elemList[0].InnerText, Lucene.Net.Documents.Field.Store.NO, Field.Index.NOT_ANALYZED));
                        // writeAutoCompleteIndex(lucene_name, elemList[0].InnerText);
                    }
                    else if (primitiveType.ToLower().Equals("double"))
                    {
                        dataset.Add(new NumericField("property_numeric" + lucene_name).SetDoubleValue(Convert.ToDouble(elemList[0].InnerText)));
                        dataset.Add(new Field("property_" + lucene_name, elemList[0].InnerText, Lucene.Net.Documents.Field.Store.NO, Field.Index.NOT_ANALYZED));
                        writeAutoCompleteIndex(docId, lucene_name, elemList[0].InnerText);
                        writeAutoCompleteIndex(docId, "ng_all", elemList[0].InnerText);
                    }
                }
            }
        }
    }

    // --- category fields: primary-data fields read dataset tuples; others read metadata ---
    List<XmlNode> categoryNodes = categoryXmlNodeList;
    foreach (XmlNode category in categoryNodes)
    {
        if (category.Attributes.GetNamedItem("type").Value.Equals("primary_data_field"))
        {
            String primitiveType = category.Attributes.GetNamedItem("primitive_type").Value;
            String lucene_name = category.Attributes.GetNamedItem("lucene_name").Value;
            String analysing = category.Attributes.GetNamedItem("analysed").Value;
            float boosting = Convert.ToSingle(category.Attributes.GetNamedItem("boost").Value);
            var toAnalyse = Lucene.Net.Documents.Field.Index.NOT_ANALYZED;
            if (analysing.ToLower().Equals("yes"))
            {
                toAnalyse = Lucene.Net.Documents.Field.Index.ANALYZED;
            }
            DatasetManager dm = new DatasetManager();
            // Javad: check if the dataset is "checked-in". If yes, then use the paging version
            // of the GetDatasetVersionEffectiveTuples method. Number of tuples for the for loop
            // is also available via GetDatasetVersionEffectiveTupleCount; a proper fetch (page)
            // size can be obtained by calling dm.PreferedBatchSize.
            if (dm.IsDatasetCheckedIn(id))
            {
                DatasetVersion dsv = dm.GetDatasetLatestVersion(id);
                DataStructureManager dsm = new DataStructureManager();
                StructuredDataStructure sds = dsm.StructuredDataStructureRepo.Get(dsv.Dataset.DataStructure.Id);
                int fetchSize = dm.PreferedBatchSize;
                long tupleSize = dm.GetDatasetVersionEffectiveTupleCount(dsv);
                long noOfFetchs = tupleSize / fetchSize + 1;
                for (int round = 0; round < noOfFetchs; round++)
                {
                    List<AbstractTuple> dsVersionTuples = dm.GetDatasetVersionEffectiveTuples(dsv, round, fetchSize);
                    List<string> primaryDataStringToindex = generateStringFromTuples(dsVersionTuples, sds);
                    if (primaryDataStringToindex != null)
                    {
                        foreach (string pDataValue in primaryDataStringToindex) // Loop through List with foreach
                        {
                            Field a = new Field("category_" + lucene_name, pDataValue, Lucene.Net.Documents.Field.Store.NO, toAnalyse);
                            a.Boost = boosting;
                            dataset.Add(a);
                            dataset.Add(new Field("ng_" + lucene_name, pDataValue, Lucene.Net.Documents.Field.Store.YES, Lucene.Net.Documents.Field.Index.ANALYZED));
                            dataset.Add(new Field("ng_all", pDataValue, Lucene.Net.Documents.Field.Store.YES, Lucene.Net.Documents.Field.Index.ANALYZED));
                            writeAutoCompleteIndex(docId, lucene_name, pDataValue);
                            writeAutoCompleteIndex(docId, "ng_all", pDataValue);
                        }
                    }
                    GC.Collect();
                }
            }
        }
        else
        {
            String multivalued = category.Attributes.GetNamedItem("multivalued").Value;
            String primitiveType = category.Attributes.GetNamedItem("primitive_type").Value;
            String lucene_name = category.Attributes.GetNamedItem("lucene_name").Value;
            String storing = category.Attributes.GetNamedItem("store").Value;
            String analysing = category.Attributes.GetNamedItem("analysed").Value;
            float boosting = Convert.ToSingle(category.Attributes.GetNamedItem("boost").Value);
            var toStore = Lucene.Net.Documents.Field.Store.NO;
            var toAnalyse = Lucene.Net.Documents.Field.Index.NOT_ANALYZED;
            if (storing.ToLower().Equals("yes"))
            {
                toStore = Lucene.Net.Documents.Field.Store.YES;
            }
            if (analysing.ToLower().Equals("yes"))
            {
                toAnalyse = Lucene.Net.Documents.Field.Index.ANALYZED;
            }
            string[] metadataElementNames = category.Attributes.GetNamedItem("metadata_name").Value.Split(',');
            foreach (string metadataElementName in metadataElementNames)
            {
                XmlNodeList elemList = metadataDoc.SelectNodes(metadataElementName);
                if (elemList != null)
                {
                    for (int i = 0; i < elemList.Count; i++)
                    {
                        Field a = new Field("category_" + lucene_name, elemList[i].InnerText, toStore, toAnalyse);
                        a.Boost = boosting;
                        dataset.Add(a);
                        dataset.Add(new Field("ng_" + lucene_name, elemList[i].InnerText, Lucene.Net.Documents.Field.Store.YES, Lucene.Net.Documents.Field.Index.ANALYZED));
                        dataset.Add(new Field("ng_all", elemList[i].InnerText, Lucene.Net.Documents.Field.Store.YES, Lucene.Net.Documents.Field.Index.ANALYZED));
                        writeAutoCompleteIndex(docId, lucene_name, elemList[i].InnerText);
                        writeAutoCompleteIndex(docId, "ng_all", elemList[i].InnerText);
                    }
                }
            }
        }
    }

    // --- general fields ---
    List<XmlNode> generalNodes = generalXmlNodeList;
    foreach (XmlNode general in generalNodes)
    {
        String multivalued = general.Attributes.GetNamedItem("multivalued").Value;
        String primitiveType = general.Attributes.GetNamedItem("primitive_type").Value;
        String lucene_name = general.Attributes.GetNamedItem("lucene_name").Value;
        String storing = general.Attributes.GetNamedItem("store").Value;
        String analysing = general.Attributes.GetNamedItem("analysed").Value;
        var toStore = Lucene.Net.Documents.Field.Store.NO;
        var toAnalyse = Lucene.Net.Documents.Field.Index.NOT_ANALYZED;
        if (storing.ToLower().Equals("yes"))
        {
            toStore = Lucene.Net.Documents.Field.Store.YES;
        }
        if (analysing.ToLower().Equals("yes"))
        {
            toAnalyse = Lucene.Net.Documents.Field.Index.ANALYZED;
        }
        float boosting = Convert.ToSingle(general.Attributes.GetNamedItem("boost").Value);
        string[] metadataElementNames = general.Attributes.GetNamedItem("metadata_name").Value.Split(',');
        foreach (string metadataElementName in metadataElementNames)
        {
            // NOTE(review): unlike the other groups, elemList is NOT null-checked here before
            // reading .Count — an unmatched XPath would throw; kept for behavioral parity.
            XmlNodeList elemList = metadataDoc.SelectNodes(metadataElementName);
            for (int i = 0; i < elemList.Count; i++)
            {
                Field a = new Field(lucene_name, elemList[i].InnerText, toStore, toAnalyse);
                a.Boost = boosting;
                dataset.Add(a);
                dataset.Add(new Field("ng_all", elemList[i].InnerText, Lucene.Net.Documents.Field.Store.NO, Field.Index.ANALYZED));
                writeAutoCompleteIndex(docId, lucene_name, elemList[i].InnerText);
                writeAutoCompleteIndex(docId, "ng_all", elemList[i].InnerText);
            }
        }
    }
    indexWriter.AddDocument(dataset);
}
/// <summary>
/// Migration helper: transfers group data permissions (view/download, optionally update)
/// from the legacy BExIS rights table to the new permission system, using the
/// old-id-to-new-id dataset mapping file, then grants the designated dataset manager
/// (taken from each dataset's metadata) the Grant right.
/// </summary>
/// <returns>a success message</returns>
public string TransferDataPermission()
{
    SubjectManager subjectManager = new SubjectManager();
    var securityMigration = new SecurityMigration();
    Dictionary<int, int> DataSetIDs = new Dictionary<int, int>();
    var groups = subjectManager.GetAllGroups();
    string DatasetMappingPath = Path.Combine(AppConfiguration.DataPath, "DatasetMapping.txt");

    //Key is last datasetId and value is the new one
    Dictionary<int, int> DatasetsMapping = File.ReadAllLines(DatasetMappingPath).AsEnumerable()
        .Select(item => new { oldId = int.Parse(item.Split('\t')[0]), newId = int.Parse(item.Split('\t')[1]) })
        .ToDictionary(c => c.oldId, c => c.newId);

    DatasetManager dm = new DatasetManager();
    PermissionManager permissionManager = new PermissionManager();
    List<SecurityMigration.Right> rights = securityMigration.GetBexisRights(DataBase, DatasetsMapping);

    foreach (var group in groups)
    {
        // Legacy role names may carry a leading underscore; match both spellings.
        var groupRights = rights.Where(item => item.RoleName == group.Name || item.RoleName == "_" + group.Name);
        foreach (var right in groupRights)
        {
            int newDataSetId = DatasetsMapping.FirstOrDefault(item => item.Key == right.DataSetId).Value;

            //each entity wich exists in this list has view and download feature
            permissionManager.CreateDataPermission(group.Id, 1, newDataSetId, RightType.View);
            permissionManager.CreateDataPermission(group.Id, 1, newDataSetId, RightType.Download);
            if (right.CanEdit)
                permissionManager.CreateDataPermission(group.Id, 1, newDataSetId, RightType.Update);
        }
    }

    foreach (var DatasetMapping in DatasetsMapping)
    {
        //extract grant user from the last version and add it to new ver
        if (dm.GetDataset(DatasetMapping.Value) == null)
            continue;
        DatasetVersion dsv = dm.GetDatasetLatestVersion(DatasetMapping.Value);
        // NOTE(review): SelectSingleNode(...) would NRE if the metadata lacks this path — confirm schema.
        string grantUserEmailAddress = dsv.Metadata.SelectSingleNode("Metadata/general/general/designatedDatasetManager/contactType/email/email").InnerText;
        if (!string.IsNullOrEmpty(grantUserEmailAddress))
        {
            var grantUser = subjectManager.GetUserByEmail(grantUserEmailAddress);
            permissionManager.CreateDataPermission(grantUser.Id, 1, DatasetMapping.Value, RightType.Grant);
        }
    }
    return "All of permissions transfered successfully.";
}
/// <summary>
/// Stores the setup selection (metadata structure, data structure, optional source dataset)
/// on the create-dataset task bus and forwards to the metadata editor.
/// </summary>
/// <param name="model">the setup selection; null falls back to the default model</param>
public ActionResult StoreSelectedDatasetSetup(SetupModel model)
{
    CreateTaskmanager TaskManager = (CreateTaskmanager)Session["CreateDatasetTaskmanager"];
    DatasetManager dm = new DatasetManager();

    if (model == null)
    {
        model = GetDefaultModel();
        return PartialView("Index", model);
    }

    model = LoadLists(model);

    if (ModelState.IsValid)
    {
        TaskManager.AddToBus(CreateTaskmanager.METADATASTRUCTURE_ID, model.SelectedMetadataStructureId);
        TaskManager.AddToBus(CreateTaskmanager.DATASTRUCTURE_ID, model.SelectedDataStructureId);

        // set datastructuretype
        TaskManager.AddToBus(CreateTaskmanager.DATASTRUCTURE_TYPE, GetDataStructureType(model.SelectedDataStructureId));

        //dataset is selected
        if (model.SelectedDatasetId != 0 && model.SelectedDatasetId != -1)
        {
            if (dm.IsDatasetCheckedIn(model.SelectedDatasetId))
            {
                DatasetVersion datasetVersion = dm.GetDatasetLatestVersion(model.SelectedDatasetId);
                TaskManager.AddToBus(CreateTaskmanager.RESEARCHPLAN_ID, datasetVersion.Dataset.ResearchPlan.Id);
                TaskManager.AddToBus(CreateTaskmanager.ENTITY_TITLE, XmlDatasetHelper.GetInformation(datasetVersion, NameAttributeValues.title));

                // set datastructuretype
                // NOTE(review): duplicates the AddToBus(DATASTRUCTURE_TYPE, ...) call above;
                // kept for behavioral parity — confirm whether it can be removed.
                TaskManager.AddToBus(CreateTaskmanager.DATASTRUCTURE_TYPE, GetDataStructureType(model.SelectedDataStructureId));

                // set MetadataXml From selected existing Dataset
                XDocument metadata = XmlUtility.ToXDocument(datasetVersion.Metadata);
                SetXml(metadata);
            }
            else
            {
                ModelState.AddModelError(string.Empty, "Dataset is just in processing");
            }
        }
        else
        {
            ResearchPlanManager rpm = new ResearchPlanManager();
            TaskManager.AddToBus(CreateTaskmanager.RESEARCHPLAN_ID, rpm.Repo.Get().First().Id);
        }
        return RedirectToAction("StartMetadataEditor", "Form");
    }
    return View("Index", model);
}
/// <summary>
/// Builds the "my datasets" grid: one row per dataset the current user has any right on,
/// with title/description (when checked in) and check marks for the five right types.
/// </summary>
public ActionResult _CustomMyDatasetBinding()
{
    DataTable model = new DataTable();
    ViewData["PageSize"] = 10;
    ViewData["CurrentPage"] = 1;

    #region header

    List<HeaderItem> headerItems = new List<HeaderItem>();

    HeaderItem headerItem = new HeaderItem() { Name = "ID", DisplayName = "ID", DataType = "Int64" };
    headerItems.Add(headerItem);
    ViewData["Id"] = headerItem;

    headerItem = new HeaderItem() { Name = "Title", DisplayName = "Title", DataType = "String" };
    headerItems.Add(headerItem);

    headerItem = new HeaderItem() { Name = "Description", DisplayName = "Description", DataType = "String" };
    headerItems.Add(headerItem);

    headerItem = new HeaderItem() { Name = "View", DisplayName = "View", DataType = "String" };
    headerItems.Add(headerItem);

    headerItem = new HeaderItem() { Name = "Update", DisplayName = "Update", DataType = "String" };
    headerItems.Add(headerItem);

    headerItem = new HeaderItem() { Name = "Delete", DisplayName = "Delete", DataType = "String" };
    headerItems.Add(headerItem);

    headerItem = new HeaderItem() { Name = "Download", DisplayName = "Download", DataType = "String" };
    headerItems.Add(headerItem);

    headerItem = new HeaderItem() { Name = "Grant", DisplayName = "Grant", DataType = "String" };
    headerItems.Add(headerItem);

    ViewData["DefaultHeaderList"] = headerItems;

    #endregion

    model = CreateDataTable(headerItems);

    DatasetManager datasetManager = new DatasetManager();
    PermissionManager permissionManager = new PermissionManager();
    SubjectManager subjectManager = new SubjectManager();

    List<long> gridCommands = datasetManager.GetDatasetLatestIds();

    // BUGFIX: the Skip/Take result was previously discarded — LINQ queries are lazy and the
    // statement had no assignment, so paging silently had no effect and every dataset was
    // processed. Materialize the requested page (page numbers are 1-based).
    int pageSize = Convert.ToInt16(ViewData["PageSize"]);
    int currentPage = Convert.ToInt16(ViewData["CurrentPage"]);
    gridCommands = gridCommands.Skip((currentPage - 1) * pageSize).Take(pageSize).ToList();

    foreach (long datasetId in gridCommands)
    {
        //get permissions
        List<int> rights = permissionManager.GetAllRights(subjectManager.GetUserByName(GetUsernameOrDefault()).Id, 1, datasetId).ToList();
        if (rights.Count > 0)
        {
            DataRow dataRow = model.NewRow();
            Object[] rowArray = new Object[8];

            if (datasetManager.IsDatasetCheckedIn(datasetId))
            {
                DatasetVersion dsv = datasetManager.GetDatasetLatestVersion(datasetId);
                MetadataStructureManager msm = new MetadataStructureManager();
                dsv.Dataset.MetadataStructure = msm.Repo.Get(dsv.Dataset.MetadataStructure.Id);

                string title = XmlDatasetHelper.GetInformation(dsv, NameAttributeValues.title);
                string description = XmlDatasetHelper.GetInformation(dsv, NameAttributeValues.description);

                rowArray[0] = Convert.ToInt64(datasetId);
                rowArray[1] = title;
                rowArray[2] = description;
            }
            else
            {
                rowArray[0] = Convert.ToInt64(datasetId);
                rowArray[1] = "";
                rowArray[2] = "Dataset is just in processing.";
            }

            // Right ids 1..5 map to the View, Update, Delete, Download, Grant columns.
            rowArray[3] = rights.Contains(1) ? "✔" : "✘";
            rowArray[4] = rights.Contains(2) ? "✔" : "✘";
            rowArray[5] = rights.Contains(3) ? "✔" : "✘";
            rowArray[6] = rights.Contains(4) ? "✔" : "✘";
            rowArray[7] = rights.Contains(5) ? "✔" : "✘";

            dataRow = model.NewRow();
            dataRow.ItemArray = rowArray;
            model.Rows.Add(dataRow);
        }
    }
    return View(new GridModel(model));
}
/// <summary>
/// Test action: exports dataset 1 for the "gfbio" data repository — converts its metadata
/// with the repository's mapping file, generates an ASCII primary-data file for structured
/// datasets, and zips all content descriptors into the repository's export directory.
/// </summary>
public ActionResult Test()
{
    UiTestModel model = new UiTestModel();
    model = DynamicListToDataTable();

    SubmissionManager pm = new SubmissionManager();
    DatasetManager dm = new DatasetManager();
    pm.Load();
    DataRepository gfbio = pm.DataRepositories.Where(d => d.Name.ToLower().Equals("gfbio")).FirstOrDefault();

    // get metadata
    long testdatasetId = 1;
    string formatname = "";
    XmlDocument newXmlDoc;
    DatasetVersion dsv = dm.GetDatasetLatestVersion(testdatasetId);
    string title = XmlDatasetHelper.GetInformation(dsv, NameAttributeValues.title);

    if (gfbio != null)
    {
        formatname = XmlDatasetHelper.GetAllTransmissionInformation(1, TransmissionType.mappingFileExport, AttributeNames.name).First();
        OutputMetadataManager.GetConvertedMetadata(testdatasetId, TransmissionType.mappingFileExport, formatname);

        // get primary data
        // check the data sturcture type ...
        if (dsv.Dataset.DataStructure.Self is StructuredDataStructure)
        {
            OutputDataManager odm = new OutputDataManager();
            // apply selection and projection
            odm.GenerateAsciiFile(testdatasetId, title, gfbio.PrimaryDataFormat);
        }

        string zipName = pm.GetZipFileName(testdatasetId, dsv.Id);
        string zipPath = pm.GetDirectoryPath(testdatasetId, gfbio);
        string zipFilePath = Path.Combine(zipPath, zipName);

        FileHelper.CreateDicrectoriesIfNotExist(Path.GetDirectoryName(zipFilePath));

        // Replace a pre-existing archive, waiting until it is no longer locked.
        if (FileHelper.FileExist(zipFilePath))
        {
            if (FileHelper.WaitForFile(zipFilePath))
            {
                FileHelper.Delete(zipFilePath);
            }
        }

        ZipFile zip = new ZipFile();
        foreach (ContentDescriptor cd in dsv.ContentDescriptors)
        {
            string path = Path.Combine(AppConfiguration.DataPath, cd.URI);
            string name = cd.URI.Split('\\').Last();
            if (FileHelper.FileExist(path))
            {
                zip.AddFile(path, "");
            }
        }
        zip.Save(zipFilePath);
    }
    else
    {
        newXmlDoc = dsv.Metadata;
    }
    return View("Index", model);
}
/// <summary>
/// Converts a dataset's metadata with the configured mapping file and validates the result
/// against the external schema.
/// </summary>
/// <param name="datasetId">dataset whose latest version is validated</param>
/// <param name="type">transmission type used to look up the mapping file</param>
/// <param name="mappingName">name of the mapping entry in the dataset's transmission info</param>
/// <returns>the validation result string, or the exception message if anything fails</returns>
public static string IsValideAgainstSchema(long datasetId, TransmissionType type, string mappingName)
{
    try
    {
        DatasetManager datasetManager = new DatasetManager();
        DatasetVersion datasetVersion = datasetManager.GetDatasetLatestVersion(datasetId);

        string mappingFileName = XmlDatasetHelper.GetTransmissionInformation(datasetVersion, type, mappingName);
        string pathMappingFile = Path.Combine(AppConfiguration.GetModuleWorkspacePath("DIM"), mappingFileName);

        XmlMapperManager xmlMapperManager = new XmlMapperManager(TransactionDirection.InternToExtern);
        xmlMapperManager.Load(pathMappingFile, "exporttest");

        XmlDocument tmp = GetConvertedMetadata(datasetId, type, mappingName, false);

        // Round-trip the converted metadata through a temp file before validation.
        string path = Path.Combine(AppConfiguration.DataPath, "Temp", "System", "convertedMetadata.xml");
        if (FileHelper.FileExist(path))
            FileHelper.Delete(path);
        FileHelper.CreateDicrectoriesIfNotExist(Path.GetDirectoryName(path));
        tmp.Save(path);

        XmlDocument metadataForImport = new XmlDocument();
        metadataForImport.Load(path);

        return xmlMapperManager.Validate(metadataForImport);
    }
    catch (Exception ex)
    {
        // By design, failures are reported as the message string rather than rethrown.
        return ex.Message;
    }
}
/// <summary>
/// Removes the "generated" content descriptor from dataset 1 (check-out, delete, check-in).
/// </summary>
private void removeContentDescriptor()
{
    DatasetManager dm = new DatasetManager();
    Dataset dataset = dm.GetDataset(1);

    // check if the dataset is in the checked-in status
    DatasetVersion dsVersion = dm.GetDatasetLatestVersion(dataset);
    if (dsVersion.ContentDescriptors.Count(p => p.Name.Equals("generated")) > 0)
    {
        dm.CheckOutDataset(1, "admin");
        dsVersion = dm.GetDatasetWorkingCopy(1);
        //dm.EditDatasetVersion(dsVersion, null, null, null, null);

        // The descriptor to be deleted must be object equal to the one in the list.
        // The following command does the job. The condition can be different, but the item
        // should be taken from the list, and any other instance must be released, by setting them to NULL.
        var cd = dsVersion.ContentDescriptors.FirstOrDefault(p => p.Name.Equals("generated"));
        dm.DeleteContentDescriptor(cd);
        dm.CheckInDataset(1, "removed content descriptor:" + cd.Name, "admin");
    }
}
/// <summary>
/// Lists all checked-in datasets of the given structure type that the current user
/// may update, as title-sorted list view items.
/// </summary>
/// <param name="dataStructureType">structured or unstructured</param>
/// <returns>list items (id + title) ordered by title</returns>
public List<ListViewItem> LoadDatasetVersionViewList(DataStructureType dataStructureType)
{
    PermissionManager permissionManager = new PermissionManager();
    SubjectManager subjectManager = new SubjectManager();

    // add security: only datasets the current user holds the Update right on are listed.
    ICollection<long> datasetIDs = permissionManager.GetAllDataIds(subjectManager.GetUserByName(GetUsernameOrDefault()).Id, 1, RightType.Update).ToList();

    DataStructureManager dataStructureManager = new DataStructureManager();
    DatasetManager dm = new DatasetManager();
    // NOTE: a Dictionary<long, XmlDocument> of all latest metadata versions was previously
    // loaded here (GetDatasetLatestMetadataVersions) but never used — removed as dead work.

    List<ListViewItem> temp = new List<ListViewItem>();

    if (dataStructureType.Equals(DataStructureType.Structured))
    {
        List<StructuredDataStructure> list = dataStructureManager.StructuredDataStructureRepo.Get().ToList();
        foreach (StructuredDataStructure sds in list)
        {
            sds.Materialize();
            foreach (Dataset d in sds.Datasets)
            {
                if (dm.IsDatasetCheckedIn(d.Id) && datasetIDs.Contains(d.Id))
                {
                    temp.Add(new ListViewItem(d.Id, XmlDatasetHelper.GetInformation(dm.GetDatasetLatestVersion(d), NameAttributeValues.title)));
                }
            }
        }
    }
    else
    {
        List<UnStructuredDataStructure> list = dataStructureManager.UnStructuredDataStructureRepo.Get().ToList();
        foreach (UnStructuredDataStructure sds in list)
        {
            foreach (Dataset d in sds.Datasets)
            {
                if (dm.IsDatasetCheckedIn(d.Id) && datasetIDs.Contains(d.Id))
                {
                    DatasetVersion datasetVersion = dm.GetDatasetLatestVersion(d);
                    temp.Add(new ListViewItem(d.Id, XmlDatasetHelper.GetInformation(datasetVersion, NameAttributeValues.title)));
                }
            }
        }
    }

    return temp.OrderBy(p => p.Title).ToList();
}
/// <summary>
/// test unique of primary keys on a dataset: scans all effective tuples of the latest
/// version page by page and checks that no primary-key combination occurs twice.
/// </summary>
/// <param name="datasetId">dataset to check; must be checked in</param>
/// <param name="primaryKeys">variable ids forming the primary key</param>
/// <returns>true if all primary-key combinations are unique, false on the first duplicate</returns>
/// <exception cref="Exception">thrown when the dataset is not checked in</exception>
////[MeasurePerformance]
public static Boolean IsUnique(long datasetId, List<long> primaryKeys)
{
    // Every primary-key combination seen so far; a repeated Add means a duplicate.
    // (Replaces the previous Hashtable + catch-on-duplicate, which used exceptions for control flow.)
    HashSet<string> seenKeys = new HashSet<string>();

    // load data
    DatasetManager datasetManager = new DatasetManager();

    if (datasetManager.IsDatasetCheckedIn(datasetId))
    {
        DatasetVersion datasetVersion = datasetManager.GetDatasetLatestVersion(datasetId);

        #region load all datatuples first

        int size = 10000;
        int counter = 0;
        IEnumerable<AbstractTuple> dataTuples;
        do
        {
            dataTuples = datasetManager.GetDatasetVersionEffectiveTuples(datasetVersion, counter, size);
            foreach (DataTuple dt in dataTuples)
            {
                string pKey = getPrimaryKeysAsString(dt, primaryKeys);
                if (pKey.Length > 0)
                {
                    // Add returns false when the key combination was already present.
                    if (!seenKeys.Add(pKey))
                    {
                        return false;
                    }
                }
            }
            counter++;
            // BUGFIX: the previous condition `dataTuples.Count() >= (size * counter)` could
            // only hold for the first page (a page never exceeds `size` tuples), so the scan
            // stopped after at most two pages. A full page means more tuples may remain;
            // a short page is the last one.
        } while (dataTuples.Count() == size);

        #endregion
    }
    else
    {
        throw new Exception("Dataset is not checked in.");
    }

    return true;
}