/// <summary>
/// GET: returns one attachment model per latest dataset version.
/// Datasets for which no model can be built (helper returns null) are skipped.
/// </summary>
/// <returns>List of attachment models; empty when there are no datasets.</returns>
public List<ApiDatasetAttachmentsModel> Get()
{
    List<ApiDatasetAttachmentsModel> tmp = new List<ApiDatasetAttachmentsModel>();

    // using-statement replaces the manual try/finally Dispose pattern
    using (DatasetManager dm = new DatasetManager())
    {
        foreach (long id in dm.GetDatasetLatestIds())
        {
            var model = GetApiDatasetAttachmentsModel(id, dm);
            if (model != null)
            {
                tmp.Add(model);
            }
        }
        return tmp;
    }
}
// GET: DQManager
/// <summary>
/// Builds the ManageDQ model: one (id, title) entry per latest dataset version.
/// </summary>
/// <returns>The Index view backed by the populated ManageDQ model.</returns>
public ActionResult Index()
{
    ManageDQ manageModel = new ManageDQ();
    List<dataset> datasets = new List<dataset>();

    // the DatasetManager was previously never disposed (resource leak)
    using (DatasetManager dm = new DatasetManager())
    {
        foreach (long id in dm.GetDatasetLatestIds()) //for each latest dataset
        {
            dataset ds = new dataset();
            ds.Id = id;
            ds.title = dm.GetDatasetLatestVersion(id).Title;
            datasets.Add(ds);
        }
    }

    manageModel.datasets = datasets;
    return View(manageModel);
}
// GET: api/Metadata
/// <summary>
/// For every latest dataset, lists the metadata export formats configured via
/// the mappingFileExport transmission information.
/// </summary>
/// <returns>One MetadataViewObject (dataset id + format names) per dataset.</returns>
public IEnumerable<MetadataViewObject> Get()
{
    // using-statement replaces the manual try/finally Dispose pattern
    using (DatasetManager dm = new DatasetManager())
    {
        List<MetadataViewObject> tmp = new List<MetadataViewObject>();
        foreach (var id in dm.GetDatasetLatestIds())
        {
            MetadataViewObject mvo = new MetadataViewObject();
            mvo.DatasetId = id;
            // ToList().ToArray() collapsed to a single ToArray()
            mvo.Format = xmlDatasetHelper
                .GetAllTransmissionInformation(id, TransmissionType.mappingFileExport, AttributeNames.name)
                .ToArray();
            tmp.Add(mvo);
        }
        return tmp;
    }
}
/// <summary>
/// THE ACTIONRESULT FOR SHOW DATASET LIST VIEW.
/// Builds one (id, title, type) entry per latest dataset version that is
/// currently checked in; "tabular" for structured data structures, else "file".
/// </summary>
/// <returns>View backed by an ExternalLink model.</returns>
public ActionResult ShowDatasetList()
{
    ExternalLink dsModel = new ExternalLink();
    List<datasetInfo> datasetInfos = new List<datasetInfo>();

    // both managers were previously never disposed (resource leak)
    using (DatasetManager dm = new DatasetManager())
    using (DataStructureManager dsm = new DataStructureManager())
    {
        foreach (long id in dm.GetDatasetLatestIds())
        {
            if (!dm.IsDatasetCheckedIn(id))
            {
                continue; // skip datasets that are still in processing
            }

            DatasetVersion datasetVersion = dm.GetDatasetLatestVersion(id); //get last dataset version
            DataStructure dataStr = dsm.AllTypesDataStructureRepo.Get(datasetVersion.Dataset.DataStructure.Id);

            datasetInfo info = new datasetInfo();
            info.title = datasetVersion.Title;
            // structured data structures are shown as "tabular", everything else as "file"
            info.type = dataStr.Self.GetType() == typeof(StructuredDataStructure) ? "tabular" : "file";
            info.Id = id;
            datasetInfos.Add(info);
        }
    }

    dsModel.datasetInfos = datasetInfos;
    return View(dsModel);
}
/// <summary>
/// Returns one store item per version of the given dataset, oldest first.
/// Version numbers are the 1-based position in the timestamp-ordered list.
/// </summary>
/// <param name="id">Dataset id.</param>
/// <returns>Version items; empty when the dataset cannot be loaded.</returns>
public List<EntityStoreItem> GetVersionsById(long id)
{
    List<EntityStoreItem> tmp = new List<EntityStoreItem>();
    DatasetManager dm = new DatasetManager();
    try
    {
        // unused locals (datasetIds, datasetHelper) removed
        var versions = dm.GetDataset(id).Versions.OrderBy(v => v.Timestamp).ToList();
        // index loop instead of versions.IndexOf(v) per element (was O(n^2))
        for (int i = 0; i < versions.Count; i++)
        {
            var v = versions[i];
            tmp.Add(new EntityStoreItem()
            {
                Id = v.Id,
                Title = v.Title,
                Version = i + 1,
                CommitComment = "(" + v.Timestamp.ToString("dd.MM.yyyy HH:mm") + "): " + v.ChangeDescription
            });
        }
        return tmp;
    }
    catch (Exception)
    {
        // best effort: callers receive an empty list when loading fails
        return tmp;
    }
    finally
    {
        dm.Dispose();
    }
}
/// <summary>
/// Rebuilds the BEXIS search index over the latest metadata version of every
/// dataset and sends a summary e-mail afterwards. Per-dataset failures are
/// collected and reported together at the end instead of aborting the run.
/// </summary>
/// <remarks>
/// There is no need for the metadataAccess class anymore. Talked with David
/// and deleted. 30.18.13. Javad / compare to the previous version to see the deletions.
/// </remarks>
/// <exception cref="Exception">Thrown when at least one dataset failed to index.</exception>
public void Index()
{
    configureBexisIndexing(true);
    DatasetManager dm = new DatasetManager();
    List<string> errors = new List<string>();
    try
    {
        IList<long> ids = dm.GetDatasetLatestIds(); //ToDo only entities from type dataset should be indexed in this index
        foreach (var id in ids)
        {
            try
            {
                writeBexisIndex(id, dm.GetDatasetLatestMetadataVersion(id));
            }
            catch (Exception ex)
            {
                // collect and continue: one broken dataset must not stop the reindex
                errors.Add(string.Format("Encountered a problem indexing dataset '{0}'. Details: {1}", id, ex.Message));
            }
        }
        indexWriter.Optimize();
        autoCompleteIndexWriter.Optimize();
        if (!reIndex)
        {
            indexWriter.Dispose();
            autoCompleteIndexWriter.Dispose();
        }
        if (errors.Count > 0)
        {
            throw new Exception(string.Join("\n\r", errors));
        }
        // the previous `catch (Exception ex) { throw ex; }` destroyed the stack
        // trace (CA2200); exceptions now propagate unchanged.
    }
    finally
    {
        dm.Dispose();
        GC.Collect(); // NOTE(review): explicit collection kept from the original — consider removing
        var es = new EmailService();
        es.Send(MessageHelper.GetSearchReIndexHeader(),
                MessageHelper.GetSearchReIndexMessage(errors),
                ConfigurationManager.AppSettings["SystemEmail"]);
    }
}
/// <summary>
/// Permanently purges every dataset known to the system.
/// </summary>
public void PurgeAllDatasets()
{
    DatasetManager dm = new DatasetManager();
    try
    {
        // explicit loop instead of List.ForEach with a lambda
        foreach (long dsId in dm.GetDatasetLatestIds(true))
        {
            dm.PurgeDataset(dsId);
        }
    }
    finally
    {
        dm.Dispose();
    }
}
// GET: api/data
/// <summary>
/// Returns the ids of the latest version of every dataset.
/// </summary>
public IEnumerable<long> Get()
{
    // using-statement replaces the manual try/finally Dispose pattern
    using (DatasetManager dm = new DatasetManager())
    {
        return dm.GetDatasetLatestIds();
    }
}
/// <summary>
/// This function finds all datasets that a performer is involved in.
/// </summary>
/// <param name="dm">Dataset manager used to enumerate all datasets.</param>
/// <param name="um">User manager (passed through by callers; not used here).</param>
/// <param name="username">The performer's username.</param>
/// <returns>A list of dataset IDs.</returns>
private List<long> FindDatasetsFromPerformerUsername(DatasetManager dm, UserManager um, string username)
{
    // filter all latest datasets down to those whose performer list contains the user
    return dm.GetDatasetLatestIds()
             .Where(dsId => FindDatasetPerformers(dm, dsId, dm.GetDatasetLatestVersionId(dsId)).Contains(username))
             .ToList();
}
/// <summary>
/// GET: returns the ids of all datasets the caller may read — public datasets,
/// or datasets the token's user has effective read rights on — for which a
/// geoengine.json file exists on disk.
/// </summary>
/// <returns>Readable dataset ids with a geoengine.json file; may be empty.</returns>
public List<long> Get()
{
    // get token from the request
    string token = this.Request.Headers.Authorization?.Parameter;
    List<long> ids = new List<long>();

    using (var datasetManager = new DatasetManager())
    using (var entityPermissionManager = new EntityPermissionManager())
    using (var entityManager = new EntityManager())
    using (var userManager = new UserManager())
    {
        // loop-invariant lookups hoisted out of the per-dataset loop (they were
        // previously re-queried — and three managers re-created — per iteration):
        // the entity type id of "Dataset" (-1 when not registered) ...
        long? entityTypeId = entityManager.FindByName(typeof(Dataset).Name)?.Id;
        entityTypeId = entityTypeId.HasValue ? entityTypeId.Value : -1;
        // ... and the user owning the token (null when the token is unknown)
        User user = userManager.Users.Where(u => u.Token.Equals(token)).FirstOrDefault();

        foreach (var id in datasetManager.GetDatasetLatestIds())
        {
            // if the subject is null and one entry exists, this dataset is public
            bool isPublic = entityPermissionManager.Exists(null, entityTypeId.Value, id);

            if (isPublic || user != null)
            {
                // short-circuit keeps user.Name safe: it is only read when user != null
                if (isPublic || entityPermissionManager.HasEffectiveRight(user.Name, typeof(Dataset), id, RightType.Read))
                {
                    var filepath = Path.Combine(AppConfiguration.DataPath, "Datasets", id.ToString(), "geoengine.json");
                    if (File.Exists(filepath))
                    {
                        ids.Add(id);
                    }
                }
            }
        }
    }
    return ids;
}
/// <summary>
/// Creates/refreshes the materialized views of all latest datasets.
/// On failure the error message is added to the model state and the Sync view is shown.
/// </summary>
public ActionResult SyncAll()
{
    // the manager was previously never disposed (resource leak)
    using (var datasetManager = new DatasetManager())
    {
        try
        {
            var datasetIds = datasetManager.GetDatasetLatestIds();
            datasetManager.SyncView(datasetIds, ViewCreationBehavior.Create | ViewCreationBehavior.Refresh);
            // if the viewData has a model error, the redirect forgets about it.
            return RedirectToAction("Index", new { area = "Sam" });
        }
        catch (Exception ex)
        {
            ViewData.ModelState.AddModelError("", $@"'{ex.Message}'");
            return View("Sync");
        }
    }
}
/// <summary>
/// Counts the versions of the given dataset.
/// </summary>
/// <param name="id">Dataset id.</param>
/// <returns>Number of versions; 0 when the dataset cannot be loaded.</returns>
public int CountVersions(long id)
{
    // unused locals (datasetIds, datasetHelper) removed;
    // using-statement replaces the manual try/finally Dispose pattern
    using (DatasetManager dm = new DatasetManager())
    {
        try
        {
            return dm.GetDataset(id).Versions.Count;
        }
        catch (Exception)
        {
            // best effort: unknown/broken datasets count as 0 versions
            return 0;
        }
    }
}
/// <summary>
/// Lists every latest dataset as an entity store item (id + title).
/// </summary>
public List<EntityStoreItem> GetEntities()
{
    using (var uow = this.GetUnitOfWork())
    {
        DatasetManager dm = new DatasetManager();
        try
        {
            var datasetHelper = new XmlDatasetHelper();
            var result = new List<EntityStoreItem>();
            // explicit loop instead of the LINQ Select projection
            foreach (var datasetId in dm.GetDatasetLatestIds())
            {
                result.Add(new EntityStoreItem()
                {
                    Id = datasetId,
                    Title = datasetHelper.GetInformation(datasetId, NameAttributeValues.title)
                });
            }
            return result;
        }
        finally
        {
            dm.Dispose();
        }
    }
}
/// <summary>
/// Returns the ids of all latest datasets whose data structure is structured (tabular).
/// </summary>
/// <remarks>
/// NOTE(review): dataset ids are long elsewhere in this file, but this API
/// returns int and the foreach narrows each id with an implicit long→int cast
/// (silent truncation beyond int.MaxValue). Interface kept for compatibility —
/// confirm callers before widening to long.
/// </remarks>
public List<int> Get()
{
    List<int> structuredIds = new List<int>();

    // using-statement replaces the manual try/finally Dispose pattern
    using (DatasetManager dm = new DatasetManager())
    {
        foreach (int id in dm.GetDatasetLatestIds())
        {
            DatasetVersion datasetVersion = dm.GetDatasetLatestVersion(id);
            if (datasetVersion.Dataset.DataStructure.Self is StructuredDataStructure)
            {
                structuredIds.Add(id);
            }
        }
        return structuredIds;
    }
}
/// <summary>
/// Handles the OAI-PMH "ListIdentifiers" and "ListRecords" verbs: validates the
/// request arguments, resolves an optional resumption token (re-entering itself
/// once with isRoundtrip = true), selects the matching dataset headers/records
/// and renders the OAI-PMH response document.
/// </summary>
/// <param name="verb">"ListIdentifiers" or "ListRecords".</param>
/// <param name="from">Optional lower bound of the harvest window (string form).</param>
/// <param name="until">Optional upper bound of the harvest window (string form).</param>
/// <param name="metadataPrefix">Requested metadata format prefix (required unless resuming).</param>
/// <param name="set">Optional set filter.</param>
/// <param name="resumptionToken">Optional token continuing a previous partial list.</param>
/// <param name="isRoundtrip">True only on the internal re-entrant call made while resolving a resumption token.</param>
/// <param name="errorList">Accumulator for protocol errors; rendered into the response when non-empty.</param>
/// <param name="loadAbout">Overrides Properties.loadAbout when it has a value.</param>
/// <returns>The complete OAI-PMH response XML document.</returns>
public static XDocument ListIdentifiersOrRecords( string verb, string from, string until, string metadataPrefix, string set, string resumptionToken, bool isRoundtrip, List <XElement> errorList, bool?loadAbout)
{
    List <XElement> errors = errorList;
    DateTime? fromDate = DateTime.MinValue;
    DateTime? untilDate = DateTime.MaxValue;

    /* VERB */
    bool isRecord = false;
    if (String.IsNullOrEmpty(verb) || !(verb == "ListIdentifiers" || verb == "ListRecords")) { errors.Add(MlErrors.badVerbArgument); }
    else { isRecord = verb == "ListRecords"; }

    /* FROM */
    bool isFrom = !String.IsNullOrEmpty(from);
    fromDate = MlDecode.SafeDateTime(from);
    if (isFrom && fromDate == null) { errors.Add(MlErrors.badFromArgument); }

    /* UNTIL */
    bool isUntil = !String.IsNullOrEmpty(until);
    untilDate = MlDecode.SafeDateTime(until);
    if (isUntil && untilDate == null) { errors.Add(MlErrors.badUntilArgument); }
    if (isFrom && isUntil && fromDate > untilDate) { errors.Add(MlErrors.badFromAndUntilArgument); }

    // if both dates exist, they should be in the same format (compared by string length)
    if (!string.IsNullOrEmpty(from) && !string.IsNullOrEmpty(until)) { if (from.Count() != until.Count()) { errors.Add(MlErrors.badFromAndUntilFormatArgument); } }

    // default the harvest window when no parsable bounds were supplied
    if (fromDate == null) { fromDate = new DateTime(1900, 1, 1); }
    // NOTE(review): this tests the *string* `until`, not `untilDate`; a non-null
    // but unparsable `until` therefore skips this default — confirm intent.
    if (until == null) { untilDate = DateTime.Now; }
    // widen the upper bound to the end of the second
    if (untilDate != null) { untilDate = ((DateTime)untilDate).AddMilliseconds(999); }

    /* METADATA PREFIX */
    bool isPrefixOk = !String.IsNullOrEmpty(metadataPrefix);

    /* SETS */
    bool isSet = !String.IsNullOrEmpty(set);
    if (isSet && !Properties.supportSets) { errors.Add(MlErrors.noSetHierarchy); }

    /* RESUMPTION TOKEN */
    bool isResumption = !String.IsNullOrEmpty(resumptionToken);
    if (isResumption && !isRoundtrip)
    {
        // per OAI-PMH, a resumption token must be the request's only argument
        if (isFrom || isUntil || isPrefixOk || isSet) { errors.Add(MlErrors.badResumptionArgumentOnly); }
        // the token must exist, belong to the same verb, and not be expired
        if (!(Properties.resumptionTokens.ContainsKey(resumptionToken) && Properties.resumptionTokens[resumptionToken].Verb == verb && Properties.resumptionTokens[resumptionToken].ExpirationDate >= DateTime.UtcNow)) { errors.Insert(0, MlErrors.badResumptionArgument); }
        // valid token: re-enter once using the token's stored arguments
        if (errors.Count == 0)
        {
            return(ListIdentifiersOrRecords( verb, Properties.resumptionTokens[resumptionToken].From.HasValue ? Properties.resumptionTokens[resumptionToken].From.Value.ToUniversalTime().ToString(Properties.granularity) : null, Properties.resumptionTokens[resumptionToken].Until.HasValue ? Properties.resumptionTokens[resumptionToken].Until.Value.ToUniversalTime().ToString(Properties.granularity) : null, Properties.resumptionTokens[resumptionToken].MetadataPrefix, Properties.resumptionTokens[resumptionToken].Set, resumptionToken, true, errors, loadAbout));
        }
    }

    if (!isPrefixOk) /* Check if the only required attribute is included in the request */ { errors.Add(MlErrors.badMetadataArgument); }
    else if (FormatList.Prefix2Int(metadataPrefix) == 0) { errors.Add(MlErrors.cannotDisseminateFormat); }

    bool isAbout = loadAbout.HasValue ? loadAbout.Value : Properties.loadAbout;

    // echo of the request, included in every response
    XElement request = new XElement("request", new XAttribute("verb", verb), isFrom ? new XAttribute("from", from) : null, isUntil ? new XAttribute("until", until) : null, isPrefixOk ? new XAttribute("metadataPrefix", metadataPrefix) : null, isSet ? new XAttribute("set", set) : null, isResumption ? new XAttribute("resumptionToken", resumptionToken) : null, Properties.baseURL);
    if (errors.Count > 0)
    {
        errors.Insert(0, request); /* add request on the first position, that it will be diplayed before errors */
        return(CreateXml(errors.ToArray()));
    }

    var records = new List <RecordQueryResult>();
    List <string> sets = Common.Helper.GetAllSets(set);
    var formatNum = FormatList.Prefix2Int(metadataPrefix);
    EntityManager entityManager = new EntityManager();
    EntityPermissionManager entityPermissionManager = new EntityPermissionManager();
    DatasetManager datasetManager = new DatasetManager();
    OAIHelper oaiHelper = new OAIHelper();
    try
    {
        //1. Get list of all datasetids which should be harvested
        // ToDo use also the existing parameters like from date
        long?entityTypeId = entityManager.FindByName(typeof(Dataset).Name)?.Id;
        entityTypeId = entityTypeId.HasValue ? entityTypeId.Value : -1;
        // get all datasetids with the last modify date
        List <long> dsvIds = datasetManager.GetDatasetVersionLatestIds();
        // ToDo to get all datasets with the last modfied date, the datasetversionrepo of the dataset manager is used, but its many wrong because of session problem in the past
        List <long> datasetIds = datasetManager.GetDatasetLatestIds();
        // restrict to datasets whose latest version falls inside the harvest window
        datasetIds = datasetManager.DatasetVersionRepo.Query(dsv => dsvIds.Contains(dsv.Id) && dsv.Timestamp >= fromDate && dsv.Timestamp <= untilDate ).Select(dsv => dsv.Dataset.Id).ToList();

        //2. Generate a list of headers
        var recordsQuery = new List <Header>();
        foreach (long id in datasetIds)
        {
            // only publicly readable datasets (permission subject null) are exposed
            if (entityPermissionManager.Exists(null, entityTypeId.Value, id)) { recordsQuery.Add(oaiHelper.GetHeader(id)); }
        }
        if (isSet) { recordsQuery = recordsQuery.Where(h => h.OAI_Set.Equals(AppConfiguration.ApplicationName)).ToList(); }

        int recordsCount = recordsQuery.Count();
        if (recordsCount == 0)
        {
            return(CreateXml(new XElement[] { request, MlErrors.noRecordsMatch }));
        }
        else if (isRoundtrip)
        {
            // continuing a partial list: page according to the token's stored cursor
            Properties.resumptionTokens[resumptionToken].CompleteListSize = recordsCount;
            recordsQuery = recordsQuery.AsEnumerable().Skip( Properties.resumptionTokens[resumptionToken].Cursor.Value).Take( isRecord ? Properties.maxRecordsInList : Properties.maxIdentifiersInList).ToList();
        }
        else if ((isRecord ? Properties.resumeListRecords : Properties.resumeListIdentifiers) && (isRecord ? recordsCount > Properties.maxRecordsInList : recordsCount > Properties.maxIdentifiersInList))
        {
            // result too large for one response: issue a fresh resumption token
            resumptionToken = Common.Helper.CreateGuid();
            isResumption = true;
            Properties.resumptionTokens.Add(resumptionToken, new ResumptionToken() { Verb = verb, From = isFrom ? fromDate : null, Until = isUntil ? untilDate : null, MetadataPrefix = metadataPrefix, Set = set, ExpirationDate = DateTime.UtcNow.Add(Properties.expirationTimeSpan), CompleteListSize = recordsCount, Cursor = 0 });
            recordsQuery = recordsQuery.AsEnumerable().Take( isRecord ? Properties.maxRecordsInList : Properties.maxIdentifiersInList).ToList();
        }

        /* get data from database */
        //var recGroup = (from rec in recordsQuery
        //    join omd in context.ObjectMetadata on rec.HeaderId equals omd.ObjectId
        //    join mdt in context.Metadata on omd.MetadataId equals mdt.MetadataId
        //    group new { OmdMetaType = omd.MetadataType, OaiMetaData = mdt } by rec into grp
        //    select grp).ToList();

        /* distribute data into logical units */
        foreach (var header in recordsQuery)
        {
            long id = oaiHelper.ConvertToId(header.OAI_Identifier);
            //ToDo add about to the RecordQueryResult object, currently its only null
            records.Add(new RecordQueryResult(header, oaiHelper.GetMetadata(id, metadataPrefix), null));
        }
    }
    finally
    {
        // NOTE(review): entityManager is NOT disposed here (only datasetManager
        // and entityPermissionManager are) — possible resource leak, confirm.
        datasetManager.Dispose();
        entityPermissionManager.Dispose();
    }

    // a list is complete once the cursor plus this page reaches the full size
    bool isCompleted = isResumption ? Properties.resumptionTokens[resumptionToken].Cursor + records.Count >= Properties.resumptionTokens[resumptionToken].CompleteListSize : false;
    XElement list = new XElement(verb, isRecord ? GetListRecords(records, isAbout) : GetListIdentifiers(records), isResumption ? /* add resumption token or not */ MlEncode.ResumptionToken(Properties.resumptionTokens[resumptionToken], resumptionToken, isCompleted) : null);
    if (isResumption)
    {
        if (isCompleted) { Properties.resumptionTokens.Remove(resumptionToken); }
        else { Properties.resumptionTokens[resumptionToken].Cursor = Properties.resumptionTokens[resumptionToken].Cursor + records.Count; }
    }
    return(CreateXml(new XElement[] { request, list }));
}
/// <summary>
/// create a model to fill the table of My Dataset: one row per dataset the
/// current user has any effective right on, with title, description,
/// per-right check marks and metadata-validity flag.
/// </summary>
/// <seealso cref="ShowMyDatasets"/>
/// <returns>Grid model backed by a DataTable.</returns>
public ActionResult _CustomMyDatasetBinding()
{
    DataTable model = new DataTable();
    ViewData["PageSize"] = 10;
    ViewData["CurrentPage"] = 1;

    #region header

    List<HeaderItem> headerItems = CreateHeaderItems();
    ViewData["DefaultHeaderList"] = headerItems;

    #endregion header

    model = CreateDataTable(headerItems);

    DatasetManager datasetManager = new DatasetManager();
    EntityPermissionManager entityPermissionManager = new EntityPermissionManager();
    UserManager userManager = new UserManager();
    EntityManager entityManager = new EntityManager();
    try
    {
        var entity = entityManager.FindByName("Dataset");
        var user = userManager.FindByNameAsync(GetUsernameOrDefault()).Result;

        List<long> gridCommands = datasetManager.GetDatasetLatestIds();
        // NOTE(review): the original discarded the result of
        // gridCommands.Skip(CurrentPage).Take(PageSize) — a deferred LINQ query
        // whose result was never assigned — so paging was never applied.
        // Behavior kept (all datasets loaded); confirm intended paging before
        // enabling, e.g. gridCommands = gridCommands.Skip(...).Take(...).ToList();
        List<DatasetVersion> datasetVersions = datasetManager.GetDatasetLatestVersions(gridCommands, false);

        foreach (var dsv in datasetVersions)
        {
            var datasetId = dsv.Dataset.Id;

            // effective rights bitmask for the current user on this dataset
            int rights = entityPermissionManager.GetEffectiveRights(user?.Id, entity.Id, datasetId);
            if (rights > 0)
            {
                Object[] rowArray = new Object[8];
                string isValid = "no";

                if (datasetManager.IsDatasetCheckedIn(datasetId))
                {
                    if (dsv.StateInfo != null)
                    {
                        isValid = DatasetStateInfo.Valid.ToString().Equals(dsv.StateInfo.State) ? "yes" : "no";
                    }
                    rowArray[0] = Convert.ToInt64(datasetId);
                    rowArray[1] = dsv.Title;
                    rowArray[2] = dsv.Description;
                }
                else
                {
                    rowArray[0] = Convert.ToInt64(datasetId);
                    rowArray[1] = "";
                    rowArray[2] = "Dataset is just in processing.";
                }

                rowArray[3] = (rights & (int)RightType.Read) > 0 ? "✔" : "✘";
                rowArray[4] = (rights & (int)RightType.Write) > 0 ? "✔" : "✘";
                rowArray[5] = (rights & (int)RightType.Delete) > 0 ? "✔" : "✘";
                rowArray[6] = (rights & (int)RightType.Grant) > 0 ? "✔" : "✘";
                rowArray[7] = isValid;

                // the original called model.NewRow() twice per row; once is enough
                DataRow dataRow = model.NewRow();
                dataRow.ItemArray = rowArray;
                model.Rows.Add(dataRow);
            }
        }
        // exceptions now propagate unchanged — the original `catch { throw ex; }`
        // destroyed the stack trace (CA2200)
        return View(new GridModel(model));
    }
    finally
    {
        datasetManager.Dispose();
        entityPermissionManager.Dispose();
        entityManager.Dispose();
        userManager.Dispose();
    }
}
// GET: DQ
/// <summary>
/// Builds the data-quality (DQ) model for one dataset version: dataset type,
/// performers, read permission, metadata validity and comparison statistics,
/// mostly read from pre-computed CSV files under C:\Data\DatasetQualities\.
/// </summary>
/// <param name="datasetId">Id of the dataset being inspected.</param>
/// <param name="versionId">Id of the dataset version being inspected.</param>
/// <returns>Partial view backed by the populated DQModels instance.</returns>
public ActionResult ShowDQ(long datasetId, long versionId)
{
    DQModels dqModel = new DQModels();
    Dictionary <string, string> datasetInfo = new Dictionary <string, string>();
    List <performer> performers = new List <performer>();
    List <varVariable> varVariables = new List <varVariable>();
    Dictionary <string, double> datasetSize = new Dictionary <string, double>();
    DatasetManager dm = new DatasetManager();
    DataStructureManager dsm = new DataStructureManager();
    EntityPermissionManager entityPermissionManager = new EntityPermissionManager();
    PartyManager pm = new PartyManager();
    UserManager um = new UserManager();
    DatasetVersion dsv = new DatasetVersion();
    UserManager userManager = new UserManager();

    // data quality files
    // NOTE(review): this probe only checks that the CSV can be opened; the
    // reader is neither used nor closed and every error is swallowed.
    try
    {
        string pathPerformerDataset = @"C:\Data\DatasetQualities\PerformerDataset.csv";
        StreamReader readerPerformerDataset = new StreamReader(pathPerformerDataset);
    }
    catch (Exception ex) { }

    //////////////////////////////////////////////////////////////////////////
    DatasetVersion currentDatasetVersion = dm.GetDatasetVersion(versionId); //Current dataset version
    DataStructure currentDataStr = dsm.AllTypesDataStructureRepo.Get(currentDatasetVersion.Dataset.DataStructure.Id); //current data structure
    var currentUser = userManager.FindByNameAsync(GetUsernameOrDefault()).Result; //Find current user

    //Find the dataset Type: "tabular" for structured data structures, else "file"
    string currentDatasetType = "file";
    if (currentDataStr.Self.GetType() == typeof(StructuredDataStructure)) { currentDatasetType = "tabular"; }
    dqModel.type = currentDatasetType;

    #region performers

    #region dataset's performers

    try
    {
        string pathPerformerDataset = @"C:\Data\DatasetQualities\PerformerDataset.csv";
        StreamReader readerPerformerDataset = new StreamReader(pathPerformerDataset);
        string pathPerformers = @"C:\Data\DatasetQualities\Performers.csv";
        StreamReader readerPerformers = new StreamReader(pathPerformers);
        string performerLine;
        List <string> pfs = new List <string>();
        List <performer> ps = new List <performer>();
        // first pass: usernames attached to this dataset ("username,datasetId" rows)
        while ((performerLine = readerPerformerDataset.ReadLine()) != null)
        {
            string[] s = performerLine.Split(',');
            if (long.Parse(s[1]) == datasetId) { pfs.Add(s[0]); }
        }
        // second pass: resolve name, rate and involved datasets for each performer
        while ((performerLine = readerPerformers.ReadLine()) != null)
        {
            string[] s = performerLine.Split(',');
            if (pfs.Contains(s[0]))
            {
                performer p = new performer();
                p.performerName = FindPerformerNameFromUsername(um, s[0]); //find performer name
                p.performerRate = int.Parse(s[1]);
                List <long> pfIds = FindDatasetsFromPerformerUsername(dm, um, s[0]); //Find all datasets in wich the username is involved.
                p.DatasetIds = pfIds;
                ps.Add(p);
            }
        }
        dqModel.performers = ps;
        readerPerformerDataset.Close();
        readerPerformers.Close();
    }
    catch (Exception ex) { }

    #endregion

    #endregion //performers

    //dqModel.isPublic = entityPermissionManager.GetRights(null, 1, datasetId); //check if dataset is public
    //check the read permission for current dataset
    bool rPermission = entityPermissionManager.HasEffectiveRight(currentUser.UserName, typeof(Dataset), datasetId, Security.Entities.Authorization.RightType.Read); //find if user has read permission
    if (rPermission == true) //has read permission or public = readable
    { dqModel.readable = 1; }
    else { dqModel.readable = 0; } //cannot read

    //Check if the current metadata is valid
    if (currentDatasetVersion.StateInfo != null)
    {
        dqModel.isValid = DatasetStateInfo.Valid.ToString().Equals(currentDatasetVersion.StateInfo.State) ? 1 : 0; //1:valid; 0:invalid.
    }
    else { dqModel.isValid = 0; }

    List <long> datasetIds = dm.GetDatasetLatestIds();
    dqModel.allDatasets = datasetIds.Count;

    // accumulators for dataset-wide statistics
    // NOTE(review): most of these locals are never written/read below — they
    // look like leftovers from the logic that moved into dqSync; confirm.
    List <int> metadataRates = new List <int>();
    List <int> dsDescLength = new List <int>();
    List <int> dstrDescLength = new List <int>();
    List <int> dstrUsage = new List <int>();
    List <int> datasetSizeTabular = new List <int>();
    List <int> datasetRows = new List <int>();
    List <int> datasetCols = new List <int>();
    List <double> datasetSizeFiles = new List <double>();
    double datasetSizeFile = new double();
    List <int> datasetFileNumber = new List <int>();
    List <int> restrictions = new List <int>();
    int fileNumber = 0;
    List <int> sizeTabular = new List <int>(); //collect size, column number, and row number for one dataset
    int publicDatasets = 0;
    int restrictedDatasets = 0;
    int fileDatasets = 0;
    int tabularDatasets = 0;
    int rpTrue = 0;
    int rp;
    int validMetadata = 0;
    int allValidMetadas = 0;

    // count how many checked-in datasets the current user can read
    foreach (long Id in datasetIds) //for each dataset
    {
        if (dm.IsDatasetCheckedIn(Id))
        {
            DatasetVersion datasetVersion = dm.GetDatasetLatestVersion(Id); //get last dataset versions
            //If user has read permission
            rPermission = entityPermissionManager.HasEffectiveRight(currentUser.UserName, typeof(Dataset), Id, Security.Entities.Authorization.RightType.Read);
            if (rPermission == true) //has read permission or public = readable
            { rp = 1; rpTrue += 1; }
            else { rp = 0; } //cannot read
        }
    }
    dqModel.allReadables = rpTrue;

    // per-dataset statistics pre-computed (by dqSync) into datasetInfo.csv
    string pathDatasetInfo = @"C:\Data\DatasetQualities\datasetInfo.csv";
    StreamReader readerDatasetInfo = new StreamReader(pathDatasetInfo);
    List <datasetInformation> datasetsInformation = new List <datasetInformation>();
    try
    {
        string lineDatasetInfo;
        while ((lineDatasetInfo = readerDatasetInfo.ReadLine()) != null)
        {
            // semicolon-separated columns:
            // id;type;metaValid;metaCompletion;descLen;strDescLen;strUsage;cols;rows;files;size;performers
            string[] dsInf = lineDatasetInfo.Split(';');
            datasetInformation datasetInformation = new datasetInformation();
            long id = long.Parse(dsInf[0]);
            datasetInformation.datasetId = id;
            datasetInformation.title = dm.GetDatasetLatestVersion(id).Title;
            rPermission = entityPermissionManager.HasEffectiveRight(currentUser.UserName, typeof(Dataset), id, Security.Entities.Authorization.RightType.Read);
            if (rPermission == true) { datasetInformation.readable = 1; }
            if (rPermission == false) { datasetInformation.readable = 0; }
            datasetInformation.type = dsInf[1];
            datasetInformation.metadataValidation = int.Parse(dsInf[2]);
            datasetInformation.metadataComplition = int.Parse(dsInf[3]);
            datasetInformation.descriptionLength = int.Parse(dsInf[4]);
            datasetInformation.structureDescriptionLength = int.Parse(dsInf[5]);
            datasetInformation.structureUsage = int.Parse(dsInf[6]);
            datasetInformation.columnNumber = int.Parse(dsInf[7]);
            datasetInformation.rowNumber = int.Parse(dsInf[8]);
            datasetInformation.fileNumber = int.Parse(dsInf[9]);
            datasetInformation.datasetSizeFile = double.Parse(dsInf[10]);
            string[] pfrms = dsInf[11].Split(',');
            List <string> performerNames = new List <string>();
            foreach (string p in pfrms) { performerNames.Add(p); }
            datasetInformation.performerNames = performerNames;
            datasetsInformation.Add(datasetInformation);
            // when the row belongs to the requested dataset, copy its numbers into the model
            if (datasetId == id)
            {
                dqModel.metadataComplition.requiredFields = int.Parse(dsInf[2]);
                dqModel.metadataComplition.totalFields = int.Parse(dsInf[3]);
                dqModel.datasetDescriptionLength.currentDescriptionLength = int.Parse(dsInf[4]);
                dqModel.dataStrDescriptionLength.currentDescriptionLength = int.Parse(dsInf[5]);
                dqModel.dataStrUsage.currentDataStrUsage = int.Parse(dsInf[6]);
                dqModel.columnNumber = datasetInformation.columnNumber;
                dqModel.rowNumber = datasetInformation.rowNumber;
                dqModel.fileNumber = datasetInformation.fileNumber;
                dqModel.datasetTotalSize.currentTotalSize = datasetInformation.datasetSizeFile;
            }
        }
    }
    catch { }
    dqModel.datasetsInformation = datasetsInformation;
    readerDatasetInfo.Close();

    //CURRENT DATASET VERSION
    //dqModel.metadataComplition.totalFields = GetMetadataRate(currentDatasetVersion); //current dataset version: metadata rate
    //dqModel.metadataComplition.requiredFields = 100; //Need to calculate: metadataStructureId = dsv.Dataset.MetadataStructure.Id;
    //dqModel.datasetDescriptionLength.currentDescriptionLength = currentDatasetVersion.Description.Length; // Current dataset vesion: dataset description length
    //dqModel.dataStrDescriptionLength.currentDescriptionLength = currentDatasetVersion.Dataset.DataStructure.Description.Length; // Current dataset version: data structure description length
    //dqModel.dataStrUsage.currentDataStrUsage = currentDataStr.Datasets.Count() - 1; // Current dataset version: how many times the data structure is used in other datasets

    #region comparision

    // min/median/max values for every quality measure, pre-computed into Comparison.csv
    try
    {
        string pathComparison = @"C:\Data\DatasetQualities\Comparison.csv";
        StreamReader readerComparison = new StreamReader(pathComparison);
        string infoline;
        List <int> infos = new List <int>();
        while ((infoline = readerComparison.ReadLine()) != null)
        {
            // each row: measureName,min,median,max (except "allValidMetadas": name,count)
            string[] s = infoline.Split(',');
            if (s[0] == "performersActivity") { dqModel.performersActivity.minActivity = int.Parse(s[1]); dqModel.performersActivity.medianActivity = int.Parse(s[2]); dqModel.performersActivity.maxActivity = int.Parse(s[3]); }
            else if (s[0] == "type") { dqModel.allDatasets = int.Parse(s[1]); dqModel.tabularDatasets = int.Parse(s[2]); dqModel.fileDatasets = int.Parse(s[3]); }
            else if (s[0] == "metadataRates") { dqModel.metadataComplition.minRate = int.Parse(s[1]); dqModel.metadataComplition.medianRate = int.Parse(s[2]); dqModel.metadataComplition.maxRate = int.Parse(s[3]); }
            else if (s[0] == "allValidMetadas") { dqModel.allValids = int.Parse(s[1]); }
            else if (s[0] == "datasetDescriptionLength") { dqModel.datasetDescriptionLength.minDescriptionLength = int.Parse(s[1]); dqModel.datasetDescriptionLength.medianDescriptionLength = int.Parse(s[2]); dqModel.datasetDescriptionLength.maxDescriptionLength = int.Parse(s[3]); }
            else if (s[0] == "dataStrDescriptionLength") { dqModel.dataStrDescriptionLength.minDescriptionLength = int.Parse(s[1]); dqModel.dataStrDescriptionLength.medianDescriptionLength = int.Parse(s[2]); dqModel.dataStrDescriptionLength.maxDescriptionLength = int.Parse(s[3]); }
            else if (s[0] == "dataStrUsage") { dqModel.dataStrUsage.minDataStrUsage = int.Parse(s[1]); dqModel.dataStrUsage.medianDataStrUsage = int.Parse(s[2]); dqModel.dataStrUsage.maxDataStrUsage = int.Parse(s[3]); }
            else if (s[0] == "datasetColNumber") { dqModel.datasetColNumber.minColNumber = int.Parse(s[1]); dqModel.datasetColNumber.medianColNumber = int.Parse(s[2]); dqModel.datasetColNumber.maxColNumber = int.Parse(s[3]); }
            else if (s[0] == "datasetRowNumber") { dqModel.datasetRowNumber.minRowNumber = int.Parse(s[1]); dqModel.datasetRowNumber.medianRowNumber = int.Parse(s[2]); dqModel.datasetRowNumber.maxRowNumber = int.Parse(s[3]); }
            else if (s[0] == "datasetFileNumber") { dqModel.datasetFileNumber.minFileNumber = int.Parse(s[1]); dqModel.datasetFileNumber.medianFileNumber = int.Parse(s[2]); dqModel.datasetFileNumber.maxFileNumber = int.Parse(s[3]); }
            else if (s[0] == "datasetTotalSizeFiles") { dqModel.datasetTotalSize.minSizeFile = double.Parse(s[1]); dqModel.datasetTotalSize.medianSizeFile = double.Parse(s[2]); dqModel.datasetTotalSize.maxSizeFile = double.Parse(s[3]); }
        }
        readerComparison.Close();
    }
    catch (Exception ex) { }

    #endregion

    ///////////////////////////////////////////////////////////////////////

    #region TABULAR FORMAT DATASET

    //If it is a tabular format dataset
    if (currentDatasetType == "tabular")
    {
        // per-variable details pre-computed into Variables.csv
        string pathVariables = @"C:\Data\DatasetQualities\Variables.csv";
        StreamReader readerVariables = new StreamReader(pathVariables);
        string varLine;
        while ((varLine = readerVariables.ReadLine()) != null)
        {
            string[] varDetail = varLine.Split(',');
            if (varDetail[0] == datasetId.ToString())
            {
                varVariable v = new varVariable();
                v.varLabel = varDetail[1];
                v.varType = varDetail[2];
                v.varDescription = varDetail[3];
                v.varUsage = int.Parse(varDetail[4]);
                v.missing = int.Parse(varDetail[5]);
                varVariables.Add(v);
            }
        }
        dqModel.varVariables = varVariables;
        readerVariables.Close();
    }

    // NOTE(review): the following span appears redacted/garbled in the source
    // (credential-like scrub in the string and missing declarations for
    // pathFiles / fileLine / filesInformations, plus an unmatched brace below);
    // kept byte-identical — restore the original from version control.
    string serverName = "http://*****:*****@"C:\Data\DatasetQualities\Files.csv";
    try
    {
        StreamReader readerFiles = new StreamReader(pathFiles);
        while ((fileLine = readerFiles.ReadLine()) != null)
        {
            string[] fileDetail = fileLine.Split(',');
            if (fileDetail[0] == datasetId.ToString())
            {
                fileInformation f = new fileInformation();
                f.fileName = fileDetail[1];
                f.fileFormat = fileDetail[2];
                double d = Convert.ToDouble(fileDetail[3]);
                f.fileSize = d;
                filesInformations.Add(f);
            }
        }
        readerFiles.Close();
    }
    catch { }
    dqModel.filesInformation = filesInformations;
    }

    //if (currentDatasetVersion != null)
    //{
    // List<ContentDescriptor> contentDescriptors = currentDatasetVersion.ContentDescriptors.ToList();
    // double totalSize = 0;
    // if (contentDescriptors.Count > 0)
    // {
    // foreach (ContentDescriptor cd in contentDescriptors)
    // {
    // if (cd.Name.ToLower().Equals("unstructureddata"))
    // {
    // fileInformation fileInformation = new fileInformation();
    // string uri = cd.URI;
    // //get the file path
    // try
    // {
    // String path = Server.UrlDecode(uri);
    // path = Path.Combine(AppConfiguration.DataPath, path);
    // Stream fileStream = System.IO.File.OpenRead(path);
    // if (fileStream != null)
    // {
    // FileStream fs = fileStream as FileStream;
    // if (fs != null)
    // {
    // //get file information
    // FileInformation fileInfo = new FileInformation(fs.Name.Split('\\').LastOrDefault(), MimeMapping.GetMimeMapping(fs.Name), (uint)fs.Length, uri);
    // fileInformation.fileName = fileInfo.Name.Split('.')[0]; //file name
    // fileInformation.fileFormat = fileInfo.Name.Split('.')[1].ToLower(); //file extension
    // fileInformation.fileSize = fileInfo.Size; //file size
    // totalSize += fileInfo.Size;
    // }
    // }
    // }
    // catch
    // {
    // }
    // filesInformation.Add(fileInformation);
    // }
    //}
    //}
    //dqModel.fileNumber = contentDescriptors.Count;
    //dqModel.datasetTotalSize.currentTotalSize = totalSize;
    //}
    //dqModel.filesInformation = filesInformation;
    //}

    #endregion

    return(PartialView(dqModel));
}
/// <summary>
/// Rebuilds the data-quality CSV snapshot files under C:\Data\DatasetQualities\
/// (Performers, PerformerDataset, Comparison, datasetInfo, Variables, Files) by
/// iterating over every latest dataset version and collecting performer activity,
/// metadata completion rates, description lengths, tabular column/row counts and
/// file counts/sizes. On failure, redirects to the dqm "dqError" action with the
/// exception message added to ModelState.
/// NOTE(review): the six StreamWriters below are not wrapped in using/finally, so
/// any exception before the Close() calls leaks the file handles until GC.
/// </summary>
/// <returns>the default view on success; a redirect to dqError on failure</returns>
public ActionResult dqSync() {
    using (var dm = new DatasetManager()) {
        List <long> datasetIds = dm.GetDatasetLatestIds(); //get latest
        // NOTE(review): entityPermissionManager and dsm are never disposed in this method.
        EntityPermissionManager entityPermissionManager = new EntityPermissionManager();
        DataStructureManager dsm = new DataStructureManager();
        try {
            //datasetManager.SyncView(datasetIds, ViewCreationBehavior.Create | ViewCreationBehavior.Refresh);
            // if the viewData has a model error, the redirect forgets about it.
            // Output files: one writer per CSV snapshot (paths are hard-coded).
            string pathPerformers = @"C:\Data\DatasetQualities\Performers.csv";
            StreamWriter writerPerformers = new StreamWriter(pathPerformers);
            string pathPerformerDataset = @"C:\Data\DatasetQualities\PerformerDataset.csv";
            StreamWriter writerPerformerDataset = new StreamWriter(pathPerformerDataset);
            string pathComparison = @"C:\Data\DatasetQualities\Comparison.csv";
            StreamWriter writerComparison = new StreamWriter(pathComparison);
            string pathDatasets = @"C:\Data\DatasetQualities\datasetInfo.csv";
            StreamWriter writerDatasets = new StreamWriter(pathDatasets);
            string pathVariable = @"C:\Data\DatasetQualities\Variables.csv";
            StreamWriter writerVariable = new StreamWriter(pathVariable);
            string pathFiles = @"C:\Data\DatasetQualities\Files.csv";
            StreamWriter writerFiles = new StreamWriter(pathFiles);
            // Variables.csv is the only file that gets a header row.
            string variableHeader = "datasetId,VarLabel,varType,varDescription,varUse,varMissing";
            writerVariable.WriteLine(variableHeader);
            string performer;
            List <string> performerDataset = new List <string>(); // NOTE(review): never used below
            Dictionary <string, int> performerCount = new Dictionary <string, int>(); // performer -> number of datasets touched
            List <int> metadataRates = new List <int>(); // per-dataset metadata completion %
            int allValidMetadas = 0;
            //int publicDatasets = 0; //could not get result
            //int restrictedDatasets = 0; //could not get result
            // Per-dataset accumulators; one entry is appended per dataset (or per matching type).
            List <int> dsDescLength = new List <int>();
            List <int> dstrDescLength = new List <int>();
            List <int> dstrUsage = new List <int>();
            List <int> datasetSizeTabular = new List <int>();
            List <int> datasetRows = new List <int>();
            List <int> datasetCols = new List <int>();
            List <double> datasetSizeFiles = new List <double>(); //all files in all datasets
            List <int> datasetFileNumber = new List <int>();
            List <int> sizeTabular = new List <int>(); //collect size, column number, and row number for one dataset
            int fileDatasets = 0;
            int tabularDatasets = 0;
            int fileNumber = 0;
            List <double> datasetTotalSize = new List <double>(); //total file size of each dataset
            List <double> sizeFile = new List <double>(); // NOTE(review): never used below
            ///////////////////////////
            foreach (long datasetId in datasetIds) {
                DatasetVersion datasetLatestVersion = dm.GetDatasetLatestVersion(datasetId); //get last dataset versions
                DataStructure dataStr = dsm.AllTypesDataStructureRepo.Get(datasetLatestVersion.Dataset.DataStructure.Id); //get data structure
                #region performers
                // Collect the distinct performers over all versions of this dataset.
                List <string> pers = new List <string>();
                var dsvs = dm.GetDatasetVersions(datasetId);
                foreach (var d in dsvs) {
                    performer = d.ModificationInfo.Performer;
                    if (performer != null && !pers.Contains(performer)) {
                        pers.Add(performer); //a list of performers
                    }
                }
                foreach (var p in pers) {
                    writerPerformerDataset.WriteLine(p + "," + datasetId); //fill the file PerformerDataset with a list of 'performer,datasetId'
                    if (performerCount.ContainsKey(p)) {
                        performerCount[p] += 1;
                    }
                    else {
                        performerCount.Add(p, 1);
                    }
                }
                #endregion
                #region allValidmetadatas
                long metadataStructureId = dm.DatasetRepo.Get(datasetId).MetadataStructure.Id; // NOTE(review): never used below
                int validMetadata = 0;
                if (datasetLatestVersion.StateInfo != null) {
                    validMetadata = DatasetStateInfo.Valid.ToString().Equals(datasetLatestVersion.StateInfo.State) ? 1 : 0; //1:valid; 0:invalid.
                }
                else {
                    validMetadata = 0;
                }
                if (validMetadata == 1) //count how many datasets have valid metadata
                {
                    allValidMetadas += 1;
                }
                #endregion
                #region metadataRates
                // Walk the metadata XML and count how many MetadataAttribute elements
                // have a text value, to derive a completion percentage.
                XmlDocument metadata = datasetLatestVersion.Metadata;
                string xmlFrag = metadata.OuterXml;
                List <int> metaInfo = new List <int>(); // NOTE(review): never used below
                NameTable nt = new NameTable();
                XmlNamespaceManager nsmgr = new XmlNamespaceManager(nt);
                // Create the XmlParserContext.
                XmlParserContext context = new XmlParserContext(null, nsmgr, null, XmlSpace.None);
                // Create the reader.
                XmlTextReader reader = new XmlTextReader(xmlFrag, XmlNodeType.Element, context);
                int countMetaAttr = 0;
                int countMetaComplition = 0;
                // Parse the XML and display each node.
                while (reader.Read()) {
                    //XmlTextReader myReader = reader;
                    if (reader.NodeType == XmlNodeType.Element) {
                        if (reader.HasAttributes && reader.GetAttribute("type") == "MetadataAttribute") {
                            countMetaAttr += 1;
                            reader.Read();
                            if (reader.NodeType == XmlNodeType.Text) {
                                string text = reader.Value;
                                countMetaComplition += 1;
                            }
                        }
                    }
                }
                // Close the reader.
                reader.Close();
                // NOTE(review): divides by zero (DivideByZeroException, caught by the outer
                // catch) when the metadata contains no MetadataAttribute elements.
                int rate = (countMetaComplition * 100) / countMetaAttr; //percentage of all metadata fields contains information
                metadataRates.Add(rate);
                #endregion
                ////find how many public dataset exist
                //int publicRights = entityPermissionManager.GetRights(null, 1, datasetId); //1:public; 0:restricted
                //if (publicRights == 1) { publicDatasets += 1; }
                //if (publicRights == 0) { restrictedDatasets += 1; }
                ///issue here is that when a dataset has an empty dataset description field, the datasetLatestVersion.Description has the "not available" as value.
                int datasetLatestVersionDescriptionLength = 0;
                if (datasetLatestVersion.Description == "not available") {
                    dsDescLength.Add(0);
                }
                else {
                    dsDescLength.Add(datasetLatestVersion.Description.Length); //get dataset description length
                    datasetLatestVersionDescriptionLength = datasetLatestVersion.Description.Length;
                }
                dstrDescLength.Add(datasetLatestVersion.Dataset.DataStructure.Description.Length); //get data structure description length
                dstrUsage.Add(dataStr.Datasets.Count() - 1); //data structure is used in how many other datasets (doesn't contain the current one)
                string type = "file";
                if (dataStr.Self.GetType() == typeof(StructuredDataStructure)) {
                    type = "tabular";
                } //get dataset type
                int colNum = 0;
                int rowNum = 0;
                #region tabular dataset
                if (type == "tabular") {
                    tabularDatasets += 1;
                    try {
                        DataTable table = dm.GetLatestDatasetVersionTuples(datasetId, true);
                        DataRowCollection rowss = table.Rows;
                        DataColumnCollection columns = table.Columns;
                        StructuredDataStructure sds = dsm.StructuredDataStructureRepo.Get(datasetLatestVersion.Dataset.DataStructure.Id); //get data structure
                        var variables = sds.Variables; //get variables
                        //sizeTabular[1] = variables.Count; //columns.Count - 4;
                        //if (sizeTabular[1] < 0) //if data structure has not been designed.
                        //{
                        //    sizeTabular[1] = 0;
                        //}
                        //sizeTabular[2] = rowss.Count;
                        //sizeTabular[0] = sizeTabular[1] * sizeTabular[2];
                        colNum = variables.Count;
                        rowNum = rowss.Count;
                        #region variables
                        // For each variable: type, usage count, and % of non-missing cells.
                        int columnNumber = -1; //First four columns are added from system.
                        if (variables.Count() > 0) {
                            foreach (var variable in variables) {
                                columnNumber += 1;
                                //string missingValue = variable.MissingValue; //MISSING VALUE
                                List <string> missingValues = new List <string>(); //creat a list contains missing values
                                DataTable missTable = new DataTable(); // NOTE(review): never used below
                                foreach (var missValue in variable.MissingValues) //if data is equal missing value
                                {
                                    missingValues.Add(missValue.Placeholder);
                                }
                                var varUse = variable.DataAttribute.UsagesAsVariable.Count() - 1;
                                string varType = variable.DataAttribute.DataType.SystemType;
                                int varMissing = 100; //suppose 100% is completed
                                try {
                                    if (rowss.Count > 0) {
                                        int missing = rowss.Count;
                                        foreach (DataRow row in rowss) {
                                            var value = row.ItemArray[columnNumber]; //.ToString();
                                            if (value == null || missingValues.Contains(value.ToString())) //check if cell is emty or contains a missing value
                                            {
                                                missing -= 1;
                                            }
                                        }
                                        varMissing = 100 * missing / rowss.Count; //% of existing values
                                    }
                                    else {
                                        varMissing = 0;
                                    }
                                }
                                catch {
                                    varMissing = 0;
                                }
                                string variableLine = datasetId + "," //0: dataset Id
                                                      + variable.Label + "," //1: variable name
                                                      + varType + "," //2: data type
                                                      + variable.Description.Count() + "," //3: variable description length
                                                      + varUse + "," //4: variable usage
                                                      + varMissing; //5: % completed
                                writerVariable.WriteLine(variableLine);
                            }
                        }
                        #endregion
                    }
                    catch {
                        // Tabular data could not be read: record the dataset as empty.
                        colNum = 0;
                        rowNum = 0;
                        //sizeTabular.Add(0);
                        //sizeTabular.Add(0);
                        //sizeTabular.Add(0);
                    }
                    datasetSizeTabular.Add(colNum * rowNum); //sizeTabular[0]);
                    datasetCols.Add(colNum); // sizeTabular[1]); //column number
                    datasetRows.Add(rowNum); // sizeTabular[2]); //row number
                }
                #endregion
                #region file dataset
                else if (type == "file") {
                    fileDatasets += 1;
                    List <ContentDescriptor> contentDescriptors = datasetLatestVersion.ContentDescriptors.ToList();
                    fileNumber = contentDescriptors.Count;
                    //datasetFileNumber.Add(fileNumber);
                    //sizeFile.Add(fileNumber);
                    int fileNum = 0;
                    double totalSize = 0;
                    string fileInDataset = "";
                    if (contentDescriptors.Count > 0) {
                        foreach (ContentDescriptor cd in contentDescriptors) {
                            if (cd.Name.ToLower().Equals("unstructureddata")) {
                                fileNum += 1;
                                string uri = cd.URI;
                                String path = Server.UrlDecode(uri);
                                path = Path.Combine(AppConfiguration.DataPath, path);
                                try {
                                    // NOTE(review): fileStream is never closed/disposed here.
                                    Stream fileStream = System.IO.File.OpenRead(path);
                                    FileStream fs = fileStream as FileStream;
                                    if (fs != null) {
                                        FileInformation fileInfo = new FileInformation(fs.Name.Split('\\').LastOrDefault(), MimeMapping.GetMimeMapping(fs.Name), (uint)fs.Length, uri);
                                        totalSize += fileInfo.Size;
                                        fileInDataset = datasetId + "," + fileInfo.Name.Split('.')[0] + "," + fileInfo.Name.Split('.')[1].ToLower() + "," + fileInfo.Size; //datasetId,file name,file extension,file size
                                        writerFiles.WriteLine(fileInDataset);
                                    }
                                }
                                catch {
                                    datasetSizeFiles.Add(0); //file size
                                }
                            }
                        }
                        datasetFileNumber.Add(fileNum);
                        datasetTotalSize.Add(totalSize);
                    }
                    else {
                        datasetFileNumber.Add(0);
                        datasetTotalSize.Add(0);
                    }
                }
                #endregion
                //[0]datasetId, [1]dataType, [2]IsValid, [3]metadataComplitionRate,
                //[4]datasetDescLength, [5]dataStrDescrLength, [6]DataStrUsage,
                //[7]columns, [8]rows, [9]file numbers, [10]file sizes, [11]performers
                string datasetInfo = datasetId + ";" + type + ";" + validMetadata + ";" + rate + ";" + datasetLatestVersionDescriptionLength + ";" + datasetLatestVersion.Dataset.DataStructure.Description.Length + ";" + (dataStr.Datasets.Count() - 1);
                if (type == "tabular") {
                    datasetInfo = datasetInfo + ";" + datasetCols.Last() //column number
                                  + ";" + datasetRows.Last() //row number
                                  + ";0;0"; //file number and size
                }
                if (type == "file") {
                    datasetInfo = datasetInfo + ";0;0" //column and row number
                                  + ";" + datasetFileNumber.Last() //sizeFile[0] //file number
                                  + ";" + datasetTotalSize.Last(); //sizeFile[1]; //total size
                }
                string prfmrs = "";
                foreach (string p in pers) {
                    prfmrs = prfmrs + FindPerformerNameFromUsername(p) + ",";
                }
                // NOTE(review): String.Remove returns a new string; this result is discarded,
                // so the trailing comma is NOT stripped. Also throws ArgumentOutOfRangeException
                // (caught by the outer catch) when pers is empty (prfmrs.Length - 1 == -1).
                // Intended fix: prfmrs = prfmrs.Remove(prfmrs.Length - 1, 1); guarded by a length check.
                prfmrs.Remove(prfmrs.Length - 1, 1);
                datasetInfo = datasetInfo + ";" + prfmrs;
                writerDatasets.WriteLine(datasetInfo);
            }
            writerDatasets.Close();
            #region performersInFile //write a list of 'performer,activity' in Performers.csv
            foreach (string p in performerCount.Keys) {
                string l = p + "," + performerCount[p];
                writerPerformers.WriteLine(l);
            }
            // performer activities
            // NOTE(review): Min()/Max() throw InvalidOperationException (caught by the outer
            // catch) when there are no performers / no datasets at all.
            int performerMin = performerCount.Values.Min();
            int performerMax = performerCount.Values.Max();
            List <int> performerActivities = new List <int>();
            foreach (int s in performerCount.Values) {
                performerActivities.Add(s);
            }
            double performerMedian = medianCalc(performerActivities);
            string performerCompare = "performersActivity," + performerMin + "," + performerMedian + "," + performerMax;
            writerComparison.WriteLine(performerCompare); //performersActivity
            writerPerformers.Close();
            writerPerformerDataset.Close();
            #endregion
            #region datasetInfo in file
            #endregion //datasetInfo in file
            #region compare in file
            // Comparison.csv rows: "<metric>,<min>,<median>,<max>" (or "<metric>,<count>").
            string m = "metadataRates," + metadataRates.Min() + "," + medianCalc(metadataRates) + "," + metadataRates.Max();
            writerComparison.WriteLine(m);
            string allValids = "allValidMetadas," + allValidMetadas;
            writerComparison.WriteLine(allValids);
            //string pd = "publicDatasets," + publicDatasets;
            //string rd = "restrictedDatasets," + restrictedDatasets;
            //writerComparison.WriteLine(pd);
            //writerComparison.WriteLine(rd);
            string datasetDescriptionLength = "datasetDescriptionLength," + dsDescLength.Min() + "," + medianCalc(dsDescLength) + "," + dsDescLength.Max();
            string dataStrDescriptionLength = "dataStrDescriptionLength," + dstrDescLength.Min() + "," + medianCalc(dstrDescLength) + "," + dstrDescLength.Max();
            string dataStrUsage = "dataStrUsage," + dstrUsage.Min() + "," + medianCalc(dstrUsage) + "," + dstrUsage.Max();
            writerComparison.WriteLine(datasetDescriptionLength);
            writerComparison.WriteLine(dataStrDescriptionLength);
            writerComparison.WriteLine(dataStrUsage);
            string typeDataset = "type," + (tabularDatasets + fileDatasets) + "," + tabularDatasets + "," + fileDatasets;
            writerComparison.WriteLine(typeDataset);
            string cols = "datasetColNumber," + datasetCols.Min() + "," + medianCalc(datasetCols) + "," + datasetCols.Max();
            string rows = "datasetRowNumber," + datasetRows.Min() + "," + medianCalc(datasetRows) + "," + datasetRows.Max();
            string fileNums = "";
            string fileSizes = "";
            string totalFileSize = "";
            //if (datasetFileNumber.Count > 0)
            //{
            fileNums = "datasetFileNumber," + datasetFileNumber.Min() + "," + medianCalc(datasetFileNumber) + "," + datasetFileNumber.Max();
            //fileSizes = "datasetSizeFiles," + datasetSizeFiles.Min() + "," + medianCalc(datasetSizeFiles) + "," + datasetSizeFiles.Max();
            totalFileSize = "datasetTotalSizeFiles," + datasetTotalSize.Min() + "," + medianCalc(datasetTotalSize) + "," + datasetTotalSize.Max();
            //}
            //else
            //{
            //    fileNums = "datasetFileNumber," + 0 + "," + 0 + "," + 0;
            //    //fileSizes = "datasetSizeFiles," + 0 + "," + 0 + "," + 0;
            //    totalFileSize = "datasetTotalSizeFiles," + 0 + "," + 0 + "," + 0;
            //}
            writerComparison.WriteLine(cols);
            writerComparison.WriteLine(rows);
            writerComparison.WriteLine(fileNums);
            // NOTE(review): fileSizes is always "" here (its assignment above is commented
            // out), so this writes an empty line into Comparison.csv.
            writerComparison.WriteLine(fileSizes);
            writerComparison.WriteLine(totalFileSize);
            #endregion
            writerComparison.Close();
            //writerDatasets.Close();
            writerVariable.Close();
            writerFiles.Close();
            return(View());
            //return RedirectToAction("Index", new { area = "dqm" });
        }
        catch (Exception ex) {
            ViewData.ModelState.AddModelError("", $@"'{ex.Message}'");
            return(RedirectToAction("dqError", new { area = "dqm" }));
        }
    }
}
/// <summary>
/// Creates the DataTable backing the "My Datasets" grid: one row per dataset the
/// current user has any effective right on, with title/description and one
/// check/cross mark per right (read, write, delete, download, grant).
/// </summary>
/// <remarks>
/// Fixes over the previous version: the Skip/Take pagination result was discarded
/// (LINQ is deferred and its return value was ignored), so the grid always
/// contained ALL datasets; the page window is now materialized and applied.
/// A redundant first model.NewRow() allocation was also removed.
/// </remarks>
/// <seealso cref="ShowMyDatasets"/>
/// <param>NA</param>
/// <returns>a view over a GridModel wrapping the populated DataTable</returns>
public ActionResult _CustomMyDatasetBinding()
{
    ViewData["PageSize"] = 10;
    ViewData["CurrentPage"] = 1;

    #region header
    List<HeaderItem> headerItems = CreateHeaderItems();
    ViewData["DefaultHeaderList"] = headerItems;
    #endregion

    DataTable model = CreateDataTable(headerItems);

    DatasetManager datasetManager = new DatasetManager();
    EntityPermissionManager entityPermissionManager = new EntityPermissionManager();
    UserManager userManager = new UserManager();
    EntityManager entityManager = new EntityManager();
    try
    {
        var entity = entityManager.FindByName("Dataset");
        // NOTE(review): blocking on .Result risks deadlock/thread-pool starvation in MVC;
        // making this action async would be the proper fix but changes the signature.
        var user = userManager.FindByNameAsync(GetUsernameOrDefault()).Result;

        List<long> gridCommands = datasetManager.GetDatasetLatestIds();

        // Apply the page window. The previous code discarded the Skip/Take result, so
        // pagination never took effect. CurrentPage is assumed 1-based — TODO confirm
        // against the grid component.
        int pageSize = Convert.ToInt16(ViewData["PageSize"]);
        int currentPage = Convert.ToInt16(ViewData["CurrentPage"]);
        gridCommands = gridCommands.Skip((currentPage - 1) * pageSize).Take(pageSize).ToList();

        foreach (long datasetId in gridCommands)
        {
            // Skip datasets the user has no effective rights on.
            int rights = entityPermissionManager.GetEffectiveRights(user?.Id, entity.Id, datasetId);
            if (rights <= 0)
            {
                continue;
            }

            Object[] rowArray = new Object[8];
            rowArray[0] = Convert.ToInt64(datasetId);
            if (datasetManager.IsDatasetCheckedIn(datasetId))
            {
                // Title/description come from the latest checked-in version's metadata.
                DatasetVersion dsv = datasetManager.GetDatasetLatestVersion(datasetId);
                rowArray[1] = xmlDatasetHelper.GetInformationFromVersion(dsv.Id, NameAttributeValues.title);
                rowArray[2] = xmlDatasetHelper.GetInformationFromVersion(dsv.Id, NameAttributeValues.description);
            }
            else
            {
                rowArray[1] = "";
                rowArray[2] = "Dataset is just in processing.";
            }
            // One mark per right bit in the effective-rights mask.
            rowArray[3] = (rights & (int)RightType.Read) > 0 ? "✔" : "✘";
            rowArray[4] = (rights & (int)RightType.Write) > 0 ? "✔" : "✘";
            rowArray[5] = (rights & (int)RightType.Delete) > 0 ? "✔" : "✘";
            rowArray[6] = (rights & (int)RightType.Download) > 0 ? "✔" : "✘";
            rowArray[7] = (rights & (int)RightType.Grant) > 0 ? "✔" : "✘";

            DataRow dataRow = model.NewRow();
            dataRow.ItemArray = rowArray;
            model.Rows.Add(dataRow);
        }
        return (View(new GridModel(model)));
    }
    finally
    {
        // Managers hold persistence resources; always release them.
        datasetManager.Dispose();
        entityPermissionManager.Dispose();
        entityManager.Dispose();
        userManager.Dispose();
    }
}