/// <summary>
/// Gets the total size (in bytes) of a file-format dataset by summing the on-disk
/// size of every "unstructureddata" content descriptor of the given dataset version.
/// </summary>
/// <param name="datasetVersion">Dataset version whose attached files are measured.</param>
/// <returns>Total size in bytes; 0 when the version has no unstructured-data descriptors.</returns>
private double GetFileDatasetSize(DatasetVersion datasetVersion)
{
    double totalSize = 0;

    foreach (ContentDescriptor cd in datasetVersion.ContentDescriptors)
    {
        // Only unstructured-data descriptors reference files on disk.
        // Ordinal ignore-case comparison replaces the original ToLower().Equals,
        // which was culture-sensitive (e.g. Turkish 'I').
        if (!cd.Name.Equals("unstructureddata", StringComparison.OrdinalIgnoreCase))
        {
            continue;
        }

        string uri = cd.URI;
        String path = Server.UrlDecode(uri);
        path = Path.Combine(AppConfiguration.DataPath, path);

        // using: the original leaked the stream (File.OpenRead was never closed).
        // File.OpenRead never returns null, so the old null checks were redundant.
        using (FileStream fs = System.IO.File.OpenRead(path))
        {
            // Keep the FileInformation round-trip so the size semantics
            // (uint truncation of fs.Length) stay identical to the original.
            FileInformation fileInfo = new FileInformation(fs.Name.Split('\\').LastOrDefault(), MimeMapping.GetMimeMapping(fs.Name), (uint)fs.Length, uri);
            totalSize += fileInfo.Size;
        }
    }

    return totalSize;
}
/// <summary>
/// Decodes a raw 16-byte Finder-style "user info" record into a fileInformation:
/// 4-byte type and creator codes, a 16-bit flag word, a 2D icon location, and a
/// reserved field. Byte order is normalized via dataOperations.convToLE
/// (presumably big-endian source data — helper defined elsewhere).
/// </summary>
/// <param name="rawInfo">Raw buffer; bytes 0..15 are read at fixed offsets.</param>
/// <returns>Populated fileInformation instance.</returns>
public static fileInformation getFileUserInfo(ref byte[] rawInfo)
{
    fileInformation info = new fileInformation();

    // Type and creator codes at offsets 0 and 4.
    info.fileType = dataOperations.convToLE(BitConverter.ToUInt32(rawInfo, 0));
    info.fileCreator = dataOperations.convToLE(BitConverter.ToUInt32(rawInfo, 4));

    // Flag word at offset 8; each flag is set when ALL bits of its mask are present.
    ushort rawFlags = dataOperations.convToLE(BitConverter.ToUInt16(rawInfo, 8));
    Func<finderFlags, bool> hasFlag = mask => (mask & (finderFlags)rawFlags) == mask;

    info.isOnDesk = hasFlag(finderFlags.kIsOnDesk);
    info.color = hasFlag(finderFlags.kColor);
    info.isShared = hasFlag(finderFlags.kIsShared);
    info.hasNoINITs = hasFlag(finderFlags.kHasNoINITs);
    info.hasBeenInited = hasFlag(finderFlags.kHasBeenInited);
    info.hasCustomIcon = hasFlag(finderFlags.kHasCustomIcon);
    info.isStationery = hasFlag(finderFlags.kIsStationery);
    info.nameLocked = hasFlag(finderFlags.kNameLocked);
    info.hasBundle = hasFlag(finderFlags.kHasBundle);
    info.isInvisible = hasFlag(finderFlags.kIsInvisible);
    info.isAlias = hasFlag(finderFlags.kIsAlias);

    // Icon location: vertical at offset 10, horizontal at offset 12.
    point location = new point();
    location.v = dataOperations.convToLE(BitConverter.ToInt16(rawInfo, 10));
    location.h = dataOperations.convToLE(BitConverter.ToInt16(rawInfo, 12));
    info.location = location;

    // Trailing reserved word at offset 14.
    info.reserved = dataOperations.convToLE(BitConverter.ToUInt16(rawInfo, 14));

    return info;
}
// GET: DQ
/// <summary>
/// Builds the data-quality (DQ) model for one dataset version and renders it as a partial view.
/// Combines live permission/metadata checks for the current user with pre-computed quality
/// metrics read from CSV files under C:\Data\DatasetQualities\ (performers, per-dataset info,
/// comparison statistics, tabular variables, and file lists).
/// </summary>
/// <param name="datasetId">Id of the dataset to report on.</param>
/// <param name="versionId">Id of the dataset version to report on.</param>
/// <returns>PartialView bound to the populated DQModels instance.</returns>
public ActionResult ShowDQ(long datasetId, long versionId)
{
    DQModels dqModel = new DQModels();
    // NOTE(review): datasetInfo, performers, datasetSize, pm and dsv are initialized but
    // never used below — candidates for removal.
    Dictionary<string, string> datasetInfo = new Dictionary<string, string>();
    List<performer> performers = new List<performer>();
    List<varVariable> varVariables = new List<varVariable>();
    Dictionary<string, double> datasetSize = new Dictionary<string, double>();
    DatasetManager dm = new DatasetManager();
    DataStructureManager dsm = new DataStructureManager();
    EntityPermissionManager entityPermissionManager = new EntityPermissionManager();
    PartyManager pm = new PartyManager();
    UserManager um = new UserManager();
    DatasetVersion dsv = new DatasetVersion();
    UserManager userManager = new UserManager();

    // data quality files
    // NOTE(review): this reader is opened, never read, and never disposed — the try block
    // has no effect apart from leaking a StreamReader. Candidate for removal.
    try
    {
        string pathPerformerDataset = @"C:\Data\DatasetQualities\PerformerDataset.csv";
        StreamReader readerPerformerDataset = new StreamReader(pathPerformerDataset);
    }
    catch (Exception ex)
    {
    }

    //////////////////////////////////////////////////////////////////////////
    DatasetVersion currentDatasetVersion = dm.GetDatasetVersion(versionId); //Current dataset version
    DataStructure currentDataStr = dsm.AllTypesDataStructureRepo.Get(currentDatasetVersion.Dataset.DataStructure.Id); //current data structure
    // NOTE(review): blocking on .Result can deadlock under the ASP.NET sync context —
    // consider making the action async and awaiting instead.
    var currentUser = userManager.FindByNameAsync(GetUsernameOrDefault()).Result; //Find current user

    //Find the dataset Type: "tabular" when the structure is a StructuredDataStructure, otherwise "file".
    string currentDatasetType = "file";
    if (currentDataStr.Self.GetType() == typeof(StructuredDataStructure))
    {
        currentDatasetType = "tabular";
    }
    dqModel.type = currentDatasetType;

    #region performers
    #region dataset's performers
    // PerformerDataset.csv rows appear to be <username>,<datasetId>;
    // Performers.csv rows appear to be <username>,<rate> — TODO confirm against the CSV producer.
    try
    {
        string pathPerformerDataset = @"C:\Data\DatasetQualities\PerformerDataset.csv";
        StreamReader readerPerformerDataset = new StreamReader(pathPerformerDataset);
        string pathPerformers = @"C:\Data\DatasetQualities\Performers.csv";
        StreamReader readerPerformers = new StreamReader(pathPerformers);
        string performerLine;
        List<string> pfs = new List<string>();
        List<performer> ps = new List<performer>();
        // First pass: collect the usernames attached to this dataset.
        while ((performerLine = readerPerformerDataset.ReadLine()) != null)
        {
            string[] s = performerLine.Split(',');
            if (long.Parse(s[1]) == datasetId)
            {
                pfs.Add(s[0]);
            }
        }
        // Second pass: build a performer entry for each collected username.
        while ((performerLine = readerPerformers.ReadLine()) != null)
        {
            string[] s = performerLine.Split(',');
            if (pfs.Contains(s[0]))
            {
                performer p = new performer();
                p.performerName = FindPerformerNameFromUsername(um, s[0]); //find performer name
                p.performerRate = int.Parse(s[1]);
                List<long> pfIds = FindDatasetsFromPerformerUsername(dm, um, s[0]); //Find all datasets in which the username is involved.
                p.DatasetIds = pfIds;
                ps.Add(p);
            }
        }
        dqModel.performers = ps;
        readerPerformerDataset.Close();
        readerPerformers.Close();
    }
    catch (Exception ex)
    {
        // NOTE(review): errors swallowed silently — the view simply shows no performers;
        // readers also leak when an exception fires before Close().
    }
    #endregion
    #endregion //performers

    //dqModel.isPublic = entityPermissionManager.GetRights(null, 1, datasetId); //check if dataset is public

    //check the read permission for current dataset
    bool rPermission = entityPermissionManager.HasEffectiveRight(currentUser.UserName, typeof(Dataset), datasetId, Security.Entities.Authorization.RightType.Read); //find if user has read permission
    if (rPermission == true) //has read permission or public = readable
    {
        dqModel.readable = 1;
    }
    else
    {
        dqModel.readable = 0;
    } //cannot read

    //Check if the current metadata is valid
    if (currentDatasetVersion.StateInfo != null)
    {
        dqModel.isValid = DatasetStateInfo.Valid.ToString().Equals(currentDatasetVersion.StateInfo.State) ? 1 : 0; //1:valid; 0:invalid.
    }
    else
    {
        dqModel.isValid = 0;
    }

    List<long> datasetIds = dm.GetDatasetLatestIds();
    // NOTE(review): overwritten later by the "type" row of Comparison.csv (if present).
    dqModel.allDatasets = datasetIds.Count;

    // NOTE(review): most of these accumulators are never written below — the aggregation
    // they served appears to have moved into the pre-computed CSV files.
    List<int> metadataRates = new List<int>();
    List<int> dsDescLength = new List<int>();
    List<int> dstrDescLength = new List<int>();
    List<int> dstrUsage = new List<int>();
    List<int> datasetSizeTabular = new List<int>();
    List<int> datasetRows = new List<int>();
    List<int> datasetCols = new List<int>();
    List<double> datasetSizeFiles = new List<double>();
    double datasetSizeFile = new double();
    List<int> datasetFileNumber = new List<int>();
    List<int> restrictions = new List<int>();
    int fileNumber = 0;
    List<int> sizeTabular = new List<int>(); //collect size, column number, and row number for one dataset
    int publicDatasets = 0;
    int restrictedDatasets = 0;
    int fileDatasets = 0;
    int tabularDatasets = 0;
    int rpTrue = 0;
    int rp;
    int validMetadata = 0;
    int allValidMetadas = 0;

    // Count how many checked-in datasets the current user may read.
    foreach (long Id in datasetIds) //for each dataset
    {
        if (dm.IsDatasetCheckedIn(Id))
        {
            DatasetVersion datasetVersion = dm.GetDatasetLatestVersion(Id); //get last dataset versions
            //If user has read permission
            rPermission = entityPermissionManager.HasEffectiveRight(currentUser.UserName, typeof(Dataset), Id, Security.Entities.Authorization.RightType.Read);
            if (rPermission == true) //has read permission or public = readable
            {
                rp = 1;
                rpTrue += 1;
            }
            else
            {
                rp = 0;
            } //cannot read
        }
    }
    dqModel.allReadables = rpTrue;

    // datasetInfo.csv rows (semicolon-separated), judging by the parse below:
    // id;type;metadataValidation;metadataComplition;descLen;structDescLen;structUsage;
    // cols;rows;files;sizeFile;performer1,performer2,...
    string pathDatasetInfo = @"C:\Data\DatasetQualities\datasetInfo.csv";
    StreamReader readerDatasetInfo = new StreamReader(pathDatasetInfo);
    List<datasetInformation> datasetsInformation = new List<datasetInformation>();
    try
    {
        string lineDatasetInfo;
        while ((lineDatasetInfo = readerDatasetInfo.ReadLine()) != null)
        {
            string[] dsInf = lineDatasetInfo.Split(';');
            datasetInformation datasetInformation = new datasetInformation();
            long id = long.Parse(dsInf[0]);
            datasetInformation.datasetId = id;
            datasetInformation.title = dm.GetDatasetLatestVersion(id).Title;
            rPermission = entityPermissionManager.HasEffectiveRight(currentUser.UserName, typeof(Dataset), id, Security.Entities.Authorization.RightType.Read);
            if (rPermission == true)
            {
                datasetInformation.readable = 1;
            }
            if (rPermission == false)
            {
                datasetInformation.readable = 0;
            }
            datasetInformation.type = dsInf[1];
            datasetInformation.metadataValidation = int.Parse(dsInf[2]);
            datasetInformation.metadataComplition = int.Parse(dsInf[3]);
            datasetInformation.descriptionLength = int.Parse(dsInf[4]);
            datasetInformation.structureDescriptionLength = int.Parse(dsInf[5]);
            datasetInformation.structureUsage = int.Parse(dsInf[6]);
            datasetInformation.columnNumber = int.Parse(dsInf[7]);
            datasetInformation.rowNumber = int.Parse(dsInf[8]);
            datasetInformation.fileNumber = int.Parse(dsInf[9]);
            datasetInformation.datasetSizeFile = double.Parse(dsInf[10]);
            string[] pfrms = dsInf[11].Split(',');
            List<string> performerNames = new List<string>();
            foreach (string p in pfrms)
            {
                performerNames.Add(p);
            }
            datasetInformation.performerNames = performerNames;
            datasetsInformation.Add(datasetInformation);
            // When this row describes the dataset being shown, copy its metrics into the model.
            if (datasetId == id)
            {
                dqModel.metadataComplition.requiredFields = int.Parse(dsInf[2]);
                dqModel.metadataComplition.totalFields = int.Parse(dsInf[3]);
                dqModel.datasetDescriptionLength.currentDescriptionLength = int.Parse(dsInf[4]);
                dqModel.dataStrDescriptionLength.currentDescriptionLength = int.Parse(dsInf[5]);
                dqModel.dataStrUsage.currentDataStrUsage = int.Parse(dsInf[6]);
                dqModel.columnNumber = datasetInformation.columnNumber;
                dqModel.rowNumber = datasetInformation.rowNumber;
                dqModel.fileNumber = datasetInformation.fileNumber;
                dqModel.datasetTotalSize.currentTotalSize = datasetInformation.datasetSizeFile;
            }
        }
    }
    catch
    {
        // NOTE(review): parse/IO errors are silently swallowed; whatever rows parsed
        // before the failure are kept.
    }
    dqModel.datasetsInformation = datasetsInformation;
    readerDatasetInfo.Close();

    //CURRENT DATASET VERSION
    //dqModel.metadataComplition.totalFields = GetMetadataRate(currentDatasetVersion); //current dataset version: metadata rate
    //dqModel.metadataComplition.requiredFields = 100; //Need to calculate: metadataStructureId = dsv.Dataset.MetadataStructure.Id;
    //dqModel.datasetDescriptionLength.currentDescriptionLength = currentDatasetVersion.Description.Length; // Current dataset vesion: dataset description length
    //dqModel.dataStrDescriptionLength.currentDescriptionLength = currentDatasetVersion.Dataset.DataStructure.Description.Length; // Current dataset version: data structure description length
    //dqModel.dataStrUsage.currentDataStrUsage = currentDataStr.Datasets.Count() - 1; // Current dataset version: how many times the data structure is used in other datasets

    #region comparision
    // Comparison.csv rows: <metricName>,<min>,<median>,<max> (the "type" and
    // "allValidMetadas" rows carry counts instead of min/median/max).
    try
    {
        string pathComparison = @"C:\Data\DatasetQualities\Comparison.csv";
        StreamReader readerComparison = new StreamReader(pathComparison);
        string infoline;
        List<int> infos = new List<int>();
        while ((infoline = readerComparison.ReadLine()) != null)
        {
            string[] s = infoline.Split(',');
            if (s[0] == "performersActivity")
            {
                dqModel.performersActivity.minActivity = int.Parse(s[1]);
                dqModel.performersActivity.medianActivity = int.Parse(s[2]);
                dqModel.performersActivity.maxActivity = int.Parse(s[3]);
            }
            else if (s[0] == "type")
            {
                dqModel.allDatasets = int.Parse(s[1]);
                dqModel.tabularDatasets = int.Parse(s[2]);
                dqModel.fileDatasets = int.Parse(s[3]);
            }
            else if (s[0] == "metadataRates")
            {
                dqModel.metadataComplition.minRate = int.Parse(s[1]);
                dqModel.metadataComplition.medianRate = int.Parse(s[2]);
                dqModel.metadataComplition.maxRate = int.Parse(s[3]);
            }
            else if (s[0] == "allValidMetadas")
            {
                dqModel.allValids = int.Parse(s[1]);
            }
            else if (s[0] == "datasetDescriptionLength")
            {
                dqModel.datasetDescriptionLength.minDescriptionLength = int.Parse(s[1]);
                dqModel.datasetDescriptionLength.medianDescriptionLength = int.Parse(s[2]);
                dqModel.datasetDescriptionLength.maxDescriptionLength = int.Parse(s[3]);
            }
            else if (s[0] == "dataStrDescriptionLength")
            {
                dqModel.dataStrDescriptionLength.minDescriptionLength = int.Parse(s[1]);
                dqModel.dataStrDescriptionLength.medianDescriptionLength = int.Parse(s[2]);
                dqModel.dataStrDescriptionLength.maxDescriptionLength = int.Parse(s[3]);
            }
            else if (s[0] == "dataStrUsage")
            {
                dqModel.dataStrUsage.minDataStrUsage = int.Parse(s[1]);
                dqModel.dataStrUsage.medianDataStrUsage = int.Parse(s[2]);
                dqModel.dataStrUsage.maxDataStrUsage = int.Parse(s[3]);
            }
            else if (s[0] == "datasetColNumber")
            {
                dqModel.datasetColNumber.minColNumber = int.Parse(s[1]);
                dqModel.datasetColNumber.medianColNumber = int.Parse(s[2]);
                dqModel.datasetColNumber.maxColNumber = int.Parse(s[3]);
            }
            else if (s[0] == "datasetRowNumber")
            {
                dqModel.datasetRowNumber.minRowNumber = int.Parse(s[1]);
                dqModel.datasetRowNumber.medianRowNumber = int.Parse(s[2]);
                dqModel.datasetRowNumber.maxRowNumber = int.Parse(s[3]);
            }
            else if (s[0] == "datasetFileNumber")
            {
                dqModel.datasetFileNumber.minFileNumber = int.Parse(s[1]);
                dqModel.datasetFileNumber.medianFileNumber = int.Parse(s[2]);
                dqModel.datasetFileNumber.maxFileNumber = int.Parse(s[3]);
            }
            else if (s[0] == "datasetTotalSizeFiles")
            {
                dqModel.datasetTotalSize.minSizeFile = double.Parse(s[1]);
                dqModel.datasetTotalSize.medianSizeFile = double.Parse(s[2]);
                dqModel.datasetTotalSize.maxSizeFile = double.Parse(s[3]);
            }
        }
        readerComparison.Close();
    }
    catch (Exception ex)
    {
        // NOTE(review): swallowed silently; comparison metrics stay at defaults.
    }
    #endregion

    ///////////////////////////////////////////////////////////////////////

    #region TABULAR FORMAT DATASET
    //If it is a tabular format dataset
    // Variables.csv rows: <datasetId>,<label>,<type>,<description>,<usage>,<missing>.
    if (currentDatasetType == "tabular")
    {
        string pathVariables = @"C:\Data\DatasetQualities\Variables.csv";
        StreamReader readerVariables = new StreamReader(pathVariables);
        string varLine;
        while ((varLine = readerVariables.ReadLine()) != null)
        {
            string[] varDetail = varLine.Split(',');
            if (varDetail[0] == datasetId.ToString())
            {
                varVariable v = new varVariable();
                v.varLabel = varDetail[1];
                v.varType = varDetail[2];
                v.varDescription = varDetail[3];
                v.varUsage = int.Parse(varDetail[4]);
                v.missing = int.Parse(varDetail[5]);
                varVariables.Add(v);
            }
        }
        dqModel.varVariables = varVariables;
        readerVariables.Close();
    }
    //
    // NOTE(review): the next statement appears garbled by credential redaction — a string
    // literal ("http://*****:*****@") is fused with the Files.csv path, and pathFiles,
    // fileLine and filesInformations are used below without a visible declaration
    // (the original likely had a `if (currentDatasetType == "file")` block here, closed
    // by the brace after `dqModel.filesInformation = filesInformations;`). Reconstruct
    // this span from version control before changing any behavior here.
    string serverName = "http://*****:*****@"C:\Data\DatasetQualities\Files.csv";
    try
    {
        StreamReader readerFiles = new StreamReader(pathFiles);
        while ((fileLine = readerFiles.ReadLine()) != null)
        {
            string[] fileDetail = fileLine.Split(',');
            if (fileDetail[0] == datasetId.ToString())
            {
                fileInformation f = new fileInformation();
                f.fileName = fileDetail[1];
                f.fileFormat = fileDetail[2];
                double d = Convert.ToDouble(fileDetail[3]);
                f.fileSize = d;
                filesInformations.Add(f);
            }
        }
        readerFiles.Close();
    }
    catch
    {
        // NOTE(review): swallowed silently; reader leaks if ReadLine throws before Close().
    }
    dqModel.filesInformation = filesInformations;
    }
    //if (currentDatasetVersion != null)
    //{
    //    List<ContentDescriptor> contentDescriptors = currentDatasetVersion.ContentDescriptors.ToList();
    //    double totalSize = 0;
    //    if (contentDescriptors.Count > 0)
    //    {
    //        foreach (ContentDescriptor cd in contentDescriptors)
    //        {
    //            if (cd.Name.ToLower().Equals("unstructureddata"))
    //            {
    //                fileInformation fileInformation = new fileInformation();
    //                string uri = cd.URI;
    //                //get the file path
    //                try
    //                {
    //                    String path = Server.UrlDecode(uri);
    //                    path = Path.Combine(AppConfiguration.DataPath, path);
    //                    Stream fileStream = System.IO.File.OpenRead(path);
    //                    if (fileStream != null)
    //                    {
    //                        FileStream fs = fileStream as FileStream;
    //                        if (fs != null)
    //                        {
    //                            //get file information
    //                            FileInformation fileInfo = new FileInformation(fs.Name.Split('\\').LastOrDefault(), MimeMapping.GetMimeMapping(fs.Name), (uint)fs.Length, uri);
    //                            fileInformation.fileName = fileInfo.Name.Split('.')[0]; //file name
    //                            fileInformation.fileFormat = fileInfo.Name.Split('.')[1].ToLower(); //file extension
    //                            fileInformation.fileSize = fileInfo.Size; //file size
    //                            totalSize += fileInfo.Size;
    //                        }
    //                    }
    //                }
    //                catch
    //                {
    //                }
    //                filesInformation.Add(fileInformation);
    //            }
    //}
    //}
    //dqModel.fileNumber = contentDescriptors.Count;
    //dqModel.datasetTotalSize.currentTotalSize = totalSize;
    //}
    //dqModel.filesInformation = filesInformation;
    //}
    #endregion

    return(PartialView(dqModel));
}
/// <summary>
/// Creates an upload descriptor tying a document id and its file information
/// to the URL the upload should be sent to.
/// </summary>
/// <param name="documentId">Identifier of the document being uploaded.</param>
/// <param name="fileInformation">File metadata for the upload.</param>
/// <param name="uploadUrl">Destination URL for the upload.</param>
public upload(string documentId, fileInformation fileInformation, string uploadUrl)
{
    this.uploadUrl = uploadUrl;
    this.documentId = documentId;
    this.fileInformation = fileInformation;
}
/// <summary>
/// Parses the fixed-offset "user info" record in <paramref name="rawInfo"/> into a
/// fileInformation: type/creator codes (offsets 0 and 4), a 16-bit flag word (offset 8),
/// an icon location (offsets 10/12), and a reserved word (offset 14). Values pass
/// through dataOperations.convToLE, which presumably normalizes endianness —
/// helper defined elsewhere.
/// </summary>
/// <param name="rawInfo">Raw buffer; at least 16 bytes are read.</param>
/// <returns>Populated fileInformation instance.</returns>
public static fileInformation getFileUserInfo(ref byte[] rawInfo)
{
    fileInformation info = new fileInformation();

    info.fileType = dataOperations.convToLE(BitConverter.ToUInt32(rawInfo, 0));
    info.fileCreator = dataOperations.convToLE(BitConverter.ToUInt32(rawInfo, 4));

    // Convert the flag word to the enum once; HasFlag is true when every bit of the
    // named mask is set, matching the original (mask & value) == mask tests.
    finderFlags flagBits = (finderFlags)dataOperations.convToLE(BitConverter.ToUInt16(rawInfo, 8));
    info.isOnDesk = flagBits.HasFlag(finderFlags.kIsOnDesk);
    info.color = flagBits.HasFlag(finderFlags.kColor);
    info.isShared = flagBits.HasFlag(finderFlags.kIsShared);
    info.hasNoINITs = flagBits.HasFlag(finderFlags.kHasNoINITs);
    info.hasBeenInited = flagBits.HasFlag(finderFlags.kHasBeenInited);
    info.hasCustomIcon = flagBits.HasFlag(finderFlags.kHasCustomIcon);
    info.isStationery = flagBits.HasFlag(finderFlags.kIsStationery);
    info.nameLocked = flagBits.HasFlag(finderFlags.kNameLocked);
    info.hasBundle = flagBits.HasFlag(finderFlags.kHasBundle);
    info.isInvisible = flagBits.HasFlag(finderFlags.kIsInvisible);
    info.isAlias = flagBits.HasFlag(finderFlags.kIsAlias);

    // Icon location: vertical component first, then horizontal.
    point iconPos = new point();
    iconPos.v = dataOperations.convToLE(BitConverter.ToInt16(rawInfo, 10));
    iconPos.h = dataOperations.convToLE(BitConverter.ToInt16(rawInfo, 12));
    info.location = iconPos;

    info.reserved = dataOperations.convToLE(BitConverter.ToUInt16(rawInfo, 14));

    return info;
}