/// <summary>
/// Rewrites the "file" parameter of <paramref name="uri"/> so that a relative
/// local path becomes an absolute path rooted at the current directory.
/// Remote URIs and already-rooted paths are left untouched.
/// </summary>
/// <param name="uri">DataSet URI to update; must not be null.</param>
public static void NormalizeFileName(DataSetUri uri)
{
    if (uri == null)
    {
        throw new ArgumentNullException("uri");
    }
    if (!uri.ContainsParameter("file"))
    {
        return; // nothing to normalize
    }
    string path = uri["file"];
    if (String.IsNullOrEmpty(path))
    {
        return;
    }
    // The value is treated as a local file unless it parses as an absolute URI
    // with a scheme other than "file".
    Uri parsed;
    bool parsedOk = Uri.TryCreate(path, UriKind.RelativeOrAbsolute, out parsed);
    bool looksRemote = parsedOk && parsed.IsAbsoluteUri && parsed.Scheme != Uri.UriSchemeFile;
    if (looksRemote || System.IO.Path.IsPathRooted(path))
    {
        return; // remote resource, or already a full path
    }
    uri["file"] = System.IO.Path.Combine(Environment.CurrentDirectory, path);
}
/// <summary>
/// Copies the given dataset into the dataset addressed by <paramref name="dstUri"/>.
/// </summary>
/// <param name="src">Original dataset to clone.</param>
/// <param name="dstUri">URI of the destination dataset.</param>
/// <param name="updater">Delegate accepting update progress notifications.</param>
/// <returns>New instance of the <see cref="DataSet"/> class.</returns>
/// <remarks>
/// This method splits the original dataset into parts and therefore is able
/// to clone very large datasets that do not fit into memory.
/// </remarks>
public static DataSet Clone(DataSet src, DataSetUri dstUri, ProgressUpdater updater)
{
    DataSet destination = null;
    try
    {
        destination = DataSet.Open(dstUri);
        return Clone(src, destination, updater);
    }
    catch
    {
        // Do not leak a half-opened destination dataset on failure.
        if (destination != null)
        {
            destination.Dispose();
        }
        throw;
    }
}
/// <summary>
/// Initializes a blob-backed dataset from an already known schema, skipping the
/// schema read from the blob itself.
/// </summary>
/// <param name="uri">DataSet URI or plain URI of the dataset blob.</param>
/// <param name="schemeSize">Size in bytes of the serialized schema stored in the blob.</param>
/// <param name="info">Deserialized dataset schema.</param>
protected AzureBlobDataSet(string uri, int schemeSize, SerializableDataSetSchema info)
{
    AzureBlobDataSetUri azureUri = DataSetUri.IsDataSetUri(uri)
        ? new AzureBlobDataSetUri(uri)
        : AzureBlobDataSetUri.ToUri(uri);
    this.uri = azureUri;

    // If the connection string parses, use authenticated access; otherwise fall
    // back to anonymous access through the public blob endpoint.
    CloudStorageAccount storageAccount;
    if (CloudStorageAccount.TryParse(azureUri.ConnectionString, out storageAccount))
    {
        CloudBlobClient client = storageAccount.CreateCloudBlobClient();
        CloudBlobContainer container = client.GetContainerReference(azureUri.Container);
        blob = container.GetPageBlobReference(azureUri.Blob);
    }
    else
    {
        blob = new CloudPageBlob(@"http://" + azureUri.AccountName + @".blob.core.windows.net/" + azureUri.Container + @"/" + azureUri.Blob);
        _anonymous = true;
    }

    // Initialize and commit with autocommit suspended, then restore the flag.
    bool savedAutoCommitState = IsAutocommitEnabled;
    IsAutocommitEnabled = false;
    Initialize(schemeSize, info);
    Commit();
    if (_anonymous)
    {
        // Anonymous access cannot write pages, so expose the dataset read-only.
        this.SetCompleteReadOnly();
    }
    IsAutocommitEnabled = savedAutoCommitState;
    _IsInitialized = true;
}
/// <summary>
/// Updates the properties of the <paramref name="uri"/> marked with
/// <see cref="FileNamePropertyAttribute"/> to contain full paths.
/// </summary>
/// <param name="uri">DataSet URI to update; must not be null.</param>
/// <remarks>
/// <para>
/// The method finds all public string properties of the <paramref name="uri"/> marked
/// with <see cref="FileNamePropertyAttribute"/> whose value is a relative local path,
/// and rewrites each of them as a full path based on the current directory.
/// </para>
/// </remarks>
public static void NormalizeFileNames(DataSetUri uri)
{
    if (uri == null)
    {
        throw new ArgumentNullException("uri");
    }
    PropertyInfo[] properties = uri.GetType().GetProperties(
        BindingFlags.Instance | BindingFlags.Public | BindingFlags.DeclaredOnly);
    foreach (PropertyInfo property in properties)
    {
        // Only readable/writable string properties can be normalized.
        if (property.PropertyType != typeof(string) || !property.CanWrite || !property.CanRead)
            continue;
        if (property.GetCustomAttributes(typeof(FileNamePropertyAttribute), false).Length == 0)
            continue; // not a file name property
        string value = (string)property.GetValue(uri, null);
        if (String.IsNullOrEmpty(value))
            continue;
        // An absolute URI with a non-"file" scheme is not a local file path.
        Uri parsed;
        bool created = Uri.TryCreate(value, UriKind.RelativeOrAbsolute, out parsed);
        if (created && parsed.IsAbsoluteUri && parsed.Scheme != Uri.UriSchemeFile)
            continue; // not a local file path
        if (System.IO.Path.IsPathRooted(value))
            continue; // it is already a full path
        property.SetValue(uri, System.IO.Path.Combine(Environment.CurrentDirectory, value), null);
    }
}
/// <summary>
/// Creates a new page blob at <paramref name="uri"/> holding the given schema and the
/// supplied in-memory data, then opens it as an <see cref="AzureBlobDataSet"/>.
/// </summary>
/// <param name="uri">DataSet URI of the destination blob.</param>
/// <param name="schema">Schema describing the dimensions and variables of the dataset.</param>
/// <param name="dataToPut">Data arrays keyed by variable name; variables with data are laid out first.</param>
/// <returns>New <see cref="AzureBlobDataSet"/> opened over the populated blob.</returns>
/// <remarks>
/// Blob layout: the 1st 512-byte page stores the serialized schema length as a UTF-8
/// decimal string; the schema JSON starts at offset 512; variable data follows at
/// 512-byte-aligned offsets (page blob writes must be page-aligned). The whole
/// content is accumulated in a memory stream and uploaded in maxBlobChunk pieces,
/// so it only suits datasets small enough to buffer in memory.
/// </remarks>
public static AzureBlobDataSet CreateSetWithSmallData(string uri, SerializableDataSetSchema schema, IDictionary<string, Array> dataToPut)
{
    SerializableDataSetSchema info = schema;
    List<SerializableDimension> dimensions = schema.Dimensions.ToList();
    List<SerializableVariableSchema> varsUnsorted = schema.Variables.ToList();
    List<SerializableVariableSchema> vars = new List<SerializableVariableSchema>(varsUnsorted.Count);
    //vars for which data is provided should go first
    int varsWithDataCount = 0;
    foreach (var v in varsUnsorted)
    {
        if (dataToPut.ContainsKey(v.Name))
        {
            vars.Add(v);
            ++varsWithDataCount;
        }
    }
    foreach (var v in varsUnsorted)
    {
        if (!dataToPut.ContainsKey(v.Name))
        {
            vars.Add(v);
        }
    }
    // Map each dimension name to its length for quick size computations below.
    Dictionary<string, int> dimLengthDictionary = new Dictionary<string, int>(dimensions.Count);
    foreach (var i in dimensions)
    {
        dimLengthDictionary.Add(i.Name, i.Length);
        //System.Diagnostics.Trace.WriteLine(string.Format("ABDS: dimension added {0}[{1}]", i.Name, i.Length));
    }
    long estimatedBlobSize = 512;//only scheme size on 1st page
    long[] varOffsets = new long[vars.Count];
    AzureBlobDataSetUri azureUri = null;
    if (DataSetUri.IsDataSetUri(uri))
    {
        azureUri = new AzureBlobDataSetUri(uri);
    }
    else
    {
        azureUri = AzureBlobDataSetUri.ToUri(uri);
    }
    CloudStorageAccount storageAccount = CloudStorageAccount.Parse(azureUri.ConnectionString);
    CloudBlobClient blobClient = storageAccount.CreateCloudBlobClient();
    // Retrieve a reference to a container
    CloudBlobContainer container = blobClient.GetContainerReference(azureUri.Container);
    container.CreateIfNotExist();
    CloudPageBlob blob = container.GetPageBlobReference(azureUri.Blob);
    blob.DeleteIfExists();
    int schemeSize;
    using (MemoryStream bufferStream = new MemoryStream())
    {
        using (MemoryStream memStream = new MemoryStream())
        {
            DataContractJsonSerializer serializer = new DataContractJsonSerializer(typeof(SerializableDataSetSchema));
            serializer.WriteObject(memStream, info);
            schemeSize = (int)memStream.Length;
            estimatedBlobSize += 512 *
                ((schemeSize + 511) / 512);//remembering the need to align data
            // Compute each variable's start offset and the total blob size;
            // every variable (and every outer-dimension row of a multi-dim
            // variable) is padded to a 512-byte page boundary.
            for (int i = 0; i < vars.Count; ++i)
            {
                varOffsets[i] = estimatedBlobSize;
                if (vars[i].Dimensions.Length == 1)
                {
                    //System.Diagnostics.Trace.WriteLine(string.Format("ABDS: looking for dim \"{0}\" for var \"{1}\"", vars[i].Dimensions[0], vars[i].Name));
                    estimatedBlobSize += ((dimLengthDictionary[vars[i].Dimensions[0]] * vars[i].ValueSize + 511) / 512) * 512;
                }
                else
                {
                    int rowSize = 1;
                    for (int j = 1; j < vars[i].Dimensions.Length; ++j)
                    {
                        //System.Diagnostics.Trace.WriteLine(string.Format("ABDS: looking for dim \"{0}\" for var \"{1}\"", vars[i].Dimensions[j], vars[i].Name));
                        rowSize *= dimLengthDictionary[vars[i].Dimensions[j]];
                    }
                    //System.Diagnostics.Trace.WriteLine(string.Format("ABDS: looking for dim \"{0}\" for var \"{1}\"", vars[i].Dimensions[0], vars[i].Name));
                    estimatedBlobSize += (long)dimLengthDictionary[vars[i].Dimensions[0]] * (long)(((rowSize * vars[i].ValueSize + 511) / 512) * 512);
                }
            }
            blob.Create(estimatedBlobSize);
            //writing scheme size into the 1st page
            UTF8Encoding utf8 = new UTF8Encoding();
            using (MemoryStream sizeStream = new MemoryStream(new byte[512], true))
            {
                byte[] sizeBuf = utf8.GetBytes(schemeSize.ToString());
                sizeStream.Write(sizeBuf, 0, sizeBuf.Length);
                sizeStream.Seek(0, SeekOrigin.Begin);
                //blob.WritePages(sizeStream, 0);
                //writing scheme starting with 2nd page
                // sizeAligned covers the size page (512) plus the page-aligned schema.
                int sizeAligned = ((schemeSize + 511) / 512) * 512 + 512;
                byte[] scheme = new byte[sizeAligned];
                sizeStream.Seek(0, SeekOrigin.Begin);
                sizeStream.Read(scheme, 0, 512);
                memStream.Seek(0, SeekOrigin.Begin);
                memStream.Read(scheme, 512, schemeSize);
                bufferStream.Write(scheme, 0, sizeAligned);
                //for (int i = 0; i < sizeAligned; i += maxBlobChunk)
                //    blob.WritePages(new MemoryStream(scheme, i, Math.Min(maxBlobChunk, sizeAligned - i)), i);
            }
        }
        // Append the data of the variables that have data (they were sorted first,
        // so their offsets directly follow the schema pages).
        for (int i = 0; i < varsWithDataCount; ++i)
        {
            if (vars[i].Dimensions.Length == 1)
            {
                int len = dimLengthDictionary[vars[i].Dimensions[0]];
                var data = dataToPut[vars[i].Name];
                if (vars[i].Type == typeof(DateTime))
                {
                    // DateTime arrays are stored as Int64 tick counts.
                    var temp = new Int64[data.Length];
                    for (int j = 0; j < temp.Length; ++j)
                    {
                        temp[j] = ((DateTime)data.GetValue(j)).Ticks;
                    }
                    data = temp;
                }
                int bufferSize = 512 * ((len * vars[i].ValueSize + 511) / 512);
                byte[] buffer = new byte[bufferSize];
                Buffer.BlockCopy(data, 0, buffer, 0, len * vars[i].ValueSize);
                bufferStream.Write(buffer, 0, bufferSize);
                //for (int j = 0; j < bufferSize; j += maxBlobChunk)
                //    blob.WritePages(new MemoryStream(buffer, j, Math.Min(maxBlobChunk, bufferSize - j)), varOffsets[i] + j);
            }
            else
            {
                // Multidimensional variables: each outer-dimension row is padded
                // to a 512-byte boundary and written in sequence.
                int outerDimLen = dimLengthDictionary[vars[i].Dimensions[0]];
                int rowLen = vars[i].ValueSize;
                for (int j = 1; j < vars[i].Dimensions.Length; ++j)
                {
                    rowLen *= dimLengthDictionary[vars[i].Dimensions[j]];
                }
                int rowLenUnaligned = rowLen;
                rowLen = 512 * ((rowLen + 511) / 512);
                byte[] buffer = new byte[rowLen];
                Array data = dataToPut[vars[i].Name];
                if (vars[i].Type == typeof(DateTime))
                {
                    // Convert the whole array to Int64 ticks, walking every index
                    // with the sibling Move helper.
                    int[] shapeTemp = new int[data.Rank];
                    for (int k = 0; k < shapeTemp.Length; ++k)
                    {
                        shapeTemp[k] = data.GetUpperBound(k) + 1;
                    }
                    Array temp = Array.CreateInstance(typeof(Int64), shapeTemp);
                    int[] resPos = new int[shapeTemp.Length];
                    for (int k = 0; k < resPos.Length; ++k)
                    {
                        resPos[k] = 0;
                    }
                    do
                    {
                        temp.SetValue(((DateTime)data.GetValue(resPos)).Ticks, resPos);
                    } while (Move(resPos, shapeTemp));
                    data = temp;
                }
                for (int j = 0; j < outerDimLen; ++j)
                {
                    Buffer.BlockCopy(data, j * rowLenUnaligned, buffer, 0, rowLenUnaligned);
                    bufferStream.Write(buffer, 0, rowLen);
                }
            }
        }
        // Upload the accumulated buffer in maxBlobChunk-sized page writes.
        int bufferStreamSize = (int)bufferStream.Length;
        int bufferStreamSizeAligned = ((bufferStreamSize + 511) / 512) * 512;
        byte[] bufferAligned = new byte[bufferStreamSizeAligned + 512];
        bufferStream.Seek(0, SeekOrigin.Begin);
        bufferStream.Read(bufferAligned, 0, bufferStreamSize);
        for (int i = 0; i < bufferStreamSizeAligned; i += maxBlobChunk)
        {
            blob.WritePages(new MemoryStream(bufferAligned, i, Math.Min(maxBlobChunk, bufferStreamSizeAligned - i)), i);
        }
    }
    return (new AzureBlobDataSet(uri,
                                 schemeSize, info));
}
/// <summary>
/// Creates a new page blob at <paramref name="uri"/> containing only the serialized
/// schema (no variable data is written), then opens it as an <see cref="AzureBlobDataSet"/>.
/// </summary>
/// <param name="uri">DataSet URI of the destination blob.</param>
/// <param name="schema">Schema describing the dimensions and variables of the dataset.</param>
/// <returns>New <see cref="AzureBlobDataSet"/> opened over the created blob.</returns>
/// <remarks>
/// Blob layout: the 1st 512-byte page stores the serialized schema length as a UTF-8
/// decimal string; the schema JSON starts at offset 512; space for variable data is
/// reserved at 512-byte-aligned offsets (page blob writes must be page-aligned) but
/// left untouched.
/// </remarks>
public static AzureBlobDataSet CreateEmptySet(string uri, SerializableDataSetSchema schema)
{
    SerializableDataSetSchema info = schema;
    List<SerializableDimension> dimensions = schema.Dimensions.ToList();
    List<SerializableVariableSchema> vars = schema.Variables.ToList();
    // Map each dimension name to its length for the size computations below.
    Dictionary<string, int> dimLengthDictionary = new Dictionary<string, int>(dimensions.Count);
    foreach (var i in dimensions)
    {
        dimLengthDictionary.Add(i.Name, i.Length);
    }
    long estimatedBlobSize = 512;//only scheme size on 1st page
    long[] varOffsets = new long[vars.Count];
    AzureBlobDataSetUri azureUri = null;
    if (DataSetUri.IsDataSetUri(uri))
    {
        azureUri = new AzureBlobDataSetUri(uri);
    }
    else
    {
        azureUri = AzureBlobDataSetUri.ToUri(uri);
    }
    CloudStorageAccount storageAccount = CloudStorageAccount.Parse(azureUri.ConnectionString);
    CloudBlobClient blobClient = storageAccount.CreateCloudBlobClient();
    // Retrieve a reference to a container
    CloudBlobContainer container = blobClient.GetContainerReference(azureUri.Container);
    container.CreateIfNotExist();
    CloudPageBlob blob = container.GetPageBlobReference(azureUri.Blob);
    blob.DeleteIfExists();
    int schemeSize;
    using (MemoryStream memStream = new MemoryStream())
    {
        DataContractJsonSerializer serializer = new DataContractJsonSerializer(typeof(SerializableDataSetSchema));
        serializer.WriteObject(memStream, info);
        schemeSize = (int)memStream.Length;
        estimatedBlobSize += 512 * ((schemeSize + 511) / 512);//remembering the need to align data
        // Compute each variable's start offset and the total blob size; every
        // variable (and every outer-dimension row of a multi-dim variable) is
        // padded to a 512-byte page boundary.
        for (int i = 0; i < vars.Count; ++i)
        {
            varOffsets[i] = estimatedBlobSize;
            if (vars[i].Dimensions.Length == 1)
            {
                estimatedBlobSize += ((dimLengthDictionary[vars[i].Dimensions[0]] * vars[i].ValueSize + 511) / 512) * 512;
            }
            else
            {
                int rowSize = 1;
                for (int j = 1; j < vars[i].Dimensions.Length; ++j)
                {
                    rowSize *= dimLengthDictionary[vars[i].Dimensions[j]];
                }
                // long casts keep the outer-dimension multiplication from
                // overflowing 32-bit arithmetic for large variables.
                estimatedBlobSize += (long)dimLengthDictionary[vars[i].Dimensions[0]] * (long)(((rowSize * vars[i].ValueSize + 511) / 512) * 512);
            }
        }
        blob.Create(estimatedBlobSize);
        //writing scheme size into the 1st page
        UTF8Encoding utf8 = new UTF8Encoding();
        using (MemoryStream sizeStream = new MemoryStream(new byte[512], true))
        {
            byte[] sizeBuf = utf8.GetBytes(schemeSize.ToString());
            sizeStream.Write(sizeBuf, 0, sizeBuf.Length);
            sizeStream.Seek(0, SeekOrigin.Begin);
            //blob.WritePages(sizeStream, 0);
            //writing scheme starting with 2nd page
            // sizeAligned covers the size page (512) plus the page-aligned schema.
            int sizeAligned = ((schemeSize + 511) / 512) * 512 + 512;
            byte[] scheme = new byte[sizeAligned];
            sizeStream.Seek(0, SeekOrigin.Begin);
            sizeStream.Read(scheme, 0, 512);
            memStream.Seek(0, SeekOrigin.Begin);
            memStream.Read(scheme, 512, schemeSize);
            for (int i = 0; i < sizeAligned; i += maxBlobChunk)
            {
                blob.WritePages(new MemoryStream(scheme, i, Math.Min(maxBlobChunk, sizeAligned - i)), i);
            }
        }
    }
    return (new AzureBlobDataSet(uri, schemeSize, info));
}
/// <summary>
/// Creates a page blob at <paramref name="uri"/> containing the schema and data of
/// <paramref name="source"/> plus the additional <paramref name="emptyVariables"/>
/// (declared in the schema but left unfilled), then opens the blob as an
/// <see cref="AzureBlobDataSet"/>.
/// </summary>
/// <param name="uri">DataSet URI of the destination blob.</param>
/// <param name="source">Dataset whose dimensions, variables, metadata and data are copied.</param>
/// <param name="emptyVariables">Extra variable schemas appended after the source variables; no data is written for them.</param>
/// <returns>New <see cref="AzureBlobDataSet"/> opened over the populated blob.</returns>
/// <remarks>
/// Blob layout: the 1st 512-byte page stores the serialized schema length as a UTF-8
/// decimal string; the schema JSON starts at offset 512; each variable's data follows
/// at the 512-byte-aligned offsets recorded in varOffsets (page blob writes must be
/// page-aligned). Data is copied one outer-dimension row at a time to bound memory use.
/// </remarks>
public static AzureBlobDataSet ArrangeData(string uri, DataSet source, SerializableVariableSchema[] emptyVariables)
{
    // Combined schema: all source dimensions, source variables first, then the empty ones.
    List<SerializableDimension> dimensions = new List<SerializableDimension>();
    foreach (var i in source.Dimensions)
    {
        dimensions.Add(new SerializableDimension(i.Name, i.Length));
    }
    List<SerializableVariableSchema> oldVars = source.Variables.Select<Variable, SerializableVariableSchema>(x => x.GetSchema().AsSerializable()).ToList();
    List<SerializableVariableSchema> vars = new List<SerializableVariableSchema>(oldVars);
    vars.AddRange(emptyVariables);
    SerializableDataSetSchema info = new SerializableDataSetSchema(dimensions.ToArray(), vars.ToArray(), source.Metadata.AsDictionary());
    Dictionary<string, int> dimLengthDictionary = new Dictionary<string, int>(dimensions.Count);
    foreach (var i in dimensions)
    {
        dimLengthDictionary.Add(i.Name, i.Length);
    }

    long estimatedBlobSize = 512;//only scheme size on 1st page
    long[] varOffsets = new long[vars.Count];
    AzureBlobDataSetUri azureUri = null;
    if (DataSetUri.IsDataSetUri(uri))
    {
        azureUri = new AzureBlobDataSetUri(uri);
    }
    else
    {
        azureUri = AzureBlobDataSetUri.ToUri(uri);
    }
    CloudStorageAccount storageAccount = CloudStorageAccount.Parse(azureUri.ConnectionString);
    CloudBlobClient blobClient = storageAccount.CreateCloudBlobClient();
    // Retrieve a reference to a container
    CloudBlobContainer container = blobClient.GetContainerReference(azureUri.Container);
    container.CreateIfNotExist();
    // NOTE(review): this grants anonymous public read access to the entire container,
    // apparently so the anonymous path of the constructor can read it back — confirm
    // this exposure is intended for the data being written here.
    container.SetPermissions(new BlobContainerPermissions { PublicAccess = BlobContainerPublicAccessType.Container });
    CloudPageBlob blob;
    int schemeSize;
    using (MemoryStream memStream = new MemoryStream())
    {
        DataContractJsonSerializer serializer = new DataContractJsonSerializer(typeof(SerializableDataSetSchema));
        serializer.WriteObject(memStream, info);
        schemeSize = (int)memStream.Length;
        estimatedBlobSize += 512 * ((schemeSize + 511) / 512);//remembering the need to align data
        // Compute each variable's start offset and the total blob size; every
        // variable (and every outer-dimension row of a multi-dim variable) is
        // padded to a 512-byte page boundary.
        for (int i = 0; i < vars.Count; ++i)
        {
            varOffsets[i] = estimatedBlobSize;
            if (vars[i].Dimensions.Length == 1)
            {
                estimatedBlobSize += ((dimLengthDictionary[vars[i].Dimensions[0]] * vars[i].ValueSize + 511) / 512) * 512;
            }
            else
            {
                int rowSize = 1;
                for (int j = 1; j < vars[i].Dimensions.Length; ++j)
                {
                    rowSize *= dimLengthDictionary[vars[i].Dimensions[j]];
                }
                // FIX: cast to long before multiplying by the outer dimension length
                // (as CreateEmptySet does) so large variables do not overflow 32-bit
                // arithmetic and corrupt the offsets of all following variables.
                estimatedBlobSize += (long)dimLengthDictionary[vars[i].Dimensions[0]] * (long)(((rowSize * vars[i].ValueSize + 511) / 512) * 512);
            }
        }
        blob = container.GetPageBlobReference(azureUri.Blob);
        blob.DeleteIfExists(); // CRITICAL: some may interfere between calls
        blob.Create(estimatedBlobSize);

        //writing scheme size into the 1st page
        UTF8Encoding utf8 = new UTF8Encoding();
        using (MemoryStream sizeStream = new MemoryStream(new byte[512], true))
        {
            byte[] sizeBuf = utf8.GetBytes(schemeSize.ToString());
            sizeStream.Write(sizeBuf, 0, sizeBuf.Length);
            sizeStream.Seek(0, SeekOrigin.Begin);
            blob.WritePages(sizeStream, 0);
        }
        //writing scheme starting with 2nd page
        int sizeAligned = ((schemeSize + 511) / 512) * 512;
        byte[] scheme = new byte[sizeAligned];
        memStream.Seek(0, SeekOrigin.Begin);
        memStream.Read(scheme, 0, schemeSize);
        for (int i = 0; i < sizeAligned; i += maxBlobChunk)
        {
            blob.WritePages(new MemoryStream(scheme, i, Math.Min(maxBlobChunk, sizeAligned - i)), 512 + i);
        }
    }

    //populating blob with values from source (empty variables are skipped)
    for (int i = 0; i < oldVars.Count; ++i)
    {
        if (oldVars[i].Dimensions.Length == 1)
        {
            int len = dimLengthDictionary[oldVars[i].Dimensions[0]];
            var data = source[oldVars[i].Name].GetData();
            if (oldVars[i].Type == typeof(DateTime))
            {
                // DateTime arrays are stored as Int64 tick counts.
                var temp = new Int64[data.Length];
                for (int j = 0; j < temp.Length; ++j)
                {
                    temp[j] = ((DateTime)data.GetValue(j)).Ticks;
                }
                data = temp;
            }
            int bufferSize = 512 * ((len * oldVars[i].ValueSize + 511) / 512);
            byte[] buffer = new byte[bufferSize];
            Buffer.BlockCopy(data, 0, buffer, 0, len * oldVars[i].ValueSize);
            for (int j = 0; j < bufferSize; j += maxBlobChunk)
            {
                blob.WritePages(new MemoryStream(buffer, j, Math.Min(maxBlobChunk, bufferSize - j)), varOffsets[i] + j);
            }
        }
        else
        {
            // Multidimensional variables: copy one outer-dimension row at a time.
            // (For i < oldVars.Count, vars[i] and oldVars[i] are the same objects,
            // so oldVars is used consistently throughout.)
            int outerDimLen = dimLengthDictionary[oldVars[i].Dimensions[0]];
            int rowLen = oldVars[i].ValueSize;
            for (int j = 1; j < oldVars[i].Dimensions.Length; ++j)
            {
                rowLen *= dimLengthDictionary[oldVars[i].Dimensions[j]];
            }
            int rowLenUnaligned = rowLen;
            rowLen = 512 * ((rowLen + 511) / 512);
            // Read window: one slice along the outer dimension, full extent of the rest.
            int[] origin = new int[oldVars[i].Dimensions.Length]; // zero-initialized
            int[] shape = new int[oldVars[i].Dimensions.Length];
            shape[0] = 1;
            for (int j = 1; j < origin.Length; ++j)
            {
                shape[j] = dimLengthDictionary[oldVars[i].Dimensions[j]];
            }
            byte[] buffer = new byte[rowLen];
            for (int j = 0; j < outerDimLen; ++j)
            {
                origin[0] = j;
                Array data = source[oldVars[i].Name].GetData(origin, shape);
                if (oldVars[i].Type == typeof(DateTime))
                {
                    // Convert the row to Int64 ticks, walking every index with the
                    // sibling Move helper.
                    int[] shapeTemp = new int[data.Rank];
                    for (int k = 0; k < shapeTemp.Length; ++k)
                    {
                        shapeTemp[k] = data.GetUpperBound(k) + 1;
                    }
                    Array temp = Array.CreateInstance(typeof(Int64), shapeTemp);
                    int[] resPos = new int[shapeTemp.Length]; // zero-initialized
                    do
                    {
                        temp.SetValue(((DateTime)data.GetValue(resPos)).Ticks, resPos);
                    } while (Move(resPos, shapeTemp));
                    data = temp;
                }
                Buffer.BlockCopy(data, 0, buffer, 0, rowLenUnaligned);
                for (int k = 0; k < rowLen; k += maxBlobChunk)
                {
                    blob.WritePages(new MemoryStream(buffer, k, Math.Min(maxBlobChunk, rowLen - k)), varOffsets[i] + (long)rowLen * (long)j + (long)k);
                }
            }
        }
    }
    //blob is prepared: values are where they gotta be, trash is everwhere else!
    return (new AzureBlobDataSet(uri, schemeSize, info));
}
/// <summary>
/// Opens an existing blob-backed dataset, reading the serialized schema from the
/// first pages of the page blob.
/// </summary>
/// <param name="uri">DataSet URI or plain URI of the dataset blob.</param>
/// <remarks>
/// The 1st 512-byte page holds the schema length as a UTF-8 decimal string padded
/// with zero bytes; the schema JSON itself starts at offset 512. If the connection
/// string cannot be parsed, the blob is accessed anonymously over the public
/// endpoint and the dataset is opened read-only.
/// </remarks>
public AzureBlobDataSet(string uri)
{
    AzureBlobDataSetUri azureUri = DataSetUri.IsDataSetUri(uri)
        ? new AzureBlobDataSetUri(uri)
        : AzureBlobDataSetUri.ToUri(uri);
    this.uri = azureUri;

    CloudStorageAccount storageAccount;
    if (CloudStorageAccount.TryParse(azureUri.ConnectionString, out storageAccount))
    {
        CloudBlobClient blobClient = storageAccount.CreateCloudBlobClient();
        // Retrieve a reference to a container
        CloudBlobContainer container = blobClient.GetContainerReference(azureUri.Container);
        blob = container.GetPageBlobReference(azureUri.Blob);
    }
    else
    {
        // No usable credentials: fall back to anonymous access over the public endpoint.
        blob = new CloudPageBlob(@"http://" + azureUri.AccountName + @".blob.core.windows.net/" + azureUri.Container + @"/" + azureUri.Blob);
        _anonymous = true;
    }

    SerializableDataSetSchema info;
    Int32 schemeSize;
    using (BinaryReader br = new BinaryReader(blob.OpenRead()))
    {
        // Page 1: schema length as UTF-8 digits, zero-padded to 512 bytes.
        byte[] buffer = new byte[512];
        ReadExact(br.BaseStream, buffer, 512);
        UTF8Encoding utf8 = new UTF8Encoding();
        // FIX: trim the zero padding explicitly. .NET Framework's Int32.Parse
        // tolerated trailing NUL characters, but .NET Core does not, so parsing
        // the raw 512-character string is not portable.
        string sizeStr = utf8.GetString(buffer).TrimEnd('\0');
        schemeSize = Int32.Parse(sizeStr);

        // Schema JSON starts on the 2nd page.
        br.BaseStream.Seek(512, SeekOrigin.Begin);
        byte[] scheme = new byte[schemeSize];
        ReadExact(br.BaseStream, scheme, schemeSize);
        DataContractJsonSerializer serializer = new DataContractJsonSerializer(typeof(SerializableDataSetSchema));
        info = (SerializableDataSetSchema)serializer.ReadObject(new MemoryStream(scheme));
    }

    // Initialize and commit with autocommit suspended, then restore the flag.
    bool savedAutoCommitState = IsAutocommitEnabled;
    IsAutocommitEnabled = false;
    Initialize(schemeSize, info);
    Commit();
    if (_anonymous)
    {
        // Anonymous access cannot write pages, so expose the dataset read-only.
        this.SetCompleteReadOnly();
    }
    IsAutocommitEnabled = savedAutoCommitState;
    _IsInitialized = true;
}

/// <summary>
/// Fills <paramref name="buffer"/> with exactly <paramref name="count"/> bytes from
/// <paramref name="stream"/>. Stream.Read may return fewer bytes than requested, so
/// the read is looped; a premature end of stream raises
/// <see cref="EndOfStreamException"/> instead of looping forever (the original read
/// loops never handled a zero-length read).
/// </summary>
private static void ReadExact(Stream stream, byte[] buffer, int count)
{
    int offset = 0;
    while (offset < count)
    {
        int read = stream.Read(buffer, offset, count - offset);
        if (read <= 0)
        {
            throw new EndOfStreamException("Unexpected end of blob stream while reading dataset schema.");
        }
        offset += read;
    }
}
/// <summary>
/// Copies the given dataset into the dataset addressed by <paramref name="dstUri"/>
/// and prints progress into the console.
/// </summary>
/// <param name="src">Original dataset to clone.</param>
/// <param name="dstUri">URI of the destination dataset.</param>
/// <returns>New instance of the <see cref="DataSet"/> class.</returns>
/// <remarks><para>
/// This method splits the original dataset into parts and therefore is able
/// to clone very large datasets that do not fit into memory.</para>
/// <para>Progress is printed out into the console window.</para>
/// </remarks>
/// <seealso cref="DataSetCloning.Clone(DataSet,DataSetUri,ProgressUpdater)"/>
public static DataSet Clone(DataSet src, DataSetUri dstUri)
{
    // Delegate to the full overload using the console progress reporter.
    return Clone(src, dstUri, DefaultUpdater);
}
/// <summary>
/// Resolves relative values of the string properties of <paramref name="uri"/>
/// marked with <see cref="UriPropertyAttribute"/> against
/// <see cref="DataSetFactory.BaseUri"/>. Does nothing when no base URI is set.
/// </summary>
/// <param name="uri">DataSet URI to update; must not be null.</param>
public static void NormalizeUris(DataSetUri uri)
{
    if (uri == null)
        throw new ArgumentNullException("uri");
    if (String.IsNullOrEmpty(DataSetFactory.BaseUri))
        return; // no base URI configured, nothing to resolve against
    PropertyInfo[] properties = uri.GetType().GetProperties(
        BindingFlags.Instance | BindingFlags.Public | BindingFlags.DeclaredOnly);
    foreach (PropertyInfo property in properties)
    {
        // Only readable/writable string properties can be normalized.
        if (property.PropertyType != typeof(string) || !property.CanRead || !property.CanWrite)
            continue;
        if (property.GetCustomAttributes(typeof(UriPropertyAttribute), false).Length == 0)
            continue; // property is not marked as a URI
        string value = (string)property.GetValue(uri, null);
        if (String.IsNullOrEmpty(value))
            continue;
        Uri candidate;
        if (Uri.TryCreate(value, UriKind.RelativeOrAbsolute, out candidate) && !candidate.IsAbsoluteUri)
        {
            // Relative value: resolve it against the configured base URI.
            value = new Uri(new Uri(DataSetFactory.BaseUri), value).ToString();
        }
        property.SetValue(uri, value, null);
    }
}
/// <summary>
/// Updates the properties of the <paramref name="uri"/> marked with
/// <see cref="DirectoryPropertyAttribute"/> according to
/// <see cref="Microsoft.Research.Science.Data.Factory.DataSetFactory.BaseUri"/>.
/// </summary>
/// <param name="uri">DataSet URI to update; must not be null.</param>
/// <remarks>
/// <para>
/// The method finds all public string properties of the <paramref name="uri"/> marked
/// with <see cref="DirectoryPropertyAttribute"/> whose value is a relative local path,
/// and rewrites them against <see cref="Microsoft.Research.Science.Data.Factory.DataSetFactory.BaseUri"/>
/// when it is set, or against the current directory otherwise.
/// </para>
/// </remarks>
private static void NormalizeDirectoryPath(DataSetUri uri)
{
    if (uri == null)
        throw new ArgumentNullException("uri");
    PropertyInfo[] properties = uri.GetType().GetProperties(
        BindingFlags.Instance | BindingFlags.Public | BindingFlags.DeclaredOnly);
    foreach (PropertyInfo property in properties)
    {
        if (property.PropertyType != typeof(string) || !property.CanRead || !property.CanWrite)
            continue;
        if (property.GetCustomAttributes(typeof(DirectoryPropertyAttribute), false).Length == 0)
            continue; // not a directory path property
        string dir = (string)property.GetValue(uri, null);
        if (String.IsNullOrEmpty(dir))
            continue;
        // An absolute URI with a non-"file" scheme is not a local directory path.
        Uri parsed;
        bool parsedOk = Uri.TryCreate(dir, UriKind.RelativeOrAbsolute, out parsed);
        bool isRemote = parsedOk && parsed.IsAbsoluteUri && parsed.Scheme != UriSchemeFile;
        if (isRemote)
            continue;
        if (!System.IO.Path.IsPathRooted(dir))
        {
            string basePath = DataSetFactory.BaseUri != null
                ? DataSetFactory.BaseUri
                : Environment.CurrentDirectory;
            dir = System.IO.Path.Combine(basePath, dir);
        }
        property.SetValue(uri, dir, null);
    }
}
/// <summary>
/// Updates the properties of the <paramref name="uri"/> marked with
/// <see cref="FileNamePropertyAttribute"/>, <see cref="DirectoryPropertyAttribute"/>
/// or <see cref="UriPropertyAttribute"/> according to
/// <see cref="Microsoft.Research.Science.Data.Factory.DataSetFactory.BaseUri"/>.
/// </summary>
/// <param name="uri">DataSet URI to update.</param>
/// <remarks>
/// <para>
/// The method finds all properties of the <paramref name="uri"/> carrying one of the
/// attributes above whose value is a relative local path or URI, and rewrites each of
/// them according to the current value of
/// <see cref="Microsoft.Research.Science.Data.Factory.DataSetFactory.BaseUri"/>.
/// </para>
/// </remarks>
public static void NormalizeUri(DataSetUri uri)
{
    // File names first, then directory paths, then generic URI properties.
    NormalizeFileNames(uri);
    NormalizeDirectoryPath(uri);
    NormalizeUris(uri);
}
/// <summary>
/// Clones the given URI, replacing absolute file and directory paths with paths
/// relative to <paramref name="basePath"/>.
/// </summary>
/// <param name="uri">Input DataSetUri; must not be null.</param>
/// <param name="basePath">The rooted base path to make paths relative to.</param>
/// <returns>New DataSetUri with relative paths where possible.</returns>
public static DataSetUri GetRelativeUri(DataSetUri uri, string basePath)
{
    if (uri == null)
        throw new ArgumentNullException("uri");
    if (basePath == null)
        throw new ArgumentNullException("basePath");
    if (!System.IO.Path.IsPathRooted(basePath))
        throw new ArgumentException("basePath must be rooted");
    uri = DataSetUri.Create(uri.ToString()); // cloning
    // Ensure a trailing separator so MakeRelativeUri treats basePath as a directory.
    if (basePath[basePath.Length - 1] != System.IO.Path.DirectorySeparatorChar)
        basePath += System.IO.Path.DirectorySeparatorChar;
    PropertyInfo[] properties = uri.GetType().GetProperties(
        BindingFlags.Instance | BindingFlags.Public | BindingFlags.DeclaredOnly);
    foreach (PropertyInfo property in properties)
    {
        if (property.PropertyType != typeof(string) || !property.CanRead || !property.CanWrite)
            continue;
        bool isPathProperty =
            property.GetCustomAttributes(typeof(FileNamePropertyAttribute), false).Length > 0 ||
            property.GetCustomAttributes(typeof(DirectoryPropertyAttribute), false).Length > 0;
        if (!isPathProperty)
            continue;
        string value = (string)property.GetValue(uri, null);
        if (String.IsNullOrEmpty(value))
            continue;
        // Only absolute values located under basePath are rewritten.
        Uri absolute;
        if (Uri.TryCreate(value, UriKind.Absolute, out absolute) && value.StartsWith(basePath))
        {
            string relative = new Uri(basePath).MakeRelativeUri(absolute).ToString();
            property.SetValue(uri, relative, null);
        }
    }
    return uri;
}
/// <summary>
/// Rewrites the "file" parameter of <paramref name="uri"/> so that a relative local
/// path becomes absolute: resolved against <see cref="DataSetFactory.BaseUri"/> when
/// it is set, or against the current directory otherwise. Remote URIs are left untouched.
/// </summary>
/// <param name="uri">DataSet URI to update; must not be null.</param>
public static void NormalizeFileName(DataSetUri uri)
{
    if (uri == null)
        throw new ArgumentNullException("uri");
    if (!uri.ContainsParameter("file"))
        return; // no "file" parameter to normalize
    string file = uri["file"];
    if (String.IsNullOrEmpty(file))
        return;
    // An absolute URI with a non-"file" scheme addresses a remote resource.
    Uri parsed;
    bool parsedOk = Uri.TryCreate(file, UriKind.RelativeOrAbsolute, out parsed);
    bool isRemote = parsedOk && parsed.IsAbsoluteUri && parsed.Scheme != UriSchemeFile;
    if (isRemote)
        return;
    if (!System.IO.Path.IsPathRooted(file))
    {
        string basePath = DataSetFactory.BaseUri != null
            ? DataSetFactory.BaseUri
            : Environment.CurrentDirectory;
        file = System.IO.Path.Combine(basePath, file);
    }
    uri["file"] = file;
}