/// <summary>
/// Initialise the item data using an existing stream. If the build mode is to blocks the itemDataStream is broken into chunks.
/// </summary>
/// <param name="itemIdentifier">Identifier passed through to SetData to associate with the underlying data</param>
/// <param name="dataBuildMode">Determines how the item data will be stored/built</param>
/// <param name="itemDataStream">The stream containing the complete item data. Must not be null.</param>
/// <param name="enableChunkChecksum">If true a per-chunk checksum is calculated after the data is set</param>
/// <exception cref="ArgumentNullException">Thrown if itemDataStream is null</exception>
public DistributedItemData(string itemIdentifier, DataBuildMode dataBuildMode, Stream itemDataStream, bool enableChunkChecksum = false)
{
    this.DataBuildMode = dataBuildMode;

    if (itemDataStream == null)
    {
        //nameof keeps the parameter name refactor-safe (was a string literal)
        throw new ArgumentNullException(nameof(itemDataStream), "itemDataStream cannot be null.");
    }

    this.ItemBytesLength = itemDataStream.Length;

    //Calculate the exactChunkSize if we split everything up into 255 pieces
    //(TotalNumChunks is stored as a byte, so 255 is the maximum possible chunk count)
    double exactChunkSize = (double)ItemBytesLength / 255.0;

    //If the item is too small we just use the minimumChunkSize
    //If we need something larger than MinChunkSizeInBytes we select appropriately
    this.ChunkSizeInBytes = (exactChunkSize <= DFS.MinChunkSizeInBytes ? DFS.MinChunkSizeInBytes : (int)Math.Ceiling(exactChunkSize));
    this.TotalNumChunks = (byte)(Math.Ceiling((double)ItemBytesLength / (double)ChunkSizeInBytes));

    InitialiseChunkPositionLengthDict();

    SetData(itemIdentifier, itemDataStream);

    if (enableChunkChecksum)
    {
        BuildChunkCheckSums();
    }
}
/// <summary>
/// Updates the ItemBuildTarget
/// </summary>
/// <param name="newDataBuildMode">The new DataBuildMode to use</param>
/// <exception cref="InvalidOperationException">Thrown if the item has already been added to the DFS</exception>
public void UpdateBuildTarget(DataBuildMode newDataBuildMode)
{
    lock (dataLocker)
    {
        //The build mode may only be changed while the item is not yet tracked by the DFS
        if (DFS.GetDistributedItemByChecksum(CompleteDataCheckSum) == null)
        {
            this.DataBuildMode = newDataBuildMode;
        }
        else
        {
            //InvalidOperationException (was bare Exception) - derives from Exception so existing callers still catch it
            throw new InvalidOperationException("Unable to update build target once item has been added to DFS. Future version of the DFS may be more flexible in this regard.");
        }
    }
}
/// <summary>
/// Initialise the item data using existing chunk streams. Build mode must be to blocks.
/// </summary>
/// <param name="dataBuildMode">Determines how the item data will be stored/built. Single-stream modes are rejected.</param>
/// <param name="itemDataStreams">One stream per chunk, keyed by chunk index. Must not be null and must contain exactly one stream per chunk.</param>
/// <param name="enableChunkChecksum">If true a per-chunk checksum is calculated after the streams are wrapped</param>
/// <exception cref="ArgumentNullException">Thrown if itemDataStreams is null</exception>
/// <exception cref="ArgumentException">Thrown for single-stream build modes or a stream-count/chunk-count mismatch</exception>
public DistributedItemData(DataBuildMode dataBuildMode, Dictionary<int, Stream> itemDataStreams, bool enableChunkChecksum = false)
{
    //BUG FIX: was "this.DataBuildMode = DataBuildMode;" which assigned the property to
    //itself and silently ignored the dataBuildMode parameter
    this.DataBuildMode = dataBuildMode;

    if (itemDataStreams == null)
    {
        throw new ArgumentNullException(nameof(itemDataStreams), "itemDataStreams cannot be null.");
    }

    if (dataBuildMode == DataBuildMode.Disk_Single ||
        //itemBuildMode == ItemBuildMode.Both_Single ||
        dataBuildMode == DataBuildMode.Memory_Single)
    {
        throw new ArgumentException("Please use other constructor that takes a single input data stream.");
    }

    this.ItemBytesLength = itemDataStreams.Select(i => i.Value.Length).Sum();

    //Calculate the exactChunkSize if we split everything up into 255 pieces
    //(TotalNumChunks is stored as a byte, so 255 is the maximum possible chunk count)
    double exactChunkSize = (double)ItemBytesLength / 255.0;

    //If the item is too small we just use the minimumChunkSize
    //If we need something larger than MinChunkSizeInBytes we select appropriately
    this.ChunkSizeInBytes = (exactChunkSize <= DFS.MinChunkSizeInBytes ? DFS.MinChunkSizeInBytes : (int)Math.Ceiling(exactChunkSize));
    this.TotalNumChunks = (byte)(Math.Ceiling((double)ItemBytesLength / (double)ChunkSizeInBytes));

    InitialiseChunkPositionLengthDict();

    if (itemDataStreams.Count != ChunkPositionLengthDict.Count)
    {
        throw new ArgumentException("Number of streams should equal the number of chunks");
    }

    //Initialise the data streams, one thread safe wrapper per provided chunk stream
    ChunkDataStreams = new StreamTools.ThreadSafeStream[ChunkPositionLengthDict.Count];
    foreach (int chunkIndex in ChunkPositionLengthDict.Keys)
    {
        ChunkDataStreams[chunkIndex] = new StreamTools.ThreadSafeStream(itemDataStreams[chunkIndex]);
    }

    this.CompleteDataCheckSum = MD5();

    if (enableChunkChecksum)
    {
        BuildChunkCheckSums();
    }
}
/// <summary>
/// Updates the ItemBuildTarget
/// </summary>
/// <param name="newDataBuildMode">The new DataBuildMode to use</param>
/// <exception cref="InvalidOperationException">Thrown if the item has already been added to the DFS</exception>
public void UpdateBuildTarget(DataBuildMode newDataBuildMode)
{
    lock (dataLocker)
    {
        //The build mode may only be changed while the item is not yet tracked by the DFS
        if (DFS.GetDistributedItemByChecksum(CompleteDataCheckSum) == null)
        {
            this.DataBuildMode = newDataBuildMode;
        }
        else
        {
            //InvalidOperationException (was bare Exception) - derives from Exception so existing callers still catch it
            throw new InvalidOperationException("Unable to update build target once item has been added to DFS. Future version of the DFS may be more flexible in this regard.");
        }
    }
}
/// <summary>
/// Initialise the item data using existing chunk streams. Build mode must be to blocks.
/// </summary>
/// <param name="dataBuildMode">Determines how the item data will be stored/built. Single-stream modes are rejected.</param>
/// <param name="itemDataStreams">One stream per chunk, keyed by chunk index. Must not be null and must contain exactly one stream per chunk.</param>
/// <param name="enableChunkChecksum">If true a per-chunk checksum is calculated after the streams are wrapped</param>
/// <exception cref="ArgumentNullException">Thrown if itemDataStreams is null</exception>
/// <exception cref="ArgumentException">Thrown for single-stream build modes or a stream-count/chunk-count mismatch</exception>
public DistributedItemData(DataBuildMode dataBuildMode, Dictionary<int, Stream> itemDataStreams, bool enableChunkChecksum = false)
{
    //BUG FIX: was "this.DataBuildMode = DataBuildMode;" which assigned the property to
    //itself and silently ignored the dataBuildMode parameter
    this.DataBuildMode = dataBuildMode;

    if (itemDataStreams == null)
        throw new ArgumentNullException(nameof(itemDataStreams), "itemDataStreams cannot be null.");

    if (dataBuildMode == DataBuildMode.Disk_Single ||
        //itemBuildMode == ItemBuildMode.Both_Single ||
        dataBuildMode == DataBuildMode.Memory_Single)
        throw new ArgumentException("Please use other constructor that takes a single input data stream.");

    this.ItemBytesLength = itemDataStreams.Select(i => i.Value.Length).Sum();

    //Calculate the exactChunkSize if we split everything up into 255 pieces
    //(TotalNumChunks is stored as a byte, so 255 is the maximum possible chunk count)
    double exactChunkSize = (double)ItemBytesLength / 255.0;

    //If the item is too small we just use the minimumChunkSize
    //If we need something larger than MinChunkSizeInBytes we select appropriately
    this.ChunkSizeInBytes = (exactChunkSize <= DFS.MinChunkSizeInBytes ? DFS.MinChunkSizeInBytes : (int)Math.Ceiling(exactChunkSize));
    this.TotalNumChunks = (byte)(Math.Ceiling((double)ItemBytesLength / (double)ChunkSizeInBytes));

    InitialiseChunkPositionLengthDict();

    if (itemDataStreams.Count != ChunkPositionLengthDict.Count)
        throw new ArgumentException("Number of streams should equal the number of chunks");

    //Initialise the data streams, one thread safe wrapper per provided chunk stream
    ChunkDataStreams = new StreamTools.ThreadSafeStream[ChunkPositionLengthDict.Count];
    foreach (int chunkIndex in ChunkPositionLengthDict.Keys)
        ChunkDataStreams[chunkIndex] = new StreamTools.ThreadSafeStream(itemDataStreams[chunkIndex]);

    this.CompleteDataCheckSum = MD5();

    if (enableChunkChecksum)
        BuildChunkCheckSums();
}
/// <summary>
/// Initialise the item data using an existing stream. If the build mode is to blocks the itemDataStream is broken into chunks.
/// </summary>
/// <param name="itemIdentifier">Identifier passed through to SetData to associate with the underlying data</param>
/// <param name="dataBuildMode">Determines how the item data will be stored/built</param>
/// <param name="itemDataStream">The stream containing the complete item data. Must not be null.</param>
/// <param name="enableChunkChecksum">If true a per-chunk checksum is calculated after the data is set</param>
/// <exception cref="ArgumentNullException">Thrown if itemDataStream is null</exception>
public DistributedItemData(string itemIdentifier, DataBuildMode dataBuildMode, Stream itemDataStream, bool enableChunkChecksum = false)
{
    this.DataBuildMode = dataBuildMode;

    if (itemDataStream == null)
        //nameof keeps the parameter name refactor-safe (was a string literal)
        throw new ArgumentNullException(nameof(itemDataStream), "itemDataStream cannot be null.");

    this.ItemBytesLength = itemDataStream.Length;

    //Calculate the exactChunkSize if we split everything up into 255 pieces
    //(TotalNumChunks is stored as a byte, so 255 is the maximum possible chunk count)
    double exactChunkSize = (double)ItemBytesLength / 255.0;

    //If the item is too small we just use the minimumChunkSize
    //If we need something larger than MinChunkSizeInBytes we select appropriately
    this.ChunkSizeInBytes = (exactChunkSize <= DFS.MinChunkSizeInBytes ? DFS.MinChunkSizeInBytes : (int)Math.Ceiling(exactChunkSize));
    this.TotalNumChunks = (byte)(Math.Ceiling((double)ItemBytesLength / (double)ChunkSizeInBytes));

    InitialiseChunkPositionLengthDict();

    SetData(itemIdentifier, itemDataStream);

    if (enableChunkChecksum)
        BuildChunkCheckSums();
}