internal SymbolFile(
    Func<string, bool> symlinkTester,
    Client bxlClient,
    string filePath,
    string fileId,
    ContentHash hash,
    IEnumerable<IDebugEntryData> debugEntries)
{
    Contract.Requires(symlinkTester != null);
    Contract.Requires(bxlClient != null);
    Contract.Requires(!string.IsNullOrEmpty(filePath));
    Contract.Requires(!string.IsNullOrEmpty(fileId));
    Contract.Requires(hash.IsValid);
    // It's not clear whether the symbol endpoint can play nicely with dedup hashes, so locking it down to VSO0 for now.
    Contract.Requires(hash.HashType == HashType.Vso0, "support only VSO0 hashes (for now)");

    if (debugEntries != null)
    {
        var blobIdentifier = new BlobIdentifier(hash.ToHashByteArray());
        Contract.Assert(debugEntries.All(e => e.BlobIdentifier == blobIdentifier));
        m_debugEntries = new List<IDebugEntryData>(debugEntries);
    }

    m_symlinkTester = symlinkTester;
    m_bxlClient = bxlClient;
    FullFilePath = Path.GetFullPath(filePath);
    m_file = FileId.Parse(fileId);
    Hash = hash;
}
private async Task<BoolResult> GetFileWithDedupAsync(OperationContext context, ContentHash contentHash, string path)
{
    BlobIdentifier blobId = contentHash.ToBlobIdentifier();
    DedupIdentifier dedupId = blobId.ToDedupIdentifier();

    try
    {
        await TryGatedArtifactOperationAsync<object>(
            context,
            contentHash.ToString(),
            "DownloadToFileAsync",
            async innerCts =>
            {
                await DedupStoreClient.DownloadToFileAsync(dedupId, path, null, null, EdgeCache.Allowed, innerCts);
                return null;
            });
    }
    catch (NullReferenceException) // Null reference thrown when DedupIdentifier doesn't exist in VSTS.
    {
        return new BoolResult("DedupIdentifier not found.");
    }
    catch (Exception ex)
    {
        return new BoolResult(ex);
    }

    return BoolResult.Success;
}
/// <summary>
/// Constructor.
/// </summary>
/// <param name="fileFullPath">
/// Path to the target file which is to be added to drop. The file must exist on disk at the time of invocation.
/// </param>
/// <param name="relativeDropPath">
/// Relative path under which to associate the file in the target drop. If null, the file name is used.
/// </param>
/// <param name="fileContentInfo">
/// Expected content hash and file length. May be left null.
/// </param>
public DropItemForFile(string fileFullPath, string relativeDropPath = null, FileContentInfo? fileContentInfo = null)
{
    Contract.Requires(fileFullPath != null);

    FullFilePath = Path.GetFullPath(fileFullPath);
    RelativeDropPath = relativeDropPath ?? Path.GetFileName(FullFilePath);

    if (fileContentInfo != null)
    {
        var contentInfo = fileContentInfo.Value;
        if (contentInfo.Hash.HashType.Equals(BuildXL.Cache.ContentStore.Hashing.HashType.DedupChunk))
        {
            BlobIdentifier = new ChunkDedupIdentifier(contentInfo.Hash.ToHashByteArray()).ToBlobIdentifier();
        }
        else
        {
            BlobIdentifier = new BlobIdentifier(contentInfo.Hash.ToHashByteArray());
        }

        FileLength = contentInfo.HasKnownLength ? contentInfo.Length : UnknownFileLength;
    }
    else
    {
        BlobIdentifier = null;
        FileLength = UnknownFileLength;
    }
}
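A minimal usage sketch (the paths below are hypothetical): when no relative drop path is given, the constructor falls back to the file name.

// Hypothetical paths for illustration only.
var withExplicitPath = new DropItemForFile(@"C:\out\bin\app.dll", relativeDropPath: "bin/app.dll");
var withDefaultPath = new DropItemForFile(@"C:\out\bin\app.pdb"); // associated in the drop as "app.pdb"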
public void CanBeSerializedThenDeserialized_NewtonsoftJson()
{
    BlobIdentifier original = BlobIdentifier.MaxValue;

    string serialized = JsonConvert.SerializeObject(original, Formatting.None);
    BlobIdentifier deserializedObject = JsonConvert.DeserializeObject<BlobIdentifier>(serialized);

    Assert.Equal(original, deserializedObject);
}
/// <nodoc />
public DropFile(string relativePath, long? fileSize, BlobIdentifier blobId)
{
    Contract.Requires(!string.IsNullOrEmpty(relativePath));
    Contract.Requires(blobId != null);

    BlobIdentifier = blobId;
    FileSize = fileSize;
    RelativePath = relativePath;
}
/// <nodoc/>
public void SetDebugEntries(List<DebugEntryData> entries)
{
    Contract.Requires(entries != null);

    // Check that either all entries are missing the blobId, or all the entries have the same blobId and that blobId matches this file.
    var blobIdentifier = new BlobIdentifier(Hash.ToHashByteArray());
    Contract.Assert(entries.All(e => e.BlobIdentifier == null) || entries.All(e => e.BlobIdentifier == blobIdentifier));

    // Ensure that BlobIdentifier is not null.
    // Here we 'trust' that the debug entries are from the current symbol file.
    entries.ForEach(entry => entry.BlobIdentifier = blobIdentifier);

    m_debugEntries = new List<IDebugEntryData>(entries);
}
public async Task<BlobIdentifierWithBlocks> UploadLogToBlobstorageService(Stream blob, string hubName, Guid planId, int logId)
{
    CheckConnection();

    BlobIdentifier blobId = VsoHash.CalculateBlobIdentifierWithBlocks(blob).BlobId;

    // Since we read this while calculating the hash, the position needs to be reset before we send this.
    blob.Position = 0;

    using (var blobClient = CreateArtifactsClient(_connection, default(CancellationToken)))
    {
        return await blobClient.UploadBlocksForBlobAsync(blobId, blob, default(CancellationToken));
    }
}
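A minimal usage sketch (the "client" instance and log contents are hypothetical): the method hashes the stream itself and rewinds it before uploading, so any seekable stream works.

// Hypothetical caller and log content for illustration only.
using (var log = new MemoryStream(Encoding.UTF8.GetBytes("build log text")))
{
    BlobIdentifierWithBlocks uploaded =
        await client.UploadLogToBlobstorageService(log, hubName: "build", planId: Guid.NewGuid(), logId: 1);
}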
/// <summary>
/// Converts a VSO BlobIdentifier to a ContentHash.
/// </summary>
public static ContentHash ToContentHash(this BlobIdentifier blobId)
{
    switch (blobId.AlgorithmId)
    {
        case VsoHash.VsoAlgorithmId:
            return new ContentHash(HashType.Vso0, blobId.Bytes);
        case BuildXL.Cache.ContentStore.Hashing.NodeDedupIdentifier.NodeAlgorithmId:
        case ChunkDedupIdentifier.ChunkAlgorithmId:
            return new ContentHash(HashType.DedupNodeOrChunk, blobId.Bytes);
        default:
            throw new ArgumentException($"BlobIdentifier has an unrecognized AlgorithmId: {blobId.AlgorithmId}");
    }
}
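A minimal round-trip sketch, assuming the companion ToBlobIdentifier extension used elsewhere in this codebase: a VSO0 hash converts to a BlobIdentifier and back without losing its bytes.

ContentHash hash = ContentHash.Random(HashType.Vso0);
BlobIdentifier blobId = hash.ToBlobIdentifier();   // companion conversion, used in PinCoreImplAsync below
ContentHash roundTripped = blobId.ToContentHash(); // HashType.Vso0 with the same bytes
Debug.Assert(roundTripped == hash);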
public void SerializationFormatIsCorrect_NewtonsoftJson()
{
    BlobIdentifier original = BlobIdentifier.CreateFromAlgorithmResult(HashIdentifier, 0xF);

    string serialized = JsonConvert.SerializeObject(original);

    JObject jObject = JObject.Parse(serialized);
    Assert.Equal(1, jObject.Count);

    var identifierProperty = jObject["identifierValue"];
    Assert.NotNull(identifierProperty);

    var bytes = identifierProperty.ToObject<byte[]>();
    Assert.Equal(bytes, original.Bytes);
}
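For reference, a quick way to see the shape this test asserts (the hex digest is the same one used in CreateBlobIdentifier below; Newtonsoft renders the byte array as a base64 string):

var blobId = BlobIdentifier.CreateFromAlgorithmResult("54CE418A2A89A74B42CC39630167795DED5F3B16A75FF32A01B2B01C59697784");
Console.WriteLine(JsonConvert.SerializeObject(blobId));
// Prints a single-property object: {"identifierValue":"VM5B..."} (base64 payload abbreviated)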
/// <summary>
/// Extracts the debug entry for a JavaScript sourcemap file.
/// It will try to extract the client key from the sourcemap, so that
/// the tool that writes the sourcemap has control over the key.
/// It will fall back to the SHA-256 of the sourcemap as the client key
/// when it can't be found.
/// </summary>
internal static async Task<DebugEntryData[]> GetJsMapDebugEntryAsync(FileInfo file, bool calculateBlobId = false)
{
    var fileName = file.FullName;
    string clientKey = TryGetSymbolClientKeyFromJsMap(fileName);
    if (clientKey == null)
    {
        // If the .js.map file does not contain the proper info, use the content hash as a fallback.
        try
        {
            using (var fileStream = FileStreamUtility.OpenFileStreamForAsync(fileName, FileMode.Open, FileAccess.Read, FileShare.Read | FileShare.Delete))
            {
                var hash = await HashInfoLookup.GetContentHasher(HashType.SHA256).GetContentHashAsync(fileStream);
                var clientId = hash.ToHex().ToLowerInvariant();
                clientKey = CreateClientKey(clientId, Path.GetFileName(fileName));
            }
        }
        catch (IOException)
        {
            return new DebugEntryData[0];
        }
        catch (UnauthorizedAccessException)
        {
            return new DebugEntryData[0];
        }
    }

    BlobIdentifier blobId = null;
    if (calculateBlobId)
    {
        var blobDescriptor = await FileBlobDescriptor.CalculateAsync(file.DirectoryName, chunkDedup: false, file.Name, FileBlobType.File, CancellationToken.None);
        blobId = blobDescriptor.BlobIdentifier;
    }

    return new[]
    {
        new DebugEntryData()
        {
            BlobIdentifier = blobId,
            ClientKey = clientKey,
            InformationLevel = DebugInformationLevel.Private
        }
    };
}
/// <summary>
/// Gets a single hash representing the ContentHashList.
/// </summary>
public static byte[] GetHashOfHashes(this ContentHashList contentHashList)
{
    var rollingBlobIdentifier = new VsoHash.RollingBlobIdentifier();
    BlobIdentifier blobIdOfContentHashes = VsoHash.OfNothing.BlobId;

    for (int i = 0; i < contentHashList.Hashes.Count; i++)
    {
        BlobIdentifier blobId = BlobIdentifier.Deserialize(contentHashList.Hashes[i].ToHex());

        if (i != contentHashList.Hashes.Count - 1)
        {
            rollingBlobIdentifier.Update(VsoHash.HashBlock(blobId.Bytes, blobId.Bytes.Length));
        }
        else
        {
            blobIdOfContentHashes = rollingBlobIdentifier.Finalize(VsoHash.HashBlock(blobId.Bytes, blobId.Bytes.Length));
        }
    }

    return blobIdOfContentHashes.Bytes;
}
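A minimal usage sketch, assuming ContentHashList's array constructor: the rolling VSO hash folds each member hash in as one block, so the result fingerprints the whole list rather than any single entry.

var list = new ContentHashList(new[]
{
    ContentHash.Random(HashType.Vso0),
    ContentHash.Random(HashType.Vso0),
});
byte[] fingerprint = list.GetHashOfHashes(); // deterministic for a given sequence of member hashes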
public void HashAlgorithmForCacheIsTheSame()
{
    using (var hashAlgorithm = new VsoHashAlgorithm())
    {
        var blobSizes = new[]
        {
            0,
            1,
            VsoHash.BlockSize - 1,
            VsoHash.BlockSize,
            VsoHash.BlockSize + 1,
            (2 * VsoHash.BlockSize) - 1,
            2 * VsoHash.BlockSize,
            (2 * VsoHash.BlockSize) + 1,
        };

        foreach (int blobSize in blobSizes)
        {
            var content = ThreadSafeRandom.GetBytes(blobSize);

            hashAlgorithm.Initialize();
            byte[] hashAlgoBytes = hashAlgorithm.ComputeHash(content);
            BlobIdentifier blobId = VsoHash.CalculateBlobIdentifier(content);
            Assert.True(hashAlgoBytes.SequenceEqual(blobId.Bytes));
        }
    }
}
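A small sketch of why this equivalence is useful: because VsoHashAlgorithm derives from System.Security.Cryptography.HashAlgorithm, it plugs into stream-based hashing APIs while producing the same bytes as VsoHash.CalculateBlobIdentifier.

using (var algorithm = new VsoHashAlgorithm())
using (var stream = new MemoryStream(new byte[] { 1, 2, 3 }))
{
    byte[] streamed = algorithm.ComputeHash(stream); // HashAlgorithm's stream overload
    byte[] direct = VsoHash.CalculateBlobIdentifier(new byte[] { 1, 2, 3 }).Bytes;
    Debug.Assert(streamed.SequenceEqual(direct));
}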
/// <summary>
/// Converts a VSO BlobIdentifier to a ContentHash.
/// </summary>
public static ContentHash ToContentHash(this BlobIdentifier blobId)
{
    switch (blobId.AlgorithmId)
    {
        case VsoHash.VsoAlgorithmId:
            return new ContentHash(HashType.Vso0, blobId.Bytes);
        case ChunkDedupIdentifier.ChunkAlgorithmId:
            return new ContentHash(HashType.Dedup64K, blobId.Bytes); // TODO: Chunk size optimization
        case (byte)NodeAlgorithmId.Node64K:
            return new ContentHash(HashType.Dedup64K, blobId.Bytes);
        case (byte)NodeAlgorithmId.Node1024K:
            return new ContentHash(HashType.Dedup1024K, blobId.Bytes);
        case MurmurHashInfo.MurmurAlgorithmId:
            return new ContentHash(HashType.Murmur, blobId.Bytes);
        default:
            throw new ArgumentException($"BlobIdentifier has an unrecognized AlgorithmId: {blobId.AlgorithmId}");
    }
}
/// <summary>
/// Initializes a new instance of the <see cref="BlobContentHashListWithDeterminism"/> struct.
/// </summary>
public BlobContentHashListWithDeterminism(Guid value, BlobIdentifier blobId, Uri downloadUri)
{
    ContentHashListMetadataBlobId = blobId?.ValueString;
    ContentHashListMetadataBlobDownloadUriString = downloadUri?.AbsoluteUri;
    Guid = value.ToString();
}
/// <summary>
/// Initializes a new instance of the <see cref="BlobContentHashListWithDeterminism"/> struct.
/// </summary>
public BlobContentHashListWithDeterminism(Guid value, BlobIdentifier blobId)
    : this(value, blobId, null)
{
}
private async Task<PinResult> PinCoreImplAsync(OperationContext context, ContentHash contentHash, DateTime keepUntil)
{
    if (!contentHash.HashType.IsValidDedup())
    {
        return new PinResult($"DedupStore client requires a HashType that supports dedup. Given hash type: {contentHash.HashType}.");
    }

    var pinResult = CheckPinInMemory(contentHash, keepUntil);
    if (pinResult.Succeeded)
    {
        return pinResult;
    }

    BlobIdentifier blobId = contentHash.ToBlobIdentifier();
    DedupIdentifier dedupId = blobId.ToDedupIdentifier();

    if (dedupId.AlgorithmId == Hashing.ChunkDedupIdentifier.ChunkAlgorithmId)
    {
        // No need to optimize since pinning a chunk is always a fast operation.
        return await PinImplAsync(context, contentHash, keepUntil);
    }

    // Since pinning the whole tree can be an expensive operation, we have optimized how we call it. Depending on the current
    // keepUntil of the root node, which is inexpensive to check, the operation will behave differently:
    //   - The pin operation will be ignored if the remaining time is greater than ignorePinThreshold, to reduce the number of calls.
    //   - The pin operation will be inlined if the remaining time is lower than pinInlineThreshold, to make sure that we don't try
    //     to use content that we pinned in the background but that expired before we could complete the pin.
    //   - Otherwise, the pin operation will run asynchronously and return success. Most calls should follow this behavior,
    //     to avoid waiting on a potentially long operation. We're confident returning a success because we know that the
    //     content is there even though we still have to extend its keepUntil.
    var keepUntilResult = await CheckNodeKeepUntilAsync(context, dedupId);
    if (!keepUntilResult.Succeeded)
    {
        // Returned a service error. Fail fast.
        return new PinResult(keepUntilResult);
    }
    else if (!keepUntilResult.Value.HasValue)
    {
        // Content not found.
        return new PinResult(PinResult.ResultCode.ContentNotFound);
    }

    var timeLeft = keepUntilResult.Value.Value - DateTime.UtcNow;

    // Make sure to only trigger this optimization for normal pins and not for pins for incorporate.
    if (keepUntil == EndDateTime && timeLeft > _ignorePinThreshold)
    {
        Tracer.Debug(context, $"Pin was skipped because keepUntil has remaining time [{timeLeft}] that is greater than ignorePinThreshold=[{_ignorePinThreshold}]");
        _dedupCounters[Counters.PinIgnored].Increment();
        return PinResult.Success;
    }

    var pinTask = PinImplAsync(context, contentHash, keepUntil);

    if (timeLeft < _pinInlineThreshold)
    {
        Tracer.Debug(context, $"Pin inlined because keepUntil has remaining time [{timeLeft}] that is less than pinInlineThreshold=[{_pinInlineThreshold}]");
        _dedupCounters[Counters.PinInlined].Increment();
        return await pinTask;
    }

    pinTask.FireAndForget(context);
    return PinResult.Success;
}
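A minimal sketch (a hypothetical helper, not part of the store) isolating the three-way decision above: depending on the remaining keepUntil time, a pin is skipped, awaited inline, or fired off in the background.

enum PinStrategy { Ignore, Inline, Background }

static PinStrategy ChoosePinStrategy(TimeSpan timeLeft, TimeSpan ignoreThreshold, TimeSpan inlineThreshold)
{
    if (timeLeft > ignoreThreshold)
    {
        return PinStrategy.Ignore;     // keepUntil is far enough out; skip the call entirely
    }

    if (timeLeft < inlineThreshold)
    {
        return PinStrategy.Inline;     // close to expiry; wait for the pin to complete
    }

    return PinStrategy.Background;     // common case: pin asynchronously, report success now
}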
private BlobIdentifier CreateBlobIdentifier()
{
    const string HashIdentifier = "54CE418A2A89A74B42CC39630167795DED5F3B16A75FF32A01B2B01C59697784";
    return BlobIdentifier.CreateFromAlgorithmResult(HashIdentifier.ToUpperInvariant());
}