/// <summary>
/// Reads bytes from a BinaryReader and feeds them into the given HashAlgorithm wrapper,
/// stopping once the reader's base stream reaches the provided position.
/// The byte at the provided position itself is not hashed.
/// </summary>
/// <param name="reader">Read bytes from this stream</param>
/// <param name="hashFunc">HashAlgorithm wrapper used to hash contents cross platform</param>
/// <param name="position">Position to stop copying data</param>
internal static void ReadAndHashUntilPosition(BinaryReader reader, Sha512HashFunction hashFunc, long position)
{
    if (reader == null)
    {
        throw new ArgumentNullException(nameof(reader));
    }

    if (position > reader.BaseStream.Length)
    {
        throw new ArgumentOutOfRangeException(nameof(position), Strings.SignedPackageArchiveIOExtraRead);
    }

    if (position < reader.BaseStream.Position)
    {
        throw new ArgumentOutOfRangeException(nameof(position), Strings.SignedPackageArchiveIOInvalidRead);
    }

    // Read in chunks of at most _bufferSize bytes until the target position is reached.
    long remaining;
    while ((remaining = position - reader.BaseStream.Position) > 0)
    {
        var chunkSize = remaining > _bufferSize ? _bufferSize : (int)remaining;
        var chunk = reader.ReadBytes(chunkSize);
        HashBytes(hashFunc, chunk);
    }
}
/// <summary>
/// Reads bytes from a BinaryReader and feeds them into the given HashAlgorithm wrapper,
/// stopping once the reader's base stream reaches the provided position.
/// The byte at the provided position itself is not hashed.
/// </summary>
/// <param name="reader">Read bytes from this stream</param>
/// <param name="hashFunc">HashAlgorithm wrapper used to hash contents cross platform</param>
/// <param name="position">Position to stop copying data</param>
internal static void ReadAndHashUntilPosition(BinaryReader reader, Sha512HashFunction hashFunc, long position)
{
    if (reader == null)
    {
        throw new ArgumentNullException(nameof(reader));
    }

    if (position > reader.BaseStream.Length)
    {
        throw new ArgumentOutOfRangeException(nameof(position), Strings.SignedPackageArchiveIOExtraRead);
    }

    if (position < reader.BaseStream.Position)
    {
        throw new ArgumentOutOfRangeException(nameof(position), Strings.SignedPackageArchiveIOInvalidRead);
    }

    byte[] buffer = ArrayPool<byte>.Shared.Rent(_bufferSize);

    // try/finally guarantees the rented buffer is returned to the pool even when
    // Stream.Read or HashBytes throws; otherwise the pooled array would leak.
    try
    {
        Stream stream = reader.BaseStream;
        long currentPosition;

        while ((currentPosition = stream.Position) != position)
        {
            var bytesToRead = (int)Math.Min(position - currentPosition, buffer.Length);
            int bytesRead = stream.Read(buffer, offset: 0, bytesToRead);

            // The position was validated against the stream length above, so a zero-byte
            // read means the stream was truncated underneath us — fail clearly instead of
            // surfacing a confusing ArgumentOutOfRangeException from HashBytes.
            if (bytesRead == 0)
            {
                throw new EndOfStreamException();
            }

            HashBytes(hashFunc, buffer, bytesRead);
        }
    }
    finally
    {
        ArrayPool<byte>.Shared.Return(buffer);
    }
}
/// <summary>
/// Hashes a 32-bit unsigned value, always serialized in little-endian byte order
/// so the resulting hash is identical across platforms.
/// </summary>
/// <param name="hashFunc">HashAlgorithm wrapper used to hash contents cross platform</param>
/// <param name="value">Value to hash</param>
internal static void HashUInt32(Sha512HashFunction hashFunc, uint value)
{
    var bytes = BitConverter.GetBytes(value);

    // BitConverter emits native byte order; flip on big-endian platforms.
    if (!BitConverter.IsLittleEndian)
    {
        Array.Reverse(bytes);
    }

    SignedPackageArchiveIOUtility.HashBytes(hashFunc, bytes);
}
// Verifies that Update is rejected once GetHash has finalized the digest.
public void Update_ThrowsAfterGetHashCalled()
{
    using (var hasher = new Sha512HashFunction())
    {
        // Finalize the hash so any further update becomes invalid.
        hasher.Update(_input, 0, count: 1);
        hasher.GetHash();

        Action updateAfterFinalize = () => hasher.Update(_input, 1, count: 1);

        Assert.Throws<InvalidOperationException>(updateAfterFinalize);
    }
}
// Verifies that hashing one byte at a time produces the same digest as hashing all at once.
public void Update_SupportsIncrementalUpdates()
{
    using (var hasher = new Sha512HashFunction())
    {
        // Feed the input a single byte per call to exercise incremental updates.
        var offset = 0;
        while (offset < _input.Length)
        {
            hasher.Update(_input, offset, count: 1);
            ++offset;
        }

        Assert.Equal(_expectedResult, hasher.GetHash());
    }
}
/// <summary>
/// Hashes given byte array with a specified HashAlgorithm wrapper which works cross platform.
/// </summary>
/// <param name="hashFunc">HashAlgorithm wrapper used to hash contents cross platform</param>
/// <param name="bytes">Content to hash</param>
internal static void HashBytes(Sha512HashFunction hashFunc, byte[] bytes)
{
    if (hashFunc == null)
    {
        throw new ArgumentNullException(nameof(hashFunc));
    }

    var noContent = bytes == null || bytes.Length == 0;
    if (noContent)
    {
        throw new ArgumentException(Strings.ArgumentCannotBeNullOrEmpty, nameof(bytes));
    }

    hashFunc.Update(bytes, 0, bytes.Length);
}
/// <summary>
/// Hashes the first <paramref name="count"/> bytes of the given byte array with a
/// specified HashAlgorithm wrapper which works cross platform.
/// </summary>
/// <param name="hashFunc">HashAlgorithm wrapper used to hash contents cross platform</param>
/// <param name="bytes">Content to hash</param>
/// <param name="count">The number of bytes in the input byte array to use as data.</param>
internal static void HashBytes(Sha512HashFunction hashFunc, byte[] bytes, int count)
{
    if (hashFunc == null)
    {
        throw new ArgumentNullException(nameof(hashFunc));
    }

    if (bytes == null || bytes.Length == 0)
    {
        throw new ArgumentException(Strings.ArgumentCannotBeNullOrEmpty, nameof(bytes));
    }

    // Count must be positive and must not exceed the array length.
    if (count < 1 || count > bytes.Length)
    {
        throw new ArgumentOutOfRangeException(nameof(count));
    }

    hashFunc.Update(bytes, offset: 0, count);
}
/// <summary>
/// Tries to create a SPARQL Function expression if the function Uri corresponds to a supported SPARQL Function.
/// </summary>
/// <param name="u">Function Uri.</param>
/// <param name="args">Function Arguments.</param>
/// <param name="scalarArguments">Scalar Arguments.</param>
/// <param name="expr">Generated Expression.</param>
/// <returns>Whether an expression was successfully generated.</returns>
public bool TryCreateExpression(Uri u, List<ISparqlExpression> args, Dictionary<string, ISparqlExpression> scalarArguments, out ISparqlExpression expr)
{
    String func = u.ToString();

    // Ordinal comparison: URIs are machine-readable identifiers, not linguistic text.
    if (func.StartsWith(SparqlFunctionsNamespace, StringComparison.Ordinal))
    {
        func = func.Substring(SparqlFunctionsNamespace.Length);
        // Invariant upper-casing so keyword matching is not broken by culture-specific
        // casing rules (e.g. the Turkish dotted/dotless 'i').
        func = func.ToUpperInvariant();

        // If any Scalar Arguments are present then can't be a SPARQL Function UNLESS it is
        // a GROUP_CONCAT function and it has the SEPARATOR argument
        if (scalarArguments.Count > 0)
        {
            bool isGroupConcatWithSeparator = func.Equals(SparqlSpecsHelper.SparqlKeywordGroupConcat)
                && scalarArguments.Count == 1
                && scalarArguments.ContainsKey(SparqlSpecsHelper.SparqlKeywordSeparator);
            if (!isGroupConcatWithSeparator)
            {
                expr = null;
                return false;
            }
        }

        // Q: Will there be special URIs for the DISTINCT modified forms of aggregates?
        ISparqlExpression sparqlFunc = null;
        switch (func)
        {
            case SparqlSpecsHelper.SparqlKeywordAbs:
                if (args.Count != 1) { throw new RdfParseException("Incorrect number of arguments for the SPARQL ABS() function"); }
                sparqlFunc = new AbsFunction(args.First());
                break;
            case SparqlSpecsHelper.SparqlKeywordAvg:
                if (args.Count != 1) { throw new RdfParseException("Incorrect number of arguments for the SPARQL AVG() aggregate"); }
                sparqlFunc = new AggregateTerm(new AverageAggregate(args.First()));
                break;
            case SparqlSpecsHelper.SparqlKeywordBound:
                if (args.Count != 1) { throw new RdfParseException("Incorrect number of arguments for the SPARQL BOUND() function"); }
                if (!(args[0] is VariableTerm)) { throw new RdfParseException("The SPARQL BOUND() function only operates over Variables"); }
                sparqlFunc = new BoundFunction((VariableTerm)args[0]);
                break;
            case SparqlSpecsHelper.SparqlKeywordCeil:
                if (args.Count != 1) { throw new RdfParseException("Incorrect number of arguments for the SPARQL CEIL() function"); }
                sparqlFunc = new CeilFunction(args.First());
                break;
            case SparqlSpecsHelper.SparqlKeywordCoalesce:
                if (args.Count < 1) { throw new RdfParseException("The SPARQL COALESCE() function requires at least 1 argument"); }
                sparqlFunc = new CoalesceFunction(args);
                break;
            case SparqlSpecsHelper.SparqlKeywordConcat:
                if (args.Count < 1) { throw new RdfParseException("The SPARQL CONCAT() function requires at least 1 argument"); }
                sparqlFunc = new ConcatFunction(args);
                break;
            case SparqlSpecsHelper.SparqlKeywordContains:
                if (args.Count != 2) { throw new RdfParseException("Incorrect number of arguments for the SPARQL CONTAINS() function"); }
                sparqlFunc = new ContainsFunction(args[0], args[1]);
                break;
            case SparqlSpecsHelper.SparqlKeywordCount:
                // Q: What will the URIs be for the special forms of COUNT?
                if (args.Count != 1) { throw new RdfParseException("Incorrect number of arguments for the SPARQL COUNT() aggregate"); }
                sparqlFunc = new AggregateTerm(new CountAggregate(args.First()));
                break;
            case SparqlSpecsHelper.SparqlKeywordDataType:
                if (args.Count != 1) { throw new RdfParseException("Incorrect number of arguments for the SPARQL DATATYPE() function"); }
                sparqlFunc = new DataTypeFunction(args.First());
                break;
            case SparqlSpecsHelper.SparqlKeywordDay:
                if (args.Count != 1) { throw new RdfParseException("Incorrect number of arguments for the SPARQL DAY() function"); }
                sparqlFunc = new DayFunction(args.First());
                break;
            case SparqlSpecsHelper.SparqlKeywordEncodeForUri:
                if (args.Count != 1) { throw new RdfParseException("Incorrect number of arguments for the SPARQL ENCODE_FOR_URI() function"); }
                sparqlFunc = new EncodeForUriFunction(args.First());
                break;
            case SparqlSpecsHelper.SparqlKeywordFloor:
                if (args.Count != 1) { throw new RdfParseException("Incorrect number of arguments for the SPARQL FLOOR() function"); }
                sparqlFunc = new FloorFunction(args.First());
                break;
            case SparqlSpecsHelper.SparqlKeywordGroupConcat:
                if (args.Count != 1) { throw new RdfParseException("Incorrect number of arguments for the SPARQL GROUP_CONCAT() aggregate"); }
                sparqlFunc = scalarArguments.ContainsKey(SparqlSpecsHelper.SparqlKeywordSeparator)
                    ? new AggregateTerm(new GroupConcatAggregate(args.First(), scalarArguments[SparqlSpecsHelper.SparqlKeywordSeparator]))
                    : new AggregateTerm(new GroupConcatAggregate(args.First()));
                break;
            case SparqlSpecsHelper.SparqlKeywordHours:
                if (args.Count != 1) { throw new RdfParseException("Incorrect number of arguments for the SPARQL HOURS() function"); }
                sparqlFunc = new HoursFunction(args.First());
                break;
            case SparqlSpecsHelper.SparqlKeywordIf:
                if (args.Count != 3) { throw new RdfParseException("Incorrect number of arguments for the SPARQL IF() function"); }
                sparqlFunc = new IfElseFunction(args[0], args[1], args[2]);
                break;
            case SparqlSpecsHelper.SparqlKeywordIri:
                if (args.Count != 1) { throw new RdfParseException("Incorrect number of arguments for the SPARQL IRI() function"); }
                sparqlFunc = new IriFunction(args.First());
                break;
            case SparqlSpecsHelper.SparqlKeywordIsBlank:
                if (args.Count != 1) { throw new RdfParseException("Incorrect number of arguments for the SPARQL ISBLANK() function"); }
                sparqlFunc = new IsBlankFunction(args.First());
                break;
            case SparqlSpecsHelper.SparqlKeywordIsIri:
                if (args.Count != 1) { throw new RdfParseException("Incorrect number of arguments for the SPARQL ISIRI() function"); }
                sparqlFunc = new IsIriFunction(args.First());
                break;
            case SparqlSpecsHelper.SparqlKeywordIsLiteral:
                if (args.Count != 1) { throw new RdfParseException("Incorrect number of arguments for the SPARQL ISLITERAL() function"); }
                sparqlFunc = new IsLiteralFunction(args.First());
                break;
            case SparqlSpecsHelper.SparqlKeywordIsNumeric:
                if (args.Count != 1) { throw new RdfParseException("Incorrect number of arguments for the SPARQL ISNUMERIC() function"); }
                sparqlFunc = new IsNumericFunction(args.First());
                break;
            case SparqlSpecsHelper.SparqlKeywordIsUri:
                if (args.Count != 1) { throw new RdfParseException("Incorrect number of arguments for the SPARQL ISURI() function"); }
                sparqlFunc = new IsUriFunction(args.First());
                break;
            case SparqlSpecsHelper.SparqlKeywordLang:
                if (args.Count != 1) { throw new RdfParseException("Incorrect number of arguments for the SPARQL LANG() function"); }
                sparqlFunc = new LangFunction(args.First());
                break;
            case SparqlSpecsHelper.SparqlKeywordLangMatches:
                if (args.Count != 2) { throw new RdfParseException("Incorrect number of arguments for the SPARQL LANGMATCHES() function"); }
                sparqlFunc = new LangMatchesFunction(args[0], args[1]);
                break;
            case SparqlSpecsHelper.SparqlKeywordLCase:
                if (args.Count != 1) { throw new RdfParseException("Incorrect number of arguments for the SPARQL LCASE() function"); }
                sparqlFunc = new LCaseFunction(args.First());
                break;
            case SparqlSpecsHelper.SparqlKeywordMax:
                if (args.Count != 1) { throw new RdfParseException("Incorrect number of arguments for the SPARQL MAX() aggregate"); }
                sparqlFunc = new AggregateTerm(new MaxAggregate(args.First()));
                break;
            case SparqlSpecsHelper.SparqlKeywordMD5:
                if (args.Count != 1) { throw new RdfParseException("Incorrect number of arguments for the SPARQL MD5() function"); }
                sparqlFunc = new MD5HashFunction(args.First());
                break;
            case SparqlSpecsHelper.SparqlKeywordMin:
                if (args.Count != 1) { throw new RdfParseException("Incorrect number of arguments for the SPARQL MIN() aggregate"); }
                sparqlFunc = new AggregateTerm(new MinAggregate(args.First()));
                break;
            case SparqlSpecsHelper.SparqlKeywordMinutes:
                if (args.Count != 1) { throw new RdfParseException("Incorrect number of arguments for the SPARQL MINUTES() function"); }
                sparqlFunc = new MinutesFunction(args.First());
                break;
            case SparqlSpecsHelper.SparqlKeywordMonth:
                if (args.Count != 1) { throw new RdfParseException("Incorrect number of arguments for the SPARQL MONTH() function"); }
                sparqlFunc = new MonthFunction(args.First());
                break;
            case SparqlSpecsHelper.SparqlKeywordNow:
                // Fixed error message: previously reported ABS() instead of NOW().
                if (args.Count != 0) { throw new RdfParseException("Incorrect number of arguments for the SPARQL NOW() function"); }
                sparqlFunc = new NowFunction();
                break;
            case SparqlSpecsHelper.SparqlKeywordRegex:
                if (args.Count == 2) { sparqlFunc = new RegexFunction(args[0], args[1]); }
                else if (args.Count == 3) { sparqlFunc = new RegexFunction(args[0], args[1], args[2]); }
                else { throw new RdfParseException("Incorrect number of arguments for the SPARQL REGEX() function"); }
                break;
            case SparqlSpecsHelper.SparqlKeywordRound:
                if (args.Count != 1) { throw new RdfParseException("Incorrect number of arguments for the SPARQL ROUND() function"); }
                sparqlFunc = new RoundFunction(args.First());
                break;
            case SparqlSpecsHelper.SparqlKeywordSameTerm:
                if (args.Count != 2) { throw new RdfParseException("Incorrect number of arguments for the SPARQL SAMETERM() function"); }
                sparqlFunc = new SameTermFunction(args[0], args[1]);
                break;
            case SparqlSpecsHelper.SparqlKeywordSample:
                // Fixed error message: previously reported AVG() instead of SAMPLE().
                if (args.Count != 1) { throw new RdfParseException("Incorrect number of arguments for the SPARQL SAMPLE() aggregate"); }
                sparqlFunc = new AggregateTerm(new SampleAggregate(args.First()));
                break;
            case SparqlSpecsHelper.SparqlKeywordSeconds:
                if (args.Count != 1) { throw new RdfParseException("Incorrect number of arguments for the SPARQL SECONDS() function"); }
                sparqlFunc = new SecondsFunction(args.First());
                break;
            case SparqlSpecsHelper.SparqlKeywordSha1:
                if (args.Count != 1) { throw new RdfParseException("Incorrect number of arguments for the SPARQL SHA1() function"); }
                sparqlFunc = new Sha1HashFunction(args.First());
                break;
            case SparqlSpecsHelper.SparqlKeywordSha256:
                if (args.Count != 1) { throw new RdfParseException("Incorrect number of arguments for the SPARQL SHA256() function"); }
                sparqlFunc = new Sha256HashFunction(args.First());
                break;
            case SparqlSpecsHelper.SparqlKeywordSha384:
                if (args.Count != 1) { throw new RdfParseException("Incorrect number of arguments for the SPARQL SHA384() function"); }
                sparqlFunc = new Sha384HashFunction(args.First());
                break;
            case SparqlSpecsHelper.SparqlKeywordSha512:
                if (args.Count != 1) { throw new RdfParseException("Incorrect number of arguments for the SPARQL SHA512() function"); }
                sparqlFunc = new Sha512HashFunction(args.First());
                break;
            case SparqlSpecsHelper.SparqlKeywordStr:
                if (args.Count != 1) { throw new RdfParseException("Incorrect number of arguments for the SPARQL STR() function"); }
                sparqlFunc = new StrFunction(args.First());
                break;
            case SparqlSpecsHelper.SparqlKeywordStrDt:
                if (args.Count != 2) { throw new RdfParseException("Incorrect number of arguments for the SPARQL STRDT() function"); }
                sparqlFunc = new StrDtFunction(args[0], args[1]);
                break;
            case SparqlSpecsHelper.SparqlKeywordStrEnds:
                if (args.Count != 2) { throw new RdfParseException("Incorrect number of arguments for the SPARQL STRENDS() function"); }
                sparqlFunc = new StrEndsFunction(args[0], args[1]);
                break;
            case SparqlSpecsHelper.SparqlKeywordStrLang:
                if (args.Count != 2) { throw new RdfParseException("Incorrect number of arguments for the SPARQL STRLANG() function"); }
                sparqlFunc = new StrLangFunction(args[0], args[1]);
                break;
            case SparqlSpecsHelper.SparqlKeywordStrLen:
                // Fixed error message typo: "STRKEN()" -> "STRLEN()".
                if (args.Count != 1) { throw new RdfParseException("Incorrect number of arguments for the SPARQL STRLEN() function"); }
                sparqlFunc = new StrLenFunction(args.First());
                break;
            case SparqlSpecsHelper.SparqlKeywordStrStarts:
                if (args.Count != 2) { throw new RdfParseException("Incorrect number of arguments for the SPARQL STRSTARTS() function"); }
                sparqlFunc = new StrStartsFunction(args[0], args[1]);
                break;
            case SparqlSpecsHelper.SparqlKeywordSubStr:
                if (args.Count == 2) { sparqlFunc = new SubStrFunction(args[0], args[1]); }
                else if (args.Count == 3) { sparqlFunc = new SubStrFunction(args[0], args[1], args[2]); }
                else { throw new RdfParseException("Incorrect number of arguments for the SPARQL SUBSTR() function"); }
                break;
            case SparqlSpecsHelper.SparqlKeywordSum:
                if (args.Count != 1) { throw new RdfParseException("Incorrect number of arguments for the SPARQL SUM() aggregate"); }
                sparqlFunc = new AggregateTerm(new SumAggregate(args.First()));
                break;
            case SparqlSpecsHelper.SparqlKeywordTimezone:
                if (args.Count != 1) { throw new RdfParseException("Incorrect number of arguments for the SPARQL TIMEZONE() function"); }
                sparqlFunc = new TimezoneFunction(args.First());
                break;
            case SparqlSpecsHelper.SparqlKeywordTz:
                if (args.Count != 1) { throw new RdfParseException("Incorrect number of arguments for the SPARQL TZ() function"); }
                sparqlFunc = new TZFunction(args.First());
                break;
            case SparqlSpecsHelper.SparqlKeywordUCase:
                if (args.Count != 1) { throw new RdfParseException("Incorrect number of arguments for the SPARQL UCASE() function"); }
                sparqlFunc = new UCaseFunction(args.First());
                break;
            case SparqlSpecsHelper.SparqlKeywordUri:
                // URI() is an alias of IRI(), hence the shared IriFunction implementation.
                if (args.Count != 1) { throw new RdfParseException("Incorrect number of arguments for the SPARQL URI() function"); }
                sparqlFunc = new IriFunction(args.First());
                break;
            case SparqlSpecsHelper.SparqlKeywordYear:
                if (args.Count != 1) { throw new RdfParseException("Incorrect number of arguments for the SPARQL YEAR() function"); }
                sparqlFunc = new YearFunction(args.First());
                break;
        }

        if (sparqlFunc != null)
        {
            expr = sparqlFunc;
            return true;
        }
    }

    expr = null;
    return false;
}
/// <summary>
/// Computes a SHA-512 hash over the archive's content while excluding the package
/// signature file: the signature's central directory header is removed and the
/// affected offsets/counts in the central directory and EOCDR are adjusted before
/// being hashed, so the hash reflects the archive as it was before signing.
/// NOTE(review): hashing order and the fixed byte offsets below mirror the ZIP
/// format layout — do not reorder these steps.
/// </summary>
/// <param name="reader">Reader positioned over the signed package archive.</param>
/// <returns>The hash produced by the Sha512HashFunction wrapper.</returns>
internal static string GetPackageContentHash(BinaryReader reader)
{
    using (var hashFunc = new Sha512HashFunction())
    {
        // skip validating signature entry since we're just trying to get the content hash here instead of
        // verifying signature entry.
        var metadata = SignedPackageArchiveIOUtility.ReadSignedArchiveMetadata(reader, validateSignatureEntry: false);
        var signatureCentralDirectoryHeader = metadata.GetPackageSignatureFileCentralDirectoryHeaderMetadata();
        var centralDirectoryRecordsWithoutSignature = RemoveSignatureAndOrderByOffset(metadata);

        // Read and hash from the start of the archive to the start of the file headers
        reader.BaseStream.Seek(offset: 0, origin: SeekOrigin.Begin);
        SignedPackageArchiveIOUtility.ReadAndHashUntilPosition(reader, hashFunc, metadata.StartOfLocalFileHeaders);

        // Read and hash file headers (each record's local file header plus its entry data)
        foreach (var record in centralDirectoryRecordsWithoutSignature)
        {
            reader.BaseStream.Seek(offset: record.OffsetToLocalFileHeader, origin: SeekOrigin.Begin);
            SignedPackageArchiveIOUtility.ReadAndHashUntilPosition(reader, hashFunc, record.OffsetToLocalFileHeader + record.FileEntryTotalSize);
        }

        // Order central directory records by their position
        centralDirectoryRecordsWithoutSignature.Sort((x, y) => x.Position.CompareTo(y.Position));

        // Update offset of any central directory record that has a file entry after signature
        foreach (var record in centralDirectoryRecordsWithoutSignature)
        {
            reader.BaseStream.Seek(offset: record.Position, origin: SeekOrigin.Begin);

            // Hash from the start of the central directory record until the relative offset of local file header (42 from the start of central directory record, including signature length)
            SignedPackageArchiveIOUtility.ReadAndHashUntilPosition(reader, hashFunc, reader.BaseStream.Position + 42);

            // Hash the local file header offset with record.ChangeInOffset applied in place
            // of the raw on-disk value, so entries shifted by the removed signature entry
            // hash with their corrected offsets.
            var relativeOffsetOfLocalFileHeader = (uint)(reader.ReadUInt32() + record.ChangeInOffset);
            HashUInt32(hashFunc, relativeOffsetOfLocalFileHeader);

            // Continue hashing file name, extra field, and file comment fields.
            SignedPackageArchiveIOUtility.ReadAndHashUntilPosition(reader, hashFunc, reader.BaseStream.Position + record.HeaderSize - CentralDirectoryHeader.SizeInBytesOfFixedLengthFields);
        }

        reader.BaseStream.Seek(offset: metadata.EndOfCentralDirectory, origin: SeekOrigin.Begin);

        // Hash until total entries in end of central directory record (8 bytes from the start of EOCDR)
        SignedPackageArchiveIOUtility.ReadAndHashUntilPosition(reader, hashFunc, metadata.EndOfCentralDirectory + 8);

        // Entry counts are decremented by one — presumably to exclude the package
        // signature file entry removed above (TODO confirm against spec/tests).
        var eocdrTotalEntries = (ushort)(reader.ReadUInt16() - 1);
        var eocdrTotalEntriesOnDisk = (ushort)(reader.ReadUInt16() - 1);
        HashUInt16(hashFunc, eocdrTotalEntries);
        HashUInt16(hashFunc, eocdrTotalEntriesOnDisk);

        // update the central directory size by substracting the size of the package signature file's central directory header
        var eocdrSizeOfCentralDirectory = (uint)(reader.ReadUInt32() - signatureCentralDirectoryHeader.HeaderSize);
        HashUInt32(hashFunc, eocdrSizeOfCentralDirectory);

        // Central directory start offset shifts back by the signature's full file entry size.
        var eocdrOffsetOfCentralDirectory = reader.ReadUInt32() - (uint)signatureCentralDirectoryHeader.FileEntryTotalSize;
        HashUInt32(hashFunc, eocdrOffsetOfCentralDirectory);

        // Hash until the end of the reader
        SignedPackageArchiveIOUtility.ReadAndHashUntilPosition(reader, hashFunc, reader.BaseStream.Length);

        // Zero-length update — NOTE(review): appears to finalize/flush the wrapper
        // before GetHash; confirm against Sha512HashFunction's contract.
        hashFunc.Update(new byte[0], offset: 0, count: 0);

        return (hashFunc.GetHash());
    }
}
/// <summary>
/// Hashes given byte array with a specified HashAlgorithm wrapper which works cross platform.
/// Delegates to the count-taking overload, passing the whole array.
/// </summary>
/// <param name="hashFunc">HashAlgorithm wrapper used to hash contents cross platform</param>
/// <param name="bytes">Content to hash</param>
internal static void HashBytes(Sha512HashFunction hashFunc, byte[] bytes)
{
    // A null array maps to a count of 0; the overload performs all argument validation.
    var count = bytes == null ? 0 : bytes.Length;
    HashBytes(hashFunc, bytes, count);
}