private static byte[] ComputeCheckSum(IEnumerable<string> pluginCandidatePaths)
        {
            // include config files in the check sum
            var configFiles = Directory.EnumerateFiles(Platform.InstallDirectory, "*.exe.config", SearchOption.TopDirectoryOnly);

            configFiles = configFiles.Concat(Directory.EnumerateFiles(Platform.InstallDirectory, "*.exe.critical.config", SearchOption.TopDirectoryOnly));

            var orderedFiles = configFiles.Concat(pluginCandidatePaths).Select(f => new FileInfo(f)).OrderBy(fi => fi.FullName);

            // generate a checksum based on the name, create time, and last write time of each file
            using (var byteStream = new MemoryStream())
                using (var hash = new SHA256CryptoServiceProvider2())
                {
                    foreach (var fi in orderedFiles)
                    {
                        var name = Encoding.Unicode.GetBytes(fi.Name);
                        byteStream.Write(name, 0, name.Length);

                        var createTime = BitConverter.GetBytes(fi.CreationTimeUtc.Ticks);
                        byteStream.Write(createTime, 0, createTime.Length);

                        var writeTime = BitConverter.GetBytes(fi.LastWriteTimeUtc.Ticks);
                        byteStream.Write(writeTime, 0, writeTime.Length);
                    }
                    // hash only the bytes actually written, not the stream's full internal buffer
                    return hash.ComputeHash(byteStream.GetBuffer(), 0, (int) byteStream.Length);
                }
        }
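A minimal sketch (not part of the listing) of how a checksum like this might be consumed: a cache file is assumed to store the previously computed checksum at its start, and any mismatch forces a rebuild. IsCacheValid and the stored-checksum layout are hypothetical; the snippet needs System.IO and System.Linq.

// Hypothetical caller: compares the stored checksum against a freshly computed one.
private static bool IsCacheValid(string cacheFilePath, byte[] currentCheckSum)
{
    if (!File.Exists(cacheFilePath))
        return false;

    using (var stream = File.OpenRead(cacheFilePath))
    {
        // assume the checksum is written at the start of the cache file
        var storedCheckSum = new byte[currentCheckSum.Length];
        if (stream.Read(storedCheckSum, 0, storedCheckSum.Length) != storedCheckSum.Length)
            return false;

        // any added, removed, renamed or touched file changes the checksum
        return storedCheckSum.SequenceEqual(currentCheckSum);
    }
}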
Example #2
		/// <summary>
		/// Computes a 128-bit (16-byte) hash from the specified input data.
		/// </summary>
		/// <remarks>
		/// This method provides no guarantees as to consistency over different versions of the framework,
		/// and should not be considered cryptographically secure.
		/// </remarks>
		/// <param name="bytes">The input data to be hashed.</param>
		/// <param name="offset">The byte offset in <paramref name="bytes"/> from which to start hashing.</param>
		/// <param name="length">The number of bytes in <paramref name="bytes"/> to hash.</param>
		/// <returns>A byte array containing the hash (16 bytes).</returns>
		public static byte[] ComputeHash128(byte[] bytes, int offset, int length)
		{
			// we don't simply use MD5 because it throws an exception if the OS has strict cryptographic policies in place (e.g. FIPS)
			// note: truncation of SHA256 seems to be an accepted method of producing a shorter hash
			// * RFC3874 describes the SHA224 algorithm, which is just a truncated SHA256 hash with a different initialization vector
			// * RFC4868 describes the use of HMAC-SHA-256 for origin authentication and integrity verification in IPsec, where the SHA256-based output is truncated to 128 bits
			// * Altman M. A Fingerprint Method for Scientific Data Verification. In: Sobh T, ed. Proceedings of the International Conference on Systems, Computing Sciences and Software Engineering 2007. New York: Springer Netherlands; 2008. p. 311–316.
			// * a discussion of truncating SHA512 to 256 at http://crypto.stackexchange.com/questions/3153/sha-256-vs-any-256-bits-of-sha-512-which-is-more-secure
			using (var sha256 = new SHA256CryptoServiceProvider2())
			{
				var hash = sha256.ComputeHash(bytes, offset, length);
				var result = new byte[16];
				Buffer.BlockCopy(hash, 0, result, 0, 16);
				return result;
			}
		}
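The truncation approach described in the comments above can be reproduced with the standard System.Security.Cryptography API. The sketch below is illustrative only: it assumes plain SHA256.Create() rather than the SHA256CryptoServiceProvider2 wrapper used in this codebase (whose implementation is not shown in this listing).

using System;
using System.Security.Cryptography;
using System.Text;

static class TruncatedHashSketch
{
    // keep the first 128 bits of a SHA-256 digest, as discussed in the comments above
    public static byte[] ComputeHash128(byte[] bytes)
    {
        using (var sha256 = SHA256.Create())
        {
            var hash = sha256.ComputeHash(bytes); // 32 bytes
            var result = new byte[16];
            Buffer.BlockCopy(hash, 0, result, 0, 16);
            return result;
        }
    }

    static void Main()
    {
        var hash = ComputeHash128(Encoding.UTF8.GetBytes("hello world"));
        Console.WriteLine(BitConverter.ToString(hash)); // 16 bytes, hex-formatted
    }
}

Example #3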
        private static string GetMetadataCacheFilePath(out string[] alternates)
        {
            var exePath = Process.GetCurrentProcess().MainModule.FileName;

            using (var sha = new SHA256CryptoServiceProvider2())
            {
                // since this is used to generate a file path, we must limit the length of the generated name so it doesn't exceed max path length
                // we don't simply use MD5 because it throws an exception if the OS has strict cryptographic policies in place (e.g. FIPS)
                // note: truncation of SHA256 seems to be an accepted method of producing a shorter hash - see notes in HashUtilities
                var hash = StringUtilities.ToHexString(sha.ComputeHash(Encoding.Unicode.GetBytes(exePath)), 0, 16);

                // alternate locations are treated as read-only pre-generated cache files (e.g. for Portable workstation)
                alternates = new[] { Path.Combine(Platform.PluginDirectory, "pxpx", hash) };

                // return the main location, which must be writable
                return Path.Combine(Platform.ApplicationDataDirectory, "pxpx", hash);
            }
        }
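A hypothetical caller, sketched only to show how the writable main location and the read-only alternates might be consumed; OpenMetadataCache does not appear in this listing.

private static Stream OpenMetadataCache()
{
    string[] alternates;
    var mainPath = GetMetadataCacheFilePath(out alternates);

    // prefer the writable main location if a cache has already been generated there
    if (File.Exists(mainPath))
        return File.OpenRead(mainPath);

    // otherwise fall back to a pre-generated read-only copy (e.g. Portable workstation)
    foreach (var alternate in alternates)
    {
        if (File.Exists(alternate))
            return File.OpenRead(alternate);
    }

    // no cache available - the caller would rebuild the metadata and write it to mainPath
    return null;
}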
Example #4
        public void TestRandomAccess()
        {
            const int operationCount = 2500;

            var rng      = new PseudoRandom(-0x522C5EF5);
            var seedData = new byte[1 << 17];

            using (var s = CreateStream(seedData))
                using (var r = new MemoryStream())
                {
                    if (!(s.CanRead && s.CanWrite && s.CanSeek))
                    {
                        Console.WriteLine("Test skipped because {0} doesn't support Read, Write and Seek", s.GetType().FullName);
                        return;
                    }

                    r.Write(seedData, 0, seedData.Length);
                    r.Position = 0;

                    Console.WriteLine("Preparing to execute {0} randomized operations", operationCount);
                    for (var k = 0; k < operationCount; ++k)
                    {
                        var opcode = rng.Next(0, 12);                 // slightly biased towards write operations, in order to ensure "interesting" data
                        switch (opcode)
                        {
                        case 0:                         // Read
                        case 6:
                        {
                            var size   = rng.Next(1024, 32768);
                            var offset = rng.Next(0, 1024);
                            var count  = rng.Next(0, size - offset);

                            var sBuffer = new byte[size];
                            var rBuffer = new byte[size];
                            rng.NextBytes(sBuffer);
                            Buffer.BlockCopy(sBuffer, 0, rBuffer, 0, size);

                            var sResult = Read(s, sBuffer, offset, count);
                            var rResult = r.Read(rBuffer, offset, count);

                            Assert.AreEqual(rResult, sResult, "Function return from Read at step k={0}", k);
                            Assert.AreEqual(r.Position, s.Position, "Position after Read at step k={0}", k);
                            Assert.AreEqual(r.Length, s.Length, "Length after Read at step k={0}", k);
                            AssertAreEqual(rBuffer, sBuffer, "Buffer after Read at step k={0}", k);
                        }
                        break;

                        case 1:                         // ReadByte
                        {
                            var sResult = ReadByte(s);
                            var rResult = r.ReadByte();

                            Assert.AreEqual(rResult, sResult, "Function return from ReadByte at step k={0}", k);
                            Assert.AreEqual(r.Position, s.Position, "Position after ReadByte at step k={0}", k);
                            Assert.AreEqual(r.Length, s.Length, "Length after ReadByte at step k={0}", k);
                        }
                        break;

                        case 2:                         // Write
                        case 7:
                        case 8:
                        case 9:
                        {
                            var oldLength = s.Length;
                            var size      = rng.Next(1024, 32768);
                            var offset    = rng.Next(0, 1024);
                            var count     = rng.Next(0, size - offset);

                            var sBuffer = new byte[size];
                            var rBuffer = new byte[size];
                            rng.NextBytes(sBuffer);
                            Buffer.BlockCopy(sBuffer, 0, rBuffer, 0, size);

                            Write(s, sBuffer, offset, count);
                            r.Write(rBuffer, offset, count);

                            Assert.AreEqual(r.Position, s.Position, "Position after Write at step k={0}", k);
                            Assert.AreEqual(r.Length, s.Length, "Length after Write at step k={0}", k);
                            AssertAreEqual(rBuffer, sBuffer, "Buffer after Write at step k={0}", k);

                            // because the behaviour of uninitialized bytes caused by buffer expansion is not explicitly defined,
                            // we explicitly initialize those bytes in order to continue the test deterministically
                            if (s.Length > oldLength)
                            {
                                var pos  = s.Position;
                                var zero = new byte[s.Length - oldLength];
                                Seek(s, oldLength, SeekOrigin.Begin);
                                r.Seek(oldLength, SeekOrigin.Begin);
                                Write(s, zero, 0, zero.Length);
                                r.Write(zero, 0, zero.Length);
                                Seek(s, pos, SeekOrigin.Begin);
                                r.Seek(pos, SeekOrigin.Begin);
                            }
                        }
                        break;

                        case 3:                         // WriteByte
                        case 11:
                        {
                            var oldLength = s.Length;
                            var value     = (byte)rng.Next(0, 256);

                            WriteByte(s, value);
                            r.WriteByte(value);

                            Assert.AreEqual(r.Position, s.Position, "Position after WriteByte at step k={0}", k);
                            Assert.AreEqual(r.Length, s.Length, "Length after WriteByte at step k={0}", k);

                            // because the behaviour of uninitialized bytes caused by buffer expansion is not explicitly defined,
                            // we explicitly initialize those bytes in order to continue the test deterministically
                            if (s.Length > oldLength)
                            {
                                var pos  = s.Position;
                                var zero = new byte[s.Length - oldLength];
                                Seek(s, oldLength, SeekOrigin.Begin);
                                r.Seek(oldLength, SeekOrigin.Begin);
                                Write(s, zero, 0, zero.Length);
                                r.Write(zero, 0, zero.Length);
                                Seek(s, pos, SeekOrigin.Begin);
                                r.Seek(pos, SeekOrigin.Begin);
                            }
                        }
                        break;

                        case 4:                         // Seek
                        case 10:
                        {
                            int        offset;
                            SeekOrigin origin;

                            switch (rng.Next(0, 3))
                            {
                            case 0:
                                offset = rng.Next(0, (int)s.Length + 1024);
                                origin = SeekOrigin.Begin;
                                break;

                            case 1:
                                offset = rng.Next(-(int)s.Position, (int)s.Length - (int)s.Position + 1024);
                                origin = SeekOrigin.Current;
                                break;

                            case 2:
                            default:
                                offset = rng.Next(-(int)s.Length, 1024);
                                origin = SeekOrigin.End;
                                break;
                            }

                            Seek(s, offset, origin);
                            r.Seek(offset, origin);

                            Assert.AreEqual(r.Position, s.Position, "Position after Seek at step k={0}", k);
                            Assert.AreEqual(r.Length, s.Length, "Length after Seek at step k={0}", k);
                        }
                        break;

                        case 5:                         // SetLength
                        {
                            var oldLength = s.Length;
                            var length    = (int)s.Length + rng.Next(-4096, 4096);

                            // because the behaviour of Position after a SetLength call is not explicitly defined,
                            // we explicitly set it here in order to continue the test deterministically
                            var newPosition = rng.Next(0, (int)s.Length + 1024);

                            SetLength(s, length);
                            r.SetLength(length);

                            Assert.AreEqual(r.Length, s.Length, "Length after SetLength at step k={0}", k);

                            // because the behaviour of uninitialized bytes caused by buffer expansion is not explicitly defined,
                            // we explicitly initialize those bytes in order to continue the test deterministically
                            if (s.Length > oldLength)
                            {
                                var zero = new byte[s.Length - oldLength];
                                Seek(s, oldLength, SeekOrigin.Begin);
                                r.Seek(oldLength, SeekOrigin.Begin);
                                Write(s, zero, 0, zero.Length);
                                r.Write(zero, 0, zero.Length);
                            }

                            Seek(s, newPosition, SeekOrigin.Begin);
                            r.Seek(newPosition, SeekOrigin.Begin);
                        }
                        break;

                        default:
                            throw new InvalidOperationException("Invalid OP code generated");
                        }
                    }
                    Console.WriteLine("Completed executing {0} randomized operations", operationCount);

                    var rArray = r.ToArray();
                    var sArray = new byte[s.Length];
                    Seek(s, 0, SeekOrigin.Begin);
                    Assert.AreEqual(sArray.Length, Read(s, sArray, 0, sArray.Length), "Bytes Read while dumping stream contents");
                    //AssertAreEqual(rArray, sArray, "Dump of Stream Contents");

                    using (var hashProvider = new SHA256CryptoServiceProvider2())
                    {
                        var rHash = hashProvider.ComputeHash(rArray);
                        var sHash = hashProvider.ComputeHash(sArray);
                        Assert.AreEqual(rHash, sHash, "Hash of Stream Contents");

                        var hashString = StringUtilities.ToHexString(sHash);
                        Console.WriteLine("Final stream has a hash value of {0}", hashString);
                    }
                }
        }
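The test above is a randomized differential test: every operation is applied both to the stream under test and to a reference MemoryStream, and the observable state (return value, Position, Length, contents) is compared after each step. A much smaller sketch of the same idea is shown below; it assumes the fixture's CreateStream and Read helpers and an NUnit-style [Test] attribute.

[Test]
public void TestWriteMatchesReference()
{
    var data = Encoding.ASCII.GetBytes("differential test against MemoryStream");

    using (var s = CreateStream(new byte[0]))
    using (var r = new MemoryStream())
    {
        // apply the same operation to both streams
        s.Write(data, 0, data.Length);
        r.Write(data, 0, data.Length);

        // then compare every piece of observable state
        Assert.AreEqual(r.Position, s.Position, "Position after Write");
        Assert.AreEqual(r.Length, s.Length, "Length after Write");

        s.Seek(0, SeekOrigin.Begin);
        var actual = new byte[s.Length];
        Assert.AreEqual(actual.Length, Read(s, actual, 0, actual.Length), "Bytes read back");
        Assert.AreEqual(r.ToArray(), actual, "Stream contents");
    }
}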
Example #5
		private static string GetMetadataCacheFilePath(out string[] alternates)
		{
			var exePath = Process.GetCurrentProcess().MainModule.FileName;
			using (var sha = new SHA256CryptoServiceProvider2())
			{
				// since this is used to generate a file path, we must limit the length of the generated name so it doesn't exceed max path length
				// we don't simply use MD5 because it throws an exception if the OS has strict cryptographic policies in place (e.g. FIPS)
				// note: truncation of SHA256 seems to be an accepted method of producing a shorter hash - see notes in HashUtilities
				var hash = StringUtilities.ToHexString(sha.ComputeHash(Encoding.Unicode.GetBytes(exePath)), 0, 16);

				// alternate locations are treated as read-only pre-generated cache files (e.g. for Portable workstation)
				alternates = new[] {Path.Combine(Platform.PluginDirectory, "pxpx", hash)};

				// return the main location, which must be writable
				return Path.Combine(Platform.ApplicationDataDirectory, "pxpx", hash);
			}
		}
Example #6
		private static byte[] ComputeCheckSum(IEnumerable<string> pluginCandidatePaths)
		{
			// include config files in the check sum
			var configFiles = Directory.EnumerateFiles(Platform.InstallDirectory, "*.exe.config", SearchOption.TopDirectoryOnly);
			configFiles = configFiles.Concat(Directory.EnumerateFiles(Platform.InstallDirectory, "*.exe.critical.config", SearchOption.TopDirectoryOnly));

			var orderedFiles = configFiles.Concat(pluginCandidatePaths).Select(f => new FileInfo(f)).OrderBy(fi => fi.FullName);

			// generate a checksum based on the name, create time, and last write time of each file
			using (var byteStream = new MemoryStream())
			using (var hash = new SHA256CryptoServiceProvider2())
			{
				foreach (var fi in orderedFiles)
				{
					var name = Encoding.Unicode.GetBytes(fi.Name);
					byteStream.Write(name, 0, name.Length);

					var createTime = BitConverter.GetBytes(fi.CreationTimeUtc.Ticks);
					byteStream.Write(createTime, 0, createTime.Length);

					var writeTime = BitConverter.GetBytes(fi.LastWriteTimeUtc.Ticks);
					byteStream.Write(writeTime, 0, writeTime.Length);
				}
				// hash only the bytes actually written, not the stream's full internal buffer
				return hash.ComputeHash(byteStream.GetBuffer(), 0, (int) byteStream.Length);
			}
		}
Example #7
		/// <summary>
		/// Computes a 128-bit (16-byte) hash from the specified input data.
		/// </summary>
		/// <remarks>
		/// This method provides no guarantees as to consistency over different versions of the framework,
		/// and should not be considered cryptographically secure.
		/// </remarks>
		/// <param name="stream">The input stream to be hashed (starts from current stream position).</param>
		/// <returns>A byte array containing the hash (16 bytes).</returns>
		public static byte[] ComputeHash128(Stream stream)
		{
			// we don't simply use MD5 because it throws an exception if the OS has strict cryptographic policies in place (e.g. FIPS)
			// note: truncation of SHA256 seems to be an accepted method of producing a shorter hash - see other overload
			using (var sha256 = new SHA256CryptoServiceProvider2())
			{
				var hash = sha256.ComputeHash(stream);
				var result = new byte[16];
				Buffer.BlockCopy(hash, 0, result, 0, 16);
				return result;
			}
		}
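A short usage sketch (hypothetical caller, not from the listing): the stream overload makes it convenient to fingerprint a file without reading it fully into memory first. The HashUtilities class name is taken from the comments earlier in this listing; pathToFingerprint is a placeholder.

using (var stream = File.OpenRead(pathToFingerprint)) // pathToFingerprint is a placeholder
{
    // hashes from the stream's current position to the end
    byte[] fingerprint = HashUtilities.ComputeHash128(stream);
    Console.WriteLine(BitConverter.ToString(fingerprint)); // 16 bytes
}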
Example #8
		/// <summary>
		/// Computes a 256-bit (32-byte) hash from the specified input data.
		/// </summary>
		/// <remarks>
		/// This method provides no guarantees as to consistency over different versions of the framework,
		/// and should not be considered cryptographically secure.
		/// </remarks>
		/// <param name="stream">The input stream to be hashed (starts from current stream position).</param>
		/// <returns>A byte array containing the hash (32 bytes).</returns>
		public static byte[] ComputeHash256(Stream stream)
		{
			using (var sha256 = new SHA256CryptoServiceProvider2())
			{
				return sha256.ComputeHash(stream);
			}
		}
Example #9
		/// <summary>
		/// Computes a 256-bit (32-byte) hash from the specified input data.
		/// </summary>
		/// <remarks>
		/// This method provides no guarantees as to consistency over different versions of the framework,
		/// and should not be considered cryptographically secure.
		/// </remarks>
		/// <param name="bytes">The input data to be hashed.</param>
		/// <param name="offset">The byte offset in <paramref name="bytes"/> from which to start hashing.</param>
		/// <param name="length">The number of bytes in <paramref name="bytes"/> to hash.</param>
		/// <returns>A byte array containing the hash (32 bytes).</returns>
		public static byte[] ComputeHash256(byte[] bytes, int offset, int length)
		{
			using (var sha256 = new SHA256CryptoServiceProvider2())
			{
				return sha256.ComputeHash(bytes, offset, length);
			}
		}
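And a short usage sketch (hypothetical caller) for the byte-array overload, hashing only a slice of a larger buffer, such as the payload portion of a framed message:

var message = Encoding.UTF8.GetBytes("HEADER|payload to hash|FOOTER");
var offset  = "HEADER|".Length;          // start of the payload within the buffer
var length  = "payload to hash".Length;  // number of payload bytes to hash
byte[] digest = HashUtilities.ComputeHash256(message, offset, length); // 32 bytes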