// Derives key material by iterating SHA-1 10,000 times with a fixed ASCII trailer.
// NOTE(review): HashAlgorithm.TransformFinalBlock returns a copy of its *input* region,
// not the digest. So after the first iteration `key` is always the 7 trailer bytes and the
// caller's original `key` no longer influences the result; the author almost certainly
// intended `key = sha1.Hash;` after the final block. Fixing it would change every key this
// routine ever produced, breaking anything already encrypted with it — confirm before touching.
byte[] GenKey(byte[] key)
{
    for (int x = 0; x < 10000; x++)
    {
        SHA1CryptoServiceProvider sha1 = new SHA1CryptoServiceProvider();
        // Feed the current key material into the hash (the output copy is written back over `key`).
        sha1.TransformBlock(key, 0, key.Length, key, 0);
        // Finishes the hash with a constant 7-byte trailer; the RETURN VALUE is those trailer
        // bytes (see NOTE above), not sha1.Hash.
        key = sha1.TransformFinalBlock(ASCIIEncoding.ASCII.GetBytes("fuckoff"), 0, 7);
        sha1.Clear(); // Clear() disposes the provider
    }
    return key;
}
/// <summary>
/// Streams the whole audio source once, accumulating a SHA-1 digest and a CRC32 checksum
/// over the raw PCM bytes while raising progress events. Results land in the SHA1/CRC32
/// properties; the audio source is always closed, even on cancellation or error.
/// </summary>
public void ComputeHashes()
{
    try
    {
        SHA1CryptoServiceProvider hasher = new SHA1CryptoServiceProvider();
        this.CRC32 = 0;
        long sampleTotal = this.audioSource.Length;
        long samplesDone = 0;
        AudioBuffer chunk = new AudioBuffer(this.audioSource.PCM, 44100);

        while (this.audioSource.Read(chunk, 44100) > 0)
        {
            byte[] raw = chunk.Bytes;

            // The final chunk must go through TransformFinalBlock to close the digest.
            bool isLastChunk = this.audioSource.Position == this.audioSource.Length;
            if (isLastChunk)
            {
                hasher.TransformFinalBlock(raw, 0, chunk.ByteLength);
            }
            else
            {
                hasher.TransformBlock(raw, 0, chunk.ByteLength, null, 0);
            }

            this.CRC32 = Crc32.ComputeChecksum(this.CRC32, chunk.Bytes, 0, chunk.ByteLength);
            samplesDone += chunk.Length;

            // Let listeners observe progress and optionally cancel the whole run.
            ProgressChangedEventArgs progress = new ProgressChangedEventArgs((double)samplesDone / sampleTotal);
            this.OnProgressChanged(progress);
            if (progress.Cancel)
            {
                return;
            }
        }

        this.SHA1 = hasher.Hash;
    }
    finally
    {
        this.audioSource.Close();
    }
}
/// <summary>
/// Signs <paramref name="infile"/> with the pre-initialized certificate chain and writes
/// the signed PDF to <paramref name="outfile"/> using the ADBE_PKCS7_SHA1 subfilter.
/// </summary>
/// <param name="infile">Path of the PDF to sign.</param>
/// <param name="outfile">Path the signed PDF is written to (overwritten if present).</param>
/// <exception cref="ApplicationException">Thrown when the certificate chain is not initialized.</exception>
public void SignHashed(string infile, string outfile)
{
    if (Chain == null)
        throw new ApplicationException("Certificate chain has not been initialized");

    PdfReader reader = new PdfReader(infile);
    PdfStamper stamper = PdfStamper.CreateSignature(reader, new FileStream(outfile, FileMode.Create), '\0');
    PdfSignatureAppearance appearance = stamper.SignatureAppearance;

    SignatureDataHandler.SetMetadata(Config, stamper);
    appearance.SetCrypto(null, Chain, null, PdfSignatureAppearance.WINCER_SIGNED);
    SignatureDataHandler.SetAppearance(Config, appearance);

    // Build the signature dictionary from the configured appearance.
    PdfSignature sigDic = new PdfSignature(PdfName.ADOBE_PPKMS, PdfName.ADBE_PKCS7_SHA1);
    sigDic.Name = PdfPKCS7.GetSubjectFields(Chain[0]).GetField("CN");
    sigDic.Reason = appearance.Reason;
    sigDic.Location = appearance.Location;
    sigDic.Contact = appearance.Contact;
    sigDic.Date = new PdfDate(appearance.SignDate);
    appearance.CryptoDictionary = sigDic;

    // Reserve room for the PKCS#7 container (hex-encoded in the file, hence * 2 + 2).
    int csize = 15000; // was 4000
    Dictionary<PdfName, int> exclusions = new Dictionary<PdfName, int>();
    exclusions[PdfName.CONTENTS] = csize * 2 + 2;
    appearance.PreClose(exclusions);

    // Hash exactly the byte ranges the signature must cover.
    HashAlgorithm sha = new SHA1CryptoServiceProvider();
    Stream rangeStream = appearance.GetRangeStream();
    byte[] chunk = new byte[8192];
    int count;
    while ((count = rangeStream.Read(chunk, 0, 8192)) > 0)
    {
        sha.TransformBlock(chunk, 0, count, chunk, 0);
    }
    sha.TransformFinalBlock(chunk, 0, 0);

    // Produce the detached signature and zero-pad it into the reserved slot.
    var card = new X509Certificate2(otrosbytes);
    byte[] signature = SignMsg(sha.Hash, card, false);
    byte[] padded = new byte[csize];
    PdfDictionary contents = new PdfDictionary();
    Array.Copy(signature, 0, padded, 0, signature.Length);
    contents.Put(PdfName.CONTENTS, new PdfString(padded).SetHexWriting(true));
    appearance.Close(contents);
}
/// <summary>
/// Parses a raw git commit object (headers + message) out of <paramref name="buff"/> and
/// computes its object hash ("commit {length}\0" prefix + raw bytes, SHA-1, lowercase hex).
/// </summary>
/// <param name="buff">Buffer holding the raw commit object bytes.</param>
/// <param name="commitMsgLengthLong">Number of valid bytes in <paramref name="buff"/>.</param>
/// <returns>A populated <see cref="ReceivePackCommit"/>.</returns>
public ReceivePackCommit ParseCommitDetails(byte[] buff, long commitMsgLengthLong)
{
    // buff at the moment is 16KB, which should be enough for commit messages;
    // fail loudly rather than truncating if that assumption ever breaks.
    if (commitMsgLengthLong > buff.Length)
    {
        throw new Exception("Encountered unexpectedly large commit message");
    }

    int msgLen = (int)commitMsgLengthLong; // safe narrowing: guarded above
    string rawMessage = Encoding.UTF8.GetString(buff, 0, msgLen);

    string treeHash = null;
    var parents = new List<string>();
    ReceivePackCommitSignature author = null;
    ReceivePackCommitSignature committer = null;

    string[] lines = rawMessage.Split('\n');
    int headerLineCount = 0;
    bool headersEnded = false;

    foreach (string line in lines)
    {
        headerLineCount++;

        // Header type is everything before the first space; data is the remainder.
        int split = line.IndexOf(' ');
        string headerType = split < 0 ? line : line.Substring(0, split);
        string headerData = (split >= 0 && split + 1 < line.Length) ? line.Substring(split + 1) : "";

        switch (headerType)
        {
            case "tree":
                treeHash = headerData;
                break;
            case "parent":
                parents.Add(headerData);
                break;
            case "author":
                author = ParseSignature(headerData);
                break;
            case "committer":
                committer = ParseSignature(headerData);
                break;
            case "":
                // The first empty type marks the end of the header section.
                headersEnded = true;
                break;
            default:
                // Unrecognized header: skip over it.
                break;
        }

        if (headersEnded)
        {
            break;
        }
    }

    // Everything after the headers (and the separating blank line) is the commit comment.
    string comment = string.Join("\n", lines.Skip(headerLineCount).ToArray()).TrimEnd('\n');

    // Compute the commit hash the same way git does: object header + raw payload.
    using (var sha1 = new SHA1CryptoServiceProvider())
    {
        byte[] objectHeader = Encoding.UTF8.GetBytes(string.Format("commit {0}\0", msgLen));
        sha1.TransformBlock(objectHeader, 0, objectHeader.Length, objectHeader, 0);
        sha1.TransformFinalBlock(buff, 0, msgLen);

        string commitHash = BitConverter.ToString(sha1.Hash).Replace("-", "").ToLowerInvariant();

        return new ReceivePackCommit(commitHash, treeHash, parents, author, committer, comment);
    }
}
/// <summary>
/// Verifies the integrity section of a PuTTY .ppk key file: either the HMAC-SHA1
/// Private-MAC (keyed from the passphrase) or the plain SHA-1 Private-Hash.
/// Returns true when the stored value matches, or when neither is present.
/// </summary>
private bool Verify(int version, string privateMac, string privateHash, string passphrase, string keyTypeName, string encryptionName, string comment, byte[] publicBlob, byte[] privateBlob)
{
    byte[] macData;
    using (MemoryStream macBuff = new MemoryStream())
    {
        if (version == 1)
        {
            // Version 1 files cover only the private blob.
            WriteMacData(macBuff, privateBlob);
        }
        else
        {
            // Version 2 files cover every header field plus both key blobs.
            WriteMacData(macBuff, keyTypeName);
            WriteMacData(macBuff, encryptionName);
            WriteMacData(macBuff, comment);
            WriteMacData(macBuff, publicBlob);
            WriteMacData(macBuff, privateBlob);
        }
        macBuff.Close();
        macData = macBuff.ToArray();
    }

    if (privateMac != null)
    {
        // MAC key = SHA1("putty-private-key-file-mac-key" || passphrase), per PuTTY's format.
        SHA1CryptoServiceProvider sha1 = new SHA1CryptoServiceProvider();
        byte[] prefix = Encoding.ASCII.GetBytes("putty-private-key-file-mac-key");
        sha1.TransformBlock(prefix, 0, prefix.Length, null, 0);
        byte[] passBytes = Encoding.UTF8.GetBytes(passphrase);
        sha1.TransformFinalBlock(passBytes, 0, passBytes.Length);
        byte[] macKey = sha1.Hash;
        sha1.Clear();

        System.Security.Cryptography.HMACSHA1 hmac = new System.Security.Cryptography.HMACSHA1(macKey);
        byte[] digest = hmac.ComputeHash(macData);
        hmac.Clear();

        // NOTE(review): ordinal string comparison is not constant-time; likely acceptable
        // for local key-file verification, but confirm if this path sees untrusted input.
        return BinToHex(digest) == privateMac;
    }

    if (privateHash != null)
    {
        SHA1CryptoServiceProvider sha1 = new SHA1CryptoServiceProvider();
        byte[] digest = sha1.ComputeHash(macData);
        sha1.Clear();
        return BinToHex(digest) == privateHash;
    }

    // Nothing stored to verify against.
    return true;
}
/// <summary>
/// Derives a 32-byte cipher key from a PuTTY passphrase:
/// SHA1(00 00 00 00 || passphrase) || SHA1(00 00 00 01 || passphrase), truncated to 32 bytes.
/// </summary>
private static byte[] PuTTYPassphraseToKey(string passphrase)
{
    const int HASH_SIZE = 20;
    SHA1CryptoServiceProvider sha1 = new SHA1CryptoServiceProvider();
    byte[] passBytes = Encoding.UTF8.GetBytes(passphrase);
    byte[] digests = new byte[HASH_SIZE * 2];

    // Two rounds, each prefixed with a big-endian 32-bit sequence number (0 then 1).
    for (int round = 0; round < 2; round++)
    {
        if (round > 0)
        {
            sha1.Initialize(); // reset the hash state before the second digest
        }
        sha1.TransformBlock(new byte[] { 0, 0, 0, (byte)round }, 0, 4, null, 0);
        sha1.TransformFinalBlock(passBytes, 0, passBytes.Length);
        Buffer.BlockCopy(sha1.Hash, 0, digests, round * HASH_SIZE, HASH_SIZE);
    }
    sha1.Clear();

    // Only the first 32 of the 40 digest bytes become the key.
    byte[] key = new byte[32];
    Buffer.BlockCopy(digests, 0, key, 0, key.Length);
    return key;
}
/// <summary>
/// Saves the stream to the torrent.
/// </summary>
/// <param name="pieceId">Piece index to save to</param>
/// <param name="istream">Stream to read data from</param>
/// <returns>True if the data saved checks out correctly with the SHA-1 digest, false otherwise. The bitfield
/// property is automatically updated if true</returns>
public bool SaveToFile(int pieceId, IO.Stream istream)
{
    // A piece may be spread across several files; keep looping until it is fully written.
    int dataWritten = 0;
    int positionInFile = 0;
    int fileNum = 0;
    Crypto.SHA1 sha = new Crypto.SHA1CryptoServiceProvider();

    WhichFileIsPieceIn(pieceId, out fileNum, out positionInFile);

    while (dataWritten < this.infofile.GetPieceLength(pieceId) && fileNum < this.infofile.FileCount)
    {
        int fileLength = this.infofile.GetFileLength(fileNum);
        int dataToWrite = System.Math.Min(fileLength - positionInFile, this.infofile.GetPieceLength(pieceId) - dataWritten);

        byte[] data = new byte[dataToWrite];

        // FIX: Stream.Read is not guaranteed to fill the buffer in one call; the old code
        // ignored the return value and could silently write (and hash) stale zero bytes.
        int filled = 0;
        while (filled < data.Length)
        {
            int got = istream.Read(data, filled, data.Length - filled);
            if (got <= 0)
            {
                break; // source exhausted early; remaining bytes stay zero
            }
            filled += got;
        }

        // FIX: using-statement guarantees the file handle is released even if Write throws
        // (the old code only called Close on the success path).
        using (IO.FileStream fstream = new IO.FileStream(this.infofile.GetFileName(fileNum), IO.FileMode.Open))
        {
            fstream.Seek(positionInFile, IO.SeekOrigin.Begin);
            fstream.Write(data, 0, data.Length);
        }

        dataWritten += dataToWrite;

        // Close the digest on the last chunk of the piece.
        if (dataWritten >= this.infofile.GetPieceLength(pieceId))
        {
            sha.TransformFinalBlock(data, 0, data.Length);
        }
        else
        {
            sha.TransformBlock(data, 0, data.Length, data, 0);
        }

        fileNum++; // move onto next file
        positionInFile = 0;
    }

    if (this.infofile.GetSHADigest(pieceId).Equals(new ByteField20(sha.Hash)))
    {
        this.piecesDownloaded.Set(pieceId, true);
        this.numBytesLeft -= dataWritten;

        if (this.PercentChanged != null)
        {
            this.PercentChanged(this, this.PercentComplete);
        }

        if (this.piecesDownloaded.AllTrue)
        {
            Config.LogDebugMessage("Torrent finished!");
        }

        return (true);
    }
    else
    {
        return (false);
    }
}
/// <summary>
/// Configures the iText signature appearance from the given certificate chain, hashes the
/// document's signed byte range with SHA-1, signs the digest with <paramref name="card"/>,
/// and embeds the resulting PKCS#7 container into the reserved /Contents slot.
/// </summary>
/// <param name="sigAppearance">Signature appearance of a stamper created via CreateSignature.</param>
/// <param name="card">Certificate (with private key) used to produce the signature.</param>
/// <param name="chain">Certificate chain written into the signature dictionary.</param>
private static void SetSigCryptoFromX509(PdfSignatureAppearance sigAppearance, X509Certificate2 card, X509Certificate[] chain)
{
    sigAppearance.SetCrypto(null, chain, null, PdfSignatureAppearance.WINCER_SIGNED);
    var dic = new PdfSignature(PdfName.ADOBE_PPKMS, PdfName.ADBE_PKCS7_SHA1)
    {
        Date = new PdfDate(sigAppearance.SignDate),
        Name = PdfPKCS7.GetSubjectFields(chain[0]).GetField("CN"),
        Reason = sigAppearance.Reason,
        Location = sigAppearance.Location
    };
    sigAppearance.CryptoDictionary = dic;

    // Reserved space for the PKCS#7 blob (hex-encoded in the file, hence * 2 + 2).
    const int csize = 4000;
    var exc = new Dictionary<PdfName, int> { { PdfName.CONTENTS, csize * 2 + 2 } };
    sigAppearance.PreClose(exc);

    byte[] digest;
    // FIX: HashAlgorithm is IDisposable and was previously never disposed (handle leak).
    using (HashAlgorithm sha = new SHA1CryptoServiceProvider())
    {
        var s = sigAppearance.RangeStream;
        int read;
        var buff = new byte[8192];
        while ((read = s.Read(buff, 0, 8192)) > 0)
        {
            sha.TransformBlock(buff, 0, read, buff, 0);
        }
        sha.TransformFinalBlock(buff, 0, 0);
        digest = sha.Hash;
    }

    var pk = SignMsg(digest, card, false);
    // FIX: fail with a clear message instead of an opaque Array.Copy exception when the
    // signature container outgrows the reserved slot.
    if (pk.Length > csize)
        throw new InvalidOperationException("Signature container (" + pk.Length + " bytes) exceeds the reserved " + csize + " bytes.");

    var outc = new byte[csize];
    var dic2 = new PdfDictionary();
    Array.Copy(pk, 0, outc, 0, pk.Length);
    dic2.Put(PdfName.CONTENTS, new PdfString(outc).SetHexWriting(true));
    sigAppearance.Close(dic2);
}
// Builds the PDF /Encrypt dictionary for either public-key (PUBSEC) or standard
// password-based security, matching the configured revision. In the PUBSEC branch it also
// derives the file encryption key: SHA1(seed || each encoded recipient [|| metadataPad]).
// The exact order of Put calls and hash inputs follows the PDF spec — do not reorder.
public PdfDictionary GetEncryptionDictionary()
{
    PdfDictionary dic = new PdfDictionary();
    if (publicKeyHandler.GetRecipientsSize() > 0)
    {
        // ---- Public-key (certificate) security handler ----
        PdfArray recipients = null;
        dic.Put(PdfName.FILTER, PdfName.PUBSEC);
        dic.Put(PdfName.R, new PdfNumber(revision));
        recipients = publicKeyHandler.GetEncodedRecipients();
        if (revision == STANDARD_ENCRYPTION_40)
        {
            dic.Put(PdfName.V, new PdfNumber(1));
            dic.Put(PdfName.SUBFILTER, PdfName.ADBE_PKCS7_S4);
            dic.Put(PdfName.RECIPIENTS, recipients);
        }
        else if (revision == STANDARD_ENCRYPTION_128 && encryptMetadata)
        {
            dic.Put(PdfName.V, new PdfNumber(2));
            dic.Put(PdfName.LENGTH, new PdfNumber(128));
            dic.Put(PdfName.SUBFILTER, PdfName.ADBE_PKCS7_S4);
            dic.Put(PdfName.RECIPIENTS, recipients);
        }
        else
        {
            // AES-128 (or 128-bit RC4 without metadata encryption): uses a crypt filter.
            dic.Put(PdfName.R, new PdfNumber(AES_128));
            dic.Put(PdfName.V, new PdfNumber(4));
            dic.Put(PdfName.SUBFILTER, PdfName.ADBE_PKCS7_S5);
            PdfDictionary stdcf = new PdfDictionary();
            stdcf.Put(PdfName.RECIPIENTS, recipients);
            if (!encryptMetadata)
                stdcf.Put(PdfName.ENCRYPTMETADATA, PdfBoolean.PDFFALSE);
            if (revision == AES_128)
                stdcf.Put(PdfName.CFM, PdfName.AESV2);
            else
                stdcf.Put(PdfName.CFM, PdfName.V2);
            PdfDictionary cf = new PdfDictionary();
            cf.Put(PdfName.DEFAULTCRYPTFILTER, stdcf);
            dic.Put(PdfName.CF, cf);
            if (embeddedFilesOnly)
            {
                // Only embedded files are encrypted; strings/streams pass through as identity.
                dic.Put(PdfName.EFF, PdfName.DEFAULTCRYPTFILTER);
                dic.Put(PdfName.STRF, PdfName.IDENTITY);
                dic.Put(PdfName.STMF, PdfName.IDENTITY);
            }
            else
            {
                dic.Put(PdfName.STRF, PdfName.DEFAULTCRYPTFILTER);
                dic.Put(PdfName.STMF, PdfName.DEFAULTCRYPTFILTER);
            }
        }
        // Derive the encryption key from the seed plus every encoded recipient structure.
        SHA1 sh = new SHA1CryptoServiceProvider();
        byte[] encodedRecipient = null;
        byte[] seed = publicKeyHandler.GetSeed();
        sh.TransformBlock(seed, 0, seed.Length, seed, 0);
        for (int i=0; i<publicKeyHandler.GetRecipientsSize(); i++)
        {
            encodedRecipient = publicKeyHandler.GetEncodedRecipient(i);
            sh.TransformBlock(encodedRecipient, 0, encodedRecipient.Length, encodedRecipient, 0);
        }
        if (!encryptMetadata)
            sh.TransformBlock(metadataPad, 0, metadataPad.Length, metadataPad, 0);
        // Zero-length final block just closes the digest; `seed` is not hashed again here.
        sh.TransformFinalBlock(seed, 0, 0);
        byte[] mdResult = sh.Hash;
        SetupByEncryptionKey(mdResult, keyLength);
    }
    else
    {
        // ---- Standard (password) security handler ----
        dic.Put(PdfName.FILTER, PdfName.STANDARD);
        dic.Put(PdfName.O, new PdfLiteral(PdfContentByte.EscapeString(ownerKey)));
        dic.Put(PdfName.U, new PdfLiteral(PdfContentByte.EscapeString(userKey)));
        dic.Put(PdfName.P, new PdfNumber(permissions));
        dic.Put(PdfName.R, new PdfNumber(revision));
        if (revision == STANDARD_ENCRYPTION_40)
        {
            dic.Put(PdfName.V, new PdfNumber(1));
        }
        else if (revision == STANDARD_ENCRYPTION_128 && encryptMetadata)
        {
            dic.Put(PdfName.V, new PdfNumber(2));
            dic.Put(PdfName.LENGTH, new PdfNumber(128));
        }
        else
        {
            // AES-128 (or 128-bit without metadata encryption): crypt-filter form.
            if (!encryptMetadata)
                dic.Put(PdfName.ENCRYPTMETADATA, PdfBoolean.PDFFALSE);
            dic.Put(PdfName.R, new PdfNumber(AES_128));
            dic.Put(PdfName.V, new PdfNumber(4));
            dic.Put(PdfName.LENGTH, new PdfNumber(128));
            PdfDictionary stdcf = new PdfDictionary();
            stdcf.Put(PdfName.LENGTH, new PdfNumber(16));
            if (embeddedFilesOnly)
            {
                stdcf.Put(PdfName.AUTHEVENT, PdfName.EFOPEN);
                dic.Put(PdfName.EFF, PdfName.STDCF);
                dic.Put(PdfName.STRF, PdfName.IDENTITY);
                dic.Put(PdfName.STMF, PdfName.IDENTITY);
            }
            else
            {
                stdcf.Put(PdfName.AUTHEVENT, PdfName.DOCOPEN);
                dic.Put(PdfName.STRF, PdfName.STDCF);
                dic.Put(PdfName.STMF, PdfName.STDCF);
            }
            if (revision == AES_128)
                stdcf.Put(PdfName.CFM, PdfName.AESV2);
            else
                stdcf.Put(PdfName.CFM, PdfName.V2);
            PdfDictionary cf = new PdfDictionary();
            cf.Put(PdfName.STDCF, stdcf);
            dic.Put(PdfName.CF, cf);
        }
    }
    return dic;
}
/// <summary>
/// Writes <paramref name="stream"/> into the content-addressable store: the content is
/// streamed to a temp file while its SHA-1 is computed, then moved to the hash-derived
/// path (or discarded if that content already exists). Optionally writes encoded variants.
/// </summary>
/// <param name="stream">Source content; read to the end.</param>
/// <param name="cancellationToken">Cancels the async read/write operations.</param>
/// <param name="encodings">Optional encoders; each produces a sibling "{path}.{name}" file.</param>
/// <returns>The SHA-1 hash of the content.</returns>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="stream"/> is null.</exception>
public async Task<byte[]> WriteAsync(Stream stream, CancellationToken cancellationToken = new CancellationToken(), IEnumerable<IContentEncoding> encodings = null)
{
    if (stream == null)
        throw new ArgumentNullException("stream");

    // Create a new, empty temporary file. We write the source content into it while
    // computing the hash; once the hash is known the file is moved into place.
    var tempFile = Path.GetTempFileName();

    using (var hashBuilder = new SHA1CryptoServiceProvider())
    {
        using (var fileStream = new FileStream(tempFile, FileMode.Open, FileAccess.Write, FileShare.None, BufferSize, FileOptions.SequentialScan | FileOptions.Asynchronous))
        {
            // Double-buffered pipeline: read chunk N+1 while chunk N is being written.
            var buffers = new[] { new byte[BufferSize], new byte[BufferSize] };
            var bufferIndex = 0;
            var writeTask = (Task)null;

            while (true)
            {
                // Swap buffers and start the next read immediately.
                bufferIndex ^= 1;
                var readTask = stream.ReadAsync(buffers[bufferIndex], 0, BufferSize, cancellationToken);

                // FIX: the old code awaited Task.WhenAll(readTask, writeTask) only when
                // writeTask != null and then accessed readTask.Result — on the first
                // iteration that synchronously blocked on the async read (sync-over-async;
                // deadlock risk on a synchronization context). Await both explicitly.
                if (writeTask != null)
                    await writeTask;
                var readCount = await readTask;

                // Source exhausted (the last write has already been awaited above).
                if (readCount == 0)
                    break;

                // Integrate the chunk into the hash, then write it out in the background.
                hashBuilder.TransformBlock(buffers[bufferIndex], 0, readCount, null, 0);
                writeTask = fileStream.WriteAsync(buffers[bufferIndex], 0, readCount, cancellationToken);
            }

            // Finalize the hash computation with an empty final block.
            hashBuilder.TransformFinalBlock(buffers[bufferIndex], 0, 0);
        }

        var hash = hashBuilder.Hash;

        // Determine the content-addressed location for this hash.
        string subPath;
        string contentPath;
        GetPaths(hash, null, out subPath, out contentPath);

        if (File.Exists(contentPath))
        {
            // This content already exists in the store — discard the temp file.
            File.Delete(tempFile);
        }
        else
        {
            if (!Directory.Exists(subPath))
                Directory.CreateDirectory(subPath);

            // Move the temp file into place and freeze it.
            File.Move(tempFile, contentPath);
            File.SetAttributes(contentPath, FileAttributes.ReadOnly);
        }

        // Write any encoded forms of the content too.
        if (encodings != null)
        {
            foreach (var encoding in encodings)
            {
                var encodedContentPath = contentPath + "." + encoding.Name;
                if (File.Exists(encodedContentPath))
                    continue;

                // Encode into a fresh temp file, then move it into place.
                tempFile = Path.GetTempFileName();
                using (var inputStream = new FileStream(contentPath, FileMode.Open, FileAccess.Read, FileShare.Read, BufferSize, FileOptions.SequentialScan | FileOptions.Asynchronous))
                using (var outputStream = new FileStream(tempFile, FileMode.Open, FileAccess.Write, FileShare.None, BufferSize, FileOptions.SequentialScan | FileOptions.Asynchronous))
                using (var encodedOutputStream = encoding.Encode(outputStream))
                {
                    await inputStream.CopyToAsync(encodedOutputStream, BufferSize, cancellationToken);
                }
                File.Move(tempFile, encodedContentPath);
                File.SetAttributes(encodedContentPath, FileAttributes.ReadOnly);
            }
        }

        // The caller receives the hash regardless of whether the file already existed.
        return hash;
    }
}
/// <summary>
/// Saves the stream to the torrent.
/// </summary>
/// <param name="pieceId">Piece index to save to</param>
/// <param name="istream">Stream to read data from</param>
/// <returns>True if the data saved checks out correctly with the SHA-1 digest, false otherwise. The bitfield
/// property is automatically updated if true</returns>
public bool SaveToFile(int pieceId, IO.Stream istream)
{
    // The piece may start in one file and span several; loop until it is fully written.
    int dataWritten = 0;
    int positionInFile = 0;
    int fileNum = 0;
    Crypto.SHA1 sha = new Crypto.SHA1CryptoServiceProvider();

    WhichFileIsPieceIn(pieceId, out fileNum, out positionInFile);

    while (dataWritten < this.infofile.GetPieceLength(pieceId) && fileNum < this.infofile.FileCount)
    {
        int fileLength = this.infofile.GetFileLength(fileNum);
        int dataToWrite = System.Math.Min(fileLength - positionInFile, this.infofile.GetPieceLength(pieceId) - dataWritten);

        byte[] data = new byte[ dataToWrite ];

        // FIX: the return value of Stream.Read was ignored; a short read would leave the
        // tail of `data` as zeros and both write and hash corrupted bytes. Read in a loop.
        int filled = 0;
        while (filled < data.Length)
        {
            int got = istream.Read(data, filled, data.Length - filled);
            if (got <= 0)
                break; // source exhausted early; remaining bytes stay zero
            filled += got;
        }

        // FIX: dispose the file stream even when Seek/Write throws (was a bare Close()).
        using (IO.FileStream fstream = new IO.FileStream(this.infofile.GetFileName(fileNum), IO.FileMode.Open))
        {
            fstream.Seek(positionInFile, IO.SeekOrigin.Begin);
            fstream.Write(data, 0, data.Length);
        }

        dataWritten += dataToWrite;

        // Close the digest on the final chunk of the piece.
        if (dataWritten >= this.infofile.GetPieceLength(pieceId))
            sha.TransformFinalBlock(data, 0, data.Length);
        else
            sha.TransformBlock(data, 0, data.Length, data, 0);

        fileNum++; // move onto next file
        positionInFile = 0;
    }

    if (this.infofile.GetSHADigest(pieceId).Equals(new ByteField20(sha.Hash)))
    {
        this.piecesDownloaded.Set(pieceId, true);
        this.numBytesLeft -= dataWritten;
        if (this.PercentChanged != null)
            this.PercentChanged(this, this.PercentComplete);
        if (this.piecesDownloaded.AllTrue)
            Config.LogDebugMessage("Torrent finished!");
        return true;
    }
    else
        return false;
}
/// <summary>
/// Generate a SHA-1 hash over the concatenation of several byte arrays.
/// </summary>
/// <param name="tohash">array of byte arrays to hash, in order</param>
/// <returns>Returns the 20-byte SHA-1 digest of the concatenated input</returns>
private static byte[] digest(byte[][] tohash)
{
    // FIX: SHA1CryptoServiceProvider is IDisposable and was previously leaked.
    using (SHA1CryptoServiceProvider sha1 = new SHA1CryptoServiceProvider())
    {
        foreach (byte[] part in tohash)
        {
            sha1.TransformBlock(part, 0, part.Length, part, 0);
        }
        // Empty final block just closes the digest.
        sha1.TransformFinalBlock(new byte[0], 0, 0);
        return sha1.Hash;
    }
}
// Recomputes the settings hash over TitleSpecific1..3 and writes it back into
// TitleSpecific3's hash slot. The hash region itself is filled with 0x99 before hashing
// so the stored hash never covers its own bytes. Stream-position arithmetic at the end
// relies on ReadBlock leaving the position just past TitleSpecific3 — do not reorder.
private void WriteHash()
{
    SHA1 sha1 = new SHA1CryptoServiceProvider();
    const int LargestSettingSize = 0x3E8; // scratch buffer big enough for any section
    byte[] data = new byte[LargestSettingSize];

    // Hash TitleSpecific1
    BigEndianStream stream = _titleSpecific1.GetStream();
    stream.ReadBlock(data, 0, TitleSpecific1Size);
    sha1.TransformBlock(data, 0, TitleSpecific1Size, data, 0);

    // Hash TitleSpecific2
    stream = _titleSpecific2.GetStream();
    stream.ReadBlock(data, 0, TitleSpecific2Size);
    sha1.TransformBlock(data, 0, TitleSpecific2Size, data, 0);

    // Read in TitleSpecific3
    stream = _titleSpecific3.GetStream();
    stream.ReadBlock(data, 0, TitleSpecific3Size);

    // Fill the hash area with 0x99 (sentinel so the hash doesn't cover itself)
    for (int i = HashOffset; i < HashOffset + HashSize; i++)
        data[i] = 0x99;

    // Transform TitleSpecific3 and get the hash
    sha1.TransformFinalBlock(data, 0, TitleSpecific3Size);
    _hash = sha1.Hash;

    // Write it back: rewind from the end of TitleSpecific3 to the hash slot.
    stream.Position -= TitleSpecific3Size - HashOffset;
    stream.WriteBlock(_hash);
}
/**
* Reads the trailer's /Encrypt dictionary and initializes the decryption machinery for
* this document: supports the standard (password) handler (R 2/3/4, RC4 or AES-128) and
* the public-key (PUBSEC) handler, for which the file key is derived as
* SHA1(envelopedData[0..20] || each recipient [|| metadataPad]). All strings collected so
* far are decrypted at the end. encryptionError stays true unless setup fully succeeds.
* @throws IOException
*/
private void ReadDecryptedDocObj()
{
    if (encrypted)
        return;
    PdfObject encDic = trailer.Get(PdfName.ENCRYPT);
    if (encDic == null || encDic.ToString().Equals("null"))
        return;
    // Pessimistically flag an error; cleared only when the whole setup succeeds.
    encryptionError = true;
    byte[] encryptionKey = null;
    encrypted = true;
    PdfDictionary enc = (PdfDictionary)GetPdfObject(encDic);
    String s;
    PdfObject o;

    PdfArray documentIDs = trailer.GetAsArray(PdfName.ID);
    byte[] documentID = null;
    if (documentIDs != null)
    {
        o = documentIDs[0];
        strings.Remove(o); // the ID strings are never themselves encrypted
        s = o.ToString();
        documentID = DocWriter.GetISOBytes(s);
        if (documentIDs.Size > 1)
            strings.Remove(documentIDs[1]);
    }
    // just in case we have a broken producer
    if (documentID == null)
        documentID = new byte[0];

    byte[] uValue = null;
    byte[] oValue = null;
    int cryptoMode = PdfWriter.STANDARD_ENCRYPTION_40;
    int lengthValue = 0;

    PdfObject filter = GetPdfObjectRelease(enc.Get(PdfName.FILTER));
    if (filter.Equals(PdfName.STANDARD))
    {
        // ---- Standard (password-based) security handler ----
        s = enc.Get(PdfName.U).ToString();
        strings.Remove(enc.Get(PdfName.U));
        uValue = DocWriter.GetISOBytes(s);
        s = enc.Get(PdfName.O).ToString();
        strings.Remove(enc.Get(PdfName.O));
        oValue = DocWriter.GetISOBytes(s);

        o = enc.Get(PdfName.P);
        if (!o.IsNumber())
            throw new InvalidPdfException("Illegal P value.");
        pValue = ((PdfNumber)o).IntValue;

        o = enc.Get(PdfName.R);
        if (!o.IsNumber())
            throw new InvalidPdfException("Illegal R value.");
        rValue = ((PdfNumber)o).IntValue;

        switch (rValue)
        {
            case 2:
                cryptoMode = PdfWriter.STANDARD_ENCRYPTION_40;
                break;
            case 3:
                o = enc.Get(PdfName.LENGTH);
                if (!o.IsNumber())
                    throw new InvalidPdfException("Illegal Length value.");
                lengthValue = ((PdfNumber)o).IntValue;
                if (lengthValue > 128 || lengthValue < 40 || lengthValue % 8 != 0)
                    throw new InvalidPdfException("Illegal Length value.");
                cryptoMode = PdfWriter.STANDARD_ENCRYPTION_128;
                break;
            case 4:
                // R4 uses crypt filters: look at /CF /StdCF /CFM to pick RC4 vs AES.
                PdfDictionary dic = (PdfDictionary)enc.Get(PdfName.CF);
                if (dic == null)
                    throw new InvalidPdfException("/CF not found (encryption)");
                dic = (PdfDictionary)dic.Get(PdfName.STDCF);
                if (dic == null)
                    throw new InvalidPdfException("/StdCF not found (encryption)");
                if (PdfName.V2.Equals(dic.Get(PdfName.CFM)))
                    cryptoMode = PdfWriter.STANDARD_ENCRYPTION_128;
                else if (PdfName.AESV2.Equals(dic.Get(PdfName.CFM)))
                    cryptoMode = PdfWriter.ENCRYPTION_AES_128;
                else
                    throw new UnsupportedPdfException("No compatible encryption found");
                PdfObject em = enc.Get(PdfName.ENCRYPTMETADATA);
                if (em != null && em.ToString().Equals("false"))
                    cryptoMode |= PdfWriter.DO_NOT_ENCRYPT_METADATA;
                break;
            default:
                throw new UnsupportedPdfException("Unknown encryption type R = " + rValue);
        }
    }
    else if (filter.Equals(PdfName.PUBSEC))
    {
        // ---- Public-key (certificate) security handler ----
        bool foundRecipient = false;
        byte[] envelopedData = null;
        PdfArray recipients = null;

        o = enc.Get(PdfName.V);
        if (!o.IsNumber())
            throw new InvalidPdfException("Illegal V value.");
        int vValue = ((PdfNumber)o).IntValue;
        switch (vValue)
        {
            case 1:
                cryptoMode = PdfWriter.STANDARD_ENCRYPTION_40;
                lengthValue = 40;
                recipients = (PdfArray)enc.Get(PdfName.RECIPIENTS);
                break;
            case 2:
                o = enc.Get(PdfName.LENGTH);
                if (!o.IsNumber())
                    throw new InvalidPdfException("Illegal Length value.");
                lengthValue = ((PdfNumber)o).IntValue;
                if (lengthValue > 128 || lengthValue < 40 || lengthValue % 8 != 0)
                    throw new InvalidPdfException("Illegal Length value.");
                cryptoMode = PdfWriter.STANDARD_ENCRYPTION_128;
                recipients = (PdfArray)enc.Get(PdfName.RECIPIENTS);
                break;
            case 4:
                PdfDictionary dic = (PdfDictionary)enc.Get(PdfName.CF);
                if (dic == null)
                    throw new InvalidPdfException("/CF not found (encryption)");
                dic = (PdfDictionary)dic.Get(PdfName.DEFAULTCRYPTFILTER);
                if (dic == null)
                    throw new InvalidPdfException("/DefaultCryptFilter not found (encryption)");
                if (PdfName.V2.Equals(dic.Get(PdfName.CFM)))
                {
                    cryptoMode = PdfWriter.STANDARD_ENCRYPTION_128;
                    lengthValue = 128;
                }
                else if (PdfName.AESV2.Equals(dic.Get(PdfName.CFM)))
                {
                    cryptoMode = PdfWriter.ENCRYPTION_AES_128;
                    lengthValue = 128;
                }
                else
                    throw new UnsupportedPdfException("No compatible encryption found");
                PdfObject em = dic.Get(PdfName.ENCRYPTMETADATA);
                if (em != null && em.ToString().Equals("false"))
                    cryptoMode |= PdfWriter.DO_NOT_ENCRYPT_METADATA;
                recipients = (PdfArray)dic.Get(PdfName.RECIPIENTS);
                break;
            default:
                // FIX: the message previously reported rValue, which is never assigned in
                // the PUBSEC branch (stale/zero) — the value examined here is vValue.
                throw new UnsupportedPdfException("Unknown encryption type V = " + vValue);
        }

        // Find the recipient entry our certificate/key can open and extract the seed.
        for (int i = 0; i < recipients.Size; i++)
        {
            PdfObject recipient = recipients[i];
            strings.Remove(recipient);
            CmsEnvelopedData data = null;
            data = new CmsEnvelopedData(recipient.GetBytes());
            foreach (RecipientInformation recipientInfo in data.GetRecipientInfos().GetRecipients())
            {
                if (recipientInfo.RecipientID.Match(certificate) && !foundRecipient)
                {
                    envelopedData = recipientInfo.GetContent(certificateKey);
                    foundRecipient = true;
                }
            }
        }
        if (!foundRecipient || envelopedData == null)
        {
            throw new UnsupportedPdfException("Bad certificate and key.");
        }

        // File key = SHA1(first 20 bytes of envelope || each recipient [|| metadataPad]).
        SHA1 sh = new SHA1CryptoServiceProvider();
        sh.TransformBlock(envelopedData, 0, 20, envelopedData, 0);
        for (int i = 0; i < recipients.Size; i++)
        {
            byte[] encodedRecipient = recipients[i].GetBytes();
            sh.TransformBlock(encodedRecipient, 0, encodedRecipient.Length, encodedRecipient, 0);
        }
        if ((cryptoMode & PdfWriter.DO_NOT_ENCRYPT_METADATA) != 0)
            sh.TransformBlock(PdfEncryption.metadataPad, 0, PdfEncryption.metadataPad.Length, PdfEncryption.metadataPad, 0);
        sh.TransformFinalBlock(envelopedData, 0, 0); // zero-length block just closes the digest
        encryptionKey = sh.Hash;
    }

    decrypt = new PdfEncryption();
    decrypt.SetCryptoMode(cryptoMode, lengthValue);

    if (filter.Equals(PdfName.STANDARD))
    {
        //check by owner password
        decrypt.SetupByOwnerPassword(documentID, password, uValue, oValue, pValue);
        if (!EqualsArray(uValue, decrypt.userKey, (rValue == 3 || rValue == 4) ? 16 : 32))
        {
            //check by user password
            decrypt.SetupByUserPassword(documentID, password, oValue, pValue);
            if (!EqualsArray(uValue, decrypt.userKey, (rValue == 3 || rValue == 4) ? 16 : 32))
            {
                throw new BadPasswordException("Bad user password");
            }
        }
        else
            ownerPasswordUsed = true;
    }
    else if (filter.Equals(PdfName.PUBSEC))
    {
        decrypt.SetupByEncryptionKey(encryptionKey, lengthValue);
        ownerPasswordUsed = true;
    }

    // Decrypt every string collected during parsing.
    for (int k = 0; k < strings.Count; ++k)
    {
        PdfString str = (PdfString)strings[k];
        str.Decrypt(this);
    }

    // Drop the encryption dictionary itself from the xref cache.
    if (encDic.IsIndirect())
    {
        cryptoRef = (PRIndirectReference)encDic;
        xrefObj[cryptoRef.Number] = null;
    }
    encryptionError = false;
}
/// <summary>
/// Extracts the contents of <paramref name="archiveFilename"/> into <paramref name="destination"/>,
/// or only verifies the archive when <paramref name="destination"/> is null (write path is skipped).
/// Files are rebuilt hash-chunk by hash-chunk, preferring chunks already present in the local
/// working copy over decompressing them from the archive. New files are written to a temp
/// directory first and only committed (delete old / move new) after all checksums pass.
/// </summary>
/// <param name="archiveFilename">Path of the archive to read.</param>
/// <param name="destination">Target directory, or null for verify-only mode.</param>
/// <param name="stats">Progress/status sink; also carries the user's cancel flag.</param>
/// <returns>true on success; false on cancel, checksum mismatch, or user abort.</returns>
public static bool Extract(string archiveFilename, string destination, Stats stats)
{
    stats.Title = Path.GetFileName(archiveFilename);
    ArchiveReader archive = new ArchiveReader(archiveFilename, stats);
    // destination == null selects verify-only mode: no temp files, no deletes/moves.
    bool writeEnabled = (destination != null);
    // Cache of opened zip containers, keyed by lowercased name. The archive's own zip is pre-seeded.
    Dictionary<string, ZipFile> openZips = new Dictionary<string, ZipFile>();
    openZips[archive.ArchiveName.ToLowerInvariant()] = archive.zipFile;
    // State of the single working-copy file we keep open for async chunk reads.
    FileStream openFile = null;
    IAsyncResult openFileRead = null;
    string openFilePathLC = null;
    long totalSize = 0;
    long totalSizeDone = 0;
    // Setup cache: map each source path to an ExtractedData cache line, and record, in global
    // extraction order, a monotonically increasing "time" ref for every use of that path.
    // Refs[0] is therefore the next time the line will be needed (used later for eviction).
    Dictionary<string, ExtractedData> dataCache = new Dictionary<string, ExtractedData>();
    int time = 0;
    foreach (File file in archive.files)
    {
        totalSize += file.Size;
        foreach (int hashIndex in file.HashIndices)
        {
            if (stats.Canceled) return false;
            string path = archive.GetString(archive.hashes[hashIndex].Path).ToLowerInvariant();
            if (!dataCache.ContainsKey(path)) dataCache.Add(path, new ExtractedData());
            dataCache[path].Refs.Add(time++);
        }
    }
    string stateFile = writeEnabled ? Path.Combine(destination, Settings.StateFile) : null;
    stats.Status = "Loading working copy state";
    WorkingCopy workingCopy = writeEnabled ? WorkingCopy.Load(stateFile) : new WorkingCopy();
    WorkingCopy newWorkingFiles = new WorkingCopy();
    List<WorkingHash> oldWorkingHashes = new List<WorkingHash>();
    if (writeEnabled)
    {
        // Rescan local files; persist the state only if new entries were actually hashed.
        int oldCount = workingCopy.Count;
        workingCopy = WorkingCopy.HashLocalFiles(destination, stats, workingCopy);
        if (workingCopy.Count > oldCount)
        {
            stats.Status = "Saving working copy state";
            workingCopy.Save(stateFile);
        }
    }
    // Flatten all known local hashes, back-linking each to its file, then sort so that
    // BinarySearch-by-hash below can find matching chunks.
    foreach (WorkingFile wf in workingCopy.GetAll())
    {
        foreach (WorkingHash wh in wf.Hashes)
        {
            wh.File = wf;
        }
        oldWorkingHashes.AddRange(wf.Hashes);
    }
    oldWorkingHashes.Sort();
    if (stats.Canceled) return false;
    string tmpPath = null;
    if (writeEnabled)
    {
        tmpPath = Path.Combine(destination, Settings.TmpDirectory);
        Directory.CreateDirectory(tmpPath);
    }
    // Cache lines currently holding decompressed data (value is unused; used as a set).
    Dictionary<ExtractedData, bool> loaded = new Dictionary<ExtractedData, bool>();
    // NOTE(review): these two counters are accumulated below but never read afterwards —
    // presumably leftover instrumentation; confirm before removing.
    float waitingForDecompression = 0.0f;
    float mbUnloadedDueToMemoryPressure = 0.0f;
    stats.Status = writeEnabled ? "Extracting" : "Verifying";
    stats.WriteStartTime = DateTime.Now;
    foreach (File file in archive.files)
    {
        string tmpFileName = null;
        FileStream outFile = null;
        if (writeEnabled)
        {
            // Quickpath - see if the file exists and has correct content
            WorkingFile workingFile = workingCopy.Find(Path.Combine(destination, file.Name));
            if (workingFile != null && workingFile.ExistsOnDisk() && !workingFile.IsModifiedOnDisk())
            {
                if (new Hash(workingFile.Hash).CompareTo(new Hash(file.Hash)) == 0)
                {
                    // The file is already there - no need to extract it.
                    // Carried into newWorkingFiles with TempFileName == null, which the commit
                    // phase below uses to recognize skipped files.
                    stats.Status = "Skipped " + file.Name;
                    workingFile.UserModified = false;
                    newWorkingFiles.Add(workingFile);
                    stats.Unmodified += file.Size;
                    totalSizeDone += file.Size;
                    continue;
                }
            }
            // Pick a temp file name that does not collide ("name", "name-1", "name-2", ...).
            int tmpFileNamePostfix = 0;
            do
            {
                tmpFileName = Path.Combine(tmpPath, file.Name + (tmpFileNamePostfix == 0 ? string.Empty : ("-" + tmpFileNamePostfix.ToString())));
                tmpFileNamePostfix++;
            } while (System.IO.File.Exists(tmpFileName));
            Directory.CreateDirectory(Path.GetDirectoryName(tmpFileName));
            outFile = new FileStream(tmpFileName, FileMode.CreateNew, FileAccess.Write);
            // Avoid fragmentation: preallocate the full file size up front.
            outFile.SetLength(file.Size);
            outFile.Position = 0;
        }
        List<WorkingHash> workingHashes = new List<WorkingHash>();
        try
        {
            stats.Progress = 0;
            stats.Status = (writeEnabled ? "Extracting " : "Verifying ") + file.Name;
            // Incremental SHA-1 over the file's chunks, finalized and compared after the loop.
            SHA1CryptoServiceProvider sha1Provider = new SHA1CryptoServiceProvider();
            // Pending chunk reads/decompressions, in output order. i = next chunk to write,
            // p = next chunk to prefetch; p runs ahead of i up to Settings.WritePrefetchSize.
            Queue<MemoryStreamRef> writeQueue = new Queue<MemoryStreamRef>();
            int p = 0;
            for (int i = 0; i < file.HashIndices.Count; i++)
            {
                if (stats.Canceled)
                {
                    stats.Status = "Canceled. No files were modified.";
                    return false;
                }
                // Prefetch
                for (; p < file.HashIndices.Count; p++)
                {
                    if (writeQueue.Count > 0 && writeQueue.Peek().Ready.WaitOne(TimeSpan.Zero)) break; // Some data is ready - go process it
                    // Estimate how much data is already queued; distinct streams only, since
                    // several queue entries may reference slices of the same MemoryStream.
                    int prefetchSize = 0;
                    Dictionary<MemoryStream, bool> prefetchedStreams = new Dictionary<MemoryStream, bool>();
                    foreach (MemoryStreamRef memStreamRef in writeQueue)
                    {
                        prefetchedStreams[memStreamRef.MemStream] = true;
                    }
                    foreach (MemoryStream prefetchedStream in prefetchedStreams.Keys)
                    {
                        prefetchSize += (int)prefetchedStream.Length;
                    }
                    if (writeQueue.Count > 0 && prefetchSize > Settings.WritePrefetchSize) break; // We have prefetched enough data
                    HashSource hashSrc = archive.hashes[file.HashIndices[p]];
                    string path = archive.GetString(hashSrc.Path).ToLowerInvariant();
                    ExtractedData data = dataCache[path];
                    // See if we have the hash on disk. Try our best not to seek too much:
                    // among all local copies of this chunk, prefer the one closest to the
                    // current position of the already-open file.
                    WorkingHash onDiskHash = null;
                    long bestSeekDistance = long.MaxValue;
                    int idx = oldWorkingHashes.BinarySearch(new WorkingHash() { Hash = hashSrc.Hash });
                    if (idx >= 0)
                    {
                        // BinarySearch may land anywhere in a run of equal hashes; rewind to the
                        // first, then scan the whole run.
                        while (idx - 1 >= 0 && oldWorkingHashes[idx - 1].Hash.Equals(hashSrc.Hash)) idx--;
                        for (; idx < oldWorkingHashes.Count && oldWorkingHashes[idx].Hash.Equals(hashSrc.Hash); idx++)
                        {
                            WorkingHash wh = oldWorkingHashes[idx];
                            long seekDistance;
                            if (openFile != null && openFilePathLC == wh.File.NameLowercase)
                            {
                                seekDistance = Math.Abs(openFile.Position - wh.Offset);
                            }
                            else
                            {
                                seekDistance = long.MaxValue;
                            }
                            if (onDiskHash == null || seekDistance < bestSeekDistance)
                            {
                                onDiskHash = wh;
                                bestSeekDistance = seekDistance;
                            }
                        }
                    }
                    if (onDiskHash != null && ((openFilePathLC == onDiskHash.File.NameLowercase) || (onDiskHash.File.ExistsOnDisk() && !onDiskHash.File.IsModifiedOnDisk())))
                    {
                        // Source the chunk from the local working copy via async read.
                        MemoryStream memStream = new MemoryStream(onDiskHash.Length);
                        memStream.SetLength(onDiskHash.Length);
                        // Finish the last read — only one BeginRead may be outstanding.
                        if (openFileRead != null)
                        {
                            openFile.EndRead(openFileRead);
                            openFileRead = null;
                        }
                        // Open other file
                        if (openFilePathLC != onDiskHash.File.NameLowercase)
                        {
                            if (openFile != null) openFile.Close();
                            // NOTE(review): opens by the lowercased name — relies on a
                            // case-insensitive filesystem (Windows); confirm if portability matters.
                            openFile = new FileStream(onDiskHash.File.NameLowercase, FileMode.Open, FileAccess.Read, FileShare.Read, Settings.FileStreamBufferSize, FileOptions.None);
                            openFilePathLC = onDiskHash.File.NameLowercase;
                            System.Diagnostics.Debug.Write(Path.GetFileName(onDiskHash.File.NameMixedcase));
                        }
                        // Debug trace: "." = sequential read, "S" = seek was needed.
                        System.Diagnostics.Debug.Write(onDiskHash.Offset == openFile.Position ? "." : "S");
                        if (openFile.Position != onDiskHash.Offset) openFile.Position = onDiskHash.Offset;
                        openFileRead = openFile.BeginRead(memStream.GetBuffer(), 0, (int)memStream.Length, null, null);
                        writeQueue.Enqueue(new MemoryStreamRef() { Ready = openFileRead.AsyncWaitHandle, MemStream = memStream, Offset = 0, Length = (int)memStream.Length, CacheLine = null, Hash = hashSrc.Hash });
                        stats.ReadFromWorkingCopy += hashSrc.Length;
                    }
                    else
                    {
                        // Locate and load the zipentry. A leading "/" means the entry lives in
                        // the main archive; otherwise the first path segment names a side zip.
                        ZipEntry pZipEntry;
                        path = path.Replace("\\", "/");
                        if (path.StartsWith("/"))
                        {
                            pZipEntry = archive.zipFile[path.Substring(1)];
                        }
                        else
                        {
                            int slashIndex = path.IndexOf("/");
                            string zipPath = path.Substring(0, slashIndex);
                            string entryPath = path.Substring(slashIndex + 1);
                            if (!openZips.ContainsKey(zipPath)) openZips[zipPath] = new ZipFile(Path.Combine(Path.GetDirectoryName(archiveFilename), zipPath));
                            pZipEntry = openZips[zipPath][entryPath];
                        }
                        if (data.Data == null)
                        {
                            // Not cached yet — kick off background decompression.
                            stats.ReadFromArchiveDecompressed += pZipEntry.UncompressedSize;
                            stats.ReadFromArchiveCompressed += pZipEntry.CompressedSize;
                            data.AsycDecompress(pZipEntry);
                        }
                        loaded[data] = true;
                        writeQueue.Enqueue(new MemoryStreamRef() { Ready = data.LoadDone, MemStream = data.Data, Offset = hashSrc.Offset, Length = hashSrc.Length, CacheLine = data, Hash = hashSrc.Hash });
                    }
                }
                // Consume the next chunk in output order, polling its ready handle.
                MemoryStreamRef writeItem = writeQueue.Dequeue();
                while (writeItem.Ready.WaitOne(TimeSpan.FromSeconds(0.01)) == false)
                {
                    waitingForDecompression += 0.01f;
                }
                // Write output
                if (writeEnabled)
                {
                    // Offset is captured before the Write so the recorded hash maps to the
                    // chunk's position in the new file.
                    workingHashes.Add(new WorkingHash() { Hash = writeItem.Hash, Offset = outFile.Position, Length = writeItem.Length });
                    outFile.Write(writeItem.MemStream.GetBuffer(), (int)writeItem.Offset, writeItem.Length);
                }
                // Verify SHA1 (incremental; finalized after the chunk loop)
                sha1Provider.TransformBlock(writeItem.MemStream.GetBuffer(), (int)writeItem.Offset, writeItem.Length, writeItem.MemStream.GetBuffer(), (int)writeItem.Offset);
                stats.TotalWritten += writeItem.Length;
                totalSizeDone += writeItem.Length;
                stats.Title = string.Format("{0:F0}% {1}", 100 * (float)totalSizeDone / (float)totalSize, Path.GetFileName(archiveFilename));
                stats.Progress = (float)i / (float)file.HashIndices.Count;
                // Unload if it is not needed anymore: pop this use's ref; when no future uses
                // remain, release the cache line's buffer back to the pool.
                if (writeItem.CacheLine != null)
                {
                    writeItem.CacheLine.Refs.RemoveAt(0);
                    if (writeItem.CacheLine.Refs.Count == 0)
                    {
                        StreamPool.Release(ref writeItem.CacheLine.Data);
                        writeItem.CacheLine.LoadDone = null;
                        loaded.Remove(writeItem.CacheLine);
                    }
                }
                // Unload some data if we are running out of memory: evict the line whose next
                // use (Refs[0]) is farthest in the future, unless it is still in the write queue.
                while (loaded.Count * Settings.MaxZipEntrySize > Settings.WriteCacheSize)
                {
                    ExtractedData maxRef = null;
                    foreach (ExtractedData ed in loaded.Keys)
                    {
                        if (maxRef == null || ed.Refs[0] > maxRef.Refs[0]) maxRef = ed;
                    }
                    // Never release a buffer while its decompression is still in flight.
                    maxRef.LoadDone.WaitOne();
                    // Check that we are not evicting something from the write queue
                    bool inQueue = false;
                    foreach (MemoryStreamRef memRef in writeQueue)
                    {
                        if (memRef.CacheLine == maxRef) inQueue = true;
                    }
                    if (inQueue) break;
                    mbUnloadedDueToMemoryPressure += (float)maxRef.Data.Length / 1024 / 1024;
                    StreamPool.Release(ref maxRef.Data);
                    maxRef.LoadDone = null;
                    loaded.Remove(maxRef);
                }
            }
            stats.Progress = 0;
            // Finalize the hash and compare against the archive's expected value.
            sha1Provider.TransformFinalBlock(new byte[0], 0, 0);
            byte[] sha1 = sha1Provider.Hash;
            if (new Hash(sha1).CompareTo(new Hash(file.Hash)) != 0)
            {
                MessageBox.Show("The checksum of " + file.Name + " does not match original value. The file is corrupted.", "Critical error", MessageBoxButtons.OK, MessageBoxIcon.Error);
                if (writeEnabled)
                {
                    stats.Status = "Extraction failed. Checksum mismatch.";
                }
                else
                {
                    stats.Status = "Verification failed. Checksum mismatch.";
                }
                return false;
            }
        }
        finally
        {
            // Always close the temp output file, including on cancel/mismatch returns.
            if (outFile != null) outFile.Close();
        }
        if (writeEnabled)
        {
            // Record the freshly written temp file; TempFileName != null marks it for the
            // commit (move) phase below.
            FileInfo fileInfo = new FileInfo(tmpFileName);
            WorkingFile workingFile = new WorkingFile() { NameMixedcase = Path.Combine(destination, file.Name), Size = fileInfo.Length, Created = fileInfo.CreationTime, Modified = fileInfo.LastWriteTime, Hash = file.Hash, TempFileName = tmpFileName, Hashes = workingHashes };
            newWorkingFiles.Add(workingFile);
        }
    }
    stats.Progress = 0;
    stats.Title = string.Format("100% {0}", Path.GetFileName(archiveFilename));
    // Close sources
    foreach (ZipFile zip in openZips.Values)
    {
        zip.Dispose();
    }
    if (openFileRead != null) openFile.EndRead(openFileRead);
    if (openFile != null) openFile.Close();
    // Replace the old working copy with new one (commit phase; write mode only)
    if (writeEnabled)
    {
        List<string> deleteFilesLC = new List<string>();
        List<string> deleteFilesAskLC = new List<string>();
        List<string> keepFilesLC = new List<string>();
        stats.Status = "Preparing to move files";
        // Delete all non-user-modified files
        foreach (WorkingFile workingFile in workingCopy.GetAll())
        {
            if (!workingFile.UserModified && workingFile.ExistsOnDisk() && !workingFile.IsModifiedOnDisk())
            {
                WorkingFile newWF = newWorkingFiles.Find(workingFile.NameLowercase);
                // Do not delete if it was a skipped 'fast-path' file (TempFileName == null)
                if (newWF != null && newWF.TempFileName == null) continue;
                deleteFilesLC.Add(workingFile.NameLowercase);
            }
        }
        // Find obstructions for new files: existing files not already slated for deletion
        foreach (WorkingFile newWorkingFile in newWorkingFiles.GetAll())
        {
            if (newWorkingFile.TempFileName != null && newWorkingFile.ExistsOnDisk() && !deleteFilesLC.Contains(newWorkingFile.NameLowercase))
            {
                deleteFilesAskLC.Add(newWorkingFile.NameLowercase);
            }
        }
        // Ask the user for permission to delete (listing at most ~30 names)
        StringBuilder sb = new StringBuilder();
        sb.AppendLine("Do you want to override local changes in the following files?");
        int numLines = 0;
        foreach (string deleteFileAskLC in deleteFilesAskLC)
        {
            sb.AppendLine(deleteFileAskLC);
            numLines++;
            if (numLines > 30)
            {
                sb.AppendLine("...");
                sb.AppendLine("(" + deleteFilesAskLC.Count + " files in total)");
                break;
            }
        }
        if (deleteFilesAskLC.Count > 0)
        {
            DialogResult overrideAnswer = Settings.AlwaysOverwrite ? DialogResult.Yes : MessageBox.Show(sb.ToString(), "Override files", MessageBoxButtons.YesNoCancel);
            if (overrideAnswer == DialogResult.Cancel)
            {
                stats.Status = "Canceled. No files were modified.";
                return false;
            }
            if (overrideAnswer == DialogResult.Yes)
            {
                deleteFilesLC.AddRange(deleteFilesAskLC);
            }
            else
            {
                // "No": keep the obstructing files; their replacements are not moved in below.
                keepFilesLC = deleteFilesAskLC;
            }
            deleteFilesAskLC.Clear();
        }
        // Delete files (retry loop driven by an Abort/Retry/Ignore dialog on failure)
        foreach (string deleteFileLC in deleteFilesLC)
        {
            stats.Status = "Deleting " + Path.GetFileName(deleteFileLC);
            while (true)
            {
                try
                {
                    FileInfo fileInfo = new FileInfo(deleteFileLC);
                    if (fileInfo.IsReadOnly)
                    {
                        // Clear the read-only attribute so the delete can succeed.
                        fileInfo.IsReadOnly = false;
                    }
                    System.IO.File.Delete(deleteFileLC);
                    workingCopy.Remove(deleteFileLC);
                    break;
                }
                catch (Exception e)
                {
                    DialogResult deleteAnswer = MessageBox.Show("Can not delete file " + deleteFileLC + Environment.NewLine + e.Message, "Error", MessageBoxButtons.AbortRetryIgnore);
                    if (deleteAnswer == DialogResult.Retry) continue;
                    if (deleteAnswer == DialogResult.Ignore) break;
                    if (deleteAnswer == DialogResult.Abort)
                    {
                        stats.Status = "Canceled. Some files were deleted.";
                        return false;
                    }
                }
            }
        }
        // Move the new files from the temp directory into place (same retry-dialog pattern)
        foreach (WorkingFile newWorkingFile in newWorkingFiles.GetAll())
        {
            if (!keepFilesLC.Contains(newWorkingFile.NameLowercase) && newWorkingFile.TempFileName != null)
            {
                stats.Status = "Moving " + Path.GetFileName(newWorkingFile.NameMixedcase);
                while (true)
                {
                    try
                    {
                        Directory.CreateDirectory(Path.GetDirectoryName(newWorkingFile.NameMixedcase));
                        System.IO.File.Move(newWorkingFile.TempFileName, newWorkingFile.NameMixedcase);
                        workingCopy.Add(newWorkingFile);
                        break;
                    }
                    catch (Exception e)
                    {
                        DialogResult moveAnswer = MessageBox.Show("Error when moving " + newWorkingFile.TempFileName + Environment.NewLine + e.Message, "Error", MessageBoxButtons.AbortRetryIgnore);
                        if (moveAnswer == DialogResult.Retry) continue;
                        if (moveAnswer == DialogResult.Ignore) break;
                        if (moveAnswer == DialogResult.Abort)
                        {
                            stats.Status = "Canceled. Some files were deleted or overridden.";
                            return false;
                        }
                    }
                }
            }
        }
        stats.Status = "Saving working copy state";
        workingCopy.Save(stateFile);
        stats.Status = "Deleting temporary directory";
        // Best-effort cleanup: a locked temp directory must not fail the whole extraction.
        try
        {
            if (Directory.Exists(tmpPath)) Directory.Delete(tmpPath, true);
        }
        catch { }
    }
    stats.EndTime = DateTime.Now;
    if (writeEnabled)
    {
        stats.Status = "Extraction finished";
    }
    else
    {
        stats.Status = "Verification finished";
    }
    return true;
}