/// <summary>Reads the entire contents of <paramref name="file"/> into a signed-byte array.</summary>
/// <param name="file">file to read; its reported length is trusted as the byte count</param>
/// <returns>an array of file.Length() bytes (trailing bytes stay zero if EOF is hit early)</returns>
public static sbyte[] ReadBytes(FilePath file)
{
    int length = (int)file.Length();
    // A zero length should only happen when loading from a network or similar.
    System.Diagnostics.Debug.Assert(length != 0);
    sbyte[] bytes = new sbyte[length];
    FileInputStream inputStream = null;
    try
    {
        inputStream = new FileInputStream(file);
        int read = 0;
        // Keep pulling until the buffer is full or the stream signals EOF (-1).
        while (read < length)
        {
            int n = inputStream.Read(bytes, read, length - read);
            if (n == -1)
            {
                break;
            }
            read += n;
        }
    }
    finally
    {
        if (inputStream != null)
        {
            inputStream.Close();
        }
    }
    return bytes;
}
/// <summary>Verifies that repository initialization created the expected .git directory layout.</summary>
public virtual void Test001_Initalize()
{
    FilePath gitdir = new FilePath(trash, Constants.DOT_GIT);
    FilePath hooks = new FilePath(gitdir, "hooks");
    FilePath objects = new FilePath(gitdir, "objects");
    FilePath refs = new FilePath(gitdir, "refs");
    FilePath head = new FilePath(gitdir, "HEAD");
    NUnit.Framework.Assert.IsTrue(trash.IsDirectory(), "Exists " + trash);
    NUnit.Framework.Assert.IsTrue(hooks.IsDirectory(), "Exists " + hooks);
    NUnit.Framework.Assert.IsTrue(objects.IsDirectory(), "Exists " + objects);
    FilePath pack = new FilePath(objects, "pack");
    NUnit.Framework.Assert.IsTrue(pack.IsDirectory(), "Exists " + pack);
    FilePath info = new FilePath(objects, "info");
    NUnit.Framework.Assert.IsTrue(info.IsDirectory(), "Exists " + info);
    // objects/ must contain exactly its two subdirectories: pack/ and info/.
    NUnit.Framework.Assert.AreEqual(2L, objects.ListFiles().Length);
    NUnit.Framework.Assert.IsTrue(refs.IsDirectory(), "Exists " + refs);
    FilePath heads = new FilePath(refs, "heads");
    NUnit.Framework.Assert.IsTrue(heads.IsDirectory(), "Exists " + heads);
    FilePath tags = new FilePath(refs, "tags");
    NUnit.Framework.Assert.IsTrue(tags.IsDirectory(), "Exists " + tags);
    NUnit.Framework.Assert.IsTrue(head.IsFile(), "Exists " + head);
    // The symbolic-ref HEAD file is expected to be exactly 23 bytes.
    NUnit.Framework.Assert.AreEqual(23, head.Length());
}
/// <summary>Reads an entire text file into a string.</summary>
/// <param name="f">file to read; assumed small enough to buffer fully in memory</param>
/// <returns>the characters actually read from the file</returns>
/// <exception cref="System.IO.IOException"></exception>
public static string LoadFile(FilePath f)
{
    int length = (int)f.Length();
    // don't worry about very long files
    char[] buf = new char[length];
    FileReader reader = new FileReader(f);
    try
    {
        // A single Read() is not guaranteed to fill the buffer; loop until all
        // characters are in or the reader reports end-of-stream. The original
        // issued one Read() and silently kept NUL padding on a short read.
        int read = 0;
        while (read < length)
        {
            int n = reader.Read(buf, read, length - read);
            if (n <= 0)
            {
                break;
            }
            read += n;
        }
        return new string(buf, 0, read);
    }
    finally
    {
        // The original never closed the FileReader (resource leak).
        reader.Close();
    }
}
/// <summary>Extracts thumbnail bytes from sample APP1 data and verifies the written file.</summary>
public virtual void TestWriteThumbnail()
{
    ExifThumbnailDirectory directory = ExifReaderTest.ProcessBytes<ExifThumbnailDirectory>("Tests/Data/manuallyAddedThumbnail.jpg.app1");
    Sharpen.Tests.IsTrue(directory.HasThumbnailData());
    FilePath tempFile = FilePath.CreateTempFile("thumbnail", ".jpg");
    try
    {
        string tempPath = tempFile.GetAbsolutePath();
        directory.WriteThumbnail(tempPath);
        FilePath written = new FilePath(tempPath);
        // The sample thumbnail is exactly 2970 bytes long.
        Sharpen.Tests.AreEqual(2970, written.Length());
        Sharpen.Tests.IsTrue(written.Exists());
    }
    finally
    {
        // Clean up the temp file regardless of the assertion outcome.
        if (!tempFile.Delete())
        {
            NUnit.Framework.Assert.Fail("Unable to delete temp thumbnail file.");
        }
    }
}
// end finally
// end decodeToFile
/// <summary>
/// Convenience method for reading a base64-encoded
/// file and decoding it.
/// </summary>
/// <remarks>
/// Convenience method for reading a base64-encoded
/// file and decoding it.
/// <p>As of v 2.3, if there is an error,
/// the method will throw a java.io.IOException. <b>This is new to v2.3!</b>
/// In earlier versions, it just returned false, but
/// in retrospect that's a pretty poor way to handle it.</p>
/// </remarks>
/// <param name="filename">Filename for reading encoded data</param>
/// <returns>decoded byte array</returns>
/// <exception cref="System.IO.IOException">if there is an error</exception>
/// <since>2.1</since>
public static byte[] DecodeFromFile(string filename)
{
    byte[] decodedData = null;
    Base64.InputStream bis = null;
    try
    {
        // Set up some useful variables
        FilePath file = new FilePath(filename);
        byte[] buffer = null;
        int length = 0;
        int numBytes = 0;
        // Check for size of file
        if (file.Length() > int.MaxValue)
        {
            throw new IOException("File is too big for this convenience method (" + file.Length() + " bytes).");
        }
        // Decoded output is always smaller than the encoded input, so a buffer
        // of the file's size is guaranteed to be large enough.
        buffer = new byte[(int)file.Length()];
        // Open a stream that decodes on the fly while we read.
        bis = new Base64.InputStream(new BufferedInputStream(new FileInputStream(file)), Couchbase.Lite.Support.Base64.Decode);
        // Read until done
        while ((numBytes = bis.Read(buffer, length, 4096)) >= 0)
        {
            length += numBytes;
        }
        // Trim the result down to the bytes actually decoded.
        decodedData = new byte[length];
        System.Array.Copy(buffer, 0, decodedData, 0, length);
    }
    finally
    {
        // The original had a dead "catch (IOException e) { throw; }" and called
        // bis.Close() unconditionally, which NRE'd when the stream was never
        // opened (masked only by the inner catch). Guard for null instead.
        if (bis != null)
        {
            try
            {
                bis.Close();
            }
            catch (Exception)
            {
            }
        }
    }
    return decodedData;
}
/// <summary>Returns the combined size, in bytes, of the database file and all attachments.</summary>
public long TotalDataSize()
{
    long databaseBytes = new FilePath(path).Length();
    return databaseBytes + attachments.TotalDataSize();
}
/// <summary>
/// Updates the file in the working tree with content and mode from an entry
/// in the index.
/// </summary>
/// <remarks>
/// Updates the file in the working tree with content and mode from an entry
/// in the index. The new content is first written to a new temporary file in
/// the same directory as the real file. Then that new file is renamed to the
/// final filename.
/// <p>
/// TODO: this method works directly on File IO, we may need another
/// abstraction (like WorkingTreeIterator). This way we could tell e.g.
/// Eclipse that Files in the workspace got changed
/// </p>
/// </remarks>
/// <param name="repo"></param>
/// <param name="f">
/// the file to be modified. The parent directory for this file
/// has to exist already
/// </param>
/// <param name="entry">the entry containing new mode and content</param>
/// <param name="or">object reader to use for checkout</param>
/// <exception cref="System.IO.IOException">System.IO.IOException</exception>
public static void CheckoutEntry(Repository repo, FilePath f, DirCacheEntry entry
    , ObjectReader or)
{
    ObjectLoader ol = or.Open(entry.GetObjectId());
    FilePath parentDir = f.GetParentFile();
    // Write to a temporary file in the same directory first, so the final
    // rename below replaces the target in a single step.
    FilePath tmpFile = FilePath.CreateTempFile("._" + f.GetName(), null, parentDir);
    WorkingTreeOptions opt = repo.GetConfig().Get(WorkingTreeOptions.KEY);
    FileOutputStream rawChannel = new FileOutputStream(tmpFile);
    OutputStream channel;
    // Wrap the stream to translate LF -> CRLF when core.autocrlf=true.
    if (opt.GetAutoCRLF() == CoreConfig.AutoCRLF.TRUE)
    {
        channel = new AutoCRLFOutputStream(rawChannel);
    }
    else
    {
        channel = rawChannel;
    }
    try
    {
        ol.CopyTo(channel);
    }
    finally
    {
        channel.Close();
    }
    FS fs = repo.FileSystem;
    // Propagate the executable bit when both config and filesystem support it.
    if (opt.IsFileMode() && fs.SupportsExecute())
    {
        if (FileMode.EXECUTABLE_FILE.Equals(entry.RawMode))
        {
            if (!fs.CanExecute(tmpFile))
            {
                fs.SetExecute(tmpFile, true);
            }
        }
        else
        {
            if (fs.CanExecute(tmpFile))
            {
                fs.SetExecute(tmpFile, false);
            }
        }
    }
    if (!tmpFile.RenameTo(f))
    {
        // The rename failed. Delete the target file and try again.
        FileUtils.Delete(f);
        if (!tmpFile.RenameTo(f))
        {
            throw new IOException(MessageFormat.Format(JGitText.Get().couldNotWriteFile, tmpFile
                .GetPath(), f.GetPath()));
        }
    }
    entry.LastModified = f.LastModified();
    if (opt.GetAutoCRLF() != CoreConfig.AutoCRLF.FALSE)
    {
        // AutoCRLF wants the on-disk size (which may differ from blob size).
        entry.SetLength(f.Length());
    }
    else
    {
        entry.SetLength((int)ol.GetSize());
    }
}
/// <summary>Writing an empty dir-cache produces a 32-byte (12 header + 20 checksum) index file.</summary>
public virtual void TestWriteEmptyCommit_RealIndex()
{
    FilePath indexFile = new FilePath(db.Directory, "index");
    FilePath lockFile = new FilePath(db.Directory, "index.lock");
    NUnit.Framework.Assert.IsFalse(indexFile.Exists());
    NUnit.Framework.Assert.IsFalse(lockFile.Exists());
    DirCache dc = db.LockDirCache();
    // Taking the lock creates an empty lock file.
    NUnit.Framework.Assert.AreEqual(0, lockFile.Length());
    dc.Write();
    // 12-byte header plus 20-byte trailing SHA-1 checksum.
    NUnit.Framework.Assert.AreEqual(12 + 20, lockFile.Length());
    NUnit.Framework.Assert.IsTrue(dc.Commit());
    // Commit renames the lock file into place as the real index.
    NUnit.Framework.Assert.IsTrue(indexFile.Exists());
    NUnit.Framework.Assert.IsFalse(lockFile.Exists());
    NUnit.Framework.Assert.AreEqual(12 + 20, indexFile.Length());
}
/// <summary>Inserts the file's content as a blob and adds a matching entry to the builder.</summary>
/// <param name="path">repository-relative path for the new entry</param>
/// <param name="file">file whose content becomes the blob</param>
/// <param name="newObjectInserter">inserter used to write the blob</param>
/// <param name="builder">dir-cache builder receiving the new entry</param>
/// <param name="stage">merge stage to record on the entry</param>
/// <returns>the entry added to the builder</returns>
/// <exception cref="System.IO.IOException"></exception>
private DirCacheEntry AddEntryToBuilder(string path, FilePath file, ObjectInserter
    newObjectInserter, DirCacheBuilder builder, int stage)
{
    FileInputStream inputStream = new FileInputStream(file);
    ObjectId id;
    try
    {
        id = newObjectInserter.Insert(Constants.OBJ_BLOB, file.Length(), inputStream);
    }
    finally
    {
        // Close even when Insert throws; the original leaked the stream on error.
        inputStream.Close();
    }
    DirCacheEntry entry = new DirCacheEntry(path, stage);
    entry.SetObjectId(id);
    entry.FileMode = FileMode.REGULAR_FILE;
    entry.LastModified = file.LastModified();
    entry.SetLength((int)file.Length());
    builder.Add(entry);
    return entry;
}
/// <summary>Returns the on-disk size, in bytes, of the blob stored for the given key.</summary>
public long GetSizeOfBlob(BlobKey key)
{
    FilePath blobFile = new FilePath(PathForKey(key));
    return blobFile.Length();
}
// Key component notes carried over from the Java original:
// DSA / RSA
// modulus
// public exponent
// private exponent
// private String algname="ssh-dss";
/// <summary>Loads a private/public key file pair from disk into a new IdentityFile.</summary>
/// <param name="prvfile">path of the private key file (mandatory)</param>
/// <param name="pubfile">path of the public key file, or null to derive prvfile + ".pub"</param>
/// <param name="jsch">owning JSch instance</param>
/// <exception cref="NSch.JSchException">if the private key (or an explicitly named public key) cannot be read</exception>
internal static NSch.IdentityFile NewInstance(string prvfile, string pubfile, JSch
    jsch)
{
    byte[] prvkey = null;
    byte[] pubkey = null;
    FilePath file = null;
    FileInputStream fis = null;
    try
    {
        // Read the whole private key file into prvkey.
        file = new FilePath(prvfile);
        fis = new FileInputStream(prvfile);
        prvkey = new byte[(int)(file.Length())];
        int len = 0;
        while (true)
        {
            int i = fis.Read(prvkey, len, prvkey.Length - len);
            if (i <= 0)
            {
                break;
            }
            len += i;
        }
        fis.Close();
    }
    catch (Exception e)
    {
        // Best-effort close, then wrap: the private key is mandatory.
        try
        {
            if (fis != null)
            {
                fis.Close();
            }
        }
        catch (Exception)
        {
        }
        if (e is Exception)
        {
            throw new JSchException(e.ToString(), (Exception)e);
        }
        throw new JSchException(e.ToString());
    }
    string _pubfile = pubfile;
    if (pubfile == null)
    {
        // Default public key path: "<private key path>.pub".
        _pubfile = prvfile + ".pub";
    }
    try
    {
        // Read the whole public key file into pubkey (same loop as above).
        file = new FilePath(_pubfile);
        fis = new FileInputStream(_pubfile);
        pubkey = new byte[(int)(file.Length())];
        int len = 0;
        while (true)
        {
            int i = fis.Read(pubkey, len, pubkey.Length - len);
            if (i <= 0)
            {
                break;
            }
            len += i;
        }
        fis.Close();
    }
    catch (Exception e)
    {
        try
        {
            if (fis != null)
            {
                fis.Close();
            }
        }
        catch (Exception)
        {
        }
        if (pubfile != null)
        {
            // The pubfile is explicitly given, but not accessible: report it.
            // (A missing default ".pub" file is tolerated silently.)
            if (e is Exception)
            {
                throw new JSchException(e.ToString(), (Exception)e);
            }
            throw new JSchException(e.ToString());
        }
    }
    return NewInstance(prvfile, prvkey, pubkey, jsch);
}
// do nothing
/// <summary>Read an entire local file into memory as a byte array.</summary>
/// <remarks>Read an entire local file into memory as a byte array.</remarks>
/// <param name="path">location of the file to read.</param>
/// <param name="max">
/// maximum number of bytes to read, if the file is larger than
/// this limit an IOException is thrown.
/// </param>
/// <returns>complete contents of the requested local file.</returns>
/// <exception cref="System.IO.FileNotFoundException">the file does not exist.</exception>
/// <exception cref="System.IO.IOException">the file exists, but its contents cannot be read.
/// </exception>
public static byte[] ReadFully(FilePath path, int max)
{
    FileInputStream @in = new FileInputStream(path);
    try
    {
        // Start from the reported file size (at least 1 so the buffer can grow).
        // The file may change size while we read, so the buffer doubles on demand.
        long sz = Math.Max(path.Length(), 1);
        if (sz > max)
        {
            throw new IOException(MessageFormat.Format(JGitText.Get().fileIsTooLarge, path));
        }
        byte[] buf = new byte[(int)sz];
        int valid = 0;
        for (; ; )
        {
            if (buf.Length == valid)
            {
                // Buffer full. At the cap, accept a clean EOF or fail with
                // "file is too large"; otherwise double the buffer (clamped to max).
                if (buf.Length == max)
                {
                    int next = @in.Read();
                    if (next < 0)
                    {
                        break;
                    }
                    throw new IOException(MessageFormat.Format(JGitText.Get().fileIsTooLarge, path));
                }
                byte[] nb = new byte[Math.Min(buf.Length * 2, max)];
                System.Array.Copy(buf, 0, nb, 0, valid);
                buf = nb;
            }
            int n = @in.Read(buf, valid, buf.Length - valid);
            if (n < 0)
            {
                break;
            }
            valid += n;
        }
        if (valid < buf.Length)
        {
            // Trim the result to the bytes actually read.
            byte[] nb = new byte[valid];
            System.Array.Copy(buf, 0, nb, 0, valid);
            buf = nb;
        }
        return buf;
    }
    finally
    {
        try
        {
            @in.Close();
        }
        catch (IOException)
        {
        }
    }
}
/// <summary>Reads the content of a URL or a local file into a string.</summary>
/// <param name="filePath">URL or local file path to read</param>
/// <param name="charCoding">character encoding, or null to detect / use a platform default</param>
/// <param name="urlIsFile">true to treat filePath as a local file, false to treat it as a URL</param>
/// <exception cref="System.IO.IOException"></exception>
private static string ReadUrl(string filePath, string charCoding, bool urlIsFile)
{
    int chunkLength;
    Stream @is = null;
    try
    {
        if (!urlIsFile)
        {
            Uri urlObj = new Uri(filePath);
            URLConnection uc = urlObj.OpenConnection();
            @is = uc.GetInputStream();
            // Use the reported content length as a read-buffer hint;
            // fall back to 1024 when the server does not provide one.
            chunkLength = uc.GetContentLength();
            if (chunkLength <= 0)
            {
                chunkLength = 1024;
            }
            if (charCoding == null)
            {
                // Try to pick the charset out of the Content-Type header.
                string type = uc.GetContentType();
                if (type != null)
                {
                    charCoding = GetCharCodingFromType(type);
                }
            }
        }
        else
        {
            FilePath f = new FilePath(filePath);
            if (!f.Exists())
            {
                throw new FileNotFoundException("File not found: " + filePath);
            }
            else
            {
                if (!f.CanRead())
                {
                    throw new IOException("Cannot read file: " + filePath);
                }
            }
            long length = f.Length();
            chunkLength = (int)length;
            // Guard against files whose size does not fit in an int.
            if (chunkLength != length)
            {
                throw new IOException("Too big file size: " + length);
            }
            if (chunkLength == 0)
            {
                return string.Empty;
            }
            @is = new FileInputStream(f);
        }
        TextReader r;
        if (charCoding == null)
        {
            // No encoding known: use the platform default.
            r = new StreamReader(@is);
        }
        else
        {
            r = new StreamReader(@is, charCoding);
        }
        return ReadReader(r, chunkLength);
    }
    finally
    {
        if (@is != null)
        {
            @is.Close();
        }
    }
}
// end decodeFromFile
/// <summary>
/// Convenience method for reading a binary file
/// and base64-encoding it.
/// </summary>
/// <remarks>
/// Convenience method for reading a binary file
/// and base64-encoding it.
/// <p>As of v 2.3, if there is an error,
/// the method will throw a java.io.IOException. <b>This is new to v2.3!</b>
/// In earlier versions, it just returned false, but
/// in retrospect that's a pretty poor way to handle it.</p>
/// </remarks>
/// <param name="filename">Filename for reading binary data</param>
/// <returns>base64-encoded string</returns>
/// <exception cref="System.IO.IOException">if there is an error</exception>
/// <since>2.1</since>
public static string EncodeFromFile(string filename)
{
    string encodedData = null;
    Base64.InputStream bis = null;
    try
    {
        // Set up some useful variables
        FilePath file = new FilePath(filename);
        // Encoded output is ~4/3 of the input size; 1.4 over-allocates slightly.
        // Need max() for math on small files (v2.2.1); need +1 for a few corner cases (v2.3.5)
        byte[] buffer = new byte[Math.Max((int)(file.Length() * 1.4 + 1), 40)];
        int length = 0;
        int numBytes = 0;
        // Open a stream that encodes on the fly while we read.
        bis = new Base64.InputStream(new BufferedInputStream(new FileInputStream(file)),
            Couchbase.Lite.Support.Base64.Encode);
        // Read until done
        while ((numBytes = bis.Read(buffer, length, 4096)) >= 0)
        {
            length += numBytes;
        }
        // Save in a variable to return
        encodedData = Sharpen.Runtime.GetStringForBytes(buffer, 0, length, Couchbase.Lite.Support.Base64
            .PreferredEncoding);
    }
    finally
    {
        // The original had a dead "catch (IOException e) { throw; }" and called
        // bis.Close() unconditionally, which NRE'd when the stream was never
        // opened (masked only by the inner catch). Guard for null instead.
        if (bis != null)
        {
            try
            {
                bis.Close();
            }
            catch (Exception)
            {
            }
        }
    }
    return encodedData;
}
/// <summary>An application entry point.</summary>
/// <remarks>
/// An application entry point. Takes the name of one or more files as arguments and prints the contents of all
/// metadata directories to <code>System.out</code>.
/// <p/>
/// If <code>-thumb</code> is passed, then any thumbnail data will be written to a file with name of the
/// input file having <code>.thumb.jpg</code> appended.
/// <p/>
/// If <code>-wiki</code> is passed, then output will be in a format suitable for Google Code's wiki.
/// <p/>
/// If <code>-hex</code> is passed, then the ID of each tag will be displayed in hexadecimal.
/// </remarks>
/// <param name="args">the command line arguments</param>
/// <exception cref="Com.Drew.Metadata.MetadataException"/>
/// <exception cref="System.IO.IOException"/>
public static void Main(string[] args)
{
    // Flag arguments are removed from the list; whatever remains are file paths.
    ICollection<string> argList = new AList<string>(Arrays.AsList(args));
    bool thumbRequested = argList.Remove("-thumb");
    bool wikiFormat = argList.Remove("-wiki");
    bool showHex = argList.Remove("-hex");
    if (argList.Count < 1)
    {
        // No files given: print usage and exit with failure status.
        string version = typeof(Com.Drew.Imaging.ImageMetadataReader).Assembly.GetImplementationVersion();
        System.Console.Out.Println("metadata-extractor version " + version);
        System.Console.Out.Println();
        System.Console.Out.Println(Sharpen.Extensions.StringFormat("Usage: java -jar metadata-extractor-%s.jar <filename> [<filename>] [-thumb] [-wiki] [-hex]", version == null ? "a.b.c" : version));
        System.Environment.Exit(1);
    }
    foreach (string filePath in argList)
    {
        long startTime = Runtime.NanoTime();
        FilePath file = new FilePath(filePath);
        if (!wikiFormat && argList.Count > 1)
        {
            System.Console.Out.Printf("\n***** PROCESSING: %s\n%n", filePath);
        }
        Com.Drew.Metadata.Metadata metadata = null;
        try
        {
            metadata = Com.Drew.Imaging.ImageMetadataReader.ReadMetadata(file);
        }
        catch (Exception e)
        {
            // Unreadable/unsupported file: report and abort the whole run.
            Sharpen.Runtime.PrintStackTrace(e, System.Console.Error);
            System.Environment.Exit(1);
        }
        long took = Runtime.NanoTime() - startTime;
        if (!wikiFormat)
        {
            System.Console.Out.Printf("Processed %.3f MB file in %.2f ms%n%n", file.Length() / (1024d * 1024), took / 1000000d);
        }
        if (wikiFormat)
        {
            // Emit a Google Code wiki section: camera make/model heading, a link
            // to the hosted sample image, then a table header for the tags below.
            string fileName = file.GetName();
            string urlName = StringUtil.UrlEncode(fileName);
            ExifIFD0Directory exifIFD0Directory = metadata.GetDirectory<ExifIFD0Directory>();
            string make = exifIFD0Directory == null ? string.Empty : StringUtil.EscapeForWiki(exifIFD0Directory.GetString(ExifIFD0Directory.TagMake));
            string model = exifIFD0Directory == null ? string.Empty : StringUtil.EscapeForWiki(exifIFD0Directory.GetString(ExifIFD0Directory.TagModel));
            System.Console.Out.Println();
            System.Console.Out.Println("-----");
            System.Console.Out.Println();
            System.Console.Out.Printf("= %s - %s =%n", make, model);
            System.Console.Out.Println();
            System.Console.Out.Printf("<a href=\"http://sample-images.metadata-extractor.googlecode.com/git/%s\">%n", urlName);
            System.Console.Out.Printf("<img src=\"http://sample-images.metadata-extractor.googlecode.com/git/%s\" width=\"300\"/><br/>%n", urlName);
            System.Console.Out.Println(StringUtil.EscapeForWiki(fileName));
            System.Console.Out.Println("</a>");
            System.Console.Out.Println();
            System.Console.Out.Println("|| *Directory* || *Tag Id* || *Tag Name* || *Extracted Value* ||");
        }
        // iterate over the metadata and print to System.out
        foreach (Com.Drew.Metadata.Directory directory in metadata.GetDirectories())
        {
            string directoryName = directory.GetName();
            foreach (Tag tag in directory.GetTags())
            {
                string tagName = tag.GetTagName();
                string description = tag.GetDescription();
                // truncate the description if it's too long
                if (description != null && description.Length > 1024)
                {
                    description = Sharpen.Runtime.Substring(description, 0, 1024) + "...";
                }
                if (wikiFormat)
                {
                    System.Console.Out.Printf("||%s||0x%s||%s||%s||%n", StringUtil.EscapeForWiki(directoryName), Sharpen.Extensions.ToHexString(tag.GetTagType()), StringUtil.EscapeForWiki(tagName), StringUtil.EscapeForWiki(description));
                }
                else
                {
                    if (showHex)
                    {
                        System.Console.Out.Printf("[%s - %s] %s = %s%n", directoryName, tag.GetTagTypeHex(), tagName, description);
                    }
                    else
                    {
                        System.Console.Out.Printf("[%s] %s = %s%n", directoryName, tagName, description);
                    }
                }
            }
            // print out any errors
            foreach (string error in directory.GetErrors())
            {
                System.Console.Error.Println("ERROR: " + error);
            }
        }
        // NOTE(review): the thumbnail path uses args[0], not the current filePath —
        // looks intentional only for single-file runs; confirm for multi-file input.
        if (args.Length > 1 && thumbRequested)
        {
            ExifThumbnailDirectory directory_1 = metadata.GetDirectory<ExifThumbnailDirectory>();
            if (directory_1 != null && directory_1.HasThumbnailData())
            {
                System.Console.Out.Println("Writing thumbnail...");
                directory_1.WriteThumbnail(Sharpen.Extensions.Trim(args[0]) + ".thumb.jpg");
            }
            else
            {
                System.Console.Out.Println("No thumbnail data exists in this image");
            }
        }
    }
}
/// <summary>Updates the index after a content merge has happened.</summary>
/// <remarks>
/// Updates the index after a content merge has happened. If no conflict has
/// occurred this includes persisting the merged content to the object
/// database. In case of conflicts this method takes care to write the
/// correct stages to the index.
/// </remarks>
/// <param name="base">tree parser for the common ancestor version</param>
/// <param name="ours">tree parser for our version</param>
/// <param name="theirs">tree parser for their version</param>
/// <param name="result">the raw-text merge result for the current path</param>
/// <param name="of">temporary file holding the (possibly half-) merged content</param>
/// <exception cref="System.IO.FileNotFoundException">System.IO.FileNotFoundException
/// </exception>
/// <exception cref="System.IO.IOException">System.IO.IOException</exception>
private void UpdateIndex(CanonicalTreeParser @base, CanonicalTreeParser ours, CanonicalTreeParser
    theirs, MergeResult<RawText> result, FilePath of)
{
    if (result.ContainsConflicts())
    {
        // a conflict occurred, the file will contain conflict markers
        // the index will be populated with the three stages and only the
        // workdir (if used) contains the halfways merged content
        Add(tw.RawPath, @base, DirCacheEntry.STAGE_1);
        Add(tw.RawPath, ours, DirCacheEntry.STAGE_2);
        Add(tw.RawPath, theirs, DirCacheEntry.STAGE_3);
        mergeResults.Put(tw.PathString, result.Upcast());
    }
    else
    {
        // no conflict occurred, the file will contain fully merged content.
        // the index will be populated with the new merged version
        DirCacheEntry dce = new DirCacheEntry(tw.PathString);
        int newMode = MergeFileModes(tw.GetRawMode(0), tw.GetRawMode(1), tw.GetRawMode(2));
        // set the mode for the new content. Fall back to REGULAR_FILE if
        // you can't merge modes of OURS and THEIRS
        dce.FileMode = (newMode == FileMode.MISSING.GetBits()) ? FileMode.REGULAR_FILE : FileMode.FromBits(newMode);
        dce.LastModified = of.LastModified();
        dce.SetLength((int)of.Length());
        InputStream @is = new FileInputStream(of);
        try
        {
            // Persist the merged content as a blob and record its id on the entry.
            dce.SetObjectId(oi.Insert(Constants.OBJ_BLOB, of.Length(), @is));
        }
        finally
        {
            @is.Close();
            if (inCore)
            {
                // In-core merges must not leave the temporary file behind.
                FileUtils.Delete(of);
            }
        }
        builder.Add(dce);
    }
}
/// <summary>Checkout must refresh index entries that were smudged (zeroed length and mtime).</summary>
public virtual void TestUpdateSmudgedEntries()
{
    git.BranchCreate().SetName("test2").Call();
    RefUpdate refUpdate = db.UpdateRef(Constants.HEAD);
    refUpdate.Link("refs/heads/test2");
    FilePath testFile = new FilePath(db.WorkTree, "Test.txt");
    long expectedSize = testFile.Length();
    long expectedMTime = testFile.LastModified() - 5000L;
    NUnit.Framework.Assert.IsTrue(testFile.SetLastModified(expectedMTime));
    // Smudge the entry: zero out its length and timestamp in the index.
    DirCache dc = DirCache.Lock(db.GetIndexFile(), db.FileSystem);
    DirCacheEntry smudged = dc.GetEntry("Test.txt");
    NUnit.Framework.Assert.IsNotNull(smudged);
    smudged.SetLength(0);
    smudged.LastModified = 0;
    dc.Write();
    NUnit.Framework.Assert.IsTrue(dc.Commit());
    // Re-read the index and confirm the smudge persisted.
    dc = DirCache.Read(db.GetIndexFile(), db.FileSystem);
    smudged = dc.GetEntry("Test.txt");
    NUnit.Framework.Assert.IsNotNull(smudged);
    NUnit.Framework.Assert.AreEqual(0, smudged.Length);
    NUnit.Framework.Assert.AreEqual(0, smudged.LastModified);
    // Age the index file so the checkout does not treat it as current.
    db.GetIndexFile().SetLastModified(db.GetIndexFile().LastModified() - 5000);
    NUnit.Framework.Assert.IsNotNull(git.Checkout().SetName("test").Call());
    // Checkout must have restored the real size and modification time.
    dc = DirCache.Read(db.GetIndexFile(), db.FileSystem);
    smudged = dc.GetEntry("Test.txt");
    NUnit.Framework.Assert.IsNotNull(smudged);
    NUnit.Framework.Assert.AreEqual(expectedSize, smudged.Length);
    NUnit.Framework.Assert.AreEqual(expectedMTime, smudged.LastModified);
}
/// <summary>Reads an entire file into a byte array.</summary>
/// <param name="file">file to read; must be no larger than int.MaxValue bytes</param>
/// <returns>the complete file contents</returns>
/// <exception cref="System.IO.IOException">if the file cannot be read completely</exception>
private static byte[] GetBytesFromFile(FilePath file)
{
    InputStream @is = new FileInputStream(file);
    try
    {
        // Get the size of the file
        long length = file.Length();
        // Create the byte array to hold the data
        byte[] bytes = new byte[(int)length];
        // Read until the buffer is full or the stream reports EOF.
        int offset = 0;
        int numRead = 0;
        while (offset < bytes.Length && (numRead = @is.Read(bytes, offset, bytes.Length
            - offset)) >= 0)
        {
            offset += numRead;
        }
        // Ensure all the bytes have been read in
        if (offset < bytes.Length)
        {
            throw new IOException("Could not completely read file " + file.GetName());
        }
        return bytes;
    }
    finally
    {
        // Close on every path; the original leaked the stream when the
        // short-read IOException above was thrown.
        @is.Close();
    }
}
/// <summary>Downloads one or more remote files matched by <paramref name="src"/> to the local path <paramref name="dst"/>.</summary>
/// <param name="src">remote path, possibly containing glob wildcards</param>
/// <param name="dst">local destination file or directory</param>
/// <param name="monitor">progress callback, may be null</param>
/// <param name="mode">transfer mode (OVERWRITE, RESUME, or append)</param>
/// <exception cref="NSch.SftpException"></exception>
public virtual void Get(string src, string dst, SftpProgressMonitor monitor, int
    mode)
{
    // System.out.println("get: "+src+" "+dst);
    bool _dstExist = false;
    string _dst = null;
    try
    {
        ((Channel.MyPipedInputStream)io_in).UpdateReadSide();
        src = RemoteAbsolutePath(src);
        dst = LocalAbsolutePath(dst);
        // Expand the remote glob; every match will be downloaded.
        ArrayList v = Glob_remote(src);
        int vsize = v.Count;
        if (vsize == 0)
        {
            throw new SftpException(SSH_FX_NO_SUCH_FILE, "No such file");
        }
        FilePath dstFile = new FilePath(dst);
        bool isDstDir = dstFile.IsDirectory();
        StringBuilder dstsb = null;
        if (isDstDir)
        {
            if (!dst.EndsWith(file_separator))
            {
                dst += file_separator;
            }
            // Reused below to build each per-file destination path.
            dstsb = new StringBuilder(dst);
        }
        else
        {
            if (vsize > 1)
            {
                throw new SftpException(SSH_FX_FAILURE, "Copying multiple files, but destination is missing or a file."
                    );
            }
        }
        for (int j = 0; j < vsize; j++)
        {
            string _src = (string)(v[j]);
            SftpATTRS attr = _stat(_src);
            if (attr.IsDir())
            {
                throw new SftpException(SSH_FX_FAILURE, "not supported to get directory " + _src);
            }
            _dst = null;
            if (isDstDir)
            {
                // Destination is a directory: append the remote file's base name.
                int i = _src.LastIndexOf('/');
                if (i == -1)
                {
                    dstsb.Append(_src);
                }
                else
                {
                    dstsb.Append(Sharpen.Runtime.Substring(_src, i + 1));
                }
                _dst = dstsb.ToString();
                // Reset the builder back to the directory prefix for the next file.
                dstsb.Delete(dst.Length, _dst.Length);
            }
            else
            {
                _dst = dst;
            }
            FilePath _dstFile = new FilePath(_dst);
            if (mode == RESUME)
            {
                // Resume only makes sense while the local copy is shorter.
                long size_of_src = attr.GetSize();
                long size_of_dst = _dstFile.Length();
                if (size_of_dst > size_of_src)
                {
                    throw new SftpException(SSH_FX_FAILURE, "failed to resume for " + _dst);
                }
                if (size_of_dst == size_of_src)
                {
                    // Already complete; nothing to do.
                    return;
                }
            }
            if (monitor != null)
            {
                monitor.Init(SftpProgressMonitor.GET, _src, _dst, attr.GetSize());
                if (mode == RESUME)
                {
                    // Credit the bytes already on disk to the progress monitor.
                    monitor.Count(_dstFile.Length());
                }
            }
            FileOutputStream fos = null;
            _dstExist = _dstFile.Exists();
            try
            {
                if (mode == OVERWRITE)
                {
                    fos = new FileOutputStream(_dst);
                }
                else
                {
                    // append
                    fos = new FileOutputStream(_dst, true);
                }
                // System.err.println("_get: "+_src+", "+_dst);
                _get(_src, fos, monitor, mode, new FilePath(_dst).Length());
            }
            finally
            {
                if (fos != null)
                {
                    fos.Close();
                }
            }
        }
    }
    catch (Exception e)
    {
        // On failure, remove a zero-length file only if we created it ourselves.
        if (!_dstExist && _dst != null)
        {
            FilePath _dstFile = new FilePath(_dst);
            if (_dstFile.Exists() && _dstFile.Length() == 0)
            {
                _dstFile.Delete();
            }
        }
        if (e is SftpException)
        {
            throw (SftpException)e;
        }
        if (e is Exception)
        {
            throw new SftpException(SSH_FX_FAILURE, string.Empty, (Exception)e);
        }
        throw new SftpException(SSH_FX_FAILURE, string.Empty);
    }
}
/// <summary>Asserts the working directory contains exactly the files in <paramref name="i"/> with the expected contents.</summary>
/// <param name="i">map of repository-relative path to expected file content</param>
/// <exception cref="NGit.Errors.CorruptObjectException"></exception>
/// <exception cref="System.IO.IOException"></exception>
public virtual void AssertWorkDir(Dictionary<string, string> i)
{
    TreeWalk walk = new TreeWalk(db);
    walk.Recursive = true;
    walk.AddTree(new FileTreeIterator(db));
    string expectedValue;
    string path;
    int nrFiles = 0;
    FileTreeIterator ft;
    while (walk.Next())
    {
        ft = walk.GetTree<FileTreeIterator>(0);
        path = ft.EntryPathString;
        expectedValue = i.Get(path);
        NUnit.Framework.Assert.IsNotNull(expectedValue, "found unexpected file for path "
            + path + " in workdir");
        FilePath file = new FilePath(db.WorkTree, path);
        NUnit.Framework.Assert.IsTrue(file.Exists());
        if (file.IsFile())
        {
            FileInputStream @is = new FileInputStream(file);
            byte[] buffer = new byte[(int)file.Length()];
            try
            {
                // Read until the buffer is full or the stream reports EOF.
                int offset = 0;
                int numRead = 0;
                while (offset < buffer.Length && (numRead = @is.Read(buffer, offset, buffer.Length
                    - offset)) >= 0)
                {
                    offset += numRead;
                }
            }
            finally
            {
                // Close on every path; the original leaked the stream if Read threw.
                @is.Close();
            }
            CollectionAssert.AreEqual(buffer, Sharpen.Runtime.GetBytesForString(expectedValue),
                "unexpected content for path " + path + " in workDir. ");
            nrFiles++;
        }
    }
    NUnit.Framework.Assert.AreEqual(i.Count, nrFiles, "WorkDir has not the right size."
        );
}
/// <summary>Loads a key pair from OpenSSH- or F-Secure-formatted private/public key files.</summary>
/// <param name="jsch">owning JSch instance</param>
/// <param name="prvkey">path of the private key file ("BEGIN DSA/RSA/SSH PRIVATE KEY")</param>
/// <param name="pubkey">path of the public key file, or null</param>
/// <returns>the parsed KeyPair (parsing deferred when encrypted), or null if the key type is unsupported</returns>
/// <exception cref="NSch.JSchException">if the private key cannot be read or parsed</exception>
public static NSch.KeyPair Load(JSch jsch, string prvkey, string pubkey)
{
    // 8-byte IV parsed from the "DEK-Info: <cipher>,CBC,<hex>" PEM header.
    byte[] iv = new byte[8];
    // 8
    bool encrypted = true;
    byte[] data = null;
    byte[] publickeyblob = null;
    int type = ERROR;
    int vendor = VENDOR_OPENSSH;
    try
    {
        // --- Read the entire private key file into buf. ---
        FilePath file = new FilePath(prvkey);
        FileInputStream fis = new FileInputStream(prvkey);
        byte[] buf = new byte[(int)(file.Length())];
        int len = 0;
        while (true)
        {
            int i = fis.Read(buf, len, buf.Length - len);
            if (i <= 0)
            {
                break;
            }
            len += i;
        }
        fis.Close();
        // --- Scan PEM headers: key type, cipher IV, end-of-header blank line. ---
        int i_1 = 0;
        while (i_1 < len)
        {
            if (buf[i_1] == 'B' && buf[i_1 + 1] == 'E' && buf[i_1 + 2] == 'G' && buf[i_1 + 3]
                == 'I')
            {
                // "-----BEGIN xxx": the three bytes after "BEGIN " name the key type.
                i_1 += 6;
                if (buf[i_1] == 'D' && buf[i_1 + 1] == 'S' && buf[i_1 + 2] == 'A')
                {
                    type = DSA;
                }
                else
                {
                    if (buf[i_1] == 'R' && buf[i_1 + 1] == 'S' && buf[i_1 + 2] == 'A')
                    {
                        type = RSA;
                    }
                    else
                    {
                        if (buf[i_1] == 'S' && buf[i_1 + 1] == 'S' && buf[i_1 + 2] == 'H')
                        {
                            // FSecure
                            type = UNKNOWN;
                            vendor = VENDOR_FSECURE;
                        }
                        else
                        {
                            //System.err.println("invalid format: "+identity);
                            throw new JSchException("invalid privatekey: " + prvkey);
                        }
                    }
                }
                i_1 += 3;
                continue;
            }
            if (buf[i_1] == 'C' && buf[i_1 + 1] == 'B' && buf[i_1 + 2] == 'C' && buf[i_1 + 3]
                == ',')
            {
                // "CBC,<hex IV>": decode two hex digits per IV byte.
                i_1 += 4;
                for (int ii = 0; ii < iv.Length; ii++)
                {
                    iv[ii] = unchecked((byte)(((A2b(buf[i_1++]) << 4) & unchecked((int)(0xf0))) + (A2b
                        (buf[i_1++]) & unchecked((int)(0xf)))));
                }
                continue;
            }
            if (buf[i_1] == unchecked((int)(0x0d)) && i_1 + 1 < buf.Length && buf[i_1 + 1] ==
                unchecked((int)(0x0a)))
            {
                // Skip the CR of a CRLF pair.
                i_1++;
                continue;
            }
            if (buf[i_1] == unchecked((int)(0x0a)) && i_1 + 1 < buf.Length)
            {
                // A blank line (LF LF or LF CR LF) ends the header section.
                if (buf[i_1 + 1] == unchecked((int)(0x0a)))
                {
                    i_1 += 2;
                    break;
                }
                if (buf[i_1 + 1] == unchecked((int)(0x0d)) && i_1 + 2 < buf.Length && buf[i_1 + 2
                    ] == unchecked((int)(0x0a)))
                {
                    i_1 += 3;
                    break;
                }
                // If the next line contains no ':' it is not a header line: the
                // base64 body starts here and the key carries no passphrase.
                bool inheader = false;
                for (int j = i_1 + 1; j < buf.Length; j++)
                {
                    if (buf[j] == unchecked((int)(0x0a)))
                    {
                        break;
                    }
                    //if(buf[j]==0x0d) break;
                    if (buf[j] == ':')
                    {
                        inheader = true;
                        break;
                    }
                }
                if (!inheader)
                {
                    i_1++;
                    encrypted = false;
                    // no passphrase
                    break;
                }
            }
            i_1++;
        }
        if (type == ERROR)
        {
            throw new JSchException("invalid privatekey: " + prvkey);
        }
        // --- Collapse newlines out of the base64 body in place, then decode. ---
        int start = i_1;
        while (i_1 < len)
        {
            if (buf[i_1] == unchecked((int)(0x0a)))
            {
                // Shift the remainder left over the LF (and a preceding CR if present).
                bool xd = (buf[i_1 - 1] == unchecked((int)(0x0d)));
                System.Array.Copy(buf, i_1 + 1, buf, i_1 - (xd ? 1 : 0), len - i_1 - 1 - (xd ? 1
                     : 0));
                if (xd)
                {
                    len--;
                }
                len--;
                continue;
            }
            if (buf[i_1] == '-')
            {
                // "-----END ..." footer reached.
                break;
            }
            i_1++;
        }
        data = Util.FromBase64(buf, start, i_1 - start);
        // --- F-Secure binary container: magic bytes 3f 6f f9 eb. ---
        if (data.Length > 4 && data[0] == unchecked((byte)unchecked((int)(0x3f))) && data
            [1] == unchecked((byte)unchecked((int)(0x6f))) && data[2] == unchecked((byte)unchecked(
            (int)(0xf9))) && data[3] == unchecked((byte)unchecked((int)(0xeb))))
        {
            // FSecure
            Buffer _buf = new Buffer(data);
            _buf.GetInt();
            // 0x3f6ff9be
            _buf.GetInt();
            byte[] _type = _buf.GetString();
            //System.err.println("type: "+new String(_type));
            byte[] _cipher = _buf.GetString();
            string cipher = Util.Byte2str(_cipher);
            //System.err.println("cipher: "+cipher);
            if (cipher.Equals("3des-cbc"))
            {
                // Encrypted F-Secure keys are not supported: fail after unwrapping.
                _buf.GetInt();
                byte[] foo = new byte[data.Length - _buf.GetOffSet()];
                _buf.GetByte(foo);
                data = foo;
                encrypted = true;
                throw new JSchException("unknown privatekey format: " + prvkey);
            }
            else
            {
                if (cipher.Equals("none"))
                {
                    _buf.GetInt();
                    _buf.GetInt();
                    encrypted = false;
                    byte[] foo = new byte[data.Length - _buf.GetOffSet()];
                    _buf.GetByte(foo);
                    data = foo;
                }
            }
        }
        // --- Optionally read and parse the public key file (failures tolerated). ---
        if (pubkey != null)
        {
            try
            {
                file = new FilePath(pubkey);
                fis = new FileInputStream(pubkey);
                buf = new byte[(int)(file.Length())];
                len = 0;
                while (true)
                {
                    i_1 = fis.Read(buf, len, buf.Length - len);
                    if (i_1 <= 0)
                    {
                        break;
                    }
                    len += i_1;
                }
                fis.Close();
                if (buf.Length > 4 && buf[0] == '-' && buf[1] == '-' && buf[2] == '-' && buf[3] ==
                    '-')
                {
                    // FSecure's public key
                    bool valid = true;
                    i_1 = 0;
                    // Skip the first ("---- BEGIN ...") line.
                    do
                    {
                        i_1++;
                    }
                    while (buf.Length > i_1 && buf[i_1] != unchecked((int)(0x0a)));
                    if (buf.Length <= i_1)
                    {
                        valid = false;
                    }
                    // Skip header lines (those containing ':') until the body starts.
                    while (valid)
                    {
                        if (buf[i_1] == unchecked((int)(0x0a)))
                        {
                            bool inheader = false;
                            for (int j = i_1 + 1; j < buf.Length; j++)
                            {
                                if (buf[j] == unchecked((int)(0x0a)))
                                {
                                    break;
                                }
                                if (buf[j] == ':')
                                {
                                    inheader = true;
                                    break;
                                }
                            }
                            if (!inheader)
                            {
                                i_1++;
                                break;
                            }
                        }
                        i_1++;
                    }
                    if (buf.Length <= i_1)
                    {
                        valid = false;
                    }
                    start = i_1;
                    // Collapse newlines out of the base64 body up to the "----" footer.
                    while (valid && i_1 < len)
                    {
                        if (buf[i_1] == unchecked((int)(0x0a)))
                        {
                            System.Array.Copy(buf, i_1 + 1, buf, i_1, len - i_1 - 1);
                            len--;
                            continue;
                        }
                        if (buf[i_1] == '-')
                        {
                            break;
                        }
                        i_1++;
                    }
                    if (valid)
                    {
                        publickeyblob = Util.FromBase64(buf, start, i_1 - start);
                        if (type == UNKNOWN)
                        {
                            // Blob byte 8 is the first letter of "dss"/"rsa" in "ssh-dss"/"ssh-rsa".
                            if (publickeyblob[8] == 'd')
                            {
                                type = DSA;
                            }
                            else
                            {
                                if (publickeyblob[8] == 'r')
                                {
                                    type = RSA;
                                }
                            }
                        }
                    }
                }
                else
                {
                    // OpenSSH one-line format: "ssh-xxx <base64> [comment]".
                    if (buf[0] == 's' && buf[1] == 's' && buf[2] == 'h' && buf[3] == '-')
                    {
                        i_1 = 0;
                        while (i_1 < len)
                        {
                            if (buf[i_1] == ' ')
                            {
                                break;
                            }
                            i_1++;
                        }
                        i_1++;
                        if (i_1 < len)
                        {
                            start = i_1;
                            while (i_1 < len)
                            {
                                if (buf[i_1] == ' ')
                                {
                                    break;
                                }
                                i_1++;
                            }
                            publickeyblob = Util.FromBase64(buf, start, i_1 - start);
                        }
                    }
                }
            }
            catch (Exception)
            {
                // The public key is optional; ignore read/parse failures here.
            }
        }
    }
    catch (Exception e)
    {
        if (e is JSchException)
        {
            throw (JSchException)e;
        }
        if (e is Exception)
        {
            throw new JSchException(e.ToString(), (Exception)e);
        }
        throw new JSchException(e.ToString());
    }
    // --- Build the concrete key pair implementation for the detected type. ---
    NSch.KeyPair kpair = null;
    if (type == DSA)
    {
        kpair = new KeyPairDSA(jsch);
    }
    else
    {
        if (type == RSA)
        {
            kpair = new KeyPairRSA(jsch);
        }
    }
    if (kpair != null)
    {
        kpair.encrypted = encrypted;
        kpair.publickeyblob = publickeyblob;
        kpair.vendor = vendor;
        if (encrypted)
        {
            // Encrypted: defer parsing until the passphrase is supplied.
            kpair.iv = iv;
            kpair.data = data;
        }
        else
        {
            if (kpair.Parse(data))
            {
                return kpair;
            }
            else
            {
                throw new JSchException("invalid privatekey: " + prvkey);
            }
        }
    }
    return kpair;
}
/// <summary>Asserts that file <paramref name="f"/> contains exactly <paramref name="checkData"/> (read as ISO-8859-1).</summary>
/// <param name="f">file to check</param>
/// <param name="checkData">expected file content</param>
/// <exception cref="System.IO.IOException"></exception>
protected internal static void CheckFile(FilePath f, string checkData)
{
    StreamReader r = new InputStreamReader(new FileInputStream(f), "ISO-8859-1");
    try
    {
        int length = (int)f.Length();
        char[] data = new char[length];
        // A single Read() may legally return fewer characters than requested;
        // the original treated any partial first read as an error. Loop until
        // the buffer is full or the stream ends, then verify the total.
        int read = 0;
        while (read < length)
        {
            int n = r.Read(data, read, length - read);
            if (n <= 0)
            {
                break;
            }
            read += n;
        }
        if (length != read)
        {
            throw new IOException("Internal error reading file data from " + f);
        }
        NUnit.Framework.Assert.AreEqual(checkData, new string(data));
    }
    finally
    {
        r.Close();
    }
}
/// <summary>Reads a local file or URL, optionally converting the bytes to a string.</summary>
/// <param name="path">local file path or URL to read</param>
/// <param name="convertToString">true to decode the bytes to a string, false to return the raw byte[]</param>
/// <param name="defaultEncoding">encoding to use when none can be detected, may be null</param>
/// <returns>a string when convertToString is true, otherwise a byte[]</returns>
/// <exception cref="System.IO.IOException"></exception>
public static object ReadFileOrUrl(string path, bool convertToString, string defaultEncoding)
{
    // ToUrl returns null when path does not look like a URL.
    Uri url = ToUrl(path);
    Stream @is = null;
    int capacityHint = 0;
    string encoding;
    string contentType;
    byte[] data;
    try
    {
        if (url == null)
        {
            // Local file: size the read buffer from the file length.
            FilePath file = new FilePath(path);
            contentType = encoding = null;
            capacityHint = (int)file.Length();
            @is = new FileInputStream(file);
        }
        else
        {
            URLConnection uc = url.OpenConnection();
            @is = uc.GetInputStream();
            if (convertToString)
            {
                // Pull content type and charset from the response headers.
                ParsedContentType pct = new ParsedContentType(uc.GetContentType());
                contentType = pct.GetContentType();
                encoding = pct.GetEncoding();
            }
            else
            {
                contentType = encoding = null;
            }
            capacityHint = uc.GetContentLength();
            // Ignore insane values for Content-Length
            if (capacityHint > (1 << 20))
            {
                capacityHint = -1;
            }
        }
        if (capacityHint <= 0)
        {
            capacityHint = 4096;
        }
        data = Kit.ReadStream(@is, capacityHint);
    }
    finally
    {
        if (@is != null)
        {
            @is.Close();
        }
    }
    object result;
    if (!convertToString)
    {
        result = data;
    }
    else
    {
        if (encoding == null)
        {
            // None explicitly specified in Content-type header. Use RFC-4329
            // section 4.2.2 to autodetect the encoding from a byte-order mark.
            // NOTE(review): in C# `byte` is unsigned, so comparisons against
            // negative literals below can never match unless Sharpen maps the
            // Java byte[] to sbyte[] here — confirm; the BOM branches may be dead.
            if (data.Length > 3 && data[0] == -1 && data[1] == -2 && data[2] == 0 && data[3]
                == 0)
            {
                encoding = "UTF-32LE";
            }
            else
            {
                if (data.Length > 3 && data[0] == 0 && data[1] == 0 && data[2] == -2 && data[3]
                    == -1)
                {
                    encoding = "UTF-32BE";
                }
                else
                {
                    if (data.Length > 2 && data[0] == -17 && data[1] == -69 && data[2] == -65)
                    {
                        encoding = "UTF-8";
                    }
                    else
                    {
                        if (data.Length > 1 && data[0] == -1 && data[1] == -2)
                        {
                            encoding = "UTF-16LE";
                        }
                        else
                        {
                            if (data.Length > 1 && data[0] == -2 && data[1] == -1)
                            {
                                encoding = "UTF-16BE";
                            }
                            else
                            {
                                // No autodetect. See if we have explicit value on command line
                                encoding = defaultEncoding;
                                if (encoding == null)
                                {
                                    // No explicit encoding specification
                                    if (url == null)
                                    {
                                        // Local files default to system encoding
                                        encoding = Runtime.GetProperty("file.encoding");
                                    }
                                    else
                                    {
                                        if (contentType != null && contentType.StartsWith("application/"))
                                        {
                                            // application/* types default to UTF-8
                                            encoding = "UTF-8";
                                        }
                                        else
                                        {
                                            // text/* MIME types default to US-ASCII
                                            encoding = "US-ASCII";
                                        }
                                    }
                                }
                            }
                        }
                    }
                }
            }
        }
        string strResult = Sharpen.Runtime.GetStringForBytes(data, encoding);
        // Skip BOM
        if (strResult.Length > 0 && strResult[0] == '\uFEFF')
        {
            strResult = Sharpen.Runtime.Substring(strResult, 1);
        }
        result = strResult;
    }
    return result;
}