/// <summary>Copies the contents of <paramref name="src"/> into <paramref name="dst"/> using a 4 KiB buffer.</summary>
/// <exception cref="System.IO.IOException">if reading the source or writing the destination fails.</exception>
protected internal static void CopyFile(FilePath src, FilePath dst)
{
	FileInputStream input = new FileInputStream(src);
	try
	{
		FileOutputStream output = new FileOutputStream(dst);
		try
		{
			byte[] chunk = new byte[4096];
			int count;
			while ((count = input.Read(chunk)) > 0)
			{
				output.Write(chunk, 0, count);
			}
		}
		finally
		{
			output.Close();
		}
	}
	finally
	{
		input.Close();
	}
}
public virtual void TestReadWriteMergeMsg()
{
	// NUnit's AreEqual takes (expected, actual); several calls had the
	// arguments swapped, which produces misleading failure messages.
	// Initially no squash message exists.
	NUnit.Framework.Assert.AreEqual(null, db.ReadSquashCommitMsg());
	NUnit.Framework.Assert.IsFalse(new FilePath(db.Directory, Constants.SQUASH_MSG).Exists());
	// Writing the message makes it readable via the API and on disk.
	db.WriteSquashCommitMsg(squashMsg);
	NUnit.Framework.Assert.AreEqual(squashMsg, db.ReadSquashCommitMsg());
	NUnit.Framework.Assert.AreEqual(squashMsg, Read(new FilePath(db.Directory, Constants.SQUASH_MSG)));
	// Writing null removes the file again.
	db.WriteSquashCommitMsg(null);
	NUnit.Framework.Assert.AreEqual(null, db.ReadSquashCommitMsg());
	NUnit.Framework.Assert.IsFalse(new FilePath(db.Directory, Constants.SQUASH_MSG).Exists());
	// Same check again, this time writing the file with lower-level IO.
	FileOutputStream fos = new FileOutputStream(new FilePath(db.Directory, Constants.SQUASH_MSG));
	try
	{
		fos.Write(Sharpen.Runtime.GetBytesForString(squashMsg, Constants.CHARACTER_ENCODING));
	}
	finally
	{
		fos.Close();
	}
	NUnit.Framework.Assert.AreEqual(squashMsg, db.ReadSquashCommitMsg());
}
/// <summary>Copies <paramref name="sourceFile"/> to <paramref name="destFile"/> via channel transfer.</summary>
/// <exception cref="System.IO.IOException">if the copy or channel cleanup fails.</exception>
public static void CopyFile(FilePath sourceFile, FilePath destFile)
{
	if (!destFile.Exists())
	{
		destFile.CreateNewFile();
	}
	FileChannel source = null;
	FileChannel destination = null;
	try
	{
		source = new FileInputStream(sourceFile).GetChannel();
		destination = new FileOutputStream(destFile).GetChannel();
		destination.TransferFrom(source, 0, source.Size());
	}
	finally
	{
		// Nest the closes: previously an exception from source.Close()
		// would skip destination.Close() and leak the output channel.
		try
		{
			if (source != null)
			{
				source.Close();
			}
		}
		finally
		{
			if (destination != null)
			{
				destination.Close();
			}
		}
	}
}
/// <summary>Copies all remaining bytes of <paramref name="@is"/> into <paramref name="file"/>.</summary>
/// <remarks>Both streams are closed before returning, even when the copy fails.</remarks>
/// <exception cref="System.IO.IOException"></exception>
public static void CopyStreamToFile(InputStream @is, FilePath file)
{
	try
	{
		OutputStream os = new FileOutputStream(file);
		try
		{
			int n;
			byte[] buffer = new byte[16384];
			while ((n = @is.Read(buffer)) > -1)
			{
				os.Write(buffer, 0, n);
			}
		}
		finally
		{
			// Previously both Close() calls sat after the loop, so an
			// exception during the copy leaked both file handles.
			os.Close();
		}
	}
	finally
	{
		@is.Close();
	}
}
/// <summary>
/// Saves the contents of a <code>byte[]</code> to the specified
/// <see cref="Sharpen.FilePath"/>.
/// </summary>
/// <exception cref="System.IO.IOException"/>
public static void SaveBytes(FilePath file, sbyte[] bytes)
{
	FileOutputStream output = null;
	try
	{
		output = new FileOutputStream(file);
		output.Write(bytes);
	}
	finally
	{
		// Only close when the stream was actually opened.
		if (output != null)
		{
			output.Close();
		}
	}
}
public virtual void TestReadWriteMergeHeads()
{
	// No MERGE_HEAD exists initially.
	NUnit.Framework.Assert.AreEqual(db.ReadMergeHeads(), null);
	// Writing two ids produces the expected newline-separated file.
	db.WriteMergeHeads(Arrays.AsList(ObjectId.ZeroId, ObjectId.FromString(sampleId)));
	NUnit.Framework.Assert.AreEqual(Read(new FilePath(db.Directory, "MERGE_HEAD")), "0000000000000000000000000000000000000000\n1c6db447abdbb291b25f07be38ea0b1bf94947c5\n");
	NUnit.Framework.Assert.AreEqual(db.ReadMergeHeads().Count, 2);
	NUnit.Framework.Assert.AreEqual(db.ReadMergeHeads()[0], ObjectId.ZeroId);
	NUnit.Framework.Assert.AreEqual(db.ReadMergeHeads()[1], ObjectId.FromString(sampleId));
	// same test again, this time with lower-level io
	FileOutputStream fos = new FileOutputStream(new FilePath(db.Directory, "MERGE_HEAD"));
	try
	{
		fos.Write(Sharpen.Runtime.GetBytesForString("0000000000000000000000000000000000000000\n1c6db447abdbb291b25f07be38ea0b1bf94947c5\n", Constants.CHARACTER_ENCODING));
	}
	finally
	{
		fos.Close();
	}
	NUnit.Framework.Assert.AreEqual(db.ReadMergeHeads().Count, 2);
	NUnit.Framework.Assert.AreEqual(db.ReadMergeHeads()[0], ObjectId.ZeroId);
	NUnit.Framework.Assert.AreEqual(db.ReadMergeHeads()[1], ObjectId.FromString(sampleId));
	// Writing an empty list truncates the file and reading yields null again.
	db.WriteMergeHeads(Collections.EmptyList<ObjectId>());
	NUnit.Framework.Assert.AreEqual(Read(new FilePath(db.Directory, "MERGE_HEAD")), string.Empty);
	NUnit.Framework.Assert.AreEqual(db.ReadMergeHeads(), null);
	// A single id written without a trailing newline is still read back.
	fos = new FileOutputStream(new FilePath(db.Directory, "MERGE_HEAD"));
	try
	{
		fos.Write(Sharpen.Runtime.GetBytesForString(sampleId, Constants.CHARACTER_ENCODING));
	}
	finally
	{
		fos.Close();
	}
	NUnit.Framework.Assert.AreEqual(db.ReadMergeHeads().Count, 1);
	NUnit.Framework.Assert.AreEqual(db.ReadMergeHeads()[0], ObjectId.FromString(sampleId));
}
/// <summary>Writes the given bytes to a fresh temp file and returns a file-backed reader over it.</summary>
protected internal override RandomAccessReader CreateReader(sbyte[] bytes)
{
	try
	{
		// Unit tests can create multiple readers in the same test, as long
		// as they're used one after the other.
		DeleteTempFile();
		_tempFile = FilePath.CreateTempFile("metadata-extractor-test-", ".tmp");
		FileOutputStream stream = new FileOutputStream(_tempFile);
		try
		{
			stream.Write(bytes);
		}
		finally
		{
			// Close in finally so a failed Write does not leak the handle
			// (previously the stream stayed open on exception).
			stream.Close();
		}
		_randomAccessFile = new RandomAccessFile(_tempFile, "r");
		return new RandomAccessFileReader(_randomAccessFile);
	}
	catch (IOException)
	{
		NUnit.Framework.Assert.Fail("Unable to create temp file");
		return null;
	}
}
/// <summary>Try to establish the lock.</summary>
/// <remarks>Try to establish the lock.</remarks>
/// <returns>
/// true if the lock is now held by the caller; false if it is held
/// by someone else.
/// </returns>
/// <exception cref="System.IO.IOException">
/// the temporary output file could not be created. The caller
/// does not hold the lock.
/// </exception>
public virtual bool Lock()
{
	// Ensure the directory that holds the lock file exists.
	FileUtils.Mkdirs(lck.GetParentFile(), true);
	if (lck.CreateNewFile())
	{
		// Creating the file succeeded, so the lock is now ours.
		haveLck = true;
		try
		{
			os = new FileOutputStream(lck);
		}
		catch (IOException)
		{
			// Could not open the stream: release the lock before reporting.
			Unlock();
			throw;
		}
	}
	return haveLck;
}
/// <summary>Compile JavaScript source.</summary>
/// <remarks>
/// Compile JavaScript source. Each input file must end in ".js". Compiled
/// class files are written under destinationDir when set, otherwise next to
/// the source file. Errors are reported via AddError/AddFormatedError, and
/// most failures abort the remaining files.
/// </remarks>
public virtual void ProcessSource(string[] filenames)
{
	for (int i = 0; i != filenames.Length; ++i)
	{
		string filename = filenames[i];
		if (!filename.EndsWith(".js"))
		{
			// Only ".js" sources are accepted.
			AddError("msg.extension.not.js", filename);
			return;
		}
		FilePath f = new FilePath(filename);
		string source = ReadSource(f);
		if (source == null)
		{
			return;
		}
		// Main class name: explicit target name, or the file name with its
		// ".js" extension stripped, optionally qualified by targetPackage.
		string mainClassName = targetName;
		if (mainClassName == null)
		{
			string name = f.GetName();
			string nojs = Sharpen.Runtime.Substring(name, 0, name.Length - 3);
			mainClassName = GetClassName(nojs);
		}
		if (targetPackage.Length != 0)
		{
			mainClassName = targetPackage + "." + mainClassName;
		}
		object[] compiled = compiler.CompileToClassFiles(source, filename, 1, mainClassName);
		if (compiled == null || compiled.Length == 0)
		{
			return;
		}
		// Pick the output root: destinationDir if given, else the source's
		// parent directory (may stay null when the source has no parent).
		FilePath targetTopDir = null;
		if (destinationDir != null)
		{
			targetTopDir = new FilePath(destinationDir);
		}
		else
		{
			string parent = f.GetParent();
			if (parent != null)
			{
				targetTopDir = new FilePath(parent);
			}
		}
		// `compiled` alternates: class name at even indices, class bytes at odd.
		for (int j = 0; j != compiled.Length; j += 2)
		{
			string className = (string)compiled[j];
			byte[] bytes = (byte[])compiled[j + 1];
			FilePath outfile = GetOutputFile(targetTopDir, className);
			try
			{
				FileOutputStream os = new FileOutputStream(outfile);
				try
				{
					os.Write(bytes);
				}
				finally
				{
					os.Close();
				}
			}
			catch (IOException ioe)
			{
				// A failed class file write is reported but does not stop
				// the remaining classes of this source.
				AddFormatedError(ioe.ToString());
			}
		}
	}
}
/// <summary>Creates a file under the repository worktree with the given content and returns it.</summary>
/// <exception cref="System.IO.IOException"></exception>
private FilePath AddToWorkDir(string path, string content)
{
	FilePath file = new FilePath(db.WorkTree, path);
	FileOutputStream output = new FileOutputStream(file);
	try
	{
		output.Write(Sharpen.Runtime.GetBytesForString(content, Constants.CHARACTER_ENCODING));
		return file;
	}
	finally
	{
		output.Close();
	}
}
/// <summary>Appends a reflog record for the given ref, creating the log file when needed.</summary>
/// <exception cref="System.IO.IOException"></exception>
private NGit.Storage.File.ReflogWriter Log(string refName, byte[] rec)
{
	FilePath log = LogFor(refName);
	// Write only when forced, when reflogs are enabled and this ref should
	// auto-create one, or when a log file already exists for the ref.
	bool write = forceWrite || (IsLogAllRefUpdates() && ShouldAutoCreateLog(refName)) || log.IsFile();
	if (!write)
	{
		return this;
	}
	WriteConfig wc = GetRepository().GetConfig().Get(WriteConfig.KEY);
	FileOutputStream @out;
	try
	{
		// Open in append mode; reflog records are only ever added.
		@out = new FileOutputStream(log, true);
	}
	catch (FileNotFoundException err)
	{
		// Likely a missing parent directory: create it and retry once.
		FilePath dir = log.GetParentFile();
		if (dir.Exists())
		{
			// Directory already exists, so the failure had another cause.
			throw;
		}
		if (!dir.Mkdirs() && !dir.IsDirectory())
		{
			throw new IOException(MessageFormat.Format(JGitText.Get().cannotCreateDirectory, dir));
		}
		@out = new FileOutputStream(log, true);
	}
	try
	{
		if (wc.GetFSyncRefFiles())
		{
			// Push the record through the channel and force it to disk.
			FileChannel fc = @out.GetChannel();
			ByteBuffer buf = ByteBuffer.Wrap(rec);
			while (0 < buf.Remaining())
			{
				fc.Write(buf);
			}
			fc.Force(true);
		}
		else
		{
			@out.Write(rec);
		}
	}
	finally
	{
		@out.Close();
	}
	return this;
}
/// <summary>Replaces a database's file and attachment store from the given streams.</summary>
/// <exception cref="Couchbase.Lite.CouchbaseLiteException">on any IO failure during the replacement.</exception>
private void ReplaceDatabase(string databaseName, InputStream databaseStream, IEnumerator<KeyValuePair<string, InputStream>> attachmentStreams)
{
	try
	{
		Database database = GetDatabase(databaseName);
		string dstAttachmentsPath = database.GetAttachmentStorePath();
		OutputStream destStream = new FileOutputStream(new FilePath(database.GetPath()));
		try
		{
			StreamUtils.CopyStream(databaseStream, destStream);
		}
		finally
		{
			// Previously destStream was never closed, leaking the handle
			// to the new database file.
			destStream.Close();
		}
		// Rebuild the attachment store from scratch.
		FilePath attachmentsFile = new FilePath(dstAttachmentsPath);
		FileDirUtils.DeleteRecursive(attachmentsFile);
		attachmentsFile.Mkdirs();
		if (attachmentStreams != null)
		{
			StreamUtils.CopyStreamsToFolder(attachmentStreams, attachmentsFile);
		}
		database.Open();
		database.ReplaceUUIDs();
	}
	catch (FileNotFoundException e)
	{
		Log.E(Database.Tag, string.Empty, e);
		throw new CouchbaseLiteException(Status.InternalServerError);
	}
	catch (IOException e)
	{
		Log.E(Database.Tag, string.Empty, e);
		throw new CouchbaseLiteException(Status.InternalServerError);
	}
}
/// <summary>Reverts the worktree after an unsuccessful merge.</summary>
/// <remarks>
/// Reverts the worktree after an unsuccessful merge. We know that for all
/// modified files the old content was in the old index and the index
/// contained only stage 0. In case of an inCore operation this just clears
/// the list of modified files.
/// </remarks>
/// <exception cref="System.IO.IOException">System.IO.IOException</exception>
/// <exception cref="NGit.Errors.CorruptObjectException">NGit.Errors.CorruptObjectException
/// </exception>
/// <exception cref="NGit.Errors.NoWorkTreeException">NGit.Errors.NoWorkTreeException
/// </exception>
private void CleanUp()
{
	if (inCore)
	{
		// No worktree files were touched; forgetting the paths is enough.
		modifiedFiles.Clear();
		return;
	}
	DirCache dc = db.ReadDirCache();
	ObjectReader or = db.ObjectDatabase.NewReader();
	Iterator<string> mpathsIt = modifiedFiles.Iterator();
	while (mpathsIt.HasNext())
	{
		string mpath = mpathsIt.Next();
		// Restore the file content from its (stage 0) index entry.
		DirCacheEntry entry = dc.GetEntry(mpath);
		FileOutputStream fos = new FileOutputStream(new FilePath(db.WorkTree, mpath));
		try
		{
			or.Open(entry.GetObjectId()).CopyTo(fos);
		}
		finally
		{
			fos.Close();
		}
		// Drop each path from the modified list once reverted.
		mpathsIt.Remove();
	}
}
/// <summary>Writes merged file content to the working tree.</summary>
/// <remarks>
/// Writes merged file content to the working tree. In case
/// <see cref="inCore">inCore</see>
/// is set and we don't have a working tree the content is written to a
/// temporary file
/// </remarks>
/// <param name="result">the result of the content merge</param>
/// <returns>the file to which the merged content was written</returns>
/// <exception cref="System.IO.FileNotFoundException">System.IO.FileNotFoundException
/// </exception>
/// <exception cref="System.IO.IOException">System.IO.IOException</exception>
private FilePath WriteMergedFile(MergeResult<RawText> result)
{
	MergeFormatter fmt = new MergeFormatter();
	FilePath of = null;
	FileOutputStream fos;
	if (!inCore)
	{
		FilePath workTree = db.WorkTree;
		if (workTree == null)
		{
			// TODO: This should be handled by WorkingTreeIterators which
			// support write operations
			throw new NGit.Errors.NotSupportedException();
		}
		// Write the merge result (possibly with conflict markers) into the
		// file at its path inside the worktree.
		of = new FilePath(workTree, tw.PathString);
		fos = new FileOutputStream(of);
		try
		{
			fmt.FormatMerge(fos, result, Arrays.AsList(commitNames), Constants.CHARACTER_ENCODING);
		}
		finally
		{
			fos.Close();
		}
	}
	else
	{
		if (!result.ContainsConflicts())
		{
			// When working inCore, only trivial merges can be handled,
			// so we generate objects only in conflict free cases
			of = FilePath.CreateTempFile("merge_", "_temp", null);
			fos = new FileOutputStream(of);
			try
			{
				fmt.FormatMerge(fos, result, Arrays.AsList(commitNames), Constants.CHARACTER_ENCODING);
			}
			finally
			{
				fos.Close();
			}
		}
	}
	// NOTE(review): returns null when inCore and the result has conflicts.
	return of;
}
/// <summary>Writes the given string to the file using UTF-8 encoding.</summary>
/// <exception cref="System.IO.IOException"></exception>
private void WriteToFile(FilePath actFile, string @string)
{
	// The original closed the stream twice (explicitly and again in the
	// finally block); a single close in finally is sufficient and still
	// propagates write/close failures to the caller.
	FileOutputStream fos = new FileOutputStream(actFile);
	try
	{
		fos.Write(Sharpen.Runtime.GetBytesForString(@string, "UTF-8"));
	}
	finally
	{
		fos.Close();
	}
}
/// <summary>Downloads the remote file(s) matched by <paramref name="src"/> to the local path <paramref name="dst"/>.</summary>
/// <exception cref="NSch.SftpException"></exception>
public virtual void Get(string src, string dst, SftpProgressMonitor monitor, int mode)
{
	// System.out.println("get: "+src+" "+dst);
	src = RemoteAbsolutePath(src);
	dst = LocalAbsolutePath(dst);
	try
	{
		// Expand glob patterns on the remote side.
		ArrayList v = Glob_remote(src);
		int vsize = v.Count;
		if (vsize == 0)
		{
			throw new SftpException(SSH_FX_NO_SUCH_FILE, "No such file");
		}
		FilePath dstFile = new FilePath(dst);
		bool isDstDir = dstFile.IsDirectory();
		StringBuilder dstsb = null;
		if (isDstDir)
		{
			if (!dst.EndsWith(file_separator))
			{
				dst += file_separator;
			}
			dstsb = new StringBuilder(dst);
		}
		else
		{
			if (vsize > 1)
			{
				// Multiple matched sources require a directory destination.
				throw new SftpException(SSH_FX_FAILURE, "Copying multiple files, but destination is missing or a file.");
			}
		}
		for (int j = 0; j < vsize; j++)
		{
			string _src = (string)(v[j]);
			SftpATTRS attr = _stat(_src);
			if (attr.IsDir())
			{
				throw new SftpException(SSH_FX_FAILURE, "not supported to get directory " + _src);
			}
			// Build the local target path: when the destination is a
			// directory, append the source's base name to it.
			string _dst = null;
			if (isDstDir)
			{
				int i = _src.LastIndexOf('/');
				if (i == -1)
				{
					dstsb.Append(_src);
				}
				else
				{
					dstsb.Append(Sharpen.Runtime.Substring(_src, i + 1));
				}
				_dst = dstsb.ToString();
				// Trim the builder back to the directory prefix for reuse.
				dstsb.Delete(dst.Length, _dst.Length);
			}
			else
			{
				_dst = dst;
			}
			if (mode == RESUME)
			{
				long size_of_src = attr.GetSize();
				long size_of_dst = new FilePath(_dst).Length();
				if (size_of_dst > size_of_src)
				{
					// Local file is larger than the remote: cannot resume.
					throw new SftpException(SSH_FX_FAILURE, "failed to resume for " + _dst);
				}
				if (size_of_dst == size_of_src)
				{
					// Already fully downloaded. NOTE(review): this returns
					// and skips any remaining glob matches rather than
					// continuing with the next one.
					return;
				}
			}
			if (monitor != null)
			{
				monitor.Init(SftpProgressMonitor.GET, _src, _dst, attr.GetSize());
				if (mode == RESUME)
				{
					monitor.Count(new FilePath(_dst).Length());
				}
			}
			FileOutputStream fos = null;
			try
			{
				if (mode == OVERWRITE)
				{
					fos = new FileOutputStream(_dst);
				}
				else
				{
					// RESUME/APPEND open the local file in append mode.
					fos = new FileOutputStream(_dst, true);
				}
				// append
				// System.err.println("_get: "+_src+", "+_dst);
				_get(_src, fos, monitor, mode, new FilePath(_dst).Length());
			}
			finally
			{
				if (fos != null)
				{
					fos.Close();
				}
			}
		}
	}
	catch (Exception e)
	{
		// Wrap anything that is not already an SftpException.
		if (e is SftpException)
		{
			throw (SftpException)e;
		}
		if (e is Exception)
		{
			throw new SftpException(SSH_FX_FAILURE, string.Empty, (Exception)e);
		}
		// NOTE(review): unreachable in C# because `e is Exception` above is
		// always true; retained from the Java (Throwable) translation.
		throw new SftpException(SSH_FX_FAILURE, string.Empty);
	}
}
/// <summary>Opens the pack index, downloading it from the remote connection when not cached locally.</summary>
/// <exception cref="System.IO.IOException"></exception>
internal virtual void OpenIndex(ProgressMonitor pm)
{
	if (this.index != null)
	{
		// Already opened.
		return;
	}
	if (this.tmpIdx == null)
	{
		this.tmpIdx = FilePath.CreateTempFile("jgit-walk-", ".idx");
	}
	else
	{
		if (this.tmpIdx.IsFile())
		{
			try
			{
				// Reuse a previously downloaded copy when it is readable.
				this.index = PackIndex.Open(this.tmpIdx);
				return;
			}
			catch (FileNotFoundException)
			{
			}
		}
	}
	// Fall through and get the file.
	WalkRemoteObjectDatabase.FileStream s;
	s = this.connection.Open("pack/" + this.idxName);
	pm.BeginTask("Get " + Sharpen.Runtime.Substring(this.idxName, 0, 12) + "..idx", s.length < 0 ? ProgressMonitor.UNKNOWN : (int)(s.length / 1024));
	try
	{
		FileOutputStream fos = new FileOutputStream(this.tmpIdx);
		try
		{
			byte[] buf = new byte[2048];
			int cnt;
			// Copy the remote stream into the temp file, reporting
			// progress in KiB and honoring cancellation.
			while (!pm.IsCancelled() && (cnt = [email protected](buf)) >= 0)
			{
				fos.Write(buf, 0, cnt);
				pm.Update(cnt / 1024);
			}
		}
		finally
		{
			fos.Close();
		}
	}
	catch (IOException err)
	{
		// A partial download is useless; remove it before rethrowing.
		FileUtils.Delete(this.tmpIdx);
		throw;
	}
	finally
	{
		[email protected]();
	}
	pm.EndTask();
	if (pm.IsCancelled())
	{
		FileUtils.Delete(this.tmpIdx);
		return;
	}
	try
	{
		this.index = PackIndex.Open(this.tmpIdx);
	}
	catch (IOException e)
	{
		// The downloaded index could not be parsed; discard it.
		FileUtils.Delete(this.tmpIdx);
		throw;
	}
}
/// <summary>
/// Updates the file in the working tree with content and mode from an entry
/// in the index.
/// </summary>
/// <remarks>
/// Updates the file in the working tree with content and mode from an entry
/// in the index. The new content is first written to a new temporary file in
/// the same directory as the real file. Then that new file is renamed to the
/// final filename.
/// <p>
/// TODO: this method works directly on File IO, we may need another
/// abstraction (like WorkingTreeIterator). This way we could tell e.g.
/// Eclipse that Files in the workspace got changed
/// </p>
/// </remarks>
/// <param name="repo"></param>
/// <param name="f">
/// the file to be modified. The parent directory for this file
/// has to exist already
/// </param>
/// <param name="entry">the entry containing new mode and content</param>
/// <param name="or">object reader to use for checkout</param>
/// <exception cref="System.IO.IOException">System.IO.IOException</exception>
public static void CheckoutEntry(Repository repo, FilePath f, DirCacheEntry entry, ObjectReader or)
{
	ObjectLoader ol = or.Open(entry.GetObjectId());
	FilePath parentDir = f.GetParentFile();
	// Write into a temp file in the same directory so the final rename
	// stays within one filesystem.
	FilePath tmpFile = FilePath.CreateTempFile("._" + f.GetName(), null, parentDir);
	WorkingTreeOptions opt = repo.GetConfig().Get(WorkingTreeOptions.KEY);
	FileOutputStream rawChannel = new FileOutputStream(tmpFile);
	OutputStream channel;
	if (opt.GetAutoCRLF() == CoreConfig.AutoCRLF.TRUE)
	{
		// Apply CRLF conversion on the way out when core.autocrlf=true.
		channel = new AutoCRLFOutputStream(rawChannel);
	}
	else
	{
		channel = rawChannel;
	}
	try
	{
		ol.CopyTo(channel);
	}
	finally
	{
		channel.Close();
	}
	FS fs = repo.FileSystem;
	if (opt.IsFileMode() && fs.SupportsExecute())
	{
		// Sync the executable bit with the index entry's file mode.
		if (FileMode.EXECUTABLE_FILE.Equals(entry.RawMode))
		{
			if (!fs.CanExecute(tmpFile))
			{
				fs.SetExecute(tmpFile, true);
			}
		}
		else
		{
			if (fs.CanExecute(tmpFile))
			{
				fs.SetExecute(tmpFile, false);
			}
		}
	}
	if (!tmpFile.RenameTo(f))
	{
		// The rename failed; delete the target file and try again.
		FileUtils.Delete(f);
		if (!tmpFile.RenameTo(f))
		{
			throw new IOException(MessageFormat.Format(JGitText.Get().couldNotWriteFile, tmpFile.GetPath(), f.GetPath()));
		}
	}
	entry.LastModified = f.LastModified();
	if (opt.GetAutoCRLF() != CoreConfig.AutoCRLF.FALSE)
	{
		// AutoCRLF wants on-disk-size
		entry.SetLength(f.Length());
	}
	else
	{
		entry.SetLength((int)ol.GetSize());
	}
}
/// <summary>Writes the private key to the file with the given name.</summary>
/// <exception cref="System.IO.FileNotFoundException"></exception>
/// <exception cref="System.IO.IOException"></exception>
public virtual void WritePrivateKey(string name)
{
	FileOutputStream fos = new FileOutputStream(name);
	try
	{
		WritePrivateKey(fos);
	}
	finally
	{
		// Close in finally so the file handle is not leaked when the
		// stream-based overload throws (previously it was left open).
		fos.Close();
	}
}
/// <summary>Writes the public key in SECSH format, with the given comment, to the named file.</summary>
/// <exception cref="System.IO.FileNotFoundException"></exception>
/// <exception cref="System.IO.IOException"></exception>
public virtual void WriteSECSHPublicKey(string name, string comment)
{
	FileOutputStream fos = new FileOutputStream(name);
	try
	{
		WriteSECSHPublicKey(fos, comment);
	}
	finally
	{
		// Close in finally so the file handle is not leaked when the
		// stream-based overload throws (previously it was left open).
		fos.Close();
	}
}
/// <summary>Unlock this file and abort this change.</summary>
/// <remarks>
/// Unlock this file and abort this change.
/// <p>
/// The temporary file (if created) is deleted before returning.
/// </remarks>
public virtual void Unlock()
{
	if (os != null)
	{
		try
		{
			os.Close();
		}
		catch (IOException)
		{
			// Best-effort close; failures are intentionally ignored.
		}
		os = null;
	}
	if (haveLck)
	{
		haveLck = false;
		try
		{
			FileUtils.Delete(lck, FileUtils.RETRY);
		}
		catch (IOException)
		{
			// Best-effort delete of the lock file; ignored on failure.
		}
	}
}
public virtual void TestReadLoosePackedRef()
{
	// The ref starts out stored in the packed-refs file.
	Ref @ref = db.GetRef("refs/heads/master");
	NUnit.Framework.Assert.AreEqual(RefStorage.PACKED, @ref.GetStorage());
	// Create a loose ref file with the same object id.
	FileOutputStream os = new FileOutputStream(new FilePath(db.Directory, "refs/heads/master"));
	try
	{
		os.Write(Sharpen.Runtime.GetBytesForString(@ref.GetObjectId().Name));
		os.Write('\n');
	}
	finally
	{
		// Close in finally so a failed write does not leak the handle
		// (previously Close() was skipped on exception).
		os.Close();
	}
	// The loose ref now shadows the packed one.
	@ref = db.GetRef("refs/heads/master");
	NUnit.Framework.Assert.AreEqual(RefStorage.LOOSE, @ref.GetStorage());
}
/// <summary>Write arbitrary data to the temporary file.</summary>
/// <remarks>Write arbitrary data to the temporary file.</remarks>
/// <param name="content">
/// the bytes to store in the temporary file. No additional bytes
/// are added, so if the file must end with an LF it must appear
/// at the end of the byte array.
/// </param>
/// <exception cref="System.IO.IOException">
/// the temporary file could not be written. The lock is released
/// before throwing the underlying IO exception to the caller.
/// </exception>
/// <exception cref="Sharpen.RuntimeException">
/// the temporary file could not be written. The lock is released
/// before throwing the underlying exception to the caller.
/// </exception>
public virtual void Write(byte[] content)
{
	RequireLock();
	try
	{
		if (fsync)
		{
			// Push the bytes through the channel and force them to disk.
			FileChannel fc = os.GetChannel();
			ByteBuffer buf = ByteBuffer.Wrap(content);
			while (0 < buf.Remaining())
			{
				fc.Write(buf);
			}
			fc.Force(true);
		}
		else
		{
			os.Write(content);
		}
		// Close and clear the stream so Unlock() will not close it twice.
		os.Close();
		os = null;
	}
	catch (IOException ioe)
	{
		// Release the lock before propagating the failure.
		Unlock();
		throw;
	}
	catch (RuntimeException ioe)
	{
		Unlock();
		throw;
	}
	catch (Error ioe)
	{
		Unlock();
		throw;
	}
}
/// <summary>Writes the pack index for the received objects into the temporary index file.</summary>
/// <exception cref="System.IO.IOException"></exception>
private void WriteIdx()
{
	IList<PackedObjectInfo> list = GetSortedObjectList(null);
	FileOutputStream output = new FileOutputStream(tmpIdx);
	try
	{
		PackIndexWriter writer;
		if (indexVersion <= 0)
		{
			writer = PackIndexWriter.CreateOldestPossible(output, list);
		}
		else
		{
			writer = PackIndexWriter.CreateVersion(output, indexVersion);
		}
		writer.Write(list, packHash);
		// Force the index contents to stable storage before closing.
		output.GetChannel().Force(true);
	}
	finally
	{
		output.Close();
	}
}
/// <summary>Creates a reflog file under the repository directory containing the given raw bytes.</summary>
/// <exception cref="System.IO.FileNotFoundException"></exception>
/// <exception cref="System.IO.IOException"></exception>
private void SetupReflog(string logName, byte[] data)
{
	FilePath logfile = new FilePath(db.Directory, logName);
	FilePath dir = logfile.GetParentFile();
	// Mkdirs returns false when the directory already exists, so also
	// accept an existing directory.
	if (!dir.Mkdirs() && !dir.IsDirectory())
	{
		throw new IOException("oops, cannot create the directory for the test reflog file" + logfile);
	}
	FileOutputStream output = new FileOutputStream(logfile);
	try
	{
		output.Write(data);
	}
	finally
	{
		output.Close();
	}
}
/// <summary>Stores the given bytes as a blob, reporting its content key via <paramref name="outKey"/>.</summary>
/// <returns>true when the blob exists or was written; false on any write failure.</returns>
public bool StoreBlob(byte[] data, BlobKey outKey)
{
	BlobKey newKey = KeyForBlob(data);
	outKey.SetBytes(newKey.GetBytes());
	FilePath file = new FilePath(PathForKey(outKey));
	if (file.CanRead())
	{
		// A readable file for this content key already exists.
		return true;
	}
	FileOutputStream fos = null;
	try
	{
		fos = new FileOutputStream(file);
		fos.Write(data);
	}
	catch (FileNotFoundException e)
	{
		Log.E(Database.Tag, "Error opening file for output", e);
		return false;
	}
	catch (IOException ioe)
	{
		Log.E(Database.Tag, "Error writing to file", ioe);
		return false;
	}
	finally
	{
		if (fos != null)
		{
			try
			{
				fos.Close();
			}
			catch (IOException)
			{
				// ignore
			}
		}
	}
	return true;
}
/// <summary>Performs a three-way content merge and updates the index (and worktree, when used) with the outcome.</summary>
/// <returns>true when the merge completed without conflicts; false when conflict stages were recorded.</returns>
/// <exception cref="System.IO.FileNotFoundException"></exception>
/// <exception cref="System.InvalidOperationException"></exception>
/// <exception cref="System.IO.IOException"></exception>
private bool ContentMerge(CanonicalTreeParser @base, CanonicalTreeParser ours, CanonicalTreeParser theirs)
{
	MergeFormatter fmt = new MergeFormatter();
	// A missing base entry is treated as an empty base text.
	RawText baseText = @base == null ? RawText.EMPTY_TEXT : GetRawText(@base.EntryObjectId, db);
	// do the merge
	MergeResult<RawText> result = mergeAlgorithm.Merge(RawTextComparator.DEFAULT, baseText, GetRawText(ours.EntryObjectId, db), GetRawText(theirs.EntryObjectId, db));
	FilePath of = null;
	FileOutputStream fos;
	if (!inCore)
	{
		FilePath workTree = db.WorkTree;
		if (workTree == null)
		{
			// TODO: This should be handled by WorkingTreeIterators which
			// support write operations
			throw new NotSupportedException();
		}
		// Write the (possibly conflicting) merge result to the worktree file.
		of = new FilePath(workTree, tw.PathString);
		fos = new FileOutputStream(of);
		try
		{
			fmt.FormatMerge(fos, result, Arrays.AsList(commitNames), Constants.CHARACTER_ENCODING);
		}
		finally
		{
			fos.Close();
		}
	}
	else
	{
		if (!result.ContainsConflicts())
		{
			// When working inCore, only trivial merges can be handled,
			// so we generate objects only in conflict free cases
			of = FilePath.CreateTempFile("merge_", "_temp", null);
			fos = new FileOutputStream(of);
			try
			{
				fmt.FormatMerge(fos, result, Arrays.AsList(commitNames), Constants.CHARACTER_ENCODING);
			}
			finally
			{
				fos.Close();
			}
		}
	}
	if (result.ContainsConflicts())
	{
		// a conflict occured, the file will contain conflict markers
		// the index will be populated with the three stages and only the
		// workdir (if used) contains the halfways merged content
		Add(tw.RawPath, @base, DirCacheEntry.STAGE_1);
		Add(tw.RawPath, ours, DirCacheEntry.STAGE_2);
		Add(tw.RawPath, theirs, DirCacheEntry.STAGE_3);
		mergeResults.Put(tw.PathString, result.Upcast());
		return false;
	}
	else
	{
		// no conflict occured, the file will contain fully merged content.
		// the index will be populated with the new merged version
		DirCacheEntry dce = new DirCacheEntry(tw.PathString);
		dce.FileMode = tw.GetFileMode(0);
		dce.LastModified = of.LastModified();
		dce.SetLength((int)of.Length());
		InputStream @is = new FileInputStream(of);
		try
		{
			// Insert the merged content as a blob and record its id.
			dce.SetObjectId(oi.Insert(Constants.OBJ_BLOB, of.Length(), @is));
		}
		finally
		{
			@is.Close();
			if (inCore)
			{
				// The temporary file is not needed once the blob is stored.
				FileUtils.Delete(of);
			}
		}
		builder.Add(dce);
		return true;
	}
}
/// <summary>Appends a single byte to the end of the given file.</summary>
/// <exception cref="System.IO.IOException"></exception>
private void Append(FilePath f, byte b)
{
	// Open in append mode so existing content is preserved.
	FileOutputStream output = new FileOutputStream(f, true);
	try
	{
		output.Write(b);
	}
	finally
	{
		output.Close();
	}
}
/// <summary>Deflates an object's header and content into a new temporary file.</summary>
/// <returns>the temporary file holding the compressed, digested object.</returns>
/// <exception cref="System.IO.IOException"></exception>
/// <exception cref="System.IO.FileNotFoundException"></exception>
/// <exception cref="Sharpen.Error"></exception>
private FilePath ToTemp(MessageDigest md, int type, long len, InputStream @is)
{
	// Delete the temp file on any failure; only a fully written file survives.
	bool delete = true;
	FilePath tmp = NewTempFile();
	try
	{
		FileOutputStream fOut = new FileOutputStream(tmp);
		try
		{
			OutputStream @out = fOut;
			if (config.GetFSyncObjectFiles())
			{
				// Route writes through the channel so they can be forced
				// to disk in the finally block below.
				@out = Channels.NewOutputStream(fOut.GetChannel());
			}
			// Stack: digest(deflate(file)) so the object id is computed
			// over the uncompressed header + content as it is written.
			DeflaterOutputStream cOut = Compress(@out);
			DigestOutputStream dOut = new DigestOutputStream(cOut, md);
			WriteHeader(dOut, type, len);
			byte[] buf = Buffer();
			while (len > 0)
			{
				int n = @is.Read(buf, 0, (int)Math.Min(len, buf.Length));
				if (n <= 0)
				{
					// The source ended before `len` bytes were supplied.
					throw ShortInput(len);
				}
				dOut.Write(buf, 0, n);
				len -= n;
			}
			dOut.Flush();
			cOut.Finish();
		}
		finally
		{
			if (config.GetFSyncObjectFiles())
			{
				fOut.GetChannel().Force(true);
			}
			fOut.Close();
		}
		delete = false;
		return tmp;
	}
	finally
	{
		if (delete)
		{
			FileUtils.Delete(tmp);
		}
	}
}
/// <summary>Stores the stream's content as a blob, first into a temp file, then renamed to its content-keyed path.</summary>
/// <returns>true when the blob exists or was stored; false on any IO failure.</returns>
public bool StoreBlobStream(Stream inputStream, out BlobKey outKey)
{
	FilePath tmp = null;
	try
	{
		tmp = FilePath.CreateTempFile(TmpFilePrefix, TmpFileExtension, new FilePath(this.path));
		FileOutputStream fos = new FileOutputStream(tmp);
		try
		{
			byte[] buffer = new byte[65536];
			int lenRead = ((InputStream)inputStream).Read(buffer);
			while (lenRead > 0)
			{
				fos.Write(buffer, 0, lenRead);
				lenRead = ((InputStream)inputStream).Read(buffer);
			}
		}
		finally
		{
			// Close in finally so a failed read/write does not leak the
			// temp file handle (previously it stayed open on exception).
			fos.Close();
		}
		inputStream.Close();
	}
	catch (IOException e)
	{
		// Fixed typo in the log message ("blog" -> "blob").
		Log.E(Database.Tag, "Error writing blob to tmp file", e);
		outKey = null;
		return false;
	}
	outKey = KeyForBlobFromFile(tmp);
	var keyPath = PathForKey(outKey);
	var file = new FilePath(keyPath);
	if (file.CanRead())
	{
		// object with this hash already exists, we should delete tmp file and return true
		tmp.Delete();
	}
	else
	{
		// does not exist, we should rename tmp file to this name
		tmp.RenameTo(file);
	}
	return true;
}