public override string ToString()
{
    bool first = true;
    StringBuilder commits = new StringBuilder();
    foreach (ObjectId commit in mergedCommits)
    {
        if (!first)
        {
            commits.Append(", ");
        }
        else
        {
            first = false;
        }
        commits.Append(ObjectId.ToString(commit));
    }
    return MessageFormat.Format(JGitText.Get().mergeUsingStrategyResultedInDescription,
        commits, ObjectId.ToString(@base), mergeStrategy.GetName(), mergeStatus,
        (description == null ? string.Empty : ", " + description));
}
/// <exception cref="System.IO.IOException"></exception>
public override void Write(byte[] b, int off, int len)
{
    while (0 < len)
    {
        int n = Math.Min(len, BYTES_TO_WRITE_BEFORE_CANCEL_CHECK);
        count += n;
        if (checkCancelAt <= count)
        {
            if (writeMonitor.IsCancelled())
            {
                throw new IOException(JGitText.Get().packingCancelledDuringObjectsWriting);
            }
            checkCancelAt = count + BYTES_TO_WRITE_BEFORE_CANCEL_CHECK;
        }
        @out.Write(b, off, n);
        crc.Update(b, off, n);
        md.Update(b, off, n);
        off += n;
        len -= n;
    }
}
/// <exception cref="NGit.Errors.TransportException"></exception>
public TcpPushConnection(TransportGitAnon _enclosing) : base(_enclosing)
{
    this._enclosing = _enclosing;
    this.sock = this._enclosing.OpenConnection();
    try
    {
        InputStream sIn = this.sock.GetInputStream();
        OutputStream sOut = this.sock.GetOutputStream();
        sIn = new BufferedInputStream(sIn);
        sOut = new BufferedOutputStream(sOut);
        this.Init(sIn, sOut);
        this._enclosing.Service("git-receive-pack", this.pckOut);
    }
    catch (IOException err)
    {
        this.Close();
        throw new TransportException(this.uri, JGitText.Get().remoteHungUpUnexpectedly, err);
    }
    this.ReadAdvertisedRefs();
}
/// <exception cref="NGit.Api.Errors.GitAPIException"></exception>
private ObjectId GetStashId()
{
    string revision = stashRef != null ? stashRef : DEFAULT_REF;
    ObjectId stashId;
    try
    {
        stashId = repo.Resolve(revision);
    }
    catch (IOException e)
    {
        throw new InvalidRefNameException(MessageFormat.Format(JGitText.Get().stashResolveFailed, revision), e);
    }
    if (stashId == null)
    {
        throw new InvalidRefNameException(MessageFormat.Format(JGitText.Get().stashResolveFailed, revision));
    }
    return stashId;
}
public virtual void TestStandardFormat_SmallObject_CorruptZLibStream()
{
    ObjectId id = ObjectId.ZeroId;
    byte[] data = GetRng().NextBytes(300);
    try
    {
        byte[] gz = CompressStandardFormat(Constants.OBJ_BLOB, data);
        for (int i = 5; i < gz.Length; i++)
        {
            gz[i] = 0;
        }
        UnpackedObject.Open(new ByteArrayInputStream(gz), Path(id), id, wc);
        NUnit.Framework.Assert.Fail("Did not throw CorruptObjectException");
    }
    catch (CorruptObjectException coe)
    {
        NUnit.Framework.Assert.AreEqual(MessageFormat.Format(JGitText.Get().objectIsCorrupt,
            id.Name, JGitText.Get().corruptObjectBadStream), coe.Message);
    }
}
/// <exception cref="NGit.Errors.TransportException"></exception>
private void ProcessTree(RevObject obj)
{
    try
    {
        treeWalk.Reset(obj);
        while (treeWalk.Next())
        {
            FileMode mode = treeWalk.GetFileMode(0);
            int sType = mode.GetObjectType();
            switch (sType)
            {
                case Constants.OBJ_BLOB:
                case Constants.OBJ_TREE:
                {
                    treeWalk.GetObjectId(idBuffer, 0);
                    Needs(revWalk.LookupAny(idBuffer, sType));
                    continue;
                }

                default:
                {
                    if (FileMode.GITLINK.Equals(mode))
                    {
                        continue;
                    }
                    treeWalk.GetObjectId(idBuffer, 0);
                    throw new CorruptObjectException(MessageFormat.Format(JGitText.Get().invalidModeFor,
                        mode, idBuffer.Name, treeWalk.PathString, obj.Id.Name));
                }
            }
        }
    }
    catch (IOException ioe)
    {
        throw new TransportException(MessageFormat.Format(JGitText.Get().cannotReadTree, obj.Name), ioe);
    }
    obj.Add(COMPLETE);
}
/// <exception cref="System.IO.IOException"></exception>
private void WriteCommands(ICollection<RemoteRefUpdate> refUpdates, ProgressMonitor monitor)
{
    string capabilities = EnableCapabilities(monitor);
    foreach (RemoteRefUpdate rru in refUpdates)
    {
        if (!capableDeleteRefs && rru.IsDelete())
        {
            rru.SetStatus(RemoteRefUpdate.Status.REJECTED_NODELETE);
            continue;
        }
        StringBuilder sb = new StringBuilder();
        Ref advertisedRef = GetRef(rru.GetRemoteName());
        ObjectId oldId = (advertisedRef == null ? ObjectId.ZeroId : advertisedRef.GetObjectId());
        sb.Append(oldId.Name);
        sb.Append(' ');
        sb.Append(rru.GetNewObjectId().Name);
        sb.Append(' ');
        sb.Append(rru.GetRemoteName());
        if (!sentCommand)
        {
            sentCommand = true;
            sb.Append(capabilities);
        }
        pckOut.WriteString(sb.ToString());
        rru.SetStatus(RemoteRefUpdate.Status.AWAITING_REPORT);
        if (!rru.IsDelete())
        {
            writePack = true;
        }
    }
    if (monitor.IsCancelled())
    {
        throw new TransportException(uri, JGitText.Get().pushCancelled);
    }
    pckOut.End();
    outNeedsEnd = false;
}
private string[] ExtractFileLines(Encoding[] csGuess)
{
    TemporaryBuffer[] tmp = new TemporaryBuffer[GetParentCount() + 1];
    try
    {
        for (int i = 0; i < tmp.Length; i++)
        {
            tmp[i] = new TemporaryBuffer.LocalFile();
        }
        foreach (HunkHeader h in GetHunks())
        {
            h.ExtractFileLines(tmp);
        }
        string[] r = new string[tmp.Length];
        for (int i_1 = 0; i_1 < tmp.Length; i_1++)
        {
            Encoding cs = csGuess != null ? csGuess[i_1] : null;
            if (cs == null)
            {
                cs = Constants.CHARSET;
            }
            r[i_1] = RawParseUtils.Decode(cs, tmp[i_1].ToByteArray());
        }
        return r;
    }
    catch (IOException ioe)
    {
        throw new RuntimeException(JGitText.Get().cannotConvertScriptToText, ioe);
    }
    finally
    {
        foreach (TemporaryBuffer b in tmp)
        {
            if (b != null)
            {
                b.Destroy();
            }
        }
    }
}
public static void HardReset(NGit.Repository repo, ObjectId newHead)
{
    DirCache dc = null;
    try
    {
        // Reset head to upstream
        RefUpdate ru = repo.UpdateRef(Constants.HEAD);
        ru.SetNewObjectId(newHead);
        ru.SetForceUpdate(true);
        RefUpdate.Result rc = ru.Update();
        switch (rc)
        {
            case RefUpdate.Result.NO_CHANGE:
            case RefUpdate.Result.NEW:
            case RefUpdate.Result.FAST_FORWARD:
            case RefUpdate.Result.FORCED:
                break;

            case RefUpdate.Result.REJECTED:
            case RefUpdate.Result.LOCK_FAILURE:
                throw new ConcurrentRefUpdateException(JGitText.Get().couldNotLockHEAD, ru.GetRef(), rc);

            default:
                throw new JGitInternalException("Reference update failed: " + rc);
        }
        dc = repo.LockDirCache();
        RevWalk rw = new RevWalk(repo);
        RevCommit c = rw.ParseCommit(newHead);
        DirCacheCheckout checkout = new DirCacheCheckout(repo, null, dc, c.Tree);
        checkout.Checkout();
    }
    catch
    {
        if (dc != null)
        {
            dc.Unlock();
        }
        throw;
    }
}
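// Usage sketch (illustrative only, not part of the original file): resolving an
// upstream commit and hard-resetting HEAD to it. The ref name
// "refs/remotes/origin/master" is an assumed example.
//
//     ObjectId upstream = repo.Resolve("refs/remotes/origin/master");
//     if (upstream != null)
//     {
//         HardReset(repo, upstream);
//     }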
/// <exception cref="NGit.Errors.TransportException"></exception>
public InternalLocalPushConnection(TransportLocal _enclosing) : base(_enclosing)
{
    this._enclosing = _enclosing;
    Repository dst;
    try
    {
        dst = new FileRepository(this._enclosing.remoteGitDir);
    }
    catch (IOException)
    {
        throw new TransportException(this.uri, JGitText.Get().notAGitDirectory);
    }
    PipedInputStream in_r;
    PipedOutputStream in_w;
    PipedInputStream out_r;
    PipedOutputStream out_w;
    try
    {
        in_r = new PipedInputStream();
        in_w = new PipedOutputStream(in_r);
        out_r = new PipedInputStream();
        out_w = new PipedOutputStream(out_r);
    }
    catch (IOException err)
    {
        dst.Close();
        throw new TransportException(this.uri, JGitText.Get().cannotConnectPipes, err);
    }
    this.worker = new _Thread_340(this, dst, out_r, in_w, "JGit-Receive-Pack");
    // Client side of the pipes should report the problem.
    // Client side will notice we went away, and report.
    // Ignore close failure, we probably crashed above.
    // Ignore close failure, we probably crashed above.
    this.worker.Start();
    this.Init(in_r, out_w);
    this.ReadAdvertisedRefs();
}
public virtual void TestConstructor_RejectsBadBufferSize()
{
    try
    {
        new SideBandOutputStream(SideBandOutputStream.CH_DATA, -1, rawOut);
        NUnit.Framework.Assert.Fail("Accepted -1 for buffer size");
    }
    catch (ArgumentException e)
    {
        NUnit.Framework.Assert.AreEqual("packet size -1 must be >= 5", e.Message);
    }
    try
    {
        new SideBandOutputStream(SideBandOutputStream.CH_DATA, 0, rawOut);
        NUnit.Framework.Assert.Fail("Accepted 0 for buffer size");
    }
    catch (ArgumentException e)
    {
        NUnit.Framework.Assert.AreEqual("packet size 0 must be >= 5", e.Message);
    }
    try
    {
        new SideBandOutputStream(SideBandOutputStream.CH_DATA, 1, rawOut);
        NUnit.Framework.Assert.Fail("Accepted 1 for buffer size");
    }
    catch (ArgumentException e)
    {
        NUnit.Framework.Assert.AreEqual("packet size 1 must be >= 5", e.Message);
    }
    try
    {
        new SideBandOutputStream(SideBandOutputStream.CH_DATA, int.MaxValue, rawOut);
        NUnit.Framework.Assert.Fail("Accepted " + int.MaxValue + " for buffer size");
    }
    catch (ArgumentException e)
    {
        NUnit.Framework.Assert.AreEqual(MessageFormat.Format(JGitText.Get().packetSizeMustBeAtMost,
            int.MaxValue, 65520), e.Message);
    }
}
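// For contrast with the rejected sizes above, a sketch of a construction the test
// implies should be accepted. SideBandOutputStream.MAX_BUF is assumed here to be the
// 65520-byte upper bound referenced in the last assertion; it is not shown in this file.
//
//     SideBandOutputStream ok = new SideBandOutputStream(
//         SideBandOutputStream.CH_DATA, SideBandOutputStream.MAX_BUF, rawOut);
//     ok.Write(payload, 0, payload.Length);
//     ok.Flush();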
private void BeforeAdd(DirCacheEntry newEntry)
{
    if (sorted && entryCnt > 0)
    {
        DirCacheEntry lastEntry = entries[entryCnt - 1];
        int cr = DirCache.Cmp(lastEntry, newEntry);
        if (cr > 0)
        {
            // The new entry sorts before the old entry; we are
            // no longer sorted correctly. We'll need to redo
            // the sorting before we can close out the build.
            //
            sorted = false;
        }
        else if (cr == 0)
        {
            // Same file path; we can only insert this if the
            // stages won't be violated.
            //
            int peStage = lastEntry.Stage;
            int dceStage = newEntry.Stage;
            if (peStage == dceStage)
            {
                throw Bad(newEntry, JGitText.Get().duplicateStagesNotAllowed);
            }
            if (peStage == 0 || dceStage == 0)
            {
                throw Bad(newEntry, JGitText.Get().mixedStagesNotAllowed);
            }
            if (peStage > dceStage)
            {
                sorted = false;
            }
        }
    }
}
public virtual void TestStandardFormat_LargeObject_TrailingGarbage()
{
    int type = Constants.OBJ_BLOB;
    byte[] data = GetRng().NextBytes(streamThreshold + 5);
    ObjectId id = new ObjectInserter.Formatter().IdFor(type, data);
    byte[] gz = CompressStandardFormat(type, data);
    byte[] tr = new byte[gz.Length + 1];
    System.Array.Copy(gz, 0, tr, 0, gz.Length);
    Write(id, tr);
    ObjectLoader ol;
    {
        FileInputStream fs = new FileInputStream(Path(id));
        try
        {
            ol = UnpackedObject.Open(fs, Path(id), id, wc);
        }
        finally
        {
            fs.Close();
        }
    }
    byte[] tmp = new byte[data.Length];
    InputStream @in = ol.OpenStream();
    IOUtil.ReadFully(@in, tmp, 0, tmp.Length);
    try
    {
        @in.Close();
        NUnit.Framework.Assert.Fail("close did not throw CorruptObjectException");
    }
    catch (CorruptObjectException coe)
    {
        NUnit.Framework.Assert.AreEqual(MessageFormat.Format(JGitText.Get().objectIsCorrupt,
            id.Name, JGitText.Get().corruptObjectBadStream), coe.Message);
    }
}
/// <summary>Create a new writer instance for a specific index format version.</summary>
/// <remarks>Create a new writer instance for a specific index format version.</remarks>
/// <param name="dst">
/// the stream the index data will be written to. If not already
/// buffered it will be automatically wrapped in a buffered
/// stream. Callers are always responsible for closing the stream.
/// </param>
/// <param name="version">
/// index format version number required by the caller. Exactly
/// this format version will be written.
/// </param>
/// <returns>
/// a new writer to output an index file of the requested format to
/// the supplied stream.
/// </returns>
/// <exception cref="System.ArgumentException">
/// the version requested is not supported by this
/// implementation.
/// </exception>
public static NGit.Storage.File.PackIndexWriter CreateVersion(OutputStream dst, int version)
{
    switch (version)
    {
        case 1:
        {
            return new PackIndexWriterV1(dst);
        }

        case 2:
        {
            return new PackIndexWriterV2(dst);
        }

        default:
        {
            throw new ArgumentException(MessageFormat.Format(JGitText.Get().unsupportedPackIndexVersion, version));
        }
    }
}
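// Usage sketch (assumed calling pattern, not shown in this file): writing a version 2
// pack index for a set of objects. The Write(...) call taking the sorted object list
// and the pack data checksum is an assumption about the surrounding PackIndexWriter API.
//
//     PackIndexWriter iw = PackIndexWriter.CreateVersion(indexStream, 2);
//     iw.Write(objectsSortedByName, packDataChecksum);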
/// <summary>
/// Create a new Git repository initializing the necessary files and
/// directories.
/// </summary>
/// <remarks>
/// Create a new Git repository initializing the necessary files and
/// directories.
/// </remarks>
/// <param name="bare">if true, a bare repository is created.</param>
/// <exception cref="System.IO.IOException">in case of IO problem</exception>
public override void Create(bool bare)
{
    FileBasedConfig cfg = ((FileBasedConfig)GetConfig());
    if (cfg.GetFile().Exists())
    {
        throw new InvalidOperationException(MessageFormat.Format(JGitText.Get().repositoryAlreadyExists, Directory));
    }
    FileUtils.Mkdirs(Directory, true);
    refs.Create();
    objectDatabase.Create();
    FileUtils.Mkdir(new FilePath(Directory, "branches"));
    FileUtils.Mkdir(new FilePath(Directory, "hooks"));
    RefUpdate head = UpdateRef(Constants.HEAD);
    head.DisableRefLog();
    head.Link(Constants.R_HEADS + Constants.MASTER);
    bool fileMode = false;
    cfg.SetInt(ConfigConstants.CONFIG_CORE_SECTION, null, ConfigConstants.CONFIG_KEY_REPO_FORMAT_VERSION, 0);
    cfg.SetBoolean(ConfigConstants.CONFIG_CORE_SECTION, null, ConfigConstants.CONFIG_KEY_FILEMODE, fileMode);
    if (bare)
    {
        cfg.SetBoolean(ConfigConstants.CONFIG_CORE_SECTION, null, ConfigConstants.CONFIG_KEY_BARE, true);
    }
    cfg.SetBoolean(ConfigConstants.CONFIG_CORE_SECTION, null, ConfigConstants.CONFIG_KEY_LOGALLREFUPDATES, !bare);
    if (SystemReader.GetInstance().IsMacOS())
    {
        // Java has no other way
        cfg.SetBoolean(ConfigConstants.CONFIG_CORE_SECTION, null, ConfigConstants.CONFIG_KEY_PRECOMPOSEUNICODE, true);
    }
    cfg.Save();
}
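// Usage sketch (illustrative; the repository path is an assumed example): creating a
// new bare repository on disk with the defaults configured above.
//
//     FileRepository repo = new FileRepository(new FilePath("/tmp/example.git"));
//     repo.Create(true);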
internal HistogramDiffIndex(int maxChainLength, HashedSequenceComparator<S> cmp,
    HashedSequence<S> a, HashedSequence<S> b, Edit r)
{
    this.maxChainLength = maxChainLength;
    this.cmp = cmp;
    this.a = a;
    this.b = b;
    this.region = r;
    if (region.endA >= MAX_PTR)
    {
        throw new ArgumentException(JGitText.Get().sequenceTooLargeForDiffAlgorithm);
    }
    int sz = r.GetLengthA();
    int tableBits = TableBits(sz);
    table = new int[1 << tableBits];
    keyShift = 32 - tableBits;
    ptrShift = r.beginA;
    recs = new long[Math.Max(4, (int)(((uint)sz) >> 3))];
    next = new int[sz];
    recIdx = new int[sz];
}
/// <exception cref="NGit.Errors.PackProtocolException"></exception>
private bool OkToGiveUpImp()
{
    if (commonBase.IsEmpty())
    {
        return false;
    }
    try
    {
        foreach (RevObject obj in wantAll)
        {
            if (!WantSatisfied(obj))
            {
                return false;
            }
        }
        return true;
    }
    catch (IOException e)
    {
        throw new PackProtocolException(JGitText.Get().internalRevisionError, e);
    }
}
/// <exception cref="NGit.Errors.TransportException"></exception>
internal SshPushConnection(TransportGitSsh _enclosing, TransportGitSsh.Connection conn) : base(_enclosing)
{
    this._enclosing = _enclosing;
    this.conn = conn;
    try
    {
        MessageWriter msg = new MessageWriter();
        this.SetMessageWriter(msg);
        conn.Exec(this._enclosing.GetOptionReceivePack());
        InputStream rpErr = conn.GetErrorStream();
        this.errorThread = new StreamCopyThread(rpErr, msg.GetRawStream());
        this.errorThread.Start();
        this.Init(conn.GetInputStream(), conn.GetOutputStream());
        conn.Connect();
    }
    catch (TransportException)
    {
        this.Close();
        throw;
    }
    catch (IOException err)
    {
        this.Close();
        throw new TransportException(this.uri, JGitText.Get().remoteHungUpUnexpectedly, err);
    }
    try
    {
        this.ReadAdvertisedRefs();
    }
    catch (NoRemoteRepositoryException notFound)
    {
        string msgs = this.GetMessages();
        this._enclosing.CheckExecFailure(conn.GetExitStatus(), this._enclosing.GetOptionReceivePack(), msgs);
        throw this._enclosing.CleanNotFound(notFound, msgs);
    }
}
/// <exception cref="System.IO.IOException"></exception>
/// <exception cref="NGit.Api.Errors.NoHeadException"></exception>
/// <exception cref="NGit.Api.Errors.JGitInternalException"></exception>
private RevCommit CheckoutCurrentHead()
{
    ObjectId headTree = repo.Resolve(Constants.HEAD + "^{tree}");
    if (headTree == null)
    {
        throw new NoHeadException(JGitText.Get().cannotRebaseWithoutCurrentHead);
    }
    DirCache dc = repo.LockDirCache();
    try
    {
        DirCacheCheckout dco = new DirCacheCheckout(repo, dc, headTree);
        dco.SetFailOnConflict(false);
        bool needsDeleteFiles = dco.Checkout();
        if (needsDeleteFiles)
        {
            IList<string> fileList = dco.GetToBeDeleted();
            foreach (string filePath in fileList)
            {
                FilePath fileToDelete = new FilePath(repo.WorkTree, filePath);
                if (fileToDelete.Exists())
                {
                    FileUtils.Delete(fileToDelete, FileUtils.RECURSIVE | FileUtils.RETRY);
                }
            }
        }
    }
    finally
    {
        dc.Unlock();
    }
    RevWalk rw = new RevWalk(repo);
    RevCommit commit = rw.ParseCommit(repo.Resolve(Constants.HEAD));
    rw.Release();
    return commit;
}
/// <summary>
/// Parses a string into a
/// <see cref="System.DateTime">System.DateTime</see>
/// . Since this parser also supports
/// relative formats (e.g. "yesterday") the caller can specify the reference
/// date. These types of strings can be parsed:
/// <ul>
/// <li>"never"</li>
/// <li>"now"</li>
/// <li>"yesterday"</li>
/// <li>"(x) years|months|weeks|days|hours|minutes|seconds ago"<br />
/// Multiple specs can be combined like in "2 weeks 3 days ago". Instead of
/// ' ' one can use '.' to separate the words</li>
/// <li>"yyyy-MM-dd HH:mm:ss Z" (ISO)</li>
/// <li>"EEE, dd MMM yyyy HH:mm:ss Z" (RFC)</li>
/// <li>"yyyy-MM-dd"</li>
/// <li>"yyyy.MM.dd"</li>
/// <li>"MM/dd/yyyy",</li>
/// <li>"dd.MM.yyyy"</li>
/// <li>"EEE MMM dd HH:mm:ss yyyy Z" (DEFAULT)</li>
/// <li>"EEE MMM dd HH:mm:ss yyyy" (LOCAL)</li>
/// </ul>
/// </summary>
/// <param name="dateStr">the string to be parsed</param>
/// <param name="now">
/// the base date which is used for the calculation of relative
/// formats. E.g. if baseDate is "25.8.2012" then parsing of the
/// string "1 week ago" would result in a date corresponding to
/// "18.8.2012". This is used when a JGit command calls this
/// parser often but wants a consistent starting point for calls.<br />
/// If set to <code>null</code> then the current time will be used
/// instead.
/// </param>
/// <returns>
/// the parsed
/// <see cref="System.DateTime">System.DateTime</see>
/// </returns>
/// <exception cref="Sharpen.ParseException">if the given dateStr was not recognized</exception>
public static DateTime Parse(string dateStr, JavaCalendar now)
{
    dateStr = dateStr.Trim();
    DateTime? ret;
    if (Sharpen.Runtime.EqualsIgnoreCase("never", dateStr))
    {
        return NEVER;
    }
    ret = Parse_relative(dateStr, now);
    if (ret != null)
    {
        return ret.Value;
    }
    foreach (GitDateParser.ParseableSimpleDateFormat f in GitDateParser.ParseableSimpleDateFormat.Values())
    {
        try
        {
            return Parse_simple(dateStr, f);
        }
        catch (ParseException)
        {
            // simply proceed with the next parser
        }
    }
    GitDateParser.ParseableSimpleDateFormat[] values = GitDateParser.ParseableSimpleDateFormat.Values();
    StringBuilder allFormats = new StringBuilder("\"").Append(values[0].formatStr);
    for (int i = 1; i < values.Length; i++)
    {
        allFormats.Append("\", \"").Append(values[i].formatStr);
    }
    allFormats.Append("\"");
    throw new ParseException(MessageFormat.Format(JGitText.Get().cannotParseDate, dateStr,
        allFormats.ToString()), 0);
}
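// Usage sketch based on the formats listed above: parsing a relative and an ISO date.
// Passing null as the second argument uses the current time as the reference point.
//
//     DateTime twoWeeksAgo = GitDateParser.Parse("2 weeks ago", null);
//     DateTime isoDate = GitDateParser.Parse("2012-08-25 10:00:00 +0200", null);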
/// <exception cref="System.IO.IOException"></exception>
/// <exception cref="NGit.Errors.CorruptObjectException"></exception>
private static void CheckValidEndOfStream(InputStream @in, Inflater inf, AnyObjectId id, byte[] buf)
{
    for (;;)
    {
        int r;
        try
        {
            r = inf.Inflate(buf);
        }
        catch (SharpZipBaseException)
        {
            throw new CorruptObjectException(id, JGitText.Get().corruptObjectBadStream);
        }
        if (r != 0)
        {
            throw new CorruptObjectException(id, JGitText.Get().corruptObjectIncorrectLength);
        }
        if (inf.IsFinished)
        {
            if (inf.RemainingInput != 0 || @in.Read() != -1)
            {
                throw new CorruptObjectException(id, JGitText.Get().corruptObjectBadStream);
            }
            break;
        }
        if (!inf.IsNeedingInput)
        {
            throw new CorruptObjectException(id, JGitText.Get().corruptObjectBadStream);
        }
        r = @in.Read(buf);
        if (r <= 0)
        {
            throw new CorruptObjectException(id, JGitText.Get().corruptObjectBadStream);
        }
        inf.SetInput(buf, 0, r);
    }
}
/// <summary>Read a standard Git packed-refs file to discover known references.</summary>
/// <remarks>Read a standard Git packed-refs file to discover known references.</remarks>
/// <param name="avail">
/// return collection of references. Any existing entries will be
/// replaced if they are found in the packed-refs file.
/// </param>
/// <exception cref="NGit.Errors.TransportException">an error occurred reading from the packed refs file.
/// </exception>
protected internal virtual void ReadPackedRefs(IDictionary<string, Ref> avail)
{
    try
    {
        BufferedReader br = OpenReader(ROOT_DIR + Constants.PACKED_REFS);
        try
        {
            ReadPackedRefsImpl(avail, br);
        }
        finally
        {
            br.Close();
        }
    }
    catch (FileNotFoundException)
    {
        // Perhaps it wasn't worthwhile, or is just an older repository.
    }
    catch (IOException e)
    {
        throw new TransportException(GetURI(), JGitText.Get().errorInPackedRefs, e);
    }
}
public override void Apply(DirCacheEntry ent)
{
    if (checkoutIndex && ent.Stage > DirCacheEntry.STAGE_0)
    {
        UnmergedPathException e = new UnmergedPathException(ent);
        throw new JGitInternalException(e.Message, e);
    }
    ent.SetObjectId(blobId);
    ent.FileMode = mode;
    FilePath file = new FilePath(workTree, ent.PathString);
    FilePath parentDir = file.GetParentFile();
    try
    {
        FileUtils.Mkdirs(parentDir, true);
        DirCacheCheckout.CheckoutEntry(this._enclosing.repo, file, ent, r);
    }
    catch (IOException e)
    {
        throw new JGitInternalException(MessageFormat.Format(JGitText.Get().checkoutConflictWithFile,
            ent.PathString), e);
    }
}
/// <exception cref="NGit.Errors.TransportException"></exception>
private bool DownloadLooseObject(AnyObjectId id, string looseName, WalkRemoteObjectDatabase remote)
{
    try
    {
        byte[] compressed = remote.Open(looseName).ToArray();
        VerifyAndInsertLooseObject(id, compressed);
        return true;
    }
    catch (FileNotFoundException e)
    {
        // Not available in a loose format from this alternate?
        // Try another strategy to get the object.
        //
        RecordError(id, e);
        return false;
    }
    catch (IOException e)
    {
        throw new TransportException(MessageFormat.Format(JGitText.Get().cannotDownload, id.Name), e);
    }
}
/// <summary>Commit this change and release the lock.</summary>
/// <remarks>
/// Commit this change and release the lock.
/// <p>
/// If this method fails (returns false) the lock is still released.
/// </remarks>
/// <returns>
/// true if the commit was successful and the file contains the new
/// data; false if the commit failed and the file remains with the
/// old data.
/// </returns>
/// <exception cref="System.InvalidOperationException">the lock is not held.</exception>
public virtual bool Commit()
{
    if (os != null)
    {
        Unlock();
        throw new InvalidOperationException(MessageFormat.Format(JGitText.Get().lockOnNotClosed, @ref));
    }
    SaveStatInformation();
    if (lck.RenameTo(@ref))
    {
        return true;
    }
    if (!@ref.Exists() || DeleteRef())
    {
        if (RenameLock())
        {
            return true;
        }
    }
    Unlock();
    return false;
}
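// Usage sketch (assumed caller; the LockFile construction and the Lock/Write members
// are assumptions about the surrounding API, not shown in this file):
//
//     LockFile lf = new LockFile(targetFile, fs);
//     if (lf.Lock())
//     {
//         lf.Write(newContent);
//         if (!lf.Commit())
//         {
//             // commit failed; the target file is unchanged and the lock is released
//         }
//     }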
/// <exception cref="NGit.Errors.TransportException"></exception>
private void CreateNewRepository(IList<RemoteRefUpdate> updates)
{
    try
    {
        string @ref = "ref: " + PickHEAD(updates) + "\n";
        byte[] bytes = Constants.Encode(@ref);
        dest.WriteFile(WalkRemoteObjectDatabase.ROOT_DIR + Constants.HEAD, bytes);
    }
    catch (IOException e)
    {
        throw new TransportException(uri, JGitText.Get().cannotCreateHEAD, e);
    }
    try
    {
        string config = "[core]\n" + "\trepositoryformatversion = 0\n";
        byte[] bytes = Constants.Encode(config);
        dest.WriteFile(WalkRemoteObjectDatabase.ROOT_DIR + Constants.CONFIG, bytes);
    }
    catch (IOException e)
    {
        throw new TransportException(uri, JGitText.Get().cannotCreateConfig, e);
    }
}
/// <summary>Construct a new pattern matching filter.</summary>
/// <remarks>Construct a new pattern matching filter.</remarks>
/// <param name="pattern">
/// text of the pattern. Callers may want to surround their
/// pattern with ".*" on either end to allow matching in the
/// middle of the string.
/// </param>
/// <param name="innerString">
/// should .* be wrapped around the pattern if ^ and $ are
/// missing? Most users will want this set.
/// </param>
/// <param name="rawEncoding">
/// should
/// <see cref="ForceToRaw(string)">ForceToRaw(string)</see>
/// be applied to the pattern
/// before compiling it?
/// </param>
/// <param name="flags">
/// flags from
/// <see cref="Sharpen.Pattern">Sharpen.Pattern</see>
/// to control how matching performs.
/// </param>
protected internal PatternMatchRevFilter(string pattern, bool innerString, bool rawEncoding, int flags)
{
    if (pattern.Length == 0)
    {
        throw new ArgumentException(JGitText.Get().cannotMatchOnEmptyString);
    }
    patternText = pattern;
    if (innerString)
    {
        if (!pattern.StartsWith("^") && !pattern.StartsWith(".*"))
        {
            pattern = ".*" + pattern;
        }
        if (!pattern.EndsWith("$") && !pattern.EndsWith(".*"))
        {
            pattern = pattern + ".*";
        }
    }
    string p = rawEncoding ? ForceToRaw(pattern) : pattern;
    compiledPattern = Sharpen.Pattern.Compile(p, flags).Matcher(string.Empty);
}
/// <exception cref="System.NotSupportedException"></exception>
protected internal TransportHttp(Repository local, URIish uri) : base(local, uri)
{
    try
    {
        string uriString = uri.ToString();
        if (!uriString.EndsWith("/")) //$NON-NLS-1$
        {
            uriString += "/"; //$NON-NLS-1$
        }
        baseUrl = new Uri(uriString);
        objectsUrl = new Uri(baseUrl, "objects/"); //$NON-NLS-1$
    }
    catch (UriFormatException e)
    {
        throw new NotSupportedException(MessageFormat.Format(JGitText.Get().invalidURL, uri), e);
    }
    http = local.GetConfig().Get(HTTP_KEY);
    proxySelector = ProxySelector.GetDefault();
}
/// <exception cref="NGit.Errors.AmbiguousObjectException"></exception>
/// <exception cref="NGit.Api.Errors.RefNotFoundException"></exception>
/// <exception cref="System.IO.IOException"></exception>
private ObjectId GetStartPoint()
{
    if (startCommit != null)
    {
        return startCommit.Id;
    }
    ObjectId result = null;
    try
    {
        result = repo.Resolve((startPoint == null) ? Constants.HEAD : startPoint);
    }
    catch (AmbiguousObjectException)
    {
        throw;
    }
    if (result == null)
    {
        throw new RefNotFoundException(MessageFormat.Format(JGitText.Get().refNotResolved,
            startPoint != null ? startPoint : Constants.HEAD));
    }
    return result;
}
/// <summary>Packs all non-symbolic, loose refs into packed-refs.</summary>
/// <remarks>Packs all non-symbolic, loose refs into packed-refs.</remarks>
/// <exception cref="System.IO.IOException">System.IO.IOException</exception>
public virtual void PackRefs()
{
    ICollection<Ref> refs = repo.GetAllRefs().Values;
    IList<string> refsToBePacked = new AList<string>(refs.Count);
    pm.BeginTask(JGitText.Get().packRefs, refs.Count);
    try
    {
        foreach (Ref @ref in refs)
        {
            if (!@ref.IsSymbolic() && @ref.GetStorage().IsLoose())
            {
                refsToBePacked.AddItem(@ref.GetName());
            }
            pm.Update(1);
        }
        ((RefDirectory)repo.RefDatabase).Pack(refsToBePacked);
    }
    finally
    {
        pm.EndTask();
    }
}