// Test fixture setup: reads "all_packed_objects.txt" (one object per line in the
// form "<id> <type> <inflate-size> <size-in-pack> <offset>", columns separated by
// runs of spaces), parses the id and type into TestObject instances, and asserts
// that exactly 96 objects were loaded. The reader is closed in a finally block,
// so the stream is not leaked even if a line fails to parse.
// NOTE(review): Split(" {1,}") relies on the Sharpen regex-style string.Split
// extension — confirm before porting away from Sharpen.
public override void SetUp() { base.SetUp(); toLoad = new AList<WindowCacheGetTest.TestObject>(); BufferedReader br = new BufferedReader(new InputStreamReader(new FileInputStream( JGitTestUtil.GetTestResourceFile("all_packed_objects.txt")), Constants.CHARSET)); try { string line; while ((line = br.ReadLine()) != null) { string[] parts = line.Split(" {1,}"); WindowCacheGetTest.TestObject o = new WindowCacheGetTest.TestObject(this); o.id = ObjectId.FromString(parts[0]); o.SetType(parts[1]); // parts[2] is the inflate size // parts[3] is the size-in-pack // parts[4] is the offset in the pack toLoad.AddItem(o); } } finally { br.Close(); } NUnit.Framework.Assert.AreEqual(96, toLoad.Count); }
/// <summary>Add entries to be considered for rename detection.</summary>
/// <remarks>Add entries to be considered for rename detection.</remarks>
/// <param name="entriesToAdd">one or more entries to add.</param>
/// <exception cref="System.InvalidOperationException">
/// if
/// <code>getEntries</code>
/// was already invoked.
/// </exception>
public virtual void AddAll(ICollection<DiffEntry> entriesToAdd)
{
    // Once results have been computed, the input set is frozen.
    if (done)
    {
        throw new InvalidOperationException(JGitText.Get().renamesAlreadyFound);
    }
    foreach (DiffEntry entry in entriesToAdd)
    {
        DiffEntry.ChangeType changeType = entry.GetChangeType();
        if (changeType == DiffEntry.ChangeType.ADD)
        {
            added.AddItem(entry);
        }
        else if (changeType == DiffEntry.ChangeType.DELETE)
        {
            deleted.AddItem(entry);
        }
        else if (changeType == DiffEntry.ChangeType.MODIFY)
        {
            if (SameType(entry.GetOldMode(), entry.GetNewMode()))
            {
                entries.AddItem(entry);
            }
            else
            {
                // A file-mode type change cannot be a rename candidate as a
                // whole; break it into a DELETE + ADD pair instead.
                IList<DiffEntry> broken = DiffEntry.BreakModify(entry);
                deleted.AddItem(broken[0]);
                added.AddItem(broken[1]);
            }
        }
        else
        {
            // COPY, RENAME and anything else pass straight through.
            entries.AddItem(entry);
        }
    }
}
// Verifies per-inode xattr limit enforcement in FSDirXAttrOp.SetINodeXAttrs:
// starting from two existing USER-namespace xattrs, adding SYSTEM- and
// RAW-namespace xattrs is exempt from the limit (expect 4 total afterwards),
// while adding a TRUSTED-namespace (user-visible) xattr must fail with an
// IOException mentioning the limit.
// NOTE(review): this assumes the fixture configures the inode xattr limit low
// enough that a third user-visible xattr exceeds it — confirm in setup code.
public virtual void TestINodeXAttrsLimit() { IList <XAttr> existingXAttrs = Lists.NewArrayListWithCapacity(2); XAttr xAttr1 = (new XAttr.Builder()).SetNameSpace(XAttr.NameSpace.User).SetName("a1" ).SetValue(new byte[] { unchecked ((int)(0x31)), unchecked ((int)(0x32)), unchecked ( (int)(0x33)) }).Build(); XAttr xAttr2 = (new XAttr.Builder()).SetNameSpace(XAttr.NameSpace.User).SetName("a2" ).SetValue(new byte[] { unchecked ((int)(0x31)), unchecked ((int)(0x31)), unchecked ( (int)(0x31)) }).Build(); existingXAttrs.AddItem(xAttr1); existingXAttrs.AddItem(xAttr2); // Adding system and raw namespace xAttrs aren't affected by inode // xAttrs limit. XAttr newSystemXAttr = (new XAttr.Builder()).SetNameSpace(XAttr.NameSpace.System) .SetName("a3").SetValue(new byte[] { unchecked ((int)(0x33)), unchecked ((int)(0x33 )), unchecked ((int)(0x33)) }).Build(); XAttr newRawXAttr = (new XAttr.Builder()).SetNameSpace(XAttr.NameSpace.Raw).SetName ("a3").SetValue(new byte[] { unchecked ((int)(0x33)), unchecked ((int)(0x33)), unchecked ( (int)(0x33)) }).Build(); IList <XAttr> newXAttrs = Lists.NewArrayListWithCapacity(2); newXAttrs.AddItem(newSystemXAttr); newXAttrs.AddItem(newRawXAttr); IList <XAttr> xAttrs = FSDirXAttrOp.SetINodeXAttrs(fsdir, existingXAttrs, newXAttrs , EnumSet.Of(XAttrSetFlag.Create, XAttrSetFlag.Replace)); NUnit.Framework.Assert.AreEqual(xAttrs.Count, 4); // Adding a trusted namespace xAttr, is affected by inode xAttrs limit. 
XAttr newXAttr1 = (new XAttr.Builder()).SetNameSpace(XAttr.NameSpace.Trusted).SetName ("a4").SetValue(new byte[] { unchecked ((int)(0x34)), unchecked ((int)(0x34)), unchecked ( (int)(0x34)) }).Build(); newXAttrs.Set(0, newXAttr1); try { FSDirXAttrOp.SetINodeXAttrs(fsdir, existingXAttrs, newXAttrs, EnumSet.Of(XAttrSetFlag .Create, XAttrSetFlag.Replace)); NUnit.Framework.Assert.Fail("Setting user visible xattr on inode should fail if " + "reaching limit."); } catch (IOException e) { GenericTestUtils.AssertExceptionContains("Cannot add additional XAttr " + "to inode, would exceed limit" , e); } }
// Builds a CertificateVerification for every certificate the validation context
// needs, then folds the individual results into a single summary status:
//   - an invalid validity period        -> INVALID "certificate.not.valid" (stops the loop)
//   - a REVOKED certificate status      -> INVALID "certificate.revoked" (stops the loop)
//   - UNKNOWN/null status               -> UNDETERMINED "revocation.unknown" (loop continues,
//                                          so a later INVALID can still override)
//   - no revocation data at all         -> UNDETERMINED "no.revocation.data"
// Finally, a missing trusted-list service (or a null trustedListInformation)
// forces the summary to INVALID "no.trustedlist.service.was.found".
// NOTE(review): certificatePathVerification is assumed to be initialized by a
// field initializer elsewhere — the first loop calls AddItem on it with no
// null check; confirm the declaration site.
/// <summary>The default constructor for CertPathRevocationAnalysis.</summary> /// <remarks>The default constructor for CertPathRevocationAnalysis.</remarks> /// <param name="ctx"></param> /// <param name="info"></param> public CertPathRevocationAnalysis(ValidationContext ctx, TrustedListInformation info ) { summary = new Result(); this.trustedListInformation = info; if (ctx != null && ctx.GetNeededCertificates() != null) { foreach (CertificateAndContext cert in ctx.GetNeededCertificates()) { CertificateVerification verif = new CertificateVerification(cert, ctx); certificatePathVerification.AddItem(verif); } } summary.SetStatus(Result.ResultStatus.VALID, null); if (certificatePathVerification != null) { foreach (CertificateVerification verif in certificatePathVerification) { if (verif.GetValidityPeriodVerification().IsInvalid()) { summary.SetStatus(Result.ResultStatus.INVALID, "certificate.not.valid"); break; } if (verif.GetCertificateStatus() != null) { if (verif.GetCertificateStatus().GetStatus() == CertificateValidity.REVOKED) { summary.SetStatus(Result.ResultStatus.INVALID, "certificate.revoked"); break; } else { if (verif.GetCertificateStatus().GetStatus() == CertificateValidity.UNKNOWN || verif .GetCertificateStatus().GetStatus() == null) { summary.SetStatus(Result.ResultStatus.UNDETERMINED, "revocation.unknown"); } } } else { summary.SetStatus(Result.ResultStatus.UNDETERMINED, "no.revocation.data"); } } } if (trustedListInformation != null) { if (!trustedListInformation.IsServiceWasFound()) { summary.SetStatus(Result.ResultStatus.INVALID, "no.trustedlist.service.was.found" ); } } else { summary.SetStatus(Result.ResultStatus.INVALID, "no.trustedlist.service.was.found" ); } }
// Rebuilds the completion-event lists from the loaded job history: collects
// every attempt of every task, sorts them with the comparator declared
// elsewhere as _IComparer_237, then emits one TaskAttemptCompletionEvent per
// attempt with a monotonically increasing event id. Attempt run time stays -1
// unless both launch and finish timestamps are non-zero. Attempt states that
// do not map onto a TaskAttemptCompletionEventStatus default to KILLED (with a
// warning). Map-task attempts are additionally mirrored into
// mapCompletionEvents, which is pre-sized with the map-attempt count.
private void ConstructTaskAttemptCompletionEvents() { LoadAllTasks(); completionEvents = new List <TaskAttemptCompletionEvent>(); IList <TaskAttempt> allTaskAttempts = new List <TaskAttempt>(); int numMapAttempts = 0; foreach (KeyValuePair <TaskId, Task> taskEntry in tasks) { Task task = taskEntry.Value; foreach (KeyValuePair <TaskAttemptId, TaskAttempt> taskAttemptEntry in task.GetAttempts ()) { TaskAttempt taskAttempt = taskAttemptEntry.Value; allTaskAttempts.AddItem(taskAttempt); if (task.GetType() == TaskType.Map) { ++numMapAttempts; } } } allTaskAttempts.Sort(new _IComparer_237()); mapCompletionEvents = new AList <TaskAttemptCompletionEvent>(numMapAttempts); int eventId = 0; foreach (TaskAttempt taskAttempt_1 in allTaskAttempts) { TaskAttemptCompletionEvent tace = Org.Apache.Hadoop.Yarn.Util.Records.NewRecord <TaskAttemptCompletionEvent >(); int attemptRunTime = -1; if (taskAttempt_1.GetLaunchTime() != 0 && taskAttempt_1.GetFinishTime() != 0) { attemptRunTime = (int)(taskAttempt_1.GetFinishTime() - taskAttempt_1.GetLaunchTime ()); } // Default to KILLED TaskAttemptCompletionEventStatus taceStatus = TaskAttemptCompletionEventStatus.Killed; string taStateString = taskAttempt_1.GetState().ToString(); try { taceStatus = TaskAttemptCompletionEventStatus.ValueOf(taStateString); } catch (Exception) { Log.Warn("Cannot constuct TACEStatus from TaskAtemptState: [" + taStateString + "] for taskAttemptId: [" + taskAttempt_1.GetID() + "]. Defaulting to KILLED"); } tace.SetAttemptId(taskAttempt_1.GetID()); tace.SetAttemptRunTime(attemptRunTime); tace.SetEventId(eventId++); tace.SetMapOutputServerAddress(taskAttempt_1.GetAssignedContainerMgrAddress()); tace.SetStatus(taceStatus); completionEvents.AddItem(tace); if (taskAttempt_1.GetID().GetTaskId().GetTaskType() == TaskType.Map) { mapCompletionEvents.AddItem(tace); } } }
/// <summary>Adds a value to the map, assigning it a sequence number and returning it.</summary>
/// <remarks>
/// Adds a value to the map, assigning it a sequence number and returning it.
/// Sequence numbers start at 1 and increment from there.
/// </remarks>
public long AddValue(string value)
{
    _lock.EnterWriteLock();
    try
    {
        // Capture the assigned number while still holding the lock: the
        // original returned the shared field AFTER releasing the lock, so a
        // concurrent AddValue could bump lastSequence first and this call
        // would report the wrong sequence number.
        long assigned = ++lastSequence;
        sequences.AddItem(assigned);
        values.AddItem(value);
        return assigned;
    }
    finally
    {
        // The original had no try/finally: if AddItem threw, the write lock
        // was never released and every later caller deadlocked.
        _lock.ExitWriteLock();
    }
}
/// <summary>Add a new push-only URI to the end of the list of URIs.</summary>
/// <remarks>Add a new push-only URI to the end of the list of URIs.</remarks>
/// <param name="toAdd">the new URI to add to this remote.</param>
/// <returns>true if the URI was added; false if it already exists.</returns>
public virtual bool AddPushURI(URIish toAdd)
{
    // Short-circuit: duplicates are rejected before touching the list.
    return !pushURIs.Contains(toAdd) && pushURIs.AddItem(toAdd);
}
/// <summary>Adds the given header to the group.</summary>
/// <remarks>
/// Adds the given header to the group. The order in which this header was
/// added is preserved.
/// </remarks>
/// <param name="header">the header to add</param>
public virtual void AddHeader(Header header)
{
    // Null headers are silently ignored.
    if (header != null)
    {
        headers.AddItem(header);
    }
}
/// <summary>Build xattr name with prefix as <code>XAttr</code> list.</summary>
public static IList<XAttr> BuildXAttrAsList(string name)
{
    // Wrap the single built xattr in a one-element list.
    IList<XAttr> result = Lists.NewArrayListWithCapacity(1);
    result.AddItem(BuildXAttr(name));
    return result;
}
/// <summary>Records an I/O error, lazily allocating the error list on first use.</summary>
internal virtual void AddError(IOException ioe)
{
    errors = errors ?? new List<IOException>();
    errors.AddItem(ioe);
}
/// <summary>Sets up a table to be a consistent style.</summary>
/// <param name="html">the HTML to use to render.</param>
/// <param name="tableId">the ID of the table to set styles on.</param>
/// <param name="innerStyles">any other styles to add to the table.</param>
protected internal virtual void SetTableStyles(Hamlet.HTML<HtmlPage._> html, string tableId, params string[] innerStyles)
{
    IList<string> styles = Lists.NewArrayList();
    // Shared datatable chrome: pagination labels, progress-bar width, and the
    // floating "processing" indicator (that last rule is deliberately split
    // across two list entries).
    styles.AddItem(StringHelper.Join('#', tableId, "_paginate span {font-weight:normal}"));
    styles.AddItem(StringHelper.Join('#', tableId, " .progress {width:8em}"));
    styles.AddItem(StringHelper.Join('#', tableId, "_processing {top:-1.5em; font-size:1em;"));
    styles.AddItem(" color:#000; background:rgba(255, 255, 255, 0.8)}");
    // Caller-supplied, table-specific overrides.
    foreach (string extra in innerStyles)
    {
        styles.AddItem(StringHelper.Join('#', tableId, " ", extra));
    }
    html.Style(Sharpen.Collections.ToArray(styles));
}
/// <summary>
/// Instantiates the given group-mapping provider class with a provider-specific
/// configuration and appends it to the provider list.
/// </summary>
private void AddMappingProvider(string providerName, Type providerClass)
{
    Configuration conf = PrepareConf(providerName);
    GroupMappingServiceProvider provider =
        (GroupMappingServiceProvider)ReflectionUtils.NewInstance(providerClass, conf);
    providersList.AddItem(provider);
}
/// <summary>Add a new push RefSpec to this remote.</summary>
/// <remarks>Add a new push RefSpec to this remote.</remarks>
/// <param name="s">the new specification to add.</param>
/// <returns>true if the specification was added; false if it already exists.</returns>
public virtual bool AddPushRefSpec(RefSpec s)
{
    // Short-circuit: duplicates are rejected before touching the list.
    return !push.Contains(s) && push.AddItem(s);
}
/// <summary>
/// The
/// <see cref="ResourceScheduler"/>
/// is allocating data-local resources to the
/// application.
/// </summary>
/// <param name="allocatedContainers">resources allocated to the application</param>
private void AllocateNodeLocal(SchedulerNode node, Priority priority, ResourceRequest nodeLocalRequest, Container container, IList<ResourceRequest> resourceRequests)
{
    lock (this)
    {
        // Update future requirements at every locality level:
        // the node itself, its rack, and the ANY (off-rack) bucket.
        DecResourceRequest(node.GetNodeName(), priority, nodeLocalRequest);
        ResourceRequest rackRequest = requests[priority][node.GetRackName()];
        DecResourceRequest(node.GetRackName(), priority, rackRequest);
        ResourceRequest anyRequest = requests[priority][ResourceRequest.Any];
        DecrementOutstanding(anyRequest);
        // Record cloned snapshots of all three requests for recovery.
        resourceRequests.AddItem(CloneResourceRequest(nodeLocalRequest));
        resourceRequests.AddItem(CloneResourceRequest(rackRequest));
        resourceRequests.AddItem(CloneResourceRequest(anyRequest));
    }
}
/// <summary>Add a new fetch RefSpec to this remote.</summary>
/// <remarks>Add a new fetch RefSpec to this remote.</remarks>
/// <param name="s">the new specification to add.</param>
/// <returns>true if the specification was added; false if it already exists.</returns>
public virtual bool AddFetchRefSpec(RefSpec s)
{
    // Short-circuit: duplicates are rejected before touching the list.
    return !fetch.Contains(s) && fetch.AddItem(s);
}
/// <summary>Creates a tracked temp file (suffix "tdat") inside the trash directory.</summary>
/// <exception cref="System.IO.IOException"></exception>
private FilePath CreateFile(string @string)
{
    trash.Mkdirs();
    FilePath created = FilePath.CreateTempFile(@string, "tdat", trash);
    // Track the file so teardown can delete it.
    files.AddItem(created);
    return created;
}
/// <summary>Add a new URI to the end of the list of URIs.</summary>
/// <remarks>Add a new URI to the end of the list of URIs.</remarks>
/// <param name="toAdd">the new URI to add to this remote.</param>
/// <returns>true if the URI was added; false if it already exists.</returns>
public virtual bool AddURI(URIish toAdd)
{
    // Short-circuit: duplicates are rejected before touching the list.
    return !uris.Contains(toAdd) && uris.AddItem(toAdd);
}
/// <summary>Add a state change event to the lifecycle history</summary>
private void RecordLifecycleEvent()
{
    // Capture the timestamp and current state in a new history entry.
    LifecycleEvent entry = new LifecycleEvent();
    entry.time = Runtime.CurrentTimeMillis();
    entry.state = GetServiceState();
    lifecycleHistory.AddItem(entry);
}
/// <summary>Create an object.</summary>
/// <remarks>
/// Allocates one empty queue per priority level and resets each level's
/// replication index to 0.
/// </remarks>
internal UnderReplicatedBlocks()
{
    for (int level = 0; level < Level; level++)
    {
        priorityQueues.AddItem(new LightWeightLinkedSet<Block>());
        priorityToReplIdx[level] = 0;
    }
}
// Timed delete operation: removes GetDeleteFile() (non-recursive) while
// measuring elapsed time, then appends stats to the base operation output —
// OkTimeTaken + Successes on success, Failures when fs.Delete returns false or
// throws IOException, and NotFound for FileNotFoundException (which is counted
// separately rather than as a failure).
internal override IList <OperationOutput> Run(FileSystem fs) { // Operation IList <OperationOutput> @out = base.Run(fs); try { Path fn = GetDeleteFile(); long timeTaken = 0; bool deleteStatus = false; { long startTime = Timer.Now(); deleteStatus = fs.Delete(fn, false); timeTaken = Timer.Elapsed(startTime); } // collect the stats if (!deleteStatus) { @out.AddItem(new OperationOutput(OperationOutput.OutputType.Long, GetType(), ReportWriter .Failures, 1L)); Log.Info("Could not delete " + fn); } else { @out.AddItem(new OperationOutput(OperationOutput.OutputType.Long, GetType(), ReportWriter .OkTimeTaken, timeTaken)); @out.AddItem(new OperationOutput(OperationOutput.OutputType.Long, GetType(), ReportWriter .Successes, 1L)); Log.Info("Could delete " + fn); } } catch (FileNotFoundException e) { @out.AddItem(new OperationOutput(OperationOutput.OutputType.Long, GetType(), ReportWriter .NotFound, 1L)); Log.Warn("Error with deleting", e); } catch (IOException e) { @out.AddItem(new OperationOutput(OperationOutput.OutputType.Long, GetType(), ReportWriter .Failures, 1L)); Log.Warn("Error with deleting", e); } return(@out); }
// Timed mkdir operation: creates GetDirectory() while measuring elapsed time,
// then appends stats to the base operation output — OkTimeTaken + Successes on
// success, Failures when fs.Mkdirs returns false or throws IOException, and
// NotFound for FileNotFoundException (counted separately from failures).
internal override IList <OperationOutput> Run(FileSystem fs) { // Operation IList <OperationOutput> @out = base.Run(fs); try { Path dir = GetDirectory(); bool mkRes = false; long timeTaken = 0; { long startTime = Timer.Now(); mkRes = fs.Mkdirs(dir); timeTaken = Timer.Elapsed(startTime); } // log stats if (mkRes) { @out.AddItem(new OperationOutput(OperationOutput.OutputType.Long, GetType(), ReportWriter .OkTimeTaken, timeTaken)); @out.AddItem(new OperationOutput(OperationOutput.OutputType.Long, GetType(), ReportWriter .Successes, 1L)); Log.Info("Made directory " + dir); } else { @out.AddItem(new OperationOutput(OperationOutput.OutputType.Long, GetType(), ReportWriter .Failures, 1L)); Log.Warn("Could not make " + dir); } } catch (FileNotFoundException e) { @out.AddItem(new OperationOutput(OperationOutput.OutputType.Long, GetType(), ReportWriter .NotFound, 1L)); Log.Warn("Error with mkdir", e); } catch (IOException e) { @out.AddItem(new OperationOutput(OperationOutput.OutputType.Long, GetType(), ReportWriter .Failures, 1L)); Log.Warn("Error with mkdir", e); } return(@out); }
// Monitor-synchronized variant of AddValue: assigns the next sequence number
// and appends both the sequence and the value while holding the lock; the
// return happens inside the lock, so the reported number is the one assigned.
// NOTE(review): lock(this) exposes the monitor to external callers; a private
// lock object would be safer, but changing it could break code that already
// synchronizes on this instance — confirm before refactoring.
public virtual long AddValue(string value) { lock (this) { sequences.AddItem(++lastSequence); values.AddItem(value); return(lastSequence); } }
/// <summary>
/// Pre-compiles one filename pattern per requested NameNode file type, of the
/// form "&lt;name&gt;_&lt;txid&gt;", for later storage-directory inspection.
/// </summary>
internal FSImageTransactionalStorageInspector(EnumSet<NNStorage.NameNodeFile> nnfs)
{
    foreach (NNStorage.NameNodeFile nnf in nnfs)
    {
        namePatterns.AddItem(Sharpen.Pattern.Compile(nnf.GetName() + "_(\\d+)"));
    }
}
/// <summary>Adds a Column to the new table.</summary>
/// <remarks>Adds a Column to the new table.</remarks>
public virtual HealthMarketScience.Jackcess.TableBuilder AddColumn(Column column)
{
    // Optionally rewrite the column name so it is a legal Access identifier.
    if (_escapeIdentifiers)
    {
        column.SetName(Database.EscapeIdentifier(column.GetName()));
    }
    _columns.AddItem(column);
    // Fluent builder: return self for chaining.
    return this;
}
/// <summary>
/// Test double for DNS lookup: records every queried host name, then answers
/// from the canned resolvedHosts map or fails like a real resolver would.
/// </summary>
/// <exception cref="UnknownHostException"/>
internal override IPAddress GetInetAddressByName(string host)
{
    hostSearches.AddItem(host);
    if (resolvedHosts.Contains(host))
    {
        return resolvedHosts[host];
    }
    throw new UnknownHostException(host);
}
// Randomized round-trip test: repeatedly adds batches of 1-5 generated xattrs
// via FSDirXAttrOp.SetINodeXAttrs (verifying presence after each batch) until
// numGeneratedXAttrs are present, then removes random batches via
// FilterINodeXAttrs, asserting the removed count and re-verifying, until none
// remain. The fixed Random seed (0xFEEDA) keeps the sequence deterministic
// across runs.
/// <summary>Test setting and removing multiple xattrs via single operations</summary> /// <exception cref="System.Exception"/> public virtual void TestXAttrMultiSetRemove() { IList <XAttr> existingXAttrs = Lists.NewArrayListWithCapacity(0); // Keep adding a random number of xattrs and verifying until exhausted Random rand = new Random(unchecked ((int)(0xFEEDA))); int numExpectedXAttrs = 0; while (numExpectedXAttrs < numGeneratedXAttrs) { Log.Info("Currently have " + numExpectedXAttrs + " xattrs"); int numToAdd = rand.Next(5) + 1; IList <XAttr> toAdd = Lists.NewArrayListWithCapacity(numToAdd); for (int i = 0; i < numToAdd; i++) { if (numExpectedXAttrs >= numGeneratedXAttrs) { break; } toAdd.AddItem(generatedXAttrs[numExpectedXAttrs]); numExpectedXAttrs++; } Log.Info("Attempting to add " + toAdd.Count + " XAttrs"); for (int i_1 = 0; i_1 < toAdd.Count; i_1++) { Log.Info("Will add XAttr " + toAdd[i_1]); } IList <XAttr> newXAttrs = FSDirXAttrOp.SetINodeXAttrs(fsdir, existingXAttrs, toAdd , EnumSet.Of(XAttrSetFlag.Create)); VerifyXAttrsPresent(newXAttrs, numExpectedXAttrs); existingXAttrs = newXAttrs; } // Keep removing a random number of xattrs and verifying until all gone while (numExpectedXAttrs > 0) { Log.Info("Currently have " + numExpectedXAttrs + " xattrs"); int numToRemove = rand.Next(5) + 1; IList <XAttr> toRemove = Lists.NewArrayListWithCapacity(numToRemove); for (int i = 0; i < numToRemove; i++) { if (numExpectedXAttrs == 0) { break; } toRemove.AddItem(generatedXAttrs[numExpectedXAttrs - 1]); numExpectedXAttrs--; } int expectedNumToRemove = toRemove.Count; Log.Info("Attempting to remove " + expectedNumToRemove + " XAttrs"); IList <XAttr> removedXAttrs = Lists.NewArrayList(); IList <XAttr> newXAttrs = FSDirXAttrOp.FilterINodeXAttrs(existingXAttrs, toRemove, removedXAttrs); NUnit.Framework.Assert.AreEqual("Unexpected number of removed XAttrs", expectedNumToRemove , removedXAttrs.Count); VerifyXAttrsPresent(newXAttrs, numExpectedXAttrs); existingXAttrs = 
newXAttrs; } }
/// <summary>Queues an object for processing unless it is already pending.</summary>
private void AddObject(RevObject o)
{
    // Guard: the IN_PENDING flag marks objects already queued.
    if ((o.flags & IN_PENDING) != 0)
    {
        return;
    }
    o.flags |= IN_PENDING;
    rootObjects.AddItem(o);
    pendingObjects.Add(o);
}
/// <summary>
/// Parses a space and/or comma separated sequence of server specifications
/// of the form <i>hostname</i> or <i>hostname:port</i>.
/// </summary>
/// <remarks>
/// Parses a space and/or comma separated sequence of server specifications
/// of the form <i>hostname</i> or <i>hostname:port</i>. If
/// the specs string is null, defaults to localhost:defaultPort.
/// </remarks>
/// <param name="specs">server specs (see description)</param>
/// <param name="defaultPort">the default port if not specified</param>
/// <returns>a list of InetSocketAddress objects.</returns>
public static IList<IPEndPoint> Parse(string specs, int defaultPort)
{
    IList<IPEndPoint> addresses = Lists.NewArrayList();
    if (specs == null)
    {
        // No spec string at all: fall back to localhost on the default port.
        addresses.AddItem(new IPEndPoint("localhost", defaultPort));
        return addresses;
    }
    // NOTE(review): Split here appears to be the Sharpen regex-style
    // extension — "[ ,]+" splits on runs of spaces and/or commas; confirm
    // before porting away from Sharpen.
    foreach (string spec in specs.Split("[ ,]+"))
    {
        addresses.AddItem(NetUtils.CreateSocketAddr(spec, defaultPort));
    }
    return addresses;
}
/// <summary>Adds a value to the map, assigning it a sequence number and returning it.</summary>
/// <remarks>
/// Adds a value to the map, assigning it a sequence number and returning it.
/// Sequence numbers start at 1 and increment from there.
/// </remarks>
public long AddValue(string value)
{
    lock (locker)
    {
        // Assign, record, and return the sequence while holding the lock.
        long assigned = ++lastSequence;
        sequences.AddItem(assigned);
        values.AddItem(value);
        return assigned;
    }
}
/// <summary>Marks an object as a common base shared with the remote side.</summary>
private void AddCommonBase(RevObject o)
{
    // Guard: already flagged as common, nothing to do.
    if (o.Has(COMMON))
    {
        return;
    }
    o.Add(COMMON);
    commonBase.AddItem(o);
    // Invalidate the cached give-up decision so it is recomputed.
    okToGiveUp = null;
}
// Emits the jQuery DataTables initialization JavaScript for every valid table
// id configured under DatatablesId, plus an optional selector-based group:
// each table's init string (or the default) gets the state-save/state-load
// callbacks spliced in immediately after its opening '{', then the resulting
// .dataTable(...) call — and any configured post-init script — is appended to
// the output list.
// NOTE(review): the stateSaveInit fragment deliberately blanks all saved
// per-column search terms before persisting settings to sessionStorage.
protected internal virtual void InitDataTables(IList <string> list) { string defaultInit = "{bJQueryUI: true, sPaginationType: 'full_numbers'}"; string stateSaveInit = "bStateSave : true, " + "\"fnStateSave\": function (oSettings, oData) { " + " data = oData.aoSearchCols;" + "for(i =0 ; i < data.length; i ++) {" + "data[i].sSearch = \"\"" + "}" + " sessionStorage.setItem( oSettings.sTableId, JSON.stringify(oData) ); }, " + "\"fnStateLoad\": function (oSettings) { " + "return JSON.parse( sessionStorage.getItem(oSettings.sTableId) );}, "; foreach (string id in StringHelper.Split($(DatatablesId))) { if (Html.IsValidId(id)) { string init = $(InitID(Datatables, id)); if (init.IsEmpty()) { init = defaultInit; } // for inserting stateSaveInit int pos = init.IndexOf('{') + 1; init = new StringBuilder(init).Insert(pos, stateSaveInit).ToString(); list.AddItem(StringHelper.Join(id, "DataTable = $('#", id, "').dataTable(", init , ").fnSetFilteringDelay(188);")); string postInit = $(PostInitID(Datatables, id)); if (!postInit.IsEmpty()) { list.AddItem(postInit); } } } string selector = $(DatatablesSelector); if (!selector.IsEmpty()) { string init = $(InitSelector(Datatables)); if (init.IsEmpty()) { init = defaultInit; } int pos = init.IndexOf('{') + 1; init = new StringBuilder(init).Insert(pos, stateSaveInit).ToString(); list.AddItem(StringHelper.Join(" $('", StringEscapeUtils.EscapeJavaScript(selector ), "').dataTable(", init, ").fnSetFilteringDelay(288);")); } }
// Parses an fnmatch-style character group (the text between '[' and ']').
// A leading '!' inverts the whole group. REGEX_PATTERN repeatedly peels the
// next element off the front of the pattern — a range like "a-z", or a POSIX
// class like "[:alpha:]" — and each is translated into one or more
// CharacterPattern matchers; composite classes (alnum, graph, print, word,
// xdigit, blank, cntrl) expand to several matchers. An unrecognized POSIX
// class raises InvalidPatternException against the whole original pattern.
// After all ranges/classes are consumed, every remaining character is matched
// literally via OneCharacterPattern.
/// <exception cref="NGit.Errors.InvalidPatternException"></exception> internal GroupHead(string pattern, string wholePattern) : base(false) { this.characterClasses = new AList<GroupHead.CharacterPattern>(); this.inverse = pattern.StartsWith("!"); if (inverse) { pattern = Sharpen.Runtime.Substring(pattern, 1); } Matcher matcher = REGEX_PATTERN.Matcher(pattern); while (matcher.Find()) { string characterClass = matcher.Group(0); if (characterClass.Length == 3 && characterClass[1] == '-') { char start = characterClass[0]; char end = characterClass[2]; characterClasses.AddItem(new GroupHead.CharacterRange(start, end)); } else { if (characterClass.Equals("[:alnum:]")) { characterClasses.AddItem(GroupHead.LetterPattern.INSTANCE); characterClasses.AddItem(GroupHead.DigitPattern.INSTANCE); } else { if (characterClass.Equals("[:alpha:]")) { characterClasses.AddItem(GroupHead.LetterPattern.INSTANCE); } else { if (characterClass.Equals("[:blank:]")) { characterClasses.AddItem(new GroupHead.OneCharacterPattern(' ')); characterClasses.AddItem(new GroupHead.OneCharacterPattern('\t')); } else { if (characterClass.Equals("[:cntrl:]")) { characterClasses.AddItem(new GroupHead.CharacterRange('\u0000', '\u001F')); characterClasses.AddItem(new GroupHead.OneCharacterPattern('\u007F')); } else { if (characterClass.Equals("[:digit:]")) { characterClasses.AddItem(GroupHead.DigitPattern.INSTANCE); } else { if (characterClass.Equals("[:graph:]")) { characterClasses.AddItem(new GroupHead.CharacterRange('\u0021', '\u007E')); characterClasses.AddItem(GroupHead.LetterPattern.INSTANCE); characterClasses.AddItem(GroupHead.DigitPattern.INSTANCE); } else { if (characterClass.Equals("[:lower:]")) { characterClasses.AddItem(GroupHead.LowerPattern.INSTANCE); } else { if (characterClass.Equals("[:print:]")) { characterClasses.AddItem(new GroupHead.CharacterRange('\u0020', '\u007E')); characterClasses.AddItem(GroupHead.LetterPattern.INSTANCE); characterClasses.AddItem(GroupHead.DigitPattern.INSTANCE); } 
else { if (characterClass.Equals("[:punct:]")) { characterClasses.AddItem(GroupHead.PunctPattern.INSTANCE); } else { if (characterClass.Equals("[:space:]")) { characterClasses.AddItem(GroupHead.WhitespacePattern.INSTANCE); } else { if (characterClass.Equals("[:upper:]")) { characterClasses.AddItem(GroupHead.UpperPattern.INSTANCE); } else { if (characterClass.Equals("[:xdigit:]")) { characterClasses.AddItem(new GroupHead.CharacterRange('0', '9')); characterClasses.AddItem(new GroupHead.CharacterRange('a', 'f')); characterClasses.AddItem(new GroupHead.CharacterRange('A', 'F')); } else { if (characterClass.Equals("[:word:]")) { characterClasses.AddItem(new GroupHead.OneCharacterPattern('_')); characterClasses.AddItem(GroupHead.LetterPattern.INSTANCE); characterClasses.AddItem(GroupHead.DigitPattern.INSTANCE); } else { string message = string.Format(MessageFormat.Format(JGitText.Get().characterClassIsNotSupported , characterClass)); throw new InvalidPatternException(message, wholePattern); } } } } } } } } } } } } } } pattern = matcher.ReplaceFirst(string.Empty); matcher.Reset(pattern); } // pattern contains now no ranges for (int i = 0; i < pattern.Length; i++) { char c = pattern[i]; characterClasses.AddItem(new GroupHead.OneCharacterPattern(c)); } }
/// <summary>
/// Returns (lazily computing and caching) the keys whose values differ between
/// the current revision and the incoming new revision.
/// </summary>
/// <remarks>
/// A key counts as changed when its value differs or is removed, plus any key
/// that exists only in the new revision. The bookkeeping keys "_rev" (and
/// "_id" for additions) are excluded.
/// </remarks>
public virtual IList<string> GetChangedKeys()
{
    if (changedKeys == null)
    {
        changedKeys = new AList<string>();
        IDictionary<string, object> cur = GetCurrentRevision().GetProperties();
        IDictionary<string, object> nuu = newRev.GetProperties();
        // Keys present in the current revision whose value changed or vanished.
        foreach (string key in cur.Keys)
        {
            // BUG FIX: the original invoked cur.Get(key).Equals(...) directly,
            // which threw NullReferenceException whenever a property value was
            // null; object.Equals(a, b) is null-safe and otherwise identical.
            if (!object.Equals(cur.Get(key), nuu.Get(key)) && !key.Equals("_rev"))
            {
                changedKeys.AddItem(key);
            }
        }
        // Keys that only appear in the new revision.
        foreach (string key_1 in nuu.Keys)
        {
            if (cur.Get(key_1) == null && !key_1.Equals("_rev") && !key_1.Equals("_id"))
            {
                changedKeys.AddItem(key_1);
            }
        }
    }
    return changedKeys;
}
// Returns the winning (current) revision id of the document, reporting state
// through the out-lists: appends true to outIsDeleted when the winner is a
// deletion, and true to outIsConflict when a non-deleted winner has a second
// non-deleted current revision alongside it. The ORDER BY clause makes
// non-deleted revisions win first, breaking ties by descending revid; LIMIT 2
// fetches just enough rows to detect a conflict. The cursor is always closed
// in the finally block; SQL errors are wrapped as InternalServerError.
/// <summary>Returns the rev ID of the 'winning' revision of this document, and whether it's deleted.</summary> /// <exception cref="Couchbase.Lite.CouchbaseLiteException"></exception> internal String WinningRevIDOfDoc(Int64 docNumericId, IList<Boolean> outIsDeleted, IList<Boolean> outIsConflict) { Cursor cursor = null; var args = new [] { Convert.ToString(docNumericId) }; String revId = null; var sql = "SELECT revid, deleted FROM revs WHERE doc_id=? and current=1" + " ORDER BY deleted asc, revid desc LIMIT 2"; try { cursor = StorageEngine.RawQuery(sql, args); cursor.MoveToNext(); if (!cursor.IsAfterLast()) { revId = cursor.GetString(0); var deleted = cursor.GetInt(1) > 0; if (deleted) { outIsDeleted.AddItem(true); } // The document is in conflict if there are two+ result rows that are not deletions. var hasNextResult = cursor.MoveToNext(); if (hasNextResult) { var isNextDeleted = cursor.GetInt(1) > 0; var isInConflict = !deleted && hasNextResult && !isNextDeleted; if (isInConflict) { outIsConflict.AddItem(true); } } } } catch (SQLException e) { Log.E(Tag, "Error", e); throw new CouchbaseLiteException("Error", e, new Status(StatusCode.InternalServerError)); } finally { if (cursor != null) { cursor.Close(); } } return revId; }
// Runs a named view query, or _all_docs when viewName is null/empty. For
// IndexUpdateMode.Before (or a never-indexed view) the index is updated
// synchronously before querying; for IndexUpdateMode.After with a stale index,
// UpdateIndex is invoked in a try/catch that logs rather than rethrows (the
// inline comments suggest a background refresh was intended — confirm whether
// this should run on another thread). The last indexed (or global) sequence is
// appended to outLastSequence so callers can detect staleness, and the query
// duration is logged.
// NOTE(review): the stale==After branch has no braces around its if/try —
// behavior is correct as written but fragile to edit.
/// <exception cref="Couchbase.Lite.CouchbaseLiteException"></exception> internal IEnumerable<QueryRow> QueryViewNamed(String viewName, QueryOptions options, IList<Int64> outLastSequence) { var before = Runtime.CurrentTimeMillis(); var lastSequence = 0L; IEnumerable<QueryRow> rows; if (!String.IsNullOrEmpty (viewName)) { var view = GetView (viewName); if (view == null) { throw new CouchbaseLiteException (StatusCode.NotFound); } lastSequence = view.LastSequenceIndexed; if (options.GetStale () == IndexUpdateMode.Before || lastSequence <= 0) { view.UpdateIndex (); lastSequence = view.LastSequenceIndexed; } else { if (options.GetStale () == IndexUpdateMode.After && lastSequence < GetLastSequenceNumber()) // NOTE: The exception is handled inside the thread. // TODO: Consider using the async keyword instead. try { view.UpdateIndex(); } catch (CouchbaseLiteException e) { Log.E(Tag, "Error updating view index on background thread", e); } } rows = view.QueryWithOptions (options); } else { // nil view means query _all_docs // note: this is a little kludgy, but we have to pull out the "rows" field from the // result dictionary because that's what we want. should be refactored, but // it's a little tricky, so postponing. var allDocsResult = GetAllDocs (options); rows = (IList<QueryRow>)allDocsResult.Get ("rows"); lastSequence = GetLastSequenceNumber (); } outLastSequence.AddItem(lastSequence); var delta = Runtime.CurrentTimeMillis() - before; Log.D(Tag, String.Format("Query view {0} completed in {1} milliseconds", viewName, delta)); return rows; }
// Reads every setting of the named remote from the Config: fetch URLs and push
// URLs (each with the corresponding insteadOf / pushInsteadOf substitution
// applied), fetch and push RefSpec lists, the upload-pack and receive-pack
// program paths (falling back to their defaults when unset), the tag-fetching
// option, the mirror flag, and the transfer timeout. Succeeds even when the
// remote is absent from the config — all lists simply end up empty.
/// <summary>Parse a remote block from an existing configuration file.</summary> /// <remarks> /// Parse a remote block from an existing configuration file. /// <p> /// This constructor succeeds even if the requested remote is not defined /// within the supplied configuration file. If that occurs then there will be /// no URIs and no ref specifications known to the new instance. /// </remarks> /// <param name="rc"> /// the existing configuration to get the remote settings from. /// The configuration must already be loaded into memory. /// </param> /// <param name="remoteName">subsection key indicating the name of this remote.</param> /// <exception cref="Sharpen.URISyntaxException">one of the URIs within the remote's configuration is invalid. /// </exception> public RemoteConfig(Config rc, string remoteName) { name = remoteName; oldName = remoteName; string[] vlst; string val; vlst = rc.GetStringList(SECTION, name, KEY_URL); IDictionary<string, string> insteadOf = GetReplacements(rc, KEY_INSTEADOF); uris = new AList<URIish>(vlst.Length); foreach (string s in vlst) { uris.AddItem(new URIish(ReplaceUri(s, insteadOf))); } IDictionary<string, string> pushInsteadOf = GetReplacements(rc, KEY_PUSHINSTEADOF ); vlst = rc.GetStringList(SECTION, name, KEY_PUSHURL); pushURIs = new AList<URIish>(vlst.Length); foreach (string s_1 in vlst) { pushURIs.AddItem(new URIish(ReplaceUri(s_1, pushInsteadOf))); } vlst = rc.GetStringList(SECTION, name, KEY_FETCH); fetch = new AList<RefSpec>(vlst.Length); foreach (string s_2 in vlst) { fetch.AddItem(new RefSpec(s_2)); } vlst = rc.GetStringList(SECTION, name, KEY_PUSH); push = new AList<RefSpec>(vlst.Length); foreach (string s_3 in vlst) { push.AddItem(new RefSpec(s_3)); } val = rc.GetString(SECTION, name, KEY_UPLOADPACK); if (val == null) { val = DEFAULT_UPLOAD_PACK; } uploadpack = val; val = rc.GetString(SECTION, name, KEY_RECEIVEPACK); if (val == null) { val = DEFAULT_RECEIVE_PACK; } receivepack = val; val = rc.GetString(SECTION, name, 
KEY_TAGOPT); tagopt = NGit.Transport.TagOpt.FromOption(val); mirror = rc.GetBoolean(SECTION, name, KEY_MIRROR, DEFAULT_MIRROR); timeout = rc.GetInt(SECTION, name, KEY_TIMEOUT, 0); }
// Greedy rename/copy matching by content similarity. BuildMatrix fills
// 'matrix' with encoded (score, srcIdx, dstIdx) entries in ascending order, so
// walking it backwards visits the highest scores first. Each destination is
// claimed at most once (its slot in dsts is nulled after a match). The first
// match claiming a DELETE source flips it to RENAME; any further matches
// against that same source are emitted as COPYs. Finally both side lists are
// compacted to drop consumed entries.
/// <exception cref="System.IO.IOException"></exception> internal virtual void Compute(ProgressMonitor pm) { if (pm == null) { pm = NullProgressMonitor.INSTANCE; } pm.BeginTask(JGitText.Get().renamesFindingByContent, 2 * srcs.Count * dsts.Count); // int mNext = BuildMatrix(pm); @out = new AList<DiffEntry>(Math.Min(mNext, dsts.Count)); // Match rename pairs on a first come, first serve basis until // we have looked at everything that is above our minimum score. // for (--mNext; mNext >= 0; mNext--) { long ent = matrix[mNext]; int sIdx = SrcFile(ent); int dIdx = DstFile(ent); DiffEntry s = srcs[sIdx]; DiffEntry d = dsts[dIdx]; if (d == null) { pm.Update(1); continue; } // was already matched earlier DiffEntry.ChangeType type; if (s.changeType == DiffEntry.ChangeType.DELETE) { // First use of this source file. Tag it as a rename so we // later know it is already been used as a rename, other // matches (if any) will claim themselves as copies instead. // s.changeType = DiffEntry.ChangeType.RENAME; type = DiffEntry.ChangeType.RENAME; } else { type = DiffEntry.ChangeType.COPY; } @out.AddItem(DiffEntry.Pair(type, s, d, Score(ent))); dsts.Set(dIdx, null); // Claim the destination was matched. pm.Update(1); } srcs = CompactSrcList(srcs); dsts = CompactDstList(dsts); pm.EndTask(); }
/// <summary>
/// Looks up a named function (e.g. "designDocName/funcName") inside a design
/// document and returns its source, appending the design doc's language
/// ("javascript" when unspecified) to <paramref name="outLanguageList"/>.
/// </summary>
/// <returns>the function body, or null when the name is malformed or the design doc is missing.</returns>
public string GetDesignDocFunction(string fnName, string key, IList<string> outLanguageList)
{
    string[] path = fnName.Split("/");
    if (path.Length != 2)
    {
        return null;
    }
    // BUG FIX: the original used the Java format specifier "%s", which C#'s
    // string.Format leaves untouched — the lookup used the literal doc id
    // "_design/%s" and could never find the design document.
    string docId = string.Format("_design/{0}", path[0]);
    RevisionInternal rev = GetDocumentWithIDAndRev(docId, null, EnumSet.NoneOf<Database.TDContentOptions>());
    if (rev == null)
    {
        return null;
    }
    string outLanguage = (string)rev.GetPropertyForKey("language");
    if (outLanguage != null)
    {
        outLanguageList.AddItem(outLanguage);
    }
    else
    {
        // No language declared: design docs default to javascript.
        outLanguageList.AddItem("javascript");
    }
    IDictionary<string, object> container = (IDictionary<string, object>)rev.GetPropertyForKey(key);
    return (string)container.Get(path[1]);
}
/// <summary>
/// Runs a named view query, or _all_docs when <paramref name="viewName"/> is
/// null/empty, appending the last indexed (or global) sequence number to
/// <paramref name="outLastSequence"/> so callers can detect staleness.
/// </summary>
public IList<QueryRow> QueryViewNamed(string viewName, QueryOptions options, IList<long> outLastSequence)
{
    long before = Runtime.CurrentTimeMillis();
    long lastSequence = 0;
    IList<QueryRow> rows = null;
    if (viewName != null && viewName.Length > 0)
    {
        View view = GetView(viewName);
        if (view == null)
        {
            throw new CouchbaseLiteException(new Status(Status.NotFound));
        }
        lastSequence = view.GetLastSequenceIndexed();
        if (options.GetStale() == Query.IndexUpdateMode.Before || lastSequence <= 0)
        {
            // Stale results are not acceptable (or the view was never
            // indexed): update the index synchronously before querying.
            view.UpdateIndex();
            lastSequence = view.GetLastSequenceIndexed();
        }
        else
        {
            if (options.GetStale() == Query.IndexUpdateMode.After && lastSequence < GetLastSequenceNumber())
            {
                // Serve the stale rows now and refresh the index on a
                // background thread.
                new Sharpen.Thread(new _Runnable_1847(view)).Start();
            }
        }
        rows = view.QueryWithOptions(options);
    }
    else
    {
        // nil view means query _all_docs
        // note: this is a little kludgy, but we have to pull out the "rows" field from the
        // result dictionary because that's what we want. should be refactored, but
        // it's a little tricky, so postponing.
        IDictionary<string, object> allDocsResult = GetAllDocs(options);
        rows = (IList<QueryRow>)allDocsResult.Get("rows");
        lastSequence = GetLastSequenceNumber();
    }
    outLastSequence.AddItem(lastSequence);
    long delta = Runtime.CurrentTimeMillis() - before;
    // BUG FIX: the original passed Java-style "%s"/"%d" specifiers to C#'s
    // string.Format, which ignores the arguments and logs the specifiers
    // literally; {0}/{1} placeholders log the actual view name and duration.
    Log.D(Database.Tag, string.Format("Query view {0} completed in {1} milliseconds", viewName, delta));
    return rows;
}
/// <summary>
/// Returns the rev ID of the 'winning' revision of this document, appending
/// true to <paramref name="outIsDeleted"/> when the winner is a deletion and
/// true to <paramref name="outIsConflict"/> when the document is in conflict.
/// </summary>
/// <exception cref="Couchbase.Lite.CouchbaseLiteException">on SQL failure (InternalServerError).</exception>
internal string WinningRevIDOfDoc(long docNumericId, IList<bool> outIsDeleted, IList<bool> outIsConflict)
{
    Cursor cursor = null;
    // Non-deleted revisions sort first, ties broken by descending revid;
    // LIMIT 2 fetches just enough rows to detect a conflict.
    string sql = "SELECT revid, deleted FROM revs" + " WHERE doc_id=? and current=1" + " ORDER BY deleted asc, revid desc LIMIT 2";
    string[] args = new string[] { System.Convert.ToString(docNumericId) };
    string revId = null;
    try
    {
        cursor = database.RawQuery(sql, args);
        cursor.MoveToNext();
        if (!cursor.IsAfterLast())
        {
            revId = cursor.GetString(0);
            bool deleted = cursor.GetInt(1) > 0;
            if (deleted)
            {
                outIsDeleted.AddItem(true);
            }
            // The document is in conflict if there are two+ result rows that are not deletions.
            bool hasNextResult = cursor.MoveToNext();
            if (hasNextResult)
            {
                bool isNextDeleted = cursor.GetInt(1) > 0;
                // BUG FIX: the original tested "isNextDeleted" instead of
                // "!isNextDeleted", flagging a conflict only when the second
                // revision WAS a deletion — the opposite of the rule stated in
                // the comment above and of the sibling implementation of this
                // method elsewhere in the codebase.
                bool isInConflict = !deleted && hasNextResult && !isNextDeleted;
                if (isInConflict)
                {
                    outIsConflict.AddItem(true);
                }
            }
        }
    }
    catch (SQLException e)
    {
        Log.E(Database.Tag, "Error", e);
        throw new CouchbaseLiteException("Error", e, new Status(Status.InternalServerError));
    }
    finally
    {
        if (cursor != null)
        {
            cursor.Close();
        }
    }
    return revId;
}
// Wires up state for fetching over the "walk" (dumb) transport: grabs the
// local repository and options from the transport, creates the object
// inserter/reader pair (plus an ObjectChecker only when fetched-object
// checking is enabled), and seeds the remote/"no packs yet"/"no alternates
// yet" work lists with the single starting remote database 'w' — further
// packs and alternates are discovered lazily from it. Also allocates the
// RevWalk flags (COMPLETE, IN_WORK_QUEUE, LOCALLY_SEEN) used to track
// traversal state, with body retention disabled for speed.
internal WalkFetchConnection(WalkTransport t, WalkRemoteObjectDatabase w) { NGit.Transport.Transport wt = (NGit.Transport.Transport)t; local = wt.local; objCheck = wt.IsCheckFetchedObjects() ? new ObjectChecker() : null; inserter = local.NewObjectInserter(); reader = local.NewObjectReader(); remotes = new AList<WalkRemoteObjectDatabase>(); remotes.AddItem(w); unfetchedPacks = new List<WalkFetchConnection.RemotePack>(); packsConsidered = new HashSet<string>(); noPacksYet = new List<WalkRemoteObjectDatabase>(); noPacksYet.AddItem(w); noAlternatesYet = new List<WalkRemoteObjectDatabase>(); noAlternatesYet.AddItem(w); fetchErrors = new Dictionary<ObjectId, IList<Exception>>(); packLocks = new AList<PackLock>(4); revWalk = new RevWalk(reader); revWalk.SetRetainBody(false); treeWalk = new TreeWalk(reader); COMPLETE = revWalk.NewFlag("COMPLETE"); IN_WORK_QUEUE = revWalk.NewFlag("IN_WORK_QUEUE"); LOCALLY_SEEN = revWalk.NewFlag("LOCALLY_SEEN"); localCommitQueue = new DateRevQueue(); workQueue = new List<ObjectId>(); }