/// <summary>
/// Converts each Couchbase.Lite.Attachment value in <paramref name="attachments"/> into a
/// plain metadata dictionary. New attachment bodies are copied into the database's blob
/// store and referenced by digest; "follows" marks the body as stored out-of-line.
/// </summary>
/// <param name="attachments">Attachment name to value map; values may be Attachment
/// objects, pre-built metadata dictionaries, or null (null entries are dropped).</param>
/// <param name="database">Database whose blob store receives any new bodies.</param>
/// <returns>A new dictionary mapping attachment name to metadata dictionary.</returns>
internal static IDictionary<string, object> InstallAttachmentBodies(IDictionary<string, object> attachments, Database database)
{
    IDictionary<string, object> updatedAttachments = new Dictionary<string, object>();
    foreach (string name in attachments.Keys)
    {
        object value = attachments.Get(name);
        if (value is Couchbase.Lite.Attachment)
        {
            Couchbase.Lite.Attachment attachment = (Couchbase.Lite.Attachment)value;
            IDictionary<string, object> metadataMutable = new Dictionary<string, object>();
            metadataMutable.PutAll(attachment.GetMetadata());
            InputStream body = attachment.GetBodyIfNew();
            if (body != null)
            {
                // Copy attachment body into the database's blob store:
                BlobStoreWriter writer = BlobStoreWriterForBody(body, database);
                metadataMutable.Put("length", (long)writer.GetLength());
                metadataMutable.Put("digest", writer.MD5DigestString());
                metadataMutable.Put("follows", true);
                // Keep the writer so the database can install the blob when the revision is saved.
                database.RememberAttachmentWriter(writer);
            }
            updatedAttachments.Put(name, metadataMutable);
        }
        else
        {
            if (value is AttachmentInternal)
            {
                throw new ArgumentException("AttachmentInternal objects not expected here. Could indicate a bug"
                );
            }
            else
            {
                if (value != null)
                {
                    // Already a plain metadata map (or other value): pass through unchanged.
                    updatedAttachments.Put(name, value);
                }
            }
        }
    }
    return(updatedAttachments);
}
/// <summary>
/// Collects the transient variables visible from this scope: first variables gathered
/// from the parent scope chain, then this scope's own transient variables (which win
/// on name collisions because they are written last).
/// </summary>
/// <param name="variables">Accumulator map; mutated in place and also returned.</param>
/// <returns>The same <paramref name="variables"/> instance, for chaining.</returns>
protected internal virtual IDictionary<string, object> CollectTransientVariables(Dictionary<string, object> variables)
{
    VariableScopeImpl parentScope = ParentVariableScope;
    if (parentScope != null)
    {
        // NOTE(review): this merges the parent's *persistent* variables (CollectVariables),
        // not its transient ones — confirm this asymmetry is intended.
        variables.PutAll(parentScope.CollectVariables(variables));
    }
    if (transientVariabes != null)
    {
        foreach (string variableName in transientVariabes.Keys)
        {
            variables[variableName] = transientVariabes[variableName].Value;
        }
    }
    return(variables);
}
/// <summary>
/// Builds the parameter dictionary sent in the body of a _changes feed request:
/// feed mode, heartbeat, revision style, starting sequence, long-poll limit and
/// the optional replication filter (including the implicit _doc_ids filter).
/// </summary>
/// <returns>Map of _changes request parameters.</returns>
internal IDictionary<string, object> GetChangesFeedParams()
{
    if (docIDs != null && docIDs.Count > 0)
    {
        // An explicit document-ID list is expressed via the built-in _doc_ids filter.
        filterName = "_doc_ids";
        filterParams = new Dictionary<string, object>();
        filterParams.Put("doc_ids", docIDs);
    }
    var bodyParams = new Dictionary<string, object>();
    bodyParams["feed"] = GetFeed();
    bodyParams["heartbeat"] = _heartbeatMilliseconds;
    if (includeConflicts)
    {
        // all_docs style reports every leaf revision, not just the winner.
        bodyParams["style"] = "all_docs";
    }
    else
    {
        bodyParams["style"] = null;
    }
    if (lastSequenceID != null)
    {
        // Send the checkpoint as a long when it parses as one; otherwise pass the
        // opaque sequence value through untouched.
        Int64 sequenceAsLong;
        var success = Int64.TryParse(lastSequenceID.ToString(), out sequenceAsLong);
        bodyParams["since"] = success ? sequenceAsLong : lastSequenceID;
    }
    if (mode == ChangeTrackerMode.LongPoll)
    {
        bodyParams["limit"] = LongPollModeLimit;
    }
    if (filterName != null)
    {
        bodyParams["filter"] = filterName;
        bodyParams.PutAll(filterParams);
    }
    return(bodyParams);
}
/// <summary>
/// Creates a copy of this revision bound to the given document ID and revision ID.
/// The copy carries the same properties, with "_id" and "_rev" overwritten to match.
/// </summary>
public virtual Couchbase.Lite.Internal.RevisionInternal CopyWithDocID(string docId
    , string revId)
{
    System.Diagnostics.Debug.Assert(docId != null && revId != null);
    System.Diagnostics.Debug.Assert(this.docId == null || this.docId.Equals(docId));
    var copy = new Couchbase.Lite.Internal.RevisionInternal(docId, revId, deleted, database);
    var mergedProperties = new Dictionary<string, object>();
    var currentProperties = GetProperties();
    if (currentProperties != null)
    {
        foreach (var pair in currentProperties)
        {
            mergedProperties[pair.Key] = pair.Value;
        }
    }
    // Stamp the identity fields so they reflect the copy, not the source revision.
    mergedProperties["_id"] = docId;
    mergedProperties["_rev"] = revId;
    copy.SetProperties(mergedProperties);
    return copy;
}
/// <summary>
/// Returns a new dictionary with the entries of <paramref name="dict"/> whose keys
/// appear in <paramref name="keys"/>; keys absent from the source are skipped.
/// When <paramref name="keys"/> is null, all entries are copied.
/// </summary>
public static Dictionary<TK, TV> OfKeys<TK, TV>([NotNull] this IReadOnlyDictionary<TK, TV> dict, [CanBeNull] IEnumerable<TK> keys)
{
    var result = new Dictionary<TK, TV>();
    if (keys == null)
    {
        // No key filter: copy everything.
        foreach (var pair in dict)
        {
            result[pair.Key] = pair.Value;
        }
        return result;
    }
    foreach (var key in keys)
    {
        if (dict.TryGetValue(key, out var value))
        {
            result[key] = value;
        }
    }
    return result;
}
/// <exception cref="Couchbase.Lite.CouchbaseLiteException"></exception>
/// <summary>
/// Adds the given user's ID to the "members" array of the list document and saves it,
/// creating the array if the document has none. Save failures are logged, not rethrown.
/// NOTE(review): no duplicate check — adding an existing member appends a second entry.
/// </summary>
public static void AddMemberToList(Couchbase.Lite.Document list, Couchbase.Lite.Document user)
{
    // Work on a mutable copy of the document's current properties.
    IDictionary<string, object> newProperties = new Dictionary<string, object>();
    newProperties.PutAll(list.Properties);
    IList<string> members = (IList<string>)newProperties.Get("members");
    if (members == null)
    {
        members = new AList<string>();
    }
    members.AddItem(user.Id);
    newProperties.Put("members", members);
    try
    {
        list.PutProperties(newProperties);
    }
    catch (CouchbaseLiteException e)
    {
        // Best-effort save: log and continue despite the declared exception.
        Log.E(Application.Tag, "Cannot add member to the list", e);
    }
}
/// <exception cref="Couchbase.Lite.CouchbaseLiteException"></exception>
/// <summary>
/// Assigns the given user as "owner" of every list document returned by GetQuery
/// that does not already have an owner.
/// </summary>
public static void AssignOwnerToListsIfNeeded(Database database, Couchbase.Lite.Document user)
{
    QueryEnumerator enumerator = GetQuery(database).Run();
    if (enumerator == null)
    {
        return;
    }
    foreach (var row in enumerator)
    {
        Couchbase.Lite.Document document = row.Document;
        string owner = (string)document.GetProperty("owner");
        if (owner != null)
        {
            // Already owned; leave it alone.
            continue;
        }
        IDictionary<string, object> properties = new Dictionary<string, object>();
        properties.PutAll(document.Properties);
        properties.Put("owner", user.Id);
        document.PutProperties(properties);
    }
}
/// <summary>
/// Serializes a ContextControllerInitTermState to bytes. EventBean values in the
/// pattern data cannot be serialized directly, so each one is replaced by an
/// EventBeanNameValuePair holding the event type name and the serialized underlying
/// object before the state is serialized.
/// </summary>
/// <param name="contextInfo">Expected to be a ContextControllerInitTermState.</param>
/// <returns>The serialized state.</returns>
public byte[] ToByteArray(Object contextInfo)
{
    var state = (ContextControllerInitTermState)contextInfo;
    var serializableProps = new Dictionary<String, Object>();
    if (state.PatternData != null)
    {
        serializableProps.PutAll(state.PatternData);
        foreach (var entry in state.PatternData)
        {
            if (entry.Value is EventBean)
            {
                var @event = (EventBean)entry.Value;
                // Overwrite the raw EventBean with a serializable surrogate.
                serializableProps.Put(
                    entry.Key,
                    new EventBeanNameValuePair(
                        @event.EventType.Name,
                        SerializerUtil.ObjectToByteArr(@event.Underlying)));
            }
        }
    }
    var serialized = new ContextControllerInitTermState(state.StartTime, serializableProps);
    return(SerializerUtil.ObjectToByteArr(serialized));
}
/// <summary>
/// Helps traverse the tree hierarchy: returns all inner queues (nodes that themselves
/// have children) below this node, recursively. Returns an empty map when this node
/// has no children (required for the root node), which makes it easy to build a
/// union of inner and leaf queues.
/// </summary>
/// <returns/>
internal virtual IDictionary<string, Org.Apache.Hadoop.Mapred.Queue> GetInnerQueues()
{
    IDictionary<string, Org.Apache.Hadoop.Mapred.Queue> innerQueues =
        new Dictionary<string, Org.Apache.Hadoop.Mapred.Queue>();
    // Leaf (or childless root): nothing to collect.
    if (children == null)
    {
        return innerQueues;
    }
    foreach (Org.Apache.Hadoop.Mapred.Queue child in children)
    {
        // Only children that are themselves parents count as "inner".
        if (child.GetChildren() != null && child.GetChildren().Count > 0)
        {
            innerQueues[child.GetName()] = child;
            // Recurse to pick up inner queues further down the tree.
            foreach (var descendant in child.GetInnerQueues())
            {
                innerQueues[descendant.Key] = descendant.Value;
            }
        }
    }
    return innerQueues;
}
/// <exception cref="NGit.Api.Errors.GitAPIException"></exception>
/// <summary>
/// Lists branches according to the configured list mode: local heads by default,
/// remote-tracking branches for REMOTE, or both merged together otherwise.
/// The result is sorted and the command is then marked no longer callable.
/// </summary>
public override IList<Ref> Call()
{
    CheckCallable();
    IDictionary<string, Ref> refList;
    try
    {
        if (listMode == null)
        {
            refList = repo.RefDatabase.GetRefs(Constants.R_HEADS);
        }
        else
        {
            if (listMode == ListBranchCommand.ListMode.REMOTE)
            {
                refList = repo.RefDatabase.GetRefs(Constants.R_REMOTES);
            }
            else
            {
                // ALL: merge local heads and remotes into one mutable map.
                refList = new Dictionary<string, Ref>(repo.RefDatabase.GetRefs(Constants.R_HEADS)
                );
                refList.PutAll(repo.RefDatabase.GetRefs(Constants.R_REMOTES));
            }
        }
    }
    catch (IOException e)
    {
        // Wrap low-level I/O failures in the API-level unchecked exception.
        throw new JGitInternalException(e.Message, e);
    }
    IList<Ref> resultRefs = new AList<Ref>();
    Sharpen.Collections.AddAll(resultRefs, refList.Values);
    resultRefs.Sort(new _IComparer_111());
    SetCallable(false);
    return(resultRefs);
}
/// <summary>
/// Converts a list of events to the given target event type, producing a new array of
/// adapted EventBeans. Handles decorated (wrapper) events as well as Map, ObjectArray,
/// Bean and Avro event types; any other combination raises an EPException.
/// </summary>
/// <param name="events">Events to convert.</param>
/// <param name="targetType">Type each event is adapted to.</param>
/// <param name="eventAdapterService">Factory used to build the adapted events.</param>
public static EventBean[] TypeCast(
    IList<EventBean> events,
    EventType targetType,
    EventAdapterService eventAdapterService)
{
    var convertedArray = new EventBean[events.Count];
    var count = 0;
    foreach (var theEvent in events)
    {
        EventBean converted;
        if (theEvent is DecoratingEventBean)
        {
            var wrapper = (DecoratingEventBean)theEvent;
            if (targetType is MapEventType)
            {
                // Flatten decorating + underlying properties into a single map.
                var props = new Dictionary<string, Object>();
                props.PutAll(wrapper.DecoratingProperties);
                foreach (var propDesc in wrapper.UnderlyingEvent.EventType.PropertyDescriptors)
                {
                    props.Put(propDesc.PropertyName, wrapper.UnderlyingEvent.Get(propDesc.PropertyName));
                }
                converted = eventAdapterService.AdapterForTypedMap(props, targetType);
            }
            else
            {
                converted = eventAdapterService.AdapterForTypedWrapper(
                    wrapper.UnderlyingEvent, wrapper.DecoratingProperties, targetType);
            }
        }
        else if ((theEvent.EventType is MapEventType) && (targetType is MapEventType))
        {
            var mapEvent = (MappedEventBean)theEvent;
            converted = eventAdapterService.AdapterForTypedMap(mapEvent.Properties, targetType);
        }
        else if ((theEvent.EventType is MapEventType) && (targetType is WrapperEventType))
        {
            // Map → wrapper: wrap with no decorating properties.
            converted = eventAdapterService.AdapterForTypedWrapper(theEvent, Collections.EmptyDataMap, targetType);
        }
        else if ((theEvent.EventType is BeanEventType) && (targetType is BeanEventType))
        {
            converted = eventAdapterService.AdapterForTypedObject(theEvent.Underlying, targetType);
        }
        else if (theEvent.EventType is ObjectArrayEventType && targetType is ObjectArrayEventType)
        {
            var convertedObjectArray = ObjectArrayEventType.ConvertEvent(
                theEvent, (ObjectArrayEventType)targetType);
            converted = eventAdapterService.AdapterForTypedObjectArray(convertedObjectArray, targetType);
        }
        else if (theEvent.EventType is AvroSchemaEventType && targetType is AvroSchemaEventType)
        {
            Object convertedGenericRecord = eventAdapterService.EventAdapterAvroHandler.ConvertEvent(
                theEvent, (AvroSchemaEventType)targetType);
            converted = eventAdapterService.AdapterForTypedAvro(convertedGenericRecord, targetType);
        }
        else
        {
            throw new EPException("Unknown event type " + theEvent.EventType);
        }
        convertedArray[count] = converted;
        count++;
    }
    return(convertedArray);
}
/**
 * Add all the given variables to this replacer's variables map, overwriting any
 * existing entries that share a name.
 *
 * @param vars
 *            A map of variable name-value pairs (String-to-String).
 */
public void AddVariables(Dictionary<String, String> vars)
{
    variables.PutAll(vars);
}
/// <summary>
/// Runs <paramref name="nThreads"/> IndexingThreads concurrently against a single
/// IndexWriter (wrapped with a yield test point), waits for them to finish, merges each
/// thread's expected documents into one map, and checks the index. The writer is
/// intentionally NOT committed/closed at the end so callers can exercise an open writer.
/// </summary>
/// <returns>The merged expected-documents map together with the still-open writer.</returns>
public virtual DocsAndWriter IndexRandomIWReader(int nThreads, int iterations, int range, Directory dir)
{
    IDictionary<string, Document> docs = new Dictionary<string, Document>();
    IndexWriter w = RandomIndexWriter.MockIndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetOpenMode(OpenMode.CREATE).SetRAMBufferSizeMB(0.1).SetMaxBufferedDocs(maxBufferedDocs).SetMergePolicy(NewLogMergePolicy()), new YieldTestPoint(this));
    w.Commit();
    LogMergePolicy lmp = (LogMergePolicy)w.Config.MergePolicy;
    lmp.NoCFSRatio = 0.0;
    lmp.MergeFactor = mergeFactor;
    /*
     * /// w.setMaxMergeDocs(Integer.MAX_VALUE);
     * /// w.setMaxFieldLength(10000);
     * /// w.SetRAMBufferSizeMB(1);
     * /// w.setMergeFactor(10);
     */
    // Each thread indexes its own disjoint ID range (base offset of 1,000,000 per thread).
    threads = new IndexingThread[nThreads];
    for (int i = 0; i < threads.Length; i++)
    {
        IndexingThread th = new IndexingThread(this);
        th.w = w;
        th.@base = 1000000 * i;
        th.range = range;
        th.iterations = iterations;
        threads[i] = th;
    }
    for (int i = 0; i < threads.Length; i++)
    {
        threads[i].Start();
    }
    for (int i = 0; i < threads.Length; i++)
    {
        threads[i].Join();
    }
    // w.ForceMerge(1);
    //w.Dispose();
    for (int i = 0; i < threads.Length; i++)
    {
        IndexingThread th = threads[i];
        // Lock each thread object while copying its doc map out.
        UninterruptableMonitor.Enter(th);
        try
        {
            docs.PutAll(th.docs);
        }
        finally
        {
            UninterruptableMonitor.Exit(th);
        }
    }
    TestUtil.CheckIndex(dir);
    DocsAndWriter dw = new DocsAndWriter();
    dw.docs = docs;
    dw.writer = w;
    return(dw);
}
/// <summary>
/// Formats a coded type (CD) value as XML, running constraint, code-existence and
/// coded-type validation first. Handles null flavors, mandatory-conformance errors,
/// and child content (emitted inside the element).
/// </summary>
/// <param name="context">Formatting context (conformance, cardinality, paths, errors).</param>
/// <param name="hl7Value">The value to format; a CD, or an ANY converted to CD.</param>
/// <param name="indentLevel">Indentation level for the emitted element.</param>
/// <returns>The formatted XML fragment, or an empty string when nothing is emitted.</returns>
public override string Format(FormatContext context, BareANY hl7Value, int indentLevel)
{
    bool isAny = false;
    CD cd = null;
    if (hl7Value is CD)
    {
        cd = (CD)hl7Value;
    }
    else
    {
        isAny = true;
        // bypass some validations
        cd = ConvertAnyToCd(hl7Value);
    }
    StringBuilder result = new StringBuilder();
    if (cd != null)
    {
        HandleConstraints(cd.Value, context.GetConstraints(), context.GetPropertyPath(), context.GetModelToXmlResult());
        // don't bother validating if we don't have anything to validate
        if (cd.HasNullFlavor() || HasValue(cd, context))
        {
            Hl7Errors errors = context.GetModelToXmlResult();
            VersionNumber version = context.GetVersion();
            string type = context.Type;
            bool isCne = context.GetCodingStrength() == CodingStrength.CNE;
            bool isCwe = context.GetCodingStrength() == CodingStrength.CWE;
            // we can't lookup a code supplied in an ANY datatype as we don't know the domain
            // a "reverse" lookup of domain type by code/codesystem could be possible, but difficult
            // to implement to be 100% correct (MB does not track code systems)
            if (!isAny)
            {
                if (cd.Value != null && cd.Value.CodeValue != null)
                {
                    ValidateCodeExists(cd.Value, context.GetDomainType(), version, context.IsCda(), context.GetPropertyPath(), errors);
                }
            }
            string codeAsString = (cd.Value != null ? cd.Value.CodeValue : null);
            CD_VALIDATION_UTILS.ValidateCodedType(cd, codeAsString, isCwe, isCne, false, context.IsFixed(), type, version, null, context
                .GetPropertyPath(), errors);
        }
        IDictionary<string, string> attributes = new Dictionary<string, string>();
        Ca.Infoway.Messagebuilder.Xml.ConformanceLevel conformanceLevel = context.GetConformanceLevel();
        Cardinality cardinality = context.GetCardinality();
        if (cd.HasNullFlavor())
        {
            if (ConformanceLevelUtil.IsMandatory(conformanceLevel, cardinality))
            {
                // Mandatory elements may not carry a null flavor.
                LogMandatoryError(context);
            }
            else
            {
                attributes.PutAll(CreateNullFlavorAttributes(hl7Value.NullFlavor));
            }
        }
        else
        {
            if (!HasValue(cd, context))
            {
                if (conformanceLevel == null || IsMandatoryOrPopulated(context))
                {
                    if (ConformanceLevelUtil.IsMandatory(conformanceLevel, cardinality))
                    {
                        LogMandatoryError(context);
                    }
                    else
                    {
                        // No value supplied: emit the default null-flavor attributes.
                        attributes.PutAll(AbstractPropertyFormatter.NULL_FLAVOR_ATTRIBUTES);
                    }
                }
            }
        }
        // Codes can have other attributes in map even if has NullFlavor
        attributes.PutAll(GetAttributeNameValuePairs(context, cd.Value, hl7Value));
        bool hasChildContent = HasChildContent(cd, context);
        if (hasChildContent || (!attributes.IsEmpty() || ConformanceLevelUtil.IsMandatory(conformanceLevel, cardinality)))
        {
            result.Append(CreateElement(context, attributes, indentLevel, !hasChildContent, !hasChildContent));
            if (hasChildContent)
            {
                CreateChildContent(cd, result);
                result.Append("</").Append(context.GetElementName()).Append(">");
                result.Append(SystemUtils.LINE_SEPARATOR);
            }
        }
    }
    return(result.ToString());
}
/// <summary>
/// Adds all of the selection entries to the current dictionary of selections,
/// overwriting any existing selection registered under the same field name.
/// </summary>
/// <param name="map">A dictionary of field name to <see cref="T:BrowseSelection"/> pairs.</param>
public virtual void PutAllSelections(IDictionary<string, BrowseSelection> map)
{
    _selections.PutAll(map);
}
/// <summary>
/// Verifies conflict creation and retrieval: creates two conflicting revisions of one
/// document, checks GetConflictingRevisions/GetLeafRevisions, the deterministic winner
/// (higher revision ID), and that an all-docs query in ShowConflicts mode reports both.
/// </summary>
/// <exception cref="System.Exception"></exception>
public virtual void TestConflict()
{
    IDictionary<string, object> prop = new Dictionary<string, object>();
    prop.Put("foo", "bar");
    Database db = StartDatabase();
    Document doc = CreateDocumentWithProperties(db, prop);
    SavedRevision rev1 = doc.GetCurrentRevision();
    // Save a second revision on top of rev1 (rev2a).
    IDictionary<string, object> properties = new Dictionary<string, object>();
    properties.PutAll(doc.GetProperties());
    properties.Put("tag", 2);
    SavedRevision rev2a = doc.PutProperties(properties);
    // Save a conflicting sibling (rev2b) branching from rev1.
    properties = new Dictionary<string, object>();
    properties.PutAll(rev1.GetProperties());
    properties.Put("tag", 3);
    UnsavedRevision newRev = rev1.CreateRevision();
    newRev.SetProperties(properties);
    bool allowConflict = true;
    SavedRevision rev2b = newRev.Save(allowConflict);
    // BUG FIX: Assert.IsNotNull takes (object, message); the original passed the
    // message string as the object under test, so the assertion could never fail.
    NUnit.Framework.Assert.IsNotNull(rev2b, "Failed to create a conflict");
    IList<SavedRevision> confRevs = new AList<SavedRevision>();
    confRevs.AddItem(rev2b);
    confRevs.AddItem(rev2a);
    // Assert.AreEqual takes (expected, actual) — order fixed for correct failure messages.
    NUnit.Framework.Assert.AreEqual(confRevs, doc.GetConflictingRevisions());
    NUnit.Framework.Assert.AreEqual(confRevs, doc.GetLeafRevisions());
    // The winning (default) revision is the one with the lexicographically higher ID.
    SavedRevision defaultRev;
    SavedRevision otherRev;
    if (Sharpen.Runtime.CompareOrdinal(rev2a.GetId(), rev2b.GetId()) > 0)
    {
        defaultRev = rev2a;
        otherRev = rev2b;
    }
    else
    {
        defaultRev = rev2b;
        otherRev = rev2a;
    }
    NUnit.Framework.Assert.AreEqual(defaultRev, doc.GetCurrentRevision());
    // An all-docs query in ShowConflicts mode must surface both leaf revisions,
    // winner first.
    Query query = db.CreateAllDocumentsQuery();
    query.SetAllDocsMode(Query.AllDocsMode.ShowConflicts);
    QueryEnumerator rows = query.Run();
    NUnit.Framework.Assert.AreEqual(1, rows.GetCount());
    QueryRow row = rows.GetRow(0);
    IList<SavedRevision> revs = row.GetConflictingRevisions();
    NUnit.Framework.Assert.AreEqual(2, revs.Count);
    NUnit.Framework.Assert.AreEqual(defaultRev, revs[0]);
    NUnit.Framework.Assert.AreEqual(otherRev, revs[1]);
}
/// <exception cref="Couchbase.Lite.CouchbaseLiteException"></exception>
/// <summary>
/// Persists the given checked state on the task document by rewriting its
/// "checked" property and saving a new revision.
/// </summary>
public static void UpdateCheckedStatus(Couchbase.Lite.Document task, bool @checked
    )
{
    // Copy the current properties, flip the flag, and save.
    IDictionary<string, object> updated = new Dictionary<string, object>();
    foreach (var pair in task.GetProperties())
    {
        updated[pair.Key] = pair.Value;
    }
    updated["checked"] = @checked;
    task.PutProperties(updated);
}
/// <exception cref="Couchbase.Lite.CouchbaseLiteException"></exception>
/// <summary>
/// Appends the user's ID to the "members" array of the list document and saves it,
/// creating the array if needed. Save failures are logged rather than rethrown.
/// </summary>
public static void AddMemberToList(Couchbase.Lite.Document list, Couchbase.Lite.Document user)
{
    IDictionary<string, object> updatedProperties = new Dictionary<string, object>();
    foreach (var pair in list.Properties)
    {
        updatedProperties[pair.Key] = pair.Value;
    }
    object rawMembers;
    updatedProperties.TryGetValue("members", out rawMembers);
    IList<string> members = (IList<string>)rawMembers;
    if (members == null)
    {
        members = new AList<string>();
    }
    members.AddItem(user.Id);
    updatedProperties["members"] = members;
    try
    {
        list.PutProperties(updatedProperties);
    }
    catch (CouchbaseLiteException e)
    {
        // Best-effort save: log the failure and carry on.
        Log.E(Application.Tag, "Cannot add member to the list", e);
    }
}
//TODO issue: deleteLocalDocument should return error.code( see ios)
// HISTORY
/// <summary>
/// Verifies revision history: creates a document, updates it once, and checks the
/// revision IDs (generation prefixes), the per-revision properties, and that the
/// leaf/conflicting revision lists contain only the current revision.
/// </summary>
/// <exception cref="System.Exception"></exception>
public virtual void TestHistory()
{
    IDictionary<string, object> properties = new Dictionary<string, object>();
    properties.Put("testName", "test06_History");
    properties.Put("tag", 1);
    Database db = StartDatabase();
    Document doc = CreateDocumentWithProperties(db, properties);
    string rev1ID = doc.GetCurrentRevisionId();
    Log.I(Tag, "1st revision: " + rev1ID);
    // BUG FIX: the original called Assert.IsNotNull(message, bool) — a boxed bool is
    // never null, so the assertion could never fail. The intent was IsTrue(cond, msg).
    NUnit.Framework.Assert.IsTrue(rev1ID.StartsWith("1-"), "1st revision looks wrong: " + rev1ID);
    NUnit.Framework.Assert.AreEqual(properties, doc.GetUserProperties());
    properties = new Dictionary<string, object>();
    properties.PutAll(doc.GetProperties());
    properties.Put("tag", 2);
    // BUG FIX: same IsNotNull-on-bool misuse; assert the properties actually differ.
    NUnit.Framework.Assert.IsTrue(!properties.Equals(doc.GetProperties()));
    NUnit.Framework.Assert.IsNotNull(doc.PutProperties(properties));
    string rev2ID = doc.GetCurrentRevisionId();
    Log.I(Tag, "rev2ID" + rev2ID);
    // BUG FIX: IsNotNull-on-bool misuse again.
    NUnit.Framework.Assert.IsTrue(rev2ID.StartsWith("2-"), "2nd revision looks wrong:" + rev2ID);
    IList<SavedRevision> revisions = doc.GetRevisionHistory();
    Log.I(Tag, "Revisions = " + revisions);
    // Assert.AreEqual takes (expected, actual) — order fixed for clear failure messages.
    NUnit.Framework.Assert.AreEqual(2, revisions.Count);
    SavedRevision rev1 = revisions[0];
    NUnit.Framework.Assert.AreEqual(rev1ID, rev1.GetId());
    IDictionary<string, object> gotProperties = rev1.GetProperties();
    NUnit.Framework.Assert.AreEqual(1, gotProperties.Get("tag"));
    SavedRevision rev2 = revisions[1];
    NUnit.Framework.Assert.AreEqual(rev2ID, rev2.GetId());
    NUnit.Framework.Assert.AreEqual(rev2, doc.GetCurrentRevision());
    gotProperties = rev2.GetProperties();
    NUnit.Framework.Assert.AreEqual(2, gotProperties.Get("tag"));
    IList<SavedRevision> tmp = new AList<SavedRevision>();
    tmp.AddItem(rev2);
    NUnit.Framework.Assert.AreEqual(tmp, doc.GetConflictingRevisions());
    NUnit.Framework.Assert.AreEqual(tmp, doc.GetLeafRevisions());
}
/// <exception cref="Couchbase.Lite.CouchbaseLiteException"></exception>
/// <summary>
/// Walks every list document returned by GetQuery and sets the given user as its
/// "owner", skipping documents that already have one.
/// </summary>
public static void AssignOwnerToListsIfNeeded(Database database, Couchbase.Lite.Document user)
{
    QueryEnumerator rows = GetQuery(database).Run();
    if (rows == null)
    {
        return;
    }
    foreach (var row in rows)
    {
        Couchbase.Lite.Document listDocument = row.Document;
        // Skip documents that are already owned.
        string currentOwner = (string)listDocument.GetProperty("owner");
        if (currentOwner != null)
        {
            continue;
        }
        IDictionary<string, object> updated = new Dictionary<string, object>();
        foreach (var pair in listDocument.Properties)
        {
            updated[pair.Key] = pair.Value;
        }
        updated["owner"] = user.Id;
        listDocument.PutProperties(updated);
    }
}
/// <exception cref="Couchbase.Lite.CouchbaseLiteException"></exception>
/// <summary>
/// Removes the given user's ID from the "members" array of the list document and
/// saves the updated document. Does nothing when the document has no members array.
/// </summary>
public static void RemoveMemberFromList(Couchbase.Lite.Document list, Couchbase.Lite.Document user)
{
    // Work on a mutable copy of the document's current properties.
    IDictionary<string, object> newProperties = new Dictionary<string, object>();
    newProperties.PutAll(list.Properties);
    IList<string> members = (IList<string>)newProperties.Get("members");
    // BUG FIX: the original wrote a null "members" value (and saved a useless new
    // revision) when the document had no members array at all.
    if (members == null)
    {
        return;
    }
    members.Remove(user.Id);
    newProperties.Put("members", members);
    list.PutProperties(newProperties);
}
/// <summary>
/// Pushes the replicator's checkpoint (last processed sequence) to the remote
/// _local checkpoint document. Coalesces concurrent saves: if a save is already in
/// flight this only marks the checkpoint overdue, and the in-flight request's
/// completion triggers the follow-up save.
/// </summary>
internal void SaveLastSequence()
{
    if (!lastSequenceChanged)
    {
        return;
    }
    if (savingCheckpoint)
    {
        // If a save is already in progress, don't do anything. (The completion block will trigger
        // another save after the first one finishes.)
        overdueForSave = true;
        return;
    }
    lastSequenceChanged = false;
    overdueForSave = false;
    Log.D(Tag, this + " saveLastSequence() called. lastSequence: " + LastSequence);
    // Start from the previously-fetched checkpoint doc so its _rev is carried over.
    var body = new Dictionary<String, Object>();
    if (remoteCheckpoint != null)
    {
        body.PutAll(remoteCheckpoint);
    }
    body["lastSequence"] = LastSequence;
    var remoteCheckpointDocID = RemoteCheckpointDocID();
    if (String.IsNullOrEmpty(remoteCheckpointDocID))
    {
        Log.W(Tag, this + ": remoteCheckpointDocID is null, aborting saveLastSequence()");
        return;
    }
    savingCheckpoint = true;
    Log.D(Tag, this + " put remote _local document. checkpointID: " + remoteCheckpointDocID);
    SendAsyncRequest(HttpMethod.Put, "/_local/" + remoteCheckpointDocID, body, (result, e) =>
    {
        savingCheckpoint = false;
        if (e != null)
        {
            Log.V (Tag, this + ": Unable to save remote checkpoint", e);
        }
        if (LocalDatabase == null)
        {
            Log.W(Tag, this + ": Database is null, ignoring remote checkpoint response");
            return;
        }
        if (e != null)
        {
            // Failure: decide how to recover based on the HTTP status.
            switch (GetStatusFromError(e))
            {
                case StatusCode.NotFound:
                {
                    // Checkpoint doc vanished remotely; forget it and re-save from scratch.
                    remoteCheckpoint = null;
                    overdueForSave = true;
                    break;
                }
                case StatusCode.Conflict:
                {
                    // Someone else updated the doc; refresh to pick up the latest _rev.
                    RefreshRemoteCheckpointDoc();
                    break;
                }
                default:
                {
                    // TODO: On 401 or 403, and this is a pull, remember that remote
                    // TODO: is read-only & don't attempt to read its checkpoint next time.
                    break;
                }
            }
        }
        else
        {
            // Success: remember the new _rev so the next save updates instead of conflicting.
            var response = (IDictionary<String, Object>)result;
            body.Put ("_rev", response.Get ("rev"));
            remoteCheckpoint = body;
            LocalDatabase.SetLastSequence(LastSequence, RemoteCheckpointDocID(), IsPull);
        }
        if (overdueForSave)
        {
            // A save was requested while this one was in flight; run it now.
            SaveLastSequence ();
        }
    });
}
/// <exception cref="Org.Apache.Hadoop.Yarn.Server.Resourcemanager.Reservation.Exceptions.PlanningException
/// "/>
/// <exception cref="Org.Apache.Hadoop.Yarn.Server.Resourcemanager.Reservation.Exceptions.ContractValidationException
/// "/>
/// <summary>
/// Greedily places all stages of a reservation contract in the plan, iterating the
/// stages backwards from the deadline. Returns the result of adding (or updating)
/// the reservation in the plan; throws PlanningException when no valid allocation
/// can be found (including ORDER_NO_GAP gap violations).
/// </summary>
private bool ComputeAllocation(ReservationId reservationId, string user, Plan plan
    , ReservationDefinition contract, ReservationAllocation oldReservation)
{
    Log.Info("placing the following ReservationRequest: " + contract);
    Resource totalCapacity = plan.GetTotalCapacity();
    // Here we can add logic to adjust the ResourceDefinition to account for
    // system "imperfections" (e.g., scheduling delays for large containers).
    // Align with plan step conservatively (i.e., ceil arrival, and floor
    // deadline)
    long earliestStart = contract.GetArrival();
    long step = plan.GetStep();
    if (earliestStart % step != 0)
    {
        earliestStart = earliestStart + (step - (earliestStart % step));
    }
    long deadline = contract.GetDeadline() - contract.GetDeadline() % plan.GetStep();
    // setup temporary variables to handle time-relations between stages and
    // intermediate answers
    long curDeadline = deadline;
    long oldDeadline = -1;
    IDictionary<ReservationInterval, ReservationRequest> allocations = new Dictionary<ReservationInterval, ReservationRequest>();
    RLESparseResourceAllocation tempAssigned = new RLESparseResourceAllocation(plan.GetResourceCalculator
        (), plan.GetMinimumAllocation());
    IList<ReservationRequest> stages = contract.GetReservationRequests().GetReservationResources
        ();
    ReservationRequestInterpreter type = contract.GetReservationRequests().GetInterpreter
        ();
    // Iterate the stages in backward from deadline
    for (ListIterator<ReservationRequest> li = stages.ListIterator(stages.Count); li.HasPrevious();)
    {
        ReservationRequest currentReservationStage = li.Previous();
        // validate the RR respect basic constraints
        ValidateInput(plan, currentReservationStage, totalCapacity);
        // run allocation for a single stage
        IDictionary<ReservationInterval, ReservationRequest> curAlloc = PlaceSingleStage(
            plan, tempAssigned, currentReservationStage, earliestStart, curDeadline, oldReservation
            , totalCapacity);
        if (curAlloc == null)
        {
            // if we did not find an allocation for the currentReservationStage
            // return null, unless the ReservationDefinition we are placing is of
            // type ANY
            if (type != ReservationRequestInterpreter.RAny)
            {
                throw new PlanningException("The GreedyAgent" + " couldn't find a valid allocation for your request"
                );
            }
            else
            {
                continue;
            }
        }
        else
        {
            // if we did find an allocation add it to the set of allocations
            allocations.PutAll(curAlloc);
            // if this request is of type ANY we are done searching (greedy)
            // and can return the current allocation (break-out of the search)
            if (type == ReservationRequestInterpreter.RAny)
            {
                break;
            }
            // if the request is of ORDER or ORDER_NO_GAP we constrain the next
            // round of allocation to precede the current allocation, by setting
            // curDeadline
            if (type == ReservationRequestInterpreter.ROrder || type == ReservationRequestInterpreter
                .ROrderNoGap)
            {
                curDeadline = FindEarliestTime(curAlloc.Keys);
                // for ORDER_NO_GAP verify that the allocation found so far has no
                // gap, fail otherwise (the greedy procedure failed to find a
                // no-gap allocation)
                if (type == ReservationRequestInterpreter.ROrderNoGap && oldDeadline > 0)
                {
                    if (oldDeadline - FindLatestTime(curAlloc.Keys) > plan.GetStep())
                    {
                        throw new PlanningException("The GreedyAgent" + " couldn't find a valid allocation for your request"
                        );
                    }
                }
                // keep the variable oldDeadline pointing to the last deadline we
                // found
                oldDeadline = curDeadline;
            }
        }
    }
    // If we got here with nothing allocated, we failed to find an allocation for
    // the ReservationDefinition: give up and report failure to the user
    if (allocations.IsEmpty())
    {
        throw new PlanningException("The GreedyAgent" + " couldn't find a valid allocation for your request"
        );
    }
    // create reservation with above allocations if not null/empty
    ReservationRequest ZeroRes = ReservationRequest.NewInstance(Resource.NewInstance(
        0, 0), 0);
    long firstStartTime = FindEarliestTime(allocations.Keys);
    // add zero-padding from arrival up to the first non-null allocation
    // to guarantee that the reservation exists starting at arrival
    if (firstStartTime > earliestStart)
    {
        allocations[new ReservationInterval(earliestStart, firstStartTime)] = ZeroRes;
        firstStartTime = earliestStart;
    }
    // consider adding trailing zeros at the end for symmetry
    // Actually add/update the reservation in the plan.
    // This is subject to validation as other agents might be placing
    // in parallel and there might be sharing policies the agent is not
    // aware of.
    ReservationAllocation capReservation = new InMemoryReservationAllocation(reservationId
        , contract, user, plan.GetQueueName(), firstStartTime, FindLatestTime(allocations
        .Keys), allocations, plan.GetResourceCalculator(), plan.GetMinimumAllocation());
    if (oldReservation != null)
    {
        return(plan.UpdateReservation(capReservation));
    }
    else
    {
        return(plan.AddReservation(capReservation));
    }
}
/// <summary>
/// Copy constructor: clones another revision's identity (doc ID, rev ID, deletion
/// flag) and takes a snapshot of its properties.
/// </summary>
internal RevisionInternal(RevisionInternal other) : this(other.DocID, other.RevID, other.Deleted)
{
    var snapshot = new Dictionary<string, object>();
    var sourceProperties = other.GetProperties();
    if (sourceProperties != null)
    {
        foreach (var pair in sourceProperties)
        {
            snapshot[pair.Key] = pair.Value;
        }
    }
    SetProperties(snapshot);
}
/// <summary>
/// Gets or sets the userProperties of the <see cref="Couchbase.Lite.Revision"/>.
/// </summary>
/// <remarks>
/// Gets or sets the userProperties of the <see cref="Couchbase.Lite.Revision"/>.
/// Get, returns the properties of the <see cref="Couchbase.Lite.Revision"/>
/// without any properties with keys prefixed with '_' (which contain Couchbase Lite data).
/// Set, replaces all properties except for those with keys prefixed with '_'.
/// </remarks>
/// <value>The userProperties of the <see cref="Couchbase.Lite.Revision"/>.</value>
public void SetUserProperties(IDictionary<String, Object> userProperties)
{
    var newProps = new Dictionary<String, Object>();
    newProps.PutAll(userProperties);
    foreach(string key in Properties.Keys)
    {
        // FIX: use ordinal comparison for the '_' metadata prefix — it is a fixed
        // byte marker, so the culture-sensitive InvariantCultureIgnoreCase check was
        // unnecessary (and IgnoreCase is meaningless for '_'). See CA1307.
        if(key.StartsWith("_", StringComparison.Ordinal))
        {
            newProps[key] = properties.Get(key);
        }
    }
    // Preserve metadata properties
    properties = newProps;
}
/// <summary>Adds all values from the passed in ContentValues.</summary>
/// <remarks>Adds all values from the passed in ContentValues; entries with the same
/// key overwrite existing ones.</remarks>
/// <param name="other">the ContentValues from which to copy</param>
public void PutAll(Couchbase.Lite.Storage.ContentValues other)
{
    mValues.PutAll(other.mValues);
}
/// <summary>
/// This method is used to insert a collection of mappings into
/// the session map. It is used when another source of pairs is
/// required to populate the collection currently maintained
/// within this session's internal map. Any pairs that currently
/// exist with matching names will be overwritten by this.
/// </summary>
/// <param name="data">
/// this is the collection of pairs to be added
/// </param>
public void PutAll(Dictionary data)
{
    map.PutAll(data);
}
/// <summary>
/// Replaces all user properties while preserving every metadata property
/// (keys prefixed with '_', which hold Couchbase Lite bookkeeping data).
/// </summary>
/// <param name="userProperties">The new user properties; metadata keys from the
/// current properties are merged back in on top.</param>
public void SetUserProperties(IDictionary<string, object> userProperties)
{
    IDictionary<string, object> newProps = new Dictionary<string, object>();
    newProps.PutAll(userProperties);
    foreach (string key in properties.Keys)
    {
        // FIX: ordinal comparison — the '_' prefix is a byte marker, not linguistic
        // text, so the culture-sensitive default StartsWith overload (CA1310) was
        // the wrong tool here.
        if (key.StartsWith("_", System.StringComparison.Ordinal))
        {
            newProps.Put(key, properties.Get(key));
        }
    }
    // Preserve metadata properties
    properties = newProps;
}
/// <exception cref="System.MemberAccessException"/>
/// <exception cref="System.Exception"/>
/// <exception cref="Java.Util.Concurrent.ExecutionException"/>
/// <exception cref="System.IO.IOException"/>
/// <exception cref="Java.Lang.InstantiationException"/>
/// <exception cref="System.MissingMethodException"/>
/// <exception cref="System.Reflection.TargetInvocationException"/>
/// <exception cref="System.TypeLoadException"/>
/// <exception cref="Java.Sql.SQLException"/>
/// <summary>
/// Runs phrase suggestion over the test data: merges the model/test/base properties,
/// loads the saved patterns model, optionally runs extraction iterations, collects
/// the unmatched-token phrases per sentence plus the matched seed words, and returns
/// them as JSON.
/// </summary>
public virtual string SuggestPhrasesTest(Properties testProps, string modelPropertiesFile, string stopWordsFile)
{
    logger.Info("Suggesting phrases in test");
    logger.Info("test properties are " + testProps);
    Properties runProps = StringUtils.ArgsToPropertiesWithResolve(new string[] { "-props", modelPropertiesFile });
    // These settings only make sense while training; strip them for the test run.
    string[] removeProperties = new string[] { "allPatternsDir", "storePatsForEachToken", "invertedIndexClass", "savePatternsWordsDir", "batchProcessSents", "outDir", "saveInvertedIndex", "removeOverLappingLabels", "numThreads" };
    foreach (string s in removeProperties)
    {
        if (runProps.Contains(s))
        {
            runProps.Remove(s);
        }
    }
    runProps.SetProperty("stopWordsPatternFiles", stopWordsFile);
    runProps.SetProperty("englishWordsFiles", stopWordsFile);
    runProps.SetProperty("commonWordsPatternFiles", stopWordsFile);
    // Later PutAll calls win: base props, then test overrides; mirror back into props.
    runProps.PutAll(props);
    runProps.PutAll(testProps);
    props.PutAll(runProps);
    ProcessText(false);
    GetPatternsFromDataMultiClass<SurfacePattern> model = new GetPatternsFromDataMultiClass<SurfacePattern>(runProps, Data.sents, seedWords, true, humanLabelClasses);
    ArgumentParser.FillOptions(model, runProps);
    GetPatternsFromDataMultiClass.LoadFromSavedPatternsWordsDir(model, runProps);
    IDictionary<string, int> alreadyLearnedIters = new Dictionary<string, int>();
    foreach (string label in model.constVars.GetLabels())
    {
        alreadyLearnedIters[label] = model.constVars.GetLearnedWordsEachIter()[label].LastEntry().Key;
    }
    if (model.constVars.learn)
    {
        // Map<String, E> p0 = new HashMap<String, SurfacePattern>();
        // Map<String, Counter<CandidatePhrase>> p0Set = new HashMap<String, Counter<CandidatePhrase>>();
        // Map<String, Set<E>> ignorePatterns = new HashMap<String, Set<E>>();
        model.IterateExtractApply(null, null, null);
    }
    IDictionary<string, ICounter<CandidatePhrase>> allExtractions = new Dictionary<string, ICounter<CandidatePhrase>>();
    // Only for one label right now!
    // BUG FIX: the original read GetEnumerator().Current without first calling
    // MoveNext(), which yields the default value (null) instead of the first label.
    var labelEnumerator = model.constVars.GetLabels().GetEnumerator();
    labelEnumerator.MoveNext();
    string label_1 = labelEnumerator.Current;
    allExtractions[label_1] = new ClassicCounter<CandidatePhrase>();
    foreach (KeyValuePair<string, DataInstance> sent in Data.sents)
    {
        StringBuilder str = new StringBuilder();
        foreach (CoreLabel l in sent.Value.GetTokens())
        {
            if (l.Get(typeof(PatternsAnnotations.MatchedPatterns)) != null && !l.Get(typeof(PatternsAnnotations.MatchedPatterns)).IsEmpty())
            {
                // Token is part of a matched pattern; keep accumulating the phrase.
                str.Append(" " + l.Word());
            }
            else
            {
                // Phrase boundary: record what was accumulated so far.
                allExtractions[label_1].IncrementCount(CandidatePhrase.CreateOrGet(str.ToString().Trim()));
                str.Length = 0;
            }
        }
        // NOTE(review): a phrase still in `str` at the end of a sentence is discarded —
        // confirm whether trailing matches should also be counted.
    }
    allExtractions.PutAll(model.matchedSeedWords);
    return(model.constVars.GetSetWordsAsJson(allExtractions));
}
/// <summary>Adds all values from the passed in ContentValues.</summary> /// <param name="other">the ContentValues from which to copy</param> public void PutAll(ContentValues other) { mValues.PutAll(other.mValues); }
private static void SetDiagnostics(SegmentInfo info, string source, IDictionary<string, string> details) { IDictionary<string, string> diagnostics = new Dictionary<string, string>(); diagnostics["source"] = source; diagnostics["lucene.version"] = Constants.LUCENE_VERSION; diagnostics["os"] = Constants.OS_NAME; diagnostics["os.arch"] = Constants.OS_ARCH; diagnostics["os.version"] = Constants.OS_VERSION; diagnostics["java.version"] = Constants.JAVA_VERSION; diagnostics["java.vendor"] = Constants.JAVA_VENDOR; diagnostics["timestamp"] = Convert.ToString((DateTime.Now)); if (details != null) { diagnostics.PutAll(details); } info.Diagnostics = diagnostics; }
public virtual Couchbase.Lite.Internal.RevisionInternal CopyWithDocID(string docId , string revId) { //assert((docId != null) && (revId != null)); System.Diagnostics.Debug.Assert((docId != null)); System.Diagnostics.Debug.Assert(((this.docId == null) || (this.docId.Equals(docId )))); Couchbase.Lite.Internal.RevisionInternal result = new Couchbase.Lite.Internal.RevisionInternal (docId, revId, deleted, database); IDictionary<string, object> unmodifiableProperties = GetProperties(); IDictionary<string, object> properties = new Dictionary<string, object>(); if (unmodifiableProperties != null) { properties.PutAll(unmodifiableProperties); } properties.Put("_id", docId); properties.Put("_rev", revId); result.SetProperties(properties); return result; }
internal IDictionary<string, object> GetChangesFeedParams() { if (docIDs != null && docIDs.Count > 0) { filterName = "_doc_ids"; filterParams = new Dictionary<string, object>(); filterParams.Put("doc_ids", docIDs); } var bodyParams = new Dictionary<string, object>(); bodyParams["feed"] = GetFeed(); bodyParams["heartbeat"] = _heartbeatMilliseconds; if (includeConflicts) { bodyParams["style"] = "all_docs"; } else { bodyParams["style"] = null; } if (lastSequenceID != null) { Int64 sequenceAsLong; var success = Int64.TryParse(lastSequenceID.ToString(), out sequenceAsLong); bodyParams["since"] = success ? sequenceAsLong : lastSequenceID; } if (mode == ChangeTrackerMode.LongPoll) { bodyParams["limit"] = LongPollModeLimit; } if (filterName != null) { bodyParams["filter"] = filterName; bodyParams.PutAll(filterParams); } return bodyParams; }
/// <summary>
/// Persists the replication checkpoint (the last processed sequence) to the
/// remote database's _local checkpoint document, then invokes
/// <paramref name="completionHandler"/> (when non-null).
/// </summary>
private void SaveLastSequence(SaveLastSequenceCompletionBlock completionHandler)
{
    if (!lastSequenceChanged)
    {
        if (completionHandler != null)
        {
            completionHandler();
        }
        return;
    }
    if (_savingCheckpoint)
    {
        // If a save is already in progress, don't do anything. (The completion block will trigger
        // another save after the first one finishes.)
        Task.Delay(500).ContinueWith(t => SaveLastSequence(completionHandler));
        // BUG FIX: the original fell through here and started a second, concurrent
        // save while one was already in flight; return and let the scheduled
        // retry run instead (matches the other SaveLastSequence variants).
        return;
    }
    lastSequenceChanged = false;
    Log.D(TAG, "saveLastSequence() called. lastSequence: " + LastSequence);
    var body = new Dictionary<String, Object>();
    if (_remoteCheckpoint != null)
    {
        // Start from the current checkpoint document so its existing fields
        // (including _rev) are preserved on update.
        body.PutAll(_remoteCheckpoint);
    }
    body["lastSequence"] = LastSequence;
    var remoteCheckpointDocID = RemoteCheckpointDocID();
    if (String.IsNullOrEmpty(remoteCheckpointDocID))
    {
        Log.W(TAG, "remoteCheckpointDocID is null, aborting saveLastSequence()");
        return;
    }
    _savingCheckpoint = true;
    var message = SendAsyncRequest(HttpMethod.Put, "/_local/" + remoteCheckpointDocID, body, (result, e) =>
    {
        _savingCheckpoint = false;
        if (e != null)
        {
            // (merged the original's duplicated `if (e != null)` blocks)
            Log.V(TAG, "Unable to save remote checkpoint", e);
            switch (GetStatusFromError(e))
            {
                case StatusCode.NotFound:
                    // Checkpoint doc doesn't exist remotely; forget the cached copy.
                    _remoteCheckpoint = null;
                    break;
                case StatusCode.Conflict:
                    RefreshRemoteCheckpointDoc();
                    break;
                default:
                    // TODO: On 401 or 403, and this is a pull, remember that remote
                    // TODO: is read-only & don't attempt to read its checkpoint next time.
                    break;
            }
        }
        else
        {
            Log.D(TAG, "Save checkpoint response: " + result);
            var response = result.AsDictionary<string, object>();
            body.Put("_rev", response.Get("rev"));
            _remoteCheckpoint = body;
            var localDb = LocalDatabase;
            if (localDb == null || localDb.Storage == null)
            {
                Log.W(TAG, "Database is null, ignoring remote checkpoint response");
                if (completionHandler != null)
                {
                    completionHandler();
                }
                return;
            }
            localDb.SetLastSequence(LastSequence, remoteCheckpointDocID);
        }
        if (completionHandler != null)
        {
            completionHandler();
        }
    });
    // This request should not be canceled when the replication is told to stop:
    Task dummy;
    _requests.TryRemove(message, out dummy);
}
internal static IDictionary<string, object> InstallAttachmentBodies(IDictionary<string , object> attachments, Database database) { IDictionary<string, object> updatedAttachments = new Dictionary<string, object>(); foreach (string name in attachments.Keys) { object value = attachments.Get(name); if (value is Couchbase.Lite.Attachment) { Couchbase.Lite.Attachment attachment = (Couchbase.Lite.Attachment)value; IDictionary<string, object> metadataMutable = new Dictionary<string, object>(); metadataMutable.PutAll(attachment.GetMetadata()); InputStream body = attachment.GetBodyIfNew(); if (body != null) { // Copy attachment body into the database's blob store: BlobStoreWriter writer = BlobStoreWriterForBody(body, database); metadataMutable.Put("length", (long)writer.GetLength()); metadataMutable.Put("digest", writer.MD5DigestString()); metadataMutable.Put("follows", true); database.RememberAttachmentWriter(writer); } updatedAttachments.Put(name, metadataMutable); } else { if (value is AttachmentInternal) { throw new ArgumentException("AttachmentInternal objects not expected here. Could indicate a bug" ); } else { if (value != null) { updatedAttachments.Put(name, value); } } } } return updatedAttachments; }
public RevisionInternal Copy(string docId, RevisionID revId) { System.Diagnostics.Debug.Assert((docId != null)); System.Diagnostics.Debug.Assert(((_docId == null) || (_docId.Equals(docId)))); var result = new RevisionInternal(docId, revId, Deleted); var unmodifiableProperties = GetProperties(); var properties = new Dictionary<string, object>(); if(unmodifiableProperties != null) { properties.PutAll(unmodifiableProperties); } properties.SetDocRevID(docId, revId); result.SetProperties(properties); return result; }
/// <summary>
/// Rebuilds this.optionMap from the configured DHCPv6 options: clears the map,
/// then adds a wrapper object (keyed by the option's code) for each configured
/// option that is non-null and has a non-empty payload, and finally merges in
/// any generic "other" options.
/// </summary>
/// <returns>this.optionMap, rebuilt in place</returns>
public Dictionary<int, DhcpOption> InitDhcpOptionMap()
{
    this.optionMap.Clear();
    v6BcmcsAddressesOption bcmcsAddressesOption = this.configOptions.v6BcmcsAddressesOption;
    if (bcmcsAddressesOption != null && bcmcsAddressesOption.ipAddress.Count > 0)
    {
        this.optionMap[bcmcsAddressesOption.code] = new DhcpV6BcmcsAddressesOption(bcmcsAddressesOption);
    }
    v6BcmcsDomainNamesOption bcmcsDomainNamesOption = this.configOptions.v6BcmcsDomainNamesOption;
    if (bcmcsDomainNamesOption != null && bcmcsDomainNamesOption.domainName.Count > 0)
    {
        this.optionMap[bcmcsDomainNamesOption.code] = new DhcpV6BcmcsDomainNamesOption(bcmcsDomainNamesOption);
    }
    v6DnsServersOption dnsServersOption = this.configOptions.v6DnsServersOption;
    if (dnsServersOption != null && dnsServersOption.ipAddress.Count > 0)
    {
        this.optionMap[dnsServersOption.code] = new DhcpV6DnsServersOption(dnsServersOption);
    }
    v6DomainSearchListOption domainSearchListOption = this.configOptions.v6DomainSearchListOption;
    if (domainSearchListOption != null && domainSearchListOption.domainName.Count > 0)
    {
        this.optionMap[domainSearchListOption.code] = new DhcpV6DomainSearchListOption(domainSearchListOption);
    }
    v6GeoconfCivicOption geoconfCivicOption = this.configOptions.v6GeoconfCivicOption;
    if (geoconfCivicOption != null && geoconfCivicOption.civicAddressElement.Count > 0)
    {
        this.optionMap[geoconfCivicOption.code] = new DhcpV6GeoconfCivicOption(geoconfCivicOption);
    }
    v6InfoRefreshTimeOption infoRefreshTimeOption = this.configOptions.v6InfoRefreshTimeOption;
    if (infoRefreshTimeOption != null && infoRefreshTimeOption.unsignedInt > 0)
    {
        this.optionMap[infoRefreshTimeOption.code] = new DhcpV6InfoRefreshTimeOption(infoRefreshTimeOption);
    }
    v6LostServerDomainNameOption lostServerDomainNameOption = this.configOptions.v6LostServerDomainNameOption;
    if (lostServerDomainNameOption != null && !String.IsNullOrEmpty(lostServerDomainNameOption.domainName))
    {
        this.optionMap[lostServerDomainNameOption.code] = new DhcpV6LostServerDomainNameOption(lostServerDomainNameOption);
    }
    // NOTE(review): unlike every other branch in this method, the two timezone
    // wrappers below are constructed WITHOUT the configured option object, so the
    // configured timezone string is apparently dropped — confirm whether the
    // parameterless constructors are intentional.
    v6NewPosixTimezoneOption newPosixTimezoneOption = this.configOptions.v6NewPosixTimezoneOption;
    if (newPosixTimezoneOption != null && !String.IsNullOrEmpty(newPosixTimezoneOption.@string))
    {
        this.optionMap[newPosixTimezoneOption.code] = new DhcpV6NewPosixTimezoneOption();
    }
    v6NewTzdbTimezoneOption newTzdbTimezoneOption = this.configOptions.v6NewTzdbTimezoneOption;
    if (newTzdbTimezoneOption != null && !String.IsNullOrEmpty(newTzdbTimezoneOption.@string))
    {
        this.optionMap[newTzdbTimezoneOption.code] = new DhcpV6NewTzdbTimezoneOption();
    }
    v6NisDomainNameOption nisDomainNameOption = this.configOptions.v6NisDomainNameOption;
    if (nisDomainNameOption != null && !String.IsNullOrEmpty(nisDomainNameOption.domainName))
    {
        this.optionMap[nisDomainNameOption.code] = new DhcpV6NisDomainNameOption(nisDomainNameOption);
    }
    v6NisPlusDomainNameOption nisPlusDomainNameOption = this.configOptions.v6NisPlusDomainNameOption;
    if (nisPlusDomainNameOption != null && !String.IsNullOrEmpty(nisPlusDomainNameOption.domainName))
    {
        this.optionMap[nisPlusDomainNameOption.code] = new DhcpV6NisPlusDomainNameOption(nisPlusDomainNameOption);
    }
    v6NisPlusServersOption nisPlusServersOption = this.configOptions.v6NisPlusServersOption;
    if (nisPlusServersOption != null && nisPlusServersOption.ipAddress.Count > 0)
    {
        this.optionMap[nisPlusServersOption.code] = new DhcpV6NisPlusServersOption(nisPlusServersOption);
    }
    v6NisServersOption nisServersOption = this.configOptions.v6NisServersOption;
    if (nisServersOption != null && nisServersOption.ipAddress.Count > 0)
    {
        this.optionMap[nisServersOption.code] = new DhcpV6NisServersOption(nisServersOption);
    }
    v6PanaAgentAddressesOption panaAgentAddressesOption = this.configOptions.v6PanaAgentAddressesOption;
    if (panaAgentAddressesOption != null && panaAgentAddressesOption.ipAddress.Count > 0)
    {
        this.optionMap[panaAgentAddressesOption.code] = new DhcpV6PanaAgentAddressesOption(panaAgentAddressesOption);
    }
    v6PreferenceOption preferenceOption = this.configOptions.v6PreferenceOption;
    if (preferenceOption != null && preferenceOption.unsignedByte != 0)
    {
        this.optionMap[preferenceOption.code] = new DhcpV6PreferenceOption(preferenceOption);
    }
    v6ServerUnicastOption serverUnicastOption = this.configOptions.v6ServerUnicastOption;
    if (serverUnicastOption != null && !String.IsNullOrEmpty(serverUnicastOption.ipAddress))
    {
        this.optionMap[serverUnicastOption.code] = new DhcpV6ServerUnicastOption(serverUnicastOption);
    }
    v6SipServerAddressesOption sipServerAddressesOption = this.configOptions.v6SipServerAddressesOption;
    if (sipServerAddressesOption != null && sipServerAddressesOption.ipAddress.Count > 0)
    {
        this.optionMap[sipServerAddressesOption.code] = new DhcpV6SipServerAddressesOption(sipServerAddressesOption);
    }
    v6SipServerDomainNamesOption sipServerDomainNamesOption = this.configOptions.v6SipServerDomainNamesOption;
    if (sipServerDomainNamesOption != null && sipServerDomainNamesOption.domainName.Count > 0)
    {
        this.optionMap[sipServerDomainNamesOption.code] = new DhcpV6SipServerDomainNamesOption(sipServerDomainNamesOption);
    }
    v6SntpServersOption sntpServersOption = this.configOptions.v6SntpServersOption;
    if (sntpServersOption != null && sntpServersOption.ipAddress.Count > 0)
    {
        this.optionMap[sntpServersOption.code] = new DhcpV6SntpServersOption(sntpServersOption);
    }
    v6StatusCodeOption statusCodeOption = this.configOptions.v6StatusCodeOption;
    if (statusCodeOption != null && !String.IsNullOrEmpty(statusCodeOption.message))
    {
        this.optionMap[statusCodeOption.code] = new DhcpV6StatusCodeOption(statusCodeOption);
    }
    v6VendorInfoOption vendorInfoOption = this.configOptions.v6VendorInfoOption;
    if (vendorInfoOption != null && vendorInfoOption.suboptionList.Count > 0)
    {
        this.optionMap[vendorInfoOption.code] = new DhcpV6VendorInfoOption(vendorInfoOption);
    }
    // Any remaining free-form options are wrapped generically and merged in.
    if (this.configOptions.v6OtherOptions.Count > 0)
    {
        optionMap.PutAll(GenericOptionFactory.GenericOptions(configOptions.v6OtherOptions));
    }
    return this.optionMap;
}
internal void SaveLastSequence() { if (!lastSequenceChanged) { return; } if (savingCheckpoint) { // If a save is already in progress, don't do anything. (The completion block will trigger // another save after the first one finishes.) overdueForSave = true; return; } lastSequenceChanged = false; overdueForSave = false; Log.D(Tag, this + " saveLastSequence() called. lastSequence: " + LastSequence); var body = new Dictionary <String, Object>(); if (remoteCheckpoint != null) { body.PutAll(remoteCheckpoint); } body["lastSequence"] = LastSequence; var remoteCheckpointDocID = RemoteCheckpointDocID(); if (String.IsNullOrEmpty(remoteCheckpointDocID)) { Log.W(Tag, this + ": remoteCheckpointDocID is null, aborting saveLastSequence()"); return; } savingCheckpoint = true; Log.D(Tag, this + " put remote _local document. checkpointID: " + remoteCheckpointDocID); SendAsyncRequest(HttpMethod.Put, "/_local/" + remoteCheckpointDocID, body, (result, e) => { savingCheckpoint = false; if (e != null) { Log.V(Tag, this + ": Unable to save remote checkpoint", e); } if (LocalDatabase == null) { Log.W(Tag, this + ": Database is null, ignoring remote checkpoint response"); return; } if (e != null) { switch (GetStatusFromError(e)) { case StatusCode.NotFound: { remoteCheckpoint = null; overdueForSave = true; break; } case StatusCode.Conflict: { RefreshRemoteCheckpointDoc(); break; } default: { // TODO: On 401 or 403, and this is a pull, remember that remote // TODO: is read-only & don't attempt to read its checkpoint next time. break; } } } else { var response = (IDictionary <String, Object>)result; body.Put("_rev", response.Get("rev")); remoteCheckpoint = body; LocalDatabase.SetLastSequence(LastSequence, RemoteCheckpointDocID(), IsPull); } if (overdueForSave) { SaveLastSequence(); } }); }
/// <summary>
/// Populates the container launch environment: standard YARN variables
/// (container id, NM host/ports, log/local dirs, user, home, pwd), whitelisted
/// inherited variables, forced admin variables, the Windows classpath-jar
/// workaround, and auxiliary-service data.
/// </summary>
/// <param name="environment">environment map to populate (mutated in place)</param>
/// <param name="pwd">the container's working directory</param>
/// <param name="appDirs">application-local directories (joined into LOCAL_DIRS)</param>
/// <param name="containerLogDirs">container log directories (joined into LOG_DIRS)</param>
/// <param name="resources">localized resources keyed by target path, values are link names</param>
/// <param name="nmPrivateClasspathJarDir">NM-private dir for the classpath jar (secure Windows only)</param>
/// <exception cref="System.IO.IOException"/>
public virtual void SanitizeEnv(IDictionary<string, string> environment, Path pwd, IList<Path> appDirs, IList<string> containerLogDirs, IDictionary<Path, IList<string>> resources, Path nmPrivateClasspathJarDir)
{
    environment[ApplicationConstants.Environment.ContainerId.ToString()] = container.GetContainerId().ToString();
    environment[ApplicationConstants.Environment.NmPort.ToString()] = this.context.GetNodeId().GetPort().ToString();
    environment[ApplicationConstants.Environment.NmHost.ToString()] = this.context.GetNodeId().GetHost();
    environment[ApplicationConstants.Environment.NmHttpPort.ToString()] = this.context.GetHttpPort().ToString();
    environment[ApplicationConstants.Environment.LocalDirs.ToString()] = StringUtils.Join(",", appDirs);
    environment[ApplicationConstants.Environment.LogDirs.ToString()] = StringUtils.Join(",", containerLogDirs);
    environment[ApplicationConstants.Environment.User.ToString()] = container.GetUser();
    environment[ApplicationConstants.Environment.Logname.ToString()] = container.GetUser();
    environment[ApplicationConstants.Environment.Home.ToString()] = conf.Get(YarnConfiguration.NmUserHomeDir, YarnConfiguration.DefaultNmUserHomeDir);
    environment[ApplicationConstants.Environment.Pwd.ToString()] = pwd.ToString();
    PutEnvIfNotNull(environment, ApplicationConstants.Environment.HadoopConfDir.ToString(), Runtime.Getenv(ApplicationConstants.Environment.HadoopConfDir.ToString()));
    if (!Shell.Windows)
    {
        environment["JVM_PID"] = "$$";
    }
    // allow containers to override these variables
    string[] whitelist = conf.Get(YarnConfiguration.NmEnvWhitelist, YarnConfiguration.DefaultNmEnvWhitelist).Split(",");
    foreach (string whitelistEnvVariable in whitelist)
    {
        PutEnvIfAbsent(environment, whitelistEnvVariable.Trim());
    }
    // variables here will be forced in, even if the container has specified them.
    Apps.SetEnvFromInputString(environment, conf.Get(YarnConfiguration.NmAdminUserEnv, YarnConfiguration.DefaultNmAdminUserEnv), FilePath.pathSeparator);
    // TODO: Remove Windows check and use this approach on all platforms after
    // additional testing. See YARN-358.
    if (Shell.Windows)
    {
        string inputClassPath = environment[ApplicationConstants.Environment.Classpath.ToString()];
        if (inputClassPath != null && !inputClassPath.IsEmpty())
        {
            //On non-windows, localized resources
            //from distcache are available via the classpath as they were placed
            //there but on windows they are not available when the classpath
            //jar is created and so they "are lost" and have to be explicitly
            //added to the classpath instead.  This also means that their position
            //is lost relative to other non-distcache classpath entries which will
            //break things like mapreduce.job.user.classpath.first.  An environment
            //variable can be set to indicate that distcache entries should come
            //first
            bool preferLocalizedJars = Sharpen.Extensions.ValueOf(environment[ApplicationConstants.Environment.ClasspathPrependDistcache.ToString()]);
            bool needsSeparator = false;
            StringBuilder newClassPath = new StringBuilder();
            if (!preferLocalizedJars)
            {
                newClassPath.Append(inputClassPath);
                needsSeparator = true;
            }
            // Localized resources do not exist at the desired paths yet, because the
            // container launch script has not run to create symlinks yet.  This
            // means that FileUtil.createJarWithClassPath can't automatically expand
            // wildcards to separate classpath entries for each file in the manifest.
            // To resolve this, append classpath entries explicitly for each
            // resource.
            foreach (KeyValuePair<Path, IList<string>> entry in resources)
            {
                bool targetIsDirectory = new FilePath(entry.Key.ToUri().GetPath()).IsDirectory();
                foreach (string linkName in entry.Value)
                {
                    // Append resource.
                    if (needsSeparator)
                    {
                        newClassPath.Append(FilePath.pathSeparator);
                    }
                    else
                    {
                        needsSeparator = true;
                    }
                    newClassPath.Append(pwd.ToString()).Append(Path.Separator).Append(linkName);
                    // FileUtil.createJarWithClassPath must use File.toURI to convert
                    // each file to a URI to write into the manifest's classpath.  For
                    // directories, the classpath must have a trailing '/', but
                    // File.toURI only appends the trailing '/' if it is a directory that
                    // already exists.  To resolve this, add the classpath entries with
                    // explicit trailing '/' here for any localized resource that targets
                    // a directory.  Then, FileUtil.createJarWithClassPath will guarantee
                    // that the resulting entry in the manifest's classpath will have a
                    // trailing '/', and thus refer to a directory instead of a file.
                    if (targetIsDirectory)
                    {
                        newClassPath.Append(Path.Separator);
                    }
                }
            }
            if (preferLocalizedJars)
            {
                if (needsSeparator)
                {
                    newClassPath.Append(FilePath.pathSeparator);
                }
                newClassPath.Append(inputClassPath);
            }
            // When the container launches, it takes the parent process's environment
            // and then adds/overwrites with the entries from the container launch
            // context.  Do the same thing here for correct substitution of
            // environment variables in the classpath jar manifest.
            IDictionary<string, string> mergedEnv = new Dictionary<string, string>(Sharpen.Runtime.GetEnv());
            mergedEnv.PutAll(environment);
            // this is hacky and temporary - it's to preserve the windows secure
            // behavior but enable non-secure windows to properly build the class
            // path for access to job.jar/lib/xyz and friends (see YARN-2803)
            Path jarDir;
            if (exec is WindowsSecureContainerExecutor)
            {
                jarDir = nmPrivateClasspathJarDir;
            }
            else
            {
                jarDir = pwd;
            }
            string[] jarCp = FileUtil.CreateJarWithClassPath(newClassPath.ToString(), jarDir, pwd, mergedEnv);
            // In a secure cluster the classpath jar must be localized to grant access
            Path localizedClassPathJar = exec.LocalizeClasspathJar(new Path(jarCp[0]), pwd, container.GetUser());
            string replacementClassPath = localizedClassPathJar.ToString() + jarCp[1];
            environment[ApplicationConstants.Environment.Classpath.ToString()] = replacementClassPath;
        }
    }
    // put AuxiliaryService data to environment
    foreach (KeyValuePair<string, ByteBuffer> meta in containerManager.GetAuxServiceMetaData())
    {
        AuxiliaryServiceHelper.SetServiceDataIntoEnv(meta.Key, meta.Value, environment);
    }
}
public RevisionInternal CopyWithDocID(String docId, String revId) { System.Diagnostics.Debug.Assert((docId != null)); System.Diagnostics.Debug.Assert(((this.docId == null) || (this.docId.Equals(docId)))); var result = new RevisionInternal(docId, revId, deleted, database); var unmodifiableProperties = GetProperties(); var properties = new Dictionary<string, object>(); if (unmodifiableProperties != null) { properties.PutAll(unmodifiableProperties); } properties["_id"] = docId; properties["_rev"] = revId; result.SetProperties(properties); return result; }
/// <summary>
/// Persists the replication checkpoint (the last processed sequence) to the
/// remote database's _local checkpoint document, invoking
/// <paramref name="completionHandler"/> when the save (or a coalesced follow-up
/// save) completes or is skipped. Re-entrant calls while a save is in flight
/// are coalesced via _overdueForSave.
/// </summary>
private void SaveLastSequence(SaveLastSequenceCompletionBlock completionHandler)
{
    if (!lastSequenceChanged)
    {
        if (completionHandler != null)
        {
            completionHandler();
        }
        return;
    }
    if (_savingCheckpoint)
    {
        // If a save is already in progress, don't do anything. (The completion block will trigger
        // another save after the first one finishes.)
        _overdueForSave = true;
        return;
    }
    lastSequenceChanged = false;
    _overdueForSave = false;
    Log.D(TAG, "saveLastSequence() called. lastSequence: " + LastSequence);
    var body = new Dictionary<String, Object>();
    if (_remoteCheckpoint != null)
    {
        // Start from the current checkpoint document so its existing fields
        // (including _rev) are preserved on update.
        body.PutAll(_remoteCheckpoint);
    }
    body["lastSequence"] = LastSequence;
    var remoteCheckpointDocID = RemoteCheckpointDocID();
    if (String.IsNullOrEmpty(remoteCheckpointDocID))
    {
        Log.W(TAG, "remoteCheckpointDocID is null, aborting saveLastSequence()");
        return;
    }
    _savingCheckpoint = true;
    SendAsyncRequest(HttpMethod.Put, "/_local/" + remoteCheckpointDocID, body, (result, e) =>
    {
        _savingCheckpoint = false;
        if (e != null)
        {
            Log.V (TAG, "Unable to save remote checkpoint", e);
        }
        if (LocalDatabase == null)
        {
            Log.W(TAG, "Database is null, ignoring remote checkpoint response");
            if (completionHandler != null)
            {
                completionHandler ();
            }
            return;
        }
        if (!LocalDatabase.Open())
        {
            Log.W(TAG, "Database is closed, ignoring remote checkpoint response");
            if (completionHandler != null)
            {
                completionHandler ();
            }
            return;
        }
        if (e != null)
        {
            switch (GetStatusFromError(e))
            {
                case StatusCode.NotFound:
                    // Checkpoint doc vanished remotely; forget our cached copy and retry.
                    _remoteCheckpoint = null;
                    _overdueForSave = true;
                    break;
                case StatusCode.Conflict:
                    RefreshRemoteCheckpointDoc();
                    break;
                default:
                    // TODO: On 401 or 403, and this is a pull, remember that remote
                    // TODO: is read-only & don't attempt to read its checkpoint next time.
                    break;
            }
        }
        else
        {
            Log.D(TAG, "Save checkpoint response: " + result.ToString());
            var response = result.AsDictionary<string, object>();
            body.Put ("_rev", response.Get ("rev"));
            _remoteCheckpoint = body;
            // NOTE(review): a sibling variant of this method passes IsPull (not
            // !IsPull) here — confirm the intended polarity of this argument.
            LocalDatabase.SetLastSequence(LastSequence, RemoteCheckpointDocID(), !IsPull);
        }
        if (_overdueForSave)
        {
            // A save request arrived while this one was in flight; run it now and
            // defer the completion handler to that follow-up save.
            SaveLastSequence (completionHandler);
        }
        else if (completionHandler != null)
        {
            completionHandler ();
        }
    });
}
/// <summary>
/// Rebuilds the map of configured DHCPv4 options: clears optionMap, then adds a
/// wrapper object (keyed by the option's code) for each configured option that
/// is non-null and has a non-empty payload, and finally merges in any generic
/// "other" options.
/// </summary>
/// <returns>the option-code to DhcpOption map (optionMap, rebuilt in place)</returns>
public Dictionary<int, DhcpOption> InitDhcpV4OptionMap()
{
    optionMap.Clear();
    // NOTE: each branch re-checks its local for null even though the outer guard
    // already established non-null; redundant but harmless, preserved as-is.
    if (configOptions.v4DomainNameOption != null && !string.IsNullOrEmpty(configOptions.v4DomainNameOption.@string))
    {
        v4DomainNameOption domainNameOption = configOptions.v4DomainNameOption;
        if (domainNameOption != null)
        {
            optionMap[(int)domainNameOption.code] = new DhcpV4DomainNameOption(domainNameOption);
        }
    }
    if (configOptions.v4DomainServersOption != null && configOptions.v4DomainServersOption.ipAddress.Count > 0)
    {
        v4DomainServersOption domainServersOption = configOptions.v4DomainServersOption;
        if (domainServersOption != null)
        {
            optionMap[(int)domainServersOption.code] = new DhcpV4DomainServersOption(domainServersOption);
        }
    }
    if (configOptions.v4RoutersOption != null && configOptions.v4RoutersOption.ipAddress.Count > 0)
    {
        v4RoutersOption routersOption = configOptions.v4RoutersOption;
        if (routersOption != null)
        {
            optionMap[(int)routersOption.code] = new DhcpV4RoutersOption(routersOption);
        }
    }
    if (configOptions.v4SubnetMaskOption != null && !string.IsNullOrEmpty(configOptions.v4SubnetMaskOption.ipAddress))
    {
        v4SubnetMaskOption subnetMaskOption = configOptions.v4SubnetMaskOption;
        if (subnetMaskOption != null)
        {
            optionMap[(int)subnetMaskOption.code] = new DhcpV4SubnetMaskOption(subnetMaskOption);
        }
    }
    if (configOptions.v4TimeOffsetOption != null && configOptions.v4TimeOffsetOption.unsignedInt > 0)
    {
        v4TimeOffsetOption timeOffsetOption = configOptions.v4TimeOffsetOption;
        if (timeOffsetOption != null)
        {
            optionMap[(int)timeOffsetOption.code] = new DhcpV4TimeOffsetOption(timeOffsetOption);
        }
    }
    if (configOptions.v4TimeServersOption != null && configOptions.v4TimeServersOption.ipAddress.Count > 0)
    {
        v4TimeServersOption timeServersOption = configOptions.v4TimeServersOption;
        if (timeServersOption != null)
        {
            optionMap[(int)timeServersOption.code] = new DhcpV4TimeServersOption(timeServersOption);
        }
    }
    // NOTE(review): this guard requires BOTH asciiValue AND hexValue to be
    // non-null; if the opaque data is expected in only one form, this should
    // presumably be an || (either form present) — confirm against the config schema.
    if (configOptions.v4VendorSpecificOption != null && configOptions.v4VendorSpecificOption.opaqueData != null && configOptions.v4VendorSpecificOption.opaqueData.asciiValue != null && configOptions.v4VendorSpecificOption.opaqueData.hexValue != null)
    {
        v4VendorSpecificOption vendorSpecificOption = configOptions.v4VendorSpecificOption;
        if (vendorSpecificOption != null)
        {
            optionMap[(int)vendorSpecificOption.code] = new DhcpV4VendorSpecificOption(vendorSpecificOption);
        }
    }
    if (configOptions.v4NetbiosNameServersOption != null && configOptions.v4NetbiosNameServersOption.ipAddress.Count > 0)
    {
        v4NetbiosNameServersOption netbiosNameServersOption = configOptions.v4NetbiosNameServersOption;
        if (netbiosNameServersOption != null)
        {
            optionMap[(int)netbiosNameServersOption.code] = new DhcpV4NetbiosNameServersOption(netbiosNameServersOption);
        }
    }
    if (configOptions.v4NetbiosNodeTypeOption != null && configOptions.v4NetbiosNodeTypeOption.unsignedByte > 0)
    {
        v4NetbiosNodeTypeOption netbiosNodeTypeOption = configOptions.v4NetbiosNodeTypeOption;
        if (netbiosNodeTypeOption != null)
        {
            optionMap[(int)netbiosNodeTypeOption.code] = new DhcpV4NetbiosNodeTypeOption(netbiosNodeTypeOption);
        }
    }
    if (configOptions.v4TftpServerNameOption != null && !string.IsNullOrEmpty(configOptions.v4TftpServerNameOption.@string))
    {
        v4TftpServerNameOption tftpServerNameOption = configOptions.v4TftpServerNameOption;
        if (tftpServerNameOption != null)
        {
            optionMap[(int)tftpServerNameOption.code] = new DhcpV4TftpServerNameOption(tftpServerNameOption);
        }
    }
    if (configOptions.v4BootFileNameOption != null && !string.IsNullOrEmpty(configOptions.v4BootFileNameOption.@string))
    {
        v4BootFileNameOption bootFileNameOption = configOptions.v4BootFileNameOption;
        if (bootFileNameOption != null)
        {
            optionMap[(int)bootFileNameOption.code] = new DhcpV4BootFileNameOption(bootFileNameOption);
        }
    }
    // Any remaining free-form options are wrapped generically and merged in.
    if (configOptions.otherOptions != null && configOptions.otherOptions.Count > 0)
    {
        optionMap.PutAll(GenericOptionFactory.GenericOptions(configOptions.otherOptions));
    }
    return optionMap;
}