// Verifies that a workflow persisted to a shared MemoryStore by one host can be
// loaded by id and resumed to completion by a second host using the same store.
public void MemoryStoreShouldSupportLoad()
{
    var instanceStore = new MemoryStore();
    var host1 = CreateTestHost(instanceStore);
    var host2 = CreateTestHost(instanceStore);
    try
    {
        Debug.Assert(host1 != null, "host1 != null");
        // Run the first episode; the workflow is expected to persist and unload.
        host1.TestWorkflowApplication.RunEpisode();
        Assert.IsTrue(host1.WaitForUnloadedEvent(Constants.Timeout));
        Debug.Assert(host2 != null, "host2 != null");
        Debug.Assert(host2.TestWorkflowApplication != null, "host2.TestWorkflowApplication != null");
        Debug.Assert(host1.TestWorkflowApplication != null, "host1.TestWorkflowApplication != null");
        // Load the persisted instance by id on the second host and resume it.
        host2.TestWorkflowApplication.Load(host1.TestWorkflowApplication.Id);
        host2.TestWorkflowApplication.Run();
        Assert.IsTrue(host2.WaitForUnloadedEvent(Constants.Timeout));
    }
    finally
    {
        // Always dump tracking records from both hosts for diagnostics.
        Trace.WriteLine("*** Test Host 1 Tracking ***");
        Debug.Assert(host1 != null, "host1 != null");
        Debug.Assert(host1.Tracking != null, "host1.Tracking != null");
        host1.Tracking.Trace();
        Trace.WriteLine("*** Test Host 2 Tracking ***");
        Debug.Assert(host2 != null, "host2 != null");
        Debug.Assert(host2.Tracking != null, "host2.Tracking != null");
        host2.Tracking.Trace();
    }
}
// Exercises insert/select/update/delete against MemoryStore and verifies
// that Select hands back clones rather than the stored instances.
public void BasicCRUD()
{
    MemoryStore store = new MemoryStore();
    Tape original = new Tape() { Id = "foo" };
    store.Insert(original);

    var firstCopy = store.Select(original.Id);
    Assert.IsNotNull(firstCopy);

    // Mutating a selected copy must not leak into the store (clone semantics).
    firstCopy.Comment = "bar";
    var secondCopy = store.Select(firstCopy.Id);
    Assert.AreNotEqual(firstCopy.Comment, secondCopy.Comment);

    // After an explicit Update the change must be visible to a fresh Select.
    store.Update(firstCopy);
    secondCopy = store.Select(firstCopy.Id);
    Assert.AreEqual(firstCopy.Comment, secondCopy.Comment);

    // Deleting the only record empties the store.
    store.Delete(secondCopy.Id);
    Assert.AreEqual(0, store.List().Count);
}
// Querying settings by a name that was never added yields an empty result set.
public void GetSettings_ByName0()
{
    MemoryStore store = new MemoryStore();
    var query = new Setting { Name = SettingUrn.Parse("baz") };
    var settings = store.GetSettings(query);
    settings.Count().Verify().IsEqual(0);
}
// MemoryStore supports C# collection-initializer syntax via its Add overload.
public void Add_ObjectInitializer()
{
    MemoryStore store = new MemoryStore
    {
        { "foo", "bar" },
        { "baz", "qux" },
    };
    store.Count().Verify().IsEqual(2);
}
// With two settings stored, querying by name returns exactly the matching one.
public void GetSettings_ByName2()
{
    MemoryStore store = new MemoryStore
    {
        { "foo", "bar" },
        { "baz", "qux" },
    };
    var query = new Setting { Name = SettingUrn.Parse("baz") };
    var matches = store.GetSettings(query);
    matches.Count().Verify().IsEqual(1);
    matches.First().Value.ToString().Verify().IsEqual("qux");
}
// Spins up the root test DNS server on 127.0.0.1:5400 backed by an in-memory
// record table that serves as zone data for the resolver tests.
private static void InitRootDnsServer()
{
    DnsServerSettings settings = new DnsServerSettings();
    settings.Address = "127.0.0.1";
    settings.Port = 5400;
    settings.TcpServerSettings.MaxOutstandingAccepts = 4;
    settings.TcpServerSettings.MaxActiveRequests = 16;
    settings.TcpServerSettings.ReceiveTimeout = 60 * 1000; // milliseconds
    settings.TcpServerSettings.SendTimeout = 60 * 1000;
    settings.UdpServerSettings.MaxOutstandingAccepts = 4;
    settings.UdpServerSettings.MaxActiveRequests = 16;
    settings.UdpServerSettings.ReceiveTimeout = 60 * 1000;
    settings.UdpServerSettings.SendTimeout = 60 * 1000;
    MemoryStore memoryDnsStore = new MemoryStore();
    DnsRecordTable table = new DnsRecordTable();
    // Address only
    table.Add(new NSRecord("abc.com", "127.0.0.1"));
    table.Add(new AddressRecord("abc.com", "192.200.0.1"));
    table.Add(new AddressRecord("abc.com", "192.200.0.2"));
    // Cert + Address
    table.Add(new NSRecord("redmond.hsgincubator.com", "127.0.0.1"));
    table.Add(new AddressRecord("redmond.hsgincubator.com", "192.210.0.1"));
    table.Add(new CertRecord(new DnsX509Cert(File.ReadAllBytes("metadata\\certificates\\redmond.cer"))));
    // MX + Address
    // NOTE(review): this section was labeled "Cert MX + Address" but no cert
    // record is added for direct.hisp.com — confirm whether one was intended.
    table.Add(new NSRecord("direct.hisp.com", "127.0.0.1"));
    table.Add(new AddressRecord("direct.hisp.com", "192.220.0.1"));
    table.Add(new MXRecord("direct.hisp.com", "gateway.direct.hisp.com"));
    s_recordTable = table;
    // Mirror the table into the store the server actually reads from.
    foreach (DnsResourceRecord record in s_recordTable.Records)
    {
        memoryDnsStore.Records.Add(record);
    }
    s_rootDnsServer = new TestServer(memoryDnsStore, settings);
    s_rootDnsServer.Server.Start();
}
// Mutating the original leaf after PutNode must not affect what the cache
// resolves for the node's original hash.
public void TestPutAndChangedLeaf()
{
    var l = Node.NewLeaf(Encoding.ASCII.GetBytes("leaf"));
    var h = l.Hash; // remember the hash before mutation
    var store = new MemoryStore();
    var snapshot = store.GetSnapshot();
    var cache = new Cache(snapshot, Prefix);
    cache.PutNode(l);
    var rl = cache.Resolve(l.Hash);
    Assert.AreEqual(h, rl.Hash);
    Assert.AreEqual("leaf", Encoding.ASCII.GetString(rl.Value));
    // Mutate the original node after it was put into the cache.
    l.Value = new byte[] { 0x01 };
    l.SetDirty();
    // Resolving the old hash still yields the original contents.
    var rl1 = cache.Resolve(h);
    Assert.AreEqual(h, rl1.Hash);
    Assert.AreEqual("leaf", Encoding.ASCII.GetString(rl1.Value));
}
// Deserializes a MemoryStore into a TestSimpleObject and checks that each
// attribute kind (string, int, float, list, dice string, optionals) is populated.
public void InstantiatesBasicAttributesOnModel()
{
    var store = new MemoryStore();
    store.SetValue("name", "Foo");
    store.SetValue("number", 5);
    store.SetValue("float", 37.5f);
    store.SetValue("list", "one, two, three, four");
    store.SetValue("dice", "1d6+4");
    var obj = new TestSimpleObject();
    var test = store.Deserialize <TestSimpleObject>(obj);
    // NOTE(review): xUnit's convention is Assert.Equal(expected, actual); the
    // first assertions below pass arguments in the opposite order — harmless
    // for pass/fail, but failure messages will be inverted.
    Assert.Equal(test.Name, "Foo");
    Assert.Equal(test.Number, 5);
    Assert.Equal(test.FloatNumber, 37.5f);
    // NOTE(review): NotStrictEqual asserts the values are NOT equal under the
    // default comparer — confirm this is intended rather than Assert.Equal
    // (a List<string> vs string[] would differ under the default comparer).
    Assert.NotStrictEqual(test.ListOfValues, new string[] { "one", "two", "three", "four" });
    Assert.Equal(test.Optional, "");
    Assert.Equal("defaultString", test.OptionalWithDefault);
    Assert.Equal("1d6+4", test.DiceValues.ToString());
}
// A branch seeded directly into the backing store resolves through the cache,
// and its child leaf is independently resolvable by hash.
public void TestResolveBranch()
{
    var l = Node.NewLeaf(Encoding.ASCII.GetBytes("leaf"));
    var b = Node.NewBranch();
    b.Children[1] = l;
    var store = new MemoryStore();
    // Seed the store with the serialized branch and leaf.
    store.Put(b.Hash.ToKey(), b.ToArray());
    store.Put(l.Hash.ToKey(), l.ToArray());
    var snapshot = store.GetSnapshot();
    var cache = new Cache(snapshot, Prefix);
    var resolved_b = cache.Resolve(b.Hash);
    Assert.AreEqual(b.Hash, resolved_b.Hash);
    Assert.AreEqual(l.Hash, resolved_b.Children[1].Hash);
    var resolved_l = cache.Resolve(l.Hash);
    Assert.AreEqual(l.Value.Span.ToHexString(), resolved_l.Value.Span.ToHexString());
}
// Insertion order must not matter: inserting {ab} before or after {ab,cd}
// splits the key path differently during construction but must converge to
// the same proof size and identical root hashes.
public void TestSplitKey()
{
    var store = new MemoryStore();
    var snapshot = store.GetSnapshot();
    var mpt1 = new MPTTrie <TestKey, TestValue>(snapshot, null);
    Assert.IsTrue(mpt1.Put(new byte[] { 0xab, 0xcd }, new byte[] { 0x01 }));
    Assert.IsTrue(mpt1.Put(new byte[] { 0xab }, new byte[] { 0x02 }));
    HashSet <byte[]> set1 = mpt1.GetProof(new byte[] { 0xab, 0xcd });
    Assert.AreEqual(4, set1.Count);
    // Same keys, opposite insertion order.
    var mpt2 = new MPTTrie <TestKey, TestValue>(snapshot, null);
    Assert.IsTrue(mpt2.Put(new byte[] { 0xab }, new byte[] { 0x02 }));
    Assert.IsTrue(mpt2.Put(new byte[] { 0xab, 0xcd }, new byte[] { 0x01 }));
    HashSet <byte[]> set2 = mpt2.GetProof(new byte[] { 0xab, 0xcd });
    Assert.AreEqual(4, set2.Count);
    Assert.AreEqual(mpt1.Root.Hash, mpt2.Root.Hash);
}
/// <summary>
/// Handles clicks on the calculator's memory buttons. Dispatches on the
/// button's text and updates the MemoryStore field (and, for some actions,
/// the display).
/// MC = Memory Clear (reset memory to 0)
/// MR = Memory Recall (show memory in the display)
/// MS = Memory Store (replace memory with the number currently displayed)
/// M+ = Memory add (add the last result to memory)
/// M- = Memory subtract (subtract the last result from memory)
/// </summary>
/// <param name="sender">The memory button that was clicked.</param>
/// <param name="e">Standard click event arguments (unused).</param>
private void btnMemory_Click(object sender, EventArgs e)
{
    Button ButtonThatWasPushed = (Button)sender;
    string ButtonText = ButtonThatWasPushed.Text;
    if (ButtonText == "MC")
    {
        //Memory Clear
        MemoryStore = 0;
        return;
    }
    if (ButtonText == "MR")
    {
        //Memory Recall
        textBox1.Text = MemoryStore.ToString();
        return;
    }
    if (ButtonText == "MS")
    {
        // Memory Store: overwrite memory with the displayed number.
        // NOTE(review): double.Parse will throw if the display is not numeric.
        MemoryStore = double.Parse(textBox1.Text);
        return;
    }
    if (ButtonText == "M+")
    {
        //Memory add
        // NOTE(review): unlike M-, this branch does not refresh the display —
        // confirm whether that asymmetry is intentional.
        MemoryStore += result;
        return;
    }
    if (ButtonText == "M-")
    {
        //Memory subtract
        MemoryStore -= result;
        textBox1.Text = MemoryStore.ToString();
        return;
    }
}
// Demo/utility entry point: builds an in-memory RDF store, attaches an RDFS
// reasoner, serializes the (reasoned) store to c:\temp\_1.rdf, then waits for
// a key press. The commented-out blocks are earlier experiments kept for
// reference.
static void Main(string[] args)
{
    var rdfStore = new MemoryStore();
    //rdfStore.Add(new Statement(NS.CSO.classEntity, NS.Rdfs.subClassOf, NS.Rdfs.ClassEntity));
    //rdfStore.Add(new Statement(NS.CSO.interfaceEntity, NS.Rdfs.subClassOf, NS.Rdfs.ClassEntity));
    var rdfsReasoner = new RDFS();
    rdfsReasoner.LoadSchema(rdfStore);
    rdfStore.AddReasoner(rdfsReasoner);
    /* experiment: dump the store to the console
    using (var wr = new RdfXmlWriter(Console.Out)) {
        wr.BaseUri = NS.NrMeta;
        wr.Write(rdfStore);
    } */
    /* experiment: check an inferred subclass statement
    var r = rdfStore.Contains(new Statement(
        (Entity)(NS.NrDotNetType + "NReco.Operations.ChainOperationCall"),
        (Entity)NS.Rdfs.subClassOfEntity,
        (Entity)(NS.NrDotNetType + "NReco.Operations.OperationCall") ));
    Console.WriteLine(r.ToString()); */
    /* experiment: enumerate domain statements
    foreach (Statement s in rdfStore.Select(new Statement(
        (Entity)(NS.NrDotNetProp+"ContextFilter"),
        (Entity)NS.Rdfs.domainEntity,
        null))) { //Entity)(NS.NrDotNetType + "NReco.IProvider`2")
        Console.WriteLine(s.Object.Uri.ToString());
    } */
    /* experiment: run a SPARQL-style query
    Query query = new GraphMatch(new N3Reader(new StringReader(rdfQuery)));
    QueryResultSink sink = new SparqlXmlQuerySink(Console.Out);
    query.Run(rdfStore, sink); */
    using (RdfXmlWriter wr = new RdfXmlWriter(@"c:\temp\_1.rdf"))
    {
        //wr.BaseUri = NS.NrMeta;
        //wr.Namespaces.AddNamespace(NS.DotNet.Type, "t");
        //wr.Namespaces.AddNamespace(NS.DotNet.Property, "p");
        wr.Write(rdfStore);
    }
    Console.ReadKey(); // keep the console window open until a key is pressed
}
// Insertion order independence for the non-generic Trie: {ab,cd} then {ab}
// versus the reverse must yield the same proof size and identical root hashes.
public void TestSplitKey()
{
    var store = new MemoryStore();
    var snapshot = store.GetSnapshot();
    var mpt1 = new Trie(snapshot, null);
    mpt1.Put(new byte[] { 0xab, 0xcd }, new byte[] { 0x01 });
    mpt1.Put(new byte[] { 0xab }, new byte[] { 0x02 });
    var r = mpt1.TryGetProof(new byte[] { 0xab, 0xcd }, out var set1);
    Assert.IsTrue(r);
    Assert.AreEqual(4, set1.Count);
    // Same keys, opposite insertion order.
    var mpt2 = new Trie(snapshot, null);
    mpt2.Put(new byte[] { 0xab }, new byte[] { 0x02 });
    mpt2.Put(new byte[] { 0xab, 0xcd }, new byte[] { 0x01 });
    r = mpt2.TryGetProof(new byte[] { 0xab, 0xcd }, out var set2);
    Assert.IsTrue(r);
    Assert.AreEqual(4, set2.Count);
    Assert.AreEqual(mpt1.Root.Hash, mpt2.Root.Hash);
}
// A "levelup" designer whose step list is empty can never raise the character
// to the target level, so executing it must throw InvalidOperationException.
public void IfLevelUpDoesNotIncrementLevelThrowException()
{
    var data = new MemoryStore();
    data.SetValue("name", "Test One");
    data.SetValue("type", "levelup");
    // Deliberately empty step list: nothing will increment the level.
    var steps = new MemoryStore();
    data.SetValue("steps", steps);

    var strategy = new CharacterStrategy();
    strategy.TargetLevel = 5;
    var sheet = new CharacterSheet(strategy);
    var designer = new CharacterDesigner(data);

    Assert.Throws <System.InvalidOperationException>(() => designer.ExecuteStep(sheet));
}
// Mutating the node returned by Resolve must not change what a subsequent
// Resolve for the same hash returns.
public void TestGetAndChangedExtension()
{
    var e = MPTNode.NewExtension(new byte[] { 0x01 }, new MPTNode());
    var store = new MemoryStore();
    // Seed the backing store with the serialized extension node.
    store.Put(Prefix, e.Hash.ToArray(), e.ToArray());
    var snapshot = store.GetSnapshot();
    var cache = new MPTCache(snapshot, Prefix);
    var re = cache.Resolve(e.Hash);
    Assert.AreEqual(e.Hash, re.Hash);
    Assert.AreEqual(e.Key.ToHexString(), re.Key.ToHexString());
    Assert.IsTrue(re.Next.IsEmpty);
    // Mutate the resolved copy.
    re.Key = new byte[] { 0x02 };
    re.SetDirty();
    // A fresh Resolve must still see the original key.
    var re1 = cache.Resolve(e.Hash);
    Assert.AreEqual(e.Hash, re1.Hash);
    Assert.AreEqual(e.Key.ToHexString(), re1.Key.ToHexString());
    Assert.IsTrue(re1.Next.IsEmpty);
}
// Maps a SemWeb literal to a display value. When the target type is an enum,
// the raw value is normalized through Enum.Parse; on failure the raw literal
// is kept and a diagnostic is written to the console.
public virtual string GetValue(MemoryStore store, SemWeb.Literal obj)
{
    string result = obj.Value;
    if (type.IsEnum)
    {
        try
        {
            object o = System.Enum.Parse(type, obj.Value);
            result = o.ToString();
        }
        catch (System.Exception e)
        {
            // Keep the raw value; just report that it isn't an enum member.
            System.Console.WriteLine("Value \"{2}\" not found in {0}{3}{1}", type, e, result, System.Environment.NewLine);
        }
    }
    /*
     * else if (type == typeof (Rational)) {
     * object o = FSpot.Tiff.Rational.Parse (obj.Value);
     * }
     */
    return (result);
}
// The archive must replay from the requested offset (5) onwards: events 6..10
// are emitted wrapped in Replaying, followed by the Replayed marker, then the
// stream completes (no live commands are fed in).
public async Task Archive_should_start_from_replaying_events_from_given_offset()
{
    var events = MakeEvents(10).ToArray();
    var store = new MemoryStore <int>(events);
    var archive = new VolatileArchive <int>(store);
    var probe = this.CreateManualSubscriberProbe <Emission <int> >();
    // Empty command source: only the replay phase of the flow is exercised.
    Source.Empty <Command <int> >()
        .Via(archive.CreateFlow(5))
        .ToMaterialized(Sink.FromSubscriber(probe), Keep.Right)
        .Run(Materializer);
    probe.ExpectSubscription().Request(10);
    probe.ExpectNext(new Replaying <int>(new DurableEvent <int>(ChannelId, 6, new HybridTime(new VectorTime((ReplicaId, 6)), DateTimeOffset.MinValue), 6)))
        .ExpectNext(new Replaying <int>(new DurableEvent <int>(ChannelId, 7, new HybridTime(new VectorTime((ReplicaId, 7)), DateTimeOffset.MinValue), 7)))
        .ExpectNext(new Replaying <int>(new DurableEvent <int>(ChannelId, 8, new HybridTime(new VectorTime((ReplicaId, 8)), DateTimeOffset.MinValue), 8)))
        .ExpectNext(new Replaying <int>(new DurableEvent <int>(ChannelId, 9, new HybridTime(new VectorTime((ReplicaId, 9)), DateTimeOffset.MinValue), 9)))
        .ExpectNext(new Replaying <int>(new DurableEvent <int>(ChannelId, 10, new HybridTime(new VectorTime((ReplicaId, 10)), DateTimeOffset.MinValue), 10)))
        .ExpectNext(Replayed <int> .Instance)
        .ExpectComplete();
}
// Returns the persistence store for a scope, creating and caching it as a
// scope attribute on first use. Non-persistent shared objects always get a
// MemoryStore (cached under SO_TRANSIENT_STORE); persistent ones get the
// configured store type, falling back to MemoryStore if instantiation fails.
private IPersistenceStore GetStore(IScope scope, bool persistent)
{
    IPersistenceStore store = null;
    if (!persistent)
    {
        // Use special store for non-persistent shared objects
        if (!scope.HasAttribute(SO_TRANSIENT_STORE))
        {
            store = new MemoryStore(scope);
            scope.SetAttribute(SO_TRANSIENT_STORE, store);
            return (store);
        }
        return (scope.GetAttribute(SO_TRANSIENT_STORE) as IPersistenceStore);
    }
    // Evaluate configuration for persistent shared objects
    if (!scope.HasAttribute(SO_PERSISTENCE_STORE))
    {
        try
        {
            // Instantiate the configured store type, passing the scope to the ctor.
            Type type = ObjectFactory.Locate(_configuration.PersistenceStore.Type);
            store = Activator.CreateInstance(type, new object[] { scope }) as IPersistenceStore;
            if (_log.IsInfoEnabled)
            {
                _log.Info(__Res.GetString(__Res.SharedObjectService_CreateStore, store));
            }
        }
        catch (Exception exception)
        {
            if (_log.IsErrorEnabled)
            {
                _log.Error(__Res.GetString(__Res.SharedObjectService_CreateStoreError), exception);
            }
            // Fall back to an in-memory store so shared objects keep working.
            store = new MemoryStore(scope);
        }
        scope.SetAttribute(SO_PERSISTENCE_STORE, store);
        return (store);
    }
    return (scope.GetAttribute(SO_PERSISTENCE_STORE) as IPersistenceStore);
}
// Resolves this gazetteer's settings from the RDF store. The type hierarchy
// is walked (via P_IDENTIFIED_BY / P_TYPE / P_SUBCLASS_OF) and, after the
// Reverse(), settings are applied from the most general class down to this
// gazetteer, so more specific settings override inherited ones. The out
// parameters keep their listed defaults when no settings are found.
private void ReadGazetteerSettings(MemoryStore rdfStore, out CaseMatchingType caseMatchingType, out bool lemmatize, out bool enabled, out int minLen)
{
    // Defaults used when the store provides no overriding settings.
    caseMatchingType = CaseMatchingType.IgnoreCase;
    lemmatize = false;
    enabled = true;
    minLen = 1;
    // "crumbs" collects the hierarchy URIs, this gazetteer first (reversed later).
    ArrayList <string> crumbs = new ArrayList <string>(new string[] { mUri });
    Entity[] objects = rdfStore.SelectSubjects(P_IDENTIFIED_BY, new Entity(mUri));
    if (objects.Length > 0)
    {
        Resource[] objTypes = rdfStore.SelectObjects(objects[0].Uri, P_TYPE);
        if (objTypes.Length > 0)
        {
            crumbs.Add(objTypes[0].Uri);
            // Walk up the subclass chain, collecting each superclass URI.
            Resource[] superClass = rdfStore.SelectObjects((Entity)objTypes[0], P_SUBCLASS_OF);
            while (superClass.Length > 0)
            {
                crumbs.Add(superClass[0].Uri);
                superClass = rdfStore.SelectObjects((Entity)superClass[0], P_SUBCLASS_OF);
            }
        }
    }
    crumbs.Reverse();
    foreach (string uri in crumbs)
    {
        Resource[] settings = rdfStore.SelectObjects(uri, P_SETTINGS);
        if (settings.Length == 0)
        {
            settings = rdfStore.SelectObjects(uri, P_COMMENT);
        } // compatibility with OWL-DL
        if (settings.Length > 0)
        {
            string settingsStr = ((Literal)settings[0]).Value;
            ParseGazetteerSettings(settingsStr, ref caseMatchingType, ref lemmatize, ref enabled, ref minLen);
        }
    }
}
// Collects this entity's sentence-, block-, and document-level condition
// gazetteers from the RDF store, walking up the rdfs:subClassOf chain (same
// traversal as ReadGazetteerSettings) so conditions declared on ancestor
// types are inherited.
public void ReadConditions(MemoryStore rdfStore, Dictionary <string, Gazetteer> gazetteers)
{
    // "crumbs" collects the hierarchy URIs, this entity first (reversed later).
    ArrayList <string> crumbs = new ArrayList <string>(new string[] { mUri });
    Entity[] objects = rdfStore.SelectSubjects(P_IDENTIFIED_BY, new Entity(mUri));
    if (objects.Length > 0)
    {
        Resource[] objTypes = rdfStore.SelectObjects(objects[0].Uri, P_TYPE);
        if (objTypes.Length > 0)
        {
            crumbs.Add(objTypes[0].Uri);
            // Walk up the subclass chain, collecting each superclass URI.
            Resource[] superClass = rdfStore.SelectObjects((Entity)objTypes[0], P_SUBCLASS_OF);
            while (superClass.Length > 0)
            {
                crumbs.Add(superClass[0].Uri);
                superClass = rdfStore.SelectObjects((Entity)superClass[0], P_SUBCLASS_OF);
            }
        }
    }
    crumbs.Reverse();
    foreach (string uri in crumbs)
    {
        // Sentence-level conditions
        Resource[] conditionGazetteers = rdfStore.SelectObjects(uri, P_HAS_SENTENCE_LEVEL_CONDITION);
        foreach (Entity conditionGazetteer in conditionGazetteers)
        {
            mConditions.Add(new Condition(gazetteers[conditionGazetteer.Uri], Condition.Level.Sentence));
        }
        // Block-level conditions
        conditionGazetteers = rdfStore.SelectObjects(uri, P_HAS_BLOCK_LEVEL_CONDITION);
        foreach (Entity conditionGazetteer in conditionGazetteers)
        {
            mConditions.Add(new Condition(gazetteers[conditionGazetteer.Uri], Condition.Level.Block));
        }
        // Document-level conditions
        conditionGazetteers = rdfStore.SelectObjects(uri, P_HAS_DOCUMENT_LEVEL_CONDITION);
        foreach (Entity conditionGazetteer in conditionGazetteers)
        {
            mConditions.Add(new Condition(gazetteers[conditionGazetteer.Uri], Condition.Level.Document));
        }
    }
}
// Find with various prefixes: an empty trie yields nothing; on a populated
// trie the empty prefix enumerates everything, a stored prefix returns its
// subtree, and a non-existent prefix returns nothing.
public void TestFind()
{
    var store = new MemoryStore();
    var snapshot = store.GetSnapshot();
    var mpt1 = new MPTTrie <TestKey, TestValue>(snapshot, null);
    // Empty trie: nothing to enumerate.
    var results = mpt1.Find(ReadOnlySpan <byte> .Empty).ToArray();
    Assert.AreEqual(0, results.Count());
    var mpt2 = new MPTTrie <TestKey, TestValue>(snapshot, null);
    Assert.IsTrue(mpt2.Put(new byte[] { 0xab, 0xcd, 0xef }, new byte[] { 0x01 }));
    Assert.IsTrue(mpt2.Put(new byte[] { 0xab, 0xcd, 0xe1 }, new byte[] { 0x02 }));
    Assert.IsTrue(mpt2.Put(new byte[] { 0xab }, new byte[] { 0x03 }));
    // Empty prefix: all three entries.
    results = mpt2.Find(ReadOnlySpan <byte> .Empty).ToArray();
    Assert.AreEqual(3, results.Count());
    // {ab} is itself a key and a prefix of the other two.
    results = mpt2.Find(new byte[] { 0xab }).ToArray();
    Assert.AreEqual(3, results.Count());
    // {ab,cd} prefixes exactly the two longer keys.
    results = mpt2.Find(new byte[] { 0xab, 0xcd }).ToArray();
    Assert.AreEqual(2, results.Count());
    // Prefix not present: nothing.
    results = mpt2.Find(new byte[] { 0xac }).ToArray();
    Assert.AreEqual(0, results.Count());
}
// Round-trips a graph through SemWeb: converts it to a SemWeb MemoryStore,
// serializes with SemWeb's RdfXmlWriter, reloads, and checks graph equality.
public void InteropSemWebWriteVia()
{
    Graph original = new Graph();
    FileLoader.Load(original, "InferenceTest.ttl");

    // Convert to a SemWeb store and write it out with SemWeb's own writer.
    MemoryStore semwebStore = new MemoryStore();
    SemWebConverter.ToSemWeb(original, semwebStore);
    RdfWriter writer = new RdfXmlWriter("semweb.rdf");
    writer.Write(semwebStore);
    writer.Close();

    // Read the output back in and verify the round trip preserved the graph.
    Graph roundTripped = new Graph();
    FileLoader.Load(roundTripped, "semweb.rdf");
    Assert.AreEqual(original, roundTripped, "Graphs should have been equal");
}
// With "add-modifier" set, the caster's ability modifier is added on top of
// the numeric spell slot count when populating the spellbook.
public void WillAddTheModifierToNumericSlotsIfSetToTrue()
{
    var configuration = new MemoryStore();
    configuration.SetValue("spells", new string[] { "all", "3" });
    configuration.SetValue("add-modifier", true);
    var step = new AddSpellsToSpellbook(configuration);

    var wizard = CharacterTestTemplates.Wizard().WithWizardCasting();
    // Intelligence 16 drives the modifier used by the step.
    wizard.AbilityScores.SetScore(AbilityScoreTypes.Intelligence, 16);

    step.ExecuteStep(wizard);

    var casting = wizard.Get <ISpellCasting>();
    Assert.Equal(6, casting.GetKnownSpells(1).Count());
}
// A step declared with an embedded configuration store should receive that
// configuration when executed; DummyStepWithConfiguration is expected to
// stash it on the character so the test can inspect it.
public void PassInConfigurationDataToStepIfAppropriate()
{
    var data = new MemoryStore();
    data.SetValue("name", "Test One");
    // One step entry carrying both its type name and an extra option.
    var configureStep = new MemoryStore();
    configureStep.SetValue("step", "Tests.Actions.CharacterGeneration.DummyStepWithConfiguration");
    configureStep.SetValue("other-option", "fourteen");
    var steps = new MemoryStore();
    steps.AddListItem(configureStep);
    data.SetValue("steps", steps);
    var designer = new CharacterDesigner(data);
    var character = CharacterTestTemplates.AverageBob();
    designer.ExecuteStep(character);
    // The dummy step stores its configuration on the character.
    Assert.Equal(configureStep, character.Get <IObjectStore>());
    Assert.Equal("fourteen", configureStep.GetString("other-option"));
}
// Loads this gazetteer's terms from the RDF store, applying the settings
// resolved by ReadGazetteerSettings and skipping empty and duplicate terms.
public void ReadTerms(MemoryStore rdfStore)
{
    // read default settings
    CaseMatchingType caseMatchingType;
    bool lemmatize;
    ReadGazetteerSettings(rdfStore, out caseMatchingType, out lemmatize, out mEnabled, out mMinLen);
    // read terms
    Resource[] terms = rdfStore.SelectObjects(mUri, P_TERM);
    Set <string> skipList = new Set <string>(); // string forms already added
    foreach (Literal term in terms)
    {
        GazetteerTerm termObj = new GazetteerTerm(term.Value, /*gazetteer=*/ this, caseMatchingType, lemmatize, mEnabled, mMinLen);
        string termStr = termObj.ToString();
        // Keep only terms that tokenized to something and aren't duplicates.
        if (termObj.mTokens.Count > 0 && !skipList.Contains(termStr))
        {
            mTerms.Add(termObj);
            skipList.Add(termStr);
        }
    }
}
// Builds metadata for an image file. Statements are streamed into this object
// via Select; JPEGs additionally get their real pixel dimensions read from
// the file itself.
// FIXME We use the memory store to hold the anonymous statements
// as they are added so that we can query for them later to
// resolve anonymous nodes.
public ImageInfo(ImageFile img)
{
    store = new MemoryStore();
    if (img == null)
    {
        return;
    }
    if (img is StatementSource)
    {
        SemWeb.StatementSource source = (SemWeb.StatementSource)img;
        source.Select(this);
        // If we couldn't find the ISO speed because of the ordering
        // search the memory store for the values
        if (iso_speed == null && iso_anon != null)
        {
            add = false; // second pass: query only, don't re-add statements
            store.Select(this);
        }
    }
    if (img is JpegFile)
    {
        // Prefer the actual dimensions decoded from the JPEG data.
        int real_width;
        int real_height;
        JpegUtils.GetSize(img.Uri.LocalPath, out real_width, out real_height);
        width = real_width.ToString();
        height = real_height.ToString();
    }
#if USE_EXIF_DATE
    date = img.Date.ToLocalTime();
#endif
}
// Get(from, to) appends the stored messages whose ids fall in the range and
// leaves the target list empty when nothing matches.
public void GetTest()
{
    var store = new MemoryStore();
    store.Set(1, "dude");
    store.Set(2, "pude");
    store.Set(3, "ok");
    store.Set(4, "ohai");

    var received = new List <string>();
    store.Get(2, 3, received);
    var expected = new List <string>() { "pude", "ok" };
    Assert.That(received, Is.EqualTo(expected));

    // A range entirely beyond the stored ids yields no messages.
    received = new List <string>();
    store.Get(5, 6, received);
    Assert.That(received, Is.Empty);
}
// Putting the same node through two independent snapshot/cache commits should
// raise its reference count to 2 in the backing store.
public void TestReference1()
{
    var l = MPTNode.NewLeaf(Encoding.ASCII.GetBytes("leaf"));
    var store = new MemoryStore();
    // First put + commit.
    var snapshot = store.GetSnapshot();
    var cache = new MPTCache(snapshot, Prefix);
    cache.PutNode(l);
    cache.Commit();
    snapshot.Commit();
    // Second, independent put + commit of the identical node.
    var snapshot1 = store.GetSnapshot();
    var cache1 = new MPTCache(snapshot1, Prefix);
    cache1.PutNode(l);
    cache1.Commit();
    snapshot1.Commit();
    // A fresh cache observes the accumulated reference count.
    var snapshot2 = store.GetSnapshot();
    var cache2 = new MPTCache(snapshot2, Prefix);
    var rl = cache2.Resolve(l.Hash);
    Assert.AreEqual(2, rl.Reference);
}
// Produces a human-readable label/value pair for a metadata statement.
// Predicates registered in the Description table use its title and optional
// formatter; unknown predicates fall back to the last segment of the
// predicate URI and, when available, an rdfs:label from
// MetadataStore.Descriptions. value stays null for non-literal objects.
public static void GetDescription(MemoryStore store, Statement stmt, out string label, out string value)
{
    string predicate = stmt.Predicate.Uri;
    Description d = (Description)table [predicate];
    // Fallback label: last path segment of the predicate URI.
    label = System.IO.Path.GetFileName(predicate);
    value = null;
    if (stmt.Object is SemWeb.Literal)
    {
        value = ((SemWeb.Literal)(stmt.Object)).Value;
    }
    if (d != null)
    {
        label = d.title;
        if (d.formater != null && stmt.Object is SemWeb.Literal)
        {
            value = d.formater.GetValue(store, (SemWeb.Literal)stmt.Object);
        }
    }
    else
    {
        // No table entry: look for an rdfs:label statement for this predicate.
        Statement sstmt = new Statement(stmt.Predicate, (Entity)MetadataStore.Namespaces.Resolve("rdfs:label"), null);
        foreach (Statement tstmt in MetadataStore.Descriptions.Select(sstmt))
        {
            if (tstmt.Object is SemWeb.Literal)
            {
                label = ((SemWeb.Literal)(tstmt.Object)).Value;
            }
        }
    }
    return;
}
// Worker body for a single SADI service call: assembles the RDF input from the
// selected nodes, invokes the service, imports the returned statements, and
// reports progress/status back to the UI via MasterWorker. Errors are logged
// and surfaced through the call status rather than propagated.
private void InvokeServicesWorker_DoWork(object threadContext)
{
    ServiceCallStatus call = threadContext as ServiceCallStatus;
    SADIService service = call.Data as SADIService;
    try
    {
        call.Status = "Assembling input";
        MasterWorker.ReportProgress(1, call);
        MemoryStore input = assembleInput(SelectedNodes, service);
        call.Status = "Calling service";
        // call.Data doubles as a detail payload for the progress UI.
        call.Data = "Assembled input:\r\n" + SemWebHelper.storeToString(input);
        MasterWorker.ReportProgress(33, call);
        Store output = service.invokeService(input);
        call.Status = "Storing output";
        call.Data = "Received output:\r\n" + SemWebHelper.storeToString(output);
        MasterWorker.ReportProgress(66, call);
        ICollection <IStatement> statements = KE.Import(output);
        showNewStatements(statements);
        call.Status = "Done";
        call.Data = service;
        MasterWorker.ReportProgress(100, call);
    }
    catch (Exception err)
    {
        SADIHelper.error("ServiceCall", "error calling service", service, err);
        call.Status = "Error";
        call.Data = "Error:\r\n" + err.Message;
        MasterWorker.ReportProgress(100, call);
    }
    finally
    {
        // Signal this worker's completion regardless of outcome.
        Interlocked.Decrement(ref NumWorkers);
    }
}
// Rebuilds all lookup hashes and the RDF schema store from the Sources array.
// For each source: registers it by namespace and name, creates its Entity,
// loads its schema, then indexes every field by namespace and by field name
// and links the field entity to the source entity via rdfs:domain.
public void Init()
{
    SourceNsHash = new Dictionary <string, IList <SourceDescriptor> >();
    FieldSourceNsHash = new Dictionary <string, IList <SourceDescriptor> >();
    FieldNsSourceHash = new Dictionary <SourceDescriptor, IDictionary <string, FieldDescriptor> >();
    FieldNameSourceHash = new Dictionary <SourceDescriptor, IDictionary <string, FieldDescriptor> >();
    EntityFieldHash = new Dictionary <FieldDescriptor, Entity>();
    EntitySourceHash = new Dictionary <SourceDescriptor, Entity>();
    SourceNameHash = new Dictionary <string, SourceDescriptor>();
    SchemaStore = new MemoryStore();
    for (int i = 0; i < Sources.Length; i++)
    {
        var descr = Sources[i];
        AddToHashList(SourceNsHash, descr.Ns, descr);
        var sourceEntity = new Entity(descr.Ns);
        EntitySourceHash[descr] = sourceEntity;
        SourceNameHash[descr.SourceName] = descr;
        // fill schema
        LoadSchemaInfo(descr, sourceEntity, descr.SourceName, SchemaStore);
        var fieldNsHash = new Dictionary <string, FieldDescriptor>();
        var fieldNameHash = new Dictionary <string, FieldDescriptor>();
        for (int j = 0; j < descr.Fields.Length; j++)
        {
            var fldDescr = descr.Fields[j];
            AddToHashList(FieldSourceNsHash, fldDescr.Ns, descr);
            // BUG FIX: previously fieldNsHash was keyed by BOTH Ns and
            // FieldName while fieldNameHash was published empty; index each
            // map by its own key so FieldNameSourceHash actually resolves.
            fieldNsHash[fldDescr.Ns] = fldDescr;
            fieldNameHash[fldDescr.FieldName] = fldDescr;
            var fldEntity = new Entity(fldDescr.Ns);
            EntityFieldHash[fldDescr] = fldEntity;
            // fill schema
            LoadSchemaInfo(fldDescr, fldEntity, fldDescr.FieldName, SchemaStore);
            SchemaStore.Add(new Statement(fldEntity, NS.Rdfs.domainEntity, sourceEntity));
        }
        FieldNsSourceHash[descr] = fieldNsHash;
        FieldNameSourceHash[descr] = fieldNameHash;
    }
}
// Builds a Familiar from a nested MemoryStore and verifies its name and the
// single conditional stat modifier are loaded.
public void LoadFromObjectStore()
{
    var data = new MemoryStore();
    data.SetValue("name", "Bat");

    // One modifier entry: +2 bonus to Perception while in darkness.
    var statMod = new MemoryStore();
    statMod.SetValue("stat", "Perception");
    statMod.SetValue("type", "bonus");
    statMod.SetValue("modifier", 2);
    statMod.SetValue("condition", "darkness");

    var modifiers = new MemoryStore();
    modifiers.AddListItem(statMod);
    data.SetValue("modifiers", modifiers);

    var familiar = new Familiar(data);

    Assert.Equal("Bat", familiar.Name);
    Assert.Equal(1, familiar.Modifiers.Count);
    var loaded = familiar.Modifiers[0];
    Assert.Equal("Perception", loaded.StatisticName);
    Assert.Equal("bonus", loaded.ModifierType);
    Assert.Equal(2, loaded.Modifier);
}
// Mutating an extension node (including giving it a self-referencing Next)
// after PutNode must not affect what the cache resolves for its original hash.
public void TestPutAndChangedExtension()
{
    var e = Node.NewExtension(new byte[] { 0x01 }, new Node());
    var h = e.Hash; // remember the hash before mutation
    var store = new MemoryStore();
    var snapshot = store.GetSnapshot();
    var cache = new Cache(snapshot, Prefix);
    cache.PutNode(e);
    var re = cache.Resolve(e.Hash);
    Assert.AreEqual(e.Hash, re.Hash);
    Assert.AreEqual(e.Key.ToHexString(), re.Key.ToHexString());
    Assert.IsTrue(re.Next.IsEmpty);
    // Mutate the original node, creating a self-referencing cycle.
    e.Key = new byte[] { 0x02 };
    e.Next = e;
    e.SetDirty();
    // The cache still returns the node as it was at put time.
    var re1 = cache.Resolve(h);
    Assert.AreEqual(h, re1.Hash);
    Assert.AreEqual("01", re1.Key.ToHexString());
    Assert.IsTrue(re1.Next.IsEmpty);
}
// Reference counting across snapshots: nodes shared between keys must survive
// deletions committed in later snapshots, so a101 must still be readable after
// a301 and then a201 are deleted.
public void TestReference1()
{
    var store = new MemoryStore();
    var snapshot = store.GetSnapshot();
    var mpt = new Trie(snapshot, null);
    mpt.Put("a101".HexToBytes(), "01".HexToBytes());
    mpt.Put("a201".HexToBytes(), "01".HexToBytes());
    mpt.Put("a301".HexToBytes(), "01".HexToBytes());
    mpt.Commit();
    snapshot.Commit();
    // Delete a301 in a second snapshot built on the committed root.
    var snapshot1 = store.GetSnapshot();
    var mpt1 = new Trie(snapshot1, mpt.Root.Hash);
    mpt1.Delete("a301".HexToBytes());
    mpt1.Commit();
    snapshot1.Commit();
    // Delete a201 in a third snapshot; a101 must remain intact.
    var snapshot2 = store.GetSnapshot();
    var mpt2 = new Trie(snapshot2, mpt1.Root.Hash);
    mpt2.Delete("a201".HexToBytes());
    Assert.AreEqual("01", mpt2["a101".HexToBytes()].ToHexString());
}
// Enumerating a trie whose stored nibble path is malformed must surface a
// FormatException from the nibble-to-key conversion during Find.
public void TestFromNibblesException()
{
    var b = Node.NewBranch();
    var r = Node.NewExtension("0c".HexToBytes(), b);
    var v1 = Node.NewLeaf("abcd".HexToBytes()); //key=ac01
    var v2 = Node.NewLeaf("2222".HexToBytes()); //key=ac
    var e1 = Node.NewExtension(new byte[] { 0x01 }, v1);
    b.Children[0] = e1;
    b.Children[16] = v2; // index 16 is the branch's value slot
    var store = new MemoryStore();
    // Seed every node so resolution succeeds up to the nibble-decoding failure.
    PutToStore(store, r);
    PutToStore(store, b);
    PutToStore(store, e1);
    PutToStore(store, v1);
    PutToStore(store, v2);
    var snapshot = store.GetSnapshot();
    var mpt = new Trie(snapshot, r.Hash);
    // Count() forces full enumeration of the lazy Find sequence.
    Assert.ThrowsException <FormatException>(() => mpt.Find(Array.Empty <byte>()).Count());
}
// Find must throw InvalidOperationException when traversal reaches a hash
// child (e4) whose node was deliberately never written to the store and
// therefore cannot be resolved.
public void TestFindCantResolve()
{
    var b = Node.NewBranch();
    var r = Node.NewExtension("0a0c".HexToBytes(), b);
    var v1 = Node.NewLeaf("abcd".HexToBytes());//key=ac01
    var v4 = Node.NewLeaf(Encoding.ASCII.GetBytes("missing"));
    var e1 = Node.NewExtension(new byte[] { 0x01 }, v1);
    var e4 = Node.NewExtension(new byte[] { 0x01 }, v4);
    b.Children[0] = e1;
    // Only the hash reference is stored for e4; the node itself is withheld.
    b.Children[15] = Node.NewHash(e4.Hash);
    var store = new MemoryStore();
    PutToStore(store, r);
    PutToStore(store, b);
    PutToStore(store, e1);
    PutToStore(store, v1);
    var snapshot = store.GetSnapshot();
    var mpt = new Trie(snapshot, r.Hash);
    // Count() forces full enumeration of the lazy Find sequence.
    Assert.ThrowsException <InvalidOperationException>(() => mpt.Find("ac".HexToBytes()).Count());
}
// Backs the test store with an in-memory implementation so tests run
// without any external persistence.
public TestStore()
{
    m_store = new MemoryStore();
}
// Materializes all statements from the reader into a fresh in-memory store
// and delegates to the store-based Add overload.
// NOTE(review): this definition appears truncated in this view (no closing
// brace visible); preserved as-is.
public void Add(RdfReader source)
{
    MemoryStore store = new MemoryStore(source);
    Add(store);
// Materializes all statements from the reader into a fresh in-memory store
// and delegates to the uri-keyed Add overload.
// NOTE(review): this definition appears truncated in this view (no closing
// brace visible); preserved as-is.
public void Add(string uri, RdfReader source)
{
    MemoryStore store = new MemoryStore(source);
    Add(uri, store);
// Core SELECT/ASK implementation for the SPARQL endpoint-backed store: builds
// a query from the subject/predicate/object constraints, sends it via Load,
// parses the XML response, and streams matching statements into the sink.
// Returns false early when metas restrict to a non-default graph; otherwise
// the return value reflects whether enumeration ran (sink.Add returning false
// stops early with true).
bool Select(Entity[] subjects, Entity[] predicates, Resource[] objects, Entity[] metas, LiteralFilter[] litFilters, int limit, StatementSink sink, bool ask)
{
    // TODO: Change meta into named graphs. Anything but a null or DefaultMeta
    // meta returns no statements immediately.
    if (metas != null && (metas.Length != 1 || metas[0] != Statement.DefaultMeta))
        return false;
    string query;
    bool nonull = false;
    if (subjects != null && subjects.Length == 1 && predicates != null && predicates.Length == 1 && objects != null && objects.Length == 1)
    {
        // Fully-ground pattern: a plain ASK answers it.
        query = "ASK WHERE { " + S(subjects[0], null) + " " + S(predicates[0], null) + " " + S(objects[0], null) + "}";
        nonull = true;
    }
    else
    {
        if (ask)
            query = "ASK";
        else
            query = "SELECT *";
        query += " WHERE { ";
        query += S(subjects, "subject");
        query += " ";
        query += S(predicates, "predicate");
        query += " ";
        query += S(objects, "object");
        query += " . ";
        query += SL(subjects, "subject");
        query += SL(predicates, "predicate");
        query += SL(objects, "object");
        query += " }";
        // TODO: Pass literal filters to server.
    }
    if (limit >= 1)
        query += " LIMIT " + limit;
    XmlDocument result = Load(query);
    if (ask || nonull)
    {
        // Boolean response path (ASK query, or fully-ground pattern).
        foreach (XmlElement boolean in result.DocumentElement)
        {
            if (boolean.Name != "boolean")
                continue;
            bool ret = boolean.InnerText == "true";
            if (ask)
                return ret;
            else if (ret)
                // Ground pattern matched: emit the single concrete statement.
                sink.Add(new Statement(subjects[0], predicates[0], objects[0]));
            return false;
        }
        throw new ApplicationException("Invalid server response: No boolean node.");
    }
    // Locate the <results> element of the SPARQL XML response.
    XmlElement bindings = null;
    foreach (XmlElement e in result.DocumentElement)
        if (e.Name == "results")
            bindings = e;
    if (bindings == null)
        throw new ApplicationException("Invalid server response: No result node.");
    // If the server did not mark the results distinct, deduplicate locally.
    MemoryStore distinctCheck = null;
    if (bindings.GetAttribute("distinct") != "true")
        distinctCheck = new MemoryStore();
    Hashtable bnodes = new Hashtable(); // blank-node ids -> local BNode objects
    foreach (XmlNode bindingnode in bindings)
    {
        if (!(bindingnode is XmlElement))
            continue;
        XmlElement binding = (XmlElement)bindingnode;
        Resource subj = GetBinding(binding, "subject", subjects, bnodes);
        Resource pred = GetBinding(binding, "predicate", predicates, bnodes);
        Resource obj = GetBinding(binding, "object", objects, bnodes);
        // Literal subjects/predicates cannot form valid statements; skip them.
        if (!(subj is Entity) || !(pred is Entity))
            continue;
        Statement s = new Statement((Entity)subj, (Entity)pred, obj);
        if (distinctCheck != null && distinctCheck.Contains(s))
            continue;
        // Literal filters are applied client-side (see TODO above).
        if (litFilters != null && !LiteralFilter.MatchesFilters(s.Object, litFilters, this))
            continue;
        if (!sink.Add(s))
            return true; // sink asked to stop early
        if (distinctCheck != null)
            distinctCheck.Add(s);
    }
    return true;
}
// Caches Select results per template: the first request for a template
// materializes the source's matches into a MemoryStore; later requests
// replay the cached store into the sink without touching the source again.
// NOTE(review): this definition appears truncated in this view (no closing
// brace visible); preserved as-is.
public void Select(Statement template, StatementSink sink)
{
    if (!selectresults.ContainsKey(template))
    {
        MemoryStore s = new MemoryStore();
        source.Select(template, s);
        selectresults[template] = s;
    }
    ((MemoryStore)selectresults[template]).Select(sink);
// rdfbind entry point: reads a binding map (assembly/namespace name paired
// with a schema URI per line), loads the given schema files into one
// MultiStore, and emits a bindings assembly (.dll) for each schema in the map.
public static void Main(string[] args)
{
    if (args.Length < 3)
    {
        Console.Error.WriteLine("Usage: mono rdfbind.exe bindings.txt targetschema schmefile1 schemafile2 . . .");
        return;
    }
    // Parse command-line arguments
    string bindingmapfile = args[0];
    string targetschema = args[1];
    ArrayList schemafiles = new ArrayList();
    for (int i = 2; i < args.Length; i++)
        schemafiles.Add(args[i]);
    // Load the binding map: each non-comment line is "name <ws> schemaURI".
    Hashtable bindingmap = new Hashtable();
    ArrayList schemalist = new ArrayList();
    try
    {
        char[] whitespacechars = { ' ', '\t' };
        using (TextReader map = new StreamReader(bindingmapfile))
        {
            string line;
            while ((line = map.ReadLine()) != null)
            {
                // Skip blank lines and '#' comments.
                if (line == "" || line.StartsWith("#"))
                    continue;
                int whitespace = line.IndexOfAny(whitespacechars);
                if (whitespace == -1)
                    throw new FormatException("Each line should be an assembly/namespace name followed by a space or tab, followed by a schema URI.");
                string name = line.Substring(0, whitespace).Trim();
                string uri = line.Substring(whitespace + 1).Trim();
                bindingmap[uri] = name;
                schemalist.Add(uri);
                // Let targetscheme be either a name or URI.
                if (targetschema == name)
                    targetschema = uri;
            }
        }
    }
    catch (Exception e)
    {
        Console.Error.WriteLine("Error loading the binding map: " + e.Message);
        return;
    }
    if (!bindingmap.ContainsKey(targetschema))
    {
        Console.Error.WriteLine("The target schema must have an entry in the binding map.");
        return;
    }
    // Load every schema file into a single combined store.
    MultiStore schemas = new MultiStore();
    foreach (string schemafile in schemafiles)
    {
        try
        {
            Store schema = new MemoryStore(new RdfXmlReader(schemafile));
            schemas.Add(schema);
        }
        catch (Exception e)
        {
            Console.Error.WriteLine("Error loading the schema in '" + schemafile + "': " + e.Message);
            return;
        }
    }
    // Generate and save one bindings assembly per schema listed in the map.
    foreach (string sch in schemalist)
    {
        AssemblyBuilder a = new SemWeb.Bind.Bindings(sch, schemas, bindingmap).CreateBindings();
        a.Save((string)bindingmap[sch] + ".dll");
    }
}
// Caches Select-by-filter results: the first call for a filter materializes
// the source's matches into a MemoryStore; later calls replay the cached
// store into the sink without touching the source again.
// NOTE(review): this definition appears truncated in this view (no closing
// brace visible); preserved as-is.
public void Select(SelectFilter filter, StatementSink sink)
{
    if (!selfilterresults.ContainsKey(filter))
    {
        MemoryStore s = new MemoryStore();
        source.Select(filter, s);
        selfilterresults[filter] = s;
    }
    ((MemoryStore)selfilterresults[filter]).Select(sink);