/// <summary>
/// Exercises the database-level REST endpoints: create, info, _changes,
/// duplicate-create conflict, _all_dbs listing, delete, and database names
/// containing URL-encoded slashes.
/// </summary>
public virtual void TestDatabase()
{
    Send("PUT", "/database", Status.Created, null);

    // An empty database reports no changes and last_seq 0.
    IDictionary entries = new Dictionary<string, IDictionary<string, object>>();
    entries.Put("results", new AList<object>());
    entries.Put("last_seq", 0);
    Send("GET", "/database/_changes?feed=normal&heartbeat=300000&style=all_docs", Status.Ok, entries);

    IDictionary<string, object> dbInfo = (IDictionary<string, object>)Send("GET", "/database", Status.Ok, null);
    NUnit.Framework.Assert.AreEqual(6, dbInfo.Count);
    NUnit.Framework.Assert.AreEqual(0, dbInfo.Get("doc_count"));
    NUnit.Framework.Assert.AreEqual(0, dbInfo.Get("update_seq"));
    NUnit.Framework.Assert.IsTrue((int)dbInfo.Get("disk_size") > 8000);
    NUnit.Framework.Assert.AreEqual("database", dbInfo.Get("db_name"));
    // instance_start_time appears to be in microseconds (compared against millis * 1000) — TODO confirm.
    NUnit.Framework.Assert.IsTrue(Runtime.CurrentTimeMillis() * 1000 > (long)dbInfo.Get("instance_start_time"));
    NUnit.Framework.Assert.IsTrue(dbInfo.ContainsKey("db_uuid"));

    // Re-creating an existing database must fail with 412 Precondition Failed.
    Send("PUT", "/database", Status.PreconditionFailed, null);
    Send("PUT", "/database2", Status.Created, null);

    IList<string> allDbs = new AList<string>();
    allDbs.AddItem("cblite-test");
    allDbs.AddItem("database");
    allDbs.AddItem("database2");
    Send("GET", "/_all_dbs", Status.Ok, allDbs);
    dbInfo = (IDictionary<string, object>)Send("GET", "/database2", Status.Ok, null);
    NUnit.Framework.Assert.AreEqual("database2", dbInfo.Get("db_name"));

    // Deleting a database removes it from the _all_dbs listing.
    Send("DELETE", "/database2", Status.Ok, null);
    allDbs.Remove("database2");
    Send("GET", "/_all_dbs", Status.Ok, allDbs);

    // Database names may contain slashes when URL-encoded as %2F.
    Send("PUT", "/database%2Fwith%2Fslashes", Status.Created, null);
    dbInfo = (IDictionary<string, object>)Send("GET", "/database%2Fwith%2Fslashes", Status.Ok, null);
    NUnit.Framework.Assert.AreEqual("database/with/slashes", dbInfo.Get("db_name"));
}
/// <summary>Creates and saves a new task document in the given database.</summary>
/// <remarks>
/// The task carries a formatted creation timestamp and, when an image is
/// supplied, a JPEG attachment named "image" compressed at quality 50.
/// </remarks>
/// <exception cref="Couchbase.Lite.CouchbaseLiteException"></exception>
public static Couchbase.Lite.Document CreateTask(Database database, string title, Bitmap image, string listId)
{
    var formatter = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS'Z'");
    var now = GregorianCalendar.GetInstance();
    string createdAt = formatter.Format(now.GetTime());

    var properties = new Dictionary<string, object>
    {
        ["type"] = DocType,
        ["title"] = title,
        ["checked"] = false,
        ["created_at"] = createdAt,
        ["list_id"] = listId
    };

    Couchbase.Lite.Document document = database.CreateDocument();
    UnsavedRevision revision = document.CreateRevision();
    revision.SetUserProperties(properties);
    if (image != null)
    {
        // Compress the bitmap to JPEG (quality 50) and attach the bytes to the revision.
        var buffer = new ByteArrayOutputStream();
        image.Compress(Bitmap.CompressFormat.Jpeg, 50, buffer);
        revision.SetAttachment("image", "image/jpg", new ByteArrayInputStream(buffer.ToByteArray()));
    }
    revision.Save();
    return document;
}
/// <summary>
/// Creates the configured number of documents, each with a plain-text
/// attachment of the configured size; returns false if any creation fails.
/// </summary>
public bool Run()
{
    // Build the attachment body: a string of '1' characters of the configured size.
    StringBuilder sb = new StringBuilder();
    for (int i = 0; i < this._enclosing.GetSizeOfAttachment(); i++)
    {
        sb.Append('1');
    }
    byte[] attach1 = Sharpen.Runtime.GetBytesForString(sb.ToString());
    try
    {
        Status status = new Status();
        for (int i_1 = 0; i_1 < this._enclosing.GetNumberOfDocuments(); i_1++)
        {
            IDictionary<string, object> rev1Properties = new Dictionary<string, object>();
            rev1Properties.Put("foo", 1);
            rev1Properties.Put("bar", false);
            RevisionInternal rev1 = this._enclosing.database.PutRevision(new RevisionInternal(rev1Properties, this._enclosing.database), null, false, status);
            NUnit.Framework.Assert.AreEqual(Status.Created, status.GetCode());
            this._enclosing.database.InsertAttachmentForSequenceWithNameAndType(new ByteArrayInputStream(attach1), rev1.GetSequence(), Test3_CreateDocsWithAttachments._testAttachmentName, "text/plain", rev1.GetGeneration());
            // NOTE(review): InsertAttachmentForSequenceWithNameAndType is not passed
            // 'status', so this second assert re-checks the code left by PutRevision
            // above — confirm whether the attachment call was meant to be verified.
            NUnit.Framework.Assert.AreEqual(Status.Created, status.GetCode());
        }
    }
    catch (Exception t)
    {
        Log.E(Test3_CreateDocsWithAttachments.Tag, "Document create with attachment failed", t);
        return false;
    }
    return true;
}
// Reproduces issue #167
// https://github.com/couchbase/couchbase-lite-android/issues/167
/// <summary>
/// Loading a revision body succeeds for an existing revision and fails with a
/// NotFound CouchbaseLiteException once the document has been purged.
/// </summary>
/// <exception cref="Couchbase.Lite.CouchbaseLiteException"></exception>
public virtual void TestLoadRevisionBody()
{
    var document = database.CreateDocument();
    var properties = new Dictionary<string, object> { ["foo"] = "foo", ["bar"] = false };
    document.PutProperties(properties);
    NUnit.Framework.Assert.IsNotNull(document.GetCurrentRevision());

    const bool deleted = false;
    var revisionInternal = new RevisionInternal(document.GetId(), document.GetCurrentRevisionId(), deleted, database);
    var contentOptions = EnumSet.Of(Database.TDContentOptions.TDIncludeAttachments, Database.TDContentOptions.TDBigAttachmentsFollow);
    database.LoadRevisionBody(revisionInternal, contentOptions);

    // Now purge the document; loading the same revision body must report NotFound.
    NUnit.Framework.Assert.IsTrue(document.Purge());
    var gotExpectedException = false;
    try
    {
        database.LoadRevisionBody(revisionInternal, contentOptions);
    }
    catch (CouchbaseLiteException e)
    {
        if (e.GetCBLStatus().GetCode() == Status.NotFound)
        {
            gotExpectedException = true;
        }
    }
    NUnit.Framework.Assert.IsTrue(gotExpectedException);
}
/// <summary>
/// Browses with a multi-value path selection ("/a", depth 0, non-strict) and
/// verifies the hit count and the facet value count.
/// </summary>
public void TestMultiValPath()
{
    IndexReader reader = IndexReader.Open(directory, true);
    BoboIndexReader boboReader = BoboIndexReader.GetInstance(reader, facetHandlers);
    var browser = new BoboBrowser(boboReader);

    var selection = new BrowseSelection(PathHandlerName);
    selection.AddValue("/a");
    var selectionProps = new Dictionary<String, String>
    {
        [PathFacetHandler.SEL_PROP_NAME_DEPTH] = "0",
        [PathFacetHandler.SEL_PROP_NAME_STRICT] = "false"
    };
    selection.SetSelectionProperties(selectionProps);

    var request = new BrowseRequest();
    request.AddSelection(selection);
    var facetSpec = new FacetSpec();
    facetSpec.MinHitCount = (1);
    request.SetFacetSpec(PathHandlerName, facetSpec);

    BrowseResult result = browser.Browse(request);
    Assert.AreEqual(result.NumHits, 1);
    IFacetAccessible accessible = result.GetFacetAccessor(PathHandlerName);
    IEnumerable<BrowseFacet> facets = accessible.GetFacets();
    Console.WriteLine(facets);
    Assert.AreEqual(1, facets.Count());
    BrowseFacet facet = facets.Get(0);
    Assert.AreEqual(2, facet.FacetValueHitCount);
}
/// <summary>
/// A freshly saved document must expose both a current revision ID and a
/// current revision object.
/// </summary>
/// <exception cref="Couchbase.Lite.CouchbaseLiteException"></exception>
public virtual void TestNewDocumentHasCurrentRevision()
{
    var document = database.CreateDocument();
    var properties = new Dictionary<string, object> { ["foo"] = "foo", ["bar"] = false };
    document.PutProperties(properties);
    NUnit.Framework.Assert.IsNotNull(document.GetCurrentRevisionId());
    NUnit.Framework.Assert.IsNotNull(document.GetCurrentRevision());
}
/// <summary>Force-inserts a revision while passing a null revision history.</summary>
/// <exception cref="Couchbase.Lite.CouchbaseLiteException"></exception>
public virtual void TestForceInsertEmptyHistory()
{
    var rev = new RevisionInternal("FakeDocId", "1-tango", false, database);
    var revProperties = new Dictionary<string, object>
    {
        ["_id"] = rev.GetDocId(),
        ["_rev"] = rev.GetRevId(),
        ["message"] = "hi"
    };
    rev.SetProperties(revProperties);

    IList<string> revHistory = null;
    database.ForceInsert(rev, revHistory, null);
}
/// <summary>Force-inserts a revision while passing a null revision history.</summary>
public void TestForceInsertEmptyHistory()
{
    var rev = new RevisionInternal("FakeDocId", "1-abcd", false);
    // Consistency fix: the original mixed Sharpen's Put() extension with the
    // dictionary indexer in the same block; use the indexer throughout
    // (both assign-or-replace the key).
    var revProperties = new Dictionary<string, object>();
    revProperties["_id"] = rev.GetDocId();
    revProperties["_rev"] = rev.GetRevId();
    revProperties["message"] = "hi";
    rev.SetProperties(revProperties);

    IList<string> revHistory = null;
    database.ForceInsert(rev, revHistory, null);
}
/// <summary>Creates and saves a new list document with the given title and owner.</summary>
/// <remarks>
/// The list stores a formatted creation timestamp, an owner reference of the
/// form "profile:&lt;userId&gt;", and an initially empty members array.
/// </remarks>
/// <exception cref="Couchbase.Lite.CouchbaseLiteException"></exception>
public static Couchbase.Lite.Document CreateNewList(Database database, string title, string userId)
{
    var dateFormatter = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS'Z'");
    // BUGFIX: the original used Calendar.CurrentEra, which names an era
    // constant rather than a calendar instance, and then called GetTime() on
    // it. Use a calendar instance for the current time, matching CreateTask()
    // elsewhere in this codebase.
    var calendar = GregorianCalendar.GetInstance();
    string currentTimeString = dateFormatter.Format(calendar.GetTime());

    IDictionary<string, object> properties = new Dictionary<string, object>
    {
        ["type"] = "list",
        ["title"] = title,
        ["created_at"] = currentTimeString,
        ["owner"] = "profile:" + userId,
        ["members"] = new AList<string>()
    };

    Couchbase.Lite.Document document = database.CreateDocument();
    document.PutProperties(properties);
    return document;
}
/// <summary>
/// Creates the configured number of documents, each holding one large string
/// array property; returns false if any creation fails.
/// </summary>
public bool Run()
{
    // One large shared property value reused by every document.
    var bigArray = new string[this._enclosing.GetSizeOfDocument()];
    for (var i = 0; i < bigArray.Length; i++)
    {
        bigArray[i] = Test10_DeleteDB._propertyValue;
    }
    for (var docIndex = 0; docIndex < this._enclosing.GetNumberOfDocuments(); docIndex++)
    {
        // Create a document.
        var props = new Dictionary<string, object> { ["bigArray"] = bigArray };
        var rev = new RevisionInternal(new Body(props), this._enclosing.database);
        var status = new Status();
        try
        {
            rev = this._enclosing.database.PutRevision(rev, null, false, status);
        }
        catch (Exception t)
        {
            Log.E(Test10_DeleteDB.Tag, "Document create failed", t);
            return false;
        }
    }
    return true;
}
/// <summary>
/// Creates the configured number of documents, each holding one large string
/// array property, recording each Document in docs[]; returns false on failure.
/// </summary>
public bool Run()
{
    // One large shared property value reused by every document.
    var bigArray = new string[this._enclosing.GetSizeOfDocument()];
    for (var i = 0; i < bigArray.Length; i++)
    {
        bigArray[i] = Test11_DeleteDocs._propertyValue;
    }
    for (var docIndex = 0; docIndex < this._enclosing.GetNumberOfDocuments(); docIndex++)
    {
        // Create a document and remember it for later deletion.
        var props = new Dictionary<string, object> { ["bigArray"] = bigArray };
        var doc = this._enclosing.database.CreateDocument();
        this._enclosing.docs[docIndex] = doc;
        try
        {
            doc.PutProperties(props);
        }
        catch (CouchbaseLiteException cblex)
        {
            Log.E(Test11_DeleteDocs.Tag, "Document creation failed", cblex);
            return false;
        }
    }
    return true;
}
/// <summary>
/// Checks that MultipartReader derives the expected boundary delimiter bytes
/// from several Content-Type header variants, and that it rejects a header
/// whose quoted boundary is never closed.
/// </summary>
public virtual void TestParseContentType()
{
    Encoding utf8 = Sharpen.Extensions.GetEncoding("UTF-8");
    // Map of Content-Type header -> expected boundary delimiter bytes ("\r\n--" + boundary).
    Dictionary<string, byte[]> contentTypes = new Dictionary<string, byte[]>();
    contentTypes.Put("multipart/related; boundary=\"BOUNDARY\"", Sharpen.Runtime.GetBytesForString(new string("\r\n--BOUNDARY"), utf8));
    contentTypes.Put("multipart/related; boundary=BOUNDARY", Sharpen.Runtime.GetBytesForString(new string("\r\n--BOUNDARY"), utf8));
    contentTypes.Put("multipart/related;boundary=X", Sharpen.Runtime.GetBytesForString(new string("\r\n--X"), utf8));
    foreach (string contentType in contentTypes.Keys)
    {
        MultipartReaderDelegate delegate_ = null;
        MultipartReader reader = new MultipartReader(contentType, delegate_);
        byte[] expectedBoundary = (byte[])contentTypes.Get(contentType);
        byte[] boundary = reader.GetBoundary();
        NUnit.Framework.Assert.IsTrue(Arrays.Equals(boundary, expectedBoundary));
    }
    try
    {
        MultipartReaderDelegate delegate_ = null;
        // Malformed header: the opening quote of the boundary value is never closed.
        MultipartReader reader = new MultipartReader("multipart/related; boundary=\"BOUNDARY", delegate_);
        NUnit.Framework.Assert.IsTrue("Should not have gotten here, above lines should have thrown exception", false);
    }
    catch (Exception)
    {
        // Intentionally empty: the constructor is expected to throw for the malformed header.
    }
}
/// <summary>
/// Putting a new revision must fire exactly one database change notification
/// to a registered listener.
/// </summary>
/// <exception cref="Couchbase.Lite.CouchbaseLiteException"></exception>
public virtual void TestChangeNotification()
{
    // Register the listener before making any changes.
    Database.ChangeListener changeListener = new _ChangeListener_16(this);
    database.AddChangeListener(changeListener);

    // Create a document.
    var documentProperties = new Dictionary<string, object>
    {
        ["foo"] = 1,
        ["bar"] = false,
        ["baz"] = "touch"
    };
    var rev = new RevisionInternal(new Body(documentProperties), database);
    var status = new Status();
    rev = database.PutRevision(rev, null, false, status);

    NUnit.Framework.Assert.AreEqual(1, changeNotifications);
}
/// <summary>
/// Builds a Persona token authenticator carrying the given assertion as its
/// access token.
/// </summary>
public static Authenticator CreatePersonaAuthenticator(string assertion, string email)
{
    // TODO: REVIEW : Do we need email? (kept for signature compatibility; currently unused)
    var @params = new Dictionary<string, string> { ["access_token"] = assertion };
    return new TokenAuthenticator("_persona", @params);
}
/// <summary>ScriptableOutputStream constructor.</summary>
/// <remarks>
/// ScriptableOutputStream constructor.
/// Creates a ScriptableOutputStream for use in serializing
/// JavaScript objects. Calls excludeStandardObjectNames.
/// </remarks>
/// <param name="out">the OutputStream to write to.</param>
/// <param name="scope">the scope containing the object.</param>
/// <exception cref="System.IO.IOException"></exception>
public ScriptableOutputStream(Stream @out, Scriptable scope) : base(@out)
{
    // API class
    this.scope = scope;
    // Register the scope itself under the empty name so it can be recognized
    // during serialization.
    table = new Dictionary<object, string>();
    table.Put(scope, string.Empty);
    EnableReplaceObject(true);
    ExcludeStandardObjectNames();
}
/// <summary>
/// With the max rev-tree depth set to 1, creating many revisions and then
/// compacting should prune the revision history down to a single revision.
/// </summary>
/// <exception cref="System.Exception"></exception>
public virtual void TestPruneRevsToMaxDepthViaCompact()
{
    var properties = new Dictionary<string, object>
    {
        ["testName"] = "testDatabaseCompaction",
        ["tag"] = 1337
    };
    var doc = CreateDocumentWithProperties(database, properties);
    var rev = doc.GetCurrentRevision();

    database.SetMaxRevTreeDepth(1);
    for (var i = 0; i < 10; i++)
    {
        // Each iteration adds one more revision with a different tag.
        var updated = new Dictionary<string, object>(properties);
        updated["tag"] = i;
        rev = rev.CreateRevision(updated);
    }
    database.Compact();

    var fetchedDoc = database.GetDocument(doc.GetId());
    IList<SavedRevision> revisions = fetchedDoc.GetRevisionHistory();
    NUnit.Framework.Assert.AreEqual(1, revisions.Count);
}
/// <summary>
/// Creates a document and verifies its ID, revision ID, user properties,
/// identity from the cache, and re-fetch behavior after clearing the cache.
/// </summary>
/// <exception cref="Couchbase.Lite.CouchbaseLiteException"></exception>
public virtual void TestCreateDocument()
{
    var properties = new Dictionary<string, object>
    {
        ["testName"] = "testCreateDocument",
        ["tag"] = 1337
    };
    Database db = StartDatabase();
    Document doc = CreateDocumentWithProperties(db, properties);

    string docID = doc.GetId();
    NUnit.Framework.Assert.IsTrue("Invalid doc ID: " + docID, docID.Length > 10);
    string currentRevisionID = doc.GetCurrentRevisionId();
    NUnit.Framework.Assert.IsTrue("Invalid doc revision: " + docID, currentRevisionID.Length > 10);
    NUnit.Framework.Assert.AreEqual(doc.GetUserProperties(), properties);
    NUnit.Framework.Assert.AreEqual(db.GetDocument(docID), doc);

    // Drop cached Document instances so the next fetch loads a fresh copy.
    db.ClearDocumentCache();
    Document doc2 = db.GetExistingDocument(docID);
    NUnit.Framework.Assert.AreEqual(doc2.GetId(), docID);
    NUnit.Framework.Assert.AreEqual(doc2.GetCurrentRevisionId(), currentRevisionID);
    NUnit.Framework.Assert.IsNull(db.GetExistingDocument("b0gus"));
}
/// <summary>
/// Deletes a document and verifies it is gone from direct lookup and absent
/// from the all-documents query.
/// </summary>
/// <exception cref="Couchbase.Lite.CouchbaseLiteException"></exception>
public virtual void TestDeleteDocument()
{
    var document = database.CreateDocument();
    var properties = new Dictionary<string, object> { ["foo"] = "foo", ["bar"] = false };
    document.PutProperties(properties);
    NUnit.Framework.Assert.IsNotNull(document.GetCurrentRevision());

    string docId = document.GetId();
    document.Delete();
    NUnit.Framework.Assert.IsTrue(document.IsDeleted());
    NUnit.Framework.Assert.IsNull(database.GetExistingDocument(docId));

    // Query all docs and make sure we don't see that document.
    database.GetAllDocs(new QueryOptions());
    Query queryAllDocs = database.CreateAllDocumentsQuery();
    QueryEnumerator queryEnumerator = queryAllDocs.Run();
    IEnumerator<QueryRow> it = queryEnumerator;
    while (it.HasNext())
    {
        QueryRow row = it.Next();
        NUnit.Framework.Assert.IsFalse(row.GetDocument().GetId().Equals(docId));
    }
}
/// <summary>
/// Exercises the server-level REST endpoints: the welcome banner, _session,
/// _all_dbs, plus a few invalid paths and methods.
/// </summary>
public virtual void TestServer()
{
    // GET / returns the welcome banner with the router version.
    var responseBody = new Dictionary<string, object>
    {
        ["CBLite"] = "Welcome",
        ["couchdb"] = "Welcome",
        ["version"] = Couchbase.Lite.Router.Router.GetVersionString()
    };
    Send("GET", "/", Status.Ok, responseBody);

    // GET /_session reports an anonymous admin user context.
    var roles = new AList<string>();
    roles.AddItem("_admin");
    var userCtx = new Dictionary<string, object>
    {
        ["name"] = null,
        ["roles"] = roles
    };
    var session = new Dictionary<string, object>
    {
        ["ok"] = true,
        ["userCtx"] = userCtx
    };
    Send("GET", "/_session", Status.Ok, session);

    var allDbs = new AList<string>();
    allDbs.AddItem("cblite-test");
    Send("GET", "/_all_dbs", Status.Ok, allDbs);

    // Invalid paths and unsupported methods on the root.
    Send("GET", "/non-existant", Status.NotFound, null);
    Send("GET", "/BadName", Status.BadRequest, null);
    Send("PUT", "/", Status.BadRequest, null);
    Send("POST", "/", Status.BadRequest, null);
}
/// <summary>
/// Measures cold-start document creation: repeatedly tears down and reopens
/// the manager/database, creates the configured number of documents each
/// cycle, and logs the total elapsed time as a PerformanceStats entry.
/// </summary>
/// <exception cref="Couchbase.Lite.CouchbaseLiteException"></exception>
public virtual void TestLoadDBPerformance()
{
    long startMillis = Runtime.CurrentTimeMillis();
    // One large shared property value reused by every document.
    string[] bigObj = new string[GetSizeOfDocument()];
    for (int i = 0; i < GetSizeOfDocument(); i++)
    {
        bigObj[i] = _propertyValue;
    }
    for (int j = 0; j < GetNumberOfShutAndReloadCycles(); j++)
    {
        //Force close and reopen of manager and database to ensure cold
        //start before doc creation
        try
        {
            TearDown();
            manager = new Manager(new LiteTestContext(), Manager.DefaultOptions);
            database = manager.GetExistingDatabase(DefaultTestDb);
        }
        catch (Exception ex)
        {
            Log.E(Tag, "DB teardown", ex);
            Fail();
        }
        for (int k = 0; k < GetNumberOfDocuments(); k++)
        {
            //create a document
            IDictionary<string, object> props = new Dictionary<string, object>();
            props.Put("bigArray", bigObj);
            Body body = new Body(props);
            RevisionInternal rev1 = new RevisionInternal(body, database);
            Status status = new Status();
            try
            {
                rev1 = database.PutRevision(rev1, null, false, status);
            }
            catch (Exception t)
            {
                Log.E(Tag, "Document creation failed", t);
                Fail();
            }
        }
    }
    // CSV-style stats line: tag, elapsed ms, #docs, doc size, (unused), #cycles.
    Log.V("PerformanceStats", Tag + "," + Sharpen.Extensions.ValueOf(Runtime.CurrentTimeMillis() - startMillis).ToString() + "," + GetNumberOfDocuments() + "," + GetSizeOfDocument() + ",," + GetNumberOfShutAndReloadCycles());
}
// Distance error percentage used by the LUCENE-4464 case.
private static readonly double LUCENE_4464_distErrPct = SpatialArgs.DEFAULT_DISTERRPCT;//DEFAULT 2.5%

/// <summary>
/// Sets up an NTS spatial context and a recursive prefix-tree strategy over a
/// geohash grid for the polygon tests. If the NTS context factory type cannot
/// be loaded, the test is skipped via AssumeTrue.
/// </summary>
public NtsPolygonTest()
{
    try
    {
        IDictionary<string, string> args = new Dictionary<string, string>();
        args.Put("spatialContextFactory", typeof(NtsSpatialContextFactory).AssemblyQualifiedName);
        ctx = SpatialContextFactory.MakeSpatialContext(args /*, getClass().getClassLoader()*/);
    }
    catch (TypeLoadException e) //LUCENENET TODO: Does this match NoClassDefFoundError ??
    {
        AssumeTrue("This test requires Spatial4n.Core.NTS: " + e, false);
    }
    GeohashPrefixTree grid = new GeohashPrefixTree(ctx, 11);//< 1 meter == 11 maxLevels
    this.strategy = new RecursivePrefixTreeStrategy(grid, GetType().Name);
    ((RecursivePrefixTreeStrategy)this.strategy).DistErrPct = (LUCENE_4464_distErrPct);//1% radius (small!)
}
/// <summary>Exercises Put, Get, Remove, and Clear on the document cache.</summary>
/// <exception cref="System.Exception"></exception>
public virtual void TestCache()
{
    var retainCount = 1;
    Cache cache = new Cache<string, Document>(retainCount);

    var doc1 = CreateDocumentWithProperties(database, new Dictionary<string, object> { ["foo"] = "bar" });
    cache.Put(doc1.GetId(), doc1);
    var doc2 = CreateDocumentWithProperties(database, new Dictionary<string, object> { ["foo2"] = "bar2" });
    cache.Put(doc2.GetId(), doc2);

    // Both documents should be retrievable after insertion.
    NUnit.Framework.Assert.IsNotNull(cache.Get(doc1.GetId()));
    NUnit.Framework.Assert.IsNotNull(cache.Get(doc2.GetId()));

    // Removing one entry must not affect the other.
    cache.Remove(doc1.GetId());
    NUnit.Framework.Assert.IsNull(cache.Get(doc1.GetId()));

    // Clear drops everything.
    cache.Clear();
    NUnit.Framework.Assert.IsNull(cache.Get(doc2.GetId()));
}
/// <summary>
/// Round-trips a heterogeneous string/number/bool dictionary through JSON
/// bytes and back, asserting equality of the result.
/// </summary>
/// <exception cref="System.Exception"></exception>
public virtual void TestJsonObject()
{
    var dict = new Dictionary<string, object>
    {
        ["id"] = "01234567890",
        ["foo"] = "bar",
        ["int"] = 5,
        ["double"] = 3.5,
        ["bool"] = true,
        ["date"] = new DateTime().ToString()
    };
    var mapper = new ObjectWriter();
    byte[] json = mapper.WriteValueAsBytes(dict);
    var jsdoc = new JsonDocument(json);
    NUnit.Framework.Assert.AreEqual(dict, jsdoc.JsonObject());
}
/// <summary>
/// Creates the configured number of single-property documents, recording each
/// Document in docs[]; returns false if any creation fails.
/// </summary>
public bool Run()
{
    for (var i = 0; i < this._enclosing.GetNumberOfDocuments(); i++)
    {
        // Create a document and remember it for later revision tests.
        var props = new Dictionary<string, object> { ["toogle"] = true };
        var doc = this._enclosing.database.CreateDocument();
        this._enclosing.docs[i] = doc;
        try
        {
            doc.PutProperties(props);
        }
        catch (CouchbaseLiteException cblex)
        {
            Log.E(Test8_DocRevisions.Tag, "Document creation failed", cblex);
            return false;
        }
    }
    return true;
}
/// <summary>
/// Creates the configured number of documents via direct PutRevision calls
/// (no batching) and logs the elapsed time as a PerformanceStats entry.
/// </summary>
/// <exception cref="Couchbase.Lite.CouchbaseLiteException"></exception>
public virtual void TestCreateDocsUnoptimizedWayPerformance()
{
    long startMillis = Runtime.CurrentTimeMillis();
    // One large shared property value reused by every document.
    var bigArray = new string[GetSizeOfDocument()];
    for (var i = 0; i < bigArray.Length; i++)
    {
        bigArray[i] = _propertyValue;
    }
    for (var docIndex = 0; docIndex < GetNumberOfDocuments(); docIndex++)
    {
        // Create a document.
        var props = new Dictionary<string, object> { ["bigArray"] = bigArray };
        var rev = new RevisionInternal(new Body(props), database);
        var status = new Status();
        rev = database.PutRevision(rev, null, false, status);
    }
    // CSV-style stats line: tag, elapsed ms, #docs, doc size.
    Log.V("PerformanceStats", Tag + "," + Sharpen.Extensions.ValueOf(Runtime.CurrentTimeMillis() - startMillis).ToString() + "," + GetNumberOfDocuments() + "," + GetSizeOfDocument());
}
/// <summary>
/// Looks up an access token for this account and site; when found, returns it
/// as the login parameters, otherwise (or on lookup error) returns null.
/// </summary>
public override IDictionary<string, string> LoginParametersForSite(Uri site)
{
    try
    {
        string accessToken = AccessTokenForEmailAndSite(this.emailAddress, site);
        if (accessToken == null)
        {
            return null;
        }
        IDictionary<string, string> loginParameters = new Dictionary<string, string>();
        loginParameters.Put(LoginParameterAccessToken, accessToken);
        return loginParameters;
    }
    catch (Exception e)
    {
        // Lookup failed; log and fall through to the null return below.
        Log.E(Log.TagSync, "Error looking login parameters for site", e);
    }
    return null;
}
/// <summary>
/// Synchronizes each submodule's remote URL into the superproject's config
/// and, when the submodule repository exists on disk, into the submodule's
/// own remote configuration as well.
/// </summary>
/// <returns>map of submodule path to the remote URL that was synced</returns>
/// <exception cref="NGit.Api.Errors.GitAPIException"></exception>
public override IDictionary<string, string> Call()
{
    CheckCallable();
    try
    {
        SubmoduleWalk generator = SubmoduleWalk.ForIndex(repo);
        if (!paths.IsEmpty())
        {
            // Restrict the walk to the explicitly requested submodule paths.
            generator.SetFilter(PathFilterGroup.CreateFromStrings(paths));
        }
        IDictionary<string, string> synced = new Dictionary<string, string>();
        StoredConfig config = repo.GetConfig();
        while (generator.Next())
        {
            string remoteUrl = generator.GetRemoteUrl();
            if (remoteUrl == null)
            {
                continue;
            }
            // Record the URL in the superproject's config for this submodule path.
            string path = generator.GetPath();
            config.SetString(ConfigConstants.CONFIG_SUBMODULE_SECTION, path, ConfigConstants.CONFIG_KEY_URL, remoteUrl);
            synced.Put(path, remoteUrl);
            Repository subRepo = generator.GetRepository();
            if (subRepo == null)
            {
                // Submodule not cloned yet; nothing more to update for it.
                continue;
            }
            StoredConfig subConfig;
            string branch;
            try
            {
                subConfig = subRepo.GetConfig();
                // Get name of remote associated with current branch and
                // fall back to default remote name as last resort
                branch = GetHeadBranch(subRepo);
                string remote = null;
                if (branch != null)
                {
                    remote = subConfig.GetString(ConfigConstants.CONFIG_BRANCH_SECTION, branch, ConfigConstants.CONFIG_KEY_REMOTE);
                }
                if (remote == null)
                {
                    remote = Constants.DEFAULT_REMOTE_NAME;
                }
                subConfig.SetString(ConfigConstants.CONFIG_REMOTE_SECTION, remote, ConfigConstants.CONFIG_KEY_URL, remoteUrl);
                subConfig.Save();
            }
            finally
            {
                // Always release the submodule repository handle.
                subRepo.Close();
            }
        }
        // Persist the superproject config only if something was actually synced.
        if (!synced.IsEmpty())
        {
            config.Save();
        }
        return (synced);
    }
    catch (IOException e)
    {
        throw new JGitInternalException(e.Message, e);
    }
    catch (ConfigInvalidException e)
    {
        throw new JGitInternalException(e.Message, e);
    }
}
/// <summary>
/// End-to-end exercise of the attachment store: insert, fetch, and copy
/// attachments across three document revisions, verify the attachment
/// dictionaries with and without inline data, and confirm that Compact()
/// garbage-collects the attachment that is no longer referenced.
/// </summary>
public void TestAttachments()
{
    var testAttachmentName = "test_attachment";
    var attachments = database.Attachments;
    // Attachment store starts out empty.
    Assert.AreEqual(0, attachments.Count());
    Assert.AreEqual(0, attachments.AllKeys().Count());

    var rev1Properties = new Dictionary<string, object>();
    rev1Properties["foo"] = 1;
    rev1Properties["bar"] = false;
    var status = new Status();
    var rev1 = database.PutRevision(new RevisionInternal(rev1Properties, database), null, false, status);
    Assert.AreEqual(StatusCode.Created, status.GetCode());

    var attach1 = Runtime.GetBytesForString("This is the body of attach1").ToArray();
    database.InsertAttachmentForSequenceWithNameAndType(new ByteArrayInputStream(attach1), rev1.GetSequence(), testAttachmentName, "text/plain", rev1.GetGeneration());

    //We must set the no_attachments column for the rev to false, as we are using an internal
    //private API call above (database.insertAttachmentForSequenceWithNameAndType) which does
    //not set the no_attachments column on revs table
    try
    {
        var args = new ContentValues();
        args.Put("no_attachments", false);
        database.StorageEngine.Update("revs", args, "sequence=?", new[] { rev1.GetSequence().ToString() });
    }
    catch (SQLException e)
    {
        Log.E(Tag, "Error setting rev1 no_attachments to false", e);
        throw new CouchbaseLiteException(StatusCode.InternalServerError);
    }

    var attachment = database.GetAttachmentForSequence(rev1.GetSequence(), testAttachmentName);
    Assert.AreEqual("text/plain", attachment.ContentType);
    var data = attachment.Content.ToArray();
    Assert.IsTrue(Arrays.Equals(attach1, data));
    // Workaround :
    // Not closing the content stream will cause Sharing Violation
    // Exception when trying to get the same attachment going forward.
    attachment.ContentStream.Close();

    // Expected attachment metadata in stub form (body not inlined).
    var innerDict = new Dictionary<string, object>();
    innerDict["content_type"] = "text/plain";
    innerDict["digest"] = "sha1-gOHUOBmIMoDCrMuGyaLWzf1hQTE=";
    innerDict["length"] = 27;
    innerDict["stub"] = true;
    innerDict["revpos"] = 1;
    var attachmentDict = new Dictionary<string, object>();
    attachmentDict[testAttachmentName] = innerDict;
    var attachmentDictForSequence = database.GetAttachmentsDictForSequenceWithContent(rev1.GetSequence(), DocumentContentOptions.None);
    Assert.AreEqual(new SortedDictionary<string, object>(attachmentDict), new SortedDictionary<string, object>(attachmentDictForSequence));//Assert.AreEqual(1, attachmentDictForSequence.Count);
    var gotRev1 = database.GetDocumentWithIDAndRev(rev1.GetDocId(), rev1.GetRevId(), DocumentContentOptions.IncludeAttachments);
    var gotAttachmentDict = gotRev1.GetProperties().Get("_attachments").AsDictionary<string, object>();
    Assert.AreEqual(attachmentDict.Select(kvp => kvp.Key).OrderBy(k => k), gotAttachmentDict.Select(kvp => kvp.Key).OrderBy(k => k));

    // Check the attachment dict, with attachments included:
    innerDict.Remove("stub");
    innerDict.Put("data", Convert.ToBase64String(attach1));
    attachmentDictForSequence = database.GetAttachmentsDictForSequenceWithContent(rev1.GetSequence(), DocumentContentOptions.IncludeAttachments);
    Assert.AreEqual(new SortedDictionary<string, object>(attachmentDict[testAttachmentName].AsDictionary<string, object>()), new SortedDictionary<string, object>(attachmentDictForSequence[testAttachmentName].AsDictionary<string, object>()));
    gotRev1 = database.GetDocumentWithIDAndRev(rev1.GetDocId(), rev1.GetRevId(), DocumentContentOptions.IncludeAttachments);
    gotAttachmentDict = gotRev1.GetProperties().Get("_attachments").AsDictionary<string, object>().Get(testAttachmentName).AsDictionary<string, object>();
    Assert.AreEqual(innerDict.Select(kvp => kvp.Key).OrderBy(k => k), gotAttachmentDict.Select(kvp => kvp.Key).OrderBy(k => k));

    // Add a second revision that doesn't update the attachment:
    var rev2Properties = new Dictionary<string, object>();
    rev2Properties.Put("_id", rev1.GetDocId());
    rev2Properties["foo"] = 2;
    rev2Properties["bazz"] = false;
    var rev2 = database.PutRevision(new RevisionInternal(rev2Properties, database), rev1.GetRevId(), false, status);
    Assert.AreEqual(StatusCode.Created, status.GetCode());
    database.CopyAttachmentNamedFromSequenceToSequence(testAttachmentName, rev1.GetSequence(), rev2.GetSequence());

    // Add a third revision of the same document:
    var rev3Properties = new Dictionary<string, object>();
    rev3Properties.Put("_id", rev2.GetDocId());
    rev3Properties["foo"] = 2;
    rev3Properties["bazz"] = false;
    var rev3 = database.PutRevision(new RevisionInternal(rev3Properties, database), rev2.GetRevId(), false, status);
    Assert.AreEqual(StatusCode.Created, status.GetCode());
    var attach2 = Runtime.GetBytesForString("<html>And this is attach2</html>").ToArray();
    database.InsertAttachmentForSequenceWithNameAndType(new ByteArrayInputStream(attach2), rev3.GetSequence(), testAttachmentName, "text/html", rev2.GetGeneration());

    // Check the 2nd revision's attachment:
    var attachment2 = database.GetAttachmentForSequence(rev2.GetSequence(), testAttachmentName);
    Assert.AreEqual("text/plain", attachment2.ContentType);
    data = attachment2.Content.ToArray();
    Assert.IsTrue(Arrays.Equals(attach1, data));
    // Workaround :
    // Not closing the content stream will cause Sharing Violation
    // Exception when trying to get the same attachment going forward.
    attachment2.ContentStream.Close();

    // Check the 3rd revision's attachment:
    var attachment3 = database.GetAttachmentForSequence(rev3.GetSequence(), testAttachmentName);
    Assert.AreEqual("text/html", attachment3.ContentType);
    data = attachment3.Content.ToArray();
    Assert.IsTrue(Arrays.Equals(attach2, data));
    var attachmentDictForRev3 = database.GetAttachmentsDictForSequenceWithContent(rev3.GetSequence(), DocumentContentOptions.None).Get(testAttachmentName).AsDictionary<string, object>();
    if (attachmentDictForRev3.ContainsKey("follows"))
    {
        if (((bool)attachmentDictForRev3.Get("follows")) == true)
        {
            throw new RuntimeException("Did not expected attachment dict 'follows' key to be true");
        }
        else
        {
            throw new RuntimeException("Did not expected attachment dict to have 'follows' key");
        }
    }
    // Workaround :
    // Not closing the content stream will cause Sharing Violation
    // Exception when trying to get the same attachment going forward.
    attachment3.ContentStream.Close();

    // Examine the attachment store:
    Assert.AreEqual(2, attachments.Count());
    var expected = new HashSet<BlobKey>();
    expected.AddItem(BlobStore.KeyForBlob(attach1));
    expected.AddItem(BlobStore.KeyForBlob(attach2));
    Assert.AreEqual(expected.Count, attachments.AllKeys().Count());
    foreach (var key in attachments.AllKeys())
    {
        Assert.IsTrue(expected.Contains(key));
    }

    database.Compact();
    // This clears the body of the first revision
    Assert.AreEqual(1, attachments.Count());
    var expected2 = new HashSet<BlobKey>();
    expected2.AddItem(BlobStore.KeyForBlob(attach2));
    Assert.AreEqual(expected2.Count, attachments.AllKeys().Count());
    foreach (var key in attachments.AllKeys())
    {
        Assert.IsTrue(expected2.Contains(key));
    }
}
/// <summary>
/// Feeds random keys into a count-min-sketch top-K tracker alongside an exact
/// frequency map, then checks the reported top-K set for expected size,
/// uniqueness, and descending frequency order.
/// </summary>
public void TestTopK()
{
    var space = 10000;
    var points = 100000;
    var topkMax = 100;
    var random = new Random();
    var topk = new CountMinSketchStateTopk(topkMax);
    // Exact frequency of every key we sent, for cross-checking the sketch's top-K.
    var sent = new Dictionary<ByteBuffer, long>();
    for (var i = 0; i < points; i++)
    {
        // for simple population: ByteBuffer bytes = generateBytesModulo(i, space);
        var bytes = GenerateBytesRandom(random, space);
        //var bytes = GenerateBytesModulo(i, space);
        if (!sent.TryGetValue(bytes, out var count))
        {
            sent.Put(bytes, 1L);
            topk.UpdateExpectIncreasing(bytes.Array, 1);
        }
        else
        {
            sent.Put(bytes, count + 1);
            topk.UpdateExpectIncreasing(bytes.Array, count + 1);
        }
        // Progress output for long runs.
        if (i > 0 && i % 100000 == 0)
        {
            Console.Out.WriteLine("Completed " + i);
        }
    }
    // compare
    var top = topk.TopKValues;
    // assert filled
    if (sent.Count < topkMax)
    {
        Assert.AreEqual(sent.Count, top.Count);
    }
    else
    {
        Assert.AreEqual(topkMax, top.Count);
    }
    // assert no duplicate values
    ISet<ByteBuffer> set = new HashSet<ByteBuffer>();
    foreach (var topBytes in top)
    {
        Assert.IsTrue(set.Add(topBytes));
    }
    // assert order descending
    long? lastFreq = null;
    foreach (var topBytes in top)
    {
        var freq = sent.Get(topBytes);
        if (lastFreq != null)
        {
            Assert.IsTrue(freq <= lastFreq);
        }
        lastFreq = freq;
    }
}
/// <summary>
/// Initializes the select data-flow operator: validates input/output ports, compiles the
/// EPL select statement held in <c>select</c>, starts it, and wires one viewable per
/// input port so arriving events can be pushed into the started statement.
/// </summary>
/// <param name="context">initialization context carrying input/output ports and engine services</param>
/// <returns>descriptor of the single output port, typed to the statement's result event type</returns>
/// <exception cref="ArgumentException">if the port configuration is invalid</exception>
/// <exception cref="ExprValidationException">if the statement uses an unsupported construct</exception>
public DataFlowOpInitializeResult Initialize(DataFlowOpInitializateContext context)
{
    if (context.InputPorts.IsEmpty())
    {
        throw new ArgumentException("Select operator requires at least one input stream");
    }
    if (context.OutputPorts.Count != 1)
    {
        throw new ArgumentException("Select operator requires one output stream but produces " + context.OutputPorts.Count + " streams");
    }

    // When the declared output type is not an underlying type, deliver EventBean
    // instances rather than underlying objects.
    DataFlowOpOutputPort portZero = context.OutputPorts[0];
    if (portZero.OptionalDeclaredType != null && !portZero.OptionalDeclaredType.IsUnderlying)
    {
        _submitEventBean = true;
    }

    // Determine adapter factories for each input stream's event type; these wrap
    // incoming objects into EventBean instances.
    int numStreams = context.InputPorts.Count;
    _adapterFactories = new EventBeanAdapterFactory[numStreams];
    for (int i = 0; i < numStreams; i++)
    {
        EventType eventType = context.InputPorts.Get(i).TypeDesc.EventType;
        _adapterFactories[i] = context.StatementContext.EventAdapterService.GetAdapterFactoryForType(eventType);
    }

    // Compile and prepare execution
    StatementContext statementContext = context.StatementContext;
    EPServicesContext servicesContext = context.ServicesContext;
    AgentInstanceContext agentInstanceContext = context.AgentInstanceContext;

    // Validate: only a plain istream select without insert-into or subselects is supported.
    if (select.InsertIntoDesc != null)
    {
        throw new ExprValidationException("Insert-into clause is not supported");
    }
    if (select.SelectStreamSelectorEnum != SelectClauseStreamSelectorEnum.ISTREAM_ONLY)
    {
        throw new ExprValidationException("Selecting remove-stream is not supported");
    }
    ExprNodeSubselectDeclaredDotVisitor visitor = StatementSpecRawAnalyzer.WalkSubselectAndDeclaredDotExpr(select);
    GroupByClauseExpressions groupByExpressions = GroupByExpressionHelper.GetGroupByRollupExpressions(
        select.GroupByExpressions, select.SelectClauseSpec, select.HavingExprRootNode, select.OrderByList, visitor);
    if (!visitor.Subselects.IsEmpty())
    {
        throw new ExprValidationException("Subselects are not supported");
    }

    // Collect the raw filter stream specs by stream number; only plain streams
    // (no patterns or other constructs) are allowed in the from-clause.
    IDictionary<int, FilterStreamSpecRaw> streams = new Dictionary<int, FilterStreamSpecRaw>();
    for (int streamNum = 0; streamNum < select.StreamSpecs.Count; streamNum++)
    {
        var rawStreamSpec = select.StreamSpecs[streamNum];
        if (!(rawStreamSpec is FilterStreamSpecRaw))
        {
            throw new ExprValidationException("From-clause must contain only streams and cannot contain patterns or other constructs");
        }
        streams.Put(streamNum, (FilterStreamSpecRaw)rawStreamSpec);
    }

    // Compile each offered stream against the matching input port.
    IList<StreamSpecCompiled> streamSpecCompileds = new List<StreamSpecCompiled>();
    for (int streamNum = 0; streamNum < select.StreamSpecs.Count; streamNum++)
    {
        var filter = streams.Get(streamNum);
        var inputPort = FindInputPort(filter.RawFilterSpec.EventTypeName, context.InputPorts);
        if (inputPort == null)
        {
            throw new ExprValidationException(
                string.Format("Failed to find stream '{0}' among input ports, input ports are {1}",
                    filter.RawFilterSpec.EventTypeName,
                    GetInputPortNames(context.InputPorts).Render(", ", "[]")));
        }
        var eventType = inputPort.Value.Value.TypeDesc.EventType;
        var streamAlias = filter.OptionalStreamName;
        // Empty (match-all) filter parameter list; actual routing happens via the port viewables.
        var filterSpecCompiled = new FilterSpecCompiled(eventType, streamAlias,
            new IList<FilterSpecParam>[] { Collections.GetEmptyList<FilterSpecParam>() }, null);
        // NOTE(review): ViewSpecs is always taken from StreamSpecs[0] rather than
        // StreamSpecs[streamNum] — confirm this is intended for multi-stream selects.
        var filterStreamSpecCompiled = new FilterStreamSpecCompiled(filterSpecCompiled, select.StreamSpecs[0].ViewSpecs, streamAlias, new StreamSpecOptions());
        streamSpecCompileds.Add(filterStreamSpecCompiled);
    }

    // Create compiled statement spec.
    SelectClauseSpecCompiled selectClauseCompiled = StatementLifecycleSvcUtil.CompileSelectClause(select.SelectClauseSpec);

    // Determine if snapshot output is needed: 'iterate' mode forces snapshot output
    // and is mutually exclusive with an explicit output-rate limit.
    OutputLimitSpec outputLimitSpec = select.OutputLimitSpec;
    _isOutputLimited = outputLimitSpec != null;
    if (iterate)
    {
        if (outputLimitSpec != null)
        {
            throw new ExprValidationException("Output rate limiting is not supported with 'iterate'");
        }
        outputLimitSpec = new OutputLimitSpec(OutputLimitLimitType.SNAPSHOT, OutputLimitRateType.TERM);
    }

    var mergedAnnotations = AnnotationUtil.MergeAnnotations(statementContext.Annotations, context.OperatorAnnotations);
    var orderByArray = OrderByItem.ToArray(select.OrderByList);
    var outerJoinArray = OuterJoinDesc.ToArray(select.OuterJoinDescList);
    var streamSpecArray = streamSpecCompileds.ToArray();

    var compiled = new StatementSpecCompiled(
        null, null, null, null, null, null, null,
        SelectClauseStreamSelectorEnum.ISTREAM_ONLY,
        selectClauseCompiled, streamSpecArray, outerJoinArray,
        select.FilterExprRootNode, select.HavingExprRootNode, outputLimitSpec,
        orderByArray, ExprSubselectNode.EMPTY_SUBSELECT_ARRAY,
        ExprNodeUtility.EMPTY_DECLARED_ARR, ExprNodeUtility.EMPTY_SCRIPTS,
        select.ReferencedVariables, select.RowLimitSpec,
        CollectionUtil.EMPTY_STRING_ARRAY, mergedAnnotations,
        null, null, null, null, null, null, null, null, null,
        groupByExpressions, null, null);

    // Create one viewable per input port; these act as the statement's event sources.
    var viewables = new EPLSelectViewable[context.InputPorts.Count];
    _viewablesPerPort = viewables;
    foreach (var entry in context.InputPorts)
    {
        EPLSelectViewable viewable = new EPLSelectViewable(entry.Value.TypeDesc.EventType);
        viewables[entry.Key] = viewable;
    }

    // The activator resolves each filter stream to the per-port viewable created above,
    // matched by event type.
    var activatorFactory = new ProxyViewableActivatorFactory
    {
        ProcCreateActivatorSimple = filterStreamSpec =>
        {
            EPLSelectViewable found = null;
            foreach (EPLSelectViewable sviewable in viewables)
            {
                if (sviewable.EventType == filterStreamSpec.FilterSpec.FilterForEventType)
                {
                    found = sviewable;
                }
            }
            if (found == null)
            {
                throw new IllegalStateException("Failed to find viewable for filter");
            }
            EPLSelectViewable viewable = found;
            return (new ProxyViewableActivator(
                (agentInstanceContext2, isSubselect, isRecoveringResilient) =>
                    new ViewableActivationResult(
                        viewable, new ProxyStopCallback(() => { }), null, null, null, false, false, null)));
        }
    };

    // For per-row delivery (no iteration, no output limit), register the select
    // expression result callback on this operator.
    OutputProcessViewCallback optionalOutputProcessViewCallback = null;
    if (!iterate && !_isOutputLimited)
    {
        _deliveryCallback = new EPLSelectDeliveryCallback();
        optionalOutputProcessViewCallback = this;
    }

    // Prepare the statement for starting.
    EPStatementStartMethodSelectDesc selectDesc = EPStatementStartMethodSelectUtil.Prepare(
        compiled, servicesContext, statementContext, false, agentInstanceContext, false,
        activatorFactory, optionalOutputProcessViewCallback, _deliveryCallback);

    // Start the statement agent instance.
    _selectResult = (StatementAgentInstanceFactorySelectResult)selectDesc.StatementAgentInstanceFactorySelect.NewContext(agentInstanceContext, false);

    // For output-rate-limited statements, register a dispatch view so limited output
    // flows back through this operator.
    if (_isOutputLimited)
    {
        _selectResult.FinalView.AddView(new EPLSelectUpdateDispatchView(this));
    }

    // Assign runtime strategies (aggregation, subselect, prior/previous, table access)
    // to the statement's expression nodes.
    EPStatementStartMethodHelperAssignExpr.AssignExpressionStrategies(
        selectDesc,
        _selectResult.OptionalAggegationService,
        _selectResult.SubselectStrategies,
        _selectResult.PriorNodeStrategies,
        _selectResult.PreviousNodeStrategies,
        null, null,
        _selectResult.TableAccessEvalStrategies);

    EventType outputEventType = selectDesc.ResultSetProcessorPrototypeDesc.ResultSetProcessorFactory.ResultEventType;
    _agentInstanceContext = agentInstanceContext;
    return (new DataFlowOpInitializeResult(new GraphTypeDesc[] { new GraphTypeDesc(false, true, outputEventType) }));
}
/// <summary>Check cyclic dependency and determine processing order for the given graph.</summary>
/// <param name="graph">is represented as child nodes that have one or more parent nodes that they are dependent on</param>
/// <returns>set of parent and child nodes ordered such that each node's dependencies are satisfied by nodes earlier in the set</returns>
/// <throws>GraphCircularDependencyException if a dependency has been detected</throws>
public static ICollection<String> GetTopDownOrder(IDictionary<String, ICollection<String>> graph)
{
    // Fail fast on cycles; the ordering loop below relies on acyclicity to make progress.
    var circularDependency = GetFirstCircularDependency(graph);
    if (circularDependency != null)
    {
        throw new GraphCircularDependencyException("Circular dependency detected between " + circularDependency.Render());
    }

    // Reverse the graph - build a list of children per parent.
    var reversedGraph = new Dictionary<String, ICollection<String>>();
    foreach (var entry in graph)
    {
        var parents = entry.Value;
        var child = entry.Key;
        foreach (var parent in parents)
        {
            var childList = reversedGraph.Get(parent);
            if (childList == null)
            {
                // FIFOHashSet preserves insertion order, keeping the resulting order deterministic.
                childList = new FIFOHashSet<String>();
                reversedGraph.Put(parent, childList);
            }
            childList.Add(child);
        }
    }

    // Determine all root nodes, which are those without parent: parents that never
    // appear as a child (i.e. have no entry of their own in 'graph').
    var roots = new SortedSet<String>();
    foreach (var parents in graph.Values)
    {
        if (parents == null)
        {
            continue;
        }
        foreach (String parent in parents)
        {
            // node not itself a child
            if (!graph.ContainsKey(parent))
            {
                roots.Add(parent);
            }
        }
    }

    // For each root, recursively add its child nodes; this becomes the default order.
    ICollection<String> graphFlattened = new FIFOHashSet<String>();
    foreach (String root in roots)
    {
        RecusiveAdd(graphFlattened, root, reversedGraph);
    }

    // Now walk down the default order and for each node ensure all parents are created
    // before emitting it. Terminates because the graph is acyclic, so every pass
    // emits at least one node.
    ICollection<String> created = new FIFOHashSet<String>();
    ICollection<String> removeList = new HashSet<String>();
    while (graphFlattened.IsNotEmpty())
    {
        removeList.Clear();
        foreach (String node in graphFlattened)
        {
            if (!RecursiveParentsCreated(node, created, graph))
            {
                continue;
            }
            created.Add(node);
            removeList.Add(node);
        }
        graphFlattened.RemoveAll(removeList);
    }
    return (created);
}
/// <summary>
/// Demonstrates named-window fire-and-forget and on-select queries: creates a keep-all
/// named window over sensor events, loads it with generated events, then times repeated
/// fire-and-forget and on-event (on-select) query executions against it.
/// </summary>
/// <param name="isRunFromUnitTest">when true, uses much smaller event and iteration counts so the example completes quickly</param>
/// <param name="engineURI">URI of the engine instance to obtain</param>
public void RunExample(bool isRunFromUnitTest, string engineURI)
{
    int numEventsToLoad = 100000;
    int numFireAndForgetExecutions = 100;
    int numOnEventQueryExecutions = 100000;
    if (isRunFromUnitTest)
    {
        // Keep unit-test runs fast.
        numEventsToLoad = 1000;
        numFireAndForgetExecutions = 5;
        numOnEventQueryExecutions = 5;
    }

    EPServiceProvider epService = EPServiceProviderManager.GetProvider(engineURI);

    // This example initializes the engine instance as it is running within an overall test suite.
    // This step would not be required unless re-using the same engine instance with different configurations.
    epService.Initialize();

    // Define the event type - this example uses the Map event representation.
    IDictionary<String, Object> definition = new LinkedHashMap<String, Object>();
    definition.Put("sensor", typeof(string));
    definition.Put("temperature", typeof(double));
    epService.EPAdministrator.Configuration.AddEventType("SensorEvent", definition);

    // Create a keep-all named window plus an insert-into statement feeding it.
    String stmtText = "create window SensorWindow.win:keepall() as select * from SensorEvent";
    Log.Info("Creating named window : " + stmtText);
    epService.EPAdministrator.CreateEPL(stmtText);

    stmtText = "insert into SensorWindow select * from SensorEvent";
    Log.Info("Creating insert statement for named window : " + stmtText);
    epService.EPAdministrator.CreateEPL(stmtText);

    // Generate numEventsToLoad sensor events to load into the named window.
    var random = new Random();
    String[] sensors = "s1,s2,s3,s4,s5,s6".Split(',');
    Log.Info("Generating " + numEventsToLoad + " sensor events for the named window");
    IList<IDictionary<String, Object>> events = new List<IDictionary<String, Object>>();
    for (int i = 0; i < numEventsToLoad; i++)
    {
        // Temperatures fall in [80, 90), rounded to 5 decimal places.
        double temperature = Math.Round(random.NextDouble() * 10, 5, MidpointRounding.AwayFromZero) + 80;
        String sensor = sensors[random.Next(sensors.Length)];
        IDictionary<String, Object> data = new LinkedHashMap<String, Object>();
        data.Put("temperature", temperature);
        data.Put("sensor", sensor);
        events.Add(data);
    }
    Log.Info("Completed generating sensor events");

    Log.Info("Sending " + events.Count + " sensor events into engine");
    foreach (var @event in events)
    {
        epService.EPRuntime.SendEvent(@event, "SensorEvent");
    }
    Log.Info("Completed sending sensor events");

    // Prepare the fire-and-forget (on-demand) query; the first generated event's
    // temperature is used so each execution is expected to return exactly one row.
    var sampleTemperature = (double)events[0].Get("temperature");
    stmtText = "select * from SensorWindow where temperature = " + sampleTemperature;
    Log.Info("Preparing fire-and-forget query : " + stmtText);
    EPOnDemandPreparedQuery onDemandQuery = epService.EPRuntime.PrepareQuery(stmtText);

    Log.Info("Executing fire-and-forget query " + numFireAndForgetExecutions + " times");
    // NOTE(review): Environment.TickCount has coarse (~10-16ms) resolution and wraps
    // after ~24.9 days; a Stopwatch would be more precise here — confirm before changing.
    long startTime = Environment.TickCount;
    for (int i = 0; i < numFireAndForgetExecutions; i++)
    {
        EPOnDemandQueryResult result = onDemandQuery.Execute();
        if (result.Array.Length != 1)
        {
            throw new ApplicationException(
                "Failed assertion of result, expected a single row returned from query");
        }
    }
    long endTime = Environment.TickCount;
    double deltaSec = (endTime - startTime) / 1000.0;
    Log.Info("Executing fire-and-forget query " + numFireAndForgetExecutions + " times took " + deltaSec + " seconds");

    // Prepare the on-select variant: a SensorQueryEvent triggers a select against the window.
    IDictionary<String, Object> definitionQuery = new LinkedHashMap<String, Object>();
    definitionQuery.Put("querytemp", typeof(double));
    epService.EPAdministrator.Configuration.AddEventType("SensorQueryEvent", definitionQuery);

    stmtText = "on SensorQueryEvent select sensor from SensorWindow where temperature = querytemp";
    Log.Info("Creating on-select statement for named window : " + stmtText);
    EPStatement onSelectStmt = epService.EPAdministrator.CreateEPL(stmtText);
    // Results are delivered to this instance via the subscriber convention.
    onSelectStmt.Subscriber = this;

    Log.Info("Executing on-select query " + numOnEventQueryExecutions + " times");
    startTime = Environment.TickCount;
    for (int i = 0; i < numOnEventQueryExecutions; i++)
    {
        IDictionary<String, Object> queryParams = new Dictionary<String, Object>();
        queryParams.Put("querytemp", sampleTemperature);
        epService.EPRuntime.SendEvent(queryParams, "SensorQueryEvent");
    }
    endTime = Environment.TickCount;
    deltaSec = (endTime - startTime) / 1000.0;
    Log.Info("Executing on-select query " + numOnEventQueryExecutions + " times took " + deltaSec + " seconds");
}
/// <summary>
/// Registers the event types, plug-in single-row functions and variant stream required
/// by the named-window infrastructure tests on the given configuration.
/// </summary>
private static void Configure(Configuration configuration)
{
    var common = configuration.Common;
    var compiler = configuration.Compiler;

    // POCO event types used across the tests (duplicates re-registered as in the original setup).
    var beanTypes = new[]
    {
        typeof(SupportBean), typeof(OrderBean), typeof(OrderWithItems),
        typeof(SupportBeanAtoFBase), typeof(SupportBean_A), typeof(SupportMarketDataBean),
        typeof(SupportSimpleBeanTwo), typeof(SupportSimpleBeanOne), typeof(SupportVariableSetEvent),
        typeof(SupportBean_S0), typeof(SupportBean_S1), typeof(SupportBeanRange),
        typeof(SupportBean_B), typeof(SupportOverrideOneA), typeof(SupportOverrideOne),
        typeof(SupportOverrideBase), typeof(SupportQueueEnter), typeof(SupportQueueLeave),
        typeof(SupportBeanAtoFBase), typeof(SupportBeanAbstractSub), typeof(SupportBean_ST0),
        typeof(SupportBeanTwo), typeof(SupportCountAccessEvent), typeof(BookDesc),
        typeof(SupportBean_Container), typeof(SupportEventWithManyArray), typeof(SupportEventWithIntArray)
    };
    foreach (var beanType in beanTypes)
    {
        common.AddEventType(beanType);
    }

    // Nested map types: the outer map references the inner map by type name.
    common.AddEventType("InnerMap", new Dictionary<string, object> { { "key", typeof(string) } });
    common.AddEventType("OuterMap", new Dictionary<string, object> { { "innermap", "InnerMap" } });

    common.AddEventType("MySimpleKeyValueMap", new Dictionary<string, object>
    {
        { "key", typeof(string) },
        { "value", typeof(long) }
    });

    // Two inner map types composed into one outer type, referenced by name;
    // inner types are registered before the outer one.
    common.AddEventType("T1", new Dictionary<string, object> { { "i1", typeof(int) } });
    common.AddEventType("T2", new Dictionary<string, object> { { "i2", typeof(int) } });
    common.AddEventType("OuterType", new Dictionary<string, object>
    {
        { "one", "T1" },
        { "two", "T2" }
    });

    common.AddEventType("MyMapWithKeyPrimitiveBoxed", new Dictionary<string, object>
    {
        { "key", typeof(string) },
        { "primitive", typeof(long) },
        { "boxed", typeof(long?) }
    });

    // Map type built via the BuildMap helper.
    var abType = BuildMap(new[]
    {
        new object[] { "a", typeof(string) },
        new object[] { "b", typeof(int) }
    });
    common.AddEventType("MyMapAB", abType);

    // Legacy bean type with an explicit copy method.
    var copyMethodConfig = new ConfigurationCommonEventTypeBean { CopyMethod = "MyCopyMethod" };
    common.AddEventType("SupportBeanCopyMethod", typeof(SupportBeanCopyMethod), copyMethodConfig);

    // Object-array event type with a single string property.
    common.AddEventType("SimpleEventWithId", new[] { "id" }, new object[] { typeof(string) });

    // Plug-in single-row functions used by the on-update/on-merge tests.
    compiler.AddPlugInSingleRowFunction(
        "setBeanLongPrimitive999", typeof(InfraNamedWindowOnUpdate), "SetBeanLongPrimitive999");
    compiler.AddPlugInSingleRowFunction(
        "increaseIntCopyDouble", typeof(InfraNamedWindowOnMerge), "IncreaseIntCopyDouble");

    // Variant stream accepting both SupportBean_A and SupportBean_B.
    var variantConfig = new ConfigurationCommonVariantStream();
    variantConfig.AddEventTypeName("SupportBean_A");
    variantConfig.AddEventTypeName("SupportBean_B");
    common.AddVariantStream("VarStream", variantConfig);

    common.Logging.IsEnableQueryPlan = true;
}
/// <summary>
/// Keeps at most <paramref name="numFacetsPerKey"/> facets per key, where a facet's key
/// is its value's prefix up to the first <paramref name="separator"/>. Facets without a
/// separator are dropped; when <paramref name="values"/> is non-empty, facets whose
/// value does not start with one of those prefixes are dropped as well.
/// </summary>
/// <param name="facets">source facets; enumerated exactly once</param>
/// <param name="separator">character separating the key from the rest of the facet value</param>
/// <param name="numFacetsPerKey">maximum number of facets to keep per key</param>
/// <param name="values">optional value-prefix whitelist; null or empty means no prefix filtering</param>
/// <returns>the surviving facets, in their original order</returns>
private IEnumerable<BrowseFacet> FilterByKeys(IEnumerable<BrowseFacet> facets, char separator, int numFacetsPerKey, string[] values)
{
    // Materialize once. The previous implementation called facets.Count() and
    // facets.ElementAt(i) inside the loop, re-enumerating the source on every
    // iteration (O(n^2), and incorrect for one-shot/deferred enumerables).
    var editable = facets.ToList();
    var keyOccurences = new Dictionary<string, AtomicInteger>();

    // Iterate over a snapshot so removing from 'editable' is safe mid-loop.
    foreach (BrowseFacet facet in editable.ToList())
    {
        string value = facet.Value;

        // Ordinal char search; the previous Contains(string) + IndexOf(string) pair
        // mixed ordinal and culture-sensitive matching, which could disagree.
        int separatorIndex = value.IndexOf(separator);
        if (separatorIndex < 0)
        {
            editable.Remove(facet);
            continue;
        }

        if (values != null && values.Length > 0)
        {
            bool belongsToKeys = false;
            foreach (var val in values)
            {
                // NOTE(review): StartsWith without a StringComparison is
                // culture-sensitive; kept for compatibility — consider Ordinal.
                if (value.StartsWith(val))
                {
                    belongsToKeys = true;
                    break;
                }
            }

            if (!belongsToKeys)
            {
                editable.Remove(facet);
                continue;
            }
        }

        string key = value.Substring(0, separatorIndex);
        AtomicInteger numOfKeys;
        if (!keyOccurences.TryGetValue(key, out numOfKeys))
        {
            numOfKeys = new AtomicInteger(0);
            keyOccurences[key] = numOfKeys;
        }

        // Keep only the first numFacetsPerKey facets seen for this key.
        if (numOfKeys.IncrementAndGet() > numFacetsPerKey)
        {
            editable.Remove(facet);
        }
    }

    return editable;
}
/// <summary>
/// Exercises checkout rules for the case where the index entry is not present in HEAD,
/// asserting for each head/merge/index combination which paths are reported as updated,
/// removed or conflicting.
/// NOTE(review): despite the method name, rules 14 through 21 are also covered below.
/// </summary>
public virtual void TestRules4thru13_IndexEntryNotInHead()
{
    // rules 4 and 5: index-only entry, no head, no merge -> nothing happens
    Dictionary<string, string> idxMap;
    idxMap = new Dictionary<string, string>();
    idxMap.Put("foo", "foo");
    SetupCase(null, null, idxMap);
    Go();
    NUnit.Framework.Assert.IsTrue(GetUpdated().IsEmpty());
    NUnit.Framework.Assert.IsTrue(GetRemoved().IsEmpty());
    NUnit.Framework.Assert.IsTrue(GetConflicts().IsEmpty());

    // rules 6 and 7: merge identical to index -> nothing happens
    idxMap = new Dictionary<string, string>();
    idxMap.Put("foo", "foo");
    SetupCase(null, idxMap, idxMap);
    Go();
    AssertAllEmpty();

    // rules 8 and 9: merge differs from index -> conflict
    Dictionary<string, string> mergeMap;
    mergeMap = new Dictionary<string, string>();
    mergeMap.Put("foo", "merge");
    SetupCase(null, mergeMap, idxMap);
    Go();
    NUnit.Framework.Assert.IsTrue(GetUpdated().IsEmpty());
    NUnit.Framework.Assert.IsTrue(GetRemoved().IsEmpty());
    NUnit.Framework.Assert.IsTrue(GetConflicts().Contains("foo"));

    // rule 10: head matches index, no merge -> file is removed
    Dictionary<string, string> headMap = new Dictionary<string, string>();
    headMap.Put("foo", "foo");
    SetupCase(headMap, null, idxMap);
    Go();
    NUnit.Framework.Assert.IsTrue(GetRemoved().Contains("foo"));
    NUnit.Framework.Assert.IsTrue(GetUpdated().IsEmpty());
    NUnit.Framework.Assert.IsTrue(GetConflicts().IsEmpty());

    // rule 11: same as rule 10 but the working-tree file was modified -> conflict
    SetupCase(headMap, null, idxMap);
    NUnit.Framework.Assert.IsTrue(new FilePath(trash, "foo").Delete());
    WriteTrashFile("foo", "bar");
    db.GetIndex().GetMembers()[0].ForceRecheck();
    Go();
    NUnit.Framework.Assert.IsTrue(GetRemoved().IsEmpty());
    NUnit.Framework.Assert.IsTrue(GetUpdated().IsEmpty());
    NUnit.Framework.Assert.IsTrue(GetConflicts().Contains("foo"));

    // rule 12 & 13: head differs from index, no merge -> conflict
    headMap.Put("foo", "head");
    SetupCase(headMap, null, idxMap);
    Go();
    NUnit.Framework.Assert.IsTrue(GetRemoved().IsEmpty());
    NUnit.Framework.Assert.IsTrue(GetUpdated().IsEmpty());
    NUnit.Framework.Assert.IsTrue(GetConflicts().Contains("foo"));

    // rules 14 & 15: head identical to merge -> nothing happens
    SetupCase(headMap, headMap, idxMap);
    Go();
    AssertAllEmpty();

    // rules 16 & 17: head and merge both differ from index -> conflict
    SetupCase(headMap, mergeMap, idxMap);
    Go();
    NUnit.Framework.Assert.IsTrue(GetConflicts().Contains("foo"));

    // rules 18 & 19: merge identical to index -> nothing happens
    SetupCase(headMap, idxMap, idxMap);
    Go();
    AssertAllEmpty();

    // rule 20: head matches index, merge differs -> file is updated to merge
    SetupCase(idxMap, mergeMap, idxMap);
    Go();
    NUnit.Framework.Assert.IsTrue(GetUpdated().ContainsKey("foo"));

    // rules 21: same as rule 20 but the working-tree file was modified -> conflict
    SetupCase(idxMap, mergeMap, idxMap);
    NUnit.Framework.Assert.IsTrue(new FilePath(trash, "foo").Delete());
    WriteTrashFile("foo", "bar");
    db.GetIndex().GetMembers()[0].ForceRecheck();
    Go();
    NUnit.Framework.Assert.IsTrue(GetConflicts().Contains("foo"));
}
// Replaces the "follows" key with the real attachment data in all attachments to 'doc'.
// Each attachment marked "follows" has its file read from the attachment store and
// inlined as base64 "data". (Presumably a null result from the mutator signals failure
// to MutateAttachments — verify against its implementation.)
internal bool InlineFollowingAttachmentsIn(RevisionInternal rev)
{
    return rev.MutateAttachments((s, attachment) =>
    {
        // Attachments without a "follows" marker already carry their data; leave untouched.
        if (!attachment.ContainsKey("follows"))
        {
            return attachment;
        }

        // Locate the attachment's backing file and read it fully into memory.
        var fileURL = FileForAttachmentDict(attachment);
        byte[] fileData = null;
        try
        {
            // NOTE(review): neither the connection input stream nor the output buffer
            // is closed/disposed here — confirm whether these streams need explicit cleanup.
            var inputStream = fileURL.OpenConnection().GetInputStream();
            var os = new ByteArrayOutputStream();
            inputStream.CopyTo(os);
            fileData = os.ToByteArray();
        }
        catch (IOException e)
        {
            Log.E(TAG, "could not retrieve attachment data: {0}".Fmt(fileURL.ToString()), e);
            // Returning null aborts the mutation for this attachment.
            return null;
        }

        // Swap the "follows" marker for the base64-encoded file contents.
        var editedAttachment = new Dictionary<string, object>(attachment);
        editedAttachment.Remove("follows");
        editedAttachment.Put("data", Convert.ToBase64String(fileData));
        return editedAttachment;
    });
}
/// <summary>
/// Builds the property-name-to-type map used for writing events of the given type,
/// reconciling it against an optional caller-supplied map. An unknown type is declared
/// as a nestable map type from the given properties. For map-underlying types, nested
/// map properties are flattened into dotted names; for bean types, writable properties
/// of non-primitive members are flattened likewise.
/// </summary>
/// <param name="eventTypeName">name of the event type to look up or declare</param>
/// <param name="propertyTypesGiven">optional caller-declared property types; must agree with an existing declaration</param>
/// <param name="eventAdapterService">service used to resolve and declare event types</param>
/// <returns>flattened property-name-to-type map, or the given map when the type was newly declared or is a bean subset</returns>
/// <exception cref="EPException">if the given properties conflict with an existing declaration</exception>
private DataMap ConstructPropertyTypes(String eventTypeName, DataMap propertyTypesGiven, EventAdapterService eventAdapterService)
{
    var propertyTypes = new Dictionary<string, object>();
    var eventType = eventAdapterService.GetEventTypeByName(eventTypeName);
    if (eventType == null)
    {
        // Unknown type: declare it from the given properties (if any) and echo them back.
        if (propertyTypesGiven != null)
        {
            eventAdapterService.AddNestableMapType(eventTypeName, new Dictionary<string, object>(propertyTypesGiven), null, true, true, true, false, false);
        }
        return propertyTypesGiven;
    }

    if (eventType.UnderlyingType != typeof(DataMap))
    {
        _beanType = eventType.UnderlyingType;
    }

    if (propertyTypesGiven != null && eventType.PropertyNames.Length != propertyTypesGiven.Count)
    {
        // Allow this scenario for beans, as we may want to bring in a subset of properties.
        if (_beanType != null)
        {
            return propertyTypesGiven;
        }
        throw new EPException("Event type " + eventTypeName + " has already been declared with a different number of parameters");
    }

    foreach (var property in eventType.PropertyNames)
    {
        Type type;
        try
        {
            type = eventType.GetPropertyType(property);
        }
        catch (PropertyAccessException e)
        {
            // Thrown if trying to access an invalid property on an EventBean.
            throw new EPException(e);
        }

        // NOTE(review): a caller-declared property with an explicit null type is
        // indistinguishable from a missing property here — confirm this is acceptable.
        if (propertyTypesGiven != null && propertyTypesGiven.Get(property) == null)
        {
            throw new EPException("Event type " + eventTypeName + " has already been declared with different parameters");
        }
        if (propertyTypesGiven != null && !Equals(propertyTypesGiven.Get(property), type))
        {
            throw new EPException("Event type " + eventTypeName + " has already been declared with a different type for property " + property);
        }

        // We can't set read-only properties on a bean.
        if (eventType.UnderlyingType != typeof(DataMap))
        {
            var magicType = MagicType.GetCachedType(_beanType);
            var magicProperty = magicType.ResolveProperty(property, PropertyResolutionStyle.CASE_SENSITIVE);
            if (magicProperty == null)
            {
                continue;
            }
            if (!magicProperty.CanWrite)
            {
                if (propertyTypesGiven == null)
                {
                    continue;
                }
                else
                {
                    throw new EPException("Event type " + eventTypeName + " property " + property + " is read only");
                }
            }
        }
        propertyTypes[property] = type;
    }

    // Flatten nested types into dotted property names.
    var flattenPropertyTypes = new Dictionary<string, object>();
    foreach (var prop in propertyTypes)
    {
        var name = prop.Key;
        var type = prop.Value;
        var asType = type as Type;
        if ((asType != null) && (asType.IsGenericStringDictionary()) && (eventType is MapEventType))
        {
            // Nested map property of a map event type: flatten each nested key as "outer.inner".
            var mapEventType = (MapEventType)eventType;
            var nested = (DataMap)mapEventType.Types.Get(name);
            foreach (var nestedProperty in nested.Keys)
            {
                flattenPropertyTypes.Put(name + "." + nestedProperty, nested.Get(nestedProperty));
            }
        }
        else if (asType != null)
        {
            if (asType.IsNullable())
            {
                asType = Nullable.GetUnderlyingType(asType);
            }
            if ((!asType.IsPrimitive) && (asType != typeof(string)))
            {
                // Complex member: flatten its writable properties as "outer.Member".
                var magicType = MagicType.GetCachedType(asType);
                foreach (var magicProperty in magicType.GetAllProperties(false))
                {
                    if (magicProperty.CanWrite)
                    {
                        flattenPropertyTypes[name + '.' + magicProperty.Name] = magicProperty.PropertyType;
                    }
                }
            }
            else
            {
                flattenPropertyTypes[name] = type;
            }
        }
        else
        {
            flattenPropertyTypes[name] = type;
        }
    }
    return flattenPropertyTypes;
}
/// <summary>
/// Registers the bean, Map, object-array, Avro and XML event types plus the imports and
/// plug-in single-row functions used by the insert-into regression tests.
/// </summary>
private static void Configure(Configuration configuration)
{
    var common = configuration.Common;
    var compiler = configuration.Compiler;

    // POCO event types (duplicates re-registered exactly as in the original setup).
    var beanTypes = new[]
    {
        typeof(SupportBean), typeof(SupportObjectArrayOneDim), typeof(SupportBeanSimple),
        typeof(SupportBean_A), typeof(SupportRFIDEvent), typeof(SupportBean_S0),
        typeof(SupportBean_S1), typeof(SupportMarketDataBean), typeof(SupportTemperatureBean),
        typeof(SupportBeanComplexProps), typeof(SupportBeanInterfaceProps),
        typeof(SupportBeanErrorTestingOne), typeof(SupportBeanErrorTestingTwo),
        typeof(SupportBeanReadOnly), typeof(SupportBeanArrayCollMap), typeof(SupportBean_N),
        typeof(SupportBeanObject), typeof(SupportBeanCtorOne), typeof(SupportBeanCtorTwo),
        typeof(SupportBean_ST0), typeof(SupportBean_ST1), typeof(SupportEventWithCtorSameType),
        typeof(SupportBeanCtorThree), typeof(SupportBeanCtorOne), typeof(SupportBean_ST0),
        typeof(SupportBean_ST1), typeof(SupportEventWithMapFieldSetter), typeof(SupportBeanNumeric),
        typeof(SupportBeanArrayEvent), typeof(SupportBean_A), typeof(SupportBean_B),
        typeof(SupportEventContainsSupportBean)
    };
    foreach (var beanType in beanTypes)
    {
        common.AddEventType(beanType);
    }

    // Avro type with long, long-array, byte-array and string-keyed map fields.
    var avroExistingTypeSchema = SchemaBuilder.Record(
        "name",
        TypeBuilder.RequiredLong("MyLong"),
        TypeBuilder.Field("MyLongArray", TypeBuilder.Array(TypeBuilder.LongType())),
        TypeBuilder.Field("MyByteArray", TypeBuilder.BytesType()),
        TypeBuilder.Field(
            "MyMap",
            TypeBuilder.Map(
                TypeBuilder.StringType(
                    TypeBuilder.Property(AvroConstant.PROP_STRING_KEY, AvroConstant.PROP_STRING_VALUE)))));
    common.AddEventTypeAvro("AvroExistingType", new ConfigurationCommonEventTypeAvro(avroExistingTypeSchema));

    // Identically-shaped Map, object-array and Avro type pairs.
    var oneTwoMap = new Dictionary<string, object>
    {
        { "one", typeof(string) },
        { "two", typeof(string) }
    };
    common.AddEventType("MapOne", oneTwoMap);
    common.AddEventType("MapTwo", oneTwoMap);

    var oneTwoProps = new[] { "one", "two" };
    var oneTwoTypes = new object[] { typeof(string), typeof(string) };
    common.AddEventType("OAOne", oneTwoProps, oneTwoTypes);
    common.AddEventType("OATwo", oneTwoProps, oneTwoTypes);

    var avroOneAndTwoSchema = SchemaBuilder.Record(
        "name",
        TypeBuilder.RequiredString("one"),
        TypeBuilder.RequiredString("two"));
    common.AddEventTypeAvro("AvroOne", new ConfigurationCommonEventTypeAvro(avroOneAndTwoSchema));
    common.AddEventTypeAvro("AvroTwo", new ConfigurationCommonEventTypeAvro(avroOneAndTwoSchema));

    // Legacy bean types constructed through factory methods.
    var legacySupportBeanString = new ConfigurationCommonEventTypeBean { FactoryMethod = "GetInstance" };
    common.AddEventType("SupportBeanString", typeof(SupportBeanString), legacySupportBeanString);

    var legacySupportSensorEvent = new ConfigurationCommonEventTypeBean
    {
        FactoryMethod = typeof(SupportSensorEventFactory).FullName + ".GetInstance"
    };
    common.AddEventType("SupportSensorEvent", typeof(SupportSensorEvent), legacySupportSensorEvent);

    common.AddImportType(typeof(SupportEnum));

    // Map type exercising primitives, boxed types, arrays, interfaces and a nested bean.
    common.AddEventType("MyMap", new Dictionary<string, object>
    {
        { "Anint", typeof(int) },
        { "IntBoxed", typeof(int?) },
        { "FloatBoxed", typeof(float?) },
        { "IntArr", typeof(int[]) },
        { "MapProp", typeof(IDictionary<string, object>) },
        { "IsaImpl", typeof(ISupportAImpl) },
        { "IsbImpl", typeof(ISupportBImpl) },
        { "IsgImpl", typeof(ISupportAImplSuperGImpl) },
        { "IsabImpl", typeof(ISupportBaseABImpl) },
        { "Nested", typeof(SupportBeanComplexProps.SupportBeanSpecialGetterNested) }
    });

    // Matching Map / object-array / Avro representations of the same four properties.
    common.AddEventType("MyMapType", new Dictionary<string, object>
    {
        { "intVal", typeof(int) },
        { "stringVal", typeof(string) },
        { "doubleVal", typeof(double?) },
        { "nullVal", null }
    });

    var myOATypeProps = new[] { "intVal", "stringVal", "doubleVal", "nullVal" };
    var myOATypeTypes = new object[] { typeof(int), typeof(string), typeof(double?), null };
    common.AddEventType("MyOAType", myOATypeProps, myOATypeTypes);

    var myAvroTypeSchema = SchemaBuilder.Record(
        "MyAvroType",
        TypeBuilder.RequiredInt("intVal"),
        TypeBuilder.RequiredString("stringVal"),
        TypeBuilder.RequiredDouble("doubleVal"),
        TypeBuilder.Field("nullVal", TypeBuilder.NullType()));
    common.AddEventTypeAvro("MyAvroType", new ConfigurationCommonEventTypeAvro(myAvroTypeSchema));

    // XML event type with root element "abc".
    var xmlConfig = new ConfigurationCommonEventTypeXMLDOM { RootElementName = "abc" };
    common.AddEventType("xmltype", xmlConfig);

    common.AddEventType("MySupportMap", new Dictionary<string, object>
    {
        { "IntPrimitive", typeof(int) },
        { "LongBoxed", typeof(long?) },
        { "TheString", typeof(string) },
        { "BoolPrimitive", typeof(bool?) }
    });

    // Pairs of identically-shaped map types built via the MakeMap helper.
    var idOnlyType = MakeMap(new[] { new object[] { "Id", typeof(string) } });
    common.AddEventType("AEventMap", idOnlyType);
    common.AddEventType("BEventMap", idOnlyType);

    var idOnlyMetadata = MakeMap(new[] { new object[] { "Id", typeof(string) } });
    common.AddEventType("AEventTE", idOnlyMetadata);
    common.AddEventType("BEventTE", idOnlyMetadata);

    common.AddImportType(typeof(SupportStaticMethodLib));
    common.AddImportNamespace(typeof(EPLInsertIntoPopulateUnderlying));

    // Map type with one nested map property.
    var complexMapMetadata = MakeMap(new[]
    {
        new object[]
        {
            "Nested",
            MakeMap(new[] { new object[] { "NestedValue", typeof(string) } })
        }
    });
    common.AddEventType("ComplexMap", complexMapMetadata);

    compiler.ByteCode.AllowSubscriber = true;

    // Plug-in single-row functions producing events in various representations.
    compiler.AddPlugInSingleRowFunction("generateMap", typeof(EPLInsertIntoTransposeStream), "LocalGenerateMap");
    compiler.AddPlugInSingleRowFunction("generateOA", typeof(EPLInsertIntoTransposeStream), "LocalGenerateOA");
    compiler.AddPlugInSingleRowFunction("generateAvro", typeof(EPLInsertIntoTransposeStream), "LocalGenerateAvro");
    compiler.AddPlugInSingleRowFunction("generateJson", typeof(EPLInsertIntoTransposeStream), "LocalGenerateJson");
    compiler.AddPlugInSingleRowFunction("custom", typeof(SupportStaticMethodLib), "MakeSupportBean");
    compiler.AddPlugInSingleRowFunction("customOne", typeof(SupportStaticMethodLib), "MakeSupportBean");
    compiler.AddPlugInSingleRowFunction("customTwo", typeof(SupportStaticMethodLib), "MakeSupportBeanNumeric");
}
/// <summary>
/// Exercises Database.GetAllDocs() against documents created by PutDocs():
/// the default query, start/end-key ranges, exclusive end keys, and
/// specific-key lookups, each compared against a hand-built expected result.
/// </summary>
/// <exception cref="Couchbase.Lite.CouchbaseLiteException"></exception>
public virtual void TestAllDocsQuery() {
    IList<RevisionInternal> docs = PutDocs(database);
    // Build one expected QueryRow per document; the row value carries the
    // revision id and an empty conflict list.
    IList<QueryRow> expectedRow = new AList<QueryRow>();
    foreach (RevisionInternal rev in docs) {
        IDictionary<string, object> value = new Dictionary<string, object>();
        value.Put("rev", rev.GetRevId());
        value.Put("_conflicts", new AList<string>());
        QueryRow queryRow = new QueryRow(rev.GetDocId(), 0, rev.GetDocId(), value, null);
        queryRow.SetDatabase(database);
        expectedRow.AddItem(queryRow);
    }
    // Default options: all rows are expected back.  The index order
    // [2,0,3,1,4] presumably reflects doc-id sort order of the fixtures
    // created by PutDocs() — TODO confirm against PutDocs().
    QueryOptions options = new QueryOptions();
    IDictionary<string, object> allDocs = database.GetAllDocs(options);
    IList<QueryRow> expectedRows = new AList<QueryRow>();
    expectedRows.AddItem(expectedRow[2]);
    expectedRows.AddItem(expectedRow[0]);
    expectedRows.AddItem(expectedRow[3]);
    expectedRows.AddItem(expectedRow[1]);
    expectedRows.AddItem(expectedRow[4]);
    IDictionary<string, object> expectedQueryResult = CreateExpectedQueryResult(expectedRows, 0);
    NUnit.Framework.Assert.AreEqual(expectedQueryResult, allDocs);
    // Start/end key query: only ids within ["2", "44444"] should remain.
    options = new QueryOptions();
    options.StartKey = "2";
    options.EndKey = "44444";
    allDocs = database.GetAllDocs(options);
    expectedRows = new AList<QueryRow>();
    expectedRows.AddItem(expectedRow[0]);
    expectedRows.AddItem(expectedRow[3]);
    expectedRows.AddItem(expectedRow[1]);
    expectedQueryResult = CreateExpectedQueryResult(expectedRows, 0);
    NUnit.Framework.Assert.AreEqual(expectedQueryResult, allDocs);
    // Start/end query without inclusive end: the row matching the end key
    // itself is dropped (reuses the previous options object).
    options.SetInclusiveEnd(false);
    allDocs = database.GetAllDocs(options);
    expectedRows = new AList<QueryRow>();
    expectedRows.AddItem(expectedRow[0]);
    expectedRows.AddItem(expectedRow[3]);
    expectedQueryResult = CreateExpectedQueryResult(expectedRows, 0);
    NUnit.Framework.Assert.AreEqual(expectedQueryResult, allDocs);
    // Get all documents: with default QueryOptions (same as the first query).
    options = new QueryOptions();
    allDocs = database.GetAllDocs(options);
    expectedRows = new AList<QueryRow>();
    expectedRows.AddItem(expectedRow[2]);
    expectedRows.AddItem(expectedRow[0]);
    expectedRows.AddItem(expectedRow[3]);
    expectedRows.AddItem(expectedRow[1]);
    expectedRows.AddItem(expectedRow[4]);
    expectedQueryResult = CreateExpectedQueryResult(expectedRows, 0);
    NUnit.Framework.Assert.AreEqual(expectedQueryResult, allDocs);
    // Get specific documents: restrict the query to a single known doc id.
    options = new QueryOptions();
    IList<object> docIds = new AList<object>();
    QueryRow expected2 = expectedRow[2];
    docIds.AddItem(expected2.Document.Id);
    options.SetKeys(docIds);
    allDocs = database.GetAllDocs(options);
    expectedRows = new AList<QueryRow>();
    expectedRows.AddItem(expected2);
    expectedQueryResult = CreateExpectedQueryResult(expectedRows, 0);
    NUnit.Framework.Assert.AreEqual(expectedQueryResult, allDocs);
}
/// <summary>
/// Evaluates every queued ref update against the refs advertised by the
/// remote, assigns a preliminary status to updates that need no transfer
/// (up-to-date, non-existing, rejected), and returns the map of updates
/// that should actually be sent, keyed by remote ref name.
/// </summary>
/// <exception cref="NGit.Errors.TransportException"></exception>
private IDictionary<string, RemoteRefUpdate> PrepareRemoteUpdates() {
    IDictionary<string, RemoteRefUpdate> result = new Dictionary<string, RemoteRefUpdate>();
    foreach (RemoteRefUpdate rru in toPush.Values) {
        // An unadvertised ref is treated as the zero id (ref absent remotely).
        Ref advertisedRef = connection.GetRef(rru.GetRemoteName());
        ObjectId advertisedOld = (advertisedRef == null ? ObjectId.ZeroId : advertisedRef.GetObjectId());
        if (rru.GetNewObjectId().Equals(advertisedOld)) {
            if (rru.IsDelete()) {
                // deleting a ref that exists neither locally nor remotely
                rru.SetStatus(RemoteRefUpdate.Status.NON_EXISTING);
            } else {
                // same object - nothing to do
                rru.SetStatus(RemoteRefUpdate.Status.UP_TO_DATE);
            }
            continue;
        }
        // caller has explicitly specified expected old object id, while it
        // has been changed in the mean time - reject
        if (rru.IsExpectingOldObjectId() && !rru.GetExpectedOldObjectId().Equals(advertisedOld)) {
            rru.SetStatus(RemoteRefUpdate.Status.REJECTED_REMOTE_CHANGED);
            continue;
        }
        // create ref (hasn't existed on remote side) and delete ref
        // are always fast-forward commands, feasible at this level
        if (advertisedOld.Equals(ObjectId.ZeroId) || rru.IsDelete()) {
            rru.SetFastForward(true);
            result.Put(rru.GetRemoteName(), rru);
            continue;
        }
        // check for fast-forward:
        // - both old and new ref must point to commits, AND
        // - both of them must be known for us, exist in repository, AND
        // - old commit must be ancestor of new commit
        bool fastForward = true;
        try {
            RevObject oldRev = walker.ParseAny(advertisedOld);
            RevObject newRev = walker.ParseAny(rru.GetNewObjectId());
            if (!(oldRev is RevCommit) || !(newRev is RevCommit) || !walker.IsMergedInto((RevCommit)oldRev, (RevCommit)newRev)) {
                fastForward = false;
            }
        }
        catch (MissingObjectException) {
            // object unknown locally: cannot prove fast-forward, assume not
            fastForward = false;
        }
        catch (Exception x) {
            // any other walker failure is a hard transport error
            throw new TransportException(transport.GetURI(), MessageFormat.Format(JGitText.Get().readingObjectsFromLocalRepositoryFailed, x.Message), x);
        }
        rru.SetFastForward(fastForward);
        // non-fast-forward without --force is rejected; everything else ships
        if (!fastForward && !rru.IsForceUpdate()) {
            rru.SetStatus(RemoteRefUpdate.Status.REJECTED_NONFASTFORWARD);
        } else {
            result.Put(rru.GetRemoteName(), rru);
        }
    }
    return (result);
}
/// <exception cref="Couchbase.Lite.CouchbaseLiteException"></exception> public virtual void TestViewCollation() { IList <object> list1 = new AList <object>(); list1.AddItem("a"); IList <object> list2 = new AList <object>(); list2.AddItem("b"); IList <object> list3 = new AList <object>(); list3.AddItem("b"); list3.AddItem("c"); IList <object> list4 = new AList <object>(); list4.AddItem("b"); list4.AddItem("c"); list4.AddItem("a"); IList <object> list5 = new AList <object>(); list5.AddItem("b"); list5.AddItem("d"); IList <object> list6 = new AList <object>(); list6.AddItem("b"); list6.AddItem("d"); list6.AddItem("e"); // Based on CouchDB's "view_collation.js" test IList <object> testKeys = new AList <object>(); testKeys.AddItem(null); testKeys.AddItem(false); testKeys.AddItem(true); testKeys.AddItem(0); testKeys.AddItem(2.5); testKeys.AddItem(10); testKeys.AddItem(" "); testKeys.AddItem("_"); testKeys.AddItem("~"); testKeys.AddItem("a"); testKeys.AddItem("A"); testKeys.AddItem("aa"); testKeys.AddItem("b"); testKeys.AddItem("B"); testKeys.AddItem("ba"); testKeys.AddItem("bb"); testKeys.AddItem(list1); testKeys.AddItem(list2); testKeys.AddItem(list3); testKeys.AddItem(list4); testKeys.AddItem(list5); testKeys.AddItem(list6); int i = 0; foreach (object key in testKeys) { IDictionary <string, object> docProperties = new Dictionary <string, object>(); docProperties.Put("_id", Sharpen.Extensions.ToString(i++)); docProperties["name"] = key; PutDoc(database, docProperties); } View view = database.GetView("default/names"); view.SetMapReduce(new _Mapper_963(), null, "1.0"); QueryOptions options = new QueryOptions(); IList <QueryRow> rows = view.QueryWithOptions(options); i = 0; foreach (QueryRow row in rows) { NUnit.Framework.Assert.AreEqual(testKeys[i++], row.Key); } }
/// <summary>
/// Compute from parameters a crontab schedule.
/// Expected argument layout (5 to 9 entries):
/// [minutes, hours, daysOfMonth, months, daysOfWeek,
///  seconds?, timezone?, milliseconds?, microseconds?].
/// </summary>
/// <param name="args">parameters</param>
/// <exception cref="ScheduleParameterException">if the parameters are invalid</exception>
/// <returns>crontab schedule</returns>
public static ScheduleSpec ComputeValues(object[] args)
{
    // At least the 5 core fields, at most 9 fields total.
    if (args.Length <= 4 || args.Length >= 10) {
        throw new ScheduleParameterException(GetExpressionCountException(args.Length));
    }
    var unitMap = new Dictionary<ScheduleUnit, ICollection<int>>();
    var minutes = args[0];
    var hours = args[1];
    var daysOfMonth = args[2];
    var months = args[3];
    var daysOfWeek = args[4];
    unitMap.Put(ScheduleUnit.MINUTES, ComputeValues(minutes, ScheduleUnit.MINUTES));
    unitMap.Put(ScheduleUnit.HOURS, ComputeValues(hours, ScheduleUnit.HOURS));
    var resultMonths = ComputeValues(months, ScheduleUnit.MONTHS);
    // "last day"/"weekday"-style cron parameters may appear in at most one of
    // the two day fields.
    if (daysOfWeek is CronParameter && daysOfMonth is CronParameter) {
        throw new ScheduleParameterException("Invalid combination between days of week and days of month fields for timer:at");
    }
    if (resultMonths != null && resultMonths.Count == 1 && (resultMonths.First().IsInt32())) {
        // If other arguments are cronParameters, use it for later computations
        CronParameter parameter = null;
        if (daysOfMonth is CronParameter) {
            parameter = (CronParameter) daysOfMonth;
        } else if (daysOfWeek is CronParameter) {
            parameter = (CronParameter) daysOfWeek;
        }
        if (parameter != null) {
            // A single-month schedule pins the month on the cron parameter.
            parameter.Month = resultMonths.First();
        }
    }
    var resultDaysOfWeek = ComputeValues(daysOfWeek, ScheduleUnit.DAYS_OF_WEEK);
    var resultDaysOfMonth = ComputeValues(daysOfMonth, ScheduleUnit.DAYS_OF_MONTH);
    if (resultDaysOfWeek != null && resultDaysOfWeek.Count == 1 && (resultDaysOfWeek.First().IsInt32())) {
        // The result is in the form "last xx of the month"
        // Days of week is replaced by a wildcard and days of month is updated with
        // the computation of "last xx day of month".
        // In this case "days of month" parameter has to be a wildcard.
        // (Values > 6 cannot be plain weekdays 0..6, so they mark this case.)
        if (resultDaysOfWeek.First() > 6) {
            if (resultDaysOfMonth != null) {
                throw new ScheduleParameterException("Invalid combination between days of week and days of month fields for timer:at");
            }
            resultDaysOfMonth = resultDaysOfWeek;
            resultDaysOfWeek = null;
        }
    }
    // A single-valued days-of-month result likewise requires days-of-week to
    // be a wildcard (null).
    if (resultDaysOfMonth != null && resultDaysOfMonth.Count == 1 && (resultDaysOfMonth.First().IsInt32())) {
        if (resultDaysOfWeek != null) {
            throw new ScheduleParameterException("Invalid combination between days of week and days of month fields for timer:at");
        }
    }
    unitMap.Put(ScheduleUnit.DAYS_OF_WEEK, resultDaysOfWeek);
    unitMap.Put(ScheduleUnit.DAYS_OF_MONTH, resultDaysOfMonth);
    unitMap.Put(ScheduleUnit.MONTHS, resultMonths);
    // Optional trailing fields: seconds, timezone, milliseconds, microseconds.
    if (args.Length > 5) {
        unitMap.Put(ScheduleUnit.SECONDS, ComputeValues(args[5], ScheduleUnit.SECONDS));
    }
    string timezone = null;
    if (args.Length > 6) {
        // Timezone must be a string unless given as a wildcard (= no timezone).
        if (!(args[6] is WildcardParameter)) {
            if (!(args[6] is string)) {
                throw new ScheduleParameterException("Invalid timezone parameter '" + args[6] + "' for timer:at, expected a string-type value");
            }
            timezone = (string) args[6];
        }
    }
    if (args.Length > 7) {
        unitMap.Put(ScheduleUnit.MILLISECONDS, ComputeValues(args[7], ScheduleUnit.MILLISECONDS));
    }
    if (args.Length > 8) {
        unitMap.Put(ScheduleUnit.MICROSECONDS, ComputeValues(args[8], ScheduleUnit.MICROSECONDS));
    }
    // Special operators ("last", "weekday", ...) are carried separately from
    // the per-unit value sets.
    var optionalDayOfMonthOp = GetOptionalSpecialOp(daysOfMonth);
    var optionalDayOfWeekOp = GetOptionalSpecialOp(daysOfWeek);
    return new ScheduleSpec(unitMap, timezone, optionalDayOfMonthOp, optionalDayOfWeekOp);
}
/// <summary>Flush hanging leaves.</summary> /// <param name="container">a container element</param> public virtual void FlushHangingLeaves(IPropertyContainer container) { Paragraph p = CreateLeavesContainer(); if (p != null) { IDictionary <String, String> map = new Dictionary <String, String>(); map.Put(CssConstants.OVERFLOW, CommonCssConstants.VISIBLE); OverflowApplierUtil.ApplyOverflow(map, p); if (container is Document) { ((Document)container).Add(p); } else { if (container is Paragraph) { foreach (IElement leafElement in waitingLeaves) { if (leafElement is ILeafElement) { ((Paragraph)container).Add((ILeafElement)leafElement); } else { if (leafElement is IBlockElement) { ((Paragraph)container).Add((IBlockElement)leafElement); } } } } else { if (container is Div) { ((Div)container).Add(p); } else { if (container is Cell) { ((Cell)container).Add(p); } else { if (container is List) { ListItem li = new ListItem(); li.Add(p); ((List)container).Add(li); } else { throw new InvalidOperationException("Unable to process hanging inline content"); } } } } } waitingLeaves.Clear(); } }
static CanonMakernoteDirectory() {
    // NOTE(review): a large block of commented-out Java constants
    // (TAG_CANON_CUSTOM_FUNCTION_*, ids 0xC301-0xC30D, covering long-exposure
    // noise reduction, mirror lockup, bracketing, curtain sync, etc.) and the
    // matching commented-out name registrations were removed as dead code;
    // those custom-function tags were never ported.  ExifTool's Canon tag
    // documentation describes their values if they are ever needed.
    // Top-level maker note tags.
    _tagNameMap.Put(TagCanonFirmwareVersion, "Firmware Version");
    _tagNameMap.Put(TagCanonImageNumber, "Image Number");
    _tagNameMap.Put(TagCanonImageType, "Image Type");
    _tagNameMap.Put(TagCanonOwnerName, "Owner Name");
    _tagNameMap.Put(TagCanonSerialNumber, "Camera Serial Number");
    _tagNameMap.Put(TagCameraInfoArray, "Camera Info Array");
    _tagNameMap.Put(TagCanonFileLength, "File Length");
    _tagNameMap.Put(TagCanonCustomFunctionsArray, "Custom Functions");
    _tagNameMap.Put(TagModelId, "Canon Model ID");
    _tagNameMap.Put(TagMovieInfoArray, "Movie Info Array");
    // Camera settings sub-directory tags.
    _tagNameMap.Put(CanonMakernoteDirectory.CameraSettings.TagAfPointSelected, "AF Point Selected");
    _tagNameMap.Put(CanonMakernoteDirectory.CameraSettings.TagContinuousDriveMode, "Continuous Drive Mode");
    _tagNameMap.Put(CanonMakernoteDirectory.CameraSettings.TagContrast, "Contrast");
    _tagNameMap.Put(CanonMakernoteDirectory.CameraSettings.TagEasyShootingMode, "Easy Shooting Mode");
    _tagNameMap.Put(CanonMakernoteDirectory.CameraSettings.TagExposureMode, "Exposure Mode");
    _tagNameMap.Put(CanonMakernoteDirectory.CameraSettings.TagFlashDetails, "Flash Details");
    _tagNameMap.Put(CanonMakernoteDirectory.CameraSettings.TagFlashMode, "Flash Mode");
    _tagNameMap.Put(CanonMakernoteDirectory.CameraSettings.TagFocalUnitsPerMm, "Focal Units per mm");
    _tagNameMap.Put(CanonMakernoteDirectory.CameraSettings.TagFocusMode1, "Focus Mode");
    _tagNameMap.Put(CanonMakernoteDirectory.CameraSettings.TagFocusMode2, "Focus Mode");
    _tagNameMap.Put(CanonMakernoteDirectory.CameraSettings.TagImageSize, "Image Size");
    _tagNameMap.Put(CanonMakernoteDirectory.CameraSettings.TagIso, "Iso");
    _tagNameMap.Put(CanonMakernoteDirectory.CameraSettings.TagLongFocalLength, "Long Focal Length");
    _tagNameMap.Put(CanonMakernoteDirectory.CameraSettings.TagMacroMode, "Macro Mode");
    _tagNameMap.Put(CanonMakernoteDirectory.CameraSettings.TagMeteringMode, "Metering Mode");
    _tagNameMap.Put(CanonMakernoteDirectory.CameraSettings.TagSaturation, "Saturation");
    _tagNameMap.Put(CanonMakernoteDirectory.CameraSettings.TagSelfTimerDelay, "Self Timer Delay");
    _tagNameMap.Put(CanonMakernoteDirectory.CameraSettings.TagSharpness, "Sharpness");
    _tagNameMap.Put(CanonMakernoteDirectory.CameraSettings.TagShortFocalLength, "Short Focal Length");
    _tagNameMap.Put(CanonMakernoteDirectory.CameraSettings.TagQuality, "Quality");
    _tagNameMap.Put(CanonMakernoteDirectory.CameraSettings.TagUnknown2, "Unknown Camera Setting 2");
    _tagNameMap.Put(CanonMakernoteDirectory.CameraSettings.TagUnknown3, "Unknown Camera Setting 3");
    _tagNameMap.Put(CanonMakernoteDirectory.CameraSettings.TagUnknown4, "Unknown Camera Setting 4");
    _tagNameMap.Put(CanonMakernoteDirectory.CameraSettings.TagDigitalZoom, "Digital Zoom");
    _tagNameMap.Put(CanonMakernoteDirectory.CameraSettings.TagFocusType, "Focus Type");
    _tagNameMap.Put(CanonMakernoteDirectory.CameraSettings.TagUnknown7, "Unknown Camera Setting 7");
    _tagNameMap.Put(CanonMakernoteDirectory.CameraSettings.TagUnknown8, "Unknown Camera Setting 8");
    _tagNameMap.Put(CanonMakernoteDirectory.CameraSettings.TagUnknown9, "Unknown Camera Setting 9");
    _tagNameMap.Put(CanonMakernoteDirectory.CameraSettings.TagUnknown10, "Unknown Camera Setting 10");
    _tagNameMap.Put(CanonMakernoteDirectory.CameraSettings.TagFlashActivity, "Flash Activity");
    _tagNameMap.Put(CanonMakernoteDirectory.CameraSettings.TagUnknown12, "Unknown Camera Setting 12");
    _tagNameMap.Put(CanonMakernoteDirectory.CameraSettings.TagUnknown13, "Unknown Camera Setting 13");
    // Focal length sub-directory tags.
    _tagNameMap.Put(CanonMakernoteDirectory.FocalLength.TagWhiteBalance, "White Balance");
    _tagNameMap.Put(CanonMakernoteDirectory.FocalLength.TagSequenceNumber, "Sequence Number");
    _tagNameMap.Put(CanonMakernoteDirectory.FocalLength.TagAfPointUsed, "AF Point Used");
    _tagNameMap.Put(CanonMakernoteDirectory.FocalLength.TagFlashBias, "Flash Bias");
    _tagNameMap.Put(CanonMakernoteDirectory.FocalLength.TagAutoExposureBracketing, "Auto Exposure Bracketing");
    _tagNameMap.Put(CanonMakernoteDirectory.FocalLength.TagAebBracketValue, "AEB Bracket Value");
    _tagNameMap.Put(CanonMakernoteDirectory.FocalLength.TagSubjectDistance, "Subject Distance");
    // Shot info sub-directory tags.
    _tagNameMap.Put(CanonMakernoteDirectory.ShotInfo.TagAutoIso, "Auto ISO");
    _tagNameMap.Put(CanonMakernoteDirectory.ShotInfo.TagBaseIso, "Base ISO");
    _tagNameMap.Put(CanonMakernoteDirectory.ShotInfo.TagMeasuredEv, "Measured EV");
    _tagNameMap.Put(CanonMakernoteDirectory.ShotInfo.TagTargetAperture, "Target Aperture");
    _tagNameMap.Put(CanonMakernoteDirectory.ShotInfo.TagTargetExposureTime, "Target Exposure Time");
    _tagNameMap.Put(CanonMakernoteDirectory.ShotInfo.TagExposureCompensation, "Exposure Compensation");
    _tagNameMap.Put(CanonMakernoteDirectory.ShotInfo.TagWhiteBalance, "White Balance");
    _tagNameMap.Put(CanonMakernoteDirectory.ShotInfo.TagSlowShutter, "Slow Shutter");
    _tagNameMap.Put(CanonMakernoteDirectory.ShotInfo.TagSequenceNumber, "Sequence Number");
    _tagNameMap.Put(CanonMakernoteDirectory.ShotInfo.TagOpticalZoomCode, "Optical Zoom Code");
    _tagNameMap.Put(CanonMakernoteDirectory.ShotInfo.TagCameraTemperature, "Camera Temperature");
    _tagNameMap.Put(CanonMakernoteDirectory.ShotInfo.TagFlashGuideNumber, "Flash Guide Number");
    _tagNameMap.Put(CanonMakernoteDirectory.ShotInfo.TagAfPointsInFocus, "AF Points in Focus");
    _tagNameMap.Put(CanonMakernoteDirectory.ShotInfo.TagFlashExposureBracketing, "Flash Exposure Compensation");
    _tagNameMap.Put(CanonMakernoteDirectory.ShotInfo.TagAutoExposureBracketing, "Auto Exposure Bracketing");
    _tagNameMap.Put(CanonMakernoteDirectory.ShotInfo.TagAebBracketValue, "AEB Bracket Value");
    _tagNameMap.Put(CanonMakernoteDirectory.ShotInfo.TagControlMode, "Control Mode");
    _tagNameMap.Put(CanonMakernoteDirectory.ShotInfo.TagFocusDistanceUpper, "Focus Distance Upper");
    _tagNameMap.Put(CanonMakernoteDirectory.ShotInfo.TagFocusDistanceLower, "Focus Distance Lower");
    _tagNameMap.Put(CanonMakernoteDirectory.ShotInfo.TagFNumber, "F Number");
    _tagNameMap.Put(CanonMakernoteDirectory.ShotInfo.TagExposureTime, "Exposure Time");
    _tagNameMap.Put(CanonMakernoteDirectory.ShotInfo.TagMeasuredEv2, "Measured EV 2");
    _tagNameMap.Put(CanonMakernoteDirectory.ShotInfo.TagBulbDuration, "Bulb Duration");
    _tagNameMap.Put(CanonMakernoteDirectory.ShotInfo.TagCameraType, "Camera Type");
    _tagNameMap.Put(CanonMakernoteDirectory.ShotInfo.TagAutoRotate, "Auto Rotate");
    _tagNameMap.Put(CanonMakernoteDirectory.ShotInfo.TagNdFilter, "ND Filter");
    _tagNameMap.Put(CanonMakernoteDirectory.ShotInfo.TagSelfTimer2, "Self Timer 2");
    _tagNameMap.Put(CanonMakernoteDirectory.ShotInfo.TagFlashOutput, "Flash Output");
    // Panorama sub-directory tags.
    _tagNameMap.Put(CanonMakernoteDirectory.Panorama.TagPanoramaFrameNumber, "Panorama Frame Number");
    _tagNameMap.Put(CanonMakernoteDirectory.Panorama.TagPanoramaDirection, "Panorama Direction");
    // AF info sub-directory tags.
    _tagNameMap.Put(CanonMakernoteDirectory.AFInfo.TagNumAfPoints, "AF Point Count");
    _tagNameMap.Put(CanonMakernoteDirectory.AFInfo.TagValidAfPoints, "Valid AF Point Count");
    _tagNameMap.Put(CanonMakernoteDirectory.AFInfo.TagImageWidth, "Image Width");
    _tagNameMap.Put(CanonMakernoteDirectory.AFInfo.TagImageHeight, "Image Height");
    _tagNameMap.Put(CanonMakernoteDirectory.AFInfo.TagAfImageWidth, "AF Image Width");
    _tagNameMap.Put(CanonMakernoteDirectory.AFInfo.TagAfImageHeight, "AF Image Height");
    _tagNameMap.Put(CanonMakernoteDirectory.AFInfo.TagAfAreaWidth, "AF Area Width");
    _tagNameMap.Put(CanonMakernoteDirectory.AFInfo.TagAfAreaHeight, "AF Area Height");
    _tagNameMap.Put(CanonMakernoteDirectory.AFInfo.TagAfAreaXPositions, "AF Area X Positions");
    _tagNameMap.Put(CanonMakernoteDirectory.AFInfo.TagAfAreaYPositions, "AF Area Y Positions");
    _tagNameMap.Put(CanonMakernoteDirectory.AFInfo.TagAfPointsInFocus, "AF Points in Focus Count");
    _tagNameMap.Put(CanonMakernoteDirectory.AFInfo.TagPrimaryAfPoint1, "Primary AF Point 1");
    _tagNameMap.Put(CanonMakernoteDirectory.AFInfo.TagPrimaryAfPoint2, "Primary AF Point 2");
    // Remaining top-level tags.
    _tagNameMap.Put(TagThumbnailImageValidArea, "Thumbnail Image Valid Area");
    _tagNameMap.Put(TagSerialNumberFormat, "Serial Number Format");
    _tagNameMap.Put(TagSuperMacro, "Super Macro");
    _tagNameMap.Put(TagDateStampMode, "Date Stamp Mode");
    _tagNameMap.Put(TagMyColors, "My Colors");
    _tagNameMap.Put(TagFirmwareRevision, "Firmware Revision");
    _tagNameMap.Put(TagCategories, "Categories");
    _tagNameMap.Put(TagFaceDetectArray1, "Face Detect Array 1");
    _tagNameMap.Put(TagFaceDetectArray2, "Face Detect Array 2");
    _tagNameMap.Put(TagAfInfoArray2, "AF Info Array 2");
    _tagNameMap.Put(TagImageUniqueId, "Image Unique ID");
    _tagNameMap.Put(TagRawDataOffset, "Raw Data Offset");
    _tagNameMap.Put(TagOriginalDecisionDataOffset, "Original Decision Data Offset");
    _tagNameMap.Put(TagCustomFunctions1dArray, "Custom Functions (1D) Array");
    _tagNameMap.Put(TagPersonalFunctionsArray, "Personal Functions Array");
    _tagNameMap.Put(TagPersonalFunctionValuesArray, "Personal Function Values Array");
    _tagNameMap.Put(TagFileInfoArray, "File Info Array");
    _tagNameMap.Put(TagAfPointsInFocus1d, "AF Points in Focus (1D)");
    _tagNameMap.Put(TagLensModel, "Lens Model");
    _tagNameMap.Put(TagSerialInfoArray, "Serial Info Array");
    _tagNameMap.Put(TagDustRemovalData, "Dust Removal Data");
    _tagNameMap.Put(TagCropInfo, "Crop Info");
    _tagNameMap.Put(TagCustomFunctionsArray2, "Custom Functions Array 2");
    _tagNameMap.Put(TagAspectInfoArray, "Aspect Information Array");
    _tagNameMap.Put(TagProcessingInfoArray, "Processing Information Array");
    _tagNameMap.Put(TagToneCurveTable, "Tone Curve Table");
    _tagNameMap.Put(TagSharpnessTable, "Sharpness Table");
    _tagNameMap.Put(TagSharpnessFreqTable, "Sharpness Frequency Table");
    _tagNameMap.Put(TagWhiteBalanceTable, "White Balance Table");
    _tagNameMap.Put(TagColorBalanceArray, "Color Balance Array");
    _tagNameMap.Put(TagMeasuredColorArray, "Measured Color Array");
    _tagNameMap.Put(TagColorTemperature, "Color Temperature");
    _tagNameMap.Put(TagCanonFlagsArray, "Canon Flags Array");
    _tagNameMap.Put(TagModifiedInfoArray, "Modified Information Array");
    _tagNameMap.Put(TagToneCurveMatching, "Tone Curve Matching");
    _tagNameMap.Put(TagWhiteBalanceMatching, "White Balance Matching");
    _tagNameMap.Put(TagColorSpace, "Color Space");
    _tagNameMap.Put(TagPreviewImageInfoArray, "Preview Image Info Array");
    _tagNameMap.Put(TagVrdOffset, "VRD Offset");
    _tagNameMap.Put(TagSensorInfoArray, "Sensor Information Array");
    // NOTE(review): the display names of the next two entries look swapped
    // relative to the constant names ("Data" vs "Info") — confirm against the
    // upstream metadata-extractor source before changing; kept as-is here.
    _tagNameMap.Put(TagColorDataArray2, "Color Data Array 1");
    _tagNameMap.Put(TagCrwParam, "CRW Parameters");
    _tagNameMap.Put(TagColorInfoArray2, "Color Data Array 2");
    _tagNameMap.Put(TagBlackLevel, "Black Level");
    _tagNameMap.Put(TagCustomPictureStyleFileName, "Custom Picture Style File Name");
    _tagNameMap.Put(TagColorInfoArray, "Color Info Array");
    _tagNameMap.Put(TagVignettingCorrectionArray1, "Vignetting Correction Array 1");
    _tagNameMap.Put(TagVignettingCorrectionArray2, "Vignetting Correction Array 2");
    _tagNameMap.Put(TagLightingOptimizerArray, "Lighting Optimizer Array");
    _tagNameMap.Put(TagLensInfoArray, "Lens Info Array");
    _tagNameMap.Put(TagAmbianceInfoArray, "Ambiance Info Array");
    _tagNameMap.Put(TagFilterInfoArray, "Filter Info Array");
}
private ConfigurationInformation TakeSnapshot(Configuration configuration) { try { // Allow variables to have non-serializable values by copying their initial value IDictionary <string, Object> variableInitialValues = null; if (!configuration.Variables.IsEmpty()) { variableInitialValues = new Dictionary <string, object>(); foreach (var variable in configuration.Variables) { var initializationValue = variable.Value.InitializationValue; if (initializationValue != null) { variableInitialValues.Put(variable.Key, initializationValue); variable.Value.InitializationValue = null; } } } // Avro schemas are not serializable IDictionary <string, ConfigurationEventTypeAvro> avroSchemas = null; if (!configuration.EventTypesAvro.IsEmpty()) { avroSchemas = new LinkedHashMap <string, ConfigurationEventTypeAvro>(configuration.EventTypesAvro); configuration.EventTypesAvro.Clear(); } var copy = (Configuration)SerializableObjectCopier.Copy(_container, configuration); copy.TransientConfiguration = configuration.TransientConfiguration; copy.Container = _container; // transition to this container?? // Restore variable with initial values if (variableInitialValues != null && !variableInitialValues.IsEmpty()) { foreach (var entry in variableInitialValues) { var config = copy.Variables.Get(entry.Key); config.InitializationValue = entry.Value; } } // Restore Avro schemas if (avroSchemas != null) { copy.EventTypesAvro.PutAll(avroSchemas); } return(copy); } catch (IOException e) { throw new ConfigurationException( "Failed to snapshot configuration instance through serialization : " + e.Message, e); } catch (TypeLoadException e) { throw new ConfigurationException( "Failed to snapshot configuration instance through serialization : " + e.Message, e); } }
/// <summary> /// Initialize event adapter service for config snapshot. /// </summary> /// <param name="eventAdapterService">is events adapter</param> /// <param name="configSnapshot">is the config snapshot</param> /// <param name="engineImportService">engine import service</param> /// <param name="resourceManager">The resource manager.</param> /// <exception cref="ConfigurationException"> /// Error configuring engine: " + ex.Message /// or /// Error configuring engine: " + ex.Message /// or /// or /// Error configuring engine: " + ex.Message /// or /// Error configuring engine, dependency graph between map type names is circular: " + e.Message /// or /// Error configuring engine: " + ex.Message /// or /// Error configuring engine, dependency graph between object array type names is circular: " + /// e.Message /// or /// Error configuring engine: " + ex.Message /// or /// Failed to load plug-in event representation class '" + className + "' /// or /// Failed to instantiate plug-in event representation class '" + className + /// "' via default constructor /// or /// Failed to instantiate plug-in event representation class '" + className + /// "' via default constructor /// or /// Illegal access to instantiate plug-in event representation class '" + className + /// "' via default constructor /// or /// Illegal access to instantiate plug-in event representation class '" + className + /// "' via default constructor /// or /// Plug-in event representation class '" + className + /// "' does not implement the required interface " + typeof (PlugInEventRepresentation).Name /// or /// Plug-in event representation class '" + className + "' and URI '" + eventRepURI + /// "' did not initialize correctly : " + e.Message /// </exception> internal static void Init( EventAdapterService eventAdapterService, ConfigurationInformation configSnapshot, EngineImportService engineImportService, IResourceManager resourceManager) { // Extract legacy event type definitions for each event type 
name, if supplied. // // We supply this information as setup information to the event adapter service // to allow discovery of superclasses and interfaces during event type construction for bean events, // such that superclasses and interfaces can use the legacy type definitions. var classLegacyInfo = new Dictionary <string, ConfigurationEventTypeLegacy>(); foreach (var entry in configSnapshot.EventTypeNames) { var typeName = entry.Key; var className = entry.Value; var legacyDef = configSnapshot.EventTypesLegacy.Get(typeName); if (legacyDef != null) { classLegacyInfo.Put(className, legacyDef); } } eventAdapterService.TypeLegacyConfigs = classLegacyInfo; eventAdapterService.DefaultPropertyResolutionStyle = configSnapshot.EngineDefaults.EventMeta.ClassPropertyResolutionStyle; eventAdapterService.DefaultAccessorStyle = configSnapshot.EngineDefaults.EventMeta.DefaultAccessorStyle; foreach (var typeNamespace in configSnapshot.EventTypeAutoNamePackages) { eventAdapterService.AddAutoNamePackage(typeNamespace); } // Add from the configuration the event class names var typeNames = configSnapshot.EventTypeNames; foreach (var entry in typeNames) { // Add class try { var typeName = entry.Key; eventAdapterService.AddBeanType(typeName, entry.Value, false, true, true, true); } catch (EventAdapterException ex) { throw new ConfigurationException("Error configuring engine: " + ex.Message, ex); } } // Add from the configuration the Java event class names var avroNames = configSnapshot.EventTypesAvro; foreach (var entry in avroNames) { try { eventAdapterService.AddAvroType(entry.Key, entry.Value, true, true, true, false, false); } catch (EventAdapterException ex) { throw new ConfigurationException("Error configuring engine: " + ex.Message, ex); } } // Add from the configuration the XML DOM names and type def var xmlDOMNames = configSnapshot.EventTypesXMLDOM; foreach (var entry in xmlDOMNames) { SchemaModel schemaModel = null; if ((entry.Value.SchemaResource != null) || 
(entry.Value.SchemaText != null)) { try { schemaModel = XSDSchemaMapper.LoadAndMap( entry.Value.SchemaResource, entry.Value.SchemaText, engineImportService, resourceManager); } catch (Exception ex) { throw new ConfigurationException(ex.Message, ex); } } // Add XML DOM type try { eventAdapterService.AddXMLDOMType(entry.Key, entry.Value, schemaModel, true); } catch (EventAdapterException ex) { throw new ConfigurationException("Error configuring engine: " + ex.Message, ex); } } // Add maps in dependency order such that supertypes are added before subtypes ICollection <string> dependentMapOrder; try { var typesReferences = ToTypesReferences(configSnapshot.MapTypeConfigurations); dependentMapOrder = GraphUtil.GetTopDownOrder(typesReferences); } catch (GraphCircularDependencyException e) { throw new ConfigurationException( "Error configuring engine, dependency graph between map type names is circular: " + e.Message, e); } var mapNames = configSnapshot.EventTypesMapEvents; var nestableMapNames = configSnapshot.EventTypesNestableMapEvents; dependentMapOrder.AddAll(mapNames.Keys); dependentMapOrder.AddAll(nestableMapNames.Keys); try { foreach (var mapName in dependentMapOrder) { var mapConfig = configSnapshot.MapTypeConfigurations.Get(mapName); var propertiesUnnested = mapNames.Get(mapName); if (propertiesUnnested != null) { var propertyTypes = CreatePropertyTypes(propertiesUnnested, engineImportService); var propertyTypesCompiled = EventTypeUtility.CompileMapTypeProperties( propertyTypes, eventAdapterService); eventAdapterService.AddNestableMapType( mapName, propertyTypesCompiled, mapConfig, true, true, true, false, false); } var propertiesNestable = nestableMapNames.Get(mapName); if (propertiesNestable != null) { var propertiesNestableCompiled = EventTypeUtility.CompileMapTypeProperties( propertiesNestable, eventAdapterService); eventAdapterService.AddNestableMapType( mapName, propertiesNestableCompiled, mapConfig, true, true, true, false, false); } } } catch 
(EventAdapterException ex) { throw new ConfigurationException("Error configuring engine: " + ex.Message, ex); } // Add object-array in dependency order such that supertypes are added before subtypes ICollection <string> dependentObjectArrayOrder; try { var typesReferences = ToTypesReferences(configSnapshot.ObjectArrayTypeConfigurations); dependentObjectArrayOrder = GraphUtil.GetTopDownOrder(typesReferences); } catch (GraphCircularDependencyException e) { throw new ConfigurationException( "Error configuring engine, dependency graph between object array type names is circular: " + e.Message, e); } var nestableObjectArrayNames = configSnapshot.EventTypesNestableObjectArrayEvents; dependentObjectArrayOrder.AddAll(nestableObjectArrayNames.Keys); try { foreach (var objectArrayName in dependentObjectArrayOrder) { var objectArrayConfig = configSnapshot.ObjectArrayTypeConfigurations.Get(objectArrayName); var propertyTypes = nestableObjectArrayNames.Get(objectArrayName); propertyTypes = ResolveClassesForStringPropertyTypes(propertyTypes, engineImportService); var propertyTypesCompiled = EventTypeUtility.CompileMapTypeProperties( propertyTypes, eventAdapterService); eventAdapterService.AddNestableObjectArrayType( objectArrayName, propertyTypesCompiled, objectArrayConfig, true, true, true, false, false, false, null); } } catch (EventAdapterException ex) { throw new ConfigurationException("Error configuring engine: " + ex.Message, ex); } // Add plug-in event representations var plugInReps = configSnapshot.PlugInEventRepresentation; foreach (var entry in plugInReps) { String className = entry.Value.EventRepresentationTypeName; Type eventRepClass; try { eventRepClass = TypeHelper.ResolveType(className); } catch (TypeLoadException ex) { throw new ConfigurationException( "Failed to load plug-in event representation class '" + className + "'", ex); } Object pluginEventRepObj; try { pluginEventRepObj = Activator.CreateInstance(eventRepClass); } catch (TypeInstantiationException ex) { 
throw new ConfigurationException( "Failed to instantiate plug-in event representation class '" + className + "' via default constructor", ex); } catch (TargetInvocationException ex) { throw new ConfigurationException( "Failed to instantiate plug-in event representation class '" + className + "' via default constructor", ex); } catch (MethodAccessException ex) { throw new ConfigurationException( "Illegal access to instantiate plug-in event representation class '" + className + "' via default constructor", ex); } catch (MemberAccessException ex) { throw new ConfigurationException( "Illegal access to instantiate plug-in event representation class '" + className + "' via default constructor", ex); } if (!(pluginEventRepObj is PlugInEventRepresentation)) { throw new ConfigurationException( "Plug-in event representation class '" + className + "' does not implement the required interface " + typeof(PlugInEventRepresentation).Name); } var eventRepURI = entry.Key; var pluginEventRep = (PlugInEventRepresentation)pluginEventRepObj; var initializer = entry.Value.Initializer; var context = new PlugInEventRepresentationContext(eventAdapterService, eventRepURI, initializer); try { pluginEventRep.Init(context); eventAdapterService.AddEventRepresentation(eventRepURI, pluginEventRep); } catch (Exception e) { throw new ConfigurationException( "Plug-in event representation class '" + className + "' and URI '" + eventRepURI + "' did not initialize correctly : " + e.Message, e); } } // Add plug-in event type names var plugInNames = configSnapshot.PlugInEventTypes; foreach (var entry in plugInNames) { var name = entry.Key; var config = entry.Value; eventAdapterService.AddPlugInEventType( name, config.EventRepresentationResolutionURIs, config.Initializer); } }
/// <summary>
/// Derives PDF-level attributes (filter, decode parms, decode array, color space) for a
/// raw image. CCITT-encoded images receive a CCITTFaxDecode filter whose decode
/// parameters are built from the flag bits carried in the color-space field; all other
/// raw images get their inversion decode array, any extra attributes, and an optional
/// FlateDecode filter applied.
/// </summary>
/// <param name="image">raw image to update; must report <c>IsRawImage()</c></param>
/// <param name="additional">extra image attributes for non-CCITT images; may be null</param>
public static void UpdateImageAttributes(RawImageData image, IDictionary<String, Object> additional)
{
    if (!image.IsRawImage())
    {
        throw new ArgumentException("Raw image expected.");
    }

    // For CCITT images the color-space field doubles as a flag word and the
    // CCITT type constant is always above 0xff.
    int flags = image.GetColorSpace();
    int ccittType = image.GetTypeCcitt();

    if (ccittType > 0xff)
    {
        if (!image.IsMask())
        {
            image.SetColorSpace(1);
        }
        image.SetBpc(1);
        image.SetFilter("CCITTFaxDecode");

        IDictionary<String, Object> parms = new Dictionary<String, Object>();
        int k = ccittType - RawImageData.CCITTG3_1D;
        if (k != 0)
        {
            parms.Put("K", k);
        }
        if ((flags & RawImageData.CCITT_BLACKIS1) != 0)
        {
            parms.Put("BlackIs1", true);
        }
        if ((flags & RawImageData.CCITT_ENCODEDBYTEALIGN) != 0)
        {
            parms.Put("EncodedByteAlign", true);
        }
        if ((flags & RawImageData.CCITT_ENDOFLINE) != 0)
        {
            parms.Put("EndOfLine", true);
        }
        if ((flags & RawImageData.CCITT_ENDOFBLOCK) != 0)
        {
            parms.Put("EndOfBlock", false);
        }
        parms.Put("Columns", image.GetWidth());
        parms.Put("Rows", image.GetHeight());
        image.decodeParms = parms;
        return;
    }

    // Non-CCITT raw image: here the field really is the component count (1, 3, or 4+).
    if (image.IsInverted())
    {
        // Inversion uses a [1 0 1 0 ...] decode array: one (max, min) pair per component.
        int components = flags == 1 ? 1 : (flags == 3 ? 3 : 4);
        var decode = new float[components * 2];
        for (int i = 0; i < decode.Length; i += 2)
        {
            decode[i] = 1;
        }
        image.decode = decode;
    }
    if (additional != null)
    {
        image.SetImageAttributes(additional);
    }
    if (image.IsMask() && (image.GetBpc() == 1 || image.GetBpc() > 8))
    {
        image.SetColorSpace(-1);
    }
    if (image.IsDeflated())
    {
        image.SetFilter("FlateDecode");
    }
}
/// <summary>
/// Produces an aggregation service for use with match-recognize.
/// Collects the aggregation nodes per stream into equivalence lists, builds per-stream
/// aggregator factories and child-expression evaluators, and assigns a global column
/// number to every distinct aggregation.
/// </summary>
/// <param name="numStreams">number of streams</param>
/// <param name="measureExprNodesPerStream">measure nodes</param>
/// <param name="typesPerStream">type information</param>
/// <exception cref="ExprValidationException">for validation errors</exception>
/// <returns>service</returns>
public static AggregationServiceMatchRecognizeFactoryDesc GetServiceMatchRecognize(
    int numStreams,
    IDictionary<int, IList<ExprAggregateNode>> measureExprNodesPerStream,
    EventType[] typesPerStream)
{
    // Group equivalent aggregation expressions per stream; ordered dictionary keeps
    // stream order stable for the column numbering pass below.
    var equivalencyListPerStream = new OrderedDictionary<int, List<AggregationServiceAggExpressionDesc>>();
    foreach (var entry in measureExprNodesPerStream)
    {
        var equivalencyList = new List<AggregationServiceAggExpressionDesc>();
        equivalencyListPerStream.Put(entry.Key, equivalencyList);
        foreach (ExprAggregateNode selectAggNode in entry.Value)
        {
            AddEquivalent(selectAggNode, equivalencyList);
        }
    }

    // Build, per stream, the aggregation method factories and the evaluators that
    // produce the value each aggregation consumes.
    var aggregatorsPerStream = new LinkedHashMap<int, AggregationMethodFactory[]>();
    var evaluatorsPerStream = new Dictionary<int, ExprEvaluator[]>();
    foreach (var equivalencyPerStream in equivalencyListPerStream)
    {
        int index = 0;
        int stream = equivalencyPerStream.Key;

        var aggregators = new AggregationMethodFactory[equivalencyPerStream.Value.Count];
        aggregatorsPerStream.Put(stream, aggregators);

        var evaluators = new ExprEvaluator[equivalencyPerStream.Value.Count];
        evaluatorsPerStream.Put(stream, evaluators);

        foreach (AggregationServiceAggExpressionDesc aggregation in equivalencyPerStream.Value)
        {
            ExprAggregateNode aggregateNode = aggregation.AggregationNode;
            if (aggregateNode.ChildNodes.Count > 1)
            {
                // multiple child expressions (e.g. two-parameter aggregations)
                evaluators[index] = ExprMethodAggUtil.GetMultiNodeEvaluator(
                    aggregateNode.ChildNodes,
                    typesPerStream.Length > 1,
                    typesPerStream);
            }
            else if (aggregateNode.ChildNodes.Count > 0)
            {
                // Use the evaluation node under the aggregation node to obtain the aggregation value
                evaluators[index] = aggregateNode.ChildNodes[0].ExprEvaluator;
            }
            else
            {
                // For aggregation that doesn't evaluate any particular sub-expression, return null on evaluation
                evaluators[index] = new ProxyExprEvaluator
                {
                    ProcEvaluate = eventParams => null,
                    ProcReturnType = () => null
                };
            }

            aggregators[index] = aggregateNode.Factory;
            index++;
        }
    }

    // Assign a column number to each aggregation node. The regular aggregation goes first followed by access-aggregation.
    int columnNumber = 0;
    var allExpressions = new List<AggregationServiceAggExpressionDesc>();
    foreach (var equivalencyPerStream in equivalencyListPerStream)
    {
        foreach (AggregationServiceAggExpressionDesc entry in equivalencyPerStream.Value)
        {
            entry.ColumnNum = columnNumber++;
        }
        allExpressions.AddAll(equivalencyPerStream.Value);
    }

    var factory = new AggregationServiceMatchRecognizeFactoryImpl(
        numStreams,
        aggregatorsPerStream,
        evaluatorsPerStream);
    return new AggregationServiceMatchRecognizeFactoryDesc(factory, allExpressions);
}
/// <summary>
/// Fires events from multiple threads at a string-partitioned context and verifies that
/// the per-partition counts delivered by the output-snapshot listener match the totals
/// the worker threads accumulated.
/// </summary>
/// <param name="numThreads">number of worker threads to run</param>
/// <param name="numEvents">events each worker sends</param>
/// <param name="choices">partition keys; each is preloaded with one event</param>
private void TrySend(int numThreads, int numEvents, string[] choices)
{
    if (numEvents < choices.Length)
    {
        throw new ArgumentException("Number of events must at least match number of choices");
    }

    _epService.EPRuntime.SendEvent(new CurrentTimeEvent(0));
    _epService.EPAdministrator.CreateEPL("create variable boolean myvar = false");
    _epService.EPAdministrator.CreateEPL("create context SegmentedByString as partition by theString from SupportBean");
    var stmt = _epService.EPAdministrator.CreateEPL("context SegmentedByString select theString, count(*) - 1 as cnt from SupportBean output snapshot when myvar = true");
    stmt.AddListener(_listener);

    // preload - since concurrently sending same-category events an event can be dropped
    foreach (var choice in choices)
    {
        _epService.EPRuntime.SendEvent(new SupportBean(choice, 0));
    }

    var workers = new EventRunnable[numThreads];
    for (var n = 0; n < workers.Length; n++)
    {
        workers[n] = new EventRunnable(_epService, numEvents, choices);
    }

    // start
    var workerThreads = new Thread[workers.Length];
    for (var n = 0; n < workers.Length; n++)
    {
        workerThreads[n] = new Thread(workers[n].Run);
        workerThreads[n].Start();
    }

    // join
    Log.Info("Waiting for completion");
    foreach (var workerThread in workerThreads)
    {
        workerThread.Join();
    }

    IDictionary<string, long?> expectedPerChoice = new Dictionary<string, long?>();
    foreach (var choice in choices)
    {
        expectedPerChoice.Put(choice, 0L);
    }

    // verify: no worker failed, and the grand total equals threads * events
    var overall = 0;
    foreach (var worker in workers)
    {
        Assert.IsNull(worker.Exception);
        foreach (var entry in worker.Totals)
        {
            var runningTotal = expectedPerChoice.Get(entry.Key);
            runningTotal += entry.Value;
            overall += entry.Value;
            expectedPerChoice.Put(entry.Key, runningTotal);
        }
    }
    Assert.AreEqual(numThreads * numEvents, overall);

    // trigger the output snapshot and compare listener output to the expected totals
    _epService.EPRuntime.SetVariableValue("myvar", true);
    _epService.EPRuntime.SendEvent(new CurrentTimeEvent(10000));

    var snapshot = _listener.LastNewData;
    Assert.AreEqual(choices.Length, snapshot.Length);
    foreach (var row in snapshot)
    {
        var theString = (string) row.Get("theString");
        var cnt = (long?) row.Get("cnt");
        Assert.AreEqual(cnt, expectedPerChoice.Get(theString));
    }
}
/// <summary>
/// Builds a property-evaluator forge for contained-event syntax: walks each atom of the
/// spec, determines how contained events are obtained (direct fragment getter when the
/// splitter is a plain property of a fragment type, otherwise a validated splitter
/// expression), and validates any per-atom where- and select-clauses.
/// Returns a simple, nested, or (when select-clauses are present) select-based forge.
/// </summary>
/// <param name="spec">contained-event atoms to process</param>
/// <param name="sourceEventType">event type of the outer (source) event</param>
/// <param name="optionalSourceStreamName">stream name of the source event; may be null</param>
/// <param name="rawInfo">statement information used during validation</param>
/// <param name="services">compile-time services</param>
public static PropertyEvaluatorForge MakeEvaluator(
    PropertyEvalSpec spec,
    EventType sourceEventType,
    string optionalSourceStreamName,
    StatementRawInfo rawInfo,
    StatementCompileTimeServices services)
{
    var length = spec.Atoms.Count;
    var containedEventForges = new ContainedEventEvalForge[length];
    var fragmentEventTypes = new FragmentEventType[length];
    var currentEventType = sourceEventType;
    var whereClauses = new ExprForge[length];

    // Stream #0 is the source event itself; each atom contributes one more stream.
    // Null-key support is needed because the source stream name may be null.
    IList<EventType> streamEventTypes = new List<EventType>();
    IList<string> streamNames = new List<string>();
    IDictionary<string, int> streamNameAndNumber = new Dictionary<string, int>().WithNullKeySupport();
    IList<string> expressionTexts = new List<string>();
    streamEventTypes.Add(sourceEventType);
    streamNames.Add(optionalSourceStreamName);
    streamNameAndNumber.Put(optionalSourceStreamName, 0);
    expressionTexts.Add(sourceEventType.Name);

    IList<SelectClauseElementCompiled> cumulativeSelectClause = new List<SelectClauseElementCompiled>();
    for (var i = 0; i < length; i++)
    {
        var atom = spec.Atoms[i];
        ContainedEventEvalForge containedEventEval = null;
        string expressionText = null;
        EventType streamEventType = null;
        FragmentEventType fragmentEventType = null;

        // Resolve directly as fragment event type if possible
        if (atom.SplitterExpression is ExprIdentNode)
        {
            var propertyName = ((ExprIdentNode) atom.SplitterExpression).FullUnresolvedName;
            fragmentEventType = currentEventType.GetFragmentType(propertyName);
            if (fragmentEventType != null)
            {
                var getter = ((EventTypeSPI) currentEventType).GetGetterSPI(propertyName);
                if (getter != null)
                {
                    containedEventEval = new ContainedEventEvalGetterForge(getter);
                    expressionText = propertyName;
                    streamEventType = fragmentEventType.FragmentType;
                }
            }
        }

        // evaluate splitter expression
        if (containedEventEval == null)
        {
            ExprNodeUtilityValidate.ValidatePlainExpression(
                ExprNodeOrigin.CONTAINEDEVENT,
                atom.SplitterExpression);

            // Validate the splitter against all streams seen so far.
            var availableTypes = streamEventTypes.ToArray();
            var availableStreamNames = streamNames.ToArray();
            var isIStreamOnly = new bool[streamNames.Count];
            isIStreamOnly.Fill(true);
            StreamTypeService streamTypeService = new StreamTypeServiceImpl(
                availableTypes,
                availableStreamNames,
                isIStreamOnly,
                false,
                false);
            var validationContext = new ExprValidationContextBuilder(streamTypeService, rawInfo, services)
                .WithAllowBindingConsumption(true)
                .Build();
            var validatedExprNode = ExprNodeUtilityValidate.GetValidatedSubtree(
                ExprNodeOrigin.CONTAINEDEVENT,
                atom.SplitterExpression,
                validationContext);

            // determine result type
            if (atom.OptionalResultEventType == null)
            {
                throw new ExprValidationException(
                    "Missing @type(name) declaration providing the event type name of the return type for expression '" +
                    ExprNodeUtilityPrint.ToExpressionStringMinPrecedenceSafe(atom.SplitterExpression) + "'");
            }

            streamEventType = services.EventTypeCompileTimeResolver.GetTypeByName(atom.OptionalResultEventType);
            if (streamEventType == null)
            {
                throw new ExprValidationException(
                    "Event type by name '" + atom.OptionalResultEventType + "' could not be found");
            }

            var returnType = validatedExprNode.Forge.EvaluationType;

            // when the expression returns an array, allow array values to become the column of the single-column event type
            if (returnType.IsArray &&
                streamEventType.PropertyNames.Length == 1 &&
                !(streamEventType is JsonEventType) && // since json string-array should not become itself the property
                TypeHelper.IsSubclassOrImplementsInterface(
                    returnType.GetElementType().GetBoxedType(),
                    streamEventType.GetPropertyType(streamEventType.PropertyNames[0]).GetBoxedType()))
            {
                var writables = EventTypeUtility.GetWriteableProperties(streamEventType, false, false);
                if (writables != null && !writables.IsEmpty())
                {
                    try
                    {
                        var manufacturer = EventTypeUtility.GetManufacturer(
                            streamEventType,
                            new[] {writables.First()},
                            services.ImportServiceCompileTime,
                            false,
                            services.EventTypeAvroHandler);
                        containedEventEval = new ContainedEventEvalArrayToEventForge(
                            validatedExprNode.Forge,
                            manufacturer);
                    }
                    catch (EventBeanManufactureException e)
                    {
                        throw new ExprValidationException(
                            "Event type '" + streamEventType.Name + "' cannot be populated: " + e.Message,
                            e);
                    }
                }
                else
                {
                    throw new ExprValidationException(
                        "Event type '" + streamEventType.Name + "' cannot be written to");
                }
            }
            else if (returnType.IsArray && returnType.GetElementType() == typeof(EventBean))
            {
                // splitter already yields EventBean[] - pass through
                containedEventEval = new ContainedEventEvalEventBeanArrayForge(validatedExprNode.Forge);
            }
            else
            {
                // check expression result type against eventtype expected underlying type
                if (returnType.IsArray)
                {
                    if (!(streamEventType is JsonEventType))
                    {
                        if (!TypeHelper.IsSubclassOrImplementsInterface(
                            returnType.GetElementType(),
                            streamEventType.UnderlyingType))
                        {
                            throw new ExprValidationException(
                                "Event type '" + streamEventType.Name + "' underlying type " +
                                streamEventType.UnderlyingType.CleanName() +
                                " cannot be assigned a value of type " + returnType.CleanName());
                        }
                    }
                    else
                    {
                        if (returnType.GetElementType() != typeof(string))
                        {
                            throw new ExprValidationException(
                                "Event type '" + streamEventType.Name +
                                "' requires string-type array and cannot be assigned from value of type " +
                                returnType.CleanName());
                        }
                    }
                }
                else if (GenericExtensions.IsGenericEnumerable(returnType) ||
                         TypeHelper.IsImplementsInterface<System.Collections.IEnumerable>(returnType))
                {
                    // fine, assumed to return the right type
                }
                else
                {
                    throw new ExprValidationException(
                        "Return type of expression '" +
                        ExprNodeUtilityPrint.ToExpressionStringMinPrecedenceSafe(atom.SplitterExpression) +
                        "' is '" + returnType.Name + "', expected an Iterable or array result");
                }
                containedEventEval = new ContainedEventEvalExprNodeForge(
                    validatedExprNode.Forge,
                    streamEventType);
            }
            expressionText = ExprNodeUtilityPrint.ToExpressionStringMinPrecedenceSafe(validatedExprNode);
            fragmentEventType = new FragmentEventType(streamEventType, true, false);
        }

        // validate where clause, if any
        // Register this atom's stream (number i + 1; the source occupies 0) before
        // validating clauses so they can reference it.
        streamEventTypes.Add(streamEventType);
        streamNames.Add(atom.OptionalAsName);
        streamNameAndNumber.Put(atom.OptionalAsName, i + 1);
        expressionTexts.Add(expressionText);
        if (atom.OptionalWhereClause != null)
        {
            var whereTypes = streamEventTypes.ToArray();
            var whereStreamNames = streamNames.ToArray();
            var isIStreamOnly = new bool[streamNames.Count];
            isIStreamOnly.Fill(true);
            StreamTypeService streamTypeService = new StreamTypeServiceImpl(
                whereTypes,
                whereStreamNames,
                isIStreamOnly,
                false,
                false);
            var validationContext = new ExprValidationContextBuilder(streamTypeService, rawInfo, services)
                .WithAllowBindingConsumption(true)
                .Build();
            var whereClause = ExprNodeUtilityValidate.GetValidatedSubtree(
                ExprNodeOrigin.CONTAINEDEVENT,
                atom.OptionalWhereClause,
                validationContext);
            whereClauses[i] = whereClause.Forge;
        }

        // validate select clause
        if (atom.OptionalSelectClause != null && !atom.OptionalSelectClause.SelectExprList.IsEmpty())
        {
            var whereTypes = streamEventTypes.ToArray();
            var whereStreamNames = streamNames.ToArray();
            var isIStreamOnly = new bool[streamNames.Count];
            isIStreamOnly.Fill(true);
            StreamTypeService streamTypeService = new StreamTypeServiceImpl(
                whereTypes,
                whereStreamNames,
                isIStreamOnly,
                false,
                false);
            var validationContext = new ExprValidationContextBuilder(streamTypeService, rawInfo, services)
                .WithAllowBindingConsumption(true)
                .Build();

            foreach (var raw in atom.OptionalSelectClause.SelectExprList)
            {
                if (raw is SelectClauseStreamRawSpec)
                {
                    // stream-name select: must refer to a stream already in the path
                    var rawStreamSpec = (SelectClauseStreamRawSpec) raw;
                    if (!streamNames.Contains(rawStreamSpec.StreamName))
                    {
                        throw new ExprValidationException(
                            "Property rename '" + rawStreamSpec.StreamName + "' not found in path");
                    }
                    var streamSpec = new SelectClauseStreamCompiledSpec(
                        rawStreamSpec.StreamName,
                        rawStreamSpec.OptionalAsName);
                    var streamNumber = streamNameAndNumber.Get(rawStreamSpec.StreamName);
                    streamSpec.StreamNumber = streamNumber;
                    cumulativeSelectClause.Add(streamSpec);
                }
                else if (raw is SelectClauseExprRawSpec)
                {
                    var exprSpec = (SelectClauseExprRawSpec) raw;
                    var exprCompiled = ExprNodeUtilityValidate.GetValidatedSubtree(
                        ExprNodeOrigin.CONTAINEDEVENT,
                        exprSpec.SelectExpression,
                        validationContext);
                    var resultName = exprSpec.OptionalAsName;
                    if (resultName == null)
                    {
                        resultName = ExprNodeUtilityPrint.ToExpressionStringMinPrecedenceSafe(exprCompiled);
                    }
                    cumulativeSelectClause.Add(
                        new SelectClauseExprCompiledSpec(
                            exprCompiled,
                            resultName,
                            exprSpec.OptionalAsName,
                            exprSpec.IsEvents));
                    var isMinimal = ExprNodeUtilityValidate.IsMinimalExpression(exprCompiled);
                    if (isMinimal != null)
                    {
                        throw new ExprValidationException(
                            "Expression in a property-selection may not utilize " + isMinimal);
                    }
                }
                else if (raw is SelectClauseElementWildcard)
                {
                    // wildcards are stream selects: we assign a stream name (any) and add a stream wildcard select
                    var streamNameAtom = atom.OptionalAsName;
                    if (streamNameAtom == null)
                    {
                        streamNameAtom = UuidGenerator.Generate();
                    }
                    var streamSpec = new SelectClauseStreamCompiledSpec(streamNameAtom, atom.OptionalAsName);
                    var streamNumber = i + 1;
                    streamSpec.StreamNumber = streamNumber;
                    cumulativeSelectClause.Add(streamSpec);
                }
                else
                {
                    throw new IllegalStateException("Unknown select clause item:" + raw);
                }
            }
        }

        // the next atom's splitter resolves against this atom's fragment type
        currentEventType = fragmentEventType.FragmentType;
        fragmentEventTypes[i] = fragmentEventType;
        containedEventForges[i] = containedEventEval;
    }

    if (cumulativeSelectClause.IsEmpty())
    {
        // no select-clauses anywhere: simple (single atom) or nested evaluation
        if (length == 1)
        {
            return new PropertyEvaluatorSimpleForge(
                containedEventForges[0],
                fragmentEventTypes[0],
                whereClauses[0],
                expressionTexts[0]);
        }
        return new PropertyEvaluatorNestedForge(
            containedEventForges,
            fragmentEventTypes,
            whereClauses,
            expressionTexts.ToArray());
    }

    {
        // select-clauses present: accumulate rows, then run a select processor over them
        var fragmentEventTypeIsIndexed = new bool[fragmentEventTypes.Length];
        for (var i = 0; i < fragmentEventTypes.Length; i++)
        {
            fragmentEventTypeIsIndexed[i] = fragmentEventTypes[i].IsIndexed;
        }
        var accumulative = new PropertyEvaluatorAccumulativeForge(
            containedEventForges,
            fragmentEventTypeIsIndexed,
            whereClauses,
            expressionTexts);

        var whereTypes = streamEventTypes.ToArray();
        var whereStreamNames = streamNames.ToArray();
        var isIStreamOnly = new bool[streamNames.Count];
        isIStreamOnly.Fill(true);
        StreamTypeService streamTypeService = new StreamTypeServiceImpl(
            whereTypes,
            whereStreamNames,
            isIStreamOnly,
            false,
            false);
        var cumulativeSelectArr = cumulativeSelectClause.ToArray();
        var args = new SelectProcessorArgs(
            cumulativeSelectArr,
            null,
            false,
            null,
            null,
            streamTypeService,
            null,
            false,
            rawInfo.Annotations,
            rawInfo,
            services);
        var selectExprDesc = SelectExprProcessorFactory.GetProcessor(args, null, false);
        return new PropertyEvaluatorSelectForge(selectExprDesc, accumulative);
    }
}
/// <summary>
/// Analyzes the group-by clause for rollup/cube/grouping-set usage. Collects the
/// distinct group-by expressions, enumerates the rollup levels (as index combinations
/// into the distinct expressions), and produces per-level copies of the select-,
/// having- and order-by-expressions for rewriting.
/// Returns null when there is no group-by clause; returns plain group-by expressions
/// when neither rollup nor grouping-sets are used.
/// </summary>
/// <param name="groupByElements">group-by clause elements; may be null or empty</param>
/// <param name="selectClauseSpec">raw select clause (wildcard is rejected with rollup)</param>
/// <param name="optionalHavingNode">having expression; may be null</param>
/// <param name="orderByList">order-by items; may be null or empty</param>
/// <param name="visitor">visitor applied when copying expressions for per-level rewrite</param>
public static GroupByClauseExpressions GetGroupByRollupExpressions(
    IList<GroupByClauseElement> groupByElements,
    SelectClauseSpecRaw selectClauseSpec,
    ExprNode optionalHavingNode,
    IList<OrderByItem> orderByList,
    ExprNodeSubselectDeclaredDotVisitor visitor)
{
    if (groupByElements == null || groupByElements.Count == 0)
    {
        return (null);
    }

    // walk group-by-elements, determine group-by expressions and rollup nodes
    var groupByExpressionInfo = GroupByToRollupNodes(groupByElements);

    // obtain expression nodes, collect unique nodes and assign index
    var distinctGroupByExpressions = new List<ExprNode>();
    var expressionToIndex = new Dictionary<ExprNode, int>();
    foreach (ExprNode exprNode in groupByExpressionInfo.Expressions)
    {
        var found = false;
        for (var i = 0; i < distinctGroupByExpressions.Count; i++)
        {
            ExprNode other = distinctGroupByExpressions[i];
            // find same expression (structural comparison, not reference equality)
            if (ExprNodeUtility.DeepEquals(exprNode, other))
            {
                expressionToIndex.Put(exprNode, i);
                found = true;
                break;
            }
        }
        // not seen before
        if (!found)
        {
            expressionToIndex.Put(exprNode, distinctGroupByExpressions.Count);
            distinctGroupByExpressions.Add(exprNode);
        }
    }

    // determine rollup, validate it is either (not both)
    var hasGroupingSet = false;
    var hasRollup = false;
    foreach (var element in groupByElements)
    {
        if (element is GroupByClauseElementGroupingSet)
        {
            hasGroupingSet = true;
        }
        if (element is GroupByClauseElementRollupOrCube)
        {
            hasRollup = true;
        }
    }

    // no-rollup or grouping-sets means simply validate
    var groupByExpressions = distinctGroupByExpressions.ToArray();
    if (!hasRollup && !hasGroupingSet)
    {
        return (new GroupByClauseExpressions(groupByExpressions));
    }

    // evaluate rollup node roots
    IList<GroupByRollupNodeBase> nodes = groupByExpressionInfo.Nodes;
    var perNodeCombinations = new Object[nodes.Count][];
    var context = new GroupByRollupEvalContext(expressionToIndex);
    try
    {
        for (var i = 0; i < nodes.Count; i++)
        {
            var node = nodes[i];
            var combinations = node.Evaluate(context);
            perNodeCombinations[i] = new Object[combinations.Count];
            for (var j = 0; j < combinations.Count; j++)
            {
                perNodeCombinations[i][j] = combinations[j];
            }
        }
    }
    catch (GroupByRollupDuplicateException ex)
    {
        // empty index set means the overall grouping "()" was specified twice
        if (ex.Indexes.Length == 0)
        {
            throw new ExprValidationException("Failed to validate the group-by clause, found duplicate specification of the overall grouping '()'");
        }
        else
        {
            var writer = new StringWriter();
            var delimiter = "";
            for (var i = 0; i < ex.Indexes.Length; i++)
            {
                writer.Write(delimiter);
                writer.Write(groupByExpressions[ex.Indexes[i]].ToExpressionStringMinPrecedenceSafe());
                delimiter = ", ";
            }
            throw new ExprValidationException("Failed to validate the group-by clause, found duplicate specification of expressions (" + writer.ToString() + ")");
        }
    }

    // enumerate combinations building an index list
    // (sorted set normalizes index order within a level; linked hash set deduplicates
    // levels while preserving enumeration order)
    var combinationEnumeration = new CombinationEnumeration(perNodeCombinations);
    ICollection<int> combination = new SortedSet<int>();
    ICollection<MultiKeyInt> indexList = new LinkedHashSet<MultiKeyInt>();
    while (combinationEnumeration.MoveNext())
    {
        combination.Clear();
        Object[] combinationOA = combinationEnumeration.Current;
        foreach (var indexes in combinationOA)
        {
            var indexarr = (int[])indexes;
            foreach (var anIndex in indexarr)
            {
                combination.Add(anIndex);
            }
        }
        var indexArr = CollectionUtil.IntArray(combination);
        indexList.Add(new MultiKeyInt(indexArr));
    }

    // obtain rollup levels
    var rollupLevels = new int[indexList.Count][];
    var count = 0;
    foreach (var mk in indexList)
    {
        rollupLevels[count++] = mk.Keys;
    }
    var numberOfLevels = rollupLevels.Length;
    if (numberOfLevels == 1 && rollupLevels[0].Length == 0)
    {
        throw new ExprValidationException("Failed to validate the group-by clause, the overall grouping '()' cannot be the only grouping");
    }

    // obtain select-expression copies for rewrite
    var expressions = selectClauseSpec.SelectExprList;
    var selects = new ExprNode[numberOfLevels][];
    for (var i = 0; i < numberOfLevels; i++)
    {
        selects[i] = new ExprNode[expressions.Count];
        for (var j = 0; j < expressions.Count; j++)
        {
            SelectClauseElementRaw selectRaw = expressions[j];
            if (!(selectRaw is SelectClauseExprRawSpec))
            {
                throw new ExprValidationException("Group-by with rollup requires that the select-clause does not use wildcard");
            }
            var compiled = (SelectClauseExprRawSpec)selectRaw;
            selects[i][j] = CopyVisitExpression(compiled.SelectExpression, visitor);
        }
    }

    // obtain having-expression copies for rewrite
    ExprNode[] optHavingNodeCopy = null;
    if (optionalHavingNode != null)
    {
        optHavingNodeCopy = new ExprNode[numberOfLevels];
        for (var i = 0; i < numberOfLevels; i++)
        {
            optHavingNodeCopy[i] = CopyVisitExpression(optionalHavingNode, visitor);
        }
    }

    // obtain orderby-expression copies for rewrite
    ExprNode[][] optOrderByCopy = null;
    if (orderByList != null && orderByList.Count > 0)
    {
        optOrderByCopy = new ExprNode[numberOfLevels][];
        for (var i = 0; i < numberOfLevels; i++)
        {
            optOrderByCopy[i] = new ExprNode[orderByList.Count];
            for (var j = 0; j < orderByList.Count; j++)
            {
                OrderByItem element = orderByList[j];
                optOrderByCopy[i][j] = CopyVisitExpression(element.ExprNode, visitor);
            }
        }
    }

    return (new GroupByClauseExpressions(groupByExpressions, rollupLevels, selects, optHavingNodeCopy, optOrderByCopy));
}
/// <summary>
/// Verifies large-attachment handling: inserting a large attachment, fetching it with
/// the BigAttachmentsFollow content option (attachment dict must carry "follows"
/// rather than a "stub"), copying it to a second revision, replacing it on a third
/// revision, and compacting so only the latest blob remains.
/// </summary>
public void TestPutLargeAttachment()
{
    var testAttachmentName = "test_attachment";
    var attachments = database.Attachments;
    attachments.DeleteBlobs();
    Assert.AreEqual(0, attachments.Count());

    var status = new Status();
    var rev1Properties = new Dictionary<string, object>();
    rev1Properties["foo"] = 1;
    rev1Properties["bar"] = false;
    var rev1 = database.PutRevision(new RevisionInternal(rev1Properties, database), null, false, status);
    Assert.AreEqual(StatusCode.Created, status.GetCode());

    // Build an attachment body large enough to exceed the big-attachment threshold.
    var largeAttachment = new StringBuilder();
    for (int i = 0; i < Database.BigAttachmentLength; i++)
    {
        largeAttachment.Append("big attachment!");
    }
    var attach1 = Runtime.GetBytesForString(largeAttachment.ToString()).ToArray();
    database.InsertAttachmentForSequenceWithNameAndType(
        new ByteArrayInputStream(attach1),
        rev1.GetSequence(),
        testAttachmentName,
        "text/plain",
        rev1.GetGeneration());

    var attachment = database.GetAttachmentForSequence(rev1.GetSequence(), testAttachmentName);
    Assert.AreEqual("text/plain", attachment.ContentType);
    var data = attachment.Content.ToArray();
    Assert.IsTrue(Arrays.Equals(attach1, data));

    const DocumentContentOptions contentOptions =
        DocumentContentOptions.IncludeAttachments | DocumentContentOptions.BigAttachmentsFollow;

    var attachmentDictForSequence = database.GetAttachmentsDictForSequenceWithContent(rev1.GetSequence(), contentOptions);
    var innerDict = (IDictionary<string, object>)attachmentDictForSequence[testAttachmentName];

    // With BigAttachmentsFollow, a large attachment must be flagged "follows" and must
    // NOT appear as a stub; any "stub" key here means the content options were not honored.
    // FIX: the previous error messages asserted the opposite of the condition that
    // triggered them ("Expected ... 'stub' key to be true" was thrown exactly when it
    // was true, and "Expected ... to have 'stub' key" exactly when the key was present).
    if (innerDict.ContainsKey("stub"))
    {
        if ((bool)innerDict["stub"])
        {
            throw new RuntimeException("Did not expect attachment dict 'stub' key to be true");
        }
        else
        {
            throw new RuntimeException("Did not expect attachment dict to have 'stub' key");
        }
    }
    if (!innerDict.ContainsKey("follows"))
    {
        throw new RuntimeException("Expected attachment dict to have 'follows' key");
    }

    // Workaround:
    // Not closing the content stream will cause a Sharing Violation exception
    // when trying to get the same attachment going forward.
    attachment.ContentStream.Close();

    var rev1WithAttachments = database.GetDocumentWithIDAndRev(
        rev1.GetDocId(),
        rev1.GetRevId(),
        contentOptions);
    var rev1WithAttachmentsProperties = rev1WithAttachments.GetProperties();

    var rev2Properties = new Dictionary<string, object>();
    rev2Properties.Put("_id", rev1WithAttachmentsProperties["_id"]);
    rev2Properties["foo"] = 2;
    var newRev = new RevisionInternal(rev2Properties, database);
    var rev2 = database.PutRevision(newRev, rev1WithAttachments.GetRevId(), false, status);
    Assert.AreEqual(StatusCode.Created, status.GetCode());

    database.CopyAttachmentNamedFromSequenceToSequence(
        testAttachmentName,
        rev1WithAttachments.GetSequence(),
        rev2.GetSequence());

    // Check the 2nd revision's attachment:
    var rev2FetchedAttachment = database.GetAttachmentForSequence(rev2.GetSequence(), testAttachmentName);
    Assert.AreEqual(attachment.Length, rev2FetchedAttachment.Length);
    AssertPropertiesAreEqual(attachment.Metadata, rev2FetchedAttachment.Metadata);
    Assert.AreEqual(attachment.ContentType, rev2FetchedAttachment.ContentType);

    // Add a third revision of the same document:
    var rev3Properties = new Dictionary<string, object>();
    rev3Properties.Put("_id", rev2.GetProperties().Get("_id"));
    rev3Properties["foo"] = 3;
    rev3Properties["baz"] = false;
    var rev3 = new RevisionInternal(rev3Properties, database);
    rev3 = database.PutRevision(rev3, rev2.GetRevId(), false, status);
    Assert.AreEqual(StatusCode.Created, status.GetCode());

    var attach3 = Runtime.GetBytesForString("<html><blink>attach3</blink></html>").ToArray();
    database.InsertAttachmentForSequenceWithNameAndType(
        new ByteArrayInputStream(attach3),
        rev3.GetSequence(),
        testAttachmentName,
        "text/html",
        rev3.GetGeneration());

    // Check the 3rd revision's attachment:
    var rev3FetchedAttachment = database.GetAttachmentForSequence(rev3.GetSequence(), testAttachmentName);
    data = rev3FetchedAttachment.Content.ToArray();
    Assert.IsTrue(Arrays.Equals(attach3, data));
    Assert.AreEqual("text/html", rev3FetchedAttachment.ContentType);
    // TODO: why doesn't this work?
    // Assert.assertEquals(attach3.length, rev3FetchedAttachment.getLength());

    // Two blobs exist (rev2's copy of attach1 and rev3's attach3); compaction should
    // drop the blob no longer referenced by a current revision.
    ICollection<BlobKey> blobKeys = database.Attachments.AllKeys();
    Assert.AreEqual(2, blobKeys.Count);
    database.Compact();
    blobKeys = database.Attachments.AllKeys();
    Assert.AreEqual(1, blobKeys.Count);
}
/// <summary>
/// Helper method that reads CFS entries from an input stream.
/// </summary>
/// <param name="handle">slicer over the .cfs data file; a full slice is opened here and disposed in the finally block</param>
/// <param name="dir">directory used to open the separate .cfe entries file (modern format only)</param>
/// <param name="name">compound file name; its extension is stripped to derive the entries file name</param>
/// <returns>mapping from entry id to its offset/length within the compound file</returns>
private static IDictionary<string, FileEntry> ReadEntries(IndexInputSlicer handle, Directory dir, string name)
{
    IOException priorE = null;
    IndexInput stream = null;
    ChecksumIndexInput entriesStream = null;
    // read the first VInt. If it is negative, it's the version number
    // otherwise it's the count (pre-3.1 indexes)
    try
    {
        IDictionary<string, FileEntry> mapping;
#pragma warning disable 612, 618
        stream = handle.OpenFullSlice();
#pragma warning restore 612, 618
        int firstInt = stream.ReadVInt32();
        // impossible for 3.0 to have 63 files in a .cfs, CFS writer was not visible
        // and separate norms/etc are outside of cfs.
        if (firstInt == CODEC_MAGIC_BYTE1)
        {
            // Modern (4.x) format: verify the remaining three magic bytes before
            // trusting the header.
            sbyte secondByte = (sbyte)stream.ReadByte();
            sbyte thirdByte = (sbyte)stream.ReadByte();
            sbyte fourthByte = (sbyte)stream.ReadByte();
            if (secondByte != CODEC_MAGIC_BYTE2 || thirdByte != CODEC_MAGIC_BYTE3 || fourthByte != CODEC_MAGIC_BYTE4)
            {
                throw new CorruptIndexException("Illegal/impossible header for CFS file: " + secondByte + "," + thirdByte + "," + fourthByte);
            }
            int version = CodecUtil.CheckHeaderNoMagic(stream, CompoundFileWriter.DATA_CODEC, CompoundFileWriter.VERSION_START, CompoundFileWriter.VERSION_CURRENT);
            // The entry table lives in a sibling .cfe file, not in the .cfs data stream.
            string entriesFileName = IndexFileNames.SegmentFileName(IndexFileNames.StripExtension(name), "", IndexFileNames.COMPOUND_FILE_ENTRIES_EXTENSION);
            entriesStream = dir.OpenChecksumInput(entriesFileName, IOContext.READ_ONCE);
            CodecUtil.CheckHeader(entriesStream, CompoundFileWriter.ENTRY_CODEC, CompoundFileWriter.VERSION_START, CompoundFileWriter.VERSION_CURRENT);
            int numEntries = entriesStream.ReadVInt32();
            mapping = new Dictionary<string, FileEntry>(numEntries);
            for (int i = 0; i < numEntries; i++)
            {
                FileEntry fileEntry = new FileEntry();
                string id = entriesStream.ReadString();
                // Put returns the previously-mapped value; a non-null result means
                // the same id appeared twice, which is a corruption.
                FileEntry previous = mapping.Put(id, fileEntry);
                if (previous != null)
                {
                    throw new CorruptIndexException("Duplicate cfs entry id=" + id + " in CFS: " + entriesStream);
                }
                fileEntry.Offset = entriesStream.ReadInt64();
                fileEntry.Length = entriesStream.ReadInt64();
            }
            // Newer entry files carry a checksum footer; older ones are just
            // checked for trailing garbage.
            if (version >= CompoundFileWriter.VERSION_CHECKSUM)
            {
                CodecUtil.CheckFooter(entriesStream);
            }
            else
            {
#pragma warning disable 612, 618
                CodecUtil.CheckEOF(entriesStream);
#pragma warning restore 612, 618
            }
        }
        else
        {
            // TODO remove once 3.x is not supported anymore
            mapping = ReadLegacyEntries(stream, firstInt);
        }
        return (mapping);
    }
    catch (IOException ioe)
    {
        // Remember the primary failure so cleanup exceptions don't mask it.
        priorE = ioe;
    }
    finally
    {
        IOUtils.DisposeWhileHandlingException(priorE, stream, entriesStream);
    }
    // this is needed until Java 7's real try-with-resources:
    // (the successful path returns inside the try; reaching here means an
    // IOException was swallowed above, and DisposeWhileHandlingException rethrows it)
    throw new InvalidOperationException("impossible to get here");
}
/// <summary>
/// Reads the entry table of a pre-4.0 ("legacy") compound file directly from
/// the .cfs data stream. <paramref name="firstInt"/> is the value the caller
/// already consumed: for post-3.1 files it is a format version marker, for
/// pre-3.1 files it is the entry count itself.
/// </summary>
/// <returns>mapping from entry id to its offset/length within the compound file</returns>
private static IDictionary<string, FileEntry> ReadLegacyEntries(IndexInput stream, int firstInt)
{
    IDictionary<string, FileEntry> mapping = new Dictionary<string, FileEntry>();
    int numEntries;
    bool stripPrefix;
    if (firstInt < CompoundFileWriter.FORMAT_PRE_VERSION)
    {
        // Versioned (post-3.1) header: validate the version, then read the count.
        if (firstInt < CompoundFileWriter.FORMAT_NO_SEGMENT_PREFIX)
        {
            throw new CorruptIndexException("Incompatible format version: " + firstInt + " expected >= " + CompoundFileWriter.FORMAT_NO_SEGMENT_PREFIX + " (resource: " + stream + ")");
        }
        // It's a post-3.1 index, read the count.
        numEntries = stream.ReadVInt32();
        stripPrefix = false;
    }
    else
    {
        // Pre-3.1: firstInt already is the entry count, and ids still carry
        // segment-name prefixes that must be stripped.
        numEntries = firstInt;
        stripPrefix = true;
    }

    // Each stored entry records only its offset; an entry's length is derived
    // from the offset of the entry that follows it (the last one extends to EOF).
    long streamLength = stream.Length;
    FileEntry pending = null;
    for (int i = 0; i < numEntries; i++)
    {
        long offset = stream.ReadInt64();
        if (offset < 0 || offset > streamLength)
        {
            throw new CorruptIndexException("Invalid CFS entry offset: " + offset + " (resource: " + stream + ")");
        }
        string id = stream.ReadString();
        if (stripPrefix)
        {
            // Fix the id to not include the segment names. this is relevant for
            // pre-3.1 indexes.
            id = IndexFileNames.StripSegmentName(id);
        }
        if (pending != null)
        {
            // Close out the previous entry now that we know where it ends.
            pending.Length = offset - pending.Offset;
        }
        pending = new FileEntry();
        pending.Offset = offset;
        FileEntry previous = mapping.Put(id, pending);
        if (previous != null)
        {
            throw new CorruptIndexException("Duplicate cfs entry id=" + id + " in CFS: " + stream);
        }
    }

    // The final entry runs to the end of the stream.
    if (pending != null)
    {
        pending.Length = streamLength - pending.Offset;
    }
    return mapping;
}
/// <summary>
/// Exercises the revision-tree storage: force-inserting a revision with a
/// fabricated history, creating a conflicting branch, conflict-winner
/// selection, and the _changes feed with and without conflicts.
/// </summary>
/// <exception cref="Couchbase.Lite.CouchbaseLiteException"></exception>
public virtual void TestRevTree()
{
    // Force-insert a leaf revision with a fabricated 4-deep history; the
    // ancestors ("3-thrice", "2-too", "1-won") get no bodies.
    RevisionInternal rev = new RevisionInternal("MyDocId", "4-foxy", false, database);
    IDictionary<string, object> revProperties = new Dictionary<string, object>();
    revProperties.Put("_id", rev.GetDocId());
    revProperties.Put("_rev", rev.GetRevId());
    revProperties.Put("message", "hi");
    rev.SetProperties(revProperties);
    IList<string> revHistory = new AList<string>();
    revHistory.AddItem(rev.GetRevId());
    revHistory.AddItem("3-thrice");
    revHistory.AddItem("2-too");
    revHistory.AddItem("1-won");
    database.ForceInsert(rev, revHistory, null);
    NUnit.Framework.Assert.AreEqual(1, database.GetDocumentCount());
    VerifyHistory(database, rev, revHistory);

    // Insert a conflicting branch for the same document that shares the
    // "2-too"/"1-won" ancestry.
    RevisionInternal conflict = new RevisionInternal("MyDocId", "5-epsilon", false, database);
    IDictionary<string, object> conflictProperties = new Dictionary<string, object>();
    conflictProperties.Put("_id", conflict.GetDocId());
    conflictProperties.Put("_rev", conflict.GetRevId());
    conflictProperties.Put("message", "yo");
    conflict.SetProperties(conflictProperties);
    IList<string> conflictHistory = new AList<string>();
    conflictHistory.AddItem(conflict.GetRevId());
    conflictHistory.AddItem("4-delta");
    conflictHistory.AddItem("3-gamma");
    conflictHistory.AddItem("2-too");
    conflictHistory.AddItem("1-won");

    // The change listener records whether the change event was flagged as a conflict.
    IList wasInConflict = new ArrayList();
    Database.ChangeListener listener = new _ChangeListener_84(wasInConflict);
    database.AddChangeListener(listener);
    database.ForceInsert(conflict, conflictHistory, null);
    NUnit.Framework.Assert.IsTrue(wasInConflict.Count > 0);
    database.RemoveChangeListener(listener);

    // Still one document: the conflict is a second branch, not a new doc.
    NUnit.Framework.Assert.AreEqual(1, database.GetDocumentCount());
    VerifyHistory(database, conflict, conflictHistory);

    // Add an unrelated document:
    RevisionInternal other = new RevisionInternal("AnotherDocID", "1-ichi", false, database);
    IDictionary<string, object> otherProperties = new Dictionary<string, object>();
    otherProperties.Put("language", "jp");
    other.SetProperties(otherProperties);
    IList<string> otherHistory = new AList<string>();
    otherHistory.AddItem(other.GetRevId());
    database.ForceInsert(other, otherHistory, null);

    // Fetch one of those phantom revisions with no body:
    RevisionInternal rev2 = database.GetDocumentWithIDAndRev(rev.GetDocId(), "2-too", EnumSet.NoneOf<Database.TDContentOptions>());
    NUnit.Framework.Assert.AreEqual(rev.GetDocId(), rev2.GetDocId());
    NUnit.Framework.Assert.AreEqual("2-too", rev2.GetRevId());
    //Assert.assertNull(rev2.getContent());

    // Make sure no duplicate rows were inserted for the common revisions:
    // 4 (first branch) + 3 new conflict revs + 1 other doc = 8 sequences.
    NUnit.Framework.Assert.AreEqual(8, database.GetLastSequenceNumber());

    // Make sure the revision with the higher revID wins the conflict:
    RevisionInternal current = database.GetDocumentWithIDAndRev(rev.GetDocId(), null, EnumSet.NoneOf<Database.TDContentOptions>());
    NUnit.Framework.Assert.AreEqual(conflict, current);

    // Get the _changes feed and verify only the winner is in it:
    ChangesOptions options = new ChangesOptions();
    RevisionList changes = database.ChangesSince(0, options, null);
    RevisionList expectedChanges = new RevisionList();
    expectedChanges.AddItem(conflict);
    expectedChanges.AddItem(other);
    NUnit.Framework.Assert.AreEqual(changes, expectedChanges);

    // With conflicts included, the losing branch's leaf shows up as well.
    options.SetIncludeConflicts(true);
    changes = database.ChangesSince(0, options, null);
    expectedChanges = new RevisionList();
    expectedChanges.AddItem(rev);
    expectedChanges.AddItem(conflict);
    expectedChanges.AddItem(other);
    NUnit.Framework.Assert.AreEqual(changes, expectedChanges);
}
/// <summary>
/// Verifies URIUtil.FilterSort: given a query URI and a map of candidate URIs,
/// the result must contain only the matching entries, ordered from most to
/// least specific. The integer payloads are only identity markers.
/// </summary>
public void TestSortRelevance()
{
    // Candidate URIs paired with an arbitrary marker value.
    var uris = new[]
    {
        new object[] { "a/relative/one", -1 },
        new object[] { "other:mailto:test", 0 },
        new object[] { "other://a", 1 },
        new object[] { "type://a/b2/c1", 2 },
        new object[] { "type://a/b3", 3 },
        new object[] { "type://a/b2/c2", 4 },
        new object[] { "type://a", 5 },
        new object[] { "type://x?query#fragment¶m", 6 },
        new object[] { "type://a/b1/c1", 7 },
        new object[] { "type://a/b1/c2", 8 },
        new object[] { "type://a/b1/c2/d1", 9 },
        new object[] { "type://a/b2", 10 },
        new object[] { "type://x/a?query#fragment¶m", 11 },
        new object[] { "type://x/a/b?query#fragment¶m", 12 },
        new object[] { "/a/b/c", 13 },
        new object[] { "/a", 14 },
        new object[] { "//a/b/c", 15 },
        new object[] { "//a", 16 },
    };
    // setup input
    IDictionary<Uri, Object> input = new Dictionary<Uri, Object>();
    foreach (var uri1 in uris)
    {
        var uri2 = new Uri((String)uri1[0], UriKind.RelativeOrAbsolute);
        input.Put(uri2, uri1[1]);
    }

    // Exact path, different query: exact hit plus ancestors, most specific first.
    var uri = new Uri("type://x/a/b?qqq", UriKind.RelativeOrAbsolute);
    var result = URIUtil.FilterSort(uri, input);
    var expected = new[] { "type://x/a/b?query#fragment¶m", "type://x/a?query#fragment¶m", "type://x?query#fragment¶m" };
    RunAssertion(uri, input, result, expected);

    // unspecific child
    uri = new Uri("type://a/b2", UriKind.RelativeOrAbsolute);
    result = URIUtil.FilterSort(uri, input);
    expected = new[] { "type://a/b2", "type://a" };
    RunAssertion(uri, input, result, expected);

    // very specific child
    uri = new Uri("type://a/b2/c2/d/e", UriKind.RelativeOrAbsolute);
    result = URIUtil.FilterSort(uri, input);
    expected = new[] { "type://a/b2/c2", "type://a/b2", "type://a" };
    RunAssertion(uri, input, result, expected);

    // less specific child
    uri = new Uri("type://a/b1/c2", UriKind.RelativeOrAbsolute);
    result = URIUtil.FilterSort(uri, input);
    expected = new[] { "type://a/b1/c2", "type://a" };
    RunAssertion(uri, input, result, expected);

    // unspecific child
    uri = new Uri("type://a/b4", UriKind.RelativeOrAbsolute);
    result = URIUtil.FilterSort(uri, input);
    expected = new[] { "type://a" };
    RunAssertion(uri, input, result, expected);

    // No candidate under authority "b": empty result.
    uri = new Uri("type://b/b1", UriKind.RelativeOrAbsolute);
    result = URIUtil.FilterSort(uri, input);
    expected = new String[] {};
    RunAssertion(uri, input, result, expected);

    // Deeply nested query: all ancestors along the path match.
    uri = new Uri("type://a/b1/c2/d1/e1/f1", UriKind.RelativeOrAbsolute);
    result = URIUtil.FilterSort(uri, input);
    expected = new[] { "type://a/b1/c2/d1", "type://a/b1/c2", "type://a" };
    RunAssertion(uri, input, result, expected);

    // Opaque (non-hierarchical) URI: must match exactly.
    uri = new Uri("other:mailto:test", UriKind.RelativeOrAbsolute);
    result = URIUtil.FilterSort(uri, input);
    expected = new[] { "other:mailto:test" };
    RunAssertion(uri, input, result, expected);

    // Same path different query under scheme "type".
    uri = new Uri("type://x/a?qqq", UriKind.RelativeOrAbsolute);
    result = URIUtil.FilterSort(uri, input);
    expected = new[] { "type://x/a?query#fragment¶m", "type://x?query#fragment¶m" };
    RunAssertion(uri, input, result, expected);

    // Scheme mismatch ("other" has no x authority candidates): empty result.
    uri = new Uri("other://x/a?qqq", UriKind.RelativeOrAbsolute);
    result = URIUtil.FilterSort(uri, input);
    expected = new String[] {};
    RunAssertion(uri, input, result, expected);

    // this is seen as relative, must be a full hit (no path checking)
    uri = new Uri("/a/b", UriKind.RelativeOrAbsolute);
    result = URIUtil.FilterSort(uri, input);
    expected = new String[] {};
    RunAssertion(uri, input, result, expected);

    // this is seen as relative
    uri = new Uri("/a/b/c", UriKind.RelativeOrAbsolute);
    result = URIUtil.FilterSort(uri, input);
    expected = new[] { "/a/b/c" };
    RunAssertion(uri, input, result, expected);

    // this is seen as relative
    uri = new Uri("//a/b", UriKind.RelativeOrAbsolute);
    result = URIUtil.FilterSort(uri, input);
    expected = new String[] {};
    RunAssertion(uri, input, result, expected);

    // this is seen as relative
    uri = new Uri("//a/b/c", UriKind.RelativeOrAbsolute);
    result = URIUtil.FilterSort(uri, input);
    expected = new[] { "//a/b/c" };
    RunAssertion(uri, input, result, expected);
}
/// <summary>
/// Verifies Database.MakeRevisionHistoryDict for three cases: contiguous
/// generations (compact {"start", "ids"} form with suffixes only), a
/// generation gap, and non-numeric rev ids (both of which fall back to a
/// plain {"ids"} list of full rev ids).
/// </summary>
public virtual void TestMakeRevisionHistoryDict()
{
    // Case 1: contiguous generations 4,3,2 -> {"start": 4, "ids": [suffixes]}.
    IList<RevisionInternal> revisions = new AList<RevisionInternal>();
    foreach (string revId in new[] { "4-jkl", "3-ghi", "2-def" })
    {
        revisions.AddItem(Mkrev(revId));
    }
    IList<string> suffixes = new AList<string>();
    foreach (string suffix in new[] { "jkl", "ghi", "def" })
    {
        suffixes.AddItem(suffix);
    }
    IDictionary<string, object> expectedDict = new Dictionary<string, object>();
    expectedDict.Put("start", 4);
    expectedDict.Put("ids", suffixes);
    IDictionary<string, object> actualDict = Database.MakeRevisionHistoryDict(revisions);
    NUnit.Framework.Assert.AreEqual(expectedDict, actualDict);

    // Case 2: gap between generations (4 then 2) -> full rev ids, no "start".
    revisions = new AList<RevisionInternal>();
    foreach (string revId in new[] { "4-jkl", "2-def" })
    {
        revisions.AddItem(Mkrev(revId));
    }
    suffixes = new AList<string>();
    foreach (string revId in new[] { "4-jkl", "2-def" })
    {
        suffixes.AddItem(revId);
    }
    expectedDict = new Dictionary<string, object>();
    expectedDict.Put("ids", suffixes);
    actualDict = Database.MakeRevisionHistoryDict(revisions);
    NUnit.Framework.Assert.AreEqual(expectedDict, actualDict);

    // Case 3: non-numeric rev ids -> full rev ids, no "start".
    revisions = new AList<RevisionInternal>();
    foreach (string revId in new[] { "12345", "6789" })
    {
        revisions.AddItem(Mkrev(revId));
    }
    suffixes = new AList<string>();
    foreach (string revId in new[] { "12345", "6789" })
    {
        suffixes.AddItem(revId);
    }
    expectedDict = new Dictionary<string, object>();
    expectedDict.Put("ids", suffixes);
    actualDict = Database.MakeRevisionHistoryDict(revisions);
    NUnit.Framework.Assert.AreEqual(expectedDict, actualDict);
}
/// <summary>
/// Exercises loading java-style .properties data from a stream: key/value
/// parsing, comment handling, whitespace as separator, 8-bit and \uXXXX
/// escapes, and trailing-backslash edge cases (incl. HARMONY-5414 regressions).
/// </summary>
public void Test_loadLSystem_IO_Stream()
{
    // Load the reference fixture and spot-check a present and a commented-out key.
    Dictionary<string, string> prop = new Dictionary<string, string>();
    using (Stream @is = new MemoryStream(writeProperties()))
    {
        prop.Load(@is);
    }
    assertEquals("Failed to load correct properties", "harmony.tests", prop.get("test.pkg"));
    assertNull("Load failed to parse incorrectly", prop.get("commented.entry"));

    // A lone '=' yields an empty key mapped to an empty value.
    prop = new Dictionary<string, string>();
    prop.Load(new MemoryStream("=".getBytes()));
    assertEquals("Failed to add empty key", "", prop.get(""));

    prop = new Dictionary<string, string>();
    prop.Load(new MemoryStream(" = ".getBytes()));
    assertEquals("Failed to add empty key2", "", prop.get(""));

    // Whitespace around the key and value is trimmed.
    prop = new Dictionary<string, string>();
    prop.Load(new MemoryStream(" a= b".getBytes()));
    assertEquals("Failed to ignore whitespace", "b", prop.get("a"));

    // Bare whitespace also separates key from value.
    prop = new Dictionary<string, string>();
    prop.Load(new MemoryStream(" a b".getBytes()));
    assertEquals("Failed to interpret whitespace as =", "b", prop.get("a"));

    // Comment lines are skipped.
    prop = new Dictionary<string, string>();
    prop.Load(new MemoryStream("#comment\na=value".getBytes("UTF-8")));
    assertEquals("value", prop.get("a"));

    // Bytes >= 0x80 must survive an ISO-8859-1 round trip.
    prop = new Dictionary<string, string>();
    prop.Load(new MemoryStream("#\u008d\u00d2\na=\u008d\u00d3".getBytes("ISO-8859-1")));
    assertEquals("Failed to parse chars >= 0x80", "\u008d\u00d3", prop.get("a"));

    // A trailing comment with no final newline must not break parsing.
    prop = new Dictionary<string, string>();
    prop.Load(new MemoryStream("#properties file\r\nfred=1\r\n#last comment".getBytes("ISO-8859-1")));
    assertEquals("Failed to load when last line contains a comment", "1", prop.get("fred"));

    // Regression tests for HARMONY-5414
    prop = new Dictionary<string, string>();
    prop.Load(new MemoryStream("a=\\u1234z".getBytes()));

    prop = new Dictionary<string, string>();
    try
    {
        prop.Load(new MemoryStream("a=\\u123".getBytes()));
        fail("should throw IllegalArgumentException");
    }
    catch (ArgumentException)
    {
        // Expected: truncated \u escape.
    }

    prop = new Dictionary<string, string>();
    try
    {
        prop.Load(new MemoryStream("a=\\u123z".getBytes()));
        fail("should throw IllegalArgumentException");
    }
    catch (ArgumentException)
    {
        // Expected: malformed \u escape.
    }

    // A trailing backslash contributes a NUL character to the value.
    prop = new Dictionary<string, string>();
    Dictionary<string, string> expected = new Dictionary<string, string>();
    expected.Put("a", "\u0000");
    prop.Load(new MemoryStream("a=\\".getBytes()));
    assertEquals("Failed to read trailing slash value", expected, prop);

    prop = new Dictionary<string, string>();
    expected = new Dictionary<string, string>();
    expected.Put("a", "\u1234\u0000");
    prop.Load(new MemoryStream("a=\\u1234\\".getBytes()));
    assertEquals("Failed to read trailing slash value #2", expected, prop);

    // An unrecognized escape (\q) collapses to the escaped character itself.
    prop = new Dictionary<string, string>();
    expected = new Dictionary<string, string>();
    expected.Put("a", "q");
    prop.Load(new MemoryStream("a=\\q".getBytes()));
    assertEquals("Failed to read slash value #3", expected, prop);
}
/// <summary>
/// Persists the replication's last-processed sequence to the remote
/// _local checkpoint document, coalescing concurrent save requests:
/// at most one PUT is in flight, and changes arriving meanwhile are
/// flushed by a follow-up save from the completion callback.
/// </summary>
/// <param name="completionHandler">invoked once this save (and any chained
/// overdue save) settles; may be null</param>
private void SaveLastSequence(SaveLastSequenceCompletionBlock completionHandler)
{
    // Nothing changed since the last save: complete immediately.
    if (!lastSequenceChanged)
    {
        if (completionHandler != null)
        {
            completionHandler();
        }
        return;
    }
    if (_savingCheckpoint)
    {
        // If a save is already in progress, don't do anything. (The completion block will trigger
        // another save after the first one finishes.)
        _overdueForSave = true;
        return;
    }
    lastSequenceChanged = false;
    _overdueForSave = false;
    Log.D(TAG, "saveLastSequence() called. lastSequence: " + LastSequence);

    // Build the checkpoint body from the last-known remote doc so the PUT
    // carries the current _rev, then overwrite the sequence.
    var body = new Dictionary<String, Object>();
    if (_remoteCheckpoint != null)
    {
        body.PutAll(_remoteCheckpoint);
    }
    body["lastSequence"] = LastSequence;
    var remoteCheckpointDocID = RemoteCheckpointDocID();
    if (String.IsNullOrEmpty(remoteCheckpointDocID))
    {
        Log.W(TAG, "remoteCheckpointDocID is null, aborting saveLastSequence()");
        return;
    }
    _savingCheckpoint = true;
    SendAsyncRequest(HttpMethod.Put, "/_local/" + remoteCheckpointDocID, body, (result, e) =>
    {
        _savingCheckpoint = false;
        if (e != null)
        {
            Log.V(TAG, "Unable to save remote checkpoint", e);
        }
        if (LocalDatabase == null)
        {
            Log.W(TAG, "Database is null, ignoring remote checkpoint response");
            if (completionHandler != null)
            {
                completionHandler();
            }
            return;
        }
        if (!LocalDatabase.Open())
        {
            // NOTE(review): Open() returning false is treated as "database closed";
            // confirm this call doesn't re-open a deliberately closed database.
            Log.W(TAG, "Database is closed, ignoring remote checkpoint response");
            if (completionHandler != null)
            {
                completionHandler();
            }
            return;
        }
        if (e != null)
        {
            switch (GetStatusFromError(e))
            {
                case StatusCode.NotFound:
                    // Remote checkpoint doc is gone: forget our copy and retry.
                    _remoteCheckpoint = null;
                    _overdueForSave = true;
                    break;

                case StatusCode.Conflict:
                    // Someone else updated the checkpoint; re-fetch to learn its _rev.
                    RefreshRemoteCheckpointDoc();
                    break;

                default:
                    // TODO: On 401 or 403, and this is a pull, remember that remote
                    // TODO: is read-only & don't attempt to read its checkpoint next time.
                    break;
            }
        }
        else
        {
            // Success: remember the new _rev and persist the sequence locally.
            Log.D(TAG, "Save checkpoint response: " + result.ToString());
            var response = result.AsDictionary<string, object>();
            body.Put("_rev", response.Get("rev"));
            _remoteCheckpoint = body;
            LocalDatabase.SetLastSequence(LastSequence, RemoteCheckpointDocID(), !IsPull);
        }
        if (_overdueForSave)
        {
            // A save request arrived while this one was in flight; run it now.
            SaveLastSequence(completionHandler);
        }
        else if (completionHandler != null)
        {
            completionHandler();
        }
    });
}
/// <summary>
/// Wires up a compiled data flow for execution in two passes: first gives
/// every operator an emitter (its runtime context, optionally wrapped for
/// statistics), then hooks punctuation-signal forwarding from producing to
/// consuming operators.
/// </summary>
/// <returns>start descriptor carrying the statistics provider (null when
/// operator statistics are disabled)</returns>
public static DataflowStartDesc Realize(String dataFlowName, IDictionary<int, Object> operators,
    IDictionary<int, OperatorMetadataDescriptor> operatorMetadata,
    ICollection<int> operatorBuildOrder,
    IList<LogicalChannelBinding> bindings,
    DataFlowSignalManager dataFlowSignalManager,
    EPDataFlowInstantiationOptions options,
    EPServicesContext services, // NOTE(review): unused in this method body
    StatementContext statementContext)
{
    // First pass: inject runtime context
    IDictionary<int, EPDataFlowEmitter> runtimeContexts = new Dictionary<int, EPDataFlowEmitter>();
    OperatorStatisticsProvider statisticsProvider = null;
    if (options.IsOperatorStatistics())
    {
        statisticsProvider = new OperatorStatisticsProvider(operatorMetadata);
    }
    bool audit = AuditEnum.DATAFLOW_OP.GetAudit(statementContext.Annotations) != null;
    foreach (int producerOpNum in operatorBuildOrder)
    {
        String operatorPrettyPrint = operatorMetadata.Get(producerOpNum).OperatorPrettyPrint;
        if (Log.IsDebugEnabled)
        {
            Log.Debug("Generating runtime context for " + operatorPrettyPrint);
        }
        // determine the number of output streams
        Object producingOp = operators.Get(producerOpNum);
        int numOutputStreams = operatorMetadata.Get(producerOpNum).OperatorSpec.Output.Items.Count;
        IList<ObjectBindingPair>[] targets = GetOperatorConsumersPerStream(numOutputStreams, producerOpNum, operators, operatorMetadata, bindings);
        EPDataFlowEmitter runtimeContext = GenerateRuntimeContext(statementContext.EngineURI, statementContext.StatementName, audit, dataFlowName, producerOpNum, operatorPrettyPrint, dataFlowSignalManager, targets, options);
        if (options.IsOperatorStatistics())
        {
            // Wrap the emitter so submissions are counted (and CPU-timed if requested).
            runtimeContext = new EPDataFlowEmitterWrapperWStatistics(runtimeContext, producerOpNum, statisticsProvider, options.IsCpuStatistics());
        }
        // Inject the emitter into the operator's DataFlowContext-attributed field.
        TypeHelper.SetFieldForAnnotation(producingOp, typeof(DataFlowContextAttribute), runtimeContext);
        runtimeContexts.Put(producerOpNum, runtimeContext);
    }

    // Second pass: hook punctuation such that it gets forwarded
    foreach (int producerOpNum in operatorBuildOrder)
    {
        String operatorPrettyPrint = operatorMetadata.Get(producerOpNum).OperatorPrettyPrint;
        if (Log.IsDebugEnabled)
        {
            Log.Debug("Handling signals for " + operatorPrettyPrint);
        }
        // determine consumers that receive punctuation
        ICollection<int> consumingOperatorsWithPunctuation = new HashSet<int>();
        foreach (LogicalChannelBinding binding in bindings)
        {
            // Only channels that carry punctuation and originate from this producer count.
            if (!binding.LogicalChannel.OutputPort.HasPunctuation || binding.LogicalChannel.OutputPort.ProducingOpNum != producerOpNum)
            {
                continue;
            }
            consumingOperatorsWithPunctuation.Add(binding.LogicalChannel.ConsumingOpNum);
        }
        // hook up a listener for each
        foreach (int consumerPunc in consumingOperatorsWithPunctuation)
        {
            EPDataFlowEmitter context = runtimeContexts.Get(consumerPunc);
            if (context == null)
            {
                continue;
            }
            // Forward the producer's signals into the consumer's emitter.
            dataFlowSignalManager.AddSignalListener(producerOpNum, new ProxyDataFlowSignalListener { ProcSignal = context.SubmitSignal });
        }
    }
    return (new DataflowStartDesc(statisticsProvider));
}