/// <summary>
/// Builds a solar system from its constituent parts.
/// </summary>
/// <param name="planet">Stored in both <c>PlanetField</c> and <c>PlanetFieldClone</c> (same reference, not a copy).</param>
/// <param name="alignment">Stored as-is in <c>AlignmentField</c>; <c>AlignmentProperty</c> is derived from the next switch-index constant.</param>
/// <param name="set">Assigned to <c>LawfulAligned</c>.</param>
/// <param name="list">Assigned to <c>ListPlanets</c>.</param>
public SolarSystem(Planet planet, Alignment alignment, EnumSet<IChaotic> set, List<Planet> list)
{
    PlanetField = planet;
    PlanetFieldClone = planet;
    AlignmentField = alignment;
    // Derived alignment: the constant one past the incoming alignment's switch index.
    AlignmentProperty = Alignment.FromConst(alignment.SwitchIndex + 1);
    LawfulAligned = set;
    ListPlanets = list;
}
/// <summary>
/// Factory for a <c>VariantContextWriter</c>: produces a BCF2 writer when the
/// location/options indicate BCF output, otherwise a plain VCF writer.
/// </summary>
/// <param name="location">Target file; may be null when writing only to the stream.</param>
/// <param name="output">Stream the writer emits to.</param>
/// <param name="refDict">Reference sequence dictionary for the header.</param>
/// <param name="options">Writer behavior flags (indexing, genotype suppression, ...).</param>
public static VariantContextWriter create(File location, OutputStream output, SAMSequenceDictionary refDict, EnumSet<Options> options)
{
    bool enableBCF = isBCFOutput(location, options);
    // Flags shared by both writer kinds.
    bool indexOnTheFly = options.contains(Options.INDEX_ON_THE_FLY);
    bool skipGenotypes = options.contains(Options.DO_NOT_WRITE_GENOTYPES);
    if (enableBCF)
    {
        return new BCF2Writer(location, output, refDict, indexOnTheFly, skipGenotypes);
    }
    // VCF additionally honors the missing-header-fields leniency flag.
    return new VCFWriter(location, output, refDict, indexOnTheFly, skipGenotypes, options.contains(Options.ALLOW_MISSING_FIELDS_IN_HEADER));
}
/// <exception cref="Couchbase.Lite.CouchbaseLiteException"></exception> public virtual void TestPutAttachment() { string testAttachmentName = "test_attachment"; BlobStore attachments = database.Attachments; attachments.DeleteBlobs(); NUnit.Framework.Assert.AreEqual(0, attachments.Count()); // Put a revision that includes an _attachments dict: byte[] attach1 = Sharpen.Runtime.GetBytesForString("This is the body of attach1"); string base64 = Base64.EncodeBytes(attach1); IDictionary <string, object> attachment = new Dictionary <string, object>(); attachment["content_type"] = "text/plain"; attachment["data"] = base64; IDictionary <string, object> attachmentDict = new Dictionary <string, object>(); attachmentDict[testAttachmentName] = attachment; IDictionary <string, object> properties = new Dictionary <string, object>(); properties["foo"] = 1; properties["bar"] = false; properties["_attachments"] = attachmentDict; RevisionInternal rev1 = database.PutRevision(new RevisionInternal(properties, database ), null, false); // Examine the attachment store: NUnit.Framework.Assert.AreEqual(1, attachments.Count()); // Get the revision: RevisionInternal gotRev1 = database.GetDocumentWithIDAndRev(rev1.GetDocId(), rev1 .GetRevId(), EnumSet.NoneOf <TDContentOptions>()); IDictionary <string, object> gotAttachmentDict = (IDictionary <string, object>)gotRev1 .Properties["_attachments"]; IDictionary <string, object> innerDict = new Dictionary <string, object>(); innerDict["content_type"] = "text/plain"; innerDict["digest"] = "sha1-gOHUOBmIMoDCrMuGyaLWzf1hQTE="; innerDict["length"] = 27; innerDict["stub"] = true; innerDict["revpos"] = 1; IDictionary <string, object> expectAttachmentDict = new Dictionary <string, object> (); expectAttachmentDict[testAttachmentName] = innerDict; NUnit.Framework.Assert.AreEqual(expectAttachmentDict, gotAttachmentDict); // Update the attachment directly: byte[] attachv2 = Sharpen.Runtime.GetBytesForString("Replaced body of attach"); bool gotExpectedErrorCode 
= false; try { database.UpdateAttachment(testAttachmentName, new ByteArrayInputStream(attachv2), "application/foo", rev1.GetDocId(), null); } catch (CouchbaseLiteException e) { gotExpectedErrorCode = (e.GetCBLStatus().GetCode() == StatusCode.Conflict); } NUnit.Framework.Assert.IsTrue(gotExpectedErrorCode); gotExpectedErrorCode = false; try { database.UpdateAttachment(testAttachmentName, new ByteArrayInputStream(attachv2), "application/foo", rev1.GetDocId(), "1-bogus"); } catch (CouchbaseLiteException e) { gotExpectedErrorCode = (e.GetCBLStatus().GetCode() == StatusCode.Conflict); } NUnit.Framework.Assert.IsTrue(gotExpectedErrorCode); gotExpectedErrorCode = false; RevisionInternal rev2 = null; try { rev2 = database.UpdateAttachment(testAttachmentName, new ByteArrayInputStream(attachv2 ), "application/foo", rev1.GetDocId(), rev1.GetRevId()); } catch (CouchbaseLiteException) { gotExpectedErrorCode = true; } NUnit.Framework.Assert.IsFalse(gotExpectedErrorCode); NUnit.Framework.Assert.AreEqual(rev1.GetDocId(), rev2.GetDocId()); NUnit.Framework.Assert.AreEqual(2, rev2.GetGeneration()); // Get the updated revision: RevisionInternal gotRev2 = database.GetDocumentWithIDAndRev(rev2.GetDocId(), rev2 .GetRevId(), EnumSet.NoneOf <TDContentOptions>()); attachmentDict = (IDictionary <string, object>)gotRev2.Properties.Get("_attachments" ); innerDict = new Dictionary <string, object>(); innerDict["content_type"] = "application/foo"; innerDict["digest"] = "sha1-mbT3208HI3PZgbG4zYWbDW2HsPk="; innerDict["length"] = 23; innerDict["stub"] = true; innerDict["revpos"] = 2; expectAttachmentDict[testAttachmentName] = innerDict; NUnit.Framework.Assert.AreEqual(expectAttachmentDict, attachmentDict); // Delete the attachment: gotExpectedErrorCode = false; try { database.UpdateAttachment("nosuchattach", null, null, rev2.GetDocId(), rev2.GetRevId ()); } catch (CouchbaseLiteException e) { gotExpectedErrorCode = (e.GetCBLStatus().GetCode() == StatusCode.NotFound); } 
NUnit.Framework.Assert.IsTrue(gotExpectedErrorCode); gotExpectedErrorCode = false; try { database.UpdateAttachment("nosuchattach", null, null, "nosuchdoc", "nosuchrev"); } catch (CouchbaseLiteException e) { gotExpectedErrorCode = (e.GetCBLStatus().GetCode() == StatusCode.NotFound); } NUnit.Framework.Assert.IsTrue(gotExpectedErrorCode); RevisionInternal rev3 = database.UpdateAttachment(testAttachmentName, null, null, rev2.GetDocId(), rev2.GetRevId()); NUnit.Framework.Assert.AreEqual(rev2.GetDocId(), rev3.GetDocId()); NUnit.Framework.Assert.AreEqual(3, rev3.GetGeneration()); // Get the updated revision: RevisionInternal gotRev3 = database.GetDocumentWithIDAndRev(rev3.GetDocId(), rev3 .GetRevId(), EnumSet.NoneOf <TDContentOptions>()); attachmentDict = (IDictionary <string, object>)gotRev3.Properties.Get("_attachments" ); NUnit.Framework.Assert.IsNull(attachmentDict); database.Close(); }
/// <summary>Common routine to do position read while open the file for write.</summary>
/// <remarks>
/// Common routine to do position read while open the file for write.
/// After each iteration of write, do a read of the file from begin to end.
/// Return 0 on success, else number of failure.
/// </remarks>
/// <exception cref="System.IO.IOException"/>
private int TestWriteAndRead(string fname, int loopN, int chunkSize, long readBeginPosition)
{
    int countOfFailures = 0;
    long byteVisibleToRead = 0;
    FSDataOutputStream @out = null;
    byte[] outBuffer = new byte[BufferSize];
    byte[] inBuffer = new byte[BufferSize];
    // Fill the write buffer with a repeating 0..255 byte pattern.
    for (int i = 0; i < BufferSize; i++)
    {
        outBuffer[i] = unchecked((byte)(i & unchecked((int)(0x00ff))));
    }
    try
    {
        Path path = GetFullyQualifiedPath(fname);
        long fileLengthBeforeOpen = 0;
        if (IfExists(path))
        {
            if (truncateOption)
            {
                // Overwrite: previous contents are discarded, so prior length is 0.
                @out = useFCOption ? mfc.Create(path, EnumSet.Of(CreateFlag.Overwrite)) : mfs.Create(path, truncateOption);
                Log.Info("File already exists. File open with Truncate mode: " + path);
            }
            else
            {
                // Append: account for the bytes already in the file.
                @out = useFCOption ? mfc.Create(path, EnumSet.Of(CreateFlag.Append)) : mfs.Append(path);
                fileLengthBeforeOpen = GetFileLengthFromNN(path);
                Log.Info("File already exists of size " + fileLengthBeforeOpen + " File open for Append mode: " + path);
            }
        }
        else
        {
            @out = useFCOption ? mfc.Create(path, EnumSet.Of(CreateFlag.Create)) : mfs.Create(path);
        }
        long totalByteWritten = fileLengthBeforeOpen;
        long totalByteVisible = fileLengthBeforeOpen;
        long totalByteWrittenButNotVisible = 0;
        bool toFlush;
        for (int i_1 = 0; i_1 < loopN; i_1++)
        {
            // Hflush on every other iteration so visibility alternates between
            // "flushed" and "written but not yet guaranteed visible".
            toFlush = (i_1 % 2) == 0;
            WriteData(@out, outBuffer, chunkSize);
            totalByteWritten += chunkSize;
            if (toFlush)
            {
                @out.Hflush();
                totalByteVisible += chunkSize + totalByteWrittenButNotVisible;
                totalByteWrittenButNotVisible = 0;
            }
            else
            {
                totalByteWrittenButNotVisible += chunkSize;
            }
            if (verboseOption)
            {
                Log.Info("TestReadWrite - Written " + chunkSize + ". Total written = " + totalByteWritten + ". TotalByteVisible = " + totalByteVisible + " to file " + fname);
            }
            byteVisibleToRead = ReadData(fname, inBuffer, totalByteVisible, readBeginPosition);
            string readmsg = "Written=" + totalByteWritten + " ; Expected Visible=" + totalByteVisible + " ; Got Visible=" + byteVisibleToRead + " of file " + fname;
            // Invariant: a reader must see at least every flushed byte and
            // never more than has actually been written.
            if (byteVisibleToRead >= totalByteVisible && byteVisibleToRead <= totalByteWritten)
            {
                readmsg = "pass: reader sees expected number of visible byte. " + readmsg + " [pass]";
            }
            else
            {
                countOfFailures++;
                readmsg = "fail: reader see different number of visible byte. " + readmsg + " [fail]";
                throw new IOException(readmsg);
            }
            Log.Info(readmsg);
        }
        // test the automatic flush after close
        WriteData(@out, outBuffer, chunkSize);
        totalByteWritten += chunkSize;
        totalByteVisible += chunkSize + totalByteWrittenButNotVisible;
        totalByteWrittenButNotVisible += 0;
        @out.Close();
        byteVisibleToRead = ReadData(fname, inBuffer, totalByteVisible, readBeginPosition);
        string readmsg2 = "Written=" + totalByteWritten + " ; Expected Visible=" + totalByteVisible + " ; Got Visible=" + byteVisibleToRead + " of file " + fname;
        string readmsg_1;
        if (byteVisibleToRead >= totalByteVisible && byteVisibleToRead <= totalByteWritten)
        {
            readmsg_1 = "pass: reader sees expected number of visible byte on close. " + readmsg2 + " [pass]";
        }
        else
        {
            countOfFailures++;
            readmsg_1 = "fail: reader sees different number of visible byte on close. " + readmsg2 + " [fail]";
            Log.Info(readmsg_1);
            throw new IOException(readmsg_1);
        }
        // now check if NN got the same length
        long lenFromFc = GetFileLengthFromNN(path);
        if (lenFromFc != byteVisibleToRead)
        {
            readmsg_1 = "fail: reader sees different number of visible byte from NN " + readmsg2 + " [fail]";
            throw new IOException(readmsg_1);
        }
    }
    catch (IOException e)
    {
        // Wrap with progress context so the failure point is identifiable.
        throw new IOException("##### Caught Exception in testAppendWriteAndRead. Close file. " + "Total Byte Read so far = " + byteVisibleToRead, e);
    }
    finally
    {
        if (@out != null)
        {
            @out.Close();
        }
    }
    // Negated failure count: 0 on success, negative when checks failed.
    return (-countOfFailures);
}
/// <summary>
/// Handles a "quick connect" request for the given connection string.
/// A file path starts a download transfer; a directory (or an already-mounted
/// host) opens or focuses a browser instead.
/// </summary>
void ICyberduck.QuickConnect(string arg)
{
    try
    {
        Host h = HostParser.parse(arg);
        // Fill in credentials (from configurators for the protocol) on the parsed host.
        h.setCredentials(CredentialsConfiguratorFactory.get(h.getProtocol()).configure(h));
        if (AbstractPath.Type.file == _detector.detect(h.getDefaultPath()))
        {
            Path file = new Path(PathNormalizer.normalize(h.getDefaultPath()), EnumSet.of(AbstractPath.Type.file));
            // wait until transferCollection is loaded
            transfersSemaphore.Wait();
            // Download into the configured queue folder, keeping the remote filename.
            TransferController.Instance.StartTransfer(new DownloadTransfer(h, file, LocalFactory.get(PreferencesFactory.get().getProperty("queue.download.folder"), file.getName())));
        }
        else
        {
            // Prefer reusing a browser already mounted on the same host URL.
            foreach (BrowserController b in Browsers)
            {
                if (b.IsMounted())
                {
                    if (new HostUrlProvider().get(b.Session.getHost()).Equals(new HostUrlProvider().get(h)))
                    {
                        b.View.BringToFront();
                        if (Path.Type.directory == _detector.detect(h.getDefaultPath()))
                        {
                            b.SetWorkdir(new Path(PathNormalizer.normalize(h.getDefaultPath()), EnumSet.of(AbstractPath.Type.directory)));
                        }
                        return;
                    }
                }
            }
            // No matching browser found: mount the host in a fresh one.
            NewBrowser().Mount(h);
        }
    }
    catch (HostParserException e)
    {
        // Malformed connection string: log the detail and ignore the request.
        Logger.warn(e.getDetail());
    }
}
/// <summary>Copy constructor: initializes this set with the members of another.</summary>
private EnumSet(EnumSet c)
    : base(c)
{
}
/// <summary>
/// Fetches one revision of a document from the revs/docs tables: the exact
/// revision when <paramref name="rev"/> is given, otherwise the current
/// non-deleted winner. Returns null when not found or on a SQL error.
/// </summary>
public RevisionInternal GetDocumentWithIDAndRev(string id, string rev, EnumSet<Database.TDContentOptions> contentOptions)
{
    RevisionInternal result = null;
    string sql;
    Cursor cursor = null;
    try
    {
        cursor = null;
        string cols = "revid, deleted, sequence";
        // Only select the JSON body column when the caller wants a body;
        // this makes "json" column index 3 below conditional on the options.
        if (!contentOptions.Contains(Database.TDContentOptions.TDNoBody))
        {
            cols += ", json";
        }
        if (rev != null)
        {
            // Exact revision requested.
            sql = "SELECT " + cols + " FROM revs, docs WHERE docs.docid=? AND revs.doc_id=docs.doc_id AND revid=? LIMIT 1";
            string[] args = new string[] { id, rev };
            cursor = database.RawQuery(sql, args);
        }
        else
        {
            // No revision given: pick the current, non-deleted revision
            // with the highest revid (the winning revision).
            sql = "SELECT " + cols + " FROM revs, docs WHERE docs.docid=? AND revs.doc_id=docs.doc_id and current=1 and deleted=0 ORDER BY revid DESC LIMIT 1";
            string[] args = new string[] { id };
            cursor = database.RawQuery(sql, args);
        }
        if (cursor.MoveToNext())
        {
            if (rev == null)
            {
                rev = cursor.GetString(0);
            }
            bool deleted = (cursor.GetInt(1) > 0);
            result = new RevisionInternal(id, rev, deleted, this);
            result.SetSequence(cursor.GetLong(2));
            // Skip body expansion entirely when the options are exactly {TDNoBody}.
            if (!contentOptions.Equals(EnumSet.Of(Database.TDContentOptions.TDNoBody)))
            {
                byte[] json = null;
                if (!contentOptions.Contains(Database.TDContentOptions.TDNoBody))
                {
                    // Column 3 (json) only exists when it was selected above.
                    json = cursor.GetBlob(3);
                }
                ExpandStoredJSONIntoRevisionWithAttachments(json, result, contentOptions);
            }
        }
    }
    catch (SQLException e)
    {
        // Swallowed by design: the method contract is "null on failure".
        Log.E(Database.Tag, "Error getting document with id and rev", e);
    }
    finally
    {
        if (cursor != null)
        {
            cursor.Close();
        }
    }
    return result;
}
/// <exception cref="Couchbase.Lite.CouchbaseLiteException"></exception> internal RevisionInternal LoadRevisionBody(RevisionInternal rev, EnumSet<TDContentOptions> contentOptions) { if (rev.GetBody() != null && contentOptions == EnumSet.NoneOf<TDContentOptions>() && rev.GetSequence() != 0) { return rev; } Debug.Assert(((rev.GetDocId() != null) && (rev.GetRevId() != null))); Cursor cursor = null; var result = new Status(StatusCode.NotFound); try { // TODO: on ios this query is: // TODO: "SELECT sequence, json FROM revs WHERE doc_id=@ AND revid=@ LIMIT 1" var sql = "SELECT sequence, json FROM revs, docs WHERE revid=@ AND docs.docid=@ AND revs.doc_id=docs.doc_id LIMIT 1"; var args = new [] { rev.GetRevId(), rev.GetDocId() }; cursor = StorageEngine.RawQuery(sql, CommandBehavior.SequentialAccess, args); if (cursor.MoveToNext()) { result.SetCode(StatusCode.Ok); rev.SetSequence(cursor.GetLong(0)); ExpandStoredJSONIntoRevisionWithAttachments(cursor.GetBlob(1), rev, contentOptions); } } catch (SQLException e) { Log.E(Database.Tag, "Error loading revision body", e); throw new CouchbaseLiteException(StatusCode.InternalServerError); } finally { if (cursor != null) { cursor.Close(); } } if (result.GetCode() == StatusCode.NotFound) { throw new CouchbaseLiteException(result.GetCode()); } return rev; }
/// <summary>
/// Builds the complete property dictionary of a revision from its stored JSON
/// body plus the synthesized extra properties (_id, _rev, _attachments, ...).
/// </summary>
/// <param name="json">Raw stored JSON body; may be null when only metadata exists.</param>
/// <returns>The merged properties; just the extras when <paramref name="json"/>
/// is null; null if the JSON could not be parsed.</returns>
internal IDictionary<String, Object> DocumentPropertiesFromJSON(IEnumerable<Byte> json, String docId, String revId, Boolean deleted, Int64 sequence, EnumSet<TDContentOptions> contentOptions)
{
    var rev = new RevisionInternal(docId, revId, deleted, this);
    rev.SetSequence(sequence);
    IDictionary<String, Object> extra = ExtraPropertiesForRevision(rev, contentOptions);
    if (json == null)
    {
        // No stored body: the synthesized properties are all there is.
        return extra;
    }
    IDictionary<String, Object> docProperties = null;
    try
    {
        docProperties = Manager.GetObjectMapper().ReadValue<IDictionary<string, object>>(json);
        docProperties.PutAll(extra);
    }
    catch (Exception e)
    {
        // BUGFIX: this path parses (deserializes) JSON into properties; the
        // old message claimed the opposite direction ("serializing to JSON").
        Log.E(Database.Tag, "Error deserializing properties from JSON", e);
    }
    // Null when deserialization failed (preserved best-effort contract).
    return docProperties;
}
/// <exception cref="Org.Apache.Hadoop.Yarn.Exceptions.YarnException"/> /// <exception cref="System.IO.IOException"/> public virtual IDictionary <ApplicationId, ApplicationReport> GetApplications(long appsNum) { TimelineEntities entities = timelineDataManager.GetEntities(ApplicationMetricsConstants .EntityType, null, null, null, null, null, null, appsNum == long.MaxValue ? this .maxLoadedApplications : appsNum, EnumSet.AllOf <TimelineReader.Field>(), UserGroupInformation .GetLoginUser()); IDictionary <ApplicationId, ApplicationReport> apps = new LinkedHashMap <ApplicationId , ApplicationReport>(); if (entities != null && entities.GetEntities() != null) { foreach (TimelineEntity entity in entities.GetEntities()) { try { ApplicationHistoryManagerOnTimelineStore.ApplicationReportExt app = GenerateApplicationReport (entity, ApplicationHistoryManagerOnTimelineStore.ApplicationReportField.All); apps[app.appReport.GetApplicationId()] = app.appReport; } catch (Exception e) { Log.Error("Error on generating application report for " + entity.GetEntityId(), e ); } } } return(apps); }
/// <exception cref="Org.Apache.Hadoop.Yarn.Exceptions.YarnException"/> /// <exception cref="System.IO.IOException"/> private ApplicationHistoryManagerOnTimelineStore.ApplicationReportExt GetApplication (ApplicationId appId, ApplicationHistoryManagerOnTimelineStore.ApplicationReportField field) { TimelineEntity entity = timelineDataManager.GetEntity(ApplicationMetricsConstants .EntityType, appId.ToString(), EnumSet.AllOf <TimelineReader.Field>(), UserGroupInformation .GetLoginUser()); if (entity == null) { throw new ApplicationNotFoundException("The entity for application " + appId + " doesn't exist in the timeline store" ); } else { return(GenerateApplicationReport(entity, field)); } }
/// <exception cref="Org.Apache.Hadoop.Yarn.Exceptions.YarnException"/> /// <exception cref="System.IO.IOException"/> public virtual IDictionary <ContainerId, ContainerReport> GetContainers(ApplicationAttemptId appAttemptId) { ApplicationHistoryManagerOnTimelineStore.ApplicationReportExt app = GetApplication (appAttemptId.GetApplicationId(), ApplicationHistoryManagerOnTimelineStore.ApplicationReportField .UserAndAcls); CheckAccess(app); TimelineEntities entities = timelineDataManager.GetEntities(ContainerMetricsConstants .EntityType, new NameValuePair(ContainerMetricsConstants.ParentPrimariyFilter, appAttemptId .ToString()), null, null, null, null, null, long.MaxValue, EnumSet.AllOf <TimelineReader.Field >(), UserGroupInformation.GetLoginUser()); IDictionary <ContainerId, ContainerReport> containers = new LinkedHashMap <ContainerId , ContainerReport>(); if (entities != null && entities.GetEntities() != null) { foreach (TimelineEntity entity in entities.GetEntities()) { ContainerReport container = ConvertToContainerReport(entity, serverHttpAddress, app .appReport.GetUser()); containers[container.GetContainerId()] = container; } } return(containers); }
/// <exception cref="Org.Apache.Hadoop.Yarn.Exceptions.YarnException"/> /// <exception cref="System.IO.IOException"/> public virtual ContainerReport GetContainer(ContainerId containerId) { ApplicationHistoryManagerOnTimelineStore.ApplicationReportExt app = GetApplication (containerId.GetApplicationAttemptId().GetApplicationId(), ApplicationHistoryManagerOnTimelineStore.ApplicationReportField .UserAndAcls); CheckAccess(app); TimelineEntity entity = timelineDataManager.GetEntity(ContainerMetricsConstants.EntityType , containerId.ToString(), EnumSet.AllOf <TimelineReader.Field>(), UserGroupInformation .GetLoginUser()); if (entity == null) { throw new ContainerNotFoundException("The entity for container " + containerId + " doesn't exist in the timeline store"); } else { return(ConvertToContainerReport(entity, serverHttpAddress, app.appReport.GetUser( ))); } }
/// <exception cref="Org.Apache.Hadoop.Yarn.Exceptions.YarnException"/> /// <exception cref="System.IO.IOException"/> private ApplicationAttemptReport GetApplicationAttempt(ApplicationAttemptId appAttemptId , bool checkACLs) { if (checkACLs) { ApplicationHistoryManagerOnTimelineStore.ApplicationReportExt app = GetApplication (appAttemptId.GetApplicationId(), ApplicationHistoryManagerOnTimelineStore.ApplicationReportField .UserAndAcls); CheckAccess(app); } TimelineEntity entity = timelineDataManager.GetEntity(AppAttemptMetricsConstants. EntityType, appAttemptId.ToString(), EnumSet.AllOf <TimelineReader.Field>(), UserGroupInformation .GetLoginUser()); if (entity == null) { throw new ApplicationAttemptNotFoundException("The entity for application attempt " + appAttemptId + " doesn't exist in the timeline store"); } else { return(ConvertToApplicationAttemptReport(entity)); } }
/// <summary>
/// Convenience overload: creates a writer bound only to an output stream.
/// Delegates to the full factory with a null file location.
/// </summary>
public static VariantContextWriter create(OutputStream output, SAMSequenceDictionary refDict, EnumSet<Options> options)
{
    return create(null, output, refDict, options);
}
/// <exception cref="System.Exception"/> public override int Run(string[] args) { Options opts = new Options(); string title = null; if (args.Length > 0 && Sharpen.Runtime.EqualsIgnoreCase(args[0], Application)) { title = Application; opts.AddOption(StatusCmd, true, "Prints the status of the application."); opts.AddOption(ListCmd, false, "List applications. " + "Supports optional use of -appTypes to filter applications " + "based on application type, " + "and -appStates to filter applications based on application state." ); opts.AddOption(KillCmd, true, "Kills the application."); opts.AddOption(MoveToQueueCmd, true, "Moves the application to a " + "different queue." ); opts.AddOption(QueueCmd, true, "Works with the movetoqueue command to" + " specify which queue to move an application to." ); opts.AddOption(HelpCmd, false, "Displays help for all commands."); Option appTypeOpt = new Option(AppTypeCmd, true, "Works with -list to " + "filter applications based on " + "input comma-separated list of application types."); appTypeOpt.SetValueSeparator(','); appTypeOpt.SetArgs(Option.UnlimitedValues); appTypeOpt.SetArgName("Types"); opts.AddOption(appTypeOpt); Option appStateOpt = new Option(AppStateCmd, true, "Works with -list " + "to filter applications based on input comma-separated list of " + "application states. 
" + GetAllValidApplicationStates()); appStateOpt.SetValueSeparator(','); appStateOpt.SetArgs(Option.UnlimitedValues); appStateOpt.SetArgName("States"); opts.AddOption(appStateOpt); opts.GetOption(KillCmd).SetArgName("Application ID"); opts.GetOption(MoveToQueueCmd).SetArgName("Application ID"); opts.GetOption(QueueCmd).SetArgName("Queue Name"); opts.GetOption(StatusCmd).SetArgName("Application ID"); } else { if (args.Length > 0 && Sharpen.Runtime.EqualsIgnoreCase(args[0], ApplicationAttempt )) { title = ApplicationAttempt; opts.AddOption(StatusCmd, true, "Prints the status of the application attempt."); opts.AddOption(ListCmd, true, "List application attempts for aplication."); opts.AddOption(HelpCmd, false, "Displays help for all commands."); opts.GetOption(StatusCmd).SetArgName("Application Attempt ID"); opts.GetOption(ListCmd).SetArgName("Application ID"); } else { if (args.Length > 0 && Sharpen.Runtime.EqualsIgnoreCase(args[0], Container)) { title = Container; opts.AddOption(StatusCmd, true, "Prints the status of the container."); opts.AddOption(ListCmd, true, "List containers for application attempt."); opts.AddOption(HelpCmd, false, "Displays help for all commands."); opts.GetOption(StatusCmd).SetArgName("Container ID"); opts.GetOption(ListCmd).SetArgName("Application Attempt ID"); } } } int exitCode = -1; CommandLine cliParser = null; try { cliParser = new GnuParser().Parse(opts, args); } catch (MissingArgumentException) { sysout.WriteLine("Missing argument for options"); PrintUsage(title, opts); return(exitCode); } if (cliParser.HasOption(StatusCmd)) { if (args.Length != 3) { PrintUsage(title, opts); return(exitCode); } if (Sharpen.Runtime.EqualsIgnoreCase(args[0], Application)) { exitCode = PrintApplicationReport(cliParser.GetOptionValue(StatusCmd)); } else { if (Sharpen.Runtime.EqualsIgnoreCase(args[0], ApplicationAttempt)) { exitCode = PrintApplicationAttemptReport(cliParser.GetOptionValue(StatusCmd)); } else { if 
(Sharpen.Runtime.EqualsIgnoreCase(args[0], Container)) { exitCode = PrintContainerReport(cliParser.GetOptionValue(StatusCmd)); } } } return(exitCode); } else { if (cliParser.HasOption(ListCmd)) { if (Sharpen.Runtime.EqualsIgnoreCase(args[0], Application)) { allAppStates = false; ICollection <string> appTypes = new HashSet <string>(); if (cliParser.HasOption(AppTypeCmd)) { string[] types = cliParser.GetOptionValues(AppTypeCmd); if (types != null) { foreach (string type in types) { if (!type.Trim().IsEmpty()) { appTypes.AddItem(StringUtils.ToUpperCase(type).Trim()); } } } } EnumSet <YarnApplicationState> appStates = EnumSet.NoneOf <YarnApplicationState>(); if (cliParser.HasOption(AppStateCmd)) { string[] states = cliParser.GetOptionValues(AppStateCmd); if (states != null) { foreach (string state in states) { if (!state.Trim().IsEmpty()) { if (Sharpen.Runtime.EqualsIgnoreCase(state.Trim(), AllstatesOption)) { allAppStates = true; break; } try { appStates.AddItem(YarnApplicationState.ValueOf(StringUtils.ToUpperCase(state).Trim ())); } catch (ArgumentException) { sysout.WriteLine("The application state " + state + " is invalid."); sysout.WriteLine(GetAllValidApplicationStates()); return(exitCode); } } } } } ListApplications(appTypes, appStates); } else { if (Sharpen.Runtime.EqualsIgnoreCase(args[0], ApplicationAttempt)) { if (args.Length != 3) { PrintUsage(title, opts); return(exitCode); } ListApplicationAttempts(cliParser.GetOptionValue(ListCmd)); } else { if (Sharpen.Runtime.EqualsIgnoreCase(args[0], Container)) { if (args.Length != 3) { PrintUsage(title, opts); return(exitCode); } ListContainers(cliParser.GetOptionValue(ListCmd)); } } } } else { if (cliParser.HasOption(KillCmd)) { if (args.Length != 3) { PrintUsage(title, opts); return(exitCode); } try { KillApplication(cliParser.GetOptionValue(KillCmd)); } catch (ApplicationNotFoundException) { return(exitCode); } } else { if (cliParser.HasOption(MoveToQueueCmd)) { if (!cliParser.HasOption(QueueCmd)) { 
PrintUsage(title, opts); return(exitCode); } MoveApplicationAcrossQueues(cliParser.GetOptionValue(MoveToQueueCmd), cliParser.GetOptionValue (QueueCmd)); } else { if (cliParser.HasOption(HelpCmd)) { PrintUsage(title, opts); return(0); } else { syserr.WriteLine("Invalid Command Usage : "); PrintUsage(title, opts); } } } } } return(0); }
/// <summary>Wraps an XAttr set-flag set as a request parameter in the shared domain.</summary>
public XAttrSetFlagParam(EnumSet<XAttrSetFlag> flag)
    : base(Domain, flag)
{
}
/// <exception cref="Org.Apache.Hadoop.Yarn.Exceptions.YarnException"/> /// <exception cref="System.IO.IOException"/> public virtual IDictionary <ApplicationAttemptId, ApplicationAttemptReport> GetApplicationAttempts (ApplicationId appId) { ApplicationHistoryManagerOnTimelineStore.ApplicationReportExt app = GetApplication (appId, ApplicationHistoryManagerOnTimelineStore.ApplicationReportField.UserAndAcls ); CheckAccess(app); TimelineEntities entities = timelineDataManager.GetEntities(AppAttemptMetricsConstants .EntityType, new NameValuePair(AppAttemptMetricsConstants.ParentPrimaryFilter, appId .ToString()), null, null, null, null, null, long.MaxValue, EnumSet.AllOf <TimelineReader.Field >(), UserGroupInformation.GetLoginUser()); IDictionary <ApplicationAttemptId, ApplicationAttemptReport> appAttempts = new LinkedHashMap <ApplicationAttemptId, ApplicationAttemptReport>(); foreach (TimelineEntity entity in entities.GetEntities()) { ApplicationAttemptReport appAttempt = ConvertToApplicationAttemptReport(entity); appAttempts[appAttempt.GetApplicationAttemptId()] = appAttempt; } return(appAttempts); }
/// <summary>Inserts the _id, _rev and _attachments properties into the JSON data and stores it in rev.
/// </summary>
/// <remarks>
/// Inserts the _id, _rev and _attachments properties into the JSON data and stores it in rev.
/// Rev must already have its revID and sequence properties set.
/// Optional entries (_local_seq, _revisions, _revs_info, _conflicts) are only
/// produced when the corresponding content option is set.
/// </remarks>
internal IDictionary<String, Object> ExtraPropertiesForRevision(RevisionInternal rev, EnumSet<TDContentOptions> contentOptions)
{
    var docId = rev.GetDocId();
    var revId = rev.GetRevId();
    var sequenceNumber = rev.GetSequence();
    Debug.Assert((revId != null));
    Debug.Assert((sequenceNumber > 0));
    // Get attachment metadata, and optionally the contents:
    var attachmentsDict = GetAttachmentsDictForSequenceWithContent(sequenceNumber, contentOptions);
    // Get more optional stuff to put in the properties:
    //OPT: This probably ends up making redundant SQL queries if multiple options are enabled.
    var localSeq = -1L;
    if (contentOptions.Contains(TDContentOptions.TDIncludeLocalSeq))
    {
        localSeq = sequenceNumber;
    }
    IDictionary<string, object> revHistory = null;
    if (contentOptions.Contains(TDContentOptions.TDIncludeRevs))
    {
        revHistory = GetRevisionHistoryDict(rev);
    }
    IList<object> revsInfo = null;
    if (contentOptions.Contains(TDContentOptions.TDIncludeRevsInfo))
    {
        // One {rev, status} entry per ancestor revision.
        revsInfo = new AList<object>();
        var revHistoryFull = GetRevisionHistory(rev);
        foreach (RevisionInternal historicalRev in revHistoryFull)
        {
            var revHistoryItem = new Dictionary<string, object>();
            var status = "available";
            if (historicalRev.IsDeleted())
            {
                status = "deleted";
            }
            // TODO: Detect missing revisions, set status="missing"
            if (historicalRev.IsMissing())
            {
                status = "missing";
            }
            revHistoryItem.Put("rev", historicalRev.GetRevId());
            revHistoryItem["status"] = status;
            revsInfo.AddItem(revHistoryItem);
        }
    }
    IList<string> conflicts = null;
    if (contentOptions.Contains(TDContentOptions.TDIncludeConflicts))
    {
        // Any other current revision of this document is a conflict.
        var revs = GetAllRevisionsOfDocumentID(docId, true);
        if (revs.Count > 1)
        {
            conflicts = new AList<string>();
            foreach (RevisionInternal historicalRev in revs)
            {
                if (!historicalRev.Equals(rev))
                {
                    conflicts.AddItem(historicalRev.GetRevId());
                }
            }
        }
    }
    // Assemble the final property dictionary; optional keys are only added
    // when the corresponding option produced a value above.
    var result = new Dictionary<string, object>();
    result["_id"] = docId;
    result["_rev"] = revId;
    if (rev.IsDeleted())
    {
        result["_deleted"] = true;
    }
    if (attachmentsDict != null)
    {
        result["_attachments"] = attachmentsDict;
    }
    if (localSeq > -1)
    {
        result["_local_seq"] = localSeq;
    }
    if (revHistory != null)
    {
        result["_revisions"] = revHistory;
    }
    if (revsInfo != null)
    {
        result["_revs_info"] = revsInfo;
    }
    if (conflicts != null)
    {
        result["_conflicts"] = conflicts;
    }
    return result;
}
/// <summary>Memory-mapped reads are not supported by this implementation; always yields null.</summary>
public virtual ClientMmap GetClientMmap(EnumSet<ReadOption> opts)
{
    // No mmap support here regardless of the requested read options.
    return null;
}
/// <summary>
/// Populates a revision from stored JSON: merges the synthesized extra
/// properties (_id, _rev, _attachments, ...) into the JSON body when one
/// exists, otherwise sets the extras as the revision's only properties.
/// </summary>
public void ExpandStoredJSONIntoRevisionWithAttachments(byte[] json, RevisionInternal rev, EnumSet<Database.TDContentOptions> contentOptions)
{
    var extra = ExtraPropertiesForRevision(rev, contentOptions);
    if (json == null)
    {
        // No stored body: the extras become the entire property set.
        rev.SetProperties(extra);
    }
    else
    {
        rev.SetJson(AppendDictToJSON(json, extra));
    }
}
/// <summary>
/// Push-replication inbox handler: asks the target database (via _revs_diff)
/// which of the batched revisions it is missing, then uploads those through
/// _bulk_docs (or a multipart request for revisions carrying attachments).
/// </summary>
internal override void ProcessInbox(RevisionList inbox)
{
    // Checkpoint value: the batch's highest local sequence.
    var lastInboxSequence = inbox[inbox.Count - 1].GetSequence();
    // Generate a set of doc/rev IDs in the JSON format that _revs_diff wants:
    // <http://wiki.apache.org/couchdb/HttpPostRevsDiff>
    var diffs = new Dictionary<String, IList<String>>();
    foreach (var rev in inbox)
    {
        var docID = rev.GetDocId();
        var revs = diffs.Get(docID);
        if (revs == null)
        {
            revs = new AList<String>();
            diffs[docID] = revs;
        }
        revs.AddItem(rev.GetRevId());
    }
    // Call _revs_diff on the target db:
    Log.D(Tag, this + "|" + Thread.CurrentThread() + ": processInbox() calling asyncTaskStarted()");
    Log.D(Tag, this + "|" + Thread.CurrentThread() + ": posting to /_revs_diff: " + diffs);
    AsyncTaskStarted();
    SendAsyncRequest(HttpMethod.Post, "/_revs_diff", diffs, (response, e) =>
    {
        try
        {
            Log.D(Tag, this + "|" + Thread.CurrentThread() + ": /_revs_diff response: " + response);
            // NOTE(review): the response is cast and parsed before 'e' is
            // checked; a failed request with a null response would throw here
            // — confirm the callback contract guarantees a non-null response.
            var responseData = (JObject)response;
            var results = responseData.ToObject<IDictionary<string, object>>();
            if (e != null)
            {
                // Transport error: record it and mark the batch failed.
                LastError = e;
                RevisionFailed();
                //Stop ();
            }
            else
            {
                if (results.Count != 0)
                {
                    // Go through the list of local changes again, selecting the ones the destination server
                    // said were missing and mapping them to a JSON dictionary in the form _bulk_docs wants:
                    var docsToSend = new AList<object>();
                    foreach (var rev in inbox)
                    {
                        IDictionary<string, object> properties = null;
                        var resultDocData = (JObject)results.Get(rev.GetDocId());
                        var resultDoc = resultDocData.ToObject<IDictionary<String, Object>>();
                        if (resultDoc != null)
                        {
                            var revs = ((JArray)resultDoc.Get("missing")).Values<String>().ToList();
                            if (revs != null && revs.Contains(rev.GetRevId()))
                            {
                                //remote server needs this revision
                                // Get the revision's properties
                                if (rev.IsDeleted())
                                {
                                    // A deletion only needs the tombstone triple.
                                    properties = new Dictionary<string, object>();
                                    properties.Put("_id", rev.GetDocId());
                                    properties.Put("_rev", rev.GetRevId());
                                    properties.Put("_deleted", true);
                                }
                                else
                                {
                                    // OPT: Shouldn't include all attachment bodies, just ones that have changed
                                    var contentOptions = EnumSet.Of(TDContentOptions.TDIncludeAttachments, TDContentOptions.TDBigAttachmentsFollow);
                                    try
                                    {
                                        LocalDatabase.LoadRevisionBody(rev, contentOptions);
                                    }
                                    catch (CouchbaseLiteException e1)
                                    {
                                        // NOTE(review): Java-style %s placeholders are not expanded
                                        // by C# string.Format — this likely logs the format string
                                        // verbatim; confirm and consider {0}/{1}.
                                        Log.W(Tag, string.Format("%s Couldn't get local contents of %s", rev, this));
                                        RevisionFailed();
                                        continue;
                                    }
                                    properties = new Dictionary<String, Object>(rev.GetProperties());
                                }
                                if (properties.ContainsKey("_attachments"))
                                {
                                    // Revisions with attachments go up as multipart instead of _bulk_docs.
                                    if (UploadMultipartRevision(rev))
                                    {
                                        continue;
                                    }
                                }
                                if (properties != null)
                                {
                                    // Add the _revisions list:
                                    properties.Put("_revisions", LocalDatabase.GetRevisionHistoryDict(rev));
                                    //now add it to the docs to send
                                    docsToSend.AddItem(properties);
                                }
                            }
                        }
                    }
                    // Post the revisions to the destination. "new_edits":false means that the server should
                    // use the given _rev IDs instead of making up new ones.
                    var numDocsToSend = docsToSend.Count;
                    if (numDocsToSend > 0)
                    {
                        var bulkDocsBody = new Dictionary<String, Object>();
                        bulkDocsBody.Put("docs", docsToSend);
                        bulkDocsBody.Put("new_edits", false);
                        Log.V(Tag, string.Format("%s: POSTing " + numDocsToSend + " revisions to _bulk_docs: %s", this, docsToSend));
                        ChangesCount += numDocsToSend;
                        Log.D(Tag, this + "|" + Thread.CurrentThread() + ": processInbox-before_bulk_docs() calling asyncTaskStarted()");
                        AsyncTaskStarted();
                        SendAsyncRequest(HttpMethod.Post, "/_bulk_docs", bulkDocsBody, (result, ex) =>
                        {
                            try
                            {
                                if (ex != null)
                                {
                                    LastError = ex;
                                    RevisionFailed();
                                }
                                else
                                {
                                    Log.V(Tag, string.Format("%s: POSTed to _bulk_docs: %s", this, docsToSend));
                                    // Checkpoint: everything up to this batch's last sequence is pushed.
                                    LastSequence = string.Format("{0}", lastInboxSequence);
                                }
                                CompletedChangesCount += numDocsToSend;
                            }
                            finally
                            {
                                AsyncTaskFinished(1);
                            }
                        });
                    }
                }
                else
                {
                    // If none of the revisions are new to the remote, just bump the lastSequence:
                    LastSequence = string.Format("{0}", lastInboxSequence);
                }
            }
        }
        finally
        {
            Log.D(Tag, this + "|" + Thread.CurrentThread() + ": processInbox() calling asyncTaskFinished()");
            AsyncTaskFinished(1);
        }
    });
}
/// <summary>
/// Builds the "_attachments" metadata dictionary (keyed by filename) for the revision stored
/// at the given sequence. Depending on <paramref name="contentOptions"/>, attachment bodies
/// are inlined as base64 "data", suppressed as "follows" (for big attachments), or omitted
/// with "stub"=true. Returns null when the sequence has no attachments or on a SQL error.
/// </summary>
public IDictionary<string, object> GetAttachmentsDictForSequenceWithContent(long sequence, EnumSet<Database.TDContentOptions> contentOptions)
{
    System.Diagnostics.Debug.Assert((sequence > 0));
    Cursor cursor = null;
    string[] args = new string[] { System.Convert.ToString(sequence) };
    try
    {
        cursor = database.RawQuery("SELECT filename, key, type, length, revpos FROM attachments WHERE sequence=?", args);
        if (!cursor.MoveToNext())
        {
            // No rows: this revision has no attachments.
            return null;
        }
        IDictionary<string, object> result = new Dictionary<string, object>();
        while (!cursor.IsAfterLast())
        {
            bool dataSuppressed = false;
            int length = cursor.GetInt(3);
            byte[] keyData = cursor.GetBlob(1);
            BlobKey key = new BlobKey(keyData);
            // The stored key doubles as the digest value — presumably it is the blob's
            // SHA-1 digest (hence the "sha1-" prefix); TODO(review): confirm in BlobStore.
            string digestString = "sha1-" + Base64.EncodeBytes(keyData);
            string dataBase64 = null;
            if (contentOptions.Contains(Database.TDContentOptions.TDIncludeAttachments))
            {
                if (contentOptions.Contains(Database.TDContentOptions.TDBigAttachmentsFollow) && length >= Database.kBigAttachmentLength)
                {
                    // Too big to inline: mark it to "follow" the JSON body instead.
                    dataSuppressed = true;
                }
                else
                {
                    byte[] data = attachments.BlobForKey(key);
                    if (data != null)
                    {
                        dataBase64 = Base64.EncodeBytes(data);
                    }
                    else
                    {
                        // <-- very expensive
                        Log.W(Database.Tag, "Error loading attachment");
                    }
                }
            }
            IDictionary<string, object> attachment = new Dictionary<string, object>();
            if (dataBase64 == null || dataSuppressed == true)
            {
                // No inline body: callers get a stub entry to resolve separately.
                attachment.Put("stub", true);
            }
            if (dataBase64 != null)
            {
                attachment.Put("data", dataBase64);
            }
            if (dataSuppressed == true)
            {
                attachment.Put("follows", true);
            }
            attachment.Put("digest", digestString);
            string contentType = cursor.GetString(2);
            attachment.Put("content_type", contentType);
            attachment.Put("length", length);
            attachment.Put("revpos", cursor.GetInt(4));
            string filename = cursor.GetString(0);
            result.Put(filename, attachment);
            cursor.MoveToNext();
        }
        return result;
    }
    catch (SQLException e)
    {
        Log.E(Database.Tag, "Error getting attachments for sequence", e);
        return null;
    }
    finally
    {
        if (cursor != null)
        {
            cursor.Close();
        }
    }
}
/// <summary>
/// Exercises the TreeChartPerson view-model: null-safe BuildBy, flag set/clear, geometry
/// properties, child/spouse collections, and sex-dependent selection colors.
/// </summary>
public void Test_TreeChartPerson()
{
    using (TreeChartPerson tcPerson = new TreeChartPerson(null))
    {
        Assert.IsNotNull(tcPerson);
        // BuildBy(null) must be tolerated and leave the person empty.
        tcPerson.BuildBy(null);
        Assert.AreEqual(null, tcPerson.Rec);
        Assert.AreEqual(null, tcPerson.Portrait);
        Assert.AreEqual(0, tcPerson.PortraitWidth);
        // Flags can be cleared and set independently.
        tcPerson.SetFlag(PersonFlag.pfDivorced, false);
        Assert.AreEqual(false, tcPerson.HasFlag(PersonFlag.pfDivorced));
        tcPerson.SetFlag(PersonFlag.pfDivorced, true);
        Assert.AreEqual(true, tcPerson.HasFlag(PersonFlag.pfDivorced));
        tcPerson.IsDup = false;
        Assert.AreEqual(false, tcPerson.IsDup);
        tcPerson.IsDup = true;
        Assert.AreEqual(true, tcPerson.IsDup);
        // Geometry defaults to zero until explicitly assigned.
        Assert.AreEqual(0, tcPerson.Height);
        Assert.AreEqual(0, tcPerson.Width);
        Assert.AreEqual(0, tcPerson.PtX);
        tcPerson.PtX = 11;
        Assert.AreEqual(11, tcPerson.PtX);
        Assert.AreEqual(0, tcPerson.PtY);
        tcPerson.PtY = 22;
        Assert.AreEqual(22, tcPerson.PtY);
        tcPerson.Selected = false;
        Assert.AreEqual(false, tcPerson.Selected);
        tcPerson.Selected = true;
        Assert.AreEqual(true, tcPerson.Selected);
        Assert.AreEqual(GDMSex.svUnknown, tcPerson.Sex);
        tcPerson.Sex = GDMSex.svMale;
        Assert.AreEqual(GDMSex.svMale, tcPerson.Sex);
        EnumSet<SpecialUserRef> enums = tcPerson.Signs;
        Assert.IsTrue(enums.IsEmpty());
        // Null children/spouses are ignored; real ones are appended.
        Assert.AreEqual(0, tcPerson.GetChildsCount());
        Assert.AreEqual(0, tcPerson.GetSpousesCount());
        TreeChartPerson child = new TreeChartPerson(null);
        tcPerson.AddChild(null);
        tcPerson.AddChild(child);
        Assert.AreEqual(1, tcPerson.GetChildsCount());
        Assert.AreEqual(child, tcPerson.GetChild(0));
        TreeChartPerson spouse = new TreeChartPerson(null);
        tcPerson.AddSpouse(null);
        tcPerson.AddSpouse(spouse);
        Assert.AreEqual(1, tcPerson.GetSpousesCount());
        Assert.AreEqual(spouse, tcPerson.GetSpouse(0));
        Assert.IsFalse(tcPerson.HasFlag(PersonFlag.pfDescWalk));
        tcPerson.SetFlag(PersonFlag.pfDescWalk);
        Assert.IsTrue(tcPerson.HasFlag(PersonFlag.pfDescWalk));
        tcPerson.BuildBy(null);
        ExtRect psnRt = tcPerson.Rect;
        Assert.IsTrue(psnRt.IsEmpty());
        // Selection color is sex-dependent: male=blue, female=red, unknown=black.
        tcPerson.Sex = GDMSex.svMale;
        var color = ((ColorHandler)tcPerson.GetSelectedColor()).Handle;
        Assert.AreEqual(Color.FromArgb(255, Color.Blue), color);
        tcPerson.Sex = GDMSex.svFemale;
        color = ((ColorHandler)tcPerson.GetSelectedColor()).Handle;
        Assert.AreEqual(Color.FromArgb(255, Color.Red), color);
        tcPerson.Sex = GDMSex.svUnknown;
        color = ((ColorHandler)tcPerson.GetSelectedColor()).Handle;
        Assert.AreEqual(Color.FromArgb(255, Color.Black), color);
    }
}
/// <summary>
/// Convenience overload: creates a VariantContextWriter for <paramref name="location"/>,
/// opening the output stream itself and delegating to the stream-based overload.
/// </summary>
public static VariantContextWriter create(File location, SAMSequenceDictionary refDict, EnumSet<Options> options) =>
    create(location, openOutputStream(location), refDict, options);
/// <summary>Records which attribute fields a subsequent update should apply.</summary>
public virtual void SetUpdateFields(EnumSet<SetAttr3.SetAttrField> updateFields) => this.updateFields = updateFields;
/// <summary>Sets the extended attribute, resolving the path to its full form before delegating.</summary>
/// <exception cref="System.IO.IOException"/>
public override void SetXAttr(Path path, string name, byte[] value, EnumSet<XAttrSetFlag> flag) =>
    base.SetXAttr(FullPath(path), name, value, flag);
/// <summary>Snappy supports no codec modifiers, so the same codec instance is returned unchanged.</summary>
public CompressionCodec modify(EnumSet<Modifier> modifiers) => this;
/// <summary>
/// Verifies that an attachment whose length reaches Database.kBigAttachmentLength is
/// reported as a stub with a "follows" key (not inlined), can be copied between revisions,
/// and can be replaced; finally checks that Compact leaves only one blob in the store.
/// </summary>
/// <exception cref="System.Exception"></exception>
public virtual void TestPutLargeAttachment()
{
    string testAttachmentName = "test_attachment";
    BlobStore attachments = database.Attachments;
    attachments.DeleteBlobs();
    NUnit.Framework.Assert.AreEqual(0, attachments.Count());
    Status status = new Status();
    IDictionary<string, object> rev1Properties = new Dictionary<string, object>();
    rev1Properties["foo"] = 1;
    rev1Properties["bar"] = false;
    RevisionInternal rev1 = database.PutRevision(new RevisionInternal(rev1Properties, database), null, false, status);
    NUnit.Framework.Assert.AreEqual(StatusCode.Created, status.GetCode());
    // Build a body guaranteed to exceed the "big attachment" threshold.
    StringBuilder largeAttachment = new StringBuilder();
    for (int i = 0; i < Database.kBigAttachmentLength; i++)
    {
        largeAttachment.Append("big attachment!");
    }
    byte[] attach1 = Sharpen.Runtime.GetBytesForString(largeAttachment.ToString());
    database.InsertAttachmentForSequenceWithNameAndType(new ByteArrayInputStream(attach1), rev1.Sequence, testAttachmentName, "text/plain", rev1.GetGeneration());
    Attachment attachment = database.GetAttachmentForSequence(rev1.Sequence, testAttachmentName);
    NUnit.Framework.Assert.AreEqual("text/plain", attachment.GetContentType());
    byte[] data = IOUtils.ToByteArray(attachment.GetContent());
    NUnit.Framework.Assert.IsTrue(Arrays.Equals(attach1, data));
    // With TDBigAttachmentsFollow, the dict must carry stub=true and a "follows" key
    // instead of inlining the (huge) base64 data.
    EnumSet<TDContentOptions> contentOptions = EnumSet.Of(TDContentOptions.TDIncludeAttachments, TDContentOptions.TDBigAttachmentsFollow);
    IDictionary<string, object> attachmentDictForSequence = database.GetAttachmentsDictForSequenceWithContent(rev1.Sequence, contentOptions);
    IDictionary<string, object> innerDict = (IDictionary<string, object>)attachmentDictForSequence[testAttachmentName];
    if (!innerDict.ContainsKey("stub"))
    {
        throw new RuntimeException("Expected attachment dict to have 'stub' key");
    }
    if (((bool)innerDict["stub"]) == false)
    {
        throw new RuntimeException("Expected attachment dict 'stub' key to be true");
    }
    if (!innerDict.ContainsKey("follows"))
    {
        throw new RuntimeException("Expected attachment dict to have 'follows' key");
    }
    RevisionInternal rev1WithAttachments = database.GetDocumentWithIDAndRev(rev1.GetDocId(), rev1.GetRevId(), contentOptions);
    // Map<String,Object> rev1PropertiesPrime = rev1WithAttachments.Properties;
    // rev1PropertiesPrime.put("foo", 2);
    IDictionary<string, object> rev1WithAttachmentsProperties = rev1WithAttachments.GetProperties();
    IDictionary<string, object> rev2Properties = new Dictionary<string, object>();
    rev2Properties.Put("_id", rev1WithAttachmentsProperties["_id"]);
    rev2Properties["foo"] = 2;
    RevisionInternal newRev = new RevisionInternal(rev2Properties, database);
    RevisionInternal rev2 = database.PutRevision(newRev, rev1WithAttachments.GetRevId(), false, status);
    NUnit.Framework.Assert.AreEqual(StatusCode.Created, status.GetCode());
    // Carry the attachment forward to the second revision without re-inserting the blob.
    database.CopyAttachmentNamedFromSequenceToSequence(testAttachmentName, rev1WithAttachments.Sequence, rev2.Sequence);
    // Check the 2nd revision's attachment:
    Attachment rev2FetchedAttachment = database.GetAttachmentForSequence(rev2.GetSequence(), testAttachmentName);
    NUnit.Framework.Assert.AreEqual(attachment.GetLength(), rev2FetchedAttachment.GetLength());
    NUnit.Framework.Assert.AreEqual(attachment.GetMetadata(), rev2FetchedAttachment.GetMetadata());
    NUnit.Framework.Assert.AreEqual(attachment.GetContentType(), rev2FetchedAttachment.GetContentType());
    // Add a third revision of the same document:
    IDictionary<string, object> rev3Properties = new Dictionary<string, object>();
    rev3Properties.Put("_id", rev2.Properties["_id"]);
    rev3Properties["foo"] = 3;
    rev3Properties["baz"] = false;
    RevisionInternal rev3 = new RevisionInternal(rev3Properties, database);
    rev3 = database.PutRevision(rev3, rev2.GetRevId(), false, status);
    NUnit.Framework.Assert.AreEqual(StatusCode.Created, status.GetCode());
    // Replace the attachment body entirely on rev3 with a small HTML body.
    byte[] attach3 = Sharpen.Runtime.GetBytesForString("<html><blink>attach3</blink></html>");
    database.InsertAttachmentForSequenceWithNameAndType(new ByteArrayInputStream(attach3), rev3.Sequence, testAttachmentName, "text/html", rev3.GetGeneration());
    // Check the 3rd revision's attachment:
    Attachment rev3FetchedAttachment = database.GetAttachmentForSequence(rev3.GetSequence(), testAttachmentName);
    data = IOUtils.ToByteArray(rev3FetchedAttachment.GetContent());
    NUnit.Framework.Assert.IsTrue(Arrays.Equals(attach3, data));
    NUnit.Framework.Assert.AreEqual("text/html", rev3FetchedAttachment.GetContentType());
    // TODO: why doesn't this work?
    // Assert.assertEquals(attach3.length, rev3FetchedAttachment.getLength());
    ICollection<BlobKey> blobKeys = database.Attachments.AllKeys();
    NUnit.Framework.Assert.AreEqual(2, blobKeys.Count);
    // After compaction only one blob key remains in the store.
    database.Compact();
    blobKeys = database.Attachments.AllKeys();
    NUnit.Framework.Assert.AreEqual(1, blobKeys.Count);
}
// Sharpen conversion stub: returns default(EnumSet<E>) — i.e. null if EnumSet<E> is a
// reference type — instead of the complement of the given set.
// TODO(review): implement, or confirm all callers tolerate the default value.
public static EnumSet <E> complementOf <E>(EnumSet <E> prm1) { return(default(EnumSet <E>)); }
/// <summary>
/// End-to-end attachment lifecycle: insert an attachment, read its dict back as a stub
/// and with inline base64 data, copy it to a second revision, replace it on a third, and
/// verify the blob store contents before and after compaction.
/// </summary>
/// <exception cref="System.Exception"></exception>
public virtual void TestAttachments()
{
    string testAttachmentName = "test_attachment";
    BlobStore attachments = database.Attachments;
    NUnit.Framework.Assert.AreEqual(0, attachments.Count());
    NUnit.Framework.Assert.AreEqual(new HashSet<object>(), attachments.AllKeys());
    Status status = new Status();
    IDictionary<string, object> rev1Properties = new Dictionary<string, object>();
    rev1Properties["foo"] = 1;
    rev1Properties["bar"] = false;
    RevisionInternal rev1 = database.PutRevision(new RevisionInternal(rev1Properties, database), null, false, status);
    NUnit.Framework.Assert.AreEqual(StatusCode.Created, status.GetCode());
    byte[] attach1 = Sharpen.Runtime.GetBytesForString("This is the body of attach1");
    database.InsertAttachmentForSequenceWithNameAndType(new ByteArrayInputStream(attach1), rev1.Sequence, testAttachmentName, "text/plain", rev1.GetGeneration());
    NUnit.Framework.Assert.AreEqual(StatusCode.Created, status.GetCode());
    Attachment attachment = database.GetAttachmentForSequence(rev1.Sequence, testAttachmentName);
    NUnit.Framework.Assert.AreEqual("text/plain", attachment.GetContentType());
    byte[] data = IOUtils.ToByteArray(attachment.GetContent());
    NUnit.Framework.Assert.IsTrue(Arrays.Equals(attach1, data));
    // Without TDIncludeAttachments the dict must be a stub: metadata only, no "data".
    IDictionary<string, object> innerDict = new Dictionary<string, object>();
    innerDict["content_type"] = "text/plain";
    innerDict["digest"] = "sha1-gOHUOBmIMoDCrMuGyaLWzf1hQTE=";
    innerDict["length"] = 27;
    innerDict["stub"] = true;
    innerDict["revpos"] = 1;
    IDictionary<string, object> attachmentDict = new Dictionary<string, object>();
    attachmentDict[testAttachmentName] = innerDict;
    IDictionary<string, object> attachmentDictForSequence = database.GetAttachmentsDictForSequenceWithContent(rev1.Sequence, EnumSet.NoneOf<TDContentOptions>());
    NUnit.Framework.Assert.AreEqual(attachmentDict, attachmentDictForSequence);
    RevisionInternal gotRev1 = database.GetDocumentWithIDAndRev(rev1.GetDocId(), rev1.GetRevId(), EnumSet.NoneOf<TDContentOptions>());
    IDictionary<string, object> gotAttachmentDict = (IDictionary<string, object>)gotRev1.Properties["_attachments"];
    NUnit.Framework.Assert.AreEqual(attachmentDict, gotAttachmentDict);
    // Check the attachment dict, with attachments included:
    Sharpen.Collections.Remove(innerDict, "stub");
    innerDict.Put("data", Base64.EncodeBytes(attach1));
    attachmentDictForSequence = database.GetAttachmentsDictForSequenceWithContent(rev1.Sequence, EnumSet.Of(TDContentOptions.TDIncludeAttachments));
    NUnit.Framework.Assert.AreEqual(attachmentDict, attachmentDictForSequence);
    gotRev1 = database.GetDocumentWithIDAndRev(rev1.GetDocId(), rev1.GetRevId(), EnumSet.Of(TDContentOptions.TDIncludeAttachments));
    gotAttachmentDict = (IDictionary<string, object>)gotRev1.Properties.Get("_attachments");
    NUnit.Framework.Assert.AreEqual(attachmentDict, gotAttachmentDict);
    // Add a second revision that doesn't update the attachment:
    IDictionary<string, object> rev2Properties = new Dictionary<string, object>();
    rev2Properties.Put("_id", rev1.GetDocId());
    rev2Properties["foo"] = 2;
    rev2Properties["bazz"] = false;
    RevisionInternal rev2 = database.PutRevision(new RevisionInternal(rev2Properties, database), rev1.GetRevId(), false, status);
    NUnit.Framework.Assert.AreEqual(StatusCode.Created, status.GetCode());
    database.CopyAttachmentNamedFromSequenceToSequence(testAttachmentName, rev1.GetSequence(), rev2.Sequence);
    // Add a third revision of the same document:
    IDictionary<string, object> rev3Properties = new Dictionary<string, object>();
    rev3Properties.Put("_id", rev2.GetDocId());
    rev3Properties["foo"] = 2;
    rev3Properties["bazz"] = false;
    RevisionInternal rev3 = database.PutRevision(new RevisionInternal(rev3Properties, database), rev2.GetRevId(), false, status);
    NUnit.Framework.Assert.AreEqual(StatusCode.Created, status.GetCode());
    byte[] attach2 = Sharpen.Runtime.GetBytesForString("<html>And this is attach2</html>");
    // NOTE(review): rev2.GetGeneration() is passed although the attachment is inserted for
    // rev3's sequence — possibly the intended revpos, but verify against the Java original.
    database.InsertAttachmentForSequenceWithNameAndType(new ByteArrayInputStream(attach2), rev3.Sequence, testAttachmentName, "text/html", rev2.GetGeneration());
    // Check the 2nd revision's attachment:
    Attachment attachment2 = database.GetAttachmentForSequence(rev2.Sequence, testAttachmentName);
    NUnit.Framework.Assert.AreEqual("text/plain", attachment2.GetContentType());
    data = IOUtils.ToByteArray(attachment2.GetContent());
    NUnit.Framework.Assert.IsTrue(Arrays.Equals(attach1, data));
    // Check the 3rd revision's attachment:
    Attachment attachment3 = database.GetAttachmentForSequence(rev3.Sequence, testAttachmentName);
    NUnit.Framework.Assert.AreEqual("text/html", attachment3.GetContentType());
    data = IOUtils.ToByteArray(attachment3.GetContent());
    NUnit.Framework.Assert.IsTrue(Arrays.Equals(attach2, data));
    // Examine the attachment store:
    NUnit.Framework.Assert.AreEqual(2, attachments.Count());
    ICollection<BlobKey> expected = new HashSet<BlobKey>();
    expected.AddItem(BlobStore.KeyForBlob(attach1));
    expected.AddItem(BlobStore.KeyForBlob(attach2));
    NUnit.Framework.Assert.AreEqual(expected, attachments.AllKeys());
    status = database.Compact();
    // This clears the body of the first revision
    NUnit.Framework.Assert.AreEqual(StatusCode.Ok, status.GetCode());
    NUnit.Framework.Assert.AreEqual(1, attachments.Count());
    ICollection<BlobKey> expected2 = new HashSet<BlobKey>();
    expected2.AddItem(BlobStore.KeyForBlob(attach2));
    NUnit.Framework.Assert.AreEqual(expected2, attachments.AllKeys());
}
// Sharpen conversion stub: returns default(EnumSet<E>) — i.e. null if EnumSet<E> is a
// reference type — instead of a copy of the given set.
// TODO(review): implement, or confirm all callers tolerate the default value.
public static EnumSet <E> copyOf <E>(EnumSet <E> prm1) { return(default(EnumSet <E>)); }
/// <summary>Creates the root "html" Hamlet element for this servlet, emitting an end tag.</summary>
public virtual Hamlet.HTML<WebAppProxyServlet._> Html()
{
    // Endtag ensures the element is closed when rendered.
    var opts = EnumSet.Of(HamletImpl.EOpt.Endtag);
    return new Hamlet.HTML<WebAppProxyServlet._>(this, "html", null, opts);
}
/// <summary>
/// Validates each RuleDependency annotation against the recognizer's actual rule names and
/// versions: reports unknown rule indices as errors, checks the declared version against the
/// highest version found among the rule and its requested dependents (parents/children/
/// ancestors/descendants), and reports version mismatches at the most precise source location.
/// </summary>
private void CheckDependencies(IList<Tuple<RuleDependency, IElement>> dependencies, ITypeMirror recognizerType)
{
    string[] ruleNames = GetRuleNames(recognizerType);
    int[] ruleVersions = GetRuleVersions(recognizerType, ruleNames);
    RuleDependencyProcessor.RuleRelations relations = ExtractRuleRelations(recognizerType);
    foreach (Tuple<RuleDependency, IElement> dependency in dependencies)
    {
        try
        {
            // Skip dependencies declared for a different (non-assignable) recognizer type.
            if (!processingEnv.GetTypeUtils().IsAssignable(GetRecognizerType(dependency.Item1), recognizerType))
            {
                continue;
            }
            // this is the rule in the dependency set with the highest version number
            int effectiveRule = dependency.Item1.Rule();
            if (effectiveRule < 0 || effectiveRule >= ruleVersions.Length)
            {
                // Rule index out of range: anchor the error at the 'rule' property when locatable.
                Tuple<IAnnotationMirror, IAnnotationValue> ruleReferenceElement = FindRuleDependencyProperty(dependency, RuleDependencyProcessor.RuleDependencyProperty.Rule);
                string message = string.Format("Rule dependency on unknown rule {0}@{1} in {2}", dependency.Item1.Rule(), dependency.Item1.Version(), GetRecognizerType(dependency.Item1).ToString());
                if (ruleReferenceElement != null)
                {
                    processingEnv.GetMessager().PrintMessage(Diagnostic.Kind.Error, message, dependency.Item2, ruleReferenceElement.Item1, ruleReferenceElement.Item2);
                }
                else
                {
                    processingEnv.GetMessager().PrintMessage(Diagnostic.Kind.Error, message, dependency.Item2);
                }
                continue;
            }
            EnumSet<Dependents> dependents = EnumSet.Of(Dependents.Self, dependency.Item1.Dependents());
            ReportUnimplementedDependents(dependency, dependents);
            // Tracks rules already version-checked so each is visited at most once.
            BitSet @checked = new BitSet();
            int highestRequiredDependency = CheckDependencyVersion(dependency, ruleNames, ruleVersions, effectiveRule, null);
            if (dependents.Contains(Dependents.Parents))
            {
                BitSet parents = relations.parents[dependency.Item1.Rule()];
                for (int parent = parents.NextSetBit(0); parent >= 0; parent = parents.NextSetBit(parent + 1))
                {
                    if (parent < 0 || parent >= ruleVersions.Length || @checked.Get(parent))
                    {
                        continue;
                    }
                    @checked.Set(parent);
                    int required = CheckDependencyVersion(dependency, ruleNames, ruleVersions, parent, "parent");
                    highestRequiredDependency = Math.Max(highestRequiredDependency, required);
                }
            }
            if (dependents.Contains(Dependents.Children))
            {
                BitSet children = relations.children[dependency.Item1.Rule()];
                for (int child = children.NextSetBit(0); child >= 0; child = children.NextSetBit(child + 1))
                {
                    if (child < 0 || child >= ruleVersions.Length || @checked.Get(child))
                    {
                        continue;
                    }
                    @checked.Set(child);
                    int required = CheckDependencyVersion(dependency, ruleNames, ruleVersions, child, "child");
                    highestRequiredDependency = Math.Max(highestRequiredDependency, required);
                }
            }
            if (dependents.Contains(Dependents.Ancestors))
            {
                BitSet ancestors = relations.GetAncestors(dependency.Item1.Rule());
                for (int ancestor = ancestors.NextSetBit(0); ancestor >= 0; ancestor = ancestors.NextSetBit(ancestor + 1))
                {
                    if (ancestor < 0 || ancestor >= ruleVersions.Length || @checked.Get(ancestor))
                    {
                        continue;
                    }
                    @checked.Set(ancestor);
                    int required = CheckDependencyVersion(dependency, ruleNames, ruleVersions, ancestor, "ancestor");
                    highestRequiredDependency = Math.Max(highestRequiredDependency, required);
                }
            }
            if (dependents.Contains(Dependents.Descendants))
            {
                BitSet descendants = relations.GetDescendants(dependency.Item1.Rule());
                for (int descendant = descendants.NextSetBit(0); descendant >= 0; descendant = descendants.NextSetBit(descendant + 1))
                {
                    if (descendant < 0 || descendant >= ruleVersions.Length || @checked.Get(descendant))
                    {
                        continue;
                    }
                    @checked.Set(descendant);
                    int required = CheckDependencyVersion(dependency, ruleNames, ruleVersions, descendant, "descendant");
                    highestRequiredDependency = Math.Max(highestRequiredDependency, required);
                }
            }
            int declaredVersion = dependency.Item1.Version();
            if (declaredVersion > highestRequiredDependency)
            {
                // Declared version exceeds everything actually found: report at the 'version' property.
                Tuple<IAnnotationMirror, IAnnotationValue> versionElement = FindRuleDependencyProperty(dependency, RuleDependencyProcessor.RuleDependencyProperty.Version);
                string message = string.Format("Rule dependency version mismatch: {0} has maximum dependency version {1} (expected {2}) in {3}", ruleNames[dependency.Item1.Rule()], highestRequiredDependency, declaredVersion, GetRecognizerType(dependency.Item1).ToString());
                if (versionElement != null)
                {
                    processingEnv.GetMessager().PrintMessage(Diagnostic.Kind.Error, message, dependency.Item2, versionElement.Item1, versionElement.Item2);
                }
                else
                {
                    processingEnv.GetMessager().PrintMessage(Diagnostic.Kind.Error, message, dependency.Item2);
                }
            }
        }
        catch (AnnotationTypeMismatchException)
        {
            // Annotation values of unexpected types: downgrade to a warning rather than crash.
            processingEnv.GetMessager().PrintMessage(Diagnostic.Kind.Warning, string.Format("Could not validate rule dependencies for element {0}", dependency.Item2.ToString()), dependency.Item2);
        }
    }
}
/// <summary>
/// Emits a compiler warning when a dependency declares dependents that this processor
/// cannot validate (i.e. not in ImplementedDependents).
/// </summary>
private void ReportUnimplementedDependents(Tuple<RuleDependency, IElement> dependency, EnumSet<Dependents> dependents)
{
    EnumSet<Dependents> unimplemented = dependents.Clone();
    unimplemented.RemoveAll(ImplementedDependents);
    if (unimplemented.IsEmpty())
    {
        return; // everything declared is supported
    }
    // Anchor the warning at the 'dependents' property, falling back to 'rule'.
    Tuple<IAnnotationMirror, IAnnotationValue> dependentsElement =
        FindRuleDependencyProperty(dependency, RuleDependencyProcessor.RuleDependencyProperty.Dependents)
        ?? FindRuleDependencyProperty(dependency, RuleDependencyProcessor.RuleDependencyProperty.Rule);
    string message = string.Format("Cannot validate the following dependents of rule {0}: {1}", dependency.Item1.Rule(), unimplemented);
    if (dependentsElement == null)
    {
        processingEnv.GetMessager().PrintMessage(Diagnostic.Kind.Warning, message, dependency.Item2);
    }
    else
    {
        processingEnv.GetMessager().PrintMessage(Diagnostic.Kind.Warning, message, dependency.Item2, dependentsElement.Item1, dependentsElement.Item2);
    }
}
//$NON-NLS-1$
/// <summary>The URI fields this transport requires: host and path.</summary>
public override ICollection<TransportProtocol.URIishField> GetRequiredFields() =>
    Sharpen.Collections.UnmodifiableSet(
        EnumSet.Of(TransportProtocol.URIishField.HOST, TransportProtocol.URIishField.PATH));
/// <summary>
/// Inserts the _id, _rev and _attachments properties into the JSON data and stores it in rev.
/// </summary>
/// <remarks>
/// Inserts the _id, _rev and _attachments properties into the JSON data and stores it in rev.
/// Rev must already have its revID and sequence properties set.
/// </remarks>
internal void ExpandStoredJSONIntoRevisionWithAttachments(IEnumerable<Byte> json, RevisionInternal rev, EnumSet<TDContentOptions> contentOptions)
{
    var extraProperties = ExtraPropertiesForRevision(rev, contentOptions);
    if (json == null)
    {
        // No stored JSON: the extra properties become the whole body.
        rev.SetProperties(extraProperties);
    }
    else
    {
        rev.SetJson(AppendDictToJSON(json, extraProperties));
    }
}
/// <summary>The URI fields this transport treats as optional: port only.</summary>
public override ICollection<TransportProtocol.URIishField> GetOptionalFields() =>
    Sharpen.Collections.UnmodifiableSet(EnumSet.Of(TransportProtocol.URIishField.PORT));
/// <summary>Constructs an "_attachments" dictionary for a revision, to be inserted in its JSON body.</summary>
/// <remarks>
/// Entries are keyed by filename. Depending on <paramref name="contentOptions"/>, each entry
/// inlines its body as base64 "data", is marked "follows" (big attachments), or is a
/// "stub" with metadata only. Returns null when the sequence has no attachments or on error.
/// </remarks>
internal IDictionary<String, Object> GetAttachmentsDictForSequenceWithContent(long sequence, EnumSet<TDContentOptions> contentOptions)
{
    Debug.Assert((sequence > 0));
    Cursor cursor = null;
    var args = new Object[] { sequence };
    try
    {
        cursor = StorageEngine.RawQuery("SELECT filename, key, type, length, revpos FROM attachments WHERE sequence=@", CommandBehavior.SequentialAccess, args);
        if (!cursor.MoveToNext())
        {
            // No rows: this revision has no attachments.
            return null;
        }
        var result = new Dictionary<String, Object>();
        while (!cursor.IsAfterLast())
        {
            var dataSuppressed = false;
            var filename = cursor.GetString(0);
            var keyData = cursor.GetBlob(1);
            var contentType = cursor.GetString(2);
            var length = cursor.GetInt(3);
            var revpos = cursor.GetInt(4);
            var key = new BlobKey(keyData);
            var digestString = "sha1-" + Convert.ToBase64String(keyData);
            // BUGFIX: this was initialized to String.Empty, so the null checks below never
            // fired — "stub" was never emitted and an empty "data" was always emitted when
            // attachment bodies were not requested. Initialize to null (matches the
            // parallel implementation and the stub/data contract).
            string dataBase64 = null;
            if (contentOptions.Contains(TDContentOptions.TDIncludeAttachments))
            {
                if (contentOptions.Contains(TDContentOptions.TDBigAttachmentsFollow) && length >= Database.BigAttachmentLength)
                {
                    // Too big to inline: mark it to "follow" the JSON body instead.
                    dataSuppressed = true;
                }
                else
                {
                    byte[] data = Attachments.BlobForKey(key);
                    if (data != null)
                    {
                        dataBase64 = Convert.ToBase64String(data);
                    }
                    else
                    {
                        // <-- very expensive
                        Log.W(Database.Tag, "Error loading attachment");
                    }
                }
            }
            var attachment = new Dictionary<string, object>();
            if (dataBase64 == null || dataSuppressed)
            {
                // No inline body: callers get a stub entry they can resolve separately.
                attachment["stub"] = true;
            }
            if (dataBase64 != null)
            {
                attachment["data"] = dataBase64;
            }
            if (dataSuppressed)
            {
                attachment["follows"] = true;
            }
            attachment["digest"] = digestString;
            attachment["content_type"] = contentType;
            attachment["length"] = length;
            attachment["revpos"] = revpos;
            result[filename] = attachment;
            cursor.MoveToNext();
        }
        return result;
    }
    catch (SQLException e)
    {
        Log.E(Database.Tag, "Error getting attachments for sequence", e);
        return null;
    }
    finally
    {
        if (cursor != null)
        {
            cursor.Close();
        }
    }
}
/// <summary>
/// CRUD lifecycle test: create a document, read it back, update it, verify that a stale
/// update and a delete without a rev both raise Conflict, delete it, check the changes
/// feed (with accepting/rejecting filters), and walk the three-entry revision history.
/// </summary>
/// <exception cref="Couchbase.Lite.CouchbaseLiteException"></exception>
public virtual void TestCRUDOperations()
{
    database.AddChangeListener(this);
    string privateUUID = database.PrivateUUID();
    string publicUUID = database.PublicUUID();
    Log.V(Tag, "DB private UUID = '" + privateUUID + "', public UUID = '" + publicUUID + "'");
    NUnit.Framework.Assert.IsTrue(privateUUID.Length >= 20);
    NUnit.Framework.Assert.IsTrue(publicUUID.Length >= 20);
    //create a document
    IDictionary<string, object> documentProperties = new Dictionary<string, object>();
    documentProperties.Put("foo", 1);
    documentProperties.Put("bar", false);
    documentProperties.Put("baz", "touch");
    Body body = new Body(documentProperties);
    RevisionInternal rev1 = new RevisionInternal(body, database);
    Status status = new Status();
    rev1 = database.PutRevision(rev1, null, false, status);
    Log.V(Tag, "Created " + rev1);
    NUnit.Framework.Assert.IsTrue(rev1.GetDocId().Length >= 10);
    NUnit.Framework.Assert.IsTrue(rev1.GetRevId().StartsWith("1-"));
    //read it back
    RevisionInternal readRev = database.GetDocumentWithIDAndRev(rev1.GetDocId(), null, EnumSet.NoneOf<Database.TDContentOptions>());
    NUnit.Framework.Assert.IsNotNull(readRev);
    IDictionary<string, object> readRevProps = readRev.GetProperties();
    NUnit.Framework.Assert.AreEqual(UserProperties(readRevProps), UserProperties(body.GetProperties()));
    //now update it
    documentProperties = readRev.GetProperties();
    documentProperties.Put("status", "updated!");
    body = new Body(documentProperties);
    RevisionInternal rev2 = new RevisionInternal(body, database);
    RevisionInternal rev2input = rev2;
    rev2 = database.PutRevision(rev2, rev1.GetRevId(), false, status);
    Log.V(Tag, "Updated " + rev1);
    NUnit.Framework.Assert.AreEqual(rev1.GetDocId(), rev2.GetDocId());
    NUnit.Framework.Assert.IsTrue(rev2.GetRevId().StartsWith("2-"));
    //read it back
    readRev = database.GetDocumentWithIDAndRev(rev2.GetDocId(), null, EnumSet.NoneOf<Database.TDContentOptions>());
    NUnit.Framework.Assert.IsNotNull(readRev);
    NUnit.Framework.Assert.AreEqual(UserProperties(readRev.GetProperties()), UserProperties(body.GetProperties()));
    // Try to update the first rev, which should fail:
    bool gotExpectedError = false;
    try
    {
        database.PutRevision(rev2input, rev1.GetRevId(), false, status);
    }
    catch (CouchbaseLiteException e)
    {
        gotExpectedError = e.GetCBLStatus().GetCode() == Status.Conflict;
    }
    NUnit.Framework.Assert.IsTrue(gotExpectedError);
    // Check the changes feed, with and without filters:
    RevisionList changes = database.ChangesSince(0, null, null);
    Log.V(Tag, "Changes = " + changes);
    NUnit.Framework.Assert.AreEqual(1, changes.Count);
    // Filter that passes the document through (count stays 1).
    changes = database.ChangesSince(0, null, new _ReplicationFilter_95());
    NUnit.Framework.Assert.AreEqual(1, changes.Count);
    // Filter that rejects the document (count drops to 0).
    changes = database.ChangesSince(0, null, new _ReplicationFilter_105());
    NUnit.Framework.Assert.AreEqual(0, changes.Count);
    // Delete it:
    RevisionInternal revD = new RevisionInternal(rev2.GetDocId(), null, true, database);
    RevisionInternal revResult = null;
    gotExpectedError = false;
    try
    {
        // Deleting without a previous rev ID must conflict.
        revResult = database.PutRevision(revD, null, false, status);
    }
    catch (CouchbaseLiteException e)
    {
        gotExpectedError = e.GetCBLStatus().GetCode() == Status.Conflict;
    }
    NUnit.Framework.Assert.IsTrue(gotExpectedError);
    NUnit.Framework.Assert.IsNull(revResult);
    revD = database.PutRevision(revD, rev2.GetRevId(), false, status);
    NUnit.Framework.Assert.AreEqual(Status.Ok, status.GetCode());
    NUnit.Framework.Assert.AreEqual(revD.GetDocId(), rev2.GetDocId());
    NUnit.Framework.Assert.IsTrue(revD.GetRevId().StartsWith("3-"));
    // Delete nonexistent doc:
    RevisionInternal revFake = new RevisionInternal("fake", null, true, database);
    gotExpectedError = false;
    try
    {
        database.PutRevision(revFake, null, false, status);
    }
    catch (CouchbaseLiteException e)
    {
        gotExpectedError = e.GetCBLStatus().GetCode() == Status.NotFound;
    }
    NUnit.Framework.Assert.IsTrue(gotExpectedError);
    // Read it back (should fail):
    readRev = database.GetDocumentWithIDAndRev(revD.GetDocId(), null, EnumSet.NoneOf<Database.TDContentOptions>());
    NUnit.Framework.Assert.IsNull(readRev);
    // Get Changes feed
    changes = database.ChangesSince(0, null, null);
    NUnit.Framework.Assert.IsTrue(changes.Count == 1);
    // Get Revision History
    IList<RevisionInternal> history = database.GetRevisionHistory(revD);
    NUnit.Framework.Assert.AreEqual(revD, history[0]);
    NUnit.Framework.Assert.AreEqual(rev2, history[1]);
    NUnit.Framework.Assert.AreEqual(rev1, history[2]);
}
/// <summary>
/// Builds the special underscore-prefixed properties for a revision (_id, _rev, _deleted,
/// _attachments, and — depending on <paramref name="contentOptions"/> — _local_seq,
/// _revisions, _revs_info, _conflicts), ready to be merged into its JSON body.
/// </summary>
public IDictionary<string, object> ExtraPropertiesForRevision(RevisionInternal rev, EnumSet<Database.TDContentOptions> contentOptions)
{
    string docId = rev.GetDocId();
    string revId = rev.GetRevId();
    long sequenceNumber = rev.GetSequence();
    System.Diagnostics.Debug.Assert((revId != null));
    System.Diagnostics.Debug.Assert((sequenceNumber > 0));
    // Get attachment metadata, and optionally the contents:
    IDictionary<string, object> attachmentsDict = GetAttachmentsDictForSequenceWithContent(sequenceNumber, contentOptions);
    // Get more optional stuff to put in the properties:
    //OPT: This probably ends up making redundant SQL queries if multiple options are enabled.
    // BUGFIX: 'long' is a non-nullable value type in C#, so 'long localSeq = null' does not
    // compile (a leftover from the Java 'Long'); use the nullable 'long?' instead.
    long? localSeq = null;
    if (contentOptions.Contains(Database.TDContentOptions.TDIncludeLocalSeq))
    {
        localSeq = sequenceNumber;
    }
    IDictionary<string, object> revHistory = null;
    if (contentOptions.Contains(Database.TDContentOptions.TDIncludeRevs))
    {
        revHistory = GetRevisionHistoryDict(rev);
    }
    IList<object> revsInfo = null;
    if (contentOptions.Contains(Database.TDContentOptions.TDIncludeRevsInfo))
    {
        // One {rev, status} entry per historical revision; "missing" wins over "deleted".
        revsInfo = new AList<object>();
        IList<RevisionInternal> revHistoryFull = GetRevisionHistory(rev);
        foreach (RevisionInternal historicalRev in revHistoryFull)
        {
            IDictionary<string, object> revHistoryItem = new Dictionary<string, object>();
            string status = "available";
            if (historicalRev.IsDeleted())
            {
                status = "deleted";
            }
            if (historicalRev.IsMissing())
            {
                status = "missing";
            }
            revHistoryItem.Put("rev", historicalRev.GetRevId());
            revHistoryItem.Put("status", status);
            revsInfo.AddItem(revHistoryItem);
        }
    }
    IList<string> conflicts = null;
    if (contentOptions.Contains(Database.TDContentOptions.TDIncludeConflicts))
    {
        // Only revisions other than 'rev' itself count as conflicts.
        RevisionList revs = GetAllRevisionsOfDocumentID(docId, true);
        if (revs.Count > 1)
        {
            conflicts = new AList<string>();
            foreach (RevisionInternal historicalRev in revs)
            {
                if (!historicalRev.Equals(rev))
                {
                    conflicts.AddItem(historicalRev.GetRevId());
                }
            }
        }
    }
    IDictionary<string, object> result = new Dictionary<string, object>();
    result.Put("_id", docId);
    result.Put("_rev", revId);
    if (rev.IsDeleted())
    {
        result.Put("_deleted", true);
    }
    if (attachmentsDict != null)
    {
        result.Put("_attachments", attachmentsDict);
    }
    if (localSeq != null)
    {
        result.Put("_local_seq", localSeq);
    }
    if (revHistory != null)
    {
        result.Put("_revisions", revHistory);
    }
    if (revsInfo != null)
    {
        result.Put("_revs_info", revsInfo);
    }
    if (conflicts != null)
    {
        result.Put("_conflicts", conflicts);
    }
    return result;
}
/// <summary>Tests all FsEditLogOps that are converted to inotify events.</summary>
/// <exception cref="System.IO.IOException"/>
/// <exception cref="Sharpen.URISyntaxException"/>
/// <exception cref="System.Exception"/>
/// <exception cref="Org.Apache.Hadoop.Hdfs.Inotify.MissingEventsException"/>
public virtual void TestBasic()
{
    Configuration conf = new HdfsConfiguration();
    conf.SetLong(DFSConfigKeys.DfsBlockSizeKey, BlockSize);
    conf.SetBoolean(DFSConfigKeys.DfsNamenodeAclsEnabledKey, true);
    // so that we can get an atime change
    conf.SetLong(DFSConfigKeys.DfsNamenodeAccesstimePrecisionKey, 1);
    MiniQJMHACluster.Builder builder = new MiniQJMHACluster.Builder(conf);
    builder.GetDfsBuilder().NumDataNodes(2);
    MiniQJMHACluster cluster = builder.Build();
    try
    {
        cluster.GetDfsCluster().WaitActive();
        cluster.GetDfsCluster().TransitionToActive(0);
        DFSClient client = new DFSClient(cluster.GetDfsCluster().GetNameNode(0).GetNameNodeAddress(), conf);
        FileSystem fs = cluster.GetDfsCluster().GetFileSystem(0);
        DFSTestUtil.CreateFile(fs, new Path("/file"), BlockSize, (short)1, 0L);
        DFSTestUtil.CreateFile(fs, new Path("/file3"), BlockSize, (short)1, 0L);
        DFSTestUtil.CreateFile(fs, new Path("/file5"), BlockSize, (short)1, 0L);
        DFSInotifyEventInputStream eis = client.GetInotifyEventStream();
        // Phase 1: perform one mutation per edit-log op type. Each call is tagged
        // with the op it writes and the inotify event(s) it should translate into.
        // The verification phase below consumes batches in this exact order.
        client.Rename("/file", "/file4", null);
        // RenameOp -> RenameEvent
        client.Rename("/file4", "/file2");
        // RenameOldOp -> RenameEvent
        // DeleteOp, AddOp -> UnlinkEvent, CreateEvent
        OutputStream os = client.Create("/file2", true, (short)2, BlockSize);
        os.Write(new byte[BlockSize]);
        os.Close();
        // CloseOp -> CloseEvent
        // AddOp -> AppendEvent
        os = client.Append("/file2", BlockSize, EnumSet.Of(CreateFlag.Append), null, null);
        os.Write(new byte[BlockSize]);
        os.Close();
        // CloseOp -> CloseEvent
        Sharpen.Thread.Sleep(10);
        // so that the atime will get updated on the next line
        client.Open("/file2").Read(new byte[1]);
        // TimesOp -> MetadataUpdateEvent
        // SetReplicationOp -> MetadataUpdateEvent
        client.SetReplication("/file2", (short)1);
        // ConcatDeleteOp -> AppendEvent, UnlinkEvent, CloseEvent
        client.Concat("/file2", new string[] { "/file3" });
        client.Delete("/file2", false);
        // DeleteOp -> UnlinkEvent
        client.Mkdirs("/dir", null, false);
        // MkdirOp -> CreateEvent
        // SetPermissionsOp -> MetadataUpdateEvent
        client.SetPermission("/dir", FsPermission.ValueOf("-rw-rw-rw-"));
        // SetOwnerOp -> MetadataUpdateEvent
        client.SetOwner("/dir", "username", "groupname");
        client.CreateSymlink("/dir", "/dir2", false);
        // SymlinkOp -> CreateEvent
        client.SetXAttr("/file5", "user.field", Sharpen.Runtime.GetBytesForString("value"), EnumSet.Of(XAttrSetFlag.Create));
        // SetXAttrOp -> MetadataUpdateEvent
        // RemoveXAttrOp -> MetadataUpdateEvent
        client.RemoveXAttr("/file5", "user.field");
        // SetAclOp -> MetadataUpdateEvent
        client.SetAcl("/file5", AclEntry.ParseAclSpec("user::rwx,user:foo:rw-,group::r--,other::---", true));
        client.RemoveAcl("/file5");
        // SetAclOp -> MetadataUpdateEvent
        client.Rename("/file5", "/dir");
        // RenameOldOp -> RenameEvent
        // Phase 2: drain the event stream and verify one batch per op, in order.
        EventBatch batch = null;
        // RenameOp
        batch = WaitForNextEvents(eis);
        NUnit.Framework.Assert.AreEqual(1, batch.GetEvents().Length);
        long txid = batch.GetTxid();
        NUnit.Framework.Assert.IsTrue(batch.GetEvents()[0].GetEventType() == Event.EventType.Rename);
        Event.RenameEvent re = (Event.RenameEvent)batch.GetEvents()[0];
        NUnit.Framework.Assert.AreEqual("/file4", re.GetDstPath());
        NUnit.Framework.Assert.AreEqual("/file", re.GetSrcPath());
        NUnit.Framework.Assert.IsTrue(re.GetTimestamp() > 0);
        long eventsBehind = eis.GetTxidsBehindEstimate();
        // RenameOldOp
        batch = WaitForNextEvents(eis);
        NUnit.Framework.Assert.AreEqual(1, batch.GetEvents().Length);
        txid = CheckTxid(batch, txid);
        NUnit.Framework.Assert.IsTrue(batch.GetEvents()[0].GetEventType() == Event.EventType.Rename);
        Event.RenameEvent re2 = (Event.RenameEvent)batch.GetEvents()[0];
        NUnit.Framework.Assert.IsTrue(re2.GetDstPath().Equals("/file2"));
        NUnit.Framework.Assert.IsTrue(re2.GetSrcPath().Equals("/file4"));
        // NOTE(review): asserts re.GetTimestamp() (the first rename) instead of
        // re2 — looks like a copy-paste slip; confirm intended target.
        NUnit.Framework.Assert.IsTrue(re.GetTimestamp() > 0);
        // AddOp with overwrite
        batch = WaitForNextEvents(eis);
        NUnit.Framework.Assert.AreEqual(1, batch.GetEvents().Length);
        txid = CheckTxid(batch, txid);
        NUnit.Framework.Assert.IsTrue(batch.GetEvents()[0].GetEventType() == Event.EventType.Create);
        Event.CreateEvent ce = (Event.CreateEvent)batch.GetEvents()[0];
        NUnit.Framework.Assert.IsTrue(ce.GetiNodeType() == Event.CreateEvent.INodeType.File);
        NUnit.Framework.Assert.IsTrue(ce.GetPath().Equals("/file2"));
        NUnit.Framework.Assert.IsTrue(ce.GetCtime() > 0);
        NUnit.Framework.Assert.IsTrue(ce.GetReplication() > 0);
        NUnit.Framework.Assert.IsTrue(ce.GetSymlinkTarget() == null);
        NUnit.Framework.Assert.IsTrue(ce.GetOverwrite());
        NUnit.Framework.Assert.AreEqual(BlockSize, ce.GetDefaultBlockSize());
        // CloseOp
        batch = WaitForNextEvents(eis);
        NUnit.Framework.Assert.AreEqual(1, batch.GetEvents().Length);
        txid = CheckTxid(batch, txid);
        NUnit.Framework.Assert.IsTrue(batch.GetEvents()[0].GetEventType() == Event.EventType.Close);
        Event.CloseEvent ce2 = (Event.CloseEvent)batch.GetEvents()[0];
        NUnit.Framework.Assert.IsTrue(ce2.GetPath().Equals("/file2"));
        NUnit.Framework.Assert.IsTrue(ce2.GetFileSize() > 0);
        NUnit.Framework.Assert.IsTrue(ce2.GetTimestamp() > 0);
        // AppendOp
        batch = WaitForNextEvents(eis);
        NUnit.Framework.Assert.AreEqual(1, batch.GetEvents().Length);
        txid = CheckTxid(batch, txid);
        NUnit.Framework.Assert.IsTrue(batch.GetEvents()[0].GetEventType() == Event.EventType.Append);
        Event.AppendEvent append2 = (Event.AppendEvent)batch.GetEvents()[0];
        NUnit.Framework.Assert.AreEqual("/file2", append2.GetPath());
        NUnit.Framework.Assert.IsFalse(append2.ToNewBlock());
        // CloseOp
        batch = WaitForNextEvents(eis);
        NUnit.Framework.Assert.AreEqual(1, batch.GetEvents().Length);
        txid = CheckTxid(batch, txid);
        NUnit.Framework.Assert.IsTrue(batch.GetEvents()[0].GetEventType() == Event.EventType.Close);
        NUnit.Framework.Assert.IsTrue(((Event.CloseEvent)batch.GetEvents()[0]).GetPath().Equals("/file2"));
        // TimesOp
        batch = WaitForNextEvents(eis);
        NUnit.Framework.Assert.AreEqual(1, batch.GetEvents().Length);
        txid = CheckTxid(batch, txid);
        NUnit.Framework.Assert.IsTrue(batch.GetEvents()[0].GetEventType() == Event.EventType.Metadata);
        Event.MetadataUpdateEvent mue = (Event.MetadataUpdateEvent)batch.GetEvents()[0];
        NUnit.Framework.Assert.IsTrue(mue.GetPath().Equals("/file2"));
        NUnit.Framework.Assert.IsTrue(mue.GetMetadataType() == Event.MetadataUpdateEvent.MetadataType.Times);
        // SetReplicationOp
        batch = WaitForNextEvents(eis);
        NUnit.Framework.Assert.AreEqual(1, batch.GetEvents().Length);
        txid = CheckTxid(batch, txid);
        NUnit.Framework.Assert.IsTrue(batch.GetEvents()[0].GetEventType() == Event.EventType.Metadata);
        Event.MetadataUpdateEvent mue2 = (Event.MetadataUpdateEvent)batch.GetEvents()[0];
        NUnit.Framework.Assert.IsTrue(mue2.GetPath().Equals("/file2"));
        NUnit.Framework.Assert.IsTrue(mue2.GetMetadataType() == Event.MetadataUpdateEvent.MetadataType.Replication);
        NUnit.Framework.Assert.IsTrue(mue2.GetReplication() == 1);
        // ConcatDeleteOp — the only op producing a 3-event batch (append, unlink, close)
        batch = WaitForNextEvents(eis);
        NUnit.Framework.Assert.AreEqual(3, batch.GetEvents().Length);
        txid = CheckTxid(batch, txid);
        NUnit.Framework.Assert.IsTrue(batch.GetEvents()[0].GetEventType() == Event.EventType.Append);
        NUnit.Framework.Assert.IsTrue(((Event.AppendEvent)batch.GetEvents()[0]).GetPath().Equals("/file2"));
        NUnit.Framework.Assert.IsTrue(batch.GetEvents()[1].GetEventType() == Event.EventType.Unlink);
        Event.UnlinkEvent ue2 = (Event.UnlinkEvent)batch.GetEvents()[1];
        NUnit.Framework.Assert.IsTrue(ue2.GetPath().Equals("/file3"));
        NUnit.Framework.Assert.IsTrue(ue2.GetTimestamp() > 0);
        NUnit.Framework.Assert.IsTrue(batch.GetEvents()[2].GetEventType() == Event.EventType.Close);
        Event.CloseEvent ce3 = (Event.CloseEvent)batch.GetEvents()[2];
        NUnit.Framework.Assert.IsTrue(ce3.GetPath().Equals("/file2"));
        NUnit.Framework.Assert.IsTrue(ce3.GetTimestamp() > 0);
        // DeleteOp
        batch = WaitForNextEvents(eis);
        NUnit.Framework.Assert.AreEqual(1, batch.GetEvents().Length);
        txid = CheckTxid(batch, txid);
        NUnit.Framework.Assert.IsTrue(batch.GetEvents()[0].GetEventType() == Event.EventType.Unlink);
        Event.UnlinkEvent ue = (Event.UnlinkEvent)batch.GetEvents()[0];
        NUnit.Framework.Assert.IsTrue(ue.GetPath().Equals("/file2"));
        NUnit.Framework.Assert.IsTrue(ue.GetTimestamp() > 0);
        // MkdirOp
        batch = WaitForNextEvents(eis);
        NUnit.Framework.Assert.AreEqual(1, batch.GetEvents().Length);
        txid = CheckTxid(batch, txid);
        NUnit.Framework.Assert.IsTrue(batch.GetEvents()[0].GetEventType() == Event.EventType.Create);
        Event.CreateEvent ce4 = (Event.CreateEvent)batch.GetEvents()[0];
        NUnit.Framework.Assert.IsTrue(ce4.GetiNodeType() == Event.CreateEvent.INodeType.Directory);
        NUnit.Framework.Assert.IsTrue(ce4.GetPath().Equals("/dir"));
        NUnit.Framework.Assert.IsTrue(ce4.GetCtime() > 0);
        NUnit.Framework.Assert.IsTrue(ce4.GetReplication() == 0);
        NUnit.Framework.Assert.IsTrue(ce4.GetSymlinkTarget() == null);
        // SetPermissionsOp
        batch = WaitForNextEvents(eis);
        NUnit.Framework.Assert.AreEqual(1, batch.GetEvents().Length);
        txid = CheckTxid(batch, txid);
        NUnit.Framework.Assert.IsTrue(batch.GetEvents()[0].GetEventType() == Event.EventType.Metadata);
        Event.MetadataUpdateEvent mue3 = (Event.MetadataUpdateEvent)batch.GetEvents()[0];
        NUnit.Framework.Assert.IsTrue(mue3.GetPath().Equals("/dir"));
        NUnit.Framework.Assert.IsTrue(mue3.GetMetadataType() == Event.MetadataUpdateEvent.MetadataType.Perms);
        NUnit.Framework.Assert.IsTrue(mue3.GetPerms().ToString().Contains("rw-rw-rw-"));
        // SetOwnerOp
        batch = WaitForNextEvents(eis);
        NUnit.Framework.Assert.AreEqual(1, batch.GetEvents().Length);
        txid = CheckTxid(batch, txid);
        NUnit.Framework.Assert.IsTrue(batch.GetEvents()[0].GetEventType() == Event.EventType.Metadata);
        Event.MetadataUpdateEvent mue4 = (Event.MetadataUpdateEvent)batch.GetEvents()[0];
        NUnit.Framework.Assert.IsTrue(mue4.GetPath().Equals("/dir"));
        NUnit.Framework.Assert.IsTrue(mue4.GetMetadataType() == Event.MetadataUpdateEvent.MetadataType.Owner);
        NUnit.Framework.Assert.IsTrue(mue4.GetOwnerName().Equals("username"));
        NUnit.Framework.Assert.IsTrue(mue4.GetGroupName().Equals("groupname"));
        // SymlinkOp
        batch = WaitForNextEvents(eis);
        NUnit.Framework.Assert.AreEqual(1, batch.GetEvents().Length);
        txid = CheckTxid(batch, txid);
        NUnit.Framework.Assert.IsTrue(batch.GetEvents()[0].GetEventType() == Event.EventType.Create);
        Event.CreateEvent ce5 = (Event.CreateEvent)batch.GetEvents()[0];
        NUnit.Framework.Assert.IsTrue(ce5.GetiNodeType() == Event.CreateEvent.INodeType.Symlink);
        NUnit.Framework.Assert.IsTrue(ce5.GetPath().Equals("/dir2"));
        NUnit.Framework.Assert.IsTrue(ce5.GetCtime() > 0);
        NUnit.Framework.Assert.IsTrue(ce5.GetReplication() == 0);
        NUnit.Framework.Assert.IsTrue(ce5.GetSymlinkTarget().Equals("/dir"));
        // SetXAttrOp
        batch = WaitForNextEvents(eis);
        NUnit.Framework.Assert.AreEqual(1, batch.GetEvents().Length);
        txid = CheckTxid(batch, txid);
        NUnit.Framework.Assert.IsTrue(batch.GetEvents()[0].GetEventType() == Event.EventType.Metadata);
        Event.MetadataUpdateEvent mue5 = (Event.MetadataUpdateEvent)batch.GetEvents()[0];
        NUnit.Framework.Assert.IsTrue(mue5.GetPath().Equals("/file5"));
        NUnit.Framework.Assert.IsTrue(mue5.GetMetadataType() == Event.MetadataUpdateEvent.MetadataType.Xattrs);
        NUnit.Framework.Assert.IsTrue(mue5.GetxAttrs().Count == 1);
        NUnit.Framework.Assert.IsTrue(mue5.GetxAttrs()[0].GetName().Contains("field"));
        NUnit.Framework.Assert.IsTrue(!mue5.IsxAttrsRemoved());
        // RemoveXAttrOp
        batch = WaitForNextEvents(eis);
        NUnit.Framework.Assert.AreEqual(1, batch.GetEvents().Length);
        txid = CheckTxid(batch, txid);
        NUnit.Framework.Assert.IsTrue(batch.GetEvents()[0].GetEventType() == Event.EventType.Metadata);
        Event.MetadataUpdateEvent mue6 = (Event.MetadataUpdateEvent)batch.GetEvents()[0];
        NUnit.Framework.Assert.IsTrue(mue6.GetPath().Equals("/file5"));
        NUnit.Framework.Assert.IsTrue(mue6.GetMetadataType() == Event.MetadataUpdateEvent.MetadataType.Xattrs);
        NUnit.Framework.Assert.IsTrue(mue6.GetxAttrs().Count == 1);
        NUnit.Framework.Assert.IsTrue(mue6.GetxAttrs()[0].GetName().Contains("field"));
        NUnit.Framework.Assert.IsTrue(mue6.IsxAttrsRemoved());
        // SetAclOp (1)
        batch = WaitForNextEvents(eis);
        NUnit.Framework.Assert.AreEqual(1, batch.GetEvents().Length);
        txid = CheckTxid(batch, txid);
        NUnit.Framework.Assert.IsTrue(batch.GetEvents()[0].GetEventType() == Event.EventType.Metadata);
        Event.MetadataUpdateEvent mue7 = (Event.MetadataUpdateEvent)batch.GetEvents()[0];
        NUnit.Framework.Assert.IsTrue(mue7.GetPath().Equals("/file5"));
        NUnit.Framework.Assert.IsTrue(mue7.GetMetadataType() == Event.MetadataUpdateEvent.MetadataType.Acls);
        NUnit.Framework.Assert.IsTrue(mue7.GetAcls().Contains(AclEntry.ParseAclEntry("user::rwx", true)));
        // SetAclOp (2) — RemoveAcl also logs a SetAclOp, with a null ACL list
        batch = WaitForNextEvents(eis);
        NUnit.Framework.Assert.AreEqual(1, batch.GetEvents().Length);
        txid = CheckTxid(batch, txid);
        NUnit.Framework.Assert.IsTrue(batch.GetEvents()[0].GetEventType() == Event.EventType.Metadata);
        Event.MetadataUpdateEvent mue8 = (Event.MetadataUpdateEvent)batch.GetEvents()[0];
        NUnit.Framework.Assert.IsTrue(mue8.GetPath().Equals("/file5"));
        NUnit.Framework.Assert.IsTrue(mue8.GetMetadataType() == Event.MetadataUpdateEvent.MetadataType.Acls);
        NUnit.Framework.Assert.IsTrue(mue8.GetAcls() == null);
        // RenameOp (2)
        batch = WaitForNextEvents(eis);
        NUnit.Framework.Assert.AreEqual(1, batch.GetEvents().Length);
        txid = CheckTxid(batch, txid);
        NUnit.Framework.Assert.IsTrue(batch.GetEvents()[0].GetEventType() == Event.EventType.Rename);
        Event.RenameEvent re3 = (Event.RenameEvent)batch.GetEvents()[0];
        NUnit.Framework.Assert.IsTrue(re3.GetDstPath().Equals("/dir/file5"));
        NUnit.Framework.Assert.IsTrue(re3.GetSrcPath().Equals("/file5"));
        // NOTE(review): asserts re.GetTimestamp() instead of re3 — likely the same
        // copy-paste slip as above; confirm intended target.
        NUnit.Framework.Assert.IsTrue(re.GetTimestamp() > 0);
        // Poll() returns null when there are no further events
        NUnit.Framework.Assert.IsTrue(eis.Poll() == null);
        // make sure the estimate hasn't changed since the above assertion
        // tells us that we are fully caught up to the current namesystem state
        // and we should not have been behind at all when eventsBehind was set
        // either, since there were few enough events that they should have all
        // been read to the client during the first poll() call
        NUnit.Framework.Assert.IsTrue(eis.GetTxidsBehindEstimate() == eventsBehind);
    }
    finally
    {
        cluster.Shutdown();
    }
}
public IDictionary<string, object> DocumentPropertiesFromJSON(byte[] json, string docId, string revId, bool deleted, long sequence, EnumSet<Database.TDContentOptions > contentOptions) { RevisionInternal rev = new RevisionInternal(docId, revId, deleted, this); rev.SetSequence(sequence); IDictionary<string, object> extra = ExtraPropertiesForRevision(rev, contentOptions ); if (json == null) { return extra; } IDictionary<string, object> docProperties = null; try { docProperties = Manager.GetObjectMapper().ReadValue<IDictionary>(json); docProperties.PutAll(extra); return docProperties; } catch (Exception e) { Log.E(Database.Tag, "Error serializing properties to JSON", e); } return docProperties; }
internal WorkLog(EnumSet <Transaction> transactions) { this.TransactionsConflict = transactions; }
public RevisionInternal LoadRevisionBody(RevisionInternal rev, EnumSet<Database.TDContentOptions > contentOptions) { if (rev.GetBody() != null && contentOptions == EnumSet.NoneOf<Database.TDContentOptions >() && rev.GetSequence() != 0) { return rev; } System.Diagnostics.Debug.Assert(((rev.GetDocId() != null) && (rev.GetRevId() != null ))); Cursor cursor = null; Status result = new Status(Status.NotFound); try { // TODO: on ios this query is: // TODO: "SELECT sequence, json FROM revs WHERE doc_id=? AND revid=? LIMIT 1" string sql = "SELECT sequence, json FROM revs, docs WHERE revid=? AND docs.docid=? AND revs.doc_id=docs.doc_id LIMIT 1"; string[] args = new string[] { rev.GetRevId(), rev.GetDocId() }; cursor = database.RawQuery(sql, args); if (cursor.MoveToNext()) { result.SetCode(Status.Ok); rev.SetSequence(cursor.GetLong(0)); ExpandStoredJSONIntoRevisionWithAttachments(cursor.GetBlob(1), rev, contentOptions ); } } catch (SQLException e) { Log.E(Database.Tag, "Error loading revision body", e); throw new CouchbaseLiteException(Status.InternalServerError); } finally { if (cursor != null) { cursor.Close(); } } if (result.GetCode() == Status.NotFound) { throw new CouchbaseLiteException(result); } return rev; }
public abstract void SetSchedulerResourceTypes(EnumSet <YarnServiceProtos.SchedulerResourceTypes > types);
public virtual void SetContentOptions(EnumSet<Database.TDContentOptions> contentOptions ) { this.contentOptions = contentOptions; }
public virtual Hamlet.HTML <HtmlPage._> Html() { return(new Hamlet.HTML <HtmlPage._>(this, "html", null, EnumSet.Of(HamletImpl.EOpt .Endtag))); }
public override object Clone() { EnumSet newObj = new EnumSet(this); foreach (_ElementType v in this.ArrayList) newObj.ArrayList.Add(v); return newObj; }
//JAVA TO C# CONVERTER WARNING: 'final' parameters are not allowed in .NET: //ORIGINAL LINE: public static boolean isBCFOutput(final java.io.File location, final java.util.EnumSet<Options> options) public static bool isBCFOutput(File location, EnumSet<Options> options) { return options.contains(Options.FORCE_BCF) || (location != null && location.Name.contains(".bcf")); }