// public methods
/// <summary>
/// Authenticates the connection against the given database using the
/// MONGODB-CR nonce/key handshake (getnonce followed by authenticate).
/// </summary>
/// <param name="connection">The connection.</param>
/// <param name="credential">The credential.</param>
public void Authenticate(MongoConnection connection, MongoCredential credential)
{
    // step 1: request a nonce from the server
    var getNonceCommand = new CommandDocument("getnonce", 1);
    var getNonceResult = connection.RunCommand(credential.Source, QueryFlags.None, getNonceCommand, false);
    if (!getNonceResult.Ok)
    {
        throw new MongoAuthenticationException(
            "Error getting nonce for authentication.",
            new MongoCommandException(getNonceResult));
    }

    // step 2: derive the key from nonce, user name and the password digest
    var nonce = getNonceResult.Response["nonce"].AsString;
    var password = ((PasswordEvidence)credential.Evidence).Password;
    var passwordDigest = MongoUtils.Hash(credential.Username + ":mongo:" + password);
    var key = MongoUtils.Hash(nonce + credential.Username + passwordDigest);

    // step 3: send the authenticate command carrying the computed key
    var authenticateCommand = new CommandDocument
    {
        { "authenticate", 1 },
        { "user", credential.Username },
        { "nonce", nonce },
        { "key", key }
    };
    var authenticateResult = connection.RunCommand(credential.Source, QueryFlags.None, authenticateCommand, false);
    if (!authenticateResult.Ok)
    {
        var message = string.Format("Invalid credential for database '{0}'.", credential.Source);
        throw new MongoAuthenticationException(
            message,
            new MongoCommandException(authenticateResult));
    }
}
// private methods
/// <summary>
/// Gets the value of the tracked server parameter, preferring an
/// environment-variable override when one is set.
/// </summary>
/// <param name="context">The feature context supplying the connection.</param>
/// <returns>The parameter value reported by the server (or the override).</returns>
private BsonValue GetParameterValue(FeatureContext context)
{
    // An environment variable may supply the value, which is useful when
    // authentication prevents use of the getParameter command.
    var environmentVariableName = "mongod." + _parameterName;
    var overrideValue = Environment.GetEnvironmentVariable(environmentVariableName);
    if (overrideValue != null)
    {
        return overrideValue;
    }

    var getParameterCommand = new CommandDocument
    {
        { "getParameter", 1 },
        { _parameterName, 1 }
    };
    var operation = new CommandOperation<CommandResult>(
        "admin", // databaseName
        new BsonBinaryReaderSettings(), // readerSettings
        new BsonBinaryWriterSettings(), // writerSettings
        getParameterCommand,
        QueryFlags.SlaveOk,
        null, // options
        null, // readPreference
        null, // serializationOptions
        BsonSerializer.LookupSerializer(typeof(CommandResult)) // resultSerializer
    );
    var commandResult = operation.Execute(context.Connection);
    return commandResult.Response[_parameterName];
}
// public methods
/// <summary>
/// Authenticates the connection against the given database using the
/// MONGODB-CR handshake; command failures are wrapped in
/// MongoAuthenticationException.
/// </summary>
/// <param name="connection">The connection.</param>
/// <param name="credential">The credential.</param>
public void Authenticate(MongoConnection connection, MongoCredential credential)
{
    // phase 1: obtain a nonce from the server
    string nonce;
    try
    {
        var getNonceCommand = new CommandDocument("getnonce", 1);
        var getNonceResult = RunCommand(connection, credential.Source, getNonceCommand);
        nonce = getNonceResult.Response["nonce"].AsString;
    }
    catch (MongoCommandException ex)
    {
        throw new MongoAuthenticationException("Error getting nonce for authentication.", ex);
    }

    // phase 2: derive the key and send the authenticate command
    try
    {
        var evidence = (PasswordEvidence)credential.Evidence;
        var passwordDigest = evidence.ComputeMongoCRPasswordDigest(credential.Username);
        var key = MongoUtils.Hash(nonce + credential.Username + passwordDigest);
        var authenticateCommand = new CommandDocument
        {
            { "authenticate", 1 },
            { "user", credential.Username },
            { "nonce", nonce },
            { "key", key }
        };
        RunCommand(connection, credential.Source, authenticateCommand);
    }
    catch (MongoCommandException ex)
    {
        var message = string.Format("Invalid credential for database '{0}'.", credential.Source);
        throw new MongoAuthenticationException(message, ex);
    }
}
// public methods
/// <summary>
/// Determines whether the feature dependency is met by reading the tracked
/// server parameter via the getParameter command on the admin database.
/// </summary>
/// <param name="context">The feature context supplying server and connection.</param>
/// <returns>The parameter value interpreted as a boolean.</returns>
public bool IsMet(FeatureContext context)
{
    var getParameterCommand = new CommandDocument
    {
        { "getParameter", 1 },
        { _parameterName, 1 }
    };
    var commandResult = context.ServerInstance.RunCommandAs<CommandResult>(context.Connection, "admin", getParameterCommand);
    return commandResult.Response[_parameterName].ToBoolean();
}
/// <summary>
/// Sends the buffered message, optionally piggy-backing a getLastError command
/// in the same network round trip when a write concern is enabled, and maps
/// any reported error to an exception via ExceptionMapper.
/// </summary>
/// <param name="connection">The connection to send on.</param>
/// <param name="buffer">The buffer already containing the primary message.</param>
/// <param name="requestId">The request id of the primary message.</param>
/// <param name="readerSettings">Settings for reading the reply.</param>
/// <param name="writerSettings">Settings for writing the getLastError query.</param>
/// <param name="writeConcern">The write concern to apply.</param>
/// <returns>The getLastError result, or null when the write concern is disabled.</returns>
protected WriteConcernResult SendMessageWithWriteConcern(
    MongoConnection connection,
    BsonBuffer buffer,
    int requestId,
    BsonBinaryReaderSettings readerSettings,
    BsonBinaryWriterSettings writerSettings,
    WriteConcern writeConcern)
{
    CommandDocument getLastErrorCommand = null;
    if (writeConcern.Enabled)
    {
        // translate the write concern components into getLastError fields,
        // omitting each one that was not specified
        var fsync = (writeConcern.FSync == null) ? null : (BsonValue)writeConcern.FSync;
        var journal = (writeConcern.Journal == null) ? null : (BsonValue)writeConcern.Journal;
        var w = (writeConcern.W == null) ? null : writeConcern.W.ToGetLastErrorWValue();
        var wTimeout = (writeConcern.WTimeout == null) ? null : (BsonValue)(int)writeConcern.WTimeout.Value.TotalMilliseconds;
        getLastErrorCommand = new CommandDocument
        {
            { "getlasterror", 1 }, // use all lowercase for backward compatibility
            { "fsync", fsync, fsync != null },
            { "j", journal, journal != null },
            { "w", w, w != null },
            { "wtimeout", wTimeout, wTimeout != null }
        };
        // piggy back on network transmission for message
        var getLastErrorMessage = new MongoQueryMessage(writerSettings, DatabaseName + ".$cmd", QueryFlags.None, 0, 1, getLastErrorCommand, null);
        getLastErrorMessage.WriteToBuffer(buffer);
    }
    connection.SendMessage(buffer, requestId);
    WriteConcernResult writeConcernResult = null;
    if (writeConcern.Enabled)
    {
        // read the getLastError reply that follows the primary message
        var writeConcernResultSerializer = BsonSerializer.LookupSerializer(typeof(WriteConcernResult));
        var replyMessage = connection.ReceiveMessage<WriteConcernResult>(readerSettings, writeConcernResultSerializer, null);
        if (replyMessage.NumberReturned == 0)
        {
            throw new MongoCommandException("Command 'getLastError' failed. No response returned");
        }
        writeConcernResult = replyMessage.Documents[0];
        writeConcernResult.Command = getLastErrorCommand;
        // let the mapper decide whether the response represents an error
        var mappedException = ExceptionMapper.Map(writeConcernResult);
        if (mappedException != null)
        {
            throw mappedException;
        }
    }
    return writeConcernResult;
}
// public methods
/// <summary>
/// Authenticates the connection against the given database by driving the
/// SASL conversation (saslStart followed by saslContinue steps) until the
/// server reports the exchange is done.
/// </summary>
/// <param name="connection">The connection.</param>
/// <param name="credential">The credential.</param>
public void Authenticate(MongoConnection connection, MongoCredential credential)
{
    using (var conversation = new SaslConversation())
    {
        var currentStep = _mechanism.Initialize(connection, credential);
        var command = new CommandDocument
        {
            { "saslStart", 1 },
            { "mechanism", _mechanism.Name },
            { "payload", currentStep.BytesToSendToServer }
        };

        while (true)
        {
            CommandResult result;
            try
            {
                result = RunCommand(connection, credential.Source, command);
            }
            catch (MongoCommandException ex)
            {
                // prefer the server-reported code/message when both are present
                // (fix: error message previously misspelled "occured")
                var message = "Unknown error occurred during authentication.";
                var code = ex.CommandResult.Code;
                var errmsg = ex.CommandResult.ErrorMessage;
                if (code.HasValue && errmsg != null)
                {
                    message = string.Format("Error: {0} - {1}", code, errmsg);
                }
                throw new MongoSecurityException(message, ex);
            }

            // the server signals completion via the "done" field
            if (result.Response["done"].AsBoolean)
            {
                break;
            }

            // feed the server's payload into the mechanism and continue the conversation
            currentStep = currentStep.Transition(conversation, result.Response["payload"].AsByteArray);
            command = new CommandDocument
            {
                { "saslContinue", 1 },
                { "conversationId", result.Response["conversationId"].AsInt32 },
                { "payload", currentStep.BytesToSendToServer }
            };
        }
    }
}
// public methods
/// <summary>
/// Authenticates the specified connection with the given credential.
/// </summary>
/// <param name="connection">The connection.</param>
/// <param name="credential">The credential.</param>
public void Authenticate(MongoConnection connection, MongoCredential credential)
{
    // only the mechanism name and user name are sent in the command itself
    var authenticateCommand = new CommandDocument
    {
        { "authenticate", 1 },
        { "mechanism", Name },
        { "user", credential.Username }
    };
    try
    {
        RunCommand(connection, credential.Source, authenticateCommand);
    }
    catch (MongoCommandException ex)
    {
        var message = string.Format("Unable to authenticate '{0}' using '{1}'.", credential.Username, Name);
        throw new MongoAuthenticationException(message, ex);
    }
}
// private methods
/// <summary>
/// Gets the value of the tracked server parameter, preferring an
/// environment-variable override when one is set.
/// </summary>
/// <param name="context">The feature context supplying server and connection.</param>
/// <returns>The parameter value reported by the server (or the override).</returns>
private BsonValue GetParameterValue(FeatureContext context)
{
    // An environment variable may supply the value, which is useful when
    // authentication prevents use of the getParameter command.
    var overrideValue = Environment.GetEnvironmentVariable("mongod." + _parameterName);
    if (overrideValue != null)
    {
        return overrideValue;
    }

    var getParameterCommand = new CommandDocument
    {
        { "getParameter", 1 },
        { _parameterName, 1 }
    };
    var commandResult = context.ServerInstance.RunCommandAs<CommandResult>(context.Connection, "admin", getParameterCommand);
    return commandResult.Response[_parameterName];
}
// public methods
/// <summary>
/// Authenticates the connection against the given database by driving the
/// SASL conversation (saslStart followed by saslContinue steps) until the
/// server reports the exchange is done.
/// </summary>
/// <param name="connection">The connection.</param>
/// <param name="credentials">The credentials.</param>
public void Authenticate(MongoConnection connection, MongoCredentials credentials)
{
    using (var conversation = new SaslConversation())
    {
        var currentStep = _mechanism.Initialize(connection, credentials);
        var command = new CommandDocument
        {
            { "saslStart", 1 },
            { "mechanism", _mechanism.Name },
            { "payload", currentStep.BytesToSendToServer }
        };
        while (true)
        {
            var result = connection.RunCommand(credentials.Source, QueryFlags.SlaveOk, command, true);
            // FIX: the response may not contain a "code" element (the indexer
            // would throw on a missing key); default to 0, which means "no error"
            var code = result.Response.GetValue("code", 0).ToInt32();
            if (code != 0)
            {
                HandleError(result, code);
            }
            // the server signals completion via the "done" field
            if (result.Response["done"].AsBoolean)
            {
                break;
            }
            // feed the server's payload into the mechanism and continue the conversation
            currentStep = currentStep.Transition(conversation, result.Response["payload"].AsByteArray);
            command = new CommandDocument
            {
                { "saslContinue", 1 },
                { "conversationId", result.Response["conversationId"].AsInt32 },
                { "payload", currentStep.BytesToSendToServer }
            };
        }
    }
}
/// <summary>
/// Returns the most recent exception-log entries for an action (or for an
/// exception-message prefix) as the raw JSON of the aggregate command response.
/// </summary>
/// <param name="action">Action name, or an exception-message prefix (detected heuristically below).</param>
/// <param name="n">Maximum number of entries to return (capped at 20).</param>
/// <param name="minLv">-1: match by Action/ExMsg with Severity 1/2/3 (the chart only shows exceptions with severity 1, 2 and 3); any other value: match by Action/ExMsg with Severity == minLv.</param>
/// <returns>The command response serialized as JSON, or an empty string on failure.</returns>
public string GetTopNActionDetails(string action, int n, int minLv)
{
    // NOTE(review): hard-coded 2-second delay before querying — presumably to
    // let recent writes land first; confirm it is still required.
    System.Threading.Thread.Sleep(2000);
    if (n > 20)
    {
        n = 20; // cap the page size
    }
    var url = new MongoUrl(conString);
    var client = new MongoClient(url);
    var server = client.GetServer();
    MongoDatabase db = server.GetDatabase(url.DatabaseName);
    // known exception-message prefixes: when "action" starts with one of these
    // it is treated as an ExMsg filter rather than an Action filter
    var exMsgList = new List<string>
    {
        "can not match tracking number while marking not wish post orders.",
        "can not match tracking number while marking orders tracking number repeatly.",
        "IDH.Service.Bus.Email.EmailService::SynchronousAccountMessage",
        "Failed to mark wish track-number, order id:",
        "IDH.Service.Bus.Email.EmailSynCompleteEvent"
    };
    var actionIsExMsg = false;
    if (exMsgList.Any(x => action.StartsWith(x)))
    {
        actionIsExMsg = true;
    }
    else if (action.Split(':').Length > 1 || action.Split(',').Length > 1)
    {
        // heuristic: action names contain neither ':' nor ','
        actionIsExMsg = true;
    }
    var isFromChartRequest = minLv == -1;
    // pipeline: match today's entries (day start shifted by +8 hours) for the
    // action/message and severity, newest first, limited to n documents
    var pipeline = new BsonArray
    {
        new BsonDocument
        {
            { "$match", new BsonDocument
                {
                    { "ExTime", new BsonDocument
                        {
                            { "$gte", DateTime.Now.Date.AddHours(8) // "ISODate(\"2018-04-17T08:00:00.000+0800\")"
                            }
                        }
                    },
                    { actionIsExMsg?"ExMsg":"Action", action },
                    { "Severity", new BsonDocument
                        {
                            // chart requests show severities up to 3; otherwise exact match
                            { isFromChartRequest?"$lte":"$eq", isFromChartRequest?3:minLv }
                        }
                    },
                }
            }
        },
        new BsonDocument
        {
            { "$sort", new BsonDocument
                {
                    { "ExTime", -1 },
                }
            }
        },
        new BsonDocument { { "$limit", n } },
    };
    var command = new CommandDocument { { "aggregate", "ExceptionLog" }, { "pipeline", pipeline } };
    var result = "";
    var cmdResult = db.RunCommand(command);
    if (cmdResult.Ok)
    {
        if (cmdResult.Response != null)
        {
            result = cmdResult.Response.ToJson();
        }
    }
    return (result);
}
/// <summary>
/// Chat-command handler that adds a new custom command for the channel.
/// Parses the arguments, persists the command document, and replies in chat
/// with a success, already-exists, or usage message.
/// </summary>
/// <param name="sender">The event sender (unused).</param>
/// <param name="e">The received chat-command event arguments.</param>
private async void CustomCommand_OnAddCommand(object sender, OnChatCommandReceivedArgs e)
{
    // async void is tolerable only because this is an event handler
    // require at least permission level 2 to add commands
    if (!await Permission.Can(e.Command, false, 2).ConfigureAwait(false))
    {
        return;
    }

    Match commandMatch = this._commandAddEditRegex.Match(e.Command.ArgumentsAsString);
    if (commandMatch.Success)
    {
        // command names are stored lower-cased
        string command = commandMatch.Groups["command"].Value.ToLowerInvariant();
        if (!await PluginManager.DoesCustomChatCommandExistAsync(e.Command.ChatMessage.RoomId, command).ConfigureAwait(false))
        {
            UserLevels userLevel = GetUserLevelFromTag(commandMatch.Groups["userlevel"].Value);
            string response = commandMatch.Groups["response"].Value;

            // TODO: Register the custom role
            // persist the new command document for this channel
            IMongoCollection<CommandDocument> collection = DatabaseClient.Instance.MongoDatabase.GetCollection<CommandDocument>(CommandDocument.CollectionName);

            CommandDocument commandDocument = new CommandDocument
            {
                ChannelId = e.Command.ChatMessage.RoomId,
                Command = command,
                Response = response,
                UserLevel = userLevel
            };

            await collection.InsertOneAsync(commandDocument).ConfigureAwait(false);

            // Command added.
            TwitchLibClient.Instance.SendMessage(e.Command.ChatMessage.Channel,
                await I18n.Instance.GetAndFormatWithAsync("CustomCommand", "AddSuccess", e.Command.ChatMessage.RoomId,
                    new
                    {
                        CommandPrefix = PluginManager.Instance.ChatCommandIdentifier,
                        Command = command,
                        Response = response,
                        User = e.Command.ChatMessage.Username,
                        Sender = e.Command.ChatMessage.Username,
                        e.Command.ChatMessage.DisplayName
                    },
                    "@{DisplayName}, the command {CommandPrefix}{Command} has been added with the response: {Response}").ConfigureAwait(false));
        }
        else
        {
            // Command exists.
            TwitchLibClient.Instance.SendMessage(e.Command.ChatMessage.Channel,
                await I18n.Instance.GetAndFormatWithAsync("CustomCommand", "AddAlreadyExists", e.Command.ChatMessage.RoomId,
                    new
                    {
                        CommandPrefix = PluginManager.Instance.ChatCommandIdentifier,
                        Command = command,
                        User = e.Command.ChatMessage.Username,
                        Sender = e.Command.ChatMessage.Username,
                        e.Command.ChatMessage.DisplayName
                    },
                    "@{DisplayName}, the command {CommandPrefix}{Command} cannot be added as it already exists.").ConfigureAwait(false));
        }
    }
    else
    {
        // Wrong syntax.
        TwitchLibClient.Instance.SendMessage(e.Command.ChatMessage.Channel,
            await I18n.Instance.GetAndFormatWithAsync("CustomCommand", "AddUsage", e.Command.ChatMessage.RoomId,
                new
                {
                    CommandPrefix = PluginManager.Instance.ChatCommandIdentifier,
                    User = e.Command.ChatMessage.Username,
                    Sender = e.Command.ChatMessage.Username,
                    e.Command.ChatMessage.DisplayName
                },
                "@{DisplayName}, Adds a custom command. Usage: {CommandPrefix}command add {CommandPrefix}[command] [permission (optional)] [response]").ConfigureAwait(false));
    }
}
/// <summary>
/// Updates the GridFS files document with the final length and, when
/// requested, the server-computed MD5 hash.
/// </summary>
private void UpdateMetadata()
{
    // pin the whole update to the primary for the duration of the request
    using (_fileInfo.Server.RequestStart(null, ReadPreference.Primary))
    {
        var gridFS = new MongoGridFS(_fileInfo.Server, _fileInfo.DatabaseName, _fileInfo.GridFSSettings);
        var database = gridFS.GetDatabase(ReadPreference.Primary);
        var filesCollection = gridFS.GetFilesCollection(database);

        // recompute the server-side MD5 only when requested; otherwise store null
        BsonValue md5 = BsonNull.Value;
        if (_updateMD5)
        {
            var md5Command = new CommandDocument
            {
                { "filemd5", _fileInfo.Id },
                { "root", gridFS.Settings.Root }
            };
            md5 = database.RunCommand(md5Command).Response["md5"].AsString;
        }

        filesCollection.Update(
            Query.EQ("_id", _fileInfo.Id),
            Update.Set("length", _length).Set("md5", md5));
    }
}
/// <summary>
/// Writes the message to the stream, optionally piggy-backing a getLastError
/// command in the same transmission when a write concern is enabled, and
/// returns the command/request id needed to read the reply later.
/// </summary>
/// <param name="connection">The connection to send on.</param>
/// <param name="stream">The stream already containing the primary message.</param>
/// <param name="requestId">The request id of the primary message.</param>
/// <param name="readerSettings">Settings for reading replies (not used here).</param>
/// <param name="writerSettings">Settings for writing the getLastError query.</param>
/// <param name="writeConcern">The write concern to apply.</param>
/// <returns>The result carrying the getLastError command and its request id (both unset when disabled).</returns>
protected SendMessageWithWriteConcernResult SendMessageWithWriteConcern(
    MongoConnection connection,
    Stream stream,
    int requestId,
    BsonBinaryReaderSettings readerSettings,
    BsonBinaryWriterSettings writerSettings,
    WriteConcern writeConcern)
{
    var result = new SendMessageWithWriteConcernResult();
    if (writeConcern.Enabled)
    {
        var maxDocumentSize = connection.ServerInstance.MaxDocumentSize;
        // translate the write concern components into getLastError fields,
        // omitting each one that was not specified
        var fsync = (writeConcern.FSync == null) ? null : (BsonValue)writeConcern.FSync;
        var journal = (writeConcern.Journal == null) ? null : (BsonValue)writeConcern.Journal;
        var w = (writeConcern.W == null) ? null : writeConcern.W.ToGetLastErrorWValue();
        var wTimeout = (writeConcern.WTimeout == null) ? null : (BsonValue)(int)writeConcern.WTimeout.Value.TotalMilliseconds;
        var getLastErrorCommand = new CommandDocument
        {
            { "getlasterror", 1 }, // use all lowercase for backward compatibility
            { "fsync", fsync, fsync != null },
            { "j", journal, journal != null },
            { "w", w, w != null },
            { "wtimeout", wTimeout, wTimeout != null }
        };
        // piggy back on network transmission for message
        var getLastErrorMessage = new MongoQueryMessage(writerSettings, DatabaseName + ".$cmd", QueryFlags.None, maxDocumentSize, 0, 1, getLastErrorCommand, null);
        getLastErrorMessage.WriteTo(stream);
        result.GetLastErrorCommand = getLastErrorCommand;
        result.GetLastErrorRequestId = getLastErrorMessage.RequestId;
    }
    connection.SendMessage(stream, requestId);
    return result;
}
/// <summary>
/// Sends the request message over the connection, optionally piggy-backing a
/// getLastError command when a write concern is enabled, and converts any
/// reported error into a WriteConcernException.
/// </summary>
/// <param name="message">The request message (already assembled).</param>
/// <param name="writeConcern">The write concern to apply; may be null.</param>
/// <param name="databaseName">The database against which getLastError is run.</param>
/// <returns>The getLastError result, or null when no write concern applies.</returns>
internal WriteConcernResult SendMessage(MongoRequestMessage message, WriteConcern writeConcern, string databaseName)
{
    if (_state == MongoConnectionState.Closed)
    {
        throw new InvalidOperationException("Connection is closed.");
    }
    // serialize all network use of this connection
    lock (_connectionLock)
    {
        _requestId = message.RequestId;
        message.WriteToBuffer();
        CommandDocument getLastErrorCommand = null;
        if (writeConcern != null && writeConcern.Enabled)
        {
            // translate the write concern components into getLastError fields,
            // omitting each one that was not specified
            var fsync = (writeConcern.FSync == null) ? null : (BsonValue)writeConcern.FSync;
            var journal = (writeConcern.Journal == null) ? null : (BsonValue)writeConcern.Journal;
            var w = (writeConcern.W == null) ? null : writeConcern.W.ToGetLastErrorWValue();
            var wTimeout = (writeConcern.WTimeout == null) ? null : (BsonValue)(int)writeConcern.WTimeout.Value.TotalMilliseconds;
            getLastErrorCommand = new CommandDocument
            {
                { "getlasterror", 1 }, // use all lowercase for backward compatibility
                { "fsync", fsync, fsync != null },
                { "j", journal, journal != null },
                { "w", w, w != null },
                { "wtimeout", wTimeout, wTimeout != null }
            };
            // piggy back on network transmission for message
            using (var getLastErrorMessage = new MongoQueryMessage(message.Buffer, message.WriterSettings, databaseName + ".$cmd", QueryFlags.None, 0, 1, getLastErrorCommand, null))
            {
                getLastErrorMessage.WriteToBuffer();
            }
        }
        try
        {
            var networkStream = GetNetworkStream();
            var writeTimeout = (int)_serverInstance.Settings.SocketTimeout.TotalMilliseconds;
            if (writeTimeout != 0)
            {
                networkStream.WriteTimeout = writeTimeout;
            }
            message.Buffer.WriteTo(networkStream);
            _messageCounter++;
        }
        catch (Exception ex)
        {
            // let the connection record the failure before propagating
            HandleException(ex);
            throw;
        }
        WriteConcernResult writeConcernResult = null;
        if (writeConcern != null && writeConcern.Enabled)
        {
            var readerSettings = new BsonBinaryReaderSettings
            {
                GuidRepresentation = message.WriterSettings.GuidRepresentation,
                MaxDocumentSize = _serverInstance.MaxDocumentSize
            };
            // read the getLastError reply that follows the primary message
            var replyMessage = ReceiveMessage<BsonDocument>(readerSettings, null);
            var getLastErrorResponse = replyMessage.Documents[0];
            writeConcernResult = new WriteConcernResult();
            writeConcernResult.Initialize(getLastErrorCommand, getLastErrorResponse);
            if (!writeConcernResult.Ok)
            {
                var errorMessage = string.Format(
                    "WriteConcern detected an error '{0}'. (response was {1}).",
                    writeConcernResult.ErrorMessage, getLastErrorResponse.ToJson());
                throw new WriteConcernException(errorMessage, writeConcernResult);
            }
            if (writeConcernResult.HasLastErrorMessage)
            {
                var errorMessage = string.Format(
                    "WriteConcern detected an error '{0}'. (Response was {1}).",
                    writeConcernResult.LastErrorMessage, getLastErrorResponse.ToJson());
                throw new WriteConcernException(errorMessage, writeConcernResult);
            }
        }
        return (writeConcernResult);
    }
}
/// <summary>
/// Opens a connection to the server at the given end point, verifies via
/// ismaster that it is usable (primary, or SlaveOk allowed), and stores the
/// connection and primary flag on this instance.
/// </summary>
/// <param name="endPoint">The server end point.</param>
/// <param name="timeout">The connect timeout.</param>
private void Connect(IPEndPoint endPoint, TimeSpan timeout)
{
    var newConnection = new MongoConnection(null, endPoint); // no connection pool
    bool serverIsPrimary;
    try
    {
        // probe the server so we know whether it is a primary
        var isMasterCommand = new CommandDocument("ismaster", 1);
        var isMasterResult = newConnection.RunCommand("admin.$cmd", QueryFlags.SlaveOk, isMasterCommand);
        serverIsPrimary = isMasterResult["ismaster", false].ToBoolean();
        if (!serverIsPrimary && !server.SlaveOk)
        {
            throw new MongoConnectionException("Server is not a primary and SlaveOk is false");
        }
    }
    catch
    {
        // close the probe connection before rethrowing; close failures are ignored
        try { newConnection.Close(); } catch { }
        throw;
    }
    this.connection = newConnection;
    this.isPrimary = serverIsPrimary;
}
/// <summary>
/// Sends the request message over the connection, optionally piggy-backing a
/// getLastError command when safe mode is enabled, and converts any reported
/// error into a MongoSafeModeException.
/// </summary>
/// <param name="message">The request message (already assembled).</param>
/// <param name="safeMode">The safe mode settings to apply.</param>
/// <returns>The getLastError result, or null when safe mode is disabled.</returns>
internal SafeModeResult SendMessage(MongoRequestMessage message, SafeMode safeMode)
{
    if (_state == MongoConnectionState.Closed)
    {
        throw new InvalidOperationException("Connection is closed.");
    }
    // serialize all network use of this connection
    lock (_connectionLock)
    {
        _requestId = message.RequestId;
        message.WriteToBuffer();
        CommandDocument safeModeCommand = null;
        if (safeMode.Enabled)
        {
            safeModeCommand = new CommandDocument
            {
                { "getlasterror", 1 }, // use all lowercase for backward compatibility
                { "fsync", true, safeMode.FSync },
                { "j", true, safeMode.Journal },
                // only one of the two "w" entries is emitted: numeric W when > 1,
                // or the string WMode when set (presumably mutually exclusive — verify)
                { "w", safeMode.W, safeMode.W > 1 },
                { "w", safeMode.WMode, safeMode.WMode != null },
                { "wtimeout", (int)safeMode.WTimeout.TotalMilliseconds, safeMode.W > 1 && safeMode.WTimeout != TimeSpan.Zero }
            };
            // piggy back on network transmission for message
            using (var getLastErrorMessage = new MongoQueryMessage(message.Buffer, message.WriterSettings, "admin.$cmd", QueryFlags.None, 0, 1, safeModeCommand, null))
            {
                getLastErrorMessage.WriteToBuffer();
            }
        }
        try
        {
            var networkStream = GetNetworkStream();
            var writeTimeout = (int)_serverInstance.Server.Settings.SocketTimeout.TotalMilliseconds;
            if (writeTimeout != 0)
            {
                networkStream.WriteTimeout = writeTimeout;
            }
            message.Buffer.WriteTo(networkStream);
            _messageCounter++;
        }
        catch (Exception ex)
        {
            // let the connection record the failure before propagating
            HandleException(ex);
            throw;
        }
        SafeModeResult safeModeResult = null;
        if (safeMode.Enabled)
        {
            var readerSettings = new BsonBinaryReaderSettings
            {
                GuidRepresentation = message.WriterSettings.GuidRepresentation,
                MaxDocumentSize = _serverInstance.MaxDocumentSize
            };
            // read the getLastError reply that follows the primary message
            var replyMessage = ReceiveMessage<BsonDocument>(readerSettings, null);
            var safeModeResponse = replyMessage.Documents[0];
            safeModeResult = new SafeModeResult();
            safeModeResult.Initialize(safeModeCommand, safeModeResponse);
            if (!safeModeResult.Ok)
            {
                var errorMessage = string.Format(
                    "Safemode detected an error '{0}'. (response was {1}).",
                    safeModeResult.ErrorMessage, safeModeResponse.ToJson());
                throw new MongoSafeModeException(errorMessage, safeModeResult);
            }
            if (safeModeResult.HasLastErrorMessage)
            {
                var errorMessage = string.Format(
                    "Safemode detected an error '{0}'. (Response was {1}).",
                    safeModeResult.LastErrorMessage, safeModeResponse.ToJson());
                throw new MongoSafeModeException(errorMessage, safeModeResult);
            }
        }
        return (safeModeResult);
    }
}
/// <summary>
/// Copies every index (except the implicit "_id_" index) from the source
/// collection to the target collection. On server 2.6+ the indexes are
/// batched into a single createIndexes command; older servers use the legacy
/// CreateIndex API. Timeouts on large collections are tolerated and logged.
/// </summary>
/// <param name="sourceCollection">The collection whose indexes are copied.</param>
/// <param name="targetCollection">The collection receiving the indexes.</param>
/// <param name="options">Optional flags (indexes-background, indexes-sparse); may be null.</param>
private static void CreateIndexes(MongoCollection<BsonDocument> sourceCollection, MongoCollection<BsonDocument> targetCollection, FlexibleOptions options)
{
    if (options == null)
    {
        options = new FlexibleOptions();
    }
    var logger = NLog.LogManager.GetLogger("CreateIndexes");
    logger.Debug("{2} - {0}.{1} - Start index creation", sourceCollection.Database.Name, sourceCollection.Name, Thread.CurrentThread.ManagedThreadId);

    // batched createIndexes command (server 2.6+); indexes are appended to indexList
    var command = new CommandDocument();
    command.Add("createIndexes", targetCollection.Name);
    var indexList = new BsonArray();
    command.Add("indexes", indexList);

    // Copying Indexes - If Any
    foreach (IndexInfo idx in sourceCollection.GetIndexes().ToList())
    {
        // Skipping "_id_" default index - Since Every mongodb Collection has it
        if (idx.Name == "_id_")
        {
            continue;
        }

        // Recreating Index Options based on the current index options,
        // optionally forcing background/sparse via options
        var opts = IndexOptions.SetBackground(idx.IsBackground || options.Get("indexes-background", false))
            .SetSparse(idx.IsSparse || options.Get("indexes-sparse", false))
            .SetUnique(idx.IsUnique).SetName(idx.Name).SetDropDups(idx.DroppedDups);
        if (idx.TimeToLive < TimeSpan.MaxValue)
        {
            opts.SetTimeToLive(idx.TimeToLive);
        }

        // Adding Index
        try
        {
            var serverVersion = targetCollection.Database.Server.BuildInfo.Version;
            // FIX: the old check (Major < 2 && MajorRevision < 6) could never match a
            // 2.x server, so 2.0-2.4 servers were sent a createIndexes command they do
            // not support. createIndexes exists only on server 2.6+.
            if (serverVersion.Major < 2 || (serverVersion.Major == 2 && serverVersion.Minor < 6))
            {
                // FIX: format used "{2}" twice for four arguments
                logger.Debug("{3} - {0}.{1} - Creating index: {2}", sourceCollection.Database.Name, sourceCollection.Name, idx.Name, Thread.CurrentThread.ManagedThreadId);
                targetCollection.CreateIndex(idx.Key, opts);
            }
            else
            {
                logger.Debug("{3} - {0}.{1} - Prepare index creation: {2}", sourceCollection.Database.Name, sourceCollection.Name, idx.Name, Thread.CurrentThread.ManagedThreadId);
                // removes the namespace to allow mongodb to generate the correct one...
                var doc = idx.RawDocument;
                doc.Remove("ns");
                if (options.Get("indexes-background", false))
                {
                    doc["background"] = true;
                }
                if (options.Get("indexes-sparse", false))
                {
                    doc["sparse"] = true;
                }
                indexList.Add(doc);
            }
        }
        catch (Exception ex)
        {
            // check for timeout exception that may occur if the collection is large...
            if (ex is System.IO.IOException || ex is System.Net.Sockets.SocketException ||
                (ex.InnerException != null && ex.InnerException is System.Net.Sockets.SocketException))
            {
                logger.Warn("{3} - {0}.{1} - Timeout creating index {2}, this may occur in large collections. You should check manually after a while.", sourceCollection.Database.Name, sourceCollection.Name, idx.Name, Thread.CurrentThread.ManagedThreadId);
                // wait for index creation (poll every 10s for up to 5 minutes)
                for (var i = 0; i < 30; i++)
                {
                    System.Threading.Thread.Sleep(10000);
                    try
                    {
                        if (targetCollection.IndexExists(idx.Name))
                        {
                            break;
                        }
                    }
                    catch { /* best effort: keep polling */ }
                }
            }
            else
            {
                // FIX: message previously concatenated idx.Name onto an unformatted "{2}" placeholder
                logger.Error(ex, "{0}.{1} - Error creating index {2}", sourceCollection.Database.Name, sourceCollection.Name, idx.Name);
            }
            logger.Warn("{3} - {0}.{1} - Index details: {2}", sourceCollection.Database.Name, sourceCollection.Name, idx.RawDocument.ToJson(), Thread.CurrentThread.ManagedThreadId);
        }
    }

    // send the batched createIndexes command when at least one index was queued
    if (indexList.Count > 0)
    {
        try
        {
            logger.Debug("{3} - {0}.{1} - Creating {2} indexes", sourceCollection.Database.Name, sourceCollection.Name, indexList.Count, Thread.CurrentThread.ManagedThreadId);
            targetCollection.Database.RunCommand(command);
        }
        catch (Exception ex)
        {
            // check for timeout exception that may occur if the collection is large...
            if (ex is System.IO.IOException || ex is System.Net.Sockets.SocketException ||
                (ex.InnerException != null && ex.InnerException is System.Net.Sockets.SocketException))
            {
                logger.Warn("{3} - {0}.{1} - Timeout creating {2} indexes, this may occur in large collections. You should check manually after a while.", sourceCollection.Database.Name, sourceCollection.Name, indexList.Count, Thread.CurrentThread.ManagedThreadId);
                logger.Warn("{3} - {0}.{1} - Index details: {2}", sourceCollection.Database.Name, sourceCollection.Name, command.ToJson(), Thread.CurrentThread.ManagedThreadId);
            }
            else
            {
                logger.Error(ex, "{2} - {0}.{1} - Error creating indexes", sourceCollection.Database.Name, sourceCollection.Name, Thread.CurrentThread.ManagedThreadId);
                logger.Error("{3} - {0}.{1} - Index details: {2}", sourceCollection.Database.Name, sourceCollection.Name, command.ToJson(), Thread.CurrentThread.ManagedThreadId);
            }
        }
    }
    logger.Debug("{2} - {0}.{1} - Index creation completed", sourceCollection.Database.Name, sourceCollection.Name, Thread.CurrentThread.ManagedThreadId);
}
/// <summary>
/// Sends the buffered message, optionally piggy-backing a getLastError command
/// in the same network round trip when a write concern is enabled, and throws
/// WriteConcernException when the response reports an error.
/// </summary>
/// <param name="connection">The connection to send on.</param>
/// <param name="buffer">The buffer already containing the primary message.</param>
/// <param name="requestId">The request id of the primary message.</param>
/// <param name="readerSettings">Settings for reading the reply.</param>
/// <param name="writerSettings">Settings for writing the getLastError query.</param>
/// <param name="writeConcern">The write concern to apply.</param>
/// <returns>The getLastError result, or null when the write concern is disabled.</returns>
protected WriteConcernResult SendMessageWithWriteConcern(
    MongoConnection connection,
    BsonBuffer buffer,
    int requestId,
    BsonBinaryReaderSettings readerSettings,
    BsonBinaryWriterSettings writerSettings,
    WriteConcern writeConcern)
{
    CommandDocument getLastErrorCommand = null;
    if (writeConcern.Enabled)
    {
        // translate the write concern components into getLastError fields,
        // omitting each one that was not specified
        var fsync = (writeConcern.FSync == null) ? null : (BsonValue)writeConcern.FSync;
        var journal = (writeConcern.Journal == null) ? null : (BsonValue)writeConcern.Journal;
        var w = (writeConcern.W == null) ? null : writeConcern.W.ToGetLastErrorWValue();
        var wTimeout = (writeConcern.WTimeout == null) ? null : (BsonValue)(int)writeConcern.WTimeout.Value.TotalMilliseconds;
        getLastErrorCommand = new CommandDocument
        {
            { "getlasterror", 1 }, // use all lowercase for backward compatibility
            { "fsync", fsync, fsync != null },
            { "j", journal, journal != null },
            { "w", w, w != null },
            { "wtimeout", wTimeout, wTimeout != null }
        };
        // piggy back on network transmission for message
        var getLastErrorMessage = new MongoQueryMessage(writerSettings, DatabaseName + ".$cmd", QueryFlags.None, 0, 1, getLastErrorCommand, null);
        getLastErrorMessage.WriteToBuffer(buffer);
    }
    connection.SendMessage(buffer, requestId);
    WriteConcernResult writeConcernResult = null;
    if (writeConcern.Enabled)
    {
        // read the getLastError reply that follows the primary message
        var writeConcernResultSerializer = BsonSerializer.LookupSerializer(typeof(WriteConcernResult));
        var replyMessage = connection.ReceiveMessage<WriteConcernResult>(readerSettings, writeConcernResultSerializer, null);
        if (replyMessage.NumberReturned == 0)
        {
            throw new MongoCommandException("Command 'getLastError' failed. No response returned");
        }
        writeConcernResult = replyMessage.Documents[0];
        writeConcernResult.Command = getLastErrorCommand;
        if (!writeConcernResult.Ok)
        {
            var errorMessage = string.Format(
                "WriteConcern detected an error '{0}'. (response was {1}).",
                writeConcernResult.ErrorMessage, writeConcernResult.Response.ToJson());
            throw new WriteConcernException(errorMessage, writeConcernResult);
        }
        if (writeConcernResult.HasLastErrorMessage)
        {
            var errorMessage = string.Format(
                "WriteConcern detected an error '{0}'. (Response was {1}).",
                writeConcernResult.LastErrorMessage, writeConcernResult.Response.ToJson());
            throw new WriteConcernException(errorMessage, writeConcernResult);
        }
    }
    return (writeConcernResult);
}
/// <summary>
/// Updates the GridFS files document with the final length and, when
/// requested, the server-computed MD5 hash.
/// </summary>
private void UpdateMetadata()
{
    // recompute the server-side MD5 only when requested; otherwise store null
    BsonValue md5 = BsonNull.Value;
    if (_updateMD5)
    {
        var md5Command = new CommandDocument
        {
            { "filemd5", _fileInfo.Id },
            { "root", _gridFS.Settings.Root }
        };
        md5 = _gridFS.Database.RunCommand(md5Command).Response["md5"].AsString;
    }

    // persist the final length and md5 on the files document
    _gridFS.Files.Update(
        Query.EQ("_id", _fileInfo.Id),
        Update.Set("length", _length).Set("md5", md5));
}
/// <summary>
/// Opens a connection to the server at the given end point, verifies via
/// ismaster that it is usable (primary, or SlaveOk allowed), derives the max
/// document/message sizes, and stores the connection and primary flag.
/// </summary>
/// <param name="endPoint">The server end point.</param>
/// <param name="timeout">The connect timeout. NOTE(review): not referenced in this body — confirm whether it is applied elsewhere.</param>
private void Connect(IPEndPoint endPoint, TimeSpan timeout)
{
    var connection = new MongoConnection(null, endPoint); // no connection pool
    bool isPrimary;
    try
    {
        // probe the server so we know whether it is a primary
        var isMasterCommand = new CommandDocument("ismaster", 1);
        var isMasterResult = connection.RunCommand(server, "admin.$cmd", QueryFlags.SlaveOk, isMasterCommand);
        isPrimary = isMasterResult.Response["ismaster", false].ToBoolean();
        if (!isPrimary && !server.Settings.SlaveOk)
        {
            throw new MongoConnectionException("Server is not a primary and SlaveOk is false");
        }
        // cache the server-advertised size limits (falling back to current defaults)
        maxDocumentSize = isMasterResult.Response["maxBsonObjectSize", server.MaxDocumentSize].ToInt32();
        maxMessageLength = Math.Max(MongoDefaults.MaxMessageLength, maxDocumentSize + 1024); // derived from maxDocumentSize
    }
    catch
    {
        // close the probe connection before rethrowing
        try { connection.Close(); } catch { } // ignore exceptions
        throw;
    }
    this.connection = connection;
    this.isPrimary = isPrimary;
}
/// <summary>
/// Initializes a new command wrapper from a raw command document and the
/// level at which it should run.
/// </summary>
/// <param name="commandDocument">The command document to execute.</param>
/// <param name="runLevel">The path level the command runs at.</param>
public MongoCommand(CommandDocument commandDocument, EnumMgr.PathLevel runLevel)
{
    CommandString = string.Empty;
    RunLevel = runLevel;
    CmdDocument = commandDocument;
}
/// <summary>
/// Uploads a GridFS file.
/// </summary>
/// <param name="stream">The source stream.</param>
/// <param name="remoteFileName">The remote file name.</param>
/// <param name="createOptions">The create options.</param>
/// <returns>The file info of the new GridFS file.</returns>
public MongoGridFSFileInfo Upload(
    Stream stream,
    string remoteFileName,
    MongoGridFSCreateOptions createOptions
)
{
    // keep the whole upload on one connection so the chunks, filemd5 command
    // and files insert all see the same server
    using (database.RequestStart())
    {
        EnsureIndexes();
        var files_id = createOptions.Id ?? BsonObjectId.GenerateNewId();
        var chunkSize = createOptions.ChunkSize == 0 ? settings.ChunkSize : createOptions.ChunkSize;
        var buffer = new byte[chunkSize];
        var length = 0;
        string md5Client;
        // compute a client-side MD5 while inserting one chunk document per chunk
        using (var md5Algorithm = MD5.Create())
        {
            for (int n = 0; true; n++)
            {
                // might have to call Stream.Read several times to get a whole chunk
                var bytesNeeded = chunkSize;
                var bytesRead = 0;
                while (bytesNeeded > 0)
                {
                    var partialRead = stream.Read(buffer, bytesRead, bytesNeeded);
                    if (partialRead == 0)
                    {
                        break; // EOF may or may not have a partial chunk
                    }
                    bytesNeeded -= partialRead;
                    bytesRead += partialRead;
                }
                if (bytesRead == 0)
                {
                    break; // EOF no partial chunk
                }
                length += bytesRead;
                // a short final chunk is copied into a right-sized array
                byte[] data = buffer;
                if (bytesRead < chunkSize)
                {
                    data = new byte[bytesRead];
                    Buffer.BlockCopy(buffer, 0, data, 0, bytesRead);
                }
                var chunk = new BsonDocument
                {
                    { "_id", BsonObjectId.GenerateNewId() },
                    { "files_id", files_id },
                    { "n", n },
                    { "data", new BsonBinaryData(data) }
                };
                chunks.Insert(chunk, settings.SafeMode);
                md5Algorithm.TransformBlock(data, 0, data.Length, null, 0);
                if (bytesRead < chunkSize)
                {
                    break; // EOF after partial chunk
                }
            }
            md5Algorithm.TransformFinalBlock(new byte[0], 0, 0);
            md5Client = BsonUtils.ToHexString(md5Algorithm.Hash);
        }
        // ask the server to hash the stored chunks and compare with the client hash
        var md5Command = new CommandDocument
        {
            { "filemd5", files_id },
            { "root", settings.Root }
        };
        var md5Result = database.RunCommand(md5Command);
        var md5Server = md5Result.Response["md5"].AsString;
        if (!md5Client.Equals(md5Server, StringComparison.OrdinalIgnoreCase))
        {
            throw new MongoGridFSException("Upload client and server MD5 hashes are not equal");
        }
        var uploadDate = createOptions.UploadDate == DateTime.MinValue ? DateTime.UtcNow : createOptions.UploadDate;
        // insert the files document describing the upload
        BsonDocument fileInfo = new BsonDocument
        {
            { "_id", files_id },
            { "filename", remoteFileName },
            { "length", length },
            { "chunkSize", chunkSize },
            { "uploadDate", uploadDate },
            { "md5", md5Server },
            { "contentType", createOptions.ContentType }, // optional
            { "aliases", BsonArray.Create((IEnumerable<string>)createOptions.Aliases) }, // optional
            { "metadata", createOptions.Metadata } // optional
        };
        files.Insert(fileInfo, settings.SafeMode);
        return (FindOneById(files_id));
    }
}
/// <summary>
/// Sends the request message over the connection, optionally piggy-backing a
/// getLastError command when safe mode is enabled, and converts any reported
/// error into a MongoSafeModeException.
/// </summary>
/// <param name="message">The request message (already assembled).</param>
/// <param name="safeMode">The safe mode settings to apply.</param>
/// <returns>The getLastError result, or null when safe mode is disabled.</returns>
internal SafeModeResult SendMessage(
    MongoRequestMessage message,
    SafeMode safeMode
)
{
    if (state == MongoConnectionState.Closed)
    {
        throw new InvalidOperationException("Connection is closed");
    }
    // serialize all network use of this connection
    lock (connectionLock)
    {
        message.WriteToBuffer();
        CommandDocument safeModeCommand = null;
        if (safeMode.Enabled)
        {
            safeModeCommand = new CommandDocument
            {
                { "getlasterror", 1 }, // use all lowercase for backward compatibility
                { "fsync", true, safeMode.FSync },
                { "w", safeMode.W, safeMode.W > 1 },
                { "wtimeout", (int)safeMode.WTimeout.TotalMilliseconds, safeMode.W > 1 && safeMode.WTimeout != TimeSpan.Zero }
            };
            using (
                var getLastErrorMessage = new MongoQueryMessage(
                    message.Server,
                    "admin.$cmd", // collectionFullName
                    QueryFlags.None,
                    0, // numberToSkip
                    1, // numberToReturn
                    safeModeCommand,
                    null, // fields
                    message.Buffer // piggy back on network transmission for message
                )
            )
            {
                getLastErrorMessage.WriteToBuffer();
            }
        }
        try
        {
            var networkStream = GetNetworkStream();
            networkStream.WriteTimeout = (int)message.Server.Settings.SocketTimeout.TotalMilliseconds;
            message.Buffer.WriteTo(networkStream);
            messageCounter++;
        }
        catch (Exception ex)
        {
            // let the connection record the failure before propagating
            HandleException(ex);
            throw;
        }
        SafeModeResult safeModeResult = null;
        if (safeMode.Enabled)
        {
            // read the getLastError reply that follows the primary message
            var replyMessage = ReceiveMessage<BsonDocument>(message.Server);
            var safeModeResponse = replyMessage.Documents[0];
            safeModeResult = new SafeModeResult();
            safeModeResult.Initialize(safeModeCommand, safeModeResponse);
            if (!safeModeResult.Ok)
            {
                var errorMessage = string.Format("Safemode detected an error: {0} (response: {1})", safeModeResult.ErrorMessage, safeModeResponse.ToJson());
                throw new MongoSafeModeException(errorMessage, safeModeResult);
            }
            if (safeModeResult.HasLastErrorMessage)
            {
                var errorMessage = string.Format("Safemode detected an error: {0} (response: {1})", safeModeResult.LastErrorMessage, safeModeResponse.ToJson());
                throw new MongoSafeModeException(errorMessage, safeModeResult);
            }
        }
        return (safeModeResult);
    }
}
/// <summary>
/// The copydb command copies a database from a remote host to the current host.
///
/// MongoDb copydb Command syntax:
///
/// { copydb: 1,
///   fromhost: <hostname>,
///   fromdb: <db>,
///   todb: <db>,
///   slaveOk: <bool>,      [optional]
///   username: <username>, [optional]
///   nonce: <nonce>,       [optional]
///   key: <key> }          [optional]
/// </summary>
/// <returns>
/// true when the copydb command was issued; false when the computed backup
/// name equals the live database name (nothing is copied in that case).
/// </returns>
public bool DatabaseBackup()
{
    // validate that all required options are present before doing any work
    var fromdb = _options.Get("fromdb", "");
    if (String.IsNullOrEmpty(fromdb)) { throw new ArgumentNullException("fromdb"); }
    var fromHostLogin = _options.Get("fromHostLogin", "");
    if (String.IsNullOrEmpty(fromHostLogin)) { throw new ArgumentNullException("fromHostLogin"); }
    var fromHostPassword = _options.Get("fromHostPassword", "");
    if (String.IsNullOrEmpty(fromHostPassword)) { throw new ArgumentNullException("fromHostPassword"); }
    var fromHostAddress = _options.Get("fromHostAddress", "");
    if (String.IsNullOrEmpty(fromHostAddress)) { throw new ArgumentNullException("fromHostAddress"); }
    var todb = _options.Get("todb", "");
    if (String.IsNullOrEmpty(todb)) { throw new ArgumentNullException("todb"); }
    var toHostLogin = _options.Get("toHostLogin", "");
    if (String.IsNullOrEmpty(toHostLogin)) { throw new ArgumentNullException("toHostLogin"); }
    var toHostPassword = _options.Get("toHostPassword", "");
    if (String.IsNullOrEmpty(toHostPassword)) { throw new ArgumentNullException("toHostPassword"); }
    var toHostAddress = _options.Get("toHostAddress", "");
    if (String.IsNullOrEmpty(toHostAddress)) { throw new ArgumentNullException("toHostAddress"); }
    var errorLocation = _options.Get("errorLocation", "");
    if (String.IsNullOrEmpty(errorLocation)) { throw new ArgumentNullException("errorLocation"); }

    MongoDbContext.Configure(GlobalSettings.Mongo_UserName, GlobalSettings.Mongo_Password, GlobalSettings.Mongo_ServerPath);

    // check backup database name
    if (String.IsNullOrWhiteSpace(bkpDataBaseName))
    {
        // prepare backup database name: <db>_bkp_<yyyyMMdd>_<version>, picking
        // the first version number not already present on the server
        int version = 1;
        bkpDataBaseName = GlobalSettings.Mongo_Database + "_bkp_" +
            DateTime.UtcNow.ToString("yyyyMMdd") + "_";
        while (MongoDbContext.Server.DatabaseExists(bkpDataBaseName + version))
        {
            version++;
        }
        bkpDataBaseName += version;
    }

    // sanity check: never copy a database onto itself
    // FIX: this method returns bool; the original bare "return;" did not compile
    if (bkpDataBaseName == GlobalSettings.Mongo_Database)
    {
        return false;
    }

    // drop older database (best effort; failure to drop is logged, not fatal)
    try
    {
        MongoDbContext.Server.DropDatabase(bkpDataBaseName);
    }
    catch (Exception ex)
    {
        LogManager.GetCurrentClassLogger().Warn("Drop Backup Database", ex);
    }

    // execute copy command
    // http://docs.mongodb.org/manual/reference/command/copydb/
    // FIX: the command construction was commented out although it is used below;
    // element renamed "slaveOK" -> "slaveOk" to match the documented syntax above
    var command = new CommandDocument(
        new BsonElement("copydb", 1),
        //new BsonElement("fromhost", MongoDbContext.Server.Primary.Address.Host + ':' + MongoDbContext.Server.Primary.Address.Port.ToString()),
        new BsonElement("fromdb", GlobalSettings.Mongo_Database),
        new BsonElement("todb", bkpDataBaseName),
        new BsonElement("slaveOk", true));
    MongoDbContext.Server.GetDatabase("admin").RunCommand(command);
    return true;
}
/// <summary>
/// Determines whether the node behind the current connection is a replica
/// set arbiter, based on the isMaster response's arbiterOnly flag.
/// </summary>
private bool IsArbiter()
{
    var isMasterCommand = new CommandDocument("isMaster", true);
    var commandResult = RunCommand(_connection, "admin", isMasterCommand);
    // absent arbiterOnly field means "not an arbiter"
    var arbiterFlag = commandResult.Response.GetValue("arbiterOnly", false);
    return arbiterFlag.ToBoolean();
}
/// <summary>
/// Uploads a GridFS file from a stream using the default chunk size, letting
/// the server compute the file's MD5 from the stored chunks.
/// </summary>
/// <param name="stream">The source stream.</param>
/// <param name="remoteFileName">The remote file name.</param>
/// <returns>The file info of the newly created GridFS file.</returns>
public MongoGridFSFileInfo Upload(
    Stream stream,
    string remoteFileName
) {
    using (database.RequestStart()) {
        chunks.EnsureIndex("files_id", "n");

        BsonObjectId files_id = BsonObjectId.GenerateNewId();
        var chunkSize = settings.DefaultChunkSize;
        var readBuffer = new byte[chunkSize];
        var totalLength = 0;

        var chunkNumber = 0;
        while (true) {
            var bytesRead = stream.Read(readBuffer, 0, chunkSize);
            if (bytesRead == 0) {
                break; // end of stream
            }
            totalLength += bytesRead;

            var chunkData = readBuffer;
            if (bytesRead < chunkSize) {
                // final partial chunk: store only the bytes actually read
                chunkData = new byte[bytesRead];
                Buffer.BlockCopy(readBuffer, 0, chunkData, 0, bytesRead);
            }

            var chunkDocument = new BsonDocument {
                { "_id", BsonObjectId.GenerateNewId() },
                { "files_id", files_id },
                { "n", chunkNumber },
                { "data", new BsonBinaryData(chunkData) }
            };
            chunks.Insert(chunkDocument, settings.SafeMode);

            if (bytesRead < chunkSize) {
                break; // a partial chunk implies end of stream
            }
            chunkNumber++;
        }

        // ask the server to compute the file's MD5 from the stored chunks
        var md5Command = new CommandDocument {
            { "filemd5", files_id },
            { "root", settings.Root }
        };
        var md5Result = database.RunCommand(md5Command);
        var md5 = md5Result.Response["md5"].AsString;

        var fileInfo = new BsonDocument {
            { "_id", files_id },
            { "filename", remoteFileName },
            { "length", totalLength },
            { "chunkSize", chunkSize },
            { "uploadDate", DateTime.UtcNow },
            { "md5", md5 }
        };
        files.Insert(fileInfo, settings.SafeMode);

        return FindOneById(files_id);
    }
}
/// <summary>
/// Uploads a GridFS file.
/// </summary>
/// <param name="stream">The source stream.</param>
/// <param name="remoteFileName">The remote file name.</param>
/// <param name="createOptions">The create options.</param>
/// <returns>The file info of the new GridFS file.</returns>
public MongoGridFSFileInfo Upload(
    Stream stream,
    string remoteFileName,
    MongoGridFSCreateOptions createOptions)
{
    // uploads must run against the primary; re-route when this instance
    // is configured for a secondary read preference
    if (_settings.ReadPreference != ReadPreference.Primary)
    {
        var gridFS = GetPrimaryGridFSSettings();
        return(gridFS.Upload(stream, remoteFileName, createOptions));
    }
    using (_server.RequestStart(null, ReadPreference.Primary))
    {
        EnsureIndexes();
        var database = GetDatabase(ReadPreference.Primary);
        var chunksCollection = GetChunksCollection(database);
        var filesCollection = GetFilesCollection(database);
        var files_id = createOptions.Id ?? ObjectId.GenerateNewId();
        var chunkSize = (createOptions.ChunkSize == 0) ? _settings.ChunkSize : createOptions.ChunkSize;
        var buffer = new byte[chunkSize];
        var length = 0L; // long: total file size may exceed int.MaxValue
        string md5Client = null;
        // md5Algorithm is null when VerifyMD5 is off; every use below is guarded
        using (var md5Algorithm = _settings.VerifyMD5 ? MD5.Create() : null)
        {
            for (var n = 0L; true; n++)
            {
                // might have to call Stream.Read several times to get a whole chunk
                var bytesNeeded = chunkSize;
                var bytesRead = 0;
                while (bytesNeeded > 0)
                {
                    var partialRead = stream.Read(buffer, bytesRead, bytesNeeded);
                    if (partialRead == 0)
                    {
                        break; // EOF may or may not have a partial chunk
                    }
                    bytesNeeded -= partialRead;
                    bytesRead += partialRead;
                }
                if (bytesRead == 0)
                {
                    break; // EOF no partial chunk
                }
                length += bytesRead;
                byte[] data = buffer;
                if (bytesRead < chunkSize)
                {
                    // partial chunk: copy so only the bytes actually read are stored
                    data = new byte[bytesRead];
                    Buffer.BlockCopy(buffer, 0, data, 0, bytesRead);
                }
                var chunk = new BsonDocument
                {
                    { "_id", ObjectId.GenerateNewId() },
                    { "files_id", files_id },
                    // chunk numbers beyond Int32 range are stored as Int64
                    { "n", (n < int.MaxValue) ? (BsonValue) new BsonInt32((int)n) : new BsonInt64(n) },
                    { "data", new BsonBinaryData(data) }
                };
                chunksCollection.Insert(chunk, _settings.WriteConcern);
                if (_settings.VerifyMD5)
                {
                    // fold this chunk into the client-side MD5 as we go
                    md5Algorithm.TransformBlock(data, 0, data.Length, null, 0);
                }
                if (bytesRead < chunkSize)
                {
                    break; // EOF after partial chunk
                }
            }
            if (_settings.VerifyMD5)
            {
                md5Algorithm.TransformFinalBlock(new byte[0], 0, 0);
                md5Client = BsonUtils.ToHexString(md5Algorithm.Hash);
            }
        }
        // server-side MD5 of the stored chunks, used for verification and/or
        // the files document
        string md5Server = null;
        if (_settings.UpdateMD5 || _settings.VerifyMD5)
        {
            var md5Command = new CommandDocument
            {
                { "filemd5", files_id },
                { "root", _settings.Root }
            };
            var md5Result = database.RunCommand(md5Command);
            md5Server = md5Result.Response["md5"].AsString;
        }
        if (_settings.VerifyMD5 && !md5Client.Equals(md5Server, StringComparison.OrdinalIgnoreCase))
        {
            throw new MongoGridFSException("Upload client and server MD5 hashes are not equal.");
        }
        var uploadDate = (createOptions.UploadDate == DateTime.MinValue) ? DateTime.UtcNow : createOptions.UploadDate;
        var aliases = (createOptions.Aliases != null) ? new BsonArray(createOptions.Aliases) : null;
        BsonDocument fileInfo = new BsonDocument
        {
            { "_id", files_id },
            { "filename", remoteFileName, !string.IsNullOrEmpty(remoteFileName) }, // optional
            { "length", length },
            { "chunkSize", chunkSize },
            { "uploadDate", uploadDate },
            { "md5", (md5Server == null) ? (BsonValue)BsonNull.Value : new BsonString(md5Server) },
            { "contentType", createOptions.ContentType, !string.IsNullOrEmpty(createOptions.ContentType) }, // optional
            { "aliases", aliases, aliases != null }, // optional
            { "metadata", createOptions.Metadata, createOptions.Metadata != null } // optional
        };
        filesCollection.Insert(fileInfo, _settings.WriteConcern);
        return(FindOneById(files_id));
    }
}
/// <summary>
/// Uploads a GridFS file.
/// </summary>
/// <param name="stream">The source stream.</param>
/// <param name="remoteFileName">The remote file name.</param>
/// <param name="createOptions">The create options.</param>
/// <returns>The file info of the new GridFS file.</returns>
public MongoGridFSFileInfo Upload(
    Stream stream,
    string remoteFileName,
    MongoGridFSCreateOptions createOptions
) {
    using (database.RequestStart()) {
        EnsureIndexes();
        var files_id = createOptions.Id ?? BsonObjectId.GenerateNewId();
        var chunkSize = createOptions.ChunkSize == 0 ? settings.ChunkSize : createOptions.ChunkSize;
        var buffer = new byte[chunkSize];
        // NOTE(review): length is int, so files over int.MaxValue bytes would
        // overflow — confirm file sizes stay below 2 GB for this code path
        var length = 0;
        for (int n = 0; true; n++) {
            int bytesRead = stream.Read(buffer, 0, chunkSize);
            if (bytesRead == 0) {
                break; // end of stream
            }
            length += bytesRead;
            byte[] data = buffer;
            if (bytesRead < chunkSize) {
                // partial chunk: copy so only the bytes actually read are stored
                data = new byte[bytesRead];
                Buffer.BlockCopy(buffer, 0, data, 0, bytesRead);
            }
            var chunk = new BsonDocument {
                { "_id", BsonObjectId.GenerateNewId() },
                { "files_id", files_id },
                { "n", n },
                { "data", new BsonBinaryData(data) }
            };
            chunks.Insert(chunk, settings.SafeMode);
            if (bytesRead < chunkSize) {
                break; // a partial chunk implies end of stream
            }
        }
        // ask the server to compute the file's MD5 from the stored chunks
        var md5Command = new CommandDocument {
            { "filemd5", files_id },
            { "root", settings.Root }
        };
        var md5Result = database.RunCommand(md5Command);
        var md5 = md5Result.Response["md5"].AsString;
        var uploadDate = createOptions.UploadDate == DateTime.MinValue ? DateTime.UtcNow : createOptions.UploadDate;
        // NOTE(review): the "optional" elements below are added unconditionally
        // even when the corresponding option is null — confirm this matches the
        // intended files-collection schema
        BsonDocument fileInfo = new BsonDocument {
            { "_id", files_id },
            { "filename", remoteFileName },
            { "length", length },
            { "chunkSize", chunkSize },
            { "uploadDate", uploadDate },
            { "md5", md5 },
            { "contentType", createOptions.ContentType }, // optional
            { "aliases", BsonArray.Create((IEnumerable<string>)createOptions.Aliases) }, // optional
            { "metadata", createOptions.Metadata } // optional
        };
        files.Insert(fileInfo, settings.SafeMode);
        return(FindOneById(files_id));
    }
}
/// <summary>
/// Executes a named MongoDB command ("aggregate" or "mapReduce") built from
/// the given parameter list and deserializes the response documents.
/// </summary>
/// <param name="command">The command name: "aggregate" or "mapReduce".</param>
/// <param name="parameters">Named arguments for the command; may be null.</param>
/// <returns>
/// The deserialized result documents, or an empty list when the command name
/// is unknown or its required parameters are missing/invalid.
/// </returns>
public IList<T> Execute<T>(string command, ParamList parameters) where T : class
{
    IList<T> items = new T[] { };
    var args = parameters ?? new Dictionary<string, object>();
    switch (command)
    {
        case "aggregate":
        {
            object collection, pipeline;
            if ((args.TryGetValue("collection", out collection) && collection is string) &&
                (args.TryGetValue("pipeline", out pipeline) && pipeline is BsonArray))
            {
                var result = _context.Session.RunCommand(new CommandDocument
                {
                    { "aggregate", (string)collection },
                    { "pipeline", (BsonArray)pipeline }
                });
                items = result.Response["result"].AsBsonArray.Select(
                    v => BsonSerializer.Deserialize<T>(v.AsBsonDocument)).ToArray();
            }
        }
        break;
        case "mapReduce":
        {
            object collection, map, reduce;
            if ((args.TryGetValue("collection", out collection) && collection is string) &&
                (args.TryGetValue("map", out map) && (map is string || map is BsonJavaScript)) &&
                (args.TryGetValue("reduce", out reduce) && (reduce is string || reduce is BsonJavaScript)))
            {
                var commandDoc = new CommandDocument { { "mapReduce", (string)collection } };
                // map and reduce are JavaScript functions; plain strings are wrapped
                var s1 = map as string;
                if (s1 != null) { commandDoc.Add("map", new BsonJavaScript(s1)); }
                else { commandDoc.Add("map", (BsonJavaScript)map); }
                var s2 = reduce as string;
                if (s2 != null) { commandDoc.Add("reduce", new BsonJavaScript(s2)); }
                else { commandDoc.Add("reduce", (BsonJavaScript)reduce); }
                // query and sort are documents; plain strings are parsed as JSON
                object query;
                if (args.TryGetValue("query", out query) && (query is string || query is BsonDocument))
                {
                    var s = query as string;
                    if (s != null) { commandDoc.Add("query", BsonDocument.Parse(s)); }
                    else { commandDoc.Add("query", (BsonDocument)query); }
                }
                object sort;
                if (args.TryGetValue("sort", out sort) && (sort is string || sort is BsonDocument))
                {
                    var s = sort as string;
                    if (s != null) { commandDoc.Add("sort", BsonDocument.Parse(s)); }
                    else { commandDoc.Add("sort", (BsonDocument)sort); }
                }
                object limit;
                if (args.TryGetValue("limit", out limit) && limit is int)
                {
                    commandDoc.Add("limit", (int)limit);
                }
                // FIX: finalize is a JavaScript function like map/reduce, not a
                // document. The previous code parsed/cast it as BsonDocument,
                // which threw InvalidCastException for BsonJavaScript values and
                // built an invalid command for string values.
                object finalize;
                if (args.TryGetValue("finalize", out finalize) && (finalize is string || finalize is BsonJavaScript))
                {
                    var s3 = finalize as string;
                    if (s3 != null) { commandDoc.Add("finalize", new BsonJavaScript(s3)); }
                    else { commandDoc.Add("finalize", (BsonJavaScript)finalize); }
                }
                var result = _context.Session.RunCommand(commandDoc);
                // NOTE(review): inline mapReduce responses typically put documents
                // under "results" (plural); this reads "result" — confirm against
                // the server version in use.
                items = result.Response["result"].AsBsonArray.Select(
                    v => BsonSerializer.Deserialize<T>(v.AsBsonDocument)).ToArray();
            }
        }
        break;
    }
    return(items);
}
/// <summary>
/// Verifies that a command's help summary is populated from the XML doc comment.
/// </summary>
public void Command_help_includes_XML_summary()
{
    // Arrange / Act
    var document = new CommandDocument(typeof(Place));

    // Assert
    document.Summary.Should().Be("Places the order.");
}
/// <summary>
/// Queries the server for the SASL mechanisms it supports and always appends
/// MONGO-CR as the final fallback.
/// </summary>
private List<string> GetServerSupportedMethods()
{
    // an empty mechanism name forces a response that contains a list of
    // supported mechanisms...
    var saslStartCommand = new CommandDocument
    {
        { "saslStart", 1 },
        { "mechanism", ""},
        { "payload", new byte[0] }
    };

    var supported = new List<string>();
    var commandResult = _connection.RunCommand("admin", QueryFlags.SlaveOk, saslStartCommand, false);
    if (commandResult.Response.Contains("supportedMechanisms"))
    {
        foreach (var mechanism in commandResult.Response["supportedMechanisms"].AsBsonArray)
        {
            supported.Add(mechanism.AsString);
        }
    }

    // because MONGO-CR is last in the list, we don't need to check if the server supports it...
    // in the future, we may need to add a check.
    supported.Add("MONGO-CR");
    return supported;
}
// note: this method will run on a thread from the ThreadPool
/// <summary>
/// Probes one candidate server node (isMaster, and replSetGetStatus when a
/// replica set name is configured) and always enqueues a QueryNodeResponse,
/// carrying either the probe results or the exception that occurred.
/// </summary>
private void QueryNodeWorkItem(
    object parameters
) {
    // this method has to work at a very low level because the connection pool isn't set up yet
    var args = (QueryNodeParameters) parameters;
    var response = new QueryNodeResponse { Address = args.Address, EndPoint = args.EndPoint };

    try {
        var connection = new MongoConnection(null, args.EndPoint); // no connection pool
        try {
            var isMasterCommand = new CommandDocument("ismaster", 1);
            var isMasterResult = connection.RunCommand(server, "admin.$cmd", QueryFlags.SlaveOk, isMasterCommand);

            response.IsMasterResult = isMasterResult;
            response.Connection = connection; // might become the first connection in the connection pool
            // missing fields fall back to the defaults supplied to the indexer
            response.IsPrimary = isMasterResult.Response["ismaster", false].ToBoolean();
            response.MaxDocumentSize = isMasterResult.Response["maxBsonObjectSize", server.MaxDocumentSize].ToInt32();
            response.MaxMessageLength = Math.Max(MongoDefaults.MaxMessageLength, response.MaxDocumentSize + 1024); // derived from maxDocumentSize

            if (server.Settings.ReplicaSetName != null) {
                // verify the node belongs to the replica set we were configured for
                var getStatusCommand = new CommandDocument("replSetGetStatus", 1);
                var getStatusResult = connection.RunCommand(server, "admin.$cmd", QueryFlags.SlaveOk, getStatusCommand);
                var replicaSetName = getStatusResult.Response["set"].AsString;
                if (replicaSetName != server.Settings.ReplicaSetName) {
                    var message = string.Format("Host {0} belongs to a different replica set: {1}", args.EndPoint, replicaSetName);
                    throw new MongoConnectionException(message);
                }
            }
        } catch {
            // on any probe failure, close the connection we opened before rethrowing
            try { connection.Close(); } catch { } // ignore exceptions
            throw;
        }
    } catch (Exception ex) {
        // failures are reported through the response, not thrown:
        // the consumer drains the response queue
        response.Exception = ex;
    }
    args.ResponseQueue.Enqueue(response);
}
/// <summary>
/// Uploads a GridFS file.
/// </summary>
/// <param name="stream">The source stream.</param>
/// <param name="remoteFileName">The remote file name.</param>
/// <param name="createOptions">The create options.</param>
/// <returns>The file info of the new GridFS file.</returns>
public MongoGridFSFileInfo Upload(
    Stream stream,
    string remoteFileName,
    MongoGridFSCreateOptions createOptions)
{
    // uploads always run against the primary
    using (_server.RequestStart(null, ReadPreference.Primary))
    {
        EnsureIndexes();
        var database = GetDatabase(ReadPreference.Primary);
        var chunksCollection = GetChunksCollection(database);
        var filesCollection = GetFilesCollection(database);
        var files_id = createOptions.Id ?? ObjectId.GenerateNewId();
        var chunkSize = (createOptions.ChunkSize == 0) ? _settings.ChunkSize : createOptions.ChunkSize;
        var buffer = new byte[chunkSize];
        var length = 0L; // long: total file size may exceed int.MaxValue
        string md5Client = null;
        // md5Algorithm is null when VerifyMD5 is off; every use below is guarded
        using (var md5Algorithm = _settings.VerifyMD5 ? MD5.Create() : null)
        {
            for (var n = 0L; true; n++)
            {
                // might have to call Stream.Read several times to get a whole chunk
                var bytesNeeded = chunkSize;
                var bytesRead = 0;
                while (bytesNeeded > 0)
                {
                    var partialRead = stream.Read(buffer, bytesRead, bytesNeeded);
                    if (partialRead == 0)
                    {
                        break; // EOF may or may not have a partial chunk
                    }
                    bytesNeeded -= partialRead;
                    bytesRead += partialRead;
                }
                if (bytesRead == 0)
                {
                    break; // EOF no partial chunk
                }
                length += bytesRead;
                byte[] data = buffer;
                if (bytesRead < chunkSize)
                {
                    // partial chunk: copy so only the bytes actually read are stored
                    data = new byte[bytesRead];
                    Buffer.BlockCopy(buffer, 0, data, 0, bytesRead);
                }
                var chunk = new BsonDocument
                {
                    { "_id", ObjectId.GenerateNewId() },
                    { "files_id", files_id },
                    // chunk numbers beyond Int32 range are stored as Int64
                    { "n", (n < int.MaxValue) ? (BsonValue)new BsonInt32((int)n) : new BsonInt64(n) },
                    { "data", new BsonBinaryData(data) }
                };
                chunksCollection.Insert(chunk, _settings.WriteConcern);
                if (_settings.VerifyMD5)
                {
                    // fold this chunk into the client-side MD5 as we go
                    md5Algorithm.TransformBlock(data, 0, data.Length, null, 0);
                }
                if (bytesRead < chunkSize)
                {
                    break; // EOF after partial chunk
                }
            }
            if (_settings.VerifyMD5)
            {
                md5Algorithm.TransformFinalBlock(new byte[0], 0, 0);
                md5Client = BsonUtils.ToHexString(md5Algorithm.Hash);
            }
        }
        // server-side MD5 of the stored chunks, used for verification and/or
        // the files document
        string md5Server = null;
        if (_settings.UpdateMD5 || _settings.VerifyMD5)
        {
            var md5Command = new CommandDocument
            {
                { "filemd5", files_id },
                { "root", _settings.Root }
            };
            var md5Result = database.RunCommand(md5Command);
            md5Server = md5Result.Response["md5"].AsString;
        }
        if (_settings.VerifyMD5 && !md5Client.Equals(md5Server, StringComparison.OrdinalIgnoreCase))
        {
            throw new MongoGridFSException("Upload client and server MD5 hashes are not equal.");
        }
        var uploadDate = (createOptions.UploadDate == DateTime.MinValue) ? DateTime.UtcNow : createOptions.UploadDate;
        var aliases = (createOptions.Aliases != null) ? new BsonArray(createOptions.Aliases) : null;
        BsonDocument fileInfo = new BsonDocument
        {
            { "_id", files_id },
            { "filename", remoteFileName, !string.IsNullOrEmpty(remoteFileName) }, // optional
            { "length", length },
            { "chunkSize", chunkSize },
            { "uploadDate", uploadDate },
            { "md5", (md5Server == null) ? (BsonValue)BsonNull.Value : new BsonString(md5Server) },
            { "contentType", createOptions.ContentType, !string.IsNullOrEmpty(createOptions.ContentType) }, // optional
            { "aliases", aliases, aliases != null }, // optional
            { "metadata", createOptions.Metadata, createOptions.Metadata != null } // optional
        };
        filesCollection.Insert(fileInfo, _settings.WriteConcern);
        return FindOneById(files_id);
    }
}
/// <summary>
/// Aggregates today's entries from the ExceptionLog collection, grouped by
/// action (falling back to the exception message when no action is recorded),
/// date and severity, sorted by descending count.
/// </summary>
/// <returns>The deserialized aggregation result, or null when the command fails.</returns>
public MongoExceptionLogCommandResuilt GetTodayLog()
{
    //var s = new DbHelper<MongoExceptionLog>();
    //var list = s.Where(x => x.ExceptionTime >= DateTime.Now.Date).GroupBy(x => new { x.Action, x.ExceptionTime.Date }).ToList();
    //return list;
    // create the data connection
    // get the configured database
    var url = new MongoUrl(conString);
    var client = new MongoClient(url);
    var server = client.GetServer();
    MongoDatabase db = server.GetDatabase(url.DatabaseName);
    //var cmd = MongoLogSite.CommonUti.FileUtil.FileToString(System.IO.Path.Combine(rootPath, "exception_statistic.js"));
    //IMongoQuery
    //BsonJavaScript.Create(cmd);
    //IMongoQuery query = Query.Text(cmd);
    //var s = query.ToJson();
    //var s = db.Eval(cmd); // insufficient permissions
    var pipeline = new BsonArray
    {
        // $match: entries from the start of today; AddHours(8) appears to adjust
        // for the UTC+8 timezone of the stored ExTime values — TODO confirm
        new BsonDocument
        {
            { "$match", new BsonDocument
                {
                    { "ExTime", new BsonDocument
                        {
                            { "$gte", DateTime.Now.Date.AddHours(8)// "ISODate(\"2018-04-17T08:00:00.000+0800\")"
                            }
                        }
                    }
                }
            }
        },
        // $group: key is (Action-or-ExMsg, formatted date, severity);
        // Count accumulates the number of matching entries per key
        new BsonDocument
        {
            { "$group", new BsonDocument
                {
                    { "_id", new BsonDocument
                        {
                            { "Action", new BsonDocument
                                {
                                    { "$ifNull", new BsonArray
                                        {
                                            new BsonString("$Action"),
                                            new BsonString("$ExMsg"),
                                            //new BsonDocument
                                            //{
                                            //    {
                                            //        "$substr",new BsonArray
                                            //        {
                                            //            new BsonString("$ExMsg"),
                                            //            new BsonInt32(0),
                                            //            new BsonInt32(40),
                                            //        }
                                            //    }
                                            //}
                                        }
                                    }
                                }
                                //"$Action"
                            },
                            { "Date", new BsonDocument
                                {
                                    { "$dateToString", new BsonDocument
                                        {
                                            { "format", "%Y-%m-%d" },
                                            { "date", "$ExTime" }
                                        }
                                    }
                                }
                            },
                            { "Severity", "$Severity" }
                        }
                    },
                    { "Count", new BsonDocument { { "$sum", 1 } } }
                }
            }
        },
        //new BsonDocument
        //{
        //    { "$project",new BsonDocument
        //        {
        //            {"Action", "$_id.action" },
        //            {"Date","$_id.date" },
        //            {"Count",1 },
        //            {"id",1 },
        //        }
        //    }
        //},
        // $sort: most frequent first
        new BsonDocument
        {
            { "$sort", new BsonDocument
                {
                    { "Count", -1 },
                }
            }
        },
    };
    var command = new CommandDocument
    {
        { "aggregate", "ExceptionLog" },
        { "pipeline", pipeline }
    };
    MongoExceptionLogCommandResuilt result = null;
    var cmdResult = db.RunCommand(command);
    if (cmdResult.Ok)
    {
        if (cmdResult.Response != null)
        {
            // round-trip through JSON to map the raw response onto the result type
            result = JsonConvert.DeserializeObject<MongoExceptionLogCommandResuilt>(cmdResult.Response.ToJson());
            //result = JSON.Instance.
        }
    }
    return(result);
}
/// <summary>
/// Initializes a new MongoCommand from a pre-built command document.
/// </summary>
/// <param name="_CommandDocument">The command document to execute.</param>
/// <param name="_RunLevel">The level at which the command runs (see PathLv).</param>
public MongoCommand(CommandDocument _CommandDocument, PathLv _RunLevel)
{
    cmdDocument = _CommandDocument;
    RunLevel = _RunLevel;
    // no raw command string was supplied, so start with an empty one
    CommandString = String.Empty;
}
/// <summary>
/// Uploads a GridFS file.
/// </summary>
/// <param name="stream">The source stream.</param>
/// <param name="remoteFileName">The remote file name.</param>
/// <param name="createOptions">The create options.</param>
/// <returns>The file info of the new GridFS file.</returns>
public MongoGridFSFileInfo Upload(
    Stream stream,
    string remoteFileName,
    MongoGridFSCreateOptions createOptions
) {
    using (database.RequestStart()) {
        EnsureIndexes();

        var files_id = createOptions.Id ?? BsonObjectId.GenerateNewId();
        var chunkSize = createOptions.ChunkSize == 0 ? settings.ChunkSize : createOptions.ChunkSize;
        var readBuffer = new byte[chunkSize];
        var totalLength = 0;

        var chunkNumber = 0;
        while (true) {
            var bytesRead = stream.Read(readBuffer, 0, chunkSize);
            if (bytesRead == 0) {
                break; // end of stream
            }
            totalLength += bytesRead;

            var chunkData = readBuffer;
            if (bytesRead < chunkSize) {
                // final partial chunk: store only the bytes actually read
                chunkData = new byte[bytesRead];
                Buffer.BlockCopy(readBuffer, 0, chunkData, 0, bytesRead);
            }

            var chunkDocument = new BsonDocument {
                { "_id", BsonObjectId.GenerateNewId() },
                { "files_id", files_id },
                { "n", chunkNumber },
                { "data", new BsonBinaryData(chunkData) }
            };
            chunks.Insert(chunkDocument, settings.SafeMode);

            if (bytesRead < chunkSize) {
                break; // a partial chunk implies end of stream
            }
            chunkNumber++;
        }

        // ask the server to compute the file's MD5 from the stored chunks
        var md5Command = new CommandDocument {
            { "filemd5", files_id },
            { "root", settings.Root }
        };
        var md5Result = database.RunCommand(md5Command);
        var md5 = md5Result.Response["md5"].AsString;

        var uploadDate = createOptions.UploadDate == DateTime.MinValue ? DateTime.UtcNow : createOptions.UploadDate;
        var fileInfo = new BsonDocument {
            { "_id", files_id },
            { "filename", remoteFileName },
            { "length", totalLength },
            { "chunkSize", chunkSize },
            { "uploadDate", uploadDate },
            { "md5", md5 },
            { "contentType", createOptions.ContentType }, // optional
            { "aliases", BsonArray.Create((IEnumerable<string>) createOptions.Aliases) }, // optional
            { "metadata", createOptions.Metadata } // optional
        };
        files.Insert(fileInfo, settings.SafeMode);

        return FindOneById(files_id);
    }
}
/// <summary>
/// Uploads a GridFS file.
/// </summary>
/// <param name="stream">The source stream.</param>
/// <param name="remoteFileName">The remote file name.</param>
/// <param name="createOptions">The create options.</param>
/// <returns>The file info of the new GridFS file.</returns>
public MongoGridFSFileInfo Upload(
    Stream stream,
    string remoteFileName,
    MongoGridFSCreateOptions createOptions
) {
    using (database.RequestStart(false)) { // not slaveOk
        EnsureIndexes();
        var files_id = createOptions.Id ?? BsonObjectId.GenerateNewId();
        var chunkSize = createOptions.ChunkSize == 0 ? settings.ChunkSize : createOptions.ChunkSize;
        var buffer = new byte[chunkSize];
        // NOTE(review): length is int, so files over int.MaxValue bytes would
        // overflow — confirm file sizes stay below 2 GB for this code path
        var length = 0;
        string md5Client;
        using (var md5Algorithm = MD5.Create()) {
            for (int n = 0; true; n++) {
                // might have to call Stream.Read several times to get a whole chunk
                var bytesNeeded = chunkSize;
                var bytesRead = 0;
                while (bytesNeeded > 0) {
                    var partialRead = stream.Read(buffer, bytesRead, bytesNeeded);
                    if (partialRead == 0) {
                        break; // EOF may or may not have a partial chunk
                    }
                    bytesNeeded -= partialRead;
                    bytesRead += partialRead;
                }
                if (bytesRead == 0) {
                    break; // EOF no partial chunk
                }
                length += bytesRead;
                byte[] data = buffer;
                if (bytesRead < chunkSize) {
                    // partial chunk: copy so only the bytes actually read are stored
                    data = new byte[bytesRead];
                    Buffer.BlockCopy(buffer, 0, data, 0, bytesRead);
                }
                var chunk = new BsonDocument {
                    { "_id", BsonObjectId.GenerateNewId() },
                    { "files_id", files_id },
                    { "n", n },
                    { "data", new BsonBinaryData(data) }
                };
                chunks.Insert(chunk, settings.SafeMode);
                // fold this chunk into the client-side MD5 as we go
                md5Algorithm.TransformBlock(data, 0, data.Length, null, 0);
                if (bytesRead < chunkSize) {
                    break; // EOF after partial chunk
                }
            }
            md5Algorithm.TransformFinalBlock(new byte[0], 0, 0);
            md5Client = BsonUtils.ToHexString(md5Algorithm.Hash);
        }
        // ask the server to compute the MD5 of the stored chunks and verify
        // it matches what we hashed on the way up
        var md5Command = new CommandDocument {
            { "filemd5", files_id },
            { "root", settings.Root }
        };
        var md5Result = database.RunCommand(md5Command);
        var md5Server = md5Result.Response["md5"].AsString;
        if (!md5Client.Equals(md5Server, StringComparison.OrdinalIgnoreCase)) {
            throw new MongoGridFSException("Upload client and server MD5 hashes are not equal.");
        }
        var uploadDate = createOptions.UploadDate == DateTime.MinValue ? DateTime.UtcNow : createOptions.UploadDate;
        BsonDocument fileInfo = new BsonDocument {
            { "_id", files_id },
            { "filename", remoteFileName },
            { "length", length },
            { "chunkSize", chunkSize },
            { "uploadDate", uploadDate },
            { "md5", md5Server },
            { "contentType", createOptions.ContentType }, // optional
            { "aliases", BsonArray.Create(createOptions.Aliases) }, // optional
            { "metadata", createOptions.Metadata } // optional
        };
        files.Insert(fileInfo, settings.SafeMode);
        return FindOneById(files_id);
    }
}
/// <summary>
/// Ensures the capped log collection exists with the configured size, keeps
/// the descending-timestamp (TTL) index in sync with the ExpireAfter setting
/// (using collMod when the index already exists), and creates the
/// level/thread/logger query index.
/// </summary>
public void SetupCollection()
{
    var uri = new MongoUrl(ConnectionString);
    var client = new MongoClient(uri);
    MongoDatabase db = client.GetServer().GetDatabase(uri.DatabaseName);
    Int64 cappedSize;
    if (!Int64.TryParse(CappedSizeInMb, out cappedSize))
    {
        cappedSize = 5 * 1024L; // default 5 GB (cappedSize is in megabytes)
    }
    if (!db.CollectionExists(CollectionName))
    {
        CollectionOptionsBuilder options = CollectionOptions
            .SetCapped(true)
            .SetMaxSize(1024L * 1024L * cappedSize); // megabytes -> bytes
        db.CreateCollection(CollectionName, options);
    }
    LogCollection = db.GetCollection(CollectionName);
    var builder = new IndexOptionsBuilder();
    // conventional name of a descending single-field index on the timestamp
    const string ttlIndex = FieldNames.Timestamp + "_-1";
    var index = LogCollection.GetIndexes().SingleOrDefault(x => x.Name == ttlIndex);
    if (index != null)
    {
        if (ExpireAfter != null)
        {
            if (index.TimeToLive != ExpireAfter.ToTimeSpan())
            {
                // an existing index's TTL can only be changed via collMod,
                // not by re-creating the index
                var d = new CommandDocument()
                {
                    { "collMod", CollectionName },
                    { "index", new BsonDocument
                        {
                            { "keyPattern", new BsonDocument { { FieldNames.Timestamp, -1 } } },
                            { "expireAfterSeconds", (int)(ExpireAfter.ToTimeSpan().TotalSeconds) }
                        }
                    }
                };
                db.RunCommand(d);
            }
        }
    }
    else
    {
        if (ExpireAfter != null)
        {
            builder.SetTimeToLive(ExpireAfter.ToTimeSpan());
        }
        LogCollection.CreateIndex(IndexKeys.Descending(FieldNames.Timestamp), builder);
    }
    // secondary index to support the common level/thread/logger queries
    LogCollection.CreateIndex(IndexKeys
        .Ascending(FieldNames.Level, FieldNames.Thread, FieldNames.Loggername)
    );
}