//NLP Answer Nonsense -> NLP answer Offense model
/// <summary>
/// Builds an offensiveness-estimation request for the NLP model from a completed
/// answer-nonsense check: the user's answer is forwarded as the text to score.
/// </summary>
/// <param name="newAnswerNonsenseCheck">Nonsense-check result carrying the answer, question id and user id.</param>
public OffensivenessModelRequest(NewAnswerNonsenseCheck newAnswerNonsenseCheck)
{
    this.question = newAnswerNonsenseCheck.answer;
    this.question_id = newAnswerNonsenseCheck.question_id;
    this.msg_id = ServerUtilities.getAndGenerateMsgIDForGivenAnswerOffensive(
        newAnswerNonsenseCheck.user_id,
        newAnswerNonsenseCheck.answer,
        newAnswerNonsenseCheck.question_id);
    // Literal lowercase instead of "ESTIMATE_OFFENSIVENESS".ToLower():
    // ToLower() is culture-sensitive, and under e.g. the Turkish locale the
    // 'I' characters would lowercase to dotless 'ı' ("estımate_offensıveness"),
    // silently breaking the protocol's action string.
    this.action = "estimate_offensiveness";
}
/// <summary>
/// Serializes an offensiveness-model request for the given nonsense-check result and
/// sends it to the NLP service over its websocket. If the NLP connection is missing
/// or not open, the request is queued for retry once the connection is available.
/// </summary>
/// <param name="newAnswerNonsenseCheck">Nonsense-check result to forward for offensiveness scoring.</param>
private void SendAnswerOffenseRequest(NewAnswerNonsenseCheck newAnswerNonsenseCheck)
{
    var request = new OffensivenessModelRequest(newAnswerNonsenseCheck);

    // TryGetValue replaces the original's triple lookup (ContainsKey + two indexer
    // reads). The original also checked `request != null`, which `new` can never
    // produce, so that dead check is dropped.
    if (connections.TryGetValue("NLP", out var nlpSocket)
        && nlpSocket != null
        && nlpSocket.State == WebSocketState.Open)
    {
        String json = JsonSerializer.Serialize(request);
        // Encode first and send the full byte buffer. The original passed json.Length
        // (a *character* count) as the segment's byte count, truncating the message
        // whenever the encoding emits more bytes than characters (any non-ASCII in UTF-8).
        byte[] payload = usedEncoding.GetBytes(json);
        // Fire-and-forget by design: completion of the send is not awaited here.
        nlpSocket.SendAsync(new ArraySegment<byte>(payload, 0, payload.Length),
            WebSocketMessageType.Text, true, CancellationToken.None);
    }
    else
    {
        // NLP connection unavailable — queue the work so it is retried later
        // with this same method as the callback.
        taskQueue.Enqueue(new NLPTask<NewAnswerNonsenseCheck>(newAnswerNonsenseCheck, SendAnswerOffenseRequest));
    }
}
/// <summary>
/// Handles a nonsense answer given by a certain user. A nonsense answer is treated
/// the same way as an offensive one: it is wrapped and handed to the offensive-answer
/// pipeline, which records it in the bad_answer table.
/// </summary>
/// <param name="newAnswerNonsenseCheck">The nonsense-check result to process.</param>
public static void ProcessNonsenseAnswer(NewAnswerNonsenseCheck newAnswerNonsenseCheck)
{
    var offenseCheck = new NewAnswerOffenseCheck(newAnswerNonsenseCheck);
    ProcessOffensiveAnswer(offenseCheck);
}