// Use this for initialization
void Start()
{
    // Choose the predictor implementation according to the inspector flag:
    // hierarchical variant needs a confidence threshold, plain one does not.
    predictor = useHierarchicalNGram
        ? new HierarchNGram<char>(nValue, threshold) as INGram<char>
        : new NGram<char>(nValue) as INGram<char>;

    // Fresh buffer for the observed character sequence.
    listOfChars = new List<char>();

    // Reset the prediction statistics.
    right = 0;
    wrong = 0;

    // No prediction until the first observation arrives.
    prediction = default(char);
}
/// <summary>
/// Creates a fresh, empty n-gram of the same concrete kind as
/// <paramref name="ingram"/> (bigrams are rebuilt over a new inner unigram).
/// </summary>
/// <param name="ingram">The n-gram whose kind to clone; must not be null.</param>
/// <returns>A new instance of the matching kind, or null for an unknown kind.</returns>
/// <exception cref="ArgumentNullException"><paramref name="ingram"/> is null.</exception>
public static INGram GetNGram(this INGram ingram)
{
    // Extension methods can be invoked on a null receiver; fail with a clear
    // error instead of a NullReferenceException inside GetType().
    if (ingram == null)
    {
        throw new ArgumentNullException(nameof(ingram));
    }

    // Use Name (the simple type name) rather than ToString(): ToString()
    // returns the namespace-qualified name, which can never match the bare
    // case labels below. ToLowerInvariant avoids culture-sensitive casing
    // (e.g. the Turkish dotless 'i' would break "Unigram".ToLower()).
    switch (ingram.GetType().Name.ToLowerInvariant())
    {
        case "unigram": return new Unigram();
        case "filteredunigram": return new FilteredUnigram();
        case "bigram": return new Bigram(new Unigram());
        case "filteredbigram": return new Bigram(new FilteredUnigram());
        default: return null;
    }
}
/// <summary>
/// Builds a classifier for <paramref name="characteristic"/> over
/// <paramref name="vocabulary"/>. When <paramref name="trainedNetwork"/> is
/// null a new neural network is created; otherwise it is loaded from the file.
/// </summary>
/// <param name="trainedNetwork">Optional Encog network file to load; may be null.</param>
/// <param name="characteristic">The target characteristic; must have a name and at least one possible value.</param>
/// <param name="vocabulary">The non-empty feature vocabulary.</param>
/// <param name="inGram">The n-gram tokenizer; must not be null.</param>
/// <exception cref="ArgumentException">An argument is invalid, or the network file cannot be loaded.</exception>
public Classifier(FileInfo trainedNetwork, Characteristic characteristic, List <VocabularyWord> vocabulary, INGram inGram)
{
    // Validate everything up front so a half-initialized instance is never
    // observable. string.IsNullOrEmpty also covers a null Name, which the
    // original Name.Equals("") check would have crashed on.
    if (characteristic == null || string.IsNullOrEmpty(characteristic.Name)
        || characteristic.PossibleValues == null || characteristic.PossibleValues.Count == 0)
    {
        throw new ArgumentException("A characteristic with a name and at least one possible value is required.", nameof(characteristic));
    }
    if (vocabulary == null || vocabulary.Count == 0)
    {
        throw new ArgumentException("A non-empty vocabulary is required.", nameof(vocabulary));
    }
    if (inGram == null)
    {
        throw new ArgumentException("An n-gram implementation is required.", nameof(inGram));
    }

    this._characteristic = characteristic;
    this._vocabulary = vocabulary;
    // Network topology is derived from the data: one input per vocabulary
    // word, one output per possible characteristic value.
    _inputLayerSize = vocabulary.Count;
    _outputLayerSize = characteristic.PossibleValues.Count;
    this._inGram = inGram;

    if (trainedNetwork == null)
    {
        _network = CreateNeuralNetwork();
    }
    else
    {
        try
        {
            _network = (BasicNetwork)EncogDirectoryPersistence.LoadObject(trainedNetwork);
        }
        catch (PersistError ex)
        {
            // Still surface as ArgumentException (callers catch that type),
            // but keep the Encog failure as the inner exception for diagnosis.
            throw new ArgumentException("The trained network file could not be loaded.", nameof(trainedNetwork), ex);
        }
    }
}
/// <summary>
/// Creates a classifier with a freshly built neural network (no pre-trained
/// network file) by delegating to the main constructor with a null file.
/// </summary>
public Classifier(Characteristic characteristic, List <VocabularyWord> vocabulary, INGram inGram) : this(null, characteristic, vocabulary, inGram) { }
/// <summary>
/// Creates a vocabulary builder that tokenizes input with the given n-gram.
/// </summary>
/// <param name="inGram">The n-gram tokenizer; must not be null.</param>
/// <exception cref="ArgumentNullException"><paramref name="inGram"/> is null.</exception>
public VocabularyBuilder(INGram inGram)
{
    // Pass the parameter name so the exception points at the offending argument.
    _inGram = inGram ?? throw new ArgumentNullException(nameof(inGram));
}
/// <summary>
/// Creates a bigram that composes the given lower-order n-gram.
/// </summary>
/// <param name="nGram">The inner n-gram; must not be null.</param>
/// <exception cref="ArgumentNullException"><paramref name="nGram"/> is null.</exception>
public Bigram(INGram nGram)
{
    // Pass the parameter name so the exception points at the offending argument.
    _nGram = nGram ?? throw new ArgumentNullException(nameof(nGram));
}