/// <summary>
/// Resolves each configured feature name to its column index in the ARFF
/// attribute list, loads the training instances from <c>this.filename</c>,
/// and builds a J48 decision tree over them.
/// </summary>
public void Build() {
    this.featureIndex = new int[this.features.Count];
    for (int i = 0; i < this.features.Count; i++) {
        // Map the feature name onto its position in the ARFF header.
        int idx = System.Array.IndexOf(Extractor.ArffAttributeLabels, (string)this.features[i]);
        // Preserve legacy behavior: an unknown feature name maps to index 0
        // (the original loop left the default int value in place).
        // NOTE(review): silently defaulting to 0 hides configuration typos — consider throwing instead.
        this.featureIndex[i] = (idx >= 0) ? idx : 0;
    }

    // The Instances constructor consumes the whole reader, so it is safe to
    // dispose it immediately afterwards (the original version leaked it).
    using (StreamReader reader = new StreamReader(this.filename)) {
        instances = new Instances(reader);
    }

    // The class attribute sits immediately after the feature columns.
    instances.Class = instances.attribute(this.features.Count);

    classifier = new J48();
    classifier.buildClassifier(instances);
}
/* Use when the player logs in to initially create the classifier with data from server */
public void InitializeClassifier(String dataString)
{
    try
    {
        // Feed the ARFF payload received from the server through Weka's
        // Java-style reader chain and parse it into a dataset.
        java.io.BufferedReader dataReader =
            new java.io.BufferedReader(new java.io.StringReader(dataString));
        playerData = new weka.core.Instances(dataReader);

        // If the data itself does not declare which attribute is the class,
        // fall back to the conventional choice: the last attribute.
        if (playerData.classIndex() == -1)
            playerData.setClassIndex(playerData.numAttributes() - 1);

        // Alternatives tried during development: NaiveBayes,
        // MultilayerPerceptron (hidden layers "12"), J48, IB1.
        // Random forest is the one currently in use.
        classifier = new weka.classifiers.trees.RandomForest();
        classifier.buildClassifier(playerData);
        Debug.Log("Initialized Classifier");
    }
    catch (java.lang.Exception ex)
    {
        Debug.LogError(ex.getMessage());
    }
}
/// <summary> Evaluates a classifier with the options given in an array of
/// strings. <p/>
///
/// Valid options are: <p/>
/// -t name of training file (required unless -l is given) <br/>
/// -T name of test file; if missing, a cross-validation is performed <br/>
/// -c class index (1, 2, ...; "first"/"last"; default: last) <br/>
/// -x number of cross-validation folds (default: 10) <br/>
/// -s random number seed for cross-validation (default: 1) <br/>
/// -m file with cost matrix <br/>
/// -l model input file (loads a serialized classifier) <br/>
/// -d model output file (saves the trained classifier) <br/>
/// -v outputs no statistics for the training data <br/>
/// -o outputs statistics only, not the classifier <br/>
/// -i outputs detailed information-retrieval statistics per class <br/>
/// -k outputs information-theoretic statistics <br/>
/// -p outputs predictions for test instances (and nothing else) <br/>
/// -r outputs cumulative margin distribution (and nothing else) <br/>
/// -g outputs the graph representation (Drawable classifiers only) <p/>
/// </summary>
/// <param name="classifier">machine learning classifier
/// </param>
/// <param name="options">the array of string containing the options
/// </param>
/// <throws> Exception if model could not be evaluated successfully </throws>
/// <returns> a string describing the results
/// </returns>
public static System.String evaluateModel(Classifier classifier, System.String[] options)
{
    Instances train = null, tempTrain, test = null, template = null;
    int seed = 1, folds = 10, classIndex = - 1;
    System.String trainFileName, testFileName, sourceClass, classIndexString, seedString, foldsString, objectInputFileName, objectOutputFileName, attributeRangeString;
    bool noOutput = false, printClassifications = false, trainStatistics = true, printMargins = false, printComplexityStatistics = false, printGraph = false, classStatistics = false, printSource = false;
    System.Text.StringBuilder text = new System.Text.StringBuilder();
    System.IO.StreamReader trainReader = null, testReader = null;
    // java.io.ObjectInputStream was ported to BinaryReader; the raw stream is
    // kept separately so BinaryFormatter can deserialize from it directly.
    System.IO.BinaryReader objectInputStream = null;
    System.IO.Stream objectStream = null;
    CostMatrix costMatrix = null;
    System.Text.StringBuilder schemeOptionsText = null; // retained from the Java port; unused here
    Range attributesToOutput = null;
    long trainTimeStart = 0, trainTimeElapsed = 0, testTimeStart = 0, testTimeElapsed = 0;
    Classifier classifierBackup;
    try
    {
        // ---- Get basic options (options the same for all schemes) ----
        classIndexString = Utils.getOption('c', options);
        if (classIndexString.Length != 0)
        {
            if (classIndexString.Equals("first"))
                classIndex = 1;
            else if (classIndexString.Equals("last"))
                classIndex = - 1; // -1 means "use the last attribute"
            else
                classIndex = System.Int32.Parse(classIndexString);
        }
        trainFileName = Utils.getOption('t', options);
        objectInputFileName = Utils.getOption('l', options);
        objectOutputFileName = Utils.getOption('d', options);
        testFileName = Utils.getOption('T', options);
        if (trainFileName.Length == 0)
        {
            // No training data: we must at least have a model file AND a test file.
            if (objectInputFileName.Length == 0)
            {
                throw new System.Exception("No training file and no object " + "input file given.");
            }
            if (testFileName.Length == 0)
            {
                throw new System.Exception("No training file and no test " + "file given.");
            }
        }
        else if ((objectInputFileName.Length != 0) && ((!(classifier is UpdateableClassifier)) || (testFileName.Length == 0)))
        {
            // Combining -t and -l only makes sense for incremental classifiers
            // that will also be tested against a separate file.
            throw new System.Exception("Classifier not incremental, or no " + "test file provided: can't " + "use both train and model file.");
        }
        try
        {
            if (trainFileName.Length != 0)
            {
                // Port of java.io.FileReader/BufferedReader: the file is opened
                // twice so the outer reader can reuse the detected encoding.
                trainReader = new System.IO.StreamReader(new System.IO.StreamReader(trainFileName, System.Text.Encoding.Default).BaseStream, new System.IO.StreamReader(trainFileName, System.Text.Encoding.Default).CurrentEncoding);
            }
            if (testFileName.Length != 0)
            {
                testReader = new System.IO.StreamReader(new System.IO.StreamReader(testFileName, System.Text.Encoding.Default).BaseStream, new System.IO.StreamReader(testFileName, System.Text.Encoding.Default).CurrentEncoding);
            }
            if (objectInputFileName.Length != 0)
            {
                objectStream = new System.IO.FileStream(objectInputFileName, System.IO.FileMode.Open, System.IO.FileAccess.Read);
                if (objectInputFileName.EndsWith(".gz"))
                {
                    // Transparently decompress gzipped model files.
                    objectStream = new ICSharpCode.SharpZipLib.GZip.GZipInputStream(objectStream);
                }
                objectInputStream = new System.IO.BinaryReader(objectStream);
            }
        }
        catch (System.Exception e)
        {
            throw new System.Exception("Can't open file " + e.Message + '.');
        }
        if (testFileName.Length != 0)
        {
            // Read only the header (1 instance buffered); instances are
            // streamed one at a time later. 'template' aliases 'test'.
            template = test = new Instances(testReader, 1);
            if (classIndex != - 1)
            {
                test.ClassIndex = classIndex - 1; // user-facing index is 1-based
            }
            else
            {
                test.ClassIndex = test.numAttributes() - 1;
            }
            if (classIndex > test.numAttributes())
            {
                throw new System.Exception("Index of class attribute too large.");
            }
        }
        if (trainFileName.Length != 0)
        {
            if ((classifier is UpdateableClassifier) && (testFileName.Length != 0))
            {
                // Incremental training: stream instances instead of loading all.
                train = new Instances(trainReader, 1);
            }
            else
            {
                train = new Instances(trainReader);
            }
            template = train;
            if (classIndex != - 1)
            {
                train.ClassIndex = classIndex - 1;
            }
            else
            {
                train.ClassIndex = train.numAttributes() - 1;
            }
            if ((testFileName.Length != 0) && !test.equalHeaders(train))
            {
                throw new System.ArgumentException("Train and test file not compatible!");
            }
            if (classIndex > train.numAttributes())
            {
                throw new System.Exception("Index of class attribute too large.");
            }
        }
        if (template == null)
        {
            throw new System.Exception("No actual dataset provided to use as template");
        }
        seedString = Utils.getOption('s', options);
        if (seedString.Length != 0)
        {
            seed = System.Int32.Parse(seedString);
        }
        foldsString = Utils.getOption('x', options);
        if (foldsString.Length != 0)
        {
            folds = System.Int32.Parse(foldsString);
        }
        costMatrix = handleCostOption(Utils.getOption('m', options), template.numClasses());
        classStatistics = Utils.getFlag('i', options);
        noOutput = Utils.getFlag('o', options);
        trainStatistics = !Utils.getFlag('v', options);
        printComplexityStatistics = Utils.getFlag('k', options);
        printMargins = Utils.getFlag('r', options);
        printGraph = Utils.getFlag('g', options);
        sourceClass = Utils.getOption('z', options);
        printSource = (sourceClass.Length != 0);
        // Check -p option (its meaning changed in Weka; give a helpful error).
        try
        {
            attributeRangeString = Utils.getOption('p', options);
        }
        catch (System.Exception e)
        {
            throw new System.Exception(e.Message + "\nNOTE: the -p option has changed. " + "It now expects a parameter specifying a range of attributes " + "to list with the predictions. Use '-p 0' for none.");
        }
        if (attributeRangeString.Length != 0)
        {
            // If no test file given, we cannot print predictions.
            if (testFileName.Length == 0)
                throw new System.Exception("Cannot print predictions ('-p') without test file ('-T')!");
            printClassifications = true;
            if (!attributeRangeString.Equals("0"))
                attributesToOutput = new Range(attributeRangeString);
        }
        // If no training file given, we don't have any priors.
        if ((trainFileName.Length == 0) && (printComplexityStatistics))
            throw new System.Exception("Cannot print complexity statistics ('-k') without training file ('-t')!");
        // If a model file is given, we can't process scheme-specific options.
        if (objectInputFileName.Length != 0)
        {
            Utils.checkForRemainingOptions(options);
        }
        else
        {
            // The Java original forwarded remaining options to OptionHandler
            // classifiers here; that port was never completed, so any
            // scheme-specific options simply fall through to the check below.
        }
        Utils.checkForRemainingOptions(options);
    }
    catch (System.Exception e)
    {
        throw new System.Exception("\nWeka exception: " + e.Message + makeOptionString(classifier));
    }

    // ---- Set up evaluation objects (empty copies of the template header) ----
    Evaluation trainingEvaluation = new Evaluation(new Instances(template, 0), costMatrix);
    Evaluation testingEvaluation = new Evaluation(new Instances(template, 0), costMatrix);

    if (objectInputFileName.Length != 0)
    {
        testingEvaluation.useNoPriors();
        // Load classifier from file via BinaryFormatter (port of
        // ObjectInputStream.readObject).
        try
        {
            BinaryFormatter formatter = new BinaryFormatter();
            classifier = (Classifier)formatter.Deserialize(objectStream);
        }
        catch (Exception e)
        {
            Console.WriteLine("Failed to deserialize. Reason: " + e.Message);
            throw;
        }
        finally
        {
            objectStream.Close();
        }
        objectInputStream.Close();
    }

    // Backup of fully setup classifier for cross-validation.
    classifierBackup = Classifier.makeCopy(classifier);

    // ---- Build the classifier if no object file provided ----
    if ((classifier is UpdateableClassifier) && (testFileName.Length != 0) && (costMatrix == null) && (trainFileName.Length != 0))
    {
        // Build classifier incrementally, streaming one instance at a time.
        trainingEvaluation.Priors = train;
        testingEvaluation.Priors = train;
        // (Ticks - 621355968000000000) / 10000 == milliseconds since the Unix
        // epoch, mirroring Java's System.currentTimeMillis().
        trainTimeStart = (System.DateTime.Now.Ticks - 621355968000000000) / 10000;
        if (objectInputFileName.Length == 0)
        {
            classifier.buildClassifier(train);
        }
        while (train.readInstance(trainReader))
        {
            trainingEvaluation.updatePriors(train.instance(0));
            testingEvaluation.updatePriors(train.instance(0));
            ((UpdateableClassifier) classifier).updateClassifier(train.instance(0));
            train.delete(0);
        }
        trainTimeElapsed = (System.DateTime.Now.Ticks - 621355968000000000) / 10000 - trainTimeStart;
        trainReader.Close();
    }
    else if (objectInputFileName.Length == 0)
    {
        // Build classifier in one go on a copy of the training data.
        tempTrain = new Instances(train);
        trainingEvaluation.Priors = tempTrain;
        testingEvaluation.Priors = tempTrain;
        trainTimeStart = (System.DateTime.Now.Ticks - 621355968000000000) / 10000;
        classifier.buildClassifier(tempTrain);
        trainTimeElapsed = (System.DateTime.Now.Ticks - 621355968000000000) / 10000 - trainTimeStart;
    }

    // ---- Save the classifier if an object output file is provided ----
    if (objectOutputFileName.Length != 0)
    {
        System.IO.Stream os = new System.IO.FileStream(objectOutputFileName, System.IO.FileMode.Create);
        if (objectOutputFileName.EndsWith(".gz"))
        {
            os = new ICSharpCode.SharpZipLib.GZip.GZipOutputStream(os);
        }
        // NOTE(review): the BinaryFormatter writes to 'os' directly; the
        // BinaryWriter wrapper only exists to flush/close the stream.
        System.IO.BinaryWriter objectOutputStream = new System.IO.BinaryWriter(os);
        BinaryFormatter bformatter = new BinaryFormatter();
        bformatter.Serialize(os, classifier);
        objectOutputStream.Flush();
        objectOutputStream.Close();
    }

    // If classifier is drawable, output string describing graph.
    if ((classifier is Drawable) && (printGraph))
    {
        return ((Drawable) classifier).graph();
    }

    // Output the classifier as equivalent source.
    if ((classifier is Sourcable) && (printSource))
    {
        return wekaStaticWrapper((Sourcable) classifier, sourceClass);
    }

    // Output test instance predictions only.
    if (printClassifications)
    {
        return toPrintClassifications(classifier, new Instances(template, 0), testFileName, classIndex, attributesToOutput);
    }

    // Output model description unless suppressed.
    if (!(noOutput || printMargins))
    {
        text.Append("\n" + classifier.ToString() + "\n");
    }

    if (!printMargins && (costMatrix != null))
    {
        text.Append("\n=== Evaluation Cost Matrix ===\n\n").Append(costMatrix.ToString());
    }

    // ---- Compute error estimate from training data ----
    if ((trainStatistics) && (trainFileName.Length != 0))
    {
        if ((classifier is UpdateableClassifier) && (testFileName.Length != 0) && (costMatrix == null))
        {
            // Classifier was trained incrementally, so we have to
            // reopen the training data in order to test on it.
            trainReader = new System.IO.StreamReader(new System.IO.StreamReader(trainFileName, System.Text.Encoding.Default).BaseStream, new System.IO.StreamReader(trainFileName, System.Text.Encoding.Default).CurrentEncoding);
            // Incremental testing, one instance at a time.
            train = new Instances(trainReader, 1);
            if (classIndex != - 1)
            {
                train.ClassIndex = classIndex - 1;
            }
            else
            {
                train.ClassIndex = train.numAttributes() - 1;
            }
            testTimeStart = (System.DateTime.Now.Ticks - 621355968000000000) / 10000;
            while (train.readInstance(trainReader))
            {
                trainingEvaluation.evaluateModelOnce((Classifier) classifier, train.instance(0));
                train.delete(0);
            }
            testTimeElapsed = (System.DateTime.Now.Ticks - 621355968000000000) / 10000 - testTimeStart;
            trainReader.Close();
        }
        else
        {
            testTimeStart = (System.DateTime.Now.Ticks - 621355968000000000) / 10000;
            trainingEvaluation.evaluateModel(classifier, train);
            testTimeElapsed = (System.DateTime.Now.Ticks - 621355968000000000) / 10000 - testTimeStart;
        }

        // Print the results of the training evaluation.
        if (printMargins)
        {
            return trainingEvaluation.toCumulativeMarginDistributionString();
        }
        else
        {
            text.Append("\nTime taken to build model: " + Utils.doubleToString(trainTimeElapsed / 1000.0, 2) + " seconds");
            text.Append("\nTime taken to test model on training data: " + Utils.doubleToString(testTimeElapsed / 1000.0, 2) + " seconds");
            text.Append(trainingEvaluation.toSummaryString("\n\n=== Error on training" + " data ===\n", printComplexityStatistics));
            if (template.classAttribute().Nominal)
            {
                if (classStatistics)
                {
                    text.Append("\n\n" + trainingEvaluation.toClassDetailsString());
                }
                text.Append("\n\n" + trainingEvaluation.toMatrixString());
            }
        }
    }

    // ---- Compute proper error estimates ----
    if (testFileName.Length != 0)
    {
        // Testing is on the supplied test data, streamed instance by instance.
        while (test.readInstance(testReader))
        {
            testingEvaluation.evaluateModelOnce((Classifier) classifier, test.instance(0));
            test.delete(0);
        }
        testReader.Close();
        text.Append("\n\n" + testingEvaluation.toSummaryString("=== Error on test data ===\n", printComplexityStatistics));
    }
    else if (trainFileName.Length != 0)
    {
        // Testing is via cross-validation on training data.
        System.Random random = new System.Random((System.Int32) seed);
        // Use untrained (!) classifier for cross-validation.
        classifier = Classifier.makeCopy(classifierBackup);
        testingEvaluation.crossValidateModel(classifier, train, folds, random);
        if (template.classAttribute().Numeric)
        {
            text.Append("\n\n\n" + testingEvaluation.toSummaryString("=== Cross-validation ===\n", printComplexityStatistics));
        }
        else
        {
            text.Append("\n\n\n" + testingEvaluation.toSummaryString("=== Stratified " + "cross-validation ===\n", printComplexityStatistics));
        }
    }
    if (template.classAttribute().Nominal)
    {
        if (classStatistics)
        {
            text.Append("\n\n" + testingEvaluation.toClassDetailsString());
        }
        text.Append("\n\n" + testingEvaluation.toMatrixString());
    }
    return text.ToString();
}
/// <summary>
/// Normalizes, shuffles and missing-value-patches <c>insts</c>, trains
/// <paramref name="cl"/> on the first <c>percentSplit</c> percent of the data,
/// classifies the single pending instance <c>ins</c>, saves the model to
/// "mymodel.model", and measures accuracy on the held-out remainder.
/// </summary>
/// <param name="cl">Weka classifier to train and evaluate.</param>
/// <returns>"accuracyPercent;predictedLabel" — accuracy is "-1" on failure.</returns>
public async Task<string> classifyTest(weka.classifiers.Classifier cl)
{
    string a = "";
    double rate = 0;
    try
    {
        System.Console.WriteLine("Performing " + percentSplit + "% split evaluation.");

        // Scale all numeric attributes into a common range.
        weka.filters.Filter normalized = new weka.filters.unsupervised.attribute.Normalize();
        normalized.setInputFormat(insts);
        insts = weka.filters.Filter.useFilter(insts, normalized);

        // Randomize the order of the instances so the split is unbiased.
        weka.filters.Filter myRandom = new weka.filters.unsupervised.instance.Randomize();
        myRandom.setInputFormat(insts);
        insts = weka.filters.Filter.useFilter(insts, myRandom);

        // Replace missing values before training.
        weka.filters.Filter replaceMissingValues = new weka.filters.unsupervised.attribute.ReplaceMissingValues();
        replaceMissingValues.setInputFormat(insts);
        insts = weka.filters.Filter.useFilter(insts, replaceMissingValues);

        int trainSize = insts.numInstances() * percentSplit / 100;
        int testSize = insts.numInstances() - trainSize;
        weka.core.Instances train = new weka.core.Instances(insts, 0, trainSize);

        cl.buildClassifier(train);

        // Label the single pending instance and capture its printable class value.
        double label = cl.classifyInstance(ins);
        ins.setClassValue(label);
        a = ins.toString(ins.numAttributes() - 1);

        // Persist the trained model for later reuse.
        weka.core.SerializationHelper.write("mymodel.model", cl);

        // Accuracy over the held-out portion.
        int numCorrect = 0;
        for (int i = trainSize; i < insts.numInstances(); i++)
        {
            weka.core.Instance currentInst = insts.instance(i);
            double predictedClass = cl.classifyInstance(currentInst);
            if (predictedClass == insts.instance(i).classValue())
            {
                numCorrect++;
            }
        }
        // Guard the 0/0 case (percentSplit == 100) which previously yielded NaN.
        rate = (testSize > 0) ? (double)numCorrect / (double)testSize * 100.0 : 0.0;
    }
    catch (java.lang.Exception)
    {
        // Best-effort: signal failure through the rate instead of crashing the caller.
        rate = -1;
    }
    // BUG FIX: '??' binds looser than '+', so the original "x + a ?? \"\""
    // coalesced the whole (never-null) concatenation — a no-op. The intent
    // was to default 'a' itself to the empty string.
    return rate.ToString() + ";" + (a ?? "");
}
/// <summary>
/// Loads the test dataset, builds an SMO (PolyKernel) classifier from its Weka command
/// line, evaluates it with a percentage split and with stratified cross-validation, and
/// finally prints an actual-vs-predicted listing for every instance.
/// </summary>
public static void GenerarModelo()
{
    // Load the data source and mark the last attribute as the class if the
    // file itself does not specify one.
    DataSource source = new DataSource(DirectorioDatosPrueba());
    Instances data = source.getDataSet();
    if (data.classIndex() == -1)
    {
        data.setClassIndex(data.numAttributes() - 1);
    }

    // Alternative classifier command lines kept for experimentation:
    //   weka.classifiers.bayes.BayesNet  (K2 / HillClimber / TAN local search)
    //   weka.classifiers.bayes.NaiveBayes
    //   weka.classifiers.trees.J48 -C 0.25 -M 2
    //   weka.classifiers.trees.RandomForest -P 100 -I 100 -num-slots 1 -K 0 -M 1.0 -V 0.001 -S 1
    //   weka.classifiers.lazy.IBk -K 1 -W 0 -A "weka.core.neighboursearch.LinearNNSearch ..."
    //   weka.classifiers.functions.MultilayerPerceptron -L 0.3 -M 0.2 -N 500 -V 0 -S 0 -E 20 -H a
    //   weka.classifiers.functions.SMO with RBFKernel / NormalizedPolyKernel / Puk kernels
    Classifier scheme = CrearClasificador(
        "weka.classifiers.functions.SMO -C 1.0 -L 0.001 -P 1.0E-12 -N 0 -V -1 -W 1 -K \"weka.classifiers.functions.supportVector.PolyKernel -E 1.0 -C 250007\" -calibrator \"weka.classifiers.functions.Logistic -R 1.0E-8 -M -1 -num-decimal-places 4\"");

    EvaluarPercentageSplit(scheme, data, 80);
    EvaluarCrossValidation(scheme, data, 15);
    ClasificarInstancias(scheme, data);
}

/// <summary>Instantiates a Weka classifier from a full command-line specification.</summary>
/// <param name="commandLine">Class name followed by the classifier's options.</param>
private static Classifier CrearClasificador(string commandLine)
{
    // First token is the class name; the remainder are its options (Weka's
    // Utils.forName convention expects the class-name slot blanked out).
    string[] options = Utils.splitOptions(commandLine);
    string className = options[0];
    options[0] = string.Empty;
    return (Classifier)Utils.forName(java.lang.Class.forName("weka.classifiers.Classifier"), className, options);
}

/// <summary>Evaluates a copy of <paramref name="scheme"/> on a shuffled train/test percentage split.</summary>
private static void EvaluarPercentageSplit(Classifier scheme, Instances data, int trainingPercent)
{
    int trainSize = (int)Math.Round(data.numInstances() * ((double)trainingPercent / (double)100));
    int testSize = data.numInstances() - trainSize;
    // Shuffle so the split is not biased by the file's instance order.
    data.randomize(new java.util.Random(DateTime.Now.Ticks));
    Instances train = new Instances(data, 0, trainSize);
    Instances test = new Instances(data, trainSize, testSize);
    // Train on the training partition, evaluate on the held-out partition.
    Classifier clsCopyTest = AbstractClassifier.makeCopy(scheme);
    clsCopyTest.buildClassifier(train);
    Evaluation evalTest = new Evaluation(data);
    evalTest.evaluateModel(clsCopyTest, test);
    ImprimirMetricas(evalTest);
}

/// <summary>Evaluates a copy of <paramref name="scheme"/> with stratified k-fold cross-validation.</summary>
private static void EvaluarCrossValidation(Classifier scheme, Instances data, int folds)
{
    java.util.Random rand = new java.util.Random(DateTime.Now.Ticks);
    data.randomize(rand);
    // Stratify so every fold has roughly the same class distribution.
    if (data.classAttribute().isNominal())
    {
        data.stratify(folds);
    }
    Evaluation eval = new Evaluation(data);
    for (int n = 0; n < folds; n++)
    {
        // BUG FIX: the original loop built and evaluated every fold on the
        // percentage-split 'train'/'test' sets, so all folds scored the same
        // model on the same data; use this fold's own partitions instead.
        Instances train = data.trainCV(folds, n);
        Instances test = data.testCV(folds, n);
        Classifier clsCopy = AbstractClassifier.makeCopy(scheme);
        clsCopy.buildClassifier(train);
        eval.evaluateModel(clsCopy, test);
    }
    ImprimirMetricas(eval);
    Console.WriteLine(eval.toMatrixString("=== Overall Confusion Matrix ===\n"));
}

/// <summary>Writes the standard set of evaluation metrics to the console.</summary>
private static void ImprimirMetricas(Evaluation eval)
{
    Console.WriteLine();
    Console.WriteLine("Correct % = " + eval.pctCorrect());
    Console.WriteLine("Incorrect % = " + eval.pctIncorrect());
    Console.WriteLine("AUC = " + eval.areaUnderROC(1));
    Console.WriteLine("kappa = " + eval.kappa());
    Console.WriteLine("MAE = " + eval.meanAbsoluteError());
    Console.WriteLine("RMSE = " + eval.rootMeanSquaredError());
    Console.WriteLine("RAE = " + eval.relativeAbsoluteError());
    Console.WriteLine("RRSE = " + eval.rootRelativeSquaredError());
    Console.WriteLine("Precision = " + eval.precision(1));
    Console.WriteLine("Recall = " + eval.recall(1));
    Console.WriteLine("fMeasure = " + eval.fMeasure(1));
    Console.WriteLine("Error Rate = " + eval.errorRate());
}

/// <summary>Trains on the full dataset and prints actual vs. predicted class per instance.</summary>
private static void ClasificarInstancias(Classifier scheme, Instances data)
{
    // Predict over the last attribute.
    data.setClassIndex(data.numAttributes() - 1);
    int numClasses = data.numClasses();
    for (int i = 0; i < numClasses; i++)
    {
        String classValue = data.classAttribute().value(i);
        Console.WriteLine("Class Value " + i + " is " + classValue);
    }
    scheme.buildClassifier(data);
    // NOTE(review): the original intended to score a separate dataset ("data2");
    // until that is wired up, predictions run over the training data itself.
    Console.WriteLine("===================");
    Console.WriteLine("Actual Class, Predicted Class");
    for (int i = 0; i < data.numInstances(); i++)
    {
        double actualClass = data.instance(i).classValue();
        String actual = data.classAttribute().value((int)actualClass);
        // Copy the instance and blank its class so the model cannot see the answer.
        Instance newInst = (Instance)data.instance(i).copy();
        newInst.setClassMissing();
        double predNB = scheme.classifyInstance(newInst);
        String predString = data.classAttribute().value((int)predNB);
        Console.WriteLine(actual + ", " + predString);
    }
}
/// <summary>
/// Builds the MITes data-collection form: loads the XML protocol/sensor/configuration
/// files, trains (or loads) the Weka J48 activity classifier from the given ARFF file,
/// opens the USB / Bluetooth / built-in reception channels, initializes the GUI, and
/// starts the data-collection timers.
/// </summary>
/// <param name="dataDirectory">Directory where session data and CSV output are stored.</param>
/// <param name="arffFile">ARFF training file used to build the activity classifier.</param>
/// <param name="isHierarchical">NOTE(review): not referenced anywhere in this constructor body — confirm it is still needed.</param>
public MITesDataCollectionForm(string dataDirectory, string arffFile, bool isHierarchical)
{
    //where data is being stored
    this.dataDirectory = dataDirectory;
    //Initialize high resolution unix timer
    UnixTime.InitializeTime();
    //Initialize and start GUI progress thread
    progressMessage = null;
    aProgressThread = new Thread(new ThreadStart(ProgressThread));
    aProgressThread.Start();

    #region Load Configuration files
    //load the activity and sensor configuration files
    progressMessage = "Loading XML protocol and sensors ...";
    AXML.Reader reader = new AXML.Reader(Constants.MASTER_DIRECTORY, dataDirectory);
    // XML schema validation is only available on the desktop build.
#if (!PocketPC)
    if (reader.validate() == false)
    {
        throw new Exception("Error Code 0: XML format error - activities.xml does not match activities.xsd!");
    }
    else
    {
#endif
        this.annotation = reader.parse();
        this.annotation.DataDirectory = dataDirectory;
        SXML.Reader sreader = new SXML.Reader(Constants.MASTER_DIRECTORY, dataDirectory);
#if (!PocketPC)
        if (sreader.validate() == false)
        {
            throw new Exception("Error Code 0: XML format error - sensors.xml does not match sensors.xsd!");
        }
        else
        {
#endif
            this.sensors = sreader.parse(Constants.MAX_CONTROLLERS);
            progressMessage += " Completed\r\n";
            //TODO: remove BT components
            progressMessage += "Loading configuration file ...";
            MITesFeatures.core.conf.ConfigurationReader creader = new MITesFeatures.core.conf.ConfigurationReader(dataDirectory);
            this.configuration = creader.parse();
            progressMessage += " Completed\r\n";
#if (!PocketPC)
        }
    }
#endif
    #endregion Load Configuration files

    #region Initialize External Data Reception Channels
    //Initialize 1 master decoder
    this.masterDecoder = new MITesDecoder();
    //Initialize the software mode
    isExtracting = false;
    isCollectingDetailedData = false;
    isPlotting = true;
    isClassifying = true;

    #region Initialize Feature Extraction
    this.isExtracting = false;
    // NOTE(review): both branches below call Extractor.Initialize with identical
    // arguments (the commented-out line suggests the first branch once used
    // mitesDecoders[0]) — confirm whether the branches should still differ.
    if (this.sensors.TotalReceivers > 0) // if there is at least 1 MIT
        //Extractor.Initialize(this.mitesDecoders[0], dataDirectory, this.annotation, this.sensors, this.configuration);
        Extractor.Initialize(this.masterDecoder, dataDirectory, this.annotation, this.sensors, this.configuration);
    else if (this.sensors.Sensors.Count > 0) // only built in
        Extractor.Initialize(this.masterDecoder, dataDirectory, this.annotation, this.sensors, this.configuration);
    #endregion Initialize Feature Extraction

    // Load the training instances; the class attribute is the one following the
    // feature columns listed in Extractor.ArffAttributeLabels.
    labelIndex = new Hashtable();
    instances = new Instances(new StreamReader(arffFile));
    instances.Class = instances.attribute(Extractor.ArffAttributeLabels.Length);
    classifier = new J48();
    // Train from scratch and cache the model as XML, or rebuild from the cached
    // model when one exists.
    if (!File.Exists("model.xml"))
    {
        classifier.buildClassifier(instances);
        TextWriter tc = new StreamWriter("model.xml");
        classifier.toXML(tc);
        tc.Flush();
        tc.Close();
    }
    else
        // NOTE(review): buildClassifier(string, Instances) is not a stock Weka
        // overload — presumably a project extension that loads the XML model; verify.
        classifier.buildClassifier("model.xml", instances);
    //setup the feature vector
    fvWekaAttributes = new FastVector(Extractor.ArffAttributeLabels.Length + 1);
    for (int i = 0; (i < Extractor.ArffAttributeLabels.Length); i++)
        fvWekaAttributes.addElement(new weka.core.Attribute(Extractor.ArffAttributeLabels[i]));
    // Build the class attribute values: one composite label per row of category
    // labels, joined with underscores.
    FastVector fvClassVal = new FastVector();
    labelCounters = new int[((AXML.Category)this.annotation.Categories[0]).Labels.Count + 1];
    activityLabels = new string[((AXML.Category)this.annotation.Categories[0]).Labels.Count + 1];
    for (int i = 0; (i < ((AXML.Category)this.annotation.Categories[0]).Labels.Count); i++)
    {
        labelCounters[i] = 0;
        string label = "";
        int j = 0;
        // All but the last category get a trailing underscore separator ...
        for (j = 0; (j < this.annotation.Categories.Count - 1); j++)
            label += ((AXML.Label)((AXML.Category)this.annotation.Categories[j]).Labels[i]).Name.Replace(' ', '_') + "_";
        // ... and j now indexes the last category, appended without a separator.
        label += ((AXML.Label)((AXML.Category)this.annotation.Categories[j]).Labels[i]).Name.Replace(' ', '_');
        activityLabels[i] = label;
        labelIndex.Add(label, i);
        fvClassVal.addElement(label);
    }
    weka.core.Attribute ClassAttribute = new weka.core.Attribute("activity", fvClassVal);
    isClassifying = true;
    this.aMITesActivityCounters = new Hashtable();

    // Skip external-receiver setup when the only sensor is the built-in one.
    if (!((this.sensors.Sensors.Count == 1) && (this.sensors.HasBuiltinSensors)))
    {
        //Initialize arrays to store USB and Bluetooth controllers
        this.mitesControllers = new MITesReceiverController[this.sensors.TotalWiredReceivers];
#if (PocketPC)
        this.bluetoothControllers = new BluetoothController[this.sensors.TotalBluetoothReceivers];
        //this.ts = new Thread[this.sensors.TotalBluetoothReceivers];
#endif
        //Initialize array to store Bluetooth connection status
        //this.bluetoothConnectionStatus = new bool[this.sensors.TotalBluetoothReceivers];
        //Initialize a decoder for each sensor
        this.mitesDecoders = new MITesDecoder[this.sensors.TotalReceivers];
#if (PocketPC)
        #region Bluetooth reception channels initialization
        //Initialize and search for wockets connections
        progressMessage += "Initializing Bluetooth receivers ... searching " + this.sensors.TotalBluetoothReceivers + " BT receivers\r\n";
        //Try to initialize all Bluetooth receivers 10 times then exit
        int initializationAttempt = 0;
        while (initializationAttempt <= 10)
        {
            if (InitializeBluetoothReceivers() == false)
            {
                initializationAttempt++;
                if (initializationAttempt == 10)
                {
                    // Give up after 10 failed attempts and hard-kill the process.
                    MessageBox.Show("Exiting: Some Bluetooth receivers in your configuration were not initialized.");
                    Application.Exit();
                    System.Diagnostics.Process.GetCurrentProcess().Kill();
                }
                else
                    progressMessage += "Failed to initialize all BT connections. Retrying (" + initializationAttempt + ")...\r\n";
            }
            else
                break;
            Thread.Sleep(2000);
        }
        #endregion Bluetooth reception channels initialization
#endif
        #region USB reception channels initialization
        if (InitializeUSBReceivers() == false)
        {
            MessageBox.Show("Exiting: Some USB receivers in your configuration were not initialized.");
#if (PocketPC)
            Application.Exit();
            System.Diagnostics.Process.GetCurrentProcess().Kill();
#else
            Environment.Exit(0);
#endif
        }
        #endregion USB reception channels initialization
    }
    //}
    #endregion Initialize External Data Reception Channels

#if (PocketPC)
    #region Initialize Builtin Data Reception Channels
    if (InitializeBuiltinReceivers() == false)
    {
        MessageBox.Show("Exiting: A built in receiver channel was not found.");
        Application.Exit();
        System.Diagnostics.Process.GetCurrentProcess().Kill();
    }
    #endregion Initialize Builtin Data Reception Channels
#endif

    #region Initialize GUI Components
    //initialize the interface components
    InitializeComponent();
    //Initialize GUI timers
    progressMessage += "Initializing Timers ...";
    InitializeTimers();
    progressMessage += " Completed\r\n";
    //Initialize different GUI components
    progressMessage += "Initializing GUI ...";
    InitializeInterface();
    progressMessage += " Completed\r\n";
    this.isPlotting = true;
    //count the number of accelerometers
    if (this.sensors.IsHR)
        this.maxPlots = this.sensors.Sensors.Count - 1;
    else
        this.maxPlots = this.sensors.Sensors.Count;
    SetFormPositions();
    // NOTE(review): both branches construct the plotter with identical arguments —
    // presumably the device type or decoder was meant to differ; confirm.
    if (this.sensors.TotalReceivers > 0)
        aMITesPlotter = new MITesScalablePlotter(this.panel1, MITesScalablePlotter.DeviceTypes.IPAQ, maxPlots, this.masterDecoder, GetGraphSize(false));
    else
        aMITesPlotter = new MITesScalablePlotter(this.panel1, MITesScalablePlotter.DeviceTypes.IPAQ, maxPlots, this.masterDecoder, GetGraphSize(false));
    //Override the resize event
#if (PocketPC)
    this.Resize += new EventHandler(OnResize);
#else
    this.form1.Resize += new EventHandler(OnResizeForm1);
    this.form1.FormClosing += new FormClosingEventHandler(form_FormClosing);
    this.form2.Resize += new EventHandler(OnResizeForm2);
    this.form2.FormClosing += new FormClosingEventHandler(form_FormClosing);
    this.form3.Resize += new EventHandler(OnResizeForm3);
    this.form3.FormClosing += new FormClosingEventHandler(form_FormClosing);
    this.form4.Resize += new EventHandler(OnResizeForm4);
    this.form4.FormClosing += new FormClosingEventHandler(form_FormClosing);
#endif
    //Initialize the quality interface
    progressMessage += "Initializing MITes Quality GUI ...";
    InitializeQualityInterface();
    progressMessage += " Completed\r\n";
    //Remove classifier tabs
#if (PocketPC)
    this.tabControl1.TabPages.RemoveAt(4);
    this.tabControl1.SelectedIndex = 0;
#else
    this.ShowForms();
#endif
    #endregion Initialize GUI Components

    #region Initialize Quality Tracking variables
    InitializeQuality();
    #endregion Initialize Quality Tracking variables

    #region Initialize Logging
    InitializeLogging(dataDirectory);
    #endregion Initialize Logging

    #region Initialize CSV Storage (PC Only)
#if (!PocketPC)
    //create some counters for activity counts
    averageX = new int[this.sensors.MaximumSensorID + 1];
    averageY = new int[this.sensors.MaximumSensorID + 1];
    averageZ = new int[this.sensors.MaximumSensorID + 1];
    averageRawX = new int[this.sensors.MaximumSensorID + 1];
    averageRawY = new int[this.sensors.MaximumSensorID + 1];
    averageRawZ = new int[this.sensors.MaximumSensorID + 1];
    prevX = new int[this.sensors.MaximumSensorID + 1];
    prevY = new int[this.sensors.MaximumSensorID + 1];
    prevZ = new int[this.sensors.MaximumSensorID + 1];
    acCounters = new int[this.sensors.MaximumSensorID + 1];
    activityCountWindowSize = 0;
    // One CSV writer per sensor plus the two summary files.
    activityCountCSVs = new StreamWriter[this.sensors.MaximumSensorID + 1];
    samplingCSVs = new StreamWriter[this.sensors.MaximumSensorID + 1];
    averagedRaw = new StreamWriter[this.sensors.MaximumSensorID + 1];
    masterCSV = new StreamWriter(dataDirectory + "\\MITesSummaryData.csv");
    hrCSV = new StreamWriter(dataDirectory + "\\HeartRate_MITes.csv");
    string csv_line1 = "UnixTimeStamp,TimeStamp,X,Y,Z";
    string csv_line2 = "UnixTimeStamp,TimeStamp,Sampling";
    string hr_csv_header = "UnixTimeStamp,TimeStamp,HR";
    string master_csv_header = "UnixTimeStamp,TimeStamp";
    foreach (Category category in this.annotation.Categories)
        master_csv_header += "," + category.Name;
    // Open per-sensor CSV files and extend the master header with that sensor's columns.
    foreach (Sensor sensor in this.sensors.Sensors)
    {
        int sensor_id = Convert.ToInt32(sensor.ID);
        string location = sensor.Location.Replace(' ', '-');
        if (sensor_id > 0) //exclude HR
        {
            activityCountCSVs[sensor_id] = new StreamWriter(dataDirectory + "\\MITes_" + sensor_id.ToString("00") + "_ActivityCount_" + location + ".csv");
            activityCountCSVs[sensor_id].WriteLine(csv_line1);
            averagedRaw[sensor_id] = new StreamWriter(dataDirectory + "\\MITes_" + sensor_id.ToString("00") + "_1s-RawMean_" + location + ".csv");
            averagedRaw[sensor_id].WriteLine(csv_line1);
            samplingCSVs[sensor_id] = new StreamWriter(dataDirectory + "\\MITes_" + sensor_id.ToString("00") + "_SampleRate_" + location + ".csv");
            samplingCSVs[sensor_id].WriteLine(csv_line2);
            master_csv_header += ",MITes" + sensor_id.ToString("00") + "_SR," + "MITes" + sensor_id.ToString("00") + "_AVRaw_X," + "MITes" + sensor_id.ToString("00") + "_AVRaw_Y," + "MITes" + sensor_id.ToString("00") + "_AVRaw_Z," + "MITes" + sensor_id.ToString("00") + "_AC_X," + "MITes" + sensor_id.ToString("00") + "_AC_Y," + "MITes" + sensor_id.ToString("00") + "_AC_Z";
        }
    }
    master_csv_header += ",HR";
    this.masterCSV.WriteLine(master_csv_header);
    this.hrCSV.WriteLine(hr_csv_header);
#endif
    #endregion Initialize CSV Storage (PC Only)

    #region Start Collecting Data
    //if (this.sensors.TotalReceivers > 0)
    //    isStartedReceiver = true;
    //Start the built in polling thread
#if (PocketPC)
    if (this.sensors.HasBuiltinSensors)
    {
        this.pollingThread = new Thread(new ThreadStart(this.pollingData));
        this.pollingThread.Priority = ThreadPriority.Lowest;
        this.pollingThread.Start();
    }
#endif
    //Terminate the progress thread
    progressThreadQuit = true;
    //Enable all timer functions
    this.readDataTimer.Enabled = true;
    this.qualityTimer.Enabled = true;
    if (this.sensors.IsHR)
        this.HRTimer.Enabled = true;
    #endregion Start Collecting Data
}