Example #1
 public StreamConfigurationVectorEnumerator(StreamConfigurationVector collection)
 {
     collectionRef = collection;
     currentIndex  = -1;
     currentObject = null;
     currentSize   = collectionRef.Count;
 }
Example #2
 public StreamConfigurationVector(StreamConfigurationVector other) : this(CNTKLibPINVOKE.new_StreamConfigurationVector__SWIG_1(StreamConfigurationVector.getCPtr(other)), true)
 {
     if (CNTKLibPINVOKE.SWIGPendingException.Pending)
     {
         throw CNTKLibPINVOKE.SWIGPendingException.Retrieve();
     }
 }
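A minimal usage sketch of this copy constructor; the stream name "features" and dimension 100 are illustrative placeholders, not values from the examples on this page:

 // Hypothetical usage: copy an existing vector (placeholder contents).
 var original = new StreamConfigurationVector
 {
     new StreamConfiguration("features", 100)
 };
 var copy = new StreamConfigurationVector(original);   // copies the underlying native vector via the P/Invoke wrapper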
Example #3
 public void SetRange(int index, StreamConfigurationVector values)
 {
     CNTKLibPINVOKE.StreamConfigurationVector_SetRange(swigCPtr, index, StreamConfigurationVector.getCPtr(values));
     if (CNTKLibPINVOKE.SWIGPendingException.Pending)
     {
         throw CNTKLibPINVOKE.SWIGPendingException.Retrieve();
     }
 }
Example #4
        static CNTKDictionary CreateCtfDeserializer(string ctfFilePath, int outputShape)
        {
            // Configure a single dense target stream, then wrap it in a CTF deserializer.
            var ctfStreamConfigurationVector = new StreamConfigurationVector();

            ctfStreamConfigurationVector.Add(new StreamConfiguration(TargetsName, outputShape, isSparse: false));

            var targetCtfDeserializer = CTFDeserializer(ctfFilePath, ctfStreamConfigurationVector);

            return(targetCtfDeserializer);
        }
Example #5
        public static StreamConfigurationVector Repeat(StreamConfiguration value, int count)
        {
            global::System.IntPtr     cPtr = CNTKLibPINVOKE.StreamConfigurationVector_Repeat(StreamConfiguration.getCPtr(value), count);
            StreamConfigurationVector ret  = (cPtr == global::System.IntPtr.Zero) ? null : new StreamConfigurationVector(cPtr, true);

            if (CNTKLibPINVOKE.SWIGPendingException.Pending)
            {
                throw CNTKLibPINVOKE.SWIGPendingException.Retrieve();
            }
            return(ret);
        }
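Repeat builds a vector holding count copies of the same configuration. A brief sketch, assuming placeholder values for the stream name and dimension:

        // Hypothetical usage: three identical dense streams (all literals below are placeholders).
        var template = new StreamConfiguration("features", 100, isSparse: false);
        StreamConfigurationVector repeated = StreamConfigurationVector.Repeat(template, 3);
        Console.WriteLine(repeated.Count);   // prints 3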
Example #6
        public StreamConfigurationVector GetRange(int index, int count)
        {
            global::System.IntPtr     cPtr = CNTKLibPINVOKE.StreamConfigurationVector_GetRange(swigCPtr, index, count);
            StreamConfigurationVector ret  = (cPtr == global::System.IntPtr.Zero) ? null : new StreamConfigurationVector(cPtr, true);

            if (CNTKLibPINVOKE.SWIGPendingException.Pending)
            {
                throw CNTKLibPINVOKE.SWIGPendingException.Retrieve();
            }
            return(ret);
        }
Example #7
        MinibatchSource CreateMinibatchSource(string filePath, string featuresName, string targetsName,
                                              int numberOfClasses, int[] inputShape, bool randomize)
        {
            var inputSize            = inputShape.Aggregate((d1, d2) => d1 * d2); // sample size = product of the input shape's dimensions
            var streamConfigurations = new StreamConfigurationVector
            {
                new StreamConfiguration(featuresName, inputSize, isSparse: true),
                new StreamConfiguration(targetsName, numberOfClasses, isSparse: false)
            };

            var deserializer = CNTKLib.CTFDeserializer(filePath, streamConfigurations);

            var minibatchSourceConfig = new MinibatchSourceConfig(new DictionaryVector()
            {
                deserializer
            });

            return(CNTKLib.CreateCompositeMinibatchSource(minibatchSourceConfig));
        }
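A possible call to the helper above; the file path, stream names, class count, input shape, and minibatch size are placeholders rather than values from the original project:

            // Hypothetical invocation of CreateMinibatchSource with placeholder arguments.
            var source = CreateMinibatchSource("train.ctf", "features", "labels",
                                               numberOfClasses: 10, inputShape: new[] { 28, 28 }, randomize: true);
            var featureInfo = source.StreamInfo("features");
            var labelInfo   = source.StreamInfo("labels");
            var minibatch   = source.GetNextMinibatch(64, DeviceDescriptor.CPUDevice);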
Example #8
 internal static global::System.Runtime.InteropServices.HandleRef getCPtr(StreamConfigurationVector obj)
 {
     return((obj == null) ? new global::System.Runtime.InteropServices.HandleRef(null, global::System.IntPtr.Zero) : obj.swigCPtr);
 }
Example #9
        public static MinibatchSource TextFormatMinibatchSourceInternal(string dataFilePath, StreamConfigurationVector streamConfigs)
        {
            global::System.IntPtr cPtr = CNTKLibPINVOKE.MinibatchSource_TextFormatMinibatchSourceInternal__SWIG_4(dataFilePath, StreamConfigurationVector.getCPtr(streamConfigs));
            MinibatchSource       ret  = (cPtr == global::System.IntPtr.Zero) ? null : new MinibatchSource(cPtr, true);

            if (CNTKLibPINVOKE.SWIGPendingException.Pending)
            {
                throw CNTKLibPINVOKE.SWIGPendingException.Retrieve();
            }
            return(ret);
        }
Example #10
        public static MinibatchSource TextFormatMinibatchSourceInternal(string dataFilePath, StreamConfigurationVector streamConfigs, ulong epochSize, bool randomize, ulong randomizationWindow, bool sampleBasedRandomizationWindow)
        {
            global::System.IntPtr cPtr = CNTKLibPINVOKE.MinibatchSource_TextFormatMinibatchSourceInternal__SWIG_0(dataFilePath, StreamConfigurationVector.getCPtr(streamConfigs), epochSize, randomize, randomizationWindow, sampleBasedRandomizationWindow);
            MinibatchSource       ret  = (cPtr == global::System.IntPtr.Zero) ? null : new MinibatchSource(cPtr, true);

            if (CNTKLibPINVOKE.SWIGPendingException.Pending)
            {
                throw CNTKLibPINVOKE.SWIGPendingException.Retrieve();
            }
            return(ret);
        }
Example #11
        static void Main(string[] args)
        {
            var device = DeviceDescriptor.CPUDevice;


            int  numOutputClasses = 2;    // need to know these from the start
            int  inputDim         = 6000; // also these; int so it matches the StreamConfiguration and NDShape parameters
            uint batchSize        = 50;   // not sure how to make this increase to 100% of the file, for now

            // full path works "C:\\...
            //string CurrentFolder = Path.GetDirectoryName(System.Reflection.Assembly.GetEntryAssembly().Location);
            //string dataPath_test = "Data\\YXFFData6001Test.txt";
            //string dataPath = Path.Combine(CurrentFolder, dataPath_test);

            string dataPath_model = "C:\\Users\\ITRI\\Documents\\Programming\\Csharp\\Learning_CNTK\\Data\\mModelZ1.dnn";
            string dataPath_train = "C:\\Users\\ITRI\\Documents\\Programming\\Csharp\\Learning_CNTK\\Data\\YXFFData6001Train.txt";


            // load saved model
            Function model = Function.Load(dataPath_model, device);

            // the model output needs to be processed still
            // out = C.softmax(z)
            var modelOut = CNTKLib.Softmax(model);

            var feature_fromModel = modelOut.Arguments[0];
            var label_fromModel   = modelOut.Output;

            string featureStreamName = "features";
            string labelsStreamName  = "labels";

            var streamConfig = new StreamConfigurationVector {
                new StreamConfiguration(featureStreamName, inputDim),
                new StreamConfiguration(labelsStreamName, numOutputClasses)
            };

            var deserializerConfig_train = CNTKLib.CTFDeserializer(dataPath_train, streamConfig);


            //StreamConfigurationVector streams = new StreamConfigurationVector
            //{
            //new StreamConfiguration("feature", 100),
            //new StreamConfiguration("label", 10)
            //};
            //var deserializerConfiguration = CNTKLib.CTFDeserializer(ctfFilePath, streams);
            MinibatchSourceConfig MBconfig_train = new MinibatchSourceConfig(new List <CNTKDictionary> {
                deserializerConfig_train
            })
            {
                MaxSweeps = 1000,
                randomizationWindowInChunks  = 0,
                randomizationWindowInSamples = 100000,
            };


            var MBsource_train = CNTK.CNTKLib.CreateCompositeMinibatchSource(MBconfig_train);

            var featureStreamInfo_train = MBsource_train.StreamInfo(featureStreamName);
            var labelStreamInfo_train   = MBsource_train.StreamInfo(labelsStreamName);

            var nextBatch_train = MBsource_train.GetNextMinibatch(batchSize, device);

            var MBdensefeature_train = nextBatch_train[featureStreamInfo_train].data;
            var MBdenseLabel_train   = nextBatch_train[labelStreamInfo_train].data.GetDenseData <float>(label_fromModel);


            //Variable feature = modelOut.Arguments[0];
            //Variable label = Variable.InputVariable(new int[] { numOutputClasses }, DataType.Float);


            // define input and output variables and connect them to the stream configuration
            var feature = Variable.InputVariable(new NDShape(1, inputDim), DataType.Float, featureStreamName);
            var label   = Variable.InputVariable(new NDShape(1, numOutputClasses), DataType.Float, labelsStreamName);

            ////Step 2: define values, and variables
            //Variable x = Variable.InputVariable(new int[] { 1 }, DataType.Float, "input");
            //Variable y = Variable.InputVariable(new int[] { 1 }, DataType.Float, "output");

            ////Step 2: define training data set from table above
            //var xValues = Value.CreateBatch(new NDShape(1, 1), new float[] { 1f, 2f, 3f, 4f, 5f }, device);
            //var yValues = Value.CreateBatch(new NDShape(1, 1), new float[] { 3f, 5f, 7f, 9f, 11f }, device);

            //var features = Value.CreateBatch(NDShape sampleShape, IEnumerable<T> batch, DeviceDescriptor device);
            //Value.CreateBatch(inputDim,  ,device);


            // prepare the training data

            //var featureStreamInfo = minibatchSource_train.StreamInfo(featureStreamName);
            //var labelStreamInfo = minibatchSource_train.StreamInfo(labelsStreamName);

            //var minibatchData = minibatchSource_train.GetNextMinibatch((uint)batchSize, device);


            //input
            Variable inputVar = modelOut.Arguments.Single();

            var inputDataMap = new Dictionary <Variable, Value>();

            inputDataMap.Add(inputVar, MBdensefeature_train);


            //output
            var      outputDataMap = new Dictionary <Variable, Value>();
            Variable outputVar     = modelOut.Output;

            outputDataMap.Add(outputVar, null);


            // evaluate with loaded data
            modelOut.Evaluate(inputDataMap, outputDataMap, device);

            var outputData = outputDataMap[outputVar].GetDenseData <float>(outputVar);

            var actualLabels = outputData.Select((IList <float> l) => l.IndexOf(l.Max())).ToList();
            //var loss = CNTKLib.CrossEntropyWithSoftmax(classifierOutput, labelVariable);
            //var evalError = CNTKLib.ClassificationError(classifierOutput, labelVariable);
            IList <int> expectedLabels = MBdenseLabel_train.Select(l => l.IndexOf(1.0F)).ToList();

            int misMatches = actualLabels.Zip(expectedLabels, (a, b) => a.Equals(b) ? 0 : 1).Sum();

            int labelsLength = actualLabels.Count;

            string correctness(bool comparison)
            {
                if (comparison)
                {
                    return("Correct prediction");
                }
                else
                {
                    return("Incorrect prediction");
                }
            }

            for (int i = 0; i < labelsLength; i++)
            {
                Console.WriteLine($"{i+1}.\tPredicted value:  {actualLabels[i]};\tExpected value:  {expectedLabels[i]};\t{correctness(actualLabels[i] == expectedLabels[i])}.");
            }

            Console.WriteLine($"Validating Model: Total Samples = {batchSize}, Misclassify Count = {misMatches}.");



            Console.Write("Success");
        }