/// <summary>
/// Returns true when <paramref name="tft"/> is one of the assigning transfer
/// operations (assign-with-copy or assign-with-take); false for all others.
/// </summary>
public static bool IsAssign(this TransferFuncType tft) =>
    // Fully qualify the enum members: the original unqualified case labels
    // only compile under a `using static` directive.
    tft == TransferFuncType.AssignWithCopy || tft == TransferFuncType.AssignWithTake;
/// <summary>
/// Transfers this value from <paramref name="src"/> to <paramref name="dest"/>:
/// first copies the raw native bytes, then lets each field's Swift type run its
/// own transfer at the field's offset.
/// </summary>
protected internal override unsafe void Transfer(void *dest, void *src, TransferFuncType funcType)
{
	// Blit the whole native representation in one shot.
	var byteCount = NativeDataSize;
	Buffer.MemoryCopy(src, dest, byteCount, byteCount);

	// Give every field a chance to fix up its portion (ref counts, handles, ...).
	foreach (var field in NativeFields) {
		var offset = field.Offset;
		field.SwiftType.Transfer((byte *)dest + offset, (byte *)src + offset, funcType);
	}
}
// Resets this neuron to a freshly-constructed state: zeroed signals, a random
// Gaussian bias, the given transfer function, and empty connection lists.
private void INIT(TransferFuncType tFuncType)
{
	// Zero out all forward/backward signal state.
	input = 0;
	output = 0;
	deltaBack = 0;
	deltaBias = 0;

	// Random initial bias; bias updates are allowed by default.
	bias = Gaussian.GetRandomGaussian();
	biasAllowed = true;

	this.tFuncType = tFuncType;

	// Connections are wired up later by the callers.
	incomingConnection = new List<int>();
	outgoingConnection = new List<int>();
}
/// <summary>
/// Applies the transfer function selected by <paramref name="tfuncType"/> to
/// <paramref name="x"/> and returns the result. SOFTMAX only computes exp(x)
/// here (normalization presumably happens at the layer level — not visible in
/// this method).
/// </summary>
/// <exception cref="FormatException">Thrown for unrecognized transfer types.</exception>
public static double Evaluate(TransferFuncType tfuncType, double x) => tfuncType switch
{
	TransferFuncType.NONE            => None(x),
	TransferFuncType.SIGMOID         => Sigmoid(x),
	TransferFuncType.RATIONALSIGMOID => RationalSigmoid(x),
	TransferFuncType.FASTSIGMOID     => FastSigmoid(x),
	TransferFuncType.GAUSSIAN        => Gaussian(x),
	TransferFuncType.TANH            => TANH(x),
	TransferFuncType.LINEAR          => Linear(x),
	TransferFuncType.RECTILINEAR     => RectiLinear(x),
	TransferFuncType.SOFTMAX         => Math.Exp(x),
	_ => throw new FormatException("Invalid Input Provided To TransferFunction.Evaluate()"),
};
/// <summary>
/// Transfers a CustomViewData value, keeping the wrapped View's reference
/// count correct: a copy adds a reference on the source's view, an assign
/// releases the destination's old view before it is overwritten.
/// </summary>
protected internal override unsafe void Transfer(void *dest, void *src, TransferFuncType funcType)
{
	// Manage ref counts: copy semantics retain the source's view...
	if (funcType.IsCopy()) {
		((CustomViewData *)src)->View.AddRef();
	}
	// ...and assignment semantics release the view being overwritten.
	if (funcType.IsAssign()) {
		((CustomViewData *)dest)->View.UnRef();
	}
	// This must come after the above, since AddRef might modify the GCHandle
	// that we're copying here — ordering is load-bearing, do not move.
	base.Transfer(dest, src, funcType);
}
/// <summary>
/// Creates a neuron that is fully connected to every neuron of
/// <paramref name="previousLayer"/> (when one is given), registers the new
/// connections in the shared maps, and registers the neuron itself under the
/// next free index in <paramref name="neurons"/>.
/// </summary>
public Neuron(TransferFuncType tFuncType, Layer previousLayer, ref Dictionary<int, Neuron> neurons, ref int neuronCounter, ref Dictionary<int, Connection> connections, ref int connectionCounter, double learningRate, double weightRescaleFactor)
{
	INIT(tFuncType);

	if (previousLayer != null) {
		// One incoming connection per neuron of the previous layer.
		foreach (int srcIdx in previousLayer.neuronIdxs) {
			Connection link = new Connection(ref connections, ref connectionCounter, learningRate, weightRescaleFactor);
			// This neuron will be registered as `neuronCounter` below.
			link.srcDest[srcIdx] = neuronCounter;
			incomingConnection.Add(link.Idx);
			neurons[srcIdx].outgoingConnection.Add(link.Idx);
		}
	}

	// Claim the next free neuron index and register ourselves.
	Idx = neuronCounter;
	neurons[neuronCounter++] = this;
}
TransferFunc GetTransferFunc(TransferFuncType type) => type switch {
/// <summary>
/// Creates a detached neuron with the given transfer function; connections
/// are wired up separately by the caller.
/// </summary>
public Neuron(TransferFuncType tFuncType) => INIT(tFuncType);
/// <summary>
/// Builds a layer on top of <paramref name="previousLayer"/> and wires up its
/// connections according to the concrete type of <paramref name="layerData"/>:
/// RELU (one-to-one), FullyConnected (cross), Convolutional (shared-weight
/// filters), or MaxPool (pooling windows). New neurons/connections are
/// registered in the shared maps and numbered by the ref counters.
/// </summary>
/// <exception cref="Exception">Thrown for an unrecognized layerData type.</exception>
public Layer(Layer previousLayer, object layerData, ref Dictionary<int, Neuron> neurons, ref int neuronCounter, ref Dictionary<int, Connection> connections, ref int connectionCounter, double learningRate, double weightRescaleFactor)
{
	// Form connections based upon layer type
	if (layerData is LayerData.RELU) {
		// One-To-One Connections: one rectifier neuron per previous neuron,
		// joined by a fixed-weight (weight = 1) connection.
		neuronIdxs = new List<int>();
		tFuncType = TransferFuncType.RECTILINEAR;
		foreach (int prevNeuronIdx in previousLayer.neuronIdxs) {
			Neuron neuron = new Neuron(tFuncType);
			neuron.biasAllowed = false;  // pure pass-through rectifier, no bias
			Connection connection = new Connection(ref connections, ref connectionCounter, learningRate, weightRescaleFactor, false);
			connection.weight = 1;
			neuron.Idx = neuronCounter;
			connection.srcDest[prevNeuronIdx] = neuron.Idx;
			neuron.incomingConnection.Add(connection.Idx);
			neurons[prevNeuronIdx].outgoingConnection.Add(connection.Idx);
			neurons[neuronCounter++] = neuron;
			neuronIdxs.Add(neuron.Idx);
		}
	} else if (layerData is LayerData.FullyConnected) {
		// Cross Connections: each new neuron connects to every previous neuron
		// (that wiring happens inside the Neuron constructor).
		LayerData.FullyConnected currLayerData = (LayerData.FullyConnected)layerData;
		tFuncType = currLayerData.tFuncType;
		neuronIdxs = new List<int>();
		for (int i = 0; i < currLayerData.cntNeurons; i++) {
			Neuron neuron = new Neuron(tFuncType, previousLayer, ref neurons, ref neuronCounter, ref connections, ref connectionCounter, learningRate, weightRescaleFactor);
			neuronIdxs.Add(neuron.Idx);
		}
	} else if (layerData is LayerData.Convolutional) {
		LayerData.Convolutional currLayerData = (LayerData.Convolutional)layerData;
		tFuncType = TransferFuncType.LINEAR;
		neuronIdxs = new List<int>();
		// Previous layer is treated as a square dimIn x dimIn grid.
		// NOTE(review): assumes previousLayer.cntNeurons is a perfect square — confirm.
		int dimIn = (int)Math.Sqrt(previousLayer.cntNeurons);

		// Form connections for each filter
		foreach (int filter in currLayerData.filters) {
			// With padding the kernel is centered on the output cell
			// ([-filter/2 .. filter/2]); without it the kernel's top-left
			// corner sits on the output cell ([0 .. filter-1]).
			int filterStartIdx, filterEndIdx;
			if (currLayerData.padding) {
				filterStartIdx = -filter / 2;
				filterEndIdx = filter / 2;
			} else {
				filterStartIdx = 0;
				filterEndIdx = filter - 1;
			}

			// Pre-create ONE connection per kernel cell; re-registering the same
			// connection for every window implements weight sharing.
			// Keyed by a row-major hash of the (k1, k2) kernel offset.
			Dictionary<int, int> filterConnections = new Dictionary<int, int>();
			for (int k1 = filterStartIdx; k1 <= filterEndIdx; k1++) {
				for (int k2 = filterStartIdx; k2 <= filterEndIdx; k2++) {
					Connection connection = new Connection(ref connections, ref connectionCounter, learningRate, weightRescaleFactor);
					int hashIdx = (k1 + filter / 2) * filter + (k2 + filter / 2);
					connection.weight = 1;
					connection.updateAllowed = false;  // shared filter weight is frozen here
					filterConnections[hashIdx] = connection.Idx;
				}
			}

			// Zero padding is introduced for area which is not completly overlapped by filter
			// (without padding the loop bound shrinks so the kernel never runs off the grid).
			for (int i = 0; i + (currLayerData.padding ? 0 : filter - 1) < dimIn; i += currLayerData.stride) {
				for (int j = 0; j + (currLayerData.padding ? 0 : filter - 1) < dimIn; j += currLayerData.stride) {
					// One output neuron per window position; bias disabled.
					Neuron neuron = new Neuron(tFuncType);
					neuron.Idx = neuronCounter;
					neuron.biasAllowed = false;
					neurons[neuronCounter++] = neuron;
					neuronIdxs.Add(neuron.Idx);
					for (int k1 = filterStartIdx; k1 <= filterEndIdx; k1++) {
						for (int k2 = filterStartIdx; k2 <= filterEndIdx; k2++) {
							// GetIndex returns -1 for out-of-grid taps (the zero-padding case).
							int idx = GetIndex(i + k1, j + k2, dimIn, previousLayer);
							if (idx == -1) { continue; }
							int hashIdx = (k1 + filter / 2) * filter + (k2 + filter / 2);
							int cIdx = filterConnections[hashIdx];
							connections[cIdx].srcDest[idx] = neuron.Idx;
							neurons[idx].outgoingConnection.Add(cIdx);
							neurons[neuron.Idx].incomingConnection.Add(cIdx);
						}
					}
				}
			}
		}
	} else if (layerData is LayerData.MaxPool) {
		LayerData.MaxPool currLayerData = (LayerData.MaxPool)layerData;
		this.tFuncType = TransferFuncType.MAXPOOL;
		neuronIdxs = new List<int>();
		// NOTE(review): same square-grid assumption as the convolutional branch.
		int dimIn = (int)Math.Sqrt(previousLayer.cntNeurons);

		// Zero padding is introduced for area which is not completly overlapped by filter
		for (int i = 0; i < dimIn; i += currLayerData.stride) {
			for (int j = 0; j < dimIn; j += currLayerData.stride) {
				// One pooling neuron per window; bias disabled.
				Neuron neuron = new Neuron(tFuncType);
				neuron.biasAllowed = false;
				neuron.Idx = neuronCounter;
				neurons[neuronCounter++] = neuron;
				neuronIdxs.Add(neuron.Idx);

				// Unlike the convolutional branch, each window gets its OWN set of
				// fixed-weight connections (no sharing across windows).
				Dictionary<int, int> filterConnections = new Dictionary<int, int>();
				for (int k1 = 0; k1 < currLayerData.size; k1++) {
					for (int k2 = 0; k2 < currLayerData.size; k2++) {
						Connection connection = new Connection(ref connections, ref connectionCounter, learningRate, weightRescaleFactor, false);
						connection.weight = 1;
						int hashIdx = k1 * currLayerData.size + k2;
						filterConnections[hashIdx] = connection.Idx;
					}
				}

				// Form new connections
				for (int k1 = 0; k1 < currLayerData.size; k1++) {
					for (int k2 = 0; k2 < currLayerData.size; k2++) {
						int idx = GetIndex(i + k1, j + k2, dimIn, previousLayer);
						if (idx == -1) { continue; }
						int hashIdx = k1 * currLayerData.size + k2;
						int cIdx = filterConnections[hashIdx];
						connections[cIdx].srcDest[idx] = neuron.Idx;
						neurons[idx].outgoingConnection.Add(cIdx);
						neurons[neuron.Idx].incomingConnection.Add(cIdx);
					}
				}
			}
		}
	} else {
		// NOTE(review): message mentions Layer.FormConnections but this is the
		// constructor — likely a leftover from a refactor; verify before changing.
		throw new Exception("Invalid LayerConnectionStyle given to Layer.FormConnections !!!!");
	}
	cntNeurons = neuronIdxs.Count();
}
/// <summary>
/// Creates an empty layer using the given transfer function; neurons are
/// added to it later by the caller.
/// </summary>
public Layer(TransferFuncType tFuncType)
{
	this.tFuncType = tFuncType;
	neuronIdxs = new List<int>();
	cntNeurons = 0;
}