/// <summary>
/// Parses one RTP/JPEG payload fragment (header layout matches RFC 2435) and
/// appends its entropy-coded scan data to the current frame stream.
/// A fragment with offset 0 starts a new frame; the JPEG header is regenerated
/// whenever the frame parameters (type, Q, size, DRI, tables) change.
/// </summary>
/// <param name="timeOffset">Timestamp of the frame this fragment belongs to.</param>
/// <param name="byteSegment">RTP payload: main JPEG header followed by scan data.</param>
/// <param name="markerBit">RTP marker bit (not used by this parser).</param>
public override void Parse(TimeSpan timeOffset, ArraySegment <byte> byteSegment, bool markerBit)
{
    Debug.Assert(byteSegment.Array != null, "byteSegment.Array != null");

    if (byteSegment.Count < JpegHeaderSize)
    {
        throw new MediaPayloadParserException("Input data size is smaller than JPEG header size");
    }

    // Main JPEG header: skip the type-specific byte, then the 24-bit fragment
    // offset, type, Q factor and the dimensions (transmitted in 8-pixel units).
    int offset = byteSegment.Offset + 1;
    int fragmentOffset = BigEndianConverter.ReadUInt24(byteSegment.Array, offset);
    offset += 3;
    int type = byteSegment.Array[offset++];
    int q = byteSegment.Array[offset++];
    int width = byteSegment.Array[offset++] * 8;
    int height = byteSegment.Array[offset++] * 8;

    int dri = 0;
    // Types above 63 carry a 4-byte restart marker header; only the first two
    // bytes (the restart interval) are read, the remaining two are skipped.
    if (type > 63)
    {
        dri = BigEndianConverter.ReadUInt16(byteSegment.Array, offset);
        offset += 4;
    }

    if (fragmentOffset == 0)
    {
        // Fragment offset 0 starts a new frame; flush any frame accumulated so far.
        if (_frameStream.Position != 0)
        {
            GenerateFrame(timeOffset);
        }

        bool quantizationTablesChanged = false;

        // Q values above 127 indicate that quantization tables are sent in-band.
        if (q > 127)
        {
            int mbz = byteSegment.Array[offset];

            // The MBZ field must be zero for a valid quantization table header.
            if (mbz == 0)
            {
                _hasExternalQuantizationTable = true;

                int quantizationTablesLength = BigEndianConverter.ReadUInt16(byteSegment.Array, offset + 2);
                offset += 4;

                // Cache the tables; only mark changed when the bytes actually differ.
                if (!ArrayUtils.IsBytesEquals(byteSegment.Array, offset, quantizationTablesLength,
                    _quantizationTables, 0, _quantizationTablesLength))
                {
                    if (_quantizationTables.Length < quantizationTablesLength)
                    {
                        _quantizationTables = new byte[quantizationTablesLength];
                    }

                    Buffer.BlockCopy(byteSegment.Array, offset, _quantizationTables, 0, quantizationTablesLength);
                    _quantizationTablesLength = quantizationTablesLength;
                    quantizationTablesChanged = true;
                }

                offset += quantizationTablesLength;
            }
        }

        // Rebuild the cached JPEG header only when a frame parameter changed.
        if (quantizationTablesChanged || _currentType != type || _currentQ != q ||
            _currentFrameWidth != width || _currentFrameHeight != height || _currentDri != dri)
        {
            _currentType = type;
            _currentQ = q;
            _currentFrameWidth = width;
            _currentFrameHeight = height;
            _currentDri = dri;
            ReInitializeJpegHeader();
        }

        _frameStream.Write(_jpegHeaderBytesSegment.Array, _jpegHeaderBytesSegment.Offset, _jpegHeaderBytesSegment.Count);
    }

    // A non-initial fragment with no frame in progress means the first fragment
    // was lost; drop this fragment rather than emitting a corrupt frame.
    if (fragmentOffset != 0 && _frameStream.Position == 0)
    {
        return;
    }

    if (_frameStream.Position > JpegMaxSize)
    {
        throw new MediaPayloadParserException($"Jpeg frame is too large, more than {JpegMaxSize / (1024 * 1024)} Mb");
    }

    int dataSize = byteSegment.Offset + byteSegment.Count - offset;

    if (dataSize < 0)
    {
        throw new MediaPayloadParserException($"Invalid payload size: {dataSize}");
    }

    _frameStream.Write(byteSegment.Array, offset, dataSize);
}
/// <summary>
/// Trains the spatial pooler on the input image and writes the results to text files in folder @"/Output".
/// The result text files include a speed comparison between global inhibition and local inhibition and
/// the stability of the output array (by comparing Hamming distance arrays).
/// Finally this method draws an image of the active columns as a .png file.
/// </summary>
/// <param name="imageSize">Size of the image (image has same width and height)</param>
/// <param name="columnDimension">Sparse space size (same width and height)</param>
/// <param name="inputBinarizedFile">Input image after binarization</param>
/// <param name="hammingFile">Path to the Hamming distance output file</param>
/// <param name="outputSpeedFile">Path to the speed comparison output file (appended to)</param>
/// <param name="activeColumnFile">Path to the active column output file (as array text)</param>
/// <param name="outputImage">Path to the active column output file (as .png image)</param>
/// <param name="isGlobalInhibition">Use global inhibition if true, otherwise local inhibition</param>
private static void Training(int imageSize, int columnDimension, string inputBinarizedFile, string hammingFile, string outputSpeedFile, string activeColumnFile, string outputImage, bool isGlobalInhibition)
{
    int outputImageSize = 1024;
    int activeColumn = columnDimension * columnDimension;
    var stopwatch = new Stopwatch();
    using (StreamWriter swHamming = new StreamWriter(hammingFile))
    {
        using (StreamWriter swSpeed = new StreamWriter(outputSpeedFile, true))
        {
            using (StreamWriter swActiveColumn = new StreamWriter(activeColumnFile))
            {
                var parameters = SetupParameters(imageSize, columnDimension, isGlobalInhibition);
                var sp = new SpatialPooler();
                var mem = new Connections();
                parameters.apply(mem);

                // Time the spatial pooler initialization separately from training.
                stopwatch.Start();
                sp.Init(mem);
                stopwatch.Stop();

                int actiColumnLength = activeColumn;
                int[] activeArray = new int[actiColumnLength];

                // Read input csv file into array
                int[] inputVector = NeoCortexUtils.ReadCsvFileTest(inputBinarizedFile).ToArray();

                stopwatch.Restart();

                int iterations = 300;
                int[] oldArray = new int[activeArray.Length];

                // Each iteration compares the new SDR against the previous one to
                // track stability (Hamming distance / similarity per cycle).
                for (int k = 0; k < iterations; k++)
                {
                    sp.compute(inputVector, activeArray, true);
                    var activeCols = activeArray.IndexWhere((el) => el == 1);
                    var distance = MathHelpers.GetHammingDistance(oldArray, activeArray);
                    var similarity = MathHelpers.CalcArraySimilarity(oldArray, activeArray);
                    swHamming.WriteLine($"{distance} | {similarity}");
                    var str = Helpers.StringifyVector(activeCols);
                    Debug.WriteLine(str);

                    // Snapshot the current SDR for the next iteration's comparison.
                    oldArray = new int[actiColumnLength];
                    activeArray.CopyTo(oldArray, 0);
                }

                stopwatch.Stop();

                var activeArrayString = Helpers.StringifyVector(activeArray);
                swActiveColumn.WriteLine("Active Array: " + activeArrayString);

                string inhibition = isGlobalInhibition ? "Global" : "Local";
                double milliseconds = stopwatch.ElapsedMilliseconds;
                double seconds = milliseconds / 1000;
                swSpeed.WriteLine($"Topology: {columnDimension.ToString().PadRight(5)} | Inhibition type: {inhibition.PadRight(7)} | Total time: {milliseconds:N0} milliseconds ({seconds:N2} seconds).");

                // Render the final SDR as a 2D bitmap (transposed to match image orientation).
                int[,] twoDimenArray = ArrayUtils.Make2DArray(activeArray, columnDimension, columnDimension);
                twoDimenArray = ArrayUtils.Transpose(twoDimenArray);
                NeoCortexUtils.DrawBitmap(twoDimenArray, outputImageSize, outputImageSize, outputImage);
            }
        }
    }
}
/// <summary>
/// Resets the digest to its initial state: zero-fills the internal
/// <c>_state</c> and <c>_checksum</c> buffers, then runs the base-class
/// initialization.
/// </summary>
public override void Initialize()
{
    ArrayUtils.ZeroFill(_state);
    ArrayUtils.ZeroFill(_checksum);
    base.Initialize();
}
/// <summary>
/// Returns the current counter values as a defensive (shallow) copy, so
/// callers cannot mutate the internal array.
/// </summary>
/// <returns>a copy of the counters array</returns>
public virtual long[] AsArray()
{
    var snapshot = ArrayUtils.Clone(counters);
    return snapshot;
}
/// <summary>
/// Harmony prefix for the console input handler implementing a "call" command:
/// call [class] [methodName] [arg1] [arg2] ...
/// Looks up the class and method by name (case-insensitive) in the game assembly,
/// converts the remaining arguments to the method's parameter types and invokes it.
/// </summary>
/// <returns>
/// true to let the original handler run (input was not a call command);
/// false when the command has been handled (or rejected) here.
/// </returns>
public static bool Prefix()
{
    var console = Console.instance;
    var input = console.m_input.text;

    if (!input.StartsWith("call", true, CultureInfo.InvariantCulture))
    {
        return true;
    }

    var parts = CommandParser.Parse(input).ToArray();

    if (parts.Length < 3)
    {
        console.AddString(
            "call command requires at least 3 arguments: call <class> <methodName> [arg1] [arg2]");
        return false;
    }

    console.AddString(input);

    var targetClass = typeof(Console).Assembly.GetTypes()
        .FirstOrDefault(t => t.Name.Equals(parts[1].Text, StringComparison.InvariantCultureIgnoreCase));

    if (targetClass == null)
    {
        // Fix: print the token text, not the token object itself
        // (consistent with the method-not-found message below).
        console.AddString($"Could not find class with name '{parts[1].Text}'");
        return false;
    }

    var method = targetClass.GetMethods(BindingFlags.Public | BindingFlags.Static | BindingFlags.NonPublic |
                                        BindingFlags.Instance | BindingFlags.InvokeMethod)
        .FirstOrDefault(m => m.Name.Equals(parts[2].Text, StringComparison.InvariantCultureIgnoreCase));

    if (method == null)
    {
        console.AddString($"Could not find method '{parts[2].Text}' on class '{targetClass.FullName}'");
        return false;
    }

    // Parameters must match target method.
    var methodParams = method.GetParameters();

    if (methodParams.Length != parts.Length - 3) // -3 for command, class and method name args
    {
        console.AddString(
            $"Command does not match method parameters. Expected parameter types: {string.Join(", ", methodParams.Select(p => p.ParameterType.Name))}");
        return false;
    }

    var methodArgsToSupply = new object[methodParams.Length];

    for (var i = 0; i < methodParams.Length; i++)
    {
        var argText = parts[i + 3].Text;
        var paramType = methodParams[i].ParameterType;

        try
        {
            methodArgsToSupply[i] = TypeDescriptor.GetConverter(paramType).ConvertFrom(argText);
        }
        // Fix: ConvertFrom throws NotSupportedException for unsupported conversions,
        // but malformed values (e.g. "abc" for int) surface as a (possibly wrapped)
        // FormatException, which previously crashed the console handler.
        catch (Exception ex) when (ex is NotSupportedException || ex is FormatException || ex.InnerException is FormatException)
        {
            console.AddString(
                $"Cannot convert argument {i + 1} '{argText}' to type of '{paramType.FullName}'");
            return false;
        }
    }

    // Handle instance method calls by getting singleton instance (if possible).
    object targetInstance = null;

    if (!method.IsStatic)
    {
        targetInstance = targetClass == typeof(Player)
            ? Player.m_localPlayer
            : targetClass.GetField("m_instance", BindingFlags.Static | BindingFlags.NonPublic)
                ?.GetValue(null);

        if (targetInstance == null)
        {
            console.AddString(
                $"Could not call method '{method.Name}' because it's an instance method and no static instance field is defined on it");
            return false;
        }
    }

    // Call method, print result.
    var result = method.Invoke(targetInstance, methodArgsToSupply);

    if (result != null)
    {
        console.AddString(string.Join(", ", ArrayUtils.ToStringArray(result)));
    }

    return false;
}
/// <summary>
/// Returns the registered file extensions (the keys of the
/// language-extension map) as a new array.
/// </summary>
public string[] GetFileExtensions()
{
    var extensionKeys = _languageExtensions.Keys;
    return ArrayUtils.MakeArray<string>(extensionKeys);
}
/// <summary>
/// Returns the text bytes including the encoding's preamble (<see cref="Encoding.GetPreamble"/>), if any.
/// </summary>
/// <param name="text">The text to encode.</param>
/// <param name="encoding">The encoding used to produce bytes and preamble.</param>
private byte[] GetBytes(string text, Encoding encoding)
{
    byte[] encoded = encoding.GetBytes(text);
    byte[] preamble = encoding.GetPreamble();
    // Preamble (BOM) goes first, followed by the encoded text.
    return (byte[])ArrayUtils.Concat(preamble, encoded);
}
/// <summary>
/// Sends the file at <c>_filePath</c> to the remote host in encrypted chunks.
/// Each chunk is prefixed with a file-info header (whose OptionsFlag carries
/// the chunk's payload length), encrypted with the file symmetric processor
/// and transmitted as a DataChunk packet. Raises <c>ProgressPercent</c> per
/// chunk and <c>FileTransferred</c> on completion.
/// </summary>
/// <exception cref="CryptoFileTransferException">Wraps any failure during the transfer.</exception>
public void SendFile()
{
    int bytesRead = 0;
    long len = new FileInfo(_filePath).Length;
    DtmFileInfoSruct flHdr = new DtmFileInfoSruct(_filePath, len, 0);
    // Chunk size is the socket send buffer minus both header sizes, so one
    // encrypted chunk always fits in a single send buffer.
    int ckSize = _clientSocket.SendBufferSize - (flHdr.GetHeaderSize() + DtmPacketStruct.GetHeaderSize());
    byte[] inputBuffer = new byte[ckSize];
    try
    {
        using (FileStream inStream = new FileStream(_filePath, FileMode.Open, FileAccess.Read, FileShare.Read))
        {
            // loop through file
            while ((bytesRead = inStream.Read(inputBuffer, 0, ckSize)) > 0)
            {
                // wrap in a file info; option flag is used for payload length
                flHdr.OptionsFlag = bytesRead;
                byte[] hdrArr = flHdr.ToBytes();
                // add data; the last (short) chunk only appends the bytes actually read
                if (bytesRead == ckSize)
                {
                    hdrArr = ArrayUtils.Concat(hdrArr, inputBuffer);
                }
                else
                {
                    hdrArr = ArrayUtils.Concat(hdrArr, ArrayUtils.GetRange(inputBuffer, 0, bytesRead));
                }
                // encrypt the header and data
                byte[] enc = SymmetricTransform(_fileSymProcessor, hdrArr);
                // send to the remote host
                Transmit(DtmPacketFlags.Transfer, (short)DtmTransferFlags.DataChunk, _fileId, new MemoryStream(enc));
                // increment counter
                _bytesSent += bytesRead;
                // progress
                if (ProgressPercent != null)
                {
                    double progress = 100.0 * (double)_bytesSent / inStream.Length;
                    ProgressPercent(this, new System.ComponentModel.ProgressChangedEventArgs((int)progress, (object)inStream.Length));
                }
            }
        }
        // notify app
        if (FileTransferred != null)
        {
            FileTransferred(this, new DtmPacketArgs((short)DtmTransferFlags.Sent, _fileId));
        }
    }
    catch (Exception ex)
    {
        throw new CryptoFileTransferException("DtmFileTransfer:SendFile", "The file transfer did not complete!", ex);
    }
    finally
    {
        // flush any buffered outgoing data even when the transfer failed
        SendFlush();
    }
}
/// <summary>
/// Computes the cost of migrating a call's argument types to the corresponding
/// base types (sum of upcast counts). This cannot be used to determine whether
/// a function can be called, as it ignores anything that doesn't match;
/// it should only be used to determine which class is closer.
/// </summary>
/// <param name="args">The caller's argument values.</param>
/// <param name="classTable">Class table used to resolve upcast counts.</param>
/// <param name="core">Runtime core context.</param>
/// <returns>Total upcast distance, or int.MaxValue when the arity does not match.</returns>
internal int ComputeCastDistance(List <StackValue> args, ClassTable classTable, Core core)
{
    //Compute the cost to migrate a class calls argument types to the coresponding base types
    //This cannot be used to determine whether a function can be called as it will ignore anything that doesn't
    //it should only be used to determine which class is closer
    if (args.Count != FormalParams.Length)
    {
        return(int.MaxValue);
    }

    int distance = 0;
    if (0 == args.Count)
    {
        return(distance);
    }
    else
    {
        // Check if all the types match the current function at 'n'
        for (int i = 0; i < args.Count; ++i)
        {
            int rcvdType = (int)args[i].metaData.type;

            // If it's a default argument, then it wasn't provided by the caller;
            // the rcvdType is the type of the argument signature
            if (args[i].optype == AddressType.DefaultArg)
            {
                rcvdType = FormalParams[i].UID;
            }

            int expectedType = FormalParams[i].UID;
            int currentCost = 0;

            // Array-ness mismatches and exact matches contribute zero cost;
            // replication/coercion elsewhere handles those cases.
            if (FormalParams[i].IsIndexable != ArrayUtils.IsArray(args[i])) //Replication code will take care of this
            {
                continue;
            }
            else if (FormalParams[i].IsIndexable && (FormalParams[i].IsIndexable == ArrayUtils.IsArray(args[i])))
            {
                continue;
            }
            else if (expectedType == rcvdType && (FormalParams[i].IsIndexable == ArrayUtils.IsArray(args[i])))
            {
                continue;
            }
            else if (rcvdType != ProtoCore.DSASM.Constants.kInvalidIndex && expectedType != ProtoCore.DSASM.Constants.kInvalidIndex)
            {
                // Only a genuine class-to-class difference costs anything:
                // the number of upcasts from the received to the expected type.
                currentCost += ClassUtils.GetUpcastCountTo(classTable.ClassNodes[rcvdType], classTable.ClassNodes[expectedType], core);
            }
            distance += currentCost;
        }
        return(distance);
    }
}
/// <summary>
/// Picks a random combat action from this character's action set and stores
/// it in <c>combatMove</c>.
/// </summary>
public override void OnStart()
{
    // Choose a random combat action from this character's action set
    combatMove.SetValue(ArrayUtils.RandomElement(character.CharacterControl.ActionSet.combatActionScriptableObjects));
}
/// <summary>
/// Receives one encrypted file chunk from the packet stream, decrypts it,
/// appends the payload to the temporary file, and finalizes the transfer
/// (rename, notify, close) once the file is complete.
/// </summary>
/// <param name="PacketStream">Stream positioned at a DtmPacket header followed by the encrypted payload.</param>
/// <exception cref="CryptoFileTransferException">Wraps any failure while writing or finalizing the file.</exception>
private void Receive(Stream PacketStream)
{
    // get the packet header
    DtmPacketStruct prcPacket = new DtmPacketStruct(PacketStream);
    // read the packet
    byte[] enc = new byte[prcPacket.PayloadLength];

    // Fix: Stream.Read may return fewer bytes than requested; loop until the
    // whole encrypted payload has been read (the original single Read call
    // could silently process a truncated buffer).
    int totalRead = 0;
    while (totalRead < enc.Length)
    {
        int read = PacketStream.Read(enc, totalRead, enc.Length - totalRead);
        if (read <= 0)
        {
            throw new CryptoFileTransferException("DtmFileTransfer:Receive", "The file transfer did not complete!",
                new EndOfStreamException("Unexpected end of packet stream"));
        }
        totalRead += read;
    }

    // decrypt it using file crypto processor
    byte[] dec = SymmetricTransform(_fileSymProcessor, enc);
    // get file info header
    DtmFileInfoSruct pktFi = new DtmFileInfoSruct(dec);
    // store file name and size
    string fileName = pktFi.FileName;
    long fileSize = pktFi.FileSize;
    long streamLen = 0;

    try
    {
        using (FileStream outStream = new FileStream(_tempPath, FileMode.Append, FileAccess.Write, FileShare.Read))
        {
            // calculate offsets: everything after the file-info header is payload
            int hdrSize = pktFi.GetHeaderSize();
            int len = dec.Length - hdrSize;
            // write to file
            outStream.Write(ArrayUtils.GetRange(dec, hdrSize, len), 0, len);
            // store length
            streamLen = outStream.Length;
            // progress; OptionsFlag carries the running byte count from the sender
            if (ProgressPercent != null)
            {
                double progress = 100.0 * (double)pktFi.OptionsFlag / fileSize;
                ProgressPercent(this, new System.ComponentModel.ProgressChangedEventArgs((int)progress, (object)fileSize));
            }
        }

        // transfer completed
        if (streamLen == fileSize)
        {
            // reset attributes
            File.SetAttributes(_tempPath, File.GetAttributes(_tempPath) & ~FileAttributes.Hidden);
            // rename the file
            File.Move(_tempPath, VTDev.Libraries.CEXEngine.Tools.FileTools.GetUniqueName(_filePath));
            // notify app
            if (FileTransferred != null)
            {
                FileTransferred(this, new DtmPacketArgs((short)DtmTransferFlags.Received, prcPacket.OptionFlag));
            }
            // flush and close
            ReceiveClose();
        }
    }
    catch (Exception ex)
    {
        throw new CryptoFileTransferException("DtmFileTransfer:Receive", "The file transfer did not complete!", ex);
    }
}
/// <summary>
/// Binds a delete-slice indexing operation by prepending the target to the
/// argument list and delegating to the Python indexing protocol.
/// </summary>
public override DynamicMetaObject Bind(DynamicMetaObject target, DynamicMetaObject[] args)
{
    var allOperands = ArrayUtils.Insert(target, args);
    return PythonProtocol.Index(this, PythonIndexType.DeleteSlice, allOperands);
}
/// <summary>
/// Constructor. Eagerly materializes each iterable into a list (so every
/// input can be traversed repeatedly) and starts the generator produced by
/// <c>Yielder</c>.
/// </summary>
/// <param name="iterables">The input iterables.</param>
public product(params object[] iterables)
{
    InnerEnumerator = Yielder(ArrayUtils.ConvertAll(iterables, x => new List(PythonOps.GetEnumerator(x))));
}
/// <summary>
/// Implements the experiment: trains a multithreaded spatial pooler on the
/// given input patterns until the Homeostatic Plasticity Controller reports
/// a stable state, then generates the result output.
/// </summary>
/// <param name="cfg">HTM configuration (defines the number of columns).</param>
/// <param name="encoder">Encoder instance. NOTE(review): not used inside this method — confirm intent.</param>
/// <param name="inputValues">The binary input patterns to learn.</param>
private static void RunExperiment(HtmConfig cfg, EncoderBase encoder, List <int[]> inputValues)
{
    // Creates the htm memory.
    var mem = new Connections(cfg);

    bool isInStableState = false;

    //
    // HPC extends the default Spatial Pooler algorithm.
    // The purpose of HPC is to set the SP in the new-born stage at the beginning of the learning process.
    // In this stage the boosting is very active, but the SP behaves unstable. After this stage is over
    // (defined by the second argument) the HPC is controlling the learning process of the SP.
    // Once the SDR generated for every input gets stable, the HPC will fire an event that notifies your code
    // that SP is stable now.
    HomeostaticPlasticityController hpa = new HomeostaticPlasticityController(mem, inputValues.Count * 40,
        (isStable, numPatterns, actColAvg, seenInputs) =>
        {
            // Event should only be fired when entering the stable state.
            // Ideal SP should never enter unstable state after stable state.
            if (isStable == false)
            {
                Debug.WriteLine($"INSTABLE STATE");
                // This should usually not happen.
                isInStableState = false;
            }
            else
            {
                Debug.WriteLine($"STABLE STATE");
                // Here you can perform any action if required.
                isInStableState = true;
            }
        }, requiredSimilarityThreshold: 0.975);

    // It creates the instance of Spatial Pooler Multithreaded version.
    SpatialPooler sp = new SpatialPoolerMT(hpa);

    // Initializes the spatial pooler with the configured memory.
    sp.Init(mem);

    // Holds the indices of active columns of the SDR, keyed by input.
    Dictionary <string, int[]> prevActiveColIndicies = new Dictionary <string, int[]>();

    // Holds the active column SDRs, keyed by input.
    Dictionary <string, int[]> prevActiveCols = new Dictionary <string, int[]>();

    // Will hold the similarity of SDRk and SDRk-1 for every input.
    Dictionary <string, double> prevSimilarity = new Dictionary <string, double>();

    //
    // Initialize start similarity to zero.
    for (int i = 0; i < inputValues.Count; i++)
    {
        string inputKey = GetInputGekFromIndex(i);
        prevSimilarity.Add(inputKey, 0.0);
        prevActiveColIndicies.Add(inputKey, new int[0]);
    }

    // Learning process will take 1000 iterations (cycles)
    int maxSPLearningCycles = 1000;

    for (int cycle = 0; cycle < maxSPLearningCycles; cycle++)
    {
        //Debug.WriteLine($"Cycle ** {cycle} ** Stability: {isInStableState}");

        //
        // This trains the layer on input pattern.
        for (int inputIndx = 0; inputIndx < inputValues.Count; inputIndx++)
        {
            string inputKey = GetInputGekFromIndex(inputIndx);
            int[] input = inputValues[inputIndx];
            double similarity;

            int[] activeColumns = new int[(int)cfg.NumColumns];

            // Learn the input pattern.
            // Output lyrOut is the output of the last module in the layer.
            sp.compute(input, activeColumns, true);
            // DrawImages(cfg, inputKey, input, activeColumns);

            var actColsIndicies = ArrayUtils.IndexWhere(activeColumns, c => c == 1);

            // Similarity of this cycle's SDR against the previous cycle's SDR for the same input.
            similarity = MathHelpers.CalcArraySimilarity(actColsIndicies, prevActiveColIndicies[inputKey]);

            Debug.WriteLine($"[i={inputKey}, cols=:{actColsIndicies.Length} s={similarity}] SDR: {Helpers.StringifyVector(actColsIndicies)}");

            prevActiveCols[inputKey] = activeColumns;
            prevActiveColIndicies[inputKey] = actColsIndicies;
            prevSimilarity[inputKey] = similarity;

            // Stop as soon as the HPC reports stability and emit the results.
            if (isInStableState)
            {
                GenerateResult(cfg, inputValues, prevActiveColIndicies, prevActiveCols);
                return;
            }
        }
    }
}
/// <summary>
/// Returns the selectable column names: all string column names followed by
/// all category column names.
/// </summary>
public string[] GetNameSelection()
{
    var stringNames = mdata.StringColumnNames;
    var categoryNames = mdata.CategoryColumnNames;
    return ArrayUtils.Concat(stringNames, categoryNames);
}
/// <summary>
/// Compute the number of type transforms needed to turn the current type into the target type.
/// Note that this method returns int[] -> char[] as an exact match.
/// Starts from kMaxDistance and subtracts a per-argument score; a smaller
/// result therefore means a better overload match.
/// </summary>
/// <param name="args">The caller's argument values.</param>
/// <param name="classTable">Class table used for coercion scores.</param>
/// <param name="core">Runtime core context.</param>
/// <param name="allowArrayPromotion">When true, single -> array promotion is allowed (array -> single still rejected).</param>
/// <returns>The overload distance; kMaxDistance means no match.</returns>
public int ComputeTypeDistance(List <StackValue> args, ProtoCore.DSASM.ClassTable classTable, Core core, bool allowArrayPromotion = false)
{
    //Modified from proc Table, does not use quite the same arguments
    int distance = (int)ProcedureDistance.kMaxDistance;
    if (0 == args.Count && 0 == FormalParams.Length)
    {
        distance = (int)ProcedureDistance.kExactMatchDistance;
    }
    else
    {
        // Jun Comment:
        // Default args not provided by the caller would have been pushed by the call instruction as optype = DefaultArs
        if (FormalParams.Length == args.Count)
        {
            // Check if all the types match the current function at 'n'
            for (int i = 0; i < args.Count; ++i)
            {
                int rcvdType = (int)args[i].metaData.type;

                // If it's a default argument, then it wasn't provided by the caller;
                // the rcvdType is the type of the argument signature
                if (args[i].optype == AddressType.DefaultArg)
                {
                    rcvdType = FormalParams[i].UID;
                }

                int expectedType = FormalParams[i].UID;
                int currentScore = (int)ProcedureDistance.kNotMatchScore;

                //Fuqiang: For now disable rank check
                //if function is expecting array, but non-array or array of lower rank is passed, break.
                //if (args[i].rank != -1 && args[i].UID != (int)PrimitiveType.kTypeVar && args[i].rank < argTypeList[i].rank)

                //Only enable type check, and array and non-array check
                /* SUSPECTED REDUNDANT Luke,Jun
                 * if (args[i].rank != -1 && args[i].UID != (int)PrimitiveType.kTypeVar && !args[i].IsIndexable && FormalParams[i].IsIndexable)
                 * {
                 *     distance = (int)ProcedureDistance.kMaxDistance;
                 *     break;
                 * }
                 * else
                 */

                //sv rank > param rank
                if (allowArrayPromotion)
                {
                    //stop array -> single
                    if (ArrayUtils.IsArray(args[i]) && !FormalParams[i].IsIndexable) //Replication code will take care of this
                    {
                        distance = (int)ProcedureDistance.kMaxDistance;
                        break;
                    }
                }
                else
                {
                    //stop array -> single && single -> array
                    if (ArrayUtils.IsArray(args[i]) != FormalParams[i].IsIndexable) //Replication code will take care of this
                    {
                        distance = (int)ProcedureDistance.kMaxDistance;
                        break;
                    }
                }

                if (FormalParams[i].IsIndexable && (FormalParams[i].IsIndexable == ArrayUtils.IsArray(args[i])))
                {
                    //In case of conversion from double to int, add a conversion score.
                    //There are overloaded methods and the difference is the parameter type between int and double.
                    //Add this to make it call the correct one. - Randy
                    bool bContainsDouble = ArrayUtils.ContainsDoubleElement(args[i], core);
                    if (FormalParams[i].UID == (int)PrimitiveType.kTypeInt && bContainsDouble)
                    {
                        currentScore = (int)ProcedureDistance.kCoerceDoubleToIntScore;
                    }
                    else if (FormalParams[i].UID == (int)PrimitiveType.kTypeDouble && !bContainsDouble)
                    {
                        currentScore = (int)ProcedureDistance.kCoerceIntToDoubleScore;
                    }
                    else
                    {
                        currentScore = (int)ProcedureDistance.kExactMatchScore;
                    }
                }
                else if (expectedType == rcvdType && (FormalParams[i].IsIndexable == ArrayUtils.IsArray(args[i])))
                {
                    currentScore = (int)ProcedureDistance.kExactMatchScore;
                }
                else if (rcvdType != ProtoCore.DSASM.Constants.kInvalidIndex)
                {
                    // Fall back to the class table's coercion score; a not-match
                    // score here disqualifies the whole overload.
                    currentScore = classTable.ClassNodes[rcvdType].GetCoercionScore(expectedType);
                    if (currentScore == (int)ProcedureDistance.kNotMatchScore)
                    {
                        distance = (int)ProcedureDistance.kMaxDistance;
                        break;
                    }
                }
                distance -= currentScore;
            }
        }
    }
    return(distance);
}
/// <summary>
/// Returns the registered language names (the keys of the language-name map)
/// as a new array.
/// </summary>
public string[] GetLanguageNames()
{
    var nameKeys = _languageNames.Keys;
    return ArrayUtils.MakeArray<string>(nameKeys);
}
/// <summary>
/// Checks whether the given actual parameters deeply match this procedure's
/// formal parameters: arity, array-ness, array rank (unless arbitrary rank),
/// element class compatibility for arrays, and exact type UID for scalars.
/// </summary>
/// <param name="formalParameters">The actual argument values to test.</param>
/// <param name="core">Runtime core context for class table lookups.</param>
/// <returns>true when every parameter deeply matches; otherwise false.</returns>
public bool DoesTypeDeepMatch(List <StackValue> formalParameters, Core core)
{
    if (formalParameters.Count != FormalParams.Length)
    {
        return(false);
    }

    for (int i = 0; i < FormalParams.Length; i++)
    {
        // Array-ness must agree between the formal and the actual parameter.
        if (FormalParams[i].IsIndexable != ArrayUtils.IsArray(formalParameters[i]))
        {
            return(false);
        }

        if (FormalParams[i].IsIndexable && ArrayUtils.IsArray(formalParameters[i]))
        {
            // Rank must match unless the formal parameter accepts arbitrary rank.
            if (FormalParams[i].rank != ArrayUtils.GetMaxRankForArray(formalParameters[i], core)
                && FormalParams[i].rank != DSASM.Constants.kArbitraryRank)
            {
                return(false);
            }

            Type typ = FormalParams[i];
            Dictionary <ClassNode, int> arrayTypes = ArrayUtils.GetTypeStatisticsForArray(formalParameters[i], core);

            ClassNode cn = null;

            if (arrayTypes.Count == 0)
            {
                //This was an empty array
                Validity.Assert(cn == null, "If it was an empty array, there shouldn't be a type node");
                cn = core.ClassTable.ClassNodes[(int)PrimitiveType.kTypeNull];
            }
            else if (arrayTypes.Count == 1)
            {
                //UGLY, get the key out of the array types, of which there is only one
                foreach (ClassNode key in arrayTypes.Keys)
                {
                    cn = key;
                }
            }
            else if (arrayTypes.Count > 1)
            {
                ClassNode commonBaseType = ArrayUtils.GetGreatestCommonSubclassForArray(formalParameters[i], core);
                if (commonBaseType == null)
                {
                    throw new ProtoCore.Exceptions.ReplicationCaseNotCurrentlySupported("Array with no common superclass not yet supported: {0C644179-14F5-4172-8EF8-A2F3739901B2}");
                }
                cn = commonBaseType;
                //From now on perform tests on the common base type
            }

            ClassNode argTypeNode = core.ClassTable.ClassNodes[typ.UID];

            //cn now represents the class node of the argument
            //argTypeNode represents the class node of the formal parameter
            // NOTE(review): coersionScore is computed but only used by the
            // commented-out check below; kept because GetCoercionScore may
            // have side effects — confirm before removing.
            int coersionScore = cn.GetCoercionScore(core.ClassTable.ClassNodes.IndexOf(argTypeNode));

            //TODO(Jun)This is worrying test
            //Disable var as exact match, otherwise resolution between double and var will fail
            if (cn != argTypeNode && cn != core.ClassTable.ClassNodes[(int)PrimitiveType.kTypeNull]
                && argTypeNode != core.ClassTable.ClassNodes[(int)PrimitiveType.kTypeVar])
            {
                return(false);
            }

            //if (coersionScore != (int)ProcedureDistance.kExactMatchScore)
            //    return false;

            continue;
        }

        // Scalar parameters must match the formal type UID exactly.
        if (FormalParams[i].UID != (int)formalParameters[i].metaData.type)
        {
            return(false);
        }
    }
    return(true);
}
/// <summary>
/// Expands the matrix by multiplicity: columns whose names share a numbered
/// suffix (e.g. ___1, ___2) are collapsed into one column per prefix, and the
/// rows are replicated once per suffix. Adds a "Multiplicity" category column,
/// a "Unique identifier" string column ("UID" + running count), and rewrites
/// all annotation columns to the expanded row count in place on
/// <paramref name="mdata"/>.
/// </summary>
public void ProcessData(IMatrixData mdata, Parameters param, ref IMatrixData[] supplTables, ref IDocumentData[] documents, ProcessInfo processInfo)
{
    List <string> expNames = mdata.ColumnNames;
    string errorString = null;

    // colInds[k, i] is the original column index for prefix k and suffix i.
    int[,] colInds = SortNumberedNames(expNames, mdata.ColumnDescriptions, maxInd, ref errorString, out string[] allSuffixes, out string[] allPrefixes, out List <string> allDescriptions);
    if (errorString != null)
    {
        processInfo.ErrString = errorString;
        return;
    }

    // Columns without a numbered suffix ("normal" columns) are kept as-is.
    int[] normalIndices = ArrayUtils.Complement(To1DArray(colInds), expNames.Count);
    normalIndices = FilterExpressionColIndices(normalIndices, mdata.ColumnNames, allPrefixes);
    int[] validNumCols = GetValidNumCols(mdata.NumericColumnNames, allPrefixes);

    // New shape: one row per (original row, suffix) pair.
    int nrows = mdata.RowCount * allSuffixes.Length;
    int ncols = normalIndices.Length + allPrefixes.Length;
    double[,] data = new double[nrows, ncols];
    double[,] quality = new double[nrows, ncols];
    bool[,] imputed = new bool[nrows, ncols];

    // Pre-allocate expanded annotation columns; one extra slot is reserved for
    // the "Unique identifier" string column and the "Multiplicity" category column.
    List <double[]> numCols = new List <double[]>();
    for (int i = 0; i < validNumCols.Length; i++)
    {
        numCols.Add(new double[nrows]);
    }
    List <string[]> stringCols = new List <string[]>();
    for (int i = 0; i < mdata.StringColumnCount + 1; i++)
    {
        stringCols.Add(new string[nrows]);
    }
    List <string[][]> catCols = new List <string[][]>();
    for (int i = 0; i < mdata.CategoryColumnCount + 1; i++)
    {
        catCols.Add(new string[nrows][]);
    }
    List <double[][]> multiNumCols = new List <double[][]>();
    for (int i = 0; i < mdata.MultiNumericColumnCount; i++)
    {
        multiNumCols.Add(new double[nrows][]);
    }

    // New expression column names: normal columns first, then one per prefix.
    List <string> expColNames = new List <string>();
    List <string> expColDescriptions = new List <string>();
    foreach (int t in normalIndices)
    {
        expColNames.Add(expNames[t]);
        expColDescriptions.Add(mdata.ColumnDescriptions[t]);
    }
    foreach (Tuple <string, string> t in allPrefixes.Zip(allDescriptions, Tuple.Create))
    {
        expColNames.Add(t.Item1);
        expColDescriptions.Add(t.Item2);
    }

    int count = 0;
    for (int i = 0; i < allSuffixes.Length; i++)
    {
        for (int j = 0; j < mdata.RowCount; j++)
        {
            count++;
            int rowInd = i * mdata.RowCount + j;

            // Normal columns are copied unchanged for every suffix block.
            for (int k = 0; k < normalIndices.Length; k++)
            {
                data[rowInd, k] = mdata.Values.Get(j, normalIndices[k]);
                quality[rowInd, k] = mdata.Quality.Get(j, normalIndices[k]);
                imputed[rowInd, k] = mdata.IsImputed[j, normalIndices[k]];
            }

            // Suffixed columns pull the value for this block's suffix.
            for (int k = 0; k < allPrefixes.Length; k++)
            {
                data[rowInd, normalIndices.Length + k] = mdata.Values.Get(j, colInds[k, i]);
                quality[rowInd, normalIndices.Length + k] = mdata.Quality.Get(j, colInds[k, i]);
                imputed[rowInd, normalIndices.Length + k] = mdata.IsImputed[j, colInds[k, i]];
            }

            // Replicate all annotation columns for the expanded rows.
            for (int k = 0; k < validNumCols.Length; k++)
            {
                numCols[k][rowInd] = mdata.NumericColumns[validNumCols[k]][j];
            }
            for (int k = 0; k < mdata.StringColumnCount; k++)
            {
                stringCols[k][rowInd] = mdata.StringColumns[k][j];
            }
            for (int k = 0; k < mdata.CategoryColumnCount; k++)
            {
                catCols[k][rowInd] = mdata.GetCategoryColumnEntryAt(k, j);
            }
            for (int k = 0; k < mdata.MultiNumericColumnCount; k++)
            {
                multiNumCols[k][rowInd] = mdata.MultiNumericColumns[k][j];
            }

            // New columns: multiplicity label and a running unique identifier
            // (count - 1 equals rowInd given the loop order).
            catCols[mdata.CategoryColumnCount][rowInd] = new[] { allSuffixes[i] };
            stringCols[stringCols.Count - 1][count - 1] = "UID" + count;
        }
    }

    string[] catColNames = ArrayUtils.Concat(mdata.CategoryColumnNames, new[] { "Multiplicity" });

    // Write the expanded matrix back into mdata.
    mdata.ColumnNames = expColNames;
    mdata.ColumnDescriptions = expColDescriptions;
    mdata.Values.Set(data);
    mdata.Quality.Set(quality);
    mdata.IsImputed.Set(imputed);
    mdata.SetAnnotationColumns(new List <string>(ArrayUtils.Concat(mdata.StringColumnNames, new[] { "Unique identifier" })), stringCols,
        new List <string>(catColNames), catCols, ArrayUtils.SubList(mdata.NumericColumnNames, validNumCols), numCols,
        mdata.MultiNumericColumnNames, multiNumCols);
}
/// <summary>
/// Invokes the reflected target as an implicit-instance call: the instance is
/// prepended to the argument array and passed through to CallReflected.
/// </summary>
public object CallInstanceReflected(CodeContext context, object instance, params object[] args)
{
    var allArgs = ArrayUtils.Insert(instance, args);
    return CallReflected(context, CallType.ImplicitInstance, allArgs);
}
/// <summary>
/// Builds the reverse-interop invoker: a DynamicMethod whose IL unmarshals the
/// native arguments, invokes the Python target through a dynamic call site
/// (held in a constant pool passed as arg 0), marshals the return value back,
/// and routes any exception to ModuleOps.CallbackException. Stores the
/// resulting delegate type, method and constant pool in the instance fields.
/// </summary>
/// <param name="context">Code context providing the Python language context.</param>
private void MakeReverseDelegateWorker(CodeContext context)
{
    Type[] sigTypes;
    Type[] callSiteType;
    Type retType;
    GetSignatureInfo(out sigTypes, out callSiteType, out retType);

    // The generated method drops the last signature type (the return type slot).
    DynamicMethod dm = new DynamicMethod("ReverseInteropInvoker", retType, ArrayUtils.RemoveLast(sigTypes), DynamicModule);
    ILGenerator ilGen = dm.GetILGenerator();
    PythonContext pc = context.LanguageContext;

    Type callDelegateSiteType = CompilerHelpers.MakeCallSiteDelegateType(callSiteType);
    CallSite site = CallSite.Create(callDelegateSiteType, pc.Invoke(new CallSignature(_argtypes.Length)));

    List <object> constantPool = new List <object>();
    constantPool.Add(null); // 1st item is the target object, will be put in later.
    constantPool.Add(site);

    ilGen.BeginExceptionBlock();

    //CallSite<Func<CallSite, object, object>> mySite;
    //mySite.Target(mySite, target, ...);

    // Load the call site from the constant pool (arg 0) into a local,
    // then push its Target field and the site itself for the invoke.
    LocalBuilder siteLocal = ilGen.DeclareLocal(site.GetType());
    ilGen.Emit(OpCodes.Ldarg_0);
    ilGen.Emit(OpCodes.Ldc_I4, constantPool.Count - 1);
    ilGen.Emit(OpCodes.Ldelem_Ref);
    ilGen.Emit(OpCodes.Castclass, site.GetType());
    ilGen.Emit(OpCodes.Stloc, siteLocal);
    ilGen.Emit(OpCodes.Ldloc, siteLocal);
    ilGen.Emit(OpCodes.Ldfld, site.GetType().GetField("Target"));
    ilGen.Emit(OpCodes.Ldloc, siteLocal);

    // load code context
    int contextIndex = constantPool.Count;
    Debug.Assert(pc.SharedContext != null);
    constantPool.Add(pc.SharedContext);
    ilGen.Emit(OpCodes.Ldarg_0);
    ilGen.Emit(OpCodes.Ldc_I4, contextIndex);
    ilGen.Emit(OpCodes.Ldelem_Ref);

    // load function target, in constant pool slot 0
    ilGen.Emit(OpCodes.Ldarg_0);
    ilGen.Emit(OpCodes.Ldc_I4_0);
    ilGen.Emit(OpCodes.Ldelem_Ref);

    // load arguments, converting each native value to its Python representation
    for (int i = 0; i < _argtypes.Length; i++)
    {
        INativeType nativeType = _argtypes[i];
        nativeType.EmitReverseMarshalling(ilGen, new Arg(i + 1, sigTypes[i + 1]), constantPool, 0);
    }

    ilGen.Emit(OpCodes.Call, callDelegateSiteType.GetMethod("Invoke"));

    LocalBuilder finalRes = null;
    // emit forward marshaling for return value
    if (_restype != null)
    {
        LocalBuilder tmpRes = ilGen.DeclareLocal(typeof(object));
        ilGen.Emit(OpCodes.Stloc, tmpRes);
        finalRes = ilGen.DeclareLocal(retType);
        ((INativeType)_restype).EmitMarshalling(ilGen, new Local(tmpRes), constantPool, 0);
        ilGen.Emit(OpCodes.Stloc, finalRes);
    }
    else
    {
        // no return type: discard the invoke result
        ilGen.Emit(OpCodes.Pop);
    }

    // } catch(Exception e) {
    // emit the cleanup code
    ilGen.BeginCatchBlock(typeof(Exception));
    ilGen.Emit(OpCodes.Ldarg_0);
    ilGen.Emit(OpCodes.Ldc_I4, contextIndex);
    ilGen.Emit(OpCodes.Ldelem_Ref);
    ilGen.Emit(OpCodes.Call, typeof(ModuleOps).GetMethod("CallbackException"));
    ilGen.EndExceptionBlock();

    if (_restype != null)
    {
        ilGen.Emit(OpCodes.Ldloc, finalRes);
    }
    ilGen.Emit(OpCodes.Ret);

    _reverseDelegateConstants = constantPool;
    _reverseDelegateType = GetReverseDelegateType(ArrayUtils.RemoveFirst(sigTypes), CallingConvention);
    _reverseDelegate = dm;
}
/// <summary>
/// Creates (but does not throw) an SSLError describing a PEM decoding
/// failure; the message prefix is followed by the supplied detail arguments.
/// </summary>
private static Exception ErrorDecoding(CodeContext context, params object[] args)
{
    var messageArgs = ArrayUtils.Insert("Error decoding PEM-encoded file ", args);
    return PythonExceptions.CreateThrowable(SSLError(context), messageArgs);
}
/// <summary>
/// Rewrites a try/catch/finally/fault expression so that yields occurring inside any of
/// its regions can be resumed by the generator: yield routers are inserted to dispatch
/// back into the body, catch handlers are deferred (the exception is saved and the body
/// runs outside the catch), and finally blocks are made skippable on yield.
/// Returns the original node unchanged when no region was rewritten.
/// </summary>
protected override Expression VisitTry(TryExpression node) {
    // Snapshot the yield count before each region so we can tell which
    // region (try / catch / finally) introduced new yields.
    int startYields = _yields.Count;
    bool savedInTryWithFinally = _inTryWithFinally;
    if (node.Finally != null || node.Fault != null) {
        _inTryWithFinally = true;
    }
    Expression @try = Visit(node.Body);
    int tryYields = _yields.Count;
    IList<CatchBlock> handlers = Visit(node.Handlers, VisitCatchBlock);
    int catchYields = _yields.Count;
    // push a new return label in case the finally block yields
    _returnLabels.Push(Expression.Label("tryLabel"));
    // only one of these can be non-null
    Expression @finally = Visit(node.Finally);
    Expression fault = Visit(node.Fault);
    LabelTarget finallyReturn = _returnLabels.Pop();
    int finallyYields = _yields.Count;
    _inTryWithFinally = savedInTryWithFinally;
    // Nothing changed anywhere: reuse the original node.
    if (@try == node.Body && handlers == node.Handlers && @finally == node.Finally && fault == node.Fault) {
        return(node);
    }
    // No yields, just return
    if (startYields == _yields.Count) {
        Debug.Assert(@try.Type == node.Type);
        Debug.Assert(handlers == null || handlers.Count == 0 || handlers[0].Body.Type == node.Type);
        return(Expression.MakeTry(null, @try, @finally, fault, handlers));
    }
    if (fault != null && finallyYields != catchYields) {
        // No one needs this yet, and it's not clear how we should get back to
        // the fault
        throw new NotSupportedException("yield in fault block is not supported");
    }
    // If try has yields, we need to build a new try body that
    // dispatches to the yield labels
    var tryStart = Expression.Label("tryStart");
    if (tryYields != startYields) {
        @try = Expression.Block(MakeYieldRouter(node.Body.Type, startYields, tryYields, tryStart), @try);
        Debug.Assert(@try.Type == node.Body.Type);
    }
    // Transform catches with yield to deferred handlers
    if (catchYields != tryYields) {
        var block = new List<Expression>();
        block.Add(MakeYieldRouter(node.Body.Type, tryYields, catchYields, tryStart));
        block.Add(null); // empty slot to fill in later
        for (int i = 0, n = handlers.Count; i < n; i++) {
            CatchBlock c = handlers[i];
            // Handler was not rewritten (no yields inside it) — leave it as-is.
            if (c == node.Handlers[i]) {
                continue;
            }
            if (handlers.IsReadOnly) {
                handlers = ArrayUtils.ToArray(handlers);
            }
            // the variable that will be scoped to the catch block
            var exceptionVar = Expression.Variable(c.Test, null);
            // the variable that the catch block body will use to
            // access the exception. We reuse the original variable if
            // the catch block had one. It needs to be hoisted because
            // the catch might contain yields.
            var deferredVar = c.Variable ?? Expression.Variable(c.Test, null);
            LiftVariable(deferredVar);
            // We need to ensure that filters can access the exception
            // variable
            Expression filter = c.Filter;
            if (filter != null && c.Variable != null) {
                filter = Expression.Block(new[] { c.Variable }, Expression.Assign(c.Variable, exceptionVar), filter);
            }
            // catch (ExceptionType exceptionVar) {
            //     deferredVar = exceptionVar;
            // }
            handlers[i] = Expression.Catch(
                exceptionVar,
                Expression.Block(
                    DelayedAssign(Visit(deferredVar), exceptionVar),
                    Expression.Default(node.Body.Type)
                ),
                filter
            );
            // We need to rewrite rethrows into "throw deferredVar"
            var catchBody = new RethrowRewriter { Exception = deferredVar }.Visit(c.Body);
            // if (deferredVar != null) {
            //     ... catch body ...
            // }
            block.Add(
                Expression.Condition(
                    Expression.NotEqual(Visit(deferredVar), AstUtils.Constant(null, deferredVar.Type)),
                    catchBody,
                    Expression.Default(node.Body.Type)
                )
            );
        }
        // Fill the reserved slot with the inner try whose catches only save the exception.
        block[1] = Expression.MakeTry(null, @try, null, null, new ReadOnlyCollection<CatchBlock>(handlers));
        @try = Expression.Block(block);
        Debug.Assert(@try.Type == node.Body.Type);
        handlers = new CatchBlock[0]; // so we don't reuse these
    }
    if (finallyYields != catchYields) {
        // We need to add a catch block to save the exception, so we
        // can rethrow in case there is a yield in the finally. Also,
        // add logic for returning. It looks like this:
        //
        // try { ... } catch (Exception all) { saved = all; }
        // finally {
        //     if (_finallyReturnVar) goto finallyReturn;
        //     ...
        //     if (saved != null) throw saved;
        //     finallyReturn:
        // }
        // if (_finallyReturnVar) goto _return;
        // We need to add a catch(Exception), so if we have catches,
        // wrap them in a try
        if (handlers.Count > 0) {
            @try = Expression.MakeTry(null, @try, null, null, handlers);
            Debug.Assert(@try.Type == node.Body.Type);
            handlers = new CatchBlock[0];
        }
        // NOTE: the order of these routers is important
        // The first call changes the labels to all point at "tryEnd",
        // so the second router will jump to "tryEnd"
        var tryEnd = Expression.Label("tryEnd");
        Expression inFinallyRouter = MakeYieldRouter(node.Body.Type, catchYields, finallyYields, tryEnd);
        Expression inTryRouter = MakeYieldRouter(node.Body.Type, catchYields, finallyYields, tryStart);
        var all = Expression.Variable(typeof(Exception), "e");
        var saved = Expression.Variable(typeof(Exception), "$saved$" + _temps.Count);
        LiftVariable(saved);
        @try = Expression.Block(
            Expression.TryCatchFinally(
                Expression.Block(
                    inTryRouter,
                    @try,
                    DelayedAssign(Visit(saved), AstUtils.Constant(null, saved.Type)),
                    Expression.Label(tryEnd)
                ),
                Expression.Block(
                    MakeSkipFinallyBlock(finallyReturn),
                    inFinallyRouter,
                    @finally,
                    Expression.Condition(
                        Expression.NotEqual(Visit(saved), AstUtils.Constant(null, saved.Type)),
                        Expression.Throw(Visit(saved)),
                        Utils.Empty()
                    ),
                    Expression.Label(finallyReturn)
                ),
                Expression.Catch(all, Utils.Void(DelayedAssign(Visit(saved), all)))
            ),
            // After the try: if we are unwinding because of a yield, jump to the return label.
            Expression.Condition(
                Expression.Equal(_gotoRouter, AstUtils.Constant(GotoRouterYielding)),
                Expression.Goto(_returnLabels.Peek()),
                Utils.Empty()
            )
        );
        @finally = null;
    } else if (@finally != null) {
        // try or catch had a yield, modify finally so we can skip over it
        @finally = Expression.Block(
            MakeSkipFinallyBlock(finallyReturn),
            @finally,
            Expression.Label(finallyReturn)
        );
    }
    // Make the outer try, if needed
    if (handlers.Count > 0 || @finally != null || fault != null) {
        @try = Expression.MakeTry(null, @try, @finally, fault, handlers);
    }
    Debug.Assert(@try.Type == node.Body.Type);
    return(Expression.Block(Expression.Label(tryStart), @try));
}
/// <summary>
/// Construct the check matrix of a Goppa code in canonical form from the irreducible
/// Goppa polynomial over the finite field <c>GF(2^m)</c>.
/// </summary>
///
/// <param name="Field">The finite field</param>
/// <param name="Gp">The irreducible Goppa polynomial</param>
///
/// <returns>The new GF2Matrix</returns>
public static GF2Matrix CreateCanonicalCheckMatrix(GF2mField Field, PolynomialGF2mSmallM Gp) {
    int m = Field.Degree;
    // n = 2^m, the number of elements of GF(2^m); each j in [0, n) doubles as a field element.
    int n = 1 << m;
    int t = Gp.Degree;
    // create matrix H over GF(2^m)
    int[][] hArray = ArrayUtils.CreateJagged<int[][]>(t, n);
    // create matrix YZ
    int[][] yz = ArrayUtils.CreateJagged<int[][]>(t, n);
    // Row 0 of YZ: 1 / Gp(j) for each field element j.
    // NOTE(review): assumes Gp has no roots in GF(2^m) (irreducibility), so the inverse exists.
    if (ParallelUtils.IsParallel) {
        Parallel.For(0, n, j => yz[0][j] = Field.Inverse(Gp.EvaluateAt(j)));
    } else {
        // here j is used as index and as element of field GF(2^m)
        for (int j = 0; j < n; j++) {
            yz[0][j] = Field.Inverse(Gp.EvaluateAt(j));
        }
    }
    // Subsequent rows: yz[i][j] = yz[i-1][j] * j, i.e. j^i / Gp(j).
    for (int i = 1; i < t; i++) {
        // here j is used as index and as element of field GF(2^m)
        if (ParallelUtils.IsParallel) {
            Parallel.For(0, n, j => {
                yz[i][j] = Field.Multiply(yz[i - 1][j], j);
            });
        } else {
            for (int j = 0; j < n; j++) {
                yz[i][j] = Field.Multiply(yz[i - 1][j], j);
            }
        }
    }
    // create matrix H = XYZ
    // hArray[i][j] accumulates sum over k <= i of yz[k][j] * coefficient(t + k - i) of Gp.
    for (int i = 0; i < t; i++) {
        if (ParallelUtils.IsParallel) {
            Parallel.For(0, n, j => {
                for (int k = 0; k <= i; k++) {
                    hArray[i][j] = Field.Add(hArray[i][j], Field.Multiply(yz[k][j], Gp.GetCoefficient(t + k - i)));
                }
            });
        } else {
            for (int j = 0; j < n; j++) {
                for (int k = 0; k <= i; k++) {
                    hArray[i][j] = Field.Add(hArray[i][j], Field.Multiply(yz[k][j], Gp.GetCoefficient(t + k - i)));
                }
            }
        }
    }
    // convert to matrix over GF(2)
    // Each GF(2^m) entry expands to m binary rows; columns are packed 32 bits per int
    // (q = j >> 5 selects the word, r = 1 << (j & 0x1f) the bit within it).
    int[][] result = ArrayUtils.CreateJagged<int[][]>(t * m, IntUtils.URShift((n + 31), 5));
    if (ParallelUtils.IsParallel) {
        for (int j = 0; j < n; j++) {
            int q = IntUtils.URShift(j, 5);
            int r = 1 << (j & 0x1f);
            for (int i = 0; i < t; i++) {
                int e = hArray[i][j];
                // Distinct u values map to distinct rows ind, so the parallel writes do not collide.
                Parallel.For(0, m, u => {
                    int b = (IntUtils.URShift(e, u)) & 1;
                    if (b != 0) {
                        int ind = (i + 1) * m - u - 1;
                        result[ind][q] ^= r;
                    }
                });
            }
        }
    } else {
        for (int j = 0; j < n; j++) {
            int q = IntUtils.URShift(j, 5);
            int r = 1 << (j & 0x1f);
            for (int i = 0; i < t; i++) {
                int e = hArray[i][j];
                for (int u = 0; u < m; u++) {
                    int b = (IntUtils.URShift(e, u)) & 1;
                    if (b != 0) {
                        int ind = (i + 1) * m - u - 1;
                        result[ind][q] ^= r;
                    }
                }
            }
        }
    }
    return(new GF2Matrix(n, result));
}
/// <summary>
/// Packs arguments into the buffer according to given specifiers and repeaters.
/// Count specifies the number of valid specifiers/repeaters.
/// The buffer is assumed to have been pre-sized by the caller for the full packed length.
/// </summary>
/// <param name="buffer">Destination byte buffer written to in place.</param>
/// <param name="args">Arguments to pack; consumed in order as specifiers require them.</param>
/// <param name="specifiers">Format specifier characters (PHP pack()-style codes).</param>
/// <param name="repeaters">Repeat count per specifier, parallel to <paramref name="specifiers"/>.</param>
/// <param name="count">Number of valid entries in the specifier/repeater arrays.</param>
private static void PackInternal(byte[] buffer, object[] args, char[] specifiers, int[] repeaters, int count) {
    Encoding encoding = Configuration.Application.Globalization.PageEncoding;
    bool le = BitConverter.IsLittleEndian;
    int a = 0;   // index of the current argument
    int pos = 0; // the position in the buffer
    for (int i = 0; i < count; i++) {
        char specifier = specifiers[i];
        int repeater = repeaters[i];
        switch (specifier) {
            case 'x': // NUL byte repeated for "repeater" count:
                ArrayUtils.Fill(buffer, 0, pos, repeater);
                pos += repeater;
                break;

            case '@':
                // NUL-fill to absolute position;
                // if it is less then the current position the result is shortened
                if (repeater > pos) {
                    ArrayUtils.Fill(buffer, 0, pos, repeater - pos);
                }
                pos = repeater;
                break;

            case 'X':
                // Back up "repeater" bytes, clamped at the start of the buffer.
                pos = Math.Max(0, pos - repeater);
                break;

            case 'a': // NUL-padded string
            case 'A': // SPACE-padded string
            {
                // argument has already been converted to string:
                string s = (string)args[a++];
                int length = Math.Min(s.Length, repeater);
                int byte_count = encoding.GetBytes(s, 0, length, buffer, pos);
                Debug.Assert(byte_count == length, "Multibyte characters not supported");
                // padding:
                if (repeater > length) {
                    ArrayUtils.Fill(buffer, (byte)((specifier == 'a') ? 0x00 : 0x20), pos + length, repeater - length);
                }
                pos += repeater;
                break;
            }

            case 'h': // Hex string, low/high nibble first - converts to a string, takes n hex digits from string:
            case 'H':
            {
                // argument has already been converted to string:
                string s = (string)args[a++];
                // 'h' puts the first digit in the low nibble, 'H' in the high nibble.
                int nibble_shift = (specifier == 'h') ? 0 : 4;
                for (int j = 0; j < repeater; j++) {
                    int digit = Core.Parsers.Convert.AlphaNumericToDigit(s[j]);
                    if (digit > 15) {
                        // Invalid hex character: warn and treat as 0.
                        PhpException.Throw(PhpError.Warning, LibResources.GetString("illegal_hex_digit", specifier, s[j]));
                        digit = 0;
                    }
                    if (j % 2 == 0) {
                        // First nibble of the byte: overwrite.
                        buffer[pos] = unchecked ((byte)(digit << nibble_shift));
                    } else {
                        // Second nibble: OR into the other half, then advance.
                        buffer[pos] |= unchecked ((byte)(digit << (4 - nibble_shift)));
                        pos++;
                    }
                }
                // odd number of hex digits (append '0' digit):
                if (repeater % 2 == 1) {
                    pos++;
                }
                break;
            }

            case 'c': // signed char
            case 'C': // unsigned char
                while (repeater-- > 0) {
                    buffer[pos++] = unchecked ((byte)Core.Convert.ObjectToInteger(args[a++]));
                }
                break;

            case 's': // signed short (always 16 bit, machine byte order)
            case 'S': // unsigned short (always 16 bit, machine byte order)
                while (repeater-- > 0) {
                    PackNumber(BitConverter.GetBytes(unchecked ((ushort)Core.Convert.ObjectToInteger(args[a++]))), le, buffer, ref pos);
                }
                break;

            case 'n': // unsigned short (always 16 bit, big endian byte order)
            case 'v': // unsigned short (always 16 bit, little endian byte order)
                while (repeater-- > 0) {
                    PackNumber(BitConverter.GetBytes(unchecked ((ushort)Core.Convert.ObjectToInteger(args[a++]))), specifier == 'v', buffer, ref pos);
                }
                break;

            case 'i': // signed integer (machine dependent size and byte order - always 32 bit)
            case 'I': // signed integer (machine dependent size and byte order - always 32 bit)
            case 'l': // signed long (always 32 bit, machine byte order)
            case 'L': // unsigned long (always 32 bit, machine byte order)
                while (repeater-- > 0) {
                    PackNumber(BitConverter.GetBytes(Core.Convert.ObjectToInteger(args[a++])), le, buffer, ref pos);
                }
                break;

            case 'N': // unsigned long (always 32 bit, big endian byte order)
            case 'V': // unsigned long (always 32 bit, little endian byte order)
                while (repeater-- > 0) {
                    PackNumber(BitConverter.GetBytes(Core.Convert.ObjectToInteger(args[a++])), specifier == 'V', buffer, ref pos);
                }
                break;

            case 'f': // float (machine dependent size and representation - size is always 4B)
                while (repeater-- > 0) {
                    PackNumber(BitConverter.GetBytes(unchecked ((float)Core.Convert.ObjectToDouble(args[a++]))), le, buffer, ref pos);
                }
                break;

            case 'd': // double (machine dependent size and representation - size is always 8B)
                while (repeater-- > 0) {
                    PackNumber(BitConverter.GetBytes(Core.Convert.ObjectToDouble(args[a++])), le, buffer, ref pos);
                }
                break;

            default:
                // Specifier validation happens earlier; reaching here is a programming error.
                Debug.Fail("Invalid specifier");
                break;
        }
    }
}
internal static DynamicMetaObject Call(DynamicMetaObjectBinder /*!*/ call, DynamicMetaObject target, DynamicMetaObject /*!*/[] /*!*/ args) { Assert.NotNull(call, args); Assert.NotNullItems(args); if (target.NeedsDeferral()) { return(call.Defer(ArrayUtils.Insert(target, args))); } foreach (DynamicMetaObject mo in args) { if (mo.NeedsDeferral()) { RestrictTypes(args); return(call.Defer( ArrayUtils.Insert(target, args) )); } } DynamicMetaObject self = target.Restrict(target.GetLimitType()); ValidationInfo valInfo = BindingHelpers.GetValidationInfo(target); PythonType pt = DynamicHelpers.GetPythonType(target.Value); PythonContext pyContext = PythonContext.GetPythonContext(call); // look for __call__, if it's present dispatch to it. Otherwise fall back to the // default binder PythonTypeSlot callSlot; if (!typeof(Delegate).IsAssignableFrom(target.GetLimitType()) && pt.TryResolveSlot(pyContext.SharedContext, "__call__", out callSlot)) { ConditionalBuilder cb = new ConditionalBuilder(call); Expression body; callSlot.MakeGetExpression( pyContext.Binder, PythonContext.GetCodeContext(call), self, GetPythonType(self), cb ); if (!cb.IsFinal) { cb.FinishCondition(GetCallError(call, self)); } Expression[] callArgs = ArrayUtils.Insert( PythonContext.GetCodeContext(call), cb.GetMetaObject().Expression, DynamicUtils.GetExpressions(args) ); body = DynamicExpression.Dynamic( PythonContext.GetPythonContext(call).Invoke( BindingHelpers.GetCallSignature(call) ), typeof(object), callArgs ); body = Ast.TryFinally( Ast.Block( Ast.Call(typeof(PythonOps).GetMethod("FunctionPushFrame"), Ast.Constant(pyContext)), body ), Ast.Call(typeof(PythonOps).GetMethod("FunctionPopFrame")) ); return(BindingHelpers.AddDynamicTestAndDefer( call, new DynamicMetaObject(body, self.Restrictions.Merge(BindingRestrictions.Combine(args))), args, valInfo )); } return(null); }
/// <summary>
/// Trains the spatial pooler on a binarized input image and records the results.
/// Per-iteration hamming distance and similarity (against the previous iteration's SDR)
/// go to the hamming file; a timing summary for comparing global vs. local inhibition
/// is appended to the speed file; the final active columns are rendered as a .png image.
/// Used for testing training speed with different max-boost / duty-cycle settings.
/// </summary>
/// <param name="inputBinarizedFile">Input image after binarization (CSV).</param>
/// <param name="hammingFile">Path to the hamming distance output file.</param>
/// <param name="outputSpeedFile">Path to the speed comparison output file (opened in append mode).</param>
/// <param name="outputImage">Path to the active-column output image (.png).</param>
/// <param name="parameters">Parameter setup.</param>
private static void Training(string inputBinarizedFile, string hammingFile, string outputSpeedFile, string outputImage, Parameters parameters)
{
    const int bitmapSize = 1024;
    int columnDimension = parameters.Get<int[]>(KEY.COLUMN_DIMENSIONS)[0];
    int columnCount = columnDimension * columnDimension;
    var timer = new Stopwatch();
    using (StreamWriter hammingWriter = new StreamWriter(hammingFile))
    {
        using (StreamWriter speedWriter = new StreamWriter(outputSpeedFile, true))
        {
            var pooler = new SpatialPooler();
            var connections = new Connections();
            parameters.apply(connections);

            // Time the initialization separately; this measurement is discarded by Restart below.
            timer.Start();
            pooler.Init(connections);
            timer.Stop();

            int[] currentSdr = new int[columnCount];

            // Read input csv file into array.
            int[] inputVector = NeoCortexUtils.ReadCsvFileTest(inputBinarizedFile).ToArray();

            timer.Restart();
            const int iterations = 1000;
            int[] previousSdr = new int[currentSdr.Length];
            for (int iteration = 0; iteration < iterations; iteration++)
            {
                pooler.compute(inputVector, currentSdr, true);
                var activeColumnIndexes = currentSdr.IndexWhere((el) => el == 1);
                var hammingDistance = MathHelpers.GetHammingDistance(previousSdr, currentSdr);
                var similarity = MathHelpers.CalcArraySimilarity(previousSdr, currentSdr);
                hammingWriter.WriteLine($"{hammingDistance} | {similarity}");
                var activeColumnsText = Helpers.StringifyVector(activeColumnIndexes);
                Debug.WriteLine(activeColumnsText);
                // Snapshot this iteration's SDR for comparison in the next one.
                previousSdr = new int[columnCount];
                currentSdr.CopyTo(previousSdr, 0);
            }
            var finalSdrText = Helpers.StringifyVector(currentSdr);
            timer.Stop();
            Debug.WriteLine("Active Array: " + finalSdrText);

            int potentialRadius = parameters.Get<int>(KEY.POTENTIAL_RADIUS);
            bool isGlobalInhibition = parameters.Get<bool>(KEY.GLOBAL_INHIBITION);
            string inhibition = isGlobalInhibition ? "Global" : "Local";
            double milliseconds = timer.ElapsedMilliseconds;
            double seconds = milliseconds / 1000;
            speedWriter.WriteLine($"Column dimension: {columnDimension.ToString().PadRight(5)} |Potential Radius: {potentialRadius}| Inhibition type: {inhibition.PadRight(7)} | Total time: {milliseconds:N0} milliseconds ({seconds:N2} seconds).");

            // Render the final SDR as a 2D bitmap of active columns.
            int[,] activeColumnGrid = ArrayUtils.Make2DArray(currentSdr, columnDimension, columnDimension);
            activeColumnGrid = ArrayUtils.Transpose(activeColumnGrid);
            NeoCortexUtils.DrawBitmap(activeColumnGrid, bitmapSize, bitmapSize, outputImage);
        }
    }
}
/// <summary>
/// Filters matrix rows by the number of valid values, either globally (mode 0) or
/// per category-row grouping. In "Filter mode" == 2 the discarded rows are split off
/// into a supplementary table instead of being removed outright.
/// Reports an error via <paramref name="processInfo"/> when a grouping mode is chosen
/// but no category rows exist.
/// </summary>
public void ProcessData(IMatrixData mdata, Parameters param, ref IMatrixData[] supplTables, ref IDocumentData[] documents, ProcessInfo processInfo) {
    const bool rows = true;
    int minValids = PerseusPluginUtils.GetMinValids(param, out bool percentage);
    ParameterWithSubParams<int> modeParam = param.GetParamWithSubParams<int>("Mode");
    int modeInd = modeParam.Value;
    // Grouping modes require at least one category row to group by.
    if (modeInd != 0 && mdata.CategoryRowNames.Count == 0) {
        processInfo.ErrString = "No grouping is defined.";
        return;
    }
    PerseusPluginUtils.ReadValuesShouldBeParams(param, out FilteringMode filterMode, out double threshold, out double threshold2);
    if (modeInd != 0) {
        int gind = modeParam.GetSubParameters().GetParam<int>("Grouping").Value;
        string[][] groupCol = mdata.GetCategoryRowAt(gind);
        if (param.GetParam<int>("Filter mode").Value == 2) {
            //discarded
            // Filter mode 2: keep track of which rows pass so the failures can be
            // split off into a supplementary table.
            List<int> valids = new List<int>();
            List<int> notvalids = new List<int>();
            string[] groupVals = ArrayUtils.UniqueValuesPreserveOrder(groupCol);
            Array.Sort(groupVals);
            int[][] groupInds = CalcGroupInds(groupVals, groupCol);
            for (int i = 0; i < mdata.RowCount; i++) {
                // counts[g] = valid values of row i in group g; totals[g] = group size.
                int[] counts = new int[groupVals.Length];
                int[] totals = new int[groupVals.Length];
                for (int j = 0; j < groupInds.Length; j++) {
                    for (int k = 0; k < groupInds[j].Length; k++) {
                        // groupInds entries < 0 mark columns not assigned to any group.
                        if (groupInds[j][k] >= 0) {
                            totals[groupInds[j][k]]++;
                        }
                    }
                    if (PerseusPluginUtils.IsValid(mdata.Values.Get(i, j), threshold, threshold2, filterMode)) {
                        for (int k = 0; k < groupInds[j].Length; k++) {
                            if (groupInds[j][k] >= 0) {
                                counts[groupInds[j][k]]++;
                            }
                        }
                    }
                }
                bool[] groupValid = new bool[counts.Length];
                for (int j = 0; j < groupValid.Length; j++) {
                    groupValid[j] = PerseusPluginUtils.Valid(counts[j], minValids, percentage, totals[j]);
                }
                // modeInd == 2: row passes if ANY group is valid; otherwise ALL groups must be valid.
                if (modeInd == 2 ? ArrayUtils.Or(groupValid) : ArrayUtils.And(groupValid)) {
                    valids.Add(i);
                } else {
                    notvalids.Add(i);
                }
            }
            supplTables = new[] { PerseusPluginUtils.CreateSupplTabSplit(mdata, notvalids.ToArray()) };
        }
        NonzeroFilterGroup(minValids, percentage, mdata, param, modeInd == 2, threshold, threshold2, filterMode, groupCol);
    } else {
        // No grouping: filter over all columns at once.
        if (param.GetParam<int>("Filter mode").Value == 2) {
            supplTables = new[] { PerseusPluginUtils.NonzeroFilter1Split(rows, minValids, percentage, mdata, param, threshold, threshold2, filterMode) };
        }
        PerseusPluginUtils.NonzeroFilter1(rows, minValids, percentage, mdata, param, threshold, threshold2, filterMode);
    }
}
protected override byte[] GetResult() => ArrayUtils.Clone(_state);
/// <summary> /// Constructor (from flattened policy vector) /// </summary> /// <param name="values"></param> /// <param name="policyLogisticVectorsFlatAs"></param> /// <param name="draws"></param> public ONNXRuntimeExecutorResultBatch(bool isWDL, FP16[] values, float[] policyLogisticVectorsFlatAs, float[] valueFCActiviationsFlat, int numPositionsUsed) : this(isWDL, values, ArrayUtils.ToArrayOfArray(policyLogisticVectorsFlatAs, EncodedPolicyVector.POLICY_VECTOR_LENGTH), ArrayUtils.ToArrayOfArray(valueFCActiviationsFlat, 32 * 64), numPositionsUsed) { }