// Example #1
        /// <summary>
        /// Fills every chunk of the output image with a fixed 0..255 gradient,
        /// demonstrating the Chunker's block push API.
        /// </summary>
        /// <param name="inValues">Source pixel bytes (unused: the written pattern does not depend on the input).</param>
        /// <param name="outValues">Destination pixel bytes, written chunk by chunk.</param>
        /// <param name="w">Image width in pixels.</param>
        /// <param name="h">Image height in pixels.</param>
        /// <param name="xstep">Bytes per pixel step.</param>
        /// <param name="ystep">Bytes per row (stride).</param>
        protected override void processPixels(byte[] inValues, byte[] outValues, int w, int h, int xstep, int ystep) {
            //Using the chunk pull/push method
            Chunker c = new Chunker(chunkSize, w, h, ystep, xstep);

            // The gradient depends only on the position inside the chunk, so it is
            // computed once up front. (The previous version also pulled each input
            // block into `data` first, but then overwrote every byte, so that read
            // was dead work and has been removed.)
            byte[] data = new byte[chunkSize * chunkSize * 3];
            for (int j = 0; j < data.Length; j++) {
                data[j] = (byte)(j * 255 / (data.Length - 1));
            }
            for (int i = 0; i < c.getNumChunks(); i++) {
                c.setBlock(outValues, data, i);
            }
        }
        public void Chunking()
        {
            // Record the [start, end] pair reported for every chunk.
            var chunkStarts = new List<int>();
            var chunkEnds   = new List<int>();

            // 1500 items at a chunk size of 500 should yield exactly three chunks.
            var chunkCount = Chunker.ExecuteInChunks(1500, (sList, eList, start, end) =>
            {
                sList.Add(start);
                eList.Add(end);
                return 1;
            }, chunkStarts, chunkEnds, CancellationToken.None);

            Assert.AreEqual(0, chunkStarts[0]);
            Assert.AreEqual(499, chunkEnds[0]);

            Assert.AreEqual(500, chunkStarts[1]);
            Assert.AreEqual(999, chunkEnds[1]);

            Assert.AreEqual(1000, chunkStarts[2]);
            Assert.AreEqual(1499, chunkEnds[2]);

            Assert.AreEqual(3, chunkStarts.Count);
            Assert.AreEqual(3, chunkEnds.Count);
            Assert.AreEqual(3, chunkCount);
        }
// Example #3
        /// <summary>
        /// Loads the OpenNLP chunker model and builds a ChunkerME from it.
        /// On an I/O failure the chunker field is left null, so callers must
        /// handle a missing chunker.
        /// </summary>
        private void InitializeChunker()
        {
            InputStream modelIn = null;

            try
            {
                // NOTE(review): `ChunkerModel` is used both as the model-path member
                // here and as a type name on the next line — confirm this resolves
                // as intended (presumably a property holding the model file path).
                modelIn = new FileInputStream(ChunkerModel);
                ChunkerModel model = new ChunkerModel(modelIn);
                chunker = new ChunkerME(model);
            }
            catch (IOException ex)
            {
                // Model could not be read; signal that by nulling the chunker.
                chunker = null;
            }
            finally
            {
                if (modelIn != null)
                {
                    try
                    {
                        modelIn.close();
                    }
                    catch (IOException ex)
                    {
                        // Best-effort close; nothing useful to do if it fails.
                    }
                }
            }
        }
// Example #4
        /// <summary>
        /// Rebuilds all three channels by applying the stored motion vectors and
        /// difference data on top of the previous frame, chunk by chunk.
        /// </summary>
        /// <param name="past">Previous-frame channel data.</param>
        /// <param name="diff">Per-chunk difference data.</param>
        /// <param name="vectors">Per-chunk motion vectors, one byte per chunk.</param>
        private void reassemble(byte[][] past, byte[][] diff, byte[][] vectors)
        {
            // First channel uses the full channel geometry.
            Chunker fullChunker = new Chunker(chunkSize, state.channelWidth, state.channelHeight, state.channelWidth, 1);

            for (int chunk = 0; chunk < fullChunker.getNumChunks(); chunk++)
            {
                int topLeft = fullChunker.chunkIndexToPixelIndex(chunk);

                restoreChunk(state.channels[0], past[0], diff[0], vectors[0][chunk], topLeft, state.channelWidth);
            }

            // The Cb/Cr channels share the (possibly subsampled) deduced size.
            Size cbcrSize = Subsample.deduceCbCrSize(state);
            Chunker cbcrChunker = new Chunker(chunkSize, cbcrSize.Width, cbcrSize.Height, cbcrSize.Width, 1);

            for (int chunk = 0; chunk < cbcrChunker.getNumChunks(); chunk++)
            {
                int topLeft = cbcrChunker.chunkIndexToPixelIndex(chunk);

                restoreChunk(state.channels[1], past[1], diff[1], vectors[1][chunk], topLeft, cbcrSize.Width);
                restoreChunk(state.channels[2], past[2], diff[2], vectors[2][chunk], topLeft, cbcrSize.Width);
            }
        }
// Example #5
        /// <summary>
        /// Decodes one run-length-encoded channel from the reader. Literal bytes
        /// are copied through; an rleToken byte is followed by a (count, value)
        /// pair that repeats `value` `count` times. Each decoded 64-byte block is
        /// written back into the channel in zig-zag order.
        /// </summary>
        /// <param name="reader">Source of the encoded byte stream.</param>
        /// <param name="channel">Destination channel buffer.</param>
        /// <param name="c">Chunker describing the channel's 8x8 block layout.</param>
        private void readChannel(BinaryReader reader, byte[] channel, Chunker c)
        {
            byte[] data = new byte[64];
            byte   count, val;
            // (Removed an unused `Chunker.zigZag8Index()` local; the zig-zag
            // reordering is done by setZigZag8Block below.)

            for (int iChunk = 0; iChunk < c.getNumChunks(); iChunk++)
            {
                for (int iPixel = 0; iPixel < 64;)
                {
                    val = reader.ReadByte();
                    if (val != rleToken)
                    {
                        data[iPixel++] = val;
                    }
                    else
                    {
                        count = reader.ReadByte();
                        val   = reader.ReadByte();
                        // Guard against corrupt streams whose run length would
                        // overflow the 64-byte block buffer.
                        while (count > 0 && iPixel < data.Length)
                        {
                            data[iPixel++] = val;
                            count--;
                        }
                    }
                }
                //set the data into the channel
                c.setZigZag8Block(channel, data, iChunk);
            }
        }
// Example #6
        /// <summary>
        /// Overlays the stored motion vectors on the view: a small rectangle for
        /// zero-motion blocks, otherwise a line from the block centre in the
        /// direction of the offset packed into each vector byte (high nibble = x,
        /// low nibble = y, both biased by 7).
        /// </summary>
        /// <param name="g">Target graphics surface.</param>
        public override void viewExtra(Graphics g)
        {
            //base.viewExtra(g);
            if (state == null)
            {
                return;
            }
            // (Removed an unused Chunker local that was constructed but never used.)
            int offsetX, offsetY;
            int y = state.channelHeight - 4;
            int x = 4;

            for (int i = 0; i < vState.channels[0].Length; i++)
            {
                // Unpack the 4-bit offsets and remove the +7 bias.
                offsetX = ((vState.channels[0][i] & 0xF0) >> 4) - 7;
                offsetY = (vState.channels[0][i] & 0x0F) - 7;
                if (offsetX == 0 && offsetY == 0)
                {
                    g.FillRectangle(Brushes.BlanchedAlmond, x - 1, y - 1, 2, 2);
                }
                else
                {
                    g.DrawLine(Pens.BlanchedAlmond, x, y, x + offsetX, y - offsetY);
                }
                // Advance one 8-pixel block; wrap to the next row when past the width.
                x += 8;
                if (x - 4 >= state.channelWidth)
                {
                    x  = 4;
                    y -= 8;
                }
            }
        }
// Example #7
        public void OneExtra()
        {
            // Record the [start, end] pair reported for every chunk.
            var chunkStarts = new List<int>();
            var chunkEnds   = new List<int>();

            // 1001 items: two full 500-item chunks plus a single-item remainder.
            var chunkCount = Chunker.ExecuteInChunks(1001, (sList, eList, start, end) =>
            {
                sList.Add(start);
                eList.Add(end);
                return 1;
            }, chunkStarts, chunkEnds);

            Assert.AreEqual(0, chunkStarts[0]);
            Assert.AreEqual(499, chunkEnds[0]);

            Assert.AreEqual(500, chunkStarts[1]);
            Assert.AreEqual(999, chunkEnds[1]);

            Assert.AreEqual(1000, chunkStarts[2]);
            Assert.AreEqual(1000, chunkEnds[2]);

            Assert.AreEqual(3, chunkStarts.Count);
            Assert.AreEqual(3, chunkEnds.Count);
            Assert.AreEqual(3, chunkCount);
        }
// Example #8
        /// <summary>
        /// Fills every chunk of the output image with a fixed 0..255 gradient,
        /// demonstrating the Chunker's block push API.
        /// </summary>
        /// <param name="inValues">Source pixel bytes (unused: the written pattern does not depend on the input).</param>
        /// <param name="outValues">Destination pixel bytes, written chunk by chunk.</param>
        /// <param name="w">Image width in pixels.</param>
        /// <param name="h">Image height in pixels.</param>
        /// <param name="xstep">Bytes per pixel step.</param>
        /// <param name="ystep">Bytes per row (stride).</param>
        protected override void processPixels(byte[] inValues, byte[] outValues, int w, int h, int xstep, int ystep)
        {
            //Using the chunk pull/push method
            Chunker c = new Chunker(chunkSize, w, h, ystep, xstep);

            // The gradient depends only on the position inside the chunk, so it is
            // computed once up front. (The previous version also pulled each input
            // block into `data` first, but then overwrote every byte, so that read
            // was dead work and has been removed, along with the commented-out
            // iterator demo.)
            byte[] data = new byte[chunkSize * chunkSize * 3];
            for (int j = 0; j < data.Length; j++)
            {
                data[j] = (byte)(j * 255 / (data.Length - 1));
            }

            for (int i = 0; i < c.getNumChunks(); i++)
            {
                c.setBlock(outValues, data, i);
            }
        }
// Example #9
        /// <summary>
        /// Entry point: parses the command line, then splits the given image
        /// into tiles of the requested size and saves them.
        /// </summary>
        static void Main(string[] args)
        {
            var settings = GetCommandLineArgs(args);

            var chunker = new Chunker(settings.Image, settings.Width, settings.Height);
            chunker.SplitAndSave();
        }
// Example #10
        /// <summary>
        /// Creates the three channel blobs (C1..C3) and two vector blobs (V2, V3),
        /// copies the shared metadata onto all of them, and allocates their channel
        /// buffers. Channel blobs use the full channel geometry; vector blobs use
        /// the chunk-grid geometry derived from an 8-pixel Chunker.
        /// </summary>
        /// <param name="metadata">Source of the image/channel dimensions and encoding settings.</param>
        private void setupBlobs(DataBlob metadata)
        {
            C1      = new DataBlob();
            C2      = new DataBlob();
            C3      = new DataBlob();
            V2      = new DataBlob();
            V3      = new DataBlob();
            C1.type = C2.type = C3.type = DataBlob.Type.Channels;
            V2.type = V3.type = DataBlob.Type.Vectors;

            //import metadata onto channels
            C1.imageWidth      = C2.imageWidth = C3.imageWidth = metadata.imageWidth;
            C1.imageHeight     = C2.imageHeight = C3.imageHeight = metadata.imageHeight;
            C1.channelWidth    = C2.channelWidth = C3.channelWidth = metadata.channelWidth;
            C1.channelHeight   = C2.channelHeight = C3.channelHeight = metadata.channelHeight;
            C1.quantizeQuality = C2.quantizeQuality = C3.quantizeQuality = metadata.quantizeQuality;
            C1.samplingMode    = C2.samplingMode = C3.samplingMode = metadata.samplingMode;

            // The vector blobs are sized in chunks, not pixels, so derive their
            // "channel" dimensions from the chunk grid.
            Chunker c = new Chunker(8, metadata.channelWidth, metadata.channelHeight, metadata.channelWidth, 1);

            V2.imageWidth      = V3.imageWidth = metadata.imageWidth;
            V2.imageHeight     = V3.imageHeight = metadata.imageHeight;
            V2.channelWidth    = V3.channelWidth = c.getChunksWide();
            V2.channelHeight   = V3.channelHeight = c.getChunksHigh();
            V2.quantizeQuality = V3.quantizeQuality = metadata.quantizeQuality;
            V2.samplingMode    = V3.samplingMode = metadata.samplingMode;

            //Allocate space for incoming data
            C1.channels = new byte[3][];
            C2.channels = new byte[3][];
            C3.channels = new byte[3][];
            V2.channels = new byte[3][];
            V3.channels = new byte[3][];

            // Channel 0 is full size; channels 1 and 2 use the padded
            // (subsampled) Cb/Cr size.
            int  cMajor    = C1.channelWidth * C1.channelHeight;
            Size sizeMinor = Subsample.getPaddedCbCrSize(new Size(C1.channelWidth, C1.channelHeight), C1.samplingMode);
            int  cMinor    = sizeMinor.Width * sizeMinor.Height;

            C1.channels[0] = new byte[cMajor];
            C2.channels[0] = new byte[cMajor];
            C3.channels[0] = new byte[cMajor];
            C1.channels[1] = new byte[cMinor];
            C2.channels[1] = new byte[cMinor];
            C3.channels[1] = new byte[cMinor];
            C1.channels[2] = new byte[cMinor];
            C2.channels[2] = new byte[cMinor];
            C3.channels[2] = new byte[cMinor];
            // Vector blobs: note this uses getCbCrSize (unpadded), unlike the
            // channel blobs above which use getPaddedCbCrSize.
            cMajor         = V2.channelWidth * V2.channelHeight;
            sizeMinor      = Subsample.getCbCrSize(new Size(V2.channelWidth, V2.channelHeight), V2.samplingMode);
            cMinor         = sizeMinor.Width * sizeMinor.Height;
            V2.channels[0] = new byte[cMajor];
            V3.channels[0] = new byte[cMajor];
            V2.channels[1] = new byte[cMinor];
            V3.channels[1] = new byte[cMinor];
            V2.channels[2] = new byte[cMinor];
            V3.channels[2] = new byte[cMinor];
        }
// Example #11
 /// <summary>
 /// Builds one Chunker per chunk index, 0 .. chunksCount-1.
 /// </summary>
 private Chunker[] CreateChunkers()
 {
     var chunkers = new Chunker[chunksCount];

     for (int index = 0; index < chunkers.Length; index++)
     {
         chunkers[index] = new Chunker(index);
     }

     return chunkers;
 }
// Example #12
        /// <summary>
        /// Writes all three channels of the blob. The first channel uses the full
        /// channel geometry; the other two share the deduced Cb/Cr size.
        /// </summary>
        protected static void writeChannels(BinaryWriter writer, DataBlob ch)
        {
            writeChannel(writer, ch.channels[0], new Chunker(8, ch.channelWidth, ch.channelHeight, ch.channelWidth, 1));

            Size cbcr = Subsample.deduceCbCrSize(ch);
            Chunker cbcrChunker = new Chunker(8, cbcr.Width, cbcr.Height, cbcr.Width, 1);

            writeChannel(writer, ch.channels[1], cbcrChunker);
            writeChannel(writer, ch.channels[2], cbcrChunker);
        }
// Example #13
        /// <summary>
        /// Reads all three channels of the blob. The first channel uses the full
        /// channel geometry; the other two share the deduced Cb/Cr size.
        /// </summary>
        private void readChannels(BinaryReader reader, DataBlob ch)
        {
            readChannel(reader, ch.channels[0], new Chunker(8, ch.channelWidth, ch.channelHeight, ch.channelWidth, 1));

            Size cbcr = Subsample.deduceCbCrSize(ch);
            Chunker cbcrChunker = new Chunker(8, cbcr.Width, cbcr.Height, cbcr.Width, 1);

            readChannel(reader, ch.channels[1], cbcrChunker);
            readChannel(reader, ch.channels[2], cbcrChunker);
        }
// Example #14
        public void Chunk_GivenRuleWithConsecutiveChunks_CapturesConsecutiveChunks()
        {
            // A rule whose pattern matches two consecutive chunks should wrap
            // both in a single enclosing chunk.
            const string taggedText = "[NP I/NN] [VP ate/VBD [NP cake/NN]]";
            var rules = new List<Rule>
            {
                new Rule { ChunkName = "DC", Pattern = @"{NP} {VP}" }
            };

            var result = new Chunker().Chunk(taggedText, rules);

            Assert.AreEqual("[DC [NP I/NN] [VP ate/VBD [NP cake/NN]]]", result);
        }
// Example #15
        public void Chunk_GivenRuleToMatchTagOnly_ReturnsTagChunksForAnyWords()
        {
            // A wildcard word with a fixed tag should chunk every word carrying
            // that tag, regardless of the word itself.
            const string taggedText = "The/DT good/JJ ,/, the/DT bad/JJ and/CC the/DT ugly/JJ";
            var rules = new List<Rule>
            {
                new Rule { ChunkName = "ADJ", Pattern = @"{*/JJ}" }
            };

            var result = new Chunker().Chunk(taggedText, rules);

            Assert.AreEqual("The/DT [ADJ good/JJ] ,/, the/DT [ADJ bad/JJ] and/CC the/DT [ADJ ugly/JJ]", result);
        }
// Example #16
        public void Chunk_GivenRuleToMatchWordOnly_ReturnsWordChunksForAnyTags()
        {
            // A fixed word with a wildcard tag should chunk every occurrence of
            // that word, regardless of its tag.
            const string taggedText = "I/PRP thought/VBD a/DT strange/JJ thought/NN";
            var rules = new List<Rule>
            {
                new Rule { ChunkName = "TH", Pattern = @"{thought/*}" }
            };

            var result = new Chunker().Chunk(taggedText, rules);

            Assert.AreEqual("I/PRP [TH thought/VBD] a/DT strange/JJ [TH thought/NN]", result);
        }
// Example #17
        /// <summary>
        /// Computes motion vectors for every chunk of every channel by searching a
        /// [-7,7]x[-7,7] window around each block, stores the best-match vector in
        /// vState, and rewrites state.channels as the difference against the
        /// motion-compensated previous frame.
        /// </summary>
        /// <param name="chOld">Previous-frame channel data to search against.</param>
        /// <param name="chNew">Current-frame channel data.</param>
        private void calcMoVec(byte[][] chOld, byte[][] chNew)
        {
            //for each channel
            //chunk state.channels into 8x8 blocks
            //compare each block with blocks surrounding them in the arg channels
            //over x = [-7,7] (range 15 values)
            //and  y = [-7,7] (range 15 values)


            //Do the first channel
            Chunker c = new Chunker(chunkSize, state.channelWidth, state.channelHeight, state.channelWidth, 1);
            int     pixelTL;
            byte    offset;

            // The vector "channel" has one byte per chunk, laid out on the chunk grid.
            vState.channels[0]   = new byte[c.getNumChunks()];
            vState.channelWidth  = c.getChunksWide();
            vState.channelHeight = c.getChunksHigh();

            for (int i = 0; i < c.getNumChunks(); i++)
            {
                pixelTL = c.chunkIndexToPixelIndex(i);
                //find best match given search area
                offset = findOffsetVector(chNew[0], chOld[0], pixelTL, state.channelWidth);
                //save best match vector
                vState.channels[0][i] = offset;
                //update channels to be difference.
                // (Removed a leftover `if (i == 20) { i = 20; }` debugging stub.)
                setDiff(state.channels[0], chNew[0], chOld[0], pixelTL, offset, state.channelWidth);
            }

            //Do the second two channels
            Size smaller = Subsample.deduceCbCrSize(state);

            c = new Chunker(chunkSize, smaller.Width, smaller.Height, smaller.Width, 1);
            vState.channels[1] = new byte[c.getNumChunks()];
            vState.channels[2] = new byte[c.getNumChunks()];
            for (int i = 0; i < c.getNumChunks(); i++)
            {
                pixelTL = c.chunkIndexToPixelIndex(i);
                offset  = findOffsetVector(chNew[1], chOld[1], pixelTL, smaller.Width);
                vState.channels[1][i] = offset;
                setDiff(state.channels[1], chNew[1], chOld[1], pixelTL, offset, smaller.Width);
                //Just use the same vectors for channel 3 as channel 2. Probably okay.
                vState.channels[2][i] = offset;
                setDiff(state.channels[2], chNew[2], chOld[2], pixelTL, offset, smaller.Width);
            }
        }
        /// <summary>
        /// Console REPL entry point: greets the user, asks for a debug level, then
        /// reads lines forever, chunking and evaluating each one.
        /// </summary>
        static void Main(string[] args)
        {
            Console.WriteLine("Welcome to console version of MathParser programing language interpreter");
            Thread.Sleep(500);
            Console.WriteLine("Current Version: v" + currentVersion);
            Console.Beep();
            Console.WriteLine("Press any key to continue...");
            Console.ReadKey();

            Chunker.SetEnvironment(new ConsoleEnvironment());
            int  debugerLevel = 0;
            bool validInput   = false;

            // Keep prompting until the user enters a valid integer. The original
            // try/finally marked the input valid even when int.Parse threw, so a
            // non-numeric answer crashed the program; TryParse loops instead.
            while (!validInput)
            {
                Console.Clear();
                Console.Write("Set debuger level to: ");
                validInput = int.TryParse(Console.ReadLine(), out debugerLevel);
            }
            Console.Clear();
            while (true)
            {
                Console.Write(">");
                Queue<Word> words = TextProcessor.ProcessString(Console.ReadLine());
                if (debugerLevel > 1)
                {
                    // Verbose mode: dump the token classification for the line.
                    foreach (Word w in words)
                    {
                        Console.WriteLine(w.GetCharacterType().ToString());
                    }
                }
                Chunk  r   = Chunker.Chunkify(words);
                Result res = r.Evaluate();
                // Always echo the result in debug mode; otherwise only surface
                // unexpected results (errors).
                if (debugerLevel > 0 || res.IsUnexpected())
                {
                    Console.WriteLine(res.ToString());
                }
            }
        }
// Example #19
        /// <summary>
        /// Runs one source string through the full pipeline: scan, parse, chunk,
        /// then execute on the VM. Prints the program result, the loaded modules,
        /// memory use, and any `errors` global. Returns 0 in all cases; a failed
        /// scan/parse/chunk simply stops early.
        /// </summary>
        /// <param name="input">Source text to run.</param>
        /// <param name="name">Name used for diagnostics in each pipeline stage.</param>
        static int Run(string input, string name)
        {
            Scanner      scanner = new Scanner(input, name);
            List <Token> tokens  = scanner.Tokens;

            if (scanner.HasScanned == false)
            {
                return(0);
            }

            Parser parser  = new Parser(tokens, name);
            Node   program = parser.ParsedTree;

            if (parser.HasParsed == false)
            {
                return(0);
            }

            Chunker chunker = new Chunker(program, name, Prelude.GetPrelude());
            Chunk   chunk   = chunker.Chunk;

            if (chunker.HasChunked == true)
            {
                VM       vm     = new VM(chunk);
                VMResult result = vm.ProtectedRun();
                if (result.status != VMResultType.OK)
                {
                    Console.WriteLine("Program returned ERROR!");
                }
                else if (result.value.Type != UnitType.Null)
                {
                    Console.WriteLine("Program returned: " + result.value);
                }

                // Print modules
                Unit machine_modules = chunk.GetUnitFromTable("machine", "modules");
                Console.WriteLine(vm.ProtectedCallFunction(machine_modules, null));

                // Print memory_use
                Unit machine_memory_use = chunk.GetUnitFromTable("machine", "memory_use");
                Console.WriteLine(vm.ProtectedCallFunction(machine_memory_use, null));

                // Print Global variable errors
                Unit errors_unit = vm.GetGlobal(chunk, "errors");
                if (errors_unit.Type == UnitType.Integer)
                {
                    Console.WriteLine("Total errors in test: " + errors_unit.integerValue);
                }
            }
            return(0);
        }
// Example #20
        /// <summary>
        /// Runs the full map-generation pipeline, logging elapsed time after each
        /// stage, and writes the resulting SVG to the configured output path.
        /// </summary>
        static void Main(string [] args)
        {
            stopwatch = Stopwatch.StartNew();

            string path = Constants.output_path + "output-" + MapToggles.bit_mask_string + ".svg";

            // Each stage announces itself, runs, then logs its elapsed time.
            Action<string, Action> runStage = (label, stage) =>
            {
                Console.WriteLine(label);
                stage();
                LogTime();
            };

            Map map = null;
            runStage("Reading files...", () => map = new Map());
            runStage("Chunking...", () => Chunker.GenerateChunks(map));
            runStage("Grouping Cities...", () => CityGrouper.GroupCities(map));
            runStage("Naming Towers...", () => Namer.NameTowers(map));
            runStage("Nudging towers...", () => Nudger.NudgeTowers(map));
            runStage("Placing Labels...", () => LabelPlacer.PlaceLabels(map));

            string contents = null;
            runStage("Generating SVG...", () => contents = SVGWriter.GenerateSVG(map, false));
            runStage("Writing File...", () => File.WriteAllText(path, contents));

            Console.WriteLine("Done");

            Console.WriteLine(" >> Saved to: " + path);

            Console.WriteLine(total_time + "s total used.");

            Console.WriteLine("\nPress any key to exit.");
            Console.ReadKey();
        }
        /// <summary>
        /// Wires the framer and chunker together: received frames feed chunks into
        /// the chunker, and completed messages are surfaced via OnMessageReceived.
        /// </summary>
        public DefaultRtmpProtocol()
        {
            _framer  = new Framer();
            _chunker = new Chunker();

            _framer.ChunkReceived += (sender, args) => _chunker.AddChunk(args.Chunk);

            _chunker.MessageReceived += (sender, args) => this.OnMessageReceived(sender, args);
        }
// Example #22
        /// <summary>
        /// Run-length-encodes one channel, chunk by chunk, walking each 64-byte
        /// block in zig-zag order. Runs of 3+ equal bytes (and any literal equal to
        /// rleToken) are emitted as the triple (rleToken, count, value); a run of 2
        /// is emitted as two literals; a single byte as one literal.
        /// </summary>
        /// <param name="writer">Destination for the encoded stream.</param>
        /// <param name="channel">Channel bytes to encode.</param>
        /// <param name="c">Chunker describing the channel's 8x8 block layout.</param>
        protected static void writeChannel(BinaryWriter writer, byte[] channel, Chunker c)
        {
            byte count, prev, val;
            var  indexer = Chunker.zigZag8Index();

            byte[] data = new byte[64];
            for (int i = 0; i < c.getNumChunks(); i++)
            {
                c.getBlock(channel, data, i);
                count = 0;
                prev  = data[0];
                // NOTE(review): `count` is a byte and is never flushed mid-run, so a
                // run longer than 255 would wrap and corrupt the stream — confirm
                // runs that long cannot occur for 64-byte blocks (they cannot here,
                // since a block holds at most 64 bytes).
                foreach (int index in indexer)
                {
                    val = data[index];
                    if (val == prev)
                    {
                        count++;
                    }
                    else
                    {
                        // Run ended: emit it. A value equal to rleToken must always
                        // be escaped as a (token, count, value) triple, even for a
                        // run of one, so the decoder never misreads it as a token.
                        if (prev == rleToken || count >= 3)
                        {
                            writer.Write(rleToken);
                            writer.Write(count);
                        }
                        else if (count == 2)
                        {
                            // Two literals are cheaper than a 3-byte triple.
                            writer.Write((prev));
                        }
                        writer.Write((prev));
                        prev  = val;
                        count = 1;
                    }
                }
                //write out the last token (same emit logic as above)
                if (prev == rleToken || count >= 3)
                {
                    writer.Write(rleToken);
                    writer.Write(count);
                }
                else if (count == 2)
                {
                    writer.Write(prev);
                }
                writer.Write(prev);
                //final chunk written out
            } //channel written out
        }
// Example #23
        public void LessThenChunkSize()
        {
            var starts = new List<int>();
            var ends   = new List<int>();

            // A single item fits in one chunk: expect exactly one [0, 0] range.
            Chunker.ExecuteInChunks(1, (chunkStart, chunkEnd) =>
            {
                starts.Add(chunkStart);
                ends.Add(chunkEnd);
            });

            Assert.AreEqual(0, starts[0]);
            Assert.AreEqual(0, ends[0]);

            Assert.AreEqual(1, starts.Count);
            Assert.AreEqual(1, ends.Count);
        }
// Example #24
        public void LessThenChunkSize()
        {
            var chunkStarts = new List<int>();
            var chunkEnds   = new List<int>();

            // A single item fits in one chunk: expect one [0, 0] range and a
            // total of one processed chunk.
            var chunkCount = Chunker.ExecuteInChunks(1, (sList, eList, start, end) =>
            {
                sList.Add(start);
                eList.Add(end);
                return 1;
            }, chunkStarts, chunkEnds);

            Assert.AreEqual(0, chunkStarts[0]);
            Assert.AreEqual(0, chunkEnds[0]);

            Assert.AreEqual(1, chunkStarts.Count);
            Assert.AreEqual(1, chunkEnds.Count);
            Assert.AreEqual(1, chunkCount);
        }
// Example #25
        /// <summary>
        /// Performs startup checks and then initializes NLP classes if the program install is valid.
        /// </summary>
        /// <returns>0 if NLP classes were initialized properly. Otherwise, returns as RunStartupChecks.</returns>
        /// <summary>
        /// Performs startup checks and then initializes NLP classes if the program install is valid.
        /// </summary>
        /// <returns>0 if NLP classes were initialized properly. Otherwise, returns as RunStartupChecks.</returns>
        public static int RunStartupActions()
        {
            int checksResult = RunStartupChecks();
            if (checksResult != 0)
            {
                return checksResult;
            }

            // All model files live under the current directory; open them read-only.
            Func<string, FileStream> openModel =
                relativePath => new FileStream(Environment.CurrentDirectory + relativePath, FileMode.Open, FileAccess.Read);

            MEDetector          = new SentenceDetector(openModel(OPENNLP_RESOURCES_ROOT_DIRECTORY + OPENNLP_SENT_DETECT_MODEL));
            METokenizer         = new Tokenizer(openModel(OPENNLP_RESOURCES_ROOT_DIRECTORY + OPENNLP_TOKENIZER_MODEL));
            METagger            = new POSTagger(openModel(OPENNLP_RESOURCES_ROOT_DIRECTORY + OPENNLP_POS_TAGGER_MODEL));
            MEChunker           = new Chunker(openModel(OPENNLP_RESOURCES_ROOT_DIRECTORY + OPENNLP_CHUNKER_MODEL));
            SimpleLemmatizer    = new DictLemmatizer();
            MENameFinder        = new NameFinder(NameFinder.DETECTOR_TYPE.Person);
            NHThesaurus         = new MyThes(Environment.CurrentDirectory + NHUNSPELL_THESAURUS_FILE);
            Wnlib.WNCommon.path = Environment.CurrentDirectory + WORDNET_ROOT_DIRECTORY;

            return 0;
        }
// Example #26
        /// <summary>
        /// Applies the (forward or inverse, per isInverse) DCT with quantization to
        /// every chunk of all three channels. The luma channel uses the Y
        /// quantization table; the two chroma channels use the C table over the
        /// padded subsampled geometry. No-op when state or its channels are missing.
        /// </summary>
        protected override void clean()
        {
            base.clean();
            if (state == null || state.channels == null)
            {
                return;
            }

            // Only the forward pass reads the quality property; the inverse pass
            // reuses whatever quality is already recorded in state.
            if (!isInverse)
            {
                state.quantizeQuality = properties["quality"].nValue;
            }
            generateQTables(state.quantizeQuality);

            padChannels();
            Chunker c = new Chunker(chunkSize, state.channelWidth, state.channelHeight, state.channelWidth, 1);

            // `data` is a reusable chunk-sized scratch buffer; doDCT/doIDCT return
            // a (possibly new) array which is written back into the channel.
            byte[] data = new byte[chunkSize * chunkSize];
            for (int i = 0; i < c.getNumChunks(); i++)
            {
                c.getBlock(state.channels[0], data, i);
                data = isInverse ? doIDCT(data, quantizationY) : doDCT(data, quantizationY);
                c.setBlock(state.channels[0], data, i);
            }

            // Chroma channels share the padded subsampled geometry and the C table.
            Size tempS = Subsample.getPaddedCbCrSize(new Size(state.channelWidth, state.channelHeight), state.samplingMode);

            c = new Chunker(chunkSize, tempS.Width, tempS.Height, tempS.Width, 1);
            for (int i = 0; i < c.getNumChunks(); i++)
            {
                c.getBlock(state.channels[1], data, i);
                data = isInverse ? doIDCT(data, quantizationC) : doDCT(data, quantizationC);
                c.setBlock(state.channels[1], data, i);
                c.getBlock(state.channels[2], data, i);
                data = isInverse ? doIDCT(data, quantizationC) : doDCT(data, quantizationC);
                c.setBlock(state.channels[2], data, i);
            }
        }
// Example #27
        public void OneExtra()
        {
            var starts = new List<int>();
            var ends   = new List<int>();

            // 1001 items: two full 500-item chunks plus a single trailing item.
            Chunker.ExecuteInChunks(1001, (chunkStart, chunkEnd) =>
            {
                starts.Add(chunkStart);
                ends.Add(chunkEnd);
            });

            Assert.AreEqual(0, starts[0]);
            Assert.AreEqual(499, ends[0]);

            Assert.AreEqual(500, starts[1]);
            Assert.AreEqual(999, ends[1]);

            Assert.AreEqual(1000, starts[2]);
            Assert.AreEqual(1000, ends[2]);

            Assert.AreEqual(3, starts.Count);
            Assert.AreEqual(3, ends.Count);
        }
// Example #28
        /// <summary>
        /// Decodes one run-length-encoded channel: literal bytes are copied through,
        /// and an rleToken byte is followed by a (count, value) pair that repeats
        /// `value` `count` times. Each decoded 64-byte block is written back into
        /// the channel in zig-zag order.
        /// </summary>
        /// <param name="reader">Source of the encoded byte stream.</param>
        /// <param name="channel">Destination channel buffer.</param>
        /// <param name="c">Chunker describing the channel's 8x8 block layout.</param>
        private void readChannel(BinaryReader reader, byte[] channel, Chunker c) {
            byte[] data = new byte[64];
            byte count, val;
            // (Removed an unused `Chunker.zigZag8Index()` local; the zig-zag
            // reordering is done by setZigZag8Block below.)

            for (int iChunk = 0; iChunk < c.getNumChunks(); iChunk++) {
                for (int iPixel = 0; iPixel < 64;) {
                    val = reader.ReadByte();
                    if (val != rleToken) {
                        data[iPixel++] = val;
                    } else {
                        count = reader.ReadByte();
                        val = reader.ReadByte();
                        // Guard against corrupt streams whose run length would
                        // overflow the 64-byte block buffer.
                        while (count > 0 && iPixel < data.Length) {
                            data[iPixel++] = val;
                            count--;
                        }
                    }
                }
                //set the data into the channel
                c.setZigZag8Block(channel, data, iChunk);
            }
        }
// Example #29
        /// <summary>
        /// Builds the text-mining engine: loads the symptom list, builds a LingPipe
        /// dictionary of symptom names and synonyms tagged "PHENOTYPE", creates an
        /// approximate-dictionary chunker over it (exact matches only, since the
        /// max edit distance is 0), and deserializes a pre-trained HMM chunker from
        /// disk.
        /// </summary>
        public TextMiningEngine()
        {
            Console.WriteLine("TextMiningEngine initialization ...");
            client       = new HttpClient();
            symptomsList = new List <Symptom>();
            GetSymptomsList();
            //getSymptomsListBeta();

            //Preparing dictionary
            //Construct dictionary for symptoms: every name and synonym maps to "PHENOTYPE"
            TrieDictionary dict = new TrieDictionary();

            foreach (Symptom pheno in symptomsList)
            {
                dict.addEntry(new com.aliasi.dict.DictionaryEntry(pheno.Name, "PHENOTYPE"));
                foreach (string synonym in pheno.Synonyms)
                {
                    dict.addEntry(new com.aliasi.dict.DictionaryEntry(synonym, "PHENOTYPE"));
                }
            }

            // Edit distance allows substitutions/insertions/deletions at cost -1,
            // but maxDistance 0 below means only exact dictionary matches chunk.
            TokenizerFactory     tokenizerFactory = IndoEuropeanTokenizerFactory.INSTANCE;
            WeightedEditDistance editDistance     = new FixedWeightEditDistance(0, -1, -1, -1, System.Double.NaN);

            double maxDistance = 0.0;

            chunker = new ApproxDictionaryChunker(dict, tokenizerFactory, editDistance, maxDistance);

            //////////////////////////////////////////
            //FOR HMM PREPARATION
            // The serialized model path is resolved from the RD_AGGREGATOR_SETTINGS
            // environment variable at construction time.
            java.io.File modelFile = new java.io.File($"{Environment.GetEnvironmentVariable("RD_AGGREGATOR_SETTINGS")}/Aggregator/tools/model.test");

            chunkerHMM = (Chunker)AbstractExternalizable.readObject(modelFile);

            //////////////////////////////////////////
            Console.WriteLine("TextMiningEngine initialization finished");
        }
        public void TestPenguins()
        {
            // Chunk a file with the BuzHash content-defined chunker and collect
            // the chunks in order.
            Func<string, List<byte[]>> chunkFile = filePath =>
            {
                var chunks = new List<byte[]>();
                using (FileStream fs = File.OpenRead(filePath))
                {
                    Chunker c = new Chunker(fs, HasherType.BuzHash);

                    foreach (byte[] chunk in c)
                    {
                        chunks.Add(chunk);
                    }
                }
                return chunks;
            };

            var listPenguinChunks        = chunkFile(@"Resources\Penguins.jpg");
            var listPenguinChangedChunks = chunkFile(@"Resources\Penguins - Changed.jpg");

            Assert.AreEqual(176, listPenguinChangedChunks.Count);
            Assert.AreEqual(176, listPenguinChunks.Count);

            // The test fixture differs only in its first chunk; all others must
            // be identical between the two files.
            for (var i = 0; i < 176; i++)
            {
                if (i == 0)
                    CollectionAssert.AreNotEqual(listPenguinChunks[i], listPenguinChangedChunks[i]);
                else
                    CollectionAssert.AreEqual(listPenguinChunks[i], listPenguinChangedChunks[i]);
            }
        }
Beispiel #31
0
 /// <summary>
 /// Wraps a stream of raw coref samples with the POS tagger and chunker
 /// used to shallow-parse each sample.
 /// </summary>
 public ShallowParseCorefEnhancerStream(POSTagger posTagger, Chunker chunker, ObjectStream <RawCorefSample> samples) : base(samples)
 {
     // The two assignments are independent of each other.
     this.chunker   = chunker;
     this.posTagger = posTagger;
 }
Beispiel #32
0
        private void reassemble(byte[][] past, byte[][] diff, byte[][] vectors) {
            // Rebuild the first (luma) channel chunk by chunk from the past
            // frame data, the stored differences and the per-chunk vector bytes.
            Chunker lumaChunker = new Chunker(chunkSize, state.channelWidth, state.channelHeight, state.channelWidth, 1);
            for (int i = 0; i < lumaChunker.getNumChunks(); i++) {
                int topLeft = lumaChunker.chunkIndexToPixelIndex(i);
                restoreChunk(state.channels[0], past[0], diff[0], vectors[0][i], topLeft, state.channelWidth);
            }

            // The two chroma channels are subsampled, so they need their own
            // chunk geometry; both share it.
            Size chromaSize = Subsample.deduceCbCrSize(state);
            Chunker chromaChunker = new Chunker(chunkSize, chromaSize.Width, chromaSize.Height, chromaSize.Width, 1);
            for (int i = 0; i < chromaChunker.getNumChunks(); i++) {
                int topLeft = chromaChunker.chunkIndexToPixelIndex(i);
                restoreChunk(state.channels[1], past[1], diff[1], vectors[1][i], topLeft, chromaSize.Width);
                restoreChunk(state.channels[2], past[2], diff[2], vectors[2][i], topLeft, chromaSize.Width);
            }
        }
Beispiel #33
0
        // Verifies that chunks are surfaced incrementally while data is pushed
        // (not only when the session ends), and that the resulting chunk list
        // and node tree agree with DedupNodeOrChunkHashAlgorithm over the same
        // bytes.
        private void ChunksEnumeratedAsFileIsRead(Func <IChunker> chunkerFactory, HashType hashType)
        {
            var chunks = new List <ChunkInfo>();

            byte[] bytes;

            using (var chunker = chunkerFactory())
            {
                // Four times the minimum push size gives several pushes' worth
                // of data.
                bytes = new byte[4 * chunker.Configuration.MinPushBufferSize];

                // Fixed seed keeps the test deterministic.
                var r = new Random(Seed: 0);
                r.NextBytes(bytes);

                using (var session = chunker.BeginChunking(chunk =>
                {
                    chunks.Add(chunk);
                }))
                {
                    int pushSize       = 2 * chunker.Configuration.MinPushBufferSize;
                    int lastChunkCount = 0;
                    for (int i = 0; i < bytes.Length; i += pushSize)
                    {
                        session.PushBuffer(bytes, i, Math.Min(pushSize, bytes.Length - i));
                        // Every push must yield at least one new chunk right away.
                        Assert.True(chunks.Count > lastChunkCount);
                        lastChunkCount = chunks.Count;
                    }
                }
            }

            string[] expectedChunkHashes = chunks.Select(c => c.Hash.ToHex()).ToArray();

            DedupNode rootFromhash;

            string[] actualChunkHashes;

            // Hashing the same bytes through the high-level algorithm must
            // produce the same chunk leaves in the same order.
            using (var hasher = new DedupNodeOrChunkHashAlgorithm(Chunker.Create(hashType.GetChunkerConfiguration())))
            {
                hasher.SetInputLength(bytes.Length);
                hasher.ComputeHash(bytes);
                rootFromhash      = hasher.GetNode();
                actualChunkHashes = rootFromhash.EnumerateChunkLeafsInOrder().Select(c => c.Hash.ToHex()).ToArray();
                Assert.Equal(expectedChunkHashes, actualChunkHashes);
            }

            var seenNodes = new HashSet <byte[]>(chunks.Select(c => c.Hash), ByteArrayComparer.Instance);

            DedupNode?root = null;

            // Walk the interior nodes of the packed tree: every child must have
            // been seen already, and every node hash must be unique.
            foreach (var node in PackedDedupNodeTree.EnumerateTree(chunks)
                     .Where(n => n.Type != DedupNode.NodeType.ChunkLeaf))
            {
                foreach (var child in node.ChildNodes)
                {
                    Assert.True(seenNodes.Contains(child.Hash));
                }

                Assert.True(seenNodes.Add(node.Hash));
                root = node;
            }

            // The last interior node enumerated is taken as the tree root.
            Assert.True(root.HasValue);

            // ReSharper disable once PossibleInvalidOperationException
            Assert.Equal(rootFromhash, root.Value);
            actualChunkHashes = root.Value.EnumerateChunkLeafsInOrder().Select(c => c.Hash.ToHex()).ToArray();
            Assert.Equal(expectedChunkHashes, actualChunkHashes);
        }
Beispiel #34
0
 // Lazily resolves and caches the OpenNLP chunker on first use.
 private Chunker GetChunker()
 {
     if (_chunker == null)
     {
         _chunker = ResolveOpenNlpTool <ChunkerModel, ChunkerME>("en-chunker.bin");
     }
     return _chunker;
 }
Beispiel #35
0
        // Allocates and sizes the five working blobs for this stage: three
        // channel blobs (C1..C3) and two vector blobs (V2, V3). Geometry is
        // copied from the metadata blob; the vector grids get one entry per
        // 8x8 chunk of the channel plane.
        private void setupBlobs(DataBlob metadata) {
            C1 = new DataBlob();
            C2 = new DataBlob();
            C3 = new DataBlob();
            V2 = new DataBlob();
            V3 = new DataBlob();
            C1.type = C2.type = C3.type = DataBlob.Type.Channels;
            V2.type = V3.type = DataBlob.Type.Vectors;

            //import metadata onto channels
            C1.imageWidth = C2.imageWidth = C3.imageWidth = metadata.imageWidth;
            C1.imageHeight = C2.imageHeight = C3.imageHeight = metadata.imageHeight;
            C1.channelWidth = C2.channelWidth = C3.channelWidth = metadata.channelWidth;
            C1.channelHeight = C2.channelHeight = C3.channelHeight = metadata.channelHeight;
            C1.quantizeQuality = C2.quantizeQuality = C3.quantizeQuality = metadata.quantizeQuality;
            C1.samplingMode = C2.samplingMode = C3.samplingMode = metadata.samplingMode;

            // Vector blobs hold one value per 8x8 chunk, so their "channel"
            // dimensions come from the chunk grid rather than the pixel grid.
            Chunker c = new Chunker(8, metadata.channelWidth, metadata.channelHeight, metadata.channelWidth, 1);
            V2.imageWidth = V3.imageWidth = metadata.imageWidth;
            V2.imageHeight = V3.imageHeight = metadata.imageHeight;
            V2.channelWidth = V3.channelWidth = c.getChunksWide();
            V2.channelHeight = V3.channelHeight = c.getChunksHigh();
            V2.quantizeQuality = V3.quantizeQuality = metadata.quantizeQuality;
            V2.samplingMode = V3.samplingMode = metadata.samplingMode;

            //Allocate space for incoming data
            C1.channels = new byte[3][];
            C2.channels = new byte[3][];
            C3.channels = new byte[3][];
            V2.channels = new byte[3][];
            V3.channels = new byte[3][];

            // Channel blobs: full-size first plane plus two subsampled planes
            // (padded size, via getPaddedCbCrSize).
            int cMajor = C1.channelWidth * C1.channelHeight;
            Size sizeMinor = Subsample.getPaddedCbCrSize(new Size(C1.channelWidth, C1.channelHeight), C1.samplingMode);
            int cMinor = sizeMinor.Width * sizeMinor.Height;
            C1.channels[0] = new byte[cMajor];
            C2.channels[0] = new byte[cMajor];
            C3.channels[0] = new byte[cMajor];
            C1.channels[1] = new byte[cMinor];
            C2.channels[1] = new byte[cMinor];
            C3.channels[1] = new byte[cMinor];
            C1.channels[2] = new byte[cMinor];
            C2.channels[2] = new byte[cMinor];
            C3.channels[2] = new byte[cMinor];
            // Vector blobs use the same layout but the UNPADDED chroma size
            // (getCbCrSize). NOTE(review): the asymmetry with the padded size
            // above looks deliberate but is worth confirming.
            cMajor = V2.channelWidth * V2.channelHeight;
            sizeMinor = Subsample.getCbCrSize(new Size(V2.channelWidth, V2.channelHeight), V2.samplingMode);
            cMinor = sizeMinor.Width * sizeMinor.Height;
            V2.channels[0] = new byte[cMajor];
            V3.channels[0] = new byte[cMajor];
            V2.channels[1] = new byte[cMinor];
            V3.channels[1] = new byte[cMinor];
            V2.channels[2] = new byte[cMinor];
            V3.channels[2] = new byte[cMinor];
        }
Beispiel #36
0
 // Reads the three planes of a DataBlob from the stream: the full-size
 // first channel, then the two subsampled chroma channels (which share
 // one chunk geometry).
 private void readChannels(BinaryReader reader, DataBlob ch) {
     Chunker lumaChunker = new Chunker(8, ch.channelWidth, ch.channelHeight, ch.channelWidth, 1);
     readChannel(reader, ch.channels[0], lumaChunker);

     Size chromaSize = Subsample.deduceCbCrSize(ch);
     Chunker chromaChunker = new Chunker(8, chromaSize.Width, chromaSize.Height, chromaSize.Width, 1);
     readChannel(reader, ch.channels[1], chromaChunker);
     readChannel(reader, ch.channels[2], chromaChunker);
 }
Beispiel #37
0
        // Loads a compressed image file from inPath into a fresh state blob:
        // fixed header (geometry + compression settings), then the Y channel
        // and the two chroma channels, each stored as zig-zag ordered 8x8
        // blocks with run-length encoding.
        private void open(object sender, EventArgs e) {
            soil();
            clean();
            state = new DataBlob();
            state.type = DataBlob.Type.Channels;
            state.channels = new byte[3][];
            using (Stream stream = new FileStream(inPath, FileMode.Open, FileAccess.Read, FileShare.Read)) {
                using (BinaryReader reader = new BinaryReader(stream, Encoding.Default)) {
                    // Header: image/channel dimensions, quality, sampling mode.
                    state.imageWidth = reader.ReadUInt16();
                    state.imageHeight = reader.ReadUInt16();
                    state.channelWidth = reader.ReadUInt16();
                    state.channelHeight = reader.ReadUInt16();
                    state.quantizeQuality = reader.ReadByte();
                    state.samplingMode = (DataBlob.Samples)reader.ReadByte();

                    // Scratch buffer for one decoded 8x8 block.
                    byte[] data = new byte[64];

                    //======================
                    //===== Y Channel ======
                    //======================
                    state.channels[0] = new byte[state.channelWidth * state.channelHeight];
                    Chunker c = new Chunker(8, state.channelWidth, state.channelHeight, state.channelWidth, 1);
                    for (int iChunk = 0; iChunk < c.getNumChunks(); iChunk++) {
                        readRleChunk(reader, data);
                        c.setZigZag8Block(state.channels[0], data, iChunk);
                    }

                    //===========================
                    //===== Cr, Cb Channels =====
                    //===========================
                    // Chroma planes share one (padded) size and one chunker.
                    Size len = Subsample.getPaddedCbCrSize(new Size(state.channelWidth, state.channelHeight), state.samplingMode);
                    state.channels[1] = new byte[len.Width * len.Height];
                    state.channels[2] = new byte[state.channels[1].Length];
                    c = new Chunker(8, len.Width, len.Height, len.Width, 1);
                    for (int channel = 1; channel < state.channels.Length; channel++) {
                        for (int iChunk = 0; iChunk < c.getNumChunks(); iChunk++) {
                            readRleChunk(reader, data);
                            c.setZigZag8Block(state.channels[channel], data, iChunk);
                        }
                    }
                }
            } //close file
        }

        // Decodes one run-length-encoded block from the stream into data.
        // A literal byte is copied as-is; the rleToken escape byte is followed
        // by a repeat count and the value to repeat.
        private void readRleChunk(BinaryReader reader, byte[] data) {
            for (int iPixel = 0; iPixel < data.Length;) {
                byte val = reader.ReadByte();
                if (val != rleToken) {
                    data[iPixel++] = val;
                } else {
                    byte count = reader.ReadByte();
                    val = reader.ReadByte();
                    while (count > 0) {
                        data[iPixel++] = val;
                        count--;
                    }
                }
            }
        }
Beispiel #38
0
        //curl -X POST --header 'Content-type: text/csv' --header 'Authorization: Bearer [token]' --data-binary "@/Users/User1/Desktop/HumanActivity/source1.csv" 'http://<serveraddress>:30063/api/1.1/accounts/1549746082718454/datastreams/1554938538981549/ingestdata/1554942688319300/inputs'
        /// <summary>
        /// Creates an ingest job, uploads one CSV file (whole, or in chunks of
        /// <paramref name="chunkMB"/> MB) and marks the job completed.
        /// </summary>
        /// <param name="filepath">Path of the CSV file to upload.</param>
        /// <param name="jobObj">Job definition posted to create the ingest job.</param>
        /// <param name="chunkMB">Chunk size in MB; 0 sends the whole file in one request.</param>
        /// <returns>All responses (job creation, uploads, job completion) in order.</returns>
        public IList <dynamic> LoadCSVFile <T>(string filepath, Job <T> jobObj, uint chunkMB = 0) where T : StreamFormat
        {
            var jobResponses = new List <dynamic>();

            try
            {
                // Create the ingest job; its response carries the upload URL.
                var jobResponse = _endpointUri
                                  .AppendPathSegments("accounts", _account, "jobs")
                                  .WithOAuthBearerToken(_token)
                                  .PostJsonAsync(jobObj)
                                  .ReceiveJson().Result;
                jobResponses.Add(jobResponse);

                using (var reader = File.OpenText(filepath))
                {
                    if (chunkMB <= 0)
                    {
                        // No chunking: send the entire content at once. Read via
                        // the already-open reader instead of opening the file a
                        // second time with File.ReadAllText.
                        var fileContent  = new StringContent(reader.ReadToEnd());
                        var fileResponse = new Url(_baseUri + jobResponse.links[0].url)
                                           .WithHeader("Content-type", "text/csv")
                                           .WithOAuthBearerToken(_token)
                                           .PostAsync(fileContent)
                                           .ReceiveJson().Result;
                        jobResponses.Add(fileResponse);
                    }
                    else
                    {
                        // Chunked upload: Chunker invokes sendText once per chunk.
                        Action <String> sendText = delegate(String text)
                        {
                            var chunkContent  = new StringContent(text);
                            var chunkResponse = new Url(_baseUri + jobResponse.links[0].url)
                                                .WithHeader("Content-type", "text/csv")
                                                .WithOAuthBearerToken(_token)
                                                .PostAsync(chunkContent)
                                                .ReceiveJson().Result;
                            jobResponses.Add(chunkResponse);
                        };
                        Chunker.ChunkTextFromText(Chunker.GetChunkSize(chunkMB), reader, sendText);
                    }
                }

                // Mark the ingest job completed.
                var complJobObj = new Job <T>();
                complJobObj.jobType    = JOBTYPE.INGESTDATA;
                complJobObj.status     = JOBSTATUS.COMPLETED;
                complJobObj.datastream = jobObj.datastream;
                complJobObj.spec       = jobObj.spec;
                jobResponse            = _endpointUri
                                         .AppendPathSegments("accounts", _account, "jobs", (string)jobResponse.id)
                                         .WithOAuthBearerToken(_token)
                                         .PutJsonAsync(complJobObj)
                                         .ReceiveJson().Result;
                jobResponses.Add(jobResponse);
            }
            catch (Exception e)
            {
                // TODO logging
                // Surface the server's response body for any wrapped Flurl errors.
                if (e is System.AggregateException)
                {
                    foreach (var ie in ((System.AggregateException)e).InnerExceptions)
                    {
                        if (ie is FlurlHttpException)
                        {
                            string msg = ((FlurlHttpException)ie).GetResponseStringAsync().Result;
                            ie.HelpLink = msg;
                        }
                    }
                }
                // "throw;" preserves the original stack trace; the previous
                // "throw (e);" reset it.
                throw;
            }
            return(jobResponses);
        }
Beispiel #39
0
        public void Chunk_GivenRuleWithRegexOptions_AppliesRegexOptions()
        {
            // The rule pattern is lower-case while the first tagged word is
            // capitalised; IgnoreCase must make the rule match both forms.
            const string taggedText = "Faster/JJR and/CC faster/JJR he/PRP went/VBD";
            var rules = new List<Rule>
            {
                new Rule { ChunkName = "FS", Pattern = @"{faster/JJR}", RegexOptions = RegexOptions.IgnoreCase },
            };

            var chunker = new Chunker();
            string chunkedText = chunker.Chunk(taggedText, rules);

            Assert.AreEqual("[FS Faster/JJR] and/CC [FS faster/JJR] he/PRP went/VBD", chunkedText);
        }
Beispiel #40
0
        /// <summary>
        /// Overlays the per-chunk offset vectors stored in vState onto the view.
        /// Each byte packs an X offset in the high nibble and a Y offset in the
        /// low nibble, both biased by 7.
        /// </summary>
        public override void viewExtra(Graphics g) {
            //base.viewExtra(g);
            if (state == null) {
                return;
            }
            // Removed an unused "Chunker c = new Chunker(...)" local: it was
            // constructed but never referenced anywhere in this method.
            int offsetX, offsetY;
            // Start at (4, channelHeight - 4) — presumably the centre of the
            // bottom-left 8x8 chunk; rows advance upward. TODO confirm layout.
            int y = state.channelHeight - 4;
            int x = 4;

            for (int i = 0; i < vState.channels[0].Length; i++) {
                offsetX = ((vState.channels[0][i] & 0xF0) >> 4) - 7;
                offsetY = (vState.channels[0][i] & 0x0F) - 7;
                if (offsetX == 0 && offsetY == 0) {
                    // Zero offset: draw a small dot instead of a line.
                    g.FillRectangle(Brushes.BlanchedAlmond, x-1, y-1, 2, 2);
                } else {
                    g.DrawLine(Pens.BlanchedAlmond, x, y, x + offsetX, y - offsetY);
                }
                x += 8;
                if (x - 4 >= state.channelWidth) {
                    // Wrap to the start of the next row up.
                    x = 4;
                    y -= 8;
                }
            }
        }
Beispiel #41
0
 public void Chunk_GivenWhiteSpaceTaggedText_ReturnsTaggedText()
 {
     // Whitespace-only input must pass through the chunker unchanged.
     const string taggedText = "   ";

     var chunkedText = new Chunker().Chunk(taggedText, GetChunkRules());

     Assert.AreEqual(taggedText, chunkedText);
 }
Beispiel #42
0
        protected override void clean() {
            base.clean();
            if (state == null || state.channels == null) {
                return;
            }

            // Only the forward pass reads the quality setting from properties.
            if (!isInverse) {
                state.quantizeQuality = properties["quality"].nValue;
            }
            generateQTables(state.quantizeQuality);

            padChannels();

            // Transform the first channel one chunkSize x chunkSize block at a
            // time, using the Y quantization table.
            Chunker chunker = new Chunker(chunkSize, state.channelWidth, state.channelHeight, state.channelWidth, 1);
            byte[] block = new byte[chunkSize * chunkSize];
            for (int i = 0; i < chunker.getNumChunks(); i++) {
                chunker.getBlock(state.channels[0], block, i);
                block = isInverse ? doIDCT(block, quantizationY) : doDCT(block, quantizationY);
                chunker.setBlock(state.channels[0], block, i);
            }

            // The two chroma channels share geometry, so one chunker covers
            // both; they use the chroma quantization table.
            Size chromaSize = Subsample.getPaddedCbCrSize(new Size(state.channelWidth, state.channelHeight), state.samplingMode);
            chunker = new Chunker(chunkSize, chromaSize.Width, chromaSize.Height, chromaSize.Width, 1);
            for (int i = 0; i < chunker.getNumChunks(); i++) {
                chunker.getBlock(state.channels[1], block, i);
                block = isInverse ? doIDCT(block, quantizationC) : doDCT(block, quantizationC);
                chunker.setBlock(state.channels[1], block, i);
                chunker.getBlock(state.channels[2], block, i);
                block = isInverse ? doIDCT(block, quantizationC) : doDCT(block, quantizationC);
                chunker.setBlock(state.channels[2], block, i);
            }
        }
Beispiel #43
0
        public void Chunk_GivenRuleWithConsecutiveTags_CapturesConsecutiveTags()
        {
            // A single "+" rule should greedily capture the whole run of
            // matching tags as one NP chunk.
            const string taggedText = "The/DT old/JJ friendly/JJ science/NN professor/NN";
            var rules = new List<Rule>
            {
                new Rule { ChunkName = "NP", Pattern = @"{*/(DT|JJ|NNPS|NNP|NNS|NN|PRP|CD)}+" },
            };

            var chunkedText = new Chunker().Chunk(taggedText, rules);

            Assert.AreEqual("[NP The/DT old/JJ friendly/JJ science/NN professor/NN]", chunkedText);
        }
Beispiel #44
0
 public void Chunk_GivenNestedChunkRules_ReturnsNestedChunks()
 {
     // Two sentences; the rule set should nest chunks inside chunks
     // (NPs inside PPs/VPs inside declarative clauses).
     const string taggedText = "The/DT professor/NN walked/VBD the/DT dog/NN in/IN the/DT park/NN ./. The/DT dog/NN chased/VBD a/DT big/JJ stick/NN ./.";
     const string expected = "[DC [NP The/DT professor/NN] [VP walked/VBD [NP the/DT dog/NN] [PP in/IN [NP the/DT park/NN]]]] ./. [DC [NP The/DT dog/NN] [VP chased/VBD [NP a/DT big/JJ stick/NN]]] ./.";

     string chunkedText = new Chunker().Chunk(taggedText, GetChunkRules());

     Assert.AreEqual(expected, chunkedText);
 }
Beispiel #45
0
        /// <summary>
        /// Uploads multiple CSV files in blocks of <paramref name="blocksize"/>
        /// files: each block gets its own ingest job (create job, send files,
        /// complete job), optionally sleeping between blocks.
        /// </summary>
        /// <param name="filepaths">Paths of the CSV files to upload.</param>
        /// <param name="jobObj">Template job definition used for each created job.</param>
        /// <param name="blocksize">Files per job; 0 means all files in one job.</param>
        /// <param name="sleepSecs">Seconds to sleep after completing each job.</param>
        /// <param name="chunkMB">Chunk size in MB; 0 sends each file in one request.</param>
        /// <returns>All responses and per-file stats in order.</returns>
        public IList <dynamic> LoadCSVFiles <T>(IList <string> filepaths, Job <T> jobObj, int blocksize = 0, int sleepSecs = 0, uint chunkMB = 0) where T : StreamFormat
        {
            // Normalize block size and sleep to their allowed minimums.
            if (blocksize == 0)
            {
                blocksize = filepaths.Count;
            }
            else if (blocksize < MINFILEBLOCK)
            {
                blocksize = MINFILEBLOCK;
            }
            if (sleepSecs < MINSLEEPSEC)
            {
                sleepSecs = MINSLEEPSEC;
            }

            List <dynamic> jobResponses = new List <dynamic>();
            dynamic        jobResponse  = null;
            var            jobid        = "";

            for (int i = 0; i < filepaths.Count; i++)
            {
                // First file of a block: create a fresh ingest job.
                if (i % blocksize == 0)
                {
                    try
                    {
                        jobResponse = _endpointUri
                                      .AppendPathSegments("accounts", _account, "jobs")
                                      .WithOAuthBearerToken(_token)
                                      .PostJsonAsync(jobObj)
                                      .ReceiveJson().Result;
                        jobid = jobResponse.id;
                        var clone = Clone(jobResponse);
                        _responseCallback?.Invoke(clone);
                        jobResponses.Add(clone);
                    }
                    catch (Exception e)
                    {
                        // TODO logging
                        AttachFlurlResponses(e);
                        e.HelpLink = $"failure to create job starting with file {filepaths[i]} with blocksize {blocksize}";
                        var excResponse = CreateExceptionResponse(e, "creating job");
                        _responseCallback?.Invoke(excResponse);
                        jobResponses.Add(excResponse);
                        // Without a job there is no upload URL; stop entirely.
                        break;
                    }
                }

                // Send the file (whole or chunked).
                var start = DateTime.Now;
                try
                {
                    var info = new FileInfo(filepaths[i]);
                    Console.WriteLine($"Sending file {filepaths[i]} of {info.Length} bytes" + (chunkMB > 0?$", in chunks of {chunkMB} MB":""));
                    var fileResponses = new List <dynamic>();
                    using (var reader = File.OpenText(filepaths[i]))
                    {
                        if (chunkMB <= 0)
                        {
                            // No chunking: send the entire content at once,
                            // reading via the already-open reader instead of
                            // opening the file a second time.
                            var fileContent  = new StringContent(reader.ReadToEnd());
                            var fileResponse = new Url(_baseUri + jobResponse.links[0].url)
                                               .WithHeader("Content-type", "text/csv")
                                               .WithOAuthBearerToken(_token)
                                               .PostAsync(fileContent)
                                               .ReceiveJson().Result;
                            fileResponses.Add(fileResponse);
                            _responseCallback?.Invoke(fileResponse);
                        }
                        else
                        {
                            // Chunked upload: Chunker invokes sendText once per chunk.
                            Action <String> sendText = delegate(String text)
                            {
                                var chunkContent  = new StringContent(text);
                                var chunkResponse = new Url(_baseUri + jobResponse.links[0].url)
                                                    .WithHeader("Content-type", "text/csv")
                                                    .WithOAuthBearerToken(_token)
                                                    .PostAsync(chunkContent)
                                                    .ReceiveJson().Result;
                                fileResponses.Add(chunkResponse);
                                _responseCallback?.Invoke(chunkResponse);
                            };
                            Chunker.ChunkTextFromText(Chunker.GetChunkSize(chunkMB), reader, sendText);
                        }
                    }

                    // TODO: Move to callbacks
                    dynamic stats = new ExpandoObject();
                    stats.jobid = jobid;
                    stats.file  = filepaths[i];
                    stats.time  = TimeSpan.FromTicks(DateTime.Now.Ticks - start.Ticks);
                    Console.WriteLine($"It took {stats.time} to send file {stats.file}");

                    jobResponses.Add(stats);
                    jobResponses.AddRange(fileResponses);
                }
                catch (Exception e)
                {
                    // TODO logging
                    AttachFlurlResponses(e);
                    e.HelpLink = $"failure to send file {filepaths[i]} after {TimeSpan.FromTicks(DateTime.Now.Ticks-start.Ticks)}";
                    var excResponse = CreateExceptionResponse(e, "sending file");
                    _responseCallback?.Invoke(excResponse);
                    // A single failed file does not abort the whole run.
                    jobResponses.Add(excResponse);
                }

                // Last file of a block (or of the whole list): complete the job.
                if (i % blocksize == (blocksize - 1) || i == (filepaths.Count - 1))
                {
                    try
                    {
                        var complJobObj = new Job <T>();
                        complJobObj.jobType    = JOBTYPE.INGESTDATA;
                        complJobObj.status     = JOBSTATUS.COMPLETED;
                        complJobObj.datastream = jobObj.datastream;
                        complJobObj.spec       = jobObj.spec;
                        jobResponse            = _endpointUri
                                                 .AppendPathSegments("accounts", _account, "jobs", (string)jobResponse.id)
                                                 .WithOAuthBearerToken(_token)
                                                 .PutJsonAsync(complJobObj)
                                                 .ReceiveJson().Result;
                        _responseCallback?.Invoke(jobResponse);
                        jobResponses.Add(Clone(jobResponse));
                    }
                    catch (Exception e)
                    {
                        // TODO logging
                        AttachFlurlResponses(e);
                        e.HelpLink = $"failure to close the job {(string)jobResponse.id} starting with file {filepaths[i - (blocksize - 1)]} with blocksize {blocksize}";
                        var excResponse = CreateExceptionResponse(e, "closing job");
                        _responseCallback?.Invoke(excResponse);
                        jobResponses.Add(excResponse);
                        break;
                    }

                    if (sleepSecs > 0)
                    {
                        Thread.Sleep(sleepSecs * 1000);
                    }
                }
            }
            return(jobResponses);
        }

        // For AggregateExceptions, copies each wrapped FlurlHttpException's
        // HTTP response body into the inner exception's HelpLink so callers
        // can see what the server returned. Extracted from three identical
        // inline copies. NOTE(review): .Result blocks the calling thread —
        // consistent with this class's existing blocking style.
        private static void AttachFlurlResponses(Exception e)
        {
            if (e is System.AggregateException)
            {
                foreach (var ie in ((System.AggregateException)e).InnerExceptions)
                {
                    if (ie is FlurlHttpException)
                    {
                        string msg = ((FlurlHttpException)ie).GetResponseStringAsync().Result;
                        ie.HelpLink = msg;
                    }
                }
            }
        }
Beispiel #46
0
 public void Chunk_GivenEmptyStringTaggedText_ReturnsTaggedText()
 {
     // An empty input string must come back from the chunker unchanged.
     string taggedText = string.Empty;

     var chunkedText = new Chunker().Chunk(taggedText, GetChunkRules());

     Assert.AreEqual(taggedText, chunkedText);
 }
Beispiel #47
0
        // Serializes the current DataBlob (state) to outPath and shows a
        // compression summary. Layout: 16-bit image width/height, 16-bit
        // channel width/height, 8-bit quantize quality, 8-bit sampling mode,
        // then each channel as RLE-compressed 8x8 zig-zag chunks.
        // Event-handler signature; sender/e are unused.
        private void save_old(object sender, EventArgs e)
        {
            clean();

            if (state == null || state.channels == null)
            {
                return;
            }
            //data to save: image width, image height, quality factor, y channel, Cr channel, Cb channel
            //perform zig-zag RLE on channels.
            //
            //16 bits for width
            //16 bits for height
            // 8 bits for quality factor
            // 8 bits for channel sampling mode
            //[width x height] bytes for y channel (uncompressed)
            //[(width + 1) / 2 + (height + 1) / 2] bytes for Cr channel
            //[(width + 1) / 2 + (height + 1) / 2] bytes for Cb channel

            //RLE:
            //for each channel
            //  for each chunk
            //    for each value
            //      value != prev:
            //        prev == token ||
            //        count >= 3: write token, write count, write prev, prev = value, count = 1
            //        count == 2: write prev, write prev, prev = value, count = 1
            //        count == 1: write prev, prev = value
            //      value == prev:
            //        count++
            //    write prev as above
            //    next chunk...

            byte prev, count, val;

            using (Stream stream = new BufferedStream(new FileStream(outPath, FileMode.Create, FileAccess.Write, FileShare.None))) {
                using (BinaryWriter writer = new BinaryWriter(stream, Encoding.Default)) {
                    // File header (see layout comment above).
                    writer.Write((short)state.imageWidth);
                    writer.Write((short)state.imageHeight);
                    writer.Write((short)state.channelWidth);
                    writer.Write((short)state.channelHeight);
                    writer.Write((byte)state.quantizeQuality);
                    writer.Write((byte)state.samplingMode);
                    // Scratch buffer for one 8x8 chunk; indexer walks it in
                    // zig-zag order (re-enumerated by each foreach below).
                    byte[]  data    = new byte[64];
                    Chunker c       = new Chunker(8, state.channelWidth, state.channelHeight, state.channelWidth, 1);
                    var     indexer = Chunker.zigZag8Index();
                    // Y channel: full resolution.
                    for (int i = 0; i < c.getNumChunks(); i++)
                    {
                        c.getBlock(state.channels[0], data, i);
                        count = 0;
                        prev  = data[0];
                        foreach (int index in indexer)
                        {
                            val = data[index];
                            if (val == prev)
                            {
                                count++;
                            }
                            else
                            {
                                // Flush the finished run. A literal equal to
                                // rleToken is always escaped via the token form.
                                if (prev == rleToken || count >= 3)
                                {
                                    writer.Write(rleToken);
                                    writer.Write(count);
                                    writer.Write(prev);
                                }
                                else if (count == 2)
                                {
                                    writer.Write(prev);
                                    writer.Write(prev);
                                }
                                else
                                {
                                    writer.Write(prev);
                                }
                                prev  = val;
                                count = 1;
                            }
                        }
                        //write out the last prev
                        if (prev == rleToken || count >= 3)
                        {
                            writer.Write(rleToken);
                            writer.Write(count);
                            writer.Write(prev);
                        }
                        else if (count == 2)
                        {
                            writer.Write(prev);
                            writer.Write(prev);
                        }
                        else
                        {
                            writer.Write(prev);
                        } //chunk written out
                    }     //channel written out

                    //

                    // Re-size the chunker for the subsampled Cr/Cb channels.
                    switch (state.samplingMode)
                    {
                    case DataBlob.Samples.s444:
                        //just use the existing chunker
                        break;

                    case DataBlob.Samples.s411:
                        c = new Chunker(8, (state.channelWidth + 3) / 4, state.channelHeight, (state.channelWidth + 3) / 4, 1);
                        break;

                    case DataBlob.Samples.s420:
                        c = new Chunker(8, (state.channelWidth + 1) / 2, (state.channelHeight + 1) / 2, (state.channelWidth + 1) / 2, 1);
                        break;

                    case DataBlob.Samples.s422:
                        c = new Chunker(8, (state.channelWidth + 1) / 2, state.channelHeight, (state.channelWidth + 1) / 2, 1);
                        break;
                    }
                    indexer = Chunker.zigZag8Index();
                    // Cr and Cb channels: same RLE scheme as the Y channel.
                    for (int channel = 1; channel < state.channels.Length; channel++)
                    {
                        for (int i = 0; i < c.getNumChunks(); i++)
                        {
                            c.getBlock(state.channels[channel], data, i);
                            count = 0;
                            prev  = data[0];
                            foreach (int index in indexer)
                            {
                                val = data[index];
                                if (val == prev)
                                {
                                    count++;
                                }
                                else
                                {
                                    if (prev == rleToken || count >= 3)
                                    {
                                        writer.Write(rleToken);
                                        writer.Write(count);
                                        writer.Write(prev);
                                    }
                                    else if (count == 2)
                                    {
                                        writer.Write(prev);
                                        writer.Write(prev);
                                    }
                                    else
                                    {
                                        writer.Write(prev);
                                    }
                                    prev  = val;
                                    count = 1;
                                }
                            }
                            //write out the last prev
                            if (prev == rleToken || count >= 3)
                            {
                                writer.Write(rleToken);
                                writer.Write(count);
                                writer.Write(prev);
                            }
                            else if (count == 2)
                            {
                                writer.Write(prev);
                                writer.Write(prev);
                            }
                            else
                            {
                                writer.Write(prev);
                            } //chunk written out
                        }     //channel written out
                    }         // all channels written out
                }
            }

            FileInfo fi = new FileInfo(outPath);

            // Report achieved compression relative to a 24bpp bitmap.
            String msgBox = String.Format("Image as bitmap: {0} Bytes\nImage compressed: {1} Bytes\nCompression ratio: {2:0.00} : 1\nor {3:0.00}% smaller.",
                                          (state.imageHeight * state.imageWidth * 3),
                                          fi.Length,
                                          (state.imageHeight * state.imageWidth * 3.0) / fi.Length,
                                          ((state.imageHeight * state.imageWidth * 3.0) - fi.Length) / (state.imageHeight * state.imageWidth * 3.0) * 100);

            System.Windows.Forms.MessageBox.Show(msgBox, "Compression Info");
        }
Beispiel #48
0
        // Deserializes a DataBlob from inPath (the format written by save_old):
        // header (image/channel dimensions, quality, sampling mode) followed by
        // the Y channel and the Cr/Cb channels as RLE-compressed 8x8 zig-zag
        // chunks. Event-handler signature; sender/e are unused.
        //
        // Fixes: removed two dead `indexer` variables (assigned from
        // Chunker.zigZag8Index() but never read) and factored the duplicated
        // RLE chunk-decoding loop into readRleChunk.
        private void open(object sender, EventArgs e)
        {
            soil();
            clean();
            state          = new DataBlob();
            state.type     = DataBlob.Type.Channels;
            state.channels = new byte[3][];
            using (Stream stream = new FileStream(inPath, FileMode.Open, FileAccess.Read, FileShare.Read)) {
                using (BinaryReader reader = new BinaryReader(stream, Encoding.Default)) {
                    // File header.
                    state.imageWidth      = reader.ReadUInt16();
                    state.imageHeight     = reader.ReadUInt16();
                    state.channelWidth    = reader.ReadUInt16();
                    state.channelHeight   = reader.ReadUInt16();
                    state.quantizeQuality = reader.ReadByte();
                    state.samplingMode    = (DataBlob.Samples)reader.ReadByte();

                    state.channels[0] = new byte[state.channelWidth * state.channelHeight];

                    // Scratch buffer for one decoded 8x8 chunk.
                    byte[] data = new byte[64];

                    //======================
                    //===== Y Channel ======
                    //======================
                    Chunker c = new Chunker(8, state.channelWidth, state.channelHeight, state.channelWidth, 1);
                    for (int iChunk = 0; iChunk < c.getNumChunks(); iChunk++)
                    {
                        readRleChunk(reader, data);
                        c.setZigZag8Block(state.channels[0], data, iChunk);
                    }

                    //===========================
                    //===== Cr, Cb Channels =====
                    //===========================
                    Size len = Subsample.getPaddedCbCrSize(new Size(state.channelWidth, state.channelHeight), state.samplingMode);
                    state.channels[1] = new byte[len.Width * len.Height];
                    state.channels[2] = new byte[state.channels[1].Length];
                    c = new Chunker(8, len.Width, len.Height, len.Width, 1);

                    for (int channel = 1; channel < state.channels.Length; channel++)
                    {
                        for (int iChunk = 0; iChunk < c.getNumChunks(); iChunk++)
                        {
                            readRleChunk(reader, data);
                            c.setZigZag8Block(state.channels[channel], data, iChunk);
                        }
                    }
                }
            } //close file
        }

        // Reads one RLE-compressed 64-byte chunk from 'reader' into 'data'.
        // Any byte other than rleToken is a literal; rleToken is followed by a
        // count byte and the value to repeat 'count' times.
        private void readRleChunk(BinaryReader reader, byte[] data)
        {
            byte count, val;
            for (int iPixel = 0; iPixel < 64;)
            {
                val = reader.ReadByte();
                if (val != rleToken)
                {
                    data[iPixel++] = val;
                }
                else
                {
                    count = reader.ReadByte();
                    val   = reader.ReadByte();
                    while (count > 0)
                    {
                        data[iPixel++] = val;
                        count--;
                    }
                }
            }
        }
Beispiel #49
0
 // Writes all three channels of 'ch' (Y first, then Cr, then Cb) as
 // RLE-compressed 8x8 chunks. The two chroma channels share one chunker
 // sized from the subsampling mode via Subsample.deduceCbCrSize.
 protected static void writeChannels(BinaryWriter writer, DataBlob ch) {
     var lumaChunker = new Chunker(8, ch.channelWidth, ch.channelHeight, ch.channelWidth, 1);
     writeChannel(writer, ch.channels[0], lumaChunker);

     Size chromaSize = Subsample.deduceCbCrSize(ch);
     var chromaChunker = new Chunker(8, chromaSize.Width, chromaSize.Height, chromaSize.Width, 1);
     writeChannel(writer, ch.channels[1], chromaChunker);
     writeChannel(writer, ch.channels[2], chromaChunker);
 }
Beispiel #50
0
        public void Chunk_GivenRuleWithOptionalChunk_CapturesWithAndWithoutOptionalChunk()
        {
            // Arrange: tagged input plus a rule whose trailing {B} is optional
            // (the '*' quantifier), so it should match "A B B" and plain "A B".
            const string taggedText = "[A Arnold] [B barrel] [B Bill] [A apple] [A Adam] [A Africa] [B Bob]";
            var rules = new List<Rule>
            {
                new Rule { ChunkName = "ABB", Pattern = @"{A} {B} {B}*" }
            };

            // Act
            string chunkedText = new Chunker().Chunk(taggedText, rules);

            // Assert: both the two-B match and the single-B match are captured.
            Assert.AreEqual("[ABB [A Arnold] [B barrel] [B Bill]] [A apple] [A Adam] [ABB [A Africa] [B Bob]]", chunkedText);
        }
Beispiel #51
0
 // Run-length encodes one channel and writes it to 'writer', one 8x8 chunk
 // at a time in zig-zag order. A run is flushed as:
 //   run of >= 3, or value equal to rleToken: (rleToken, count, value)
 //   run of 2: (value, value)
 //   run of 1: (value)
 // Literals equal to rleToken are always escaped through the token form so
 // the reader can distinguish them from run markers.
 protected static void writeChannel(BinaryWriter writer, byte[] channel, Chunker c) {
     var zigZag = Chunker.zigZag8Index();
     byte[] block = new byte[64];
     for (int chunk = 0; chunk < c.getNumChunks(); chunk++) {
         c.getBlock(channel, block, chunk);
         byte runLength = 0;
         byte runValue = block[0];
         foreach (int index in zigZag) {
             byte current = block[index];
             if (current == runValue) {
                 runLength++;
                 continue;
             }
             // Flush the finished run (expanded form of the original
             // fall-through write; output bytes are identical).
             if (runValue == rleToken || runLength >= 3) {
                 writer.Write(rleToken);
                 writer.Write(runLength);
                 writer.Write(runValue);
             } else if (runLength == 2) {
                 writer.Write(runValue);
                 writer.Write(runValue);
             } else {
                 writer.Write(runValue);
             }
             runValue = current;
             runLength = 1;
         }
         // Flush the run left over at the end of the chunk.
         if (runValue == rleToken || runLength >= 3) {
             writer.Write(rleToken);
             writer.Write(runLength);
             writer.Write(runValue);
         } else if (runLength == 2) {
             writer.Write(runValue);
             writer.Write(runValue);
         } else {
             writer.Write(runValue);
         }
     } //channel written out
 }
Beispiel #52
0
        // Computes motion vectors between the old and new frames (chOld/chNew)
        // and rewrites state.channels as per-block differences.
        // For each channel: split into chunkSize x chunkSize blocks, find the
        // best-matching offset of each block in the previous frame, store the
        // packed offset in vState.channels, and replace the block's pixels
        // with the residual via setDiff.
        //
        // Fix: removed the leftover debug no-op `if (i == 20) { i = 20; }`
        // (a breakpoint anchor with no effect).
        private void calcMoVec(byte[][] chOld, byte[][] chNew) {
            //for each channel
            //chunk state.channels into 8x8 blocks
            //compare each block with blocks surrounding them in the arg channels 
            //over x = [-7,7] (range 15 values)
            //and  y = [-7,7] (range 15 values)


            //Do the first channel
            Chunker c = new Chunker(chunkSize, state.channelWidth, state.channelHeight, state.channelWidth, 1);
            int pixelTL;
            byte offset;
            //need to set vState.channelWidth and vState.channelHeight correctly, I think....
            vState.channels[0] = new byte[c.getNumChunks()];
            vState.channelWidth = c.getChunksWide();
            vState.channelHeight = c.getChunksHigh();
            
            for (int i = 0; i < c.getNumChunks(); i++) {
                pixelTL = c.chunkIndexToPixelIndex(i);
                //find best match given search area
                offset = findOffsetVector(chNew[0], chOld[0], pixelTL, state.channelWidth);
                //save best match vector
                vState.channels[0][i] = offset;
                //update channels to be difference.
                setDiff(state.channels[0], chNew[0], chOld[0], pixelTL, offset, state.channelWidth);
            }

            //Do the second two channels (subsampled chroma; smaller dimensions)
            Size smaller = Subsample.deduceCbCrSize(state);
            c = new Chunker(chunkSize, smaller.Width, smaller.Height, smaller.Width, 1);
            vState.channels[1] = new byte[c.getNumChunks()];
            vState.channels[2] = new byte[c.getNumChunks()];
            for (int i = 0; i < c.getNumChunks(); i++) {
                pixelTL = c.chunkIndexToPixelIndex(i);
                offset = findOffsetVector(chNew[1], chOld[1], pixelTL, smaller.Width);
                vState.channels[1][i] = offset;
                setDiff(state.channels[1], chNew[1], chOld[1], pixelTL, offset, smaller.Width);
                //offset = findOffsetVector(state.channels[2], channels[2], pixelTL, state.channelWidth);
                //Just use the same vectors for channel 3 as channel 2. Probably okay.
                vState.channels[2][i] = offset;
                setDiff(state.channels[2], chNew[2], chOld[2], pixelTL, offset, smaller.Width);
            }
        }
Beispiel #53
0
        // Serializes the current DataBlob (state) to outPath and shows a
        // compression summary. Event-handler signature; sender/e are unused.
        //
        // File layout:
        //   16 bits image width, 16 bits image height,
        //   16 bits channel width, 16 bits channel height,
        //    8 bits quantize quality, 8 bits sampling mode,
        // then each channel as RLE-compressed 8x8 zig-zag chunks.
        //
        // RLE scheme per chunk:
        //   run of >= 3 (or a literal equal to rleToken): token, count, value
        //   run of 2: value, value
        //   run of 1: value
        //
        // Fix: the identical chunk-emission loop appeared twice inline;
        // extracted into writeRleChunk below (output bytes unchanged).
        private void save_old(object sender, EventArgs e) {
            clean();

            if (state == null || state.channels == null) {
                return;
            }

            using (Stream stream = new BufferedStream(new FileStream(outPath, FileMode.Create, FileAccess.Write, FileShare.None))) {
                using (BinaryWriter writer = new BinaryWriter(stream, Encoding.Default)) {
                    // File header.
                    writer.Write((short)state.imageWidth);
                    writer.Write((short)state.imageHeight);
                    writer.Write((short)state.channelWidth);
                    writer.Write((short)state.channelHeight);
                    writer.Write((byte)state.quantizeQuality);
                    writer.Write((byte)state.samplingMode);

                    // Scratch buffer for one 8x8 chunk.
                    byte[] data = new byte[64];

                    // Y channel: full resolution.
                    Chunker c = new Chunker(8, state.channelWidth, state.channelHeight, state.channelWidth, 1);
                    for (int i = 0; i < c.getNumChunks(); i++) {
                        c.getBlock(state.channels[0], data, i);
                        writeRleChunk(writer, data);
                    } //channel written out

                    // Re-size the chunker for the subsampled Cr/Cb channels.
                    switch (state.samplingMode) {
                        case DataBlob.Samples.s444:
                            //just use the existing chunker
                            break;
                        case DataBlob.Samples.s411:
                            c = new Chunker(8, (state.channelWidth + 3) / 4, state.channelHeight, (state.channelWidth + 3) / 4, 1);
                            break;
                        case DataBlob.Samples.s420:
                            c = new Chunker(8, (state.channelWidth + 1) / 2, (state.channelHeight + 1) / 2, (state.channelWidth + 1) / 2, 1);
                            break;
                        case DataBlob.Samples.s422:
                            c = new Chunker(8, (state.channelWidth + 1) / 2, state.channelHeight, (state.channelWidth + 1) / 2, 1);
                            break;
                    }
                    for (int channel = 1; channel < state.channels.Length; channel++) {
                        for (int i = 0; i < c.getNumChunks(); i++) {
                            c.getBlock(state.channels[channel], data, i);
                            writeRleChunk(writer, data);
                        } //channel written out
                    } // all channels written out
                }
            }

            FileInfo fi = new FileInfo(outPath);

            // Report achieved compression relative to a 24bpp bitmap.
            String msgBox = String.Format("Image as bitmap: {0} Bytes\nImage compressed: {1} Bytes\nCompression ratio: {2:0.00} : 1\nor {3:0.00}% smaller.",
                (state.imageHeight * state.imageWidth * 3),
                fi.Length,
                (state.imageHeight * state.imageWidth * 3.0) / fi.Length,
                ((state.imageHeight * state.imageWidth * 3.0) - fi.Length) / (state.imageHeight * state.imageWidth * 3.0) * 100);

            System.Windows.Forms.MessageBox.Show(msgBox, "Compression Info");
        }

        // Run-length encodes one 64-byte chunk in zig-zag order and writes it.
        // A literal equal to rleToken is always escaped as (token, count, value)
        // so the reader can distinguish it from a run marker.
        private void writeRleChunk(BinaryWriter writer, byte[] data) {
            var indexer = Chunker.zigZag8Index();
            byte count = 0;
            byte prev = data[0];
            foreach (int index in indexer) {
                byte val = data[index];
                if (val == prev) {
                    count++;
                } else {
                    // Flush the finished run.
                    if (prev == rleToken || count >= 3) {
                        writer.Write(rleToken);
                        writer.Write(count);
                        writer.Write(prev);
                    } else if (count == 2) {
                        writer.Write(prev);
                        writer.Write(prev);
                    } else {
                        writer.Write(prev);
                    }
                    prev = val;
                    count = 1;
                }
            }
            //write out the last prev
            if (prev == rleToken || count >= 3) {
                writer.Write(rleToken);
                writer.Write(count);
                writer.Write(prev);
            } else if (count == 2) {
                writer.Write(prev);
                writer.Write(prev);
            } else {
                writer.Write(prev);
            } //chunk written out
        }