Example #1
 /// <exception cref="System.IO.IOException"/>
 public virtual void Map(NullWritable nk, NullWritable nv, OutputCollector <Org.Apache.Hadoop.IO.Text
                                                                            , Org.Apache.Hadoop.IO.Text> output, Reporter reporter)
 {
     // Emit 4096 fixed-size records
     val.Set(b, 0, 1000);
     val.GetBytes()[0] = unchecked ((byte)id);
     for (int i = 0; i < 4096; ++i)
     {
         key.Set(fmt.Format(tagfmt, i).ToString());
         output.Collect(key, val);
         ((StringBuilder)fmt.Out()).Length = keylen;
     }
     // Emit two "tagged" records from the map. To validate the merge, segments
     // should have both a small and large record such that reading a large
     // record from an on-disk segment into an in-memory segment will write
     // over the beginning of a record in the in-memory segment, causing the
     // merge and/or validation to fail.
     // Add small, tagged record
     val.Set(b, 0, GetValLen(id, nMaps) - 128);
     val.GetBytes()[0] = unchecked ((byte)id);
     ((StringBuilder)fmt.Out()).Length = keylen;
     key.Set("A" + fmt.Format(tagfmt, id).ToString());
     output.Collect(key, val);
     // Add large, tagged record
     val.Set(b, 0, GetValLen(id, nMaps));
     val.GetBytes()[0] = unchecked ((byte)id);
     ((StringBuilder)fmt.Out()).Length = keylen;
     key.Set("B" + fmt.Format(tagfmt, id).ToString());
     output.Collect(key, val);
 }
Example #2
        /// <summary>Map file name and offset into statistical data.</summary>
        /// <remarks>
        /// Map file name and offset into statistical data.
        /// <p>
        /// The map task is to get the
        /// <tt>key</tt>, which contains the file name, and the
        /// <tt>value</tt>, which is the offset within the file.
        /// The parameters are passed to the abstract method
        /// <see cref="IOMapperBase{T}.DoIO(Org.Apache.Hadoop.Mapred.Reporter, string, long)"
        ///     />
        /// , which performs the I/O operation,
        /// usually reading or writing data, and then
        /// <see cref="IOMapperBase{T}.CollectStats(Org.Apache.Hadoop.Mapred.OutputCollector{K, V}, string, long, object)
        ///     "/>
        ///
        /// is called to prepare stat data for a subsequent reducer.
        /// </remarks>
        /// <exception cref="System.IO.IOException"/>
        public virtual void Map(Text key, LongWritable value, OutputCollector <Text, Text>
                                output, Reporter reporter)
        {
            string name      = key.ToString();
            long   longValue = value.Get();

            reporter.SetStatus("starting " + name + " ::host = " + hostName);
            this.stream = GetIOStream(name);
            T    statValue = null;
            long tStart    = Runtime.CurrentTimeMillis();

            try
            {
                statValue = DoIO(reporter, name, longValue);
            }
            finally
            {
                if (stream != null)
                {
                    stream.Close();
                }
            }
            long tEnd     = Runtime.CurrentTimeMillis();
            long execTime = tEnd - tStart;

            CollectStats(output, name, execTime, statValue);
            reporter.SetStatus("finished " + name + " ::host = " + hostName);
        }
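
The remarks above describe a template method: Map resolves the file name and offset, delegates the actual work to the abstract DoIO, and then hands the measured result to CollectStats. Below is a minimal sketch of what a concrete subclass might look like; the class name and the "l:" key prefixes are illustrative assumptions, only the DoIO/CollectStats shapes follow the signatures referenced in the remarks.

    // Hypothetical subclass sketch, assuming IOMapperBase<long> exposes the two abstract
    // members referenced above; SketchReadMapper and the emitted key names are invented.
    internal class SketchReadMapper : IOMapperBase<long>
    {
        /// <exception cref="System.IO.IOException"/>
        internal override long DoIO(Reporter reporter, string name, long size)
        {
            // Perform the actual I/O against this.stream and report how many bytes were handled.
            return size;
        }

        internal override void CollectStats(OutputCollector<Text, Text> output, string name,
                                            long execTime, object objSize)
        {
            // Emit one type-tagged record per statistic for the accumulating reducer.
            output.Collect(new Text("l:bytes"), new Text(objSize.ToString()));
            output.Collect(new Text("l:time"), new Text(execTime.ToString()));
        }
    }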
Example #3
            /// <exception cref="System.IO.IOException"/>
            public virtual void Map(WritableComparable key, Writable value, OutputCollector <BytesWritable
                                                                                             , BytesWritable> output, Reporter reporter)
            {
                int itemCount = 0;

                while (numBytesToWrite > 0)
                {
                    int keyLength = minKeySize + (keySizeRange != 0 ? random.Next(keySizeRange) : 0);
                    randomKey.SetSize(keyLength);
                    RandomizeBytes(randomKey.GetBytes(), 0, randomKey.GetLength());
                    int valueLength = minValueSize + (valueSizeRange != 0 ? random.Next(valueSizeRange) : 0);
                    randomValue.SetSize(valueLength);
                    RandomizeBytes(randomValue.GetBytes(), 0, randomValue.GetLength());
                    output.Collect(randomKey, randomValue);
                    numBytesToWrite -= keyLength + valueLength;
                    reporter.IncrCounter(ThreadedMapBenchmark.Counters.BytesWritten, 1);
                    reporter.IncrCounter(ThreadedMapBenchmark.Counters.RecordsWritten, 1);
                    if (++itemCount % 200 == 0)
                    {
                        reporter.SetStatus("wrote record " + itemCount + ". " + numBytesToWrite + " bytes left."
                                           );
                    }
                }
                reporter.SetStatus("done with " + itemCount + " records.");
            }
Example #4
            /// <exception cref="System.IO.IOException"/>
            internal override void CollectStats(OutputCollector <Text, Text> output, string name
                                                , long execTime, object corruptedBlock)
            {
                output.Collect(new Text(AccumulatingReducer.ValueTypeLong + "blocks"),
                               new Text((1).ToString()));
                if (corruptedBlock.GetType().FullName.EndsWith("String"))
                {
                    output.Collect(new Text(AccumulatingReducer.ValueTypeString + "badBlocks"),
                                   new Text((string)corruptedBlock));
                    return;
                }
                long  totalSize   = (long)corruptedBlock;
                float ioRateMbSec = (float)totalSize * 1000 / (execTime * unchecked((int)0x100000));

                Log.Info("Number of bytes processed = " + totalSize);
                Log.Info("Exec time = " + execTime);
                Log.Info("IO rate = " + ioRateMbSec);
                output.Collect(new Text(AccumulatingReducer.ValueTypeLong + "size"),
                               new Text(totalSize.ToString()));
                output.Collect(new Text(AccumulatingReducer.ValueTypeLong + "time"),
                               new Text(execTime.ToString()));
                output.Collect(new Text(AccumulatingReducer.ValueTypeFloat + "rate"),
                               new Text((ioRateMbSec * 1000).ToString()));
            }
Example #5
 // Mapper that fails
 /// <exception cref="System.IO.IOException"/>
 public virtual void Map(WritableComparable key, Writable value, OutputCollector <WritableComparable
                                                                                  , Writable> @out, Reporter reporter)
 {
     // NOTE- the next line is required for the TestDebugScript test to succeed
     System.Console.Error.WriteLine("failing map");
     throw new RuntimeException("failing map");
 }
Example #6
 /// <exception cref="System.IO.IOException"/>
 public override void Map(K key, V value, OutputCollector <K, V> output, Reporter reporter
                          )
 {
     output.Collect(key, value);
     reporter.IncrCounter(TestUserDefinedCounters.EnumCounter.MapRecords, 1);
     reporter.IncrCounter("StringCounter", "MapRecords", 1);
 }
Example #7
 /// <summary>The waiting function.</summary>
 /// <remarks>
 /// The waiting function.  The map exits once it gets a signal. Here the
 /// signal is the file existence.
 /// </remarks>
 /// <exception cref="System.IO.IOException"/>
 public virtual void Map(WritableComparable key, Writable val, OutputCollector <WritableComparable
                                                                                , Writable> output, Reporter reporter)
 {
     if (ShouldWait(id))
     {
         if (fs != null)
         {
             while (!fs.Exists(GetSignalFile(id)))
             {
                 try
                 {
                     reporter.Progress();
                     lock (this)
                     {
                         Sharpen.Runtime.Wait(this, 1000);
                     }
                 }
                 catch (Exception)
                 {
                     // wait for 1 sec
                     System.Console.Out.WriteLine("Interrupted while the map was waiting for " + " the signal."
                                                  );
                     break;
                 }
             }
         }
         else
         {
             throw new IOException("Could not get the DFS!!");
         }
     }
 }
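
The loop above just polls for the signal file and keeps the task alive via reporter.Progress(). On the driver side the test releases the waiting maps by creating that file; a hedged sketch follows, reusing the fs/GetSignalFile names from the snippet while everything else is assumed.

    // Hedged driver-side sketch: creating the signal file makes fs.Exists(...) in the
    // waiting loop return true, so every blocked map task proceeds and finishes.
    Path signal = GetSignalFile(id);
    if (!fs.Exists(signal))
    {
        fs.Create(signal).Close();
    }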
Example #8
            /// <exception cref="System.IO.IOException"/>
            public override void Map(IntWritable key, TupleWritable val, OutputCollector <IntWritable
                                                                                          , IntWritable> @out, Reporter reporter)
            {
                int    k     = key.Get();
                string kvstr = "Unexpected tuple: " + Stringify(key, val);

                if (0 == k % (srcs * srcs))
                {
                    for (int i = 0; i < val.Size(); ++i)
                    {
                        NUnit.Framework.Assert.IsTrue(kvstr, val.Get(i) is IntWritable);
                        int vali = ((IntWritable)val.Get(i)).Get();
                        NUnit.Framework.Assert.IsTrue(kvstr, (vali - i) * srcs == 10 * k);
                    }
                }
                else
                {
                    for (int i = 0; i < val.Size(); ++i)
                    {
                        if (i == k % srcs)
                        {
                            NUnit.Framework.Assert.IsTrue(kvstr, val.Get(i) is IntWritable);
                            int vali = ((IntWritable)val.Get(i)).Get();
                            NUnit.Framework.Assert.IsTrue(kvstr, srcs * (vali - i) == 10 * (k - i));
                        }
                        else
                        {
                            NUnit.Framework.Assert.IsTrue(kvstr, !val.Has(i));
                        }
                    }
                }
                @out.Collect(key, one);
            }
Example #9
 /// <exception cref="System.IO.IOException"/>
 public virtual void Map(Text key, Text val, OutputCollector <IntWritable, IntWritable
                                                              > @out, Reporter reporter)
 {
     // Class for calling collect in separate threads
     // id for the thread
     _T1416289116[] feeders = new _T1416289116[NumFeeders];
     // start the feeders
     for (int i = 0; i < NumFeeders; i++)
     {
         feeders[i] = new _T1416289116(this, i);
         feeders[i].Start();
     }
     // wait for them to finish
     for (int i_1 = 0; i_1 < NumFeeders; i_1++)
     {
         try
         {
             feeders[i_1].Join();
         }
         catch (Exception ie)
         {
             throw new IOException(ie.ToString());
         }
     }
 }
Example #10
        /// <summary>Combines values for a given key.</summary>
        /// <param name="key">
        /// the key is expected to be a Text object, whose prefix indicates
        /// the type of aggregation to apply to the values.
        /// </param>
        /// <param name="values">the values to combine</param>
        /// <param name="output">to collect combined values</param>
        /// <exception cref="System.IO.IOException"/>
        public override void Reduce(Text key, IEnumerator <Text> values, OutputCollector <Text
                                                                                          , Text> output, Reporter reporter)
        {
            string          keyStr     = key.ToString();
            int             pos        = keyStr.IndexOf(ValueAggregatorDescriptor.TypeSeparator);
            string          type       = Sharpen.Runtime.Substring(keyStr, 0, pos);
            ValueAggregator aggregator = ValueAggregatorBaseDescriptor.GenerateValueAggregator(type);

            while (values.HasNext())
            {
                aggregator.AddNextValue(values.Next());
            }
            IEnumerator outputs = aggregator.GetCombinerOutput().GetEnumerator();

            while (outputs.HasNext())
            {
                object v = outputs.Next();
                if (v is Text)
                {
                    output.Collect(key, (Text)v);
                }
                else
                {
                    output.Collect(key, new Text(v.ToString()));
                }
            }
        }
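
This combiner relies entirely on a naming convention: everything before ValueAggregatorDescriptor.TypeSeparator names the aggregator to instantiate, and the rest identifies the value being aggregated. A small illustration of that key layout follows; it assumes the separator is ":" and that "LongValueSum" is one of the registered aggregator types, both assumptions about the surrounding framework rather than facts shown in the snippet.

    // Illustrative key parsing only; the combiner above performs the same split internally.
    string aggregateKey   = "LongValueSum:record.count";      // emitted by some mapper (made up)
    int    sep            = aggregateKey.IndexOf(':');
    string aggregatorType = aggregateKey.Substring(0, sep);   // "LongValueSum" -> which aggregator
    string valueId        = aggregateKey.Substring(sep + 1);  // "record.count" -> what is being aggregated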
Example #11
            /// <exception cref="System.IO.IOException"/>
            public virtual void Map(WritableComparable key, Text value, OutputCollector <Text,
                                                                                         Text> output, Reporter reporter)
            {
                string str = StringUtils.ToLowerCase(value.ToString());

                output.Collect(new Text(str), value);
            }
Example #12
 /// <exception cref="System.IO.IOException"/>
 public virtual void Map(LongWritable key, Text value, OutputCollector <Text, IntWritable
                                                                        > output, Reporter reporter)
 {
     LoadGeneratorMR.MapperThatRunsNNLoadGenerator.ProgressThread progressThread =
         new LoadGeneratorMR.MapperThatRunsNNLoadGenerator.ProgressThread(this, reporter);
     progressThread.Start();
     try
     {
         new Org.Apache.Hadoop.FS.LoadGenerator.LoadGenerator(jobConf).GenerateLoadOnNN();
         System.Console.Out.WriteLine("Finished generating load on NN, sending results to the reducer"
                                      );
         PrintResults(System.Console.Out);
         progressThread.keepGoing = false;
         progressThread.Join();
         // Send results to Reducer
         output.Collect(OpenExectime, new IntWritable((int)executionTime[Open]));
         output.Collect(NumopsOpen, new IntWritable((int)numOfOps[Open]));
         output.Collect(ListExectime, new IntWritable((int)executionTime[List]));
         output.Collect(NumopsList, new IntWritable((int)numOfOps[List]));
         output.Collect(DeleteExectime, new IntWritable((int)executionTime[Delete]));
         output.Collect(NumopsDelete, new IntWritable((int)numOfOps[Delete]));
         output.Collect(CreateExectime, new IntWritable((int)executionTime[Create]));
         output.Collect(NumopsCreate, new IntWritable((int)numOfOps[Create]));
         output.Collect(WriteCloseExectime, new IntWritable((int)executionTime[WriteClose]
                                                            ));
         output.Collect(NumopsWriteClose, new IntWritable((int)numOfOps[WriteClose]));
         output.Collect(Totalops, new IntWritable((int)totalOps));
         output.Collect(ElapsedTime, new IntWritable((int)totalTime));
     }
     catch (Exception e)
     {
         Sharpen.Runtime.PrintStackTrace(e);
     }
 }
Example #13
 /// <exception cref="System.IO.IOException"/>
 public virtual void Reduce(LongWritable key, IEnumerator <Text> values, OutputCollector
                            <LongWritable, Text> output, Reporter reporter)
 {
     while (values.HasNext())
     {
         Text value = values.Next();
         Log.Debug("REDUCE key:" + key + "  value:" + value);
         if (ReducerBadRecords[0].Equals(value.ToString()))
         {
             Log.Warn("REDUCE Encountered BAD record");
             System.Environment.Exit(-1);
         }
         else
         {
             if (ReducerBadRecords[1].Equals(value.ToString()))
             {
                 try
                 {
                     Log.Warn("REDUCE Encountered BAD record");
                     Sharpen.Thread.Sleep(15 * 60 * 1000);
                 }
                 catch (Exception e)
                 {
                     Sharpen.Runtime.PrintStackTrace(e);
                 }
             }
         }
         output.Collect(key, value);
     }
 }
Example #14
			/// <exception cref="System.IO.IOException"/>
			public virtual void Reduce(UTF8 key, IEnumerator<UTF8> values, OutputCollector<UTF8
				, UTF8> output, Reporter reporter)
			{
				while (values.HasNext())
				{
					output.Collect(key, new UTF8(values.Next().ToString()));
				}
			}
Example #15
 /// <summary>Writes all keys and values directly to output.</summary>
 /// <exception cref="System.IO.IOException"/>
 public virtual void Reduce(K key, IEnumerator <V> values, OutputCollector <K, V> output
                            , Reporter reporter)
 {
     while (values.HasNext())
     {
         output.Collect(key, values.Next());
     }
 }
Example #16
            /// <exception cref="System.IO.IOException"/>
            public virtual void Map(IntWritable key, IntWritable val, OutputCollector <IntWritable
                                                                                       , IntWritable> @out, Reporter reporter)
            {
                int keyint = key.Get();
                int valint = val.Get();

                @out.Collect(new IntWritable(keyint), new IntWritable(valint));
            }
Example #17
 /// <exception cref="System.IO.IOException"/>
 public virtual void Reduce(LongWritable key, IEnumerator <Text> values, OutputCollector
                            <LongWritable, Text> output, Reporter reporter)
 {
     while (values.HasNext())
     {
         output.Collect(key, values.Next());
     }
 }
Example #18
 /// <summary>Create a handler that will handle any records output from the application.
 ///     </summary>
 /// <param name="collector">the "real" collector that takes the output</param>
 /// <param name="reporter">the reporter for reporting progress</param>
 public OutputHandler(OutputCollector <K, V> collector, Reporter reporter, RecordReader
                      <FloatWritable, NullWritable> recordReader, string expectedDigest)
 {
     this.reporter       = reporter;
     this.collector      = collector;
     this.recordReader   = recordReader;
     this.expectedDigest = expectedDigest;
 }
Example #19
            /// <summary>Reduce method</summary>
            /// <exception cref="System.IO.IOException"/>
            public virtual void Reduce(Text key, IEnumerator <Text> values, OutputCollector <Text
                                                                                             , Text> output, Reporter reporter)
            {
                string field = key.ToString();

                reporter.SetStatus("starting " + field + " ::host = " + hostName);
                // sum long values
                if (field.StartsWith("l:"))
                {
                    long lSum = 0;
                    while (values.HasNext())
                    {
                        lSum += long.Parse(values.Next().ToString());
                    }
                    output.Collect(key, new Text(lSum.ToString()));
                }
                if (field.StartsWith("min:"))
                {
                    long minVal = -1;
                    while (values.HasNext())
                    {
                        long value = long.Parse(values.Next().ToString());
                        if (minVal == -1)
                        {
                            minVal = value;
                        }
                        else
                        {
                            if (value != 0 && value < minVal)
                            {
                                minVal = value;
                            }
                        }
                    }
                    output.Collect(key, new Text(minVal.ToString()));
                }
                if (field.StartsWith("max:"))
                {
                    long maxVal = -1;
                    while (values.HasNext())
                    {
                        long value = long.Parse(values.Next().ToString());
                        if (maxVal == -1)
                        {
                            maxVal = value;
                        }
                        else
                        {
                            if (value > maxVal)
                            {
                                maxVal = value;
                            }
                        }
                    }
                    output.Collect(key, new Text(maxVal.ToString()));
                }
                reporter.SetStatus("finished " + field + " ::host = " + hostName);
            }
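
The key prefix selects the aggregation: "l:" keys are summed, "min:" keeps the smallest non-zero value, and "max:" keeps the largest. A worked illustration with invented field names:

    // Illustrative input -> output for the reducer above (field names are made up):
    //   ("l:bytesProcessed", ["100", "250", "50"]) -> ("l:bytesProcessed", "400")  // sum
    //   ("min:execTime",     ["40", "0", "25"])    -> ("min:execTime",     "25")   // smallest non-zero
    //   ("max:execTime",     ["40", "0", "25"])    -> ("max:execTime",     "40")   // largest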
Example #20
                /// <exception cref="System.IO.IOException"/>
                public virtual void Map(BytesWritable key, BytesWritable value, OutputCollector <BytesWritable
                                                                                                 , IntWritable> output, Reporter reporter)
                {
                    // newKey = (key, value)
                    BytesWritable keyValue = new BytesWritable(Pair(key, value));

                    // output (newKey, value)
                    output.Collect(keyValue, this.value);
                }
Example #21
 /// <summary>Collecting all required parameters to execute a Mapper.map call.</summary>
 /// <remarks>
 /// Collecting all required parameters to execute a Mapper.map call.
 /// <p>
 /// </remarks>
 /// <param name="key"/>
 /// <param name="value"/>
 /// <param name="output"/>
 /// <param name="reporter"/>
 public MapperInvokeRunable(MultithreadedMapRunner <K1, V1, K2, V2> _enclosing, K1
                            key, V1 value, OutputCollector <K2, V2> output, Reporter reporter)
 {
     this._enclosing = _enclosing;
     this.key        = key;
     this.value      = value;
     this.output     = output;
     this.reporter   = reporter;
 }
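
The runnable only captures the four arguments; the interesting part is how it is consumed. A hedged sketch of the matching Run method follows; the `mapper` field on the enclosing MultithreadedMapRunner is an assumption about code not shown here.

    // Hedged sketch: the enclosing runner's thread pool executes this, and Run() simply
    // forwards the captured arguments to the user's Mapper. The error handling the real
    // implementation performs is omitted for brevity.
    public virtual void Run()
    {
        this._enclosing.mapper.Map(this.key, this.value, this.output, this.reporter);
    }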
Example #22
            /// <exception cref="System.IO.IOException"/>
            public virtual void Map(LongWritable key, Text value, OutputCollector <Text, LongWritable
                                                                                   > output, Reporter reporter)
            {
                string v = value.ToString();
                string k = Sharpen.Runtime.Substring(v, 0, v.IndexOf(","));

                v = Sharpen.Runtime.Substring(v, v.IndexOf(",") + 1);
                output.Collect(new Text(k), new LongWritable(long.Parse(v)));
            }
Example #23
        /// <summary>Returns the OutputCollector to be used by a Mapper instance in the chain.
        ///     </summary>
        /// <param name="mapperIndex">index of the Mapper instance to get the OutputCollector.
        ///     </param>
        /// <param name="output">the original OutputCollector of the task.</param>
        /// <param name="reporter">the reporter of the task.</param>
        /// <returns>the OutputCollector to be used in the chain.</returns>
        public virtual OutputCollector GetMapperCollector(int mapperIndex, OutputCollector
                                                          output, Reporter reporter)
        {
            Serialization keySerialization   = mappersKeySerialization[mapperIndex];
            Serialization valueSerialization = mappersValueSerialization[mapperIndex];

            return(new Chain.ChainOutputCollector(this, mapperIndex, keySerialization, valueSerialization
                                                  , output, reporter));
        }
Example #24
 /// <exception cref="System.IO.IOException"/>
 protected internal virtual void Emit(K key, V val, OutputCollector <K, V> @out)
 {
     ++total;
     while ((float)kept / total < keep)
     {
         ++kept;
         @out.Collect(key, val);
     }
 }
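
Emit keeps roughly the fraction `keep` of the records offered to it: `total` counts every call, `kept` counts collected records, and the loop collects until kept/total catches back up to the target ratio. A short hedged trace for keep = 0.5:

    // Hedged trace for keep = 0.5 (worked by hand from the loop above, not from the source):
    //   call 1: total=1, kept=0 -> 0.00 < 0.5, collect, kept=1 -> 1.00 >= 0.5, stop
    //   call 2: total=2, kept=1 -> 0.50 >= 0.5, nothing emitted
    //   call 3: total=3, kept=1 -> 0.33 < 0.5, collect, kept=2 -> 0.67 >= 0.5, stop
    // i.e. roughly every other record reaches the collector.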
Example #25
 /// <exception cref="System.IO.IOException"/>
 public override void Reduce(K key, IEnumerator <V> values, OutputCollector <K, V> output
                             , Reporter reporter)
 {
     if (Reporter.Null == reporter)
     {
         NUnit.Framework.Assert.Fail("A valid Reporter should have been used but, Reporter.NULL is used"
                                     );
     }
 }
Example #26
 /// <exception cref="System.IO.IOException"/>
 public virtual void Map(WritableComparable key, Writable val, OutputCollector <WritableComparable
                                                                                , Writable> output, Reporter reporter)
 {
     NUnit.Framework.Assert.IsNotNull("Mapper not configured!", loader);
     // load the memory
     loader.Load();
     // work as identity mapper
     output.Collect(key, val);
 }
Example #27
 /// <exception cref="System.IO.IOException"/>
 public virtual void Map(LongWritable key, Text val, OutputCollector <LongWritable,
                                                                      Text> output, Reporter reporter)
 {
     // Everybody other than id 0 outputs
     if (!id.EndsWith("0_0"))
     {
         output.Collect(key, val);
     }
 }
Example #28
                /// <exception cref="System.IO.IOException"/>
                public virtual void Map(WritableComparable key, Writable value, OutputCollector <IntWritable
                                                                                                 , SortValidator.RecordStatsChecker.RecordStatsWritable> output, Reporter reporter
                                        )
                {
                    // Set up rawKey and rawValue on the first call to 'map'
                    if (recordId == -1)
                    {
                        rawKey   = CreateRaw(key.GetType());
                        rawValue = CreateRaw(value.GetType());
                    }
                    ++recordId;
                    if (this.key == sortOutput)
                    {
                        // Check if keys are 'sorted' if this
                        // record is from sort's output
                        if (prevKey == null)
                        {
                            prevKey  = key;
                            keyClass = prevKey.GetType();
                        }
                        else
                        {
                            // Sanity check
                            if (keyClass != key.GetType())
                            {
                                throw new IOException("Type mismatch in key: expected " + keyClass.FullName + ", received "
                                                      + key.GetType().FullName);
                            }
                            // Check if they were sorted correctly
                            if (prevKey.CompareTo(key) > 0)
                            {
                                throw new IOException("The 'map-reduce' framework wrongly" + " classifed (" + prevKey
                                                      + ") > (" + key + ") " + "for record# " + recordId);
                            }
                            prevKey = key;
                        }
                        // Check if the sorted output is 'partitioned' right
                        int keyPartition = partitioner.GetPartition(key, value, noSortReducers);
                        if (partition != keyPartition)
                        {
                            throw new IOException("Partitions do not match for record# " + recordId + " ! - '"
                                                  + partition + "' v/s '" + keyPartition + "'");
                        }
                    }
                    // Construct the record-stats and output (this.key, record-stats)
                    byte[] keyBytes    = rawKey.GetRawBytes(key);
                    int    keyBytesLen = rawKey.GetRawBytesLength(key);

                    byte[] valueBytes       = rawValue.GetRawBytes(value);
                    int    valueBytesLen    = rawValue.GetRawBytesLength(value);
                    int    keyValueChecksum = (WritableComparator.HashBytes(keyBytes, keyBytesLen) ^
                                               WritableComparator.HashBytes(valueBytes, valueBytesLen));

                    output.Collect(this.key,
                                   new SortValidator.RecordStatsChecker.RecordStatsWritable(
                                       (keyBytesLen + valueBytesLen), 1, keyValueChecksum));
                }
Example #29
        /// <summary>Chains the <code>map(...)</code> methods of the Mappers in the chain.</summary>
        /// <exception cref="System.IO.IOException"/>
        public virtual void Map(object key, object value, OutputCollector output, Reporter
                                reporter)
        {
            Mapper mapper = chain.GetFirstMap();

            if (mapper != null)
            {
                mapper.Map(key, value, chain.GetMapperCollector(0, output, reporter), reporter);
            }
        }
Example #30
            /// <exception cref="System.IO.IOException"/>
            public virtual void Map(LongWritable key, Text value, OutputCollector <Text, Text>
                                    output, Reporter reporter)
            {
                string record   = value.ToString();
                int    blankPos = record.IndexOf(" ");

                keyText.Set(Sharpen.Runtime.Substring(record, 0, blankPos));
                valueText.Set(Sharpen.Runtime.Substring(record, blankPos + 1));
                output.Collect(keyText, valueText);
            }
Example #31
 public void testUncompressed()
 {
     OutputCollector collect = new OutputCollector();
     OutStream @out = new OutStream("test", 100, null, collect);
     PositionCollector[] positions = new PositionCollector[1024];
     for (int i = 0; i < 1024; ++i)
     {
         positions[i] = new PositionCollector();
         @out.getPosition(positions[i]);
         @out.WriteByte((byte)i);
     }
     @out.Flush();
     Assert.Equal(1024, collect.buffer.size());
     for (int i = 0; i < 1024; ++i)
     {
         Assert.Equal((byte)i, collect.buffer.get(i));
     }
     ByteBuffer inBuf = ByteBuffer.allocate(collect.buffer.size());
     collect.buffer.setByteBuffer(inBuf, 0, collect.buffer.size());
     inBuf.flip();
     #pragma warning disable 612
     InStream @in = InStream.create(null, "test", new ByteBuffer[] { inBuf },
         new long[] { 0 }, inBuf.remaining(), null, 100);
     #pragma warning restore 612
     Assert.Equal("uncompressed stream test position: 0 length: 1024" +
                  " range: 0 offset: 0 limit: 0",
                  @in.ToString());
     for (int i = 0; i < 1024; ++i)
     {
         int x = @in.ReadByte();
         Assert.Equal(i & 0xff, x);
     }
     for (int i = 1023; i >= 0; --i)
     {
         @in.seek(positions[i]);
         Assert.Equal(i & 0xff, @in.ReadByte());
     }
 }
Example #32
        /// <summary>
        /// Given a resolved and type checked Boogie program, infers invariants for the program
        /// and then attempts to verify it.  Returns:
        ///  - Done if command line specified no verification
        ///  - FatalError if a fatal error occurred, in which case an error has been printed to console
        ///  - VerificationCompleted if inference and verification completed, in which case the out
        ///    parameters contain meaningful values
        /// </summary>
        public static PipelineOutcome InferAndVerify(Program program,
            PipelineStatistics stats, string filename,
            ErrorReporterDelegate er = null, string requestId = "unknown")
        {
            Contract.Requires(program != null);
              Contract.Requires(stats != null);
              Contract.Ensures(0 <= Contract.ValueAtReturn(out stats.InconclusiveCount) && 0 <= Contract.ValueAtReturn(out stats.TimeoutCount));

              if (requestId == null)
              {
            requestId = "unknown";
              }
              RequestIdToCancellationTokenSources[requestId] = new List<CancellationTokenSource>();

              #region Infer invariants using Abstract Interpretation

              // Always use (at least) intervals, if not specified otherwise (e.g. with the "/noinfer" switch)
              if (CommandLineOptions.Clo.UseAbstractInterpretation)
              {
            if (!CommandLineOptions.Clo.Ai.J_Intervals && !CommandLineOptions.Clo.Ai.J_Trivial)
            {
              // use /infer:j as the default
              CommandLineOptions.Clo.Ai.J_Intervals = true;
            }
              }
              Microsoft.Boogie.AbstractInterpretation.NativeAbstractInterpretation.RunAbstractInterpretation(program);

              #endregion

              #region Do some preprocessing on the program (e.g., loop unrolling, lambda expansion)

              if (CommandLineOptions.Clo.LoopUnrollCount != -1)
              {
            program.UnrollLoops(CommandLineOptions.Clo.LoopUnrollCount, CommandLineOptions.Clo.SoundLoopUnrolling);
              }

              Dictionary<string, Dictionary<string, Block>> extractLoopMappingInfo = null;
              if (CommandLineOptions.Clo.ExtractLoops)
              {
            extractLoopMappingInfo = program.ExtractLoops();
              }

              if (CommandLineOptions.Clo.PrintInstrumented)
              {
            program.Emit(new TokenTextWriter(Console.Out));
              }

              if (CommandLineOptions.Clo.ExpandLambdas)
              {
            LambdaHelper.ExpandLambdas(program);
            //PrintBplFile ("-", program, true);
              }

              #endregion

              if (!CommandLineOptions.Clo.Verify)
              {
            return PipelineOutcome.Done;
              }

              #region Run Houdini and verify
              if (CommandLineOptions.Clo.ContractInfer)
              {
            return RunHoudini(program, stats, er, filename);
              }
              #endregion

              #region Select and prioritize implementations that should be verified

              var impls = program.TopLevelDeclarations.OfType<Implementation>().Where(
            impl => impl != null && CommandLineOptions.Clo.UserWantsToCheckRoutine(cce.NonNull(impl.Name)) && !impl.SkipVerification);

              // operate on a stable copy, in case it gets updated while we're running
              Implementation[] stablePrioritizedImpls = null;
              if (CommandLineOptions.Clo.VerifySnapshots)
              {
            impls.Iter(impl => { impl.DependenciesChecksum = DependencyCollector.DependenciesChecksum(impl); });
            stablePrioritizedImpls = impls.OrderByDescending(
              impl => impl.Priority != 1 ? impl.Priority : Cache.VerificationPriority(impl)).ToArray();
              }
              else
              {
            stablePrioritizedImpls = impls.OrderByDescending(impl => impl.Priority).ToArray();
              }

              #endregion

              #region Verify each implementation

              var outputCollector = new OutputCollector(stablePrioritizedImpls);
              var outcome = PipelineOutcome.VerificationCompleted;
              var tasks = new Task[stablePrioritizedImpls.Length];
              for (int i = 0; i < stablePrioritizedImpls.Length && outcome != PipelineOutcome.FatalError; i++)
              {
            var taskIndex = i;
            var id = stablePrioritizedImpls[i].Id;
            CancellationTokenSource src;
            if (ImplIdToCancellationTokenSource.TryGetValue(id, out src))
            {
              src.Cancel();
            }
            src = new CancellationTokenSource();
            RequestIdToCancellationTokenSources[requestId].Add(src);
            ImplIdToCancellationTokenSource[id] = src;
            var t = Task.Factory.StartNew((dummy) =>
            {
              VerifyImplementation(program, stats, er, requestId, extractLoopMappingInfo, stablePrioritizedImpls, taskIndex, outputCollector, Checkers, src.Token);
              ImplIdToCancellationTokenSource.Remove(id);
            }, src.Token, TaskCreationOptions.LongRunning);
            tasks[taskIndex] = t;
              }
              try
              {
            Task.WaitAll(tasks);
              }
              catch (AggregateException ae)
              {
            ae.Handle(e =>
            {
              var pe = e as ProverException;
              if (pe != null)
              {
            printer.ErrorWriteLine(Console.Out, "Fatal Error: ProverException: {0}", e);
            outcome = PipelineOutcome.FatalError;
            return true;
              }
              var oce = e as OperationCanceledException;
              if (oce != null)
              {
            return true;
              }
              return false;
            });
              }
              finally
              {
            CleanupCheckers(requestId);
              }

              cce.NonNull(CommandLineOptions.Clo.TheProverFactory).Close();

              outputCollector.WriteMoreOutput();

              #endregion

              return outcome;
        }
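
The summary lists the three possible outcomes. Below is a hedged sketch of how a caller might drive this entry point, assuming it already holds a resolved and type-checked Program; the file name is invented, and the statistics fields used follow the names that appear later in these examples.

    // Hedged caller sketch; 'program' is assumed to be resolved and type checked already.
    var stats = new PipelineStatistics();
    PipelineOutcome outcome = InferAndVerify(program, stats, "input.bpl");
    if (outcome == PipelineOutcome.VerificationCompleted)
    {
        // Only in this case do the statistics carry meaningful counts.
        Console.Out.WriteLine("verified: {0}, errors: {1}, timeouts: {2}",
                              stats.VerifiedCount, stats.ErrorCount, stats.TimeoutCount);
    }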
Example #33
        /// <summary>
        /// Given a resolved and type checked Boogie program, infers invariants for the program
        /// and then attempts to verify it.  Returns:
        ///  - Done if command line specified no verification
        ///  - FatalError if a fatal error occurred, in which case an error has been printed to console
        ///  - VerificationCompleted if inference and verification completed, in which case the out
        ///    parameters contain meaningful values
        /// </summary>
        public static PipelineOutcome InferAndVerify(Program program,
            PipelineStatistics stats,
            string programId = null,
            ErrorReporterDelegate er = null, string requestId = null)
        {
            Contract.Requires(program != null);
              Contract.Requires(stats != null);
              Contract.Ensures(0 <= Contract.ValueAtReturn(out stats.InconclusiveCount) && 0 <= Contract.ValueAtReturn(out stats.TimeoutCount));

              if (requestId == null)
              {
            requestId = FreshRequestId();
              }

              var start = DateTime.UtcNow;

              #region Do some pre-abstract-interpretation preprocessing on the program
              // Doing lambda expansion before abstract interpretation means that the abstract interpreter
              // never needs to see any lambda expressions.  (On the other hand, if it were useful for it
              // to see lambdas, then it would be better to move lambda expansion until after inference.)
              if (CommandLineOptions.Clo.ExpandLambdas) {
            LambdaHelper.ExpandLambdas(program);
            //PrintBplFile ("-", program, true);
              }
              #endregion

              #region Infer invariants using Abstract Interpretation

              // Always use (at least) intervals, if not specified otherwise (e.g. with the "/noinfer" switch)
              if (CommandLineOptions.Clo.UseAbstractInterpretation)
              {
            if (!CommandLineOptions.Clo.Ai.J_Intervals && !CommandLineOptions.Clo.Ai.J_Trivial)
            {
              // use /infer:j as the default
              CommandLineOptions.Clo.Ai.J_Intervals = true;
            }
              }
              Microsoft.Boogie.AbstractInterpretation.NativeAbstractInterpretation.RunAbstractInterpretation(program);

              #endregion

              #region Do some post-abstract-interpretation preprocessing on the program (e.g., loop unrolling)

              if (CommandLineOptions.Clo.LoopUnrollCount != -1)
              {
            program.UnrollLoops(CommandLineOptions.Clo.LoopUnrollCount, CommandLineOptions.Clo.SoundLoopUnrolling);
              }

              Dictionary<string, Dictionary<string, Block>> extractLoopMappingInfo = null;
              if (CommandLineOptions.Clo.ExtractLoops)
              {
            extractLoopMappingInfo = program.ExtractLoops();
              }

              if (CommandLineOptions.Clo.PrintInstrumented)
              {
            program.Emit(new TokenTextWriter(Console.Out, CommandLineOptions.Clo.PrettyPrint));
              }
              #endregion

              if (!CommandLineOptions.Clo.Verify)
              {
            return PipelineOutcome.Done;
              }

              #region Run Houdini and verify
              if (CommandLineOptions.Clo.ContractInfer)
              {
            return RunHoudini(program, stats, er);
              }
              #endregion

              #region Select and prioritize implementations that should be verified

              var impls = program.Implementations.Where(
            impl => impl != null && CommandLineOptions.Clo.UserWantsToCheckRoutine(cce.NonNull(impl.Name)) && !impl.SkipVerification);

              // operate on a stable copy, in case it gets updated while we're running
              Implementation[] stablePrioritizedImpls = null;
              if (0 < CommandLineOptions.Clo.VerifySnapshots)
              {
            OtherDefinitionAxiomsCollector.Collect(program.Axioms);
            DependencyCollector.Collect(program);
            stablePrioritizedImpls = impls.OrderByDescending(
              impl => impl.Priority != 1 ? impl.Priority : Cache.VerificationPriority(impl)).ToArray();
              }
              else
              {
            stablePrioritizedImpls = impls.OrderByDescending(impl => impl.Priority).ToArray();
              }

              #endregion

              if (1 < CommandLineOptions.Clo.VerifySnapshots)
              {
            CachedVerificationResultInjector.Inject(program, stablePrioritizedImpls, requestId, programId, out stats.CachingActionCounts);
              }

              #region Verify each implementation

              var outputCollector = new OutputCollector(stablePrioritizedImpls);
              var outcome = PipelineOutcome.VerificationCompleted;

              try
              {
              var cts = new CancellationTokenSource();
              RequestIdToCancellationTokenSource.AddOrUpdate(requestId, cts, (k, ov) => cts);

              var tasks = new Task[stablePrioritizedImpls.Length];
              // We use this semaphore to limit the number of tasks that are currently executing.
              var semaphore = new SemaphoreSlim(CommandLineOptions.Clo.VcsCores);

              // Create a task per implementation.
              for (int i = 0; i < stablePrioritizedImpls.Length; i++)
              {
              var taskIndex = i;
              var id = stablePrioritizedImpls[taskIndex].Id;

              CancellationTokenSource old;
              if (ImplIdToCancellationTokenSource.TryGetValue(id, out old))
              {
                  old.Cancel();
              }
              ImplIdToCancellationTokenSource.AddOrUpdate(id, cts, (k, ov) => cts);

              var t = new Task((dummy) =>
                  {
                      try
                      {
                          if (outcome == PipelineOutcome.FatalError)
                          {
                              return;
                          }
                          if (cts.Token.IsCancellationRequested)
                          {
                              cts.Token.ThrowIfCancellationRequested();
                          }
                          VerifyImplementation(program, stats, er, requestId, extractLoopMappingInfo, stablePrioritizedImpls, taskIndex, outputCollector, Checkers, programId);
                          ImplIdToCancellationTokenSource.TryRemove(id, out old);
                      }
                      finally
                      {
                          semaphore.Release();
                      }
                  }, cts.Token, TaskCreationOptions.None);
              tasks[taskIndex] = t;
              }

              // Execute the tasks.
              int j = 0;
              for (; j < stablePrioritizedImpls.Length && outcome != PipelineOutcome.FatalError; j++)
              {
              try
              {
                  semaphore.Wait(cts.Token);
              }
              catch (OperationCanceledException)
              {
                  break;
              }
              tasks[j].Start(TaskScheduler.Default);
              }

              // Don't wait for tasks that haven't been started yet.
              tasks = tasks.Take(j).ToArray();
              Task.WaitAll(tasks);
              }
              catch (AggregateException ae)
              {
              ae.Handle(e =>
              {
              var pe = e as ProverException;
              if (pe != null)
              {
                  printer.ErrorWriteLine(Console.Out, "Fatal Error: ProverException: {0}", e);
                  outcome = PipelineOutcome.FatalError;
                  return true;
              }
              var oce = e as OperationCanceledException;
              if (oce != null)
              {
                  return true;
              }
              return false;
              });
              }
              finally
              {
            CleanupCheckers(requestId);
              }

              cce.NonNull(CommandLineOptions.Clo.TheProverFactory).Close();

              outputCollector.WriteMoreOutput();

              if (1 < CommandLineOptions.Clo.VerifySnapshots && programId != null)
              {
            program.FreezeTopLevelDeclarations();
            programCache.Set(programId, program, policy);
              }

              if (0 <= CommandLineOptions.Clo.VerifySnapshots && CommandLineOptions.Clo.TraceCachingForBenchmarking)
              {
            var end = DateTime.UtcNow;
            if (TimePerRequest.Count == 0)
            {
              FirstRequestStart = start;
            }
            TimePerRequest[requestId] = end.Subtract(start);
            StatisticsPerRequest[requestId] = stats;

            var printTimes = true;

            Console.Out.WriteLine(CachedVerificationResultInjector.Statistics.Output(printTimes));

            Console.Out.WriteLine("Statistics per request as CSV:");
            var actions = string.Join(", ", Enum.GetNames(typeof(VC.ConditionGeneration.CachingAction)));
            Console.Out.WriteLine("Request ID{0}, Error, E (C), Inconclusive, I (C), Out of Memory, OoM (C), Timeout, T (C), Verified, V (C), {1}", printTimes ? ", Time (ms)" : "", actions);
            foreach (var kv in TimePerRequest.OrderBy(kv => ExecutionEngine.AutoRequestId(kv.Key)))
            {
              var s = StatisticsPerRequest[kv.Key];
              var cacs = s.CachingActionCounts;
              var c = cacs != null ? ", " + cacs.Select(ac => string.Format("{0,3}", ac)).Concat(", ") : "";
              var t = printTimes ? string.Format(", {0,8:F0}", kv.Value.TotalMilliseconds) : "";
              Console.Out.WriteLine("{0,-19}{1}, {2,2}, {3,2}, {4,2}, {5,2}, {6,2}, {7,2}, {8,2}, {9,2}, {10,2}, {11,2}{12}", kv.Key, t, s.ErrorCount, s.CachedErrorCount, s.InconclusiveCount, s.CachedInconclusiveCount, s.OutOfMemoryCount, s.CachedOutOfMemoryCount, s.TimeoutCount, s.CachedTimeoutCount, s.VerifiedCount, s.CachedVerifiedCount, c);
            }

            if (printTimes)
            {
              Console.Out.WriteLine();
              Console.Out.WriteLine("Total time (ms) since first request: {0:F0}", end.Subtract(FirstRequestStart).TotalMilliseconds);
            }
              }

              #endregion

              if (SecureVCGen.outfile != null)
              SecureVCGen.outfile.Close();

              return outcome;
        }
Example #34
        private static void VerifyImplementation(Program program, PipelineStatistics stats, ErrorReporterDelegate er, string requestId, Dictionary<string, Dictionary<string, Block>> extractLoopMappingInfo, Implementation[] stablePrioritizedImpls, int index, OutputCollector outputCollector, List<Checker> checkers, string programId)
        {
            Implementation impl = stablePrioritizedImpls[index];
              VerificationResult verificationResult = null;
              var output = new StringWriter();

              printer.Inform("", output);  // newline
              printer.Inform(string.Format("Verifying {0} ...", impl.Name), output);

              int priority = 0;
              var wasCached = false;
              if (0 < CommandLineOptions.Clo.VerifySnapshots) {
            var cachedResults = Cache.Lookup(impl, out priority);
            if (cachedResults != null && priority == Priority.SKIP) {
              if (CommandLineOptions.Clo.XmlSink != null) {
            CommandLineOptions.Clo.XmlSink.WriteStartMethod(impl.Name, cachedResults.Start);
              }

              printer.Inform(string.Format("Retrieving cached verification result for implementation {0}...", impl.Name), output);
              if (CommandLineOptions.Clo.VerifySnapshots < 3 || cachedResults.Outcome == ConditionGeneration.Outcome.Correct) {
            verificationResult = cachedResults;
            wasCached = true;
              }
            }
              }

              if (!wasCached)
              {
            #region Verify the implementation

            verificationResult = new VerificationResult(requestId, impl, programId);

            using (var vcgen = CreateVCGen(program, checkers))
            {
              vcgen.CachingActionCounts = stats.CachingActionCounts;
              verificationResult.ProofObligationCountBefore = vcgen.CumulativeAssertionCount;
              verificationResult.Start = DateTime.UtcNow;

              if (CommandLineOptions.Clo.XmlSink != null)
              {
            CommandLineOptions.Clo.XmlSink.WriteStartMethod(impl.Name, verificationResult.Start);
              }
              try
              {
            if (CommandLineOptions.Clo.inferLeastForUnsat != null)
            {
              var svcgen = vcgen as VC.StratifiedVCGen;
              Contract.Assert(svcgen != null);
              var ss = new HashSet<string>();
              foreach (var c in program.Constants)
              {
                if (!c.Name.StartsWith(CommandLineOptions.Clo.inferLeastForUnsat)) continue;
                ss.Add(c.Name);
              }
              verificationResult.Outcome = svcgen.FindLeastToVerify(impl, ref ss);
              verificationResult.Errors = new List<Counterexample>();
              output.WriteLine("Result: {0}", string.Join(" ", ss));
            }
            else
            {
              verificationResult.Outcome = vcgen.VerifyImplementation(impl, out verificationResult.Errors, requestId);
              if (CommandLineOptions.Clo.ExtractLoops && verificationResult.Errors != null)
              {
                var vcg = vcgen as VCGen;
                if (vcg != null)
                {
                  for (int i = 0; i < verificationResult.Errors.Count; i++)
                  {
                    verificationResult.Errors[i] = vcg.extractLoopTrace(verificationResult.Errors[i], impl.Name, program, extractLoopMappingInfo);
                  }
                }
              }
            }
              }
              catch (VCGenException e)
              {
            var errorInfo = errorInformationFactory.CreateErrorInformation(impl.tok, String.Format("{0} (encountered in implementation {1}).", e.Message, impl.Name), requestId, "Error");
            errorInfo.BoogieErrorCode = "BP5010";
            errorInfo.ImplementationName = impl.Name;
            printer.WriteErrorInformation(errorInfo, output);
            if (er != null)
            {
              lock (er)
              {
                er(errorInfo);
              }
            }
            verificationResult.Errors = null;
            verificationResult.Outcome = VCGen.Outcome.Inconclusive;
              }
              catch (UnexpectedProverOutputException upo)
              {
            printer.AdvisoryWriteLine("Advisory: {0} SKIPPED because of internal error: unexpected prover output: {1}", impl.Name, upo.Message);
            verificationResult.Errors = null;
            verificationResult.Outcome = VCGen.Outcome.Inconclusive;
              }

              verificationResult.ProofObligationCountAfter = vcgen.CumulativeAssertionCount;
              verificationResult.End = DateTime.UtcNow;
            }

            #endregion

            #region Cache the verification result

            if (0 < CommandLineOptions.Clo.VerifySnapshots && !string.IsNullOrEmpty(impl.Checksum))
            {
              Cache.Insert(impl, verificationResult);
            }

            #endregion
              }

              #region Process the verification results and statistics

              ProcessOutcome(verificationResult.Outcome, verificationResult.Errors, TimeIndication(verificationResult), stats, output, impl.TimeLimit, er, verificationResult.ImplementationName, verificationResult.ImplementationToken, verificationResult.RequestId, wasCached);

              ProcessErrors(verificationResult.Errors, verificationResult.Outcome, output, er, impl);

              if (CommandLineOptions.Clo.XmlSink != null)
              {
            CommandLineOptions.Clo.XmlSink.WriteEndMethod(verificationResult.Outcome.ToString().ToLowerInvariant(), verificationResult.End, verificationResult.End - verificationResult.Start);
              }

              outputCollector.Add(index, output);

              outputCollector.WriteMoreOutput();

              if (verificationResult.Outcome == VCGen.Outcome.Errors || CommandLineOptions.Clo.Trace)
              {
            Console.Out.Flush();
              }

              #endregion
        }
Example #35
 public void testCompressed()
 {
     OutputCollector collect = new OutputCollector();
     CompressionCodec codec = new ZlibCodec();
     OutStream @out = new OutStream("test", 300, codec, collect);
     PositionCollector[] positions = new PositionCollector[1024];
     for (int i = 0; i < 1024; ++i)
     {
         positions[i] = new PositionCollector();
         @out.getPosition(positions[i]);
         @out.WriteByte((byte)i);
     }
     @out.Flush();
     Assert.Equal("test", @out.ToString());
     Assert.Equal(961, collect.buffer.size());
     ByteBuffer inBuf = ByteBuffer.allocate(collect.buffer.size());
     collect.buffer.setByteBuffer(inBuf, 0, collect.buffer.size());
     inBuf.flip();
     InStream @in = InStream.create(null, "test", new ByteBuffer[] { inBuf },
         new long[] { 0 }, inBuf.remaining(), codec, 300);
     Assert.Equal("compressed stream test position: 0 length: 961 range: 0" +
                  " offset: 0 limit: 0 range 0 = 0 to 961",
                  @in.ToString());
     for (int i = 0; i < 1024; ++i)
     {
         int x = @in.ReadByte();
         Assert.Equal(i & 0xff, x);
     }
     Assert.Equal(0, @in.available());
     for (int i = 1023; i >= 0; --i)
     {
         @in.seek(positions[i]);
         Assert.Equal(i & 0xff, @in.ReadByte());
     }
 }
Example #36
        public void testCorruptStream()
        {
            OutputCollector collect = new OutputCollector();
            CompressionCodec codec = new ZlibCodec();
            OutStream @out = new OutStream("test", 500, codec, collect);
            PositionCollector[] positions = new PositionCollector[1024];
            for (int i = 0; i < 1024; ++i)
            {
                positions[i] = new PositionCollector();
                @out.getPosition(positions[i]);
                @out.WriteByte((byte)i);
            }
            @out.Flush();

            // now try to read the stream with a buffer that is too small
            ByteBuffer inBuf = ByteBuffer.allocate(collect.buffer.size());
            collect.buffer.setByteBuffer(inBuf, 0, collect.buffer.size());
            inBuf.flip();
            InStream @in = InStream.create(null, "test", new ByteBuffer[] { inBuf },
                new long[] { 0 }, inBuf.remaining(), codec, 100);
            byte[] contents = new byte[1024];
            try
            {
                @in.Read(contents, 0, contents.Length);
                fail();
            }
            catch (IllegalArgumentException iae)
            {
                // EXPECTED
            }

            // make a corrupted header
            inBuf.clear();
            inBuf.put((byte)32);
            inBuf.put((byte)0);
            inBuf.flip();
            @in = InStream.create(null, "test2", new ByteBuffer[] { inBuf }, new long[] { 0 },
                inBuf.remaining(), codec, 300);
            try
            {
                @in.ReadByte();
                fail();
            }
            catch (InvalidOperationException ise)
            {
                // EXPECTED
            }
        }
Example #37
        public void testDisjointBuffers()
        {
            OutputCollector collect = new OutputCollector();
            CompressionCodec codec = new ZlibCodec();
            OutStream @out = new OutStream("test", 400, codec, collect);
            PositionCollector[] positions = new PositionCollector[1024];
            DataOutput stream = new DataOutputStream(@out);
            for (int i = 0; i < 1024; ++i)
            {
                positions[i] = new PositionCollector();
                @out.getPosition(positions[i]);
                stream.writeInt(i);
            }
            @out.Flush();
            Assert.Equal("test", @out.ToString());
            Assert.Equal(1674, collect.buffer.size());
            ByteBuffer[] inBuf = new ByteBuffer[3];
            inBuf[0] = ByteBuffer.allocate(500);
            inBuf[1] = ByteBuffer.allocate(1200);
            inBuf[2] = ByteBuffer.allocate(500);
            collect.buffer.setByteBuffer(inBuf[0], 0, 483);
            collect.buffer.setByteBuffer(inBuf[1], 483, 1625 - 483);
            collect.buffer.setByteBuffer(inBuf[2], 1625, 1674 - 1625);

            for (int i = 0; i < inBuf.Length; ++i)
            {
                inBuf[i].flip();
            }
            InStream @in = InStream.create(null, "test", inBuf,
                new long[] { 0, 483, 1625 }, 1674, codec, 400);
            Assert.Equal("compressed stream test position: 0 length: 1674 range: 0" +
                         " offset: 0 limit: 0 range 0 = 0 to 483;" +
                         "  range 1 = 483 to 1142;  range 2 = 1625 to 49",
                         @in.ToString());
            DataInputStream inStream = new DataInputStream(@in);
            for (int i = 0; i < 1024; ++i)
            {
                int x = inStream.readInt();
                Assert.Equal(i, x);
            }
            Assert.Equal(0, @in.available());
            for (int i = 1023; i >= 0; --i)
            {
                @in.seek(positions[i]);
                Assert.Equal(i, inStream.readInt());
            }

            @in = InStream.create(null, "test", new ByteBuffer[] { inBuf[1], inBuf[2] },
                new long[] { 483, 1625 }, 1674, codec, 400);
            inStream = new DataInputStream(@in);
            positions[303].reset();
            @in.seek(positions[303]);
            for (int i = 303; i < 1024; ++i)
            {
                Assert.Equal(i, inStream.readInt());
            }

            @in = InStream.create(null, "test", new ByteBuffer[] { inBuf[0], inBuf[2] },
                new long[] { 0, 1625 }, 1674, codec, 400);
            inStream = new DataInputStream(@in);
            positions[1001].reset();
            for (int i = 0; i < 300; ++i)
            {
                Assert.Equal(i, inStream.readInt());
            }
            @in.seek(positions[1001]);
            for (int i = 1001; i < 1024; ++i)
            {
                Assert.Equal(i, inStream.readInt());
            }
        }
Example #38
        public void testUncompressedDisjointBuffers()
        {
            OutputCollector collect = new OutputCollector();
            OutStream @out = new OutStream("test", 400, null, collect);
            PositionCollector[] positions = new PositionCollector[1024];
            DataOutput stream = new DataOutputStream(@out);
            for (int i = 0; i < 1024; ++i)
            {
                positions[i] = new PositionCollector();
                @out.getPosition(positions[i]);
                stream.writeInt(i);
            }
            @out.Flush();
            Assert.Equal("test", @out.ToString());
            Assert.Equal(4096, collect.buffer.size());
            ByteBuffer[] inBuf = new ByteBuffer[3];
            inBuf[0] = ByteBuffer.allocate(1100);
            inBuf[1] = ByteBuffer.allocate(2200);
            inBuf[2] = ByteBuffer.allocate(1100);
            collect.buffer.setByteBuffer(inBuf[0], 0, 1024);
            collect.buffer.setByteBuffer(inBuf[1], 1024, 2048);
            collect.buffer.setByteBuffer(inBuf[2], 3072, 1024);

            for (int i = 0; i < inBuf.Length; ++i)
            {
                inBuf[i].flip();
            }
            InStream @in = InStream.create(null, "test", inBuf,
                new long[] { 0, 1024, 3072 }, 4096, null, 400);
            Assert.Equal("uncompressed stream test position: 0 length: 4096" +
                         " range: 0 offset: 0 limit: 0",
                         @in.ToString());
            DataInputStream inStream = new DataInputStream(@in);
            for (int i = 0; i < 1024; ++i)
            {
                int x = inStream.readInt();
                Assert.Equal(i, x);
            }
            Assert.Equal(0, @in.available());
            for (int i = 1023; i >= 0; --i)
            {
                @in.seek(positions[i]);
                Assert.Equal(i, inStream.readInt());
            }

            @in = InStream.create(null, "test", new ByteBuffer[] { inBuf[1], inBuf[2] },
                new long[] { 1024, 3072 }, 4096, null, 400);
            inStream = new DataInputStream(@in);
            positions[256].reset();
            @in.seek(positions[256]);
            for (int i = 256; i < 1024; ++i)
            {
                Assert.Equal(i, inStream.readInt());
            }

            @in = InStream.create(null, "test", new ByteBuffer[] { inBuf[0], inBuf[2] },
                new long[] { 0, 3072 }, 4096, null, 400);
            inStream = new DataInputStream(@in);
            positions[768].reset();
            for (int i = 0; i < 256; ++i)
            {
                Assert.Equal(i, inStream.readInt());
            }
            @in.seek(positions[768]);
            for (int i = 768; i < 1024; ++i)
            {
                Assert.Equal(i, inStream.readInt());
            }
        }