/// <summary>Function to load the image file into memory.</summary>
        /// <param name="file">The stream for the file to load.</param>
        /// <param name="name">The name of the file.</param>
        /// <returns>The image data, the virtual file entry for the working file and the original pixel format of the file.</returns>
        public (IGorgonImage image, IGorgonVirtualFile workingFile, BufferFormat originalFormat) LoadImageFile(Stream file, string name)
        {
            IGorgonImage       result = null;
            IGorgonVirtualFile workFile;
            BufferFormat       originalFormat;

            IGorgonImageInfo imageInfo = DefaultCodec.GetMetaData(file);

            originalFormat = imageInfo.Format;
            var formatInfo = new GorgonFormatInfo(imageInfo.Format);

            // We absolutely need to have an extension, or else the texconv tool will not work.
            if ((DefaultCodec.CodecCommonExtensions.Count > 0) &&
                (!string.Equals(Path.GetExtension(name), DefaultCodec.CodecCommonExtensions[0], System.StringComparison.OrdinalIgnoreCase)))
            {
                _log.Print("Adding DDS extension to working file or else external tools may not be able to read it.", LoggingLevel.Verbose);
                name = Path.ChangeExtension(name, DefaultCodec.CodecCommonExtensions[0]);
            }

            _log.Print($"Copying content file {name} to {ScratchArea.FileSystem.MountPoints.First().PhysicalPath} as working file...", LoggingLevel.Intermediate);

            // Copy to a working file.
            using (Stream outStream = ScratchArea.OpenStream(name, FileMode.Create))
            {
                file.CopyTo(outStream);
                workFile = ScratchArea.FileSystem.GetFile(name);
            }

            _log.Print($"{workFile.FullPath} is now the working file for the image editor.", LoggingLevel.Intermediate);

            if (formatInfo.IsCompressed)
            {
                _log.Print($"Image is compressed using [{formatInfo.Format}] as its pixel format.", LoggingLevel.Intermediate);

                if (_compressor == null)
                {
                    throw new GorgonException(GorgonResult.CannotRead, string.Format(Resources.GORIMG_ERR_COMPRESSED_FILE, formatInfo.Format));
                }

                _log.Print($"Loading image '{workFile.FullPath}'...", LoggingLevel.Simple);
                result = _compressor.Decompress(ref workFile, imageInfo);

                if (result == null)
                {
                    throw new GorgonException(GorgonResult.CannotRead, string.Format(Resources.GORIMG_ERR_COMPRESSED_FILE, formatInfo.Format));
                }

                _log.Print($"Loaded compressed ([{formatInfo.Format}]) image data as [{result.Format}]", LoggingLevel.Intermediate);
            }
            else
            {
                _log.Print($"Loading image '{workFile.FullPath}'...", LoggingLevel.Simple);
                using (Stream workingFileStream = workFile.OpenStream())
                {
                    result = DefaultCodec.LoadFromStream(workingFileStream);
                }
            }

            return (result, workFile, originalFormat);
        }
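
A minimal calling sketch for LoadImageFile (the _imageIO field, the file path and the disposal pattern here are illustrative assumptions, not part of the plugin source):

        // Hypothetical caller: stream a DDS file through the image I/O service and use the result.
        using (FileStream stream = File.OpenRead(@"C:\textures\stone.dds"))
        {
            (IGorgonImage image, IGorgonVirtualFile workingFile, BufferFormat originalFormat) =
                _imageIO.LoadImageFile(stream, "stone.dds");

            using (image)
            {
                // Edit the image here. workingFile points at the scratch-area copy and
                // originalFormat records the pixel format stored in the source file.
            }
        }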
Example #2
        /// <exception cref="System.Exception"/>
        public virtual void TestValueIteratorWithCompression()
        {
            Path          tmpDir = new Path("build/test/test.reduce.task.compression");
            Configuration conf   = new Configuration();
            DefaultCodec  codec  = new DefaultCodec();

            codec.SetConf(conf);
            foreach (TestReduceTask.Pair[] testCase in testCases)
            {
                RunValueIterator(tmpDir, testCase, conf, codec);
            }
        }
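
The codec only becomes usable after SetConf hands it a Configuration; a small sketch of that setup in isolation (GetDefaultExtension is the same call used in Example #5, and ".deflate" is DefaultCodec's standard suffix):

        // Configure a DefaultCodec outside of a test harness.
        Configuration conf = new Configuration();
        DefaultCodec codec = new DefaultCodec();
        codec.SetConf(conf);                          // required before the codec can create streams
        string ext = codec.GetDefaultExtension();     // ".deflate" for DefaultCodec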
Example #3
 public ProtocolAdapterBase(Stream baseStream, PacketBoundTo boundTo)
 {
     BaseStream          = baseStream;
     BufferedReadStream  = new BufferedStream(baseStream);
     BufferedWriteStream = new BufferedStream(baseStream);
     _rawReadCodec       = DefaultCodec.Clone(BufferedReadStream);
     _rawWriteCodec      = DefaultCodec.Clone(BufferedWriteStream);
     BoundTo             = boundTo;
     RemoteBoundTo       = boundTo switch
     {
         PacketBoundTo.Client => PacketBoundTo.Server,
         PacketBoundTo.Server => PacketBoundTo.Client,
         _ => default
     };
 }
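
A hedged construction sketch (ClientProtocolAdapter and the TcpClient wiring are hypothetical; only the base constructor's behaviour is taken from the code above):

     // Hypothetical derived adapter: a client-side adapter reads packets bound to
     // the client and writes packets bound to the server.
     using var tcp = new System.Net.Sockets.TcpClient("localhost", 25565);
     var adapter = new ClientProtocolAdapter(tcp.GetStream(), PacketBoundTo.Client);
     // adapter.BoundTo       == PacketBoundTo.Client
     // adapter.RemoteBoundTo == PacketBoundTo.Server (inverted by the switch expression above)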
Example #4
        /// <summary>Unit tests for SequenceFile metadata.</summary>
        /// <exception cref="System.Exception"/>
        public virtual void TestSequenceFileMetadata()
        {
            Log.Info("Testing SequenceFile with metadata");
            int count = 1024 * 10;
            CompressionCodec codec = new DefaultCodec();
            Path file = new Path(Runtime.GetProperty("test.build.data", ".") + "/test.seq.metadata");
            Path sortedFile = new Path(Runtime.GetProperty("test.build.data", ".") + "/test.sorted.seq.metadata");
            Path recordCompressedFile = new Path(Runtime.GetProperty("test.build.data", ".") + "/test.rc.seq.metadata");
            Path blockCompressedFile = new Path(Runtime.GetProperty("test.build.data", ".") + "/test.bc.seq.metadata");
            FileSystem fs = FileSystem.GetLocal(conf);

            SequenceFile.Metadata theMetadata = new SequenceFile.Metadata();
            theMetadata.Set(new Text("name_1"), new Text("value_1"));
            theMetadata.Set(new Text("name_2"), new Text("value_2"));
            theMetadata.Set(new Text("name_3"), new Text("value_3"));
            theMetadata.Set(new Text("name_4"), new Text("value_4"));
            int seed = new Random().Next();

            try
            {
                // SequenceFile.Writer
                WriteMetadataTest(fs, count, seed, file, SequenceFile.CompressionType.None, null,
                                  theMetadata);
                SequenceFile.Metadata aMetadata = ReadMetadata(fs, file);
                if (!theMetadata.Equals(aMetadata))
                {
                    Log.Info("The original metadata:\n" + theMetadata.ToString());
                    Log.Info("The retrieved metadata:\n" + aMetadata.ToString());
                    throw new RuntimeException("metadata not match:  " + 1);
                }
                // SequenceFile.RecordCompressWriter
                WriteMetadataTest(fs, count, seed, recordCompressedFile,
                                  SequenceFile.CompressionType.Record, codec, theMetadata);
                aMetadata = ReadMetadata(fs, recordCompressedFile);
                if (!theMetadata.Equals(aMetadata))
                {
                    Log.Info("The original metadata:\n" + theMetadata.ToString());
                    Log.Info("The retrieved metadata:\n" + aMetadata.ToString());
                    throw new RuntimeException("metadata not match:  " + 2);
                }
                // SequenceFile.BlockCompressWriter
                WriteMetadataTest(fs, count, seed, blockCompressedFile,
                                  SequenceFile.CompressionType.Block, codec, theMetadata);
                aMetadata = ReadMetadata(fs, blockCompressedFile);
                if (!theMetadata.Equals(aMetadata))
                {
                    Log.Info("The original metadata:\n" + theMetadata.ToString());
                    Log.Info("The retrieved metadata:\n" + aMetadata.ToString());
                    throw new RuntimeException("metadata not match:  " + 3);
                }
                // SequenceFile.Sorter
                SortMetadataTest(fs, file, sortedFile, theMetadata);
                aMetadata = ReadMetadata(fs, recordCompressedFile);
                if (!theMetadata.Equals(aMetadata))
                {
                    Log.Info("The original metadata:\n" + theMetadata.ToString());
                    Log.Info("The retrieved metadata:\n" + aMetadata.ToString());
                    throw new RuntimeException("metadata not match:  " + 4);
                }
            }
            finally
            {
                fs.Close();
            }
            Log.Info("Successfully tested SequenceFile with metadata");
        }
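
A hedged sketch of what the WriteMetadataTest/ReadMetadata helpers boil down to, assuming the port mirrors the Java SequenceFile API (the CreateWriter overload taking a Metadata argument and Reader.GetMetadata; the key/value types are illustrative):

        // Write a file that carries the metadata in its header.
        SequenceFile.Writer writer = SequenceFile.CreateWriter(fs, conf, file, typeof(Text),
            typeof(Text), SequenceFile.CompressionType.None, null, null, theMetadata);
        writer.Append(new Text("key"), new Text("value"));
        writer.Close();

        // Read the metadata back from the file header and compare it with the original.
        SequenceFile.Reader reader = new SequenceFile.Reader(fs, file, conf);
        SequenceFile.Metadata readBack = reader.GetMetadata();
        reader.Close();
        bool matches = theMetadata.Equals(readBack);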
Example #5
        public virtual void TestCompress()
        {
            JobConf job = new JobConf();

            job.Set(JobContext.TaskAttemptId, attempt);
            job.Set(FileOutputFormat.Compress, "true");
            FileOutputFormat.SetOutputPath(job, workDir.GetParent().GetParent());
            FileOutputFormat.SetWorkOutputPath(job, workDir);
            FileSystem fs = workDir.GetFileSystem(job);

            if (!fs.Mkdirs(workDir))
            {
                NUnit.Framework.Assert.Fail("Failed to create output directory");
            }
            string file = "test_compress.txt";
            // A reporter that does nothing
            Reporter reporter = Reporter.Null;
            TextOutputFormat<object, object> theOutputFormat = new TextOutputFormat<object, object>();
            RecordWriter<object, object> theRecordWriter =
                theOutputFormat.GetRecordWriter(localFs, job, file, reporter);

            Org.Apache.Hadoop.IO.Text key1 = new Org.Apache.Hadoop.IO.Text("key1");
            Org.Apache.Hadoop.IO.Text key2 = new Org.Apache.Hadoop.IO.Text("key2");
            Org.Apache.Hadoop.IO.Text val1 = new Org.Apache.Hadoop.IO.Text("val1");
            Org.Apache.Hadoop.IO.Text val2 = new Org.Apache.Hadoop.IO.Text("val2");
            NullWritable nullWritable      = NullWritable.Get();

            try
            {
                theRecordWriter.Write(key1, val1);
                theRecordWriter.Write(null, nullWritable);
                theRecordWriter.Write(null, val1);
                theRecordWriter.Write(nullWritable, val2);
                theRecordWriter.Write(key2, nullWritable);
                theRecordWriter.Write(key1, null);
                theRecordWriter.Write(null, null);
                theRecordWriter.Write(key2, val2);
            }
            finally
            {
                theRecordWriter.Close(reporter);
            }
            StringBuilder expectedOutput = new StringBuilder();

            expectedOutput.Append(key1).Append("\t").Append(val1).Append("\n");
            expectedOutput.Append(val1).Append("\n");
            expectedOutput.Append(val2).Append("\n");
            expectedOutput.Append(key2).Append("\n");
            expectedOutput.Append(key1).Append("\n");
            expectedOutput.Append(key2).Append("\t").Append(val2).Append("\n");
            DefaultCodec codec = new DefaultCodec();

            codec.SetConf(job);
            Path                   expectedFile = new Path(workDir, file + codec.GetDefaultExtension());
            FileInputStream        istream      = new FileInputStream(expectedFile.ToString());
            CompressionInputStream cistream     = codec.CreateInputStream(istream);
            LineReader             reader       = new LineReader(cistream);
            string                 output       = string.Empty;

            Org.Apache.Hadoop.IO.Text @out = new Org.Apache.Hadoop.IO.Text();
            while (reader.ReadLine(@out) > 0)
            {
                output += @out;
                output += "\n";
            }
            reader.Close();
            NUnit.Framework.Assert.AreEqual(expectedOutput.ToString(), output);
        }