/// <summary>
/// Writes the first sample dataset to a new HDF5 file, then reopens the file,
/// reads the dataset back and verifies the round-tripped values match.
/// </summary>
public void WriteAndReadDataset()
        {
            string filename = Path.Combine(folder, "testDataset.H5");
            var    dset     = dsets.First();

            try
            {
                var fileId = Hdf5.CreateFile(filename);
                Assert.IsTrue(fileId > 0);
                Hdf5.WriteDataset(fileId, "/test", dset);
                Hdf5.CloseFile(fileId);
            }
            catch (Exception ex)
            {
                CreateExceptionAssert(ex);
            }

            try
            {
                var fileId = Hdf5.OpenFile(filename);
                Assert.IsTrue(fileId > 0);
                // ReadDataset returns a (success, result) tuple; result is an Array
                // that must be cast to the concrete rank/type.
                double[,] dset2 = (double[, ])Hdf5.ReadDataset <double>(fileId, "/test").result;
                CompareDatasets(dset, dset2);
                // Removed unused `bool same = dset == dset2;` — `==` on arrays is
                // reference equality (always false here) and the value was discarded.

                Hdf5.CloseFile(fileId);
            }
            catch (Exception ex)
            {
                CreateExceptionAssert(ex);
            }
        }
        /// <summary>
        /// Writes a 10x5 matrix of DateTime values to an HDF5 file and verifies
        /// the values read back identical to what was written.
        /// </summary>
        public void WriteAndReadDatetimeDataset()
        {
            string filename = Path.Combine(folder, "testDatetime.H5");
            var    baseTime = new DateTime(2000, 1, 1, 12, 0, 0);
            var    times    = new DateTime[10, 5];

            // Fill each cell with a distinct offset from the base time.
            for (var row = 0; row < times.GetLength(0); row++)
            {
                for (var col = 0; col < times.GetLength(1); col++)
                {
                    times[row, col] = baseTime.AddDays(row + col * 5);
                }
            }

            try
            {
                var fileId = Hdf5.CreateFile(filename);
                Assert.IsTrue(fileId > 0);
                Hdf5.WriteDataset(fileId, "/test", times);

                // Read back from the same open handle and compare element-wise.
                var timesRead = (DateTime[, ])Hdf5.ReadDataset <DateTime>(fileId, "/test").result;
                CompareDatasets(times, timesRead);

                Hdf5.CloseFile(fileId);
            }
            catch (Exception ex)
            {
                CreateExceptionAssert(ex);
            }
        }
        /// <summary>
        /// Writes a 10x5 matrix of TimeSpan values to an HDF5 file and verifies
        /// the values read back identical to what was written.
        /// </summary>
        public void WriteAndReadTimespanDataset()
        {
            string filename   = Path.Combine(folder, "testTimespan.H5");
            var    baseSpan   = new TimeSpan(1, 0, 0, 0, 0);
            var    spans      = new TimeSpan[10, 5];

            // Each cell gets the one-day base span plus a distinct number of hours.
            for (var row = 0; row < spans.GetLength(0); row++)
            {
                for (var col = 0; col < spans.GetLength(1); col++)
                {
                    spans[row, col] = baseSpan.Add(new TimeSpan(row + col * 5, 0, 0));
                }
            }

            try
            {
                var fileId = Hdf5.CreateFile(filename);
                Assert.IsTrue(fileId > 0);
                Hdf5.WriteDataset(fileId, "/test", spans);

                // Read back from the same open handle and compare element-wise.
                TimeSpan[,] spansRead = (TimeSpan[, ])Hdf5.ReadDataset <TimeSpan>(fileId, "/test").result;
                CompareDatasets(spans, spansRead);

                Hdf5.CloseFile(fileId);
            }
            catch (Exception ex)
            {
                CreateExceptionAssert(ex);
            }
        }
        // ----- Example #4 -----
        /// <summary>
        /// Opens an HDF5 file read-only, reads the integer matrix stored at
        /// "group/dataset" and prints it to the console as an aligned table.
        /// </summary>
        /// <param name="filePath">Path of the HDF5 file to read.</param>
        static void ReadFile(string filePath)
        {
            var file  = Hdf5.OpenFile(filePath, true);
            var group = Hdf5.CreateOrOpenGroup(file, "group");

            var(ok, rawData) = Hdf5.ReadDataset <int>(group, "dataset");
            if (ok)
            {
                var data = (int[, ])rawData;
                int rows = data.GetLength(0);
                int cols = data.GetLength(1);
                for (int r = 0; r < rows; ++r)
                {
                    for (int c = 0; c < cols; ++c)
                    {
                        // Width-3 right-aligned cells keep the columns lined up.
                        Write($"{data[r, c],3}");
                    }
                    WriteLine();
                }
            }

            Hdf5.CloseGroup(group);
            Hdf5.CloseFile(file);
        }
        // ----- Example #5 -----
        /// <summary>
        /// Opens a fixed HDF5 file, reads rows 100000-200000 of the "Sequence 2"
        /// dataset from group "A/B", then waits for a key press before exiting.
        /// </summary>
        static void Main(string[] args)
        {
            // NOTE(review): hard-coded absolute path — only works on the author's machine.
            string fileName = @"D:\Shubham\HDF5\HDFinOneLine_1434.H5";
            var    fileID   = Hdf5.OpenFile(fileName);
            var    groupId  = H5G.open(fileID, "A/B");
            Array  dset2    = Hdf5.ReadDataset <Int32>(groupId, "Sequence 2", 100000, 200000);

            // Bug fix: the group and file handles were never released.
            H5G.close(groupId);
            Hdf5.CloseFile(fileID);

            Console.WriteLine("Program Finished...");
            Console.ReadKey();
        }
        // ----- Example #6 -----
        /// <summary>
        /// Creates nested groups containing datasets, reopens the file and verifies
        /// every dataset reads back identical to what was written, including a
        /// deeply nested group created recursively.
        /// </summary>
        public void WriteAndReadGroupsWithDataset()
        {
            string filename = Path.Combine(folder, "testGroups.H5");

            try
            {
                var fileId = Hdf5.CreateFile(filename);
                Assert.IsTrue(fileId > 0);
                var dset = dsets.First();

                var groupId = H5G.create(fileId, Hdf5Utils.NormalizedName("/A"));
                Hdf5.WriteDataset(groupId, Hdf5Utils.NormalizedName("test"), dset);
                var subGroupId  = Hdf5.CreateOrOpenGroup(groupId, Hdf5Utils.NormalizedName("C"));
                // A leading '/' makes the name absolute, so "/D" is created at the
                // file root instead of under /A.
                var subGroupId2 = Hdf5.CreateOrOpenGroup(groupId, Hdf5Utils.NormalizedName("/D")); // will be saved at the root location
                dset = dsets.Skip(1).First();
                Hdf5.WriteDataset(subGroupId, Hdf5Utils.NormalizedName("test2"), dset);
                Hdf5.CloseGroup(subGroupId);
                Hdf5.CloseGroup(subGroupId2);
                Hdf5.CloseGroup(groupId);

                groupId = H5G.create(fileId, Hdf5Utils.NormalizedName("/A/B"));
                dset    = dsets.Skip(1).First();
                Hdf5.WriteDataset(groupId, Hdf5Utils.NormalizedName("test"), dset);
                Hdf5.CloseGroup(groupId);

                groupId = Hdf5.CreateGroupRecursively(fileId, Hdf5Utils.NormalizedName("A/B/C/D/E/F/I"));
                Hdf5.CloseGroup(groupId);
                Hdf5.CloseFile(fileId);

                // Bug fix: the file was previously opened twice in a row, leaking
                // the first handle; open it once.
                fileId = Hdf5.OpenFile(filename);
                Assert.IsTrue(fileId > 0);

                groupId         = H5G.open(fileId, Hdf5Utils.NormalizedName("/A/B"));
                double[,] dset2 = (double[, ])Hdf5.ReadDataset <double>(groupId, Hdf5Utils.NormalizedName("test")).result;
                CompareDatasets(dset, dset2);
                Assert.IsTrue(Hdf5.CloseGroup(groupId) >= 0);

                groupId = H5G.open(fileId, Hdf5Utils.NormalizedName("/A/C"));
                dset2   = (double[, ])Hdf5.ReadDataset <double>(groupId, Hdf5Utils.NormalizedName("test2")).result;
                CompareDatasets(dset, dset2);
                Assert.IsTrue(Hdf5.CloseGroup(groupId) >= 0);

                dset  = dsets.First();
                dset2 = (double[, ])Hdf5.ReadDataset <double>(fileId, Hdf5Utils.NormalizedName("/A/test")).result;
                CompareDatasets(dset, dset2);
                // Bug fix: "A/B/C/D/E/F/I" was created as a GROUP (CreateGroupRecursively),
                // so the existence check must ask for a Group, not a Dataset.
                Assert.IsTrue(Hdf5Utils.ItemExists(fileId, Hdf5Utils.NormalizedName("A/B/C/D/E/F/I"), DataTypes.Hdf5ElementType.Group));

                Assert.IsTrue(Hdf5.CloseFile(fileId) == 0);
            }
            catch (Exception ex)
            {
                CreateExceptionAssert(ex);
            }
        }
        /// <summary>
        /// Builds a chunked dataset by appending all sample chunks, then reads back
        /// the row range [8, 21] and verifies the subset's size and the values of
        /// its first column.
        /// </summary>
        public void WriteAndReadSubsetOfDataset()
        {
            string filename = Path.Combine(folder, "testSubset.H5");

            try
            {
                var fileId = Hdf5.CreateFile(filename);
                Assert.IsTrue(fileId > 0);
                // Removed unused local `chunkSize` — it was never passed anywhere.
                using (var chunkedDset = new ChunkedDataset <double>("/test", fileId, dsets.First()))
                {
                    foreach (var ds in dsets.Skip(1))
                    {
                        chunkedDset.AppendDataset(ds);
                    }
                    // Removed stray empty statement (`;`) after the loop.
                }

                Hdf5.CloseFile(fileId);
            }
            catch (Exception ex)
            {
                CreateExceptionAssert(ex);
            }

            try
            {
                var   fileId   = Hdf5.OpenFile(filename);
                ulong begIndex = 8;
                ulong endIndex = 21;
                var   dset     = Hdf5.ReadDataset <double>(fileId, "/test", begIndex, endIndex);
                Hdf5.CloseFile(fileId);

                Assert.IsTrue(dset.Rank == dsets.First().Rank);
                int count = Convert.ToInt32(endIndex - begIndex + 1);
                Assert.IsTrue(count == dset.GetLength(0));
                // Create a range of doubles from 8 to 21 (both inclusive).
                var testRange = Enumerable.Range((int)begIndex, count).Select(t => (double)t);

                // Get the first column from row index 8 (the 9th row) to row index 21 (22nd row).
                var x0Range = dset.Cast <double>().Where((d, i) => i % 5 == 0);
                Assert.IsTrue(testRange.SequenceEqual(x0Range));
            }
            catch (Exception ex)
            {
                CreateExceptionAssert(ex);
            }
        }
        // ----- Example #8 -----
        /// <summary>
        /// Writes an int array to <paramref name="filename"/>, reopens the file,
        /// overwrites the dataset with a modified array and checks the read-back
        /// matches. Returns the still-open file handle; the caller must close it.
        /// </summary>
        private long WriteDataset(string filename)
        {
            long fileHandle = Hdf5.CreateFile(filename);

            int[] values = { 1, 2, 4, 5, 0 };
            Hdf5.WriteDatasetFromArray <int>(fileHandle, "blah", values);
            Hdf5.CloseFile(fileHandle);
            var what = "???"; // breakpoint in VS to test h5 file contents independently before next write step

            fileHandle = Hdf5.OpenFile(filename);
            values[4]  = 6;
            Hdf5.WriteDatasetFromArray <int>(fileHandle, "blah", values); // This command throws several debug errors from PInvoke
            var(success, result) = Hdf5.ReadDataset <int>(fileHandle, "blah");
            Assert.IsTrue(success);
            Assert.IsTrue(result.Cast <int>().SequenceEqual(values));
            // loading the hdf5 file shows it only has {1, 2, 4, 5, 0} stored.
            return(fileHandle);
        }
        // ----- Example #9 -----
        /// <summary>
        /// Verifies that, with OverrideExistingData enabled, rewriting a same-size
        /// dataset ten times does not grow the file: the result must be the same
        /// length as a file produced by a single write-then-overwrite.
        /// </summary>
        public void OverrideDataset()
        {
            string filename1 = "overridedataset1.h5";
            long   id        = WriteDataset(filename1);

            Hdf5.CloseFile(id);
            // Measure after the handle is closed so the size reflects flushed data.
            var l1 = new FileInfo(filename1).Length;
            File.Delete(filename1);
            string filename = "overridedataset.h5";

            Hdf5.Settings.EnableErrorReporting(true);
            Hdf5.Settings.OverrideExistingData = true;
            long tef2 = Hdf5.CreateFile(filename);

            int[] blah = { 1, 2, 4, 5, 0 };
            Hdf5.WriteDatasetFromArray <int>(tef2, "blah", blah);
            Hdf5.CloseFile(tef2);
            var what = "???"; // breakpoint in VS to test h5 file contents independently before next write step

            tef2 = Hdf5.OpenFile(filename);
            for (int i = 0; i < 10; i++)
            {
                blah[4] = i + i;
                Hdf5.WriteDatasetFromArray <int>(tef2, "blah", blah);
            }

            var(success, result) = Hdf5.ReadDataset <int>(tef2, "blah");
            Assert.IsTrue(success);
            Assert.IsTrue(result.Cast <int>().SequenceEqual(blah));

            Hdf5.CloseFile(tef2);
            // Bug fix: the original read `fi.Length` again — the cached length of the
            // first, already-deleted file — so the final assert compared l1 with
            // itself. Measure the override file's own size instead.
            var l2 = new FileInfo(filename).Length;
            File.Delete(filename);
            Assert.IsTrue(l1 == l2);
        }
        // ----- Example #10 -----
        /// <summary>
        /// Verifies that with OverrideExistingData enabled an existing dataset can
        /// be replaced by a larger array, while a sibling dataset stays untouched.
        /// </summary>
        public void OverrideAndIncreaseDataset()
        {
            string filename = "overrideandincreasedataset.h5";

            Hdf5.Settings.EnableErrorReporting(true);
            Hdf5.Settings.OverrideExistingData = true;
            long fileId = Hdf5.CreateFile(filename);

            int[] first  = { 1, 2, 3, 4, 5 };
            int[] second = { 11, 12, 13, 14, 15 };
            Hdf5.WriteDatasetFromArray <int>(fileId, "d1", first);
            Hdf5.WriteDatasetFromArray <int>(fileId, "d2", second);
            Hdf5.CloseFile(fileId);

            // Reopen and confirm both datasets read back exactly as written.
            fileId = Hdf5.OpenFile(filename);
            var(okD1, readD1) = Hdf5.ReadDataset <int>(fileId, "d1");
            var(okD2, readD2) = Hdf5.ReadDataset <int>(fileId, "d2");
            Hdf5.CloseFile(fileId);

            int[] larger = { 21, 22, 24, 25, 26, 27, 28, 29, 210 };
            Assert.IsTrue(okD1);
            Assert.IsTrue(readD1.Cast <int>().SequenceEqual(first));
            Assert.IsTrue(okD2);
            Assert.IsTrue(readD2.Cast <int>().SequenceEqual(second));

            // Override d1 with a longer array, then verify both datasets again.
            fileId = Hdf5.OpenFile(filename);
            Hdf5.WriteDatasetFromArray <int>(fileId, "d1", larger);
            Hdf5.CloseFile(fileId);
            fileId = Hdf5.OpenFile(filename);
            var(okNewD1, readNewD1) = Hdf5.ReadDataset <int>(fileId, "d1");
            var(okOldD2, readOldD2) = Hdf5.ReadDataset <int>(fileId, "d2");
            Hdf5.CloseFile(fileId);
            Assert.IsTrue(okNewD1);
            Assert.IsTrue(readNewD1.Cast <int>().SequenceEqual(larger));
            Assert.IsTrue(okOldD2);
            Assert.IsTrue(readOldD2.Cast <int>().SequenceEqual(second));
        }
        // ----- Example #11 -----
        /// <summary>
        /// Loads layer weights from the opened HDF5 handle <paramref name="f"/>,
        /// pairs them with the variables of the matching <paramref name="layers"/>,
        /// and batch-assigns them via the keras backend.
        /// </summary>
        /// <param name="f">Handle of the opened HDF5 file/group containing the saved weights.</param>
        /// <param name="layers">Model layers that should receive the stored weights.</param>
        /// <returns>The (variable, value) pairs that were assigned.</returns>
        /// <exception cref="ValueError">
        /// Thrown when the saved keras version is older than 2.5.0, or when the
        /// number of saved layers/weights does not match the model.
        /// </exception>
        public static List <(IVariableV1, NDArray)> load_weights_from_hdf5_group(long f, List <ILayer> layers)
        {
            string original_keras_version = "2.5.0";
            string original_backend       = null;

            if (Hdf5.AttributeExists(f, "keras_version"))
            {
                var(success, attr) = Hdf5.ReadStringAttributes(f, "keras_version", "");
                if (success)
                {
                    original_keras_version = attr.First();
                }
                // keras version should be 2.5.0+
                var versionParts = original_keras_version.Split('.');
                var ver_major    = int.Parse(versionParts[0]);
                var ver_minor    = int.Parse(versionParts[1]);
                if (ver_major < 2 || (ver_major == 2 && ver_minor < 5))
                {
                    throw new ValueError("keras version should be 2.5.0 or later.");
                }
            }
            if (Hdf5.AttributeExists(f, "backend"))
            {
                var(success, attr) = Hdf5.ReadStringAttributes(f, "backend", "");
                if (success)
                {
                    original_backend = attr.First();
                }
            }

            // Keep only layers that actually own weights.
            var filtered_layers = new List <ILayer>();

            foreach (var layer in layers)
            {
                var weights = _legacy_weights(layer);
                if (weights.Count > 0)
                {
                    filtered_layers.append(layer);
                }
            }

            string[] layer_names          = load_attributes_from_hdf5_group(f, "layer_names");
            var      filtered_layer_names = new List <string>();
            // Precompute the model's layer names: avoids an O(n^2) scan when the
            // saved file contains many layers.
            var      model_layer_names    = new HashSet <string>(filtered_layers.Select(x => x.Name));

            foreach (var name in layer_names)
            {
                if (!model_layer_names.Contains(name))
                {
                    continue;
                }
                long g            = H5G.open(f, name);
                var  weight_names = load_attributes_from_hdf5_group(g, "weight_names");
                if (weight_names.Any())
                {
                    filtered_layer_names.Add(name);
                }
                H5G.close(g);
            }

            layer_names = filtered_layer_names.ToArray();
            if (layer_names.Length != filtered_layers.Count)
            {
                // Bug fix: the original interpolated the array itself (which prints
                // "System.String[]") instead of the number of saved layers.
                throw new ValueError("You are trying to load a weight file " +
                                     $"containing {layer_names.Length}" +
                                     $" layers into a model with {filtered_layers.Count} layers.");
            }

            var weight_value_tuples = new List <(IVariableV1, NDArray)>();

            foreach (var(k, name) in enumerate(layer_names))
            {
                var  weight_values = new List <NDArray>();
                long g             = H5G.open(f, name);
                var  weight_names  = load_attributes_from_hdf5_group(g, "weight_names");
                foreach (var weight_name in weight_names)
                {
                    (bool success, Array result) = Hdf5.ReadDataset <float>(g, weight_name);
                    if (success)
                    {
                        weight_values.Add(np.array(result));
                    }
                }
                H5G.close(g);
                var layer            = filtered_layers[k];
                var symbolic_weights = _legacy_weights(layer);
                preprocess_weights_for_loading(layer, weight_values, original_keras_version, original_backend);
                if (weight_values.Count != symbolic_weights.Count)
                {
                    // Bug fix: the original message fragments were concatenated
                    // without separating spaces ("...named Xin the current model...").
                    throw new ValueError($"Layer #{k} (named {layer.Name} " +
                                         "in the current model) was found to " +
                                         $"correspond to layer {name} in the save file. " +
                                         $"However the new layer {layer.Name} expects " +
                                         $"{symbolic_weights.Count} weights, but the saved weights have " +
                                         $"{weight_values.Count} elements.");
                }
                weight_value_tuples.AddRange(zip(symbolic_weights, weight_values));
            }

            keras.backend.batch_set_value(weight_value_tuples);
            return(weight_value_tuples);
        }