From b0ae15abc9e773e4ab8ee160885a47876a3649fb Mon Sep 17 00:00:00 2001
From: Alexander Luzgarev
Date: Sun, 10 Mar 2019 12:35:02 +0100
Subject: [PATCH] Support complex numerical arrays

---
 MatFileHandler.Tests/MatFileReaderHdfTests.cs |  72 ++++++++++++++++++
 .../test-data/hdf/limits_complex.mat          | Bin 0 -> 4256 bytes
 .../test-data/hdf/matrix_complex.mat          | Bin 0 -> 2064 bytes
 MatFileHandler/HdfFileReader.cs               |  32 ++++++++
 4 files changed, 104 insertions(+)
 create mode 100644 MatFileHandler.Tests/test-data/hdf/limits_complex.mat
 create mode 100644 MatFileHandler.Tests/test-data/hdf/matrix_complex.mat

diff --git a/MatFileHandler.Tests/MatFileReaderHdfTests.cs b/MatFileHandler.Tests/MatFileReaderHdfTests.cs
index a5e7995..9b086e4 100644
--- a/MatFileHandler.Tests/MatFileReaderHdfTests.cs
+++ b/MatFileHandler.Tests/MatFileReaderHdfTests.cs
@@ -1,5 +1,6 @@
 using NUnit.Framework;
 using System.IO;
+using System.Numerics;
 
 namespace MatFileHandler.Tests
 {
@@ -68,6 +69,32 @@ namespace MatFileHandler.Tests
             Assert.That(matrix[2, 1], Is.EqualTo(6.0));
         }
 
+        /// <summary>
+        /// Test reading a two-dimensional complex array.
+        /// </summary>
+        [Test]
+        public void TestComplexMatrix()
+        {
+            var matFile = ReadHdfTestFile("matrix_complex");
+            var matrix = matFile["matrix"].Value as IArrayOf<Complex>;
+            Assert.That(matrix.Dimensions, Is.EqualTo(new[] { 3, 2 }));
+            Assert.That(matrix.ConvertToComplexArray(), Is.EqualTo(new[]
+            {
+                new Complex(1.0, 4.0),
+                new Complex(3.0, 1.0),
+                new Complex(5.0, 0.25),
+                new Complex(2.0, 2.0),
+                new Complex(4.0, 0.5),
+                new Complex(6.0, 0.125),
+            }));
+            Assert.That(matrix[0, 0], Is.EqualTo(new Complex(1.0, 4.0)));
+            Assert.That(matrix[0, 1], Is.EqualTo(new Complex(2.0, 2.0)));
+            Assert.That(matrix[1, 0], Is.EqualTo(new Complex(3.0, 1.0)));
+            Assert.That(matrix[1, 1], Is.EqualTo(new Complex(4.0, 0.5)));
+            Assert.That(matrix[2, 0], Is.EqualTo(new Complex(5.0, 0.25)));
+            Assert.That(matrix[2, 1], Is.EqualTo(new Complex(6.0, 0.125)));
+        }
+
         /// <summary>
         /// Test reading lower and upper limits of integer data types.
         /// </summary>
@@ -102,6 +129,51 @@ namespace MatFileHandler.Tests
             CheckLimits(array as IArrayOf<ulong>, CommonData.UInt64Limits);
         }
 
+        /// <summary>
+        /// Test reading lower and upper limits of integer-based complex data types.
+        /// </summary>
+        [Test]
+        public void TestComplexLimits()
+        {
+            var matFile = ReadHdfTestFile("limits_complex");
+            IArray array;
+            array = matFile["int8_complex"].Value;
+            CheckComplexLimits(array as IArrayOf<ComplexOf<sbyte>>, CommonData.Int8Limits);
+            Assert.That(
+                array.ConvertToComplexArray(),
+                Is.EqualTo(new[] { -128.0 + (127.0 * Complex.ImaginaryOne), 127.0 - (128.0 * Complex.ImaginaryOne) }));
+
+            array = matFile["uint8_complex"].Value;
+            CheckComplexLimits(array as IArrayOf<ComplexOf<byte>>, CommonData.UInt8Limits);
+
+            array = matFile["int16_complex"].Value;
+            CheckComplexLimits(array as IArrayOf<ComplexOf<short>>, CommonData.Int16Limits);
+
+            array = matFile["uint16_complex"].Value;
+            CheckComplexLimits(array as IArrayOf<ComplexOf<ushort>>, CommonData.UInt16Limits);
+
+            array = matFile["int32_complex"].Value;
+            CheckComplexLimits(array as IArrayOf<ComplexOf<int>>, CommonData.Int32Limits);
+
+            array = matFile["uint32_complex"].Value;
+            CheckComplexLimits(array as IArrayOf<ComplexOf<uint>>, CommonData.UInt32Limits);
+
+            array = matFile["int64_complex"].Value;
+            CheckComplexLimits(array as IArrayOf<ComplexOf<long>>, CommonData.Int64Limits);
+
+            array = matFile["uint64_complex"].Value;
+            CheckComplexLimits(array as IArrayOf<ComplexOf<ulong>>, CommonData.UInt64Limits);
+        }
+
+        private static void CheckComplexLimits<T>(IArrayOf<ComplexOf<T>> array, T[] limits)
+            where T : struct
+        {
+            Assert.That(array, Is.Not.Null);
+            Assert.That(array.Dimensions, Is.EqualTo(new[] { 1, 2 }));
+            Assert.That(array[0], Is.EqualTo(new ComplexOf<T>(limits[0], limits[1])));
+            Assert.That(array[1], Is.EqualTo(new ComplexOf<T>(limits[1], limits[0])));
+        }
+
         private static void CheckLimits<T>(IArrayOf<T> array, T[] limits)
             where T : struct
         {
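For reference, each <type>_complex variable in the limits fixture is asserted to be a 1x2 vector whose first element combines the type's lower and upper limits as (real, imaginary) and whose second element swaps them, exactly as CheckComplexLimits spells out. A small illustration of the expected int8_complex values in plain System.Numerics terms (the class name below is made up for the example and is not part of the patch):

    using System.Numerics;

    internal static class Int8ComplexLimitsSketch
    {
        // Expected contents of the 1x2 "int8_complex" variable after conversion
        // to System.Numerics.Complex, matching the assertion in TestComplexLimits.
        public static readonly Complex[] Expected =
        {
            new Complex(-128.0, 127.0), // sbyte.MinValue + sbyte.MaxValue * i
            new Complex(127.0, -128.0), // sbyte.MaxValue + sbyte.MinValue * i
        };

        // The test writes the same pair with Complex.ImaginaryOne arithmetic:
        // -128.0 + (127.0 * Complex.ImaginaryOne) == new Complex(-128.0, 127.0)
        //  127.0 - (128.0 * Complex.ImaginaryOne) == new Complex(127.0, -128.0)
    }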
diff --git a/MatFileHandler.Tests/test-data/hdf/limits_complex.mat b/MatFileHandler.Tests/test-data/hdf/limits_complex.mat
new file mode 100644
index 0000000000000000000000000000000000000000..542a420763cea3179748cd418550fb77b0cbb669
GIT binary patch
literal 4256
zcmeHJPj3=I6o1RM&00%>nwZ9Tm?MWu7K%tY6$^$YQX5O62a~#~v?YPI0Zh2D2M>Dl
zBY5=a(U0Iq@mt_~GwURX|V4FFl7ramBn1L~+~yR|wRqigy;E#r*A^4Q%
zKFQcjU)5?Tnhu}FPsF2xR~5oWw8|FC^j`M=PtH5t3m9F1-?UgWnv5$g
z1BmghvA%XE#(~vuxxX@%FxL5;ExQ}BF4{0)v!9&pcOBjmWK2Im+GYz{Ueh|Y1YQz^
z^YGG?Vm0O^m*&`s5-%kt%1grx+wjiEx<8~pCi5v_n4b&arMJ0-yPu$Pmw*=?h4bJ=
z_yX*X@iMYjPJIww5);KMtz&FZ5=)p-8|tUQj%+?XmVjR&k9h>Y)DrNcIyMh}=9A7C
zKOO>2Z)zj_BqoZVLDoh_`op2W2tLGR83XP|U5W&sO*Y)vIe}xD`~PnW?XTFGnk2z{jn$$4sIGrwt1LPBDqI8D^8@x4{@gn1?B>MuUT
zHEFc7;iJS!uj~Br$z|uN-5j7&UsopOLVi*%21KCyO}6k?W9~bXaxekeg9(J+X&F#V
zFTB6;x{Bj5$jwn#12NI{G&2p<)$y8gT^$7>2(RyT&ZY2vpu#HnEn`2-RabE`QX^$Q
g$eIV|8GNVxATd$=%xX|wMc@{y7r}du<|BUZ4}U2ef&c&j

literal 0
HcmV?d00001

diff --git a/MatFileHandler.Tests/test-data/hdf/matrix_complex.mat b/MatFileHandler.Tests/test-data/hdf/matrix_complex.mat
new file mode 100644
index 0000000000000000000000000000000000000000..5529ee172e635a10a3dabf199612abc5017e9ae7
GIT binary patch
literal 2064
zcmeHIJ5K^Z5T4_q7nB4aF~n+03nT}k;x&MPfy6^1BwCor35Sr}LBb=kw6M_5pW)A-
zw6wJJCs^3y?(M9JghXOvA#=HWJ2N}8-_FhM8JX&F<`DOzJ7{F8+s(FZMX+p}gJ!qq
z=vdC46-#@G2xfbhIj|bo?dVt;cF-_;7}GEo*W*cD(=e{Z4zQ3r+QojoWjQ9sqRbk_
zX#!F_MMDnf_%{`JVBahl*bKO)B#3wfj7dW3cU@=zH5w>{_k>RruK<4l!en2e
z{FC#lr};dvnQ}de6b6qzF(Mk$JjZ`hfkHl0=370d{Zg00GxeD)gJTYQ?Hk@nLJHFH
zOH^Rd%~hj>3T(NV6pt{-<-L@DQuo5G8^6IEoHM&d?OCX-Ug*
z_NOy2E!NMXIAaeHSE1$7@P3`(_mpdZP+Fi$DDCT{ztB7&U80P8BY&9uBUfJ~)||uW
z(e-5)%U>P)bq5?ryOBLA9o&D!8>$chNbH^Hc
Q7uZW}4UO*b%C-PL0YtB6tN;K2

literal 0
HcmV?d00001

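The two .mat files above are MATLAB v7.3 (HDF5-based) test fixtures: matrix_complex.mat holds the 3x2 complex double matrix named "matrix", and limits_complex.mat holds the 1x2 integer-complex vectors checked earlier. A minimal consumer-side sketch of reading the matrix fixture, assuming the public MatFileReader entry point dispatches to the HDF reader for v7.3 files (the path and class name below are illustrative, not part of the patch):

    using System;
    using System.IO;
    using System.Numerics;
    using MatFileHandler;

    internal static class ComplexReadSketch
    {
        public static void Main()
        {
            // Path is illustrative; point it at the test fixture in the repository.
            using (var stream = new FileStream("test-data/hdf/matrix_complex.mat", FileMode.Open, FileAccess.Read))
            {
                var matFile = new MatFileReader(stream).Read();

                // Complex double data surfaces as IArrayOf<Complex>.
                var matrix = matFile["matrix"].Value as IArrayOf<Complex>;
                var z = matrix[0, 0];
                Console.WriteLine($"{z.Real} + {z.Imaginary}i"); // 1 + 4i in this fixture
            }
        }
    }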
diff --git a/MatFileHandler/HdfFileReader.cs b/MatFileHandler/HdfFileReader.cs
index fa14d82..d8b348c 100644
--- a/MatFileHandler/HdfFileReader.cs
+++ b/MatFileHandler/HdfFileReader.cs
@@ -393,6 +393,38 @@ namespace MatFileHandler
             var numberOfElements = dims.NumberOfElements();
             var dataSize = numberOfElements * SizeOfArrayElement(arrayType);
             var storageSize = (int)H5D.get_storage_size(datasetId);
+            var dataSetType = H5D.get_type(datasetId);
+            var dataSetTypeClass = H5T.get_class(dataSetType);
+            var isCompound = dataSetTypeClass == H5T.class_t.COMPOUND;
+            if (isCompound)
+            {
+                var h5Type = H5tTypeFromArrayType(arrayType);
+                var h5Size = H5T.get_size(h5Type);
+                var h5tComplexReal = H5T.create(H5T.class_t.COMPOUND, h5Size);
+                H5T.insert(h5tComplexReal, "real", IntPtr.Zero, h5Type);
+                var realData = ReadDataset(datasetId, h5tComplexReal, dataSize);
+                var convertedRealData = ConvertDataToProperType<T>(realData, arrayType);
+                var h5tComplexImaginary = H5T.create(H5T.class_t.COMPOUND, h5Size);
+                H5T.insert(h5tComplexImaginary, "imag", IntPtr.Zero, h5Type);
+                var imaginaryData = ReadDataset(datasetId, h5tComplexImaginary, dataSize);
+                var convertedImaginaryData = ConvertDataToProperType<T>(imaginaryData, arrayType);
+                if (arrayType == ArrayType.MxDouble)
+                {
+                    var complexData =
+                        (convertedRealData as double[])
+                        .Zip(convertedImaginaryData as double[], (x, y) => new Complex(x, y))
+                        .ToArray();
+                    return new HdfNumericalArrayOf<Complex>(dims, complexData);
+                }
+                else
+                {
+                    var complexData =
+                        convertedRealData
+                        .Zip(convertedImaginaryData, (x, y) => new ComplexOf<T>(x, y))
+                        .ToArray();
+                    return new HdfNumericalArrayOf<ComplexOf<T>>(dims, complexData);
+                }
+            }
             if (dataSize != storageSize)
             {
                 throw new Exception("Data size mismatch.");
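The reader hunk above handles MATLAB's v7.3 storage of complex arrays, where the dataset is an HDF5 compound type with "real" and "imag" members: it reads each member separately through a one-field compound memory type and then zips the two halves into Complex (for double data) or ComplexOf<T> (for the other numeric types). Below is a standalone sketch of that single-member extraction using HDF.PInvoke directly; the helper name and the use of long as the handle type are assumptions based on the HDF.PInvoke 1.10 bindings, not code from the patch.

    using System;
    using System.Runtime.InteropServices;
    using HDF.PInvoke;

    internal static class CompoundMemberReadSketch
    {
        // Reads the "real" or "imag" member of a compound dataset of doubles.
        public static double[] ReadDoubleMember(long datasetId, string memberName, int numberOfElements)
        {
            // A one-member compound memory type makes H5D.read extract just that field,
            // tightly packed, for every element of the dataset.
            var memberType = H5T.create(H5T.class_t.COMPOUND, H5T.get_size(H5T.NATIVE_DOUBLE));
            H5T.insert(memberType, memberName, IntPtr.Zero, H5T.NATIVE_DOUBLE);

            var result = new double[numberOfElements];
            var handle = GCHandle.Alloc(result, GCHandleType.Pinned);
            try
            {
                H5D.read(datasetId, memberType, H5S.ALL, H5S.ALL, H5P.DEFAULT, handle.AddrOfPinnedObject());
            }
            finally
            {
                handle.Free();
                H5T.close(memberType);
            }

            return result;
        }
    }

The patch itself routes the raw bytes through ReadDataset and ConvertDataToProperType before zipping, so the sketch only mirrors the member-selection idea, not the library's conversion path.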