diff --git a/MatFileHandler.Tests/MatFileReaderHdfTests.cs b/MatFileHandler.Tests/MatFileReaderHdfTests.cs index a5e7995..9b086e4 100644 --- a/MatFileHandler.Tests/MatFileReaderHdfTests.cs +++ b/MatFileHandler.Tests/MatFileReaderHdfTests.cs @@ -1,5 +1,6 @@ using NUnit.Framework; using System.IO; +using System.Numerics; namespace MatFileHandler.Tests { @@ -68,6 +69,32 @@ namespace MatFileHandler.Tests Assert.That(matrix[2, 1], Is.EqualTo(6.0)); } + /// <summary> + /// Test reading a two-dimensional complex array. + /// </summary> + [Test] + public void TestComplexMatrix() + { + var matFile = ReadHdfTestFile("matrix_complex"); + var matrix = matFile["matrix"].Value as IArrayOf<Complex>; + Assert.That(matrix.Dimensions, Is.EqualTo(new[] { 3, 2 })); + Assert.That(matrix.ConvertToComplexArray(), Is.EqualTo(new[] + { + new Complex(1.0, 4.0), + new Complex(3.0, 1.0), + new Complex(5.0, 0.25), + new Complex(2.0, 2.0), + new Complex(4.0, 0.5), + new Complex(6.0, 0.125), + })); + Assert.That(matrix[0, 0], Is.EqualTo(new Complex(1.0, 4.0))); + Assert.That(matrix[0, 1], Is.EqualTo(new Complex(2.0, 2.0))); + Assert.That(matrix[1, 0], Is.EqualTo(new Complex(3.0, 1.0))); + Assert.That(matrix[1, 1], Is.EqualTo(new Complex(4.0, 0.5))); + Assert.That(matrix[2, 0], Is.EqualTo(new Complex(5.0, 0.25))); + Assert.That(matrix[2, 1], Is.EqualTo(new Complex(6.0, 0.125))); + } + /// <summary> /// Test reading lower and upper limits of integer data types. /// </summary> [Test] @@ -102,6 +129,51 @@ namespace MatFileHandler.Tests CheckLimits(array as IArrayOf<ulong>, CommonData.UInt64Limits); + /// <summary> + /// Test reading lower and upper limits of integer-based complex data types. 
+ /// </summary> + [Test] + public void TestComplexLimits() + { + var matFile = ReadHdfTestFile("limits_complex"); + IArray array; + array = matFile["int8_complex"].Value; + CheckComplexLimits(array as IArrayOf<ComplexOf<sbyte>>, CommonData.Int8Limits); + Assert.That( + array.ConvertToComplexArray(), + Is.EqualTo(new[] { -128.0 + (127.0 * Complex.ImaginaryOne), 127.0 - (128.0 * Complex.ImaginaryOne) })); + + array = matFile["uint8_complex"].Value; + CheckComplexLimits(array as IArrayOf<ComplexOf<byte>>, CommonData.UInt8Limits); + + array = matFile["int16_complex"].Value; + CheckComplexLimits(array as IArrayOf<ComplexOf<short>>, CommonData.Int16Limits); + + array = matFile["uint16_complex"].Value; + CheckComplexLimits(array as IArrayOf<ComplexOf<ushort>>, CommonData.UInt16Limits); + + array = matFile["int32_complex"].Value; + CheckComplexLimits(array as IArrayOf<ComplexOf<int>>, CommonData.Int32Limits); + + array = matFile["uint32_complex"].Value; + CheckComplexLimits(array as IArrayOf<ComplexOf<uint>>, CommonData.UInt32Limits); + + array = matFile["int64_complex"].Value; + CheckComplexLimits(array as IArrayOf<ComplexOf<long>>, CommonData.Int64Limits); + + array = matFile["uint64_complex"].Value; + CheckComplexLimits(array as IArrayOf<ComplexOf<ulong>>, CommonData.UInt64Limits); + } + + private static void CheckComplexLimits<T>(IArrayOf<ComplexOf<T>> array, T[] limits) + where T : struct + { + Assert.That(array, Is.Not.Null); + Assert.That(array.Dimensions, Is.EqualTo(new[] { 1, 2 })); + Assert.That(array[0], Is.EqualTo(new ComplexOf<T>(limits[0], limits[1]))); + Assert.That(array[1], Is.EqualTo(new ComplexOf<T>(limits[1], limits[0]))); + } + private static void CheckLimits<T>(IArrayOf<T> array, T[] limits) where T : struct { diff --git a/MatFileHandler.Tests/test-data/hdf/limits_complex.mat b/MatFileHandler.Tests/test-data/hdf/limits_complex.mat new file mode 100644 index 0000000..542a420 Binary files /dev/null and b/MatFileHandler.Tests/test-data/hdf/limits_complex.mat differ diff --git a/MatFileHandler.Tests/test-data/hdf/matrix_complex.mat b/MatFileHandler.Tests/test-data/hdf/matrix_complex.mat new file mode 100644 index 
0000000..5529ee1 Binary files /dev/null and b/MatFileHandler.Tests/test-data/hdf/matrix_complex.mat differ diff --git a/MatFileHandler/HdfFileReader.cs b/MatFileHandler/HdfFileReader.cs index fa14d82..d8b348c 100644 --- a/MatFileHandler/HdfFileReader.cs +++ b/MatFileHandler/HdfFileReader.cs @@ -393,6 +393,38 @@ namespace MatFileHandler var numberOfElements = dims.NumberOfElements(); var dataSize = numberOfElements * SizeOfArrayElement(arrayType); var storageSize = (int)H5D.get_storage_size(datasetId); + var dataSetType = H5D.get_type(datasetId); + var dataSetTypeClass = H5T.get_class(dataSetType); + var isCompound = dataSetTypeClass == H5T.class_t.COMPOUND; + if (isCompound) + { + var h5Type = H5tTypeFromArrayType(arrayType); + var h5Size = H5T.get_size(h5Type); + var h5tComplexReal = H5T.create(H5T.class_t.COMPOUND, h5Size); + H5T.insert(h5tComplexReal, "real", IntPtr.Zero, h5Type); + var realData = ReadDataset(datasetId, h5tComplexReal, dataSize); + var convertedRealData = ConvertDataToProperType<T>(realData, arrayType); + var h5tComplexImaginary = H5T.create(H5T.class_t.COMPOUND, h5Size); + H5T.insert(h5tComplexImaginary, "imag", IntPtr.Zero, h5Type); + var imaginaryData = ReadDataset(datasetId, h5tComplexImaginary, dataSize); + var convertedImaginaryData = ConvertDataToProperType<T>(imaginaryData, arrayType); + if (arrayType == ArrayType.MxDouble) + { + var complexData = + (convertedRealData as double[]) + .Zip(convertedImaginaryData as double[], (x, y) => new Complex(x, y)) + .ToArray(); + return new HdfNumericalArrayOf<Complex>(dims, complexData); + } + else + { + var complexData = + convertedRealData + .Zip(convertedImaginaryData, (x, y) => new ComplexOf<T>(x, y)) + .ToArray(); + return new HdfNumericalArrayOf<ComplexOf<T>>(dims, complexData); + } + } if (dataSize != storageSize) { throw new Exception("Data size mismatch.");