Fixed unit tests for `Matrix.sum(_:axies:)`

Vincent Esche 2019-08-11 16:21:22 +02:00
parent 51f9480325
commit cc1a716740
1 changed file with 34 additions and 1 deletion


@@ -515,6 +515,23 @@ class MatrixTests: XCTestCase {
         XCTAssertEqual(actual, expected, accuracy: 1e-8)
     }
+    func test_sum_matrix_rows_double() {
+        typealias Scalar = Double
+        let lhs: Matrix<Scalar> = [
+            [1, 2, 3],
+            [4, 5, 6],
+        ]
+        let actual = sum(lhs, axies: .row)
+        let expected: Matrix<Scalar> = [
+            [6],
+            [15],
+        ]
+        XCTAssertEqual(actual, expected, accuracy: 1e-5)
+    }
     func test_sum_matrix_rows_float() {
         typealias Scalar = Float
@@ -523,7 +540,7 @@ class MatrixTests: XCTestCase {
             [4, 5, 6],
         ]
-        let actual = sum(lhs, axies: .column)
+        let actual = sum(lhs, axies: .row)
         let expected: Matrix<Scalar> = [
             [6],
             [15],
@@ -532,6 +549,22 @@ class MatrixTests: XCTestCase {
         XCTAssertEqual(actual, expected, accuracy: 1e-5)
     }
+    func test_sum_matrix_columns_double() {
+        typealias Scalar = Double
+        let lhs: Matrix<Scalar> = [
+            [1, 2, 3],
+            [4, 5, 6],
+        ]
+        let actual = sum(lhs, axies: .column)
+        let expected: Matrix<Scalar> = [
+            [5, 7, 9],
+        ]
+        XCTAssertEqual(actual, expected, accuracy: 1e-5)
+    }
     func test_sum_matrix_columns_float() {
         typealias Scalar = Float
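
For reference, a minimal sketch of the behavior these tests pin down, assuming the Surge module name and the `MatrixAxies` cases used in the fixtures above: summing with `.row` collapses each row into a single entry, while `.column` collapses each column.

import Surge

// Same 2×3 fixture as the tests above.
let matrix: Matrix<Double> = [
    [1, 2, 3],
    [4, 5, 6],
]

// Collapse each row into one value: a 2×1 result.
let rowSums = sum(matrix, axies: .row)        // [[6], [15]]

// Collapse each column into one value: a 1×3 result.
let columnSums = sum(matrix, axies: .column)  // [[5, 7, 9]]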