Created the intentional duplicate of interpretArray. Intentional duplicate, because I know nothing about this php and perl scripter mountebank fantasy data type.
Signed-off-by: Adam Rocska <adam.rocska@adams.solutions>
This commit is contained in:
parent
3bd97a83f1
commit
c645dd40c4
|
@ -38,8 +38,12 @@ class PayloadInterpreter {
|
|||
payloadStart: input.payloadStart,
|
||||
resolvePointers: resolvePointers
|
||||
)
|
||||
case .dataCacheContainer(let entryCount):
|
||||
// TODO
|
||||
case .dataCacheContainer(let entryCount): return interpretDataCacheContainer(
|
||||
entryCount: entryCount,
|
||||
decoder: decoder,
|
||||
payloadStart: input.payloadStart,
|
||||
resolvePointers: resolvePointers
|
||||
)
|
||||
return Payload.dataCacheContainer([])
|
||||
case .endMarker: return Payload.endMarker
|
||||
case .boolean(let payload): return Payload.boolean(payload)
|
||||
|
|
|
@ -0,0 +1,20 @@
|
|||
import Foundation
|
||||
|
||||
/// Reads `entryCount` consecutive payload entries starting at `payloadStart`
/// and wraps them in a `Payload.dataCacheContainer`.
///
/// - Parameters:
///   - entryCount: Number of entries to decode; `0` yields an empty container.
///   - decoder: Decoder used to read each entry.
///   - payloadStart: Index of the first entry's control byte.
///   - resolvePointers: Forwarded unchanged to every `decoder.read` call.
/// - Returns: The assembled container payload, or `nil` if any entry fails
///   to decode.
func interpretDataCacheContainer(
    entryCount: UInt32,
    decoder: Decoder,
    payloadStart: Data.Index,
    resolvePointers: Bool
) -> Payload? {
    var result: [Payload] = []
    result.reserveCapacity(Int(entryCount))
    var payloadIndexToRead = payloadStart
    // Iterate 0..<entryCount rather than 1...entryCount: the closed range
    // 1...0 traps at runtime when entryCount == 0, whereas an empty
    // container is a perfectly valid decode result.
    for _ in 0..<entryCount {
        guard let output: Decoder.Output = decoder.read(at: payloadIndexToRead, resolvePointers: resolvePointers) else {
            return nil
        }
        // Entries are laid out back-to-back: the next entry begins where
        // the previous entry's payload ended.
        payloadIndexToRead = output.payloadRange.upperBound
        result.append(output.payload)
    }

    return Payload.dataCacheContainer(result)
}
|
|
@ -81,7 +81,7 @@ class FunctionInterpretArrayTest: XCTestCase {
|
|||
|
||||
}
|
||||
|
||||
struct ReadKey: Hashable {
|
||||
fileprivate struct ReadKey: Hashable {
|
||||
let controlByteOffset: Data.Index
|
||||
let resolvePointers: Bool
|
||||
}
|
||||
|
|
|
@ -0,0 +1,126 @@
|
|||
import Foundation
|
||||
import XCTest
|
||||
@testable import Decoder
|
||||
|
||||
/// Tests for the free function `interpretDataCacheContainer`.
class FunctionInterpretDataCacheContainerTest: XCTestCase {

    /// The interpreter must return `nil` as soon as any entry of the
    /// container cannot be resolved, and must not read past the failing
    /// entry.
    func testInterpretDataCacheContainer_returnsNilIfAtLeastOneElementIsUnresolved() {
        let mockDecoder = MockDecoder()
        // No entries registered: the very first read fails.
        XCTAssertNil(interpretDataCacheContainer(entryCount: 3, decoder: mockDecoder, payloadStart: 10, resolvePointers: true))
        XCTAssertEqual(1, mockDecoder.readCounter)
        // Register only the first entry. The second read — at the first
        // entry's payloadRange.upperBound (7) — still fails, so exactly two
        // more reads happen (three in total).
        mockDecoder.entries[ReadKey(controlByteOffset: 10, resolvePointers: true)] = (
            payload: Payload.int32(100),
            controlRange: Range(uncheckedBounds: (0, 5)),
            payloadRange: Range(uncheckedBounds: (5, 7))
        )
        XCTAssertNil(interpretDataCacheContainer(entryCount: 3, decoder: mockDecoder, payloadStart: 10, resolvePointers: true))
        XCTAssertEqual(3, mockDecoder.readCounter)
    }

    /// When every entry resolves, the interpreter must return a
    /// `dataCacheContainer` holding all entries in read order.
    func testInterpretDataCacheContainer_constructsAndReturnsDataCacheContainerIfAllElementsAreResolved() {
        // One entry per supported payload variant, including nested
        // containers.
        let expectedDataCacheContainer = [
            Payload.utf8String("Test String"),
            Payload.double(123.0),
            Payload.bytes(Data([0xAB, 0xCD, 0xDE])),
            Payload.uInt16(123),
            Payload.uInt32(123),
            Payload.map(["key": Payload.utf8String("value")]),
            Payload.int32(123),
            Payload.uInt64(123),
            Payload.uInt128(Data([0xAB, 0xCD])),
            Payload.dataCacheContainer([
                Payload.utf8String("ab"),
                Payload.utf8String("cd"),
                Payload.utf8String("ef")
            ]),
            Payload.dataCacheContainer([
                Payload.utf8String("ab"),
                Payload.utf8String("cd"),
                Payload.utf8String("ef")
            ]),
            Payload.endMarker,
            Payload.boolean(true),
            Payload.boolean(false),
            Payload.float(123.0)
        ]

        let mockDecoder = MockDecoder()

        // Lay the entries out back-to-back: each entry's payloadRange ends
        // where the next one begins, so the interpreter can chain reads.
        let startOffset = 10
        var offset = startOffset
        for (index, payload) in expectedDataCacheContainer.enumerated() {
            let nextOffset = offset + index + 1
            let key = ReadKey(controlByteOffset: offset, resolvePointers: true)
            let value = (
                payload,
                Range(uncheckedBounds: (0, 5)),
                Range(uncheckedBounds: (offset, nextOffset))
            )
            mockDecoder.entries[key] = value
            offset = nextOffset
        }

        guard let payload = interpretDataCacheContainer(
            entryCount: UInt32(expectedDataCacheContainer.count),
            decoder: mockDecoder,
            payloadStart: startOffset,
            resolvePointers: true
        ) else {
            XCTFail("Should have been able to resolve a payload")
            return
        }

        switch payload {
        case .dataCacheContainer(let items):
            // Assert the counts match first: comparing elementwise over
            // `items` alone would silently pass if the result were shorter
            // than expected.
            XCTAssertEqual(expectedDataCacheContainer.count, items.count)
            for (index, item) in items.enumerated() {
                XCTAssertEqual(expectedDataCacheContainer[index], item)
            }
        default: XCTFail("Should have resolved an dataCacheContainer payload.")
        }
    }

}
|
||||
|
||||
/// Lookup key pairing a control-byte offset with the pointer-resolution
/// flag, so `MockDecoder` can serve distinct canned outputs for reads that
/// differ in either dimension.
fileprivate struct ReadKey: Hashable {
    let controlByteOffset: Data.Index
    let resolvePointers: Bool
}
|
||||
|
||||
/// A `Decoder` test double that serves pre-registered outputs from
/// `entries` and counts how many times `read(at:resolvePointers:)` was
/// invoked. Reads for unregistered keys return `nil`.
fileprivate class MockDecoder: Decoder {

    // Canned outputs keyed by (offset, resolvePointers).
    var entries: [ReadKey: Decoder.Output] = [:]
    // Total number of read(at:resolvePointers:) invocations.
    var readCounter = 0

    init() {
        // The superclass collaborators are inert stubs; all behavior comes
        // from the overridden read below.
        super.init(
            data: Data([0xFF]),
            controlByteInterpreter: MockControlByteInterpreter(),
            payloadInterpreter: MockPayloadInterpreter()
        )
    }

    override func read(at controlByteOffset: Int, resolvePointers: Bool) -> Decoder.Output? {
        readCounter += 1
        let key = ReadKey(
            controlByteOffset: controlByteOffset,
            resolvePointers: resolvePointers
        )
        return entries[key]
    }
}
|
||||
|
||||
/// A `ControlByteInterpreter` stub whose resolvers and `interpret` always
/// yield `nil`; it exists solely to satisfy `Decoder`'s initializer.
fileprivate class MockControlByteInterpreter: ControlByteInterpreter {
    init() {
        super.init(
            typeResolver: { _, _ in return nil },
            payloadSizeResolver: { _, _, _ in return nil },
            definitionSizeResolver: { _, _, _ in return nil }
        )
    }

    override func interpret(bytes: Data, sourceEndianness: Endianness) -> InterpretationResult? {
        return nil
    }
}
|
||||
|
||||
/// A `PayloadInterpreter` stub that never resolves anything; it exists
/// solely to satisfy `Decoder`'s initializer.
fileprivate class MockPayloadInterpreter: PayloadInterpreter {
    override func interpret(
        input: Input,
        using decoder: Decoder,
        resolvePointers: Bool
    ) -> Payload? {
        return nil
    }
}
|
Loading…
Reference in New Issue