OSON encode/decoder to support field name length > 255 bytes & relative offsets + Code refactoring for fetching Oracle Client and DB server versions in Thick mode

This commit is contained in:
Sharad Chandran R 2024-05-02 20:09:41 +05:30
parent 9491db7094
commit 02d49ccff0
16 changed files with 695 additions and 316 deletions

View File

@ -17,6 +17,12 @@ Common Changes
returned by SODA in Oracle Database 23.4 and higher in the ``_id``
attribute of documents stored in native collections.
#) Added support for an Oracle Database 23c JSON feature allowing for field
names with more than 255 UTF-8 encoded bytes.
#) Added support for an Oracle Database 23c JSON feature improving JSON
storage usage.
Thin Mode Changes
++++++++++++++++++

View File

@ -860,7 +860,7 @@ class Connection extends EventEmitter {
//---------------------------------------------------------------------------
encodeOSON(value) {
const encoder = new oson.OsonEncoder();
return encoder.encode(transformer.transformJsonValue(value));
return encoder.encode(transformer.transformJsonValue(value), this._impl._osonMaxFieldNameSize);
}
//---------------------------------------------------------------------------

View File

@ -358,7 +358,7 @@ messages.set(ERR_INTEGER_TOO_LARGE, // NJS-112
messages.set(ERR_UNEXPECTED_DATA, // NJS-113
'unexpected data received: %s');
messages.set(ERR_OSON_FIELD_NAME_LIMITATION, // NJS-114
'OSON field names may not exceed 255 UTF-8 encoded bytes');
'OSON field names may not exceed %d UTF-8 encoded bytes');
messages.set(ERR_ORACLE_NUMBER_NO_REPR, // NJS-115
'value cannot be represented as an Oracle Database number');
messages.set(ERR_UNSUPPORTED_VERIFIER_TYPE, // NJS-116

View File

@ -37,6 +37,7 @@ class ConnectionImpl {
this._inProgress = false;
this._dbObjectTypes = new Map();
this._requestQueue = [];
this._osonMaxFieldNameSize = 255;
}
//---------------------------------------------------------------------------

View File

@ -44,7 +44,8 @@ module.exports = {
TNS_JSON_MAGIC_BYTE_1: 0xff,
TNS_JSON_MAGIC_BYTE_2: 0x4a, // 'J'
TNS_JSON_MAGIC_BYTE_3: 0x5a, // 'Z'
TNS_JSON_VERSION: 1,
TNS_JSON_VERSION_MAX_FNAME_255: 1,
TNS_JSON_VERSION_MAX_FNAME_65535: 3,
TNS_JSON_FLAG_HASH_ID_UINT8: 0x0100,
TNS_JSON_FLAG_HASH_ID_UINT16: 0x0200,
TNS_JSON_FLAG_NUM_FNAMES_UINT16: 0x0400,
@ -56,6 +57,7 @@ module.exports = {
TNS_JSON_FLAG_LEN_IN_PCODE: 0x04,
TNS_JSON_FLAG_NUM_FNAMES_UINT32: 0x08,
TNS_JSON_FLAG_IS_SCALAR: 0x10,
TNS_JSON_FLAG_SEC_FNAMES_SEG_UINT16: 0x100,
// JSON data types
TNS_JSON_TYPE_NULL: 0x30,

View File

@ -1,4 +1,4 @@
// Copyright (c) 2023, Oracle and/or its affiliates.
// Copyright (c) 2023, 2024, Oracle and/or its affiliates.
//-----------------------------------------------------------------------------
//
@ -36,7 +36,7 @@ const vector = require("./vector.js");
const nodbUtil = require("../../util.js");
/**
* Class used for decodeing
* Class used for decoding
*/
class OsonDecoder extends BaseBuffer {
@ -52,6 +52,7 @@ class OsonDecoder extends BaseBuffer {
// significant bits of the node type; determine the offsets in the tree
// segment to the field ids array and the value offsets array
let container, offsetsPos, fieldIdsPos;
const containerOffset = this.pos - this.treeSegPos - 1;
let numChildren = this._getNumChildren(nodeType);
const isObject = ((nodeType & 0x40) === 0);
if (numChildren === undefined) {
@ -86,7 +87,10 @@ class OsonDecoder extends BaseBuffer {
fieldIdsPos += this.fieldIdLength;
}
this.pos = offsetsPos;
const offset = this._getOffset(nodeType);
let offset = this._getOffset(nodeType);
if (this.relativeOffsets) {
offset += containerOffset;
}
offsetsPos = this.pos;
this.pos = this.treeSegPos + offset;
if (isObject) {
@ -221,6 +225,46 @@ class OsonDecoder extends BaseBuffer {
}
}
//---------------------------------------------------------------------------
// _getFieldNames()
//
// Reads one group of field names (short or long) from the buffer and stores
// them in this.fieldNames, starting at index arrStartPos. On return, this.pos
// is positioned immediately after the field names segment.
//
// Parameters:
//   arrStartPos - index in this.fieldNames at which to store the first name
//   numFields - number of field names in this group
//   offsetsSize - width in bytes (2 or 4) of each field name offset entry
//   fieldNamesSegSize - total size in bytes of the field names segment
//   fieldNamesSize - width in bytes of both the per-field hash id and the
//     length prefix preceding each name: 1 for short names (<= 255 bytes),
//     2 for long names (<= 65535 bytes)
//---------------------------------------------------------------------------
_getFieldNames(arrStartPos, numFields, offsetsSize, fieldNamesSegSize, fieldNamesSize) {
// skip the hash id array (fieldNamesSize bytes for each field)
this.skipBytes(numFields * fieldNamesSize);
// remember where the field name offsets array starts, then skip past it
// so the field names segment itself can be captured
const offsetsPos = this.pos;
this.skipBytes(numFields * offsetsSize);
const ptr = this.readBytes(fieldNamesSegSize);
const finalPos = this.pos;
// rewind to the offsets array and resolve each name; offsets are relative
// to the start of the field names segment (ptr)
this.pos = offsetsPos;
let offset;
for (let i = arrStartPos; i < arrStartPos + numFields; i++) {
if (offsetsSize === 2) {
offset = this.readUInt16BE();
} else {
offset = this.readUInt32BE();
}
// read the length prefix for the name (width matches fieldNamesSize)
let temp;
if (fieldNamesSize === 1) {
// Short Field Name
temp = ptr.readUInt8(offset);
} else {
// Long Field Name
temp = ptr.readUInt16BE(offset);
}
this.fieldNames[i] = ptr.subarray(offset + fieldNamesSize, offset + temp + fieldNamesSize).toString();
}
// restore position to the end of the field names segment
this.pos = finalPos;
}
//---------------------------------------------------------------------------
// decode()
//
@ -237,14 +281,16 @@ class OsonDecoder extends BaseBuffer {
errors.throwErr(errors.ERR_UNEXPECTED_DATA, magic.toString('hex'));
}
const version = this.readUInt8();
if (version !== constants.TNS_JSON_VERSION) {
if (version !== constants.TNS_JSON_VERSION_MAX_FNAME_255 &&
version !== constants.TNS_JSON_VERSION_MAX_FNAME_65535) {
errors.throwErr(errors.ERR_OSON_VERSION_NOT_SUPPORTED, version);
}
const flags = this.readUInt16BE();
const primaryFlags = this.readUInt16BE();
this.relativeOffsets = primaryFlags & constants.TNS_JSON_FLAG_REL_OFFSET_MODE;
// scalar values are much simpler
if (flags & constants.TNS_JSON_FLAG_IS_SCALAR) {
if (flags & constants.TNS_JSON_FLAG_TREE_SEG_UINT32) {
if (primaryFlags & constants.TNS_JSON_FLAG_IS_SCALAR) {
if (primaryFlags & constants.TNS_JSON_FLAG_TREE_SEG_UINT32) {
this.skipBytes(4);
} else {
this.skipBytes(2);
@ -252,31 +298,46 @@ class OsonDecoder extends BaseBuffer {
return this._decodeNode();
}
// determine the number of field names
let numFieldNames;
if (flags & constants.TNS_JSON_FLAG_NUM_FNAMES_UINT32) {
numFieldNames = this.readUInt32BE();
// determine the number of short field names
let numShortFieldNames;
if (primaryFlags & constants.TNS_JSON_FLAG_NUM_FNAMES_UINT32) {
numShortFieldNames = this.readUInt32BE();
this.fieldIdLength = 4;
} else if (flags & constants.TNS_JSON_FLAG_NUM_FNAMES_UINT16) {
numFieldNames = this.readUInt16BE();
} else if (primaryFlags & constants.TNS_JSON_FLAG_NUM_FNAMES_UINT16) {
numShortFieldNames = this.readUInt16BE();
this.fieldIdLength = 2;
} else {
numFieldNames = this.readUInt8();
numShortFieldNames = this.readUInt8();
this.fieldIdLength = 1;
}
// determine the size of the field names segment
let fieldNameOffsetsSize, fieldNamesSegSize;
if (flags & constants.TNS_JSON_FLAG_FNAMES_SEG_UINT32) {
fieldNameOffsetsSize = 4;
fieldNamesSegSize = this.readUInt32BE();
// determine the size of the short field names segment
let shortFieldNameOffsetsSize, shortFieldNamesSegSize;
if (primaryFlags & constants.TNS_JSON_FLAG_FNAMES_SEG_UINT32) {
shortFieldNameOffsetsSize = 4;
shortFieldNamesSegSize = this.readUInt32BE();
} else {
fieldNameOffsetsSize = 2;
fieldNamesSegSize = this.readUInt16BE();
shortFieldNameOffsetsSize = 2;
shortFieldNamesSegSize = this.readUInt16BE();
}
// if the version indicates that field names > 255 bytes exist, parse
// the information about that segment
let longFieldNameOffsetsSize, longFieldNamesSegSize;
let numLongFieldNames = 0;
if (version === constants.TNS_JSON_VERSION_MAX_FNAME_65535) {
const secondaryFlags = this.readUInt16BE();
if (secondaryFlags & constants.TNS_JSON_FLAG_SEC_FNAMES_SEG_UINT16) {
longFieldNameOffsetsSize = 2;
} else {
longFieldNameOffsetsSize = 4;
}
numLongFieldNames = this.readUInt32BE();
longFieldNamesSegSize = this.readUInt32BE();
}
// skip the size of the tree segment
if (flags & constants.TNS_JSON_FLAG_TREE_SEG_UINT32) {
if (primaryFlags & constants.TNS_JSON_FLAG_TREE_SEG_UINT32) {
this.skipBytes(4);
} else {
this.skipBytes(2);
@ -285,37 +346,18 @@ class OsonDecoder extends BaseBuffer {
// skip the number of "tiny" nodes
this.skipBytes(2);
// skip the hash id array
let hashIdSize;
if (flags & constants.TNS_JSON_FLAG_HASH_ID_UINT8) {
hashIdSize = 1;
} else if (flags & constants.TNS_JSON_FLAG_HASH_ID_UINT16) {
hashIdSize = 2;
} else {
hashIdSize = 4;
this.fieldNames = new Array(numShortFieldNames + numLongFieldNames);
// if there are any short names, read them now
if (numShortFieldNames > 0) {
this._getFieldNames(0, numShortFieldNames,
shortFieldNameOffsetsSize, shortFieldNamesSegSize, 1);
}
this.skipBytes(numFieldNames * hashIdSize);
// skip over the field name offsets and field names
let fieldNameOffsetsPos = this.pos;
this.skipBytes(numFieldNames * fieldNameOffsetsSize);
const fieldNamesPos = this.pos;
this.skipBytes(fieldNamesSegSize);
// determine the names of the fields
this.fieldNames = new Array(numFieldNames);
for (let i = 0; i < numFieldNames; i++) {
let offset = fieldNamesPos;
if (flags & constants.TNS_JSON_FLAG_FNAMES_SEG_UINT32) {
offset += this.buf.readUInt32BE(fieldNameOffsetsPos);
fieldNameOffsetsPos += 4;
} else {
offset += this.buf.readUInt16BE(fieldNameOffsetsPos);
fieldNameOffsetsPos += 2;
}
const len = this.buf[offset];
const name = this.buf.subarray(offset + 1, offset + len + 1).toString();
this.fieldNames[i] = name;
// if there are any long names, read them now
if (numLongFieldNames > 0) {
this._getFieldNames(numShortFieldNames, numLongFieldNames,
longFieldNameOffsetsSize, longFieldNamesSegSize, 2);
}
// determine tree segment position in the buffer
@ -329,18 +371,22 @@ class OsonDecoder extends BaseBuffer {
class OsonFieldName {
constructor(name) {
constructor(name, maxFieldNameSize) {
this.name = name;
this.nameBytes = Buffer.from(name);
if (this.nameBytes.length > 255) {
errors.throwErr(errors.ERR_OSON_FIELD_NAME_LIMITATION);
if (this.nameBytes.length > maxFieldNameSize) {
errors.throwErr(errors.ERR_OSON_FIELD_NAME_LIMITATION, maxFieldNameSize);
}
this.hashId = BigInt(0x811C9DC5);
const multiplier = BigInt(16777619);
const mask = BigInt(0xffffffff);
// BigInt constants for calculating Hash ID for the OSON Field Name
const INITIAL_HASHID = 0x811C9DC5n;
const HASH_MULTIPLIER = 16777619n;
const HASH_MASK = 0xffffffffn;
this.hashId = INITIAL_HASHID;
for (let i = 0; i < this.nameBytes.length; i++) {
const c = BigInt(this.nameBytes[i]);
this.hashId = ((this.hashId ^ c) * multiplier) & mask;
this.hashId = ((this.hashId ^ c) * HASH_MULTIPLIER) & HASH_MASK;
}
this.hashId = Number(this.hashId) & 0xff;
}
@ -349,41 +395,25 @@ class OsonFieldName {
class OsonFieldNamesSegment extends GrowableBuffer {
constructor(value) {
constructor() {
super();
this.fieldNamesMap = new Map();
this.fieldNames = [];
this._examineNode(value);
this._processFieldNames();
}
//---------------------------------------------------------------------------
// _exmaineNode()
// addName()
//
// Examines the value. If it contains fields, unique names are retained. The
// values are then examined to see if they also contain fields. Arrays are
// examined to determine they contain elements that contain fields.
// Adds a name to the field names segment.
//---------------------------------------------------------------------------
_examineNode(value) {
if (Array.isArray(value)) {
for (const element of value) {
this._examineNode(element);
}
} else if (value && Array.isArray(value.fields)) {
for (let i = 0; i < value.fields.length; i++) {
const name = value.fields[i];
const element = value.values[i];
if (!this.fieldNamesMap.has(name)) {
const fieldName = new OsonFieldName(name);
this.fieldNamesMap.set(name, fieldName);
this.fieldNames.push(fieldName);
fieldName.offset = this.pos;
this.writeUInt8(fieldName.nameBytes.length);
this.writeBytes(fieldName.nameBytes);
}
this._examineNode(element);
}
addName(fieldName) {
fieldName.offset = this.pos;
if (fieldName.nameBytes.length <= 255) {
this.writeUInt8(fieldName.nameBytes.length);
} else {
this.writeUInt16BE(fieldName.nameBytes.length);
}
this.writeBytes(fieldName.nameBytes);
this.fieldNames.push(fieldName);
}
//---------------------------------------------------------------------------
@ -391,7 +421,7 @@ class OsonFieldNamesSegment extends GrowableBuffer {
//
// Processes the field names in preparation for encoding within OSON.
//---------------------------------------------------------------------------
_processFieldNames() {
_processFieldNames(fieldIdOffset) {
this.fieldNames.sort((a, b) => {
if (a.hashId < b.hashId)
return -1;
@ -408,7 +438,7 @@ class OsonFieldNamesSegment extends GrowableBuffer {
return 0;
});
for (let i = 0; i < this.fieldNames.length; i++) {
this.fieldNames[i].fieldId = i + 1;
this.fieldNames[i].fieldId = fieldIdOffset + i + 1;
}
if (this.fieldNames.length < 256) {
this.fieldIdSize = 1;
@ -428,7 +458,7 @@ class OsonTreeSegment extends GrowableBuffer {
//
// Encodes an array in the OSON tree segment.
//---------------------------------------------------------------------------
_encodeArray(value, fnamesSeg) {
_encodeArray(value, encoder) {
this._encodeContainer(constants.TNS_JSON_TYPE_ARRAY, value.length);
const len = value.length * 4;
const pos = this.reserveBytes(len);
@ -436,7 +466,7 @@ class OsonTreeSegment extends GrowableBuffer {
for (const element of value) {
this.buf.writeUInt32BE(this.pos, offsetsBufPos);
offsetsBufPos += 4;
this.encodeNode(element, fnamesSeg);
this.encodeNode(element, encoder);
}
}
@ -468,27 +498,27 @@ class OsonTreeSegment extends GrowableBuffer {
//
// Encodes an object in the OSON tree segment.
//---------------------------------------------------------------------------
_encodeObject(value, fnamesSeg) {
_encodeObject(value, encoder) {
const numChildren = value.values.length;
this._encodeContainer(constants.TNS_JSON_TYPE_OBJECT, numChildren);
const len = numChildren * (fnamesSeg.fieldIdSize + 4);
const pos = this.reserveBytes(len);
let fieldIdOffset = pos;
let valueOffset = pos + (numChildren * fnamesSeg.fieldIdSize);
let fieldIdOffset = this.pos;
let valueOffset = this.pos + (numChildren * encoder.fieldIdSize);
const finalOffset = valueOffset + numChildren * 4;
this.reserveBytes(finalOffset - this.pos);
for (let i = 0; i < value.fields.length; i++) {
const fieldName = fnamesSeg.fieldNamesMap.get(value.fields[i]);
if (fnamesSeg.fieldIdSize == 1) {
const fieldName = encoder.fieldNamesMap.get(value.fields[i]);
if (encoder.fieldIdSize == 1) {
this.buf[fieldIdOffset] = fieldName.fieldId;
} else if (fnamesSeg.fieldIdSize == 2) {
} else if (encoder.fieldIdSize == 2) {
this.buf.writeUInt16BE(fieldName.fieldId, fieldIdOffset);
} else {
this.buf.writeUInt32BE(fieldName.fieldId, fieldIdOffset);
}
this.buf.writeUInt32BE(this.pos, valueOffset);
fieldIdOffset += fnamesSeg.fieldIdSize;
fieldIdOffset += encoder.fieldIdSize;
valueOffset += 4;
this.encodeNode(value.values[i], fnamesSeg);
this.encodeNode(value.values[i], encoder);
}
}
@ -497,7 +527,7 @@ class OsonTreeSegment extends GrowableBuffer {
//
// Encodes a value (node) in the OSON tree segment.
//---------------------------------------------------------------------------
encodeNode(value, fnamesSeg) {
encodeNode(value, encoder) {
// handle null
if (value === undefined || value === null) {
@ -556,7 +586,7 @@ class OsonTreeSegment extends GrowableBuffer {
// handle arrays
} else if (Array.isArray(value)) {
this._encodeArray(value, fnamesSeg);
this._encodeArray(value, encoder);
// handle vectors
} else if (nodbUtil.isVectorValue(value)) {
@ -574,44 +604,181 @@ class OsonTreeSegment extends GrowableBuffer {
// handle objects
} else {
this._encodeObject(value, fnamesSeg);
this._encodeObject(value, encoder);
}
}
}
/**
* Class used for encoding
*/
class OsonEncoder extends GrowableBuffer {
//---------------------------------------------------------------------------
// _addFieldName()
//
// Add a field with the given name.
//---------------------------------------------------------------------------
_addFieldName(name) {
const fieldName = new OsonFieldName(name, this.maxFieldNameSize);
this.fieldNamesMap.set(name, fieldName);
if (fieldName.nameBytes.length <= 255) {
this.shortFieldNamesSeg.addName(fieldName);
} else {
if (!this.longFieldNamesSeg) {
this.longFieldNamesSeg = new OsonFieldNamesSegment();
}
this.longFieldNamesSeg.addName(fieldName);
}
}
//---------------------------------------------------------------------------
// _examineNode()
//
// Examines the value. If it contains fields, unique names are retained. The
// values are then examined to see if they also contain fields. Arrays are
// examined to determine they contain elements that contain fields.
//---------------------------------------------------------------------------
_examineNode(value) {
if (Array.isArray(value)) {
for (const element of value) {
this._examineNode(element);
}
} else if (value && Array.isArray(value.fields)) {
for (let i = 0; i < value.fields.length; i++) {
const name = value.fields[i];
const element = value.values[i];
if (!this.fieldNamesMap.has(name)) {
this._addFieldName(name);
}
this._examineNode(element);
}
}
}
//---------------------------------------------------------------------------
// _writeExtendedHeader()
//
// Write the extended header containing information about the short and long
// field name segments.
//---------------------------------------------------------------------------
_writeExtendedHeader() {
// write number of short field names
if (this.fieldIdSize === 1) {
this.writeUInt8(this.shortFieldNamesSeg.fieldNames.length);
} else if (this.fieldIdSize === 2) {
this.writeUInt16BE(this.shortFieldNamesSeg.fieldNames.length);
} else {
this.writeUInt32BE(this.shortFieldNamesSeg.fieldNames.length);
}
// write size of short field names segment
if (this.shortFieldNamesSeg.pos < 65536) {
this.writeUInt16BE(this.shortFieldNamesSeg.pos);
} else {
this.writeUInt32BE(this.shortFieldNamesSeg.pos);
}
// write fields for long field names segment, if applicable
if (this.longFieldNamesSeg) {
let secondaryFlags = 0;
if (this.longFieldNamesSeg.pos < 65536) {
secondaryFlags = constants.TNS_JSON_FLAG_SEC_FNAMES_SEG_UINT16;
}
this.writeUInt16BE(secondaryFlags);
this.writeUInt32BE(this.longFieldNamesSeg.fieldNames.length);
this.writeUInt32BE(this.longFieldNamesSeg.pos);
}
}
//---------------------------------------------------------------------------
// _writeFieldNamesSeg()
//
// Write the contents of the field names segment to the buffer.
//---------------------------------------------------------------------------
_writeFieldNamesSeg(fieldNamesSeg) {
// write array of hash ids
for (const fieldName of fieldNamesSeg.fieldNames) {
if (fieldName.nameBytes.length <= 255) {
this.writeUInt8(fieldName.hashId);
} else {
this.writeUInt16BE(fieldName.hashId);
}
}
// write array of field name offsets for the short field names
for (const fieldName of fieldNamesSeg.fieldNames) {
if (fieldNamesSeg.pos < 65536) {
this.writeUInt16BE(fieldName.offset);
} else {
this.writeUInt32BE(fieldName.offset);
}
}
// write field names
if (fieldNamesSeg.pos > 0) {
this.writeBytes(fieldNamesSeg.buf.subarray(0, fieldNamesSeg.pos));
}
}
//---------------------------------------------------------------------------
// encode()
//
// Encodes the value as OSON and returns a buffer containing the OSON bytes.
//---------------------------------------------------------------------------
encode(value) {
encode(value, maxFieldNameSize) {
// determine flags to use
let fnamesSeg;
this.maxFieldNameSize = maxFieldNameSize;
// determine the flags to use
let flags = constants.TNS_JSON_FLAG_INLINE_LEAF;
if (Array.isArray(value) || (value && Array.isArray(value.fields))) {
// examine all values recursively to determine the unique set of field
// names and whether they need to be added to the long field names
// segment (> 255 bytes) or short field names segment (<= 255 bytes)
this.fieldNamesMap = new Map();
this.shortFieldNamesSeg = new OsonFieldNamesSegment();
this._examineNode(value);
// perform processing of field names segments and determine the total
// number of unique field names in the value
let totalNumFieldNames = 0;
if (this.shortFieldNamesSeg) {
this.shortFieldNamesSeg._processFieldNames(0);
totalNumFieldNames += this.shortFieldNamesSeg.fieldNames.length;
}
if (this.longFieldNamesSeg) {
this.longFieldNamesSeg._processFieldNames(totalNumFieldNames);
totalNumFieldNames += this.longFieldNamesSeg.fieldNames.length;
}
// determine remaining flags and field id size
flags |= constants.TNS_JSON_FLAG_HASH_ID_UINT8 |
constants.TNS_JSON_FLAG_TINY_NODES_STAT;
fnamesSeg = new OsonFieldNamesSegment(value);
if (fnamesSeg.fieldNames.length > 65535) {
if (totalNumFieldNames > 65535) {
flags |= constants.TNS_JSON_FLAG_NUM_FNAMES_UINT32;
} else if (fnamesSeg.fieldNames.length > 255) {
this.fieldIdSize = 4;
} else if (totalNumFieldNames > 255) {
flags |= constants.TNS_JSON_FLAG_NUM_FNAMES_UINT16;
this.fieldIdSize = 2;
} else {
this.fieldIdSize = 1;
}
if (fnamesSeg.pos > 65535) {
if (this.shortFieldNamesSeg.pos > 65535) {
flags |= constants.TNS_JSON_FLAG_FNAMES_SEG_UINT32;
}
} else {
// if the value is a simple scalar
flags |= constants.TNS_JSON_FLAG_IS_SCALAR;
}
// encode values into the tree segment
// encode values into the OSON tree segment
const treeSeg = new OsonTreeSegment();
treeSeg.encodeNode(value, fnamesSeg);
treeSeg.encodeNode(value, this);
if (treeSeg.pos > 65535) {
flags |= constants.TNS_JSON_FLAG_TREE_SEG_UINT32;
}
@ -620,28 +787,16 @@ class OsonEncoder extends GrowableBuffer {
this.writeUInt8(constants.TNS_JSON_MAGIC_BYTE_1);
this.writeUInt8(constants.TNS_JSON_MAGIC_BYTE_2);
this.writeUInt8(constants.TNS_JSON_MAGIC_BYTE_3);
this.writeUInt8(constants.TNS_JSON_VERSION);
if (this.longFieldNamesSeg) {
this.writeUInt8(constants.TNS_JSON_VERSION_MAX_FNAME_65535);
} else {
this.writeUInt8(constants.TNS_JSON_VERSION_MAX_FNAME_255);
}
this.writeUInt16BE(flags);
// write extended header (when value is not scalar)
if (fnamesSeg) {
// write number of field names
if (fnamesSeg.fieldNames.length < 256) {
this.writeUInt8(fnamesSeg.fieldNames.length);
} else if (fnamesSeg.fieldNames.length < 65536) {
this.writeUInt16BE(fnamesSeg.fieldNames.length);
} else {
this.writeUInt32BE(fnamesSeg.fieldNames.length);
}
// write size of field names segment
if (fnamesSeg.pos < 65536) {
this.writeUInt16BE(fnamesSeg.pos);
} else {
this.writeUInt32BE(fnamesSeg.pos);
}
if (this.shortFieldNamesSeg) {
this._writeExtendedHeader();
}
// write size of tree segment
@ -652,30 +807,16 @@ class OsonEncoder extends GrowableBuffer {
}
// write remainder of header and any data (when value is not scalar)
if (fnamesSeg) {
if (this.shortFieldNamesSeg) {
// write number of "tiny" nodes (always zero)
this.writeUInt16BE(0);
// write array of hash ids
for (const fieldName of fnamesSeg.fieldNames) {
this.writeUInt8(fieldName.hashId);
// write the field names segments
this._writeFieldNamesSeg(this.shortFieldNamesSeg);
if (this.longFieldNamesSeg) {
this._writeFieldNamesSeg(this.longFieldNamesSeg);
}
// write array of field name offsets
for (const fieldName of fnamesSeg.fieldNames) {
if (fnamesSeg.pos < 65536) {
this.writeUInt16BE(fieldName.offset);
} else {
this.writeUInt32BE(fieldName.offset);
}
}
// write field names
if (fnamesSeg.pos > 0) {
this.writeBytes(fnamesSeg.buf.subarray(0, fnamesSeg.pos));
}
}
// write tree segment data

View File

@ -76,6 +76,12 @@ class ProtocolMessage extends Message {
if (serverCompileCaps) {
this.serverCompileCaps = Buffer.from(serverCompileCaps);
buf.caps.adjustForServerCompileCaps(this.serverCompileCaps);
// Set the maximum OSON field name size
if (buf.caps.ttcFieldVersion >= constants.TNS_CCAP_FIELD_VERSION_23_1) {
this.connection._osonMaxFieldNameSize = 65535;
} else {
this.connection._osonMaxFieldNameSize = 255;
}
}
const serverRunTimeCaps = buf.readBytesWithLength();
if (serverRunTimeCaps) {

View File

@ -852,7 +852,7 @@ class MessageWithData extends Message {
} else if ([constants.TNS_DATA_TYPE_ROWID, constants.TNS_DATA_TYPE_UROWID].includes(oraTypeNum)) {
buf.writeBytesWithLength(Buffer.from(value));
} else if (oraTypeNum === constants.TNS_DATA_TYPE_JSON) {
buf.writeOson(value);
buf.writeOson(value, this.connection._osonMaxFieldNameSize);
} else if (oraTypeNum === constants.TNS_DATA_TYPE_VECTOR) {
buf.writeVector(value);
} else if (oraTypeNum === constants.TNS_DATA_TYPE_INT_NAMED) {

View File

@ -551,9 +551,9 @@ class WritePacket extends BaseBuffer {
* Encodes a JavaScript object into OSON and then writes it (QLocator
* followed by data) to the buffer.
*/
writeOson(value) {
writeOson(value, osonMaxFieldSize) {
const encoder = new oson.OsonEncoder();
const buf = encoder.encode(value);
const buf = encoder.encode(value, osonMaxFieldSize);
this.writeQLocator(buf.length);
this.writeBytesWithLength(buf);
}

View File

@ -489,6 +489,11 @@ static bool njsConnection_connectAsync(njsBaton *baton)
// handle warnings if any
dpiContext_getError(baton->globals->context, &baton->warningInfo);
// Set the Oracle server version on the baton
if (dpiConn_getServerVersion(baton->dpiConnHandle, NULL, NULL,
&baton->versionInfo) < 0)
return njsBaton_setErrorDPI(baton);
return true;
}
@ -501,12 +506,26 @@ static bool njsConnection_connectPostAsync(njsBaton *baton, napi_env env,
napi_value *result)
{
njsConnection *conn = (njsConnection*) baton->callingInstance;
napi_value connObj, osonMaxFieldNameSize;
// process warnings if any
if (baton->warningInfo.isWarning) {
conn->warningInfo = baton->warningInfo;
}
// For Oracle Database 23c and Oracle Client 23c, the maximum field name
// size for the OSON object is 65535
conn->versionInfo = baton->versionInfo;
if (conn->versionInfo.versionNum >= 23
&& baton->globals->clientVersionInfo.versionNum >= 23) {
NJS_CHECK_NAPI(env, napi_get_reference_value(env,
baton->jsCallingObjRef, &connObj))
NJS_CHECK_NAPI(env, napi_create_uint32(env,
65535, &osonMaxFieldNameSize))
NJS_CHECK_NAPI(env, napi_set_named_property(env, connObj,
"_osonMaxFieldNameSize", osonMaxFieldNameSize))
}
// transfer the ODPI-C connection handle to the new object
conn->handle = baton->dpiConnHandle;
baton->dpiConnHandle = NULL;
@ -1007,7 +1026,6 @@ static bool njsConnection_getBatchErrors(njsBaton *baton, napi_env env,
NJS_NAPI_METHOD_IMPL_SYNC(njsConnection_getCallTimeout, 0, NULL)
{
njsConnection *conn = (njsConnection*) callingInstance;
dpiVersionInfo versionInfo;
uint32_t callTimeout;
// return undefined for an invalid connection
@ -1015,9 +1033,7 @@ NJS_NAPI_METHOD_IMPL_SYNC(njsConnection_getCallTimeout, 0, NULL)
return true;
// if an Oracle Client less than 18.1 is being used, return undefined
if (dpiContext_getClientVersion(globals->context, &versionInfo) < 0)
return njsUtils_throwErrorDPI(env, globals);
if (versionInfo.versionNum < 18)
if (globals->clientVersionInfo.versionNum < 18)
return true;
// get value and return it
@ -1373,14 +1389,10 @@ static bool njsConnection_getOutBinds(njsBaton *baton, napi_env env,
NJS_NAPI_METHOD_IMPL_SYNC(njsConnection_getOracleServerVersion, 0, NULL)
{
njsConnection *conn = (njsConnection*) callingInstance;
dpiVersionInfo versionInfo;
if (conn->handle) {
if (dpiConn_getServerVersion(conn->handle, NULL, NULL,
&versionInfo) < 0)
return njsUtils_throwErrorDPI(env, globals);
NJS_CHECK_NAPI(env, napi_create_uint32(env, versionInfo.fullVersionNum,
returnValue))
NJS_CHECK_NAPI(env, napi_create_uint32(env,
conn->versionInfo.fullVersionNum, returnValue))
}
return true;
@ -1394,17 +1406,13 @@ NJS_NAPI_METHOD_IMPL_SYNC(njsConnection_getOracleServerVersion, 0, NULL)
NJS_NAPI_METHOD_IMPL_SYNC(njsConnection_getOracleServerVersionString, 0, NULL)
{
njsConnection *conn = (njsConnection*) callingInstance;
dpiVersionInfo versionInfo;
char versionString[40];
if (conn->handle) {
if (dpiConn_getServerVersion(conn->handle, NULL, NULL,
&versionInfo) < 0)
return njsUtils_throwErrorDPI(env, globals);
(void) snprintf(versionString, sizeof(versionString), "%d.%d.%d.%d.%d",
versionInfo.versionNum, versionInfo.releaseNum,
versionInfo.updateNum, versionInfo.portReleaseNum,
versionInfo.portUpdateNum);
conn->versionInfo.versionNum, conn->versionInfo.releaseNum,
conn->versionInfo.updateNum, conn->versionInfo.portReleaseNum,
conn->versionInfo.portUpdateNum);
NJS_CHECK_NAPI(env, napi_create_string_utf8(env, versionString,
NAPI_AUTO_LENGTH, returnValue))
}
@ -1781,6 +1789,10 @@ bool njsConnection_newFromBaton(njsBaton *baton, napi_env env,
if (baton->warningInfo.isWarning) {
conn->warningInfo = baton->warningInfo;
}
// set the version information on the connection object
conn->versionInfo = baton->versionInfo;
return true;
}
@ -1990,13 +2002,10 @@ static bool njsConnection_processBinds(njsBaton *baton, napi_env env,
static bool njsConnection_processImplicitResults(njsBaton *baton)
{
njsImplicitResult *implicitResult = NULL, *tempImplicitResult;
dpiVersionInfo versionInfo;
dpiStmt *stmt;
// clients earlier than 12.1 do not support implicit results
if (dpiContext_getClientVersion(baton->globals->context, &versionInfo) < 0)
return njsBaton_setErrorDPI(baton);
if (versionInfo.versionNum < 12)
if (baton->globals->clientVersionInfo.versionNum < 12)
return true;
// process all implicit results returned

View File

@ -142,7 +142,6 @@ static void njsModule_finalizeGlobals(napi_env env, void *finalize_data,
static bool njsModule_populateGlobals(napi_env env, napi_value module,
napi_value settings, njsModuleGlobals *globals)
{
dpiVersionInfo versionInfo;
char versionString[40];
napi_value temp;
@ -221,16 +220,18 @@ static bool njsModule_populateGlobals(napi_env env, napi_value module,
&globals->jsEncodeVectorFn))
// acquire Oracle client version and store this in the settings object
if (dpiContext_getClientVersion(globals->context, &versionInfo) < 0)
if (dpiContext_getClientVersion(globals->context, &globals->clientVersionInfo) < 0)
return njsUtils_throwErrorDPI(env, globals);
NJS_CHECK_NAPI(env, napi_create_uint32(env, versionInfo.fullVersionNum,
NJS_CHECK_NAPI(env, napi_create_uint32(env, globals->clientVersionInfo.fullVersionNum,
&temp))
NJS_CHECK_NAPI(env, napi_set_named_property(env, settings,
"oracleClientVersion", temp))
(void) snprintf(versionString, sizeof(versionString), "%d.%d.%d.%d.%d",
versionInfo.versionNum, versionInfo.releaseNum,
versionInfo.updateNum, versionInfo.portReleaseNum,
versionInfo.portUpdateNum);
globals->clientVersionInfo.versionNum,
globals->clientVersionInfo.releaseNum,
globals->clientVersionInfo.updateNum,
globals->clientVersionInfo.portReleaseNum,
globals->clientVersionInfo.portUpdateNum);
NJS_CHECK_NAPI(env, napi_create_string_utf8(env, versionString,
strlen(versionString), &temp))
NJS_CHECK_NAPI(env, napi_set_named_property(env, settings,

View File

@ -279,6 +279,9 @@ struct njsBaton {
njsModuleGlobals *globals;
void *callingInstance;
// version information
dpiVersionInfo versionInfo;
// error handling
bool dpiError;
bool hasError;
@ -493,6 +496,7 @@ struct njsClassDef {
// data for class Connection exposed to JS.
struct njsConnection {
dpiConn *handle;
dpiVersionInfo versionInfo;
char *tag;
size_t tagLength;
bool retag;
@ -539,6 +543,7 @@ struct njsLobBuffer {
// data for module globals
struct njsModuleGlobals {
dpiContext *context;
dpiVersionInfo clientVersionInfo;
napi_ref jsAqDeqOptionsConstructor;
napi_ref jsAqEnqOptionsConstructor;
napi_ref jsAqMessageConstructor;

View File

@ -411,6 +411,10 @@ static bool njsPool_getConnectionAsync(njsBaton *baton)
// handle warnings if any
dpiContext_getError(baton->globals->context, &baton->warningInfo);
if (dpiConn_getServerVersion(baton->dpiConnHandle, NULL, NULL,
&baton->versionInfo) < 0)
return njsBaton_setErrorDPI(baton);
// keep track of return parameters
NJS_FREE_AND_CLEAR(baton->tag);
baton->tagLength = 0;

View File

@ -390,7 +390,7 @@ static bool njsSodaOperation_getOnePostAsync(njsBaton *baton, napi_env env,
static bool njsSodaOperation_processOptions(njsBaton *baton, napi_env env,
napi_value options)
{
dpiVersionInfo versionInfo;
dpiVersionInfo *versionInfo;
bool lock;
// allocate memory for ODPI-C operations structure
@ -399,11 +399,9 @@ static bool njsSodaOperation_processOptions(njsBaton *baton, napi_env env,
return njsBaton_setErrorInsufficientMemory(baton);
// set fetch array size, but ONLY if the client version exceeds 19.5
if (dpiContext_getClientVersion(baton->globals->context,
&versionInfo) < 0)
return njsUtils_throwErrorDPI(env, baton->globals);
if (versionInfo.versionNum > 19 ||
(versionInfo.versionNum == 19 && versionInfo.releaseNum >= 5))
versionInfo = &baton->globals->clientVersionInfo;
if (versionInfo->versionNum > 19 ||
(versionInfo->versionNum == 19 && versionInfo->releaseNum >= 5))
baton->sodaOperOptions->fetchArraySize = baton->fetchArraySize;
// process each of the options

View File

@ -41,6 +41,7 @@ describe('244.dataTypeJson.js', function() {
let connection;
let isRunnable = false;
let isOracle_23_4 = false;
const tableName = "nodb_json";
const jsonVals = assist.jsonValues;
@ -53,6 +54,11 @@ describe('244.dataTypeJson.js', function() {
isRunnable = true;
}
// Check if we are running the latest Oracle Server and Client versions
// for vector and long field names support
isOracle_23_4 = connection.oracleServerVersion >= 2304000000
&& (oracledb.thin || oracledb.oracleClientVersion >= 2304000000);
if (!isRunnable) {
this.skip();
}
@ -78,7 +84,7 @@ describe('244.dataTypeJson.js', function() {
this.skip();
}
oracledb.stmtCacheSize = default_stmtCacheSize;
await connection.execute("DROP table " + tableName + " PURGE");
await testsUtil.dropTable(connection, tableName);
}); // after()
it('244.1.1 SELECT query', async function() {
@ -97,6 +103,28 @@ describe('244.dataTypeJson.js', function() {
await assist.verifyRefCursorWithFetchInfo(connection, tableName, jsonVals);
}); // 244.1.4
it('244.1.5 Negative field name length > 255 bytes - Oracle 21c', async function() {
  // Only meaningful in thin mode against servers below 23.4: from 23.4 on,
  // long field names are supported, and in thick mode no client-side error
  // is raised for this case (see comment below).
  if (isOracle_23_4 || !oracledb.thin) {
    this.skip();
  }
  // The server does not throw an error for out-of-bounds field length
  // names as of now.
  const sequence = 1;
  // 256 bytes exceeds the pre-23.4 OSON field-name limit of 255 bytes
  const longFieldName = 'A'.repeat(256);
  const jsonVal = {};
  jsonVal[longFieldName] = "2018/11/01 18:30:00";
  const sql = "insert into " + tableName + " ( id, content ) values (:i, :c)";
  const binds = [
    { val: sequence, type: oracledb.NUMBER, dir: oracledb.BIND_IN },
    { val: jsonVal, type: oracledb.DB_TYPE_JSON, dir: oracledb.BIND_IN }
  ];
  // The OSON encoder is expected to reject the bind before it is sent
  await assert.rejects(
    async () => await connection.execute(sql, binds),
    // NJS-114: OSON field names may not exceed %d UTF-8 encoded bytes
    /NJS-114:/
  );
}); // 244.1.5
}); // 244.1
describe('244.2 stores null value correctly', function() {
@ -112,40 +140,26 @@ describe('244.dataTypeJson.js', function() {
before('create table, insert data', async function() {
if (!isRunnable) {
this.skip();
return;
}
oracledb.fetchAsBuffer = [oracledb.BLOB];
const proc = "BEGIN \n" +
" DECLARE \n" +
" e_table_missing EXCEPTION; \n" +
" PRAGMA EXCEPTION_INIT(e_table_missing, -00942); \n" +
" BEGIN \n" +
" EXECUTE IMMEDIATE('DROP TABLE " + tableName + " PURGE'); \n" +
" EXCEPTION \n" +
" WHEN e_table_missing \n" +
" THEN NULL; \n" +
" END; \n" +
" EXECUTE IMMEDIATE (' \n" +
" CREATE TABLE " + tableName + " ( \n" +
" id NUMBER, \n" +
" content JSON, \n" +
" osonCol BLOB, \n" +
" constraint Oson_ck_1 check (OsonCol is json format oson)" +
" ) \n" +
" '); \n" +
"END; ";
await connection.execute(proc);
const sql = " CREATE TABLE " + tableName +
" ( \n" +
" id NUMBER, \n" +
" content JSON, \n" +
" osonCol BLOB, \n" +
" constraint Oson_ck_1 check (OsonCol is json format oson)" +
" )";
await testsUtil.createTable(connection, tableName, sql);
}); // before()
after(async function() {
if (!isRunnable) {
this.skip();
return;
}
oracledb.fetchAsBuffer = [];
oracledb.stmtCacheSize = default_stmtCacheSize;
await connection.execute("DROP table " + tableName + " PURGE");
await testsUtil.dropTable(connection, tableName);
}); // after()
it('244.3.1 works with executeMany()', async function() {
@ -205,7 +219,9 @@ describe('244.dataTypeJson.js', function() {
[18, jsonVal18]
];
if (connection.oracleServerVersion >= 2304000000) {
// Inserting TypedArrays is only allowed with Oracle Database 23c and
// Oracle Client 23c versions and above
if (isOracle_23_4) {
binds.push([19, jsonVal19]);
binds.push([20, jsonVal20]);
}
@ -256,7 +272,6 @@ describe('244.dataTypeJson.js', function() {
before('create table, insert data', async function() {
if (!isRunnable) {
this.skip();
return;
}
oracledb.stmtCacheSize = 0;
await assist.setUp(connection, tableName, jsonVals);
@ -267,12 +282,11 @@ describe('244.dataTypeJson.js', function() {
after(async function() {
if (!isRunnable) {
this.skip();
return;
}
oracledb.stmtCacheSize = default_stmtCacheSize;
await connection.execute(drop_proc_in);
await connection.execute(drop_proc_out);
await connection.execute("DROP table " + tableName + " PURGE");
await testsUtil.dropTable(connection, tableName);
}); // after()
it('244.4.1 bind by name', async function() {
@ -332,7 +346,6 @@ describe('244.dataTypeJson.js', function() {
before('create table, insert data', async function() {
if (!isRunnable) {
this.skip();
return;
}
oracledb.stmtCacheSize = 0;
await assist.setUp(connection, tableName, jsonVals);
@ -342,11 +355,10 @@ describe('244.dataTypeJson.js', function() {
after(async function() {
if (!isRunnable) {
this.skip();
return;
}
oracledb.stmtCacheSize = default_stmtCacheSize;
await connection.execute(proc_drop);
await connection.execute("DROP table " + tableName + " PURGE");
await testsUtil.dropTable(connection, tableName);
}); // after()
it('244.5.1 bind by name', async function() {
@ -410,7 +422,6 @@ describe('244.dataTypeJson.js', function() {
before('create table, insert data', async function() {
if (!isRunnable) {
this.skip();
return;
}
oracledb.stmtCacheSize = 0;
await assist.setUp(connection, tableName, jsonVals);
@ -421,12 +432,11 @@ describe('244.dataTypeJson.js', function() {
after(async function() {
if (!isRunnable) {
this.skip();
return;
}
oracledb.stmtCacheSize = default_stmtCacheSize;
await connection.execute(drop_proc_in);
await connection.execute(drop_proc_out);
await connection.execute("DROP table " + tableName + " PURGE");
await testsUtil.dropTable(connection, tableName);
}); // after()
it('244.6.1 bind by name', async function() {
@ -495,7 +505,6 @@ describe('244.dataTypeJson.js', function() {
before('create table, insert data', async function() {
if (!isRunnable) {
this.skip();
return;
}
oracledb.stmtCacheSize = 0;
await assist.setUp(connection, tableName, jsonVals);
@ -505,11 +514,10 @@ describe('244.dataTypeJson.js', function() {
after(async function() {
if (!isRunnable) {
this.skip();
return;
}
oracledb.stmtCacheSize = default_stmtCacheSize;
await connection.execute(proc_drop);
await connection.execute("DROP table " + tableName + " PURGE");
await testsUtil.dropTable(connection, tableName);
}); // after()
it('244.7.1 bind by name', async function() {
@ -551,35 +559,20 @@ describe('244.dataTypeJson.js', function() {
before('create table, insert data', async function() {
if (!isRunnable) {
this.skip();
return;
}
const proc = "BEGIN \n" +
" DECLARE \n" +
" e_table_missing EXCEPTION; \n" +
" PRAGMA EXCEPTION_INIT(e_table_missing, -00942); \n" +
" BEGIN \n" +
" EXECUTE IMMEDIATE('DROP TABLE " + tableName + " PURGE'); \n" +
" EXCEPTION \n" +
" WHEN e_table_missing \n" +
" THEN NULL; \n" +
" END; \n" +
" EXECUTE IMMEDIATE (' \n" +
" CREATE TABLE " + tableName + " ( \n" +
" num NUMBER, \n" +
" content JSON \n" +
" ) \n" +
" '); \n" +
"END; ";
await connection.execute(proc);
const sql = " CREATE TABLE " + tableName + " ( \n" +
" num NUMBER, \n" +
" content JSON \n" +
" )";
await testsUtil.createTable(connection, tableName, sql);
}); // before()
after(async function() {
if (!isRunnable) {
this.skip();
return;
}
oracledb.stmtCacheSize = default_stmtCacheSize;
await connection.execute("DROP table " + tableName + " PURGE");
await testsUtil.dropTable(connection, tableName);
}); // after()
it('244.8.1 bind by name', async function() {
@ -624,107 +617,310 @@ describe('244.dataTypeJson.js', function() {
describe('244.9 testing JSON with oracledb.fetchAsString and fetchInfo oracledb.STRING', function() {
let sequence = 1;
before('create table, insert data', async function() {
if (!isRunnable) {
this.skip();
return;
}
const proc = "BEGIN \n" +
" DECLARE \n" +
" e_table_missing EXCEPTION; \n" +
" PRAGMA EXCEPTION_INIT(e_table_missing, -00942); \n" +
" BEGIN \n" +
" EXECUTE IMMEDIATE('DROP TABLE " + tableName + " PURGE'); \n" +
" EXCEPTION \n" +
" WHEN e_table_missing \n" +
" THEN NULL; \n" +
" END; \n" +
" EXECUTE IMMEDIATE (' \n" +
" CREATE TABLE " + tableName + " ( \n" +
" id NUMBER, \n" +
" content JSON \n" +
" ) \n" +
" '); \n" +
"END; ";
await connection.execute(proc);
const sql = " CREATE TABLE " + tableName + " ( \n" +
" id NUMBER, \n" +
" content JSON \n" +
" )";
await testsUtil.createTable(connection, tableName, sql);
}); // before()
after(async function() {
if (!isRunnable) {
this.skip();
}
oracledb.stmtCacheSize = default_stmtCacheSize;
oracledb.fetchAsString = [];
await testsUtil.dropTable(connection, tableName);
}); // after()
// Inserts a JSON document under the given id, fetches it back as a string
// and asserts it matches the expected serialized form. selectOpts is passed
// through to the SELECT call (e.g. a fetchInfo setting).
const testInsertAndFetch = async function(seq, jsonVal, resultStr, selectOpts) {
  const insertSql =
    "insert into " + tableName + " ( id, content ) values (:i, :c)";
  await connection.execute(insertSql, [
    { val: seq, type: oracledb.NUMBER, dir: oracledb.BIND_IN },
    { val: jsonVal, type: oracledb.DB_TYPE_JSON, dir: oracledb.BIND_IN }
  ]);
  const querySql =
    "select content as C from " + tableName + " where id = " + seq;
  const result = await connection.execute(querySql, [], selectOpts);
  const fetched = result.rows[0][0];
  assert.strictEqual(typeof fetched, 'string');
  assert.strictEqual(fetched.length, resultStr.length);
  assert.strictEqual(fetched, resultStr);
};
it('244.9.1 works with oracledb.fetchAsString', async function() {
  // Force JSON columns to be fetched back as strings
  oracledb.fetchAsString = [ oracledb.DB_TYPE_JSON ];
  const jsonVals = [{ "key5": "2018/11/01 18:30:00" }];
  const resultStr = ["{\"key5\":\"2018/11/01 18:30:00\"}"];
  // Add the JSON Field with Long Field Name to the JSON Values Array
  // for Oracle DB 23.4 (and Oracle Client 23.4)
  if (isOracle_23_4) {
    // 1000-byte name exercises the > 255 byte field-name path
    const longFieldName = 'A'.repeat(1000);
    const jsonVal = {};
    jsonVal[longFieldName] = "2018/11/01 18:30:00";
    jsonVals.push(jsonVal);
    resultStr.push(`{"${longFieldName}":"2018/11/01 18:30:00"}`);
  }
  // Insert each value and verify its string serialization on fetch
  for (let i = 1; i <= jsonVals.length; i++) {
    await testInsertAndFetch(sequence, jsonVals[i - 1], resultStr[i - 1], {});
    sequence++;
  }
}); // 244.9.1
it('244.9.2 could work with fetchInfo oracledb.STRING', async function() {
  // Clear the global fetchAsString setting so only the per-query fetchInfo
  // drives the string conversion of the JSON column
  oracledb.fetchAsString = [];
  const value = { "key5": "2018/11/01 18:30:00" };
  const expected = "{\"key5\":\"2018/11/01 18:30:00\"}";
  // Test Insert and Fetch of JSON Data
  await testInsertAndFetch(sequence, value, expected,
    { fetchInfo: { C: { type: oracledb.STRING } } });
  sequence++;
}); // 244.9.2
}); // 244.9
describe('244.10 testing JSON with long field names > 255 bytes', function() {
const table = 'nodb_json_long';
let sequence = 1;
before('create table, insert data', async function() {
if (!isOracle_23_4) {
this.skip();
}
const sql = " CREATE TABLE " + table + " ( \n" +
" id NUMBER, \n" +
" content JSON \n" +
" )";
await testsUtil.createTable(connection, table, sql);
}); // before()
after(async function() {
if (!isOracle_23_4) {
return;
}
oracledb.stmtCacheSize = default_stmtCacheSize;
oracledb.fetchAsString = [];
await connection.execute("DROP table " + tableName + " PURGE");
await testsUtil.dropTable(connection, table);
}); // after()
it('244.9.1 works with oracledb.fetchAsString', async function() {
it('244.10.1 single long JSON field name', async function() {
oracledb.fetchAsString = [ oracledb.DB_TYPE_JSON ];
const sequence = 1;
const jsonVal = { "key5": "2018/11/01 18:30:00" };
const resultStr = "{\"key5\":\"2018/11/01 18:30:00\"}";
const longFieldName = 'A'.repeat(1000);
const jsonVal = {};
jsonVal[longFieldName] = "2018/11/01 18:30:00";
const resultStr = `{"${longFieldName}":"2018/11/01 18:30:00"}`;
let sql = "insert into " + tableName + " ( id, content ) values (:i, :c)";
let sql = "insert into " + table + " ( id, content ) values (:i, :c)";
const binds = [
{ val: sequence, type: oracledb.NUMBER, dir: oracledb.BIND_IN },
{ val: jsonVal, type: oracledb.DB_TYPE_JSON, dir: oracledb.BIND_IN }
];
await connection.execute(sql, binds);
sql = "select content as C from " + tableName + " where id = " + sequence;
sql = "select content as C from " + table + " where id = " + sequence;
const result = await connection.execute(sql);
assert.strictEqual(typeof result.rows[0][0], 'string');
assert.strictEqual(result.rows[0][0].length, resultStr.length);
assert.strictEqual(result.rows[0][0], resultStr);
}); // 244.9.1
sequence++;
it.skip('244.9.2 doesn\'t work with outFormat: oracledb.DB_TYPE_JSON', async function() {
oracledb.fetchAsString = [ oracledb.DB_TYPE_JSON ];
const sequence = 2;
const jsonVal = { "key5": "2018/11/01 18:30:00" };
const resultStr = "{\"key5\":\"2018/11/01 18:30:00\"}";
}); // 244.10.1
let sql = "insert into " + tableName + " ( id, content ) values (:i, :c)";
const binds = [
{ val: sequence, type: oracledb.NUMBER, dir: oracledb.BIND_IN },
{ val: jsonVal, type: oracledb.DB_TYPE_JSON, dir: oracledb.BIND_IN }
];
await connection.execute(sql, binds);
sql = "select content as C from " + tableName + " where id = " + sequence;
const options = { outFormat: oracledb.DB_TYPE_JSON };
const result = await connection.execute(sql, [], options);
assert.strictEqual(typeof result.rows[0][0], 'string');
assert.strictEqual(result.rows[0][0].length, resultStr.length);
assert.strictEqual(result.rows[0][0], resultStr);
}); // 244.9.2
it('244.9.3 could work with fetchInfo oracledb.STRING', async function() {
it('244.10.2 multiple long JSON field names', async function() {
oracledb.fetchAsString = [];
const sequence = 3;
const jsonVal = { "key5": "2018/11/01 18:30:00" };
const resultStr = "{\"key5\":\"2018/11/01 18:30:00\"}";
let sql = "insert into " + tableName + " ( id, content ) values (:i, :c)";
const jsonVal = {};
const NO_OF_ALPHABETS = 26;
for (let i = 0; i < NO_OF_ALPHABETS; i++) {
for (let j = 0; j < NO_OF_ALPHABETS; j++) {
const longFieldName = String.fromCharCode('A'.charCodeAt(0) + i) +
String.fromCharCode('A'.charCodeAt(0) + j) + 'X'.repeat(500);
jsonVal[longFieldName] = i + j;
}
}
// Testing multi-byte field names and multi-byte field values
const multiByteLongFieldName = '𠜎'.repeat(1000);
jsonVal[multiByteLongFieldName] = '𠜎𠜎𠜎𠜎𠜎';
let sql = "insert into " + table + " ( id, content ) values (:i, :c)";
const binds = [
{ val: sequence, type: oracledb.NUMBER, dir: oracledb.BIND_IN },
{ val: jsonVal, type: oracledb.DB_TYPE_JSON, dir: oracledb.BIND_IN }
];
await connection.execute(sql, binds);
sql = "select content as C from " + tableName + " where id = " + sequence;
const options = {
fetchInfo: { C: { type: oracledb.STRING } }
};
const result = await connection.execute(sql, [], options);
assert.strictEqual(typeof result.rows[0][0], 'string');
assert.strictEqual(result.rows[0][0].length, resultStr.length);
assert.strictEqual(result.rows[0][0], resultStr);
}); // 244.9.3
sql = "select content as C from " + table + " where id = " + sequence;
const result = await connection.execute(sql);
assert.deepStrictEqual(result.rows[0][0], jsonVal);
sequence++;
}); // 244.9
}); // 244.10.2
describe('244.10 Verify auto-generated SODA document key', function() {
it('244.10.3 multiple long and short JSON field names', async function() {
  oracledb.fetchAsString = [];
  // Build a document mixing 2-byte field names with 256-byte field names
  // (2 + 254 bytes, just over the 255-byte short-name limit), one pair per
  // two-letter prefix AA..ZZ.
  const NO_OF_ALPHABETS = 26;
  const baseCode = 'A'.charCodeAt(0);
  const jsonVal = {};
  for (let first = 0; first < NO_OF_ALPHABETS; first++) {
    for (let second = 0; second < NO_OF_ALPHABETS; second++) {
      const prefix = String.fromCharCode(baseCode + first) +
        String.fromCharCode(baseCode + second);
      jsonVal[prefix] = 6.75;
      jsonVal[prefix + 'X'.repeat(254)] = first + second;
    }
  }
  const insertSql = "insert into " + table + " ( id, content ) values (:i, :c)";
  await connection.execute(insertSql, [
    { val: sequence, type: oracledb.NUMBER, dir: oracledb.BIND_IN },
    { val: jsonVal, type: oracledb.DB_TYPE_JSON, dir: oracledb.BIND_IN }
  ]);
  // Round-trip: the fetched document must equal what was inserted
  const querySql = "select content as C from " + table + " where id = " + sequence;
  const result = await connection.execute(querySql);
  assert.deepStrictEqual(result.rows[0][0], jsonVal);
  sequence++;
}); // 244.10.3
it('244.10.4 negative case for out-of-bounds field length names', async function() {
  // Thin mode only, for the reason below:
  // The server does not throw an error for out-of-bounds field length
  // names as of now.
  if (!oracledb.thin)
    this.skip();
  // 65536 bytes is expected to exceed the encoder's maximum field-name
  // length and trigger NJS-114 client-side
  const longFieldName = 'A'.repeat(65536);
  const jsonVal = {};
  jsonVal[longFieldName] = "2018/11/01 18:30:00";
  const sql = "insert into " + table + " ( id, content ) values (:i, :c)";
  const binds = [
    { val: sequence, type: oracledb.NUMBER, dir: oracledb.BIND_IN },
    { val: jsonVal, type: oracledb.DB_TYPE_JSON, dir: oracledb.BIND_IN }
  ];
  await assert.rejects(
    async () => await connection.execute(sql, binds),
    // NJS-114: OSON field names may not exceed %d UTF-8 encoded bytes
    /NJS-114:/
  );
  sequence++;
}); // 244.10.4
}); // 244.10
describe('244.11 testing compressed JSON with relative offsets', function() {
  // Relative offsets enable the offset values in the OSON format to be
  // much smaller and also allow for repeated values, which lends itself
  // well to compression.
  const table = 'nodb_json_rel_offsets';
  let sequence = 1;

  before('create table, insert data', async function() {
    // Requires Oracle DB 23.4 (and, in thick mode, Oracle Client 23.4)
    if (!isOracle_23_4) {
      this.skip();
    }
    // COMPRESS HIGH stores the JSON column in compressed form
    const sql = " CREATE TABLE " + table + " ( \n" +
      " id NUMBER, \n" +
      " content JSON \n" +
      " ) JSON (content) STORE AS (COMPRESS HIGH)";
    await testsUtil.createTable(connection, table, sql);
  }); // before()

  after(async function() {
    if (!isOracle_23_4) {
      return;
    }
    // Restore globals modified by these tests, then drop the test table
    oracledb.stmtCacheSize = default_stmtCacheSize;
    oracledb.fetchAsString = [];
    await testsUtil.dropTable(connection, table);
  }); // after()

  it('244.11.1 fetch JSON with relative offsets', async function() {
    // Document with a long (1000-byte) field name plus array values
    const longFieldName = 'A'.repeat(1000);
    const jsonVal = {};
    jsonVal[longFieldName] = "2018/11/01 18:30:00";
    jsonVal['num_list'] = [1.5, 2.25, 3.75, 5.5];
    jsonVal['str_list'] = ["string 1", "string 2"];
    // Send a JSON string, which is converted and stored as compressed JSON
    // by the database
    let sql = "insert into " + table + " ( id, content ) values (:i, :c)";
    const binds = [
      { val: sequence, type: oracledb.NUMBER, dir: oracledb.BIND_IN },
      { val: JSON.stringify(jsonVal) }
    ];
    await connection.execute(sql, binds);
    // Fetching must decode the stored OSON back to the original document
    sql = "select content as C from " + table + " where id = " + sequence;
    const result = await connection.execute(sql);
    assert.deepStrictEqual(result.rows[0][0], jsonVal);
    sequence++;
  }); // 244.11.1

  it('244.11.2 fetch JSON with relative offsets and shared fields and values', async function() {
    // Identical repeated objects: both field names and values repeat
    const jsonVal = [];
    for (let i = 0; i < 15; i++) {
      jsonVal.push({a: 6711, b: 'String value'});
    }
    // Send a JSON string, which is converted and stored as compressed JSON
    // by the database
    let sql = "insert into " + table + " ( id, content ) values (:i, :c)";
    const binds = [
      { val: sequence, type: oracledb.NUMBER, dir: oracledb.BIND_IN },
      { val: JSON.stringify(jsonVal) }
    ];
    await connection.execute(sql, binds);
    sql = "select content as C from " + table + " where id = " + sequence;
    const result = await connection.execute(sql);
    assert.deepStrictEqual(result.rows[0][0], jsonVal);
    sequence++;
  }); // 244.11.2

  it('244.11.3 fetch JSON with relative offsets and shared fields, not values', async function() {
    // Repeated field names but distinct values in every element
    const jsonVal = [];
    for (let i = 0; i < 15; i++) {
      jsonVal.push({a: 6711 + i, b: 'String value ' + i});
    }
    // Send a JSON string, which is converted and stored as compressed JSON
    // by the database
    let sql = "insert into " + table + " ( id, content ) values (:i, :c)";
    const binds = [
      { val: sequence, type: oracledb.NUMBER, dir: oracledb.BIND_IN },
      { val: JSON.stringify(jsonVal) }
    ];
    await connection.execute(sql, binds);
    sql = "select content as C from " + table + " where id = " + sequence;
    const result = await connection.execute(sql);
    assert.deepStrictEqual(result.rows[0][0], jsonVal);
    sequence++;
  }); // 244.11.3
}); // 244.11
describe('244.12 Verify auto-generated SODA document key', function() {
const TABLE = 'nodb_244_63soda';
let supportsJsonId;
@ -746,7 +942,7 @@ describe('244.dataTypeJson.js', function() {
await connection.execute(sql);
}); // after()
it('244.10.1 Verify Json Id on select', async function() {
it('244.12.1 Verify Json Id on select', async function() {
const inpDoc = {"name": "Jenny"};
let sql = ` insert into ${TABLE} values (:1)`;
let result = await connection.execute(sql, [{
@ -826,7 +1022,8 @@ describe('244.dataTypeJson.js', function() {
result = await connection.execute(sql);
genDoc = result.rows[2][0];
assert.deepStrictEqual(genDoc, inpDocWithUserKey);
});
});
}); // 244.12.1
}); // 244.12
});

View File

@ -4884,6 +4884,7 @@ oracledb.OUT_FORMAT_OBJECT and resultSet = true
244.1.2 resultSet stores JSON data correctly
244.1.3 works well with REF Cursor
244.1.4 columns fetched from REF CURSORS can be mapped by fetchInfo settings
244.1.5 Negative field name length > 255 bytes - Oracle 21c
244.2 stores null value correctly
244.2.1 testing Null, Empty string and Undefined
244.3 testing JSON with executeMany()
@ -4905,10 +4906,18 @@ oracledb.OUT_FORMAT_OBJECT and resultSet = true
244.8.2 bind by position
244.9 testing JSON with oracledb.fetchAsString and fetchInfo oracledb.STRING
244.9.1 works with oracledb.fetchAsString
244.9.2 doesn't work with outFormat: oracledb.DB_TYPE_JSON
244.9.3 could work with fetchInfo oracledb.STRING
244.10 Verify auto-generated SODA document key
244.10.1 Verify Json Id on select
244.9.2 could work with fetchInfo oracledb.STRING
244.10 testing JSON with long field names > 255 bytes
244.10.1 single long JSON field name
244.10.2 multiple long JSON field names
244.10.3 multiple long and short JSON field names
244.10.4 negative case for out-of-bounds field length names
244.11 testing compressed JSON with relative offsets
244.11.1 fetch JSON with relative offsets
244.11.2 fetch JSON with relative offsets and shared fields and values
244.11.3 fetch JSON with relative offsets and shared fields, not values
244.12 Verify auto-generated SODA document key
244.12.1 Verify Json Id on select
245. fetchLobAsStrBuf.js
245.1 CLOB,BLOB Insert