Skip to content

Commit

Permalink
#76 #126 - Results (rows and column metadata) are now simple arrays
Browse files Browse the repository at this point in the history
Optionally key-value collections (useColumnNames)
  • Loading branch information
patriksimek committed Mar 31, 2014
1 parent e864ec7 commit 5e51de9
Show file tree
Hide file tree
Showing 7 changed files with 37 additions and 60 deletions.
4 changes: 3 additions & 1 deletion README.md
Expand Up @@ -24,7 +24,8 @@ Current version: 0.1.5
- Added option to choose whether to pass/receive times in UTC or local time (`useUTC`) - Added option to choose whether to pass/receive times in UTC or local time (`useUTC`)
- Binary, VarBinary and Image are now supported as input parameters - Binary, VarBinary and Image are now supported as input parameters
- Binary, VarBinary and Image types are now returned as Buffer (was Array) - Binary, VarBinary and Image types are now returned as Buffer (was Array)
- Connection errors are now correctly propagated to `connect` event. - Connection errors are now correctly propagated to `connect` event
- Better support for numeric column names and columns with same name
- Errors are now instanceof Error / ConnectionError / RequestError (was plain text) - Errors are now instanceof Error / ConnectionError / RequestError (was plain text)
- Transaction isolationLevel default is now `READ_COMMITED` (was `READ_UNCOMMITED`) - Transaction isolationLevel default is now `READ_COMMITED` (was `READ_UNCOMMITED`)
- Fixed issue when zero value was casted as null when using BigInt as input parameter - Fixed issue when zero value was casted as null when using BigInt as input parameter
Expand All @@ -38,6 +39,7 @@ Current version: 0.1.5
- There was a change in default transaction isolationLevel from `READ_UNCOMMITED` to `READ_COMMITED`. You can disable this by `options.isolationLevel = require('tedious').ISOLATION_LEVEL.READ_UNCOMMITTED`. - There was a change in default transaction isolationLevel from `READ_UNCOMMITED` to `READ_COMMITED`. You can disable this by `options.isolationLevel = require('tedious').ISOLATION_LEVEL.READ_UNCOMMITTED`.
- Binary values are now returned in Buffers. - Binary values are now returned in Buffers.
- All error values are no longer strings, but instances of Error. - All error values are no longer strings, but instances of Error.
- Results (rows and column metadata) are now simple arrays. You can change this to key-value collections by `options.useColumnNames = true`.


<a name="documentation" /> <a name="documentation" />
## Documentation ## Documentation
Expand Down
9 changes: 8 additions & 1 deletion src/connection.coffee
Expand Up @@ -201,6 +201,7 @@ class Connection extends EventEmitter
@config.options.encrypt ||= false @config.options.encrypt ||= false
@config.options.cryptoCredentialsDetails ||= {} @config.options.cryptoCredentialsDetails ||= {}
@config.options.useUTC ?= true @config.options.useUTC ?= true
@config.options.useColumnNames ?= false


if !@config.options.port && !@config.options.instanceName if !@config.options.port && !@config.options.instanceName
@config.options.port = DEFAULT_PORT @config.options.port = DEFAULT_PORT
Expand Down Expand Up @@ -269,7 +270,13 @@ class Connection extends EventEmitter
) )
@tokenStreamParser.on('columnMetadata', (token) => @tokenStreamParser.on('columnMetadata', (token) =>
if @request if @request
@request.emit('columnMetadata', token.columns) if @config.options.useColumnNames
columns = {}
columns[col.colName] = col for col in token.columns when not columns[col.colName]?
else
columns = token.columns

@request.emit('columnMetadata', columns)
else else
@emit 'error', new Error "Received 'columnMetadata' when no sqlRequest is in progress" @emit 'error', new Error "Received 'columnMetadata' when no sqlRequest is in progress"
@close() @close()
Expand Down
5 changes: 0 additions & 5 deletions src/token/colmetadata-token-parser.coffee
Expand Up @@ -2,8 +2,6 @@


metadataParse = require('../metadata-parser') metadataParse = require('../metadata-parser')


DIGITS_REGEX = /^\d+$/

parser = (buffer, colMetadata, options) -> parser = (buffer, colMetadata, options) ->
columnCount = buffer.readUInt16LE() columnCount = buffer.readUInt16LE()


Expand Down Expand Up @@ -37,9 +35,6 @@ parser = (buffer, colMetadata, options) ->


columns.push(column) columns.push(column)


if !(DIGITS_REGEX.test(column.colName))
columns[column.colName] = column

# Return token # Return token
name: 'COLMETADATA' name: 'COLMETADATA'
event: 'columnMetadata' event: 'columnMetadata'
Expand Down
22 changes: 6 additions & 16 deletions src/token/nbcrow-token-parser.coffee
Expand Up @@ -3,8 +3,6 @@
valueParse = require('../value-parser') valueParse = require('../value-parser')
sprintf = require('sprintf').sprintf sprintf = require('sprintf').sprintf


DIGITS_REGEX = /^\d+$/

parser = (buffer, columnsMetaData, options) -> parser = (buffer, columnsMetaData, options) ->
length = Math.ceil columnsMetaData.length / 8 length = Math.ceil columnsMetaData.length / 8
bytes = buffer.readBuffer length bytes = buffer.readBuffer length
Expand All @@ -14,7 +12,7 @@ parser = (buffer, columnsMetaData, options) ->
for i in [0..7] for i in [0..7]
bitmap.push if byte & (1 << i) then true else false bitmap.push if byte & (1 << i) then true else false


columns = [] columns = if options.useColumnNames then {} else []
for columnMetaData, index in columnsMetaData for columnMetaData, index in columnsMetaData
#console.log sprintf('Token @ 0x%02X', buffer.position) #console.log sprintf('Token @ 0x%02X', buffer.position)


Expand All @@ -27,23 +25,15 @@ parser = (buffer, columnsMetaData, options) ->
value: value value: value
metadata: columnMetaData metadata: columnMetaData


columns.push(column) if options.useColumnNames

unless columns[columnMetaData.colName]?
if !(DIGITS_REGEX.test(columnMetaData.colName)) columns[columnMetaData.colName] = column
saveColumn(columnMetaData.colName, columns, column) else
columns.push(column)


# Return token # Return token
name: 'NBCROW' name: 'NBCROW'
event: 'row' event: 'row'
columns: columns columns: columns


saveColumn = (columnName, columns, value) ->
entry = columns[columnName]
if !entry
columns[columnName] = value;
else if Array.isArray(entry)
entry.push(value)
else
columns[columnName] = [entry, value]

module.exports = parser module.exports = parser
24 changes: 7 additions & 17 deletions src/token/row-token-parser.coffee
Expand Up @@ -3,10 +3,8 @@
valueParse = require('../value-parser') valueParse = require('../value-parser')
sprintf = require('sprintf').sprintf sprintf = require('sprintf').sprintf


DIGITS_REGEX = /^\d+$/

parser = (buffer, columnsMetaData, options) -> parser = (buffer, columnsMetaData, options) ->
columns = [] columns = if options.useColumnNames then {} else []
for columnMetaData in columnsMetaData for columnMetaData in columnsMetaData
#console.log sprintf('Token @ 0x%02X', buffer.position) #console.log sprintf('Token @ 0x%02X', buffer.position)


Expand All @@ -15,24 +13,16 @@ parser = (buffer, columnsMetaData, options) ->
column = column =
value: value value: value
metadata: columnMetaData metadata: columnMetaData


columns.push(column) if options.useColumnNames

unless columns[columnMetaData.colName]?
if !(DIGITS_REGEX.test(columnMetaData.colName)) columns[columnMetaData.colName] = column
saveColumn(columnMetaData.colName, columns, column) else
columns.push(column)


# Return token # Return token
name: 'ROW' name: 'ROW'
event: 'row' event: 'row'
columns: columns columns: columns


saveColumn = (columnName, columns, value) ->
entry = columns[columnName]
if !entry
columns[columnName] = value;
else if Array.isArray(entry)
entry.push(value)
else
columns[columnName] = [entry, value]

module.exports = parser module.exports = parser
31 changes: 13 additions & 18 deletions test/integration/connection-test.coffee
Expand Up @@ -184,7 +184,7 @@ exports.encrypt = (test) ->
) )


exports.execSql = (test) -> exports.execSql = (test) ->
test.expect(8) test.expect(7)


config = getConfig() config = getConfig()


Expand All @@ -207,7 +207,6 @@ exports.execSql = (test) ->
request.on('row', (columns) -> request.on('row', (columns) ->
test.strictEqual(columns.length, 1) test.strictEqual(columns.length, 1)
test.strictEqual(columns[0].value, 8) test.strictEqual(columns[0].value, 8)
test.strictEqual(columns.C1.value, 8)
) )


connection = new Connection(config) connection = new Connection(config)
Expand All @@ -232,6 +231,7 @@ exports.numericColumnName = (test) ->
test.expect(5) test.expect(5)


config = getConfig() config = getConfig()
config.options.useColumnNames = true


request = new Request('select 8 as [123]', (err, rowCount) -> request = new Request('select 8 as [123]', (err, rowCount) ->
test.ok(!err) test.ok(!err)
Expand All @@ -241,12 +241,12 @@ exports.numericColumnName = (test) ->
) )


request.on('columnMetadata', (columnsMetadata) -> request.on('columnMetadata', (columnsMetadata) ->
test.strictEqual(columnsMetadata.length, 1) test.strictEqual(Object.keys(columnsMetadata).length, 1)
) )


request.on('row', (columns) -> request.on('row', (columns) ->
test.strictEqual(columns.length, 1) test.strictEqual(Object.keys(columns).length, 1)
test.strictEqual(columns[0].value, 8) test.strictEqual(columns[123].value, 8)
) )


connection = new Connection(config) connection = new Connection(config)
Expand All @@ -268,30 +268,26 @@ exports.numericColumnName = (test) ->
) )


exports.duplicateColumnNames = (test) -> exports.duplicateColumnNames = (test) ->
test.expect(10) test.expect(6)


config = getConfig() config = getConfig()
config.options.useColumnNames = true


request = new Request('select 1 as abc, 2 as xyz, 3 as abc', (err, rowCount) -> request = new Request('select 1 as abc, 2 as xyz, \'3\' as abc', (err, rowCount) ->
test.ok(!err) test.ok(!err)
test.strictEqual(rowCount, 1) test.strictEqual(rowCount, 1)


connection.close() connection.close()
) )


request.on('columnMetadata', (columnsMetadata) -> request.on('columnMetadata', (columnsMetadata) ->
test.strictEqual(columnsMetadata.length, 3) test.strictEqual(Object.keys(columnsMetadata).length, 2)
) )


request.on('row', (columns) -> request.on('row', (columns) ->
test.strictEqual(columns.length, 3) test.strictEqual(Object.keys(columns).length, 2)


test.strictEqual(columns[0].value, 1) test.strictEqual(columns.abc.value, 1)
test.strictEqual(columns[1].value, 2)
test.strictEqual(columns[2].value, 3)

test.strictEqual(columns.abc[0].value, 1)
test.strictEqual(columns.abc[1].value, 3)
test.strictEqual(columns.xyz.value, 2) test.strictEqual(columns.xyz.value, 2)
) )


Expand All @@ -317,7 +313,7 @@ exports.execSqlMultipleTimes = (test) ->
timesToExec = 5 timesToExec = 5
sqlExecCount = 0 sqlExecCount = 0


test.expect(timesToExec * 8) test.expect(timesToExec * 7)


config = getConfig() config = getConfig()


Expand Down Expand Up @@ -346,7 +342,6 @@ exports.execSqlMultipleTimes = (test) ->
request.on('row', (columns) -> request.on('row', (columns) ->
test.strictEqual(columns.length, 1) test.strictEqual(columns.length, 1)
test.strictEqual(columns[0].value, 8) test.strictEqual(columns[0].value, 8)
test.strictEqual(columns.C1.value, 8)
) )


connection.execSql(request) connection.execSql(request)
Expand Down Expand Up @@ -800,7 +795,7 @@ exports.cancelRequest = (test) ->


request.on('row', (columns) -> request.on('row', (columns) ->
test.strictEqual(columns.length, 1) test.strictEqual(columns.length, 1)
test.strictEqual(columns.C1.value, 1) test.strictEqual(columns[0].value, 1)
) )


connection = new Connection(config) connection = new Connection(config)
Expand Down
2 changes: 0 additions & 2 deletions test/unit/token/colmetadata-token-parser-test.coffee
Expand Up @@ -27,7 +27,6 @@ module.exports.int = (test) ->
test.strictEqual(token.columns[0].flags, 3) test.strictEqual(token.columns[0].flags, 3)
test.strictEqual(token.columns[0].type.name, 'Int') test.strictEqual(token.columns[0].type.name, 'Int')
test.strictEqual(token.columns[0].colName, 'name') test.strictEqual(token.columns[0].colName, 'name')
test.strictEqual(token.columns.name.colName, 'name')


test.done() test.done()


Expand Down Expand Up @@ -64,7 +63,6 @@ module.exports.varchar = (test) ->
test.strictEqual(token.columns[0].collation.version, 0x8) test.strictEqual(token.columns[0].collation.version, 0x8)
test.strictEqual(token.columns[0].collation.sortId, 0x9a) test.strictEqual(token.columns[0].collation.sortId, 0x9a)
test.strictEqual(token.columns[0].colName, 'name') test.strictEqual(token.columns[0].colName, 'name')
test.strictEqual(token.columns.name.colName, 'name')
test.strictEqual(token.columns[0].dataLength, length) test.strictEqual(token.columns[0].dataLength, length)


test.done() test.done()

0 comments on commit 5e51de9

Please sign in to comment.