diff --git a/.jshintrc b/.jshintrc new file mode 100644 index 00000000..c6c11efc --- /dev/null +++ b/.jshintrc @@ -0,0 +1,5 @@ +{ + "trailing": true, + "indent": 2, + "evil": true +} diff --git a/.travis.yml b/.travis.yml new file mode 100644 index 00000000..6e4be29a --- /dev/null +++ b/.travis.yml @@ -0,0 +1,6 @@ +language: node_js +node_js: + - 0.8 + - "0.10" +before_script: + - node script/create-test-tables.js pg://postgres@127.0.0.1:5432/postgres diff --git a/Makefile b/Makefile index 0a3bd679..8407f971 100644 --- a/Makefile +++ b/Makefile @@ -1,45 +1,70 @@ SHELL := /bin/bash -user=postgres -password=1234 -host=localhost -port=5432 -database=postgres -verbose=false +connectionString=postgres:// -params := -u $(user) --password $(password) -p $(port) -d $(database) -h $(host) --verbose $(verbose) +params := $(connectionString) node-command := xargs -n 1 -I file node file $(params) -.PHONY : test test-connection test-integration bench test-native build/default/binding.node +.PHONY : test test-connection test-integration bench test-native \ + build/default/binding.node jshint upgrade-pg publish + +all: + npm install + +help: + @echo "make prepare-test-db [connectionString=postgres://]" + @echo "make test-all [connectionString=postgres://]" + test: test-unit -test-all: test-unit test-integration test-native test-binary +test-all: jshint test-unit test-integration test-native test-binary + +test-travis: test-all upgrade-pg + @make test-all connectionString=postgres://postgres@localhost:5433/postgres + +upgrade-pg: + @chmod 755 script/travis-pg-9.2-install.sh + @./script/travis-pg-9.2-install.sh bench: @find benchmark -name "*-bench.js" | $(node-command) build/default/binding.node: - @node-waf configure build + @node-gyp rebuild test-unit: @find test/unit -name "*-tests.js" | $(node-command) test-connection: + @echo "***Testing connection***" @node script/test-connection.js $(params) test-connection-binary: - @node script/test-connection.js $(params) --binary true + @echo "***Testing binary connection***" + @node script/test-connection.js $(params) binary test-native: build/default/binding.node @echo "***Testing native bindings***" @find test/native -name "*-tests.js" | $(node-command) - @find test/integration -name "*-tests.js" | $(node-command) --native true + @find test/integration -name "*-tests.js" | $(node-command) native -test-integration: test-connection +test-integration: test-connection @echo "***Testing Pure Javascript***" @find test/integration -name "*-tests.js" | $(node-command) test-binary: test-connection-binary @echo "***Testing Pure Javascript (binary)***" - @find test/integration -name "*-tests.js" | $(node-command) --binary true + @find test/integration -name "*-tests.js" | $(node-command) binary + +prepare-test-db: + @echo "***Preparing the database for tests***" + @find script/create-test-tables.js | $(node-command) + +jshint: + @echo "***Starting jshint***" + @./node_modules/.bin/jshint lib + +publish: + @rm -r build || (exit 0) + @npm publish diff --git a/NEWS.md b/NEWS.md new file mode 100644 index 00000000..5edda2aa --- /dev/null +++ b/NEWS.md @@ -0,0 +1,70 @@ +All major and minor releases are briefly explained below. + +For richer information consult the commit log on github with referenced pull requests. + +We do not include break-fix version release in this file. + +### v2.5.0 +- Ability to opt-in to int8 parsing via `pg.defaults.parseInt8 = true` + +### v2.4.0 +- Use eval in the result set parser to increase performance + +### v2.3.0 +- Remove built-in support for binary Int64 parsing. +_Due to the low usage & required compiled dependency this will be pushed into a 3rd party add-on_ + +### v2.2.0 +- [Add support for excapeLiteral and escapeIdentifier in both JavaScript and the native bindings](https://github.com/brianc/node-postgres/pull/396) + +### v2.1.0 +- Add support for SSL connections in JavaScript driver + - this means you can connect to heroku postgres from your local machine without the native bindings! +- [Add field metadata to result object](https://github.com/brianc/node-postgres/blob/master/test/integration/client/row-description-on-results-tests.js) +- [Add ability for rows to be returned as arrays instead of objects](https://github.com/brianc/node-postgres/blob/master/test/integration/client/results-as-array-tests.js) + +### v2.0.0 + +- Properly handle various PostgreSQL to JavaScript type conversions to avoid data loss: + +``` +PostgreSQL | pg@v2.0 JavaScript | pg@v1.0 JavaScript +--------------------------------|---------------- +float4 | number (float) | string +float8 | number (float) | string +int8 | string | number (int) +numeric | string | number (float) +decimal | string | number (float) +``` + +For more information see https://github.com/brianc/node-postgres/pull/353 +If you are unhappy with these changes you can always [override the built in type parsing fairly easily](https://github.com/brianc/node-pg-parse-float). + +### v1.3.0 + +- Make client_encoding configurable and optional + +### v1.2.0 + +- return field metadata on result object: access via result.fields[i].name/dataTypeID + +### v1.1.0 + +- built in support for `JSON` data type for PostgreSQL Server @ v9.2.0 or greater + +### v1.0.0 + +- remove deprecated functionality + - Callback function passed to `pg.connect` now __requires__ 3 arguments + - Client#pauseDrain() / Client#resumeDrain removed + - numeric, decimal, and float data types no longer parsed into float before being returned. Will be returned from query results as `String` + +### v0.15.0 + +- client now emits `end` when disconnected from back-end server +- if client is disconnected in the middle of a query, query receives an error + +### v0.14.0 + +- add deprecation warnings in prep for v1.0 +- fix read/write failures in native module under node v0.9.x diff --git a/README.md b/README.md index 2250af14..d8fafb95 100644 --- a/README.md +++ b/README.md @@ -1,124 +1,68 @@ #node-postgres -Non-blocking PostgreSQL client for node.js. Pure JavaScript and native libpq bindings. Active development, well tested, and production use. +[![Build Status](https://secure.travis-ci.org/brianc/node-postgres.png?branch=master)](http://travis-ci.org/brianc/node-postgres) + +PostgreSQL client for node.js. Pure JavaScript and native libpq bindings. ## Installation npm install pg - + ## Examples -### Simple, using built-in client pool +### Simple - var pg = require('pg'); - //or native libpq bindings - //var pg = require('pg').native +Connect to a postgres instance, run a query, and disconnect. - var conString = "tcp://postgres:1234@localhost/postgres"; +```javascript +var pg = require('pg'); +//or native libpq bindings +//var pg = require('pg').native - //error handling omitted - pg.connect(conString, function(err, client) { - client.query("SELECT NOW() as when", function(err, result) { - console.log("Row count: %d",result.rows.length); // 1 - console.log("Current year: %d", result.rows[0].when.getYear()); - }); - }); +var conString = "postgres://postgres:1234@localhost/postgres"; -### Evented api +var client = new pg.Client(conString); +client.connect(function(err) { + if(err) { + return console.error('could not connect to postgres', err); + } + client.query('SELECT NOW() AS "theTime"', function(err, result) { + if(err) { + return console.error('error running query', err); + } + console.log(result.rows[0].theTime); + //output: Tue Jan 15 2013 19:12:47 GMT-600 (CST) + client.end(); + }); +}); - var pg = require('pg'); //native libpq bindings = `var pg = require('pg').native` - var conString = "tcp://postgres:1234@localhost/postgres"; +``` + +### Client pooling + +Typically you will access the PostgreSQL server through a pool of clients. node-postgres ships with a built in pool to help get you up and running quickly. + +```javascript +var pg = require('pg'); +var conString = "postgres://postgres:1234@localhost/postgres"; + +pg.connect(conString, function(err, client, done) { + if(err) { + return console.error('error fetching client from pool', err); + } + client.query('SELECT $1::int AS numbor', ['1'], function(err, result) { + //call `done()` to release the client back to the pool + done(); - var client = new pg.Client(conString); - client.connect(); + if(err) { + return console.error('error running query', err); + } + console.log(result.rows[0].numbor); + //output: 1 + }); +}); - //queries are queued and executed one after another once the connection becomes available - client.query("CREATE TEMP TABLE beatles(name varchar(10), height integer, birthday timestamptz)"); - client.query("INSERT INTO beatles(name, height, birthday) values($1, $2, $3)", ['Ringo', 67, new Date(1945, 11, 2)]); - client.query("INSERT INTO beatles(name, height, birthday) values($1, $2, $3)", ['John', 68, new Date(1944, 10, 13)]); - - //queries can be executed either via text/parameter values passed as individual arguments - //or by passing an options object containing text, (optional) parameter values, and (optional) query name - client.query({ - name: 'insert beatle', - text: "INSERT INTO beatles(name, height, birthday) values($1, $2, $3)", - values: ['George', 70, new Date(1946, 02, 14)] - }); - - //subsequent queries with the same name will be executed without re-parsing the query plan by postgres - client.query({ - name: 'insert beatle', - values: ['Paul', 63, new Date(1945, 04, 03)] - }); - var query = client.query("SELECT * FROM beatles WHERE name = $1", ['John']); - - //can stream row results back 1 at a time - query.on('row', function(row) { - console.log(row); - console.log("Beatle name: %s", row.name); //Beatle name: John - console.log("Beatle birth year: %d", row.birthday.getYear()); //dates are returned as javascript dates - console.log("Beatle height: %d' %d\"", Math.floor(row.height/12), row.height%12); //integers are returned as javascript ints - }); - - //fired after last row is emitted - query.on('end', function() { - client.end(); - }); - -### Example notes - -node-postgres supports both an 'event emitter' style API and a 'callback' style. The callback style is more concise and generally preferred, but the evented API can come in handy. They can be mixed and matched. The only events which do __not__ fire when callbacks are supplied are the `error` events, as they are to be handled by the callback function. - -All examples will work with the pure javascript bindings (currently default) or the libpq native (c/c++) bindings (currently in beta) - -To use native libpq bindings replace `require('pg')` with `require('pg').native`. - -The two share the same interface so __no other code changes should be required__. If you find yourself having to change code other than the require statement when switching from `pg` to `pg.native`, please report an issue. - -### Info - -* pure javascript client and native libpq bindings share _the same api_ -* _heavily_ tested - * the same suite of 200+ integration tests passed by both javascript & libpq bindings - * benchmark & long-running memory leak tests performed before releases - * tested with with - * postgres 8.x, 9.x - * Linux, OS X - * node 2.x & 4.x -* row-by-row result streaming -* built-in (optional) connection pooling -* responsive project maintainer -* supported PostgreSQL features - * parameterized queries - * named statements with query plan caching - * async notifications - * extensible js<->postgresql data-type coercion -* query queue -* active development -* fast -* close mirror of the node-mysql api for future multi-database-supported ORM implementation ease - -### Contributors - -Many thanks to the following: - -* [creationix](https://github.com/creationix) -* [felixge](https://github.com/felixge) -* [pshc](https://github.com/pshc) -* [pjornblomqvist](https://github.com/bjornblomqvist) -* [JulianBirch](https://github.com/JulianBirch) -* [ef4](https://github.com/ef4) -* [napa3um](https://github.com/napa3um) -* [drdaeman](https://github.com/drdaeman) -* [booo](https://github.com/booo) -* [neonstalwart](https://github.com/neonstalwart) -* [homme](https://github.com/homme) -* [bdunavant](https://github.com/bdunavant) -* [tokumine](https://github.com/tokumine) -* [shtylman](https://github.com/shtylman) -* [cricri](https://github.com/cricri) -* [AlexanderS](https://github.com/AlexanderS) -* [ahtih](https://github.com/ahtih) +``` ## Documentation @@ -126,20 +70,78 @@ Documentation is a work in progress primarily taking place on the github WIKI ### [Documentation](https://github.com/brianc/node-postgres/wiki) -### __PLEASE__ check out the WIKI +## Native Bindings + +node-postgres contains a pure JavaScript driver and also exposes JavaScript bindings to libpq. You can use either interface. I personally use the JavaScript bindings as the are quite fast, and I like having everything implemented in JavaScript. + +To use native libpq bindings replace `require('pg')` with `require('pg').native`. + +The two share the same interface so __no other code changes should be required__. If you find yourself having to change code other than the require statement when switching from `pg` to `pg.native`, please report an issue. + +## Features + +* pure JavaScript client and native libpq bindings share _the same api_ +* optional connection pooling +* extensible js<->postgresql data-type coercion +* supported PostgreSQL features + * parameterized queries + * named statements with query plan caching + * async notifications with `LISTEN/NOTIFY` + * bulk import & export with `COPY TO/COPY FROM` + +## Contributing + +__I love contributions.__ + +You are welcome contribute via pull requests. If you need help getting the tests running locally feel free to email me or gchat me. + +I will __happily__ accept your pull request if it: +- _has tests_ +- looks reasonable +- does not break backwards compatibility +- satisfies jshint + +Information about the testing processes is in the [wiki](https://github.com/brianc/node-postgres/wiki/Testing). + +If you need help or have questions about constructing a pull request I'll be glad to help out as well. + +## Support + +If at all possible when you open an issue please provide +- version of node +- version of postgres +- smallest possible snippet of code to reproduce the problem + +Usually I'll pop the code into the repo as a test. Hopefully the test fails. Then I make the test pass. Then everyone's happy! + + +If you need help or run into _any_ issues getting node-postgres to work on your system please report a bug or contact me directly. I am usually available via google-talk at my github account public email address. + +I usually tweet about any important status updates or changes to node-postgres. +Follow me [@briancarlson](https://twitter.com/briancarlson) to keep up to date. + + +## Extras + +node-postgres is by design _low level_ with the bare minimum of abstraction. These might help out: + +- https://github.com/grncdr/node-any-db +- https://github.com/brianc/node-sql -If you have a question, post it to the FAQ section of the WIKI so everyone can read the answer ## Production Use * [yammer.com](http://www.yammer.com) * [bayt.com](http://bayt.com) +* [bitfloor.com](https://bitfloor.com) +* [Vendly](http://www.vend.ly) +* [SaferAging](http://www.saferaging.com) +* [CartoDB](http://www.cartodb.com) +* [Heap](https://heapanalytics.com) +* [zoomsquare](http://www.zoomsquare.com/) -_if you use node-postgres in production and would like your site listed here, fork & add it_ +_If you use node-postgres in production and would like your site listed here, fork & add it._ -## Help -If you need help or run into _any_ issues getting node-postgres to work on your system please report a bug or contact me directly. I am usually available via google-talk at my github account public email address. - ## License Copyright (c) 2010 Brian Carlson (brian.m.carlson@gmail.com) @@ -161,6 +163,3 @@ Copyright (c) 2010 Brian Carlson (brian.m.carlson@gmail.com) LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. - - - diff --git a/benchmark/4e822a1.txt b/benchmark/4e822a1.txt new file mode 100644 index 00000000..ce94b25e --- /dev/null +++ b/benchmark/4e822a1.txt @@ -0,0 +1,17 @@ +benchmark +starting simple-query-parsing +4166 ops/sec - (100/0.024) +8333 ops/sec - (1000/0.12) +10405 ops/sec - (10000/0.961) +10515 ops/sec - (10000/0.951) +10638 ops/sec - (10000/0.94) +10460 ops/sec - (10000/0.956) +starting prepared-statement-parsing +4166 ops/sec - (100/0.024) +8264 ops/sec - (1000/0.121) +7530 ops/sec - (10000/1.328) +8250 ops/sec - (10000/1.212) +8156 ops/sec - (10000/1.226) +8110 ops/sec - (10000/1.233) +done + diff --git a/benchmark/835f71a76f.txt b/benchmark/835f71a76f.txt new file mode 100644 index 00000000..8d35cd4c --- /dev/null +++ b/benchmark/835f71a76f.txt @@ -0,0 +1,17 @@ +benchmark +starting simple-query-parsing +3703 ops/sec - (100/0.027) +7299 ops/sec - (1000/0.137) +8888 ops/sec - (10000/1.125) +8733 ops/sec - (10000/1.145) +8810 ops/sec - (10000/1.135) +8771 ops/sec - (10000/1.14) +starting prepared-statement-parsing +3846 ops/sec - (100/0.026) +7299 ops/sec - (1000/0.137) +7225 ops/sec - (10000/1.384) +7288 ops/sec - (10000/1.372) +7225 ops/sec - (10000/1.384) +7457 ops/sec - (10000/1.341) +done + diff --git a/benchmark/df766c913.txt b/benchmark/df766c913.txt new file mode 100644 index 00000000..80f26749 --- /dev/null +++ b/benchmark/df766c913.txt @@ -0,0 +1,17 @@ +benchmark +starting simple-query-parsing +3571 ops/sec - (100/0.028) +7299 ops/sec - (1000/0.137) +8873 ops/sec - (10000/1.127) +8536 ops/sec - (40000/4.686) +8494 ops/sec - (40000/4.709) +7695 ops/sec - (40000/5.198) +starting prepared-statement-parsing +4000 ops/sec - (100/0.025) +6944 ops/sec - (1000/0.144) +7153 ops/sec - (10000/1.398) +7127 ops/sec - (40000/5.612) +7208 ops/sec - (40000/5.549) +6460 ops/sec - (40000/6.191) +done + diff --git a/benchmark/index.js b/benchmark/index.js new file mode 100644 index 00000000..a07fe454 --- /dev/null +++ b/benchmark/index.js @@ -0,0 +1,42 @@ +var async = require('async'); +var max = 10000; +var maxTimes = 3; +var doLoops = function(bench, loops, times, cb) { + var start = new Date(); + var count = 0; + + var done = function() { + var duration = (new Date() - start) + var seconds = (duration / 1000); + console.log("%d ops/sec - (%d/%d)", ~~(loops/seconds), loops, seconds); + var next = loops * 10; + if(next > max) { + if(times > maxTimes) return cb(); + times++; + next = max; + } + setTimeout(function() { + doLoops(bench, next, times, cb); + }, 100); + } + + var run = function() { + if(count++ >= loops){ + return done(); + } + bench(function() { + setImmediate(run); + }); + } + run(); +} +var bench = require(__dirname + '/simple-query-parsing'); +console.log(); +var benches = ['simple-query-parsing', 'prepared-statement-parsing']; +async.forEachSeries(benches, function(name, cb) { + var bench = require(__dirname + '/' + name)(); + console.log('starting ', name); + doLoops(bench, 100, 1, cb); +}, function(err, res) { + console.log('done') +}) diff --git a/benchmark/js-versus-native-bench.js b/benchmark/js-versus-native-bench.js deleted file mode 100644 index b65fb98c..00000000 --- a/benchmark/js-versus-native-bench.js +++ /dev/null @@ -1,68 +0,0 @@ -var pg = require(__dirname + '/../lib') -var pgNative = require(__dirname + '/../lib/native'); -var bencher = require('bencher'); -var helper = require(__dirname + '/../test/test-helper') -var conString = helper.connectionString() - -var round = function(num) { - return Math.round((num*1000))/1000 -} - -var doBenchmark = function() { - var bench = bencher({ - name: 'js/native compare', - repeat: 1000, - actions: [{ - name: 'javascript client - simple query', - run: function(next) { - var query = client.query('SELECT name, age FROM person WHERE age > 10'); - query.on('end', function() { - next(); - }); - } - },{ - name: 'native client - simple query', - run: function(next) { - var query = nativeClient.query('SELECT name FROM person WHERE age > $1', [10]); - query.on('end', function() { - next(); - }); - } - }, { - name: 'javascript client - parameterized query', - run: function(next) { - var query = client.query('SELECT name, age FROM person WHERE age > $1', [10]); - query.on('end', function() { - next(); - }); - } - },{ - name: 'native client - parameterized query', - run: function(next) { - var query = nativeClient.query('SELECT name, age FROM person WHERE age > $1', [10]); - query.on('end', function() { - next(); - }); - } - }] - }); - bench(function(result) { - console.log(); - console.log("%s (%d repeats):", result.name, result.repeat) - result.actions.forEach(function(action) { - console.log(" %s: \n average: %d ms\n total: %d ms", action.name, round(action.meanTime), round(action.totalTime)); - }) - client.end(); - nativeClient.end(); - }) -} - -var client = new pg.Client(conString); -var nativeClient = new pgNative.Client(conString); -client.connect(); -client.on('connect', function() { - nativeClient.connect(); - nativeClient.on('connect', function() { - doBenchmark(); - }); -}); diff --git a/benchmark/large-datatset-bench.js b/benchmark/large-datatset-bench.js deleted file mode 100644 index a5e0346a..00000000 --- a/benchmark/large-datatset-bench.js +++ /dev/null @@ -1,125 +0,0 @@ -var pg = require(__dirname + '/../lib') -var bencher = require('bencher'); -var helper = require(__dirname + '/../test/test-helper') -var conString = helper.connectionString() - -var round = function(num) { - return Math.round((num*1000))/1000 -} - -var doBenchmark = function(cb) { - var bench = bencher({ - name: 'select large sets', - repeat: 10, - actions: [{ - name: 'selecting string', - run: function(next) { - var query = client.query('SELECT name FROM items'); - query.on('error', function(er) { - console.log(er);throw er; - }); - - query.on('end', function() { - next(); - }); - } - }, { - name: 'selecting integer', - run: function(next) { - var query = client.query('SELECT count FROM items'); - query.on('error', function(er) { - console.log(er);throw er; - }); - - query.on('end', function() { - next(); - }) - } - }, { - name: 'selecting date', - run: function(next) { - var query = client.query('SELECT created FROM items'); - query.on('error', function(er) { - console.log(er);throw er; - }); - - query.on('end', function() { - next(); - }) - } - }, { - name: 'selecting row', - run: function(next) { - var query = client.query('SELECT * FROM items'); - query.on('end', function() { - next(); - }) - } - }, { - name: 'loading all rows into memory', - run: function(next) { - var query = client.query('SELECT * FROM items', next); - } - }] - }); - bench(function(result) { - console.log(); - console.log("%s (%d repeats):", result.name, result.repeat) - result.actions.forEach(function(action) { - console.log(" %s: \n average: %d ms\n total: %d ms", action.name, round(action.meanTime), round(action.totalTime)); - }) - client.end(); - cb(); - }) -} - - -var client = new pg.Client(conString); -client.connect(); -console.log(); -console.log("creating temp table"); -client.query("CREATE TEMP TABLE items(name VARCHAR(10), created TIMESTAMPTZ, count INTEGER)"); -var count = 10000; -console.log("inserting %d rows", count); -for(var i = 0; i < count; i++) { - var query = { - name: 'insert', - text: "INSERT INTO items(name, created, count) VALUES($1, $2, $3)", - values: ["item"+i, new Date(2010, 01, 01, i, 0, 0), i] - }; - client.query(query); -} - -client.once('drain', function() { - console.log('done with insert. executing pure-javascript benchmark.'); - doBenchmark(function() { - var oldclient = client; - client = new pg.native.Client(conString); - client.on('error', function(err) { - console.log(err); - throw err; - }); - - client.connect(); - client.connect(); - console.log(); - console.log("creating temp table"); - client.query("CREATE TEMP TABLE items(name VARCHAR(10), created TIMESTAMPTZ, count INTEGER)"); - var count = 10000; - console.log("inserting %d rows", count); - for(var i = 0; i < count; i++) { - var query = { - name: 'insert', - text: "INSERT INTO items(name, created, count) VALUES($1, $2, $3)", - values: ["item"+i, new Date(2010, 01, 01, i, 0, 0), i] - }; - client.query(query); - } - client.once('drain', function() { - console.log("executing native benchmark"); - doBenchmark(function() { - console.log("all done"); - }) - }) - }); -}); diff --git a/benchmark/prepared-statement-parsing.js b/benchmark/prepared-statement-parsing.js new file mode 100644 index 00000000..d869d5c2 --- /dev/null +++ b/benchmark/prepared-statement-parsing.js @@ -0,0 +1,73 @@ +var Client = require(__dirname + '/../lib/client'); +var buffers = require(__dirname + '/../test/test-buffers'); +require(__dirname + '/../test/unit/test-helper'); + +var stream = new MemoryStream(); +stream.readyState = 'open'; +var client = new Client({ + stream: stream +}); + +var rowDescription = new buffers.rowDescription([{ + name: 'id', + tableID: 1, + attributeNumber: 1, + dataTypeID: 23, //int4 + typeModifer: 0, + formatCode: 0 +},{ + name: 'name', + tableID: 1, + attributeNumber: 2, + dataTypeID: 25, //text + typeModifer: 0, + formatCode: 0 //text format +}, { + name: 'comment', + tableID: 1, + attributeNumber: 3, + dataTypeID: 25, //text + typeModifer: 0, + formatCode: 0 //text format +}]); +var row1 = buffers.dataRow(['1', 'Brian', 'Something groovy']); +var row2 = buffers.dataRow(['2', 'Bob', 'Testint test']); +var row3 = buffers.dataRow(['3', 'The amazing power of the everlasting gobstopper', 'okay now']); +var parseCompleteBuffer = buffers.parseComplete(); +var bindCompleteBuffer = buffers.bindComplete(); +var portalSuspendedBuffer = buffers.portalSuspended(); +var complete = buffers.commandComplete('SELECT 3'); +var ready = buffers.readyForQuery(); +var buffer = Buffer.concat([parseCompleteBuffer, + bindCompleteBuffer, + rowDescription, + row1, + row2, + row3, + portalSuspendedBuffer, + row1, + row2, + row3, + portalSuspendedBuffer, + row1, + row2, + row3, + portalSuspendedBuffer, + complete, ready]); + +var bufferSlice = require('buffer-slice'); +var buffers = bufferSlice(10, buffer); + +client.connect(assert.calls(function() { + client.connection.emit('readyForQuery'); + module.exports = function() { + return function(done) { + client.query('SELECT * FROM whatever WHERE this = "doesnt even matter"', ['whatever'], function(err, res) { + assert.equal(res.rows.length, 9); + done(); + }); + buffers.forEach(stream.emit.bind(stream, 'data')); + }; + }; +})); +client.connection.emit('readyForQuery'); diff --git a/benchmark/simple-query-bench.js b/benchmark/simple-query-bench.js deleted file mode 100644 index 46601589..00000000 --- a/benchmark/simple-query-bench.js +++ /dev/null @@ -1,58 +0,0 @@ -var pg = require(__dirname + '/../lib') -var bencher = require('bencher'); -var helper = require(__dirname + '/../test/test-helper') -var conString = helper.connectionString() - -var round = function(num) { - return Math.round((num*1000))/1000 -} - -var doBenchmark = function() { - var bench = bencher({ - name: 'query compare', - repeat: 1000, - actions: [{ - name: 'simple query', - run: function(next) { - var query = client.query('SELECT name FROM person WHERE age > 10'); - query.on('end', function() { - next(); - }); - } - },{ - name: 'unnamed prepared statement', - run: function(next) { - var query = client.query('SELECT name FROM person WHERE age > $1', [10]); - query.on('end', function() { - next(); - }); - } - },{ - name: 'named prepared statement', - run: function(next) { - var config = { - name: 'get peeps', - text: 'SELECT name FROM person WHERE age > $1', - values: [10] - } - client.query(config).on('end', function() { - next(); - }); - } - }] - }); - bench(function(result) { - console.log(); - console.log("%s (%d repeats):", result.name, result.repeat) - result.actions.forEach(function(action) { - console.log(" %s: \n average: %d ms\n total: %d ms", action.name, round(action.meanTime), round(action.totalTime)); - }) - client.end(); - }) -} - - - -var client = new pg.Client(conString); -client.connect(); -client.connection.once('readyForQuery', doBenchmark) diff --git a/benchmark/simple-query-parsing.js b/benchmark/simple-query-parsing.js new file mode 100644 index 00000000..fb4895d5 --- /dev/null +++ b/benchmark/simple-query-parsing.js @@ -0,0 +1,59 @@ +var Client = require(__dirname + '/../lib/client'); +var buffers = require(__dirname + '/../test/test-buffers'); +require(__dirname + '/../test/unit/test-helper'); + +var stream = new MemoryStream(); +stream.readyState = 'open'; +var client = new Client({ + stream: stream +}); + +var rowDescription = new buffers.rowDescription([{ + name: 'id', + tableID: 1, + attributeNumber: 1, + dataTypeID: 23, //int4 + typeModifer: 0, + formatCode: 0 +},{ + name: 'name', + tableID: 1, + attributeNumber: 2, + dataTypeID: 25, //text + typeModifer: 0, + formatCode: 0 //text format +}, { + name: 'comment', + tableID: 1, + attributeNumber: 3, + dataTypeID: 25, //text + typeModifer: 0, + formatCode: 0 //text format +}]); +var row1 = buffers.dataRow(['1', 'Brian', 'Something groovy']); +var row2 = buffers.dataRow(['2', 'Bob', 'Testint test']); +var row3 = buffers.dataRow(['3', 'The amazing power of the everlasting gobstopper', 'okay now']); +var complete = buffers.commandComplete('SELECT 3'); +var ready = buffers.readyForQuery(); +var buffer = Buffer.concat([ + rowDescription, + row1, row2, row3, + row1, row2, row3, + row1, row2, row3, + complete, ready]); +var bufferSlice = require('buffer-slice'); +buffers = bufferSlice(10, buffer); + +client.connect(assert.calls(function() { + client.connection.emit('readyForQuery'); + module.exports = function() { + return function(done) { + client.query('SELECT * FROM whatever WHERE this = "doesnt even matter"', function(err, res) { + assert.equal(res.rows.length, 9); + done(); + }); + buffers.forEach(stream.emit.bind(stream, 'data')); + }; + }; +})); +client.connection.emit('readyForQuery'); diff --git a/binding.gyp b/binding.gyp new file mode 100644 index 00000000..02c80a4d --- /dev/null +++ b/binding.gyp @@ -0,0 +1,37 @@ +{ + 'targets': [ + { + 'target_name': 'binding', + 'conditions' : [ + ['OS=="win"', { + 'conditions' : [ + ['"> 3; - - var inv = function(value) { - if (invert) { - return ~value & 0xff; - } - - return value; - }; - - // read first (maybe partial) byte - var mask = 0xff; - var firstBits = 8 - (offset % 8); - if (bits < firstBits) { - mask = (0xff << (8 - bits)) & 0xff; - firstBits = bits; - } - - if (offset) { - mask = mask >> (offset % 8); - } - - var result = 0; - if ((offset % 8) + bits >= 8) { - result = callback(0, inv(data[offsetBytes]) & mask, firstBits); - } - - // read bytes - var bytes = (bits + offset) >> 3; - for (var i = offsetBytes + 1; i < bytes; i++) { - result = callback(result, inv(data[i]), 8); - } - - // bits to read, that are not a complete byte - var lastBits = (bits + offset) % 8; - if (lastBits > 0) { - result = callback(result, inv(data[bytes]) >> (8 - lastBits), lastBits); - } - - return result; -}; - -var parseFloatFromBits = function(data, precisionBits, exponentBits) { - var bias = Math.pow(2, exponentBits - 1) - 1; - var sign = parseBits(data, 1); - var exponent = parseBits(data, exponentBits, 1); - - if (exponent === 0) - return 0; - - // parse mantissa - var precisionBitsCounter = 1; - var parsePrecisionBits = function(lastValue, newValue, bits) { - if (lastValue === 0) { - lastValue = 1; - } - - for (var i = 1; i <= bits; i++) { - precisionBitsCounter /= 2; - if ((newValue & (0x1 << (bits - i))) > 0) { - lastValue += precisionBitsCounter; - } - } - - return lastValue; - }; - - var mantissa = parseBits(data, precisionBits, exponentBits + 1, false, parsePrecisionBits); - - // special cases - if (exponent == (Math.pow(2, exponentBits + 1) - 1)) { - if (mantissa === 0) { - return (sign === 0) ? Infinity : -Infinity; - } - - return NaN; - } - - // normale number - return ((sign === 0) ? 1 : -1) * Math.pow(2, exponent - bias) * mantissa; -}; - -var parseBool = function(value) { - return (parseBits(value, 8) == 1); -}; - -var parseInt16 = function(value) { - if (parseBits(value, 1) == 1) { - return -1 * (parseBits(value, 15, 1, true) + 1); - } - - return parseBits(value, 15, 1); -}; - -var parseInt32 = function(value) { - if (parseBits(value, 1) == 1) { - return -1 * (parseBits(value, 31, 1, true) + 1); - } - - return parseBits(value, 31, 1); -}; - -var parseInt64 = function(value) { - if (parseBits(value, 1) == 1) { - return -1 * (parseBits(value, 63, 1, true) + 1); - } - - return parseBits(value, 63, 1); -}; - -var parseFloat32 = function(value) { - return parseFloatFromBits(value, 23, 8); -}; - -var parseFloat64 = function(value) { - return parseFloatFromBits(value, 52, 11); -}; - -var parseNumeric = function(value) { - var sign = parseBits(value, 16, 32); - if (sign == 0xc000) { - return NaN; - } - - var weight = Math.pow(10000, parseBits(value, 16, 16)); - var result = 0; - - var digits = []; - var ndigits = parseBits(value, 16); - for (var i = 0; i < ndigits; i++) { - result += parseBits(value, 16, 64 + (16 * i)) * weight; - weight /= 10000; - } - - var scale = Math.pow(10, parseBits(value, 16, 48)); - return ((sign === 0) ? 1 : -1) * Math.round(result * scale) / scale; -}; - -var parseDate = function(value) { - var sign = parseBits(value, 1); - var rawValue = parseBits(value, 63, 1); - - // discard usecs and shift from 2000 to 1970 - var result = new Date((((sign === 0) ? 1 : -1) * rawValue / 1000) + 946684800000); - - // add microseconds to the date - result.usec = rawValue % 1000; - result.getMicroSeconds = function() { - return this.usec; - }; - result.setMicroSeconds = function(value) { - this.usec = value; - }; - result.getUTCMicroSeconds = function() { - return this.usec; - }; - - return result; -}; - -var parseArray = function(value) { - var dim = parseBits(value, 32); - - var flags = parseBits(value, 32, 32); - var elementType = parseBits(value, 32, 64); - - var offset = 96; - var dims = []; - for (var i = 0; i < dim; i++) { - // parse dimension - dims[i] = parseBits(value, 32, offset); - offset += 32; - - // ignore lower bounds - offset += 32; - } - - var parseElement = function(elementType) { - // parse content length - var length = parseBits(value, 32, offset); - offset += 32; - - // parse null values - if (length == 0xffffffff) { - return null; - } - - if ((elementType == 0x17) || (elementType == 0x14)) { - // int/bigint - var result = parseBits(value, length * 8, offset); - offset += length * 8; - return result; - } - else if (elementType == 0x19) { - // string - var result = value.toString(this.encoding, offset >> 3, (offset += (length << 3)) >> 3); - return result; - } - else { - console.log("ERROR: ElementType not implemented: " + elementType); - } - }; - - var parse = function(dimension, elementType) { - var array = []; - - if (dimension.length > 1) { - var count = dimension.shift(); - for (var i = 0; i < count; i++) { - array[i] = parse(dimension, elementType); - } - dimension.unshift(count); - } - else { - for (var i = 0; i < dimension[0]; i++) { - array[i] = parseElement(elementType); - } - } - - return array; - }; - - return parse(dims, elementType); -}; - -var parseText = function(value) { - return value.toString('utf8'); -}; - -var parseBool = function(value) { - return (parseBits(value, 8) > 0); -}; - -var init = function(register) { - register(20, parseInt64); - register(21, parseInt16); - register(23, parseInt32); - register(26, parseInt32); - register(1700, parseNumeric); - register(700, parseFloat32); - register(701, parseFloat64); - register(16, parseBool); - register(1114, parseDate); - register(1184, parseDate); - register(1007, parseArray); - register(1016, parseArray); - register(1008, parseArray); - register(1009, parseArray); - register(25, parseText); -}; - -module.exports = { - init: init -}; diff --git a/lib/client.js b/lib/client.js index 5d1af39a..d37b0a58 100644 --- a/lib/client.js +++ b/lib/client.js @@ -2,36 +2,40 @@ var crypto = require('crypto'); var EventEmitter = require('events').EventEmitter; var util = require('util'); +var ConnectionParameters = require(__dirname + '/connection-parameters'); var Query = require(__dirname + '/query'); -var utils = require(__dirname + '/utils'); var defaults = require(__dirname + '/defaults'); var Connection = require(__dirname + '/connection'); +var CopyFromStream = require(__dirname + '/copystream').CopyFromStream; +var CopyToStream = require(__dirname + '/copystream').CopyToStream; var Client = function(config) { EventEmitter.call(this); - if(typeof config === 'string') { - config = utils.normalizeConnectionInfo(config) - } - config = config || {}; - this.user = config.user || defaults.user; - this.database = config.database || defaults.database; - this.port = config.port || defaults.port; - this.host = config.host || defaults.host; - this.connection = config.connection || new Connection({stream: config.stream}); + + this.connectionParameters = new ConnectionParameters(config); + this.user = this.connectionParameters.user; + this.database = this.connectionParameters.database; + this.port = this.connectionParameters.port; + this.host = this.connectionParameters.host; + this.password = this.connectionParameters.password; + + var c = config || {}; + + this.connection = c.connection || new Connection({ + stream: c.stream, + ssl: c.ssl + }); this.queryQueue = []; - this.password = config.password || defaults.password; - this.binary = config.binary || defaults.binary; + this.binary = c.binary || defaults.binary; this.encoding = 'utf8'; this.processID = null; this.secretKey = null; - var self = this; + this.ssl = c.ssl || false; }; util.inherits(Client, EventEmitter); -var p = Client.prototype; - -p.connect = function(callback) { +Client.prototype.connect = function(callback) { var self = this; var con = this.connection; if(this.host && this.host.indexOf('/') === 0) { @@ -43,6 +47,17 @@ p.connect = function(callback) { //once connection is established send startup message con.on('connect', function() { + if(self.ssl) { + con.requestSsl(); + } else { + con.startup({ + user: self.user, + database: self.database + }); + } + }); + + con.on('sslconnect', function() { con.startup({ user: self.user, database: self.database @@ -63,9 +78,9 @@ p.connect = function(callback) { }); con.once('backendKeyData', function(msg) { - self.processID = msg.processID; - self.secretKey = msg.secretKey; - }); + self.processID = msg.processID; + self.secretKey = msg.secretKey; + }); //hook up query handling events to connection //after the connection initially becomes ready for queries @@ -74,10 +89,12 @@ p.connect = function(callback) { con.on('rowDescription', function(msg) { self.activeQuery.handleRowDescription(msg); }); + //delegate datarow to active query con.on('dataRow', function(msg) { self.activeQuery.handleDataRow(msg); }); + //TODO should query gain access to connection? con.on('portalSuspended', function(msg) { self.activeQuery.getRows(con); @@ -92,6 +109,25 @@ p.connect = function(callback) { } }); + con.on('copyInResponse', function(msg) { + self.activeQuery.streamData(self.connection); + }); + + con.on('copyOutResponse', function(msg) { + if(self.activeQuery.stream === undefined) { + self.activeQuery._canceledDueToError = + new Error('No destination stream defined'); + //canceling query requires creation of new connection + //look for postgres frontend/backend protocol + (new self.constructor({port: self.port, host: self.host})) + .cancel(self, self.activeQuery); + } + }); + + con.on('copyData', function (msg) { + self.activeQuery.handleCopyFromChunk(msg.chunk); + }); + if (!callback) { self.emit('connect'); } else { @@ -107,12 +143,22 @@ p.connect = function(callback) { }); con.on('readyForQuery', function() { + var error; if(self.activeQuery) { - self.activeQuery.handleReadyForQuery(); + //try/catch/rethrow to ensure exceptions don't prevent the queryQueue from + //being processed + try{ + self.activeQuery.handleReadyForQuery(); + } catch(e) { + error = e; + } } - this.activeQuery = null; + self.activeQuery = null; self.readyForQuery = true; self._pulseQueryQueue(); + if(error) { + throw error; + } }); con.on('error', function(error) { @@ -127,37 +173,93 @@ p.connect = function(callback) { if(self.activeQuery.isPreparedStatement) { con.sync(); } - self.activeQuery.handleError(error); + var activeQuery = self.activeQuery; self.activeQuery = null; + activeQuery.handleError(error); } }); + con.once('end', function() { + if(self.activeQuery) { + self.activeQuery.handleError(new Error('Stream unexpectedly ended during query execution')); + self.activeQuery = null; + } + self.emit('end'); + }); + + con.on('notice', function(msg) { self.emit('notice', msg); }); }; -p.cancel = function(client, query) { - if (client.activeQuery == query) { - var con = this.connection; +Client.prototype.cancel = function(client, query) { + if(client.activeQuery == query) { + var con = this.connection; - if(this.host && this.host.indexOf('/') === 0) { - con.connect(this.host + '/.s.PGSQL.' + this.port); - } else { - con.connect(this.port, this.host); - } + if(this.host && this.host.indexOf('/') === 0) { + con.connect(this.host + '/.s.PGSQL.' + this.port); + } else { + con.connect(this.port, this.host); + } - //once connection is established send cancel message - con.on('connect', function() { - con.cancel(client.processID, client.secretKey); - }); - } - else if (client.queryQueue.indexOf(query) != -1) - client.queryQueue.splice(client.queryQueue.indexOf(query), 1); + //once connection is established send cancel message + con.on('connect', function() { + con.cancel(client.processID, client.secretKey); + }); + } else if(client.queryQueue.indexOf(query) != -1) { + client.queryQueue.splice(client.queryQueue.indexOf(query), 1); + } }; -p._pulseQueryQueue = function() { +// Ported from PostgreSQL 9.2.4 source code in src/interfaces/libpq/fe-exec.c +Client.prototype.escapeIdentifier = function(str) { + + var escaped = '"'; + + for(var i = 0; i < str.length; i++) { + var c = str[i]; + if(c === '"') { + escaped += c + c; + } else { + escaped += c; + } + } + + escaped += '"'; + + return escaped; +}; + +// Ported from PostgreSQL 9.2.4 source code in src/interfaces/libpq/fe-exec.c +Client.prototype.escapeLiteral = function(str) { + + var hasBackslash = false; + var escaped = '\''; + + for(var i = 0; i < str.length; i++) { + var c = str[i]; + if(c === '\'') { + escaped += c + c; + } else if (c === '\\') { + escaped += c + c; + hasBackslash = true; + } else { + escaped += c; + } + } + + escaped += '\''; + + if(hasBackslash === true) { + escaped = ' E' + escaped; + } + + return escaped; +}; + +Client.prototype._pulseQueryQueue = function() { if(this.readyForQuery===true) { this.activeQuery = this.queryQueue.shift(); if(this.activeQuery) { @@ -166,48 +268,51 @@ p._pulseQueryQueue = function() { this.activeQuery.submit(this.connection); } else if(this.hasExecuted) { this.activeQuery = null; - this._drainPaused > 0 ? this._drainPaused++ : this.emit('drain') + this.emit('drain'); } } }; -p.query = function(config, values, callback) { - //can take in strings or config objects - config = (typeof(config) == 'string') ? { text: config } : config; - if (this.binary && !('binary' in config)) { - config.binary = true; - } - - if(values) { - if(typeof values === 'function') { - callback = values; +Client.prototype._copy = function (text, stream) { + var config = {}; + config.text = text; + config.stream = stream; + config.callback = function (error) { + if(error) { + config.stream.error(error); } else { - config.values = values; + config.stream.close(); } + }; + var query = new Query(config); + this.queryQueue.push(query); + this._pulseQueryQueue(); + return config.stream; + +}; + +Client.prototype.copyFrom = function (text) { + return this._copy(text, new CopyFromStream()); +}; + +Client.prototype.copyTo = function (text) { + return this._copy(text, new CopyToStream()); +}; + +Client.prototype.query = function(config, values, callback) { + //can take in strings, config object or query object + var query = (config instanceof Query) ? config : + new Query(config, values, callback); + if(this.binary && !query.binary) { + query.binary = true; } - config.callback = callback; - - var query = new Query(config); this.queryQueue.push(query); this._pulseQueryQueue(); return query; }; -//prevents client from otherwise emitting 'drain' event until 'resumeDrain' is called -p.pauseDrain = function() { - this._drainPaused = 1; -}; - -//resume raising 'drain' event -p.resumeDrain = function() { - if(this._drainPaused > 1) { - this.emit('drain'); - } - this._drainPaused = 0; -}; - -p.end = function() { +Client.prototype.end = function() { this.connection.end(); }; @@ -215,4 +320,7 @@ Client.md5 = function(string) { return crypto.createHash('md5').update(string).digest('hex'); }; +// expose a Query constructor +Client.Query = Query; + module.exports = Client; diff --git a/lib/connection-parameters.js b/lib/connection-parameters.js new file mode 100644 index 00000000..d6c1a234 --- /dev/null +++ b/lib/connection-parameters.js @@ -0,0 +1,81 @@ +var dns = require('dns'); +var path = require('path'); + +var defaults = require(__dirname + '/defaults'); + +var val = function(key, config) { + return config[key] || + process.env['PG' + key.toUpperCase()] || + defaults[key]; +}; + +var url = require('url'); +//parses a connection string +var parse = function(str) { + //unix socket + if(str.charAt(0) === '/') { + return { host: str }; + } + // url parse expects spaces encoded as %20 + str = encodeURI(str); + var result = url.parse(str, true); + var config = {}; + config.host = result.hostname; + config.database = result.pathname ? result.pathname.slice(1) : null; + var auth = (result.auth || ':').split(':'); + config.user = auth[0]; + config.password = auth[1]; + config.port = result.port; + + var ssl = result.query.ssl; + if (ssl === 'true' || ssl === '1') { + config.ssl = true; + } + + return config; +}; + +var ConnectionParameters = function(config) { + config = typeof config == 'string' ? parse(config) : (config || {}); + this.user = val('user', config); + this.database = val('database', config); + this.port = parseInt(val('port', config), 10); + this.host = val('host', config); + this.password = val('password', config); + this.binary = val('binary', config); + this.ssl = config.ssl || defaults.ssl; + this.client_encoding = val("client_encoding", config); + //a domain socket begins with '/' + this.isDomainSocket = (!(this.host||'').indexOf('/')); +}; + +var add = function(params, config, paramName) { + var value = config[paramName]; + if(value) { + params.push(paramName+"='"+value+"'"); + } +}; + +ConnectionParameters.prototype.getLibpqConnectionString = function(cb) { + var params = []; + add(params, this, 'user'); + add(params, this, 'password'); + add(params, this, 'port'); + if(this.database) { + params.push("dbname='" + this.database + "'"); + } + if(this.isDomainSocket) { + params.push("host=" + this.host); + return cb(null, params.join(' ')); + } + if(this.client_encoding) { + params.push("client_encoding='" + this.client_encoding + "'"); + } + dns.lookup(this.host, function(err, address) { + if(err) return cb(err, null); + params.push("hostaddr=" + address); + return cb(null, params.join(' ')); + }); +}; + +module.exports = ConnectionParameters; diff --git a/lib/connection.js b/lib/connection.js index 6d9f11fb..c83ee964 100644 --- a/lib/connection.js +++ b/lib/connection.js @@ -4,8 +4,10 @@ var EventEmitter = require('events').EventEmitter; var util = require('util'); var utils = require(__dirname + '/utils'); -var Writer = require(__dirname + '/writer'); +var Writer = require('buffer-writer'); +var TEXT_MODE = 0; +var BINARY_MODE = 1; var Connection = function(config) { EventEmitter.call(this); config = config || {}; @@ -17,18 +19,25 @@ var Connection = function(config) { this.encoding = 'utf8'; this.parsedStatements = {}; this.writer = new Writer(); + this.ssl = config.ssl || false; + this._ending = false; + this._mode = TEXT_MODE; + this._emitMessage = false; + var self = this; + this.on('newListener', function(eventName) { + if(eventName == 'message') { + self._emitMessage = true; + } + }); }; util.inherits(Connection, EventEmitter); -var p = Connection.prototype; +Connection.prototype.connect = function(port, host) { -p.connect = function(port, host) { - - if(this.stream.readyState === 'closed'){ + if(this.stream.readyState === 'closed') { this.stream.connect(port, host); - } - else if(this.stream.readyState == 'open') { + } else if(this.stream.readyState == 'open') { this.emit('connect'); } @@ -38,22 +47,76 @@ p.connect = function(port, host) { self.emit('connect'); }); - - this.stream.on('data', function(buffer) { - self.setBuffer(buffer); - var msg; - while(msg = self.parseMessage()) { - self.emit('message', msg); - self.emit(msg.name, msg); + this.stream.on('error', function(error) { + //don't raise ECONNRESET errors - they can & should be ignored + //during disconnect + if(self._ending && error.code == 'ECONNRESET') { + return; } + self.emit('error', error); }); - this.stream.on('error', function(error) { - self.emit('error', error); + this.stream.on('end', function() { + self.emit('end'); + }); + + if(!this.ssl) { + return this.attachListeners(this.stream); + } + + this.stream.once('data', function(buffer) { + var responseCode = buffer.toString('utf8'); + if(responseCode != 'S') { + return self.emit('error', new Error('The server does not support SSL connections')); + } + var tls = require('tls'); + self.stream = tls.connect({ + socket: self.stream, + servername: host, + rejectUnauthorized: self.ssl.rejectUnauthorized, + ca: self.ssl.ca, + pfx: self.ssl.pfx, + key: self.ssl.key, + passphrase: self.ssl.passphrase, + cert: self.ssl.cert, + NPNProtocols: self.ssl.NPNProtocols + }); + self.attachListeners(self.stream); + self.emit('sslconnect'); }); }; -p.startup = function(config) { +Connection.prototype.attachListeners = function(stream) { + stream.on('data', function(buff) { + this.setBuffer(buff); + var msg = this.parseMessage(); + while(msg) { + if(this._emitMessage) { + this.emit('message', msg); + } + this.emit(msg.name, msg); + msg = this.parseMessage(); + } + }.bind(this)); +}; + +Connection.prototype.requestSsl = function(config) { + this.checkSslResponse = true; + + var bodyBuffer = this.writer + .addInt16(0x04D2) + .addInt16(0x162F).flush(); + + var length = bodyBuffer.length + 4; + + var buffer = new Writer() + .addInt32(length) + .add(bodyBuffer) + .join(); + this.stream.write(buffer); +}; + +Connection.prototype.startup = function(config) { var bodyBuffer = this.writer .addInt16(3) .addInt16(0) @@ -61,6 +124,8 @@ p.startup = function(config) { .addCString(config.user) .addCString('database') .addCString(config.database) + .addCString('client_encoding') + .addCString("'utf-8'") .addCString('').flush(); //this message is sent without a code @@ -73,7 +138,7 @@ p.startup = function(config) { this.stream.write(buffer); }; -p.cancel = function(processID, secretKey) { +Connection.prototype.cancel = function(processID, secretKey) { var bodyBuffer = this.writer .addInt16(1234) .addInt16(5678) @@ -90,27 +155,28 @@ p.cancel = function(processID, secretKey) { this.stream.write(buffer); }; -p.password = function(password) { +Connection.prototype.password = function(password) { //0x70 = 'p' this._send(0x70, this.writer.addCString(password)); }; -p._send = function(code, more) { +Connection.prototype._send = function(code, more) { + if(!this.stream.writable) { return false; } if(more === true) { this.writer.addHeader(code); } else { return this.stream.write(this.writer.flush(code)); } -} +}; -p.query = function(text) { +Connection.prototype.query = function(text) { //0x51 = Q this.stream.write(this.writer.addCString(text).flush(0x51)); }; //send parse message //"more" === true to buffer the message until flush() is called -p.parse = function(query, more) { +Connection.prototype.parse = function(query, more) { //expect something like this: // { name: 'queryName', // text: 'select * from blah', @@ -135,7 +201,7 @@ p.parse = function(query, more) { //send bind message //"more" === true to buffer the message until flush() is called -p.bind = function(config, more) { +Connection.prototype.bind = function(config, more) { //normalize config config = config || {}; config.portal = config.portal || ''; @@ -153,13 +219,12 @@ p.bind = function(config, more) { if(val === null || typeof val === "undefined") { buffer.addInt32(-1); } else { - val = val.toString(); buffer.addInt32(Buffer.byteLength(val)); buffer.addString(val); } } - if (config.binary) { + if(config.binary) { buffer.addInt16(1); // format codes to use binary buffer.addInt16(1); } @@ -172,7 +237,7 @@ p.bind = function(config, more) { //send execute message //"more" === true to buffer the message until flush() is called -p.execute = function(config, more) { +Connection.prototype.execute = function(config, more) { config = config || {}; config.portal = config.portal || ''; config.rows = config.rows || ''; @@ -186,33 +251,48 @@ p.execute = function(config, more) { var emptyBuffer = Buffer(0); -p.flush = function() { +Connection.prototype.flush = function() { //0x48 = 'H' - this.writer.add(emptyBuffer) + this.writer.add(emptyBuffer); this._send(0x48); -} +}; -p.sync = function() { +Connection.prototype.sync = function() { //clear out any pending data in the writer - this.writer.flush(0) - + this.writer.flush(0); + this.writer.add(emptyBuffer); this._send(0x53); }; -p.end = function() { +Connection.prototype.end = function() { //0x58 = 'X' this.writer.add(emptyBuffer); this._send(0x58); + this._ending = true; }; -p.describe = function(msg, more) { +Connection.prototype.describe = function(msg, more) { this.writer.addCString(msg.type + (msg.name || '')); this._send(0x44, more); }; +Connection.prototype.sendCopyFromChunk = function (chunk) { + this.stream.write(this.writer.add(chunk).flush(0x64)); +}; + +Connection.prototype.endCopyFrom = function () { + this.stream.write(this.writer.add(emptyBuffer).flush(0x63)); +}; + +Connection.prototype.sendCopyFail = function (msg) { + //this.stream.write(this.writer.add(emptyBuffer).flush(0x66)); + this.writer.addCString(msg); + this._send(0x66); +}; + //parsing methods -p.setBuffer = function(buffer) { +Connection.prototype.setBuffer = function(buffer) { if(this.lastBuffer) { //we have unfinished biznaz //need to combine last two buffers var remaining = this.lastBuffer.length - this.lastOffset; @@ -221,11 +301,30 @@ p.setBuffer = function(buffer) { buffer.copy(combinedBuffer, remaining, 0); buffer = combinedBuffer; } + this.lastBuffer = false; this.buffer = buffer; this.offset = 0; }; -p.parseMessage = function() { +Connection.prototype.readSslResponse = function() { + var remaining = this.buffer.length - (this.offset); + if(remaining < 1) { + this.lastBuffer = this.buffer; + this.lastOffset = this.offset; + return false; + } + return { + name: 'sslresponse', + text: this.buffer[this.offset++] + }; +}; + +var Message = function(name, length) { + this.name = name; + this.length = length; +}; + +Connection.prototype.parseMessage = function() { var remaining = this.buffer.length - (this.offset); if(remaining < 5) { //cannot read id + length without at least 5 bytes @@ -237,8 +336,9 @@ p.parseMessage = function() { //read message id code var id = this.buffer[this.offset++]; + var buffer = this.buffer; //read message length - var length = this.parseInt32(); + var length = this.parseInt32(buffer); if(remaining <= length) { this.lastBuffer = this.buffer; @@ -247,89 +347,81 @@ p.parseMessage = function() { return false; } - var msg = { - length: length - }; - switch(id) { case 0x52: //R - msg.name = 'authenticationOk'; - return this.parseR(msg); + return this.parseR(buffer, length); case 0x53: //S - msg.name = 'parameterStatus'; - return this.parseS(msg); + return this.parseS(buffer, length); case 0x4b: //K - msg.name = 'backendKeyData'; - return this.parseK(msg); + return this.parseK(buffer, length); case 0x43: //C - msg.name = 'commandComplete'; - return this.parseC(msg); + return this.parseC(buffer, length); case 0x5a: //Z - msg.name = 'readyForQuery'; - return this.parseZ(msg); + return this.parseZ(buffer, length); case 0x54: //T - msg.name = 'rowDescription'; - return this.parseT(msg); + return this.parseT(buffer, length); case 0x44: //D - msg.name = 'dataRow'; - return this.parseD(msg); + return this.parseD(buffer, length); case 0x45: //E - msg.name = 'error'; - return this.parseE(msg); + return this.parseE(buffer, length); case 0x4e: //N - msg.name = 'notice'; - return this.parseN(msg); + return this.parseN(buffer, length); case 0x31: //1 - msg.name = 'parseComplete'; - return msg; + return new Message('parseComplete', length); case 0x32: //2 - msg.name = 'bindComplete'; - return msg; + return new Message('bindComplete', length); case 0x41: //A - msg.name = 'notification'; - return this.parseA(msg); + return this.parseA(buffer, length); case 0x6e: //n - msg.name = 'noData'; - return msg; + return new Message('noData', length); case 0x49: //I - msg.name = 'emptyQuery'; - return msg; + return new Message('emptyQuery', length); case 0x73: //s - msg.name = 'portalSuspended'; - return msg; + return new Message('portalSuspended', length); - default: - throw new Error("Unrecognized message code " + id); + case 0x47: //G + return this.parseG(buffer, length); + + case 0x48: //H + return this.parseH(buffer, length); + + case 0x63: //c + return new Message('copyDone', length); + + case 0x64: //d + return this.parsed(buffer, length); } + return false; }; -p.parseR = function(msg) { +Connection.prototype.parseR = function(buffer, length) { var code = 0; + var msg = new Message('authenticationOk', length); if(msg.length === 8) { - code = this.parseInt32(); + code = this.parseInt32(buffer); if(code === 3) { msg.name = 'authenticationCleartextPassword'; } return msg; } if(msg.length === 12) { - code = this.parseInt32(); + code = this.parseInt32(buffer); if(code === 5) { //md5 required msg.name = 'authenticationMD5Password'; msg.salt = new Buffer(4); @@ -341,78 +433,120 @@ p.parseR = function(msg) { throw new Error("Unknown authenticatinOk message type" + util.inspect(msg)); }; -p.parseS = function(msg) { - msg.parameterName = this.parseCString(); - msg.parameterValue = this.parseCString(); +Connection.prototype.parseS = function(buffer, length) { + var msg = new Message('parameterStatus', length); + msg.parameterName = this.parseCString(buffer); + msg.parameterValue = this.parseCString(buffer); return msg; }; -p.parseK = function(msg) { - msg.processID = this.parseInt32(); - msg.secretKey = this.parseInt32(); +Connection.prototype.parseK = function(buffer, length) { + var msg = new Message('backendKeyData', length); + msg.processID = this.parseInt32(buffer); + msg.secretKey = this.parseInt32(buffer); return msg; }; -p.parseC = function(msg) { - msg.text = this.parseCString(); +Connection.prototype.parseC = function(buffer, length) { + var msg = new Message('commandComplete', length); + msg.text = this.parseCString(buffer); return msg; }; -p.parseZ = function(msg) { - msg.status = this.readChar(); +Connection.prototype.parseZ = function(buffer, length) { + var msg = new Message('readyForQuery', length); + msg.name = 'readyForQuery'; + msg.status = this.readString(buffer, 1); return msg; }; -p.parseT = function(msg) { - msg.fieldCount = this.parseInt16(); +ROW_DESCRIPTION = 'rowDescription'; +Connection.prototype.parseT = function(buffer, length) { + var msg = new Message(ROW_DESCRIPTION, length); + msg.fieldCount = this.parseInt16(buffer); var fields = []; for(var i = 0; i < msg.fieldCount; i++){ - fields[i] = this.parseField(); + fields.push(this.parseField(buffer)); } msg.fields = fields; return msg; }; -p.parseField = function() { - var field = { - name: this.parseCString(), - tableID: this.parseInt32(), - columnID: this.parseInt16(), - dataTypeID: this.parseInt32(), - dataTypeSize: this.parseInt16(), - dataTypeModifier: this.parseInt32(), - format: this.parseInt16() === 0 ? 'text' : 'binary' - }; +var Field = function() { + this.name = null; + this.tableID = null; + this.columnID = null; + this.dataTypeID = null; + this.dataTypeSize = null; + this.dataTypeModifier = null; + this.format = null; +}; + +FORMAT_TEXT = 'text'; +FORMAT_BINARY = 'binary'; +Connection.prototype.parseField = function(buffer) { + var field = new Field(); + field.name = this.parseCString(buffer); + field.tableID = this.parseInt32(buffer); + field.columnID = this.parseInt16(buffer); + field.dataTypeID = this.parseInt32(buffer); + field.dataTypeSize = this.parseInt16(buffer); + field.dataTypeModifier = this.parseInt32(buffer); + if(this.parseInt16(buffer) === TEXT_MODE) { + this._mode = TEXT_MODE; + field.format = FORMAT_TEXT; + } else { + this._mode = BINARY_MODE; + field.format = FORMAT_BINARY; + } return field; }; -p.parseD = function(msg) { - var fieldCount = this.parseInt16(); - var fields = []; +DATA_ROW = 'dataRow'; +var DataRowMessage = function(name, length, fieldCount) { + this.name = DATA_ROW; + this.length = length; + this.fieldCount = fieldCount; + this.fields = []; +}; + + +//extremely hot-path code +Connection.prototype.parseD = function(buffer, length) { + var fieldCount = this.parseInt16(buffer); + var msg = new DataRowMessage(length, fieldCount); for(var i = 0; i < fieldCount; i++) { - var length = this.parseInt32(); - fields[i] = (length === -1 ? null : this.readBytes(length)) - }; - msg.fieldCount = fieldCount; - msg.fields = fields; + msg.fields.push(this._readValue(buffer)); + } return msg; }; +//extremely hot-path code +Connection.prototype._readValue = function(buffer) { + var length = this.parseInt32(buffer); + if(length === -1) return null; + if(this._mode === TEXT_MODE) { + return this.readString(buffer, length); + } + return this.readBytes(buffer, length); +}; + //parses error -p.parseE = function(input) { +Connection.prototype.parseE = function(buffer, length) { var fields = {}; var msg, item; - var fieldType = this.readString(1); + var input = new Message('error', length); + var fieldType = this.readString(buffer, 1); while(fieldType != '\0') { - fields[fieldType] = this.parseCString(); - fieldType = this.readString(1); + fields[fieldType] = this.parseCString(buffer); + fieldType = this.readString(buffer, 1); } - if (input.name === 'error') { + if(input.name === 'error') { // the msg is an Error instance msg = new Error(fields.M); for (item in input) { // copy input properties to the error - if (input.hasOwnProperty(item)) { + if(input.hasOwnProperty(item)) { msg[item] = input[item]; } } @@ -436,52 +570,72 @@ p.parseE = function(input) { }; //same thing, different name -p.parseN = p.parseE; - -p.parseA = function(msg) { - msg.processId = this.parseInt32(); - msg.channel = this.parseCString(); - msg.payload = this.parseCString(); +Connection.prototype.parseN = function(buffer, length) { + var msg = this.parseE(buffer, length); + msg.name = 'notice'; return msg; }; -p.readChar = function() { - return Buffer([this.buffer[this.offset++]]).toString(this.encoding); +Connection.prototype.parseA = function(buffer, length) { + var msg = new Message('notification', length); + msg.processId = this.parseInt32(buffer); + msg.channel = this.parseCString(buffer); + msg.payload = this.parseCString(buffer); + return msg; }; -p.parseInt32 = function() { - var value = this.peekInt32(); +Connection.prototype.parseG = function (buffer, length) { + var msg = new Message('copyInResponse', length); + return this.parseGH(buffer, msg); +}; + +Connection.prototype.parseH = function(buffer, length) { + var msg = new Message('copyOutResponse', length); + return this.parseGH(buffer, msg); +}; + +Connection.prototype.parseGH = function (buffer, msg) { + var isBinary = this.buffer[this.offset] !== 0; + this.offset++; + msg.binary = isBinary; + var columnCount = this.parseInt16(buffer); + msg.columnTypes = []; + for(var i = 0; i 1) { - this.emit('drain') - }; + this.emit('drain'); + } this._drainPaused = 0; }; +Connection.prototype.sendCopyFail = function(msg) { + this.endCopyFrom(msg); +}; + var clientBuilder = function(config) { config = config || {}; var connection = new Connection(); + EventEmitter.call(connection); connection._queryQueue = []; connection._namedQueries = {}; connection._activeQuery = null; - connection._config = utils.normalizeConnectionInfo(config); + connection.connectionParameters = new ConnectionParameters(config); //attach properties to normalize interface with pure js client - connection.user = connection._config.user; - connection.password = connection._config.password; - connection.database = connection._config.database; - connection.host = connection._config.host; - connection.port = connection._config.port; + connection.user = connection.connectionParameters.user; + connection.password = connection.connectionParameters.password; + connection.database = connection.connectionParameters.database; + connection.host = connection.connectionParameters.host; + connection.port = connection.connectionParameters.port; connection.on('connect', function() { connection._connected = true; connection._pulseQueryQueue(true); }); + connection.on('_rowDescription', function(rowDescription) { + connection._activeQuery.handleRowDescription(rowDescription); + }); + //proxy some events to active query connection.on('_row', function(row) { connection._activeQuery.handleRow(row); }); + connection.on('_cmdStatus', function(status) { + //set this here so we can pass it to the query + //when the query completes + connection._lastMeta = status; + }); + //TODO: emit more native error properties (make it match js error) connection.on('_error', function(err) { //create Error object from object literal @@ -149,20 +213,55 @@ var clientBuilder = function(config) { } }); + connection.on('_end', function() { + process.nextTick(function() { + if(connection._activeQuery) { + connection._activeQuery.handleError(new Error("Connection was ended during query")); + } + connection.emit('end'); + }); + }); + connection.on('_readyForQuery', function() { + var error; var q = this._activeQuery; //a named query finished being prepared if(this._namedQuery) { this._namedQuery = false; this._sendQueryPrepared(q.name, q.values||[]); } else { - connection._activeQuery.handleReadyForQuery(); + //try/catch/rethrow to ensure exceptions don't prevent the queryQueue from + //being processed + try{ + connection._activeQuery.handleReadyForQuery(connection._lastMeta); + } catch(e) { + error = e; + } connection._activeQuery = null; connection._pulseQueryQueue(); + if(error) throw error; } }); - + connection.on('copyInResponse', function () { + //connection is ready to accept chunks + //start to send data from stream + connection._activeQuery.streamData(connection); + }); + connection.on('copyOutResponse', function(msg) { + if (connection._activeQuery.stream === undefined) { + connection._activeQuery._canceledDueToError = new Error('No destination stream defined'); + (new clientBuilder({port: connection.port, host: connection.host})).cancel(connection, connection._activeQuery); + } + }); + connection.on('copyData', function (chunk) { + //recieve chunk from connection + //move it to stream + connection._activeQuery.handleCopyFromChunk(chunk); + }); return connection; }; +// expose a Query constructor +clientBuilder.Query = NativeQuery; + module.exports = clientBuilder; diff --git a/lib/native/query.js b/lib/native/query.js index 6187c20a..905d1f78 100644 --- a/lib/native/query.js +++ b/lib/native/query.js @@ -1,92 +1,95 @@ var EventEmitter = require('events').EventEmitter; var util = require('util'); -var types = require(__dirname + "/../types"); +var types = require(__dirname + '/../types/'); +var utils = require(__dirname + '/../utils'); +var Result = require(__dirname + '/../result'); //event emitter proxy -var NativeQuery = function(text, values, callback) { - //TODO there are better ways to detect overloads - if(typeof text == 'object') { - this.text = text.text; - this.values = text.values; - this.name = text.name; - if(typeof values === 'function') { - this.callback = values; - } else if(values) { - this.values = values; - this.callback = callback; - } - } else { - this.text = text; - this.values = values; - this.callback = callback; - if(typeof values == 'function') { - this.values = null; - this.callback = values; - } - } - if(this.callback) { - this.rows = []; - } - //normalize values - if(this.values) { - for(var i = 0, len = this.values.length; i < len; i++) { - var item = this.values[i]; - switch(typeof item) { - case 'undefined': - this.values[i] = null; - break; - case 'object': - this.values[i] = item === null ? null : JSON.stringify(item); - break; - case 'string': - //value already string - break; - default: - //numbers - this.values[i] = item.toString(); - } - } +var NativeQuery = function(config, values, callback) { + // use of "new" optional + if (!(this instanceof NativeQuery)) { + return new NativeQuery(config, values, callback); } EventEmitter.call(this); + + var c = utils.normalizeQueryConfig(config, values, callback); + + this.name = c.name; + this.text = c.text; + this.values = c.values; + this.callback = c.callback; + + this._result = new Result(config.rowMode); + this._addedFields = false; + //normalize values + if(this.values) { + for(var i = 0, len = this.values.length; i < len; i++) { + this.values[i] = utils.prepareValue(this.values[i]); + } + } + this._canceledDueToError = false; }; util.inherits(NativeQuery, EventEmitter); -var p = NativeQuery.prototype; -//maps from native rowdata into api compatible row object -var mapRowData = function(row) { - var result = {}; - for(var i = 0, len = row.length; i < len; i++) { - var item = row[i]; - result[item.name] = item.value == null ? null : types.getTypeParser(item.type, 'text')(item.value); - } - return result; -} - -p.handleRow = function(rowData) { - var row = mapRowData(rowData); - if(this.callback) { - this.rows.push(row); - } - this.emit('row', row); +NativeQuery.prototype.handleRowDescription = function(rowDescription) { + this._result.addFields(rowDescription); }; -p.handleError = function(error) { +NativeQuery.prototype.handleRow = function(rowData) { + var row = this._result.parseRow(rowData); if(this.callback) { - this.callback(error); + this._result.addRow(row); + } + this.emit('row', row, this._result); +}; + +NativeQuery.prototype.handleError = function(error) { + if (this._canceledDueToError) { + error = this._canceledDueToError; + this._canceledDueToError = false; + } + if(this.callback) { + var cb = this.callback; + //remove callback to prevent double call on readyForQuery this.callback = null; + cb(error); } else { this.emit('error', error); } -} +}; -p.handleReadyForQuery = function() { - if(this.callback) { - this.callback(null, { rows: this.rows }); +NativeQuery.prototype.handleReadyForQuery = function(meta) { + if (this._canceledDueToError) { + return this.handleError(this._canceledDueToError); } - this.emit('end'); + if(meta) { + this._result.addCommandComplete(meta); + } + if(this.callback) { + this.callback(null, this._result); + } + this.emit('end', this._result); +}; + +NativeQuery.prototype.streamData = function (connection) { + if(this.stream) { + this.stream.startStreamingToConnection(connection); + } + else { + connection.sendCopyFail('No source stream defined'); + } +}; + +NativeQuery.prototype.handleCopyFromChunk = function (chunk) { + if(this.stream) { + this.stream.handleChunk(chunk); + } + //if there are no stream (for example when copy to query was sent by + //query method instead of copyTo) error will be handled + //on copyOutResponse event, so silently ignore this error here }; module.exports = NativeQuery; diff --git a/lib/pool.js b/lib/pool.js new file mode 100644 index 00000000..9cf9aabf --- /dev/null +++ b/lib/pool.js @@ -0,0 +1,68 @@ +var EventEmitter = require('events').EventEmitter; + +var defaults = require(__dirname + '/defaults'); +var genericPool = require('generic-pool'); + +var pools = { + //dictionary of all key:pool pairs + all: {}, + //reference to the client constructor - can override in tests or for require('pg').native + Client: require(__dirname + '/client'), + getOrCreate: function(clientConfig) { + clientConfig = clientConfig || {}; + var name = JSON.stringify(clientConfig); + var pool = pools.all[name]; + if(pool) { + return pool; + } + pool = genericPool.Pool({ + name: name, + max: defaults.poolSize, + idleTimeoutMillis: defaults.poolIdleTimeout, + reapIntervalMillis: defaults.reapIntervalMillis, + log: defaults.poolLog, + create: function(cb) { + var client = new pools.Client(clientConfig); + client.connect(function(err) { + if(err) return cb(err, null); + + //handle connected client background errors by emitting event + //via the pg object and then removing errored client from the pool + client.on('error', function(e) { + pool.emit('error', e, client); + pool.destroy(client); + }); + + return cb(null, client); + }); + }, + destroy: function(client) { + client.end(); + } + }); + pools.all[name] = pool; + //mixin EventEmitter to pool + EventEmitter.call(pool); + for(var key in EventEmitter.prototype) { + if(EventEmitter.prototype.hasOwnProperty(key)) { + pool[key] = EventEmitter.prototype[key]; + } + } + //monkey-patch with connect method + pool.connect = function(cb) { + pool.acquire(function(err, client) { + if(err) return cb(err, null, function() {/*NOOP*/}); + cb(null, client, function(err) { + if(err) { + pool.destroy(client); + } else { + pool.release(client); + } + }); + }); + }; + return pool; + } +}; + +module.exports = pools; diff --git a/lib/query.js b/lib/query.js index 3d5dfc08..44ecee9f 100644 --- a/lib/query.js +++ b/lib/query.js @@ -1,31 +1,49 @@ var EventEmitter = require('events').EventEmitter; var util = require('util'); -var Result = require(__dirname + "/result"); -var Types = require(__dirname + "/types"); +var Result = require(__dirname + '/result'); +var Types = require(__dirname + '/types/'); +var utils = require(__dirname + '/utils'); + +var Query = function(config, values, callback) { + // use of "new" optional + if(!(this instanceof Query)) { return new Query(config, values, callback); } + + config = utils.normalizeQueryConfig(config, values, callback); -var Query = function(config) { this.text = config.text; this.values = config.values; this.rows = config.rows; this.types = config.types; this.name = config.name; this.binary = config.binary; + this.stream = config.stream; //use unique portal name each time - this.portal = config.portal || "" + this.portal = config.portal || ""; this.callback = config.callback; this._fieldNames = []; this._fieldConverters = []; - this._result = new Result(); + this._result = new Result(config.rowMode); this.isPreparedStatement = false; + this._canceledDueToError = false; EventEmitter.call(this); }; util.inherits(Query, EventEmitter); -var p = Query.prototype; -p.requiresPreparation = function() { - return (this.values || 0).length > 0 || this.name || this.rows || this.binary; +Query.prototype.requiresPreparation = function() { + //named queries must always be prepared + if(this.name) { return true; } + //always prepare if there are max number of rows expected per + //portal execution + if(this.rows) { return true; } + //don't prepare empty text queries + if(!this.text) { return false; } + //binary should be prepared to specify results should be in binary + //unless there are no parameters + if(this.binary && !this.values) { return false; } + //prepare if there are values + return (this.values || 0).length > 0; }; @@ -36,62 +54,50 @@ var noParse = function(val) { //associates row metadata from the supplied //message with this query object //metadata used when parsing row results -p.handleRowDescription = function(msg) { - this._fieldNames = []; - this._fieldConverters = []; - var len = msg.fields.length; - for(var i = 0; i < len; i++) { - var field = msg.fields[i]; - var format = field.format; - this._fieldNames[i] = field.name; - this._fieldConverters[i] = Types.getTypeParser(field.dataTypeID, format); - }; +Query.prototype.handleRowDescription = function(msg) { + this._result.addFields(msg.fields); }; -p.handleDataRow = function(msg) { - var self = this; - var row = {}; - for(var i = 0; i < msg.fields.length; i++) { - var rawValue = msg.fields[i]; - if(rawValue === null) { - //leave null values alone - row[self._fieldNames[i]] = null; - } else { - //convert value to javascript - row[self._fieldNames[i]] = self._fieldConverters[i](rawValue); - } - } - self.emit('row', row); +Query.prototype.handleDataRow = function(msg) { + var row = this._result.parseRow(msg.fields); + this.emit('row', row, this._result); //if there is a callback collect rows - if(self.callback) { - self._result.addRow(row); + if(this.callback) { + this._result.addRow(row); } }; -p.handleCommandComplete = function(msg) { +Query.prototype.handleCommandComplete = function(msg) { this._result.addCommandComplete(msg); }; -p.handleReadyForQuery = function() { +Query.prototype.handleReadyForQuery = function() { + if(this._canceledDueToError) { + return this.handleError(this._canceledDueToError); + } if(this.callback) { this.callback(null, this._result); } this.emit('end', this._result); }; -p.handleError = function(err) { +Query.prototype.handleError = function(err) { + if(this._canceledDueToError) { + err = this._canceledDueToError; + this._canceledDueToError = false; + } //if callback supplied do not emit error event as uncaught error //events will bubble up to node process if(this.callback) { - this.callback(err) + this.callback(err); } else { this.emit('error', err); } this.emit('end'); }; -p.submit = function(connection) { +Query.prototype.submit = function(connection) { var self = this; if(this.requiresPreparation()) { this.prepare(connection); @@ -100,11 +106,11 @@ p.submit = function(connection) { } }; -p.hasBeenParsed = function(connection) { +Query.prototype.hasBeenParsed = function(connection) { return this.name && connection.parsedStatements[this.name]; }; -p.getRows = function(connection) { +Query.prototype.getRows = function(connection) { connection.execute({ portal: this.portalName, rows: this.rows @@ -112,7 +118,7 @@ p.getRows = function(connection) { connection.flush(); }; -p.prepare = function(connection) { +Query.prototype.prepare = function(connection) { var self = this; //prepared statements need sync to be called after each command //complete or when an error is encountered @@ -124,14 +130,16 @@ p.prepare = function(connection) { name: self.name, types: self.types }, true); - connection.parsedStatements[this.name] = true; + if(this.name) { + connection.parsedStatements[this.name] = true; + } } //TODO is there some better way to prepare values for the database? if(self.values) { - self.values = self.values.map(function(val) { - return (val instanceof Date) ? JSON.stringify(val) : val; - }); + for(var i = 0, len = self.values.length; i < len; i++) { + self.values[i] = utils.prepareValue(self.values[i]); + } } //http://developer.postgresql.org/pgdocs/postgres/protocol-flow.html#PROTOCOL-FLOW-EXT-QUERY @@ -150,4 +158,17 @@ p.prepare = function(connection) { this.getRows(connection); }; +Query.prototype.streamData = function (connection) { + if(this.stream) this.stream.startStreamingToConnection(connection); + else connection.sendCopyFail('No source stream defined'); +}; + +Query.prototype.handleCopyFromChunk = function (chunk) { + if(this.stream) { + this.stream.handleChunk(chunk); + } + //if there are no stream (for example when copy to query was sent by + //query method instead of copyTo) error will be handled + //on copyOutResponse event, so silently ignore this error here +}; module.exports = Query; diff --git a/lib/result.js b/lib/result.js index f46ed418..c9a777ec 100644 --- a/lib/result.js +++ b/lib/result.js @@ -1,32 +1,94 @@ +var types = require(__dirname + '/types/'); + //result object returned from query //in the 'end' event and also //passed as second argument to provided callback -var Result = function() { +var Result = function(rowMode) { + this.command = null; + this.rowCount = null; + this.oid = null; this.rows = []; + this.fields = []; + this._parsers = []; + this.RowCtor = null; + if(rowMode == "array") { + this.parseRow = this._parseRowAsArray; + } }; -var p = Result.prototype; - - -var matchRegexp = /([A-Za-z]+) (\d+ )?(\d+)?/ +var matchRegexp = /([A-Za-z]+) ?(\d+ )?(\d+)?/; //adds a command complete message -p.addCommandComplete = function(msg) { - var match = matchRegexp.exec(msg.text); +Result.prototype.addCommandComplete = function(msg) { + var match; + if(msg.text) { + //pure javascript + match = matchRegexp.exec(msg.text); + } else { + //native bindings + match = matchRegexp.exec(msg.command); + } if(match) { this.command = match[1]; //match 3 will only be existing on insert commands if(match[3]) { - this.rowCount = parseInt(match[3]); - this.oid = parseInt(match[2]); + //msg.value is from native bindings + this.rowCount = parseInt(match[3] || msg.value, 10); + this.oid = parseInt(match[2], 10); } else { - this.rowCount = parseInt(match[2]); + this.rowCount = parseInt(match[2], 10); } } }; -p.addRow = function(row) { +Result.prototype._parseRowAsArray = function(rowData) { + var row = []; + for(var i = 0, len = rowData.length; i < len; i++) { + var rawValue = rowData[i]; + if(rawValue !== null) { + row.push(this._parsers[i](rawValue)); + } else { + row.push(null); + } + } + return row; +}; + +//rowData is an array of text or binary values +//this turns the row into a JavaScript object +Result.prototype.parseRow = function(rowData) { + return new this.RowCtor(this._parsers, rowData); +}; + +Result.prototype.addRow = function(row) { this.rows.push(row); }; +var inlineParser = function(fieldName, i) { + return "\nthis['" + fieldName + "'] = " + + "rowData[" + i + "] == null ? null : parsers[" + i + "](rowData[" + i + "]);"; +}; + +Result.prototype.addFields = function(fieldDescriptions) { + //clears field definitions + //multiple query statements in 1 action can result in multiple sets + //of rowDescriptions...eg: 'select NOW(); select 1::int;' + //you need to reset the fields + if(this.fields.length) { + this.fields = []; + this._parsers = []; + } + var ctorBody = ""; + for(var i = 0; i < fieldDescriptions.length; i++) { + var desc = fieldDescriptions[i]; + this.fields.push(desc); + var parser = types.getTypeParser(desc.dataTypeID, desc.format || 'text'); + this._parsers.push(parser); + //this is some craziness to compile the row result parsing + //results in ~60% speedup on large query result sets + ctorBody += inlineParser(desc.name, i); + } + this.RowCtor = Function("parsers", "rowData", ctorBody); +}; + module.exports = Result; diff --git a/lib/textParsers.js b/lib/textParsers.js deleted file mode 100644 index 5032d04b..00000000 --- a/lib/textParsers.js +++ /dev/null @@ -1,131 +0,0 @@ -//parses PostgreSQL server formatted date strings into javascript date objects -var parseDate = function(isoDate) { - //TODO this could do w/ a refactor - var dateMatcher = /(\d{4})-(\d{2})-(\d{2}) (\d{2}):(\d{2}):(\d{2})(\.\d{1,})?/; - - var match = dateMatcher.exec(isoDate); - //could not parse date - if(!match) { - return null; - } - var year = match[1]; - var month = parseInt(match[2],10)-1; - var day = match[3]; - var hour = parseInt(match[4],10); - var min = parseInt(match[5],10); - var seconds = parseInt(match[6], 10); - - var miliString = match[7]; - var mili = 0; - if(miliString) { - mili = 1000 * parseFloat(miliString); - } - - var tZone = /([Z|+\-])(\d{2})?(\d{2})?/.exec(isoDate.split(' ')[1]); - //minutes to adjust for timezone - var tzAdjust = 0; - - if(tZone) { - var type = tZone[1]; - switch(type) { - case 'Z': break; - case '-': - tzAdjust = -(((parseInt(tZone[2],10)*60)+(parseInt(tZone[3]||0,10)))); - break; - case '+': - tzAdjust = (((parseInt(tZone[2],10)*60)+(parseInt(tZone[3]||0,10)))); - break; - default: - throw new Error("Unidentifed tZone part " + type); - } - } - - var utcOffset = Date.UTC(year, month, day, hour, min, seconds, mili); - - var date = new Date(utcOffset - (tzAdjust * 60* 1000)); - return date; -}; - -var parseBool = function(val) { - return val === 't'; -} - -var parseIntegerArray = function(val) { - return JSON.parse(val.replace("{","[").replace("}","]")); -}; - -var parseStringArray = function(val) { - if (!val) return null; - if (val[0] !== '{' || val[val.length-1] !== '}') - throw "Not postgresql array! (" + arrStr + ")"; - - var x = val.substring(1, val.length - 1); - if (x === '') return []; - x = x.match(/(NULL|[^,]+|"((?:.|\n|\r)*?)(?!\\)"|\{((?:.|\n|\r)*?(?!\\)\}) (,|$))/mg); - if (x === null) throw "Not postgre array"; - return x.map(function (el) { - if (el === 'NULL') return null; - if (el[0] === '{') return arguments.callee(el); - if (el[0] === '\"') return el.substring(1, el.length - 1).replace(/\\(.)/g, '$1'); - return el; - }); -}; - - -var NUM = '([+-]?\\d+)'; -var YEAR = NUM + '\\s+years?'; -var MON = NUM + '\\s+mons?'; -var DAY = NUM + '\\s+days?'; -var TIME = '([+-])?(\\d\\d):(\\d\\d):(\\d\\d)'; -var INTERVAL = [YEAR,MON,DAY,TIME].map(function(p){ return "("+p+")?" }).join('\\s*'); - -var parseInterval = function(val) { - if (!val) return {}; - var m = new RegExp(INTERVAL).exec(val); - var i = {}; - if (m[2]) i.years = parseInt(m[2]); - if (m[4]) i.months = parseInt(m[4]); - if (m[6]) i.days = parseInt(m[6]); - if (m[9]) i.hours = parseInt(m[9]); - if (m[10]) i.minutes = parseInt(m[10]); - if (m[11]) i.seconds = parseInt(m[11]); - if (m[8] == '-'){ - if (i.hours) i.hours *= -1; - if (i.minutes) i.minutes *= -1; - if (i.seconds) i.seconds *= -1; - } - for (field in i){ - if (i[field] == 0) - delete i[field]; - } - return i; -}; - -var parseByteA = function(val) { - return new Buffer(val.replace(/\\([0-7]{3})/g, function (full_match, code) { - return String.fromCharCode(parseInt(code, 8)); - }).replace(/\\\\/g, "\\"), "binary"); -} - -var init = function(register) { - register(20, parseInt); - register(21, parseInt); - register(23, parseInt); - register(26, parseInt); - register(1700, parseFloat); - register(700, parseFloat); - register(701, parseFloat); - register(16, parseBool); - register(1114, parseDate); - register(1184, parseDate); - register(1007, parseIntegerArray); - register(1016, parseIntegerArray); - register(1008, parseStringArray); - register(1009, parseStringArray); - register(1186, parseInterval); - register(17, parseByteA); -}; - -module.exports = { - init: init, -}; diff --git a/lib/types/arrayParser.js b/lib/types/arrayParser.js new file mode 100644 index 00000000..96a37b93 --- /dev/null +++ b/lib/types/arrayParser.js @@ -0,0 +1,97 @@ +function ArrayParser(source, converter) { + this.source = source; + this.converter = converter; + this.pos = 0; + this.entries = []; + this.recorded = []; + this.dimension = 0; + if (!this.converter) { + this.converter = function(entry) { + return entry; + }; + } +} + +ArrayParser.prototype.eof = function() { + return this.pos >= this.source.length; +}; + +ArrayParser.prototype.nextChar = function() { + var c; + if ((c = this.source[this.pos++]) === "\\") { + return { + char: this.source[this.pos++], + escaped: true + }; + } else { + return { + char: c, + escaped: false + }; + } +}; + +ArrayParser.prototype.record = function(c) { + return this.recorded.push(c); +}; + +ArrayParser.prototype.newEntry = function(includeEmpty) { + var entry; + if (this.recorded.length > 0 || includeEmpty) { + entry = this.recorded.join(""); + if (entry === "NULL" && !includeEmpty) { + entry = null; + } + if (entry !== null) { + entry = this.converter(entry); + } + this.entries.push(entry); + this.recorded = []; + } +}; + +ArrayParser.prototype.parse = function(nested) { + var c, p, quote; + if (nested === null) { + nested = false; + } + quote = false; + while (!this.eof()) { + c = this.nextChar(); + if (c.char === "{" && !quote) { + this.dimension++; + if (this.dimension > 1) { + p = new ArrayParser(this.source.substr(this.pos - 1), this.converter); + this.entries.push(p.parse(true)); + this.pos += p.pos - 2; + } + } else if (c.char === "}" && !quote) { + this.dimension--; + if (this.dimension === 0) { + this.newEntry(); + if (nested) { + return this.entries; + } + } + } else if (c.char === '"' && !c.escaped) { + if (quote) { + this.newEntry(true); + } + quote = !quote; + } else if (c.char === ',' && !quote) { + this.newEntry(); + } else { + this.record(c.char); + } + } + if (this.dimension !== 0) { + throw "array dimension not balanced"; + } + return this.entries; +}; + +module.exports = { + create: function(source, converter){ + return new ArrayParser(source, converter); + } +}; diff --git a/lib/types/binaryParsers.js b/lib/types/binaryParsers.js new file mode 100644 index 00000000..a71ebb7c --- /dev/null +++ b/lib/types/binaryParsers.js @@ -0,0 +1,256 @@ +var parseBits = function(data, bits, offset, invert, callback) { + offset = offset || 0; + invert = invert || false; + callback = callback || function(lastValue, newValue, bits) { return (lastValue * Math.pow(2, bits)) + newValue; }; + var offsetBytes = offset >> 3; + + var inv = function(value) { + if (invert) { + return ~value & 0xff; + } + + return value; + }; + + // read first (maybe partial) byte + var mask = 0xff; + var firstBits = 8 - (offset % 8); + if (bits < firstBits) { + mask = (0xff << (8 - bits)) & 0xff; + firstBits = bits; + } + + if (offset) { + mask = mask >> (offset % 8); + } + + var result = 0; + if ((offset % 8) + bits >= 8) { + result = callback(0, inv(data[offsetBytes]) & mask, firstBits); + } + + // read bytes + var bytes = (bits + offset) >> 3; + for (var i = offsetBytes + 1; i < bytes; i++) { + result = callback(result, inv(data[i]), 8); + } + + // bits to read, that are not a complete byte + var lastBits = (bits + offset) % 8; + if (lastBits > 0) { + result = callback(result, inv(data[bytes]) >> (8 - lastBits), lastBits); + } + + return result; +}; + +var parseFloatFromBits = function(data, precisionBits, exponentBits) { + var bias = Math.pow(2, exponentBits - 1) - 1; + var sign = parseBits(data, 1); + var exponent = parseBits(data, exponentBits, 1); + + if (exponent === 0) { + return 0; + } + + // parse mantissa + var precisionBitsCounter = 1; + var parsePrecisionBits = function(lastValue, newValue, bits) { + if (lastValue === 0) { + lastValue = 1; + } + + for (var i = 1; i <= bits; i++) { + precisionBitsCounter /= 2; + if ((newValue & (0x1 << (bits - i))) > 0) { + lastValue += precisionBitsCounter; + } + } + + return lastValue; + }; + + var mantissa = parseBits(data, precisionBits, exponentBits + 1, false, parsePrecisionBits); + + // special cases + if (exponent == (Math.pow(2, exponentBits + 1) - 1)) { + if (mantissa === 0) { + return (sign === 0) ? Infinity : -Infinity; + } + + return NaN; + } + + // normale number + return ((sign === 0) ? 1 : -1) * Math.pow(2, exponent - bias) * mantissa; +}; + +var parseBool = function(value) { + return (parseBits(value, 8) == 1); +}; + +var parseInt16 = function(value) { + if (parseBits(value, 1) == 1) { + return -1 * (parseBits(value, 15, 1, true) + 1); + } + + return parseBits(value, 15, 1); +}; + +var parseInt32 = function(value) { + if (parseBits(value, 1) == 1) { + return -1 * (parseBits(value, 31, 1, true) + 1); + } + + return parseBits(value, 31, 1); +}; + +var parseFloat32 = function(value) { + return parseFloatFromBits(value, 23, 8); +}; + +var parseFloat64 = function(value) { + return parseFloatFromBits(value, 52, 11); +}; + +var parseNumeric = function(value) { + var sign = parseBits(value, 16, 32); + if (sign == 0xc000) { + return NaN; + } + + var weight = Math.pow(10000, parseBits(value, 16, 16)); + var result = 0; + + var digits = []; + var ndigits = parseBits(value, 16); + for (var i = 0; i < ndigits; i++) { + result += parseBits(value, 16, 64 + (16 * i)) * weight; + weight /= 10000; + } + + var scale = Math.pow(10, parseBits(value, 16, 48)); + return ((sign === 0) ? 1 : -1) * Math.round(result * scale) / scale; +}; + +var parseDate = function(isUTC, value) { + var sign = parseBits(value, 1); + var rawValue = parseBits(value, 63, 1); + + // discard usecs and shift from 2000 to 1970 + var result = new Date((((sign === 0) ? 1 : -1) * rawValue / 1000) + 946684800000); + + if (!isUTC) { + result.setTime(result.getTime() + result.getTimezoneOffset() * 60000); + } + + // add microseconds to the date + result.usec = rawValue % 1000; + result.getMicroSeconds = function() { + return this.usec; + }; + result.setMicroSeconds = function(value) { + this.usec = value; + }; + result.getUTCMicroSeconds = function() { + return this.usec; + }; + + return result; +}; + +var parseArray = function(value) { + var dim = parseBits(value, 32); + + var flags = parseBits(value, 32, 32); + var elementType = parseBits(value, 32, 64); + + var offset = 96; + var dims = []; + for (var i = 0; i < dim; i++) { + // parse dimension + dims[i] = parseBits(value, 32, offset); + offset += 32; + + // ignore lower bounds + offset += 32; + } + + var parseElement = function(elementType) { + // parse content length + var length = parseBits(value, 32, offset); + offset += 32; + + // parse null values + if (length == 0xffffffff) { + return null; + } + + var result; + if ((elementType == 0x17) || (elementType == 0x14)) { + // int/bigint + result = parseBits(value, length * 8, offset); + offset += length * 8; + return result; + } + else if (elementType == 0x19) { + // string + result = value.toString(this.encoding, offset >> 3, (offset += (length << 3)) >> 3); + return result; + } + else { + console.log("ERROR: ElementType not implemented: " + elementType); + } + }; + + var parse = function(dimension, elementType) { + var array = []; + var i; + + if (dimension.length > 1) { + var count = dimension.shift(); + for (i = 0; i < count; i++) { + array[i] = parse(dimension, elementType); + } + dimension.unshift(count); + } + else { + for (i = 0; i < dimension[0]; i++) { + array[i] = parseElement(elementType); + } + } + + return array; + }; + + return parse(dims, elementType); +}; + +var parseText = function(value) { + return value.toString('utf8'); +}; + +var parseBool = function(value) { + return (parseBits(value, 8) > 0); +}; + +var init = function(register) { + register(21, parseInt16); + register(23, parseInt32); + register(26, parseInt32); + register(1700, parseNumeric); + register(700, parseFloat32); + register(701, parseFloat64); + register(16, parseBool); + register(1114, parseDate.bind(null, false)); + register(1184, parseDate.bind(null, true)); + register(1007, parseArray); + register(1016, parseArray); + register(1008, parseArray); + register(1009, parseArray); + register(25, parseText); +}; + +module.exports = { + init: init +}; diff --git a/lib/types.js b/lib/types/index.js similarity index 57% rename from lib/types.js rename to lib/types/index.js index c2bbc110..b731433c 100644 --- a/lib/types.js +++ b/lib/types/index.js @@ -1,5 +1,5 @@ -var textParsers = require(__dirname + "/textParsers"), -binaryParsers = require(__dirname + "/binaryParsers"); +var textParsers = require(__dirname + '/textParsers'); +var binaryParsers = require(__dirname + '/binaryParsers'); var typeParsers = { text: {}, @@ -9,21 +9,27 @@ var typeParsers = { //the empty parse function var noParse = function(val) { return String(val); -} +}; //returns a function used to convert a specific type (specified by //oid) into a result javascript type var getTypeParser = function(oid, format) { - if (!typeParsers[format]) + if (!typeParsers[format]) { return noParse; - + } return typeParsers[format][oid] || noParse; }; +var setTypeParser = function(oid, format, parseFn) { + if(typeof format == 'function') { + parseFn = format; + format = 'text'; + } + typeParsers[format][oid] = parseFn; +}; + textParsers.init(function(oid, converter) { - typeParsers.text[oid] = function(value) { - return converter(String(value)); - }; + typeParsers.text[oid] = converter; }); binaryParsers.init(function(oid, converter) { @@ -32,4 +38,5 @@ binaryParsers.init(function(oid, converter) { module.exports = { getTypeParser: getTypeParser, -} + setTypeParser: setTypeParser +}; diff --git a/lib/types/textParsers.js b/lib/types/textParsers.js new file mode 100644 index 00000000..c7ec064f --- /dev/null +++ b/lib/types/textParsers.js @@ -0,0 +1,199 @@ +var arrayParser = require(__dirname + "/arrayParser.js"); + +//parses PostgreSQL server formatted date strings into javascript date objects +var parseDate = function(isoDate) { + //TODO this could do w/ a refactor + var dateMatcher = /(\d{4})-(\d{2})-(\d{2}) (\d{2}):(\d{2}):(\d{2})(\.\d{1,})?/; + + var match = dateMatcher.exec(isoDate); + //could not parse date + if(!match) { + dateMatcher = /^(\d{4})-(\d{2})-(\d{2})$/; + match = dateMatcher.test(isoDate); + if(!match) { + return null; + } else { + //it is a date in YYYY-MM-DD format + return new Date(isoDate); + } + } + var year = match[1]; + var month = parseInt(match[2],10)-1; + var day = match[3]; + var hour = parseInt(match[4],10); + var min = parseInt(match[5],10); + var seconds = parseInt(match[6], 10); + + var miliString = match[7]; + var mili = 0; + if(miliString) { + mili = 1000 * parseFloat(miliString); + } + + //match timezones like the following: + //Z (UTC) + //-05 + //+06:30 + var tZone = /([Z|+\-])(\d{2})?:?(\d{2})?/.exec(isoDate.split(' ')[1]); + //minutes to adjust for timezone + var tzAdjust = 0; + if(tZone) { + var type = tZone[1]; + switch(type) { + case 'Z': + break; + case '-': + tzAdjust = -(((parseInt(tZone[2],10)*60)+(parseInt(tZone[3]||0,10)))); + break; + case '+': + tzAdjust = (((parseInt(tZone[2],10)*60)+(parseInt(tZone[3]||0,10)))); + break; + default: + throw new Error("Unidentifed tZone part " + type); + } + + var utcOffset = Date.UTC(year, month, day, hour, min, seconds, mili); + return new Date(utcOffset - (tzAdjust * 60* 1000)); + } + //no timezone information + else { + return new Date(year, month, day, hour, min, seconds, mili); + } + +}; + +var parseBool = function(val) { + return val === 't'; +}; + +var parseIntegerArray = function(val) { + if(!val) { return null; } + var p = arrayParser.create(val, function(entry){ + if(entry !== null) { + entry = parseInt(entry, 10); + } + return entry; + }); + + return p.parse(); +}; + +var parseFloatArray = function(val) { + if(!val) { return null; } + var p = arrayParser.create(val, function(entry) { + return entry; + }); + + return p.parse(); +}; + +var parseStringArray = function(val) { + if(!val) { return null; } + + var p = arrayParser.create(val); + return p.parse(); +}; + + +var NUM = '([+-]?\\d+)'; +var YEAR = NUM + '\\s+years?'; +var MON = NUM + '\\s+mons?'; +var DAY = NUM + '\\s+days?'; +var TIME = '([+-])?(\\d\\d):(\\d\\d):(\\d\\d)'; +var INTERVAL = [YEAR,MON,DAY,TIME].map(function(p){ + return "("+p+")?"; +}).join('\\s*'); + +var parseInterval = function(val) { + if (!val) { return {}; } + var m = new RegExp(INTERVAL).exec(val); + var i = {}; + if (m[2]) { i.years = parseInt(m[2], 10); } + if (m[4]) { i.months = parseInt(m[4], 10); } + if (m[6]) { i.days = parseInt(m[6], 10); } + if (m[9]) { i.hours = parseInt(m[9], 10); } + if (m[10]) { i.minutes = parseInt(m[10], 10); } + if (m[11]) { i.seconds = parseInt(m[11], 10); } + if (m[8] == '-'){ + if (i.hours) { i.hours *= -1; } + if (i.minutes) { i.minutes *= -1; } + if (i.seconds) { i.seconds *= -1; } + } + for (var field in i){ + if (i[field] === 0) { + delete i[field]; + } + } + return i; +}; + +var parseByteA = function(val) { + if(/^\\x/.test(val)){ + // new 'hex' style response (pg >9.0) + return new Buffer(val.substr(2), 'hex'); + }else{ + var out = ""; + var i = 0; + while(i < val.length){ + if(val[i] != "\\"){ + out += val[i]; + ++i; + }else{ + if(val.substr(i+1,3).match(/[0-7]{3}/)){ + out += String.fromCharCode(parseInt(val.substr(i+1,3),8)); + i += 4; + }else{ + backslashes = 1; + while(i+backslashes < val.length && val[i+backslashes] == "\\") + backslashes++; + for(k=0; k 0) { + result = result + ','; + } + if(val[i] instanceof Date) { + result = result + JSON.stringify(val[i]); + } + else if(typeof val[i] === 'undefined') { + result = result + 'NULL'; + } + else if(Array.isArray(val[i])) { + result = result + arrayString(val[i]); + } + else + { + result = result + + (val[i] === null ? 'NULL' : JSON.stringify(val[i])); + } + } + result = result + '}'; + return result; } -//builds libpq specific connection string -//from a supplied config object -//the config object conforms to the interface of the config object -//accepted by the pure javascript client -var getLibpgConString = function(config, callback) { - if(typeof config == 'object') { - var params = [] - add(params, config, 'user'); - add(params, config, 'password'); - add(params, config, 'port'); - if(config.database) { - params.push("dbname='" + config.database + "'"); - } - if(config.host) { - if(config.host != 'localhost' && config.host != '127.0.0.1') { - //do dns lookup - return require('dns').lookup(config.host, 4, function(err, address) { - if(err) return callback(err, null); - params.push("hostaddr="+address) - callback(null, params.join(" ")) - }) - } - params.push("hostaddr=127.0.0.1 "); - } - callback(null, params.join(" ")); - } else { - throw new Error("Unrecognized config type for connection"); +//converts values from javascript types +//to their 'raw' counterparts for use as a postgres parameter +//note: you can override this function to provide your own conversion mechanism +//for complex types, etc... +var prepareValue = function(val) { + if(val instanceof Date) { + return dateToString(val); } + if(typeof val === 'undefined') { + return null; + } + if(Array.isArray(val)) { + return arrayString(val); + } + if(!val || typeof val !== 'object') { + return val === null ? null : val.toString(); + } + return JSON.stringify(val); +}; + +function dateToString(date) { + function pad(number, digits) { + number = ""+number; + while(number.length < digits) + number = "0"+number; + return number; + } + + var offset = -date.getTimezoneOffset(); + var ret = pad(date.getFullYear(), 4) + '-' + + pad(date.getMonth() + 1, 2) + '-' + + pad(date.getDate(), 2) + 'T' + + pad(date.getHours(), 2) + ':' + + pad(date.getMinutes(), 2) + ':' + + pad(date.getSeconds(), 2) + '.' + + pad(date.getMilliseconds(), 3); + + if(offset < 0) { + ret += "-"; + offset *= -1; + } + else + ret += "+"; + + return ret + pad(Math.floor(offset/60), 2) + ":" + pad(offset%60, 2); +} + +function normalizeQueryConfig (config, values, callback) { + //can take in strings or config objects + config = (typeof(config) == 'string') ? { text: config } : config; + if(values) { + if(typeof values === 'function') { + config.callback = values; + } else { + config.values = values; + } + } + if(callback) { + config.callback = callback; + } + return config; } module.exports = { - normalizeConnectionInfo: normalizeConnectionInfo, - //only exported here to make testing of this method possible - //since it contains quite a bit of logic and testing for - //each connection scenario in an integration test is impractical - buildLibpqConnectionString: getLibpgConString -} + prepareValue: prepareValue, + normalizeQueryConfig: normalizeQueryConfig +}; diff --git a/lib/writer.js b/lib/writer.js deleted file mode 100644 index 49aed26d..00000000 --- a/lib/writer.js +++ /dev/null @@ -1,130 +0,0 @@ -//binary data writer tuned for creating -//postgres message packets as effeciently as possible by reusing the -//same buffer to avoid memcpy and limit memory allocations -var Writer = function(size) { - this.size = size || 1024; - this.buffer = Buffer(this.size + 5); - this.offset = 5; - this.headerPosition = 0; -}; - -var p = Writer.prototype; - -//resizes internal buffer if not enough size left -p._ensure = function(size) { - var remaining = this.buffer.length - this.offset; - if(remaining < size) { - var oldBuffer = this.buffer; - this.buffer = new Buffer(oldBuffer.length + size); - oldBuffer.copy(this.buffer); - } -} - -p.addInt32 = function(num) { - this._ensure(4) - this.buffer[this.offset++] = (num >>> 24 & 0xFF) - this.buffer[this.offset++] = (num >>> 16 & 0xFF) - this.buffer[this.offset++] = (num >>> 8 & 0xFF) - this.buffer[this.offset++] = (num >>> 0 & 0xFF) - return this; -} - -p.addInt16 = function(num) { - this._ensure(2) - this.buffer[this.offset++] = (num >>> 8 & 0xFF) - this.buffer[this.offset++] = (num >>> 0 & 0xFF) - return this; -} - -//for versions of node requiring 'length' as 3rd argument to buffer.write -var writeString = function(buffer, string, offset, len) { - buffer.write(string, offset, len); -} - -//overwrite function for older versions of node -if(Buffer.prototype.write.length === 3) { - writeString = function(buffer, string, offset, len) { - buffer.write(string, offset); - } -} - -p.addCString = function(string) { - //just write a 0 for empty or null strings - if(!string) { - this._ensure(1); - } else { - var len = Buffer.byteLength(string); - this._ensure(len + 1); //+1 for null terminator - writeString(this.buffer, string, this.offset, len); - this.offset += len; - } - - this.buffer[this.offset++] = 0; // null terminator - return this; -} - -p.addChar = function(char) { - this._ensure(1); - writeString(this.buffer, char, this.offset, 1); - this.offset++; - return this; -} - -p.addString = function(string) { - var string = string || ""; - var len = Buffer.byteLength(string); - this._ensure(len); - this.buffer.write(string, this.offset); - this.offset += len; - return this; -} - -p.getByteLength = function() { - return this.offset - 5; -} - -p.add = function(otherBuffer) { - this._ensure(otherBuffer.length); - otherBuffer.copy(this.buffer, this.offset); - this.offset += otherBuffer.length; - return this; -} - -p.clear = function() { - this.offset = 5; - this.headerPosition = 0; - this.lastEnd = 0; -} - -//appends a header block to all the written data since the last -//subsequent header or to the beginning if there is only one data block -p.addHeader = function(code, last) { - var origOffset = this.offset; - this.offset = this.headerPosition; - this.buffer[this.offset++] = code; - //length is everything in this packet minus the code - this.addInt32(origOffset - (this.headerPosition+1)) - //set next header position - this.headerPosition = origOffset; - //make space for next header - this.offset = origOffset; - if(!last) { - this._ensure(5); - this.offset += 5; - } -} - -p.join = function(code) { - if(code) { - this.addHeader(code, true); - } - return this.buffer.slice(code ? 0 : 5, this.offset); -} - -p.flush = function(code) { - var result = this.join(code); - this.clear(); - return result; -} - -module.exports = Writer; diff --git a/package.json b/package.json index 37bc02dd..4cdce1be 100644 --- a/package.json +++ b/package.json @@ -1,20 +1,35 @@ -{ "name": "pg", - "version": "0.6.10", +{ + "name": "pg", + "version": "2.5.0", "description": "PostgreSQL client - pure javascript & libpq with the same API", - "keywords" : ["postgres", "pg", "libpq", "postgre", "database", "rdbms"], + "keywords": [ + "postgres", + "pg", + "libpq", + "postgre", + "database", + "rdbms" + ], "homepage": "http://github.com/brianc/node-postgres", - "repository" : { - "type" : "git", - "url" : "git://github.com/brianc/node-postgres.git" + "repository": { + "type": "git", + "url": "git://github.com/brianc/node-postgres.git" }, - "author" : "Brian Carlson ", - "main" : "./lib", - "dependencies" : { - "generic-pool" : "1.0.9" + "author": "Brian Carlson ", + "main": "./lib", + "dependencies": { + "generic-pool": "2.0.3", + "buffer-writer": "1.0.0" }, - "scripts" : { - "test" : "make test", - "install" : "node-waf configure build || (exit 0)" + "devDependencies": { + "jshint": "1.1.0", + "semver": "~1.1.4" }, - "engines" : { "node": ">= 0.4.0" } + "scripts": { + "test": "make test-travis connectionString=postgres://postgres@localhost:5432/postgres", + "install": "node-gyp rebuild || (exit 0)" + }, + "engines": { + "node": ">= 0.8.0" + } } diff --git a/script/list-db-types.js b/script/list-db-types.js index 71b4ab7e..748d32f2 100644 --- a/script/list-db-types.js +++ b/script/list-db-types.js @@ -1,6 +1,6 @@ var helper = require(__dirname + "/../test/integration/test-helper"); var pg = helper.pg; -pg.connect(helper.connectionString(), assert.success(function(client) { +pg.connect(helper.config, assert.success(function(client) { var query = client.query('select oid, typname from pg_type where typtype = \'b\' order by oid'); query.on('row', console.log); })) diff --git a/script/setup-bench-data.js b/script/setup-bench-data.js new file mode 100644 index 00000000..4f809f78 --- /dev/null +++ b/script/setup-bench-data.js @@ -0,0 +1,5 @@ +var pg = require('../lib'); +var +pg.connect(function(err, client) { + +}) diff --git a/script/test-connection.js b/script/test-connection.js index 81128610..a70ada39 100644 --- a/script/test-connection.js +++ b/script/test-connection.js @@ -3,7 +3,7 @@ var helper = require(__dirname + '/../test/test-helper'); console.log(); console.log("testing ability to connect to '%j'", helper.config); var pg = require(__dirname + '/../lib'); -pg.connect(helper.config, function(err, client) { +pg.connect(helper.config, function(err, client, done) { if(err !== null) { console.error("Recieved connection error when attempting to contact PostgreSQL:"); console.error(err); @@ -18,6 +18,7 @@ pg.connect(helper.config, function(err, client) { console.error(err); process.exit(255); } + done(); pg.end(); }) }) diff --git a/script/travis-pg-9.2-install.sh b/script/travis-pg-9.2-install.sh new file mode 100755 index 00000000..82ad58da --- /dev/null +++ b/script/travis-pg-9.2-install.sh @@ -0,0 +1,20 @@ +#! /usr/bin/env bash +#sudo cat /etc/postgresql/9.1/main/pg_hba.conf +#sudo cat /etc/postgresql/9.1/main/pg_ident.conf +#sudo cat /etc/postgresql/9.1/main/postgresql.conf +sudo /etc/init.d/postgresql stop +sudo apt-get -y --purge remove postgresql +echo "yes" | sudo add-apt-repository ppa:pitti/postgresql +sudo apt-get update -qq +sudo apt-get -q -y -o Dpkg::Options::=--force-confdef install postgresql-9.2 postgresql-contrib-9.2 +sudo chmod 777 /etc/postgresql/9.2/main/pg_hba.conf +sudo echo "local all postgres trust" > /etc/postgresql/9.2/main/pg_hba.conf +sudo echo "local all all trust" >> /etc/postgresql/9.2/main/pg_hba.conf +sudo echo "host all all 127.0.0.1/32 trust" >> /etc/postgresql/9.2/main/pg_hba.conf +sudo echo "host all all ::1/128 trust" >> /etc/postgresql/9.2/main/pg_hba.conf +sudo echo "host all all 0.0.0.0/0 trust" >> /etc/postgresql/9.2/main/pg_hba.conf +sudo echo "host all all 0.0.0.0 255.255.255.255 trust" >> /etc/postgresql/9.2/main/pg_hba.conf +sudo /etc/init.d/postgresql restart +# for some reason both postgres 9.1 and 9.2 are started +# 9.2 is running on port 5433 +node script/create-test-tables.js postgres://postgres@localhost:5433/postgres diff --git a/src/binding.cc b/src/binding.cc index 8518c561..a9a7943f 100644 --- a/src/binding.cc +++ b/src/binding.cc @@ -1,5 +1,7 @@ +#include #include #include +#include #include #include #include @@ -7,6 +9,9 @@ #define LOG(msg) printf("%s\n",msg); #define TRACE(msg) //printf("%s\n", msg); +#if PG_VERSION_NUM >= 90000 +#define ESCAPE_SUPPORTED +#endif #define THROW(msg) return ThrowException(Exception::Error(String::New(msg))); @@ -30,6 +35,7 @@ static Persistent type_symbol; static Persistent channel_symbol; static Persistent payload_symbol; static Persistent emit_symbol; +static Persistent command_symbol; class Connection : public ObjectWrap { @@ -59,28 +65,39 @@ public: routine_symbol = NODE_PSYMBOL("routine"); name_symbol = NODE_PSYMBOL("name"); value_symbol = NODE_PSYMBOL("value"); - type_symbol = NODE_PSYMBOL("type"); + type_symbol = NODE_PSYMBOL("dataTypeID"); channel_symbol = NODE_PSYMBOL("channel"); payload_symbol = NODE_PSYMBOL("payload"); - + command_symbol = NODE_PSYMBOL("command"); NODE_SET_PROTOTYPE_METHOD(t, "connect", Connect); +#ifdef ESCAPE_SUPPORTED + NODE_SET_PROTOTYPE_METHOD(t, "escapeIdentifier", EscapeIdentifier); + NODE_SET_PROTOTYPE_METHOD(t, "escapeLiteral", EscapeLiteral); +#endif NODE_SET_PROTOTYPE_METHOD(t, "_sendQuery", SendQuery); NODE_SET_PROTOTYPE_METHOD(t, "_sendQueryWithParams", SendQueryWithParams); NODE_SET_PROTOTYPE_METHOD(t, "_sendPrepare", SendPrepare); NODE_SET_PROTOTYPE_METHOD(t, "_sendQueryPrepared", SendQueryPrepared); NODE_SET_PROTOTYPE_METHOD(t, "cancel", Cancel); NODE_SET_PROTOTYPE_METHOD(t, "end", End); - + NODE_SET_PROTOTYPE_METHOD(t, "_sendCopyFromChunk", SendCopyFromChunk); + NODE_SET_PROTOTYPE_METHOD(t, "_endCopyFrom", EndCopyFrom); target->Set(String::NewSymbol("Connection"), t->GetFunction()); TRACE("created class"); } - //static function called by libev as callback entrypoint + //static function called by libuv as callback entrypoint static void - io_event(EV_P_ ev_io *w, int revents) + io_event(uv_poll_t* w, int status, int revents) { + TRACE("Received IO event"); + + if(status == -1) { + TRACE("Connection error. -1 status from lib_uv_poll"); + } + Connection *connection = static_cast(w->data); connection->HandleIOEvent(revents); } @@ -121,12 +138,67 @@ public: return Undefined(); } +#ifdef ESCAPE_SUPPORTED + //v8 entry point into Connection#escapeIdentifier + static Handle + EscapeIdentifier(const Arguments& args) + { + HandleScope scope; + Connection *self = ObjectWrap::Unwrap(args.This()); + + char* inputStr = MallocCString(args[0]); + + if(!inputStr) { + THROW("Unable to allocate memory for a string in EscapeIdentifier.") + } + + char* escapedStr = self->EscapeIdentifier(inputStr); + free(inputStr); + + if(escapedStr == NULL) { + THROW(self->GetLastError()); + } + + Local jsStr = String::New(escapedStr, strlen(escapedStr)); + PQfreemem(escapedStr); + + return scope.Close(jsStr); + } + + //v8 entry point into Connection#escapeLiteral + static Handle + EscapeLiteral(const Arguments& args) + { + HandleScope scope; + Connection *self = ObjectWrap::Unwrap(args.This()); + + char* inputStr = MallocCString(args[0]); + + if(!inputStr) { + THROW("Unable to allocate memory for a string in EscapeIdentifier.") + } + + char* escapedStr = self->EscapeLiteral(inputStr); + free(inputStr); + + if(escapedStr == NULL) { + THROW(self->GetLastError()); + } + + Local jsStr = String::New(escapedStr, strlen(escapedStr)); + PQfreemem(escapedStr); + + return scope.Close(jsStr); + } +#endif + //v8 entry point into Connection#_sendQuery static Handle SendQuery(const Arguments& args) { HandleScope scope; Connection *self = ObjectWrap::Unwrap(args.This()); + const char *lastErrorMessage; if(!args[0]->IsString()) { THROW("First parameter must be a string query"); } @@ -135,7 +207,8 @@ public: int result = self->Send(queryText); free(queryText); if(result == 0) { - THROW("PQsendQuery returned error code"); + lastErrorMessage = self->GetLastError(); + THROW(lastErrorMessage); } //TODO should we flush before throw? self->Flush(); @@ -191,8 +264,6 @@ public: THROW("Values must be an array"); } - Handle params = args[1]; - Local jsParams = Local::Cast(args[1]); int len = jsParams->Length(); @@ -232,19 +303,28 @@ public: return Undefined(); } - ev_io read_watcher_; - ev_io write_watcher_; + uv_poll_t read_watcher_; + uv_poll_t write_watcher_; PGconn *connection_; bool connecting_; + bool ioInitialized_; + bool copyOutMode_; + bool copyInMode_; + bool reading_; + bool writing_; + bool ended_; Connection () : ObjectWrap () { connection_ = NULL; connecting_ = false; - + ioInitialized_ = false; + copyOutMode_ = false; + copyInMode_ = false; + reading_ = false; + writing_ = false; + ended_ = false; TRACE("Initializing ev watchers"); - ev_init(&read_watcher_, io_event); read_watcher_.data = this; - ev_init(&write_watcher_, io_event); write_watcher_.data = this; } @@ -252,6 +332,31 @@ public: { } + static Handle + SendCopyFromChunk(const Arguments& args) { + HandleScope scope; + Connection *self = ObjectWrap::Unwrap(args.This()); + //TODO handle errors in some way + if (args.Length() < 1 && !Buffer::HasInstance(args[0])) { + THROW("SendCopyFromChunk requires 1 Buffer argument"); + } + self->SendCopyFromChunk(args[0]->ToObject()); + return Undefined(); + } + static Handle + EndCopyFrom(const Arguments& args) { + HandleScope scope; + Connection *self = ObjectWrap::Unwrap(args.This()); + char * error_msg = NULL; + if (args[0]->IsString()) { + error_msg = MallocCString(args[0]); + } + //TODO handle errors in some way + self->EndCopyFrom(error_msg); + free(error_msg); + return Undefined(); + } + protected: //v8 entry point to constructor static Handle @@ -264,33 +369,59 @@ protected: return args.This(); } +#ifdef ESCAPE_SUPPORTED + char * EscapeIdentifier(const char *str) + { + TRACE("js::EscapeIdentifier") + return PQescapeIdentifier(connection_, str, strlen(str)); + } + + char * EscapeLiteral(const char *str) + { + TRACE("js::EscapeLiteral") + return PQescapeLiteral(connection_, str, strlen(str)); + } +#endif + int Send(const char *queryText) { - return PQsendQuery(connection_, queryText); + TRACE("js::Send") + int rv = PQsendQuery(connection_, queryText); + StartWrite(); + return rv; } int SendQueryParams(const char *command, const int nParams, const char * const *paramValues) { - return PQsendQueryParams(connection_, command, nParams, NULL, paramValues, NULL, NULL, 0); + TRACE("js::SendQueryParams") + int rv = PQsendQueryParams(connection_, command, nParams, NULL, paramValues, NULL, NULL, 0); + StartWrite(); + return rv; } int SendPrepare(const char *name, const char *command, const int nParams) { - return PQsendPrepare(connection_, name, command, nParams, NULL); + TRACE("js::SendPrepare") + int rv = PQsendPrepare(connection_, name, command, nParams, NULL); + StartWrite(); + return rv; } int SendPreparedQuery(const char *name, int nParams, const char * const *paramValues) { - return PQsendQueryPrepared(connection_, name, nParams, paramValues, NULL, NULL, 0); + int rv = PQsendQueryPrepared(connection_, name, nParams, paramValues, NULL, NULL, 0); + StartWrite(); + return rv; } - int Cancel() + bool Cancel() { - PGcancel* pgCancel = PQgetCancel(connection_); - char errbuf[256]; - int result = PQcancel(pgCancel, errbuf, 256); - PQfreeCancel(pgCancel); - return result; + PGcancel* pgCancel = PQgetCancel(connection_); + char errbuf[256]; + int result = PQcancel(pgCancel, errbuf, 256); + StartWrite(); + PQfreeCancel(pgCancel); + return result; } //flushes socket @@ -298,7 +429,7 @@ protected: { if(PQflush(connection_) == 1) { TRACE("Flushing"); - ev_io_start(EV_DEFAULT_ &write_watcher_); + uv_poll_start(&write_watcher_, UV_WRITABLE, io_event); } } @@ -315,21 +446,21 @@ protected: //and hands off control to libev bool Connect(const char* conninfo) { + if(ended_) return true; connection_ = PQconnectStart(conninfo); if (!connection_) { LOG("Connection couldn't be created"); } - if (PQsetnonblocking(connection_, 1) == -1) { - LOG("Unable to set connection to non-blocking"); - return false; - } - ConnStatusType status = PQstatus(connection_); if(CONNECTION_BAD == status) { - LOG("Bad connection status"); + return false; + } + + if (PQsetnonblocking(connection_, 1) == -1) { + LOG("Unable to set connection to non-blocking"); return false; } @@ -344,8 +475,10 @@ protected: PQsetNoticeProcessor(connection_, NoticeReceiver, this); TRACE("Setting watchers to socket"); - ev_io_set(&read_watcher_, fd, EV_READ); - ev_io_set(&write_watcher_, fd, EV_WRITE); + uv_poll_init(uv_default_loop(), &read_watcher_, fd); + uv_poll_init(uv_default_loop(), &write_watcher_, fd); + + ioInitialized_ = true; connecting_ = true; StartWrite(); @@ -367,13 +500,9 @@ protected: Emit("notice", ¬ice); } - //called to process io_events from libev + //called to process io_events from libuv void HandleIOEvent(int revents) { - if(revents & EV_ERROR) { - LOG("Connection error."); - return; - } if(connecting_) { TRACE("Processing connecting_ io"); @@ -381,24 +510,41 @@ protected: return; } - if(revents & EV_READ) { - TRACE("revents & EV_READ"); + if(revents & UV_READABLE) { + TRACE("revents & UV_READABLE"); + TRACE("about to consume input"); if(PQconsumeInput(connection_) == 0) { - LOG("Something happened, consume input is 0"); + TRACE("could not read, terminating"); + End(); + EmitLastError(); + //LOG("Something happened, consume input is 0"); return; } + TRACE("Consumed"); - //declare handlescope as this method is entered via a libev callback + //declare handlescope as this method is entered via a libuv callback //and not part of the public v8 interface HandleScope scope; - - if (PQisBusy(connection_) == 0) { + if (this->copyOutMode_) { + this->HandleCopyOut(); + } + if (!this->copyInMode_ && !this->copyOutMode_ && PQisBusy(connection_) == 0) { PGresult *result; bool didHandleResult = false; + TRACE("PQgetResult"); while ((result = PQgetResult(connection_))) { - HandleResult(result); - didHandleResult = true; + TRACE("HandleResult"); + didHandleResult = HandleResult(result); + TRACE("PQClear"); PQclear(result); + if(!didHandleResult) { + //this means that we are in copy in or copy out mode + //in this situation PQgetResult will return same + //result untill all data will be read (copy out) or + //until data end notification (copy in) + //and because of this, we need to break cycle + break; + } } //might have fired from notification if(didHandleResult) { @@ -407,6 +553,7 @@ protected: } PGnotify *notify; + TRACE("PQnotifies"); while ((notify = PQnotifies(connection_))) { Local result = Object::New(); result->Set(channel_symbol, String::New(notify->relname)); @@ -418,32 +565,122 @@ protected: } - if(revents & EV_WRITE) { - TRACE("revents & EV_WRITE"); + if(revents & UV_WRITABLE) { + TRACE("revents & UV_WRITABLE"); if (PQflush(connection_) == 0) { - StopWrite(); + //nothing left to write, poll the socket for more to read + StartRead(); } } } + bool HandleCopyOut () { + char * buffer = NULL; + int copied; + Buffer * chunk; + copied = PQgetCopyData(connection_, &buffer, 1); + while (copied > 0) { + chunk = Buffer::New(buffer, copied); + Local node_chunk = Local::New(chunk->handle_); + Emit("copyData", &node_chunk); + PQfreemem(buffer); + copied = PQgetCopyData(connection_, &buffer, 1); + } + if (copied == 0) { + //wait for next read ready + //result was not handled completely + return false; + } else if (copied == -1) { + this->copyOutMode_ = false; + return true; + } else if (copied == -2) { + this->copyOutMode_ = false; + return true; + } + return false; + } - void HandleResult(const PGresult* result) + //maps the postgres tuple results to v8 objects + //and emits row events + //TODO look at emitting fewer events because the back & forth between + //javascript & c++ might introduce overhead (requires benchmarking) + void EmitRowDescription(const PGresult* result) { + HandleScope scope; + Local row = Array::New(); + int fieldCount = PQnfields(result); + for(int fieldNumber = 0; fieldNumber < fieldCount; fieldNumber++) { + Local field = Object::New(); + //name of field + char* fieldName = PQfname(result, fieldNumber); + field->Set(name_symbol, String::New(fieldName)); + + //oid of type of field + int fieldType = PQftype(result, fieldNumber); + field->Set(type_symbol, Integer::New(fieldType)); + + row->Set(Integer::New(fieldNumber), field); + } + + Handle e = (Handle)row; + Emit("_rowDescription", &e); + } + + bool HandleResult(PGresult* result) + { + TRACE("PQresultStatus"); ExecStatusType status = PQresultStatus(result); switch(status) { case PGRES_TUPLES_OK: - HandleTuplesResult(result); + { + EmitRowDescription(result); + HandleTuplesResult(result); + EmitCommandMetaData(result); + return true; + } break; case PGRES_FATAL_ERROR: - HandleErrorResult(result); + { + TRACE("HandleErrorResult"); + HandleErrorResult(result); + return true; + } break; case PGRES_COMMAND_OK: case PGRES_EMPTY_QUERY: - //do nothing + { + EmitCommandMetaData(result); + return true; + } + break; + case PGRES_COPY_IN: + { + this->copyInMode_ = true; + Emit("copyInResponse"); + return false; + } + break; + case PGRES_COPY_OUT: + { + this->copyOutMode_ = true; + Emit("copyOutResponse"); + return this->HandleCopyOut(); + } break; default: - printf("Unrecogized query status: %s\n", PQresStatus(status)); + printf("YOU SHOULD NEVER SEE THIS! PLEASE OPEN AN ISSUE ON GITHUB! Unrecogized query status: %s\n", PQresStatus(status)); break; } + return true; + } + + void EmitCommandMetaData(PGresult* result) + { + HandleScope scope; + Local info = Object::New(); + info->Set(command_symbol, String::New(PQcmdStatus(result))); + info->Set(value_symbol, String::New(PQcmdTuples(result))); + Handle e = (Handle)info; + Emit("_cmdStatus", &e); } //maps the postgres tuple results to v8 objects @@ -452,33 +689,23 @@ protected: //javascript & c++ might introduce overhead (requires benchmarking) void HandleTuplesResult(const PGresult* result) { + HandleScope scope; int rowCount = PQntuples(result); for(int rowNumber = 0; rowNumber < rowCount; rowNumber++) { //create result object for this row Local row = Array::New(); int fieldCount = PQnfields(result); for(int fieldNumber = 0; fieldNumber < fieldCount; fieldNumber++) { - Local field = Object::New(); - //name of field - char* fieldName = PQfname(result, fieldNumber); - field->Set(name_symbol, String::New(fieldName)); - - //oid of type of field - int fieldType = PQftype(result, fieldNumber); - field->Set(type_symbol, Integer::New(fieldType)); //value of field if(PQgetisnull(result, rowNumber, fieldNumber)) { - field->Set(value_symbol, Null()); + row->Set(Integer::New(fieldNumber), Null()); } else { char* fieldValue = PQgetvalue(result, rowNumber, fieldNumber); - field->Set(value_symbol, String::New(fieldValue)); + row->Set(Integer::New(fieldNumber), String::New(fieldValue)); } - - row->Set(Integer::New(fieldNumber), field); } - //not sure about what to dealloc or scope#Close here Handle e = (Handle)row; Emit("_row", &e); } @@ -488,8 +715,15 @@ protected: { HandleScope scope; //instantiate the return object as an Error with the summary Postgres message - Local msg = Local::Cast(Exception::Error(String::New(PQresultErrorField(result, PG_DIAG_MESSAGE_PRIMARY)))); - + TRACE("ReadResultField"); + const char* errorMessage = PQresultErrorField(result, PG_DIAG_MESSAGE_PRIMARY); + if(!errorMessage) { + //there is no error, it has already been consumed in the last + //read-loop callback + return; + } + Local msg = Local::Cast(Exception::Error(String::New(errorMessage))); + TRACE("AttachErrorFields"); //add the other information returned by Postgres to the error object AttachErrorField(result, msg, severity_symbol, PG_DIAG_SEVERITY); AttachErrorField(result, msg, code_symbol, PG_DIAG_SQLSTATE); @@ -503,6 +737,7 @@ protected: AttachErrorField(result, msg, line_symbol, PG_DIAG_SOURCE_LINE); AttachErrorField(result, msg, routine_symbol, PG_DIAG_SOURCE_FUNCTION); Handle m = msg; + TRACE("EmitError"); Emit("_error", &m); } @@ -516,9 +751,12 @@ protected: void End() { + TRACE("stopping read & write"); StopRead(); StopWrite(); DestroyConnection(); + Emit("_end"); + ended_ = true; } private: @@ -553,30 +791,28 @@ private: { PostgresPollingStatusType status = PQconnectPoll(connection_); switch(status) { - case PGRES_POLLING_READING: - TRACE("Polled: PGRES_POLLING_READING"); - StopWrite(); - StartRead(); - break; - case PGRES_POLLING_WRITING: - TRACE("Polled: PGRES_POLLING_WRITING"); - StopRead(); - StartWrite(); - break; - case PGRES_POLLING_FAILED: - StopRead(); - StopWrite(); - TRACE("Polled: PGRES_POLLING_FAILED"); - EmitLastError(); - break; - case PGRES_POLLING_OK: - TRACE("Polled: PGRES_POLLING_OK"); - connecting_ = false; - StartRead(); - Emit("connect"); - default: - //printf("Unknown polling status: %d\n", status); - break; + case PGRES_POLLING_READING: + TRACE("Polled: PGRES_POLLING_READING"); + StartRead(); + break; + case PGRES_POLLING_WRITING: + TRACE("Polled: PGRES_POLLING_WRITING"); + StartWrite(); + break; + case PGRES_POLLING_FAILED: + StopRead(); + StopWrite(); + TRACE("Polled: PGRES_POLLING_FAILED"); + EmitLastError(); + break; + case PGRES_POLLING_OK: + TRACE("Polled: PGRES_POLLING_OK"); + connecting_ = false; + StartRead(); + Emit("connect"); + default: + //printf("Unknown polling status: %d\n", status); + break; } } @@ -591,28 +827,49 @@ private: EmitError(PQerrorMessage(connection_)); } + const char *GetLastError() + { + return PQerrorMessage(connection_); + } + void StopWrite() { - TRACE("Stoping write watcher"); - ev_io_stop(EV_DEFAULT_ &write_watcher_); + TRACE("write STOP"); + if(ioInitialized_ && writing_) { + uv_poll_stop(&write_watcher_); + writing_ = false; + } } void StartWrite() { - TRACE("Starting write watcher"); - ev_io_start(EV_DEFAULT_ &write_watcher_); + TRACE("write START"); + if(reading_) { + TRACE("stop READ to start WRITE"); + StopRead(); + } + uv_poll_start(&write_watcher_, UV_WRITABLE, io_event); + writing_ = true; } void StopRead() { - TRACE("Stoping read watcher"); - ev_io_stop(EV_DEFAULT_ &read_watcher_); + TRACE("read STOP"); + if(ioInitialized_ && reading_) { + uv_poll_stop(&read_watcher_); + reading_ = false; + } } void StartRead() { - TRACE("Starting read watcher"); - ev_io_start(EV_DEFAULT_ &read_watcher_); + TRACE("read START"); + if(writing_) { + TRACE("stop WRITE to start READ"); + StopWrite(); + } + uv_poll_start(&read_watcher_, UV_READABLE, io_event); + reading_ = true; } //Converts a v8 array to an array of cstrings //the result char** array must be free() when it is no longer needed @@ -664,6 +921,14 @@ private: strcpy(cString, *utf8String); return cString; } + void SendCopyFromChunk(Handle chunk) { + PQputCopyData(connection_, Buffer::Data(chunk), Buffer::Length(chunk)); + } + void EndCopyFrom(char * error_msg) { + PQputCopyEnd(connection_, error_msg); + this->copyInMode_ = false; + } + }; @@ -672,3 +937,4 @@ extern "C" void init (Handle target) HandleScope scope; Connection::Init(target); } +NODE_MODULE(binding, init) diff --git a/test/cli.js b/test/cli.js index 04f58321..b6ca963a 100644 --- a/test/cli.js +++ b/test/cli.js @@ -1,56 +1,17 @@ -var config = { - port: 5432, - host: 'localhost', - user: 'postgres', - database: 'postgres', - password: '', - test: 'unit' -}; +var ConnectionParameters = require(__dirname + '/../lib/connection-parameters'); +var config = new ConnectionParameters(process.argv[2]); -var args = process.argv; -for(var i = 0; i < args.length; i++) { - switch(args[i].toLowerCase()) { - case '-u': - case '--user': - config.user = args[++i]; +for(var i = 0; i < process.argv.length; i++) { + switch(process.argv[i].toLowerCase()) { + case 'native': + config.native = true; break; - case '--password': - config.password = args[++i]; + case 'binary': + config.binary = true; break; - case '--verbose': - config.verbose = (args[++i] == "true"); - break; - case '-d': - case '--database': - config.database = args[++i]; - break; - case '-p': - case '--port': - config.port = args[++i]; - break; - case '-h': - case '--host': - config.host = args[++i]; - break; - case '--down': - config.down = true; - break; - case '-t': - case '--test': - config.test = args[++i]; - case '--native': - config.native = (args[++i] == "true"); - case '--binary': - config.binary = (args[++i] == "true"); default: break; } } -var log = function(keys) { - keys.forEach(function(key) { - console.log(key + ": '" + config[key] + "'"); - }); -} - module.exports = config; diff --git a/test/integration/client/api-tests.js b/test/integration/client/api-tests.js index 0fd94108..c3baca8f 100644 --- a/test/integration/client/api-tests.js +++ b/test/integration/client/api-tests.js @@ -1,9 +1,5 @@ var helper = require(__dirname + '/../test-helper'); -var pg = require(__dirname + '/../../../lib'); - -if(helper.args.native) { - pg = require(__dirname + '/../../../lib').native; -} +var pg = helper.pg; var log = function() { //console.log.apply(console, arguments); @@ -16,7 +12,13 @@ var sink = new helper.Sink(5, 10000, function() { test('api', function() { log("connecting to %j", helper.config) - pg.connect(helper.config, assert.calls(function(err, client) { + //test weird callback behavior with node-pool + pg.connect(helper.config, function(err) { + assert.isNull(err); + arguments[1].emit('drain'); + arguments[2](); + }); + pg.connect(helper.config, assert.calls(function(err, client, done) { assert.equal(err, null, "Failed to connect: " + helper.sys.inspect(err)); client.query('CREATE TEMP TABLE band(name varchar(100))'); @@ -51,14 +53,14 @@ test('api', function() { assert.equal(result.rows.pop().name, 'the flaming lips'); assert.equal(result.rows.pop().name, 'the beach boys'); sink.add(); + done(); })) })) - })) }) test('executing nested queries', function() { - pg.connect(helper.config, assert.calls(function(err, client) { + pg.connect(helper.config, assert.calls(function(err, client, done) { assert.isNull(err); log("connected for nested queriese") client.query('select now as now from NOW()', assert.calls(function(err, result) { @@ -68,6 +70,7 @@ test('executing nested queries', function() { log('all nested queries recieved') assert.ok('all queries hit') sink.add(); + done(); })) })) })) @@ -77,27 +80,29 @@ test('executing nested queries', function() { test('raises error if cannot connect', function() { var connectionString = "pg://sfalsdkf:asdf@localhost/ieieie"; log("trying to connect to invalid place for error") - pg.connect(connectionString, assert.calls(function(err, client) { + pg.connect(connectionString, assert.calls(function(err, client, done) { assert.ok(err, 'should have raised an error') log("invalid connection supplied error to callback") sink.add(); + done(); })) }) test("query errors are handled and do not bubble if callback is provded", function() { - pg.connect(helper.config, assert.calls(function(err, client) { + pg.connect(helper.config, assert.calls(function(err, client, done) { assert.isNull(err) log("checking for query error") client.query("SELECT OISDJF FROM LEIWLISEJLSE", assert.calls(function(err, result) { assert.ok(err); log("query error supplied error to callback") sink.add(); + done(); })) })) }) test('callback is fired once and only once', function() { - pg.connect(helper.config, assert.calls(function(err, client) { + pg.connect(helper.config, assert.calls(function(err, client, done) { assert.isNull(err); client.query("CREATE TEMP TABLE boom(name varchar(10))"); var callCount = 0; @@ -108,12 +113,13 @@ test('callback is fired once and only once', function() { ].join(";"), function(err, callback) { assert.equal(callCount++, 0, "Call count should be 0. More means this callback fired more than once."); sink.add(); + done(); }) })) }) test('can provide callback and config object', function() { - pg.connect(helper.config, assert.calls(function(err, client) { + pg.connect(helper.config, assert.calls(function(err, client, done) { assert.isNull(err); client.query({ name: 'boom', @@ -121,12 +127,13 @@ test('can provide callback and config object', function() { }, assert.calls(function(err, result) { assert.isNull(err); assert.equal(result.rows[0].now.getYear(), new Date().getYear()) + done(); })) })) }) test('can provide callback and config and parameters', function() { - pg.connect(helper.config, assert.calls(function(err, client) { + pg.connect(helper.config, assert.calls(function(err, client, done) { assert.isNull(err); var config = { text: 'select $1::text as val' @@ -135,12 +142,13 @@ test('can provide callback and config and parameters', function() { assert.isNull(err); assert.equal(result.rows.length, 1); assert.equal(result.rows[0].val, 'hi'); + done(); })) })) }) test('null and undefined are both inserted as NULL', function() { - pg.connect(helper.config, assert.calls(function(err, client) { + pg.connect(helper.config, assert.calls(function(err, client, done) { assert.isNull(err); client.query("CREATE TEMP TABLE my_nulls(a varchar(1), b varchar(1), c integer, d integer, e date, f date)"); client.query("INSERT INTO my_nulls(a,b,c,d,e,f) VALUES ($1,$2,$3,$4,$5,$6)", [ null, undefined, null, undefined, null, undefined ]); @@ -153,6 +161,7 @@ test('null and undefined are both inserted as NULL', function() { assert.isNull(result.rows[0].d); assert.isNull(result.rows[0].e); assert.isNull(result.rows[0].f); + done(); })) })) }) diff --git a/test/integration/client/array-tests.js b/test/integration/client/array-tests.js index 548b3768..e01a252c 100644 --- a/test/integration/client/array-tests.js +++ b/test/integration/client/array-tests.js @@ -2,7 +2,7 @@ var helper = require(__dirname + "/test-helper"); var pg = helper.pg; test('parsing array results', function() { - pg.connect(helper.config, assert.calls(function(err, client) { + pg.connect(helper.config, assert.calls(function(err, client, done) { assert.isNull(err); client.query("CREATE TEMP TABLE why(names text[], numbors integer[])"); client.query('INSERT INTO why(names, numbors) VALUES(\'{"aaron", "brian","a b c" }\', \'{1, 2, 3}\')').on('error', console.log); @@ -23,9 +23,113 @@ test('parsing array results', function() { assert.equal(names[0], 'aaron'); assert.equal(names[1], 'brian'); assert.equal(names[2], "a b c"); + })) + }) + + test('empty array', function(){ + client.query("SELECT '{}'::text[] as names", assert.success(function(result) { + var names = result.rows[0].names; + assert.lengthIs(names, 0); + })) + }) + + test('element containing comma', function(){ + client.query("SELECT '{\"joe,bob\",jim}'::text[] as names", assert.success(function(result) { + var names = result.rows[0].names; + assert.lengthIs(names, 2); + assert.equal(names[0], 'joe,bob'); + assert.equal(names[1], 'jim'); + })) + }) + + test('bracket in quotes', function(){ + client.query("SELECT '{\"{\",\"}\"}'::text[] as names", assert.success(function(result) { + var names = result.rows[0].names; + assert.lengthIs(names, 2); + assert.equal(names[0], '{'); + assert.equal(names[1], '}'); + })) + }) + + test('null value', function(){ + client.query("SELECT '{joe,null,bob,\"NULL\"}'::text[] as names", assert.success(function(result) { + var names = result.rows[0].names; + assert.lengthIs(names, 4); + assert.equal(names[0], 'joe'); + assert.equal(names[1], null); + assert.equal(names[2], 'bob'); + assert.equal(names[3], 'NULL'); + })) + }) + + test('element containing quote char', function(){ + client.query("SELECT ARRAY['joe''', 'jim', 'bob\"'] AS names", assert.success(function(result) { + var names = result.rows[0].names; + assert.lengthIs(names, 3); + assert.equal(names[0], 'joe\''); + assert.equal(names[1], 'jim'); + assert.equal(names[2], 'bob"'); + })) + }) + + test('nested array', function(){ + client.query("SELECT '{{1,joe},{2,bob}}'::text[] as names", assert.success(function(result) { + var names = result.rows[0].names; + assert.lengthIs(names, 2); + + assert.lengthIs(names[0], 2); + assert.equal(names[0][0], '1'); + assert.equal(names[0][1], 'joe'); + + assert.lengthIs(names[1], 2); + assert.equal(names[1][0], '2'); + assert.equal(names[1][1], 'bob'); + + })) + }) + + test('integer array', function(){ + client.query("SELECT '{1,2,3}'::integer[] as names", assert.success(function(result) { + var names = result.rows[0].names; + assert.lengthIs(names, 3); + assert.equal(names[0], 1); + assert.equal(names[1], 2); + assert.equal(names[2], 3); + })) + }) + + test('integer nested array', function(){ + client.query("SELECT '{{1,100},{2,100},{3,100}}'::integer[] as names", assert.success(function(result) { + var names = result.rows[0].names; + assert.lengthIs(names, 3); + assert.equal(names[0][0], 1); + assert.equal(names[0][1], 100); + + assert.equal(names[1][0], 2); + assert.equal(names[1][1], 100); + + assert.equal(names[2][0], 3); + assert.equal(names[2][1], 100); + })) + }) + + test('JS array parameter', function(){ + client.query("SELECT $1::integer[] as names", [[[1,100],[2,100],[3,100]]], assert.success(function(result) { + var names = result.rows[0].names; + assert.lengthIs(names, 3); + assert.equal(names[0][0], 1); + assert.equal(names[0][1], 100); + + assert.equal(names[1][0], 2); + assert.equal(names[1][1], 100); + + assert.equal(names[2][0], 3); + assert.equal(names[2][1], 100); + done(); pg.end(); })) }) + })) }) diff --git a/test/integration/client/cancel-query-tests.js b/test/integration/client/cancel-query-tests.js index 842b471a..80b05b27 100644 --- a/test/integration/client/cancel-query-tests.js +++ b/test/integration/client/cancel-query-tests.js @@ -5,42 +5,42 @@ test("cancellation of a query", function() { var client = helper.client(); - var qry = client.query("select name from person order by name"); + var qry = "select name from person order by name"; client.on('drain', client.end.bind(client)); - var rows1 = 0, rows2 = 0, rows3 = 0, rows4 = 0; + var rows1 = 0, rows2 = 0, rows3 = 0, rows4 = 0; - var query1 = client.query(qry); - query1.on('row', function(row) { - rows1++; - }); - var query2 = client.query(qry); - query2.on('row', function(row) { - rows2++; - }); - var query3 = client.query(qry); - query3.on('row', function(row) { - rows3++; - }); - var query4 = client.query(qry); - query4.on('row', function(row) { - rows4++; - }); + var query1 = client.query(qry); + query1.on('row', function(row) { + rows1++; + }); + var query2 = client.query(qry); + query2.on('row', function(row) { + rows2++; + }); + var query3 = client.query(qry); + query3.on('row', function(row) { + rows3++; + }); + var query4 = client.query(qry); + query4.on('row', function(row) { + rows4++; + }); - helper.pg.cancel(helper.config, client, query1); - helper.pg.cancel(helper.config, client, query2); - helper.pg.cancel(helper.config, client, query4); + helper.pg.cancel(helper.config, client, query1); + helper.pg.cancel(helper.config, client, query2); + helper.pg.cancel(helper.config, client, query4); - setTimeout(function() { - assert.equal(rows1, 0); - assert.equal(rows2, 0); - assert.equal(rows4, 0); - }, 2000); + setTimeout(function() { + assert.equal(rows1, 0); + assert.equal(rows2, 0); + assert.equal(rows4, 0); + }, 2000); assert.emits(query3, 'end', function() { - test("returned right number of rows", function() { - assert.equal(rows3, 26); - }); - }); + test("returned right number of rows", function() { + assert.equal(rows3, 26); + }); + }); }); diff --git a/test/integration/client/configuration-tests.js b/test/integration/client/configuration-tests.js index c641b300..e922a4e7 100644 --- a/test/integration/client/configuration-tests.js +++ b/test/integration/client/configuration-tests.js @@ -1,6 +1,13 @@ var helper = require(__dirname + '/test-helper'); var pg = helper.pg; +//clear process.env +var realEnv = {}; +for(var key in process.env) { + realEnv[key] = process.env[key]; + if(!key.indexOf('PG')) delete process.env[key]; +} + test('default values', function() { assert.same(pg.defaults,{ user: process.env.USER, @@ -44,3 +51,8 @@ if(!helper.args.native) { }) } + +//restore process.env +for(var key in realEnv) { + process.env[key] = realEnv[key]; +} diff --git a/test/integration/client/copy-tests.js b/test/integration/client/copy-tests.js new file mode 100644 index 00000000..92e4cb87 --- /dev/null +++ b/test/integration/client/copy-tests.js @@ -0,0 +1,167 @@ +var helper = require(__dirname + '/../test-helper'); +var pg = require(__dirname + '/../../../lib'); +if(helper.args.native) { + pg = require(__dirname + '/../../../lib').native; +} +var ROWS_TO_INSERT = 1000; +var prepareTable = function (client, callback) { + client.query( + 'CREATE TEMP TABLE copy_test (id SERIAL, name CHARACTER VARYING(10), age INT)', + assert.calls(function (err, result) { + assert.equal(err, null, "create table query should not fail"); + callback(); + }) + ); +}; +test('COPY FROM', function () { + pg.connect(helper.config, function (error, client, done) { + assert.equal(error, null, "Failed to connect: " + helper.sys.inspect(error)); + prepareTable(client, function () { + var stream = client.copyFrom("COPY copy_test (name, age) FROM stdin WITH CSV"); + stream.on('error', function (error) { + assert.ok(false, "COPY FROM stream should not emit errors" + helper.sys.inspect(error)); + }); + for (var i = 0; i < ROWS_TO_INSERT; i++) { + stream.write( String(Date.now() + Math.random()).slice(0,10) + ',' + i + '\n'); + } + assert.emits(stream, 'close', function () { + client.query("SELECT count(*), sum(age) from copy_test", function (err, result) { + assert.equal(err, null, "Query should not fail"); + assert.lengthIs(result.rows, 1) + assert.equal(result.rows[0].sum, ROWS_TO_INSERT * (0 + ROWS_TO_INSERT -1)/2); + assert.equal(result.rows[0].count, ROWS_TO_INSERT); + done(); + }); + }, "COPY FROM stream should emit close after query end"); + stream.end(); + }); + }); +}); +test('COPY TO', function () { + pg.connect(helper.config, function (error, client, done) { + assert.equal(error, null, "Failed to connect: " + helper.sys.inspect(error)); + prepareTable(client, function () { + var stream = client.copyTo("COPY person (id, name, age) TO stdin WITH CSV"); + var buf = new Buffer(0); + stream.on('error', function (error) { + assert.ok(false, "COPY TO stream should not emit errors" + helper.sys.inspect(error)); + }); + assert.emits(stream, 'data', function (chunk) { + buf = Buffer.concat([buf, chunk]); + }, "COPY IN stream should emit data event for each row"); + assert.emits(stream, 'end', function () { + var lines = buf.toString().split('\n'); + assert.equal(lines.length >= 0, true, "copy in should return rows saved by copy from"); + assert.equal(lines[0].split(',').length, 3, "each line should consists of 3 fields"); + done(); + }, "COPY IN stream should emit end event after all rows"); + }); + }); +}); + +test('COPY TO, queue queries', function () { + if(helper.config.native) return false; + pg.connect(helper.config, assert.calls(function (error, client, done) { + assert.equal(error, null, "Failed to connect: " + helper.sys.inspect(error)); + prepareTable(client, function () { + var query1Done = false, + copyQueryDone = false, + query2Done = false; + client.query("SELECT count(*) from person", function () { + query1Done = true; + assert.ok(!copyQueryDone && ! query2Done, "first query has to be executed before others"); + }); + var stream = client.copyTo("COPY person (id, name, age) TO stdin WITH CSV"); + //imitate long query, to make impossible, + //that copy query end callback runs after + //second query callback + client.query("SELECT pg_sleep(1)", function () { + query2Done = true; + assert.ok(copyQueryDone && query2Done, "second query has to be executed after others"); + }); + var buf = new Buffer(0); + stream.on('error', function (error) { + assert.ok(false, "COPY TO stream should not emit errors" + helper.sys.inspect(error)); + }); + assert.emits(stream, 'data', function (chunk) { + buf = Buffer.concat([buf, chunk]); + }, "COPY IN stream should emit data event for each row"); + assert.emits(stream, 'end', function () { + copyQueryDone = true; + assert.ok(query1Done && ! query2Done, "copy query has to be executed before second query and after first"); + var lines = buf.toString().split('\n'); + assert.equal(lines.length >= 0, true, "copy in should return rows saved by copy from"); + assert.equal(lines[0].split(',').length, 3, "each line should consists of 3 fields"); + done(); + }, "COPY IN stream should emit end event after all rows"); + }); + })); +}); + +test("COPY TO incorrect usage with large data", function () { + if(helper.config.native) return false; + //when many data is loaded from database (and it takes a lot of time) + //there are chance, that query will be canceled before it ends + //but if there are not so much data, cancel message may be + //send after copy query ends + //so we need to test both situations + pg.connect(helper.config, assert.calls(function (error, client, done) { + assert.equal(error, null, "Failed to connect: " + helper.sys.inspect(error)); + //intentionally incorrect usage of copy. + //this has to report error in standart way, instead of just throwing exception + client.query( + "COPY (SELECT GENERATE_SERIES(1, 10000000)) TO STDOUT WITH CSV", + assert.calls(function (error) { + assert.ok(error, "error should be reported when sending copy to query with query method"); + client.query("SELECT 1", assert.calls(function (error, result) { + assert.isNull(error, "incorrect copy usage should not break connection"); + assert.ok(result, "incorrect copy usage should not break connection"); + done(); + })); + }) + ); + })); +}); + +test("COPY TO incorrect usage with small data", function () { + if(helper.config.native) return false; + pg.connect(helper.config, assert.calls(function (error, client, done) { + assert.equal(error, null, "Failed to connect: " + helper.sys.inspect(error)); + //intentionally incorrect usage of copy. + //this has to report error in standart way, instead of just throwing exception + client.query( + "COPY (SELECT GENERATE_SERIES(1, 1)) TO STDOUT WITH CSV", + assert.calls(function (error) { + assert.ok(error, "error should be reported when sending copy to query with query method"); + client.query("SELECT 1", assert.calls(function (error, result) { + assert.isNull(error, "incorrect copy usage should not break connection: " + error); + assert.ok(result, "incorrect copy usage should not break connection"); + done(); + })); + }) + ); + })); +}); + +test("COPY FROM incorrect usage", function () { + pg.connect(helper.config, function (error, client, done) { + assert.equal(error, null, "Failed to connect: " + helper.sys.inspect(error)); + prepareTable(client, function () { + //intentionally incorrect usage of copy. + //this has to report error in standart way, instead of just throwing exception + client.query( + "COPY copy_test from STDIN WITH CSV", + assert.calls(function (error) { + assert.ok(error, "error should be reported when sending copy to query with query method"); + client.query("SELECT 1", assert.calls(function (error, result) { + assert.isNull(error, "incorrect copy usage should not break connection: " + error); + assert.ok(result, "incorrect copy usage should not break connection"); + done(); + pg.end(helper.config); + })); + }) + ); + }); + }); +}); + diff --git a/test/integration/client/drain-tests.js b/test/integration/client/drain-tests.js deleted file mode 100644 index b6a2434d..00000000 --- a/test/integration/client/drain-tests.js +++ /dev/null @@ -1,55 +0,0 @@ -var helper = require(__dirname + '/test-helper'); -var pg = require(__dirname + '/../../../lib'); - -if(helper.args.native) { - pg = require(__dirname + '/../../../lib').native; -} - -var testDrainOfClientWithPendingQueries = function() { - pg.connect(helper.config, assert.success(function(client) { - test('when there are pending queries and client is resumed', function() { - var drainCount = 0; - client.on('drain', function() { - drainCount++; - }); - client.pauseDrain(); - client.query('SELECT NOW()', function() { - client.query('SELECT NOW()', function() { - assert.equal(drainCount, 0); - process.nextTick(function() { - assert.equal(drainCount, 1); - pg.end(); - }); - }); - client.resumeDrain(); - assert.equal(drainCount, 0); - }); - }); - })); -}; - -pg.connect(helper.config, assert.success(function(client) { - var drainCount = 0; - client.on('drain', function() { - drainCount++; - }); - test('pauseDrain and resumeDrain on simple client', function() { - client.pauseDrain(); - client.resumeDrain(); - process.nextTick(assert.calls(function() { - assert.equal(drainCount, 0); - test('drain is paused', function() { - client.pauseDrain(); - client.query('SELECT NOW()', assert.success(function() { - process.nextTick(function() { - assert.equal(drainCount, 0); - client.resumeDrain(); - assert.equal(drainCount, 1); - testDrainOfClientWithPendingQueries(); - }); - })); - }); - })); - }); -})); - diff --git a/test/integration/client/empty-query-tests.js b/test/integration/client/empty-query-tests.js index 3eb207c4..6f0d574d 100644 --- a/test/integration/client/empty-query-tests.js +++ b/test/integration/client/empty-query-tests.js @@ -5,11 +5,11 @@ test("empty query message handling", function() { assert.emits(client, 'drain', function() { client.end(); }); - client.query({text: "", binary: false}); + client.query({text: ""}); }); test('callback supported', assert.calls(function() { - client.query({text: "", binary: false}, function(err, result) { + client.query("", function(err, result) { assert.isNull(err); assert.empty(result.rows); }) diff --git a/test/integration/client/error-handling-tests.js b/test/integration/client/error-handling-tests.js index 0a855238..616493b6 100644 --- a/test/integration/client/error-handling-tests.js +++ b/test/integration/client/error-handling-tests.js @@ -11,7 +11,6 @@ var createErorrClient = function() { }; test('error handling', function(){ - test('within a simple query', function() { var client = createErorrClient(); @@ -115,18 +114,26 @@ test('non-error calls supplied callback', function() { }); test('when connecting to invalid host', function() { - return false; + //this test fails about 30% on travis and only on travis... + //I'm not sure what the cause could be + if(process.env.TRAVIS) return false; + var client = new Client({ - user: 'brian', + user: 'aslkdjfsdf', password: '1234', host: 'asldkfjasdf!!#1308140.com' }); - assert.emits(client, 'error'); + var delay = 5000; + var tid = setTimeout(function() { + assert(false, "When connecting to an invalid host the error event should be emitted but it has been " + delay + " and still no error event."); + }, delay); + client.on('error', function() { + clearTimeout(tid); + }) client.connect(); }); test('when connecting to invalid host with callback', function() { - return false; var client = new Client({ user: 'brian', password: '1234', @@ -156,9 +163,19 @@ test('multiple connection errors (gh#31)', function() { }); test('with callback method', function() { - var badConString = "tcp://aslkdfj:oi14081@"+helper.args.host+":"+helper.args.port+"/"+helper.args.database; + var badConString = "postgres://aslkdfj:oi14081@"+helper.args.host+":"+helper.args.port+"/"+helper.args.database; return false; }); - +}); + +test('query receives error on client shutdown', function() { + var client = new Client(helper.config); + client.connect(assert.calls(function() { + client.query('SELECT pg_sleep(5)', assert.calls(function(err, res) { + assert(err); + })); + client.end(); + assert.emits(client, 'end'); + })); }); diff --git a/test/integration/client/escape-tests.js b/test/integration/client/escape-tests.js new file mode 100644 index 00000000..40214e03 --- /dev/null +++ b/test/integration/client/escape-tests.js @@ -0,0 +1,153 @@ +var helper = require(__dirname + '/test-helper'); + +function createClient(callback) { + var client = new Client(helper.config); + client.connect(function(err) { + return callback(client); + }); +} + +test('escapeLiteral: no special characters', function() { + createClient(function(client) { + var expected = "'hello world'"; + var actual = client.escapeLiteral('hello world'); + assert.equal(expected, actual); + client.end(); + }); +}); + +test('escapeLiteral: contains double quotes only', function() { + createClient(function(client) { + var expected = "'hello \" world'"; + var actual = client.escapeLiteral('hello " world'); + assert.equal(expected, actual); + client.end(); + }); +}); + +test('escapeLiteral: contains single quotes only', function() { + createClient(function(client) { + var expected = "'hello \'\' world'"; + var actual = client.escapeLiteral('hello \' world'); + assert.equal(expected, actual); + client.end(); + }); +}); + +test('escapeLiteral: contains backslashes only', function() { + createClient(function(client) { + var expected = " E'hello \\\\ world'"; + var actual = client.escapeLiteral('hello \\ world'); + assert.equal(expected, actual); + client.end(); + }); +}); + +test('escapeLiteral: contains single quotes and double quotes', function() { + createClient(function(client) { + var expected = "'hello '' \" world'"; + var actual = client.escapeLiteral('hello \' " world'); + assert.equal(expected, actual); + client.end(); + }); +}); + +test('escapeLiteral: contains double quotes and backslashes', function() { + createClient(function(client) { + var expected = " E'hello \\\\ \" world'"; + var actual = client.escapeLiteral('hello \\ " world'); + assert.equal(expected, actual); + client.end(); + }); +}); + +test('escapeLiteral: contains single quotes and backslashes', function() { + createClient(function(client) { + var expected = " E'hello \\\\ '' world'"; + var actual = client.escapeLiteral('hello \\ \' world'); + assert.equal(expected, actual); + client.end(); + }); +}); + +test('escapeLiteral: contains single quotes, double quotes, and backslashes', function() { + createClient(function(client) { + var expected = " E'hello \\\\ '' \" world'"; + var actual = client.escapeLiteral('hello \\ \' " world'); + assert.equal(expected, actual); + client.end(); + }); +}); + +test('escapeIdentifier: no special characters', function() { + createClient(function(client) { + var expected = '"hello world"'; + var actual = client.escapeIdentifier('hello world'); + assert.equal(expected, actual); + client.end(); + }); +}); + +test('escapeIdentifier: contains double quotes only', function() { + createClient(function(client) { + var expected = '"hello "" world"'; + var actual = client.escapeIdentifier('hello " world'); + assert.equal(expected, actual); + client.end(); + }); +}); + +test('escapeIdentifier: contains single quotes only', function() { + createClient(function(client) { + var expected = '"hello \' world"'; + var actual = client.escapeIdentifier('hello \' world'); + assert.equal(expected, actual); + client.end(); + }); +}); + +test('escapeIdentifier: contains backslashes only', function() { + createClient(function(client) { + var expected = '"hello \\ world"'; + var actual = client.escapeIdentifier('hello \\ world'); + assert.equal(expected, actual); + client.end(); + }); +}); + +test('escapeIdentifier: contains single quotes and double quotes', function() { + createClient(function(client) { + var expected = '"hello \' "" world"'; + var actual = client.escapeIdentifier('hello \' " world'); + assert.equal(expected, actual); + client.end(); + }); +}); + +test('escapeIdentifier: contains double quotes and backslashes', function() { + return createClient(function(client) { + var expected = '"hello \\ "" world"'; + var actual = client.escapeIdentifier('hello \\ " world'); + assert.equal(expected, actual); + client.end(); + return; + }); +}); + +test('escapeIdentifier: contains single quotes and backslashes', function() { + createClient(function(client) { + var expected = '"hello \\ \' world"'; + var actual = client.escapeIdentifier('hello \\ \' world'); + assert.equal(expected, actual); + client.end(); + }); +}); + +test('escapeIdentifier: contains single quotes, double quotes, and backslashes', function() { + createClient(function(client) { + var expected = '"hello \\ \' "" world"'; + var actual = client.escapeIdentifier('hello \\ \' " world'); + assert.equal(expected, actual); + client.end(); + }); +}); diff --git a/test/integration/client/force-native-with-envvar-tests.js b/test/integration/client/force-native-with-envvar-tests.js new file mode 100644 index 00000000..e41587d7 --- /dev/null +++ b/test/integration/client/force-native-with-envvar-tests.js @@ -0,0 +1,38 @@ +/** + * helper needs to be loaded for the asserts but it alos proloads + * client which we don't want here + * + */ +var helper = require(__dirname+"/test-helper") + , path = require('path') +; + +var paths = { + 'pg' : path.join(__dirname, '..', '..', '..', 'lib', 'index.js') , + 'query_js' : path.join(__dirname, '..', '..', '..', 'lib', 'query.js') , + 'query_native' : path.join(__dirname, '..', '..', '..', 'lib', 'native', 'query.js') , +}; + +/** + * delete the modules we are concerned about from the + * module cache, so they get loaded cleanly and the env + * var can kick in ... + */ +function emptyCache(){ + Object.keys(require.cache).forEach(function(key){ + delete require.cache[key]; + }); +}; + +emptyCache(); +process.env.NODE_PG_FORCE_NATIVE = '1'; + +var pg = require( paths.pg ); +var query_native = require( paths.query_native ); +var query_js = require( paths.query_js ); + +assert.deepEqual(pg.Client.Query, query_native); +assert.notDeepEqual(pg.Client.Query, query_js); + +emptyCache(); +delete process.env.NODE_PG_FORCE_NATIVE diff --git a/test/integration/client/heroku-ssl-tests.js b/test/integration/client/heroku-ssl-tests.js new file mode 100644 index 00000000..5b6b87da --- /dev/null +++ b/test/integration/client/heroku-ssl-tests.js @@ -0,0 +1,25 @@ +var helper = require(__dirname + '/../test-helper'); +var pg = helper.pg; + +var host = 'ec2-107-20-224-218.compute-1.amazonaws.com'; +var database = 'db6kfntl5qhp2'; +var user = 'kwdzdnqpdiilfs'; +var port = 5432; + +var config = { + host: host, + port: port, + database: database, + user: user, + password: 'uaZoSSHgi7mVM7kYaROtusClKu', + ssl: true +}; + +//connect & disconnect from heroku +pg.connect(config, assert.success(function(client, done) { + client.query('SELECT NOW() as time', assert.success(function(res) { + assert(res.rows[0].time.getTime()); + done(); + pg.end(); + })) +})); diff --git a/test/integration/client/huge-numeric-tests.js b/test/integration/client/huge-numeric-tests.js new file mode 100644 index 00000000..4165711f --- /dev/null +++ b/test/integration/client/huge-numeric-tests.js @@ -0,0 +1,22 @@ +var helper = require(__dirname + '/test-helper'); + +helper.pg.connect(helper.config, assert.success(function(client, done) { + var types = require(__dirname + '/../../../lib/types'); + //1231 = numericOID + types.setTypeParser(1700, function(){ + return 'yes'; + }) + types.setTypeParser(1700, 'binary', function(){ + return 'yes'; + }) + var bignum = '294733346389144765940638005275322203805'; + client.query('CREATE TEMP TABLE bignumz(id numeric)'); + client.query('INSERT INTO bignumz(id) VALUES ($1)', [bignum]); + client.query('SELECT * FROM bignumz', assert.success(function(result) { + assert.equal(result.rows[0].id, 'yes') + helper.pg.end(); + done(); + })) +})); + +//custom type converter diff --git a/test/integration/client/json-type-parsing-tests.js b/test/integration/client/json-type-parsing-tests.js new file mode 100644 index 00000000..1c0759bf --- /dev/null +++ b/test/integration/client/json-type-parsing-tests.js @@ -0,0 +1,38 @@ +var helper = require(__dirname + '/test-helper'); +var assert = require('assert'); +//if you want binary support, pull request me! +if (helper.config.binary) { + console.log('binary mode does not support JSON right now'); + return; +} + +test('can read and write json', function() { + helper.pg.connect(helper.config, function(err, client, done) { + assert.ifError(err); + helper.versionGTE(client, '9.2.0', assert.success(function(jsonSupported) { + if(!jsonSupported) { + console.log('skip json test on older versions of postgres'); + done(); + return helper.pg.end(); + } + client.query('CREATE TEMP TABLE stuff(id SERIAL PRIMARY KEY, data JSON)'); + var value ={name: 'Brian', age: 250, alive: true, now: new Date()}; + client.query('INSERT INTO stuff (data) VALUES ($1)', [value]); + client.query('SELECT * FROM stuff', assert.success(function(result) { + assert.equal(result.rows.length, 1); + assert.equal(typeof result.rows[0].data, 'object'); + var row = result.rows[0].data; + assert.strictEqual(row.name, value.name); + assert.strictEqual(row.age, value.age); + assert.strictEqual(row.alive, value.alive); + test('row should have "now" as a date', function() { + return false; + assert(row.now instanceof Date, 'row.now should be a date instance but is ' + typeof row.now); + }); + assert.equal(JSON.stringify(row.now), JSON.stringify(value.now)); + done(); + helper.pg.end(); + })); + })); + }); +}); diff --git a/test/integration/client/no-row-result-tests.js b/test/integration/client/no-row-result-tests.js new file mode 100644 index 00000000..5555ff6f --- /dev/null +++ b/test/integration/client/no-row-result-tests.js @@ -0,0 +1,23 @@ +var helper = require(__dirname + '/test-helper'); +var pg = helper.pg; +var config = helper.config; + +test('can access results when no rows are returned', function() { + if(config.native) return false; + var checkResult = function(result) { + assert(result.fields, 'should have fields definition'); + assert.equal(result.fields.length, 1); + assert.equal(result.fields[0].name, 'val'); + assert.equal(result.fields[0].dataTypeID, 25); + pg.end(); + }; + + pg.connect(config, assert.success(function(client, done) { + var query = client.query('select $1::text as val limit 0', ['hi'], assert.success(function(result) { + checkResult(result); + done(); + })); + + assert.emits(query, 'end', checkResult); + })); +}); diff --git a/test/integration/client/notice-tests.js b/test/integration/client/notice-tests.js index db7b4822..4c6920ac 100644 --- a/test/integration/client/notice-tests.js +++ b/test/integration/client/notice-tests.js @@ -1,5 +1,7 @@ var helper = require(__dirname + '/test-helper'); test('emits notice message', function() { + //TODO this doesn't work on all versions of postgres + return false; var client = helper.client(); client.query('create temp table boom(id serial, size integer)'); assert.emits(client, 'notice', function(notice) { diff --git a/test/integration/client/parse-int-8-tests.js b/test/integration/client/parse-int-8-tests.js new file mode 100644 index 00000000..7028e900 --- /dev/null +++ b/test/integration/client/parse-int-8-tests.js @@ -0,0 +1,18 @@ + +var helper = require(__dirname + '/../test-helper'); +var pg = helper.pg; +test('ability to turn on and off parser', function() { + if(helper.args.binary) return false; + pg.connect(helper.config, assert.success(function(client, done) { + pg.defaults.parseInt8 = true; + client.query('CREATE TEMP TABLE asdf(id SERIAL PRIMARY KEY)'); + client.query('SELECT COUNT(*) as "count" FROM asdf', assert.success(function(res) { + pg.defaults.parseInt8 = false; + client.query('SELECT COUNT(*) as "count" FROM asdf', assert.success(function(res) { + done(); + assert.strictEqual("0", res.rows[0].count); + pg.end(); + })); + })); + })); +}); diff --git a/test/integration/client/prepared-statement-tests.js b/test/integration/client/prepared-statement-tests.js index ff2fac0d..34e5f9b5 100644 --- a/test/integration/client/prepared-statement-tests.js +++ b/test/integration/client/prepared-statement-tests.js @@ -82,8 +82,8 @@ test("named prepared statement", function() { test("prepared statements on different clients", function() { var statementName = "differ"; - var statement1 = "select count(*) as count from person"; - var statement2 = "select count(*) as count from person where age < $1"; + var statement1 = "select count(*)::int4 as count from person"; + var statement2 = "select count(*)::int4 as count from person where age < $1"; var client1Finished = false; var client2Finished = false; diff --git a/test/integration/client/query-callback-error-tests.js b/test/integration/client/query-callback-error-tests.js new file mode 100644 index 00000000..bd80153c --- /dev/null +++ b/test/integration/client/query-callback-error-tests.js @@ -0,0 +1,33 @@ +var helper = require(__dirname + '/test-helper'); +var util = require('util'); + +var withQuery = function(text, resultLength, cb) { + test('error during query execution', function() { + var client = new Client(helper.args); + process.removeAllListeners('uncaughtException'); + assert.emits(process, 'uncaughtException', function() { + assert.equal(client.activeQuery, null, 'should remove active query even if error happens in callback'); + client.query('SELECT * FROM blah', assert.success(function(result) { + assert.equal(result.rows.length, resultLength); + client.end(); + cb(); + })); + }); + client.connect(assert.success(function() { + client.query('CREATE TEMP TABLE "blah"(data text)', assert.success(function() { + var q = client.query(text, ['yo'], assert.calls(function() { + assert.emits(client, 'drain'); + throw new Error('WHOOOAAAHH!!'); + })); + })); + })); + }); +} + +//test with good query so our callback is called +//as a successful callback +withQuery('INSERT INTO blah(data) VALUES($1)', 1, function() { + //test with an error query so our callback is called with an error + withQuery('INSERT INTO asldkfjlaskfj eoooeoriiri', 0, function() { + }); +}); diff --git a/test/integration/client/query-error-handling-tests.js b/test/integration/client/query-error-handling-tests.js new file mode 100644 index 00000000..8ac060ea --- /dev/null +++ b/test/integration/client/query-error-handling-tests.js @@ -0,0 +1,31 @@ +var helper = require(__dirname + '/test-helper'); +var util = require('util'); + +test('error during query execution', function() { + var client = new Client(helper.args); + client.connect(assert.success(function() { + var sleepQuery = 'select pg_sleep(5)'; + var pidColName = 'procpid' + var queryColName = 'current_query'; + helper.versionGTE(client, '9.2.0', assert.success(function(isGreater) { + if(isGreater) { + pidColName = 'pid'; + queryColName = 'query'; + } + client.query(sleepQuery, assert.calls(function(err, result) { + assert(err); + client.end(); + })); + var client2 = new Client(helper.args); + client2.connect(assert.success(function() { + var killIdleQuery = "SELECT " + pidColName + ", (SELECT pg_terminate_backend(" + pidColName + ")) AS killed FROM pg_stat_activity WHERE " + queryColName + " = $1"; + client2.query(killIdleQuery, [sleepQuery], assert.calls(function(err, res) { + assert.ifError(err); + assert.equal(res.rows.length, 1); + client2.end(); + assert.emits(client2, 'end'); + })); + })); + })); + })); +}); diff --git a/test/integration/client/quick-disconnect-tests.js b/test/integration/client/quick-disconnect-tests.js new file mode 100644 index 00000000..a1b6bab6 --- /dev/null +++ b/test/integration/client/quick-disconnect-tests.js @@ -0,0 +1,7 @@ +//test for issue #320 +// +var helper = require('./test-helper'); + +var client = new helper.pg.Client(helper.config); +client.connect(); +client.end(); diff --git a/test/integration/client/result-metadata-tests.js b/test/integration/client/result-metadata-tests.js index 1c4f94df..8f66fe16 100644 --- a/test/integration/client/result-metadata-tests.js +++ b/test/integration/client/result-metadata-tests.js @@ -2,26 +2,34 @@ var helper = require(__dirname + "/test-helper"); var pg = helper.pg; test('should return insert metadata', function() { - return false; - pg.connect(helper.config, assert.calls(function(err, client) { + pg.connect(helper.config, assert.calls(function(err, client, done) { assert.isNull(err); - client.query("CREATE TEMP TABLE zugzug(name varchar(10))", assert.calls(function(err, result) { - assert.isNull(err); - //let's list this as ignored for now - // process.nextTick(function() { - // test('should identify "CREATE TABLE" message', function() { - // return false; - // assert.equal(result.command, "CREATE TABLE"); - // assert.equal(result.rowCount, 0); - // }) - // }) - assert.equal(result.oid, null); - client.query("INSERT INTO zugzug(name) VALUES('more work?')", assert.calls(function(err, result) { - assert.equal(result.command, "INSERT"); - assert.equal(result.rowCount, 1); - process.nextTick(client.end.bind(client)); - return false; - })) - })) - })) -}) + + helper.versionGTE(client, '9.0.0', assert.success(function(hasRowCount) { + client.query("CREATE TEMP TABLE zugzug(name varchar(10))", assert.calls(function(err, result) { + assert.isNull(err); + assert.equal(result.oid, null); + assert.equal(result.command, 'CREATE'); + + var q = client.query("INSERT INTO zugzug(name) VALUES('more work?')", assert.calls(function(err, result) { + assert.equal(result.command, "INSERT"); + assert.equal(result.rowCount, 1); + + client.query('SELECT * FROM zugzug', assert.calls(function(err, result) { + assert.isNull(err); + if(hasRowCount) assert.equal(result.rowCount, 1); + assert.equal(result.command, 'SELECT'); + process.nextTick(pg.end.bind(pg)); + })); + })); + + assert.emits(q, 'end', function(result) { + assert.equal(result.command, "INSERT"); + if(hasRowCount) assert.equal(result.rowCount, 1); + done(); + }); + + })); + })); + })); +}); diff --git a/test/integration/client/results-as-array-tests.js b/test/integration/client/results-as-array-tests.js new file mode 100644 index 00000000..ef11a891 --- /dev/null +++ b/test/integration/client/results-as-array-tests.js @@ -0,0 +1,33 @@ +var util = require('util'); +var helper = require('./test-helper'); + +var Client = helper.Client; + +var conInfo = helper.config; + +test('returns results as array', function() { + var client = new Client(conInfo); + var checkRow = function(row) { + assert(util.isArray(row), 'row should be an array'); + assert.equal(row.length, 4); + assert.equal(row[0].getFullYear(), new Date().getFullYear()); + assert.strictEqual(row[1], 1); + assert.strictEqual(row[2], 'hai'); + assert.strictEqual(row[3], null); + } + client.connect(assert.success(function() { + var config = { + text: 'SELECT NOW(), 1::int, $1::text, null', + values: ['hai'], + rowMode: 'array' + }; + var query = client.query(config, assert.success(function(result) { + assert.equal(result.rows.length, 1); + checkRow(result.rows[0]); + client.end(); + })); + assert.emits(query, 'row', function(row) { + checkRow(row); + }); + })); +}); diff --git a/test/integration/client/row-description-on-results-tests.js b/test/integration/client/row-description-on-results-tests.js new file mode 100644 index 00000000..22c92965 --- /dev/null +++ b/test/integration/client/row-description-on-results-tests.js @@ -0,0 +1,37 @@ +var helper = require('./test-helper'); + +var Client = helper.Client; + +var conInfo = helper.config; + +var checkResult = function(result) { + assert(result.fields); + assert.equal(result.fields.length, 3); + var fields = result.fields; + assert.equal(fields[0].name, 'now'); + assert.equal(fields[1].name, 'num'); + assert.equal(fields[2].name, 'texty'); + assert.equal(fields[0].dataTypeID, 1184); + assert.equal(fields[1].dataTypeID, 23); + assert.equal(fields[2].dataTypeID, 25); +}; + +test('row descriptions on result object', function() { + var client = new Client(conInfo); + client.connect(assert.success(function() { + client.query('SELECT NOW() as now, 1::int as num, $1::text as texty', ["hello"], assert.success(function(result) { + checkResult(result); + client.end(); + })); + })); +}); + +test('row description on no rows', function() { + var client = new Client(conInfo); + client.connect(assert.success(function() { + client.query('SELECT NOW() as now, 1::int as num, $1::text as texty LIMIT 0', ["hello"], assert.success(function(result) { + checkResult(result); + client.end(); + })); + })); +}); diff --git a/test/integration/client/simple-query-tests.js b/test/integration/client/simple-query-tests.js index 2edabd73..f8ef1ada 100644 --- a/test/integration/client/simple-query-tests.js +++ b/test/integration/client/simple-query-tests.js @@ -9,15 +9,16 @@ test("simple query interface", function() { client.on('drain', client.end.bind(client)); var rows = []; - query.on('row', function(row) { - rows.push(row['name']) + query.on('row', function(row, result) { + assert.ok(result); + rows.push(row['name']); }); query.once('row', function(row) { test('Can iterate through columns', function () { var columnCount = 0; for (column in row) { columnCount++; - }; + } if ('length' in row) { assert.lengthIs(row, columnCount, 'Iterating through the columns gives a different length from calling .length.'); } @@ -37,7 +38,7 @@ test("simple query interface", function() { test("multiple simple queries", function() { var client = helper.client(); - client.query({ text: "create temp table bang(id serial, name varchar(5));insert into bang(name) VALUES('boom');", binary: false }) + client.query({ text: "create temp table bang(id serial, name varchar(5));insert into bang(name) VALUES('boom');"}) client.query("insert into bang(name) VALUES ('yes');"); var query = client.query("select name from bang"); assert.emits(query, 'row', function(row) { @@ -51,9 +52,9 @@ test("multiple simple queries", function() { test("multiple select statements", function() { var client = helper.client(); - client.query({text: "create temp table boom(age integer); insert into boom(age) values(1); insert into boom(age) values(2); insert into boom(age) values(3)", binary: false}); - client.query({text: "create temp table bang(name varchar(5)); insert into bang(name) values('zoom');", binary: false}); - var result = client.query({text: "select age from boom where age < 2; select name from bang", binary: false}); + client.query("create temp table boom(age integer); insert into boom(age) values(1); insert into boom(age) values(2); insert into boom(age) values(3)"); + client.query({text: "create temp table bang(name varchar(5)); insert into bang(name) values('zoom');"}); + var result = client.query({text: "select age from boom where age < 2; select name from bang"}); assert.emits(result, 'row', function(row) { assert.strictEqual(row['age'], 1); assert.emits(result, 'row', function(row) { diff --git a/test/integration/client/ssl-tests.js b/test/integration/client/ssl-tests.js new file mode 100644 index 00000000..0458d6b0 --- /dev/null +++ b/test/integration/client/ssl-tests.js @@ -0,0 +1,14 @@ +var pg = require(__dirname + '/../../../lib'); +var config = require(__dirname + '/test-helper').config; +test('can connect with ssl', function() { + return false; + config.ssl = { + rejectUnauthorized: false + }; + pg.connect(config, assert.success(function(client) { + return false; + client.query('SELECT NOW()', assert.success(function() { + pg.end(); + })); + })); +}); diff --git a/test/integration/client/test-helper.js b/test/integration/client/test-helper.js index d8ae3d85..24cddf61 100644 --- a/test/integration/client/test-helper.js +++ b/test/integration/client/test-helper.js @@ -1,10 +1,3 @@ var helper = require(__dirname+'/../test-helper'); -//creates a client from cli parameters -helper.client = function() { - var client = new Client(helper.config); - client.connect(); - return client; -}; - module.exports = helper; diff --git a/test/integration/client/timezone-tests.js b/test/integration/client/timezone-tests.js new file mode 100644 index 00000000..b355550d --- /dev/null +++ b/test/integration/client/timezone-tests.js @@ -0,0 +1,29 @@ +var helper = require(__dirname + '/../test-helper'); +var exec = require('child_process').exec; + +var oldTz = process.env.TZ; +process.env.TZ = 'Europe/Berlin'; + +var date = new Date(); + +helper.pg.connect(helper.config, function(err, client, done) { + assert.isNull(err); + + test('timestamp without time zone', function() { + client.query("SELECT CAST($1 AS TIMESTAMP WITHOUT TIME ZONE) AS \"val\"", [ date ], function(err, result) { + assert.isNull(err); + assert.equal(result.rows[0].val.getTime(), date.getTime()); + + test('timestamp with time zone', function() { + client.query("SELECT CAST($1 AS TIMESTAMP WITH TIME ZONE) AS \"val\"", [ date ], function(err, result) { + assert.isNull(err); + assert.equal(result.rows[0].val.getTime(), date.getTime()); + + done(); + helper.pg.end(); + process.env.TZ = oldTz; + }); + }); + }); + }); +}); \ No newline at end of file diff --git a/test/integration/client/transaction-tests.js b/test/integration/client/transaction-tests.js index 4fbfd18b..85ee7e53 100644 --- a/test/integration/client/transaction-tests.js +++ b/test/integration/client/transaction-tests.js @@ -5,8 +5,7 @@ var sink = new helper.Sink(2, function() { }); test('a single connection transaction', function() { - helper.pg.connect(helper.config, assert.calls(function(err, client) { - assert.isNull(err); + helper.pg.connect(helper.config, assert.success(function(client, done) { client.query('begin'); @@ -39,6 +38,7 @@ test('a single connection transaction', function() { client.query(getZed, assert.calls(function(err, result) { assert.isNull(err); assert.empty(result.rows); + done(); sink.add(); })) }) @@ -46,8 +46,7 @@ test('a single connection transaction', function() { }) test('gh#36', function() { - helper.pg.connect(helper.config, function(err, client) { - if(err) throw err; + helper.pg.connect(helper.config, assert.success(function(client, done) { client.query("BEGIN"); client.query({ name: 'X', @@ -67,6 +66,7 @@ test('gh#36', function() { })) client.query("COMMIT", function() { sink.add(); + done(); }) - }) + })); }) diff --git a/test/integration/client/type-coercion-tests.js b/test/integration/client/type-coercion-tests.js index 2c23f130..0e303a21 100644 --- a/test/integration/client/type-coercion-tests.js +++ b/test/integration/client/type-coercion-tests.js @@ -2,7 +2,7 @@ var helper = require(__dirname + '/test-helper'); var sink; var testForTypeCoercion = function(type){ - helper.pg.connect(helper.config, function(err, client) { + helper.pg.connect(helper.config, function(err, client, done) { assert.isNull(err); client.query("create temp table test_type(col " + type.name + ")", assert.calls(function(err, result) { assert.isNull(err); @@ -23,7 +23,9 @@ var testForTypeCoercion = function(type){ }); assert.emits(query, 'row', function(row) { - assert.strictEqual(row.col, val, "expected " + type.name + " of " + val + " but got " + row.col); + var expected = val + " (" + typeof val + ")"; + var returned = row.col + " (" + typeof row.col + ")"; + assert.strictEqual(row.col, val, "expected " + type.name + " of " + expected + " but got " + returned); }, "row should have been called for " + type.name + " of " + val); client.query('delete from test_type'); @@ -31,6 +33,7 @@ var testForTypeCoercion = function(type){ client.query('drop table test_type', function() { sink.add(); + done(); }); }) })); @@ -39,13 +42,21 @@ var testForTypeCoercion = function(type){ var types = [{ name: 'integer', - values: [1, -1, null] + values: [-2147483648, -1, 0, 1, 2147483647, null] },{ name: 'smallint', - values: [-1, 0, 1, null] + values: [-32768, -1, 0, 1, 32767, null] },{ name: 'bigint', - values: [-10000, 0, 10000, null] + values: [ + '-9223372036854775808', + '-9007199254740992', + '0', + '9007199254740992', + '72057594037928030', + '9223372036854775807', + null + ] },{ name: 'varchar(5)', values: ['yo', '', 'zomg!', null] @@ -56,15 +67,21 @@ var types = [{ name: 'bool', values: [true, false, null] },{ - //TODO get some actual huge numbers here name: 'numeric', - values: [-12.34, 0, 12.34, null] + values: [ + '-12.34', + '0', + '12.34', + '-3141592653589793238462643383279502.1618033988749894848204586834365638', + '3141592653589793238462643383279502.1618033988749894848204586834365638', + null + ] },{ name: 'real', - values: [101.1, 0, -101.3, null] + values: [-101.3, -1.2, 0, 1.2, 101.1, null] },{ name: 'double precision', - values: [-1.2, 0, 1.2, null] + values: [-101.3, -1.2, 0, 1.2, 101.1, null] },{ name: 'timestamptz', values: [null] @@ -82,7 +99,7 @@ var types = [{ // ignore some tests in binary mode if (helper.config.binary) { types = types.filter(function(type) { - return !(type.name in {'real':1, 'timetz':1, 'time':1}); + return !(type.name in {'real': 1, 'timetz':1, 'time':1, 'numeric': 1, 'bigint': 1}); }); } @@ -133,7 +150,7 @@ test("timestampz round trip", function() { client.on('drain', client.end.bind(client)); }); -helper.pg.connect(helper.config, assert.calls(function(err, client) { +helper.pg.connect(helper.config, assert.calls(function(err, client, done) { assert.isNull(err); client.query('select null as res;', assert.calls(function(err, res) { assert.isNull(err); @@ -143,5 +160,21 @@ helper.pg.connect(helper.config, assert.calls(function(err, client) { assert.isNull(err); assert.strictEqual(res.rows[0].res, null); sink.add(); + done(); }) })) + +if(!helper.config.binary) { + test("postgres date type", function() { + var client = helper.client(); + client.on('error', function(err) { + console.log(err); + client.end(); + }); + client.query("SELECT '2010-10-31'::date", assert.calls(function(err, result){ + assert.isNull(err); + assert.UTCDate(result.rows[0].date, 2010, 9, 31, 0, 0, 0, 0); + })); + client.on('drain', client.end.bind(client)); + }); +} diff --git a/test/integration/connection-pool/ending-pool-tests.js b/test/integration/connection-pool/ending-pool-tests.js index e46c0fc1..da057a55 100644 --- a/test/integration/connection-pool/ending-pool-tests.js +++ b/test/integration/connection-pool/ending-pool-tests.js @@ -8,12 +8,13 @@ test('disconnects', function() { helper.pg.end(); }); [helper.config, helper.config, helper.config, helper.config].forEach(function(config) { - helper.pg.connect(config, function(err, client) { + helper.pg.connect(config, function(err, client, done) { assert.isNull(err); client.query("SELECT * FROM NOW()", function(err, result) { process.nextTick(function() { assert.equal(called, false, "Should not have disconnected yet") sink.add(); + done(); }) }) }) diff --git a/test/integration/connection-pool/error-tests.js b/test/integration/connection-pool/error-tests.js index 11badf04..a09b1f11 100644 --- a/test/integration/connection-pool/error-tests.js +++ b/test/integration/connection-pool/error-tests.js @@ -1,28 +1,41 @@ var helper = require(__dirname + "/../test-helper"); var pg = require(__dirname + "/../../../lib"); -helper.pg = pg; +pg = pg; //first make pool hold 2 clients -helper.pg.defaults.poolSize = 2; +pg.defaults.poolSize = 2; -var killIdleQuery = 'SELECT procpid, (SELECT pg_terminate_backend(procpid)) AS killed FROM pg_stat_activity WHERE current_query LIKE \'\''; //get first client -helper.pg.connect(helper.config, assert.success(function(client) { +pg.connect(helper.config, assert.success(function(client, done) { client.id = 1; - helper.pg.connect(helper.config, assert.success(function(client2) { - client2.id = 2; - //subscribe to the pg error event - assert.emits(helper.pg, 'error', function(error, brokenClient) { - assert.ok(error); - assert.ok(brokenClient); - assert.equal(client.id, brokenClient.id); - helper.pg.end(); - }); - //kill the connection from client - client2.query(killIdleQuery, assert.success(function(res) { - //check to make sure client connection actually was killed - assert.lengthIs(res.rows, 1); + pg.connect(helper.config, assert.success(function(client2, done2) { + client2.id = 2; + var pidColName = 'procpid' + helper.versionGTE(client2, '9.2.0', assert.success(function(isGreater) { + console.log(isGreater) + var killIdleQuery = 'SELECT pid, (SELECT pg_terminate_backend(pid)) AS killed FROM pg_stat_activity WHERE state = $1'; + var params = ['idle']; + if(!isGreater) { + killIdleQuery = 'SELECT procpid, (SELECT pg_terminate_backend(procpid)) AS killed FROM pg_stat_activity WHERE current_query LIKE $1'; + params = ['%IDLE%'] + } + + //subscribe to the pg error event + assert.emits(pg, 'error', function(error, brokenClient) { + assert.ok(error); + assert.ok(brokenClient); + assert.equal(client.id, brokenClient.id); + }); + + //kill the connection from client + client2.query(killIdleQuery, params, assert.success(function(res) { + //check to make sure client connection actually was killed + assert.lengthIs(res.rows, 1); + //return client2 to the pool + done2(); + pg.end(); + })); + })); })); - })); })); diff --git a/test/integration/connection-pool/idle-timeout-tests.js b/test/integration/connection-pool/idle-timeout-tests.js index c6cbbd9f..34a403fa 100644 --- a/test/integration/connection-pool/idle-timeout-tests.js +++ b/test/integration/connection-pool/idle-timeout-tests.js @@ -3,10 +3,11 @@ var helper = require(__dirname + '/test-helper'); helper.pg.defaults.poolIdleTimeout = 200; test('idle timeout', function() { - helper.pg.connect(helper.config, assert.calls(function(err, client) { + helper.pg.connect(helper.config, assert.calls(function(err, client, done) { assert.isNull(err); client.query('SELECT NOW()'); //just let this one time out //test will hang if pool doesn't timeout + done(); })); }); diff --git a/test/integration/connection-pool/optional-config-tests.js b/test/integration/connection-pool/optional-config-tests.js index d3ddc509..716d3153 100644 --- a/test/integration/connection-pool/optional-config-tests.js +++ b/test/integration/connection-pool/optional-config-tests.js @@ -8,7 +8,14 @@ helper.pg.defaults.port = helper.args.port; helper.pg.defaults.database = helper.args.database; helper.pg.defaults.poolSize = 1; -helper.pg.connect(assert.calls(function(err, client) { +helper.pg.connect(assert.calls(function(err, client, done) { assert.isNull(err); - client.end(); + client.query('SELECT NOW()'); + client.once('drain', function() { + setTimeout(function() { + helper.pg.end(); + done(); + + }, 10); + }); })); diff --git a/test/integration/connection-pool/test-helper.js b/test/integration/connection-pool/test-helper.js index cc86677d..199407cd 100644 --- a/test/integration/connection-pool/test-helper.js +++ b/test/integration/connection-pool/test-helper.js @@ -9,7 +9,7 @@ helper.testPoolSize = function(max) { for(var i = 0; i < max; i++) { helper.pg.poolSize = 10; test("connection #" + i + " executes", function() { - helper.pg.connect(helper.config, function(err, client) { + helper.pg.connect(helper.config, function(err, client, done) { assert.isNull(err); client.query("select * from person", function(err, result) { assert.lengthIs(result.rows, 26) @@ -19,7 +19,8 @@ helper.testPoolSize = function(max) { }) var query = client.query("SELECT * FROM NOW()") query.on('end',function() { - sink.add() + sink.add(); + done(); }) }) }) diff --git a/test/integration/connection-pool/unique-name-tests.js b/test/integration/connection-pool/unique-name-tests.js deleted file mode 100644 index a92a0041..00000000 --- a/test/integration/connection-pool/unique-name-tests.js +++ /dev/null @@ -1,63 +0,0 @@ -var helper = require(__dirname + '/test-helper'); - -helper.pg.defaults.poolSize = 1; -helper.pg.defaults.user = helper.args.user; -helper.pg.defaults.password = helper.args.password; -helper.pg.defaults.database = helper.args.database; -helper.pg.defaults.port = helper.args.port; -helper.pg.defaults.host = helper.args.host; -helper.pg.defaults.binary = helper.args.binary; -helper.pg.defaults.poolIdleTimeout = 100; - -var moreArgs = {}; -for (c in helper.config) { - moreArgs[c] = helper.config[c]; -} -moreArgs.zomg = true; - -var badArgs = {}; -for (c in helper.config) { - badArgs[c] = helper.config[c]; -} - -badArgs.user = badArgs.user + 'laksdjfl'; -badArgs.password = badArgs.password + 'asldkfjlas'; -badArgs.zomg = true; - -test('connecting with complete config', function() { - - helper.pg.connect(helper.config, assert.calls(function(err, client) { - assert.isNull(err); - client.iGotAccessed = true; - client.query("SELECT NOW()") - })); - -}); - -test('connecting with different config object', function() { - - helper.pg.connect(moreArgs, assert.calls(function(err, client) { - assert.isNull(err); - assert.ok(client.iGotAccessed === true) - client.query("SELECT NOW()"); - })) - -}); - -test('connecting with all defaults', function() { - - helper.pg.connect(assert.calls(function(err, client) { - assert.isNull(err); - assert.ok(client.iGotAccessed === true); - client.end(); - })); - -}); - -test('connecting with invalid config', function() { - - helper.pg.connect(badArgs, assert.calls(function(err, client) { - assert.ok(err != null, "Expected connection error using invalid connection credentials"); - })); - -}); diff --git a/test/integration/connection/copy-tests.js b/test/integration/connection/copy-tests.js new file mode 100644 index 00000000..ee4a71c5 --- /dev/null +++ b/test/integration/connection/copy-tests.js @@ -0,0 +1,44 @@ +var helper = require(__dirname+"/test-helper"); +var assert = require('assert'); + +test('COPY FROM events check', function () { + helper.connect(function (con) { + var stdinStream = con.query('COPY person FROM STDIN'); + con.on('copyInResponse', function () { + con.endCopyFrom(); + }); + assert.emits(con, 'copyInResponse', + function () { + con.endCopyFrom(); + }, + "backend should emit copyInResponse after COPY FROM query" + ); + assert.emits(con, 'commandComplete', + function () { + con.end(); + }, + "backend should emit commandComplete after COPY FROM stream ends" + ) + }); +}); +test('COPY TO events check', function () { + helper.connect(function (con) { + var stdoutStream = con.query('COPY person TO STDOUT'); + assert.emits(con, 'copyOutResponse', + function () { + }, + "backend should emit copyOutResponse after COPY TO query" + ); + assert.emits(con, 'copyData', + function () { + }, + "backend should emit copyData on every data row" + ); + assert.emits(con, 'copyDone', + function () { + con.end(); + }, + "backend should emit copyDone after all data rows" + ); + }); +}); diff --git a/test/integration/gh-issues/130.js b/test/integration/gh-issues/130.js new file mode 100644 index 00000000..34670a69 --- /dev/null +++ b/test/integration/gh-issues/130.js @@ -0,0 +1,17 @@ +var helper = require(__dirname + '/../test-helper'); +var exec = require('child_process').exec; + +helper.pg.defaults.poolIdleTimeout = 1000; + +helper.pg.connect(helper.config, function(err,client) { + client.query("SELECT pg_backend_pid()", function(err, result) { + var pid = result.rows[0].pg_backend_pid; + exec('psql -c "select pg_terminate_backend('+pid+')" template1', assert.calls(function (error, stdout, stderr) { + assert.isNull(error); + })); + }); +}); + +helper.pg.on('error', function(err, client) { + //swallow errors +}); diff --git a/test/integration/gh-issues/131.js b/test/integration/gh-issues/131.js new file mode 100644 index 00000000..74f35c12 --- /dev/null +++ b/test/integration/gh-issues/131.js @@ -0,0 +1,19 @@ +var helper = require(__dirname + "/../test-helper"); +var pg = helper.pg; + +test('parsing array results', function() { + pg.connect(helper.config, assert.calls(function(err, client) { + assert.isNull(err); + client.query("CREATE TEMP TABLE why(names text[], numbors integer[], decimals double precision[])"); + client.query('INSERT INTO why(names, numbors, decimals) VALUES(\'{"aaron", "brian","a b c" }\', \'{1, 2, 3}\', \'{.1, 0.05, 3.654}\')').on('error', console.log); + test('decimals', function() { + client.query('SELECT decimals FROM why', assert.success(function(result) { + assert.lengthIs(result.rows[0].decimals, 3); + assert.equal(result.rows[0].decimals[0], 0.1); + assert.equal(result.rows[0].decimals[1], 0.05); + assert.equal(result.rows[0].decimals[2], 3.654); + pg.end(); + })) + }) + })) +}) diff --git a/test/integration/test-helper.js b/test/integration/test-helper.js index 08fea767..7905d157 100644 --- a/test/integration/test-helper.js +++ b/test/integration/test-helper.js @@ -1,10 +1,27 @@ var helper = require(__dirname + '/../test-helper'); -//TODO would this be better served set at ../test-helper? if(helper.args.native) { Client = require(__dirname + '/../../lib/native'); + helper.Client = Client; helper.pg = helper.pg.native; } + +//creates a client from cli parameters +helper.client = function() { + var client = new Client(helper.config); + client.connect(); + return client; +}; + +var semver = require('semver'); +helper.versionGTE = function(client, versionString, callback) { + client.query('SELECT version()', assert.calls(function(err, result) { + if(err) return callback(err); + var version = result.rows[0].version.split(' ')[1]; + return callback(null, semver.gte(version, versionString)); + })); +}; + //export parent helper stuffs module.exports = helper; diff --git a/test/native/callback-api-tests.js b/test/native/callback-api-tests.js index 45006682..0b713573 100644 --- a/test/native/callback-api-tests.js +++ b/test/native/callback-api-tests.js @@ -1,3 +1,4 @@ +var domain = require('domain'); var helper = require(__dirname + "/../test-helper"); var Client = require(__dirname + "/../../lib/native"); @@ -14,3 +15,17 @@ test('fires callback with results', function() { })) })); }) + +test('preserves domain', function() { + var dom = domain.create(); + + dom.run(function() { + var client = new Client(helper.config); + assert.ok(dom === require('domain').active, 'domain is active'); + client.connect() + client.query('select 1', function() { + assert.ok(dom === require('domain').active, 'domain is still active'); + client.end(); + }); + }); +}) diff --git a/test/native/connection-tests.js b/test/native/connection-tests.js index 1cb0ed88..be84be6e 100644 --- a/test/native/connection-tests.js +++ b/test/native/connection-tests.js @@ -1,5 +1,6 @@ var helper = require(__dirname + "/../test-helper"); var Client = require(__dirname + "/../../lib/native"); +var domain = require('domain'); test('connecting with wrong parameters', function() { var con = new Client("user=asldfkj hostaddr=127.0.0.1 port=5432 dbname=asldkfj"); @@ -20,3 +21,16 @@ test('connects', function() { }) }) }) + +test('preserves domain', function() { + var dom = domain.create(); + + dom.run(function() { + var con = new Client(helper.config); + assert.ok(dom === require('domain').active, 'domain is active'); + con.connect(function() { + assert.ok(dom === require('domain').active, 'domain is still active'); + con.end(); + }); + }); +}) diff --git a/test/native/copy-events-tests.js b/test/native/copy-events-tests.js new file mode 100644 index 00000000..76f7e292 --- /dev/null +++ b/test/native/copy-events-tests.js @@ -0,0 +1,40 @@ +var helper = require(__dirname+"/../test-helper"); +var Client = require(__dirname + "/../../lib/native"); +test('COPY FROM events check', function () { + var con = new Client(helper.config), + stdinStream = con.copyFrom('COPY person FROM STDIN'); + assert.emits(con, 'copyInResponse', + function () { + stdinStream.end(); + }, + "backend should emit copyInResponse after COPY FROM query" + ); + assert.emits(con, '_readyForQuery', + function () { + con.end(); + }, + "backend should emit _readyForQuery after data will be coped to stdin stream" + ); + con.connect(); +}); +test('COPY TO events check', function () { + var con = new Client(helper.config), + stdoutStream = con.copyTo('COPY person TO STDOUT'); + assert.emits(con, 'copyOutResponse', + function () {}, + "backend should emit copyOutResponse on copyOutResponse message from server" + ); + assert.emits(con, 'copyData', + function () { + }, + "backend should emit copyData on every data row" + ); + assert.emits(con, '_readyForQuery', + function () { + con.end(); + }, + "backend should emit _readyForQuery after data will be coped to stdout stream" + ); + con.connect(); +}); + diff --git a/test/native/copyto-largedata-tests.js b/test/native/copyto-largedata-tests.js new file mode 100644 index 00000000..8c87948f --- /dev/null +++ b/test/native/copyto-largedata-tests.js @@ -0,0 +1,23 @@ +var helper = require(__dirname+"/../test-helper"); +var Client = require(__dirname + "/../../lib/native"); +test("COPY TO large amount of data from postgres", function () { + //there were a bug in native implementation of COPY TO: + //if there were too much data (if we face situation + //when data is not ready while calling PQgetCopyData); + //while loop in Connection::HandleIOEvent becomes infinite + //in such way hanging node, consumes 100% cpu, and making connection unusable + var con = new Client(helper.config), + rowCount = 100000, + stdoutStream = con.copyTo('COPY (select generate_series(1, ' + rowCount + ')) TO STDOUT'); + stdoutStream.on('data', function () { + rowCount--; + }); + stdoutStream.on('end', function () { + assert.equal(rowCount, 0, "copy to should load exactly requested number of rows"); + con.query("SELECT 1", assert.calls(function (error, result) { + assert.ok(!error && result, "loading large amount of data by copy to should not break connection"); + con.end(); + })); + }); + con.connect(); +}); diff --git a/test/native/error-tests.js b/test/native/error-tests.js index 3184df57..3a932705 100644 --- a/test/native/error-tests.js +++ b/test/native/error-tests.js @@ -5,26 +5,30 @@ test('query with non-text as first parameter throws error', function() { var client = new Client(helper.config); client.connect(); assert.emits(client, 'connect', function() { - assert.throws(function() { - client.query({text:{fail: true}}); - }) client.end(); - }) -}) + assert.emits(client, 'end', function() { + assert.throws(function() { + client.query({text:{fail: true}}); + }); + }); + }); +}); test('parameterized query with non-text as first parameter throws error', function() { var client = new Client(helper.config); client.connect(); assert.emits(client, 'connect', function() { - assert.throws(function() { - client.query({ - text: {fail: true}, - values: [1, 2] - }) - }) client.end(); - }) -}) + assert.emits(client, 'end', function() { + assert.throws(function() { + client.query({ + text: {fail: true}, + values: [1, 2] + }) + }); + }); + }); +}); var connect = function(callback) { var client = new Client(helper.config); @@ -37,24 +41,28 @@ var connect = function(callback) { test('parameterized query with non-array for second value', function() { test('inline', function() { connect(function(client) { - assert.throws(function() { - client.query("SELECT *", "LKSDJF") - }) client.end(); - }) - }) + assert.emits(client, 'end', function() { + assert.throws(function() { + client.query("SELECT *", "LKSDJF") + }); + }); + }); + }); test('config', function() { connect(function(client) { - assert.throws(function() { - client.query({ - text: "SELECT *", - values: "ALSDKFJ" - }) - }) client.end(); - }) - }) -}) + assert.emits(client, 'end', function() { + assert.throws(function() { + client.query({ + text: "SELECT *", + values: "ALSDKFJ" + }); + }); + }); + }); + }); +}); diff --git a/test/test-helper.js b/test/test-helper.js index 55a0a5c5..8d854b81 100644 --- a/test/test-helper.js +++ b/test/test-helper.js @@ -6,7 +6,6 @@ var sys = require('util'); var BufferList = require(__dirname+'/buffer-list') var Connection = require(__dirname + '/../lib/connection'); -var args = require(__dirname + '/cli'); Client = require(__dirname + '/../lib').Client; @@ -29,10 +28,10 @@ assert.same = function(actual, expected) { assert.emits = function(item, eventName, callback, message) { var called = false; var id = setTimeout(function() { - test("Should have called " + eventName, function() { + test("Should have called '" + eventName + "' event", function() { assert.ok(called, message || "Expected '" + eventName + "' to be called.") }); - },2000); + },5000); item.once(eventName, function() { if (eventName === 'error') { @@ -97,13 +96,25 @@ assert.empty = function(actual) { }; assert.success = function(callback) { - return assert.calls(function(err, arg) { - if(err) { - console.log(err); - } - assert.isNull(err); - callback(arg); - }) + if(callback.length === 1 || callback.length === 0) { + return assert.calls(function(err, arg) { + if(err) { + console.log(err); + } + assert.isNull(err); + callback(arg); + }); + } else if (callback.length === 2) { + return assert.calls(function(err, arg1, arg2) { + if(err) { + console.log(err); + } + assert.isNull(err); + callback(arg1, arg2); + }); + } else { + throw new Error('need to preserve arrity of wrapped function'); + } } assert.throws = function(offender) { @@ -124,15 +135,28 @@ var expect = function(callback, timeout) { var executed = false; var id = setTimeout(function() { assert.ok(executed, "Expected execution of function to be fired"); - }, timeout || 2000) + }, timeout || 5000) - return function(err, queryResult) { - clearTimeout(id); - if (err) { - assert.ok(err instanceof Error, "Expected errors to be instances of Error: " + sys.inspect(err)); + if(callback.length < 3) { + return function(err, queryResult) { + clearTimeout(id); + if (err) { + assert.ok(err instanceof Error, "Expected errors to be instances of Error: " + sys.inspect(err)); + } + callback.apply(this, arguments) } - callback.apply(this, arguments) + } else if(callback.length == 3) { + return function(err, arg1, arg2) { + clearTimeout(id); + if (err) { + assert.ok(err instanceof Error, "Expected errors to be instances of Error: " + sys.inspect(err)); + } + callback.apply(this, arguments) + } + } else { + throw new Error("Unsupported arrity " + callback.length); } + } assert.calls = expect; @@ -143,47 +167,24 @@ assert.isNull = function(item, message) { test = function(name, action) { test.testCount ++; - if(args.verbose) { - console.log(name); - } - var result = action(); + test[name] = action; + var result = test[name](); if(result === false) { - test.ignored.push(name); - if(!args.verbose) { - process.stdout.write('?'); - } + process.stdout.write('?'); }else{ - if(!args.verbose) { - process.stdout.write('.'); - } + process.stdout.write('.'); } }; //print out the filename process.stdout.write(require('path').basename(process.argv[1])); -//print a new line since we'll be printing test names -if(args.verbose) { - console.log(); -} -test.testCount = test.testCount || 0; -test.ignored = test.ignored || []; -test.errors = test.errors || []; +var args = require(__dirname + '/cli'); +if(args.binary) process.stdout.write(' (binary)'); +if(args.native) process.stdout.write(' (native)'); process.on('exit', function() { - console.log(''); - if(test.ignored.length || test.errors.length) { - test.ignored.forEach(function(name) { - console.log("Ignored: " + name); - }); - test.errors.forEach(function(error) { - console.log("Error: " + error.name); - }); - console.log(''); - } - test.errors.forEach(function(error) { - throw error.e; - }); -}); + console.log('') +}) process.on('uncaughtException', function(err) { console.error("\n %s", err.stack || err.toString()) @@ -194,7 +195,7 @@ process.on('uncaughtException', function(err) { var count = 0; var Sink = function(expected, timeout, callback) { - var defaultTimeout = 1000; + var defaultTimeout = 5000; if(typeof timeout == 'function') { callback = timeout; timeout = defaultTimeout; @@ -221,10 +222,11 @@ var Sink = function(expected, timeout, callback) { } } + module.exports = { - args: args, Sink: Sink, pg: require(__dirname + '/../lib/'), + args: args, config: args, sys: sys, Client: Client diff --git a/test/unit/client/configuration-tests.js b/test/unit/client/configuration-tests.js index cb60119b..219ad5cd 100644 --- a/test/unit/client/configuration-tests.js +++ b/test/unit/client/configuration-tests.js @@ -4,8 +4,8 @@ test('client settings', function() { test('defaults', function() { var client = new Client(); - assert.equal(client.user, process.env.USER); - assert.equal(client.database, process.env.USER); + assert.equal(client.user, process.env['PGUSER'] || process.env.USER); + assert.equal(client.database, process.env['PGDATABASE'] || process.env.USER); assert.equal(client.port, 5432); }); @@ -31,7 +31,7 @@ test('client settings', function() { test('initializing from a config string', function() { test('uses the correct values from the config string', function() { - var client = new Client("pg://brian:pass@host1:333/databasename") + var client = new Client("postgres://brian:pass@host1:333/databasename") assert.equal(client.user, 'brian') assert.equal(client.password, "pass") assert.equal(client.host, "host1") @@ -39,13 +39,22 @@ test('initializing from a config string', function() { assert.equal(client.database, "databasename") }) - test('when not including all values the defaults are used', function() { - var client = new Client("pg://host1") - assert.equal(client.user, process.env.USER) - assert.equal(client.password, null) + test('uses the correct values from the config string with space in password', function() { + var client = new Client("postgres://brian:pass word@host1:333/databasename") + assert.equal(client.user, 'brian') + assert.equal(client.password, "pass word") assert.equal(client.host, "host1") - assert.equal(client.port, 5432) - assert.equal(client.database, process.env.USER) + assert.equal(client.port, 333) + assert.equal(client.database, "databasename") + }) + + test('when not including all values the defaults are used', function() { + var client = new Client("postgres://host1") + assert.equal(client.user, process.env['PGUSER'] || process.env.USER) + assert.equal(client.password, process.env['PGPASSWORD'] || null) + assert.equal(client.host, "host1") + assert.equal(client.port, process.env['PGPORT'] || 5432) + assert.equal(client.database, process.env['PGDATABASE'] || process.env.USER) }) diff --git a/test/unit/client/escape-tests.js b/test/unit/client/escape-tests.js new file mode 100644 index 00000000..40214e03 --- /dev/null +++ b/test/unit/client/escape-tests.js @@ -0,0 +1,153 @@ +var helper = require(__dirname + '/test-helper'); + +function createClient(callback) { + var client = new Client(helper.config); + client.connect(function(err) { + return callback(client); + }); +} + +test('escapeLiteral: no special characters', function() { + createClient(function(client) { + var expected = "'hello world'"; + var actual = client.escapeLiteral('hello world'); + assert.equal(expected, actual); + client.end(); + }); +}); + +test('escapeLiteral: contains double quotes only', function() { + createClient(function(client) { + var expected = "'hello \" world'"; + var actual = client.escapeLiteral('hello " world'); + assert.equal(expected, actual); + client.end(); + }); +}); + +test('escapeLiteral: contains single quotes only', function() { + createClient(function(client) { + var expected = "'hello \'\' world'"; + var actual = client.escapeLiteral('hello \' world'); + assert.equal(expected, actual); + client.end(); + }); +}); + +test('escapeLiteral: contains backslashes only', function() { + createClient(function(client) { + var expected = " E'hello \\\\ world'"; + var actual = client.escapeLiteral('hello \\ world'); + assert.equal(expected, actual); + client.end(); + }); +}); + +test('escapeLiteral: contains single quotes and double quotes', function() { + createClient(function(client) { + var expected = "'hello '' \" world'"; + var actual = client.escapeLiteral('hello \' " world'); + assert.equal(expected, actual); + client.end(); + }); +}); + +test('escapeLiteral: contains double quotes and backslashes', function() { + createClient(function(client) { + var expected = " E'hello \\\\ \" world'"; + var actual = client.escapeLiteral('hello \\ " world'); + assert.equal(expected, actual); + client.end(); + }); +}); + +test('escapeLiteral: contains single quotes and backslashes', function() { + createClient(function(client) { + var expected = " E'hello \\\\ '' world'"; + var actual = client.escapeLiteral('hello \\ \' world'); + assert.equal(expected, actual); + client.end(); + }); +}); + +test('escapeLiteral: contains single quotes, double quotes, and backslashes', function() { + createClient(function(client) { + var expected = " E'hello \\\\ '' \" world'"; + var actual = client.escapeLiteral('hello \\ \' " world'); + assert.equal(expected, actual); + client.end(); + }); +}); + +test('escapeIdentifier: no special characters', function() { + createClient(function(client) { + var expected = '"hello world"'; + var actual = client.escapeIdentifier('hello world'); + assert.equal(expected, actual); + client.end(); + }); +}); + +test('escapeIdentifier: contains double quotes only', function() { + createClient(function(client) { + var expected = '"hello "" world"'; + var actual = client.escapeIdentifier('hello " world'); + assert.equal(expected, actual); + client.end(); + }); +}); + +test('escapeIdentifier: contains single quotes only', function() { + createClient(function(client) { + var expected = '"hello \' world"'; + var actual = client.escapeIdentifier('hello \' world'); + assert.equal(expected, actual); + client.end(); + }); +}); + +test('escapeIdentifier: contains backslashes only', function() { + createClient(function(client) { + var expected = '"hello \\ world"'; + var actual = client.escapeIdentifier('hello \\ world'); + assert.equal(expected, actual); + client.end(); + }); +}); + +test('escapeIdentifier: contains single quotes and double quotes', function() { + createClient(function(client) { + var expected = '"hello \' "" world"'; + var actual = client.escapeIdentifier('hello \' " world'); + assert.equal(expected, actual); + client.end(); + }); +}); + +test('escapeIdentifier: contains double quotes and backslashes', function() { + return createClient(function(client) { + var expected = '"hello \\ "" world"'; + var actual = client.escapeIdentifier('hello \\ " world'); + assert.equal(expected, actual); + client.end(); + return; + }); +}); + +test('escapeIdentifier: contains single quotes and backslashes', function() { + createClient(function(client) { + var expected = '"hello \\ \' world"'; + var actual = client.escapeIdentifier('hello \\ \' world'); + assert.equal(expected, actual); + client.end(); + }); +}); + +test('escapeIdentifier: contains single quotes, double quotes, and backslashes', function() { + createClient(function(client) { + var expected = '"hello \\ \' "" world"'; + var actual = client.escapeIdentifier('hello \\ \' " world'); + assert.equal(expected, actual); + client.end(); + }); +}); diff --git a/test/unit/client/query-queue-tests.js b/test/unit/client/query-queue-tests.js index cd87cfe9..62b38bd5 100644 --- a/test/unit/client/query-queue-tests.js +++ b/test/unit/client/query-queue-tests.js @@ -50,63 +50,3 @@ test('drain', function() { }); }); }); - -test('with drain paused', function() { - //mock out a fake connection - var con = new Connection({stream: "NO"}); - con.connect = function() { - con.emit('connect'); - }; - con.query = function() { - }; - - var client = new Client({connection:con}); - - client.connect(); - - var drainCount = 0; - client.on('drain', function() { - drainCount++; - }); - - test('normally unpaused', function() { - con.emit('readyForQuery'); - client.query('boom'); - assert.emits(client, 'drain', function() { - assert.equal(drainCount, 1); - }); - con.emit('readyForQuery'); - }); - - test('pausing', function() { - test('unpaused with no queries in between', function() { - client.pauseDrain(); - client.resumeDrain(); - assert.equal(drainCount, 1); - }); - - test('paused', function() { - test('resumeDrain after empty', function() { - client.pauseDrain(); - client.query('asdf'); - con.emit('readyForQuery'); - assert.equal(drainCount, 1); - client.resumeDrain(); - assert.equal(drainCount, 2); - }); - - test('resumDrain while still pending', function() { - client.pauseDrain(); - client.query('asdf'); - client.query('asdf1'); - con.emit('readyForQuery'); - client.resumeDrain(); - assert.equal(drainCount, 2); - con.emit('readyForQuery'); - assert.equal(drainCount, 3); - }); - - }); - }); - -}); diff --git a/test/unit/client/query-tests.js b/test/unit/client/query-tests.js index ad8865cd..ab367203 100644 --- a/test/unit/client/query-tests.js +++ b/test/unit/client/query-tests.js @@ -4,7 +4,7 @@ q.dateParser = require(__dirname + "/../../../lib/types").getTypeParser(1114, 't q.stringArrayParser = require(__dirname + "/../../../lib/types").getTypeParser(1009, 'text'); test("testing dateParser", function() { - assert.equal(q.dateParser("2010-12-11 09:09:04").toUTCString(),new Date("2010-12-11 09:09:04 GMT").toUTCString()); + assert.equal(q.dateParser("2010-12-11 09:09:04").toString(),new Date("2010-12-11 09:09:04").toString()); }); var testForMs = function(part, expected) { @@ -19,19 +19,19 @@ testForMs('.1', 100); testForMs('.01', 10); testForMs('.74', 740); -test("testing 2dateParser", function() { +test("testing 2dateParser on dates without timezones", function() { var actual = "2010-12-11 09:09:04.1"; - var expected = "\"2010-12-11T09:09:04.100Z\""; + var expected = JSON.stringify(new Date(2010,11,11,9,9,4,100)) assert.equal(JSON.stringify(q.dateParser(actual)),expected); }); -test("testing 2dateParser", function() { +test("testing 2dateParser on dates with timezones", function() { var actual = "2011-01-23 22:15:51.28-06"; var expected = "\"2011-01-24T04:15:51.280Z\""; assert.equal(JSON.stringify(q.dateParser(actual)),expected); }); -test("testing 2dateParser", function() { +test("testing 2dateParser on dates with huge millisecond value", function() { var actual = "2011-01-23 22:15:51.280843-06"; var expected = "\"2011-01-24T04:15:51.280Z\""; assert.equal(JSON.stringify(q.dateParser(actual)),expected); diff --git a/test/unit/client/simple-query-tests.js b/test/unit/client/simple-query-tests.js index af5a6308..19f05c18 100644 --- a/test/unit/client/simple-query-tests.js +++ b/test/unit/client/simple-query-tests.js @@ -82,7 +82,7 @@ test('executing query', function() { name: 'boom' }] }); - assert.ok(handled, "should have handlded rowDescritpion"); + assert.ok(handled, "should have handlded rowDescription"); }); test('handles dataRow messages', function() { @@ -116,7 +116,7 @@ test('executing query', function() { }); con.emit("readyForQuery"); //this would never actually happen - ['dataRow','rowDescritpion', 'commandComplete'].forEach(function(msg) { + ['dataRow','rowDescription', 'commandComplete'].forEach(function(msg) { assert.equal(con.emit(msg), false, "Should no longer be picking up '"+ msg +"' messages"); }); }); diff --git a/test/unit/client/stream-and-query-error-interaction-tests.js b/test/unit/client/stream-and-query-error-interaction-tests.js new file mode 100644 index 00000000..9b02caf8 --- /dev/null +++ b/test/unit/client/stream-and-query-error-interaction-tests.js @@ -0,0 +1,26 @@ +var helper = require(__dirname + '/test-helper'); +var Connection = require(__dirname + '/../../../lib/connection'); +var Client = require(__dirname + '/../../../lib/client'); + +test('emits end when not in query', function() { + var stream = new (require('events').EventEmitter)(); + stream.write = function() { + //NOOP + } + var client = new Client({connection: new Connection({stream: stream})}); + client.connect(assert.calls(function() { + client.query('SELECT NOW()', assert.calls(function(err, result) { + assert(err); + })); + })); + assert.emits(client, 'end'); + client.connection.emit('connect'); + process.nextTick(function() { + client.connection.emit('readyForQuery'); + assert.equal(client.queryQueue.length, 0); + assert(client.activeQuery, 'client should have issued query'); + process.nextTick(function() { + stream.emit('end'); + }); + }); +}); diff --git a/test/unit/client/typed-query-results-tests.js b/test/unit/client/typed-query-results-tests.js index baa86bd4..2f2f14f9 100644 --- a/test/unit/client/typed-query-results-tests.js +++ b/test/unit/client/typed-query-results-tests.js @@ -1,5 +1,5 @@ var helper = require(__dirname + '/test-helper'); -//http://www.postgresql.org/docs/8.4/static/datatype.html +//http://www.postgresql.org/docs/9.2/static/datatype.html test('typed results', function() { var client = helper.client(); var con = client.connection; @@ -18,20 +18,20 @@ test('typed results', function() { name: 'integer/int4', format: 'text', dataTypeID: 23, - actual: '100', - expected: 100 + actual: '2147483647', + expected: 2147483647 },{ name: 'smallint/int2', format: 'text', dataTypeID: 21, - actual: '101', - expected: 101 + actual: '32767', + expected: 32767 },{ name: 'bigint/int8', format: 'text', dataTypeID: 20, - actual: '102', - expected: 102 + actual: '9223372036854775807', + expected: '9223372036854775807' },{ name: 'oid', format: 'text', @@ -42,8 +42,8 @@ test('typed results', function() { name: 'numeric', format: 'text', dataTypeID: 1700, - actual: '12.34', - expected: 12.34 + actual: '31415926535897932384626433832795028841971693993751058.16180339887498948482045868343656381177203091798057628', + expected: '31415926535897932384626433832795028841971693993751058.16180339887498948482045868343656381177203091798057628' },{ name: 'real/float4', dataTypeID: 700, @@ -54,8 +54,8 @@ test('typed results', function() { name: 'double precision / float8', format: 'text', dataTypeID: 701, - actual: '1.2', - expected: 1.2 + actual: '12345678.12345678', + expected: 12345678.12345678 },{ name: 'boolean true', format: 'text', @@ -78,11 +78,11 @@ test('typed results', function() { name: 'timestamptz with minutes in timezone', format: 'text', dataTypeID: 1184, - actual: '2010-10-31 14:54:13.74-0530', + actual: '2010-10-31 14:54:13.74-05:30', expected: function(val) { assert.UTCDate(val, 2010, 9, 31, 20, 24, 13, 740); } - },{ + }, { name: 'timestamptz with other milisecond digits dropped', format: 'text', dataTypeID: 1184, @@ -111,6 +111,15 @@ test('typed results', function() { format: 'text', dataTypeID: 1114, actual: '2010-10-31 00:00:00', + expected: function(val) { + assert.equal(val.toUTCString(), new Date(2010, 9, 31, 0, 0, 0, 0, 0).toUTCString()); + assert.equal(val.toString(), new Date(2010, 9, 31, 0, 0, 0, 0, 0, 0).toString()); + } + },{ + name: 'date', + format: 'text', + dataTypeID: 1082, + actual: '2010-10-31', expected: function(val) { assert.UTCDate(val, 2010, 9, 31, 0, 0, 0, 0); } @@ -156,6 +165,39 @@ test('typed results', function() { } }, + { + name : 'array/char', + format : 'text', + dataTypeID: 1014, + actual: '{asdf,asdf}', + expected : function(val){ + assert.deepEqual(val, ['asdf','asdf']); + } + },{ + name : 'array/varchar', + format : 'text', + dataTypeID: 1015, + actual: '{asdf,asdf}', + expected :function(val){ + assert.deepEqual(val, ['asdf','asdf']); + } + },{ + name : 'array/text', + format : 'text', + dataTypeID: 1008, + actual: '{"hello world"}', + expected :function(val){ + assert.deepEqual(val, ['hello world']); + } + },{ + name : 'array/numeric', + format : 'text', + dataTypeID: 1231, + actual: '{1.2,3.4}', + expected :function(val){ + assert.deepEqual(val, [1.2,3.4]); + } + }, { name: 'binary-string/varchar', @@ -176,18 +218,18 @@ test('typed results', function() { actual: [0, 101], expected: 101 },{ - name: 'binary-bigint/int8', - format: 'binary', - dataTypeID: 20, - actual: [0, 0, 0, 0, 0, 0, 0, 102], - expected: 102 - },{ - name: 'binary-bigint/int8-full', - format: 'binary', - dataTypeID: 20, - actual: [1, 0, 0, 0, 0, 0, 0, 102], - expected: 72057594037928030 - },{ +// name: 'binary-bigint/int8', +// format: 'binary', +// dataTypeID: 20, +// actual: [0, 0, 0, 0, 0, 0, 0, 102], +// expected: '102' +// },{ +// name: 'binary-bigint/int8-full', +// format: 'binary', +// dataTypeID: 20, +// actual: [1, 0, 0, 0, 0, 0, 0, 102], +// expected: '72057594037928038' +// },{ name: 'binary-oid', format: 'binary', dataTypeID: 26, diff --git a/test/unit/connection-parameters/creation-tests.js b/test/unit/connection-parameters/creation-tests.js new file mode 100644 index 00000000..f0d5228d --- /dev/null +++ b/test/unit/connection-parameters/creation-tests.js @@ -0,0 +1,161 @@ +var helper = require(__dirname + '/../test-helper'); +var assert = require('assert'); +var ConnectionParameters = require(__dirname + '/../../../lib/connection-parameters'); +var defaults = require(__dirname + '/../../../lib').defaults; + +//clear process.env +for(var key in process.env) { + delete process.env[key]; +} + +test('ConnectionParameters construction', function() { + assert.ok(new ConnectionParameters(), 'with null config'); + assert.ok(new ConnectionParameters({user: 'asdf'}), 'with config object'); + assert.ok(new ConnectionParameters('postgres://localhost/postgres'), 'with connection string'); +}); + +var compare = function(actual, expected, type) { + assert.equal(actual.user, expected.user, type + ' user'); + assert.equal(actual.database, expected.database, type + ' database'); + assert.equal(actual.port, expected.port, type + ' port'); + assert.equal(actual.host, expected.host, type + ' host'); + assert.equal(actual.password, expected.password, type + ' password'); + assert.equal(actual.binary, expected.binary, type + ' binary'); +}; + +test('ConnectionParameters initializing from defaults', function() { + var subject = new ConnectionParameters(); + compare(subject, defaults, 'defaults'); + assert.ok(subject.isDomainSocket === false); +}); + +test('ConnectionParameters initializing from config', function() { + var config = { + user: 'brian', + database: 'home', + port: 7777, + password: 'pizza', + binary: true, + encoding: 'utf8', + host: 'yo', + ssl: { + asdf: 'blah' + } + }; + var subject = new ConnectionParameters(config); + compare(subject, config, 'config'); + assert.ok(subject.isDomainSocket === false); +}); + +test('initializing with unix domain socket', function() { + var subject = new ConnectionParameters('/var/run/'); + assert.ok(subject.isDomainSocket); + assert.equal(subject.host, '/var/run/'); +}); + +test('libpq connection string building', function() { + var checkForPart = function(array, part) { + assert.ok(array.indexOf(part) > -1, array.join(" ") + " did not contain " + part); + } + + test('builds simple string', function() { + var config = { + user: 'brian', + password: 'xyz', + port: 888, + host: 'localhost', + database: 'bam' + } + var subject = new ConnectionParameters(config); + subject.getLibpqConnectionString(assert.calls(function(err, constring) { + assert.isNull(err); + var parts = constring.split(" "); + checkForPart(parts, "user='brian'"); + checkForPart(parts, "password='xyz'"); + checkForPart(parts, "port='888'"); + checkForPart(parts, "hostaddr=127.0.0.1"); + checkForPart(parts, "dbname='bam'"); + })); + }); + + test('builds dns string', function() { + var config = { + user: 'brian', + password: 'asdf', + port: 5432, + host: 'localhost' + }; + var subject = new ConnectionParameters(config); + subject.getLibpqConnectionString(assert.calls(function(err, constring) { + assert.isNull(err); + var parts = constring.split(" "); + checkForPart(parts, "user='brian'"); + checkForPart(parts, "hostaddr=127.0.0.1"); + })); + }); + + test('error when dns fails', function() { + var config = { + user: 'brian', + password: 'asf', + port: 5432, + host: 'asdlfkjasldfkksfd#!$!!!!..com' + }; + var subject = new ConnectionParameters(config); + subject.getLibpqConnectionString(assert.calls(function(err, constring) { + assert.ok(err); + assert.isNull(constring) + })); + }); + + test('connecting to unix domain socket', function() { + var config = { + user: 'brian', + password: 'asf', + port: 5432, + host: '/tmp/' + }; + var subject = new ConnectionParameters(config); + subject.getLibpqConnectionString(assert.calls(function(err, constring) { + assert.isNull(err); + var parts = constring.split(" "); + checkForPart(parts, "user='brian'"); + checkForPart(parts, "host=/tmp/"); + })); + }); + + test("encoding can be specified by config", function() { + var config = { + client_encoding: "utf-8" + } + var subject = new ConnectionParameters(config); + subject.getLibpqConnectionString(assert.calls(function(err, constring) { + assert.isNull(err); + var parts = constring.split(" "); + checkForPart(parts, "client_encoding='utf-8'"); + })); + }) + + test('password contains < and/or > characters', function () { + return false; + var sourceConfig = { + user:'brian', + password: 'helloe', + port: 5432, + host: 'localhost', + database: 'postgres' + } + var connectionString = 'postgres://' + sourceConfig.user + ':' + sourceConfig.password + '@' + sourceConfig.host + ':' + sourceConfig.port + '/' + sourceConfig.database; + var subject = new ConnectionParameters(connectionString); + assert.equal(subject.password, sourceConfig.password); + }); + + test('password contains weird characters', function() { + var strang = 'postgres://my first name:is&%awesome!@localhost:9000'; + var subject = new ConnectionParameters(strang); + assert.equal(subject.user, 'my first name'); + assert.equal(subject.password, 'is&%awesome!'); + assert.equal(subject.host, 'localhost'); + }); + +}); diff --git a/test/unit/connection-parameters/environment-variable-tests.js b/test/unit/connection-parameters/environment-variable-tests.js new file mode 100644 index 00000000..a5fa5d68 --- /dev/null +++ b/test/unit/connection-parameters/environment-variable-tests.js @@ -0,0 +1,82 @@ +var helper = require(__dirname + '/../test-helper'); +var assert = require('assert'); +var ConnectionParameters = require(__dirname + '/../../../lib/connection-parameters'); +var defaults = require(__dirname + '/../../../lib').defaults; + +//clear process.env +var realEnv = {}; +for(var key in process.env) { + realEnv[key] = process.env[key]; + delete process.env[key]; +} + +test('ConnectionParameters initialized from environment variables', function(t) { + process.env['PGHOST'] = 'local'; + process.env['PGUSER'] = 'bmc2'; + process.env['PGPORT'] = 7890; + process.env['PGDATABASE'] = 'allyerbase'; + process.env['PGPASSWORD'] = 'open'; + + var subject = new ConnectionParameters(); + assert.equal(subject.host, 'local', 'env host'); + assert.equal(subject.user, 'bmc2', 'env user'); + assert.equal(subject.port, 7890, 'env port'); + assert.equal(subject.database, 'allyerbase', 'env database'); + assert.equal(subject.password, 'open', 'env password'); +}); + +test('ConnectionParameters initialized from mix', function(t) { + delete process.env['PGPASSWORD']; + delete process.env['PGDATABASE']; + var subject = new ConnectionParameters({ + user: 'testing', + database: 'zugzug' + }); + assert.equal(subject.host, 'local', 'env host'); + assert.equal(subject.user, 'testing', 'config user'); + assert.equal(subject.port, 7890, 'env port'); + assert.equal(subject.database, 'zugzug', 'config database'); + assert.equal(subject.password, defaults.password, 'defaults password'); +}); + +//clear process.env +for(var key in process.env) { + delete process.env[key]; +} + +test('connection string parsing', function(t) { + var string = 'postgres://brian:pw@boom:381/lala'; + var subject = new ConnectionParameters(string); + assert.equal(subject.host, 'boom', 'string host'); + assert.equal(subject.user, 'brian', 'string user'); + assert.equal(subject.password, 'pw', 'string password'); + assert.equal(subject.port, 381, 'string port'); + assert.equal(subject.database, 'lala', 'string database'); +}); + +test('connection string parsing - ssl', function(t) { + var string = 'postgres://brian:pw@boom:381/lala?ssl=true'; + var subject = new ConnectionParameters(string); + assert.equal(subject.ssl, true, 'ssl'); + + string = 'postgres://brian:pw@boom:381/lala?ssl=1'; + subject = new ConnectionParameters(string); + assert.equal(subject.ssl, true, 'ssl'); + + string = 'postgres://brian:pw@boom:381/lala?other&ssl=true'; + subject = new ConnectionParameters(string); + assert.equal(subject.ssl, true, 'ssl'); + + string = 'postgres://brian:pw@boom:381/lala?ssl=0'; + subject = new ConnectionParameters(string); + assert.equal(!!subject.ssl, false, 'ssl'); + + string = 'postgres://brian:pw@boom:381/lala'; + subject = new ConnectionParameters(string); + assert.equal(!!subject.ssl, false, 'ssl'); +}); + +//restore process.env +for(var key in realEnv) { + process.env[key] = realEnv[key]; +} diff --git a/test/unit/connection/error-tests.js b/test/unit/connection/error-tests.js index bccffac4..98eb20a8 100644 --- a/test/unit/connection/error-tests.js +++ b/test/unit/connection/error-tests.js @@ -1,10 +1,30 @@ var helper = require(__dirname + '/test-helper'); var Connection = require(__dirname + '/../../../lib/connection'); -var con = new Connection({stream: new MemoryStream()}); test("connection emits stream errors", function() { + var con = new Connection({stream: new MemoryStream()}); assert.emits(con, 'error', function(err) { assert.equal(err.message, "OMG!"); }); con.connect(); con.stream.emit('error', new Error("OMG!")); }); + +test('connection emits ECONNRESET errors during normal operation', function() { + var con = new Connection({stream: new MemoryStream()}); + con.connect(); + assert.emits(con, 'error', function(err) { + assert.equal(err.code, 'ECONNRESET'); + }); + var e = new Error('Connection Reset'); + e.code = 'ECONNRESET'; + con.stream.emit('error', e); +}); + +test('connection does not emit ECONNRESET errors during disconnect', function() { + var con = new Connection({stream: new MemoryStream()}); + con.connect(); + var e = new Error('Connection Reset'); + e.code = 'ECONNRESET'; + con.end(); + con.stream.emit('error', e); +}); diff --git a/test/unit/connection/inbound-parser-tests.js b/test/unit/connection/inbound-parser-tests.js index 13e6fd9e..55d71d57 100644 --- a/test/unit/connection/inbound-parser-tests.js +++ b/test/unit/connection/inbound-parser-tests.js @@ -1,4 +1,5 @@ require(__dirname+'/test-helper'); +return false; var Connection = require(__dirname + '/../../../lib/connection'); var buffers = require(__dirname + '/../../test-buffers'); var PARSE = function(buffer) { diff --git a/test/unit/connection/outbound-sending-tests.js b/test/unit/connection/outbound-sending-tests.js index d711a9a1..2d5c0a04 100644 --- a/test/unit/connection/outbound-sending-tests.js +++ b/test/unit/connection/outbound-sending-tests.js @@ -23,6 +23,8 @@ test("sends startup message", function() { .addCString('brian') .addCString('database') .addCString('bang') + .addCString('client_encoding') + .addCString("'utf-8'") .addCString('').join(true)) }); @@ -94,7 +96,7 @@ test('bind messages', function() { con.bind({ portal: 'bang', statement: 'woo', - values: [1, 'hi', null, 'zing'] + values: ['1', 'hi', null, 'zing'] }); var expectedBuffer = new BufferList() .addCString('bang') //portal name diff --git a/test/unit/copystream/copyfrom-tests.js b/test/unit/copystream/copyfrom-tests.js new file mode 100644 index 00000000..7b96049e --- /dev/null +++ b/test/unit/copystream/copyfrom-tests.js @@ -0,0 +1,99 @@ +var helper = require(__dirname + '/../test-helper'); +var CopyFromStream = require(__dirname + '/../../../lib/copystream').CopyFromStream; +var ConnectionImitation = function () { + this.send = 0; + this.hasToBeSend = 0; + this.finished = 0; +}; +ConnectionImitation.prototype = { + endCopyFrom: function () { + assert.ok(this.finished++ === 0, "end shoud be called only once"); + assert.equal(this.send, this.hasToBeSend, "at the moment of the end all data has to be sent"); + }, + sendCopyFromChunk: function (chunk) { + this.send += chunk.length; + return true; + }, + updateHasToBeSend: function (chunk) { + this.hasToBeSend += chunk.length; + return chunk; + } +}; +var buf1 = new Buffer("asdfasd"), + buf2 = new Buffer("q03r90arf0aospd;"), + buf3 = new Buffer(542), + buf4 = new Buffer("93jfemialfjkasjlfas"); + +test('CopyFromStream, start streaming before data, end after data. no drain event', function () { + var stream = new CopyFromStream(); + var conn = new ConnectionImitation(); + stream.on('drain', function () { + assert.ok(false, "there has not be drain event"); + }); + stream.startStreamingToConnection(conn); + assert.ok(stream.write(conn.updateHasToBeSend(buf1))); + assert.ok(stream.write(conn.updateHasToBeSend(buf2))); + assert.ok(stream.write(conn.updateHasToBeSend(buf3))); + assert.ok(stream.writable, "stream has to be writable"); + stream.end(conn.updateHasToBeSend(buf4)); + assert.ok(!stream.writable, "stream has not to be writable"); + stream.end(); + assert.equal(conn.hasToBeSend, conn.send); +}); +test('CopyFromStream, start streaming after end, end after data. drain event', function () { + var stream = new CopyFromStream(); + assert.emits(stream, 'drain', function() {}, 'drain have to be emitted'); + var conn = new ConnectionImitation() + assert.ok(!stream.write(conn.updateHasToBeSend(buf1))); + assert.ok(!stream.write(conn.updateHasToBeSend(buf2))); + assert.ok(!stream.write(conn.updateHasToBeSend(buf3))); + assert.ok(stream.writable, "stream has to be writable"); + stream.end(conn.updateHasToBeSend(buf4)); + assert.ok(!stream.writable, "stream has not to be writable"); + stream.end(); + stream.startStreamingToConnection(conn); + assert.equal(conn.hasToBeSend, conn.send); +}); +test('CopyFromStream, start streaming between data chunks. end after data. drain event', function () { + var stream = new CopyFromStream(); + var conn = new ConnectionImitation() + assert.emits(stream, 'drain', function() {}, 'drain have to be emitted'); + stream.write(conn.updateHasToBeSend(buf1)); + stream.write(conn.updateHasToBeSend(buf2)); + stream.startStreamingToConnection(conn); + stream.write(conn.updateHasToBeSend(buf3)); + assert.ok(stream.writable, "stream has to be writable"); + stream.end(conn.updateHasToBeSend(buf4)); + assert.equal(conn.hasToBeSend, conn.send); + assert.ok(!stream.writable, "stream has not to be writable"); + stream.end(); +}); +test('CopyFromStream, start sreaming before end. end stream with data. drain event', function () { + var stream = new CopyFromStream(); + var conn = new ConnectionImitation() + assert.emits(stream, 'drain', function() {}, 'drain have to be emitted'); + stream.write(conn.updateHasToBeSend(buf1)); + stream.write(conn.updateHasToBeSend(buf2)); + stream.write(conn.updateHasToBeSend(buf3)); + stream.startStreamingToConnection(conn); + assert.ok(stream.writable, "stream has to be writable"); + stream.end(conn.updateHasToBeSend(buf4)); + assert.equal(conn.hasToBeSend, conn.send); + assert.ok(!stream.writable, "stream has not to be writable"); + stream.end(); +}); +test('CopyFromStream, start streaming after end. end with data. drain event', function(){ + var stream = new CopyFromStream(); + var conn = new ConnectionImitation() + assert.emits(stream, 'drain', function() {}, 'drain have to be emitted'); + stream.write(conn.updateHasToBeSend(buf1)); + stream.write(conn.updateHasToBeSend(buf2)); + stream.write(conn.updateHasToBeSend(buf3)); + stream.startStreamingToConnection(conn); + assert.ok(stream.writable, "stream has to be writable"); + stream.end(conn.updateHasToBeSend(buf4)); + stream.startStreamingToConnection(conn); + assert.equal(conn.hasToBeSend, conn.send); + assert.ok(!stream.writable, "stream has not to be writable"); + stream.end(); +}); diff --git a/test/unit/copystream/copyto-tests.js b/test/unit/copystream/copyto-tests.js new file mode 100644 index 00000000..7a6255b7 --- /dev/null +++ b/test/unit/copystream/copyto-tests.js @@ -0,0 +1,122 @@ +var helper = require(__dirname + '/../test-helper'); +var CopyToStream = require(__dirname + '/../../../lib/copystream').CopyToStream; +var DataCounter = function () { + this.sendBytes = 0; + this.recievedBytes = 0; +}; +DataCounter.prototype = { + send: function (buf) { + this.sendBytes += buf.length; + return buf; + }, + recieve: function (chunk) { + this.recievedBytes += chunk.length; + }, + assert: function () { + assert.equal(this.sendBytes, this.recievedBytes, "data bytes send and recieved has to match"); + } +}; +var buf1 = new Buffer("asdfasd"), + buf2 = new Buffer("q03r90arf0aospd;"), + buf3 = new Buffer(542), + buf4 = new Buffer("93jfemialfjkasjlfas"); +test('CopyToStream simple', function () { + var stream = new CopyToStream(), + dc = new DataCounter(); + assert.emits(stream, 'end', function () {}, ''); + stream.on('data', dc.recieve.bind(dc)); + stream.handleChunk(dc.send(buf1)); + stream.handleChunk(dc.send(buf2)); + stream.handleChunk(dc.send(buf3)); + stream.handleChunk(dc.send(buf4)); + dc.assert(); + stream.close(); +}); +test('CopyToStream pause/resume/close', function () { + var stream = new CopyToStream(), + dc = new DataCounter(); + stream.on('data', dc.recieve.bind(dc)); + assert.emits(stream, 'end', function () {}, 'stream has to emit end after closing'); + stream.pause(); + stream.handleChunk(dc.send(buf1)); + stream.handleChunk(dc.send(buf2)); + stream.handleChunk(dc.send(buf3)); + assert.equal(dc.recievedBytes, 0); + stream.resume(); + dc.assert(); + stream.handleChunk(dc.send(buf2)); + dc.assert(); + stream.handleChunk(dc.send(buf3)); + dc.assert(); + stream.pause(); + stream.handleChunk(dc.send(buf4)); + assert(dc.sendBytes - dc.recievedBytes, buf4.length, "stream has not emit, data while it is in paused state"); + stream.resume(); + dc.assert(); + stream.close(); +}); +test('CopyToStream error', function () { + var stream = new CopyToStream(), + dc = new DataCounter(); + stream.on('data', dc.recieve.bind(dc)); + assert.emits(stream, 'error', function () {}, 'stream has to emit error event, when error method called'); + stream.handleChunk(dc.send(buf1)); + stream.handleChunk(dc.send(buf2)); + stream.error(new Error('test error')); +}); +test('CopyToStream do not emit anything while paused', function () { + var stream = new CopyToStream(); + stream.on('data', function () { + assert.ok(false, "stream has not emit data when paused"); + }); + stream.on('end', function () { + assert.ok(false, "stream has not emit end when paused"); + }); + stream.on('error', function () { + assert.ok(false, "stream has not emit end when paused"); + }); + stream.pause(); + stream.handleChunk(buf2); + stream.close(); + stream.error(); +}); +test('CopyToStream emit data and error after resume', function () { + var stream = new CopyToStream(), + paused; + stream.on('data', function () { + assert.ok(!paused, "stream has not emit data when paused"); + }); + stream.on('end', function () { + assert.ok(!paused, "stream has not emit end when paused"); + }); + stream.on('error', function () { + assert.ok(!paused, "stream has not emit end when paused"); + }); + paused = true; + stream.pause(); + stream.handleChunk(buf2); + stream.error(); + paused = false; + stream.resume(); +}); +test('CopyToStream emit data and end after resume', function () { + var stream = new CopyToStream(), + paused; + stream.on('data', function () { + assert.ok(!paused, "stream has not emit data when paused"); + }); + stream.on('end', function () { + assert.ok(!paused, "stream has not emit end when paused"); + }); + stream.on('error', function () { + assert.ok(!paused, "stream has not emit end when paused"); + }); + paused = true; + stream.pause(); + stream.handleChunk(buf2); + stream.close(); + paused = false; + stream.resume(); +}); + + diff --git a/test/unit/pool/basic-tests.js b/test/unit/pool/basic-tests.js new file mode 100644 index 00000000..499711f6 --- /dev/null +++ b/test/unit/pool/basic-tests.js @@ -0,0 +1,179 @@ +var util = require('util'); +var EventEmitter = require('events').EventEmitter; + +var libDir = __dirname + '/../../../lib'; +var defaults = require(libDir + '/defaults'); +var pools = require(libDir + '/pool'); +var poolId = 0; + +require(__dirname + '/../../test-helper'); + +var FakeClient = function() { + EventEmitter.call(this); +} + +util.inherits(FakeClient, EventEmitter); + +FakeClient.prototype.connect = function(cb) { + process.nextTick(cb); +} + +FakeClient.prototype.end = function() { + this.endCalled = true; +} + +//Hangs the event loop until 'end' is called on client +var HangingClient = function(config) { + EventEmitter.call(this); + this.config = config; +} + +util.inherits(HangingClient, EventEmitter); + +HangingClient.prototype.connect = function(cb) { + this.intervalId = setInterval(function() { + console.log('hung client...'); + }, 1000); + process.nextTick(cb); +} + +HangingClient.prototype.end = function() { + clearInterval(this.intervalId); +} + +pools.Client = FakeClient; + +test('no pools exist', function() { + assert.empty(Object.keys(pools.all)); +}); + +test('pool creates pool on miss', function() { + var p = pools.getOrCreate(); + assert.ok(p); + assert.equal(Object.keys(pools.all).length, 1); + var p2 = pools.getOrCreate(); + assert.equal(p, p2); + assert.equal(Object.keys(pools.all).length, 1); + var p3 = pools.getOrCreate("postgres://postgres:password@localhost:5432/postgres"); + assert.notEqual(p, p3); + assert.equal(Object.keys(pools.all).length, 2); +}); + +test('pool follows defaults', function() { + var p = pools.getOrCreate(poolId++); + for(var i = 0; i < 100; i++) { + p.acquire(function(err, client) { + }); + } + assert.equal(p.getPoolSize(), defaults.poolSize); +}); + +test('pool#connect with 3 parameters', function() { + var p = pools.getOrCreate(poolId++); + var tid = setTimeout(function() { + throw new Error("Connection callback was never called"); + }, 100); + p.connect(function(err, client, done) { + clearTimeout(tid); + assert.ifError(err, null); + assert.ok(client); + assert.equal(p.availableObjectsCount(), 0); + assert.equal(p.getPoolSize(), 1); + client.emit('drain'); + assert.equal(p.availableObjectsCount(), 0); + assert.equal(p.getPoolSize(), 1); + done(); + assert.equal(p.availableObjectsCount(), 1); + assert.equal(p.getPoolSize(), 1); + p.destroyAllNow(); + }); +}); + +test('on client error, client is removed from pool', function() { + var p = pools.getOrCreate(poolId++); + p.connect(assert.success(function(client, done) { + assert.ok(client); + done(); + assert.equal(p.availableObjectsCount(), 1); + assert.equal(p.getPoolSize(), 1); + //error event fires on pool BEFORE pool.destroy is called with client + assert.emits(p, 'error', function(err) { + assert.equal(err.message, 'test error'); + assert.ok(!client.endCalled); + assert.equal(p.availableObjectsCount(), 1); + assert.equal(p.getPoolSize(), 1); + //after we're done in our callback, pool.destroy is called + process.nextTick(function() { + assert.ok(client.endCalled); + assert.equal(p.availableObjectsCount(), 0); + assert.equal(p.getPoolSize(), 0); + p.destroyAllNow(); + }); + }); + client.emit('error', new Error('test error')); + })); +}); + +test('pool with connection error on connection', function() { + pools.Client = function() { + return { + connect: function(cb) { + process.nextTick(function() { + cb(new Error('Could not connect')); + }); + } + }; + } + test('two parameters', function() { + var p = pools.getOrCreate(poolId++); + p.connect(assert.calls(function(err, client) { + assert.ok(err); + assert.equal(client, null); + //client automatically removed + assert.equal(p.availableObjectsCount(), 0); + assert.equal(p.getPoolSize(), 0); + })); + }); + test('three parameters', function() { + var p = pools.getOrCreate(poolId++); + var tid = setTimeout(function() { + assert.fail('Did not call connect callback'); + }, 100); + p.connect(function(err, client, done) { + clearTimeout(tid); + assert.ok(err); + assert.equal(client, null); + //done does nothing + done(new Error('OH NOOOO')); + done(); + assert.equal(p.availableObjectsCount(), 0); + assert.equal(p.getPoolSize(), 0); + }); + }); +}); + +test('returnning an error to done()', function() { + var p = pools.getOrCreate(poolId++); + pools.Client = FakeClient; + p.connect(function(err, client, done) { + assert.equal(err, null); + assert(client); + done(new Error("BROKEN")); + assert.equal(p.availableObjectsCount(), 0); + assert.equal(p.getPoolSize(), 0); + }); +}); + +test('fetching pool by object', function() { + var p = pools.getOrCreate({ + user: 'brian', + host: 'localhost', + password: 'password' + }); + var p2 = pools.getOrCreate({ + user: 'brian', + host: 'localhost', + password: 'password' + }); + assert.equal(p, p2); +}); diff --git a/test/unit/pool/timeout-tests.js b/test/unit/pool/timeout-tests.js new file mode 100644 index 00000000..0fc96b2d --- /dev/null +++ b/test/unit/pool/timeout-tests.js @@ -0,0 +1,42 @@ +var util = require('util'); +var EventEmitter = require('events').EventEmitter; + +var libDir = __dirname + '/../../../lib'; +var defaults = require(libDir + '/defaults'); +var pools = require(libDir + '/pool'); +var poolId = 0; + +require(__dirname + '/../../test-helper'); + +var FakeClient = function() { + EventEmitter.call(this); +} + +util.inherits(FakeClient, EventEmitter); + +FakeClient.prototype.connect = function(cb) { + process.nextTick(cb); +} + +FakeClient.prototype.end = function() { + this.endCalled = true; +} + +defaults.poolIdleTimeout = 10; +defaults.reapIntervalMillis = 10; + +test('client times out from idle', function() { + pools.Client = FakeClient; + var p = pools.getOrCreate(poolId++); + p.connect(function(err, client, done) { + done(); + }); + process.nextTick(function() { + assert.equal(p.availableObjectsCount(), 1); + assert.equal(p.getPoolSize(), 1); + setTimeout(function() { + assert.equal(p.availableObjectsCount(), 0); + assert.equal(p.getPoolSize(), 0); + }, 50); + }); +}); diff --git a/test/unit/test-helper.js b/test/unit/test-helper.js index f57b766a..3bcd21e0 100644 --- a/test/unit/test-helper.js +++ b/test/unit/test-helper.js @@ -6,6 +6,7 @@ MemoryStream = function() { this.packets = []; }; + helper.sys.inherits(MemoryStream, EventEmitter); var p = MemoryStream.prototype; @@ -14,6 +15,8 @@ p.write = function(packet) { this.packets.push(packet); }; +p.writable = true; + createClient = function() { var stream = new MemoryStream(); stream.readyState = "open"; diff --git a/test/unit/utils-tests.js b/test/unit/utils-tests.js index f7fa07c5..30ff9d28 100644 --- a/test/unit/utils-tests.js +++ b/test/unit/utils-tests.js @@ -5,7 +5,7 @@ var defaults = require(__dirname + "/../../lib").defaults; //this tests the monkey patching //to ensure comptability with older //versions of node -test("EventEmitter.once", function() { +test("EventEmitter.once", function(t) { //an event emitter var stream = new MemoryStream(); @@ -21,126 +21,27 @@ test("EventEmitter.once", function() { }); -test('normalizing connection info', function() { - test('with objects', function() { - test('empty object uses defaults', function() { - var input = {}; - var output = utils.normalizeConnectionInfo(input); - assert.equal(output.user, defaults.user); - assert.equal(output.database, defaults.database); - assert.equal(output.port, defaults.port); - assert.equal(output.host, defaults.host); - assert.equal(output.password, defaults.password); - }); +test('types are exported', function() { + var pg = require(__dirname + '/../../lib/index'); + assert.ok(pg.types); +}); - test('full object ignores defaults', function() { - var input = { - user: 'test1', - database: 'test2', - port: 'test3', - host: 'test4', - password: 'test5' - }; - assert.equal(utils.normalizeConnectionInfo(input), input); - }); +test('normalizing query configs', function() { + var config + var callback = function () {} - test('connection string', function() { - test('non-unix socket', function() { - test('uses defaults', function() { - var input = ""; - var output = utils.normalizeConnectionInfo(input); - assert.equal(output.user, defaults.user); - assert.equal(output.database, defaults.database); - assert.equal(output.port, defaults.port); - assert.equal(output.host, defaults.host); - assert.equal(output.password, defaults.password); - }); - test('ignores defaults if string contains them all', function() { - var input = "tcp://user1:pass2@host3:3333/databaseName"; - var output = utils.normalizeConnectionInfo(input); - assert.equal(output.user, 'user1'); - assert.equal(output.database, 'databaseName'); - assert.equal(output.port, 3333); - assert.equal(output.host, 'host3'); - assert.equal(output.password, 'pass2'); - }) - }); + config = utils.normalizeQueryConfig({text: 'TEXT'}) + assert.same(config, {text: 'TEXT'}) - test('unix socket', function() { - test('uses defaults', function() { - var input = "/var/run/postgresql"; - var output = utils.normalizeConnectionInfo(input); - assert.equal(output.user, process.env.USER); - assert.equal(output.host, '/var/run/postgresql'); - assert.equal(output.database, process.env.USER); - assert.equal(output.port, 5432); - }); - - test('uses overridden defaults', function() { - defaults.host = "/var/run/postgresql"; - defaults.user = "boom"; - defaults.password = "yeah"; - defaults.port = 1234; - var output = utils.normalizeConnectionInfo("asdf"); - assert.equal(output.user, "boom"); - assert.equal(output.password, "yeah"); - assert.equal(output.port, 1234); - assert.equal(output.host, "/var/run/postgresql"); - }) - }) - }) - }) -}) - -test('libpq connection string building', function() { - var checkForPart = function(array, part) { - assert.ok(array.indexOf(part) > -1, array.join(" ") + " did not contain " + part); - } - - test('builds simple string', function() { - var config = { - user: 'brian', - password: 'xyz', - port: 888, - host: 'localhost', - database: 'bam' - } - utils.buildLibpqConnectionString(config, assert.calls(function(err, constring) { - assert.isNull(err) - var parts = constring.split(" "); - checkForPart(parts, "user='brian'") - checkForPart(parts, "password='xyz'") - checkForPart(parts, "port='888'") - checkForPart(parts, "hostaddr=127.0.0.1") - checkForPart(parts, "dbname='bam'") - })) - }) - test('builds dns string', function() { - var config = { - user: 'brian', - password: 'asdf', - port: 5432, - host: 'localhost' - } - utils.buildLibpqConnectionString(config, assert.calls(function(err, constring) { - assert.isNull(err); - var parts = constring.split(" "); - checkForPart(parts, "user='brian'") - checkForPart(parts, "hostaddr=127.0.0.1") - })) - }) - - test('error when dns fails', function() { - var config = { - user: 'brian', - password: 'asf', - port: 5432, - host: 'asdlfkjasldfkksfd#!$!!!!..com' - } - utils.buildLibpqConnectionString(config, assert.calls(function(err, constring) { - assert.ok(err); - assert.isNull(constring) - })) - }) + config = utils.normalizeQueryConfig({text: 'TEXT'}, [10]) + assert.deepEqual(config, {text: 'TEXT', values: [10]}) + config = utils.normalizeQueryConfig({text: 'TEXT', values: [10]}) + assert.deepEqual(config, {text: 'TEXT', values: [10]}) + + config = utils.normalizeQueryConfig('TEXT', [10], callback) + assert.deepEqual(config, {text: 'TEXT', values: [10], callback: callback}) + + config = utils.normalizeQueryConfig({text: 'TEXT', values: [10]}, callback) + assert.deepEqual(config, {text: 'TEXT', values: [10], callback: callback}) }) diff --git a/test/unit/writer-tests.js b/test/unit/writer-tests.js deleted file mode 100644 index e5ade320..00000000 --- a/test/unit/writer-tests.js +++ /dev/null @@ -1,196 +0,0 @@ -require(__dirname + "/test-helper"); -var Writer = require(__dirname + "/../../lib/writer"); - -test('adding int32', function() { - var testAddingInt32 = function(int, expectedBuffer) { - test('writes ' + int, function() { - var subject = new Writer(); - var result = subject.addInt32(int).join(); - assert.equalBuffers(result, expectedBuffer); - }) - } - - testAddingInt32(0, [0, 0, 0, 0]); - testAddingInt32(1, [0, 0, 0, 1]); - testAddingInt32(256, [0, 0, 1, 0]); - test('writes largest int32', function() { - //todo need to find largest int32 when I have internet access - return false; - }) - - test('writing multiple int32s', function() { - var subject = new Writer(); - var result = subject.addInt32(1).addInt32(10).addInt32(0).join(); - assert.equalBuffers(result, [0, 0, 0, 1, 0, 0, 0, 0x0a, 0, 0, 0, 0]); - }) - - test('having to resize the buffer', function() { - test('after resize correct result returned', function() { - var subject = new Writer(10); - subject.addInt32(1).addInt32(1).addInt32(1) - assert.equalBuffers(subject.join(), [0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1]) - }) - }) -}) - -test('int16', function() { - test('writes 0', function() { - var subject = new Writer(); - var result = subject.addInt16(0).join(); - assert.equalBuffers(result, [0,0]); - }) - - test('writes 400', function() { - var subject = new Writer(); - var result = subject.addInt16(400).join(); - assert.equalBuffers(result, [1, 0x90]) - }) - - test('writes many', function() { - var subject = new Writer(); - var result = subject.addInt16(0).addInt16(1).addInt16(2).join(); - assert.equalBuffers(result, [0, 0, 0, 1, 0, 2]) - }) - - test('resizes if internal buffer fills up', function() { - var subject = new Writer(3); - var result = subject.addInt16(2).addInt16(3).join(); - assert.equalBuffers(result, [0, 2, 0, 3]) - }) - -}) - -test('cString', function() { - test('writes empty cstring', function() { - var subject = new Writer(); - var result = subject.addCString().join(); - assert.equalBuffers(result, [0]) - }) - - test('writes two empty cstrings', function() { - var subject = new Writer(); - var result = subject.addCString("").addCString("").join(); - assert.equalBuffers(result, [0, 0]) - }) - - - test('writes non-empty cstring', function() { - var subject = new Writer(); - var result = subject.addCString("!!!").join(); - assert.equalBuffers(result, [33, 33, 33, 0]); - }) - - test('resizes if reached end', function() { - var subject = new Writer(3); - var result = subject.addCString("!!!").join(); - assert.equalBuffers(result, [33, 33, 33, 0]); - }) - - test('writes multiple cstrings', function() { - var subject = new Writer(); - var result = subject.addCString("!").addCString("!").join(); - assert.equalBuffers(result, [33, 0, 33, 0]); - }) - -}) - -test('writes char', function() { - var subject = new Writer(2); - var result = subject.addChar('a').addChar('b').addChar('c').join(); - assert.equalBuffers(result, [0x61, 0x62, 0x63]) -}) - -test('gets correct byte length', function() { - var subject = new Writer(5); - assert.equal(subject.getByteLength(), 0) - subject.addInt32(0) - assert.equal(subject.getByteLength(), 4) - subject.addCString("!") - assert.equal(subject.getByteLength(), 6) -}) - -test('can add arbitrary buffer to the end', function() { - var subject = new Writer(4); - subject.addCString("!!!") - var result = subject.add(Buffer("@@@")).join(); - assert.equalBuffers(result, [33, 33, 33, 0, 0x40, 0x40, 0x40]); -}) - -test('can write normal string', function() { - var subject = new Writer(4); - var result = subject.addString("!").join(); - assert.equalBuffers(result, [33]); - test('can write cString too', function() { - var result = subject.addCString("!").join(); - assert.equalBuffers(result, [33, 33, 0]); - test('can resize', function() { - var result = subject.addString("!!").join(); - assert.equalBuffers(result, [33, 33, 0, 33, 33]); - }) - - }) - -}) - - -test('clearing', function() { - var subject = new Writer(); - subject.addCString("@!!#!#"); - subject.addInt32(10401); - subject.clear(); - assert.equalBuffers(subject.join(), []); - test('can keep writing', function() { - var joinedResult = subject.addCString("!").addInt32(9).addInt16(2).join(); - assert.equalBuffers(joinedResult, [33, 0, 0, 0, 0, 9, 0, 2]); - test('flush', function() { - var flushedResult = subject.flush(); - test('returns result', function() { - assert.equalBuffers(flushedResult, [33, 0, 0, 0, 0, 9, 0, 2]) - }) - test('clears the writer', function() { - assert.equalBuffers(subject.join(), []) - assert.equalBuffers(subject.flush(), []) - }) - }) - }) - -}) - -test("resizing to much larger", function() { - var subject = new Writer(2); - var string = "!!!!!!!!"; - var result = subject.addCString(string).flush(); - assert.equalBuffers(result, [33, 33, 33, 33, 33, 33, 33, 33, 0]) -}) - -test("flush", function() { - test('added as a hex code to a full writer', function() { - var subject = new Writer(2); - var result = subject.addCString("!").flush(0x50) - assert.equalBuffers(result, [0x50, 0, 0, 0, 6, 33, 0]); - }) - - test('added as a hex code to a non-full writer', function() { - var subject = new Writer(10).addCString("!"); - var joinedResult = subject.join(0x50); - var result = subject.flush(0x50); - assert.equalBuffers(result, [0x50, 0, 0, 0, 6, 33, 0]); - }) - - test('added as a hex code to a buffer which requires resizing', function() { - var result = new Writer(2).addCString("!!!!!!!!").flush(0x50); - assert.equalBuffers(result, [0x50, 0, 0, 0, 0x0D, 33, 33, 33, 33, 33, 33, 33, 33, 0]); - }) -}) - -test("header", function() { - test('adding two packets with headers', function() { - var subject = new Writer(10).addCString("!"); - subject.addHeader(0x50); - subject.addCString("!!"); - subject.addHeader(0x40); - subject.addCString("!"); - var result = subject.flush(0x10); - assert.equalBuffers(result, [0x50, 0, 0, 0, 6, 33, 0, 0x40, 0, 0, 0, 7, 33, 33, 0, 0x10, 0, 0, 0, 6, 33, 0 ]); - }) -})