mirror of https://github.com/brianc/node-postgres.git
synced 2025-12-08 20:16:25 +00:00
commit 7b703ccfed

.travis.yml (6 lines, new file)
@@ -0,0 +1,6 @@
language: node_js
node_js:
  - 0.8
  - "0.10"
before_script:
  - node script/create-test-tables.js pg://postgres@127.0.0.1:5432/postgres
Makefile (53 lines)
@@ -1,45 +1,70 @@
SHELL := /bin/bash

user=postgres
password=1234
host=localhost
port=5432
database=postgres
verbose=false
connectionString=postgres://

params := -u $(user) --password $(password) -p $(port) -d $(database) -h $(host) --verbose $(verbose)
params := $(connectionString)

node-command := xargs -n 1 -I file node file $(params)

.PHONY : test test-connection test-integration bench test-native build/default/binding.node
.PHONY : test test-connection test-integration bench test-native \
	build/default/binding.node jshint upgrade-pg publish

all:
	npm install

help:
	@echo "make prepare-test-db [connectionString=postgres://<your connection string>]"
	@echo "make test-all [connectionString=postgres://<your connection string>]"

test: test-unit

test-all: test-unit test-integration test-native test-binary
test-all: jshint test-unit test-integration test-native test-binary

test-travis: test-all upgrade-pg
	@make test-all connectionString=postgres://postgres@localhost:5433/postgres

upgrade-pg:
	@chmod 755 script/travis-pg-9.2-install.sh
	@./script/travis-pg-9.2-install.sh

bench:
	@find benchmark -name "*-bench.js" | $(node-command)

build/default/binding.node:
	@node-waf configure build
	@node-gyp rebuild

test-unit:
	@find test/unit -name "*-tests.js" | $(node-command)

test-connection:
	@echo "***Testing connection***"
	@node script/test-connection.js $(params)

test-connection-binary:
	@node script/test-connection.js $(params) --binary true
	@echo "***Testing binary connection***"
	@node script/test-connection.js $(params) binary

test-native: build/default/binding.node
	@echo "***Testing native bindings***"
	@find test/native -name "*-tests.js" | $(node-command)
	@find test/integration -name "*-tests.js" | $(node-command) --native true
	@find test/integration -name "*-tests.js" | $(node-command) native

test-integration: test-connection
test-integration: test-connection
	@echo "***Testing Pure Javascript***"
	@find test/integration -name "*-tests.js" | $(node-command)

test-binary: test-connection-binary
	@echo "***Testing Pure Javascript (binary)***"
	@find test/integration -name "*-tests.js" | $(node-command) --binary true
	@find test/integration -name "*-tests.js" | $(node-command) binary

prepare-test-db:
	@echo "***Preparing the database for tests***"
	@find script/create-test-tables.js | $(node-command)

jshint:
	@echo "***Starting jshint***"
	@./node_modules/.bin/jshint lib

publish:
	@rm -r build || (exit 0)
	@npm publish

NEWS.md (70 lines, new file)
@@ -0,0 +1,70 @@
All major and minor releases are briefly explained below.

For richer information consult the commit log on github with referenced pull requests.

We do not include break-fix version releases in this file.

### v2.5.0
- Ability to opt-in to int8 parsing via `pg.defaults.parseInt8 = true`
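A minimal sketch of the opt-in, reusing the pooled `pg.connect` style from the README below; `conString` and the `users` table are placeholders. By default `int8`/`bigint` values come back as strings to avoid precision loss:

```javascript
var pg = require('pg');

// opt in to int8 (bigint) parsing; values beyond 2^53 can lose precision,
// which is why this stays opt-in
pg.defaults.parseInt8 = true;

// conString and the "users" table are placeholders for this sketch
pg.connect(conString, function(err, client, done) {
  if(err) return console.error('could not connect', err);
  client.query('SELECT COUNT(*) AS total FROM users', function(err, result) {
    done();
    if(err) return console.error('error running query', err);
    console.log(typeof result.rows[0].total); // 'number' instead of 'string'
  });
});
```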

### v2.4.0
- Use eval in the result set parser to increase performance

### v2.3.0
- Remove built-in support for binary Int64 parsing.
  _Due to the low usage & required compiled dependency this will be pushed into a 3rd party add-on_

### v2.2.0
- [Add support for escapeLiteral and escapeIdentifier in both JavaScript and the native bindings](https://github.com/brianc/node-postgres/pull/396)

### v2.1.0
- Add support for SSL connections in JavaScript driver
  - this means you can connect to heroku postgres from your local machine without the native bindings!
- [Add field metadata to result object](https://github.com/brianc/node-postgres/blob/master/test/integration/client/row-description-on-results-tests.js)
- [Add ability for rows to be returned as arrays instead of objects](https://github.com/brianc/node-postgres/blob/master/test/integration/client/results-as-array-tests.js)
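A hedged sketch of turning SSL on from the pure JavaScript driver. Both forms are inferred from the `ssl` handling visible in `lib/client.js` and `lib/connection-parameters.js` later in this diff (the `ssl` config flag and the `ssl=true`/`ssl=1` query parameter); the host and credentials are placeholders:

```javascript
var pg = require('pg');

// ssl can be passed on the config object...
var client = new pg.Client({
  host: 'my-remote-host',       // placeholder
  user: 'postgres',
  database: 'postgres',
  ssl: true
});

// ...or as a query parameter on the connection string:
// var client = new pg.Client('postgres://postgres:1234@my-remote-host/postgres?ssl=true');

client.connect(function(err) {
  if(err) return console.error('could not connect over SSL', err);
  client.query('SELECT NOW() AS "theTime"', function(err, result) {
    if(err) return console.error('error running query', err);
    console.log(result.rows[0].theTime);
    client.end();
  });
});
```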

### v2.0.0

- Properly handle various PostgreSQL to JavaScript type conversions to avoid data loss:

```
PostgreSQL | pg@v2.0 JavaScript | pg@v1.0 JavaScript
-----------|--------------------|-------------------
float4     | number (float)     | string
float8     | number (float)     | string
int8       | string             | number (int)
numeric    | string             | number (float)
decimal    | string             | number (float)
```

For more information see https://github.com/brianc/node-postgres/pull/353
If you are unhappy with these changes you can always [override the built in type parsing fairly easily](https://github.com/brianc/node-pg-parse-float).
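Because `numeric` and `decimal` now come back as strings, application code converts explicitly where the precision trade-off is acceptable; a small illustrative sketch (the `orders` table and `conString` are placeholders):

```javascript
var pg = require('pg');

pg.connect(conString, function(err, client, done) {
  if(err) return console.error('could not connect', err);
  // AVG() yields numeric, which pg@v2.0 returns as a string rather than a float
  client.query('SELECT AVG(price) AS avg_price FROM orders', function(err, result) {
    done();
    if(err) return console.error('error running query', err);
    console.log(typeof result.rows[0].avg_price); // 'string'
    var avgPrice = parseFloat(result.rows[0].avg_price); // explicit, potentially lossy conversion
    console.log(avgPrice);
  });
});
```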

### v1.3.0

- Make client_encoding configurable and optional

### v1.2.0

- return field metadata on result object: access via result.fields[i].name/dataTypeID

### v1.1.0

- built in support for `JSON` data type for PostgreSQL Server @ v9.2.0 or greater

### v1.0.0

- remove deprecated functionality
  - Callback function passed to `pg.connect` now __requires__ 3 arguments
  - Client#pauseDrain() / Client#resumeDrain removed
  - numeric, decimal, and float data types no longer parsed into float before being returned. Will be returned from query results as `String`

### v0.15.0

- client now emits `end` when disconnected from back-end server
- if client is disconnected in the middle of a query, query receives an error

### v0.14.0

- add deprecation warnings in prep for v1.0
- fix read/write failures in native module under node v0.9.x
README.md (229 lines)
@@ -1,124 +1,68 @@
#node-postgres

Non-blocking PostgreSQL client for node.js. Pure JavaScript and native libpq bindings. Actively developed, well tested, and used in production.
[](http://travis-ci.org/brianc/node-postgres)

PostgreSQL client for node.js. Pure JavaScript and native libpq bindings.

## Installation

    npm install pg


## Examples

### Simple, using built-in client pool
### Simple

    var pg = require('pg');
    //or native libpq bindings
    //var pg = require('pg').native
Connect to a postgres instance, run a query, and disconnect.

    var conString = "tcp://postgres:1234@localhost/postgres";
```javascript
var pg = require('pg');
//or native libpq bindings
//var pg = require('pg').native

//error handling omitted
pg.connect(conString, function(err, client) {
  client.query("SELECT NOW() as when", function(err, result) {
    console.log("Row count: %d",result.rows.length);  // 1
    console.log("Current year: %d", result.rows[0].when.getYear());
  });
});
var conString = "postgres://postgres:1234@localhost/postgres";

### Evented api
var client = new pg.Client(conString);
client.connect(function(err) {
  if(err) {
    return console.error('could not connect to postgres', err);
  }
  client.query('SELECT NOW() AS "theTime"', function(err, result) {
    if(err) {
      return console.error('error running query', err);
    }
    console.log(result.rows[0].theTime);
    //output: Tue Jan 15 2013 19:12:47 GMT-600 (CST)
    client.end();
  });
});

var pg = require('pg'); //native libpq bindings = `var pg = require('pg').native`
var conString = "tcp://postgres:1234@localhost/postgres";
```

### Client pooling

Typically you will access the PostgreSQL server through a pool of clients. node-postgres ships with a built-in pool to help get you up and running quickly.

```javascript
var pg = require('pg');
var conString = "postgres://postgres:1234@localhost/postgres";

pg.connect(conString, function(err, client, done) {
  if(err) {
    return console.error('error fetching client from pool', err);
  }
  client.query('SELECT $1::int AS numbor', ['1'], function(err, result) {
    //call `done()` to release the client back to the pool
    done();

var client = new pg.Client(conString);
client.connect();
    if(err) {
      return console.error('error running query', err);
    }
    console.log(result.rows[0].numbor);
    //output: 1
  });
});

//queries are queued and executed one after another once the connection becomes available
client.query("CREATE TEMP TABLE beatles(name varchar(10), height integer, birthday timestamptz)");
client.query("INSERT INTO beatles(name, height, birthday) values($1, $2, $3)", ['Ringo', 67, new Date(1945, 11, 2)]);
client.query("INSERT INTO beatles(name, height, birthday) values($1, $2, $3)", ['John', 68, new Date(1944, 10, 13)]);

//queries can be executed either via text/parameter values passed as individual arguments
//or by passing an options object containing text, (optional) parameter values, and (optional) query name
client.query({
  name: 'insert beatle',
  text: "INSERT INTO beatles(name, height, birthday) values($1, $2, $3)",
  values: ['George', 70, new Date(1946, 02, 14)]
});

//subsequent queries with the same name will be executed without re-parsing the query plan by postgres
client.query({
  name: 'insert beatle',
  values: ['Paul', 63, new Date(1945, 04, 03)]
});
var query = client.query("SELECT * FROM beatles WHERE name = $1", ['John']);

//can stream row results back 1 at a time
query.on('row', function(row) {
  console.log(row);
  console.log("Beatle name: %s", row.name); //Beatle name: John
  console.log("Beatle birth year: %d", row.birthday.getYear()); //dates are returned as javascript dates
  console.log("Beatle height: %d' %d\"", Math.floor(row.height/12), row.height%12); //integers are returned as javascript ints
});

//fired after last row is emitted
query.on('end', function() {
  client.end();
});

### Example notes

node-postgres supports both an 'event emitter' style API and a 'callback' style. The callback style is more concise and generally preferred, but the evented API can come in handy. They can be mixed and matched. The only events which do __not__ fire when callbacks are supplied are the `error` events, as they are to be handled by the callback function.
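A short sketch of that error-handling difference, assuming a connected `client` as in the examples above (the table name is deliberately bogus so both variants fail):

```javascript
// callback style: the error arrives as the first argument, no 'error' event fires
client.query('SELECT * FROM table_that_does_not_exist', function(err, result) {
  if(err) return console.error('query failed (callback)', err);
});

// evented style: with no callback supplied, the query emits 'error' instead
var query = client.query('SELECT * FROM table_that_does_not_exist');
query.on('error', function(err) {
  console.error('query failed (event)', err);
});
```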

All examples will work with the pure javascript bindings (currently default) or the libpq native (c/c++) bindings (currently in beta)

To use native libpq bindings replace `require('pg')` with `require('pg').native`.

The two share the same interface so __no other code changes should be required__. If you find yourself having to change code other than the require statement when switching from `pg` to `pg.native`, please report an issue.

### Info

* pure javascript client and native libpq bindings share _the same api_
* _heavily_ tested
  * the same suite of 200+ integration tests passed by both javascript & libpq bindings
  * benchmark & long-running memory leak tests performed before releases
  * tested with
    * postgres 8.x, 9.x
    * Linux, OS X
    * node 2.x & 4.x
* row-by-row result streaming
* built-in (optional) connection pooling
* responsive project maintainer
* supported PostgreSQL features
  * parameterized queries
  * named statements with query plan caching
  * async notifications
  * extensible js<->postgresql data-type coercion
  * query queue
* active development
* fast
* close mirror of the node-mysql api for future multi-database-supported ORM implementation ease

### Contributors

Many thanks to the following:

* [creationix](https://github.com/creationix)
* [felixge](https://github.com/felixge)
* [pshc](https://github.com/pshc)
* [pjornblomqvist](https://github.com/bjornblomqvist)
* [JulianBirch](https://github.com/JulianBirch)
* [ef4](https://github.com/ef4)
* [napa3um](https://github.com/napa3um)
* [drdaeman](https://github.com/drdaeman)
* [booo](https://github.com/booo)
* [neonstalwart](https://github.com/neonstalwart)
* [homme](https://github.com/homme)
* [bdunavant](https://github.com/bdunavant)
* [tokumine](https://github.com/tokumine)
* [shtylman](https://github.com/shtylman)
* [cricri](https://github.com/cricri)
* [AlexanderS](https://github.com/AlexanderS)
* [ahtih](https://github.com/ahtih)
```

## Documentation

@@ -126,20 +70,78 @@ Documentation is a work in progress primarily taking place on the github WIKI

### [Documentation](https://github.com/brianc/node-postgres/wiki)

### __PLEASE__ check out the WIKI
## Native Bindings

node-postgres contains a pure JavaScript driver and also exposes JavaScript bindings to libpq. You can use either interface. I personally use the JavaScript bindings as they are quite fast, and I like having everything implemented in JavaScript.

To use native libpq bindings replace `require('pg')` with `require('pg').native`.

The two share the same interface so __no other code changes should be required__. If you find yourself having to change code other than the require statement when switching from `pg` to `pg.native`, please report an issue.

## Features

* pure JavaScript client and native libpq bindings share _the same api_
* optional connection pooling
* extensible js<->postgresql data-type coercion
* supported PostgreSQL features
  * parameterized queries
  * named statements with query plan caching
  * async notifications with `LISTEN/NOTIFY` (see the sketch after this list)
  * bulk import & export with `COPY TO/COPY FROM`
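A brief, hedged sketch of the async-notification item above: it assumes two already-connected clients (`listener` and `notifier`) and that the driver surfaces notifications via a `notification` event carrying `channel`/`payload` fields, which is worth verifying against your installed version:

```javascript
// listener side: keep a dedicated client connected and LISTEN on a channel
listener.query('LISTEN beatles_changed');
listener.on('notification', function(msg) {
  // msg.channel names the channel; msg.payload carries the optional payload (PostgreSQL 9.0+)
  console.log('notification on %s: %s', msg.channel, msg.payload);
});

// notifier side: any other connection can NOTIFY that channel
notifier.query("NOTIFY beatles_changed, 'Ringo updated'");
```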

## Contributing

__I love contributions.__

You are welcome to contribute via pull requests. If you need help getting the tests running locally feel free to email me or gchat me.

I will __happily__ accept your pull request if it:
- _has tests_
- looks reasonable
- does not break backwards compatibility
- satisfies jshint

Information about the testing processes is in the [wiki](https://github.com/brianc/node-postgres/wiki/Testing).

If you need help or have questions about constructing a pull request I'll be glad to help out as well.

## Support

If at all possible when you open an issue please provide
- version of node
- version of postgres
- smallest possible snippet of code to reproduce the problem

Usually I'll pop the code into the repo as a test. Hopefully the test fails. Then I make the test pass. Then everyone's happy!


If you need help or run into _any_ issues getting node-postgres to work on your system please report a bug or contact me directly. I am usually available via google-talk at my github account public email address.

I usually tweet about any important status updates or changes to node-postgres.
Follow me [@briancarlson](https://twitter.com/briancarlson) to keep up to date.


## Extras

node-postgres is by design _low level_ with the bare minimum of abstraction. These might help out:

- https://github.com/grncdr/node-any-db
- https://github.com/brianc/node-sql

If you have a question, post it to the FAQ section of the WIKI so everyone can read the answer.

## Production Use
* [yammer.com](http://www.yammer.com)
* [bayt.com](http://bayt.com)
* [bitfloor.com](https://bitfloor.com)
* [Vendly](http://www.vend.ly)
* [SaferAging](http://www.saferaging.com)
* [CartoDB](http://www.cartodb.com)
* [Heap](https://heapanalytics.com)
* [zoomsquare](http://www.zoomsquare.com/)

_if you use node-postgres in production and would like your site listed here, fork & add it_
_If you use node-postgres in production and would like your site listed here, fork & add it._

## Help

If you need help or run into _any_ issues getting node-postgres to work on your system please report a bug or contact me directly. I am usually available via google-talk at my github account public email address.

## License

Copyright (c) 2010 Brian Carlson (brian.m.carlson@gmail.com)
@@ -161,6 +163,3 @@ Copyright (c) 2010 Brian Carlson (brian.m.carlson@gmail.com)
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.

benchmark/4e822a1.txt (17 lines, new file)
@@ -0,0 +1,17 @@
|
||||
benchmark
|
||||
starting simple-query-parsing
|
||||
4166 ops/sec - (100/0.024)
|
||||
8333 ops/sec - (1000/0.12)
|
||||
10405 ops/sec - (10000/0.961)
|
||||
10515 ops/sec - (10000/0.951)
|
||||
10638 ops/sec - (10000/0.94)
|
||||
10460 ops/sec - (10000/0.956)
|
||||
starting prepared-statement-parsing
|
||||
4166 ops/sec - (100/0.024)
|
||||
8264 ops/sec - (1000/0.121)
|
||||
7530 ops/sec - (10000/1.328)
|
||||
8250 ops/sec - (10000/1.212)
|
||||
8156 ops/sec - (10000/1.226)
|
||||
8110 ops/sec - (10000/1.233)
|
||||
done
|
||||
|
||||
benchmark/835f71a76f.txt (17 lines, new file)
@@ -0,0 +1,17 @@
|
||||
benchmark
|
||||
starting simple-query-parsing
|
||||
3703 ops/sec - (100/0.027)
|
||||
7299 ops/sec - (1000/0.137)
|
||||
8888 ops/sec - (10000/1.125)
|
||||
8733 ops/sec - (10000/1.145)
|
||||
8810 ops/sec - (10000/1.135)
|
||||
8771 ops/sec - (10000/1.14)
|
||||
starting prepared-statement-parsing
|
||||
3846 ops/sec - (100/0.026)
|
||||
7299 ops/sec - (1000/0.137)
|
||||
7225 ops/sec - (10000/1.384)
|
||||
7288 ops/sec - (10000/1.372)
|
||||
7225 ops/sec - (10000/1.384)
|
||||
7457 ops/sec - (10000/1.341)
|
||||
done
|
||||
|
||||
benchmark/df766c913.txt (17 lines, new file)
@@ -0,0 +1,17 @@
|
||||
benchmark
|
||||
starting simple-query-parsing
|
||||
3571 ops/sec - (100/0.028)
|
||||
7299 ops/sec - (1000/0.137)
|
||||
8873 ops/sec - (10000/1.127)
|
||||
8536 ops/sec - (40000/4.686)
|
||||
8494 ops/sec - (40000/4.709)
|
||||
7695 ops/sec - (40000/5.198)
|
||||
starting prepared-statement-parsing
|
||||
4000 ops/sec - (100/0.025)
|
||||
6944 ops/sec - (1000/0.144)
|
||||
7153 ops/sec - (10000/1.398)
|
||||
7127 ops/sec - (40000/5.612)
|
||||
7208 ops/sec - (40000/5.549)
|
||||
6460 ops/sec - (40000/6.191)
|
||||
done
|
||||
|
||||
benchmark/index.js (42 lines, new file)
@@ -0,0 +1,42 @@
|
||||
var async = require('async');
|
||||
var max = 10000;
|
||||
var maxTimes = 3;
|
||||
var doLoops = function(bench, loops, times, cb) {
|
||||
var start = new Date();
|
||||
var count = 0;
|
||||
|
||||
var done = function() {
|
||||
var duration = (new Date() - start)
|
||||
var seconds = (duration / 1000);
|
||||
console.log("%d ops/sec - (%d/%d)", ~~(loops/seconds), loops, seconds);
|
||||
var next = loops * 10;
|
||||
if(next > max) {
|
||||
if(times > maxTimes) return cb();
|
||||
times++;
|
||||
next = max;
|
||||
}
|
||||
setTimeout(function() {
|
||||
doLoops(bench, next, times, cb);
|
||||
}, 100);
|
||||
}
|
||||
|
||||
var run = function() {
|
||||
if(count++ >= loops){
|
||||
return done();
|
||||
}
|
||||
bench(function() {
|
||||
setImmediate(run);
|
||||
});
|
||||
}
|
||||
run();
|
||||
}
|
||||
var bench = require(__dirname + '/simple-query-parsing');
|
||||
console.log();
|
||||
var benches = ['simple-query-parsing', 'prepared-statement-parsing'];
|
||||
async.forEachSeries(benches, function(name, cb) {
|
||||
var bench = require(__dirname + '/' + name)();
|
||||
console.log('starting ', name);
|
||||
doLoops(bench, 100, 1, cb);
|
||||
}, function(err, res) {
|
||||
console.log('done')
|
||||
})
|
||||
@ -1,68 +0,0 @@
|
||||
var pg = require(__dirname + '/../lib')
|
||||
var pgNative = require(__dirname + '/../lib/native');
|
||||
var bencher = require('bencher');
|
||||
var helper = require(__dirname + '/../test/test-helper')
|
||||
var conString = helper.connectionString()
|
||||
|
||||
var round = function(num) {
|
||||
return Math.round((num*1000))/1000
|
||||
}
|
||||
|
||||
var doBenchmark = function() {
|
||||
var bench = bencher({
|
||||
name: 'js/native compare',
|
||||
repeat: 1000,
|
||||
actions: [{
|
||||
name: 'javascript client - simple query',
|
||||
run: function(next) {
|
||||
var query = client.query('SELECT name, age FROM person WHERE age > 10');
|
||||
query.on('end', function() {
|
||||
next();
|
||||
});
|
||||
}
|
||||
},{
|
||||
name: 'native client - simple query',
|
||||
run: function(next) {
|
||||
var query = nativeClient.query('SELECT name FROM person WHERE age > $1', [10]);
|
||||
query.on('end', function() {
|
||||
next();
|
||||
});
|
||||
}
|
||||
}, {
|
||||
name: 'javascript client - parameterized query',
|
||||
run: function(next) {
|
||||
var query = client.query('SELECT name, age FROM person WHERE age > $1', [10]);
|
||||
query.on('end', function() {
|
||||
next();
|
||||
});
|
||||
}
|
||||
},{
|
||||
name: 'native client - parameterized query',
|
||||
run: function(next) {
|
||||
var query = nativeClient.query('SELECT name, age FROM person WHERE age > $1', [10]);
|
||||
query.on('end', function() {
|
||||
next();
|
||||
});
|
||||
}
|
||||
}]
|
||||
});
|
||||
bench(function(result) {
|
||||
console.log();
|
||||
console.log("%s (%d repeats):", result.name, result.repeat)
|
||||
result.actions.forEach(function(action) {
|
||||
console.log(" %s: \n average: %d ms\n total: %d ms", action.name, round(action.meanTime), round(action.totalTime));
|
||||
})
|
||||
client.end();
|
||||
nativeClient.end();
|
||||
})
|
||||
}
|
||||
|
||||
var client = new pg.Client(conString);
|
||||
var nativeClient = new pgNative.Client(conString);
|
||||
client.connect();
|
||||
client.on('connect', function() {
|
||||
nativeClient.connect();
|
||||
nativeClient.on('connect', function() {
|
||||
doBenchmark();
|
||||
});
|
||||
});
|
||||
@ -1,125 +0,0 @@
|
||||
var pg = require(__dirname + '/../lib')
|
||||
var bencher = require('bencher');
|
||||
var helper = require(__dirname + '/../test/test-helper')
|
||||
var conString = helper.connectionString()
|
||||
|
||||
var round = function(num) {
|
||||
return Math.round((num*1000))/1000
|
||||
}
|
||||
|
||||
var doBenchmark = function(cb) {
|
||||
var bench = bencher({
|
||||
name: 'select large sets',
|
||||
repeat: 10,
|
||||
actions: [{
|
||||
name: 'selecting string',
|
||||
run: function(next) {
|
||||
var query = client.query('SELECT name FROM items');
|
||||
query.on('error', function(er) {
|
||||
console.log(er);throw er;
|
||||
});
|
||||
|
||||
query.on('end', function() {
|
||||
next();
|
||||
});
|
||||
}
|
||||
}, {
|
||||
name: 'selecting integer',
|
||||
run: function(next) {
|
||||
var query = client.query('SELECT count FROM items');
|
||||
query.on('error', function(er) {
|
||||
console.log(er);throw er;
|
||||
});
|
||||
|
||||
query.on('end', function() {
|
||||
next();
|
||||
})
|
||||
}
|
||||
}, {
|
||||
name: 'selecting date',
|
||||
run: function(next) {
|
||||
var query = client.query('SELECT created FROM items');
|
||||
query.on('error', function(er) {
|
||||
console.log(er);throw er;
|
||||
});
|
||||
|
||||
query.on('end', function() {
|
||||
next();
|
||||
})
|
||||
}
|
||||
}, {
|
||||
name: 'selecting row',
|
||||
run: function(next) {
|
||||
var query = client.query('SELECT * FROM items');
|
||||
query.on('end', function() {
|
||||
next();
|
||||
})
|
||||
}
|
||||
}, {
|
||||
name: 'loading all rows into memory',
|
||||
run: function(next) {
|
||||
var query = client.query('SELECT * FROM items', next);
|
||||
}
|
||||
}]
|
||||
});
|
||||
bench(function(result) {
|
||||
console.log();
|
||||
console.log("%s (%d repeats):", result.name, result.repeat)
|
||||
result.actions.forEach(function(action) {
|
||||
console.log(" %s: \n average: %d ms\n total: %d ms", action.name, round(action.meanTime), round(action.totalTime));
|
||||
})
|
||||
client.end();
|
||||
cb();
|
||||
})
|
||||
}
|
||||
|
||||
|
||||
var client = new pg.Client(conString);
|
||||
client.connect();
|
||||
console.log();
|
||||
console.log("creating temp table");
|
||||
client.query("CREATE TEMP TABLE items(name VARCHAR(10), created TIMESTAMPTZ, count INTEGER)");
|
||||
var count = 10000;
|
||||
console.log("inserting %d rows", count);
|
||||
for(var i = 0; i < count; i++) {
|
||||
var query = {
|
||||
name: 'insert',
|
||||
text: "INSERT INTO items(name, created, count) VALUES($1, $2, $3)",
|
||||
values: ["item"+i, new Date(2010, 01, 01, i, 0, 0), i]
|
||||
};
|
||||
client.query(query);
|
||||
}
|
||||
|
||||
client.once('drain', function() {
|
||||
console.log('done with insert. executing pure-javascript benchmark.');
|
||||
doBenchmark(function() {
|
||||
var oldclient = client;
|
||||
client = new pg.native.Client(conString);
|
||||
client.on('error', function(err) {
|
||||
console.log(err);
|
||||
throw err;
|
||||
});
|
||||
|
||||
client.connect();
|
||||
client.connect();
|
||||
console.log();
|
||||
console.log("creating temp table");
|
||||
client.query("CREATE TEMP TABLE items(name VARCHAR(10), created TIMESTAMPTZ, count INTEGER)");
|
||||
var count = 10000;
|
||||
console.log("inserting %d rows", count);
|
||||
for(var i = 0; i < count; i++) {
|
||||
var query = {
|
||||
name: 'insert',
|
||||
text: "INSERT INTO items(name, created, count) VALUES($1, $2, $3)",
|
||||
values: ["item"+i, new Date(2010, 01, 01, i, 0, 0), i]
|
||||
};
|
||||
client.query(query);
|
||||
}
|
||||
client.once('drain', function() {
|
||||
console.log("executing native benchmark");
|
||||
doBenchmark(function() {
|
||||
console.log("all done");
|
||||
})
|
||||
})
|
||||
});
|
||||
});
|
||||
benchmark/prepared-statement-parsing.js (73 lines, new file)
@@ -0,0 +1,73 @@
|
||||
var Client = require(__dirname + '/../lib/client');
|
||||
var buffers = require(__dirname + '/../test/test-buffers');
|
||||
require(__dirname + '/../test/unit/test-helper');
|
||||
|
||||
var stream = new MemoryStream();
|
||||
stream.readyState = 'open';
|
||||
var client = new Client({
|
||||
stream: stream
|
||||
});
|
||||
|
||||
var rowDescription = new buffers.rowDescription([{
|
||||
name: 'id',
|
||||
tableID: 1,
|
||||
attributeNumber: 1,
|
||||
dataTypeID: 23, //int4
|
||||
typeModifer: 0,
|
||||
formatCode: 0
|
||||
},{
|
||||
name: 'name',
|
||||
tableID: 1,
|
||||
attributeNumber: 2,
|
||||
dataTypeID: 25, //text
|
||||
typeModifer: 0,
|
||||
formatCode: 0 //text format
|
||||
}, {
|
||||
name: 'comment',
|
||||
tableID: 1,
|
||||
attributeNumber: 3,
|
||||
dataTypeID: 25, //text
|
||||
typeModifer: 0,
|
||||
formatCode: 0 //text format
|
||||
}]);
|
||||
var row1 = buffers.dataRow(['1', 'Brian', 'Something groovy']);
|
||||
var row2 = buffers.dataRow(['2', 'Bob', 'Testint test']);
|
||||
var row3 = buffers.dataRow(['3', 'The amazing power of the everlasting gobstopper', 'okay now']);
|
||||
var parseCompleteBuffer = buffers.parseComplete();
|
||||
var bindCompleteBuffer = buffers.bindComplete();
|
||||
var portalSuspendedBuffer = buffers.portalSuspended();
|
||||
var complete = buffers.commandComplete('SELECT 3');
|
||||
var ready = buffers.readyForQuery();
|
||||
var buffer = Buffer.concat([parseCompleteBuffer,
|
||||
bindCompleteBuffer,
|
||||
rowDescription,
|
||||
row1,
|
||||
row2,
|
||||
row3,
|
||||
portalSuspendedBuffer,
|
||||
row1,
|
||||
row2,
|
||||
row3,
|
||||
portalSuspendedBuffer,
|
||||
row1,
|
||||
row2,
|
||||
row3,
|
||||
portalSuspendedBuffer,
|
||||
complete, ready]);
|
||||
|
||||
var bufferSlice = require('buffer-slice');
|
||||
var buffers = bufferSlice(10, buffer);
|
||||
|
||||
client.connect(assert.calls(function() {
|
||||
client.connection.emit('readyForQuery');
|
||||
module.exports = function() {
|
||||
return function(done) {
|
||||
client.query('SELECT * FROM whatever WHERE this = "doesnt even matter"', ['whatever'], function(err, res) {
|
||||
assert.equal(res.rows.length, 9);
|
||||
done();
|
||||
});
|
||||
buffers.forEach(stream.emit.bind(stream, 'data'));
|
||||
};
|
||||
};
|
||||
}));
|
||||
client.connection.emit('readyForQuery');
|
||||
@ -1,58 +0,0 @@
|
||||
var pg = require(__dirname + '/../lib')
|
||||
var bencher = require('bencher');
|
||||
var helper = require(__dirname + '/../test/test-helper')
|
||||
var conString = helper.connectionString()
|
||||
|
||||
var round = function(num) {
|
||||
return Math.round((num*1000))/1000
|
||||
}
|
||||
|
||||
var doBenchmark = function() {
|
||||
var bench = bencher({
|
||||
name: 'query compare',
|
||||
repeat: 1000,
|
||||
actions: [{
|
||||
name: 'simple query',
|
||||
run: function(next) {
|
||||
var query = client.query('SELECT name FROM person WHERE age > 10');
|
||||
query.on('end', function() {
|
||||
next();
|
||||
});
|
||||
}
|
||||
},{
|
||||
name: 'unnamed prepared statement',
|
||||
run: function(next) {
|
||||
var query = client.query('SELECT name FROM person WHERE age > $1', [10]);
|
||||
query.on('end', function() {
|
||||
next();
|
||||
});
|
||||
}
|
||||
},{
|
||||
name: 'named prepared statement',
|
||||
run: function(next) {
|
||||
var config = {
|
||||
name: 'get peeps',
|
||||
text: 'SELECT name FROM person WHERE age > $1',
|
||||
values: [10]
|
||||
}
|
||||
client.query(config).on('end', function() {
|
||||
next();
|
||||
});
|
||||
}
|
||||
}]
|
||||
});
|
||||
bench(function(result) {
|
||||
console.log();
|
||||
console.log("%s (%d repeats):", result.name, result.repeat)
|
||||
result.actions.forEach(function(action) {
|
||||
console.log(" %s: \n average: %d ms\n total: %d ms", action.name, round(action.meanTime), round(action.totalTime));
|
||||
})
|
||||
client.end();
|
||||
})
|
||||
}
|
||||
|
||||
|
||||
|
||||
var client = new pg.Client(conString);
|
||||
client.connect();
|
||||
client.connection.once('readyForQuery', doBenchmark)
|
||||
benchmark/simple-query-parsing.js (59 lines, new file)
@@ -0,0 +1,59 @@
|
||||
var Client = require(__dirname + '/../lib/client');
|
||||
var buffers = require(__dirname + '/../test/test-buffers');
|
||||
require(__dirname + '/../test/unit/test-helper');
|
||||
|
||||
var stream = new MemoryStream();
|
||||
stream.readyState = 'open';
|
||||
var client = new Client({
|
||||
stream: stream
|
||||
});
|
||||
|
||||
var rowDescription = new buffers.rowDescription([{
|
||||
name: 'id',
|
||||
tableID: 1,
|
||||
attributeNumber: 1,
|
||||
dataTypeID: 23, //int4
|
||||
typeModifer: 0,
|
||||
formatCode: 0
|
||||
},{
|
||||
name: 'name',
|
||||
tableID: 1,
|
||||
attributeNumber: 2,
|
||||
dataTypeID: 25, //text
|
||||
typeModifer: 0,
|
||||
formatCode: 0 //text format
|
||||
}, {
|
||||
name: 'comment',
|
||||
tableID: 1,
|
||||
attributeNumber: 3,
|
||||
dataTypeID: 25, //text
|
||||
typeModifer: 0,
|
||||
formatCode: 0 //text format
|
||||
}]);
|
||||
var row1 = buffers.dataRow(['1', 'Brian', 'Something groovy']);
|
||||
var row2 = buffers.dataRow(['2', 'Bob', 'Testint test']);
|
||||
var row3 = buffers.dataRow(['3', 'The amazing power of the everlasting gobstopper', 'okay now']);
|
||||
var complete = buffers.commandComplete('SELECT 3');
|
||||
var ready = buffers.readyForQuery();
|
||||
var buffer = Buffer.concat([
|
||||
rowDescription,
|
||||
row1, row2, row3,
|
||||
row1, row2, row3,
|
||||
row1, row2, row3,
|
||||
complete, ready]);
|
||||
var bufferSlice = require('buffer-slice');
|
||||
buffers = bufferSlice(10, buffer);
|
||||
|
||||
client.connect(assert.calls(function() {
|
||||
client.connection.emit('readyForQuery');
|
||||
module.exports = function() {
|
||||
return function(done) {
|
||||
client.query('SELECT * FROM whatever WHERE this = "doesnt even matter"', function(err, res) {
|
||||
assert.equal(res.rows.length, 9);
|
||||
done();
|
||||
});
|
||||
buffers.forEach(stream.emit.bind(stream, 'data'));
|
||||
};
|
||||
};
|
||||
}));
|
||||
client.connection.emit('readyForQuery');
|
||||
binding.gyp (37 lines, new file)
@@ -0,0 +1,37 @@
|
||||
{
|
||||
'targets': [
|
||||
{
|
||||
'target_name': 'binding',
|
||||
'conditions' : [
|
||||
['OS=="win"', {
|
||||
'conditions' : [
|
||||
['"<!@(cmd /C where /Q pg_config || echo n)"!="n"',
|
||||
{
|
||||
'sources': ['src/binding.cc'],
|
||||
'include_dirs': ['<!@(pg_config --includedir)'],
|
||||
'libraries' : ['libpq.lib'],
|
||||
'msvs_settings': {
|
||||
'VCLinkerTool' : {
|
||||
'AdditionalLibraryDirectories' : [
|
||||
'<!@(pg_config --libdir)\\'
|
||||
]
|
||||
},
|
||||
}
|
||||
}
|
||||
]
|
||||
]
|
||||
}, { # OS!="win"
|
||||
'conditions' : [
|
||||
['"y"!="n"', # ToDo: add pg_config existance condition that works on linux
|
||||
{
|
||||
'sources': ['src/binding.cc'],
|
||||
'include_dirs': ['<!@(pg_config --includedir)'],
|
||||
'libraries' : ['-lpq -L<!@(pg_config --libdir)']
|
||||
}
|
||||
]
|
||||
]
|
||||
}]
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
@ -1,258 +0,0 @@
|
||||
var parseBits = function(data, bits, offset, invert, callback) {
|
||||
offset = offset || 0;
|
||||
invert = invert || false;
|
||||
callback = callback || function(lastValue, newValue, bits) { return (lastValue * Math.pow(2, bits)) + newValue; };
|
||||
var offsetBytes = offset >> 3;
|
||||
|
||||
var inv = function(value) {
|
||||
if (invert) {
|
||||
return ~value & 0xff;
|
||||
}
|
||||
|
||||
return value;
|
||||
};
|
||||
|
||||
// read first (maybe partial) byte
|
||||
var mask = 0xff;
|
||||
var firstBits = 8 - (offset % 8);
|
||||
if (bits < firstBits) {
|
||||
mask = (0xff << (8 - bits)) & 0xff;
|
||||
firstBits = bits;
|
||||
}
|
||||
|
||||
if (offset) {
|
||||
mask = mask >> (offset % 8);
|
||||
}
|
||||
|
||||
var result = 0;
|
||||
if ((offset % 8) + bits >= 8) {
|
||||
result = callback(0, inv(data[offsetBytes]) & mask, firstBits);
|
||||
}
|
||||
|
||||
// read bytes
|
||||
var bytes = (bits + offset) >> 3;
|
||||
for (var i = offsetBytes + 1; i < bytes; i++) {
|
||||
result = callback(result, inv(data[i]), 8);
|
||||
}
|
||||
|
||||
// bits to read, that are not a complete byte
|
||||
var lastBits = (bits + offset) % 8;
|
||||
if (lastBits > 0) {
|
||||
result = callback(result, inv(data[bytes]) >> (8 - lastBits), lastBits);
|
||||
}
|
||||
|
||||
return result;
|
||||
};
|
||||
|
||||
var parseFloatFromBits = function(data, precisionBits, exponentBits) {
|
||||
var bias = Math.pow(2, exponentBits - 1) - 1;
|
||||
var sign = parseBits(data, 1);
|
||||
var exponent = parseBits(data, exponentBits, 1);
|
||||
|
||||
if (exponent === 0)
|
||||
return 0;
|
||||
|
||||
// parse mantissa
|
||||
var precisionBitsCounter = 1;
|
||||
var parsePrecisionBits = function(lastValue, newValue, bits) {
|
||||
if (lastValue === 0) {
|
||||
lastValue = 1;
|
||||
}
|
||||
|
||||
for (var i = 1; i <= bits; i++) {
|
||||
precisionBitsCounter /= 2;
|
||||
if ((newValue & (0x1 << (bits - i))) > 0) {
|
||||
lastValue += precisionBitsCounter;
|
||||
}
|
||||
}
|
||||
|
||||
return lastValue;
|
||||
};
|
||||
|
||||
var mantissa = parseBits(data, precisionBits, exponentBits + 1, false, parsePrecisionBits);
|
||||
|
||||
// special cases
|
||||
if (exponent == (Math.pow(2, exponentBits + 1) - 1)) {
|
||||
if (mantissa === 0) {
|
||||
return (sign === 0) ? Infinity : -Infinity;
|
||||
}
|
||||
|
||||
return NaN;
|
||||
}
|
||||
|
||||
// normale number
|
||||
return ((sign === 0) ? 1 : -1) * Math.pow(2, exponent - bias) * mantissa;
|
||||
};
|
||||
|
||||
var parseBool = function(value) {
|
||||
return (parseBits(value, 8) == 1);
|
||||
};
|
||||
|
||||
var parseInt16 = function(value) {
|
||||
if (parseBits(value, 1) == 1) {
|
||||
return -1 * (parseBits(value, 15, 1, true) + 1);
|
||||
}
|
||||
|
||||
return parseBits(value, 15, 1);
|
||||
};
|
||||
|
||||
var parseInt32 = function(value) {
|
||||
if (parseBits(value, 1) == 1) {
|
||||
return -1 * (parseBits(value, 31, 1, true) + 1);
|
||||
}
|
||||
|
||||
return parseBits(value, 31, 1);
|
||||
};
|
||||
|
||||
var parseInt64 = function(value) {
|
||||
if (parseBits(value, 1) == 1) {
|
||||
return -1 * (parseBits(value, 63, 1, true) + 1);
|
||||
}
|
||||
|
||||
return parseBits(value, 63, 1);
|
||||
};
|
||||
|
||||
var parseFloat32 = function(value) {
|
||||
return parseFloatFromBits(value, 23, 8);
|
||||
};
|
||||
|
||||
var parseFloat64 = function(value) {
|
||||
return parseFloatFromBits(value, 52, 11);
|
||||
};
|
||||
|
||||
var parseNumeric = function(value) {
|
||||
var sign = parseBits(value, 16, 32);
|
||||
if (sign == 0xc000) {
|
||||
return NaN;
|
||||
}
|
||||
|
||||
var weight = Math.pow(10000, parseBits(value, 16, 16));
|
||||
var result = 0;
|
||||
|
||||
var digits = [];
|
||||
var ndigits = parseBits(value, 16);
|
||||
for (var i = 0; i < ndigits; i++) {
|
||||
result += parseBits(value, 16, 64 + (16 * i)) * weight;
|
||||
weight /= 10000;
|
||||
}
|
||||
|
||||
var scale = Math.pow(10, parseBits(value, 16, 48));
|
||||
return ((sign === 0) ? 1 : -1) * Math.round(result * scale) / scale;
|
||||
};
|
||||
|
||||
var parseDate = function(value) {
|
||||
var sign = parseBits(value, 1);
|
||||
var rawValue = parseBits(value, 63, 1);
|
||||
|
||||
// discard usecs and shift from 2000 to 1970
|
||||
var result = new Date((((sign === 0) ? 1 : -1) * rawValue / 1000) + 946684800000);
|
||||
|
||||
// add microseconds to the date
|
||||
result.usec = rawValue % 1000;
|
||||
result.getMicroSeconds = function() {
|
||||
return this.usec;
|
||||
};
|
||||
result.setMicroSeconds = function(value) {
|
||||
this.usec = value;
|
||||
};
|
||||
result.getUTCMicroSeconds = function() {
|
||||
return this.usec;
|
||||
};
|
||||
|
||||
return result;
|
||||
};
|
||||
|
||||
var parseArray = function(value) {
|
||||
var dim = parseBits(value, 32);
|
||||
|
||||
var flags = parseBits(value, 32, 32);
|
||||
var elementType = parseBits(value, 32, 64);
|
||||
|
||||
var offset = 96;
|
||||
var dims = [];
|
||||
for (var i = 0; i < dim; i++) {
|
||||
// parse dimension
|
||||
dims[i] = parseBits(value, 32, offset);
|
||||
offset += 32;
|
||||
|
||||
// ignore lower bounds
|
||||
offset += 32;
|
||||
}
|
||||
|
||||
var parseElement = function(elementType) {
|
||||
// parse content length
|
||||
var length = parseBits(value, 32, offset);
|
||||
offset += 32;
|
||||
|
||||
// parse null values
|
||||
if (length == 0xffffffff) {
|
||||
return null;
|
||||
}
|
||||
|
||||
if ((elementType == 0x17) || (elementType == 0x14)) {
|
||||
// int/bigint
|
||||
var result = parseBits(value, length * 8, offset);
|
||||
offset += length * 8;
|
||||
return result;
|
||||
}
|
||||
else if (elementType == 0x19) {
|
||||
// string
|
||||
var result = value.toString(this.encoding, offset >> 3, (offset += (length << 3)) >> 3);
|
||||
return result;
|
||||
}
|
||||
else {
|
||||
console.log("ERROR: ElementType not implemented: " + elementType);
|
||||
}
|
||||
};
|
||||
|
||||
var parse = function(dimension, elementType) {
|
||||
var array = [];
|
||||
|
||||
if (dimension.length > 1) {
|
||||
var count = dimension.shift();
|
||||
for (var i = 0; i < count; i++) {
|
||||
array[i] = parse(dimension, elementType);
|
||||
}
|
||||
dimension.unshift(count);
|
||||
}
|
||||
else {
|
||||
for (var i = 0; i < dimension[0]; i++) {
|
||||
array[i] = parseElement(elementType);
|
||||
}
|
||||
}
|
||||
|
||||
return array;
|
||||
};
|
||||
|
||||
return parse(dims, elementType);
|
||||
};
|
||||
|
||||
var parseText = function(value) {
|
||||
return value.toString('utf8');
|
||||
};
|
||||
|
||||
var parseBool = function(value) {
|
||||
return (parseBits(value, 8) > 0);
|
||||
};
|
||||
|
||||
var init = function(register) {
|
||||
register(20, parseInt64);
|
||||
register(21, parseInt16);
|
||||
register(23, parseInt32);
|
||||
register(26, parseInt32);
|
||||
register(1700, parseNumeric);
|
||||
register(700, parseFloat32);
|
||||
register(701, parseFloat64);
|
||||
register(16, parseBool);
|
||||
register(1114, parseDate);
|
||||
register(1184, parseDate);
|
||||
register(1007, parseArray);
|
||||
register(1016, parseArray);
|
||||
register(1008, parseArray);
|
||||
register(1009, parseArray);
|
||||
register(25, parseText);
|
||||
};
|
||||
|
||||
module.exports = {
|
||||
init: init
|
||||
};
|
||||
lib/client.js (242 lines)
@@ -2,36 +2,40 @@ var crypto = require('crypto');
|
||||
var EventEmitter = require('events').EventEmitter;
|
||||
var util = require('util');
|
||||
|
||||
var ConnectionParameters = require(__dirname + '/connection-parameters');
|
||||
var Query = require(__dirname + '/query');
|
||||
var utils = require(__dirname + '/utils');
|
||||
var defaults = require(__dirname + '/defaults');
|
||||
var Connection = require(__dirname + '/connection');
|
||||
var CopyFromStream = require(__dirname + '/copystream').CopyFromStream;
|
||||
var CopyToStream = require(__dirname + '/copystream').CopyToStream;
|
||||
|
||||
var Client = function(config) {
|
||||
EventEmitter.call(this);
|
||||
if(typeof config === 'string') {
|
||||
config = utils.normalizeConnectionInfo(config)
|
||||
}
|
||||
config = config || {};
|
||||
this.user = config.user || defaults.user;
|
||||
this.database = config.database || defaults.database;
|
||||
this.port = config.port || defaults.port;
|
||||
this.host = config.host || defaults.host;
|
||||
this.connection = config.connection || new Connection({stream: config.stream});
|
||||
|
||||
this.connectionParameters = new ConnectionParameters(config);
|
||||
this.user = this.connectionParameters.user;
|
||||
this.database = this.connectionParameters.database;
|
||||
this.port = this.connectionParameters.port;
|
||||
this.host = this.connectionParameters.host;
|
||||
this.password = this.connectionParameters.password;
|
||||
|
||||
var c = config || {};
|
||||
|
||||
this.connection = c.connection || new Connection({
|
||||
stream: c.stream,
|
||||
ssl: c.ssl
|
||||
});
|
||||
this.queryQueue = [];
|
||||
this.password = config.password || defaults.password;
|
||||
this.binary = config.binary || defaults.binary;
|
||||
this.binary = c.binary || defaults.binary;
|
||||
this.encoding = 'utf8';
|
||||
this.processID = null;
|
||||
this.secretKey = null;
|
||||
var self = this;
|
||||
this.ssl = c.ssl || false;
|
||||
};
|
||||
|
||||
util.inherits(Client, EventEmitter);
|
||||
|
||||
var p = Client.prototype;
|
||||
|
||||
p.connect = function(callback) {
|
||||
Client.prototype.connect = function(callback) {
|
||||
var self = this;
|
||||
var con = this.connection;
|
||||
if(this.host && this.host.indexOf('/') === 0) {
|
||||
@ -43,6 +47,17 @@ p.connect = function(callback) {
|
||||
|
||||
//once connection is established send startup message
|
||||
con.on('connect', function() {
|
||||
if(self.ssl) {
|
||||
con.requestSsl();
|
||||
} else {
|
||||
con.startup({
|
||||
user: self.user,
|
||||
database: self.database
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
con.on('sslconnect', function() {
|
||||
con.startup({
|
||||
user: self.user,
|
||||
database: self.database
|
||||
@ -63,9 +78,9 @@ p.connect = function(callback) {
|
||||
});
|
||||
|
||||
con.once('backendKeyData', function(msg) {
|
||||
self.processID = msg.processID;
|
||||
self.secretKey = msg.secretKey;
|
||||
});
|
||||
self.processID = msg.processID;
|
||||
self.secretKey = msg.secretKey;
|
||||
});
|
||||
|
||||
//hook up query handling events to connection
|
||||
//after the connection initially becomes ready for queries
|
||||
@ -74,10 +89,12 @@ p.connect = function(callback) {
|
||||
con.on('rowDescription', function(msg) {
|
||||
self.activeQuery.handleRowDescription(msg);
|
||||
});
|
||||
|
||||
//delegate datarow to active query
|
||||
con.on('dataRow', function(msg) {
|
||||
self.activeQuery.handleDataRow(msg);
|
||||
});
|
||||
|
||||
//TODO should query gain access to connection?
|
||||
con.on('portalSuspended', function(msg) {
|
||||
self.activeQuery.getRows(con);
|
||||
@ -92,6 +109,25 @@ p.connect = function(callback) {
|
||||
}
|
||||
});
|
||||
|
||||
con.on('copyInResponse', function(msg) {
|
||||
self.activeQuery.streamData(self.connection);
|
||||
});
|
||||
|
||||
con.on('copyOutResponse', function(msg) {
|
||||
if(self.activeQuery.stream === undefined) {
|
||||
self.activeQuery._canceledDueToError =
|
||||
new Error('No destination stream defined');
|
||||
//canceling query requires creation of new connection
|
||||
//look for postgres frontend/backend protocol
|
||||
(new self.constructor({port: self.port, host: self.host}))
|
||||
.cancel(self, self.activeQuery);
|
||||
}
|
||||
});
|
||||
|
||||
con.on('copyData', function (msg) {
|
||||
self.activeQuery.handleCopyFromChunk(msg.chunk);
|
||||
});
|
||||
|
||||
if (!callback) {
|
||||
self.emit('connect');
|
||||
} else {
|
||||
@ -107,12 +143,22 @@ p.connect = function(callback) {
|
||||
});
|
||||
|
||||
con.on('readyForQuery', function() {
|
||||
var error;
|
||||
if(self.activeQuery) {
|
||||
self.activeQuery.handleReadyForQuery();
|
||||
//try/catch/rethrow to ensure exceptions don't prevent the queryQueue from
|
||||
//being processed
|
||||
try{
|
||||
self.activeQuery.handleReadyForQuery();
|
||||
} catch(e) {
|
||||
error = e;
|
||||
}
|
||||
}
|
||||
this.activeQuery = null;
|
||||
self.activeQuery = null;
|
||||
self.readyForQuery = true;
|
||||
self._pulseQueryQueue();
|
||||
if(error) {
|
||||
throw error;
|
||||
}
|
||||
});
|
||||
|
||||
con.on('error', function(error) {
|
||||
@ -127,37 +173,93 @@ p.connect = function(callback) {
|
||||
if(self.activeQuery.isPreparedStatement) {
|
||||
con.sync();
|
||||
}
|
||||
self.activeQuery.handleError(error);
|
||||
var activeQuery = self.activeQuery;
|
||||
self.activeQuery = null;
|
||||
activeQuery.handleError(error);
|
||||
}
|
||||
});
|
||||
|
||||
con.once('end', function() {
|
||||
if(self.activeQuery) {
|
||||
self.activeQuery.handleError(new Error('Stream unexpectedly ended during query execution'));
|
||||
self.activeQuery = null;
|
||||
}
|
||||
self.emit('end');
|
||||
});
|
||||
|
||||
|
||||
con.on('notice', function(msg) {
|
||||
self.emit('notice', msg);
|
||||
});
|
||||
|
||||
};
|
||||
|
||||
p.cancel = function(client, query) {
|
||||
if (client.activeQuery == query) {
|
||||
var con = this.connection;
|
||||
Client.prototype.cancel = function(client, query) {
|
||||
if(client.activeQuery == query) {
|
||||
var con = this.connection;
|
||||
|
||||
if(this.host && this.host.indexOf('/') === 0) {
|
||||
con.connect(this.host + '/.s.PGSQL.' + this.port);
|
||||
} else {
|
||||
con.connect(this.port, this.host);
|
||||
}
|
||||
if(this.host && this.host.indexOf('/') === 0) {
|
||||
con.connect(this.host + '/.s.PGSQL.' + this.port);
|
||||
} else {
|
||||
con.connect(this.port, this.host);
|
||||
}
|
||||
|
||||
//once connection is established send cancel message
|
||||
con.on('connect', function() {
|
||||
con.cancel(client.processID, client.secretKey);
|
||||
});
|
||||
}
|
||||
else if (client.queryQueue.indexOf(query) != -1)
|
||||
client.queryQueue.splice(client.queryQueue.indexOf(query), 1);
|
||||
//once connection is established send cancel message
|
||||
con.on('connect', function() {
|
||||
con.cancel(client.processID, client.secretKey);
|
||||
});
|
||||
} else if(client.queryQueue.indexOf(query) != -1) {
|
||||
client.queryQueue.splice(client.queryQueue.indexOf(query), 1);
|
||||
}
|
||||
};
|
||||
|
||||
p._pulseQueryQueue = function() {
|
||||
// Ported from PostgreSQL 9.2.4 source code in src/interfaces/libpq/fe-exec.c
|
||||
Client.prototype.escapeIdentifier = function(str) {
|
||||
|
||||
var escaped = '"';
|
||||
|
||||
for(var i = 0; i < str.length; i++) {
|
||||
var c = str[i];
|
||||
if(c === '"') {
|
||||
escaped += c + c;
|
||||
} else {
|
||||
escaped += c;
|
||||
}
|
||||
}
|
||||
|
||||
escaped += '"';
|
||||
|
||||
return escaped;
|
||||
};
|
||||
|
||||
// Ported from PostgreSQL 9.2.4 source code in src/interfaces/libpq/fe-exec.c
|
||||
Client.prototype.escapeLiteral = function(str) {
|
||||
|
||||
var hasBackslash = false;
|
||||
var escaped = '\'';
|
||||
|
||||
for(var i = 0; i < str.length; i++) {
|
||||
var c = str[i];
|
||||
if(c === '\'') {
|
||||
escaped += c + c;
|
||||
} else if (c === '\\') {
|
||||
escaped += c + c;
|
||||
hasBackslash = true;
|
||||
} else {
|
||||
escaped += c;
|
||||
}
|
||||
}
|
||||
|
||||
escaped += '\'';
|
||||
|
||||
if(hasBackslash === true) {
|
||||
escaped = ' E' + escaped;
|
||||
}
|
||||
|
||||
return escaped;
|
||||
};
|
||||
|
||||
Client.prototype._pulseQueryQueue = function() {
|
||||
if(this.readyForQuery===true) {
|
||||
this.activeQuery = this.queryQueue.shift();
|
||||
if(this.activeQuery) {
|
||||
@ -166,48 +268,51 @@ p._pulseQueryQueue = function() {
|
||||
this.activeQuery.submit(this.connection);
|
||||
} else if(this.hasExecuted) {
|
||||
this.activeQuery = null;
|
||||
this._drainPaused > 0 ? this._drainPaused++ : this.emit('drain')
|
||||
this.emit('drain');
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
p.query = function(config, values, callback) {
|
||||
//can take in strings or config objects
|
||||
config = (typeof(config) == 'string') ? { text: config } : config;
|
||||
if (this.binary && !('binary' in config)) {
|
||||
config.binary = true;
|
||||
}
|
||||
|
||||
if(values) {
|
||||
if(typeof values === 'function') {
|
||||
callback = values;
|
||||
Client.prototype._copy = function (text, stream) {
|
||||
var config = {};
|
||||
config.text = text;
|
||||
config.stream = stream;
|
||||
config.callback = function (error) {
|
||||
if(error) {
|
||||
config.stream.error(error);
|
||||
} else {
|
||||
config.values = values;
|
||||
config.stream.close();
|
||||
}
|
||||
};
|
||||
var query = new Query(config);
|
||||
this.queryQueue.push(query);
|
||||
this._pulseQueryQueue();
|
||||
return config.stream;
|
||||
|
||||
};
|
||||
|
||||
Client.prototype.copyFrom = function (text) {
|
||||
return this._copy(text, new CopyFromStream());
|
||||
};
|
||||
|
||||
Client.prototype.copyTo = function (text) {
|
||||
return this._copy(text, new CopyToStream());
|
||||
};
|
||||
|
||||
Client.prototype.query = function(config, values, callback) {
|
||||
//can take in strings, config object or query object
|
||||
var query = (config instanceof Query) ? config :
|
||||
new Query(config, values, callback);
|
||||
if(this.binary && !query.binary) {
|
||||
query.binary = true;
|
||||
}
|
||||
|
||||
config.callback = callback;
|
||||
|
||||
var query = new Query(config);
|
||||
this.queryQueue.push(query);
|
||||
this._pulseQueryQueue();
|
||||
return query;
|
||||
};
|
||||
|
||||
//prevents client from otherwise emitting 'drain' event until 'resumeDrain' is called
|
||||
p.pauseDrain = function() {
|
||||
this._drainPaused = 1;
|
||||
};
|
||||
|
||||
//resume raising 'drain' event
|
||||
p.resumeDrain = function() {
|
||||
if(this._drainPaused > 1) {
|
||||
this.emit('drain');
|
||||
}
|
||||
this._drainPaused = 0;
|
||||
};
|
||||
|
||||
p.end = function() {
|
||||
Client.prototype.end = function() {
|
||||
this.connection.end();
|
||||
};
|
||||
|
||||
@ -215,4 +320,7 @@ Client.md5 = function(string) {
|
||||
return crypto.createHash('md5').update(string).digest('hex');
|
||||
};
|
||||
|
||||
// expose a Query constructor
|
||||
Client.Query = Query;
|
||||
|
||||
module.exports = Client;
|
||||
|
||||
lib/connection-parameters.js (81 lines, new file)
@@ -0,0 +1,81 @@
|
||||
var dns = require('dns');
|
||||
var path = require('path');
|
||||
|
||||
var defaults = require(__dirname + '/defaults');
|
||||
|
||||
var val = function(key, config) {
|
||||
return config[key] ||
|
||||
process.env['PG' + key.toUpperCase()] ||
|
||||
defaults[key];
|
||||
};
|
||||
|
||||
var url = require('url');
|
||||
//parses a connection string
|
||||
var parse = function(str) {
|
||||
//unix socket
|
||||
if(str.charAt(0) === '/') {
|
||||
return { host: str };
|
||||
}
|
||||
// url parse expects spaces encoded as %20
|
||||
str = encodeURI(str);
|
||||
var result = url.parse(str, true);
|
||||
var config = {};
|
||||
config.host = result.hostname;
|
||||
config.database = result.pathname ? result.pathname.slice(1) : null;
|
||||
var auth = (result.auth || ':').split(':');
|
||||
config.user = auth[0];
|
||||
config.password = auth[1];
|
||||
config.port = result.port;
|
||||
|
||||
var ssl = result.query.ssl;
|
||||
if (ssl === 'true' || ssl === '1') {
|
||||
config.ssl = true;
|
||||
}
|
||||
|
||||
return config;
|
||||
};
|
||||
|
||||
var ConnectionParameters = function(config) {
|
||||
config = typeof config == 'string' ? parse(config) : (config || {});
|
||||
this.user = val('user', config);
|
||||
this.database = val('database', config);
|
||||
this.port = parseInt(val('port', config), 10);
|
||||
this.host = val('host', config);
|
||||
this.password = val('password', config);
|
||||
this.binary = val('binary', config);
|
||||
this.ssl = config.ssl || defaults.ssl;
|
||||
this.client_encoding = val("client_encoding", config);
|
||||
//a domain socket begins with '/'
|
||||
this.isDomainSocket = (!(this.host||'').indexOf('/'));
|
||||
};
|
||||
|
||||
var add = function(params, config, paramName) {
|
||||
var value = config[paramName];
|
||||
if(value) {
|
||||
params.push(paramName+"='"+value+"'");
|
||||
}
|
||||
};
|
||||
|
||||
ConnectionParameters.prototype.getLibpqConnectionString = function(cb) {
|
||||
var params = [];
|
||||
add(params, this, 'user');
|
||||
add(params, this, 'password');
|
||||
add(params, this, 'port');
|
||||
if(this.database) {
|
||||
params.push("dbname='" + this.database + "'");
|
||||
}
|
||||
if(this.isDomainSocket) {
|
||||
params.push("host=" + this.host);
|
||||
return cb(null, params.join(' '));
|
||||
}
|
||||
if(this.client_encoding) {
|
||||
params.push("client_encoding='" + this.client_encoding + "'");
|
||||
}
|
||||
dns.lookup(this.host, function(err, address) {
|
||||
if(err) return cb(err, null);
|
||||
params.push("hostaddr=" + address);
|
||||
return cb(null, params.join(' '));
|
||||
});
|
||||
};
|
||||
|
||||
module.exports = ConnectionParameters;
|
||||
@ -4,8 +4,10 @@ var EventEmitter = require('events').EventEmitter;
|
||||
var util = require('util');
|
||||
|
||||
var utils = require(__dirname + '/utils');
|
||||
var Writer = require(__dirname + '/writer');
|
||||
var Writer = require('buffer-writer');
|
||||
|
||||
var TEXT_MODE = 0;
|
||||
var BINARY_MODE = 1;
|
||||
var Connection = function(config) {
|
||||
EventEmitter.call(this);
|
||||
config = config || {};
|
||||
@ -17,18 +19,25 @@ var Connection = function(config) {
|
||||
this.encoding = 'utf8';
|
||||
this.parsedStatements = {};
|
||||
this.writer = new Writer();
|
||||
this.ssl = config.ssl || false;
|
||||
this._ending = false;
|
||||
this._mode = TEXT_MODE;
|
||||
this._emitMessage = false;
|
||||
var self = this;
|
||||
this.on('newListener', function(eventName) {
|
||||
if(eventName == 'message') {
|
||||
self._emitMessage = true;
|
||||
}
|
||||
});
|
||||
};
|
||||
|
||||
util.inherits(Connection, EventEmitter);
|
||||
|
||||
var p = Connection.prototype;
|
||||
Connection.prototype.connect = function(port, host) {
|
||||
|
||||
p.connect = function(port, host) {
|
||||
|
||||
if(this.stream.readyState === 'closed'){
|
||||
if(this.stream.readyState === 'closed') {
|
||||
this.stream.connect(port, host);
|
||||
}
|
||||
else if(this.stream.readyState == 'open') {
|
||||
} else if(this.stream.readyState == 'open') {
|
||||
this.emit('connect');
|
||||
}
|
||||
|
||||
@ -38,22 +47,76 @@ p.connect = function(port, host) {
|
||||
self.emit('connect');
|
||||
});
|
||||
|
||||
|
||||
this.stream.on('data', function(buffer) {
|
||||
self.setBuffer(buffer);
|
||||
var msg;
|
||||
while(msg = self.parseMessage()) {
|
||||
self.emit('message', msg);
|
||||
self.emit(msg.name, msg);
|
||||
this.stream.on('error', function(error) {
|
||||
//don't raise ECONNRESET errors - they can & should be ignored
|
||||
//during disconnect
|
||||
if(self._ending && error.code == 'ECONNRESET') {
|
||||
return;
|
||||
}
|
||||
self.emit('error', error);
|
||||
});
|
||||
|
||||
this.stream.on('error', function(error) {
|
||||
self.emit('error', error);
|
||||
this.stream.on('end', function() {
|
||||
self.emit('end');
|
||||
});
|
||||
|
||||
if(!this.ssl) {
|
||||
return this.attachListeners(this.stream);
|
||||
}
|
||||
|
||||
this.stream.once('data', function(buffer) {
|
||||
var responseCode = buffer.toString('utf8');
|
||||
if(responseCode != 'S') {
|
||||
return self.emit('error', new Error('The server does not support SSL connections'));
|
||||
}
|
||||
var tls = require('tls');
|
||||
self.stream = tls.connect({
|
||||
socket: self.stream,
|
||||
servername: host,
|
||||
rejectUnauthorized: self.ssl.rejectUnauthorized,
|
||||
ca: self.ssl.ca,
|
||||
pfx: self.ssl.pfx,
|
||||
key: self.ssl.key,
|
||||
passphrase: self.ssl.passphrase,
|
||||
cert: self.ssl.cert,
|
||||
NPNProtocols: self.ssl.NPNProtocols
|
||||
});
|
||||
self.attachListeners(self.stream);
|
||||
self.emit('sslconnect');
|
||||
});
|
||||
};
|
||||
|
||||
p.startup = function(config) {
|
||||
Connection.prototype.attachListeners = function(stream) {
|
||||
stream.on('data', function(buff) {
|
||||
this.setBuffer(buff);
|
||||
var msg = this.parseMessage();
|
||||
while(msg) {
|
||||
if(this._emitMessage) {
|
||||
this.emit('message', msg);
|
||||
}
|
||||
this.emit(msg.name, msg);
|
||||
msg = this.parseMessage();
|
||||
}
|
||||
}.bind(this));
|
||||
};
|
||||
|
||||
Connection.prototype.requestSsl = function(config) {
|
||||
this.checkSslResponse = true;
|
||||
|
||||
var bodyBuffer = this.writer
|
||||
.addInt16(0x04D2)
|
||||
.addInt16(0x162F).flush();
|
||||
|
||||
var length = bodyBuffer.length + 4;
|
||||
|
||||
var buffer = new Writer()
|
||||
.addInt32(length)
|
||||
.add(bodyBuffer)
|
||||
.join();
|
||||
this.stream.write(buffer);
|
||||
};
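The two Int16s above are simply the 32-bit SSLRequest code split in half: 0x04D2162F is 80877103, the value the PostgreSQL protocol reserves for requesting TLS, and the frame carries no type byte. A plain-Buffer sketch of the same 8-byte message, shown only to make the layout explicit:

//sketch: the SSLRequest frame without the Writer helper
var sslRequest = new Buffer(8);
sslRequest.writeInt32BE(8, 0);        //length, including these four bytes
sslRequest.writeInt32BE(80877103, 4); //0x04D2162F, the SSLRequest code
//the backend answers with a single byte: 'S' to start TLS, 'N' if SSL is unsupported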
|
||||
|
||||
Connection.prototype.startup = function(config) {
|
||||
var bodyBuffer = this.writer
|
||||
.addInt16(3)
|
||||
.addInt16(0)
|
||||
@ -61,6 +124,8 @@ p.startup = function(config) {
|
||||
.addCString(config.user)
|
||||
.addCString('database')
|
||||
.addCString(config.database)
|
||||
.addCString('client_encoding')
|
||||
.addCString("'utf-8'")
|
||||
.addCString('').flush();
|
||||
//this message is sent without a code
|
||||
|
||||
@ -73,7 +138,7 @@ p.startup = function(config) {
|
||||
this.stream.write(buffer);
|
||||
};
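For reference, the startup packet built above is the other frame without a leading type byte: an Int32 length, protocol version 3.0 encoded as two Int16s, then null-terminated key/value pairs closed by an empty string. A sketch of the payload for an illustrative config of { user: 'bob', database: 'mydb' }:

//  Int32   total length, including these 4 bytes
//  Int16 3, Int16 0                     -> protocol version 3.0
//  'user\0'            'bob\0'
//  'database\0'        'mydb\0'
//  'client_encoding\0' "'utf-8'\0"
//  '\0'                                 -> terminator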
|
||||
|
||||
p.cancel = function(processID, secretKey) {
|
||||
Connection.prototype.cancel = function(processID, secretKey) {
|
||||
var bodyBuffer = this.writer
|
||||
.addInt16(1234)
|
||||
.addInt16(5678)
|
||||
@ -90,27 +155,28 @@ p.cancel = function(processID, secretKey) {
|
||||
this.stream.write(buffer);
|
||||
};
|
||||
|
||||
p.password = function(password) {
|
||||
Connection.prototype.password = function(password) {
|
||||
//0x70 = 'p'
|
||||
this._send(0x70, this.writer.addCString(password));
|
||||
};
|
||||
|
||||
p._send = function(code, more) {
|
||||
Connection.prototype._send = function(code, more) {
|
||||
if(!this.stream.writable) { return false; }
|
||||
if(more === true) {
|
||||
this.writer.addHeader(code);
|
||||
} else {
|
||||
return this.stream.write(this.writer.flush(code));
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
p.query = function(text) {
|
||||
Connection.prototype.query = function(text) {
|
||||
//0x51 = Q
|
||||
this.stream.write(this.writer.addCString(text).flush(0x51));
|
||||
};
|
||||
|
||||
//send parse message
|
||||
//"more" === true to buffer the message until flush() is called
|
||||
p.parse = function(query, more) {
|
||||
Connection.prototype.parse = function(query, more) {
|
||||
//expect something like this:
|
||||
// { name: 'queryName',
|
||||
// text: 'select * from blah',
|
||||
@ -135,7 +201,7 @@ p.parse = function(query, more) {
|
||||
|
||||
//send bind message
|
||||
//"more" === true to buffer the message until flush() is called
|
||||
p.bind = function(config, more) {
|
||||
Connection.prototype.bind = function(config, more) {
|
||||
//normalize config
|
||||
config = config || {};
|
||||
config.portal = config.portal || '';
|
||||
@ -153,13 +219,12 @@ p.bind = function(config, more) {
|
||||
if(val === null || typeof val === "undefined") {
|
||||
buffer.addInt32(-1);
|
||||
} else {
|
||||
val = val.toString();
|
||||
buffer.addInt32(Buffer.byteLength(val));
|
||||
buffer.addString(val);
|
||||
}
|
||||
}
|
||||
|
||||
if (config.binary) {
|
||||
if(config.binary) {
|
||||
buffer.addInt16(1); // format codes to use binary
|
||||
buffer.addInt16(1);
|
||||
}
|
||||
@ -172,7 +237,7 @@ p.bind = function(config, more) {
|
||||
|
||||
//send execute message
|
||||
//"more" === true to buffer the message until flush() is called
|
||||
p.execute = function(config, more) {
|
||||
Connection.prototype.execute = function(config, more) {
|
||||
config = config || {};
|
||||
config.portal = config.portal || '';
|
||||
config.rows = config.rows || '';
|
||||
@ -186,33 +251,48 @@ p.execute = function(config, more) {
|
||||
|
||||
var emptyBuffer = Buffer(0);
|
||||
|
||||
p.flush = function() {
|
||||
Connection.prototype.flush = function() {
|
||||
//0x48 = 'H'
|
||||
this.writer.add(emptyBuffer)
|
||||
this.writer.add(emptyBuffer);
|
||||
this._send(0x48);
|
||||
}
|
||||
};
|
||||
|
||||
p.sync = function() {
|
||||
Connection.prototype.sync = function() {
|
||||
//clear out any pending data in the writer
|
||||
this.writer.flush(0)
|
||||
|
||||
this.writer.flush(0);
|
||||
|
||||
this.writer.add(emptyBuffer);
|
||||
this._send(0x53);
|
||||
};
|
||||
|
||||
p.end = function() {
|
||||
Connection.prototype.end = function() {
|
||||
//0x58 = 'X'
|
||||
this.writer.add(emptyBuffer);
|
||||
this._send(0x58);
|
||||
this._ending = true;
|
||||
};
|
||||
|
||||
p.describe = function(msg, more) {
|
||||
Connection.prototype.describe = function(msg, more) {
|
||||
this.writer.addCString(msg.type + (msg.name || ''));
|
||||
this._send(0x44, more);
|
||||
};
|
||||
|
||||
Connection.prototype.sendCopyFromChunk = function (chunk) {
|
||||
this.stream.write(this.writer.add(chunk).flush(0x64));
|
||||
};
|
||||
|
||||
Connection.prototype.endCopyFrom = function () {
|
||||
this.stream.write(this.writer.add(emptyBuffer).flush(0x63));
|
||||
};
|
||||
|
||||
Connection.prototype.sendCopyFail = function (msg) {
|
||||
//this.stream.write(this.writer.add(emptyBuffer).flush(0x66));
|
||||
this.writer.addCString(msg);
|
||||
this._send(0x66);
|
||||
};
|
||||
|
||||
//parsing methods
|
||||
p.setBuffer = function(buffer) {
|
||||
Connection.prototype.setBuffer = function(buffer) {
|
||||
if(this.lastBuffer) { //we have leftover data from a previous packet
|
||||
//need to combine last two buffers
|
||||
var remaining = this.lastBuffer.length - this.lastOffset;
|
||||
@ -221,11 +301,30 @@ p.setBuffer = function(buffer) {
|
||||
buffer.copy(combinedBuffer, remaining, 0);
|
||||
buffer = combinedBuffer;
|
||||
}
|
||||
this.lastBuffer = false;
|
||||
this.buffer = buffer;
|
||||
this.offset = 0;
|
||||
};
|
||||
|
||||
p.parseMessage = function() {
|
||||
Connection.prototype.readSslResponse = function() {
|
||||
var remaining = this.buffer.length - (this.offset);
|
||||
if(remaining < 1) {
|
||||
this.lastBuffer = this.buffer;
|
||||
this.lastOffset = this.offset;
|
||||
return false;
|
||||
}
|
||||
return {
|
||||
name: 'sslresponse',
|
||||
text: this.buffer[this.offset++]
|
||||
};
|
||||
};
|
||||
|
||||
var Message = function(name, length) {
|
||||
this.name = name;
|
||||
this.length = length;
|
||||
};
|
||||
|
||||
Connection.prototype.parseMessage = function() {
|
||||
var remaining = this.buffer.length - (this.offset);
|
||||
if(remaining < 5) {
|
||||
//cannot read id + length without at least 5 bytes
|
||||
@ -237,8 +336,9 @@ p.parseMessage = function() {
|
||||
|
||||
//read message id code
|
||||
var id = this.buffer[this.offset++];
|
||||
var buffer = this.buffer;
|
||||
//read message length
|
||||
var length = this.parseInt32();
|
||||
var length = this.parseInt32(buffer);
|
||||
|
||||
if(remaining <= length) {
|
||||
this.lastBuffer = this.buffer;
|
||||
@ -247,89 +347,81 @@ p.parseMessage = function() {
|
||||
return false;
|
||||
}
|
||||
|
||||
var msg = {
|
||||
length: length
|
||||
};
|
||||
|
||||
switch(id)
|
||||
{
|
||||
|
||||
case 0x52: //R
|
||||
msg.name = 'authenticationOk';
|
||||
return this.parseR(msg);
|
||||
return this.parseR(buffer, length);
|
||||
|
||||
case 0x53: //S
|
||||
msg.name = 'parameterStatus';
|
||||
return this.parseS(msg);
|
||||
return this.parseS(buffer, length);
|
||||
|
||||
case 0x4b: //K
|
||||
msg.name = 'backendKeyData';
|
||||
return this.parseK(msg);
|
||||
return this.parseK(buffer, length);
|
||||
|
||||
case 0x43: //C
|
||||
msg.name = 'commandComplete';
|
||||
return this.parseC(msg);
|
||||
return this.parseC(buffer, length);
|
||||
|
||||
case 0x5a: //Z
|
||||
msg.name = 'readyForQuery';
|
||||
return this.parseZ(msg);
|
||||
return this.parseZ(buffer, length);
|
||||
|
||||
case 0x54: //T
|
||||
msg.name = 'rowDescription';
|
||||
return this.parseT(msg);
|
||||
return this.parseT(buffer, length);
|
||||
|
||||
case 0x44: //D
|
||||
msg.name = 'dataRow';
|
||||
return this.parseD(msg);
|
||||
return this.parseD(buffer, length);
|
||||
|
||||
case 0x45: //E
|
||||
msg.name = 'error';
|
||||
return this.parseE(msg);
|
||||
return this.parseE(buffer, length);
|
||||
|
||||
case 0x4e: //N
|
||||
msg.name = 'notice';
|
||||
return this.parseN(msg);
|
||||
return this.parseN(buffer, length);
|
||||
|
||||
case 0x31: //1
|
||||
msg.name = 'parseComplete';
|
||||
return msg;
|
||||
return new Message('parseComplete', length);
|
||||
|
||||
case 0x32: //2
|
||||
msg.name = 'bindComplete';
|
||||
return msg;
|
||||
return new Message('bindComplete', length);
|
||||
|
||||
case 0x41: //A
|
||||
msg.name = 'notification';
|
||||
return this.parseA(msg);
|
||||
return this.parseA(buffer, length);
|
||||
|
||||
case 0x6e: //n
|
||||
msg.name = 'noData';
|
||||
return msg;
|
||||
return new Message('noData', length);
|
||||
|
||||
case 0x49: //I
|
||||
msg.name = 'emptyQuery';
|
||||
return msg;
|
||||
return new Message('emptyQuery', length);
|
||||
|
||||
case 0x73: //s
|
||||
msg.name = 'portalSuspended';
|
||||
return msg;
|
||||
return new Message('portalSuspended', length);
|
||||
|
||||
default:
|
||||
throw new Error("Unrecognized message code " + id);
|
||||
case 0x47: //G
|
||||
return this.parseG(buffer, length);
|
||||
|
||||
case 0x48: //H
|
||||
return this.parseH(buffer, length);
|
||||
|
||||
case 0x63: //c
|
||||
return new Message('copyDone', length);
|
||||
|
||||
case 0x64: //d
|
||||
return this.parsed(buffer, length);
|
||||
}
|
||||
return false;
|
||||
};
|
||||
|
||||
p.parseR = function(msg) {
|
||||
Connection.prototype.parseR = function(buffer, length) {
|
||||
var code = 0;
|
||||
var msg = new Message('authenticationOk', length);
|
||||
if(msg.length === 8) {
|
||||
code = this.parseInt32();
|
||||
code = this.parseInt32(buffer);
|
||||
if(code === 3) {
|
||||
msg.name = 'authenticationCleartextPassword';
|
||||
}
|
||||
return msg;
|
||||
}
|
||||
if(msg.length === 12) {
|
||||
code = this.parseInt32();
|
||||
code = this.parseInt32(buffer);
|
||||
if(code === 5) { //md5 required
|
||||
msg.name = 'authenticationMD5Password';
|
||||
msg.salt = new Buffer(4);
|
||||
@ -341,78 +433,120 @@ p.parseR = function(msg) {
|
||||
throw new Error("Unknown authenticatinOk message type" + util.inspect(msg));
|
||||
};
|
||||
|
||||
p.parseS = function(msg) {
|
||||
msg.parameterName = this.parseCString();
|
||||
msg.parameterValue = this.parseCString();
|
||||
Connection.prototype.parseS = function(buffer, length) {
|
||||
var msg = new Message('parameterStatus', length);
|
||||
msg.parameterName = this.parseCString(buffer);
|
||||
msg.parameterValue = this.parseCString(buffer);
|
||||
return msg;
|
||||
};
|
||||
|
||||
p.parseK = function(msg) {
|
||||
msg.processID = this.parseInt32();
|
||||
msg.secretKey = this.parseInt32();
|
||||
Connection.prototype.parseK = function(buffer, length) {
|
||||
var msg = new Message('backendKeyData', length);
|
||||
msg.processID = this.parseInt32(buffer);
|
||||
msg.secretKey = this.parseInt32(buffer);
|
||||
return msg;
|
||||
};
|
||||
|
||||
p.parseC = function(msg) {
|
||||
msg.text = this.parseCString();
|
||||
Connection.prototype.parseC = function(buffer, length) {
|
||||
var msg = new Message('commandComplete', length);
|
||||
msg.text = this.parseCString(buffer);
|
||||
return msg;
|
||||
};
|
||||
|
||||
p.parseZ = function(msg) {
|
||||
msg.status = this.readChar();
|
||||
Connection.prototype.parseZ = function(buffer, length) {
|
||||
var msg = new Message('readyForQuery', length);
|
||||
msg.name = 'readyForQuery';
|
||||
msg.status = this.readString(buffer, 1);
|
||||
return msg;
|
||||
};
|
||||
|
||||
p.parseT = function(msg) {
|
||||
msg.fieldCount = this.parseInt16();
|
||||
var ROW_DESCRIPTION = 'rowDescription';
|
||||
Connection.prototype.parseT = function(buffer, length) {
|
||||
var msg = new Message(ROW_DESCRIPTION, length);
|
||||
msg.fieldCount = this.parseInt16(buffer);
|
||||
var fields = [];
|
||||
for(var i = 0; i < msg.fieldCount; i++){
|
||||
fields[i] = this.parseField();
|
||||
fields.push(this.parseField(buffer));
|
||||
}
|
||||
msg.fields = fields;
|
||||
return msg;
|
||||
};
|
||||
|
||||
p.parseField = function() {
|
||||
var field = {
|
||||
name: this.parseCString(),
|
||||
tableID: this.parseInt32(),
|
||||
columnID: this.parseInt16(),
|
||||
dataTypeID: this.parseInt32(),
|
||||
dataTypeSize: this.parseInt16(),
|
||||
dataTypeModifier: this.parseInt32(),
|
||||
format: this.parseInt16() === 0 ? 'text' : 'binary'
|
||||
};
|
||||
var Field = function() {
|
||||
this.name = null;
|
||||
this.tableID = null;
|
||||
this.columnID = null;
|
||||
this.dataTypeID = null;
|
||||
this.dataTypeSize = null;
|
||||
this.dataTypeModifier = null;
|
||||
this.format = null;
|
||||
};
|
||||
|
||||
var FORMAT_TEXT = 'text';
|
||||
var FORMAT_BINARY = 'binary';
|
||||
Connection.prototype.parseField = function(buffer) {
|
||||
var field = new Field();
|
||||
field.name = this.parseCString(buffer);
|
||||
field.tableID = this.parseInt32(buffer);
|
||||
field.columnID = this.parseInt16(buffer);
|
||||
field.dataTypeID = this.parseInt32(buffer);
|
||||
field.dataTypeSize = this.parseInt16(buffer);
|
||||
field.dataTypeModifier = this.parseInt32(buffer);
|
||||
if(this.parseInt16(buffer) === TEXT_MODE) {
|
||||
this._mode = TEXT_MODE;
|
||||
field.format = FORMAT_TEXT;
|
||||
} else {
|
||||
this._mode = BINARY_MODE;
|
||||
field.format = FORMAT_BINARY;
|
||||
}
|
||||
return field;
|
||||
};
|
||||
|
||||
p.parseD = function(msg) {
|
||||
var fieldCount = this.parseInt16();
|
||||
var fields = [];
|
||||
var DATA_ROW = 'dataRow';
|
||||
var DataRowMessage = function(length, fieldCount) {
|
||||
this.name = DATA_ROW;
|
||||
this.length = length;
|
||||
this.fieldCount = fieldCount;
|
||||
this.fields = [];
|
||||
};
|
||||
|
||||
|
||||
//extremely hot-path code
|
||||
Connection.prototype.parseD = function(buffer, length) {
|
||||
var fieldCount = this.parseInt16(buffer);
|
||||
var msg = new DataRowMessage(length, fieldCount);
|
||||
for(var i = 0; i < fieldCount; i++) {
|
||||
var length = this.parseInt32();
|
||||
fields[i] = (length === -1 ? null : this.readBytes(length))
|
||||
};
|
||||
msg.fieldCount = fieldCount;
|
||||
msg.fields = fields;
|
||||
msg.fields.push(this._readValue(buffer));
|
||||
}
|
||||
return msg;
|
||||
};
|
||||
|
||||
//extremely hot-path code
|
||||
Connection.prototype._readValue = function(buffer) {
|
||||
var length = this.parseInt32(buffer);
|
||||
if(length === -1) return null;
|
||||
if(this._mode === TEXT_MODE) {
|
||||
return this.readString(buffer, length);
|
||||
}
|
||||
return this.readBytes(buffer, length);
|
||||
};
|
||||
|
||||
//parses error
|
||||
p.parseE = function(input) {
|
||||
Connection.prototype.parseE = function(buffer, length) {
|
||||
var fields = {};
|
||||
var msg, item;
|
||||
var fieldType = this.readString(1);
|
||||
var input = new Message('error', length);
|
||||
var fieldType = this.readString(buffer, 1);
|
||||
while(fieldType != '\0') {
|
||||
fields[fieldType] = this.parseCString();
|
||||
fieldType = this.readString(1);
|
||||
fields[fieldType] = this.parseCString(buffer);
|
||||
fieldType = this.readString(buffer, 1);
|
||||
}
|
||||
if (input.name === 'error') {
|
||||
if(input.name === 'error') {
|
||||
// the msg is an Error instance
|
||||
msg = new Error(fields.M);
|
||||
for (item in input) {
|
||||
// copy input properties to the error
|
||||
if (input.hasOwnProperty(item)) {
|
||||
if(input.hasOwnProperty(item)) {
|
||||
msg[item] = input[item];
|
||||
}
|
||||
}
|
||||
@ -436,52 +570,72 @@ p.parseE = function(input) {
|
||||
};
|
||||
|
||||
//same thing, different name
|
||||
p.parseN = p.parseE;
|
||||
|
||||
p.parseA = function(msg) {
|
||||
msg.processId = this.parseInt32();
|
||||
msg.channel = this.parseCString();
|
||||
msg.payload = this.parseCString();
|
||||
Connection.prototype.parseN = function(buffer, length) {
|
||||
var msg = this.parseE(buffer, length);
|
||||
msg.name = 'notice';
|
||||
return msg;
|
||||
};
|
||||
|
||||
p.readChar = function() {
|
||||
return Buffer([this.buffer[this.offset++]]).toString(this.encoding);
|
||||
Connection.prototype.parseA = function(buffer, length) {
|
||||
var msg = new Message('notification', length);
|
||||
msg.processId = this.parseInt32(buffer);
|
||||
msg.channel = this.parseCString(buffer);
|
||||
msg.payload = this.parseCString(buffer);
|
||||
return msg;
|
||||
};
|
||||
|
||||
p.parseInt32 = function() {
|
||||
var value = this.peekInt32();
|
||||
Connection.prototype.parseG = function (buffer, length) {
|
||||
var msg = new Message('copyInResponse', length);
|
||||
return this.parseGH(buffer, msg);
|
||||
};
|
||||
|
||||
Connection.prototype.parseH = function(buffer, length) {
|
||||
var msg = new Message('copyOutResponse', length);
|
||||
return this.parseGH(buffer, msg);
|
||||
};
|
||||
|
||||
Connection.prototype.parseGH = function (buffer, msg) {
|
||||
var isBinary = this.buffer[this.offset] !== 0;
|
||||
this.offset++;
|
||||
msg.binary = isBinary;
|
||||
var columnCount = this.parseInt16(buffer);
|
||||
msg.columnTypes = [];
|
||||
for(var i = 0; i<columnCount; i++) {
|
||||
msg.columnTypes.push(this.parseInt16(buffer));
|
||||
}
|
||||
return msg;
|
||||
};
|
||||
|
||||
Connection.prototype.parsed = function (buffer, length) {
|
||||
var msg = new Message('copyData', length);
|
||||
msg.chunk = this.readBytes(buffer, msg.length - 4);
|
||||
return msg;
|
||||
};
|
||||
|
||||
Connection.prototype.parseInt32 = function(buffer) {
|
||||
var value = buffer.readInt32BE(this.offset, true);
|
||||
this.offset += 4;
|
||||
return value;
|
||||
};
|
||||
|
||||
p.peekInt32 = function(offset) {
|
||||
offset = offset || this.offset;
|
||||
var buffer = this.buffer;
|
||||
return ((buffer[offset++] << 24) +
|
||||
(buffer[offset++] << 16) +
|
||||
(buffer[offset++] << 8) +
|
||||
buffer[offset++]);
|
||||
Connection.prototype.parseInt16 = function(buffer) {
|
||||
var value = buffer.readInt16BE(this.offset, true);
|
||||
this.offset += 2;
|
||||
return value;
|
||||
};
|
||||
|
||||
|
||||
p.parseInt16 = function() {
|
||||
return ((this.buffer[this.offset++] << 8) +
|
||||
(this.buffer[this.offset++] << 0));
|
||||
Connection.prototype.readString = function(buffer, length) {
|
||||
return buffer.toString(this.encoding, this.offset, (this.offset += length));
|
||||
};
|
||||
|
||||
p.readString = function(length) {
|
||||
return this.buffer.toString(this.encoding, this.offset, (this.offset += length));
|
||||
Connection.prototype.readBytes = function(buffer, length) {
|
||||
return buffer.slice(this.offset, this.offset += length);
|
||||
};
|
||||
|
||||
p.readBytes = function(length) {
|
||||
return this.buffer.slice(this.offset, this.offset += length);
|
||||
};
|
||||
|
||||
p.parseCString = function() {
|
||||
Connection.prototype.parseCString = function(buffer) {
|
||||
var start = this.offset;
|
||||
while(this.buffer[this.offset++]) { };
|
||||
return this.buffer.toString(this.encoding, start, this.offset - 1);
|
||||
while(buffer[this.offset++] !== 0) { }
|
||||
return buffer.toString(this.encoding, start, this.offset - 1);
|
||||
};
|
||||
//end parsing methods
|
||||
module.exports = Connection;
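Two details above are easy to miss: every backend message is framed as a one-byte type code plus an Int32 length that counts itself (hence the five-byte minimum in parseMessage), and all of the primitive readers share the single this.offset cursor. A hand-built illustration, assuming the Connection constructor above can be created with no config and never touches a real socket:

//sketch only - feed parseMessage a hand-built ReadyForQuery ('Z') frame
var con = new Connection();

var frame = new Buffer(6);
frame.write('Z', 0);        //message type
frame.writeInt32BE(5, 1);   //length: 4 for itself plus 1 status byte
frame.write('I', 5);        //status: idle

con.setBuffer(frame);
var msg = con.parseMessage();
console.log(msg.name, msg.status); //'readyForQuery' 'I'

//the readers advance the shared cursor as they go
con.setBuffer(new Buffer('client_encoding\0UTF8\0', 'utf8'));
console.log(con.parseCString(con.buffer)); //'client_encoding'
console.log(con.parseCString(con.buffer)); //'UTF8'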
|
||||
|
||||
206
lib/copystream.js
Normal file
206
lib/copystream.js
Normal file
@ -0,0 +1,206 @@
|
||||
var Stream = require('stream').Stream;
|
||||
var util = require('util');
|
||||
var CopyFromStream = function () {
|
||||
Stream.apply(this, arguments);
|
||||
this._buffer = new Buffer(0);
|
||||
this._connection = false;
|
||||
this._finished = false;
|
||||
this._finishedSent = false;
|
||||
this._closed = false;
|
||||
this._error = false;
|
||||
this._dataBuffered = false;
|
||||
this.__defineGetter__("writable", this._writable.bind(this));
|
||||
};
|
||||
|
||||
util.inherits(CopyFromStream, Stream);
|
||||
|
||||
CopyFromStream.prototype._writable = function () {
|
||||
return !(this._finished || this._error);
|
||||
};
|
||||
|
||||
CopyFromStream.prototype.startStreamingToConnection = function (connection) {
|
||||
if(this._error) {
|
||||
return;
|
||||
}
|
||||
this._connection = connection;
|
||||
this._sendIfConnectionReady();
|
||||
this._endIfNeedAndPossible();
|
||||
};
|
||||
|
||||
CopyFromStream.prototype._handleChunk = function (string, encoding) {
|
||||
var dataChunk,
|
||||
tmpBuffer;
|
||||
if(string !== undefined) {
|
||||
if(string instanceof Buffer) {
|
||||
dataChunk = string;
|
||||
} else {
|
||||
dataChunk = new Buffer(string, encoding);
|
||||
}
|
||||
if(this._buffer.length) {
|
||||
//Buffer.concat is better, but it's missing
|
||||
//in node v0.6.x
|
||||
tmpBuffer = new Buffer(this._buffer.length + dataChunk.length);
|
||||
this._buffer.copy(tmpBuffer);
|
||||
dataChunk.copy(tmpBuffer, this._buffer.length);
|
||||
this._buffer = tmpBuffer;
|
||||
} else {
|
||||
this._buffer = dataChunk;
|
||||
}
|
||||
}
|
||||
|
||||
return this._sendIfConnectionReady();
|
||||
};
|
||||
|
||||
CopyFromStream.prototype._sendIfConnectionReady = function () {
|
||||
var dataSent = false;
|
||||
if(this._connection) {
|
||||
dataSent = this._connection.sendCopyFromChunk(this._buffer);
|
||||
this._buffer = new Buffer(0);
|
||||
if(this._dataBuffered) {
|
||||
this.emit('drain');
|
||||
}
|
||||
this._dataBuffered = false;
|
||||
} else {
|
||||
this._dataBuffered = true;
|
||||
}
|
||||
return dataSent;
|
||||
};
|
||||
|
||||
CopyFromStream.prototype._endIfNeedAndPossible = function () {
|
||||
if(this._connection && this._finished && !this._finishedSent) {
|
||||
this._finishedSent = true;
|
||||
this._connection.endCopyFrom();
|
||||
}
|
||||
};
|
||||
|
||||
CopyFromStream.prototype.write = function (string, encoding) {
|
||||
if(this._error || this._finished) {
|
||||
return false;
|
||||
}
|
||||
return this._handleChunk.apply(this, arguments);
|
||||
};
|
||||
|
||||
CopyFromStream.prototype.end = function (string, encoding) {
|
||||
if(this._error || this._finished) {
|
||||
return false;
|
||||
}
|
||||
this._finished = true;
|
||||
if(string !== undefined) {
|
||||
this._handleChunk.apply(this, arguments);
|
||||
}
|
||||
this._endIfNeedAndPossible();
|
||||
};
|
||||
|
||||
CopyFromStream.prototype.error = function (error) {
|
||||
if(this._error || this._closed) {
|
||||
return false;
|
||||
}
|
||||
this._error = true;
|
||||
this.emit('error', error);
|
||||
};
|
||||
|
||||
CopyFromStream.prototype.close = function () {
|
||||
if(this._error || this._closed) {
|
||||
return false;
|
||||
}
|
||||
if(!this._finishedSent) {
|
||||
throw new Error("seems to be error in code that uses CopyFromStream");
|
||||
}
|
||||
this.emit("close");
|
||||
};
|
||||
|
||||
var CopyToStream = function () {
|
||||
Stream.apply(this, arguments);
|
||||
this._error = false;
|
||||
this._finished = false;
|
||||
this._paused = false;
|
||||
this.buffer = new Buffer(0);
|
||||
this._encoding = undefined;
|
||||
this.__defineGetter__('readable', this._readable.bind(this));
|
||||
};
|
||||
|
||||
util.inherits(CopyToStream, Stream);
|
||||
|
||||
CopyToStream.prototype._outputDataChunk = function () {
|
||||
if(this._paused) {
|
||||
return;
|
||||
}
|
||||
if(this.buffer.length) {
|
||||
if(this._encoding) {
|
||||
this.emit('data', this.buffer.toString(this._encoding));
|
||||
} else {
|
||||
this.emit('data', this.buffer);
|
||||
}
|
||||
this.buffer = new Buffer(0);
|
||||
}
|
||||
};
|
||||
|
||||
CopyToStream.prototype._readable = function () {
|
||||
return !this._finished && !this._error;
|
||||
};
|
||||
|
||||
CopyToStream.prototype.error = function (error) {
|
||||
if(!this.readable) {
|
||||
return false;
|
||||
}
|
||||
this._error = error;
|
||||
if(!this._paused) {
|
||||
this.emit('error', error);
|
||||
}
|
||||
};
|
||||
|
||||
CopyToStream.prototype.close = function () {
|
||||
if(!this.readable) {
|
||||
return false;
|
||||
}
|
||||
this._finished = true;
|
||||
if(!this._paused) {
|
||||
this.emit("end");
|
||||
}
|
||||
};
|
||||
|
||||
CopyToStream.prototype.handleChunk = function (chunk) {
|
||||
var tmpBuffer;
|
||||
if(!this.readable) {
|
||||
return;
|
||||
}
|
||||
if(!this.buffer.length) {
|
||||
this.buffer = chunk;
|
||||
} else {
|
||||
tmpBuffer = new Buffer(this.buffer.length + chunk.length);
|
||||
this.buffer.copy(tmpBuffer);
|
||||
chunk.copy(tmpBuffer, this.buffer.length);
|
||||
this.buffer = tmpBuffer;
|
||||
}
|
||||
this._outputDataChunk();
|
||||
};
|
||||
|
||||
CopyToStream.prototype.pause = function () {
|
||||
if(!this.readable) {
|
||||
return false;
|
||||
}
|
||||
this._paused = true;
|
||||
};
|
||||
|
||||
CopyToStream.prototype.resume = function () {
|
||||
if(!this._paused) {
|
||||
return false;
|
||||
}
|
||||
this._paused = false;
|
||||
this._outputDataChunk();
|
||||
if(this._error) {
|
||||
return this.emit('error', this._error);
|
||||
}
|
||||
if(this._finished) {
|
||||
return this.emit('end');
|
||||
}
|
||||
};
|
||||
|
||||
CopyToStream.prototype.setEncoding = function (encoding) {
|
||||
this._encoding = encoding;
|
||||
};
|
||||
|
||||
module.exports = {
|
||||
CopyFromStream: CopyFromStream,
|
||||
CopyToStream: CopyToStream
|
||||
};
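CopyToStream buffers incoming COPY chunks while paused and flushes them to 'data' listeners on resume; setEncoding switches the emitted chunks from Buffers to strings. A small standalone sketch (the require path is an assumption):

var CopyToStream = require('./lib/copystream').CopyToStream;

var out = new CopyToStream();
out.setEncoding('utf8');
out.on('data', function(chunk) { console.log('rows:', JSON.stringify(chunk)); });
out.on('end', function() { console.log('copy finished'); });

out.pause();
out.handleChunk(new Buffer('1\tone\n')); //held in the internal buffer while paused
out.handleChunk(new Buffer('2\ttwo\n'));
out.resume();                            //emits both rows as one utf8 string
out.close();                             //emits 'end'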
|
||||
@ -1,4 +1,7 @@
|
||||
module.exports = {
|
||||
var defaults = module.exports = {
|
||||
// database host defaults to localhost
|
||||
host: 'localhost',
|
||||
|
||||
//database user's name
|
||||
user: process.env.USER,
|
||||
|
||||
@ -7,14 +10,18 @@ module.exports = {
|
||||
|
||||
//database user's password
|
||||
password: null,
|
||||
|
||||
|
||||
//database port
|
||||
port: 5432,
|
||||
|
||||
//number of rows to return at a time from a prepared statement's
|
||||
//portal. 0 will return all rows at once
|
||||
rows: 0,
|
||||
|
||||
|
||||
// binary result mode
|
||||
binary: false,
|
||||
|
||||
//Connection pool options - see https://github.com/coopernurse/node-pool
|
||||
//number of connections to use in connection pool
|
||||
//0 will disable connection pooling
|
||||
poolSize: 10,
|
||||
@ -26,6 +33,13 @@ module.exports = {
|
||||
//frequency to check for idle clients within the client pool
|
||||
reapIntervalMillis: 1000,
|
||||
|
||||
// binary result mode
|
||||
binary: false
|
||||
}
|
||||
//pool log function / boolean
|
||||
poolLog: false,
|
||||
|
||||
client_encoding: ""
|
||||
};
|
||||
|
||||
//parse int8 so you can get your count values as actual numbers
|
||||
module.exports.__defineSetter__("parseInt8", function(val) {
|
||||
require('./types').setTypeParser(20, 'text', val ? parseInt : function(val) { return val; });
|
||||
});
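The setter above wires defaults.parseInt8 to the type parser for oid 20 (int8/bigint), so switching it on makes count(*) and other bigint values arrive as JavaScript numbers rather than strings (at the cost of precision above 2^53). Roughly:

var pg = require('pg');       //however the module is loaded in your project
pg.defaults.parseInt8 = true; //int8 columns now run through parseInt
//with the flag off (the default) a count arrives as the string "42";
//with it on, the same value arrives as the number 42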
|
||||
|
||||
112
lib/index.js
112
lib/index.js
@ -2,100 +2,66 @@ var EventEmitter = require('events').EventEmitter;
|
||||
var util = require('util');
|
||||
var Client = require(__dirname+'/client');
|
||||
var defaults = require(__dirname + '/defaults');
|
||||
|
||||
//external genericPool module
|
||||
var genericPool = require('generic-pool');
|
||||
|
||||
//cache of existing client pools
|
||||
var pools = {};
|
||||
var pool = require(__dirname + '/pool');
|
||||
var types = require(__dirname + '/types/');
|
||||
var Connection = require(__dirname + '/connection');
|
||||
|
||||
var PG = function(clientConstructor) {
|
||||
EventEmitter.call(this);
|
||||
this.Client = clientConstructor;
|
||||
this.Connection = require(__dirname + '/connection');
|
||||
this.defaults = defaults;
|
||||
this.Client = pool.Client = clientConstructor;
|
||||
this.Query = this.Client.Query;
|
||||
this.pools = pool;
|
||||
this.types = types;
|
||||
this.Connection = Connection;
|
||||
};
|
||||
|
||||
util.inherits(PG, EventEmitter);
|
||||
|
||||
PG.prototype.end = function() {
|
||||
Object.keys(pools).forEach(function(name) {
|
||||
var pool = pools[name];
|
||||
var self = this;
|
||||
Object.keys(self.pools.all).forEach(function(key) {
|
||||
var pool = self.pools.all[key];
|
||||
pool.drain(function() {
|
||||
pool.destroyAllNow();
|
||||
});
|
||||
})
|
||||
}
|
||||
});
|
||||
};
|
||||
|
||||
PG.prototype.connect = function(config, callback) {
|
||||
var self = this;
|
||||
var c = config;
|
||||
var cb = callback;
|
||||
//allow for no config to be passed
|
||||
if(typeof c === 'function') {
|
||||
cb = c;
|
||||
c = defaults;
|
||||
if(typeof config == "function") {
|
||||
callback = config;
|
||||
config = null;
|
||||
}
|
||||
|
||||
//get unique pool name even if object was used as config
|
||||
var poolName = typeof(c) === 'string' ? c : c.user+c.host+c.port+c.database;
|
||||
var pool = pools[poolName];
|
||||
|
||||
if(pool) return pool.acquire(cb);
|
||||
|
||||
var pool = pools[poolName] = genericPool.Pool({
|
||||
name: poolName,
|
||||
create: function(callback) {
|
||||
var client = new self.Client(c);
|
||||
client.connect();
|
||||
|
||||
var connectError = function(err) {
|
||||
client.removeListener('connect', connectSuccess);
|
||||
callback(err, null);
|
||||
};
|
||||
|
||||
var connectSuccess = function() {
|
||||
client.removeListener('error', connectError);
|
||||
|
||||
//handle connected client background errors by emitting event
|
||||
//via the pg object and then removing errored client from the pool
|
||||
client.on('error', function(e) {
|
||||
self.emit('error', e, client);
|
||||
pool.destroy(client);
|
||||
});
|
||||
callback(null, client);
|
||||
};
|
||||
|
||||
client.once('connect', connectSuccess);
|
||||
client.once('error', connectError);
|
||||
client.on('drain', function() {
|
||||
pool.release(client);
|
||||
});
|
||||
},
|
||||
destroy: function(client) {
|
||||
client.end();
|
||||
},
|
||||
max: defaults.poolSize,
|
||||
idleTimeoutMillis: defaults.poolIdleTimeout,
|
||||
reapIntervalMillis: defaults.reapIntervalMillis
|
||||
});
|
||||
return pool.acquire(cb);
|
||||
}
|
||||
var pool = this.pools.getOrCreate(config);
|
||||
pool.connect(callback);
|
||||
if(!pool.listeners('error').length) {
|
||||
//propagate errors up to pg object
|
||||
pool.on('error', this.emit.bind(this, 'error'));
|
||||
}
|
||||
};
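connect now just delegates to the pool module: the callback receives the pooled client plus a third "done" argument that returns the client to the pool, or destroys it when called with an error. A usage sketch, with the connection string purely illustrative:

var pg = require('pg');
pg.connect('postgres://localhost/postgres', function(err, client, done) {
  if(err) return console.error('could not acquire a client', err);
  client.query('SELECT NOW() AS now', function(err, result) {
    done(err); //release on success, destroy the client on error
    if(err) return console.error('query failed', err);
    console.log(result.rows[0].now);
  });
});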
|
||||
|
||||
// cancel the query run by the given client
|
||||
PG.prototype.cancel = function(config, client, query) {
|
||||
var c = config;
|
||||
//allow for no config to be passed
|
||||
if(typeof c === 'function')
|
||||
if(typeof c === 'function') {
|
||||
c = defaults;
|
||||
}
|
||||
var cancellingClient = new this.Client(c);
|
||||
cancellingClient.cancel(client, query);
|
||||
};
|
||||
|
||||
var forceNative = Object.prototype.hasOwnProperty.call(process.env, 'NODE_PG_FORCE_NATIVE');
|
||||
if (forceNative) {
|
||||
module.exports = new PG(require(__dirname + '/native'));
|
||||
} else {
|
||||
module.exports = new PG(Client);
|
||||
|
||||
//lazy require native module...the native module may not have installed
|
||||
module.exports.__defineGetter__("native", function() {
|
||||
delete module.exports.native;
|
||||
module.exports.native = new PG(require(__dirname + '/native'));
|
||||
return module.exports.native;
|
||||
});
|
||||
}
|
||||
|
||||
module.exports = new PG(Client);
|
||||
|
||||
//lazy require native module...the native module may not have installed
|
||||
module.exports.__defineGetter__("native", function() {
|
||||
delete module.exports.native;
|
||||
return (module.exports.native = new PG(require(__dirname + '/native')));
|
||||
})
|
||||
|
||||
@ -1,71 +1,114 @@
|
||||
//require the c++ bindings & export to javascript
|
||||
var EventEmitter = require('events').EventEmitter;
|
||||
var utils = require(__dirname + "/../utils");
|
||||
|
||||
var ConnectionParameters = require(__dirname + '/../connection-parameters');
|
||||
var CopyFromStream = require(__dirname + '/../copystream').CopyFromStream;
|
||||
var CopyToStream = require(__dirname + '/../copystream').CopyToStream;
|
||||
var JsClient = require(__dirname + '/../client'); // used to import JS escape functions
|
||||
|
||||
var binding;
|
||||
|
||||
try{
|
||||
//TODO remove on v1.0.0
|
||||
try {
|
||||
//v0.5.x
|
||||
binding = require(__dirname + '/../../build/Release/binding.node');
|
||||
binding = require(__dirname + '/../../build/Release/binding.node');
|
||||
} catch(e) {
|
||||
//v0.4.x
|
||||
binding = require(__dirname + '/../../build/default/binding');
|
||||
binding = require(__dirname + '/../../build/default/binding');
|
||||
}
|
||||
|
||||
var Connection = binding.Connection;
|
||||
var types = require(__dirname + "/../types");
|
||||
var NativeQuery = require(__dirname + '/query');
|
||||
|
||||
var EventEmitter = require('events').EventEmitter;
|
||||
var p = Connection.prototype;
|
||||
for(var k in EventEmitter.prototype) {
|
||||
p[k] = EventEmitter.prototype[k];
|
||||
Connection.prototype[k] = EventEmitter.prototype[k];
|
||||
}
|
||||
|
||||
var nativeConnect = p.connect;
|
||||
var nativeConnect = Connection.prototype.connect;
|
||||
|
||||
p.connect = function(cb) {
|
||||
Connection.prototype.connect = function(cb) {
|
||||
var self = this;
|
||||
utils.buildLibpqConnectionString(this._config, function(err, conString) {
|
||||
this.connectionParameters.getLibpqConnectionString(function(err, conString) {
|
||||
if(err) {
|
||||
return cb ? cb(err) : self.emit('error', err);
|
||||
}
|
||||
nativeConnect.call(self, conString);
|
||||
if(cb) {
|
||||
var errCallback;
|
||||
var connectCallback = function() {
|
||||
//remove single-fire connection error callback
|
||||
self.removeListener('error', errCallback);
|
||||
cb(null);
|
||||
}
|
||||
};
|
||||
errCallback = function(err) {
|
||||
//remove single-fire connection success callback
|
||||
self.removeListener('connect', connectCallback);
|
||||
cb(err);
|
||||
}
|
||||
};
|
||||
self.once('connect', connectCallback);
|
||||
self.once('error', errCallback);
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
p.query = function(config, values, callback) {
|
||||
var q = new NativeQuery(config, values, callback);
|
||||
this._queryQueue.push(q);
|
||||
this._pulseQueryQueue();
|
||||
return q;
|
||||
}
|
||||
|
||||
var nativeCancel = p.cancel;
|
||||
|
||||
p.cancel = function(client, query) {
|
||||
if (client._activeQuery == query)
|
||||
this.connect(nativeCancel.bind(client));
|
||||
else if (client._queryQueue.indexOf(query) != -1)
|
||||
client._queryQueue.splice(client._queryQueue.indexOf(query), 1);
|
||||
nativeConnect.call(self, conString);
|
||||
});
|
||||
};
|
||||
|
||||
p._pulseQueryQueue = function(initialConnection) {
|
||||
Connection.prototype._copy = function (text, stream) {
|
||||
var q = new NativeQuery(text, function (error) {
|
||||
if (error) {
|
||||
q.stream.error(error);
|
||||
} else {
|
||||
q.stream.close();
|
||||
}
|
||||
});
|
||||
q.stream = stream;
|
||||
this._queryQueue.push(q);
|
||||
this._pulseQueryQueue();
|
||||
return q.stream;
|
||||
};
|
||||
|
||||
Connection.prototype.copyFrom = function (text) {
|
||||
return this._copy(text, new CopyFromStream());
|
||||
};
|
||||
|
||||
Connection.prototype.copyTo = function (text) {
|
||||
return this._copy(text, new CopyToStream());
|
||||
};
|
||||
|
||||
Connection.prototype.sendCopyFromChunk = function (chunk) {
|
||||
this._sendCopyFromChunk(chunk);
|
||||
};
|
||||
|
||||
Connection.prototype.endCopyFrom = function (msg) {
|
||||
this._endCopyFrom(msg);
|
||||
};
|
||||
|
||||
// use JS version if native version undefined
|
||||
// happens when PG version < 9.0.0
|
||||
if (!Connection.prototype.escapeIdentifier) {
|
||||
Connection.prototype.escapeIdentifier = JsClient.prototype.escapeIdentifier;
|
||||
}
|
||||
if (!Connection.prototype.escapeLiteral) {
|
||||
Connection.prototype.escapeLiteral = JsClient.prototype.escapeLiteral;
|
||||
}
|
||||
|
||||
Connection.prototype.query = function(config, values, callback) {
|
||||
var query = (config instanceof NativeQuery) ? config :
|
||||
new NativeQuery(config, values, callback);
|
||||
this._queryQueue.push(query);
|
||||
this._pulseQueryQueue();
|
||||
return query;
|
||||
};
|
||||
|
||||
var nativeCancel = Connection.prototype.cancel;
|
||||
|
||||
Connection.prototype.cancel = function(client, query) {
|
||||
if (client._activeQuery == query) {
|
||||
this.connect(nativeCancel.bind(client));
|
||||
} else if (client._queryQueue.indexOf(query) != -1) {
|
||||
client._queryQueue.splice(client._queryQueue.indexOf(query), 1);
|
||||
}
|
||||
};
|
||||
|
||||
Connection.prototype._pulseQueryQueue = function(initialConnection) {
|
||||
if(!this._connected) {
|
||||
return;
|
||||
}
|
||||
@ -75,7 +118,12 @@ p._pulseQueryQueue = function(initialConnection) {
|
||||
var query = this._queryQueue.shift();
|
||||
if(!query) {
|
||||
if(!initialConnection) {
|
||||
this._drainPaused ? this._drainPaused++ : this.emit('drain');
|
||||
//TODO remove all the pause-drain stuff for v1.0
|
||||
if(this._drainPaused) {
|
||||
this._drainPaused++;
|
||||
} else {
|
||||
this.emit('drain');
|
||||
}
|
||||
}
|
||||
return;
|
||||
}
|
||||
@ -88,50 +136,66 @@ p._pulseQueryQueue = function(initialConnection) {
|
||||
this._namedQueries[query.name] = true;
|
||||
this._sendPrepare(query.name, query.text, (query.values||[]).length);
|
||||
}
|
||||
}
|
||||
else if(query.values) {
|
||||
} else if(query.values) {
|
||||
//call native function
|
||||
this._sendQueryWithParams(query.text, query.values)
|
||||
this._sendQueryWithParams(query.text, query.values);
|
||||
} else {
|
||||
//call native function
|
||||
this._sendQuery(query.text);
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
p.pauseDrain = function() {
|
||||
//TODO remove all the pause-drain stuff for v1.0
|
||||
Connection.prototype.pauseDrain = function() {
|
||||
this._drainPaused = 1;
|
||||
};
|
||||
|
||||
p.resumeDrain = function() {
|
||||
//TODO remove all the pause-drain stuff for v1.0
|
||||
Connection.prototype.resumeDrain = function() {
|
||||
if(this._drainPaused > 1) {
|
||||
this.emit('drain')
|
||||
};
|
||||
this.emit('drain');
|
||||
}
|
||||
this._drainPaused = 0;
|
||||
};
|
||||
|
||||
Connection.prototype.sendCopyFail = function(msg) {
|
||||
this.endCopyFrom(msg);
|
||||
};
|
||||
|
||||
var clientBuilder = function(config) {
|
||||
config = config || {};
|
||||
var connection = new Connection();
|
||||
EventEmitter.call(connection);
|
||||
connection._queryQueue = [];
|
||||
connection._namedQueries = {};
|
||||
connection._activeQuery = null;
|
||||
connection._config = utils.normalizeConnectionInfo(config);
|
||||
connection.connectionParameters = new ConnectionParameters(config);
|
||||
//attach properties to normalize interface with pure js client
|
||||
connection.user = connection._config.user;
|
||||
connection.password = connection._config.password;
|
||||
connection.database = connection._config.database;
|
||||
connection.host = connection._config.host;
|
||||
connection.port = connection._config.port;
|
||||
connection.user = connection.connectionParameters.user;
|
||||
connection.password = connection.connectionParameters.password;
|
||||
connection.database = connection.connectionParameters.database;
|
||||
connection.host = connection.connectionParameters.host;
|
||||
connection.port = connection.connectionParameters.port;
|
||||
connection.on('connect', function() {
|
||||
connection._connected = true;
|
||||
connection._pulseQueryQueue(true);
|
||||
});
|
||||
|
||||
connection.on('_rowDescription', function(rowDescription) {
|
||||
connection._activeQuery.handleRowDescription(rowDescription);
|
||||
});
|
||||
|
||||
//proxy some events to active query
|
||||
connection.on('_row', function(row) {
|
||||
connection._activeQuery.handleRow(row);
|
||||
});
|
||||
|
||||
connection.on('_cmdStatus', function(status) {
|
||||
//set this here so we can pass it to the query
|
||||
//when the query completes
|
||||
connection._lastMeta = status;
|
||||
});
|
||||
|
||||
//TODO: emit more native error properties (make it match js error)
|
||||
connection.on('_error', function(err) {
|
||||
//create Error object from object literal
|
||||
@ -149,20 +213,55 @@ var clientBuilder = function(config) {
|
||||
}
|
||||
});
|
||||
|
||||
connection.on('_end', function() {
|
||||
process.nextTick(function() {
|
||||
if(connection._activeQuery) {
|
||||
connection._activeQuery.handleError(new Error("Connection was ended during query"));
|
||||
}
|
||||
connection.emit('end');
|
||||
});
|
||||
});
|
||||
|
||||
connection.on('_readyForQuery', function() {
|
||||
var error;
|
||||
var q = this._activeQuery;
|
||||
//a named query finished being prepared
|
||||
if(this._namedQuery) {
|
||||
this._namedQuery = false;
|
||||
this._sendQueryPrepared(q.name, q.values||[]);
|
||||
} else {
|
||||
connection._activeQuery.handleReadyForQuery();
|
||||
//try/catch/rethrow to ensure exceptions don't prevent the queryQueue from
|
||||
//being processed
|
||||
try{
|
||||
connection._activeQuery.handleReadyForQuery(connection._lastMeta);
|
||||
} catch(e) {
|
||||
error = e;
|
||||
}
|
||||
connection._activeQuery = null;
|
||||
connection._pulseQueryQueue();
|
||||
if(error) throw error;
|
||||
}
|
||||
});
|
||||
|
||||
connection.on('copyInResponse', function () {
|
||||
//connection is ready to accept chunks
|
||||
//start to send data from stream
|
||||
connection._activeQuery.streamData(connection);
|
||||
});
|
||||
connection.on('copyOutResponse', function(msg) {
|
||||
if (connection._activeQuery.stream === undefined) {
|
||||
connection._activeQuery._canceledDueToError = new Error('No destination stream defined');
|
||||
(new clientBuilder({port: connection.port, host: connection.host})).cancel(connection, connection._activeQuery);
|
||||
}
|
||||
});
|
||||
connection.on('copyData', function (chunk) {
|
||||
//receive chunk from connection
|
||||
//move it to stream
|
||||
connection._activeQuery.handleCopyFromChunk(chunk);
|
||||
});
|
||||
return connection;
|
||||
};
|
||||
|
||||
// expose a Query constructor
|
||||
clientBuilder.Query = NativeQuery;
|
||||
|
||||
module.exports = clientBuilder;
|
||||
|
||||
@ -1,92 +1,95 @@
|
||||
var EventEmitter = require('events').EventEmitter;
|
||||
var util = require('util');
|
||||
|
||||
var types = require(__dirname + "/../types");
|
||||
var types = require(__dirname + '/../types/');
|
||||
var utils = require(__dirname + '/../utils');
|
||||
var Result = require(__dirname + '/../result');
|
||||
|
||||
//event emitter proxy
|
||||
var NativeQuery = function(text, values, callback) {
|
||||
//TODO there are better ways to detect overloads
|
||||
if(typeof text == 'object') {
|
||||
this.text = text.text;
|
||||
this.values = text.values;
|
||||
this.name = text.name;
|
||||
if(typeof values === 'function') {
|
||||
this.callback = values;
|
||||
} else if(values) {
|
||||
this.values = values;
|
||||
this.callback = callback;
|
||||
}
|
||||
} else {
|
||||
this.text = text;
|
||||
this.values = values;
|
||||
this.callback = callback;
|
||||
if(typeof values == 'function') {
|
||||
this.values = null;
|
||||
this.callback = values;
|
||||
}
|
||||
}
|
||||
if(this.callback) {
|
||||
this.rows = [];
|
||||
}
|
||||
//normalize values
|
||||
if(this.values) {
|
||||
for(var i = 0, len = this.values.length; i < len; i++) {
|
||||
var item = this.values[i];
|
||||
switch(typeof item) {
|
||||
case 'undefined':
|
||||
this.values[i] = null;
|
||||
break;
|
||||
case 'object':
|
||||
this.values[i] = item === null ? null : JSON.stringify(item);
|
||||
break;
|
||||
case 'string':
|
||||
//value already string
|
||||
break;
|
||||
default:
|
||||
//numbers
|
||||
this.values[i] = item.toString();
|
||||
}
|
||||
}
|
||||
var NativeQuery = function(config, values, callback) {
|
||||
// use of "new" optional
|
||||
if (!(this instanceof NativeQuery)) {
|
||||
return new NativeQuery(config, values, callback);
|
||||
}
|
||||
|
||||
EventEmitter.call(this);
|
||||
|
||||
var c = utils.normalizeQueryConfig(config, values, callback);
|
||||
|
||||
this.name = c.name;
|
||||
this.text = c.text;
|
||||
this.values = c.values;
|
||||
this.callback = c.callback;
|
||||
|
||||
this._result = new Result(config.rowMode);
|
||||
this._addedFields = false;
|
||||
//normalize values
|
||||
if(this.values) {
|
||||
for(var i = 0, len = this.values.length; i < len; i++) {
|
||||
this.values[i] = utils.prepareValue(this.values[i]);
|
||||
}
|
||||
}
|
||||
this._canceledDueToError = false;
|
||||
};
|
||||
|
||||
util.inherits(NativeQuery, EventEmitter);
|
||||
var p = NativeQuery.prototype;
|
||||
|
||||
//maps from native rowdata into api compatible row object
|
||||
var mapRowData = function(row) {
|
||||
var result = {};
|
||||
for(var i = 0, len = row.length; i < len; i++) {
|
||||
var item = row[i];
|
||||
result[item.name] = item.value == null ? null : types.getTypeParser(item.type, 'text')(item.value);
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
p.handleRow = function(rowData) {
|
||||
var row = mapRowData(rowData);
|
||||
if(this.callback) {
|
||||
this.rows.push(row);
|
||||
}
|
||||
this.emit('row', row);
|
||||
NativeQuery.prototype.handleRowDescription = function(rowDescription) {
|
||||
this._result.addFields(rowDescription);
|
||||
};
|
||||
|
||||
p.handleError = function(error) {
|
||||
NativeQuery.prototype.handleRow = function(rowData) {
|
||||
var row = this._result.parseRow(rowData);
|
||||
if(this.callback) {
|
||||
this.callback(error);
|
||||
this._result.addRow(row);
|
||||
}
|
||||
this.emit('row', row, this._result);
|
||||
};
|
||||
|
||||
NativeQuery.prototype.handleError = function(error) {
|
||||
if (this._canceledDueToError) {
|
||||
error = this._canceledDueToError;
|
||||
this._canceledDueToError = false;
|
||||
}
|
||||
if(this.callback) {
|
||||
var cb = this.callback;
|
||||
//remove callback to prevent double call on readyForQuery
|
||||
this.callback = null;
|
||||
cb(error);
|
||||
} else {
|
||||
this.emit('error', error);
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
p.handleReadyForQuery = function() {
|
||||
if(this.callback) {
|
||||
this.callback(null, { rows: this.rows });
|
||||
NativeQuery.prototype.handleReadyForQuery = function(meta) {
|
||||
if (this._canceledDueToError) {
|
||||
return this.handleError(this._canceledDueToError);
|
||||
}
|
||||
this.emit('end');
|
||||
if(meta) {
|
||||
this._result.addCommandComplete(meta);
|
||||
}
|
||||
if(this.callback) {
|
||||
this.callback(null, this._result);
|
||||
}
|
||||
this.emit('end', this._result);
|
||||
};
|
||||
|
||||
NativeQuery.prototype.streamData = function (connection) {
|
||||
if(this.stream) {
|
||||
this.stream.startStreamingToConnection(connection);
|
||||
}
|
||||
else {
|
||||
connection.sendCopyFail('No source stream defined');
|
||||
}
|
||||
};
|
||||
|
||||
NativeQuery.prototype.handleCopyFromChunk = function (chunk) {
|
||||
if(this.stream) {
|
||||
this.stream.handleChunk(chunk);
|
||||
}
|
||||
//if there are no stream (for example when copy to query was sent by
|
||||
//query method instead of copyTo) error will be handled
|
||||
//on copyOutResponse event, so silently ignore this error here
|
||||
};
|
||||
|
||||
module.exports = NativeQuery;
|
||||
|
||||
68
lib/pool.js
Normal file
68
lib/pool.js
Normal file
@ -0,0 +1,68 @@
|
||||
var EventEmitter = require('events').EventEmitter;
|
||||
|
||||
var defaults = require(__dirname + '/defaults');
|
||||
var genericPool = require('generic-pool');
|
||||
|
||||
var pools = {
|
||||
//dictionary of all key:pool pairs
|
||||
all: {},
|
||||
//reference to the client constructor - can override in tests or for require('pg').native
|
||||
Client: require(__dirname + '/client'),
|
||||
getOrCreate: function(clientConfig) {
|
||||
clientConfig = clientConfig || {};
|
||||
var name = JSON.stringify(clientConfig);
|
||||
var pool = pools.all[name];
|
||||
if(pool) {
|
||||
return pool;
|
||||
}
|
||||
pool = genericPool.Pool({
|
||||
name: name,
|
||||
max: defaults.poolSize,
|
||||
idleTimeoutMillis: defaults.poolIdleTimeout,
|
||||
reapIntervalMillis: defaults.reapIntervalMillis,
|
||||
log: defaults.poolLog,
|
||||
create: function(cb) {
|
||||
var client = new pools.Client(clientConfig);
|
||||
client.connect(function(err) {
|
||||
if(err) return cb(err, null);
|
||||
|
||||
//handle connected client background errors by emitting event
|
||||
//via the pg object and then removing errored client from the pool
|
||||
client.on('error', function(e) {
|
||||
pool.emit('error', e, client);
|
||||
pool.destroy(client);
|
||||
});
|
||||
|
||||
return cb(null, client);
|
||||
});
|
||||
},
|
||||
destroy: function(client) {
|
||||
client.end();
|
||||
}
|
||||
});
|
||||
pools.all[name] = pool;
|
||||
//mixin EventEmitter to pool
|
||||
EventEmitter.call(pool);
|
||||
for(var key in EventEmitter.prototype) {
|
||||
if(EventEmitter.prototype.hasOwnProperty(key)) {
|
||||
pool[key] = EventEmitter.prototype[key];
|
||||
}
|
||||
}
|
||||
//monkey-patch with connect method
|
||||
pool.connect = function(cb) {
|
||||
pool.acquire(function(err, client) {
|
||||
if(err) return cb(err, null, function() {/*NOOP*/});
|
||||
cb(null, client, function(err) {
|
||||
if(err) {
|
||||
pool.destroy(client);
|
||||
} else {
|
||||
pool.release(client);
|
||||
}
|
||||
});
|
||||
});
|
||||
};
|
||||
return pool;
|
||||
}
|
||||
};
|
||||
|
||||
module.exports = pools;
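getOrCreate keys pools by the JSON-serialized client config, so repeated calls with an identical config share one generic-pool instance, and background errors on pooled clients are re-emitted on the pool itself. Sketch, with the require path assumed:

var pools = require('./lib/pool');

var a = pools.getOrCreate({ host: 'localhost', database: 'postgres' });
var b = pools.getOrCreate({ host: 'localhost', database: 'postgres' });
console.log(a === b);   //true - same JSON key, same pool
console.log(pools.all); //every live pool, keyed by its serialized config

a.on('error', function(err, client) {
  console.error('idle client errored and was removed from the pool', err.message);
});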
|
||||
113
lib/query.js
113
lib/query.js
@ -1,31 +1,49 @@
|
||||
var EventEmitter = require('events').EventEmitter;
|
||||
var util = require('util');
|
||||
|
||||
var Result = require(__dirname + "/result");
|
||||
var Types = require(__dirname + "/types");
|
||||
var Result = require(__dirname + '/result');
|
||||
var Types = require(__dirname + '/types/');
|
||||
var utils = require(__dirname + '/utils');
|
||||
|
||||
var Query = function(config, values, callback) {
|
||||
// use of "new" optional
|
||||
if(!(this instanceof Query)) { return new Query(config, values, callback); }
|
||||
|
||||
config = utils.normalizeQueryConfig(config, values, callback);
|
||||
|
||||
var Query = function(config) {
|
||||
this.text = config.text;
|
||||
this.values = config.values;
|
||||
this.rows = config.rows;
|
||||
this.types = config.types;
|
||||
this.name = config.name;
|
||||
this.binary = config.binary;
|
||||
this.stream = config.stream;
|
||||
//use unique portal name each time
|
||||
this.portal = config.portal || ""
|
||||
this.portal = config.portal || "";
|
||||
this.callback = config.callback;
|
||||
this._fieldNames = [];
|
||||
this._fieldConverters = [];
|
||||
this._result = new Result();
|
||||
this._result = new Result(config.rowMode);
|
||||
this.isPreparedStatement = false;
|
||||
this._canceledDueToError = false;
|
||||
EventEmitter.call(this);
|
||||
};
|
||||
|
||||
util.inherits(Query, EventEmitter);
|
||||
var p = Query.prototype;
|
||||
|
||||
p.requiresPreparation = function() {
|
||||
return (this.values || 0).length > 0 || this.name || this.rows || this.binary;
|
||||
Query.prototype.requiresPreparation = function() {
|
||||
//named queries must always be prepared
|
||||
if(this.name) { return true; }
|
||||
//always prepare if a maximum number of rows is expected per
|
||||
//portal execution
|
||||
if(this.rows) { return true; }
|
||||
//don't prepare empty text queries
|
||||
if(!this.text) { return false; }
|
||||
//binary should be prepared to specify results should be in binary
|
||||
//unless there are no parameters
|
||||
if(this.binary && !this.values) { return false; }
|
||||
//prepare if there are values
|
||||
return (this.values || 0).length > 0;
|
||||
};
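Spelled out, the rules above mean named queries, row-limited portals, and parameterized queries go through the extended-query protocol, while plain text (even with binary set but no values) uses the simple query path. Using the Query constructor above, and assuming normalizeQueryConfig passes an object config straight through:

new Query({ text: 'SELECT 1' }).requiresPreparation();                    //false
new Query({ text: 'SELECT $1::int', values: [1] }).requiresPreparation(); //true
new Query({ name: 'fetch-row', text: 'SELECT 1' }).requiresPreparation(); //true
new Query({ text: 'SELECT 1', rows: 100 }).requiresPreparation();         //true
new Query({ text: 'SELECT 1', binary: true }).requiresPreparation();      //false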
|
||||
|
||||
|
||||
@ -36,62 +54,50 @@ var noParse = function(val) {
|
||||
//associates row metadata from the supplied
|
||||
//message with this query object
|
||||
//metadata used when parsing row results
|
||||
p.handleRowDescription = function(msg) {
|
||||
this._fieldNames = [];
|
||||
this._fieldConverters = [];
|
||||
var len = msg.fields.length;
|
||||
for(var i = 0; i < len; i++) {
|
||||
var field = msg.fields[i];
|
||||
var format = field.format;
|
||||
this._fieldNames[i] = field.name;
|
||||
this._fieldConverters[i] = Types.getTypeParser(field.dataTypeID, format);
|
||||
};
|
||||
Query.prototype.handleRowDescription = function(msg) {
|
||||
this._result.addFields(msg.fields);
|
||||
};
|
||||
|
||||
p.handleDataRow = function(msg) {
|
||||
var self = this;
|
||||
var row = {};
|
||||
for(var i = 0; i < msg.fields.length; i++) {
|
||||
var rawValue = msg.fields[i];
|
||||
if(rawValue === null) {
|
||||
//leave null values alone
|
||||
row[self._fieldNames[i]] = null;
|
||||
} else {
|
||||
//convert value to javascript
|
||||
row[self._fieldNames[i]] = self._fieldConverters[i](rawValue);
|
||||
}
|
||||
}
|
||||
self.emit('row', row);
|
||||
Query.prototype.handleDataRow = function(msg) {
|
||||
var row = this._result.parseRow(msg.fields);
|
||||
this.emit('row', row, this._result);
|
||||
|
||||
//if there is a callback collect rows
|
||||
if(self.callback) {
|
||||
self._result.addRow(row);
|
||||
if(this.callback) {
|
||||
this._result.addRow(row);
|
||||
}
|
||||
};
|
||||
|
||||
p.handleCommandComplete = function(msg) {
|
||||
Query.prototype.handleCommandComplete = function(msg) {
|
||||
this._result.addCommandComplete(msg);
|
||||
};
|
||||
|
||||
p.handleReadyForQuery = function() {
|
||||
Query.prototype.handleReadyForQuery = function() {
|
||||
if(this._canceledDueToError) {
|
||||
return this.handleError(this._canceledDueToError);
|
||||
}
|
||||
if(this.callback) {
|
||||
this.callback(null, this._result);
|
||||
}
|
||||
this.emit('end', this._result);
|
||||
};
|
||||
|
||||
p.handleError = function(err) {
|
||||
Query.prototype.handleError = function(err) {
|
||||
if(this._canceledDueToError) {
|
||||
err = this._canceledDueToError;
|
||||
this._canceledDueToError = false;
|
||||
}
|
||||
//if callback supplied do not emit error event as uncaught error
|
||||
//events will bubble up to node process
|
||||
if(this.callback) {
|
||||
this.callback(err)
|
||||
this.callback(err);
|
||||
} else {
|
||||
this.emit('error', err);
|
||||
}
|
||||
this.emit('end');
|
||||
};
|
||||
|
||||
p.submit = function(connection) {
|
||||
Query.prototype.submit = function(connection) {
|
||||
var self = this;
|
||||
if(this.requiresPreparation()) {
|
||||
this.prepare(connection);
|
||||
@ -100,11 +106,11 @@ p.submit = function(connection) {
|
||||
}
|
||||
};
|
||||
|
||||
p.hasBeenParsed = function(connection) {
|
||||
Query.prototype.hasBeenParsed = function(connection) {
|
||||
return this.name && connection.parsedStatements[this.name];
|
||||
};
|
||||
|
||||
p.getRows = function(connection) {
|
||||
Query.prototype.getRows = function(connection) {
|
||||
connection.execute({
|
||||
portal: this.portalName,
|
||||
rows: this.rows
|
||||
@ -112,7 +118,7 @@ p.getRows = function(connection) {
|
||||
connection.flush();
|
||||
};
|
||||
|
||||
p.prepare = function(connection) {
|
||||
Query.prototype.prepare = function(connection) {
|
||||
var self = this;
|
||||
//prepared statements need sync to be called after each command
|
||||
//complete or when an error is encountered
|
||||
@ -124,14 +130,16 @@ p.prepare = function(connection) {
|
||||
name: self.name,
|
||||
types: self.types
|
||||
}, true);
|
||||
connection.parsedStatements[this.name] = true;
|
||||
if(this.name) {
|
||||
connection.parsedStatements[this.name] = true;
|
||||
}
|
||||
}
|
||||
|
||||
//TODO is there some better way to prepare values for the database?
|
||||
if(self.values) {
|
||||
self.values = self.values.map(function(val) {
|
||||
return (val instanceof Date) ? JSON.stringify(val) : val;
|
||||
});
|
||||
for(var i = 0, len = self.values.length; i < len; i++) {
|
||||
self.values[i] = utils.prepareValue(self.values[i]);
|
||||
}
|
||||
}
|
||||
|
||||
//http://developer.postgresql.org/pgdocs/postgres/protocol-flow.html#PROTOCOL-FLOW-EXT-QUERY
|
||||
@ -150,4 +158,17 @@ p.prepare = function(connection) {
|
||||
this.getRows(connection);
|
||||
};
|
||||
|
||||
Query.prototype.streamData = function (connection) {
|
||||
if(this.stream) this.stream.startStreamingToConnection(connection);
|
||||
else connection.sendCopyFail('No source stream defined');
|
||||
};
|
||||
|
||||
Query.prototype.handleCopyFromChunk = function (chunk) {
|
||||
if(this.stream) {
|
||||
this.stream.handleChunk(chunk);
|
||||
}
|
||||
//if there are no stream (for example when copy to query was sent by
|
||||
//query method instead of copyTo) error will be handled
|
||||
//on copyOutResponse event, so silently ignore this error here
|
||||
};
|
||||
module.exports = Query;
|
||||
|
||||
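The rewritten handlers above split row delivery between a supplied callback (rows collected on the Result) and 'row'/'end' events, and prepare() now only caches a parsed statement when the query has a name. A minimal usage sketch follows; it assumes a connected `client` from this library's public API, and the query text and statement name are placeholders.

// Callback style: handleDataRow collects rows, handleReadyForQuery invokes the callback with the Result
client.query('SELECT 1::int AS num', function(err, result) {
  if(err) { return console.error(err); }
  console.log(result.command, result.rowCount, result.rows[0].num);
});

// Event style: no callback supplied, so rows are emitted but not collected
var q = client.query('SELECT generate_series(1, 3) AS n');
q.on('row', function(row, result) { console.log(row.n); });
q.on('end', function(result) { console.log('done, rowCount =', result.rowCount); });

// Named query: only a named query is remembered in connection.parsedStatements
client.query({ name: 'fetch_n', text: 'SELECT $1::int AS n', values: [7] }, function(err, result) {
  if(err) { return console.error(err); }
  console.log(result.rows[0].n);
});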
@ -1,32 +1,94 @@
var types = require(__dirname + '/types/');

//result object returned from query
//in the 'end' event and also
//passed as second argument to provided callback
var Result = function() {
var Result = function(rowMode) {
this.command = null;
this.rowCount = null;
this.oid = null;
this.rows = [];
this.fields = [];
this._parsers = [];
this.RowCtor = null;
if(rowMode == "array") {
this.parseRow = this._parseRowAsArray;
}
};

var p = Result.prototype;

var matchRegexp = /([A-Za-z]+) (\d+ )?(\d+)?/
var matchRegexp = /([A-Za-z]+) ?(\d+ )?(\d+)?/;

//adds a command complete message
p.addCommandComplete = function(msg) {
var match = matchRegexp.exec(msg.text);
Result.prototype.addCommandComplete = function(msg) {
var match;
if(msg.text) {
//pure javascript
match = matchRegexp.exec(msg.text);
} else {
//native bindings
match = matchRegexp.exec(msg.command);
}
if(match) {
this.command = match[1];
//match 3 will only be existing on insert commands
if(match[3]) {
this.rowCount = parseInt(match[3]);
this.oid = parseInt(match[2]);
//msg.value is from native bindings
this.rowCount = parseInt(match[3] || msg.value, 10);
this.oid = parseInt(match[2], 10);
} else {
this.rowCount = parseInt(match[2]);
this.rowCount = parseInt(match[2], 10);
}
}
};

p.addRow = function(row) {
Result.prototype._parseRowAsArray = function(rowData) {
var row = [];
for(var i = 0, len = rowData.length; i < len; i++) {
var rawValue = rowData[i];
if(rawValue !== null) {
row.push(this._parsers[i](rawValue));
} else {
row.push(null);
}
}
return row;
};

//rowData is an array of text or binary values
//this turns the row into a JavaScript object
Result.prototype.parseRow = function(rowData) {
return new this.RowCtor(this._parsers, rowData);
};

Result.prototype.addRow = function(row) {
this.rows.push(row);
};

var inlineParser = function(fieldName, i) {
return "\nthis['" + fieldName + "'] = " +
"rowData[" + i + "] == null ? null : parsers[" + i + "](rowData[" + i + "]);";
};

Result.prototype.addFields = function(fieldDescriptions) {
//clears field definitions
//multiple query statements in 1 action can result in multiple sets
//of rowDescriptions...eg: 'select NOW(); select 1::int;'
//you need to reset the fields
if(this.fields.length) {
this.fields = [];
this._parsers = [];
}
var ctorBody = "";
for(var i = 0; i < fieldDescriptions.length; i++) {
var desc = fieldDescriptions[i];
this.fields.push(desc);
var parser = types.getTypeParser(desc.dataTypeID, desc.format || 'text');
this._parsers.push(parser);
//this is some craziness to compile the row result parsing
//results in ~60% speedup on large query result sets
ctorBody += inlineParser(desc.name, i);
}
this.RowCtor = Function("parsers", "rowData", ctorBody);
};

module.exports = Result;
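The addFields/inlineParser pair above compiles a per-query row constructor instead of looping over fields for every row. The sketch below illustrates that idea; the require path and field descriptions are assumptions made for illustration only.

// For the two fields below, the generated constructor body is roughly:
//   this['id'] = rowData[0] == null ? null : parsers[0](rowData[0]);
//   this['name'] = rowData[1] == null ? null : parsers[1](rowData[1]);
var Result = require('./lib/result');
var res = new Result();
res.addFields([
  { name: 'id', dataTypeID: 23, format: 'text' },   // int4 -> integer parser
  { name: 'name', dataTypeID: 25, format: 'text' }  // text -> falls back to noParse
]);
console.log(res.parseRow(['42', 'brian']));   // { id: 42, name: 'brian' }

// rowMode 'array' swaps parseRow for _parseRowAsArray, skipping the object constructor
var arrayRes = new Result('array');
arrayRes.addFields([{ name: 'id', dataTypeID: 23, format: 'text' }]);
console.log(arrayRes.parseRow(['7']));        // [ 7 ]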
@ -1,131 +0,0 @@
|
||||
//parses PostgreSQL server formatted date strings into javascript date objects
|
||||
var parseDate = function(isoDate) {
|
||||
//TODO this could do w/ a refactor
|
||||
var dateMatcher = /(\d{4})-(\d{2})-(\d{2}) (\d{2}):(\d{2}):(\d{2})(\.\d{1,})?/;
|
||||
|
||||
var match = dateMatcher.exec(isoDate);
|
||||
//could not parse date
|
||||
if(!match) {
|
||||
return null;
|
||||
}
|
||||
var year = match[1];
|
||||
var month = parseInt(match[2],10)-1;
|
||||
var day = match[3];
|
||||
var hour = parseInt(match[4],10);
|
||||
var min = parseInt(match[5],10);
|
||||
var seconds = parseInt(match[6], 10);
|
||||
|
||||
var miliString = match[7];
|
||||
var mili = 0;
|
||||
if(miliString) {
|
||||
mili = 1000 * parseFloat(miliString);
|
||||
}
|
||||
|
||||
var tZone = /([Z|+\-])(\d{2})?(\d{2})?/.exec(isoDate.split(' ')[1]);
|
||||
//minutes to adjust for timezone
|
||||
var tzAdjust = 0;
|
||||
|
||||
if(tZone) {
|
||||
var type = tZone[1];
|
||||
switch(type) {
|
||||
case 'Z': break;
|
||||
case '-':
|
||||
tzAdjust = -(((parseInt(tZone[2],10)*60)+(parseInt(tZone[3]||0,10))));
|
||||
break;
|
||||
case '+':
|
||||
tzAdjust = (((parseInt(tZone[2],10)*60)+(parseInt(tZone[3]||0,10))));
|
||||
break;
|
||||
default:
|
||||
throw new Error("Unidentifed tZone part " + type);
|
||||
}
|
||||
}
|
||||
|
||||
var utcOffset = Date.UTC(year, month, day, hour, min, seconds, mili);
|
||||
|
||||
var date = new Date(utcOffset - (tzAdjust * 60* 1000));
|
||||
return date;
|
||||
};
|
||||
|
||||
var parseBool = function(val) {
|
||||
return val === 't';
|
||||
}
|
||||
|
||||
var parseIntegerArray = function(val) {
|
||||
return JSON.parse(val.replace("{","[").replace("}","]"));
|
||||
};
|
||||
|
||||
var parseStringArray = function(val) {
|
||||
if (!val) return null;
|
||||
if (val[0] !== '{' || val[val.length-1] !== '}')
|
||||
throw "Not postgresql array! (" + arrStr + ")";
|
||||
|
||||
var x = val.substring(1, val.length - 1);
|
||||
if (x === '') return [];
|
||||
x = x.match(/(NULL|[^,]+|"((?:.|\n|\r)*?)(?!\\)"|\{((?:.|\n|\r)*?(?!\\)\}) (,|$))/mg);
|
||||
if (x === null) throw "Not postgre array";
|
||||
return x.map(function (el) {
|
||||
if (el === 'NULL') return null;
|
||||
if (el[0] === '{') return arguments.callee(el);
|
||||
if (el[0] === '\"') return el.substring(1, el.length - 1).replace(/\\(.)/g, '$1');
|
||||
return el;
|
||||
});
|
||||
};
|
||||
|
||||
|
||||
var NUM = '([+-]?\\d+)';
|
||||
var YEAR = NUM + '\\s+years?';
|
||||
var MON = NUM + '\\s+mons?';
|
||||
var DAY = NUM + '\\s+days?';
|
||||
var TIME = '([+-])?(\\d\\d):(\\d\\d):(\\d\\d)';
|
||||
var INTERVAL = [YEAR,MON,DAY,TIME].map(function(p){ return "("+p+")?" }).join('\\s*');
|
||||
|
||||
var parseInterval = function(val) {
|
||||
if (!val) return {};
|
||||
var m = new RegExp(INTERVAL).exec(val);
|
||||
var i = {};
|
||||
if (m[2]) i.years = parseInt(m[2]);
|
||||
if (m[4]) i.months = parseInt(m[4]);
|
||||
if (m[6]) i.days = parseInt(m[6]);
|
||||
if (m[9]) i.hours = parseInt(m[9]);
|
||||
if (m[10]) i.minutes = parseInt(m[10]);
|
||||
if (m[11]) i.seconds = parseInt(m[11]);
|
||||
if (m[8] == '-'){
|
||||
if (i.hours) i.hours *= -1;
|
||||
if (i.minutes) i.minutes *= -1;
|
||||
if (i.seconds) i.seconds *= -1;
|
||||
}
|
||||
for (field in i){
|
||||
if (i[field] == 0)
|
||||
delete i[field];
|
||||
}
|
||||
return i;
|
||||
};
|
||||
|
||||
var parseByteA = function(val) {
|
||||
return new Buffer(val.replace(/\\([0-7]{3})/g, function (full_match, code) {
|
||||
return String.fromCharCode(parseInt(code, 8));
|
||||
}).replace(/\\\\/g, "\\"), "binary");
|
||||
}
|
||||
|
||||
var init = function(register) {
|
||||
register(20, parseInt);
|
||||
register(21, parseInt);
|
||||
register(23, parseInt);
|
||||
register(26, parseInt);
|
||||
register(1700, parseFloat);
|
||||
register(700, parseFloat);
|
||||
register(701, parseFloat);
|
||||
register(16, parseBool);
|
||||
register(1114, parseDate);
|
||||
register(1184, parseDate);
|
||||
register(1007, parseIntegerArray);
|
||||
register(1016, parseIntegerArray);
|
||||
register(1008, parseStringArray);
|
||||
register(1009, parseStringArray);
|
||||
register(1186, parseInterval);
|
||||
register(17, parseByteA);
|
||||
};
|
||||
|
||||
module.exports = {
|
||||
init: init,
|
||||
};
|
||||
97
lib/types/arrayParser.js
Normal file
@ -0,0 +1,97 @@
function ArrayParser(source, converter) {
this.source = source;
this.converter = converter;
this.pos = 0;
this.entries = [];
this.recorded = [];
this.dimension = 0;
if (!this.converter) {
this.converter = function(entry) {
return entry;
};
}
}

ArrayParser.prototype.eof = function() {
return this.pos >= this.source.length;
};

ArrayParser.prototype.nextChar = function() {
var c;
if ((c = this.source[this.pos++]) === "\\") {
return {
char: this.source[this.pos++],
escaped: true
};
} else {
return {
char: c,
escaped: false
};
}
};

ArrayParser.prototype.record = function(c) {
return this.recorded.push(c);
};

ArrayParser.prototype.newEntry = function(includeEmpty) {
var entry;
if (this.recorded.length > 0 || includeEmpty) {
entry = this.recorded.join("");
if (entry === "NULL" && !includeEmpty) {
entry = null;
}
if (entry !== null) {
entry = this.converter(entry);
}
this.entries.push(entry);
this.recorded = [];
}
};

ArrayParser.prototype.parse = function(nested) {
var c, p, quote;
if (nested === null) {
nested = false;
}
quote = false;
while (!this.eof()) {
c = this.nextChar();
if (c.char === "{" && !quote) {
this.dimension++;
if (this.dimension > 1) {
p = new ArrayParser(this.source.substr(this.pos - 1), this.converter);
this.entries.push(p.parse(true));
this.pos += p.pos - 2;
}
} else if (c.char === "}" && !quote) {
this.dimension--;
if (this.dimension === 0) {
this.newEntry();
if (nested) {
return this.entries;
}
}
} else if (c.char === '"' && !c.escaped) {
if (quote) {
this.newEntry(true);
}
quote = !quote;
} else if (c.char === ',' && !quote) {
this.newEntry();
} else {
this.record(c.char);
}
}
if (this.dimension !== 0) {
throw "array dimension not balanced";
}
return this.entries;
};

module.exports = {
create: function(source, converter){
return new ArrayParser(source, converter);
}
};
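A short usage sketch of the parser added above; the module path is assumed for illustration.

var arrayParser = require('./lib/types/arrayParser');
var p = arrayParser.create('{1,2,NULL,{3,4}}', function(entry) {
  return parseInt(entry, 10);   // converter runs for every non-NULL entry
});
console.log(p.parse());   // [ 1, 2, null, [ 3, 4 ] ]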
256
lib/types/binaryParsers.js
Normal file
@ -0,0 +1,256 @@
|
||||
var parseBits = function(data, bits, offset, invert, callback) {
|
||||
offset = offset || 0;
|
||||
invert = invert || false;
|
||||
callback = callback || function(lastValue, newValue, bits) { return (lastValue * Math.pow(2, bits)) + newValue; };
|
||||
var offsetBytes = offset >> 3;
|
||||
|
||||
var inv = function(value) {
|
||||
if (invert) {
|
||||
return ~value & 0xff;
|
||||
}
|
||||
|
||||
return value;
|
||||
};
|
||||
|
||||
// read first (maybe partial) byte
|
||||
var mask = 0xff;
|
||||
var firstBits = 8 - (offset % 8);
|
||||
if (bits < firstBits) {
|
||||
mask = (0xff << (8 - bits)) & 0xff;
|
||||
firstBits = bits;
|
||||
}
|
||||
|
||||
if (offset) {
|
||||
mask = mask >> (offset % 8);
|
||||
}
|
||||
|
||||
var result = 0;
|
||||
if ((offset % 8) + bits >= 8) {
|
||||
result = callback(0, inv(data[offsetBytes]) & mask, firstBits);
|
||||
}
|
||||
|
||||
// read bytes
|
||||
var bytes = (bits + offset) >> 3;
|
||||
for (var i = offsetBytes + 1; i < bytes; i++) {
|
||||
result = callback(result, inv(data[i]), 8);
|
||||
}
|
||||
|
||||
// bits to read, that are not a complete byte
|
||||
var lastBits = (bits + offset) % 8;
|
||||
if (lastBits > 0) {
|
||||
result = callback(result, inv(data[bytes]) >> (8 - lastBits), lastBits);
|
||||
}
|
||||
|
||||
return result;
|
||||
};
|
||||
|
||||
var parseFloatFromBits = function(data, precisionBits, exponentBits) {
|
||||
var bias = Math.pow(2, exponentBits - 1) - 1;
|
||||
var sign = parseBits(data, 1);
|
||||
var exponent = parseBits(data, exponentBits, 1);
|
||||
|
||||
if (exponent === 0) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
// parse mantissa
|
||||
var precisionBitsCounter = 1;
|
||||
var parsePrecisionBits = function(lastValue, newValue, bits) {
|
||||
if (lastValue === 0) {
|
||||
lastValue = 1;
|
||||
}
|
||||
|
||||
for (var i = 1; i <= bits; i++) {
|
||||
precisionBitsCounter /= 2;
|
||||
if ((newValue & (0x1 << (bits - i))) > 0) {
|
||||
lastValue += precisionBitsCounter;
|
||||
}
|
||||
}
|
||||
|
||||
return lastValue;
|
||||
};
|
||||
|
||||
var mantissa = parseBits(data, precisionBits, exponentBits + 1, false, parsePrecisionBits);
|
||||
|
||||
// special cases
|
||||
if (exponent == (Math.pow(2, exponentBits + 1) - 1)) {
|
||||
if (mantissa === 0) {
|
||||
return (sign === 0) ? Infinity : -Infinity;
|
||||
}
|
||||
|
||||
return NaN;
|
||||
}
|
||||
|
||||
// normale number
|
||||
return ((sign === 0) ? 1 : -1) * Math.pow(2, exponent - bias) * mantissa;
|
||||
};
|
||||
|
||||
var parseBool = function(value) {
|
||||
return (parseBits(value, 8) == 1);
|
||||
};
|
||||
|
||||
var parseInt16 = function(value) {
|
||||
if (parseBits(value, 1) == 1) {
|
||||
return -1 * (parseBits(value, 15, 1, true) + 1);
|
||||
}
|
||||
|
||||
return parseBits(value, 15, 1);
|
||||
};
|
||||
|
||||
var parseInt32 = function(value) {
|
||||
if (parseBits(value, 1) == 1) {
|
||||
return -1 * (parseBits(value, 31, 1, true) + 1);
|
||||
}
|
||||
|
||||
return parseBits(value, 31, 1);
|
||||
};
|
||||
|
||||
var parseFloat32 = function(value) {
|
||||
return parseFloatFromBits(value, 23, 8);
|
||||
};
|
||||
|
||||
var parseFloat64 = function(value) {
|
||||
return parseFloatFromBits(value, 52, 11);
|
||||
};
|
||||
|
||||
var parseNumeric = function(value) {
|
||||
var sign = parseBits(value, 16, 32);
|
||||
if (sign == 0xc000) {
|
||||
return NaN;
|
||||
}
|
||||
|
||||
var weight = Math.pow(10000, parseBits(value, 16, 16));
|
||||
var result = 0;
|
||||
|
||||
var digits = [];
|
||||
var ndigits = parseBits(value, 16);
|
||||
for (var i = 0; i < ndigits; i++) {
|
||||
result += parseBits(value, 16, 64 + (16 * i)) * weight;
|
||||
weight /= 10000;
|
||||
}
|
||||
|
||||
var scale = Math.pow(10, parseBits(value, 16, 48));
|
||||
return ((sign === 0) ? 1 : -1) * Math.round(result * scale) / scale;
|
||||
};
|
||||
|
||||
var parseDate = function(isUTC, value) {
|
||||
var sign = parseBits(value, 1);
|
||||
var rawValue = parseBits(value, 63, 1);
|
||||
|
||||
// discard usecs and shift from 2000 to 1970
|
||||
var result = new Date((((sign === 0) ? 1 : -1) * rawValue / 1000) + 946684800000);
|
||||
|
||||
if (!isUTC) {
|
||||
result.setTime(result.getTime() + result.getTimezoneOffset() * 60000);
|
||||
}
|
||||
|
||||
// add microseconds to the date
|
||||
result.usec = rawValue % 1000;
|
||||
result.getMicroSeconds = function() {
|
||||
return this.usec;
|
||||
};
|
||||
result.setMicroSeconds = function(value) {
|
||||
this.usec = value;
|
||||
};
|
||||
result.getUTCMicroSeconds = function() {
|
||||
return this.usec;
|
||||
};
|
||||
|
||||
return result;
|
||||
};
|
||||
|
||||
var parseArray = function(value) {
|
||||
var dim = parseBits(value, 32);
|
||||
|
||||
var flags = parseBits(value, 32, 32);
|
||||
var elementType = parseBits(value, 32, 64);
|
||||
|
||||
var offset = 96;
|
||||
var dims = [];
|
||||
for (var i = 0; i < dim; i++) {
|
||||
// parse dimension
|
||||
dims[i] = parseBits(value, 32, offset);
|
||||
offset += 32;
|
||||
|
||||
// ignore lower bounds
|
||||
offset += 32;
|
||||
}
|
||||
|
||||
var parseElement = function(elementType) {
|
||||
// parse content length
|
||||
var length = parseBits(value, 32, offset);
|
||||
offset += 32;
|
||||
|
||||
// parse null values
|
||||
if (length == 0xffffffff) {
|
||||
return null;
|
||||
}
|
||||
|
||||
var result;
|
||||
if ((elementType == 0x17) || (elementType == 0x14)) {
|
||||
// int/bigint
|
||||
result = parseBits(value, length * 8, offset);
|
||||
offset += length * 8;
|
||||
return result;
|
||||
}
|
||||
else if (elementType == 0x19) {
|
||||
// string
|
||||
result = value.toString(this.encoding, offset >> 3, (offset += (length << 3)) >> 3);
|
||||
return result;
|
||||
}
|
||||
else {
|
||||
console.log("ERROR: ElementType not implemented: " + elementType);
|
||||
}
|
||||
};
|
||||
|
||||
var parse = function(dimension, elementType) {
|
||||
var array = [];
|
||||
var i;
|
||||
|
||||
if (dimension.length > 1) {
|
||||
var count = dimension.shift();
|
||||
for (i = 0; i < count; i++) {
|
||||
array[i] = parse(dimension, elementType);
|
||||
}
|
||||
dimension.unshift(count);
|
||||
}
|
||||
else {
|
||||
for (i = 0; i < dimension[0]; i++) {
|
||||
array[i] = parseElement(elementType);
|
||||
}
|
||||
}
|
||||
|
||||
return array;
|
||||
};
|
||||
|
||||
return parse(dims, elementType);
|
||||
};
|
||||
|
||||
var parseText = function(value) {
|
||||
return value.toString('utf8');
|
||||
};
|
||||
|
||||
var parseBool = function(value) {
|
||||
return (parseBits(value, 8) > 0);
|
||||
};
|
||||
|
||||
var init = function(register) {
|
||||
register(21, parseInt16);
|
||||
register(23, parseInt32);
|
||||
register(26, parseInt32);
|
||||
register(1700, parseNumeric);
|
||||
register(700, parseFloat32);
|
||||
register(701, parseFloat64);
|
||||
register(16, parseBool);
|
||||
register(1114, parseDate.bind(null, false));
|
||||
register(1184, parseDate.bind(null, true));
|
||||
register(1007, parseArray);
|
||||
register(1016, parseArray);
|
||||
register(1008, parseArray);
|
||||
register(1009, parseArray);
|
||||
register(25, parseText);
|
||||
};
|
||||
|
||||
module.exports = {
|
||||
init: init
|
||||
};
|
||||
@ -1,5 +1,5 @@
var textParsers = require(__dirname + "/textParsers"),
binaryParsers = require(__dirname + "/binaryParsers");
var textParsers = require(__dirname + '/textParsers');
var binaryParsers = require(__dirname + '/binaryParsers');

var typeParsers = {
text: {},
@ -9,21 +9,27 @@ var typeParsers = {
//the empty parse function
var noParse = function(val) {
return String(val);
}
};

//returns a function used to convert a specific type (specified by
//oid) into a result javascript type
var getTypeParser = function(oid, format) {
if (!typeParsers[format])
if (!typeParsers[format]) {
return noParse;
}
return typeParsers[format][oid] || noParse;
};

var setTypeParser = function(oid, format, parseFn) {
if(typeof format == 'function') {
parseFn = format;
format = 'text';
}
typeParsers[format][oid] = parseFn;
};

textParsers.init(function(oid, converter) {
typeParsers.text[oid] = function(value) {
return converter(String(value));
};
typeParsers.text[oid] = converter;
});

binaryParsers.init(function(oid, converter) {
@ -32,4 +38,5 @@ binaryParsers.init(function(oid, converter) {

module.exports = {
getTypeParser: getTypeParser,
}
setTypeParser: setTypeParser
};
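The newly exported setTypeParser above is the hook for overriding how a given oid is converted. A hedged sketch, requiring the module directly (the top-level pg module is presumed to re-export these helpers, but that is an assumption here, as is the require path):

var types = require('./lib/types');
// default int8 (oid 20) handling keeps the value as a string to avoid precision loss
console.log(types.getTypeParser(20, 'text')('9007199254740993'));   // '9007199254740993'
// opt in to plain numbers when the value range is known to be safe
types.setTypeParser(20, 'text', function(val) {
  return parseInt(val, 10);
});
console.log(types.getTypeParser(20, 'text')('42'));                 // 42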
199
lib/types/textParsers.js
Normal file
@ -0,0 +1,199 @@
|
||||
var arrayParser = require(__dirname + "/arrayParser.js");
|
||||
|
||||
//parses PostgreSQL server formatted date strings into javascript date objects
|
||||
var parseDate = function(isoDate) {
|
||||
//TODO this could do w/ a refactor
|
||||
var dateMatcher = /(\d{4})-(\d{2})-(\d{2}) (\d{2}):(\d{2}):(\d{2})(\.\d{1,})?/;
|
||||
|
||||
var match = dateMatcher.exec(isoDate);
|
||||
//could not parse date
|
||||
if(!match) {
|
||||
dateMatcher = /^(\d{4})-(\d{2})-(\d{2})$/;
|
||||
match = dateMatcher.test(isoDate);
|
||||
if(!match) {
|
||||
return null;
|
||||
} else {
|
||||
//it is a date in YYYY-MM-DD format
|
||||
return new Date(isoDate);
|
||||
}
|
||||
}
|
||||
var year = match[1];
|
||||
var month = parseInt(match[2],10)-1;
|
||||
var day = match[3];
|
||||
var hour = parseInt(match[4],10);
|
||||
var min = parseInt(match[5],10);
|
||||
var seconds = parseInt(match[6], 10);
|
||||
|
||||
var miliString = match[7];
|
||||
var mili = 0;
|
||||
if(miliString) {
|
||||
mili = 1000 * parseFloat(miliString);
|
||||
}
|
||||
|
||||
//match timezones like the following:
|
||||
//Z (UTC)
|
||||
//-05
|
||||
//+06:30
|
||||
var tZone = /([Z|+\-])(\d{2})?:?(\d{2})?/.exec(isoDate.split(' ')[1]);
|
||||
//minutes to adjust for timezone
|
||||
var tzAdjust = 0;
|
||||
if(tZone) {
|
||||
var type = tZone[1];
|
||||
switch(type) {
|
||||
case 'Z':
|
||||
break;
|
||||
case '-':
|
||||
tzAdjust = -(((parseInt(tZone[2],10)*60)+(parseInt(tZone[3]||0,10))));
|
||||
break;
|
||||
case '+':
|
||||
tzAdjust = (((parseInt(tZone[2],10)*60)+(parseInt(tZone[3]||0,10))));
|
||||
break;
|
||||
default:
|
||||
throw new Error("Unidentifed tZone part " + type);
|
||||
}
|
||||
|
||||
var utcOffset = Date.UTC(year, month, day, hour, min, seconds, mili);
|
||||
return new Date(utcOffset - (tzAdjust * 60* 1000));
|
||||
}
|
||||
//no timezone information
|
||||
else {
|
||||
return new Date(year, month, day, hour, min, seconds, mili);
|
||||
}
|
||||
|
||||
};
|
||||
|
||||
var parseBool = function(val) {
|
||||
return val === 't';
|
||||
};
|
||||
|
||||
var parseIntegerArray = function(val) {
|
||||
if(!val) { return null; }
|
||||
var p = arrayParser.create(val, function(entry){
|
||||
if(entry !== null) {
|
||||
entry = parseInt(entry, 10);
|
||||
}
|
||||
return entry;
|
||||
});
|
||||
|
||||
return p.parse();
|
||||
};
|
||||
|
||||
var parseFloatArray = function(val) {
|
||||
if(!val) { return null; }
|
||||
var p = arrayParser.create(val, function(entry) {
|
||||
return entry;
|
||||
});
|
||||
|
||||
return p.parse();
|
||||
};
|
||||
|
||||
var parseStringArray = function(val) {
|
||||
if(!val) { return null; }
|
||||
|
||||
var p = arrayParser.create(val);
|
||||
return p.parse();
|
||||
};
|
||||
|
||||
|
||||
var NUM = '([+-]?\\d+)';
|
||||
var YEAR = NUM + '\\s+years?';
|
||||
var MON = NUM + '\\s+mons?';
|
||||
var DAY = NUM + '\\s+days?';
|
||||
var TIME = '([+-])?(\\d\\d):(\\d\\d):(\\d\\d)';
|
||||
var INTERVAL = [YEAR,MON,DAY,TIME].map(function(p){
|
||||
return "("+p+")?";
|
||||
}).join('\\s*');
|
||||
|
||||
var parseInterval = function(val) {
|
||||
if (!val) { return {}; }
|
||||
var m = new RegExp(INTERVAL).exec(val);
|
||||
var i = {};
|
||||
if (m[2]) { i.years = parseInt(m[2], 10); }
|
||||
if (m[4]) { i.months = parseInt(m[4], 10); }
|
||||
if (m[6]) { i.days = parseInt(m[6], 10); }
|
||||
if (m[9]) { i.hours = parseInt(m[9], 10); }
|
||||
if (m[10]) { i.minutes = parseInt(m[10], 10); }
|
||||
if (m[11]) { i.seconds = parseInt(m[11], 10); }
|
||||
if (m[8] == '-'){
|
||||
if (i.hours) { i.hours *= -1; }
|
||||
if (i.minutes) { i.minutes *= -1; }
|
||||
if (i.seconds) { i.seconds *= -1; }
|
||||
}
|
||||
for (var field in i){
|
||||
if (i[field] === 0) {
|
||||
delete i[field];
|
||||
}
|
||||
}
|
||||
return i;
|
||||
};
|
||||
|
||||
var parseByteA = function(val) {
|
||||
if(/^\\x/.test(val)){
|
||||
// new 'hex' style response (pg >9.0)
|
||||
return new Buffer(val.substr(2), 'hex');
|
||||
}else{
|
||||
var out = "";
|
||||
var i = 0;
|
||||
while(i < val.length){
|
||||
if(val[i] != "\\"){
|
||||
out += val[i];
|
||||
++i;
|
||||
}else{
|
||||
if(val.substr(i+1,3).match(/[0-7]{3}/)){
|
||||
out += String.fromCharCode(parseInt(val.substr(i+1,3),8));
|
||||
i += 4;
|
||||
}else{
|
||||
backslashes = 1;
|
||||
while(i+backslashes < val.length && val[i+backslashes] == "\\")
|
||||
backslashes++;
|
||||
for(k=0; k<Math.floor(backslashes/2); ++k)
|
||||
out += "\\";
|
||||
i += Math.floor(backslashes / 2) * 2;
|
||||
}
|
||||
}
|
||||
}
|
||||
return new Buffer(out,"binary");
|
||||
}
|
||||
};
|
||||
|
||||
var maxLen = Number.MAX_VALUE.toString().length;
|
||||
|
||||
var parseInteger = function(val) {
|
||||
return parseInt(val, 10);
|
||||
};
|
||||
|
||||
var parseBigInteger = function(val) {
|
||||
var valStr = String(val);
|
||||
if (/^\d+$/.test(valStr)) { return valStr; }
|
||||
return val;
|
||||
};
|
||||
|
||||
var init = function(register) {
|
||||
register(20, parseBigInteger); // int8
|
||||
register(21, parseInteger); // int2
|
||||
register(23, parseInteger); // int4
|
||||
register(26, parseInteger); // oid
|
||||
register(700, parseFloat); // float4/real
|
||||
register(701, parseFloat); // float8/double
|
||||
register(16, parseBool);
|
||||
register(1082, parseDate); // date
|
||||
register(1114, parseDate); // timestamp without timezone
|
||||
register(1184, parseDate); // timestamp
|
||||
register(1005, parseIntegerArray); // _int2
|
||||
register(1007, parseIntegerArray); // _int4
|
||||
register(1016, parseIntegerArray); // _int8
|
||||
register(1021, parseFloatArray); // _float4
|
||||
register(1022, parseFloatArray); // _float8
|
||||
register(1231, parseFloatArray); // _numeric
|
||||
register(1014, parseStringArray); //char
|
||||
register(1015, parseStringArray); //varchar
|
||||
register(1008, parseStringArray);
|
||||
register(1009, parseStringArray);
|
||||
register(1186, parseInterval);
|
||||
register(17, parseByteA);
|
||||
register(114, JSON.parse.bind(JSON));
|
||||
};
|
||||
|
||||
module.exports = {
|
||||
init: init
|
||||
};
|
||||
168
lib/utils.js
@ -13,88 +13,98 @@ if(typeof events.EventEmitter.prototype.once !== 'function') {
|
||||
};
|
||||
}
|
||||
|
||||
var parseConnectionString = function(str) {
|
||||
//unix socket
|
||||
if(str.charAt(0) === '/') {
|
||||
return { host: str };
|
||||
}
|
||||
var result = url.parse(str);
|
||||
var config = {};
|
||||
config.host = result.hostname;
|
||||
config.database = result.pathname ? result.pathname.slice(1) : null
|
||||
var auth = (result.auth || ':').split(':');
|
||||
config.user = auth[0];
|
||||
config.password = auth[1];
|
||||
config.port = result.port;
|
||||
return config;
|
||||
};
|
||||
|
||||
//allows passing false as property to remove it from config
|
||||
var norm = function(config, propName) {
|
||||
config[propName] = (config[propName] || (config[propName] === false ? undefined : defaults[propName]))
|
||||
};
|
||||
|
||||
//normalizes connection info
|
||||
//which can be in the form of an object
|
||||
//or a connection string
|
||||
var normalizeConnectionInfo = function(config) {
|
||||
switch(typeof config) {
|
||||
case 'object':
|
||||
norm(config, 'user');
|
||||
norm(config, 'password');
|
||||
norm(config, 'host');
|
||||
norm(config, 'port');
|
||||
norm(config, 'database');
|
||||
return config;
|
||||
case 'string':
|
||||
return normalizeConnectionInfo(parseConnectionString(config));
|
||||
default:
|
||||
throw new Error("Unrecognized connection config parameter: " + config);
|
||||
}
|
||||
};
|
||||
|
||||
|
||||
var add = function(params, config, paramName) {
|
||||
var value = config[paramName];
|
||||
if(value) {
|
||||
params.push(paramName+"='"+value+"'");
|
||||
}
|
||||
// convert a JS array to a postgres array literal
|
||||
// uses comma separator so won't work for types like box that use
|
||||
// a different array separator.
|
||||
function arrayString(val) {
|
||||
var result = '{';
|
||||
for (var i = 0 ; i < val.length; i++) {
|
||||
if(i > 0) {
|
||||
result = result + ',';
|
||||
}
|
||||
if(val[i] instanceof Date) {
|
||||
result = result + JSON.stringify(val[i]);
|
||||
}
|
||||
else if(typeof val[i] === 'undefined') {
|
||||
result = result + 'NULL';
|
||||
}
|
||||
else if(Array.isArray(val[i])) {
|
||||
result = result + arrayString(val[i]);
|
||||
}
|
||||
else
|
||||
{
|
||||
result = result +
|
||||
(val[i] === null ? 'NULL' : JSON.stringify(val[i]));
|
||||
}
|
||||
}
|
||||
result = result + '}';
|
||||
return result;
|
||||
}
|
||||
|
||||
//builds libpq specific connection string
|
||||
//from a supplied config object
|
||||
//the config object conforms to the interface of the config object
|
||||
//accepted by the pure javascript client
|
||||
var getLibpgConString = function(config, callback) {
|
||||
if(typeof config == 'object') {
|
||||
var params = []
|
||||
add(params, config, 'user');
|
||||
add(params, config, 'password');
|
||||
add(params, config, 'port');
|
||||
if(config.database) {
|
||||
params.push("dbname='" + config.database + "'");
|
||||
}
|
||||
if(config.host) {
|
||||
if(config.host != 'localhost' && config.host != '127.0.0.1') {
|
||||
//do dns lookup
|
||||
return require('dns').lookup(config.host, 4, function(err, address) {
|
||||
if(err) return callback(err, null);
|
||||
params.push("hostaddr="+address)
|
||||
callback(null, params.join(" "))
|
||||
})
|
||||
}
|
||||
params.push("hostaddr=127.0.0.1 ");
|
||||
}
|
||||
callback(null, params.join(" "));
|
||||
} else {
|
||||
throw new Error("Unrecognized config type for connection");
|
||||
//converts values from javascript types
|
||||
//to their 'raw' counterparts for use as a postgres parameter
|
||||
//note: you can override this function to provide your own conversion mechanism
|
||||
//for complex types, etc...
|
||||
var prepareValue = function(val) {
|
||||
if(val instanceof Date) {
|
||||
return dateToString(val);
|
||||
}
|
||||
if(typeof val === 'undefined') {
|
||||
return null;
|
||||
}
|
||||
if(Array.isArray(val)) {
|
||||
return arrayString(val);
|
||||
}
|
||||
if(!val || typeof val !== 'object') {
|
||||
return val === null ? null : val.toString();
|
||||
}
|
||||
return JSON.stringify(val);
|
||||
};
|
||||
|
||||
function dateToString(date) {
|
||||
function pad(number, digits) {
|
||||
number = ""+number;
|
||||
while(number.length < digits)
|
||||
number = "0"+number;
|
||||
return number;
|
||||
}
|
||||
|
||||
var offset = -date.getTimezoneOffset();
|
||||
var ret = pad(date.getFullYear(), 4) + '-' +
|
||||
pad(date.getMonth() + 1, 2) + '-' +
|
||||
pad(date.getDate(), 2) + 'T' +
|
||||
pad(date.getHours(), 2) + ':' +
|
||||
pad(date.getMinutes(), 2) + ':' +
|
||||
pad(date.getSeconds(), 2) + '.' +
|
||||
pad(date.getMilliseconds(), 3);
|
||||
|
||||
if(offset < 0) {
|
||||
ret += "-";
|
||||
offset *= -1;
|
||||
}
|
||||
else
|
||||
ret += "+";
|
||||
|
||||
return ret + pad(Math.floor(offset/60), 2) + ":" + pad(offset%60, 2);
|
||||
}
|
||||
|
||||
function normalizeQueryConfig (config, values, callback) {
|
||||
//can take in strings or config objects
|
||||
config = (typeof(config) == 'string') ? { text: config } : config;
|
||||
if(values) {
|
||||
if(typeof values === 'function') {
|
||||
config.callback = values;
|
||||
} else {
|
||||
config.values = values;
|
||||
}
|
||||
}
|
||||
if(callback) {
|
||||
config.callback = callback;
|
||||
}
|
||||
return config;
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
normalizeConnectionInfo: normalizeConnectionInfo,
|
||||
//only exported here to make testing of this method possible
|
||||
//since it contains quite a bit of logic and testing for
|
||||
//each connection scenario in an integration test is impractical
|
||||
buildLibpqConnectionString: getLibpgConString
|
||||
}
|
||||
prepareValue: prepareValue,
|
||||
normalizeQueryConfig: normalizeQueryConfig
|
||||
};
|
||||
|
||||
130
lib/writer.js
@ -1,130 +0,0 @@
|
||||
//binary data writer tuned for creating
|
||||
//postgres message packets as effeciently as possible by reusing the
|
||||
//same buffer to avoid memcpy and limit memory allocations
|
||||
var Writer = function(size) {
|
||||
this.size = size || 1024;
|
||||
this.buffer = Buffer(this.size + 5);
|
||||
this.offset = 5;
|
||||
this.headerPosition = 0;
|
||||
};
|
||||
|
||||
var p = Writer.prototype;
|
||||
|
||||
//resizes internal buffer if not enough size left
|
||||
p._ensure = function(size) {
|
||||
var remaining = this.buffer.length - this.offset;
|
||||
if(remaining < size) {
|
||||
var oldBuffer = this.buffer;
|
||||
this.buffer = new Buffer(oldBuffer.length + size);
|
||||
oldBuffer.copy(this.buffer);
|
||||
}
|
||||
}
|
||||
|
||||
p.addInt32 = function(num) {
|
||||
this._ensure(4)
|
||||
this.buffer[this.offset++] = (num >>> 24 & 0xFF)
|
||||
this.buffer[this.offset++] = (num >>> 16 & 0xFF)
|
||||
this.buffer[this.offset++] = (num >>> 8 & 0xFF)
|
||||
this.buffer[this.offset++] = (num >>> 0 & 0xFF)
|
||||
return this;
|
||||
}
|
||||
|
||||
p.addInt16 = function(num) {
|
||||
this._ensure(2)
|
||||
this.buffer[this.offset++] = (num >>> 8 & 0xFF)
|
||||
this.buffer[this.offset++] = (num >>> 0 & 0xFF)
|
||||
return this;
|
||||
}
|
||||
|
||||
//for versions of node requiring 'length' as 3rd argument to buffer.write
|
||||
var writeString = function(buffer, string, offset, len) {
|
||||
buffer.write(string, offset, len);
|
||||
}
|
||||
|
||||
//overwrite function for older versions of node
|
||||
if(Buffer.prototype.write.length === 3) {
|
||||
writeString = function(buffer, string, offset, len) {
|
||||
buffer.write(string, offset);
|
||||
}
|
||||
}
|
||||
|
||||
p.addCString = function(string) {
|
||||
//just write a 0 for empty or null strings
|
||||
if(!string) {
|
||||
this._ensure(1);
|
||||
} else {
|
||||
var len = Buffer.byteLength(string);
|
||||
this._ensure(len + 1); //+1 for null terminator
|
||||
writeString(this.buffer, string, this.offset, len);
|
||||
this.offset += len;
|
||||
}
|
||||
|
||||
this.buffer[this.offset++] = 0; // null terminator
|
||||
return this;
|
||||
}
|
||||
|
||||
p.addChar = function(char) {
|
||||
this._ensure(1);
|
||||
writeString(this.buffer, char, this.offset, 1);
|
||||
this.offset++;
|
||||
return this;
|
||||
}
|
||||
|
||||
p.addString = function(string) {
|
||||
var string = string || "";
|
||||
var len = Buffer.byteLength(string);
|
||||
this._ensure(len);
|
||||
this.buffer.write(string, this.offset);
|
||||
this.offset += len;
|
||||
return this;
|
||||
}
|
||||
|
||||
p.getByteLength = function() {
|
||||
return this.offset - 5;
|
||||
}
|
||||
|
||||
p.add = function(otherBuffer) {
|
||||
this._ensure(otherBuffer.length);
|
||||
otherBuffer.copy(this.buffer, this.offset);
|
||||
this.offset += otherBuffer.length;
|
||||
return this;
|
||||
}
|
||||
|
||||
p.clear = function() {
|
||||
this.offset = 5;
|
||||
this.headerPosition = 0;
|
||||
this.lastEnd = 0;
|
||||
}
|
||||
|
||||
//appends a header block to all the written data since the last
|
||||
//subsequent header or to the beginning if there is only one data block
|
||||
p.addHeader = function(code, last) {
|
||||
var origOffset = this.offset;
|
||||
this.offset = this.headerPosition;
|
||||
this.buffer[this.offset++] = code;
|
||||
//length is everything in this packet minus the code
|
||||
this.addInt32(origOffset - (this.headerPosition+1))
|
||||
//set next header position
|
||||
this.headerPosition = origOffset;
|
||||
//make space for next header
|
||||
this.offset = origOffset;
|
||||
if(!last) {
|
||||
this._ensure(5);
|
||||
this.offset += 5;
|
||||
}
|
||||
}
|
||||
|
||||
p.join = function(code) {
|
||||
if(code) {
|
||||
this.addHeader(code, true);
|
||||
}
|
||||
return this.buffer.slice(code ? 0 : 5, this.offset);
|
||||
}
|
||||
|
||||
p.flush = function(code) {
|
||||
var result = this.join(code);
|
||||
this.clear();
|
||||
return result;
|
||||
}
|
||||
|
||||
module.exports = Writer;
|
||||
43
package.json
@ -1,20 +1,35 @@
{ "name": "pg",
"version": "0.6.10",
{
"name": "pg",
"version": "2.5.0",
"description": "PostgreSQL client - pure javascript & libpq with the same API",
"keywords" : ["postgres", "pg", "libpq", "postgre", "database", "rdbms"],
"keywords": [
"postgres",
"pg",
"libpq",
"postgre",
"database",
"rdbms"
],
"homepage": "http://github.com/brianc/node-postgres",
"repository" : {
"type" : "git",
"url" : "git://github.com/brianc/node-postgres.git"
"repository": {
"type": "git",
"url": "git://github.com/brianc/node-postgres.git"
},
"author" : "Brian Carlson <brian.m.carlson@gmail.com>",
"main" : "./lib",
"dependencies" : {
"generic-pool" : "1.0.9"
"author": "Brian Carlson <brian.m.carlson@gmail.com>",
"main": "./lib",
"dependencies": {
"generic-pool": "2.0.3",
"buffer-writer": "1.0.0"
},
"scripts" : {
"test" : "make test",
"install" : "node-waf configure build || (exit 0)"
"devDependencies": {
"jshint": "1.1.0",
"semver": "~1.1.4"
},
"engines" : { "node": ">= 0.4.0" }
"scripts": {
"test": "make test-travis connectionString=postgres://postgres@localhost:5432/postgres",
"install": "node-gyp rebuild || (exit 0)"
},
"engines": {
"node": ">= 0.8.0"
}
}
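A hypothetical sketch of how the new semver devDependency and the engines field could be used together in a local check script; the script itself is not part of this commit and is shown only for illustration.

var semver = require('semver');
var pkg = require('./package.json');
// true on any node version satisfying the engines range, e.g. 0.8.x or 0.10.x
console.log(semver.satisfies(process.version, pkg.engines.node));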
@ -1,6 +1,6 @@
var helper = require(__dirname + "/../test/integration/test-helper");
var pg = helper.pg;
pg.connect(helper.connectionString(), assert.success(function(client) {
pg.connect(helper.config, assert.success(function(client) {
var query = client.query('select oid, typname from pg_type where typtype = \'b\' order by oid');
query.on('row', console.log);
}))

5
script/setup-bench-data.js
Normal file
@ -0,0 +1,5 @@
var pg = require('../lib');
var
pg.connect(function(err, client) {

})
@ -3,7 +3,7 @@ var helper = require(__dirname + '/../test/test-helper');
console.log();
console.log("testing ability to connect to '%j'", helper.config);
var pg = require(__dirname + '/../lib');
pg.connect(helper.config, function(err, client) {
pg.connect(helper.config, function(err, client, done) {
if(err !== null) {
console.error("Recieved connection error when attempting to contact PostgreSQL:");
console.error(err);
@ -18,6 +18,7 @@ pg.connect(helper.config, function(err, client) {
console.error(err);
process.exit(255);
}
done();
pg.end();
})
})
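The updated connect callback above now receives a third `done` argument for returning the pooled client. A minimal sketch of that pattern, with placeholder module path and connection string:

var pg = require('./lib');
pg.connect('postgres://postgres@localhost:5432/postgres', function(err, client, done) {
  if(err) { return console.error('could not connect', err); }
  client.query('SELECT NOW() AS now_ts', function(err, result) {
    done();        // release the client back to the pool
    if(err) { return console.error(err); }
    console.log(result.rows[0].now_ts);
    pg.end();      // drain the pool so the process can exit
  });
});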
20
script/travis-pg-9.2-install.sh
Executable file
@ -0,0 +1,20 @@
#! /usr/bin/env bash
#sudo cat /etc/postgresql/9.1/main/pg_hba.conf
#sudo cat /etc/postgresql/9.1/main/pg_ident.conf
#sudo cat /etc/postgresql/9.1/main/postgresql.conf
sudo /etc/init.d/postgresql stop
sudo apt-get -y --purge remove postgresql
echo "yes" | sudo add-apt-repository ppa:pitti/postgresql
sudo apt-get update -qq
sudo apt-get -q -y -o Dpkg::Options::=--force-confdef install postgresql-9.2 postgresql-contrib-9.2
sudo chmod 777 /etc/postgresql/9.2/main/pg_hba.conf
sudo echo "local all postgres trust" > /etc/postgresql/9.2/main/pg_hba.conf
sudo echo "local all all trust" >> /etc/postgresql/9.2/main/pg_hba.conf
sudo echo "host all all 127.0.0.1/32 trust" >> /etc/postgresql/9.2/main/pg_hba.conf
sudo echo "host all all ::1/128 trust" >> /etc/postgresql/9.2/main/pg_hba.conf
sudo echo "host all all 0.0.0.0/0 trust" >> /etc/postgresql/9.2/main/pg_hba.conf
sudo echo "host all all 0.0.0.0 255.255.255.255 trust" >> /etc/postgresql/9.2/main/pg_hba.conf
sudo /etc/init.d/postgresql restart
# for some reason both postgres 9.1 and 9.2 are started
# 9.2 is running on port 5433
node script/create-test-tables.js postgres://postgres@localhost:5433/postgres
466
src/binding.cc
@ -1,5 +1,7 @@
|
||||
#include <pg_config.h>
|
||||
#include <libpq-fe.h>
|
||||
#include <node.h>
|
||||
#include <node_buffer.h>
|
||||
#include <string.h>
|
||||
#include <assert.h>
|
||||
#include <stdlib.h>
|
||||
@ -7,6 +9,9 @@
|
||||
#define LOG(msg) printf("%s\n",msg);
|
||||
#define TRACE(msg) //printf("%s\n", msg);
|
||||
|
||||
#if PG_VERSION_NUM >= 90000
|
||||
#define ESCAPE_SUPPORTED
|
||||
#endif
|
||||
|
||||
#define THROW(msg) return ThrowException(Exception::Error(String::New(msg)));
|
||||
|
||||
@ -30,6 +35,7 @@ static Persistent<String> type_symbol;
|
||||
static Persistent<String> channel_symbol;
|
||||
static Persistent<String> payload_symbol;
|
||||
static Persistent<String> emit_symbol;
|
||||
static Persistent<String> command_symbol;
|
||||
|
||||
class Connection : public ObjectWrap {
|
||||
|
||||
@ -59,28 +65,39 @@ public:
|
||||
routine_symbol = NODE_PSYMBOL("routine");
|
||||
name_symbol = NODE_PSYMBOL("name");
|
||||
value_symbol = NODE_PSYMBOL("value");
|
||||
type_symbol = NODE_PSYMBOL("type");
|
||||
type_symbol = NODE_PSYMBOL("dataTypeID");
|
||||
channel_symbol = NODE_PSYMBOL("channel");
|
||||
payload_symbol = NODE_PSYMBOL("payload");
|
||||
|
||||
command_symbol = NODE_PSYMBOL("command");
|
||||
|
||||
NODE_SET_PROTOTYPE_METHOD(t, "connect", Connect);
|
||||
#ifdef ESCAPE_SUPPORTED
|
||||
NODE_SET_PROTOTYPE_METHOD(t, "escapeIdentifier", EscapeIdentifier);
|
||||
NODE_SET_PROTOTYPE_METHOD(t, "escapeLiteral", EscapeLiteral);
|
||||
#endif
|
||||
NODE_SET_PROTOTYPE_METHOD(t, "_sendQuery", SendQuery);
|
||||
NODE_SET_PROTOTYPE_METHOD(t, "_sendQueryWithParams", SendQueryWithParams);
|
||||
NODE_SET_PROTOTYPE_METHOD(t, "_sendPrepare", SendPrepare);
|
||||
NODE_SET_PROTOTYPE_METHOD(t, "_sendQueryPrepared", SendQueryPrepared);
|
||||
NODE_SET_PROTOTYPE_METHOD(t, "cancel", Cancel);
|
||||
NODE_SET_PROTOTYPE_METHOD(t, "end", End);
|
||||
|
||||
NODE_SET_PROTOTYPE_METHOD(t, "_sendCopyFromChunk", SendCopyFromChunk);
|
||||
NODE_SET_PROTOTYPE_METHOD(t, "_endCopyFrom", EndCopyFrom);
|
||||
target->Set(String::NewSymbol("Connection"), t->GetFunction());
|
||||
TRACE("created class");
|
||||
}
|
||||
|
||||
//static function called by libev as callback entrypoint
|
||||
//static function called by libuv as callback entrypoint
|
||||
static void
|
||||
io_event(EV_P_ ev_io *w, int revents)
|
||||
io_event(uv_poll_t* w, int status, int revents)
|
||||
{
|
||||
|
||||
TRACE("Received IO event");
|
||||
|
||||
if(status == -1) {
|
||||
TRACE("Connection error. -1 status from lib_uv_poll");
|
||||
}
|
||||
|
||||
Connection *connection = static_cast<Connection*>(w->data);
|
||||
connection->HandleIOEvent(revents);
|
||||
}
|
||||
@ -121,12 +138,67 @@ public:
|
||||
return Undefined();
|
||||
}
|
||||
|
||||
#ifdef ESCAPE_SUPPORTED
|
||||
//v8 entry point into Connection#escapeIdentifier
|
||||
static Handle<Value>
|
||||
EscapeIdentifier(const Arguments& args)
|
||||
{
|
||||
HandleScope scope;
|
||||
Connection *self = ObjectWrap::Unwrap<Connection>(args.This());
|
||||
|
||||
char* inputStr = MallocCString(args[0]);
|
||||
|
||||
if(!inputStr) {
|
||||
THROW("Unable to allocate memory for a string in EscapeIdentifier.")
|
||||
}
|
||||
|
||||
char* escapedStr = self->EscapeIdentifier(inputStr);
|
||||
free(inputStr);
|
||||
|
||||
if(escapedStr == NULL) {
|
||||
THROW(self->GetLastError());
|
||||
}
|
||||
|
||||
Local<Value> jsStr = String::New(escapedStr, strlen(escapedStr));
|
||||
PQfreemem(escapedStr);
|
||||
|
||||
return scope.Close(jsStr);
|
||||
}
|
||||
|
||||
//v8 entry point into Connection#escapeLiteral
|
||||
static Handle<Value>
|
||||
EscapeLiteral(const Arguments& args)
|
||||
{
|
||||
HandleScope scope;
|
||||
Connection *self = ObjectWrap::Unwrap<Connection>(args.This());
|
||||
|
||||
char* inputStr = MallocCString(args[0]);
|
||||
|
||||
if(!inputStr) {
|
||||
THROW("Unable to allocate memory for a string in EscapeIdentifier.")
|
||||
}
|
||||
|
||||
char* escapedStr = self->EscapeLiteral(inputStr);
|
||||
free(inputStr);
|
||||
|
||||
if(escapedStr == NULL) {
|
||||
THROW(self->GetLastError());
|
||||
}
|
||||
|
||||
Local<Value> jsStr = String::New(escapedStr, strlen(escapedStr));
|
||||
PQfreemem(escapedStr);
|
||||
|
||||
return scope.Close(jsStr);
|
||||
}
|
||||
#endif
|
||||
|
||||
//v8 entry point into Connection#_sendQuery
|
||||
static Handle<Value>
|
||||
SendQuery(const Arguments& args)
|
||||
{
|
||||
HandleScope scope;
|
||||
Connection *self = ObjectWrap::Unwrap<Connection>(args.This());
|
||||
const char *lastErrorMessage;
|
||||
if(!args[0]->IsString()) {
|
||||
THROW("First parameter must be a string query");
|
||||
}
|
||||
@ -135,7 +207,8 @@ public:
|
||||
int result = self->Send(queryText);
|
||||
free(queryText);
|
||||
if(result == 0) {
|
||||
THROW("PQsendQuery returned error code");
|
||||
lastErrorMessage = self->GetLastError();
|
||||
THROW(lastErrorMessage);
|
||||
}
|
||||
//TODO should we flush before throw?
|
||||
self->Flush();
|
||||
@ -191,8 +264,6 @@ public:
|
||||
THROW("Values must be an array");
|
||||
}
|
||||
|
||||
Handle<Value> params = args[1];
|
||||
|
||||
Local<Array> jsParams = Local<Array>::Cast(args[1]);
|
||||
int len = jsParams->Length();
|
||||
|
||||
@ -232,19 +303,28 @@ public:
|
||||
return Undefined();
|
||||
}
|
||||
|
||||
ev_io read_watcher_;
|
||||
ev_io write_watcher_;
|
||||
uv_poll_t read_watcher_;
|
||||
uv_poll_t write_watcher_;
|
||||
PGconn *connection_;
|
||||
bool connecting_;
|
||||
bool ioInitialized_;
|
||||
bool copyOutMode_;
|
||||
bool copyInMode_;
|
||||
bool reading_;
|
||||
bool writing_;
|
||||
bool ended_;
|
||||
Connection () : ObjectWrap ()
|
||||
{
|
||||
connection_ = NULL;
|
||||
connecting_ = false;
|
||||
|
||||
ioInitialized_ = false;
|
||||
copyOutMode_ = false;
|
||||
copyInMode_ = false;
|
||||
reading_ = false;
|
||||
writing_ = false;
|
||||
ended_ = false;
|
||||
TRACE("Initializing ev watchers");
|
||||
ev_init(&read_watcher_, io_event);
|
||||
read_watcher_.data = this;
|
||||
ev_init(&write_watcher_, io_event);
|
||||
write_watcher_.data = this;
|
||||
}
|
||||
|
||||
@ -252,6 +332,31 @@ public:
|
||||
{
|
||||
}
|
||||
|
||||
static Handle<Value>
|
||||
SendCopyFromChunk(const Arguments& args) {
|
||||
HandleScope scope;
|
||||
Connection *self = ObjectWrap::Unwrap<Connection>(args.This());
|
||||
//TODO handle errors in some way
|
||||
if (args.Length() < 1 && !Buffer::HasInstance(args[0])) {
|
||||
THROW("SendCopyFromChunk requires 1 Buffer argument");
|
||||
}
|
||||
self->SendCopyFromChunk(args[0]->ToObject());
|
||||
return Undefined();
|
||||
}
|
||||
static Handle<Value>
|
||||
EndCopyFrom(const Arguments& args) {
|
||||
HandleScope scope;
|
||||
Connection *self = ObjectWrap::Unwrap<Connection>(args.This());
|
||||
char * error_msg = NULL;
|
||||
if (args[0]->IsString()) {
|
||||
error_msg = MallocCString(args[0]);
|
||||
}
|
||||
//TODO handle errors in some way
|
||||
self->EndCopyFrom(error_msg);
|
||||
free(error_msg);
|
||||
return Undefined();
|
||||
}
|
||||
|
||||
protected:
|
||||
//v8 entry point to constructor
|
||||
static Handle<Value>
|
||||
@ -264,33 +369,59 @@ protected:
|
||||
return args.This();
|
||||
}
|
||||
|
||||
#ifdef ESCAPE_SUPPORTED
|
||||
char * EscapeIdentifier(const char *str)
|
||||
{
|
||||
TRACE("js::EscapeIdentifier")
|
||||
return PQescapeIdentifier(connection_, str, strlen(str));
|
||||
}
|
||||
|
||||
char * EscapeLiteral(const char *str)
|
||||
{
|
||||
TRACE("js::EscapeLiteral")
|
||||
return PQescapeLiteral(connection_, str, strlen(str));
|
||||
}
|
||||
#endif
|
||||
|
||||
int Send(const char *queryText)
|
||||
{
|
||||
return PQsendQuery(connection_, queryText);
|
||||
TRACE("js::Send")
|
||||
int rv = PQsendQuery(connection_, queryText);
|
||||
StartWrite();
|
||||
return rv;
|
||||
}
|
||||
|
||||
int SendQueryParams(const char *command, const int nParams, const char * const *paramValues)
|
||||
{
|
||||
return PQsendQueryParams(connection_, command, nParams, NULL, paramValues, NULL, NULL, 0);
|
||||
TRACE("js::SendQueryParams")
|
||||
int rv = PQsendQueryParams(connection_, command, nParams, NULL, paramValues, NULL, NULL, 0);
|
||||
StartWrite();
|
||||
return rv;
|
||||
}
|
||||
|
||||
int SendPrepare(const char *name, const char *command, const int nParams)
|
||||
{
|
||||
return PQsendPrepare(connection_, name, command, nParams, NULL);
|
||||
TRACE("js::SendPrepare")
|
||||
int rv = PQsendPrepare(connection_, name, command, nParams, NULL);
|
||||
StartWrite();
|
||||
return rv;
|
||||
}
|
||||
|
||||
int SendPreparedQuery(const char *name, int nParams, const char * const *paramValues)
|
||||
{
|
||||
return PQsendQueryPrepared(connection_, name, nParams, paramValues, NULL, NULL, 0);
|
||||
int rv = PQsendQueryPrepared(connection_, name, nParams, paramValues, NULL, NULL, 0);
|
||||
StartWrite();
|
||||
return rv;
|
||||
}
|
||||
|
||||
int Cancel()
|
||||
bool Cancel()
|
||||
{
|
||||
PGcancel* pgCancel = PQgetCancel(connection_);
|
||||
char errbuf[256];
|
||||
int result = PQcancel(pgCancel, errbuf, 256);
|
||||
PQfreeCancel(pgCancel);
|
||||
return result;
|
||||
PGcancel* pgCancel = PQgetCancel(connection_);
|
||||
char errbuf[256];
|
||||
int result = PQcancel(pgCancel, errbuf, 256);
|
||||
StartWrite();
|
||||
PQfreeCancel(pgCancel);
|
||||
return result;
|
||||
}
|
||||
|
||||
//flushes socket
|
||||
@ -298,7 +429,7 @@ protected:
|
||||
{
|
||||
if(PQflush(connection_) == 1) {
|
||||
TRACE("Flushing");
|
||||
ev_io_start(EV_DEFAULT_ &write_watcher_);
|
||||
uv_poll_start(&write_watcher_, UV_WRITABLE, io_event);
|
||||
}
|
||||
}
|
||||
|
||||
@ -315,21 +446,21 @@ protected:
|
||||
//and hands off control to libev
|
||||
bool Connect(const char* conninfo)
|
||||
{
|
||||
if(ended_) return true;
|
||||
connection_ = PQconnectStart(conninfo);
|
||||
|
||||
if (!connection_) {
|
||||
LOG("Connection couldn't be created");
|
||||
}
|
||||
|
||||
if (PQsetnonblocking(connection_, 1) == -1) {
|
||||
LOG("Unable to set connection to non-blocking");
|
||||
return false;
|
||||
}
|
||||
|
||||
ConnStatusType status = PQstatus(connection_);
|
||||
|
||||
if(CONNECTION_BAD == status) {
|
||||
LOG("Bad connection status");
|
||||
return false;
|
||||
}
|
||||
|
||||
if (PQsetnonblocking(connection_, 1) == -1) {
|
||||
LOG("Unable to set connection to non-blocking");
|
||||
return false;
|
||||
}
|
||||
|
||||
@ -344,8 +475,10 @@ protected:
|
||||
PQsetNoticeProcessor(connection_, NoticeReceiver, this);
|
||||
|
||||
TRACE("Setting watchers to socket");
|
||||
ev_io_set(&read_watcher_, fd, EV_READ);
|
||||
ev_io_set(&write_watcher_, fd, EV_WRITE);
|
||||
uv_poll_init(uv_default_loop(), &read_watcher_, fd);
|
||||
uv_poll_init(uv_default_loop(), &write_watcher_, fd);
|
||||
|
||||
ioInitialized_ = true;
|
||||
|
||||
connecting_ = true;
|
||||
StartWrite();
|
||||
@ -367,13 +500,9 @@ protected:
|
||||
Emit("notice", ¬ice);
|
||||
}
|
||||
|
||||
//called to process io_events from libev
|
||||
//called to process io_events from libuv
|
||||
void HandleIOEvent(int revents)
|
||||
{
|
||||
if(revents & EV_ERROR) {
|
||||
LOG("Connection error.");
|
||||
return;
|
||||
}
|
||||
|
||||
if(connecting_) {
|
||||
TRACE("Processing connecting_ io");
|
||||
@ -381,24 +510,41 @@ protected:
|
||||
return;
|
||||
}
|
||||
|
||||
if(revents & EV_READ) {
|
||||
TRACE("revents & EV_READ");
|
||||
if(revents & UV_READABLE) {
|
||||
TRACE("revents & UV_READABLE");
|
||||
TRACE("about to consume input");
|
||||
if(PQconsumeInput(connection_) == 0) {
|
||||
LOG("Something happened, consume input is 0");
|
||||
TRACE("could not read, terminating");
|
||||
End();
|
||||
EmitLastError();
|
||||
//LOG("Something happened, consume input is 0");
|
||||
return;
|
||||
}
|
||||
TRACE("Consumed");
|
||||
|
||||
//declare handlescope as this method is entered via a libev callback
|
||||
//declare handlescope as this method is entered via a libuv callback
|
||||
//and not part of the public v8 interface
|
||||
HandleScope scope;
|
||||
|
||||
if (PQisBusy(connection_) == 0) {
|
||||
if (this->copyOutMode_) {
|
||||
this->HandleCopyOut();
|
||||
}
|
||||
if (!this->copyInMode_ && !this->copyOutMode_ && PQisBusy(connection_) == 0) {
PGresult *result;
bool didHandleResult = false;
TRACE("PQgetResult");
while ((result = PQgetResult(connection_))) {
HandleResult(result);
didHandleResult = true;
TRACE("HandleResult");
didHandleResult = HandleResult(result);
TRACE("PQClear");
PQclear(result);
if(!didHandleResult) {
//this means that we are in copy in or copy out mode
//in this situation PQgetResult will return the same
//result until all data has been read (copy out) or
//until the data end notification arrives (copy in),
//and because of this we need to break out of the loop
break;
}
}
//might have fired from notification
if(didHandleResult) {
@ -407,6 +553,7 @@ protected:
}

PGnotify *notify;
TRACE("PQnotifies");
while ((notify = PQnotifies(connection_))) {
Local<Object> result = Object::New();
result->Set(channel_symbol, String::New(notify->relname));
@ -418,32 +565,122 @@ protected:

}

if(revents & EV_WRITE) {
TRACE("revents & EV_WRITE");
if(revents & UV_WRITABLE) {
TRACE("revents & UV_WRITABLE");
if (PQflush(connection_) == 0) {
StopWrite();
//nothing left to write, poll the socket for more to read
StartRead();
}
}
}
bool HandleCopyOut () {
char * buffer = NULL;
int copied;
Buffer * chunk;
copied = PQgetCopyData(connection_, &buffer, 1);
while (copied > 0) {
chunk = Buffer::New(buffer, copied);
Local<Value> node_chunk = Local<Value>::New(chunk->handle_);
Emit("copyData", &node_chunk);
PQfreemem(buffer);
copied = PQgetCopyData(connection_, &buffer, 1);
}
if (copied == 0) {
//wait for next read ready
//result was not handled completely
return false;
} else if (copied == -1) {
this->copyOutMode_ = false;
return true;
} else if (copied == -2) {
this->copyOutMode_ = false;
return true;
}
return false;
}

void HandleResult(const PGresult* result)
//maps the postgres tuple results to v8 objects
//and emits row events
//TODO look at emitting fewer events because the back & forth between
//javascript & c++ might introduce overhead (requires benchmarking)
void EmitRowDescription(const PGresult* result)
{
HandleScope scope;
Local<Array> row = Array::New();
int fieldCount = PQnfields(result);
for(int fieldNumber = 0; fieldNumber < fieldCount; fieldNumber++) {
Local<Object> field = Object::New();
//name of field
char* fieldName = PQfname(result, fieldNumber);
field->Set(name_symbol, String::New(fieldName));

//oid of type of field
int fieldType = PQftype(result, fieldNumber);
field->Set(type_symbol, Integer::New(fieldType));

row->Set(Integer::New(fieldNumber), field);
}

Handle<Value> e = (Handle<Value>)row;
Emit("_rowDescription", &e);
}

bool HandleResult(PGresult* result)
{
TRACE("PQresultStatus");
ExecStatusType status = PQresultStatus(result);
switch(status) {
case PGRES_TUPLES_OK:
HandleTuplesResult(result);
{
EmitRowDescription(result);
HandleTuplesResult(result);
EmitCommandMetaData(result);
return true;
}
break;
case PGRES_FATAL_ERROR:
HandleErrorResult(result);
{
TRACE("HandleErrorResult");
HandleErrorResult(result);
return true;
}
break;
case PGRES_COMMAND_OK:
case PGRES_EMPTY_QUERY:
//do nothing
{
EmitCommandMetaData(result);
return true;
}
break;
case PGRES_COPY_IN:
{
this->copyInMode_ = true;
Emit("copyInResponse");
return false;
}
break;
case PGRES_COPY_OUT:
{
this->copyOutMode_ = true;
Emit("copyOutResponse");
return this->HandleCopyOut();
}
break;
default:
printf("Unrecognized query status: %s\n", PQresStatus(status));
printf("YOU SHOULD NEVER SEE THIS! PLEASE OPEN AN ISSUE ON GITHUB! Unrecognized query status: %s\n", PQresStatus(status));
break;
}
return true;
}

void EmitCommandMetaData(PGresult* result)
{
HandleScope scope;
Local<Object> info = Object::New();
info->Set(command_symbol, String::New(PQcmdStatus(result)));
info->Set(value_symbol, String::New(PQcmdTuples(result)));
Handle<Value> e = (Handle<Value>)info;
Emit("_cmdStatus", &e);
}

//maps the postgres tuple results to v8 objects
@ -452,33 +689,23 @@ protected:
//javascript & c++ might introduce overhead (requires benchmarking)
void HandleTuplesResult(const PGresult* result)
{
HandleScope scope;
int rowCount = PQntuples(result);
for(int rowNumber = 0; rowNumber < rowCount; rowNumber++) {
//create result object for this row
Local<Array> row = Array::New();
int fieldCount = PQnfields(result);
for(int fieldNumber = 0; fieldNumber < fieldCount; fieldNumber++) {
Local<Object> field = Object::New();
//name of field
char* fieldName = PQfname(result, fieldNumber);
field->Set(name_symbol, String::New(fieldName));

//oid of type of field
int fieldType = PQftype(result, fieldNumber);
field->Set(type_symbol, Integer::New(fieldType));

//value of field
if(PQgetisnull(result, rowNumber, fieldNumber)) {
field->Set(value_symbol, Null());
row->Set(Integer::New(fieldNumber), Null());
} else {
char* fieldValue = PQgetvalue(result, rowNumber, fieldNumber);
field->Set(value_symbol, String::New(fieldValue));
row->Set(Integer::New(fieldNumber), String::New(fieldValue));
}

row->Set(Integer::New(fieldNumber), field);
}

//not sure about what to dealloc or scope#Close here
Handle<Value> e = (Handle<Value>)row;
Emit("_row", &e);
}
@ -488,8 +715,15 @@ protected:
{
HandleScope scope;
//instantiate the return object as an Error with the summary Postgres message
Local<Object> msg = Local<Object>::Cast(Exception::Error(String::New(PQresultErrorField(result, PG_DIAG_MESSAGE_PRIMARY))));

TRACE("ReadResultField");
const char* errorMessage = PQresultErrorField(result, PG_DIAG_MESSAGE_PRIMARY);
if(!errorMessage) {
//there is no error, it has already been consumed in the last
//read-loop callback
return;
}
Local<Object> msg = Local<Object>::Cast(Exception::Error(String::New(errorMessage)));
TRACE("AttachErrorFields");
//add the other information returned by Postgres to the error object
AttachErrorField(result, msg, severity_symbol, PG_DIAG_SEVERITY);
AttachErrorField(result, msg, code_symbol, PG_DIAG_SQLSTATE);
@ -503,6 +737,7 @@ protected:
AttachErrorField(result, msg, line_symbol, PG_DIAG_SOURCE_LINE);
AttachErrorField(result, msg, routine_symbol, PG_DIAG_SOURCE_FUNCTION);
Handle<Value> m = msg;
TRACE("EmitError");
Emit("_error", &m);
}

@ -516,9 +751,12 @@ protected:

void End()
{
TRACE("stopping read & write");
StopRead();
StopWrite();
DestroyConnection();
Emit("_end");
ended_ = true;
}

private:
@ -553,30 +791,28 @@ private:
{
PostgresPollingStatusType status = PQconnectPoll(connection_);
switch(status) {
case PGRES_POLLING_READING:
TRACE("Polled: PGRES_POLLING_READING");
StopWrite();
StartRead();
break;
case PGRES_POLLING_WRITING:
TRACE("Polled: PGRES_POLLING_WRITING");
StopRead();
StartWrite();
break;
case PGRES_POLLING_FAILED:
StopRead();
StopWrite();
TRACE("Polled: PGRES_POLLING_FAILED");
EmitLastError();
break;
case PGRES_POLLING_OK:
TRACE("Polled: PGRES_POLLING_OK");
connecting_ = false;
StartRead();
Emit("connect");
default:
//printf("Unknown polling status: %d\n", status);
break;
case PGRES_POLLING_READING:
TRACE("Polled: PGRES_POLLING_READING");
StartRead();
break;
case PGRES_POLLING_WRITING:
TRACE("Polled: PGRES_POLLING_WRITING");
StartWrite();
break;
case PGRES_POLLING_FAILED:
StopRead();
StopWrite();
TRACE("Polled: PGRES_POLLING_FAILED");
EmitLastError();
break;
case PGRES_POLLING_OK:
TRACE("Polled: PGRES_POLLING_OK");
connecting_ = false;
StartRead();
Emit("connect");
default:
//printf("Unknown polling status: %d\n", status);
break;
}
}

@ -591,28 +827,49 @@ private:
EmitError(PQerrorMessage(connection_));
}

const char *GetLastError()
{
return PQerrorMessage(connection_);
}

void StopWrite()
{
TRACE("Stoping write watcher");
ev_io_stop(EV_DEFAULT_ &write_watcher_);
TRACE("write STOP");
if(ioInitialized_ && writing_) {
uv_poll_stop(&write_watcher_);
writing_ = false;
}
}

void StartWrite()
{
TRACE("Starting write watcher");
ev_io_start(EV_DEFAULT_ &write_watcher_);
TRACE("write START");
if(reading_) {
TRACE("stop READ to start WRITE");
StopRead();
}
uv_poll_start(&write_watcher_, UV_WRITABLE, io_event);
writing_ = true;
}

void StopRead()
{
TRACE("Stoping read watcher");
ev_io_stop(EV_DEFAULT_ &read_watcher_);
TRACE("read STOP");
if(ioInitialized_ && reading_) {
uv_poll_stop(&read_watcher_);
reading_ = false;
}
}

void StartRead()
{
TRACE("Starting read watcher");
ev_io_start(EV_DEFAULT_ &read_watcher_);
TRACE("read START");
if(writing_) {
TRACE("stop WRITE to start READ");
StopWrite();
}
uv_poll_start(&read_watcher_, UV_READABLE, io_event);
reading_ = true;
}
//Converts a v8 array to an array of cstrings
//the result char** array must be freed with free() when it is no longer needed
@ -664,6 +921,14 @@ private:
strcpy(cString, *utf8String);
return cString;
}
void SendCopyFromChunk(Handle<Object> chunk) {
PQputCopyData(connection_, Buffer::Data(chunk), Buffer::Length(chunk));
}
void EndCopyFrom(char * error_msg) {
PQputCopyEnd(connection_, error_msg);
this->copyInMode_ = false;
}

};


@ -672,3 +937,4 @@ extern "C" void init (Handle<Object> target)
HandleScope scope;
Connection::Init(target);
}
NODE_MODULE(binding, init)

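The copyData / copyInResponse / copyOutResponse events emitted by the native binding above surface in the JavaScript API as the copyTo and copyFrom streams exercised by the copy tests further down. A minimal consumption sketch, assuming an already-connected client and the copy_test table those tests create (names are illustrative, not part of this diff):

var stream = client.copyTo('COPY copy_test (name, age) TO STDOUT WITH CSV');
var chunks = [];
stream.on('data', function (chunk) {
  chunks.push(chunk); // each chunk is a Buffer holding one or more CSV rows
});
stream.on('end', function () {
  console.log(Buffer.concat(chunks).toString()); // full CSV payload once COPY completes
});
stream.on('error', function (err) {
  console.error('COPY failed', err);
});
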
55
test/cli.js
@ -1,56 +1,17 @@
var config = {
port: 5432,
host: 'localhost',
user: 'postgres',
database: 'postgres',
password: '',
test: 'unit'
};
var ConnectionParameters = require(__dirname + '/../lib/connection-parameters');
var config = new ConnectionParameters(process.argv[2]);

var args = process.argv;
for(var i = 0; i < args.length; i++) {
switch(args[i].toLowerCase()) {
case '-u':
case '--user':
config.user = args[++i];
for(var i = 0; i < process.argv.length; i++) {
switch(process.argv[i].toLowerCase()) {
case 'native':
config.native = true;
break;
case '--password':
config.password = args[++i];
case 'binary':
config.binary = true;
break;
case '--verbose':
config.verbose = (args[++i] == "true");
break;
case '-d':
case '--database':
config.database = args[++i];
break;
case '-p':
case '--port':
config.port = args[++i];
break;
case '-h':
case '--host':
config.host = args[++i];
break;
case '--down':
config.down = true;
break;
case '-t':
case '--test':
config.test = args[++i];
case '--native':
config.native = (args[++i] == "true");
case '--binary':
config.binary = (args[++i] == "true");
default:
break;
}
}

var log = function(keys) {
keys.forEach(function(key) {
console.log(key + ": '" + config[key] + "'");
});
}

module.exports = config;

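The rewritten cli.js above delegates all connection handling to ConnectionParameters and only keeps the native/binary keywords. A minimal sketch of the same parsing used directly, assuming it is run from the repository root (the connection string is an example value, not part of this diff):

var ConnectionParameters = require('./lib/connection-parameters');
var config = new ConnectionParameters('postgres://postgres@localhost:5432/postgres');
// the test harness reads these fields off the parsed config
console.log(config.user, config.host, config.port, config.database);
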
@ -1,9 +1,5 @@
|
||||
var helper = require(__dirname + '/../test-helper');
|
||||
var pg = require(__dirname + '/../../../lib');
|
||||
|
||||
if(helper.args.native) {
|
||||
pg = require(__dirname + '/../../../lib').native;
|
||||
}
|
||||
var pg = helper.pg;
|
||||
|
||||
var log = function() {
|
||||
//console.log.apply(console, arguments);
|
||||
@ -16,7 +12,13 @@ var sink = new helper.Sink(5, 10000, function() {
|
||||
|
||||
test('api', function() {
|
||||
log("connecting to %j", helper.config)
|
||||
pg.connect(helper.config, assert.calls(function(err, client) {
|
||||
//test weird callback behavior with node-pool
|
||||
pg.connect(helper.config, function(err) {
|
||||
assert.isNull(err);
|
||||
arguments[1].emit('drain');
|
||||
arguments[2]();
|
||||
});
|
||||
pg.connect(helper.config, assert.calls(function(err, client, done) {
|
||||
assert.equal(err, null, "Failed to connect: " + helper.sys.inspect(err));
|
||||
|
||||
client.query('CREATE TEMP TABLE band(name varchar(100))');
|
||||
@ -51,14 +53,14 @@ test('api', function() {
|
||||
assert.equal(result.rows.pop().name, 'the flaming lips');
|
||||
assert.equal(result.rows.pop().name, 'the beach boys');
|
||||
sink.add();
|
||||
done();
|
||||
}))
|
||||
}))
|
||||
|
||||
}))
|
||||
})
|
||||
|
||||
test('executing nested queries', function() {
|
||||
pg.connect(helper.config, assert.calls(function(err, client) {
|
||||
pg.connect(helper.config, assert.calls(function(err, client, done) {
|
||||
assert.isNull(err);
|
||||
log("connected for nested queriese")
|
||||
client.query('select now as now from NOW()', assert.calls(function(err, result) {
|
||||
@ -68,6 +70,7 @@ test('executing nested queries', function() {
|
||||
log('all nested queries received')
|
||||
assert.ok('all queries hit')
|
||||
sink.add();
|
||||
done();
|
||||
}))
|
||||
}))
|
||||
}))
|
||||
@ -77,27 +80,29 @@ test('executing nested queries', function() {
|
||||
test('raises error if cannot connect', function() {
|
||||
var connectionString = "pg://sfalsdkf:asdf@localhost/ieieie";
|
||||
log("trying to connect to invalid place for error")
|
||||
pg.connect(connectionString, assert.calls(function(err, client) {
|
||||
pg.connect(connectionString, assert.calls(function(err, client, done) {
|
||||
assert.ok(err, 'should have raised an error')
|
||||
log("invalid connection supplied error to callback")
|
||||
sink.add();
|
||||
done();
|
||||
}))
|
||||
})
|
||||
|
||||
test("query errors are handled and do not bubble if callback is provded", function() {
|
||||
pg.connect(helper.config, assert.calls(function(err, client) {
|
||||
pg.connect(helper.config, assert.calls(function(err, client, done) {
|
||||
assert.isNull(err)
|
||||
log("checking for query error")
|
||||
client.query("SELECT OISDJF FROM LEIWLISEJLSE", assert.calls(function(err, result) {
|
||||
assert.ok(err);
|
||||
log("query error supplied error to callback")
|
||||
sink.add();
|
||||
done();
|
||||
}))
|
||||
}))
|
||||
})
|
||||
|
||||
test('callback is fired once and only once', function() {
|
||||
pg.connect(helper.config, assert.calls(function(err, client) {
|
||||
pg.connect(helper.config, assert.calls(function(err, client, done) {
|
||||
assert.isNull(err);
|
||||
client.query("CREATE TEMP TABLE boom(name varchar(10))");
|
||||
var callCount = 0;
|
||||
@ -108,12 +113,13 @@ test('callback is fired once and only once', function() {
|
||||
].join(";"), function(err, callback) {
|
||||
assert.equal(callCount++, 0, "Call count should be 0. More means this callback fired more than once.");
|
||||
sink.add();
|
||||
done();
|
||||
})
|
||||
}))
|
||||
})
|
||||
|
||||
test('can provide callback and config object', function() {
|
||||
pg.connect(helper.config, assert.calls(function(err, client) {
|
||||
pg.connect(helper.config, assert.calls(function(err, client, done) {
|
||||
assert.isNull(err);
|
||||
client.query({
|
||||
name: 'boom',
|
||||
@ -121,12 +127,13 @@ test('can provide callback and config object', function() {
|
||||
}, assert.calls(function(err, result) {
|
||||
assert.isNull(err);
|
||||
assert.equal(result.rows[0].now.getYear(), new Date().getYear())
|
||||
done();
|
||||
}))
|
||||
}))
|
||||
})
|
||||
|
||||
test('can provide callback and config and parameters', function() {
|
||||
pg.connect(helper.config, assert.calls(function(err, client) {
|
||||
pg.connect(helper.config, assert.calls(function(err, client, done) {
|
||||
assert.isNull(err);
|
||||
var config = {
|
||||
text: 'select $1::text as val'
|
||||
@ -135,12 +142,13 @@ test('can provide callback and config and parameters', function() {
|
||||
assert.isNull(err);
|
||||
assert.equal(result.rows.length, 1);
|
||||
assert.equal(result.rows[0].val, 'hi');
|
||||
done();
|
||||
}))
|
||||
}))
|
||||
})
|
||||
|
||||
test('null and undefined are both inserted as NULL', function() {
|
||||
pg.connect(helper.config, assert.calls(function(err, client) {
|
||||
pg.connect(helper.config, assert.calls(function(err, client, done) {
|
||||
assert.isNull(err);
|
||||
client.query("CREATE TEMP TABLE my_nulls(a varchar(1), b varchar(1), c integer, d integer, e date, f date)");
|
||||
client.query("INSERT INTO my_nulls(a,b,c,d,e,f) VALUES ($1,$2,$3,$4,$5,$6)", [ null, undefined, null, undefined, null, undefined ]);
|
||||
@ -153,6 +161,7 @@ test('null and undefined are both inserted as NULL', function() {
|
||||
assert.isNull(result.rows[0].d);
|
||||
assert.isNull(result.rows[0].e);
|
||||
assert.isNull(result.rows[0].f);
|
||||
done();
|
||||
}))
|
||||
}))
|
||||
})
|
||||
|
||||
@ -2,7 +2,7 @@ var helper = require(__dirname + "/test-helper");
|
||||
var pg = helper.pg;
|
||||
|
||||
test('parsing array results', function() {
|
||||
pg.connect(helper.config, assert.calls(function(err, client) {
|
||||
pg.connect(helper.config, assert.calls(function(err, client, done) {
|
||||
assert.isNull(err);
|
||||
client.query("CREATE TEMP TABLE why(names text[], numbors integer[])");
|
||||
client.query('INSERT INTO why(names, numbors) VALUES(\'{"aaron", "brian","a b c" }\', \'{1, 2, 3}\')').on('error', console.log);
|
||||
@ -23,9 +23,113 @@ test('parsing array results', function() {
|
||||
assert.equal(names[0], 'aaron');
|
||||
assert.equal(names[1], 'brian');
|
||||
assert.equal(names[2], "a b c");
|
||||
}))
|
||||
})
|
||||
|
||||
test('empty array', function(){
|
||||
client.query("SELECT '{}'::text[] as names", assert.success(function(result) {
|
||||
var names = result.rows[0].names;
|
||||
assert.lengthIs(names, 0);
|
||||
}))
|
||||
})
|
||||
|
||||
test('element containing comma', function(){
|
||||
client.query("SELECT '{\"joe,bob\",jim}'::text[] as names", assert.success(function(result) {
|
||||
var names = result.rows[0].names;
|
||||
assert.lengthIs(names, 2);
|
||||
assert.equal(names[0], 'joe,bob');
|
||||
assert.equal(names[1], 'jim');
|
||||
}))
|
||||
})
|
||||
|
||||
test('bracket in quotes', function(){
|
||||
client.query("SELECT '{\"{\",\"}\"}'::text[] as names", assert.success(function(result) {
|
||||
var names = result.rows[0].names;
|
||||
assert.lengthIs(names, 2);
|
||||
assert.equal(names[0], '{');
|
||||
assert.equal(names[1], '}');
|
||||
}))
|
||||
})
|
||||
|
||||
test('null value', function(){
|
||||
client.query("SELECT '{joe,null,bob,\"NULL\"}'::text[] as names", assert.success(function(result) {
|
||||
var names = result.rows[0].names;
|
||||
assert.lengthIs(names, 4);
|
||||
assert.equal(names[0], 'joe');
|
||||
assert.equal(names[1], null);
|
||||
assert.equal(names[2], 'bob');
|
||||
assert.equal(names[3], 'NULL');
|
||||
}))
|
||||
})
|
||||
|
||||
test('element containing quote char', function(){
|
||||
client.query("SELECT ARRAY['joe''', 'jim', 'bob\"'] AS names", assert.success(function(result) {
|
||||
var names = result.rows[0].names;
|
||||
assert.lengthIs(names, 3);
|
||||
assert.equal(names[0], 'joe\'');
|
||||
assert.equal(names[1], 'jim');
|
||||
assert.equal(names[2], 'bob"');
|
||||
}))
|
||||
})
|
||||
|
||||
test('nested array', function(){
|
||||
client.query("SELECT '{{1,joe},{2,bob}}'::text[] as names", assert.success(function(result) {
|
||||
var names = result.rows[0].names;
|
||||
assert.lengthIs(names, 2);
|
||||
|
||||
assert.lengthIs(names[0], 2);
|
||||
assert.equal(names[0][0], '1');
|
||||
assert.equal(names[0][1], 'joe');
|
||||
|
||||
assert.lengthIs(names[1], 2);
|
||||
assert.equal(names[1][0], '2');
|
||||
assert.equal(names[1][1], 'bob');
|
||||
|
||||
}))
|
||||
})
|
||||
|
||||
test('integer array', function(){
|
||||
client.query("SELECT '{1,2,3}'::integer[] as names", assert.success(function(result) {
|
||||
var names = result.rows[0].names;
|
||||
assert.lengthIs(names, 3);
|
||||
assert.equal(names[0], 1);
|
||||
assert.equal(names[1], 2);
|
||||
assert.equal(names[2], 3);
|
||||
}))
|
||||
})
|
||||
|
||||
test('integer nested array', function(){
|
||||
client.query("SELECT '{{1,100},{2,100},{3,100}}'::integer[] as names", assert.success(function(result) {
|
||||
var names = result.rows[0].names;
|
||||
assert.lengthIs(names, 3);
|
||||
assert.equal(names[0][0], 1);
|
||||
assert.equal(names[0][1], 100);
|
||||
|
||||
assert.equal(names[1][0], 2);
|
||||
assert.equal(names[1][1], 100);
|
||||
|
||||
assert.equal(names[2][0], 3);
|
||||
assert.equal(names[2][1], 100);
|
||||
}))
|
||||
})
|
||||
|
||||
test('JS array parameter', function(){
|
||||
client.query("SELECT $1::integer[] as names", [[[1,100],[2,100],[3,100]]], assert.success(function(result) {
|
||||
var names = result.rows[0].names;
|
||||
assert.lengthIs(names, 3);
|
||||
assert.equal(names[0][0], 1);
|
||||
assert.equal(names[0][1], 100);
|
||||
|
||||
assert.equal(names[1][0], 2);
|
||||
assert.equal(names[1][1], 100);
|
||||
|
||||
assert.equal(names[2][0], 3);
|
||||
assert.equal(names[2][1], 100);
|
||||
done();
|
||||
pg.end();
|
||||
}))
|
||||
})
|
||||
|
||||
}))
|
||||
})
|
||||
|
||||
|
||||
@ -5,42 +5,42 @@ test("cancellation of a query", function() {
|
||||
|
||||
var client = helper.client();
|
||||
|
||||
var qry = client.query("select name from person order by name");
|
||||
var qry = "select name from person order by name";
|
||||
|
||||
client.on('drain', client.end.bind(client));
|
||||
|
||||
var rows1 = 0, rows2 = 0, rows3 = 0, rows4 = 0;
|
||||
var rows1 = 0, rows2 = 0, rows3 = 0, rows4 = 0;
|
||||
|
||||
var query1 = client.query(qry);
|
||||
query1.on('row', function(row) {
|
||||
rows1++;
|
||||
});
|
||||
var query2 = client.query(qry);
|
||||
query2.on('row', function(row) {
|
||||
rows2++;
|
||||
});
|
||||
var query3 = client.query(qry);
|
||||
query3.on('row', function(row) {
|
||||
rows3++;
|
||||
});
|
||||
var query4 = client.query(qry);
|
||||
query4.on('row', function(row) {
|
||||
rows4++;
|
||||
});
|
||||
var query1 = client.query(qry);
|
||||
query1.on('row', function(row) {
|
||||
rows1++;
|
||||
});
|
||||
var query2 = client.query(qry);
|
||||
query2.on('row', function(row) {
|
||||
rows2++;
|
||||
});
|
||||
var query3 = client.query(qry);
|
||||
query3.on('row', function(row) {
|
||||
rows3++;
|
||||
});
|
||||
var query4 = client.query(qry);
|
||||
query4.on('row', function(row) {
|
||||
rows4++;
|
||||
});
|
||||
|
||||
helper.pg.cancel(helper.config, client, query1);
|
||||
helper.pg.cancel(helper.config, client, query2);
|
||||
helper.pg.cancel(helper.config, client, query4);
|
||||
helper.pg.cancel(helper.config, client, query1);
|
||||
helper.pg.cancel(helper.config, client, query2);
|
||||
helper.pg.cancel(helper.config, client, query4);
|
||||
|
||||
setTimeout(function() {
|
||||
assert.equal(rows1, 0);
|
||||
assert.equal(rows2, 0);
|
||||
assert.equal(rows4, 0);
|
||||
}, 2000);
|
||||
setTimeout(function() {
|
||||
assert.equal(rows1, 0);
|
||||
assert.equal(rows2, 0);
|
||||
assert.equal(rows4, 0);
|
||||
}, 2000);
|
||||
|
||||
assert.emits(query3, 'end', function() {
|
||||
test("returned right number of rows", function() {
|
||||
assert.equal(rows3, 26);
|
||||
});
|
||||
});
|
||||
test("returned right number of rows", function() {
|
||||
assert.equal(rows3, 26);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
@ -1,6 +1,13 @@
|
||||
var helper = require(__dirname + '/test-helper');
|
||||
var pg = helper.pg;
|
||||
|
||||
//clear process.env
|
||||
var realEnv = {};
|
||||
for(var key in process.env) {
|
||||
realEnv[key] = process.env[key];
|
||||
if(!key.indexOf('PG')) delete process.env[key];
|
||||
}
|
||||
|
||||
test('default values', function() {
|
||||
assert.same(pg.defaults,{
|
||||
user: process.env.USER,
|
||||
@ -44,3 +51,8 @@ if(!helper.args.native) {
|
||||
})
|
||||
}
|
||||
|
||||
|
||||
//restore process.env
|
||||
for(var key in realEnv) {
|
||||
process.env[key] = realEnv[key];
|
||||
}
|
||||
|
||||
167
test/integration/client/copy-tests.js
Normal file
@ -0,0 +1,167 @@
var helper = require(__dirname + '/../test-helper');
var pg = require(__dirname + '/../../../lib');
if(helper.args.native) {
pg = require(__dirname + '/../../../lib').native;
}
var ROWS_TO_INSERT = 1000;
var prepareTable = function (client, callback) {
client.query(
'CREATE TEMP TABLE copy_test (id SERIAL, name CHARACTER VARYING(10), age INT)',
assert.calls(function (err, result) {
assert.equal(err, null, "create table query should not fail");
callback();
})
);
};
test('COPY FROM', function () {
pg.connect(helper.config, function (error, client, done) {
assert.equal(error, null, "Failed to connect: " + helper.sys.inspect(error));
prepareTable(client, function () {
var stream = client.copyFrom("COPY copy_test (name, age) FROM stdin WITH CSV");
stream.on('error', function (error) {
assert.ok(false, "COPY FROM stream should not emit errors" + helper.sys.inspect(error));
});
for (var i = 0; i < ROWS_TO_INSERT; i++) {
stream.write( String(Date.now() + Math.random()).slice(0,10) + ',' + i + '\n');
}
assert.emits(stream, 'close', function () {
client.query("SELECT count(*), sum(age) from copy_test", function (err, result) {
assert.equal(err, null, "Query should not fail");
assert.lengthIs(result.rows, 1)
assert.equal(result.rows[0].sum, ROWS_TO_INSERT * (0 + ROWS_TO_INSERT -1)/2);
assert.equal(result.rows[0].count, ROWS_TO_INSERT);
done();
});
}, "COPY FROM stream should emit close after query end");
stream.end();
});
});
});
test('COPY TO', function () {
pg.connect(helper.config, function (error, client, done) {
assert.equal(error, null, "Failed to connect: " + helper.sys.inspect(error));
prepareTable(client, function () {
var stream = client.copyTo("COPY person (id, name, age) TO stdin WITH CSV");
var buf = new Buffer(0);
stream.on('error', function (error) {
assert.ok(false, "COPY TO stream should not emit errors" + helper.sys.inspect(error));
});
assert.emits(stream, 'data', function (chunk) {
buf = Buffer.concat([buf, chunk]);
}, "COPY IN stream should emit data event for each row");
assert.emits(stream, 'end', function () {
var lines = buf.toString().split('\n');
assert.equal(lines.length >= 0, true, "copy in should return rows saved by copy from");
assert.equal(lines[0].split(',').length, 3, "each line should consist of 3 fields");
done();
}, "COPY IN stream should emit end event after all rows");
});
});
});

test('COPY TO, queue queries', function () {
if(helper.config.native) return false;
pg.connect(helper.config, assert.calls(function (error, client, done) {
assert.equal(error, null, "Failed to connect: " + helper.sys.inspect(error));
prepareTable(client, function () {
var query1Done = false,
copyQueryDone = false,
query2Done = false;
client.query("SELECT count(*) from person", function () {
query1Done = true;
assert.ok(!copyQueryDone && ! query2Done, "first query has to be executed before others");
});
var stream = client.copyTo("COPY person (id, name, age) TO stdin WITH CSV");
//imitate a long query, to make it impossible for
//the copy query end callback to run after the
//second query callback
client.query("SELECT pg_sleep(1)", function () {
query2Done = true;
assert.ok(copyQueryDone && query2Done, "second query has to be executed after others");
});
var buf = new Buffer(0);
stream.on('error', function (error) {
assert.ok(false, "COPY TO stream should not emit errors" + helper.sys.inspect(error));
});
assert.emits(stream, 'data', function (chunk) {
buf = Buffer.concat([buf, chunk]);
}, "COPY IN stream should emit data event for each row");
assert.emits(stream, 'end', function () {
copyQueryDone = true;
assert.ok(query1Done && ! query2Done, "copy query has to be executed before second query and after first");
var lines = buf.toString().split('\n');
assert.equal(lines.length >= 0, true, "copy in should return rows saved by copy from");
assert.equal(lines[0].split(',').length, 3, "each line should consist of 3 fields");
done();
}, "COPY IN stream should emit end event after all rows");
});
}));
});

test("COPY TO incorrect usage with large data", function () {
if(helper.config.native) return false;
//when a lot of data is loaded from the database (and it takes a lot of time)
//there is a chance that the query will be canceled before it ends,
//but if there is not much data, the cancel message may be
//sent after the copy query ends,
//so we need to test both situations
pg.connect(helper.config, assert.calls(function (error, client, done) {
assert.equal(error, null, "Failed to connect: " + helper.sys.inspect(error));
//intentionally incorrect usage of copy.
//this has to report the error in the standard way, instead of just throwing an exception
client.query(
"COPY (SELECT GENERATE_SERIES(1, 10000000)) TO STDOUT WITH CSV",
assert.calls(function (error) {
assert.ok(error, "error should be reported when sending copy to query with query method");
client.query("SELECT 1", assert.calls(function (error, result) {
assert.isNull(error, "incorrect copy usage should not break connection");
assert.ok(result, "incorrect copy usage should not break connection");
done();
}));
})
);
}));
});

test("COPY TO incorrect usage with small data", function () {
if(helper.config.native) return false;
pg.connect(helper.config, assert.calls(function (error, client, done) {
assert.equal(error, null, "Failed to connect: " + helper.sys.inspect(error));
//intentionally incorrect usage of copy.
//this has to report the error in the standard way, instead of just throwing an exception
client.query(
"COPY (SELECT GENERATE_SERIES(1, 1)) TO STDOUT WITH CSV",
assert.calls(function (error) {
assert.ok(error, "error should be reported when sending copy to query with query method");
client.query("SELECT 1", assert.calls(function (error, result) {
assert.isNull(error, "incorrect copy usage should not break connection: " + error);
assert.ok(result, "incorrect copy usage should not break connection");
done();
}));
})
);
}));
});

test("COPY FROM incorrect usage", function () {
pg.connect(helper.config, function (error, client, done) {
assert.equal(error, null, "Failed to connect: " + helper.sys.inspect(error));
prepareTable(client, function () {
//intentionally incorrect usage of copy.
//this has to report the error in the standard way, instead of just throwing an exception
client.query(
"COPY copy_test from STDIN WITH CSV",
assert.calls(function (error) {
assert.ok(error, "error should be reported when sending copy to query with query method");
client.query("SELECT 1", assert.calls(function (error, result) {
assert.isNull(error, "incorrect copy usage should not break connection: " + error);
assert.ok(result, "incorrect copy usage should not break connection");
done();
pg.end(helper.config);
}));
})
);
});
});
});

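The SUM(age) assertion in the COPY FROM test relies on the arithmetic series 0 + 1 + … + (n − 1) = n(n − 1) / 2; with ROWS_TO_INSERT = 1000 the expected value works out as:

var n = 1000;
console.log(n * (n - 1) / 2); // 499500, the value SUM(age) must equal
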
@ -1,55 +0,0 @@
|
||||
var helper = require(__dirname + '/test-helper');
|
||||
var pg = require(__dirname + '/../../../lib');
|
||||
|
||||
if(helper.args.native) {
|
||||
pg = require(__dirname + '/../../../lib').native;
|
||||
}
|
||||
|
||||
var testDrainOfClientWithPendingQueries = function() {
|
||||
pg.connect(helper.config, assert.success(function(client) {
|
||||
test('when there are pending queries and client is resumed', function() {
|
||||
var drainCount = 0;
|
||||
client.on('drain', function() {
|
||||
drainCount++;
|
||||
});
|
||||
client.pauseDrain();
|
||||
client.query('SELECT NOW()', function() {
|
||||
client.query('SELECT NOW()', function() {
|
||||
assert.equal(drainCount, 0);
|
||||
process.nextTick(function() {
|
||||
assert.equal(drainCount, 1);
|
||||
pg.end();
|
||||
});
|
||||
});
|
||||
client.resumeDrain();
|
||||
assert.equal(drainCount, 0);
|
||||
});
|
||||
});
|
||||
}));
|
||||
};
|
||||
|
||||
pg.connect(helper.config, assert.success(function(client) {
|
||||
var drainCount = 0;
|
||||
client.on('drain', function() {
|
||||
drainCount++;
|
||||
});
|
||||
test('pauseDrain and resumeDrain on simple client', function() {
|
||||
client.pauseDrain();
|
||||
client.resumeDrain();
|
||||
process.nextTick(assert.calls(function() {
|
||||
assert.equal(drainCount, 0);
|
||||
test('drain is paused', function() {
|
||||
client.pauseDrain();
|
||||
client.query('SELECT NOW()', assert.success(function() {
|
||||
process.nextTick(function() {
|
||||
assert.equal(drainCount, 0);
|
||||
client.resumeDrain();
|
||||
assert.equal(drainCount, 1);
|
||||
testDrainOfClientWithPendingQueries();
|
||||
});
|
||||
}));
|
||||
});
|
||||
}));
|
||||
});
|
||||
}));
|
||||
|
||||
@ -5,11 +5,11 @@ test("empty query message handling", function() {
|
||||
assert.emits(client, 'drain', function() {
|
||||
client.end();
|
||||
});
|
||||
client.query({text: "", binary: false});
|
||||
client.query({text: ""});
|
||||
});
|
||||
|
||||
test('callback supported', assert.calls(function() {
|
||||
client.query({text: "", binary: false}, function(err, result) {
|
||||
client.query("", function(err, result) {
|
||||
assert.isNull(err);
|
||||
assert.empty(result.rows);
|
||||
})
|
||||
|
||||
@ -11,7 +11,6 @@ var createErorrClient = function() {
|
||||
};
|
||||
|
||||
test('error handling', function(){
|
||||
|
||||
test('within a simple query', function() {
|
||||
|
||||
var client = createErorrClient();
|
||||
@ -115,18 +114,26 @@ test('non-error calls supplied callback', function() {
|
||||
});
|
||||
|
||||
test('when connecting to invalid host', function() {
|
||||
return false;
|
||||
//this test fails about 30% on travis and only on travis...
|
||||
//I'm not sure what the cause could be
|
||||
if(process.env.TRAVIS) return false;
|
||||
|
||||
var client = new Client({
|
||||
user: 'brian',
|
||||
user: 'aslkdjfsdf',
|
||||
password: '1234',
|
||||
host: 'asldkfjasdf!!#1308140.com'
|
||||
});
|
||||
assert.emits(client, 'error');
|
||||
var delay = 5000;
|
||||
var tid = setTimeout(function() {
|
||||
assert(false, "When connecting to an invalid host the error event should be emitted but it has been " + delay + " and still no error event.");
|
||||
}, delay);
|
||||
client.on('error', function() {
|
||||
clearTimeout(tid);
|
||||
})
|
||||
client.connect();
|
||||
});
|
||||
|
||||
test('when connecting to invalid host with callback', function() {
|
||||
return false;
|
||||
var client = new Client({
|
||||
user: 'brian',
|
||||
password: '1234',
|
||||
@ -156,9 +163,19 @@ test('multiple connection errors (gh#31)', function() {
|
||||
});
|
||||
|
||||
test('with callback method', function() {
|
||||
var badConString = "tcp://aslkdfj:oi14081@"+helper.args.host+":"+helper.args.port+"/"+helper.args.database;
|
||||
var badConString = "postgres://aslkdfj:oi14081@"+helper.args.host+":"+helper.args.port+"/"+helper.args.database;
|
||||
return false;
|
||||
});
|
||||
|
||||
});
|
||||
|
||||
test('query receives error on client shutdown', function() {
|
||||
var client = new Client(helper.config);
|
||||
client.connect(assert.calls(function() {
|
||||
client.query('SELECT pg_sleep(5)', assert.calls(function(err, res) {
|
||||
assert(err);
|
||||
}));
|
||||
client.end();
|
||||
assert.emits(client, 'end');
|
||||
}));
|
||||
});
|
||||
|
||||
|
||||
153
test/integration/client/escape-tests.js
Normal file
153
test/integration/client/escape-tests.js
Normal file
@ -0,0 +1,153 @@
|
||||
var helper = require(__dirname + '/test-helper');
|
||||
|
||||
function createClient(callback) {
|
||||
var client = new Client(helper.config);
|
||||
client.connect(function(err) {
|
||||
return callback(client);
|
||||
});
|
||||
}
|
||||
|
||||
test('escapeLiteral: no special characters', function() {
|
||||
createClient(function(client) {
|
||||
var expected = "'hello world'";
|
||||
var actual = client.escapeLiteral('hello world');
|
||||
assert.equal(expected, actual);
|
||||
client.end();
|
||||
});
|
||||
});
|
||||
|
||||
test('escapeLiteral: contains double quotes only', function() {
|
||||
createClient(function(client) {
|
||||
var expected = "'hello \" world'";
|
||||
var actual = client.escapeLiteral('hello " world');
|
||||
assert.equal(expected, actual);
|
||||
client.end();
|
||||
});
|
||||
});
|
||||
|
||||
test('escapeLiteral: contains single quotes only', function() {
|
||||
createClient(function(client) {
|
||||
var expected = "'hello \'\' world'";
|
||||
var actual = client.escapeLiteral('hello \' world');
|
||||
assert.equal(expected, actual);
|
||||
client.end();
|
||||
});
|
||||
});
|
||||
|
||||
test('escapeLiteral: contains backslashes only', function() {
|
||||
createClient(function(client) {
|
||||
var expected = " E'hello \\\\ world'";
|
||||
var actual = client.escapeLiteral('hello \\ world');
|
||||
assert.equal(expected, actual);
|
||||
client.end();
|
||||
});
|
||||
});
|
||||
|
||||
test('escapeLiteral: contains single quotes and double quotes', function() {
|
||||
createClient(function(client) {
|
||||
var expected = "'hello '' \" world'";
|
||||
var actual = client.escapeLiteral('hello \' " world');
|
||||
assert.equal(expected, actual);
|
||||
client.end();
|
||||
});
|
||||
});
|
||||
|
||||
test('escapeLiteral: contains double quotes and backslashes', function() {
|
||||
createClient(function(client) {
|
||||
var expected = " E'hello \\\\ \" world'";
|
||||
var actual = client.escapeLiteral('hello \\ " world');
|
||||
assert.equal(expected, actual);
|
||||
client.end();
|
||||
});
|
||||
});
|
||||
|
||||
test('escapeLiteral: contains single quotes and backslashes', function() {
|
||||
createClient(function(client) {
|
||||
var expected = " E'hello \\\\ '' world'";
|
||||
var actual = client.escapeLiteral('hello \\ \' world');
|
||||
assert.equal(expected, actual);
|
||||
client.end();
|
||||
});
|
||||
});
|
||||
|
||||
test('escapeLiteral: contains single quotes, double quotes, and backslashes', function() {
|
||||
createClient(function(client) {
|
||||
var expected = " E'hello \\\\ '' \" world'";
|
||||
var actual = client.escapeLiteral('hello \\ \' " world');
|
||||
assert.equal(expected, actual);
|
||||
client.end();
|
||||
});
|
||||
});
|
||||
|
||||
test('escapeIdentifier: no special characters', function() {
|
||||
createClient(function(client) {
|
||||
var expected = '"hello world"';
|
||||
var actual = client.escapeIdentifier('hello world');
|
||||
assert.equal(expected, actual);
|
||||
client.end();
|
||||
});
|
||||
});
|
||||
|
||||
test('escapeIdentifier: contains double quotes only', function() {
|
||||
createClient(function(client) {
|
||||
var expected = '"hello "" world"';
|
||||
var actual = client.escapeIdentifier('hello " world');
|
||||
assert.equal(expected, actual);
|
||||
client.end();
|
||||
});
|
||||
});
|
||||
|
||||
test('escapeIdentifier: contains single quotes only', function() {
|
||||
createClient(function(client) {
|
||||
var expected = '"hello \' world"';
|
||||
var actual = client.escapeIdentifier('hello \' world');
|
||||
assert.equal(expected, actual);
|
||||
client.end();
|
||||
});
|
||||
});
|
||||
|
||||
test('escapeIdentifier: contains backslashes only', function() {
|
||||
createClient(function(client) {
|
||||
var expected = '"hello \\ world"';
|
||||
var actual = client.escapeIdentifier('hello \\ world');
|
||||
assert.equal(expected, actual);
|
||||
client.end();
|
||||
});
|
||||
});
|
||||
|
||||
test('escapeIdentifier: contains single quotes and double quotes', function() {
|
||||
createClient(function(client) {
|
||||
var expected = '"hello \' "" world"';
|
||||
var actual = client.escapeIdentifier('hello \' " world');
|
||||
assert.equal(expected, actual);
|
||||
client.end();
|
||||
});
|
||||
});
|
||||
|
||||
test('escapeIdentifier: contains double quotes and backslashes', function() {
|
||||
return createClient(function(client) {
|
||||
var expected = '"hello \\ "" world"';
|
||||
var actual = client.escapeIdentifier('hello \\ " world');
|
||||
assert.equal(expected, actual);
|
||||
client.end();
|
||||
return;
|
||||
});
|
||||
});
|
||||
|
||||
test('escapeIdentifier: contains single quotes and backslashes', function() {
|
||||
createClient(function(client) {
|
||||
var expected = '"hello \\ \' world"';
|
||||
var actual = client.escapeIdentifier('hello \\ \' world');
|
||||
assert.equal(expected, actual);
|
||||
client.end();
|
||||
});
|
||||
});
|
||||
|
||||
test('escapeIdentifier: contains single quotes, double quotes, and backslashes', function() {
|
||||
createClient(function(client) {
|
||||
var expected = '"hello \\ \' "" world"';
|
||||
var actual = client.escapeIdentifier('hello \\ \' " world');
|
||||
assert.equal(expected, actual);
|
||||
client.end();
|
||||
});
|
||||
});
|
||||
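A short sketch of how the two helpers verified above might be combined when an identifier cannot be sent as a bound parameter (the table name and values here are illustrative; values should still prefer $1-style parameters where possible):

var table = client.escapeIdentifier('user "data"');  // -> "user ""data"""
var value = client.escapeLiteral("O'Reilly");        // -> 'O''Reilly'
client.query('SELECT * FROM ' + table + ' WHERE name = ' + value, function (err, result) {
  // handle err / result as with any other query
});
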
38
test/integration/client/force-native-with-envvar-tests.js
Normal file
38
test/integration/client/force-native-with-envvar-tests.js
Normal file
@ -0,0 +1,38 @@
|
||||
/**
|
||||
* helper needs to be loaded for the asserts but it also preloads
|
||||
* client which we don't want here
|
||||
*
|
||||
*/
|
||||
var helper = require(__dirname+"/test-helper")
|
||||
, path = require('path')
|
||||
;
|
||||
|
||||
var paths = {
|
||||
'pg' : path.join(__dirname, '..', '..', '..', 'lib', 'index.js') ,
|
||||
'query_js' : path.join(__dirname, '..', '..', '..', 'lib', 'query.js') ,
|
||||
'query_native' : path.join(__dirname, '..', '..', '..', 'lib', 'native', 'query.js') ,
|
||||
};
|
||||
|
||||
/**
|
||||
* delete the modules we are concerned about from the
|
||||
* module cache, so they get loaded cleanly and the env
|
||||
* var can kick in ...
|
||||
*/
|
||||
function emptyCache(){
|
||||
Object.keys(require.cache).forEach(function(key){
|
||||
delete require.cache[key];
|
||||
});
|
||||
};
|
||||
|
||||
emptyCache();
|
||||
process.env.NODE_PG_FORCE_NATIVE = '1';
|
||||
|
||||
var pg = require( paths.pg );
|
||||
var query_native = require( paths.query_native );
|
||||
var query_js = require( paths.query_js );
|
||||
|
||||
assert.deepEqual(pg.Client.Query, query_native);
|
||||
assert.notDeepEqual(pg.Client.Query, query_js);
|
||||
|
||||
emptyCache();
|
||||
delete process.env.NODE_PG_FORCE_NATIVE
|
||||
25
test/integration/client/heroku-ssl-tests.js
Normal file
25
test/integration/client/heroku-ssl-tests.js
Normal file
@ -0,0 +1,25 @@
|
||||
var helper = require(__dirname + '/../test-helper');
|
||||
var pg = helper.pg;
|
||||
|
||||
var host = 'ec2-107-20-224-218.compute-1.amazonaws.com';
|
||||
var database = 'db6kfntl5qhp2';
|
||||
var user = 'kwdzdnqpdiilfs';
|
||||
var port = 5432;
|
||||
|
||||
var config = {
|
||||
host: host,
|
||||
port: port,
|
||||
database: database,
|
||||
user: user,
|
||||
password: 'uaZoSSHgi7mVM7kYaROtusClKu',
|
||||
ssl: true
|
||||
};
|
||||
|
||||
//connect & disconnect from heroku
|
||||
pg.connect(config, assert.success(function(client, done) {
|
||||
client.query('SELECT NOW() as time', assert.success(function(res) {
|
||||
assert(res.rows[0].time.getTime());
|
||||
done();
|
||||
pg.end();
|
||||
}))
|
||||
}));
|
||||
22
test/integration/client/huge-numeric-tests.js
Normal file
22
test/integration/client/huge-numeric-tests.js
Normal file
@ -0,0 +1,22 @@
|
||||
var helper = require(__dirname + '/test-helper');
|
||||
|
||||
helper.pg.connect(helper.config, assert.success(function(client, done) {
|
||||
var types = require(__dirname + '/../../../lib/types');
|
||||
//1231 = numericOID
|
||||
types.setTypeParser(1700, function(){
|
||||
return 'yes';
|
||||
})
|
||||
types.setTypeParser(1700, 'binary', function(){
|
||||
return 'yes';
|
||||
})
|
||||
var bignum = '294733346389144765940638005275322203805';
|
||||
client.query('CREATE TEMP TABLE bignumz(id numeric)');
|
||||
client.query('INSERT INTO bignumz(id) VALUES ($1)', [bignum]);
|
||||
client.query('SELECT * FROM bignumz', assert.success(function(result) {
|
||||
assert.equal(result.rows[0].id, 'yes')
|
||||
helper.pg.end();
|
||||
done();
|
||||
}))
|
||||
}));
|
||||
|
||||
//custom type converter
|
||||
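The stub parser above only proves the override is applied; a more typical parser for OID 1700 (numeric) would convert the text value to a JavaScript number, accepting the precision loss for values this large. A hedged sketch using the same types module the test requires (path assumed relative to the repository root):

var types = require(__dirname + '/lib/types');
types.setTypeParser(1700, function (val) {
  // val arrives as the text representation sent by Postgres
  return parseFloat(val);
});
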
38
test/integration/client/json-type-parsing-tests.js
Normal file
38
test/integration/client/json-type-parsing-tests.js
Normal file
@ -0,0 +1,38 @@
|
||||
var helper = require(__dirname + '/test-helper');
|
||||
var assert = require('assert');
|
||||
//if you want binary support, pull request me!
|
||||
if (helper.config.binary) {
|
||||
console.log('binary mode does not support JSON right now');
|
||||
return;
|
||||
}
|
||||
|
||||
test('can read and write json', function() {
|
||||
helper.pg.connect(helper.config, function(err, client, done) {
|
||||
assert.ifError(err);
|
||||
helper.versionGTE(client, '9.2.0', assert.success(function(jsonSupported) {
|
||||
if(!jsonSupported) {
|
||||
console.log('skip json test on older versions of postgres');
|
||||
done();
|
||||
return helper.pg.end();
|
||||
}
|
||||
client.query('CREATE TEMP TABLE stuff(id SERIAL PRIMARY KEY, data JSON)');
|
||||
var value ={name: 'Brian', age: 250, alive: true, now: new Date()};
|
||||
client.query('INSERT INTO stuff (data) VALUES ($1)', [value]);
|
||||
client.query('SELECT * FROM stuff', assert.success(function(result) {
|
||||
assert.equal(result.rows.length, 1);
|
||||
assert.equal(typeof result.rows[0].data, 'object');
|
||||
var row = result.rows[0].data;
|
||||
assert.strictEqual(row.name, value.name);
|
||||
assert.strictEqual(row.age, value.age);
|
||||
assert.strictEqual(row.alive, value.alive);
|
||||
test('row should have "now" as a date', function() {
|
||||
return false;
|
||||
assert(row.now instanceof Date, 'row.now should be a date instance but is ' + typeof row.now);
|
||||
});
|
||||
assert.equal(JSON.stringify(row.now), JSON.stringify(value.now));
|
||||
done();
|
||||
helper.pg.end();
|
||||
}));
|
||||
}));
|
||||
});
|
||||
});
|
||||
23
test/integration/client/no-row-result-tests.js
Normal file
23
test/integration/client/no-row-result-tests.js
Normal file
@ -0,0 +1,23 @@
|
||||
var helper = require(__dirname + '/test-helper');
|
||||
var pg = helper.pg;
|
||||
var config = helper.config;
|
||||
|
||||
test('can access results when no rows are returned', function() {
|
||||
if(config.native) return false;
|
||||
var checkResult = function(result) {
|
||||
assert(result.fields, 'should have fields definition');
|
||||
assert.equal(result.fields.length, 1);
|
||||
assert.equal(result.fields[0].name, 'val');
|
||||
assert.equal(result.fields[0].dataTypeID, 25);
|
||||
pg.end();
|
||||
};
|
||||
|
||||
pg.connect(config, assert.success(function(client, done) {
|
||||
var query = client.query('select $1::text as val limit 0', ['hi'], assert.success(function(result) {
|
||||
checkResult(result);
|
||||
done();
|
||||
}));
|
||||
|
||||
assert.emits(query, 'end', checkResult);
|
||||
}));
|
||||
});
|
||||
@ -1,5 +1,7 @@
|
||||
var helper = require(__dirname + '/test-helper');
|
||||
test('emits notice message', function() {
|
||||
//TODO this doesn't work on all versions of postgres
|
||||
return false;
|
||||
var client = helper.client();
|
||||
client.query('create temp table boom(id serial, size integer)');
|
||||
assert.emits(client, 'notice', function(notice) {
|
||||
|
||||
18
test/integration/client/parse-int-8-tests.js
Normal file
18
test/integration/client/parse-int-8-tests.js
Normal file
@ -0,0 +1,18 @@
|
||||
|
||||
var helper = require(__dirname + '/../test-helper');
|
||||
var pg = helper.pg;
|
||||
test('ability to turn on and off parser', function() {
|
||||
if(helper.args.binary) return false;
|
||||
pg.connect(helper.config, assert.success(function(client, done) {
|
||||
pg.defaults.parseInt8 = true;
|
||||
client.query('CREATE TEMP TABLE asdf(id SERIAL PRIMARY KEY)');
|
||||
client.query('SELECT COUNT(*) as "count" FROM asdf', assert.success(function(res) {
|
||||
pg.defaults.parseInt8 = false;
|
||||
client.query('SELECT COUNT(*) as "count" FROM asdf', assert.success(function(res) {
|
||||
done();
|
||||
assert.strictEqual("0", res.rows[0].count);
|
||||
pg.end();
|
||||
}));
|
||||
}));
|
||||
}));
|
||||
});
|
||||
@ -82,8 +82,8 @@ test("named prepared statement", function() {
|
||||
|
||||
test("prepared statements on different clients", function() {
|
||||
var statementName = "differ";
|
||||
var statement1 = "select count(*) as count from person";
|
||||
var statement2 = "select count(*) as count from person where age < $1";
|
||||
var statement1 = "select count(*)::int4 as count from person";
|
||||
var statement2 = "select count(*)::int4 as count from person where age < $1";
|
||||
|
||||
var client1Finished = false;
|
||||
var client2Finished = false;
|
||||
|
||||
33
test/integration/client/query-callback-error-tests.js
Normal file
33
test/integration/client/query-callback-error-tests.js
Normal file
@ -0,0 +1,33 @@
|
||||
var helper = require(__dirname + '/test-helper');
|
||||
var util = require('util');
|
||||
|
||||
var withQuery = function(text, resultLength, cb) {
|
||||
test('error during query execution', function() {
|
||||
var client = new Client(helper.args);
|
||||
process.removeAllListeners('uncaughtException');
|
||||
assert.emits(process, 'uncaughtException', function() {
|
||||
assert.equal(client.activeQuery, null, 'should remove active query even if error happens in callback');
|
||||
client.query('SELECT * FROM blah', assert.success(function(result) {
|
||||
assert.equal(result.rows.length, resultLength);
|
||||
client.end();
|
||||
cb();
|
||||
}));
|
||||
});
|
||||
client.connect(assert.success(function() {
|
||||
client.query('CREATE TEMP TABLE "blah"(data text)', assert.success(function() {
|
||||
var q = client.query(text, ['yo'], assert.calls(function() {
|
||||
assert.emits(client, 'drain');
|
||||
throw new Error('WHOOOAAAHH!!');
|
||||
}));
|
||||
}));
|
||||
}));
|
||||
});
|
||||
}
|
||||
|
||||
//test with good query so our callback is called
|
||||
//as a successful callback
|
||||
withQuery('INSERT INTO blah(data) VALUES($1)', 1, function() {
|
||||
//test with an error query so our callback is called with an error
|
||||
withQuery('INSERT INTO asldkfjlaskfj eoooeoriiri', 0, function() {
|
||||
});
|
||||
});
|
||||
31
test/integration/client/query-error-handling-tests.js
Normal file
31
test/integration/client/query-error-handling-tests.js
Normal file
@ -0,0 +1,31 @@
|
||||
var helper = require(__dirname + '/test-helper');
|
||||
var util = require('util');
|
||||
|
||||
test('error during query execution', function() {
|
||||
var client = new Client(helper.args);
|
||||
client.connect(assert.success(function() {
|
||||
var sleepQuery = 'select pg_sleep(5)';
|
||||
var pidColName = 'procpid'
|
||||
var queryColName = 'current_query';
|
||||
helper.versionGTE(client, '9.2.0', assert.success(function(isGreater) {
|
||||
if(isGreater) {
|
||||
pidColName = 'pid';
|
||||
queryColName = 'query';
|
||||
}
|
||||
client.query(sleepQuery, assert.calls(function(err, result) {
|
||||
assert(err);
|
||||
client.end();
|
||||
}));
|
||||
var client2 = new Client(helper.args);
|
||||
client2.connect(assert.success(function() {
|
||||
var killIdleQuery = "SELECT " + pidColName + ", (SELECT pg_terminate_backend(" + pidColName + ")) AS killed FROM pg_stat_activity WHERE " + queryColName + " = $1";
|
||||
client2.query(killIdleQuery, [sleepQuery], assert.calls(function(err, res) {
|
||||
assert.ifError(err);
|
||||
assert.equal(res.rows.length, 1);
|
||||
client2.end();
|
||||
assert.emits(client2, 'end');
|
||||
}));
|
||||
}));
|
||||
}));
|
||||
}));
|
||||
});
|
||||
7
test/integration/client/quick-disconnect-tests.js
Normal file
@ -0,0 +1,7 @@
//test for issue #320
//
var helper = require('./test-helper');

var client = new helper.pg.Client(helper.config);
client.connect();
client.end();
@ -2,26 +2,34 @@ var helper = require(__dirname + "/test-helper");
|
||||
var pg = helper.pg;
|
||||
|
||||
test('should return insert metadata', function() {
|
||||
return false;
|
||||
pg.connect(helper.config, assert.calls(function(err, client) {
|
||||
pg.connect(helper.config, assert.calls(function(err, client, done) {
|
||||
assert.isNull(err);
|
||||
client.query("CREATE TEMP TABLE zugzug(name varchar(10))", assert.calls(function(err, result) {
|
||||
assert.isNull(err);
|
||||
//let's list this as ignored for now
|
||||
// process.nextTick(function() {
|
||||
// test('should identify "CREATE TABLE" message', function() {
|
||||
// return false;
|
||||
// assert.equal(result.command, "CREATE TABLE");
|
||||
// assert.equal(result.rowCount, 0);
|
||||
// })
|
||||
// })
|
||||
assert.equal(result.oid, null);
|
||||
client.query("INSERT INTO zugzug(name) VALUES('more work?')", assert.calls(function(err, result) {
|
||||
assert.equal(result.command, "INSERT");
|
||||
assert.equal(result.rowCount, 1);
|
||||
process.nextTick(client.end.bind(client));
|
||||
return false;
|
||||
}))
|
||||
}))
|
||||
}))
|
||||
})
|
||||
|
||||
helper.versionGTE(client, '9.0.0', assert.success(function(hasRowCount) {
|
||||
client.query("CREATE TEMP TABLE zugzug(name varchar(10))", assert.calls(function(err, result) {
|
||||
assert.isNull(err);
|
||||
assert.equal(result.oid, null);
|
||||
assert.equal(result.command, 'CREATE');
|
||||
|
||||
var q = client.query("INSERT INTO zugzug(name) VALUES('more work?')", assert.calls(function(err, result) {
|
||||
assert.equal(result.command, "INSERT");
|
||||
assert.equal(result.rowCount, 1);
|
||||
|
||||
client.query('SELECT * FROM zugzug', assert.calls(function(err, result) {
|
||||
assert.isNull(err);
|
||||
if(hasRowCount) assert.equal(result.rowCount, 1);
|
||||
assert.equal(result.command, 'SELECT');
|
||||
process.nextTick(pg.end.bind(pg));
|
||||
}));
|
||||
}));
|
||||
|
||||
assert.emits(q, 'end', function(result) {
|
||||
assert.equal(result.command, "INSERT");
|
||||
if(hasRowCount) assert.equal(result.rowCount, 1);
|
||||
done();
|
||||
});
|
||||
|
||||
}));
|
||||
}));
|
||||
}));
|
||||
});
|
||||
|
||||
33
test/integration/client/results-as-array-tests.js
Normal file
33
test/integration/client/results-as-array-tests.js
Normal file
@ -0,0 +1,33 @@
|
||||
var util = require('util');
|
||||
var helper = require('./test-helper');
|
||||
|
||||
var Client = helper.Client;
|
||||
|
||||
var conInfo = helper.config;
|
||||
|
||||
test('returns results as array', function() {
|
||||
var client = new Client(conInfo);
|
||||
var checkRow = function(row) {
|
||||
assert(util.isArray(row), 'row should be an array');
|
||||
assert.equal(row.length, 4);
|
||||
assert.equal(row[0].getFullYear(), new Date().getFullYear());
|
||||
assert.strictEqual(row[1], 1);
|
||||
assert.strictEqual(row[2], 'hai');
|
||||
assert.strictEqual(row[3], null);
|
||||
}
|
||||
client.connect(assert.success(function() {
|
||||
var config = {
|
||||
text: 'SELECT NOW(), 1::int, $1::text, null',
|
||||
values: ['hai'],
|
||||
rowMode: 'array'
|
||||
};
|
||||
var query = client.query(config, assert.success(function(result) {
|
||||
assert.equal(result.rows.length, 1);
|
||||
checkRow(result.rows[0]);
|
||||
client.end();
|
||||
}));
|
||||
assert.emits(query, 'row', function(row) {
|
||||
checkRow(row);
|
||||
});
|
||||
}));
|
||||
});
|
||||
37
test/integration/client/row-description-on-results-tests.js
Normal file
37
test/integration/client/row-description-on-results-tests.js
Normal file
@ -0,0 +1,37 @@
|
||||
var helper = require('./test-helper');
|
||||
|
||||
var Client = helper.Client;
|
||||
|
||||
var conInfo = helper.config;
|
||||
|
||||
var checkResult = function(result) {
|
||||
assert(result.fields);
|
||||
assert.equal(result.fields.length, 3);
|
||||
var fields = result.fields;
|
||||
assert.equal(fields[0].name, 'now');
|
||||
assert.equal(fields[1].name, 'num');
|
||||
assert.equal(fields[2].name, 'texty');
|
||||
assert.equal(fields[0].dataTypeID, 1184);
|
||||
assert.equal(fields[1].dataTypeID, 23);
|
||||
assert.equal(fields[2].dataTypeID, 25);
|
||||
};
|
||||
|
||||
test('row descriptions on result object', function() {
|
||||
var client = new Client(conInfo);
|
||||
client.connect(assert.success(function() {
|
||||
client.query('SELECT NOW() as now, 1::int as num, $1::text as texty', ["hello"], assert.success(function(result) {
|
||||
checkResult(result);
|
||||
client.end();
|
||||
}));
|
||||
}));
|
||||
});
|
||||
|
||||
test('row description on no rows', function() {
|
||||
var client = new Client(conInfo);
|
||||
client.connect(assert.success(function() {
|
||||
client.query('SELECT NOW() as now, 1::int as num, $1::text as texty LIMIT 0', ["hello"], assert.success(function(result) {
|
||||
checkResult(result);
|
||||
client.end();
|
||||
}));
|
||||
}));
|
||||
});
|
||||
@ -9,15 +9,16 @@ test("simple query interface", function() {
|
||||
client.on('drain', client.end.bind(client));
|
||||
|
||||
var rows = [];
|
||||
query.on('row', function(row) {
|
||||
rows.push(row['name'])
|
||||
query.on('row', function(row, result) {
|
||||
assert.ok(result);
|
||||
rows.push(row['name']);
|
||||
});
|
||||
query.once('row', function(row) {
|
||||
test('Can iterate through columns', function () {
|
||||
var columnCount = 0;
|
||||
for (column in row) {
|
||||
columnCount++;
|
||||
};
|
||||
}
|
||||
if ('length' in row) {
|
||||
assert.lengthIs(row, columnCount, 'Iterating through the columns gives a different length from calling .length.');
|
||||
}
|
||||
@ -37,7 +38,7 @@ test("simple query interface", function() {
|
||||
|
||||
test("multiple simple queries", function() {
|
||||
var client = helper.client();
|
||||
client.query({ text: "create temp table bang(id serial, name varchar(5));insert into bang(name) VALUES('boom');", binary: false })
|
||||
client.query({ text: "create temp table bang(id serial, name varchar(5));insert into bang(name) VALUES('boom');"})
|
||||
client.query("insert into bang(name) VALUES ('yes');");
|
||||
var query = client.query("select name from bang");
|
||||
assert.emits(query, 'row', function(row) {
|
||||
@ -51,9 +52,9 @@ test("multiple simple queries", function() {
|
||||
|
||||
test("multiple select statements", function() {
|
||||
var client = helper.client();
|
||||
client.query({text: "create temp table boom(age integer); insert into boom(age) values(1); insert into boom(age) values(2); insert into boom(age) values(3)", binary: false});
|
||||
client.query({text: "create temp table bang(name varchar(5)); insert into bang(name) values('zoom');", binary: false});
|
||||
var result = client.query({text: "select age from boom where age < 2; select name from bang", binary: false});
|
||||
client.query("create temp table boom(age integer); insert into boom(age) values(1); insert into boom(age) values(2); insert into boom(age) values(3)");
|
||||
client.query({text: "create temp table bang(name varchar(5)); insert into bang(name) values('zoom');"});
|
||||
var result = client.query({text: "select age from boom where age < 2; select name from bang"});
|
||||
assert.emits(result, 'row', function(row) {
|
||||
assert.strictEqual(row['age'], 1);
|
||||
assert.emits(result, 'row', function(row) {
|
||||
|
||||
14
test/integration/client/ssl-tests.js
Normal file
@ -0,0 +1,14 @@
|
||||
var pg = require(__dirname + '/../../../lib');
|
||||
var config = require(__dirname + '/test-helper').config;
|
||||
test('can connect with ssl', function() {
|
||||
return false;
|
||||
config.ssl = {
|
||||
rejectUnauthorized: false
|
||||
};
|
||||
pg.connect(config, assert.success(function(client) {
|
||||
return false;
|
||||
client.query('SELECT NOW()', assert.success(function() {
|
||||
pg.end();
|
||||
}));
|
||||
}));
|
||||
});
|
||||
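The ssl test above (currently short-circuited with return false) sets an ssl object on the connection config. A hedged sketch of the same idea, assuming a server with SSL enabled and a self-signed certificate:

var pg = require('pg');

var config = {
  host: 'localhost',
  database: 'postgres',
  user: 'postgres',
  ssl: {
    // accept a self-signed certificate; only appropriate when the
    // server's identity does not need to be verified
    rejectUnauthorized: false
  }
};

pg.connect(config, function(err, client, done) {
  if (err) throw err;
  client.query('SELECT NOW()', function(err, result) {
    if (err) throw err;
    done();
    pg.end();
  });
});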
@ -1,10 +1,3 @@
|
||||
var helper = require(__dirname+'/../test-helper');
|
||||
|
||||
//creates a client from cli parameters
|
||||
helper.client = function() {
|
||||
var client = new Client(helper.config);
|
||||
client.connect();
|
||||
return client;
|
||||
};
|
||||
|
||||
module.exports = helper;
|
||||
|
||||
29
test/integration/client/timezone-tests.js
Normal file
@ -0,0 +1,29 @@
|
||||
var helper = require(__dirname + '/../test-helper');
|
||||
var exec = require('child_process').exec;
|
||||
|
||||
var oldTz = process.env.TZ;
|
||||
process.env.TZ = 'Europe/Berlin';
|
||||
|
||||
var date = new Date();
|
||||
|
||||
helper.pg.connect(helper.config, function(err, client, done) {
|
||||
assert.isNull(err);
|
||||
|
||||
test('timestamp without time zone', function() {
|
||||
client.query("SELECT CAST($1 AS TIMESTAMP WITHOUT TIME ZONE) AS \"val\"", [ date ], function(err, result) {
|
||||
assert.isNull(err);
|
||||
assert.equal(result.rows[0].val.getTime(), date.getTime());
|
||||
|
||||
test('timestamp with time zone', function() {
|
||||
client.query("SELECT CAST($1 AS TIMESTAMP WITH TIME ZONE) AS \"val\"", [ date ], function(err, result) {
|
||||
assert.isNull(err);
|
||||
assert.equal(result.rows[0].val.getTime(), date.getTime());
|
||||
|
||||
done();
|
||||
helper.pg.end();
|
||||
process.env.TZ = oldTz;
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
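The timezone tests above pass a JavaScript Date as a bind parameter and expect the same instant back regardless of the TZ environment variable. A minimal round-trip sketch (connection details are illustrative):

var pg = require('pg');

var config = { host: 'localhost', database: 'postgres', user: 'postgres' };
var date = new Date();

pg.connect(config, function(err, client, done) {
  if (err) throw err;
  client.query('SELECT CAST($1 AS TIMESTAMP WITH TIME ZONE) AS val', [date], function(err, result) {
    if (err) throw err;
    // the driver serializes the Date with its offset, so the same instant round-trips
    console.log(result.rows[0].val.getTime() === date.getTime()); // true
    done();
    pg.end();
  });
});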
@ -5,8 +5,7 @@ var sink = new helper.Sink(2, function() {
|
||||
});
|
||||
|
||||
test('a single connection transaction', function() {
|
||||
helper.pg.connect(helper.config, assert.calls(function(err, client) {
|
||||
assert.isNull(err);
|
||||
helper.pg.connect(helper.config, assert.success(function(client, done) {
|
||||
|
||||
client.query('begin');
|
||||
|
||||
@ -39,6 +38,7 @@ test('a single connection transaction', function() {
|
||||
client.query(getZed, assert.calls(function(err, result) {
|
||||
assert.isNull(err);
|
||||
assert.empty(result.rows);
|
||||
done();
|
||||
sink.add();
|
||||
}))
|
||||
})
|
||||
@ -46,8 +46,7 @@ test('a single connection transaction', function() {
|
||||
})
|
||||
|
||||
test('gh#36', function() {
|
||||
helper.pg.connect(helper.config, function(err, client) {
|
||||
if(err) throw err;
|
||||
helper.pg.connect(helper.config, assert.success(function(client, done) {
|
||||
client.query("BEGIN");
|
||||
client.query({
|
||||
name: 'X',
|
||||
@ -67,6 +66,7 @@ test('gh#36', function() {
|
||||
}))
|
||||
client.query("COMMIT", function() {
|
||||
sink.add();
|
||||
done();
|
||||
})
|
||||
})
|
||||
}));
|
||||
})
|
||||
|
||||
@ -2,7 +2,7 @@ var helper = require(__dirname + '/test-helper');
|
||||
var sink;
|
||||
|
||||
var testForTypeCoercion = function(type){
|
||||
helper.pg.connect(helper.config, function(err, client) {
|
||||
helper.pg.connect(helper.config, function(err, client, done) {
|
||||
assert.isNull(err);
|
||||
client.query("create temp table test_type(col " + type.name + ")", assert.calls(function(err, result) {
|
||||
assert.isNull(err);
|
||||
@ -23,7 +23,9 @@ var testForTypeCoercion = function(type){
|
||||
});
|
||||
|
||||
assert.emits(query, 'row', function(row) {
|
||||
assert.strictEqual(row.col, val, "expected " + type.name + " of " + val + " but got " + row.col);
|
||||
var expected = val + " (" + typeof val + ")";
|
||||
var returned = row.col + " (" + typeof row.col + ")";
|
||||
assert.strictEqual(row.col, val, "expected " + type.name + " of " + expected + " but got " + returned);
|
||||
}, "row should have been called for " + type.name + " of " + val);
|
||||
|
||||
client.query('delete from test_type');
|
||||
@ -31,6 +33,7 @@ var testForTypeCoercion = function(type){
|
||||
|
||||
client.query('drop table test_type', function() {
|
||||
sink.add();
|
||||
done();
|
||||
});
|
||||
})
|
||||
}));
|
||||
@ -39,13 +42,21 @@ var testForTypeCoercion = function(type){
|
||||
|
||||
var types = [{
|
||||
name: 'integer',
|
||||
values: [1, -1, null]
|
||||
values: [-2147483648, -1, 0, 1, 2147483647, null]
|
||||
},{
|
||||
name: 'smallint',
|
||||
values: [-1, 0, 1, null]
|
||||
values: [-32768, -1, 0, 1, 32767, null]
|
||||
},{
|
||||
name: 'bigint',
|
||||
values: [-10000, 0, 10000, null]
|
||||
values: [
|
||||
'-9223372036854775808',
|
||||
'-9007199254740992',
|
||||
'0',
|
||||
'9007199254740992',
|
||||
'72057594037928030',
|
||||
'9223372036854775807',
|
||||
null
|
||||
]
|
||||
},{
|
||||
name: 'varchar(5)',
|
||||
values: ['yo', '', 'zomg!', null]
|
||||
@ -56,15 +67,21 @@ var types = [{
|
||||
name: 'bool',
|
||||
values: [true, false, null]
|
||||
},{
|
||||
//TODO get some actual huge numbers here
|
||||
name: 'numeric',
|
||||
values: [-12.34, 0, 12.34, null]
|
||||
values: [
|
||||
'-12.34',
|
||||
'0',
|
||||
'12.34',
|
||||
'-3141592653589793238462643383279502.1618033988749894848204586834365638',
|
||||
'3141592653589793238462643383279502.1618033988749894848204586834365638',
|
||||
null
|
||||
]
|
||||
},{
|
||||
name: 'real',
|
||||
values: [101.1, 0, -101.3, null]
|
||||
values: [-101.3, -1.2, 0, 1.2, 101.1, null]
|
||||
},{
|
||||
name: 'double precision',
|
||||
values: [-1.2, 0, 1.2, null]
|
||||
values: [-101.3, -1.2, 0, 1.2, 101.1, null]
|
||||
},{
|
||||
name: 'timestamptz',
|
||||
values: [null]
|
||||
@ -82,7 +99,7 @@ var types = [{
|
||||
// ignore some tests in binary mode
|
||||
if (helper.config.binary) {
|
||||
types = types.filter(function(type) {
|
||||
return !(type.name in {'real':1, 'timetz':1, 'time':1});
|
||||
return !(type.name in {'real': 1, 'timetz':1, 'time':1, 'numeric': 1, 'bigint': 1});
|
||||
});
|
||||
}
|
||||
|
||||
@ -133,7 +150,7 @@ test("timestampz round trip", function() {
|
||||
client.on('drain', client.end.bind(client));
|
||||
});
|
||||
|
||||
helper.pg.connect(helper.config, assert.calls(function(err, client) {
|
||||
helper.pg.connect(helper.config, assert.calls(function(err, client, done) {
|
||||
assert.isNull(err);
|
||||
client.query('select null as res;', assert.calls(function(err, res) {
|
||||
assert.isNull(err);
|
||||
@ -143,5 +160,21 @@ helper.pg.connect(helper.config, assert.calls(function(err, client) {
|
||||
assert.isNull(err);
|
||||
assert.strictEqual(res.rows[0].res, null);
|
||||
sink.add();
|
||||
done();
|
||||
})
|
||||
}))
|
||||
|
||||
if(!helper.config.binary) {
|
||||
test("postgres date type", function() {
|
||||
var client = helper.client();
|
||||
client.on('error', function(err) {
|
||||
console.log(err);
|
||||
client.end();
|
||||
});
|
||||
client.query("SELECT '2010-10-31'::date", assert.calls(function(err, result){
|
||||
assert.isNull(err);
|
||||
assert.UTCDate(result.rows[0].date, 2010, 9, 31, 0, 0, 0, 0);
|
||||
}));
|
||||
client.on('drain', client.end.bind(client));
|
||||
});
|
||||
}
|
||||
|
||||
@ -8,12 +8,13 @@ test('disconnects', function() {
|
||||
helper.pg.end();
|
||||
});
|
||||
[helper.config, helper.config, helper.config, helper.config].forEach(function(config) {
|
||||
helper.pg.connect(config, function(err, client) {
|
||||
helper.pg.connect(config, function(err, client, done) {
|
||||
assert.isNull(err);
|
||||
client.query("SELECT * FROM NOW()", function(err, result) {
|
||||
process.nextTick(function() {
|
||||
assert.equal(called, false, "Should not have disconnected yet")
|
||||
sink.add();
|
||||
done();
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
@ -1,28 +1,41 @@
|
||||
var helper = require(__dirname + "/../test-helper");
|
||||
var pg = require(__dirname + "/../../../lib");
|
||||
helper.pg = pg;
|
||||
pg = pg;
|
||||
|
||||
//first make pool hold 2 clients
|
||||
helper.pg.defaults.poolSize = 2;
|
||||
pg.defaults.poolSize = 2;
|
||||
|
||||
var killIdleQuery = 'SELECT procpid, (SELECT pg_terminate_backend(procpid)) AS killed FROM pg_stat_activity WHERE current_query LIKE \'<IDLE>\'';
|
||||
|
||||
//get first client
|
||||
helper.pg.connect(helper.config, assert.success(function(client) {
|
||||
pg.connect(helper.config, assert.success(function(client, done) {
|
||||
client.id = 1;
|
||||
helper.pg.connect(helper.config, assert.success(function(client2) {
|
||||
client2.id = 2;
|
||||
//subscribe to the pg error event
|
||||
assert.emits(helper.pg, 'error', function(error, brokenClient) {
|
||||
assert.ok(error);
|
||||
assert.ok(brokenClient);
|
||||
assert.equal(client.id, brokenClient.id);
|
||||
helper.pg.end();
|
||||
});
|
||||
//kill the connection from client
|
||||
client2.query(killIdleQuery, assert.success(function(res) {
|
||||
//check to make sure client connection actually was killed
|
||||
assert.lengthIs(res.rows, 1);
|
||||
pg.connect(helper.config, assert.success(function(client2, done2) {
|
||||
client2.id = 2;
|
||||
var pidColName = 'procpid'
|
||||
helper.versionGTE(client2, '9.2.0', assert.success(function(isGreater) {
|
||||
console.log(isGreater)
|
||||
var killIdleQuery = 'SELECT pid, (SELECT pg_terminate_backend(pid)) AS killed FROM pg_stat_activity WHERE state = $1';
|
||||
var params = ['idle'];
|
||||
if(!isGreater) {
|
||||
killIdleQuery = 'SELECT procpid, (SELECT pg_terminate_backend(procpid)) AS killed FROM pg_stat_activity WHERE current_query LIKE $1';
|
||||
params = ['%IDLE%']
|
||||
}
|
||||
|
||||
//subscribe to the pg error event
|
||||
assert.emits(pg, 'error', function(error, brokenClient) {
|
||||
assert.ok(error);
|
||||
assert.ok(brokenClient);
|
||||
assert.equal(client.id, brokenClient.id);
|
||||
});
|
||||
|
||||
//kill the connection from client
|
||||
client2.query(killIdleQuery, params, assert.success(function(res) {
|
||||
//check to make sure client connection actually was killed
|
||||
assert.lengthIs(res.rows, 1);
|
||||
//return client2 to the pool
|
||||
done2();
|
||||
pg.end();
|
||||
}));
|
||||
}));
|
||||
}));
|
||||
}));
|
||||
}));
|
||||
|
||||
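The pool-related hunks above migrate callers from pg.connect(config, function(err, client) { ... }) to the form that also receives a done callback for returning the client to the pool. A sketch of that pattern, assuming this version of the pooling API (config values are illustrative):

var pg = require('pg');

var config = { host: 'localhost', database: 'postgres', user: 'postgres' };

pg.connect(config, function(err, client, done) {
  if (err) throw err;
  client.query('SELECT 1 AS one', function(err, result) {
    if (err) throw err;
    // hand the client back to the pool instead of ending it;
    // call pg.end() only when the process is finished with the pool
    done();
    console.log(result.rows[0].one); // 1
    pg.end();
  });
});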
@ -3,10 +3,11 @@ var helper = require(__dirname + '/test-helper');
|
||||
helper.pg.defaults.poolIdleTimeout = 200;
|
||||
|
||||
test('idle timeout', function() {
|
||||
helper.pg.connect(helper.config, assert.calls(function(err, client) {
|
||||
helper.pg.connect(helper.config, assert.calls(function(err, client, done) {
|
||||
assert.isNull(err);
|
||||
client.query('SELECT NOW()');
|
||||
//just let this one time out
|
||||
//test will hang if pool doesn't timeout
|
||||
done();
|
||||
}));
|
||||
});
|
||||
|
||||
@ -8,7 +8,14 @@ helper.pg.defaults.port = helper.args.port;
|
||||
helper.pg.defaults.database = helper.args.database;
|
||||
helper.pg.defaults.poolSize = 1;
|
||||
|
||||
helper.pg.connect(assert.calls(function(err, client) {
|
||||
helper.pg.connect(assert.calls(function(err, client, done) {
|
||||
assert.isNull(err);
|
||||
client.end();
|
||||
client.query('SELECT NOW()');
|
||||
client.once('drain', function() {
|
||||
setTimeout(function() {
|
||||
helper.pg.end();
|
||||
done();
|
||||
|
||||
}, 10);
|
||||
});
|
||||
}));
|
||||
|
||||
@ -9,7 +9,7 @@ helper.testPoolSize = function(max) {
|
||||
for(var i = 0; i < max; i++) {
|
||||
helper.pg.poolSize = 10;
|
||||
test("connection #" + i + " executes", function() {
|
||||
helper.pg.connect(helper.config, function(err, client) {
|
||||
helper.pg.connect(helper.config, function(err, client, done) {
|
||||
assert.isNull(err);
|
||||
client.query("select * from person", function(err, result) {
|
||||
assert.lengthIs(result.rows, 26)
|
||||
@ -19,7 +19,8 @@ helper.testPoolSize = function(max) {
|
||||
})
|
||||
var query = client.query("SELECT * FROM NOW()")
|
||||
query.on('end',function() {
|
||||
sink.add()
|
||||
sink.add();
|
||||
done();
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
@ -1,63 +0,0 @@
|
||||
var helper = require(__dirname + '/test-helper');
|
||||
|
||||
helper.pg.defaults.poolSize = 1;
|
||||
helper.pg.defaults.user = helper.args.user;
|
||||
helper.pg.defaults.password = helper.args.password;
|
||||
helper.pg.defaults.database = helper.args.database;
|
||||
helper.pg.defaults.port = helper.args.port;
|
||||
helper.pg.defaults.host = helper.args.host;
|
||||
helper.pg.defaults.binary = helper.args.binary;
|
||||
helper.pg.defaults.poolIdleTimeout = 100;
|
||||
|
||||
var moreArgs = {};
|
||||
for (c in helper.config) {
|
||||
moreArgs[c] = helper.config[c];
|
||||
}
|
||||
moreArgs.zomg = true;
|
||||
|
||||
var badArgs = {};
|
||||
for (c in helper.config) {
|
||||
badArgs[c] = helper.config[c];
|
||||
}
|
||||
|
||||
badArgs.user = badArgs.user + 'laksdjfl';
|
||||
badArgs.password = badArgs.password + 'asldkfjlas';
|
||||
badArgs.zomg = true;
|
||||
|
||||
test('connecting with complete config', function() {
|
||||
|
||||
helper.pg.connect(helper.config, assert.calls(function(err, client) {
|
||||
assert.isNull(err);
|
||||
client.iGotAccessed = true;
|
||||
client.query("SELECT NOW()")
|
||||
}));
|
||||
|
||||
});
|
||||
|
||||
test('connecting with different config object', function() {
|
||||
|
||||
helper.pg.connect(moreArgs, assert.calls(function(err, client) {
|
||||
assert.isNull(err);
|
||||
assert.ok(client.iGotAccessed === true)
|
||||
client.query("SELECT NOW()");
|
||||
}))
|
||||
|
||||
});
|
||||
|
||||
test('connecting with all defaults', function() {
|
||||
|
||||
helper.pg.connect(assert.calls(function(err, client) {
|
||||
assert.isNull(err);
|
||||
assert.ok(client.iGotAccessed === true);
|
||||
client.end();
|
||||
}));
|
||||
|
||||
});
|
||||
|
||||
test('connecting with invalid config', function() {
|
||||
|
||||
helper.pg.connect(badArgs, assert.calls(function(err, client) {
|
||||
assert.ok(err != null, "Expected connection error using invalid connection credentials");
|
||||
}));
|
||||
|
||||
});
|
||||
44
test/integration/connection/copy-tests.js
Normal file
@ -0,0 +1,44 @@
|
||||
var helper = require(__dirname+"/test-helper");
|
||||
var assert = require('assert');
|
||||
|
||||
test('COPY FROM events check', function () {
|
||||
helper.connect(function (con) {
|
||||
var stdinStream = con.query('COPY person FROM STDIN');
|
||||
con.on('copyInResponse', function () {
|
||||
con.endCopyFrom();
|
||||
});
|
||||
assert.emits(con, 'copyInResponse',
|
||||
function () {
|
||||
con.endCopyFrom();
|
||||
},
|
||||
"backend should emit copyInResponse after COPY FROM query"
|
||||
);
|
||||
assert.emits(con, 'commandComplete',
|
||||
function () {
|
||||
con.end();
|
||||
},
|
||||
"backend should emit commandComplete after COPY FROM stream ends"
|
||||
)
|
||||
});
|
||||
});
|
||||
test('COPY TO events check', function () {
|
||||
helper.connect(function (con) {
|
||||
var stdoutStream = con.query('COPY person TO STDOUT');
|
||||
assert.emits(con, 'copyOutResponse',
|
||||
function () {
|
||||
},
|
||||
"backend should emit copyOutResponse after COPY TO query"
|
||||
);
|
||||
assert.emits(con, 'copyData',
|
||||
function () {
|
||||
},
|
||||
"backend should emit copyData on every data row"
|
||||
);
|
||||
assert.emits(con, 'copyDone',
|
||||
function () {
|
||||
con.end();
|
||||
},
|
||||
"backend should emit copyDone after all data rows"
|
||||
);
|
||||
});
|
||||
});
|
||||
17
test/integration/gh-issues/130.js
Normal file
@ -0,0 +1,17 @@
|
||||
var helper = require(__dirname + '/../test-helper');
|
||||
var exec = require('child_process').exec;
|
||||
|
||||
helper.pg.defaults.poolIdleTimeout = 1000;
|
||||
|
||||
helper.pg.connect(helper.config, function(err,client) {
|
||||
client.query("SELECT pg_backend_pid()", function(err, result) {
|
||||
var pid = result.rows[0].pg_backend_pid;
|
||||
exec('psql -c "select pg_terminate_backend('+pid+')" template1', assert.calls(function (error, stdout, stderr) {
|
||||
assert.isNull(error);
|
||||
}));
|
||||
});
|
||||
});
|
||||
|
||||
helper.pg.on('error', function(err, client) {
|
||||
//swallow errors
|
||||
});
|
||||
19
test/integration/gh-issues/131.js
Normal file
@ -0,0 +1,19 @@
|
||||
var helper = require(__dirname + "/../test-helper");
|
||||
var pg = helper.pg;
|
||||
|
||||
test('parsing array results', function() {
|
||||
pg.connect(helper.config, assert.calls(function(err, client) {
|
||||
assert.isNull(err);
|
||||
client.query("CREATE TEMP TABLE why(names text[], numbors integer[], decimals double precision[])");
|
||||
client.query('INSERT INTO why(names, numbors, decimals) VALUES(\'{"aaron", "brian","a b c" }\', \'{1, 2, 3}\', \'{.1, 0.05, 3.654}\')').on('error', console.log);
|
||||
test('decimals', function() {
|
||||
client.query('SELECT decimals FROM why', assert.success(function(result) {
|
||||
assert.lengthIs(result.rows[0].decimals, 3);
|
||||
assert.equal(result.rows[0].decimals[0], 0.1);
|
||||
assert.equal(result.rows[0].decimals[1], 0.05);
|
||||
assert.equal(result.rows[0].decimals[2], 3.654);
|
||||
pg.end();
|
||||
}))
|
||||
})
|
||||
}))
|
||||
})
|
||||
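The gh-131 test above checks that array-typed columns (text[], integer[], double precision[]) come back as JavaScript arrays. A small sketch of the double precision[] case it asserts on (connection details are illustrative):

var pg = require('pg');

var config = { host: 'localhost', database: 'postgres', user: 'postgres' };

pg.connect(config, function(err, client, done) {
  if (err) throw err;
  client.query("SELECT '{0.1, 0.05, 3.654}'::double precision[] AS decimals", function(err, result) {
    if (err) throw err;
    console.log(result.rows[0].decimals); // [ 0.1, 0.05, 3.654 ]
    done();
    pg.end();
  });
});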
@ -1,10 +1,27 @@
|
||||
var helper = require(__dirname + '/../test-helper');
|
||||
|
||||
//TODO would this be better served set at ../test-helper?
|
||||
if(helper.args.native) {
|
||||
Client = require(__dirname + '/../../lib/native');
|
||||
helper.Client = Client;
|
||||
helper.pg = helper.pg.native;
|
||||
}
|
||||
|
||||
//creates a client from cli parameters
|
||||
helper.client = function() {
|
||||
var client = new Client(helper.config);
|
||||
client.connect();
|
||||
return client;
|
||||
};
|
||||
|
||||
var semver = require('semver');
|
||||
helper.versionGTE = function(client, versionString, callback) {
|
||||
client.query('SELECT version()', assert.calls(function(err, result) {
|
||||
if(err) return callback(err);
|
||||
var version = result.rows[0].version.split(' ')[1];
|
||||
return callback(null, semver.gte(version, versionString));
|
||||
}));
|
||||
};
|
||||
|
||||
//export parent helper stuffs
|
||||
module.exports = helper;
|
||||
|
||||
|
||||
@ -1,3 +1,4 @@
|
||||
var domain = require('domain');
|
||||
var helper = require(__dirname + "/../test-helper");
|
||||
var Client = require(__dirname + "/../../lib/native");
|
||||
|
||||
@ -14,3 +15,17 @@ test('fires callback with results', function() {
|
||||
}))
|
||||
}));
|
||||
})
|
||||
|
||||
test('preserves domain', function() {
|
||||
var dom = domain.create();
|
||||
|
||||
dom.run(function() {
|
||||
var client = new Client(helper.config);
|
||||
assert.ok(dom === require('domain').active, 'domain is active');
|
||||
client.connect()
|
||||
client.query('select 1', function() {
|
||||
assert.ok(dom === require('domain').active, 'domain is still active');
|
||||
client.end();
|
||||
});
|
||||
});
|
||||
})
|
||||
|
||||
@ -1,5 +1,6 @@
|
||||
var helper = require(__dirname + "/../test-helper");
|
||||
var Client = require(__dirname + "/../../lib/native");
|
||||
var domain = require('domain');
|
||||
|
||||
test('connecting with wrong parameters', function() {
|
||||
var con = new Client("user=asldfkj hostaddr=127.0.0.1 port=5432 dbname=asldkfj");
|
||||
@ -20,3 +21,16 @@ test('connects', function() {
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
test('preserves domain', function() {
|
||||
var dom = domain.create();
|
||||
|
||||
dom.run(function() {
|
||||
var con = new Client(helper.config);
|
||||
assert.ok(dom === require('domain').active, 'domain is active');
|
||||
con.connect(function() {
|
||||
assert.ok(dom === require('domain').active, 'domain is still active');
|
||||
con.end();
|
||||
});
|
||||
});
|
||||
})
|
||||
|
||||
40
test/native/copy-events-tests.js
Normal file
@ -0,0 +1,40 @@
|
||||
var helper = require(__dirname+"/../test-helper");
|
||||
var Client = require(__dirname + "/../../lib/native");
|
||||
test('COPY FROM events check', function () {
|
||||
var con = new Client(helper.config),
|
||||
stdinStream = con.copyFrom('COPY person FROM STDIN');
|
||||
assert.emits(con, 'copyInResponse',
|
||||
function () {
|
||||
stdinStream.end();
|
||||
},
|
||||
"backend should emit copyInResponse after COPY FROM query"
|
||||
);
|
||||
assert.emits(con, '_readyForQuery',
|
||||
function () {
|
||||
con.end();
|
||||
},
|
||||
"backend should emit _readyForQuery after data will be coped to stdin stream"
|
||||
);
|
||||
con.connect();
|
||||
});
|
||||
test('COPY TO events check', function () {
|
||||
var con = new Client(helper.config),
|
||||
stdoutStream = con.copyTo('COPY person TO STDOUT');
|
||||
assert.emits(con, 'copyOutResponse',
|
||||
function () {},
|
||||
"backend should emit copyOutResponse on copyOutResponse message from server"
|
||||
);
|
||||
assert.emits(con, 'copyData',
|
||||
function () {
|
||||
},
|
||||
"backend should emit copyData on every data row"
|
||||
);
|
||||
assert.emits(con, '_readyForQuery',
|
||||
function () {
|
||||
con.end();
|
||||
},
|
||||
"backend should emit _readyForQuery after data will be coped to stdout stream"
|
||||
);
|
||||
con.connect();
|
||||
});
|
||||
|
||||
23
test/native/copyto-largedata-tests.js
Normal file
@ -0,0 +1,23 @@
|
||||
var helper = require(__dirname+"/../test-helper");
|
||||
var Client = require(__dirname + "/../../lib/native");
|
||||
test("COPY TO large amount of data from postgres", function () {
|
||||
//there was a bug in the native implementation of COPY TO:
|
||||
//if there was too much data (i.e. data was not yet ready
|
||||
//while calling PQgetCopyData), the while loop in
|
||||
//Connection::HandleIOEvent became infinite,
|
||||
//hanging node, consuming 100% CPU, and making the connection unusable
|
||||
var con = new Client(helper.config),
|
||||
rowCount = 100000,
|
||||
stdoutStream = con.copyTo('COPY (select generate_series(1, ' + rowCount + ')) TO STDOUT');
|
||||
stdoutStream.on('data', function () {
|
||||
rowCount--;
|
||||
});
|
||||
stdoutStream.on('end', function () {
|
||||
assert.equal(rowCount, 0, "copy to should load exactly requested number of rows");
|
||||
con.query("SELECT 1", assert.calls(function (error, result) {
|
||||
assert.ok(!error && result, "loading large amount of data by copy to should not break connection");
|
||||
con.end();
|
||||
}));
|
||||
});
|
||||
con.connect();
|
||||
});
|
||||
@ -5,26 +5,30 @@ test('query with non-text as first parameter throws error', function() {
|
||||
var client = new Client(helper.config);
|
||||
client.connect();
|
||||
assert.emits(client, 'connect', function() {
|
||||
assert.throws(function() {
|
||||
client.query({text:{fail: true}});
|
||||
})
|
||||
client.end();
|
||||
})
|
||||
})
|
||||
assert.emits(client, 'end', function() {
|
||||
assert.throws(function() {
|
||||
client.query({text:{fail: true}});
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
test('parameterized query with non-text as first parameter throws error', function() {
|
||||
var client = new Client(helper.config);
|
||||
client.connect();
|
||||
assert.emits(client, 'connect', function() {
|
||||
assert.throws(function() {
|
||||
client.query({
|
||||
text: {fail: true},
|
||||
values: [1, 2]
|
||||
})
|
||||
})
|
||||
client.end();
|
||||
})
|
||||
})
|
||||
assert.emits(client, 'end', function() {
|
||||
assert.throws(function() {
|
||||
client.query({
|
||||
text: {fail: true},
|
||||
values: [1, 2]
|
||||
})
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
var connect = function(callback) {
|
||||
var client = new Client(helper.config);
|
||||
@ -37,24 +41,28 @@ var connect = function(callback) {
|
||||
test('parameterized query with non-array for second value', function() {
|
||||
test('inline', function() {
|
||||
connect(function(client) {
|
||||
assert.throws(function() {
|
||||
client.query("SELECT *", "LKSDJF")
|
||||
})
|
||||
client.end();
|
||||
})
|
||||
})
|
||||
assert.emits(client, 'end', function() {
|
||||
assert.throws(function() {
|
||||
client.query("SELECT *", "LKSDJF")
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
test('config', function() {
|
||||
connect(function(client) {
|
||||
assert.throws(function() {
|
||||
client.query({
|
||||
text: "SELECT *",
|
||||
values: "ALSDKFJ"
|
||||
})
|
||||
})
|
||||
client.end();
|
||||
})
|
||||
})
|
||||
})
|
||||
assert.emits(client, 'end', function() {
|
||||
assert.throws(function() {
|
||||
client.query({
|
||||
text: "SELECT *",
|
||||
values: "ALSDKFJ"
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
|
||||
|
||||
@ -6,7 +6,6 @@ var sys = require('util');
|
||||
var BufferList = require(__dirname+'/buffer-list')
|
||||
|
||||
var Connection = require(__dirname + '/../lib/connection');
|
||||
var args = require(__dirname + '/cli');
|
||||
|
||||
Client = require(__dirname + '/../lib').Client;
|
||||
|
||||
@ -29,10 +28,10 @@ assert.same = function(actual, expected) {
|
||||
assert.emits = function(item, eventName, callback, message) {
|
||||
var called = false;
|
||||
var id = setTimeout(function() {
|
||||
test("Should have called " + eventName, function() {
|
||||
test("Should have called '" + eventName + "' event", function() {
|
||||
assert.ok(called, message || "Expected '" + eventName + "' to be called.")
|
||||
});
|
||||
},2000);
|
||||
},5000);
|
||||
|
||||
item.once(eventName, function() {
|
||||
if (eventName === 'error') {
|
||||
@ -97,13 +96,25 @@ assert.empty = function(actual) {
|
||||
};
|
||||
|
||||
assert.success = function(callback) {
|
||||
return assert.calls(function(err, arg) {
|
||||
if(err) {
|
||||
console.log(err);
|
||||
}
|
||||
assert.isNull(err);
|
||||
callback(arg);
|
||||
})
|
||||
if(callback.length === 1 || callback.length === 0) {
|
||||
return assert.calls(function(err, arg) {
|
||||
if(err) {
|
||||
console.log(err);
|
||||
}
|
||||
assert.isNull(err);
|
||||
callback(arg);
|
||||
});
|
||||
} else if (callback.length === 2) {
|
||||
return assert.calls(function(err, arg1, arg2) {
|
||||
if(err) {
|
||||
console.log(err);
|
||||
}
|
||||
assert.isNull(err);
|
||||
callback(arg1, arg2);
|
||||
});
|
||||
} else {
|
||||
throw new Error('need to preserve arity of wrapped function');
|
||||
}
|
||||
}
|
||||
|
||||
assert.throws = function(offender) {
|
||||
@ -124,15 +135,28 @@ var expect = function(callback, timeout) {
|
||||
var executed = false;
|
||||
var id = setTimeout(function() {
|
||||
assert.ok(executed, "Expected execution of function to be fired");
|
||||
}, timeout || 2000)
|
||||
}, timeout || 5000)
|
||||
|
||||
return function(err, queryResult) {
|
||||
clearTimeout(id);
|
||||
if (err) {
|
||||
assert.ok(err instanceof Error, "Expected errors to be instances of Error: " + sys.inspect(err));
|
||||
if(callback.length < 3) {
|
||||
return function(err, queryResult) {
|
||||
clearTimeout(id);
|
||||
if (err) {
|
||||
assert.ok(err instanceof Error, "Expected errors to be instances of Error: " + sys.inspect(err));
|
||||
}
|
||||
callback.apply(this, arguments)
|
||||
}
|
||||
callback.apply(this, arguments)
|
||||
} else if(callback.length == 3) {
|
||||
return function(err, arg1, arg2) {
|
||||
clearTimeout(id);
|
||||
if (err) {
|
||||
assert.ok(err instanceof Error, "Expected errors to be instances of Error: " + sys.inspect(err));
|
||||
}
|
||||
callback.apply(this, arguments)
|
||||
}
|
||||
} else {
|
||||
throw new Error("Unsupported arrity " + callback.length);
|
||||
}
|
||||
|
||||
}
|
||||
assert.calls = expect;
|
||||
|
||||
@ -143,47 +167,24 @@ assert.isNull = function(item, message) {
|
||||
|
||||
test = function(name, action) {
|
||||
test.testCount ++;
|
||||
if(args.verbose) {
|
||||
console.log(name);
|
||||
}
|
||||
var result = action();
|
||||
test[name] = action;
|
||||
var result = test[name]();
|
||||
if(result === false) {
|
||||
test.ignored.push(name);
|
||||
if(!args.verbose) {
|
||||
process.stdout.write('?');
|
||||
}
|
||||
process.stdout.write('?');
|
||||
}else{
|
||||
if(!args.verbose) {
|
||||
process.stdout.write('.');
|
||||
}
|
||||
process.stdout.write('.');
|
||||
}
|
||||
};
|
||||
|
||||
//print out the filename
|
||||
process.stdout.write(require('path').basename(process.argv[1]));
|
||||
//print a new line since we'll be printing test names
|
||||
if(args.verbose) {
|
||||
console.log();
|
||||
}
|
||||
test.testCount = test.testCount || 0;
|
||||
test.ignored = test.ignored || [];
|
||||
test.errors = test.errors || [];
|
||||
var args = require(__dirname + '/cli');
|
||||
if(args.binary) process.stdout.write(' (binary)');
|
||||
if(args.native) process.stdout.write(' (native)');
|
||||
|
||||
process.on('exit', function() {
|
||||
console.log('');
|
||||
if(test.ignored.length || test.errors.length) {
|
||||
test.ignored.forEach(function(name) {
|
||||
console.log("Ignored: " + name);
|
||||
});
|
||||
test.errors.forEach(function(error) {
|
||||
console.log("Error: " + error.name);
|
||||
});
|
||||
console.log('');
|
||||
}
|
||||
test.errors.forEach(function(error) {
|
||||
throw error.e;
|
||||
});
|
||||
});
|
||||
console.log('')
|
||||
})
|
||||
|
||||
process.on('uncaughtException', function(err) {
|
||||
console.error("\n %s", err.stack || err.toString())
|
||||
@ -194,7 +195,7 @@ process.on('uncaughtException', function(err) {
|
||||
var count = 0;
|
||||
|
||||
var Sink = function(expected, timeout, callback) {
|
||||
var defaultTimeout = 1000;
|
||||
var defaultTimeout = 5000;
|
||||
if(typeof timeout == 'function') {
|
||||
callback = timeout;
|
||||
timeout = defaultTimeout;
|
||||
@ -221,10 +222,11 @@ var Sink = function(expected, timeout, callback) {
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
module.exports = {
|
||||
args: args,
|
||||
Sink: Sink,
|
||||
pg: require(__dirname + '/../lib/'),
|
||||
args: args,
|
||||
config: args,
|
||||
sys: sys,
|
||||
Client: Client
|
||||
|
||||
@ -4,8 +4,8 @@ test('client settings', function() {
|
||||
|
||||
test('defaults', function() {
|
||||
var client = new Client();
|
||||
assert.equal(client.user, process.env.USER);
|
||||
assert.equal(client.database, process.env.USER);
|
||||
assert.equal(client.user, process.env['PGUSER'] || process.env.USER);
|
||||
assert.equal(client.database, process.env['PGDATABASE'] || process.env.USER);
|
||||
assert.equal(client.port, 5432);
|
||||
});
|
||||
|
||||
@ -31,7 +31,7 @@ test('client settings', function() {
|
||||
test('initializing from a config string', function() {
|
||||
|
||||
test('uses the correct values from the config string', function() {
|
||||
var client = new Client("pg://brian:pass@host1:333/databasename")
|
||||
var client = new Client("postgres://brian:pass@host1:333/databasename")
|
||||
assert.equal(client.user, 'brian')
|
||||
assert.equal(client.password, "pass")
|
||||
assert.equal(client.host, "host1")
|
||||
@ -39,13 +39,22 @@ test('initializing from a config string', function() {
|
||||
assert.equal(client.database, "databasename")
|
||||
})
|
||||
|
||||
test('when not including all values the defaults are used', function() {
|
||||
var client = new Client("pg://host1")
|
||||
assert.equal(client.user, process.env.USER)
|
||||
assert.equal(client.password, null)
|
||||
test('uses the correct values from the config string with space in password', function() {
|
||||
var client = new Client("postgres://brian:pass word@host1:333/databasename")
|
||||
assert.equal(client.user, 'brian')
|
||||
assert.equal(client.password, "pass word")
|
||||
assert.equal(client.host, "host1")
|
||||
assert.equal(client.port, 5432)
|
||||
assert.equal(client.database, process.env.USER)
|
||||
assert.equal(client.port, 333)
|
||||
assert.equal(client.database, "databasename")
|
||||
})
|
||||
|
||||
test('when not including all values the defaults are used', function() {
|
||||
var client = new Client("postgres://host1")
|
||||
assert.equal(client.user, process.env['PGUSER'] || process.env.USER)
|
||||
assert.equal(client.password, process.env['PGPASSWORD'] || null)
|
||||
assert.equal(client.host, "host1")
|
||||
assert.equal(client.port, process.env['PGPORT'] || 5432)
|
||||
assert.equal(client.database, process.env['PGDATABASE'] || process.env.USER)
|
||||
})
|
||||
|
||||
|
||||
|
||||
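The unit tests above construct clients from postgres:// connection strings and fall back to the PG* environment variables (or built-in defaults) for anything the string omits. A short sketch with placeholder credentials:

var pg = require('pg');

// anything missing from the string falls back to PGUSER/PGPASSWORD/PGHOST/
// PGPORT/PGDATABASE and then to the library defaults
var client = new pg.Client('postgres://brian:secret@localhost:5432/mydb');
console.log(client.user);     // 'brian'
console.log(client.port);     // 5432
console.log(client.database); // 'mydb'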
153
test/unit/client/escape-tests.js
Normal file
@ -0,0 +1,153 @@
|
||||
var helper = require(__dirname + '/test-helper');
|
||||
|
||||
function createClient(callback) {
|
||||
var client = new Client(helper.config);
|
||||
client.connect(function(err) {
|
||||
return callback(client);
|
||||
});
|
||||
}
|
||||
|
||||
test('escapeLiteral: no special characters', function() {
|
||||
createClient(function(client) {
|
||||
var expected = "'hello world'";
|
||||
var actual = client.escapeLiteral('hello world');
|
||||
assert.equal(expected, actual);
|
||||
client.end();
|
||||
});
|
||||
});
|
||||
|
||||
test('escapeLiteral: contains double quotes only', function() {
|
||||
createClient(function(client) {
|
||||
var expected = "'hello \" world'";
|
||||
var actual = client.escapeLiteral('hello " world');
|
||||
assert.equal(expected, actual);
|
||||
client.end();
|
||||
});
|
||||
});
|
||||
|
||||
test('escapeLiteral: contains single quotes only', function() {
|
||||
createClient(function(client) {
|
||||
var expected = "'hello \'\' world'";
|
||||
var actual = client.escapeLiteral('hello \' world');
|
||||
assert.equal(expected, actual);
|
||||
client.end();
|
||||
});
|
||||
});
|
||||
|
||||
test('escapeLiteral: contains backslashes only', function() {
|
||||
createClient(function(client) {
|
||||
var expected = " E'hello \\\\ world'";
|
||||
var actual = client.escapeLiteral('hello \\ world');
|
||||
assert.equal(expected, actual);
|
||||
client.end();
|
||||
});
|
||||
});
|
||||
|
||||
test('escapeLiteral: contains single quotes and double quotes', function() {
|
||||
createClient(function(client) {
|
||||
var expected = "'hello '' \" world'";
|
||||
var actual = client.escapeLiteral('hello \' " world');
|
||||
assert.equal(expected, actual);
|
||||
client.end();
|
||||
});
|
||||
});
|
||||
|
||||
test('escapeLiteral: contains double quotes and backslashes', function() {
|
||||
createClient(function(client) {
|
||||
var expected = " E'hello \\\\ \" world'";
|
||||
var actual = client.escapeLiteral('hello \\ " world');
|
||||
assert.equal(expected, actual);
|
||||
client.end();
|
||||
});
|
||||
});
|
||||
|
||||
test('escapeLiteral: contains single quotes and backslashes', function() {
|
||||
createClient(function(client) {
|
||||
var expected = " E'hello \\\\ '' world'";
|
||||
var actual = client.escapeLiteral('hello \\ \' world');
|
||||
assert.equal(expected, actual);
|
||||
client.end();
|
||||
});
|
||||
});
|
||||
|
||||
test('escapeLiteral: contains single quotes, double quotes, and backslashes', function() {
|
||||
createClient(function(client) {
|
||||
var expected = " E'hello \\\\ '' \" world'";
|
||||
var actual = client.escapeLiteral('hello \\ \' " world');
|
||||
assert.equal(expected, actual);
|
||||
client.end();
|
||||
});
|
||||
});
|
||||
|
||||
test('escapeIdentifier: no special characters', function() {
|
||||
createClient(function(client) {
|
||||
var expected = '"hello world"';
|
||||
var actual = client.escapeIdentifier('hello world');
|
||||
assert.equal(expected, actual);
|
||||
client.end();
|
||||
});
|
||||
});
|
||||
|
||||
test('escapeIdentifier: contains double quotes only', function() {
|
||||
createClient(function(client) {
|
||||
var expected = '"hello "" world"';
|
||||
var actual = client.escapeIdentifier('hello " world');
|
||||
assert.equal(expected, actual);
|
||||
client.end();
|
||||
});
|
||||
});
|
||||
|
||||
test('escapeIdentifier: contains single quotes only', function() {
|
||||
createClient(function(client) {
|
||||
var expected = '"hello \' world"';
|
||||
var actual = client.escapeIdentifier('hello \' world');
|
||||
assert.equal(expected, actual);
|
||||
client.end();
|
||||
});
|
||||
});
|
||||
|
||||
test('escapeIdentifier: contains backslashes only', function() {
|
||||
createClient(function(client) {
|
||||
var expected = '"hello \\ world"';
|
||||
var actual = client.escapeIdentifier('hello \\ world');
|
||||
assert.equal(expected, actual);
|
||||
client.end();
|
||||
});
|
||||
});
|
||||
|
||||
test('escapeIdentifier: contains single quotes and double quotes', function() {
|
||||
createClient(function(client) {
|
||||
var expected = '"hello \' "" world"';
|
||||
var actual = client.escapeIdentifier('hello \' " world');
|
||||
assert.equal(expected, actual);
|
||||
client.end();
|
||||
});
|
||||
});
|
||||
|
||||
test('escapeIdentifier: contains double quotes and backslashes', function() {
|
||||
return createClient(function(client) {
|
||||
var expected = '"hello \\ "" world"';
|
||||
var actual = client.escapeIdentifier('hello \\ " world');
|
||||
assert.equal(expected, actual);
|
||||
client.end();
|
||||
return;
|
||||
});
|
||||
});
|
||||
|
||||
test('escapeIdentifier: contains single quotes and backslashes', function() {
|
||||
createClient(function(client) {
|
||||
var expected = '"hello \\ \' world"';
|
||||
var actual = client.escapeIdentifier('hello \\ \' world');
|
||||
assert.equal(expected, actual);
|
||||
client.end();
|
||||
});
|
||||
});
|
||||
|
||||
test('escapeIdentifier: contains single quotes, double quotes, and backslashes', function() {
|
||||
createClient(function(client) {
|
||||
var expected = '"hello \\ \' "" world"';
|
||||
var actual = client.escapeIdentifier('hello \\ \' " world');
|
||||
assert.equal(expected, actual);
|
||||
client.end();
|
||||
});
|
||||
});
|
||||
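The escape tests above cover the client's escapeLiteral and escapeIdentifier helpers for values and identifiers that cannot be sent as bind parameters. A brief usage sketch (table and column names are purely illustrative):

var pg = require('pg');

var client = new pg.Client('postgres://postgres@localhost:5432/postgres');
client.connect(function(err) {
  if (err) throw err;
  // identifiers (table/column names) cannot be bound as $n parameters,
  // so they must be escaped explicitly when built dynamically
  var table = client.escapeIdentifier('my "weird" table');
  var value = client.escapeLiteral("O'Reilly");
  var sql = 'SELECT * FROM ' + table + ' WHERE author = ' + value;
  console.log(sql); // SELECT * FROM "my ""weird"" table" WHERE author = 'O''Reilly'
  client.end();
});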
@ -50,63 +50,3 @@ test('drain', function() {
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
test('with drain paused', function() {
|
||||
//mock out a fake connection
|
||||
var con = new Connection({stream: "NO"});
|
||||
con.connect = function() {
|
||||
con.emit('connect');
|
||||
};
|
||||
con.query = function() {
|
||||
};
|
||||
|
||||
var client = new Client({connection:con});
|
||||
|
||||
client.connect();
|
||||
|
||||
var drainCount = 0;
|
||||
client.on('drain', function() {
|
||||
drainCount++;
|
||||
});
|
||||
|
||||
test('normally unpaused', function() {
|
||||
con.emit('readyForQuery');
|
||||
client.query('boom');
|
||||
assert.emits(client, 'drain', function() {
|
||||
assert.equal(drainCount, 1);
|
||||
});
|
||||
con.emit('readyForQuery');
|
||||
});
|
||||
|
||||
test('pausing', function() {
|
||||
test('unpaused with no queries in between', function() {
|
||||
client.pauseDrain();
|
||||
client.resumeDrain();
|
||||
assert.equal(drainCount, 1);
|
||||
});
|
||||
|
||||
test('paused', function() {
|
||||
test('resumeDrain after empty', function() {
|
||||
client.pauseDrain();
|
||||
client.query('asdf');
|
||||
con.emit('readyForQuery');
|
||||
assert.equal(drainCount, 1);
|
||||
client.resumeDrain();
|
||||
assert.equal(drainCount, 2);
|
||||
});
|
||||
|
||||
test('resumDrain while still pending', function() {
|
||||
client.pauseDrain();
|
||||
client.query('asdf');
|
||||
client.query('asdf1');
|
||||
con.emit('readyForQuery');
|
||||
client.resumeDrain();
|
||||
assert.equal(drainCount, 2);
|
||||
con.emit('readyForQuery');
|
||||
assert.equal(drainCount, 3);
|
||||
});
|
||||
|
||||
});
|
||||
});
|
||||
|
||||
});
|
||||
|
||||
@ -4,7 +4,7 @@ q.dateParser = require(__dirname + "/../../../lib/types").getTypeParser(1114, 't
|
||||
q.stringArrayParser = require(__dirname + "/../../../lib/types").getTypeParser(1009, 'text');
|
||||
|
||||
test("testing dateParser", function() {
|
||||
assert.equal(q.dateParser("2010-12-11 09:09:04").toUTCString(),new Date("2010-12-11 09:09:04 GMT").toUTCString());
|
||||
assert.equal(q.dateParser("2010-12-11 09:09:04").toString(),new Date("2010-12-11 09:09:04").toString());
|
||||
});
|
||||
|
||||
var testForMs = function(part, expected) {
|
||||
@ -19,19 +19,19 @@ testForMs('.1', 100);
|
||||
testForMs('.01', 10);
|
||||
testForMs('.74', 740);
|
||||
|
||||
test("testing 2dateParser", function() {
|
||||
test("testing 2dateParser on dates without timezones", function() {
|
||||
var actual = "2010-12-11 09:09:04.1";
|
||||
var expected = "\"2010-12-11T09:09:04.100Z\"";
|
||||
var expected = JSON.stringify(new Date(2010,11,11,9,9,4,100))
|
||||
assert.equal(JSON.stringify(q.dateParser(actual)),expected);
|
||||
});
|
||||
|
||||
test("testing 2dateParser", function() {
|
||||
test("testing 2dateParser on dates with timezones", function() {
|
||||
var actual = "2011-01-23 22:15:51.28-06";
|
||||
var expected = "\"2011-01-24T04:15:51.280Z\"";
|
||||
assert.equal(JSON.stringify(q.dateParser(actual)),expected);
|
||||
});
|
||||
|
||||
test("testing 2dateParser", function() {
|
||||
test("testing 2dateParser on dates with huge millisecond value", function() {
|
||||
var actual = "2011-01-23 22:15:51.280843-06";
|
||||
var expected = "\"2011-01-24T04:15:51.280Z\"";
|
||||
assert.equal(JSON.stringify(q.dateParser(actual)),expected);
|
||||
|
||||
@ -82,7 +82,7 @@ test('executing query', function() {
|
||||
name: 'boom'
|
||||
}]
|
||||
});
|
||||
assert.ok(handled, "should have handlded rowDescritpion");
|
||||
assert.ok(handled, "should have handlded rowDescription");
|
||||
});
|
||||
|
||||
test('handles dataRow messages', function() {
|
||||
@ -116,7 +116,7 @@ test('executing query', function() {
|
||||
});
|
||||
con.emit("readyForQuery");
|
||||
//this would never actually happen
|
||||
['dataRow','rowDescritpion', 'commandComplete'].forEach(function(msg) {
|
||||
['dataRow','rowDescription', 'commandComplete'].forEach(function(msg) {
|
||||
assert.equal(con.emit(msg), false, "Should no longer be picking up '"+ msg +"' messages");
|
||||
});
|
||||
});
|
||||
|
||||
26
test/unit/client/stream-and-query-error-interaction-tests.js
Normal file
@ -0,0 +1,26 @@
|
||||
var helper = require(__dirname + '/test-helper');
|
||||
var Connection = require(__dirname + '/../../../lib/connection');
|
||||
var Client = require(__dirname + '/../../../lib/client');
|
||||
|
||||
test('emits end when not in query', function() {
|
||||
var stream = new (require('events').EventEmitter)();
|
||||
stream.write = function() {
|
||||
//NOOP
|
||||
}
|
||||
var client = new Client({connection: new Connection({stream: stream})});
|
||||
client.connect(assert.calls(function() {
|
||||
client.query('SELECT NOW()', assert.calls(function(err, result) {
|
||||
assert(err);
|
||||
}));
|
||||
}));
|
||||
assert.emits(client, 'end');
|
||||
client.connection.emit('connect');
|
||||
process.nextTick(function() {
|
||||
client.connection.emit('readyForQuery');
|
||||
assert.equal(client.queryQueue.length, 0);
|
||||
assert(client.activeQuery, 'client should have issued query');
|
||||
process.nextTick(function() {
|
||||
stream.emit('end');
|
||||
});
|
||||
});
|
||||
});
|
||||
@ -1,5 +1,5 @@
|
||||
var helper = require(__dirname + '/test-helper');
|
||||
//http://www.postgresql.org/docs/8.4/static/datatype.html
|
||||
//http://www.postgresql.org/docs/9.2/static/datatype.html
|
||||
test('typed results', function() {
|
||||
var client = helper.client();
|
||||
var con = client.connection;
|
||||
@ -18,20 +18,20 @@ test('typed results', function() {
|
||||
name: 'integer/int4',
|
||||
format: 'text',
|
||||
dataTypeID: 23,
|
||||
actual: '100',
|
||||
expected: 100
|
||||
actual: '2147483647',
|
||||
expected: 2147483647
|
||||
},{
|
||||
name: 'smallint/int2',
|
||||
format: 'text',
|
||||
dataTypeID: 21,
|
||||
actual: '101',
|
||||
expected: 101
|
||||
actual: '32767',
|
||||
expected: 32767
|
||||
},{
|
||||
name: 'bigint/int8',
|
||||
format: 'text',
|
||||
dataTypeID: 20,
|
||||
actual: '102',
|
||||
expected: 102
|
||||
actual: '9223372036854775807',
|
||||
expected: '9223372036854775807'
|
||||
},{
|
||||
name: 'oid',
|
||||
format: 'text',
|
||||
@ -42,8 +42,8 @@ test('typed results', function() {
|
||||
name: 'numeric',
|
||||
format: 'text',
|
||||
dataTypeID: 1700,
|
||||
actual: '12.34',
|
||||
expected: 12.34
|
||||
actual: '31415926535897932384626433832795028841971693993751058.16180339887498948482045868343656381177203091798057628',
|
||||
expected: '31415926535897932384626433832795028841971693993751058.16180339887498948482045868343656381177203091798057628'
|
||||
},{
|
||||
name: 'real/float4',
|
||||
dataTypeID: 700,
|
||||
@ -54,8 +54,8 @@ test('typed results', function() {
|
||||
name: 'double precision / float8',
|
||||
format: 'text',
|
||||
dataTypeID: 701,
|
||||
actual: '1.2',
|
||||
expected: 1.2
|
||||
actual: '12345678.12345678',
|
||||
expected: 12345678.12345678
|
||||
},{
|
||||
name: 'boolean true',
|
||||
format: 'text',
|
||||
@ -78,11 +78,11 @@ test('typed results', function() {
|
||||
name: 'timestamptz with minutes in timezone',
|
||||
format: 'text',
|
||||
dataTypeID: 1184,
|
||||
actual: '2010-10-31 14:54:13.74-0530',
|
||||
actual: '2010-10-31 14:54:13.74-05:30',
|
||||
expected: function(val) {
|
||||
assert.UTCDate(val, 2010, 9, 31, 20, 24, 13, 740);
|
||||
}
|
||||
},{
|
||||
}, {
|
||||
name: 'timestamptz with other milisecond digits dropped',
|
||||
format: 'text',
|
||||
dataTypeID: 1184,
|
||||
@ -111,6 +111,15 @@ test('typed results', function() {
|
||||
format: 'text',
|
||||
dataTypeID: 1114,
|
||||
actual: '2010-10-31 00:00:00',
|
||||
expected: function(val) {
|
||||
assert.equal(val.toUTCString(), new Date(2010, 9, 31, 0, 0, 0, 0, 0).toUTCString());
|
||||
assert.equal(val.toString(), new Date(2010, 9, 31, 0, 0, 0, 0, 0, 0).toString());
|
||||
}
|
||||
},{
|
||||
name: 'date',
|
||||
format: 'text',
|
||||
dataTypeID: 1082,
|
||||
actual: '2010-10-31',
|
||||
expected: function(val) {
|
||||
assert.UTCDate(val, 2010, 9, 31, 0, 0, 0, 0);
|
||||
}
|
||||
@ -156,6 +165,39 @@ test('typed results', function() {
|
||||
}
|
||||
},
|
||||
|
||||
{
|
||||
name : 'array/char',
|
||||
format : 'text',
|
||||
dataTypeID: 1014,
|
||||
actual: '{asdf,asdf}',
|
||||
expected : function(val){
|
||||
assert.deepEqual(val, ['asdf','asdf']);
|
||||
}
|
||||
},{
|
||||
name : 'array/varchar',
|
||||
format : 'text',
|
||||
dataTypeID: 1015,
|
||||
actual: '{asdf,asdf}',
|
||||
expected :function(val){
|
||||
assert.deepEqual(val, ['asdf','asdf']);
|
||||
}
|
||||
},{
|
||||
name : 'array/text',
|
||||
format : 'text',
|
||||
dataTypeID: 1008,
|
||||
actual: '{"hello world"}',
|
||||
expected :function(val){
|
||||
assert.deepEqual(val, ['hello world']);
|
||||
}
|
||||
},{
|
||||
name : 'array/numeric',
|
||||
format : 'text',
|
||||
dataTypeID: 1231,
|
||||
actual: '{1.2,3.4}',
|
||||
expected :function(val){
|
||||
assert.deepEqual(val, [1.2,3.4]);
|
||||
}
|
||||
},
|
||||
|
||||
{
|
||||
name: 'binary-string/varchar',
|
||||
@ -176,18 +218,18 @@ test('typed results', function() {
|
||||
actual: [0, 101],
|
||||
expected: 101
|
||||
},{
|
||||
name: 'binary-bigint/int8',
|
||||
format: 'binary',
|
||||
dataTypeID: 20,
|
||||
actual: [0, 0, 0, 0, 0, 0, 0, 102],
|
||||
expected: 102
|
||||
},{
|
||||
name: 'binary-bigint/int8-full',
|
||||
format: 'binary',
|
||||
dataTypeID: 20,
|
||||
actual: [1, 0, 0, 0, 0, 0, 0, 102],
|
||||
expected: 72057594037928030
|
||||
},{
|
||||
// name: 'binary-bigint/int8',
|
||||
// format: 'binary',
|
||||
// dataTypeID: 20,
|
||||
// actual: [0, 0, 0, 0, 0, 0, 0, 102],
|
||||
// expected: '102'
|
||||
// },{
|
||||
// name: 'binary-bigint/int8-full',
|
||||
// format: 'binary',
|
||||
// dataTypeID: 20,
|
||||
// actual: [1, 0, 0, 0, 0, 0, 0, 102],
|
||||
// expected: '72057594037928038'
|
||||
// },{
|
||||
name: 'binary-oid',
|
||||
format: 'binary',
|
||||
dataTypeID: 26,
|
||||
|
||||
161
test/unit/connection-parameters/creation-tests.js
Normal file
@ -0,0 +1,161 @@
|
||||
var helper = require(__dirname + '/../test-helper');
|
||||
var assert = require('assert');
|
||||
var ConnectionParameters = require(__dirname + '/../../../lib/connection-parameters');
|
||||
var defaults = require(__dirname + '/../../../lib').defaults;
|
||||
|
||||
//clear process.env
|
||||
for(var key in process.env) {
|
||||
delete process.env[key];
|
||||
}
|
||||
|
||||
test('ConnectionParameters construction', function() {
|
||||
assert.ok(new ConnectionParameters(), 'with null config');
|
||||
assert.ok(new ConnectionParameters({user: 'asdf'}), 'with config object');
|
||||
assert.ok(new ConnectionParameters('postgres://localhost/postgres'), 'with connection string');
|
||||
});
|
||||
|
||||
var compare = function(actual, expected, type) {
|
||||
assert.equal(actual.user, expected.user, type + ' user');
|
||||
assert.equal(actual.database, expected.database, type + ' database');
|
||||
assert.equal(actual.port, expected.port, type + ' port');
|
||||
assert.equal(actual.host, expected.host, type + ' host');
|
||||
assert.equal(actual.password, expected.password, type + ' password');
|
||||
assert.equal(actual.binary, expected.binary, type + ' binary');
|
||||
};
|
||||
|
||||
test('ConnectionParameters initializing from defaults', function() {
|
||||
var subject = new ConnectionParameters();
|
||||
compare(subject, defaults, 'defaults');
|
||||
assert.ok(subject.isDomainSocket === false);
|
||||
});
|
||||
|
||||
test('ConnectionParameters initializing from config', function() {
|
||||
var config = {
|
||||
user: 'brian',
|
||||
database: 'home',
|
||||
port: 7777,
|
||||
password: 'pizza',
|
||||
binary: true,
|
||||
encoding: 'utf8',
|
||||
host: 'yo',
|
||||
ssl: {
|
||||
asdf: 'blah'
|
||||
}
|
||||
};
|
||||
var subject = new ConnectionParameters(config);
|
||||
compare(subject, config, 'config');
|
||||
assert.ok(subject.isDomainSocket === false);
|
||||
});
|
||||
|
||||
test('initializing with unix domain socket', function() {
|
||||
var subject = new ConnectionParameters('/var/run/');
|
||||
assert.ok(subject.isDomainSocket);
|
||||
assert.equal(subject.host, '/var/run/');
|
||||
});
|
||||
|
||||
test('libpq connection string building', function() {
|
||||
var checkForPart = function(array, part) {
|
||||
assert.ok(array.indexOf(part) > -1, array.join(" ") + " did not contain " + part);
|
||||
}
|
||||
|
||||
test('builds simple string', function() {
|
||||
var config = {
|
||||
user: 'brian',
|
||||
password: 'xyz',
|
||||
port: 888,
|
||||
host: 'localhost',
|
||||
database: 'bam'
|
||||
}
|
||||
var subject = new ConnectionParameters(config);
|
||||
subject.getLibpqConnectionString(assert.calls(function(err, constring) {
|
||||
assert.isNull(err);
|
||||
var parts = constring.split(" ");
|
||||
checkForPart(parts, "user='brian'");
|
||||
checkForPart(parts, "password='xyz'");
|
||||
checkForPart(parts, "port='888'");
|
||||
checkForPart(parts, "hostaddr=127.0.0.1");
|
||||
checkForPart(parts, "dbname='bam'");
|
||||
}));
|
||||
});
|
||||
|
||||
test('builds dns string', function() {
|
||||
var config = {
|
||||
user: 'brian',
|
||||
password: 'asdf',
|
||||
port: 5432,
|
||||
host: 'localhost'
|
||||
};
|
||||
var subject = new ConnectionParameters(config);
|
||||
subject.getLibpqConnectionString(assert.calls(function(err, constring) {
|
||||
assert.isNull(err);
|
||||
var parts = constring.split(" ");
|
||||
checkForPart(parts, "user='brian'");
|
||||
checkForPart(parts, "hostaddr=127.0.0.1");
|
||||
}));
|
||||
});
|
||||
|
||||
test('error when dns fails', function() {
|
||||
var config = {
|
||||
user: 'brian',
|
||||
password: 'asf',
|
||||
port: 5432,
|
||||
host: 'asdlfkjasldfkksfd#!$!!!!..com'
|
||||
};
|
||||
var subject = new ConnectionParameters(config);
|
||||
subject.getLibpqConnectionString(assert.calls(function(err, constring) {
|
||||
assert.ok(err);
|
||||
assert.isNull(constring)
|
||||
}));
|
||||
});
|
||||
|
||||
test('connecting to unix domain socket', function() {
|
||||
var config = {
|
||||
user: 'brian',
|
||||
password: 'asf',
|
||||
port: 5432,
|
||||
host: '/tmp/'
|
||||
};
|
||||
var subject = new ConnectionParameters(config);
|
||||
subject.getLibpqConnectionString(assert.calls(function(err, constring) {
|
||||
assert.isNull(err);
|
||||
var parts = constring.split(" ");
|
||||
checkForPart(parts, "user='brian'");
|
||||
checkForPart(parts, "host=/tmp/");
|
||||
}));
|
||||
});
|
||||
|
||||
test("encoding can be specified by config", function() {
|
||||
var config = {
|
||||
client_encoding: "utf-8"
|
||||
}
|
||||
var subject = new ConnectionParameters(config);
|
||||
subject.getLibpqConnectionString(assert.calls(function(err, constring) {
|
||||
assert.isNull(err);
|
||||
var parts = constring.split(" ");
|
||||
checkForPart(parts, "client_encoding='utf-8'");
|
||||
}));
|
||||
})
|
||||
|
||||
test('password contains < and/or > characters', function () {
|
||||
return false;
|
||||
var sourceConfig = {
|
||||
user:'brian',
|
||||
password: 'hello<ther>e',
|
||||
port: 5432,
|
||||
host: 'localhost',
|
||||
database: 'postgres'
|
||||
}
|
||||
var connectionString = 'postgres://' + sourceConfig.user + ':' + sourceConfig.password + '@' + sourceConfig.host + ':' + sourceConfig.port + '/' + sourceConfig.database;
|
||||
var subject = new ConnectionParameters(connectionString);
|
||||
assert.equal(subject.password, sourceConfig.password);
|
||||
});
|
||||
|
||||
test('password contains weird characters', function() {
|
||||
var strang = 'postgres://my first name:is&%awesome!@localhost:9000';
|
||||
var subject = new ConnectionParameters(strang);
|
||||
assert.equal(subject.user, 'my first name');
|
||||
assert.equal(subject.password, 'is&%awesome!');
|
||||
assert.equal(subject.host, 'localhost');
|
||||
});
|
||||
|
||||
});
|
||||
@ -0,0 +1,82 @@
var helper = require(__dirname + '/../test-helper');
var assert = require('assert');
var ConnectionParameters = require(__dirname + '/../../../lib/connection-parameters');
var defaults = require(__dirname + '/../../../lib').defaults;

//clear process.env
var realEnv = {};
for(var key in process.env) {
  realEnv[key] = process.env[key];
  delete process.env[key];
}

test('ConnectionParameters initialized from environment variables', function(t) {
  process.env['PGHOST'] = 'local';
  process.env['PGUSER'] = 'bmc2';
  process.env['PGPORT'] = 7890;
  process.env['PGDATABASE'] = 'allyerbase';
  process.env['PGPASSWORD'] = 'open';

  var subject = new ConnectionParameters();
  assert.equal(subject.host, 'local', 'env host');
  assert.equal(subject.user, 'bmc2', 'env user');
  assert.equal(subject.port, 7890, 'env port');
  assert.equal(subject.database, 'allyerbase', 'env database');
  assert.equal(subject.password, 'open', 'env password');
});

test('ConnectionParameters initialized from mix', function(t) {
  delete process.env['PGPASSWORD'];
  delete process.env['PGDATABASE'];
  var subject = new ConnectionParameters({
    user: 'testing',
    database: 'zugzug'
  });
  assert.equal(subject.host, 'local', 'env host');
  assert.equal(subject.user, 'testing', 'config user');
  assert.equal(subject.port, 7890, 'env port');
  assert.equal(subject.database, 'zugzug', 'config database');
  assert.equal(subject.password, defaults.password, 'defaults password');
});

//clear process.env
for(var key in process.env) {
  delete process.env[key];
}

test('connection string parsing', function(t) {
  var string = 'postgres://brian:pw@boom:381/lala';
  var subject = new ConnectionParameters(string);
  assert.equal(subject.host, 'boom', 'string host');
  assert.equal(subject.user, 'brian', 'string user');
  assert.equal(subject.password, 'pw', 'string password');
  assert.equal(subject.port, 381, 'string port');
  assert.equal(subject.database, 'lala', 'string database');
});

test('connection string parsing - ssl', function(t) {
  var string = 'postgres://brian:pw@boom:381/lala?ssl=true';
  var subject = new ConnectionParameters(string);
  assert.equal(subject.ssl, true, 'ssl');

  string = 'postgres://brian:pw@boom:381/lala?ssl=1';
  subject = new ConnectionParameters(string);
  assert.equal(subject.ssl, true, 'ssl');

  string = 'postgres://brian:pw@boom:381/lala?other&ssl=true';
  subject = new ConnectionParameters(string);
  assert.equal(subject.ssl, true, 'ssl');

  string = 'postgres://brian:pw@boom:381/lala?ssl=0';
  subject = new ConnectionParameters(string);
  assert.equal(!!subject.ssl, false, 'ssl');

  string = 'postgres://brian:pw@boom:381/lala';
  subject = new ConnectionParameters(string);
  assert.equal(!!subject.ssl, false, 'ssl');
});

//restore process.env
for(var key in realEnv) {
  process.env[key] = realEnv[key];
}
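A quick sketch, not part of the diff, of the precedence these tests demonstrate: explicit config values beat PG* environment variables, which beat require('pg').defaults. The standalone require path and the literal values are assumptions for illustration only.

// Precedence sketch (assumed require path; values are illustrative).
var ConnectionParameters = require('pg/lib/connection-parameters');

process.env.PGHOST = 'env-host';
process.env.PGPORT = '7890';

var subject = new ConnectionParameters({ user: 'config-user' });

console.log(subject.user); // 'config-user' -- explicit config wins
console.log(subject.host); // 'env-host'    -- falls back to the environment
console.log(subject.port); // 7890, per the environment (compared with == in the tests)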
@ -1,10 +1,30 @@
var helper = require(__dirname + '/test-helper');
var Connection = require(__dirname + '/../../../lib/connection');
var con = new Connection({stream: new MemoryStream()});
test("connection emits stream errors", function() {
  var con = new Connection({stream: new MemoryStream()});
  assert.emits(con, 'error', function(err) {
    assert.equal(err.message, "OMG!");
  });
  con.connect();
  con.stream.emit('error', new Error("OMG!"));
});

test('connection emits ECONNRESET errors during normal operation', function() {
  var con = new Connection({stream: new MemoryStream()});
  con.connect();
  assert.emits(con, 'error', function(err) {
    assert.equal(err.code, 'ECONNRESET');
  });
  var e = new Error('Connection Reset');
  e.code = 'ECONNRESET';
  con.stream.emit('error', e);
});

test('connection does not emit ECONNRESET errors during disconnect', function() {
  var con = new Connection({stream: new MemoryStream()});
  con.connect();
  var e = new Error('Connection Reset');
  e.code = 'ECONNRESET';
  con.end();
  con.stream.emit('error', e);
});
@ -1,4 +1,5 @@
require(__dirname+'/test-helper');
return false; // NB: this top-level early return skips every test below in this file
var Connection = require(__dirname + '/../../../lib/connection');
var buffers = require(__dirname + '/../../test-buffers');
var PARSE = function(buffer) {
@ -23,6 +23,8 @@ test("sends startup message", function() {
    .addCString('brian')
    .addCString('database')
    .addCString('bang')
    .addCString('client_encoding')
    .addCString("'utf-8'")
    .addCString('').join(true))
});
@ -94,7 +96,7 @@ test('bind messages', function() {
  con.bind({
    portal: 'bang',
    statement: 'woo',
    values: [1, 'hi', null, 'zing']
    values: ['1', 'hi', null, 'zing']
  });
  var expectedBuffer = new BufferList()
    .addCString('bang') //portal name
99
test/unit/copystream/copyfrom-tests.js
Normal file
@ -0,0 +1,99 @@
var helper = require(__dirname + '/../test-helper');
var CopyFromStream = require(__dirname + '/../../../lib/copystream').CopyFromStream;
var ConnectionImitation = function () {
  this.send = 0;
  this.hasToBeSend = 0;
  this.finished = 0;
};
ConnectionImitation.prototype = {
  endCopyFrom: function () {
    assert.ok(this.finished++ === 0, "end should be called only once");
    assert.equal(this.send, this.hasToBeSend, "at the moment of the end all data has to be sent");
  },
  sendCopyFromChunk: function (chunk) {
    this.send += chunk.length;
    return true;
  },
  updateHasToBeSend: function (chunk) {
    this.hasToBeSend += chunk.length;
    return chunk;
  }
};
var buf1 = new Buffer("asdfasd"),
  buf2 = new Buffer("q03r90arf0aospd;"),
  buf3 = new Buffer(542),
  buf4 = new Buffer("93jfemialfjkasjlfas");

test('CopyFromStream, start streaming before data, end after data. no drain event', function () {
  var stream = new CopyFromStream();
  var conn = new ConnectionImitation();
  stream.on('drain', function () {
    assert.ok(false, "there must not be a drain event");
  });
  stream.startStreamingToConnection(conn);
  assert.ok(stream.write(conn.updateHasToBeSend(buf1)));
  assert.ok(stream.write(conn.updateHasToBeSend(buf2)));
  assert.ok(stream.write(conn.updateHasToBeSend(buf3)));
  assert.ok(stream.writable, "stream has to be writable");
  stream.end(conn.updateHasToBeSend(buf4));
  assert.ok(!stream.writable, "stream must not be writable");
  stream.end();
  assert.equal(conn.hasToBeSend, conn.send);
});
test('CopyFromStream, start streaming after end, end after data. drain event', function () {
  var stream = new CopyFromStream();
  assert.emits(stream, 'drain', function() {}, 'drain has to be emitted');
  var conn = new ConnectionImitation();
  assert.ok(!stream.write(conn.updateHasToBeSend(buf1)));
  assert.ok(!stream.write(conn.updateHasToBeSend(buf2)));
  assert.ok(!stream.write(conn.updateHasToBeSend(buf3)));
  assert.ok(stream.writable, "stream has to be writable");
  stream.end(conn.updateHasToBeSend(buf4));
  assert.ok(!stream.writable, "stream must not be writable");
  stream.end();
  stream.startStreamingToConnection(conn);
  assert.equal(conn.hasToBeSend, conn.send);
});
test('CopyFromStream, start streaming between data chunks. end after data. drain event', function () {
  var stream = new CopyFromStream();
  var conn = new ConnectionImitation();
  assert.emits(stream, 'drain', function() {}, 'drain has to be emitted');
  stream.write(conn.updateHasToBeSend(buf1));
  stream.write(conn.updateHasToBeSend(buf2));
  stream.startStreamingToConnection(conn);
  stream.write(conn.updateHasToBeSend(buf3));
  assert.ok(stream.writable, "stream has to be writable");
  stream.end(conn.updateHasToBeSend(buf4));
  assert.equal(conn.hasToBeSend, conn.send);
  assert.ok(!stream.writable, "stream must not be writable");
  stream.end();
});
test('CopyFromStream, start streaming before end. end stream with data. drain event', function () {
  var stream = new CopyFromStream();
  var conn = new ConnectionImitation();
  assert.emits(stream, 'drain', function() {}, 'drain has to be emitted');
  stream.write(conn.updateHasToBeSend(buf1));
  stream.write(conn.updateHasToBeSend(buf2));
  stream.write(conn.updateHasToBeSend(buf3));
  stream.startStreamingToConnection(conn);
  assert.ok(stream.writable, "stream has to be writable");
  stream.end(conn.updateHasToBeSend(buf4));
  assert.equal(conn.hasToBeSend, conn.send);
  assert.ok(!stream.writable, "stream must not be writable");
  stream.end();
});
test('CopyFromStream, start streaming after end. end with data. drain event', function(){
  var stream = new CopyFromStream();
  var conn = new ConnectionImitation();
  assert.emits(stream, 'drain', function() {}, 'drain has to be emitted');
  stream.write(conn.updateHasToBeSend(buf1));
  stream.write(conn.updateHasToBeSend(buf2));
  stream.write(conn.updateHasToBeSend(buf3));
  stream.startStreamingToConnection(conn);
  assert.ok(stream.writable, "stream has to be writable");
  stream.end(conn.updateHasToBeSend(buf4));
  stream.startStreamingToConnection(conn);
  assert.equal(conn.hasToBeSend, conn.send);
  assert.ok(!stream.writable, "stream must not be writable");
  stream.end();
});
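A sketch, not part of the diff, of the contract these tests exercise: data written before startStreamingToConnection() is buffered, then flushed to connection.sendCopyFromChunk(), and end() ultimately triggers endCopyFrom(). The fake connection object and sample buffers below are hypothetical stand-ins, like ConnectionImitation above.

// CopyFromStream usage sketch (assumed standalone require path).
var CopyFromStream = require('pg/lib/copystream').CopyFromStream;

var sent = [];
var fakeConnection = {
  sendCopyFromChunk: function (chunk) { sent.push(chunk); return true; },
  endCopyFrom: function () { console.log('copy finished, %d chunks sent', sent.length); }
};

var stream = new CopyFromStream();
stream.write(new Buffer('1\tfoo\n'));        // buffered: no connection attached yet
stream.startStreamingToConnection(fakeConnection);
stream.end(new Buffer('2\tbar\n'));          // flushed, then endCopyFrom() fires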
122
test/unit/copystream/copyto-tests.js
Normal file
@ -0,0 +1,122 @@
var helper = require(__dirname + '/../test-helper');
var CopyToStream = require(__dirname + '/../../../lib/copystream').CopyToStream;
var DataCounter = function () {
  this.sendBytes = 0;
  this.recievedBytes = 0;
};
DataCounter.prototype = {
  send: function (buf) {
    this.sendBytes += buf.length;
    return buf;
  },
  recieve: function (chunk) {
    this.recievedBytes += chunk.length;
  },
  assert: function () {
    assert.equal(this.sendBytes, this.recievedBytes, "bytes sent and received have to match");
  }
};
var buf1 = new Buffer("asdfasd"),
  buf2 = new Buffer("q03r90arf0aospd;"),
  buf3 = new Buffer(542),
  buf4 = new Buffer("93jfemialfjkasjlfas");
test('CopyToStream simple', function () {
  var stream = new CopyToStream(),
    dc = new DataCounter();
  assert.emits(stream, 'end', function () {}, '');
  stream.on('data', dc.recieve.bind(dc));
  stream.handleChunk(dc.send(buf1));
  stream.handleChunk(dc.send(buf2));
  stream.handleChunk(dc.send(buf3));
  stream.handleChunk(dc.send(buf4));
  dc.assert();
  stream.close();
});
test('CopyToStream pause/resume/close', function () {
  var stream = new CopyToStream(),
    dc = new DataCounter();
  stream.on('data', dc.recieve.bind(dc));
  assert.emits(stream, 'end', function () {}, 'stream has to emit end after closing');
  stream.pause();
  stream.handleChunk(dc.send(buf1));
  stream.handleChunk(dc.send(buf2));
  stream.handleChunk(dc.send(buf3));
  assert.equal(dc.recievedBytes, 0);
  stream.resume();
  dc.assert();
  stream.handleChunk(dc.send(buf2));
  dc.assert();
  stream.handleChunk(dc.send(buf3));
  dc.assert();
  stream.pause();
  stream.handleChunk(dc.send(buf4));
  assert.equal(dc.sendBytes - dc.recievedBytes, buf4.length, "stream must not emit data while it is paused");
  stream.resume();
  dc.assert();
  stream.close();
});
test('CopyToStream error', function () {
  var stream = new CopyToStream(),
    dc = new DataCounter();
  stream.on('data', dc.recieve.bind(dc));
  assert.emits(stream, 'error', function () {}, 'stream has to emit an error event when the error method is called');
  stream.handleChunk(dc.send(buf1));
  stream.handleChunk(dc.send(buf2));
  stream.error(new Error('test error'));
});
test('CopyToStream does not emit anything while paused', function () {
  var stream = new CopyToStream();
  stream.on('data', function () {
    assert.ok(false, "stream must not emit data when paused");
  });
  stream.on('end', function () {
    assert.ok(false, "stream must not emit end when paused");
  });
  stream.on('error', function () {
    assert.ok(false, "stream must not emit error when paused");
  });
  stream.pause();
  stream.handleChunk(buf2);
  stream.close();
  stream.error();
});
test('CopyToStream emits data and error after resume', function () {
  var stream = new CopyToStream(),
    paused;
  stream.on('data', function () {
    assert.ok(!paused, "stream must not emit data when paused");
  });
  stream.on('end', function () {
    assert.ok(!paused, "stream must not emit end when paused");
  });
  stream.on('error', function () {
    assert.ok(!paused, "stream must not emit error when paused");
  });
  paused = true;
  stream.pause();
  stream.handleChunk(buf2);
  stream.error();
  paused = false;
  stream.resume();
});
test('CopyToStream emits data and end after resume', function () {
  var stream = new CopyToStream(),
    paused;
  stream.on('data', function () {
    assert.ok(!paused, "stream must not emit data when paused");
  });
  stream.on('end', function () {
    assert.ok(!paused, "stream must not emit end when paused");
  });
  stream.on('error', function () {
    assert.ok(!paused, "stream must not emit error when paused");
  });
  paused = true;
  stream.pause();
  stream.handleChunk(buf2);
  stream.close();
  paused = false;
  stream.resume();
});
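For orientation, a consumer-side sketch, not part of the diff, of the CopyToStream surface these tests exercise: the connection pushes bytes in via handleChunk(), the consumer reads them as 'data' events, pause()/resume() buffer and flush, and close()/error() finish the stream. Driving the stream by hand as below is an assumption for illustration; in the library the connection makes those calls.

// CopyToStream usage sketch (assumed standalone require path).
var CopyToStream = require('pg/lib/copystream').CopyToStream;

var stream = new CopyToStream();
var received = [];

stream.on('data', function (chunk) { received.push(chunk); });
stream.on('end', function () {
  console.log('copy out finished, %d chunks received', received.length);
});

stream.pause();                              // nothing is emitted while paused
stream.handleChunk(new Buffer('1\tfoo\n'));  // normally called by the connection
stream.handleChunk(new Buffer('2\tbar\n'));
stream.resume();                             // buffered chunks are flushed as 'data'
stream.close();                              // emits 'end'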
Some files were not shown because too many files have changed in this diff.