mirror of
https://github.com/brianc/node-postgres.git
synced 2025-12-08 20:16:25 +00:00
Compare commits
14 Commits
master
...
pg-cloudfl
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
306fa83d84 | ||
|
|
69e56976c6 | ||
|
|
56762beebf | ||
|
|
6a9b873f5f | ||
|
|
ffd5adc583 | ||
|
|
0a0e298e06 | ||
|
|
0cb8fd4216 | ||
|
|
6949c7ad73 | ||
|
|
739308463d | ||
|
|
c484282ace | ||
|
|
8a58659ed3 | ||
|
|
b4c1a4e935 | ||
|
|
c2cbc2b0b1 | ||
|
|
98bc876b49 |
10
.eslintrc
10
.eslintrc
@ -14,15 +14,9 @@
|
|||||||
},
|
},
|
||||||
"rules": {
|
"rules": {
|
||||||
"@typescript-eslint/no-unused-vars": ["error", {
|
"@typescript-eslint/no-unused-vars": ["error", {
|
||||||
"args": "none",
|
"args": "none"
|
||||||
"varsIgnorePattern": "^_$"
|
|
||||||
}],
|
}],
|
||||||
"no-unused-vars": ["error", {
|
"no-unused-vars": "off"
|
||||||
"args": "none",
|
|
||||||
"varsIgnorePattern": "^_$"
|
|
||||||
}],
|
|
||||||
"no-var": "error",
|
|
||||||
"prefer-const": "error"
|
|
||||||
},
|
},
|
||||||
"overrides": [
|
"overrides": [
|
||||||
{
|
{
|
||||||
|
|||||||
3
.github/workflows/ci.yml
vendored
3
.github/workflows/ci.yml
vendored
@ -42,8 +42,7 @@ jobs:
|
|||||||
- '18'
|
- '18'
|
||||||
- '20'
|
- '20'
|
||||||
- '22'
|
- '22'
|
||||||
- '24'
|
- '23'
|
||||||
- '25'
|
|
||||||
os:
|
os:
|
||||||
- ubuntu-latest
|
- ubuntu-latest
|
||||||
name: Node.js ${{ matrix.node }}
|
name: Node.js ${{ matrix.node }}
|
||||||
|
|||||||
@ -4,14 +4,6 @@ For richer information consult the commit log on github with referenced pull req
|
|||||||
|
|
||||||
We do not include break-fix version release in this file.
|
We do not include break-fix version release in this file.
|
||||||
|
|
||||||
## pg@8.16.0
|
|
||||||
|
|
||||||
- Add support for [min connection pool size](https://github.com/brianc/node-postgres/pull/3438).
|
|
||||||
|
|
||||||
## pg@8.15.0
|
|
||||||
|
|
||||||
- Add support for [esm](https://github.com/brianc/node-postgres/pull/3423) importing. CommonJS importing is still also supported.
|
|
||||||
|
|
||||||
## pg@8.14.0
|
## pg@8.14.0
|
||||||
|
|
||||||
- Add support from SCRAM-SAH-256-PLUS i.e. [channel binding](https://github.com/brianc/node-postgres/pull/3356).
|
- Add support from SCRAM-SAH-256-PLUS i.e. [channel binding](https://github.com/brianc/node-postgres/pull/3356).
|
||||||
|
|||||||
@ -1,20 +0,0 @@
|
|||||||
# node-postgres docs website
|
|
||||||
|
|
||||||
This is the documentation for node-postgres which is currently hosted at [https://node-postgres.com](https://node-postgres.com).
|
|
||||||
|
|
||||||
## Development
|
|
||||||
|
|
||||||
To run the documentation locally, you need to have [Node.js](https://nodejs.org) installed. Then, you can clone the repository and install the dependencies:
|
|
||||||
|
|
||||||
```bash
|
|
||||||
cd docs
|
|
||||||
yarn
|
|
||||||
```
|
|
||||||
|
|
||||||
Once you've installed the deps, you can run the development server:
|
|
||||||
|
|
||||||
```bash
|
|
||||||
yarn dev
|
|
||||||
```
|
|
||||||
|
|
||||||
This will start a local server at [http://localhost:3000](http://localhost:3000) where you can view the documentation and see your changes.
|
|
||||||
@ -1,3 +1,4 @@
|
|||||||
|
import React from 'react'
|
||||||
import { Callout } from 'nextra-theme-docs'
|
import { Callout } from 'nextra-theme-docs'
|
||||||
|
|
||||||
export const Alert = ({ children }) => {
|
export const Alert = ({ children }) => {
|
||||||
|
|||||||
@ -1,3 +1,4 @@
|
|||||||
|
import React from 'react'
|
||||||
import { Callout } from 'nextra-theme-docs'
|
import { Callout } from 'nextra-theme-docs'
|
||||||
|
|
||||||
export const Info = ({ children }) => {
|
export const Info = ({ children }) => {
|
||||||
|
|||||||
@ -1,9 +0,0 @@
|
|||||||
type Props = {
|
|
||||||
src: string
|
|
||||||
alt?: string
|
|
||||||
}
|
|
||||||
|
|
||||||
export function Logo(props: Props) {
|
|
||||||
const alt = props.alt || 'Logo'
|
|
||||||
return <img src={props.src} alt={alt} width={100} height={100} style={{ width: 400, height: 'auto' }} />
|
|
||||||
}
|
|
||||||
@ -1,5 +1,9 @@
|
|||||||
import 'nextra-theme-docs/style.css'
|
import 'nextra-theme-docs/style.css'
|
||||||
|
|
||||||
export default function Nextra({ Component, pageProps }) {
|
export default function Nextra({ Component, pageProps }) {
|
||||||
return <Component {...pageProps} />
|
return (
|
||||||
|
<>
|
||||||
|
<Component {...pageProps} />
|
||||||
|
</>
|
||||||
|
)
|
||||||
}
|
}
|
||||||
|
|||||||
@ -23,18 +23,15 @@ type Config = {
|
|||||||
lock_timeout?: number, // number of milliseconds a query is allowed to be en lock state before it's cancelled due to lock timeout
|
lock_timeout?: number, // number of milliseconds a query is allowed to be en lock state before it's cancelled due to lock timeout
|
||||||
application_name?: string, // The name of the application that created this Client instance
|
application_name?: string, // The name of the application that created this Client instance
|
||||||
connectionTimeoutMillis?: number, // number of milliseconds to wait for connection, default is no timeout
|
connectionTimeoutMillis?: number, // number of milliseconds to wait for connection, default is no timeout
|
||||||
keepAliveInitialDelayMillis?: number, // set the initial delay before the first keepalive probe is sent on an idle socket
|
idle_in_transaction_session_timeout?: number // number of milliseconds before terminating any session with an open idle transaction, default is no timeout
|
||||||
idle_in_transaction_session_timeout?: number, // number of milliseconds before terminating any session with an open idle transaction, default is no timeout
|
|
||||||
client_encoding?: string, // specifies the character set encoding that the database uses for sending data to the client
|
|
||||||
fallback_application_name?: string, // provide an application name to use if application_name is not set
|
|
||||||
options?: string // command-line options to be sent to the server
|
|
||||||
}
|
}
|
||||||
```
|
```
|
||||||
|
|
||||||
example to create a client with specific connection information:
|
example to create a client with specific connection information:
|
||||||
|
|
||||||
```js
|
```js
|
||||||
import { Client } from 'pg'
|
import pg from 'pg'
|
||||||
|
const { Client } = pg
|
||||||
|
|
||||||
const client = new Client({
|
const client = new Client({
|
||||||
user: 'database-user',
|
user: 'database-user',
|
||||||
@ -48,7 +45,8 @@ const client = new Client({
|
|||||||
## client.connect
|
## client.connect
|
||||||
|
|
||||||
```js
|
```js
|
||||||
import { Client } from 'pg'
|
import pg from 'pg'
|
||||||
|
const { Client } = pg
|
||||||
const client = new Client()
|
const client = new Client()
|
||||||
|
|
||||||
await client.connect()
|
await client.connect()
|
||||||
@ -90,7 +88,8 @@ client.query(text: string, values?: any[]) => Promise<Result>
|
|||||||
**Plain text query**
|
**Plain text query**
|
||||||
|
|
||||||
```js
|
```js
|
||||||
import { Client } from 'pg'
|
import pg from 'pg'
|
||||||
|
const { Client } = pg
|
||||||
const client = new Client()
|
const client = new Client()
|
||||||
|
|
||||||
await client.connect()
|
await client.connect()
|
||||||
@ -104,7 +103,8 @@ await client.end()
|
|||||||
**Parameterized query**
|
**Parameterized query**
|
||||||
|
|
||||||
```js
|
```js
|
||||||
import { Client } from 'pg'
|
import pg from 'pg'
|
||||||
|
const { Client } = pg
|
||||||
const client = new Client()
|
const client = new Client()
|
||||||
|
|
||||||
await client.connect()
|
await client.connect()
|
||||||
@ -142,7 +142,8 @@ await client.end()
|
|||||||
If you pass an object to `client.query` and the object has a `.submit` function on it, the client will pass it's PostgreSQL server connection to the object and delegate query dispatching to the supplied object. This is an advanced feature mostly intended for library authors. It is incidentally also currently how the callback and promise based queries above are handled internally, but this is subject to change. It is also how [pg-cursor](https://github.com/brianc/node-pg-cursor) and [pg-query-stream](https://github.com/brianc/node-pg-query-stream) work.
|
If you pass an object to `client.query` and the object has a `.submit` function on it, the client will pass it's PostgreSQL server connection to the object and delegate query dispatching to the supplied object. This is an advanced feature mostly intended for library authors. It is incidentally also currently how the callback and promise based queries above are handled internally, but this is subject to change. It is also how [pg-cursor](https://github.com/brianc/node-pg-cursor) and [pg-query-stream](https://github.com/brianc/node-pg-query-stream) work.
|
||||||
|
|
||||||
```js
|
```js
|
||||||
import { Query } from 'pg'
|
import pg from 'pg'
|
||||||
|
const { Query } = pg
|
||||||
const query = new Query('select $1::text as name', ['brianc'])
|
const query = new Query('select $1::text as name', ['brianc'])
|
||||||
|
|
||||||
const result = client.query(query)
|
const result = client.query(query)
|
||||||
|
|||||||
@ -18,7 +18,8 @@ $ npm install pg pg-cursor
|
|||||||
Instantiates a new Cursor. A cursor is an instance of `Submittable` and should be passed directly to the `client.query` method.
|
Instantiates a new Cursor. A cursor is an instance of `Submittable` and should be passed directly to the `client.query` method.
|
||||||
|
|
||||||
```js
|
```js
|
||||||
import { Pool } from 'pg'
|
import pg from 'pg'
|
||||||
|
const { Pool } = pg
|
||||||
import Cursor from 'pg-cursor'
|
import Cursor from 'pg-cursor'
|
||||||
|
|
||||||
const pool = new Pool()
|
const pool = new Pool()
|
||||||
@ -28,9 +29,11 @@ const values = [10]
|
|||||||
|
|
||||||
const cursor = client.query(new Cursor(text, values))
|
const cursor = client.query(new Cursor(text, values))
|
||||||
|
|
||||||
const { rows } = await cursor.read(100)
|
cursor.read(100, (err, rows) => {
|
||||||
console.log(rows.length) // 100 (unless the table has fewer than 100 rows)
|
cursor.close(() => {
|
||||||
client.release()
|
client.release()
|
||||||
|
})
|
||||||
|
})
|
||||||
```
|
```
|
||||||
|
|
||||||
```ts
|
```ts
|
||||||
@ -55,7 +58,8 @@ If the cursor has read to the end of the result sets all subsequent calls to cur
|
|||||||
Here is an example of reading to the end of a cursor:
|
Here is an example of reading to the end of a cursor:
|
||||||
|
|
||||||
```js
|
```js
|
||||||
import { Pool } from 'pg'
|
import pg from 'pg'
|
||||||
|
const { Pool } = pg
|
||||||
import Cursor from 'pg-cursor'
|
import Cursor from 'pg-cursor'
|
||||||
|
|
||||||
const pool = new Pool()
|
const pool = new Pool()
|
||||||
|
|||||||
@ -29,17 +29,9 @@ type Config = {
|
|||||||
idleTimeoutMillis?: number
|
idleTimeoutMillis?: number
|
||||||
|
|
||||||
// maximum number of clients the pool should contain
|
// maximum number of clients the pool should contain
|
||||||
// by default this is set to 10. There is some nuance to setting the maximum size of your pool.
|
// by default this is set to 10.
|
||||||
// see https://node-postgres.com/guides/pool-sizing for more information
|
|
||||||
max?: number
|
max?: number
|
||||||
|
|
||||||
// minimum number of clients the pool should hold on to and _not_ destroy with the idleTimeoutMillis
|
|
||||||
// this can be useful if you get very bursty traffic and want to keep a few clients around.
|
|
||||||
// note: current the pool will not automatically create and connect new clients up to the min, it will
|
|
||||||
// only not evict and close clients except those which exceed the min count.
|
|
||||||
// the default is 0 which disables this behavior.
|
|
||||||
min?: number
|
|
||||||
|
|
||||||
// Default behavior is the pool will keep clients open & connected to the backend
|
// Default behavior is the pool will keep clients open & connected to the backend
|
||||||
// until idleTimeoutMillis expire for each client and node will maintain a ref
|
// until idleTimeoutMillis expire for each client and node will maintain a ref
|
||||||
// to the socket on the client, keeping the event loop alive until all clients are closed
|
// to the socket on the client, keeping the event loop alive until all clients are closed
|
||||||
@ -50,19 +42,14 @@ type Config = {
|
|||||||
// to the postgres server. This can be handy in scripts & tests
|
// to the postgres server. This can be handy in scripts & tests
|
||||||
// where you don't want to wait for your clients to go idle before your process exits.
|
// where you don't want to wait for your clients to go idle before your process exits.
|
||||||
allowExitOnIdle?: boolean
|
allowExitOnIdle?: boolean
|
||||||
|
|
||||||
// Sets a max overall life for the connection.
|
|
||||||
// A value of 60 would evict connections that have been around for over 60 seconds,
|
|
||||||
// regardless of whether they are idle. It's useful to force rotation of connection pools through
|
|
||||||
// middleware so that you can rotate the underlying servers. The default is disabled (value of zero)
|
|
||||||
maxLifetimeSeconds?: number
|
|
||||||
}
|
}
|
||||||
```
|
```
|
||||||
|
|
||||||
example to create a new pool with configuration:
|
example to create a new pool with configuration:
|
||||||
|
|
||||||
```js
|
```js
|
||||||
import { Pool } from 'pg'
|
import pg from 'pg'
|
||||||
|
const { Pool } = pg
|
||||||
|
|
||||||
const pool = new Pool({
|
const pool = new Pool({
|
||||||
host: 'localhost',
|
host: 'localhost',
|
||||||
@ -70,7 +57,6 @@ const pool = new Pool({
|
|||||||
max: 20,
|
max: 20,
|
||||||
idleTimeoutMillis: 30000,
|
idleTimeoutMillis: 30000,
|
||||||
connectionTimeoutMillis: 2000,
|
connectionTimeoutMillis: 2000,
|
||||||
maxLifetimeSeconds: 60
|
|
||||||
})
|
})
|
||||||
```
|
```
|
||||||
|
|
||||||
@ -83,7 +69,8 @@ pool.query(text: string, values?: any[]) => Promise<pg.Result>
|
|||||||
```
|
```
|
||||||
|
|
||||||
```js
|
```js
|
||||||
import { Pool } from 'pg'
|
import pg from 'pg'
|
||||||
|
const { Pool } = pg
|
||||||
|
|
||||||
const pool = new Pool()
|
const pool = new Pool()
|
||||||
|
|
||||||
@ -91,7 +78,7 @@ const result = await pool.query('SELECT $1::text as name', ['brianc'])
|
|||||||
console.log(result.rows[0].name) // brianc
|
console.log(result.rows[0].name) // brianc
|
||||||
```
|
```
|
||||||
|
|
||||||
Notice in the example above there is no need to check out or release a client. The pool is doing the acquiring and releasing internally. I find `pool.query` to be a handy shortcut in many situations and I use it exclusively unless I need a transaction.
|
Notice in the example above there is no need to check out or release a client. The pool is doing the acquiring and releasing internally. I find `pool.query` to be a handy shortcut many situations and use it exclusively unless I need a transaction.
|
||||||
|
|
||||||
<Alert>
|
<Alert>
|
||||||
<div>
|
<div>
|
||||||
@ -112,10 +99,11 @@ Acquires a client from the pool.
|
|||||||
|
|
||||||
- If there are idle clients in the pool one will be returned to the callback on `process.nextTick`.
|
- If there are idle clients in the pool one will be returned to the callback on `process.nextTick`.
|
||||||
- If the pool is not full but all current clients are checked out a new client will be created & returned to this callback.
|
- If the pool is not full but all current clients are checked out a new client will be created & returned to this callback.
|
||||||
- If the pool is 'full' and all clients are currently checked out, requests will wait in a FIFO queue until a client becomes available by being released back to the pool.
|
- If the pool is 'full' and all clients are currently checked out will wait in a FIFO queue until a client becomes available by it being released back to the pool.
|
||||||
|
|
||||||
```js
|
```js
|
||||||
import { Pool } from 'pg'
|
import pg from 'pg'
|
||||||
|
const { Pool } = pg
|
||||||
|
|
||||||
const pool = new Pool()
|
const pool = new Pool()
|
||||||
|
|
||||||
@ -133,7 +121,8 @@ Client instances returned from `pool.connect` will have a `release` method which
|
|||||||
The `release` method on an acquired client returns it back to the pool. If you pass a truthy value in the `destroy` parameter, instead of releasing the client to the pool, the pool will be instructed to disconnect and destroy this client, leaving a space within itself for a new client.
|
The `release` method on an acquired client returns it back to the pool. If you pass a truthy value in the `destroy` parameter, instead of releasing the client to the pool, the pool will be instructed to disconnect and destroy this client, leaving a space within itself for a new client.
|
||||||
|
|
||||||
```js
|
```js
|
||||||
import { Pool } from 'pg'
|
import pg from 'pg'
|
||||||
|
const { Pool } = pg
|
||||||
|
|
||||||
const pool = new Pool()
|
const pool = new Pool()
|
||||||
|
|
||||||
@ -145,7 +134,8 @@ client.release()
|
|||||||
```
|
```
|
||||||
|
|
||||||
```js
|
```js
|
||||||
import { Pool } from 'pg'
|
import pg from 'pg'
|
||||||
|
const { Pool } = pg
|
||||||
|
|
||||||
const pool = new Pool()
|
const pool = new Pool()
|
||||||
assert(pool.totalCount === 0)
|
assert(pool.totalCount === 0)
|
||||||
@ -178,7 +168,8 @@ Calling `pool.end` will drain the pool of all active clients, disconnect them, a
|
|||||||
|
|
||||||
```js
|
```js
|
||||||
// again both promises and callbacks are supported:
|
// again both promises and callbacks are supported:
|
||||||
import { Pool } from 'pg'
|
import pg from 'pg'
|
||||||
|
const { Pool } = pg
|
||||||
|
|
||||||
const pool = new Pool()
|
const pool = new Pool()
|
||||||
|
|
||||||
|
|||||||
@ -9,7 +9,7 @@ import { Alert } from '/components/alert.tsx'
|
|||||||
Escapes a string as a [SQL identifier](https://www.postgresql.org/docs/current/sql-syntax-lexical.html#SQL-SYNTAX-IDENTIFIERS).
|
Escapes a string as a [SQL identifier](https://www.postgresql.org/docs/current/sql-syntax-lexical.html#SQL-SYNTAX-IDENTIFIERS).
|
||||||
|
|
||||||
```js
|
```js
|
||||||
import { escapeIdentifier } from 'pg';
|
const { escapeIdentifier } = require('pg')
|
||||||
const escapedIdentifier = escapeIdentifier('FooIdentifier')
|
const escapedIdentifier = escapeIdentifier('FooIdentifier')
|
||||||
console.log(escapedIdentifier) // '"FooIdentifier"'
|
console.log(escapedIdentifier) // '"FooIdentifier"'
|
||||||
```
|
```
|
||||||
@ -27,7 +27,7 @@ console.log(escapedIdentifier) // '"FooIdentifier"'
|
|||||||
Escapes a string as a [SQL literal](https://www.postgresql.org/docs/current/sql-syntax-lexical.html#SQL-SYNTAX-CONSTANTS).
|
Escapes a string as a [SQL literal](https://www.postgresql.org/docs/current/sql-syntax-lexical.html#SQL-SYNTAX-CONSTANTS).
|
||||||
|
|
||||||
```js
|
```js
|
||||||
import { escapeLiteral } from 'pg';
|
const { escapeLiteral } = require('pg')
|
||||||
const escapedLiteral = escapeLiteral("hello 'world'")
|
const escapedLiteral = escapeLiteral("hello 'world'")
|
||||||
console.log(escapedLiteral) // "'hello ''world'''"
|
console.log(escapedLiteral) // "'hello ''world'''"
|
||||||
```
|
```
|
||||||
|
|||||||
@ -5,7 +5,5 @@
|
|||||||
"transactions": "Transactions",
|
"transactions": "Transactions",
|
||||||
"types": "Data Types",
|
"types": "Data Types",
|
||||||
"ssl": "SSL",
|
"ssl": "SSL",
|
||||||
"native": "Native",
|
"native": "Native"
|
||||||
"esm": "ESM",
|
|
||||||
"callbacks": "Callbacks"
|
|
||||||
}
|
}
|
||||||
|
|||||||
@ -1,39 +0,0 @@
|
|||||||
---
|
|
||||||
title: Callbacks
|
|
||||||
---
|
|
||||||
|
|
||||||
## Callback Support
|
|
||||||
|
|
||||||
`async` / `await` is the preferred way to write async code these days with node, but callbacks are supported in the `pg` module and the `pg-pool` module. To use them, pass a callback function as the last argument to the following methods & it will be called and a promise will not be returned:
|
|
||||||
|
|
||||||
|
|
||||||
```js
|
|
||||||
const { Pool, Client } = require('pg')
|
|
||||||
|
|
||||||
// pool
|
|
||||||
const pool = new Pool()
|
|
||||||
// run a query on an available client
|
|
||||||
pool.query('SELECT NOW()', (err, res) => {
|
|
||||||
console.log(err, res)
|
|
||||||
})
|
|
||||||
|
|
||||||
// check out a client to do something more complex like a transaction
|
|
||||||
pool.connect((err, client, release) => {
|
|
||||||
client.query('SELECT NOW()', (err, res) => {
|
|
||||||
release()
|
|
||||||
console.log(err, res)
|
|
||||||
pool.end()
|
|
||||||
})
|
|
||||||
|
|
||||||
})
|
|
||||||
|
|
||||||
// single client
|
|
||||||
const client = new Client()
|
|
||||||
client.connect((err) => {
|
|
||||||
if (err) throw err
|
|
||||||
client.query('SELECT NOW()', (err, res) => {
|
|
||||||
console.log(err, res)
|
|
||||||
client.end()
|
|
||||||
})
|
|
||||||
})
|
|
||||||
```
|
|
||||||
@ -101,9 +101,9 @@ const signerOptions = {
|
|||||||
username: 'api-user',
|
username: 'api-user',
|
||||||
}
|
}
|
||||||
|
|
||||||
const signer = new RDS.Signer(signerOptions)
|
const signer = new RDS.Signer()
|
||||||
|
|
||||||
const getPassword = () => signer.getAuthToken()
|
const getPassword = () => signer.getAuthToken(signerOptions)
|
||||||
|
|
||||||
const pool = new Pool({
|
const pool = new Pool({
|
||||||
user: signerOptions.username,
|
user: signerOptions.username,
|
||||||
|
|||||||
@ -1,37 +0,0 @@
|
|||||||
---
|
|
||||||
title: ESM
|
|
||||||
---
|
|
||||||
|
|
||||||
## ESM Support
|
|
||||||
|
|
||||||
As of v8.15.x node-postgres supporters the __ECMAScript Module__ (ESM) format. This means you can use `import` statements instead of `require` or `import pg from 'pg'`.
|
|
||||||
|
|
||||||
CommonJS modules are still supported. The ESM format is an opt-in feature and will not affect existing codebases that use CommonJS.
|
|
||||||
|
|
||||||
The docs have been changed to show ESM usage, but in a CommonJS context you can still use the same code, you just need to change the import format.
|
|
||||||
|
|
||||||
If you're using CommonJS, you can use the following code to import the `pg` module:
|
|
||||||
|
|
||||||
```js
|
|
||||||
const pg = require('pg')
|
|
||||||
const { Client } = pg
|
|
||||||
// etc...
|
|
||||||
```
|
|
||||||
|
|
||||||
### ESM Usage
|
|
||||||
|
|
||||||
If you're using ESM, you can use the following code to import the `pg` module:
|
|
||||||
|
|
||||||
```js
|
|
||||||
import { Client } from 'pg'
|
|
||||||
// etc...
|
|
||||||
```
|
|
||||||
|
|
||||||
|
|
||||||
Previously if you were using ESM you would have to use the following code:
|
|
||||||
|
|
||||||
```js
|
|
||||||
import pg from 'pg'
|
|
||||||
const { Client } = pg
|
|
||||||
// etc...
|
|
||||||
```
|
|
||||||
@ -22,7 +22,8 @@ const config = {
|
|||||||
},
|
},
|
||||||
}
|
}
|
||||||
|
|
||||||
import { Client, Pool } from 'pg'
|
import pg from 'pg'
|
||||||
|
const { Client, Pool } = pg
|
||||||
|
|
||||||
const client = new Client(config)
|
const client = new Client(config)
|
||||||
await client.connect()
|
await client.connect()
|
||||||
|
|||||||
@ -16,7 +16,8 @@ To execute a transaction with node-postgres you simply execute `BEGIN / COMMIT /
|
|||||||
## Examples
|
## Examples
|
||||||
|
|
||||||
```js
|
```js
|
||||||
import { Pool } from 'pg'
|
import pg from 'pg'
|
||||||
|
const { Pool } = pg
|
||||||
const pool = new Pool()
|
const pool = new Pool()
|
||||||
|
|
||||||
const client = await pool.connect()
|
const client = await pool.connect()
|
||||||
|
|||||||
@ -1,6 +1,5 @@
|
|||||||
{
|
{
|
||||||
"project-structure": "Suggested Code Structure",
|
"project-structure": "Suggested Code Structure",
|
||||||
"async-express": "Express with Async/Await",
|
"async-express": "Express with Async/Await",
|
||||||
"pool-sizing": "Pool Sizing",
|
|
||||||
"upgrading": "Upgrading"
|
"upgrading": "Upgrading"
|
||||||
}
|
}
|
||||||
|
|||||||
@ -22,7 +22,8 @@ That's the same structure I used in the [project structure](/guides/project-stru
|
|||||||
My `db/index.js` file usually starts out like this:
|
My `db/index.js` file usually starts out like this:
|
||||||
|
|
||||||
```js
|
```js
|
||||||
import { Pool } from 'pg'
|
import pg from 'pg'
|
||||||
|
const { Pool } = pg
|
||||||
|
|
||||||
const pool = new Pool()
|
const pool = new Pool()
|
||||||
|
|
||||||
|
|||||||
@ -1,25 +0,0 @@
|
|||||||
---
|
|
||||||
title: Pool Sizing
|
|
||||||
---
|
|
||||||
|
|
||||||
If you're using a [pool](/apis/pool) in an application with multiple instances of your service running (common in most cloud/container environments currently), you'll need to think a bit about the `max` parameter of your pool across all services and all _instances_ of all services which are connecting to your Postgres server.
|
|
||||||
|
|
||||||
This can get pretty complex depending on your cloud environment. Further nuance is introduced with things like pg-bouncer, RDS connection proxies, etc., which will do some forms of connection pooling and connection multiplexing. So, it's definitely worth thinking about. Let's run through a few setups. While certainly not exhaustive, these examples hopefully prompt you into thinking about what's right for your setup.
|
|
||||||
|
|
||||||
## Simple apps, dev mode, fixed instance counts, etc.
|
|
||||||
|
|
||||||
If your app isn't running in a k8s style env with containers scaling automatically or lambdas or cloud functions etc., you can do some "napkin math" for the `max` pool config you can use. Let's assume your Postgres instance is configured to have a maximum of 200 connections at any one time. You know your service is going to run on 4 instances. You can set the `max` pool size to 50, but if all your services are saturated waiting on database connections, you won't be able to connect to the database from any mgmt tools or scale up your services without changing config/code to adjust the max size.
|
|
||||||
|
|
||||||
In this situation, I'd probably set the `max` to 20 or 25. This lets you have plenty of headroom for scaling more instances and realistically, if your app is starved for db connections, you probably want to take a look at your queries and make them execute faster, or cache, or something else to reduce the load on the database. I worked on a more reporting-heavy application with limited users, but each running 5-6 queries at a time which all took 100-200 milliseconds to run. In that situation, I upped the `max` to 50. Typically, though, I don't bother setting it to anything other than the default of `10` as that's usually _fine_.
|
|
||||||
|
|
||||||
## Auto-scaling, cloud-functions, multi-tenancy, etc.
|
|
||||||
|
|
||||||
If the number of instances of your services which connect to your database is more dynamic and based on things like load, auto-scaling containers, or running in cloud-functions, you need to be a bit more thoughtful about what your max might be. Often in these environments, there will be another database pooling proxy in front of the database like pg-bouncer or the RDS-proxy, etc. I'm not sure how all these function exactly, and they all have some trade-offs, but let's assume you're not using a proxy. Then I'd be pretty cautious about how large you set any individual pool. If you're running an application under pretty serious load where you need dynamic scaling or lots of lambdas spinning up and sending queries, your queries are likely fast and you should be fine setting the `max` to a low value like 10 -- or just leave it alone, since `10` is the default.
|
|
||||||
|
|
||||||
## pg-bouncer, RDS-proxy, etc.
|
|
||||||
|
|
||||||
I'm not sure of all the pooling services for Postgres. I haven't used any myself. Throughout the years of working on `pg`, I've addressed issues caused by various proxies behaving differently than an actual Postgres backend. There are also gotchas with things like transactions. On the other hand, plenty of people run these with much success. In this situation, I would just recommend using some small but reasonable `max` value like the default value of `10` as it can still be helpful to keep a few TCP sockets from your services to the Postgres proxy open.
|
|
||||||
|
|
||||||
## Conclusion, tl;dr
|
|
||||||
|
|
||||||
It's a bit of a complicated topic and doesn't have much impact on things until you need to start scaling. At that point, your number of connections _still_ probably won't be your scaling bottleneck. It's worth thinking about a bit, but mostly I'd just leave the pool size to the default of `10` until you run into troubles: hopefully you never do!
|
|
||||||
@ -27,12 +27,13 @@ The location doesn't really matter - I've found it usually ends up being somewha
|
|||||||
Typically I'll start out my `db/index.js` file like so:
|
Typically I'll start out my `db/index.js` file like so:
|
||||||
|
|
||||||
```js
|
```js
|
||||||
import { Pool } from 'pg'
|
import pg from 'pg'
|
||||||
|
const { Pool } = pg
|
||||||
|
|
||||||
const pool = new Pool()
|
const pool = new Pool()
|
||||||
|
|
||||||
export const query = (text, params) => {
|
export const query = (text, params, callback) => {
|
||||||
return pool.query(text, params)
|
return pool.query(text, params, callback)
|
||||||
}
|
}
|
||||||
```
|
```
|
||||||
|
|
||||||
@ -54,7 +55,8 @@ app.get('/:id', async (req, res, next) => {
|
|||||||
Imagine we have lots of routes scattered throughout many files under our `routes/` directory. We now want to go back and log every single query that's executed, how long it took, and the number of rows it returned. If we had required node-postgres directly in every route file we'd have to go edit every single route - that would take forever & be really error prone! But thankfully we put our data access into `db/index.js`. Let's go add some logging:
|
Imagine we have lots of routes scattered throughout many files under our `routes/` directory. We now want to go back and log every single query that's executed, how long it took, and the number of rows it returned. If we had required node-postgres directly in every route file we'd have to go edit every single route - that would take forever & be really error prone! But thankfully we put our data access into `db/index.js`. Let's go add some logging:
|
||||||
|
|
||||||
```js
|
```js
|
||||||
import { Pool } from 'pg'
|
import pg from 'pg'
|
||||||
|
const { Pool } = pg
|
||||||
|
|
||||||
const pool = new Pool()
|
const pool = new Pool()
|
||||||
|
|
||||||
@ -74,7 +76,8 @@ _note: I didn't log the query parameters. Depending on your application you migh
|
|||||||
Now what if we need to check out a client from the pool to run several queries in a row in a transaction? We can add another method to our `db/index.js` file when we need to do this:
|
Now what if we need to check out a client from the pool to run several queries in a row in a transaction? We can add another method to our `db/index.js` file when we need to do this:
|
||||||
|
|
||||||
```js
|
```js
|
||||||
import { Pool } from 'pg'
|
import pg from 'pg'
|
||||||
|
const { Pool } = pg
|
||||||
|
|
||||||
const pool = new Pool()
|
const pool = new Pool()
|
||||||
|
|
||||||
|
|||||||
@ -50,7 +50,7 @@ pg.end()
|
|||||||
// new way, available since 6.0.0:
|
// new way, available since 6.0.0:
|
||||||
|
|
||||||
// create a pool
|
// create a pool
|
||||||
const pool = new pg.Pool()
|
var pool = new pg.Pool()
|
||||||
|
|
||||||
// connection using created pool
|
// connection using created pool
|
||||||
pool.connect(function (err, client, done) {
|
pool.connect(function (err, client, done) {
|
||||||
|
|||||||
@ -3,8 +3,6 @@ title: Welcome
|
|||||||
slug: /
|
slug: /
|
||||||
---
|
---
|
||||||
|
|
||||||
import { Logo } from '/components/logo.tsx'
|
|
||||||
|
|
||||||
node-postgres is a collection of node.js modules for interfacing with your PostgreSQL database. It has support for callbacks, promises, async/await, connection pooling, prepared statements, cursors, streaming results, C/C++ bindings, rich type parsing, and more! Just like PostgreSQL itself there are a lot of features: this documentation aims to get you up and running quickly and in the right direction. It also tries to provide guides for more advanced & edge-case topics allowing you to tap into the full power of PostgreSQL from node.js.
|
node-postgres is a collection of node.js modules for interfacing with your PostgreSQL database. It has support for callbacks, promises, async/await, connection pooling, prepared statements, cursors, streaming results, C/C++ bindings, rich type parsing, and more! Just like PostgreSQL itself there are a lot of features: this documentation aims to get you up and running quickly and in the right direction. It also tries to provide guides for more advanced & edge-case topics allowing you to tap into the full power of PostgreSQL from node.js.
|
||||||
|
|
||||||
## Install
|
## Install
|
||||||
@ -17,33 +15,19 @@ $ npm install pg
|
|||||||
|
|
||||||
node-postgres continued development and support is made possible by the many [supporters](https://github.com/brianc/node-postgres/blob/master/SPONSORS.md).
|
node-postgres continued development and support is made possible by the many [supporters](https://github.com/brianc/node-postgres/blob/master/SPONSORS.md).
|
||||||
|
|
||||||
Special thanks to [Medplum](https://www.medplum.com/) for sponsoring node-postgres for a whole year!
|
|
||||||
|
|
||||||
<a href="https://www.medplum.com/">
|
|
||||||
<img
|
|
||||||
alt="Medplum"
|
|
||||||
src="https://raw.githubusercontent.com/medplum/medplum-logo/refs/heads/main/medplum-logo.png"
|
|
||||||
style={{
|
|
||||||
width: '300px',
|
|
||||||
height: 'auto',
|
|
||||||
margin: '0 auto',
|
|
||||||
display: 'block',
|
|
||||||
}}
|
|
||||||
/>
|
|
||||||
</a>
|
|
||||||
|
|
||||||
If you or your company would like to sponsor node-postgres stop by [GitHub Sponsors](https://github.com/sponsors/brianc) and sign up or feel free to [email me](mailto:brian@pecanware.com) if you want to add your logo to the documentation or discuss higher tiers of sponsorship!
|
If you or your company would like to sponsor node-postgres stop by [GitHub Sponsors](https://github.com/sponsors/brianc) and sign up or feel free to [email me](mailto:brian@pecanware.com) if you want to add your logo to the documentation or discuss higher tiers of sponsorship!
|
||||||
|
|
||||||
# Version compatibility
|
# Version compatibility
|
||||||
|
|
||||||
node-postgres strives to be compatible with all recent LTS versions of node & the most recent "stable" version. At the time of this writing node-postgres is compatible with node 18.x, 20.x, 22.x, and 24.x.
|
node-postgres strives to be compatible with all recent LTS versions of node & the most recent "stable" version. At the time of this writing node-postgres is compatible with node 8.x, 10.x, 12.x and 14.x To use node >= 14.x you will need to install `pg@8.2.x` or later due to some internal stream changes on the node 14 branch. Dropping support for an old node lts version will always be considered a breaking change in node-postgres and will be done on _major_ version number changes only, and we will try to keep support for 8.x for as long as reasonably possible.
|
||||||
|
|
||||||
## Getting started
|
## Getting started
|
||||||
|
|
||||||
The simplest possible way to connect, query, and disconnect is with async/await:
|
The simplest possible way to connect, query, and disconnect is with async/await:
|
||||||
|
|
||||||
```js
|
```js
|
||||||
import { Client } from 'pg'
|
import pg from 'pg'
|
||||||
|
const { Client } = pg
|
||||||
const client = new Client()
|
const client = new Client()
|
||||||
await client.connect()
|
await client.connect()
|
||||||
|
|
||||||
@ -57,7 +41,8 @@ await client.end()
|
|||||||
For the sake of simplicity, these docs will assume that the methods are successful. In real life use, make sure to properly handle errors thrown in the methods. A `try/catch` block is a great way to do so:
|
For the sake of simplicity, these docs will assume that the methods are successful. In real life use, make sure to properly handle errors thrown in the methods. A `try/catch` block is a great way to do so:
|
||||||
|
|
||||||
```ts
|
```ts
|
||||||
import { Client } from 'pg'
|
import pg from 'pg'
|
||||||
|
const { Client } = pg
|
||||||
const client = new Client()
|
const client = new Client()
|
||||||
await client.connect()
|
await client.connect()
|
||||||
|
|
||||||
@ -71,17 +56,22 @@ try {
|
|||||||
}
|
}
|
||||||
```
|
```
|
||||||
|
|
||||||
### Pooling
|
### Callbacks
|
||||||
|
|
||||||
In most applications you'll want to use a [connection pool](/features/pooling) to manage your connections. This is a more advanced topic, but here's a simple example of how to use it:
|
If you prefer a callback-style approach to asynchronous programming, all async methods support an optional callback parameter as well:
|
||||||
|
|
||||||
```js
|
```js
|
||||||
import { Pool } from 'pg'
|
import pg from 'pg'
|
||||||
const pool = new Pool()
|
const { Client } = pg
|
||||||
const res = await pool.query('SELECT $1::text as message', ['Hello world!'])
|
const client = new Client()
|
||||||
console.log(res.rows[0].message) // Hello world!
|
|
||||||
|
client.connect((err) => {
|
||||||
|
client.query('SELECT $1::text as message', ['Hello world!'], (err, res) => {
|
||||||
|
console.log(err ? err.stack : res.rows[0].message) // Hello World!
|
||||||
|
client.end()
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
```
|
```
|
||||||
|
|
||||||
Our real-world apps are almost always more complicated than that, and I urge you to read on!
|
Our real-world apps are almost always more complicated than that, and I urge you to read on!
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
Binary file not shown.
|
Before Width: | Height: | Size: 15 KiB |
@ -10,6 +10,7 @@ export default {
|
|||||||
docsRepositoryBase: 'https://github.com/brianc/node-postgres/blob/master/docs', // base URL for the docs repository
|
docsRepositoryBase: 'https://github.com/brianc/node-postgres/blob/master/docs', // base URL for the docs repository
|
||||||
titleSuffix: ' – node-postgres',
|
titleSuffix: ' – node-postgres',
|
||||||
darkMode: true,
|
darkMode: true,
|
||||||
|
footer: true,
|
||||||
navigation: {
|
navigation: {
|
||||||
prev: true,
|
prev: true,
|
||||||
next: true,
|
next: true,
|
||||||
@ -22,43 +23,13 @@ export default {
|
|||||||
},
|
},
|
||||||
logo: (
|
logo: (
|
||||||
<>
|
<>
|
||||||
<svg
|
<svg>...</svg>
|
||||||
version="1.0"
|
<span>node-postgres</span>
|
||||||
xmlns="http://www.w3.org/2000/svg"
|
|
||||||
height={48}
|
|
||||||
width={48}
|
|
||||||
viewBox="0 0 1024.000000 1024.000000"
|
|
||||||
preserveAspectRatio="xMidYMid meet"
|
|
||||||
>
|
|
||||||
<g transform="translate(0.000000,1024.000000) scale(0.100000,-0.100000)" fill="#3c873a" stroke="none">
|
|
||||||
<path
|
|
||||||
d="M4990 7316 c-391 -87 -703 -397 -1003 -996 -285 -568 -477 -1260
|
|
||||||
-503 -1811 l-7 -142 -112 7 c-103 5 -207 27 -382 78 -37 11 -44 10 -63 -7 -61
|
|
||||||
-55 17 -180 177 -285 91 -60 194 -103 327 -137 l104 -26 17 -71 c44 -183 152
|
|
||||||
-441 256 -613 125 -207 322 -424 493 -541 331 -229 774 -291 1113 -156 112 45
|
|
||||||
182 94 209 147 13 24 13 35 -1 90 -22 87 -88 219 -134 267 -46 49 -79 52 -153
|
|
||||||
14 -168 -85 -360 -54 -508 83 -170 157 -244 440 -195 743 50 304 231 601 430
|
|
||||||
706 168 89 332 60 463 -81 66 -71 110 -140 197 -315 83 -166 116 -194 203
|
|
||||||
-170 88 23 370 258 637 531 411 420 685 806 808 1139 54 145 71 243 71 410 1
|
|
||||||
128 -3 157 -27 243 -86 310 -243 543 -467 690 -207 137 -440 157 -966 85
|
|
||||||
l-161 -22 -94 41 c-201 87 -327 113 -533 112 -77 -1 -166 -7 -196 -13z m-89
|
|
||||||
-1357 c15 -10 34 -38 43 -61 23 -56 13 -111 -28 -156 -59 -64 -171 -54 -216
|
|
||||||
21 -35 57 -22 145 28 190 44 40 122 43 173 6z m-234 -1361 c-46 -74 -156 -188
|
|
||||||
-249 -258 -211 -159 -459 -219 -734 -179 l-76 12 89 28 c187 60 485 229 683
|
|
||||||
388 l75 60 122 0 122 1 -32 -52z"
|
|
||||||
/>
|
|
||||||
</g>
|
|
||||||
</svg>
|
|
||||||
<span style={{ fontWeight: 800 }}>node-postgres</span>
|
|
||||||
</>
|
</>
|
||||||
),
|
),
|
||||||
chat: {
|
|
||||||
link: 'https://discord.gg/2afXp5vUWm',
|
|
||||||
},
|
|
||||||
head: (
|
head: (
|
||||||
<>
|
<>
|
||||||
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
|
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
|
||||||
<link rel="shortcut icon" href="/favicon.ico" />
|
|
||||||
<meta
|
<meta
|
||||||
name="description"
|
name="description"
|
||||||
content="node-postgres is a collection of node.js modules for interfacing with your PostgreSQL database."
|
content="node-postgres is a collection of node.js modules for interfacing with your PostgreSQL database."
|
||||||
|
|||||||
14
lerna.json
14
lerna.json
@ -1,12 +1,12 @@
|
|||||||
{
|
{
|
||||||
"packages": ["packages/*"],
|
"packages": [
|
||||||
|
"packages/*"
|
||||||
|
],
|
||||||
"npmClient": "yarn",
|
"npmClient": "yarn",
|
||||||
"useWorkspaces": true,
|
"useWorkspaces": true,
|
||||||
"version": "independent",
|
"version": "independent",
|
||||||
"command": {
|
"ignoreChanges": [
|
||||||
"version": {
|
"**/*.md",
|
||||||
"allowBranch": "master"
|
"**/test/**"
|
||||||
}
|
]
|
||||||
},
|
|
||||||
"ignoreChanges": ["**/*.md", "**/test/**"]
|
|
||||||
}
|
}
|
||||||
|
|||||||
@ -23,7 +23,7 @@
|
|||||||
"@typescript-eslint/eslint-plugin": "^7.0.0",
|
"@typescript-eslint/eslint-plugin": "^7.0.0",
|
||||||
"@typescript-eslint/parser": "^6.17.0",
|
"@typescript-eslint/parser": "^6.17.0",
|
||||||
"eslint": "^8.56.0",
|
"eslint": "^8.56.0",
|
||||||
"eslint-config-prettier": "^10.1.2",
|
"eslint-config-prettier": "^9.1.0",
|
||||||
"eslint-plugin-node": "^11.1.0",
|
"eslint-plugin-node": "^11.1.0",
|
||||||
"eslint-plugin-prettier": "^5.1.2",
|
"eslint-plugin-prettier": "^5.1.2",
|
||||||
"lerna": "^3.19.0",
|
"lerna": "^3.19.0",
|
||||||
|
|||||||
@ -1,8 +0,0 @@
|
|||||||
import * as esbuild from 'esbuild'
|
|
||||||
|
|
||||||
await esbuild.build({
|
|
||||||
entryPoints: ['./src/index.mjs'],
|
|
||||||
bundle: true,
|
|
||||||
outfile: './dist/esbuild-cloudflare.js',
|
|
||||||
conditions: ['import', 'workerd'],
|
|
||||||
})
|
|
||||||
@ -1,7 +0,0 @@
|
|||||||
import * as esbuild from 'esbuild'
|
|
||||||
|
|
||||||
await esbuild.build({
|
|
||||||
entryPoints: ['./src/index.mjs'],
|
|
||||||
bundle: true,
|
|
||||||
outfile: './dist/esbuild-empty.js',
|
|
||||||
})
|
|
||||||
@ -1,25 +0,0 @@
|
|||||||
{
|
|
||||||
"name": "pg-bundler-test",
|
|
||||||
"version": "0.0.2",
|
|
||||||
"description": "Test bundlers with pg-cloudflare, https://github.com/brianc/node-postgres/issues/3452",
|
|
||||||
"license": "MIT",
|
|
||||||
"private": true,
|
|
||||||
"type": "module",
|
|
||||||
"devDependencies": {
|
|
||||||
"@rollup/plugin-commonjs": "^28.0.3",
|
|
||||||
"@rollup/plugin-node-resolve": "^16.0.1",
|
|
||||||
"esbuild": "^0.25.5",
|
|
||||||
"pg-cloudflare": "^1.2.7",
|
|
||||||
"rollup": "^4.41.1",
|
|
||||||
"vite": "^6.3.5",
|
|
||||||
"webpack": "^5.99.9",
|
|
||||||
"webpack-cli": "^6.0.1"
|
|
||||||
},
|
|
||||||
"scripts": {
|
|
||||||
"test": "yarn webpack && yarn rollup && yarn vite && yarn esbuild",
|
|
||||||
"webpack": "webpack --config webpack-empty.config.mjs && webpack --config webpack-cloudflare.config.mjs",
|
|
||||||
"rollup": "rollup --config rollup-empty.config.mjs --failAfterWarnings && rollup --config rollup-cloudflare.config.mjs --failAfterWarnings",
|
|
||||||
"vite": "[ $(node --version | sed 's/v//' | cut -d'.' -f1) -ge 18 ] && vite build --config vite-empty.config.mjs && vite build --config vite-cloudflare.config.mjs || echo 'Skip Vite test'",
|
|
||||||
"esbuild": "node esbuild-empty.config.mjs && node esbuild-cloudflare.config.mjs"
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@ -1,13 +0,0 @@
|
|||||||
import { defineConfig } from 'rollup'
|
|
||||||
import { nodeResolve } from '@rollup/plugin-node-resolve'
|
|
||||||
import commonjs from '@rollup/plugin-commonjs'
|
|
||||||
|
|
||||||
export default defineConfig({
|
|
||||||
input: './src/index.mjs',
|
|
||||||
output: {
|
|
||||||
file: 'dist/rollup-cloudflare.js',
|
|
||||||
format: 'es',
|
|
||||||
},
|
|
||||||
plugins: [nodeResolve({ exportConditions: ['import', 'workerd'], preferBuiltins: true }), commonjs()],
|
|
||||||
external: ['cloudflare:sockets'],
|
|
||||||
})
|
|
||||||
@ -1,12 +0,0 @@
|
|||||||
import { defineConfig } from 'rollup'
|
|
||||||
import { nodeResolve } from '@rollup/plugin-node-resolve'
|
|
||||||
import commonjs from '@rollup/plugin-commonjs'
|
|
||||||
|
|
||||||
export default defineConfig({
|
|
||||||
input: './src/index.mjs',
|
|
||||||
output: {
|
|
||||||
file: 'dist/rollup-empty.js',
|
|
||||||
format: 'es',
|
|
||||||
},
|
|
||||||
plugins: [nodeResolve(), commonjs()],
|
|
||||||
})
|
|
||||||
@ -1 +0,0 @@
|
|||||||
import 'pg-cloudflare'
|
|
||||||
@ -1,20 +0,0 @@
|
|||||||
import { defineConfig } from 'vite'
|
|
||||||
import commonjs from '@rollup/plugin-commonjs'
|
|
||||||
|
|
||||||
export default defineConfig({
|
|
||||||
build: {
|
|
||||||
emptyOutDir: false,
|
|
||||||
lib: {
|
|
||||||
entry: './src/index.mjs',
|
|
||||||
fileName: 'vite-cloudflare',
|
|
||||||
formats: ['es'],
|
|
||||||
},
|
|
||||||
rollupOptions: {
|
|
||||||
external: ['cloudflare:sockets'],
|
|
||||||
},
|
|
||||||
},
|
|
||||||
resolve: {
|
|
||||||
conditions: ['import', 'workerd'],
|
|
||||||
},
|
|
||||||
plugins: [commonjs()],
|
|
||||||
})
|
|
||||||
@ -1,12 +0,0 @@
|
|||||||
import { defineConfig } from 'vite'
|
|
||||||
|
|
||||||
export default defineConfig({
|
|
||||||
build: {
|
|
||||||
emptyOutDir: false,
|
|
||||||
lib: {
|
|
||||||
entry: './src/index.mjs',
|
|
||||||
fileName: 'vite-empty',
|
|
||||||
formats: ['es'],
|
|
||||||
},
|
|
||||||
},
|
|
||||||
})
|
|
||||||
@ -1,16 +0,0 @@
|
|||||||
import webpack from 'webpack'
|
|
||||||
|
|
||||||
export default {
|
|
||||||
mode: 'production',
|
|
||||||
entry: './src/index.mjs',
|
|
||||||
output: {
|
|
||||||
filename: 'webpack-cloudflare.js',
|
|
||||||
},
|
|
||||||
resolve: { conditionNames: ['import', 'workerd'] },
|
|
||||||
plugins: [
|
|
||||||
// ignore cloudflare:sockets imports
|
|
||||||
new webpack.IgnorePlugin({
|
|
||||||
resourceRegExp: /^cloudflare:sockets$/,
|
|
||||||
}),
|
|
||||||
],
|
|
||||||
}
|
|
||||||
@ -1,7 +0,0 @@
|
|||||||
export default {
|
|
||||||
mode: 'production',
|
|
||||||
entry: './src/index.mjs',
|
|
||||||
output: {
|
|
||||||
filename: 'webpack-empty.js',
|
|
||||||
},
|
|
||||||
}
|
|
||||||
@ -10,64 +10,6 @@
|
|||||||
npm i --save-dev pg-cloudflare
|
npm i --save-dev pg-cloudflare
|
||||||
```
|
```
|
||||||
|
|
||||||
The package uses conditional exports to support bundlers that don't know about
|
|
||||||
`cloudflare:sockets`, so the consumer code by default imports an empty file. To
|
|
||||||
enable the package, resolve to the `cloudflare` condition in your bundler's
|
|
||||||
config. For example:
|
|
||||||
|
|
||||||
- `webpack.config.js`
|
|
||||||
```js
|
|
||||||
export default {
|
|
||||||
...,
|
|
||||||
resolve: { conditionNames: [..., "workerd"] },
|
|
||||||
plugins: [
|
|
||||||
// ignore cloudflare:sockets imports
|
|
||||||
new webpack.IgnorePlugin({
|
|
||||||
resourceRegExp: /^cloudflare:sockets$/,
|
|
||||||
}),
|
|
||||||
],
|
|
||||||
}
|
|
||||||
```
|
|
||||||
- `vite.config.js`
|
|
||||||
|
|
||||||
> [!NOTE]
|
|
||||||
> If you are using the [Cloudflare Vite plugin](https://www.npmjs.com/package/@cloudflare/vite-plugin) then the following configuration is not necessary.
|
|
||||||
|
|
||||||
```js
|
|
||||||
export default defineConfig({
|
|
||||||
...,
|
|
||||||
resolve: {
|
|
||||||
conditions: [..., "workerd"],
|
|
||||||
},
|
|
||||||
build: {
|
|
||||||
...,
|
|
||||||
// don't try to bundle cloudflare:sockets
|
|
||||||
rollupOptions: {
|
|
||||||
external: [..., 'cloudflare:sockets'],
|
|
||||||
},
|
|
||||||
},
|
|
||||||
})
|
|
||||||
```
|
|
||||||
|
|
||||||
- `rollup.config.js`
|
|
||||||
```js
|
|
||||||
export default defineConfig({
|
|
||||||
...,
|
|
||||||
plugins: [..., nodeResolve({ exportConditions: [..., 'workerd'] })],
|
|
||||||
// don't try to bundle cloudflare:sockets
|
|
||||||
external: [..., 'cloudflare:sockets'],
|
|
||||||
})
|
|
||||||
```
|
|
||||||
- `esbuild.config.js`
|
|
||||||
```js
|
|
||||||
await esbuild.build({
|
|
||||||
...,
|
|
||||||
conditions: [..., 'workerd'],
|
|
||||||
})
|
|
||||||
```
|
|
||||||
|
|
||||||
The concrete examples can be found in `packages/pg-bundler-test`.
|
|
||||||
|
|
||||||
## How to use conditionally, in non-Node.js environments
|
## How to use conditionally, in non-Node.js environments
|
||||||
|
|
||||||
As implemented in `pg` [here](https://github.com/brianc/node-postgres/commit/07553428e9c0eacf761a5d4541a3300ff7859578#diff-34588ad868ebcb232660aba7ee6a99d1e02f4bc93f73497d2688c3f074e60533R5-R13), a typical use case might look as follows, where in a Node.js environment the `net` module is used, while in a non-Node.js environment, where `net` is unavailable, `pg-cloudflare` is used instead, providing an equivalent interface:
|
As implemented in `pg` [here](https://github.com/brianc/node-postgres/commit/07553428e9c0eacf761a5d4541a3300ff7859578#diff-34588ad868ebcb232660aba7ee6a99d1e02f4bc93f73497d2688c3f074e60533R5-R13), a typical use case might look as follows, where in a Node.js environment the `net` module is used, while in a non-Node.js environment, where `net` is unavailable, `pg-cloudflare` is used instead, providing an equivalent interface:
|
||||||
@ -79,13 +21,14 @@ module.exports.getStream = function getStream(ssl = false) {
|
|||||||
return net.Socket()
|
return net.Socket()
|
||||||
}
|
}
|
||||||
const { CloudflareSocket } = require('pg-cloudflare')
|
const { CloudflareSocket } = require('pg-cloudflare')
|
||||||
return new CloudflareSocket(ssl)
|
return new CloudflareSocket(ssl);
|
||||||
}
|
}
|
||||||
```
|
```
|
||||||
|
|
||||||
## Node.js implementation of the Socket API proposal
|
## Node.js implementation of the Socket API proposal
|
||||||
|
|
||||||
If you're looking for a way to rely on `connect()` as the interface you use to interact with raw sockets, but need this interface to be available in a Node.js environment, [`@arrowood.dev/socket`](https://github.com/Ethan-Arrowood/socket) provides a Node.js implementation of the Socket API.
|
If you're looking for a way to rely on `connect()` as the interface you use to interact with raw sockets, but need this interface to be availble in a Node.js environment, [`@arrowood.dev/socket`](https://github.com/Ethan-Arrowood/socket) provides a Node.js implementation of the Socket API.
|
||||||
|
|
||||||
|
|
||||||
### license
|
### license
|
||||||
|
|
||||||
|
|||||||
@ -1,3 +1 @@
|
|||||||
import cf from '../dist/index.js'
|
export const TEST = 'true'
|
||||||
|
|
||||||
export const CloudflareSocket = cf.CloudflareSocket
|
|
||||||
|
|||||||
@ -1,6 +1,6 @@
|
|||||||
{
|
{
|
||||||
"name": "pg-cloudflare",
|
"name": "pg-cloudflare",
|
||||||
"version": "1.2.7",
|
"version": "1.1.2-alpha.1",
|
||||||
"description": "A socket implementation that can run on Cloudflare Workers using native TCP connections.",
|
"description": "A socket implementation that can run on Cloudflare Workers using native TCP connections.",
|
||||||
"main": "dist/index.js",
|
"main": "dist/index.js",
|
||||||
"types": "dist/index.d.ts",
|
"types": "dist/index.d.ts",
|
||||||
@ -11,13 +11,10 @@
|
|||||||
},
|
},
|
||||||
"exports": {
|
"exports": {
|
||||||
".": {
|
".": {
|
||||||
"workerd": {
|
"import": "./esm/index.mjs",
|
||||||
"import": "./esm/index.mjs",
|
"require": "./dist/index.js",
|
||||||
"require": "./dist/index.js"
|
"default": "./dist/index.js"
|
||||||
},
|
}
|
||||||
"default": "./dist/empty.js"
|
|
||||||
},
|
|
||||||
"./package.json": "./package.json"
|
|
||||||
},
|
},
|
||||||
"scripts": {
|
"scripts": {
|
||||||
"build": "tsc",
|
"build": "tsc",
|
||||||
|
|||||||
4
packages/pg-connection-string/.gitignore
vendored
4
packages/pg-connection-string/.gitignore
vendored
@ -12,7 +12,6 @@ lib-cov
|
|||||||
|
|
||||||
# Coverage directory used by tools like istanbul
|
# Coverage directory used by tools like istanbul
|
||||||
coverage
|
coverage
|
||||||
.nyc_output
|
|
||||||
|
|
||||||
# Grunt intermediate storage (http://gruntjs.com/creating-plugins#storing-task-files)
|
# Grunt intermediate storage (http://gruntjs.com/creating-plugins#storing-task-files)
|
||||||
.grunt
|
.grunt
|
||||||
@ -25,6 +24,3 @@ build/Release
|
|||||||
# see https://npmjs.org/doc/faq.html#Should-I-check-my-node_modules-folder-into-git
|
# see https://npmjs.org/doc/faq.html#Should-I-check-my-node_modules-folder-into-git
|
||||||
node_modules
|
node_modules
|
||||||
package-lock.json
|
package-lock.json
|
||||||
|
|
||||||
# TypeScript output directory
|
|
||||||
dist
|
|
||||||
|
|||||||
@ -1,4 +0,0 @@
|
|||||||
{
|
|
||||||
"extension": ["js", "ts"],
|
|
||||||
"require": "tsx"
|
|
||||||
}
|
|
||||||
@ -3,6 +3,9 @@ pg-connection-string
|
|||||||
|
|
||||||
[](https://nodei.co/npm/pg-connection-string/)
|
[](https://nodei.co/npm/pg-connection-string/)
|
||||||
|
|
||||||
|
[](https://travis-ci.org/iceddev/pg-connection-string)
|
||||||
|
[](https://coveralls.io/github/iceddev/pg-connection-string?branch=master)
|
||||||
|
|
||||||
Functions for dealing with a PostgresSQL connection string
|
Functions for dealing with a PostgresSQL connection string
|
||||||
|
|
||||||
`parse` method taken from [node-postgres](https://github.com/brianc/node-postgres.git)
|
`parse` method taken from [node-postgres](https://github.com/brianc/node-postgres.git)
|
||||||
@ -12,9 +15,9 @@ MIT License
|
|||||||
## Usage
|
## Usage
|
||||||
|
|
||||||
```js
|
```js
|
||||||
const parse = require('pg-connection-string').parse;
|
var parse = require('pg-connection-string').parse;
|
||||||
|
|
||||||
const config = parse('postgres://someuser:somepassword@somehost:381/somedatabase')
|
var config = parse('postgres://someuser:somepassword@somehost:381/somedatabase')
|
||||||
```
|
```
|
||||||
|
|
||||||
The resulting config contains a subset of the following properties:
|
The resulting config contains a subset of the following properties:
|
||||||
@ -85,11 +88,11 @@ Query parameters follow a `?` character, including the following special query p
|
|||||||
* `encoding=<encoding>` - sets the `client_encoding` property
|
* `encoding=<encoding>` - sets the `client_encoding` property
|
||||||
* `ssl=1`, `ssl=true`, `ssl=0`, `ssl=false` - sets `ssl` to true or false, accordingly
|
* `ssl=1`, `ssl=true`, `ssl=0`, `ssl=false` - sets `ssl` to true or false, accordingly
|
||||||
* `uselibpqcompat=true` - use libpq semantics
|
* `uselibpqcompat=true` - use libpq semantics
|
||||||
* `sslmode=<sslmode>` when `uselibpqcompat=true` is not set
|
* `sslmode=<sslmode>` when `sslcompat` is not set
|
||||||
* `sslmode=disable` - sets `ssl` to false
|
* `sslmode=disable` - sets `ssl` to false
|
||||||
* `sslmode=no-verify` - sets `ssl` to `{ rejectUnauthorized: false }`
|
* `sslmode=no-verify` - sets `ssl` to `{ rejectUnauthorized: false }`
|
||||||
* `sslmode=prefer`, `sslmode=require`, `sslmode=verify-ca`, `sslmode=verify-full` - sets `ssl` to true
|
* `sslmode=prefer`, `sslmode=require`, `sslmode=verify-ca`, `sslmode=verify-full` - sets `ssl` to true
|
||||||
* `sslmode=<sslmode>` when `uselibpqcompat=true`
|
* `sslmode=<sslmode>` when `sslcompat=libpq`
|
||||||
* `sslmode=disable` - sets `ssl` to false
|
* `sslmode=disable` - sets `ssl` to false
|
||||||
* `sslmode=prefer` - sets `ssl` to `{ rejectUnauthorized: false }`
|
* `sslmode=prefer` - sets `ssl` to `{ rejectUnauthorized: false }`
|
||||||
* `sslmode=require` - sets `ssl` to `{ rejectUnauthorized: false }` unless `sslrootcert` is specified, in which case it behaves like `verify-ca`
|
* `sslmode=require` - sets `ssl` to `{ rejectUnauthorized: false }` unless `sslrootcert` is specified, in which case it behaves like `verify-ca`
|
||||||
|
|||||||
@ -2,7 +2,6 @@
|
|||||||
import connectionString from '../index.js'
|
import connectionString from '../index.js'
|
||||||
|
|
||||||
// Re-export the parse function
|
// Re-export the parse function
|
||||||
export default connectionString.parse
|
|
||||||
export const parse = connectionString.parse
|
export const parse = connectionString.parse
|
||||||
export const toClientConfig = connectionString.toClientConfig
|
export const toClientConfig = connectionString.toClientConfig
|
||||||
export const parseIntoClientConfig = connectionString.parseIntoClientConfig
|
export const parseIntoClientConfig = connectionString.parseIntoClientConfig
|
||||||
|
|||||||
15
packages/pg-connection-string/index.d.ts
vendored
15
packages/pg-connection-string/index.d.ts
vendored
@ -1,19 +1,12 @@
import { ClientConfig } from 'pg'

-export function parse(connectionString: string, options?: Options): ConnectionOptions
+export function parse(connectionString: string, options: Options): ConnectionOptions

export interface Options {
  // Use libpq semantics when interpreting the connection string
  useLibpqCompat?: boolean
}

-interface SSLConfig {
-  ca?: string
-  cert?: string | null
-  key?: string
-  rejectUnauthorized?: boolean
-}
-
export interface ConnectionOptions {
  host: string | null
  password?: string

@ -21,15 +14,11 @@ export interface ConnectionOptions {
  port?: string | null
  database: string | null | undefined
  client_encoding?: string
-  ssl?: boolean | string | SSLConfig
+  ssl?: boolean | string

  application_name?: string
  fallback_application_name?: string
  options?: string
-  keepalives?: number
-
-  // We allow any other options to be passed through
-  [key: string]: unknown
}

export function toClientConfig(config: ConnectionOptions): ClientConfig
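To show how the exports declared above fit together, here is a short sketch (the connection string reuses values from the tests in this compare): `toClientConfig` turns the string-valued `ConnectionOptions` produced by `parse` into a `ClientConfig` that pg's constructors accept, for example converting the port to a number, and `parseIntoClientConfig` is the one-step shorthand for the same two calls.

```js
const { parse, toClientConfig, parseIntoClientConfig } = require('pg-connection-string')
const { Client } = require('pg')

const options = parse('postgres://brian:pw@boom:381/lala') // options.port is the string '381'
const config = toClientConfig(options)                     // config.port is the number 381
const client = new Client(config)

// Equivalent one-step form:
const sameConfig = parseIntoClientConfig('postgres://brian:pw@boom:381/lala')
```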
@ -1,7 +1,5 @@
'use strict'

-const { emitWarning } = require('node:process')
-
//Parse method copied from https://github.com/brianc/node-postgres
//Copyright (c) 2010-2014 Brian Carlson (brian.m.carlson@gmail.com)
//MIT License

@ -25,17 +23,11 @@ function parse(str, options = {}) {
  }

  try {
-    try {
-      result = new URL(str, 'postgres://base')
-    } catch (e) {
-      // The URL is invalid so try again with a dummy host
-      result = new URL(str.replace('@/', '@___DUMMY___/'), 'postgres://base')
-      dummyHost = true
-    }
-  } catch (err) {
-    // Remove the input from the error message to avoid leaking sensitive information
-    err.input && (err.input = '*****REDACTED*****')
-    throw err
+    result = new URL(str, 'postgres://base')
+  } catch (e) {
+    // The URL is invalid so try again with a dummy host
+    result = new URL(str.replace('@/', '@___DUMMY___/'), 'postgres://base')
+    dummyHost = true
  }

  // We'd like to use Object.fromEntries() here but Node.js 10 does not support it

@ -141,9 +133,6 @@ function parse(str, options = {}) {
      case 'require':
      case 'verify-ca':
      case 'verify-full': {
-        if (config.sslmode !== 'verify-full') {
-          deprecatedSslModeWarning(config.sslmode)
-        }
        break
      }
      case 'no-verify': {

@ -180,8 +169,12 @@ function toClientConfig(config) {
      if (typeof sslConfig === 'boolean') {
        c[key] = sslConfig
      }
-
-      if (typeof sslConfig === 'object') {
+      // else path is taken. multiple tests produce a sslConfig that is an object
+      // and we can console.log to see that we take this path
+      //
+      // see https://github.com/istanbuljs/babel-plugin-istanbul/issues/186#issuecomment-1137765139
+      // istanbul ignore else
+      else if (typeof sslConfig === 'object') {
        c[key] = toConnectionOptions(sslConfig)
      }
    } else if (value !== undefined && value !== null) {

@ -212,20 +205,6 @@ function parseIntoClientConfig(str) {
  return toClientConfig(parse(str))
}

-function deprecatedSslModeWarning(sslmode) {
-  if (!deprecatedSslModeWarning.warned) {
-    deprecatedSslModeWarning.warned = true
-    emitWarning(`SECURITY WARNING: The SSL modes 'prefer', 'require', and 'verify-ca' are treated as aliases for 'verify-full'.
-In the next major version (pg-connection-string v3.0.0 and pg v9.0.0), these modes will adopt standard libpq semantics, which have weaker security guarantees.
-
-To prepare for this change:
-- If you want the current behavior, explicitly use 'sslmode=verify-full'
-- If you want libpq compatibility now, use 'uselibpqcompat=true&sslmode=${sslmode}'
-
-See https://www.postgresql.org/docs/current/libpq-ssl.html for libpq SSL mode definitions.`)
-  }
-}
-
module.exports = parse

parse.parse = parse
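The SECURITY WARNING emitted by `deprecatedSslModeWarning` above names two migration paths; the sketch below illustrates both, with results matching the assertions in the test files elsewhere in this compare (the host and database names are placeholders):

```js
const { parse } = require('pg-connection-string')

// Keep today's strict default explicitly:
parse('postgres://db.example.com/app?sslmode=verify-full').ssl
// => {}  (TLS on with default, full verification)

// Opt into libpq semantics now:
parse('postgres://db.example.com/app?uselibpqcompat=true&sslmode=require').ssl
// => { rejectUnauthorized: false }  (libpq 'require' does not verify certificates)
```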
@ -1,6 +1,6 @@
|
|||||||
{
|
{
|
||||||
"name": "pg-connection-string",
|
"name": "pg-connection-string",
|
||||||
"version": "2.9.1",
|
"version": "2.7.1-alpha.0",
|
||||||
"description": "Functions for dealing with a PostgresSQL connection string",
|
"description": "Functions for dealing with a PostgresSQL connection string",
|
||||||
"main": "./index.js",
|
"main": "./index.js",
|
||||||
"types": "./index.d.ts",
|
"types": "./index.d.ts",
|
||||||
@ -13,8 +13,9 @@
|
|||||||
}
|
}
|
||||||
},
|
},
|
||||||
"scripts": {
|
"scripts": {
|
||||||
"test": "nyc --reporter=lcov mocha && npm run check-coverage",
|
"test": "istanbul cover _mocha && npm run check-coverage",
|
||||||
"check-coverage": "nyc check-coverage --statements 100 --branches 100 --lines 100 --functions 100"
|
"check-coverage": "istanbul check-coverage --statements 100 --branches 100 --lines 100 --functions 100",
|
||||||
|
"coveralls": "cat ./coverage/lcov.info | ./node_modules/.bin/coveralls"
|
||||||
},
|
},
|
||||||
"repository": {
|
"repository": {
|
||||||
"type": "git",
|
"type": "git",
|
||||||
@ -34,14 +35,10 @@
|
|||||||
},
|
},
|
||||||
"homepage": "https://github.com/brianc/node-postgres/tree/master/packages/pg-connection-string",
|
"homepage": "https://github.com/brianc/node-postgres/tree/master/packages/pg-connection-string",
|
||||||
"devDependencies": {
|
"devDependencies": {
|
||||||
"@types/pg": "^8.12.0",
|
|
||||||
"chai": "^4.1.1",
|
"chai": "^4.1.1",
|
||||||
"coveralls": "^3.0.4",
|
"coveralls": "^3.0.4",
|
||||||
"istanbul": "^0.4.5",
|
"istanbul": "^0.4.5",
|
||||||
"mocha": "^10.5.2",
|
"mocha": "^10.5.2"
|
||||||
"nyc": "^15",
|
|
||||||
"tsx": "^4.19.4",
|
|
||||||
"typescript": "^4.0.3"
|
|
||||||
},
|
},
|
||||||
"files": [
|
"files": [
|
||||||
"index.js",
|
"index.js",
|
||||||
|
|||||||
@ -1,19 +1,21 @@
|
|||||||
import chai from 'chai'
|
'use strict'
|
||||||
|
|
||||||
|
const chai = require('chai')
|
||||||
const expect = chai.expect
|
const expect = chai.expect
|
||||||
chai.should()
|
chai.should()
|
||||||
|
|
||||||
import { parse, toClientConfig, parseIntoClientConfig } from '../'
|
const { parse, toClientConfig, parseIntoClientConfig } = require('../')
|
||||||
|
|
||||||
describe('toClientConfig', function () {
|
describe('toClientConfig', function () {
|
||||||
it('converts connection info', function () {
|
it('converts connection info', function () {
|
||||||
const config = parse('postgres://brian:pw@boom:381/lala')
|
const config = parse('postgres://brian:pw@boom:381/lala')
|
||||||
const clientConfig = toClientConfig(config)
|
const clientConfig = toClientConfig(config)
|
||||||
|
|
||||||
clientConfig.user?.should.equal('brian')
|
clientConfig.user.should.equal('brian')
|
||||||
clientConfig.password?.should.equal('pw')
|
clientConfig.password.should.equal('pw')
|
||||||
clientConfig.host?.should.equal('boom')
|
clientConfig.host.should.equal('boom')
|
||||||
clientConfig.port?.should.equal(381)
|
clientConfig.port.should.equal(381)
|
||||||
clientConfig.database?.should.equal('lala')
|
clientConfig.database.should.equal('lala')
|
||||||
})
|
})
|
||||||
|
|
||||||
it('converts query params', function () {
|
it('converts query params', function () {
|
||||||
@ -22,47 +24,45 @@ describe('toClientConfig', function () {
|
|||||||
)
|
)
|
||||||
const clientConfig = toClientConfig(config)
|
const clientConfig = toClientConfig(config)
|
||||||
|
|
||||||
clientConfig.application_name?.should.equal('TheApp')
|
clientConfig.application_name.should.equal('TheApp')
|
||||||
clientConfig.fallback_application_name?.should.equal('TheAppFallback')
|
clientConfig.fallback_application_name.should.equal('TheAppFallback')
|
||||||
clientConfig.client_encoding?.should.equal('utf8')
|
clientConfig.client_encoding.should.equal('utf8')
|
||||||
clientConfig.options?.should.equal('-c geqo=off')
|
clientConfig.options.should.equal('-c geqo=off')
|
||||||
})
|
})
|
||||||
|
|
||||||
it('converts SSL boolean', function () {
|
it('converts SSL boolean', function () {
|
||||||
const config = parse('pg:///?ssl=true')
|
const config = parse('pg:///?ssl=true')
|
||||||
const clientConfig = toClientConfig(config)
|
const clientConfig = toClientConfig(config)
|
||||||
|
|
||||||
clientConfig.ssl?.should.equal(true)
|
clientConfig.ssl.should.equal(true)
|
||||||
})
|
})
|
||||||
|
|
||||||
it('converts sslmode=disable', function () {
|
it('converts sslmode=disable', function () {
|
||||||
const config = parse('pg:///?sslmode=disable')
|
const config = parse('pg:///?sslmode=disable')
|
||||||
const clientConfig = toClientConfig(config)
|
const clientConfig = toClientConfig(config)
|
||||||
|
|
||||||
clientConfig.ssl?.should.equal(false)
|
clientConfig.ssl.should.equal(false)
|
||||||
})
|
})
|
||||||
|
|
||||||
it('converts sslmode=noverify', function () {
|
it('converts sslmode=noverify', function () {
|
||||||
const config = parse('pg:///?sslmode=no-verify')
|
const config = parse('pg:///?sslmode=no-verify')
|
||||||
const clientConfig = toClientConfig(config)
|
const clientConfig = toClientConfig(config)
|
||||||
|
|
||||||
clientConfig.ssl?.should.deep.equal({
|
clientConfig.ssl.rejectUnauthorized.should.equal(false)
|
||||||
rejectUnauthorized: false,
|
|
||||||
})
|
|
||||||
})
|
})
|
||||||
|
|
||||||
it('converts other sslmode options', function () {
|
it('converts other sslmode options', function () {
|
||||||
const config = parse('pg:///?sslmode=verify-ca')
|
const config = parse('pg:///?sslmode=verify-ca')
|
||||||
const clientConfig = toClientConfig(config)
|
const clientConfig = toClientConfig(config)
|
||||||
|
|
||||||
clientConfig.ssl?.should.deep.equal({})
|
clientConfig.ssl.should.deep.equal({})
|
||||||
})
|
})
|
||||||
|
|
||||||
it('converts other sslmode options', function () {
|
it('converts other sslmode options', function () {
|
||||||
const config = parse('pg:///?sslmode=verify-ca')
|
const config = parse('pg:///?sslmode=verify-ca')
|
||||||
const clientConfig = toClientConfig(config)
|
const clientConfig = toClientConfig(config)
|
||||||
|
|
||||||
clientConfig.ssl?.should.deep.equal({})
|
clientConfig.ssl.should.deep.equal({})
|
||||||
})
|
})
|
||||||
|
|
||||||
it('converts ssl cert options', function () {
|
it('converts ssl cert options', function () {
|
||||||
@ -77,7 +77,7 @@ describe('toClientConfig', function () {
|
|||||||
const config = parse(connectionString)
|
const config = parse(connectionString)
|
||||||
const clientConfig = toClientConfig(config)
|
const clientConfig = toClientConfig(config)
|
||||||
|
|
||||||
clientConfig.ssl?.should.deep.equal({
|
clientConfig.ssl.should.deep.equal({
|
||||||
ca: 'example ca\n',
|
ca: 'example ca\n',
|
||||||
cert: 'example cert\n',
|
cert: 'example cert\n',
|
||||||
key: 'example key\n',
|
key: 'example key\n',
|
||||||
@ -87,9 +87,9 @@ describe('toClientConfig', function () {
|
|||||||
it('converts unix domain sockets', function () {
|
it('converts unix domain sockets', function () {
|
||||||
const config = parse('socket:/some path/?db=my[db]&encoding=utf8&client_encoding=bogus')
|
const config = parse('socket:/some path/?db=my[db]&encoding=utf8&client_encoding=bogus')
|
||||||
const clientConfig = toClientConfig(config)
|
const clientConfig = toClientConfig(config)
|
||||||
clientConfig.host?.should.equal('/some path/')
|
clientConfig.host.should.equal('/some path/')
|
||||||
clientConfig.database?.should.equal('my[db]', 'must to be escaped and unescaped through "my%5Bdb%5D"')
|
clientConfig.database.should.equal('my[db]', 'must to be escaped and unescaped through "my%5Bdb%5D"')
|
||||||
clientConfig.client_encoding?.should.equal('utf8')
|
clientConfig.client_encoding.should.equal('utf8')
|
||||||
})
|
})
|
||||||
|
|
||||||
it('handles invalid port', function () {
|
it('handles invalid port', function () {
|
||||||
@ -106,9 +106,9 @@ describe('toClientConfig', function () {
|
|||||||
|
|
||||||
const clientConfig = toClientConfig(config)
|
const clientConfig = toClientConfig(config)
|
||||||
|
|
||||||
clientConfig.host?.should.equal('boom')
|
clientConfig.host.should.equal('boom')
|
||||||
clientConfig.database?.should.equal('lala')
|
clientConfig.database.should.equal('lala')
|
||||||
clientConfig.ssl?.should.deep.equal({})
|
clientConfig.ssl.should.deep.equal({})
|
||||||
})
|
})
|
||||||
})
|
})
|
||||||
|
|
||||||
@ -116,10 +116,10 @@ describe('parseIntoClientConfig', function () {
|
|||||||
it('converts url', function () {
|
it('converts url', function () {
|
||||||
const clientConfig = parseIntoClientConfig('postgres://brian:pw@boom:381/lala')
|
const clientConfig = parseIntoClientConfig('postgres://brian:pw@boom:381/lala')
|
||||||
|
|
||||||
clientConfig.user?.should.equal('brian')
|
clientConfig.user.should.equal('brian')
|
||||||
clientConfig.password?.should.equal('pw')
|
clientConfig.password.should.equal('pw')
|
||||||
clientConfig.host?.should.equal('boom')
|
clientConfig.host.should.equal('boom')
|
||||||
clientConfig.port?.should.equal(381)
|
clientConfig.port.should.equal(381)
|
||||||
clientConfig.database?.should.equal('lala')
|
clientConfig.database.should.equal('lala')
|
||||||
})
|
})
|
||||||
})
|
})
|
||||||
packages/pg-connection-string/test/parse.js (normal file, 435 additions)
@ -0,0 +1,435 @@
|
|||||||
|
'use strict'
|
||||||
|
|
||||||
|
var chai = require('chai')
|
||||||
|
var expect = chai.expect
|
||||||
|
chai.should()
|
||||||
|
|
||||||
|
var parse = require('../').parse
|
||||||
|
|
||||||
|
describe('parse', function () {
|
||||||
|
it('using connection string in client constructor', function () {
|
||||||
|
var subject = parse('postgres://brian:pw@boom:381/lala')
|
||||||
|
subject.user.should.equal('brian')
|
||||||
|
subject.password.should.equal('pw')
|
||||||
|
subject.host.should.equal('boom')
|
||||||
|
subject.port.should.equal('381')
|
||||||
|
subject.database.should.equal('lala')
|
||||||
|
})
|
||||||
|
|
||||||
|
it('escape spaces if present', function () {
|
||||||
|
var subject = parse('postgres://localhost/post gres')
|
||||||
|
subject.database.should.equal('post gres')
|
||||||
|
})
|
||||||
|
|
||||||
|
it('do not double escape spaces', function () {
|
||||||
|
var subject = parse('postgres://localhost/post%20gres')
|
||||||
|
subject.database.should.equal('post gres')
|
||||||
|
})
|
||||||
|
|
||||||
|
it('initializing with unix domain socket', function () {
|
||||||
|
var subject = parse('/var/run/')
|
||||||
|
subject.host.should.equal('/var/run/')
|
||||||
|
})
|
||||||
|
|
||||||
|
it('initializing with unix domain socket and a specific database, the simple way', function () {
|
||||||
|
var subject = parse('/var/run/ mydb')
|
||||||
|
subject.host.should.equal('/var/run/')
|
||||||
|
subject.database.should.equal('mydb')
|
||||||
|
})
|
||||||
|
|
||||||
|
it('initializing with unix domain socket, the health way', function () {
|
||||||
|
var subject = parse('socket:/some path/?db=my[db]&encoding=utf8')
|
||||||
|
subject.host.should.equal('/some path/')
|
||||||
|
subject.database.should.equal('my[db]', 'must to be escaped and unescaped trough "my%5Bdb%5D"')
|
||||||
|
subject.client_encoding.should.equal('utf8')
|
||||||
|
})
|
||||||
|
|
||||||
|
it('initializing with unix domain socket, the escaped health way', function () {
|
||||||
|
var subject = parse('socket:/some%20path/?db=my%2Bdb&encoding=utf8')
|
||||||
|
subject.host.should.equal('/some path/')
|
||||||
|
subject.database.should.equal('my+db')
|
||||||
|
subject.client_encoding.should.equal('utf8')
|
||||||
|
})
|
||||||
|
|
||||||
|
it('initializing with unix domain socket, username and password', function () {
|
||||||
|
var subject = parse('socket://brian:pw@/var/run/?db=mydb')
|
||||||
|
subject.user.should.equal('brian')
|
||||||
|
subject.password.should.equal('pw')
|
||||||
|
subject.host.should.equal('/var/run/')
|
||||||
|
subject.database.should.equal('mydb')
|
||||||
|
})
|
||||||
|
|
||||||
|
it('password contains < and/or > characters', function () {
|
||||||
|
var sourceConfig = {
|
||||||
|
user: 'brian',
|
||||||
|
password: 'hello<ther>e',
|
||||||
|
host: 'localhost',
|
||||||
|
port: 5432,
|
||||||
|
database: 'postgres',
|
||||||
|
}
|
||||||
|
var connectionString =
|
||||||
|
'postgres://' +
|
||||||
|
sourceConfig.user +
|
||||||
|
':' +
|
||||||
|
sourceConfig.password +
|
||||||
|
'@' +
|
||||||
|
sourceConfig.host +
|
||||||
|
':' +
|
||||||
|
sourceConfig.port +
|
||||||
|
'/' +
|
||||||
|
sourceConfig.database
|
||||||
|
var subject = parse(connectionString)
|
||||||
|
subject.password.should.equal(sourceConfig.password)
|
||||||
|
})
|
||||||
|
|
||||||
|
it('password contains colons', function () {
|
||||||
|
var sourceConfig = {
|
||||||
|
user: 'brian',
|
||||||
|
password: 'hello:pass:world',
|
||||||
|
host: 'localhost',
|
||||||
|
port: 5432,
|
||||||
|
database: 'postgres',
|
||||||
|
}
|
||||||
|
var connectionString =
|
||||||
|
'postgres://' +
|
||||||
|
sourceConfig.user +
|
||||||
|
':' +
|
||||||
|
sourceConfig.password +
|
||||||
|
'@' +
|
||||||
|
sourceConfig.host +
|
||||||
|
':' +
|
||||||
|
sourceConfig.port +
|
||||||
|
'/' +
|
||||||
|
sourceConfig.database
|
||||||
|
var subject = parse(connectionString)
|
||||||
|
subject.password.should.equal(sourceConfig.password)
|
||||||
|
})
|
||||||
|
|
||||||
|
it('username or password contains weird characters', function () {
|
||||||
|
var strang = 'pg://my f%irst name:is&%awesome!@localhost:9000'
|
||||||
|
var subject = parse(strang)
|
||||||
|
subject.user.should.equal('my f%irst name')
|
||||||
|
subject.password.should.equal('is&%awesome!')
|
||||||
|
subject.host.should.equal('localhost')
|
||||||
|
})
|
||||||
|
|
||||||
|
it('url is properly encoded', function () {
|
||||||
|
var encoded = 'pg://bi%25na%25%25ry%20:s%40f%23@localhost/%20u%2520rl'
|
||||||
|
var subject = parse(encoded)
|
||||||
|
subject.user.should.equal('bi%na%%ry ')
|
||||||
|
subject.password.should.equal('s@f#')
|
||||||
|
subject.host.should.equal('localhost')
|
||||||
|
subject.database.should.equal(' u%20rl')
|
||||||
|
})
|
||||||
|
|
||||||
|
it('relative url sets database', function () {
|
||||||
|
var relative = 'different_db_on_default_host'
|
||||||
|
var subject = parse(relative)
|
||||||
|
subject.database.should.equal('different_db_on_default_host')
|
||||||
|
})
|
||||||
|
|
||||||
|
it('no pathname returns null database', function () {
|
||||||
|
var subject = parse('pg://myhost')
|
||||||
|
;(subject.database === null).should.equal(true)
|
||||||
|
})
|
||||||
|
|
||||||
|
it('pathname of "/" returns null database', function () {
|
||||||
|
var subject = parse('pg://myhost/')
|
||||||
|
subject.host.should.equal('myhost')
|
||||||
|
;(subject.database === null).should.equal(true)
|
||||||
|
})
|
||||||
|
|
||||||
|
it('configuration parameter host', function () {
|
||||||
|
var subject = parse('pg://user:pass@/dbname?host=/unix/socket')
|
||||||
|
subject.user.should.equal('user')
|
||||||
|
subject.password.should.equal('pass')
|
||||||
|
subject.host.should.equal('/unix/socket')
|
||||||
|
subject.database.should.equal('dbname')
|
||||||
|
})
|
||||||
|
|
||||||
|
it('configuration parameter host overrides url host', function () {
|
||||||
|
var subject = parse('pg://user:pass@localhost/dbname?host=/unix/socket')
|
||||||
|
subject.database.should.equal('dbname')
|
||||||
|
subject.host.should.equal('/unix/socket')
|
||||||
|
})
|
||||||
|
|
||||||
|
it('url with encoded socket', function () {
|
||||||
|
var subject = parse('pg://user:pass@%2Funix%2Fsocket/dbname')
|
||||||
|
subject.user.should.equal('user')
|
||||||
|
subject.password.should.equal('pass')
|
||||||
|
subject.host.should.equal('/unix/socket')
|
||||||
|
subject.database.should.equal('dbname')
|
||||||
|
})
|
||||||
|
|
||||||
|
it('url with real host and an encoded db name', function () {
|
||||||
|
var subject = parse('pg://user:pass@localhost/%2Fdbname')
|
||||||
|
subject.user.should.equal('user')
|
||||||
|
subject.password.should.equal('pass')
|
||||||
|
subject.host.should.equal('localhost')
|
||||||
|
subject.database.should.equal('%2Fdbname')
|
||||||
|
})
|
||||||
|
|
||||||
|
it('configuration parameter host treats encoded host as part of the db name', function () {
|
||||||
|
var subject = parse('pg://user:pass@%2Funix%2Fsocket/dbname?host=localhost')
|
||||||
|
subject.user.should.equal('user')
|
||||||
|
subject.password.should.equal('pass')
|
||||||
|
subject.host.should.equal('localhost')
|
||||||
|
subject.database.should.equal('%2Funix%2Fsocket/dbname')
|
||||||
|
})
|
||||||
|
|
||||||
|
it('configuration parameter application_name', function () {
|
||||||
|
var connectionString = 'pg:///?application_name=TheApp'
|
||||||
|
var subject = parse(connectionString)
|
||||||
|
subject.application_name.should.equal('TheApp')
|
||||||
|
})
|
||||||
|
|
||||||
|
it('configuration parameter fallback_application_name', function () {
|
||||||
|
var connectionString = 'pg:///?fallback_application_name=TheAppFallback'
|
||||||
|
var subject = parse(connectionString)
|
||||||
|
subject.fallback_application_name.should.equal('TheAppFallback')
|
||||||
|
})
|
||||||
|
|
||||||
|
it('configuration parameter options', function () {
|
||||||
|
var connectionString = 'pg:///?options=-c geqo=off'
|
||||||
|
var subject = parse(connectionString)
|
||||||
|
subject.options.should.equal('-c geqo=off')
|
||||||
|
})
|
||||||
|
|
||||||
|
it('configuration parameter ssl=true', function () {
|
||||||
|
var connectionString = 'pg:///?ssl=true'
|
||||||
|
var subject = parse(connectionString)
|
||||||
|
subject.ssl.should.equal(true)
|
||||||
|
})
|
||||||
|
|
||||||
|
it('configuration parameter ssl=1', function () {
|
||||||
|
var connectionString = 'pg:///?ssl=1'
|
||||||
|
var subject = parse(connectionString)
|
||||||
|
subject.ssl.should.equal(true)
|
||||||
|
})
|
||||||
|
|
||||||
|
it('configuration parameter ssl=0', function () {
|
||||||
|
var connectionString = 'pg:///?ssl=0'
|
||||||
|
var subject = parse(connectionString)
|
||||||
|
subject.ssl.should.equal(false)
|
||||||
|
})
|
||||||
|
|
||||||
|
it('set ssl', function () {
|
||||||
|
var subject = parse('pg://myhost/db?ssl=1')
|
||||||
|
subject.ssl.should.equal(true)
|
||||||
|
})
|
||||||
|
|
||||||
|
it('configuration parameter sslcert=/path/to/cert', function () {
|
||||||
|
var connectionString = 'pg:///?sslcert=' + __dirname + '/example.cert'
|
||||||
|
var subject = parse(connectionString)
|
||||||
|
subject.ssl.should.eql({
|
||||||
|
cert: 'example cert\n',
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
it('configuration parameter sslkey=/path/to/key', function () {
|
||||||
|
var connectionString = 'pg:///?sslkey=' + __dirname + '/example.key'
|
||||||
|
var subject = parse(connectionString)
|
||||||
|
subject.ssl.should.eql({
|
||||||
|
key: 'example key\n',
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
it('configuration parameter sslrootcert=/path/to/ca', function () {
|
||||||
|
var connectionString = 'pg:///?sslrootcert=' + __dirname + '/example.ca'
|
||||||
|
var subject = parse(connectionString)
|
||||||
|
subject.ssl.should.eql({
|
||||||
|
ca: 'example ca\n',
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
it('configuration parameter sslmode=no-verify', function () {
|
||||||
|
var connectionString = 'pg:///?sslmode=no-verify'
|
||||||
|
var subject = parse(connectionString)
|
||||||
|
subject.ssl.should.eql({
|
||||||
|
rejectUnauthorized: false,
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
it('configuration parameter sslmode=disable', function () {
|
||||||
|
var connectionString = 'pg:///?sslmode=disable'
|
||||||
|
var subject = parse(connectionString)
|
||||||
|
subject.ssl.should.eql(false)
|
||||||
|
})
|
||||||
|
|
||||||
|
it('configuration parameter sslmode=prefer', function () {
|
||||||
|
var connectionString = 'pg:///?sslmode=prefer'
|
||||||
|
var subject = parse(connectionString)
|
||||||
|
subject.ssl.should.eql({})
|
||||||
|
})
|
||||||
|
|
||||||
|
it('configuration parameter sslmode=require', function () {
|
||||||
|
var connectionString = 'pg:///?sslmode=require'
|
||||||
|
var subject = parse(connectionString)
|
||||||
|
subject.ssl.should.eql({})
|
||||||
|
})
|
||||||
|
|
||||||
|
it('configuration parameter sslmode=verify-ca', function () {
|
||||||
|
var connectionString = 'pg:///?sslmode=verify-ca'
|
||||||
|
var subject = parse(connectionString)
|
||||||
|
subject.ssl.should.eql({})
|
||||||
|
})
|
||||||
|
|
||||||
|
it('configuration parameter sslmode=verify-full', function () {
|
||||||
|
var connectionString = 'pg:///?sslmode=verify-full'
|
||||||
|
var subject = parse(connectionString)
|
||||||
|
subject.ssl.should.eql({})
|
||||||
|
})
|
||||||
|
|
||||||
|
it('configuration parameter ssl=true and sslmode=require still work with sslrootcert=/path/to/ca', function () {
|
||||||
|
var connectionString = 'pg:///?ssl=true&sslrootcert=' + __dirname + '/example.ca&sslmode=require'
|
||||||
|
var subject = parse(connectionString)
|
||||||
|
subject.ssl.should.eql({
|
||||||
|
ca: 'example ca\n',
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
it('configuration parameter sslmode=disable with uselibpqcompat query param', function () {
|
||||||
|
var connectionString = 'pg:///?sslmode=disable&uselibpqcompat=true'
|
||||||
|
var subject = parse(connectionString)
|
||||||
|
subject.ssl.should.eql(false)
|
||||||
|
})
|
||||||
|
|
||||||
|
it('configuration parameter sslmode=prefer with uselibpqcompat query param', function () {
|
||||||
|
var connectionString = 'pg:///?sslmode=prefer&uselibpqcompat=true'
|
||||||
|
var subject = parse(connectionString)
|
||||||
|
subject.ssl.should.eql({
|
||||||
|
rejectUnauthorized: false,
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
it('configuration parameter sslmode=require with uselibpqcompat query param', function () {
|
||||||
|
var connectionString = 'pg:///?sslmode=require&uselibpqcompat=true'
|
||||||
|
var subject = parse(connectionString)
|
||||||
|
subject.ssl.should.eql({
|
||||||
|
rejectUnauthorized: false,
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
it('configuration parameter sslmode=verify-ca with uselibpqcompat query param', function () {
|
||||||
|
var connectionString = 'pg:///?sslmode=verify-ca&uselibpqcompat=true'
|
||||||
|
expect(function () {
|
||||||
|
parse(connectionString)
|
||||||
|
}).to.throw()
|
||||||
|
})
|
||||||
|
|
||||||
|
it('configuration parameter sslmode=verify-ca and sslrootcert with uselibpqcompat query param', function () {
|
||||||
|
var connectionString = 'pg:///?sslmode=verify-ca&uselibpqcompat=true&sslrootcert=' + __dirname + '/example.ca'
|
||||||
|
var subject = parse(connectionString)
|
||||||
|
subject.ssl.should.have.property('checkServerIdentity').that.is.a('function')
|
||||||
|
expect(subject.ssl.checkServerIdentity()).be.undefined
|
||||||
|
})
|
||||||
|
|
||||||
|
it('configuration parameter sslmode=verify-full with uselibpqcompat query param', function () {
|
||||||
|
var connectionString = 'pg:///?sslmode=verify-full&uselibpqcompat=true'
|
||||||
|
var subject = parse(connectionString)
|
||||||
|
subject.ssl.should.eql({})
|
||||||
|
})
|
||||||
|
|
||||||
|
it('configuration parameter ssl=true and sslmode=require still work with sslrootcert=/path/to/ca with uselibpqcompat query param', function () {
|
||||||
|
var connectionString =
|
||||||
|
'pg:///?ssl=true&sslrootcert=' + __dirname + '/example.ca&sslmode=require&uselibpqcompat=true'
|
||||||
|
var subject = parse(connectionString)
|
||||||
|
subject.ssl.should.have.property('ca', 'example ca\n')
|
||||||
|
subject.ssl.should.have.property('checkServerIdentity').that.is.a('function')
|
||||||
|
expect(subject.ssl.checkServerIdentity()).be.undefined
|
||||||
|
})
|
||||||
|
|
||||||
|
it('configuration parameter sslmode=disable with useLibpqCompat option', function () {
|
||||||
|
var connectionString = 'pg:///?sslmode=disable'
|
||||||
|
var subject = parse(connectionString, { useLibpqCompat: true })
|
||||||
|
subject.ssl.should.eql(false)
|
||||||
|
})
|
||||||
|
|
||||||
|
it('configuration parameter sslmode=prefer with useLibpqCompat option', function () {
|
||||||
|
var connectionString = 'pg:///?sslmode=prefer'
|
||||||
|
var subject = parse(connectionString, { useLibpqCompat: true })
|
||||||
|
subject.ssl.should.eql({
|
||||||
|
rejectUnauthorized: false,
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
it('configuration parameter sslmode=require with useLibpqCompat option', function () {
|
||||||
|
var connectionString = 'pg:///?sslmode=require'
|
||||||
|
var subject = parse(connectionString, { useLibpqCompat: true })
|
||||||
|
subject.ssl.should.eql({
|
||||||
|
rejectUnauthorized: false,
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
it('configuration parameter sslmode=verify-ca with useLibpqCompat option', function () {
|
||||||
|
var connectionString = 'pg:///?sslmode=verify-ca'
|
||||||
|
expect(function () {
|
||||||
|
parse(connectionString, { useLibpqCompat: true })
|
||||||
|
}).to.throw()
|
||||||
|
})
|
||||||
|
|
||||||
|
it('configuration parameter sslmode=verify-ca and sslrootcert with useLibpqCompat option', function () {
|
||||||
|
var connectionString = 'pg:///?sslmode=verify-ca&sslrootcert=' + __dirname + '/example.ca'
|
||||||
|
var subject = parse(connectionString, { useLibpqCompat: true })
|
||||||
|
subject.ssl.should.have.property('checkServerIdentity').that.is.a('function')
|
||||||
|
expect(subject.ssl.checkServerIdentity()).be.undefined
|
||||||
|
})
|
||||||
|
|
||||||
|
it('configuration parameter sslmode=verify-full with useLibpqCompat option', function () {
|
||||||
|
var connectionString = 'pg:///?sslmode=verify-full'
|
||||||
|
var subject = parse(connectionString, { useLibpqCompat: true })
|
||||||
|
subject.ssl.should.eql({})
|
||||||
|
})
|
||||||
|
|
||||||
|
it('configuration parameter ssl=true and sslmode=require still work with sslrootcert=/path/to/ca with useLibpqCompat option', function () {
|
||||||
|
var connectionString = 'pg:///?ssl=true&sslrootcert=' + __dirname + '/example.ca&sslmode=require'
|
||||||
|
var subject = parse(connectionString, { useLibpqCompat: true })
|
||||||
|
subject.ssl.should.have.property('ca', 'example ca\n')
|
||||||
|
subject.ssl.should.have.property('checkServerIdentity').that.is.a('function')
|
||||||
|
expect(subject.ssl.checkServerIdentity()).be.undefined
|
||||||
|
})
|
||||||
|
|
||||||
|
it('does not allow sslcompat query parameter and useLibpqCompat option at the same time', function () {
|
||||||
|
var connectionString = 'pg:///?uselibpqcompat=true'
|
||||||
|
expect(function () {
|
||||||
|
parse(connectionString, { useLibpqCompat: true })
|
||||||
|
}).to.throw()
|
||||||
|
})
|
||||||
|
|
||||||
|
it('allow other params like max, ...', function () {
|
||||||
|
var subject = parse('pg://myhost/db?max=18&min=4')
|
||||||
|
subject.max.should.equal('18')
|
||||||
|
subject.min.should.equal('4')
|
||||||
|
})
|
||||||
|
|
||||||
|
it('configuration parameter keepalives', function () {
|
||||||
|
var connectionString = 'pg:///?keepalives=1'
|
||||||
|
var subject = parse(connectionString)
|
||||||
|
subject.keepalives.should.equal('1')
|
||||||
|
})
|
||||||
|
|
||||||
|
it('unknown configuration parameter is passed into client', function () {
|
||||||
|
var connectionString = 'pg:///?ThereIsNoSuchPostgresParameter=1234'
|
||||||
|
var subject = parse(connectionString)
|
||||||
|
subject.ThereIsNoSuchPostgresParameter.should.equal('1234')
|
||||||
|
})
|
||||||
|
|
||||||
|
it('do not override a config field with value from query string', function () {
|
||||||
|
var subject = parse('socket:/some path/?db=my[db]&encoding=utf8&client_encoding=bogus')
|
||||||
|
subject.host.should.equal('/some path/')
|
||||||
|
subject.database.should.equal('my[db]', 'must to be escaped and unescaped through "my%5Bdb%5D"')
|
||||||
|
subject.client_encoding.should.equal('utf8')
|
||||||
|
})
|
||||||
|
|
||||||
|
it('return last value of repeated parameter', function () {
|
||||||
|
var connectionString = 'pg:///?keepalives=1&keepalives=0'
|
||||||
|
var subject = parse(connectionString)
|
||||||
|
subject.keepalives.should.equal('0')
|
||||||
|
})
|
||||||
|
|
||||||
|
it('use the port specified in the query parameters', function () {
|
||||||
|
var connectionString = 'postgres:///?host=localhost&port=1234'
|
||||||
|
var subject = parse(connectionString)
|
||||||
|
subject.port.should.equal('1234')
|
||||||
|
})
|
||||||
|
})
|
||||||
@ -1,470 +0,0 @@
|
|||||||
import chai from 'chai'
|
|
||||||
const expect = chai.expect
|
|
||||||
chai.should()
|
|
||||||
|
|
||||||
import { parse } from '../'
|
|
||||||
|
|
||||||
describe('parse', function () {
|
|
||||||
it('using connection string in client constructor', function () {
|
|
||||||
const subject = parse('postgres://brian:pw@boom:381/lala')
|
|
||||||
subject.user?.should.equal('brian')
|
|
||||||
subject.password?.should.equal('pw')
|
|
||||||
subject.host?.should.equal('boom')
|
|
||||||
subject.port?.should.equal('381')
|
|
||||||
subject.database?.should.equal('lala')
|
|
||||||
})
|
|
||||||
|
|
||||||
it('escape spaces if present', function () {
|
|
||||||
const subject = parse('postgres://localhost/post gres')
|
|
||||||
subject.database?.should.equal('post gres')
|
|
||||||
})
|
|
||||||
|
|
||||||
it('do not double escape spaces', function () {
|
|
||||||
const subject = parse('postgres://localhost/post%20gres')
|
|
||||||
subject.database?.should.equal('post gres')
|
|
||||||
})
|
|
||||||
|
|
||||||
it('initializing with unix domain socket', function () {
|
|
||||||
const subject = parse('/const/run/')
|
|
||||||
subject.host?.should.equal('/const/run/')
|
|
||||||
})
|
|
||||||
|
|
||||||
it('initializing with unix domain socket and a specific database, the simple way', function () {
|
|
||||||
const subject = parse('/const/run/ mydb')
|
|
||||||
subject.host?.should.equal('/const/run/')
|
|
||||||
subject.database?.should.equal('mydb')
|
|
||||||
})
|
|
||||||
|
|
||||||
it('initializing with unix domain socket, the health way', function () {
|
|
||||||
const subject = parse('socket:/some path/?db=my[db]&encoding=utf8')
|
|
||||||
subject.host?.should.equal('/some path/')
|
|
||||||
subject.database?.should.equal('my[db]', 'must to be escaped and unescaped trough "my%5Bdb%5D"')
|
|
||||||
subject.client_encoding?.should.equal('utf8')
|
|
||||||
})
|
|
||||||
|
|
||||||
it('initializing with unix domain socket, the escaped health way', function () {
|
|
||||||
const subject = parse('socket:/some%20path/?db=my%2Bdb&encoding=utf8')
|
|
||||||
subject.host?.should.equal('/some path/')
|
|
||||||
subject.database?.should.equal('my+db')
|
|
||||||
subject.client_encoding?.should.equal('utf8')
|
|
||||||
})
|
|
||||||
|
|
||||||
it('initializing with unix domain socket, username and password', function () {
|
|
||||||
const subject = parse('socket://brian:pw@/const/run/?db=mydb')
|
|
||||||
subject.user?.should.equal('brian')
|
|
||||||
subject.password?.should.equal('pw')
|
|
||||||
subject.host?.should.equal('/const/run/')
|
|
||||||
subject.database?.should.equal('mydb')
|
|
||||||
})
|
|
||||||
|
|
||||||
it('password contains < and/or > characters', function () {
|
|
||||||
const sourceConfig = {
|
|
||||||
user: 'brian',
|
|
||||||
password: 'hello<ther>e',
|
|
||||||
host: 'localhost',
|
|
||||||
port: 5432,
|
|
||||||
database: 'postgres',
|
|
||||||
}
|
|
||||||
const connectionString =
|
|
||||||
'postgres://' +
|
|
||||||
sourceConfig.user +
|
|
||||||
':' +
|
|
||||||
sourceConfig.password +
|
|
||||||
'@' +
|
|
||||||
sourceConfig.host +
|
|
||||||
':' +
|
|
||||||
sourceConfig.port +
|
|
||||||
'/' +
|
|
||||||
sourceConfig.database
|
|
||||||
const subject = parse(connectionString)
|
|
||||||
subject.password?.should.equal(sourceConfig.password)
|
|
||||||
})
|
|
||||||
|
|
||||||
it('password contains colons', function () {
|
|
||||||
const sourceConfig = {
|
|
||||||
user: 'brian',
|
|
||||||
password: 'hello:pass:world',
|
|
||||||
host: 'localhost',
|
|
||||||
port: 5432,
|
|
||||||
database: 'postgres',
|
|
||||||
}
|
|
||||||
const connectionString =
|
|
||||||
'postgres://' +
|
|
||||||
sourceConfig.user +
|
|
||||||
':' +
|
|
||||||
sourceConfig.password +
|
|
||||||
'@' +
|
|
||||||
sourceConfig.host +
|
|
||||||
':' +
|
|
||||||
sourceConfig.port +
|
|
||||||
'/' +
|
|
||||||
sourceConfig.database
|
|
||||||
const subject = parse(connectionString)
|
|
||||||
subject.password?.should.equal(sourceConfig.password)
|
|
||||||
})
|
|
||||||
|
|
||||||
it('username or password contains weird characters', function () {
|
|
||||||
const strang = 'pg://my f%irst name:is&%awesome!@localhost:9000'
|
|
||||||
const subject = parse(strang)
|
|
||||||
subject.user?.should.equal('my f%irst name')
|
|
||||||
subject.password?.should.equal('is&%awesome!')
|
|
||||||
subject.host?.should.equal('localhost')
|
|
||||||
})
|
|
||||||
|
|
||||||
it('url is properly encoded', function () {
|
|
||||||
const encoded = 'pg://bi%25na%25%25ry%20:s%40f%23@localhost/%20u%2520rl'
|
|
||||||
const subject = parse(encoded)
|
|
||||||
subject.user?.should.equal('bi%na%%ry ')
|
|
||||||
subject.password?.should.equal('s@f#')
|
|
||||||
subject.host?.should.equal('localhost')
|
|
||||||
subject.database?.should.equal(' u%20rl')
|
|
||||||
})
|
|
||||||
|
|
||||||
it('relative url sets database', function () {
|
|
||||||
const relative = 'different_db_on_default_host'
|
|
||||||
const subject = parse(relative)
|
|
||||||
subject.database?.should.equal('different_db_on_default_host')
|
|
||||||
})
|
|
||||||
|
|
||||||
it('no pathname returns null database', function () {
|
|
||||||
const subject = parse('pg://myhost')
|
|
||||||
;(subject.database === null).should.equal(true)
|
|
||||||
})
|
|
||||||
|
|
||||||
it('pathname of "/" returns null database', function () {
|
|
||||||
const subject = parse('pg://myhost/')
|
|
||||||
subject.host?.should.equal('myhost')
|
|
||||||
;(subject.database === null).should.equal(true)
|
|
||||||
})
|
|
||||||
|
|
||||||
it('configuration parameter host', function () {
|
|
||||||
const subject = parse('pg://user:pass@/dbname?host=/unix/socket')
|
|
||||||
subject.user?.should.equal('user')
|
|
||||||
subject.password?.should.equal('pass')
|
|
||||||
subject.host?.should.equal('/unix/socket')
|
|
||||||
subject.database?.should.equal('dbname')
|
|
||||||
})
|
|
||||||
|
|
||||||
it('configuration parameter host overrides url host', function () {
|
|
||||||
const subject = parse('pg://user:pass@localhost/dbname?host=/unix/socket')
|
|
||||||
subject.database?.should.equal('dbname')
|
|
||||||
subject.host?.should.equal('/unix/socket')
|
|
||||||
})
|
|
||||||
|
|
||||||
it('url with encoded socket', function () {
|
|
||||||
const subject = parse('pg://user:pass@%2Funix%2Fsocket/dbname')
|
|
||||||
subject.user?.should.equal('user')
|
|
||||||
subject.password?.should.equal('pass')
|
|
||||||
subject.host?.should.equal('/unix/socket')
|
|
||||||
subject.database?.should.equal('dbname')
|
|
||||||
})
|
|
||||||
|
|
||||||
it('url with real host and an encoded db name', function () {
|
|
||||||
const subject = parse('pg://user:pass@localhost/%2Fdbname')
|
|
||||||
subject.user?.should.equal('user')
|
|
||||||
subject.password?.should.equal('pass')
|
|
||||||
subject.host?.should.equal('localhost')
|
|
||||||
subject.database?.should.equal('%2Fdbname')
|
|
||||||
})
|
|
||||||
|
|
||||||
it('configuration parameter host treats encoded host as part of the db name', function () {
|
|
||||||
const subject = parse('pg://user:pass@%2Funix%2Fsocket/dbname?host=localhost')
|
|
||||||
subject.user?.should.equal('user')
|
|
||||||
subject.password?.should.equal('pass')
|
|
||||||
subject.host?.should.equal('localhost')
|
|
||||||
subject.database?.should.equal('%2Funix%2Fsocket/dbname')
|
|
||||||
})
|
|
||||||
|
|
||||||
it('configuration parameter application_name', function () {
|
|
||||||
const connectionString = 'pg:///?application_name=TheApp'
|
|
||||||
const subject = parse(connectionString)
|
|
||||||
subject.application_name?.should.equal('TheApp')
|
|
||||||
})
|
|
||||||
|
|
||||||
it('configuration parameter fallback_application_name', function () {
|
|
||||||
const connectionString = 'pg:///?fallback_application_name=TheAppFallback'
|
|
||||||
const subject = parse(connectionString)
|
|
||||||
subject.fallback_application_name?.should.equal('TheAppFallback')
|
|
||||||
})
|
|
||||||
|
|
||||||
it('configuration parameter options', function () {
|
|
||||||
const connectionString = 'pg:///?options=-c geqo=off'
|
|
||||||
const subject = parse(connectionString)
|
|
||||||
subject.options?.should.equal('-c geqo=off')
|
|
||||||
})
|
|
||||||
|
|
||||||
it('configuration parameter ssl=true', function () {
|
|
||||||
const connectionString = 'pg:///?ssl=true'
|
|
||||||
const subject = parse(connectionString)
|
|
||||||
subject.ssl?.should.equal(true)
|
|
||||||
})
|
|
||||||
|
|
||||||
it('configuration parameter ssl=1', function () {
|
|
||||||
const connectionString = 'pg:///?ssl=1'
|
|
||||||
const subject = parse(connectionString)
|
|
||||||
subject.ssl?.should.equal(true)
|
|
||||||
})
|
|
||||||
|
|
||||||
it('configuration parameter ssl=0', function () {
|
|
||||||
const connectionString = 'pg:///?ssl=0'
|
|
||||||
const subject = parse(connectionString)
|
|
||||||
subject.ssl?.should.equal(false)
|
|
||||||
})
|
|
||||||
|
|
||||||
it('set ssl', function () {
|
|
||||||
const subject = parse('pg://myhost/db?ssl=1')
|
|
||||||
subject.ssl?.should.equal(true)
|
|
||||||
})
|
|
||||||
|
|
||||||
it('configuration parameter sslcert=/path/to/cert', function () {
|
|
||||||
const connectionString = 'pg:///?sslcert=' + __dirname + '/example.cert'
|
|
||||||
const subject = parse(connectionString)
|
|
||||||
subject.ssl?.should.eql({
|
|
||||||
cert: 'example cert\n',
|
|
||||||
})
|
|
||||||
})
|
|
||||||
|
|
||||||
it('configuration parameter sslkey=/path/to/key', function () {
|
|
||||||
const connectionString = 'pg:///?sslkey=' + __dirname + '/example.key'
|
|
||||||
const subject = parse(connectionString)
|
|
||||||
subject.ssl?.should.eql({
|
|
||||||
key: 'example key\n',
|
|
||||||
})
|
|
||||||
})
|
|
||||||
|
|
||||||
it('configuration parameter sslrootcert=/path/to/ca', function () {
|
|
||||||
const connectionString = 'pg:///?sslrootcert=' + __dirname + '/example.ca'
|
|
||||||
const subject = parse(connectionString)
|
|
||||||
subject.ssl?.should.eql({
|
|
||||||
ca: 'example ca\n',
|
|
||||||
})
|
|
||||||
})
|
|
||||||
|
|
||||||
it('configuration parameter sslmode=no-verify', function () {
|
|
||||||
const connectionString = 'pg:///?sslmode=no-verify'
|
|
||||||
const subject = parse(connectionString)
|
|
||||||
subject.ssl?.should.eql({
|
|
||||||
rejectUnauthorized: false,
|
|
||||||
})
|
|
||||||
})
|
|
||||||
|
|
||||||
it('configuration parameter sslmode=disable', function () {
|
|
||||||
const connectionString = 'pg:///?sslmode=disable'
|
|
||||||
const subject = parse(connectionString)
|
|
||||||
subject.ssl?.should.eql(false)
|
|
||||||
})
|
|
||||||
|
|
||||||
it('configuration parameter sslmode=prefer', function () {
|
|
||||||
const connectionString = 'pg:///?sslmode=prefer'
|
|
||||||
const subject = parse(connectionString)
|
|
||||||
subject.ssl?.should.eql({})
|
|
||||||
})
|
|
||||||
|
|
||||||
it('configuration parameter sslmode=require', function () {
|
|
||||||
const connectionString = 'pg:///?sslmode=require'
|
|
||||||
const subject = parse(connectionString)
|
|
||||||
subject.ssl?.should.eql({})
|
|
||||||
})
|
|
||||||
|
|
||||||
it('configuration parameter sslmode=verify-ca', function () {
|
|
||||||
const connectionString = 'pg:///?sslmode=verify-ca'
|
|
||||||
const subject = parse(connectionString)
|
|
||||||
subject.ssl?.should.eql({})
|
|
||||||
})
|
|
||||||
|
|
||||||
it('configuration parameter sslmode=verify-full', function () {
|
|
||||||
const connectionString = 'pg:///?sslmode=verify-full'
|
|
||||||
const subject = parse(connectionString)
|
|
||||||
subject.ssl?.should.eql({})
|
|
||||||
})
|
|
||||||
|
|
||||||
it('configuration parameter ssl=true and sslmode=require still work with sslrootcert=/path/to/ca', function () {
|
|
||||||
const connectionString = 'pg:///?ssl=true&sslrootcert=' + __dirname + '/example.ca&sslmode=require'
|
|
||||||
const subject = parse(connectionString)
|
|
||||||
subject.ssl?.should.eql({
|
|
||||||
ca: 'example ca\n',
|
|
||||||
})
|
|
||||||
})
|
|
||||||
|
|
||||||
it('configuration parameter sslmode=disable with uselibpqcompat query param', function () {
|
|
||||||
const connectionString = 'pg:///?sslmode=disable&uselibpqcompat=true'
|
|
||||||
const subject = parse(connectionString)
|
|
||||||
subject.ssl?.should.eql(false)
|
|
||||||
})
|
|
||||||
|
|
||||||
it('configuration parameter sslmode=prefer with uselibpqcompat query param', function () {
|
|
||||||
const connectionString = 'pg:///?sslmode=prefer&uselibpqcompat=true'
|
|
||||||
const subject = parse(connectionString)
|
|
||||||
subject.ssl?.should.eql({
|
|
||||||
rejectUnauthorized: false,
|
|
||||||
})
|
|
||||||
})
|
|
||||||
|
|
||||||
it('configuration parameter sslmode=require with uselibpqcompat query param', function () {
|
|
||||||
const connectionString = 'pg:///?sslmode=require&uselibpqcompat=true'
|
|
||||||
const subject = parse(connectionString)
|
|
||||||
subject.ssl?.should.eql({
|
|
||||||
rejectUnauthorized: false,
|
|
||||||
})
|
|
||||||
})
|
|
||||||
|
|
||||||
it('configuration parameter sslmode=verify-ca with uselibpqcompat query param', function () {
|
|
||||||
const connectionString = 'pg:///?sslmode=verify-ca&uselibpqcompat=true'
|
|
||||||
expect(function () {
|
|
||||||
parse(connectionString)
|
|
||||||
}).to.throw()
|
|
||||||
})
|
|
||||||
|
|
||||||
it('when throwing on invalid url does not print out the password in the error message', function () {
|
|
||||||
const host = 'localhost'
|
|
||||||
const port = 5432
|
|
||||||
const user = 'user'
|
|
||||||
const password = 'g#4624$@F$#v`'
|
|
||||||
const database = 'db'
|
|
||||||
|
|
||||||
const connectionString = `postgres://${user}:${password}@${host}:${port}/${database}`
|
|
||||||
expect(function () {
|
|
||||||
parse(connectionString)
|
|
||||||
}).to.throw()
|
|
||||||
try {
|
|
||||||
parse(connectionString)
|
|
||||||
} catch (err: unknown) {
|
|
||||||
expect(JSON.stringify(err)).to.not.include(password, 'Password should not be in the error message')
|
|
||||||
expect(JSON.stringify(err)).to.include('REDACTED', 'The thrown error should contain the redacted URL')
|
|
||||||
return
|
|
||||||
}
|
|
||||||
throw new Error('Expected an error to be thrown')
|
|
||||||
})
|
|
||||||
|
|
||||||
it('configuration parameter sslmode=verify-ca and sslrootcert with uselibpqcompat query param', function () {
|
|
||||||
const connectionString = 'pg:///?sslmode=verify-ca&uselibpqcompat=true&sslrootcert=' + __dirname + '/example.ca'
|
|
||||||
const subject = parse(connectionString)
|
|
||||||
subject.ssl?.should.have.property('checkServerIdentity').that.is.a('function')
|
|
||||||
// We prove above that the checkServerIdentity function is defined
|
|
||||||
//
|
|
||||||
// FIXME: remove this if we upgrade to TypeScript 5
|
|
||||||
// @ts-ignore
|
|
||||||
expect(subject.ssl.checkServerIdentity()).be.undefined
|
|
||||||
})
|
|
||||||
|
|
||||||
it('configuration parameter sslmode=verify-full with uselibpqcompat query param', function () {
|
|
||||||
const connectionString = 'pg:///?sslmode=verify-full&uselibpqcompat=true'
|
|
||||||
const subject = parse(connectionString)
|
|
||||||
subject.ssl?.should.eql({})
|
|
||||||
})
|
|
||||||
|
|
||||||
it('configuration parameter ssl=true and sslmode=require still work with sslrootcert=/path/to/ca with uselibpqcompat query param', function () {
|
|
||||||
const connectionString =
|
|
||||||
'pg:///?ssl=true&sslrootcert=' + __dirname + '/example.ca&sslmode=require&uselibpqcompat=true'
|
|
||||||
const subject = parse(connectionString)
|
|
||||||
subject.ssl?.should.have.property('ca', 'example ca\n')
|
|
||||||
subject.ssl?.should.have.property('checkServerIdentity').that.is.a('function')
|
|
||||||
// We prove above that the checkServerIdentity function is defined
|
|
||||||
//
|
|
||||||
// FIXME: remove this if we upgrade to TypeScript 5
|
|
||||||
// @ts-ignore
|
|
||||||
expect(subject.ssl?.checkServerIdentity()).be.undefined
|
|
||||||
})
|
|
||||||
|
|
||||||
it('configuration parameter sslmode=disable with useLibpqCompat option', function () {
|
|
||||||
const connectionString = 'pg:///?sslmode=disable'
|
|
||||||
const subject = parse(connectionString, { useLibpqCompat: true })
|
|
||||||
subject.ssl?.should.eql(false)
|
|
||||||
})
|
|
||||||
|
|
||||||
it('configuration parameter sslmode=prefer with useLibpqCompat option', function () {
|
|
||||||
const connectionString = 'pg:///?sslmode=prefer'
|
|
||||||
const subject = parse(connectionString, { useLibpqCompat: true })
|
|
||||||
subject.ssl?.should.eql({
|
|
||||||
rejectUnauthorized: false,
|
|
||||||
})
|
|
||||||
})
|
|
||||||
|
|
||||||
it('configuration parameter sslmode=require with useLibpqCompat option', function () {
|
|
||||||
const connectionString = 'pg:///?sslmode=require'
|
|
||||||
const subject = parse(connectionString, { useLibpqCompat: true })
|
|
||||||
subject.ssl?.should.eql({
|
|
||||||
rejectUnauthorized: false,
|
|
||||||
})
|
|
||||||
})
|
|
||||||
|
|
||||||
it('configuration parameter sslmode=verify-ca with useLibpqCompat option', function () {
|
|
||||||
const connectionString = 'pg:///?sslmode=verify-ca'
|
|
||||||
expect(function () {
|
|
||||||
parse(connectionString, { useLibpqCompat: true })
|
|
||||||
}).to.throw()
|
|
||||||
})
|
|
||||||
|
|
||||||
it('configuration parameter sslmode=verify-ca and sslrootcert with useLibpqCompat option', function () {
|
|
||||||
const connectionString = 'pg:///?sslmode=verify-ca&sslrootcert=' + __dirname + '/example.ca'
|
|
||||||
const subject = parse(connectionString, { useLibpqCompat: true })
|
|
||||||
subject.ssl?.should.have.property('checkServerIdentity').that.is.a('function')
|
|
||||||
// We prove above that the checkServerIdentity function is defined
|
|
||||||
//
|
|
||||||
// FIXME: remove this if we upgrade to TypeScript 5
|
|
||||||
// @ts-ignore
|
|
||||||
expect(subject.ssl?.checkServerIdentity()).be.undefined
|
|
||||||
})
|
|
||||||
|
|
||||||
it('configuration parameter sslmode=verify-full with useLibpqCompat option', function () {
|
|
||||||
const connectionString = 'pg:///?sslmode=verify-full'
|
|
||||||
const subject = parse(connectionString, { useLibpqCompat: true })
|
|
||||||
subject.ssl?.should.eql({})
|
|
||||||
})
|
|
||||||
|
|
||||||
it('configuration parameter ssl=true and sslmode=require still work with sslrootcert=/path/to/ca with useLibpqCompat option', function () {
|
|
||||||
const connectionString = 'pg:///?ssl=true&sslrootcert=' + __dirname + '/example.ca&sslmode=require'
|
|
||||||
const subject = parse(connectionString, { useLibpqCompat: true })
|
|
||||||
subject.ssl?.should.have.property('ca', 'example ca\n')
|
|
||||||
subject.ssl?.should.have.property('checkServerIdentity').that.is.a('function')
|
|
||||||
// We prove above that the checkServerIdentity function is defined
|
|
||||||
//
|
|
||||||
// FIXME: remove this if we upgrade to TypeScript 5
|
|
||||||
// @ts-ignore
|
|
||||||
expect(subject.ssl?.checkServerIdentity()).be.undefined
|
|
||||||
})
|
|
||||||
|
|
||||||
it('does not allow uselibpqcompat query parameter and useLibpqCompat option at the same time', function () {
|
|
||||||
const connectionString = 'pg:///?uselibpqcompat=true'
|
|
||||||
expect(function () {
|
|
||||||
parse(connectionString, { useLibpqCompat: true })
|
|
||||||
}).to.throw()
|
|
||||||
})
|
|
||||||
|
|
||||||
it('allow other params like max, ...', function () {
|
|
||||||
const subject = parse('pg://myhost/db?max=18&min=4')
|
|
||||||
subject.max?.should.equal('18')
|
|
||||||
subject.min?.should.equal('4')
|
|
||||||
})
|
|
||||||
|
|
||||||
it('configuration parameter keepalives', function () {
|
|
||||||
const connectionString = 'pg:///?keepalives=1'
|
|
||||||
const subject = parse(connectionString)
|
|
||||||
subject.keepalives?.should.equal('1')
|
|
||||||
})
|
|
||||||
|
|
||||||
it('unknown configuration parameter is passed into client', function () {
|
|
||||||
const connectionString = 'pg:///?ThereIsNoSuchPostgresParameter=1234'
|
|
||||||
const subject = parse(connectionString)
|
|
||||||
subject.ThereIsNoSuchPostgresParameter?.should.equal('1234')
|
|
||||||
})
|
|
||||||
|
|
||||||
it('do not override a config field with value from query string', function () {
|
|
||||||
const subject = parse('socket:/some path/?db=my[db]&encoding=utf8&client_encoding=bogus')
|
|
||||||
subject.host?.should.equal('/some path/')
|
|
||||||
subject.database?.should.equal('my[db]', 'must to be escaped and unescaped through "my%5Bdb%5D"')
|
|
||||||
subject.client_encoding?.should.equal('utf8')
|
|
||||||
})
|
|
||||||
|
|
||||||
it('return last value of repeated parameter', function () {
|
|
||||||
const connectionString = 'pg:///?keepalives=1&keepalives=0'
|
|
||||||
const subject = parse(connectionString)
|
|
||||||
subject.keepalives?.should.equal('0')
|
|
||||||
})
|
|
||||||
|
|
||||||
it('use the port specified in the query parameters', function () {
|
|
||||||
const connectionString = 'postgres:///?host=localhost&port=1234'
|
|
||||||
const subject = parse(connectionString)
|
|
||||||
subject.port?.should.equal('1234')
|
|
||||||
})
|
|
||||||
})
|
|
||||||
@ -1,19 +0,0 @@
|
|||||||
{
|
|
||||||
"compilerOptions": {
|
|
||||||
"module": "commonjs",
|
|
||||||
"esModuleInterop": true,
|
|
||||||
"allowSyntheticDefaultImports": true,
|
|
||||||
"strict": true,
|
|
||||||
"target": "es6",
|
|
||||||
"noImplicitAny": true,
|
|
||||||
"moduleResolution": "node",
|
|
||||||
"sourceMap": true,
|
|
||||||
"outDir": "dist",
|
|
||||||
"incremental": true,
|
|
||||||
"baseUrl": ".",
|
|
||||||
"declaration": true
|
|
||||||
},
|
|
||||||
"include": [
|
|
||||||
"test/**/*"
|
|
||||||
]
|
|
||||||
}
|
|
||||||
@ -1,7 +1,7 @@
|
|||||||
'use strict'
|
'use strict'
|
||||||
// note: can remove these deep requires when we bump min version of pg to 9.x
|
const pg = require('pg')
|
||||||
const Result = require('pg/lib/result.js')
|
const { Result, utils } = pg
|
||||||
const prepare = require('pg/lib/utils.js').prepareValue
|
const prepare = utils.prepareValue
|
||||||
const EventEmitter = require('events').EventEmitter
|
const EventEmitter = require('events').EventEmitter
|
||||||
const util = require('util')
|
const util = require('util')
|
||||||
|
|
||||||
|
|||||||
@ -1,6 +1,6 @@
{
  "name": "pg-cursor",
  "version": "2.15.3",
  "version": "2.13.2-alpha.1",
  "description": "Query cursor extension for node-postgres",
  "main": "index.js",
  "exports": {
@ -25,7 +25,7 @@
  "license": "MIT",
  "devDependencies": {
    "mocha": "^10.5.2",
    "pg": "^8.16.3"
    "pg": "^8.14.2-alpha.1"
  },
  "peerDependencies": {
    "pg": "^8"
@ -1,3 +0,0 @@
This is an internal package for node-postgres used to test esm & cjs module export compatibility.

The only thing you really need to do is `yarn && yarn test` from the root of the project & these tests will run as well as all the other tests. So, basically, you can ignore this. 😄
@ -1,35 +0,0 @@
const assert = require('node:assert')
const test = require('node:test')
const { describe, it } = test

const paths = [
  'pg',
  'pg/lib/index.js',
  'pg/lib/index',
  'pg/lib/connection-parameters',
  'pg/lib/connection-parameters.js',
  'pg/lib/type-overrides',
  'pg-protocol/dist/messages.js',
  'pg-protocol/dist/messages',
  'pg-native/lib/build-result.js',
  'pg-cloudflare/package.json',
]
for (const path of paths) {
  describe(`importing ${path}`, () => {
    it('works with require', () => {
      const mod = require(path)
      assert(mod)
    })
  })
}

describe('pg-native', () => {
  it('should work with commonjs', async () => {
    const pg = require('pg')

    const pool = new pg.native.Pool()
    const result = await pool.query('SELECT 1')
    assert.strictEqual(result.rowCount, 1)
    pool.end()
  })
})
@ -1,11 +1,11 @@
{
  "name": "pg-esm-test",
  "version": "1.2.3",
  "version": "1.0.1-alpha.1",
  "description": "A test module for PostgreSQL with ESM support",
  "main": "index.js",
  "type": "module",
  "scripts": {
    "test": "node --test --conditions=workerd"
    "test": "node --test"
  },
  "keywords": [
    "postgres",
@ -14,13 +14,12 @@
    "test"
  ],
  "devDependencies": {
    "pg": "^8.16.3",
    "pg": "^8.14.2-alpha.1",
    "pg-cloudflare": "^1.2.7",
    "pg-cloudflare": "^1.1.2-alpha.1",
    "pg-cursor": "^2.15.3",
    "pg-cursor": "^2.13.2-alpha.1",
    "pg-native": "^3.5.2",
    "pg-native": "^3.3.1-alpha.0",
    "pg-pool": "^3.10.1",
    "pg-pool": "^3.8.1-alpha.1",
    "pg-protocol": "^1.10.3",
    "pg-query-stream": "^4.8.2-alpha.1"
    "pg-query-stream": "^4.10.3"
  },
  "author": "Brian M. Carlson <brian.m.carlson@gmail.com>",
  "license": "MIT"
@ -1,18 +0,0 @@
import protocol, { NoticeMessage, DatabaseError } from 'pg-protocol/dist/messages.js'
import { describe, it } from 'node:test'
import { strict as assert } from 'node:assert'

describe('pg-protocol', () => {
  it('should export database error', () => {
    assert.ok(DatabaseError)
  })
  it('should export protocol', () => {
    assert.ok(protocol)
    assert.ok(protocol.noData)
    assert.ok(protocol.parseComplete)
    assert.ok(protocol.NoticeMessage)
  })
  it('should export NoticeMessage from file in dist folder', () => {
    assert.ok(NoticeMessage)
  })
})
@ -1,17 +1,6 @@
import assert from 'node:assert'
import { describe, it } from 'node:test'
import pg, {
import pg, { Client, Pool } from 'pg'
  Client,
  Pool,
  Connection,
  defaults,
  types,
  DatabaseError,
  escapeIdentifier,
  escapeLiteral,
  Result,
  TypeOverrides,
} from 'pg'

describe('pg', () => {
  it('should export Client constructor', () => {
@ -25,36 +14,4 @@ describe('pg', () => {
  it('should still provide default export', () => {
    assert.ok(new pg.Pool())
  })

  it('should export Connection constructor', () => {
    assert.ok(new Connection())
  })

  it('should export defaults', () => {
    assert.ok(defaults)
  })

  it('should export types', () => {
    assert.ok(types)
  })

  it('should export DatabaseError', () => {
    assert.ok(DatabaseError)
  })

  it('should export escapeIdentifier', () => {
    assert.ok(escapeIdentifier)
  })

  it('should export escapeLiteral', () => {
    assert.ok(escapeLiteral)
  })

  it('should export Result', () => {
    assert.ok(Result)
  })

  it('should export TypeOverrides', () => {
    assert.ok(TypeOverrides)
  })
})
@ -30,40 +30,40 @@ $ npm i pg-native
### async

```js
const Client = require('pg-native')
var Client = require('pg-native')

const client = new Client();
var client = new Client();
client.connect(function(err) {
  if(err) throw err

  // text queries
  //text queries
  client.query('SELECT NOW() AS the_date', function(err, rows) {
    if(err) throw err

    console.log(rows[0].the_date) // Tue Sep 16 2014 23:42:39 GMT-0400 (EDT)
    console.log(rows[0].the_date) //Tue Sep 16 2014 23:42:39 GMT-0400 (EDT)

    // parameterized statements
    //parameterized statements
    client.query('SELECT $1::text as twitter_handle', ['@briancarlson'], function(err, rows) {
      if(err) throw err

      console.log(rows[0].twitter_handle) //@briancarlson
    })

    // prepared statements
    //prepared statements
    client.prepare('get_twitter', 'SELECT $1::text as twitter_handle', 1, function(err) {
      if(err) throw err

      // execute the prepared, named statement
      //execute the prepared, named statement
      client.execute('get_twitter', ['@briancarlson'], function(err, rows) {
        if(err) throw err

        console.log(rows[0].twitter_handle) //@briancarlson

        // execute the prepared, named statement again
        //execute the prepared, named statement again
        client.execute('get_twitter', ['@realcarrotfacts'], function(err, rows) {
          if(err) throw err

          console.log(rows[0].twitter_handle) // @realcarrotfacts
          console.log(rows[0].twitter_handle) //@realcarrotfacts

          client.end(function() {
            console.log('ended')
@ -81,27 +81,27 @@ client.connect(function(err) {
Because `pg-native` is bound to [libpq](https://github.com/brianc/node-libpq) it is able to provide _sync_ operations for both connecting and queries. This is a bad idea in _non-blocking systems_ like web servers, but is exteremly convienent in scripts and bootstrapping applications - much the same way `fs.readFileSync` comes in handy.

```js
const Client = require('pg-native')
var Client = require('pg-native')

const client = new Client()
var client = new Client()
client.connectSync()

// text queries
//text queries
const rows = client.querySync('SELECT NOW() AS the_date')
var rows = client.querySync('SELECT NOW() AS the_date')
console.log(rows[0].the_date) // Tue Sep 16 2014 23:42:39 GMT-0400 (EDT)
console.log(rows[0].the_date) //Tue Sep 16 2014 23:42:39 GMT-0400 (EDT)

// parameterized queries
//parameterized queries
const rows = client.querySync('SELECT $1::text as twitter_handle', ['@briancarlson'])
var rows = client.querySync('SELECT $1::text as twitter_handle', ['@briancarlson'])
console.log(rows[0].twitter_handle) // @briancarlson
console.log(rows[0].twitter_handle) //@briancarlson

// prepared statements
//prepared statements
client.prepareSync('get_twitter', 'SELECT $1::text as twitter_handle', 1)

const rows = client.executeSync('get_twitter', ['@briancarlson'])
var rows = client.executeSync('get_twitter', ['@briancarlson'])
console.log(rows[0].twitter_handle) // @briancarlson
console.log(rows[0].twitter_handle) //@briancarlson

const rows = client.executeSync('get_twitter', ['@realcarrotfacts'])
var rows = client.executeSync('get_twitter', ['@realcarrotfacts'])
console.log(rows[0].twitter_handle) // @realcarrotfacts
console.log(rows[0].twitter_handle) //@realcarrotfacts
```

## api
@ -125,14 +125,14 @@ Returns an `Error` to the `callback` if the connection was unsuccessful. `callb
##### example

```js
const client = new Client()
var client = new Client()
client.connect(function(err) {
  if(err) throw err

  console.log('connected!')
})

const client2 = new Client()
var client2 = new Client()
client2.connect('postgresql://user:password@host:5432/database?param=value', function(err) {
  if(err) throw err

@ -147,7 +147,7 @@ Execute a query with the text of `queryText` and _optional_ parameters specified
##### example

```js
const client = new Client()
var client = new Client()
client.connect(function(err) {
  if (err) throw err

@ -175,7 +175,7 @@ Prepares a _named statement_ for later execution. You _must_ supply the name of
##### example

```js
const client = new Client()
var client = new Client()
client.connect(function(err) {
  if(err) throw err

@ -197,7 +197,7 @@ Executes a previously prepared statement on this client with the name of `statem

```js
const client = new Client()
var client = new Client()
client.connect(function(err) {
  if(err) throw err

@ -221,7 +221,7 @@ Ends the connection. Calls the _optional_ callback when the connection is termin
##### example

```js
const client = new Client()
var client = new Client()
client.connect(function(err) {
  if(err) throw err
  client.end(function() {
@ -236,9 +236,9 @@ Cancels the active query on the client. Callback receives an error if there was

##### example
```js
const client = new Client()
var client = new Client()
client.connectSync()
// sleep for 100 seconds
//sleep for 100 seconds
client.query('select pg_sleep(100)', function(err) {
  console.log(err) // [Error: ERROR: canceling statement due to user request]
})
@ -264,7 +264,7 @@ Prepares a name statement with name of `statementName` and a query text of `quer

- __`client.executeSync(statementName:string, <values:string[]>) -> results:Object[]`__

Executes a previously prepared statement on this client with the name of `statementName`, passing it the optional array of query parameters as a `values` array. Throws an `Error` if the execution fails, otherwise returns an array of results.
Executes a previously prepared statement on this client with the name of `statementName`, passing it the optional array of query paramters as a `values` array. Throws an `Error` if the execution fails, otherwas returns an array of results.

## testing
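The README hunks above document pg-native's callback API (`connect`, `query`, `prepare`/`execute`, `end`) and their `*Sync` variants. As additional context, here is a minimal sketch of the prepared-statement flow those hunks describe; the statement name, SQL text, and parameter value below are illustrative placeholders, not taken from the diff.

```js
// Minimal sketch of the pg-native prepare/execute flow documented in the README above.
// 'get_name', the SQL text, and the parameter value are illustrative only.
const Client = require('pg-native')

const client = new Client()
client.connect(function (err) {
  if (err) throw err

  // prepare a named statement that takes one parameter...
  client.prepare('get_name', 'SELECT $1::text AS name', 1, function (err) {
    if (err) throw err

    // ...then execute it with a value and read the rows back
    client.execute('get_name', ['brianc'], function (err, rows) {
      if (err) throw err
      console.log(rows[0].name) // brianc

      client.end(function () {
        console.log('connection closed')
      })
    })
  })
})
```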
@ -1,10 +1,10 @@
const pg = require('pg').native
var pg = require('pg').native
const Native = require('../')
var Native = require('../')

const warmup = function (fn, cb) {
var warmup = function (fn, cb) {
  let count = 0
  var count = 0
  const max = 10
  var max = 10
  const run = function (err) {
  var run = function (err) {
    if (err) return cb(err)

    if (max >= count++) {
@ -16,26 +16,26 @@ const warmup = function (fn, cb) {
  run()
}

const native = Native()
var native = Native()
native.connectSync()

const queryText = 'SELECT generate_series(0, 1000) as X, generate_series(0, 1000) as Y, generate_series(0, 1000) as Z'
var queryText = 'SELECT generate_series(0, 1000) as X, generate_series(0, 1000) as Y, generate_series(0, 1000) as Z'
const client = new pg.Client()
var client = new pg.Client()
client.connect(function () {
  const pure = function (cb) {
  var pure = function (cb) {
    client.query(queryText, function (err) {
      if (err) throw err
      cb(err)
    })
  }
  const nativeQuery = function (cb) {
  var nativeQuery = function (cb) {
    native.query(queryText, function (err) {
      if (err) throw err
      cb(err)
    })
  }

  const run = function () {
  var run = function () {
    console.time('pure')
    warmup(pure, function () {
      console.timeEnd('pure')
@ -1,29 +1,29 @@
const Client = require('../')
var Client = require('../')
const async = require('async')
var async = require('async')

const loop = function () {
var loop = function () {
  const client = new Client()
  var client = new Client()

  const connect = function (cb) {
  var connect = function (cb) {
    client.connect(cb)
  }

  const simpleQuery = function (cb) {
  var simpleQuery = function (cb) {
    client.query('SELECT NOW()', cb)
  }

  const paramsQuery = function (cb) {
  var paramsQuery = function (cb) {
    client.query('SELECT $1::text as name', ['Brian'], cb)
  }

  const prepared = function (cb) {
  var prepared = function (cb) {
    client.prepare('test', 'SELECT $1::text as name', 1, function (err) {
      if (err) return cb(err)
      client.execute('test', ['Brian'], cb)
    })
  }

  const sync = function (cb) {
  var sync = function (cb) {
    client.querySync('SELECT NOW()')
    client.querySync('SELECT $1::text as name', ['Brian'])
    client.prepareSync('boom', 'SELECT $1::text as name', 1)
@ -31,16 +31,16 @@ const loop = function () {
    setImmediate(cb)
  }

  const end = function (cb) {
  var end = function (cb) {
    client.end(cb)
  }

  const ops = [connect, simpleQuery, paramsQuery, prepared, sync, end]
  var ops = [connect, simpleQuery, paramsQuery, prepared, sync, end]

  const start = performance.now()
  var start = Date.now()
  async.series(ops, function (err) {
    if (err) throw err
    console.log(performance.now() - start)
    console.log(Date.now() - start)
    setImmediate(loop)
  })
}
@ -1,12 +1,12 @@
const Libpq = require('libpq')
var Libpq = require('libpq')
const EventEmitter = require('events').EventEmitter
var EventEmitter = require('events').EventEmitter
const util = require('util')
var util = require('util')
const assert = require('assert')
var assert = require('assert')
const types = require('pg-types')
var types = require('pg-types')
const buildResult = require('./lib/build-result')
var buildResult = require('./lib/build-result')
const CopyStream = require('./lib/copy-stream')
var CopyStream = require('./lib/copy-stream')

const Client = (module.exports = function (config) {
var Client = (module.exports = function (config) {
  if (!(this instanceof Client)) {
    return new Client(config)
  }
@ -18,7 +18,7 @@ const Client = (module.exports = function (config) {
  this._reading = false
  this._read = this._read.bind(this)

  // allow custom type conversion to be passed in
  // allow custom type converstion to be passed in
  this._types = config.types || types

  // allow config to specify returning results
@ -51,31 +51,34 @@ Client.prototype.connectSync = function (params) {
}

Client.prototype.query = function (text, values, cb) {
  let queryFn
  var queryFn

  if (typeof values === 'function') {
    cb = values
  }

  if (Array.isArray(values)) {
    queryFn = () => {
    queryFn = function () {
      return this.pq.sendQueryParams(text, values)
      return self.pq.sendQueryParams(text, values)
    }
  } else {
    queryFn = () => {
    queryFn = function () {
      return this.pq.sendQuery(text)
      return self.pq.sendQuery(text)
    }
  }

  this._dispatchQuery(this.pq, queryFn, (err) => {
  var self = this

  self._dispatchQuery(self.pq, queryFn, function (err) {
    if (err) return cb(err)
    this._awaitResult(cb)

    self._awaitResult(cb)
  })
}

Client.prototype.prepare = function (statementName, text, nParams, cb) {
  const self = this
  var self = this
  const fn = function () {
  var fn = function () {
    return self.pq.sendPrepare(statementName, text, nParams)
  }

@ -86,9 +89,9 @@ Client.prototype.prepare = function (statementName, text, nParams, cb) {
}

Client.prototype.execute = function (statementName, parameters, cb) {
  const self = this
  var self = this

  const fn = function () {
  var fn = function () {
    return self.pq.sendQueryPrepared(statementName, parameters)
  }

@ -108,7 +111,7 @@ Client.prototype.getCopyStream = function () {
Client.prototype.cancel = function (cb) {
  assert(cb, 'Callback is required')
  // result is either true or a string containing an error
  const result = this.pq.cancel()
  var result = this.pq.cancel()
  return setImmediate(function () {
    cb(result === true ? undefined : new Error(result))
  })
@ -155,7 +158,7 @@ Client.prototype.end = function (cb) {
}

Client.prototype._readError = function (message) {
  const err = new Error(message || this.pq.errorMessage())
  var err = new Error(message || this.pq.errorMessage())
  this.emit('error', err)
}

@ -171,7 +174,7 @@ Client.prototype._consumeQueryResults = function (pq) {
}

Client.prototype._emitResult = function (pq) {
  const status = pq.resultStatus()
  var status = pq.resultStatus()
  switch (status) {
    case 'PGRES_FATAL_ERROR':
      this._queryError = new Error(this.pq.resultErrorMessage())
@ -200,7 +203,7 @@ Client.prototype._emitResult = function (pq) {

// called when libpq is readable
Client.prototype._read = function () {
  const pq = this.pq
  var pq = this.pq
  // read waiting data from the socket
  // e.g. clear the pending 'select'
  if (!pq.consumeInput()) {
@ -235,7 +238,7 @@ Client.prototype._read = function () {

  this.emit('readyForQuery')

  let notice = this.pq.notifies()
  var notice = this.pq.notifies()
  while (notice) {
    this.emit('notification', notice)
    notice = this.pq.notifies()
@ -251,8 +254,8 @@ Client.prototype._startReading = function () {
  this.pq.startReader()
}

const throwIfError = function (pq) {
var throwIfError = function (pq) {
  const err = pq.resultErrorMessage() || pq.errorMessage()
  var err = pq.resultErrorMessage() || pq.errorMessage()
  if (err) {
    throw new Error(err)
  }
@ -265,7 +268,7 @@ Client.prototype._awaitResult = function (cb) {

// wait for the writable socket to drain
Client.prototype._waitForDrain = function (pq, cb) {
  const res = pq.flush()
  var res = pq.flush()
  // res of 0 is success
  if (res === 0) return cb()

@ -274,7 +277,7 @@ Client.prototype._waitForDrain = function (pq, cb) {

  // otherwise outgoing message didn't flush to socket
  // wait for it to flush and try again
  const self = this
  var self = this
  // you cannot read & write on a socket at the same time
  return pq.writable(function () {
    self._waitForDrain(pq, cb)
@ -285,9 +288,9 @@ Client.prototype._waitForDrain = function (pq, cb) {
// finish writing query text to the socket
Client.prototype._dispatchQuery = function (pq, fn, cb) {
  this._stopReading()
  const success = pq.setNonBlocking(true)
  var success = pq.setNonBlocking(true)
  if (!success) return cb(new Error('Unable to set non-blocking to true'))
  const sent = fn()
  var sent = fn()
  if (!sent) return cb(new Error(pq.errorMessage() || 'Something went wrong dispatching the query'))
  this._waitForDrain(pq, cb)
}
@ -20,9 +20,9 @@ class Result {
  consumeFields(pq) {
    const nfields = pq.nfields()
    this.fields = new Array(nfields)
    const row = {}
    var row = {}
    for (let x = 0; x < nfields; x++) {
    for (var x = 0; x < nfields; x++) {
      const name = pq.fname(x)
      var name = pq.fname(x)
      row[name] = null
      this.fields[x] = {
        name: name,
@ -35,14 +35,14 @@ class Result {
  consumeRows(pq) {
    const tupleCount = pq.ntuples()
    this.rows = new Array(tupleCount)
    for (let i = 0; i < tupleCount; i++) {
    for (var i = 0; i < tupleCount; i++) {
      this.rows[i] = this._arrayMode ? this.consumeRowAsArray(pq, i) : this.consumeRowAsObject(pq, i)
    }
  }

  consumeRowAsObject(pq, rowIndex) {
    const row = { ...this._prebuiltEmptyResultObject }
    for (let j = 0; j < this.fields.length; j++) {
    for (var j = 0; j < this.fields.length; j++) {
      row[this.fields[j].name] = this.readValue(pq, rowIndex, j)
    }
    return row
@ -50,14 +50,14 @@ class Result {

  consumeRowAsArray(pq, rowIndex) {
    const row = new Array(this.fields.length)
    for (let j = 0; j < this.fields.length; j++) {
    for (var j = 0; j < this.fields.length; j++) {
      row[j] = this.readValue(pq, rowIndex, j)
    }
    return row
  }

  readValue(pq, rowIndex, colIndex) {
    const rawValue = pq.getvalue(rowIndex, colIndex)
    var rawValue = pq.getvalue(rowIndex, colIndex)
    if (rawValue === '' && pq.getisnull(rowIndex, colIndex)) {
      return null
    }
@ -1,8 +1,8 @@
const Duplex = require('stream').Duplex
var Duplex = require('stream').Duplex
const Writable = require('stream').Writable
var Writable = require('stream').Writable
const util = require('util')
var util = require('util')

const CopyStream = (module.exports = function (pq, options) {
var CopyStream = (module.exports = function (pq, options) {
  Duplex.call(this, options)
  this.pq = pq
  this._reading = false
@ -12,7 +12,7 @@ util.inherits(CopyStream, Duplex)

// writer methods
CopyStream.prototype._write = function (chunk, encoding, cb) {
  const result = this.pq.putCopyData(chunk)
  var result = this.pq.putCopyData(chunk)

  // sent successfully
  if (result === 1) return cb()
@ -21,22 +21,22 @@ CopyStream.prototype._write = function (chunk, encoding, cb) {
  if (result === -1) return cb(new Error(this.pq.errorMessage()))

  // command would block. wait for writable and call again.
  const self = this
  var self = this
  this.pq.writable(function () {
    self._write(chunk, encoding, cb)
  })
}

CopyStream.prototype.end = function () {
  const args = Array.prototype.slice.call(arguments, 0)
  var args = Array.prototype.slice.call(arguments, 0)
  const self = this
  var self = this

  const callback = args.pop()
  var callback = args.pop()

  if (args.length) {
    this.write(args[0])
  }
  const result = this.pq.putCopyEnd()
  var result = this.pq.putCopyEnd()

  // sent successfully
  if (result === 1) {
@ -55,7 +55,7 @@ CopyStream.prototype.end = function () {

  // error
  if (result === -1) {
    const err = new Error(this.pq.errorMessage())
    var err = new Error(this.pq.errorMessage())
    return this.emit('error', err)
  }

@ -70,7 +70,7 @@ CopyStream.prototype.end = function () {

// reader methods
CopyStream.prototype._consumeBuffer = function (cb) {
  const result = this.pq.getCopyData(true)
  var result = this.pq.getCopyData(true)
  if (result instanceof Buffer) {
    return setImmediate(function () {
      cb(null, result)
@ -81,7 +81,7 @@ CopyStream.prototype._consumeBuffer = function (cb) {
    return cb(null, null)
  }
  if (result === 0) {
    const self = this
    var self = this
    this.pq.once('readable', function () {
      self.pq.stopReader()
      self.pq.consumeInput()
@ -96,7 +96,7 @@ CopyStream.prototype._read = function (size) {
  if (this._reading) return
  this._reading = true
  // console.log('read begin');
  const self = this
  var self = this
  this._consumeBuffer(function (err, buffer) {
    self._reading = false
    if (err) {
@ -110,18 +110,18 @@ CopyStream.prototype._read = function (size) {
  })
}

const consumeResults = function (pq, cb) {
var consumeResults = function (pq, cb) {
  const cleanup = function () {
  var cleanup = function () {
    pq.removeListener('readable', onReadable)
    pq.stopReader()
  }

  const readError = function (message) {
  var readError = function (message) {
    cleanup()
    return cb(new Error(message || pq.errorMessage()))
  }

  const onReadable = function () {
  var onReadable = function () {
    // read waiting data from the socket
    // e.g. clear the pending 'select'
    if (!pq.consumeInput()) {
@ -1,6 +1,6 @@
{
  "name": "pg-native",
  "version": "3.5.2",
  "version": "3.3.1-alpha.0",
  "description": "A slightly nicer interface to Postgres over node-libpq",
  "main": "index.js",
  "exports": {
@ -8,11 +8,6 @@
      "import": "./esm/index.mjs",
      "require": "./index.js",
      "default": "./index.js"
    },
    "./lib/*": {
      "import": "./lib/*",
      "require": "./lib/*",
      "default": "./lib/*"
    }
  },
  "scripts": {
@ -34,8 +29,8 @@
  },
  "homepage": "https://github.com/brianc/node-postgres/tree/master/packages/pg-native",
  "dependencies": {
    "libpq": "^1.8.15",
    "libpq": "1.8.14",
    "pg-types": "2.2.0"
    "pg-types": "^2.1.0"
  },
  "devDependencies": {
    "async": "^0.9.0",
@ -45,11 +40,10 @@
    "mocha": "10.5.2",
    "node-gyp": ">=10.x",
    "okay": "^0.3.0",
    "semver": "^7.7.2"
    "semver": "^4.1.0"
  },
  "files": [
    "index.js",
    "lib",
    "esm"
  ]
}
@ -1,16 +1,16 @@
const Client = require('../')
var Client = require('../')
const assert = require('assert')
var assert = require('assert')

describe('client with arrayMode', function () {
  it('returns result as array', function (done) {
    const client = new Client({ arrayMode: true })
    var client = new Client({ arrayMode: true })
    client.connectSync()
    client.querySync('CREATE TEMP TABLE blah(name TEXT)')
    client.querySync('INSERT INTO blah (name) VALUES ($1)', ['brian'])
    client.querySync('INSERT INTO blah (name) VALUES ($1)', ['aaron'])
    const rows = client.querySync('SELECT * FROM blah')
    var rows = client.querySync('SELECT * FROM blah')
    assert.equal(rows.length, 2)
    const row = rows[0]
    var row = rows[0]
    assert.equal(row.length, 1)
    assert.equal(row[0], 'brian')
    assert.equal(rows[1][0], 'aaron')
@ -1,7 +1,7 @@
const Client = require('../')
var Client = require('../')
const ok = require('okay')
var ok = require('okay')
const assert = require('assert')
var assert = require('assert')
const concat = require('concat-stream')
var concat = require('concat-stream')

describe('async workflow', function () {
  before(function (done) {
@ -9,7 +9,7 @@ describe('async workflow', function () {
    this.client.connect(done)
  })

  const echoParams = function (params, cb) {
  var echoParams = function (params, cb) {
    this.client.query(
      'SELECT $1::text as first, $2::text as second',
      params,
@ -20,20 +20,20 @@ describe('async workflow', function () {
    )
  }

  const checkParams = function (params, rows) {
  var checkParams = function (params, rows) {
    assert.equal(rows.length, 1)
    assert.equal(rows[0].first, params[0])
    assert.equal(rows[0].second, params[1])
  }

  it('sends async query', function (done) {
    const params = ['one', 'two']
    var params = ['one', 'two']
    echoParams.call(this, params, done)
  })

  it('sends multiple async queries', function (done) {
    const self = this
    var self = this
    const params = ['bang', 'boom']
    var params = ['bang', 'boom']
    echoParams.call(
      this,
      params,
@ -44,13 +44,13 @@ describe('async workflow', function () {
    })
  })

  it('sends an async query, copies in, copies out, and sends another query', function (done) {
    const self = this
    var self = this
    this.client.querySync('CREATE TEMP TABLE test(name text, age int)')
    this.client.query(
      "INSERT INTO test(name, age) VALUES('brian', 32)",
      ok(done, function () {
        self.client.querySync('COPY test FROM stdin')
        const input = self.client.getCopyStream()
        var input = self.client.getCopyStream()
        input.write(Buffer.from('Aaron\t30\n', 'utf8'))
        input.end(function () {
          self.client.query(
@ -60,7 +60,7 @@ describe('async workflow', function () {
          self.client.query(
            'COPY test TO stdout',
            ok(done, function () {
              const output = self.client.getCopyStream()
              var output = self.client.getCopyStream()

              // pump the stream
              output.read()
@ -1,9 +1,9 @@
const Client = require('../')
var Client = require('../')
const assert = require('assert')
var assert = require('assert')

describe('cancel query', function () {
  it('works', function (done) {
    const client = new Client()
    var client = new Client()
    client.connectSync()
    client.query('SELECT pg_sleep(1000);', function (err) {
      assert(err instanceof Error)
@ -17,7 +17,7 @@ describe('cancel query', function () {
  })

  it('does not raise error if no active query', function (done) {
    const client = new Client()
    var client = new Client()
    client.connectSync()
    client.cancel(function (err) {
      assert.ifError(err)
@ -1,11 +1,11 @@
'use strict'

const Client = require('../')
var Client = require('../')
const assert = require('assert')
var assert = require('assert')

describe('connection errors', function () {
  it('raise error events', function (done) {
    const client = new Client()
    var client = new Client()
    client.connectSync()
    client.query('SELECT pg_terminate_backend(pg_backend_pid())', assert.fail)
    client.on('error', function (err) {
@ -1,9 +1,9 @@
const Client = require('../')
var Client = require('../')
const assert = require('assert')
var assert = require('assert')

describe('connection error', function () {
  it('doesnt segfault', function (done) {
    const client = new Client()
    var client = new Client()
    client.connect('asldgsdgasgdasdg', function (err) {
      assert(err)
      // calling error on a closed client was segfaulting
@ -15,7 +15,7 @@ describe('connection error', function () {

describe('reading while not connected', function () {
  it('does not seg fault but does throw execption', function () {
    const client = new Client()
    var client = new Client()
    assert.throws(function () {
      client.on('notification', function (msg) {})
    })
@ -1,5 +1,5 @@
const assert = require('assert')
var assert = require('assert')
const Client = require('../')
var Client = require('../')

describe('COPY FROM', function () {
  before(function (done) {
@ -12,17 +12,17 @@ describe('COPY FROM', function () {
  })

  it('works', function (done) {
    const client = this.client
    var client = this.client
    this.client.querySync('CREATE TEMP TABLE blah(name text, age int)')
    this.client.querySync('COPY blah FROM stdin')
    const stream = this.client.getCopyStream()
    var stream = this.client.getCopyStream()
    stream.write(Buffer.from('Brian\t32\n', 'utf8'))
    stream.write(Buffer.from('Aaron\t30\n', 'utf8'))
    stream.write(Buffer.from('Shelley\t28\n', 'utf8'))
    stream.end()

    stream.once('finish', function () {
      const rows = client.querySync('SELECT COUNT(*) FROM blah')
      var rows = client.querySync('SELECT COUNT(*) FROM blah')
      assert.equal(rows.length, 1)
      assert.equal(rows[0].count, 3)
      done()
@ -30,14 +30,14 @@ describe('COPY FROM', function () {
  })

  it('works with a callback passed to end', function (done) {
    const client = this.client
    var client = this.client
    this.client.querySync('CREATE TEMP TABLE boom(name text, age int)')
    this.client.querySync('COPY boom FROM stdin')
    const stream = this.client.getCopyStream()
    var stream = this.client.getCopyStream()
    stream.write(Buffer.from('Brian\t32\n', 'utf8'))
    stream.write(Buffer.from('Aaron\t30\n', 'utf8'), function () {
      stream.end(Buffer.from('Shelley\t28\n', 'utf8'), function () {
        const rows = client.querySync('SELECT COUNT(*) FROM boom')
        var rows = client.querySync('SELECT COUNT(*) FROM boom')
        assert.equal(rows.length, 1)
        assert.equal(rows[0].count, 3)
        done()
@ -1,7 +1,7 @@
const assert = require('assert')
var assert = require('assert')
const Client = require('../')
var Client = require('../')
const concat = require('concat-stream')
var concat = require('concat-stream')
const _ = require('lodash')
var _ = require('lodash')

describe('COPY TO', function () {
  before(function (done) {
@ -14,18 +14,18 @@ describe('COPY TO', function () {
  })

  it('works - basic check', function (done) {
    const limit = 1000
    var limit = 1000
    const qText = 'COPY (SELECT * FROM generate_series(0, ' + (limit - 1) + ')) TO stdout'
    var qText = 'COPY (SELECT * FROM generate_series(0, ' + (limit - 1) + ')) TO stdout'
    const self = this
    var self = this
    this.client.query(qText, function (err) {
      if (err) return done(err)
      const stream = self.client.getCopyStream()
      var stream = self.client.getCopyStream()
      // pump the stream for node v0.11.x
      stream.read()
      stream.pipe(
        concat(function (buff) {
          const res = buff.toString('utf8')
          var res = buff.toString('utf8')
          const expected = _.range(0, limit).join('\n') + '\n'
          var expected = _.range(0, limit).join('\n') + '\n'
          assert.equal(res, expected)
          done()
        })
@ -1,10 +1,10 @@
const Client = require('../')
var Client = require('../')
const ok = require('okay')
var ok = require('okay')
const assert = require('assert')
var assert = require('assert')

describe('Custom type parser', function () {
  it('is used by client', function (done) {
    const client = new Client({
    var client = new Client({
      types: {
        getTypeParser: function () {
          return function () {
@ -14,7 +14,7 @@ describe('Custom type parser', function () {
      },
    })
    client.connectSync()
    const rows = client.querySync('SELECT NOW() AS when')
    var rows = client.querySync('SELECT NOW() AS when')
    assert.equal(rows[0].when, 'blah')
    client.query(
      'SELECT NOW() as when',
@ -1,16 +1,16 @@
const Client = require('../')
var Client = require('../')
const assert = require('assert')
var assert = require('assert')

const checkDomain = function (domain, when) {
var checkDomain = function (domain, when) {
  assert(process.domain, 'Domain was lost after ' + when)
  assert.strictEqual(process.domain, domain, 'Domain switched after ' + when)
}

describe('domains', function () {
  it('remains bound after a query', function (done) {
    const domain = require('domain').create()
    var domain = require('domain').create() // eslint-disable-line
    domain.run(function () {
      const client = new Client()
      var client = new Client()
      client.connect(function () {
        checkDomain(domain, 'connection')
        client.query('SELECT NOW()', function () {
@ -1,5 +1,5 @@
const Client = require('../')
var Client = require('../')
const assert = require('assert')
var assert = require('assert')

describe('empty query', () => {
  it('has field metadata in result', (done) => {
@ -1,5 +1,5 @@
const Client = require('../')
var Client = require('../')
const assert = require('assert')
var assert = require('assert')

describe('huge async query', function () {
  before(function (done) {
@ -12,12 +12,12 @@ describe('huge async query', function () {
  })

  it('works', function (done) {
    const params = ['']
    var params = ['']
    const len = 100000
    var len = 100000
    for (let i = 0; i < len; i++) {
    for (var i = 0; i < len; i++) {
      params[0] += 'A'
    }
    const qText = "SELECT '" + params[0] + "'::text as my_text"
    var qText = "SELECT '" + params[0] + "'::text as my_text"
    this.client.query(qText, function (err, rows) {
      if (err) return done(err)
      assert.equal(rows[0].my_text.length, len)
@ -1,5 +1,5 @@
const Client = require('../')
var Client = require('../')
const assert = require('assert')
var assert = require('assert')

describe('connection', function () {
  it('works', function (done) {
@ -24,7 +24,7 @@ describe('connectSync', function () {
  })

  it('works with args', function () {
    const args = 'host=' + (process.env.PGHOST || 'localhost')
    var args = 'host=' + (process.env.PGHOST || 'localhost')
    Client().connectSync(args)
  })
@ -1,11 +1,11 @@
const Client = require('../')
var Client = require('../')
const async = require('async')
var async = require('async')
const ok = require('okay')
var ok = require('okay')

const execute = function (x, done) {
var execute = function (x, done) {
  const client = new Client()
  var client = new Client()
  client.connectSync()
  const query = function (n, cb) {
  var query = function (n, cb) {
    client.query('SELECT $1::int as num', [n], function (err) {
      cb(err)
    })
@ -1,16 +1,16 @@
const Client = require('../')
var Client = require('../')
const async = require('async')
var async = require('async')
const ok = require('okay')
var ok = require('okay')
const bytes = require('crypto').pseudoRandomBytes
var bytes = require('crypto').pseudoRandomBytes

describe('many connections', function () {
  describe('async', function () {
    const test = function (count, times) {
    var test = function (count, times) {
      it(`connecting ${count} clients ${times} times`, function (done) {
        this.timeout(200000)

        const connectClient = function (n, cb) {
        var connectClient = function (n, cb) {
          const client = new Client()
          var client = new Client()
          client.connect(
            ok(cb, function () {
              bytes(
@ -29,7 +29,7 @@ describe('many connections', function () {
          )
        }

        const run = function (n, cb) {
        var run = function (n, cb) {
          async.times(count, connectClient, cb)
        }
@ -1,14 +1,14 @@
const Client = require('../')
var Client = require('../')
const async = require('async')
var async = require('async')
const assert = require('assert')
var assert = require('assert')

describe('many errors', function () {
  it('functions properly without segfault', function (done) {
    const throwError = function (n, cb) {
    var throwError = function (n, cb) {
      const client = new Client()
      var client = new Client()
      client.connectSync()

      const doIt = function (n, cb) {
      var doIt = function (n, cb) {
        client.query('select asdfiasdf', function (err) {
          assert(err, 'bad query should emit an error')
          cb(null)
@ -1,5 +1,5 @@
|
|||||||
const Client = require('../')
|
var Client = require('../')
|
||||||
const assert = require('assert')
|
var assert = require('assert')
|
||||||
|
|
||||||
describe('multiple commands in a single query', function () {
|
describe('multiple commands in a single query', function () {
|
||||||
before(function (done) {
|
before(function (done) {
|
||||||
@ -22,7 +22,7 @@ describe('multiple commands in a single query', function () {
|
|||||||
})
|
})
|
||||||
|
|
||||||
it('inserts and reads at once', function (done) {
|
it('inserts and reads at once', function (done) {
|
||||||
let txt = 'CREATE TEMP TABLE boom(age int);'
|
var txt = 'CREATE TEMP TABLE boom(age int);'
|
||||||
txt += 'INSERT INTO boom(age) VALUES(10);'
|
txt += 'INSERT INTO boom(age) VALUES(10);'
|
||||||
txt += 'SELECT * FROM boom;'
|
txt += 'SELECT * FROM boom;'
|
||||||
this.client.query(txt, function (err, rows, results) {
|
this.client.query(txt, function (err, rows, results) {
|
||||||
|
|||||||
@ -1,5 +1,5 @@
|
|||||||
const Client = require('../')
|
var Client = require('../')
|
||||||
const assert = require('assert')
|
var assert = require('assert')
|
||||||
|
|
||||||
describe('multiple statements', () => {
|
describe('multiple statements', () => {
|
||||||
before(() => {
|
before(() => {
|
||||||
|
|||||||
@ -1,8 +1,8 @@
|
|||||||
const Client = require('../')
|
var Client = require('../')
|
||||||
const ok = require('okay')
|
var ok = require('okay')
|
||||||
|
|
||||||
const notify = function (channel, payload) {
|
var notify = function (channel, payload) {
|
||||||
const client = new Client()
|
var client = new Client()
|
||||||
client.connectSync()
|
client.connectSync()
|
||||||
client.querySync('NOTIFY ' + channel + ", '" + payload + "'")
|
client.querySync('NOTIFY ' + channel + ", '" + payload + "'")
|
||||||
client.end()
|
client.end()
|
||||||
@ -10,12 +10,12 @@ const notify = function (channel, payload) {
|
|||||||
|
|
||||||
describe('simple LISTEN/NOTIFY', function () {
|
describe('simple LISTEN/NOTIFY', function () {
|
||||||
before(function (done) {
|
before(function (done) {
|
||||||
const client = (this.client = new Client())
|
var client = (this.client = new Client())
|
||||||
client.connect(done)
|
client.connect(done)
|
||||||
})
|
})
|
||||||
|
|
||||||
it('works', function (done) {
|
it('works', function (done) {
|
||||||
const client = this.client
|
var client = this.client
|
||||||
client.querySync('LISTEN boom')
|
client.querySync('LISTEN boom')
|
||||||
client.on('notification', function (msg) {
|
client.on('notification', function (msg) {
|
||||||
done()
|
done()
|
||||||
@ -31,14 +31,14 @@ describe('simple LISTEN/NOTIFY', function () {
|
|||||||
if (!process.env.TRAVIS_CI) {
|
if (!process.env.TRAVIS_CI) {
|
||||||
describe('async LISTEN/NOTIFY', function () {
|
describe('async LISTEN/NOTIFY', function () {
|
||||||
before(function (done) {
|
before(function (done) {
|
||||||
const client = (this.client = new Client())
|
var client = (this.client = new Client())
|
||||||
client.connect(done)
|
client.connect(done)
|
||||||
})
|
})
|
||||||
|
|
||||||
it('works', function (done) {
|
it('works', function (done) {
|
||||||
const client = this.client
|
var client = this.client
|
||||||
let count = 0
|
var count = 0
|
||||||
const check = function () {
|
var check = function () {
|
||||||
count++
|
count++
|
||||||
if (count >= 2) return done()
|
if (count >= 2) return done()
|
||||||
}
|
}
|
||||||
|
|||||||
@ -1,13 +1,13 @@
|
|||||||
const Client = require('../')
|
var Client = require('../')
|
||||||
const ok = require('okay')
|
var ok = require('okay')
|
||||||
const async = require('async')
|
var async = require('async')
|
||||||
|
|
||||||
describe('async prepare', function () {
|
describe('async prepare', function () {
|
||||||
const run = function (n, cb) {
|
var run = function (n, cb) {
|
||||||
const client = new Client()
|
var client = new Client()
|
||||||
client.connectSync()
|
client.connectSync()
|
||||||
|
|
||||||
const exec = function (x, done) {
|
var exec = function (x, done) {
|
||||||
client.prepare('get_now' + x, 'SELECT NOW()', 0, done)
|
client.prepare('get_now' + x, 'SELECT NOW()', 0, done)
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -20,7 +20,7 @@ describe('async prepare', function () {
|
|||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
const t = function (n) {
|
var t = function (n) {
|
||||||
it('works for ' + n + ' clients', function (done) {
|
it('works for ' + n + ' clients', function (done) {
|
||||||
async.times(n, run, function (err) {
|
async.times(n, run, function (err) {
|
||||||
done(err)
|
done(err)
|
||||||
@ -28,17 +28,17 @@ describe('async prepare', function () {
|
|||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
for (let i = 0; i < 10; i++) {
|
for (var i = 0; i < 10; i++) {
|
||||||
t(i)
|
t(i)
|
||||||
}
|
}
|
||||||
})
|
})
|
||||||
|
|
||||||
describe('async execute', function () {
|
describe('async execute', function () {
|
||||||
const run = function (n, cb) {
|
var run = function (n, cb) {
|
||||||
const client = new Client()
|
var client = new Client()
|
||||||
client.connectSync()
|
client.connectSync()
|
||||||
client.prepareSync('get_now', 'SELECT NOW()', 0)
|
client.prepareSync('get_now', 'SELECT NOW()', 0)
|
||||||
const exec = function (x, cb) {
|
var exec = function (x, cb) {
|
||||||
client.execute('get_now', [], cb)
|
client.execute('get_now', [], cb)
|
||||||
}
|
}
|
||||||
async.timesSeries(
|
async.timesSeries(
|
||||||
@ -50,7 +50,7 @@ describe('async execute', function () {
|
|||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
const t = function (n) {
|
var t = function (n) {
|
||||||
it('works for ' + n + ' clients', function (done) {
|
it('works for ' + n + ' clients', function (done) {
|
||||||
async.times(n, run, function (err) {
|
async.times(n, run, function (err) {
|
||||||
done(err)
|
done(err)
|
||||||
@ -58,7 +58,7 @@ describe('async execute', function () {
|
|||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
for (let i = 0; i < 10; i++) {
|
for (var i = 0; i < 10; i++) {
|
||||||
t(i)
|
t(i)
|
||||||
}
|
}
|
||||||
})
|
})
|
||||||
|
|||||||
@ -1,7 +1,7 @@
|
|||||||
const Client = require('../')
|
var Client = require('../')
|
||||||
const assert = require('assert')
|
var assert = require('assert')
|
||||||
const async = require('async')
|
var async = require('async')
|
||||||
const ok = require('okay')
|
var ok = require('okay')
|
||||||
|
|
||||||
describe('async query', function () {
|
describe('async query', function () {
|
||||||
before(function (done) {
|
before(function (done) {
|
||||||
@ -24,7 +24,7 @@ describe('async query', function () {
|
|||||||
})
|
})
|
||||||
|
|
||||||
it('simple query works', function (done) {
|
it('simple query works', function (done) {
|
||||||
const runQuery = function (n, done) {
|
var runQuery = function (n, done) {
|
||||||
this.client.query('SELECT NOW() AS the_time', function (err, rows) {
|
this.client.query('SELECT NOW() AS the_time', function (err, rows) {
|
||||||
if (err) return done(err)
|
if (err) return done(err)
|
||||||
assert.equal(rows[0].the_time.getFullYear(), new Date().getFullYear())
|
assert.equal(rows[0].the_time.getFullYear(), new Date().getFullYear())
|
||||||
@ -35,14 +35,14 @@ describe('async query', function () {
|
|||||||
})
|
})
|
||||||
|
|
||||||
it('parameters work', function (done) {
|
it('parameters work', function (done) {
|
||||||
const runQuery = function (n, done) {
|
var runQuery = function (n, done) {
|
||||||
this.client.query('SELECT $1::text AS name', ['Brian'], done)
|
this.client.query('SELECT $1::text AS name', ['Brian'], done)
|
||||||
}.bind(this)
|
}.bind(this)
|
||||||
async.timesSeries(3, runQuery, done)
|
async.timesSeries(3, runQuery, done)
|
||||||
})
|
})
|
||||||
|
|
||||||
it('prepared, named statements work', function (done) {
|
it('prepared, named statements work', function (done) {
|
||||||
const client = this.client
|
var client = this.client
|
||||||
client.prepare('test', 'SELECT $1::text as name', 1, function (err) {
|
client.prepare('test', 'SELECT $1::text as name', 1, function (err) {
|
||||||
if (err) return done(err)
|
if (err) return done(err)
|
||||||
client.execute(
|
client.execute(
|
||||||
@ -80,7 +80,7 @@ describe('async query', function () {
|
|||||||
})
|
})
|
||||||
|
|
||||||
it('returns an error if there was a query error', function (done) {
|
it('returns an error if there was a query error', function (done) {
|
||||||
const runErrorQuery = function (n, done) {
|
var runErrorQuery = function (n, done) {
|
||||||
this.client.query('SELECT ALKJSFDSLFKJ', function (err) {
|
this.client.query('SELECT ALKJSFDSLFKJ', function (err) {
|
||||||
assert(err instanceof Error, 'Should return an error instance')
|
assert(err instanceof Error, 'Should return an error instance')
|
||||||
done()
|
done()
|
||||||
|
|||||||
@ -1,5 +1,5 @@
|
|||||||
const Client = require('../')
|
var Client = require('../')
|
||||||
const assert = require('assert')
|
var assert = require('assert')
|
||||||
|
|
||||||
describe('query sync', function () {
|
describe('query sync', function () {
|
||||||
before(function () {
|
before(function () {
|
||||||
@ -12,13 +12,13 @@ describe('query sync', function () {
|
|||||||
})
|
})
|
||||||
|
|
||||||
it('simple query works', function () {
|
it('simple query works', function () {
|
||||||
const rows = this.client.querySync('SELECT NOW() AS the_time')
|
var rows = this.client.querySync('SELECT NOW() AS the_time')
|
||||||
assert.equal(rows.length, 1)
|
assert.equal(rows.length, 1)
|
||||||
assert.equal(rows[0].the_time.getFullYear(), new Date().getFullYear())
|
assert.equal(rows[0].the_time.getFullYear(), new Date().getFullYear())
|
||||||
})
|
})
|
||||||
|
|
||||||
it('parameterized query works', function () {
|
it('parameterized query works', function () {
|
||||||
const rows = this.client.querySync('SELECT $1::text AS name', ['Brian'])
|
var rows = this.client.querySync('SELECT $1::text AS name', ['Brian'])
|
||||||
assert.equal(rows.length, 1)
|
assert.equal(rows.length, 1)
|
||||||
assert.equal(rows[0].name, 'Brian')
|
assert.equal(rows[0].name, 'Brian')
|
||||||
})
|
})
|
||||||
@ -37,11 +37,11 @@ describe('query sync', function () {
|
|||||||
it('prepared statement works', function () {
|
it('prepared statement works', function () {
|
||||||
this.client.prepareSync('test', 'SELECT $1::text as name', 1)
|
this.client.prepareSync('test', 'SELECT $1::text as name', 1)
|
||||||
|
|
||||||
const rows = this.client.executeSync('test', ['Brian'])
|
var rows = this.client.executeSync('test', ['Brian'])
|
||||||
assert.equal(rows.length, 1)
|
assert.equal(rows.length, 1)
|
||||||
assert.equal(rows[0].name, 'Brian')
|
assert.equal(rows[0].name, 'Brian')
|
||||||
|
|
||||||
const rows2 = this.client.executeSync('test', ['Aaron'])
|
var rows2 = this.client.executeSync('test', ['Aaron'])
|
||||||
assert.equal(rows2.length, 1)
|
assert.equal(rows2.length, 1)
|
||||||
assert.equal(rows2[0].name, 'Aaron')
|
assert.equal(rows2[0].name, 'Aaron')
|
||||||
})
|
})
|
||||||
@ -70,13 +70,13 @@ describe('query sync', function () {
|
|||||||
})
|
})
|
||||||
|
|
||||||
it('is still usable after an error', function () {
|
it('is still usable after an error', function () {
|
||||||
const rows = this.client.querySync('SELECT NOW()')
|
var rows = this.client.querySync('SELECT NOW()')
|
||||||
assert(rows, 'should have returned rows')
|
assert(rows, 'should have returned rows')
|
||||||
assert.equal(rows.length, 1)
|
assert.equal(rows.length, 1)
|
||||||
})
|
})
|
||||||
|
|
||||||
it('supports empty query', function () {
|
it('supports empty query', function () {
|
||||||
const rows = this.client.querySync('')
|
var rows = this.client.querySync('')
|
||||||
assert(rows, 'should return rows')
|
assert(rows, 'should return rows')
|
||||||
assert.equal(rows.length, 0, 'should return no rows')
|
assert.equal(rows.length, 0, 'should return no rows')
|
||||||
})
|
})
|
||||||
|
|||||||
@ -1,6 +1,6 @@
|
|||||||
const Client = require('../')
|
var Client = require('../')
|
||||||
const assert = require('assert')
|
var assert = require('assert')
|
||||||
const semver = require('semver')
|
var semver = require('semver')
|
||||||
|
|
||||||
describe('version', function () {
|
describe('version', function () {
|
||||||
it('is exported', function () {
|
it('is exported', function () {
|
||||||
|
|||||||
@ -1,11 +1,9 @@
|
|||||||
# pg-pool
|
# pg-pool
|
||||||
|
|
||||||
[](https://travis-ci.org/brianc/node-pg-pool)
|
[](https://travis-ci.org/brianc/node-pg-pool)
|
||||||
|
|
||||||
A connection pool for node-postgres
|
A connection pool for node-postgres
|
||||||
|
|
||||||
## install
|
## install
|
||||||
|
|
||||||
```sh
|
```sh
|
||||||
npm i pg-pool pg
|
npm i pg-pool pg
|
||||||
```
|
```
|
||||||
@ -17,17 +15,17 @@ npm i pg-pool pg
|
|||||||
to use pg-pool you must first create an instance of a pool
|
to use pg-pool you must first create an instance of a pool
|
||||||
|
|
||||||
```js
|
```js
|
||||||
const Pool = require('pg-pool')
|
var Pool = require('pg-pool')
|
||||||
|
|
||||||
// by default the pool uses the same
|
// by default the pool uses the same
|
||||||
// configuration as whatever `pg` version you have installed
|
// configuration as whatever `pg` version you have installed
|
||||||
const pool = new Pool()
|
var pool = new Pool()
|
||||||
|
|
||||||
// you can pass properties to the pool
|
// you can pass properties to the pool
|
||||||
// these properties are passed unchanged to both the node-postgres Client constructor
|
// these properties are passed unchanged to both the node-postgres Client constructor
|
||||||
// and the node-pool (https://github.com/coopernurse/node-pool) constructor
|
// and the node-pool (https://github.com/coopernurse/node-pool) constructor
|
||||||
// allowing you to fully configure the behavior of both
|
// allowing you to fully configure the behavior of both
|
||||||
const pool2 = new Pool({
|
var pool2 = new Pool({
|
||||||
database: 'postgres',
|
database: 'postgres',
|
||||||
user: 'brianc',
|
user: 'brianc',
|
||||||
password: 'secret!',
|
password: 'secret!',
|
||||||
@ -39,26 +37,25 @@ const pool2 = new Pool({
|
|||||||
maxUses: 7500, // close (and replace) a connection after it has been used 7500 times (see below for discussion)
|
maxUses: 7500, // close (and replace) a connection after it has been used 7500 times (see below for discussion)
|
||||||
})
|
})
|
||||||
|
|
||||||
// you can supply a custom client constructor
|
//you can supply a custom client constructor
|
||||||
// if you want to use the native postgres client
|
//if you want to use the native postgres client
|
||||||
const NativeClient = require('pg').native.Client
|
var NativeClient = require('pg').native.Client
|
||||||
const nativePool = new Pool({ Client: NativeClient })
|
var nativePool = new Pool({ Client: NativeClient })
|
||||||
|
|
||||||
// you can even pool pg-native clients directly
|
//you can even pool pg-native clients directly
|
||||||
const PgNativeClient = require('pg-native')
|
var PgNativeClient = require('pg-native')
|
||||||
const pgNativePool = new Pool({ Client: PgNativeClient })
|
var pgNativePool = new Pool({ Client: PgNativeClient })
|
||||||
```
|
```
|
||||||
|
|
||||||
##### Note:
|
##### Note:
|
||||||
|
|
||||||
The Pool constructor does not support passing a Database URL as the parameter. To use pg-pool on heroku, for example, you need to parse the URL into a config object. Here is an example of how to parse a Database URL.
|
The Pool constructor does not support passing a Database URL as the parameter. To use pg-pool on heroku, for example, you need to parse the URL into a config object. Here is an example of how to parse a Database URL.
|
||||||
|
|
||||||
```js
|
```js
|
||||||
const Pool = require('pg-pool')
|
const Pool = require('pg-pool');
|
||||||
const url = require('url')
|
const url = require('url')
|
||||||
|
|
||||||
const params = url.parse(process.env.DATABASE_URL)
|
const params = url.parse(process.env.DATABASE_URL);
|
||||||
const auth = params.auth.split(':')
|
const auth = params.auth.split(':');
|
||||||
|
|
||||||
const config = {
|
const config = {
|
||||||
user: auth[0],
|
user: auth[0],
|
||||||
@ -66,10 +63,10 @@ const config = {
|
|||||||
host: params.hostname,
|
host: params.hostname,
|
||||||
port: params.port,
|
port: params.port,
|
||||||
database: params.pathname.split('/')[1],
|
database: params.pathname.split('/')[1],
|
||||||
ssl: true,
|
ssl: true
|
||||||
}
|
};
|
||||||
|
|
||||||
const pool = new Pool(config)
|
const pool = new Pool(config);
|
||||||
|
|
||||||
/*
|
/*
|
||||||
Transforms, 'postgres://DBuser:secret@DBHost:#####/myDB', into
|
Transforms, 'postgres://DBuser:secret@DBHost:#####/myDB', into
|
||||||
@ -89,18 +86,16 @@ const pool = new Pool(config)
|
|||||||
pg-pool supports a fully promise-based api for acquiring clients
|
pg-pool supports a fully promise-based api for acquiring clients
|
||||||
|
|
||||||
```js
|
```js
|
||||||
const pool = new Pool()
|
var pool = new Pool()
|
||||||
pool.connect().then((client) => {
|
pool.connect().then(client => {
|
||||||
client
|
client.query('select $1::text as name', ['pg-pool']).then(res => {
|
||||||
.query('select $1::text as name', ['pg-pool'])
|
client.release()
|
||||||
.then((res) => {
|
console.log('hello from', res.rows[0].name)
|
||||||
client.release()
|
})
|
||||||
console.log('hello from', res.rows[0].name)
|
.catch(e => {
|
||||||
})
|
client.release()
|
||||||
.catch((e) => {
|
console.error('query error', e.message, e.stack)
|
||||||
client.release()
|
})
|
||||||
console.error('query error', e.message, e.stack)
|
|
||||||
})
|
|
||||||
})
|
})
|
||||||
```
|
```
|
||||||
|
|
||||||
@ -110,27 +105,27 @@ this ends up looking much nicer if you're using [co](https://github.com/tj/co) o
|
|||||||
|
|
||||||
```js
|
```js
|
||||||
// with async/await
|
// with async/await
|
||||||
;(async () => {
|
(async () => {
|
||||||
const pool = new Pool()
|
var pool = new Pool()
|
||||||
const client = await pool.connect()
|
var client = await pool.connect()
|
||||||
try {
|
try {
|
||||||
const result = await client.query('select $1::text as name', ['brianc'])
|
var result = await client.query('select $1::text as name', ['brianc'])
|
||||||
console.log('hello from', result.rows[0])
|
console.log('hello from', result.rows[0])
|
||||||
} finally {
|
} finally {
|
||||||
client.release()
|
client.release()
|
||||||
}
|
}
|
||||||
})().catch((e) => console.error(e.message, e.stack))
|
})().catch(e => console.error(e.message, e.stack))
|
||||||
|
|
||||||
// with co
|
// with co
|
||||||
co(function* () {
|
co(function * () {
|
||||||
const client = yield pool.connect()
|
var client = yield pool.connect()
|
||||||
try {
|
try {
|
||||||
const result = yield client.query('select $1::text as name', ['brianc'])
|
var result = yield client.query('select $1::text as name', ['brianc'])
|
||||||
console.log('hello from', result.rows[0])
|
console.log('hello from', result.rows[0])
|
||||||
} finally {
|
} finally {
|
||||||
client.release()
|
client.release()
|
||||||
}
|
}
|
||||||
}).catch((e) => console.error(e.message, e.stack))
|
}).catch(e => console.error(e.message, e.stack))
|
||||||
```
|
```
|
||||||
|
|
||||||
### your new favorite helper method
|
### your new favorite helper method
|
||||||
@ -138,32 +133,32 @@ co(function* () {
|
|||||||
because it's so common to just run a query and return the client to the pool afterward, pg-pool has this built-in:
|
because it's so common to just run a query and return the client to the pool afterward, pg-pool has this built-in:
|
||||||
|
|
||||||
```js
|
```js
|
||||||
const pool = new Pool()
|
var pool = new Pool()
|
||||||
const time = await pool.query('SELECT NOW()')
|
var time = await pool.query('SELECT NOW()')
|
||||||
const name = await pool.query('select $1::text as name', ['brianc'])
|
var name = await pool.query('select $1::text as name', ['brianc'])
|
||||||
console.log(name.rows[0].name, 'says hello at', time.rows[0].now)
|
console.log(name.rows[0].name, 'says hello at', time.rows[0].now)
|
||||||
```
|
```
|
||||||
|
|
||||||
you can also use a callback here if you'd like:
|
you can also use a callback here if you'd like:
|
||||||
|
|
||||||
```js
|
```js
|
||||||
const pool = new Pool()
|
var pool = new Pool()
|
||||||
pool.query('SELECT $1::text as name', ['brianc'], function (err, res) {
|
pool.query('SELECT $1::text as name', ['brianc'], function (err, res) {
|
||||||
console.log(res.rows[0].name) // brianc
|
console.log(res.rows[0].name) // brianc
|
||||||
})
|
})
|
||||||
```
|
```
|
||||||
|
|
||||||
**pro tip:** unless you need to run a transaction (which requires a single client for multiple queries) or you
|
__pro tip:__ unless you need to run a transaction (which requires a single client for multiple queries) or you
|
||||||
have some other edge case like [streaming rows](https://github.com/brianc/node-pg-query-stream) or using a [cursor](https://github.com/brianc/node-pg-cursor)
|
have some other edge case like [streaming rows](https://github.com/brianc/node-pg-query-stream) or using a [cursor](https://github.com/brianc/node-pg-cursor)
|
||||||
you should almost always just use `pool.query`. It's easy, it does the right thing :tm:, and won't ever forget to return
|
you should almost always just use `pool.query`. It's easy, it does the right thing :tm:, and won't ever forget to return
|
||||||
clients back to the pool after the query is done.
|
clients back to the pool after the query is done.
|
||||||
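For the transaction case called out above, here is a minimal sketch of checking out a single client from the pool (assuming a reachable database configured through the usual `PG*` environment variables; the `accounts` table and `transfer` helper are purely illustrative):

```js
const Pool = require('pg-pool')
const pool = new Pool()

// a transaction must run on one client, so check it out explicitly,
// run the statements on it, and always release the client when finished
async function transfer(fromId, toId) {
  const client = await pool.connect()
  try {
    await client.query('BEGIN')
    await client.query('UPDATE accounts SET balance = balance - 1 WHERE id = $1', [fromId])
    await client.query('UPDATE accounts SET balance = balance + 1 WHERE id = $1', [toId])
    await client.query('COMMIT')
  } catch (err) {
    await client.query('ROLLBACK')
    throw err
  } finally {
    client.release()
  }
}
```

Everything else — single statements with no shared session state — can go through `pool.query` as described above.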
|
|
||||||
### drop-in backwards compatible
|
### drop-in backwards compatible
|
||||||
|
|
||||||
pg-pool still and will always support the traditional callback api for acquiring a client. This is the exact API node-postgres has shipped with for years:
|
pg-pool still and will always support the traditional callback api for acquiring a client. This is the exact API node-postgres has shipped with for years:
|
||||||
|
|
||||||
```js
|
```js
|
||||||
const pool = new Pool()
|
var pool = new Pool()
|
||||||
pool.connect((err, client, done) => {
|
pool.connect((err, client, done) => {
|
||||||
if (err) return done(err)
|
if (err) return done(err)
|
||||||
|
|
||||||
@ -180,11 +175,11 @@ pool.connect((err, client, done) => {
|
|||||||
### shut it down
|
### shut it down
|
||||||
|
|
||||||
When you are finished with the pool, if all the clients are idle, the pool will close them after `config.idleTimeoutMillis` and your app
|
When you are finished with the pool, if all the clients are idle, the pool will close them after `config.idleTimeoutMillis` and your app
|
||||||
will shut down gracefully. If you don't want to wait for the timeout, you can end the pool as follows:
|
will shut down gracefully. If you don't want to wait for the timeout, you can end the pool as follows:
|
||||||
|
|
||||||
```js
|
```js
|
||||||
const pool = new Pool()
|
var pool = new Pool()
|
||||||
const client = await pool.connect()
|
var client = await pool.connect()
|
||||||
console.log(await client.query('select now()'))
|
console.log(await client.query('select now()'))
|
||||||
client.release()
|
client.release()
|
||||||
await pool.end()
|
await pool.end()
|
||||||
@ -192,14 +187,14 @@ await pool.end()
|
|||||||
|
|
||||||
### a note on instances
|
### a note on instances
|
||||||
|
|
||||||
The pool should be a **long-lived object** in your application. Generally you'll want to instantiate one pool when your app starts up and use the same instance of the pool throughout the lifetime of your application. If you are frequently creating a new pool within your code you likely don't have your pool initialization code in the correct place. Example:
|
The pool should be a __long-lived object__ in your application. Generally you'll want to instantiate one pool when your app starts up and use the same instance of the pool throughout the lifetime of your application. If you are frequently creating a new pool within your code you likely don't have your pool initialization code in the correct place. Example:
|
||||||
|
|
||||||
```js
|
```js
|
||||||
// assume this is a file in your program at ./your-app/lib/db.js
|
// assume this is a file in your program at ./your-app/lib/db.js
|
||||||
|
|
||||||
// correct usage: create the pool and let it live
|
// correct usage: create the pool and let it live
|
||||||
// 'globally' here, controlling access to it through exported methods
|
// 'globally' here, controlling access to it through exported methods
|
||||||
const pool = new pg.Pool()
|
var pool = new pg.Pool()
|
||||||
|
|
||||||
// this is the right way to export the query method
|
// this is the right way to export the query method
|
||||||
module.exports.query = (text, values) => {
|
module.exports.query = (text, values) => {
|
||||||
@ -213,18 +208,18 @@ module.exports.connect = () => {
|
|||||||
// every time we called 'connect' to get a new client?
|
// every time we called 'connect' to get a new client?
|
||||||
// that's a bad thing & results in creating an unbounded
|
// that's a bad thing & results in creating an unbounded
|
||||||
// number of pools & therefore connections
|
// number of pools & therefore connections
|
||||||
const aPool = new pg.Pool()
|
var aPool = new pg.Pool()
|
||||||
return aPool.connect()
|
return aPool.connect()
|
||||||
}
|
}
|
||||||
```
|
```
|
||||||
|
|
||||||
### events
|
### events
|
||||||
|
|
||||||
Every instance of a `Pool` is an event emitter. These instances emit the following events:
|
Every instance of a `Pool` is an event emitter. These instances emit the following events:
|
||||||
|
|
||||||
#### error
|
#### error
|
||||||
|
|
||||||
Emitted whenever an idle client in the pool encounters an error. This is common when your PostgreSQL server shuts down, reboots, or a network partition otherwise causes it to become unavailable while your pool has connected clients.
|
Emitted whenever an idle client in the pool encounters an error. This is common when your PostgreSQL server shuts down, reboots, or a network partition otherwise causes it to become unavailable while your pool has connected clients.
|
||||||
|
|
||||||
Example:
|
Example:
|
||||||
|
|
||||||
@ -234,7 +229,7 @@ const pool = new Pool()
|
|||||||
|
|
||||||
// attach an error handler to the pool for when a connected, idle client
|
// attach an error handler to the pool for when a connected, idle client
|
||||||
// receives an error by being disconnected, etc
|
// receives an error by being disconnected, etc
|
||||||
pool.on('error', function (error, client) {
|
pool.on('error', function(error, client) {
|
||||||
// handle this in the same way you would treat process.on('uncaughtException')
|
// handle this in the same way you would treat process.on('uncaughtException')
|
||||||
// it is supplied the error as well as the idle client which received the error
|
// it is supplied the error as well as the idle client which received the error
|
||||||
})
|
})
|
||||||
@ -242,7 +237,7 @@ pool.on('error', function (error, client) {
|
|||||||
|
|
||||||
#### connect
|
#### connect
|
||||||
|
|
||||||
Fired whenever the pool creates a **new** `pg.Client` instance and successfully connects it to the backend.
|
Fired whenever the pool creates a __new__ `pg.Client` instance and successfully connects it to the backend.
|
||||||
|
|
||||||
Example:
|
Example:
|
||||||
|
|
||||||
@ -250,21 +245,22 @@ Example:
|
|||||||
const Pool = require('pg-pool')
|
const Pool = require('pg-pool')
|
||||||
const pool = new Pool()
|
const pool = new Pool()
|
||||||
|
|
||||||
let count = 0
|
var count = 0
|
||||||
|
|
||||||
pool.on('connect', (client) => {
|
pool.on('connect', client => {
|
||||||
client.count = count++
|
client.count = count++
|
||||||
})
|
})
|
||||||
|
|
||||||
pool
|
pool
|
||||||
.connect()
|
.connect()
|
||||||
.then((client) => {
|
.then(client => {
|
||||||
return client
|
return client
|
||||||
.query('SELECT $1::int AS "clientCount"', [client.count])
|
.query('SELECT $1::int AS "clientCount"', [client.count])
|
||||||
.then((res) => console.log(res.rows[0].clientCount)) // outputs 0
|
.then(res => console.log(res.rows[0].clientCount)) // outputs 0
|
||||||
.then(() => client)
|
.then(() => client)
|
||||||
})
|
})
|
||||||
.then((client) => client.release())
|
.then(client => client.release())
|
||||||
|
|
||||||
```
|
```
|
||||||
|
|
||||||
#### acquire
|
#### acquire
|
||||||
@ -276,20 +272,20 @@ Example:
|
|||||||
This allows you to count the number of clients which have ever been acquired from the pool.
|
This allows you to count the number of clients which have ever been acquired from the pool.
|
||||||
|
|
||||||
```js
|
```js
|
||||||
const Pool = require('pg-pool')
|
var Pool = require('pg-pool')
|
||||||
const pool = new Pool()
|
var pool = new Pool()
|
||||||
|
|
||||||
let acquireCount = 0
|
var acquireCount = 0
|
||||||
pool.on('acquire', function (client) {
|
pool.on('acquire', function (client) {
|
||||||
acquireCount++
|
acquireCount++
|
||||||
})
|
})
|
||||||
|
|
||||||
let connectCount = 0
|
var connectCount = 0
|
||||||
pool.on('connect', function () {
|
pool.on('connect', function () {
|
||||||
connectCount++
|
connectCount++
|
||||||
})
|
})
|
||||||
|
|
||||||
for (let i = 0; i < 200; i++) {
|
for (var i = 0; i < 200; i++) {
|
||||||
pool.query('SELECT NOW()')
|
pool.query('SELECT NOW()')
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -297,11 +293,12 @@ setTimeout(function () {
|
|||||||
console.log('connect count:', connectCount) // output: connect count: 10
|
console.log('connect count:', connectCount) // output: connect count: 10
|
||||||
console.log('acquire count:', acquireCount) // output: acquire count: 200
|
console.log('acquire count:', acquireCount) // output: acquire count: 200
|
||||||
}, 100)
|
}, 100)
|
||||||
|
|
||||||
```
|
```
|
||||||
|
|
||||||
### environment variables
|
### environment variables
|
||||||
|
|
||||||
pg-pool & node-postgres support some of the same environment variables as `psql` supports. The most common are:
|
pg-pool & node-postgres support some of the same environment variables as `psql` supports. The most common are:
|
||||||
|
|
||||||
```
|
```
|
||||||
PGDATABASE=my_db
|
PGDATABASE=my_db
|
||||||
@ -311,19 +308,40 @@ PGPORT=5432
|
|||||||
PGSSLMODE=require
|
PGSSLMODE=require
|
||||||
```
|
```
|
||||||
|
|
||||||
Usually I will export these into my local environment via a `.env` file with environment settings or export them in `~/.bash_profile` or something similar. This way I get configurability which works with both the postgres suite of tools (`psql`, `pg_dump`, `pg_restore`) and node, I can vary the environment variables locally and in production, and it supports the concept of a [12-factor app](http://12factor.net/) out of the box.
|
Usually I will export these into my local environment via a `.env` file with environment settings or export them in `~/.bash_profile` or something similar. This way I get configurability which works with both the postgres suite of tools (`psql`, `pg_dump`, `pg_restore`) and node, I can vary the environment variables locally and in production, and it supports the concept of a [12-factor app](http://12factor.net/) out of the box.
|
||||||
|
|
||||||
|
## bring your own promise
|
||||||
|
|
||||||
|
In versions of node `<=0.12.x` there is no native promise implementation available globally. You can polyfill the promise globally like this:
|
||||||
|
|
||||||
|
```js
|
||||||
|
// first run `npm install promise-polyfill --save
|
||||||
|
if (typeof Promise == 'undefined') {
|
||||||
|
global.Promise = require('promise-polyfill')
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
You can use any other promise implementation you'd like. The pool also allows you to configure the promise implementation on a per-pool level:
|
||||||
|
|
||||||
|
```js
|
||||||
|
var bluebirdPool = new Pool({
|
||||||
|
Promise: require('bluebird')
|
||||||
|
})
|
||||||
|
```
|
||||||
|
|
||||||
|
__please note:__ in node `<=0.12.x` the pool will throw if you do not provide a promise constructor in one of the two ways mentioned above. In node `>=4.0.0` the pool will use the native promise implementation by default; however, the two methods above still allow you to "bring your own."
|
||||||
|
|
||||||
## maxUses and read-replica autoscaling (e.g. AWS Aurora)
|
## maxUses and read-replica autoscaling (e.g. AWS Aurora)
|
||||||
|
|
||||||
The maxUses config option can help an application instance rebalance load against a replica set that has been auto-scaled after the connection pool is already full of healthy connections.
|
The maxUses config option can help an application instance rebalance load against a replica set that has been auto-scaled after the connection pool is already full of healthy connections.
|
||||||
|
|
||||||
The mechanism here is that a connection is considered "expended" after it has been acquired and released `maxUses` number of times. Depending on the load on your system, this means there will be an approximate time in which any given connection will live, thus creating a window for rebalancing.
|
The mechanism here is that a connection is considered "expended" after it has been acquired and released `maxUses` number of times. Depending on the load on your system, this means there will be an approximate time in which any given connection will live, thus creating a window for rebalancing.
|
||||||
|
|
||||||
Imagine a scenario where you have 10 app instances providing an API running against a replica cluster of 3 that are accessed via a round-robin DNS entry. Each instance runs a connection pool size of 20. With an ambient load of 50 requests per second, the connection pool will likely fill up in a few minutes with healthy connections.
|
Imagine a scenario where you have 10 app instances providing an API running against a replica cluster of 3 that are accessed via a round-robin DNS entry. Each instance runs a connection pool size of 20. With an ambient load of 50 requests per second, the connection pool will likely fill up in a few minutes with healthy connections.
|
||||||
|
|
||||||
If you have weekly bursts of traffic which peak at 1,000 requests per second, you might want to grow your replicas to 10 during this period. Without setting `maxUses`, the new replicas will not be adopted by the app servers without an intervention -- namely, restarting each in turn in order to build up new connection pools that are balanced against all the replicas. Adding additional app server instances will help to some extent because they will adopt all the replicas in an even way, but the initial app servers will continue to focus additional load on the original replicas.
|
If you have weekly bursts of traffic which peak at 1,000 requests per second, you might want to grow your replicas to 10 during this period. Without setting `maxUses`, the new replicas will not be adopted by the app servers without an intervention -- namely, restarting each in turn in order to build up new connection pools that are balanced against all the replicas. Adding additional app server instances will help to some extent because they will adopt all the replicas in an even way, but the initial app servers will continue to focus additional load on the original replicas.
|
||||||
|
|
||||||
This is where the `maxUses` configuration option comes into play. Setting `maxUses` to 7500 will ensure that over a period of 30 minutes or so the new replicas will be adopted as the pre-existing connections are closed and replaced with new ones, thus creating a window for eventual balance.
|
This is where the `maxUses` configuration option comes into play. Setting `maxUses` to 7500 will ensure that over a period of 30 minutes or so the new replicas will be adopted as the pre-existing connections are closed and replaced with new ones, thus creating a window for eventual balance.
|
||||||
|
|
||||||
You'll want to test based on your own scenarios, but one way to make a first guess at `maxUses` is to identify an acceptable window for rebalancing and then solve for the value:
|
You'll want to test based on your own scenarios, but one way to make a first guess at `maxUses` is to identify an acceptable window for rebalancing and then solve for the value:
|
||||||
|
|
||||||
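The formula the README gives next falls outside this hunk, so as a stand-in here is one illustrative back-of-the-envelope estimate built only from the numbers in the scenario above (all values are assumptions for the example, not part of the diff):

```js
// hypothetical first guess at maxUses — not the README's exact formula:
// estimate how many queries one connection serves per second at peak,
// then multiply by the rebalancing window you are willing to accept.
const peakRequestsPerSecond = 1000 // fleet-wide burst load from the scenario
const appInstances = 10
const poolSizePerInstance = 20
const rebalanceWindowSeconds = 25 * 60 // roughly the "30 minutes or so" above

const perConnectionRps = peakRequestsPerSecond / appInstances / poolSizePerInstance // 5
const maxUses = perConnectionRps * rebalanceWindowSeconds // 5 * 1500 = 7500
console.log(maxUses) // 7500, matching the value used in the example config
```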
@ -344,7 +362,7 @@ To run tests clone the repo, `npm i` in the working dir, and then run `npm test`
|
|||||||
|
|
||||||
## contributions
|
## contributions
|
||||||
|
|
||||||
I love contributions. Please make sure they have tests, and submit a PR. If you're not sure whether the issue is worth it or will be accepted, it never hurts to open an issue to begin the conversation. If you're interested in keeping up with node-postgres related stuff, you can follow me on twitter at [@briancarlson](https://twitter.com/briancarlson) - I generally announce any noteworthy updates there.
|
I love contributions. Please make sure they have tests, and submit a PR. If you're not sure whether the issue is worth it or will be accepted, it never hurts to open an issue to begin the conversation. If you're interested in keeping up with node-postgres related stuff, you can follow me on twitter at [@briancarlson](https://twitter.com/briancarlson) - I generally announce any noteworthy updates there.
|
||||||
|
|
||||||
## license
|
## license
|
||||||
|
|
||||||
|
|||||||
@ -87,7 +87,6 @@ class Pool extends EventEmitter {
|
|||||||
}
|
}
|
||||||
|
|
||||||
this.options.max = this.options.max || this.options.poolSize || 10
|
this.options.max = this.options.max || this.options.poolSize || 10
|
||||||
this.options.min = this.options.min || 0
|
|
||||||
this.options.maxUses = this.options.maxUses || Infinity
|
this.options.maxUses = this.options.maxUses || Infinity
|
||||||
this.options.allowExitOnIdle = this.options.allowExitOnIdle || false
|
this.options.allowExitOnIdle = this.options.allowExitOnIdle || false
|
||||||
this.options.maxLifetimeSeconds = this.options.maxLifetimeSeconds || 0
|
this.options.maxLifetimeSeconds = this.options.maxLifetimeSeconds || 0
|
||||||
@ -112,10 +111,6 @@ class Pool extends EventEmitter {
|
|||||||
return this._clients.length >= this.options.max
|
return this._clients.length >= this.options.max
|
||||||
}
|
}
|
||||||
|
|
||||||
_isAboveMin() {
|
|
||||||
return this._clients.length > this.options.min
|
|
||||||
}
|
|
||||||
|
|
||||||
_pulseQueue() {
|
_pulseQueue() {
|
||||||
this.log('pulse queue')
|
this.log('pulse queue')
|
||||||
if (this.ended) {
|
if (this.ended) {
|
||||||
@ -161,7 +156,7 @@ class Pool extends EventEmitter {
|
|||||||
throw new Error('unexpected condition')
|
throw new Error('unexpected condition')
|
||||||
}
|
}
|
||||||
|
|
||||||
_remove(client, callback) {
|
_remove(client) {
|
||||||
const removed = removeWhere(this._idle, (item) => item.client === client)
|
const removed = removeWhere(this._idle, (item) => item.client === client)
|
||||||
|
|
||||||
if (removed !== undefined) {
|
if (removed !== undefined) {
|
||||||
@ -169,14 +164,8 @@ class Pool extends EventEmitter {
|
|||||||
}
|
}
|
||||||
|
|
||||||
this._clients = this._clients.filter((c) => c !== client)
|
this._clients = this._clients.filter((c) => c !== client)
|
||||||
const context = this
|
client.end()
|
||||||
client.end(() => {
|
this.emit('remove', client)
|
||||||
context.emit('remove', client)
|
|
||||||
|
|
||||||
if (typeof callback === 'function') {
|
|
||||||
callback()
|
|
||||||
}
|
|
||||||
})
|
|
||||||
}
|
}
|
||||||
|
|
||||||
connect(cb) {
|
connect(cb) {
|
||||||
@ -357,25 +346,26 @@ class Pool extends EventEmitter {
|
|||||||
if (client._poolUseCount >= this.options.maxUses) {
|
if (client._poolUseCount >= this.options.maxUses) {
|
||||||
this.log('remove expended client')
|
this.log('remove expended client')
|
||||||
}
|
}
|
||||||
|
this._remove(client)
|
||||||
return this._remove(client, this._pulseQueue.bind(this))
|
this._pulseQueue()
|
||||||
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
const isExpired = this._expired.has(client)
|
const isExpired = this._expired.has(client)
|
||||||
if (isExpired) {
|
if (isExpired) {
|
||||||
this.log('remove expired client')
|
this.log('remove expired client')
|
||||||
this._expired.delete(client)
|
this._expired.delete(client)
|
||||||
return this._remove(client, this._pulseQueue.bind(this))
|
this._remove(client)
|
||||||
|
this._pulseQueue()
|
||||||
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
// idle timeout
|
// idle timeout
|
||||||
let tid
|
let tid
|
||||||
if (this.options.idleTimeoutMillis && this._isAboveMin()) {
|
if (this.options.idleTimeoutMillis) {
|
||||||
tid = setTimeout(() => {
|
tid = setTimeout(() => {
|
||||||
if (this._isAboveMin()) {
|
this.log('remove idle client')
|
||||||
this.log('remove idle client')
|
this._remove(client)
|
||||||
this._remove(client, this._pulseQueue.bind(this))
|
|
||||||
}
|
|
||||||
}, this.options.idleTimeoutMillis)
|
}, this.options.idleTimeoutMillis)
|
||||||
|
|
||||||
if (this.options.allowExitOnIdle) {
|
if (this.options.allowExitOnIdle) {
|
||||||
|
|||||||
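The side of the index.js diff above that carries `_isAboveMin` implements a minimum pool size: idle clients are only closed while the pool holds more than `options.min` clients. A minimal usage sketch, assuming that behavior (connection details come from the usual environment variables):

```js
const Pool = require('pg-pool')

// keep at least two warm connections even after the idle timeout fires;
// idle clients above `min` are still reaped after idleTimeoutMillis
const pool = new Pool({
  min: 2,
  max: 10,
  idleTimeoutMillis: 10000,
})

pool
  .query('SELECT NOW() AS now')
  .then((res) => console.log('connected at', res.rows[0].now))
  .catch((err) => console.error('query failed', err.message))
```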
@ -1,6 +1,6 @@
|
|||||||
{
|
{
|
||||||
"name": "pg-pool",
|
"name": "pg-pool",
|
||||||
"version": "3.10.1",
|
"version": "3.8.1-alpha.1",
|
||||||
"description": "Connection pool for node-postgres",
|
"description": "Connection pool for node-postgres",
|
||||||
"main": "index.js",
|
"main": "index.js",
|
||||||
"exports": {
|
"exports": {
|
||||||
@ -30,15 +30,16 @@
|
|||||||
"author": "Brian M. Carlson",
|
"author": "Brian M. Carlson",
|
||||||
"license": "MIT",
|
"license": "MIT",
|
||||||
"bugs": {
|
"bugs": {
|
||||||
"url": "https://github.com/brianc/node-postgres/issues"
|
"url": "https://github.com/brianc/node-pg-pool/issues"
|
||||||
},
|
},
|
||||||
"homepage": "https://github.com/brianc/node-postgres/tree/master/packages/pg-pool#readme",
|
"homepage": "https://github.com/brianc/node-pg-pool#readme",
|
||||||
"devDependencies": {
|
"devDependencies": {
|
||||||
"bluebird": "3.7.2",
|
"bluebird": "3.7.2",
|
||||||
"co": "4.6.0",
|
"co": "4.6.0",
|
||||||
"expect.js": "0.3.1",
|
"expect.js": "0.3.1",
|
||||||
"lodash": "^4.17.11",
|
"lodash": "^4.17.11",
|
||||||
"mocha": "^10.5.2"
|
"mocha": "^10.5.2",
|
||||||
|
"pg-cursor": "^2.13.2-alpha.1"
|
||||||
},
|
},
|
||||||
"peerDependencies": {
|
"peerDependencies": {
|
||||||
"pg": ">=8.0"
|
"pg": ">=8.0"
|
||||||
|
|||||||
42
packages/pg-pool/test/bring-your-own-promise.js
Normal file
@ -0,0 +1,42 @@
|
|||||||
|
'use strict'
|
||||||
|
const co = require('co')
|
||||||
|
const expect = require('expect.js')
|
||||||
|
|
||||||
|
const describe = require('mocha').describe
|
||||||
|
const it = require('mocha').it
|
||||||
|
const BluebirdPromise = require('bluebird')
|
||||||
|
|
||||||
|
const Pool = require('../')
|
||||||
|
|
||||||
|
const checkType = (promise) => {
|
||||||
|
expect(promise).to.be.a(BluebirdPromise)
|
||||||
|
return promise.catch((e) => undefined)
|
||||||
|
}
|
||||||
|
|
||||||
|
describe('Bring your own promise', function () {
|
||||||
|
it(
|
||||||
|
'uses supplied promise for operations',
|
||||||
|
co.wrap(function* () {
|
||||||
|
const pool = new Pool({ Promise: BluebirdPromise })
|
||||||
|
const client1 = yield checkType(pool.connect())
|
||||||
|
client1.release()
|
||||||
|
yield checkType(pool.query('SELECT NOW()'))
|
||||||
|
const client2 = yield checkType(pool.connect())
|
||||||
|
// TODO - make sure pg supports BYOP as well
|
||||||
|
client2.release()
|
||||||
|
yield checkType(pool.end())
|
||||||
|
})
|
||||||
|
)
|
||||||
|
|
||||||
|
it(
|
||||||
|
'uses promises in errors',
|
||||||
|
co.wrap(function* () {
|
||||||
|
const pool = new Pool({ Promise: BluebirdPromise, port: 48484 })
|
||||||
|
yield checkType(pool.connect())
|
||||||
|
yield checkType(pool.end())
|
||||||
|
yield checkType(pool.connect())
|
||||||
|
yield checkType(pool.query())
|
||||||
|
yield checkType(pool.end())
|
||||||
|
})
|
||||||
|
)
|
||||||
|
})
|
||||||
@ -57,7 +57,7 @@ describe('connection timeout', () => {
|
|||||||
function* () {
|
function* () {
|
||||||
const errors = []
|
const errors = []
|
||||||
const pool = new Pool({ connectionTimeoutMillis: 1, port: this.port, host: 'localhost' })
|
const pool = new Pool({ connectionTimeoutMillis: 1, port: this.port, host: 'localhost' })
|
||||||
for (let i = 0; i < 15; i++) {
|
for (var i = 0; i < 15; i++) {
|
||||||
try {
|
try {
|
||||||
yield pool.connect()
|
yield pool.connect()
|
||||||
} catch (e) {
|
} catch (e) {
|
||||||
|
|||||||
@ -37,14 +37,4 @@ describe('pool ending', () => {
|
|||||||
expect(res.rows[0].name).to.equal('brianc')
|
expect(res.rows[0].name).to.equal('brianc')
|
||||||
})
|
})
|
||||||
)
|
)
|
||||||
|
|
||||||
it('pool.end() - finish pending queries', async () => {
|
|
||||||
const pool = new Pool({ max: 20 })
|
|
||||||
let completed = 0
|
|
||||||
for (let x = 1; x <= 20; x++) {
|
|
||||||
pool.query('SELECT $1::text as name', ['brianc']).then(() => completed++)
|
|
||||||
}
|
|
||||||
await pool.end()
|
|
||||||
expect(completed).to.equal(20)
|
|
||||||
})
|
|
||||||
})
|
})
|
||||||
|
|||||||
@ -198,7 +198,7 @@ describe('pool error handling', function () {
|
|||||||
co.wrap(function* () {
|
co.wrap(function* () {
|
||||||
const pool = new Pool({ max: 1 })
|
const pool = new Pool({ max: 1 })
|
||||||
const errors = []
|
const errors = []
|
||||||
for (let i = 0; i < 20; i++) {
|
for (var i = 0; i < 20; i++) {
|
||||||
try {
|
try {
|
||||||
yield pool.query('invalid sql')
|
yield pool.query('invalid sql')
|
||||||
} catch (err) {
|
} catch (err) {
|
||||||
|
|||||||
@ -28,19 +28,11 @@ describe('idle timeout', () => {
|
|||||||
const pool = new Pool({ idleTimeoutMillis: 10 })
|
const pool = new Pool({ idleTimeoutMillis: 10 })
|
||||||
const clientA = yield pool.connect()
|
const clientA = yield pool.connect()
|
||||||
const clientB = yield pool.connect()
|
const clientB = yield pool.connect()
|
||||||
clientA.release() // this will put clientA in the idle pool
|
clientA.release()
|
||||||
clientB.release(new Error()) // an error will cause clientB to be removed immediately
|
clientB.release(new Error())
|
||||||
|
|
||||||
const removal = new Promise((resolve) => {
|
const removal = new Promise((resolve) => {
|
||||||
pool.on('remove', (client) => {
|
pool.on('remove', () => {
|
||||||
// clientB's stream may take a while to close, so we may get a remove
|
|
||||||
// event for it
|
|
||||||
// we only want to handle the remove event for clientA when it times out
|
|
||||||
// due to being idle
|
|
||||||
if (client !== clientA) {
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
expect(pool.idleCount).to.equal(0)
|
expect(pool.idleCount).to.equal(0)
|
||||||
expect(pool.totalCount).to.equal(0)
|
expect(pool.totalCount).to.equal(0)
|
||||||
resolve()
|
resolve()
|
||||||
@ -62,8 +54,8 @@ describe('idle timeout', () => {
|
|||||||
co.wrap(function* () {
|
co.wrap(function* () {
|
||||||
const pool = new Pool({ idleTimeoutMillis: 1 })
|
const pool = new Pool({ idleTimeoutMillis: 1 })
|
||||||
const results = []
|
const results = []
|
||||||
for (let i = 0; i < 20; i++) {
|
for (var i = 0; i < 20; i++) {
|
||||||
const query = pool.query('SELECT NOW()')
|
let query = pool.query('SELECT NOW()')
|
||||||
expect(pool.idleCount).to.equal(0)
|
expect(pool.idleCount).to.equal(0)
|
||||||
expect(pool.totalCount).to.equal(1)
|
expect(pool.totalCount).to.equal(1)
|
||||||
results.push(yield query)
|
results.push(yield query)
|
||||||
@ -80,8 +72,8 @@ describe('idle timeout', () => {
|
|||||||
co.wrap(function* () {
|
co.wrap(function* () {
|
||||||
const pool = new Pool({ idleTimeoutMillis: 1 })
|
const pool = new Pool({ idleTimeoutMillis: 1 })
|
||||||
const results = []
|
const results = []
|
||||||
for (let i = 0; i < 20; i++) {
|
for (var i = 0; i < 20; i++) {
|
||||||
const client = yield pool.connect()
|
let client = yield pool.connect()
|
||||||
expect(pool.totalCount).to.equal(1)
|
expect(pool.totalCount).to.equal(1)
|
||||||
expect(pool.idleCount).to.equal(0)
|
expect(pool.idleCount).to.equal(0)
|
||||||
yield wait(10)
|
yield wait(10)
|
||||||
|
|||||||
@ -32,7 +32,7 @@ describe('lifetime timeout', () => {
|
|||||||
'can remove expired clients and recreate them',
|
'can remove expired clients and recreate them',
|
||||||
co.wrap(function* () {
|
co.wrap(function* () {
|
||||||
const pool = new Pool({ maxLifetimeSeconds: 1 })
|
const pool = new Pool({ maxLifetimeSeconds: 1 })
|
||||||
const query = pool.query('SELECT pg_sleep(1.4)')
|
let query = pool.query('SELECT pg_sleep(1.4)')
|
||||||
expect(pool.expiredCount).to.equal(0)
|
expect(pool.expiredCount).to.equal(0)
|
||||||
expect(pool.totalCount).to.equal(1)
|
expect(pool.totalCount).to.equal(1)
|
||||||
yield query
|
yield query
|
||||||
|
|||||||
@ -55,88 +55,4 @@ describe('pool size of 1', () => {
|
|||||||
return yield pool.end()
|
return yield pool.end()
|
||||||
})
|
})
|
||||||
)
|
)
|
||||||
|
|
||||||
it(
|
|
||||||
'does not remove clients when at or below min',
|
|
||||||
co.wrap(function* () {
|
|
||||||
const pool = new Pool({ max: 1, min: 1, idleTimeoutMillis: 10 })
|
|
||||||
const client = yield pool.connect()
|
|
||||||
client.release()
|
|
||||||
yield new Promise((resolve) => setTimeout(resolve, 20))
|
|
||||||
expect(pool.idleCount).to.equal(1)
|
|
||||||
return yield pool.end()
|
|
||||||
})
|
|
||||||
)
|
|
||||||
|
|
||||||
it(
|
|
||||||
'does remove clients when at or below min if maxUses is reached',
|
|
||||||
co.wrap(function* () {
|
|
||||||
const pool = new Pool({ max: 1, min: 1, idleTimeoutMillis: 10, maxUses: 1 })
|
|
||||||
const client = yield pool.connect()
|
|
||||||
client.release()
|
|
||||||
yield new Promise((resolve) => setTimeout(resolve, 20))
|
|
||||||
expect(pool.idleCount).to.equal(0)
|
|
||||||
return yield pool.end()
|
|
||||||
})
|
|
||||||
)
|
|
||||||
|
|
||||||
it(
|
|
||||||
'does remove clients when at or below min if maxLifetimeSeconds is reached',
|
|
||||||
co.wrap(function* () {
|
|
||||||
const pool = new Pool({ max: 1, min: 1, idleTimeoutMillis: 10, maxLifetimeSeconds: 1 })
|
|
||||||
const client = yield pool.connect()
|
|
||||||
client.release()
|
|
||||||
yield new Promise((resolve) => setTimeout(resolve, 1020))
|
|
||||||
expect(pool.idleCount).to.equal(0)
|
|
||||||
return yield pool.end()
|
|
||||||
})
|
|
||||||
)
|
|
||||||
})
|
|
||||||
|
|
||||||
describe('pool size of 2', () => {
|
|
||||||
it(
|
|
||||||
'does not remove clients when at or below min',
|
|
||||||
co.wrap(function* () {
|
|
||||||
const pool = new Pool({ max: 2, min: 2, idleTimeoutMillis: 10 })
|
|
||||||
const client = yield pool.connect()
|
|
||||||
const client2 = yield pool.connect()
|
|
||||||
client.release()
|
|
||||||
yield new Promise((resolve) => setTimeout(resolve, 20))
|
|
||||||
client2.release()
|
|
||||||
yield new Promise((resolve) => setTimeout(resolve, 20))
|
|
||||||
expect(pool.idleCount).to.equal(2)
|
|
||||||
return yield pool.end()
|
|
||||||
})
|
|
||||||
)
|
|
||||||
|
|
||||||
it(
|
|
||||||
'does remove clients when above min',
|
|
||||||
co.wrap(function* () {
|
|
||||||
const pool = new Pool({ max: 2, min: 1, idleTimeoutMillis: 10 })
|
|
||||||
const client = yield pool.connect()
|
|
||||||
const client2 = yield pool.connect()
|
|
||||||
client.release()
|
|
||||||
yield new Promise((resolve) => setTimeout(resolve, 20))
|
|
||||||
client2.release()
|
|
||||||
yield new Promise((resolve) => setTimeout(resolve, 20))
|
|
||||||
expect(pool.idleCount).to.equal(1)
|
|
||||||
return yield pool.end()
|
|
||||||
})
|
|
||||||
)
|
|
||||||
})
|
|
||||||
|
|
||||||
describe('pool min size', () => {
|
|
||||||
it(
|
|
||||||
'does not drop below min when clients released at same time',
|
|
||||||
co.wrap(function* () {
|
|
||||||
const pool = new Pool({ max: 2, min: 1, idleTimeoutMillis: 10 })
|
|
||||||
const client = yield pool.connect()
|
|
||||||
const client2 = yield pool.connect()
|
|
||||||
client.release()
|
|
||||||
client2.release()
|
|
||||||
yield new Promise((resolve) => setTimeout(resolve, 20))
|
|
||||||
expect(pool.idleCount).to.equal(1)
|
|
||||||
return yield pool.end()
|
|
||||||
})
|
|
||||||
)
|
|
||||||
})
|
})
|
||||||
|
|||||||
@ -1,5 +1,5 @@
|
|||||||
// ESM wrapper for pg-protocol
|
// ESM wrapper for pg-protocol
|
||||||
import * as protocol from '../dist/index.js'
|
import protocol from '../dist/index.js'
|
||||||
|
|
||||||
// Re-export all the properties
|
// Re-export all the properties
|
||||||
export const DatabaseError = protocol.DatabaseError
|
export const DatabaseError = protocol.DatabaseError
|
||||||
|
|||||||
@ -1,6 +1,6 @@
|
|||||||
{
|
{
|
||||||
"name": "pg-protocol",
|
"name": "pg-protocol",
|
||||||
"version": "1.10.3",
|
"version": "1.8.1-alpha.0",
|
||||||
"description": "The postgres client/server binary protocol, implemented in TypeScript",
|
"description": "The postgres client/server binary protocol, implemented in TypeScript",
|
||||||
"main": "dist/index.js",
|
"main": "dist/index.js",
|
||||||
"types": "dist/index.d.ts",
|
"types": "dist/index.d.ts",
|
||||||
@ -9,9 +9,7 @@
|
|||||||
"import": "./esm/index.js",
|
"import": "./esm/index.js",
|
||||||
"require": "./dist/index.js",
|
"require": "./dist/index.js",
|
||||||
"default": "./dist/index.js"
|
"default": "./dist/index.js"
|
||||||
},
|
}
|
||||||
"./dist/*": "./dist/*.js",
|
|
||||||
"./dist/*.js": "./dist/*.js"
|
|
||||||
},
|
},
|
||||||
"license": "MIT",
|
"license": "MIT",
|
||||||
"devDependencies": {
|
"devDependencies": {
|
||||||
|
|||||||
Some files were not shown because too many files have changed in this diff.