Compare commits

...

107 Commits

Author SHA1 Message Date
Nigro Simone
8d493f3b55
feat: add Node.js version 25 to CI workflow (#3556)
* feat: add Node.js version 25 to CI workflow

https://nodejs.org/en/blog/release/v25.0.0

* chore: update CI workflow to exclude Node.js version '23'

Remove Node.js version '23' from CI workflow.
2025-10-22 16:20:07 +02:00
Charmander
917478397b
test: Replace dead row length check with similar shape check (#3532)
These object-mode rows don’t include a `length`. Dead code since 721cf56eb331bd35243c1425095b98cf09adf814 (“Rows are now associative arrays rather than straight arrays.”)?
2025-08-22 01:42:17 +00:00
Prasad
f5c90a5484
docs: fix typo in pool docs (#3530) 2025-08-20 06:48:50 -05:00
dependabot[bot]
65bc3d4884
build(deps-dev): bump node-gyp from 11.2.0 to 11.3.0 (#3526)
Bumps [node-gyp](https://github.com/nodejs/node-gyp) from 11.2.0 to 11.3.0.
- [Release notes](https://github.com/nodejs/node-gyp/releases)
- [Changelog](https://github.com/nodejs/node-gyp/blob/main/CHANGELOG.md)
- [Commits](https://github.com/nodejs/node-gyp/compare/v11.2.0...v11.3.0)

---
updated-dependencies:
- dependency-name: node-gyp
  dependency-version: 11.3.0
  dependency-type: direct:development
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-08-08 15:04:02 -05:00
James Vaughan
a6c1084db1
Update bugs and homepage links for pg-pool (#3528) 2025-08-05 13:32:06 -05:00
Mathias Bockwoldt
1b2bedc9c8
feat(pg-connection-string): throw correct error when URL parsing fails
Fixes #3513
2025-07-28 08:48:29 -04:00
Brian C
27a2754787
Deprecations (#3510)
* Make active query a private prop

* Make client.queryQueue private (with deprecation)

* Deprecate some legacy features

* Update packages/pg/lib/client.js

Co-authored-by: Charmander <~@charmander.me>

---------

Co-authored-by: Brian Carlson <brian.carlson@getcruise.com>
Co-authored-by: Charmander <~@charmander.me>
2025-07-16 21:52:00 -05:00
Barry Hagan
01fadd93d7
fix #3508 - recheck min client count during idle callback (#3509) 2025-07-10 14:30:48 -05:00
Brian C
43b8692019
Add tests for export (#3507)
* Add tests for export

* Fix
2025-07-08 22:40:13 -05:00
Tommy D. Rossi
fab87b28af
Add package.json export to pg-cloudflare (#3506)
* Update package.json

* Update package.json

* Update package.json
2025-07-08 18:08:40 -05:00
Herman J. Radtke III
c8fb1e9261
feat(pg-connection-string): warn if non-standard ssl options are used (#3473)
* feat(pg-connection-string): warn if non-standard ssl options are used

In preparation for v3.0.0, we start warning users to be explicit about
the sslmode they want.

* Update index.js
2025-07-07 17:33:22 -05:00
dependabot[bot]
54e0424991
build(deps-dev): bump eslint-plugin-prettier from 5.2.6 to 5.5.1 (#3502)
Bumps [eslint-plugin-prettier](https://github.com/prettier/eslint-plugin-prettier) from 5.2.6 to 5.5.1.
- [Release notes](https://github.com/prettier/eslint-plugin-prettier/releases)
- [Changelog](https://github.com/prettier/eslint-plugin-prettier/blob/main/CHANGELOG.md)
- [Commits](https://github.com/prettier/eslint-plugin-prettier/compare/v5.2.6...v5.5.1)

---
updated-dependencies:
- dependency-name: eslint-plugin-prettier
  dependency-version: 5.5.1
  dependency-type: direct:development
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-07-02 11:57:38 -05:00
dependabot[bot]
235d7ad5e2
build(deps-dev): bump node-gyp from 10.2.0 to 11.2.0 (#3501)
Bumps [node-gyp](https://github.com/nodejs/node-gyp) from 10.2.0 to 11.2.0.
- [Release notes](https://github.com/nodejs/node-gyp/releases)
- [Changelog](https://github.com/nodejs/node-gyp/blob/main/CHANGELOG.md)
- [Commits](https://github.com/nodejs/node-gyp/compare/v10.2.0...v11.2.0)

---
updated-dependencies:
- dependency-name: node-gyp
  dependency-version: 11.2.0
  dependency-type: direct:development
  update-type: version-update:semver-major
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-07-02 11:57:28 -05:00
Brian Carlson
8f8e7315e8 Publish
- pg-bundler-test@0.0.2
 - pg-cloudflare@1.2.7
 - pg-cursor@2.15.3
 - pg-esm-test@1.2.3
 - pg-native@3.5.2
 - pg-protocol@1.10.3
 - pg-query-stream@4.10.3
 - pg@8.16.3
2025-06-27 09:51:27 -05:00
Brian C
f0d1c4868a
Update package exports to support more formats (#3500) 2025-06-27 09:51:01 -05:00
Nigro Simone
0ad6c9b71e
fix: typos (#3499)
* fix: typos

* fix: typo
2025-06-26 17:28:43 -05:00
James Opstad
54964ecff7
Replace cloudflare export condition in pg-cloudflare with workerd (#3498)
* Replace cloudflare export condition with workerd

* Add note about Cloudflare Vite plugin
2025-06-26 16:42:08 -05:00
Brian Carlson
1a25d12817 Publish
- pg-cursor@2.15.2
 - pg-esm-test@1.2.2
 - pg-protocol@1.10.2
 - pg-query-stream@4.10.2
 - pg@8.16.2
2025-06-19 16:15:21 -05:00
Herman J. Radtke III
e00aac1398
Fixes for binary protocol array handling (#3494)
* fix(pg-protocol): specify number of result column format codes

Fixes a bug when binary format. We must specify both:

- the number of result column format codes
- the result column format codes

The text format case was working by accident. When using text format, the
intention was to set the format code to 0. Instead, the number of result
column format codes was set to 0. This is valid because it indicates
that all result columns should use the default format (text).

When using binary format, the intention was to set the format code to 1.
Instead, we set the number of result column format codes to 1.
Importantly, we never set a result column format code. This caused an
error: 'insufficient data left in message'. 

We now always set the number of result column format codes to '1'. The
value of '1' has special meaning:

> or one, in which case the specified format code is applied to all result columns (if any)

We then set a single column format code based on whether the connection
(or query) is set to binary.


Fixes #3487

* fix(pg): use a Buffer when parsing binary

The call to parseArray was not working as expected because the value was
being sent as a string instead of a Buffer. The binary parsers in
pg-types all assume the incoming value is a Buffer.
2025-06-19 15:37:04 -05:00
Brian Carlson
cd877a5761 Publish
- pg-bundler-test@0.0.1
 - pg-cloudflare@1.2.6
 - pg-connection-string@2.9.1
 - pg-cursor@2.15.1
 - pg-esm-test@1.2.1
 - pg-native@3.5.1
 - pg-pool@3.10.1
 - pg-protocol@1.10.1
 - pg-query-stream@4.10.1
 - pg@8.16.1
2025-06-18 10:46:26 -05:00
Brian C
607efc1b6e
Bump engines since we do not test lower than 16 anymore (#3490) 2025-06-16 22:07:55 -05:00
Joan Miquel Torres
14dc8dd100
Use performance.now() instead of Date.now()... (#3483)
* Use performance.now() instead of Date.now()...

  * Wherever applicable (measuring performance, not time).
  * Fallback to support node < 16.0.0 (perf_hook not globally exposed)
  * Fallback to Date.now() for node < 8.5.0

  ✔ Tests passed with node > 16.0.0 (22.12.0)
  ✕ Couldn't pass with node prior to 16.0.0, but not due to these changes.

https://nodejs.org/docs/latest-v8.x/api/perf_hooks.html#perf_hooks_performance_now
https://w3c.github.io/hr-time/

* Yarn prettier

* More lint fixes.

* Removed polyfill code for node <16

They are no longer supported:

https://github.com/brianc/node-postgres/pull/3483#issuecomment-2967119692
2025-06-14 16:38:51 -05:00
Brian C
8608fb84c8
fix: do not concatenate an array if passed to escapeLiteral. (#3489) 2025-06-14 16:36:32 -05:00
Tommy D. Rossi
114a03e887
Add package.json in exports (#3488) 2025-06-14 16:32:26 -05:00
Brian C
7ab5923fad
Redact input URL string to prevent console printing (#3486) 2025-06-13 22:59:32 -05:00
Henry Cai
6b016b37d4
fix(pg-cloudflare): use conditional export to support bundlers that don't know about cloudflare:sockets (#3482) 2025-06-12 09:31:57 -05:00
Johan Fagerberg
0ada72e608
docs: add maxLifetimeSeconds to Pool docs (#3484)
Wording taken from https://github.com/brianc/node-postgres/issues/3298#issuecomment-2305207256
2025-06-12 09:24:03 -05:00
dependabot[bot]
03642abec1
build(deps-dev): bump semver from 4.3.6 to 7.7.2 (#3477)
Bumps [semver](https://github.com/npm/node-semver) from 4.3.6 to 7.7.2.
- [Release notes](https://github.com/npm/node-semver/releases)
- [Changelog](https://github.com/npm/node-semver/blob/main/CHANGELOG.md)
- [Commits](https://github.com/npm/node-semver/compare/v4.3.6...v7.7.2)

---
updated-dependencies:
- dependency-name: semver
  dependency-version: 7.7.2
  dependency-type: direct:development
  update-type: version-update:semver-major
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-06-05 12:40:05 -05:00
dependabot[bot]
a4888ee028
build(deps-dev): bump eslint-plugin-promise from 6.2.0 to 7.2.1 (#3479)
Bumps [eslint-plugin-promise](https://github.com/eslint-community/eslint-plugin-promise) from 6.2.0 to 7.2.1.
- [Release notes](https://github.com/eslint-community/eslint-plugin-promise/releases)
- [Changelog](https://github.com/eslint-community/eslint-plugin-promise/blob/main/CHANGELOG.md)
- [Commits](https://github.com/eslint-community/eslint-plugin-promise/compare/v6.2.0...v7.2.1)

---
updated-dependencies:
- dependency-name: eslint-plugin-promise
  dependency-version: 7.2.1
  dependency-type: direct:development
  update-type: version-update:semver-major
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-06-05 12:39:52 -05:00
Herman J. Radtke III
411869df65
pool.end() resolves before the last pool.query() (#3461)
* Pass callback to client.end

* Add test for pool.end method

* fix: remove excessive _pulseQueue call

* fix: context problem

* fix: test resolve should be called when the last client is removed

* fix: wait for pool.end()

Because when you don't pass a callback to .end() it always returns a promise

* fix: handle idle timeout test data race

---------

Co-authored-by: Asadbek Raimov <asadbekraimov642@gmail.com>
2025-05-29 17:12:24 -05:00
Herman J. Radtke III
26ace0ac8f fix(pg-connection-string): remove .nyc_output 2025-05-27 09:32:54 -04:00
Ryan Staples
3e7bd2f681
Change instanceof(Date) to util.types.isDate(Date) (#2862)
* change instanceof to isDate

* use both methods to check for valid Date

* add test for PR 2862

* use only isDate(date) in place of instanceof Date

* Extend compatibility of `isDate` use back to Node 8

* Clean up test

---------

Co-authored-by: Charmander <~@charmander.me>
Reviewed-by: Charmander <~@charmander.me>
2025-05-26 22:37:13 +00:00
Noritaka Kobayashi
9cf2184d09
refactor: remove unused import & fix typos in docs (#3471) 2025-05-25 11:30:11 -05:00
Noritaka Kobayashi
c9353acbc0
chore: fix typos in README (#3470) 2025-05-25 09:56:54 -05:00
Brian C
26fa32c133
Update theme.config.js (#3468) 2025-05-15 19:40:03 -05:00
Lorentz Lasson
a47c480055 fix sslcompat remains 2025-05-14 13:50:38 -04:00
Brian Carlson
abff18d6f9 Publish
- pg-connection-string@2.9.0
 - pg-cursor@2.15.0
 - pg-esm-test@1.2.0
 - pg-native@3.5.0
 - pg-pool@3.10.0
 - pg-protocol@1.10.0
 - pg-query-stream@4.10.0
 - pg@8.16.0
2025-05-12 11:49:59 -05:00
Brian Carlson
e43d4b7eb6 Update changelog 2025-05-12 11:49:41 -05:00
Herman J. Radtke III
e8fde07227
chore: document keepAliveInitialDelayMillis option (#3460) 2025-05-12 01:15:55 -05:00
Brian C
27f34c6aee
Bump libpq & nan version dep for node24 compat (#3462) 2025-05-12 01:07:01 -05:00
maltewirz
e30b41d481
Update connecting.mdx (#3266)
Changes tested myself and inspired by documentation on the AWS RDS signer https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-rds-signer/
2025-05-10 22:23:27 -05:00
Herman J. Radtke III
6be857e9d3 chore(pg-connection-string): use tsx for tests 2025-05-06 06:49:24 -04:00
dependabot[bot]
9bfc967e91
build(deps-dev): bump eslint-config-prettier from 9.1.0 to 10.1.2 (#3453)
Bumps [eslint-config-prettier](https://github.com/prettier/eslint-config-prettier) from 9.1.0 to 10.1.2.
- [Release notes](https://github.com/prettier/eslint-config-prettier/releases)
- [Changelog](https://github.com/prettier/eslint-config-prettier/blob/main/CHANGELOG.md)
- [Commits](https://github.com/prettier/eslint-config-prettier/compare/v9.1.0...v10.1.2)

---
updated-dependencies:
- dependency-name: eslint-config-prettier
  dependency-version: 10.1.2
  dependency-type: direct:development
  update-type: version-update:semver-major
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-05-01 12:33:50 -05:00
dependabot[bot]
79351af32e
build(deps-dev): bump eslint-plugin-prettier from 5.2.5 to 5.2.6 (#3455)
Bumps [eslint-plugin-prettier](https://github.com/prettier/eslint-plugin-prettier) from 5.2.5 to 5.2.6.
- [Release notes](https://github.com/prettier/eslint-plugin-prettier/releases)
- [Changelog](https://github.com/prettier/eslint-plugin-prettier/blob/main/CHANGELOG.md)
- [Commits](https://github.com/prettier/eslint-plugin-prettier/compare/v5.2.5...v5.2.6)

---
updated-dependencies:
- dependency-name: eslint-plugin-prettier
  dependency-version: 5.2.6
  dependency-type: direct:development
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-05-01 12:33:37 -05:00
dependabot[bot]
52ec1293f2
build(deps-dev): bump @cloudflare/vitest-pool-workers (#3456)
Bumps [@cloudflare/vitest-pool-workers](https://github.com/cloudflare/workers-sdk/tree/HEAD/packages/vitest-pool-workers) from 0.8.12 to 0.8.23.
- [Release notes](https://github.com/cloudflare/workers-sdk/releases)
- [Changelog](https://github.com/cloudflare/workers-sdk/blob/main/packages/vitest-pool-workers/CHANGELOG.md)
- [Commits](https://github.com/cloudflare/workers-sdk/commits/@cloudflare/vitest-pool-workers@0.8.23/packages/vitest-pool-workers)

---
updated-dependencies:
- dependency-name: "@cloudflare/vitest-pool-workers"
  dependency-version: 0.8.23
  dependency-type: direct:development
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-05-01 12:33:24 -05:00
Brian C
2647f7ecaf
Create README.md (#3451) 2025-04-30 20:20:35 -05:00
Brian C
980752ce00
Hard-pin 3rd party dependencies (#3450)
Co-authored-by: Brian Carlson <brian.carlson@getcruise.com>
2025-04-29 13:34:14 -05:00
francesco
264e30f424
docs: require to import (#3448) 2025-04-29 11:06:32 -05:00
Davide Violante
f528433e9d
chore: minor eslint fixes, reenable no-unused vars (#3445) 2025-04-28 11:59:26 -05:00
Davide Violante
93aa1ba2f1
chore: replace var with const in md files (#3446) 2025-04-28 09:54:46 +00:00
Brian C
9e7a5d97cf
Replace all usages of var with let / const - eslint auto applied (#3444) 2025-04-27 11:50:33 -05:00
Brian C
7a009381e6
Update pool docs (#3442)
* Update docs for pool

* Letting my robot overlords proofread for me

---------

Co-authored-by: Brian Carlson <brian.carlson@getcruise.com>
2025-04-25 15:21:44 -05:00
Harish T
ff40638868
Support Min connection pool parameter #3009 (#3438)
* Support Min connection pool parameter #3009

* Remove extraneous change

* streamline code
2025-04-25 14:41:02 -05:00
Brian C
229de3006b
Remove circular dep for test dependency - I think this is managed with lerna and not needed (#3441)
Co-authored-by: Brian Carlson <brian.carlson@getcruise.com>
2025-04-25 14:34:31 -05:00
Brian Carlson
81d875fe09 Publish
- pg-cursor@2.14.6
 - pg-esm-test@1.1.6
 - pg-pool@3.9.6
 - pg-query-stream@4.9.6
 - pg@8.15.6
2025-04-25 14:09:08 -05:00
Brian C
39e134d0b5
Make pg-cursor compatible with older versions of pg (#3440)
Co-authored-by: Brian Carlson <brian.carlson@getcruise.com>
2025-04-24 15:12:48 -05:00
Brian C
0c1629bea2
Update docs - add ESM info
* Update docs - start

* Add logo & discord

* Start updating docs for esm style imports

* Update docs with logo & info on pooling

* Update more import statements

---------

Co-authored-by: Brian Carlson <brian.carlson@getcruise.com>
2025-04-23 16:46:21 -05:00
Brian Carlson
56e2862577 Publish
- pg-cloudflare@1.2.5
 - pg-connection-string@2.8.5
 - pg-cursor@2.14.5
 - pg-esm-test@1.1.5
 - pg-native@3.4.5
 - pg-pool@3.9.5
 - pg-protocol@1.9.5
 - pg-query-stream@4.9.5
 - pg@8.15.5
2025-04-23 13:48:25 -05:00
Brian Carlson
2919f28d31 Manually advance patch versions to re-align with lerna 2025-04-23 13:48:13 -05:00
Brian Carlson
36fd0a61db Only allow publish from master 2025-04-23 13:46:01 -05:00
Brian C
6ab0c4608c
More tests & exports from pg-protocol (#3436) 2025-04-23 13:22:31 -05:00
Brian C
ad3e6035f4
Expose TypeOverrides in esm & cjs from root of package (#3433) 2025-04-23 12:37:52 -05:00
Brian C
e8280d58f6
Add tests for issues fixed in #3428 (#3432) 2025-04-23 11:53:54 -05:00
Breno A.
2da196cc1f
fix(exports): resolve issues with module imports and requires (#3428)
* fix(exports): resolve issues with module imports and requires

* fix(pg-native): add support for lib module resolution in package.json
2025-04-23 11:39:35 -05:00
Meghan Denny
bbc84b2690 pg-connection-string: fix 'parse' type signature 2025-04-23 10:43:34 -04:00
Brian Carlson
fb25f7bdb3 Publish
- pg-connection-string@2.8.1
 - pg-cursor@2.14.1
 - pg-esm-test@1.1.1
 - pg-pool@3.9.1
 - pg-query-stream@4.9.1
 - pg@8.15.1
2025-04-22 15:27:31 -05:00
Herman J. Radtke III
557716d1fa
fix(pg-connection-string): export default from esm wrapper (#3425)
Prior to v2.8.0, the parse function was the default when using import.
When esm compatibility was introduced in v2.8.0, there was no default
specified. This broke existing code that relied on that default.

Fixes #3424
2025-04-22 15:24:48 -05:00
Herman J. Radtke III
9cc7d8eb94
docs: add missing parameters on pg.Client Config (#3422)
- client_encoding
- fallback_application_name
- options
2025-04-22 11:04:17 -05:00
Brian Carlson
9ec9e5f58d Publish
- pg-cloudflare@1.2.0
 - pg-connection-string@2.8.0
 - pg-cursor@2.14.0
 - pg-esm-test@1.1.0
 - pg-native@3.4.0
 - pg-pool@3.9.0
 - pg-protocol@1.9.0
 - pg-query-stream@4.9.0
 - pg@8.15.0
2025-04-22 10:55:24 -05:00
Brian Carlson
60817cd914 Update changelog 2025-04-22 10:55:10 -05:00
Brian C
940479bc4b
Add esm exports (#3423)
* build: add esm exports

* fix: add defaults as per arethetypeswrong report

* fix: add missing types

* lint

* Fix broken tests

* Add (failing) test for esm compat

* Begin moving files to proper extension and adding tests

* Add tests for connection-string and fix cloudflare module type and esm compat

* Add query-stream and cursor as esm exports

* Update PR copilot review

* Publish

 - pg-cloudflare@1.1.2-alpha.0
 - pg-connection-string@2.7.1-alpha.0
 - pg-cursor@2.13.2-alpha.0
 - pg-esm-test@1.0.1-alpha.0
 - pg-native@3.3.1-alpha.0
 - pg-pool@3.8.1-alpha.0
 - pg-protocol@1.8.1-alpha.0
 - pg-query-stream@4.8.2-alpha.0
 - pg@8.14.2-alpha.0

* More cf compat work

* Publish

 - pg-cloudflare@1.1.2-alpha.1
 - pg-cursor@2.13.2-alpha.1
 - pg-esm-test@1.0.1-alpha.1
 - pg-pool@3.8.1-alpha.1
 - pg-query-stream@4.8.2-alpha.1
 - pg@8.14.2-alpha.1

* Add more cf compat and update tests

* Make tests pass - update exports for esm

* Use env vars for test connection in cf tests

* Fix lint

* Fit vitest into existing legacy framework

* Skip worker tests on node below 18

* Revert doc changes for now

* Remove legacy worker test in favor of vitest

---------

Co-authored-by: Luca Ban <mesqueeb@users.noreply.github.com>
2025-04-22 10:53:22 -05:00
Patrick Malouin
81ec0635fc
feat(pg-connection-string): get closer to libpq semantics for sslmode
Allows user to change the semantics of `sslmode` to be as close as possible to libpq semantics. The opt in can be enabled using `useLibpqCompat` parsing option or the non-standard `uselibpqcompat` query string parameter.

---------

Co-authored-by: Charmander <~@charmander.me>
Co-authored-by: Herman J. Radtke III <herman@hermanradtke.com>
2025-04-20 08:13:33 -04:00
Charmander
d8fb2f9c35
test: Avoid silencing errors from idle timeout test’s child process (#3419)
This hid the error fixed in #3263, for example.
2025-04-13 07:18:33 +00:00
Alex Anderson
9b510373a6
eslint: enable recommended ruleset (#3263) 2025-04-12 08:17:33 +00:00
Herman J. Radtke III
5a8b1a7d24 feat(pg-connection-string): ClientConfig helper functions
Two new functions are introduced to make it easy for TypeScript
users to use a PostgreSQL connection string with pg Client.

Fixes #2280
2025-04-08 20:06:28 -04:00
Charmander
a9fd34fb42
Revert "docs: fix bug in transaction example (#3414)"
This reverts commit dcb4257898d1d8d37110a4364922206dad33f9fe.

The change doesn’t fix the bug it claims to (`finally` always runs) and introduces a resource leak if the `ROLLBACK` query fails. The related bug that a broken client can be returned to the pool remains unaffected either way.
2025-04-03 15:10:49 -07:00
dependabot[bot]
a5d03a0774
build(deps-dev): bump eslint-plugin-prettier from 5.1.2 to 5.2.5 (#3412)
Bumps [eslint-plugin-prettier](https://github.com/prettier/eslint-plugin-prettier) from 5.1.2 to 5.2.5.
- [Release notes](https://github.com/prettier/eslint-plugin-prettier/releases)
- [Changelog](https://github.com/prettier/eslint-plugin-prettier/blob/main/CHANGELOG.md)
- [Commits](https://github.com/prettier/eslint-plugin-prettier/compare/v5.1.2...v5.2.5)

---
updated-dependencies:
- dependency-name: eslint-plugin-prettier
  dependency-type: direct:development
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-04-01 15:38:44 -05:00
lucaesposto
dcb4257898
docs: fix bug in transaction example (#3414)
Throwing an error will not allow "finally" execution, so client.release() must be invoked before it.
2025-04-01 15:38:14 -05:00
Brian Carlson
a3fefe3183 Update sponsorship info 2025-03-22 10:58:51 -05:00
Brian Carlson
477f812984 Publish
- pg-cursor@2.13.1
 - pg-query-stream@4.8.1
 - pg@8.14.1
2025-03-17 10:37:54 -05:00
Brian C
c53a472a60
Batch network packets in prepared statements (#3402)
* Batch network packets in prepared statements

Fixes #3340
Fixes #3325
Fixes #3098

* Fix type-o but mostly retrigger build for CF pages preview
2025-03-17 10:28:46 -05:00
Brian Carlson
f7c92e487c Publish
- pg-cursor@2.13.0
 - pg-native@3.3.0
 - pg-pool@3.8.0
 - pg-protocol@1.8.0
 - pg-query-stream@4.8.0
 - pg@8.14.0
2025-03-11 10:25:26 -05:00
Brian Carlson
1c45dd2828 Update changelog 2025-03-11 10:25:12 -05:00
Alex Anderson
b823a23f67
simple-query-tests: remove no-op test (#3390)
Originally skipped in daa370a61

Co-authored-by: alxndrsn <alxndrsn>
2025-03-10 13:14:01 -05:00
George MacKerron
b4022aa5c0
Add support for SCRAM-SHA-256-PLUS i.e. channel binding (#3356)
* Added support for SCRAM-SHA-256-PLUS i.e. channel binding

* Requested tweaks to channel binding

* Additional tweaks to channel binding

* Fixed lint complaints

* Update packages/pg/lib/crypto/sasl.js

Co-authored-by: Charmander <~@charmander.me>

* Update packages/pg/lib/crypto/sasl.js

Co-authored-by: Charmander <~@charmander.me>

* Update packages/pg/lib/client.js

Co-authored-by: Charmander <~@charmander.me>

* Tweaks to channel binding

* Now using homegrown certificate signature algorithm identification

* Update ssl.mdx with channel binding changes

* Allow for config object being undefined when assigning enableChannelBinding

* Fixed a test failing on an updated error message

* Removed - from hash names like SHA-256 for legacy crypto (Node 14 and below)

* Removed packageManager key from package.json

* Added some SASL/channel binding unit tests

* Added a unit test for continueSession to check expected SASL session data

* Modify tests: don't require channel binding (which cannot then work) if not using SSL

---------

Co-authored-by: Charmander <~@charmander.me>
2025-03-10 13:13:32 -05:00
francesco
1876f2000a
Add "unref" to timers (#3396)
* chore: add unref to timer

see https://nodejs.org/api/timers.html#timeoutunref

* chore: add unref to timer

see https://nodejs.org/api/timers.html#timeoutunref

* fix: lint

* fix: lint

* fix: lint

* fix: lint
2025-03-10 12:16:57 -05:00
Alex Anderson
88311c17a5
test-helper: re-add missing function spit() (#3248)
It looks like this was removed in d615ebee177ed57c7a7df861b1db675c9e0ebb0f while it still had references to it.

Reviewed-by: Charmander <~@charmander.me>
2025-02-20 01:34:23 +00:00
Nigro Simone
5a6734429c
fix(devcontainer): upgrade node to version 20 (#3385)
* fix(devcontainer): upgrade node to version 20

* chore: since Windows and Linux use different default line endings, Git may report a large number of modified files that have no differences aside from their line endings.
2025-02-13 17:22:50 -06:00
dependabot[bot]
79ee1ad15f
build(deps-dev): bump workerd from 1.20240529.0 to 1.20250129.0 (#3366)
Bumps [workerd](https://github.com/cloudflare/workerd) from 1.20240529.0 to 1.20250129.0.
- [Release notes](https://github.com/cloudflare/workerd/releases)
- [Changelog](https://github.com/cloudflare/workerd/blob/main/RELEASE.md)
- [Commits](https://github.com/cloudflare/workerd/compare/v1.20240529.0...v1.20250129.0)

---
updated-dependencies:
- dependency-name: workerd
  dependency-type: direct:development
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-02-13 09:46:54 -06:00
dependabot[bot]
1230c86ba9
build(deps-dev): bump lodash from 2.4.2 to 4.17.21 (#3367)
Bumps [lodash](https://github.com/lodash/lodash) from 2.4.2 to 4.17.21.
- [Release notes](https://github.com/lodash/lodash/releases)
- [Commits](https://github.com/lodash/lodash/compare/2.4.2...4.17.21)

---
updated-dependencies:
- dependency-name: lodash
  dependency-type: direct:development
  update-type: version-update:semver-major
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-02-13 09:42:45 -06:00
Charmander
582cdaf919
Assorted test fixes and cleanup (#3383)
* test: Actually test split messages in split message parsing test

* cleanup: Fix spelling in tests

* test: Wait on asynchronous tests

* cleanup: Remove unused parameter from test method `BufferList#getByteLength`

If someone did want this functionality, it would be better to use addition separate from the method anyway.

* cleanup: Remove unused test function `BufferList.concat`
2025-02-13 09:42:33 -06:00
Brian Carlson
5755b78386 Publish
- pg-cursor@2.12.3
 - pg-native@3.2.2
 - pg-query-stream@4.7.3
 - pg@8.13.3
2025-02-12 18:29:56 -06:00
francesco
f6e829c564
fix: revert PR #3379 (avoid useless spread) (#3382)
* Update result.js

* Update build-result.js

* fix: lint

* fix: lint
2025-02-12 18:28:05 -06:00
Brian Carlson
732580782f Publish
- pg-cursor@2.12.2
 - pg-native@3.2.1
 - pg-pool@3.7.1
 - pg-protocol@1.7.1
 - pg-query-stream@4.7.2
 - pg@8.13.2
2025-02-11 10:19:52 -06:00
Brian Carlson
2dc9e7f2fd Update lockfile 2025-02-11 10:19:26 -06:00
francesco
5f6a6e6596
perf(pg): avoid useless spread (#3379)
* perf(pg): avoid useless spread

* fix: missing initialization
2025-02-11 10:13:15 -06:00
francesco
f1586932fd
perf(pg-native): pre-shaped result rows (#3369)
* perf(pg-native): pre-shaped result rows

Porting on pg-native https://github.com/brianc/node-postgres/issues/3042

* fix: lint

* perf(pg-native): avoid useless spread
2025-02-11 10:12:10 -06:00
francesco
0792f0904a
Update libpq to 1.8.14 and fix #3332 (#3364)
* Update libpq to 1.8.14 and fix #3332

Update libpq to 1.8.14 and fix #3332

* Add node 23
2025-02-11 10:11:25 -06:00
Andy Young
95bec690b3
fix(pg-pool): don't throw on readonly .message e.g. ErrorEvent, preserve original exception (#3374)
* fix(pg-pool): preserve original error on connection timeout, don't throw on readonly .message e.g. ErrorEvent

Fixes https://github.com/brianc/node-postgres/issues/3373

* Fix lint
2025-02-10 16:27:55 -06:00
francesco
751e7410d9
perf(utils): fast prepareValue (#3370)
* perf(utils): fast prepareValue

This PR adds a performance improvement to prepareValue for non-objects by skipping useless conditions

* fix: lint

* fix: case of undefined

* fix: review
2025-02-10 16:26:33 -06:00
francesco
f10f569a8a
perf(utils): replace pad with String.padStart (#3371) 2025-02-07 23:59:58 +00:00
Nigro Simone
3c48f22b22
perf: pre allocate array instead of push item (#3250)
* fix: typo

* perf: pre allocate array instead of push item

* perf: refactoring missing push

* perf: avoid useless variable declaration

* perf: short control flow

* fix: lint

* more precise bench

* fix: lint
2025-01-17 15:27:35 -06:00
Arya
2de02f0a63
updated pg-native github url in package.json (#3320) 2025-01-13 13:32:45 -06:00
francesco
f12e555b73
chore: align pg-types to vanilla (#3336) 2025-01-13 13:31:25 -06:00
Davide Violante
39e5ef8370
chore: update README.md, add how to install (#3339)
Fix #3314
2025-01-13 13:30:28 -06:00
Alexandre Weinberger
9fbcf17908
read dataTypeID and tableID as unsigned uint (#3347)
* read dataTypeID and tableID as unsigned uint

this is causing issues in other projects, like https://github.com/sequelize/sequelize/issues/15466

* added tests for oids larger than 2^31
2025-01-13 13:28:25 -06:00
Islam
373093d176
docs: fix pg-pool readme grammar (#3350)
[skip ci]
2024-12-08 08:14:53 +00:00
255 changed files with 6799 additions and 2635 deletions

View File

@ -3,7 +3,7 @@
# Licensed under the MIT License. See https://go.microsoft.com/fwlink/?linkid=2090316 for license information. # Licensed under the MIT License. See https://go.microsoft.com/fwlink/?linkid=2090316 for license information.
#------------------------------------------------------------------------------------------------------------- #-------------------------------------------------------------------------------------------------------------
FROM node:12 FROM node:20
# Avoid warnings by switching to noninteractive # Avoid warnings by switching to noninteractive
ENV DEBIAN_FRONTEND=noninteractive ENV DEBIAN_FRONTEND=noninteractive

View File

@ -1,6 +1,6 @@
// If you want to run as a non-root user in the container, see .devcontainer/docker-compose.yml. // If you want to run as a non-root user in the container, see .devcontainer/docker-compose.yml.
{ {
"name": "Node.js 12 & Postgres", "name": "Node.js 20 & Postgres",
"dockerComposeFile": "docker-compose.yml", "dockerComposeFile": "docker-compose.yml",
"service": "web", "service": "web",
"workspaceFolder": "/workspace", "workspaceFolder": "/workspace",

View File

@ -1,7 +1,7 @@
{ {
"plugins": ["@typescript-eslint", "prettier"], "plugins": ["@typescript-eslint", "prettier"],
"parser": "@typescript-eslint/parser", "parser": "@typescript-eslint/parser",
"extends": ["plugin:prettier/recommended", "prettier"], "extends": ["eslint:recommended", "plugin:prettier/recommended", "prettier"],
"ignorePatterns": ["node_modules", "coverage", "packages/pg-protocol/dist/**/*", "packages/pg-query-stream/dist/**/*"], "ignorePatterns": ["node_modules", "coverage", "packages/pg-protocol/dist/**/*", "packages/pg-query-stream/dist/**/*"],
"parserOptions": { "parserOptions": {
"ecmaVersion": 2017, "ecmaVersion": 2017,
@ -14,8 +14,22 @@
}, },
"rules": { "rules": {
"@typescript-eslint/no-unused-vars": ["error", { "@typescript-eslint/no-unused-vars": ["error", {
"args": "none" "args": "none",
"varsIgnorePattern": "^_$"
}], }],
"no-unused-vars": "off" "no-unused-vars": ["error", {
"args": "none",
"varsIgnorePattern": "^_$"
}],
"no-var": "error",
"prefer-const": "error"
},
"overrides": [
{
"files": ["*.ts", "*.mts", "*.cts", "*.tsx"],
"rules": {
"no-undef": "off"
} }
}
]
} }

1
.gitattributes vendored Normal file
View File

@ -0,0 +1 @@
* text=auto eol=lf

View File

@ -42,6 +42,8 @@ jobs:
- '18' - '18'
- '20' - '20'
- '22' - '22'
- '24'
- '25'
os: os:
- ubuntu-latest - ubuntu-latest
name: Node.js ${{ matrix.node }} name: Node.js ${{ matrix.node }}

View File

@ -4,6 +4,18 @@ For richer information consult the commit log on github with referenced pull req
We do not include break-fix version release in this file. We do not include break-fix version release in this file.
## pg@8.16.0
- Add support for [min connection pool size](https://github.com/brianc/node-postgres/pull/3438).
## pg@8.15.0
- Add support for [esm](https://github.com/brianc/node-postgres/pull/3423) importing. CommonJS importing is still also supported.
## pg@8.14.0
- Add support for SCRAM-SHA-256-PLUS i.e. [channel binding](https://github.com/brianc/node-postgres/pull/3356).
## pg@8.13.0 ## pg@8.13.0
- Add ability to specify query timeout on [per-query basis](https://github.com/brianc/node-postgres/pull/3074). - Add ability to specify query timeout on [per-query basis](https://github.com/brianc/node-postgres/pull/3074).

View File

@ -18,6 +18,12 @@ This repo is a monorepo which contains the core [pg](https://github.com/brianc/n
- [pg-connection-string](https://github.com/brianc/node-postgres/tree/master/packages/pg-connection-string) - [pg-connection-string](https://github.com/brianc/node-postgres/tree/master/packages/pg-connection-string)
- [pg-protocol](https://github.com/brianc/node-postgres/tree/master/packages/pg-protocol) - [pg-protocol](https://github.com/brianc/node-postgres/tree/master/packages/pg-protocol)
## Install
```
npm install pg
```
## Documentation ## Documentation
Each package in this repo should have its own readme more focused on how to develop/contribute. For overall documentation on the project and the related modules managed by this repo please see: Each package in this repo should have its own readme more focused on how to develop/contribute. For overall documentation on the project and the related modules managed by this repo please see:
@ -60,6 +66,12 @@ node-postgres's continued development has been made possible in part by generous
If you or your company are benefiting from node-postgres and would like to help keep the project financially sustainable [please consider supporting](https://github.com/sponsors/brianc) its development. If you or your company are benefiting from node-postgres and would like to help keep the project financially sustainable [please consider supporting](https://github.com/sponsors/brianc) its development.
### Featured sponsor
Special thanks to [medplum](https://medplum.com) for their generous and thoughtful support of node-postgres!
![medplum](https://raw.githubusercontent.com/medplum/medplum-logo/refs/heads/main/medplum-logo.png)
## Contributing ## Contributing
**:heart: contributions!** **:heart: contributions!**

View File

@ -15,8 +15,11 @@ node-postgres is made possible by the helpful contributors from the community as
- [mpirik](https://github.com/mpirik) - [mpirik](https://github.com/mpirik)
- [@BLUE-DEVIL1134](https://github.com/BLUE-DEVIL1134) - [@BLUE-DEVIL1134](https://github.com/BLUE-DEVIL1134)
- [bubble.io](https://bubble.io/) - [bubble.io](https://bubble.io/)
- GitHub[https://github.com/github] - [GitHub](https://github.com/github)
- loveland [https://github.com/loveland] - [n8n](https://n8n.io/)
- [loveland](https://github.com/loveland)
- [gajus](https://github.com/gajus)
- [thirdiron](https://github.com/thirdiron)
# Supporters # Supporters
@ -50,3 +53,4 @@ node-postgres is made possible by the helpful contributors from the community as
- [Sideline Sports](https://github.com/SidelineSports) - [Sideline Sports](https://github.com/SidelineSports)
- [Gadget](https://github.com/gadget-inc) - [Gadget](https://github.com/gadget-inc)
- [Sentry](https://sentry.io/welcome/) - [Sentry](https://sentry.io/welcome/)
- [devlikeapro](https://github.com/devlikeapro)

20
docs/README.md Normal file
View File

@ -0,0 +1,20 @@
# node-postgres docs website
This is the documentation for node-postgres which is currently hosted at [https://node-postgres.com](https://node-postgres.com).
## Development
To run the documentation locally, you need to have [Node.js](https://nodejs.org) installed. Then, you can clone the repository and install the dependencies:
```bash
cd docs
yarn
```
Once you've installed the deps, you can run the development server:
```bash
yarn dev
```
This will start a local server at [http://localhost:3000](http://localhost:3000) where you can view the documentation and see your changes.

View File

@ -1,4 +1,3 @@
import React from 'react'
import { Callout } from 'nextra-theme-docs' import { Callout } from 'nextra-theme-docs'
export const Alert = ({ children }) => { export const Alert = ({ children }) => {

View File

@ -1,4 +1,3 @@
import React from 'react'
import { Callout } from 'nextra-theme-docs' import { Callout } from 'nextra-theme-docs'
export const Info = ({ children }) => { export const Info = ({ children }) => {

9
docs/components/logo.tsx Normal file
View File

@ -0,0 +1,9 @@
type Props = {
src: string
alt?: string
}
export function Logo(props: Props) {
const alt = props.alt || 'Logo'
return <img src={props.src} alt={alt} width={100} height={100} style={{ width: 400, height: 'auto' }} />
}

View File

@ -1,9 +1,5 @@
import 'nextra-theme-docs/style.css' import 'nextra-theme-docs/style.css'
export default function Nextra({ Component, pageProps }) { export default function Nextra({ Component, pageProps }) {
return ( return <Component {...pageProps} />
<>
<Component {...pageProps} />
</>
)
} }

View File

@ -23,15 +23,18 @@ type Config = {
lock_timeout?: number, // number of milliseconds a query is allowed to be en lock state before it's cancelled due to lock timeout lock_timeout?: number, // number of milliseconds a query is allowed to be en lock state before it's cancelled due to lock timeout
application_name?: string, // The name of the application that created this Client instance application_name?: string, // The name of the application that created this Client instance
connectionTimeoutMillis?: number, // number of milliseconds to wait for connection, default is no timeout connectionTimeoutMillis?: number, // number of milliseconds to wait for connection, default is no timeout
idle_in_transaction_session_timeout?: number // number of milliseconds before terminating any session with an open idle transaction, default is no timeout keepAliveInitialDelayMillis?: number, // set the initial delay before the first keepalive probe is sent on an idle socket
idle_in_transaction_session_timeout?: number, // number of milliseconds before terminating any session with an open idle transaction, default is no timeout
client_encoding?: string, // specifies the character set encoding that the database uses for sending data to the client
fallback_application_name?: string, // provide an application name to use if application_name is not set
options?: string // command-line options to be sent to the server
} }
``` ```
example to create a client with specific connection information: example to create a client with specific connection information:
```js ```js
import pg from 'pg' import { Client } from 'pg'
const { Client } = pg
const client = new Client({ const client = new Client({
user: 'database-user', user: 'database-user',
@ -45,8 +48,7 @@ const client = new Client({
## client.connect ## client.connect
```js ```js
import pg from 'pg' import { Client } from 'pg'
const { Client } = pg
const client = new Client() const client = new Client()
await client.connect() await client.connect()
@ -88,8 +90,7 @@ client.query(text: string, values?: any[]) => Promise<Result>
**Plain text query** **Plain text query**
```js ```js
import pg from 'pg' import { Client } from 'pg'
const { Client } = pg
const client = new Client() const client = new Client()
await client.connect() await client.connect()
@ -103,8 +104,7 @@ await client.end()
**Parameterized query** **Parameterized query**
```js ```js
import pg from 'pg' import { Client } from 'pg'
const { Client } = pg
const client = new Client() const client = new Client()
await client.connect() await client.connect()
@ -142,8 +142,7 @@ await client.end()
If you pass an object to `client.query` and the object has a `.submit` function on it, the client will pass it's PostgreSQL server connection to the object and delegate query dispatching to the supplied object. This is an advanced feature mostly intended for library authors. It is incidentally also currently how the callback and promise based queries above are handled internally, but this is subject to change. It is also how [pg-cursor](https://github.com/brianc/node-pg-cursor) and [pg-query-stream](https://github.com/brianc/node-pg-query-stream) work. If you pass an object to `client.query` and the object has a `.submit` function on it, the client will pass it's PostgreSQL server connection to the object and delegate query dispatching to the supplied object. This is an advanced feature mostly intended for library authors. It is incidentally also currently how the callback and promise based queries above are handled internally, but this is subject to change. It is also how [pg-cursor](https://github.com/brianc/node-pg-cursor) and [pg-query-stream](https://github.com/brianc/node-pg-query-stream) work.
```js ```js
import pg from 'pg' import { Query } from 'pg'
const { Query } = pg
const query = new Query('select $1::text as name', ['brianc']) const query = new Query('select $1::text as name', ['brianc'])
const result = client.query(query) const result = client.query(query)

View File

@ -18,8 +18,7 @@ $ npm install pg pg-cursor
Instantiates a new Cursor. A cursor is an instance of `Submittable` and should be passed directly to the `client.query` method. Instantiates a new Cursor. A cursor is an instance of `Submittable` and should be passed directly to the `client.query` method.
```js ```js
import pg from 'pg' import { Pool } from 'pg'
const { Pool } = pg
import Cursor from 'pg-cursor' import Cursor from 'pg-cursor'
const pool = new Pool() const pool = new Pool()
@ -29,11 +28,9 @@ const values = [10]
const cursor = client.query(new Cursor(text, values)) const cursor = client.query(new Cursor(text, values))
cursor.read(100, (err, rows) => { const { rows } = await cursor.read(100)
cursor.close(() => { console.log(rows.length) // 100 (unless the table has fewer than 100 rows)
client.release() client.release()
})
})
``` ```
```ts ```ts
@ -58,8 +55,7 @@ If the cursor has read to the end of the result sets all subsequent calls to cur
Here is an example of reading to the end of a cursor: Here is an example of reading to the end of a cursor:
```js ```js
import pg from 'pg' import { Pool } from 'pg'
const { Pool } = pg
import Cursor from 'pg-cursor' import Cursor from 'pg-cursor'
const pool = new Pool() const pool = new Pool()

View File

@ -29,9 +29,17 @@ type Config = {
idleTimeoutMillis?: number idleTimeoutMillis?: number
// maximum number of clients the pool should contain // maximum number of clients the pool should contain
// by default this is set to 10. // by default this is set to 10. There is some nuance to setting the maximum size of your pool.
// see https://node-postgres.com/guides/pool-sizing for more information
max?: number max?: number
// minimum number of clients the pool should hold on to and _not_ destroy with the idleTimeoutMillis
// this can be useful if you get very bursty traffic and want to keep a few clients around.
// note: currently the pool will not automatically create and connect new clients up to the min; it will
// only refrain from evicting and closing idle clients until the pool is down to the min count.
// the default is 0 which disables this behavior.
min?: number
// Default behavior is the pool will keep clients open & connected to the backend // Default behavior is the pool will keep clients open & connected to the backend
// until idleTimeoutMillis expire for each client and node will maintain a ref // until idleTimeoutMillis expire for each client and node will maintain a ref
// to the socket on the client, keeping the event loop alive until all clients are closed // to the socket on the client, keeping the event loop alive until all clients are closed
@ -42,14 +50,19 @@ type Config = {
// to the postgres server. This can be handy in scripts & tests // to the postgres server. This can be handy in scripts & tests
// where you don't want to wait for your clients to go idle before your process exits. // where you don't want to wait for your clients to go idle before your process exits.
allowExitOnIdle?: boolean allowExitOnIdle?: boolean
// Sets a max overall life for the connection.
// A value of 60 would evict connections that have been around for over 60 seconds,
// regardless of whether they are idle. It's useful to force rotation of connection pools through
// middleware so that you can rotate the underlying servers. The default is disabled (value of zero)
maxLifetimeSeconds?: number
} }
``` ```
example to create a new pool with configuration: example to create a new pool with configuration:
```js ```js
import pg from 'pg' import { Pool } from 'pg'
const { Pool } = pg
const pool = new Pool({ const pool = new Pool({
host: 'localhost', host: 'localhost',
@ -57,6 +70,7 @@ const pool = new Pool({
max: 20, max: 20,
idleTimeoutMillis: 30000, idleTimeoutMillis: 30000,
connectionTimeoutMillis: 2000, connectionTimeoutMillis: 2000,
maxLifetimeSeconds: 60
}) })
``` ```
@ -69,8 +83,7 @@ pool.query(text: string, values?: any[]) => Promise<pg.Result>
``` ```
```js ```js
import pg from 'pg' import { Pool } from 'pg'
const { Pool } = pg
const pool = new Pool() const pool = new Pool()
@ -78,7 +91,7 @@ const result = await pool.query('SELECT $1::text as name', ['brianc'])
console.log(result.rows[0].name) // brianc console.log(result.rows[0].name) // brianc
``` ```
Notice in the example above there is no need to check out or release a client. The pool is doing the acquiring and releasing internally. I find `pool.query` to be a handy shortcut many situations and use it exclusively unless I need a transaction. Notice in the example above there is no need to check out or release a client. The pool is doing the acquiring and releasing internally. I find `pool.query` to be a handy shortcut in many situations and I use it exclusively unless I need a transaction.
<Alert> <Alert>
<div> <div>
@ -99,11 +112,10 @@ Acquires a client from the pool.
- If there are idle clients in the pool one will be returned to the callback on `process.nextTick`. - If there are idle clients in the pool one will be returned to the callback on `process.nextTick`.
- If the pool is not full but all current clients are checked out a new client will be created & returned to this callback. - If the pool is not full but all current clients are checked out a new client will be created & returned to this callback.
- If the pool is 'full' and all clients are currently checked out will wait in a FIFO queue until a client becomes available by it being released back to the pool. - If the pool is 'full' and all clients are currently checked out, requests will wait in a FIFO queue until a client becomes available by being released back to the pool.
```js ```js
import pg from 'pg' import { Pool } from 'pg'
const { Pool } = pg
const pool = new Pool() const pool = new Pool()
@ -121,8 +133,7 @@ Client instances returned from `pool.connect` will have a `release` method which
The `release` method on an acquired client returns it back to the pool. If you pass a truthy value in the `destroy` parameter, instead of releasing the client to the pool, the pool will be instructed to disconnect and destroy this client, leaving a space within itself for a new client. The `release` method on an acquired client returns it back to the pool. If you pass a truthy value in the `destroy` parameter, instead of releasing the client to the pool, the pool will be instructed to disconnect and destroy this client, leaving a space within itself for a new client.
```js ```js
import pg from 'pg' import { Pool } from 'pg'
const { Pool } = pg
const pool = new Pool() const pool = new Pool()
@ -134,8 +145,7 @@ client.release()
``` ```
```js ```js
import pg from 'pg' import { Pool } from 'pg'
const { Pool } = pg
const pool = new Pool() const pool = new Pool()
assert(pool.totalCount === 0) assert(pool.totalCount === 0)
@ -168,8 +178,7 @@ Calling `pool.end` will drain the pool of all active clients, disconnect them, a
```js ```js
// again both promises and callbacks are supported: // again both promises and callbacks are supported:
import pg from 'pg' import { Pool } from 'pg'
const { Pool } = pg
const pool = new Pool() const pool = new Pool()

View File

@ -9,7 +9,7 @@ import { Alert } from '/components/alert.tsx'
Escapes a string as a [SQL identifier](https://www.postgresql.org/docs/current/sql-syntax-lexical.html#SQL-SYNTAX-IDENTIFIERS). Escapes a string as a [SQL identifier](https://www.postgresql.org/docs/current/sql-syntax-lexical.html#SQL-SYNTAX-IDENTIFIERS).
```js ```js
const { escapeIdentifier } = require('pg') import { escapeIdentifier } from 'pg';
const escapedIdentifier = escapeIdentifier('FooIdentifier') const escapedIdentifier = escapeIdentifier('FooIdentifier')
console.log(escapedIdentifier) // '"FooIdentifier"' console.log(escapedIdentifier) // '"FooIdentifier"'
``` ```
@ -27,7 +27,7 @@ console.log(escapedIdentifier) // '"FooIdentifier"'
Escapes a string as a [SQL literal](https://www.postgresql.org/docs/current/sql-syntax-lexical.html#SQL-SYNTAX-CONSTANTS). Escapes a string as a [SQL literal](https://www.postgresql.org/docs/current/sql-syntax-lexical.html#SQL-SYNTAX-CONSTANTS).
```js ```js
const { escapeLiteral } = require('pg') import { escapeLiteral } from 'pg';
const escapedLiteral = escapeLiteral("hello 'world'") const escapedLiteral = escapeLiteral("hello 'world'")
console.log(escapedLiteral) // "'hello ''world'''" console.log(escapedLiteral) // "'hello ''world'''"
``` ```

View File

@ -5,5 +5,7 @@
"transactions": "Transactions", "transactions": "Transactions",
"types": "Data Types", "types": "Data Types",
"ssl": "SSL", "ssl": "SSL",
"native": "Native" "native": "Native",
"esm": "ESM",
"callbacks": "Callbacks"
} }

View File

@ -0,0 +1,39 @@
---
title: Callbacks
---
## Callback Support
`async` / `await` is the preferred way to write async code these days with node, but callbacks are supported in the `pg` module and the `pg-pool` module. To use them, pass a callback function as the last argument to the following methods & it will be called and a promise will not be returned:
```js
const { Pool, Client } = require('pg')
// pool
const pool = new Pool()
// run a query on an available client
pool.query('SELECT NOW()', (err, res) => {
console.log(err, res)
})
// check out a client to do something more complex like a transaction
pool.connect((err, client, release) => {
client.query('SELECT NOW()', (err, res) => {
release()
console.log(err, res)
pool.end()
})
})
// single client
const client = new Client()
client.connect((err) => {
if (err) throw err
client.query('SELECT NOW()', (err, res) => {
console.log(err, res)
client.end()
})
})
```

View File

@ -101,9 +101,9 @@ const signerOptions = {
username: 'api-user', username: 'api-user',
} }
const signer = new RDS.Signer() const signer = new RDS.Signer(signerOptions)
const getPassword = () => signer.getAuthToken(signerOptions) const getPassword = () => signer.getAuthToken()
const pool = new Pool({ const pool = new Pool({
user: signerOptions.username, user: signerOptions.username,

View File

@ -0,0 +1,37 @@
---
title: ESM
---
## ESM Support
As of v8.15.x node-postgres supports the __ECMAScript Module__ (ESM) format. This means you can use `import` statements instead of `require` or `import pg from 'pg'`.
CommonJS modules are still supported. The ESM format is an opt-in feature and will not affect existing codebases that use CommonJS.
The docs have been changed to show ESM usage, but in a CommonJS context you can still use the same code, you just need to change the import format.
If you're using CommonJS, you can use the following code to import the `pg` module:
```js
const pg = require('pg')
const { Client } = pg
// etc...
```
### ESM Usage
If you're using ESM, you can use the following code to import the `pg` module:
```js
import { Client } from 'pg'
// etc...
```
Previously if you were using ESM you would have to use the following code:
```js
import pg from 'pg'
const { Client } = pg
// etc...
```

View File

@ -22,8 +22,7 @@ const config = {
}, },
} }
import pg from 'pg' import { Client, Pool } from 'pg'
const { Client, Pool } = pg
const client = new Client(config) const client = new Client(config)
await client.connect() await client.connect()
@ -51,3 +50,17 @@ const config = {
}, },
} }
``` ```
## Channel binding
If the PostgreSQL server offers SCRAM-SHA-256-PLUS (i.e. channel binding) for TLS/SSL connections, you can enable this as follows:
```js
const client = new Client({ ...config, enableChannelBinding: true})
```
or
```js
const pool = new Pool({ ...config, enableChannelBinding: true})
```

View File

@ -16,8 +16,7 @@ To execute a transaction with node-postgres you simply execute `BEGIN / COMMIT /
## Examples ## Examples
```js ```js
import pg from 'pg' import { Pool } from 'pg'
const { Pool } = pg
const pool = new Pool() const pool = new Pool()
const client = await pool.connect() const client = await pool.connect()

View File

@ -1,5 +1,6 @@
{ {
"project-structure": "Suggested Code Structure", "project-structure": "Suggested Code Structure",
"async-express": "Express with Async/Await", "async-express": "Express with Async/Await",
"pool-sizing": "Pool Sizing",
"upgrading": "Upgrading" "upgrading": "Upgrading"
} }

View File

@ -22,8 +22,7 @@ That's the same structure I used in the [project structure](/guides/project-stru
My `db/index.js` file usually starts out like this: My `db/index.js` file usually starts out like this:
```js ```js
import pg from 'pg' import { Pool } from 'pg'
const { Pool } = pg
const pool = new Pool() const pool = new Pool()

View File

@ -0,0 +1,25 @@
---
title: Pool Sizing
---
If you're using a [pool](/apis/pool) in an application with multiple instances of your service running (common in most cloud/container environments currently), you'll need to think a bit about the `max` parameter of your pool across all services and all _instances_ of all services which are connecting to your Postgres server.
This can get pretty complex depending on your cloud environment. Further nuance is introduced with things like pg-bouncer, RDS connection proxies, etc., which will do some forms of connection pooling and connection multiplexing. So, it's definitely worth thinking about. Let's run through a few setups. While certainly not exhaustive, these examples hopefully prompt you into thinking about what's right for your setup.
## Simple apps, dev mode, fixed instance counts, etc.
If your app isn't running in a k8s style env with containers scaling automatically or lambdas or cloud functions etc., you can do some "napkin math" for the `max` pool config you can use. Let's assume your Postgres instance is configured to have a maximum of 200 connections at any one time. You know your service is going to run on 4 instances. You can set the `max` pool size to 50, but if all your services are saturated waiting on database connections, you won't be able to connect to the database from any mgmt tools or scale up your services without changing config/code to adjust the max size.
In this situation, I'd probably set the `max` to 20 or 25. This lets you have plenty of headroom for scaling more instances and realistically, if your app is starved for db connections, you probably want to take a look at your queries and make them execute faster, or cache, or something else to reduce the load on the database. I worked on a more reporting-heavy application with limited users, but each running 5-6 queries at a time which all took 100-200 milliseconds to run. In that situation, I upped the `max` to 50. Typically, though, I don't bother setting it to anything other than the default of `10` as that's usually _fine_.
## Auto-scaling, cloud-functions, multi-tenancy, etc.
If the number of instances of your services which connect to your database is more dynamic and based on things like load, auto-scaling containers, or running in cloud-functions, you need to be a bit more thoughtful about what your max might be. Often in these environments, there will be another database pooling proxy in front of the database like pg-bouncer or the RDS-proxy, etc. I'm not sure how all these function exactly, and they all have some trade-offs, but let's assume you're not using a proxy. Then I'd be pretty cautious about how large you set any individual pool. If you're running an application under pretty serious load where you need dynamic scaling or lots of lambdas spinning up and sending queries, your queries are likely fast and you should be fine setting the `max` to a low value like 10 -- or just leave it alone, since `10` is the default.
## pg-bouncer, RDS-proxy, etc.
I'm not sure of all the pooling services for Postgres. I haven't used any myself. Throughout the years of working on `pg`, I've addressed issues caused by various proxies behaving differently than an actual Postgres backend. There are also gotchas with things like transactions. On the other hand, plenty of people run these with much success. In this situation, I would just recommend using some small but reasonable `max` value like the default value of `10` as it can still be helpful to keep a few TCP sockets from your services to the Postgres proxy open.
## Conclusion, tl;dr
It's a bit of a complicated topic and doesn't have much impact on things until you need to start scaling. At that point, your number of connections _still_ probably won't be your scaling bottleneck. It's worth thinking about a bit, but mostly I'd just leave the pool size to the default of `10` until you run into troubles: hopefully you never do!

View File

@ -27,13 +27,12 @@ The location doesn't really matter - I've found it usually ends up being somewha
Typically I'll start out my `db/index.js` file like so: Typically I'll start out my `db/index.js` file like so:
```js ```js
import pg from 'pg' import { Pool } from 'pg'
const { Pool } = pg
const pool = new Pool() const pool = new Pool()
export const query = (text, params, callback) => { export const query = (text, params) => {
return pool.query(text, params, callback) return pool.query(text, params)
} }
``` ```
@ -55,8 +54,7 @@ app.get('/:id', async (req, res, next) => {
Imagine we have lots of routes scattered throughout many files under our `routes/` directory. We now want to go back and log every single query that's executed, how long it took, and the number of rows it returned. If we had required node-postgres directly in every route file we'd have to go edit every single route - that would take forever & be really error prone! But thankfully we put our data access into `db/index.js`. Let's go add some logging: Imagine we have lots of routes scattered throughout many files under our `routes/` directory. We now want to go back and log every single query that's executed, how long it took, and the number of rows it returned. If we had required node-postgres directly in every route file we'd have to go edit every single route - that would take forever & be really error prone! But thankfully we put our data access into `db/index.js`. Let's go add some logging:
```js ```js
import pg from 'pg' import { Pool } from 'pg'
const { Pool } = pg
const pool = new Pool() const pool = new Pool()
@ -76,8 +74,7 @@ _note: I didn't log the query parameters. Depending on your application you migh
Now what if we need to check out a client from the pool to run several queries in a row in a transaction? We can add another method to our `db/index.js` file when we need to do this: Now what if we need to check out a client from the pool to run several queries in a row in a transaction? We can add another method to our `db/index.js` file when we need to do this:
```js ```js
import pg from 'pg' import { Pool } from 'pg'
const { Pool } = pg
const pool = new Pool() const pool = new Pool()

View File

@ -50,7 +50,7 @@ pg.end()
// new way, available since 6.0.0: // new way, available since 6.0.0:
// create a pool // create a pool
var pool = new pg.Pool() const pool = new pg.Pool()
// connection using created pool // connection using created pool
pool.connect(function (err, client, done) { pool.connect(function (err, client, done) {

View File

@ -3,6 +3,8 @@ title: Welcome
slug: / slug: /
--- ---
import { Logo } from '/components/logo.tsx'
node-postgres is a collection of node.js modules for interfacing with your PostgreSQL database. It has support for callbacks, promises, async/await, connection pooling, prepared statements, cursors, streaming results, C/C++ bindings, rich type parsing, and more! Just like PostgreSQL itself there are a lot of features: this documentation aims to get you up and running quickly and in the right direction. It also tries to provide guides for more advanced & edge-case topics allowing you to tap into the full power of PostgreSQL from node.js. node-postgres is a collection of node.js modules for interfacing with your PostgreSQL database. It has support for callbacks, promises, async/await, connection pooling, prepared statements, cursors, streaming results, C/C++ bindings, rich type parsing, and more! Just like PostgreSQL itself there are a lot of features: this documentation aims to get you up and running quickly and in the right direction. It also tries to provide guides for more advanced & edge-case topics allowing you to tap into the full power of PostgreSQL from node.js.
## Install ## Install
@ -15,19 +17,33 @@ $ npm install pg
node-postgres continued development and support is made possible by the many [supporters](https://github.com/brianc/node-postgres/blob/master/SPONSORS.md). node-postgres continued development and support is made possible by the many [supporters](https://github.com/brianc/node-postgres/blob/master/SPONSORS.md).
Special thanks to [Medplum](https://www.medplum.com/) for sponsoring node-postgres for a whole year!
<a href="https://www.medplum.com/">
<img
alt="Medplum"
src="https://raw.githubusercontent.com/medplum/medplum-logo/refs/heads/main/medplum-logo.png"
style={{
width: '300px',
height: 'auto',
margin: '0 auto',
display: 'block',
}}
/>
</a>
If you or your company would like to sponsor node-postgres stop by [GitHub Sponsors](https://github.com/sponsors/brianc) and sign up or feel free to [email me](mailto:brian@pecanware.com) if you want to add your logo to the documentation or discuss higher tiers of sponsorship! If you or your company would like to sponsor node-postgres stop by [GitHub Sponsors](https://github.com/sponsors/brianc) and sign up or feel free to [email me](mailto:brian@pecanware.com) if you want to add your logo to the documentation or discuss higher tiers of sponsorship!
# Version compatibility # Version compatibility
node-postgres strives to be compatible with all recent LTS versions of node & the most recent "stable" version. At the time of this writing node-postgres is compatible with node 8.x, 10.x, 12.x and 14.x To use node >= 14.x you will need to install `pg@8.2.x` or later due to some internal stream changes on the node 14 branch. Dropping support for an old node lts version will always be considered a breaking change in node-postgres and will be done on _major_ version number changes only, and we will try to keep support for 8.x for as long as reasonably possible. node-postgres strives to be compatible with all recent LTS versions of node & the most recent "stable" version. At the time of this writing node-postgres is compatible with node 18.x, 20.x, 22.x, and 24.x.
## Getting started ## Getting started
The simplest possible way to connect, query, and disconnect is with async/await: The simplest possible way to connect, query, and disconnect is with async/await:
```js ```js
import pg from 'pg' import { Client } from 'pg'
const { Client } = pg
const client = new Client() const client = new Client()
await client.connect() await client.connect()
@ -41,8 +57,7 @@ await client.end()
For the sake of simplicity, these docs will assume that the methods are successful. In real life use, make sure to properly handle errors thrown in the methods. A `try/catch` block is a great way to do so: For the sake of simplicity, these docs will assume that the methods are successful. In real life use, make sure to properly handle errors thrown in the methods. A `try/catch` block is a great way to do so:
```ts ```ts
import pg from 'pg' import { Client } from 'pg'
const { Client } = pg
const client = new Client() const client = new Client()
await client.connect() await client.connect()
@ -56,22 +71,17 @@ try {
} }
``` ```
### Callbacks ### Pooling
If you prefer a callback-style approach to asynchronous programming, all async methods support an optional callback parameter as well: In most applications you'll want to use a [connection pool](/features/pooling) to manage your connections. This is a more advanced topic, but here's a simple example of how to use it:
```js ```js
import pg from 'pg' import { Pool } from 'pg'
const { Client } = pg const pool = new Pool()
const client = new Client() const res = await pool.query('SELECT $1::text as message', ['Hello world!'])
console.log(res.rows[0].message) // Hello world!
client.connect((err) => {
client.query('SELECT $1::text as message', ['Hello world!'], (err, res) => {
console.log(err ? err.stack : res.rows[0].message) // Hello World!
client.end()
})
})
``` ```
Our real-world apps are almost always more complicated than that, and I urge you to read on! Our real-world apps are almost always more complicated than that, and I urge you to read on!

BIN
docs/public/favicon.ico Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 15 KiB

View File

@ -10,7 +10,6 @@ export default {
docsRepositoryBase: 'https://github.com/brianc/node-postgres/blob/master/docs', // base URL for the docs repository docsRepositoryBase: 'https://github.com/brianc/node-postgres/blob/master/docs', // base URL for the docs repository
titleSuffix: ' node-postgres', titleSuffix: ' node-postgres',
darkMode: true, darkMode: true,
footer: true,
navigation: { navigation: {
prev: true, prev: true,
next: true, next: true,
@ -23,13 +22,43 @@ export default {
}, },
logo: ( logo: (
<> <>
<svg>...</svg> <svg
<span>node-postgres</span> version="1.0"
xmlns="http://www.w3.org/2000/svg"
height={48}
width={48}
viewBox="0 0 1024.000000 1024.000000"
preserveAspectRatio="xMidYMid meet"
>
<g transform="translate(0.000000,1024.000000) scale(0.100000,-0.100000)" fill="#3c873a" stroke="none">
<path
d="M4990 7316 c-391 -87 -703 -397 -1003 -996 -285 -568 -477 -1260
-503 -1811 l-7 -142 -112 7 c-103 5 -207 27 -382 78 -37 11 -44 10 -63 -7 -61
-55 17 -180 177 -285 91 -60 194 -103 327 -137 l104 -26 17 -71 c44 -183 152
-441 256 -613 125 -207 322 -424 493 -541 331 -229 774 -291 1113 -156 112 45
182 94 209 147 13 24 13 35 -1 90 -22 87 -88 219 -134 267 -46 49 -79 52 -153
14 -168 -85 -360 -54 -508 83 -170 157 -244 440 -195 743 50 304 231 601 430
706 168 89 332 60 463 -81 66 -71 110 -140 197 -315 83 -166 116 -194 203
-170 88 23 370 258 637 531 411 420 685 806 808 1139 54 145 71 243 71 410 1
128 -3 157 -27 243 -86 310 -243 543 -467 690 -207 137 -440 157 -966 85
l-161 -22 -94 41 c-201 87 -327 113 -533 112 -77 -1 -166 -7 -196 -13z m-89
-1357 c15 -10 34 -38 43 -61 23 -56 13 -111 -28 -156 -59 -64 -171 -54 -216
21 -35 57 -22 145 28 190 44 40 122 43 173 6z m-234 -1361 c-46 -74 -156 -188
-249 -258 -211 -159 -459 -219 -734 -179 l-76 12 89 28 c187 60 485 229 683
388 l75 60 122 0 122 1 -32 -52z"
/>
</g>
</svg>
<span style={{ fontWeight: 800 }}>node-postgres</span>
</> </>
), ),
chat: {
link: 'https://discord.gg/2afXp5vUWm',
},
head: ( head: (
<> <>
<meta name="viewport" content="width=device-width, initial-scale=1.0" /> <meta name="viewport" content="width=device-width, initial-scale=1.0" />
<link rel="shortcut icon" href="/favicon.ico" />
<meta <meta
name="description" name="description"
content="node-postgres is a collection of node.js modules for interfacing with your PostgreSQL database." content="node-postgres is a collection of node.js modules for interfacing with your PostgreSQL database."

View File

@ -1,12 +1,12 @@
{ {
"packages": [ "packages": ["packages/*"],
"packages/*"
],
"npmClient": "yarn", "npmClient": "yarn",
"useWorkspaces": true, "useWorkspaces": true,
"version": "independent", "version": "independent",
"ignoreChanges": [ "command": {
"**/*.md", "version": {
"**/test/**" "allowBranch": "master"
] }
},
"ignoreChanges": ["**/*.md", "**/test/**"]
} }

View File

@ -10,7 +10,7 @@
"packages/*" "packages/*"
], ],
"scripts": { "scripts": {
"test": "yarn lerna exec yarn test", "test": "yarn lerna exec --concurrency 1 yarn test",
"build": "tsc --build", "build": "tsc --build",
"build:watch": "tsc --build --watch", "build:watch": "tsc --build --watch",
"docs:build": "cd docs && yarn build", "docs:build": "cd docs && yarn build",
@ -23,7 +23,7 @@
"@typescript-eslint/eslint-plugin": "^7.0.0", "@typescript-eslint/eslint-plugin": "^7.0.0",
"@typescript-eslint/parser": "^6.17.0", "@typescript-eslint/parser": "^6.17.0",
"eslint": "^8.56.0", "eslint": "^8.56.0",
"eslint-config-prettier": "^9.1.0", "eslint-config-prettier": "^10.1.2",
"eslint-plugin-node": "^11.1.0", "eslint-plugin-node": "^11.1.0",
"eslint-plugin-prettier": "^5.1.2", "eslint-plugin-prettier": "^5.1.2",
"lerna": "^3.19.0", "lerna": "^3.19.0",

View File

@ -0,0 +1,8 @@
import * as esbuild from 'esbuild'
await esbuild.build({
entryPoints: ['./src/index.mjs'],
bundle: true,
outfile: './dist/esbuild-cloudflare.js',
conditions: ['import', 'workerd'],
})

View File

@ -0,0 +1,7 @@
import * as esbuild from 'esbuild'
await esbuild.build({
entryPoints: ['./src/index.mjs'],
bundle: true,
outfile: './dist/esbuild-empty.js',
})

View File

@ -0,0 +1,25 @@
{
"name": "pg-bundler-test",
"version": "0.0.2",
"description": "Test bundlers with pg-cloudflare, https://github.com/brianc/node-postgres/issues/3452",
"license": "MIT",
"private": true,
"type": "module",
"devDependencies": {
"@rollup/plugin-commonjs": "^28.0.3",
"@rollup/plugin-node-resolve": "^16.0.1",
"esbuild": "^0.25.5",
"pg-cloudflare": "^1.2.7",
"rollup": "^4.41.1",
"vite": "^6.3.5",
"webpack": "^5.99.9",
"webpack-cli": "^6.0.1"
},
"scripts": {
"test": "yarn webpack && yarn rollup && yarn vite && yarn esbuild",
"webpack": "webpack --config webpack-empty.config.mjs && webpack --config webpack-cloudflare.config.mjs",
"rollup": "rollup --config rollup-empty.config.mjs --failAfterWarnings && rollup --config rollup-cloudflare.config.mjs --failAfterWarnings",
"vite": "[ $(node --version | sed 's/v//' | cut -d'.' -f1) -ge 18 ] && vite build --config vite-empty.config.mjs && vite build --config vite-cloudflare.config.mjs || echo 'Skip Vite test'",
"esbuild": "node esbuild-empty.config.mjs && node esbuild-cloudflare.config.mjs"
}
}

View File

@ -0,0 +1,13 @@
import { defineConfig } from 'rollup'
import { nodeResolve } from '@rollup/plugin-node-resolve'
import commonjs from '@rollup/plugin-commonjs'
export default defineConfig({
input: './src/index.mjs',
output: {
file: 'dist/rollup-cloudflare.js',
format: 'es',
},
plugins: [nodeResolve({ exportConditions: ['import', 'workerd'], preferBuiltins: true }), commonjs()],
external: ['cloudflare:sockets'],
})

View File

@ -0,0 +1,12 @@
import { defineConfig } from 'rollup'
import { nodeResolve } from '@rollup/plugin-node-resolve'
import commonjs from '@rollup/plugin-commonjs'
export default defineConfig({
input: './src/index.mjs',
output: {
file: 'dist/rollup-empty.js',
format: 'es',
},
plugins: [nodeResolve(), commonjs()],
})

View File

@ -0,0 +1 @@
import 'pg-cloudflare'

View File

@ -0,0 +1,20 @@
import { defineConfig } from 'vite'
import commonjs from '@rollup/plugin-commonjs'
export default defineConfig({
build: {
emptyOutDir: false,
lib: {
entry: './src/index.mjs',
fileName: 'vite-cloudflare',
formats: ['es'],
},
rollupOptions: {
external: ['cloudflare:sockets'],
},
},
resolve: {
conditions: ['import', 'workerd'],
},
plugins: [commonjs()],
})

View File

@ -0,0 +1,12 @@
import { defineConfig } from 'vite'
export default defineConfig({
build: {
emptyOutDir: false,
lib: {
entry: './src/index.mjs',
fileName: 'vite-empty',
formats: ['es'],
},
},
})

View File

@ -0,0 +1,16 @@
import webpack from 'webpack'
export default {
mode: 'production',
entry: './src/index.mjs',
output: {
filename: 'webpack-cloudflare.js',
},
resolve: { conditionNames: ['import', 'workerd'] },
plugins: [
// ignore cloudflare:sockets imports
new webpack.IgnorePlugin({
resourceRegExp: /^cloudflare:sockets$/,
}),
],
}

View File

@ -0,0 +1,7 @@
export default {
mode: 'production',
entry: './src/index.mjs',
output: {
filename: 'webpack-empty.js',
},
}

View File

@ -10,6 +10,64 @@
npm i --save-dev pg-cloudflare npm i --save-dev pg-cloudflare
``` ```
The package uses conditional exports to support bundlers that don't know about
`cloudflare:sockets`, so the consumer code by default imports an empty file. To
enable the package, resolve to the `workerd` condition in your bundler's
config. For example:
- `webpack.config.js`
```js
export default {
...,
resolve: { conditionNames: [..., "workerd"] },
plugins: [
// ignore cloudflare:sockets imports
new webpack.IgnorePlugin({
resourceRegExp: /^cloudflare:sockets$/,
}),
],
}
```
- `vite.config.js`
> [!NOTE]
> If you are using the [Cloudflare Vite plugin](https://www.npmjs.com/package/@cloudflare/vite-plugin) then the following configuration is not necessary.
```js
export default defineConfig({
...,
resolve: {
conditions: [..., "workerd"],
},
build: {
...,
// don't try to bundle cloudflare:sockets
rollupOptions: {
external: [..., 'cloudflare:sockets'],
},
},
})
```
- `rollup.config.js`
```js
export default defineConfig({
...,
plugins: [..., nodeResolve({ exportConditions: [..., 'workerd'] })],
// don't try to bundle cloudflare:sockets
external: [..., 'cloudflare:sockets'],
})
```
- `esbuild.config.js`
```js
await esbuild.build({
...,
conditions: [..., 'workerd'],
})
```
The concrete examples can be found in `packages/pg-bundler-test`.
## How to use conditionally, in non-Node.js environments ## How to use conditionally, in non-Node.js environments
As implemented in `pg` [here](https://github.com/brianc/node-postgres/commit/07553428e9c0eacf761a5d4541a3300ff7859578#diff-34588ad868ebcb232660aba7ee6a99d1e02f4bc93f73497d2688c3f074e60533R5-R13), a typical use case might look as follows, where in a Node.js environment the `net` module is used, while in a non-Node.js environment, where `net` is unavailable, `pg-cloudflare` is used instead, providing an equivalent interface: As implemented in `pg` [here](https://github.com/brianc/node-postgres/commit/07553428e9c0eacf761a5d4541a3300ff7859578#diff-34588ad868ebcb232660aba7ee6a99d1e02f4bc93f73497d2688c3f074e60533R5-R13), a typical use case might look as follows, where in a Node.js environment the `net` module is used, while in a non-Node.js environment, where `net` is unavailable, `pg-cloudflare` is used instead, providing an equivalent interface:
@ -21,14 +79,13 @@ module.exports.getStream = function getStream(ssl = false) {
return net.Socket() return net.Socket()
} }
const { CloudflareSocket } = require('pg-cloudflare') const { CloudflareSocket } = require('pg-cloudflare')
return new CloudflareSocket(ssl); return new CloudflareSocket(ssl)
} }
``` ```
## Node.js implementation of the Socket API proposal ## Node.js implementation of the Socket API proposal
If you're looking for a way to rely on `connect()` as the interface you use to interact with raw sockets, but need this interface to be availble in a Node.js environment, [`@arrowood.dev/socket`](https://github.com/Ethan-Arrowood/socket) provides a Node.js implementation of the Socket API. If you're looking for a way to rely on `connect()` as the interface you use to interact with raw sockets, but need this interface to be available in a Node.js environment, [`@arrowood.dev/socket`](https://github.com/Ethan-Arrowood/socket) provides a Node.js implementation of the Socket API.
### license ### license

View File

@ -0,0 +1,3 @@
import cf from '../dist/index.js'
export const CloudflareSocket = cf.CloudflareSocket

View File

@ -1,8 +1,8 @@
{ {
"name": "pg-cloudflare", "name": "pg-cloudflare",
"version": "1.1.1", "version": "1.2.7",
"description": "A socket implementation that can run on Cloudflare Workers using native TCP connections.", "description": "A socket implementation that can run on Cloudflare Workers using native TCP connections.",
"main": "dist/empty.js", "main": "dist/index.js",
"types": "dist/index.d.ts", "types": "dist/index.d.ts",
"license": "MIT", "license": "MIT",
"devDependencies": { "devDependencies": {
@ -10,9 +10,15 @@
"typescript": "^4.0.3" "typescript": "^4.0.3"
}, },
"exports": { "exports": {
"workerd": "./dist/index.js", ".": {
"workerd": {
"import": "./esm/index.mjs",
"require": "./dist/index.js"
},
"default": "./dist/empty.js" "default": "./dist/empty.js"
}, },
"./package.json": "./package.json"
},
"scripts": { "scripts": {
"build": "tsc", "build": "tsc",
"build:watch": "tsc --watch", "build:watch": "tsc --watch",
@ -26,6 +32,7 @@
}, },
"files": [ "files": [
"/dist/*{js,ts,map}", "/dist/*{js,ts,map}",
"/src" "/src",
"/esm"
] ]
} }

View File

@ -37,7 +37,8 @@ export class CloudflareSocket extends EventEmitter {
if (connectListener) this.once('connect', connectListener) if (connectListener) this.once('connect', connectListener)
const options: SocketOptions = this.ssl ? { secureTransport: 'starttls' } : {} const options: SocketOptions = this.ssl ? { secureTransport: 'starttls' } : {}
const { connect } = await import('cloudflare:sockets') const mod = await import('cloudflare:sockets')
const connect = mod.connect
this._cfSocket = connect(`${host}:${port}`, options) this._cfSocket = connect(`${host}:${port}`, options)
this._cfWriter = this._cfSocket.writable.getWriter() this._cfWriter = this._cfSocket.writable.getWriter()
this._addClosedHandler() this._addClosedHandler()
@ -61,6 +62,7 @@ export class CloudflareSocket extends EventEmitter {
} }
async _listen() { async _listen() {
// eslint-disable-next-line no-constant-condition
while (true) { while (true) {
log('awaiting receive from CF socket') log('awaiting receive from CF socket')
const { done, value } = await this._cfReader!.read() const { done, value } = await this._cfReader!.read()

View File

@ -1,12 +1,12 @@
{ {
"compilerOptions": { "compilerOptions": {
"module": "ES2020", "module": "node16",
"esModuleInterop": true, "esModuleInterop": true,
"allowSyntheticDefaultImports": true, "allowSyntheticDefaultImports": true,
"strict": true, "strict": true,
"target": "ES2020", "target": "es2020",
"noImplicitAny": true, "noImplicitAny": true,
"moduleResolution": "node", "moduleResolution": "node16",
"sourceMap": true, "sourceMap": true,
"outDir": "dist", "outDir": "dist",
"incremental": true, "incremental": true,

View File

@ -12,6 +12,7 @@ lib-cov
# Coverage directory used by tools like istanbul # Coverage directory used by tools like istanbul
coverage coverage
.nyc_output
# Grunt intermediate storage (http://gruntjs.com/creating-plugins#storing-task-files) # Grunt intermediate storage (http://gruntjs.com/creating-plugins#storing-task-files)
.grunt .grunt
@ -24,3 +25,6 @@ build/Release
# see https://npmjs.org/doc/faq.html#Should-I-check-my-node_modules-folder-into-git # see https://npmjs.org/doc/faq.html#Should-I-check-my-node_modules-folder-into-git
node_modules node_modules
package-lock.json package-lock.json
# TypeScript output directory
dist

View File

@ -0,0 +1,4 @@
{
"extension": ["js", "ts"],
"require": "tsx"
}

View File

@ -3,9 +3,6 @@ pg-connection-string
[![NPM](https://nodei.co/npm/pg-connection-string.png?compact=true)](https://nodei.co/npm/pg-connection-string/) [![NPM](https://nodei.co/npm/pg-connection-string.png?compact=true)](https://nodei.co/npm/pg-connection-string/)
[![Build Status](https://travis-ci.org/iceddev/pg-connection-string.svg?branch=master)](https://travis-ci.org/iceddev/pg-connection-string)
[![Coverage Status](https://coveralls.io/repos/github/iceddev/pg-connection-string/badge.svg?branch=master)](https://coveralls.io/github/iceddev/pg-connection-string?branch=master)
Functions for dealing with a PostgreSQL connection string Functions for dealing with a PostgreSQL connection string
`parse` method taken from [node-postgres](https://github.com/brianc/node-postgres.git) `parse` method taken from [node-postgres](https://github.com/brianc/node-postgres.git)
@ -15,9 +12,9 @@ MIT License
## Usage ## Usage
```js ```js
var parse = require('pg-connection-string').parse; const parse = require('pg-connection-string').parse;
var config = parse('postgres://someuser:somepassword@somehost:381/somedatabase') const config = parse('postgres://someuser:somepassword@somehost:381/somedatabase')
``` ```
The resulting config contains a subset of the following properties: The resulting config contains a subset of the following properties:
@ -35,6 +32,27 @@ The resulting config contains a subset of the following properties:
* `ca` * `ca`
* any other query parameters (for example, `application_name`) are preserved intact. * any other query parameters (for example, `application_name`) are preserved intact.
### ClientConfig Compatibility for TypeScript
The pg-connection-string `ConnectionOptions` interface is not compatible with the `ClientConfig` interface that [pg.Client](https://node-postgres.com/apis/client) expects. To remedy this, use the `parseIntoClientConfig` function instead of `parse`:
```ts
import { ClientConfig } from 'pg';
import { parseIntoClientConfig } from 'pg-connection-string';
const config: ClientConfig = parseIntoClientConfig('postgres://someuser:somepassword@somehost:381/somedatabase')
```
You can also use `toClientConfig` to convert an existing `ConnectionOptions` interface into a `ClientConfig` interface:
```ts
import { ClientConfig } from 'pg';
import { parse, toClientConfig } from 'pg-connection-string';
const config = parse('postgres://someuser:somepassword@somehost:381/somedatabase')
const clientConfig: ClientConfig = toClientConfig(config)
```
## Connection Strings ## Connection Strings
The short summary of acceptable URLs is: The short summary of acceptable URLs is:
@ -66,12 +84,22 @@ Query parameters follow a `?` character, including the following special query p
* `host=<host>` - sets `host` property, overriding the URL's host * `host=<host>` - sets `host` property, overriding the URL's host
* `encoding=<encoding>` - sets the `client_encoding` property * `encoding=<encoding>` - sets the `client_encoding` property
* `ssl=1`, `ssl=true`, `ssl=0`, `ssl=false` - sets `ssl` to true or false, accordingly * `ssl=1`, `ssl=true`, `ssl=0`, `ssl=false` - sets `ssl` to true or false, accordingly
* `sslmode=<sslmode>` * `uselibpqcompat=true` - use libpq semantics
* `sslmode=<sslmode>` when `uselibpqcompat=true` is not set
* `sslmode=disable` - sets `ssl` to false * `sslmode=disable` - sets `ssl` to false
* `sslmode=no-verify` - sets `ssl` to `{ rejectUnauthorized: false }` * `sslmode=no-verify` - sets `ssl` to `{ rejectUnauthorized: false }`
* `sslmode=prefer`, `sslmode=require`, `sslmode=verify-ca`, `sslmode=verify-full` - sets `ssl` to true * `sslmode=prefer`, `sslmode=require`, `sslmode=verify-ca`, `sslmode=verify-full` - sets `ssl` to true
* `sslmode=<sslmode>` when `uselibpqcompat=true`
* `sslmode=disable` - sets `ssl` to false
* `sslmode=prefer` - sets `ssl` to `{ rejectUnauthorized: false }`
* `sslmode=require` - sets `ssl` to `{ rejectUnauthorized: false }` unless `sslrootcert` is specified, in which case it behaves like `verify-ca`
* `sslmode=verify-ca` - sets `ssl` to `{ checkServerIdentity: no-op }` (verify CA, but not server identity). This verifies the presented certificate against the effective CA specified in sslrootcert.
* `sslmode=verify-full` - sets `ssl` to `{}` (verify CA and server identity)
* `sslcert=<filename>` - reads data from the given file and includes the result as `ssl.cert` * `sslcert=<filename>` - reads data from the given file and includes the result as `ssl.cert`
* `sslkey=<filename>` - reads data from the given file and includes the result as `ssl.key` * `sslkey=<filename>` - reads data from the given file and includes the result as `ssl.key`
* `sslrootcert=<filename>` - reads data from the given file and includes the result as `ssl.ca` * `sslrootcert=<filename>` - reads data from the given file and includes the result as `ssl.ca`
A bare relative URL, such as `salesdata`, will indicate a database name while leaving other properties empty. A bare relative URL, such as `salesdata`, will indicate a database name while leaving other properties empty.
> [!CAUTION]
> Choosing an sslmode other than verify-full has serious security implications. Please read https://www.postgresql.org/docs/current/libpq-ssl.html#LIBPQ-SSL-SSLMODE-STATEMENTS to understand the trade-offs.

View File

@ -0,0 +1,8 @@
// ESM wrapper for pg-connection-string
import connectionString from '../index.js'
// Re-export the parse function
export default connectionString.parse
export const parse = connectionString.parse
export const toClientConfig = connectionString.toClientConfig
export const parseIntoClientConfig = connectionString.parseIntoClientConfig

View File

@ -1,4 +1,18 @@
export function parse(connectionString: string): ConnectionOptions import { ClientConfig } from 'pg'
export function parse(connectionString: string, options?: Options): ConnectionOptions
export interface Options {
// Use libpq semantics when interpreting the connection string
useLibpqCompat?: boolean
}
interface SSLConfig {
ca?: string
cert?: string | null
key?: string
rejectUnauthorized?: boolean
}
export interface ConnectionOptions { export interface ConnectionOptions {
host: string | null host: string | null
@ -7,9 +21,16 @@ export interface ConnectionOptions {
port?: string | null port?: string | null
database: string | null | undefined database: string | null | undefined
client_encoding?: string client_encoding?: string
ssl?: boolean | string ssl?: boolean | string | SSLConfig
application_name?: string application_name?: string
fallback_application_name?: string fallback_application_name?: string
options?: string options?: string
keepalives?: number
// We allow any other options to be passed through
[key: string]: unknown
} }
export function toClientConfig(config: ConnectionOptions): ClientConfig
export function parseIntoClientConfig(connectionString: string): ClientConfig

View File

@ -1,11 +1,13 @@
'use strict' 'use strict'
const { emitWarning } = require('node:process')
//Parse method copied from https://github.com/brianc/node-postgres //Parse method copied from https://github.com/brianc/node-postgres
//Copyright (c) 2010-2014 Brian Carlson (brian.m.carlson@gmail.com) //Copyright (c) 2010-2014 Brian Carlson (brian.m.carlson@gmail.com)
//MIT License //MIT License
//parses a connection string //parses a connection string
function parse(str) { function parse(str, options = {}) {
//unix socket //unix socket
if (str.charAt(0) === '/') { if (str.charAt(0) === '/') {
const config = str.split(' ') const config = str.split(' ')
@ -19,9 +21,10 @@ function parse(str) {
let dummyHost = false let dummyHost = false
if (/ |%[^a-f0-9]|%[a-f0-9][^a-f0-9]/i.test(str)) { if (/ |%[^a-f0-9]|%[a-f0-9][^a-f0-9]/i.test(str)) {
// Ensure spaces are encoded as %20 // Ensure spaces are encoded as %20
str = encodeURI(str).replace(/\%25(\d\d)/g, '%$1') str = encodeURI(str).replace(/%25(\d\d)/g, '%$1')
} }
try {
try { try {
result = new URL(str, 'postgres://base') result = new URL(str, 'postgres://base')
} catch (e) { } catch (e) {
@ -29,6 +32,11 @@ function parse(str) {
result = new URL(str.replace('@/', '@___DUMMY___/'), 'postgres://base') result = new URL(str.replace('@/', '@___DUMMY___/'), 'postgres://base')
dummyHost = true dummyHost = true
} }
} catch (err) {
// Remove the input from the error message to avoid leaking sensitive information
err.input && (err.input = '*****REDACTED*****')
throw err
}
// We'd like to use Object.fromEntries() here but Node.js 10 does not support it // We'd like to use Object.fromEntries() here but Node.js 10 does not support it
for (const entry of result.searchParams.entries()) { for (const entry of result.searchParams.entries()) {
@ -87,6 +95,43 @@ function parse(str) {
config.ssl.ca = fs.readFileSync(config.sslrootcert).toString() config.ssl.ca = fs.readFileSync(config.sslrootcert).toString()
} }
if (options.useLibpqCompat && config.uselibpqcompat) {
throw new Error('Both useLibpqCompat and uselibpqcompat are set. Please use only one of them.')
}
if (config.uselibpqcompat === 'true' || options.useLibpqCompat) {
switch (config.sslmode) {
case 'disable': {
config.ssl = false
break
}
case 'prefer': {
config.ssl.rejectUnauthorized = false
break
}
case 'require': {
if (config.sslrootcert) {
// If a root CA is specified, behavior of `sslmode=require` will be the same as that of `verify-ca`
config.ssl.checkServerIdentity = function () {}
} else {
config.ssl.rejectUnauthorized = false
}
break
}
case 'verify-ca': {
if (!config.ssl.ca) {
throw new Error(
'SECURITY WARNING: Using sslmode=verify-ca requires specifying a CA with sslrootcert. If a public CA is used, verify-ca allows connections to a server that somebody else may have registered with the CA, making you vulnerable to Man-in-the-Middle attacks. Either specify a custom CA certificate with sslrootcert parameter or use sslmode=verify-full for proper security.'
)
}
config.ssl.checkServerIdentity = function () {}
break
}
case 'verify-full': {
break
}
}
} else {
switch (config.sslmode) { switch (config.sslmode) {
case 'disable': { case 'disable': {
config.ssl = false config.ssl = false
@ -96,6 +141,9 @@ function parse(str) {
case 'require': case 'require':
case 'verify-ca': case 'verify-ca':
case 'verify-full': { case 'verify-full': {
if (config.sslmode !== 'verify-full') {
deprecatedSslModeWarning(config.sslmode)
}
break break
} }
case 'no-verify': { case 'no-verify': {
@ -103,10 +151,83 @@ function parse(str) {
break break
} }
} }
}
return config return config
} }
// convert pg-connection-string ssl config to a ClientConfig.ConnectionOptions
function toConnectionOptions(sslConfig) {
const connectionOptions = Object.entries(sslConfig).reduce((c, [key, value]) => {
// we explicitly check for undefined and null instead of `if (value)` because some
// options accept falsy values. Example: `ssl.rejectUnauthorized = false`
if (value !== undefined && value !== null) {
c[key] = value
}
return c
}, {})
return connectionOptions
}
// convert pg-connection-string config to a ClientConfig
function toClientConfig(config) {
const poolConfig = Object.entries(config).reduce((c, [key, value]) => {
if (key === 'ssl') {
const sslConfig = value
if (typeof sslConfig === 'boolean') {
c[key] = sslConfig
}
if (typeof sslConfig === 'object') {
c[key] = toConnectionOptions(sslConfig)
}
} else if (value !== undefined && value !== null) {
if (key === 'port') {
// when port is not specified, it is converted into an empty string
// we want to avoid NaN or empty string as a values in ClientConfig
if (value !== '') {
const v = parseInt(value, 10)
if (isNaN(v)) {
throw new Error(`Invalid ${key}: ${value}`)
}
c[key] = v
}
} else {
c[key] = value
}
}
return c
}, {})
return poolConfig
}
// parses a connection string into ClientConfig
function parseIntoClientConfig(str) {
return toClientConfig(parse(str))
}
function deprecatedSslModeWarning(sslmode) {
if (!deprecatedSslModeWarning.warned) {
deprecatedSslModeWarning.warned = true
emitWarning(`SECURITY WARNING: The SSL modes 'prefer', 'require', and 'verify-ca' are treated as aliases for 'verify-full'.
In the next major version (pg-connection-string v3.0.0 and pg v9.0.0), these modes will adopt standard libpq semantics, which have weaker security guarantees.
To prepare for this change:
- If you want the current behavior, explicitly use 'sslmode=verify-full'
- If you want libpq compatibility now, use 'uselibpqcompat=true&sslmode=${sslmode}'
See https://www.postgresql.org/docs/current/libpq-ssl.html for libpq SSL mode definitions.`)
}
}
module.exports = parse module.exports = parse
parse.parse = parse parse.parse = parse
parse.toClientConfig = toClientConfig
parse.parseIntoClientConfig = parseIntoClientConfig

View File

@ -1,13 +1,20 @@
{ {
"name": "pg-connection-string", "name": "pg-connection-string",
"version": "2.7.0", "version": "2.9.1",
"description": "Functions for dealing with a PostgresSQL connection string", "description": "Functions for dealing with a PostgresSQL connection string",
"main": "./index.js", "main": "./index.js",
"types": "./index.d.ts", "types": "./index.d.ts",
"exports": {
".": {
"types": "./index.d.ts",
"import": "./esm/index.mjs",
"require": "./index.js",
"default": "./index.js"
}
},
"scripts": { "scripts": {
"test": "istanbul cover _mocha && npm run check-coverage", "test": "nyc --reporter=lcov mocha && npm run check-coverage",
"check-coverage": "istanbul check-coverage --statements 100 --branches 100 --lines 100 --functions 100", "check-coverage": "nyc check-coverage --statements 100 --branches 100 --lines 100 --functions 100"
"coveralls": "cat ./coverage/lcov.info | ./node_modules/.bin/coveralls"
}, },
"repository": { "repository": {
"type": "git", "type": "git",
@ -27,13 +34,18 @@
}, },
"homepage": "https://github.com/brianc/node-postgres/tree/master/packages/pg-connection-string", "homepage": "https://github.com/brianc/node-postgres/tree/master/packages/pg-connection-string",
"devDependencies": { "devDependencies": {
"@types/pg": "^8.12.0",
"chai": "^4.1.1", "chai": "^4.1.1",
"coveralls": "^3.0.4", "coveralls": "^3.0.4",
"istanbul": "^0.4.5", "istanbul": "^0.4.5",
"mocha": "^10.5.2" "mocha": "^10.5.2",
"nyc": "^15",
"tsx": "^4.19.4",
"typescript": "^4.0.3"
}, },
"files": [ "files": [
"index.js", "index.js",
"index.d.ts" "index.d.ts",
"esm"
] ]
} }

View File

@ -0,0 +1,125 @@
// Tests for converting pg-connection-string parse() results into a
// pg ClientConfig via toClientConfig() / parseIntoClientConfig().
import chai from 'chai'
const expect = chai.expect
chai.should()

import { parse, toClientConfig, parseIntoClientConfig } from '../'

describe('toClientConfig', function () {
  it('converts connection info', function () {
    const config = parse('postgres://brian:pw@boom:381/lala')
    const clientConfig = toClientConfig(config)
    clientConfig.user?.should.equal('brian')
    clientConfig.password?.should.equal('pw')
    clientConfig.host?.should.equal('boom')
    // port is converted from the parsed string '381' to a number
    clientConfig.port?.should.equal(381)
    clientConfig.database?.should.equal('lala')
  })

  it('converts query params', function () {
    const config = parse(
      'postgres:///?application_name=TheApp&fallback_application_name=TheAppFallback&client_encoding=utf8&options=-c geqo=off'
    )
    const clientConfig = toClientConfig(config)
    clientConfig.application_name?.should.equal('TheApp')
    clientConfig.fallback_application_name?.should.equal('TheAppFallback')
    clientConfig.client_encoding?.should.equal('utf8')
    clientConfig.options?.should.equal('-c geqo=off')
  })

  it('converts SSL boolean', function () {
    const config = parse('pg:///?ssl=true')
    const clientConfig = toClientConfig(config)
    clientConfig.ssl?.should.equal(true)
  })

  it('converts sslmode=disable', function () {
    const config = parse('pg:///?sslmode=disable')
    const clientConfig = toClientConfig(config)
    clientConfig.ssl?.should.equal(false)
  })

  it('converts sslmode=noverify', function () {
    const config = parse('pg:///?sslmode=no-verify')
    const clientConfig = toClientConfig(config)
    clientConfig.ssl?.should.deep.equal({
      rejectUnauthorized: false,
    })
  })

  // This test previously appeared twice verbatim; the duplicate copy
  // has been removed.
  it('converts other sslmode options', function () {
    const config = parse('pg:///?sslmode=verify-ca')
    const clientConfig = toClientConfig(config)
    clientConfig.ssl?.should.deep.equal({})
  })

  it('converts ssl cert options', function () {
    const connectionString =
      'pg:///?sslcert=' +
      __dirname +
      '/example.cert&sslkey=' +
      __dirname +
      '/example.key&sslrootcert=' +
      __dirname +
      '/example.ca'
    const config = parse(connectionString)
    const clientConfig = toClientConfig(config)
    clientConfig.ssl?.should.deep.equal({
      ca: 'example ca\n',
      cert: 'example cert\n',
      key: 'example key\n',
    })
  })

  it('converts unix domain sockets', function () {
    const config = parse('socket:/some path/?db=my[db]&encoding=utf8&client_encoding=bogus')
    const clientConfig = toClientConfig(config)
    clientConfig.host?.should.equal('/some path/')
    clientConfig.database?.should.equal('my[db]', 'must to be escaped and unescaped through "my%5Bdb%5D"')
    clientConfig.client_encoding?.should.equal('utf8')
  })

  it('handles invalid port', function () {
    const config = parse('postgres://@boom:381/lala')
    // force a non-numeric port to exercise the validation path
    config.port = 'bogus'
    expect(() => toClientConfig(config)).to.throw()
  })

  it('handles invalid sslconfig values', function () {
    const config = parse('postgres://@boom/lala')
    // null/undefined ssl fields should be dropped, not passed through
    config.ssl = {}
    config.ssl.cert = null
    config.ssl.key = undefined
    const clientConfig = toClientConfig(config)
    clientConfig.host?.should.equal('boom')
    clientConfig.database?.should.equal('lala')
    clientConfig.ssl?.should.deep.equal({})
  })
})

describe('parseIntoClientConfig', function () {
  it('converts url', function () {
    const clientConfig = parseIntoClientConfig('postgres://brian:pw@boom:381/lala')
    clientConfig.user?.should.equal('brian')
    clientConfig.password?.should.equal('pw')
    clientConfig.host?.should.equal('boom')
    clientConfig.port?.should.equal(381)
    clientConfig.database?.should.equal('lala')
  })
})

View File

@ -1,326 +0,0 @@
'use strict'
var chai = require('chai')
chai.should()

var parse = require('../').parse

// Mocha suite for pg-connection-string's parse(): plain postgres:// URLs,
// unix-domain-socket forms, credential/percent decoding, host overrides,
// and ssl*/misc query-parameter handling.
describe('parse', function () {
  it('using connection string in client constructor', function () {
    var subject = parse('postgres://brian:pw@boom:381/lala')
    subject.user.should.equal('brian')
    subject.password.should.equal('pw')
    subject.host.should.equal('boom')
    // note: parse() returns the port as a string, not a number
    subject.port.should.equal('381')
    subject.database.should.equal('lala')
  })
  it('escape spaces if present', function () {
    var subject = parse('postgres://localhost/post gres')
    subject.database.should.equal('post gres')
  })
  it('do not double escape spaces', function () {
    var subject = parse('postgres://localhost/post%20gres')
    subject.database.should.equal('post gres')
  })
  // --- unix-domain-socket connection strings ---
  it('initializing with unix domain socket', function () {
    var subject = parse('/var/run/')
    subject.host.should.equal('/var/run/')
  })
  it('initializing with unix domain socket and a specific database, the simple way', function () {
    var subject = parse('/var/run/ mydb')
    subject.host.should.equal('/var/run/')
    subject.database.should.equal('mydb')
  })
  it('initializing with unix domain socket, the health way', function () {
    var subject = parse('socket:/some path/?db=my[db]&encoding=utf8')
    subject.host.should.equal('/some path/')
    subject.database.should.equal('my[db]', 'must to be escaped and unescaped trough "my%5Bdb%5D"')
    subject.client_encoding.should.equal('utf8')
  })
  it('initializing with unix domain socket, the escaped health way', function () {
    var subject = parse('socket:/some%20path/?db=my%2Bdb&encoding=utf8')
    subject.host.should.equal('/some path/')
    subject.database.should.equal('my+db')
    subject.client_encoding.should.equal('utf8')
  })
  it('initializing with unix domain socket, username and password', function () {
    var subject = parse('socket://brian:pw@/var/run/?db=mydb')
    subject.user.should.equal('brian')
    subject.password.should.equal('pw')
    subject.host.should.equal('/var/run/')
    subject.database.should.equal('mydb')
  })
  // --- credential encoding edge cases ---
  it('password contains < and/or > characters', function () {
    var sourceConfig = {
      user: 'brian',
      password: 'hello<ther>e',
      host: 'localhost',
      port: 5432,
      database: 'postgres',
    }
    var connectionString =
      'postgres://' +
      sourceConfig.user +
      ':' +
      sourceConfig.password +
      '@' +
      sourceConfig.host +
      ':' +
      sourceConfig.port +
      '/' +
      sourceConfig.database
    var subject = parse(connectionString)
    subject.password.should.equal(sourceConfig.password)
  })
  it('password contains colons', function () {
    var sourceConfig = {
      user: 'brian',
      password: 'hello:pass:world',
      host: 'localhost',
      port: 5432,
      database: 'postgres',
    }
    var connectionString =
      'postgres://' +
      sourceConfig.user +
      ':' +
      sourceConfig.password +
      '@' +
      sourceConfig.host +
      ':' +
      sourceConfig.port +
      '/' +
      sourceConfig.database
    var subject = parse(connectionString)
    subject.password.should.equal(sourceConfig.password)
  })
  it('username or password contains weird characters', function () {
    var strang = 'pg://my f%irst name:is&%awesome!@localhost:9000'
    var subject = parse(strang)
    subject.user.should.equal('my f%irst name')
    subject.password.should.equal('is&%awesome!')
    subject.host.should.equal('localhost')
  })
  it('url is properly encoded', function () {
    var encoded = 'pg://bi%25na%25%25ry%20:s%40f%23@localhost/%20u%2520rl'
    var subject = parse(encoded)
    subject.user.should.equal('bi%na%%ry ')
    subject.password.should.equal('s@f#')
    subject.host.should.equal('localhost')
    subject.database.should.equal(' u%20rl')
  })
  it('relative url sets database', function () {
    var relative = 'different_db_on_default_host'
    var subject = parse(relative)
    subject.database.should.equal('different_db_on_default_host')
  })
  it('no pathname returns null database', function () {
    var subject = parse('pg://myhost')
    ;(subject.database === null).should.equal(true)
  })
  it('pathname of "/" returns null database', function () {
    var subject = parse('pg://myhost/')
    subject.host.should.equal('myhost')
    ;(subject.database === null).should.equal(true)
  })
  // --- host override via the ?host= query parameter ---
  it('configuration parameter host', function () {
    var subject = parse('pg://user:pass@/dbname?host=/unix/socket')
    subject.user.should.equal('user')
    subject.password.should.equal('pass')
    subject.host.should.equal('/unix/socket')
    subject.database.should.equal('dbname')
  })
  it('configuration parameter host overrides url host', function () {
    var subject = parse('pg://user:pass@localhost/dbname?host=/unix/socket')
    subject.database.should.equal('dbname')
    subject.host.should.equal('/unix/socket')
  })
  it('url with encoded socket', function () {
    var subject = parse('pg://user:pass@%2Funix%2Fsocket/dbname')
    subject.user.should.equal('user')
    subject.password.should.equal('pass')
    subject.host.should.equal('/unix/socket')
    subject.database.should.equal('dbname')
  })
  it('url with real host and an encoded db name', function () {
    var subject = parse('pg://user:pass@localhost/%2Fdbname')
    subject.user.should.equal('user')
    subject.password.should.equal('pass')
    subject.host.should.equal('localhost')
    subject.database.should.equal('%2Fdbname')
  })
  it('configuration parameter host treats encoded host as part of the db name', function () {
    var subject = parse('pg://user:pass@%2Funix%2Fsocket/dbname?host=localhost')
    subject.user.should.equal('user')
    subject.password.should.equal('pass')
    subject.host.should.equal('localhost')
    subject.database.should.equal('%2Funix%2Fsocket/dbname')
  })
  // --- pass-through configuration query parameters ---
  it('configuration parameter application_name', function () {
    var connectionString = 'pg:///?application_name=TheApp'
    var subject = parse(connectionString)
    subject.application_name.should.equal('TheApp')
  })
  it('configuration parameter fallback_application_name', function () {
    var connectionString = 'pg:///?fallback_application_name=TheAppFallback'
    var subject = parse(connectionString)
    subject.fallback_application_name.should.equal('TheAppFallback')
  })
  it('configuration parameter options', function () {
    var connectionString = 'pg:///?options=-c geqo=off'
    var subject = parse(connectionString)
    subject.options.should.equal('-c geqo=off')
  })
  // --- ssl / sslmode handling ---
  it('configuration parameter ssl=true', function () {
    var connectionString = 'pg:///?ssl=true'
    var subject = parse(connectionString)
    subject.ssl.should.equal(true)
  })
  it('configuration parameter ssl=1', function () {
    var connectionString = 'pg:///?ssl=1'
    var subject = parse(connectionString)
    subject.ssl.should.equal(true)
  })
  it('configuration parameter ssl=0', function () {
    var connectionString = 'pg:///?ssl=0'
    var subject = parse(connectionString)
    subject.ssl.should.equal(false)
  })
  it('set ssl', function () {
    var subject = parse('pg://myhost/db?ssl=1')
    subject.ssl.should.equal(true)
  })
  // sslcert/sslkey/sslrootcert are file paths; parse() reads the files
  // and places their contents on the ssl object
  it('configuration parameter sslcert=/path/to/cert', function () {
    var connectionString = 'pg:///?sslcert=' + __dirname + '/example.cert'
    var subject = parse(connectionString)
    subject.ssl.should.eql({
      cert: 'example cert\n',
    })
  })
  it('configuration parameter sslkey=/path/to/key', function () {
    var connectionString = 'pg:///?sslkey=' + __dirname + '/example.key'
    var subject = parse(connectionString)
    subject.ssl.should.eql({
      key: 'example key\n',
    })
  })
  it('configuration parameter sslrootcert=/path/to/ca', function () {
    var connectionString = 'pg:///?sslrootcert=' + __dirname + '/example.ca'
    var subject = parse(connectionString)
    subject.ssl.should.eql({
      ca: 'example ca\n',
    })
  })
  it('configuration parameter sslmode=no-verify', function () {
    var connectionString = 'pg:///?sslmode=no-verify'
    var subject = parse(connectionString)
    subject.ssl.should.eql({
      rejectUnauthorized: false,
    })
  })
  it('configuration parameter sslmode=disable', function () {
    var connectionString = 'pg:///?sslmode=disable'
    var subject = parse(connectionString)
    subject.ssl.should.eql(false)
  })
  it('configuration parameter sslmode=prefer', function () {
    var connectionString = 'pg:///?sslmode=prefer'
    var subject = parse(connectionString)
    subject.ssl.should.eql({})
  })
  it('configuration parameter sslmode=require', function () {
    var connectionString = 'pg:///?sslmode=require'
    var subject = parse(connectionString)
    subject.ssl.should.eql({})
  })
  it('configuration parameter sslmode=verify-ca', function () {
    var connectionString = 'pg:///?sslmode=verify-ca'
    var subject = parse(connectionString)
    subject.ssl.should.eql({})
  })
  it('configuration parameter sslmode=verify-full', function () {
    var connectionString = 'pg:///?sslmode=verify-full'
    var subject = parse(connectionString)
    subject.ssl.should.eql({})
  })
  it('configuration parameter ssl=true and sslmode=require still work with sslrootcert=/path/to/ca', function () {
    var connectionString = 'pg:///?ssl=true&sslrootcert=' + __dirname + '/example.ca&sslmode=require'
    var subject = parse(connectionString)
    subject.ssl.should.eql({
      ca: 'example ca\n',
    })
  })
  // --- everything else is passed through as a string ---
  it('allow other params like max, ...', function () {
    var subject = parse('pg://myhost/db?max=18&min=4')
    subject.max.should.equal('18')
    subject.min.should.equal('4')
  })
  it('configuration parameter keepalives', function () {
    var connectionString = 'pg:///?keepalives=1'
    var subject = parse(connectionString)
    subject.keepalives.should.equal('1')
  })
  it('unknown configuration parameter is passed into client', function () {
    var connectionString = 'pg:///?ThereIsNoSuchPostgresParameter=1234'
    var subject = parse(connectionString)
    subject.ThereIsNoSuchPostgresParameter.should.equal('1234')
  })
  it('do not override a config field with value from query string', function () {
    var subject = parse('socket:/some path/?db=my[db]&encoding=utf8&client_encoding=bogus')
    subject.host.should.equal('/some path/')
    subject.database.should.equal('my[db]', 'must to be escaped and unescaped through "my%5Bdb%5D"')
    subject.client_encoding.should.equal('utf8')
  })
  it('return last value of repeated parameter', function () {
    var connectionString = 'pg:///?keepalives=1&keepalives=0'
    var subject = parse(connectionString)
    subject.keepalives.should.equal('0')
  })
  it('use the port specified in the query parameters', function () {
    var connectionString = 'postgres:///?host=localhost&port=1234'
    var subject = parse(connectionString)
    subject.port.should.equal('1234')
  })
})

View File

@ -0,0 +1,470 @@
import chai from 'chai'
const expect = chai.expect
chai.should()
import { parse } from '../'
describe('parse', function () {
it('using connection string in client constructor', function () {
const subject = parse('postgres://brian:pw@boom:381/lala')
subject.user?.should.equal('brian')
subject.password?.should.equal('pw')
subject.host?.should.equal('boom')
subject.port?.should.equal('381')
subject.database?.should.equal('lala')
})
it('escape spaces if present', function () {
const subject = parse('postgres://localhost/post gres')
subject.database?.should.equal('post gres')
})
it('do not double escape spaces', function () {
const subject = parse('postgres://localhost/post%20gres')
subject.database?.should.equal('post gres')
})
it('initializing with unix domain socket', function () {
const subject = parse('/const/run/')
subject.host?.should.equal('/const/run/')
})
it('initializing with unix domain socket and a specific database, the simple way', function () {
const subject = parse('/const/run/ mydb')
subject.host?.should.equal('/const/run/')
subject.database?.should.equal('mydb')
})
it('initializing with unix domain socket, the health way', function () {
const subject = parse('socket:/some path/?db=my[db]&encoding=utf8')
subject.host?.should.equal('/some path/')
subject.database?.should.equal('my[db]', 'must to be escaped and unescaped trough "my%5Bdb%5D"')
subject.client_encoding?.should.equal('utf8')
})
it('initializing with unix domain socket, the escaped health way', function () {
const subject = parse('socket:/some%20path/?db=my%2Bdb&encoding=utf8')
subject.host?.should.equal('/some path/')
subject.database?.should.equal('my+db')
subject.client_encoding?.should.equal('utf8')
})
it('initializing with unix domain socket, username and password', function () {
const subject = parse('socket://brian:pw@/const/run/?db=mydb')
subject.user?.should.equal('brian')
subject.password?.should.equal('pw')
subject.host?.should.equal('/const/run/')
subject.database?.should.equal('mydb')
})
it('password contains < and/or > characters', function () {
const sourceConfig = {
user: 'brian',
password: 'hello<ther>e',
host: 'localhost',
port: 5432,
database: 'postgres',
}
const connectionString =
'postgres://' +
sourceConfig.user +
':' +
sourceConfig.password +
'@' +
sourceConfig.host +
':' +
sourceConfig.port +
'/' +
sourceConfig.database
const subject = parse(connectionString)
subject.password?.should.equal(sourceConfig.password)
})
it('password contains colons', function () {
const sourceConfig = {
user: 'brian',
password: 'hello:pass:world',
host: 'localhost',
port: 5432,
database: 'postgres',
}
const connectionString =
'postgres://' +
sourceConfig.user +
':' +
sourceConfig.password +
'@' +
sourceConfig.host +
':' +
sourceConfig.port +
'/' +
sourceConfig.database
const subject = parse(connectionString)
subject.password?.should.equal(sourceConfig.password)
})
it('username or password contains weird characters', function () {
const strang = 'pg://my f%irst name:is&%awesome!@localhost:9000'
const subject = parse(strang)
subject.user?.should.equal('my f%irst name')
subject.password?.should.equal('is&%awesome!')
subject.host?.should.equal('localhost')
})
it('url is properly encoded', function () {
const encoded = 'pg://bi%25na%25%25ry%20:s%40f%23@localhost/%20u%2520rl'
const subject = parse(encoded)
subject.user?.should.equal('bi%na%%ry ')
subject.password?.should.equal('s@f#')
subject.host?.should.equal('localhost')
subject.database?.should.equal(' u%20rl')
})
it('relative url sets database', function () {
const relative = 'different_db_on_default_host'
const subject = parse(relative)
subject.database?.should.equal('different_db_on_default_host')
})
it('no pathname returns null database', function () {
const subject = parse('pg://myhost')
;(subject.database === null).should.equal(true)
})
it('pathname of "/" returns null database', function () {
const subject = parse('pg://myhost/')
subject.host?.should.equal('myhost')
;(subject.database === null).should.equal(true)
})
it('configuration parameter host', function () {
const subject = parse('pg://user:pass@/dbname?host=/unix/socket')
subject.user?.should.equal('user')
subject.password?.should.equal('pass')
subject.host?.should.equal('/unix/socket')
subject.database?.should.equal('dbname')
})
it('configuration parameter host overrides url host', function () {
const subject = parse('pg://user:pass@localhost/dbname?host=/unix/socket')
subject.database?.should.equal('dbname')
subject.host?.should.equal('/unix/socket')
})
it('url with encoded socket', function () {
const subject = parse('pg://user:pass@%2Funix%2Fsocket/dbname')
subject.user?.should.equal('user')
subject.password?.should.equal('pass')
subject.host?.should.equal('/unix/socket')
subject.database?.should.equal('dbname')
})
it('url with real host and an encoded db name', function () {
const subject = parse('pg://user:pass@localhost/%2Fdbname')
subject.user?.should.equal('user')
subject.password?.should.equal('pass')
subject.host?.should.equal('localhost')
subject.database?.should.equal('%2Fdbname')
})
it('configuration parameter host treats encoded host as part of the db name', function () {
const subject = parse('pg://user:pass@%2Funix%2Fsocket/dbname?host=localhost')
subject.user?.should.equal('user')
subject.password?.should.equal('pass')
subject.host?.should.equal('localhost')
subject.database?.should.equal('%2Funix%2Fsocket/dbname')
})
it('configuration parameter application_name', function () {
const connectionString = 'pg:///?application_name=TheApp'
const subject = parse(connectionString)
subject.application_name?.should.equal('TheApp')
})
it('configuration parameter fallback_application_name', function () {
const connectionString = 'pg:///?fallback_application_name=TheAppFallback'
const subject = parse(connectionString)
subject.fallback_application_name?.should.equal('TheAppFallback')
})
it('configuration parameter options', function () {
const connectionString = 'pg:///?options=-c geqo=off'
const subject = parse(connectionString)
subject.options?.should.equal('-c geqo=off')
})
it('configuration parameter ssl=true', function () {
const connectionString = 'pg:///?ssl=true'
const subject = parse(connectionString)
subject.ssl?.should.equal(true)
})
it('configuration parameter ssl=1', function () {
const connectionString = 'pg:///?ssl=1'
const subject = parse(connectionString)
subject.ssl?.should.equal(true)
})
it('configuration parameter ssl=0', function () {
const connectionString = 'pg:///?ssl=0'
const subject = parse(connectionString)
subject.ssl?.should.equal(false)
})
it('set ssl', function () {
const subject = parse('pg://myhost/db?ssl=1')
subject.ssl?.should.equal(true)
})
it('configuration parameter sslcert=/path/to/cert', function () {
const connectionString = 'pg:///?sslcert=' + __dirname + '/example.cert'
const subject = parse(connectionString)
subject.ssl?.should.eql({
cert: 'example cert\n',
})
})
it('configuration parameter sslkey=/path/to/key', function () {
const connectionString = 'pg:///?sslkey=' + __dirname + '/example.key'
const subject = parse(connectionString)
subject.ssl?.should.eql({
key: 'example key\n',
})
})
it('configuration parameter sslrootcert=/path/to/ca', function () {
const connectionString = 'pg:///?sslrootcert=' + __dirname + '/example.ca'
const subject = parse(connectionString)
subject.ssl?.should.eql({
ca: 'example ca\n',
})
})
it('configuration parameter sslmode=no-verify', function () {
const connectionString = 'pg:///?sslmode=no-verify'
const subject = parse(connectionString)
subject.ssl?.should.eql({
rejectUnauthorized: false,
})
})
it('configuration parameter sslmode=disable', function () {
const connectionString = 'pg:///?sslmode=disable'
const subject = parse(connectionString)
subject.ssl?.should.eql(false)
})
it('configuration parameter sslmode=prefer', function () {
const connectionString = 'pg:///?sslmode=prefer'
const subject = parse(connectionString)
subject.ssl?.should.eql({})
})
it('configuration parameter sslmode=require', function () {
const connectionString = 'pg:///?sslmode=require'
const subject = parse(connectionString)
subject.ssl?.should.eql({})
})
it('configuration parameter sslmode=verify-ca', function () {
const connectionString = 'pg:///?sslmode=verify-ca'
const subject = parse(connectionString)
subject.ssl?.should.eql({})
})
it('configuration parameter sslmode=verify-full', function () {
const connectionString = 'pg:///?sslmode=verify-full'
const subject = parse(connectionString)
subject.ssl?.should.eql({})
})
it('configuration parameter ssl=true and sslmode=require still work with sslrootcert=/path/to/ca', function () {
const connectionString = 'pg:///?ssl=true&sslrootcert=' + __dirname + '/example.ca&sslmode=require'
const subject = parse(connectionString)
subject.ssl?.should.eql({
ca: 'example ca\n',
})
})
it('configuration parameter sslmode=disable with uselibpqcompat query param', function () {
const connectionString = 'pg:///?sslmode=disable&uselibpqcompat=true'
const subject = parse(connectionString)
subject.ssl?.should.eql(false)
})
it('configuration parameter sslmode=prefer with uselibpqcompat query param', function () {
const connectionString = 'pg:///?sslmode=prefer&uselibpqcompat=true'
const subject = parse(connectionString)
subject.ssl?.should.eql({
rejectUnauthorized: false,
})
})
it('configuration parameter sslmode=require with uselibpqcompat query param', function () {
const connectionString = 'pg:///?sslmode=require&uselibpqcompat=true'
const subject = parse(connectionString)
subject.ssl?.should.eql({
rejectUnauthorized: false,
})
})
it('configuration parameter sslmode=verify-ca with uselibpqcompat query param', function () {
const connectionString = 'pg:///?sslmode=verify-ca&uselibpqcompat=true'
expect(function () {
parse(connectionString)
}).to.throw()
})
it('when throwing on invalid url does not print out the password in the error message', function () {
const host = 'localhost'
const port = 5432
const user = 'user'
const password = 'g#4624$@F$#v`'
const database = 'db'
const connectionString = `postgres://${user}:${password}@${host}:${port}/${database}`
expect(function () {
parse(connectionString)
}).to.throw()
try {
parse(connectionString)
} catch (err: unknown) {
expect(JSON.stringify(err)).to.not.include(password, 'Password should not be in the error message')
expect(JSON.stringify(err)).to.include('REDACTED', 'The thrown error should contain the redacted URL')
return
}
throw new Error('Expected an error to be thrown')
})
it('configuration parameter sslmode=verify-ca and sslrootcert with uselibpqcompat query param', function () {
const connectionString = 'pg:///?sslmode=verify-ca&uselibpqcompat=true&sslrootcert=' + __dirname + '/example.ca'
const subject = parse(connectionString)
subject.ssl?.should.have.property('checkServerIdentity').that.is.a('function')
// We prove above that the checkServerIdentity function is defined
//
// FIXME: remove this if we upgrade to TypeScript 5
// @ts-ignore
expect(subject.ssl.checkServerIdentity()).be.undefined
})
it('configuration parameter sslmode=verify-full with uselibpqcompat query param', function () {
const connectionString = 'pg:///?sslmode=verify-full&uselibpqcompat=true'
const subject = parse(connectionString)
subject.ssl?.should.eql({})
})
it('configuration parameter ssl=true and sslmode=require still work with sslrootcert=/path/to/ca with uselibpqcompat query param', function () {
const connectionString =
'pg:///?ssl=true&sslrootcert=' + __dirname + '/example.ca&sslmode=require&uselibpqcompat=true'
const subject = parse(connectionString)
subject.ssl?.should.have.property('ca', 'example ca\n')
subject.ssl?.should.have.property('checkServerIdentity').that.is.a('function')
// We prove above that the checkServerIdentity function is defined
//
// FIXME: remove this if we upgrade to TypeScript 5
// @ts-ignore
expect(subject.ssl?.checkServerIdentity()).be.undefined
})
it('configuration parameter sslmode=disable with useLibpqCompat option', function () {
const connectionString = 'pg:///?sslmode=disable'
const subject = parse(connectionString, { useLibpqCompat: true })
subject.ssl?.should.eql(false)
})
it('configuration parameter sslmode=prefer with useLibpqCompat option', function () {
const connectionString = 'pg:///?sslmode=prefer'
const subject = parse(connectionString, { useLibpqCompat: true })
subject.ssl?.should.eql({
rejectUnauthorized: false,
})
})
it('configuration parameter sslmode=require with useLibpqCompat option', function () {
const connectionString = 'pg:///?sslmode=require'
const subject = parse(connectionString, { useLibpqCompat: true })
subject.ssl?.should.eql({
rejectUnauthorized: false,
})
})
it('configuration parameter sslmode=verify-ca with useLibpqCompat option', function () {
const connectionString = 'pg:///?sslmode=verify-ca'
expect(function () {
parse(connectionString, { useLibpqCompat: true })
}).to.throw()
})
it('configuration parameter sslmode=verify-ca and sslrootcert with useLibpqCompat option', function () {
const connectionString = 'pg:///?sslmode=verify-ca&sslrootcert=' + __dirname + '/example.ca'
const subject = parse(connectionString, { useLibpqCompat: true })
subject.ssl?.should.have.property('checkServerIdentity').that.is.a('function')
// We prove above that the checkServerIdentity function is defined
//
// FIXME: remove this if we upgrade to TypeScript 5
// @ts-ignore
expect(subject.ssl?.checkServerIdentity()).be.undefined
})
it('configuration parameter sslmode=verify-full with useLibpqCompat option', function () {
const connectionString = 'pg:///?sslmode=verify-full'
const subject = parse(connectionString, { useLibpqCompat: true })
subject.ssl?.should.eql({})
})
it('configuration parameter ssl=true and sslmode=require still work with sslrootcert=/path/to/ca with useLibpqCompat option', function () {
const connectionString = 'pg:///?ssl=true&sslrootcert=' + __dirname + '/example.ca&sslmode=require'
const subject = parse(connectionString, { useLibpqCompat: true })
subject.ssl?.should.have.property('ca', 'example ca\n')
subject.ssl?.should.have.property('checkServerIdentity').that.is.a('function')
// We prove above that the checkServerIdentity function is defined
//
// FIXME: remove this if we upgrade to TypeScript 5
// @ts-ignore
expect(subject.ssl?.checkServerIdentity()).be.undefined
})
it('does not allow uselibpqcompat query parameter and useLibpqCompat option at the same time', function () {
const connectionString = 'pg:///?uselibpqcompat=true'
expect(function () {
parse(connectionString, { useLibpqCompat: true })
}).to.throw()
})
it('allow other params like max, ...', function () {
const subject = parse('pg://myhost/db?max=18&min=4')
subject.max?.should.equal('18')
subject.min?.should.equal('4')
})
it('configuration parameter keepalives', function () {
const connectionString = 'pg:///?keepalives=1'
const subject = parse(connectionString)
subject.keepalives?.should.equal('1')
})
it('unknown configuration parameter is passed into client', function () {
const connectionString = 'pg:///?ThereIsNoSuchPostgresParameter=1234'
const subject = parse(connectionString)
subject.ThereIsNoSuchPostgresParameter?.should.equal('1234')
})
it('do not override a config field with value from query string', function () {
const subject = parse('socket:/some path/?db=my[db]&encoding=utf8&client_encoding=bogus')
subject.host?.should.equal('/some path/')
subject.database?.should.equal('my[db]', 'must to be escaped and unescaped through "my%5Bdb%5D"')
subject.client_encoding?.should.equal('utf8')
})
it('return last value of repeated parameter', function () {
const connectionString = 'pg:///?keepalives=1&keepalives=0'
const subject = parse(connectionString)
subject.keepalives?.should.equal('0')
})
it('use the port specified in the query parameters', function () {
const connectionString = 'postgres:///?host=localhost&port=1234'
const subject = parse(connectionString)
subject.port?.should.equal('1234')
})
})

View File

@ -0,0 +1,19 @@
{
"compilerOptions": {
"module": "commonjs",
"esModuleInterop": true,
"allowSyntheticDefaultImports": true,
"strict": true,
"target": "es6",
"noImplicitAny": true,
"moduleResolution": "node",
"sourceMap": true,
"outDir": "dist",
"incremental": true,
"baseUrl": ".",
"declaration": true
},
"include": [
"test/**/*"
]
}

View File

@ -0,0 +1,5 @@
// ESM wrapper for pg-cursor.
// Re-export the CommonJS module's export as the default export so that
// `import Cursor from ...` resolves to the same constructor under ESM.
export { default } from '../index.js'

View File

@ -1,4 +1,5 @@
'use strict' 'use strict'
// note: can remove these deep requires when we bump min version of pg to 9.x
const Result = require('pg/lib/result.js') const Result = require('pg/lib/result.js')
const prepare = require('pg/lib/utils.js').prepareValue const prepare = require('pg/lib/utils.js').prepareValue
const EventEmitter = require('events').EventEmitter const EventEmitter = require('events').EventEmitter

View File

@ -1,8 +1,15 @@
{ {
"name": "pg-cursor", "name": "pg-cursor",
"version": "2.12.1", "version": "2.15.3",
"description": "Query cursor extension for node-postgres", "description": "Query cursor extension for node-postgres",
"main": "index.js", "main": "index.js",
"exports": {
".": {
"import": "./esm/index.mjs",
"require": "./index.js",
"default": "./index.js"
}
},
"directories": { "directories": {
"test": "test" "test": "test"
}, },
@ -18,9 +25,13 @@
"license": "MIT", "license": "MIT",
"devDependencies": { "devDependencies": {
"mocha": "^10.5.2", "mocha": "^10.5.2",
"pg": "^8.13.1" "pg": "^8.16.3"
}, },
"peerDependencies": { "peerDependencies": {
"pg": "^8" "pg": "^8"
} },
"files": [
"index.js",
"esm"
]
} }

View File

@ -0,0 +1,3 @@
This is an internal package for node-postgres used to test ESM & CJS module export compatibility.
The only thing you really need to do is `yarn && yarn test` from the root of the project & these tests will run as well as all the other tests. So, basically, you can ignore this. 😄

View File

@ -0,0 +1,35 @@
// Smoke tests: every published entry point (including deep requires)
// must be loadable from a CommonJS context, and pg-native must be usable.
const assert = require('node:assert')
const test = require('node:test')
const { describe, it } = test

// Representative require paths across the workspace packages.
const paths = [
  'pg',
  'pg/lib/index.js',
  'pg/lib/index',
  'pg/lib/connection-parameters',
  'pg/lib/connection-parameters.js',
  'pg/lib/type-overrides',
  'pg-protocol/dist/messages.js',
  'pg-protocol/dist/messages',
  'pg-native/lib/build-result.js',
  'pg-cloudflare/package.json',
]

for (const path of paths) {
  describe(`importing ${path}`, () => {
    it('works with require', () => {
      const mod = require(path)
      assert(mod)
    })
  })
}

describe('pg-native', () => {
  it('should work with commonjs', async () => {
    const pg = require('pg')
    const pool = new pg.native.Pool()
    const result = await pool.query('SELECT 1')
    assert.strictEqual(result.rowCount, 1)
    // Await the shutdown: `pool.end()` returns a promise, and leaving it
    // floating can leak open handles past the end of the test.
    await pool.end()
  })
})

View File

@ -0,0 +1,27 @@
{
"name": "pg-esm-test",
"version": "1.2.3",
"description": "A test module for PostgreSQL with ESM support",
"main": "index.js",
"type": "module",
"scripts": {
"test": "node --test --conditions=workerd"
},
"keywords": [
"postgres",
"postgresql",
"esm",
"test"
],
"devDependencies": {
"pg": "^8.16.3",
"pg-cloudflare": "^1.2.7",
"pg-cursor": "^2.15.3",
"pg-native": "^3.5.2",
"pg-pool": "^3.10.1",
"pg-protocol": "^1.10.3",
"pg-query-stream": "^4.10.3"
},
"author": "Brian M. Carlson <brian.m.carlson@gmail.com>",
"license": "MIT"
}

View File

@ -0,0 +1,9 @@
import assert from 'node:assert'
import { describe, it } from 'node:test'
import { CloudflareSocket } from 'pg-cloudflare'

// The named export must be a constructor usable from ESM.
describe('pg-cloudflare', () => {
  it('should export CloudflareSocket constructor', () => {
    const socket = new CloudflareSocket()
    assert.ok(socket)
  })
})

View File

@ -0,0 +1,17 @@
import assert from 'node:assert'
import { describe, it } from 'node:test'
import { parse, toClientConfig, parseIntoClientConfig } from 'pg-connection-string'

// Each named export must be a callable function when loaded via ESM.
describe('pg-connection-string', () => {
  const exported = { parse, toClientConfig, parseIntoClientConfig }
  for (const [name, fn] of Object.entries(exported)) {
    it(`should export ${name} function`, () => {
      assert.strictEqual(typeof fn, 'function')
    })
  }
})

View File

@ -0,0 +1,9 @@
import assert from 'node:assert'
import { describe, it } from 'node:test'
import Cursor from 'pg-cursor'

// The default export must be constructible without arguments.
describe('pg-cursor', () => {
  it('should export Cursor constructor as default', () => {
    const cursor = new Cursor()
    assert.ok(cursor)
  })
})

View File

@ -0,0 +1,9 @@
import assert from 'node:assert'
import { describe, it } from 'node:test'
import Client from 'pg-native'
describe('pg-native', () => {
it('should export Client constructor', () => {
assert.ok(new Client())
})
})

View File

@ -0,0 +1,9 @@
import assert from 'node:assert'
import { describe, it } from 'node:test'
import Pool from 'pg-pool'
describe('pg-pool', () => {
it('should export Pool constructor', () => {
assert.ok(new Pool())
})
})

View File

@ -0,0 +1,18 @@
import protocol, { NoticeMessage, DatabaseError } from 'pg-protocol/dist/messages.js'
import { describe, it } from 'node:test'
import { strict as assert } from 'node:assert'
describe('pg-protocol', () => {
it('should export database error', () => {
assert.ok(DatabaseError)
})
it('should export protocol', () => {
assert.ok(protocol)
assert.ok(protocol.noData)
assert.ok(protocol.parseComplete)
assert.ok(protocol.NoticeMessage)
})
it('should export NoticeMessage from file in dist folder', () => {
assert.ok(NoticeMessage)
})
})

View File

@ -0,0 +1,9 @@
import assert from 'node:assert'
import { describe, it } from 'node:test'
import QueryStream from 'pg-query-stream'
describe('pg-query-stream', () => {
it('should export QueryStream constructor as default', () => {
assert.ok(new QueryStream())
})
})

View File

@ -0,0 +1,60 @@
import assert from 'node:assert'
import { describe, it } from 'node:test'
import pg, {
Client,
Pool,
Connection,
defaults,
types,
DatabaseError,
escapeIdentifier,
escapeLiteral,
Result,
TypeOverrides,
} from 'pg'
describe('pg', () => {
it('should export Client constructor', () => {
assert.ok(new Client())
})
it('should export Pool constructor', () => {
assert.ok(new Pool())
})
it('should still provide default export', () => {
assert.ok(new pg.Pool())
})
it('should export Connection constructor', () => {
assert.ok(new Connection())
})
it('should export defaults', () => {
assert.ok(defaults)
})
it('should export types', () => {
assert.ok(types)
})
it('should export DatabaseError', () => {
assert.ok(DatabaseError)
})
it('should export escapeIdentifier', () => {
assert.ok(escapeIdentifier)
})
it('should export escapeLiteral', () => {
assert.ok(escapeLiteral)
})
it('should export Result', () => {
assert.ok(Result)
})
it('should export TypeOverrides', () => {
assert.ok(TypeOverrides)
})
})

View File

@ -30,40 +30,40 @@ $ npm i pg-native
### async ### async
```js ```js
var Client = require('pg-native') const Client = require('pg-native')
var client = new Client(); const client = new Client();
client.connect(function(err) { client.connect(function(err) {
if(err) throw err if(err) throw err
//text queries // text queries
client.query('SELECT NOW() AS the_date', function(err, rows) { client.query('SELECT NOW() AS the_date', function(err, rows) {
if(err) throw err if(err) throw err
console.log(rows[0].the_date) //Tue Sep 16 2014 23:42:39 GMT-0400 (EDT) console.log(rows[0].the_date) // Tue Sep 16 2014 23:42:39 GMT-0400 (EDT)
//parameterized statements // parameterized statements
client.query('SELECT $1::text as twitter_handle', ['@briancarlson'], function(err, rows) { client.query('SELECT $1::text as twitter_handle', ['@briancarlson'], function(err, rows) {
if(err) throw err if(err) throw err
console.log(rows[0].twitter_handle) //@briancarlson console.log(rows[0].twitter_handle) //@briancarlson
}) })
//prepared statements // prepared statements
client.prepare('get_twitter', 'SELECT $1::text as twitter_handle', 1, function(err) { client.prepare('get_twitter', 'SELECT $1::text as twitter_handle', 1, function(err) {
if(err) throw err if(err) throw err
//execute the prepared, named statement // execute the prepared, named statement
client.execute('get_twitter', ['@briancarlson'], function(err, rows) { client.execute('get_twitter', ['@briancarlson'], function(err, rows) {
if(err) throw err if(err) throw err
console.log(rows[0].twitter_handle) //@briancarlson console.log(rows[0].twitter_handle) //@briancarlson
//execute the prepared, named statement again // execute the prepared, named statement again
client.execute('get_twitter', ['@realcarrotfacts'], function(err, rows) { client.execute('get_twitter', ['@realcarrotfacts'], function(err, rows) {
if(err) throw err if(err) throw err
console.log(rows[0].twitter_handle) //@realcarrotfacts console.log(rows[0].twitter_handle) // @realcarrotfacts
client.end(function() { client.end(function() {
console.log('ended') console.log('ended')
@ -81,27 +81,27 @@ client.connect(function(err) {
Because `pg-native` is bound to [libpq](https://github.com/brianc/node-libpq) it is able to provide _sync_ operations for both connecting and queries. This is a bad idea in _non-blocking systems_ like web servers, but is exteremly convienent in scripts and bootstrapping applications - much the same way `fs.readFileSync` comes in handy. Because `pg-native` is bound to [libpq](https://github.com/brianc/node-libpq) it is able to provide _sync_ operations for both connecting and queries. This is a bad idea in _non-blocking systems_ like web servers, but is exteremly convienent in scripts and bootstrapping applications - much the same way `fs.readFileSync` comes in handy.
```js ```js
var Client = require('pg-native') const Client = require('pg-native')
var client = new Client() const client = new Client()
client.connectSync() client.connectSync()
//text queries // text queries
var rows = client.querySync('SELECT NOW() AS the_date') const rows = client.querySync('SELECT NOW() AS the_date')
console.log(rows[0].the_date) //Tue Sep 16 2014 23:42:39 GMT-0400 (EDT) console.log(rows[0].the_date) // Tue Sep 16 2014 23:42:39 GMT-0400 (EDT)
//parameterized queries // parameterized queries
var rows = client.querySync('SELECT $1::text as twitter_handle', ['@briancarlson']) const rows = client.querySync('SELECT $1::text as twitter_handle', ['@briancarlson'])
console.log(rows[0].twitter_handle) //@briancarlson console.log(rows[0].twitter_handle) // @briancarlson
//prepared statements // prepared statements
client.prepareSync('get_twitter', 'SELECT $1::text as twitter_handle', 1) client.prepareSync('get_twitter', 'SELECT $1::text as twitter_handle', 1)
var rows = client.executeSync('get_twitter', ['@briancarlson']) const rows = client.executeSync('get_twitter', ['@briancarlson'])
console.log(rows[0].twitter_handle) //@briancarlson console.log(rows[0].twitter_handle) // @briancarlson
var rows = client.executeSync('get_twitter', ['@realcarrotfacts']) const rows = client.executeSync('get_twitter', ['@realcarrotfacts'])
console.log(rows[0].twitter_handle) //@realcarrotfacts console.log(rows[0].twitter_handle) // @realcarrotfacts
``` ```
## api ## api
@ -125,14 +125,14 @@ Returns an `Error` to the `callback` if the connection was unsuccessful. `callb
##### example ##### example
```js ```js
var client = new Client() const client = new Client()
client.connect(function(err) { client.connect(function(err) {
if(err) throw err if(err) throw err
console.log('connected!') console.log('connected!')
}) })
var client2 = new Client() const client2 = new Client()
client2.connect('postgresql://user:password@host:5432/database?param=value', function(err) { client2.connect('postgresql://user:password@host:5432/database?param=value', function(err) {
if(err) throw err if(err) throw err
@ -147,7 +147,7 @@ Execute a query with the text of `queryText` and _optional_ parameters specified
##### example ##### example
```js ```js
var client = new Client() const client = new Client()
client.connect(function(err) { client.connect(function(err) {
if (err) throw err if (err) throw err
@ -175,7 +175,7 @@ Prepares a _named statement_ for later execution. You _must_ supply the name of
##### example ##### example
```js ```js
var client = new Client() const client = new Client()
client.connect(function(err) { client.connect(function(err) {
if(err) throw err if(err) throw err
@ -197,7 +197,7 @@ Executes a previously prepared statement on this client with the name of `statem
```js ```js
var client = new Client() const client = new Client()
client.connect(function(err) { client.connect(function(err) {
if(err) throw err if(err) throw err
@ -221,7 +221,7 @@ Ends the connection. Calls the _optional_ callback when the connection is termin
##### example ##### example
```js ```js
var client = new Client() const client = new Client()
client.connect(function(err) { client.connect(function(err) {
if(err) throw err if(err) throw err
client.end(function() { client.end(function() {
@ -236,9 +236,9 @@ Cancels the active query on the client. Callback receives an error if there was
##### example ##### example
```js ```js
var client = new Client() const client = new Client()
client.connectSync() client.connectSync()
//sleep for 100 seconds // sleep for 100 seconds
client.query('select pg_sleep(100)', function(err) { client.query('select pg_sleep(100)', function(err) {
console.log(err) // [Error: ERROR: canceling statement due to user request] console.log(err) // [Error: ERROR: canceling statement due to user request]
}) })
@ -264,7 +264,7 @@ Prepares a name statement with name of `statementName` and a query text of `quer
- __`client.executeSync(statementName:string, <values:string[]>) -> results:Object[]`__ - __`client.executeSync(statementName:string, <values:string[]>) -> results:Object[]`__
Executes a previously prepared statement on this client with the name of `statementName`, passing it the optional array of query paramters as a `values` array. Throws an `Error` if the execution fails, otherwas returns an array of results. Executes a previously prepared statement on this client with the name of `statementName`, passing it the optional array of query parameters as a `values` array. Throws an `Error` if the execution fails, otherwise returns an array of results.
## testing ## testing

View File

@ -1,10 +1,10 @@
var pg = require('pg').native const pg = require('pg').native
var Native = require('../') const Native = require('../')
var warmup = function (fn, cb) { const warmup = function (fn, cb) {
var count = 0 let count = 0
var max = 10 const max = 10
var run = function (err) { const run = function (err) {
if (err) return cb(err) if (err) return cb(err)
if (max >= count++) { if (max >= count++) {
@ -16,32 +16,32 @@ var warmup = function (fn, cb) {
run() run()
} }
var native = Native() const native = Native()
native.connectSync() native.connectSync()
var queryText = 'SELECT generate_series(0, 1000)' const queryText = 'SELECT generate_series(0, 1000) as X, generate_series(0, 1000) as Y, generate_series(0, 1000) as Z'
var client = new pg.Client() const client = new pg.Client()
client.connect(function () { client.connect(function () {
var pure = function (cb) { const pure = function (cb) {
client.query(queryText, function (err) { client.query(queryText, function (err) {
if (err) throw err if (err) throw err
cb(err) cb(err)
}) })
} }
var nativeQuery = function (cb) { const nativeQuery = function (cb) {
native.query(queryText, function (err) { native.query(queryText, function (err) {
if (err) throw err if (err) throw err
cb(err) cb(err)
}) })
} }
var run = function () { const run = function () {
var start = Date.now() console.time('pure')
warmup(pure, function () { warmup(pure, function () {
console.log('pure done', Date.now() - start) console.timeEnd('pure')
start = Date.now() console.time('native')
warmup(nativeQuery, function () { warmup(nativeQuery, function () {
console.log('native done', Date.now() - start) console.timeEnd('native')
}) })
}) })
} }

View File

@ -1,29 +1,29 @@
var Client = require('../') const Client = require('../')
var async = require('async') const async = require('async')
var loop = function () { const loop = function () {
var client = new Client() const client = new Client()
var connect = function (cb) { const connect = function (cb) {
client.connect(cb) client.connect(cb)
} }
var simpleQuery = function (cb) { const simpleQuery = function (cb) {
client.query('SELECT NOW()', cb) client.query('SELECT NOW()', cb)
} }
var paramsQuery = function (cb) { const paramsQuery = function (cb) {
client.query('SELECT $1::text as name', ['Brian'], cb) client.query('SELECT $1::text as name', ['Brian'], cb)
} }
var prepared = function (cb) { const prepared = function (cb) {
client.prepare('test', 'SELECT $1::text as name', 1, function (err) { client.prepare('test', 'SELECT $1::text as name', 1, function (err) {
if (err) return cb(err) if (err) return cb(err)
client.execute('test', ['Brian'], cb) client.execute('test', ['Brian'], cb)
}) })
} }
var sync = function (cb) { const sync = function (cb) {
client.querySync('SELECT NOW()') client.querySync('SELECT NOW()')
client.querySync('SELECT $1::text as name', ['Brian']) client.querySync('SELECT $1::text as name', ['Brian'])
client.prepareSync('boom', 'SELECT $1::text as name', 1) client.prepareSync('boom', 'SELECT $1::text as name', 1)
@ -31,16 +31,16 @@ var loop = function () {
setImmediate(cb) setImmediate(cb)
} }
var end = function (cb) { const end = function (cb) {
client.end(cb) client.end(cb)
} }
var ops = [connect, simpleQuery, paramsQuery, prepared, sync, end] const ops = [connect, simpleQuery, paramsQuery, prepared, sync, end]
var start = Date.now() const start = performance.now()
async.series(ops, function (err) { async.series(ops, function (err) {
if (err) throw err if (err) throw err
console.log(Date.now() - start) console.log(performance.now() - start)
setImmediate(loop) setImmediate(loop)
}) })
} }

View File

@ -0,0 +1,5 @@
// ESM wrapper for pg-native
import Client from '../index.js'
// Export as default only to match CJS module
export default Client

View File

@ -1,12 +1,12 @@
var Libpq = require('libpq') const Libpq = require('libpq')
var EventEmitter = require('events').EventEmitter const EventEmitter = require('events').EventEmitter
var util = require('util') const util = require('util')
var assert = require('assert') const assert = require('assert')
var types = require('pg-types') const types = require('pg-types')
var buildResult = require('./lib/build-result') const buildResult = require('./lib/build-result')
var CopyStream = require('./lib/copy-stream') const CopyStream = require('./lib/copy-stream')
var Client = (module.exports = function (config) { const Client = (module.exports = function (config) {
if (!(this instanceof Client)) { if (!(this instanceof Client)) {
return new Client(config) return new Client(config)
} }
@ -18,7 +18,7 @@ var Client = (module.exports = function (config) {
this._reading = false this._reading = false
this._read = this._read.bind(this) this._read = this._read.bind(this)
// allow custom type converstion to be passed in // allow custom type conversion to be passed in
this._types = config.types || types this._types = config.types || types
// allow config to specify returning results // allow config to specify returning results
@ -51,34 +51,31 @@ Client.prototype.connectSync = function (params) {
} }
Client.prototype.query = function (text, values, cb) { Client.prototype.query = function (text, values, cb) {
var queryFn let queryFn
if (typeof values === 'function') { if (typeof values === 'function') {
cb = values cb = values
} }
if (Array.isArray(values)) { if (Array.isArray(values)) {
queryFn = function () { queryFn = () => {
return self.pq.sendQueryParams(text, values) return this.pq.sendQueryParams(text, values)
} }
} else { } else {
queryFn = function () { queryFn = () => {
return self.pq.sendQuery(text) return this.pq.sendQuery(text)
} }
} }
var self = this this._dispatchQuery(this.pq, queryFn, (err) => {
self._dispatchQuery(self.pq, queryFn, function (err) {
if (err) return cb(err) if (err) return cb(err)
this._awaitResult(cb)
self._awaitResult(cb)
}) })
} }
Client.prototype.prepare = function (statementName, text, nParams, cb) { Client.prototype.prepare = function (statementName, text, nParams, cb) {
var self = this const self = this
var fn = function () { const fn = function () {
return self.pq.sendPrepare(statementName, text, nParams) return self.pq.sendPrepare(statementName, text, nParams)
} }
@ -89,9 +86,9 @@ Client.prototype.prepare = function (statementName, text, nParams, cb) {
} }
Client.prototype.execute = function (statementName, parameters, cb) { Client.prototype.execute = function (statementName, parameters, cb) {
var self = this const self = this
var fn = function () { const fn = function () {
return self.pq.sendQueryPrepared(statementName, parameters) return self.pq.sendQueryPrepared(statementName, parameters)
} }
@ -111,7 +108,7 @@ Client.prototype.getCopyStream = function () {
Client.prototype.cancel = function (cb) { Client.prototype.cancel = function (cb) {
assert(cb, 'Callback is required') assert(cb, 'Callback is required')
// result is either true or a string containing an error // result is either true or a string containing an error
var result = this.pq.cancel() const result = this.pq.cancel()
return setImmediate(function () { return setImmediate(function () {
cb(result === true ? undefined : new Error(result)) cb(result === true ? undefined : new Error(result))
}) })
@ -158,7 +155,7 @@ Client.prototype.end = function (cb) {
} }
Client.prototype._readError = function (message) { Client.prototype._readError = function (message) {
var err = new Error(message || this.pq.errorMessage()) const err = new Error(message || this.pq.errorMessage())
this.emit('error', err) this.emit('error', err)
} }
@ -174,7 +171,7 @@ Client.prototype._consumeQueryResults = function (pq) {
} }
Client.prototype._emitResult = function (pq) { Client.prototype._emitResult = function (pq) {
var status = pq.resultStatus() const status = pq.resultStatus()
switch (status) { switch (status) {
case 'PGRES_FATAL_ERROR': case 'PGRES_FATAL_ERROR':
this._queryError = new Error(this.pq.resultErrorMessage()) this._queryError = new Error(this.pq.resultErrorMessage())
@ -183,8 +180,10 @@ Client.prototype._emitResult = function (pq) {
case 'PGRES_TUPLES_OK': case 'PGRES_TUPLES_OK':
case 'PGRES_COMMAND_OK': case 'PGRES_COMMAND_OK':
case 'PGRES_EMPTY_QUERY': case 'PGRES_EMPTY_QUERY':
{
const result = this._consumeQueryResults(this.pq) const result = this._consumeQueryResults(this.pq)
this.emit('result', result) this.emit('result', result)
}
break break
case 'PGRES_COPY_OUT': case 'PGRES_COPY_OUT':
@ -201,7 +200,7 @@ Client.prototype._emitResult = function (pq) {
// called when libpq is readable // called when libpq is readable
Client.prototype._read = function () { Client.prototype._read = function () {
var pq = this.pq const pq = this.pq
// read waiting data from the socket // read waiting data from the socket
// e.g. clear the pending 'select' // e.g. clear the pending 'select'
if (!pq.consumeInput()) { if (!pq.consumeInput()) {
@ -236,7 +235,7 @@ Client.prototype._read = function () {
this.emit('readyForQuery') this.emit('readyForQuery')
var notice = this.pq.notifies() let notice = this.pq.notifies()
while (notice) { while (notice) {
this.emit('notification', notice) this.emit('notification', notice)
notice = this.pq.notifies() notice = this.pq.notifies()
@ -252,8 +251,8 @@ Client.prototype._startReading = function () {
this.pq.startReader() this.pq.startReader()
} }
var throwIfError = function (pq) { const throwIfError = function (pq) {
var err = pq.resultErrorMessage() || pq.errorMessage() const err = pq.resultErrorMessage() || pq.errorMessage()
if (err) { if (err) {
throw new Error(err) throw new Error(err)
} }
@ -266,7 +265,7 @@ Client.prototype._awaitResult = function (cb) {
// wait for the writable socket to drain // wait for the writable socket to drain
Client.prototype._waitForDrain = function (pq, cb) { Client.prototype._waitForDrain = function (pq, cb) {
var res = pq.flush() const res = pq.flush()
// res of 0 is success // res of 0 is success
if (res === 0) return cb() if (res === 0) return cb()
@ -275,7 +274,7 @@ Client.prototype._waitForDrain = function (pq, cb) {
// otherwise outgoing message didn't flush to socket // otherwise outgoing message didn't flush to socket
// wait for it to flush and try again // wait for it to flush and try again
var self = this const self = this
// you cannot read & write on a socket at the same time // you cannot read & write on a socket at the same time
return pq.writable(function () { return pq.writable(function () {
self._waitForDrain(pq, cb) self._waitForDrain(pq, cb)
@ -286,9 +285,9 @@ Client.prototype._waitForDrain = function (pq, cb) {
// finish writing query text to the socket // finish writing query text to the socket
Client.prototype._dispatchQuery = function (pq, fn, cb) { Client.prototype._dispatchQuery = function (pq, fn, cb) {
this._stopReading() this._stopReading()
var success = pq.setNonBlocking(true) const success = pq.setNonBlocking(true)
if (!success) return cb(new Error('Unable to set non-blocking to true')) if (!success) return cb(new Error('Unable to set non-blocking to true'))
var sent = fn() const sent = fn()
if (!sent) return cb(new Error(pq.errorMessage() || 'Something went wrong dispatching the query')) if (!sent) return cb(new Error(pq.errorMessage() || 'Something went wrong dispatching the query'))
this._waitForDrain(pq, cb) this._waitForDrain(pq, cb)
} }

View File

@ -9,6 +9,7 @@ class Result {
this.rowCount = undefined this.rowCount = undefined
this.fields = [] this.fields = []
this.rows = [] this.rows = []
this._prebuiltEmptyResultObject = null
} }
consumeCommand(pq) { consumeCommand(pq) {
@ -18,47 +19,48 @@ class Result {
consumeFields(pq) { consumeFields(pq) {
const nfields = pq.nfields() const nfields = pq.nfields()
for (var x = 0; x < nfields; x++) { this.fields = new Array(nfields)
this.fields.push({ const row = {}
name: pq.fname(x), for (let x = 0; x < nfields; x++) {
const name = pq.fname(x)
row[name] = null
this.fields[x] = {
name: name,
dataTypeID: pq.ftype(x), dataTypeID: pq.ftype(x),
})
} }
} }
this._prebuiltEmptyResultObject = { ...row }
}
consumeRows(pq) { consumeRows(pq) {
const tupleCount = pq.ntuples() const tupleCount = pq.ntuples()
for (var i = 0; i < tupleCount; i++) { this.rows = new Array(tupleCount)
const row = this._arrayMode ? this.consumeRowAsArray(pq, i) : this.consumeRowAsObject(pq, i) for (let i = 0; i < tupleCount; i++) {
this.rows.push(row) this.rows[i] = this._arrayMode ? this.consumeRowAsArray(pq, i) : this.consumeRowAsObject(pq, i)
} }
} }
consumeRowAsObject(pq, rowIndex) { consumeRowAsObject(pq, rowIndex) {
const row = {} const row = { ...this._prebuiltEmptyResultObject }
for (var j = 0; j < this.fields.length; j++) { for (let j = 0; j < this.fields.length; j++) {
const value = this.readValue(pq, rowIndex, j) row[this.fields[j].name] = this.readValue(pq, rowIndex, j)
row[this.fields[j].name] = value
} }
return row return row
} }
consumeRowAsArray(pq, rowIndex) { consumeRowAsArray(pq, rowIndex) {
const row = [] const row = new Array(this.fields.length)
for (var j = 0; j < this.fields.length; j++) { for (let j = 0; j < this.fields.length; j++) {
const value = this.readValue(pq, rowIndex, j) row[j] = this.readValue(pq, rowIndex, j)
row.push(value)
} }
return row return row
} }
readValue(pq, rowIndex, colIndex) { readValue(pq, rowIndex, colIndex) {
var rawValue = pq.getvalue(rowIndex, colIndex) const rawValue = pq.getvalue(rowIndex, colIndex)
if (rawValue === '') { if (rawValue === '' && pq.getisnull(rowIndex, colIndex)) {
if (pq.getisnull(rowIndex, colIndex)) {
return null return null
} }
}
const dataTypeId = this.fields[colIndex].dataTypeID const dataTypeId = this.fields[colIndex].dataTypeID
return this._types.getTypeParser(dataTypeId)(rawValue) return this._types.getTypeParser(dataTypeId)(rawValue)
} }

View File

@ -1,8 +1,8 @@
var Duplex = require('stream').Duplex const Duplex = require('stream').Duplex
var Writable = require('stream').Writable const Writable = require('stream').Writable
var util = require('util') const util = require('util')
var CopyStream = (module.exports = function (pq, options) { const CopyStream = (module.exports = function (pq, options) {
Duplex.call(this, options) Duplex.call(this, options)
this.pq = pq this.pq = pq
this._reading = false this._reading = false
@ -12,7 +12,7 @@ util.inherits(CopyStream, Duplex)
// writer methods // writer methods
CopyStream.prototype._write = function (chunk, encoding, cb) { CopyStream.prototype._write = function (chunk, encoding, cb) {
var result = this.pq.putCopyData(chunk) const result = this.pq.putCopyData(chunk)
// sent successfully // sent successfully
if (result === 1) return cb() if (result === 1) return cb()
@ -21,22 +21,22 @@ CopyStream.prototype._write = function (chunk, encoding, cb) {
if (result === -1) return cb(new Error(this.pq.errorMessage())) if (result === -1) return cb(new Error(this.pq.errorMessage()))
// command would block. wait for writable and call again. // command would block. wait for writable and call again.
var self = this const self = this
this.pq.writable(function () { this.pq.writable(function () {
self._write(chunk, encoding, cb) self._write(chunk, encoding, cb)
}) })
} }
CopyStream.prototype.end = function () { CopyStream.prototype.end = function () {
var args = Array.prototype.slice.call(arguments, 0) const args = Array.prototype.slice.call(arguments, 0)
var self = this const self = this
var callback = args.pop() const callback = args.pop()
if (args.length) { if (args.length) {
this.write(args[0]) this.write(args[0])
} }
var result = this.pq.putCopyEnd() const result = this.pq.putCopyEnd()
// sent successfully // sent successfully
if (result === 1) { if (result === 1) {
@ -55,7 +55,7 @@ CopyStream.prototype.end = function () {
// error // error
if (result === -1) { if (result === -1) {
var err = new Error(this.pq.errorMessage()) const err = new Error(this.pq.errorMessage())
return this.emit('error', err) return this.emit('error', err)
} }
@ -70,7 +70,7 @@ CopyStream.prototype.end = function () {
// reader methods // reader methods
CopyStream.prototype._consumeBuffer = function (cb) { CopyStream.prototype._consumeBuffer = function (cb) {
var result = this.pq.getCopyData(true) const result = this.pq.getCopyData(true)
if (result instanceof Buffer) { if (result instanceof Buffer) {
return setImmediate(function () { return setImmediate(function () {
cb(null, result) cb(null, result)
@ -81,7 +81,7 @@ CopyStream.prototype._consumeBuffer = function (cb) {
return cb(null, null) return cb(null, null)
} }
if (result === 0) { if (result === 0) {
var self = this const self = this
this.pq.once('readable', function () { this.pq.once('readable', function () {
self.pq.stopReader() self.pq.stopReader()
self.pq.consumeInput() self.pq.consumeInput()
@ -96,7 +96,7 @@ CopyStream.prototype._read = function (size) {
if (this._reading) return if (this._reading) return
this._reading = true this._reading = true
// console.log('read begin'); // console.log('read begin');
var self = this const self = this
this._consumeBuffer(function (err, buffer) { this._consumeBuffer(function (err, buffer) {
self._reading = false self._reading = false
if (err) { if (err) {
@ -110,18 +110,18 @@ CopyStream.prototype._read = function (size) {
}) })
} }
var consumeResults = function (pq, cb) { const consumeResults = function (pq, cb) {
var cleanup = function () { const cleanup = function () {
pq.removeListener('readable', onReadable) pq.removeListener('readable', onReadable)
pq.stopReader() pq.stopReader()
} }
var readError = function (message) { const readError = function (message) {
cleanup() cleanup()
return cb(new Error(message || pq.errorMessage())) return cb(new Error(message || pq.errorMessage()))
} }
var onReadable = function () { const onReadable = function () {
// read waiting data from the socket // read waiting data from the socket
// e.g. clear the pending 'select' // e.g. clear the pending 'select'
if (!pq.consumeInput()) { if (!pq.consumeInput()) {

View File

@ -1,14 +1,26 @@
{ {
"name": "pg-native", "name": "pg-native",
"version": "3.2.0", "version": "3.5.2",
"description": "A slightly nicer interface to Postgres over node-libpq", "description": "A slightly nicer interface to Postgres over node-libpq",
"main": "index.js", "main": "index.js",
"exports": {
".": {
"import": "./esm/index.mjs",
"require": "./index.js",
"default": "./index.js"
},
"./lib/*": {
"import": "./lib/*",
"require": "./lib/*",
"default": "./lib/*"
}
},
"scripts": { "scripts": {
"test": "mocha" "test": "mocha"
}, },
"repository": { "repository": {
"type": "git", "type": "git",
"url": "git://github.com/brianc/node-pg-native.git" "url": "https://github.com/brianc/node-postgres.git"
}, },
"keywords": [ "keywords": [
"postgres", "postgres",
@ -18,21 +30,26 @@
"author": "Brian M. Carlson", "author": "Brian M. Carlson",
"license": "MIT", "license": "MIT",
"bugs": { "bugs": {
"url": "https://github.com/brianc/node-pg-native/issues" "url": "https://github.com/brianc/node-postgres/issues"
}, },
"homepage": "https://github.com/brianc/node-pg-native", "homepage": "https://github.com/brianc/node-postgres/tree/master/packages/pg-native",
"dependencies": { "dependencies": {
"libpq": "1.8.13", "libpq": "^1.8.15",
"pg-types": "^1.12.1" "pg-types": "2.2.0"
}, },
"devDependencies": { "devDependencies": {
"async": "^0.9.0", "async": "^0.9.0",
"concat-stream": "^1.4.6", "concat-stream": "^1.4.6",
"generic-pool": "^2.1.1", "generic-pool": "^2.1.1",
"lodash": "^2.4.1", "lodash": "^4.17.21",
"mocha": "10.5.2", "mocha": "10.5.2",
"node-gyp": ">=10.x", "node-gyp": ">=10.x",
"okay": "^0.3.0", "okay": "^0.3.0",
"semver": "^4.1.0" "semver": "^7.7.2"
} },
"files": [
"index.js",
"lib",
"esm"
]
} }

View File

@ -1,16 +1,16 @@
var Client = require('../') const Client = require('../')
var assert = require('assert') const assert = require('assert')
describe('client with arrayMode', function () { describe('client with arrayMode', function () {
it('returns result as array', function (done) { it('returns result as array', function (done) {
var client = new Client({ arrayMode: true }) const client = new Client({ arrayMode: true })
client.connectSync() client.connectSync()
client.querySync('CREATE TEMP TABLE blah(name TEXT)') client.querySync('CREATE TEMP TABLE blah(name TEXT)')
client.querySync('INSERT INTO blah (name) VALUES ($1)', ['brian']) client.querySync('INSERT INTO blah (name) VALUES ($1)', ['brian'])
client.querySync('INSERT INTO blah (name) VALUES ($1)', ['aaron']) client.querySync('INSERT INTO blah (name) VALUES ($1)', ['aaron'])
var rows = client.querySync('SELECT * FROM blah') const rows = client.querySync('SELECT * FROM blah')
assert.equal(rows.length, 2) assert.equal(rows.length, 2)
var row = rows[0] const row = rows[0]
assert.equal(row.length, 1) assert.equal(row.length, 1)
assert.equal(row[0], 'brian') assert.equal(row[0], 'brian')
assert.equal(rows[1][0], 'aaron') assert.equal(rows[1][0], 'aaron')

View File

@ -1,7 +1,7 @@
var Client = require('../') const Client = require('../')
var ok = require('okay') const ok = require('okay')
var assert = require('assert') const assert = require('assert')
var concat = require('concat-stream') const concat = require('concat-stream')
describe('async workflow', function () { describe('async workflow', function () {
before(function (done) { before(function (done) {
@ -9,7 +9,7 @@ describe('async workflow', function () {
this.client.connect(done) this.client.connect(done)
}) })
var echoParams = function (params, cb) { const echoParams = function (params, cb) {
this.client.query( this.client.query(
'SELECT $1::text as first, $2::text as second', 'SELECT $1::text as first, $2::text as second',
params, params,
@ -20,20 +20,20 @@ describe('async workflow', function () {
) )
} }
var checkParams = function (params, rows) { const checkParams = function (params, rows) {
assert.equal(rows.length, 1) assert.equal(rows.length, 1)
assert.equal(rows[0].first, params[0]) assert.equal(rows[0].first, params[0])
assert.equal(rows[0].second, params[1]) assert.equal(rows[0].second, params[1])
} }
it('sends async query', function (done) { it('sends async query', function (done) {
var params = ['one', 'two'] const params = ['one', 'two']
echoParams.call(this, params, done) echoParams.call(this, params, done)
}) })
it('sends multiple async queries', function (done) { it('sends multiple async queries', function (done) {
var self = this const self = this
var params = ['bang', 'boom'] const params = ['bang', 'boom']
echoParams.call( echoParams.call(
this, this,
params, params,
@ -44,13 +44,13 @@ describe('async workflow', function () {
}) })
it('sends an async query, copies in, copies out, and sends another query', function (done) { it('sends an async query, copies in, copies out, and sends another query', function (done) {
var self = this const self = this
this.client.querySync('CREATE TEMP TABLE test(name text, age int)') this.client.querySync('CREATE TEMP TABLE test(name text, age int)')
this.client.query( this.client.query(
"INSERT INTO test(name, age) VALUES('brian', 32)", "INSERT INTO test(name, age) VALUES('brian', 32)",
ok(done, function () { ok(done, function () {
self.client.querySync('COPY test FROM stdin') self.client.querySync('COPY test FROM stdin')
var input = self.client.getCopyStream() const input = self.client.getCopyStream()
input.write(Buffer.from('Aaron\t30\n', 'utf8')) input.write(Buffer.from('Aaron\t30\n', 'utf8'))
input.end(function () { input.end(function () {
self.client.query( self.client.query(
@ -60,7 +60,7 @@ describe('async workflow', function () {
self.client.query( self.client.query(
'COPY test TO stdout', 'COPY test TO stdout',
ok(done, function () { ok(done, function () {
var output = self.client.getCopyStream() const output = self.client.getCopyStream()
// pump the stream // pump the stream
output.read() output.read()

View File

@ -1,9 +1,9 @@
var Client = require('../') const Client = require('../')
var assert = require('assert') const assert = require('assert')
describe('cancel query', function () { describe('cancel query', function () {
it('works', function (done) { it('works', function (done) {
var client = new Client() const client = new Client()
client.connectSync() client.connectSync()
client.query('SELECT pg_sleep(1000);', function (err) { client.query('SELECT pg_sleep(1000);', function (err) {
assert(err instanceof Error) assert(err instanceof Error)
@ -17,7 +17,7 @@ describe('cancel query', function () {
}) })
it('does not raise error if no active query', function (done) { it('does not raise error if no active query', function (done) {
var client = new Client() const client = new Client()
client.connectSync() client.connectSync()
client.cancel(function (err) { client.cancel(function (err) {
assert.ifError(err) assert.ifError(err)

View File

@ -1,11 +1,11 @@
'use strict' 'use strict'
var Client = require('../') const Client = require('../')
var assert = require('assert') const assert = require('assert')
describe('connection errors', function () { describe('connection errors', function () {
it('raise error events', function (done) { it('raise error events', function (done) {
var client = new Client() const client = new Client()
client.connectSync() client.connectSync()
client.query('SELECT pg_terminate_backend(pg_backend_pid())', assert.fail) client.query('SELECT pg_terminate_backend(pg_backend_pid())', assert.fail)
client.on('error', function (err) { client.on('error', function (err) {

View File

@ -1,9 +1,9 @@
var Client = require('../') const Client = require('../')
var assert = require('assert') const assert = require('assert')
describe('connection error', function () { describe('connection error', function () {
it('doesnt segfault', function (done) { it('doesnt segfault', function (done) {
var client = new Client() const client = new Client()
client.connect('asldgsdgasgdasdg', function (err) { client.connect('asldgsdgasgdasdg', function (err) {
assert(err) assert(err)
// calling error on a closed client was segfaulting // calling error on a closed client was segfaulting
@ -15,7 +15,7 @@ describe('connection error', function () {
describe('reading while not connected', function () { describe('reading while not connected', function () {
it('does not seg fault but does throw execption', function () { it('does not seg fault but does throw execption', function () {
var client = new Client() const client = new Client()
assert.throws(function () { assert.throws(function () {
client.on('notification', function (msg) {}) client.on('notification', function (msg) {})
}) })

View File

@ -1,5 +1,5 @@
var assert = require('assert') const assert = require('assert')
var Client = require('../') const Client = require('../')
describe('COPY FROM', function () { describe('COPY FROM', function () {
before(function (done) { before(function (done) {
@ -12,17 +12,17 @@ describe('COPY FROM', function () {
}) })
it('works', function (done) { it('works', function (done) {
var client = this.client const client = this.client
this.client.querySync('CREATE TEMP TABLE blah(name text, age int)') this.client.querySync('CREATE TEMP TABLE blah(name text, age int)')
this.client.querySync('COPY blah FROM stdin') this.client.querySync('COPY blah FROM stdin')
var stream = this.client.getCopyStream() const stream = this.client.getCopyStream()
stream.write(Buffer.from('Brian\t32\n', 'utf8')) stream.write(Buffer.from('Brian\t32\n', 'utf8'))
stream.write(Buffer.from('Aaron\t30\n', 'utf8')) stream.write(Buffer.from('Aaron\t30\n', 'utf8'))
stream.write(Buffer.from('Shelley\t28\n', 'utf8')) stream.write(Buffer.from('Shelley\t28\n', 'utf8'))
stream.end() stream.end()
stream.once('finish', function () { stream.once('finish', function () {
var rows = client.querySync('SELECT COUNT(*) FROM blah') const rows = client.querySync('SELECT COUNT(*) FROM blah')
assert.equal(rows.length, 1) assert.equal(rows.length, 1)
assert.equal(rows[0].count, 3) assert.equal(rows[0].count, 3)
done() done()
@ -30,14 +30,14 @@ describe('COPY FROM', function () {
}) })
it('works with a callback passed to end', function (done) { it('works with a callback passed to end', function (done) {
var client = this.client const client = this.client
this.client.querySync('CREATE TEMP TABLE boom(name text, age int)') this.client.querySync('CREATE TEMP TABLE boom(name text, age int)')
this.client.querySync('COPY boom FROM stdin') this.client.querySync('COPY boom FROM stdin')
var stream = this.client.getCopyStream() const stream = this.client.getCopyStream()
stream.write(Buffer.from('Brian\t32\n', 'utf8')) stream.write(Buffer.from('Brian\t32\n', 'utf8'))
stream.write(Buffer.from('Aaron\t30\n', 'utf8'), function () { stream.write(Buffer.from('Aaron\t30\n', 'utf8'), function () {
stream.end(Buffer.from('Shelley\t28\n', 'utf8'), function () { stream.end(Buffer.from('Shelley\t28\n', 'utf8'), function () {
var rows = client.querySync('SELECT COUNT(*) FROM boom') const rows = client.querySync('SELECT COUNT(*) FROM boom')
assert.equal(rows.length, 1) assert.equal(rows.length, 1)
assert.equal(rows[0].count, 3) assert.equal(rows[0].count, 3)
done() done()

View File

@ -1,7 +1,7 @@
var assert = require('assert') const assert = require('assert')
var Client = require('../') const Client = require('../')
var concat = require('concat-stream') const concat = require('concat-stream')
var _ = require('lodash') const _ = require('lodash')
describe('COPY TO', function () { describe('COPY TO', function () {
before(function (done) { before(function (done) {
@ -14,18 +14,18 @@ describe('COPY TO', function () {
}) })
it('works - basic check', function (done) { it('works - basic check', function (done) {
var limit = 1000 const limit = 1000
var qText = 'COPY (SELECT * FROM generate_series(0, ' + (limit - 1) + ')) TO stdout' const qText = 'COPY (SELECT * FROM generate_series(0, ' + (limit - 1) + ')) TO stdout'
var self = this const self = this
this.client.query(qText, function (err) { this.client.query(qText, function (err) {
if (err) return done(err) if (err) return done(err)
var stream = self.client.getCopyStream() const stream = self.client.getCopyStream()
// pump the stream for node v0.11.x // pump the stream for node v0.11.x
stream.read() stream.read()
stream.pipe( stream.pipe(
concat(function (buff) { concat(function (buff) {
var res = buff.toString('utf8') const res = buff.toString('utf8')
var expected = _.range(0, limit).join('\n') + '\n' const expected = _.range(0, limit).join('\n') + '\n'
assert.equal(res, expected) assert.equal(res, expected)
done() done()
}) })

View File

@ -1,10 +1,10 @@
var Client = require('../') const Client = require('../')
var ok = require('okay') const ok = require('okay')
var assert = require('assert') const assert = require('assert')
describe('Custom type parser', function () { describe('Custom type parser', function () {
it('is used by client', function (done) { it('is used by client', function (done) {
var client = new Client({ const client = new Client({
types: { types: {
getTypeParser: function () { getTypeParser: function () {
return function () { return function () {
@ -14,7 +14,7 @@ describe('Custom type parser', function () {
}, },
}) })
client.connectSync() client.connectSync()
var rows = client.querySync('SELECT NOW() AS when') const rows = client.querySync('SELECT NOW() AS when')
assert.equal(rows[0].when, 'blah') assert.equal(rows[0].when, 'blah')
client.query( client.query(
'SELECT NOW() as when', 'SELECT NOW() as when',

View File

@ -1,16 +1,16 @@
var Client = require('../') const Client = require('../')
var assert = require('assert') const assert = require('assert')
var checkDomain = function (domain, when) { const checkDomain = function (domain, when) {
assert(process.domain, 'Domain was lost after ' + when) assert(process.domain, 'Domain was lost after ' + when)
assert.strictEqual(process.domain, domain, 'Domain switched after ' + when) assert.strictEqual(process.domain, domain, 'Domain switched after ' + when)
} }
describe('domains', function () { describe('domains', function () {
it('remains bound after a query', function (done) { it('remains bound after a query', function (done) {
var domain = require('domain').create() // eslint-disable-line const domain = require('domain').create()
domain.run(function () { domain.run(function () {
var client = new Client() const client = new Client()
client.connect(function () { client.connect(function () {
checkDomain(domain, 'connection') checkDomain(domain, 'connection')
client.query('SELECT NOW()', function () { client.query('SELECT NOW()', function () {

View File

@ -1,5 +1,5 @@
var Client = require('../') const Client = require('../')
var assert = require('assert') const assert = require('assert')
describe('empty query', () => { describe('empty query', () => {
it('has field metadata in result', (done) => { it('has field metadata in result', (done) => {

View File

@ -1,5 +1,5 @@
var Client = require('../') const Client = require('../')
var assert = require('assert') const assert = require('assert')
describe('huge async query', function () { describe('huge async query', function () {
before(function (done) { before(function (done) {
@ -12,12 +12,12 @@ describe('huge async query', function () {
}) })
it('works', function (done) { it('works', function (done) {
var params = [''] const params = ['']
var len = 100000 const len = 100000
for (var i = 0; i < len; i++) { for (let i = 0; i < len; i++) {
params[0] += 'A' params[0] += 'A'
} }
var qText = "SELECT '" + params[0] + "'::text as my_text" const qText = "SELECT '" + params[0] + "'::text as my_text"
this.client.query(qText, function (err, rows) { this.client.query(qText, function (err, rows) {
if (err) return done(err) if (err) return done(err)
assert.equal(rows[0].my_text.length, len) assert.equal(rows[0].my_text.length, len)

View File

@ -1,5 +1,5 @@
var Client = require('../') const Client = require('../')
var assert = require('assert') const assert = require('assert')
describe('connection', function () { describe('connection', function () {
it('works', function (done) { it('works', function (done) {
@ -24,7 +24,7 @@ describe('connectSync', function () {
}) })
it('works with args', function () { it('works with args', function () {
var args = 'host=' + (process.env.PGHOST || 'localhost') const args = 'host=' + (process.env.PGHOST || 'localhost')
Client().connectSync(args) Client().connectSync(args)
}) })

View File

@ -1,11 +1,11 @@
var Client = require('../') const Client = require('../')
var async = require('async') const async = require('async')
var ok = require('okay') const ok = require('okay')
var execute = function (x, done) { const execute = function (x, done) {
var client = new Client() const client = new Client()
client.connectSync() client.connectSync()
var query = function (n, cb) { const query = function (n, cb) {
client.query('SELECT $1::int as num', [n], function (err) { client.query('SELECT $1::int as num', [n], function (err) {
cb(err) cb(err)
}) })

View File

@ -1,16 +1,16 @@
var Client = require('../') const Client = require('../')
var async = require('async') const async = require('async')
var ok = require('okay') const ok = require('okay')
var bytes = require('crypto').pseudoRandomBytes const bytes = require('crypto').pseudoRandomBytes
describe('many connections', function () { describe('many connections', function () {
describe('async', function () { describe('async', function () {
var test = function (count, times) { const test = function (count, times) {
it(`connecting ${count} clients ${times} times`, function (done) { it(`connecting ${count} clients ${times} times`, function (done) {
this.timeout(200000) this.timeout(200000)
var connectClient = function (n, cb) { const connectClient = function (n, cb) {
var client = new Client() const client = new Client()
client.connect( client.connect(
ok(cb, function () { ok(cb, function () {
bytes( bytes(
@ -29,7 +29,7 @@ describe('many connections', function () {
) )
} }
var run = function (n, cb) { const run = function (n, cb) {
async.times(count, connectClient, cb) async.times(count, connectClient, cb)
} }

View File

@ -1,14 +1,14 @@
var Client = require('../') const Client = require('../')
var async = require('async') const async = require('async')
var assert = require('assert') const assert = require('assert')
describe('many errors', function () { describe('many errors', function () {
it('functions properly without segfault', function (done) { it('functions properly without segfault', function (done) {
var throwError = function (n, cb) { const throwError = function (n, cb) {
var client = new Client() const client = new Client()
client.connectSync() client.connectSync()
var doIt = function (n, cb) { const doIt = function (n, cb) {
client.query('select asdfiasdf', function (err) { client.query('select asdfiasdf', function (err) {
assert(err, 'bad query should emit an error') assert(err, 'bad query should emit an error')
cb(null) cb(null)

View File

@ -1,5 +1,5 @@
var Client = require('../') const Client = require('../')
var assert = require('assert') const assert = require('assert')
describe('multiple commands in a single query', function () { describe('multiple commands in a single query', function () {
before(function (done) { before(function (done) {
@ -22,7 +22,7 @@ describe('multiple commands in a single query', function () {
}) })
it('inserts and reads at once', function (done) { it('inserts and reads at once', function (done) {
var txt = 'CREATE TEMP TABLE boom(age int);' let txt = 'CREATE TEMP TABLE boom(age int);'
txt += 'INSERT INTO boom(age) VALUES(10);' txt += 'INSERT INTO boom(age) VALUES(10);'
txt += 'SELECT * FROM boom;' txt += 'SELECT * FROM boom;'
this.client.query(txt, function (err, rows, results) { this.client.query(txt, function (err, rows, results) {

View File

@ -1,5 +1,5 @@
var Client = require('../') const Client = require('../')
var assert = require('assert') const assert = require('assert')
describe('multiple statements', () => { describe('multiple statements', () => {
before(() => { before(() => {

View File

@ -1,8 +1,8 @@
var Client = require('../') const Client = require('../')
var ok = require('okay') const ok = require('okay')
var notify = function (channel, payload) { const notify = function (channel, payload) {
var client = new Client() const client = new Client()
client.connectSync() client.connectSync()
client.querySync('NOTIFY ' + channel + ", '" + payload + "'") client.querySync('NOTIFY ' + channel + ", '" + payload + "'")
client.end() client.end()
@ -10,12 +10,12 @@ var notify = function (channel, payload) {
describe('simple LISTEN/NOTIFY', function () { describe('simple LISTEN/NOTIFY', function () {
before(function (done) { before(function (done) {
var client = (this.client = new Client()) const client = (this.client = new Client())
client.connect(done) client.connect(done)
}) })
it('works', function (done) { it('works', function (done) {
var client = this.client const client = this.client
client.querySync('LISTEN boom') client.querySync('LISTEN boom')
client.on('notification', function (msg) { client.on('notification', function (msg) {
done() done()
@ -31,14 +31,14 @@ describe('simple LISTEN/NOTIFY', function () {
if (!process.env.TRAVIS_CI) { if (!process.env.TRAVIS_CI) {
describe('async LISTEN/NOTIFY', function () { describe('async LISTEN/NOTIFY', function () {
before(function (done) { before(function (done) {
var client = (this.client = new Client()) const client = (this.client = new Client())
client.connect(done) client.connect(done)
}) })
it('works', function (done) { it('works', function (done) {
var client = this.client const client = this.client
var count = 0 let count = 0
var check = function () { const check = function () {
count++ count++
if (count >= 2) return done() if (count >= 2) return done()
} }

View File

@ -1,13 +1,13 @@
var Client = require('../') const Client = require('../')
var ok = require('okay') const ok = require('okay')
var async = require('async') const async = require('async')
describe('async prepare', function () { describe('async prepare', function () {
var run = function (n, cb) { const run = function (n, cb) {
var client = new Client() const client = new Client()
client.connectSync() client.connectSync()
var exec = function (x, done) { const exec = function (x, done) {
client.prepare('get_now' + x, 'SELECT NOW()', 0, done) client.prepare('get_now' + x, 'SELECT NOW()', 0, done)
} }
@ -20,7 +20,7 @@ describe('async prepare', function () {
) )
} }
var t = function (n) { const t = function (n) {
it('works for ' + n + ' clients', function (done) { it('works for ' + n + ' clients', function (done) {
async.times(n, run, function (err) { async.times(n, run, function (err) {
done(err) done(err)
@ -28,17 +28,17 @@ describe('async prepare', function () {
}) })
} }
for (var i = 0; i < 10; i++) { for (let i = 0; i < 10; i++) {
t(i) t(i)
} }
}) })
describe('async execute', function () { describe('async execute', function () {
var run = function (n, cb) { const run = function (n, cb) {
var client = new Client() const client = new Client()
client.connectSync() client.connectSync()
client.prepareSync('get_now', 'SELECT NOW()', 0) client.prepareSync('get_now', 'SELECT NOW()', 0)
var exec = function (x, cb) { const exec = function (x, cb) {
client.execute('get_now', [], cb) client.execute('get_now', [], cb)
} }
async.timesSeries( async.timesSeries(
@ -50,7 +50,7 @@ describe('async execute', function () {
) )
} }
var t = function (n) { const t = function (n) {
it('works for ' + n + ' clients', function (done) { it('works for ' + n + ' clients', function (done) {
async.times(n, run, function (err) { async.times(n, run, function (err) {
done(err) done(err)
@ -58,7 +58,7 @@ describe('async execute', function () {
}) })
} }
for (var i = 0; i < 10; i++) { for (let i = 0; i < 10; i++) {
t(i) t(i)
} }
}) })

Some files were not shown because too many files have changed in this diff Show More