Mirror of https://github.com/brianc/node-postgres.git
Synced 2025-12-08 20:16:25 +00:00

Compare commits (1034 commits)
.devcontainer/Dockerfile (new file, 75 lines)

```dockerfile
#-------------------------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See https://go.microsoft.com/fwlink/?linkid=2090316 for license information.
#-------------------------------------------------------------------------------------------------------------

FROM node:20

# Avoid warnings by switching to noninteractive
ENV DEBIAN_FRONTEND=noninteractive

# The node image includes a non-root user with sudo access. Use the
# "remoteUser" property in devcontainer.json to use it. On Linux, update
# these values to ensure the container user's UID/GID matches your local values.
# See https://aka.ms/vscode-remote/containers/non-root-user for details.
ARG USERNAME=node
ARG USER_UID=1000
ARG USER_GID=$USER_UID

RUN echo "deb http://archive.debian.org/debian stretch main" > /etc/apt/sources.list

# Configure apt and install packages
RUN apt-get update \
    && apt-get -y install --no-install-recommends dialog 2>&1 \
    #
    # Verify git and needed tools are installed
    && apt-get -y install git iproute2 procps \
    #
    # Remove outdated yarn from /opt and install via package
    # so it can be easily updated via apt-get upgrade yarn
    && rm -rf /opt/yarn-* \
    && rm -f /usr/local/bin/yarn \
    && rm -f /usr/local/bin/yarnpkg \
    && apt-get install -y curl apt-transport-https lsb-release \
    && curl -sS https://dl.yarnpkg.com/$(lsb_release -is | tr '[:upper:]' '[:lower:]')/pubkey.gpg | apt-key add - 2>/dev/null \
    && echo "deb https://dl.yarnpkg.com/$(lsb_release -is | tr '[:upper:]' '[:lower:]')/ stable main" | tee /etc/apt/sources.list.d/yarn.list \
    && apt-get update \
    && apt-get -y install --no-install-recommends yarn tmux locales postgresql \
    && apt-get install libpq-dev g++ make \
    #
    # Install eslint globally
    && npm install -g eslint \
    #
    # [Optional] Update a non-root user to UID/GID if needed.
    && if [ "$USER_GID" != "1000" ] || [ "$USER_UID" != "1000" ]; then \
        groupmod --gid $USER_GID $USERNAME \
        && usermod --uid $USER_UID --gid $USER_GID $USERNAME \
        && chown -R $USER_UID:$USER_GID /home/$USERNAME; \
    fi \
    # [Optional] Add sudo support for non-root user
    && apt-get install -y sudo \
    && echo node ALL=\(root\) NOPASSWD:ALL > /etc/sudoers.d/$USERNAME \
    && chmod 0440 /etc/sudoers.d/$USERNAME \
    # Clean up
    && apt-get autoremove -y \
    && apt-get clean -y \
    && rm -rf /var/lib/apt/lists/*

RUN curl https://raw.githubusercontent.com/brianc/dotfiles/master/.tmux.conf > ~/.tmux.conf

# install nvm
RUN curl -o- https://raw.githubusercontent.com/nvm-sh/nvm/v0.35.2/install.sh | bash

# set up a nicer prompt
RUN git clone https://github.com/magicmonty/bash-git-prompt.git ~/.bash-git-prompt --depth=1

RUN echo "source $HOME/.bash-git-prompt/gitprompt.sh" >> ~/.bashrc

# Set the locale
RUN sed -i -e 's/# en_US.UTF-8 UTF-8/en_US.UTF-8 UTF-8/' /etc/locale.gen && locale-gen
ENV LANG en_US.UTF-8
ENV LANGUAGE en_US:en
ENV LC_ALL en_US.UTF-8

# Switch back to dialog for any ad-hoc use of apt-get
ENV DEBIAN_FRONTEND=dialog
```
.devcontainer/devcontainer.json (new file, 16 lines)

```jsonc
// If you want to run as a non-root user in the container, see .devcontainer/docker-compose.yml.
{
  "name": "Node.js 20 & Postgres",
  "dockerComposeFile": "docker-compose.yml",
  "service": "web",
  "workspaceFolder": "/workspace",
  // Add the IDs of extensions you want installed when the container is created in the array below.
  "customizations": {
    "vscode": {
      "extensions": ["dbaeumer.vscode-eslint"],
      "settings": {
        "terminal.integrated.shell.linux": "/bin/bash"
      }
    }
  }
}
```
.devcontainer/docker-compose.yml (new file, 52 lines)

```yaml
#-------------------------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See https://go.microsoft.com/fwlink/?linkid=2090316 for license information.
#-------------------------------------------------------------------------------------------------------------

version: '3.9'
services:
  web:
    # Uncomment the next line to use a non-root user for all processes. You can also
    # simply use the "remoteUser" property in devcontainer.json if you just want VS Code
    # and its sub-processes (terminals, tasks, debugging) to execute as the user. On Linux,
    # you may need to update USER_UID and USER_GID in .devcontainer/Dockerfile to match your
    # user if not 1000. See https://aka.ms/vscode-remote/containers/non-root for details.
    # user: node

    build:
      context: .
      dockerfile: Dockerfile

    volumes:
      - ..:/workspace:cached

    environment:
      PGPASSWORD: pass
      PGUSER: user
      PGDATABASE: data
      PGHOST: db
      # set this to true in the development environment until I can get SSL setup on the
      # docker postgres instance
      PGTESTNOSSL: 'true'

    # Overrides default command so things don't shut down after the process ends.
    command: sleep infinity

    depends_on:
      - db

    links:
      - db:db

  db:
    image: postgres:14-alpine
    restart: unless-stopped
    ports:
      - 5432:5432
    command: postgres -c password_encryption=md5
    environment:
      POSTGRES_HOST_AUTH_METHOD: trust
      POSTGRES_INITDB_ARGS: "--auth-local=md5"
      POSTGRES_PASSWORD: pass
      POSTGRES_USER: user
      POSTGRES_DB: data
```
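As a quick orientation for how this environment is consumed, a minimal sketch follows: node-postgres reads the standard `PG*` variables by default, so code running in the `web` service can reach the `db` service without an explicit connection config. The query is illustrative only.

```js
// Minimal sketch: pg picks up PGHOST, PGUSER, PGPASSWORD and PGDATABASE from the
// environment defined in docker-compose.yml above, so an empty config is enough
// inside the dev container.
const { Pool } = require('pg')

const pool = new Pool() // resolves to host=db, user=user, database=data, password=pass

async function main() {
  const { rows } = await pool.query('SELECT NOW() AS now')
  console.log(rows[0].now)
  await pool.end()
}

main().catch(console.error)
```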
.eslintignore (new file, 1 line)

```
/packages/*/dist/
```
.eslintrc (modified)

```diff
@@ -1,6 +1,35 @@
 {
-  "extends": "standard",
+  "plugins": ["@typescript-eslint", "prettier"],
+  "parser": "@typescript-eslint/parser",
+  "extends": ["eslint:recommended", "plugin:prettier/recommended", "prettier"],
+  "ignorePatterns": ["node_modules", "coverage", "packages/pg-protocol/dist/**/*", "packages/pg-query-stream/dist/**/*"],
+  "parserOptions": {
+    "ecmaVersion": 2017,
+    "sourceType": "module"
+  },
+  "env": {
+    "node": true,
+    "es6": true,
+    "mocha": true
+  },
   "rules": {
-    "no-new-func": "off"
-  }
+    "@typescript-eslint/no-unused-vars": ["error", {
+      "args": "none",
+      "varsIgnorePattern": "^_$"
+    }],
+    "no-unused-vars": ["error", {
+      "args": "none",
+      "varsIgnorePattern": "^_$"
+    }],
+    "no-var": "error",
+    "prefer-const": "error"
+  },
+  "overrides": [
+    {
+      "files": ["*.ts", "*.mts", "*.cts", "*.tsx"],
+      "rules": {
+        "no-undef": "off"
+      }
+    }
+  ]
 }
```
.gitattributes (new file, 1 line)

```
* text=auto eol=lf
```
.github/CODEOWNERS (new file, 1 line)

```
/packages/pg-connection-string @hjr3
```
.github/FUNDING.yml (new file, 3 lines)

```yaml
# These are supported funding model platforms

github: [brianc]
```
.github/dependabot.yaml (new file, 7 lines)

```yaml
version: 2
updates:
  - package-ecosystem: "npm"
    directory: "/"
    schedule:
      interval: "monthly"
```
.github/workflows/ci.yml (new file, 76 lines)

```yaml
name: CI

on: [push, pull_request]

permissions:
  contents: read

jobs:
  lint:
    timeout-minutes: 5
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
        with:
          persist-credentials: false
      - name: Setup node
        uses: actions/setup-node@v4
        with:
          node-version: 18
          cache: yarn
      - run: yarn install --frozen-lockfile
      - run: yarn lint

  build:
    timeout-minutes: 15
    needs: lint
    services:
      postgres:
        image: ghcr.io/railwayapp-templates/postgres-ssl
        env:
          POSTGRES_USER: postgres
          POSTGRES_PASSWORD: postgres
          POSTGRES_HOST_AUTH_METHOD: 'md5'
          POSTGRES_DB: ci_db_test
        ports:
          - 5432:5432
        options: --health-cmd pg_isready --health-interval 10s --health-timeout 5s --health-retries 5
    strategy:
      fail-fast: false
      matrix:
        node:
          - '16'
          - '18'
          - '20'
          - '22'
          - '24'
          - '25'
        os:
          - ubuntu-latest
    name: Node.js ${{ matrix.node }}
    runs-on: ubuntu-latest
    env:
      PGUSER: postgres
      PGPASSWORD: postgres
      PGHOST: localhost
      PGDATABASE: ci_db_test
      PGTESTNOSSL: 'true'
      SCRAM_TEST_PGUSER: scram_test
      SCRAM_TEST_PGPASSWORD: test4scram
    steps:
      - name: Show OS
        run: |
          uname -a
      - run: |
          psql \
            -c "SET password_encryption = 'scram-sha-256'" \
            -c "CREATE ROLE scram_test LOGIN PASSWORD 'test4scram'"
      - uses: actions/checkout@v4
        with:
          persist-credentials: false
      - name: Setup node
        uses: actions/setup-node@v4
        with:
          node-version: ${{ matrix.node }}
          cache: yarn
      - run: yarn install --frozen-lockfile
      - run: yarn test
```
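The `SCRAM_TEST_PGUSER`/`SCRAM_TEST_PGPASSWORD` variables pair with the `CREATE ROLE scram_test` step in the workflow above. A hedged sketch of how a test could exercise that role; the repository's actual SCRAM tests may be organized differently.

```js
// Hypothetical check that the scram_test role created in the workflow above can
// authenticate; the server verifies the password via SCRAM-SHA-256.
const { Client } = require('pg')

async function checkScramAuth() {
  const client = new Client({
    host: process.env.PGHOST,
    database: process.env.PGDATABASE,
    user: process.env.SCRAM_TEST_PGUSER, // scram_test
    password: process.env.SCRAM_TEST_PGPASSWORD, // test4scram
  })
  await client.connect()
  const { rows } = await client.query('SELECT current_user')
  console.log(rows[0].current_user) // -> scram_test
  await client.end()
}

checkScramAuth().catch(console.error)
```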
.gitignore (modified)

```diff
@@ -5,3 +5,8 @@ build/
 node_modules/
 package-lock.json
 *.swp
+dist
+.DS_Store
+/.eslintcache
+.vscode/
+manually-test-on-heroku.js
```
Deleted file (8 lines)

```diff
@@ -1,8 +0,0 @@
-*~
-build/
-.lock-wscript
-*.log
-node_modules/
-script/
-*.swp
-test/
```
.travis.yml (deleted, 35 lines)

```yaml
language: node_js
sudo: false
dist: trusty
before_script:
  - node script/create-test-tables.js pg://postgres@127.0.0.1:5432/postgres
env:
  - CC=clang CXX=clang++ npm_config_clang=1 PGUSER=postgres PGDATABASE=postgres

matrix:
  include:
    - node_js: "lts/argon"
      addons:
        postgresql: "9.6"
    - node_js: "lts/boron"
      addons:
        postgresql: "9.1"
      dist: precise
    - node_js: "lts/boron"
      addons:
        postgresql: "9.2"
    - node_js: "lts/boron"
      addons:
        postgresql: "9.3"
    - node_js: "lts/boron"
      addons:
        postgresql: "9.4"
    - node_js: "lts/boron"
      addons:
        postgresql: "9.5"
    - node_js: "lts/boron"
      addons:
        postgresql: "9.6"
    - node_js: "8"
      addons:
        postgresql: "9.6"
```
CHANGELOG.md (modified, 261 lines changed)

@@ -4,9 +4,198 @@ For richer information consult the commit log on github with referenced pull requests

We do not include break-fix version release in this file.

## pg@8.16.0

- Add support for [min connection pool size](https://github.com/brianc/node-postgres/pull/3438).
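For orientation, a brief sketch of the option named in the entry above. The `min` field follows the linked pull request; the other values are placeholders, so treat this as an illustration rather than the library's documented example.

```js
// Sketch: a pool that eagerly keeps a minimum number of clients connected.
// `min` is the option added in the PR referenced above; `max` is the existing ceiling.
const { Pool } = require('pg')

const pool = new Pool({
  connectionString: process.env.DATABASE_URL,
  min: 2, // keep at least 2 idle clients warm (field name assumed from the PR)
  max: 10, // never hold more than 10 clients
})
```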
## pg@8.15.0

- Add support for [esm](https://github.com/brianc/node-postgres/pull/3423) importing. CommonJS importing is still also supported.

## pg@8.14.0

- Add support for SCRAM-SHA-256-PLUS, i.e. [channel binding](https://github.com/brianc/node-postgres/pull/3356).

## pg@8.13.0

- Add ability to specify query timeout on a [per-query basis](https://github.com/brianc/node-postgres/pull/3074).
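A hedged sketch of the per-query form described above. node-postgres already accepts `query_timeout` on the client config; the spelling below assumes the same field name is honored on a query config object, per the linked PR, and the query itself is only illustrative.

```js
// Sketch: per-query timeout. The `query_timeout` field on the query config is
// assumed to mirror the existing client-level option (milliseconds).
const { Client } = require('pg')

async function run() {
  const client = new Client()
  await client.connect()
  const res = await client.query({
    text: 'SELECT pg_sleep(1)',
    query_timeout: 5000, // abort this query if it runs longer than 5 seconds
  })
  console.log(res.rowCount)
  await client.end()
}

run().catch(console.error)
```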
## pg@8.12.0

- Add `queryMode` config option to [force use of the extended query protocol](https://github.com/brianc/node-postgres/pull/3214) on queries without any parameters.
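A sketch of the opt-in described above. The `'extended'` value and the placement of `queryMode` on the client config are assumptions drawn from the PR description, not a quotation of the documented API.

```js
// Sketch: force the extended query protocol (parse/bind/execute) even for
// queries without parameters. 'extended' is assumed from the linked PR.
const { Client } = require('pg')

async function run() {
  const client = new Client({ queryMode: 'extended' })
  await client.connect()
  const res = await client.query('SELECT 1 AS one') // no parameters, still sent via the extended protocol
  console.log(res.rows[0].one)
  await client.end()
}

run().catch(console.error)
```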
## pg-pool@8.10.0

- Emit `release` event when client is returned to [the pool](https://github.com/brianc/node-postgres/pull/2845).

## pg@8.9.0

- Add support for [stream factory](https://github.com/brianc/node-postgres/pull/2898).
- [Better errors](https://github.com/brianc/node-postgres/pull/2901) for SASL authentication.
- [Use native crypto module](https://github.com/brianc/node-postgres/pull/2815) for SASL authentication.

## pg@8.8.0

- Bump minimum required version of [native bindings](https://github.com/brianc/node-postgres/pull/2787).
- Catch previously uncatchable errors thrown in [`pool.query`](https://github.com/brianc/node-postgres/pull/2569).
- Prevent the pool from blocking the event loop if all clients are [idle](https://github.com/brianc/node-postgres/pull/2721) (and `allowExitOnIdle` is enabled).
- Support `lock_timeout` in [client config](https://github.com/brianc/node-postgres/pull/2779).
- Fix errors thrown in callbacks from [interfering with cleanup](https://github.com/brianc/node-postgres/pull/2753).

### pg-pool@3.5.0

- Add connection [lifetime limit](https://github.com/brianc/node-postgres/pull/2698) config option.

### pg@8.7.0

- Add optional config to [pool](https://github.com/brianc/node-postgres/pull/2568) to allow process to exit if pool is idle.

### pg-cursor@2.7.0

- Convert to [es6 class](https://github.com/brianc/node-postgres/pull/2553)
- Add support for promises [to cursor methods](https://github.com/brianc/node-postgres/pull/2554)

### pg@8.6.0

- Better [SASL](https://github.com/brianc/node-postgres/pull/2436) error messages & more validation on bad configuration.
- Export [DatabaseError](https://github.com/brianc/node-postgres/pull/2445).
- Add [ParameterDescription](https://github.com/brianc/node-postgres/pull/2464) support to protocol parsing.
- Fix typescript [typedefs](https://github.com/brianc/node-postgres/pull/2490) with `--isolatedModules`.

### pg-query-stream@4.0.0

- Library has been [converted](https://github.com/brianc/node-postgres/pull/2376) to Typescript. The behavior is identical, but there could be subtle breaking changes due to class names changing or other small inconsistencies introduced by the conversion.

### pg@8.5.0

- Fix bug forwarding [ssl key](https://github.com/brianc/node-postgres/pull/2394).
- Convert pg-query-stream internals to [typescript](https://github.com/brianc/node-postgres/pull/2376).
- Performance [improvements](https://github.com/brianc/node-postgres/pull/2286).

### pg@8.4.0

- Switch to optional peer dependencies & remove [semver](https://github.com/brianc/node-postgres/commit/a02dfac5ad2e2abf0dc3a9817f953938acdc19b1) package which has been a small thorn in the side of a few users.
- Export `DatabaseError` from [pg-protocol](https://github.com/brianc/node-postgres/commit/58258430d52ee446721cc3e6611e26f8bcaa67f5).
- Add support for `sslmode` in the [connection string](https://github.com/brianc/node-postgres/commit/6be3b9022f83efc721596cc41165afaa07bfceb0).

### pg@8.3.0

- Support passing a [string of command line options flags](https://github.com/brianc/node-postgres/pull/2216) via the `{ options: string }` field on client/pool config.

### pg@8.2.0

- Switch internal protocol parser & serializer to [pg-protocol](https://github.com/brianc/node-postgres/tree/master/packages/pg-protocol). The change is backwards compatible but results in a significant performance improvement across the board, with some queries as much as 50% faster. This is the first work to land in an on-going performance improvement initiative I'm working on. Stay tuned as things are set to get much faster still! :rocket:

### pg-cursor@2.2.0

- Switch internal protocol parser & serializer to [pg-protocol](https://github.com/brianc/node-postgres/tree/master/packages/pg-protocol). The change is backwards compatible but results in a significant performance improvement across the board, with some queries as much as 50% faster.

### pg-query-stream@3.1.0

- Switch internal protocol parser & serializer to [pg-protocol](https://github.com/brianc/node-postgres/tree/master/packages/pg-protocol). The change is backwards compatible but results in a significant performance improvement across the board, with some queries as much as 50% faster.

### pg@8.1.0

- Switch to using [monorepo](https://github.com/brianc/node-postgres/tree/master/packages/pg-connection-string) version of `pg-connection-string`. This includes better support for SSL argument parsing from connection strings and ensures continuity of support.
- Add `&ssl=no-verify` option to connection string and `PGSSLMODE=no-verify` environment variable support for the pure JS driver. This is equivalent to passing `{ ssl: { rejectUnauthorized: false } }` to the client/pool constructor. The advantage of having support in connection strings and environment variables is it can be "externally" configured via environment variables and CLI arguments much more easily, and should remove the need to directly edit any application code for [the SSL default changes in 8.0](https://node-postgres.com/announcements#2020-02-25). This should make using `pg@8.x` significantly less difficult on environments like Heroku for example.
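Since the entry above describes the same setting in several spellings, a compact sketch of the two programmatic ones; the host, credentials and database below are placeholders.

```js
// The connection-string form and the explicit config form are equivalent, per the
// changelog entry above; PGSSLMODE=no-verify does the same thing from the environment.
const { Pool } = require('pg')

// 1. via the connection string (placeholder credentials and host)
const poolFromString = new Pool({
  connectionString: 'postgres://user:pass@db.example.com:5432/appdb?ssl=no-verify',
})

// 2. via explicit config
const poolFromConfig = new Pool({
  host: 'db.example.com',
  user: 'user',
  password: 'pass',
  database: 'appdb',
  ssl: { rejectUnauthorized: false },
})
```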
|
||||||
|
### pg-pool@3.2.0
|
||||||
|
|
||||||
|
- Same changes to `pg` impact `pg-pool` as they both use the same connection parameter and connection string parsing code for configuring SSL.
|
||||||
|
|
||||||
|
### pg-pool@3.1.0
|
||||||
|
|
||||||
|
- Add [maxUses](https://github.com/brianc/node-postgres/pull/2157) config option.
|
||||||
|
|
||||||
|
### pg@8.0.0
|
||||||
|
|
||||||
|
#### note: for detailed release notes please [check here](https://node-postgres.com/announcements#2020-02-25)
|
||||||
|
|
||||||
|
- Remove versions of node older than `6 lts` from the test matrix. `pg>=8.0` may still work on older versions but it is no longer officially supported.
|
||||||
|
- Change default behavior when not specifying `rejectUnauthorized` with the SSL connection parameters. Previously we defaulted to `rejectUnauthorized: false` when it was not specifically included. We now default to `rejectUnauthorized: true.` Manually specify `{ ssl: { rejectUnauthorized: false } }` for old behavior.
|
||||||
|
- Change [default database](https://github.com/brianc/node-postgres/pull/1679) when not specified to use the `user` config option if available. Previously `process.env.USER` was used.
|
||||||
|
- Change `pg.Pool` and `pg.Query` to [be](https://github.com/brianc/node-postgres/pull/2126) an [es6 class](https://github.com/brianc/node-postgres/pull/2063).
|
||||||
|
- Make `pg.native` non enumerable.
|
||||||
|
- `notice` messages are [no longer instances](https://github.com/brianc/node-postgres/pull/2090) of `Error`.
|
||||||
|
- Passwords no longer [show up](https://github.com/brianc/node-postgres/pull/2070) when instances of clients or pools are logged.
|
||||||
|
|
||||||
|
### pg@7.18.0
|
||||||
|
|
||||||
|
- This will likely be the last minor release before pg@8.0.
|
||||||
|
- This version contains a few bug fixes and adds a deprecation warning for [a pending change in 8.0](https://github.com/brianc/node-postgres/issues/2009#issuecomment-579371651) which will flip the default behavior over SSL from `rejectUnauthorized` from `false` to `true` making things more secure in the general use case.
|
||||||
|
|
||||||
|
### pg-query-stream@3.0.0
|
||||||
|
|
||||||
|
- [Rewrote stream internals](https://github.com/brianc/node-postgres/pull/2051) to better conform to node stream semantics. This should make pg-query-stream much better at respecting [highWaterMark](https://nodejs.org/api/stream.html#stream_new_stream_readable_options) and getting rid of some edge case bugs when using pg-query-stream as an async iterator. Due to the size and nature of this change (effectively a full re-write) it's safest to bump the semver major here, though almost all tests remain untouched and still passing, which brings us to a breaking change to the API....
|
||||||
|
- Changed `stream.close` to `stream.destroy` which is the [official](https://nodejs.org/api/stream.html#stream_readable_destroy_error) way to terminate a readable stream. This is a **breaking change** if you rely on the `stream.close` method on pg-query-stream...though should be just a find/replace type operation to upgrade as the semantics remain very similar (not exactly the same, since internals are rewritten, but more in line with how streams are "supposed" to behave).
|
||||||
|
- Unified the `config.batchSize` and `config.highWaterMark` to both do the same thing: control how many rows are buffered in memory. The `ReadableStream` will manage exactly how many rows are requested from the cursor at a time. This should give better out of the box performance and help with efficient async iteration.
|
||||||
|
|
||||||
|
### pg@7.17.0
|
||||||
|
|
||||||
|
- Add support for `idle_in_transaction_session_timeout` [option](https://github.com/brianc/node-postgres/pull/2049).
|
||||||
|
|
||||||
|
### 7.16.0
|
||||||
|
|
||||||
|
- Add optional, opt-in behavior to test new, [faster query pipeline](https://github.com/brianc/node-postgres/pull/2044). This is experimental, and not documented yet. The pipeline changes will grow significantly after the 8.0 release.
|
||||||
|
|
||||||
|
### 7.15.0
|
||||||
|
|
||||||
|
- Change repository structure to support lerna & future monorepo [development](https://github.com/brianc/node-postgres/pull/2014).
|
||||||
|
- [Warn about deprecation](https://github.com/brianc/node-postgres/pull/2021) for calling constructors without `new`.
|
||||||
|
|
||||||
|
### 7.14.0
|
||||||
|
|
||||||
|
- Reverts 7.13.0 as it contained [an accidental breaking change](https://github.com/brianc/node-postgres/pull/2010) for self-signed SSL cert verification. 7.14.0 is identical to 7.12.1.
|
||||||
|
|
||||||
|
### 7.13.0
|
||||||
|
|
||||||
|
- Add support for [all tls.connect()](https://github.com/brianc/node-postgres/pull/1996) options.
|
||||||
|
|
||||||
|
### 7.12.0
|
||||||
|
|
||||||
|
- Add support for [async password lookup](https://github.com/brianc/node-postgres/pull/1926).
|
||||||
|
|
||||||
|
### 7.11.0
|
||||||
|
|
||||||
|
- Add support for [connection_timeout](https://github.com/brianc/node-postgres/pull/1847/files#diff-5391bde944956870128be1136e7bc176R63) and [keepalives_idle](https://github.com/brianc/node-postgres/pull/1847).
|
||||||
|
|
||||||
|
### 7.10.0
|
||||||
|
|
||||||
|
- Add support for [per-query types](https://github.com/brianc/node-postgres/pull/1825).
|
||||||
|
|
||||||
|
### 7.9.0
|
||||||
|
|
||||||
|
- Add support for [sasl/scram authentication](https://github.com/brianc/node-postgres/pull/1835).
|
||||||
|
|
||||||
|
### 7.8.0
|
||||||
|
|
||||||
|
- Add support for passing [secureOptions](https://github.com/brianc/node-postgres/pull/1804) SSL config.
|
||||||
|
- Upgrade [pg-types](https://github.com/brianc/node-postgres/pull/1806) to 2.0.
|
||||||
|
|
||||||
|
### 7.7.0
|
||||||
|
|
||||||
|
- Add support for configurable [query timeout](https://github.com/brianc/node-postgres/pull/1760) on a client level.
|
||||||
|
|
||||||
|
### 7.6.0
|
||||||
|
|
||||||
|
- Add support for ["bring your own promise"](https://github.com/brianc/node-postgres/pull/1518)
|
||||||
|
|
||||||
|
### 7.5.0
|
||||||
|
|
||||||
|
- Better [error message](https://github.com/brianc/node-postgres/commit/11a4793452d618c53e019416cc886ad38deb1aa7) when passing `null` or `undefined` to `client.query`.
|
||||||
|
- Better [error handling](https://github.com/brianc/node-postgres/pull/1503) on queued queries.
|
||||||
|
|
||||||
|
### 7.4.0
|
||||||
|
|
||||||
|
- Add support for [Uint8Array](https://github.com/brianc/node-postgres/pull/1448) values.
|
||||||
|
|
||||||
|
### 7.3.0
|
||||||
|
|
||||||
|
- Add support for [statement timeout](https://github.com/brianc/node-postgres/pull/1436).
|
||||||
|
|
||||||
### 7.2.0
|
### 7.2.0
|
||||||
|
|
||||||
- Pinned pg-pool and pg-types to a tighter semver range. This is likely not a noticable change for you unless you were specifically installing older versions of those libraries for some reason, but making it a minor bump here just in case it could cause any confusion.
|
- Pinned pg-pool and pg-types to a tighter semver range. This is likely not a noticeable change for you unless you were specifically installing older versions of those libraries for some reason, but making it a minor bump here just in case it could cause any confusion.
|
||||||
|
|
||||||
### 7.1.0
|
### 7.1.0
|
||||||
|
|
||||||
@ -40,16 +229,17 @@ We do not include break-fix version release in this file.
|
|||||||
|
|
||||||
### v6.1.0
|
### v6.1.0
|
||||||
|
|
||||||
- Add optional callback parameter to the pure JavaScript `client.end` method. The native client already supported this.
|
- Add optional callback parameter to the pure JavaScript `client.end` method. The native client already supported this.
|
||||||
|
|
||||||
### v6.0.0
|
### v6.0.0
|
||||||
|
|
||||||
#### Breaking Changes
|
#### Breaking Changes
|
||||||
- Remove `pg.pools`. There is still a reference kept to the pools created & tracked by `pg.connect` but it has been renamed, is considered private, and should not be used. Accessing this API directly was uncommon and was _supposed_ to be private but was incorrectly documented on the wiki. Therefore, it is a breaking change of an (unintentionally) public interface to remove it by renaming it & making it private. Eventually `pg.connect` itself will be deprecated in favor of instantiating pools directly via `new pg.Pool()` so this property should become completely moot at some point. In the mean time...check out the new features...
|
|
||||||
|
- Remove `pg.pools`. There is still a reference kept to the pools created & tracked by `pg.connect` but it has been renamed, is considered private, and should not be used. Accessing this API directly was uncommon and was _supposed_ to be private but was incorrectly documented on the wiki. Therefore, it is a breaking change of an (unintentionally) public interface to remove it by renaming it & making it private. Eventually `pg.connect` itself will be deprecated in favor of instantiating pools directly via `new pg.Pool()` so this property should become completely moot at some point. In the mean time...check out the new features...
|
||||||
|
|
||||||
#### New features
|
#### New features
|
||||||
|
|
||||||
- Replace internal pooling code with [pg-pool](https://github.com/brianc/node-pg-pool). This is the first step in eventually deprecating and removing the singleton `pg.connect`. The pg-pool constructor is exported from node-postgres at `require('pg').Pool`. It provides a backwards compatible interface with `pg.connect` as well as a promise based interface & additional niceties.
|
- Replace internal pooling code with [pg-pool](https://github.com/brianc/node-pg-pool). This is the first step in eventually deprecating and removing the singleton `pg.connect`. The pg-pool constructor is exported from node-postgres at `require('pg').Pool`. It provides a backwards compatible interface with `pg.connect` as well as a promise based interface & additional niceties.
|
||||||
|
|
||||||
You can now create an instance of a pool and don't have to rely on the `pg` singleton for anything:
|
You can now create an instance of a pool and don't have to rely on the `pg` singleton for anything:
|
||||||
|
|
||||||
@ -58,7 +248,7 @@ var pg = require('pg')
|
|||||||
|
|
||||||
var pool = new pg.Pool()
|
var pool = new pg.Pool()
|
||||||
|
|
||||||
// your friendly neighboorhood pool interface, without the singleton
|
// your friendly neighborhood pool interface, without the singleton
|
||||||
pool.connect(function(err, client, done) {
|
pool.connect(function(err, client, done) {
|
||||||
// ...
|
// ...
|
||||||
})
|
})
|
||||||
@ -66,9 +256,9 @@ pool.connect(function(err, client, done) {
|
|||||||
|
|
||||||
Promise support & other goodness lives now in [pg-pool](https://github.com/brianc/node-pg-pool).
|
Promise support & other goodness lives now in [pg-pool](https://github.com/brianc/node-pg-pool).
|
||||||
|
|
||||||
__Please__ read the readme at [pg-pool](https://github.com/brianc/node-pg-pool) for the full api.
|
**Please** read the readme at [pg-pool](https://github.com/brianc/node-pg-pool) for the full api.
|
||||||
|
|
||||||
- Included support for tcp keep alive. Enable it as follows:
|
- Included support for tcp keep alive. Enable it as follows:
|
||||||
|
|
||||||
```js
|
```js
|
||||||
var client = new Client({ keepAlive: true })
|
var client = new Client({ keepAlive: true })
|
||||||
@ -76,58 +266,69 @@ var client = new Client({ keepAlive: true })
|
|||||||
|
|
||||||
This should help with backends incorrectly considering idle clients to be dead and prematurely disconnecting them.
|
This should help with backends incorrectly considering idle clients to be dead and prematurely disconnecting them.
|
||||||
|
|
||||||
|
|
||||||
### v5.1.0
|
### v5.1.0
|
||||||
|
|
||||||
- Make the query object returned from `client.query` implement the promise interface. This is the first step towards promisifying more of the node-postgres api.
|
- Make the query object returned from `client.query` implement the promise interface. This is the first step towards promisifying more of the node-postgres api.
|
||||||
|
|
||||||
Example:
|
Example:
|
||||||
|
|
||||||
```js
|
```js
|
||||||
var client = new Client()
|
var client = new Client()
|
||||||
client.connect()
|
client.connect()
|
||||||
client.query('SELECT $1::text as name', ['brianc'])
|
client.query('SELECT $1::text as name', ['brianc']).then(function (res) {
|
||||||
.then(function(res) {
|
console.log('hello from', res.rows[0])
|
||||||
console.log('hello from', res.rows[0])
|
client.end()
|
||||||
client.end()
|
})
|
||||||
})
|
|
||||||
```
|
```
|
||||||
|
|
||||||
### v5.0.0
|
### v5.0.0
|
||||||
|
|
||||||
#### Breaking Changes
|
#### Breaking Changes
|
||||||
|
|
||||||
- `require('pg').native` now returns null if the native bindings cannot be found; previously, this threw an exception.
|
- `require('pg').native` now returns null if the native bindings cannot be found; previously, this threw an exception.
|
||||||
|
|
||||||
#### New Features
|
#### New Features
|
||||||
|
|
||||||
- better error message when passing `undefined` as a query parameter
|
- better error message when passing `undefined` as a query parameter
|
||||||
- support for `defaults.connectionString`
|
- support for `defaults.connectionString`
|
||||||
- support for `returnToHead` being passed to [generic pool](https://github.com/coopernurse/node-pool)
|
- support for `returnToHead` being passed to [generic pool](https://github.com/coopernurse/node-pool)
|
||||||
|
|
||||||
### v4.5.0
|
### v4.5.0
|
||||||
|
|
||||||
- Add option to parse JS date objects in query parameters as [UTC](https://github.com/brianc/node-postgres/pull/943)
|
- Add option to parse JS date objects in query parameters as [UTC](https://github.com/brianc/node-postgres/pull/943)
|
||||||
|
|
||||||
### v4.4.0
|
### v4.4.0
|
||||||
- Warn to `stderr` if a named query exceeds 63 characters which is the max lenght supported by postgres.
|
|
||||||
|
- Warn to `stderr` if a named query exceeds 63 characters which is the max length supported by postgres.
|
||||||
|
|
||||||
### v4.3.0
|
### v4.3.0
|
||||||
|
|
||||||
- Unpin `pg-types` semver. Allow it to float against `pg-types@1.x`.
|
- Unpin `pg-types` semver. Allow it to float against `pg-types@1.x`.
|
||||||
|
|
||||||
### v4.2.0
|
### v4.2.0
|
||||||
|
|
||||||
- Support for additional error fields in postgres >= 9.3 if available.
|
- Support for additional error fields in postgres >= 9.3 if available.
|
||||||
|
|
||||||
### v4.1.0
|
### v4.1.0
|
||||||
|
|
||||||
- Allow type parser overrides on a [per-client basis](https://github.com/brianc/node-postgres/pull/679)
|
- Allow type parser overrides on a [per-client basis](https://github.com/brianc/node-postgres/pull/679)
|
||||||
|
|
||||||
### v4.0.0
|
### v4.0.0
|
||||||
|
|
||||||
- Make [native bindings](https://github.com/brianc/node-pg-native.git) an optional install with `npm install pg-native`
|
- Make [native bindings](https://github.com/brianc/node-pg-native.git) an optional install with `npm install pg-native`
|
||||||
- No longer surround query result callback with `try/catch` block.
|
- No longer surround query result callback with `try/catch` block.
|
||||||
- Remove built in COPY IN / COPY OUT support - better implementations provided by [pg-copy-streams](https://github.com/brianc/node-pg-copy-streams.git) and [pg-native](https://github.com/brianc/node-pg-native.git)
|
- Remove built in COPY IN / COPY OUT support - better implementations provided by [pg-copy-streams](https://github.com/brianc/node-pg-copy-streams.git) and [pg-native](https://github.com/brianc/node-pg-native.git)
|
||||||
|
|
||||||
### v3.6.0
|
### v3.6.0
|
||||||
|
|
||||||
- Include support for [parsing JSONB](https://github.com/brianc/node-pg-types/pull/13) (supported in postgres 9.4)
|
- Include support for [parsing JSONB](https://github.com/brianc/node-pg-types/pull/13) (supported in postgres 9.4)
|
||||||
|
|
||||||
### v3.5.0
|
### v3.5.0
|
||||||
|
|
||||||
- Include support for parsing boolean arrays
|
- Include support for parsing boolean arrays
|
||||||
|
|
||||||
### v3.4.0
|
### v3.4.0
|
||||||
|
|
||||||
- Include port as connection parameter to [unix sockets](https://github.com/brianc/node-postgres/pull/604)
|
- Include port as connection parameter to [unix sockets](https://github.com/brianc/node-postgres/pull/604)
|
||||||
- Better support for odd [date parsing](https://github.com/brianc/node-pg-types/pull/8)
|
- Better support for odd [date parsing](https://github.com/brianc/node-pg-types/pull/8)
|
||||||
|
|
||||||
@ -137,7 +338,6 @@ client.query('SELECT $1::text as name', ['brianc'])
|
|||||||
- Expose array parsers on [pg.types](https://github.com/brianc/node-pg-types/pull/2)
|
- Expose array parsers on [pg.types](https://github.com/brianc/node-pg-types/pull/2)
|
||||||
- Allow [pool](https://github.com/brianc/node-postgres/pull/591) to be configured
|
- Allow [pool](https://github.com/brianc/node-postgres/pull/591) to be configured
|
||||||
|
|
||||||
|
|
||||||
### v3.1.0
|
### v3.1.0
|
||||||
|
|
||||||
- Add [count of the number of times a client has been checked out from the pool](https://github.com/brianc/node-postgres/pull/556)
|
- Add [count of the number of times a client has been checked out from the pool](https://github.com/brianc/node-postgres/pull/556)
|
||||||
@ -146,27 +346,29 @@ client.query('SELECT $1::text as name', ['brianc'])
|
|||||||
### v3.0.0
|
### v3.0.0
|
||||||
|
|
||||||
#### Breaking changes
|
#### Breaking changes
|
||||||
|
|
||||||
- [Parse the DATE PostgreSQL type as local time](https://github.com/brianc/node-postgres/pull/514)
|
- [Parse the DATE PostgreSQL type as local time](https://github.com/brianc/node-postgres/pull/514)
|
||||||
|
|
||||||
After [some discussion](https://github.com/brianc/node-postgres/issues/510) it was decided node-postgres was non-compliant in how it was handling DATE results. They were being converted to UTC, but the PostgreSQL documentation specifies they should be returned in the client timezone. This is a breaking change, and if you use the `date` type you might want to examine your code and make sure nothing is impacted.
|
After [some discussion](https://github.com/brianc/node-postgres/issues/510) it was decided node-postgres was non-compliant in how it was handling DATE results. They were being converted to UTC, but the PostgreSQL documentation specifies they should be returned in the client timezone. This is a breaking change, and if you use the `date` type you might want to examine your code and make sure nothing is impacted.
|
||||||
|
|
||||||
- [Fix possible numeric precision loss on numeric & int8 arrays](https://github.com/brianc/node-postgres/pull/501)
|
- [Fix possible numeric precision loss on numeric & int8 arrays](https://github.com/brianc/node-postgres/pull/501)
|
||||||
|
|
||||||
pg@v2.0 included changes to not convert large integers into their JavaScript number representation because of the possibility of numeric precision loss. The same types in arrays were not taken into account. This fix applies the same type-coercion rules to arrays of those types, so there will be no more possible numeric loss on an array of very large int8s, for example. This is a breaking change because a query returning `int8[]` will now contain _string_ representations
|
pg@v2.0 included changes to not convert large integers into their JavaScript number representation because of the possibility of numeric precision loss. The same types in arrays were not taken into account. This fix applies the same type-coercion rules to arrays of those types, so there will be no more possible numeric loss on an array of very large int8s, for example. This is a breaking change because a query returning `int8[]` will now contain _string_ representations
|
||||||
of the integers. Use your favorite JavaScript bignum module to represent them without precision loss, or override the type converter to return the old-style arrays again.
|
of the integers. Use your favorite JavaScript bignum module to represent them without precision loss, or override the type converter to return the old-style arrays again.
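  For example, a minimal hedged sketch of handling the new string elements (the `events` table and its `big_ids int8[]` column are hypothetical; `BigInt` stands in for a "bignum module" on modern Node, and `client` is assumed to be a connected pg client):

  ```js
  const res = await client.query('SELECT big_ids FROM events LIMIT 1')
  // each element now arrives as a string, e.g. ['9007199254740993', '12']
  const ids = res.rows[0].big_ids.map(BigInt) // convert explicitly, without precision loss
  ```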
|
||||||
|
|
||||||
- [Fix to input array of dates being improperly converted to utc](https://github.com/benesch/node-postgres/commit/c41eedc3e01e5527a3d5c242fa1896f02ef0b261#diff-7172adb1fec2457a2700ed29008a8e0aR108)
|
- [Fix to input array of dates being improperly converted to utc](https://github.com/benesch/node-postgres/commit/c41eedc3e01e5527a3d5c242fa1896f02ef0b261#diff-7172adb1fec2457a2700ed29008a8e0aR108)
|
||||||
|
|
||||||
Single `date` parameters were sent to the PostgreSQL server properly in local time, but an input array of dates was being changed into UTC dates. This is a violation of what PostgreSQL expects. It is a small breaking change, but nonetheless something you should check out if you are inserting an array of dates.
|
Single `date` parameters were sent to the PostgreSQL server properly in local time, but an input array of dates was being changed into UTC dates. This is a violation of what PostgreSQL expects. It is a small breaking change, but nonetheless something you should check out if you are inserting an array of dates.
|
||||||
|
|
||||||
- [Query no longer emits `end` event if it ends due to an error](https://github.com/brianc/node-postgres/commit/357b64d70431ec5ca721eb45a63b082c18e6ffa3)
|
- [Query no longer emits `end` event if it ends due to an error](https://github.com/brianc/node-postgres/commit/357b64d70431ec5ca721eb45a63b082c18e6ffa3)
|
||||||
|
|
||||||
This is a small change to bring the semantics of query more in line with other EventEmitters. The tests all passed after this change, but I suppose it could still be a breaking change in certain use cases. If you are doing clever things with the `end` and `error` events of a query object you might want to check to make sure it's still behaving normally, though it is most likely not an issue.
|
This is a small change to bring the semantics of query more in line with other EventEmitters. The tests all passed after this change, but I suppose it could still be a breaking change in certain use cases. If you are doing clever things with the `end` and `error` events of a query object you might want to check to make sure it's still behaving normally, though it is most likely not an issue.
|
||||||
|
|
||||||
#### New features
|
#### New features
|
||||||
|
|
||||||
- [Supercharge `prepareValue`](https://github.com/brianc/node-postgres/pull/555)
|
- [Supercharge `prepareValue`](https://github.com/brianc/node-postgres/pull/555)
|
||||||
|
|
||||||
The long & short of it is now any object you supply in the list of query values will be inspected for a `.toPostgres` method. If the method is present it will be called and its result used as the raw text value sent to PostgreSQL for that value. This allows the same type of custom type coercion on query parameters as was previously afforded to query result values.
|
The long & short of it is now any object you supply in the list of query values will be inspected for a `.toPostgres` method. If the method is present it will be called and its result used as the raw text value sent to PostgreSQL for that value. This allows the same type of custom type coercion on query parameters as was previously afforded to query result values.
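  A short, hedged illustration of what this enables (the `Point` class and the `places` table are hypothetical; `client` is assumed to be a connected pg client):

  ```js
  class Point {
    constructor(x, y) {
      this.x = x
      this.y = y
    }
    // node-postgres calls this when the object is used as a query value
    toPostgres() {
      return `(${this.x},${this.y})`
    }
  }

  await client.query('INSERT INTO places (location) VALUES ($1)', [new Point(1, 2)])
  ```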
|
||||||
|
|
||||||
- [Domain aware connection pool](https://github.com/brianc/node-postgres/pull/531)
|
- [Domain aware connection pool](https://github.com/brianc/node-postgres/pull/531)
|
||||||
|
|
||||||
@ -181,41 +383,52 @@ Avoids a scenario where your pool could fill up with disconnected & unusable cli
|
|||||||
To provide better documentation and a clearer explanation of how to override the query result parsing system we broke the type converters [into their own module](https://github.com/brianc/node-pg-types). There is still work around removing the 'global-ness' of the type converters so each query or connection can return types differently, but this is a good first step and allows a much more obvious way to return int8 results as JavaScript numbers, for example.
|
To provide better documentation and a clearer explanation of how to override the query result parsing system we broke the type converters [into their own module](https://github.com/brianc/node-pg-types). There is still work around removing the 'global-ness' of the type converters so each query or connection can return types differently, but this is a good first step and allows a much more obvious way to return int8 results as JavaScript numbers, for example.
|
||||||
|
|
||||||
### v2.11.0
|
### v2.11.0
|
||||||
|
|
||||||
- Add support for [application_name](https://github.com/brianc/node-postgres/pull/497)
|
- Add support for [application_name](https://github.com/brianc/node-postgres/pull/497)
|
||||||
|
|
||||||
### v2.10.0
|
### v2.10.0
|
||||||
|
|
||||||
- Add support for [the password file](http://www.postgresql.org/docs/9.3/static/libpq-pgpass.html)
|
- Add support for [the password file](http://www.postgresql.org/docs/9.3/static/libpq-pgpass.html)
|
||||||
|
|
||||||
### v2.9.0
|
### v2.9.0
|
||||||
|
|
||||||
- Add better support for [unix domain socket](https://github.com/brianc/node-postgres/pull/487) connections
|
- Add better support for [unix domain socket](https://github.com/brianc/node-postgres/pull/487) connections
|
||||||
|
|
||||||
### v2.8.0
|
### v2.8.0
|
||||||
|
|
||||||
- Add support for parsing JSON[] and UUID[] result types
|
- Add support for parsing JSON[] and UUID[] result types
|
||||||
|
|
||||||
### v2.7.0
|
### v2.7.0
|
||||||
|
|
||||||
- Use single row mode in native bindings when available [@rpedela]
|
- Use single row mode in native bindings when available [@rpedela]
|
||||||
- reduces memory consumption when handling row values in 'row' event
|
- reduces memory consumption when handling row values in 'row' event
|
||||||
- Automatically bind buffer type parameters as binary [@eugeneware]
|
- Automatically bind buffer type parameters as binary [@eugeneware]
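  A minimal sketch, assuming a connected `client` and a hypothetical `files` table with a `bytea` column:

  ```js
  const data = Buffer.from('89504e470d0a1a0a', 'hex')
  // Buffer values are bound as binary parameters automatically - no manual encoding needed
  await client.query('INSERT INTO files (name, data) VALUES ($1, $2)', ['logo.png', data])
  ```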
|
||||||
|
|
||||||
### v2.6.0
|
### v2.6.0
|
||||||
|
|
||||||
- Respect PGSSLMODE environment variable
|
- Respect PGSSLMODE environment variable
|
||||||
|
|
||||||
### v2.5.0
|
### v2.5.0
|
||||||
|
|
||||||
- Ability to opt-in to int8 parsing via `pg.defaults.parseInt8 = true`
|
- Ability to opt-in to int8 parsing via `pg.defaults.parseInt8 = true`
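  For example (a hedged sketch; `count(*)` returns an `int8`, which is normally handed back as a string):

  ```js
  import pg from 'pg'

  // opt in: int8/bigint values are parsed into JavaScript numbers instead of strings.
  // Only do this if your values stay within Number.MAX_SAFE_INTEGER.
  pg.defaults.parseInt8 = true

  const client = new pg.Client()
  await client.connect()
  const res = await client.query('SELECT count(*) AS n FROM generate_series(1, 3)')
  console.log(typeof res.rows[0].n) // 'number' rather than 'string'
  await client.end()
  ```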
|
||||||
|
|
||||||
### v2.4.0
|
### v2.4.0
|
||||||
|
|
||||||
- Use eval in the result set parser to increase performance
|
- Use eval in the result set parser to increase performance
|
||||||
|
|
||||||
### v2.3.0
|
### v2.3.0
|
||||||
|
|
||||||
- Remove built-in support for binary Int64 parsing.
|
- Remove built-in support for binary Int64 parsing.
|
||||||
_Due to the low usage & required compiled dependency this will be pushed into a 3rd party add-on_
|
_Due to the low usage & required compiled dependency this will be pushed into a 3rd party add-on_
|
||||||
|
|
||||||
### v2.2.0
|
### v2.2.0
|
||||||
|
|
||||||
- [Add support for escapeLiteral and escapeIdentifier in both JavaScript and the native bindings](https://github.com/brianc/node-postgres/pull/396)
|
- [Add support for escapeLiteral and escapeIdentifier in both JavaScript and the native bindings](https://github.com/brianc/node-postgres/pull/396)
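  A hedged sketch of how these are used (parameterized queries remain the preferred way to pass values; escaping covers cases, like dynamic identifiers, that parameters cannot):

  ```js
  // assumes `client` is a connected pg client
  const table = client.escapeIdentifier('user data') // => "user data"
  const author = client.escapeLiteral("O'Reilly") // => 'O''Reilly'
  await client.query(`SELECT * FROM ${table} WHERE author = ${author}`)
  ```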
|
||||||
|
|
||||||
### v2.1.0
|
### v2.1.0
|
||||||
|
|
||||||
- Add support for SSL connections in JavaScript driver
|
- Add support for SSL connections in JavaScript driver
|
||||||
- this means you can connect to heroku postgres from your local machine without the native bindings!
|
- this means you can connect to heroku postgres from your local machine without the native bindings!
|
||||||
- [Add field metadata to result object](https://github.com/brianc/node-postgres/blob/master/test/integration/client/row-description-on-results-tests.js)
|
- [Add field metadata to result object](https://github.com/brianc/node-postgres/blob/master/test/integration/client/row-description-on-results-tests.js)
|
||||||
- [Add ability for rows to be returned as arrays instead of objects](https://github.com/brianc/node-postgres/blob/master/test/integration/client/results-as-array-tests.js)
|
- [Add ability for rows to be returned as arrays instead of objects](https://github.com/brianc/node-postgres/blob/master/test/integration/client/results-as-array-tests.js)
|
||||||
|
|
||||||
@ -251,7 +464,7 @@ If you are unhappy with these changes you can always [override the built in type
|
|||||||
### v1.0.0
|
### v1.0.0
|
||||||
|
|
||||||
- remove deprecated functionality
|
- remove deprecated functionality
|
||||||
- Callback function passed to `pg.connect` now __requires__ 3 arguments
|
- Callback function passed to `pg.connect` now **requires** 3 arguments
|
||||||
- Client#pauseDrain() / Client#resumeDrain removed
|
- Client#pauseDrain() / Client#resumeDrain removed
|
||||||
- numeric, decimal, and float data types no longer parsed into float before being returned. Will be returned from query results as `String`
|
- numeric, decimal, and float data types no longer parsed into float before being returned. Will be returned from query results as `String`
|
||||||
|
|
||||||
|
|||||||
2
LICENSE
2
LICENSE
@ -1,6 +1,6 @@
|
|||||||
MIT License
|
MIT License
|
||||||
|
|
||||||
Copyright (c) 2010 - 2017 Brian Carlson
|
Copyright (c) 2010 - 2021 Brian Carlson
|
||||||
|
|
||||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||||
of this software and associated documentation files (the "Software"), to deal
|
of this software and associated documentation files (the "Software"), to deal
|
||||||
|
|||||||
43
LOCAL_DEV.md
Normal file
43
LOCAL_DEV.md
Normal file
@ -0,0 +1,43 @@
|
|||||||
|
# Local development
|
||||||
|
|
||||||
|
Steps to install and configure Postgres on a Mac for local development
|
||||||
|
|
||||||
|
1. Install homebrew
|
||||||
|
2. Install postgres
|
||||||
|
```sh
|
||||||
|
brew install postgresql
|
||||||
|
```
|
||||||
|
3. Create a database
|
||||||
|
```sh
|
||||||
|
createdb test
|
||||||
|
```
|
||||||
|
4. Create SSL certificates
|
||||||
|
```sh
|
||||||
|
cd /opt/homebrew/var/postgresql@14
|
||||||
|
openssl genrsa -aes128 2048 > server.key
|
||||||
|
openssl rsa -in server.key -out server.key
|
||||||
|
chmod 400 server.key
|
||||||
|
openssl req -new -key server.key -days 365 -out server.crt -x509
|
||||||
|
cp server.crt root.crt
|
||||||
|
```
|
||||||
|
5. Update config in `/opt/homebrew/var/postgresql@14/postgresql.conf`
|
||||||
|
|
||||||
|
```conf
|
||||||
|
listen_addresses = '*'
|
||||||
|
|
||||||
|
password_encryption = md5
|
||||||
|
|
||||||
|
ssl = on
|
||||||
|
ssl_ca_file = 'root.crt'
|
||||||
|
ssl_cert_file = 'server.crt'
|
||||||
|
ssl_crl_file = ''
|
||||||
|
ssl_crl_dir = ''
|
||||||
|
ssl_key_file = 'server.key'
|
||||||
|
ssl_ciphers = 'HIGH:MEDIUM:+3DES:!aNULL' # allowed SSL ciphers
|
||||||
|
ssl_prefer_server_ciphers = on
|
||||||
|
```
|
||||||
|
|
||||||
|
6. Start Postgres server
|
||||||
|
```sh
|
||||||
|
/opt/homebrew/opt/postgresql@14/bin/postgres -D /opt/homebrew/var/postgresql@14
|
||||||
|
```
|
||||||
122
README.md
122
README.md
@ -1,88 +1,120 @@
|
|||||||
# node-postgres
|
# node-postgres
|
||||||
|
|
||||||
[](http://travis-ci.org/brianc/node-postgres)
|

|
||||||
[](https://david-dm.org/brianc/node-postgres)
|
|
||||||
<span class="badge-npmversion"><a href="https://npmjs.org/package/pg" title="View this project on NPM"><img src="https://img.shields.io/npm/v/pg.svg" alt="NPM version" /></a></span>
|
<span class="badge-npmversion"><a href="https://npmjs.org/package/pg" title="View this project on NPM"><img src="https://img.shields.io/npm/v/pg.svg" alt="NPM version" /></a></span>
|
||||||
<span class="badge-npmdownloads"><a href="https://npmjs.org/package/pg" title="View this project on NPM"><img src="https://img.shields.io/npm/dm/pg.svg" alt="NPM downloads" /></a></span>
|
<span class="badge-npmdownloads"><a href="https://npmjs.org/package/pg" title="View this project on NPM"><img src="https://img.shields.io/npm/dm/pg.svg" alt="NPM downloads" /></a></span>
|
||||||
|
|
||||||
Non-blocking PostgreSQL client for node.js. Pure JavaScript and optional native libpq bindings.
|
Non-blocking PostgreSQL client for Node.js. Pure JavaScript and optional native libpq bindings.
|
||||||
|
|
||||||
|
## Monorepo
|
||||||
|
|
||||||
|
This repo is a monorepo which contains the core [pg](https://github.com/brianc/node-postgres/tree/master/packages/pg) module as well as a handful of related modules.
|
||||||
|
|
||||||
|
- [pg](https://github.com/brianc/node-postgres/tree/master/packages/pg)
|
||||||
|
- [pg-pool](https://github.com/brianc/node-postgres/tree/master/packages/pg-pool)
|
||||||
|
- [pg-native](https://github.com/brianc/node-postgres/tree/master/packages/pg-native)
|
||||||
|
- [pg-cursor](https://github.com/brianc/node-postgres/tree/master/packages/pg-cursor)
|
||||||
|
- [pg-query-stream](https://github.com/brianc/node-postgres/tree/master/packages/pg-query-stream)
|
||||||
|
- [pg-connection-string](https://github.com/brianc/node-postgres/tree/master/packages/pg-connection-string)
|
||||||
|
- [pg-protocol](https://github.com/brianc/node-postgres/tree/master/packages/pg-protocol)
|
||||||
|
|
||||||
## Install
|
## Install
|
||||||
|
|
||||||
```sh
|
```
|
||||||
$ npm install pg
|
npm install pg
|
||||||
```
|
```
|
||||||
|
|
||||||
---
|
## Documentation
|
||||||
## :star: [Documentation](https://node-postgres.com) :star:
|
|
||||||
|
|
||||||
|
Each package in this repo should have its own readme more focused on how to develop/contribute. For overall documentation on the project and the related modules managed by this repo please see:
|
||||||
|
|
||||||
|
### :star: [Documentation](https://node-postgres.com) :star:
|
||||||
|
|
||||||
|
The source repo for the documentation is available for contribution [here](https://github.com/brianc/node-postgres/tree/master/docs).
|
||||||
|
|
||||||
### Features
|
### Features
|
||||||
|
|
||||||
* pure JavaScript client and native libpq bindings share _the same api_
|
- Pure JavaScript client and native libpq bindings share _the same API_
|
||||||
* connection pooling
|
- Connection pooling
|
||||||
* extensible js<->postgresql data-type coercion
|
- Extensible JS ↔ PostgreSQL data-type coercion
|
||||||
* supported PostgreSQL features
|
- Supported PostgreSQL features
|
||||||
* parameterized queries
|
- Parameterized queries
|
||||||
* named statements with query plan caching
|
- Named statements with query plan caching
|
||||||
* async notifications with `LISTEN/NOTIFY`
|
- Async notifications with `LISTEN/NOTIFY`
|
||||||
* bulk import & export with `COPY TO/COPY FROM`
|
- Bulk import & export with `COPY TO/COPY FROM`
|
||||||
|
|
||||||
### Extras
|
### Extras
|
||||||
|
|
||||||
node-postgres is by design pretty light on abstractions. These are some handy modules we've been using over the years to complete the picture.
|
node-postgres is by design pretty light on abstractions. These are some handy modules we've been using over the years to complete the picture.
|
||||||
Entire list can be found on [wiki](https://github.com/brianc/node-postgres/wiki/Extras)
|
The entire list can be found on our [wiki](https://github.com/brianc/node-postgres/wiki/Extras).
|
||||||
|
|
||||||
## Support
|
## Support
|
||||||
|
|
||||||
node-postgres is free software. If you encounter a bug with the library please open an issue on the [github repo](https://github.com/brianc/node-postgres). If you have questions unanswered by the documentation please open an issue pointing out how the documentation was unclear & I will do my best to make it better!
|
node-postgres is free software. If you encounter a bug with the library please open an issue on the [GitHub repo](https://github.com/brianc/node-postgres). If you have questions unanswered by the documentation please open an issue pointing out how the documentation was unclear & I will do my best to make it better!
|
||||||
|
|
||||||
When you open an issue please provide:
|
When you open an issue please provide:
|
||||||
- version of node
|
|
||||||
- version of postgres
|
- version of Node
|
||||||
|
- version of Postgres
|
||||||
- smallest possible snippet of code to reproduce the problem
|
- smallest possible snippet of code to reproduce the problem
|
||||||
|
|
||||||
You can also follow me [@briancarlson](https://twitter.com/briancarlson) if that's your thing. I try to always announce noteworthy changes & developments with node-postgres on twitter.
|
You can also follow me [@briancarlson](https://twitter.com/briancarlson) if that's your thing. I try to always announce noteworthy changes & developments with node-postgres on Twitter.
|
||||||
|
|
||||||
### Professional Support
|
## Sponsorship :two_hearts:
|
||||||
|
|
||||||
I offer professional support for node-postgres. I provide implementation, training, and many years of expertise on how to build applications with node, express, PostgreSQL, and react/redux. Please contact me at [brian.m.carlson@gmail.com](mailto:brian.m.carlson@gmail.com) to discuss how I can help your company be more successful!
|
node-postgres's continued development has been made possible in part by generous financial support from [the community](https://github.com/brianc/node-postgres/blob/master/SPONSORS.md).
|
||||||
|
|
||||||
### Sponsorship :star:
|
If you or your company are benefiting from node-postgres and would like to help keep the project financially sustainable [please consider supporting](https://github.com/sponsors/brianc) its development.
|
||||||
|
|
||||||
If you are benefiting from node-postgres and would like to help keep the project financially sustainable please visit Brian Carlson's [Patreon page](https://www.patreon.com/node_postgres).
|
### Featured sponsor
|
||||||
|
|
||||||
|
Special thanks to [medplum](https://medplum.com) for their generous and thoughtful support of node-postgres!
|
||||||
|
|
||||||
|

|
||||||
|
|
||||||
## Contributing
|
## Contributing
|
||||||
|
|
||||||
__:heart: contributions!__
|
**:heart: contributions!**
|
||||||
|
|
||||||
I will __happily__ accept your pull request if it:
|
I will **happily** accept your pull request if it:
|
||||||
- __has tests__
|
|
||||||
|
- **has tests**
|
||||||
- looks reasonable
|
- looks reasonable
|
||||||
- does not break backwards compatibility
|
- does not break backwards compatibility
|
||||||
|
|
||||||
|
If your change involves breaking backwards compatibility please please point that out in the pull request & we can discuss & plan when and how to release it and what type of documentation or communication it will require.
|
||||||
|
|
||||||
|
### Setting up for local development
|
||||||
|
|
||||||
|
1. Clone the repo
|
||||||
|
2. Ensure you have installed libpq-dev in your system.
|
||||||
|
3. From your workspace root run `yarn` and then `yarn lerna bootstrap`
|
||||||
|
4. Ensure you have a PostgreSQL instance running with SSL enabled and an empty database for tests
|
||||||
|
5. Ensure you have the proper environment variables configured for connecting to the instance (see the example below)
|
||||||
|
6. Run `yarn test` to run all the tests
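For step 5, a hedged example of typical values (adjust to your local setup; these are the standard `PG*` environment variables that node-postgres and libpq pick up):

```sh
export PGUSER=postgres
export PGPASSWORD=postgres
export PGHOST=localhost
export PGPORT=5432
export PGDATABASE=test
```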
|
||||||
|
|
||||||
## Troubleshooting and FAQ
|
## Troubleshooting and FAQ
|
||||||
|
|
||||||
The causes and solutions to common errors can be found among the [Frequently Asked Questions(FAQ)](https://github.com/brianc/node-postgres/wiki/FAQ)
|
The causes and solutions to common errors can be found among the [Frequently Asked Questions (FAQ)](https://github.com/brianc/node-postgres/wiki/FAQ)
|
||||||
|
|
||||||
## License
|
## License
|
||||||
|
|
||||||
Copyright (c) 2010-2017 Brian Carlson (brian.m.carlson@gmail.com)
|
Copyright (c) 2010-2020 Brian Carlson (brian.m.carlson@gmail.com)
|
||||||
|
|
||||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||||
of this software and associated documentation files (the "Software"), to deal
|
of this software and associated documentation files (the "Software"), to deal
|
||||||
in the Software without restriction, including without limitation the rights
|
in the Software without restriction, including without limitation the rights
|
||||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||||
copies of the Software, and to permit persons to whom the Software is
|
copies of the Software, and to permit persons to whom the Software is
|
||||||
furnished to do so, subject to the following conditions:
|
furnished to do so, subject to the following conditions:
|
||||||
|
|
||||||
The above copyright notice and this permission notice shall be included in
|
The above copyright notice and this permission notice shall be included in
|
||||||
all copies or substantial portions of the Software.
|
all copies or substantial portions of the Software.
|
||||||
|
|
||||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||||
THE SOFTWARE.
|
THE SOFTWARE.
|
||||||
|
|||||||
49
SPONSORS.md
49
SPONSORS.md
@ -1,11 +1,56 @@
|
|||||||
node-postgres is made possible by the helpful contributors from the community well as the following generous supporters on [Patreon](https://www.patreon.com/node_postgres).
|
node-postgres is made possible by the helpful contributors from the community as well as the following generous supporters on [GitHub Sponsors](https://github.com/sponsors/brianc) and [Patreon](https://www.patreon.com/node_postgres).
|
||||||
|
|
||||||
# Leaders
|
# Leaders
|
||||||
|
|
||||||
- [MadKudu](https://www.madkudu.com) - [@madkudu](https://twitter.com/madkudu)
|
- [MadKudu](https://www.madkudu.com) - [@madkudu](https://twitter.com/madkudu)
|
||||||
|
- [Third Iron](https://thirdiron.com/)
|
||||||
|
- [Timescale](https://timescale.com)
|
||||||
|
- [Nafundi](https://nafundi.com)
|
||||||
|
- [CrateDB](https://crate.io/)
|
||||||
|
- [BitMEX](https://www.bitmex.com/app/trade/XBTUSD)
|
||||||
|
- [Dataform](https://dataform.co/)
|
||||||
|
- [Eaze](https://www.eaze.com/)
|
||||||
|
- [simpleanalytics](https://simpleanalytics.com/)
|
||||||
|
- [n8n.io](https://n8n.io/)
|
||||||
|
- [mpirik](https://github.com/mpirik)
|
||||||
|
- [@BLUE-DEVIL1134](https://github.com/BLUE-DEVIL1134)
|
||||||
|
- [bubble.io](https://bubble.io/)
|
||||||
|
- [GitHub](https://github.com/github)
|
||||||
|
- [n8n](https://n8n.io/)
|
||||||
|
- [loveland](https://github.com/loveland)
|
||||||
|
- [gajus](https://github.com/gajus)
|
||||||
|
- [thirdiron](https://github.com/thirdiron)
|
||||||
|
|
||||||
# Supporters
|
# Supporters
|
||||||
|
|
||||||
- John Fawcett
|
- John Fawcett
|
||||||
- Lalit Kapoor [@lalitkapoor](https://twitter.com/lalitkapoor)
|
- Lalit Kapoor [@lalitkapoor](https://twitter.com/lalitkapoor)
|
||||||
- Paul Frazee [@pfrazee](https://twitter.com/pfrazee)
|
- Paul Frazee [@pfrazee](https://twitter.com/pfrazee)
|
||||||
- Rein Petersen
|
- Rein Petersen
|
||||||
- Arnaud Benhamdine
|
- Arnaud Benhamdine [@abenhamdine](https://twitter.com/abenhamdine)
|
||||||
|
- Matthew Welke
|
||||||
|
- Matthew Weber
|
||||||
|
- Andrea De Simon
|
||||||
|
- Todd Kennedy
|
||||||
|
- Alexander Robson
|
||||||
|
- Benjie Gillam
|
||||||
|
- David Hanson
|
||||||
|
- Franklin Davenport
|
||||||
|
- [Eventbot](https://geteventbot.com/)
|
||||||
|
- Chuck T
|
||||||
|
- Paul Cothenet
|
||||||
|
- Pelle Wessman
|
||||||
|
- Raul Murray
|
||||||
|
- Simple Analytics
|
||||||
|
- Trevor Linton
|
||||||
|
- Ian Walter
|
||||||
|
- @Guido4000
|
||||||
|
- [Martti Laine](https://github.com/codeclown)
|
||||||
|
- [Tim Nolet](https://github.com/tnolet)
|
||||||
|
- [Ideal Postcodes](https://github.com/ideal-postcodes)
|
||||||
|
- [checkly](https://github.com/checkly)
|
||||||
|
- [Scout APM](https://github.com/scoutapm-sponsorships)
|
||||||
|
- [Sideline Sports](https://github.com/SidelineSports)
|
||||||
|
- [Gadget](https://github.com/gadget-inc)
|
||||||
|
- [Sentry](https://sentry.io/welcome/)
|
||||||
|
- [devlikeapro](https://github.com/devlikepro)
|
||||||
|
|||||||
2
docs/.gitignore
vendored
Normal file
2
docs/.gitignore
vendored
Normal file
@ -0,0 +1,2 @@
|
|||||||
|
.next
|
||||||
|
out
|
||||||
20
docs/README.md
Normal file
20
docs/README.md
Normal file
@ -0,0 +1,20 @@
|
|||||||
|
# node-postgres docs website
|
||||||
|
|
||||||
|
This is the documentation for node-postgres which is currently hosted at [https://node-postgres.com](https://node-postgres.com).
|
||||||
|
|
||||||
|
## Development
|
||||||
|
|
||||||
|
To run the documentation locally, you need to have [Node.js](https://nodejs.org) installed. Then, you can clone the repository and install the dependencies:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
cd docs
|
||||||
|
yarn
|
||||||
|
```
|
||||||
|
|
||||||
|
Once you've installed the deps, you can run the development server:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
yarn dev
|
||||||
|
```
|
||||||
|
|
||||||
|
This will start a local server at [http://localhost:3000](http://localhost:3000) where you can view the documentation and see your changes.
|
||||||
9
docs/components/alert.tsx
Normal file
9
docs/components/alert.tsx
Normal file
@ -0,0 +1,9 @@
|
|||||||
|
import { Callout } from 'nextra-theme-docs'
|
||||||
|
|
||||||
|
export const Alert = ({ children }) => {
|
||||||
|
return (
|
||||||
|
<Callout type="warning" emoji="⚠️">
|
||||||
|
{children}
|
||||||
|
</Callout>
|
||||||
|
)
|
||||||
|
}
|
||||||
5
docs/components/info.tsx
Normal file
5
docs/components/info.tsx
Normal file
@ -0,0 +1,5 @@
|
|||||||
|
import { Callout } from 'nextra-theme-docs'
|
||||||
|
|
||||||
|
export const Info = ({ children }) => {
|
||||||
|
return <Callout emoji="ℹ️">{children}</Callout>
|
||||||
|
}
|
||||||
9
docs/components/logo.tsx
Normal file
9
docs/components/logo.tsx
Normal file
@ -0,0 +1,9 @@
|
|||||||
|
type Props = {
|
||||||
|
src: string
|
||||||
|
alt?: string
|
||||||
|
}
|
||||||
|
|
||||||
|
export function Logo(props: Props) {
|
||||||
|
const alt = props.alt || 'Logo'
|
||||||
|
return <img src={props.src} alt={alt} width={100} height={100} style={{ width: 400, height: 'auto' }} />
|
||||||
|
}
|
||||||
8
docs/next.config.js
Normal file
8
docs/next.config.js
Normal file
@ -0,0 +1,8 @@
|
|||||||
|
// next.config.js
|
||||||
|
const withNextra = require('nextra')({
|
||||||
|
theme: 'nextra-theme-docs',
|
||||||
|
themeConfig: './theme.config.js',
|
||||||
|
// optional: add `unstable_staticImage: true` to enable Nextra's auto image import
|
||||||
|
})
|
||||||
|
|
||||||
|
module.exports = withNextra()
|
||||||
20
docs/package.json
Normal file
20
docs/package.json
Normal file
@ -0,0 +1,20 @@
|
|||||||
|
{
|
||||||
|
"name": "docs",
|
||||||
|
"version": "1.0.0",
|
||||||
|
"description": "",
|
||||||
|
"main": "next.config.js",
|
||||||
|
"scripts": {
|
||||||
|
"start": "next dev",
|
||||||
|
"build": "next build && next export"
|
||||||
|
},
|
||||||
|
"keywords": [],
|
||||||
|
"author": "",
|
||||||
|
"license": "ISC",
|
||||||
|
"dependencies": {
|
||||||
|
"next": "^12.3.1",
|
||||||
|
"nextra": "2.0.0-beta.29",
|
||||||
|
"nextra-theme-docs": "2.0.0-beta.29",
|
||||||
|
"react": "^17.0.1",
|
||||||
|
"react-dom": "^17.0.1"
|
||||||
|
}
|
||||||
|
}
|
||||||
5
docs/pages/_app.js
Normal file
5
docs/pages/_app.js
Normal file
@ -0,0 +1,5 @@
|
|||||||
|
import 'nextra-theme-docs/style.css'
|
||||||
|
|
||||||
|
export default function Nextra({ Component, pageProps }) {
|
||||||
|
return <Component {...pageProps} />
|
||||||
|
}
|
||||||
5
docs/pages/_meta.json
Normal file
5
docs/pages/_meta.json
Normal file
@ -0,0 +1,5 @@
|
|||||||
|
{
|
||||||
|
"index": "Welcome",
|
||||||
|
"announcements": "Announcements",
|
||||||
|
"apis": "API"
|
||||||
|
}
|
||||||
146
docs/pages/announcements.mdx
Normal file
146
docs/pages/announcements.mdx
Normal file
@ -0,0 +1,146 @@
|
|||||||
|
import { Alert } from '/components/alert.tsx'
|
||||||
|
|
||||||
|
## 2020-02-25
|
||||||
|
|
||||||
|
### pg@8.0 release
|
||||||
|
|
||||||
|
`pg@8.0` is [being released](https://github.com/brianc/node-postgres/pull/2117) which contains a handful of breaking changes.
|
||||||
|
|
||||||
|
I will outline each breaking change here and try to give some historical context on them. Most of them are small and subtle and likely won't impact you; **however**, there is one larger breaking change you will likely run into:
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
- Support all `tls.connect` [options](https://nodejs.org/api/tls.html#tls_tls_connect_options_callback) being passed to the client/pool constructor under the `ssl` option.
|
||||||
|
|
||||||
|
Previously we whitelisted the parameters passed here and did slight massaging of some of them. The main **breaking** change here is that now if you do this:
|
||||||
|
|
||||||
|
```js
|
||||||
|
const client = new Client({ ssl: true })
|
||||||
|
```
|
||||||
|
|
||||||
|
<Alert>
|
||||||
|
Now we will use the default ssl options to tls.connect which includes rejectUnauthorized being enabled. This means
|
||||||
|
your connection attempt may fail if you are using a self-signed cert. To use the old behavior you should do this:
|
||||||
|
</Alert>
|
||||||
|
|
||||||
|
```js
|
||||||
|
const client = new Client({ ssl: { rejectUnauthorized: false } })
|
||||||
|
```
|
||||||
|
|
||||||
|
This makes pg a bit more secure "out of the box" while still enabling you to opt in to the old behavior.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
The rest of the changes are relatively minor & you likely won't need to do anything, but it's good to be aware of them nonetheless!
|
||||||
|
|
||||||
|
- change default database name
|
||||||
|
|
||||||
|
If a database name is not specified, available in the environment at `PGDATABASE`, or available at `pg.defaults`, we used to use the username of the process user as the name of the database. Now we will use the `user` property supplied to the client as the database name, if it exists. What this means is this:
|
||||||
|
|
||||||
|
```jsx
|
||||||
|
new Client({
|
||||||
|
user: 'foo',
|
||||||
|
})
|
||||||
|
```
|
||||||
|
|
||||||
|
`pg@7.x` will default the database name to the _process_ user. `pg@8.x` will use the `user` property supplied to the client. If you have not supplied `user` to the client, and it isn't available through any of its existing lookup mechanisms (environment variables, pg.defaults) then it will still use the process user for the database name.
|
||||||
|
|
||||||
|
- drop support for versions of node older than 8.0
|
||||||
|
|
||||||
|
Node@6.0 has been out of LTS for quite some time now, and I've removed it from our test matrix. `pg@8.0` _may_ still work on older versions of node, but it isn't a goal of the project anymore. Node@8.0 is actually no longer in the LTS support line, but pg will continue to test against and support 8.0 until there is a compelling reason to drop support for it. For any security vulnerabilities that come up I will back-port fixes to the `pg@7.x` line and do a release, but other fixes or improvements will not be back-ported.
|
||||||
|
|
||||||
|
- prevent password from being logged accidentally
|
||||||
|
|
||||||
|
`pg@8.0` makes the password field on the pool and client non-enumerable. This means when you do `console.log(client)` you won't have your database password printed out unintentionally. You can still do `console.log(client.password)` if you really want to see it!
|
||||||
|
|
||||||
|
- make `pg.native` non-enumerable
|
||||||
|
|
||||||
|
You can use `pg.native.Client` to access the native client. The first time you access the `pg.native` getter it imports the native bindings...which must be installed. In some cases (such as webpacking the pg code for lambda deployment) the `.native` property would be traversed and trigger an import of the native bindings as a side-effect. Making this property non-enumerable will fix this issue. An easy fix, but it's technically a breaking change in cases where people _are_ relying on this side effect for any reason.
|
||||||
|
|
||||||
|
- make `pg.Pool` an es6 class
|
||||||
|
|
||||||
|
This makes extending `pg.Pool` possible. Previously it was not a "proper" es6 class and `class MyPool extends pg.Pool` wouldn't work.
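  A minimal, hedged sketch of what the change enables (the `LoggingPool` subclass is purely illustrative):

  ```js
  import { Pool } from 'pg'

  class LoggingPool extends Pool {
    query(...args) {
      console.log('query:', args[0])
      return super.query(...args)
    }
  }

  const pool = new LoggingPool()
  const { rows } = await pool.query('SELECT NOW()')
  ```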
|
||||||
|
|
||||||
|
- make `Notice` messages _not_ an instance of a JavaScript error
|
||||||
|
|
||||||
|
The code path for parsing `notice` and `error` messages from the postgres backend is the same. Previously a JavaScript `Error` instance was created for _both_ of these message types. Now, only actual `errors` from the postgres backend will be an instance of an `Error`. The _shape_ and _properties_ of the two messages did not change outside of this.
|
||||||
|
|
||||||
|
- monorepo
|
||||||
|
|
||||||
|
While not technically a breaking change for the module itself, I have begun the process of [consolidating](https://github.com/brianc/node-pg-query-stream) [separate](https://github.com/brianc/node-pg-cursor/) [repos](https://github.com/brianc/node-pg-pool) into the main [repo](https://github.com/brianc/node-postgres) and converted it into a monorepo managed by lerna. This will help me stay on top of issues better (it was hard to bounce between 3-4 separate repos) and coordinate bug fixes and changes between dependent modules.
|
||||||
|
|
||||||
|
Thanks for reading that! pg tries to be super pedantic about not breaking backwards-compatibility in non semver major releases....even for seemingly small things. If you ever notice a breaking change on a semver minor/patch release please stop by the [repo](https://github.com/brianc/node-postgres) and open an issue!
|
||||||
|
|
||||||
|
_If you find `pg` valuable to you or your business please consider [supporting](http://github.com/sponsors/brianc) its continued development! Big performance improvements, typescript, better docs, query pipelining and more are all in the works!_
|
||||||
|
|
||||||
|
## 2019-07-18
|
||||||
|
|
||||||
|
### New documentation
|
||||||
|
|
||||||
|
After a _very_ long time on my todo list I've ported the docs from my old hand-rolled webapp running on route53 + elb + ec2 + dokku (I know, I went overboard!) to [gatsby](https://www.gatsbyjs.org/) hosted on [netlify](https://www.netlify.com/) which is _so_ much easier to manage. I've released the code at [https://github.com/brianc/node-postgres-docs](https://github.com/brianc/node-postgres-docs) and invite your contributions! Let's make this documentation better together. Any time changes are merged to master on the documentation repo it will automatically deploy.
|
||||||
|
|
||||||
|
If you see an error in the docs, big or small, use the "edit on GitHub" button to edit the page & submit a pull request right there. I'll get a new version out ASAP with your changes! If you want to add new pages of documentation open an issue if you need guidance, and I'll help you get started.
|
||||||
|
|
||||||
|
I want to extend a special **thank you** to all the [supporters](https://github.com/brianc/node-postgres/blob/master/SPONSORS.md) and [contributors](https://github.com/brianc/node-postgres/graphs/contributors) to the project that have helped keep me going through times of burnout or life "getting in the way." ❤️
|
||||||
|
|
||||||
|
It's been quite a journey, and I look forward to continuing it for as long as I can provide value to all y'all. 🤠
|
||||||
|
|
||||||
|
## 2017-08-12
|
||||||
|
|
||||||
|
### code execution vulnerability
|
||||||
|
|
||||||
|
Today [@sehrope](https://github.com/sehrope) found and reported a code execution vulnerability in node-postgres. This affects all versions from `pg@2.x` through `pg@7.1.0`.
|
||||||
|
|
||||||
|
I have published a fix on the tip of each major version branch of all affected versions as well as a fix on each minor version branch of `pg@6.x` and `pg@7.x`:
|
||||||
|
|
||||||
|
### Fixes
|
||||||
|
|
||||||
|
The following versions have been published to npm & contain a patch to fix the vulnerability:
|
||||||
|
|
||||||
|
```
|
||||||
|
pg@2.11.2
|
||||||
|
pg@3.6.4
|
||||||
|
pg@4.5.7
|
||||||
|
pg@5.2.1
|
||||||
|
pg@6.0.5
|
||||||
|
pg@6.1.6
|
||||||
|
pg@6.2.5
|
||||||
|
pg@6.3.3
|
||||||
|
pg@6.4.2
|
||||||
|
pg@7.0.3
|
||||||
|
pg@7.1.2
|
||||||
|
```
|
||||||
|
|
||||||
|
### Example
|
||||||
|
|
||||||
|
To demonstrate the issue & see if you are vulnerable execute the following in node:
|
||||||
|
|
||||||
|
```js
|
||||||
|
import pg from 'pg'
|
||||||
|
const { Client } = pg
|
||||||
|
const client = new Client()
|
||||||
|
client.connect()
|
||||||
|
|
||||||
|
const sql = `SELECT 1 AS "\\'/*", 2 AS "\\'*/\n + console.log(process.env)] = null;\n//"`
|
||||||
|
|
||||||
|
client.query(sql, (err, res) => {
|
||||||
|
client.end()
|
||||||
|
})
|
||||||
|
```
|
||||||
|
|
||||||
|
You will see your environment variables printed to your console. An attacker can use this exploit to execute any arbitrary node code within your process.
|
||||||
|
|
||||||
|
### Impact
|
||||||
|
|
||||||
|
This vulnerability _likely_ does not impact you if you are connecting to a database you control and not executing user-supplied sql. Still, you should **absolutely** upgrade to the most recent patch version as soon as possible to be safe.
|
||||||
|
|
||||||
|
Two attack vectors we quickly thought of:
|
||||||
|
|
||||||
|
- 1 - executing unsafe, user-supplied sql which contains a malicious column name like the one above.
|
||||||
|
- 2 - connecting to an untrusted database and executing a query which returns results where any of the column names are malicious.
|
||||||
|
|
||||||
|
### Support
|
||||||
|
|
||||||
|
I have created [an issue](https://github.com/brianc/node-postgres/issues/1408) you can use to discuss the vulnerability with me or ask questions, and I have reported this issue [on twitter](https://twitter.com/briancarlson) and directly to Heroku and [nodesecurity.io](https://nodesecurity.io/).
|
||||||
|
|
||||||
|
I take security very seriously. If you or your company benefit from node-postgres **[please sponsor my work](https://www.patreon.com/node_postgres)**: this type of issue is one of the many things I am responsible for, and I want to be able to continue to tirelessly provide a world-class PostgreSQL experience in node for years to come.
|
||||||
8
docs/pages/apis/_meta.json
Normal file
8
docs/pages/apis/_meta.json
Normal file
@ -0,0 +1,8 @@
|
|||||||
|
{
|
||||||
|
"client": "pg.Client",
|
||||||
|
"pool": "pg.Pool",
|
||||||
|
"result": "pg.Result",
|
||||||
|
"types": "pg.Types",
|
||||||
|
"cursor": "Cursor",
|
||||||
|
"utilities": "Utilities"
|
||||||
|
}
|
||||||
243
docs/pages/apis/client.mdx
Normal file
243
docs/pages/apis/client.mdx
Normal file
@ -0,0 +1,243 @@
|
|||||||
|
---
|
||||||
|
title: pg.Client
|
||||||
|
---
|
||||||
|
|
||||||
|
## new Client
|
||||||
|
|
||||||
|
`new Client(config: Config)`
|
||||||
|
|
||||||
|
Every field of the `config` object is entirely optional. A `Client` instance will use [environment variables](/features/connecting#environment-variables) for all missing values.
|
||||||
|
|
||||||
|
```ts
|
||||||
|
type Config = {
|
||||||
|
user?: string, // default process.env.PGUSER || process.env.USER
|
||||||
|
password?: string or function, //default process.env.PGPASSWORD
|
||||||
|
host?: string, // default process.env.PGHOST
|
||||||
|
port?: number, // default process.env.PGPORT
|
||||||
|
database?: string, // default process.env.PGDATABASE || user
|
||||||
|
connectionString?: string, // e.g. postgres://user:password@host:5432/database
|
||||||
|
ssl?: any, // passed directly to node.TLSSocket, supports all tls.connect options
|
||||||
|
types?: any, // custom type parsers
|
||||||
|
statement_timeout?: number, // number of milliseconds before a statement in query will time out, default is no timeout
|
||||||
|
query_timeout?: number, // number of milliseconds before a query call will timeout, default is no timeout
|
||||||
|
lock_timeout?: number, // number of milliseconds a query is allowed to be in a lock state before it's cancelled due to lock timeout
|
||||||
|
application_name?: string, // The name of the application that created this Client instance
|
||||||
|
connectionTimeoutMillis?: number, // number of milliseconds to wait for connection, default is no timeout
|
||||||
|
keepAliveInitialDelayMillis?: number, // set the initial delay before the first keepalive probe is sent on an idle socket
|
||||||
|
idle_in_transaction_session_timeout?: number, // number of milliseconds before terminating any session with an open idle transaction, default is no timeout
|
||||||
|
client_encoding?: string, // specifies the character set encoding that the database uses for sending data to the client
|
||||||
|
fallback_application_name?: string, // provide an application name to use if application_name is not set
|
||||||
|
options?: string // command-line options to be sent to the server
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
example to create a client with specific connection information:
|
||||||
|
|
||||||
|
```js
|
||||||
|
import { Client } from 'pg'
|
||||||
|
|
||||||
|
const client = new Client({
|
||||||
|
user: 'database-user',
|
||||||
|
password: 'secretpassword!!',
|
||||||
|
host: 'my.database-server.com',
|
||||||
|
port: 5334,
|
||||||
|
database: 'database-name',
|
||||||
|
})
|
||||||
|
```
|
||||||
|
|
||||||
|
## client.connect
|
||||||
|
|
||||||
|
```js
|
||||||
|
import { Client } from 'pg'
|
||||||
|
const client = new Client()
|
||||||
|
|
||||||
|
await client.connect()
|
||||||
|
```
|
||||||
|
|
||||||
|
## client.query
|
||||||
|
|
||||||
|
### QueryConfig
|
||||||
|
|
||||||
|
You can pass an object to `client.query` with the signature of:
|
||||||
|
|
||||||
|
```ts
|
||||||
|
type QueryConfig {
|
||||||
|
// the raw query text
|
||||||
|
text: string;
|
||||||
|
|
||||||
|
// an array of query parameters
|
||||||
|
values?: Array<any>;
|
||||||
|
|
||||||
|
// name of the query - used for prepared statements
|
||||||
|
name?: string;
|
||||||
|
|
||||||
|
// by default rows come out as a key/value pair for each row
|
||||||
|
// pass the string 'array' here to receive rows as an array of values
|
||||||
|
rowMode?: string;
|
||||||
|
|
||||||
|
// custom type parsers just for this query result
|
||||||
|
types?: Types;
|
||||||
|
|
||||||
|
// TODO: document
|
||||||
|
queryMode?: string;
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
```ts
|
||||||
|
client.query(text: string, values?: any[]) => Promise<Result>
|
||||||
|
```
|
||||||
|
|
||||||
|
**Plain text query**
|
||||||
|
|
||||||
|
```js
|
||||||
|
import { Client } from 'pg'
|
||||||
|
const client = new Client()
|
||||||
|
|
||||||
|
await client.connect()
|
||||||
|
|
||||||
|
const result = await client.query('SELECT NOW()')
|
||||||
|
console.log(result)
|
||||||
|
|
||||||
|
await client.end()
|
||||||
|
```
|
||||||
|
|
||||||
|
**Parameterized query**
|
||||||
|
|
||||||
|
```js
|
||||||
|
import { Client } from 'pg'
|
||||||
|
const client = new Client()
|
||||||
|
|
||||||
|
await client.connect()
|
||||||
|
|
||||||
|
const result = await client.query('SELECT $1::text as name', ['brianc'])
|
||||||
|
console.log(result)
|
||||||
|
|
||||||
|
await client.end()
|
||||||
|
```
|
||||||
|
|
||||||
|
```ts
|
||||||
|
client.query(config: QueryConfig) => Promise<Result>
|
||||||
|
```
|
||||||
|
|
||||||
|
**client.query with a QueryConfig**
|
||||||
|
|
||||||
|
If you pass a `name` parameter to the `client.query` method, the client will create a [prepared statement](/features/queries#prepared-statements).
|
||||||
|
|
||||||
|
```js
|
||||||
|
const query = {
|
||||||
|
name: 'get-name',
|
||||||
|
text: 'SELECT $1::text',
|
||||||
|
values: ['brianc'],
|
||||||
|
rowMode: 'array',
|
||||||
|
}
|
||||||
|
|
||||||
|
const result = await client.query(query)
|
||||||
|
console.log(result.rows) // ['brianc']
|
||||||
|
|
||||||
|
await client.end()
|
||||||
|
```
|
||||||
|
|
||||||
|
**client.query with a `Submittable`**
|
||||||
|
|
||||||
|
If you pass an object to `client.query` and the object has a `.submit` function on it, the client will pass its PostgreSQL server connection to the object and delegate query dispatching to the supplied object. This is an advanced feature mostly intended for library authors. It is incidentally also currently how the callback and promise based queries above are handled internally, but this is subject to change. It is also how [pg-cursor](https://github.com/brianc/node-pg-cursor) and [pg-query-stream](https://github.com/brianc/node-pg-query-stream) work.
|
||||||
|
|
||||||
|
```js
|
||||||
|
import { Query } from 'pg'
|
||||||
|
const query = new Query('select $1::text as name', ['brianc'])
|
||||||
|
|
||||||
|
const result = client.query(query)
|
||||||
|
|
||||||
|
assert(query === result) // true
|
||||||
|
|
||||||
|
query.on('row', (row) => {
|
||||||
|
console.log('row!', row) // { name: 'brianc' }
|
||||||
|
})
|
||||||
|
|
||||||
|
query.on('end', () => {
|
||||||
|
console.log('query done')
|
||||||
|
})
|
||||||
|
|
||||||
|
query.on('error', (err) => {
|
||||||
|
console.error(err.stack)
|
||||||
|
})
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## client.end
|
||||||
|
|
||||||
|
Disconnects the client from the PostgreSQL server.
|
||||||
|
|
||||||
|
```js
|
||||||
|
await client.end()
|
||||||
|
console.log('client has disconnected')
|
||||||
|
```
|
||||||
|
|
||||||
|
## events
|
||||||
|
|
||||||
|
### error
|
||||||
|
|
||||||
|
```ts
|
||||||
|
client.on('error', (err: Error) => void) => void
|
||||||
|
```
|
||||||
|
|
||||||
|
When the client is in the process of connecting, dispatching a query, or disconnecting it will catch and forward errors from the PostgreSQL server to the respective `client.connect` `client.query` or `client.end` promise; however, the client maintains a long-lived connection to the PostgreSQL back-end and due to network partitions, back-end crashes, fail-overs, etc the client can (and over a long enough time period _will_) eventually be disconnected while it is idle. To handle this you may want to attach an error listener to a client to catch errors. Here's a contrived example:
|
||||||
|
|
||||||
|
```js
|
||||||
|
const client = new pg.Client()
|
||||||
|
client.connect()
|
||||||
|
|
||||||
|
client.on('error', (err) => {
|
||||||
|
console.error('something bad has happened!', err.stack)
|
||||||
|
})
|
||||||
|
|
||||||
|
// walk over to server, unplug network cable
|
||||||
|
|
||||||
|
// process output: 'something bad has happened!' followed by stacktrace :P
|
||||||
|
```
|
||||||
|
|
||||||
|
### end
|
||||||
|
|
||||||
|
```ts
|
||||||
|
client.on('end') => void
|
||||||
|
```
|
||||||
|
|
||||||
|
When the client disconnects from the PostgreSQL server it will emit an end event once.
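A minimal sketch of listening for it (assuming a connected `client`):

```js
client.on('end', () => {
  console.log('client has ended its session with the PostgreSQL server')
})

await client.end() // triggers the 'end' event once the connection closes
```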
|
||||||
|
|
||||||
|
### notification
|
||||||
|
|
||||||
|
Used for `listen/notify` events:
|
||||||
|
|
||||||
|
```ts
|
||||||
|
type Notification {
|
||||||
|
processId: number,
|
||||||
|
channel: string,
|
||||||
|
payload?: string
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
```js
|
||||||
|
const client = new pg.Client()
|
||||||
|
await client.connect()
|
||||||
|
|
||||||
|
client.query('LISTEN foo')
|
||||||
|
|
||||||
|
client.on('notification', (msg) => {
|
||||||
|
console.log(msg.channel) // foo
|
||||||
|
console.log(msg.payload) // bar!
|
||||||
|
})
|
||||||
|
|
||||||
|
client.query(`NOTIFY foo, 'bar!'`)
|
||||||
|
```
|
||||||
|
|
||||||
|
### notice
|
||||||
|
|
||||||
|
```ts
|
||||||
|
client.on('notice', (notice: Error) => void) => void
|
||||||
|
```
|
||||||
|
|
||||||
|
Used to log out [notice messages](https://www.postgresql.org/docs/9.6/static/plpgsql-errors-and-messages.html) from the PostgreSQL server.
|
||||||
|
|
||||||
|
```js
|
||||||
|
client.on('notice', (msg) => console.warn('notice:', msg))
|
||||||
|
```
|
||||||
76
docs/pages/apis/cursor.mdx
Normal file
76
docs/pages/apis/cursor.mdx
Normal file
@ -0,0 +1,76 @@
|
|||||||
|
---
|
||||||
|
title: pg.Cursor
|
||||||
|
slug: /apis/cursor
|
||||||
|
---
|
||||||
|
|
||||||
|
A cursor can be used to efficiently read through large result sets without loading the entire result-set into memory ahead of time. It's useful to simulate a 'streaming' style read of data, or exit early from a large result set. The cursor is passed to `client.query` and is dispatched internally in a way very similar to how normal queries are sent, but the API it presents for consuming the result set is different.
|
||||||
|
|
||||||
|
## install
|
||||||
|
|
||||||
|
```
|
||||||
|
$ npm install pg pg-cursor
|
||||||
|
```
|
||||||
|
|
||||||
|
## constructor
|
||||||
|
|
||||||
|
### `new Cursor(text: String, values: Any[][, config: CursorQueryConfig])`
|
||||||
|
|
||||||
|
Instantiates a new Cursor. A cursor is an instance of `Submittable` and should be passed directly to the `client.query` method.
|
||||||
|
|
||||||
|
```js
|
||||||
|
import { Pool } from 'pg'
|
||||||
|
import Cursor from 'pg-cursor'
|
||||||
|
|
||||||
|
const pool = new Pool()
|
||||||
|
const client = await pool.connect()
|
||||||
|
const text = 'SELECT * FROM my_large_table WHERE something > $1'
|
||||||
|
const values = [10]
|
||||||
|
|
||||||
|
const cursor = client.query(new Cursor(text, values))
|
||||||
|
|
||||||
|
const { rows } = await cursor.read(100)
|
||||||
|
console.log(rows.length) // 100 (unless the table has fewer than 100 rows)
|
||||||
|
client.release()
|
||||||
|
```
|
||||||
|
|
||||||
|
```ts
|
||||||
|
type CursorQueryConfig {
|
||||||
|
// by default rows come out as a key/value pair for each row
|
||||||
|
// pass the string 'array' here to receive rows as an array of values
|
||||||
|
rowMode?: string;
|
||||||
|
|
||||||
|
// custom type parsers just for this query result
|
||||||
|
types?: Types;
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
## read
|
||||||
|
|
||||||
|
### `cursor.read(rowCount: Number) => Promise<pg.Result>`
|
||||||
|
|
||||||
|
Read `rowCount` rows from the cursor instance. The returned promise resolves when the rows are available, loaded into memory, parsed, and converted to JavaScript types.
|
||||||
|
|
||||||
|
If the cursor has read to the end of the result set, all subsequent calls to `cursor#read` will return a 0 length array of rows. Calling `read` on a cursor that has already read to the end is safe and will simply keep returning empty results.
|
||||||
|
|
||||||
|
Here is an example of reading to the end of a cursor:
|
||||||
|
|
||||||
|
```js
|
||||||
|
import { Pool } from 'pg'
|
||||||
|
import Cursor from 'pg-cursor'
|
||||||
|
|
||||||
|
const pool = new Pool()
|
||||||
|
const client = await pool.connect()
|
||||||
|
const cursor = client.query(new Cursor('select * from generate_series(0, 5)'))
|
||||||
|
|
||||||
|
let rows = await cursor.read(100)
|
||||||
|
assert(rows.length == 6)
|
||||||
|
|
||||||
|
rows = await cursor.read(100)
|
||||||
|
assert(rows.length == 0)
|
||||||
|
```
|
||||||
|
|
||||||
|
## close
|
||||||
|
|
||||||
|
### `cursor.close() => Promise<void>`
|
||||||
|
|
||||||
|
Used to close the cursor early. If you want to stop reading from the cursor before you get all of the rows returned, call this.
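A minimal, hedged sketch of stopping early (reusing the pool/client setup from the constructor example above; `my_large_table` is hypothetical):

```js
const cursor = client.query(new Cursor('SELECT * FROM my_large_table'))
const rows = await cursor.read(25) // take just the first batch
console.log(rows.length)
await cursor.close() // discard the rest of the result set
client.release()
```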
|
||||||
251
docs/pages/apis/pool.mdx
Normal file
251
docs/pages/apis/pool.mdx
Normal file
@ -0,0 +1,251 @@
|
|||||||
|
---
|
||||||
|
title: pg.Pool
|
||||||
|
---
|
||||||
|
|
||||||
|
import { Alert } from '/components/alert.tsx'
|
||||||
|
|
||||||
|
## new Pool
|
||||||
|
|
||||||
|
```ts
|
||||||
|
new Pool(config: Config)
|
||||||
|
```
|
||||||
|
|
||||||
|
Constructs a new pool instance.
|
||||||
|
|
||||||
|
The pool is initially created empty and will create new clients lazily as they are needed. Every field of the `config` object is entirely optional. The config passed to the pool is also passed to every client instance within the pool when the pool creates that client.
|
||||||
|
|
||||||
|
```ts
|
||||||
|
type Config = {
|
||||||
|
// all valid client config options are also valid here
|
||||||
|
// in addition here are the pool specific configuration parameters:
|
||||||
|
|
||||||
|
// number of milliseconds to wait before timing out when connecting a new client
|
||||||
|
// by default this is 0 which means no timeout
|
||||||
|
connectionTimeoutMillis?: number
|
||||||
|
|
||||||
|
// number of milliseconds a client must sit idle in the pool and not be checked out
|
||||||
|
// before it is disconnected from the backend and discarded
|
||||||
|
// default is 10000 (10 seconds) - set to 0 to disable auto-disconnection of idle clients
|
||||||
|
idleTimeoutMillis?: number
|
||||||
|
|
||||||
|
// maximum number of clients the pool should contain
|
||||||
|
// by default this is set to 10. There is some nuance to setting the maximum size of your pool.
|
||||||
|
// see https://node-postgres.com/guides/pool-sizing for more information
|
||||||
|
max?: number
|
||||||
|
|
||||||
|
// minimum number of clients the pool should hold on to and _not_ destroy with the idleTimeoutMillis
|
||||||
|
// this can be useful if you get very bursty traffic and want to keep a few clients around.
|
||||||
|
// note: currently the pool will not automatically create and connect new clients up to the min; it will
// only refrain from evicting and closing idle clients so that at least the min number of clients remain in the pool.
|
||||||
|
// the default is 0 which disables this behavior.
|
||||||
|
min?: number
|
||||||
|
|
||||||
|
// Default behavior is the pool will keep clients open & connected to the backend
|
||||||
|
// until idleTimeoutMillis expire for each client and node will maintain a ref
|
||||||
|
// to the socket on the client, keeping the event loop alive until all clients are closed
|
||||||
|
// after being idle or the pool is manually shutdown with `pool.end()`.
|
||||||
|
//
|
||||||
|
// Setting `allowExitOnIdle: true` in the config will allow the node event loop to exit
|
||||||
|
// as soon as all clients in the pool are idle, even if their socket is still open
|
||||||
|
// to the postgres server. This can be handy in scripts & tests
|
||||||
|
// where you don't want to wait for your clients to go idle before your process exits.
|
||||||
|
allowExitOnIdle?: boolean
|
||||||
|
|
||||||
|
// Sets a max overall life for the connection.
|
||||||
|
// A value of 60 would evict connections that have been around for over 60 seconds,
|
||||||
|
// regardless of whether they are idle. It's useful to force rotation of connection pools through
|
||||||
|
// middleware so that you can rotate the underlying servers. The default is disabled (value of zero)
|
||||||
|
maxLifetimeSeconds?: number
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
Here is an example of creating a new pool with a configuration object:
|
||||||
|
|
||||||
|
```js
|
||||||
|
import { Pool } from 'pg'
|
||||||
|
|
||||||
|
const pool = new Pool({
|
||||||
|
host: 'localhost',
|
||||||
|
user: 'database-user',
|
||||||
|
max: 20,
|
||||||
|
idleTimeoutMillis: 30000,
|
||||||
|
connectionTimeoutMillis: 2000,
|
||||||
|
maxLifetimeSeconds: 60
|
||||||
|
})
|
||||||
|
```
|
||||||
|
|
||||||
|
## pool.query
|
||||||
|
|
||||||
|
Often we only need to run a single query on the database, so as a convenience the pool has a method to run a query on the first available idle client and return its result.
|
||||||
|
|
||||||
|
```ts
|
||||||
|
pool.query(text: string, values?: any[]) => Promise<pg.Result>
|
||||||
|
```
|
||||||
|
|
||||||
|
```js
|
||||||
|
import { Pool } from 'pg'
|
||||||
|
|
||||||
|
const pool = new Pool()
|
||||||
|
|
||||||
|
const result = await pool.query('SELECT $1::text as name', ['brianc'])
|
||||||
|
console.log(result.rows[0].name) // brianc
|
||||||
|
```
|
||||||
|
|
||||||
|
Notice in the example above there is no need to check out or release a client. The pool is doing the acquiring and releasing internally. I find `pool.query` to be a handy shortcut in many situations and I use it exclusively unless I need a transaction.
|
||||||
|
|
||||||
|
<Alert>
|
||||||
|
<div>
|
||||||
|
Do <strong>not</strong> use <code>pool.query</code> if you are using a transaction.
|
||||||
|
</div>
|
||||||
|
The pool will dispatch every query passed to pool.query on the first available idle client. Transactions within PostgreSQL
|
||||||
|
are scoped to a single client and so dispatching individual queries within a single transaction across multiple, random
|
||||||
|
clients will cause big problems in your app and not work. For more info please read <a href="/features/transactions">
|
||||||
|
transactions
|
||||||
|
</a>.
|
||||||
|
</Alert>
|
||||||
|
|
||||||
|
## pool.connect
|
||||||
|
|
||||||
|
`pool.connect() => Promise<pg.Client>`
|
||||||
|
|
||||||
|
Acquires a client from the pool.
|
||||||
|
|
||||||
|
- If there are idle clients in the pool one will be returned on `process.nextTick`.
- If the pool is not full but all current clients are checked out a new client will be created & returned.
- If the pool is 'full' and all clients are currently checked out, requests will wait in a FIFO queue until a client becomes available by being released back to the pool.
|
||||||
|
|
||||||
|
```js
|
||||||
|
import { Pool } from 'pg'
|
||||||
|
|
||||||
|
const pool = new Pool()
|
||||||
|
|
||||||
|
const client = await pool.connect()
|
||||||
|
await client.query('SELECT NOW()')
|
||||||
|
client.release()
|
||||||
|
```
|
||||||
|
|
||||||
|
### releasing clients
|
||||||
|
|
||||||
|
`client.release(destroy?: boolean) => void`
|
||||||
|
|
||||||
|
Client instances returned from `pool.connect` will have a `release` method which will release them from the pool.
|
||||||
|
|
||||||
|
The `release` method on an acquired client returns it back to the pool. If you pass a truthy value in the `destroy` parameter, instead of releasing the client to the pool, the pool will be instructed to disconnect and destroy this client, leaving a space within itself for a new client.
|
||||||
|
|
||||||
|
```js
|
||||||
|
import { Pool } from 'pg'
|
||||||
|
|
||||||
|
const pool = new Pool()
|
||||||
|
|
||||||
|
// check out a single client
|
||||||
|
const client = await pool.connect()
|
||||||
|
|
||||||
|
// release the client
|
||||||
|
client.release()
|
||||||
|
```
|
||||||
|
|
||||||
|
```js
|
||||||
|
import { Pool } from 'pg'
|
||||||
|
|
||||||
|
const pool = new Pool()
|
||||||
|
assert(pool.totalCount === 0)
|
||||||
|
assert(pool.idleCount === 0)
|
||||||
|
|
||||||
|
const client = await pool.connect()
|
||||||
|
await client.query('SELECT NOW()')
|
||||||
|
assert(pool.totalCount === 1)
|
||||||
|
assert(pool.idleCount === 0)
|
||||||
|
|
||||||
|
// tell the pool to destroy this client
|
||||||
|
await client.release(true)
|
||||||
|
assert(pool.idleCount === 0)
|
||||||
|
assert(pool.totalCount === 0)
|
||||||
|
```
|
||||||
|
|
||||||
|
<Alert>
|
||||||
|
<div>
|
||||||
|
You <strong>must</strong> release a client when you are finished with it.
|
||||||
|
</div>
|
||||||
|
If you forget to release the client then your application will quickly exhaust available, idle clients in the pool and
|
||||||
|
all further calls to <code>pool.connect</code> will timeout with an error or hang indefinitely if you have <code>
|
||||||
|
connectionTimeoutMillis
|
||||||
|
</code> configured to 0.
|
||||||
|
</Alert>
|
||||||
|
|
||||||
|
## pool.end
|
||||||
|
|
||||||
|
Calling `pool.end` will drain the pool of all active clients, disconnect them, and shut down any internal timers in the pool. It is common to call this at the end of a script using the pool or when your process is attempting to shut down cleanly.
|
||||||
|
|
||||||
|
```js
|
||||||
|
// again both promises and callbacks are supported:
|
||||||
|
import { Pool } from 'pg'
|
||||||
|
|
||||||
|
const pool = new Pool()
|
||||||
|
|
||||||
|
await pool.end()
|
||||||
|
```
|
||||||
|
|
||||||
|
## properties
|
||||||
|
|
||||||
|
`pool.totalCount: number`
|
||||||
|
|
||||||
|
The total number of clients existing within the pool.
|
||||||
|
|
||||||
|
`pool.idleCount: number`
|
||||||
|
|
||||||
|
The number of clients which are not checked out but are currently idle in the pool.
|
||||||
|
|
||||||
|
`pool.waitingCount: number`
|
||||||
|
|
||||||
|
The number of queued requests waiting on a client when all clients are checked out. It can be helpful to monitor this number to see if you need to adjust the size of the pool.
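These counters can be read at any time; below is a small hypothetical monitoring sketch (the interval and log format are assumptions, not part of the API):

```js
// log pool statistics every 10 seconds
setInterval(() => {
  console.log('total:', pool.totalCount, 'idle:', pool.idleCount, 'waiting:', pool.waitingCount)
}, 10000)
```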
|
||||||
|
|
||||||
|
## events
|
||||||
|
|
||||||
|
`Pool` instances are also instances of [`EventEmitter`](https://nodejs.org/api/events.html).
|
||||||
|
|
||||||
|
### connect
|
||||||
|
|
||||||
|
`pool.on('connect', (client: Client) => void) => void`
|
||||||
|
|
||||||
|
Whenever the pool establishes a new client connection to the PostgreSQL backend it will emit the `connect` event with the newly connected client. This presents an opportunity for you to run setup commands on a client.
|
||||||
|
|
||||||
|
```js
|
||||||
|
const pool = new Pool()
|
||||||
|
pool.on('connect', (client) => {
|
||||||
|
client.query('SET DATESTYLE = iso, mdy')
|
||||||
|
})
|
||||||
|
```
|
||||||
|
|
||||||
|
### acquire
|
||||||
|
|
||||||
|
`pool.on('acquire', (client: Client) => void) => void`
|
||||||
|
|
||||||
|
Whenever a client is checked out from the pool the pool will emit the `acquire` event with the client that was acquired.
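A hypothetical use of this event for instrumentation (the counter is an assumption for illustration, not part of the API):

```js
import { Pool } from 'pg'

const pool = new Pool()

let checkouts = 0
pool.on('acquire', (client) => {
  checkouts++
  console.log('clients checked out so far:', checkouts)
})
```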
|
||||||
|
|
||||||
|
### error
|
||||||
|
|
||||||
|
`pool.on('error', (err: Error, client: Client) => void) => void`
|
||||||
|
|
||||||
|
When a client is sitting idly in the pool it can still emit errors because it is connected to a live backend.
|
||||||
|
|
||||||
|
If the backend goes down or a network partition is encountered all the idle, connected clients in your application will emit an error _through_ the pool's error event emitter.
|
||||||
|
|
||||||
|
The error listener is passed the error as the first argument and the client upon which the error occurred as the 2nd argument. The client will be automatically terminated and removed from the pool; it is only passed to the error handler in case you want to inspect it.
|
||||||
|
|
||||||
|
<Alert>
|
||||||
|
<div>You probably want to add an event listener to the pool to catch background errors!</div>
|
||||||
|
Just like other event emitters, if a pool emits an <code>error</code> event and no listeners are added node will emit an
|
||||||
|
uncaught error and potentially crash your node process.
|
||||||
|
</Alert>
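A minimal sketch of attaching such a listener; what you do inside the handler is up to your application, logging here is just an assumption:

```js
import { Pool } from 'pg'

const pool = new Pool()

pool.on('error', (err, client) => {
  // the client has already been terminated and removed from the pool;
  // it is passed only so you can inspect it
  console.error('error on idle client', err.message)
})
```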
|
||||||
|
|
||||||
|
### release
|
||||||
|
|
||||||
|
`pool.on('release', (err: Error, client: Client) => void) => void`
|
||||||
|
|
||||||
|
Whenever a client is released back into the pool, the pool will emit the `release` event.
|
||||||
|
|
||||||
|
### remove
|
||||||
|
|
||||||
|
`pool.on('remove', (client: Client) => void) => void`
|
||||||
|
|
||||||
|
Whenever a client is closed & removed from the pool the pool will emit the `remove` event.
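For example, a hypothetical sketch observing the release and remove events documented above:

```js
import { Pool } from 'pg'

const pool = new Pool()

pool.on('release', (err, client) => {
  console.log('client released back into the pool')
})

pool.on('remove', (client) => {
  console.log('client removed; total clients now:', pool.totalCount)
})
```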
|
||||||
53
docs/pages/apis/result.mdx
Normal file
@ -0,0 +1,53 @@
|
|||||||
|
---
|
||||||
|
title: pg.Result
|
||||||
|
slug: /apis/result
|
||||||
|
---
|
||||||
|
|
||||||
|
The `pg.Result` shape is returned for every successful query.
|
||||||
|
|
||||||
|
<div className="alert alert-info">note: you cannot instantiate this directly</div>
|
||||||
|
|
||||||
|
## properties
|
||||||
|
|
||||||
|
### `result.rows: Array<any>`
|
||||||
|
|
||||||
|
Every result will have a rows array. If no rows are returned the array will be empty. Otherwise the array will contain one item for each row returned from the query. By default node-postgres creates a map from the name to value of each column, giving you a json-like object back for each row.
|
||||||
|
|
||||||
|
### `result.fields: Array<FieldInfo>`
|
||||||
|
|
||||||
|
Every result will have a fields array. This array contains the `name` and `dataTypeID` of each field in the result. These fields are ordered in the same order as the columns if you are using `rowMode: 'array'` for the query:
|
||||||
|
|
||||||
|
```js
|
||||||
|
import pg from 'pg'
|
||||||
|
const { Pool } = pg
|
||||||
|
|
||||||
|
const pool = new Pool()
|
||||||
|
|
||||||
|
const client = await pool.connect()
|
||||||
|
const result = await client.query({
|
||||||
|
rowMode: 'array',
|
||||||
|
text: 'SELECT 1 as one, 2 as two;',
|
||||||
|
})
|
||||||
|
console.log(result.fields[0].name) // one
|
||||||
|
console.log(result.fields[1].name) // two
|
||||||
|
console.log(result.rows) // [ [ 1, 2 ] ]
|
||||||
|
await client.end()
|
||||||
|
```
|
||||||
|
|
||||||
|
### `result.command: string`
|
||||||
|
|
||||||
|
The command type last executed: `INSERT` `UPDATE` `CREATE` `SELECT` etc.
|
||||||
|
|
||||||
|
### `result.rowCount: int | null`
|
||||||
|
|
||||||
|
The number of rows processed by the last command. Can be `null` for commands that never affect rows, such as the `LOCK` command. More specifically, some commands, including `LOCK`, only return a command tag of the form `COMMAND`, without any `[ROWS]` field to parse. For such commands `rowCount` will be `null`.
|
||||||
|
|
||||||
|
_note: this does not reflect the number of rows __returned__ from a query. e.g. an update statement could update many rows (so high `result.rowCount` value) but `result.rows.length` would be zero. To check for an empty query response on a `SELECT` query use `result.rows.length === 0`_.
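A short sketch of that distinction (the table and column are hypothetical, and an already-connected `client` is assumed):

```js
// an UPDATE without RETURNING modifies rows but returns none
const res = await client.query('UPDATE users SET active = true WHERE active = false')
console.log(res.command)     // 'UPDATE'
console.log(res.rowCount)    // number of rows the server modified
console.log(res.rows.length) // 0
```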
|
||||||
|
|
||||||
|
[@sehrope](https://github.com/brianc/node-postgres/issues/2182#issuecomment-620553915) has a good explanation:
|
||||||
|
|
||||||
|
The `rowCount` is populated from the command tag supplied by the PostgreSQL server. It's generally of the form: `COMMAND [OID] [ROWS]`
|
||||||
|
|
||||||
|
For DML commands (INSERT, UPDATE, etc), it reflects how many rows the server modified to process the command. For SELECT or COPY commands it reflects how many rows were retrieved or copied. More info on the specifics here: https://www.postgresql.org/docs/current/protocol-message-formats.html (search for CommandComplete for the message type)
|
||||||
|
|
||||||
|
The note in the docs about the difference is because that value is controlled by the server. It's possible for a non-standard server (ex: PostgreSQL fork) or a server version in the future to provide different information in some situations so it'd be best not to rely on it to assume that the rows array length matches the `rowCount`. It's fine to use it for DML counts though.
|
||||||
6
docs/pages/apis/types.mdx
Normal file
@ -0,0 +1,6 @@
---
title: Types
slug: /apis/types
---

These docs are incomplete, for now please reference [pg-types docs](https://github.com/brianc/node-pg-types).
33
docs/pages/apis/utilities.mdx
Normal file
@ -0,0 +1,33 @@
|
|||||||
|
---
|
||||||
|
title: Utilities
|
||||||
|
---
|
||||||
|
import { Alert } from '/components/alert.tsx'
|
||||||
|
|
||||||
|
## Utility Functions
|
||||||
|
### pg.escapeIdentifier
|
||||||
|
|
||||||
|
Escapes a string as a [SQL identifier](https://www.postgresql.org/docs/current/sql-syntax-lexical.html#SQL-SYNTAX-IDENTIFIERS).
|
||||||
|
|
||||||
|
```js
|
||||||
|
import { escapeIdentifier } from 'pg';
|
||||||
|
const escapedIdentifier = escapeIdentifier('FooIdentifier')
|
||||||
|
console.log(escapedIdentifier) // '"FooIdentifier"'
|
||||||
|
```
|
||||||
|
|
||||||
|
<Alert>
|
||||||
|
**Note**: When using an identifier that is the result of this function in an operation like `CREATE TABLE ${escapeIdentifier(identifier)}`, the table that is created will be CASE SENSITIVE. If you use any capital letters in the escaped identifier, you must always refer to the created table like `SELECT * from "MyCaseSensitiveTable"`; queries like `SELECT * FROM MyCaseSensitiveTable` will result in a "Non-existent table" error since case information is stripped from the query.
|
||||||
|
</Alert>
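To make the case-sensitivity note above concrete, here is a hypothetical sketch (the table name is made up, and an already-connected `client` is assumed):

```js
import { escapeIdentifier } from 'pg'

const table = escapeIdentifier('MyCaseSensitiveTable')
await client.query(`CREATE TABLE ${table} (id int)`)

// the quoted, case-sensitive name works
await client.query('SELECT * FROM "MyCaseSensitiveTable"')

// the unquoted name is folded to lower case and will fail:
// error: relation "mycasesensitivetable" does not exist
// await client.query('SELECT * FROM MyCaseSensitiveTable')
```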
|
||||||
|
|
||||||
|
### pg.escapeLiteral
|
||||||
|
|
||||||
|
<Alert>
|
||||||
|
**Note**: Instead of manually escaping SQL literals, it is recommended to use parameterized queries. Refer to [parameterized queries](/features/queries#parameterized-query) and the [client.query](/apis/client#clientquery) API for more information.
|
||||||
|
</Alert>
|
||||||
|
|
||||||
|
Escapes a string as a [SQL literal](https://www.postgresql.org/docs/current/sql-syntax-lexical.html#SQL-SYNTAX-CONSTANTS).
|
||||||
|
|
||||||
|
```js
|
||||||
|
import { escapeLiteral } from 'pg';
|
||||||
|
const escapedLiteral = escapeLiteral("hello 'world'")
|
||||||
|
console.log(escapedLiteral) // "'hello ''world'''"
|
||||||
|
```
|
||||||
11
docs/pages/features/_meta.json
Normal file
@ -0,0 +1,11 @@
{
  "connecting": "Connecting",
  "queries": "Queries",
  "pooling": "Pooling",
  "transactions": "Transactions",
  "types": "Data Types",
  "ssl": "SSL",
  "native": "Native",
  "esm": "ESM",
  "callbacks": "Callbacks"
}
39
docs/pages/features/callbacks.mdx
Normal file
@ -0,0 +1,39 @@
|
|||||||
|
---
|
||||||
|
title: Callbacks
|
||||||
|
---
|
||||||
|
|
||||||
|
## Callback Support
|
||||||
|
|
||||||
|
`async` / `await` is the preferred way to write async code these days with node, but callbacks are supported in the `pg` module and the `pg-pool` module. To use them, pass a callback function as the last argument to methods like `pool.query`, `pool.connect`, `client.connect`, and `client.query`; the callback will be invoked when the operation completes and a promise will not be returned:
|
||||||
|
|
||||||
|
|
||||||
|
```js
|
||||||
|
const { Pool, Client } = require('pg')
|
||||||
|
|
||||||
|
// pool
|
||||||
|
const pool = new Pool()
|
||||||
|
// run a query on an available client
|
||||||
|
pool.query('SELECT NOW()', (err, res) => {
|
||||||
|
console.log(err, res)
|
||||||
|
})
|
||||||
|
|
||||||
|
// check out a client to do something more complex like a transaction
|
||||||
|
pool.connect((err, client, release) => {
|
||||||
|
client.query('SELECT NOW()', (err, res) => {
|
||||||
|
release()
|
||||||
|
console.log(err, res)
|
||||||
|
pool.end()
|
||||||
|
})
|
||||||
|
|
||||||
|
})
|
||||||
|
|
||||||
|
// single client
|
||||||
|
const client = new Client()
|
||||||
|
client.connect((err) => {
|
||||||
|
if (err) throw err
|
||||||
|
client.query('SELECT NOW()', (err, res) => {
|
||||||
|
console.log(err, res)
|
||||||
|
client.end()
|
||||||
|
})
|
||||||
|
})
|
||||||
|
```
|
||||||
157
docs/pages/features/connecting.mdx
Normal file
@ -0,0 +1,157 @@
|
|||||||
|
---
|
||||||
|
title: Connecting
|
||||||
|
---
|
||||||
|
|
||||||
|
## Environment variables
|
||||||
|
|
||||||
|
node-postgres uses the same [environment variables](https://www.postgresql.org/docs/9.1/static/libpq-envars.html) as libpq and psql to connect to a PostgreSQL server. Both individual clients & pools will use these environment variables. Here's a tiny program connecting node.js to the PostgreSQL server:
|
||||||
|
|
||||||
|
```js
|
||||||
|
import pg from 'pg'
|
||||||
|
const { Pool, Client } = pg
|
||||||
|
|
||||||
|
// pools will use environment variables
|
||||||
|
// for connection information
|
||||||
|
const pool = new Pool()
|
||||||
|
|
||||||
|
// you can also use async/await
|
||||||
|
const res = await pool.query('SELECT NOW()')
|
||||||
|
await pool.end()
|
||||||
|
|
||||||
|
// clients will also use environment variables
|
||||||
|
// for connection information
|
||||||
|
const client = new Client()
|
||||||
|
await client.connect()
|
||||||
|
|
||||||
|
const clientRes = await client.query('SELECT NOW()')
|
||||||
|
await client.end()
|
||||||
|
```
|
||||||
|
|
||||||
|
To run the above program and specify which database to connect to we can invoke it like so:
|
||||||
|
|
||||||
|
```sh
|
||||||
|
$ PGUSER=dbuser \
|
||||||
|
PGPASSWORD=secretpassword \
|
||||||
|
PGHOST=database.server.com \
|
||||||
|
PGPORT=3211 \
|
||||||
|
PGDATABASE=mydb \
|
||||||
|
node script.js
|
||||||
|
```
|
||||||
|
|
||||||
|
This allows us to write our programs without having to specify connection information in the program and lets us reuse them to connect to different databases without having to modify the code.
|
||||||
|
|
||||||
|
The default values for the environment variables used are:
|
||||||
|
|
||||||
|
```
|
||||||
|
PGUSER=process.env.USER
|
||||||
|
PGPASSWORD=null
|
||||||
|
PGHOST=localhost
|
||||||
|
PGPORT=5432
|
||||||
|
PGDATABASE=process.env.USER
|
||||||
|
```
|
||||||
|
|
||||||
|
## Programmatic
|
||||||
|
|
||||||
|
node-postgres also supports configuring a pool or client programmatically with connection information. Here's our same script from above modified to use programmatic (hard-coded in this case) values. This can be useful if your application already has a way to manage config values or you don't want to use environment variables.
|
||||||
|
|
||||||
|
```js
|
||||||
|
import pg from 'pg'
|
||||||
|
const { Pool, Client } = pg
|
||||||
|
|
||||||
|
const pool = new Pool({
|
||||||
|
user: 'dbuser',
|
||||||
|
password: 'secretpassword',
|
||||||
|
host: 'database.server.com',
|
||||||
|
port: 3211,
|
||||||
|
database: 'mydb',
|
||||||
|
})
|
||||||
|
|
||||||
|
console.log(await pool.query('SELECT NOW()'))
|
||||||
|
|
||||||
|
const client = new Client({
|
||||||
|
user: 'dbuser',
|
||||||
|
password: 'secretpassword',
|
||||||
|
host: 'database.server.com',
|
||||||
|
port: 3211,
|
||||||
|
database: 'mydb',
|
||||||
|
})
|
||||||
|
|
||||||
|
await client.connect()
|
||||||
|
|
||||||
|
console.log(await client.query('SELECT NOW()'))
|
||||||
|
|
||||||
|
await client.end()
|
||||||
|
```
|
||||||
|
|
||||||
|
Many cloud providers include alternative methods for connecting to database instances using short-lived authentication tokens. node-postgres supports dynamic passwords via a callback function, either synchronous or asynchronous. The callback function must resolve to a string.
|
||||||
|
|
||||||
|
```js
|
||||||
|
import pg from 'pg'
|
||||||
|
const { Pool } = pg
|
||||||
|
import { RDS } from 'aws-sdk'
|
||||||
|
|
||||||
|
const signerOptions = {
|
||||||
|
credentials: {
|
||||||
|
accessKeyId: 'YOUR-ACCESS-KEY',
|
||||||
|
secretAccessKey: 'YOUR-SECRET-ACCESS-KEY',
|
||||||
|
},
|
||||||
|
region: 'us-east-1',
|
||||||
|
hostname: 'example.aslfdewrlk.us-east-1.rds.amazonaws.com',
|
||||||
|
port: 5432,
|
||||||
|
username: 'api-user',
|
||||||
|
}
|
||||||
|
|
||||||
|
const signer = new RDS.Signer(signerOptions)
|
||||||
|
|
||||||
|
const getPassword = () => signer.getAuthToken()
|
||||||
|
|
||||||
|
const pool = new Pool({
|
||||||
|
user: signerOptions.username,
|
||||||
|
password: getPassword,
|
||||||
|
host: signerOptions.hostname,
|
||||||
|
port: signerOptions.port,
|
||||||
|
database: 'my-db',
|
||||||
|
})
|
||||||
|
```
|
||||||
|
|
||||||
|
### Unix Domain Sockets
|
||||||
|
|
||||||
|
Connections to unix sockets can also be made. This can be useful on distros like Ubuntu, where authentication is managed via the socket connection instead of a password.
|
||||||
|
|
||||||
|
```js
|
||||||
|
import pg from 'pg'
|
||||||
|
const { Client } = pg
|
||||||
|
const client = new Client({
|
||||||
|
user: 'username',
|
||||||
|
password: 'password',
|
||||||
|
host: '/cloudsql/myproject:zone:mydb',
|
||||||
|
database: 'database_name',
|
||||||
|
})
|
||||||
|
```
|
||||||
|
|
||||||
|
## Connection URI
|
||||||
|
|
||||||
|
You can initialize both a pool and a client with a connection string URI as well. This is common in environments like Heroku where the database connection string is supplied to your application dyno through an environment variable. Connection string parsing brought to you by [pg-connection-string](https://github.com/brianc/node-postgres/tree/master/packages/pg-connection-string).
|
||||||
|
|
||||||
|
```js
|
||||||
|
import pg from 'pg'
|
||||||
|
const { Pool, Client } = pg
|
||||||
|
const connectionString = 'postgresql://dbuser:secretpassword@database.server.com:3211/mydb'
|
||||||
|
|
||||||
|
const pool = new Pool({
|
||||||
|
connectionString,
|
||||||
|
})
|
||||||
|
|
||||||
|
await pool.query('SELECT NOW()')
|
||||||
|
await pool.end()
|
||||||
|
|
||||||
|
const client = new Client({
|
||||||
|
connectionString,
|
||||||
|
})
|
||||||
|
|
||||||
|
await client.connect()
|
||||||
|
|
||||||
|
await client.query('SELECT NOW()')
|
||||||
|
|
||||||
|
await client.end()
|
||||||
|
```
|
||||||
37
docs/pages/features/esm.mdx
Normal file
@ -0,0 +1,37 @@
|
|||||||
|
---
|
||||||
|
title: ESM
|
||||||
|
---
|
||||||
|
|
||||||
|
## ESM Support
|
||||||
|
|
||||||
|
As of v8.15.x node-postgres supports the __ECMAScript Module__ (ESM) format. This means you can use named `import` statements instead of `require` or `import pg from 'pg'`.
|
||||||
|
|
||||||
|
CommonJS modules are still supported. The ESM format is an opt-in feature and will not affect existing codebases that use CommonJS.
|
||||||
|
|
||||||
|
The docs have been changed to show ESM usage, but in a CommonJS context you can still use the same code, you just need to change the import format.
|
||||||
|
|
||||||
|
If you're using CommonJS, you can use the following code to import the `pg` module:
|
||||||
|
|
||||||
|
```js
|
||||||
|
const pg = require('pg')
|
||||||
|
const { Client } = pg
|
||||||
|
// etc...
|
||||||
|
```
|
||||||
|
|
||||||
|
### ESM Usage
|
||||||
|
|
||||||
|
If you're using ESM, you can use the following code to import the `pg` module:
|
||||||
|
|
||||||
|
```js
|
||||||
|
import { Client } from 'pg'
|
||||||
|
// etc...
|
||||||
|
```
|
||||||
|
|
||||||
|
|
||||||
|
Previously if you were using ESM you would have to use the following code:
|
||||||
|
|
||||||
|
```js
|
||||||
|
import pg from 'pg'
|
||||||
|
const { Client } = pg
|
||||||
|
// etc...
|
||||||
|
```
|
||||||
29
docs/pages/features/native.mdx
Normal file
@ -0,0 +1,29 @@
|
|||||||
|
---
|
||||||
|
title: Native Bindings
|
||||||
|
slug: /features/native
|
||||||
|
metaTitle: bar
|
||||||
|
---
|
||||||
|
|
||||||
|
Native bindings between node.js & [libpq](https://www.postgresql.org/docs/9.5/static/libpq.html) are provided by the [node-pg-native](https://github.com/brianc/node-pg-native) package. node-postgres can consume this package & use the native bindings to access the PostgreSQL server while giving you the same interface that is used with the JavaScript version of the library.
|
||||||
|
|
||||||
|
To use the native bindings first you'll need to install them:
|
||||||
|
|
||||||
|
```sh
|
||||||
|
$ npm install pg pg-native
|
||||||
|
```
|
||||||
|
|
||||||
|
Once `pg-native` is installed instead of requiring a `Client` or `Pool` constructor from `pg` you do the following:
|
||||||
|
|
||||||
|
```js
|
||||||
|
import pg from 'pg'
|
||||||
|
const { native } = pg
|
||||||
|
const { Client, Pool } = native
|
||||||
|
```
|
||||||
|
|
||||||
|
When you access the `.native` property on `'pg'` it will automatically require the `pg-native` package and wrap it in the same API.
|
||||||
|
|
||||||
|
<div class='alert alert-warning'>
|
||||||
|
Care has been taken to normalize between the two, but there might still be edge cases where things behave subtly differently due to the nature of using libpq over handling the binary protocol directly in JavaScript, so it's recommended you choose either the JavaScript driver or the native bindings for both development and production. For what it's worth: I use the pure JavaScript driver because the JavaScript driver is more portable (doesn't need a compiler), and the pure JavaScript driver is <em>plenty</em> fast.
|
||||||
|
</div>
|
||||||
|
|
||||||
|
Some of the modules using advanced features of PostgreSQL such as [pg-query-stream](https://github.com/brianc/node-pg-query-stream), [pg-cursor](https://github.com/brianc/node-pg-cursor), and [pg-copy-streams](https://github.com/brianc/node-pg-copy-streams) need to operate directly on the binary stream and therefore are incompatible with the native bindings.
|
||||||
109
docs/pages/features/pooling.mdx
Normal file
@ -0,0 +1,109 @@
|
|||||||
|
---
|
||||||
|
title: Pooling
|
||||||
|
---
|
||||||
|
|
||||||
|
import { Alert } from '/components/alert.tsx'
|
||||||
|
import { Info } from '/components/info.tsx'
|
||||||
|
|
||||||
|
If you're working on a web application or other software which makes frequent queries you'll want to use a connection pool.
|
||||||
|
|
||||||
|
The easiest and by far most common way to use node-postgres is through a connection pool.
|
||||||
|
|
||||||
|
## Why?
|
||||||
|
|
||||||
|
- Connecting a new client to the PostgreSQL server requires a handshake which can take 20-30 milliseconds. During this time passwords are negotiated, SSL may be established, and configuration information is shared with the client & server. Incurring this cost _every time_ we want to execute a query would substantially slow down our application.
|
||||||
|
|
||||||
|
- The PostgreSQL server can only handle a [limited number of clients at a time](https://wiki.postgresql.org/wiki/Number_Of_Database_Connections). Depending on the available memory of your PostgreSQL server you may even crash the server if you connect an unbounded number of clients. _note: I have crashed a large production PostgreSQL server instance in RDS by opening new clients and never disconnecting them in a python application long ago. It was not fun._
|
||||||
|
|
||||||
|
- PostgreSQL can only process one query at a time on a single connected client in a first-in first-out manner. If your multi-tenant web application is using only a single connected client all queries among all simultaneous requests will be pipelined and executed serially, one after the other. No good!
|
||||||
|
|
||||||
|
### Good news
|
||||||
|
|
||||||
|
node-postgres ships with built-in connection pooling via the [pg-pool](/apis/pool) module.
|
||||||
|
|
||||||
|
## Examples
|
||||||
|
|
||||||
|
The client pool allows you to have a reusable pool of clients you can check out, use, and return. You generally want a limited number of these in your application and usually just 1. Creating an unbounded number of pools defeats the purpose of pooling at all.
|
||||||
|
|
||||||
|
### Checkout, use, and return
|
||||||
|
|
||||||
|
```js
|
||||||
|
import pg from 'pg'
|
||||||
|
const { Pool } = pg
|
||||||
|
|
||||||
|
const pool = new Pool()
|
||||||
|
|
||||||
|
// the pool will emit an error on behalf of any idle clients
|
||||||
|
// it contains if a backend error or network partition happens
|
||||||
|
pool.on('error', (err, client) => {
|
||||||
|
console.error('Unexpected error on idle client', err)
|
||||||
|
process.exit(-1)
|
||||||
|
})
|
||||||
|
|
||||||
|
const client = await pool.connect()
|
||||||
|
const res = await client.query('SELECT * FROM users WHERE id = $1', [1])
|
||||||
|
console.log(res.rows[0])
|
||||||
|
|
||||||
|
client.release()
|
||||||
|
```
|
||||||
|
|
||||||
|
<Alert>
|
||||||
|
<div>
|
||||||
|
You must <b>always</b> return the client to the pool if you successfully check it out, regardless of whether or not
|
||||||
|
there was an error with the queries you ran on the client.
|
||||||
|
</div>
|
||||||
|
If you don't release the client your application will leak them and eventually your pool will be empty forever and all
|
||||||
|
future requests to check out a client from the pool will wait forever.
|
||||||
|
</Alert>
|
||||||
|
|
||||||
|
### Single query
|
||||||
|
|
||||||
|
If you don't need a transaction or you just need to run a single query, the pool has a convenience method to run a query on any available client in the pool. This is the preferred way to query with node-postgres if you can as it removes the risk of leaking a client.
|
||||||
|
|
||||||
|
```js
|
||||||
|
import pg from 'pg'
|
||||||
|
const { Pool } = pg
|
||||||
|
|
||||||
|
const pool = new Pool()
|
||||||
|
|
||||||
|
const res = await pool.query('SELECT * FROM users WHERE id = $1', [1])
|
||||||
|
console.log('user:', res.rows[0])
|
||||||
|
```
|
||||||
|
|
||||||
|
### Shutdown
|
||||||
|
|
||||||
|
To shut down a pool call `pool.end()` on the pool. This will wait for all checked-out clients to be returned and then shut down all the clients and the pool timers.
|
||||||
|
|
||||||
|
```js
|
||||||
|
import pg from 'pg'
|
||||||
|
const { Pool } = pg
|
||||||
|
const pool = new Pool()
|
||||||
|
|
||||||
|
console.log('starting async query')
|
||||||
|
const result = await pool.query('SELECT NOW()')
|
||||||
|
console.log('async query finished')
|
||||||
|
|
||||||
|
console.log('starting callback query')
|
||||||
|
pool.query('SELECT NOW()', (err, res) => {
|
||||||
|
console.log('callback query finished')
|
||||||
|
})
|
||||||
|
|
||||||
|
console.log('calling end')
|
||||||
|
await pool.end()
|
||||||
|
console.log('pool has drained')
|
||||||
|
```
|
||||||
|
|
||||||
|
The output of the above will be:
|
||||||
|
|
||||||
|
```
|
||||||
|
starting async query
|
||||||
|
async query finished
|
||||||
|
starting callback query
|
||||||
|
calling end
|
||||||
|
callback query finished
|
||||||
|
pool has drained
|
||||||
|
```
|
||||||
|
|
||||||
|
<Info>
|
||||||
|
The pool will return errors when attempting to check out a client after you've called pool.end() on the pool.
|
||||||
|
</Info>
|
||||||
135
docs/pages/features/queries.mdx
Normal file
@ -0,0 +1,135 @@
|
|||||||
|
---
|
||||||
|
title: Queries
|
||||||
|
slug: /features/queries
|
||||||
|
---
|
||||||
|
|
||||||
|
For the sake of brevity I am using the `client.query` method instead of the `pool.query` method - both methods support the same API. In fact, `pool.query` delegates directly to `client.query` internally.
|
||||||
|
|
||||||
|
## Text only
|
||||||
|
|
||||||
|
If your query has no parameters you do not need to pass a values array to the query method:
|
||||||
|
|
||||||
|
```js
|
||||||
|
await client.query('SELECT NOW() as now')
|
||||||
|
```
|
||||||
|
|
||||||
|
## Parameterized query
|
||||||
|
|
||||||
|
If you are passing parameters to your queries you will want to avoid string concatenating parameters into the query text directly. This can (and often does) lead to sql injection vulnerabilities. node-postgres supports parameterized queries, passing your query text _unaltered_ as well as your parameters to the PostgreSQL server where the parameters are safely substituted into the query with battle-tested parameter substitution code within the server itself.
|
||||||
|
|
||||||
|
```js
|
||||||
|
const text = 'INSERT INTO users(name, email) VALUES($1, $2) RETURNING *'
|
||||||
|
const values = ['brianc', 'brian.m.carlson@gmail.com']
|
||||||
|
|
||||||
|
const res = await client.query(text, values)
|
||||||
|
console.log(res.rows[0])
|
||||||
|
// { name: 'brianc', email: 'brian.m.carlson@gmail.com' }
|
||||||
|
```
|
||||||
|
|
||||||
|
<div class="alert alert-warning">
|
||||||
|
PostgreSQL does not support parameters for identifiers. If you need to have dynamic database, schema, table, or column names (e.g. in DDL statements) use the [pg-format](https://www.npmjs.com/package/pg-format) package to safely escape these values and ensure you do not have SQL injection!
|
||||||
|
</div>
|
||||||
|
|
||||||
|
Parameters passed as the second argument to `query()` will be converted to raw data types using the following rules:
|
||||||
|
|
||||||
|
**null and undefined**
|
||||||
|
|
||||||
|
If you parameterize `null` or `undefined`, both will be converted to `null`.
|
||||||
|
|
||||||
|
**Date**
|
||||||
|
|
||||||
|
Custom conversion to a UTC date string.
|
||||||
|
|
||||||
|
**Buffer**
|
||||||
|
|
||||||
|
Buffer instances are unchanged.
|
||||||
|
|
||||||
|
**Array**
|
||||||
|
|
||||||
|
Converted to a string that describes a Postgres array. Each array item is recursively converted using the rules described here.
|
||||||
|
|
||||||
|
**Object**
|
||||||
|
|
||||||
|
If a parameterized value has the method `toPostgres` then it will be called and its return value will be used in the query.
|
||||||
|
The signature of `toPostgres` is the following:
|
||||||
|
|
||||||
|
```
|
||||||
|
toPostgres (prepareValue: (value) => any): any
|
||||||
|
```
|
||||||
|
|
||||||
|
The `prepareValue` function provided can be used to convert nested types to raw data types suitable for the database.
|
||||||
|
|
||||||
|
Otherwise if no `toPostgres` method is defined then `JSON.stringify` is called on the parameterized value.
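For example, a minimal sketch of a value with a `toPostgres` method (the point-like shape and the `locations` table are hypothetical):

```js
const point = {
  x: 1,
  y: 2,
  // node-postgres will call this instead of JSON.stringify
  toPostgres(prepareValue) {
    return `(${prepareValue(this.x)},${prepareValue(this.y)})`
  },
}

await client.query('INSERT INTO locations(pos) VALUES($1)', [point])
```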
|
||||||
|
|
||||||
|
**Everything else**
|
||||||
|
|
||||||
|
All other parameterized values will be converted by calling `value.toString` on the value.
|
||||||
|
|
||||||
|
## Query config object
|
||||||
|
|
||||||
|
`pool.query` and `client.query` both support taking a config object as an argument instead of taking a string and optional array of parameters. The same example above could also be performed like so:
|
||||||
|
|
||||||
|
```js
|
||||||
|
const query = {
|
||||||
|
text: 'INSERT INTO users(name, email) VALUES($1, $2)',
|
||||||
|
values: ['brianc', 'brian.m.carlson@gmail.com'],
|
||||||
|
}
|
||||||
|
|
||||||
|
const res = await client.query(query)
|
||||||
|
console.log(res.rows[0])
|
||||||
|
```
|
||||||
|
|
||||||
|
The query config object allows for a few more advanced scenarios:
|
||||||
|
|
||||||
|
### Prepared statements
|
||||||
|
|
||||||
|
PostgreSQL has the concept of a [prepared statement](https://www.postgresql.org/docs/9.3/static/sql-prepare.html). node-postgres supports this by supplying a `name` parameter to the query config object. If you supply a `name` parameter the query execution plan will be cached on the PostgreSQL server on a **per connection basis**. This means if you use two different connections each will have to parse & plan the query once. node-postgres handles this transparently for you: a client only requests a query to be parsed the first time that particular client has seen that query name:
|
||||||
|
|
||||||
|
```js
|
||||||
|
const query = {
|
||||||
|
// give the query a unique name
|
||||||
|
name: 'fetch-user',
|
||||||
|
text: 'SELECT * FROM user WHERE id = $1',
|
||||||
|
values: [1],
|
||||||
|
}
|
||||||
|
|
||||||
|
const res = await client.query(query)
|
||||||
|
console.log(res.rows[0])
|
||||||
|
```
|
||||||
|
|
||||||
|
In the above example the first time the client sees a query with the name `'fetch-user'` it will send a 'parse' request to the PostgreSQL server & execute the query as normal. The second time, it will skip the 'parse' request and send the _name_ of the query to the PostgreSQL server.
|
||||||
|
|
||||||
|
<div class='message is-warning'>
|
||||||
|
<div class='message-body'>
|
||||||
|
Be careful not to fall into the trap of premature optimization. Most of your queries will likely not benefit much, if at all, from using prepared statements. This is a somewhat "power user" feature of PostgreSQL that is best used when you know how to use it - namely with very complex queries with lots of joins and advanced operations like union and switch statements. I rarely use this feature in my own apps unless writing complex aggregate queries for reports and I know the reports are going to be executed very frequently.
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
### Row mode
|
||||||
|
|
||||||
|
By default node-postgres reads rows and collects them into JavaScript objects with the keys matching the column names and the values matching the corresponding row value for each column. If you do not need or do not want this behavior you can pass `rowMode: 'array'` to a query object. This will inform the result parser to bypass collecting rows into a JavaScript object, and instead will return each row as an array of values.
|
||||||
|
|
||||||
|
```js
|
||||||
|
const query = {
|
||||||
|
text: 'SELECT $1::text as first_name, $2::text as last_name',
|
||||||
|
values: ['Brian', 'Carlson'],
|
||||||
|
rowMode: 'array',
|
||||||
|
}
|
||||||
|
|
||||||
|
const res = await client.query(query)
|
||||||
|
console.log(res.fields.map(field => field.name)) // ['first_name', 'last_name']
|
||||||
|
console.log(res.rows[0]) // ['Brian', 'Carlson']
|
||||||
|
```
|
||||||
|
|
||||||
|
### Types
|
||||||
|
|
||||||
|
You can pass in a custom set of type parsers to use when parsing the results of a particular query. The `types` property must conform to the [Types](/apis/types) API. Here is an example in which every value is returned as a string:
|
||||||
|
|
||||||
|
```js
|
||||||
|
const query = {
|
||||||
|
text: 'SELECT * from some_table',
|
||||||
|
types: {
|
||||||
|
getTypeParser: () => val => val,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
```
|
||||||
66
docs/pages/features/ssl.mdx
Normal file
@ -0,0 +1,66 @@
|
|||||||
|
---
|
||||||
|
title: SSL
|
||||||
|
slug: /features/ssl
|
||||||
|
---
|
||||||
|
|
||||||
|
node-postgres supports TLS/SSL connections to your PostgreSQL server as long as the server is configured to support it. When instantiating a pool or a client you can provide an `ssl` property on the config object and it will be passed to the constructor for the [node TLSSocket](https://nodejs.org/api/tls.html#tls_class_tls_tlssocket).
|
||||||
|
|
||||||
|
## Self-signed cert
|
||||||
|
|
||||||
|
Here's an example of a configuration you can use to connect a client or a pool to a PostgreSQL server.
|
||||||
|
|
||||||
|
```js
|
||||||
|
const config = {
|
||||||
|
database: 'database-name',
|
||||||
|
host: 'host-or-ip',
|
||||||
|
// this object will be passed to the TLSSocket constructor
|
||||||
|
ssl: {
|
||||||
|
rejectUnauthorized: false,
|
||||||
|
ca: fs.readFileSync('/path/to/server-certificates/root.crt').toString(),
|
||||||
|
key: fs.readFileSync('/path/to/client-key/postgresql.key').toString(),
|
||||||
|
cert: fs.readFileSync('/path/to/client-certificates/postgresql.crt').toString(),
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
import fs from 'fs'
import { Client, Pool } from 'pg'
|
||||||
|
|
||||||
|
const client = new Client(config)
|
||||||
|
await client.connect()
|
||||||
|
console.log('connected')
|
||||||
|
await client.end()
|
||||||
|
|
||||||
|
const pool = new Pool(config)
|
||||||
|
const pooledClient = await pool.connect()
|
||||||
|
console.log('connected')
|
||||||
|
pooledClient.release()
|
||||||
|
await pool.end()
|
||||||
|
```
|
||||||
|
|
||||||
|
## Usage with `connectionString`
|
||||||
|
|
||||||
|
If you plan to use a combination of a database connection string from the environment and SSL settings in the config object directly, then you must avoid including any of `sslcert`, `sslkey`, `sslrootcert`, or `sslmode` in the connection string. If any of these options are used then the `ssl` object is replaced and any additional options provided there will be lost.
|
||||||
|
|
||||||
|
```js
|
||||||
|
const config = {
|
||||||
|
connectionString: 'postgres://user:password@host:port/db?sslmode=require',
|
||||||
|
// Beware! The ssl object is overwritten when parsing the connectionString
|
||||||
|
ssl: {
|
||||||
|
rejectUnauthorized: false,
|
||||||
|
ca: fs.readFileSync('/path/to/server-certificates/root.crt').toString(),
|
||||||
|
},
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
## Channel binding
|
||||||
|
|
||||||
|
If the PostgreSQL server offers SCRAM-SHA-256-PLUS (i.e. channel binding) for TLS/SSL connections, you can enable this as follows:
|
||||||
|
|
||||||
|
```js
|
||||||
|
const client = new Client({ ...config, enableChannelBinding: true})
|
||||||
|
```
|
||||||
|
|
||||||
|
or
|
||||||
|
|
||||||
|
```js
|
||||||
|
const pool = new Pool({ ...config, enableChannelBinding: true})
|
||||||
|
```
|
||||||
39
docs/pages/features/transactions.mdx
Normal file
@ -0,0 +1,39 @@
|
|||||||
|
---
|
||||||
|
title: Transactions
|
||||||
|
---
|
||||||
|
|
||||||
|
import { Alert } from '/components/alert.tsx'
|
||||||
|
|
||||||
|
To execute a transaction with node-postgres you simply execute `BEGIN / COMMIT / ROLLBACK` queries yourself through a client. Because node-postgres strives to be low level and un-opinionated, it doesn't provide any higher level abstractions specifically around transactions.
|
||||||
|
|
||||||
|
<Alert>
|
||||||
|
You <strong>must</strong> use the <em>same</em> client instance for all statements within a transaction. PostgreSQL
|
||||||
|
isolates a transaction to individual clients. This means if you initialize or use transactions with the{' '}
|
||||||
|
<span className="code">pool.query</span> method you <strong>will</strong> have problems. Do not use transactions with
|
||||||
|
the <span className="code">pool.query</span> method.
|
||||||
|
</Alert>
|
||||||
|
|
||||||
|
## Examples
|
||||||
|
|
||||||
|
```js
|
||||||
|
import { Pool } from 'pg'
|
||||||
|
const pool = new Pool()
|
||||||
|
|
||||||
|
const client = await pool.connect()
|
||||||
|
|
||||||
|
try {
|
||||||
|
await client.query('BEGIN')
|
||||||
|
const queryText = 'INSERT INTO users(name) VALUES($1) RETURNING id'
|
||||||
|
const res = await client.query(queryText, ['brianc'])
|
||||||
|
|
||||||
|
const insertPhotoText = 'INSERT INTO photos(user_id, photo_url) VALUES ($1, $2)'
|
||||||
|
const insertPhotoValues = [res.rows[0].id, 's3.bucket.foo']
|
||||||
|
await client.query(insertPhotoText, insertPhotoValues)
|
||||||
|
await client.query('COMMIT')
|
||||||
|
} catch (e) {
|
||||||
|
await client.query('ROLLBACK')
|
||||||
|
throw e
|
||||||
|
} finally {
|
||||||
|
client.release()
|
||||||
|
}
|
||||||
|
```
|
||||||
106
docs/pages/features/types.mdx
Normal file
@ -0,0 +1,106 @@
|
|||||||
|
---
|
||||||
|
title: Data Types
|
||||||
|
---
|
||||||
|
|
||||||
|
import { Alert } from '/components/alert.tsx'
|
||||||
|
|
||||||
|
PostgreSQL has a rich system of supported [data types](https://www.postgresql.org/docs/current/datatype.html). node-postgres does its best to support the most common data types out of the box and supplies an extensible type parser to allow for custom type serialization and parsing.
|
||||||
|
|
||||||
|
## strings by default
|
||||||
|
|
||||||
|
node-postgres will convert a database type to a JavaScript string if it doesn't have a registered type parser for the database type. Furthermore, you can send any type to the PostgreSQL server as a string and node-postgres will pass it through without modifying it in any way. To circumvent the type parsing completely do something like the following.
|
||||||
|
|
||||||
|
```js
|
||||||
|
const queryText = 'SELECT int_col::text, date_col::text, json_col::text FROM my_table'
|
||||||
|
const result = await client.query(queryText)
|
||||||
|
|
||||||
|
console.log(result.rows[0]) // will contain the unparsed string value of each column
|
||||||
|
```
|
||||||
|
|
||||||
|
## type parsing examples
|
||||||
|
|
||||||
|
### uuid + json / jsonb
|
||||||
|
|
||||||
|
There is no data type in JavaScript for a uuid/guid so node-postgres converts a uuid to a string. JavaScript has great support for JSON and node-postgres converts json/jsonb values directly into JavaScript objects via [`JSON.parse`](https://github.com/brianc/node-pg-types/blob/master/lib/textParsers.js#L193). Likewise, when you send an object to the PostgreSQL server as a query parameter, node-postgres will call [`JSON.stringify`](https://github.com/brianc/node-postgres/blob/e5f0e5d36a91a72dda93c74388ac890fa42b3be0/lib/utils.js#L47) on your outbound value, automatically converting it to json for the server.
|
||||||
|
|
||||||
|
```js
|
||||||
|
const createTableText = `
|
||||||
|
CREATE EXTENSION IF NOT EXISTS "pgcrypto";
|
||||||
|
|
||||||
|
CREATE TEMP TABLE IF NOT EXISTS users (
|
||||||
|
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||||
|
data JSONB
|
||||||
|
);
|
||||||
|
`
|
||||||
|
// create our temp table
|
||||||
|
await client.query(createTableText)
|
||||||
|
|
||||||
|
const newUser = { email: 'brian.m.carlson@gmail.com' }
|
||||||
|
// create a new user
|
||||||
|
await client.query('INSERT INTO users(data) VALUES($1)', [newUser])
|
||||||
|
|
||||||
|
const { rows } = await client.query('SELECT * FROM users')
|
||||||
|
|
||||||
|
console.log(rows)
|
||||||
|
/*
|
||||||
|
output:
|
||||||
|
[{
|
||||||
|
id: 'd70195fd-608e-42dc-b0f5-eee975a621e9',
|
||||||
|
data: { email: 'brian.m.carlson@gmail.com' }
|
||||||
|
}]
|
||||||
|
*/
|
||||||
|
```
|
||||||
|
|
||||||
|
### date / timestamp / timestamptz
|
||||||
|
|
||||||
|
node-postgres will convert instances of JavaScript date objects into the expected input value for your PostgreSQL server. Likewise, when reading a `date`, `timestamp`, or `timestamptz` column value back into JavaScript, node-postgres will parse the value into an instance of a JavaScript `Date` object.
|
||||||
|
|
||||||
|
```js
|
||||||
|
const createTableText = `
|
||||||
|
CREATE TEMP TABLE dates(
|
||||||
|
date_col DATE,
|
||||||
|
timestamp_col TIMESTAMP,
|
||||||
|
timestamptz_col TIMESTAMPTZ
|
||||||
|
);
|
||||||
|
`
|
||||||
|
// create our temp table
|
||||||
|
await client.query(createTableText)
|
||||||
|
|
||||||
|
// insert the current time into it
|
||||||
|
const now = new Date()
|
||||||
|
const insertText = 'INSERT INTO dates(date_col, timestamp_col, timestamptz_col) VALUES ($1, $2, $3)'
|
||||||
|
await client.query(insertText, [now, now, now])
|
||||||
|
|
||||||
|
// read the row back out
|
||||||
|
const result = await client.query('SELECT * FROM dates')
|
||||||
|
|
||||||
|
console.log(result.rows)
|
||||||
|
// {
|
||||||
|
// date_col: 2017-05-29T05:00:00.000Z,
|
||||||
|
// timestamp_col: 2017-05-29T23:18:13.263Z,
|
||||||
|
// timestamptz_col: 2017-05-29T23:18:13.263Z
|
||||||
|
// }
|
||||||
|
```
|
||||||
|
|
||||||
|
psql output:
|
||||||
|
|
||||||
|
```
|
||||||
|
bmc=# select * from dates;
|
||||||
|
date_col | timestamp_col | timestamptz_col
|
||||||
|
------------+-------------------------+----------------------------
|
||||||
|
2017-05-29 | 2017-05-29 18:18:13.263 | 2017-05-29 18:18:13.263-05
|
||||||
|
(1 row)
|
||||||
|
```
|
||||||
|
|
||||||
|
node-postgres converts `DATE` and `TIMESTAMP` columns into the **local** time of the node process set at `process.env.TZ`.
|
||||||
|
|
||||||
|
_note: I generally use `TIMESTAMPTZ` when storing dates; otherwise, inserting a time from a process in one timezone and reading it out in a process in another timezone can cause unexpected differences in the time._
|
||||||
|
|
||||||
|
<Alert>
|
||||||
|
<div class="message-body">
|
||||||
|
Although PostgreSQL supports microseconds in dates, JavaScript only supports dates to the millisecond precision.
|
||||||
|
Keep this in mind when you send dates to and from PostgreSQL from node: your microseconds will be truncated when
|
||||||
|
converting to a JavaScript date object even if they exist in the database. If you need to preserve them, I recommend
|
||||||
|
using a custom type parser.
|
||||||
|
</div>
|
||||||
|
</Alert>
|
||||||
6
docs/pages/guides/_meta.json
Normal file
@ -0,0 +1,6 @@
{
  "project-structure": "Suggested Code Structure",
  "async-express": "Express with Async/Await",
  "pool-sizing": "Pool Sizing",
  "upgrading": "Upgrading"
}
82
docs/pages/guides/async-express.md
Normal file
@ -0,0 +1,82 @@
|
|||||||
|
---
|
||||||
|
title: Express with async/await
|
||||||
|
---
|
||||||
|
|
||||||
|
My preferred way to use node-postgres (and all async code in node.js) is with `async/await`. I find it makes reasoning about control-flow easier and allows me to write more concise and maintainable code.
|
||||||
|
|
||||||
|
This is how I typically structure express web-applications with node-postgres to use `async/await`:
|
||||||
|
|
||||||
|
```
|
||||||
|
- app.js
|
||||||
|
- index.js
|
||||||
|
- routes/
|
||||||
|
- index.js
|
||||||
|
- photos.js
|
||||||
|
- user.js
|
||||||
|
- db/
|
||||||
|
- index.js <--- this is where I put data access code
|
||||||
|
```
|
||||||
|
|
||||||
|
That's the same structure I used in the [project structure](/guides/project-structure) example.
|
||||||
|
|
||||||
|
My `db/index.js` file usually starts out like this:
|
||||||
|
|
||||||
|
```js
|
||||||
|
import { Pool } from 'pg'
|
||||||
|
|
||||||
|
const pool = new Pool()
|
||||||
|
|
||||||
|
export const query = (text, params) => pool.query(text, params)
|
||||||
|
```
|
||||||
|
|
||||||
|
Then I will install [express-promise-router](https://www.npmjs.com/package/express-promise-router) and use it to define my routes. Here is my `routes/user.js` file:
|
||||||
|
|
||||||
|
```js
|
||||||
|
import Router from 'express-promise-router'
|
||||||
|
import * as db from '../db/index.js'
|
||||||
|
|
||||||
|
// create a new express-promise-router
|
||||||
|
// this has the same API as the normal express router except
|
||||||
|
// it allows you to use async functions as route handlers
|
||||||
|
const router = new Router()
|
||||||
|
|
||||||
|
// export our router to be mounted by the parent application
|
||||||
|
export default router
|
||||||
|
|
||||||
|
router.get('/:id', async (req, res) => {
|
||||||
|
const { id } = req.params
|
||||||
|
const { rows } = await db.query('SELECT * FROM users WHERE id = $1', [id])
|
||||||
|
res.send(rows[0])
|
||||||
|
})
|
||||||
|
```
|
||||||
|
|
||||||
|
Then in my `routes/index.js` file I'll have something like this which mounts each individual router into the main application:
|
||||||
|
|
||||||
|
```js
|
||||||
|
// ./routes/index.js
|
||||||
|
import users from './user.js'
|
||||||
|
import photos from './photos.js'
|
||||||
|
|
||||||
|
const mountRoutes = (app) => {
|
||||||
|
app.use('/users', users)
|
||||||
|
app.use('/photos', photos)
|
||||||
|
// etc..
|
||||||
|
}
|
||||||
|
|
||||||
|
export default mountRoutes
|
||||||
|
```
|
||||||
|
|
||||||
|
And finally in my `app.js` file where I bootstrap express I will have my `routes/index.js` file mount all my routes. The routes know they're using async functions but because of express-promise-router the main express app doesn't know and doesn't care!
|
||||||
|
|
||||||
|
```js
|
||||||
|
// ./app.js
|
||||||
|
import express from 'express'
|
||||||
|
import mountRoutes from './routes/index.js'
|
||||||
|
|
||||||
|
const app = express()
|
||||||
|
mountRoutes(app)
|
||||||
|
|
||||||
|
// ... more express setup stuff can follow
|
||||||
|
```
|
||||||
|
|
||||||
|
Now you've got `async/await`, node-postgres, and express all working together!
|
||||||
25
docs/pages/guides/pool-sizing.md
Normal file
@ -0,0 +1,25 @@
|
|||||||
|
---
title: Pool Sizing
---

If you're using a [pool](/apis/pool) in an application with multiple instances of your service running (common in most cloud/container environments currently), you'll need to think a bit about the `max` parameter of your pool across all services and all _instances_ of all services which are connecting to your Postgres server.

This can get pretty complex depending on your cloud environment. Further nuance is introduced with things like pg-bouncer, RDS connection proxies, etc., which will do some forms of connection pooling and connection multiplexing. So, it's definitely worth thinking about. Let's run through a few setups. While certainly not exhaustive, these examples hopefully prompt you into thinking about what's right for your setup.

## Simple apps, dev mode, fixed instance counts, etc.

If your app isn't running in a k8s-style env with containers scaling automatically, or on lambdas or cloud functions, you can do some "napkin math" for the `max` pool config you can use. Let's assume your Postgres instance is configured to have a maximum of 200 connections at any one time. You know your service is going to run on 4 instances. You could set the `max` pool size to 50, but then if all your services are saturated waiting on database connections, you won't be able to connect to the database from any mgmt tools or scale up your services without changing config/code to adjust the max size.

In this situation, I'd probably set the `max` to 20 or 25. This leaves you plenty of headroom for scaling more instances, and realistically, if your app is starved for db connections, you probably want to take a look at your queries and make them execute faster, or cache, or do something else to reduce the load on the database. I worked on a more reporting-heavy application with limited users, but each running 5-6 queries at a time which all took 100-200 milliseconds to run. In that situation, I upped the `max` to 50. Typically, though, I don't bother setting it to anything other than the default of `10` as that's usually _fine_.
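
To make that napkin math concrete, here's a minimal sketch; the numbers are hypothetical and the point is only the arithmetic, not a recommendation:

```js
import { Pool } from 'pg'

// hypothetical setup: a 200-connection Postgres server shared by 4 service instances,
// with headroom left over for admin tools and any extra instances you spin up later
const pool = new Pool({
  max: 20, // 4 instances * 20 = 80 connections at peak, well under the 200-connection limit
  idleTimeoutMillis: 30000, // hand idle connections back to the server fairly quickly
})
```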
## Auto-scaling, cloud-functions, multi-tenancy, etc.

If the number of instances of your services which connect to your database is more dynamic and based on things like load, auto-scaling containers, or running in cloud-functions, you need to be a bit more thoughtful about what your max might be. Often in these environments, there will be another database pooling proxy in front of the database like pg-bouncer or the RDS-proxy, etc. I'm not sure how all of these function exactly, and they all have some trade-offs, but let's assume you're not using a proxy. In that case I'd be pretty cautious about how large you set any individual pool. If you're running an application under pretty serious load where you need dynamic scaling or lots of lambdas spinning up and sending queries, your queries are likely fast and you should be fine setting the `max` to a low value like 10 -- or just leave it alone, since `10` is the default.
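
For the cloud-function case specifically, the usual pattern is to create one small pool per function instance, outside the handler, so warm invocations reuse connections instead of opening new ones. A sketch, assuming an AWS-Lambda-style handler (the event shape here is made up):

```js
import { Pool } from 'pg'

// created once per function instance (cold start) and reused across warm invocations
const pool = new Pool({ max: 2 }) // keep this small -- many instances may be alive at once

export const handler = async (event) => {
  // event.userId is a placeholder for whatever your function actually receives
  const { rows } = await pool.query('SELECT * FROM users WHERE id = $1', [event.userId])
  return rows[0]
}
```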
## pg-bouncer, RDS-proxy, etc.

I'm not sure of all the pooling services for Postgres. I haven't used any myself. Throughout the years of working on `pg`, I've addressed issues caused by various proxies behaving differently than an actual Postgres backend. There are also gotchas with things like transactions. On the other hand, plenty of people run these with much success. In this situation, I would just recommend using some small but reasonable `max` value like the default of `10`, as it can still be helpful to keep a few TCP sockets from your services to the Postgres proxy open.

## Conclusion, tl;dr

It's a bit of a complicated topic and doesn't have much impact on things until you need to start scaling. At that point, your number of connections _still_ probably won't be your scaling bottleneck. It's worth thinking about a bit, but mostly I'd just leave the pool size at the default of `10` until you run into trouble: hopefully you never do!
131  docs/pages/guides/project-structure.md  Normal file
@@ -0,0 +1,131 @@
---
title: Suggested Project Structure
---

Whenever I am writing a project & using node-postgres I like to create a file within it and make all interactions with the database go through this file. This serves a few purposes:

- Allows my project to adjust to any changes to the node-postgres API without having to trace down all the places I directly use node-postgres in my application.
- Allows me to have a single place to put logging and diagnostics around my database.
- Allows me to make custom extensions to my database access code & share it throughout the project.
- Allows a single place to bootstrap & configure the database.

## example

The location doesn't really matter - I've found it usually ends up being somewhat app specific and in line with whatever folder structure conventions you're using. For this example I'll use an express app structured like so:

```
- app.js
- index.js
- routes/
  - index.js
  - photos.js
  - user.js
- db/
  - index.js <--- this is where I put data access code
```
Typically I'll start out my `db/index.js` file like so:

```js
import { Pool } from 'pg'

const pool = new Pool()

export const query = (text, params) => {
  return pool.query(text, params)
}
```

That's it. But now everywhere else in my application instead of requiring `pg` directly, I'll require this file. Here's an example of a route within `routes/user.js`:

```js
// notice here I'm requiring my database adapter file
// and not requiring node-postgres directly
import * as db from '../db/index.js'

app.get('/:id', async (req, res, next) => {
  const result = await db.query('SELECT * FROM users WHERE id = $1', [req.params.id])
  res.send(result.rows[0])
})

// ... many other routes in this file
```

Imagine we have lots of routes scattered throughout many files under our `routes/` directory. We now want to go back and log every single query that's executed, how long it took, and the number of rows it returned. If we had required node-postgres directly in every route file we'd have to go edit every single route - that would take forever & be really error-prone! But thankfully we put our data access into `db/index.js`. Let's go add some logging:

```js
import { Pool } from 'pg'

const pool = new Pool()

export const query = async (text, params) => {
  const start = Date.now()
  const res = await pool.query(text, params)
  const duration = Date.now() - start
  console.log('executed query', { text, duration, rows: res.rowCount })
  return res
}
```

That was pretty quick! And now all of our queries everywhere in our application are being logged.

_note: I didn't log the query parameters. Depending on your application you might be storing encrypted passwords or other sensitive information in your database. If you log your query parameters you might accidentally log sensitive information. Every app is different though so do what suits you best!_

Now what if we need to check out a client from the pool to run several queries in a row in a transaction? We can add another method to our `db/index.js` file when we need to do this:

```js
import { Pool } from 'pg'

const pool = new Pool()

export const query = async (text, params) => {
  const start = Date.now()
  const res = await pool.query(text, params)
  const duration = Date.now() - start
  console.log('executed query', { text, duration, rows: res.rowCount })
  return res
}

export const getClient = () => {
  return pool.connect()
}
```

Okay. Great - the simplest thing that could possibly work. It seems like one of our routes that checks out a client to run a transaction is forgetting to call `release` in some situation! Oh no! We are leaking a client & have hundreds of these routes to go audit. Good thing we have all our client access going through this single file. Let's add some deeper diagnostic information here to help us track down where the client leak is happening.

```js
export const query = async (text, params) => {
  const start = Date.now()
  const res = await pool.query(text, params)
  const duration = Date.now() - start
  console.log('executed query', { text, duration, rows: res.rowCount })
  return res
}

export const getClient = async () => {
  const client = await pool.connect()
  const query = client.query
  const release = client.release
  // set a timeout of 5 seconds, after which we will log this client's last query
  const timeout = setTimeout(() => {
    console.error('A client has been checked out for more than 5 seconds!')
    console.error(`The last executed query on this client was: ${client.lastQuery}`)
  }, 5000)
  // monkey patch the query method to keep track of the last query executed
  client.query = (...args) => {
    client.lastQuery = args
    return query.apply(client, args)
  }
  client.release = () => {
    // clear our timeout
    clearTimeout(timeout)
    // set the methods back to their old un-monkey-patched version
    client.query = query
    client.release = release
    return release.apply(client)
  }
  return client
}
```

That should hopefully give us enough diagnostic information to track down any leaks.
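
A usage sketch (not part of the original guide) of how a route might lean on `getClient` for a transaction; the route, table, and column names are hypothetical, but the `try/catch/finally` shape is the important part, since it guarantees `release` is called even when a query throws:

```js
import * as db from '../db/index.js'

app.post('/transfer', async (req, res, next) => {
  const client = await db.getClient()
  try {
    await client.query('BEGIN')
    await client.query('UPDATE accounts SET balance = balance - $1 WHERE id = $2', [req.body.amount, req.body.from])
    await client.query('UPDATE accounts SET balance = balance + $1 WHERE id = $2', [req.body.amount, req.body.to])
    await client.query('COMMIT')
    res.sendStatus(204)
  } catch (err) {
    await client.query('ROLLBACK')
    next(err)
  } finally {
    // always return the client to the pool, success or failure
    client.release()
  }
})
```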
115  docs/pages/guides/upgrading.md  Normal file
@@ -0,0 +1,115 @@
---
title: Upgrading
slug: /guides/upgrading
---

# Upgrading to 8.0

node-postgres at 8.0 introduces a breaking change to ssl-verified connections. If you connect with ssl and use

```
const client = new Client({ ssl: true })
```

and the server's SSL certificate is self-signed, connections will fail as of node-postgres 8.0. To keep the existing behavior, modify the invocation to

```
const client = new Client({ ssl: { rejectUnauthorized: false } })
```

The rest of the changes are relatively minor and unlikely to cause issues; see [the announcement](/announcements#2020-02-25) for full details.

# Upgrading to 7.0

node-postgres at 7.0 introduces somewhat significant breaking changes to the public API.

## node version support

Starting with `pg@7.0` the earliest version of node supported will be `node@4.x LTS`. Support for `node@0.12.x` and `node@0.10.x` is dropped, and the module won't work as it relies on new es6 features not available in older versions of node.

## pg singleton

In the past there was a singleton pool manager attached to the root `pg` object in the package. This singleton could be used to provision connection pools automatically by calling `pg.connect`. This API caused a lot of confusion for users. It also introduced an opaque, module-managed singleton which was difficult to reason about and debug, error-prone, and inflexible. Starting in pg@6.0 the methods' documentation was removed, and starting in pg@6.3 the methods were deprecated with a warning message.

If your application still relies on these they will be _gone_ in `pg@7.0`. In order to migrate you can do the following:

```js
// old way, deprecated in 6.3.0:

// connection using global singleton
pg.connect(function (err, client, done) {
  client.query(/* etc, etc */)
  done()
})

// singleton pool shutdown
pg.end()

// ------------------

// new way, available since 6.0.0:

// create a pool
const pool = new pg.Pool()

// connection using created pool
pool.connect(function (err, client, done) {
  client.query(/* etc, etc */)
  done()
})

// pool shutdown
pool.end()
```

node-postgres ships with a built-in pool object provided by [pg-pool](https://github.com/brianc/node-pg-pool) which is already used internally by the `pg.connect` and `pg.end` methods. Migrating to a user-managed pool (or set of pools) allows you to more directly control their setup and life-cycle.
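
As a sketch of what "a set of pools" under your own control can look like (the names and connection details here are hypothetical, not from the changelog): one pool per logical database, plus an explicit shutdown hook instead of the old global `pg.end()`:

```js
import pg from 'pg'

// one pool per logical database -- configuration values are placeholders
const appPool = new pg.Pool({ database: 'app' })
const reportingPool = new pg.Pool({ database: 'reporting', max: 5 })

export const queryApp = (text, params) => appPool.query(text, params)
export const queryReporting = (text, params) => reportingPool.query(text, params)

// drain both pools when the process is asked to shut down
process.on('SIGTERM', async () => {
  await Promise.all([appPool.end(), reportingPool.end()])
  process.exit(0)
})
```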
## client.query(...).on

Before `pg@7.0` the `client.query` method would _always_ return an instance of a query. The query instance was an event emitter, accepted a callback, and was also a promise. A few problems...

- too many flow control options on a single object was confusing
- event emitter `.on('error')` does not mix well with promise `.catch`
- the `row` event was a common source of errors: it looks like a stream but has no support for back-pressure, misleading users into trying to pipe results or handling them in the event emitter for a desired performance gain.
- error handling with a `.done` and `.error` emitter pair for every query is cumbersome, and returning the emitter from `client.query` indicated this sort of pattern may be encouraged: it is not.

Starting with `pg@7.0` the return value of `client.query` will depend on what you pass to the method: I think this aligns more with how most node libraries handle the callback/promise combo, and I hope it will make the "just works" :tm: feeling better while reducing surface area and surprises around event emitter / callback combos.

### client.query with a callback

```js
const query = client.query('SELECT NOW()', (err, res) => {
  /* etc, etc */
})
assert(query === undefined) // true
```

If you pass a callback to the method `client.query` will return `undefined`. This limits flow control to the callback which is in-line with almost all of node's core APIs.

### client.query without a callback

```js
const query = client.query('SELECT NOW()')
assert(query instanceof Promise) // true
assert(query.on === undefined) // true
query.then((res) => {
  /* etc, etc */
})
```

If you do **not** pass a callback `client.query` will return an instance of a `Promise`. This will **not** be a query instance and will not be an event emitter. This is in line with how most promise-based APIs work in node.

### client.query(Submittable)

`client.query` has always accepted any object that has a `.submit` method on it. In this scenario the client calls `.submit` on the object, delegating execution responsibility to it. In this situation the client also **returns the instance it was passed**. This is how [pg-cursor](https://github.com/brianc/node-pg-cursor) and [pg-query-stream](https://github.com/brianc/node-pg-query-stream) work. So, if you need the event emitter functionality on your queries for some reason, it is still possible because `Query` is an instance of `Submittable`:

```js
import pg from 'pg'
const { Client, Query } = pg
const query = client.query(new Query('SELECT NOW()'))
query.on('row', (row) => {})
query.on('end', (res) => {})
query.on('error', (res) => {})
```

`Query` is considered a public, documented part of the API of node-postgres and this form will be supported indefinitely.

_note: I have been building apps with node-postgres for almost 7 years. In that time I have never used the event emitter API as the primary way to execute queries. I used to use callbacks and now I use async/await. If you need to stream results I highly recommend you use [pg-cursor](https://github.com/brianc/node-pg-cursor) or [pg-query-stream](https://github.com/brianc/node-pg-query-stream) and **not** the query object as an event emitter._
87  docs/pages/index.mdx  Normal file
@@ -0,0 +1,87 @@
---
title: Welcome
slug: /
---

import { Logo } from '/components/logo.tsx'

node-postgres is a collection of node.js modules for interfacing with your PostgreSQL database. It has support for callbacks, promises, async/await, connection pooling, prepared statements, cursors, streaming results, C/C++ bindings, rich type parsing, and more! Just like PostgreSQL itself there are a lot of features: this documentation aims to get you up and running quickly and in the right direction. It also tries to provide guides for more advanced & edge-case topics allowing you to tap into the full power of PostgreSQL from node.js.

## Install

```bash
$ npm install pg
```

## Supporters

node-postgres' continued development and support are made possible by the many [supporters](https://github.com/brianc/node-postgres/blob/master/SPONSORS.md).

Special thanks to [Medplum](https://www.medplum.com/) for sponsoring node-postgres for a whole year!

<a href="https://www.medplum.com/">
  <img
    alt="Medplum"
    src="https://raw.githubusercontent.com/medplum/medplum-logo/refs/heads/main/medplum-logo.png"
    style={{
      width: '300px',
      height: 'auto',
      margin: '0 auto',
      display: 'block',
    }}
  />
</a>

If you or your company would like to sponsor node-postgres stop by [GitHub Sponsors](https://github.com/sponsors/brianc) and sign up, or feel free to [email me](mailto:brian@pecanware.com) if you want to add your logo to the documentation or discuss higher tiers of sponsorship!

# Version compatibility

node-postgres strives to be compatible with all recent LTS versions of node & the most recent "stable" version. At the time of this writing node-postgres is compatible with node 18.x, 20.x, 22.x, and 24.x.

## Getting started

The simplest possible way to connect, query, and disconnect is with async/await:

```js
import { Client } from 'pg'
const client = new Client()
await client.connect()

const res = await client.query('SELECT $1::text as message', ['Hello world!'])
console.log(res.rows[0].message) // Hello world!
await client.end()
```

### Error Handling

For the sake of simplicity, these docs will assume that the methods are successful. In real life use, make sure to properly handle errors thrown in the methods. A `try/catch` block is a great way to do so:

```ts
import { Client } from 'pg'
const client = new Client()
await client.connect()

try {
  const res = await client.query('SELECT $1::text as message', ['Hello world!'])
  console.log(res.rows[0].message) // Hello world!
} catch (err) {
  console.error(err)
} finally {
  await client.end()
}
```

### Pooling

In most applications you'll want to use a [connection pool](/features/pooling) to manage your connections. This is a more advanced topic, but here's a simple example of how to use it:

```js
import { Pool } from 'pg'
const pool = new Pool()
const res = await pool.query('SELECT $1::text as message', ['Hello world!'])
console.log(res.rows[0].message) // Hello world!
```

Our real-world apps are almost always more complicated than that, and I urge you to read on!
BIN  docs/public/favicon.ico  Normal file
Binary file not shown (15 KiB).
83  docs/theme.config.js  Normal file
@@ -0,0 +1,83 @@
// theme.config.js
export default {
  project: {
    link: 'https://github.com/brianc/node-postgres',
  },
  twitter: {
    cardType: 'summary_large_image',
    site: 'https://node-postgres.com',
  },
  docsRepositoryBase: 'https://github.com/brianc/node-postgres/blob/master/docs', // base URL for the docs repository
  titleSuffix: ' – node-postgres',
  darkMode: true,
  navigation: {
    prev: true,
    next: true,
  },
  footer: {
    text: `MIT ${new Date().getFullYear()} © Brian Carlson.`,
  },
  editLink: {
    text: 'Edit this page on GitHub',
  },
  logo: (
    <>
      <svg
        version="1.0"
        xmlns="http://www.w3.org/2000/svg"
        height={48}
        width={48}
        viewBox="0 0 1024.000000 1024.000000"
        preserveAspectRatio="xMidYMid meet"
      >
        <g transform="translate(0.000000,1024.000000) scale(0.100000,-0.100000)" fill="#3c873a" stroke="none">
          <path
            d="M4990 7316 c-391 -87 -703 -397 -1003 -996 -285 -568 -477 -1260
-503 -1811 l-7 -142 -112 7 c-103 5 -207 27 -382 78 -37 11 -44 10 -63 -7 -61
-55 17 -180 177 -285 91 -60 194 -103 327 -137 l104 -26 17 -71 c44 -183 152
-441 256 -613 125 -207 322 -424 493 -541 331 -229 774 -291 1113 -156 112 45
182 94 209 147 13 24 13 35 -1 90 -22 87 -88 219 -134 267 -46 49 -79 52 -153
14 -168 -85 -360 -54 -508 83 -170 157 -244 440 -195 743 50 304 231 601 430
706 168 89 332 60 463 -81 66 -71 110 -140 197 -315 83 -166 116 -194 203
-170 88 23 370 258 637 531 411 420 685 806 808 1139 54 145 71 243 71 410 1
128 -3 157 -27 243 -86 310 -243 543 -467 690 -207 137 -440 157 -966 85
l-161 -22 -94 41 c-201 87 -327 113 -533 112 -77 -1 -166 -7 -196 -13z m-89
-1357 c15 -10 34 -38 43 -61 23 -56 13 -111 -28 -156 -59 -64 -171 -54 -216
21 -35 57 -22 145 28 190 44 40 122 43 173 6z m-234 -1361 c-46 -74 -156 -188
-249 -258 -211 -159 -459 -219 -734 -179 l-76 12 89 28 c187 60 485 229 683
388 l75 60 122 0 122 1 -32 -52z"
          />
        </g>
      </svg>
      <span style={{ fontWeight: 800 }}>node-postgres</span>
    </>
  ),
  chat: {
    link: 'https://discord.gg/2afXp5vUWm',
  },
  head: (
    <>
      <meta name="viewport" content="width=device-width, initial-scale=1.0" />
      <link rel="shortcut icon" href="/favicon.ico" />
      <meta
        name="description"
        content="node-postgres is a collection of node.js modules for interfacing with your PostgreSQL database."
      />
      <meta name="og:title" content="node-postgres" />
      <script async src="https://www.googletagmanager.com/gtag/js?id=UA-100138145-1"></script>
      <script
        dangerouslySetInnerHTML={{
          __html: `

            window.dataLayer = window.dataLayer || [];
            function gtag(){dataLayer.push(arguments);}
            gtag('js', new Date());

            gtag('config', 'UA-100138145-1');

          `,
        }}
      ></script>
    </>
  ),
}
1892  docs/yarn.lock  Normal file
File diff suppressed because it is too large.
12  lerna.json  Normal file
@@ -0,0 +1,12 @@
{
  "packages": ["packages/*"],
  "npmClient": "yarn",
  "useWorkspaces": true,
  "version": "independent",
  "command": {
    "version": {
      "allowBranch": "master"
    }
  },
  "ignoreChanges": ["**/*.md", "**/test/**"]
}
419  lib/client.js
@@ -1,419 +0,0 @@
'use strict'
|
|
||||||
/**
|
|
||||||
* Copyright (c) 2010-2017 Brian Carlson (brian.m.carlson@gmail.com)
|
|
||||||
* All rights reserved.
|
|
||||||
*
|
|
||||||
* This source code is licensed under the MIT license found in the
|
|
||||||
* README.md file in the root directory of this source tree.
|
|
||||||
*/
|
|
||||||
|
|
||||||
var EventEmitter = require('events').EventEmitter
|
|
||||||
var util = require('util')
|
|
||||||
var utils = require('./utils')
|
|
||||||
var pgPass = require('pgpass')
|
|
||||||
var TypeOverrides = require('./type-overrides')
|
|
||||||
|
|
||||||
var ConnectionParameters = require('./connection-parameters')
|
|
||||||
var Query = require('./query')
|
|
||||||
var defaults = require('./defaults')
|
|
||||||
var Connection = require('./connection')
|
|
||||||
|
|
||||||
var Client = function (config) {
|
|
||||||
EventEmitter.call(this)
|
|
||||||
|
|
||||||
this.connectionParameters = new ConnectionParameters(config)
|
|
||||||
this.user = this.connectionParameters.user
|
|
||||||
this.database = this.connectionParameters.database
|
|
||||||
this.port = this.connectionParameters.port
|
|
||||||
this.host = this.connectionParameters.host
|
|
||||||
this.password = this.connectionParameters.password
|
|
||||||
this.replication = this.connectionParameters.replication
|
|
||||||
|
|
||||||
var c = config || {}
|
|
||||||
|
|
||||||
this._types = new TypeOverrides(c.types)
|
|
||||||
this._ending = false
|
|
||||||
this._connecting = false
|
|
||||||
this._connected = false
|
|
||||||
this._connectionError = false
|
|
||||||
|
|
||||||
this.connection = c.connection || new Connection({
|
|
||||||
stream: c.stream,
|
|
||||||
ssl: this.connectionParameters.ssl,
|
|
||||||
keepAlive: c.keepAlive || false,
|
|
||||||
encoding: this.connectionParameters.client_encoding || 'utf8'
|
|
||||||
})
|
|
||||||
this.queryQueue = []
|
|
||||||
this.binary = c.binary || defaults.binary
|
|
||||||
this.processID = null
|
|
||||||
this.secretKey = null
|
|
||||||
this.ssl = this.connectionParameters.ssl || false
|
|
||||||
}
|
|
||||||
|
|
||||||
util.inherits(Client, EventEmitter)
|
|
||||||
|
|
||||||
Client.prototype.connect = function (callback) {
|
|
||||||
var self = this
|
|
||||||
var con = this.connection
|
|
||||||
if (this._connecting || this._connected) {
|
|
||||||
const err = new Error('Client has already been connected. You cannot reuse a client.')
|
|
||||||
if (callback) {
|
|
||||||
callback(err)
|
|
||||||
return undefined
|
|
||||||
}
|
|
||||||
return Promise.reject(err)
|
|
||||||
}
|
|
||||||
this._connecting = true
|
|
||||||
|
|
||||||
if (this.host && this.host.indexOf('/') === 0) {
|
|
||||||
con.connect(this.host + '/.s.PGSQL.' + this.port)
|
|
||||||
} else {
|
|
||||||
con.connect(this.port, this.host)
|
|
||||||
}
|
|
||||||
|
|
||||||
// once connection is established send startup message
|
|
||||||
con.on('connect', function () {
|
|
||||||
if (self.ssl) {
|
|
||||||
con.requestSsl()
|
|
||||||
} else {
|
|
||||||
con.startup(self.getStartupConf())
|
|
||||||
}
|
|
||||||
})
|
|
||||||
|
|
||||||
con.on('sslconnect', function () {
|
|
||||||
con.startup(self.getStartupConf())
|
|
||||||
})
|
|
||||||
|
|
||||||
function checkPgPass (cb) {
|
|
||||||
return function (msg) {
|
|
||||||
if (self.password !== null) {
|
|
||||||
cb(msg)
|
|
||||||
} else {
|
|
||||||
pgPass(self.connectionParameters, function (pass) {
|
|
||||||
if (undefined !== pass) {
|
|
||||||
self.connectionParameters.password = self.password = pass
|
|
||||||
}
|
|
||||||
cb(msg)
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// password request handling
|
|
||||||
con.on('authenticationCleartextPassword', checkPgPass(function () {
|
|
||||||
con.password(self.password)
|
|
||||||
}))
|
|
||||||
|
|
||||||
// password request handling
|
|
||||||
con.on('authenticationMD5Password', checkPgPass(function (msg) {
|
|
||||||
var inner = utils.md5(self.password + self.user)
|
|
||||||
var outer = utils.md5(Buffer.concat([Buffer.from(inner), msg.salt]))
|
|
||||||
var md5password = 'md5' + outer
|
|
||||||
con.password(md5password)
|
|
||||||
}))
|
|
||||||
|
|
||||||
con.once('backendKeyData', function (msg) {
|
|
||||||
self.processID = msg.processID
|
|
||||||
self.secretKey = msg.secretKey
|
|
||||||
})
|
|
||||||
|
|
||||||
const connectingErrorHandler = (err) => {
|
|
||||||
if (this._connectionError) {
|
|
||||||
return
|
|
||||||
}
|
|
||||||
this._connectionError = true
|
|
||||||
if (callback) {
|
|
||||||
return callback(err)
|
|
||||||
}
|
|
||||||
this.emit('error', err)
|
|
||||||
}
|
|
||||||
|
|
||||||
const connectedErrorHandler = (err) => {
|
|
||||||
if (this.activeQuery) {
|
|
||||||
var activeQuery = self.activeQuery
|
|
||||||
this.activeQuery = null
|
|
||||||
return activeQuery.handleError(err, con)
|
|
||||||
}
|
|
||||||
this.emit('error', err)
|
|
||||||
}
|
|
||||||
|
|
||||||
con.on('error', connectingErrorHandler)
|
|
||||||
|
|
||||||
// hook up query handling events to connection
|
|
||||||
// after the connection initially becomes ready for queries
|
|
||||||
con.once('readyForQuery', function () {
|
|
||||||
self._connecting = false
|
|
||||||
self._connected = true
|
|
||||||
self._attachListeners(con)
|
|
||||||
con.removeListener('error', connectingErrorHandler)
|
|
||||||
con.on('error', connectedErrorHandler)
|
|
||||||
|
|
||||||
// process possible callback argument to Client#connect
|
|
||||||
if (callback) {
|
|
||||||
callback(null, self)
|
|
||||||
// remove callback for proper error handling
|
|
||||||
// after the connect event
|
|
||||||
callback = null
|
|
||||||
}
|
|
||||||
self.emit('connect')
|
|
||||||
})
|
|
||||||
|
|
||||||
con.on('readyForQuery', function () {
|
|
||||||
var activeQuery = self.activeQuery
|
|
||||||
self.activeQuery = null
|
|
||||||
self.readyForQuery = true
|
|
||||||
if (activeQuery) {
|
|
||||||
activeQuery.handleReadyForQuery(con)
|
|
||||||
}
|
|
||||||
self._pulseQueryQueue()
|
|
||||||
})
|
|
||||||
|
|
||||||
con.once('end', () => {
|
|
||||||
if (this.activeQuery) {
|
|
||||||
var disconnectError = new Error('Connection terminated')
|
|
||||||
this.activeQuery.handleError(disconnectError, con)
|
|
||||||
this.activeQuery = null
|
|
||||||
}
|
|
||||||
if (!this._ending) {
|
|
||||||
// if the connection is ended without us calling .end()
|
|
||||||
// on this client then we have an unexpected disconnection
|
|
||||||
// treat this as an error unless we've already emitted an error
|
|
||||||
// during connection.
|
|
||||||
const error = new Error('Connection terminated unexpectedly')
|
|
||||||
if (this._connecting && !this._connectionError) {
|
|
||||||
if (callback) {
|
|
||||||
callback(error)
|
|
||||||
} else {
|
|
||||||
this.emit('error', error)
|
|
||||||
}
|
|
||||||
} else if (!this._connectionError) {
|
|
||||||
this.emit('error', error)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
this.emit('end')
|
|
||||||
})
|
|
||||||
|
|
||||||
con.on('notice', function (msg) {
|
|
||||||
self.emit('notice', msg)
|
|
||||||
})
|
|
||||||
|
|
||||||
if (!callback) {
|
|
||||||
return new global.Promise((resolve, reject) => {
|
|
||||||
this.once('error', reject)
|
|
||||||
this.once('connect', () => {
|
|
||||||
this.removeListener('error', reject)
|
|
||||||
resolve()
|
|
||||||
})
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
Client.prototype._attachListeners = function (con) {
|
|
||||||
const self = this
|
|
||||||
// delegate rowDescription to active query
|
|
||||||
con.on('rowDescription', function (msg) {
|
|
||||||
self.activeQuery.handleRowDescription(msg)
|
|
||||||
})
|
|
||||||
|
|
||||||
// delegate dataRow to active query
|
|
||||||
con.on('dataRow', function (msg) {
|
|
||||||
self.activeQuery.handleDataRow(msg)
|
|
||||||
})
|
|
||||||
|
|
||||||
// delegate portalSuspended to active query
|
|
||||||
con.on('portalSuspended', function (msg) {
|
|
||||||
self.activeQuery.handlePortalSuspended(con)
|
|
||||||
})
|
|
||||||
|
|
||||||
// delegate emptyQuery to active query
|
|
||||||
con.on('emptyQuery', function (msg) {
|
|
||||||
self.activeQuery.handleEmptyQuery(con)
|
|
||||||
})
|
|
||||||
|
|
||||||
// delegate commandComplete to active query
|
|
||||||
con.on('commandComplete', function (msg) {
|
|
||||||
self.activeQuery.handleCommandComplete(msg, con)
|
|
||||||
})
|
|
||||||
|
|
||||||
// if a prepared statement has a name and properly parses
|
|
||||||
// we track that its already been executed so we don't parse
|
|
||||||
// it again on the same client
|
|
||||||
con.on('parseComplete', function (msg) {
|
|
||||||
if (self.activeQuery.name) {
|
|
||||||
con.parsedStatements[self.activeQuery.name] = true
|
|
||||||
}
|
|
||||||
})
|
|
||||||
|
|
||||||
con.on('copyInResponse', function (msg) {
|
|
||||||
self.activeQuery.handleCopyInResponse(self.connection)
|
|
||||||
})
|
|
||||||
|
|
||||||
con.on('copyData', function (msg) {
|
|
||||||
self.activeQuery.handleCopyData(msg, self.connection)
|
|
||||||
})
|
|
||||||
|
|
||||||
con.on('notification', function (msg) {
|
|
||||||
self.emit('notification', msg)
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
Client.prototype.getStartupConf = function () {
|
|
||||||
var params = this.connectionParameters
|
|
||||||
|
|
||||||
var data = {
|
|
||||||
user: params.user,
|
|
||||||
database: params.database
|
|
||||||
}
|
|
||||||
|
|
||||||
var appName = params.application_name || params.fallback_application_name
|
|
||||||
if (appName) {
|
|
||||||
data.application_name = appName
|
|
||||||
}
|
|
||||||
if (params.replication) {
|
|
||||||
data.replication = '' + params.replication
|
|
||||||
}
|
|
||||||
|
|
||||||
return data
|
|
||||||
}
|
|
||||||
|
|
||||||
Client.prototype.cancel = function (client, query) {
|
|
||||||
if (client.activeQuery === query) {
|
|
||||||
var con = this.connection
|
|
||||||
|
|
||||||
if (this.host && this.host.indexOf('/') === 0) {
|
|
||||||
con.connect(this.host + '/.s.PGSQL.' + this.port)
|
|
||||||
} else {
|
|
||||||
con.connect(this.port, this.host)
|
|
||||||
}
|
|
||||||
|
|
||||||
// once connection is established send cancel message
|
|
||||||
con.on('connect', function () {
|
|
||||||
con.cancel(client.processID, client.secretKey)
|
|
||||||
})
|
|
||||||
} else if (client.queryQueue.indexOf(query) !== -1) {
|
|
||||||
client.queryQueue.splice(client.queryQueue.indexOf(query), 1)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
Client.prototype.setTypeParser = function (oid, format, parseFn) {
|
|
||||||
return this._types.setTypeParser(oid, format, parseFn)
|
|
||||||
}
|
|
||||||
|
|
||||||
Client.prototype.getTypeParser = function (oid, format) {
|
|
||||||
return this._types.getTypeParser(oid, format)
|
|
||||||
}
|
|
||||||
|
|
||||||
// Ported from PostgreSQL 9.2.4 source code in src/interfaces/libpq/fe-exec.c
|
|
||||||
Client.prototype.escapeIdentifier = function (str) {
|
|
||||||
var escaped = '"'
|
|
||||||
|
|
||||||
for (var i = 0; i < str.length; i++) {
|
|
||||||
var c = str[i]
|
|
||||||
if (c === '"') {
|
|
||||||
escaped += c + c
|
|
||||||
} else {
|
|
||||||
escaped += c
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
escaped += '"'
|
|
||||||
|
|
||||||
return escaped
|
|
||||||
}
|
|
||||||
|
|
||||||
// Ported from PostgreSQL 9.2.4 source code in src/interfaces/libpq/fe-exec.c
|
|
||||||
Client.prototype.escapeLiteral = function (str) {
|
|
||||||
var hasBackslash = false
|
|
||||||
var escaped = '\''
|
|
||||||
|
|
||||||
for (var i = 0; i < str.length; i++) {
|
|
||||||
var c = str[i]
|
|
||||||
if (c === '\'') {
|
|
||||||
escaped += c + c
|
|
||||||
} else if (c === '\\') {
|
|
||||||
escaped += c + c
|
|
||||||
hasBackslash = true
|
|
||||||
} else {
|
|
||||||
escaped += c
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
escaped += '\''
|
|
||||||
|
|
||||||
if (hasBackslash === true) {
|
|
||||||
escaped = ' E' + escaped
|
|
||||||
}
|
|
||||||
|
|
||||||
return escaped
|
|
||||||
}
|
|
||||||
|
|
||||||
Client.prototype._pulseQueryQueue = function () {
|
|
||||||
if (this.readyForQuery === true) {
|
|
||||||
this.activeQuery = this.queryQueue.shift()
|
|
||||||
if (this.activeQuery) {
|
|
||||||
this.readyForQuery = false
|
|
||||||
this.hasExecuted = true
|
|
||||||
this.activeQuery.submit(this.connection)
|
|
||||||
} else if (this.hasExecuted) {
|
|
||||||
this.activeQuery = null
|
|
||||||
this.emit('drain')
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
Client.prototype.query = function (config, values, callback) {
|
|
||||||
// can take in strings, config object or query object
|
|
||||||
var query
|
|
||||||
var result
|
|
||||||
if (typeof config.submit === 'function') {
|
|
||||||
result = query = config
|
|
||||||
if (typeof values === 'function') {
|
|
||||||
query.callback = query.callback || values
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
query = new Query(config, values, callback)
|
|
||||||
if (!query.callback) {
|
|
||||||
let resolveOut, rejectOut
|
|
||||||
result = new Promise((resolve, reject) => {
|
|
||||||
resolveOut = resolve
|
|
||||||
rejectOut = reject
|
|
||||||
})
|
|
||||||
query.callback = (err, res) => err ? rejectOut(err) : resolveOut(res)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if (this.binary && !query.binary) {
|
|
||||||
query.binary = true
|
|
||||||
}
|
|
||||||
if (query._result) {
|
|
||||||
query._result._getTypeParser = this._types.getTypeParser.bind(this._types)
|
|
||||||
}
|
|
||||||
|
|
||||||
this.queryQueue.push(query)
|
|
||||||
this._pulseQueryQueue()
|
|
||||||
return result
|
|
||||||
}
|
|
||||||
|
|
||||||
Client.prototype.end = function (cb) {
|
|
||||||
this._ending = true
|
|
||||||
if (this.activeQuery) {
|
|
||||||
// if we have an active query we need to force a disconnect
|
|
||||||
// on the socket - otherwise a hung query could block end forever
|
|
||||||
this.connection.stream.destroy(new Error('Connection terminated by user'))
|
|
||||||
return cb ? cb() : Promise.resolve()
|
|
||||||
}
|
|
||||||
if (cb) {
|
|
||||||
this.connection.end()
|
|
||||||
this.connection.once('end', cb)
|
|
||||||
} else {
|
|
||||||
return new global.Promise((resolve, reject) => {
|
|
||||||
this.connection.end()
|
|
||||||
this.connection.once('end', resolve)
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// expose a Query constructor
|
|
||||||
Client.Query = Query
|
|
||||||
|
|
||||||
module.exports = Client
|
|
||||||
117  lib/connection-parameters.js
@@ -1,117 +0,0 @@
|
|||||||
'use strict'
|
|
||||||
/**
|
|
||||||
* Copyright (c) 2010-2017 Brian Carlson (brian.m.carlson@gmail.com)
|
|
||||||
* All rights reserved.
|
|
||||||
*
|
|
||||||
* This source code is licensed under the MIT license found in the
|
|
||||||
* README.md file in the root directory of this source tree.
|
|
||||||
*/
|
|
||||||
|
|
||||||
var dns = require('dns')
|
|
||||||
|
|
||||||
var defaults = require('./defaults')
|
|
||||||
|
|
||||||
var parse = require('pg-connection-string').parse // parses a connection string
|
|
||||||
|
|
||||||
var val = function (key, config, envVar) {
|
|
||||||
if (envVar === undefined) {
|
|
||||||
envVar = process.env[ 'PG' + key.toUpperCase() ]
|
|
||||||
} else if (envVar === false) {
|
|
||||||
// do nothing ... use false
|
|
||||||
} else {
|
|
||||||
envVar = process.env[ envVar ]
|
|
||||||
}
|
|
||||||
|
|
||||||
return config[key] ||
|
|
||||||
envVar ||
|
|
||||||
defaults[key]
|
|
||||||
}
|
|
||||||
|
|
||||||
var useSsl = function () {
|
|
||||||
switch (process.env.PGSSLMODE) {
|
|
||||||
case 'disable':
|
|
||||||
return false
|
|
||||||
case 'prefer':
|
|
||||||
case 'require':
|
|
||||||
case 'verify-ca':
|
|
||||||
case 'verify-full':
|
|
||||||
return true
|
|
||||||
}
|
|
||||||
return defaults.ssl
|
|
||||||
}
|
|
||||||
|
|
||||||
var ConnectionParameters = function (config) {
|
|
||||||
// if a string is passed, it is a raw connection string so we parse it into a config
|
|
||||||
config = typeof config === 'string' ? parse(config) : config || {}
|
|
||||||
|
|
||||||
// if the config has a connectionString defined, parse IT into the config we use
|
|
||||||
// this will override other default values with what is stored in connectionString
|
|
||||||
if (config.connectionString) {
|
|
||||||
config = Object.assign({}, config, parse(config.connectionString))
|
|
||||||
}
|
|
||||||
|
|
||||||
this.user = val('user', config)
|
|
||||||
this.database = val('database', config)
|
|
||||||
this.port = parseInt(val('port', config), 10)
|
|
||||||
this.host = val('host', config)
|
|
||||||
this.password = val('password', config)
|
|
||||||
this.binary = val('binary', config)
|
|
||||||
this.ssl = typeof config.ssl === 'undefined' ? useSsl() : config.ssl
|
|
||||||
this.client_encoding = val('client_encoding', config)
|
|
||||||
this.replication = val('replication', config)
|
|
||||||
// a domain socket begins with '/'
|
|
||||||
this.isDomainSocket = (!(this.host || '').indexOf('/'))
|
|
||||||
|
|
||||||
this.application_name = val('application_name', config, 'PGAPPNAME')
|
|
||||||
this.fallback_application_name = val('fallback_application_name', config, false)
|
|
||||||
}
|
|
||||||
|
|
||||||
// Convert arg to a string, surround in single quotes, and escape single quotes and backslashes
|
|
||||||
var quoteParamValue = function (value) {
|
|
||||||
return "'" + ('' + value).replace(/\\/g, '\\\\').replace(/'/g, "\\'") + "'"
|
|
||||||
}
|
|
||||||
|
|
||||||
var add = function (params, config, paramName) {
|
|
||||||
var value = config[paramName]
|
|
||||||
if (value) {
|
|
||||||
params.push(paramName + '=' + quoteParamValue(value))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
ConnectionParameters.prototype.getLibpqConnectionString = function (cb) {
|
|
||||||
var params = []
|
|
||||||
add(params, this, 'user')
|
|
||||||
add(params, this, 'password')
|
|
||||||
add(params, this, 'port')
|
|
||||||
add(params, this, 'application_name')
|
|
||||||
add(params, this, 'fallback_application_name')
|
|
||||||
|
|
||||||
var ssl = typeof this.ssl === 'object' ? this.ssl : {sslmode: this.ssl}
|
|
||||||
add(params, ssl, 'sslmode')
|
|
||||||
add(params, ssl, 'sslca')
|
|
||||||
add(params, ssl, 'sslkey')
|
|
||||||
add(params, ssl, 'sslcert')
|
|
||||||
|
|
||||||
if (this.database) {
|
|
||||||
params.push('dbname=' + quoteParamValue(this.database))
|
|
||||||
}
|
|
||||||
if (this.replication) {
|
|
||||||
params.push('replication=' + quoteParamValue(this.replication))
|
|
||||||
}
|
|
||||||
if (this.host) {
|
|
||||||
params.push('host=' + quoteParamValue(this.host))
|
|
||||||
}
|
|
||||||
if (this.isDomainSocket) {
|
|
||||||
return cb(null, params.join(' '))
|
|
||||||
}
|
|
||||||
if (this.client_encoding) {
|
|
||||||
params.push('client_encoding=' + quoteParamValue(this.client_encoding))
|
|
||||||
}
|
|
||||||
dns.lookup(this.host, function (err, address) {
|
|
||||||
if (err) return cb(err, null)
|
|
||||||
params.push('hostaddr=' + quoteParamValue(address))
|
|
||||||
return cb(null, params.join(' '))
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
module.exports = ConnectionParameters
|
|
||||||
647  lib/connection.js
@@ -1,647 +0,0 @@
|
|||||||
'use strict'
|
|
||||||
/**
|
|
||||||
* Copyright (c) 2010-2017 Brian Carlson (brian.m.carlson@gmail.com)
|
|
||||||
* All rights reserved.
|
|
||||||
*
|
|
||||||
* This source code is licensed under the MIT license found in the
|
|
||||||
* README.md file in the root directory of this source tree.
|
|
||||||
*/
|
|
||||||
|
|
||||||
var net = require('net')
|
|
||||||
var EventEmitter = require('events').EventEmitter
|
|
||||||
var util = require('util')
|
|
||||||
|
|
||||||
var Writer = require('buffer-writer')
|
|
||||||
var Reader = require('packet-reader')
|
|
||||||
|
|
||||||
var TEXT_MODE = 0
|
|
||||||
var BINARY_MODE = 1
|
|
||||||
var Connection = function (config) {
|
|
||||||
EventEmitter.call(this)
|
|
||||||
config = config || {}
|
|
||||||
this.stream = config.stream || new net.Stream()
|
|
||||||
this._keepAlive = config.keepAlive
|
|
||||||
this.lastBuffer = false
|
|
||||||
this.lastOffset = 0
|
|
||||||
this.buffer = null
|
|
||||||
this.offset = null
|
|
||||||
this.encoding = config.encoding || 'utf8'
|
|
||||||
this.parsedStatements = {}
|
|
||||||
this.writer = new Writer()
|
|
||||||
this.ssl = config.ssl || false
|
|
||||||
this._ending = false
|
|
||||||
this._mode = TEXT_MODE
|
|
||||||
this._emitMessage = false
|
|
||||||
this._reader = new Reader({
|
|
||||||
headerSize: 1,
|
|
||||||
lengthPadding: -4
|
|
||||||
})
|
|
||||||
var self = this
|
|
||||||
this.on('newListener', function (eventName) {
|
|
||||||
if (eventName === 'message') {
|
|
||||||
self._emitMessage = true
|
|
||||||
}
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
util.inherits(Connection, EventEmitter)
|
|
||||||
|
|
||||||
Connection.prototype.connect = function (port, host) {
|
|
||||||
if (this.stream.readyState === 'closed') {
|
|
||||||
this.stream.connect(port, host)
|
|
||||||
} else if (this.stream.readyState === 'open') {
|
|
||||||
this.emit('connect')
|
|
||||||
}
|
|
||||||
|
|
||||||
var self = this
|
|
||||||
|
|
||||||
this.stream.on('connect', function () {
|
|
||||||
if (self._keepAlive) {
|
|
||||||
self.stream.setKeepAlive(true)
|
|
||||||
}
|
|
||||||
self.emit('connect')
|
|
||||||
})
|
|
||||||
|
|
||||||
this.stream.on('error', function (error) {
|
|
||||||
// don't raise ECONNRESET errors - they can & should be ignored
|
|
||||||
// during disconnect
|
|
||||||
if (self._ending && error.code === 'ECONNRESET') {
|
|
||||||
return
|
|
||||||
}
|
|
||||||
self.emit('error', error)
|
|
||||||
})
|
|
||||||
|
|
||||||
this.stream.on('close', function () {
|
|
||||||
self.emit('end')
|
|
||||||
})
|
|
||||||
|
|
||||||
if (!this.ssl) {
|
|
||||||
return this.attachListeners(this.stream)
|
|
||||||
}
|
|
||||||
|
|
||||||
this.stream.once('data', function (buffer) {
|
|
||||||
var responseCode = buffer.toString('utf8')
|
|
||||||
if (responseCode !== 'S') {
|
|
||||||
return self.emit('error', new Error('The server does not support SSL connections'))
|
|
||||||
}
|
|
||||||
var tls = require('tls')
|
|
||||||
self.stream = tls.connect({
|
|
||||||
socket: self.stream,
|
|
||||||
servername: host,
|
|
||||||
rejectUnauthorized: self.ssl.rejectUnauthorized,
|
|
||||||
ca: self.ssl.ca,
|
|
||||||
pfx: self.ssl.pfx,
|
|
||||||
key: self.ssl.key,
|
|
||||||
passphrase: self.ssl.passphrase,
|
|
||||||
cert: self.ssl.cert,
|
|
||||||
NPNProtocols: self.ssl.NPNProtocols
|
|
||||||
})
|
|
||||||
self.attachListeners(self.stream)
|
|
||||||
self.emit('sslconnect')
|
|
||||||
|
|
||||||
self.stream.on('error', function (error) {
|
|
||||||
self.emit('error', error)
|
|
||||||
})
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
Connection.prototype.attachListeners = function (stream) {
|
|
||||||
var self = this
|
|
||||||
stream.on('data', function (buff) {
|
|
||||||
self._reader.addChunk(buff)
|
|
||||||
var packet = self._reader.read()
|
|
||||||
while (packet) {
|
|
||||||
var msg = self.parseMessage(packet)
|
|
||||||
if (self._emitMessage) {
|
|
||||||
self.emit('message', msg)
|
|
||||||
}
|
|
||||||
self.emit(msg.name, msg)
|
|
||||||
packet = self._reader.read()
|
|
||||||
}
|
|
||||||
})
|
|
||||||
stream.on('end', function () {
|
|
||||||
self.emit('end')
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
Connection.prototype.requestSsl = function () {
|
|
||||||
var bodyBuffer = this.writer
|
|
||||||
.addInt16(0x04D2)
|
|
||||||
.addInt16(0x162F).flush()
|
|
||||||
|
|
||||||
var length = bodyBuffer.length + 4
|
|
||||||
|
|
||||||
var buffer = new Writer()
|
|
||||||
.addInt32(length)
|
|
||||||
.add(bodyBuffer)
|
|
||||||
.join()
|
|
||||||
this.stream.write(buffer)
|
|
||||||
}
|
|
||||||
|
|
||||||
Connection.prototype.startup = function (config) {
|
|
||||||
var writer = this.writer
|
|
||||||
.addInt16(3)
|
|
||||||
.addInt16(0)
|
|
||||||
|
|
||||||
Object.keys(config).forEach(function (key) {
|
|
||||||
var val = config[key]
|
|
||||||
writer.addCString(key).addCString(val)
|
|
||||||
})
|
|
||||||
|
|
||||||
writer.addCString('client_encoding').addCString("'utf-8'")
|
|
||||||
|
|
||||||
var bodyBuffer = writer.addCString('').flush()
|
|
||||||
// this message is sent without a code
|
|
||||||
|
|
||||||
var length = bodyBuffer.length + 4
|
|
||||||
|
|
||||||
var buffer = new Writer()
|
|
||||||
.addInt32(length)
|
|
||||||
.add(bodyBuffer)
|
|
||||||
.join()
|
|
||||||
this.stream.write(buffer)
|
|
||||||
}
|
|
||||||
|
|
||||||
Connection.prototype.cancel = function (processID, secretKey) {
|
|
||||||
var bodyBuffer = this.writer
|
|
||||||
.addInt16(1234)
|
|
||||||
.addInt16(5678)
|
|
||||||
.addInt32(processID)
|
|
||||||
.addInt32(secretKey)
|
|
||||||
.flush()
|
|
||||||
|
|
||||||
var length = bodyBuffer.length + 4
|
|
||||||
|
|
||||||
var buffer = new Writer()
|
|
||||||
.addInt32(length)
|
|
||||||
.add(bodyBuffer)
|
|
||||||
.join()
|
|
||||||
this.stream.write(buffer)
|
|
||||||
}
|
|
||||||
|
|
||||||
Connection.prototype.password = function (password) {
|
|
||||||
// 0x70 = 'p'
|
|
||||||
this._send(0x70, this.writer.addCString(password))
|
|
||||||
}
|
|
||||||
|
|
||||||
Connection.prototype._send = function (code, more) {
|
|
||||||
if (!this.stream.writable) {
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
if (more === true) {
|
|
||||||
this.writer.addHeader(code)
|
|
||||||
} else {
|
|
||||||
return this.stream.write(this.writer.flush(code))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
Connection.prototype.query = function (text) {
|
|
||||||
// 0x51 = Q
|
|
||||||
this.stream.write(this.writer.addCString(text).flush(0x51))
|
|
||||||
}
|
|
||||||
|
|
||||||
// send parse message
|
|
||||||
// "more" === true to buffer the message until flush() is called
|
|
||||||
Connection.prototype.parse = function (query, more) {
|
|
||||||
// expect something like this:
|
|
||||||
// { name: 'queryName',
|
|
||||||
// text: 'select * from blah',
|
|
||||||
// types: ['int8', 'bool'] }
|
|
||||||
|
|
||||||
// normalize missing query names to allow for null
|
|
||||||
query.name = query.name || ''
|
|
||||||
if (query.name.length > 63) {
|
|
||||||
console.error('Warning! Postgres only supports 63 characters for query names.')
|
|
||||||
console.error('You supplied', query.name, '(', query.name.length, ')')
|
|
||||||
console.error('This can cause conflicts and silent errors executing queries')
|
|
||||||
}
|
|
||||||
// normalize null type array
|
|
||||||
query.types = query.types || []
|
|
||||||
var len = query.types.length
|
|
||||||
var buffer = this.writer
|
|
||||||
.addCString(query.name) // name of query
|
|
||||||
.addCString(query.text) // actual query text
|
|
||||||
.addInt16(len)
|
|
||||||
for (var i = 0; i < len; i++) {
|
|
||||||
buffer.addInt32(query.types[i])
|
|
||||||
}
|
|
||||||
|
|
||||||
var code = 0x50
|
|
||||||
this._send(code, more)
|
|
||||||
}
|
|
||||||
|
|
||||||
// send bind message
|
|
||||||
// "more" === true to buffer the message until flush() is called
|
|
||||||
Connection.prototype.bind = function (config, more) {
|
|
||||||
// normalize config
|
|
||||||
config = config || {}
|
|
||||||
config.portal = config.portal || ''
|
|
||||||
config.statement = config.statement || ''
|
|
||||||
config.binary = config.binary || false
|
|
||||||
var values = config.values || []
|
|
||||||
var len = values.length
|
|
||||||
var useBinary = false
|
|
||||||
for (var j = 0; j < len; j++) { useBinary |= values[j] instanceof Buffer }
|
|
||||||
var buffer = this.writer
|
|
||||||
.addCString(config.portal)
|
|
||||||
.addCString(config.statement)
|
|
||||||
if (!useBinary) { buffer.addInt16(0) } else {
|
|
||||||
buffer.addInt16(len)
|
|
||||||
for (j = 0; j < len; j++) { buffer.addInt16(values[j] instanceof Buffer) }
|
|
||||||
}
|
|
||||||
buffer.addInt16(len)
|
|
||||||
for (var i = 0; i < len; i++) {
|
|
||||||
var val = values[i]
|
|
||||||
if (val === null || typeof val === 'undefined') {
|
|
||||||
buffer.addInt32(-1)
|
|
||||||
} else if (val instanceof Buffer) {
|
|
||||||
buffer.addInt32(val.length)
|
|
||||||
buffer.add(val)
|
|
||||||
} else {
|
|
||||||
buffer.addInt32(Buffer.byteLength(val))
|
|
||||||
buffer.addString(val)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if (config.binary) {
|
|
||||||
buffer.addInt16(1) // format codes to use binary
|
|
||||||
buffer.addInt16(1)
|
|
||||||
} else {
|
|
||||||
buffer.addInt16(0) // format codes to use text
|
|
||||||
}
|
|
||||||
// 0x42 = 'B'
|
|
||||||
this._send(0x42, more)
|
|
||||||
}
|
|
||||||
|
|
||||||
// send execute message
|
|
||||||
// "more" === true to buffer the message until flush() is called
|
|
||||||
Connection.prototype.execute = function (config, more) {
|
|
||||||
config = config || {}
|
|
||||||
config.portal = config.portal || ''
|
|
  config.rows = config.rows || ''
  this.writer
    .addCString(config.portal)
    .addInt32(config.rows)

  // 0x45 = 'E'
  this._send(0x45, more)
}

var emptyBuffer = Buffer.alloc(0)

Connection.prototype.flush = function () {
  // 0x48 = 'H'
  this.writer.add(emptyBuffer)
  this._send(0x48)
}

Connection.prototype.sync = function () {
  // clear out any pending data in the writer
  this.writer.flush(0)

  this.writer.add(emptyBuffer)
  this._ending = true
  this._send(0x53)
}

const END_BUFFER = Buffer.from([0x58, 0x00, 0x00, 0x00, 0x04])

Connection.prototype.end = function () {
  // 0x58 = 'X'
  this.writer.add(emptyBuffer)
  this._ending = true
  return this.stream.write(END_BUFFER)
}

Connection.prototype.close = function (msg, more) {
  this.writer.addCString(msg.type + (msg.name || ''))
  this._send(0x43, more)
}

Connection.prototype.describe = function (msg, more) {
  this.writer.addCString(msg.type + (msg.name || ''))
  this._send(0x44, more)
}

Connection.prototype.sendCopyFromChunk = function (chunk) {
  this.stream.write(this.writer.add(chunk).flush(0x64))
}

Connection.prototype.endCopyFrom = function () {
  this.stream.write(this.writer.add(emptyBuffer).flush(0x63))
}

Connection.prototype.sendCopyFail = function (msg) {
  // this.stream.write(this.writer.add(emptyBuffer).flush(0x66));
  this.writer.addCString(msg)
  this._send(0x66)
}

var Message = function (name, length) {
  this.name = name
  this.length = length
}

Connection.prototype.parseMessage = function (buffer) {
  this.offset = 0
  var length = buffer.length + 4
  switch (this._reader.header) {
    case 0x52: // R
      return this.parseR(buffer, length)

    case 0x53: // S
      return this.parseS(buffer, length)

    case 0x4b: // K
      return this.parseK(buffer, length)

    case 0x43: // C
      return this.parseC(buffer, length)

    case 0x5a: // Z
      return this.parseZ(buffer, length)

    case 0x54: // T
      return this.parseT(buffer, length)

    case 0x44: // D
      return this.parseD(buffer, length)

    case 0x45: // E
      return this.parseE(buffer, length)

    case 0x4e: // N
      return this.parseN(buffer, length)

    case 0x31: // 1
      return new Message('parseComplete', length)

    case 0x32: // 2
      return new Message('bindComplete', length)

    case 0x33: // 3
      return new Message('closeComplete', length)

    case 0x41: // A
      return this.parseA(buffer, length)

    case 0x6e: // n
      return new Message('noData', length)

    case 0x49: // I
      return new Message('emptyQuery', length)

    case 0x73: // s
      return new Message('portalSuspended', length)

    case 0x47: // G
      return this.parseG(buffer, length)

    case 0x48: // H
      return this.parseH(buffer, length)

    case 0x57: // W
      return new Message('replicationStart', length)

    case 0x63: // c
      return new Message('copyDone', length)

    case 0x64: // d
      return this.parsed(buffer, length)
  }
}

Connection.prototype.parseR = function (buffer, length) {
  var code = 0
  var msg = new Message('authenticationOk', length)
  if (msg.length === 8) {
    code = this.parseInt32(buffer)
    if (code === 3) {
      msg.name = 'authenticationCleartextPassword'
    }
    return msg
  }
  if (msg.length === 12) {
    code = this.parseInt32(buffer)
    if (code === 5) { // md5 required
      msg.name = 'authenticationMD5Password'
      msg.salt = Buffer.alloc(4)
      buffer.copy(msg.salt, 0, this.offset, this.offset + 4)
      this.offset += 4
      return msg
    }
  }
  throw new Error('Unknown authenticationOk message type' + util.inspect(msg))
}

Connection.prototype.parseS = function (buffer, length) {
  var msg = new Message('parameterStatus', length)
  msg.parameterName = this.parseCString(buffer)
  msg.parameterValue = this.parseCString(buffer)
  return msg
}

Connection.prototype.parseK = function (buffer, length) {
  var msg = new Message('backendKeyData', length)
  msg.processID = this.parseInt32(buffer)
  msg.secretKey = this.parseInt32(buffer)
  return msg
}

Connection.prototype.parseC = function (buffer, length) {
  var msg = new Message('commandComplete', length)
  msg.text = this.parseCString(buffer)
  return msg
}

Connection.prototype.parseZ = function (buffer, length) {
  var msg = new Message('readyForQuery', length)
  msg.name = 'readyForQuery'
  msg.status = this.readString(buffer, 1)
  return msg
}

var ROW_DESCRIPTION = 'rowDescription'
Connection.prototype.parseT = function (buffer, length) {
  var msg = new Message(ROW_DESCRIPTION, length)
  msg.fieldCount = this.parseInt16(buffer)
  var fields = []
  for (var i = 0; i < msg.fieldCount; i++) {
    fields.push(this.parseField(buffer))
  }
  msg.fields = fields
  return msg
}

var Field = function () {
  this.name = null
  this.tableID = null
  this.columnID = null
  this.dataTypeID = null
  this.dataTypeSize = null
  this.dataTypeModifier = null
  this.format = null
}

var FORMAT_TEXT = 'text'
var FORMAT_BINARY = 'binary'
Connection.prototype.parseField = function (buffer) {
  var field = new Field()
  field.name = this.parseCString(buffer)
  field.tableID = this.parseInt32(buffer)
  field.columnID = this.parseInt16(buffer)
  field.dataTypeID = this.parseInt32(buffer)
  field.dataTypeSize = this.parseInt16(buffer)
  field.dataTypeModifier = this.parseInt32(buffer)
  if (this.parseInt16(buffer) === TEXT_MODE) {
    this._mode = TEXT_MODE
    field.format = FORMAT_TEXT
  } else {
    this._mode = BINARY_MODE
    field.format = FORMAT_BINARY
  }
  return field
}

var DATA_ROW = 'dataRow'
var DataRowMessage = function (length, fieldCount) {
  this.name = DATA_ROW
  this.length = length
  this.fieldCount = fieldCount
  this.fields = []
}

// extremely hot-path code
Connection.prototype.parseD = function (buffer, length) {
  var fieldCount = this.parseInt16(buffer)
  var msg = new DataRowMessage(length, fieldCount)
  for (var i = 0; i < fieldCount; i++) {
    msg.fields.push(this._readValue(buffer))
  }
  return msg
}

// extremely hot-path code
Connection.prototype._readValue = function (buffer) {
  var length = this.parseInt32(buffer)
  if (length === -1) return null
  if (this._mode === TEXT_MODE) {
    return this.readString(buffer, length)
  }
  return this.readBytes(buffer, length)
}

// parses error
Connection.prototype.parseE = function (buffer, length) {
  var fields = {}
  var msg, item
  var input = new Message('error', length)
  var fieldType = this.readString(buffer, 1)
  while (fieldType !== '\0') {
    fields[fieldType] = this.parseCString(buffer)
    fieldType = this.readString(buffer, 1)
  }
  if (input.name === 'error') {
    // the msg is an Error instance
    msg = new Error(fields.M)
    for (item in input) {
      // copy input properties to the error
      if (input.hasOwnProperty(item)) {
        msg[item] = input[item]
      }
    }
  } else {
    // the msg is an object literal
    msg = input
    msg.message = fields.M
  }
  msg.severity = fields.S
  msg.code = fields.C
  msg.detail = fields.D
  msg.hint = fields.H
  msg.position = fields.P
  msg.internalPosition = fields.p
  msg.internalQuery = fields.q
  msg.where = fields.W
  msg.schema = fields.s
  msg.table = fields.t
  msg.column = fields.c
  msg.dataType = fields.d
  msg.constraint = fields.n
  msg.file = fields.F
  msg.line = fields.L
  msg.routine = fields.R
  return msg
}

// same thing, different name
Connection.prototype.parseN = function (buffer, length) {
  var msg = this.parseE(buffer, length)
  msg.name = 'notice'
  return msg
}

Connection.prototype.parseA = function (buffer, length) {
  var msg = new Message('notification', length)
  msg.processId = this.parseInt32(buffer)
  msg.channel = this.parseCString(buffer)
  msg.payload = this.parseCString(buffer)
  return msg
}

Connection.prototype.parseG = function (buffer, length) {
  var msg = new Message('copyInResponse', length)
  return this.parseGH(buffer, msg)
}

Connection.prototype.parseH = function (buffer, length) {
  var msg = new Message('copyOutResponse', length)
  return this.parseGH(buffer, msg)
}

Connection.prototype.parseGH = function (buffer, msg) {
  var isBinary = buffer[this.offset] !== 0
  this.offset++
  msg.binary = isBinary
  var columnCount = this.parseInt16(buffer)
  msg.columnTypes = []
  for (var i = 0; i < columnCount; i++) {
    msg.columnTypes.push(this.parseInt16(buffer))
  }
  return msg
}

Connection.prototype.parsed = function (buffer, length) {
  var msg = new Message('copyData', length)
  msg.chunk = this.readBytes(buffer, msg.length - 4)
  return msg
}

Connection.prototype.parseInt32 = function (buffer) {
  var value = buffer.readInt32BE(this.offset, true)
  this.offset += 4
  return value
}

Connection.prototype.parseInt16 = function (buffer) {
  var value = buffer.readInt16BE(this.offset, true)
  this.offset += 2
  return value
}

Connection.prototype.readString = function (buffer, length) {
  return buffer.toString(this.encoding, this.offset, (this.offset += length))
}

Connection.prototype.readBytes = function (buffer, length) {
  return buffer.slice(this.offset, (this.offset += length))
}

Connection.prototype.parseCString = function (buffer) {
  var start = this.offset
  var end = buffer.indexOf(0, start)
  this.offset = end + 1
  return buffer.toString(this.encoding, start, end)
}
// end parsing methods
module.exports = Connection
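For orientation, here is a rough, hypothetical sketch of how the extended-query helpers above (`parse`, `bind`, `describe`, `execute`, `flush`, `sync`) are driven for a single prepared statement. The statement name and values are placeholders; the actual sequencing used by this library lives in `Query.prototype.prepare` further down in this diff.

```js
// Hypothetical caller; `connection` is an instance of the Connection above.
connection.parse({ name: 'get_user', text: 'SELECT * FROM users WHERE id = $1', types: [] }, true)
connection.bind({ portal: '', statement: 'get_user', values: ['1'], binary: false }, true)
connection.describe({ type: 'P', name: '' }, true)
connection.execute({ portal: '', rows: '' }, true)
connection.flush() // 'H': ask the backend to send any pending responses
// after commandComplete (or an error) on a prepared statement, the query calls connection.sync() // 'S'
```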
lib/index.js
@@ -1,57 +0,0 @@
'use strict'
/**
 * Copyright (c) 2010-2017 Brian Carlson (brian.m.carlson@gmail.com)
 * All rights reserved.
 *
 * This source code is licensed under the MIT license found in the
 * README.md file in the root directory of this source tree.
 */

var util = require('util')
var Client = require('./client')
var defaults = require('./defaults')
var Connection = require('./connection')
var Pool = require('pg-pool')

const poolFactory = (Client) => {
  var BoundPool = function (options) {
    var config = Object.assign({ Client: Client }, options)
    return new Pool(config)
  }

  util.inherits(BoundPool, Pool)

  return BoundPool
}

var PG = function (clientConstructor) {
  this.defaults = defaults
  this.Client = clientConstructor
  this.Query = this.Client.Query
  this.Pool = poolFactory(this.Client)
  this._pools = []
  this.Connection = Connection
  this.types = require('pg-types')
}

if (typeof process.env.NODE_PG_FORCE_NATIVE !== 'undefined') {
  module.exports = new PG(require('./native'))
} else {
  module.exports = new PG(Client)

  // lazy require native module...the native module may not have installed
  module.exports.__defineGetter__('native', function () {
    delete module.exports.native
    var native = null
    try {
      native = new PG(require('./native'))
    } catch (err) {
      if (err.code !== 'MODULE_NOT_FOUND') {
        throw err
      }
      console.error(err.message)
    }
    module.exports.native = native
    return native
  })
}
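The `poolFactory`/`BoundPool` wrapper above is what makes `new pg.Pool(...)` construct a `pg-pool` instance bound to this module's `Client`. A minimal usage sketch (the connection options are placeholders):

```js
const pg = require('pg') // resolves to the module above

const pool = new pg.Pool({ connectionString: 'postgres://user:pass@localhost:5432/db' })
pool.query('SELECT NOW()', (err, res) => {
  if (err) throw err
  console.log(res.rows[0])
  pool.end()
})
```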
@@ -1,226 +0,0 @@
'use strict'
/**
 * Copyright (c) 2010-2017 Brian Carlson (brian.m.carlson@gmail.com)
 * All rights reserved.
 *
 * This source code is licensed under the MIT license found in the
 * README.md file in the root directory of this source tree.
 */

var Native = require('pg-native')
var TypeOverrides = require('../type-overrides')
var semver = require('semver')
var pkg = require('../../package.json')
var assert = require('assert')
var EventEmitter = require('events').EventEmitter
var util = require('util')
var ConnectionParameters = require('../connection-parameters')

var msg = 'Version >= ' + pkg.minNativeVersion + ' of pg-native required.'
assert(semver.gte(Native.version, pkg.minNativeVersion), msg)

var NativeQuery = require('./query')

var Client = module.exports = function (config) {
  EventEmitter.call(this)
  config = config || {}

  this._types = new TypeOverrides(config.types)

  this.native = new Native({
    types: this._types
  })

  this._queryQueue = []
  this._connected = false
  this._connecting = false

  // keep these on the object for legacy reasons
  // for the time being. TODO: deprecate all this jazz
  var cp = this.connectionParameters = new ConnectionParameters(config)
  this.user = cp.user
  this.password = cp.password
  this.database = cp.database
  this.host = cp.host
  this.port = cp.port

  // a hash to hold named queries
  this.namedQueries = {}
}

Client.Query = NativeQuery

util.inherits(Client, EventEmitter)

// connect to the backend
// pass an optional callback to be called once connected
// or with an error if there was a connection error
// if no callback is passed and there is a connection error
// the client will emit an error event.
Client.prototype.connect = function (cb) {
  var self = this

  var onError = function (err) {
    if (cb) return cb(err)
    return self.emit('error', err)
  }

  var result
  if (!cb) {
    var resolveOut, rejectOut
    cb = (err) => err ? rejectOut(err) : resolveOut()
    result = new global.Promise(function (resolve, reject) {
      resolveOut = resolve
      rejectOut = reject
    })
  }

  if (this._connecting) {
    process.nextTick(() => cb(new Error('Client has already been connected. You cannot reuse a client.')))
    return result
  }

  this._connecting = true

  this.connectionParameters.getLibpqConnectionString(function (err, conString) {
    if (err) return onError(err)
    self.native.connect(conString, function (err) {
      if (err) return onError(err)

      // set internal states to connected
      self._connected = true

      // handle connection errors from the native layer
      self.native.on('error', function (err) {
        // error will be handled by active query
        if (self._activeQuery && self._activeQuery.state !== 'end') {
          return
        }
        self.emit('error', err)
      })

      self.native.on('notification', function (msg) {
        self.emit('notification', {
          channel: msg.relname,
          payload: msg.extra
        })
      })

      // signal we are connected now
      self.emit('connect')
      self._pulseQueryQueue(true)

      // possibly call the optional callback
      if (cb) cb()
    })
  })

  return result
}

// send a query to the server
// this method is highly overloaded to take
// 1) string query, optional array of parameters, optional function callback
// 2) object query with {
//    string query
//    optional array values,
//    optional function callback instead of as a separate parameter
//    optional string name to name & cache the query plan
//    optional string rowMode = 'array' for an array of results
// }
Client.prototype.query = function (config, values, callback) {
  if (typeof config.submit === 'function') {
    // accept query(new Query(...), (err, res) => { }) style
    if (typeof values === 'function') {
      config.callback = values
    }
    this._queryQueue.push(config)
    this._pulseQueryQueue()
    return config
  }

  var query = new NativeQuery(config, values, callback)
  var result
  if (!query.callback) {
    let resolveOut, rejectOut
    result = new Promise((resolve, reject) => {
      resolveOut = resolve
      rejectOut = reject
    })
    query.callback = (err, res) => err ? rejectOut(err) : resolveOut(res)
  }
  this._queryQueue.push(query)
  this._pulseQueryQueue()
  return result
}

// disconnect from the backend server
Client.prototype.end = function (cb) {
  var self = this
  if (!this._connected) {
    this.once('connect', this.end.bind(this, cb))
  }
  var result
  if (!cb) {
    var resolve, reject
    cb = (err) => err ? reject(err) : resolve()
    result = new global.Promise(function (res, rej) {
      resolve = res
      reject = rej
    })
  }
  this.native.end(function () {
    // send an error to the active query
    if (self._hasActiveQuery()) {
      var msg = 'Connection terminated'
      self._queryQueue.length = 0
      self._activeQuery.handleError(new Error(msg))
    }
    self.emit('end')
    if (cb) cb()
  })
  return result
}

Client.prototype._hasActiveQuery = function () {
  return this._activeQuery && this._activeQuery.state !== 'error' && this._activeQuery.state !== 'end'
}

Client.prototype._pulseQueryQueue = function (initialConnection) {
  if (!this._connected) {
    return
  }
  if (this._hasActiveQuery()) {
    return
  }
  var query = this._queryQueue.shift()
  if (!query) {
    if (!initialConnection) {
      this.emit('drain')
    }
    return
  }
  this._activeQuery = query
  query.submit(this)
  var self = this
  query.once('_done', function () {
    self._pulseQueryQueue()
  })
}

// attempt to cancel an in-progress query
Client.prototype.cancel = function (query) {
  if (this._activeQuery === query) {
    this.native.cancel(function () {})
  } else if (this._queryQueue.indexOf(query) !== -1) {
    this._queryQueue.splice(this._queryQueue.indexOf(query), 1)
  }
}

Client.prototype.setTypeParser = function (oid, format, parseFn) {
  return this._types.setTypeParser(oid, format, parseFn)
}

Client.prototype.getTypeParser = function (oid, format) {
  return this._types.getTypeParser(oid, format)
}
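The native `Client` above mirrors the JavaScript client's API: `connect()` and `query()` return promises when no callback is supplied. A small sketch of promise-style use, assuming `pg-native` is installed so that `pg.native` resolves (it is `null` otherwise, per the lazy getter in `lib/index.js` above):

```js
const pg = require('pg')
const Client = pg.native.Client

const client = new Client()
client
  .connect()
  .then(() => client.query('SELECT $1::text AS greeting', ['hello']))
  .then((res) => {
    console.log(res.rows[0].greeting)
    return client.end()
  })
  .catch((err) => console.error(err))
```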
lib/query.js
@@ -1,232 +0,0 @@
'use strict'
/**
 * Copyright (c) 2010-2017 Brian Carlson (brian.m.carlson@gmail.com)
 * All rights reserved.
 *
 * This source code is licensed under the MIT license found in the
 * README.md file in the root directory of this source tree.
 */

var EventEmitter = require('events').EventEmitter
var util = require('util')

var Result = require('./result')
var utils = require('./utils')

var Query = function (config, values, callback) {
  // use of "new" optional
  if (!(this instanceof Query)) { return new Query(config, values, callback) }

  config = utils.normalizeQueryConfig(config, values, callback)

  this.text = config.text
  this.values = config.values
  this.rows = config.rows
  this.types = config.types
  this.name = config.name
  this.binary = config.binary
  this.stream = config.stream
  // use unique portal name each time
  this.portal = config.portal || ''
  this.callback = config.callback
  this._rowMode = config.rowMode
  if (process.domain && config.callback) {
    this.callback = process.domain.bind(config.callback)
  }
  this._result = new Result(this._rowMode, this.types)

  // potential for multiple results
  this._results = this._result
  this.isPreparedStatement = false
  this._canceledDueToError = false
  this._promise = null
  EventEmitter.call(this)
}

util.inherits(Query, EventEmitter)

Query.prototype.requiresPreparation = function () {
  // named queries must always be prepared
  if (this.name) { return true }
  // always prepare if there are max number of rows expected per
  // portal execution
  if (this.rows) { return true }
  // don't prepare empty text queries
  if (!this.text) { return false }
  // prepare if there are values
  if (!this.values) { return false }
  return this.values.length > 0
}

Query.prototype._checkForMultirow = function () {
  // if we already have a result with a command property
  // then we've already executed one query in a multi-statement simple query
  // turn our results into an array of results
  if (this._result.command) {
    if (!Array.isArray(this._results)) {
      this._results = [this._result]
    }
    this._result = new Result(this._rowMode, this.types)
    this._results.push(this._result)
  }
}

// associates row metadata from the supplied
// message with this query object
// metadata used when parsing row results
Query.prototype.handleRowDescription = function (msg) {
  this._checkForMultirow()
  this._result.addFields(msg.fields)
  this._accumulateRows = this.callback || !this.listeners('row').length
}

Query.prototype.handleDataRow = function (msg) {
  var row

  if (this._canceledDueToError) {
    return
  }

  try {
    row = this._result.parseRow(msg.fields)
  } catch (err) {
    this._canceledDueToError = err
    return
  }

  this.emit('row', row, this._result)
  if (this._accumulateRows) {
    this._result.addRow(row)
  }
}

Query.prototype.handleCommandComplete = function (msg, con) {
  this._checkForMultirow()
  this._result.addCommandComplete(msg)
  // need to sync after each command complete of a prepared statement
  if (this.isPreparedStatement) {
    con.sync()
  }
}

// if a named prepared statement is created with empty query text
// the backend will send an emptyQuery message but *not* a command complete message
// execution on the connection will hang until the backend receives a sync message
Query.prototype.handleEmptyQuery = function (con) {
  if (this.isPreparedStatement) {
    con.sync()
  }
}

Query.prototype.handleReadyForQuery = function (con) {
  if (this._canceledDueToError) {
    return this.handleError(this._canceledDueToError, con)
  }
  if (this.callback) {
    this.callback(null, this._results)
  }
  this.emit('end', this._results)
}

Query.prototype.handleError = function (err, connection) {
  // need to sync after error during a prepared statement
  if (this.isPreparedStatement) {
    connection.sync()
  }
  if (this._canceledDueToError) {
    err = this._canceledDueToError
    this._canceledDueToError = false
  }
  // if callback supplied do not emit error event as uncaught error
  // events will bubble up to node process
  if (this.callback) {
    return this.callback(err)
  }
  this.emit('error', err)
}

Query.prototype.submit = function (connection) {
  if (typeof this.text !== 'string' && typeof this.name !== 'string') {
    const err = new Error('A query must have either text or a name. Supplying neither is unsupported.')
    connection.emit('error', err)
    connection.emit('readyForQuery')
    return
  }
  if (this.values && !Array.isArray(this.values)) {
    const err = new Error('Query values must be an array')
    connection.emit('error', err)
    connection.emit('readyForQuery')
    return
  }
  if (this.requiresPreparation()) {
    this.prepare(connection)
  } else {
    connection.query(this.text)
  }
}

Query.prototype.hasBeenParsed = function (connection) {
  return this.name && connection.parsedStatements[this.name]
}

Query.prototype.handlePortalSuspended = function (connection) {
  this._getRows(connection, this.rows)
}

Query.prototype._getRows = function (connection, rows) {
  connection.execute({
    portal: this.portalName,
    rows: rows
  }, true)
  connection.flush()
}

Query.prototype.prepare = function (connection) {
  var self = this
  // prepared statements need sync to be called after each command
  // complete or when an error is encountered
  this.isPreparedStatement = true
  // TODO refactor this poor encapsulation
  if (!this.hasBeenParsed(connection)) {
    connection.parse({
      text: self.text,
      name: self.name,
      types: self.types
    }, true)
  }

  if (self.values) {
    self.values = self.values.map(utils.prepareValue)
  }

  // http://developer.postgresql.org/pgdocs/postgres/protocol-flow.html#PROTOCOL-FLOW-EXT-QUERY
  connection.bind({
    portal: self.portalName,
    statement: self.name,
    values: self.values,
    binary: self.binary
  }, true)

  connection.describe({
    type: 'P',
    name: self.portalName || ''
  }, true)

  this._getRows(connection, this.rows)
}

Query.prototype.handleCopyInResponse = function (connection) {
  if (this.stream) this.stream.startStreamingToConnection(connection)
  else connection.sendCopyFail('No source stream defined')
}

Query.prototype.handleCopyData = function (msg, connection) {
  var chunk = msg.chunk
  if (this.stream) {
    this.stream.handleChunk(chunk)
  }
  // if there are no stream (for example when copy to query was sent by
  // query method instead of copyTo) error will be handled
  // on copyOutResponse event, so silently ignore this error here
}
module.exports = Query
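As the `requiresPreparation` logic above shows, a query takes the extended (parse/bind/execute) path whenever it has a `name`, a `rows` limit, or parameter `values`; otherwise it is sent as a simple query. A hedged illustration of configs that take each path (query text and values are made up for the example):

```js
const Query = require('./query') // the module above

// Simple query protocol: plain text, no values
new Query('SELECT NOW()').requiresPreparation() // false

// Extended protocol: parameterized
new Query({ text: 'SELECT * FROM users WHERE id = $1', values: [42] }).requiresPreparation() // true

// Extended protocol: named statement, plan cached per connection under this name
new Query({ name: 'fetch-user', text: 'SELECT * FROM users WHERE id = $1', values: [42] }).requiresPreparation() // true
```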
lib/result.js
@@ -1,117 +0,0 @@
'use strict'
/**
 * Copyright (c) 2010-2017 Brian Carlson (brian.m.carlson@gmail.com)
 * All rights reserved.
 *
 * This source code is licensed under the MIT license found in the
 * README.md file in the root directory of this source tree.
 */

var types = require('pg-types')
var escape = require('js-string-escape')

// result object returned from query
// in the 'end' event and also
// passed as second argument to provided callback
var Result = function (rowMode) {
  this.command = null
  this.rowCount = null
  this.oid = null
  this.rows = []
  this.fields = []
  this._parsers = []
  this.RowCtor = null
  this.rowAsArray = rowMode === 'array'
  if (this.rowAsArray) {
    this.parseRow = this._parseRowAsArray
  }
}

var matchRegexp = /([A-Za-z]+) ?(\d+ )?(\d+)?/

// adds a command complete message
Result.prototype.addCommandComplete = function (msg) {
  var match
  if (msg.text) {
    // pure javascript
    match = matchRegexp.exec(msg.text)
  } else {
    // native bindings
    match = matchRegexp.exec(msg.command)
  }
  if (match) {
    this.command = match[1]
    // match 3 will only be existing on insert commands
    if (match[3]) {
      // msg.value is from native bindings
      this.rowCount = parseInt(match[3] || msg.value, 10)
      this.oid = parseInt(match[2], 10)
    } else {
      this.rowCount = parseInt(match[2], 10)
    }
  }
}

Result.prototype._parseRowAsArray = function (rowData) {
  var row = []
  for (var i = 0, len = rowData.length; i < len; i++) {
    var rawValue = rowData[i]
    if (rawValue !== null) {
      row.push(this._parsers[i](rawValue))
    } else {
      row.push(null)
    }
  }
  return row
}

// rowData is an array of text or binary values
// this turns the row into a JavaScript object
Result.prototype.parseRow = function (rowData) {
  return new this.RowCtor(this._parsers, rowData)
}

Result.prototype.addRow = function (row) {
  this.rows.push(row)
}

var inlineParser = function (fieldName, i) {
  return "\nthis['" +
    // fields containing single quotes will break
    // the evaluated javascript unless they are escaped
    // see https://github.com/brianc/node-postgres/issues/507
    // Addendum: However, we need to make sure to replace all
    // occurences of apostrophes, not just the first one.
    // See https://github.com/brianc/node-postgres/issues/934
    escape(fieldName) +
    "'] = " +
    'rowData[' + i + '] == null ? null : parsers[' + i + '](rowData[' + i + ']);'
}

Result.prototype.addFields = function (fieldDescriptions) {
  // clears field definitions
  // multiple query statements in 1 action can result in multiple sets
  // of rowDescriptions...eg: 'select NOW(); select 1::int;'
  // you need to reset the fields
  if (this.fields.length) {
    this.fields = []
    this._parsers = []
  }
  var ctorBody = ''
  for (var i = 0; i < fieldDescriptions.length; i++) {
    var desc = fieldDescriptions[i]
    this.fields.push(desc)
    var parser = this._getTypeParser(desc.dataTypeID, desc.format || 'text')
    this._parsers.push(parser)
    // this is some craziness to compile the row result parsing
    // results in ~60% speedup on large query result sets
    ctorBody += inlineParser(desc.name, i)
  }
  if (!this.rowAsArray) {
    this.RowCtor = Function('parsers', 'rowData', ctorBody)
  }
}

Result.prototype._getTypeParser = types.getTypeParser

module.exports = Result
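To make the `inlineParser`/`RowCtor` trick above concrete: for a row description with two hypothetical fields `id` and `name`, the constructor that `Function('parsers', 'rowData', ctorBody)` compiles is roughly equivalent to the following (illustrative only; the real body is assembled string by string at runtime):

```js
// Equivalent of the generated RowCtor for fields ['id', 'name']
function RowCtor(parsers, rowData) {
  this['id'] = rowData[0] == null ? null : parsers[0](rowData[0])
  this['name'] = rowData[1] == null ? null : parsers[1](rowData[1])
}
```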
lib/utils.js
@@ -1,149 +0,0 @@
'use strict'
/**
 * Copyright (c) 2010-2017 Brian Carlson (brian.m.carlson@gmail.com)
 * All rights reserved.
 *
 * This source code is licensed under the MIT license found in the
 * README.md file in the root directory of this source tree.
 */

const crypto = require('crypto')

const defaults = require('./defaults')

function escapeElement (elementRepresentation) {
  var escaped = elementRepresentation
    .replace(/\\/g, '\\\\')
    .replace(/"/g, '\\"')

  return '"' + escaped + '"'
}

// convert a JS array to a postgres array literal
// uses comma separator so won't work for types like box that use
// a different array separator.
function arrayString (val) {
  var result = '{'
  for (var i = 0; i < val.length; i++) {
    if (i > 0) {
      result = result + ','
    }
    if (val[i] === null || typeof val[i] === 'undefined') {
      result = result + 'NULL'
    } else if (Array.isArray(val[i])) {
      result = result + arrayString(val[i])
    } else if (val[i] instanceof Buffer) {
      result += '\\\\x' + val[i].toString('hex')
    } else {
      result += escapeElement(prepareValue(val[i]))
    }
  }
  result = result + '}'
  return result
}

// converts values from javascript types
// to their 'raw' counterparts for use as a postgres parameter
// note: you can override this function to provide your own conversion mechanism
// for complex types, etc...
var prepareValue = function (val, seen) {
  if (val instanceof Buffer) {
    return val
  }
  if (val instanceof Date) {
    if (defaults.parseInputDatesAsUTC) {
      return dateToStringUTC(val)
    } else {
      return dateToString(val)
    }
  }
  if (Array.isArray(val)) {
    return arrayString(val)
  }
  if (val === null || typeof val === 'undefined') {
    return null
  }
  if (typeof val === 'object') {
    return prepareObject(val, seen)
  }
  return val.toString()
}

function prepareObject (val, seen) {
  if (val && typeof val.toPostgres === 'function') {
    seen = seen || []
    if (seen.indexOf(val) !== -1) {
      throw new Error('circular reference detected while preparing "' + val + '" for query')
    }
    seen.push(val)

    return prepareValue(val.toPostgres(prepareValue), seen)
  }
  return JSON.stringify(val)
}

function pad (number, digits) {
  number = '' + number
  while (number.length < digits) { number = '0' + number }
  return number
}

function dateToString (date) {
  var offset = -date.getTimezoneOffset()
  var ret = pad(date.getFullYear(), 4) + '-' +
    pad(date.getMonth() + 1, 2) + '-' +
    pad(date.getDate(), 2) + 'T' +
    pad(date.getHours(), 2) + ':' +
    pad(date.getMinutes(), 2) + ':' +
    pad(date.getSeconds(), 2) + '.' +
    pad(date.getMilliseconds(), 3)

  if (offset < 0) {
    ret += '-'
    offset *= -1
  } else { ret += '+' }

  return ret + pad(Math.floor(offset / 60), 2) + ':' + pad(offset % 60, 2)
}

function dateToStringUTC (date) {
  var ret = pad(date.getUTCFullYear(), 4) + '-' +
    pad(date.getUTCMonth() + 1, 2) + '-' +
    pad(date.getUTCDate(), 2) + 'T' +
    pad(date.getUTCHours(), 2) + ':' +
    pad(date.getUTCMinutes(), 2) + ':' +
    pad(date.getUTCSeconds(), 2) + '.' +
    pad(date.getUTCMilliseconds(), 3)

  return ret + '+00:00'
}

function normalizeQueryConfig (config, values, callback) {
  // can take in strings or config objects
  config = (typeof (config) === 'string') ? { text: config } : config
  if (values) {
    if (typeof values === 'function') {
      config.callback = values
    } else {
      config.values = values
    }
  }
  if (callback) {
    config.callback = callback
  }
  return config
}

const md5 = function (string) {
  return crypto.createHash('md5').update(string, 'utf-8').digest('hex')
}

module.exports = {
  prepareValue: function prepareValueWrapper (value) {
    // this ensures that extra arguments do not get passed into prepareValue
    // by accident, eg: from calling values.map(utils.prepareValue)
    return prepareValue(value)
  },
  normalizeQueryConfig: normalizeQueryConfig,
  md5: md5
}
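A few hedged examples of what `prepareValue` and `arrayString` above produce for common JavaScript inputs (scalar array elements go through `escapeElement`, so they come back double-quoted; the exact Date output depends on the local timezone unless `defaults.parseInputDatesAsUTC` is set):

```js
const { prepareValue } = require('./utils') // the module above

prepareValue([1, 2, 3])          // '{"1","2","3"}'
prepareValue([['a'], ['b,c']])   // '{{"a"},{"b,c"}}'
prepareValue(null)               // null
prepareValue({ hello: 'world' }) // '{"hello":"world"}'
prepareValue(new Date(Date.UTC(2017, 0, 1))) // e.g. '2017-01-01T00:00:00.000+00:00' with parseInputDatesAsUTC
```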
package.json
@@ -1,49 +1,40 @@

Before (pg 7.2.0, single-package layout):

{
  "name": "pg",
  "version": "7.2.0",
  "description": "PostgreSQL client - pure javascript & libpq with the same API",
  "keywords": [
    "postgres",
    "pg",
    "libpq",
    "postgre",
    "database",
    "rdbms"
  ],
  "homepage": "http://github.com/brianc/node-postgres",
  "repository": {
    "type": "git",
    "url": "git://github.com/brianc/node-postgres.git"
  },
  "author": "Brian Carlson <brian.m.carlson@gmail.com>",
  "main": "./lib",
  "dependencies": {
    "buffer-writer": "1.0.1",
    "packet-reader": "0.3.1",
    "js-string-escape": "1.0.1",
    "pg-connection-string": "0.1.3",
    "pg-pool": "~2.0.3",
    "pg-types": "~1.12.1",
    "pgpass": "1.x",
    "semver": "4.3.2"
  },
  "devDependencies": {
    "async": "0.9.0",
    "co": "4.6.0",
    "eslint": "4.2.0",
    "eslint-config-standard": "10.2.1",
    "eslint-plugin-import": "2.7.0",
    "eslint-plugin-node": "5.1.0",
    "eslint-plugin-promise": "3.5.0",
    "eslint-plugin-standard": "3.0.1",
    "pg-copy-streams": "0.3.0"
  },
  "minNativeVersion": "2.0.0",
  "scripts": {
    "test": "make test-all"
  },
  "license": "MIT",
  "engines": {
    "node": ">= 4.5.0"
  }
}

After (node-postgres monorepo root):

{
  "name": "node-postgres",
  "description": "node postgres monorepo",
  "main": "index.js",
  "private": true,
  "repository": "git@github.com:brianc/node-postgres.git",
  "author": "Brian M. Carlson <brian.m.carlson@gmail.com>",
  "license": "MIT",
  "workspaces": [
    "packages/*"
  ],
  "scripts": {
    "test": "yarn lerna exec --concurrency 1 yarn test",
    "build": "tsc --build",
    "build:watch": "tsc --build --watch",
    "docs:build": "cd docs && yarn build",
    "docs:start": "cd docs && yarn start",
    "pretest": "yarn build",
    "prepublish": "yarn build",
    "lint": "eslint --cache 'packages/**/*.{js,ts,tsx}'"
  },
  "devDependencies": {
    "@typescript-eslint/eslint-plugin": "^7.0.0",
    "@typescript-eslint/parser": "^6.17.0",
    "eslint": "^8.56.0",
    "eslint-config-prettier": "^10.1.2",
    "eslint-plugin-node": "^11.1.0",
    "eslint-plugin-prettier": "^5.1.2",
    "lerna": "^3.19.0",
    "prettier": "3.0.3",
    "typescript": "^4.0.3"
  },
  "prettier": {
    "semi": false,
    "printWidth": 120,
    "arrowParens": "always",
    "trailingComma": "es5",
    "singleQuote": true
  }
}
packages/pg-bundler-test/esbuild-cloudflare.config.mjs (new file)
@@ -0,0 +1,8 @@
import * as esbuild from 'esbuild'

await esbuild.build({
  entryPoints: ['./src/index.mjs'],
  bundle: true,
  outfile: './dist/esbuild-cloudflare.js',
  conditions: ['import', 'workerd'],
})

packages/pg-bundler-test/esbuild-empty.config.mjs (new file)
@@ -0,0 +1,7 @@
import * as esbuild from 'esbuild'

await esbuild.build({
  entryPoints: ['./src/index.mjs'],
  bundle: true,
  outfile: './dist/esbuild-empty.js',
})

packages/pg-bundler-test/package.json (new file)
@@ -0,0 +1,25 @@
{
  "name": "pg-bundler-test",
  "version": "0.0.2",
  "description": "Test bundlers with pg-cloudflare, https://github.com/brianc/node-postgres/issues/3452",
  "license": "MIT",
  "private": true,
  "type": "module",
  "devDependencies": {
    "@rollup/plugin-commonjs": "^28.0.3",
    "@rollup/plugin-node-resolve": "^16.0.1",
    "esbuild": "^0.25.5",
    "pg-cloudflare": "^1.2.7",
    "rollup": "^4.41.1",
    "vite": "^6.3.5",
    "webpack": "^5.99.9",
    "webpack-cli": "^6.0.1"
  },
  "scripts": {
    "test": "yarn webpack && yarn rollup && yarn vite && yarn esbuild",
    "webpack": "webpack --config webpack-empty.config.mjs && webpack --config webpack-cloudflare.config.mjs",
    "rollup": "rollup --config rollup-empty.config.mjs --failAfterWarnings && rollup --config rollup-cloudflare.config.mjs --failAfterWarnings",
    "vite": "[ $(node --version | sed 's/v//' | cut -d'.' -f1) -ge 18 ] && vite build --config vite-empty.config.mjs && vite build --config vite-cloudflare.config.mjs || echo 'Skip Vite test'",
    "esbuild": "node esbuild-empty.config.mjs && node esbuild-cloudflare.config.mjs"
  }
}

packages/pg-bundler-test/rollup-cloudflare.config.mjs (new file)
@@ -0,0 +1,13 @@
import { defineConfig } from 'rollup'
import { nodeResolve } from '@rollup/plugin-node-resolve'
import commonjs from '@rollup/plugin-commonjs'

export default defineConfig({
  input: './src/index.mjs',
  output: {
    file: 'dist/rollup-cloudflare.js',
    format: 'es',
  },
  plugins: [nodeResolve({ exportConditions: ['import', 'workerd'], preferBuiltins: true }), commonjs()],
  external: ['cloudflare:sockets'],
})

packages/pg-bundler-test/rollup-empty.config.mjs (new file)
@@ -0,0 +1,12 @@
import { defineConfig } from 'rollup'
import { nodeResolve } from '@rollup/plugin-node-resolve'
import commonjs from '@rollup/plugin-commonjs'

export default defineConfig({
  input: './src/index.mjs',
  output: {
    file: 'dist/rollup-empty.js',
    format: 'es',
  },
  plugins: [nodeResolve(), commonjs()],
})

packages/pg-bundler-test/src/index.mjs (new file)
@@ -0,0 +1 @@
import 'pg-cloudflare'

packages/pg-bundler-test/vite-cloudflare.config.mjs (new file)
@@ -0,0 +1,20 @@
import { defineConfig } from 'vite'
import commonjs from '@rollup/plugin-commonjs'

export default defineConfig({
  build: {
    emptyOutDir: false,
    lib: {
      entry: './src/index.mjs',
      fileName: 'vite-cloudflare',
      formats: ['es'],
    },
    rollupOptions: {
      external: ['cloudflare:sockets'],
    },
  },
  resolve: {
    conditions: ['import', 'workerd'],
  },
  plugins: [commonjs()],
})

packages/pg-bundler-test/vite-empty.config.mjs (new file)
@@ -0,0 +1,12 @@
import { defineConfig } from 'vite'

export default defineConfig({
  build: {
    emptyOutDir: false,
    lib: {
      entry: './src/index.mjs',
      fileName: 'vite-empty',
      formats: ['es'],
    },
  },
})

packages/pg-bundler-test/webpack-cloudflare.config.mjs (new file)
@@ -0,0 +1,16 @@
import webpack from 'webpack'

export default {
  mode: 'production',
  entry: './src/index.mjs',
  output: {
    filename: 'webpack-cloudflare.js',
  },
  resolve: { conditionNames: ['import', 'workerd'] },
  plugins: [
    // ignore cloudflare:sockets imports
    new webpack.IgnorePlugin({
      resourceRegExp: /^cloudflare:sockets$/,
    }),
  ],
}

packages/pg-bundler-test/webpack-empty.config.mjs (new file)
@@ -0,0 +1,7 @@
export default {
  mode: 'production',
  entry: './src/index.mjs',
  output: {
    filename: 'webpack-empty.js',
  },
}
packages/pg-cloudflare/README.md (new file)
@@ -0,0 +1,112 @@
# pg-cloudflare

`pg-cloudflare` makes it easier to take an existing package that relies on `tls` and `net`, and make it work in environments where only `connect()` is supported, such as Cloudflare Workers.

`pg-cloudflare` wraps `connect()`, the [TCP Socket API](https://github.com/wintercg/proposal-sockets-api) proposed within WinterCG, and implemented in [Cloudflare Workers](https://developers.cloudflare.com/workers/runtime-apis/tcp-sockets/), and exposes an interface with methods similar to what the `net` and `tls` modules in Node.js expose. (ex: `net.connect(path[, options][, callback])`). This minimizes the number of changes needed in order to make an existing package work across JavaScript runtimes.

## Installation

```
npm i --save-dev pg-cloudflare
```

The package uses conditional exports to support bundlers that don't know about `cloudflare:sockets`, so the consumer code by default imports an empty file. To enable the package, resolve to the `cloudflare` condition in your bundler's config. For example:

- `webpack.config.js`
```js
export default {
  ...,
  resolve: { conditionNames: [..., "workerd"] },
  plugins: [
    // ignore cloudflare:sockets imports
    new webpack.IgnorePlugin({
      resourceRegExp: /^cloudflare:sockets$/,
    }),
  ],
}
```
- `vite.config.js`

> [!NOTE]
> If you are using the [Cloudflare Vite plugin](https://www.npmjs.com/package/@cloudflare/vite-plugin) then the following configuration is not necessary.

```js
export default defineConfig({
  ...,
  resolve: {
    conditions: [..., "workerd"],
  },
  build: {
    ...,
    // don't try to bundle cloudflare:sockets
    rollupOptions: {
      external: [..., 'cloudflare:sockets'],
    },
  },
})
```

- `rollup.config.js`
```js
export default defineConfig({
  ...,
  plugins: [..., nodeResolve({ exportConditions: [..., 'workerd'] })],
  // don't try to bundle cloudflare:sockets
  external: [..., 'cloudflare:sockets'],
})
```
- `esbuild.config.js`
```js
await esbuild.build({
  ...,
  conditions: [..., 'workerd'],
})
```

The concrete examples can be found in `packages/pg-bundler-test`.

## How to use conditionally, in non-Node.js environments

As implemented in `pg` [here](https://github.com/brianc/node-postgres/commit/07553428e9c0eacf761a5d4541a3300ff7859578#diff-34588ad868ebcb232660aba7ee6a99d1e02f4bc93f73497d2688c3f074e60533R5-R13), a typical use case might look as follows, where in a Node.js environment the `net` module is used, while in a non-Node.js environment, where `net` is unavailable, `pg-cloudflare` is used instead, providing an equivalent interface:

```js
module.exports.getStream = function getStream(ssl = false) {
  const net = require('net')
  if (typeof net.Socket === 'function') {
    return net.Socket()
  }
  const { CloudflareSocket } = require('pg-cloudflare')
  return new CloudflareSocket(ssl)
}
```

## Node.js implementation of the Socket API proposal

If you're looking for a way to rely on `connect()` as the interface you use to interact with raw sockets, but need this interface to be available in a Node.js environment, [`@arrowood.dev/socket`](https://github.com/Ethan-Arrowood/socket) provides a Node.js implementation of the Socket API.

### license

The MIT License (MIT)

Copyright (c) 2023 Brian M. Carlson

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
packages/pg-cloudflare/esm/index.mjs (new file)
@@ -0,0 +1,3 @@
import cf from '../dist/index.js'

export const CloudflareSocket = cf.CloudflareSocket

packages/pg-cloudflare/package.json (new file)
@@ -0,0 +1,38 @@
{
  "name": "pg-cloudflare",
  "version": "1.2.7",
  "description": "A socket implementation that can run on Cloudflare Workers using native TCP connections.",
  "main": "dist/index.js",
  "types": "dist/index.d.ts",
  "license": "MIT",
  "devDependencies": {
    "ts-node": "^8.5.4",
    "typescript": "^4.0.3"
  },
  "exports": {
    ".": {
      "workerd": {
        "import": "./esm/index.mjs",
        "require": "./dist/index.js"
      },
      "default": "./dist/empty.js"
    },
    "./package.json": "./package.json"
  },
  "scripts": {
    "build": "tsc",
    "build:watch": "tsc --watch",
    "prepublish": "yarn build",
    "test": "echo e2e test in pg package"
  },
  "repository": {
    "type": "git",
    "url": "git://github.com/brianc/node-postgres.git",
    "directory": "packages/pg-cloudflare"
  },
  "files": [
    "/dist/*{js,ts,map}",
    "/src",
    "/esm"
  ]
}
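The `exports` map above is what the bundler configurations earlier in this diff are targeting: only the `workerd` condition resolves to the real implementation, everything else gets the empty placeholder. A sketch of how resolution behaves under the two conditions:

```js
// With the 'workerd' condition enabled (e.g. resolve.conditions = ['import', 'workerd']):
import { CloudflareSocket } from 'pg-cloudflare' // -> ./esm/index.mjs -> dist/index.js

// Without it (plain Node.js or a default bundler setup):
import empty from 'pg-cloudflare' // -> ./dist/empty.js, a placeholder module that exports {}
```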
3
packages/pg-cloudflare/src/empty.ts
Normal file
3
packages/pg-cloudflare/src/empty.ts
Normal file
@ -0,0 +1,3 @@
|
|||||||
|
// This is an empty module that is served up when outside of a workerd environment
|
||||||
|
// See the `exports` field in package.json
|
||||||
|
export default {}
|
||||||
packages/pg-cloudflare/src/index.ts (new file, 166 lines)
@@ -0,0 +1,166 @@
import { SocketOptions, Socket, TlsOptions } from 'cloudflare:sockets'
import { EventEmitter } from 'events'

/**
 * Wrapper around the Cloudflare built-in socket that can be used by the `Connection`.
 */
export class CloudflareSocket extends EventEmitter {
  writable = false
  destroyed = false

  private _upgrading = false
  private _upgraded = false
  private _cfSocket: Socket | null = null
  private _cfWriter: WritableStreamDefaultWriter | null = null
  private _cfReader: ReadableStreamDefaultReader | null = null

  constructor(readonly ssl: boolean) {
    super()
  }

  setNoDelay() {
    return this
  }
  setKeepAlive() {
    return this
  }
  ref() {
    return this
  }
  unref() {
    return this
  }

  async connect(port: number, host: string, connectListener?: (...args: unknown[]) => void) {
    try {
      log('connecting')
      if (connectListener) this.once('connect', connectListener)

      const options: SocketOptions = this.ssl ? { secureTransport: 'starttls' } : {}
      const mod = await import('cloudflare:sockets')
      const connect = mod.connect
      this._cfSocket = connect(`${host}:${port}`, options)
      this._cfWriter = this._cfSocket.writable.getWriter()
      this._addClosedHandler()

      this._cfReader = this._cfSocket.readable.getReader()
      if (this.ssl) {
        this._listenOnce().catch((e) => this.emit('error', e))
      } else {
        this._listen().catch((e) => this.emit('error', e))
      }

      await this._cfWriter!.ready
      log('socket ready')
      this.writable = true
      this.emit('connect')

      return this
    } catch (e) {
      this.emit('error', e)
    }
  }

  async _listen() {
    // eslint-disable-next-line no-constant-condition
    while (true) {
      log('awaiting receive from CF socket')
      const { done, value } = await this._cfReader!.read()
      log('CF socket received:', done, value)
      if (done) {
        log('done')
        break
      }
      this.emit('data', Buffer.from(value))
    }
  }

  async _listenOnce() {
    log('awaiting first receive from CF socket')
    const { done, value } = await this._cfReader!.read()
    log('First CF socket received:', done, value)
    this.emit('data', Buffer.from(value))
  }

  write(
    data: Uint8Array | string,
    encoding: BufferEncoding = 'utf8',
    callback: (...args: unknown[]) => void = () => {}
  ) {
    if (data.length === 0) return callback()
    if (typeof data === 'string') data = Buffer.from(data, encoding)

    log('sending data direct:', data)
    this._cfWriter!.write(data).then(
      () => {
        log('data sent')
        callback()
      },
      (err) => {
        log('send error', err)
        callback(err)
      }
    )
    return true
  }

  end(data = Buffer.alloc(0), encoding: BufferEncoding = 'utf8', callback: (...args: unknown[]) => void = () => {}) {
    log('ending CF socket')
    this.write(data, encoding, (err) => {
      this._cfSocket!.close()
      if (callback) callback(err)
    })
    return this
  }

  destroy(reason: string) {
    log('destroying CF socket', reason)
    this.destroyed = true
    return this.end()
  }

  startTls(options: TlsOptions) {
    if (this._upgraded) {
      // Don't try to upgrade again.
      this.emit('error', 'Cannot call `startTls()` more than once on a socket')
      return
    }
    this._cfWriter!.releaseLock()
    this._cfReader!.releaseLock()
    this._upgrading = true
    this._cfSocket = this._cfSocket!.startTls(options)
    this._cfWriter = this._cfSocket.writable.getWriter()
    this._cfReader = this._cfSocket.readable.getReader()
    this._addClosedHandler()
    this._listen().catch((e) => this.emit('error', e))
  }

  _addClosedHandler() {
    this._cfSocket!.closed.then(() => {
      if (!this._upgrading) {
        log('CF socket closed')
        this._cfSocket = null
        this.emit('close')
      } else {
        this._upgrading = false
        this._upgraded = true
      }
    }).catch((e) => this.emit('error', e))
  }
}

const debug = false

function dump(data: unknown) {
  if (data instanceof Uint8Array || data instanceof ArrayBuffer) {
    const hex = Buffer.from(data).toString('hex')
    const str = new TextDecoder().decode(data)
    return `\n>>> STR: "${str.replace(/\n/g, '\\n')}"\n>>> HEX: ${hex}\n`
  } else {
    return data
  }
}

function log(...args: unknown[]) {
  debug && console.log(...args.map(dump))
}
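Reviewer note (not part of the diff): a minimal sketch of how the CloudflareSocket above might be driven from a Workers-style handler. The socket API (connect, write, end, 'data'/'error' events) is taken from the file; the handler shape, import path, host, and port are assumptions for illustration only.

  // Assumes a Cloudflare Workers runtime where `cloudflare:sockets` is available.
  import { CloudflareSocket } from 'pg-cloudflare'

  export default {
    async fetch() {
      const socket = new CloudflareSocket(false) // plain TCP; pass true to upgrade via startTls later
      socket.on('data', (chunk) => console.log('received', chunk.length, 'bytes'))
      socket.on('error', (err) => console.error('socket error', err))
      await socket.connect(5432, 'db.example.com')
      socket.write('hello', 'utf8', () => socket.end())
      return new Response('ok')
    },
  }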
packages/pg-cloudflare/src/types.d.ts (new file, vendored, 25 lines)
@@ -0,0 +1,25 @@
declare module 'cloudflare:sockets' {
  export class Socket {
    public readonly readable: any
    public readonly writable: any
    public readonly closed: Promise<void>
    public close(): Promise<void>
    public startTls(options: TlsOptions): Socket
  }

  export type TlsOptions = {
    expectedServerHostname?: string
  }

  export type SocketAddress = {
    hostname: string
    port: number
  }

  export type SocketOptions = {
    secureTransport?: 'off' | 'on' | 'starttls'
    allowHalfOpen?: boolean
  }

  export function connect(address: string | SocketAddress, options?: SocketOptions): Socket
}
packages/pg-cloudflare/tsconfig.json (new file, 25 lines)
@@ -0,0 +1,25 @@
{
  "compilerOptions": {
    "module": "node16",
    "esModuleInterop": true,
    "allowSyntheticDefaultImports": true,
    "strict": true,
    "target": "es2020",
    "noImplicitAny": true,
    "moduleResolution": "node16",
    "sourceMap": true,
    "outDir": "dist",
    "incremental": true,
    "baseUrl": ".",
    "declaration": true,
    "paths": {
      "*": [
        "node_modules/*",
        "src/types/*"
      ]
    }
  },
  "include": [
    "src/**/*"
  ]
}
packages/pg-connection-string/.coveralls.yml (new file, 2 lines)
@@ -0,0 +1,2 @@
service_name: travis-pro
repo_token: 5F6dODinz9L9uFR6HatKmtsYDoV1A5S2N
packages/pg-connection-string/.gitignore (new file, vendored, 30 lines)
@@ -0,0 +1,30 @@
# Logs
logs
*.log

# Runtime data
pids
*.pid
*.seed

# Directory for instrumented libs generated by jscoverage/JSCover
lib-cov

# Coverage directory used by tools like istanbul
coverage
.nyc_output

# Grunt intermediate storage (http://gruntjs.com/creating-plugins#storing-task-files)
.grunt

# Compiled binary addons (http://nodejs.org/api/addons.html)
build/Release

# Dependency directory
# Deployed apps should consider commenting this line out:
# see https://npmjs.org/doc/faq.html#Should-I-check-my-node_modules-folder-into-git
node_modules
package-lock.json

# TypeScript output directory
dist
packages/pg-connection-string/.mocharc.json (new file, 4 lines)
@@ -0,0 +1,4 @@
{
  "extension": ["js", "ts"],
  "require": "tsx"
}
packages/pg-connection-string/LICENSE (new file, 21 lines)
@@ -0,0 +1,21 @@
The MIT License (MIT)

Copyright (c) 2014 Iced Development

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
packages/pg-connection-string/README.md (new file, 105 lines)
@@ -0,0 +1,105 @@
pg-connection-string
====================

[](https://nodei.co/npm/pg-connection-string/)

Functions for dealing with a PostgreSQL connection string

`parse` method taken from [node-postgres](https://github.com/brianc/node-postgres.git)
Copyright (c) 2010-2014 Brian Carlson (brian.m.carlson@gmail.com)
MIT License

## Usage

```js
const parse = require('pg-connection-string').parse;

const config = parse('postgres://someuser:somepassword@somehost:381/somedatabase')
```

The resulting config contains a subset of the following properties:

* `user` - User with which to authenticate to the server
* `password` - Corresponding password
* `host` - Postgres server hostname or, for UNIX domain sockets, the socket filename
* `port` - port on which to connect
* `database` - Database name within the server
* `client_encoding` - string encoding the client will use
* `ssl`, either a boolean or an object with properties
  * `rejectUnauthorized`
  * `cert`
  * `key`
  * `ca`
* any other query parameters (for example, `application_name`) are preserved intact.

### ClientConfig Compatibility for TypeScript

The pg-connection-string `ConnectionOptions` interface is not compatible with the `ClientConfig` interface that [pg.Client](https://node-postgres.com/apis/client) expects. To remedy this, use the `parseIntoClientConfig` function instead of `parse`:

```ts
import { ClientConfig } from 'pg';
import { parseIntoClientConfig } from 'pg-connection-string';

const config: ClientConfig = parseIntoClientConfig('postgres://someuser:somepassword@somehost:381/somedatabase')
```

You can also use `toClientConfig` to convert an existing `ConnectionOptions` interface into a `ClientConfig` interface:

```ts
import { ClientConfig } from 'pg';
import { parse, toClientConfig } from 'pg-connection-string';

const config = parse('postgres://someuser:somepassword@somehost:381/somedatabase')
const clientConfig: ClientConfig = toClientConfig(config)
```

## Connection Strings

The short summary of acceptable URLs is:

* `socket:<path>?<query>` - UNIX domain socket
* `postgres://<user>:<password>@<host>:<port>/<database>?<query>` - TCP connection

But see below for more details.

### UNIX Domain Sockets

When user and password are not given, the socket path follows `socket:`, as in `socket:/var/run/pgsql`.
This form can be shortened to just a path: `/var/run/pgsql`.

When user and password are given, they are included in the typical URL positions, with an empty `host`, as in `socket://user:pass@/var/run/pgsql`.

Query parameters follow a `?` character, including the following special query parameters:

* `db=<database>` - sets the database name (urlencoded)
* `encoding=<encoding>` - sets the `client_encoding` property

### TCP Connections

TCP connections to the Postgres server are indicated with `pg:` or `postgres:` schemes (in fact, any scheme but `socket:` is accepted).
If username and password are included, they should be urlencoded.
The database name, however, should *not* be urlencoded.

Query parameters follow a `?` character, including the following special query parameters:

* `host=<host>` - sets `host` property, overriding the URL's host
* `encoding=<encoding>` - sets the `client_encoding` property
* `ssl=1`, `ssl=true`, `ssl=0`, `ssl=false` - sets `ssl` to true or false, accordingly
* `uselibpqcompat=true` - use libpq semantics
* `sslmode=<sslmode>` when `uselibpqcompat=true` is not set
  * `sslmode=disable` - sets `ssl` to false
  * `sslmode=no-verify` - sets `ssl` to `{ rejectUnauthorized: false }`
  * `sslmode=prefer`, `sslmode=require`, `sslmode=verify-ca`, `sslmode=verify-full` - sets `ssl` to true
* `sslmode=<sslmode>` when `uselibpqcompat=true`
  * `sslmode=disable` - sets `ssl` to false
  * `sslmode=prefer` - sets `ssl` to `{ rejectUnauthorized: false }`
  * `sslmode=require` - sets `ssl` to `{ rejectUnauthorized: false }` unless `sslrootcert` is specified, in which case it behaves like `verify-ca`
  * `sslmode=verify-ca` - sets `ssl` to `{ checkServerIdentity: no-op }` (verify CA, but not server identity). This verifies the presented certificate against the effective CA specified in sslrootcert.
  * `sslmode=verify-full` - sets `ssl` to `{}` (verify CA and server identity)
* `sslcert=<filename>` - reads data from the given file and includes the result as `ssl.cert`
* `sslkey=<filename>` - reads data from the given file and includes the result as `ssl.key`
* `sslrootcert=<filename>` - reads data from the given file and includes the result as `ssl.ca`

A bare relative URL, such as `salesdata`, will indicate a database name while leaving other properties empty.

> [!CAUTION]
> Choosing an sslmode other than verify-full has serious security implications. Please read https://www.postgresql.org/docs/current/libpq-ssl.html#LIBPQ-SSL-SSLMODE-STATEMENTS to understand the trade-offs.
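Reviewer note (not part of the diff): a quick sketch of what the sslmode rules in the README above translate to in practice. The output shapes follow the README's own table and the parser below; the URLs are made up for illustration.

  const { parse } = require('pg-connection-string')

  // Default (non-libpq) semantics described above
  parse('postgres://user:pass@db.example.com/app?sslmode=no-verify')
  // => { ..., ssl: { rejectUnauthorized: false } }

  // Opt in to libpq-compatible semantics via the query string
  parse('postgres://user:pass@db.example.com/app?uselibpqcompat=true&sslmode=require')
  // => { ..., ssl: { rejectUnauthorized: false } } (no sslrootcert given, so no CA verification)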
packages/pg-connection-string/esm/index.mjs (new file, 8 lines)
@@ -0,0 +1,8 @@
// ESM wrapper for pg-connection-string
import connectionString from '../index.js'

// Re-export the parse function
export default connectionString.parse
export const parse = connectionString.parse
export const toClientConfig = connectionString.toClientConfig
export const parseIntoClientConfig = connectionString.parseIntoClientConfig
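Reviewer note (not part of the diff): with this wrapper in place the package can be consumed from ESM as well as CJS through the `exports` map in package.json. A minimal sketch, assuming only the re-exports shown above.

  // ESM consumers
  import { parse } from 'pg-connection-string'
  // CJS consumers keep working unchanged:
  // const { parse } = require('pg-connection-string')

  const config = parse('postgres://user:pass@localhost:5432/mydb')
  console.log(config.host, config.port, config.database)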
packages/pg-connection-string/index.d.ts (new file, vendored, 36 lines)
@@ -0,0 +1,36 @@
import { ClientConfig } from 'pg'

export function parse(connectionString: string, options?: Options): ConnectionOptions

export interface Options {
  // Use libpq semantics when interpreting the connection string
  useLibpqCompat?: boolean
}

interface SSLConfig {
  ca?: string
  cert?: string | null
  key?: string
  rejectUnauthorized?: boolean
}

export interface ConnectionOptions {
  host: string | null
  password?: string
  user?: string
  port?: string | null
  database: string | null | undefined
  client_encoding?: string
  ssl?: boolean | string | SSLConfig

  application_name?: string
  fallback_application_name?: string
  options?: string
  keepalives?: number

  // We allow any other options to be passed through
  [key: string]: unknown
}

export function toClientConfig(config: ConnectionOptions): ClientConfig
export function parseIntoClientConfig(connectionString: string): ClientConfig
packages/pg-connection-string/index.js (new file, 233 lines)
@@ -0,0 +1,233 @@
'use strict'

const { emitWarning } = require('node:process')

//Parse method copied from https://github.com/brianc/node-postgres
//Copyright (c) 2010-2014 Brian Carlson (brian.m.carlson@gmail.com)
//MIT License

//parses a connection string
function parse(str, options = {}) {
  //unix socket
  if (str.charAt(0) === '/') {
    const config = str.split(' ')
    return { host: config[0], database: config[1] }
  }

  // Check for empty host in URL

  const config = {}
  let result
  let dummyHost = false
  if (/ |%[^a-f0-9]|%[a-f0-9][^a-f0-9]/i.test(str)) {
    // Ensure spaces are encoded as %20
    str = encodeURI(str).replace(/%25(\d\d)/g, '%$1')
  }

  try {
    try {
      result = new URL(str, 'postgres://base')
    } catch (e) {
      // The URL is invalid so try again with a dummy host
      result = new URL(str.replace('@/', '@___DUMMY___/'), 'postgres://base')
      dummyHost = true
    }
  } catch (err) {
    // Remove the input from the error message to avoid leaking sensitive information
    err.input && (err.input = '*****REDACTED*****')
    throw err
  }

  // We'd like to use Object.fromEntries() here but Node.js 10 does not support it
  for (const entry of result.searchParams.entries()) {
    config[entry[0]] = entry[1]
  }

  config.user = config.user || decodeURIComponent(result.username)
  config.password = config.password || decodeURIComponent(result.password)

  if (result.protocol == 'socket:') {
    config.host = decodeURI(result.pathname)
    config.database = result.searchParams.get('db')
    config.client_encoding = result.searchParams.get('encoding')
    return config
  }
  const hostname = dummyHost ? '' : result.hostname
  if (!config.host) {
    // Only set the host if there is no equivalent query param.
    config.host = decodeURIComponent(hostname)
  } else if (hostname && /^%2f/i.test(hostname)) {
    // Only prepend the hostname to the pathname if it is not a URL encoded Unix socket host.
    result.pathname = hostname + result.pathname
  }
  if (!config.port) {
    // Only set the port if there is no equivalent query param.
    config.port = result.port
  }

  const pathname = result.pathname.slice(1) || null
  config.database = pathname ? decodeURI(pathname) : null

  if (config.ssl === 'true' || config.ssl === '1') {
    config.ssl = true
  }

  if (config.ssl === '0') {
    config.ssl = false
  }

  if (config.sslcert || config.sslkey || config.sslrootcert || config.sslmode) {
    config.ssl = {}
  }

  // Only try to load fs if we expect to read from the disk
  const fs = config.sslcert || config.sslkey || config.sslrootcert ? require('fs') : null

  if (config.sslcert) {
    config.ssl.cert = fs.readFileSync(config.sslcert).toString()
  }

  if (config.sslkey) {
    config.ssl.key = fs.readFileSync(config.sslkey).toString()
  }

  if (config.sslrootcert) {
    config.ssl.ca = fs.readFileSync(config.sslrootcert).toString()
  }

  if (options.useLibpqCompat && config.uselibpqcompat) {
    throw new Error('Both useLibpqCompat and uselibpqcompat are set. Please use only one of them.')
  }

  if (config.uselibpqcompat === 'true' || options.useLibpqCompat) {
    switch (config.sslmode) {
      case 'disable': {
        config.ssl = false
        break
      }
      case 'prefer': {
        config.ssl.rejectUnauthorized = false
        break
      }
      case 'require': {
        if (config.sslrootcert) {
          // If a root CA is specified, behavior of `sslmode=require` will be the same as that of `verify-ca`
          config.ssl.checkServerIdentity = function () {}
        } else {
          config.ssl.rejectUnauthorized = false
        }
        break
      }
      case 'verify-ca': {
        if (!config.ssl.ca) {
          throw new Error(
            'SECURITY WARNING: Using sslmode=verify-ca requires specifying a CA with sslrootcert. If a public CA is used, verify-ca allows connections to a server that somebody else may have registered with the CA, making you vulnerable to Man-in-the-Middle attacks. Either specify a custom CA certificate with sslrootcert parameter or use sslmode=verify-full for proper security.'
          )
        }
        config.ssl.checkServerIdentity = function () {}
        break
      }
      case 'verify-full': {
        break
      }
    }
  } else {
    switch (config.sslmode) {
      case 'disable': {
        config.ssl = false
        break
      }
      case 'prefer':
      case 'require':
      case 'verify-ca':
      case 'verify-full': {
        if (config.sslmode !== 'verify-full') {
          deprecatedSslModeWarning(config.sslmode)
        }
        break
      }
      case 'no-verify': {
        config.ssl.rejectUnauthorized = false
        break
      }
    }
  }

  return config
}

// convert pg-connection-string ssl config to a ClientConfig.ConnectionOptions
function toConnectionOptions(sslConfig) {
  const connectionOptions = Object.entries(sslConfig).reduce((c, [key, value]) => {
    // we explicitly check for undefined and null instead of `if (value)` because some
    // options accept falsy values. Example: `ssl.rejectUnauthorized = false`
    if (value !== undefined && value !== null) {
      c[key] = value
    }

    return c
  }, {})

  return connectionOptions
}

// convert pg-connection-string config to a ClientConfig
function toClientConfig(config) {
  const poolConfig = Object.entries(config).reduce((c, [key, value]) => {
    if (key === 'ssl') {
      const sslConfig = value

      if (typeof sslConfig === 'boolean') {
        c[key] = sslConfig
      }

      if (typeof sslConfig === 'object') {
        c[key] = toConnectionOptions(sslConfig)
      }
    } else if (value !== undefined && value !== null) {
      if (key === 'port') {
        // when port is not specified, it is converted into an empty string
        // we want to avoid NaN or empty string as a values in ClientConfig
        if (value !== '') {
          const v = parseInt(value, 10)
          if (isNaN(v)) {
            throw new Error(`Invalid ${key}: ${value}`)
          }

          c[key] = v
        }
      } else {
        c[key] = value
      }
    }

    return c
  }, {})

  return poolConfig
}

// parses a connection string into ClientConfig
function parseIntoClientConfig(str) {
  return toClientConfig(parse(str))
}

function deprecatedSslModeWarning(sslmode) {
  if (!deprecatedSslModeWarning.warned) {
    deprecatedSslModeWarning.warned = true
    emitWarning(`SECURITY WARNING: The SSL modes 'prefer', 'require', and 'verify-ca' are treated as aliases for 'verify-full'.
In the next major version (pg-connection-string v3.0.0 and pg v9.0.0), these modes will adopt standard libpq semantics, which have weaker security guarantees.

To prepare for this change:
- If you want the current behavior, explicitly use 'sslmode=verify-full'
- If you want libpq compatibility now, use 'uselibpqcompat=true&sslmode=${sslmode}'

See https://www.postgresql.org/docs/current/libpq-ssl.html for libpq SSL mode definitions.`)
  }
}

module.exports = parse

parse.parse = parse
parse.toClientConfig = toClientConfig
parse.parseIntoClientConfig = parseIntoClientConfig
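Reviewer note (not part of the diff): a minimal sketch of the intended round trip from a connection string to a live client, using only the functions exported above and the standard pg.Client API; the URL is made up for illustration.

  const { parseIntoClientConfig } = require('pg-connection-string')
  const { Client } = require('pg')

  async function main() {
    // parseIntoClientConfig is toClientConfig(parse(str)), so port and ssl
    // are already coerced into the shapes pg.Client expects
    const client = new Client(parseIntoClientConfig('postgres://user:pass@localhost:5432/mydb'))
    await client.connect()
    const { rows } = await client.query('SELECT now()')
    console.log(rows[0])
    await client.end()
  }

  main().catch(console.error)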
packages/pg-connection-string/package.json (new file, 51 lines)
@@ -0,0 +1,51 @@
{
  "name": "pg-connection-string",
  "version": "2.9.1",
  "description": "Functions for dealing with a PostgreSQL connection string",
  "main": "./index.js",
  "types": "./index.d.ts",
  "exports": {
    ".": {
      "types": "./index.d.ts",
      "import": "./esm/index.mjs",
      "require": "./index.js",
      "default": "./index.js"
    }
  },
  "scripts": {
    "test": "nyc --reporter=lcov mocha && npm run check-coverage",
    "check-coverage": "nyc check-coverage --statements 100 --branches 100 --lines 100 --functions 100"
  },
  "repository": {
    "type": "git",
    "url": "git://github.com/brianc/node-postgres.git",
    "directory": "packages/pg-connection-string"
  },
  "keywords": [
    "pg",
    "connection",
    "string",
    "parse"
  ],
  "author": "Blaine Bublitz <blaine@iceddev.com> (http://iceddev.com/)",
  "license": "MIT",
  "bugs": {
    "url": "https://github.com/brianc/node-postgres/issues"
  },
  "homepage": "https://github.com/brianc/node-postgres/tree/master/packages/pg-connection-string",
  "devDependencies": {
    "@types/pg": "^8.12.0",
    "chai": "^4.1.1",
    "coveralls": "^3.0.4",
    "istanbul": "^0.4.5",
    "mocha": "^10.5.2",
    "nyc": "^15",
    "tsx": "^4.19.4",
    "typescript": "^4.0.3"
  },
  "files": [
    "index.js",
    "index.d.ts",
    "esm"
  ]
}
packages/pg-connection-string/test/clientConfig.ts (new file, 125 lines)
@@ -0,0 +1,125 @@
import chai from 'chai'
const expect = chai.expect
chai.should()

import { parse, toClientConfig, parseIntoClientConfig } from '../'

describe('toClientConfig', function () {
  it('converts connection info', function () {
    const config = parse('postgres://brian:pw@boom:381/lala')
    const clientConfig = toClientConfig(config)

    clientConfig.user?.should.equal('brian')
    clientConfig.password?.should.equal('pw')
    clientConfig.host?.should.equal('boom')
    clientConfig.port?.should.equal(381)
    clientConfig.database?.should.equal('lala')
  })

  it('converts query params', function () {
    const config = parse(
      'postgres:///?application_name=TheApp&fallback_application_name=TheAppFallback&client_encoding=utf8&options=-c geqo=off'
    )
    const clientConfig = toClientConfig(config)

    clientConfig.application_name?.should.equal('TheApp')
    clientConfig.fallback_application_name?.should.equal('TheAppFallback')
    clientConfig.client_encoding?.should.equal('utf8')
    clientConfig.options?.should.equal('-c geqo=off')
  })

  it('converts SSL boolean', function () {
    const config = parse('pg:///?ssl=true')
    const clientConfig = toClientConfig(config)

    clientConfig.ssl?.should.equal(true)
  })

  it('converts sslmode=disable', function () {
    const config = parse('pg:///?sslmode=disable')
    const clientConfig = toClientConfig(config)

    clientConfig.ssl?.should.equal(false)
  })

  it('converts sslmode=noverify', function () {
    const config = parse('pg:///?sslmode=no-verify')
    const clientConfig = toClientConfig(config)

    clientConfig.ssl?.should.deep.equal({
      rejectUnauthorized: false,
    })
  })

  it('converts other sslmode options', function () {
    const config = parse('pg:///?sslmode=verify-ca')
    const clientConfig = toClientConfig(config)

    clientConfig.ssl?.should.deep.equal({})
  })

  it('converts other sslmode options', function () {
    const config = parse('pg:///?sslmode=verify-ca')
    const clientConfig = toClientConfig(config)

    clientConfig.ssl?.should.deep.equal({})
  })

  it('converts ssl cert options', function () {
    const connectionString =
      'pg:///?sslcert=' +
      __dirname +
      '/example.cert&sslkey=' +
      __dirname +
      '/example.key&sslrootcert=' +
      __dirname +
      '/example.ca'
    const config = parse(connectionString)
    const clientConfig = toClientConfig(config)

    clientConfig.ssl?.should.deep.equal({
      ca: 'example ca\n',
      cert: 'example cert\n',
      key: 'example key\n',
    })
  })

  it('converts unix domain sockets', function () {
    const config = parse('socket:/some path/?db=my[db]&encoding=utf8&client_encoding=bogus')
    const clientConfig = toClientConfig(config)
    clientConfig.host?.should.equal('/some path/')
    clientConfig.database?.should.equal('my[db]', 'must to be escaped and unescaped through "my%5Bdb%5D"')
    clientConfig.client_encoding?.should.equal('utf8')
  })

  it('handles invalid port', function () {
    const config = parse('postgres://@boom:381/lala')
    config.port = 'bogus'
    expect(() => toClientConfig(config)).to.throw()
  })

  it('handles invalid sslconfig values', function () {
    const config = parse('postgres://@boom/lala')
    config.ssl = {}
    config.ssl.cert = null
    config.ssl.key = undefined

    const clientConfig = toClientConfig(config)

    clientConfig.host?.should.equal('boom')
    clientConfig.database?.should.equal('lala')
    clientConfig.ssl?.should.deep.equal({})
  })
})

describe('parseIntoClientConfig', function () {
  it('converts url', function () {
    const clientConfig = parseIntoClientConfig('postgres://brian:pw@boom:381/lala')

    clientConfig.user?.should.equal('brian')
    clientConfig.password?.should.equal('pw')
    clientConfig.host?.should.equal('boom')
    clientConfig.port?.should.equal(381)
    clientConfig.database?.should.equal('lala')
  })
})
packages/pg-connection-string/test/example.ca (new file, 1 line)
@@ -0,0 +1 @@
example ca
packages/pg-connection-string/test/example.cert (new file, 1 line)
@@ -0,0 +1 @@
example cert
packages/pg-connection-string/test/example.key (new file, 1 line)
@@ -0,0 +1 @@
example key
packages/pg-connection-string/test/parse.ts (new file, 470 lines)
@@ -0,0 +1,470 @@
import chai from 'chai'
const expect = chai.expect
chai.should()

import { parse } from '../'

describe('parse', function () {
  it('using connection string in client constructor', function () {
    const subject = parse('postgres://brian:pw@boom:381/lala')
    subject.user?.should.equal('brian')
    subject.password?.should.equal('pw')
    subject.host?.should.equal('boom')
    subject.port?.should.equal('381')
    subject.database?.should.equal('lala')
  })

  it('escape spaces if present', function () {
    const subject = parse('postgres://localhost/post gres')
    subject.database?.should.equal('post gres')
  })

  it('do not double escape spaces', function () {
    const subject = parse('postgres://localhost/post%20gres')
    subject.database?.should.equal('post gres')
  })

  it('initializing with unix domain socket', function () {
    const subject = parse('/const/run/')
    subject.host?.should.equal('/const/run/')
  })

  it('initializing with unix domain socket and a specific database, the simple way', function () {
    const subject = parse('/const/run/ mydb')
    subject.host?.should.equal('/const/run/')
    subject.database?.should.equal('mydb')
  })

  it('initializing with unix domain socket, the health way', function () {
    const subject = parse('socket:/some path/?db=my[db]&encoding=utf8')
    subject.host?.should.equal('/some path/')
    subject.database?.should.equal('my[db]', 'must to be escaped and unescaped trough "my%5Bdb%5D"')
    subject.client_encoding?.should.equal('utf8')
  })

  it('initializing with unix domain socket, the escaped health way', function () {
    const subject = parse('socket:/some%20path/?db=my%2Bdb&encoding=utf8')
    subject.host?.should.equal('/some path/')
    subject.database?.should.equal('my+db')
    subject.client_encoding?.should.equal('utf8')
  })

  it('initializing with unix domain socket, username and password', function () {
    const subject = parse('socket://brian:pw@/const/run/?db=mydb')
    subject.user?.should.equal('brian')
    subject.password?.should.equal('pw')
    subject.host?.should.equal('/const/run/')
    subject.database?.should.equal('mydb')
  })

  it('password contains < and/or > characters', function () {
    const sourceConfig = {
      user: 'brian',
      password: 'hello<ther>e',
      host: 'localhost',
      port: 5432,
      database: 'postgres',
    }
    const connectionString =
      'postgres://' +
      sourceConfig.user +
      ':' +
      sourceConfig.password +
      '@' +
      sourceConfig.host +
      ':' +
      sourceConfig.port +
      '/' +
      sourceConfig.database
    const subject = parse(connectionString)
    subject.password?.should.equal(sourceConfig.password)
  })

  it('password contains colons', function () {
    const sourceConfig = {
      user: 'brian',
      password: 'hello:pass:world',
      host: 'localhost',
      port: 5432,
      database: 'postgres',
    }
    const connectionString =
      'postgres://' +
      sourceConfig.user +
      ':' +
      sourceConfig.password +
      '@' +
      sourceConfig.host +
      ':' +
      sourceConfig.port +
      '/' +
      sourceConfig.database
    const subject = parse(connectionString)
    subject.password?.should.equal(sourceConfig.password)
  })

  it('username or password contains weird characters', function () {
    const strang = 'pg://my f%irst name:is&%awesome!@localhost:9000'
    const subject = parse(strang)
    subject.user?.should.equal('my f%irst name')
    subject.password?.should.equal('is&%awesome!')
    subject.host?.should.equal('localhost')
  })

  it('url is properly encoded', function () {
    const encoded = 'pg://bi%25na%25%25ry%20:s%40f%23@localhost/%20u%2520rl'
    const subject = parse(encoded)
    subject.user?.should.equal('bi%na%%ry ')
    subject.password?.should.equal('s@f#')
    subject.host?.should.equal('localhost')
    subject.database?.should.equal(' u%20rl')
  })

  it('relative url sets database', function () {
    const relative = 'different_db_on_default_host'
    const subject = parse(relative)
    subject.database?.should.equal('different_db_on_default_host')
  })

  it('no pathname returns null database', function () {
    const subject = parse('pg://myhost')
    ;(subject.database === null).should.equal(true)
  })

  it('pathname of "/" returns null database', function () {
    const subject = parse('pg://myhost/')
    subject.host?.should.equal('myhost')
    ;(subject.database === null).should.equal(true)
  })

  it('configuration parameter host', function () {
    const subject = parse('pg://user:pass@/dbname?host=/unix/socket')
    subject.user?.should.equal('user')
    subject.password?.should.equal('pass')
    subject.host?.should.equal('/unix/socket')
    subject.database?.should.equal('dbname')
  })

  it('configuration parameter host overrides url host', function () {
    const subject = parse('pg://user:pass@localhost/dbname?host=/unix/socket')
    subject.database?.should.equal('dbname')
    subject.host?.should.equal('/unix/socket')
  })

  it('url with encoded socket', function () {
    const subject = parse('pg://user:pass@%2Funix%2Fsocket/dbname')
    subject.user?.should.equal('user')
    subject.password?.should.equal('pass')
    subject.host?.should.equal('/unix/socket')
    subject.database?.should.equal('dbname')
  })

  it('url with real host and an encoded db name', function () {
    const subject = parse('pg://user:pass@localhost/%2Fdbname')
    subject.user?.should.equal('user')
    subject.password?.should.equal('pass')
    subject.host?.should.equal('localhost')
    subject.database?.should.equal('%2Fdbname')
  })

  it('configuration parameter host treats encoded host as part of the db name', function () {
    const subject = parse('pg://user:pass@%2Funix%2Fsocket/dbname?host=localhost')
    subject.user?.should.equal('user')
    subject.password?.should.equal('pass')
    subject.host?.should.equal('localhost')
    subject.database?.should.equal('%2Funix%2Fsocket/dbname')
  })

  it('configuration parameter application_name', function () {
    const connectionString = 'pg:///?application_name=TheApp'
    const subject = parse(connectionString)
    subject.application_name?.should.equal('TheApp')
  })

  it('configuration parameter fallback_application_name', function () {
    const connectionString = 'pg:///?fallback_application_name=TheAppFallback'
    const subject = parse(connectionString)
    subject.fallback_application_name?.should.equal('TheAppFallback')
  })

  it('configuration parameter options', function () {
    const connectionString = 'pg:///?options=-c geqo=off'
    const subject = parse(connectionString)
    subject.options?.should.equal('-c geqo=off')
  })

  it('configuration parameter ssl=true', function () {
    const connectionString = 'pg:///?ssl=true'
    const subject = parse(connectionString)
    subject.ssl?.should.equal(true)
  })

  it('configuration parameter ssl=1', function () {
    const connectionString = 'pg:///?ssl=1'
    const subject = parse(connectionString)
    subject.ssl?.should.equal(true)
  })

  it('configuration parameter ssl=0', function () {
    const connectionString = 'pg:///?ssl=0'
    const subject = parse(connectionString)
    subject.ssl?.should.equal(false)
  })

  it('set ssl', function () {
    const subject = parse('pg://myhost/db?ssl=1')
    subject.ssl?.should.equal(true)
  })

  it('configuration parameter sslcert=/path/to/cert', function () {
    const connectionString = 'pg:///?sslcert=' + __dirname + '/example.cert'
    const subject = parse(connectionString)
    subject.ssl?.should.eql({
      cert: 'example cert\n',
    })
  })

  it('configuration parameter sslkey=/path/to/key', function () {
    const connectionString = 'pg:///?sslkey=' + __dirname + '/example.key'
    const subject = parse(connectionString)
    subject.ssl?.should.eql({
      key: 'example key\n',
    })
  })

  it('configuration parameter sslrootcert=/path/to/ca', function () {
    const connectionString = 'pg:///?sslrootcert=' + __dirname + '/example.ca'
    const subject = parse(connectionString)
    subject.ssl?.should.eql({
      ca: 'example ca\n',
    })
  })

  it('configuration parameter sslmode=no-verify', function () {
    const connectionString = 'pg:///?sslmode=no-verify'
    const subject = parse(connectionString)
    subject.ssl?.should.eql({
      rejectUnauthorized: false,
    })
  })

  it('configuration parameter sslmode=disable', function () {
    const connectionString = 'pg:///?sslmode=disable'
    const subject = parse(connectionString)
    subject.ssl?.should.eql(false)
  })

  it('configuration parameter sslmode=prefer', function () {
    const connectionString = 'pg:///?sslmode=prefer'
    const subject = parse(connectionString)
    subject.ssl?.should.eql({})
  })

  it('configuration parameter sslmode=require', function () {
    const connectionString = 'pg:///?sslmode=require'
    const subject = parse(connectionString)
    subject.ssl?.should.eql({})
  })

  it('configuration parameter sslmode=verify-ca', function () {
    const connectionString = 'pg:///?sslmode=verify-ca'
    const subject = parse(connectionString)
    subject.ssl?.should.eql({})
  })

  it('configuration parameter sslmode=verify-full', function () {
    const connectionString = 'pg:///?sslmode=verify-full'
    const subject = parse(connectionString)
    subject.ssl?.should.eql({})
  })

  it('configuration parameter ssl=true and sslmode=require still work with sslrootcert=/path/to/ca', function () {
    const connectionString = 'pg:///?ssl=true&sslrootcert=' + __dirname + '/example.ca&sslmode=require'
    const subject = parse(connectionString)
    subject.ssl?.should.eql({
      ca: 'example ca\n',
    })
  })

  it('configuration parameter sslmode=disable with uselibpqcompat query param', function () {
    const connectionString = 'pg:///?sslmode=disable&uselibpqcompat=true'
    const subject = parse(connectionString)
    subject.ssl?.should.eql(false)
  })

  it('configuration parameter sslmode=prefer with uselibpqcompat query param', function () {
    const connectionString = 'pg:///?sslmode=prefer&uselibpqcompat=true'
    const subject = parse(connectionString)
    subject.ssl?.should.eql({
      rejectUnauthorized: false,
    })
  })

  it('configuration parameter sslmode=require with uselibpqcompat query param', function () {
    const connectionString = 'pg:///?sslmode=require&uselibpqcompat=true'
    const subject = parse(connectionString)
    subject.ssl?.should.eql({
      rejectUnauthorized: false,
    })
  })

  it('configuration parameter sslmode=verify-ca with uselibpqcompat query param', function () {
    const connectionString = 'pg:///?sslmode=verify-ca&uselibpqcompat=true'
    expect(function () {
      parse(connectionString)
    }).to.throw()
  })

  it('when throwing on invalid url does not print out the password in the error message', function () {
    const host = 'localhost'
    const port = 5432
    const user = 'user'
    const password = 'g#4624$@F$#v`'
    const database = 'db'

    const connectionString = `postgres://${user}:${password}@${host}:${port}/${database}`
    expect(function () {
      parse(connectionString)
    }).to.throw()
    try {
      parse(connectionString)
    } catch (err: unknown) {
      expect(JSON.stringify(err)).to.not.include(password, 'Password should not be in the error message')
      expect(JSON.stringify(err)).to.include('REDACTED', 'The thrown error should contain the redacted URL')
      return
    }
    throw new Error('Expected an error to be thrown')
  })

  it('configuration parameter sslmode=verify-ca and sslrootcert with uselibpqcompat query param', function () {
    const connectionString = 'pg:///?sslmode=verify-ca&uselibpqcompat=true&sslrootcert=' + __dirname + '/example.ca'
    const subject = parse(connectionString)
    subject.ssl?.should.have.property('checkServerIdentity').that.is.a('function')
    // We prove above that the checkServerIdentity function is defined
    //
    // FIXME: remove this if we upgrade to TypeScript 5
    // @ts-ignore
    expect(subject.ssl.checkServerIdentity()).be.undefined
  })

  it('configuration parameter sslmode=verify-full with uselibpqcompat query param', function () {
    const connectionString = 'pg:///?sslmode=verify-full&uselibpqcompat=true'
    const subject = parse(connectionString)
    subject.ssl?.should.eql({})
  })

  it('configuration parameter ssl=true and sslmode=require still work with sslrootcert=/path/to/ca with uselibpqcompat query param', function () {
    const connectionString =
      'pg:///?ssl=true&sslrootcert=' + __dirname + '/example.ca&sslmode=require&uselibpqcompat=true'
    const subject = parse(connectionString)
    subject.ssl?.should.have.property('ca', 'example ca\n')
    subject.ssl?.should.have.property('checkServerIdentity').that.is.a('function')
    // We prove above that the checkServerIdentity function is defined
    //
    // FIXME: remove this if we upgrade to TypeScript 5
    // @ts-ignore
    expect(subject.ssl?.checkServerIdentity()).be.undefined
  })

  it('configuration parameter sslmode=disable with useLibpqCompat option', function () {
    const connectionString = 'pg:///?sslmode=disable'
    const subject = parse(connectionString, { useLibpqCompat: true })
    subject.ssl?.should.eql(false)
  })

  it('configuration parameter sslmode=prefer with useLibpqCompat option', function () {
    const connectionString = 'pg:///?sslmode=prefer'
    const subject = parse(connectionString, { useLibpqCompat: true })
    subject.ssl?.should.eql({
      rejectUnauthorized: false,
    })
  })

  it('configuration parameter sslmode=require with useLibpqCompat option', function () {
    const connectionString = 'pg:///?sslmode=require'
    const subject = parse(connectionString, { useLibpqCompat: true })
    subject.ssl?.should.eql({
      rejectUnauthorized: false,
    })
  })

  it('configuration parameter sslmode=verify-ca with useLibpqCompat option', function () {
    const connectionString = 'pg:///?sslmode=verify-ca'
    expect(function () {
      parse(connectionString, { useLibpqCompat: true })
    }).to.throw()
  })

  it('configuration parameter sslmode=verify-ca and sslrootcert with useLibpqCompat option', function () {
    const connectionString = 'pg:///?sslmode=verify-ca&sslrootcert=' + __dirname + '/example.ca'
    const subject = parse(connectionString, { useLibpqCompat: true })
    subject.ssl?.should.have.property('checkServerIdentity').that.is.a('function')
    // We prove above that the checkServerIdentity function is defined
    //
    // FIXME: remove this if we upgrade to TypeScript 5
    // @ts-ignore
    expect(subject.ssl?.checkServerIdentity()).be.undefined
  })

  it('configuration parameter sslmode=verify-full with useLibpqCompat option', function () {
    const connectionString = 'pg:///?sslmode=verify-full'
    const subject = parse(connectionString, { useLibpqCompat: true })
    subject.ssl?.should.eql({})
  })

  it('configuration parameter ssl=true and sslmode=require still work with sslrootcert=/path/to/ca with useLibpqCompat option', function () {
    const connectionString = 'pg:///?ssl=true&sslrootcert=' + __dirname + '/example.ca&sslmode=require'
    const subject = parse(connectionString, { useLibpqCompat: true })
    subject.ssl?.should.have.property('ca', 'example ca\n')
    subject.ssl?.should.have.property('checkServerIdentity').that.is.a('function')
    // We prove above that the checkServerIdentity function is defined
    //
    // FIXME: remove this if we upgrade to TypeScript 5
    // @ts-ignore
    expect(subject.ssl?.checkServerIdentity()).be.undefined
  })

  it('does not allow uselibpqcompat query parameter and useLibpqCompat option at the same time', function () {
    const connectionString = 'pg:///?uselibpqcompat=true'
    expect(function () {
      parse(connectionString, { useLibpqCompat: true })
    }).to.throw()
  })

  it('allow other params like max, ...', function () {
    const subject = parse('pg://myhost/db?max=18&min=4')
    subject.max?.should.equal('18')
    subject.min?.should.equal('4')
  })

  it('configuration parameter keepalives', function () {
    const connectionString = 'pg:///?keepalives=1'
    const subject = parse(connectionString)
    subject.keepalives?.should.equal('1')
  })

  it('unknown configuration parameter is passed into client', function () {
    const connectionString = 'pg:///?ThereIsNoSuchPostgresParameter=1234'
    const subject = parse(connectionString)
    subject.ThereIsNoSuchPostgresParameter?.should.equal('1234')
  })

  it('do not override a config field with value from query string', function () {
    const subject = parse('socket:/some path/?db=my[db]&encoding=utf8&client_encoding=bogus')
    subject.host?.should.equal('/some path/')
    subject.database?.should.equal('my[db]', 'must to be escaped and unescaped through "my%5Bdb%5D"')
    subject.client_encoding?.should.equal('utf8')
  })

  it('return last value of repeated parameter', function () {
    const connectionString = 'pg:///?keepalives=1&keepalives=0'
    const subject = parse(connectionString)
    subject.keepalives?.should.equal('0')
  })

  it('use the port specified in the query parameters', function () {
    const connectionString = 'postgres:///?host=localhost&port=1234'
    const subject = parse(connectionString)
    subject.port?.should.equal('1234')
  })
})
packages/pg-connection-string/tsconfig.json (new file, 19 lines)
@@ -0,0 +1,19 @@
{
  "compilerOptions": {
    "module": "commonjs",
    "esModuleInterop": true,
    "allowSyntheticDefaultImports": true,
    "strict": true,
    "target": "es6",
    "noImplicitAny": true,
    "moduleResolution": "node",
    "sourceMap": true,
    "outDir": "dist",
    "incremental": true,
    "baseUrl": ".",
    "declaration": true
  },
  "include": [
    "test/**/*"
  ]
}
packages/pg-cursor/README.md (new file, 37 lines)
@@ -0,0 +1,37 @@
node-pg-cursor
==============

Use a PostgreSQL result cursor from node with an easy to use API.

### install

```sh
$ npm install pg-cursor
```
___note___: this depends on _either_ `npm install pg` or `npm install pg.js`, but you __must__ be using the pure JavaScript client. This will __not work__ with the native bindings.

### :star: [Documentation](https://node-postgres.com/apis/cursor) :star:

### license

The MIT License (MIT)

Copyright (c) 2013 Brian M. Carlson

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
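Reviewer note (not part of the diff): since the README points to the docs rather than showing usage, here is a minimal sketch of reading a result set in batches with pg-cursor and the standard pg Pool API; the query is made up for illustration.

  const { Pool } = require('pg')
  const Cursor = require('pg-cursor')

  async function main() {
    const pool = new Pool()
    const client = await pool.connect()
    try {
      // submit the query through a cursor instead of reading all rows at once
      const cursor = client.query(new Cursor('SELECT * FROM generate_series(1, 1000) n'))
      const rows = await cursor.read(100) // fetch the first 100 rows
      console.log('got', rows.length, 'rows')
      await cursor.close()
    } finally {
      client.release()
      await pool.end()
    }
  }

  main().catch(console.error)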
packages/pg-cursor/esm/index.mjs (new file, 5 lines)
@@ -0,0 +1,5 @@
// ESM wrapper for pg-cursor
import Cursor from '../index.js'

// Export as default only to match CJS module
export default Cursor
265
packages/pg-cursor/index.js
Normal file
265
packages/pg-cursor/index.js
Normal file
@ -0,0 +1,265 @@
'use strict'

// note: can remove these deep requires when we bump min version of pg to 9.x
const Result = require('pg/lib/result.js')
const prepare = require('pg/lib/utils.js').prepareValue
const EventEmitter = require('events').EventEmitter
const util = require('util')

let nextUniqueID = 1 // concept borrowed from org.postgresql.core.v3.QueryExecutorImpl

class Cursor extends EventEmitter {
  constructor(text, values, config) {
    super()

    this._conf = config || {}
    this.text = text
    this.values = values ? values.map(prepare) : null
    this.connection = null
    this._queue = []
    this.state = 'initialized'
    this._result = new Result(this._conf.rowMode, this._conf.types)
    this._Promise = this._conf.Promise || global.Promise
    this._cb = null
    this._rows = null
    this._portal = null
    this._ifNoData = this._ifNoData.bind(this)
    this._rowDescription = this._rowDescription.bind(this)
  }

  _ifNoData() {
    this.state = 'idle'
    this._shiftQueue()
    if (this.connection) {
      this.connection.removeListener('rowDescription', this._rowDescription)
    }
  }

  _rowDescription() {
    if (this.connection) {
      this.connection.removeListener('noData', this._ifNoData)
    }
  }

  submit(connection) {
    this.state = 'submitted'
    this.connection = connection
    this._portal = 'C_' + nextUniqueID++

    const con = connection

    con.parse(
      {
        text: this.text,
      },
      true
    )

    con.bind(
      {
        portal: this._portal,
        values: this.values,
      },
      true
    )

    con.describe(
      {
        type: 'P',
        name: this._portal, // AWS Redshift requires a portal name
      },
      true
    )

    con.flush()

    if (this._conf.types) {
      this._result._getTypeParser = this._conf.types.getTypeParser
    }

    con.once('noData', this._ifNoData)
    con.once('rowDescription', this._rowDescription)
  }

  _shiftQueue() {
    if (this._queue.length) {
      this._getRows.apply(this, this._queue.shift())
    }
  }

  _closePortal() {
    if (this.state === 'done') return

    // because we opened a named portal to stream results
    // we need to close the same named portal. Leaving a named portal
    // open can lock tables for modification if inside a transaction.
    // see https://github.com/brianc/node-pg-cursor/issues/56
    this.connection.close({ type: 'P', name: this._portal })

    // If we've received an error we already sent a sync message.
    // do not send another sync as it triggers another readyForQuery message.
    if (this.state !== 'error') {
      this.connection.sync()
    }

    this.state = 'done'
  }

  handleRowDescription(msg) {
    this._result.addFields(msg.fields)
    this.state = 'idle'
    this._shiftQueue()
  }

  handleDataRow(msg) {
    const row = this._result.parseRow(msg.fields)
    this.emit('row', row, this._result)
    this._rows.push(row)
  }

  _sendRows() {
    this.state = 'idle'
    setImmediate(() => {
      const cb = this._cb
      // remove callback before calling it
      // because likely a new one will be added
      // within the call to this callback
      this._cb = null
      if (cb) {
        this._result.rows = this._rows
        cb(null, this._rows, this._result)
      }
      this._rows = []
    })
  }

  handleCommandComplete(msg) {
    this._result.addCommandComplete(msg)
    this._closePortal()
  }

  handlePortalSuspended() {
    this._sendRows()
  }

  handleReadyForQuery() {
    this._sendRows()
    this.state = 'done'
    this.emit('end', this._result)
  }

  handleEmptyQuery() {
    this.connection.sync()
  }

  handleError(msg) {
    // If this cursor has already closed, don't try to handle the error.
    if (this.state === 'done') return

    // If we're in an initialized state we've never been submitted
    // and don't have a connection instance reference yet.
    // This can happen if you queue a stream and close the client before
    // the client has submitted the stream. In this scenario we don't have
    // a connection so there's nothing to unsubscribe from.
    if (this.state !== 'initialized') {
      this.connection.removeListener('noData', this._ifNoData)
      this.connection.removeListener('rowDescription', this._rowDescription)
      // call sync to trigger a readyForQuery
      this.connection.sync()
    }

    this.state = 'error'
    this._error = msg
    // satisfy any waiting callback
    if (this._cb) {
      this._cb(msg)
    }
    // dispatch error to all waiting callbacks
    for (let i = 0; i < this._queue.length; i++) {
      const queuedCallback = this._queue[i][1]
      queuedCallback.call(this, msg)
    }
    this._queue.length = 0

    if (this.listenerCount('error') > 0) {
      // only dispatch error events if we have a listener
      this.emit('error', msg)
    }
  }

  _getRows(rows, cb) {
    this.state = 'busy'
    this._cb = cb
    this._rows = []
    const msg = {
      portal: this._portal,
      rows: rows,
    }
    this.connection.execute(msg, true)
    this.connection.flush()
  }

  // users really shouldn't be calling 'end' here and terminating a connection to postgres
  // via the low level connection.end api
  end(cb) {
    if (this.state !== 'initialized') {
      this.connection.sync()
    }
    this.connection.once('end', cb)
    this.connection.end()
  }

  close(cb) {
    let promise

    if (!cb) {
      promise = new this._Promise((resolve, reject) => {
        cb = (err) => (err ? reject(err) : resolve())
      })
    }

    if (!this.connection || this.state === 'done') {
      setImmediate(cb)
      return promise
    }

    this._closePortal()
    this.connection.once('readyForQuery', function () {
      cb()
    })

    // Return the promise (or undefined)
    return promise
  }

  read(rows, cb) {
    let promise

    if (!cb) {
      promise = new this._Promise((resolve, reject) => {
        cb = (err, rows) => (err ? reject(err) : resolve(rows))
      })
    }

    if (this.state === 'idle' || this.state === 'submitted') {
      this._getRows(rows, cb)
    } else if (this.state === 'busy' || this.state === 'initialized') {
      this._queue.push([rows, cb])
    } else if (this.state === 'error') {
      setImmediate(() => cb(this._error))
    } else if (this.state === 'done') {
      setImmediate(() => cb(null, []))
    } else {
      throw new Error('Unknown state: ' + this.state)
    }

    // Return the promise (or undefined)
    return promise
  }
}

Cursor.prototype.end = util.deprecate(
  Cursor.prototype.end,
  'Cursor.end is deprecated. Call end on the client itself to end a connection to the database.'
)

module.exports = Cursor
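A short sketch of how the state handling above surfaces failures (the failing query is deliberate and illustrative): `read()` either executes immediately or sits in the queue depending on the cursor's state, `handleError()` rejects every pending read, and the `'error'` event is emitted only when a listener is attached:

```js
const { Client } = require('pg')
const Cursor = require('pg-cursor')

async function main() {
  const client = new Client()
  await client.connect()

  const cursor = client.query(new Cursor('SELECT * FROM table_that_does_not_exist'))

  // Optional: without a listener, handleError() skips emit('error') and the
  // failure is reported only through read()'s callback or rejected promise.
  cursor.on('error', (err) => console.error('cursor error:', err.message))

  try {
    // Depending on timing this read runs at once or is queued first; either
    // way the server error from handleError() rejects it.
    await cursor.read(10)
  } catch (err) {
    console.error('read failed:', err.message)
  } finally {
    await client.end()
  }
}

main().catch(console.error)
```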
Some files were not shown because too many files have changed in this diff.