Mirror of https://github.com/typeorm/typeorm.git (synced 2025-12-08 21:26:23 +00:00)
fix: add support for mongodb v4 (#9450)
* fix: add support for mongodb driver v4. This change adds support for the MongoDB driver v4. Closes: #7907
Co-authored-by: Matheus Melo Antiquera <matheusantiquera@finchsolucoes.com.br>
This commit is contained in: parent b3b0c118a4, commit 8150525354
@@ -251,7 +251,7 @@ await timber.remove()

 - for **MongoDB** (experimental)

-    `npm install mongodb@^3.6.0 --save`
+    `npm install mongodb@^4.10.0 --save`

 - for **NativeScript**, **react-native** and **Cordova**
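For orientation, a minimal sketch of a DataSource wired to the upgraded driver follows; it is not part of the diff, and the host, port, database and entity list are placeholders.

import { DataSource } from "typeorm"

// Minimal sketch: a DataSource configured against the upgraded mongodb@4 driver.
// host/port/database and the empty entity list are placeholders, not part of this commit.
const dataSource = new DataSource({
    type: "mongodb",
    host: "localhost",
    port: 27017,
    database: "test",
    entities: [],
})

dataSource
    .initialize()
    .then(() => console.log("connected through mongodb@^4.10.0"))
    .catch((error) => console.error(error))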
package-lock.json: 390 lines changed (generated)

The lockfile is regenerated to match the package.json changes; the same entries appear in both its "packages" and legacy "dependencies" sections. In summary:

Top-level devDependencies / peerDependencies:
* "@types/mongodb": "^4.0.7" added; "@types/node" bumped "^14.14.31" -> "^18.8.3"
* devDependency "mongodb" bumped "^3.6.4" -> "^4.10.0"
* peerDependency "mongodb" bumped "^3.6.0" -> "^4.10.0"

Resolved packages:
* @types/mongodb 4.0.7 added (deprecated stub: "mongodb provides its own types. @types/mongodb is no longer needed."; depends on "mongodb": "*")
* @types/node 14.17.20 -> 18.8.3, with a nested @types/node 14.18.31 kept under gulp-replace
* @types/webidl-conversions 7.0.0 and @types/whatwg-url 8.2.2 added
* bl 2.2.1 removed
* bson 1.1.6 -> 4.7.0 (new dependency buffer ^5.6.0 with a nested buffer 5.7.1; engines "node": ">=0.6.19" -> ">=6.9.0")
* ip 1.1.8 (optional) -> 2.0.0
* mongodb 3.7.1 -> 4.10.0: dependencies change from bl ^2.2.1, bson ^1.1.4, denque ^1.4.1, optional-require ^1.0.3, safe-buffer ^5.1.2 to bson ^4.7.0, denque ^2.1.0, mongodb-connection-string-url ^2.5.3 and socks ^2.7.0; optionalDependencies saslprep ^1.0.0 -> ^1.0.3; engines "node": ">=4" -> ">=12.9.0"; the old peerDependenciesMeta block (aws4, bson-ext, kerberos, mongodb-client-encryption, mongodb-extjson, snappy) is dropped; a nested denque 2.1.0 is added under mongodb
* mongodb-connection-string-url 2.5.4 added (depends on @types/whatwg-url ^8.2.1 and whatwg-url ^11.0.0, with nested tr46 3.0.0, webidl-conversions 7.0.0 and whatwg-url 11.0.0, all requiring node >=12)
* optional-require 1.1.8 and require-at 1.0.6 removed
* smart-buffer 4.2.0 is no longer marked optional
* socks 2.6.2 (optional) -> 2.7.1 (now required; its ip dependency moves from ^1.1.5 to ^2.0.0)
package.json: 12 lines changed

@@ -98,7 +98,7 @@
         "@types/js-yaml": "^4.0.0",
         "@types/mkdirp": "^1.0.1",
         "@types/mocha": "^8.2.1",
-        "@types/node": "^14.14.31",
+        "@types/node": "^18.8.3",
         "@types/rimraf": "^3.0.0",
         "@types/sha.js": "^2.4.0",
         "@types/sinon": "^9.0.10",
@@ -124,7 +124,7 @@
         "gulpclass": "^0.2.0",
         "husky": "^5.1.1",
         "mocha": "^8.3.0",
-        "mongodb": "^3.6.4",
+        "mongodb": "^4.10.0",
         "mssql": "^7.3.0",
         "mysql": "^2.18.1",
         "mysql2": "^2.2.5",
@@ -149,7 +149,7 @@
         "better-sqlite3": "^7.1.2 || ^8.0.0",
         "hdb-pool": "^0.1.6",
         "ioredis": "^5.0.4",
-        "mongodb": "^3.6.0",
+        "mongodb": "^4.10.0",
         "mssql": "^7.3.0",
         "mysql2": "^2.2.5 || ^3.0.1",
         "oracledb": "^5.1.0",
@@ -235,14 +235,14 @@
         "yargs": "^17.3.1"
     },
     "scripts": {
-        "test": "rimraf ./build && tsc && mocha --file ./build/compiled/test/utils/test-setup.js --bail --recursive --timeout 60000 ./build/compiled/test",
-        "test-fast": "mocha --file ./build/compiled/test/utils/test-setup.js --bail --recursive --timeout 60000 ./build/compiled/test",
+        "test": "rimraf ./build && tsc && mocha --file ./build/compiled/test/utils/test-setup.js --bail --recursive --timeout 90000 ./build/compiled/test",
+        "test-fast": "mocha --file ./build/compiled/test/utils/test-setup.js --bail --recursive --timeout 90000 ./build/compiled/test",
         "compile": "rimraf ./build && tsc",
         "watch": "./node_modules/.bin/tsc -w",
         "package": "gulp package",
         "pack": "gulp pack",
         "lint": "prettier --check \"./src/**/*.ts\" \"./test/**/*.ts\" \"./sample/**/*.ts\"",
-        "format": "prettier --write \"./src/**/*.ts\" \"./test/**/*.ts\" \"./sample/**/*.ts\"",
+        "format": "prettier --write --end-of-line auto \"./src/**/*.ts\" \"./test/**/*.ts\" \"./sample/**/*.ts\"",
        "changelog": "conventional-changelog -p angular -i CHANGELOG.md -s -r 2"
     },
     "bin": {
@@ -10,6 +10,7 @@ import {
     CannotExecuteNotConnectedError,
     EntityMetadataNotFoundError,
     QueryRunnerProviderAlreadyReleasedError,
+    TypeORMError,
 } from "../error"
 import { TreeRepository } from "../repository/TreeRepository"
 import { NamingStrategyInterface } from "../naming-strategy/NamingStrategyInterface"
@@ -35,7 +36,6 @@ import { RelationLoader } from "../query-builder/RelationLoader"
 import { ObjectUtils } from "../util/ObjectUtils"
 import { IsolationLevel } from "../driver/types/IsolationLevel"
 import { ReplicationMode } from "../driver/types/ReplicationMode"
-import { TypeORMError } from "../error"
 import { RelationIdLoader } from "../query-builder/RelationIdLoader"
 import { DriverUtils } from "../driver/DriverUtils"
 import { InstanceChecker } from "../util/InstanceChecker"
@@ -1,44 +1,10 @@
 import { QueryRunner } from "../../query-runner/QueryRunner"
 import { ObjectLiteral } from "../../common/ObjectLiteral"
 import { TableColumn } from "../../schema-builder/table/TableColumn"
 import { Table } from "../../schema-builder/table/Table"
 import { TableForeignKey } from "../../schema-builder/table/TableForeignKey"
 import { TableIndex } from "../../schema-builder/table/TableIndex"
 import { View } from "../../schema-builder/view/View"
-import {
-    AggregationCursor, BulkWriteOpResultObject, ChangeStream, ChangeStreamOptions, Code, Collection,
-    CollectionAggregationOptions, CollectionBulkWriteOptions, CollectionInsertManyOptions,
-    CollectionInsertOneOptions, CollectionOptions, CollStats, CommandCursor, Cursor,
-    DeleteWriteOpResultObject, FindAndModifyWriteOpResultObject, FindOneAndReplaceOption,
-    GeoHaystackSearchOptions, GeoNearOptions, InsertOneWriteOpResult, InsertWriteOpResult,
-    MapReduceOptions, MongoClient, MongoCountPreferences, MongodbIndexOptions, OrderedBulkOperation,
-    ParallelCollectionScanOptions, ReadPreference, ReplaceOneOptions, UnorderedBulkOperation,
-    UpdateWriteOpResult,
-} from "./typings"
-import { DataSource } from "../../data-source/DataSource"
 // import {Connection} from "../../connection/Connection";
 import { ReadStream } from "../../platform/PlatformTools"
 import { MongoEntityManager } from "../../entity-manager/MongoEntityManager"
 import { SqlInMemory } from "../SqlInMemory"
@@ -47,6 +13,53 @@ import { Broadcaster } from "../../subscriber/Broadcaster"
 import { TableCheck } from "../../schema-builder/table/TableCheck"
 import { TableExclusion } from "../../schema-builder/table/TableExclusion"
 import { TypeORMError } from "../../error"

+import {
+    BulkWriteResult, AggregationCursor, MongoClient, Collection, FindCursor, Document,
+    AggregateOptions, AnyBulkWriteOperation, BulkWriteOptions, Filter, CountOptions,
+    CountDocumentsOptions, IndexSpecification, CreateIndexesOptions, IndexDescription, DeleteResult,
+    DeleteOptions, CommandOperationOptions, FindOneAndDeleteOptions, ModifyResult,
+    FindOneAndReplaceOptions, UpdateFilter, FindOneAndUpdateOptions, RenameOptions, ReplaceOptions,
+    UpdateResult, CollStats, CollStatsOptions, ChangeStreamOptions, ChangeStream, UpdateOptions,
+    ListIndexesOptions, ListIndexesCursor, OptionalId, InsertOneOptions, InsertOneResult,
+    InsertManyResult, MapFunction, ReduceFunction, MapReduceOptions, UnorderedBulkOperation,
+    OrderedBulkOperation, IndexInformationOptions,
+} from "mongodb"
+import { DataSource } from "../../data-source/DataSource"
+import { ReplicationMode } from "../types/ReplicationMode"

 /**
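Because the runner now consumes the driver's own typings, application code can import the same types directly; a small sketch (the filter shape is illustrative, not taken from this diff):

import { Document, Filter, ObjectId } from "mongodb"

// Sketch: the v4 driver ships its own types, so Filter, ObjectId and Document come
// straight from "mongodb" rather than from @types/mongodb or the bundled typings.
const byAuthor: Filter<Document> = { authorId: new ObjectId() }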
@@ -137,8 +150,8 @@ export class MongoQueryRunner implements QueryRunner {
     /**
      * Creates a cursor for a query that can be used to iterate over results from MongoDB.
      */
-    cursor(collectionName: string, query?: ObjectLiteral): Cursor<any> {
-        return this.getCollection(collectionName).find(query || {})
+    cursor(collectionName: string, filter: Filter<Document>): FindCursor<any> {
+        return this.getCollection(collectionName).find(filter || {})
     }

@@ -146,10 +159,13 @@
      */
     aggregate(
         collectionName: string,
-        pipeline: ObjectLiteral[],
-        options?: CollectionAggregationOptions,
+        pipeline: Document[],
+        options?: AggregateOptions,
     ): AggregationCursor<any> {
-        return this.getCollection(collectionName).aggregate(pipeline, options)
+        return this.getCollection(collectionName).aggregate(
+            pipeline,
+            options || {},
+        )
     }

@@ -157,12 +173,12 @@
      */
     async bulkWrite(
         collectionName: string,
-        operations: ObjectLiteral[],
-        options?: CollectionBulkWriteOptions,
-    ): Promise<BulkWriteOpResultObject> {
+        operations: AnyBulkWriteOperation<Document>[],
+        options?: BulkWriteOptions,
+    ): Promise<BulkWriteResult> {
         return await this.getCollection(collectionName).bulkWrite(
             operations,
-            options,
+            options || {},
         )
     }

@@ -171,12 +187,26 @@
      */
     async count(
         collectionName: string,
-        query?: ObjectLiteral,
-        options?: MongoCountPreferences,
-    ): Promise<any> {
-        return await this.getCollection(collectionName).countDocuments(
-            query || {},
-            options,
-        )
-    }
+        filter: Filter<Document>,
+        options?: CountOptions,
+    ): Promise<number> {
+        return this.getCollection(collectionName).count(
+            filter || {},
+            options || {},
+        )
+    }
+
+    /**
+     * Count number of matching documents in the db to a query.
+     */
+    async countDocuments(
+        collectionName: string,
+        filter: Filter<Document>,
+        options?: CountDocumentsOptions,
+    ): Promise<any> {
+        return this.getCollection(collectionName).countDocuments(
+            filter || {},
+            options || {},
+        )
+    }

@@ -185,12 +215,12 @@
      */
     async createCollectionIndex(
         collectionName: string,
-        fieldOrSpec: string | any,
-        options?: MongodbIndexOptions,
+        indexSpec: IndexSpecification,
+        options?: CreateIndexesOptions,
     ): Promise<string> {
-        return await this.getCollection(collectionName).createIndex(
-            fieldOrSpec,
-            options,
+        return this.getCollection(collectionName).createIndex(
+            indexSpec,
+            options || {},
         )
     }
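A short usage sketch of the reworked runner API; it assumes an initialized mongodb DataSource and an illustrative "user" collection, and is not part of the diff:

import { DataSource } from "typeorm"

// Sketch: calling the updated runner methods through the mongo entity manager.
async function countAdults(dataSource: DataSource): Promise<number> {
    const runner = dataSource.mongoManager.mongoQueryRunner
    // countDocuments takes a Filter<Document>; count() remains but is deprecated upstream.
    return runner.countDocuments("user", { age: { $gte: 18 } })
}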
@@ -200,11 +230,9 @@
      */
     async createCollectionIndexes(
         collectionName: string,
-        indexSpecs: ObjectLiteral[],
-    ): Promise<void> {
-        return await this.getCollection(collectionName).createIndexes(
-            indexSpecs,
-        )
+        indexSpecs: IndexDescription[],
+    ): Promise<string[]> {
+        return this.getCollection(collectionName).createIndexes(indexSpecs)
     }

@@ -212,12 +240,12 @@
      */
     async deleteMany(
         collectionName: string,
-        query: ObjectLiteral,
-        options?: CollectionOptions,
-    ): Promise<DeleteWriteOpResultObject> {
-        return await this.getCollection(collectionName).deleteMany(
-            query,
-            options,
+        filter: Filter<Document>,
+        options: DeleteOptions,
+    ): Promise<DeleteResult> {
+        return this.getCollection(collectionName).deleteMany(
+            filter,
+            options || {},
         )
     }

@@ -226,12 +254,12 @@
      */
     async deleteOne(
         collectionName: string,
-        query: ObjectLiteral,
-        options?: CollectionOptions,
-    ): Promise<DeleteWriteOpResultObject> {
-        return await this.getCollection(collectionName).deleteOne(
-            query,
-            options,
+        filter: Filter<Document>,
+        options?: DeleteOptions,
+    ): Promise<DeleteResult> {
+        return this.getCollection(collectionName).deleteOne(
+            filter,
+            options || {},
         )
     }

@@ -240,14 +268,14 @@
      */
     async distinct(
         collectionName: string,
-        key: string,
-        query: ObjectLiteral,
-        options?: { readPreference?: ReadPreference | string },
+        key: any,
+        filter: Filter<Document>,
+        options?: CommandOperationOptions,
     ): Promise<any> {
-        return await this.getCollection(collectionName).distinct(
+        return this.getCollection(collectionName).distinct(
             key,
-            query,
-            options,
+            filter,
+            options || {},
         )
     }

@@ -257,19 +285,19 @@
     async dropCollectionIndex(
         collectionName: string,
         indexName: string,
-        options?: CollectionOptions,
-    ): Promise<any> {
-        return await this.getCollection(collectionName).dropIndex(
+        options?: CommandOperationOptions,
+    ): Promise<Document> {
+        return this.getCollection(collectionName).dropIndex(
             indexName,
-            options,
+            options || {},
         )
     }

     /**
      * Drops all indexes from the collection.
      */
-    async dropCollectionIndexes(collectionName: string): Promise<any> {
-        return await this.getCollection(collectionName).dropIndexes()
+    async dropCollectionIndexes(collectionName: string): Promise<Document> {
+        return this.getCollection(collectionName).dropIndexes()
     }
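For illustration, a hedged sketch of deleteMany with the new DeleteResult return type; the collection name and filter are invented for the example:

import { DataSource } from "typeorm"

// Sketch: deleteMany now resolves to the driver's DeleteResult.
async function purgeInactive(dataSource: DataSource): Promise<number> {
    const result = await dataSource.mongoManager.mongoQueryRunner.deleteMany(
        "user",
        { active: false },
        {},
    )
    return result.deletedCount // DeleteResult exposes acknowledged and deletedCount
}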
@@ -277,12 +305,12 @@
      */
     async findOneAndDelete(
         collectionName: string,
-        query: ObjectLiteral,
-        options?: { projection?: Object; sort?: Object; maxTimeMS?: number },
-    ): Promise<FindAndModifyWriteOpResultObject> {
-        return await this.getCollection(collectionName).findOneAndDelete(
-            query,
-            options,
+        filter: Filter<Document>,
+        options?: FindOneAndDeleteOptions,
+    ): Promise<ModifyResult<Document>> {
+        return this.getCollection(collectionName).findOneAndDelete(
+            filter,
+            options || {},
         )
     }

@@ -291,14 +319,14 @@
      */
     async findOneAndReplace(
         collectionName: string,
-        query: ObjectLiteral,
-        replacement: Object,
-        options?: FindOneAndReplaceOption,
-    ): Promise<FindAndModifyWriteOpResultObject> {
-        return await this.getCollection(collectionName).findOneAndReplace(
-            query,
+        filter: Filter<Document>,
+        replacement: Document,
+        options?: FindOneAndReplaceOptions,
+    ): Promise<ModifyResult<Document>> {
+        return this.getCollection(collectionName).findOneAndReplace(
+            filter,
             replacement,
-            options,
+            options || {},
         )
     }

@@ -307,74 +335,22 @@
      */
     async findOneAndUpdate(
         collectionName: string,
-        query: ObjectLiteral,
-        update: Object,
-        options?: FindOneAndReplaceOption,
-    ): Promise<FindAndModifyWriteOpResultObject> {
-        return await this.getCollection(collectionName).findOneAndUpdate(
-            query,
-            update,
-            options,
-        )
-    }
+        filter: Filter<Document>,
+        update: UpdateFilter<Document>,
+        options?: FindOneAndUpdateOptions,
+    ): Promise<ModifyResult<Document>> {
+        return this.getCollection(collectionName).findOneAndUpdate(
+            filter,
+            update,
+            options || {},
+        )
+    }

-    /**
-     * Execute a geo search using a geo haystack index on a collection.
-     */
-    async geoHaystackSearch(
-        collectionName: string,
-        x: number,
-        y: number,
-        options?: GeoHaystackSearchOptions,
-    ): Promise<any> {
-        return await this.getCollection(collectionName).geoHaystackSearch(
-            x,
-            y,
-            options,
-        )
-    }
-
-    /**
-     * Execute the geoNear command to search for items in the collection.
-     */
-    async geoNear(
-        collectionName: string,
-        x: number,
-        y: number,
-        options?: GeoNearOptions,
-    ): Promise<any> {
-        return await this.getCollection(collectionName).geoNear(x, y, options)
-    }
-
-    /**
-     * Run a group command across a collection.
-     */
-    async group(
-        collectionName: string,
-        keys: Object | Array<any> | Function | Code,
-        condition: Object,
-        initial: Object,
-        reduce: Function | Code,
-        finalize: Function | Code,
-        command: boolean,
-        options?: { readPreference?: ReadPreference | string },
-    ): Promise<any> {
-        return await this.getCollection(collectionName).group(
-            keys,
-            condition,
-            initial,
-            reduce,
-            finalize,
-            command,
-            options,
-        )
-    }

     /**
      * Retrieve all the indexes on the collection.
      */
-    async collectionIndexes(collectionName: string): Promise<any> {
-        return await this.getCollection(collectionName).indexes()
+    async collectionIndexes(collectionName: string): Promise<Document> {
+        return this.getCollection(collectionName).indexes()
     }
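A sketch of the new findOneAndUpdate surface; ModifyResult wraps the matched document in .value, and the collection, filter and update shown here are illustrative:

import { DataSource } from "typeorm"

// Sketch: findOneAndUpdate resolves to ModifyResult<Document>.
async function renameByEmail(dataSource: DataSource, email: string) {
    const result = await dataSource.mongoManager.mongoQueryRunner.findOneAndUpdate(
        "user",
        { email },
        { $set: { name: "Renamed" } },
        { returnDocument: "after" },
    )
    return result.value // the matched document, or null if nothing matched
}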
@@ -384,7 +360,7 @@
         collectionName: string,
         indexes: string | string[],
     ): Promise<boolean> {
-        return await this.getCollection(collectionName).indexExists(indexes)
+        return this.getCollection(collectionName).indexExists(indexes)
     }

@@ -392,10 +368,10 @@
      */
     async collectionIndexInformation(
         collectionName: string,
-        options?: { full: boolean },
+        options?: IndexInformationOptions,
     ): Promise<any> {
-        return await this.getCollection(collectionName).indexInformation(
-            options,
+        return this.getCollection(collectionName).indexInformation(
+            options || {},
         )
     }

@@ -404,7 +380,7 @@
      */
     initializeOrderedBulkOp(
         collectionName: string,
-        options?: CollectionOptions,
+        options?: BulkWriteOptions,
     ): OrderedBulkOperation {
         return this.getCollection(collectionName).initializeOrderedBulkOp(
             options,

@@ -416,7 +392,7 @@
      */
     initializeUnorderedBulkOp(
         collectionName: string,
-        options?: CollectionOptions,
+        options?: BulkWriteOptions,
     ): UnorderedBulkOperation {
         return this.getCollection(collectionName).initializeUnorderedBulkOp(
             options,

@@ -428,12 +404,12 @@
      */
     async insertMany(
         collectionName: string,
-        docs: ObjectLiteral[],
-        options?: CollectionInsertManyOptions,
-    ): Promise<InsertWriteOpResult> {
-        return await this.getCollection(collectionName).insertMany(
+        docs: OptionalId<Document>[],
+        options?: BulkWriteOptions,
+    ): Promise<InsertManyResult> {
+        return this.getCollection(collectionName).insertMany(
             docs,
-            options,
+            options || {},
         )
     }

@@ -442,17 +418,17 @@
      */
     async insertOne(
         collectionName: string,
-        doc: ObjectLiteral,
-        options?: CollectionInsertOneOptions,
-    ): Promise<InsertOneWriteOpResult> {
-        return await this.getCollection(collectionName).insertOne(doc, options)
+        doc: OptionalId<Document>,
+        options?: InsertOneOptions,
+    ): Promise<InsertOneResult> {
+        return this.getCollection(collectionName).insertOne(doc, options || {})
     }

     /**
      * Returns if the collection is a capped collection.
      */
-    async isCapped(collectionName: string): Promise<any> {
-        return await this.getCollection(collectionName).isCapped()
+    async isCapped(collectionName: string): Promise<boolean> {
+        return this.getCollection(collectionName).isCapped()
     }

@@ -460,11 +436,8 @@
      */
     listCollectionIndexes(
         collectionName: string,
-        options?: {
-            batchSize?: number
-            readPreference?: ReadPreference | string
-        },
-    ): CommandCursor {
+        options?: ListIndexesOptions,
+    ): ListIndexesCursor {
         return this.getCollection(collectionName).listIndexes(options)
     }
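A sketch of insertMany with the v4 types; the documents are illustrative and the call shape follows the signature above:

import { DataSource } from "typeorm"

// Sketch: insertMany takes OptionalId<Document>[] and resolves to InsertManyResult,
// whose insertedIds are keyed by array index.
async function seedUsers(dataSource: DataSource) {
    const result = await dataSource.mongoManager.mongoQueryRunner.insertMany("user", [
        { name: "Alice" },
        { name: "Bob" },
    ])
    return result.insertedIds
}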
@@ -473,46 +446,26 @@
      */
     async mapReduce(
         collectionName: string,
-        map: Function | string,
-        reduce: Function | string,
+        map: MapFunction,
+        reduce: ReduceFunction | string,
         options?: MapReduceOptions,
-    ): Promise<any> {
-        return await this.getCollection(collectionName).mapReduce(
+    ): Promise<Document | Document[]> {
+        return this.getCollection(collectionName).mapReduce(
             map,
             reduce,
-            options,
+            options || {},
         )
     }

-    /**
-     * Return N number of parallel cursors for a collection allowing parallel reading of entire collection.
-     * There are no ordering guarantees for returned results.
-     */
-    async parallelCollectionScan(
-        collectionName: string,
-        options?: ParallelCollectionScanOptions,
-    ): Promise<Cursor<any>[]> {
-        return await this.getCollection(collectionName).parallelCollectionScan(
-            options,
-        )
-    }
-
-    /**
-     * Reindex all indexes on the collection Warning: reIndex is a blocking operation (indexes are rebuilt in the foreground) and will be slow for large collections.
-     */
-    async reIndex(collectionName: string): Promise<any> {
-        return await this.getCollection(collectionName).reIndex()
-    }
-
     /**
      * Reindex all indexes on the collection Warning: reIndex is a blocking operation (indexes are rebuilt in the foreground) and will be slow for large collections.
      */
     async rename(
         collectionName: string,
         newName: string,
-        options?: { dropTarget?: boolean },
-    ): Promise<Collection<any>> {
-        return await this.getCollection(collectionName).rename(newName, options)
+        options?: RenameOptions,
+    ): Promise<Collection<Document>> {
+        return this.getCollection(collectionName).rename(newName, options || {})
     }

@@ -520,14 +473,14 @@
      */
     async replaceOne(
         collectionName: string,
-        query: ObjectLiteral,
-        doc: ObjectLiteral,
-        options?: ReplaceOneOptions,
-    ): Promise<UpdateWriteOpResult> {
-        return await this.getCollection(collectionName).replaceOne(
-            query,
-            doc,
-            options,
+        filter: Filter<Document>,
+        replacement: Document,
+        options?: ReplaceOptions,
+    ): Promise<Document | UpdateResult> {
+        return this.getCollection(collectionName).replaceOne(
+            filter,
+            replacement,
+            options || {},
         )
     }

@@ -536,9 +489,9 @@
      */
     async stats(
         collectionName: string,
-        options?: { scale: number },
+        options?: CollStatsOptions,
     ): Promise<CollStats> {
-        return await this.getCollection(collectionName).stats(options)
+        return this.getCollection(collectionName).stats(options || {})
     }

@@ -546,7 +499,7 @@
      */
     watch(
         collectionName: string,
-        pipeline?: Object[],
+        pipeline?: Document[],
         options?: ChangeStreamOptions,
     ): ChangeStream {
         return this.getCollection(collectionName).watch(pipeline, options)

@@ -557,14 +510,14 @@
      */
     async updateMany(
         collectionName: string,
-        query: ObjectLiteral,
-        update: ObjectLiteral,
-        options?: { upsert?: boolean; w?: any; wtimeout?: number; j?: boolean },
-    ): Promise<UpdateWriteOpResult> {
-        return await this.getCollection(collectionName).updateMany(
-            query,
+        filter: Filter<Document>,
+        update: UpdateFilter<Document>,
+        options?: UpdateOptions,
+    ): Promise<Document | UpdateResult> {
+        return this.getCollection(collectionName).updateMany(
+            filter,
             update,
-            options,
+            options || {},
         )
     }

@@ -573,14 +526,14 @@
      */
     async updateOne(
         collectionName: string,
-        query: ObjectLiteral,
-        update: ObjectLiteral,
-        options?: ReplaceOneOptions,
-    ): Promise<UpdateWriteOpResult> {
+        filter: Filter<Document>,
+        update: UpdateFilter<Document>,
+        options?: UpdateOptions,
+    ): Promise<Document | UpdateResult> {
         return await this.getCollection(collectionName).updateOne(
-            query,
+            filter,
             update,
-            options,
+            options || {},
         )
     }
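A sketch of updateOne with the new Filter, UpdateFilter and UpdateOptions types; the field names are illustrative:

import { DataSource } from "typeorm"

// Sketch: updateOne resolves to the driver's UpdateResult (or a raw Document).
async function touchLastLogin(dataSource: DataSource, email: string) {
    return dataSource.mongoManager.mongoQueryRunner.updateOne(
        "user",
        { email },
        { $set: { lastLoginAt: new Date() } },
        { upsert: false },
    )
}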
@@ -4855,14 +4855,31 @@ export interface FindOperatorsUnordered {

     /**
      * Add a remove operation to the bulk operation.
+     *
+     * @deprecated deprecated since mongodb@4.0, in new code, use delete instead
      */
     remove(): UnorderedBulkOperation

     /**
      * Add a remove one operation to the bulk operation.
+     *
+     * @deprecated deprecated since mongodb@4.0, in new code, use deleteOne instead
      */
     removeOne(): UnorderedBulkOperation

+    /**
+     * Add a delete operation to the bulk operation.
+     *
+     * @deprecated deprecated since mongodb@4.0, in new code, use delete instead
+     */
+    delete(): UnorderedBulkOperation
+
+    /**
+     * Add a delete one operation to the bulk operation.
+     *
+     */
+    deleteOne(): UnorderedBulkOperation
+
     /**
      * Add a replace one operation to the bulk operation.
      * @param doc The new document to replace the existing one with.
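A sketch of the renamed bulk operators: delete() and deleteOne() replace the deprecated remove()/removeOne(). The collection and filters are illustrative:

import { DataSource } from "typeorm"

// Sketch: unordered bulk operations with the non-deprecated operator names.
async function bulkCleanup(dataSource: DataSource) {
    const bulk = dataSource.mongoManager.mongoQueryRunner.initializeUnorderedBulkOp("user")
    bulk.find({ active: false }).delete() // previously .remove()
    bulk.find({ email: "temp@example.com" }).deleteOne() // previously .removeOne()
    return bulk.execute() // resolves to a BulkWriteResult
}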
@@ -1314,7 +1314,7 @@ export class EntityManager {
     ): Repository<Entity> {
         // find already created repository instance and return it if found
         const repository = this.repositories.find(
-            (repository) => repository.target === target,
+            (repo) => repo.target === target,
         )
         if (repository) return repository
@@ -1,62 +1,78 @@
 import { DataSource } from "../data-source/DataSource"
 import { EntityManager } from "./EntityManager"
 import { EntityTarget } from "../common/EntityTarget"

-import {
-    AggregationCursor, BulkWriteOpResultObject, ChangeStream, ChangeStreamOptions, Code, Collection,
-    CollectionAggregationOptions, CollectionBulkWriteOptions, CollectionInsertManyOptions,
-    CollectionInsertOneOptions, CollectionOptions, CollStats, CommandCursor, Cursor, CursorResult,
-    DeleteWriteOpResultObject, FindAndModifyWriteOpResultObject, FindOneAndReplaceOption,
-    GeoHaystackSearchOptions, GeoNearOptions, InsertOneWriteOpResult, InsertWriteOpResult,
-    MapReduceOptions, MongoCallback, MongoCountPreferences, MongodbIndexOptions, MongoError,
-    CursorResult, ObjectID, OrderedBulkOperation, ParallelCollectionScanOptions, ReadPreference,
-    ReplaceOneOptions, UnorderedBulkOperation, UpdateWriteOpResult,
-} from "../driver/mongodb/typings"
 import { ObjectLiteral } from "../common/ObjectLiteral"
 import { MongoQueryRunner } from "../driver/mongodb/MongoQueryRunner"
 import { MongoDriver } from "../driver/mongodb/MongoDriver"
 import { DocumentToEntityTransformer } from "../query-builder/transformer/DocumentToEntityTransformer"
 import { FindManyOptions } from "../find-options/FindManyOptions"
 import { FindOptionsUtils } from "../find-options/FindOptionsUtils"
-import { PlatformTools } from "../platform/PlatformTools"
+// import { PlatformTools } from "../platform/PlatformTools";
 import { QueryDeepPartialEntity } from "../query-builder/QueryPartialEntity"
 import { InsertResult } from "../query-builder/result/InsertResult"
 import { UpdateResult } from "../query-builder/result/UpdateResult"
 import { DeleteResult } from "../query-builder/result/DeleteResult"
 import { EntityMetadata } from "../metadata/EntityMetadata"
 import { FindOptionsWhere } from "../find-options/FindOptionsWhere"

+import {
+    BulkWriteResult, AggregationCursor, Collection, FindCursor, Document, AggregateOptions,
+    AnyBulkWriteOperation, BulkWriteOptions, Filter, CountOptions, IndexSpecification,
+    CreateIndexesOptions, IndexDescription, DeleteResult as DeleteResultMongoDb, DeleteOptions,
+    CommandOperationOptions, FindOneAndDeleteOptions, ModifyResult, FindOneAndReplaceOptions,
+    UpdateFilter, FindOneAndUpdateOptions, RenameOptions, ReplaceOptions,
+    UpdateResult as UpdateResultMongoDb, CollStats, CollStatsOptions, ChangeStreamOptions,
+    ChangeStream, UpdateOptions, ListIndexesOptions, ListIndexesCursor, OptionalId, InsertOneOptions,
+    InsertOneResult, InsertManyResult, MapFunction, ReduceFunction, MapReduceOptions,
+    UnorderedBulkOperation, OrderedBulkOperation, IndexInformationOptions, ObjectId, MongoError,
+} from "mongodb"
+import { DataSource } from "../data-source/DataSource"
 import { MongoFindManyOptions } from "../find-options/mongodb/MongoFindManyOptions"
 import { MongoFindOneOptions } from "../find-options/mongodb/MongoFindOneOptions"
+import {
+    FindOptionsSelect,
+    FindOptionsSelectByString,
+} from "../find-options/FindOptionsSelect"
 import { MongoFindManyOptions } from "../find-options/mongodb/MongoFindManyOptions"
 import { MongoFindOneOptions } from "../find-options/mongodb/MongoFindOneOptions"
 import { ColumnMetadata } from "../metadata/ColumnMetadata"
 import { ObjectUtils } from "../util/ObjectUtils"
 import { ColumnMetadata } from "../metadata/ColumnMetadata"

 /**
  * Entity manager supposed to work with any entity, automatically find its repository and call its methods,
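As a point of reference, an entity sketch typed against the driver's ObjectId; the User entity is invented for illustration and is not part of the diff:

import { Column, Entity, ObjectIdColumn } from "typeorm"
import { ObjectId } from "mongodb"

// Sketch: an illustrative entity whose id column uses the driver's ObjectId class
// (the older ObjectID name came from the bundled @types/mongodb typings).
@Entity()
export class User {
    @ObjectIdColumn()
    _id: ObjectId

    @Column()
    name: string

    @Column()
    active: boolean
}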
@@ -87,25 +103,51 @@ export class MongoEntityManager extends EntityManager {
     /**
-     * Finds entities that match given find options.
+     * Finds entities that match given find options or conditions.
      */
     async find<Entity>(
         entityClassOrName: EntityTarget<Entity>,
-        options?: MongoFindManyOptions<Entity>,
+        optionsOrConditions?: FindManyOptions<Entity> | Partial<Entity>,
     ): Promise<Entity[]> {
-        return this.executeFind(entityClassOrName, options)
+        const query =
+            this.convertFindManyOptionsOrConditionsToMongodbQuery(
+                optionsOrConditions,
+            )
+        const cursor = this.createEntityCursor(
+            entityClassOrName,
+            query as Filter<Entity>,
+        )
+        const deleteDateColumn =
+            this.connection.getMetadata(entityClassOrName).deleteDateColumn
+        if (FindOptionsUtils.isFindManyOptions(optionsOrConditions)) {
+            if (optionsOrConditions.select)
+                cursor.project(
+                    this.convertFindOptionsSelectToProjectCriteria(
+                        optionsOrConditions.select,
+                    ),
+                )
+            if (optionsOrConditions.skip) cursor.skip(optionsOrConditions.skip)
+            if (optionsOrConditions.take) cursor.limit(optionsOrConditions.take)
+            if (optionsOrConditions.order)
+                cursor.sort(
+                    this.convertFindOptionsOrderToOrderCriteria(
+                        optionsOrConditions.order,
+                    ),
+                )
+            if (deleteDateColumn && !optionsOrConditions.withDeleted) {
+                this.filterSoftDeleted(cursor, deleteDateColumn, query)
+            }
+        } else if (deleteDateColumn) {
+            this.filterSoftDeleted(cursor, deleteDateColumn, query)
+        }
+        return cursor.toArray()
     }

     /**
      * Finds entities that match given conditions.
      */
     async findBy<Entity>(
         entityClassOrName: EntityTarget<Entity>,
         where: any,
     ): Promise<Entity[]> {
         return this.executeFind(entityClassOrName, where)
     }

     /**
-     * Finds entities that match given find options.
+     * Finds entities that match given find options or conditions.
      * Also counts all entities that match given conditions,
      * but ignores pagination settings (from and take options).
      */
     async findAndCount<Entity>(
         entityClassOrName: EntityTarget<Entity>,
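A usage sketch of the reworked find(); it reuses the illustrative User entity sketched earlier and is not part of the diff:

import { DataSource } from "typeorm"
import { User } from "./entity/User" // the illustrative entity sketched above

// Sketch: find() accepts FindManyOptions (or plain conditions) and filters out
// soft-deleted documents unless withDeleted is set.
async function listActiveUsers(dataSource: DataSource): Promise<User[]> {
    return dataSource.mongoManager.find(User, {
        where: { active: true },
        order: { name: "ASC" },
        take: 20,
    })
}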
@@ -133,37 +175,38 @@
     async findByIds<Entity>(
         entityClassOrName: EntityTarget<Entity>,
         ids: any[],
-        optionsOrConditions?: any,
+        optionsOrConditions?: FindManyOptions<Entity> | Partial<Entity>,
     ): Promise<Entity[]> {
         const metadata = this.connection.getMetadata(entityClassOrName)
         const query =
             this.convertFindManyOptionsOrConditionsToMongodbQuery(
                 optionsOrConditions,
             ) || {}
-        const objectIdInstance = PlatformTools.load("mongodb").ObjectID
+        const objectIdInstance = ObjectId
         query["_id"] = {
             $in: ids.map((id) => {
                 if (typeof id === "string") {
                     return new objectIdInstance(id)
                 }

-                if (ObjectUtils.isObject(id)) {
+                if (typeof id === "object") {
                     if (id instanceof objectIdInstance) {
                         return id
                     }

                     const propertyName = metadata.objectIdColumn!.propertyName

-                    if ((id as any)[propertyName] instanceof objectIdInstance) {
-                        return (id as any)[propertyName]
+                    if (id[propertyName] instanceof objectIdInstance) {
+                        return id[propertyName]
                     }
                 }
             }),
         }

-        const cursor = await this.createEntityCursor(entityClassOrName, query)
-        const deleteDateColumn =
-            this.connection.getMetadata(entityClassOrName).deleteDateColumn
+        const cursor = this.createEntityCursor(
+            entityClassOrName,
+            query as Filter<Entity>,
+        )
         if (FindOptionsUtils.isFindManyOptions(optionsOrConditions)) {
             if (optionsOrConditions.select)
                 cursor.project(
@ -179,17 +222,12 @@ export class MongoEntityManager extends EntityManager {
|
||||
optionsOrConditions.order,
|
||||
),
|
||||
)
|
||||
if (deleteDateColumn && !optionsOrConditions.withDeleted) {
|
||||
this.filterSoftDeleted(cursor, deleteDateColumn, query)
|
||||
}
|
||||
} else if (deleteDateColumn) {
|
||||
this.filterSoftDeleted(cursor, deleteDateColumn, query)
|
||||
}
|
||||
return await cursor.toArray()
|
||||
return cursor.toArray()
|
||||
}
|
||||
|
||||
/**
|
||||
* Finds first entity that matches given find options.
|
||||
* Finds first entity that matches given conditions and/or find options.
|
||||
*/
|
||||
async findOne<Entity>(
|
||||
entityClassOrName: EntityTarget<Entity>,
|
||||
@ -219,15 +257,7 @@ export class MongoEntityManager extends EntityManager {
|
||||
*/
|
||||
async findOneById<Entity>(
|
||||
entityClassOrName: EntityTarget<Entity>,
|
||||
id:
|
||||
| string
|
||||
| string[]
|
||||
| number
|
||||
| number[]
|
||||
| Date
|
||||
| Date[]
|
||||
| ObjectID
|
||||
| ObjectID[],
|
||||
id: string | number | Date | ObjectID,
|
||||
): Promise<Entity | null> {
|
||||
return this.executeFindOne(entityClassOrName, id)
|
||||
}
|
||||
@ -298,9 +328,9 @@ export class MongoEntityManager extends EntityManager {
|
||||
| number[]
|
||||
| Date
|
||||
| Date[]
|
||||
| ObjectID
|
||||
| ObjectID[]
|
||||
| FindOptionsWhere<Entity>,
|
||||
| ObjectId
|
||||
| ObjectId[]
|
||||
| ObjectLiteral,
|
||||
partialEntity: QueryDeepPartialEntity<Entity>,
|
||||
): Promise<UpdateResult> {
|
||||
const result = new UpdateResult()
|
||||
@ -350,9 +380,9 @@ export class MongoEntityManager extends EntityManager {
|
||||
| number[]
|
||||
| Date
|
||||
| Date[]
|
||||
| ObjectID
|
||||
| ObjectID[]
|
||||
| FindOptionsWhere<Entity>,
|
||||
| ObjectId
|
||||
| ObjectId[]
|
||||
| ObjectLiteral[],
|
||||
): Promise<DeleteResult> {
|
||||
const result = new DeleteResult()
|
||||
|
||||
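The hunk above swaps the lazily loaded `ObjectID` constructor for the `ObjectId` class that mongodb v4 exports directly. A minimal standalone sketch of the same id-normalization pattern; the helper name and the sample hex string are illustrative, not taken from this commit:

```ts
import { Document, Filter, ObjectId } from "mongodb"

// Hypothetical helper: turn a mix of 24-char hex strings and ObjectId
// instances into a $in filter, mirroring what the patched findByIds does
// with the v4 driver's exported ObjectId class.
function toObjectIdFilter(ids: Array<string | ObjectId>): Filter<Document> {
    return {
        _id: {
            $in: ids.map((id) =>
                typeof id === "string" ? new ObjectId(id) : id,
            ),
        },
    }
}

// Both inputs end up as ObjectId instances in the resulting filter.
const filter = toObjectIdFilter([
    "6341b5e92a2e7cbd2f2f1234", // assumed-valid hex string
    new ObjectId(),
])
console.log(filter)
```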
@@ -392,8 +422,8 @@ export class MongoEntityManager extends EntityManager {
*/
createCursor<Entity, T = any>(
entityClassOrName: EntityTarget<Entity>,
query?: ObjectLiteral,
): Cursor<T> {
query: ObjectLiteral = {},
): FindCursor<T> {
const metadata = this.connection.getMetadata(entityClassOrName)
return this.mongoQueryRunner.cursor(metadata.tableName, query)
}
@@ -404,8 +434,8 @@ export class MongoEntityManager extends EntityManager {
*/
createEntityCursor<Entity>(
entityClassOrName: EntityTarget<Entity>,
query?: ObjectLiteral,
): Cursor<Entity> {
query: ObjectLiteral = {},
): FindCursor<Entity> {
const metadata = this.connection.getMetadata(entityClassOrName)
const cursor = this.createCursor(entityClassOrName, query)
this.applyEntityTransformationToCursor(metadata, cursor)
@@ -417,8 +447,8 @@ export class MongoEntityManager extends EntityManager {
*/
aggregate<Entity, R = any>(
entityClassOrName: EntityTarget<Entity>,
pipeline: ObjectLiteral[],
options?: CollectionAggregationOptions,
pipeline: Document[],
options?: AggregateOptions,
): AggregationCursor<R> {
const metadata = this.connection.getMetadata(entityClassOrName)
return this.mongoQueryRunner.aggregate(
@@ -434,8 +464,8 @@ export class MongoEntityManager extends EntityManager {
*/
aggregateEntity<Entity>(
entityClassOrName: EntityTarget<Entity>,
pipeline: ObjectLiteral[],
options?: CollectionAggregationOptions,
pipeline: Document[],
options?: AggregateOptions,
): AggregationCursor<Entity> {
const metadata = this.connection.getMetadata(entityClassOrName)
const cursor = this.mongoQueryRunner.aggregate(
@@ -452,9 +482,9 @@ export class MongoEntityManager extends EntityManager {
*/
bulkWrite<Entity>(
entityClassOrName: EntityTarget<Entity>,
operations: ObjectLiteral[],
options?: CollectionBulkWriteOptions,
): Promise<BulkWriteOpResultObject> {
operations: AnyBulkWriteOperation<Document>[],
options?: BulkWriteOptions,
): Promise<BulkWriteResult> {
const metadata = this.connection.getMetadata(entityClassOrName)
return this.mongoQueryRunner.bulkWrite(
metadata.tableName,
@@ -468,8 +498,8 @@ export class MongoEntityManager extends EntityManager {
*/
count<Entity>(
entityClassOrName: EntityTarget<Entity>,
query?: ObjectLiteral,
options?: MongoCountPreferences,
query: Filter<Document> = {},
options: CountOptions = {},
): Promise<number> {
const metadata = this.connection.getMetadata(entityClassOrName)
return this.mongoQueryRunner.count(metadata.tableName, query, options)
@@ -481,7 +511,7 @@ export class MongoEntityManager extends EntityManager {
countBy<Entity>(
entityClassOrName: EntityTarget<Entity>,
query?: ObjectLiteral,
options?: MongoCountPreferences,
options?: CountOptions,
): Promise<number> {
return this.count(entityClassOrName, query, options)
}
@@ -491,8 +521,8 @@ export class MongoEntityManager extends EntityManager {
*/
createCollectionIndex<Entity>(
entityClassOrName: EntityTarget<Entity>,
fieldOrSpec: string | any,
options?: MongodbIndexOptions,
fieldOrSpec: IndexSpecification,
options?: CreateIndexesOptions,
): Promise<string> {
const metadata = this.connection.getMetadata(entityClassOrName)
return this.mongoQueryRunner.createCollectionIndex(
@@ -509,8 +539,8 @@ export class MongoEntityManager extends EntityManager {
*/
createCollectionIndexes<Entity>(
entityClassOrName: EntityTarget<Entity>,
indexSpecs: ObjectLiteral[],
): Promise<void> {
indexSpecs: IndexDescription[],
): Promise<string[]> {
const metadata = this.connection.getMetadata(entityClassOrName)
return this.mongoQueryRunner.createCollectionIndexes(
metadata.tableName,
@@ -523,9 +553,9 @@ export class MongoEntityManager extends EntityManager {
*/
deleteMany<Entity>(
entityClassOrName: EntityTarget<Entity>,
query: ObjectLiteral,
options?: CollectionOptions,
): Promise<DeleteWriteOpResultObject> {
query: Filter<Document>,
options: DeleteOptions = {},
): Promise<DeleteResultMongoDb> {
const metadata = this.connection.getMetadata(entityClassOrName)
return this.mongoQueryRunner.deleteMany(
metadata.tableName,
@@ -539,9 +569,9 @@ export class MongoEntityManager extends EntityManager {
*/
deleteOne<Entity>(
entityClassOrName: EntityTarget<Entity>,
query: ObjectLiteral,
options?: CollectionOptions,
): Promise<DeleteWriteOpResultObject> {
query: Filter<Document>,
options: DeleteOptions = {},
): Promise<DeleteResultMongoDb> {
const metadata = this.connection.getMetadata(entityClassOrName)
return this.mongoQueryRunner.deleteOne(
metadata.tableName,
@@ -556,8 +586,8 @@ export class MongoEntityManager extends EntityManager {
distinct<Entity>(
entityClassOrName: EntityTarget<Entity>,
key: string,
query: ObjectLiteral,
options?: { readPreference?: ReadPreference | string },
query: Filter<Document>,
options?: CommandOperationOptions,
): Promise<any> {
const metadata = this.connection.getMetadata(entityClassOrName)
return this.mongoQueryRunner.distinct(
@@ -574,7 +604,7 @@ export class MongoEntityManager extends EntityManager {
dropCollectionIndex<Entity>(
entityClassOrName: EntityTarget<Entity>,
indexName: string,
options?: CollectionOptions,
options?: CommandOperationOptions,
): Promise<any> {
const metadata = this.connection.getMetadata(entityClassOrName)
return this.mongoQueryRunner.dropCollectionIndex(
@@ -600,8 +630,8 @@ export class MongoEntityManager extends EntityManager {
findOneAndDelete<Entity>(
entityClassOrName: EntityTarget<Entity>,
query: ObjectLiteral,
options?: { projection?: Object; sort?: Object; maxTimeMS?: number },
): Promise<FindAndModifyWriteOpResultObject> {
options?: FindOneAndDeleteOptions,
): Promise<ModifyResult<Document>> {
const metadata = this.connection.getMetadata(entityClassOrName)
return this.mongoQueryRunner.findOneAndDelete(
metadata.tableName,
@@ -615,10 +645,10 @@ export class MongoEntityManager extends EntityManager {
*/
findOneAndReplace<Entity>(
entityClassOrName: EntityTarget<Entity>,
query: ObjectLiteral,
replacement: Object,
options?: FindOneAndReplaceOption,
): Promise<FindAndModifyWriteOpResultObject> {
query: Filter<Document>,
replacement: Document,
options?: FindOneAndReplaceOptions,
): Promise<ModifyResult<Document>> {
const metadata = this.connection.getMetadata(entityClassOrName)
return this.mongoQueryRunner.findOneAndReplace(
metadata.tableName,
@@ -633,10 +663,10 @@ export class MongoEntityManager extends EntityManager {
*/
findOneAndUpdate<Entity>(
entityClassOrName: EntityTarget<Entity>,
query: ObjectLiteral,
update: Object,
options?: FindOneAndReplaceOption,
): Promise<FindAndModifyWriteOpResultObject> {
query: Filter<Document>,
update: UpdateFilter<Document>,
options?: FindOneAndUpdateOptions,
): Promise<ModifyResult<Document>> {
const metadata = this.connection.getMetadata(entityClassOrName)
return this.mongoQueryRunner.findOneAndUpdate(
metadata.tableName,
@@ -646,69 +676,12 @@ export class MongoEntityManager extends EntityManager {
)
}

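The signature changes above lean on the query and cursor types that ship with mongodb v4 (`Filter`, `FindCursor`, `Document`, `AggregateOptions`, and friends). A short sketch of those types used against the driver directly, with a placeholder connection string and collection name, assuming a locally running MongoDB:

```ts
import { Document, Filter, MongoClient } from "mongodb"

// Illustrative only: exercises the v4 driver types that the updated manager
// signatures are built around; not code from this commit.
async function listUserNames(client: MongoClient): Promise<Document[]> {
    const users = client.db("test").collection("user")
    const query: Filter<Document> = { deletedAt: null }
    // find() returns a FindCursor; project/sort/limit chain as before.
    return users
        .find(query)
        .project({ name: 1 })
        .sort({ name: 1 })
        .limit(10)
        .toArray()
}

// Usage sketch:
// const client = await MongoClient.connect("mongodb://localhost:27017")
// console.log(await listUserNames(client))
// await client.close()
```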
/**
|
||||
* Execute a geo search using a geo haystack index on a collection.
|
||||
*/
|
||||
geoHaystackSearch<Entity>(
|
||||
entityClassOrName: EntityTarget<Entity>,
|
||||
x: number,
|
||||
y: number,
|
||||
options?: GeoHaystackSearchOptions,
|
||||
): Promise<any> {
|
||||
const metadata = this.connection.getMetadata(entityClassOrName)
|
||||
return this.mongoQueryRunner.geoHaystackSearch(
|
||||
metadata.tableName,
|
||||
x,
|
||||
y,
|
||||
options,
|
||||
)
|
||||
}
|
||||
|
||||
/**
|
||||
* Execute the geoNear command to search for items in the collection.
|
||||
*/
|
||||
geoNear<Entity>(
|
||||
entityClassOrName: EntityTarget<Entity>,
|
||||
x: number,
|
||||
y: number,
|
||||
options?: GeoNearOptions,
|
||||
): Promise<any> {
|
||||
const metadata = this.connection.getMetadata(entityClassOrName)
|
||||
return this.mongoQueryRunner.geoNear(metadata.tableName, x, y, options)
|
||||
}
|
||||
|
||||
/**
|
||||
* Run a group command across a collection.
|
||||
*/
|
||||
group<Entity>(
|
||||
entityClassOrName: EntityTarget<Entity>,
|
||||
keys: Object | Array<any> | Function | Code,
|
||||
condition: Object,
|
||||
initial: Object,
|
||||
reduce: Function | Code,
|
||||
finalize: Function | Code,
|
||||
command: boolean,
|
||||
options?: { readPreference?: ReadPreference | string },
|
||||
): Promise<any> {
|
||||
const metadata = this.connection.getMetadata(entityClassOrName)
|
||||
return this.mongoQueryRunner.group(
|
||||
metadata.tableName,
|
||||
keys,
|
||||
condition,
|
||||
initial,
|
||||
reduce,
|
||||
finalize,
|
||||
command,
|
||||
options,
|
||||
)
|
||||
}
|
||||
|
||||
/**
|
||||
* Retrieve all the indexes on the collection.
|
||||
*/
|
||||
collectionIndexes<Entity>(
|
||||
entityClassOrName: EntityTarget<Entity>,
|
||||
): Promise<any> {
|
||||
): Promise<Document> {
|
||||
const metadata = this.connection.getMetadata(entityClassOrName)
|
||||
return this.mongoQueryRunner.collectionIndexes(metadata.tableName)
|
||||
}
|
||||
@ -732,7 +705,7 @@ export class MongoEntityManager extends EntityManager {
|
||||
*/
|
||||
collectionIndexInformation<Entity>(
|
||||
entityClassOrName: EntityTarget<Entity>,
|
||||
options?: { full: boolean },
|
||||
options?: IndexInformationOptions,
|
||||
): Promise<any> {
|
||||
const metadata = this.connection.getMetadata(entityClassOrName)
|
||||
return this.mongoQueryRunner.collectionIndexInformation(
|
||||
@ -746,7 +719,7 @@ export class MongoEntityManager extends EntityManager {
|
||||
*/
|
||||
initializeOrderedBulkOp<Entity>(
|
||||
entityClassOrName: EntityTarget<Entity>,
|
||||
options?: CollectionOptions,
|
||||
options?: BulkWriteOptions,
|
||||
): OrderedBulkOperation {
|
||||
const metadata = this.connection.getMetadata(entityClassOrName)
|
||||
return this.mongoQueryRunner.initializeOrderedBulkOp(
|
||||
@ -760,7 +733,7 @@ export class MongoEntityManager extends EntityManager {
|
||||
*/
|
||||
initializeUnorderedBulkOp<Entity>(
|
||||
entityClassOrName: EntityTarget<Entity>,
|
||||
options?: CollectionOptions,
|
||||
options?: BulkWriteOptions,
|
||||
): UnorderedBulkOperation {
|
||||
const metadata = this.connection.getMetadata(entityClassOrName)
|
||||
return this.mongoQueryRunner.initializeUnorderedBulkOp(
|
||||
@ -774,9 +747,9 @@ export class MongoEntityManager extends EntityManager {
|
||||
*/
|
||||
insertMany<Entity>(
|
||||
entityClassOrName: EntityTarget<Entity>,
|
||||
docs: ObjectLiteral[],
|
||||
options?: CollectionInsertManyOptions,
|
||||
): Promise<InsertWriteOpResult> {
|
||||
docs: OptionalId<Document>[],
|
||||
options?: BulkWriteOptions,
|
||||
): Promise<InsertManyResult> {
|
||||
const metadata = this.connection.getMetadata(entityClassOrName)
|
||||
return this.mongoQueryRunner.insertMany(
|
||||
metadata.tableName,
|
||||
@ -790,9 +763,9 @@ export class MongoEntityManager extends EntityManager {
|
||||
*/
|
||||
insertOne<Entity>(
|
||||
entityClassOrName: EntityTarget<Entity>,
|
||||
doc: ObjectLiteral,
|
||||
options?: CollectionInsertOneOptions,
|
||||
): Promise<InsertOneWriteOpResult> {
|
||||
doc: OptionalId<Document>,
|
||||
options?: InsertOneOptions,
|
||||
): Promise<InsertOneResult> {
|
||||
const metadata = this.connection.getMetadata(entityClassOrName)
|
||||
return this.mongoQueryRunner.insertOne(metadata.tableName, doc, options)
|
||||
}
|
||||
@ -810,11 +783,8 @@ export class MongoEntityManager extends EntityManager {
|
||||
*/
|
||||
listCollectionIndexes<Entity>(
|
||||
entityClassOrName: EntityTarget<Entity>,
|
||||
options?: {
|
||||
batchSize?: number
|
||||
readPreference?: ReadPreference | string
|
||||
},
|
||||
): CommandCursor {
|
||||
options?: ListIndexesOptions,
|
||||
): ListIndexesCursor {
|
||||
const metadata = this.connection.getMetadata(entityClassOrName)
|
||||
return this.mongoQueryRunner.listCollectionIndexes(
|
||||
metadata.tableName,
|
||||
@ -827,10 +797,10 @@ export class MongoEntityManager extends EntityManager {
|
||||
*/
|
||||
mapReduce<Entity>(
|
||||
entityClassOrName: EntityTarget<Entity>,
|
||||
map: Function | string,
|
||||
reduce: Function | string,
|
||||
map: MapFunction,
|
||||
reduce: ReduceFunction | string,
|
||||
options?: MapReduceOptions,
|
||||
): Promise<any> {
|
||||
): Promise<Document | Document[]> {
|
||||
const metadata = this.connection.getMetadata(entityClassOrName)
|
||||
return this.mongoQueryRunner.mapReduce(
|
||||
metadata.tableName,
|
||||
@ -840,37 +810,14 @@ export class MongoEntityManager extends EntityManager {
|
||||
)
|
||||
}
|
||||
|
||||
/**
|
||||
* Return N number of parallel cursors for a collection allowing parallel reading of entire collection.
|
||||
* There are no ordering guarantees for returned results.
|
||||
*/
|
||||
parallelCollectionScan<Entity>(
|
||||
entityClassOrName: EntityTarget<Entity>,
|
||||
options?: ParallelCollectionScanOptions,
|
||||
): Promise<Cursor<Entity>[]> {
|
||||
const metadata = this.connection.getMetadata(entityClassOrName)
|
||||
return this.mongoQueryRunner.parallelCollectionScan(
|
||||
metadata.tableName,
|
||||
options,
|
||||
)
|
||||
}
|
||||
|
||||
/**
|
||||
* Reindex all indexes on the collection Warning: reIndex is a blocking operation (indexes are rebuilt in the foreground) and will be slow for large collections.
|
||||
*/
|
||||
reIndex<Entity>(entityClassOrName: EntityTarget<Entity>): Promise<any> {
|
||||
const metadata = this.connection.getMetadata(entityClassOrName)
|
||||
return this.mongoQueryRunner.reIndex(metadata.tableName)
|
||||
}
|
||||
|
||||
/**
|
||||
* Reindex all indexes on the collection Warning: reIndex is a blocking operation (indexes are rebuilt in the foreground) and will be slow for large collections.
|
||||
*/
|
||||
rename<Entity>(
|
||||
entityClassOrName: EntityTarget<Entity>,
|
||||
newName: string,
|
||||
options?: { dropTarget?: boolean },
|
||||
): Promise<Collection<any>> {
|
||||
options?: RenameOptions,
|
||||
): Promise<Collection<Document>> {
|
||||
const metadata = this.connection.getMetadata(entityClassOrName)
|
||||
return this.mongoQueryRunner.rename(
|
||||
metadata.tableName,
|
||||
@ -884,10 +831,10 @@ export class MongoEntityManager extends EntityManager {
|
||||
*/
|
||||
replaceOne<Entity>(
|
||||
entityClassOrName: EntityTarget<Entity>,
|
||||
query: ObjectLiteral,
|
||||
doc: ObjectLiteral,
|
||||
options?: ReplaceOneOptions,
|
||||
): Promise<UpdateWriteOpResult> {
|
||||
query: Filter<Document>,
|
||||
doc: Document,
|
||||
options?: ReplaceOptions,
|
||||
): Promise<Document | UpdateResultMongoDb> {
|
||||
const metadata = this.connection.getMetadata(entityClassOrName)
|
||||
return this.mongoQueryRunner.replaceOne(
|
||||
metadata.tableName,
|
||||
@ -902,7 +849,7 @@ export class MongoEntityManager extends EntityManager {
|
||||
*/
|
||||
stats<Entity>(
|
||||
entityClassOrName: EntityTarget<Entity>,
|
||||
options?: { scale: number },
|
||||
options?: CollStatsOptions,
|
||||
): Promise<CollStats> {
|
||||
const metadata = this.connection.getMetadata(entityClassOrName)
|
||||
return this.mongoQueryRunner.stats(metadata.tableName, options)
|
||||
@ -910,7 +857,7 @@ export class MongoEntityManager extends EntityManager {
|
||||
|
||||
watch<Entity>(
|
||||
entityClassOrName: EntityTarget<Entity>,
|
||||
pipeline?: Object[],
|
||||
pipeline?: Document[],
|
||||
options?: ChangeStreamOptions,
|
||||
): ChangeStream {
|
||||
const metadata = this.connection.getMetadata(entityClassOrName)
|
||||
@ -926,10 +873,10 @@ export class MongoEntityManager extends EntityManager {
|
||||
*/
|
||||
updateMany<Entity>(
|
||||
entityClassOrName: EntityTarget<Entity>,
|
||||
query: ObjectLiteral,
|
||||
update: ObjectLiteral,
|
||||
options?: { upsert?: boolean; w?: any; wtimeout?: number; j?: boolean },
|
||||
): Promise<UpdateWriteOpResult> {
|
||||
query: Filter<Document>,
|
||||
update: UpdateFilter<Document>,
|
||||
options?: UpdateOptions,
|
||||
): Promise<Document | UpdateResultMongoDb> {
|
||||
const metadata = this.connection.getMetadata(entityClassOrName)
|
||||
return this.mongoQueryRunner.updateMany(
|
||||
metadata.tableName,
|
||||
@ -944,10 +891,10 @@ export class MongoEntityManager extends EntityManager {
|
||||
*/
|
||||
updateOne<Entity>(
|
||||
entityClassOrName: EntityTarget<Entity>,
|
||||
query: ObjectLiteral,
|
||||
update: ObjectLiteral,
|
||||
options?: ReplaceOneOptions,
|
||||
): Promise<UpdateWriteOpResult> {
|
||||
query: Filter<Document>,
|
||||
update: UpdateFilter<Document>,
|
||||
options?: UpdateOptions,
|
||||
): Promise<Document | UpdateResultMongoDb> {
|
||||
const metadata = this.connection.getMetadata(entityClassOrName)
|
||||
return this.mongoQueryRunner.updateOne(
|
||||
metadata.tableName,
|
||||
@ -1047,7 +994,7 @@ export class MongoEntityManager extends EntityManager {
|
||||
metadata: EntityMetadata,
|
||||
idMap: any,
|
||||
): ObjectLiteral {
|
||||
const objectIdInstance = PlatformTools.load("mongodb").ObjectID
|
||||
const objectIdInstance = ObjectId
|
||||
|
||||
// check first if it's ObjectId compatible:
|
||||
// string, number, Buffer, ObjectId or ObjectId-like
|
||||
@ -1068,7 +1015,7 @@ export class MongoEntityManager extends EntityManager {
|
||||
}, {} as any)
|
||||
}
|
||||
|
||||
// last resort: try to convert it to an ObjectID anyway
|
||||
// last resort: try to convert it to an ObjectId anyway
|
||||
// most likely it will fail, but we want to be backwards compatible and keep the same thrown Errors.
|
||||
// it can still pass with null/undefined
|
||||
return {
|
||||
@ -1081,38 +1028,38 @@ export class MongoEntityManager extends EntityManager {
|
||||
*/
|
||||
protected applyEntityTransformationToCursor<Entity extends ObjectLiteral>(
|
||||
metadata: EntityMetadata,
|
||||
cursor: Cursor<Entity> | AggregationCursor<Entity>,
|
||||
cursor: FindCursor<Entity> | AggregationCursor<Entity>,
|
||||
) {
|
||||
// mongdb-3.7 exports Cursor, mongodb-4.2 exports FindCursor, provide support for both.
|
||||
const ParentCursor =
|
||||
PlatformTools.load("mongodb").Cursor ||
|
||||
PlatformTools.load("mongodb").FindCursor
|
||||
const queryRunner = this.mongoQueryRunner
|
||||
cursor.toArray = function (callback?: MongoCallback<Entity[]>) {
|
||||
if (callback) {
|
||||
ParentCursor.prototype.toArray.call(
|
||||
this,
|
||||
(error: MongoError, results: Entity[]): void => {
|
||||
if (error) {
|
||||
callback(error, results)
|
||||
return
|
||||
}
|
||||
cursor
|
||||
.clone()
|
||||
.toArray.call(
|
||||
this,
|
||||
(error: MongoError, results: Entity[]): void => {
|
||||
if (error) {
|
||||
callback(error, results)
|
||||
return
|
||||
}
|
||||
|
||||
const transformer = new DocumentToEntityTransformer()
|
||||
const entities = transformer.transformAll(
|
||||
results,
|
||||
metadata,
|
||||
)
|
||||
const transformer =
|
||||
new DocumentToEntityTransformer()
|
||||
const entities = transformer.transformAll(
|
||||
results,
|
||||
metadata,
|
||||
)
|
||||
|
||||
// broadcast "load" events
|
||||
queryRunner.broadcaster
|
||||
.broadcast("Load", metadata, entities)
|
||||
.then(() => callback(error, entities))
|
||||
},
|
||||
)
|
||||
// broadcast "load" events
|
||||
queryRunner.broadcaster
|
||||
.broadcast("Load", metadata, entities)
|
||||
.then(() => callback(error, entities))
|
||||
},
|
||||
)
|
||||
} else {
|
||||
return ParentCursor.prototype.toArray
|
||||
.call(this)
|
||||
return cursor
|
||||
.clone()
|
||||
.toArray.call(this)
|
||||
.then((results: Entity[]) => {
|
||||
const transformer = new DocumentToEntityTransformer()
|
||||
const entities = transformer.transformAll(
|
||||
@ -1129,29 +1076,38 @@ export class MongoEntityManager extends EntityManager {
|
||||
}
|
||||
cursor.next = function (callback?: MongoCallback<CursorResult>) {
|
||||
if (callback) {
|
||||
ParentCursor.prototype.next.call(
|
||||
this,
|
||||
(error: MongoError, result: CursorResult): void => {
|
||||
if (error || !result) {
|
||||
callback(error, result)
|
||||
return
|
||||
}
|
||||
cursor
|
||||
.clone()
|
||||
.next.call(
|
||||
this,
|
||||
(error: MongoError, result: CursorResult): void => {
|
||||
if (error || !result) {
|
||||
callback(error, result)
|
||||
return
|
||||
}
|
||||
|
||||
const transformer = new DocumentToEntityTransformer()
|
||||
const entity = transformer.transform(result, metadata)
|
||||
const transformer =
|
||||
new DocumentToEntityTransformer()
|
||||
const entity = transformer.transform(
|
||||
result,
|
||||
metadata,
|
||||
)
|
||||
|
||||
// broadcast "load" events
|
||||
// broadcast "load" events
|
||||
|
||||
queryRunner.broadcaster
|
||||
.broadcast("Load", metadata, [entity])
|
||||
.then(() => callback(error, entity))
|
||||
},
|
||||
)
|
||||
queryRunner.broadcaster
|
||||
.broadcast("Load", metadata, [entity])
|
||||
.then(() => callback(error, entity))
|
||||
},
|
||||
)
|
||||
} else {
|
||||
return ParentCursor.prototype.next
|
||||
.call(this)
|
||||
return cursor
|
||||
.clone()
|
||||
.next.call(this)
|
||||
.then((result: Entity) => {
|
||||
if (!result) return result
|
||||
if (!result) {
|
||||
return result
|
||||
}
|
||||
|
||||
const transformer = new DocumentToEntityTransformer()
|
||||
const entity = transformer.transform(result, metadata)
|
||||
@ -1166,7 +1122,7 @@ export class MongoEntityManager extends EntityManager {
|
||||
}
|
||||
|
||||
protected filterSoftDeleted<Entity>(
|
||||
cursor: Cursor<Entity>,
|
||||
cursor: FindCursor<Entity>,
|
||||
deleteDateColumn: ColumnMetadata,
|
||||
query?: ObjectLiteral,
|
||||
) {
|
||||
@ -1188,9 +1144,8 @@ export class MongoEntityManager extends EntityManager {
|
||||
optionsOrConditions?: any,
|
||||
maybeOptions?: MongoFindOneOptions<Entity>,
|
||||
): Promise<Entity | null> {
|
||||
const objectIdInstance = PlatformTools.load("mongodb").ObjectID
|
||||
const id =
|
||||
optionsOrConditions instanceof objectIdInstance ||
|
||||
optionsOrConditions instanceof ObjectId ||
|
||||
typeof optionsOrConditions === "string"
|
||||
? optionsOrConditions
|
||||
: undefined
|
||||
@ -1202,8 +1157,7 @@ export class MongoEntityManager extends EntityManager {
|
||||
findOneOptionsOrConditions,
|
||||
) || {}
|
||||
if (id) {
|
||||
query["_id"] =
|
||||
id instanceof objectIdInstance ? id : new objectIdInstance(id)
|
||||
query["_id"] = id instanceof ObjectId ? id : new ObjectId(id)
|
||||
}
|
||||
const cursor = await this.createEntityCursor(entityClassOrName, query)
|
||||
const deleteDateColumn =
|
||||
|
||||
@ -544,10 +544,8 @@ export class MigrationExecutor {
|
||||
): Promise<Migration[]> {
|
||||
if (this.connection.driver.options.type === "mongodb") {
|
||||
const mongoRunner = queryRunner as MongoQueryRunner
|
||||
return await mongoRunner.databaseConnection
|
||||
.db(this.connection.driver.database!)
|
||||
.collection(this.migrationsTableName)
|
||||
.find<Migration>()
|
||||
return mongoRunner
|
||||
.cursor(this.migrationsTableName, {})
|
||||
.sort({ _id: -1 })
|
||||
.toArray()
|
||||
} else {
|
||||
|
||||
@ -2,35 +2,12 @@ import { ObjectLiteral } from "../common/ObjectLiteral"
|
||||
import { Repository } from "./Repository"
|
||||
import { MongoFindManyOptions } from "../find-options/mongodb/MongoFindManyOptions"
|
||||
import {
|
||||
AggregationCursor,
|
||||
BulkWriteOpResultObject,
|
||||
Code,
|
||||
Collection,
|
||||
CollectionAggregationOptions,
|
||||
CollectionBulkWriteOptions,
|
||||
CollectionInsertManyOptions,
|
||||
CollectionInsertOneOptions,
|
||||
CollectionOptions,
|
||||
CollStats,
|
||||
CommandCursor,
|
||||
Cursor,
|
||||
DeleteWriteOpResultObject,
|
||||
FindAndModifyWriteOpResultObject,
|
||||
FindOneAndReplaceOption,
|
||||
GeoHaystackSearchOptions,
|
||||
GeoNearOptions,
|
||||
InsertOneWriteOpResult,
|
||||
InsertWriteOpResult,
|
||||
MapReduceOptions,
|
||||
MongoCountPreferences,
|
||||
MongodbIndexOptions,
|
||||
ObjectID,
|
||||
OrderedBulkOperation,
|
||||
ParallelCollectionScanOptions,
|
||||
ReadPreference,
|
||||
ReplaceOneOptions,
|
||||
UnorderedBulkOperation,
|
||||
UpdateWriteOpResult,
|
||||
} from "../driver/mongodb/typings"
|
||||
import { MongoEntityManager } from "../entity-manager/MongoEntityManager"
|
||||
import { QueryRunner } from "../query-runner/QueryRunner"
|
||||
@ -39,6 +16,42 @@ import { TypeORMError } from "../error/TypeORMError"
|
||||
import { MongoFindOneOptions } from "../find-options/mongodb/MongoFindOneOptions"
|
||||
import { FindOneOptions } from "../find-options/FindOneOptions"
|
||||
|
||||
import {
|
||||
AggregateOptions,
|
||||
AggregationCursor,
|
||||
AnyBulkWriteOperation,
|
||||
BulkWriteOptions,
|
||||
Collection,
|
||||
CollStats,
|
||||
CollStatsOptions,
|
||||
CommandOperationOptions,
|
||||
CountOptions,
|
||||
DeleteOptions,
|
||||
DeleteResult,
|
||||
Document,
|
||||
Filter,
|
||||
FindCursor,
|
||||
FindOneAndDeleteOptions,
|
||||
FindOneAndReplaceOptions,
|
||||
FindOneAndUpdateOptions,
|
||||
IndexDescription,
|
||||
InsertManyResult,
|
||||
InsertOneOptions,
|
||||
InsertOneResult,
|
||||
ListIndexesCursor,
|
||||
ListIndexesOptions,
|
||||
MapFunction,
|
||||
MapReduceOptions,
|
||||
ModifyResult,
|
||||
ObjectId,
|
||||
OrderedBulkOperation,
|
||||
ReduceFunction,
|
||||
UnorderedBulkOperation,
|
||||
UpdateFilter,
|
||||
UpdateOptions,
|
||||
UpdateResult,
|
||||
} from "mongodb"
|
||||
|
||||
/**
|
||||
* Repository used to manage mongodb documents of a single entity type.
|
||||
*/
|
||||
@ -151,15 +164,7 @@ export class MongoRepository<
|
||||
* })
|
||||
*/
|
||||
async findOneById(
|
||||
id:
|
||||
| string
|
||||
| string[]
|
||||
| number
|
||||
| number[]
|
||||
| Date
|
||||
| Date[]
|
||||
| ObjectID
|
||||
| ObjectID[],
|
||||
id: string | number | Date | ObjectID,
|
||||
): Promise<Entity | null> {
|
||||
return this.manager.findOneById(this.metadata.target, id)
|
||||
}
|
||||
@ -183,7 +188,7 @@ export class MongoRepository<
|
||||
/**
|
||||
* Creates a cursor for a query that can be used to iterate over results from MongoDB.
|
||||
*/
|
||||
createCursor<T = any>(query?: ObjectLiteral): Cursor<T> {
|
||||
createCursor<T = any>(query?: Filter<Entity>): FindCursor<T> {
|
||||
return this.manager.createCursor(this.metadata.target, query)
|
||||
}
|
||||
|
||||
@ -191,7 +196,7 @@ export class MongoRepository<
|
||||
* Creates a cursor for a query that can be used to iterate over results from MongoDB.
|
||||
* This returns modified version of cursor that transforms each result into Entity model.
|
||||
*/
|
||||
createEntityCursor(query?: ObjectLiteral): Cursor<Entity> {
|
||||
createEntityCursor(query?: Filter<Entity>): FindCursor<Entity> {
|
||||
return this.manager.createEntityCursor(this.metadata.target, query)
|
||||
}
|
||||
|
||||
@ -200,8 +205,8 @@ export class MongoRepository<
|
||||
*/
|
||||
aggregate<R = any>(
|
||||
pipeline: ObjectLiteral[],
|
||||
options?: CollectionAggregationOptions,
|
||||
): AggregationCursor<R> {
|
||||
options?: AggregateOptions,
|
||||
): AggregationCursor<Entity> {
|
||||
return this.manager.aggregate<R>(
|
||||
this.metadata.target,
|
||||
pipeline,
|
||||
@ -215,7 +220,7 @@ export class MongoRepository<
|
||||
*/
|
||||
aggregateEntity(
|
||||
pipeline: ObjectLiteral[],
|
||||
options?: CollectionAggregationOptions,
|
||||
options?: AggregateOptions,
|
||||
): AggregationCursor<Entity> {
|
||||
return this.manager.aggregateEntity(
|
||||
this.metadata.target,
|
||||
@ -227,8 +232,8 @@ export class MongoRepository<
|
||||
* Perform a bulkWrite operation without a fluent API.
|
||||
*/
|
||||
bulkWrite(
|
||||
operations: ObjectLiteral[],
|
||||
options?: CollectionBulkWriteOptions,
|
||||
operations: AnyBulkWriteOperation[],
|
||||
options?: BulkWriteOptions,
|
||||
): Promise<BulkWriteOpResultObject> {
|
||||
return this.manager.bulkWrite(this.metadata.target, operations, options)
|
||||
}
|
||||
@ -236,10 +241,7 @@ export class MongoRepository<
|
||||
/**
|
||||
* Count number of matching documents in the db to a query.
|
||||
*/
|
||||
count(
|
||||
query?: ObjectLiteral,
|
||||
options?: MongoCountPreferences,
|
||||
): Promise<number> {
|
||||
count(query?: ObjectLiteral, options?: CountOptions): Promise<number> {
|
||||
return this.manager.count(this.metadata.target, query || {}, options)
|
||||
}
|
||||
|
||||
@ -250,7 +252,13 @@ export class MongoRepository<
|
||||
query?: ObjectLiteral,
|
||||
options?: MongoCountPreferences,
|
||||
): Promise<number> {
|
||||
return this.manager.countBy(this.metadata.target, query || {}, options)
|
||||
let where = {}
|
||||
if (query !== undefined) {
|
||||
where = query
|
||||
} else if (options !== undefined) {
|
||||
where = options
|
||||
}
|
||||
return this.manager.countBy(this.metadata.target, where)
|
||||
}
|
||||
|
||||
/**
|
||||
@ -272,7 +280,7 @@ export class MongoRepository<
|
||||
* Earlier version of MongoDB will throw a command not supported error.
|
||||
* Index specifications are defined at http://docs.mongodb.org/manual/reference/command/createIndexes/.
|
||||
*/
|
||||
createCollectionIndexes(indexSpecs: ObjectLiteral[]): Promise<void> {
|
||||
createCollectionIndexes(indexSpecs: IndexDescription[]): Promise<string[]> {
|
||||
return this.manager.createCollectionIndexes(
|
||||
this.metadata.target,
|
||||
indexSpecs,
|
||||
@ -284,8 +292,8 @@ export class MongoRepository<
|
||||
*/
|
||||
deleteMany(
|
||||
query: ObjectLiteral,
|
||||
options?: CollectionOptions,
|
||||
): Promise<DeleteWriteOpResultObject> {
|
||||
options?: DeleteOptions,
|
||||
): Promise<DeleteResult> {
|
||||
return this.manager.deleteMany(this.metadata.tableName, query, options)
|
||||
}
|
||||
|
||||
@ -294,8 +302,8 @@ export class MongoRepository<
|
||||
*/
|
||||
deleteOne(
|
||||
query: ObjectLiteral,
|
||||
options?: CollectionOptions,
|
||||
): Promise<DeleteWriteOpResultObject> {
|
||||
options?: DeleteOptions,
|
||||
): Promise<DeleteResult> {
|
||||
return this.manager.deleteOne(this.metadata.tableName, query, options)
|
||||
}
|
||||
|
||||
@ -305,7 +313,7 @@ export class MongoRepository<
|
||||
distinct(
|
||||
key: string,
|
||||
query: ObjectLiteral,
|
||||
options?: { readPreference?: ReadPreference | string },
|
||||
options?: CommandOperationOptions,
|
||||
): Promise<any> {
|
||||
return this.manager.distinct(
|
||||
this.metadata.tableName,
|
||||
@ -320,7 +328,7 @@ export class MongoRepository<
|
||||
*/
|
||||
dropCollectionIndex(
|
||||
indexName: string,
|
||||
options?: CollectionOptions,
|
||||
options?: CommandOperationOptions,
|
||||
): Promise<any> {
|
||||
return this.manager.dropCollectionIndex(
|
||||
this.metadata.tableName,
|
||||
@ -341,7 +349,7 @@ export class MongoRepository<
|
||||
*/
|
||||
findOneAndDelete(
|
||||
query: ObjectLiteral,
|
||||
options?: { projection?: Object; sort?: Object; maxTimeMS?: number },
|
||||
options?: FindOneAndDeleteOptions,
|
||||
): Promise<FindAndModifyWriteOpResultObject> {
|
||||
return this.manager.findOneAndDelete(
|
||||
this.metadata.tableName,
|
||||
@ -356,8 +364,8 @@ export class MongoRepository<
|
||||
findOneAndReplace(
|
||||
query: ObjectLiteral,
|
||||
replacement: Object,
|
||||
options?: FindOneAndReplaceOption,
|
||||
): Promise<FindAndModifyWriteOpResultObject> {
|
||||
options?: FindOneAndReplaceOptions,
|
||||
): Promise<ModifyResult<Document>> {
|
||||
return this.manager.findOneAndReplace(
|
||||
this.metadata.tableName,
|
||||
query,
|
||||
@ -372,8 +380,8 @@ export class MongoRepository<
|
||||
findOneAndUpdate(
|
||||
query: ObjectLiteral,
|
||||
update: Object,
|
||||
options?: FindOneAndReplaceOption,
|
||||
): Promise<FindAndModifyWriteOpResultObject> {
|
||||
options?: FindOneAndUpdateOptions,
|
||||
): Promise<ModifyResult<Document>> {
|
||||
return this.manager.findOneAndUpdate(
|
||||
this.metadata.tableName,
|
||||
query,
|
||||
@ -382,53 +390,6 @@ export class MongoRepository<
|
||||
)
|
||||
}
|
||||
|
||||
/**
|
||||
* Execute a geo search using a geo haystack index on a collection.
|
||||
*/
|
||||
geoHaystackSearch(
|
||||
x: number,
|
||||
y: number,
|
||||
options?: GeoHaystackSearchOptions,
|
||||
): Promise<any> {
|
||||
return this.manager.geoHaystackSearch(
|
||||
this.metadata.tableName,
|
||||
x,
|
||||
y,
|
||||
options,
|
||||
)
|
||||
}
|
||||
|
||||
/**
|
||||
* Execute the geoNear command to search for items in the collection.
|
||||
*/
|
||||
geoNear(x: number, y: number, options?: GeoNearOptions): Promise<any> {
|
||||
return this.manager.geoNear(this.metadata.tableName, x, y, options)
|
||||
}
|
||||
|
||||
/**
|
||||
* Run a group command across a collection.
|
||||
*/
|
||||
group(
|
||||
keys: Object | Array<any> | Function | Code,
|
||||
condition: Object,
|
||||
initial: Object,
|
||||
reduce: Function | Code,
|
||||
finalize: Function | Code,
|
||||
command: boolean,
|
||||
options?: { readPreference?: ReadPreference | string },
|
||||
): Promise<any> {
|
||||
return this.manager.group(
|
||||
this.metadata.tableName,
|
||||
keys,
|
||||
condition,
|
||||
initial,
|
||||
reduce,
|
||||
finalize,
|
||||
command,
|
||||
options,
|
||||
)
|
||||
}
|
||||
|
||||
/**
|
||||
* Retrieve all the indexes on the collection.
|
||||
*/
|
||||
@ -459,7 +420,7 @@ export class MongoRepository<
|
||||
/**
|
||||
* Initiate an In order bulk write operation, operations will be serially executed in the order they are added, creating a new operation for each switch in types.
|
||||
*/
|
||||
initializeOrderedBulkOp(options?: CollectionOptions): OrderedBulkOperation {
|
||||
initializeOrderedBulkOp(options?: BulkWriteOptions): OrderedBulkOperation {
|
||||
return this.manager.initializeOrderedBulkOp(
|
||||
this.metadata.tableName,
|
||||
options,
|
||||
@ -470,7 +431,7 @@ export class MongoRepository<
|
||||
* Initiate a Out of order batch write operation. All operations will be buffered into insert/update/remove commands executed out of order.
|
||||
*/
|
||||
initializeUnorderedBulkOp(
|
||||
options?: CollectionOptions,
|
||||
options?: BulkWriteOptions,
|
||||
): UnorderedBulkOperation {
|
||||
return this.manager.initializeUnorderedBulkOp(
|
||||
this.metadata.tableName,
|
||||
@ -483,8 +444,8 @@ export class MongoRepository<
|
||||
*/
|
||||
insertMany(
|
||||
docs: ObjectLiteral[],
|
||||
options?: CollectionInsertManyOptions,
|
||||
): Promise<InsertWriteOpResult> {
|
||||
options?: BulkWriteOptions,
|
||||
): Promise<InsertManyResult<Document>> {
|
||||
return this.manager.insertMany(this.metadata.tableName, docs, options)
|
||||
}
|
||||
|
||||
@ -493,8 +454,8 @@ export class MongoRepository<
|
||||
*/
|
||||
insertOne(
|
||||
doc: ObjectLiteral,
|
||||
options?: CollectionInsertOneOptions,
|
||||
): Promise<InsertOneWriteOpResult> {
|
||||
options?: InsertOneOptions,
|
||||
): Promise<InsertOneResult> {
|
||||
return this.manager.insertOne(this.metadata.tableName, doc, options)
|
||||
}
|
||||
|
||||
@ -508,10 +469,7 @@ export class MongoRepository<
|
||||
/**
|
||||
* Get the list of all indexes information for the collection.
|
||||
*/
|
||||
listCollectionIndexes(options?: {
|
||||
batchSize?: number
|
||||
readPreference?: ReadPreference | string
|
||||
}): CommandCursor {
|
||||
listCollectionIndexes(options?: ListIndexesOptions): ListIndexesCursor {
|
||||
return this.manager.listCollectionIndexes(
|
||||
this.metadata.tableName,
|
||||
options,
|
||||
@ -522,8 +480,8 @@ export class MongoRepository<
|
||||
* Run Map Reduce across a collection. Be aware that the inline option for out will return an array of results not a collection.
|
||||
*/
|
||||
mapReduce(
|
||||
map: Function | string,
|
||||
reduce: Function | string,
|
||||
map: MapFunction,
|
||||
reduce: string | ReduceFunction<ObjectId>,
|
||||
options?: MapReduceOptions,
|
||||
): Promise<any> {
|
||||
return this.manager.mapReduce(
|
||||
@ -534,33 +492,13 @@ export class MongoRepository<
|
||||
)
|
||||
}
|
||||
|
||||
/**
|
||||
* Return N number of parallel cursors for a collection allowing parallel reading of entire collection.
|
||||
* There are no ordering guarantees for returned results.
|
||||
*/
|
||||
parallelCollectionScan(
|
||||
options?: ParallelCollectionScanOptions,
|
||||
): Promise<Cursor<Entity>[]> {
|
||||
return this.manager.parallelCollectionScan(
|
||||
this.metadata.tableName,
|
||||
options,
|
||||
)
|
||||
}
|
||||
|
||||
/**
|
||||
* Reindex all indexes on the collection Warning: reIndex is a blocking operation (indexes are rebuilt in the foreground) and will be slow for large collections.
|
||||
*/
|
||||
reIndex(): Promise<any> {
|
||||
return this.manager.reIndex(this.metadata.tableName)
|
||||
}
|
||||
|
||||
/**
|
||||
* Reindex all indexes on the collection Warning: reIndex is a blocking operation (indexes are rebuilt in the foreground) and will be slow for large collections.
|
||||
*/
|
||||
rename(
|
||||
newName: string,
|
||||
options?: { dropTarget?: boolean },
|
||||
): Promise<Collection<any>> {
|
||||
): Promise<Collection<Document>> {
|
||||
return this.manager.rename(this.metadata.tableName, newName, options)
|
||||
}
|
||||
|
||||
@ -571,7 +509,7 @@ export class MongoRepository<
|
||||
query: ObjectLiteral,
|
||||
doc: ObjectLiteral,
|
||||
options?: ReplaceOneOptions,
|
||||
): Promise<UpdateWriteOpResult> {
|
||||
): Promise<Document | UpdateResult> {
|
||||
return this.manager.replaceOne(
|
||||
this.metadata.tableName,
|
||||
query,
|
||||
@ -583,7 +521,7 @@ export class MongoRepository<
|
||||
/**
|
||||
* Get all the collection statistics.
|
||||
*/
|
||||
stats(options?: { scale: number }): Promise<CollStats> {
|
||||
stats(options?: CollStatsOptions): Promise<CollStats> {
|
||||
return this.manager.stats(this.metadata.tableName, options)
|
||||
}
|
||||
|
||||
@ -592,9 +530,9 @@ export class MongoRepository<
|
||||
*/
|
||||
updateMany(
|
||||
query: ObjectLiteral,
|
||||
update: ObjectLiteral,
|
||||
options?: { upsert?: boolean; w?: any; wtimeout?: number; j?: boolean },
|
||||
): Promise<UpdateWriteOpResult> {
|
||||
update: UpdateFilter<Document>,
|
||||
options?: UpdateOptions,
|
||||
): Promise<Document | UpdateResult> {
|
||||
return this.manager.updateMany(
|
||||
this.metadata.tableName,
|
||||
query,
|
||||
@ -608,9 +546,9 @@ export class MongoRepository<
|
||||
*/
|
||||
updateOne(
|
||||
query: ObjectLiteral,
|
||||
update: ObjectLiteral,
|
||||
options?: ReplaceOneOptions,
|
||||
): Promise<UpdateWriteOpResult> {
|
||||
update: UpdateFilter<Document>,
|
||||
options?: UpdateOptions,
|
||||
): Promise<Document | UpdateResult> {
|
||||
return this.manager.updateOne(
|
||||
this.metadata.tableName,
|
||||
query,
|
||||
|
||||
@ -24,22 +24,22 @@ describe("benchmark > QueryBuilder > wide join", () => {
|
||||
|
||||
it("testing query builder with join to 10 relations with 10 columns each", () => {
|
||||
for (let i = 1; i <= 10_000; i++) {
|
||||
connections.map((connection) =>
|
||||
connections.forEach((connection) =>
|
||||
connection.manager
|
||||
.createQueryBuilder(One, "ones")
|
||||
.setFindOptions({
|
||||
where: { id: 1 },
|
||||
relations: [
|
||||
"two",
|
||||
"three",
|
||||
"four",
|
||||
"five",
|
||||
"six",
|
||||
"seven",
|
||||
"eight",
|
||||
"nine",
|
||||
"ten",
|
||||
],
|
||||
relations: {
|
||||
two: true,
|
||||
three: true,
|
||||
four: true,
|
||||
five: true,
|
||||
six: true,
|
||||
seven: true,
|
||||
eight: true,
|
||||
nine: true,
|
||||
ten: true,
|
||||
},
|
||||
})
|
||||
.getQuery(),
|
||||
)
|
||||
|
||||
@ -44,7 +44,7 @@ describe("deferrable fk constraints should be check at the end of transaction (#
|
||||
|
||||
// now check
|
||||
const user = await connection.manager.findOne(User, {
|
||||
relations: ["company"],
|
||||
relations: { company: true },
|
||||
where: { id: 1 },
|
||||
})
|
||||
|
||||
@ -86,7 +86,7 @@ describe("deferrable fk constraints should be check at the end of transaction (#
|
||||
|
||||
// now check
|
||||
const office = await connection.manager.findOne(Office, {
|
||||
relations: ["company"],
|
||||
relations: { company: true },
|
||||
where: { id: 2 },
|
||||
})
|
||||
|
||||
|
||||
@ -91,7 +91,7 @@ describe("find options > relations", () => {
|
||||
const posts2 = await connection
|
||||
.createQueryBuilder(Post, "post")
|
||||
.setFindOptions({
|
||||
relations: ["author"],
|
||||
relations: { author: true },
|
||||
order: {
|
||||
id: "asc",
|
||||
},
|
||||
|
||||
@ -26,7 +26,7 @@ describe("find options > select", () => {
|
||||
const posts1 = await connection
|
||||
.createQueryBuilder(Post, "post")
|
||||
.setFindOptions({
|
||||
select: ["id"],
|
||||
select: { id: true },
|
||||
order: {
|
||||
id: "asc",
|
||||
},
|
||||
@ -67,7 +67,7 @@ describe("find options > select", () => {
|
||||
const posts1 = await connection
|
||||
.createQueryBuilder(Post, "post")
|
||||
.setFindOptions({
|
||||
select: ["title"],
|
||||
select: { title: true },
|
||||
order: {
|
||||
title: "asc",
|
||||
},
|
||||
@ -108,7 +108,7 @@ describe("find options > select", () => {
|
||||
const posts1 = await connection
|
||||
.createQueryBuilder(Post, "post")
|
||||
.setFindOptions({
|
||||
select: ["title", "text"],
|
||||
select: { title: true, text: true },
|
||||
order: {
|
||||
title: "asc",
|
||||
},
|
||||
|
||||
@ -2,7 +2,6 @@ import { Entity } from "../../../../../../src/decorator/entity/Entity"
|
||||
import { Column } from "../../../../../../src/decorator/columns/Column"
|
||||
import { ObjectIdColumn } from "../../../../../../src/decorator/columns/ObjectIdColumn"
|
||||
import { ObjectID } from "../../../../../../src/driver/mongodb/typings"
|
||||
|
||||
@Entity()
|
||||
export class Post {
|
||||
@ObjectIdColumn()
|
||||
|
||||
@ -34,7 +34,7 @@ describe("mongodb > object id columns", () => {
|
||||
// little hack to get raw data from mongodb
|
||||
const aggArr = await postMongoRepository.aggregate([]).toArray()
|
||||
|
||||
expect(aggArr[0]._id).to.be.not.undefined
|
||||
expect((aggArr[0] as any)._id).to.be.not.undefined
|
||||
expect(aggArr[0].nonIdNameOfObjectId).to.be.undefined
|
||||
}),
|
||||
))
|
||||
@ -94,7 +94,7 @@ describe("mongodb > object id columns", () => {
|
||||
// little hack to get raw data from mongodb
|
||||
const aggArr = await postMongoRepository.aggregate([]).toArray()
|
||||
|
||||
expect(aggArr[0]._id).to.be.not.undefined
|
||||
expect((aggArr[0] as any)._id).to.be.not.undefined
|
||||
expect(aggArr[0].nonIdNameOfObjectId).to.be.undefined
|
||||
}),
|
||||
))
|
||||
|
||||
@ -45,7 +45,7 @@ describe("persistence > one-to-one", function () {
|
||||
|
||||
const loadedUser = await userRepository.findOne({
|
||||
where: { email: "mwelnick@test.com" },
|
||||
relations: ["access_token"],
|
||||
relations: { access_token: true },
|
||||
})
|
||||
|
||||
expect(loadedUser).not.to.be.null
|
||||
|
||||
@ -35,7 +35,11 @@ describe("persistence > orphanage > delete", () => {
|
||||
let postRepository: Repository<Post>
|
||||
let categoryId: number
|
||||
|
||||
beforeEach(async () => {
|
||||
beforeEach(async function () {
|
||||
if (connections.length === 0) {
|
||||
this.skip()
|
||||
}
|
||||
|
||||
await Promise.all(
|
||||
connections.map(async (connection) => {
|
||||
categoryRepository = connection.getRepository(Category)
|
||||
|
||||
@ -35,7 +35,11 @@ describe("persistence > orphanage > disable", () => {
|
||||
let settingRepo: Repository<Setting>
|
||||
let userId: number
|
||||
|
||||
beforeEach(async () => {
|
||||
beforeEach(async function () {
|
||||
if (connections.length === 0) {
|
||||
this.skip()
|
||||
}
|
||||
|
||||
await Promise.all(
|
||||
connections.map(async (connection) => {
|
||||
userRepo = connection.getRepository(User)
|
||||
|
||||
@ -300,7 +300,7 @@ describe("query builder > soft-delete", () => {
|
||||
await userRepository.save(user2)
|
||||
|
||||
const users = await userRepository.find({
|
||||
relations: ["picture"],
|
||||
relations: { picture: true },
|
||||
})
|
||||
|
||||
expect(users[0].picture.deletedAt).to.equal(null)
|
||||
@ -312,7 +312,7 @@ describe("query builder > soft-delete", () => {
|
||||
|
||||
const usersWithSoftDelete = await userRepository.find({
|
||||
withDeleted: true,
|
||||
relations: ["picture"],
|
||||
relations: { picture: true },
|
||||
})
|
||||
|
||||
expect(usersWithSoftDelete[0].picture.deletedAt).to.not.equal(
|
||||
|
||||
@ -58,7 +58,7 @@ describe("relations > multiple-primary-keys > one-to-many", () => {
|
||||
await insertSimpleTestData(connection)
|
||||
|
||||
const [user] = await connection.getRepository(User).find({
|
||||
relations: ["settings"],
|
||||
relations: { settings: true },
|
||||
// relationLoadStrategy: "join"
|
||||
})
|
||||
|
||||
@ -86,7 +86,7 @@ describe("relations > multiple-primary-keys > one-to-many", () => {
|
||||
|
||||
const [user] = await connection
|
||||
.getRepository(User)
|
||||
.find({ relations: ["settings"] })
|
||||
.find({ relations: { settings: true } })
|
||||
|
||||
// check the saved items have correctly updated value
|
||||
expect(user!).not.to.be.undefined
|
||||
@ -117,7 +117,7 @@ describe("relations > multiple-primary-keys > one-to-many", () => {
|
||||
})
|
||||
|
||||
const [user] = await connection.getRepository(User).find({
|
||||
relations: ["settings"],
|
||||
relations: { settings: true },
|
||||
})
|
||||
|
||||
// check that no relational items are found
|
||||
|
||||
@ -595,7 +595,7 @@ describe("repository > find options > locking", () => {
|
||||
.getRepository(Post)
|
||||
.findOne({
|
||||
where: { id: 1 },
|
||||
relations: ["author"],
|
||||
relations: { author: true },
|
||||
lock: {
|
||||
mode: "pessimistic_write",
|
||||
tables: ["img"],
|
||||
@ -617,7 +617,7 @@ describe("repository > find options > locking", () => {
|
||||
return Promise.all([
|
||||
entityManager.getRepository(Post).findOne({
|
||||
where: { id: 1 },
|
||||
relations: ["author"],
|
||||
relations: { author: true },
|
||||
lock: {
|
||||
mode: "pessimistic_write",
|
||||
tables: ["post"],
|
||||
@ -625,7 +625,7 @@ describe("repository > find options > locking", () => {
|
||||
}),
|
||||
entityManager.getRepository(Post).findOne({
|
||||
where: { id: 1 },
|
||||
relations: ["author"],
|
||||
relations: { author: true },
|
||||
lock: { mode: "pessimistic_write" },
|
||||
}),
|
||||
])
|
||||
@ -637,7 +637,7 @@ describe("repository > find options > locking", () => {
|
||||
return Promise.all([
|
||||
entityManager.getRepository(Post).findOne({
|
||||
where: { id: 1 },
|
||||
relations: ["author"],
|
||||
relations: { author: true },
|
||||
lock: {
|
||||
mode: "pessimistic_write",
|
||||
tables: ["post"],
|
||||
@ -647,7 +647,7 @@ describe("repository > find options > locking", () => {
|
||||
.getRepository(Post)
|
||||
.findOne({
|
||||
where: { id: 1 },
|
||||
relations: ["author"],
|
||||
relations: { author: true },
|
||||
lock: { mode: "pessimistic_write" },
|
||||
})
|
||||
.should.be.rejectedWith(
|
||||
@ -670,7 +670,7 @@ describe("repository > find options > locking", () => {
|
||||
return Promise.all([
|
||||
entityManager.getRepository(Post).findOne({
|
||||
where: { id: 1 },
|
||||
relations: ["author"],
|
||||
relations: { author: true },
|
||||
lock: {
|
||||
mode: "pessimistic_read",
|
||||
tables: ["post"],
|
||||
@ -678,7 +678,7 @@ describe("repository > find options > locking", () => {
|
||||
}),
|
||||
entityManager.getRepository(Post).findOne({
|
||||
where: { id: 1 },
|
||||
relations: ["author"],
|
||||
relations: { author: true },
|
||||
lock: {
|
||||
mode: "pessimistic_write",
|
||||
tables: ["post"],
|
||||
@ -686,7 +686,7 @@ describe("repository > find options > locking", () => {
|
||||
}),
|
||||
entityManager.getRepository(Post).findOne({
|
||||
where: { id: 1 },
|
||||
relations: ["author"],
|
||||
relations: { author: true },
|
||||
lock: {
|
||||
mode: "pessimistic_partial_write",
|
||||
tables: ["post"],
|
||||
@ -694,7 +694,7 @@ describe("repository > find options > locking", () => {
|
||||
}),
|
||||
entityManager.getRepository(Post).findOne({
|
||||
where: { id: 1 },
|
||||
relations: ["author"],
|
||||
relations: { author: true },
|
||||
lock: {
|
||||
mode: "pessimistic_write_or_fail",
|
||||
tables: ["post"],
|
||||
@ -702,7 +702,7 @@ describe("repository > find options > locking", () => {
|
||||
}),
|
||||
entityManager.getRepository(Post).findOne({
|
||||
where: { id: 1 },
|
||||
relations: ["author"],
|
||||
relations: { author: true },
|
||||
lock: {
|
||||
mode: "for_no_key_update",
|
||||
tables: ["post"],
|
||||
@ -710,7 +710,7 @@ describe("repository > find options > locking", () => {
|
||||
}),
|
||||
entityManager.getRepository(Post).findOne({
|
||||
where: { id: 1 },
|
||||
relations: ["author"],
|
||||
relations: { author: true },
|
||||
lock: {
|
||||
mode: "for_key_share",
|
||||
tables: ["post"],
|
||||
|
||||
@ -45,7 +45,7 @@ describe("repository > find options", () => {
|
||||
await connection.manager.save(post)
|
||||
|
||||
const [loadedPost] = await connection.getRepository(Post).find({
|
||||
relations: ["author", "categories"],
|
||||
relations: { author: true, categories: true },
|
||||
})
|
||||
expect(loadedPost).to.be.eql({
|
||||
id: 1,
|
||||
@ -126,7 +126,7 @@ describe("repository > find options", () => {
|
||||
const loadedPhoto = await connection
|
||||
.getRepository(Photo)
|
||||
.findOne({
|
||||
select: ["name"],
|
||||
select: { name: true },
|
||||
where: {
|
||||
id: 5,
|
||||
},
|
||||
@ -135,14 +135,14 @@ describe("repository > find options", () => {
|
||||
const loadedPhotos1 = await connection
|
||||
.getRepository(Photo)
|
||||
.find({
|
||||
select: ["filename", "views"],
|
||||
select: { filename: true, views: true },
|
||||
})
|
||||
|
||||
const loadedPhotos2 = await connection
|
||||
.getRepository(Photo)
|
||||
.find({
|
||||
select: ["id", "name", "description"],
|
||||
relations: ["categories"],
|
||||
select: { id: true, name: true, description: true },
|
||||
relations: { categories: true },
|
||||
})
|
||||
|
||||
// const loadedPhotos3 = await connection.getRepository(Photo).createQueryBuilder("photo")
|
||||
|
||||
@ -55,7 +55,7 @@ describe("table-inheritance > single-table > no-type-column", () => {
|
||||
// -------------------------------------------------------------------------
|
||||
|
||||
const [postIt] = await postItRepo.find({
|
||||
relations: ["owner"],
|
||||
relations: { owner: true },
|
||||
})
|
||||
|
||||
postIt.owner.should.be.an.instanceOf(Employee)
|
||||
@ -63,7 +63,7 @@ describe("table-inheritance > single-table > no-type-column", () => {
|
||||
postIt.owner.employeeName.should.be.equal("Alice Foo")
|
||||
|
||||
const [sticky] = await stickyRepo.find({
|
||||
relations: ["owner"],
|
||||
relations: { owner: true },
|
||||
})
|
||||
|
||||
sticky.owner.should.be.an.instanceOf(Author)
|
||||
|
||||
@ -45,7 +45,7 @@ describe("github issues > #1178 subqueries must work in insert statements", () =
|
||||
where: {
|
||||
id: 1,
|
||||
},
|
||||
relations: ["user"],
|
||||
relations: { user: true },
|
||||
})
|
||||
.should.eventually.eql({
|
||||
id: 1,
|
||||
|
||||
@ -50,7 +50,7 @@ describe("github issue > #1416 Wrong behavior when fetching an entity that has a
|
||||
where: {
|
||||
name: photoAuthor.name,
|
||||
},
|
||||
relations: ["photos"],
|
||||
relations: { photos: true },
|
||||
})) as Author
|
||||
expect(author).not.to.be.null
|
||||
expect(author.photos[0]).not.to.be.undefined
|
||||
|
||||
@ -26,11 +26,13 @@ describe("github issues > #1504 Cannot eagerly query Entity with relation more t
|
||||
where: {
|
||||
id: 1,
|
||||
},
|
||||
relations: [
|
||||
"Entity2",
|
||||
"Entity2.Entity3",
|
||||
"Entity2.Entity3.Entity4",
|
||||
],
|
||||
relations: {
|
||||
Entity2: {
|
||||
Entity3: {
|
||||
Entity4: true,
|
||||
},
|
||||
},
|
||||
},
|
||||
})
|
||||
}),
|
||||
))
|
||||
|
||||
@ -37,7 +37,7 @@ describe("github issues > #1720 Listener not invoked when relation loaded throug
|
||||
await connection.manager.save(post1)
|
||||
|
||||
const [loadedPost] = await connection.manager.find(Post, {
|
||||
relations: ["categories"],
|
||||
relations: { categories: true },
|
||||
})
|
||||
loadedPost!.categories[0].loaded.should.be.equal(true)
|
||||
loadedPost!.categories[1].loaded.should.be.equal(true)
|
||||
|
||||
@ -41,7 +41,7 @@ describe("github issues > #1788 One to One does not load relationships.", () =>
|
||||
await providerRepository.save(provider)
|
||||
|
||||
const dbProvider = await providerRepository.find({
|
||||
relations: ["personalization"],
|
||||
relations: { personalization: true },
|
||||
})
|
||||
|
||||
expect(dbProvider[0].personalization).to.not.eql(undefined)
|
||||
|
||||
@ -30,7 +30,7 @@ describe("github issues > #1929 Select attributes in Find method - mongodb", ()
product = new Product("test3", "label3", 30)
await productRepository.save(product)
await productRepository.find({
select: ["name", "label"],
select: { name: true, label: true },
order: { name: 1 },
})
}),

@ -47,7 +47,7 @@ describe("github issues > #1929 Select attributes in Find method - mongodb", ()
product = new Product("test3", "label3", 30)
await productRepository.save(product)
await productRepository.findAndCount({
select: ["name", "label"],
select: { name: true, label: true },
order: { name: 1 },
})
}),

@ -65,7 +65,7 @@ describe("github issues > #1929 Select attributes in Find method - mongodb", ()
const product3 = await productRepository.save(product)
await productRepository.find({
where: { _id: product3.id },
select: ["name", "label"],
select: { name: true, label: true },
order: { name: 1 },
})
}),

@ -82,7 +82,7 @@ describe("github issues > #1929 Select attributes in Find method - mongodb", ()
product = new Product("test3", "label3", 30)
await productRepository.findOne({
where: { name: "test2" },
select: ["name", "label"],
select: { name: true, label: true },
order: { name: 1 },
})
}),
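The #1929 hunks apply the same pattern to the MongoDB find options: `select` becomes a column-to-boolean map, while `order` keeps the MongoDB-style numeric sort direction. A sketch of the updated call shape, with a stand-in `Product` entity whose columns are only assumed from the test:

```ts
import {
    Column,
    Entity,
    MongoRepository,
    ObjectID,
    ObjectIdColumn,
} from "typeorm"

// Assumed shape of the Product entity used by the #1929 tests.
@Entity()
class Product {
    @ObjectIdColumn()
    id: ObjectID

    @Column()
    name: string

    @Column()
    label: string

    @Column()
    price: number
}

async function listNamesAndLabels(repo: MongoRepository<Product>) {
    // select: a column -> boolean map instead of a string array;
    // order: 1 / -1 sort directions, as accepted by MongoDB.
    return repo.find({
        select: { name: true, label: true },
        order: { name: 1 },
    })
}
```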
@ -37,7 +37,7 @@ describe("github issues > #2044 Should not double get embedded column value", ()
await connection.manager.save(photo)

const photos = await connection.manager.find(Photo, {
relations: ["user"],
relations: { user: true },
})

const resultPhoto = photos[0]

@ -49,7 +49,7 @@ describe("github issues > #2632 createQueryBuilder relation remove works only if

let loadedPost1 = await connection.manager.findOne(Post, {
where: { id: 1 },
relations: ["categories"],
relations: { categories: true },
})
expect(loadedPost1!.categories).to.deep.include({
id: 1,

@ -64,7 +64,7 @@ describe("github issues > #2632 createQueryBuilder relation remove works only if

loadedPost1 = await connection.manager.findOne(Post, {
where: { id: 1 },
relations: ["categories"],
relations: { categories: true },
})
expect(loadedPost1!.categories).to.be.eql([])

@ -76,7 +76,7 @@ describe("github issues > #2632 createQueryBuilder relation remove works only if

let loadedPost2 = await connection.manager.findOne(Post, {
where: { id: 2 },
relations: ["categories"],
relations: { categories: true },
})
expect(loadedPost2!.categories).to.deep.include({
id: 2,

@ -91,7 +91,7 @@ describe("github issues > #2632 createQueryBuilder relation remove works only if

loadedPost1 = await connection.manager.findOne(Post, {
where: { id: 2 },
relations: ["categories"],
relations: { categories: true },
})
expect(loadedPost1!.categories).to.be.eql([])
}),

@ -36,7 +36,9 @@ describe("github issues > #2965 Reuse preloaded lazy relations", () => {
await repoNote.insert({ label: "note1", owner: personA })
await repoNote.insert({ label: "note2", owner: personB })

const res1 = await repoPerson.find({ relations: ["notes"] })
const res1 = await repoPerson.find({
relations: { notes: true },
})

const originalLoad: (...args: any[]) => Promise<any[]> =
connection.relationLoader.load
@ -79,11 +79,15 @@ describe("github issues > #3118 shorten alias names (for RDBMS with a limit) whe
const [loadedCategory] = await connection.manager.find(
CategoryWithVeryLongName,
{
relations: [
"postsWithVeryLongName",
relations: {
postsWithVeryLongName: {
authorWithVeryLongName: {
groupWithVeryLongName: true,
},
},
// before: used to generate a SELECT "AS" alias like `CategoryWithVeryLongName__postsWithVeryLongName__authorWithVeryLongName_firstName`
// now: `CaWiVeLoNa__poWiVeLoNa__auWiVeLoNa_firstName`, which is acceptable by Postgres (limit to 63 characters)
"postsWithVeryLongName.authorWithVeryLongName",
// "postsWithVeryLongName.authorWithVeryLongName",
// before:
// used to generate a JOIN "AS" alias like :
// `CategoryWithVeryLongName__postsWithVeryLongName__authorWithVeryLongName_firstName`

@ -95,8 +99,8 @@ describe("github issues > #3118 shorten alias names (for RDBMS with a limit) whe
// now:
// `CaWiVeLoNa__poWiVeLoNa__auWiVeLoNa_firstName`
// `CaWiVeLoNa__poWiVeLoNa__auWiVeLoNa__grWiVeLoNa_name`
"postsWithVeryLongName.authorWithVeryLongName.groupWithVeryLongName",
],
// "postsWithVeryLongName.authorWithVeryLongName.groupWithVeryLongName",
},
},
)
expect(loadedCategory).not.to.be.null

@ -119,11 +123,13 @@ describe("github issues > #3118 shorten alias names (for RDBMS with a limit) whe
const loadedCategories = await connection.manager.find(
CategoryWithVeryLongName,
{
relations: [
"postsWithVeryLongName",
"postsWithVeryLongName.authorWithVeryLongName",
"postsWithVeryLongName.authorWithVeryLongName.groupWithVeryLongName",
],
relations: {
postsWithVeryLongName: {
authorWithVeryLongName: {
groupWithVeryLongName: true,
},
},
},
},
)
expect(loadedCategories).to.be.an("array").that.is.not.empty

@ -123,7 +123,11 @@ describe('github issues > #3120 Add relation option "createForeignKeyConstraints
ActionLog,
{
where: { action: "Test Log #1" },
relations: ["person", "actionDetails", "addresses"],
relations: {
person: true,
actionDetails: true,
addresses: true,
},
},
)
@ -39,7 +39,7 @@ describe("github issues > #4190 Relation decorators: allow to pass string instea
await connection.manager.save(user)

const users = await connection.manager.find(User, {
relations: ["profile"],
relations: { profile: true },
})

users.should.eql([

@ -73,10 +73,10 @@ describe("github issues > #4190 Relation decorators: allow to pass string instea
await connection.manager.save(user)

const users = await connection.manager.find(User, {
relations: ["photos"],
relations: { photos: true },
})
const photos = await connection.manager.find(Photo, {
relations: ["user"],
relations: { user: true },
})

// Check one-to-many

@ -130,7 +130,7 @@ describe("github issues > #4190 Relation decorators: allow to pass string instea
await connection.manager.save(question)

const questions = await connection.manager.find(Question, {
relations: ["categories"],
relations: { categories: true },
})

questions[0].categories.should.have.deep.members([
@ -75,31 +75,38 @@ describe("github issues > #5684 eager relation skips children relations", () =>
}
}

const relations = [
"company",
"company.admin", // <-- can't be loaded without the fix.
"company.staff", // <-- can't be loaded without the fix.
"company.staff.company", // <-- can't be loaded without the fix.
"company.staff.company.admin", // <-- can't be loaded without the fix.
]
const relations = {
company: {
admin: true,
staff: {
company: {
admin: true,
},
},
},
// "company.admin", // <-- can't be loaded without the fix.
// "company.staff", // <-- can't be loaded without the fix.
// "company.staff.company", // <-- can't be loaded without the fix.
// "company.staff.company.admin", // <-- can't be loaded without the fix.
}

const user1 = await connection.getRepository(User).findOne({
where: { id: userAdmin.id },
relations: [...relations],
relations: relations,
})
assert(user1)
const user2 = await connection
.getRepository(User)
.findOneOrFail({
where: { id: userAdmin.id },
relations: [...relations],
relations: relations,
})
assert(user2)
const users3 = await connection.getRepository(User).find({
where: {
id: userAdmin.id,
},
relations: [...relations],
relations: relations,
})
assert(users3.pop())
const [users4] = await connection

@ -108,14 +115,14 @@ describe("github issues > #5684 eager relation skips children relations", () =>
where: {
id: userAdmin.id,
},
relations: [...relations],
relations: relations,
})
assert(users4.pop())
const users5 = await connection.getRepository(User).find({
where: {
id: In([userAdmin.id]),
},
relations: [...relations],
relations: relations,
})
assert(users5.pop())
}),
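For deep relations like the #5684 case, each dotted path from the old string form becomes one branch of a single nested object. A small sketch of that mapping, with stand-in `User`/`Company`/`Admin` shapes that only approximate the test's real entities:

```ts
import { FindOptionsRelations } from "typeorm"

// Stand-in shapes; the real entities live in the #5684 test fixtures.
interface Admin {
    id: number
}
interface Company {
    admin: Admin
    staff: User[]
}
interface User {
    id: number
    company: Company
}

// Old form: ["company", "company.admin", "company.staff",
//            "company.staff.company", "company.staff.company.admin"]
// New form: intermediate segments are implied by the nesting itself.
const relations: FindOptionsRelations<User> = {
    company: {
        admin: true,
        staff: {
            company: {
                admin: true,
            },
        },
    },
}
```

The runtime value is exactly the object the updated test builds; only the interface declarations above are illustrative.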
@ -103,27 +103,29 @@ describe("github issues > #5691 RelationId is too slow", () => {
// const test1Start = new Date().getTime();
// 54 rows for 1 root
await connection.getRepository(Root).find({
relations: [
"allChild1",
"allChild1.allShared",
"allChild2",
],
relations: {
allChild1: {
allShared: true,
},
allChild2: true,
},
})
// 21 rows 1 root
await connection.getRepository(Root).find({
relations: ["allShared"],
relations: { allShared: true },
})
// const test1End = new Date().getTime();

// const test2Start = new Date().getTime();
// 1134 rows 1 root
await connection.getRepository(Root).find({
relations: [
"allChild1",
"allChild1.allShared",
"allChild2",
"allShared",
],
relations: {
allChild1: {
allShared: true,
},
allChild2: true,
allShared: true,
},
})
// const test2End = new Date().getTime();

@ -39,7 +39,7 @@ describe("github issues > #703.findOne does not return an empty array on OneToMa
where: {
id: 1,
},
relations: ["categories"],
relations: { categories: true },
})

loadedPost!.id.should.be.equal(1)
@ -32,7 +32,7 @@ describe("github issues > #7041 When requesting nested relations on foreign key
where: {
id: testUser.id,
},
relations: ["admin", "admin.organization"],
relations: { admin: { organization: true } },
})
expect(foundUser?.randomField).eq("foo")
expect(foundUser?.admin).eq(null)

@ -50,7 +50,7 @@ describe("github issues > #7041 When requesting nested relations on foreign key
where: {
id: testUser.id,
},
relations: ["membership", "membership.organization"],
relations: { membership: { organization: true } },
})
expect(foundUser?.randomField).eq("foo")
expect(foundUser?.membership).eql([])

@ -50,7 +50,7 @@ describe("github issues > #7065 ChildEntity type relationship produces unexpecte
where: {
id: 1,
},
relations: ["emails", "phones"],
relations: { emails: true, phones: true },
})

expect(result!.emails.length).eq(1)

@ -25,13 +25,13 @@ describe("github issues > #7882 .findOne reduces relations to an empty array",
it("should delete all documents related to search pattern", () =>
Promise.all(
connections.map(async (connection) => {
const relations = ["exampleText"]
const relations = { exampleText: true }

const repo = connection.getRepository(Example)

await repo.find({ relations })

expect(relations).to.be.eql(["exampleText"])
expect(relations).to.be.eql({ exampleText: true })
}),
))
})
17 test/github-issues/7907/entity/Post.ts Normal file
@ -0,0 +1,17 @@
import { Entity } from "../../../../src/decorator/entity/Entity"
import { Column } from "../../../../src/decorator/columns/Column"
import { ObjectIdColumn } from "../../../../src/decorator/columns/ObjectIdColumn"
// import { ObjectId } from "mongodb";
import { ObjectID } from "../../../../src"

@Entity()
export class Post {
@ObjectIdColumn()
id: ObjectID

@Column()
title: string

@Column()
text: string
}
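The new entity keeps importing `ObjectID` from TypeORM's own index rather than from the mongodb package, whose v4 typings rename it to `ObjectId`. As a rough sketch, not part of this commit, of how such an entity could be wired up against the upgraded driver, assuming standard `type: "mongodb"` connection options and placeholder host/database values:

```ts
import "reflect-metadata"
import { DataSource } from "typeorm"
import { Post } from "./entity/Post" // the entity added above

// Placeholder connection values; only the option names are TypeORM's.
const dataSource = new DataSource({
    type: "mongodb",
    host: "localhost",
    port: 27017,
    database: "test",
    entities: [Post],
})

async function main() {
    await dataSource.initialize()

    const repo = dataSource.getMongoRepository(Post)
    const post = repo.create({ title: "Post", text: "This is a simple post" })
    await repo.save(post)

    console.log(await repo.find())
    await dataSource.destroy()
}

main().catch(console.error)
```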
44 test/github-issues/7907/issue-7907.ts Normal file
@ -0,0 +1,44 @@
import "reflect-metadata"
import { expect } from "chai"
import { Connection } from "../../../src"
import {
closeTestingConnections,
createTestingConnections,
reloadTestingDatabases,
} from "../../utils/test-utils"
import { Post } from "./entity/Post"

describe("github issues > #7907 add support for mongodb driver v4", () => {
let connections: Connection[]
before(
async () =>
(connections = await createTestingConnections({
entities: [Post],
enabledDrivers: ["mongodb"],
})),
)
beforeEach(() => reloadTestingDatabases(connections))
after(() => closeTestingConnections(connections))

it("should find the Post without throw error: Cannot read property 'prototype' of undefined", () =>
Promise.all(
connections.map(async (connection) => {
const postMongoRepository = connection.getMongoRepository(Post)

// save a post
const post = new Post()
post.title = "Post"
post.text = "This is a simple post"
await postMongoRepository.save(post)

const findPosts = async () => {
return postMongoRepository.find()
}
const posts = await findPosts()

expect(findPosts).to.not.throw()
expect(posts).to.have.lengthOf(1)
expect(posts[0]).to.be.instanceOf(Post)
}),
))
})
@ -40,7 +40,7 @@ describe("github issues > #8018 Non-unique relation property names causes entity
await connection.manager.save([parent, child1, child2])

const result = await connection.manager.find(Parent, {
relations: ["children"],
relations: { children: true },
})

expect(result).to.have.lengthOf(1)

@ -35,7 +35,11 @@ describe("persistence > delete orphans", () => {
let postRepository: Repository<Post>
let categoryId: number

beforeEach(async () => {
beforeEach(async function () {
if (connections.length === 0) {
this.skip()
}

await Promise.all(
connections.map(async (connection) => {
categoryRepository = connection.getRepository(Category)
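The `beforeEach` hook above switches from an arrow function to a regular `function` so that Mocha's test context is available as `this`, which arrow functions never receive; `this.skip()` then marks the test as pending when no connection was created. A minimal sketch of that pattern, with an illustrative suite name:

```ts
import { Connection } from "typeorm"

describe("example suite (illustrative only)", () => {
    const connections: Connection[] = []

    // A regular function lets Mocha bind its Context to `this`.
    beforeEach(async function () {
        if (connections.length === 0) {
            this.skip() // marks the test as pending instead of failing
        }
    })

    it("runs only when at least one connection was created", () => {
        // ...
    })
})
```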
@ -30,13 +30,13 @@ describe("other issues > preventing-injection", () => {
const postWithOnlyIdSelected = await connection.manager.find(
Post,
{
select: ["id"],
select: { id: true },
},
)
postWithOnlyIdSelected.should.be.eql([{ id: 1 }])

await connection.manager.find(Post, {
select: ["(WHERE LIMIT 1)" as any],
select: "(WHERE LIMIT 1)" as any,
}).should.be.rejected
}),
))

@ -39,7 +39,7 @@ describe("other issues > Relation decorators: allow to pass given table name str
await connection.manager.save(user)

const users = await connection.manager.find(User, {
relations: ["profile"],
relations: { profile: true },
})

users.should.eql([

@ -73,10 +73,14 @@ describe("other issues > Relation decorators: allow to pass given table name str
await connection.manager.save(user)

const users = await connection.manager.find(User, {
relations: ["photos"],
relations: {
photos: true,
},
})
const photos = await connection.manager.find(Photo, {
relations: ["user"],
relations: {
user: true,
},
})

// Check one-to-many

@ -130,7 +134,7 @@ describe("other issues > Relation decorators: allow to pass given table name str
await connection.manager.save(question)

const questions = await connection.manager.find(Question, {
relations: ["categories"],
relations: { categories: true },
})

questions[0].categories.should.have.deep.members([