Mirror of https://github.com/typeorm/typeorm.git, synced 2025-12-08 21:26:23 +00:00
Compare commits: 9d38c2ac03 ... ccde2ec929 (21 commits)
Commits (SHA1):

- ccde2ec929
- a46eb0a7e1
- 2d8c5158db
- 6e34756b9d
- 73fda419e4
- 6f486e5a67
- 38715bbd41
- ec3ea10b44
- c4f5d12f3f
- 61f9e0d085
- 55cd8e2b08
- 67f793feaa
- 835647ac92
- 546192767d
- d0b54544e9
- cfb3d6c015
- dd55218648
- cb1284c8c0
- 9383799b3d
- ea0f155532
- ade198c77c
@@ -1,7 +1,7 @@
 {
     "all": true,
     "cache": false,
-    "exclude": ["node_modules", "**/*.d.ts"],
+    "exclude": ["**/*.d.ts"],
     "exclude-after-remap": true,
     "extension": [".ts"],
     "include": ["build/compiled/src/**", "src/**"],
4 .github/PULL_REQUEST_TEMPLATE.md vendored
@@ -33,8 +33,8 @@

 - [ ] Code is up-to-date with the `master` branch
 - [ ] This pull request links relevant issues as `Fixes #00000`
-- [ ] There are new or updated unit tests validating the change
-- [ ] Documentation has been updated to reflect this change
+- [ ] There are new or updated tests validating the change (`tests/**.test.ts`)
+- [ ] Documentation has been updated to reflect this change (`docs/docs/**.md`)

 <!--
 🎉 Thank you for contributing and making TypeORM even better!
28 .github/workflows/docsearch.yml vendored
@@ -12,7 +12,35 @@ jobs:

         steps:
             - uses: actions/checkout@v5
+            - name: Delete unaliased collections
+              env:
+                  TYPESENSE_API_KEY: ${{ secrets.TYPESENSE_API_KEY }}
+                  TYPESENSE_HOST: ${{ secrets.TYPESENSE_HOST }}
+                  TYPESENSE_PROTOCOL: https
+                  TYPESENSE_PORT: 443
+              run: |
+                  ALIAS_COLLECTION=$(curl -s -H "X-TYPESENSE-API-KEY: $TYPESENSE_API_KEY" \
+                      "$TYPESENSE_PROTOCOL://$TYPESENSE_HOST:$TYPESENSE_PORT/aliases/typeorm-docs" \
+                      | jq -r '.collection_name')
+
+                  if [ "$ALIAS_COLLECTION" = "null" ] || [ -z "$ALIAS_COLLECTION" ]; then
+                      echo "Alias does not exist; skipping collection cleanup."
+                      exit 0
+                  fi
+
+                  echo "Alias currently points to: $ALIAS_COLLECTION"
+
+                  COLLECTIONS=$(curl -s -H "X-TYPESENSE-API-KEY: $TYPESENSE_API_KEY" \
+                      "$TYPESENSE_PROTOCOL://$TYPESENSE_HOST:$TYPESENSE_PORT/collections" \
+                      | jq -r '.[].name')
+
+                  for col in $COLLECTIONS; do
+                      if [ "$col" != "$ALIAS_COLLECTION" ]; then
+                          echo "Deleting unaliased collection: $col"
+                          curl -s -X DELETE -H "X-TYPESENSE-API-KEY: $TYPESENSE_API_KEY" \
+                              "$TYPESENSE_PROTOCOL://$TYPESENSE_HOST:$TYPESENSE_PORT/collections/$col"
+                      fi
+                  done
             - run: |
                   docker run \
                       -e TYPESENSE_API_KEY=${{ secrets.TYPESENSE_API_KEY }} \
1 .github/workflows/tests-linux.yml vendored
@@ -300,6 +300,7 @@ jobs:
             - run: npm ci
             - run: cat ormconfig.sample.json | jq 'map(select(.name == "oracle"))' > ormconfig.json
             - run: docker compose up oracle --no-recreate --wait
             - run: sleep 10
             - run: npx c8 npm run test:ci

             - name: Coveralls Parallel
1 .github/workflows/tests.yml vendored
@@ -34,6 +34,7 @@ jobs:

             src-or-tests: &src-or-tests
                 - *src
                 - test/**/*.ts
                 - .github/workflows/test/**/*
                 - .github/workflows/test*.yml
+                - .mocharc.json
@@ -1,10 +1,10 @@
 {
-    "__comment": "TODO: remove --exit flag: https://mochajs.org/#-exit",
-    "exit": true,
     "$schema": "https://json.schemastore.org/mocharc",
     "check-leaks": true,
     "color": true,
+    "exit": true,
     "file": ["./build/compiled/test/utils/test-setup.js"],
     "recursive": true,
-    "spec": ["./build/compiled/test"],
+    "spec": ["./build/compiled/test/**/*.test.{js,ts}"],
     "timeout": 90000
 }
14 .pr_agent.toml Normal file
@@ -0,0 +1,14 @@
+[github_app]
+pr_commands = [
+    "/review",
+    "/improve",
+]
+
+handle_push_trigger = true
+push_commands = [
+    "/improve",
+]
+
+[auto_best_practices]
+enable_auto_best_practices = true
+utilize_auto_best_practices = true
28 CHANGELOG.md
@@ -1,3 +1,31 @@
+## [0.3.28](https://github.com/typeorm/typeorm/compare/0.3.27...0.3.28) (2025-12-02)
+
+### Bug Fixes
+
+* add multiSubnetFailover option for mssql ([#10804](https://github.com/typeorm/typeorm/issues/10804)) ([83e3a8a](https://github.com/typeorm/typeorm/commit/83e3a8a3db581a50495fa2d97c8fcd5d603cfd3c))
+* circular import in SapDriver.ts ([#11750](https://github.com/typeorm/typeorm/issues/11750)) ([bed7913](https://github.com/typeorm/typeorm/commit/bed79136230d4ab26cce8cf79071134c75527857))
+* **cli:** init command reading package.json from two folders up ([#11789](https://github.com/typeorm/typeorm/issues/11789)) ([dd55218](https://github.com/typeorm/typeorm/commit/dd55218648eb449937e22e1e7c88182db0048f1d))
+* **deps:** upgrade glob to fix CVE-2025-64756 ([#11784](https://github.com/typeorm/typeorm/issues/11784)) ([dc74f53](https://github.com/typeorm/typeorm/commit/dc74f5374ef5ec83d53045e4bca99cb9ff7d49d4))
+* **mongodb:** add missing `findBy` method to MongoEntityManager ([#11814](https://github.com/typeorm/typeorm/issues/11814)) ([38715bb](https://github.com/typeorm/typeorm/commit/38715bbd4169cae2910aac035cd2b05bddbaec5c))
+* **redis:** version detection logic ([#11815](https://github.com/typeorm/typeorm/issues/11815)) ([6f486e5](https://github.com/typeorm/typeorm/commit/6f486e5a67c007287949be119f233fb2b4fb7a59))
+* typesense doc sync ([#11807](https://github.com/typeorm/typeorm/issues/11807)) ([d0b5454](https://github.com/typeorm/typeorm/commit/d0b54544e9e43a5330c0485d41551128224fe4d3))
+
+### Features
+
+* add support for `jsonpath` column type in PostgreSQL ([#11684](https://github.com/typeorm/typeorm/issues/11684)) ([4f05718](https://github.com/typeorm/typeorm/commit/4f05718237a6ef1a3bc623e803536db23f1f327b))
+* **cli/init:** pick dependencies versions from our own package.json ([#11705](https://github.com/typeorm/typeorm/issues/11705)) ([b930909](https://github.com/typeorm/typeorm/commit/b9309098bc00de047a96cba642ea1ed9e730b1fa))
+* entity schema support trees ([#11606](https://github.com/typeorm/typeorm/issues/11606)) ([925dee0](https://github.com/typeorm/typeorm/commit/925dee002b92f1210456dce16c18c6b436e912f3))
+* export QueryPartialEntity and QueryDeepPartialEntity types ([#11748](https://github.com/typeorm/typeorm/issues/11748)) ([ade198c](https://github.com/typeorm/typeorm/commit/ade198c77cda65e86f057f97261073f5ab2b1ed6))
+* init version in postgres driver only if not set ([#11373](https://github.com/typeorm/typeorm/issues/11373)) ([cb1284c](https://github.com/typeorm/typeorm/commit/cb1284c8c0950dcb792e95b889efe1dfafc05aea))
+* manage MongoDB SOCKS5 proxy settings ([#11731](https://github.com/typeorm/typeorm/issues/11731)) ([d7867eb](https://github.com/typeorm/typeorm/commit/d7867ebff173e6cae45e6ce82c9f8890811c4eba))
+* **mssql:** support 'vector' type for MS SQL Server ([#11732](https://github.com/typeorm/typeorm/issues/11732)) ([2681051](https://github.com/typeorm/typeorm/commit/2681051f78c5c284b340e7978f8f337e86c7e915))
+* **mysql:** add pool size options for each connection ([#11810](https://github.com/typeorm/typeorm/issues/11810)) ([67f793f](https://github.com/typeorm/typeorm/commit/67f793feaa976da717175daf152f738793b94ed2))
+* **mysql:** add support for vector columns on MariaDB and MySQL ([#11670](https://github.com/typeorm/typeorm/issues/11670)) ([cfb3d6c](https://github.com/typeorm/typeorm/commit/cfb3d6c015ad648a7ffc08a7a11ce580d108ac69))
+
 ## [0.3.27](https://github.com/typeorm/typeorm/compare/0.3.26...0.3.27) (2025-09-19)
90 DEVELOPER.md
@@ -2,11 +2,11 @@

 This document describes how to set up your development environment and run TypeORM test cases.

-* [Prerequisite Software](#prerequisite-software)
-* [Getting the Sources](#getting-the-sources)
-* [Installing NPM Modules](#installing-npm-modules)
-* [Building](#building)
-* [Running Tests Locally](#running-tests-locally)
+- [Prerequisite Software](#prerequisite-software)
+- [Getting the Sources](#getting-the-sources)
+- [Installing NPM Modules](#installing-npm-modules)
+- [Building](#building)
+- [Running Tests Locally](#running-tests-locally)

 See the [contribution guidelines](https://github.com/typeorm/typeorm/blob/master/CONTRIBUTING.md)
 if you'd like to contribute to TypeORM.
@@ -16,19 +16,19 @@ if you'd like to contribute to TypeORM.
 Before you can build and test TypeORM, you must install and configure the
 following products on your development machine:

-* [Git](http://git-scm.com) and/or the **GitHub app** (for [Mac](http://mac.github.com) or
-  [Windows](http://windows.github.com)); [GitHub's Guide to Installing
-  Git](https://help.github.com/articles/set-up-git) is a good source of information.
-* [Node.js](http://nodejs.org), (better to install latest version) which is used to run a development web server,
-  run tests, and generate distributable files.
-  Depending on your system, you can install Node either from source or as a pre-packaged bundle.
-* [Mysql](https://www.mysql.com/) is required to run tests on this platform (or docker)
-* [MariaDB](https://mariadb.com/) is required to run tests on this platform (or docker)
-* [Postgres](https://www.postgresql.org/) is required to run tests on this platform (or docker)
-* [Oracle](https://www.oracle.com/database/index.html) is required to run tests on this platform
-* [Microsoft SQL Server](https://www.microsoft.com/en-us/cloud-platform/sql-server) is required to run tests on this platform
-* For MySQL, MariaDB and Postgres you can use [docker](https://www.docker.com/) instead (docker configuration is
-  [here](https://github.com/typeorm/typeorm/blob/master/docker-compose.yml))
+- [Git](http://git-scm.com) and/or the **GitHub app** (for [Mac](http://mac.github.com) or
+  [Windows](http://windows.github.com)); [GitHub's Guide to Installing
+  Git](https://help.github.com/articles/set-up-git) is a good source of information.
+- [Node.js](http://nodejs.org), (better to install latest version) which is used to run a development web server,
+  run tests, and generate distributable files.
+  Depending on your system, you can install Node either from source or as a pre-packaged bundle.
+- [Mysql](https://www.mysql.com/) is required to run tests on this platform (or docker)
+- [MariaDB](https://mariadb.com/) is required to run tests on this platform (or docker)
+- [Postgres](https://www.postgresql.org/) is required to run tests on this platform (or docker)
+- [Oracle](https://www.oracle.com/database/index.html) is required to run tests on this platform
+- [Microsoft SQL Server](https://www.microsoft.com/en-us/cloud-platform/sql-server) is required to run tests on this platform
+- For MySQL, MariaDB and Postgres you can use [docker](https://www.docker.com/) instead (docker configuration is
+  [here](https://github.com/typeorm/typeorm/blob/master/docker-compose.yml))

 ## Getting the Sources
@@ -56,9 +56,9 @@ You should have node installed in the version described in [.nvmrc](.nvmrc).

 It is recommended to configure your OS to automatically switch to use this version whenever you enter project folder. This can be achieved in many ways:

-* [`fnm`](https://github.com/Schniz/fnm)
-* [`zsh-nvm`](https://github.com/lukechilds/zsh-nvm#auto-use)
-* [`asdf`](https://asdf-vm.com) with `asdf-nodejs` plugin and [`legacy_version_file = true`](https://asdf-vm.com/manage/configuration.html#legacy-version-file) option
+- [`fnm`](https://github.com/Schniz/fnm)
+- [`zsh-nvm`](https://github.com/lukechilds/zsh-nvm#auto-use)
+- [`asdf`](https://asdf-vm.com) with `asdf-nodejs` plugin and [`legacy_version_file = true`](https://asdf-vm.com/manage/configuration.html#legacy-version-file) option

 ## Installing package dependencies
@@ -101,36 +101,46 @@ You can copy this tar into your project and run `npm install ./typeorm-x.x.x.tgz`

 It is greatly appreciated if PRs that change code come with appropriate tests.

-To create a new test, check the [relevant functional tests](https://github.com/typeorm/typeorm/tree/master/test/functional). Depending on the test, you may need to create a new test file or modify an existing one.
+To create a new test, check the [relevant functional tests](https://github.com/typeorm/typeorm/tree/master/test/functional). Depending on the test, you may need to create a new `.test.ts` file or modify an existing one.

 If the test is for a specific regression or issue opened on GitHub, add a comment to the tests mentioning the issue number.

 Most tests will benefit from using this template as a starting point:

 ```ts
-import "reflect-metadata";
-import { createTestingConnections, closeTestingConnections, reloadTestingDatabases } from "../../utils/test-utils";
+import { expect } from "chai"
+import "reflect-metadata"
+import {
+    closeTestingConnections,
+    createTestingConnections,
+    reloadTestingDatabases,
+} from "../../utils/test-utils"
 import { DataSource } from "../../../src/data-source/DataSource"
-import { expect } from "chai";

 describe("description of the functionality you're testing", () => {
-    let dataSources: DataSource[];
-    before(async () => dataSources = await createTestingConnections({
-        entities: [__dirname + "/entity/*{.js,.ts}"],
-        schemaCreate: true,
-        dropSchema: true,
-    }));
-    beforeEach(() => reloadTestingDatabases(dataSources));
-    after(() => closeTestingConnections(dataSources));
+    let dataSources: DataSource[]
+
+    before(
+        async () =>
+            (dataSources = await createTestingConnections({
+                entities: [__dirname + "/entity/*{.js,.ts}"],
+                schemaCreate: true,
+                dropSchema: true,
+            })),
+    )
+    beforeEach(() => reloadTestingDatabases(dataSources))
+    after(() => closeTestingConnections(dataSources))

     // optional: test fix for issue https://github.com/typeorm/typeorm/issues/<issue-number>
-    it("should <put a detailed description of what it should do here>", () => Promise.all(dataSources.map(async dataSource => {
-        // tests go here
-    })));
+    it("should <put a detailed description of what it should do here>", () =>
+        Promise.all(
+            dataSources.map(async (dataSource) => {
+                // tests go here
+            }),
+        ))

-    // you can add additional tests if needed
-});
+    // you can add additional tests if needed
+})
 ```
|
||||
If you place entities in `./entity/<entity-name>.ts` relative to your test file,
|
||||
@ -173,8 +183,8 @@ Once TypeScript finishes compiling your changes, you can run `npm run test:fast`
|
||||
To run your tests you need the Database Management Systems (DBMS) installed on your machine. Alternatively, you can use docker with the DBMS running in containers. To have docker run all the DBMS for you simply run `docker-compose up`
|
||||
in the root of the project. Once all images are fetched and are running, you can run the tests.
|
||||
|
||||
- The docker image of mssql-server needs at least 3.25GB of RAM.
|
||||
- Make sure to assign enough memory to the Docker VM if you're running on Docker for Mac or Windows
|
||||
- The docker image of mssql-server needs at least 3.25GB of RAM.
|
||||
- Make sure to assign enough memory to the Docker VM if you're running on Docker for Mac or Windows
|
||||
|
||||
## Release Process
|
||||
|
||||
|
||||
@@ -12,7 +12,7 @@ services:
             MYSQL_DATABASE: "test"

     mysql-9:
-        image: "mysql:9.4.0"
+        image: "mysql:9.5.0"
         container_name: "typeorm-mysql-9"
         ports:
             - "3306:3306"

@@ -24,7 +24,7 @@ services:

     # mariadb
     mariadb-10:
-        image: "mariadb:10.6.22-jammy"
+        image: "mariadb:10.6.24-jammy"
         container_name: "typeorm-mariadb-10"
         ports:
             - "3307:3306"

@@ -35,7 +35,7 @@ services:
             MYSQL_DATABASE: "test"

     mariadb-12:
-        image: "mariadb:12.0.1-rc"
+        image: "mariadb:12.1.2"
         container_name: "typeorm-mariadb-12"
         ports:
             - "3307:3306"
@@ -207,7 +207,7 @@ const queryEmbedding = [
 const results = await dataSource.query(
     `
     DECLARE @question AS VECTOR (1998) = @0;
     SELECT TOP (10) dc.*,
         VECTOR_DISTANCE('cosine', @question, embedding) AS distance
     FROM document_chunk dc
     ORDER BY VECTOR_DISTANCE('cosine', @question, embedding)
@@ -34,6 +34,10 @@ See [Data Source Options](../data-source/2-data-source-options.md) for the commo

 - `database` - Database name.

 - `socketPath` - Database socket path.

+- `poolSize` - Maximum number of clients the pool should contain for each connection.
+
 - `charset` and `collation` - The charset/collation for the connection. If an SQL-level charset is specified (like utf8mb4) then the default collation for that charset is used.

 - `timezone` - the timezone configured on the MySQL server. This is used to typecast server date/time

@@ -139,3 +143,7 @@ export class User {
     roles: UserRoleType[]
 }
 ```
+
+### Vector Types
+
+MySQL supports the [VECTOR type](https://dev.mysql.com/doc/refman/en/vector.html) since version 9.0, while in MariaDB, [vectors](https://mariadb.com/docs/server/reference/sql-structure/vectors/vector-overview) are available since 11.7.
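A minimal entity sketch of such a column, mirroring the generic vector-column example added elsewhere in this changeset (the `Post` entity name is illustrative):

```typescript
import { Column, Entity, PrimaryGeneratedColumn } from "typeorm"

@Entity()
export class Post {
    @PrimaryGeneratedColumn()
    id: number

    // vector(3): MySQL/MariaDB store each dimension as a 4-byte float,
    // so this column occupies 12 bytes per row
    @Column("vector", { length: 3 })
    embedding: number[] | Buffer
}
```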
@@ -60,7 +60,7 @@ Additional options can be added to the `extra` object and will be passed directl

 ### Column types for `postgres`

-`int`, `int2`, `int4`, `int8`, `smallint`, `integer`, `bigint`, `decimal`, `numeric`, `real`, `float`, `float4`, `float8`, `double precision`, `money`, `character varying`, `varchar`, `character`, `char`, `text`, `citext`, `hstore`, `bytea`, `bit`, `varbit`, `bit varying`, `timetz`, `timestamptz`, `timestamp`, `timestamp without time zone`, `timestamp with time zone`, `date`, `time`, `time without time zone`, `time with time zone`, `interval`, `bool`, `boolean`, `enum`, `point`, `line`, `lseg`, `box`, `path`, `polygon`, `circle`, `cidr`, `inet`, `macaddr`, `macaddr8`, `tsvector`, `tsquery`, `uuid`, `xml`, `json`, `jsonb`, `jsonpath`, `int4range`, `int8range`, `numrange`, `tsrange`, `tstzrange`, `daterange`, `int4multirange`, `int8multirange`, `nummultirange`, `tsmultirange`, `tstzmultirange`, `multidaterange`, `geometry`, `geography`, `cube`, `ltree`
+`int`, `int2`, `int4`, `int8`, `smallint`, `integer`, `bigint`, `decimal`, `numeric`, `real`, `float`, `float4`, `float8`, `double precision`, `money`, `character varying`, `varchar`, `character`, `char`, `text`, `citext`, `hstore`, `bytea`, `bit`, `varbit`, `bit varying`, `timetz`, `timestamptz`, `timestamp`, `timestamp without time zone`, `timestamp with time zone`, `date`, `time`, `time without time zone`, `time with time zone`, `interval`, `bool`, `boolean`, `enum`, `point`, `line`, `lseg`, `box`, `path`, `polygon`, `circle`, `cidr`, `inet`, `macaddr`, `macaddr8`, `tsvector`, `tsquery`, `uuid`, `xml`, `json`, `jsonb`, `jsonpath`, `int4range`, `int8range`, `numrange`, `tsrange`, `tstzrange`, `daterange`, `int4multirange`, `int8multirange`, `nummultirange`, `tsmultirange`, `tstzmultirange`, `multidaterange`, `geometry`, `geography`, `cube`, `ltree`, `vector`, `halfvec`.

 ### Column types for `cockroachdb`
@@ -68,6 +68,33 @@ Additional options can be added to the `extra` object and will be passed directl

 Note: CockroachDB returns all numeric data types as `string`. However, if you omit the column type and define your property as `number` ORM will `parseInt` string into number.

+### Vector columns
+
+Vector columns can be used for similarity searches using PostgreSQL's vector operators:
+
+```typescript
+// L2 distance (Euclidean) - <->
+const results = await dataSource.sql`
+    SELECT id, embedding
+    FROM post
+    ORDER BY embedding <-> ${"[1,2,3]"}
+    LIMIT 5`
+
+// Cosine distance - <=>
+const results = await dataSource.sql`
+    SELECT id, embedding
+    FROM post
+    ORDER BY embedding <=> ${"[1,2,3]"}
+    LIMIT 5`
+
+// Inner product - <#>
+const results = await dataSource.sql`
+    SELECT id, embedding
+    FROM post
+    ORDER BY embedding <#> ${"[1,2,3]"}
+    LIMIT 5`
+```
+
 ### Spatial columns

 TypeORM's PostgreSQL and CockroachDB support uses [GeoJSON](http://geojson.org/) as an interchange format, so geometry columns should be tagged either as `object` or `Geometry` (or subclasses, e.g. `Point`) after importing [`geojson` types](https://www.npmjs.com/package/@types/geojson) or using the TypeORM built-in GeoJSON types:
@@ -37,15 +37,16 @@ SAP HANA 2.0 and SAP HANA Cloud support slightly different data types. Check the
 - [SAP HANA 2.0 Data Types](https://help.sap.com/docs/SAP_HANA_PLATFORM/4fe29514fd584807ac9f2a04f6754767/20a1569875191014b507cf392724b7eb.html?locale=en-US)
 - [SAP HANA Cloud Data Types](https://help.sap.com/docs/hana-cloud-database/sap-hana-cloud-sap-hana-database-sql-reference-guide/data-types)

-TypeORM's `SapDriver` supports `tinyint`, `smallint`, `integer`, `bigint`, `smalldecimal`, `decimal`, `real`, `double`, `date`, `time`, `seconddate`, `timestamp`, `boolean`, `char`, `nchar`, `varchar`, `nvarchar`, `text`, `alphanum`, `shorttext`, `array`, `varbinary`, `blob`, `clob`, `nclob`, `st_geometry`, `st_point`, `real_vector`, `half_vector`, `vector`, and `halfvec`. Some of these data types have been deprecated or removed in SAP HANA Cloud, and will be converted to the closest available alternative when connected to a Cloud database.
+TypeORM's `SapDriver` supports `tinyint`, `smallint`, `integer`, `bigint`, `smalldecimal`, `decimal`, `real`, `double`, `date`, `time`, `seconddate`, `timestamp`, `boolean`, `char`, `nchar`, `varchar`, `nvarchar`, `text`, `alphanum`, `shorttext`, `array`, `varbinary`, `blob`, `clob`, `nclob`, `st_geometry`, `st_point`, `real_vector` and `half_vector`. Some of these data types have been deprecated or removed in SAP HANA Cloud, and will be converted to the closest available alternative when connected to a Cloud database.

 ### Vector Types

 The `real_vector` and `half_vector` data types were introduced in SAP HANA Cloud (2024Q1 and 2025Q2 respectively), and require a supported version of `@sap/hana-client` as well.

+For consistency with PostgreSQL's vector support, TypeORM also provides aliases:
+
 - `vector` (alias for `real_vector`) - stores vectors as 4-byte floats
 - `halfvec` (alias for `half_vector`) - stores vectors as 2-byte floats for memory efficiency

 ```typescript
 @Entity()

@@ -70,3 +71,5 @@ export class Document {
 ```

 By default, the client will return a `Buffer` in the `fvecs`/`hvecs` format, which is more efficient. It is possible to let the driver convert the values to a `number[]` by adding `{ extra: { vectorOutputType: "Array" } }` to the connection options. Check the SAP HANA Client documentation for more information about [REAL_VECTOR](https://help.sap.com/docs/SAP_HANA_CLIENT/f1b440ded6144a54ada97ff95dac7adf/0d197e4389c64e6b9cf90f6f698f62fe.html) or [HALF_VECTOR](https://help.sap.com/docs/SAP_HANA_CLIENT/f1b440ded6144a54ada97ff95dac7adf/8bb854b4ce4a4299bed27c365b717e91.html).
+
+Use the appropriate [vector functions](https://help.sap.com/docs/hana-cloud-database/sap-hana-cloud-sap-hana-database-sql-reference-guide/vector-functions) for similarity searches.
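A rough sketch of a connection configured that way; host, port, and credentials are placeholders, and `extra` is forwarded to `@sap/hana-client`:

```typescript
import { DataSource } from "typeorm"

const dataSource = new DataSource({
    type: "sap",
    host: "localhost", // placeholder
    port: 30015,
    username: "SYSTEM",
    password: "password",
    database: "HXE",
    extra: {
        // hydrate REAL_VECTOR/HALF_VECTOR columns as number[] instead of Buffer
        vectorOutputType: "Array",
    },
})
```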
@@ -180,88 +180,6 @@ There are several special column types with additional functionality available:
   each time you call `save` of entity manager or repository, or during `upsert` operations when an update occurs.
   You don't need to set this column - it will be automatically set.

-### Vector columns
-
-Vector columns are supported on PostgreSQL (via [`pgvector`](https://github.com/pgvector/pgvector) extension), Microsoft SQL Server, and SAP HANA Cloud, enabling storing and querying vector embeddings for similarity search and machine learning applications.
-
-TypeORM supports both `vector` and `halfvec` column types across databases:
-
-- `vector` - stores vectors as 4-byte floats (single precision)
-    - PostgreSQL: native `vector` type via pgvector extension
-    - SQL Server: native `vector` type
-    - SAP HANA: alias for `real_vector` type
-- `halfvec` - stores vectors as 2-byte floats (half precision) for memory efficiency
-    - PostgreSQL: native `halfvec` type via pgvector extension
-    - SAP HANA: alias for `half_vector` type
-
-You can specify the vector dimensions using the `length` option:
-
-```typescript
-@Entity()
-export class Post {
-    @PrimaryGeneratedColumn()
-    id: number
-
-    // Vector without specified dimensions (works on PostgreSQL and SAP HANA; SQL Server requires explicit dimensions)
-    @Column("vector")
-    embedding: number[] | Buffer
-
-    // Vector with 3 dimensions: vector(3)
-    @Column("vector", { length: 3 })
-    embedding_3d: number[] | Buffer
-
-    // Half-precision vector with 4 dimensions: halfvec(4) (PostgreSQL and SAP HANA only)
-    @Column("halfvec", { length: 4 })
-    halfvec_embedding: number[] | Buffer
-}
-```
-
-**PostgreSQL** - Vector columns can be used for similarity searches using vector operators:
-
-```typescript
-// L2 distance (Euclidean) - <->
-const results = await dataSource.query(
-    `SELECT id, embedding FROM post ORDER BY embedding <-> $1 LIMIT 5`,
-    ["[1,2,3]"],
-)
-
-// Cosine distance - <=>
-const results = await dataSource.query(
-    `SELECT id, embedding FROM post ORDER BY embedding <=> $1 LIMIT 5`,
-    ["[1,2,3]"],
-)
-
-// Inner product - <#>
-const results = await dataSource.query(
-    `SELECT id, embedding FROM post ORDER BY embedding <#> $1 LIMIT 5`,
-    ["[1,2,3]"],
-)
-```
-
-**SQL Server** - Use the `VECTOR_DISTANCE` function for similarity searches:
-
-```typescript
-const queryEmbedding = [1, 2, 3]
-
-// Cosine distance
-const results = await dataSource.query(
-    `
-    DECLARE @question AS VECTOR(3) = @0;
-    SELECT TOP (5) id, embedding,
-        VECTOR_DISTANCE('cosine', @question, embedding) AS distance
-    FROM post
-    ORDER BY VECTOR_DISTANCE('cosine', @question, embedding)
-    `,
-    [JSON.stringify(queryEmbedding)],
-)
-```
-
-> **Note**:
->
-> - **PostgreSQL**: Vector columns require the `pgvector` extension to be installed. The extension provides the vector data types and similarity operators.
-> - **SQL Server**: Vector type support requires a compatible SQL Server version with vector functionality enabled.
-> - **SAP HANA**: Vector columns require SAP HANA Cloud (2024Q1+) and a supported version of `@sap/hana-client`. Use the appropriate [vector similarity functions](https://help.sap.com/docs/hana-cloud-database/sap-hana-cloud-sap-hana-database-sql-reference-guide/vector-functions) for similarity searches.
-
 ## Column types

 TypeORM supports all of the most commonly used database-supported column types.
@@ -414,6 +332,50 @@ Besides "uuid" there is also "increment", "identity" (Postgres 10+ only) and "ro
 on some database platforms with this type of generation (for example some databases can only have one increment column,
 or some of them require increment to be a primary key).

+### Vector columns
+
+Vector columns are supported on MariaDB/MySQL, Microsoft SQL Server, PostgreSQL (via [`pgvector`](https://github.com/pgvector/pgvector) extension) and SAP HANA Cloud, enabling storing and querying vector embeddings for similarity search and machine learning applications.
+
+TypeORM supports both `vector` and `halfvec` column types across databases:
+
+- `vector` - stores vectors as 4-byte floats (single precision)
+    - MariaDB/MySQL: native `vector` type
+    - Microsoft SQL Server: native `vector` type
+    - PostgreSQL: `vector` type, available via `pgvector` extension
+    - SAP HANA Cloud: alias for `real_vector` type
+- `halfvec` - stores vectors as 2-byte floats (half precision) for memory efficiency
+    - PostgreSQL: `halfvec` type, available via `pgvector` extension
+    - SAP HANA Cloud: alias for `half_vector` type
+
+You can specify the number of vector dimensions using the `length` option:
+
+```typescript
+@Entity()
+export class Post {
+    @PrimaryGeneratedColumn()
+    id: number
+
+    // Vector without specified dimensions
+    @Column("vector")
+    embedding: number[] | Buffer
+
+    // Vector with 3 dimensions: vector(3)
+    @Column("vector", { length: 3 })
+    embedding_3d: number[] | Buffer
+
+    // Half-precision vector with 4 dimensions: halfvec(4) (works on PostgreSQL and SAP HANA only)
+    @Column("halfvec", { length: 4 })
+    halfvec_embedding: number[] | Buffer
+}
+```
+
+> **Note**:
+>
+> - **MariaDB/MySQL**: Vectors are supported since MariaDB 11.7 and MySQL 9
+> - **Microsoft SQL Server**: Vector type support requires SQL Server 2025 (17.x) or newer.
+> - **PostgreSQL**: Vector columns require the `pgvector` extension to be installed. The extension provides the vector data types and similarity operators.
+> - **SAP HANA**: Vector columns require SAP HANA Cloud (2024Q1+) and a supported version of `@sap/hana-client`.
+
 ### Spatial columns

 Microsoft SQLServer, MySQL/MariaDB, PostgreSQL/CockroachDB and SAP HANA all support spatial columns. TypeORM's support for each varies slightly between databases, particularly as the column names vary between databases.
@@ -491,6 +453,7 @@ List of available options in `ColumnOptions`:

 - `hstoreType: "object"|"string"` - Return type of `HSTORE` column. Returns value as string or as object. Used only in [Postgres](https://www.postgresql.org/docs/9.6/static/hstore.html).
 - `array: boolean` - Used for postgres and cockroachdb column types which can be array (for example int[])
 - `transformer: { from(value: DatabaseType): EntityType, to(value: EntityType): DatabaseType }` - Used to marshal properties of arbitrary type `EntityType` into a type `DatabaseType` supported by the database. Arrays of transformers are also supported and are applied in natural order when writing, and in reverse order when reading. e.g. `[lowercase, encrypt]` will first lowercase the string then encrypt it when writing, and will decrypt then do nothing when reading.
+- `utc: boolean` - Indicates if date values should be stored and retrieved in UTC timezone instead of local timezone. Only applies to `date` column type. Default value is `false` (uses local timezone for backward compatibility).

 Note: most of those column options are RDBMS-specific and aren't available in `MongoDB`.
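The new option in use, following the `@example` in the accompanying `ColumnOptions` change (the entity is illustrative):

```typescript
import { Column, Entity, PrimaryGeneratedColumn } from "typeorm"

@Entity()
export class User {
    @PrimaryGeneratedColumn()
    id: number

    // stored and read back as a UTC calendar date; utc defaults to false,
    // which keeps the previous local-timezone behavior
    @Column({ type: "date", utc: true })
    birthDate: Date
}
```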
@@ -27,12 +27,7 @@
     "strip_chars": " .,;:#",
     "custom_settings": {
         "separatorsToIndex": "_",
-        "attributesForFaceting": [
-            "language",
-            "version",
-            "type",
-            "docusaurus_tag"
-        ],
+        "attributesForFaceting": [],
         "attributesToRetrieve": [
             "hierarchy",
             "content",

@@ -46,4 +41,4 @@
             "833762294"
         ],
         "nb_hits": 0
     }
 }
@ -1,8 +1,8 @@
|
||||
import { themes as prismThemes } from "prism-react-renderer"
|
||||
import type { Config } from "@docusaurus/types"
|
||||
import type * as Preset from "@docusaurus/preset-classic"
|
||||
import type { Config } from "@docusaurus/types"
|
||||
import { PluginOptions as LLMsTXTPluginOptions } from "@signalwire/docusaurus-plugin-llms-txt"
|
||||
import { themes as prismThemes } from "prism-react-renderer"
|
||||
import { redirects } from "./redirects"
|
||||
import { LLMsTXTPluginOptions } from "@signalwire/docusaurus-plugin-llms-txt"
|
||||
|
||||
// This runs in Node.js - Don't use client-side code here (browser APIs, JSX...)
|
||||
|
||||
@ -23,7 +23,6 @@ const config: Config = {
|
||||
projectName: "typeorm", // Usually your repo name.
|
||||
|
||||
onBrokenLinks: "throw",
|
||||
onBrokenMarkdownLinks: "warn",
|
||||
|
||||
// Even if you don't use internationalization, you can use this field to set
|
||||
// useful metadata like html lang. For example, if your site is Chinese, you
|
||||
|
||||
2447 docs/package-lock.json generated
File diff suppressed because it is too large
@@ -27,24 +27,24 @@
         ]
     },
     "dependencies": {
-        "@docusaurus/core": "3.8.1",
-        "@docusaurus/plugin-client-redirects": "^3.8.1",
-        "@docusaurus/preset-classic": "3.8.1",
+        "@docusaurus/core": "3.9.2",
+        "@docusaurus/plugin-client-redirects": "^3.9.2",
+        "@docusaurus/preset-classic": "3.9.2",
         "@mdx-js/react": "^3.1.1",
         "@signalwire/docusaurus-plugin-llms-txt": "^1.2.2",
         "clsx": "^2.1.1",
-        "docusaurus-theme-search-typesense": "^0.25.0",
+        "docusaurus-theme-search-typesense": "^0.26.0",
         "prism-react-renderer": "^2.4.1",
         "react": "^18.3.1",
         "react-dom": "^18.3.1"
     },
     "devDependencies": {
-        "@docusaurus/module-type-aliases": "3.8.1",
-        "@docusaurus/tsconfig": "3.8.1",
-        "@docusaurus/types": "3.8.1",
-        "typescript": "~5.9.2"
+        "@docusaurus/module-type-aliases": "3.9.2",
+        "@docusaurus/tsconfig": "3.9.2",
+        "@docusaurus/types": "3.9.2",
+        "typescript": "~5.9.3"
     },
     "engines": {
-        "node": ">=18.0"
+        "node": ">=20.0"
     }
 }
4340 package-lock.json generated
File diff suppressed because it is too large
46 package.json
@@ -1,6 +1,6 @@
 {
     "name": "typeorm",
-    "version": "0.3.27",
+    "version": "0.3.28",
     "description": "Data-Mapper ORM for TypeScript and ES2021+. Supports MySQL/MariaDB, PostgreSQL, MS SQL Server, Oracle, SAP HANA, SQLite, MongoDB databases.",
     "homepage": "https://typeorm.io",
     "bugs": {

@@ -96,10 +96,10 @@
     },
     "dependencies": {
         "@sqltools/formatter": "^1.2.5",
-        "ansis": "^4.1.0",
+        "ansis": "^4.2.0",
         "app-root-path": "^3.1.0",
         "buffer": "^6.0.3",
-        "dayjs": "^1.11.18",
+        "dayjs": "^1.11.19",
         "debug": "^4.4.3",
         "dedent": "^1.7.0",
         "dotenv": "^16.6.1",

@@ -113,13 +113,13 @@
     },
     "devDependencies": {
         "@eslint/js": "^9.39.1",
-        "@google-cloud/spanner": "^7.21.0",
-        "@sap/hana-client": "^2.26.18",
-        "@tsconfig/node16": "^16.1.5",
+        "@google-cloud/spanner": "^8.3.1",
+        "@sap/hana-client": "^2.26.26",
+        "@tsconfig/node16": "^16.1.8",
         "@types/chai": "^4.3.20",
         "@types/chai-as-promised": "^7.1.8",
         "@types/debug": "^4.1.12",
-        "@types/gulp-rename": "^2.0.6",
+        "@types/gulp-rename": "^2.0.7",
         "@types/gulp-sourcemaps": "^0.0.38",
         "@types/mocha": "^10.0.10",
         "@types/node": "^16.18.126",

@@ -127,14 +127,14 @@
         "@types/sinon": "^10.0.20",
         "@types/sinon-chai": "^3.2.12",
         "@types/source-map-support": "^0.5.10",
-        "@types/yargs": "^17.0.33",
+        "@types/yargs": "^17.0.35",
         "better-sqlite3": "^8.7.0",
         "c8": "^10.1.3",
         "chai": "^4.5.0",
         "chai-as-promised": "^7.1.2",
         "class-transformer": "^0.5.1",
         "eslint": "^9.39.1",
-        "eslint-plugin-jsdoc": "^61.1.12",
+        "eslint-plugin-jsdoc": "^61.4.1",
         "globals": "^16.5.0",
         "gulp": "^4.0.2",
         "gulp-rename": "^2.1.0",

@@ -145,19 +145,18 @@
         "gulpclass": "^0.2.0",
         "husky": "^9.1.7",
         "is-ci": "^4.1.0",
-        "lint-staged": "^16.2.6",
-        "mocha": "^11.7.2",
-        "mongodb": "^6.20.0",
-        "mssql": "^11.0.1",
+        "lint-staged": "^16.2.7",
+        "mocha": "^11.7.5",
+        "mongodb": "^6.21.0",
+        "mssql": "^12.1.1",
         "mysql": "^2.18.1",
-        "mysql2": "^3.15.0",
-        "oracledb": "^6.9.0",
+        "mysql2": "^3.15.3",
+        "oracledb": "^6.10.0",
         "pg": "^8.16.3",
         "pg-query-stream": "^4.10.3",
         "pkg-pr-new": "^0.0.60",
         "prettier": "^2.8.8",
-        "redis": "^5.8.2",
-        "remap-istanbul": "^0.13.0",
+        "redis": "^5.10.0",
         "rimraf": "^5.0.10",
         "sinon": "^16.1.3",
         "sinon-chai": "^3.7.0",

@@ -167,16 +166,16 @@
         "sqlite3": "^5.1.7",
         "standard-changelog": "^7.0.1",
         "ts-node": "^10.9.2",
-        "typescript": "^5.9.2",
-        "typescript-eslint": "^8.46.3"
+        "typescript": "^5.9.3",
+        "typescript-eslint": "^8.48.0"
     },
     "peerDependencies": {
-        "@google-cloud/spanner": "^5.18.0 || ^6.0.0 || ^7.0.0",
+        "@google-cloud/spanner": "^5.18.0 || ^6.0.0 || ^7.0.0 || ^8.0.0",
         "@sap/hana-client": "^2.14.22",
         "better-sqlite3": "^8.0.0 || ^9.0.0 || ^10.0.0 || ^11.0.0 || ^12.0.0",
         "ioredis": "^5.0.4",
         "mongodb": "^5.8.0 || ^6.0.0",
-        "mssql": "^9.1.1 || ^10.0.1 || ^11.0.1",
+        "mssql": "^9.1.1 || ^10.0.0 || ^11.0.0 || ^12.0.0",
         "mysql2": "^2.2.5 || ^3.0.1",
         "oracledb": "^6.3.0",
         "pg": "^8.5.1",

@@ -247,8 +246,13 @@
         "logo": "https://opencollective.com/opencollective/logo.txt"
     },
+    "devEngines": {
+        "runtime": {
+            "name": "node",
+            "version": ">=20.19.0"
+        },
+        "packageManager": {
+            "name": "npm",
+            "version": "^10.8.2",
+            "onFail": "error"
+        }
+    },
28 src/cache/RedisQueryResultCache.ts vendored
@@ -300,26 +300,22 @@ export class RedisQueryResultCache implements QueryResultCache {
     }

     /**
-     * Detects the Redis version based on the connected client's API characteristics
-     * without creating test keys in the database
+     * Detects the Redis package version by reading the installed package.json
+     * and sets the appropriate API version (3 for callback-based, 5 for Promise-based).
      */
     private detectRedisVersion(): void {
         if (this.clientType !== "redis") return

-        try {
-            // Detect version by examining the client's method signatures
-            // This avoids creating test keys in the database
-            const setMethod = this.client.set
-            if (setMethod && setMethod.length <= 3) {
-                // Redis 5+ set method accepts fewer parameters (key, value, options)
-                this.redisMajorVersion = 5
-            } else {
-                // Redis 3/4 set method requires more parameters (key, value, flag, duration, callback)
-                this.redisMajorVersion = 3
-            }
-        } catch {
-            // Default to Redis 3/4 for maximum compatibility
+        const version = PlatformTools.readPackageVersion("redis")
+        const major = parseInt(version.split(".")[0], 10)
+        if (isNaN(major)) {
+            throw new TypeORMError(`Invalid Redis version format: ${version}`)
+        }
+        if (major <= 4) {
+            // Redis 3/4 uses callback-based API
             this.redisMajorVersion = 3
+        } else {
+            // Redis 5+ uses Promise-based API
+            this.redisMajorVersion = 5
         }
     }
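For context, a minimal sketch of enabling the Redis-backed query cache that this class implements. Connection values are placeholders, and `options` is assumed to be passed through to the `redis` client, whose expected shape depends on the installed major version:

```typescript
import { DataSource } from "typeorm"

const dataSource = new DataSource({
    type: "postgres",
    host: "localhost", // placeholder
    username: "test",
    password: "test",
    database: "test",
    cache: {
        type: "redis", // served by RedisQueryResultCache
        options: {
            url: "redis://localhost:6379",
        },
    },
})
```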
@@ -6,8 +6,6 @@ import { TypeORMError } from "../error"
 import { PlatformTools } from "../platform/PlatformTools"
 import { CommandUtils } from "./CommandUtils"

-import ourPackageJson from "../../package.json"
-
 /**
  * Generates a new project with TypeORM.
  */

@@ -117,7 +115,7 @@ export class InitCommand implements yargs.CommandModule {
             )
             await CommandUtils.createFile(
                 basePath + "/package.json",
-                InitCommand.appendPackageJson(
+                await InitCommand.appendPackageJson(
                     packageJsonContents,
                     database,
                     isExpress,

@@ -673,13 +671,16 @@ Steps to run this project:
     /**
      * Appends to a given package.json template everything needed.
      */
-    protected static appendPackageJson(
+    protected static async appendPackageJson(
         packageJsonContents: string,
         database: string,
         express: boolean,
         projectIsEsm: boolean /*, docker: boolean*/,
-    ): string {
+    ): Promise<string> {
         const packageJson = JSON.parse(packageJsonContents)
+        const ourPackageJson = JSON.parse(
+            await CommandUtils.readFile(`${__dirname}/../package.json`),
+        )

         if (!packageJson.devDependencies) packageJson.devDependencies = {}
         packageJson.devDependencies = {
@@ -200,4 +200,15 @@ export interface ColumnOptions extends ColumnCommonOptions {
      * @See https://typeorm.io/decorator-reference#virtualcolumn for more details.
      */
     query?: (alias: string) => string
+
+    /**
+     * Indicates if date values should be stored and retrieved in UTC timezone
+     * instead of local timezone. Only applies to "date" column type.
+     * Default value is "false" (uses local timezone for backward compatibility).
+     *
+     * @example
+     * @Column({ type: "date", utc: true })
+     * birthDate: Date
+     */
+    utc?: boolean
 }
@@ -533,7 +533,9 @@ export class AuroraMysqlDriver implements Driver {
         if (columnMetadata.type === Boolean) {
             return value === true ? 1 : 0
         } else if (columnMetadata.type === "date") {
-            return DateUtils.mixedDateToDateString(value)
+            return DateUtils.mixedDateToDateString(value, {
+                utc: columnMetadata.utc,
+            })
         } else if (columnMetadata.type === "time") {
             return DateUtils.mixedDateToTimeString(value)
         } else if (columnMetadata.type === "json") {

@@ -592,7 +594,9 @@ export class AuroraMysqlDriver implements Driver {
         ) {
             value = DateUtils.normalizeHydratedDate(value)
         } else if (columnMetadata.type === "date") {
-            value = DateUtils.mixedDateToDateString(value)
+            value = DateUtils.mixedDateToDateString(value, {
+                utc: columnMetadata.utc,
+            })
         } else if (columnMetadata.type === "json") {
             value = typeof value === "string" ? JSON.parse(value) : value
         } else if (columnMetadata.type === "time") {
@@ -138,7 +138,7 @@ export class BetterSqlite3Driver extends AbstractSqliteDriver {
             nativeBinding = null,
             prepareDatabase,
         } = this.options
-        const databaseConnection = this.sqlite(database, {
+        const databaseConnection = new this.sqlite(database, {
             readonly,
             fileMustExist,
             timeout,

@@ -148,8 +148,8 @@ export class BetterSqlite3Driver extends AbstractSqliteDriver {
         // in the options, if encryption key for SQLCipher is setted.
         // Must invoke key pragma before trying to do any other interaction with the database.
         if (this.options.key) {
-            databaseConnection.exec(
-                `PRAGMA key = ${JSON.stringify(this.options.key)}`,
+            databaseConnection.pragma(
+                `key = ${JSON.stringify(this.options.key)}`,
             )
         }

@@ -160,11 +160,11 @@ export class BetterSqlite3Driver extends AbstractSqliteDriver {

         // we need to enable foreign keys in sqlite to make sure all foreign key related features
         // working properly. this also makes onDelete to work with sqlite.
-        databaseConnection.exec(`PRAGMA foreign_keys = ON`)
+        databaseConnection.pragma("foreign_keys = ON")

         // turn on WAL mode to enhance performance
         if (this.options.enableWAL) {
-            databaseConnection.exec(`PRAGMA journal_mode = WAL`)
+            databaseConnection.pragma("journal_mode = WAL")
         }

         return databaseConnection
@@ -62,14 +62,16 @@ export class BetterSqlite3QueryRunner extends AbstractSqliteQueryRunner {
      * Called before migrations are run.
      */
     async beforeMigration(): Promise<void> {
-        await this.query(`PRAGMA foreign_keys = OFF`)
+        const databaseConnection = await this.connect()
+        databaseConnection.pragma("foreign_keys = OFF")
     }

     /**
      * Called after migrations are run.
      */
     async afterMigration(): Promise<void> {
-        await this.query(`PRAGMA foreign_keys = ON`)
+        const databaseConnection = await this.connect()
+        databaseConnection.pragma("foreign_keys = ON")
     }

     /**

@@ -172,10 +174,9 @@ export class BetterSqlite3QueryRunner extends AbstractSqliteQueryRunner {
     }
     protected async loadPragmaRecords(tablePath: string, pragma: string) {
         const [database, tableName] = this.splitTablePath(tablePath)
-        const res = await this.query(
-            `PRAGMA ${
-                database ? `"${database}".` : ""
-            }${pragma}("${tableName}")`,
+        const databaseConnection = await this.connect()
+        const res = databaseConnection.pragma(
+            `${database ? `"${database}".` : ""}${pragma}("${tableName}")`,
         )
         return res
     }
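For reference, a small standalone sketch of the better-sqlite3 API these hunks migrate to; the database file and table name are placeholders:

```typescript
import Database from "better-sqlite3"

const db = new Database("example.db")

// pragma() issues the statement and parses any result rows,
// which plain exec() does not do
db.pragma("journal_mode = WAL")
const columns = db.pragma("table_info('post')")
console.log(columns) // array of column records, or [] if the table is absent
```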
@@ -385,7 +385,9 @@ export class CockroachDriver implements Driver {
         if (columnMetadata.type === Boolean) {
             return value === true ? 1 : 0
         } else if (columnMetadata.type === "date") {
-            return DateUtils.mixedDateToDateString(value)
+            return DateUtils.mixedDateToDateString(value, {
+                utc: columnMetadata.utc,
+            })
         } else if (columnMetadata.type === "time") {
             return DateUtils.mixedDateToTimeString(value)
         } else if (

@@ -445,7 +447,9 @@ export class CockroachDriver implements Driver {
         ) {
             value = DateUtils.normalizeHydratedDate(value)
         } else if (columnMetadata.type === "date") {
-            value = DateUtils.mixedDateToDateString(value)
+            value = DateUtils.mixedDateToDateString(value, {
+                utc: columnMetadata.utc,
+            })
         } else if (columnMetadata.type === "time") {
             value = DateUtils.mixedTimeToString(value)
         } else if (columnMetadata.type === "simple-array") {
@@ -43,4 +43,10 @@ export interface MysqlConnectionCredentialsOptions {
      * Database socket path
      */
     readonly socketPath?: string
+
+    /**
+     * Maximum number of clients the pool should contain
+     * for each connection.
+     */
+    readonly poolSize?: number
 }
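A sketch of how the per-connection option can be used with replication; it matches the driver change further down, which prefers `credentials.poolSize` over the global `options.poolSize`. Hosts, credentials, and pool sizes are illustrative:

```typescript
import { DataSource } from "typeorm"

const dataSource = new DataSource({
    type: "mysql",
    replication: {
        master: {
            host: "primary.example.com", // placeholder
            username: "app",
            password: "secret",
            database: "test",
            poolSize: 20, // dedicated pool for the primary
        },
        slaves: [
            {
                host: "replica-1.example.com", // placeholder
                username: "app",
                password: "secret",
                database: "test",
                poolSize: 5, // smaller pool per read replica
            },
        ],
    },
})
```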
@@ -157,6 +157,8 @@ export class MysqlDriver implements Driver {
         "multilinestring",
         "multipolygon",
         "geometrycollection",
+        // vector data types
+        "vector",
         // additional data types for mariadb
         "uuid",
         "inet4",

@@ -191,6 +193,7 @@ export class MysqlDriver implements Driver {
         "nvarchar",
         "binary",
         "varbinary",
+        "vector",
     ]

     /**

@@ -280,6 +283,7 @@ export class MysqlDriver implements Driver {
         char: { length: 1 },
         binary: { length: 1 },
         varbinary: { length: 255 },
+        vector: { length: 2048 }, // default length MySQL uses if not provided a value
         decimal: { precision: 10, scale: 0 },
         dec: { precision: 10, scale: 0 },
         numeric: { precision: 10, scale: 0 },

@@ -612,7 +616,9 @@ export class MysqlDriver implements Driver {
         if (columnMetadata.type === Boolean) {
             return value === true ? 1 : 0
         } else if (columnMetadata.type === "date") {
-            return DateUtils.mixedDateToDateString(value)
+            return DateUtils.mixedDateToDateString(value, {
+                utc: columnMetadata.utc,
+            })
         } else if (columnMetadata.type === "time") {
             return DateUtils.mixedDateToTimeString(value)
         } else if (columnMetadata.type === "json") {

@@ -666,7 +672,9 @@ export class MysqlDriver implements Driver {
         ) {
             value = DateUtils.normalizeHydratedDate(value)
         } else if (columnMetadata.type === "date") {
-            value = DateUtils.mixedDateToDateString(value)
+            value = DateUtils.mixedDateToDateString(value, {
+                utc: columnMetadata.utc,
+            })
         } else if (columnMetadata.type === "json") {
             // mysql2 returns JSON values already parsed, but may still be a string
             // if the JSON value itself is a string (e.g., "\"hello\"")

@@ -1283,7 +1291,7 @@ export class MysqlDriver implements Driver {
             port: credentials.port,
             ssl: options.ssl,
             socketPath: credentials.socketPath,
-            connectionLimit: options.poolSize,
+            connectionLimit: credentials.poolSize ?? options.poolSize,
         },
         options.acquireTimeout === undefined
             ? {}
@@ -2802,17 +2802,19 @@ export class MysqlQueryRunner extends BaseQueryRunner implements QueryRunner {
                 ) !== -1 &&
                 dbColumn["CHARACTER_MAXIMUM_LENGTH"]
             ) {
-                const length =
-                    dbColumn[
-                        "CHARACTER_MAXIMUM_LENGTH"
-                    ].toString()
+                let length: number =
+                    dbColumn["CHARACTER_MAXIMUM_LENGTH"]
+                if (tableColumn.type === "vector") {
+                    // MySQL and MariaDb store the vector length in bytes, not in number of dimensions.
+                    length = length / 4
+                }
                 tableColumn.length =
                     !this.isDefaultColumnLength(
                         table,
                         tableColumn,
-                        length,
+                        length.toString(),
                     )
-                        ? length
+                        ? length.toString()
                         : ""
             }
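A quick worked example of that byte-to-dimension conversion (the reported value is assumed for illustration):

```typescript
// information_schema reports CHARACTER_MAXIMUM_LENGTH in bytes for vector
// columns: 4 bytes per float32 dimension, so vector(3) reports 12
const characterMaximumLength = 12 // assumed value from information_schema
const dimensions = characterMaximumLength / 4 // 12 / 4 = 3 dimensions
```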
@@ -531,9 +531,9 @@ export class OracleDriver implements Driver {
         } else if (columnMetadata.type === "date") {
             if (typeof value === "string") value = value.replace(/[^0-9-]/g, "")
             return () =>
-                `TO_DATE('${DateUtils.mixedDateToDateString(
-                    value,
-                )}', 'YYYY-MM-DD')`
+                `TO_DATE('${DateUtils.mixedDateToDateString(value, {
+                    utc: columnMetadata.utc,
+                })}', 'YYYY-MM-DD')`
         } else if (
             columnMetadata.type === Date ||
             columnMetadata.type === "timestamp" ||

@@ -567,7 +567,9 @@ export class OracleDriver implements Driver {
         if (columnMetadata.type === Boolean) {
             value = !!value
         } else if (columnMetadata.type === "date") {
-            value = DateUtils.mixedDateToDateString(value)
+            value = DateUtils.mixedDateToDateString(value, {
+                utc: columnMetadata.utc,
+            })
         } else if (columnMetadata.type === "time") {
             value = DateUtils.mixedTimeToString(value)
         } else if (
@@ -73,7 +73,8 @@ export class PostgresDriver implements Driver {
     options: PostgresConnectionOptions

     /**
-     * Version of Postgres. Requires a SQL query to the DB, so it is not always set
+     * Version of Postgres. Requires a SQL query to the DB, so it is set on the first
+     * connection attempt.
      */
     version?: string

@@ -362,20 +363,24 @@ export class PostgresDriver implements Driver {
             this.master = await this.createPool(this.options, this.options)
         }

-        const queryRunner = this.createQueryRunner("master")
+        if (!this.version || !this.database || !this.searchSchema) {
+            const queryRunner = this.createQueryRunner("master")

-        this.version = await queryRunner.getVersion()
+            if (!this.version) {
+                this.version = await queryRunner.getVersion()
+            }

-        if (!this.database) {
-            this.database = await queryRunner.getCurrentDatabase()
-        }
+            if (!this.database) {
+                this.database = await queryRunner.getCurrentDatabase()
+            }

-        if (!this.searchSchema) {
-            this.searchSchema = await queryRunner.getCurrentSchema()
-        }
+            if (!this.searchSchema) {
+                this.searchSchema = await queryRunner.getCurrentSchema()
+            }

-        await queryRunner.release()
+            await queryRunner.release()
+        }

         if (!this.schema) {
             this.schema = this.searchSchema
         }

@@ -651,7 +656,9 @@ export class PostgresDriver implements Driver {
         if (columnMetadata.type === Boolean) {
             return value === true ? 1 : 0
         } else if (columnMetadata.type === "date") {
-            return DateUtils.mixedDateToDateString(value)
+            return DateUtils.mixedDateToDateString(value, {
+                utc: columnMetadata.utc,
+            })
         } else if (columnMetadata.type === "time") {
             return DateUtils.mixedDateToTimeString(value)
         } else if (

@@ -750,7 +757,9 @@ export class PostgresDriver implements Driver {
         ) {
             value = DateUtils.normalizeHydratedDate(value)
         } else if (columnMetadata.type === "date") {
-            value = DateUtils.mixedDateToDateString(value)
+            value = DateUtils.mixedDateToDateString(value, {
+                utc: columnMetadata.utc,
+            })
         } else if (columnMetadata.type === "time") {
             value = DateUtils.mixedTimeToString(value)
         } else if (
@@ -335,7 +335,9 @@ export class ReactNativeDriver implements Driver {
         ) {
             return value === true ? 1 : 0
         } else if (columnMetadata.type === "date") {
-            return DateUtils.mixedDateToDateString(value)
+            return DateUtils.mixedDateToDateString(value, {
+                utc: columnMetadata.utc,
+            })
         } else if (columnMetadata.type === "time") {
             return DateUtils.mixedDateToTimeString(value)
         } else if (

@@ -407,7 +409,9 @@ export class ReactNativeDriver implements Driver {

             value = DateUtils.normalizeHydratedDate(value)
         } else if (columnMetadata.type === "date") {
-            value = DateUtils.mixedDateToDateString(value)
+            value = DateUtils.mixedDateToDateString(value, {
+                utc: columnMetadata.utc,
+            })
         } else if (columnMetadata.type === "time") {
             value = DateUtils.mixedTimeToString(value)
         } else if (columnMetadata.type === "simple-array") {
@ -542,7 +542,9 @@ export class SapDriver implements Driver {
|
||||
if (value === null || value === undefined) return value
|
||||
|
||||
if (columnMetadata.type === "date") {
|
||||
return DateUtils.mixedDateToDateString(value)
|
||||
return DateUtils.mixedDateToDateString(value, {
|
||||
utc: columnMetadata.utc,
|
||||
})
|
||||
} else if (columnMetadata.type === "time") {
|
||||
return DateUtils.mixedDateToTimeString(value)
|
||||
} else if (
|
||||
@ -584,7 +586,9 @@ export class SapDriver implements Driver {
        ) {
            value = DateUtils.normalizeHydratedDate(value)
        } else if (columnMetadata.type === "date") {
            value = DateUtils.mixedDateToDateString(value)
            value = DateUtils.mixedDateToDateString(value, {
                utc: columnMetadata.utc,
            })
        } else if (columnMetadata.type === "time") {
            value = DateUtils.mixedTimeToString(value)
        } else if (columnMetadata.type === "simple-array") {
@ -399,7 +399,9 @@ export class SpannerDriver implements Driver {
            const lib = this.options.driver || PlatformTools.load("spanner")
            return lib.Spanner.numeric(value.toString())
        } else if (columnMetadata.type === "date") {
            return DateUtils.mixedDateToDateString(value)
            return DateUtils.mixedDateToDateString(value, {
                utc: columnMetadata.utc,
            })
        } else if (columnMetadata.type === "json") {
            return value
        } else if (
@ -434,7 +436,9 @@ export class SpannerDriver implements Driver {
        } else if (columnMetadata.type === "numeric") {
            value = value.value
        } else if (columnMetadata.type === "date") {
            value = DateUtils.mixedDateToDateString(value)
            value = DateUtils.mixedDateToDateString(value, {
                utc: columnMetadata.utc,
            })
        } else if (columnMetadata.type === "json") {
            value = typeof value === "string" ? JSON.parse(value) : value
        } else if (columnMetadata.type === Number) {
@ -331,7 +331,9 @@ export abstract class AbstractSqliteDriver implements Driver {
        ) {
            return value === true ? 1 : 0
        } else if (columnMetadata.type === "date") {
            return DateUtils.mixedDateToDateString(value)
            return DateUtils.mixedDateToDateString(value, {
                utc: columnMetadata.utc,
            })
        } else if (columnMetadata.type === "time") {
            return DateUtils.mixedDateToTimeString(value)
        } else if (
@ -406,7 +408,9 @@ export abstract class AbstractSqliteDriver implements Driver {

            value = DateUtils.normalizeHydratedDate(value)
        } else if (columnMetadata.type === "date") {
            value = DateUtils.mixedDateToDateString(value)
            value = DateUtils.mixedDateToDateString(value, {
                utc: columnMetadata.utc,
            })
        } else if (columnMetadata.type === "time") {
            value = DateUtils.mixedTimeToString(value)
        } else if (
@ -532,7 +532,7 @@ export class SqlServerDriver implements Driver {
        if (columnMetadata.type === Boolean) {
            return value === true ? 1 : 0
        } else if (columnMetadata.type === "date") {
            return DateUtils.mixedDateToDate(value)
            return DateUtils.mixedDateToDate(value, columnMetadata.utc)
        } else if (columnMetadata.type === "time") {
            return DateUtils.mixedTimeToDate(value)
        } else if (
@ -586,7 +586,9 @@ export class SqlServerDriver implements Driver {
        ) {
            value = DateUtils.normalizeHydratedDate(value)
        } else if (columnMetadata.type === "date") {
            value = DateUtils.mixedDateToDateString(value)
            value = DateUtils.mixedDateToDateString(value, {
                utc: columnMetadata.utc,
            })
        } else if (columnMetadata.type === "time") {
            value = DateUtils.mixedTimeToString(value)
        } else if (columnMetadata.type === "simple-array") {
@ -75,7 +75,7 @@ export type WithLengthColumnType =
    | "binary" // mssql
    | "varbinary" // mssql, sap
    | "string" // cockroachdb, spanner
    | "vector" // postgres, mssql, sap
    | "vector" // mariadb, mysql, mssql, postgres, sap
    | "halfvec" // postgres, sap
    | "half_vector" // sap
    | "real_vector" // sap
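Note: the comment change records that the length-bearing "vector" type now also covers mariadb and mysql. A hypothetical entity sketch; the JavaScript representation of vector values varies by driver, so the string type below is an assumption:

import { Column, Entity, PrimaryGeneratedColumn } from "typeorm"

@Entity()
export class Embedding {
    @PrimaryGeneratedColumn()
    id: number

    // For length-bearing types, "length" carries the vector dimension.
    @Column("vector", { length: 3 })
    values: string // e.g. "[0.1,0.2,0.3]"
}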
@ -1090,12 +1090,16 @@ export class EntityManager {
            )
        }

        const result = await this.createQueryBuilder(entityClass, metadata.name)
            .setFindOptions({ where })
        const qb = this.createQueryBuilder(entityClass, metadata.name)
        qb.setFindOptions({ where })

        const alias = qb.alias

        const result = await qb
            .select(
                `${fnName}(${this.connection.driver.escape(
                    column.databaseName,
                )})`,
                    alias,
                )}.${this.connection.driver.escape(column.databaseName)})`,
                fnName,
            )
            .getRawOne()
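Note: qualifying the aggregated column with the query-builder alias keeps the expression unambiguous once setFindOptions({ where }) starts adding joins. A usage sketch through the public aggregation helpers (Post and its views column are illustrative):

// Now emits SQL shaped roughly like
//   SELECT SUM("post"."views") AS "SUM" FROM "post" "post" WHERE ...
// instead of the previously unqualified SUM("views").
const totalViews = await dataSource.manager.sum(Post, "views", {
    author: "alice",
})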
@ -15,46 +15,46 @@ import { DeleteResult } from "../query-builder/result/DeleteResult"
import { EntityMetadata } from "../metadata/EntityMetadata"

import {
    BulkWriteResult,
    AggregationCursor,
    Collection,
    FindCursor,
    Document,
    AggregateOptions,
    AggregationCursor,
    AnyBulkWriteOperation,
    BulkWriteOptions,
    Filter,
    CountOptions,
    IndexSpecification,
    CreateIndexesOptions,
    IndexDescription,
    DeleteResult as DeleteResultMongoDb,
    DeleteOptions,
    CommandOperationOptions,
    FindOneAndDeleteOptions,
    FindOneAndReplaceOptions,
    UpdateFilter,
    FindOneAndUpdateOptions,
    RenameOptions,
    ReplaceOptions,
    UpdateResult as UpdateResultMongoDb,
    BulkWriteResult,
    ChangeStream,
    ChangeStreamOptions,
    Collection,
    CollStats,
    CollStatsOptions,
    ChangeStreamOptions,
    ChangeStream,
    UpdateOptions,
    ListIndexesOptions,
    ListIndexesCursor,
    OptionalId,
    CommandOperationOptions,
    CountDocumentsOptions,
    CountOptions,
    CreateIndexesOptions,
    DeleteOptions,
    DeleteResult as DeleteResultMongoDb,
    Document,
    Filter,
    FilterOperators,
    FindCursor,
    FindOneAndDeleteOptions,
    FindOneAndReplaceOptions,
    FindOneAndUpdateOptions,
    IndexDescription,
    IndexInformationOptions,
    IndexSpecification,
    InsertManyResult,
    InsertOneOptions,
    InsertOneResult,
    InsertManyResult,
    UnorderedBulkOperation,
    OrderedBulkOperation,
    IndexInformationOptions,
    ListIndexesCursor,
    ListIndexesOptions,
    ObjectId,
    FilterOperators,
    CountDocumentsOptions,
    OptionalId,
    OrderedBulkOperation,
    RenameOptions,
    ReplaceOptions,
    UnorderedBulkOperation,
    UpdateFilter,
    UpdateOptions,
    UpdateResult as UpdateResultMongoDb,
} from "../driver/mongodb/typings"
import { DataSource } from "../data-source/DataSource"
import { MongoFindManyOptions } from "../find-options/mongodb/MongoFindManyOptions"
@ -161,6 +161,16 @@ export class MongoEntityManager extends EntityManager {
        return this.executeFindAndCount(entityClassOrName, where)
    }

    /**
     * Finds entities that match given WHERE conditions.
     */
    async findBy<Entity>(
        entityClassOrName: EntityTarget<Entity>,
        where: any,
    ): Promise<Entity[]> {
        return this.executeFind(entityClassOrName, where)
    }

    /**
     * Finds entities by ids.
     * Optionally find options can be applied.
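Note: a brief usage sketch of the new findBy overload; the entity and filter are illustrative, and the where argument is a plain MongoDB filter document:

const mongoManager = dataSource.mongoManager
const admins = await mongoManager.findBy(User, {
    role: { $in: ["admin", "owner"] },
})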
@ -160,6 +160,10 @@ export { WhereExpression } from "./query-builder/WhereExpressionBuilder"
export { InsertResult } from "./query-builder/result/InsertResult"
export { UpdateResult } from "./query-builder/result/UpdateResult"
export { DeleteResult } from "./query-builder/result/DeleteResult"
export {
    QueryPartialEntity,
    QueryDeepPartialEntity,
} from "./query-builder/QueryPartialEntity"
export { QueryResult } from "./query-runner/QueryResult"
export { QueryRunner } from "./query-runner/QueryRunner"
export { MongoEntityManager } from "./entity-manager/MongoEntityManager"
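Note: with the re-export in place, these types resolve from the package root instead of a deep import path. A consumer-side sketch (the User entity is illustrative):

import type { QueryDeepPartialEntity } from "typeorm"

// Useful for typing payloads handed to repository.update()/insert():
const patch: QueryDeepPartialEntity<User> = { firstName: "Timber" }
await dataSource.getRepository(User).update({ id: 1 }, patch)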
@ -123,6 +123,12 @@ export class ColumnMetadata {
     */
    comment?: string

    /**
     * Indicates if date values use UTC timezone.
     * Only applies to "date" column type.
     */
    utc: boolean = false

    /**
     * Default database value.
     */
@ -388,6 +394,8 @@ export class ColumnMetadata {
            this.isSelect = options.args.options.select
        if (options.args.options.insert !== undefined)
            this.isInsert = options.args.options.insert
        if (options.args.options.utc !== undefined)
            this.utc = options.args.options.utc
        if (options.args.options.update !== undefined)
            this.isUpdate = options.args.options.update
        if (options.args.options.readonly !== undefined)
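Note: the decorator option therefore lands on ColumnMetadata and can be inspected at runtime. A small sketch using the Event entity added later in this compare:

const meta = dataSource.getMetadata(Event)
const column = meta.findColumnWithPropertyName("utcDate")
console.log(column?.utc) // true for @Column({ type: "date", utc: true })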
@ -12,6 +12,7 @@ import { PropertyTypeFactory } from "./types/PropertyTypeInFunction"
import { TypeORMError } from "../error"
import { ObjectUtils } from "../util/ObjectUtils"
import { InstanceChecker } from "../util/InstanceChecker"
import { OrmUtils } from "../util/OrmUtils"

/**
 * Contains all information about some entity's relation.
@ -520,7 +521,11 @@ export class RelationMetadata {
                entity,
            )
        } else {
            entity[propertyName] = value
            if (ObjectUtils.isObject(entity[propertyName])) {
                OrmUtils.mergeDeep(entity[propertyName], value)
            } else {
                entity[propertyName] = value
            }
        }
    }
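Note: the setter now merges into an existing relation object rather than replacing it outright. A rough sketch of the observable difference, assuming OrmUtils.mergeDeep's deep-merge semantics:

const entity: any = { profile: { id: 1, photo: "a.png" } }

// Before: entity.profile = { id: 1 } dropped the "photo" key.
// After: the incoming value is merged into the existing object.
OrmUtils.mergeDeep(entity.profile, { id: 1 })
console.log(entity.profile) // { id: 1, photo: "a.png" }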
@ -417,6 +417,8 @@ export class MigrationExecutor {
            this.connection.logger.logSchemaBuild(
                `No migrations were found in the database. Nothing to revert!`,
            )
            // if query runner was created by us then release it
            if (!this.queryRunner) await queryRunner.release()
            return
        }
@ -27,6 +27,20 @@ export class PlatformTools {
        return global
    }

    /**
     * Reads the version string from package.json of the given package.
     * This operation is only supported in node.
     */
    static readPackageVersion(name: string): string {
        try {
            return require(`${name}/package.json`).version
        } catch (err) {
            throw new TypeError(
                `Failed to read package.json for "${name}": ${err.message}`,
            )
        }
    }

    /**
     * Loads ("require"-s) given file or package.
     * This operation only supports on node platform
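Note: a short usage sketch of the new helper; the package name is illustrative:

// Resolves "<name>/package.json" via require() and returns its version,
// throwing the TypeError above when the package cannot be resolved.
const driverVersion = PlatformTools.readPackageVersion("pg")
console.log(`pg driver version: ${driverVersion}`)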
@ -25,8 +25,21 @@ export class DateUtils {
    /**
     * Converts given value into date string in a "YYYY-MM-DD" format.
     */
    static mixedDateToDateString(value: string | Date): string {
    static mixedDateToDateString(
        value: string | Date,
        options?: { utc?: boolean },
    ): string {
        const utc = options?.utc ?? false
        if (value instanceof Date) {
            if (utc) {
                return (
                    this.formatZerolessValue(value.getUTCFullYear(), 4) +
                    "-" +
                    this.formatZerolessValue(value.getUTCMonth() + 1) +
                    "-" +
                    this.formatZerolessValue(value.getUTCDate())
                )
            }
            return (
                this.formatZerolessValue(value.getFullYear(), 4) +
                "-" +
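Note: the two branches only disagree when the instant falls on different calendar days in UTC and local time. Worked through with the values used by the new date-utc test (which sets process.env.TZ before connections are created):

// TZ=America/New_York (UTC-4 during DST)
const d = new Date(Date.UTC(2025, 5, 1)) // 2025-06-01T00:00:00Z

DateUtils.mixedDateToDateString(d) // "2025-05-31" (locally May 31, 20:00)
DateUtils.mixedDateToDateString(d, { utc: true }) // "2025-06-01"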
@ -12,6 +12,7 @@ describe("benchmark > bulk-sql-build", () => {
    before(async () => {
        dataSources = await createTestingConnections({
            __dirname,
            enabledDrivers: ["postgres"],
        })
    })
    beforeEach(() => reloadTestingDatabases(dataSources))
@ -1,11 +1,11 @@
import { expect } from "chai"
import "reflect-metadata"
import { scheduler } from "timers/promises"
import { DataSource } from "../../../../src"
import {
    closeTestingConnections,
    createTestingConnections,
    reloadTestingDatabases,
    sleep,
} from "../../../utils/test-utils"
import { Post } from "./entity/Post"

@ -81,7 +81,7 @@ describe("column kinds > create date column", () => {
        })

        // wait a second
        await sleep(1000)
        await scheduler.wait(1010)

        // create post once again
        post.title = "Updated Title"
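Note: these test hunks swap the local sleep() helper for Node's built-in promise-based timer; a minimal sketch of the replacement (timers/promises ships with the Node versions the test suite targets):

import { scheduler } from "timers/promises"

// Resolves after ~1010 ms; the extra 10 ms gives margin against
// sub-second timestamp precision in the compared date columns.
await scheduler.wait(1010)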
@ -1,11 +1,11 @@
import { expect } from "chai"
import "reflect-metadata"
import { scheduler } from "timers/promises"
import { DataSource } from "../../../../src"
import {
    closeTestingConnections,
    createTestingConnections,
    reloadTestingDatabases,
    sleep,
} from "../../../utils/test-utils"
import { Post } from "./entity/Post"

@ -109,7 +109,7 @@ describe("column kinds > update date column", () => {
        })

        // wait a second
        await sleep(2000)
        await scheduler.wait(1010)

        // update post once again
        post.title = "Updated Title"
@ -120,9 +120,9 @@ describe("column kinds > update date column", () => {
            await postRepository.findOneByOrFail({
                id: post.id,
            })
        expect(loadedPostAfterUpdate.updatedAt.getTime()).to.be.not.eql(
            loadedPostBeforeUpdate.updatedAt.getTime(),
        )
        expect(
            loadedPostAfterUpdate.updatedAt.getTime(),
        ).to.be.greaterThan(loadedPostBeforeUpdate.updatedAt.getTime())
    }),
))
@ -5,7 +5,6 @@ import {
    closeTestingConnections,
    createTestingConnections,
    reloadTestingDatabases,
    sleep,
} from "../../../utils/test-utils"
import { Post } from "./entity/Post"

@ -98,9 +97,6 @@ describe("column kinds > version column", () => {
        post.title = "Post"
        await postRepository.save(post)

        // wait a second
        await sleep(1000)

        // update post once again
        post.title = "Updated Title"
        await postRepository.save(post)
@ -1,15 +1,15 @@
import "reflect-metadata"
import { expect } from "chai"
import "reflect-metadata"
import { scheduler } from "timers/promises"
import { DataSource } from "../../../src/data-source/DataSource"
import {
    closeTestingConnections,
    createTestingConnections,
    reloadTestingDatabases,
    sleep,
} from "../../utils/test-utils"
import { DataSource } from "../../../src/data-source/DataSource"
import { Address } from "./entity/Address"
import { User } from "./entity/User"
import { MockQueryResultCache } from "./provider/MockQueryResultCache"
import { Address } from "./entity/Address"

describe("custom cache provider", () => {
    let connections: DataSource[]
@ -101,7 +101,7 @@ describe("custom cache provider", () => {
        expect(users3.length).to.be.equal(1)

        // give some time for cache to expire
        await sleep(1000)
        await scheduler.wait(1010)

        // now, when our cache has expired we check if we have new user inserted even with cache enabled
        const users4 = await connection
@ -179,7 +179,7 @@ describe("custom cache provider", () => {
        expect(users3.length).to.be.equal(1)

        // give some time for cache to expire
        await sleep(1000)
        await scheduler.wait(1010)

        // now, when our cache has expired we check if we have new user inserted even with cache enabled
        const users4 = await connection
@ -249,7 +249,7 @@ describe("custom cache provider", () => {
        expect(users2.length).to.be.equal(2)

        // give some time for cache to expire
        await sleep(1000)
        await scheduler.wait(1010)

        // but with cache enabled it must not return newly inserted entity since cache is not expired yet
        const users3 = await connection
@ -263,7 +263,7 @@ describe("custom cache provider", () => {
        expect(users3.length).to.be.equal(1)

        // give some time for cache to expire
        await sleep(1000)
        await scheduler.wait(1010)

        // now, when our cache has expired we check if we have new user inserted even with cache enabled
        const users4 = await connection
@ -389,7 +389,7 @@ describe("custom cache provider", () => {
        expect(users3).to.be.equal(1)

        // give some time for cache to expire
        await sleep(1000)
        await scheduler.wait(1010)

        // now, when our cache has expired we check if we have new user inserted even with cache enabled
        const users4 = await connection
50
test/functional/columns/date-utc/date-utc.test.ts
Normal file
@ -0,0 +1,50 @@
import "reflect-metadata"
import { expect } from "chai"
import {
    closeTestingConnections,
    createTestingConnections,
    reloadTestingDatabases,
} from "../../../utils/test-utils"
import { Event } from "./entity/Event"
import { DataSource } from "../../../../src"

describe("columns > date utc flag", () => {
    let originalTZ: string | undefined
    let connections: DataSource[]

    before(async () => {
        originalTZ = process.env.TZ
        process.env.TZ = "America/New_York"
        connections = await createTestingConnections({
            entities: [Event],
        })
    })

    after(async () => {
        process.env.TZ = originalTZ
        await closeTestingConnections(connections)
    })

    beforeEach(() => reloadTestingDatabases(connections))

    it("should save date columns in UTC when utc flag is true and in local timezone when false", () =>
        Promise.all(
            connections.map(async (connection) => {
                const event = new Event()
                const testDate = new Date(Date.UTC(2025, 5, 1)) // 2025-06-01 in UTC

                event.localDate = testDate
                event.utcDate = testDate

                const savedEvent = await connection.manager.save(event)
                const result = await connection.manager.findOneBy(Event, {
                    id: savedEvent.id,
                })

                // UTC flag true: should save as 2025-06-01 (UTC date)
                expect(result!.utcDate).to.equal("2025-06-01")
                // UTC flag false (default): should save as 2025-05-31 (local timezone)
                expect(result!.localDate).to.equal("2025-05-31")
            }),
        ))
})
17
test/functional/columns/date-utc/entity/Event.ts
Normal file
@ -0,0 +1,17 @@
import { Entity } from "../../../../../src/decorator/entity/Entity"
import { PrimaryGeneratedColumn } from "../../../../../src/decorator/columns/PrimaryGeneratedColumn"
import { Column } from "../../../../../src/decorator/columns/Column"

@Entity({
    name: "event",
})
export class Event {
    @PrimaryGeneratedColumn()
    id: number

    @Column({ type: "date" })
    localDate: Date

    @Column({ type: "date", utc: true })
    utcDate: Date
}
Some files were not shown because too many files have changed in this diff.