Compare commits

...

9 Commits

Author SHA1 Message Date
Lucian Mocanu
c4f5d12f3f
refactor(tests): ensure test files have the .test.ts extension (#11801) 2025-11-30 21:37:49 +01:00
Giorgio Boa
61f9e0d085
docs(mysql): add missing mysql credential options (#11813) 2025-11-30 20:35:23 +01:00
CHOIJEWON
55cd8e2b08
feat:add utc flag to date column (#11740) 2025-11-30 14:18:50 +01:00
Giorgio Boa
67f793feaa
feat(mysql): add pool size options for each connection (#11810)
Co-authored-by: 아이작_조서환 <wtae1216@sooplive.com>
2025-11-30 12:31:52 +01:00
Lucian Mocanu
835647ac92
test: use built-in wait function and fix wait times to avoid flaky tests (#11812) 2025-11-29 10:25:39 +01:00
Lucian Mocanu
546192767d
chore: update dependencies (#11811) 2025-11-29 05:54:38 +01:00
Mohammed Gomaa
d0b54544e9
fix: typesense doc sync (#11807)
Co-authored-by: Giorgio Boa <35845425+gioboa@users.noreply.github.com>
2025-11-28 15:03:18 +01:00
Lucian Mocanu
cfb3d6c015
feat(mysql): add support for vector columns on MariaDB and MySQL (#11670) 2025-11-27 15:28:49 +01:00
Piotr Kuczynski
dd55218648
fix(cli): init command reading package.json from two folders up (#11789) 2025-11-25 14:13:25 +01:00
944 changed files with 4701 additions and 3122 deletions

View File

@@ -1,7 +1,7 @@
{
"all": true,
"cache": false,
"exclude": ["node_modules", "**/*.d.ts"],
"exclude": ["**/*.d.ts"],
"exclude-after-remap": true,
"extension": [".ts"],
"include": ["build/compiled/src/**", "src/**"],

View File

@@ -33,8 +33,8 @@
- [ ] Code is up-to-date with the `master` branch
- [ ] This pull request links relevant issues as `Fixes #00000`
- [ ] There are new or updated unit tests validating the change
- [ ] Documentation has been updated to reflect this change
- [ ] There are new or updated tests validating the change (`tests/**.test.ts`)
- [ ] Documentation has been updated to reflect this change (`docs/docs/**.md`)
<!--
🎉 Thank you for contributing and making TypeORM even better!

View File

@@ -12,7 +12,35 @@ jobs:
steps:
- uses: actions/checkout@v5
- name: Delete unaliased collections
env:
TYPESENSE_API_KEY: ${{ secrets.TYPESENSE_API_KEY }}
TYPESENSE_HOST: ${{ secrets.TYPESENSE_HOST }}
TYPESENSE_PROTOCOL: https
TYPESENSE_PORT: 443
run: |
ALIAS_COLLECTION=$(curl -s -H "X-TYPESENSE-API-KEY: $TYPESENSE_API_KEY" \
"$TYPESENSE_PROTOCOL://$TYPESENSE_HOST:$TYPESENSE_PORT/aliases/typeorm-docs" \
| jq -r '.collection_name')
if [ "$ALIAS_COLLECTION" = "null" ] || [ -z "$ALIAS_COLLECTION" ]; then
echo "Alias does not exist; skipping collection cleanup."
exit 0
fi
echo "Alias currently points to: $ALIAS_COLLECTION"
COLLECTIONS=$(curl -s -H "X-TYPESENSE-API-KEY: $TYPESENSE_API_KEY" \
"$TYPESENSE_PROTOCOL://$TYPESENSE_HOST:$TYPESENSE_PORT/collections" \
| jq -r '.[].name')
for col in $COLLECTIONS; do
if [ "$col" != "$ALIAS_COLLECTION" ]; then
echo "Deleting unaliased collection: $col"
curl -s -X DELETE -H "X-TYPESENSE-API-KEY: $TYPESENSE_API_KEY" \
"$TYPESENSE_PROTOCOL://$TYPESENSE_HOST:$TYPESENSE_PORT/collections/$col"
fi
done
- run: |
docker run \
-e TYPESENSE_API_KEY=${{ secrets.TYPESENSE_API_KEY }} \

View File

@@ -34,6 +34,7 @@ jobs:
src-or-tests: &src-or-tests
- *src
- test/**/*.ts
- .github/workflows/test/**/*
- .github/workflows/test*.yml
- .mocharc.json

View File

@@ -1,10 +1,10 @@
{
"__comment": "TODO: remove --exit flag: https://mochajs.org/#-exit",
"exit": true,
"$schema": "https://json.schemastore.org/mocharc",
"check-leaks": true,
"color": true,
"exit": true,
"file": ["./build/compiled/test/utils/test-setup.js"],
"recursive": true,
"spec": ["./build/compiled/test"],
"spec": ["./build/compiled/test/**/*.test.{js,ts}"],
"timeout": 90000
}

View File

@ -2,11 +2,11 @@
This document describes how to set up your development environment and run TypeORM test cases.
* [Prerequisite Software](#prerequisite-software)
* [Getting the Sources](#getting-the-sources)
* [Installing NPM Modules](#installing-npm-modules)
* [Building](#building)
* [Running Tests Locally](#running-tests-locally)
- [Prerequisite Software](#prerequisite-software)
- [Getting the Sources](#getting-the-sources)
- [Installing NPM Modules](#installing-npm-modules)
- [Building](#building)
- [Running Tests Locally](#running-tests-locally)
See the [contribution guidelines](https://github.com/typeorm/typeorm/blob/master/CONTRIBUTING.md)
if you'd like to contribute to TypeORM.
@@ -16,19 +16,19 @@ if you'd like to contribute to TypeORM.
Before you can build and test TypeORM, you must install and configure the
following products on your development machine:
* [Git](http://git-scm.com) and/or the **GitHub app** (for [Mac](http://mac.github.com) or
[Windows](http://windows.github.com)); [GitHub's Guide to Installing
Git](https://help.github.com/articles/set-up-git) is a good source of information.
* [Node.js](http://nodejs.org), (better to install latest version) which is used to run a development web server,
run tests, and generate distributable files.
Depending on your system, you can install Node either from source or as a pre-packaged bundle.
* [Mysql](https://www.mysql.com/) is required to run tests on this platform (or docker)
* [MariaDB](https://mariadb.com/) is required to run tests on this platform (or docker)
* [Postgres](https://www.postgresql.org/) is required to run tests on this platform (or docker)
* [Oracle](https://www.oracle.com/database/index.html) is required to run tests on this platform
* [Microsoft SQL Server](https://www.microsoft.com/en-us/cloud-platform/sql-server) is required to run tests on this platform
* For MySQL, MariaDB and Postgres you can use [docker](https://www.docker.com/) instead (docker configuration is
[here](https://github.com/typeorm/typeorm/blob/master/docker-compose.yml))
- [Git](http://git-scm.com) and/or the **GitHub app** (for [Mac](http://mac.github.com) or
[Windows](http://windows.github.com)); [GitHub's Guide to Installing
Git](https://help.github.com/articles/set-up-git) is a good source of information.
- [Node.js](http://nodejs.org), (better to install latest version) which is used to run a development web server,
run tests, and generate distributable files.
Depending on your system, you can install Node either from source or as a pre-packaged bundle.
- [Mysql](https://www.mysql.com/) is required to run tests on this platform (or docker)
- [MariaDB](https://mariadb.com/) is required to run tests on this platform (or docker)
- [Postgres](https://www.postgresql.org/) is required to run tests on this platform (or docker)
- [Oracle](https://www.oracle.com/database/index.html) is required to run tests on this platform
- [Microsoft SQL Server](https://www.microsoft.com/en-us/cloud-platform/sql-server) is required to run tests on this platform
- For MySQL, MariaDB and Postgres you can use [docker](https://www.docker.com/) instead (docker configuration is
[here](https://github.com/typeorm/typeorm/blob/master/docker-compose.yml))
## Getting the Sources
@@ -56,9 +56,9 @@ You should have node installed in the version described in [.nvmrc](.nvmrc).
It is recommended to configure your OS to automatically switch to use this version whenever you enter project folder. This can be achieved in many ways:
* [`fnm`](https://github.com/Schniz/fnm)
* [`zsh-nvm`](https://github.com/lukechilds/zsh-nvm#auto-use)
* [`asdf`](https://asdf-vm.com) with `asdf-nodejs` plugin and [`legacy_version_file = true`](https://asdf-vm.com/manage/configuration.html#legacy-version-file) option
- [`fnm`](https://github.com/Schniz/fnm)
- [`zsh-nvm`](https://github.com/lukechilds/zsh-nvm#auto-use)
- [`asdf`](https://asdf-vm.com) with `asdf-nodejs` plugin and [`legacy_version_file = true`](https://asdf-vm.com/manage/configuration.html#legacy-version-file) option
## Installing package dependencies
@@ -101,36 +101,46 @@ You can copy this tar into your project and run `npm install ./typeorm-x.x.x.tgz
It is greatly appreciated if PRs that change code come with appropriate tests.
To create a new test, check the [relevant functional tests](https://github.com/typeorm/typeorm/tree/master/test/functional). Depending on the test, you may need to create a new test file or modify an existing one.
To create a new test, check the [relevant functional tests](https://github.com/typeorm/typeorm/tree/master/test/functional). Depending on the test, you may need to create a new `.test.ts` file or modify an existing one.
If the test is for a specific regression or issue opened on GitHub, add a comment to the tests mentioning the issue number.
Most tests will benefit from using this template as a starting point:
```ts
import "reflect-metadata";
import { createTestingConnections, closeTestingConnections, reloadTestingDatabases } from "../../utils/test-utils";
import { expect } from "chai"
import "reflect-metadata"
import {
closeTestingConnections,
createTestingConnections,
reloadTestingDatabases,
} from "../../utils/test-utils"
import { DataSource } from "../../../src/data-source/DataSource"
import { expect } from "chai";
describe("description of the functionality you're testing", () => {
let dataSources: DataSource[]
let dataSources: DataSource[];
before(async () => dataSources = await createTestingConnections({
entities: [__dirname + "/entity/*{.js,.ts}"],
schemaCreate: true,
dropSchema: true,
}));
beforeEach(() => reloadTestingDatabases(dataSources));
after(() => closeTestingConnections(dataSources));
before(
async () =>
(dataSources = await createTestingConnections({
entities: [__dirname + "/entity/*{.js,.ts}"],
schemaCreate: true,
dropSchema: true,
})),
)
beforeEach(() => reloadTestingDatabases(dataSources))
after(() => closeTestingConnections(dataSources))
// optional: test fix for issue https://github.com/typeorm/typeorm/issues/<issue-number>
it("should <put a detailed description of what it should do here>", () => Promise.all(dataSources.map(async dataSource => {
// tests go here
})));
it("should <put a detailed description of what it should do here>", () =>
Promise.all(
dataSources.map(async (dataSource) => {
// tests go here
}),
))
// you can add additional tests if needed
});
// you can add additional tests if needed
})
```
If you place entities in `./entity/<entity-name>.ts` relative to your test file,
@@ -173,8 +183,8 @@ Once TypeScript finishes compiling your changes, you can run `npm run test:fast`
To run your tests you need the Database Management Systems (DBMS) installed on your machine. Alternatively, you can use docker with the DBMS running in containers. To have docker run all the DBMS for you simply run `docker-compose up`
in the root of the project. Once all images are fetched and are running, you can run the tests.
- The docker image of mssql-server needs at least 3.25GB of RAM.
- Make sure to assign enough memory to the Docker VM if you're running on Docker for Mac or Windows
- The docker image of mssql-server needs at least 3.25GB of RAM.
- Make sure to assign enough memory to the Docker VM if you're running on Docker for Mac or Windows
## Release Process

View File

@@ -12,7 +12,7 @@ services:
MYSQL_DATABASE: "test"
mysql-9:
image: "mysql:9.4.0"
image: "mysql:9.5.0"
container_name: "typeorm-mysql-9"
ports:
- "3306:3306"
@@ -24,7 +24,7 @@ services:
# mariadb
mariadb-10:
image: "mariadb:10.6.22-jammy"
image: "mariadb:10.6.24-jammy"
container_name: "typeorm-mariadb-10"
ports:
- "3307:3306"
@@ -35,7 +35,7 @@ services:
MYSQL_DATABASE: "test"
mariadb-12:
image: "mariadb:12.0.1-rc"
image: "mariadb:12.1.2"
container_name: "typeorm-mariadb-12"
ports:
- "3307:3306"

View File

@@ -207,7 +207,7 @@ const queryEmbedding = [
const results = await dataSource.query(
`
DECLARE @question AS VECTOR (1998) = @0;
SELECT TOP (10) dc.*,
SELECT TOP (10) dc.*,
VECTOR_DISTANCE('cosine', @question, embedding) AS distance
FROM document_chunk dc
ORDER BY VECTOR_DISTANCE('cosine', @question, embedding)

View File

@@ -34,6 +34,10 @@ See [Data Source Options](../data-source/2-data-source-options.md) for the commo
- `database` - Database name.
- `socketPath` - Database socket path.
- `poolSize` - Maximum number of clients the pool should contain for each connection.
- `charset` and `collation` - The charset/collation for the connection. If an SQL-level charset is specified (like utf8mb4) then the default collation for that charset is used.
- `timezone` - the timezone configured on the MySQL server. This is used to typecast server date/time
@@ -139,3 +143,7 @@ export class User {
roles: UserRoleType[]
}
```
### Vector Types
MySQL supports the [VECTOR type](https://dev.mysql.com/doc/refman/en/vector.html) since version 9.0, while in MariaDB, [vectors](https://mariadb.com/docs/server/reference/sql-structure/vectors/vector-overview) are available since 11.7.

View File

@@ -60,7 +60,7 @@ Additional options can be added to the `extra` object and will be passed directl
### Column types for `postgres`
`int`, `int2`, `int4`, `int8`, `smallint`, `integer`, `bigint`, `decimal`, `numeric`, `real`, `float`, `float4`, `float8`, `double precision`, `money`, `character varying`, `varchar`, `character`, `char`, `text`, `citext`, `hstore`, `bytea`, `bit`, `varbit`, `bit varying`, `timetz`, `timestamptz`, `timestamp`, `timestamp without time zone`, `timestamp with time zone`, `date`, `time`, `time without time zone`, `time with time zone`, `interval`, `bool`, `boolean`, `enum`, `point`, `line`, `lseg`, `box`, `path`, `polygon`, `circle`, `cidr`, `inet`, `macaddr`, `macaddr8`, `tsvector`, `tsquery`, `uuid`, `xml`, `json`, `jsonb`, `jsonpath`, `int4range`, `int8range`, `numrange`, `tsrange`, `tstzrange`, `daterange`, `int4multirange`, `int8multirange`, `nummultirange`, `tsmultirange`, `tstzmultirange`, `multidaterange`, `geometry`, `geography`, `cube`, `ltree`
`int`, `int2`, `int4`, `int8`, `smallint`, `integer`, `bigint`, `decimal`, `numeric`, `real`, `float`, `float4`, `float8`, `double precision`, `money`, `character varying`, `varchar`, `character`, `char`, `text`, `citext`, `hstore`, `bytea`, `bit`, `varbit`, `bit varying`, `timetz`, `timestamptz`, `timestamp`, `timestamp without time zone`, `timestamp with time zone`, `date`, `time`, `time without time zone`, `time with time zone`, `interval`, `bool`, `boolean`, `enum`, `point`, `line`, `lseg`, `box`, `path`, `polygon`, `circle`, `cidr`, `inet`, `macaddr`, `macaddr8`, `tsvector`, `tsquery`, `uuid`, `xml`, `json`, `jsonb`, `jsonpath`, `int4range`, `int8range`, `numrange`, `tsrange`, `tstzrange`, `daterange`, `int4multirange`, `int8multirange`, `nummultirange`, `tsmultirange`, `tstzmultirange`, `multidaterange`, `geometry`, `geography`, `cube`, `ltree`, `vector`, `halfvec`.
### Column types for `cockroachdb`
@@ -68,6 +68,33 @@ Additional options can be added to the `extra` object and will be passed directl
Note: CockroachDB returns all numeric data types as `string`. However, if you omit the column type and define your property as `number` ORM will `parseInt` string into number.
### Vector columns
Vector columns can be used for similarity searches using PostgreSQL's vector operators:
```typescript
// L2 distance (Euclidean) - <->
const results = await dataSource.sql`
SELECT id, embedding
FROM post
ORDER BY embedding <-> ${"[1,2,3]"}
LIMIT 5`
// Cosine distance - <=>
const results = await dataSource.sql`
SELECT id, embedding
FROM post
ORDER BY embedding <=> ${"[1,2,3]"}
LIMIT 5`
// Inner product - <#>
const results = await dataSource.sql`
SELECT id, embedding
FROM post
ORDER BY embedding <#> ${"[1,2,3]"}
LIMIT 5`
```
### Spatial columns
TypeORM's PostgreSQL and CockroachDB support uses [GeoJSON](http://geojson.org/) as an interchange format, so geometry columns should be tagged either as `object` or `Geometry` (or subclasses, e.g. `Point`) after importing [`geojson` types](https://www.npmjs.com/package/@types/geojson) or using the TypeORM built-in GeoJSON types:

View File

@@ -37,15 +37,16 @@ SAP HANA 2.0 and SAP HANA Cloud support slightly different data types. Check the
- [SAP HANA 2.0 Data Types](https://help.sap.com/docs/SAP_HANA_PLATFORM/4fe29514fd584807ac9f2a04f6754767/20a1569875191014b507cf392724b7eb.html?locale=en-US)
- [SAP HANA Cloud Data Types](https://help.sap.com/docs/hana-cloud-database/sap-hana-cloud-sap-hana-database-sql-reference-guide/data-types)
TypeORM's `SapDriver` supports `tinyint`, `smallint`, `integer`, `bigint`, `smalldecimal`, `decimal`, `real`, `double`, `date`, `time`, `seconddate`, `timestamp`, `boolean`, `char`, `nchar`, `varchar`, `nvarchar`, `text`, `alphanum`, `shorttext`, `array`, `varbinary`, `blob`, `clob`, `nclob`, `st_geometry`, `st_point`, `real_vector`, `half_vector`, `vector`, and `halfvec`. Some of these data types have been deprecated or removed in SAP HANA Cloud, and will be converted to the closest available alternative when connected to a Cloud database.
TypeORM's `SapDriver` supports `tinyint`, `smallint`, `integer`, `bigint`, `smalldecimal`, `decimal`, `real`, `double`, `date`, `time`, `seconddate`, `timestamp`, `boolean`, `char`, `nchar`, `varchar`, `nvarchar`, `text`, `alphanum`, `shorttext`, `array`, `varbinary`, `blob`, `clob`, `nclob`, `st_geometry`, `st_point`, `real_vector` and `half_vector`. Some of these data types have been deprecated or removed in SAP HANA Cloud, and will be converted to the closest available alternative when connected to a Cloud database.
### Vector Types
The `real_vector` and `half_vector` data types were introduced in SAP HANA Cloud (2024Q1 and 2025Q2 respectively), and require a supported version of `@sap/hana-client` as well.
The `real_vector` and `half_vector` data types were introduced in SAP HANA Cloud (2024Q1 and 2025Q2 respectively), and require a supported version of `@sap/hana-client` as well.
For consistency with PostgreSQL's vector support, TypeORM also provides aliases:
- `vector` (alias for `real_vector`) - stores vectors as 4-byte floats
- `halfvec` (alias for `half_vector`) - stores vectors as 2-byte floats for memory efficiency
- `vector` (alias for `real_vector`) - stores vectors as 4-byte floats
- `halfvec` (alias for `half_vector`) - stores vectors as 2-byte floats for memory efficiency
```typescript
@Entity()
@@ -70,3 +71,5 @@ export class Document {
```
By default, the client will return a `Buffer` in the `fvecs`/`hvecs` format, which is more efficient. It is possible to let the driver convert the values to a `number[]` by adding `{ extra: { vectorOutputType: "Array" } }` to the connection options. Check the SAP HANA Client documentation for more information about [REAL_VECTOR](https://help.sap.com/docs/SAP_HANA_CLIENT/f1b440ded6144a54ada97ff95dac7adf/0d197e4389c64e6b9cf90f6f698f62fe.html) or [HALF_VECTOR](https://help.sap.com/docs/SAP_HANA_CLIENT/f1b440ded6144a54ada97ff95dac7adf/8bb854b4ce4a4299bed27c365b717e91.html).
Use the appropriate [vector functions](https://help.sap.com/docs/hana-cloud-database/sap-hana-cloud-sap-hana-database-sql-reference-guide/vector-functions) for similarity searches.

View File

@@ -180,88 +180,6 @@ There are several special column types with additional functionality available:
each time you call `save` of entity manager or repository, or during `upsert` operations when an update occurs.
You don't need to set this column - it will be automatically set.
### Vector columns
Vector columns are supported on PostgreSQL (via [`pgvector`](https://github.com/pgvector/pgvector) extension), Microsoft SQL Server, and SAP HANA Cloud, enabling storing and querying vector embeddings for similarity search and machine learning applications.
TypeORM supports both `vector` and `halfvec` column types across databases:
- `vector` - stores vectors as 4-byte floats (single precision)
- PostgreSQL: native `vector` type via pgvector extension
- SQL Server: native `vector` type
- SAP HANA: alias for `real_vector` type
- `halfvec` - stores vectors as 2-byte floats (half precision) for memory efficiency
- PostgreSQL: native `halfvec` type via pgvector extension
- SAP HANA: alias for `half_vector` type
You can specify the vector dimensions using the `length` option:
```typescript
@Entity()
export class Post {
@PrimaryGeneratedColumn()
id: number
// Vector without specified dimensions (works on PostgreSQL and SAP HANA; SQL Server requires explicit dimensions)
@Column("vector")
embedding: number[] | Buffer
// Vector with 3 dimensions: vector(3)
@Column("vector", { length: 3 })
embedding_3d: number[] | Buffer
// Half-precision vector with 4 dimensions: halfvec(4) (PostgreSQL and SAP HANA only)
@Column("halfvec", { length: 4 })
halfvec_embedding: number[] | Buffer
}
```
**PostgreSQL** - Vector columns can be used for similarity searches using vector operators:
```typescript
// L2 distance (Euclidean) - <->
const results = await dataSource.query(
`SELECT id, embedding FROM post ORDER BY embedding <-> $1 LIMIT 5`,
["[1,2,3]"],
)
// Cosine distance - <=>
const results = await dataSource.query(
`SELECT id, embedding FROM post ORDER BY embedding <=> $1 LIMIT 5`,
["[1,2,3]"],
)
// Inner product - <#>
const results = await dataSource.query(
`SELECT id, embedding FROM post ORDER BY embedding <#> $1 LIMIT 5`,
["[1,2,3]"],
)
```
**SQL Server** - Use the `VECTOR_DISTANCE` function for similarity searches:
```typescript
const queryEmbedding = [1, 2, 3]
// Cosine distance
const results = await dataSource.query(
`
DECLARE @question AS VECTOR(3) = @0;
SELECT TOP (5) id, embedding,
VECTOR_DISTANCE('cosine', @question, embedding) AS distance
FROM post
ORDER BY VECTOR_DISTANCE('cosine', @question, embedding)
`,
[JSON.stringify(queryEmbedding)],
)
```
> **Note**:
>
> - **PostgreSQL**: Vector columns require the `pgvector` extension to be installed. The extension provides the vector data types and similarity operators.
> - **SQL Server**: Vector type support requires a compatible SQL Server version with vector functionality enabled.
> - **SAP HANA**: Vector columns require SAP HANA Cloud (2024Q1+) and a supported version of `@sap/hana-client`. Use the appropriate [vector similarity functions](https://help.sap.com/docs/hana-cloud-database/sap-hana-cloud-sap-hana-database-sql-reference-guide/vector-functions) for similarity searches.
## Column types
TypeORM supports all of the most commonly used database-supported column types.
@@ -414,6 +332,50 @@ Besides "uuid" there is also "increment", "identity" (Postgres 10+ only) and "ro
on some database platforms with this type of generation (for example some databases can only have one increment column,
or some of them require increment to be a primary key).
### Vector columns
Vector columns are supported on MariaDB/MySQL, Microsoft SQL Server, PostgreSQL (via [`pgvector`](https://github.com/pgvector/pgvector) extension) and SAP HANA Cloud, enabling storing and querying vector embeddings for similarity search and machine learning applications.
TypeORM supports both `vector` and `halfvec` column types across databases:
- `vector` - stores vectors as 4-byte floats (single precision)
- MariaDB/MySQL: native `vector` type
- Microsoft SQL Server: native `vector` type
- PostgreSQL: `vector` type, available via `pgvector` extension
- SAP HANA Cloud: alias for `real_vector` type
- `halfvec` - stores vectors as 2-byte floats (half precision) for memory efficiency
- PostgreSQL: `halfvec` type, available via `pgvector` extension
- SAP HANA Cloud: alias for `half_vector` type
You can specify the number of vector dimensions using the `length` option:
```typescript
@Entity()
export class Post {
@PrimaryGeneratedColumn()
id: number
// Vector without specified dimensions
@Column("vector")
embedding: number[] | Buffer
// Vector with 3 dimensions: vector(3)
@Column("vector", { length: 3 })
embedding_3d: number[] | Buffer
// Half-precision vector with 4 dimensions: halfvec(4) (works on PostgreSQL and SAP HANA only)
@Column("halfvec", { length: 4 })
halfvec_embedding: number[] | Buffer
}
```
> **Note**:
>
> - **MariaDB/MySQL**: Vectors are supported since MariaDB 11.7 and MySQL 9
> - **Microsoft SQL Server**: Vector type support requires SQL Server 2025 (17.x) or newer.
> - **PostgreSQL**: Vector columns require the `pgvector` extension to be installed. The extension provides the vector data types and similarity operators.
> - **SAP HANA**: Vector columns require SAP HANA Cloud (2024Q1+) and a supported version of `@sap/hana-client`.
### Spatial columns
Microsoft SQLServer, MySQL/MariaDB, PostgreSQL/CockroachDB and SAP HANA all support spatial columns. TypeORM's support for each varies slightly between databases, particularly as the column names vary between databases.
@@ -491,6 +453,7 @@ List of available options in `ColumnOptions`:
- `hstoreType: "object"|"string"` - Return type of `HSTORE` column. Returns value as string or as object. Used only in [Postgres](https://www.postgresql.org/docs/9.6/static/hstore.html).
- `array: boolean` - Used for postgres and cockroachdb column types which can be array (for example int[])
- `transformer: { from(value: DatabaseType): EntityType, to(value: EntityType): DatabaseType }` - Used to marshal properties of arbitrary type `EntityType` into a type `DatabaseType` supported by the database. Array of transformers are also supported and will be applied in natural order when writing, and in reverse order when reading. e.g. `[lowercase, encrypt]` will first lowercase the string then encrypt it when writing, and will decrypt then do nothing when reading.
- `utc: boolean` - Indicates if date values should be stored and retrieved in UTC timezone instead of local timezone. Only applies to `date` column type. Default value is `false` (uses local timezone for backward compatibility).
Note: most of those column options are RDBMS-specific and aren't available in `MongoDB`.

View File

@@ -27,12 +27,7 @@
"strip_chars": " .,;:#",
"custom_settings": {
"separatorsToIndex": "_",
"attributesForFaceting": [
"language",
"version",
"type",
"docusaurus_tag"
],
"attributesForFaceting": [],
"attributesToRetrieve": [
"hierarchy",
"content",
@@ -46,4 +41,4 @@
"833762294"
],
"nb_hits": 0
}
}

View File

@@ -1,8 +1,8 @@
import { themes as prismThemes } from "prism-react-renderer"
import type { Config } from "@docusaurus/types"
import type * as Preset from "@docusaurus/preset-classic"
import type { Config } from "@docusaurus/types"
import { PluginOptions as LLMsTXTPluginOptions } from "@signalwire/docusaurus-plugin-llms-txt"
import { themes as prismThemes } from "prism-react-renderer"
import { redirects } from "./redirects"
import { LLMsTXTPluginOptions } from "@signalwire/docusaurus-plugin-llms-txt"
// This runs in Node.js - Don't use client-side code here (browser APIs, JSX...)
@@ -23,7 +23,6 @@ const config: Config = {
projectName: "typeorm", // Usually your repo name.
onBrokenLinks: "throw",
onBrokenMarkdownLinks: "warn",
// Even if you don't use internationalization, you can use this field to set
// useful metadata like html lang. For example, if your site is Chinese, you

2447
docs/package-lock.json generated

File diff suppressed because it is too large Load Diff

View File

@@ -27,24 +27,24 @@
]
},
"dependencies": {
"@docusaurus/core": "3.8.1",
"@docusaurus/plugin-client-redirects": "^3.8.1",
"@docusaurus/preset-classic": "3.8.1",
"@docusaurus/core": "3.9.2",
"@docusaurus/plugin-client-redirects": "^3.9.2",
"@docusaurus/preset-classic": "3.9.2",
"@mdx-js/react": "^3.1.1",
"@signalwire/docusaurus-plugin-llms-txt": "^1.2.2",
"clsx": "^2.1.1",
"docusaurus-theme-search-typesense": "^0.25.0",
"docusaurus-theme-search-typesense": "^0.26.0",
"prism-react-renderer": "^2.4.1",
"react": "^18.3.1",
"react-dom": "^18.3.1"
},
"devDependencies": {
"@docusaurus/module-type-aliases": "3.8.1",
"@docusaurus/tsconfig": "3.8.1",
"@docusaurus/types": "3.8.1",
"typescript": "~5.9.2"
"@docusaurus/module-type-aliases": "3.9.2",
"@docusaurus/tsconfig": "3.9.2",
"@docusaurus/types": "3.9.2",
"typescript": "~5.9.3"
},
"engines": {
"node": ">=18.0"
"node": ">=20.0"
}
}

4336
package-lock.json generated

File diff suppressed because it is too large Load Diff

View File

@@ -96,10 +96,10 @@
},
"dependencies": {
"@sqltools/formatter": "^1.2.5",
"ansis": "^4.1.0",
"ansis": "^4.2.0",
"app-root-path": "^3.1.0",
"buffer": "^6.0.3",
"dayjs": "^1.11.18",
"dayjs": "^1.11.19",
"debug": "^4.4.3",
"dedent": "^1.7.0",
"dotenv": "^16.6.1",
@@ -113,13 +113,13 @@
},
"devDependencies": {
"@eslint/js": "^9.39.1",
"@google-cloud/spanner": "^7.21.0",
"@sap/hana-client": "^2.26.18",
"@tsconfig/node16": "^16.1.5",
"@google-cloud/spanner": "^8.3.1",
"@sap/hana-client": "^2.26.26",
"@tsconfig/node16": "^16.1.8",
"@types/chai": "^4.3.20",
"@types/chai-as-promised": "^7.1.8",
"@types/debug": "^4.1.12",
"@types/gulp-rename": "^2.0.6",
"@types/gulp-rename": "^2.0.7",
"@types/gulp-sourcemaps": "^0.0.38",
"@types/mocha": "^10.0.10",
"@types/node": "^16.18.126",
@@ -127,14 +127,14 @@
"@types/sinon": "^10.0.20",
"@types/sinon-chai": "^3.2.12",
"@types/source-map-support": "^0.5.10",
"@types/yargs": "^17.0.33",
"@types/yargs": "^17.0.35",
"better-sqlite3": "^8.7.0",
"c8": "^10.1.3",
"chai": "^4.5.0",
"chai-as-promised": "^7.1.2",
"class-transformer": "^0.5.1",
"eslint": "^9.39.1",
"eslint-plugin-jsdoc": "^61.1.12",
"eslint-plugin-jsdoc": "^61.4.1",
"globals": "^16.5.0",
"gulp": "^4.0.2",
"gulp-rename": "^2.1.0",
@@ -145,19 +145,18 @@
"gulpclass": "^0.2.0",
"husky": "^9.1.7",
"is-ci": "^4.1.0",
"lint-staged": "^16.2.6",
"mocha": "^11.7.2",
"mongodb": "^6.20.0",
"mssql": "^11.0.1",
"lint-staged": "^16.2.7",
"mocha": "^11.7.5",
"mongodb": "^6.21.0",
"mssql": "^12.1.1",
"mysql": "^2.18.1",
"mysql2": "^3.15.0",
"oracledb": "^6.9.0",
"mysql2": "^3.15.3",
"oracledb": "^6.10.0",
"pg": "^8.16.3",
"pg-query-stream": "^4.10.3",
"pkg-pr-new": "^0.0.60",
"prettier": "^2.8.8",
"redis": "^5.8.2",
"remap-istanbul": "^0.13.0",
"redis": "^5.10.0",
"rimraf": "^5.0.10",
"sinon": "^16.1.3",
"sinon-chai": "^3.7.0",
@@ -167,16 +166,16 @@
"sqlite3": "^5.1.7",
"standard-changelog": "^7.0.1",
"ts-node": "^10.9.2",
"typescript": "^5.9.2",
"typescript-eslint": "^8.46.3"
"typescript": "^5.9.3",
"typescript-eslint": "^8.48.0"
},
"peerDependencies": {
"@google-cloud/spanner": "^5.18.0 || ^6.0.0 || ^7.0.0",
"@google-cloud/spanner": "^5.18.0 || ^6.0.0 || ^7.0.0 || ^8.0.0",
"@sap/hana-client": "^2.14.22",
"better-sqlite3": "^8.0.0 || ^9.0.0 || ^10.0.0 || ^11.0.0 || ^12.0.0",
"ioredis": "^5.0.4",
"mongodb": "^5.8.0 || ^6.0.0",
"mssql": "^9.1.1 || ^10.0.1 || ^11.0.1",
"mssql": "^9.1.1 || ^10.0.0 || ^11.0.0 || ^12.0.0",
"mysql2": "^2.2.5 || ^3.0.1",
"oracledb": "^6.3.0",
"pg": "^8.5.1",
@@ -247,8 +246,13 @@
"logo": "https://opencollective.com/opencollective/logo.txt"
},
"devEngines": {
"runtime": {
"name": "node",
"version": ">=20.19.0"
},
"packageManager": {
"name": "npm",
"version": "^10.8.2",
"onFail": "error"
}
},

View File

@@ -6,8 +6,6 @@ import { TypeORMError } from "../error"
import { PlatformTools } from "../platform/PlatformTools"
import { CommandUtils } from "./CommandUtils"
import ourPackageJson from "../../package.json"
/**
* Generates a new project with TypeORM.
*/
@@ -117,7 +115,7 @@ export class InitCommand implements yargs.CommandModule {
)
await CommandUtils.createFile(
basePath + "/package.json",
InitCommand.appendPackageJson(
await InitCommand.appendPackageJson(
packageJsonContents,
database,
isExpress,
@@ -673,13 +671,16 @@
/**
* Appends to a given package.json template everything needed.
*/
protected static appendPackageJson(
protected static async appendPackageJson(
packageJsonContents: string,
database: string,
express: boolean,
projectIsEsm: boolean /*, docker: boolean*/,
): string {
): Promise<string> {
const packageJson = JSON.parse(packageJsonContents)
const ourPackageJson = JSON.parse(
await CommandUtils.readFile(`${__dirname}/../package.json`),
)
if (!packageJson.devDependencies) packageJson.devDependencies = {}
packageJson.devDependencies = {

View File

@ -200,4 +200,15 @@ export interface ColumnOptions extends ColumnCommonOptions {
* @See https://typeorm.io/decorator-reference#virtualcolumn for more details.
*/
query?: (alias: string) => string
/**
* Indicates if date values should be stored and retrieved in UTC timezone
* instead of local timezone. Only applies to "date" column type.
* Default value is "false" (uses local timezone for backward compatibility).
*
* @example
* @Column({ type: "date", utc: true })
* birthDate: Date
*/
utc?: boolean
}

View File

@ -533,7 +533,9 @@ export class AuroraMysqlDriver implements Driver {
if (columnMetadata.type === Boolean) {
return value === true ? 1 : 0
} else if (columnMetadata.type === "date") {
return DateUtils.mixedDateToDateString(value)
return DateUtils.mixedDateToDateString(value, {
utc: columnMetadata.utc,
})
} else if (columnMetadata.type === "time") {
return DateUtils.mixedDateToTimeString(value)
} else if (columnMetadata.type === "json") {
@ -592,7 +594,9 @@ export class AuroraMysqlDriver implements Driver {
) {
value = DateUtils.normalizeHydratedDate(value)
} else if (columnMetadata.type === "date") {
value = DateUtils.mixedDateToDateString(value)
value = DateUtils.mixedDateToDateString(value, {
utc: columnMetadata.utc,
})
} else if (columnMetadata.type === "json") {
value = typeof value === "string" ? JSON.parse(value) : value
} else if (columnMetadata.type === "time") {

View File

@ -385,7 +385,9 @@ export class CockroachDriver implements Driver {
if (columnMetadata.type === Boolean) {
return value === true ? 1 : 0
} else if (columnMetadata.type === "date") {
return DateUtils.mixedDateToDateString(value)
return DateUtils.mixedDateToDateString(value, {
utc: columnMetadata.utc,
})
} else if (columnMetadata.type === "time") {
return DateUtils.mixedDateToTimeString(value)
} else if (
@ -445,7 +447,9 @@ export class CockroachDriver implements Driver {
) {
value = DateUtils.normalizeHydratedDate(value)
} else if (columnMetadata.type === "date") {
value = DateUtils.mixedDateToDateString(value)
value = DateUtils.mixedDateToDateString(value, {
utc: columnMetadata.utc,
})
} else if (columnMetadata.type === "time") {
value = DateUtils.mixedTimeToString(value)
} else if (columnMetadata.type === "simple-array") {

View File

@ -43,4 +43,10 @@ export interface MysqlConnectionCredentialsOptions {
* Database socket path
*/
readonly socketPath?: string
/**
* Maximum number of clients the pool should contain for this connection.
*/
readonly poolSize?: number
}

View File

@ -157,6 +157,8 @@ export class MysqlDriver implements Driver {
"multilinestring",
"multipolygon",
"geometrycollection",
// vector data types
"vector",
// additional data types for mariadb
"uuid",
"inet4",
@ -191,6 +193,7 @@ export class MysqlDriver implements Driver {
"nvarchar",
"binary",
"varbinary",
"vector",
]
/**
@ -280,6 +283,7 @@ export class MysqlDriver implements Driver {
char: { length: 1 },
binary: { length: 1 },
varbinary: { length: 255 },
vector: { length: 2048 }, // default length MySQL uses if not provided a value
decimal: { precision: 10, scale: 0 },
dec: { precision: 10, scale: 0 },
numeric: { precision: 10, scale: 0 },
@ -612,7 +616,9 @@ export class MysqlDriver implements Driver {
if (columnMetadata.type === Boolean) {
return value === true ? 1 : 0
} else if (columnMetadata.type === "date") {
return DateUtils.mixedDateToDateString(value)
return DateUtils.mixedDateToDateString(value, {
utc: columnMetadata.utc,
})
} else if (columnMetadata.type === "time") {
return DateUtils.mixedDateToTimeString(value)
} else if (columnMetadata.type === "json") {
@ -666,7 +672,9 @@ export class MysqlDriver implements Driver {
) {
value = DateUtils.normalizeHydratedDate(value)
} else if (columnMetadata.type === "date") {
value = DateUtils.mixedDateToDateString(value)
value = DateUtils.mixedDateToDateString(value, {
utc: columnMetadata.utc,
})
} else if (columnMetadata.type === "json") {
// mysql2 returns JSON values already parsed, but may still be a string
// if the JSON value itself is a string (e.g., "\"hello\"")
@ -1283,7 +1291,7 @@ export class MysqlDriver implements Driver {
port: credentials.port,
ssl: options.ssl,
socketPath: credentials.socketPath,
connectionLimit: options.poolSize,
connectionLimit: credentials.poolSize ?? options.poolSize,
},
options.acquireTimeout === undefined
? {}

View File

@ -2802,17 +2802,19 @@ export class MysqlQueryRunner extends BaseQueryRunner implements QueryRunner {
) !== -1 &&
dbColumn["CHARACTER_MAXIMUM_LENGTH"]
) {
const length =
dbColumn[
"CHARACTER_MAXIMUM_LENGTH"
].toString()
let length: number =
dbColumn["CHARACTER_MAXIMUM_LENGTH"]
if (tableColumn.type === "vector") {
// MySQL and MariaDB store the vector length in bytes, not in number of dimensions.
length = length / 4
}
tableColumn.length =
!this.isDefaultColumnLength(
table,
tableColumn,
length,
length.toString(),
)
? length
? length.toString()
: ""
}

View File

@ -531,9 +531,9 @@ export class OracleDriver implements Driver {
} else if (columnMetadata.type === "date") {
if (typeof value === "string") value = value.replace(/[^0-9-]/g, "")
return () =>
`TO_DATE('${DateUtils.mixedDateToDateString(
value,
)}', 'YYYY-MM-DD')`
`TO_DATE('${DateUtils.mixedDateToDateString(value, {
utc: columnMetadata.utc,
})}', 'YYYY-MM-DD')`
} else if (
columnMetadata.type === Date ||
columnMetadata.type === "timestamp" ||
@ -567,7 +567,9 @@ export class OracleDriver implements Driver {
if (columnMetadata.type === Boolean) {
value = !!value
} else if (columnMetadata.type === "date") {
value = DateUtils.mixedDateToDateString(value)
value = DateUtils.mixedDateToDateString(value, {
utc: columnMetadata.utc,
})
} else if (columnMetadata.type === "time") {
value = DateUtils.mixedTimeToString(value)
} else if (

View File

@ -656,7 +656,9 @@ export class PostgresDriver implements Driver {
if (columnMetadata.type === Boolean) {
return value === true ? 1 : 0
} else if (columnMetadata.type === "date") {
return DateUtils.mixedDateToDateString(value)
return DateUtils.mixedDateToDateString(value, {
utc: columnMetadata.utc,
})
} else if (columnMetadata.type === "time") {
return DateUtils.mixedDateToTimeString(value)
} else if (
@ -755,7 +757,9 @@ export class PostgresDriver implements Driver {
) {
value = DateUtils.normalizeHydratedDate(value)
} else if (columnMetadata.type === "date") {
value = DateUtils.mixedDateToDateString(value)
value = DateUtils.mixedDateToDateString(value, {
utc: columnMetadata.utc,
})
} else if (columnMetadata.type === "time") {
value = DateUtils.mixedTimeToString(value)
} else if (

View File

@ -335,7 +335,9 @@ export class ReactNativeDriver implements Driver {
) {
return value === true ? 1 : 0
} else if (columnMetadata.type === "date") {
return DateUtils.mixedDateToDateString(value)
return DateUtils.mixedDateToDateString(value, {
utc: columnMetadata.utc,
})
} else if (columnMetadata.type === "time") {
return DateUtils.mixedDateToTimeString(value)
} else if (
@ -407,7 +409,9 @@ export class ReactNativeDriver implements Driver {
value = DateUtils.normalizeHydratedDate(value)
} else if (columnMetadata.type === "date") {
value = DateUtils.mixedDateToDateString(value)
value = DateUtils.mixedDateToDateString(value, {
utc: columnMetadata.utc,
})
} else if (columnMetadata.type === "time") {
value = DateUtils.mixedTimeToString(value)
} else if (columnMetadata.type === "simple-array") {

View File

@ -542,7 +542,9 @@ export class SapDriver implements Driver {
if (value === null || value === undefined) return value
if (columnMetadata.type === "date") {
return DateUtils.mixedDateToDateString(value)
return DateUtils.mixedDateToDateString(value, {
utc: columnMetadata.utc,
})
} else if (columnMetadata.type === "time") {
return DateUtils.mixedDateToTimeString(value)
} else if (
@ -584,7 +586,9 @@ export class SapDriver implements Driver {
) {
value = DateUtils.normalizeHydratedDate(value)
} else if (columnMetadata.type === "date") {
value = DateUtils.mixedDateToDateString(value)
value = DateUtils.mixedDateToDateString(value, {
utc: columnMetadata.utc,
})
} else if (columnMetadata.type === "time") {
value = DateUtils.mixedTimeToString(value)
} else if (columnMetadata.type === "simple-array") {

View File

@ -399,7 +399,9 @@ export class SpannerDriver implements Driver {
const lib = this.options.driver || PlatformTools.load("spanner")
return lib.Spanner.numeric(value.toString())
} else if (columnMetadata.type === "date") {
return DateUtils.mixedDateToDateString(value)
return DateUtils.mixedDateToDateString(value, {
utc: columnMetadata.utc,
})
} else if (columnMetadata.type === "json") {
return value
} else if (
@ -434,7 +436,9 @@ export class SpannerDriver implements Driver {
} else if (columnMetadata.type === "numeric") {
value = value.value
} else if (columnMetadata.type === "date") {
value = DateUtils.mixedDateToDateString(value)
value = DateUtils.mixedDateToDateString(value, {
utc: columnMetadata.utc,
})
} else if (columnMetadata.type === "json") {
value = typeof value === "string" ? JSON.parse(value) : value
} else if (columnMetadata.type === Number) {

View File

@ -331,7 +331,9 @@ export abstract class AbstractSqliteDriver implements Driver {
) {
return value === true ? 1 : 0
} else if (columnMetadata.type === "date") {
return DateUtils.mixedDateToDateString(value)
return DateUtils.mixedDateToDateString(value, {
utc: columnMetadata.utc,
})
} else if (columnMetadata.type === "time") {
return DateUtils.mixedDateToTimeString(value)
} else if (
@ -406,7 +408,9 @@ export abstract class AbstractSqliteDriver implements Driver {
value = DateUtils.normalizeHydratedDate(value)
} else if (columnMetadata.type === "date") {
value = DateUtils.mixedDateToDateString(value)
value = DateUtils.mixedDateToDateString(value, {
utc: columnMetadata.utc,
})
} else if (columnMetadata.type === "time") {
value = DateUtils.mixedTimeToString(value)
} else if (

View File

@ -532,7 +532,7 @@ export class SqlServerDriver implements Driver {
if (columnMetadata.type === Boolean) {
return value === true ? 1 : 0
} else if (columnMetadata.type === "date") {
return DateUtils.mixedDateToDate(value)
return DateUtils.mixedDateToDate(value, columnMetadata.utc)
} else if (columnMetadata.type === "time") {
return DateUtils.mixedTimeToDate(value)
} else if (
@ -586,7 +586,9 @@ export class SqlServerDriver implements Driver {
) {
value = DateUtils.normalizeHydratedDate(value)
} else if (columnMetadata.type === "date") {
value = DateUtils.mixedDateToDateString(value)
value = DateUtils.mixedDateToDateString(value, {
utc: columnMetadata.utc,
})
} else if (columnMetadata.type === "time") {
value = DateUtils.mixedTimeToString(value)
} else if (columnMetadata.type === "simple-array") {

View File

@ -75,7 +75,7 @@ export type WithLengthColumnType =
| "binary" // mssql
| "varbinary" // mssql, sap
| "string" // cockroachdb, spanner
| "vector" // postgres, mssql, sap
| "vector" // mariadb, mysql, mssql, postgres, sap
| "halfvec" // postgres, sap
| "half_vector" // sap
| "real_vector" // sap

View File

@ -123,6 +123,12 @@ export class ColumnMetadata {
*/
comment?: string
/**
* Indicates if date values use UTC timezone.
* Only applies to "date" column type.
*/
utc: boolean = false
/**
* Default database value.
*/
@ -388,6 +394,8 @@ export class ColumnMetadata {
this.isSelect = options.args.options.select
if (options.args.options.insert !== undefined)
this.isInsert = options.args.options.insert
if (options.args.options.utc !== undefined)
this.utc = options.args.options.utc
if (options.args.options.update !== undefined)
this.isUpdate = options.args.options.update
if (options.args.options.readonly !== undefined)

View File

@ -25,8 +25,21 @@ export class DateUtils {
/**
* Converts given value into date string in a "YYYY-MM-DD" format.
*/
static mixedDateToDateString(value: string | Date): string {
static mixedDateToDateString(
value: string | Date,
options?: { utc?: boolean },
): string {
const utc = options?.utc ?? false
if (value instanceof Date) {
if (utc) {
return (
this.formatZerolessValue(value.getUTCFullYear(), 4) +
"-" +
this.formatZerolessValue(value.getUTCMonth() + 1) +
"-" +
this.formatZerolessValue(value.getUTCDate())
)
}
return (
this.formatZerolessValue(value.getFullYear(), 4) +
"-" +

View File

@ -12,6 +12,7 @@ describe("benchmark > bulk-sql-build", () => {
before(async () => {
dataSources = await createTestingConnections({
__dirname,
enabledDrivers: ["postgres"],
})
})
beforeEach(() => reloadTestingDatabases(dataSources))

View File

@ -1,11 +1,11 @@
import { expect } from "chai"
import "reflect-metadata"
import { scheduler } from "timers/promises"
import { DataSource } from "../../../../src"
import {
closeTestingConnections,
createTestingConnections,
reloadTestingDatabases,
sleep,
} from "../../../utils/test-utils"
import { Post } from "./entity/Post"
@ -81,7 +81,7 @@ describe("column kinds > create date column", () => {
})
// wait a second
await sleep(1000)
await scheduler.wait(1010)
// create post once again
post.title = "Updated Title"

View File

@ -1,11 +1,11 @@
import { expect } from "chai"
import "reflect-metadata"
import { scheduler } from "timers/promises"
import { DataSource } from "../../../../src"
import {
closeTestingConnections,
createTestingConnections,
reloadTestingDatabases,
sleep,
} from "../../../utils/test-utils"
import { Post } from "./entity/Post"
@ -109,7 +109,7 @@ describe("column kinds > update date column", () => {
})
// wait a second
await sleep(2000)
await scheduler.wait(1010)
// update post once again
post.title = "Updated Title"
@ -120,9 +120,9 @@ describe("column kinds > update date column", () => {
await postRepository.findOneByOrFail({
id: post.id,
})
expect(loadedPostAfterUpdate.updatedAt.getTime()).to.be.not.eql(
loadedPostBeforeUpdate.updatedAt.getTime(),
)
expect(
loadedPostAfterUpdate.updatedAt.getTime(),
).to.be.greaterThan(loadedPostBeforeUpdate.updatedAt.getTime())
}),
))

View File

@ -5,7 +5,6 @@ import {
closeTestingConnections,
createTestingConnections,
reloadTestingDatabases,
sleep,
} from "../../../utils/test-utils"
import { Post } from "./entity/Post"
@ -98,9 +97,6 @@ describe("column kinds > version column", () => {
post.title = "Post"
await postRepository.save(post)
// wait a second
await sleep(1000)
// update post once again
post.title = "Updated Title"
await postRepository.save(post)

View File

@ -1,15 +1,15 @@
import "reflect-metadata"
import { expect } from "chai"
import "reflect-metadata"
import { scheduler } from "timers/promises"
import { DataSource } from "../../../src/data-source/DataSource"
import {
closeTestingConnections,
createTestingConnections,
reloadTestingDatabases,
sleep,
} from "../../utils/test-utils"
import { DataSource } from "../../../src/data-source/DataSource"
import { Address } from "./entity/Address"
import { User } from "./entity/User"
import { MockQueryResultCache } from "./provider/MockQueryResultCache"
import { Address } from "./entity/Address"
describe("custom cache provider", () => {
let connections: DataSource[]
@ -101,7 +101,7 @@ describe("custom cache provider", () => {
expect(users3.length).to.be.equal(1)
// give some time for cache to expire
await sleep(1000)
await scheduler.wait(1010)
// now, when our cache has expired we check if we have new user inserted even with cache enabled
const users4 = await connection
@ -179,7 +179,7 @@ describe("custom cache provider", () => {
expect(users3.length).to.be.equal(1)
// give some time for cache to expire
await sleep(1000)
await scheduler.wait(1010)
// now, when our cache has expired we check if we have new user inserted even with cache enabled
const users4 = await connection
@ -249,7 +249,7 @@ describe("custom cache provider", () => {
expect(users2.length).to.be.equal(2)
// give some time for cache to expire
await sleep(1000)
await scheduler.wait(1010)
// but with cache enabled it must not return newly inserted entity since cache is not expired yet
const users3 = await connection
@ -263,7 +263,7 @@ describe("custom cache provider", () => {
expect(users3.length).to.be.equal(1)
// give some time for cache to expire
await sleep(1000)
await scheduler.wait(1010)
// now, when our cache has expired we check if we have new user inserted even with cache enabled
const users4 = await connection
@ -389,7 +389,7 @@ describe("custom cache provider", () => {
expect(users3).to.be.equal(1)
// give some time for cache to expire
await sleep(1000)
await scheduler.wait(1010)
// now, when our cache has expired we check if we have new user inserted even with cache enabled
const users4 = await connection

View File

@ -0,0 +1,50 @@
import "reflect-metadata"
import { expect } from "chai"
import {
    closeTestingConnections,
    createTestingConnections,
    reloadTestingDatabases,
} from "../../../utils/test-utils"
import { Event } from "./entity/Event"
import { DataSource } from "../../../../src"

describe("columns > date utc flag", () => {
    let originalTZ: string | undefined
    let connections: DataSource[]

    before(async () => {
        // Pin the process to a timezone west of UTC so a UTC midnight falls
        // on the previous calendar day in local time, making the utc/local
        // difference observable.
        // NOTE(review): this relies on Node re-reading process.env.TZ after
        // startup — confirm this holds on all CI Node versions.
        originalTZ = process.env.TZ
        process.env.TZ = "America/New_York"

        connections = await createTestingConnections({
            entities: [Event],
        })
    })

    after(async () => {
        // Restore the original TZ. Assigning `undefined` to process.env.TZ
        // would store the literal string "undefined", so delete the variable
        // instead when it was not set before.
        if (originalTZ === undefined) {
            delete process.env.TZ
        } else {
            process.env.TZ = originalTZ
        }
        await closeTestingConnections(connections)
    })

    beforeEach(() => reloadTestingDatabases(connections))

    it("should save date columns in UTC when utc flag is true and in local timezone when false", () =>
        Promise.all(
            connections.map(async (connection) => {
                const event = new Event()
                const testDate = new Date(Date.UTC(2025, 5, 1)) // 2025-06-01T00:00:00Z
                event.localDate = testDate
                event.utcDate = testDate

                const savedEvent = await connection.manager.save(event)

                const result = await connection.manager.findOneBy(Event, {
                    id: savedEvent.id,
                })

                // utc: true — stored using the UTC calendar date
                expect(result!.utcDate).to.equal("2025-06-01")

                // utc unset (default false) — stored using the local calendar
                // date, which in America/New_York is still the previous day
                expect(result!.localDate).to.equal("2025-05-31")
            }),
        ))
})

View File

@ -0,0 +1,17 @@
import { Column } from "../../../../../src/decorator/columns/Column"
import { PrimaryGeneratedColumn } from "../../../../../src/decorator/columns/PrimaryGeneratedColumn"
import { Entity } from "../../../../../src/decorator/entity/Entity"

/**
 * Fixture entity with two "date" columns: one persisted with the default
 * (local timezone) behavior and one persisted as a UTC calendar date via
 * the `utc` column flag.
 */
@Entity("event")
export class Event {
    @PrimaryGeneratedColumn()
    id: number

    // Uses the default behavior (utc flag unset).
    @Column({ type: "date" })
    localDate: Date

    // Stored and retrieved using the UTC calendar date.
    @Column({ type: "date", utc: true })
    utcDate: Date
}

Some files were not shown because too many files have changed in this diff Show More