mirror of https://github.com/typeorm/typeorm.git (synced 2025-12-08 21:26:23 +00:00)

build: improve test workflow (#11361)

This commit is contained in:
parent 4e31a8648a
commit 6ba408214e
27 .github/workflows/database-compose-tests.yml vendored
@@ -9,24 +9,31 @@ on:
jobs:
oracle:
# For some reason nyc is stuck at the end of the test execution even if all tests pass
# Probably that's why the job failed on CircleCI
if: ${{inputs.node-version != '16.x'}}
# nyc is stuck at the end of the test execution even if all tests pass on Node.js 16.x
if: ${{ inputs.node-version != 16 }}
runs-on: ubuntu-latest
strategy:
fail-fast: false

steps:
- uses: actions/checkout@v4

- run: docker compose -f .github/workflows/test/oracle.docker-compose.yml up oracle --detach

- uses: actions/setup-node@v4
with:
node-version: ${{inputs.node-version}}
- uses: actions/checkout@v4
- run: docker compose -f .github/workflows/test/oracle.docker-compose up -d
node-version: ${{ inputs.node-version }}
cache: "npm"
- uses: actions/download-artifact@v4
with:
name: build
path: build/

- run: npm i
- run: cp .github/workflows/test/oracle.ormconfig.json ormconfig.json
- run: npx nyc npm run test
- run: docker compose -f .github/workflows/test/oracle.docker-compose.yml up oracle --wait
- run: npx nyc npm run test:ci

- name: Coveralls Parallel
uses: coverallsapp/github-action@v2
with:
flag-name: oracle-${{ inputs.node-version }}
flag-name: oracle-node:${{ inputs.node-version }}
parallel: true
160 .github/workflows/database-tests.yml vendored
@@ -3,97 +3,113 @@ name: database-tests
on:
workflow_call:
inputs:
node-container:
node-version:
required: true
type: string

jobs:
cockroachdb:
runs-on: ubuntu-latest
strategy:
fail-fast: false

container: ${{inputs.node-container}}
services:
crdb:
image: cockroachdb/cockroach
cockroachdb:
image: cockroachdb/cockroach:v24.3.8
ports:
- "26257:26257"
env:
COCKROACH_ARGS: 'start-single-node --insecure --cache=1GB --store=type=mem,size=4GB'
COCKROACH_ARGS: "start-single-node --insecure --cache=1GB --store=type=mem,size=4GB"

steps:
- uses: actions/checkout@v4
- uses: actions/setup-node@v4
with:
node-version: ${{ inputs.node-version }}
cache: "npm"
- uses: actions/download-artifact@v4
with:
name: build
path: build/
- run: npm i
- run: chown -R 1001:127 /github/home/.npm #This fix is needed for running CLI tests
- run: cp .github/workflows/test/cockroachdb.ormconfig.json ormconfig.json
- run: npx nyc npm run test
- run: npx nyc npm run test:ci
- name: Coveralls Parallel
uses: coverallsapp/github-action@v2
with:
flag-name: crdb-${{ inputs.node-container }}
flag-name: crdb-node:${{ inputs.node-version }}
parallel: true

mongodb:
runs-on: ubuntu-latest
strategy:
fail-fast: false

container: ${{inputs.node-container}}
services:
mongodb:
image: mongo:5.0.12
ports:
- "27017:27017"

steps:
- uses: actions/checkout@v4
- uses: actions/setup-node@v4
with:
node-version: ${{ inputs.node-version }}
cache: "npm"
- uses: actions/download-artifact@v4
with:
name: build
path: build/
- run: npm i
- run: chown -R 1001:127 /github/home/.npm #This fix is needed for running CLI tests
- run: cp .github/workflows/test/mongodb.ormconfig.json ormconfig.json
- run: npx nyc npm run test
- run: npx nyc npm run test:ci
- name: Coveralls Parallel
uses: coverallsapp/github-action@v2
with:
flag-name: mongodb-${{ inputs.node-container }}
flag-name: mongodb-node:${{ inputs.node-version }}
parallel: true


mssql:
runs-on: ubuntu-latest
strategy:
fail-fast: false
matrix:
mssql-version: ['^9.1.1', '^10.0.1', '^11.0.1']
mssql-version: ["^9.1.1", "^10.0.1", "^11.0.1"]

container: ${{inputs.node-container}}
services:
mssql:
image: "mcr.microsoft.com/mssql/server:2022-latest"
ports:
- "1433:1433"
env:
SA_PASSWORD: "Admin12345"
ACCEPT_EULA: "Y"

steps:
- uses: actions/checkout@v4
- uses: actions/setup-node@v4
with:
node-version: ${{ inputs.node-version }}
cache: "npm"
- uses: actions/download-artifact@v4
with:
name: build
path: build/
- run: npm i
- run: npm install mssql@${{ matrix.mssql-version }}
- run: chown -R 1001:127 /github/home/.npm #This fix is needed for running CLI tests
- run: cp .github/workflows/test/mssql.ormconfig.json ormconfig.json
- run: npm test
- run: npx nyc npm run test
- run: npx nyc npm run test:ci
- name: Coveralls Parallel
uses: coverallsapp/github-action@v2
with:
flag-name: mssql-${{ inputs.node-container }}
flag-name: mssql-node:${{ inputs.node-version }}
parallel: true


mysql_mariadb:
runs-on: ubuntu-latest
strategy:
fail-fast: false

container: ${{inputs.node-container}}
services:
mysql:
image: mysql:5.7.37
ports:
- "3306:3306"
env:
MYSQL_ROOT_PASSWORD: "admin"
MYSQL_USER: "test"
@@ -101,6 +117,8 @@ jobs:
MYSQL_DATABASE: "test"
mariadb:
image: mariadb:10.10.3
ports:
- "3307:3306"
env:
MYSQL_ROOT_PASSWORD: "admin"
MYSQL_USER: "test"
@@ -109,88 +127,97 @@ jobs:

steps:
- uses: actions/checkout@v4
- uses: actions/setup-node@v4
with:
node-version: ${{ inputs.node-version }}
cache: "npm"
- uses: actions/download-artifact@v4
with:
name: build
path: build/
- run: npm i
- run: chown -R 1001:127 /github/home/.npm #This fix is needed for running CLI tests
- run: cp .github/workflows/test/mysql-mariadb.ormconfig.json ormconfig.json
- run: npx nyc npm run test
- run: npx nyc npm run test:ci
- name: Coveralls Parallel
uses: coverallsapp/github-action@v2
with:
flag-name: mysql+mariadb-${{ inputs.node-container }}
flag-name: mysql+mariadb-node:${{ inputs.node-version }}
parallel: true


better-sqlite3:
runs-on: ubuntu-latest
strategy:
fail-fast: false

container: ${{inputs.node-container}}

steps:
- uses: actions/checkout@v4
- uses: actions/setup-node@v4
with:
node-version: ${{ inputs.node-version }}
cache: "npm"
- uses: actions/download-artifact@v4
with:
name: build
path: build/
- run: npm i
- run: chown -R 1001:127 /github/home/.npm #This fix is needed for running CLI tests
- run: chown -R 1001:127 ./temp #This fix is needed for fixing permission error on Node 16
- run: cp .github/workflows/test/better-sqlite3.ormconfig.json ormconfig.json
- run: npx nyc npm run test
- run: npx nyc npm run test:ci
- name: Coveralls Parallel
uses: coverallsapp/github-action@v2
with:
flag-name: better-sqlite3-${{ inputs.node-container }}
flag-name: better-sqlite3-node:${{ inputs.node-version }}
parallel: true


sqlite:
runs-on: ubuntu-latest
strategy:
fail-fast: false

container: ${{inputs.node-container}}

steps:
- uses: actions/checkout@v4
- uses: actions/setup-node@v4
with:
node-version: ${{ inputs.node-version }}
cache: "npm"
- uses: actions/download-artifact@v4
with:
name: build
path: build/
- run: npm i
- run: chown -R 1001:127 /github/home/.npm #This fix is needed for running CLI tests
- run: chown -R 1001:127 ./temp #This fix is needed for fixing permission error on Node 16
- run: cp .github/workflows/test/sqlite.ormconfig.json ormconfig.json
- run: npx nyc npm run test
- run: npx nyc npm run test:ci
- name: Coveralls Parallel
uses: coverallsapp/github-action@v2
with:
flag-name: sqlite-${{ inputs.node-container }}
flag-name: sqlite-node:${{ inputs.node-version }}
parallel: true


sqljs:
runs-on: ubuntu-latest
strategy:
fail-fast: false

container: ${{inputs.node-container}}

steps:
- uses: actions/checkout@v4
- uses: actions/setup-node@v4
with:
node-version: ${{ inputs.node-version }}
cache: "npm"
- uses: actions/download-artifact@v4
with:
name: build
path: build/
- run: npm i
- run: chown -R 1001:127 /github/home/.npm #This fix is needed for running CLI tests
- run: cp .github/workflows/test/sqljs.ormconfig.json ormconfig.json
- run: npx nyc npm run test
- run: npx nyc npm run test:ci
- name: Coveralls Parallel
uses: coverallsapp/github-action@v2
with:
flag-name: sqljs-${{ inputs.node-container }}
flag-name: sqljs-node:${{ inputs.node-version }}
parallel: true


postgres:
runs-on: ubuntu-latest
strategy:
fail-fast: false

container: ${{inputs.node-container}}
services:
postgres:
image: postgis/postgis:14-3.3
ports:
- "5432:5432"
env:
POSTGRES_USERNAME: postgres
POSTGRES_PASSWORD: postgres
@@ -203,12 +230,19 @@ jobs:

steps:
- uses: actions/checkout@v4
- uses: actions/setup-node@v4
with:
node-version: ${{ inputs.node-version }}
cache: "npm"
- uses: actions/download-artifact@v4
with:
name: build
path: build/
- run: npm i
- run: chown -R 1001:127 /github/home/.npm #This fix is needed for running CLI tests
- run: cp .github/workflows/test/postgres.ormconfig.json ormconfig.json
- run: npx nyc npm run test
- run: npx nyc npm run test:ci
- name: Coveralls Parallel
uses: coverallsapp/github-action@v2
with:
flag-name: postgres-${{ inputs.node-container }}
flag-name: postgres-node:${{ inputs.node-version }}
parallel: true
43 .github/workflows/test.yml vendored
@@ -6,50 +6,71 @@ name: test

on:
push:
branches: [ "**" ]
branches: ["**"]
pull_request:
branches: [ "master" ]
branches: ["master"]

jobs:
formatting:
runs-on: ubuntu-latest
strategy:
fail-fast: false

steps:
- uses: actions/checkout@v4
- uses: actions/setup-node@v4
with:
node-version: "18.x"
node-version: "20.x"
cache: "npm"

- uses: actions/checkout@v4

- run: npm i
- run: npm ci
- run: npm run lint
- run: npm run format:ci

build:
runs-on: ubuntu-latest

steps:
- uses: actions/checkout@v4
- uses: actions/setup-node@v4
with:
node-version: "20.x"
cache: "npm"

- run: npm ci
- run: npm run compile

- name: Upload build
uses: actions/upload-artifact@v4
with:
name: build
path: build/
retention-days: 1

# These tests run in standard node containers with their db attached as a service
database-tests:
needs: build
strategy:
fail-fast: false
matrix:
node-container: ["node:16", "node:18", "node:20"] #, "node:22"]
# See supported Node.js release schedule at https://nodejs.org/en/about/releases/
node-version: [16, 18, 20] #, 22]
uses: ./.github/workflows/database-tests.yml
with:
node-container: ${{matrix.node-container}}
node-version: ${{matrix.node-version}}

# These tests run with custom docker image attributes that can't be specified in a GHA service
database-compose-tests:
needs: build
strategy:
fail-fast: false
matrix:
node-version: [16.x, 18.x, 20.x] #, 22.x]
# See supported Node.js release schedule at https://nodejs.org/en/about/releases/
node-version: [16, 18, 20] #, 22]
uses: ./.github/workflows/database-compose-tests.yml
with:
node-version: ${{matrix.node-version}}

windows-database-tests:
needs: build
uses: ./.github/workflows/windows-database-tests.yml

# Run with most databases possible to provide the coverage of the tests
@@ -5,17 +5,5 @@
"type": "better-sqlite3",
"database": "./temp/better-sqlite3db.db",
"logging": false
},

{
"skip": true,
"name": "postgres",
"type": "postgres",
"host": "postgres",
"port": 5432,
"username": "postgres",
"password": "postgres",
"database": "postgres",
"logging": false
}
]
@@ -3,23 +3,11 @@
"skip": false,
"name": "cockroachdb",
"type": "cockroachdb",
"host": "crdb",
"host": "localhost",
"port": 26257,
"username": "root",
"password": "",
"database": "defaultdb",
"logging": false
},

{
"skip": true,
"name": "postgres",
"type": "postgres",
"host": "postgres",
"port": 5432,
"username": "postgres",
"password": "postgres",
"database": "postgres",
"logging": false
}
]
14 .github/workflows/test/mongodb.ormconfig.json vendored
@@ -4,23 +4,11 @@
"disabledIfNotEnabledImplicitly": true,
"name": "mongodb",
"type": "mongodb",
"host": "mongodb",
"host": "localhost",
"port": 27017,
"database": "test",
"logging": false,
"useNewUrlParser": true,
"useUnifiedTopology": true
},

{
"skip": true,
"name": "postgres",
"type": "postgres",
"host": "postgres",
"port": 5432,
"username": "postgres",
"password": "postgres",
"database": "postgres",
"logging": false
}
]
14 .github/workflows/test/mssql.ormconfig.json vendored
@@ -3,7 +3,7 @@
"skip": false,
"name": "mssql",
"type": "mssql",
"host": "mssql",
"host": "localhost",
"port": 1433,
"username": "sa",
"password": "Admin12345",
@@ -12,17 +12,5 @@
"extra": {
"trustServerCertificate": true
}
},

{
"skip": true,
"name": "postgres",
"type": "postgres",
"host": "postgres",
"port": 5432,
"username": "postgres",
"password": "postgres",
"database": "postgres",
"logging": false
}
]
@@ -3,7 +3,7 @@
"skip": false,
"name": "mysql",
"type": "mysql",
"host": "mysql",
"host": "localhost",
"port": 3306,
"username": "root",
"password": "admin",
@@ -14,23 +14,11 @@
"skip": false,
"name": "mariadb",
"type": "mariadb",
"host": "mariadb",
"port": 3306,
"host": "localhost",
"port": 3307,
"username": "root",
"password": "admin",
"database": "test",
"logging": false
},

{
"skip": true,
"name": "postgres",
"type": "postgres",
"host": "postgres",
"port": 5432,
"username": "postgres",
"password": "postgres",
"database": "postgres",
"logging": false
}
]
12 .github/workflows/test/oracle.docker-compose vendored
@@ -1,12 +0,0 @@
services:
oracle:
build:
context: ../../../docker/oracle
container_name: "typeorm-oracle"
ports:
- "1521:1521"
#volumes:
# - oracle-data:/opt/oracle/oradata
healthcheck:
test: [ "CMD", "/opt/oracle/checkDBStatus.sh" ]
interval: 2s
12 .github/workflows/test/oracle.docker-compose.yml vendored Normal file
@@ -0,0 +1,12 @@
services:
oracle:
image: "container-registry.oracle.com/database/free:23.5.0.0-lite"
container_name: "typeorm-oracle"
ports:
- "1521:1521"
environment:
ORACLE_PWD: "oracle"
ORACLE_SID: "FREE"
volumes:
# - oracle-data:/opt/oracle/oradata
- ../../../docker/oracle/startup:/opt/oracle/scripts/startup:ro
17 .github/workflows/test/oracle.ormconfig.json vendored
@@ -5,24 +5,9 @@
"type": "oracle",
"host": "localhost",
"port": 1521,
"serviceName": "XEPDB1",
"serviceName": "FREEPDB1",
"username": "typeorm",
"password": "oracle",
"logging": false,
"extra": {
"connectString": "localhost:1521/XEPDB1"
}
},

{
"skip": true,
"name": "postgres",
"type": "postgres",
"host": "postgres",
"port": 5432,
"username": "postgres",
"password": "postgres",
"database": "postgres",
"logging": false
}
]
@@ -3,7 +3,7 @@
"skip": false,
"name": "postgres",
"type": "postgres",
"host": "postgres",
"host": "localhost",
"port": 5432,
"username": "postgres",
"password": "postgres",
12 .github/workflows/test/sqlite.ormconfig.json vendored
@@ -6,17 +6,5 @@
"database": "./temp/sqlitedb-1.db",
"logging": false,
"relationLoadStrategy": "join"
},

{
"skip": true,
"name": "postgres",
"type": "postgres",
"host": "postgres",
"port": 5432,
"username": "postgres",
"password": "postgres",
"database": "postgres",
"logging": false
}
]
12 .github/workflows/test/sqljs.ormconfig.json vendored
@@ -4,17 +4,5 @@
"name": "sqljs",
"type": "sqljs",
"logging": false
},

{
"skip": true,
"name": "postgres",
"type": "postgres",
"host": "postgres",
"port": 5432,
"username": "postgres",
"password": "postgres",
"database": "postgres",
"logging": false
}
]
42 .github/workflows/windows-database-tests.yml vendored
@@ -9,9 +9,19 @@ jobs:

steps:
- uses: actions/checkout@v4
- run: npm i
- uses: actions/setup-node@v4
with:
node-version: 20
cache: "npm"
- uses: actions/download-artifact@v4
with:
name: build
path: build/

- run: npm ci
- run: cp .github/workflows/test/better-sqlite3.ormconfig.json ormconfig.json
- run: npx nyc npm run test
- run: npx nyc npm run test:ci

- name: Coveralls Parallel
uses: coverallsapp/github-action@v2
with:
@@ -24,9 +34,19 @@ jobs:

steps:
- uses: actions/checkout@v4
- run: npm i
- uses: actions/setup-node@v4
with:
node-version: 20
cache: "npm"
- uses: actions/download-artifact@v4
with:
name: build
path: build/

- run: npm ci
- run: cp .github/workflows/test/sqlite.ormconfig.json ormconfig.json
- run: npx nyc npm run test
- run: npx nyc npm run test:ci

- name: Coveralls Parallel
uses: coverallsapp/github-action@v2
with:
@@ -39,9 +59,19 @@ jobs:

steps:
- uses: actions/checkout@v4
- run: npm i
- uses: actions/setup-node@v4
with:
node-version: 20
cache: "npm"
- uses: actions/download-artifact@v4
with:
name: build
path: build/

- run: npm ci
- run: cp .github/workflows/test/sqljs.ormconfig.json ormconfig.json
- run: npx nyc npm run test
- run: npx nyc npm run test:ci

- name: Coveralls Parallel
uses: coverallsapp/github-action@v2
with:
8 .mocharc.json Normal file
@@ -0,0 +1,8 @@
{
"check-leaks": true,
"color": true,
"file": ["./build/compiled/test/utils/test-setup.js"],
"spec": ["./build/compiled/test"],
"timeout": "90000",
"recursive": true
}
@@ -49,6 +49,7 @@ cd typeorm
# Add the main TypeORM repository as an upstream remote to your repository:
git remote add upstream https://github.com/typeorm/typeorm.git
```

## Installing NPM Modules

Install all TypeORM dependencies by running this command:
@@ -139,7 +140,7 @@ You should make sure the test suites pass before submitting a PR to GitHub. Test

**Executing only some tests**: When you are creating tests to some specific code, you may want to only execute the tests that you're creating.

To do this, you can temporarily modify your test definitions by adding [`.only` *mocha* commands](https://mochajs.org/#exclusive-tests) to `describe` and `it`. For example:
To do this, you can temporarily modify your test definitions by adding [`.only` _mocha_ commands](https://mochajs.org/#exclusive-tests) to `describe` and `it`. For example:

```
describe.only('your describe test', ....)
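// Illustrative sketch only (placeholder names, not code from this repository):
// inside an exclusive describe block, `it.only` narrows the run to a single case.
it.only('your it test', ....)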
@@ -157,7 +158,7 @@ The `npm test` script works by deleting built TypeScript code, rebuilding the co

Instead, for a quicker feedback cycle, you can run `npm run compile -- --watch` to make a fresh build and instruct TypeScript to watch for changes and only compile what code you've changed.

Once TypeScript finishes compiling your changes, you can run `npm run test-fast` (instead of `test`), to trigger a test without causing a full recompile, which allows you to edit and check your changes much faster.
Once TypeScript finishes compiling your changes, you can run `npm run test:fast` (instead of `test`), to trigger a test without causing a full recompile, which allows you to edit and check your changes much faster.
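A rough sketch of that feedback loop, using only the two scripts named above (splitting them across two terminals is just one convenient arrangement):

```
# terminal 1: recompile incrementally as files change
npm run compile -- --watch

# terminal 2: re-run the already-compiled tests without a full rebuild
npm run test:fast
```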

## Using Docker
@@ -49,24 +49,26 @@ services:

# cockroachdb
cockroachdb:
image: "cockroachdb/cockroach:v23.1.9"
image: "cockroachdb/cockroach:v24.3.8"
container_name: "typeorm-cockroachdb"
command: start-single-node --insecure --cache=.25 --store=type=mem,size=.25
command: "start-single-node --insecure --cache=.25 --store=type=mem,size=.25"
ports:
- "26257:26257"
# volumes:
# - cockroach-data:/cockroach/cockroach-data

# oracle
oracle:
build:
context: docker/oracle
image: "container-registry.oracle.com/database/free:23.5.0.0-lite"
container_name: "typeorm-oracle"
ports:
- "1521:1521"
#volumes:
# - oracle-data:/opt/oracle/oradata
healthcheck:
test: ["CMD", "/opt/oracle/checkDBStatus.sh"]
interval: 2s
environment:
ORACLE_PWD: "oracle"
ORACLE_SID: "FREE"
volumes:
# - oracle-data:/opt/oracle/oradata
- ./docker/oracle/startup:/opt/oracle/scripts/startup:ro

# google cloud spanner
spanner:
@@ -110,12 +112,15 @@ services:
container_name: "typeorm-mongodb"
ports:
- "27017:27017"

# redis
# redis:
# image: "redis:3.0.3"
# container_name: "typeorm-redis"
# ports:
# - "6379:6379"
#volumes:
# volume-hana-xe:
# mysql8_volume:

# volumes:
# cockroach-data:
# oracle-data:
# volume-hana-xe:
@@ -1,22 +0,0 @@
ALTER SESSION SET CONTAINER = XEPDB1;

CREATE TABLESPACE typeormspace32
DATAFILE 'typeormspace32.dbf'
SIZE 100M
AUTOEXTEND ON;

-- create users:
CREATE USER typeorm IDENTIFIED BY "oracle" DEFAULT TABLESPACE typeormspace32;

GRANT CREATE SESSION TO typeorm;
GRANT CREATE TABLE TO typeorm;
GRANT CREATE VIEW TO typeorm;
GRANT CREATE MATERIALIZED VIEW TO typeorm;
GRANT CREATE PROCEDURE TO typeorm;
GRANT CREATE SEQUENCE TO typeorm;

ALTER USER typeorm QUOTA UNLIMITED ON typeormspace32;

-- set password expiry to unlimited
ALTER PROFILE DEFAULT LIMIT PASSWORD_REUSE_TIME UNLIMITED;
ALTER PROFILE DEFAULT LIMIT PASSWORD_LIFE_TIME UNLIMITED;
@@ -1,8 +0,0 @@
FROM container-registry.oracle.com/database/express:21.3.0-xe

ENV ORACLE_PWD=oracle
ENV ORACLE_SID=XE
COPY 01_init.sql /docker-entrypoint-initdb.d/startup/
ENV PORT=1521

EXPOSE ${PORT}
16 docker/oracle/startup/01_init.sql Normal file
@@ -0,0 +1,16 @@
WHENEVER SQLERROR EXIT SQL.SQLCODE;

ALTER SESSION SET CONTAINER = FREEPDB1;

CREATE TABLESPACE typeormspace32
DATAFILE 'typeormspace32.dbf'
SIZE 100M
AUTOEXTEND ON;

-- create users:
CREATE USER typeorm
IDENTIFIED BY "oracle"
DEFAULT TABLESPACE typeormspace32
QUOTA UNLIMITED ON typeormspace32;

GRANT DB_DEVELOPER_ROLE TO typeorm;
@@ -1,126 +0,0 @@
[
{
"skip": true,
"name": "mysql",
"type": "mysql",
"host": "typeorm-mysql",
"port": 3306,
"username": "root",
"password": "admin",
"database": "test",
"logging": false
},
{
"skip": true,
"name": "mariadb",
"type": "mariadb",
"host": "typeorm-mariadb",
"port": 3306,
"username": "root",
"password": "admin",
"database": "test",
"logging": false
},
{
"skip": true,
"name": "sqlite",
"type": "sqlite",
"database": "./temp/sqlitedb-1.db",
"logging": false,
"relationLoadStrategy": "join"
},
{
"skip": true,
"name": "sqlite-2",
"type": "sqlite",
"database": "./temp/sqlitedb-2.db",
"logging": false,
"relationLoadStrategy": "query"
},
{
"skip": true,
"name": "better-sqlite3",
"type": "better-sqlite3",
"database": "temp/better-sqlite3db.db",
"logging": false
},
{
"skip": true,
"name": "postgres",
"type": "postgres",
"host": "typeorm-postgres",
"port": 5432,
"username": "test",
"password": "test",
"database": "test",
"logging": false
},
{
"skip": true,
"name": "sqljs",
"type": "sqljs",
"logging": false
},

{
"skip": true,
"name": "mssql",
"type": "mssql",
"host": "typeorm-mssql",
"port": 1433,
"username": "sa",
"password": "Admin12345",
"database": "tempdb",
"logging": false,
"extra": {
"trustServerCertificate": true
}
},
{
"skip": true,
"name": "sap",
"type": "sap",
"host": "typeorm-hana",
"port": 39015,
"username": "SYSTEM",
"password": "MySuperHanaPwd123!",
"database": "HXE",
"logging": false
},
{
"skip": true,
"disabledIfNotEnabledImplicitly": true,
"name": "mongodb",
"type": "mongodb",
"host": "typeorm-mongodb",
"port": 27017,
"database": "test",
"logging": false,
"useNewUrlParser": true,
"useUnifiedTopology": true
},
{
"skip": true,
"name": "cockroachdb",
"type": "cockroachdb",
"host": "typeorm-cockroachdb",
"port": 26257,
"username": "root",
"password": "",
"database": "defaultdb"
},
{
"skip": true,
"name": "oracle",
"type": "oracle",
"host": "typeorm-oracle",
"port": 1521,
"serviceName": "XEPDB1",
"username": "typeorm",
"password": "oracle",
"logging": false,
"extra": {
"connectString": "typeorm-oracle:1521/XEPDB1"
}
}
]
@@ -76,7 +76,7 @@
"username": "typeorm",
"password": "oracle",
"port": 1521,
"serviceName": "XEPDB1",
"serviceName": "FREEPDB1",
"logging": false
},
{
12 package-lock.json generated
@@ -40,6 +40,7 @@
"@types/node": "^16.18.126",
"@types/sha.js": "^2.4.4",
"@types/sinon": "^10.0.20",
"@types/sinon-chai": "^4.0.0",
"@types/source-map-support": "^0.5.10",
"@types/yargs": "^17.0.33",
"better-sqlite3": "^8.7.0",
@@ -3254,6 +3255,17 @@
"@types/sinonjs__fake-timers": "*"
}
},
"node_modules/@types/sinon-chai": {
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/@types/sinon-chai/-/sinon-chai-4.0.0.tgz",
"integrity": "sha512-Uar+qk3TmeFsUWCwtqRNqNUE7vf34+MCJiQJR5M2rd4nCbhtE8RgTiHwN/mVwbfCjhmO6DiOel/MgzHkRMJJFg==",
"dev": true,
"license": "MIT",
"dependencies": {
"@types/chai": "*",
"@types/sinon": "*"
}
},
"node_modules/@types/sinonjs__fake-timers": {
"version": "8.1.5",
"resolved": "https://registry.npmjs.org/@types/sinonjs__fake-timers/-/sinonjs__fake-timers-8.1.5.tgz",
@@ -82,8 +82,9 @@
"lint": "eslint .",
"pack": "gulp pack",
"package": "gulp package",
"test": "rimraf ./build && tsc && mocha --file ./build/compiled/test/utils/test-setup.js --bail --recursive --timeout 90000 ./build/compiled/test",
"test-fast": "mocha --file ./build/compiled/test/utils/test-setup.js --bail --recursive --timeout 90000 ./build/compiled/test",
"test": "npm run compile && npm run test:fast",
"test:ci": "mocha --bail",
"test:fast": "mocha",
"watch": "./node_modules/.bin/tsc -w"
},
"dependencies": {
@@ -113,6 +114,7 @@
"@types/node": "^16.18.126",
"@types/sha.js": "^2.4.4",
"@types/sinon": "^10.0.20",
"@types/sinon-chai": "^4.0.0",
"@types/source-map-support": "^0.5.10",
"@types/yargs": "^17.0.33",
"better-sqlite3": "^8.7.0",
@@ -1,11 +1,12 @@
import appRootPath from "app-root-path"
import path from "path"

import { DataSourceOptions } from "../data-source/DataSourceOptions"
import { PlatformTools } from "../platform/PlatformTools"
import { ConnectionOptionsEnvReader } from "./options-reader/ConnectionOptionsEnvReader"
import { TypeORMError } from "../error"
import { isAbsolute } from "../util/PathUtils"
import { PlatformTools } from "../platform/PlatformTools"
import { importOrRequireFile } from "../util/ImportUtils"
import { isAbsolute } from "../util/PathUtils"
import { ConnectionOptionsEnvReader } from "./options-reader/ConnectionOptionsEnvReader"

/**
* Reads connection options from the ormconfig.
@@ -257,18 +258,13 @@ export class ConnectionOptionsReader {
* Gets directory where configuration file should be located.
*/
protected get baseDirectory(): string {
if (this.options && this.options.root) return this.options.root

return appRootPath.path
return this.options?.root ?? appRootPath.path
}

/**
* Gets configuration file name.
*/
protected get baseConfigName(): string {
if (this.options && this.options.configName)
return this.options.configName

return "ormconfig"
return this.options?.configName ?? "ormconfig"
}
}
@@ -1,2 +0,0 @@
TYPEORM_CONNECTION = mysql
TYPEORM_DATABASE = test-env
@@ -1,2 +0,0 @@
TYPEORM_CONNECTION = mysql
TYPEORM_DATABASE = test-ormconfig-env
@@ -1,3 +0,0 @@
- type: "sqlite"
name: "file"
database: "test-yaml"
@@ -1,15 +1,16 @@
import "reflect-metadata"
import "../../utils/test-setup"
import { expect } from "chai"
import { Post } from "./entity/Post"
import { Category } from "./entity/Category"
import "reflect-metadata"

import { QueryRunner } from "../../../src"
import { DataSource } from "../../../src/data-source/DataSource"
import "../../utils/test-setup"
import {
closeTestingConnections,
createTestingConnections,
getTypeOrmConfig,
} from "../../utils/test-utils"
import { DataSource } from "../../../src/data-source/DataSource"
import { QueryRunner } from "../../../src"
import { Category } from "./entity/Category"
import { Post } from "./entity/Post"

const expectCurrentApplicationName = async (
queryRunner: QueryRunner,
@@ -22,18 +23,18 @@ const expectCurrentApplicationName = async (
}

describe("Connection replication", () => {
const ormConfigConnectionOptionsArray = getTypeOrmConfig()
const postgresOptions = ormConfigConnectionOptionsArray.find(
(options) => options.type == "postgres",
)
if (!postgresOptions) {
return
}

describe("after connection is established successfully", function () {
let connection: DataSource
beforeEach(async () => {
const ormConfigConnectionOptionsArray = getTypeOrmConfig()
const postgres = ormConfigConnectionOptionsArray.find(
(options) => options.type == "postgres",
)
if (!postgres)
throw new Error(
"need a postgres connection in the test connection options to test replication",
)

beforeEach(async () => {
connection = (
await createTestingConnections({
entities: [Post, Category],
@@ -42,15 +43,21 @@ describe("Connection replication", () => {
dropSchema: true,
driverSpecific: {
replication: {
master: { ...postgres, applicationName: "master" },
slaves: [{ ...postgres, applicationName: "slave" }],
master: {
...postgresOptions,
applicationName: "master",
},
slaves: [
{
...postgresOptions,
applicationName: "slave",
},
],
},
},
})
)[0]

if (!connection) return

const post = new Post()
post.title = "TypeORM Intro"

@@ -64,17 +71,11 @@ describe("Connection replication", () => {

afterEach(() => closeTestingConnections([connection]))

it("connection.isConnected should be true", () => {
if (!connection || connection.driver.options.type !== "postgres") {
return
}
it("connection.isInitialized should be true", () => {
connection.isInitialized.should.be.true
})

it("query runners should go to the master by default", async () => {
if (!connection || connection.driver.options.type !== "postgres") {
return
}
const queryRunner = connection.createQueryRunner()
expect(queryRunner.getReplicationMode()).to.equal("master")

@@ -83,9 +84,6 @@ describe("Connection replication", () => {
})

it("query runners can have their replication mode overridden", async () => {
if (!connection || connection.driver.options.type !== "postgres") {
return
}
let queryRunner = connection.createQueryRunner("master")
queryRunner.getReplicationMode().should.equal("master")
await expectCurrentApplicationName(queryRunner, "master")
@@ -98,9 +96,6 @@ describe("Connection replication", () => {
})

it("read queries should go to the slaves by default", async () => {
if (!connection || connection.driver.options.type !== "postgres") {
return
}
const result = await connection.manager
.createQueryBuilder(Post, "post")
.select("id")
@@ -113,9 +108,6 @@ describe("Connection replication", () => {
})

it("write queries should go to the master", async () => {
if (!connection || connection.driver.options.type !== "postgres") {
return
}
const result = await connection.manager
.createQueryBuilder(Post, "post")
.insert()
@@ -132,16 +124,8 @@ describe("Connection replication", () => {

describe("with custom replication default mode", function () {
let connection: DataSource
beforeEach(async () => {
const ormConfigConnectionOptionsArray = getTypeOrmConfig()
const postgres = ormConfigConnectionOptionsArray.find(
(options) => options.type == "postgres",
)
if (!postgres)
throw new Error(
"need a postgres connection in the test connection options to test replication",
)

beforeEach(async () => {
connection = (
await createTestingConnections({
entities: [Post, Category],
@@ -151,15 +135,21 @@ describe("Connection replication", () => {
driverSpecific: {
replication: {
defaultMode: "master",
master: { ...postgres, applicationName: "master" },
slaves: [{ ...postgres, applicationName: "slave" }],
master: {
...postgresOptions,
applicationName: "master",
},
slaves: [
{
...postgresOptions,
applicationName: "slave",
},
],
},
},
})
)[0]

if (!connection) return

const post = new Post()
post.title = "TypeORM Intro"

@@ -174,9 +164,6 @@ describe("Connection replication", () => {
afterEach(() => closeTestingConnections([connection]))

it("query runners should go to the master by default", async () => {
if (!connection || connection.driver.options.type !== "postgres") {
return
}
const queryRunner = connection.createQueryRunner()
expect(queryRunner.getReplicationMode()).to.equal("master")

@@ -185,9 +172,6 @@ describe("Connection replication", () => {
})

it("query runners can have their replication mode overridden", async () => {
if (!connection || connection.driver.options.type !== "postgres") {
return
}
let queryRunner = connection.createQueryRunner("master")
queryRunner.getReplicationMode().should.equal("master")
await expectCurrentApplicationName(queryRunner, "master")
@@ -200,9 +184,6 @@ describe("Connection replication", () => {
})

it("read queries should go to the master by default", async () => {
if (!connection || connection.driver.options.type !== "postgres") {
return
}
const result = await connection.manager
.createQueryBuilder(Post, "post")
.select("id")
||||
@ -15,7 +15,6 @@ describe("find options > find operators > ArrayContainedBy", () => {
|
||||
(connections = await createTestingConnections({
|
||||
__dirname,
|
||||
enabledDrivers: ["postgres", "cockroachdb"],
|
||||
// logging: true,
|
||||
})),
|
||||
)
|
||||
beforeEach(() => reloadTestingDatabases(connections))
|
||||
|
||||
@@ -1,12 +1,12 @@
import "../../../utils/test-setup"
import { DataSource, EntityManager } from "../../../../src"
import { ArrayContains } from "../../../../src/find-options/operator/ArrayContains"
import "../../../utils/test-setup"
import {
closeTestingConnections,
createTestingConnections,
reloadTestingDatabases,
} from "../../../utils/test-utils"
import { Post, PostStatus } from "./entity/Post"
import { ArrayContains } from "../../../../src/find-options/operator/ArrayContains"

describe("find options > find operators > ArrayContains", () => {
let connections: DataSource[]
@@ -15,7 +15,6 @@ describe("find options > find operators > ArrayContains", () => {
(connections = await createTestingConnections({
__dirname,
enabledDrivers: ["postgres", "cockroachdb"],
// logging: true,
})),
)
beforeEach(() => reloadTestingDatabases(connections))
@@ -1,12 +1,12 @@
import "../../../utils/test-setup"
import { DataSource, EntityManager } from "../../../../src"
import { ArrayOverlap } from "../../../../src/find-options/operator/ArrayOverlap"
import "../../../utils/test-setup"
import {
closeTestingConnections,
createTestingConnections,
reloadTestingDatabases,
} from "../../../utils/test-utils"
import { Post, PostStatus } from "./entity/Post"
import { ArrayOverlap } from "../../../../src/find-options/operator/ArrayOverlap"

describe("find options > find operators > ArrayOverlap", () => {
let connections: DataSource[]
@@ -15,7 +15,6 @@ describe("find options > find operators > ArrayOverlap", () => {
(connections = await createTestingConnections({
__dirname,
enabledDrivers: ["postgres"],
// logging: true,
})),
)
beforeEach(() => reloadTestingDatabases(connections))
@@ -1,12 +1,12 @@
import "../../../utils/test-setup"
import { expect } from "chai"
import { Record } from "./entity/Record"
import { DataSource } from "../../../../src"
import "../../../utils/test-setup"
import {
closeTestingConnections,
createTestingConnections,
reloadTestingDatabases,
} from "../../../utils/test-utils"
import { Record } from "./entity/Record"

describe("jsonb type", () => {
let connections: DataSource[]
@@ -15,7 +15,6 @@ describe("jsonb type", () => {
(connections = await createTestingConnections({
entities: [Record],
enabledDrivers: ["postgres"], // because only postgres supports jsonb type
// logging: true,
})),
)
beforeEach(() => reloadTestingDatabases(connections))
@@ -1,11 +1,11 @@
import "../../../utils/test-setup"
import { Post } from "./entity/Post"
import { DataSource } from "../../../../src"
import "../../../utils/test-setup"
import {
closeTestingConnections,
createTestingConnections,
reloadTestingDatabases,
} from "../../../utils/test-utils"
import { Post } from "./entity/Post"

describe("json > defaults", () => {
let connections: DataSource[]
@@ -14,7 +14,6 @@ describe("json > defaults", () => {
(connections = await createTestingConnections({
entities: [Post],
enabledDrivers: ["postgres"], // because only postgres supports jsonb type
// logging: true,
})),
)
beforeEach(() => reloadTestingDatabases(connections))
@@ -1,11 +1,11 @@
import "reflect-metadata"
import { UpdateResult } from "../../../../src"
import { DataSource } from "../../../../src/data-source/DataSource"
import {
closeTestingConnections,
createTestingConnections,
reloadTestingDatabases,
} from "../../../utils/test-utils"
import { DataSource } from "../../../../src/data-source/DataSource"
import { UpdateResult } from "../../../../src"
import { Post } from "./entity/Post"
import { PostBigInt } from "./entity/PostBigInt"
import { UserWithEmbededEntity } from "./entity/UserWithEmbededEntity"
@@ -181,7 +181,6 @@ describe("repository > decrement method", () => {
(connections = await createTestingConnections({
entities: [PostBigInt],
enabledDrivers: ["mysql", "mariadb", "postgres"],
// logging: true
})),
)
beforeEach(() => reloadTestingDatabases(connections))
@@ -1,11 +1,11 @@
import "reflect-metadata"
import { UpdateResult } from "../../../../src"
import { DataSource } from "../../../../src/data-source/DataSource"
import {
closeTestingConnections,
createTestingConnections,
reloadTestingDatabases,
} from "../../../utils/test-utils"
import { DataSource } from "../../../../src/data-source/DataSource"
import { UpdateResult } from "../../../../src"
import { Post } from "./entity/Post"
import { PostBigInt } from "./entity/PostBigInt"
import { UserWithEmbededEntity } from "./entity/UserWithEmbededEntity"
@@ -181,7 +181,6 @@ describe("repository > increment method", () => {
(connections = await createTestingConnections({
entities: [PostBigInt],
enabledDrivers: ["mysql", "mariadb", "postgres", "sap"],
// logging: true
})),
)
beforeEach(() => reloadTestingDatabases(connections))
@@ -1,11 +1,11 @@
import "reflect-metadata"
import { Category } from "./entity/Category"
import { DataSource } from "../../../../src/data-source/DataSource"
import {
closeTestingConnections,
createTestingConnections,
reloadTestingDatabases,
} from "../../../utils/test-utils"
import { Category } from "./entity/Category"
import { Product } from "./entity/Product"

describe("tree tables > materialized-path", () => {
@@ -14,7 +14,6 @@ describe("tree tables > materialized-path", () => {
async () =>
(connections = await createTestingConnections({
entities: [Product, Category],
// logging: true,
})),
)
beforeEach(() => reloadTestingDatabases(connections))
@@ -1,87 +0,0 @@
import { expect } from "chai"
import { OrmUtils } from "../../../src/util/OrmUtils"

describe("OrmUtils.mergeDeep", () => {
it("should handle simple values.", () => {
expect(OrmUtils.mergeDeep(1, 2)).to.equal(1)
expect(OrmUtils.mergeDeep(2, 1)).to.equal(2)
expect(OrmUtils.mergeDeep(2, 1, 1)).to.equal(2)
expect(OrmUtils.mergeDeep(1, 2, 1)).to.equal(1)
expect(OrmUtils.mergeDeep(1, 1, 2)).to.equal(1)
expect(OrmUtils.mergeDeep(2, 1, 2)).to.equal(2)
})

it("should handle ordering and indempotence.", () => {
const a = { a: 1 }
const b = { a: 2 }
expect(OrmUtils.mergeDeep(a, b)).to.deep.equal(b)
expect(OrmUtils.mergeDeep(b, a)).to.deep.equal(a)
expect(OrmUtils.mergeDeep(b, a, a)).to.deep.equal(a)
expect(OrmUtils.mergeDeep(a, b, a)).to.deep.equal(a)
expect(OrmUtils.mergeDeep(a, a, b)).to.deep.equal(b)
expect(OrmUtils.mergeDeep(b, a, b)).to.deep.equal(b)
const c = { a: 3 }
expect(OrmUtils.mergeDeep(a, b, c)).to.deep.equal(c)
expect(OrmUtils.mergeDeep(b, c, b)).to.deep.equal(b)
expect(OrmUtils.mergeDeep(c, a, a)).to.deep.equal(a)
expect(OrmUtils.mergeDeep(c, b, a)).to.deep.equal(a)
expect(OrmUtils.mergeDeep(a, c, b)).to.deep.equal(b)
expect(OrmUtils.mergeDeep(b, a, c)).to.deep.equal(c)
})

it("should skip nested promises in sources.", () => {
expect(OrmUtils.mergeDeep({}, { p: Promise.resolve() })).to.deep.equal(
{},
)
expect(
OrmUtils.mergeDeep({}, { p: { p: Promise.resolve() } }),
).to.deep.equal({ p: {} })
const a = { p: Promise.resolve(0) }
const b = { p: Promise.resolve(1) }
expect(OrmUtils.mergeDeep(a, {})).to.deep.equal(a)
expect(OrmUtils.mergeDeep(a, b)).to.deep.equal(a)
expect(OrmUtils.mergeDeep(b, a)).to.deep.equal(b)
expect(OrmUtils.mergeDeep(b, {})).to.deep.equal(b)
})

it("should merge moderately deep objects correctly.", () => {
const a = { a: { b: { c: { d: { e: 123, h: { i: 23 } } } } }, g: 19 }
const b = { a: { b: { c: { d: { f: 99 } }, f: 31 } } }
const c = {
a: { b: { c: { d: { e: 123, f: 99, h: { i: 23 } } }, f: 31 } },
g: 19,
}
expect(OrmUtils.mergeDeep(a, b)).to.deep.equal(c)
expect(OrmUtils.mergeDeep(b, a)).to.deep.equal(c)
expect(OrmUtils.mergeDeep(b, a, a)).to.deep.equal(c)
expect(OrmUtils.mergeDeep(a, b, a)).to.deep.equal(c)
expect(OrmUtils.mergeDeep(a, a, b)).to.deep.equal(c)
expect(OrmUtils.mergeDeep(b, a, b)).to.deep.equal(c)
})

it("should merge recursively deep objects correctly", () => {
const a: Record<string, any> = {}
const b: Record<string, any> = {}

a["b"] = b
a["a"] = a
b["a"] = a

expect(OrmUtils.mergeDeep({}, a))
})

it("should reference copy complex instances of classes.", () => {
class Foo {
recursive: Foo

constructor() {
this.recursive = this
}
}

const foo = new Foo()
const result = OrmUtils.mergeDeep({}, { foo })
expect(result).to.have.property("foo")
expect(result.foo).to.equal(foo)
})
})
@ -1,11 +1,11 @@
|
||||
import { assert } from "chai"
|
||||
import "reflect-metadata"
|
||||
import { DataSource } from "../../../src"
|
||||
import {
|
||||
createTestingConnections,
|
||||
closeTestingConnections,
|
||||
createTestingConnections,
|
||||
reloadTestingDatabases,
|
||||
} from "../../utils/test-utils"
|
||||
import { DataSource } from "../../../src"
|
||||
import { assert } from "chai"
|
||||
import { Dog } from "./entity/family"
|
||||
|
||||
describe("github issues > #10653 Default value in child table/entity column decorator for multiple table inheritance is ignored for inherited columns", () => {
|
||||
@ -16,7 +16,6 @@ describe("github issues > #10653 Default value in child table/entity column deco
|
||||
entities: [__dirname + "/entity/*{.js,.ts}"],
|
||||
schemaCreate: true,
|
||||
dropSchema: true,
|
||||
logging: true,
|
||||
})),
|
||||
)
|
||||
beforeEach(() => reloadTestingDatabases(dataSources))
|
||||
|
||||
@@ -1,10 +1,10 @@
import "reflect-metadata"
import {
createTestingConnections,
closeTestingConnections,
} from "../../utils/test-utils"
import { DataSource } from "../../../src/index.js"
import { expect } from "chai"
import "reflect-metadata"
import { DataSource } from "../../../src/index.js"
import {
closeTestingConnections,
createTestingConnections,
} from "../../utils/test-utils"

describe("github issues > #10626 Postgres CREATE INDEX CONCURRENTLY bug", () => {
let dataSources: DataSource[]
@@ -16,7 +16,6 @@ describe("github issues > #10626 Postgres CREATE INDEX CONCURRENTLY bug", () =>
schemaCreate: false,
dropSchema: true,
enabledDrivers: ["postgres"],
logging: true,
})),
)
@@ -1,13 +1,13 @@
import "reflect-metadata"
import { DataSource } from "../../../src"
import "../../utils/test-setup"
import {
closeTestingConnections,
createTestingConnections,
reloadTestingDatabases,
} from "../../utils/test-utils"
import { DataSource } from "../../../src"
import { Person } from "./entity/person"
import { Note } from "./entity/note"
import { Person } from "./entity/person"

describe("github issues > #2965 Reuse preloaded lazy relations", () => {
let connections: DataSource[]
@@ -15,8 +15,6 @@ describe("github issues > #2965 Reuse preloaded lazy relations", () => {
async () =>
(connections = await createTestingConnections({
entities: [__dirname + "/entity/*{.js,.ts}"],
// use for manual validation
// logging: true,
})),
)
beforeEach(() => reloadTestingDatabases(connections))
@@ -13,7 +13,6 @@ describe("github issues > #4782 mariadb driver wants to recreate create/update d
before(
async () =>
(connections = await createTestingConnections({
// logging: true,
entities: [__dirname + "/entity/*{.js,.ts}"],
enabledDrivers: ["mysql", "mariadb"],
})),
@@ -1,5 +1,5 @@
import "reflect-metadata"
import { expect } from "chai"
import "reflect-metadata"

import { DataSource, QueryRunner, Table } from "../../../src"
import {
@@ -40,7 +40,6 @@ describe("github issues > #6195 feature: fake migrations for existing tables", (
schemaCreate: false,
dropSchema: false,
migrations: [__dirname + "/migrations/**/*{.ts,.js}"],
// logging: true,
})

await reloadTestingDatabases(dataSources)
@@ -1,11 +1,11 @@
import "reflect-metadata"
import { Category } from "./entity/Category"
import { DataSource } from "../../../src"
import {
closeTestingConnections,
createTestingConnections,
reloadTestingDatabases,
} from "../../utils/test-utils"
import { Category } from "./entity/Category"

describe("github issues > #9534 materialized-path", () => {
let connections: DataSource[]
@@ -13,7 +13,6 @@ describe("github issues > #9534 materialized-path", () => {
async () =>
(connections = await createTestingConnections({
entities: [Category],
// logging: true,
})),
)
beforeEach(() => reloadTestingDatabases(connections))
@@ -1,5 +1,5 @@
import "reflect-metadata"
import { expect } from "chai"
import "reflect-metadata"

import { DataSource } from "../../../src"
//import { DataSource, TableColumn } from "../../../src"
@@ -9,8 +9,8 @@ import {
reloadTestingDatabases,
} from "../../utils/test-utils"

import { Foo } from "./entity/Foo"
import { Bar } from "./entity/Bar"
import { Foo } from "./entity/Foo"

describe("github issues > #9770 check for referencing foreign keys when altering a table using sqlite", () => {
let dataSources: DataSource[]
@@ -21,7 +21,6 @@ describe("github issues > #9770 check for referencing foreign keys when altering
enabledDrivers: ["sqlite", "better-sqlite3"],
schemaCreate: true,
dropSchema: true,
logging: true,
})
})
beforeEach(() => reloadTestingDatabases(dataSources))
@@ -1,28 +1,24 @@
import fs from "fs/promises"
import { expect } from "chai"
import { DataSourceOptions } from "../../../src/data-source/DataSourceOptions"
import { ConnectionOptionsReader } from "../../../src/connection/ConnectionOptionsReader"
import path from "path"
import fs from "fs/promises"

async function createDotenvFiles() {
// These files may not always exist
await fs.writeFile(
path.join(__dirname, "configs/.env"),
"TYPEORM_CONNECTION = mysql\nTYPEORM_DATABASE = test-env",
)
await fs.writeFile(
path.join(__dirname, "configs/ormconfig.env"),
"TYPEORM_CONNECTION = mysql\nTYPEORM_DATABASE = test-ormconfig-env",
)
}
import { ConnectionOptionsReader } from "../../../src/connection/ConnectionOptionsReader"
import { DataSourceOptions } from "../../../src/data-source/DataSourceOptions"

describe("ConnectionOptionsReader", () => {
beforeEach(() => {
delete process.env["TYPEORM_CONNECTION"]
delete process.env["TYPEORM_DATABASE"]
before(async () => {
// These files may not always exist
await fs.mkdir("./temp/configs", { recursive: true })
await fs.writeFile(
"./temp/configs/.env",
"TYPEORM_CONNECTION = mysql\nTYPEORM_DATABASE = test-env",
)
await fs.writeFile(
"./temp/configs/ormconfig.env",
"TYPEORM_CONNECTION = mysql\nTYPEORM_DATABASE = test-ormconfig-env",
)
})

after(() => {
afterEach(() => {
delete process.env.TYPEORM_CONNECTION
delete process.env.TYPEORM_DATABASE
})
@@ -85,10 +81,8 @@ describe("ConnectionOptionsReader", () => {
})

it("properly loads config from .env file", async () => {
await createDotenvFiles()

const connectionOptionsReader = new ConnectionOptionsReader({
root: __dirname,
root: "./temp",
configName: "configs/.env",
})
const [fileOptions]: DataSourceOptions[] =
@@ -98,10 +92,8 @@ describe("ConnectionOptionsReader", () => {
})

it("properly loads config from ormconfig.env file", async () => {
await createDotenvFiles()

const connectionOptionsReader = new ConnectionOptionsReader({
root: __dirname,
root: "./temp",
configName: "configs/ormconfig.env",
})
const [fileOptions]: DataSourceOptions[] =
@@ -111,10 +103,8 @@ describe("ConnectionOptionsReader", () => {
})

it("properly loads config ormconfig.env when given multiple choices", async () => {
await createDotenvFiles()

const connectionOptionsReader = new ConnectionOptionsReader({
root: path.join(__dirname, "configs"),
root: "./temp/configs",
})
const [fileOptions]: DataSourceOptions[] =
await connectionOptionsReader.all()
@ -1,75 +0,0 @@
import { OrmUtils } from "../../src/util/OrmUtils"
import { expect } from "chai"

describe(`orm-utils`, () => {
describe("parseSqlCheckExpression", () => {
it("parses a simple CHECK constraint", () => {
// Spaces between CHECK values
expect(
OrmUtils.parseSqlCheckExpression(
`CREATE TABLE "foo_table" (
"id" integer PRIMARY KEY AUTOINCREMENT NOT NULL,
"col" varchar CHECK("col" IN ('FOO', 'BAR', 'BAZ')) NOT NULL,
"some_other_col" integer NOT NULL
);`,
"col",
),
).to.have.same.members(["FOO", "BAR", "BAZ"])

// No spaces between CHECK values
expect(
OrmUtils.parseSqlCheckExpression(
`CREATE TABLE "foo_table" (
"id" integer PRIMARY KEY AUTOINCREMENT NOT NULL,
"col" varchar CHECK("col" IN ('FOO','BAR','BAZ')) NOT NULL,
"some_other_col" integer NOT NULL
);`,
"col",
),
).to.have.same.members(["FOO", "BAR", "BAZ"])
})

it("returns undefined when the column doesn't have a CHECK", () => {
expect(
OrmUtils.parseSqlCheckExpression(
`CREATE TABLE "foo_table" (
"id" integer PRIMARY KEY AUTOINCREMENT NOT NULL,
"col" varchar NOT NULL,
"some_other_col" integer NOT NULL
);`,
"col",
),
).to.equal(undefined)
})

it("parses a CHECK constraint with values containing special characters", () => {
expect(
OrmUtils.parseSqlCheckExpression(
`CREATE TABLE "foo_table" (
"id" integer PRIMARY KEY AUTOINCREMENT NOT NULL,
"col" varchar CHECK("col" IN (
'a,b',
',c,',
'd''d',
'''e''',
'f'',''f',
''')',
')'''
)
) NOT NULL,
"some_other_col" integer NOT NULL
);`,
"col",
),
).to.have.same.members([
"a,b",
",c,",
"d'd",
"'e'",
"f','f",
"')",
")'",
])
})
})
})
@ -1,6 +1,7 @@
import { expect } from "chai"
import fs from "fs/promises"
import path from "path"

import { importOrRequireFile } from "../../../src/util/ImportUtils"

describe("ImportUtils.importOrRequireFile", () => {
@ -20,7 +21,9 @@ describe("ImportUtils.importOrRequireFile", () => {

try {
await fs.rmdir(testDir, { recursive: true })
} catch {}
} catch {
// no-op
}

await fs.mkdir(srcDir, { recursive: true })

@ -59,7 +62,9 @@ describe("ImportUtils.importOrRequireFile", () => {

try {
await fs.rmdir(testDir, { recursive: true })
} catch {}
} catch {
// no-op
}

await fs.mkdir(srcDir, { recursive: true })

@ -93,7 +98,9 @@ describe("ImportUtils.importOrRequireFile", () => {

try {
await fs.rmdir(testDir, { recursive: true })
} catch {}
} catch {
// no-op
}

await fs.mkdir(srcDir, { recursive: true })

@ -124,7 +131,9 @@ describe("ImportUtils.importOrRequireFile", () => {

try {
await fs.rmdir(testDir, { recursive: true })
} catch {}
} catch {
// no-op
}

await fs.mkdir(srcDir, { recursive: true })

@ -148,7 +157,9 @@ describe("ImportUtils.importOrRequireFile", () => {

try {
await fs.rmdir(testDir, { recursive: true })
} catch {}
} catch {
// no-op
}

await fs.mkdir(testDir, { recursive: true })
163
test/unit/util/orm-utils.ts
Normal file
163
test/unit/util/orm-utils.ts
Normal file
@ -0,0 +1,163 @@
import { expect } from "chai"
import { OrmUtils } from "../../../src/util/OrmUtils"

describe(`OrmUtils`, () => {
describe("parseSqlCheckExpression", () => {
it("parses a simple CHECK constraint", () => {
// Spaces between CHECK values
expect(
OrmUtils.parseSqlCheckExpression(
`CREATE TABLE "foo_table" (
"id" integer PRIMARY KEY AUTOINCREMENT NOT NULL,
"col" varchar CHECK("col" IN ('FOO', 'BAR', 'BAZ')) NOT NULL,
"some_other_col" integer NOT NULL
);`,
"col",
),
).to.have.same.members(["FOO", "BAR", "BAZ"])

// No spaces between CHECK values
expect(
OrmUtils.parseSqlCheckExpression(
`CREATE TABLE "foo_table" (
"id" integer PRIMARY KEY AUTOINCREMENT NOT NULL,
"col" varchar CHECK("col" IN ('FOO','BAR','BAZ')) NOT NULL,
"some_other_col" integer NOT NULL
);`,
"col",
),
).to.have.same.members(["FOO", "BAR", "BAZ"])
})

it("returns undefined when the column doesn't have a CHECK", () => {
expect(
OrmUtils.parseSqlCheckExpression(
`CREATE TABLE "foo_table" (
"id" integer PRIMARY KEY AUTOINCREMENT NOT NULL,
"col" varchar NOT NULL,
"some_other_col" integer NOT NULL
);`,
"col",
),
).to.equal(undefined)
})

it("parses a CHECK constraint with values containing special characters", () => {
expect(
OrmUtils.parseSqlCheckExpression(
`CREATE TABLE "foo_table" (
"id" integer PRIMARY KEY AUTOINCREMENT NOT NULL,
"col" varchar CHECK("col" IN (
'a,b',
',c,',
'd''d',
'''e''',
'f'',''f',
''')',
')'''
)
) NOT NULL,
"some_other_col" integer NOT NULL
);`,
"col",
),
).to.have.same.members([
"a,b",
",c,",
"d'd",
"'e'",
"f','f",
"')",
")'",
])
})
})
describe("mergeDeep", () => {
it("should handle simple values.", () => {
expect(OrmUtils.mergeDeep(1, 2)).to.equal(1)
expect(OrmUtils.mergeDeep(2, 1)).to.equal(2)
expect(OrmUtils.mergeDeep(2, 1, 1)).to.equal(2)
expect(OrmUtils.mergeDeep(1, 2, 1)).to.equal(1)
expect(OrmUtils.mergeDeep(1, 1, 2)).to.equal(1)
expect(OrmUtils.mergeDeep(2, 1, 2)).to.equal(2)
})

it("should handle ordering and indempotence.", () => {
const a = { a: 1 }
const b = { a: 2 }
expect(OrmUtils.mergeDeep(a, b)).to.deep.equal(b)
expect(OrmUtils.mergeDeep(b, a)).to.deep.equal(a)
expect(OrmUtils.mergeDeep(b, a, a)).to.deep.equal(a)
expect(OrmUtils.mergeDeep(a, b, a)).to.deep.equal(a)
expect(OrmUtils.mergeDeep(a, a, b)).to.deep.equal(b)
expect(OrmUtils.mergeDeep(b, a, b)).to.deep.equal(b)
const c = { a: 3 }
expect(OrmUtils.mergeDeep(a, b, c)).to.deep.equal(c)
expect(OrmUtils.mergeDeep(b, c, b)).to.deep.equal(b)
expect(OrmUtils.mergeDeep(c, a, a)).to.deep.equal(a)
expect(OrmUtils.mergeDeep(c, b, a)).to.deep.equal(a)
expect(OrmUtils.mergeDeep(a, c, b)).to.deep.equal(b)
expect(OrmUtils.mergeDeep(b, a, c)).to.deep.equal(c)
})

it("should skip nested promises in sources.", () => {
expect(
OrmUtils.mergeDeep({}, { p: Promise.resolve() }),
).to.deep.equal({})
expect(
OrmUtils.mergeDeep({}, { p: { p: Promise.resolve() } }),
).to.deep.equal({ p: {} })
const a = { p: Promise.resolve(0) }
const b = { p: Promise.resolve(1) }
expect(OrmUtils.mergeDeep(a, {})).to.deep.equal(a)
expect(OrmUtils.mergeDeep(a, b)).to.deep.equal(a)
expect(OrmUtils.mergeDeep(b, a)).to.deep.equal(b)
expect(OrmUtils.mergeDeep(b, {})).to.deep.equal(b)
})

it("should merge moderately deep objects correctly.", () => {
const a = {
a: { b: { c: { d: { e: 123, h: { i: 23 } } } } },
g: 19,
}
const b = { a: { b: { c: { d: { f: 99 } }, f: 31 } } }
const c = {
a: { b: { c: { d: { e: 123, f: 99, h: { i: 23 } } }, f: 31 } },
g: 19,
}
expect(OrmUtils.mergeDeep(a, b)).to.deep.equal(c)
expect(OrmUtils.mergeDeep(b, a)).to.deep.equal(c)
expect(OrmUtils.mergeDeep(b, a, a)).to.deep.equal(c)
expect(OrmUtils.mergeDeep(a, b, a)).to.deep.equal(c)
expect(OrmUtils.mergeDeep(a, a, b)).to.deep.equal(c)
expect(OrmUtils.mergeDeep(b, a, b)).to.deep.equal(c)
})

it("should merge recursively deep objects correctly", () => {
const a: Record<string, unknown> = {}
const b: Record<string, unknown> = {}

a["b"] = b
a["a"] = a
b["a"] = a

expect(OrmUtils.mergeDeep({}, a))
})

it("should reference copy complex instances of classes.", () => {
class Foo {
recursive: Foo

constructor() {
this.recursive = this
}
}

const foo = new Foo()
const result = OrmUtils.mergeDeep({}, { foo })
expect(result).to.have.property("foo")
expect(result.foo).to.equal(foo)
})
})
})
@ -1,6 +1,6 @@
import { isAbsolute, toPortablePath } from "../../src/util/PathUtils"
import { isAbsolute, toPortablePath } from "../../../src/util/PathUtils"
import { expect } from "chai"
import { withPlatform } from "../utils/test-utils"
import { withPlatform } from "../../utils/test-utils"

describe(`path-utils`, () => {
describe("isAbsolute", () => {
@ -1,6 +1,6 @@
import { expect } from "chai"

import { VersionUtils } from "../../src/util/VersionUtils"
import { VersionUtils } from "../../../src/util/VersionUtils"

describe("VersionUtils", () => {
describe("isGreaterOrEqual", () => {
@ -1,10 +1,13 @@
import "source-map-support/register"
import "reflect-metadata"
import * as chai from "chai"

import chai from "chai"
import sinonChai from "sinon-chai"
import chaiAsPromised from "chai-as-promised"

// Tests assume UTC time zone when formatting/parsing dates.
process.env.TZ = "UTC"

chai.should()
chai.use(require("sinon-chai"))
chai.use(require("chai-as-promised"))
chai.use(sinonChai)
chai.use(chaiAsPromised)