Mirror of https://github.com/typeorm/typeorm.git
synced 2025-12-08 21:26:23 +00:00
Compare commits
43 Commits
bcee921ee0 ... d8444fc3ed

| SHA1 |
|---|
| d8444fc3ed |
| cb1284c8c0 |
| 9383799b3d |
| ea0f155532 |
| ade198c77c |
| b2ad548798 |
| 5fa8a0bf6c |
| 6eda13884e |
| 61379daf11 |
| d943a17639 |
| 6ed24f8235 |
| cad0921827 |
| dc74f5374e |
| 806ea0a502 |
| bec548a7d4 |
| 02e7b713ed |
| 6da0911a0b |
| 2681051f78 |
| e04ffd3f3d |
| 52a96ad68c |
| 5d02f06fcb |
| 5a28729b2e |
| ba3319d583 |
| 51fbcf4045 |
| bed7913623 |
| 6381c8d519 |
| 08a9397491 |
| 69dfc42eb9 |
| b639d33aee |
| 3ac605359d |
| 5b01c399ce |
| d7867ebff1 |
| 8692da2b69 |
| 7c55d32321 |
| 181154ab3c |
| 2671579793 |
| 1f19abb62c |
| 74522ffc82 |
| b9309098bc |
| dbfaad9e7c |
| 77fb94d419 |
| 336fef8f14 |
| 2446bd0d87 |

@@ -1,8 +1,9 @@
 {
     "all": true,
     "cache": false,
-    "exclude": ["**/*.d.ts"],
+    "exclude": ["node_modules", "**/*.d.ts"],
     "exclude-after-remap": true,
+    "extension": [".ts"],
     "include": ["build/compiled/src/**", "src/**"],
-    "reporter": "lcov"
+    "reporter": ["lcov"]
 }

283 .github/copilot-instructions.md (vendored, new file)
@@ -0,0 +1,283 @@
# GitHub Copilot Instructions for TypeORM

This document provides guidance for GitHub Copilot when working with the TypeORM codebase.

## Project Overview

TypeORM is a TypeScript-based Object-Relational Mapping (ORM) library that supports multiple databases including MySQL/MariaDB, PostgreSQL, MS SQL Server, Oracle, SAP HANA, SQLite, MongoDB, and Google Spanner. It implements both Active Record and Data Mapper patterns and runs on Node.js, Browser, React Native, and Electron platforms.

## Architecture & Structure

### Core Components

- **`src/data-source/`** - DataSource (formerly Connection) management
- **`src/entity-manager/`** - Entity management and operations
- **`src/repository/`** - Repository pattern implementation
- **`src/query-builder/`** - SQL query building
- **`src/decorator/`** - TypeScript decorators for entities, columns, relations
- **`src/driver/`** - Database-specific drivers
- **`src/metadata/`** - Entity metadata management
- **`src/schema-builder/`** - Schema creation and migration
- **`src/migration/`** - Database migration system
- **`src/subscriber/`** - Event subscriber system
- **`src/persistence/`** - Entity persistence logic

### Design Patterns

- **Active Record Pattern**: Entities have methods to save, remove, and query themselves
- **Data Mapper Pattern**: Repositories handle entity persistence separately from business logic
- **Decorator Pattern**: Extensive use of TypeScript decorators for metadata definition
- **Builder Pattern**: QueryBuilder for constructing complex queries
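
As a quick sketch of how the first two patterns differ in practice (the `Question` entity here is illustrative only):

```typescript
import { BaseEntity, Column, Entity, PrimaryGeneratedColumn } from "typeorm"

// Active Record: the entity itself carries persistence methods via BaseEntity.
@Entity()
export class Question extends BaseEntity {
    @PrimaryGeneratedColumn()
    id: number

    @Column()
    title: string
}

// Active Record style:
//   const question = Question.create({ title: "..." })
//   await question.save()
//
// Data Mapper style (same entity, persistence handled by a repository):
//   const repository = dataSource.getRepository(Question)
//   await repository.save(repository.create({ title: "..." }))
```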

## Coding Standards

### TypeScript Configuration

- Target: ES2021+ with CommonJS modules
- Decorators: `experimentalDecorators` and `emitDecoratorMetadata` enabled

### Code Style

- **Formatting**: Use Prettier with these settings:
    - No semicolons (`"semi": false`)
    - Arrow function parentheses always (`"arrowParens": "always"`)
    - Trailing commas everywhere (`"trailingComma": "all"`)
- **Linting**: ESLint with TypeScript support
    - Use `@typescript-eslint` rules
    - Warnings allowed for some `@typescript-eslint/no-*` rules
    - Unused variables starting with `_` are ignored
- **Naming Conventions**:
    - Classes: PascalCase (e.g., `DataSource`, `EntityManager`)
    - Interfaces: PascalCase (e.g., `ColumnOptions`, `RelationOptions`)
    - Variables/functions: camelCase
    - Constants: UPPER_SNAKE_CASE for true constants
    - Private members: Use standard camelCase (no underscore prefix)

### TypeScript Patterns

- Use explicit types for public APIs
- Prefer interfaces over type aliases for object shapes
- Use generics for reusable components
- Avoid `any` where possible; use `unknown` or proper types
- Use optional chaining (`?.`) and nullish coalescing (`??`) operators
- Leverage TypeScript utility types (`Partial<T>`, `Required<T>`, `Pick<T>`, etc.)
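
A compact, illustrative-only sketch combining several of these patterns (the `PageOptions` interface and helpers below are not TypeORM APIs):

```typescript
// Illustrative only: an interface for an object shape, a generic helper,
// `Partial<T>`, optional chaining, and nullish coalescing.
interface PageOptions {
    take: number
    skip: number
}

function withDefaults(options?: Partial<PageOptions>): PageOptions {
    return {
        take: options?.take ?? 10,
        skip: options?.skip ?? 0,
    }
}

function firstOrUndefined<T>(items: readonly T[]): T | undefined {
    return items.length > 0 ? items[0] : undefined
}
```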

## Testing

### Test Structure

Tests are organized in `test/` directory:

- **`test/functional/`** - Feature and integration tests organized by functionality (preferred)
- **`test/github-issues/`** - Tests for specific GitHub issues
- **`test/unit/`** - Unit tests for individual components
- **`test/utils/`** - Test utilities and helpers

**Note**: Prefer writing functional tests over per-issue tests.

### Test Writing Guidelines

1. **Use the standard test template**:

    ```typescript
    import "reflect-metadata"
    import { createTestingConnections, closeTestingConnections, reloadTestingDatabases } from "../../utils/test-utils"
    import { DataSource } from "../../../src/data-source/DataSource"
    import { expect } from "chai"

    describe("description of functionality", () => {
        let dataSources: DataSource[]
        before(async () => dataSources = await createTestingConnections({
            entities: [__dirname + "/entity/*{.js,.ts}"],
            schemaCreate: true,
            dropSchema: true,
        }))
        beforeEach(() => reloadTestingDatabases(dataSources))
        after(() => closeTestingConnections(dataSources))

        it("should do something specific", () => Promise.all(dataSources.map(async dataSource => {
            // Test implementation
        })))
    })
    ```

2. **Test Configuration**:
    - Tests run against multiple databases (as configured in `ormconfig.json`)
    - Each test should work across all supported databases unless database-specific
    - Place entity files in `./entity/` relative to test file for automatic loading
    - Use `Promise.all(dataSources.map(...))` pattern to test against all databases

3. **Test Naming**:
    - Use descriptive `describe()` blocks for features
    - Use "should..." format for `it()` descriptions
    - Reference GitHub issue numbers when fixing specific issues

4. **Running Tests**:
    - Full test suite: `npm test` (compiles then runs tests)
    - Fast iteration: `npm run test:fast` (runs without recompiling)
    - Specific tests: `npm run test:fast -- --grep "pattern"`
    - Watch mode: `npm run compile -- --watch` + `npm run test:fast`

## Database-Specific Considerations

### Multi-Database Support

When writing code or tests:

- Ensure compatibility across all supported databases
- Use driver-specific code only in `src/driver/` directory
- Test database-agnostic code against multiple databases
- Use `DataSource.options.type` to check database type when needed
- Be aware of SQL dialect differences (LIMIT vs TOP, etc.)
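
A small sketch of the `DataSource.options.type` check and the LIMIT/TOP dialect difference (the `firstRowsQuery` helper and the `post` table are illustrative only):

```typescript
import { DataSource } from "typeorm"

// Sketch: choose a dialect-specific "first N rows" query based on the driver type.
function firstRowsQuery(dataSource: DataSource, count: number): string {
    if (dataSource.options.type === "mssql") {
        return `SELECT TOP ${count} * FROM post` // SQL Server has no LIMIT
    }
    return `SELECT * FROM post LIMIT ${count}` // LIMIT works on most other drivers
}
```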

### Driver Implementation

Each driver in `src/driver/` implements common interfaces:

- Connection management
- Query execution
- Schema synchronization
- Type mapping
- Transaction handling

## Common Development Tasks

### Adding a New Feature

1. Create entities in appropriate test directory
2. Write tests first (TDD approach encouraged)
3. Implement feature in `src/`
4. Ensure tests pass across all databases
5. Update documentation if public API changes
6. Follow commit message conventions

### Adding a New Decorator

1. Create decorator file in `src/decorator/`
2. Create metadata args in `src/metadata-args/`
3. Update metadata builder in `src/metadata-builder/`
4. Export from `src/index.ts`
5. Add comprehensive tests
6. Update TypeScript type definitions if needed
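
A rough sketch of the decorator-factory shape created in step 1 (the `Labeled` decorator and its options are hypothetical, and the metadata-args registration from steps 2-3 is only indicated in comments):

```typescript
// Hypothetical decorator, for illustration only - not part of TypeORM's API.
export interface LabeledOptions {
    label?: string
}

export function Labeled(options?: LabeledOptions): PropertyDecorator {
    return function (object: Object, propertyName: string | symbol) {
        // In a real decorator, steps 2-3 happen here: build the metadata args
        // object defined in src/metadata-args/ from `object`, `propertyName`,
        // and `options`, and push it into the metadata args storage so the
        // metadata builder in src/metadata-builder/ can turn it into metadata.
    }
}
```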

### Working with Migrations

- Migrations are in `src/migration/`
- Migration files should be timestamped
- Support both up and down migrations
- Test migrations against all supported databases
- Ensure schema changes are reversible

## Build & Development Workflow

### Commands

- **Build**: `npm run compile` - Compiles TypeScript to `build/compiled/`
- **Package**: `npm run package` - Creates distribution in `build/package/`
- **Pack**: `npm run pack` - Creates `.tgz` file in `build/`
- **Test**: `npm test` - Compile and run all tests
- **Lint**: `npm run lint` - Run ESLint
- **Format**: `npm run format` - Run Prettier
- **Watch**: `npm run watch` - Watch mode for TypeScript compilation

### Development Setup

1. Install dependencies: `npm install`
2. Copy config: `cp ormconfig.sample.json ormconfig.json`
3. Configure database connections in `ormconfig.json`
4. Optionally use Docker: `docker-compose up` for database services

### Pre-commit Hooks

- Husky runs pre-commit hooks
- Lint-staged runs on staged files
- Format and lint checks must pass

## Contribution Guidelines

### Commit Message Format

Follow conventional commits:

```
<type>: <subject>

<body>

<footer>
```

**Types**: `feat`, `fix`, `docs`, `style`, `refactor`, `perf`, `test`, `build`, `chore`, `revert`

**Subject**:

- Use imperative, present tense
- Don't capitalize first letter
- No period at the end
- Max 100 characters per line

### Pull Request Requirements

- All tests must pass
- Include appropriate tests for changes
- Follow existing code style
- Update documentation for API changes
- Reference related GitHub issues
- Get approval before merging

## Common Patterns & Idioms

### Entity Definition

```typescript
@Entity()
export class User {
    @PrimaryGeneratedColumn()
    id: number

    @Column()
    name: string

    @OneToMany(() => Photo, photo => photo.user)
    photos: Photo[]
}
```

### Repository Usage

```typescript
const userRepository = dataSource.getRepository(User)
const user = await userRepository.findOne({ where: { id: 1 } })
```

### QueryBuilder

```typescript
const users = await dataSource
    .getRepository(User)
    .createQueryBuilder("user")
    .leftJoinAndSelect("user.photos", "photo")
    .where("user.name = :name", { name: "John" })
    .getMany()
```

### Transactions

```typescript
await dataSource.transaction(async (manager) => {
    await manager.save(user)
    await manager.save(photo)
})
```

## Important Notes

- Always import `reflect-metadata` before TypeORM
- Be careful with circular dependencies between entities
- Use lazy relations or forward references for circular entity references
- Connection pooling is handled automatically by drivers
- Be mindful of N+1 query problems; use joins or eager loading
- Repository methods are async; always use `await`
- Entity instances should be plain objects, not class instances with methods (Data Mapper pattern)
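
A brief sketch of the lazy-relation approach mentioned above (the `Category`/`Post` entities are illustrative; lazy relations are typed as `Promise` and resolved on first access):

```typescript
import { Entity, ManyToOne, OneToMany, PrimaryGeneratedColumn } from "typeorm"

@Entity()
export class Category {
    @PrimaryGeneratedColumn()
    id: number

    // Lazy relation: typed as a Promise and loaded only when awaited,
    // which also helps with circular references between entities.
    @OneToMany(() => Post, (post) => post.category)
    posts: Promise<Post[]>
}

@Entity()
export class Post {
    @PrimaryGeneratedColumn()
    id: number

    @ManyToOne(() => Category, (category) => category.posts)
    category: Promise<Category>
}

// Usage (illustrative): const posts = await category.posts
```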

## Resources

- [Main Documentation](https://typeorm.io)
- [Contributing Guide](../CONTRIBUTING.md)
- [Developer Guide](../DEVELOPER.md)
- [GitHub Repository](https://github.com/typeorm/typeorm)
- [Issue Tracker](https://github.com/typeorm/typeorm/issues)

28 .github/workflows/close-stale-issues.yml (vendored)
@@ -1,28 +0,0 @@
name: Close Stale Issues

on:
    workflow_dispatch:

jobs:
    close-stale-issues:
        runs-on: ubuntu-latest
        permissions:
            issues: write

        steps:
            - uses: actions/stale@v10
              with:
                  days-before-stale: 180 # 6 months
                  days-before-close: 0
                  days-before-pr-stale: -1
                  days-before-pr-close: -1
                  stale-issue-message: ""
                  close-issue-message: |
                      TypeORM has returned to active development after recently transitioning to new leadership ([see related announcement](https://typeorm.io/docs/future-of-typeorm/)).

                      As part of the new team's efforts to prioritise work on the project moving forward, it is necessary to clean up the backlog of stale issues.

                      🧹 This issue is being automatically closed because it has had no activity in the last 6 months.
                      If you believe this issue is still relevant, feel free to comment below and the maintainers may reopen it. Thank you for your contribution.
                  stale-issue-label: "stale"
                  exempt-issue-labels: "pinned,security"

2 .github/workflows/codeql.yml (vendored)
@@ -23,7 +23,7 @@ jobs:

     steps:
         - name: Checkout repository
-          uses: actions/checkout@v4
+          uses: actions/checkout@v5

         - name: Initialize CodeQL
           uses: github/codeql-action/init@v3

76 .github/workflows/commit-validation.yml (vendored)
@@ -1,76 +0,0 @@
name: Commit Validation
on:
    push:
        branches:
            - "**"
        paths-ignore:
            - "docs/**"
    pull_request:
        branches:
            - "**"
        paths-ignore:
            - "docs/**"
    workflow_dispatch:

jobs:
    formatting:
        if: ${{ (github.event_name != 'pull_request') || (github.event.pull_request.head.repo.fork) }}
        runs-on: ubuntu-latest

        steps:
            - uses: actions/checkout@v4
            - uses: actions/setup-node@v4
              with:
                  node-version: 22
                  cache: "npm"

            - run: npm ci
            - run: npm run lint
            - run: npm run format:ci

    build:
        if: ${{ (github.event_name != 'pull_request') || (github.event.pull_request.head.repo.fork) }}
        runs-on: ubuntu-latest

        steps:
            - uses: actions/checkout@v4
            - uses: actions/setup-node@v4
              with:
                  node-version: 22
                  cache: "npm"

            - run: npm ci
            - run: npm run compile

            - name: Upload build
              uses: actions/upload-artifact@v4
              with:
                  name: build
                  path: build/
                  retention-days: 1

    tests-linux:
        needs: build
        strategy:
            fail-fast: false
            matrix:
                node-version: [20, 24]
        uses: ./.github/workflows/tests-linux.yml
        with:
            node-version: ${{matrix.node-version}}

    tests-windows:
        needs: build
        uses: ./.github/workflows/tests-windows.yml
        with:
            node-version: 22

    # Run with most databases possible to provide the coverage of the tests
    coverage:
        runs-on: ubuntu-latest
        needs: [tests-linux, tests-windows]
        steps:
            - name: Coveralls Finished
              uses: coverallsapp/github-action@v2
              with:
                  parallel-finished: true

9 .github/workflows/docsearch.yml (vendored)
@@ -6,15 +6,14 @@ on:
         - master

 jobs:
-    index_docs:
+    index-docs:
         if: ${{ !github.event.repository.fork }}
         runs-on: ubuntu-latest

         steps:
-            - name: Checkout code
-              uses: actions/checkout@v4
+            - uses: actions/checkout@v5

-            - name: Index docs to Typesense
-              run: |
+            - run: |
                  docker run \
                      -e TYPESENSE_API_KEY=${{ secrets.TYPESENSE_API_KEY }} \
                      -e TYPESENSE_HOST="${{ secrets.TYPESENSE_HOST }}" \

7 .github/workflows/preview.yml (vendored)
@@ -15,15 +15,14 @@ jobs:
     runs-on: ubuntu-latest

     steps:
-        - uses: actions/checkout@v4
+        - uses: actions/checkout@v5
          with:
              fetch-depth: 0

        - name: Use Node.js
-         uses: actions/setup-node@v4
+         uses: actions/setup-node@v5
          with:
              cache: "npm"
-             node-version: 22
+             node-version-file: .nvmrc

        - name: Install dependencies
          run: npm ci

6 .github/workflows/publish-package.yml (vendored)
@@ -11,10 +11,10 @@ jobs:
         contents: read
         id-token: write
     steps:
-        - uses: actions/checkout@v4
-        - uses: actions/setup-node@v4
+        - uses: actions/checkout@v5
+        - uses: actions/setup-node@v5
          with:
-             node-version: 22
+             node-version-file: .nvmrc
              registry-url: "https://registry.npmjs.org"
        - run: npm ci
        - run: npm run package

83 .github/workflows/tests-linux.yml (vendored)
@@ -21,18 +21,17 @@ jobs:
         COCKROACH_ARGS: "start-single-node --insecure --cache=1GB --store=type=mem,size=4GB"

     steps:
-        - uses: actions/checkout@v4
-        - uses: actions/setup-node@v4
+        - uses: actions/checkout@v5
+        - uses: actions/setup-node@v5
          with:
              node-version: ${{ inputs.node-version }}
              cache: "npm"
        - uses: actions/download-artifact@v4
          with:
              name: build
              path: build/
        - run: npm ci
        - run: cp .github/workflows/test/cockroachdb.ormconfig.json ormconfig.json
-       - run: npx nyc npm run test:ci
+       - run: npx c8 npm run test:ci
        - name: Coveralls Parallel
          uses: coverallsapp/github-action@v2
          with:
@@ -44,23 +43,22 @@
     services:
         mongodb:
-            image: mongo:5.0.31
+            image: mongo:8
             ports:
                 - "27017:27017"

     steps:
-        - uses: actions/checkout@v4
-        - uses: actions/setup-node@v4
+        - uses: actions/checkout@v5
+        - uses: actions/setup-node@v5
          with:
              node-version: ${{ inputs.node-version }}
              cache: "npm"
        - uses: actions/download-artifact@v4
          with:
              name: build
              path: build/
        - run: npm ci
        - run: cp .github/workflows/test/mongodb.ormconfig.json ormconfig.json
-       - run: npx nyc npm run test:ci
+       - run: npx c8 npm run test:ci
        - name: Coveralls Parallel
          uses: coverallsapp/github-action@v2
          with:
@@ -72,26 +70,26 @@
     services:
         mssql:
-            image: "mcr.microsoft.com/mssql/server:2022-latest"
+            image: "mcr.microsoft.com/mssql/server:2025-latest"
             ports:
                 - "1433:1433"
             env:
                 SA_PASSWORD: "Admin12345"
                 ACCEPT_EULA: "Y"
                 MSSQL_PID: "Express"

     steps:
-        - uses: actions/checkout@v4
-        - uses: actions/setup-node@v4
+        - uses: actions/checkout@v5
+        - uses: actions/setup-node@v5
          with:
              node-version: ${{ inputs.node-version }}
              cache: "npm"
        - uses: actions/download-artifact@v4
          with:
              name: build
              path: build/
        - run: npm ci
        - run: cp .github/workflows/test/mssql.ormconfig.json ormconfig.json
-       - run: npx nyc npm run test:ci
+       - run: npx c8 npm run test:ci
        - name: Coveralls Parallel
          uses: coverallsapp/github-action@v2
          with:
@@ -122,18 +120,17 @@
         MYSQL_DATABASE: "test"

     steps:
-        - uses: actions/checkout@v4
-        - uses: actions/setup-node@v4
+        - uses: actions/checkout@v5
+        - uses: actions/setup-node@v5
          with:
              node-version: ${{ inputs.node-version }}
              cache: "npm"
        - uses: actions/download-artifact@v4
          with:
              name: build
              path: build/
        - run: npm ci
        - run: cp .github/workflows/test/mysql-mariadb.ormconfig.json ormconfig.json
-       - run: npx nyc npm run test:ci
+       - run: npx c8 npm run test:ci
        - name: Coveralls Parallel
          uses: coverallsapp/github-action@v2
          with:
@@ -164,18 +161,17 @@
         MYSQL_DATABASE: "test"

     steps:
-        - uses: actions/checkout@v4
-        - uses: actions/setup-node@v4
+        - uses: actions/checkout@v5
+        - uses: actions/setup-node@v5
          with:
              node-version: ${{ inputs.node-version }}
              cache: "npm"
        - uses: actions/download-artifact@v4
          with:
              name: build
              path: build/
        - run: npm ci
        - run: cp .github/workflows/test/mysql-mariadb-latest.ormconfig.json ormconfig.json
-       - run: npx nyc npm run test:ci
+       - run: npx c8 npm run test:ci
        - name: Coveralls Parallel
          uses: coverallsapp/github-action@v2
          with:
@@ -186,18 +182,17 @@
     runs-on: ubuntu-latest

     steps:
-        - uses: actions/checkout@v4
-        - uses: actions/setup-node@v4
+        - uses: actions/checkout@v5
+        - uses: actions/setup-node@v5
          with:
              node-version: ${{ inputs.node-version }}
              cache: "npm"
        - uses: actions/download-artifact@v4
          with:
              name: build
              path: build/
        - run: npm ci
        - run: cp .github/workflows/test/better-sqlite3.ormconfig.json ormconfig.json
-       - run: npx nyc npm run test:ci
+       - run: npx c8 npm run test:ci
        - name: Coveralls Parallel
          uses: coverallsapp/github-action@v2
          with:
@@ -208,18 +203,17 @@
     runs-on: ubuntu-latest

     steps:
-        - uses: actions/checkout@v4
-        - uses: actions/setup-node@v4
+        - uses: actions/checkout@v5
+        - uses: actions/setup-node@v5
          with:
              node-version: ${{ inputs.node-version }}
              cache: "npm"
        - uses: actions/download-artifact@v4
          with:
              name: build
              path: build/
        - run: npm ci
        - run: cp .github/workflows/test/sqlite.ormconfig.json ormconfig.json
-       - run: npx nyc npm run test:ci
+       - run: npx c8 npm run test:ci
        - name: Coveralls Parallel
          uses: coverallsapp/github-action@v2
          with:
@@ -230,18 +224,17 @@
     runs-on: ubuntu-latest

     steps:
-        - uses: actions/checkout@v4
-        - uses: actions/setup-node@v4
+        - uses: actions/checkout@v5
+        - uses: actions/setup-node@v5
          with:
              node-version: ${{ inputs.node-version }}
              cache: "npm"
        - uses: actions/download-artifact@v4
          with:
              name: build
              path: build/
        - run: npm ci
        - run: cp .github/workflows/test/sqljs.ormconfig.json ormconfig.json
-       - run: npx nyc npm run test:ci
+       - run: npx c8 npm run test:ci
        - name: Coveralls Parallel
          uses: coverallsapp/github-action@v2
          with:
@@ -271,18 +264,17 @@
         --health-retries 5

     steps:
-        - uses: actions/checkout@v4
-        - uses: actions/setup-node@v4
+        - uses: actions/checkout@v5
+        - uses: actions/setup-node@v5
          with:
              node-version: ${{ inputs.node-version }}
              cache: "npm"
        - uses: actions/download-artifact@v4
          with:
              name: build
              path: build/
        - run: npm ci
        - run: cp .github/workflows/test/postgres.ormconfig.json ormconfig.json
-       - run: npx nyc npm run test:ci
+       - run: npx c8 npm run test:ci
        - name: Coveralls Parallel
          uses: coverallsapp/github-action@v2
          with:
@@ -293,14 +285,13 @@
     runs-on: ubuntu-latest

     steps:
-        - uses: actions/checkout@v4
+        - uses: actions/checkout@v5

        - run: docker compose up oracle --detach

-       - uses: actions/setup-node@v4
+       - uses: actions/setup-node@v5
          with:
              node-version: ${{ inputs.node-version }}
              cache: "npm"
        - uses: actions/download-artifact@v4
          with:
              name: build
@@ -309,7 +300,8 @@
        - run: npm ci
        - run: cat ormconfig.sample.json | jq 'map(select(.name == "oracle"))' > ormconfig.json
        - run: docker compose up oracle --no-recreate --wait
-       - run: npx nyc npm run test:ci
+       - run: sleep 10
+       - run: npx c8 npm run test:ci

        - name: Coveralls Parallel
          uses: coverallsapp/github-action@v2
@@ -321,14 +313,13 @@
     runs-on: ubuntu-latest

     steps:
-        - uses: actions/checkout@v4
+        - uses: actions/checkout@v5

        - run: docker compose up hanaexpress --detach

-       - uses: actions/setup-node@v4
+       - uses: actions/setup-node@v5
          with:
              node-version: ${{ inputs.node-version }}
              cache: "npm"
        - uses: actions/download-artifact@v4
          with:
              name: build
@@ -337,7 +328,7 @@
        - run: npm ci
        - run: cat ormconfig.sample.json | jq 'map(select(.name == "hanaexpress"))' > ormconfig.json
        - run: docker compose up hanaexpress --no-recreate --wait
-       - run: npx nyc npm run test:ci
+       - run: npx c8 npm run test:ci

        - name: Coveralls Parallel
          uses: coverallsapp/github-action@v2

21 .github/workflows/tests-windows.yml (vendored)
@@ -11,11 +11,10 @@ jobs:
     runs-on: windows-latest

     steps:
-        - uses: actions/checkout@v4
-        - uses: actions/setup-node@v4
+        - uses: actions/checkout@v5
+        - uses: actions/setup-node@v5
          with:
              node-version: ${{ inputs.node-version }}
              cache: "npm"
        - uses: actions/download-artifact@v4
          with:
              name: build
@@ -23,7 +22,7 @@
        - run: npm ci
        - run: cp .github/workflows/test/better-sqlite3.ormconfig.json ormconfig.json
-       - run: npx nyc npm run test:ci
+       - run: npx c8 npm run test:ci

        - name: Coveralls Parallel
          uses: coverallsapp/github-action@v2
@@ -35,11 +34,10 @@
     runs-on: windows-latest

     steps:
-        - uses: actions/checkout@v4
-        - uses: actions/setup-node@v4
+        - uses: actions/checkout@v5
+        - uses: actions/setup-node@v5
          with:
              node-version: ${{ inputs.node-version }}
              cache: "npm"
        - uses: actions/download-artifact@v4
          with:
              name: build
@@ -47,7 +45,7 @@
        - run: npm ci
        - run: cp .github/workflows/test/sqlite.ormconfig.json ormconfig.json
-       - run: npx nyc npm run test:ci
+       - run: npx c8 npm run test:ci

        - name: Coveralls Parallel
          uses: coverallsapp/github-action@v2
@@ -59,11 +57,10 @@
     runs-on: windows-latest

     steps:
-        - uses: actions/checkout@v4
-        - uses: actions/setup-node@v4
+        - uses: actions/checkout@v5
+        - uses: actions/setup-node@v5
          with:
              node-version: ${{ inputs.node-version }}
              cache: "npm"
        - uses: actions/download-artifact@v4
          with:
              name: build
@@ -71,7 +68,7 @@
        - run: npm ci
        - run: cp .github/workflows/test/sqljs.ormconfig.json ormconfig.json
-       - run: npx nyc npm run test:ci
+       - run: npx c8 npm run test:ci

        - name: Coveralls Parallel
          uses: coverallsapp/github-action@v2

131 .github/workflows/tests.yml (vendored, new file)
@@ -0,0 +1,131 @@
name: Tests
on:
    push:
        branches:
            - master
            - next
    pull_request:

jobs:
    detect-changes:
        runs-on: ubuntu-latest
        outputs:
            changes: ${{ steps.detect-changes.outputs.changes }}
        steps:
            - uses: actions/checkout@v4

            - uses: dorny/paths-filter@v3
              id: detect-changes
              with:
                  filters: |
                      package-json: &package-json
                          - package.json
                          - package-lock.json

                      docs: &docs
                          - docs/**/*

                      src: &src
                          - extra/**/*.js
                          - src/**/*.ts
                          - gulpfile.js
                          - tsconfig.json
                          - *package-json

                      src-or-tests: &src-or-tests
                          - *src
                          - .github/workflows/test/**/*
                          - .github/workflows/test*.yml
                          - .mocharc.json
                          - .nvmrc
                          - .nycrc.json

                      lint: &lint
                          - *src-or-tests
                          - .prettierrc.json
                          - eslint.config.mjs

    formatting:
        if: contains(needs.detect-changes.outputs.changes, 'lint')
        needs: detect-changes
        runs-on: ubuntu-latest
        steps:
            - uses: actions/checkout@v5
            - uses: actions/setup-node@v5
              with:
                  node-version-file: .nvmrc
            - run: npm ci
            - run: npm run lint
            - run: npm run format:ci

    docs:
        if: contains(needs.detect-changes.outputs.changes, 'docs')
        needs: detect-changes
        runs-on: ubuntu-latest
        defaults:
            run:
                working-directory: ./docs
        steps:
            - uses: actions/checkout@v5
            - uses: actions/setup-node@v5
              with:
                  node-version-file: .nvmrc
            - run: npm ci
            - run: npm run build

    build:
        if: contains(needs.detect-changes.outputs.changes, 'src-or-tests')
        needs: detect-changes
        runs-on: ubuntu-latest
        steps:
            - uses: actions/checkout@v5
            - uses: actions/setup-node@v5
              with:
                  node-version-file: .nvmrc
            - run: npm ci
            - run: npm run compile
            - uses: actions/upload-artifact@v4
              with:
                  name: build
                  path: build/
                  retention-days: 1

    tests-linux:
        if: contains(needs.detect-changes.outputs.changes, 'src-or-tests')
        needs: [detect-changes, build]
        strategy:
            fail-fast: false
            matrix:
                node-version: [20, 24]
        uses: ./.github/workflows/tests-linux.yml
        with:
            node-version: ${{matrix.node-version}}

    tests-windows:
        if: contains(needs.detect-changes.outputs.changes, 'src-or-tests')
        needs: [detect-changes, build]
        uses: ./.github/workflows/tests-windows.yml
        with:
            node-version: 22

    coverage:
        if: contains(needs.detect-changes.outputs.changes, 'src-or-tests')
        runs-on: ubuntu-latest
        needs: [detect-changes, tests-linux, tests-windows]
        steps:
            - uses: coverallsapp/github-action@v2
              with:
                  parallel-finished: true

    all-passed:
        runs-on: ubuntu-latest
        if: always()
        needs:
            - build
            - coverage
            - docs
            - formatting
            - tests-linux
            - tests-windows
        steps:
            - run: exit ${{ contains(needs.*.result, 'failure') && 1 || 0 }}

5 .gitignore (vendored)
@@ -3,12 +3,11 @@
 ._*

 ### Node ###
+npm-debug.log*
 build/
 coverage/
+*.lcov
-.nyc_output/
 node_modules/
-npm-debug.log*
-*.lcov

 ### VisualStudioCode ###
 .vscode/*

14 .pr_agent.toml (new file)
@@ -0,0 +1,14 @@
[github_app]
pr_commands = [
    "/review",
    "/improve",
]

handle_push_trigger = true
push_commands = [
    "/improve",
]

[auto_best_practices]
enable_auto_best_practices = true
utilize_auto_best_practices = true

@@ -18,8 +18,7 @@ If you have a question or want community support:

 ## <a name="issue"></a> Found a security vulnerability?

-If you find a security vulnerability or something that should be discussed personally,
-please contact me within my [email](https://github.com/typeorm/typeorm/blob/master/package.json#L10).
+If you find a security vulnerability or something that should be discussed privately, please contact us at [maintainers@typeorm.io](mailto:maintainers@typeorm.io).

 ## <a name="issue"></a> Found a Bug?

12 DEVELOPER.md
@@ -53,7 +53,17 @@ cd typeorm
 git remote add upstream https://github.com/typeorm/typeorm.git
 ```

-## Installing NPM Modules
+## Node
+
+You should have node installed in the version described in [.nvmrc](.nvmrc).
+
+It is recommended to configure your OS to automatically switch to use this version whenever you enter project folder. This can be achieved in many ways:
+
+* [`fnm`](https://github.com/Schniz/fnm)
+* [`zsh-nvm`](https://github.com/lukechilds/zsh-nvm#auto-use)
+* [`asdf`](https://asdf-vm.com) with `asdf-nodejs` plugin and [`legacy_version_file = true`](https://asdf-vm.com/manage/configuration.html#legacy-version-file) option
+
+## Installing package dependencies

 Install all TypeORM dependencies by running this command:

1208 README-zh_CN.md (file diff suppressed because it is too large)

36 README.md
@@ -8,11 +8,11 @@
 </a>
 <br>
 <br>
-<a href="https://www.npmjs.com/package/typeorm"><img src="https://img.shields.io/npm/v/typeorm" alt="NPM Version" /></a>
-<a href="https://www.npmjs.com/package/typeorm"><img src="https://img.shields.io/npm/dm/typeorm" alt="NPM Downloads" /></a>
-<a href="https://github.com/typeorm/typeorm/actions/workflows/commit-validation.yml?query=branch%3Amaster"><img src="https://github.com/typeorm/typeorm/actions/workflows/commit-validation.yml/badge.svg?branch=master" alt="Commit Validation"/></a>
-<a href="https://coveralls.io/github/typeorm/typeorm?branch=master"><img src="https://coveralls.io/repos/github/typeorm/typeorm/badge.svg?branch=master" alt="Coverage Status" /></a>
-<a href=""><img src="https://img.shields.io/badge/License-MIT-teal.svg" alt="MIT License" /></a>
+<a href="https://www.npmjs.com/package/typeorm"><img src="https://img.shields.io/npm/v/typeorm" alt="NPM Version"/></a>
+<a href="https://www.npmjs.com/package/typeorm"><img src="https://img.shields.io/npm/dm/typeorm" alt="NPM Downloads"/></a>
+<a href="https://github.com/typeorm/typeorm/actions/workflows/tests.yml?query=branch%3Amaster"><img src="https://github.com/typeorm/typeorm/actions/workflows/tests.yml/badge.svg?branch=master" alt="Commit Validation"/></a>
+<a href="https://coveralls.io/github/typeorm/typeorm?branch=master"><img src="https://coveralls.io/repos/github/typeorm/typeorm/badge.svg?branch=master" alt="Coverage Status"/></a>
+<a href=""><img src="https://img.shields.io/badge/License-MIT-teal.svg" alt="MIT License"/></a>
 <br>
 <br>
 </div>
@@ -201,12 +201,28 @@ This project exists thanks to all the people who contribute:

 ## Sponsors

-Open source is hard and time-consuming. If you want to invest in TypeORM's future, you can become a sponsor and allow our core team to spend more time on TypeORM's improvements and new features. [Become a sponsor](https://opencollective.com/typeorm)
+Open source is hard and time-consuming. If you want to invest in TypeORM's future, you can become a sponsor and allow our core team to spend more time on TypeORM's improvements and new features.

-<a href="https://opencollective.com/typeorm" target="_blank"><img src="https://opencollective.com/typeorm/tiers/sponsor.svg?width=890"></a>
+### Champion

-## Gold Sponsors
+Become a champion sponsor and get premium technical support from our core contributors. [Become a champion](https://opencollective.com/typeorm)

-Become a gold sponsor and get premium technical support from our core contributors. [Become a gold sponsor](https://opencollective.com/typeorm)
+<a href="https://opencollective.com/typeorm" target="_blank"><img src="https://opencollective.com/typeorm/tiers/gold-sponsor.svg?avatarHeight=36"></a>

-<a href="https://opencollective.com/typeorm" target="_blank"><img src="https://opencollective.com/typeorm/tiers/gold-sponsor.svg?width=890"></a>
+### Supporter

+Support TypeORM's development with a monthly contribution. [Become a supporter](https://opencollective.com/typeorm)

+<a href="https://opencollective.com/typeorm" target="_blank"><img src="https://opencollective.com/typeorm/tiers/love.svg?avatarHeight=36"></a>

+### Community

+Join our community of supporters and help sustain TypeORM. [Become a community supporter](https://opencollective.com/typeorm)

+<a href="https://opencollective.com/typeorm" target="_blank"><img src="https://opencollective.com/typeorm/tiers/like.svg?avatarHeight=36"></a>

+### Sponsor

+Make a one-time or recurring contribution of your choice. [Become a sponsor](https://opencollective.com/typeorm)

+<a href="https://opencollective.com/typeorm" target="_blank"><img src="https://opencollective.com/typeorm/tiers/sponsor.svg?avatarHeight=36"></a>

1205 README_ko.md (file diff suppressed because it is too large)

@@ -73,13 +73,14 @@ services:

     # mssql
     mssql:
-        image: "mcr.microsoft.com/mssql/server:2022-latest"
+        image: "mcr.microsoft.com/mssql/server:2025-latest"
         container_name: "typeorm-mssql"
         ports:
             - "1433:1433"
         environment:
             SA_PASSWORD: "Admin12345"
             ACCEPT_EULA: "Y"
             MSSQL_PID: "Express"

     # cockroachdb
     cockroachdb:
@@ -142,7 +143,7 @@ services:

     # mongodb
     mongodb:
-        image: "mongo:5.0.31"
+        image: "mongo:8"
         container_name: "typeorm-mongodb"
         ports:
             - "27017:27017"

1 docs/.gitignore (vendored)
@@ -8,6 +8,7 @@
 .docusaurus
 .cache-loader
 llms.txt
+llms-full.txt

 # Misc
 .DS_Store

@@ -46,7 +46,7 @@ export class User {

 ## Unique indices

-To create an unique index you need to specify `{ unique: true }` in the index options:
+To create a unique index you need to specify `{ unique: true }` in the index options:

 > Note: CockroachDB stores unique indices as `UNIQUE` constraints

@@ -133,10 +133,10 @@ export class Thing {

 ## Concurrent creation

-In order to avoid having to obtain an access exclusive lock when creating and dropping indexes in postgres, you may create them using the CONCURRENTLY modifier.
-If you want use the concurrent option, you need set `migrationsTransactionMode: none` between data source options.
+In order to avoid having to obtain an ACCESS EXCLUSIVE lock when creating and dropping indexes in Postgres, you may create them using the CONCURRENTLY modifier.
+If you want to use the concurrent option, you need to set `migrationsTransactionMode: none` in your data source options.

-Typeorm supports generating SQL with this option if when the concurrent option is specified on the index.
+TypeORM supports generating SQL with this option when the concurrent option is specified on the index.

 ```typescript
 @Index(["firstName", "middleName", "lastName"], { concurrent: true })
@@ -146,10 +146,10 @@ For more information see the [Postgres documentation](https://www.postgresql.org

 ## Disabling synchronization

-TypeORM does not support some index options and definitions (e.g. `lower`, `pg_trgm`) because of lot of different database specifics and multiple
-issues with getting information about exist database indices and synchronizing them automatically. In such cases you should create index manually
-(for example in the migrations) with any index signature you want. To make TypeORM ignore these indices during synchronization use `synchronize: false`
-option on `@Index` decorator.
+TypeORM does not support some index options and definitions (e.g. `lower`, `pg_trgm`) due to many database-specific differences and multiple
+issues with getting information about existing database indices and synchronizing them automatically. In such cases you should create the index manually
+(for example, in [the migrations](../migrations/01-why.md)) with any index signature you want. To make TypeORM ignore these indices during synchronization, use `synchronize: false`
+option on the `@Index` decorator.

 For example, you create an index with case-insensitive comparison:

@@ -91,7 +91,7 @@ export class Post {
 ### `@BeforeRemove`

 You can define a method with any name in the entity and mark it with `@BeforeRemove`
-and TypeORM will call it before a entity is removed using repository/manager `remove`.
+and TypeORM will call it before an entity is removed using repository/manager `remove`.
 Example:

 ```typescript
@@ -123,7 +123,7 @@ export class Post {
 ### `@BeforeSoftRemove`

 You can define a method with any name in the entity and mark it with `@BeforeSoftRemove`
-and TypeORM will call it before a entity is soft removed using repository/manager `softRemove`.
+and TypeORM will call it before an entity is soft removed using repository/manager `softRemove`.
 Example:

 ```typescript
@@ -155,7 +155,7 @@ export class Post {
 ### `@BeforeRecover`

 You can define a method with any name in the entity and mark it with `@BeforeRecover`
-and TypeORM will call it before a entity is recovered using repository/manager `recover`.
+and TypeORM will call it before an entity is recovered using repository/manager `recover`.
 Example:

 ```typescript

@@ -124,66 +124,15 @@ typeorm subscriber:create path-to-subscriber-dir/subscriber

 Learn more about [Subscribers](./4-listeners-and-subscribers.md).

-## Create a new migration
+## Manage migrations

-You can create a new migration using CLI:
+* `typeorm migration:create` - [create](../migrations/03-creating.md) empty migration
+* `typeorm migration:generate` - [generate](../migrations/04-generating.md) migration comparing entities with actual database schema
+* `typeorm migration:run` - [execute](../migrations/05-executing.md) all migrations
+* `typeorm migration:revert` - [revert](../migrations/06-reverting.md) last migration
+* `typeorm migration:show` - [list](../migrations/07-status.md) all migrations with their execution status

-```shell
-typeorm migration:create path-to-migrations-dir/migrationName
-```
-
-Learn more about [Migrations](./1-migrations.md).
-
-## Generate a migration from existing table schema
-
-Automatic migration generation creates a new migration file
-and writes all sql queries that must be executed to update the database.
-
-If no there were no changes generated, the command will exit with code 1.
-
-```shell
-typeorm migration:generate path/to/Migration -d path/to/datasource
-```
-
-The rule of thumb is to generate a migration after each entity change.
-the -d argument value should specify the path where your DataSource instance is defined.
-You can specify the path and name of the migration with the first argument.
-
-Learn more about [Migrations](./1-migrations.md).
-
-## Run migrations
-
-To execute all pending migrations use following command:
-
-```shell
-typeorm migration:run -- -d path-to-datasource-config
-```
-
-Learn more about [Migrations](./1-migrations.md).
-
-## Revert migrations
-
-To revert the most recently executed migration use the following command:
-
-```shell
-typeorm migration:revert -- -d path-to-datasource-config
-```
-
-This command will undo only the last executed migration.
-You can execute this command multiple times to revert multiple migrations.
-Learn more about [Migrations](./1-migrations.md).
-
-## Show migrations
-
-To show all migrations and whether they've been run or not use following command:
-
-```shell
-typeorm migration:show -- -d path-to-datasource-config
-```
-
-[X] = Migration has been ran
-
-[ ] = Migration is pending/unapplied
+Learn more about [Migrations](../migrations/01-why.md).

 ## Sync database schema

@@ -28,12 +28,6 @@ Different RDBMS-es have their own specific options.
   Example: `subscribers: [PostSubscriber, AppSubscriber, "subscriber/*.js", "modules/**/subscriber/*.js"]`.
   Learn more about [Subscribers](../advanced-topics/4-listeners-and-subscribers.md).

-- `migrations` - Migrations to be loaded and used for this data source.
-  It accepts both migration classes and directories from which to load.
-  Directories support glob patterns.
-  Example: `migrations: [FirstMigration, SecondMigration, "migration/*.js", "modules/**/migration/*.js"]`.
-  Learn more about [Migrations](../advanced-topics/1-migrations.md).
-
 - `logging` - Indicates if logging is enabled or not.
   If set to `true` then query and error logging will be enabled.
   You can also specify different types of logging to be enabled, for example `["query", "error", "schema"]`.
@@ -67,13 +61,13 @@ Different RDBMS-es have their own specific options.
   Note that for MongoDB database it does not create schema, because MongoDB is schemaless.
   Instead, it syncs just by creating indices.

-- `migrationsRun` - Indicates if migrations should be auto run on every application launch.
-  As an alternative, you can use CLI and run migration:run command.
+- `migrations` - [Migrations](../migrations/01-why.md) to be loaded and used for this data source

-- `migrationsTableName` - Name of the table in the database which is going to contain information about executed migrations.
-  By default, this table is called "migrations".
+- `migrationsRun` - Indicates if [migrations](../migrations/01-why.md) should be auto-run on every application launch.

-- `migrationsTransactionMode` - Control transactions for migrations (default: `all`), can be one of `all` | `none` | `each`
+- `migrationsTableName` - Name of the table in the database which is going to contain information about executed [migrations](../migrations/01-why.md).

+- `migrationsTransactionMode` - Controls transaction mode when running [migrations](../migrations/01-why.md).

 - `metadataTableName` - Name of the table in the database which is going to contain information about table metadata.
   By default, this table is called "typeorm_metadata".
@@ -85,7 +79,7 @@ Different RDBMS-es have their own specific options.
   eg. `.where("user.firstName = :search OR user.lastName = :search")` becomes `WHERE (user.firstName = ? OR user.lastName = ?)` instead of `WHERE user.firstName = ? OR user.lastName = ?`

 - `invalidWhereValuesBehavior` - Controls how null and undefined values are handled in where conditions across all TypeORM operations (find operations, query builders, repository methods).

   - `null` behavior options:
     - `'ignore'` (default) - skips null properties
     - `'sql-null'` - transforms null to SQL NULL
|
||||
|
||||
## Column Types
|
||||
|
||||
`int`, `bigint`, `bit`, `decimal`, `money`, `numeric`, `smallint`, `smallmoney`, `tinyint`, `float`, `real`, `date`, `datetime2`, `datetime`, `datetimeoffset`, `smalldatetime`, `time`, `char`, `varchar`, `text`, `nchar`, `nvarchar`, `ntext`, `binary`, `image`, `varbinary`, `hierarchyid`, `sql_variant`, `timestamp`, `uniqueidentifier`, `xml`, `geometry`, `geography`, `rowversion`
|
||||
`int`, `bigint`, `bit`, `decimal`, `money`, `numeric`, `smallint`, `smallmoney`, `tinyint`, `float`, `real`, `date`, `datetime2`, `datetime`, `datetimeoffset`, `smalldatetime`, `time`, `char`, `varchar`, `text`, `nchar`, `nvarchar`, `ntext`, `binary`, `image`, `varbinary`, `hierarchyid`, `sql_variant`, `timestamp`, `uniqueidentifier`, `xml`, `geometry`, `geography`, `rowversion`, `vector`
|
||||
|
||||
### Vector Type (vector)
|
||||
|
||||
The `vector` data type is available in SQL Server for storing high-dimensional vectors, commonly used for:
|
||||
|
||||
- Semantic search with embeddings
|
||||
- Recommendation systems
|
||||
- Similarity matching
|
||||
- Machine learning applications
|
||||
|
||||
NOTE: general `halfvec` type support is unavailable because this feature is still in preview. See the Microsoft docs: [Vector data type](https://learn.microsoft.com/en-us/sql/t-sql/data-types/vector-data-type).
|
||||
|
||||
#### Usage
|
||||
|
||||
```typescript
|
||||
@Entity()
|
||||
export class DocumentChunk {
|
||||
@PrimaryGeneratedColumn()
|
||||
id: number
|
||||
|
||||
@Column("varchar")
|
||||
content: string
|
||||
|
||||
// Vector column with 1998 dimensions
|
||||
@Column("vector", { length: 1998 })
|
||||
embedding: number[]
|
||||
}
|
||||
```
|
||||
|
||||
#### Vector Similarity Search
|
||||
|
||||
SQL Server provides the `VECTOR_DISTANCE` function for calculating distances between vectors:
|
||||
|
||||
```typescript
|
||||
const queryEmbedding = [
|
||||
/* your query vector */
|
||||
]
|
||||
|
||||
const results = await dataSource.query(
|
||||
`
|
||||
DECLARE @question AS VECTOR (1998) = @0;
|
||||
SELECT TOP (10) dc.*,
|
||||
VECTOR_DISTANCE('cosine', @question, embedding) AS distance
|
||||
FROM document_chunk dc
|
||||
ORDER BY VECTOR_DISTANCE('cosine', @question, embedding)
|
||||
`,
|
||||
[JSON.stringify(queryEmbedding)],
|
||||
)
|
||||
```
|
||||
|
||||
**Distance Metrics:**
|
||||
|
||||
- `'cosine'` - Cosine distance (most common for semantic search)
|
||||
- `'euclidean'` - Euclidean (L2) distance
|
||||
- `'dot'` - Negative dot product
|
||||
|
||||
**Requirements:**
|
||||
|
||||
- SQL Server version with vector support enabled
|
||||
- Vector dimensions must be specified using the `length` option
|
||||
|
||||

@@ -182,16 +182,17 @@ There are several special column types with additional functionality available:

 ### Vector columns

-Vector columns are supported on both PostgreSQL (via [`pgvector`](https://github.com/pgvector/pgvector) extension) and SAP HANA Cloud, enabling storing and querying vector embeddings for similarity search and machine learning applications.
+Vector columns are supported on PostgreSQL (via [`pgvector`](https://github.com/pgvector/pgvector) extension), Microsoft SQL Server, and SAP HANA Cloud, enabling storing and querying vector embeddings for similarity search and machine learning applications.

 TypeORM supports both `vector` and `halfvec` column types across databases:

-- `vector` - stores vectors as 4-byte floats (single precision)
-    - PostgreSQL: native `vector` type via pgvector extension
-    - SAP HANA: alias for `real_vector` type
-- `halfvec` - stores vectors as 2-byte floats (half precision) for memory efficiency
-    - PostgreSQL: native `halfvec` type via pgvector extension
-    - SAP HANA: alias for `half_vector` type
+- `vector` - stores vectors as 4-byte floats (single precision)
+    - PostgreSQL: native `vector` type via pgvector extension
+    - SQL Server: native `vector` type
+    - SAP HANA: alias for `real_vector` type
+- `halfvec` - stores vectors as 2-byte floats (half precision) for memory efficiency
+    - PostgreSQL: native `halfvec` type via pgvector extension
+    - SAP HANA: alias for `half_vector` type

 You can specify the vector dimensions using the `length` option:

@@ -201,45 +202,65 @@ export class Post {
     @PrimaryGeneratedColumn()
     id: number

-    // Vector without specified dimensions (works on PostgreSQL and SAP HANA)
+    // Vector without specified dimensions (works on PostgreSQL and SAP HANA; SQL Server requires explicit dimensions)
     @Column("vector")
     embedding: number[] | Buffer

-    // Vector with 3 dimensions: vector(3) (works on PostgreSQL and SAP HANA)
+    // Vector with 3 dimensions: vector(3)
     @Column("vector", { length: 3 })
     embedding_3d: number[] | Buffer

-    // Half-precision vector with 4 dimensions: halfvec(4) (works on PostgreSQL and SAP HANA)
+    // Half-precision vector with 4 dimensions: halfvec(4) (PostgreSQL and SAP HANA only)
     @Column("halfvec", { length: 4 })
     halfvec_embedding: number[] | Buffer
 }
 ```

-Vector columns can be used for similarity searches using PostgreSQL's vector operators:
+**PostgreSQL** - Vector columns can be used for similarity searches using vector operators:

 ```typescript
 // L2 distance (Euclidean) - <->
 const results = await dataSource.query(
     `SELECT id, embedding FROM post ORDER BY embedding <-> $1 LIMIT 5`,
-    ["[1,2,3]"]
+    ["[1,2,3]"],
 )

 // Cosine distance - <=>
 const results = await dataSource.query(
     `SELECT id, embedding FROM post ORDER BY embedding <=> $1 LIMIT 5`,
-    ["[1,2,3]"]
+    ["[1,2,3]"],
 )

 // Inner product - <#>
 const results = await dataSource.query(
     `SELECT id, embedding FROM post ORDER BY embedding <#> $1 LIMIT 5`,
-    ["[1,2,3]"]
+    ["[1,2,3]"],
 )
 ```

-> **Note**:
-> - **PostgreSQL**: Vector columns require the `pgvector` extension to be installed. The extension provides the vector data types and similarity operators.
-> - **SAP HANA**: Vector columns require SAP HANA Cloud (2024Q1+) and a supported version of `@sap/hana-client`. Use the appropriate [vector similarity functions](https://help.sap.com/docs/hana-cloud-database/sap-hana-cloud-sap-hana-database-sql-reference-guide/vector-functions) for similarity searches.
+**SQL Server** - Use the `VECTOR_DISTANCE` function for similarity searches:
+
+```typescript
+const queryEmbedding = [1, 2, 3]
+
+// Cosine distance
+const results = await dataSource.query(
+    `
+    DECLARE @question AS VECTOR(3) = @0;
+    SELECT TOP (5) id, embedding,
+           VECTOR_DISTANCE('cosine', @question, embedding) AS distance
+    FROM post
+    ORDER BY VECTOR_DISTANCE('cosine', @question, embedding)
+    `,
+    [JSON.stringify(queryEmbedding)],
+)
+```
+
+> **Note**:
+>
+> - **PostgreSQL**: Vector columns require the `pgvector` extension to be installed. The extension provides the vector data types and similarity operators.
+> - **SQL Server**: Vector type support requires a compatible SQL Server version with vector functionality enabled.
+> - **SAP HANA**: Vector columns require SAP HANA Cloud (2024Q1+) and a supported version of `@sap/hana-client`. Use the appropriate [vector similarity functions](https://help.sap.com/docs/hana-cloud-database/sap-hana-cloud-sap-hana-database-sql-reference-guide/vector-functions) for similarity searches.

 ## Column types
@ -11,7 +11,7 @@ You can create a view entity by defining a new class and mark it with `@ViewEnti
|
||||
- `database` - database name in selected DB server.
|
||||
- `schema` - schema name.
|
||||
- `expression` - view definition. **Required parameter**.
|
||||
- `dependsOn` - List of other views on which the current views depends. If your view uses another view in its definition, you can add it here so that migrations are generated in the correct order.
|
||||
- `dependsOn` - List of other views on which the current view depends. If your view uses another view in its definition, you can add it here so that [migrations](../migrations/01-why.md) are generated in the correct order.
|
||||
|
||||
`expression` can be a string with properly escaped columns and tables, depending on the database used (Postgres in the example):
|
||||
|
||||
|
||||
@ -34,7 +34,7 @@ TypeORM is highly influenced by other ORMs, such as [Hibernate](http://hibernate
|
||||
- Cascades.
|
||||
- Indices.
|
||||
- Transactions.
|
||||
- Migrations and automatic migrations generation.
|
||||
- [Migrations](/docs/migrations/why) with automatic generation.
|
||||
- Connection pooling.
|
||||
- Replication.
|
||||
- Using multiple database instances.
|
||||
@ -1195,7 +1195,7 @@ There are several extensions that simplify working with TypeORM and integrating
|
||||
- ER Diagram generator - [typeorm-uml](https://github.com/eugene-manuilov/typeorm-uml/)
|
||||
- another ER Diagram generator - [erdia](https://www.npmjs.com/package/erdia/)
|
||||
- Create, drop and seed database - [typeorm-extension](https://github.com/tada5hi/typeorm-extension)
|
||||
- Automatically update `data-source.ts` after generating migrations/entities - [typeorm-codebase-sync](https://www.npmjs.com/package/typeorm-codebase-sync)
|
||||
- Automatically update `data-source.ts` after generating [migrations](/docs/migrations/why)/entities - [typeorm-codebase-sync](https://www.npmjs.com/package/typeorm-codebase-sync)
|
||||
- Easy manipulation of `relations` objects - [typeorm-relations](https://npmjs.com/package/typeorm-relations)
|
||||
- Automatically generate `relations` based on a GraphQL query - [typeorm-relations-graphql](https://npmjs.com/package/typeorm-relations-graphql)
|
||||
|
||||
|
||||
@ -154,4 +154,3 @@ Both strategies have their own cons and pros.
|
||||
One thing we should always keep in mind with software development is how we are going to maintain our applications.
|
||||
The `Data Mapper` approach helps with maintainability, which is more effective in larger apps.
|
||||
The `Active Record` approach helps keep things simple which works well in smaller apps.
|
||||
And simplicity is always a key to better maintainability.
|
||||
|
||||
@ -178,7 +178,7 @@ module.exports = {
|
||||
|
||||
### Bundling Migration Files
|
||||
|
||||
By default Webpack tries to bundle everything into one file. This can be problematic when your project has migration files which are meant to be executed after bundled code is deployed to production. To make sure all your migrations can be recognized and executed by TypeORM, you may need to use "Object Syntax" for the `entry` configuration for the migration files only.
|
||||
By default Webpack tries to bundle everything into one file. This can be problematic when your project has migration files which are meant to be executed after bundled code is deployed to production. To make sure all your [migrations](../migrations/01-why.md) can be recognized and executed by TypeORM, you may need to use "Object Syntax" for the `entry` configuration for the migration files only.
|
||||
|
||||
```javascript
|
||||
const { globSync } = require("node:fs")
|
||||
@ -211,7 +211,7 @@ module.exports = {
|
||||
}
|
||||
```
|
||||
|
||||
Also, since Webpack 4, when using `mode: 'production'`, files are optimized by default which includes mangling your code in order to minimize file sizes. This breaks the migrations because TypeORM relies on their names to determine which has already been executed. You may disable minimization completely by adding:
|
||||
Also, since Webpack 4, when using `mode: 'production'`, files are optimized by default which includes mangling your code in order to minimize file sizes. This breaks the [migrations](../migrations/01-why.md) because TypeORM relies on their names to determine which has already been executed. You may disable minimization completely by adding:
|
||||
|
||||
```javascript
|
||||
module.exports = {
|
||||
@ -257,93 +257,6 @@ module.exports = {
|
||||
}
|
||||
```
|
||||
|
||||
## How to use Vite for the backend?
|
||||
|
||||
Using TypeORM in a Vite project is pretty straight forward. However, when you use migrations, you will run into "...migration name is wrong. Migration class name should have a
|
||||
JavaScript timestamp appended." errors when running the production build.
|
||||
On production builds, files are [optimized by default](https://vite.dev/config/build-options#build-minify) which includes mangling your code in order to minimize file sizes.
|
||||
|
||||
You have 3 options to mitigate this. The 3 options are shown belown as diff to this basic "vite.config.ts"
|
||||
|
||||
```typescript
|
||||
import legacy from "@vitejs/plugin-legacy"
|
||||
import vue from "@vitejs/plugin-vue"
|
||||
import path from "path"
|
||||
import { defineConfig } from "vite"
|
||||
|
||||
// https://vitejs.dev/config/
|
||||
export default defineConfig({
|
||||
build: {
|
||||
sourcemap: true,
|
||||
},
|
||||
plugins: [vue(), legacy()],
|
||||
resolve: {
|
||||
alias: {
|
||||
"@": path.resolve(__dirname, "./src"),
|
||||
},
|
||||
},
|
||||
})
|
||||
```
|
||||
|
||||
### Option 1: Disable minify
|
||||
|
||||
This is the most crude option and will result in significantly larger files. Add `build.minify = false` to your config.
|
||||
|
||||
```diff
|
||||
--- basic vite.config.ts
|
||||
+++ disable minify vite.config.ts
|
||||
@@ -7,6 +7,7 @@
|
||||
export default defineConfig({
|
||||
build: {
|
||||
sourcemap: true,
|
||||
+ minify: false,
|
||||
},
|
||||
plugins: [vue(), legacy()],
|
||||
resolve: {
|
||||
```
|
||||
|
||||
### Option 2: Disable esbuild minify identifiers
|
||||
|
||||
Vite uses esbuild as the default minifier. You can disable mangling of identifiers by adding `esbuild.minifyIdentifiers = false` to your config.
|
||||
This will result in smaller file sizes, but depending on your code base you will get diminishing returns as all identifiers will be kept at full length.
|
||||
|
||||
```diff
|
||||
--- basic vite.config.ts
|
||||
+++ disable esbuild minify identifiers vite.config.ts
|
||||
@@ -8,6 +8,7 @@
|
||||
build: {
|
||||
sourcemap: true,
|
||||
},
|
||||
+ esbuild: { minifyIdentifiers: false },
|
||||
plugins: [vue(), legacy()],
|
||||
resolve: {
|
||||
```
|
||||
|
||||
### Option 3: Use terser as minifier while keeping only the migration class names
|
||||
|
||||
Vite supports using terser as minifier. Terser is slower then esbuild, but offers more fine grained control over what to minify.
|
||||
Add `minify: 'terser'` with `terserOptions.mangle.keep_classnames: /^Migrations\d+$/` and `terserOptions.compress.keep_classnames: /^Migrations\d+$/` to your config.
|
||||
These options will make sure classnames that start with "Migrations" and end with numbers are not renamed during minification.
|
||||
|
||||
Make sure terser is available as dev dependency in your project: `npm add -D terser`.
|
||||
|
||||
```diff
|
||||
--- basic vite.config.ts
|
||||
+++ terser keep migration class names vite.config.ts
|
||||
@@ -7,6 +7,11 @@
|
||||
export default defineConfig({
|
||||
build: {
|
||||
sourcemap: true,
|
||||
+ minify: 'terser',
|
||||
+ terserOptions: {
|
||||
+ mangle: { keep_classnames: /^Migrations\d+$/ },
|
||||
+ compress: { keep_classnames: /^Migrations\d+$/ },
|
||||
+ },
|
||||
},
|
||||
plugins: [vue(), legacy()],
|
||||
resolve: {
|
||||
```
|
||||
|
||||
## How to use TypeORM in ESM projects?
|
||||
|
||||
Make sure to add `"type": "module"` in the `package.json` of your project so TypeORM will know to use `import( ... )` on files.
|
||||
|
||||
@ -662,7 +662,7 @@ Learn more about [listeners](../advanced-topics/4-listeners-and-subscribers.md).
|
||||
#### `@BeforeRemove`
|
||||
|
||||
You can define a method with any name in the entity and mark it with `@BeforeRemove`
|
||||
and TypeORM will call it before a entity is removed using repository/manager `remove`.
|
||||
and TypeORM will call it before an entity is removed using repository/manager `remove`.
|
||||
Example:
|
||||
|
||||
```typescript
|
||||
@ -698,7 +698,7 @@ Learn more about [listeners](../advanced-topics/4-listeners-and-subscribers.md).
|
||||
#### `@BeforeSoftRemove`
|
||||
|
||||
You can define a method with any name in the entity and mark it with `@BeforeSoftRemove`
|
||||
and TypeORM will call it before a entity is soft removed using repository/manager `softRemove`.
|
||||
and TypeORM will call it before an entity is soft removed using repository/manager `softRemove`.
|
||||
Example:
|
||||
|
||||
```typescript
|
||||
@ -734,7 +734,7 @@ Learn more about [listeners](../advanced-topics/4-listeners-and-subscribers.md).
|
||||
#### `@BeforeRecover`
|
||||
|
||||
You can define a method with any name in the entity and mark it with `@BeforeRecover`
|
||||
and TypeORM will call it before a entity is recovered using repository/manager `recover`.
|
||||
and TypeORM will call it before an entity is recovered using repository/manager `recover`.
|
||||
Example:
|
||||
|
||||
```typescript
|
||||
|
||||
42
docs/docs/migrations/01-why.md
Normal file
42
docs/docs/migrations/01-why.md
Normal file
@ -0,0 +1,42 @@
|
||||
# How do migrations work?
|
||||
|
||||
Once you get into production you'll need to synchronize model changes into the database.
|
||||
Typically, it is unsafe to use `synchronize: true` for schema synchronization on production once
|
||||
you get data in your database. Here is where migrations come to help.
|
||||
|
||||
A migration is just a single file with SQL queries to update a database schema
|
||||
and apply new changes to an existing database.
|
||||
|
||||
Let's say you already have a database and a `Post` entity:
|
||||
|
||||
```typescript
|
||||
import { Entity, Column, PrimaryGeneratedColumn } from "typeorm"
|
||||
|
||||
@Entity()
|
||||
export class Post {
|
||||
@PrimaryGeneratedColumn()
|
||||
id: number
|
||||
|
||||
@Column()
|
||||
title: string
|
||||
|
||||
@Column()
|
||||
text: string
|
||||
}
|
||||
```
|
||||
|
||||
And your entity worked in production for months without any changes.
|
||||
You have thousands of posts in your database.
|
||||
|
||||
Now you need to make a new release and rename `title` to `name`.
|
||||
What would you do?
|
||||
|
||||
You need to create a new migration with the following SQL query (PostgreSQL dialect):
|
||||
|
||||
```sql
|
||||
ALTER TABLE "post" RENAME COLUMN "title" TO "name";
|
||||
```
|
||||
|
||||
Once you run this SQL query your database schema is ready to work with your new codebase.
|
||||
TypeORM provides a place where you can write such SQL queries and run them when needed.
|
||||
This place is called "migrations".
|
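As a preview of what the following chapters describe, such a rename can be expressed as a migration class with an `up` method that applies the change and a `down` method that reverts it (class name and `TIMESTAMP` suffix are placeholders):

```typescript
import { MigrationInterface, QueryRunner } from "typeorm"

export class PostRefactoringTIMESTAMP implements MigrationInterface {
    async up(queryRunner: QueryRunner): Promise<void> {
        // Applies the schema change
        await queryRunner.query(`ALTER TABLE "post" RENAME COLUMN "title" TO "name"`)
    }

    async down(queryRunner: QueryRunner): Promise<void> {
        // Reverts the schema change
        await queryRunner.query(`ALTER TABLE "post" RENAME COLUMN "name" TO "title"`)
    }
}
```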
||||
71
docs/docs/migrations/02-setup.md
Normal file
71
docs/docs/migrations/02-setup.md
Normal file
@ -0,0 +1,71 @@
|
||||
# Setup
|
||||
|
||||
Before working with migrations you need to setup your [DataSource](../data-source/1-data-source.md) options properly:
|
||||
|
||||
```ts
|
||||
export default new DataSource({
|
||||
// basic setup
|
||||
synchronize: false,
|
||||
migrations: [ /*...*/ ],
|
||||
|
||||
// optional
|
||||
migrationsRun: false,
|
||||
migrationsTableName: 'migrations',
|
||||
migrationsTransactionMode: 'all'
|
||||
|
||||
// other options...
|
||||
})
|
||||
```
|
||||
|
||||
## `synchronize`
|
||||
|
||||
Turning off automatic schema synchronization is essential when working with migrations. Otherwise, migrations would make no sense.
|
||||
|
||||
## `migrations`
|
||||
|
||||
Defines the list of migrations to be loaded by TypeORM. It accepts both migration classes and directories to load them from.
|
||||
|
||||
The easiest approach is to specify the directory where your migration files are located (glob patterns are supported):
|
||||
|
||||
```ts
|
||||
migrations: [__dirname + '/migration/**/*{.js,.ts}']
|
||||
```
|
||||
|
||||
Defining both `.js` and `.ts` extensions allows you to run migrations from the TypeScript sources in development and from the compiled JavaScript output in production (e.g. from a Docker image).
|
||||
|
||||
Alternatively, you can specify the exact classes to get more fine-grained control:
|
||||
|
||||
```ts
|
||||
import { FirstMigration } from 'migrations/TIMESTAMP-first-migration'
|
||||
import { SecondMigration } from 'migrations/TIMESTAMP-second-migration'
|
||||
|
||||
export default new DataSource({
|
||||
migrations: [FirstMigration, SecondMigration]
|
||||
})
|
||||
```
|
||||
|
||||
but this requires more manual work and can be error-prone.
|
||||
|
||||
|
||||
|
||||
## Optional settings
|
||||
|
||||
### `migrationsRun`
|
||||
|
||||
Indicates if migrations should be auto-run on every application launch. Default: `false`
|
||||
|
||||
### `migrationsTableName`
|
||||
|
||||
You might want to specify the name of the table that will store information about executed migrations. By default it is called `'migrations'`.
|
||||
|
||||
```ts
|
||||
migrationsTableName: 'some_custom_migrations_table'
|
||||
```
|
||||
|
||||
### `migrationsTransactionMode`
|
||||
|
||||
Controls the transaction mode when running migrations. Possible options are:
|
||||
|
||||
- `all` (_default_) - wraps the entire migration run in a single transaction
|
||||
- `none` - runs migrations without wrapping them in transactions
|
||||
- `each` - wraps each migration in its own transaction, as shown in the sketch below
|
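For example, a minimal sketch of a DataSource that wraps each migration in its own transaction (connection options are abbreviated and the values shown are illustrative):

```typescript
import { DataSource } from "typeorm"

export default new DataSource({
    type: "postgres",
    // ...connection options (host, credentials, entities, ...) omitted...
    synchronize: false,
    migrations: [__dirname + "/migration/**/*{.js,.ts}"],
    migrationsTableName: "migrations",
    // each migration runs in its own transaction instead of one wrapping transaction
    migrationsTransactionMode: "each",
})
```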
||||
56
docs/docs/migrations/03-creating.md
Normal file
56
docs/docs/migrations/03-creating.md
Normal file
@ -0,0 +1,56 @@
|
||||
# Creating manually
|
||||
|
||||
You can create a new migration using the CLI by specifying the name and location of the migration:
|
||||
|
||||
```shell
|
||||
npx typeorm migration:create <path/to/migrations>/<migration-name>
|
||||
```
|
||||
|
||||
For example:
|
||||
|
||||
```shell
|
||||
npx typeorm migration:create src/db/migrations/post-refactoring
|
||||
```
|
||||
|
||||
After you run the command you can see a new file generated in the `src/db/migrations` directory named `{TIMESTAMP}-post-refactoring.ts` where `{TIMESTAMP}` is the current timestamp when the migration was generated.
|
||||
|
||||
Now you can open the file and add your migration SQL queries there. You should see the following content inside your migration:
|
||||
|
||||
```typescript
|
||||
import { MigrationInterface, QueryRunner } from "typeorm"
|
||||
|
||||
export class PostRefactoringTIMESTAMP implements MigrationInterface {
|
||||
async up(queryRunner: QueryRunner): Promise<void> {}
|
||||
|
||||
async down(queryRunner: QueryRunner): Promise<void> {}
|
||||
}
|
||||
```
|
||||
|
||||
There are two methods you must fill with your migration code: `up` and `down`.
|
||||
`up` has to contain the code you need to perform the migration.
|
||||
`down` has to revert whatever `up` changed.
|
||||
The `down` method is used to revert the last migration.
|
||||
|
||||
Inside both `up` and `down` you have a `QueryRunner` object.
|
||||
All database operations are executed using this object.
|
||||
Learn more about [query runner](../query-runner.md).
|
||||
|
||||
Let's see what the migration looks like with our `Post` changes:
|
||||
|
||||
```typescript
|
||||
import { MigrationInterface, QueryRunner } from "typeorm"
|
||||
|
||||
export class PostRefactoringTIMESTAMP implements MigrationInterface {
|
||||
async up(queryRunner: QueryRunner): Promise<void> {
|
||||
await queryRunner.query(
|
||||
`ALTER TABLE "post" RENAME COLUMN "title" TO "name"`,
|
||||
)
|
||||
}
|
||||
|
||||
async down(queryRunner: QueryRunner): Promise<void> {
|
||||
await queryRunner.query(
|
||||
`ALTER TABLE "post" RENAME COLUMN "name" TO "title"`,
|
||||
) // reverts things made in "up" method
|
||||
}
|
||||
}
|
||||
```
|
||||
117
docs/docs/migrations/04-generating.md
Normal file
117
docs/docs/migrations/04-generating.md
Normal file
@ -0,0 +1,117 @@
|
||||
# Generating
|
||||
|
||||
TypeORM is able to automatically generate migration files based on the changes you made to your entities, comparing them with the existing database schema on the server.
|
||||
|
||||
Automatic migration generation creates a new migration file and writes all sql queries that must be executed to update the database. If no changes are detected, the command will exit with code `1`.
|
||||
|
||||
Let's say you have a `Post` entity with a `title` column, and you have changed the name `title` to `name`.
|
||||
|
||||
You can generate a migration with the following command:
|
||||
|
||||
```shell
|
||||
typeorm migration:generate -d <path/to/datasource> <migration-name>
|
||||
```
|
||||
|
||||
The `-d` argument value should specify the path where your [DataSource](../data-source/1-data-source.md) instance is defined.
|
||||
|
||||
Alternatively, you can specify the name with the `--name` parameter:
|
||||
|
||||
```shell
|
||||
typeorm migration:generate -- -d <path/to/datasource> --name=<migration-name>
|
||||
```
|
||||
|
||||
or use a full path:
|
||||
|
||||
```shell
|
||||
typeorm migration:generate -d <path/to/datasource> <path/to/migrations>/<migration-name>
|
||||
```
|
||||
|
||||
Assuming you used `post-refactoring` as a name, it will generate a new file called `{TIMESTAMP}-post-refactoring.ts` with the following content:
|
||||
|
||||
```typescript
|
||||
import { MigrationInterface, QueryRunner } from "typeorm"
|
||||
|
||||
export class PostRefactoringTIMESTAMP implements MigrationInterface {
|
||||
async up(queryRunner: QueryRunner): Promise<void> {
|
||||
await queryRunner.query(
|
||||
`ALTER TABLE "post" ALTER COLUMN "title" RENAME TO "name"`,
|
||||
)
|
||||
}
|
||||
|
||||
async down(queryRunner: QueryRunner): Promise<void> {
|
||||
await queryRunner.query(
|
||||
`ALTER TABLE "post" ALTER COLUMN "name" RENAME TO "title"`,
|
||||
)
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
Alternatively, you can output your migrations as JavaScript files using the `o` (alias for `--outputJs`) flag. This is useful for JavaScript-only projects in which the additional TypeScript packages are not installed. This command will generate a new migration file `{TIMESTAMP}-PostRefactoring.js` with the following content:
|
||||
|
||||
```javascript
|
||||
/**
|
||||
* @typedef {import('typeorm').MigrationInterface} MigrationInterface
|
||||
* @typedef {import('typeorm').QueryRunner} QueryRunner
|
||||
*/
|
||||
|
||||
/**
|
||||
* @class
|
||||
* @implements {MigrationInterface}
|
||||
*/
|
||||
module.exports = class PostRefactoringTIMESTAMP {
|
||||
/**
|
||||
* @param {QueryRunner} queryRunner
|
||||
*/
|
||||
async up(queryRunner) {
|
||||
await queryRunner.query(
|
||||
`ALTER TABLE "post" ALTER COLUMN "title" RENAME TO "name"`,
|
||||
)
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {QueryRunner} queryRunner
|
||||
*/
|
||||
async down(queryRunner) {
|
||||
await queryRunner.query(
|
||||
`ALTER TABLE "post" ALTER COLUMN "name" RENAME TO "title"`,
|
||||
)
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
By default, the `o` (alias for `--outputJs`) flag generates CommonJS JavaScript code, but you can also generate ESM code with the `esm` flag. This is useful for JavaScript projects that use ESM:
|
||||
|
||||
```javascript
|
||||
/**
|
||||
* @typedef {import('typeorm').MigrationInterface} MigrationInterface
|
||||
* @typedef {import('typeorm').QueryRunner} QueryRunner
|
||||
*/
|
||||
|
||||
/**
|
||||
* @class
|
||||
* @implements {MigrationInterface}
|
||||
*/
|
||||
export class PostRefactoringTIMESTAMP {
|
||||
/**
|
||||
* @param {QueryRunner} queryRunner
|
||||
*/
|
||||
async up(queryRunner) {
|
||||
await queryRunner.query(
|
||||
`ALTER TABLE "post" ALTER COLUMN "title" RENAME TO "name"`,
|
||||
)
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {QueryRunner} queryRunner
|
||||
*/
|
||||
async down(queryRunner) {
|
||||
await queryRunner.query(
|
||||
`ALTER TABLE "post" ALTER COLUMN "name" RENAME TO "title"`,
|
||||
)
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
As you can see, you don't need to write the queries on your own.
|
||||
|
||||
The rule of thumb for generating migrations is that you generate them after **each** change you made to your models. To apply multi-line formatting to your generated migration queries, use the `p` (alias for `--pretty`) flag.
|
||||
29
docs/docs/migrations/05-executing.md
Normal file
29
docs/docs/migrations/05-executing.md
Normal file
@ -0,0 +1,29 @@
|
||||
# Executing and reverting
|
||||
|
||||
Once you have migrations to run in production, you can run them using a CLI command:
|
||||
|
||||
```shell
|
||||
typeorm migration:run -- -d path-to-datasource-config
|
||||
```
|
||||
|
||||
**`typeorm migration:create` and `typeorm migration:generate` will create `.ts` files, unless you use the `o` flag (see more in [Generating migrations](04-generating.md)). The `migration:run` and `migration:revert` commands only work on `.js` files. Thus the TypeScript files need to be compiled before running the commands.** Alternatively, you can use `ts-node` with `typeorm` to run `.ts` migration files.
|
||||
|
||||
Example with `ts-node`:
|
||||
|
||||
```shell
|
||||
npx typeorm-ts-node-commonjs migration:run -- -d path-to-datasource-config
|
||||
```
|
||||
|
||||
Example with `ts-node` in ESM projects:
|
||||
|
||||
```shell
|
||||
npx typeorm-ts-node-esm migration:run -- -d path-to-datasource-config
|
||||
```
|
||||
|
||||
```shell
|
||||
npx typeorm-ts-node-esm migration:generate ./src/migrations/update-post-table -d ./src/data-source.ts
|
||||
```
|
||||
|
||||
This command will execute all pending migrations and run them in a sequence ordered by their timestamps.
|
||||
This means all sql queries written in the `up` methods of your created migrations will be executed.
|
||||
That's all! Now you have your database schema up-to-date.
|
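If you prefer to run migrations from application code instead of the CLI (for example on application startup), the DataSource API exposes `runMigrations()`. A minimal sketch, assuming your DataSource is exported as default from a `./data-source` module:

```typescript
import dataSource from "./data-source" // assumed path to your DataSource module

async function migrate(): Promise<void> {
    await dataSource.initialize()
    // Executes all pending migrations, ordered by their timestamps
    await dataSource.runMigrations()
    await dataSource.destroy()
}

migrate().catch((error) => {
    console.error(error)
    process.exit(1)
})
```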
||||
11
docs/docs/migrations/06-reverting.md
Normal file
11
docs/docs/migrations/06-reverting.md
Normal file
@ -0,0 +1,11 @@
|
||||
# Reverting
|
||||
|
||||
If for some reason you want to revert the changes, you can run:
|
||||
|
||||
```shell
|
||||
typeorm migration:revert -- -d path-to-datasource-config
|
||||
```
|
||||
|
||||
This command will execute `down` in the latest executed migration.
|
||||
|
||||
If you need to revert multiple migrations you must call this command multiple times.
|
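The programmatic counterpart is `undoLastMigration()`, which likewise reverts a single migration per call. A minimal sketch, assuming your DataSource is exported as default from a `./data-source` module:

```typescript
import dataSource from "./data-source" // assumed path to your DataSource module

async function revertLast(): Promise<void> {
    await dataSource.initialize()
    // Each call reverts exactly one migration, starting with the most recent one
    await dataSource.undoLastMigration()
    await dataSource.destroy()
}

revertLast().catch((error) => {
    console.error(error)
    process.exit(1)
})
```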
||||
11
docs/docs/migrations/07-status.md
Normal file
11
docs/docs/migrations/07-status.md
Normal file
@ -0,0 +1,11 @@
|
||||
# Status
|
||||
|
||||
To show all migrations and whether they have been run or not, use the following command:
|
||||
|
||||
```shell
|
||||
typeorm migration:show -- -d path-to-datasource-config
|
||||
```
|
||||
|
||||
[X] = Migration has been run
|
||||
|
||||
[ ] = Migration is pending/unapplied
|
||||
42
docs/docs/migrations/08-faking.md
Normal file
42
docs/docs/migrations/08-faking.md
Normal file
@ -0,0 +1,42 @@
|
||||
# Faking Migrations and Rollbacks
|
||||
|
||||
You can also fake run a migration using the `--fake` flag (`-f` for short). This will add the migration
|
||||
to the migrations table without running it. This is useful for migrations created after manual changes
|
||||
have already been made to the database or when migrations have been run externally
|
||||
(e.g. by another tool or application), and you still would like to keep a consistent migration history.
|
||||
|
||||
```shell
|
||||
typeorm migration:run -d path-to-datasource-config --fake
|
||||
```
|
||||
|
||||
This is also possible with rollbacks.
|
||||
|
||||
```shell
|
||||
typeorm migration:revert -d path-to-datasource-config --fake
|
||||
```
|
||||
|
||||
### Transaction modes
|
||||
|
||||
By default, TypeORM will run all your migrations within a single wrapping transaction.
|
||||
This corresponds to the `--transaction all` flag.
|
||||
If you require more fine-grained transaction control, you can use the `--transaction each` flag to wrap every migration individually, or the `--transaction none` flag to opt out of wrapping the migrations in transactions altogether.
|
||||
|
||||
In addition to these flags, you can also override the transaction behavior on a per-migration basis by setting the `transaction` property on the `MigrationInterface` to `true` or `false`. This only works in the `each` or `none` transaction mode.
|
||||
|
||||
```typescript
|
||||
import { MigrationInterface, QueryRunner } from "typeorm"
|
||||
|
||||
export class AddIndexTIMESTAMP implements MigrationInterface {
|
||||
transaction = false
|
||||
|
||||
async up(queryRunner: QueryRunner): Promise<void> {
|
||||
await queryRunner.query(
|
||||
`CREATE INDEX CONCURRENTLY post_names_idx ON post(name)`,
|
||||
)
|
||||
}
|
||||
|
||||
async down(queryRunner: QueryRunner): Promise<void> {
|
||||
await queryRunner.query(`DROP INDEX CONCURRENTLY post_names_idx`)
|
||||
}
|
||||
}
|
||||
```
|
||||
@ -1,348 +1,7 @@
|
||||
# Migrations
|
||||
|
||||
## How migrations work?
|
||||
|
||||
Once you get into production you'll need to synchronize model changes into the database.
|
||||
Typically, it is unsafe to use `synchronize: true` for schema synchronization on production once
|
||||
you get data in your database. Here is where migrations come to help.
|
||||
|
||||
A migration is just a single file with sql queries to update a database schema
|
||||
and apply new changes to an existing database.
|
||||
|
||||
Let's say you already have a database and a post entity:
|
||||
|
||||
```typescript
|
||||
import { Entity, Column, PrimaryGeneratedColumn } from "typeorm"
|
||||
|
||||
@Entity()
|
||||
export class Post {
|
||||
@PrimaryGeneratedColumn()
|
||||
id: number
|
||||
|
||||
@Column()
|
||||
title: string
|
||||
|
||||
@Column()
|
||||
text: string
|
||||
}
|
||||
```
|
||||
|
||||
And your entity worked in production for months without any changes.
|
||||
You have thousands of posts in your database.
|
||||
|
||||
Now you need to make a new release and rename `title` to `name`.
|
||||
What would you do?
|
||||
|
||||
You need to create a new migration with the following SQL query (postgres dialect):
|
||||
|
||||
```sql
|
||||
ALTER TABLE "post" ALTER COLUMN "title" RENAME TO "name";
|
||||
```
|
||||
|
||||
Once you run this SQL query your database schema is ready to work with your new codebase.
|
||||
TypeORM provides a place where you can write such sql queries and run them when needed.
|
||||
This place is called "migrations".
|
||||
|
||||
## Creating a new migration
|
||||
|
||||
**Pre-requisites**: [Installing CLI](./6-using-cli.md#installing-cli)
|
||||
|
||||
Before creating a new migration you need to setup your data source options properly:
|
||||
|
||||
```ts
|
||||
import { DataSource } from "typeorm"
|
||||
|
||||
export default new DataSource({
|
||||
type: "mysql",
|
||||
host: "localhost",
|
||||
port: 3306,
|
||||
username: "test",
|
||||
password: "test",
|
||||
database: "test",
|
||||
entities: [
|
||||
/*...*/
|
||||
],
|
||||
migrations: [
|
||||
/*...*/
|
||||
],
|
||||
migrationsTableName: "custom_migration_table",
|
||||
})
|
||||
```
|
||||
|
||||
Here we setup two options:
|
||||
|
||||
- `"migrationsTableName": "migrations"` - Specify this option only if you need the migration table name to be different from `"migrations"`.
|
||||
- `"migrations": [/*...*/]` - list of migrations that need to be loaded by TypeORM
|
||||
|
||||
Once you setup the connection options you can create a new migration using CLI:
|
||||
|
||||
```shell
|
||||
typeorm migration:create ./path-to-migrations-dir/PostRefactoring
|
||||
```
|
||||
|
||||
Here, `PostRefactoring` is the name of the migration - you can specify any name you want.
|
||||
After you run the command you can see a new file generated in the "migration" directory
|
||||
named `{TIMESTAMP}-PostRefactoring.ts` where `{TIMESTAMP}` is the current timestamp when the migration was generated.
|
||||
Now you can open the file and add your migration sql queries there.
|
||||
|
||||
You should see the following content inside your migration:
|
||||
|
||||
```typescript
|
||||
import { MigrationInterface, QueryRunner } from "typeorm"
|
||||
|
||||
export class PostRefactoringTIMESTAMP implements MigrationInterface {
|
||||
async up(queryRunner: QueryRunner): Promise<void> {}
|
||||
|
||||
async down(queryRunner: QueryRunner): Promise<void> {}
|
||||
}
|
||||
```
|
||||
|
||||
There are two methods you must fill with your migration code: `up` and `down`.
|
||||
`up` has to contain the code you need to perform the migration.
|
||||
`down` has to revert whatever `up` changed.
|
||||
`down` method is used to revert the last migration.
|
||||
|
||||
Inside both `up` and `down` you have a `QueryRunner` object.
|
||||
All database operations are executed using this object.
|
||||
Learn more about [query runner](../query-runner.md).
|
||||
|
||||
Let's see what the migration looks like with our `Post` changes:
|
||||
|
||||
```typescript
|
||||
import { MigrationInterface, QueryRunner } from "typeorm"
|
||||
|
||||
export class PostRefactoringTIMESTAMP implements MigrationInterface {
|
||||
async up(queryRunner: QueryRunner): Promise<void> {
|
||||
await queryRunner.query(
|
||||
`ALTER TABLE "post" RENAME COLUMN "title" TO "name"`,
|
||||
)
|
||||
}
|
||||
|
||||
async down(queryRunner: QueryRunner): Promise<void> {
|
||||
await queryRunner.query(
|
||||
`ALTER TABLE "post" RENAME COLUMN "name" TO "title"`,
|
||||
) // reverts things made in "up" method
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
## Running and reverting migrations
|
||||
|
||||
Once you have a migration to run on production, you can run them using a CLI command:
|
||||
|
||||
```shell
|
||||
typeorm migration:run -- -d path-to-datasource-config
|
||||
```
|
||||
|
||||
**`typeorm migration:create` and `typeorm migration:generate` will create `.ts` files, unless you use the `o` flag (see more in [Generating migrations](#generating-migrations)). The `migration:run` and `migration:revert` commands only work on `.js` files. Thus the typescript files need to be compiled before running the commands.** Alternatively, you can use `ts-node` with `typeorm` to run `.ts` migration files.
|
||||
|
||||
Example with `ts-node`:
|
||||
|
||||
```shell
|
||||
npx typeorm-ts-node-commonjs migration:run -- -d path-to-datasource-config
|
||||
```
|
||||
|
||||
Example with `ts-node` in ESM projects:
|
||||
|
||||
```shell
|
||||
npx typeorm-ts-node-esm migration:run -- -d path-to-datasource-config
|
||||
```
|
||||
|
||||
```shell
|
||||
npx typeorm-ts-node-esm migration:generate ./src/migrations/update-post-table -d ./src/data-source.ts
|
||||
```
|
||||
|
||||
This command will execute all pending migrations and run them in a sequence ordered by their timestamps.
|
||||
This means all sql queries written in the `up` methods of your created migrations will be executed.
|
||||
That's all! Now you have your database schema up-to-date.
|
||||
|
||||
If for some reason you want to revert the changes, you can run:
|
||||
|
||||
```shell
|
||||
typeorm migration:revert -- -d path-to-datasource-config
|
||||
```
|
||||
|
||||
This command will execute `down` in the latest executed migration.
|
||||
If you need to revert multiple migrations you must call this command multiple times.
|
||||
|
||||
### Faking Migrations and Rollbacks
|
||||
|
||||
You can also fake run a migration using the `--fake` flag (`-f` for short). This will add the migration
|
||||
to the migrations table without running it. This is useful for migrations created after manual changes
|
||||
have already been made to the database or when migrations have been run externally
|
||||
(e.g. by another tool or application), and you still would like to keep a consistent migration history.
|
||||
|
||||
```shell
|
||||
typeorm migration:run -d path-to-datasource-config --fake
|
||||
```
|
||||
|
||||
This is also possible with rollbacks.
|
||||
|
||||
```shell
|
||||
typeorm migration:revert -d path-to-datasource-config --fake
|
||||
```
|
||||
|
||||
### Transaction modes
|
||||
|
||||
By default, TypeORM will run all your migrations within a single wrapping transaction.
|
||||
This corresponds to the `--transaction all` flag.
|
||||
If you require more fine grained transaction control, you can use the `--transaction each` flag to wrap every migration individually, or the `--transaction none` flag to opt out of wrapping the migrations in transactions altogether.
|
||||
|
||||
In addition to these flags, you can also override the transaction behavior on a per-migration basis by setting the `transaction` property on the `MigrationInterface` to `true` or `false`. This only works in the `each` or `none` transaction mode.
|
||||
|
||||
```typescript
|
||||
import { MigrationInterface, QueryRunner } from "typeorm"
|
||||
|
||||
export class AddIndexTIMESTAMP implements MigrationInterface {
|
||||
transaction = false
|
||||
|
||||
async up(queryRunner: QueryRunner): Promise<void> {
|
||||
await queryRunner.query(
|
||||
`CREATE INDEX CONCURRENTLY post_names_idx ON post(name)`,
|
||||
)
|
||||
}
|
||||
|
||||
async down(queryRunner: QueryRunner): Promise<void> {
|
||||
await queryRunner.query(`DROP INDEX CONCURRENTLY post_names_idx`)
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
## Generating migrations
|
||||
|
||||
TypeORM is able to automatically generate migration files with schema changes you made.
|
||||
|
||||
Let's say you have a `Post` entity with a `title` column, and you have changed the name `title` to `name`.
|
||||
You can run following command:
|
||||
|
||||
```shell
|
||||
typeorm migration:generate PostRefactoring -d path-to-datasource-config
|
||||
```
|
||||
|
||||
If you encounter any error, it require you have the path to migration name and data source. You can try this option
|
||||
|
||||
```shell
|
||||
typeorm migration:generate -d <path/to/datasource> path/to/migrations/<migration-name>
|
||||
```
|
||||
|
||||
And it will generate a new migration called `{TIMESTAMP}-PostRefactoring.ts` with the following content:
|
||||
|
||||
```typescript
|
||||
import { MigrationInterface, QueryRunner } from "typeorm"
|
||||
|
||||
export class PostRefactoringTIMESTAMP implements MigrationInterface {
|
||||
async up(queryRunner: QueryRunner): Promise<void> {
|
||||
await queryRunner.query(
|
||||
`ALTER TABLE "post" ALTER COLUMN "title" RENAME TO "name"`,
|
||||
)
|
||||
}
|
||||
|
||||
async down(queryRunner: QueryRunner): Promise<void> {
|
||||
await queryRunner.query(
|
||||
`ALTER TABLE "post" ALTER COLUMN "name" RENAME TO "title"`,
|
||||
)
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
Alternatively, you can also output your migrations as Javascript files using the `o` (alias for `--outputJs`) flag. This is useful for Javascript only projects in which TypeScript additional packages are not installed. This command, will generate a new migration file `{TIMESTAMP}-PostRefactoring.js` with the following content:
|
||||
|
||||
```javascript
|
||||
/**
|
||||
* @typedef {import('typeorm').MigrationInterface} MigrationInterface
|
||||
* @typedef {import('typeorm').QueryRunner} QueryRunner
|
||||
*/
|
||||
|
||||
/**
|
||||
* @class
|
||||
* @implements {MigrationInterface}
|
||||
*/
|
||||
module.exports = class PostRefactoringTIMESTAMP {
|
||||
/**
|
||||
* @param {QueryRunner} queryRunner
|
||||
*/
|
||||
async up(queryRunner) {
|
||||
await queryRunner.query(
|
||||
`ALTER TABLE "post" ALTER COLUMN "title" RENAME TO "name"`,
|
||||
)
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {QueryRunner} queryRunner
|
||||
*/
|
||||
async down(queryRunner) {
|
||||
await queryRunner.query(
|
||||
`ALTER TABLE "post" ALTER COLUMN "name" RENAME TO "title"`,
|
||||
)
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
By default, it generates CommonJS JavaScript code with the `o` (alias for `--outputJs`) flag, but you can also generate ESM code with the `esm` flag. This is useful for Javascript projects that use ESM:
|
||||
|
||||
```javascript
|
||||
/**
|
||||
* @typedef {import('typeorm').MigrationInterface} MigrationInterface
|
||||
* @typedef {import('typeorm').QueryRunner} QueryRunner
|
||||
*/
|
||||
|
||||
/**
|
||||
* @class
|
||||
* @implements {MigrationInterface}
|
||||
*/
|
||||
export class PostRefactoringTIMESTAMP {
|
||||
/**
|
||||
* @param {QueryRunner} queryRunner
|
||||
*/
|
||||
async up(queryRunner) {
|
||||
await queryRunner.query(
|
||||
`ALTER TABLE "post" ALTER COLUMN "title" RENAME TO "name"`,
|
||||
)
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {QueryRunner} queryRunner
|
||||
*/
|
||||
async down(queryRunner) {
|
||||
await queryRunner.query(
|
||||
`ALTER TABLE "post" ALTER COLUMN "name" RENAME TO "title"`,
|
||||
)
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
See, you don't need to write the queries on your own.
|
||||
The rule of thumb for generating migrations is that you generate them after **each** change you made to your models. To apply multi-line formatting to your generated migration queries, use the `p` (alias for `--pretty`) flag.
|
||||
|
||||
## DataSource option
|
||||
|
||||
If you need to run/revert/generate/show your migrations use the `-d` (alias for `--dataSource`) and pass the path to the file where your DataSource instance is defined as an argument
|
||||
|
||||
```shell
|
||||
typeorm -d <your-data-source-path> migration:{run|revert}
|
||||
```
|
||||
|
||||
## Timestamp option
|
||||
|
||||
If you need to specify a timestamp for the migration name, use the `-t` (alias for `--timestamp`) and pass the timestamp (should be a non-negative number)
|
||||
|
||||
```shell
|
||||
typeorm -t <specific-timestamp> migration:{create|generate}
|
||||
```
|
||||
|
||||
You can get a timestamp from:
|
||||
|
||||
```js
|
||||
Date.now()
|
||||
/* OR */ new Date().getTime()
|
||||
```
|
||||
|
||||
## Using migration API to write migrations
|
||||
# Query Runner API
|
||||
|
||||
In order to use an API to change a database schema you can use `QueryRunner`.
|
||||
|
||||
Example:
|
||||
|
||||
```ts
|
||||
import {
|
||||
MigrationInterface,
|
||||
@ -749,7 +408,7 @@ dropUniqueConstraint(table: Table|string, uniqueOrName: TableUnique|string): Pro
|
||||
- `table` - Table object or name
|
||||
- `uniqueOrName` - TableUnique object or unique constraint name to be dropped
|
||||
|
||||
Drops an unique constraint.
|
||||
Drops a unique constraint.
|
||||
|
||||
> Note: does not work for MySQL, because MySQL stores unique constraints as unique indices. Use `dropIndex()` method instead.
|
||||
|
||||
@ -762,7 +421,7 @@ dropUniqueConstraints(table: Table|string, uniqueConstraints: TableUnique[]): Pr
|
||||
- `table` - Table object or name
|
||||
- `uniqueConstraints` - array of TableUnique objects to be dropped
|
||||
|
||||
Drops an unique constraints.
|
||||
Drops unique constraints.
|
||||
|
||||
> Note: does not work for MySQL, because MySQL stores unique constraints as unique indices. Use `dropIndices()` method instead.
|
||||
|
||||
@ -775,7 +434,7 @@ createCheckConstraint(table: Table|string, checkConstraint: TableCheck): Promise
|
||||
- `table` - Table object or name
|
||||
- `checkConstraint` - TableCheck object
|
||||
|
||||
Creates new check constraint.
|
||||
Creates a new check constraint.
|
||||
|
||||
> Note: MySQL does not support check constraints.
|
||||
|
||||
@ -788,7 +447,7 @@ createCheckConstraints(table: Table|string, checkConstraints: TableCheck[]): Pro
|
||||
- `table` - Table object or name
|
||||
- `checkConstraints` - array of TableCheck objects
|
||||
|
||||
Creates new check constraint.
|
||||
Creates new check constraints.
|
||||
|
||||
> Note: MySQL does not support check constraints.
|
||||
|
||||
16
docs/docs/migrations/10-extra.md
Normal file
16
docs/docs/migrations/10-extra.md
Normal file
@ -0,0 +1,16 @@
|
||||
# Extra options
|
||||
|
||||
## Timestamp
|
||||
|
||||
If you need to specify a timestamp for the migration name, use the `-t` flag (alias for `--timestamp`) and pass the timestamp (a non-negative number):
|
||||
|
||||
```shell
|
||||
typeorm -t <specific-timestamp> migration:{create|generate}
|
||||
```
|
||||
|
||||
You can get a timestamp from:
|
||||
|
||||
```js
|
||||
Date.now()
|
||||
/* OR */ new Date().getTime()
|
||||
```
|
||||
86
docs/docs/migrations/11-vite.md
Normal file
86
docs/docs/migrations/11-vite.md
Normal file
@ -0,0 +1,86 @@
|
||||
# Vite
|
||||
|
||||
Using TypeORM in a [Vite](https://vite.dev) project is pretty straightforward. However, when you use [migrations](../migrations/01-why.md), you will run into "...migration name is wrong. Migration class name should have a
|
||||
JavaScript timestamp appended." errors when running the production build.
|
||||
On production builds, files are [optimized by default](https://vite.dev/config/build-options#build-minify) which includes mangling your code in order to minimize file sizes.
|
||||
|
||||
You have 3 options to mitigate this. The 3 options are shown below as diff to this basic `vite.config.ts`
|
||||
|
||||
```typescript
|
||||
import legacy from "@vitejs/plugin-legacy"
|
||||
import vue from "@vitejs/plugin-vue"
|
||||
import path from "path"
|
||||
import { defineConfig } from "vite"
|
||||
|
||||
// https://vitejs.dev/config/
|
||||
export default defineConfig({
|
||||
build: {
|
||||
sourcemap: true,
|
||||
},
|
||||
plugins: [vue(), legacy()],
|
||||
resolve: {
|
||||
alias: {
|
||||
"@": path.resolve(__dirname, "./src"),
|
||||
},
|
||||
},
|
||||
})
|
||||
```
|
||||
|
||||
## Option 1: Disable minify
|
||||
|
||||
This is the most crude option and will result in significantly larger files. Add `build.minify = false` to your config.
|
||||
|
||||
```diff
|
||||
--- basic vite.config.ts
|
||||
+++ disable minify vite.config.ts
|
||||
@@ -7,6 +7,7 @@
|
||||
export default defineConfig({
|
||||
build: {
|
||||
sourcemap: true,
|
||||
+ minify: false,
|
||||
},
|
||||
plugins: [vue(), legacy()],
|
||||
resolve: {
|
||||
```
|
||||
|
||||
## Option 2: Disable esbuild minify identifiers
|
||||
|
||||
Vite uses esbuild as the default minifier. You can disable mangling of identifiers by adding `esbuild.minifyIdentifiers = false` to your config.
|
||||
This will result in smaller file sizes, but depending on your code base you will get diminishing returns as all identifiers will be kept at full length.
|
||||
|
||||
```diff
|
||||
--- basic vite.config.ts
|
||||
+++ disable esbuild minify identifiers vite.config.ts
|
||||
@@ -8,6 +8,7 @@
|
||||
build: {
|
||||
sourcemap: true,
|
||||
},
|
||||
+ esbuild: { minifyIdentifiers: false },
|
||||
plugins: [vue(), legacy()],
|
||||
resolve: {
|
||||
```
|
||||
|
||||
## Option 3: Use terser as minifier while keeping only the migration class names
|
||||
|
||||
Vite supports using terser as the minifier. Terser is slower than esbuild, but offers more fine-grained control over what to minify.
|
||||
Add `minify: 'terser'` with `terserOptions.mangle.keep_classnames: /^Migrations\d+$/` and `terserOptions.compress.keep_classnames: /^Migrations\d+$/` to your config.
|
||||
These options make sure class names that start with "Migrations" and end with numbers are not renamed during minification.
|
||||
|
||||
Make sure terser is available as dev dependency in your project: `npm add -D terser`.
|
||||
|
||||
```diff
|
||||
--- basic vite.config.ts
|
||||
+++ terser keep migration class names vite.config.ts
|
||||
@@ -7,6 +7,11 @@
|
||||
export default defineConfig({
|
||||
build: {
|
||||
sourcemap: true,
|
||||
+ minify: 'terser',
|
||||
+ terserOptions: {
|
||||
+ mangle: { keep_classnames: /^Migrations\d+$/ },
|
||||
+ compress: { keep_classnames: /^Migrations\d+$/ },
|
||||
+ },
|
||||
},
|
||||
plugins: [vue(), legacy()],
|
||||
resolve: {
|
||||
```
|
||||
@ -2,12 +2,12 @@
|
||||
|
||||
## What is a QueryRunner?
|
||||
|
||||
Each new `QueryRunner` instance takes a single connection from connection pool, if RDBMS supports connection pooling.
|
||||
For databases not supporting connection pools, it uses the same connection across the entire data source.
|
||||
Each new `QueryRunner` instance takes a single connection from the connection pool, if the RDBMS supports connection pooling.
|
||||
For databases that do not support connection pools, it uses the same connection across the entire data source.
|
||||
|
||||
## Creating a new `QueryRunner` instance
|
||||
|
||||
Use `createQueryRunner` method to create a new `QueryRunner`:
|
||||
Use the `createQueryRunner` method to create a new `QueryRunner`:
|
||||
|
||||
```typescript
|
||||
const queryRunner = dataSource.createQueryRunner()
|
||||
|
||||
@ -2,6 +2,7 @@ import { themes as prismThemes } from "prism-react-renderer"
|
||||
import type { Config } from "@docusaurus/types"
|
||||
import type * as Preset from "@docusaurus/preset-classic"
|
||||
import { redirects } from "./redirects"
|
||||
import { LLMsTXTPluginOptions } from "@signalwire/docusaurus-plugin-llms-txt"
|
||||
|
||||
// This runs in Node.js - Don't use client-side code here (browser APIs, JSX...)
|
||||
|
||||
@ -197,7 +198,33 @@ const config: Config = {
|
||||
redirects,
|
||||
},
|
||||
],
|
||||
"@signalwire/docusaurus-plugin-llms-txt",
|
||||
[
|
||||
"@signalwire/docusaurus-plugin-llms-txt",
|
||||
{
|
||||
content: {
|
||||
// https://www.npmjs.com/package/@signalwire/docusaurus-plugin-llms-txt#content-selectors
|
||||
contentSelectors: [
|
||||
".theme-doc-markdown", // Docusaurus main content area
|
||||
"main .container .col", // Bootstrap-style layout
|
||||
"main .theme-doc-wrapper", // Docusaurus wrapper
|
||||
"article", // Semantic article element
|
||||
"main .container", // Broader container
|
||||
"main", // Fallback to main element
|
||||
".code-example",
|
||||
],
|
||||
enableLlmsFullTxt: true,
|
||||
includeGeneratedIndex: false,
|
||||
includePages: true,
|
||||
includeVersionedDocs: false,
|
||||
relativePaths: false,
|
||||
},
|
||||
depth: 3,
|
||||
onRouteError: "throw",
|
||||
siteTitle: "TypeORM",
|
||||
siteDescription:
|
||||
"TypeORM is an ORM that can run in NodeJS, Browser, Cordova, Ionic, React Native, NativeScript, Expo, and Electron platforms and can be used with TypeScript and JavaScript.",
|
||||
} satisfies LLMsTXTPluginOptions,
|
||||
],
|
||||
],
|
||||
}
|
||||
|
||||
|
||||
12
docs/package-lock.json
generated
12
docs/package-lock.json
generated
@ -8639,9 +8639,9 @@
|
||||
}
|
||||
},
|
||||
"node_modules/gray-matter/node_modules/js-yaml": {
|
||||
"version": "3.14.1",
|
||||
"resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-3.14.1.tgz",
|
||||
"integrity": "sha512-okMH7OXXJ7YrN9Ok3/SXrnu4iX9yOk+25nqX4imS2npuvTYDmo/QEZoqwZkYaIDk3jVvBOTOIEgEhaLOynBS9g==",
|
||||
"version": "3.14.2",
|
||||
"resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-3.14.2.tgz",
|
||||
"integrity": "sha512-PMSmkqxr106Xa156c2M265Z+FTrPl+oxd/rgOQy2tijQeK5TxQ43psO1ZCwhVOSdnn+RzkzlRz/eY4BgJBYVpg==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"argparse": "^1.0.7",
|
||||
@ -9981,9 +9981,9 @@
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/js-yaml": {
|
||||
"version": "4.1.0",
|
||||
"resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.0.tgz",
|
||||
"integrity": "sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==",
|
||||
"version": "4.1.1",
|
||||
"resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.1.tgz",
|
||||
"integrity": "sha512-qQKT4zQxXl8lLwBtHMWwaTcGfFOZviOJet3Oy/xmGk2gZH677CJM9EvtfdSkgWcATZhj/55JZ0rmy3myCT5lsA==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"argparse": "^2.0.1"
|
||||
|
||||
@ -101,7 +101,7 @@ export const redirects = [
|
||||
},
|
||||
{ from: "/data-source-api", to: "/docs/data-source/data-source-api" },
|
||||
|
||||
{ from: "/migrations", to: "/docs/advanced-topics/migrations" },
|
||||
{ from: "/migrations", to: "/docs/migrations/why" },
|
||||
{ from: "/transactions", to: "/docs/advanced-topics/transactions" },
|
||||
{ from: "/indices", to: "/docs/advanced-topics/indices" },
|
||||
{
|
||||
@ -123,4 +123,5 @@ export const redirects = [
|
||||
{ from: "/getting-started", to: "/docs/getting-started" },
|
||||
{ from: "/future-of-typeorm", to: "/docs/future-of-typeorm" },
|
||||
{ from: "/query-runner", to: "/docs/query-runner" },
|
||||
{ from: "/docs/advanced-topics/migrations", to: "/docs/migrations/why" },
|
||||
]
|
||||
|
||||
@ -35,6 +35,11 @@ const sidebars: SidebarsConfig = {
|
||||
label: "Relations",
|
||||
items: [{ type: "autogenerated", dirName: "relations" }],
|
||||
},
|
||||
{
|
||||
type: "category",
|
||||
label: "Migrations",
|
||||
items: [{ type: "autogenerated", dirName: "migrations" }],
|
||||
},
|
||||
{
|
||||
type: "category",
|
||||
label: "Working with Entity Manager",
|
||||
|
||||
@ -1,7 +0,0 @@
|
||||
---
|
||||
title: Markdown page example
|
||||
---
|
||||
|
||||
# Markdown page example
|
||||
|
||||
You don't need React to write simple standalone pages.
|
||||
@ -1,9 +1,9 @@
|
||||
import eslint from "@eslint/js"
|
||||
import pluginChaiFriendly from "eslint-plugin-chai-friendly"
|
||||
import js from "@eslint/js"
|
||||
import chaiFriendly from "eslint-plugin-chai-friendly"
|
||||
import { jsdoc } from "eslint-plugin-jsdoc"
|
||||
import { defineConfig, globalIgnores } from "eslint/config"
|
||||
import globals from "globals"
|
||||
import tseslint from "typescript-eslint"
|
||||
import ts from "typescript-eslint"
|
||||
|
||||
export default defineConfig([
|
||||
globalIgnores([
|
||||
@ -13,10 +13,11 @@ export default defineConfig([
|
||||
"sample/playground/**",
|
||||
"temp/**",
|
||||
]),
|
||||
|
||||
{
|
||||
files: ["**/*.ts"],
|
||||
languageOptions: {
|
||||
parser: tseslint.parser,
|
||||
parser: ts.parser,
|
||||
parserOptions: {
|
||||
project: "tsconfig.json",
|
||||
},
|
||||
@ -25,9 +26,13 @@ export default defineConfig([
|
||||
...globals.node,
|
||||
},
|
||||
},
|
||||
plugins: {
|
||||
js,
|
||||
ts,
|
||||
},
|
||||
extends: [
|
||||
eslint.configs.recommended,
|
||||
...tseslint.configs.recommendedTypeChecked,
|
||||
js.configs.recommended,
|
||||
...ts.configs.recommendedTypeChecked,
|
||||
],
|
||||
rules: {
|
||||
// exceptions from typescript-eslint/recommended
|
||||
@ -41,7 +46,10 @@ export default defineConfig([
|
||||
"@typescript-eslint/no-unsafe-function-type": "warn",
|
||||
"@typescript-eslint/no-unused-vars": [
|
||||
"warn",
|
||||
{ argsIgnorePattern: "^_" },
|
||||
{
|
||||
argsIgnorePattern: "^_",
|
||||
destructuredArrayIgnorePattern: "^_"
|
||||
},
|
||||
],
|
||||
"@typescript-eslint/no-wrapper-object-types": "off",
|
||||
"prefer-const": ["error", { destructuring: "all" }],
|
||||
@ -80,12 +88,19 @@ export default defineConfig([
|
||||
"no-regex-spaces": "warn",
|
||||
},
|
||||
},
|
||||
|
||||
jsdoc({
|
||||
files: ["src/**/*.ts"],
|
||||
config: "flat/recommended-typescript", // change to 'flat/recommended-typescript-error' once warnings are fixed
|
||||
// Temporarily enable individual rules when they are fixed, until all current warnings are gone,
|
||||
// and then remove manual config in favor of `config: "flat/recommended-typescript-error"`
|
||||
rules: {
|
||||
"jsdoc/valid-types": "error"
|
||||
}
|
||||
}),
|
||||
|
||||
{
|
||||
files: ["test/**/*.ts"],
|
||||
...pluginChaiFriendly.configs.recommendedFlat,
|
||||
...chaiFriendly.configs.recommendedFlat,
|
||||
},
|
||||
])
|
||||
|
||||
13
gulpfile.ts
13
gulpfile.ts
@ -189,6 +189,16 @@ export class Gulpfile {
|
||||
.pipe(gulp.dest("./build/package"));
|
||||
}
|
||||
|
||||
/**
|
||||
* Move reference to package.json one level up
|
||||
*/
|
||||
@Task()
|
||||
movePackageJsonReferenceLevelUp() {
|
||||
return gulp.src("./build/package/commands/InitCommand.js")
|
||||
.pipe(replace(/\.\.\/package.json/g, "package.json"))
|
||||
.pipe(gulp.dest("./build/package/commands"));
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a package that can be published to npm.
|
||||
*/
|
||||
@ -206,7 +216,8 @@ export class Gulpfile {
|
||||
"packageReplaceReferences",
|
||||
"packagePreparePackageFile",
|
||||
"packageCopyReadme",
|
||||
"packageCopyShims"
|
||||
"packageCopyShims",
|
||||
"movePackageJsonReferenceLevelUp"
|
||||
],
|
||||
];
|
||||
}
|
||||
|
||||
4069
package-lock.json
generated
4069
package-lock.json
generated
File diff suppressed because it is too large
Load Diff
27
package.json
27
package.json
@ -13,8 +13,8 @@
|
||||
"funding": "https://opencollective.com/typeorm",
|
||||
"license": "MIT",
|
||||
"author": {
|
||||
"name": "Umed Khudoiberdiev",
|
||||
"email": "pleerock.me@gmail.com"
|
||||
"name": "TypeORM",
|
||||
"email": "maintainers@typeorm.io"
|
||||
},
|
||||
"exports": {
|
||||
".": {
|
||||
@ -82,7 +82,7 @@
|
||||
"docs:dev": "cd docs && npm run start",
|
||||
"format": "prettier --cache --write \"./**/*.ts\"",
|
||||
"format:ci": "prettier --check \"./**/*.ts\"",
|
||||
"lint": "eslint .",
|
||||
"lint": "eslint",
|
||||
"package": "gulp package",
|
||||
"pre-commit": "lint-staged",
|
||||
"prepare": "is-ci || husky",
|
||||
@ -102,6 +102,7 @@
|
||||
"debug": "^4.4.3",
|
||||
"dedent": "^1.7.0",
|
||||
"dotenv": "^17.2.3",
|
||||
"glob": "^10.5.0",
|
||||
"reflect-metadata": "^0.2.2",
|
||||
"sha.js": "^2.4.12",
|
||||
"sql-highlight": "^6.1.0",
|
||||
@ -111,7 +112,7 @@
|
||||
"yargs": "^18.0.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@eslint/js": "^9.36.0",
|
||||
"@eslint/js": "^9.39.1",
|
||||
"@google-cloud/spanner": "^8.2.1",
|
||||
"@sap/hana-client": "^2.26.18",
|
||||
"@tsconfig/node20": "^20.1.6",
|
||||
@ -128,13 +129,14 @@
|
||||
"@types/source-map-support": "^0.5.10",
|
||||
"@types/yargs": "^17.0.33",
|
||||
"better-sqlite3": "^12.4.1",
|
||||
"c8": "^10.1.3",
|
||||
"chai": "^6.2.0",
|
||||
"chai-as-promised": "^8.0.2",
|
||||
"class-transformer": "^0.5.1",
|
||||
"eslint": "^9.36.0",
|
||||
"eslint": "^9.39.1",
|
||||
"eslint-plugin-chai-friendly": "^1.1.0",
|
||||
"eslint-plugin-jsdoc": "^60.7.0",
|
||||
"globals": "^16.4.0",
|
||||
"eslint-plugin-jsdoc": "^61.1.12",
|
||||
"globals": "^16.5.0",
|
||||
"gulp": "^4.0.2",
|
||||
"gulp-rename": "^2.1.0",
|
||||
"gulp-replace": "^1.1.4",
|
||||
@ -144,13 +146,12 @@
|
||||
"gulpclass": "^0.2.0",
|
||||
"husky": "^9.1.7",
|
||||
"is-ci": "^4.1.0",
|
||||
"lint-staged": "^16.2.3",
|
||||
"lint-staged": "^16.2.6",
|
||||
"mocha": "^11.7.3",
|
||||
"mongodb": "^6.20.0",
|
||||
"mssql": "^12.0.0",
|
||||
"mysql": "^2.18.1",
|
||||
"mysql2": "^3.15.1",
|
||||
"nyc": "^17.1.0",
|
||||
"oracledb": "^6.9.0",
|
||||
"pg": "^8.16.3",
|
||||
"pg-query-stream": "^4.10.3",
|
||||
@ -167,7 +168,7 @@
|
||||
"standard-changelog": "^7.0.1",
|
||||
"ts-node": "^10.9.2",
|
||||
"typescript": "^5.9.2",
|
||||
"typescript-eslint": "^8.45.0"
|
||||
"typescript-eslint": "^8.46.3"
|
||||
},
|
||||
"peerDependencies": {
|
||||
"@google-cloud/spanner": "^8.0.0",
|
||||
@ -245,6 +246,12 @@
|
||||
"url": "https://opencollective.com/typeorm",
|
||||
"logo": "https://opencollective.com/opencollective/logo.txt"
|
||||
},
|
||||
"devEngines": {
|
||||
"packageManager": {
|
||||
"name": "npm",
|
||||
"onFail": "error"
|
||||
}
|
||||
},
|
||||
"readmeFilename": "README.md",
|
||||
"tags": [
|
||||
"orm",
|
||||
|
||||
@ -1,5 +1,5 @@
|
||||
import "reflect-metadata"
|
||||
import { DataSource, DataSourceOptions } from "../../src/index"
|
||||
import { DataSource, DataSourceOptions } from "../../src"
|
||||
import { Post } from "./entity/Post"
|
||||
|
||||
const options: DataSourceOptions = {
|
||||
|
||||
@ -1,5 +1,5 @@
|
||||
import "reflect-metadata"
|
||||
import { DataSource, DataSourceOptions } from "../../src/index"
|
||||
import { DataSource, DataSourceOptions } from "../../src"
|
||||
import { Post } from "./entity/Post"
|
||||
import { PostDetails } from "./entity/PostDetails"
|
||||
import { Image } from "./entity/Image"
|
||||
@ -49,136 +49,7 @@ dataSource
|
||||
|
||||
postRepository
|
||||
.save(post)
|
||||
.then((result) => {
|
||||
/*const qb = postRepository.createQueryBuilder("post")
|
||||
.leftJoinAndSelect("post.details", "details")
|
||||
.leftJoinAndSelect("post.images", "images")
|
||||
// .leftJoinAndSelect("post.coverId", "coverId")
|
||||
.leftJoinAndSelect("post.categories", "categories")
|
||||
.where("post.id=:id")
|
||||
.setParameter("id", 6);
|
||||
|
||||
return qb
|
||||
.getSingleResult()
|
||||
.then(post => {
|
||||
console.log("loaded post: ", post);
|
||||
|
||||
let category1 = new Category();
|
||||
category1.id = 12;
|
||||
category1.description = "about cat#12";
|
||||
|
||||
let category2 = new Category();
|
||||
category2.id = 52;
|
||||
category2.description = "about cat#52";
|
||||
|
||||
let image = new Image();
|
||||
image.name = "second image of the post";
|
||||
|
||||
//post
|
||||
post.title = "This! is updated post$";
|
||||
post.text = "Hello world of post#4";
|
||||
post.categories = [category2, category1];
|
||||
post.images.push(image);
|
||||
return postRepository.save(post);
|
||||
|
||||
})
|
||||
.then(() => qb.getSingleResult())
|
||||
.then(reloadedPost => console.log("reloadedPost: ", reloadedPost));*/
|
||||
})
|
||||
.then((result) => console.log(result))
|
||||
.catch((error) => console.log(error.stack ? error.stack : error))
|
||||
|
||||
return
|
||||
|
||||
/*const postJson = {
|
||||
id: 1, // changed
|
||||
text: "This is post about hello", // changed
|
||||
title: "hello", // changed
|
||||
details: { // new relation added
|
||||
id: 10, // new object persisted
|
||||
comment: "This is post about hello",
|
||||
meta: "about-hello!",
|
||||
chapter: {
|
||||
id: 1, // new object persisted
|
||||
about: "part I"
|
||||
},
|
||||
categories: [{
|
||||
id: 5, // new object persisted
|
||||
description: "cat5"
|
||||
}]
|
||||
},
|
||||
cover: null, // relation removed
|
||||
images: [{ // new relation added
|
||||
id: 4, // new object persisted
|
||||
name: "post!.jpg",
|
||||
secondaryPost: {
|
||||
id: 2,
|
||||
title: "secondary post"
|
||||
}
|
||||
}, { // secondaryPost relation removed
|
||||
id: 3,
|
||||
name: "post_2!.jpg", // changed
|
||||
details: { // new relation added
|
||||
id: 3, // new object persisted
|
||||
meta: "sec image",
|
||||
comment: "image sec"
|
||||
}
|
||||
}],
|
||||
categories: [{ // two categories removed, new category added
|
||||
id: 4, // new persisted
|
||||
description: "cat2"
|
||||
}]
|
||||
};
|
||||
|
||||
let entity = postRepository.create(postJson);
|
||||
return postRepository.initialize(postJson)
|
||||
.then(result => {
|
||||
const mergedEntity = postRepository.merge(result, entity);
|
||||
console.log("entity created from json: ", entity);
|
||||
console.log("entity initialized from db: ", result);
|
||||
console.log("entity merged: ", mergedEntity);
|
||||
const diff = postRepository.difference(result, mergedEntity);
|
||||
console.log("diff: ", diff);
|
||||
//console.log("diff[0]: ", diff[0].removedRelations);
|
||||
})
|
||||
.catch(error => console.log(error.stack ? error.stack : error));
|
||||
|
||||
let qb = postRepository
|
||||
.createQueryBuilder("post")
|
||||
.addSelect("cover")
|
||||
.addSelect("image")
|
||||
.addSelect("imageDetails")
|
||||
.addSelect("secondaryImage")
|
||||
.addSelect("category")
|
||||
.innerJoin("post.coverId", "cover")
|
||||
.leftJoin("post.images", "image")
|
||||
.leftJoin("post.secondaryImages", "secondaryImage")
|
||||
.leftJoin("image.details", "imageDetails", "on", "imageDetails.meta=:meta")
|
||||
.leftJoin("post.categories", "category", "on", "category.description=:description")
|
||||
//.leftJoin(Image, "image", "on", "image.post=post.id")
|
||||
//.where("post.id=:id")
|
||||
.setParameter("id", 1)
|
||||
.setParameter("description", "cat2")
|
||||
.setParameter("meta", "sec image");
|
||||
|
||||
return qb
|
||||
.getSingleResult()
|
||||
.then(post => console.log(post))
|
||||
// .then(result => console.log(JSON.stringify(result, null, 4)))
|
||||
.catch(error => console.log(error.stack ? error.stack : error));*/
|
||||
|
||||
/*let details = new PostDetails();
|
||||
details.comment = "This is post about hello";
|
||||
details.meta = "about-hello";
|
||||
|
||||
const post = new Post();
|
||||
post.text = "Hello how are you?";
|
||||
post.title = "hello";
|
||||
//post.details = details;
|
||||
|
||||
postRepository
|
||||
.save(post)
|
||||
.then(post => console.log("Post has been saved"))
|
||||
.catch(error => console.log("Cannot save. Error: ", error));*/
|
||||
})
|
||||
.catch((error) => console.log(error.stack ? error.stack : error))
|
||||
|
||||
@@ -1,5 +1,5 @@
import "reflect-metadata"
import { DataSource, DataSourceOptions } from "../../src/index"
import { DataSource, DataSourceOptions } from "../../src"
import { EverythingEntity, SampleEnum } from "./entity/EverythingEntity"

const options: DataSourceOptions = {

@@ -99,7 +99,7 @@ dataSource.initialize().then(
console.log("Now remove it")
return postRepository.remove(entity!)
})
.then((entity) => {
.then(() => {
console.log("Entity has been removed")
})
.catch((error) =>

@@ -1,5 +1,5 @@
import "reflect-metadata"
import { DataSource, DataSourceOptions } from "../../src/index"
import { DataSource, DataSourceOptions } from "../../src"
import { Post } from "./entity/Post"
import { CustomNamingStrategy } from "./naming-strategy/CustomNamingStrategy"

@@ -26,7 +26,7 @@ dataSource.initialize().then(

postRepository
.save(post)
.then((post) => console.log("Post has been saved"))
.then(() => console.log("Post has been saved"))
.catch((error) => console.log("Cannot save. Error: ", error))
},
(error) => console.log("Cannot connect: ", error),

@@ -1,5 +1,5 @@
import "reflect-metadata"
import { DataSource, DataSourceOptions } from "../../src/index"
import { DataSource, DataSourceOptions } from "../../src"
import { Post } from "./entity/Post"
import { PostCategory } from "./entity/PostCategory"
import { PostAuthor } from "./entity/PostAuthor"

@@ -9,7 +9,7 @@ export class Blog extends BaseObject {
@Column()
text: string

@ManyToMany((type) => PostCategory, (category) => category.posts, {
@ManyToMany(() => PostCategory, (category) => category.posts, {
cascade: true,
})
@JoinTable()

@@ -9,7 +9,7 @@ export class Post extends BaseObject {
@Column()
text: string

@ManyToMany((type) => PostCategory, (category) => category.posts, {
@ManyToMany(() => PostCategory, (category) => category.posts, {
cascade: true,
})
@JoinTable()

@@ -10,7 +10,7 @@ export class PostCategory {
@Column()
name: string

@ManyToMany((type) => Post, (post) => post.categories, {
@ManyToMany(() => Post, (post) => post.categories, {
cascade: true,
})
posts: Post[] = []

@@ -1,5 +1,5 @@
import "reflect-metadata"
import { DataSource, DataSourceOptions } from "../../src/index"
import { DataSource, DataSourceOptions } from "../../src"
import { Post } from "./entity/Post"
import { PostAuthor } from "./entity/PostAuthor"

@@ -29,7 +29,7 @@ dataSource.initialize().then(

postRepository
.save(post)
.then((post) => console.log("Post has been saved"))
.then(() => console.log("Post has been saved"))
.catch((error) => console.log("Cannot save. Error: ", error))
},
(error) => console.log("Cannot connect: ", error),

@@ -35,7 +35,7 @@ export class Post {
// @JoinTable() // uncomment this and you'll get an error because JoinTable is not allowed here (only many-to-many)
editors: PostAuthor[]

@ManyToMany((type) => PostAuthor, (author) => author.manyPosts)
@ManyToMany(() => PostAuthor, (author) => author.manyPosts)
@JoinTable() // comment this and you'll get an error because JoinTable must be at least on one side of the many-to-many relationship
manyAuthors: PostAuthor[]
}

@@ -1,5 +1,5 @@
import "reflect-metadata"
import { DataSource, DataSourceOptions } from "../../src/index"
import { DataSource, DataSourceOptions } from "../../src"
import { Post } from "./entity/Post"
import { BasePost } from "./entity/BasePost"

@@ -1,5 +1,5 @@
import "reflect-metadata"
import { DataSource, DataSourceOptions } from "../../src/index"
import { DataSource, DataSourceOptions } from "../../src"
import { Post } from "./entity/Post"

const options: DataSourceOptions = {
@ -1,5 +1,5 @@
|
||||
import "reflect-metadata"
|
||||
import { DataSource, DataSourceOptions } from "../../src/index"
|
||||
import { DataSource, DataSourceOptions } from "../../src"
|
||||
import { Post } from "./entity/Post"
|
||||
import { Author } from "./entity/Author"
|
||||
import { Category } from "./entity/Category"
|
||||
@ -47,13 +47,13 @@ dataSource.initialize().then(
|
||||
|
||||
return authorRepository.save(author)
|
||||
})
|
||||
.then((author: any) => {
|
||||
.then((author) => {
|
||||
// temporary
|
||||
console.log(
|
||||
"Author with a new post has been saved. Lets try to update post in the author",
|
||||
)
|
||||
|
||||
return author.posts!.then((posts: any) => {
|
||||
return author.posts!.then((posts) => {
|
||||
// temporary
|
||||
posts![0]!.title = "should be updated second post"
|
||||
return authorRepository.save(author!)
|
||||
@ -76,7 +76,7 @@ dataSource.initialize().then(
|
||||
posts[1].author = Promise.resolve(null)
|
||||
return postRepository.save(posts[0])
|
||||
})
|
||||
.then((posts) => {
|
||||
.then(() => {
|
||||
console.log("Two post's author has been removed.")
|
||||
console.log("Now lets check many-to-many relations")
|
||||
|
||||
@ -93,7 +93,7 @@ dataSource.initialize().then(
|
||||
|
||||
return postRepository.save(post)
|
||||
})
|
||||
.then((posts) => {
|
||||
.then(() => {
|
||||
console.log("Post has been saved with its categories. ")
|
||||
console.log("Lets find it now. ")
|
||||
return postRepository.find({
|
||||
@ -106,14 +106,13 @@ dataSource.initialize().then(
|
||||
.then((posts) => {
|
||||
console.log("Post with categories are loaded: ", posts)
|
||||
console.log("Lets remove one of the categories: ")
|
||||
return posts[0].categories.then((categories: any) => {
|
||||
return posts[0].categories.then((categories) => {
|
||||
// temporary
|
||||
categories!.splice(0, 1)
|
||||
// console.log(posts[0]);
|
||||
return postRepository.save(posts[0])
|
||||
})
|
||||
})
|
||||
.then((posts) => {
|
||||
.then(() => {
|
||||
console.log("One of the post category has been removed.")
|
||||
})
|
||||
.catch((error) => console.log(error.stack))
|
||||
|
||||
@ -10,6 +10,6 @@ export class Category {
|
||||
@Column()
|
||||
name: string
|
||||
|
||||
@ManyToMany((type) => Post, (post) => post.categories)
|
||||
@ManyToMany(() => Post, (post) => post.categories)
|
||||
posts: Promise<Post[]>
|
||||
}
|
||||
|
||||
@ -1,5 +1,5 @@
|
||||
import "reflect-metadata"
|
||||
import { DataSource, DataSourceOptions } from "../../src/index"
|
||||
import { DataSource, DataSourceOptions } from "../../src"
|
||||
import { Post } from "./entity/Post"
|
||||
import { Author } from "./entity/Author"
|
||||
import { Category } from "./entity/Category"
|
||||
@ -61,70 +61,10 @@ dataSource.initialize().then(
|
||||
},
|
||||
},
|
||||
})
|
||||
|
||||
// let secondPost = postRepository.create();
|
||||
// secondPost.text = "Second post";
|
||||
// secondPost.title = "About second post";
|
||||
// return authorRepository.save(author);
|
||||
})
|
||||
.then((post) => {
|
||||
console.log("Loaded posts: ", post)
|
||||
})
|
||||
/* posts[0].title = "should be updated second post";
|
||||
|
||||
return author.posts.then(posts => {
|
||||
return authorRepository.save(author);
|
||||
});
|
||||
})
|
||||
.then(updatedAuthor => {
|
||||
console.log("Author has been updated: ", updatedAuthor);
|
||||
console.log("Now lets load all posts with their authors:");
|
||||
return postRepository.find({ alias: "post", leftJoinAndSelect: { author: "post.author" } });
|
||||
})
|
||||
.then(posts => {
|
||||
console.log("Posts are loaded: ", posts);
|
||||
console.log("Now lets delete a post");
|
||||
posts[0].author = Promise.resolve(null);
|
||||
posts[1].author = Promise.resolve(null);
|
||||
return postRepository.save(posts[0]);
|
||||
})
|
||||
.then(posts => {
|
||||
console.log("Two post's author has been removed.");
|
||||
console.log("Now lets check many-to-many relations");
|
||||
|
||||
let category1 = categoryRepository.create();
|
||||
category1.name = "Hello category1";
|
||||
|
||||
let category2 = categoryRepository.create();
|
||||
category2.name = "Bye category2";
|
||||
|
||||
let post = postRepository.create();
|
||||
post.title = "Post & Categories";
|
||||
post.text = "Post with many categories";
|
||||
post.categories = Promise.resolve([
|
||||
category1,
|
||||
category2
|
||||
]);
|
||||
|
||||
return postRepository.save(post);
|
||||
})
|
||||
.then(posts => {
|
||||
console.log("Post has been saved with its categories. ");
|
||||
console.log("Lets find it now. ");
|
||||
return postRepository.find({ alias: "post", innerJoinAndSelect: { categories: "post.categories" } });
|
||||
})
|
||||
.then(posts => {
|
||||
console.log("Post with categories are loaded: ", posts);
|
||||
console.log("Lets remove one of the categories: ");
|
||||
return posts[0].categories.then(categories => {
|
||||
categories.splice(0, 1);
|
||||
// console.log(posts[0]);
|
||||
return postRepository.save(posts[0]);
|
||||
});
|
||||
})*/
|
||||
.then((posts) => {
|
||||
// console.log("One of the post category has been removed.");
|
||||
})
|
||||
.catch((error) => console.log(error.stack))
|
||||
},
|
||||
(error) => console.log("Cannot connect: ", error),
|
||||
|
||||
@ -1,5 +1,5 @@
|
||||
import "reflect-metadata"
|
||||
import { DataSource, DataSourceOptions } from "../../src/index"
|
||||
import { DataSource, DataSourceOptions } from "../../src"
|
||||
import { Post } from "./entity/Post"
|
||||
import { PostDetails } from "./entity/PostDetails"
|
||||
import { PostCategory } from "./entity/PostCategory"
|
||||
@ -45,7 +45,7 @@ dataSource.initialize().then(
|
||||
|
||||
postRepository
|
||||
.save(post)
|
||||
.then((post) => {
|
||||
.then(() => {
|
||||
console.log(
|
||||
"Post has been saved. Lets try to find this post using query builder: ",
|
||||
)
|
||||
|
||||
@ -1,5 +1,5 @@
|
||||
import "reflect-metadata"
|
||||
import { DataSource, DataSourceOptions } from "../../src/index"
|
||||
import { DataSource, DataSourceOptions } from "../../src"
|
||||
import { Post } from "./entity/Post"
|
||||
import { Author } from "./entity/Author"
|
||||
import { Category } from "./entity/Category"
|
||||
@ -38,7 +38,6 @@ dataSource.initialize().then(
|
||||
post.text = "Hello how are you?"
|
||||
post.title = "hello"
|
||||
post.authorId = 1
|
||||
// post.author = author;
|
||||
post.categories = [category1, category2]
|
||||
|
||||
Promise.all<any>([
|
||||
@ -78,61 +77,6 @@ dataSource.initialize().then(
|
||||
.then((authors) => {
|
||||
console.log("Loaded authors: ", authors)
|
||||
})
|
||||
/* posts[0].title = "should be updated second post";
|
||||
|
||||
return author.posts.then(posts => {
|
||||
return authorRepository.save(author);
|
||||
});
|
||||
})
|
||||
.then(updatedAuthor => {
|
||||
console.log("Author has been updated: ", updatedAuthor);
|
||||
console.log("Now lets load all posts with their authors:");
|
||||
return postRepository.find({ alias: "post", leftJoinAndSelect: { author: "post.author" } });
|
||||
})
|
||||
.then(posts => {
|
||||
console.log("Posts are loaded: ", posts);
|
||||
console.log("Now lets delete a post");
|
||||
posts[0].author = Promise.resolve(null);
|
||||
posts[1].author = Promise.resolve(null);
|
||||
return postRepository.save(posts[0]);
|
||||
})
|
||||
.then(posts => {
|
||||
console.log("Two post's author has been removed.");
|
||||
console.log("Now lets check many-to-many relations");
|
||||
|
||||
let category1 = categoryRepository.create();
|
||||
category1.name = "Hello category1";
|
||||
|
||||
let category2 = categoryRepository.create();
|
||||
category2.name = "Bye category2";
|
||||
|
||||
let post = postRepository.create();
|
||||
post.title = "Post & Categories";
|
||||
post.text = "Post with many categories";
|
||||
post.categories = Promise.resolve([
|
||||
category1,
|
||||
category2
|
||||
]);
|
||||
|
||||
return postRepository.save(post);
|
||||
})
|
||||
.then(posts => {
|
||||
console.log("Post has been saved with its categories. ");
|
||||
console.log("Lets find it now. ");
|
||||
return postRepository.find({ alias: "post", innerJoinAndSelect: { categories: "post.categories" } });
|
||||
})
|
||||
.then(posts => {
|
||||
console.log("Post with categories are loaded: ", posts);
|
||||
console.log("Lets remove one of the categories: ");
|
||||
return posts[0].categories.then(categories => {
|
||||
categories.splice(0, 1);
|
||||
// console.log(posts[0]);
|
||||
return postRepository.save(posts[0]);
|
||||
});
|
||||
})*/
|
||||
.then((posts) => {
|
||||
// console.log("One of the post category has been removed.");
|
||||
})
|
||||
.catch((error) => console.log(error.stack))
|
||||
},
|
||||
(error) => console.log("Cannot connect: ", error),
|
||||
|
||||
@ -18,7 +18,7 @@ export class Post {
|
||||
@Column("int")
|
||||
authorId: number
|
||||
|
||||
@ManyToMany((type) => Category)
|
||||
@ManyToMany(() => Category)
|
||||
@JoinTable()
|
||||
categories: Category[]
|
||||
|
||||
|
||||
@@ -1,5 +1,5 @@
import "reflect-metadata"
import { DataSource, DataSourceOptions } from "../../src/index"
import { DataSource, DataSourceOptions } from "../../src"
import { Post } from "./entity/Post"
import { Author } from "./entity/Author"
import { Category } from "./entity/Category"

@@ -38,7 +38,7 @@ dataSource.initialize().then(

postRepository
.save(post)
.then((post) => {
.then(() => {
console.log("Post has been saved. Lets load it now.")
return postRepository.find({
join: {

@@ -10,6 +10,6 @@ export class Category {
@Column()
name: string

@ManyToMany((type) => Post, (post) => post.categories)
@ManyToMany(() => Post, (post) => post.categories)
posts: Post[]
}

@@ -1,5 +1,5 @@
import "reflect-metadata"
import { DataSource, DataSourceOptions } from "../../src/index"
import { DataSource, DataSourceOptions } from "../../src"
import { Category } from "./entity/Category"

const options: DataSourceOptions = {

@@ -39,7 +39,7 @@ dataSource.initialize().then(

return categoryRepository
.save(category1)
.then((category) => {
.then(() => {
console.log(
"Categories has been saved. Lets now load it and all its descendants:",
)

@@ -1,5 +1,5 @@
import "reflect-metadata"
import { DataSource, DataSourceOptions } from "../../src/index"
import { DataSource, DataSourceOptions } from "../../src"
import { Post } from "./entity/Post"
import { Author } from "./entity/Author"
import { Category } from "./entity/Category"

@@ -41,7 +41,7 @@ dataSource.initialize().then(

postRepository
.save(post)
.then((post) => {
.then(() => {
return postRepository
.createQueryBuilder("post")
.leftJoin("post.categories", "categories")

@@ -63,7 +63,7 @@ dataSource.initialize().then(

return postRepository.save(post)
})
.then((updatedPost) => {
.then(() => {
return postRepository
.createQueryBuilder("post")
.leftJoinAndSelect("post.author", "author")

@@ -80,7 +80,7 @@ dataSource.initialize().then(
loadedPost!.author = author
return postRepository.save(loadedPost!)
})
.then((updatedPost) => {
.then(() => {
return postRepository
.createQueryBuilder("post")
.leftJoinAndSelect("post.author", "author")

@@ -94,7 +94,7 @@ dataSource.initialize().then(
post.author = null
return postRepository.save(post)
})
.then((updatedPost) => {
.then(() => {
return postRepository
.createQueryBuilder("post")
.leftJoinAndSelect("post.author", "author")

@@ -106,7 +106,7 @@ dataSource.initialize().then(
post.author = author2
return postRepository.save(post)
})
.then((updatedPost) => {
.then(() => {
return postRepository
.createQueryBuilder("post")
.leftJoinAndSelect("post.author", "author")

@@ -11,7 +11,7 @@ export class Category {
@Column()
name: string

@ManyToMany((type) => Author)
@ManyToMany(() => Author)
@JoinTable()
author: Author
}
@@ -1,5 +1,5 @@
import "reflect-metadata"
import { DataSource, DataSourceOptions } from "../../src/index"
import { DataSource, DataSourceOptions } from "../../src"
import { Post } from "./entity/Post"
import { Author } from "./entity/Author"

@@ -27,7 +27,7 @@ dataSource.initialize().then(
if (!author) {
author = new Author()
author.name = "Umed"
return authorRepository.save(author).then((savedAuthor) => {
return authorRepository.save(author).then(() => {
return authorRepository.findOneBy({ id: 1 })
})
}

@@ -39,7 +39,7 @@ dataSource.initialize().then(
post = new Post()
post.title = "Hello post"
post.text = "This is post contents"
return postRepository.save(post).then((savedPost) => {
return postRepository.save(post).then(() => {
return postRepository.findOneBy({ id: 1 })
})
}

@@ -1,5 +1,5 @@
import "reflect-metadata"
import { DataSource, DataSourceOptions } from "../../src/index"
import { DataSource, DataSourceOptions } from "../../src"
import { Post } from "./entity/Post"
import { Question } from "./entity/Question"
import { Counters } from "./entity/Counters"

@@ -12,6 +12,6 @@ export class Post {
@Column()
text: string

@Column((type) => Counters)
@Column(() => Counters)
counters: Counters
}

@@ -9,6 +9,6 @@ export class Question {
@Column()
title: string

@Column((type) => Counters)
@Column(() => Counters)
counters: Counters
}

@@ -1,5 +1,5 @@
import "reflect-metadata"
import { DataSource, DataSourceOptions } from "../../src/index"
import { DataSource, DataSourceOptions } from "../../src"
import { Post } from "./entity/Post"

const options: DataSourceOptions = {

@@ -1,5 +1,5 @@
import "reflect-metadata"
import { DataSource, DataSourceOptions } from "../../src/index"
import { DataSource, DataSourceOptions } from "../../src"
import { Employee } from "./entity/Employee"
import { Homesitter } from "./entity/Homesitter"
import { Student } from "./entity/Student"
@@ -1,5 +1,5 @@
import "reflect-metadata"
import { DataSource, DataSourceOptions } from "../../src/index"
import { DataSource, DataSourceOptions } from "../../src"
import { Post } from "./entity/Post"
import { PostDetails } from "./entity/PostDetails"
import { PostCategory } from "./entity/PostCategory"

@@ -9,11 +9,6 @@ import { PostInformation } from "./entity/PostInformation"
import { PostAuthor } from "./entity/PostAuthor"

const options: DataSourceOptions = {
// type: "mssql",
// host: "192.168.1.10",
// username: "sa",
// password: "admin12345",
// database: "test",
type: "oracle",
host: "localhost",
username: "system",

@@ -51,7 +46,7 @@ dataSource

postRepository
.save(post)
.then((post) => console.log("Post has been saved"))
.then(() => console.log("Post has been saved"))
.catch((error) => console.log("Cannot save. Error: ", error))
})
.catch((error) => console.log("Error: ", error))

@@ -1,5 +1,5 @@
import "reflect-metadata"
import { DataSource, DataSourceOptions } from "../../src/index"
import { DataSource, DataSourceOptions } from "../../src"
import { Post } from "./entity/Post"
import { Category } from "./entity/Category"

@@ -19,7 +19,7 @@ export class Post {
@Column()
text: string

@ManyToMany((type) => Category)
@ManyToMany(() => Category)
@JoinTable()
categories: Category[]

@@ -1,5 +1,5 @@
import "reflect-metadata"
import { DataSource, DataSourceOptions } from "../../src/index"
import { DataSource, DataSourceOptions } from "../../src"
import { Post } from "./entity/Post"
import { Author } from "./entity/Author"
import { Category } from "./entity/Category"

@@ -1,5 +1,5 @@
import "reflect-metadata"
import { DataSource, DataSourceOptions } from "../../src/index"
import { DataSource, DataSourceOptions } from "../../src"
import { Post } from "./entity/Post"
import { Author } from "./entity/Author"

@@ -1,5 +1,5 @@
import "reflect-metadata"
import { DataSource, DataSourceOptions } from "../../src/index"
import { DataSource, DataSourceOptions } from "../../src"
import { Post } from "./entity/Post"

const options: DataSourceOptions = {
sample/sample37-vector-sqlserver/README.md (new file, 96 lines)
@@ -0,0 +1,96 @@
# Vector Type Support in SQL Server

This sample demonstrates how to use the `vector` column type in SQL Server with TypeORM for storing and querying vector embeddings.

## Overview

SQL Server supports the `vector` data type for storing high-dimensional vectors, which is useful for:

- Semantic search with embeddings
- Recommendation systems
- Similarity matching
- Machine learning applications

## Features Demonstrated

1. **Vector Column Definition**: Define columns with specific vector dimensions
2. **Storing Embeddings**: Save vector data as arrays of numbers
3. **Vector Similarity Search**: Use the `VECTOR_DISTANCE` function for cosine similarity

## Entity Definition

```typescript
@Entity("document_chunks")
export class DocumentChunk {
    @PrimaryGeneratedColumn("uuid")
    id: string

    @Column("varchar", { length: "MAX" })
    content: string

    // Vector column with 1998 dimensions
    @Column("vector", { length: 1998 })
    embedding: number[]

    @Column("uuid")
    documentId: string

    @ManyToOne(() => Document, (document) => document.chunks)
    @JoinColumn({ name: "documentId" })
    document: Document
}
```

## Vector Similarity Search

SQL Server provides the `VECTOR_DISTANCE` function for calculating distances between vectors:

```typescript
const queryEmbedding = [
    /* your query vector */
]
const documentIds = ["doc-id-1", "doc-id-2"]

// One placeholder per document id (@1, @2, ...); @0 is the query vector
const docIdParams = documentIds.map((_, i) => `@${i + 1}`).join(", ")

const results = await connection.query(
    `
    DECLARE @question AS VECTOR (1998) = @0;
    SELECT TOP (10) dc.*,
           VECTOR_DISTANCE('cosine', @question, embedding) AS distance,
           d.fileName as "documentName"
    FROM document_chunks dc
    LEFT JOIN documents d ON dc.documentId = d.id
    WHERE documentId IN (${docIdParams})
    ORDER BY VECTOR_DISTANCE('cosine', @question, embedding)
    `,
    [JSON.stringify(queryEmbedding), ...documentIds],
)
```
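If the same search is needed in more than one place, it can help to wrap the query in a small helper and make the distance metric a parameter. The sketch below is illustrative and not part of the sample: the `searchChunks` name and signature are assumptions, and the metric values are the ones listed in the next section.

```typescript
// Illustrative helper (assumed, not part of the sample): runs the similarity
// search above with a configurable VECTOR_DISTANCE metric.
import { DataSource } from "typeorm"

export async function searchChunks(
    dataSource: DataSource,
    queryEmbedding: number[],
    documentIds: string[],
    metric: "cosine" | "euclidean" | "dot" = "cosine",
): Promise<any[]> {
    // One placeholder per document id (@1, @2, ...); @0 is the query vector
    const docIdParams = documentIds.map((_, i) => `@${i + 1}`).join(", ")
    return dataSource.query(
        `
        DECLARE @question AS VECTOR (1998) = @0;
        SELECT TOP (10) dc.*,
               VECTOR_DISTANCE('${metric}', @question, embedding) AS distance
        FROM document_chunks dc
        WHERE documentId IN (${docIdParams})
        ORDER BY VECTOR_DISTANCE('${metric}', @question, embedding)
        `,
        [JSON.stringify(queryEmbedding), ...documentIds],
    )
}
```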
## Distance Metrics

The `VECTOR_DISTANCE` function supports different distance metrics:

- `'cosine'` - Cosine distance (most common for semantic search)
- `'euclidean'` - Euclidean (L2) distance
- `'dot'` - Negative dot product

## Requirements

- SQL Server with vector support enabled
- TypeORM with SQL Server driver (`mssql` package)

## Running the Sample

1. Make sure you have SQL Server running with vector support
2. Update the connection settings in `app.ts` if needed
3. Run:
    ```bash
    npm install
    ts-node app.ts
    ```

## Notes

- Vector dimensions must be specified using the `length` option
- Embeddings are stored as JSON strings internally and converted to/from arrays automatically
- The maximum vector dimension depends on your SQL Server version and configuration
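Because the driver serializes the array to JSON on write and parses it back on read, the `embedding` property stays a plain `number[]` on both sides of a save/load cycle. The following is a minimal sketch of that round trip, assuming the entities from this sample; inside this repository the samples import from `"../../src/index"`, while an application would import from `"typeorm"`, and the helper name is illustrative.

```typescript
// Minimal round-trip sketch (assumed helper, not part of the sample).
import { DataSource } from "typeorm"
import { Document } from "./entity/Document"
import { DocumentChunk } from "./entity/DocumentChunk"

export async function saveAndReloadChunk(dataSource: DataSource): Promise<void> {
    const document = new Document()
    document.fileName = "roundtrip.txt"
    await dataSource.manager.save(document)

    const chunk = new DocumentChunk()
    chunk.content = "Example content"
    chunk.embedding = Array.from({ length: 1998 }, () => Math.random())
    chunk.document = document
    await dataSource.manager.save(chunk)

    // On load the stored JSON string comes back as an array of numbers.
    const reloaded = await dataSource
        .getRepository(DocumentChunk)
        .findOneByOrFail({ id: chunk.id })
    console.log(Array.isArray(reloaded.embedding), reloaded.embedding.length)
}
```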
sample/sample37-vector-sqlserver/app.ts (new file, 88 lines)
@@ -0,0 +1,88 @@
import "reflect-metadata"
import { DataSource } from "../../src/index"
import { DocumentChunk } from "./entity/DocumentChunk"
import { Document } from "./entity/Document"

const AppDataSource = new DataSource({
    type: "mssql",
    host: "localhost",
    username: "sa",
    password: "Admin12345",
    database: "test",
    synchronize: true,
    dropSchema: true,
    logging: false,
    entities: [Document, DocumentChunk],
    options: {
        // Enable trust server certificate for local development
        trustServerCertificate: true,
    },
})

AppDataSource.initialize()
    .then(async (connection) => {
        console.log("Inserting documents and chunks with vector embeddings...")

        // Create a document
        const document = new Document()
        document.fileName = "sample-document.txt"
        await connection.manager.save(document)

        // Generate sample embeddings (in a real app, these would come from an ML model)
        const generateEmbedding = (dimension: number): number[] => {
            return Array.from({ length: dimension }, () => Math.random())
        }

        // Create document chunks with embeddings
        const chunk1 = new DocumentChunk()
        chunk1.content =
            "TypeORM is an ORM that can run in NodeJS and can be used with TypeScript and JavaScript."
        chunk1.embedding = generateEmbedding(1998)
        chunk1.document = document

        const chunk2 = new DocumentChunk()
        chunk2.content =
            "It supports both Active Record and Data Mapper patterns."
        chunk2.embedding = generateEmbedding(1998)
        chunk2.document = document

        const chunk3 = new DocumentChunk()
        chunk3.content =
            "TypeORM supports MySQL, PostgreSQL, MariaDB, SQLite, MS SQL Server, Oracle, and more."
        chunk3.embedding = generateEmbedding(1998)
        chunk3.document = document

        await connection.manager.save([chunk1, chunk2, chunk3])

        console.log("Documents and chunks have been saved!")

        // Perform a vector similarity search
        console.log("\nPerforming vector similarity search...")

        // Query embedding (in a real app, this would be generated from user query)
        const queryEmbedding = generateEmbedding(1998)
        const documentIds = [document.id]

        const docIdParams = documentIds.map((_, i) => `@${i + 1}`).join(", ")
        const results = await connection.query(
            `
            DECLARE @question AS VECTOR (1998) = @0;
            SELECT TOP (3) dc.*, VECTOR_DISTANCE('cosine', @question, embedding) AS distance, d.fileName as "documentName"
            FROM document_chunks dc
            LEFT JOIN documents d ON dc.documentId = d.id
            WHERE documentId IN (${docIdParams})
            ORDER BY VECTOR_DISTANCE('cosine', @question, embedding)
            `,
            [JSON.stringify(queryEmbedding), ...documentIds],
        )

        console.log("Search results (top 3 most similar chunks):")
        results.forEach((result: any, index: number) => {
            console.log(`\n${index + 1}. Distance: ${result.distance}`)
            console.log(`   Content: ${result.content.substring(0, 80)}...`)
            console.log(`   Document: ${result.documentName}`)
        })

        await connection.destroy()
    })
    .catch((error) => console.log(error))
sample/sample37-vector-sqlserver/entity/Document.ts (new file, 19 lines)
@@ -0,0 +1,19 @@
import {
    Entity,
    PrimaryGeneratedColumn,
    Column,
    OneToMany,
} from "../../../src/index"
import { DocumentChunk } from "./DocumentChunk"

@Entity("documents")
export class Document {
    @PrimaryGeneratedColumn("uuid")
    id: string

    @Column("varchar")
    fileName: string

    @OneToMany(() => DocumentChunk, (chunk) => chunk.document)
    chunks: DocumentChunk[]
}
sample/sample37-vector-sqlserver/entity/DocumentChunk.ts (new file, 27 lines)
@@ -0,0 +1,27 @@
import {
    Entity,
    PrimaryGeneratedColumn,
    Column,
    ManyToOne,
    JoinColumn,
} from "../../../src/index"
import { Document } from "./Document"

@Entity("document_chunks")
export class DocumentChunk {
    @PrimaryGeneratedColumn("uuid")
    id: string

    @Column("varchar", { length: "MAX" })
    content: string

    @Column("vector", { length: 1998 })
    embedding: number[]

    @Column("uuid")
    documentId: string

    @ManyToOne(() => Document, (document) => document.chunks)
    @JoinColumn({ name: "documentId" })
    document: Document
}
sample/sample37-vector-sqlserver/package.json (new file, 19 lines)
@@ -0,0 +1,19 @@
{
    "name": "sample37-vector-sqlserver",
    "version": "0.0.1",
    "description": "Sample demonstrating vector type support in SQL Server with TypeORM",
    "main": "app.ts",
    "scripts": {
        "start": "ts-node app.ts"
    },
    "dependencies": {
        "typeorm": "latest",
        "mssql": "^11.0.0",
        "reflect-metadata": "^0.2.2"
    },
    "devDependencies": {
        "@types/node": "^22.0.0",
        "ts-node": "^10.9.2",
        "typescript": "^5.5.4"
    }
}
@@ -1,5 +1,5 @@
import "reflect-metadata"
import { DataSource, DataSourceOptions } from "../../src/index"
import { DataSource, DataSourceOptions } from "../../src"
import { Post } from "./entity/Post"
import { PostDetails } from "./entity/PostDetails"

@@ -32,7 +32,7 @@ dataSource.initialize().then(

postRepository
.save(post)
.then((post) => console.log("Post has been saved"))
.then(() => console.log("Post has been saved"))
.catch((error) => console.log("Cannot save. Error: ", error))
},
(error) => console.log("Cannot connect: ", error),

@@ -25,7 +25,7 @@ export class Post {

// Post has relation with PostCategory, however inverse relation is not set
// (category does not have relation with post set)
@ManyToMany((type) => PostCategory, {
@ManyToMany(() => PostCategory, {
cascade: true,
})
@JoinTable()

@@ -33,7 +33,7 @@ export class Post {

// Post has relation with PostDetails. Cascade insert here means if there is a new PostDetails instance set
// on this relation, it will be inserted automatically to the db when you save this Post entity
@ManyToMany((type) => PostDetails, (details) => details.posts, {
@ManyToMany(() => PostDetails, (details) => details.posts, {
cascade: ["insert"],
})
@JoinTable()

@@ -41,7 +41,7 @@ export class Post {

// Post has relation with PostImage. Cascade update here means if there are changes to an existing PostImage, it
// will be updated automatically to the db when you save this Post entity
@ManyToMany((type) => PostImage, (image) => image.posts, {
@ManyToMany(() => PostImage, (image) => image.posts, {
cascade: ["update"],
})
@JoinTable()

@@ -49,12 +49,12 @@ export class Post {

// Post has relation with PostMetadata. No cascades here means that when saving a Post entity, there will be
// no creating/updating/destroying PostMetadata.
@ManyToMany((type) => PostMetadata, (metadata) => metadata.posts)
@ManyToMany(() => PostMetadata, (metadata) => metadata.posts)
@JoinTable()
metadatas: PostMetadata[]

// Post has relation with PostInformation. Full cascades here
@ManyToMany((type) => PostInformation, (information) => information.posts, {
@ManyToMany(() => PostInformation, (information) => information.posts, {
cascade: true,
})
@JoinTable()

@@ -62,7 +62,7 @@ export class Post {

// Post has relation with author. No cascades here means that when saving a Post entity, there will be
// no creating/updating/destroying PostAuthor.
@ManyToMany((type) => PostAuthor, (author) => author.posts)
@ManyToMany(() => PostAuthor, (author) => author.posts)
@JoinTable()
authors: PostAuthor[]
}

@@ -14,6 +14,6 @@ export class PostAuthor {
@Column()
name: string

@ManyToMany((type) => Post, (post) => post.authors)
@ManyToMany(() => Post, (post) => post.authors)
posts: Post[]
}

@@ -29,7 +29,7 @@ export class PostDetails {
})
metadata: string | null

@ManyToMany((type) => Post, (post) => post.details, {
@ManyToMany(() => Post, (post) => post.details, {
cascade: true,
})
posts: Post[]

@@ -14,6 +14,6 @@ export class PostImage {
@Column()
url: string

@ManyToMany((type) => Post, (post) => post.images)
@ManyToMany(() => Post, (post) => post.images)
posts: Post[]
}

@@ -14,7 +14,7 @@ export class PostInformation {
@Column()
text: string

@ManyToMany((type) => Post, (post) => post.informations, {
@ManyToMany(() => Post, (post) => post.informations, {
cascade: ["update"],
})
posts: Post[]

@@ -14,6 +14,6 @@ export class PostMetadata {
@Column()
description: string

@ManyToMany((type) => Post, (post) => post.metadatas)
@ManyToMany(() => Post, (post) => post.metadatas)
posts: Post[]
}
Some files were not shown because too many files have changed in this diff.