mirror of
https://github.com/typeorm/typeorm.git
synced 2025-12-08 21:26:23 +00:00
Merge branch 'master' of github.com:typeorm/typeorm into art
This commit is contained in:
commit
78746f106d
@ -2,7 +2,8 @@
|
||||
"all": true,
|
||||
"cache": false,
|
||||
"exclude": ["**/*.d.ts"],
|
||||
"exclude-after-remap": true,
|
||||
"extension": [".ts"],
|
||||
"include": ["build/compiled/src/**", "src/**"],
|
||||
"reporter": "lcov"
|
||||
"reporter": ["lcov"]
|
||||
}
|
||||
4
.github/PULL_REQUEST_TEMPLATE.md
vendored
4
.github/PULL_REQUEST_TEMPLATE.md
vendored
@ -33,8 +33,8 @@
|
||||
|
||||
- [ ] Code is up-to-date with the `master` branch
|
||||
- [ ] This pull request links relevant issues as `Fixes #00000`
|
||||
- [ ] There are new or updated unit tests validating the change
|
||||
- [ ] Documentation has been updated to reflect this change
|
||||
- [ ] There are new or updated tests validating the change (`tests/**.test.ts`)
|
||||
- [ ] Documentation has been updated to reflect this change (`docs/docs/**.md`)
|
||||
|
||||
<!--
|
||||
🎉 Thank you for contributing and making TypeORM even better!
|
||||
|
||||
283
.github/copilot-instructions.md
vendored
Normal file
283
.github/copilot-instructions.md
vendored
Normal file
@ -0,0 +1,283 @@
|
||||
# GitHub Copilot Instructions for TypeORM
|
||||
|
||||
This document provides guidance for GitHub Copilot when working with the TypeORM codebase.
|
||||
|
||||
## Project Overview
|
||||
|
||||
TypeORM is a TypeScript-based Object-Relational Mapping (ORM) library that supports multiple databases including MySQL/MariaDB, PostgreSQL, MS SQL Server, Oracle, SAP HANA, SQLite, MongoDB, and Google Spanner. It implements both Active Record and Data Mapper patterns and runs on Node.js, Browser, React Native, and Electron platforms.
|
||||
|
||||
## Architecture & Structure
|
||||
|
||||
### Core Components
|
||||
|
||||
- **`src/data-source/`** - DataSource (formerly Connection) management
|
||||
- **`src/entity-manager/`** - Entity management and operations
|
||||
- **`src/repository/`** - Repository pattern implementation
|
||||
- **`src/query-builder/`** - SQL query building
|
||||
- **`src/decorator/`** - TypeScript decorators for entities, columns, relations
|
||||
- **`src/driver/`** - Database-specific drivers
|
||||
- **`src/metadata/`** - Entity metadata management
|
||||
- **`src/schema-builder/`** - Schema creation and migration
|
||||
- **`src/migration/`** - Database migration system
|
||||
- **`src/subscriber/`** - Event subscriber system
|
||||
- **`src/persistence/`** - Entity persistence logic
|
||||
|
||||
### Design Patterns
|
||||
|
||||
- **Active Record Pattern**: Entities have methods to save, remove, and query themselves
|
||||
- **Data Mapper Pattern**: Repositories handle entity persistence separately from business logic
|
||||
- **Decorator Pattern**: Extensive use of TypeScript decorators for metadata definition
|
||||
- **Builder Pattern**: QueryBuilder for constructing complex queries
|
||||
|
||||
## Coding Standards
|
||||
|
||||
### TypeScript Configuration
|
||||
|
||||
- Target: ES2021+ with CommonJS modules
|
||||
- Decorators: `experimentalDecorators` and `emitDecoratorMetadata` enabled
|
||||
|
||||
### Code Style
|
||||
|
||||
- **Formatting**: Use Prettier with these settings:
|
||||
- No semicolons (`"semi": false`)
|
||||
- Arrow function parentheses always (`"arrowParens": "always"`)
|
||||
- Trailing commas everywhere (`"trailingComma": "all"`)
|
||||
- **Linting**: ESLint with TypeScript support
|
||||
- Use `@typescript-eslint` rules
|
||||
- Warnings allowed for some `@typescript-eslint/no-*` rules
|
||||
- Unused variables starting with `_` are ignored
|
||||
- **Naming Conventions**:
|
||||
- Classes: PascalCase (e.g., `DataSource`, `EntityManager`)
|
||||
- Interfaces: PascalCase (e.g., `ColumnOptions`, `RelationOptions`)
|
||||
- Variables/functions: camelCase
|
||||
- Constants: UPPER_SNAKE_CASE for true constants
|
||||
- Private members: Use standard camelCase (no underscore prefix)
|
||||
|
||||
### TypeScript Patterns
|
||||
|
||||
- Use explicit types for public APIs
|
||||
- Prefer interfaces over type aliases for object shapes
|
||||
- Use generics for reusable components
|
||||
- Avoid `any` where possible; use `unknown` or proper types
|
||||
- Use optional chaining (`?.`) and nullish coalescing (`??`) operators
|
||||
- Leverage TypeScript utility types (`Partial<T>`, `Required<T>`, `Pick<T>`, etc.)
|
||||
|
||||
## Testing
|
||||
|
||||
### Test Structure
|
||||
|
||||
Tests are organized in `test/` directory:
|
||||
- **`test/functional/`** - Feature and integration tests organized by functionality (preferred)
|
||||
- **`test/github-issues/`** - Tests for specific GitHub issues
|
||||
- **`test/unit/`** - Unit tests for individual components
|
||||
- **`test/utils/`** - Test utilities and helpers
|
||||
|
||||
**Note**: Prefer writing functional tests over per-issue tests.
|
||||
|
||||
### Test Writing Guidelines
|
||||
|
||||
1. **Use the standard test template**:
|
||||
```typescript
|
||||
import "reflect-metadata"
|
||||
import { createTestingConnections, closeTestingConnections, reloadTestingDatabases } from "../../utils/test-utils"
|
||||
import { DataSource } from "../../../src/data-source/DataSource"
|
||||
import { expect } from "chai"
|
||||
|
||||
describe("description of functionality", () => {
|
||||
let dataSources: DataSource[]
|
||||
before(async () => dataSources = await createTestingConnections({
|
||||
entities: [__dirname + "/entity/*{.js,.ts}"],
|
||||
schemaCreate: true,
|
||||
dropSchema: true,
|
||||
}))
|
||||
beforeEach(() => reloadTestingDatabases(dataSources))
|
||||
after(() => closeTestingConnections(dataSources))
|
||||
|
||||
it("should do something specific", () => Promise.all(dataSources.map(async dataSource => {
|
||||
// Test implementation
|
||||
})))
|
||||
})
|
||||
```
|
||||
|
||||
2. **Test Configuration**:
|
||||
- Tests run against multiple databases (as configured in `ormconfig.json`)
|
||||
- Each test should work across all supported databases unless database-specific
|
||||
- Place entity files in `./entity/` relative to test file for automatic loading
|
||||
- Use `Promise.all(dataSources.map(...))` pattern to test against all databases
|
||||
|
||||
3. **Test Naming**:
|
||||
- Use descriptive `describe()` blocks for features
|
||||
- Use "should..." format for `it()` descriptions
|
||||
- Reference GitHub issue numbers when fixing specific issues
|
||||
|
||||
4. **Running Tests**:
|
||||
- Full test suite: `npm test` (compiles then runs tests)
|
||||
- Fast iteration: `npm run test:fast` (runs without recompiling)
|
||||
- Specific tests: `npm run test:fast -- --grep "pattern"`
|
||||
- Watch mode: `npm run compile -- --watch` + `npm run test:fast`
|
||||
|
||||
## Database-Specific Considerations
|
||||
|
||||
### Multi-Database Support
|
||||
|
||||
When writing code or tests:
|
||||
- Ensure compatibility across all supported databases
|
||||
- Use driver-specific code only in `src/driver/` directory
|
||||
- Test database-agnostic code against multiple databases
|
||||
- Use `DataSource.options.type` to check database type when needed
|
||||
- Be aware of SQL dialect differences (LIMIT vs TOP, etc.)
|
||||
|
||||
### Driver Implementation
|
||||
|
||||
Each driver in `src/driver/` implements common interfaces:
|
||||
- Connection management
|
||||
- Query execution
|
||||
- Schema synchronization
|
||||
- Type mapping
|
||||
- Transaction handling
|
||||
|
||||
## Common Development Tasks
|
||||
|
||||
### Adding a New Feature
|
||||
|
||||
1. Create entities in appropriate test directory
|
||||
2. Write tests first (TDD approach encouraged)
|
||||
3. Implement feature in `src/`
|
||||
4. Ensure tests pass across all databases
|
||||
5. Update documentation if public API changes
|
||||
6. Follow commit message conventions
|
||||
|
||||
### Adding a New Decorator
|
||||
|
||||
1. Create decorator file in `src/decorator/`
|
||||
2. Create metadata args in `src/metadata-args/`
|
||||
3. Update metadata builder in `src/metadata-builder/`
|
||||
4. Export from `src/index.ts`
|
||||
5. Add comprehensive tests
|
||||
6. Update TypeScript type definitions if needed
|
||||
|
||||
### Working with Migrations
|
||||
|
||||
- Migrations are in `src/migration/`
|
||||
- Migration files should be timestamped
|
||||
- Support both up and down migrations
|
||||
- Test migrations against all supported databases
|
||||
- Ensure schema changes are reversible
|
||||
|
||||
## Build & Development Workflow
|
||||
|
||||
### Commands
|
||||
|
||||
- **Build**: `npm run compile` - Compiles TypeScript to `build/compiled/`
|
||||
- **Package**: `npm run package` - Creates distribution in `build/package/`
|
||||
- **Pack**: `npm run pack` - Creates `.tgz` file in `build/`
|
||||
- **Test**: `npm test` - Compile and run all tests
|
||||
- **Lint**: `npm run lint` - Run ESLint
|
||||
- **Format**: `npm run format` - Run Prettier
|
||||
- **Watch**: `npm run watch` - Watch mode for TypeScript compilation
|
||||
|
||||
### Development Setup
|
||||
|
||||
1. Install dependencies: `npm install`
|
||||
2. Copy config: `cp ormconfig.sample.json ormconfig.json`
|
||||
3. Configure database connections in `ormconfig.json`
|
||||
4. Optionally use Docker: `docker-compose up` for database services
|
||||
|
||||
### Pre-commit Hooks
|
||||
|
||||
- Husky runs pre-commit hooks
|
||||
- Lint-staged runs on staged files
|
||||
- Format and lint checks must pass
|
||||
|
||||
## Contribution Guidelines
|
||||
|
||||
### Commit Message Format
|
||||
|
||||
Follow conventional commits:
|
||||
```
|
||||
<type>: <subject>
|
||||
|
||||
<body>
|
||||
|
||||
<footer>
|
||||
```
|
||||
|
||||
**Types**: `feat`, `fix`, `docs`, `style`, `refactor`, `perf`, `test`, `build`, `chore`, `revert`
|
||||
|
||||
**Subject**:
|
||||
- Use imperative, present tense
|
||||
- Don't capitalize first letter
|
||||
- No period at the end
|
||||
- Max 100 characters per line
|
||||
|
||||
### Pull Request Requirements
|
||||
|
||||
- All tests must pass
|
||||
- Include appropriate tests for changes
|
||||
- Follow existing code style
|
||||
- Update documentation for API changes
|
||||
- Reference related GitHub issues
|
||||
- Get approval before merging
|
||||
|
||||
## Common Patterns & Idioms
|
||||
|
||||
### Entity Definition
|
||||
|
||||
```typescript
|
||||
@Entity()
|
||||
export class User {
|
||||
@PrimaryGeneratedColumn()
|
||||
id: number
|
||||
|
||||
@Column()
|
||||
name: string
|
||||
|
||||
@OneToMany(() => Photo, photo => photo.user)
|
||||
photos: Photo[]
|
||||
}
|
||||
```
|
||||
|
||||
### Repository Usage
|
||||
|
||||
```typescript
|
||||
const userRepository = dataSource.getRepository(User)
|
||||
const user = await userRepository.findOne({ where: { id: 1 } })
|
||||
```
|
||||
|
||||
### QueryBuilder
|
||||
|
||||
```typescript
|
||||
const users = await dataSource
|
||||
.getRepository(User)
|
||||
.createQueryBuilder("user")
|
||||
.leftJoinAndSelect("user.photos", "photo")
|
||||
.where("user.name = :name", { name: "John" })
|
||||
.getMany()
|
||||
```
|
||||
|
||||
### Transactions
|
||||
|
||||
```typescript
|
||||
await dataSource.transaction(async (manager) => {
|
||||
await manager.save(user)
|
||||
await manager.save(photo)
|
||||
})
|
||||
```
|
||||
|
||||
## Important Notes
|
||||
|
||||
- Always import `reflect-metadata` before TypeORM
|
||||
- Be careful with circular dependencies between entities
|
||||
- Use lazy relations or forward references for circular entity references
|
||||
- Connection pooling is handled automatically by drivers
|
||||
- Be mindful of N+1 query problems; use joins or eager loading
|
||||
- Repository methods are async; always use `await`
|
||||
- Entity instances should be plain objects, not class instances with methods (Data Mapper pattern)
|
||||
|
||||
## Resources
|
||||
|
||||
- [Main Documentation](https://typeorm.io)
|
||||
- [Contributing Guide](../CONTRIBUTING.md)
|
||||
- [Developer Guide](../DEVELOPER.md)
|
||||
- [GitHub Repository](https://github.com/typeorm/typeorm)
|
||||
- [Issue Tracker](https://github.com/typeorm/typeorm/issues)
|
||||
2
.github/workflows/codeql.yml
vendored
2
.github/workflows/codeql.yml
vendored
@ -23,7 +23,7 @@ jobs:
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
uses: actions/checkout@v5
|
||||
|
||||
- name: Initialize CodeQL
|
||||
uses: github/codeql-action/init@v3
|
||||
|
||||
76
.github/workflows/commit-validation.yml
vendored
76
.github/workflows/commit-validation.yml
vendored
@ -1,76 +0,0 @@
|
||||
name: Commit Validation
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- "**"
|
||||
paths-ignore:
|
||||
- "docs/**"
|
||||
pull_request:
|
||||
branches:
|
||||
- "**"
|
||||
paths-ignore:
|
||||
- "docs/**"
|
||||
workflow_dispatch:
|
||||
|
||||
jobs:
|
||||
formatting:
|
||||
if: ${{ (github.event_name != 'pull_request') || (github.event.pull_request.head.repo.fork) }}
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: 20
|
||||
cache: "npm"
|
||||
|
||||
- run: npm ci
|
||||
- run: npm run lint
|
||||
- run: npm run format:ci
|
||||
|
||||
build:
|
||||
if: ${{ (github.event_name != 'pull_request') || (github.event.pull_request.head.repo.fork) }}
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: 20
|
||||
cache: "npm"
|
||||
|
||||
- run: npm ci
|
||||
- run: npm run compile
|
||||
|
||||
- name: Upload build
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: build
|
||||
path: build/
|
||||
retention-days: 1
|
||||
|
||||
tests-linux:
|
||||
needs: build
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
node-version: [18, 20]
|
||||
uses: ./.github/workflows/tests-linux.yml
|
||||
with:
|
||||
node-version: ${{matrix.node-version}}
|
||||
|
||||
tests-windows:
|
||||
needs: build
|
||||
uses: ./.github/workflows/tests-windows.yml
|
||||
with:
|
||||
node-version: 20
|
||||
|
||||
# Run with most databases possible to provide the coverage of the tests
|
||||
coverage:
|
||||
runs-on: ubuntu-latest
|
||||
needs: [tests-linux, tests-windows]
|
||||
steps:
|
||||
- name: Coveralls Finished
|
||||
uses: coverallsapp/github-action@v2
|
||||
with:
|
||||
parallel-finished: true
|
||||
37
.github/workflows/docsearch.yml
vendored
37
.github/workflows/docsearch.yml
vendored
@ -6,15 +6,42 @@ on:
|
||||
- master
|
||||
|
||||
jobs:
|
||||
index_docs:
|
||||
index-docs:
|
||||
if: ${{ !github.event.repository.fork }}
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Index docs to Typesense
|
||||
- uses: actions/checkout@v5
|
||||
- name: Delete unaliased collections
|
||||
env:
|
||||
TYPESENSE_API_KEY: ${{ secrets.TYPESENSE_API_KEY }}
|
||||
TYPESENSE_HOST: ${{ secrets.TYPESENSE_HOST }}
|
||||
TYPESENSE_PROTOCOL: https
|
||||
TYPESENSE_PORT: 443
|
||||
run: |
|
||||
ALIAS_COLLECTION=$(curl -s -H "X-TYPESENSE-API-KEY: $TYPESENSE_API_KEY" \
|
||||
"$TYPESENSE_PROTOCOL://$TYPESENSE_HOST:$TYPESENSE_PORT/aliases/typeorm-docs" \
|
||||
| jq -r '.collection_name')
|
||||
|
||||
if [ "$ALIAS_COLLECTION" = "null" ] || [ -z "$ALIAS_COLLECTION" ]; then
|
||||
echo "Alias does not exist; skipping collection cleanup."
|
||||
exit 0
|
||||
fi
|
||||
|
||||
echo "Alias currently points to: $ALIAS_COLLECTION"
|
||||
|
||||
COLLECTIONS=$(curl -s -H "X-TYPESENSE-API-KEY: $TYPESENSE_API_KEY" \
|
||||
"$TYPESENSE_PROTOCOL://$TYPESENSE_HOST:$TYPESENSE_PORT/collections" \
|
||||
| jq -r '.[].name')
|
||||
|
||||
for col in $COLLECTIONS; do
|
||||
if [ "$col" != "$ALIAS_COLLECTION" ]; then
|
||||
echo "Deleting unaliased collection: $col"
|
||||
curl -s -X DELETE -H "X-TYPESENSE-API-KEY: $TYPESENSE_API_KEY" \
|
||||
"$TYPESENSE_PROTOCOL://$TYPESENSE_HOST:$TYPESENSE_PORT/collections/$col"
|
||||
fi
|
||||
done
|
||||
- run: |
|
||||
docker run \
|
||||
-e TYPESENSE_API_KEY=${{ secrets.TYPESENSE_API_KEY }} \
|
||||
-e TYPESENSE_HOST="${{ secrets.TYPESENSE_HOST }}" \
|
||||
|
||||
7
.github/workflows/preview.yml
vendored
7
.github/workflows/preview.yml
vendored
@ -15,15 +15,14 @@ jobs:
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/checkout@v5
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Use Node.js
|
||||
uses: actions/setup-node@v4
|
||||
uses: actions/setup-node@v5
|
||||
with:
|
||||
cache: "npm"
|
||||
node-version: 20
|
||||
node-version-file: .nvmrc
|
||||
|
||||
- name: Install dependencies
|
||||
run: npm ci
|
||||
|
||||
6
.github/workflows/publish-package.yml
vendored
6
.github/workflows/publish-package.yml
vendored
@ -11,10 +11,10 @@ jobs:
|
||||
contents: read
|
||||
id-token: write
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/setup-node@v4
|
||||
- uses: actions/checkout@v5
|
||||
- uses: actions/setup-node@v5
|
||||
with:
|
||||
node-version: 20
|
||||
node-version-file: .nvmrc
|
||||
registry-url: "https://registry.npmjs.org"
|
||||
- run: npm ci
|
||||
- run: npm run package
|
||||
|
||||
83
.github/workflows/tests-linux.yml
vendored
83
.github/workflows/tests-linux.yml
vendored
@ -21,18 +21,17 @@ jobs:
|
||||
COCKROACH_ARGS: "start-single-node --insecure --cache=1GB --store=type=mem,size=4GB"
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/setup-node@v4
|
||||
- uses: actions/checkout@v5
|
||||
- uses: actions/setup-node@v5
|
||||
with:
|
||||
node-version: ${{ inputs.node-version }}
|
||||
cache: "npm"
|
||||
- uses: actions/download-artifact@v4
|
||||
with:
|
||||
name: build
|
||||
path: build/
|
||||
- run: npm ci
|
||||
- run: cp .github/workflows/test/cockroachdb.ormconfig.json ormconfig.json
|
||||
- run: npx nyc npm run test:ci
|
||||
- run: npx c8 npm run test:ci
|
||||
- name: Coveralls Parallel
|
||||
uses: coverallsapp/github-action@v2
|
||||
with:
|
||||
@ -44,23 +43,22 @@ jobs:
|
||||
|
||||
services:
|
||||
mongodb:
|
||||
image: mongo:5.0.31
|
||||
image: mongo:8
|
||||
ports:
|
||||
- "27017:27017"
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/setup-node@v4
|
||||
- uses: actions/checkout@v5
|
||||
- uses: actions/setup-node@v5
|
||||
with:
|
||||
node-version: ${{ inputs.node-version }}
|
||||
cache: "npm"
|
||||
- uses: actions/download-artifact@v4
|
||||
with:
|
||||
name: build
|
||||
path: build/
|
||||
- run: npm ci
|
||||
- run: cp .github/workflows/test/mongodb.ormconfig.json ormconfig.json
|
||||
- run: npx nyc npm run test:ci
|
||||
- run: npx c8 npm run test:ci
|
||||
- name: Coveralls Parallel
|
||||
uses: coverallsapp/github-action@v2
|
||||
with:
|
||||
@ -72,26 +70,26 @@ jobs:
|
||||
|
||||
services:
|
||||
mssql:
|
||||
image: "mcr.microsoft.com/mssql/server:2022-latest"
|
||||
image: "mcr.microsoft.com/mssql/server:2025-latest"
|
||||
ports:
|
||||
- "1433:1433"
|
||||
env:
|
||||
SA_PASSWORD: "Admin12345"
|
||||
ACCEPT_EULA: "Y"
|
||||
MSSQL_PID: "Express"
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/setup-node@v4
|
||||
- uses: actions/checkout@v5
|
||||
- uses: actions/setup-node@v5
|
||||
with:
|
||||
node-version: ${{ inputs.node-version }}
|
||||
cache: "npm"
|
||||
- uses: actions/download-artifact@v4
|
||||
with:
|
||||
name: build
|
||||
path: build/
|
||||
- run: npm ci
|
||||
- run: cp .github/workflows/test/mssql.ormconfig.json ormconfig.json
|
||||
- run: npx nyc npm run test:ci
|
||||
- run: npx c8 npm run test:ci
|
||||
- name: Coveralls Parallel
|
||||
uses: coverallsapp/github-action@v2
|
||||
with:
|
||||
@ -122,18 +120,17 @@ jobs:
|
||||
MYSQL_DATABASE: "test"
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/setup-node@v4
|
||||
- uses: actions/checkout@v5
|
||||
- uses: actions/setup-node@v5
|
||||
with:
|
||||
node-version: ${{ inputs.node-version }}
|
||||
cache: "npm"
|
||||
- uses: actions/download-artifact@v4
|
||||
with:
|
||||
name: build
|
||||
path: build/
|
||||
- run: npm ci
|
||||
- run: cp .github/workflows/test/mysql-mariadb.ormconfig.json ormconfig.json
|
||||
- run: npx nyc npm run test:ci
|
||||
- run: npx c8 npm run test:ci
|
||||
- name: Coveralls Parallel
|
||||
uses: coverallsapp/github-action@v2
|
||||
with:
|
||||
@ -164,18 +161,17 @@ jobs:
|
||||
MYSQL_DATABASE: "test"
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/setup-node@v4
|
||||
- uses: actions/checkout@v5
|
||||
- uses: actions/setup-node@v5
|
||||
with:
|
||||
node-version: ${{ inputs.node-version }}
|
||||
cache: "npm"
|
||||
- uses: actions/download-artifact@v4
|
||||
with:
|
||||
name: build
|
||||
path: build/
|
||||
- run: npm ci
|
||||
- run: cp .github/workflows/test/mysql-mariadb-latest.ormconfig.json ormconfig.json
|
||||
- run: npx nyc npm run test:ci
|
||||
- run: npx c8 npm run test:ci
|
||||
- name: Coveralls Parallel
|
||||
uses: coverallsapp/github-action@v2
|
||||
with:
|
||||
@ -186,18 +182,17 @@ jobs:
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/setup-node@v4
|
||||
- uses: actions/checkout@v5
|
||||
- uses: actions/setup-node@v5
|
||||
with:
|
||||
node-version: ${{ inputs.node-version }}
|
||||
cache: "npm"
|
||||
- uses: actions/download-artifact@v4
|
||||
with:
|
||||
name: build
|
||||
path: build/
|
||||
- run: npm ci
|
||||
- run: cp .github/workflows/test/better-sqlite3.ormconfig.json ormconfig.json
|
||||
- run: npx nyc npm run test:ci
|
||||
- run: npx c8 npm run test:ci
|
||||
- name: Coveralls Parallel
|
||||
uses: coverallsapp/github-action@v2
|
||||
with:
|
||||
@ -208,18 +203,17 @@ jobs:
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/setup-node@v4
|
||||
- uses: actions/checkout@v5
|
||||
- uses: actions/setup-node@v5
|
||||
with:
|
||||
node-version: ${{ inputs.node-version }}
|
||||
cache: "npm"
|
||||
- uses: actions/download-artifact@v4
|
||||
with:
|
||||
name: build
|
||||
path: build/
|
||||
- run: npm ci
|
||||
- run: cp .github/workflows/test/sqlite.ormconfig.json ormconfig.json
|
||||
- run: npx nyc npm run test:ci
|
||||
- run: npx c8 npm run test:ci
|
||||
- name: Coveralls Parallel
|
||||
uses: coverallsapp/github-action@v2
|
||||
with:
|
||||
@ -230,18 +224,17 @@ jobs:
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/setup-node@v4
|
||||
- uses: actions/checkout@v5
|
||||
- uses: actions/setup-node@v5
|
||||
with:
|
||||
node-version: ${{ inputs.node-version }}
|
||||
cache: "npm"
|
||||
- uses: actions/download-artifact@v4
|
||||
with:
|
||||
name: build
|
||||
path: build/
|
||||
- run: npm ci
|
||||
- run: cp .github/workflows/test/sqljs.ormconfig.json ormconfig.json
|
||||
- run: npx nyc npm run test:ci
|
||||
- run: npx c8 npm run test:ci
|
||||
- name: Coveralls Parallel
|
||||
uses: coverallsapp/github-action@v2
|
||||
with:
|
||||
@ -271,18 +264,17 @@ jobs:
|
||||
--health-retries 5
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/setup-node@v4
|
||||
- uses: actions/checkout@v5
|
||||
- uses: actions/setup-node@v5
|
||||
with:
|
||||
node-version: ${{ inputs.node-version }}
|
||||
cache: "npm"
|
||||
- uses: actions/download-artifact@v4
|
||||
with:
|
||||
name: build
|
||||
path: build/
|
||||
- run: npm ci
|
||||
- run: cp .github/workflows/test/postgres.ormconfig.json ormconfig.json
|
||||
- run: npx nyc npm run test:ci
|
||||
- run: npx c8 npm run test:ci
|
||||
- name: Coveralls Parallel
|
||||
uses: coverallsapp/github-action@v2
|
||||
with:
|
||||
@ -293,14 +285,13 @@ jobs:
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/checkout@v5
|
||||
|
||||
- run: docker compose up oracle --detach
|
||||
|
||||
- uses: actions/setup-node@v4
|
||||
- uses: actions/setup-node@v5
|
||||
with:
|
||||
node-version: ${{ inputs.node-version }}
|
||||
cache: "npm"
|
||||
- uses: actions/download-artifact@v4
|
||||
with:
|
||||
name: build
|
||||
@ -309,7 +300,8 @@ jobs:
|
||||
- run: npm ci
|
||||
- run: cat ormconfig.sample.json | jq 'map(select(.name == "oracle"))' > ormconfig.json
|
||||
- run: docker compose up oracle --no-recreate --wait
|
||||
- run: npx nyc npm run test:ci
|
||||
- run: sleep 10
|
||||
- run: npx c8 npm run test:ci
|
||||
|
||||
- name: Coveralls Parallel
|
||||
uses: coverallsapp/github-action@v2
|
||||
@ -321,14 +313,13 @@ jobs:
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/checkout@v5
|
||||
|
||||
- run: docker compose up hanaexpress --detach
|
||||
|
||||
- uses: actions/setup-node@v4
|
||||
- uses: actions/setup-node@v5
|
||||
with:
|
||||
node-version: ${{ inputs.node-version }}
|
||||
cache: "npm"
|
||||
- uses: actions/download-artifact@v4
|
||||
with:
|
||||
name: build
|
||||
@ -337,7 +328,7 @@ jobs:
|
||||
- run: npm ci
|
||||
- run: cat ormconfig.sample.json | jq 'map(select(.name == "hanaexpress"))' > ormconfig.json
|
||||
- run: docker compose up hanaexpress --no-recreate --wait
|
||||
- run: npx nyc npm run test:ci
|
||||
- run: npx c8 npm run test:ci
|
||||
|
||||
- name: Coveralls Parallel
|
||||
uses: coverallsapp/github-action@v2
|
||||
|
||||
21
.github/workflows/tests-windows.yml
vendored
21
.github/workflows/tests-windows.yml
vendored
@ -11,11 +11,10 @@ jobs:
|
||||
runs-on: windows-latest
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/setup-node@v4
|
||||
- uses: actions/checkout@v5
|
||||
- uses: actions/setup-node@v5
|
||||
with:
|
||||
node-version: ${{ inputs.node-version }}
|
||||
cache: "npm"
|
||||
- uses: actions/download-artifact@v4
|
||||
with:
|
||||
name: build
|
||||
@ -23,7 +22,7 @@ jobs:
|
||||
|
||||
- run: npm ci
|
||||
- run: cp .github/workflows/test/better-sqlite3.ormconfig.json ormconfig.json
|
||||
- run: npx nyc npm run test:ci
|
||||
- run: npx c8 npm run test:ci
|
||||
|
||||
- name: Coveralls Parallel
|
||||
uses: coverallsapp/github-action@v2
|
||||
@ -35,11 +34,10 @@ jobs:
|
||||
runs-on: windows-latest
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/setup-node@v4
|
||||
- uses: actions/checkout@v5
|
||||
- uses: actions/setup-node@v5
|
||||
with:
|
||||
node-version: ${{ inputs.node-version }}
|
||||
cache: "npm"
|
||||
- uses: actions/download-artifact@v4
|
||||
with:
|
||||
name: build
|
||||
@ -47,7 +45,7 @@ jobs:
|
||||
|
||||
- run: npm ci
|
||||
- run: cp .github/workflows/test/sqlite.ormconfig.json ormconfig.json
|
||||
- run: npx nyc npm run test:ci
|
||||
- run: npx c8 npm run test:ci
|
||||
|
||||
- name: Coveralls Parallel
|
||||
uses: coverallsapp/github-action@v2
|
||||
@ -59,11 +57,10 @@ jobs:
|
||||
runs-on: windows-latest
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/setup-node@v4
|
||||
- uses: actions/checkout@v5
|
||||
- uses: actions/setup-node@v5
|
||||
with:
|
||||
node-version: ${{ inputs.node-version }}
|
||||
cache: "npm"
|
||||
- uses: actions/download-artifact@v4
|
||||
with:
|
||||
name: build
|
||||
@ -71,7 +68,7 @@ jobs:
|
||||
|
||||
- run: npm ci
|
||||
- run: cp .github/workflows/test/sqljs.ormconfig.json ormconfig.json
|
||||
- run: npx nyc npm run test:ci
|
||||
- run: npx c8 npm run test:ci
|
||||
|
||||
- name: Coveralls Parallel
|
||||
uses: coverallsapp/github-action@v2
|
||||
|
||||
132
.github/workflows/tests.yml
vendored
Normal file
132
.github/workflows/tests.yml
vendored
Normal file
@ -0,0 +1,132 @@
|
||||
name: Tests
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- master
|
||||
- next
|
||||
pull_request:
|
||||
|
||||
jobs:
|
||||
detect-changes:
|
||||
runs-on: ubuntu-latest
|
||||
outputs:
|
||||
changes: ${{ steps.detect-changes.outputs.changes }}
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- uses: dorny/paths-filter@v3
|
||||
id: detect-changes
|
||||
with:
|
||||
filters: |
|
||||
package-json: &package-json
|
||||
- package.json
|
||||
- package-lock.json
|
||||
|
||||
docs: &docs
|
||||
- docs/**/*
|
||||
|
||||
src: &src
|
||||
- extra/**/*.js
|
||||
- src/**/*.ts
|
||||
- gulpfile.js
|
||||
- tsconfig.json
|
||||
- *package-json
|
||||
|
||||
src-or-tests: &src-or-tests
|
||||
- *src
|
||||
- test/**/*.ts
|
||||
- .github/workflows/test/**/*
|
||||
- .github/workflows/test*.yml
|
||||
- .mocharc.json
|
||||
- .nvmrc
|
||||
- .nycrc.json
|
||||
|
||||
lint: &lint
|
||||
- *src-or-tests
|
||||
- .prettierrc.json
|
||||
- eslint.config.mjs
|
||||
|
||||
formatting:
|
||||
if: contains(needs.detect-changes.outputs.changes, 'lint')
|
||||
needs: detect-changes
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v5
|
||||
- uses: actions/setup-node@v5
|
||||
with:
|
||||
node-version-file: .nvmrc
|
||||
- run: npm ci
|
||||
- run: npm run lint
|
||||
- run: npm run format:ci
|
||||
|
||||
docs:
|
||||
if: contains(needs.detect-changes.outputs.changes, 'docs')
|
||||
needs: detect-changes
|
||||
runs-on: ubuntu-latest
|
||||
defaults:
|
||||
run:
|
||||
working-directory: ./docs
|
||||
steps:
|
||||
- uses: actions/checkout@v5
|
||||
- uses: actions/setup-node@v5
|
||||
with:
|
||||
node-version-file: .nvmrc
|
||||
- run: npm ci
|
||||
- run: npm run build
|
||||
|
||||
build:
|
||||
if: contains(needs.detect-changes.outputs.changes, 'src-or-tests')
|
||||
needs: detect-changes
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v5
|
||||
- uses: actions/setup-node@v5
|
||||
with:
|
||||
node-version-file: .nvmrc
|
||||
- run: npm ci
|
||||
- run: npm run compile
|
||||
- uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: build
|
||||
path: build/
|
||||
retention-days: 1
|
||||
|
||||
tests-linux:
|
||||
if: contains(needs.detect-changes.outputs.changes, 'src-or-tests')
|
||||
needs: [detect-changes, build]
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
node-version: [18, 20]
|
||||
uses: ./.github/workflows/tests-linux.yml
|
||||
with:
|
||||
node-version: ${{matrix.node-version}}
|
||||
|
||||
tests-windows:
|
||||
if: contains(needs.detect-changes.outputs.changes, 'src-or-tests')
|
||||
needs: [detect-changes, build]
|
||||
uses: ./.github/workflows/tests-windows.yml
|
||||
with:
|
||||
node-version: 20
|
||||
|
||||
coverage:
|
||||
if: contains(needs.detect-changes.outputs.changes, 'src-or-tests')
|
||||
runs-on: ubuntu-latest
|
||||
needs: [detect-changes, tests-linux, tests-windows]
|
||||
steps:
|
||||
- uses: coverallsapp/github-action@v2
|
||||
with:
|
||||
parallel-finished: true
|
||||
|
||||
all-passed:
|
||||
runs-on: ubuntu-latest
|
||||
if: always()
|
||||
needs:
|
||||
- build
|
||||
- coverage
|
||||
- docs
|
||||
- formatting
|
||||
- tests-linux
|
||||
- tests-windows
|
||||
steps:
|
||||
- run: exit ${{ contains(needs.*.result, 'failure') && 1 || 0 }}
|
||||
5
.gitignore
vendored
5
.gitignore
vendored
@ -3,12 +3,11 @@
|
||||
._*
|
||||
|
||||
### Node ###
|
||||
npm-debug.log*
|
||||
build/
|
||||
coverage/
|
||||
*.lcov
|
||||
.nyc_output/
|
||||
node_modules/
|
||||
npm-debug.log*
|
||||
*.lcov
|
||||
|
||||
### VisualStudioCode ###
|
||||
.vscode/*
|
||||
|
||||
@ -1,10 +1,10 @@
|
||||
{
|
||||
"__comment": "TODO: remove --exit flag: https://mochajs.org/#-exit",
|
||||
"exit": true,
|
||||
"$schema": "https://json.schemastore.org/mocharc",
|
||||
"check-leaks": true,
|
||||
"color": true,
|
||||
"exit": true,
|
||||
"file": ["./build/compiled/test/utils/test-setup.js"],
|
||||
"recursive": true,
|
||||
"spec": ["./build/compiled/test"],
|
||||
"spec": ["./build/compiled/test/**/*.test.{js,ts}"],
|
||||
"timeout": 90000
|
||||
}
|
||||
|
||||
14
.pr_agent.toml
Normal file
14
.pr_agent.toml
Normal file
@ -0,0 +1,14 @@
|
||||
[github_app]
|
||||
pr_commands = [
|
||||
"/review",
|
||||
"/improve",
|
||||
]
|
||||
|
||||
handle_push_trigger = true
|
||||
push_commands = [
|
||||
"/improve",
|
||||
]
|
||||
|
||||
[auto_best_practices]
|
||||
enable_auto_best_practices = true
|
||||
utilize_auto_best_practices = true
|
||||
28
CHANGELOG.md
28
CHANGELOG.md
@ -1,3 +1,31 @@
|
||||
## [0.3.28](https://github.com/typeorm/typeorm/compare/0.3.27...0.3.28) (2025-12-02)
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* add multiSubnetFailover option for mssql ([#10804](https://github.com/typeorm/typeorm/issues/10804)) ([83e3a8a](https://github.com/typeorm/typeorm/commit/83e3a8a3db581a50495fa2d97c8fcd5d603cfd3c))
|
||||
* circular import in SapDriver.ts ([#11750](https://github.com/typeorm/typeorm/issues/11750)) ([bed7913](https://github.com/typeorm/typeorm/commit/bed79136230d4ab26cce8cf79071134c75527857))
|
||||
* **cli:** init command reading package.json from two folders up ([#11789](https://github.com/typeorm/typeorm/issues/11789)) ([dd55218](https://github.com/typeorm/typeorm/commit/dd55218648eb449937e22e1e7c88182db0048f1d))
|
||||
* **deps:** upgrade glob to fix CVE-2025-64756 ([#11784](https://github.com/typeorm/typeorm/issues/11784)) ([dc74f53](https://github.com/typeorm/typeorm/commit/dc74f5374ef5ec83d53045e4bca99cb9ff7d49d4))
|
||||
* **mongodb:** add missing `findBy` method to MongoEntityManager ([#11814](https://github.com/typeorm/typeorm/issues/11814)) ([38715bb](https://github.com/typeorm/typeorm/commit/38715bbd4169cae2910aac035cd2b05bddbaec5c))
|
||||
* **redis:** version detection logic ([#11815](https://github.com/typeorm/typeorm/issues/11815)) ([6f486e5](https://github.com/typeorm/typeorm/commit/6f486e5a67c007287949be119f233fb2b4fb7a59))
|
||||
* typesense doc sync ([#11807](https://github.com/typeorm/typeorm/issues/11807)) ([d0b5454](https://github.com/typeorm/typeorm/commit/d0b54544e9e43a5330c0485d41551128224fe4d3))
|
||||
|
||||
|
||||
### Features
|
||||
|
||||
* add support for `jsonpath` column type in PostgreSQL ([#11684](https://github.com/typeorm/typeorm/issues/11684)) ([4f05718](https://github.com/typeorm/typeorm/commit/4f05718237a6ef1a3bc623e803536db23f1f327b))
|
||||
* **cli/init:** pick dependencies versions from our own package.json ([#11705](https://github.com/typeorm/typeorm/issues/11705)) ([b930909](https://github.com/typeorm/typeorm/commit/b9309098bc00de047a96cba642ea1ed9e730b1fa))
|
||||
* entity schema support trees ([#11606](https://github.com/typeorm/typeorm/issues/11606)) ([925dee0](https://github.com/typeorm/typeorm/commit/925dee002b92f1210456dce16c18c6b436e912f3))
|
||||
* export QueryPartialEntity and QueryDeepPartialEntity types ([#11748](https://github.com/typeorm/typeorm/issues/11748)) ([ade198c](https://github.com/typeorm/typeorm/commit/ade198c77cda65e86f057f97261073f5ab2b1ed6))
|
||||
* init version in postgres driver only if not set ([#11373](https://github.com/typeorm/typeorm/issues/11373)) ([cb1284c](https://github.com/typeorm/typeorm/commit/cb1284c8c0950dcb792e95b889efe1dfafc05aea))
|
||||
* manage MongoDB SOCKS5 proxy settings ([#11731](https://github.com/typeorm/typeorm/issues/11731)) ([d7867eb](https://github.com/typeorm/typeorm/commit/d7867ebff173e6cae45e6ce82c9f8890811c4eba))
|
||||
* **mssql:** support 'vector' type for MS SQL Server ([#11732](https://github.com/typeorm/typeorm/issues/11732)) ([2681051](https://github.com/typeorm/typeorm/commit/2681051f78c5c284b340e7978f8f337e86c7e915))
|
||||
* **mysql:** add pool size options for each connection ([#11810](https://github.com/typeorm/typeorm/issues/11810)) ([67f793f](https://github.com/typeorm/typeorm/commit/67f793feaa976da717175daf152f738793b94ed2))
|
||||
* **mysql:** add support for vector columns on MariaDB and MySQL ([#11670](https://github.com/typeorm/typeorm/issues/11670)) ([cfb3d6c](https://github.com/typeorm/typeorm/commit/cfb3d6c015ad648a7ffc08a7a11ce580d108ac69))
|
||||
|
||||
|
||||
|
||||
## [0.3.27](https://github.com/typeorm/typeorm/compare/0.3.26...0.3.27) (2025-09-19)
|
||||
|
||||
|
||||
|
||||
@ -18,8 +18,7 @@ If you have a question or want community support:
|
||||
|
||||
## <a name="issue"></a> Found a security vulnerability?
|
||||
|
||||
If you find a security vulnerability or something that should be discussed personally,
|
||||
please contact me within my [email](https://github.com/typeorm/typeorm/blob/master/package.json#L10).
|
||||
If you find a security vulnerability or something that should be discussed privately, please contact us at [maintainers@typeorm.io](mailto:maintainers@typeorm.io).
|
||||
|
||||
## <a name="issue"></a> Found a Bug?
|
||||
|
||||
|
||||
98
DEVELOPER.md
98
DEVELOPER.md
@ -2,11 +2,11 @@
|
||||
|
||||
This document describes how to set up your development environment and run TypeORM test cases.
|
||||
|
||||
* [Prerequisite Software](#prerequisite-software)
|
||||
* [Getting the Sources](#getting-the-sources)
|
||||
* [Installing NPM Modules](#installing-npm-modules)
|
||||
* [Building](#building)
|
||||
* [Running Tests Locally](#running-tests-locally)
|
||||
- [Prerequisite Software](#prerequisite-software)
|
||||
- [Getting the Sources](#getting-the-sources)
|
||||
- [Installing NPM Modules](#installing-npm-modules)
|
||||
- [Building](#building)
|
||||
- [Running Tests Locally](#running-tests-locally)
|
||||
|
||||
See the [contribution guidelines](https://github.com/typeorm/typeorm/blob/master/CONTRIBUTING.md)
|
||||
if you'd like to contribute to TypeORM.
|
||||
@ -16,19 +16,19 @@ if you'd like to contribute to TypeORM.
|
||||
Before you can build and test TypeORM, you must install and configure the
|
||||
following products on your development machine:
|
||||
|
||||
* [Git](http://git-scm.com) and/or the **GitHub app** (for [Mac](http://mac.github.com) or
|
||||
[Windows](http://windows.github.com)); [GitHub's Guide to Installing
|
||||
Git](https://help.github.com/articles/set-up-git) is a good source of information.
|
||||
* [Node.js](http://nodejs.org), (better to install latest version) which is used to run a development web server,
|
||||
run tests, and generate distributable files.
|
||||
Depending on your system, you can install Node either from source or as a pre-packaged bundle.
|
||||
* [Mysql](https://www.mysql.com/) is required to run tests on this platform (or docker)
|
||||
* [MariaDB](https://mariadb.com/) is required to run tests on this platform (or docker)
|
||||
* [Postgres](https://www.postgresql.org/) is required to run tests on this platform (or docker)
|
||||
* [Oracle](https://www.oracle.com/database/index.html) is required to run tests on this platform
|
||||
* [Microsoft SQL Server](https://www.microsoft.com/en-us/cloud-platform/sql-server) is required to run tests on this platform
|
||||
* For MySQL, MariaDB and Postgres you can use [docker](https://www.docker.com/) instead (docker configuration is
|
||||
[here](https://github.com/typeorm/typeorm/blob/master/docker-compose.yml))
|
||||
- [Git](http://git-scm.com) and/or the **GitHub app** (for [Mac](http://mac.github.com) or
|
||||
[Windows](http://windows.github.com)); [GitHub's Guide to Installing
|
||||
Git](https://help.github.com/articles/set-up-git) is a good source of information.
|
||||
- [Node.js](http://nodejs.org), (better to install latest version) which is used to run a development web server,
|
||||
run tests, and generate distributable files.
|
||||
Depending on your system, you can install Node either from source or as a pre-packaged bundle.
|
||||
- [Mysql](https://www.mysql.com/) is required to run tests on this platform (or docker)
|
||||
- [MariaDB](https://mariadb.com/) is required to run tests on this platform (or docker)
|
||||
- [Postgres](https://www.postgresql.org/) is required to run tests on this platform (or docker)
|
||||
- [Oracle](https://www.oracle.com/database/index.html) is required to run tests on this platform
|
||||
- [Microsoft SQL Server](https://www.microsoft.com/en-us/cloud-platform/sql-server) is required to run tests on this platform
|
||||
- For MySQL, MariaDB and Postgres you can use [docker](https://www.docker.com/) instead (docker configuration is
|
||||
[here](https://github.com/typeorm/typeorm/blob/master/docker-compose.yml))
|
||||
|
||||
## Getting the Sources
|
||||
|
||||
@ -50,7 +50,17 @@ cd typeorm
|
||||
git remote add upstream https://github.com/typeorm/typeorm.git
|
||||
```
|
||||
|
||||
## Installing NPM Modules
|
||||
## Node
|
||||
|
||||
You should have node installed in the version described in [.nvmrc](.nvmrc).
|
||||
|
||||
It is recommended to configure your OS to automatically switch to use this version whenever you enter project folder. This can be achieved in many ways:
|
||||
|
||||
- [`fnm`](https://github.com/Schniz/fnm)
|
||||
- [`zsh-nvm`](https://github.com/lukechilds/zsh-nvm#auto-use)
|
||||
- [`asdf`](https://asdf-vm.com) with `asdf-nodejs` plugin and [`legacy_version_file = true`](https://asdf-vm.com/manage/configuration.html#legacy-version-file) option
|
||||
|
||||
## Installing package dependencies
|
||||
|
||||
Install all TypeORM dependencies by running this command:
|
||||
|
||||
@ -89,38 +99,48 @@ You can copy this tar into your project and run `npm install ./typeorm-x.x.x.tgz
|
||||
|
||||
## Running Tests Locally
|
||||
|
||||
It is greatly appreciated if PRs that change code come with appropriate tests.
|
||||
It is greatly appreciated if PRs that change code come with appropriate tests.
|
||||
|
||||
To create a new test, check the [relevant functional tests](https://github.com/typeorm/typeorm/tree/master/test/functional). Depending on the test, you may need to create a new test file or modify an existing one.
|
||||
To create a new test, check the [relevant functional tests](https://github.com/typeorm/typeorm/tree/master/test/functional). Depending on the test, you may need to create a new `.test.ts` file or modify an existing one.
|
||||
|
||||
If the test is for a specific regression or issue opened on GitHub, add a comment to the tests mentioning the issue number.
|
||||
|
||||
Most tests will benefit from using this template as a starting point:
|
||||
|
||||
```ts
|
||||
import "reflect-metadata";
|
||||
import { createTestingConnections, closeTestingConnections, reloadTestingDatabases } from "../../utils/test-utils";
|
||||
import { expect } from "chai"
|
||||
import "reflect-metadata"
|
||||
import {
|
||||
closeTestingConnections,
|
||||
createTestingConnections,
|
||||
reloadTestingDatabases,
|
||||
} from "../../utils/test-utils"
|
||||
import { DataSource } from "../../../src/data-source/DataSource"
|
||||
import { expect } from "chai";
|
||||
|
||||
describe("description of the functionality you're testing", () => {
|
||||
let dataSources: DataSource[]
|
||||
|
||||
let dataSources: DataSource[];
|
||||
before(async () => dataSources = await createTestingConnections({
|
||||
entities: [__dirname + "/entity/*{.js,.ts}"],
|
||||
schemaCreate: true,
|
||||
dropSchema: true,
|
||||
}));
|
||||
beforeEach(() => reloadTestingDatabases(dataSources));
|
||||
after(() => closeTestingConnections(dataSources));
|
||||
before(
|
||||
async () =>
|
||||
(dataSources = await createTestingConnections({
|
||||
entities: [__dirname + "/entity/*{.js,.ts}"],
|
||||
schemaCreate: true,
|
||||
dropSchema: true,
|
||||
})),
|
||||
)
|
||||
beforeEach(() => reloadTestingDatabases(dataSources))
|
||||
after(() => closeTestingConnections(dataSources))
|
||||
|
||||
// optional: test fix for issue https://github.com/typeorm/typeorm/issues/<issue-number>
|
||||
it("should <put a detailed description of what it should do here>", () => Promise.all(dataSources.map(async dataSource => {
|
||||
// tests go here
|
||||
})));
|
||||
it("should <put a detailed description of what it should do here>", () =>
|
||||
Promise.all(
|
||||
dataSources.map(async (dataSource) => {
|
||||
// tests go here
|
||||
}),
|
||||
))
|
||||
|
||||
// you can add additional tests if needed
|
||||
});
|
||||
// you can add additional tests if needed
|
||||
})
|
||||
```
|
||||
|
||||
If you place entities in `./entity/<entity-name>.ts` relative to your test file,
|
||||
@ -163,8 +183,8 @@ Once TypeScript finishes compiling your changes, you can run `npm run test:fast`
|
||||
To run your tests you need the Database Management Systems (DBMS) installed on your machine. Alternatively, you can use docker with the DBMS running in containers. To have docker run all the DBMS for you simply run `docker-compose up`
|
||||
in the root of the project. Once all images are fetched and are running, you can run the tests.
|
||||
|
||||
- The docker image of mssql-server needs at least 3.25GB of RAM.
|
||||
- Make sure to assign enough memory to the Docker VM if you're running on Docker for Mac or Windows
|
||||
- The docker image of mssql-server needs at least 3.25GB of RAM.
|
||||
- Make sure to assign enough memory to the Docker VM if you're running on Docker for Mac or Windows
|
||||
|
||||
## Release Process
|
||||
|
||||
|
||||
1208
README-zh_CN.md
1208
README-zh_CN.md
File diff suppressed because it is too large
Load Diff
36
README.md
36
README.md
@ -8,11 +8,11 @@
|
||||
</a>
|
||||
<br>
|
||||
<br>
|
||||
<a href="https://www.npmjs.com/package/typeorm"><img src="https://img.shields.io/npm/v/typeorm" alt="NPM Version" /></a>
|
||||
<a href="https://www.npmjs.com/package/typeorm"><img src="https://img.shields.io/npm/dm/typeorm" alt="NPM Downloads" /></a>
|
||||
<a href="https://github.com/typeorm/typeorm/actions/workflows/commit-validation.yml?query=branch%3Amaster"><img src="https://github.com/typeorm/typeorm/actions/workflows/commit-validation.yml/badge.svg?branch=master" alt="Commit Validation"/></a>
|
||||
<a href="https://coveralls.io/github/typeorm/typeorm?branch=master"><img src="https://coveralls.io/repos/github/typeorm/typeorm/badge.svg?branch=master" alt="Coverage Status" /></a>
|
||||
<a href=""><img src="https://img.shields.io/badge/License-MIT-teal.svg" alt="MIT License" /></a>
|
||||
<a href="https://www.npmjs.com/package/typeorm"><img src="https://img.shields.io/npm/v/typeorm" alt="NPM Version"/></a>
|
||||
<a href="https://www.npmjs.com/package/typeorm"><img src="https://img.shields.io/npm/dm/typeorm" alt="NPM Downloads"/></a>
|
||||
<a href="https://github.com/typeorm/typeorm/actions/workflows/tests.yml?query=branch%3Amaster"><img src="https://github.com/typeorm/typeorm/actions/workflows/tests.yml/badge.svg?branch=master" alt="Commit Validation"/></a>
|
||||
<a href="https://coveralls.io/github/typeorm/typeorm?branch=master"><img src="https://coveralls.io/repos/github/typeorm/typeorm/badge.svg?branch=master" alt="Coverage Status"/></a>
|
||||
<a href=""><img src="https://img.shields.io/badge/License-MIT-teal.svg" alt="MIT License"/></a>
|
||||
<br>
|
||||
<br>
|
||||
</div>
|
||||
@ -207,12 +207,28 @@ This project exists thanks to all the people who contribute:
|
||||
|
||||
## Sponsors
|
||||
|
||||
Open source is hard and time-consuming. If you want to invest in TypeORM's future, you can become a sponsor and allow our core team to spend more time on TypeORM's improvements and new features. [Become a sponsor](https://opencollective.com/typeorm)
|
||||
Open source is hard and time-consuming. If you want to invest in TypeORM's future, you can become a sponsor and allow our core team to spend more time on TypeORM's improvements and new features.
|
||||
|
||||
<a href="https://opencollective.com/typeorm" target="_blank"><img src="https://opencollective.com/typeorm/tiers/sponsor.svg?width=890"></a>
|
||||
### Champion
|
||||
|
||||
## Gold Sponsors
|
||||
Become a champion sponsor and get premium technical support from our core contributors. [Become a champion](https://opencollective.com/typeorm)
|
||||
|
||||
Become a gold sponsor and get premium technical support from our core contributors. [Become a gold sponsor](https://opencollective.com/typeorm)
|
||||
<a href="https://opencollective.com/typeorm" target="_blank"><img src="https://opencollective.com/typeorm/tiers/gold-sponsor.svg?avatarHeight=36"></a>
|
||||
|
||||
<a href="https://opencollective.com/typeorm" target="_blank"><img src="https://opencollective.com/typeorm/tiers/gold-sponsor.svg?width=890"></a>
|
||||
### Supporter
|
||||
|
||||
Support TypeORM's development with a monthly contribution. [Become a supporter](https://opencollective.com/typeorm)
|
||||
|
||||
<a href="https://opencollective.com/typeorm" target="_blank"><img src="https://opencollective.com/typeorm/tiers/love.svg?avatarHeight=36"></a>
|
||||
|
||||
### Community
|
||||
|
||||
Join our community of supporters and help sustain TypeORM. [Become a community supporter](https://opencollective.com/typeorm)
|
||||
|
||||
<a href="https://opencollective.com/typeorm" target="_blank"><img src="https://opencollective.com/typeorm/tiers/like.svg?avatarHeight=36"></a>
|
||||
|
||||
### Sponsor
|
||||
|
||||
Make a one-time or recurring contribution of your choice. [Become a sponsor](https://opencollective.com/typeorm)
|
||||
|
||||
<a href="https://opencollective.com/typeorm" target="_blank"><img src="https://opencollective.com/typeorm/tiers/sponsor.svg?avatarHeight=36"></a>
|
||||
|
||||
1205
README_ko.md
1205
README_ko.md
File diff suppressed because it is too large
Load Diff
@ -12,7 +12,7 @@ services:
|
||||
MYSQL_DATABASE: "test"
|
||||
|
||||
mysql-9:
|
||||
image: "mysql:9.4.0"
|
||||
image: "mysql:9.5.0"
|
||||
container_name: "typeorm-mysql-9"
|
||||
ports:
|
||||
- "3306:3306"
|
||||
@ -24,7 +24,7 @@ services:
|
||||
|
||||
# mariadb
|
||||
mariadb-10:
|
||||
image: "mariadb:10.6.22-jammy"
|
||||
image: "mariadb:10.6.24-jammy"
|
||||
container_name: "typeorm-mariadb-10"
|
||||
ports:
|
||||
- "3307:3306"
|
||||
@ -35,7 +35,7 @@ services:
|
||||
MYSQL_DATABASE: "test"
|
||||
|
||||
mariadb-12:
|
||||
image: "mariadb:12.0.1-rc"
|
||||
image: "mariadb:12.1.2"
|
||||
container_name: "typeorm-mariadb-12"
|
||||
ports:
|
||||
- "3307:3306"
|
||||
@ -73,13 +73,14 @@ services:
|
||||
|
||||
# mssql
|
||||
mssql:
|
||||
image: "mcr.microsoft.com/mssql/server:2022-latest"
|
||||
image: "mcr.microsoft.com/mssql/server:2025-latest"
|
||||
container_name: "typeorm-mssql"
|
||||
ports:
|
||||
- "1433:1433"
|
||||
environment:
|
||||
SA_PASSWORD: "Admin12345"
|
||||
ACCEPT_EULA: "Y"
|
||||
MSSQL_PID: "Express"
|
||||
|
||||
# cockroachdb
|
||||
cockroachdb:
|
||||
@ -142,7 +143,7 @@ services:
|
||||
|
||||
# mongodb
|
||||
mongodb:
|
||||
image: "mongo:5.0.31"
|
||||
image: "mongo:8"
|
||||
container_name: "typeorm-mongodb"
|
||||
ports:
|
||||
- "27017:27017"
|
||||
|
||||
1
docs/.gitignore
vendored
1
docs/.gitignore
vendored
@ -8,6 +8,7 @@
|
||||
.docusaurus
|
||||
.cache-loader
|
||||
llms.txt
|
||||
llms-full.txt
|
||||
|
||||
# Misc
|
||||
.DS_Store
|
||||
|
||||
@ -46,7 +46,7 @@ export class User {
|
||||
|
||||
## Unique indices
|
||||
|
||||
To create an unique index you need to specify `{ unique: true }` in the index options:
|
||||
To create a unique index you need to specify `{ unique: true }` in the index options:
|
||||
|
||||
> Note: CockroachDB stores unique indices as `UNIQUE` constraints
|
||||
|
||||
@ -133,10 +133,10 @@ export class Thing {
|
||||
|
||||
## Concurrent creation
|
||||
|
||||
In order to avoid having to obtain an access exclusive lock when creating and dropping indexes in postgres, you may create them using the CONCURRENTLY modifier.
|
||||
If you want use the concurrent option, you need set `migrationsTransactionMode: none` between data source options.
|
||||
In order to avoid having to obtain an ACCESS EXCLUSIVE lock when creating and dropping indexes in Postgres, you may create them using the CONCURRENTLY modifier.
|
||||
If you want to use the concurrent option, you need to set `migrationsTransactionMode: none` in your data source options.
|
||||
|
||||
Typeorm supports generating SQL with this option if when the concurrent option is specified on the index.
|
||||
TypeORM supports generating SQL with this option when the concurrent option is specified on the index.
|
||||
|
||||
```typescript
|
||||
@Index(["firstName", "middleName", "lastName"], { concurrent: true })
|
||||
@ -146,10 +146,10 @@ For more information see the [Postgres documentation](https://www.postgresql.org
|
||||
|
||||
## Disabling synchronization
|
||||
|
||||
TypeORM does not support some index options and definitions (e.g. `lower`, `pg_trgm`) because of lot of different database specifics and multiple
|
||||
issues with getting information about exist database indices and synchronizing them automatically. In such cases you should create index manually
|
||||
(for example in the migrations) with any index signature you want. To make TypeORM ignore these indices during synchronization use `synchronize: false`
|
||||
option on `@Index` decorator.
|
||||
TypeORM does not support some index options and definitions (e.g. `lower`, `pg_trgm`) due to many database-specific differences and multiple
|
||||
issues with getting information about existing database indices and synchronizing them automatically. In such cases you should create the index manually
|
||||
(for example, in [the migrations](../migrations/01-why.md)) with any index signature you want. To make TypeORM ignore these indices during synchronization, use `synchronize: false`
|
||||
option on the `@Index` decorator.
|
||||
|
||||
For example, you create an index with case-insensitive comparison:
|
||||
|
||||
|
||||
@ -91,7 +91,7 @@ export class Post {
|
||||
### `@BeforeRemove`
|
||||
|
||||
You can define a method with any name in the entity and mark it with `@BeforeRemove`
|
||||
and TypeORM will call it before a entity is removed using repository/manager `remove`.
|
||||
and TypeORM will call it before an entity is removed using repository/manager `remove`.
|
||||
Example:
|
||||
|
||||
```typescript
|
||||
@ -123,7 +123,7 @@ export class Post {
|
||||
### `@BeforeSoftRemove`
|
||||
|
||||
You can define a method with any name in the entity and mark it with `@BeforeSoftRemove`
|
||||
and TypeORM will call it before a entity is soft removed using repository/manager `softRemove`.
|
||||
and TypeORM will call it before an entity is soft removed using repository/manager `softRemove`.
|
||||
Example:
|
||||
|
||||
```typescript
|
||||
@ -155,7 +155,7 @@ export class Post {
|
||||
### `@BeforeRecover`
|
||||
|
||||
You can define a method with any name in the entity and mark it with `@BeforeRecover`
|
||||
and TypeORM will call it before a entity is recovered using repository/manager `recover`.
|
||||
and TypeORM will call it before an entity is recovered using repository/manager `recover`.
|
||||
Example:
|
||||
|
||||
```typescript
|
||||
|
||||
@ -124,66 +124,15 @@ typeorm subscriber:create path-to-subscriber-dir/subscriber
|
||||
|
||||
Learn more about [Subscribers](./4-listeners-and-subscribers.md).
|
||||
|
||||
## Create a new migration
|
||||
## Manage migrations
|
||||
|
||||
You can create a new migration using CLI:
|
||||
* `typeorm migration:create` - [create](../migrations/03-creating.md) empty migration
|
||||
* `typeorm migration:generate` - [generate](../migrations/04-generating.md) migration comparing entities with actual database schema
|
||||
* `typeorm migration:run` - [execute](../migrations/05-executing.md) all migrations
|
||||
* `typeorm migration:revert` - [revert](../migrations/06-reverting.md) last migration
|
||||
* `typeorm migration:show` - [list](../migrations/07-status.md) all migrations with their execution status
|
||||
|
||||
```shell
|
||||
typeorm migration:create path-to-migrations-dir/migrationName
|
||||
```
|
||||
|
||||
Learn more about [Migrations](./1-migrations.md).
|
||||
|
||||
## Generate a migration from existing table schema
|
||||
|
||||
Automatic migration generation creates a new migration file
|
||||
and writes all sql queries that must be executed to update the database.
|
||||
|
||||
If there were no changes generated, the command will exit with code 1.
|
||||
|
||||
```shell
|
||||
typeorm migration:generate path/to/Migration -d path/to/datasource
|
||||
```
|
||||
|
||||
The rule of thumb is to generate a migration after each entity change.
|
||||
The -d argument value should specify the path where your DataSource instance is defined.
|
||||
You can specify the path and name of the migration with the first argument.
|
||||
|
||||
Learn more about [Migrations](./1-migrations.md).
|
||||
|
||||
## Run migrations
|
||||
|
||||
To execute all pending migrations use following command:
|
||||
|
||||
```shell
|
||||
typeorm migration:run -- -d path-to-datasource-config
|
||||
```
|
||||
|
||||
Learn more about [Migrations](./1-migrations.md).
|
||||
|
||||
## Revert migrations
|
||||
|
||||
To revert the most recently executed migration use the following command:
|
||||
|
||||
```shell
|
||||
typeorm migration:revert -- -d path-to-datasource-config
|
||||
```
|
||||
|
||||
This command will undo only the last executed migration.
|
||||
You can execute this command multiple times to revert multiple migrations.
|
||||
Learn more about [Migrations](./1-migrations.md).
|
||||
|
||||
## Show migrations
|
||||
|
||||
To show all migrations and whether they've been run or not use following command:
|
||||
|
||||
```shell
|
||||
typeorm migration:show -- -d path-to-datasource-config
|
||||
```
|
||||
|
||||
[X] = Migration has been run
|
||||
|
||||
[ ] = Migration is pending/unapplied
|
||||
Learn more about [Migrations](../migrations/01-why.md).
|
||||
|
||||
## Sync database schema
|
||||
|
||||
|
||||
@ -28,12 +28,6 @@ Different RDBMS-es have their own specific options.
|
||||
Example: `subscribers: [PostSubscriber, AppSubscriber, "subscriber/*.js", "modules/**/subscriber/*.js"]`.
|
||||
Learn more about [Subscribers](../advanced-topics/4-listeners-and-subscribers.md).
|
||||
|
||||
- `migrations` - Migrations to be loaded and used for this data source.
|
||||
It accepts both migration classes and directories from which to load.
|
||||
Directories support glob patterns.
|
||||
Example: `migrations: [FirstMigration, SecondMigration, "migration/*.js", "modules/**/migration/*.js"]`.
|
||||
Learn more about [Migrations](../advanced-topics/1-migrations.md).
|
||||
|
||||
- `logging` - Indicates if logging is enabled or not.
|
||||
If set to `true` then query and error logging will be enabled.
|
||||
You can also specify different types of logging to be enabled, for example `["query", "error", "schema"]`.
|
||||
@ -67,13 +61,13 @@ Different RDBMS-es have their own specific options.
|
||||
Note that for MongoDB database it does not create schema, because MongoDB is schemaless.
|
||||
Instead, it syncs just by creating indices.
|
||||
|
||||
- `migrationsRun` - Indicates if migrations should be auto run on every application launch.
|
||||
As an alternative, you can use CLI and run migration:run command.
|
||||
- `migrations` - [Migrations](../migrations/01-why.md) to be loaded and used for this data source
|
||||
|
||||
- `migrationsTableName` - Name of the table in the database which is going to contain information about executed migrations.
|
||||
By default, this table is called "migrations".
|
||||
- `migrationsRun` - Indicates if [migrations](../migrations/01-why.md) should be auto-run on every application launch.
|
||||
|
||||
- `migrationsTransactionMode` - Control transactions for migrations (default: `all`), can be one of `all` | `none` | `each`
|
||||
- `migrationsTableName` - Name of the table in the database which is going to contain information about executed [migrations](../migrations/01-why.md).
|
||||
|
||||
- `migrationsTransactionMode` - Controls transaction mode when running [migrations](../migrations/01-why.md).
|
||||
|
||||
- `metadataTableName` - Name of the table in the database which is going to contain information about table metadata.
|
||||
By default, this table is called "typeorm_metadata".
|
||||
@ -85,7 +79,7 @@ Different RDBMS-es have their own specific options.
|
||||
eg. `.where("user.firstName = :search OR user.lastName = :search")` becomes `WHERE (user.firstName = ? OR user.lastName = ?)` instead of `WHERE user.firstName = ? OR user.lastName = ?`
|
||||
|
||||
- `invalidWhereValuesBehavior` - Controls how null and undefined values are handled in where conditions across all TypeORM operations (find operations, query builders, repository methods).
|
||||
|
||||
|
||||
- `null` behavior options:
|
||||
- `'ignore'` (default) - skips null properties
|
||||
- `'sql-null'` - transforms null to SQL NULL
|
||||
|
||||
@ -165,4 +165,64 @@ Based on [tedious](https://tediousjs.github.io/node-mssql/) MSSQL implementation
|
||||
|
||||
## Column Types
|
||||
|
||||
`int`, `bigint`, `bit`, `decimal`, `money`, `numeric`, `smallint`, `smallmoney`, `tinyint`, `float`, `real`, `date`, `datetime2`, `datetime`, `datetimeoffset`, `smalldatetime`, `time`, `char`, `varchar`, `text`, `nchar`, `nvarchar`, `ntext`, `binary`, `image`, `varbinary`, `hierarchyid`, `sql_variant`, `timestamp`, `uniqueidentifier`, `xml`, `geometry`, `geography`, `rowversion`
|
||||
`int`, `bigint`, `bit`, `decimal`, `money`, `numeric`, `smallint`, `smallmoney`, `tinyint`, `float`, `real`, `date`, `datetime2`, `datetime`, `datetimeoffset`, `smalldatetime`, `time`, `char`, `varchar`, `text`, `nchar`, `nvarchar`, `ntext`, `binary`, `image`, `varbinary`, `hierarchyid`, `sql_variant`, `timestamp`, `uniqueidentifier`, `xml`, `geometry`, `geography`, `rowversion`, `vector`
|
||||
|
||||
### Vector Type (vector)
|
||||
|
||||
The `vector` data type is available in SQL Server for storing high-dimensional vectors, commonly used for:
|
||||
|
||||
- Semantic search with embeddings
|
||||
- Recommendation systems
|
||||
- Similarity matching
|
||||
- Machine learning applications
|
||||
|
||||
NOTE: general `halfvec` type support is unavailable because this feature is still in preview. See the Microsoft docs: [Vector data type](https://learn.microsoft.com/en-us/sql/t-sql/data-types/vector-data-type).
|
||||
|
||||
#### Usage
|
||||
|
||||
```typescript
|
||||
@Entity()
|
||||
export class DocumentChunk {
|
||||
@PrimaryGeneratedColumn()
|
||||
id: number
|
||||
|
||||
@Column("varchar")
|
||||
content: string
|
||||
|
||||
// Vector column with 1998 dimensions
|
||||
@Column("vector", { length: 1998 })
|
||||
embedding: number[]
|
||||
}
|
||||
```
|
||||
|
||||
#### Vector Similarity Search
|
||||
|
||||
SQL Server provides the `VECTOR_DISTANCE` function for calculating distances between vectors:
|
||||
|
||||
```typescript
|
||||
const queryEmbedding = [
|
||||
/* your query vector */
|
||||
]
|
||||
|
||||
const results = await dataSource.query(
|
||||
`
|
||||
DECLARE @question AS VECTOR (1998) = @0;
|
||||
SELECT TOP (10) dc.*,
|
||||
VECTOR_DISTANCE('cosine', @question, embedding) AS distance
|
||||
FROM document_chunk dc
|
||||
ORDER BY VECTOR_DISTANCE('cosine', @question, embedding)
|
||||
`,
|
||||
[JSON.stringify(queryEmbedding)],
|
||||
)
|
||||
```
|
||||
|
||||
**Distance Metrics:**
|
||||
|
||||
- `'cosine'` - Cosine distance (most common for semantic search)
|
||||
- `'euclidean'` - Euclidean (L2) distance
|
||||
- `'dot'` - Negative dot product
|
||||
|
||||
**Requirements:**
|
||||
|
||||
- SQL Server version with vector support enabled
|
||||
- Vector dimensions must be specified using the `length` option
|
||||
|
||||
@ -34,6 +34,10 @@ See [Data Source Options](../data-source/2-data-source-options.md) for the commo
|
||||
|
||||
- `database` - Database name.
|
||||
|
||||
- `socketPath` - Database socket path.
|
||||
|
||||
- `poolSize` - Maximum number of clients the pool should contain for each connection.
|
||||
|
||||
- `charset` and `collation` - The charset/collation for the connection. If an SQL-level charset is specified (like utf8mb4) then the default collation for that charset is used.
|
||||
|
||||
- `timezone` - the timezone configured on the MySQL server. This is used to typecast server date/time
|
||||
@ -139,3 +143,7 @@ export class User {
|
||||
roles: UserRoleType[]
|
||||
}
|
||||
```
|
||||
|
||||
### Vector Types
|
||||
|
||||
MySQL supports the [VECTOR type](https://dev.mysql.com/doc/refman/en/vector.html) since version 9.0, while in MariaDB, [vectors](https://mariadb.com/docs/server/reference/sql-structure/vectors/vector-overview) are available since 11.7.
|
||||
|
||||
@ -60,7 +60,7 @@ Additional options can be added to the `extra` object and will be passed directl
|
||||
|
||||
### Column types for `postgres`
|
||||
|
||||
`int`, `int2`, `int4`, `int8`, `smallint`, `integer`, `bigint`, `decimal`, `numeric`, `real`, `float`, `float4`, `float8`, `double precision`, `money`, `character varying`, `varchar`, `character`, `char`, `text`, `citext`, `hstore`, `bytea`, `bit`, `varbit`, `bit varying`, `timetz`, `timestamptz`, `timestamp`, `timestamp without time zone`, `timestamp with time zone`, `date`, `time`, `time without time zone`, `time with time zone`, `interval`, `bool`, `boolean`, `enum`, `point`, `line`, `lseg`, `box`, `path`, `polygon`, `circle`, `cidr`, `inet`, `macaddr`, `macaddr8`, `tsvector`, `tsquery`, `uuid`, `xml`, `json`, `jsonb`, `jsonpath`, `int4range`, `int8range`, `numrange`, `tsrange`, `tstzrange`, `daterange`, `int4multirange`, `int8multirange`, `nummultirange`, `tsmultirange`, `tstzmultirange`, `multidaterange`, `geometry`, `geography`, `cube`, `ltree`
|
||||
`int`, `int2`, `int4`, `int8`, `smallint`, `integer`, `bigint`, `decimal`, `numeric`, `real`, `float`, `float4`, `float8`, `double precision`, `money`, `character varying`, `varchar`, `character`, `char`, `text`, `citext`, `hstore`, `bytea`, `bit`, `varbit`, `bit varying`, `timetz`, `timestamptz`, `timestamp`, `timestamp without time zone`, `timestamp with time zone`, `date`, `time`, `time without time zone`, `time with time zone`, `interval`, `bool`, `boolean`, `enum`, `point`, `line`, `lseg`, `box`, `path`, `polygon`, `circle`, `cidr`, `inet`, `macaddr`, `macaddr8`, `tsvector`, `tsquery`, `uuid`, `xml`, `json`, `jsonb`, `jsonpath`, `int4range`, `int8range`, `numrange`, `tsrange`, `tstzrange`, `daterange`, `int4multirange`, `int8multirange`, `nummultirange`, `tsmultirange`, `tstzmultirange`, `multidaterange`, `geometry`, `geography`, `cube`, `ltree`, `vector`, `halfvec`.
|
||||
|
||||
### Column types for `cockroachdb`
|
||||
|
||||
@ -68,6 +68,33 @@ Additional options can be added to the `extra` object and will be passed directl
|
||||
|
||||
Note: CockroachDB returns all numeric data types as `string`. However, if you omit the column type and define your property as `number` ORM will `parseInt` string into number.
|
||||
|
||||
### Vector columns
|
||||
|
||||
Vector columns can be used for similarity searches using PostgreSQL's vector operators:
|
||||
|
||||
```typescript
|
||||
// L2 distance (Euclidean) - <->
|
||||
const results = await dataSource.sql`
|
||||
SELECT id, embedding
|
||||
FROM post
|
||||
ORDER BY embedding <-> ${"[1,2,3]"}
|
||||
LIMIT 5`
|
||||
|
||||
// Cosine distance - <=>
|
||||
const results = await dataSource.sql`
|
||||
SELECT id, embedding
|
||||
FROM post
|
||||
ORDER BY embedding <=> ${"[1,2,3]"}
|
||||
LIMIT 5`
|
||||
|
||||
// Inner product - <#>
|
||||
const results = await dataSource.sql`
|
||||
SELECT id, embedding
|
||||
FROM post
|
||||
ORDER BY embedding <#> ${"[1,2,3]"}
|
||||
LIMIT 5`
|
||||
```
|
||||
|
||||
### Spatial columns
|
||||
|
||||
TypeORM's PostgreSQL and CockroachDB support uses [GeoJSON](http://geojson.org/) as an interchange format, so geometry columns should be tagged either as `object` or `Geometry` (or subclasses, e.g. `Point`) after importing [`geojson` types](https://www.npmjs.com/package/@types/geojson) or using the TypeORM built-in GeoJSON types:
|
||||
|
||||
@ -37,15 +37,16 @@ SAP HANA 2.0 and SAP HANA Cloud support slightly different data types. Check the
|
||||
- [SAP HANA 2.0 Data Types](https://help.sap.com/docs/SAP_HANA_PLATFORM/4fe29514fd584807ac9f2a04f6754767/20a1569875191014b507cf392724b7eb.html?locale=en-US)
|
||||
- [SAP HANA Cloud Data Types](https://help.sap.com/docs/hana-cloud-database/sap-hana-cloud-sap-hana-database-sql-reference-guide/data-types)
|
||||
|
||||
TypeORM's `SapDriver` supports `tinyint`, `smallint`, `integer`, `bigint`, `smalldecimal`, `decimal`, `real`, `double`, `date`, `time`, `seconddate`, `timestamp`, `boolean`, `char`, `nchar`, `varchar`, `nvarchar`, `text`, `alphanum`, `shorttext`, `array`, `varbinary`, `blob`, `clob`, `nclob`, `st_geometry`, `st_point`, `real_vector`, `half_vector`, `vector`, and `halfvec`. Some of these data types have been deprecated or removed in SAP HANA Cloud, and will be converted to the closest available alternative when connected to a Cloud database.
|
||||
TypeORM's `SapDriver` supports `tinyint`, `smallint`, `integer`, `bigint`, `smalldecimal`, `decimal`, `real`, `double`, `date`, `time`, `seconddate`, `timestamp`, `boolean`, `char`, `nchar`, `varchar`, `nvarchar`, `text`, `alphanum`, `shorttext`, `array`, `varbinary`, `blob`, `clob`, `nclob`, `st_geometry`, `st_point`, `real_vector` and `half_vector`. Some of these data types have been deprecated or removed in SAP HANA Cloud, and will be converted to the closest available alternative when connected to a Cloud database.
|
||||
|
||||
### Vector Types
|
||||
|
||||
The `real_vector` and `half_vector` data types were introduced in SAP HANA Cloud (2024Q1 and 2025Q2 respectively), and require a supported version of `@sap/hana-client` as well.
|
||||
The `real_vector` and `half_vector` data types were introduced in SAP HANA Cloud (2024Q1 and 2025Q2 respectively), and require a supported version of `@sap/hana-client` as well.
|
||||
|
||||
For consistency with PostgreSQL's vector support, TypeORM also provides aliases:
|
||||
- `vector` (alias for `real_vector`) - stores vectors as 4-byte floats
|
||||
- `halfvec` (alias for `half_vector`) - stores vectors as 2-byte floats for memory efficiency
|
||||
|
||||
- `vector` (alias for `real_vector`) - stores vectors as 4-byte floats
|
||||
- `halfvec` (alias for `half_vector`) - stores vectors as 2-byte floats for memory efficiency
|
||||
|
||||
```typescript
|
||||
@Entity()
|
||||
@ -70,3 +71,5 @@ export class Document {
|
||||
```
|
||||
|
||||
By default, the client will return a `Buffer` in the `fvecs`/`hvecs` format, which is more efficient. It is possible to let the driver convert the values to a `number[]` by adding `{ extra: { vectorOutputType: "Array" } }` to the connection options. Check the SAP HANA Client documentation for more information about [REAL_VECTOR](https://help.sap.com/docs/SAP_HANA_CLIENT/f1b440ded6144a54ada97ff95dac7adf/0d197e4389c64e6b9cf90f6f698f62fe.html) or [HALF_VECTOR](https://help.sap.com/docs/SAP_HANA_CLIENT/f1b440ded6144a54ada97ff95dac7adf/8bb854b4ce4a4299bed27c365b717e91.html).
|
||||
|
||||
Use the appropriate [vector functions](https://help.sap.com/docs/hana-cloud-database/sap-hana-cloud-sap-hana-database-sql-reference-guide/vector-functions) for similarity searches.
|
||||
|
||||
@ -180,67 +180,6 @@ There are several special column types with additional functionality available:
|
||||
each time you call `save` of entity manager or repository, or during `upsert` operations when an update occurs.
|
||||
You don't need to set this column - it will be automatically set.
|
||||
|
||||
### Vector columns
|
||||
|
||||
Vector columns are supported on both PostgreSQL (via [`pgvector`](https://github.com/pgvector/pgvector) extension) and SAP HANA Cloud, enabling storing and querying vector embeddings for similarity search and machine learning applications.
|
||||
|
||||
TypeORM supports both `vector` and `halfvec` column types across databases:
|
||||
|
||||
- `vector` - stores vectors as 4-byte floats (single precision)
|
||||
- PostgreSQL: native `vector` type via pgvector extension
|
||||
- SAP HANA: alias for `real_vector` type
|
||||
- `halfvec` - stores vectors as 2-byte floats (half precision) for memory efficiency
|
||||
- PostgreSQL: native `halfvec` type via pgvector extension
|
||||
- SAP HANA: alias for `half_vector` type
|
||||
|
||||
You can specify the vector dimensions using the `length` option:
|
||||
|
||||
```typescript
|
||||
@Entity()
|
||||
export class Post {
|
||||
@PrimaryGeneratedColumn()
|
||||
id: number
|
||||
|
||||
// Vector without specified dimensions (works on PostgreSQL and SAP HANA)
|
||||
@Column("vector")
|
||||
embedding: number[] | Buffer
|
||||
|
||||
// Vector with 3 dimensions: vector(3) (works on PostgreSQL and SAP HANA)
|
||||
@Column("vector", { length: 3 })
|
||||
embedding_3d: number[] | Buffer
|
||||
|
||||
// Half-precision vector with 4 dimensions: halfvec(4) (works on PostgreSQL and SAP HANA)
|
||||
@Column("halfvec", { length: 4 })
|
||||
halfvec_embedding: number[] | Buffer
|
||||
}
|
||||
```
|
||||
|
||||
Vector columns can be used for similarity searches using PostgreSQL's vector operators:
|
||||
|
||||
```typescript
|
||||
// L2 distance (Euclidean) - <->
|
||||
const results = await dataSource.query(
|
||||
`SELECT id, embedding FROM post ORDER BY embedding <-> $1 LIMIT 5`,
|
||||
["[1,2,3]"]
|
||||
)
|
||||
|
||||
// Cosine distance - <=>
|
||||
const results = await dataSource.query(
|
||||
`SELECT id, embedding FROM post ORDER BY embedding <=> $1 LIMIT 5`,
|
||||
["[1,2,3]"]
|
||||
)
|
||||
|
||||
// Inner product - <#>
|
||||
const results = await dataSource.query(
|
||||
`SELECT id, embedding FROM post ORDER BY embedding <#> $1 LIMIT 5`,
|
||||
["[1,2,3]"]
|
||||
)
|
||||
```
|
||||
|
||||
> **Note**:
|
||||
> - **PostgreSQL**: Vector columns require the `pgvector` extension to be installed. The extension provides the vector data types and similarity operators.
|
||||
> - **SAP HANA**: Vector columns require SAP HANA Cloud (2024Q1+) and a supported version of `@sap/hana-client`. Use the appropriate [vector similarity functions](https://help.sap.com/docs/hana-cloud-database/sap-hana-cloud-sap-hana-database-sql-reference-guide/vector-functions) for similarity searches.
|
||||
|
||||
## Column types
|
||||
|
||||
TypeORM supports all of the most commonly used database-supported column types.
|
||||
@ -393,6 +332,50 @@ Besides "uuid" there is also "increment", "identity" (Postgres 10+ only) and "ro
|
||||
on some database platforms with this type of generation (for example some databases can only have one increment column,
|
||||
or some of them require increment to be a primary key).
|
||||
|
||||
### Vector columns
|
||||
|
||||
Vector columns are supported on MariaDB/MySQL, Microsoft SQL Server, PostgreSQL (via [`pgvector`](https://github.com/pgvector/pgvector) extension) and SAP HANA Cloud, enabling storing and querying vector embeddings for similarity search and machine learning applications.
|
||||
|
||||
TypeORM supports both `vector` and `halfvec` column types across databases:
|
||||
|
||||
- `vector` - stores vectors as 4-byte floats (single precision)
|
||||
- MariaDB/MySQL: native `vector` type
|
||||
- Microsoft SQL Server: native `vector` type
|
||||
- PostgreSQL: `vector` type, available via `pgvector` extension
|
||||
- SAP HANA Cloud: alias for `real_vector` type
|
||||
- `halfvec` - stores vectors as 2-byte floats (half precision) for memory efficiency
|
||||
- PostgreSQL: `halfvec` type, available via `pgvector` extension
|
||||
- SAP HANA Cloud: alias for `half_vector` type
|
||||
|
||||
You can specify the number of vector dimensions using the `length` option:
|
||||
|
||||
```typescript
|
||||
@Entity()
|
||||
export class Post {
|
||||
@PrimaryGeneratedColumn()
|
||||
id: number
|
||||
|
||||
// Vector without specified dimensions
|
||||
@Column("vector")
|
||||
embedding: number[] | Buffer
|
||||
|
||||
// Vector with 3 dimensions: vector(3)
|
||||
@Column("vector", { length: 3 })
|
||||
embedding_3d: number[] | Buffer
|
||||
|
||||
// Half-precision vector with 4 dimensions: halfvec(4) (works on PostgreSQL and SAP HANA only)
|
||||
@Column("halfvec", { length: 4 })
|
||||
halfvec_embedding: number[] | Buffer
|
||||
}
|
||||
```
|
||||
|
||||
> **Note**:
|
||||
>
|
||||
> - **MariaDB/MySQL**: Vectors are supported since MariaDB 11.7 and MySQL 9
|
||||
> - **Microsoft SQL Server**: Vector type support requires SQL Server 2025 (17.x) or newer.
|
||||
> - **PostgreSQL**: Vector columns require the `pgvector` extension to be installed. The extension provides the vector data types and similarity operators.
|
||||
> - **SAP HANA**: Vector columns require SAP HANA Cloud (2024Q1+) and a supported version of `@sap/hana-client`.
|
||||
|
||||
### Spatial columns
|
||||
|
||||
Microsoft SQLServer, MySQL/MariaDB, PostgreSQL/CockroachDB and SAP HANA all support spatial columns. TypeORM's support for each varies slightly between databases, particularly as the column names vary between databases.
|
||||
@ -470,6 +453,7 @@ List of available options in `ColumnOptions`:
|
||||
- `hstoreType: "object"|"string"` - Return type of `HSTORE` column. Returns value as string or as object. Used only in [Postgres](https://www.postgresql.org/docs/9.6/static/hstore.html).
|
||||
- `array: boolean` - Used for postgres and cockroachdb column types which can be array (for example int[])
|
||||
- `transformer: { from(value: DatabaseType): EntityType, to(value: EntityType): DatabaseType }` - Used to marshal properties of arbitrary type `EntityType` into a type `DatabaseType` supported by the database. Array of transformers are also supported and will be applied in natural order when writing, and in reverse order when reading. e.g. `[lowercase, encrypt]` will first lowercase the string then encrypt it when writing, and will decrypt then do nothing when reading.
|
||||
- `utc: boolean` - Indicates if date values should be stored and retrieved in UTC timezone instead of local timezone. Only applies to `date` column type. Default value is `false` (uses local timezone for backward compatibility).
|
||||
|
||||
Note: most of those column options are RDBMS-specific and aren't available in `MongoDB`.
|
||||
|
||||
|
||||
@ -11,7 +11,7 @@ You can create a view entity by defining a new class and mark it with `@ViewEnti
|
||||
- `database` - database name in selected DB server.
|
||||
- `schema` - schema name.
|
||||
- `expression` - view definition. **Required parameter**.
|
||||
- `dependsOn` - List of other views on which the current views depends. If your view uses another view in its definition, you can add it here so that migrations are generated in the correct order.
|
||||
- `dependsOn` - List of other views on which the current views depends. If your view uses another view in its definition, you can add it here so that [migrations](../migrations/01-why.md) are generated in the correct order.
|
||||
|
||||
`expression` can be string with properly escaped columns and tables, depend on database used (postgres in example):
|
||||
|
||||
|
||||
@ -34,7 +34,7 @@ TypeORM is highly influenced by other ORMs, such as [Hibernate](http://hibernate
|
||||
- Cascades.
|
||||
- Indices.
|
||||
- Transactions.
|
||||
- Migrations and automatic migrations generation.
|
||||
- [Migrations](/docs/migrations/why) with automatic generation.
|
||||
- Connection pooling.
|
||||
- Replication.
|
||||
- Using multiple database instances.
|
||||
@ -1195,7 +1195,7 @@ There are several extensions that simplify working with TypeORM and integrating
|
||||
- ER Diagram generator - [typeorm-uml](https://github.com/eugene-manuilov/typeorm-uml/)
|
||||
- another ER Diagram generator - [erdia](https://www.npmjs.com/package/erdia/)
|
||||
- Create, drop and seed database - [typeorm-extension](https://github.com/tada5hi/typeorm-extension)
|
||||
- Automatically update `data-source.ts` after generating migrations/entities - [typeorm-codebase-sync](https://www.npmjs.com/package/typeorm-codebase-sync)
|
||||
- Automatically update `data-source.ts` after generating [migrations](/docs/migrations/why)/entities - [typeorm-codebase-sync](https://www.npmjs.com/package/typeorm-codebase-sync)
|
||||
- Easy manipulation of `relations` objects - [typeorm-relations](https://npmjs.com/package/typeorm-relations)
|
||||
- Automatically generate `relations` based on a GraphQL query - [typeorm-relations-graphql](https://npmjs.com/package/typeorm-relations-graphql)
|
||||
|
||||
|
||||
@ -154,4 +154,3 @@ Both strategies have their own cons and pros.
|
||||
One thing we should always keep in mind with software development is how we are going to maintain our applications.
|
||||
The `Data Mapper` approach helps with maintainability, which is more effective in larger apps.
|
||||
The `Active Record` approach helps keep things simple which works well in smaller apps.
|
||||
And simplicity is always a key to better maintainability.
|
||||
|
||||
@ -178,7 +178,7 @@ module.exports = {
|
||||
|
||||
### Bundling Migration Files
|
||||
|
||||
By default Webpack tries to bundle everything into one file. This can be problematic when your project has migration files which are meant to be executed after bundled code is deployed to production. To make sure all your migrations can be recognized and executed by TypeORM, you may need to use "Object Syntax" for the `entry` configuration for the migration files only.
|
||||
By default Webpack tries to bundle everything into one file. This can be problematic when your project has migration files which are meant to be executed after bundled code is deployed to production. To make sure all your [migrations](../migrations/01-why.md) can be recognized and executed by TypeORM, you may need to use "Object Syntax" for the `entry` configuration for the migration files only.
|
||||
|
||||
```javascript
|
||||
const glob = require("glob")
|
||||
@ -210,7 +210,7 @@ module.exports = {
|
||||
}
|
||||
```
|
||||
|
||||
Also, since Webpack 4, when using `mode: 'production'`, files are optimized by default which includes mangling your code in order to minimize file sizes. This breaks the migrations because TypeORM relies on their names to determine which has already been executed. You may disable minimization completely by adding:
|
||||
Also, since Webpack 4, when using `mode: 'production'`, files are optimized by default which includes mangling your code in order to minimize file sizes. This breaks the [migrations](../migrations/01-why.md) because TypeORM relies on their names to determine which has already been executed. You may disable minimization completely by adding:
|
||||
|
||||
```javascript
|
||||
module.exports = {
|
||||
@ -256,93 +256,6 @@ module.exports = {
|
||||
}
|
||||
```
|
||||
|
||||
## How to use Vite for the backend?
|
||||
|
||||
Using TypeORM in a Vite project is pretty straight forward. However, when you use migrations, you will run into "...migration name is wrong. Migration class name should have a
|
||||
JavaScript timestamp appended." errors when running the production build.
|
||||
On production builds, files are [optimized by default](https://vite.dev/config/build-options#build-minify) which includes mangling your code in order to minimize file sizes.
|
||||
|
||||
You have 3 options to mitigate this. The 3 options are shown below as diffs against this basic "vite.config.ts"
|
||||
|
||||
```typescript
|
||||
import legacy from "@vitejs/plugin-legacy"
|
||||
import vue from "@vitejs/plugin-vue"
|
||||
import path from "path"
|
||||
import { defineConfig } from "vite"
|
||||
|
||||
// https://vitejs.dev/config/
|
||||
export default defineConfig({
|
||||
build: {
|
||||
sourcemap: true,
|
||||
},
|
||||
plugins: [vue(), legacy()],
|
||||
resolve: {
|
||||
alias: {
|
||||
"@": path.resolve(__dirname, "./src"),
|
||||
},
|
||||
},
|
||||
})
|
||||
```
|
||||
|
||||
### Option 1: Disable minify
|
||||
|
||||
This is the most crude option and will result in significantly larger files. Add `build.minify = false` to your config.
|
||||
|
||||
```diff
|
||||
--- basic vite.config.ts
|
||||
+++ disable minify vite.config.ts
|
||||
@@ -7,6 +7,7 @@
|
||||
export default defineConfig({
|
||||
build: {
|
||||
sourcemap: true,
|
||||
+ minify: false,
|
||||
},
|
||||
plugins: [vue(), legacy()],
|
||||
resolve: {
|
||||
```
|
||||
|
||||
### Option 2: Disable esbuild minify identifiers
|
||||
|
||||
Vite uses esbuild as the default minifier. You can disable mangling of identifiers by adding `esbuild.minifyIdentifiers = false` to your config.
|
||||
This will result in smaller file sizes, but depending on your code base you will get diminishing returns as all identifiers will be kept at full length.
|
||||
|
||||
```diff
|
||||
--- basic vite.config.ts
|
||||
+++ disable esbuild minify identifiers vite.config.ts
|
||||
@@ -8,6 +8,7 @@
|
||||
build: {
|
||||
sourcemap: true,
|
||||
},
|
||||
+ esbuild: { minifyIdentifiers: false },
|
||||
plugins: [vue(), legacy()],
|
||||
resolve: {
|
||||
```
|
||||
|
||||
### Option 3: Use terser as minifier while keeping only the migration class names
|
||||
|
||||
Vite supports using terser as minifier. Terser is slower than esbuild, but offers more fine-grained control over what to minify.
|
||||
Add `minify: 'terser'` with `terserOptions.mangle.keep_classnames: /^Migrations\d+$/` and `terserOptions.compress.keep_classnames: /^Migrations\d+$/` to your config.
|
||||
These options will make sure classnames that start with "Migrations" and end with numbers are not renamed during minification.
|
||||
|
||||
Make sure terser is available as dev dependency in your project: `npm add -D terser`.
|
||||
|
||||
```diff
|
||||
--- basic vite.config.ts
|
||||
+++ terser keep migration class names vite.config.ts
|
||||
@@ -7,6 +7,11 @@
|
||||
export default defineConfig({
|
||||
build: {
|
||||
sourcemap: true,
|
||||
+ minify: 'terser',
|
||||
+ terserOptions: {
|
||||
+ mangle: { keep_classnames: /^Migrations\d+$/ },
|
||||
+ compress: { keep_classnames: /^Migrations\d+$/ },
|
||||
+ },
|
||||
},
|
||||
plugins: [vue(), legacy()],
|
||||
resolve: {
|
||||
```
|
||||
|
||||
## How to use TypeORM in ESM projects?
|
||||
|
||||
Make sure to add `"type": "module"` in the `package.json` of your project so TypeORM will know to use `import( ... )` on files.
|
||||
|
||||
@ -662,7 +662,7 @@ Learn more about [listeners](../advanced-topics/4-listeners-and-subscribers.md).
|
||||
#### `@BeforeRemove`
|
||||
|
||||
You can define a method with any name in the entity and mark it with `@BeforeRemove`
|
||||
and TypeORM will call it before a entity is removed using repository/manager `remove`.
|
||||
and TypeORM will call it before an entity is removed using repository/manager `remove`.
|
||||
Example:
|
||||
|
||||
```typescript
|
||||
@ -698,7 +698,7 @@ Learn more about [listeners](../advanced-topics/4-listeners-and-subscribers.md).
|
||||
#### `@BeforeSoftRemove`
|
||||
|
||||
You can define a method with any name in the entity and mark it with `@BeforeSoftRemove`
|
||||
and TypeORM will call it before a entity is soft removed using repository/manager `softRemove`.
|
||||
and TypeORM will call it before an entity is soft removed using repository/manager `softRemove`.
|
||||
Example:
|
||||
|
||||
```typescript
|
||||
@ -734,7 +734,7 @@ Learn more about [listeners](../advanced-topics/4-listeners-and-subscribers.md).
|
||||
#### `@BeforeRecover`
|
||||
|
||||
You can define a method with any name in the entity and mark it with `@BeforeRecover`
|
||||
and TypeORM will call it before a entity is recovered using repository/manager `recover`.
|
||||
and TypeORM will call it before an entity is recovered using repository/manager `recover`.
|
||||
Example:
|
||||
|
||||
```typescript
|
||||
|
||||
42
docs/docs/migrations/01-why.md
Normal file
42
docs/docs/migrations/01-why.md
Normal file
@ -0,0 +1,42 @@
|
||||
# How do migrations work?
|
||||
|
||||
Once you get into production you'll need to synchronize model changes into the database.
|
||||
Typically, it is unsafe to use `synchronize: true` for schema synchronization on production once
|
||||
you get data in your database. Here is where migrations come to help.
|
||||
|
||||
A migration is just a single file with SQL queries to update a database schema
|
||||
and apply new changes to an existing database.
|
||||
|
||||
Let's say you already have a database and a `Post` entity:
|
||||
|
||||
```typescript
|
||||
import { Entity, Column, PrimaryGeneratedColumn } from "typeorm"
|
||||
|
||||
@Entity()
|
||||
export class Post {
|
||||
@PrimaryGeneratedColumn()
|
||||
id: number
|
||||
|
||||
@Column()
|
||||
title: string
|
||||
|
||||
@Column()
|
||||
text: string
|
||||
}
|
||||
```
|
||||
|
||||
And your entity worked in production for months without any changes.
|
||||
You have thousands of posts in your database.
|
||||
|
||||
Now you need to make a new release and rename `title` to `name`.
|
||||
What would you do?
|
||||
|
||||
You need to create a new migration with the following SQL query (PostgreSQL dialect):
|
||||
|
||||
```sql
|
||||
ALTER TABLE "post" RENAME COLUMN "title" TO "name";
|
||||
```
|
||||
|
||||
Once you run this SQL query your database schema is ready to work with your new codebase.
|
||||
TypeORM provides a place where you can write such sql queries and run them when needed.
|
||||
This place is called "migrations".
|
||||
71
docs/docs/migrations/02-setup.md
Normal file
71
docs/docs/migrations/02-setup.md
Normal file
@ -0,0 +1,71 @@
|
||||
# Setup
|
||||
|
||||
Before working with migrations you need to setup your [DataSource](../data-source/1-data-source.md) options properly:
|
||||
|
||||
```ts
|
||||
export default new DataSource({
|
||||
// basic setup
|
||||
synchronize: false,
|
||||
migrations: [ /*...*/ ],
|
||||
|
||||
// optional
|
||||
migrationsRun: false,
|
||||
migrationsTableName: 'migrations',
|
||||
migrationsTransactionMode: 'all'
|
||||
|
||||
// other options...
|
||||
})
|
||||
```
|
||||
|
||||
## `synchronize`
|
||||
|
||||
Turning off automatic schema synchronisation is essential for working with migrations. Otherwise they would make no sense.
|
||||
|
||||
## `migrations`
|
||||
|
||||
Defines list of migrations that need to be loaded by TypeORM. It accepts both migration classes and directories from which to load.
|
||||
|
||||
The easiest is to specify the directory where your migration files are located (glob patterns are supported):
|
||||
|
||||
```ts
|
||||
migrations: [__dirname + '/migration/**/*{.js,.ts}']
|
||||
```
|
||||
|
||||
Defining both `.js` and `.ts` extensions allows you to run migrations from TypeScript in development and from the compiled JavaScript in production (e.g. from a Docker image).
|
||||
|
||||
Alternatively you could also specify exact classes to get more fine grained control:
|
||||
|
||||
```ts
|
||||
import FirstMigration from 'migrations/TIMESTAMP-first-migration'
|
||||
import SecondMigration from 'migrations/TIMESTAMP-second-migration'
|
||||
|
||||
export default new DataSource({
|
||||
migrations: [FirstMigration, SecondMigration]
|
||||
})
|
||||
```
|
||||
|
||||
but it also requires more manual work and can be error prone.
|
||||
|
||||
- `migrationsRun` - Indicates if [migrations](../migrations/01-why.md) should be auto-run on every application launch.
|
||||
|
||||
## Optional settings
|
||||
|
||||
### `migrationsRun`
|
||||
|
||||
Indicates if migrations should be auto-run on every application launch. Default: `false`
|
||||
|
||||
### `migrationsTableName`
|
||||
|
||||
You might want to specify the name of the table that will store information about executed migrations. By default it is called `'migrations'`.
|
||||
|
||||
```ts
|
||||
migrationsTableName: 'some_custom_migrations_table'
|
||||
```
|
||||
|
||||
### `migrationsTransactionMode`
|
||||
|
||||
Controls transaction mode when running migrations. Possible options are:
|
||||
|
||||
- `all` (_default_) - wraps migrations run into a single transaction
|
||||
- `none`
|
||||
- `each`
|
||||
56
docs/docs/migrations/03-creating.md
Normal file
56
docs/docs/migrations/03-creating.md
Normal file
@ -0,0 +1,56 @@
|
||||
# Creating manually
|
||||
|
||||
You can create a new migration using CLI by specifying the name and location of the migration:
|
||||
|
||||
```shell
|
||||
npx typeorm migration:create <path/to/migrations>/<migration-name>
|
||||
```
|
||||
|
||||
For example:
|
||||
|
||||
```shell
|
||||
npx typeorm migration:create src/db/migrations/post-refactoring
|
||||
```
|
||||
|
||||
After you run the command you can see a new file generated in the `src/db/migrations` directory named `{TIMESTAMP}-post-refactoring.ts` where `{TIMESTAMP}` is the current timestamp when the migration was generated.
|
||||
|
||||
Now you can open the file and add your migration sql queries there. You should see the following content inside your migration:
|
||||
|
||||
```typescript
|
||||
import { MigrationInterface, QueryRunner } from "typeorm"
|
||||
|
||||
export class PostRefactoringTIMESTAMP implements MigrationInterface {
|
||||
async up(queryRunner: QueryRunner): Promise<void> {}
|
||||
|
||||
async down(queryRunner: QueryRunner): Promise<void> {}
|
||||
}
|
||||
```
|
||||
|
||||
There are two methods you must fill with your migration code: `up` and `down`.
|
||||
`up` has to contain the code you need to perform the migration.
|
||||
`down` has to revert whatever `up` changed.
|
||||
The `down` method is used to revert the last migration.
|
||||
|
||||
Inside both `up` and `down` you have a `QueryRunner` object.
|
||||
All database operations are executed using this object.
|
||||
Learn more about [query runner](../query-runner.md).
|
||||
|
||||
Let's see what the migration looks like with our `Post` changes:
|
||||
|
||||
```typescript
|
||||
import { MigrationInterface, QueryRunner } from "typeorm"
|
||||
|
||||
export class PostRefactoringTIMESTAMP implements MigrationInterface {
|
||||
async up(queryRunner: QueryRunner): Promise<void> {
|
||||
await queryRunner.query(
|
||||
`ALTER TABLE "post" RENAME COLUMN "title" TO "name"`,
|
||||
)
|
||||
}
|
||||
|
||||
async down(queryRunner: QueryRunner): Promise<void> {
|
||||
await queryRunner.query(
|
||||
`ALTER TABLE "post" RENAME COLUMN "name" TO "title"`,
|
||||
) // reverts things made in "up" method
|
||||
}
|
||||
}
|
||||
```
|
||||
117
docs/docs/migrations/04-generating.md
Normal file
117
docs/docs/migrations/04-generating.md
Normal file
@ -0,0 +1,117 @@
|
||||
# Generating
|
||||
|
||||
TypeORM is able to automatically generate migration files based on the changes you made to the entities, comparing them with existing database schema on the server.
|
||||
|
||||
Automatic migration generation creates a new migration file and writes all sql queries that must be executed to update the database. If no changes are detected, the command will exit with code `1`.
|
||||
|
||||
Let's say you have a `Post` entity with a `title` column, and you have changed the name `title` to `name`.
|
||||
|
||||
You can generate a migration with the following command:
|
||||
|
||||
```shell
|
||||
typeorm migration:generate -d <path/to/datasource> <migration-name>
|
||||
```
|
||||
|
||||
The `-d` argument value should specify the path where your [DataSource](../data-source/1-data-source.md) instance is defined.
|
||||
|
||||
Alternatively you can also specify name with `--name` param
|
||||
|
||||
```shell
|
||||
typeorm migration:generate -- -d <path/to/datasource> --name=<migration-name>
|
||||
```
|
||||
|
||||
or use a full path:
|
||||
|
||||
```shell
|
||||
typeorm migration:generate -d <path/to/datasource> <path/to/migrations>/<migration-name>
|
||||
```
|
||||
|
||||
Assuming you used `post-refactoring` as a name, it will generate a new file called `{TIMESTAMP}-post-refactoring.ts` with the following content:
|
||||
|
||||
```typescript
|
||||
import { MigrationInterface, QueryRunner } from "typeorm"
|
||||
|
||||
export class PostRefactoringTIMESTAMP implements MigrationInterface {
|
||||
async up(queryRunner: QueryRunner): Promise<void> {
|
||||
await queryRunner.query(
|
||||
`ALTER TABLE "post" ALTER COLUMN "title" RENAME TO "name"`,
|
||||
)
|
||||
}
|
||||
|
||||
async down(queryRunner: QueryRunner): Promise<void> {
|
||||
await queryRunner.query(
|
||||
`ALTER TABLE "post" ALTER COLUMN "name" RENAME TO "title"`,
|
||||
)
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
Alternatively, you can also output your migrations as Javascript files using the `o` (alias for `--outputJs`) flag. This is useful for Javascript only projects in which TypeScript additional packages are not installed. This command, will generate a new migration file `{TIMESTAMP}-PostRefactoring.js` with the following content:
|
||||
|
||||
```javascript
|
||||
/**
|
||||
* @typedef {import('typeorm').MigrationInterface} MigrationInterface
|
||||
* @typedef {import('typeorm').QueryRunner} QueryRunner
|
||||
*/
|
||||
|
||||
/**
|
||||
* @class
|
||||
* @implements {MigrationInterface}
|
||||
*/
|
||||
module.exports = class PostRefactoringTIMESTAMP {
|
||||
/**
|
||||
* @param {QueryRunner} queryRunner
|
||||
*/
|
||||
async up(queryRunner) {
|
||||
await queryRunner.query(
|
||||
`ALTER TABLE "post" ALTER COLUMN "title" RENAME TO "name"`,
|
||||
)
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {QueryRunner} queryRunner
|
||||
*/
|
||||
async down(queryRunner) {
|
||||
await queryRunner.query(
|
||||
`ALTER TABLE "post" ALTER COLUMN "name" RENAME TO "title"`,
|
||||
)
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
By default, it generates CommonJS JavaScript code with the `o` (alias for `--outputJs`) flag, but you can also generate ESM code with the `esm` flag. This is useful for Javascript projects that use ESM:
|
||||
|
||||
```javascript
|
||||
/**
|
||||
* @typedef {import('typeorm').MigrationInterface} MigrationInterface
|
||||
* @typedef {import('typeorm').QueryRunner} QueryRunner
|
||||
*/
|
||||
|
||||
/**
|
||||
* @class
|
||||
* @implements {MigrationInterface}
|
||||
*/
|
||||
export class PostRefactoringTIMESTAMP {
|
||||
/**
|
||||
* @param {QueryRunner} queryRunner
|
||||
*/
|
||||
async up(queryRunner) {
|
||||
await queryRunner.query(
|
||||
`ALTER TABLE "post" ALTER COLUMN "title" RENAME TO "name"`,
|
||||
)
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {QueryRunner} queryRunner
|
||||
*/
|
||||
async down(queryRunner) {
|
||||
await queryRunner.query(
|
||||
`ALTER TABLE "post" ALTER COLUMN "name" RENAME TO "title"`,
|
||||
)
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
See, you don't need to write the queries on your own.
|
||||
|
||||
The rule of thumb for generating migrations is that you generate them after **each** change you made to your models. To apply multi-line formatting to your generated migration queries, use the `p` (alias for `--pretty`) flag.
|
||||
29
docs/docs/migrations/05-executing.md
Normal file
29
docs/docs/migrations/05-executing.md
Normal file
@ -0,0 +1,29 @@
|
||||
# Executing and reverting
|
||||
|
||||
Once you have a migration to run on production, you can run them using a CLI command:
|
||||
|
||||
```shell
|
||||
typeorm migration:run -- -d path-to-datasource-config
|
||||
```
|
||||
|
||||
**`typeorm migration:create` and `typeorm migration:generate` will create `.ts` files, unless you use the `o` flag (see more in [Generating migrations](04-generating.md)). The `migration:run` and `migration:revert` commands only work on `.js` files. Thus the typescript files need to be compiled before running the commands.** Alternatively, you can use `ts-node` with `typeorm` to run `.ts` migration files.
|
||||
|
||||
Example with `ts-node`:
|
||||
|
||||
```shell
|
||||
npx typeorm-ts-node-commonjs migration:run -- -d path-to-datasource-config
|
||||
```
|
||||
|
||||
Example with `ts-node` in ESM projects:
|
||||
|
||||
```shell
|
||||
npx typeorm-ts-node-esm migration:run -- -d path-to-datasource-config
|
||||
```
|
||||
|
||||
```shell
|
||||
npx typeorm-ts-node-esm migration:generate ./src/migrations/update-post-table -d ./src/data-source.ts
|
||||
```
|
||||
|
||||
This command will execute all pending migrations and run them in a sequence ordered by their timestamps.
|
||||
This means all sql queries written in the `up` methods of your created migrations will be executed.
|
||||
That's all! Now you have your database schema up-to-date.
|
||||
11
docs/docs/migrations/06-reverting.md
Normal file
11
docs/docs/migrations/06-reverting.md
Normal file
@ -0,0 +1,11 @@
|
||||
# Reverting
|
||||
|
||||
If for some reason you want to revert the changes, you can run:
|
||||
|
||||
```shell
|
||||
typeorm migration:revert -- -d path-to-datasource-config
|
||||
```
|
||||
|
||||
This command will execute `down` in the latest executed migration.
|
||||
|
||||
If you need to revert multiple migrations you must call this command multiple times.
|
||||
11
docs/docs/migrations/07-status.md
Normal file
11
docs/docs/migrations/07-status.md
Normal file
@ -0,0 +1,11 @@
|
||||
# Status
|
||||
|
||||
To show all migrations and whether they've been run or not, use the following command:
|
||||
|
||||
```shell
|
||||
typeorm migration:show -- -d path-to-datasource-config
|
||||
```
|
||||
|
||||
[X] = Migration has been run
|
||||
|
||||
[ ] = Migration is pending/unapplied
|
||||
42
docs/docs/migrations/08-faking.md
Normal file
42
docs/docs/migrations/08-faking.md
Normal file
@ -0,0 +1,42 @@
|
||||
# Faking Migrations and Rollbacks
|
||||
|
||||
You can also fake run a migration using the `--fake` flag (`-f` for short). This will add the migration
|
||||
to the migrations table without running it. This is useful for migrations created after manual changes
|
||||
have already been made to the database or when migrations have been run externally
|
||||
(e.g. by another tool or application), and you still would like to keep a consistent migration history.
|
||||
|
||||
```shell
|
||||
typeorm migration:run -d path-to-datasource-config --fake
|
||||
```
|
||||
|
||||
This is also possible with rollbacks.
|
||||
|
||||
```shell
|
||||
typeorm migration:revert -d path-to-datasource-config --fake
|
||||
```
|
||||
|
||||
### Transaction modes
|
||||
|
||||
By default, TypeORM will run all your migrations within a single wrapping transaction.
|
||||
This corresponds to the `--transaction all` flag.
|
||||
If you require more fine grained transaction control, you can use the `--transaction each` flag to wrap every migration individually, or the `--transaction none` flag to opt out of wrapping the migrations in transactions altogether.
|
||||
|
||||
In addition to these flags, you can also override the transaction behavior on a per-migration basis by setting the `transaction` property on the `MigrationInterface` to `true` or `false`. This only works in the `each` or `none` transaction mode.
|
||||
|
||||
```typescript
|
||||
import { MigrationInterface, QueryRunner } from "typeorm"
|
||||
|
||||
export class AddIndexTIMESTAMP implements MigrationInterface {
|
||||
transaction = false
|
||||
|
||||
async up(queryRunner: QueryRunner): Promise<void> {
|
||||
await queryRunner.query(
|
||||
`CREATE INDEX CONCURRENTLY post_names_idx ON post(name)`,
|
||||
)
|
||||
}
|
||||
|
||||
async down(queryRunner: QueryRunner): Promise<void> {
|
||||
await queryRunner.query(`DROP INDEX CONCURRENTLY post_names_idx`)
|
||||
}
|
||||
}
|
||||
```
|
||||
@ -1,348 +1,7 @@
|
||||
# Migrations
|
||||
|
||||
## How migrations work?
|
||||
|
||||
Once you get into production you'll need to synchronize model changes into the database.
|
||||
Typically, it is unsafe to use `synchronize: true` for schema synchronization on production once
|
||||
you get data in your database. Here is where migrations come to help.
|
||||
|
||||
A migration is just a single file with sql queries to update a database schema
|
||||
and apply new changes to an existing database.
|
||||
|
||||
Let's say you already have a database and a post entity:
|
||||
|
||||
```typescript
|
||||
import { Entity, Column, PrimaryGeneratedColumn } from "typeorm"
|
||||
|
||||
@Entity()
|
||||
export class Post {
|
||||
@PrimaryGeneratedColumn()
|
||||
id: number
|
||||
|
||||
@Column()
|
||||
title: string
|
||||
|
||||
@Column()
|
||||
text: string
|
||||
}
|
||||
```
|
||||
|
||||
And your entity worked in production for months without any changes.
|
||||
You have thousands of posts in your database.
|
||||
|
||||
Now you need to make a new release and rename `title` to `name`.
|
||||
What would you do?
|
||||
|
||||
You need to create a new migration with the following SQL query (postgres dialect):
|
||||
|
||||
```sql
|
||||
ALTER TABLE "post" ALTER COLUMN "title" RENAME TO "name";
|
||||
```
|
||||
|
||||
Once you run this SQL query your database schema is ready to work with your new codebase.
|
||||
TypeORM provides a place where you can write such sql queries and run them when needed.
|
||||
This place is called "migrations".
|
||||
|
||||
## Creating a new migration
|
||||
|
||||
**Pre-requisites**: [Installing CLI](./6-using-cli.md#installing-cli)
|
||||
|
||||
Before creating a new migration you need to setup your data source options properly:
|
||||
|
||||
```ts
|
||||
import { DataSource } from "typeorm"
|
||||
|
||||
export default new DataSource({
|
||||
type: "mysql",
|
||||
host: "localhost",
|
||||
port: 3306,
|
||||
username: "test",
|
||||
password: "test",
|
||||
database: "test",
|
||||
entities: [
|
||||
/*...*/
|
||||
],
|
||||
migrations: [
|
||||
/*...*/
|
||||
],
|
||||
migrationsTableName: "custom_migration_table",
|
||||
})
|
||||
```
|
||||
|
||||
Here we setup two options:
|
||||
|
||||
- `"migrationsTableName": "migrations"` - Specify this option only if you need the migration table name to be different from `"migrations"`.
|
||||
- `"migrations": [/*...*/]` - list of migrations that need to be loaded by TypeORM
|
||||
|
||||
Once you setup the connection options you can create a new migration using CLI:
|
||||
|
||||
```shell
|
||||
typeorm migration:create ./path-to-migrations-dir/PostRefactoring
|
||||
```
|
||||
|
||||
Here, `PostRefactoring` is the name of the migration - you can specify any name you want.
|
||||
After you run the command you can see a new file generated in the "migration" directory
|
||||
named `{TIMESTAMP}-PostRefactoring.ts` where `{TIMESTAMP}` is the current timestamp when the migration was generated.
|
||||
Now you can open the file and add your migration sql queries there.
|
||||
|
||||
You should see the following content inside your migration:
|
||||
|
||||
```typescript
|
||||
import { MigrationInterface, QueryRunner } from "typeorm"
|
||||
|
||||
export class PostRefactoringTIMESTAMP implements MigrationInterface {
|
||||
async up(queryRunner: QueryRunner): Promise<void> {}
|
||||
|
||||
async down(queryRunner: QueryRunner): Promise<void> {}
|
||||
}
|
||||
```
|
||||
|
||||
There are two methods you must fill with your migration code: `up` and `down`.
|
||||
`up` has to contain the code you need to perform the migration.
|
||||
`down` has to revert whatever `up` changed.
|
||||
`down` method is used to revert the last migration.
|
||||
|
||||
Inside both `up` and `down` you have a `QueryRunner` object.
|
||||
All database operations are executed using this object.
|
||||
Learn more about [query runner](../query-runner.md).
|
||||
|
||||
Let's see what the migration looks like with our `Post` changes:
|
||||
|
||||
```typescript
|
||||
import { MigrationInterface, QueryRunner } from "typeorm"
|
||||
|
||||
export class PostRefactoringTIMESTAMP implements MigrationInterface {
|
||||
async up(queryRunner: QueryRunner): Promise<void> {
|
||||
await queryRunner.query(
|
||||
`ALTER TABLE "post" RENAME COLUMN "title" TO "name"`,
|
||||
)
|
||||
}
|
||||
|
||||
async down(queryRunner: QueryRunner): Promise<void> {
|
||||
await queryRunner.query(
|
||||
`ALTER TABLE "post" RENAME COLUMN "name" TO "title"`,
|
||||
) // reverts things made in "up" method
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
## Running and reverting migrations
|
||||
|
||||
Once you have a migration to run on production, you can run them using a CLI command:
|
||||
|
||||
```shell
|
||||
typeorm migration:run -- -d path-to-datasource-config
|
||||
```
|
||||
|
||||
**`typeorm migration:create` and `typeorm migration:generate` will create `.ts` files, unless you use the `o` flag (see more in [Generating migrations](#generating-migrations)). The `migration:run` and `migration:revert` commands only work on `.js` files. Thus the typescript files need to be compiled before running the commands.** Alternatively, you can use `ts-node` with `typeorm` to run `.ts` migration files.
|
||||
|
||||
Example with `ts-node`:
|
||||
|
||||
```shell
|
||||
npx typeorm-ts-node-commonjs migration:run -- -d path-to-datasource-config
|
||||
```
|
||||
|
||||
Example with `ts-node` in ESM projects:
|
||||
|
||||
```shell
|
||||
npx typeorm-ts-node-esm migration:run -- -d path-to-datasource-config
|
||||
```
|
||||
|
||||
```shell
|
||||
npx typeorm-ts-node-esm migration:generate ./src/migrations/update-post-table -d ./src/data-source.ts
|
||||
```
|
||||
|
||||
This command will execute all pending migrations and run them in a sequence ordered by their timestamps.
|
||||
This means all sql queries written in the `up` methods of your created migrations will be executed.
|
||||
That's all! Now you have your database schema up-to-date.
|
||||
|
||||
If for some reason you want to revert the changes, you can run:
|
||||
|
||||
```shell
|
||||
typeorm migration:revert -- -d path-to-datasource-config
|
||||
```
|
||||
|
||||
This command will execute `down` in the latest executed migration.
|
||||
If you need to revert multiple migrations you must call this command multiple times.
|
||||
|
||||
### Faking Migrations and Rollbacks
|
||||
|
||||
You can also fake run a migration using the `--fake` flag (`-f` for short). This will add the migration
|
||||
to the migrations table without running it. This is useful for migrations created after manual changes
|
||||
have already been made to the database or when migrations have been run externally
|
||||
(e.g. by another tool or application), and you still would like to keep a consistent migration history.
|
||||
|
||||
```shell
|
||||
typeorm migration:run -d path-to-datasource-config --fake
|
||||
```
|
||||
|
||||
This is also possible with rollbacks.
|
||||
|
||||
```shell
|
||||
typeorm migration:revert -d path-to-datasource-config --fake
|
||||
```
|
||||
|
||||
### Transaction modes
|
||||
|
||||
By default, TypeORM will run all your migrations within a single wrapping transaction.
|
||||
This corresponds to the `--transaction all` flag.
|
||||
If you require more fine grained transaction control, you can use the `--transaction each` flag to wrap every migration individually, or the `--transaction none` flag to opt out of wrapping the migrations in transactions altogether.
|
||||
|
||||
In addition to these flags, you can also override the transaction behavior on a per-migration basis by setting the `transaction` property on the `MigrationInterface` to `true` or `false`. This only works in the `each` or `none` transaction mode.
|
||||
|
||||
```typescript
|
||||
import { MigrationInterface, QueryRunner } from "typeorm"
|
||||
|
||||
export class AddIndexTIMESTAMP implements MigrationInterface {
|
||||
transaction = false
|
||||
|
||||
async up(queryRunner: QueryRunner): Promise<void> {
|
||||
await queryRunner.query(
|
||||
`CREATE INDEX CONCURRENTLY post_names_idx ON post(name)`,
|
||||
)
|
||||
}
|
||||
|
||||
async down(queryRunner: QueryRunner): Promise<void> {
|
||||
await queryRunner.query(`DROP INDEX CONCURRENTLY post_names_idx`)
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
## Generating migrations
|
||||
|
||||
TypeORM is able to automatically generate migration files with schema changes you made.
|
||||
|
||||
Let's say you have a `Post` entity with a `title` column, and you have changed the name `title` to `name`.
|
||||
You can run following command:
|
||||
|
||||
```shell
|
||||
typeorm migration:generate PostRefactoring -d path-to-datasource-config
|
||||
```
|
||||
|
||||
If you encounter any error, it may be because the command requires both the path to the migration name and the data source. You can try this option
|
||||
|
||||
```shell
|
||||
typeorm migration:generate -d <path/to/datasource> path/to/migrations/<migration-name>
|
||||
```
|
||||
|
||||
And it will generate a new migration called `{TIMESTAMP}-PostRefactoring.ts` with the following content:
|
||||
|
||||
```typescript
|
||||
import { MigrationInterface, QueryRunner } from "typeorm"
|
||||
|
||||
export class PostRefactoringTIMESTAMP implements MigrationInterface {
|
||||
async up(queryRunner: QueryRunner): Promise<void> {
|
||||
await queryRunner.query(
|
||||
`ALTER TABLE "post" ALTER COLUMN "title" RENAME TO "name"`,
|
||||
)
|
||||
}
|
||||
|
||||
async down(queryRunner: QueryRunner): Promise<void> {
|
||||
await queryRunner.query(
|
||||
`ALTER TABLE "post" ALTER COLUMN "name" RENAME TO "title"`,
|
||||
)
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
Alternatively, you can also output your migrations as Javascript files using the `o` (alias for `--outputJs`) flag. This is useful for Javascript only projects in which TypeScript additional packages are not installed. This command, will generate a new migration file `{TIMESTAMP}-PostRefactoring.js` with the following content:
|
||||
|
||||
```javascript
|
||||
/**
|
||||
* @typedef {import('typeorm').MigrationInterface} MigrationInterface
|
||||
* @typedef {import('typeorm').QueryRunner} QueryRunner
|
||||
*/
|
||||
|
||||
/**
|
||||
* @class
|
||||
* @implements {MigrationInterface}
|
||||
*/
|
||||
module.exports = class PostRefactoringTIMESTAMP {
|
||||
/**
|
||||
* @param {QueryRunner} queryRunner
|
||||
*/
|
||||
async up(queryRunner) {
|
||||
await queryRunner.query(
|
||||
`ALTER TABLE "post" ALTER COLUMN "title" RENAME TO "name"`,
|
||||
)
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {QueryRunner} queryRunner
|
||||
*/
|
||||
async down(queryRunner) {
|
||||
await queryRunner.query(
|
||||
`ALTER TABLE "post" ALTER COLUMN "name" RENAME TO "title"`,
|
||||
)
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
By default, it generates CommonJS JavaScript code with the `o` (alias for `--outputJs`) flag, but you can also generate ESM code with the `esm` flag. This is useful for Javascript projects that use ESM:
|
||||
|
||||
```javascript
|
||||
/**
|
||||
* @typedef {import('typeorm').MigrationInterface} MigrationInterface
|
||||
* @typedef {import('typeorm').QueryRunner} QueryRunner
|
||||
*/
|
||||
|
||||
/**
|
||||
* @class
|
||||
* @implements {MigrationInterface}
|
||||
*/
|
||||
export class PostRefactoringTIMESTAMP {
|
||||
/**
|
||||
* @param {QueryRunner} queryRunner
|
||||
*/
|
||||
async up(queryRunner) {
|
||||
await queryRunner.query(
|
||||
`ALTER TABLE "post" ALTER COLUMN "title" RENAME TO "name"`,
|
||||
)
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {QueryRunner} queryRunner
|
||||
*/
|
||||
async down(queryRunner) {
|
||||
await queryRunner.query(
|
||||
`ALTER TABLE "post" ALTER COLUMN "name" RENAME TO "title"`,
|
||||
)
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
See, you don't need to write the queries on your own.
|
||||
The rule of thumb for generating migrations is that you generate them after **each** change you made to your models. To apply multi-line formatting to your generated migration queries, use the `p` (alias for `--pretty`) flag.
|
||||
|
||||
## DataSource option
|
||||
|
||||
If you need to run/revert/generate/show your migrations use the `-d` (alias for `--dataSource`) and pass the path to the file where your DataSource instance is defined as an argument
|
||||
|
||||
```shell
|
||||
typeorm -d <your-data-source-path> migration:{run|revert}
|
||||
```
|
||||
|
||||
## Timestamp option
|
||||
|
||||
If you need to specify a timestamp for the migration name, use the `-t` (alias for `--timestamp`) and pass the timestamp (should be a non-negative number)
|
||||
|
||||
```shell
|
||||
typeorm -t <specific-timestamp> migration:{create|generate}
|
||||
```
|
||||
|
||||
You can get a timestamp from:
|
||||
|
||||
```js
|
||||
Date.now()
|
||||
/* OR */ new Date().getTime()
|
||||
```
|
||||
|
||||
## Using migration API to write migrations
|
||||
# Query Runner API
|
||||
|
||||
In order to use an API to change a database schema you can use `QueryRunner`.
|
||||
|
||||
Example:
|
||||
|
||||
```ts
|
||||
import {
|
||||
MigrationInterface,
|
||||
@ -749,7 +408,7 @@ dropUniqueConstraint(table: Table|string, uniqueOrName: TableUnique|string): Pro
|
||||
- `table` - Table object or name
|
||||
- `uniqueOrName` - TableUnique object or unique constraint name to be dropped
|
||||
|
||||
Drops an unique constraint.
|
||||
Drops a unique constraint.
|
||||
|
||||
> Note: does not work for MySQL, because MySQL stores unique constraints as unique indices. Use `dropIndex()` method instead.
|
||||
|
||||
@ -762,7 +421,7 @@ dropUniqueConstraints(table: Table|string, uniqueConstraints: TableUnique[]): Pr
|
||||
- `table` - Table object or name
|
||||
- `uniqueConstraints` - array of TableUnique objects to be dropped
|
||||
|
||||
Drops an unique constraints.
|
||||
Drops unique constraints.
|
||||
|
||||
> Note: does not work for MySQL, because MySQL stores unique constraints as unique indices. Use `dropIndices()` method instead.
|
||||
|
||||
@ -775,7 +434,7 @@ createCheckConstraint(table: Table|string, checkConstraint: TableCheck): Promise
|
||||
- `table` - Table object or name
|
||||
- `checkConstraint` - TableCheck object
|
||||
|
||||
Creates new check constraint.
|
||||
Creates a new check constraint.
|
||||
|
||||
> Note: MySQL does not support check constraints.
|
||||
|
||||
@ -788,7 +447,7 @@ createCheckConstraints(table: Table|string, checkConstraints: TableCheck[]): Pro
|
||||
- `table` - Table object or name
|
||||
- `checkConstraints` - array of TableCheck objects
|
||||
|
||||
Creates new check constraint.
|
||||
Creates a new check constraint.
|
||||
|
||||
> Note: MySQL does not support check constraints.
|
||||
|
||||
16
docs/docs/migrations/10-extra.md
Normal file
16
docs/docs/migrations/10-extra.md
Normal file
@ -0,0 +1,16 @@
|
||||
# Extra options
|
||||
|
||||
## Timestamp
|
||||
|
||||
If you need to specify a timestamp for the migration name, use the `-t` (alias for `--timestamp`) and pass the timestamp (should be a non-negative number)
|
||||
|
||||
```shell
|
||||
typeorm -t <specific-timestamp> migration:{create|generate}
|
||||
```
|
||||
|
||||
You can get a timestamp from:
|
||||
|
||||
```js
|
||||
Date.now()
|
||||
/* OR */ new Date().getTime()
|
||||
```
|
||||
86
docs/docs/migrations/11-vite.md
Normal file
86
docs/docs/migrations/11-vite.md
Normal file
@ -0,0 +1,86 @@
|
||||
# Vite
|
||||
|
||||
Using TypeORM in a [Vite](https://vite.dev) project is pretty straightforward. However, when you use [migrations](../migrations/01-why.md), you will run into "...migration name is wrong. Migration class name should have a
|
||||
JavaScript timestamp appended." errors when running the production build.
|
||||
On production builds, files are [optimized by default](https://vite.dev/config/build-options#build-minify) which includes mangling your code in order to minimize file sizes.
|
||||
|
||||
You have 3 options to mitigate this. The 3 options are shown below as diff to this basic `vite.config.ts`
|
||||
|
||||
```typescript
|
||||
import legacy from "@vitejs/plugin-legacy"
|
||||
import vue from "@vitejs/plugin-vue"
|
||||
import path from "path"
|
||||
import { defineConfig } from "vite"
|
||||
|
||||
// https://vitejs.dev/config/
|
||||
export default defineConfig({
|
||||
build: {
|
||||
sourcemap: true,
|
||||
},
|
||||
plugins: [vue(), legacy()],
|
||||
resolve: {
|
||||
alias: {
|
||||
"@": path.resolve(__dirname, "./src"),
|
||||
},
|
||||
},
|
||||
})
|
||||
```
|
||||
|
||||
## Option 1: Disable minify
|
||||
|
||||
This is the most crude option and will result in significantly larger files. Add `build.minify = false` to your config.
|
||||
|
||||
```diff
|
||||
--- basic vite.config.ts
|
||||
+++ disable minify vite.config.ts
|
||||
@@ -7,6 +7,7 @@
|
||||
export default defineConfig({
|
||||
build: {
|
||||
sourcemap: true,
|
||||
+ minify: false,
|
||||
},
|
||||
plugins: [vue(), legacy()],
|
||||
resolve: {
|
||||
```
|
||||
|
||||
## Option 2: Disable esbuild minify identifiers
|
||||
|
||||
Vite uses esbuild as the default minifier. You can disable mangling of identifiers by adding `esbuild.minifyIdentifiers = false` to your config.
|
||||
This will result in smaller file sizes, but depending on your code base you will get diminishing returns as all identifiers will be kept at full length.
|
||||
|
||||
```diff
|
||||
--- basic vite.config.ts
|
||||
+++ disable esbuild minify identifiers vite.config.ts
|
||||
@@ -8,6 +8,7 @@
|
||||
build: {
|
||||
sourcemap: true,
|
||||
},
|
||||
+ esbuild: { minifyIdentifiers: false },
|
||||
plugins: [vue(), legacy()],
|
||||
resolve: {
|
||||
```
|
||||
|
||||
## Option 3: Use terser as minifier while keeping only the migration class names
|
||||
|
||||
Vite supports using terser as minifier. Terser is slower than esbuild, but offers more fine-grained control over what to minify.
|
||||
Add `minify: 'terser'` with `terserOptions.mangle.keep_classnames: /^Migrations\d+$/` and `terserOptions.compress.keep_classnames: /^Migrations\d+$/` to your config.
|
||||
These options will make sure classnames that start with "Migrations" and end with numbers are not renamed during minification.
|
||||
|
||||
Make sure terser is available as dev dependency in your project: `npm add -D terser`.
|
||||
|
||||
```diff
|
||||
--- basic vite.config.ts
|
||||
+++ terser keep migration class names vite.config.ts
|
||||
@@ -7,6 +7,11 @@
|
||||
export default defineConfig({
|
||||
build: {
|
||||
sourcemap: true,
|
||||
+ minify: 'terser',
|
||||
+ terserOptions: {
|
||||
+ mangle: { keep_classnames: /^Migrations\d+$/ },
|
||||
+ compress: { keep_classnames: /^Migrations\d+$/ },
|
||||
+ },
|
||||
},
|
||||
plugins: [vue(), legacy()],
|
||||
resolve: {
|
||||
```
|
||||
@ -2,12 +2,12 @@
|
||||
|
||||
## What is a QueryRunner?
|
||||
|
||||
Each new `QueryRunner` instance takes a single connection from connection pool, if RDBMS supports connection pooling.
|
||||
For databases not supporting connection pools, it uses the same connection across the entire data source.
|
||||
Each new `QueryRunner` instance takes a single connection from the connection pool, if the RDBMS supports connection pooling.
|
||||
For databases that do not support connection pools, it uses the same connection across the entire data source.
|
||||
|
||||
## Creating a new `QueryRunner` instance
|
||||
|
||||
Use `createQueryRunner` method to create a new `QueryRunner`:
|
||||
Use the `createQueryRunner` method to create a new `QueryRunner`:
|
||||
|
||||
```typescript
|
||||
const queryRunner = dataSource.createQueryRunner()
|
||||
@ -15,20 +15,20 @@ const queryRunner = dataSource.createQueryRunner()
|
||||
|
||||
## Using `QueryRunner`
|
||||
|
||||
After you create a new instance of `QueryRunner` use `connect` method to actually get a connection from the connection pool:
|
||||
After you create a new instance of `QueryRunner`, use the `connect` method to get a connection from the connection pool:
|
||||
|
||||
```typescript
|
||||
const queryRunner = dataSource.createQueryRunner()
|
||||
await queryRunner.connect()
|
||||
```
|
||||
|
||||
**Important**: make sure to release it when it is not necessary anymore to make it available to the connection pool again:
|
||||
**Important**: Make sure to release it when it is no longer needed to make it available to the connection pool again:
|
||||
|
||||
```typescript
|
||||
await queryRunner.release()
|
||||
```
|
||||
|
||||
After connection is released, it is not possible to use the query runner methods.
|
||||
After the connection is released, you cannot use the query runner's methods.
|
||||
|
||||
`QueryRunner` has a bunch of methods you can use, it also has its own `EntityManager` instance,
|
||||
which you can use through `manager` property to run `EntityManager` methods on a particular database connection
|
||||
@ -43,6 +43,6 @@ await queryRunner.connect()
|
||||
// use this particular connection to execute queries
|
||||
const users = await queryRunner.manager.find(User)
|
||||
|
||||
// remember to release connection after you are done using it
|
||||
// remember to release the connection after you are done using it
|
||||
await queryRunner.release()
|
||||
```
|
||||
|
||||
@ -27,12 +27,7 @@
|
||||
"strip_chars": " .,;:#",
|
||||
"custom_settings": {
|
||||
"separatorsToIndex": "_",
|
||||
"attributesForFaceting": [
|
||||
"language",
|
||||
"version",
|
||||
"type",
|
||||
"docusaurus_tag"
|
||||
],
|
||||
"attributesForFaceting": [],
|
||||
"attributesToRetrieve": [
|
||||
"hierarchy",
|
||||
"content",
|
||||
@ -46,4 +41,4 @@
|
||||
"833762294"
|
||||
],
|
||||
"nb_hits": 0
|
||||
}
|
||||
}
|
||||
|
||||
@ -1,6 +1,7 @@
|
||||
import { themes as prismThemes } from "prism-react-renderer"
|
||||
import type { Config } from "@docusaurus/types"
|
||||
import type * as Preset from "@docusaurus/preset-classic"
|
||||
import type { Config } from "@docusaurus/types"
|
||||
import { PluginOptions as LLMsTXTPluginOptions } from "@signalwire/docusaurus-plugin-llms-txt"
|
||||
import { themes as prismThemes } from "prism-react-renderer"
|
||||
import { redirects } from "./redirects"
|
||||
|
||||
// This runs in Node.js - Don't use client-side code here (browser APIs, JSX...)
|
||||
@ -22,7 +23,6 @@ const config: Config = {
|
||||
projectName: "typeorm", // Usually your repo name.
|
||||
|
||||
onBrokenLinks: "throw",
|
||||
onBrokenMarkdownLinks: "warn",
|
||||
|
||||
// Even if you don't use internationalization, you can use this field to set
|
||||
// useful metadata like html lang. For example, if your site is Chinese, you
|
||||
@ -197,7 +197,33 @@ const config: Config = {
|
||||
redirects,
|
||||
},
|
||||
],
|
||||
"@signalwire/docusaurus-plugin-llms-txt",
|
||||
[
|
||||
"@signalwire/docusaurus-plugin-llms-txt",
|
||||
{
|
||||
content: {
|
||||
// https://www.npmjs.com/package/@signalwire/docusaurus-plugin-llms-txt#content-selectors
|
||||
contentSelectors: [
|
||||
".theme-doc-markdown", // Docusaurus main content area
|
||||
"main .container .col", // Bootstrap-style layout
|
||||
"main .theme-doc-wrapper", // Docusaurus wrapper
|
||||
"article", // Semantic article element
|
||||
"main .container", // Broader container
|
||||
"main", // Fallback to main element
|
||||
".code-example",
|
||||
],
|
||||
enableLlmsFullTxt: true,
|
||||
includeGeneratedIndex: false,
|
||||
includePages: true,
|
||||
includeVersionedDocs: false,
|
||||
relativePaths: false,
|
||||
},
|
||||
depth: 3,
|
||||
onRouteError: "throw",
|
||||
siteTitle: "TypeORM",
|
||||
siteDescription:
|
||||
"TypeORM is an ORM that can run in NodeJS, Browser, Cordova, Ionic, React Native, NativeScript, Expo, and Electron platforms and can be used with TypeScript and JavaScript.",
|
||||
} satisfies LLMsTXTPluginOptions,
|
||||
],
|
||||
],
|
||||
}
|
||||
|
||||
|
||||
2459
docs/package-lock.json
generated
2459
docs/package-lock.json
generated
File diff suppressed because it is too large
Load Diff
@ -27,24 +27,24 @@
|
||||
]
|
||||
},
|
||||
"dependencies": {
|
||||
"@docusaurus/core": "3.8.1",
|
||||
"@docusaurus/plugin-client-redirects": "^3.8.1",
|
||||
"@docusaurus/preset-classic": "3.8.1",
|
||||
"@docusaurus/core": "3.9.2",
|
||||
"@docusaurus/plugin-client-redirects": "^3.9.2",
|
||||
"@docusaurus/preset-classic": "3.9.2",
|
||||
"@mdx-js/react": "^3.1.1",
|
||||
"@signalwire/docusaurus-plugin-llms-txt": "^1.2.2",
|
||||
"clsx": "^2.1.1",
|
||||
"docusaurus-theme-search-typesense": "^0.25.0",
|
||||
"docusaurus-theme-search-typesense": "^0.26.0",
|
||||
"prism-react-renderer": "^2.4.1",
|
||||
"react": "^18.3.1",
|
||||
"react-dom": "^18.3.1"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@docusaurus/module-type-aliases": "3.8.1",
|
||||
"@docusaurus/tsconfig": "3.8.1",
|
||||
"@docusaurus/types": "3.8.1",
|
||||
"typescript": "~5.9.2"
|
||||
"@docusaurus/module-type-aliases": "3.9.2",
|
||||
"@docusaurus/tsconfig": "3.9.2",
|
||||
"@docusaurus/types": "3.9.2",
|
||||
"typescript": "~5.9.3"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=18.0"
|
||||
"node": ">=20.0"
|
||||
}
|
||||
}
|
||||
|
||||
@ -101,7 +101,7 @@ export const redirects = [
|
||||
},
|
||||
{ from: "/data-source-api", to: "/docs/data-source/data-source-api" },
|
||||
|
||||
{ from: "/migrations", to: "/docs/advanced-topics/migrations" },
|
||||
{ from: "/migrations", to: "/docs/migrations/why" },
|
||||
{ from: "/transactions", to: "/docs/advanced-topics/transactions" },
|
||||
{ from: "/indices", to: "/docs/advanced-topics/indices" },
|
||||
{
|
||||
@ -123,4 +123,5 @@ export const redirects = [
|
||||
{ from: "/getting-started", to: "/docs/getting-started" },
|
||||
{ from: "/future-of-typeorm", to: "/docs/future-of-typeorm" },
|
||||
{ from: "/query-runner", to: "/docs/query-runner" },
|
||||
{ from: "/docs/advanced-topics/migrations", to: "/docs/migrations/why" },
|
||||
]
|
||||
|
||||
@ -35,6 +35,11 @@ const sidebars: SidebarsConfig = {
|
||||
label: "Relations",
|
||||
items: [{ type: "autogenerated", dirName: "relations" }],
|
||||
},
|
||||
{
|
||||
type: "category",
|
||||
label: "Migrations",
|
||||
items: [{ type: "autogenerated", dirName: "migrations" }],
|
||||
},
|
||||
{
|
||||
type: "category",
|
||||
label: "Working with Entity Manager",
|
||||
|
||||
@ -1,7 +0,0 @@
|
||||
---
|
||||
title: Markdown page example
|
||||
---
|
||||
|
||||
# Markdown page example
|
||||
|
||||
You don't need React to write simple standalone pages.
|
||||
@ -1,9 +1,10 @@
|
||||
import eslint from "@eslint/js"
|
||||
import js from "@eslint/js"
|
||||
import { defineConfig } from "eslint/config"
|
||||
import { jsdoc } from "eslint-plugin-jsdoc"
|
||||
import tseslint from "typescript-eslint"
|
||||
import globals from "globals"
|
||||
import ts from "typescript-eslint"
|
||||
|
||||
export default tseslint.config([
|
||||
export default defineConfig([
|
||||
{
|
||||
ignores: [
|
||||
"build/**",
|
||||
@ -17,7 +18,7 @@ export default tseslint.config([
|
||||
{
|
||||
files: ["**/*.ts"],
|
||||
languageOptions: {
|
||||
parser: tseslint.parser,
|
||||
parser: ts.parser,
|
||||
parserOptions: {
|
||||
project: "tsconfig.json",
|
||||
},
|
||||
@ -26,9 +27,13 @@ export default tseslint.config([
|
||||
...globals.node,
|
||||
},
|
||||
},
|
||||
plugins: {
|
||||
js,
|
||||
ts,
|
||||
},
|
||||
extends: [
|
||||
eslint.configs.recommended,
|
||||
...tseslint.configs.recommendedTypeChecked,
|
||||
js.configs.recommended,
|
||||
...ts.configs.recommendedTypeChecked,
|
||||
],
|
||||
rules: {
|
||||
// exceptions from typescript-eslint/recommended
|
||||
@ -43,7 +48,10 @@ export default tseslint.config([
|
||||
"@typescript-eslint/no-unused-expressions": "warn",
|
||||
"@typescript-eslint/no-unused-vars": [
|
||||
"warn",
|
||||
{ argsIgnorePattern: "^_" },
|
||||
{
|
||||
argsIgnorePattern: "^_",
|
||||
destructuredArrayIgnorePattern: "^_"
|
||||
},
|
||||
],
|
||||
"@typescript-eslint/no-wrapper-object-types": "off",
|
||||
"prefer-const": ["error", { destructuring: "all" }],
|
||||
@ -84,7 +92,12 @@ export default tseslint.config([
|
||||
},
|
||||
|
||||
jsdoc({
|
||||
config: 'flat/recommended-typescript', // change to 'flat/recommended-typescript-error' once warnings are fixed
|
||||
config: "flat/recommended-typescript",
|
||||
files: ["src/**/*.ts"],
|
||||
// Temporarily enable individual rules when they are fixed, until all current warnings are gone,
|
||||
// and then remove manual config in favor of `config: "flat/recommended-typescript-error"`
|
||||
rules: {
|
||||
"jsdoc/valid-types": "error"
|
||||
}
|
||||
}),
|
||||
])
|
||||
|
||||
13
gulpfile.ts
13
gulpfile.ts
@ -226,6 +226,16 @@ export class Gulpfile {
|
||||
.pipe(gulp.dest("./build/package"));
|
||||
}
|
||||
|
||||
/**
|
||||
* Move reference to package.json one level up
|
||||
*/
|
||||
@Task()
|
||||
movePackageJsonReferenceLevelUp() {
|
||||
return gulp.src("./build/package/commands/InitCommand.js")
|
||||
.pipe(replace(/\.\.\/package.json/g, "package.json"))
|
||||
.pipe(gulp.dest("./build/package/commands"));
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a package that can be published to npm.
|
||||
*/
|
||||
@ -243,7 +253,8 @@ export class Gulpfile {
|
||||
"packageReplaceReferences",
|
||||
"packagePreparePackageFile",
|
||||
"packageCopyReadme",
|
||||
"packageCopyShims"
|
||||
"packageCopyShims",
|
||||
"movePackageJsonReferenceLevelUp"
|
||||
],
|
||||
];
|
||||
}
|
||||
|
||||
5671
package-lock.json
generated
5671
package-lock.json
generated
File diff suppressed because it is too large
Load Diff
67
package.json
67
package.json
@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "typeorm",
|
||||
"version": "0.3.27",
|
||||
"version": "0.3.28",
|
||||
"description": "Data-Mapper ORM for TypeScript and ES2021+. Supports MySQL/MariaDB, PostgreSQL, MS SQL Server, Oracle, SAP HANA, SQLite, MongoDB databases.",
|
||||
"homepage": "https://typeorm.io",
|
||||
"bugs": {
|
||||
@ -13,8 +13,8 @@
|
||||
"funding": "https://opencollective.com/typeorm",
|
||||
"license": "MIT",
|
||||
"author": {
|
||||
"name": "Umed Khudoiberdiev",
|
||||
"email": "pleerock.me@gmail.com"
|
||||
"name": "TypeORM",
|
||||
"email": "maintainers@typeorm.io"
|
||||
},
|
||||
"exports": {
|
||||
".": {
|
||||
@ -82,7 +82,7 @@
|
||||
"docs:dev": "cd docs && npm run start",
|
||||
"format": "prettier --cache --write \"./**/*.ts\"",
|
||||
"format:ci": "prettier --check \"./**/*.ts\"",
|
||||
"lint": "eslint .",
|
||||
"lint": "eslint",
|
||||
"pack": "gulp pack",
|
||||
"package": "gulp package",
|
||||
"pre-commit": "lint-staged",
|
||||
@ -96,14 +96,14 @@
|
||||
},
|
||||
"dependencies": {
|
||||
"@sqltools/formatter": "^1.2.5",
|
||||
"ansis": "^4.1.0",
|
||||
"ansis": "^4.2.0",
|
||||
"app-root-path": "^3.1.0",
|
||||
"buffer": "^6.0.3",
|
||||
"dayjs": "^1.11.18",
|
||||
"dayjs": "^1.11.19",
|
||||
"debug": "^4.4.3",
|
||||
"dedent": "^1.7.0",
|
||||
"dotenv": "^16.6.1",
|
||||
"glob": "^10.4.5",
|
||||
"glob": "^10.5.0",
|
||||
"reflect-metadata": "^0.2.2",
|
||||
"sha.js": "^2.4.12",
|
||||
"sql-highlight": "^6.1.0",
|
||||
@ -112,13 +112,14 @@
|
||||
"yargs": "^17.7.2"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@eslint/js": "^9.36.0",
|
||||
"@sap/hana-client": "^2.26.18",
|
||||
"@tsconfig/node16": "^16.1.5",
|
||||
"@eslint/js": "^9.39.1",
|
||||
"@google-cloud/spanner": "^8.3.1",
|
||||
"@sap/hana-client": "^2.26.26",
|
||||
"@tsconfig/node16": "^16.1.8",
|
||||
"@types/chai": "^4.3.20",
|
||||
"@types/chai-as-promised": "^7.1.8",
|
||||
"@types/debug": "^4.1.12",
|
||||
"@types/gulp-rename": "^2.0.6",
|
||||
"@types/gulp-rename": "^2.0.7",
|
||||
"@types/gulp-sourcemaps": "^0.0.38",
|
||||
"@types/mocha": "^10.0.10",
|
||||
"@types/node": "^16.18.126",
|
||||
@ -126,14 +127,15 @@
|
||||
"@types/sinon": "^10.0.20",
|
||||
"@types/sinon-chai": "^3.2.12",
|
||||
"@types/source-map-support": "^0.5.10",
|
||||
"@types/yargs": "^17.0.33",
|
||||
"@types/yargs": "^17.0.35",
|
||||
"better-sqlite3": "^8.7.0",
|
||||
"c8": "^10.1.3",
|
||||
"chai": "^4.5.0",
|
||||
"chai-as-promised": "^7.1.2",
|
||||
"class-transformer": "^0.5.1",
|
||||
"eslint": "^9.36.0",
|
||||
"eslint-plugin-jsdoc": "^60.3.0",
|
||||
"globals": "^16.4.0",
|
||||
"eslint": "^9.39.1",
|
||||
"eslint-plugin-jsdoc": "^61.4.1",
|
||||
"globals": "^16.5.0",
|
||||
"gulp": "^4.0.2",
|
||||
"gulp-rename": "^2.1.0",
|
||||
"gulp-replace": "^1.1.4",
|
||||
@ -143,20 +145,18 @@
|
||||
"gulpclass": "^0.2.0",
|
||||
"husky": "^9.1.7",
|
||||
"is-ci": "^4.1.0",
|
||||
"lint-staged": "^15.5.2",
|
||||
"mocha": "^11.7.2",
|
||||
"mongodb": "^6.20.0",
|
||||
"mssql": "^11.0.1",
|
||||
"lint-staged": "^16.2.7",
|
||||
"mocha": "^11.7.5",
|
||||
"mongodb": "^6.21.0",
|
||||
"mssql": "^12.1.1",
|
||||
"mysql": "^2.18.1",
|
||||
"mysql2": "^3.15.0",
|
||||
"nyc": "^17.1.0",
|
||||
"oracledb": "^6.9.0",
|
||||
"mysql2": "^3.15.3",
|
||||
"oracledb": "^6.10.0",
|
||||
"pg": "^8.16.3",
|
||||
"pg-query-stream": "^4.10.3",
|
||||
"pkg-pr-new": "^0.0.60",
|
||||
"prettier": "^2.8.8",
|
||||
"redis": "^5.8.2",
|
||||
"remap-istanbul": "^0.13.0",
|
||||
"redis": "^5.10.0",
|
||||
"rimraf": "^5.0.10",
|
||||
"sinon": "^16.1.3",
|
||||
"sinon-chai": "^3.7.0",
|
||||
@ -166,16 +166,16 @@
|
||||
"sqlite3": "^5.1.7",
|
||||
"standard-changelog": "^7.0.1",
|
||||
"ts-node": "^10.9.2",
|
||||
"typescript": "^5.9.2",
|
||||
"typescript-eslint": "^8.44.0"
|
||||
"typescript": "^5.9.3",
|
||||
"typescript-eslint": "^8.48.0"
|
||||
},
|
||||
"peerDependencies": {
|
||||
"@google-cloud/spanner": "^5.18.0 || ^6.0.0 || ^7.0.0",
|
||||
"@google-cloud/spanner": "^5.18.0 || ^6.0.0 || ^7.0.0 || ^8.0.0",
|
||||
"@sap/hana-client": "^2.14.22",
|
||||
"better-sqlite3": "^8.0.0 || ^9.0.0 || ^10.0.0 || ^11.0.0 || ^12.0.0",
|
||||
"ioredis": "^5.0.4",
|
||||
"mongodb": "^5.8.0 || ^6.0.0",
|
||||
"mssql": "^9.1.1 || ^10.0.1 || ^11.0.1",
|
||||
"mssql": "^9.1.1 || ^10.0.0 || ^11.0.0 || ^12.0.0",
|
||||
"mysql2": "^2.2.5 || ^3.0.1",
|
||||
"oracledb": "^6.3.0",
|
||||
"pg": "^8.5.1",
|
||||
@ -245,6 +245,17 @@
|
||||
"url": "https://opencollective.com/typeorm",
|
||||
"logo": "https://opencollective.com/opencollective/logo.txt"
|
||||
},
|
||||
"devEngines": {
|
||||
"runtime": {
|
||||
"name": "node",
|
||||
"version": ">=20.19.0"
|
||||
},
|
||||
"packageManager": {
|
||||
"name": "npm",
|
||||
"version": "^10.8.2",
|
||||
"onFail": "error"
|
||||
}
|
||||
},
|
||||
"readmeFilename": "README.md",
|
||||
"tags": [
|
||||
"orm",
|
||||
|
||||
@ -1,5 +1,5 @@
|
||||
import "reflect-metadata"
|
||||
import { DataSource, DataSourceOptions } from "../../src/index"
|
||||
import { DataSource, DataSourceOptions } from "../../src"
|
||||
import { Post } from "./entity/Post"
|
||||
|
||||
const options: DataSourceOptions = {
|
||||
|
||||
@ -1,5 +1,5 @@
|
||||
import "reflect-metadata"
|
||||
import { DataSource, DataSourceOptions } from "../../src/index"
|
||||
import { DataSource, DataSourceOptions } from "../../src"
|
||||
import { Post } from "./entity/Post"
|
||||
import { PostDetails } from "./entity/PostDetails"
|
||||
import { Image } from "./entity/Image"
|
||||
@ -49,136 +49,7 @@ dataSource
|
||||
|
||||
postRepository
|
||||
.save(post)
|
||||
.then((result) => {
|
||||
/*const qb = postRepository.createQueryBuilder("post")
|
||||
.leftJoinAndSelect("post.details", "details")
|
||||
.leftJoinAndSelect("post.images", "images")
|
||||
// .leftJoinAndSelect("post.coverId", "coverId")
|
||||
.leftJoinAndSelect("post.categories", "categories")
|
||||
.where("post.id=:id")
|
||||
.setParameter("id", 6);
|
||||
|
||||
return qb
|
||||
.getSingleResult()
|
||||
.then(post => {
|
||||
console.log("loaded post: ", post);
|
||||
|
||||
let category1 = new Category();
|
||||
category1.id = 12;
|
||||
category1.description = "about cat#12";
|
||||
|
||||
let category2 = new Category();
|
||||
category2.id = 52;
|
||||
category2.description = "about cat#52";
|
||||
|
||||
let image = new Image();
|
||||
image.name = "second image of the post";
|
||||
|
||||
//post
|
||||
post.title = "This! is updated post$";
|
||||
post.text = "Hello world of post#4";
|
||||
post.categories = [category2, category1];
|
||||
post.images.push(image);
|
||||
return postRepository.save(post);
|
||||
|
||||
})
|
||||
.then(() => qb.getSingleResult())
|
||||
.then(reloadedPost => console.log("reloadedPost: ", reloadedPost));*/
|
||||
})
|
||||
.then((result) => console.log(result))
|
||||
.catch((error) => console.log(error.stack ? error.stack : error))
|
||||
|
||||
return
|
||||
|
||||
/*const postJson = {
|
||||
id: 1, // changed
|
||||
text: "This is post about hello", // changed
|
||||
title: "hello", // changed
|
||||
details: { // new relation added
|
||||
id: 10, // new object persisted
|
||||
comment: "This is post about hello",
|
||||
meta: "about-hello!",
|
||||
chapter: {
|
||||
id: 1, // new object persisted
|
||||
about: "part I"
|
||||
},
|
||||
categories: [{
|
||||
id: 5, // new object persisted
|
||||
description: "cat5"
|
||||
}]
|
||||
},
|
||||
cover: null, // relation removed
|
||||
images: [{ // new relation added
|
||||
id: 4, // new object persisted
|
||||
name: "post!.jpg",
|
||||
secondaryPost: {
|
||||
id: 2,
|
||||
title: "secondary post"
|
||||
}
|
||||
}, { // secondaryPost relation removed
|
||||
id: 3,
|
||||
name: "post_2!.jpg", // changed
|
||||
details: { // new relation added
|
||||
id: 3, // new object persisted
|
||||
meta: "sec image",
|
||||
comment: "image sec"
|
||||
}
|
||||
}],
|
||||
categories: [{ // two categories removed, new category added
|
||||
id: 4, // new persisted
|
||||
description: "cat2"
|
||||
}]
|
||||
};
|
||||
|
||||
let entity = postRepository.create(postJson);
|
||||
return postRepository.initialize(postJson)
|
||||
.then(result => {
|
||||
const mergedEntity = postRepository.merge(result, entity);
|
||||
console.log("entity created from json: ", entity);
|
||||
console.log("entity initialized from db: ", result);
|
||||
console.log("entity merged: ", mergedEntity);
|
||||
const diff = postRepository.difference(result, mergedEntity);
|
||||
console.log("diff: ", diff);
|
||||
//console.log("diff[0]: ", diff[0].removedRelations);
|
||||
})
|
||||
.catch(error => console.log(error.stack ? error.stack : error));
|
||||
|
||||
let qb = postRepository
|
||||
.createQueryBuilder("post")
|
||||
.addSelect("cover")
|
||||
.addSelect("image")
|
||||
.addSelect("imageDetails")
|
||||
.addSelect("secondaryImage")
|
||||
.addSelect("category")
|
||||
.innerJoin("post.coverId", "cover")
|
||||
.leftJoin("post.images", "image")
|
||||
.leftJoin("post.secondaryImages", "secondaryImage")
|
||||
.leftJoin("image.details", "imageDetails", "on", "imageDetails.meta=:meta")
|
||||
.leftJoin("post.categories", "category", "on", "category.description=:description")
|
||||
//.leftJoin(Image, "image", "on", "image.post=post.id")
|
||||
//.where("post.id=:id")
|
||||
.setParameter("id", 1)
|
||||
.setParameter("description", "cat2")
|
||||
.setParameter("meta", "sec image");
|
||||
|
||||
return qb
|
||||
.getSingleResult()
|
||||
.then(post => console.log(post))
|
||||
// .then(result => console.log(JSON.stringify(result, null, 4)))
|
||||
.catch(error => console.log(error.stack ? error.stack : error));*/
|
||||
|
||||
/*let details = new PostDetails();
|
||||
details.comment = "This is post about hello";
|
||||
details.meta = "about-hello";
|
||||
|
||||
const post = new Post();
|
||||
post.text = "Hello how are you?";
|
||||
post.title = "hello";
|
||||
//post.details = details;
|
||||
|
||||
postRepository
|
||||
.save(post)
|
||||
.then(post => console.log("Post has been saved"))
|
||||
.catch(error => console.log("Cannot save. Error: ", error));*/
|
||||
})
|
||||
.catch((error) => console.log(error.stack ? error.stack : error))
|
||||
|
||||
@ -1,5 +1,5 @@
|
||||
import "reflect-metadata"
|
||||
import { DataSource, DataSourceOptions } from "../../src/index"
|
||||
import { DataSource, DataSourceOptions } from "../../src"
|
||||
import { EverythingEntity, SampleEnum } from "./entity/EverythingEntity"
|
||||
|
||||
const options: DataSourceOptions = {
|
||||
@ -99,7 +99,7 @@ dataSource.initialize().then(
|
||||
console.log("Now remove it")
|
||||
return postRepository.remove(entity!)
|
||||
})
|
||||
.then((entity) => {
|
||||
.then(() => {
|
||||
console.log("Entity has been removed")
|
||||
})
|
||||
.catch((error) =>
|
||||
|
||||
@ -1,5 +1,5 @@
|
||||
import "reflect-metadata"
|
||||
import { DataSource, DataSourceOptions } from "../../src/index"
|
||||
import { DataSource, DataSourceOptions } from "../../src"
|
||||
import { Post } from "./entity/Post"
|
||||
import { CustomNamingStrategy } from "./naming-strategy/CustomNamingStrategy"
|
||||
|
||||
@ -26,7 +26,7 @@ dataSource.initialize().then(
|
||||
|
||||
postRepository
|
||||
.save(post)
|
||||
.then((post) => console.log("Post has been saved"))
|
||||
.then(() => console.log("Post has been saved"))
|
||||
.catch((error) => console.log("Cannot save. Error: ", error))
|
||||
},
|
||||
(error) => console.log("Cannot connect: ", error),
|
||||
|
||||
@ -1,5 +1,5 @@
|
||||
import "reflect-metadata"
|
||||
import { DataSource, DataSourceOptions } from "../../src/index"
|
||||
import { DataSource, DataSourceOptions } from "../../src"
|
||||
import { Post } from "./entity/Post"
|
||||
import { PostCategory } from "./entity/PostCategory"
|
||||
import { PostAuthor } from "./entity/PostAuthor"
|
||||
|
||||
@ -9,7 +9,7 @@ export class Blog extends BaseObject {
|
||||
@Column()
|
||||
text: string
|
||||
|
||||
@ManyToMany((type) => PostCategory, (category) => category.posts, {
|
||||
@ManyToMany(() => PostCategory, (category) => category.posts, {
|
||||
cascade: true,
|
||||
})
|
||||
@JoinTable()
|
||||
|
||||
@ -9,7 +9,7 @@ export class Post extends BaseObject {
|
||||
@Column()
|
||||
text: string
|
||||
|
||||
@ManyToMany((type) => PostCategory, (category) => category.posts, {
|
||||
@ManyToMany(() => PostCategory, (category) => category.posts, {
|
||||
cascade: true,
|
||||
})
|
||||
@JoinTable()
|
||||
|
||||
@ -10,7 +10,7 @@ export class PostCategory {
|
||||
@Column()
|
||||
name: string
|
||||
|
||||
@ManyToMany((type) => Post, (post) => post.categories, {
|
||||
@ManyToMany(() => Post, (post) => post.categories, {
|
||||
cascade: true,
|
||||
})
|
||||
posts: Post[] = []
|
||||
|
||||
@ -1,5 +1,5 @@
|
||||
import "reflect-metadata"
|
||||
import { DataSource, DataSourceOptions } from "../../src/index"
|
||||
import { DataSource, DataSourceOptions } from "../../src"
|
||||
import { Post } from "./entity/Post"
|
||||
import { PostAuthor } from "./entity/PostAuthor"
|
||||
|
||||
@ -29,7 +29,7 @@ dataSource.initialize().then(
|
||||
|
||||
postRepository
|
||||
.save(post)
|
||||
.then((post) => console.log("Post has been saved"))
|
||||
.then(() => console.log("Post has been saved"))
|
||||
.catch((error) => console.log("Cannot save. Error: ", error))
|
||||
},
|
||||
(error) => console.log("Cannot connect: ", error),
|
||||
|
||||
@ -35,7 +35,7 @@ export class Post {
|
||||
// @JoinTable() // uncomment this and you'll get an error because JoinTable is not allowed here (only many-to-many)
|
||||
editors: PostAuthor[]
|
||||
|
||||
@ManyToMany((type) => PostAuthor, (author) => author.manyPosts)
|
||||
@ManyToMany(() => PostAuthor, (author) => author.manyPosts)
|
||||
@JoinTable() // comment this and you'll get an error because JoinTable must be at least on one side of the many-to-many relationship
|
||||
manyAuthors: PostAuthor[]
|
||||
}
|
||||
|
||||
@ -1,5 +1,5 @@
|
||||
import "reflect-metadata"
|
||||
import { DataSource, DataSourceOptions } from "../../src/index"
|
||||
import { DataSource, DataSourceOptions } from "../../src"
|
||||
import { Post } from "./entity/Post"
|
||||
import { BasePost } from "./entity/BasePost"
|
||||
|
||||
|
||||
@ -1,5 +1,5 @@
|
||||
import "reflect-metadata"
|
||||
import { DataSource, DataSourceOptions } from "../../src/index"
|
||||
import { DataSource, DataSourceOptions } from "../../src"
|
||||
import { Post } from "./entity/Post"
|
||||
|
||||
const options: DataSourceOptions = {
|
||||
|
||||
@ -1,5 +1,5 @@
|
||||
import "reflect-metadata"
|
||||
import { DataSource, DataSourceOptions } from "../../src/index"
|
||||
import { DataSource, DataSourceOptions } from "../../src"
|
||||
import { Post } from "./entity/Post"
|
||||
import { Author } from "./entity/Author"
|
||||
import { Category } from "./entity/Category"
|
||||
@ -47,13 +47,13 @@ dataSource.initialize().then(
|
||||
|
||||
return authorRepository.save(author)
|
||||
})
|
||||
.then((author: any) => {
|
||||
.then((author) => {
|
||||
// temporary
|
||||
console.log(
|
||||
"Author with a new post has been saved. Lets try to update post in the author",
|
||||
)
|
||||
|
||||
return author.posts!.then((posts: any) => {
|
||||
return author.posts!.then((posts) => {
|
||||
// temporary
|
||||
posts![0]!.title = "should be updated second post"
|
||||
return authorRepository.save(author!)
|
||||
@ -76,7 +76,7 @@ dataSource.initialize().then(
|
||||
posts[1].author = Promise.resolve(null)
|
||||
return postRepository.save(posts[0])
|
||||
})
|
||||
.then((posts) => {
|
||||
.then(() => {
|
||||
console.log("Two post's author has been removed.")
|
||||
console.log("Now lets check many-to-many relations")
|
||||
|
||||
@ -93,7 +93,7 @@ dataSource.initialize().then(
|
||||
|
||||
return postRepository.save(post)
|
||||
})
|
||||
.then((posts) => {
|
||||
.then(() => {
|
||||
console.log("Post has been saved with its categories. ")
|
||||
console.log("Lets find it now. ")
|
||||
return postRepository.find({
|
||||
@ -106,14 +106,13 @@ dataSource.initialize().then(
|
||||
.then((posts) => {
|
||||
console.log("Post with categories are loaded: ", posts)
|
||||
console.log("Lets remove one of the categories: ")
|
||||
return posts[0].categories.then((categories: any) => {
|
||||
return posts[0].categories.then((categories) => {
|
||||
// temporary
|
||||
categories!.splice(0, 1)
|
||||
// console.log(posts[0]);
|
||||
return postRepository.save(posts[0])
|
||||
})
|
||||
})
|
||||
.then((posts) => {
|
||||
.then(() => {
|
||||
console.log("One of the post category has been removed.")
|
||||
})
|
||||
.catch((error) => console.log(error.stack))
|
||||
|
||||
@ -10,6 +10,6 @@ export class Category {
|
||||
@Column()
|
||||
name: string
|
||||
|
||||
@ManyToMany((type) => Post, (post) => post.categories)
|
||||
@ManyToMany(() => Post, (post) => post.categories)
|
||||
posts: Promise<Post[]>
|
||||
}
|
||||
|
||||
@ -1,5 +1,5 @@
|
||||
import "reflect-metadata"
|
||||
import { DataSource, DataSourceOptions } from "../../src/index"
|
||||
import { DataSource, DataSourceOptions } from "../../src"
|
||||
import { Post } from "./entity/Post"
|
||||
import { Author } from "./entity/Author"
|
||||
import { Category } from "./entity/Category"
|
||||
@ -61,70 +61,10 @@ dataSource.initialize().then(
|
||||
},
|
||||
},
|
||||
})
|
||||
|
||||
// let secondPost = postRepository.create();
|
||||
// secondPost.text = "Second post";
|
||||
// secondPost.title = "About second post";
|
||||
// return authorRepository.save(author);
|
||||
})
|
||||
.then((post) => {
|
||||
console.log("Loaded posts: ", post)
|
||||
})
|
||||
/* posts[0].title = "should be updated second post";
|
||||
|
||||
return author.posts.then(posts => {
|
||||
return authorRepository.save(author);
|
||||
});
|
||||
})
|
||||
.then(updatedAuthor => {
|
||||
console.log("Author has been updated: ", updatedAuthor);
|
||||
console.log("Now lets load all posts with their authors:");
|
||||
return postRepository.find({ alias: "post", leftJoinAndSelect: { author: "post.author" } });
|
||||
})
|
||||
.then(posts => {
|
||||
console.log("Posts are loaded: ", posts);
|
||||
console.log("Now lets delete a post");
|
||||
posts[0].author = Promise.resolve(null);
|
||||
posts[1].author = Promise.resolve(null);
|
||||
return postRepository.save(posts[0]);
|
||||
})
|
||||
.then(posts => {
|
||||
console.log("Two post's author has been removed.");
|
||||
console.log("Now lets check many-to-many relations");
|
||||
|
||||
let category1 = categoryRepository.create();
|
||||
category1.name = "Hello category1";
|
||||
|
||||
let category2 = categoryRepository.create();
|
||||
category2.name = "Bye category2";
|
||||
|
||||
let post = postRepository.create();
|
||||
post.title = "Post & Categories";
|
||||
post.text = "Post with many categories";
|
||||
post.categories = Promise.resolve([
|
||||
category1,
|
||||
category2
|
||||
]);
|
||||
|
||||
return postRepository.save(post);
|
||||
})
|
||||
.then(posts => {
|
||||
console.log("Post has been saved with its categories. ");
|
||||
console.log("Lets find it now. ");
|
||||
return postRepository.find({ alias: "post", innerJoinAndSelect: { categories: "post.categories" } });
|
||||
})
|
||||
.then(posts => {
|
||||
console.log("Post with categories are loaded: ", posts);
|
||||
console.log("Lets remove one of the categories: ");
|
||||
return posts[0].categories.then(categories => {
|
||||
categories.splice(0, 1);
|
||||
// console.log(posts[0]);
|
||||
return postRepository.save(posts[0]);
|
||||
});
|
||||
})*/
|
||||
.then((posts) => {
|
||||
// console.log("One of the post category has been removed.");
|
||||
})
|
||||
.catch((error) => console.log(error.stack))
|
||||
},
|
||||
(error) => console.log("Cannot connect: ", error),
|
||||
|
||||
@ -1,5 +1,5 @@
|
||||
import "reflect-metadata"
|
||||
import { DataSource, DataSourceOptions } from "../../src/index"
|
||||
import { DataSource, DataSourceOptions } from "../../src"
|
||||
import { Post } from "./entity/Post"
|
||||
import { PostDetails } from "./entity/PostDetails"
|
||||
import { PostCategory } from "./entity/PostCategory"
|
||||
@ -45,7 +45,7 @@ dataSource.initialize().then(
|
||||
|
||||
postRepository
|
||||
.save(post)
|
||||
.then((post) => {
|
||||
.then(() => {
|
||||
console.log(
|
||||
"Post has been saved. Lets try to find this post using query builder: ",
|
||||
)
|
||||
|
||||
@ -1,5 +1,5 @@
|
||||
import "reflect-metadata"
|
||||
import { DataSource, DataSourceOptions } from "../../src/index"
|
||||
import { DataSource, DataSourceOptions } from "../../src"
|
||||
import { Post } from "./entity/Post"
|
||||
import { Author } from "./entity/Author"
|
||||
import { Category } from "./entity/Category"
|
||||
@ -38,7 +38,6 @@ dataSource.initialize().then(
|
||||
post.text = "Hello how are you?"
|
||||
post.title = "hello"
|
||||
post.authorId = 1
|
||||
// post.author = author;
|
||||
post.categories = [category1, category2]
|
||||
|
||||
Promise.all<any>([
|
||||
@ -78,61 +77,6 @@ dataSource.initialize().then(
|
||||
.then((authors) => {
|
||||
console.log("Loaded authors: ", authors)
|
||||
})
|
||||
/* posts[0].title = "should be updated second post";
|
||||
|
||||
return author.posts.then(posts => {
|
||||
return authorRepository.save(author);
|
||||
});
|
||||
})
|
||||
.then(updatedAuthor => {
|
||||
console.log("Author has been updated: ", updatedAuthor);
|
||||
console.log("Now lets load all posts with their authors:");
|
||||
return postRepository.find({ alias: "post", leftJoinAndSelect: { author: "post.author" } });
|
||||
})
|
||||
.then(posts => {
|
||||
console.log("Posts are loaded: ", posts);
|
||||
console.log("Now lets delete a post");
|
||||
posts[0].author = Promise.resolve(null);
|
||||
posts[1].author = Promise.resolve(null);
|
||||
return postRepository.save(posts[0]);
|
||||
})
|
||||
.then(posts => {
|
||||
console.log("Two post's author has been removed.");
|
||||
console.log("Now lets check many-to-many relations");
|
||||
|
||||
let category1 = categoryRepository.create();
|
||||
category1.name = "Hello category1";
|
||||
|
||||
let category2 = categoryRepository.create();
|
||||
category2.name = "Bye category2";
|
||||
|
||||
let post = postRepository.create();
|
||||
post.title = "Post & Categories";
|
||||
post.text = "Post with many categories";
|
||||
post.categories = Promise.resolve([
|
||||
category1,
|
||||
category2
|
||||
]);
|
||||
|
||||
return postRepository.save(post);
|
||||
})
|
||||
.then(posts => {
|
||||
console.log("Post has been saved with its categories. ");
|
||||
console.log("Lets find it now. ");
|
||||
return postRepository.find({ alias: "post", innerJoinAndSelect: { categories: "post.categories" } });
|
||||
})
|
||||
.then(posts => {
|
||||
console.log("Post with categories are loaded: ", posts);
|
||||
console.log("Lets remove one of the categories: ");
|
||||
return posts[0].categories.then(categories => {
|
||||
categories.splice(0, 1);
|
||||
// console.log(posts[0]);
|
||||
return postRepository.save(posts[0]);
|
||||
});
|
||||
})*/
|
||||
.then((posts) => {
|
||||
// console.log("One of the post category has been removed.");
|
||||
})
|
||||
.catch((error) => console.log(error.stack))
|
||||
},
|
||||
(error) => console.log("Cannot connect: ", error),
|
||||
|
||||
@ -18,7 +18,7 @@ export class Post {
|
||||
@Column("int")
|
||||
authorId: number
|
||||
|
||||
@ManyToMany((type) => Category)
|
||||
@ManyToMany(() => Category)
|
||||
@JoinTable()
|
||||
categories: Category[]
|
||||
|
||||
|
||||
@ -1,5 +1,5 @@
|
||||
import "reflect-metadata"
|
||||
import { DataSource, DataSourceOptions } from "../../src/index"
|
||||
import { DataSource, DataSourceOptions } from "../../src"
|
||||
import { Post } from "./entity/Post"
|
||||
import { Author } from "./entity/Author"
|
||||
import { Category } from "./entity/Category"
|
||||
@ -38,7 +38,7 @@ dataSource.initialize().then(
|
||||
|
||||
postRepository
|
||||
.save(post)
|
||||
.then((post) => {
|
||||
.then(() => {
|
||||
console.log("Post has been saved. Lets load it now.")
|
||||
return postRepository.find({
|
||||
join: {
|
||||
|
||||
@ -10,6 +10,6 @@ export class Category {
|
||||
@Column()
|
||||
name: string
|
||||
|
||||
@ManyToMany((type) => Post, (post) => post.categories)
|
||||
@ManyToMany(() => Post, (post) => post.categories)
|
||||
posts: Post[]
|
||||
}
|
||||
|
||||
@ -1,5 +1,5 @@
|
||||
import "reflect-metadata"
|
||||
import { DataSource, DataSourceOptions } from "../../src/index"
|
||||
import { DataSource, DataSourceOptions } from "../../src"
|
||||
import { Category } from "./entity/Category"
|
||||
|
||||
const options: DataSourceOptions = {
|
||||
@ -39,7 +39,7 @@ dataSource.initialize().then(
|
||||
|
||||
return categoryRepository
|
||||
.save(category1)
|
||||
.then((category) => {
|
||||
.then(() => {
|
||||
console.log(
|
||||
"Categories has been saved. Lets now load it and all its descendants:",
|
||||
)
|
||||
|
||||
@ -1,5 +1,5 @@
|
||||
import "reflect-metadata"
|
||||
import { DataSource, DataSourceOptions } from "../../src/index"
|
||||
import { DataSource, DataSourceOptions } from "../../src"
|
||||
import { Post } from "./entity/Post"
|
||||
import { Author } from "./entity/Author"
|
||||
import { Category } from "./entity/Category"
|
||||
@ -41,7 +41,7 @@ dataSource.initialize().then(
|
||||
|
||||
postRepository
|
||||
.save(post)
|
||||
.then((post) => {
|
||||
.then(() => {
|
||||
return postRepository
|
||||
.createQueryBuilder("post")
|
||||
.leftJoin("post.categories", "categories")
|
||||
@ -63,7 +63,7 @@ dataSource.initialize().then(
|
||||
|
||||
return postRepository.save(post)
|
||||
})
|
||||
.then((updatedPost) => {
|
||||
.then(() => {
|
||||
return postRepository
|
||||
.createQueryBuilder("post")
|
||||
.leftJoinAndSelect("post.author", "author")
|
||||
@ -80,7 +80,7 @@ dataSource.initialize().then(
|
||||
loadedPost!.author = author
|
||||
return postRepository.save(loadedPost!)
|
||||
})
|
||||
.then((updatedPost) => {
|
||||
.then(() => {
|
||||
return postRepository
|
||||
.createQueryBuilder("post")
|
||||
.leftJoinAndSelect("post.author", "author")
|
||||
@ -94,7 +94,7 @@ dataSource.initialize().then(
|
||||
post.author = null
|
||||
return postRepository.save(post)
|
||||
})
|
||||
.then((updatedPost) => {
|
||||
.then(() => {
|
||||
return postRepository
|
||||
.createQueryBuilder("post")
|
||||
.leftJoinAndSelect("post.author", "author")
|
||||
@ -106,7 +106,7 @@ dataSource.initialize().then(
|
||||
post.author = author2
|
||||
return postRepository.save(post)
|
||||
})
|
||||
.then((updatedPost) => {
|
||||
.then(() => {
|
||||
return postRepository
|
||||
.createQueryBuilder("post")
|
||||
.leftJoinAndSelect("post.author", "author")
|
||||
|
||||
@ -11,7 +11,7 @@ export class Category {
|
||||
@Column()
|
||||
name: string
|
||||
|
||||
@ManyToMany((type) => Author)
|
||||
@ManyToMany(() => Author)
|
||||
@JoinTable()
|
||||
author: Author
|
||||
}
|
||||
|
||||
@ -1,5 +1,5 @@
|
||||
import "reflect-metadata"
|
||||
import { DataSource, DataSourceOptions } from "../../src/index"
|
||||
import { DataSource, DataSourceOptions } from "../../src"
|
||||
import { Post } from "./entity/Post"
|
||||
import { Author } from "./entity/Author"
|
||||
|
||||
@ -27,7 +27,7 @@ dataSource.initialize().then(
|
||||
if (!author) {
|
||||
author = new Author()
|
||||
author.name = "Umed"
|
||||
return authorRepository.save(author).then((savedAuthor) => {
|
||||
return authorRepository.save(author).then(() => {
|
||||
return authorRepository.findOneBy({ id: 1 })
|
||||
})
|
||||
}
|
||||
@ -39,7 +39,7 @@ dataSource.initialize().then(
|
||||
post = new Post()
|
||||
post.title = "Hello post"
|
||||
post.text = "This is post contents"
|
||||
return postRepository.save(post).then((savedPost) => {
|
||||
return postRepository.save(post).then(() => {
|
||||
return postRepository.findOneBy({ id: 1 })
|
||||
})
|
||||
}
|
||||
|
||||
@ -1,5 +1,5 @@
|
||||
import "reflect-metadata"
|
||||
import { DataSource, DataSourceOptions } from "../../src/index"
|
||||
import { DataSource, DataSourceOptions } from "../../src"
|
||||
import { Post } from "./entity/Post"
|
||||
import { Question } from "./entity/Question"
|
||||
import { Counters } from "./entity/Counters"
|
||||
|
||||
@ -12,6 +12,6 @@ export class Post {
|
||||
@Column()
|
||||
text: string
|
||||
|
||||
@Column((type) => Counters)
|
||||
@Column(() => Counters)
|
||||
counters: Counters
|
||||
}
|
||||
|
||||
@ -9,6 +9,6 @@ export class Question {
|
||||
@Column()
|
||||
title: string
|
||||
|
||||
@Column((type) => Counters)
|
||||
@Column(() => Counters)
|
||||
counters: Counters
|
||||
}
|
||||
|
||||
@ -1,5 +1,5 @@
|
||||
import "reflect-metadata"
|
||||
import { DataSource, DataSourceOptions } from "../../src/index"
|
||||
import { DataSource, DataSourceOptions } from "../../src"
|
||||
import { Post } from "./entity/Post"
|
||||
|
||||
const options: DataSourceOptions = {
|
||||
|
||||
@ -1,5 +1,5 @@
|
||||
import "reflect-metadata"
|
||||
import { DataSource, DataSourceOptions } from "../../src/index"
|
||||
import { DataSource, DataSourceOptions } from "../../src"
|
||||
import { Employee } from "./entity/Employee"
|
||||
import { Homesitter } from "./entity/Homesitter"
|
||||
import { Student } from "./entity/Student"
|
||||
|
||||
@ -1,5 +1,5 @@
|
||||
import "reflect-metadata"
|
||||
import { DataSource, DataSourceOptions } from "../../src/index"
|
||||
import { DataSource, DataSourceOptions } from "../../src"
|
||||
import { Post } from "./entity/Post"
|
||||
import { PostDetails } from "./entity/PostDetails"
|
||||
import { PostCategory } from "./entity/PostCategory"
|
||||
@ -9,11 +9,6 @@ import { PostInformation } from "./entity/PostInformation"
|
||||
import { PostAuthor } from "./entity/PostAuthor"
|
||||
|
||||
const options: DataSourceOptions = {
|
||||
// type: "mssql",
|
||||
// host: "192.168.1.10",
|
||||
// username: "sa",
|
||||
// password: "admin12345",
|
||||
// database: "test",
|
||||
type: "oracle",
|
||||
host: "localhost",
|
||||
username: "system",
|
||||
@ -51,7 +46,7 @@ dataSource
|
||||
|
||||
postRepository
|
||||
.save(post)
|
||||
.then((post) => console.log("Post has been saved"))
|
||||
.then(() => console.log("Post has been saved"))
|
||||
.catch((error) => console.log("Cannot save. Error: ", error))
|
||||
})
|
||||
.catch((error) => console.log("Error: ", error))
|
||||
|
||||
@ -1,5 +1,5 @@
|
||||
import "reflect-metadata"
|
||||
import { DataSource, DataSourceOptions } from "../../src/index"
|
||||
import { DataSource, DataSourceOptions } from "../../src"
|
||||
import { Post } from "./entity/Post"
|
||||
import { Category } from "./entity/Category"
|
||||
|
||||
|
||||
@ -19,7 +19,7 @@ export class Post {
|
||||
@Column()
|
||||
text: string
|
||||
|
||||
@ManyToMany((type) => Category)
|
||||
@ManyToMany(() => Category)
|
||||
@JoinTable()
|
||||
categories: Category[]
|
||||
|
||||
|
||||
@ -1,5 +1,5 @@
|
||||
import "reflect-metadata"
|
||||
import { DataSource, DataSourceOptions } from "../../src/index"
|
||||
import { DataSource, DataSourceOptions } from "../../src"
|
||||
import { Post } from "./entity/Post"
|
||||
import { Author } from "./entity/Author"
|
||||
import { Category } from "./entity/Category"
|
||||
|
||||
@ -1,5 +1,5 @@
|
||||
import "reflect-metadata"
|
||||
import { DataSource, DataSourceOptions } from "../../src/index"
|
||||
import { DataSource, DataSourceOptions } from "../../src"
|
||||
import { Post } from "./entity/Post"
|
||||
import { Author } from "./entity/Author"
|
||||
|
||||
|
||||
@ -1,5 +1,5 @@
|
||||
import "reflect-metadata"
|
||||
import { DataSource, DataSourceOptions } from "../../src/index"
|
||||
import { DataSource, DataSourceOptions } from "../../src"
|
||||
import { Post } from "./entity/Post"
|
||||
|
||||
const options: DataSourceOptions = {
|
||||
|
||||
96
sample/sample37-vector-sqlserver/README.md
Normal file
96
sample/sample37-vector-sqlserver/README.md
Normal file
@ -0,0 +1,96 @@
|
||||
# Vector Type Support in SQL Server
|
||||
|
||||
This sample demonstrates how to use the `vector` column type in SQL Server with TypeORM for storing and querying vector embeddings.
|
||||
|
||||
## Overview
|
||||
|
||||
SQL Server supports the `vector` data type for storing high-dimensional vectors, which is useful for:
|
||||
|
||||
- Semantic search with embeddings
|
||||
- Recommendation systems
|
||||
- Similarity matching
|
||||
- Machine learning applications
|
||||
|
||||
## Features Demonstrated
|
||||
|
||||
1. **Vector Column Definition**: Define columns with specific vector dimensions
|
||||
2. **Storing Embeddings**: Save vector data as arrays of numbers
|
||||
3. **Vector Similarity Search**: Use `VECTOR_DISTANCE` function for cosine similarity
|
||||
|
||||
## Entity Definition
|
||||
|
||||
```typescript
|
||||
@Entity("document_chunks")
|
||||
export class DocumentChunk {
|
||||
@PrimaryGeneratedColumn("uuid")
|
||||
id: string
|
||||
|
||||
@Column("varchar", { length: "MAX" })
|
||||
content: string
|
||||
|
||||
// Vector column with 1998 dimensions
|
||||
@Column("vector", { length: 1998 })
|
||||
embedding: number[]
|
||||
|
||||
@Column("uuid")
|
||||
documentId: string
|
||||
|
||||
@ManyToOne(() => Document, (document) => document.chunks)
|
||||
@JoinColumn({ name: "documentId" })
|
||||
document: Document
|
||||
}
|
||||
```
|
||||
|
||||
## Vector Similarity Search
|
||||
|
||||
SQL Server provides the `VECTOR_DISTANCE` function for calculating distances between vectors:
|
||||
|
||||
```typescript
|
||||
const queryEmbedding = [
|
||||
/* your query vector */
|
||||
]
|
||||
const documentIds = ["doc-id-1", "doc-id-2"]
|
||||
|
||||
const results = await connection.query(
|
||||
`
|
||||
DECLARE @question AS VECTOR (1998) = @0;
|
||||
SELECT TOP (10) dc.*,
|
||||
VECTOR_DISTANCE('cosine', @question, embedding) AS distance,
|
||||
d.fileName as "documentName"
|
||||
FROM document_chunks dc
|
||||
LEFT JOIN documents d ON dc.documentId = d.id
|
||||
WHERE documentId IN (@1))
|
||||
ORDER BY VECTOR_DISTANCE('cosine', @question, embedding)
|
||||
`,
|
||||
[JSON.stringify(queryEmbedding), documentIds.join(", ")],
|
||||
)
|
||||
```
|
||||
|
||||
## Distance Metrics
|
||||
|
||||
The `VECTOR_DISTANCE` function supports different distance metrics:
|
||||
|
||||
- `'cosine'` - Cosine distance (most common for semantic search)
|
||||
- `'euclidean'` - Euclidean (L2) distance
|
||||
- `'dot'` - Negative dot product
|
||||
|
||||
## Requirements
|
||||
|
||||
- SQL Server with vector support enabled
|
||||
- TypeORM with SQL Server driver (`mssql` package)
|
||||
|
||||
## Running the Sample
|
||||
|
||||
1. Make sure you have SQL Server running with vector support
|
||||
2. Update the connection settings in `app.ts` if needed
|
||||
3. Run:
|
||||
```bash
|
||||
npm install
|
||||
ts-node app.ts
|
||||
```
|
||||
|
||||
## Notes
|
||||
|
||||
- Vector dimensions must be specified using the `length` option
|
||||
- Embeddings are stored as JSON strings internally and converted to/from arrays automatically
|
||||
- The maximum vector dimension depends on your SQL Server version and configuration
|
||||
88
sample/sample37-vector-sqlserver/app.ts
Normal file
88
sample/sample37-vector-sqlserver/app.ts
Normal file
@ -0,0 +1,88 @@
|
||||
import "reflect-metadata"
|
||||
import { DataSource } from "../../src/index"
|
||||
import { DocumentChunk } from "./entity/DocumentChunk"
|
||||
import { Document } from "./entity/Document"
|
||||
|
||||
const AppDataSource = new DataSource({
|
||||
type: "mssql",
|
||||
host: "localhost",
|
||||
username: "sa",
|
||||
password: "Admin12345",
|
||||
database: "test",
|
||||
synchronize: true,
|
||||
dropSchema: true,
|
||||
logging: false,
|
||||
entities: [Document, DocumentChunk],
|
||||
options: {
|
||||
// Enable trust server certificate for local development
|
||||
trustServerCertificate: true,
|
||||
},
|
||||
})
|
||||
|
||||
AppDataSource.initialize()
|
||||
.then(async (connection) => {
|
||||
console.log("Inserting documents and chunks with vector embeddings...")
|
||||
|
||||
// Create a document
|
||||
const document = new Document()
|
||||
document.fileName = "sample-document.txt"
|
||||
await connection.manager.save(document)
|
||||
|
||||
// Generate sample embeddings (in a real app, these would come from an ML model)
|
||||
const generateEmbedding = (dimension: number): number[] => {
|
||||
return Array.from({ length: dimension }, () => Math.random())
|
||||
}
|
||||
|
||||
// Create document chunks with embeddings
|
||||
const chunk1 = new DocumentChunk()
|
||||
chunk1.content =
|
||||
"TypeORM is an ORM that can run in NodeJS and can be used with TypeScript and JavaScript."
|
||||
chunk1.embedding = generateEmbedding(1998)
|
||||
chunk1.document = document
|
||||
|
||||
const chunk2 = new DocumentChunk()
|
||||
chunk2.content =
|
||||
"It supports both Active Record and Data Mapper patterns."
|
||||
chunk2.embedding = generateEmbedding(1998)
|
||||
chunk2.document = document
|
||||
|
||||
const chunk3 = new DocumentChunk()
|
||||
chunk3.content =
|
||||
"TypeORM supports MySQL, PostgreSQL, MariaDB, SQLite, MS SQL Server, Oracle, and more."
|
||||
chunk3.embedding = generateEmbedding(1998)
|
||||
chunk3.document = document
|
||||
|
||||
await connection.manager.save([chunk1, chunk2, chunk3])
|
||||
|
||||
console.log("Documents and chunks have been saved!")
|
||||
|
||||
// Perform a vector similarity search
|
||||
console.log("\nPerforming vector similarity search...")
|
||||
|
||||
// Query embedding (in a real app, this would be generated from user query)
|
||||
const queryEmbedding = generateEmbedding(1998)
|
||||
const documentIds = [document.id]
|
||||
|
||||
const docIdParams = documentIds.map((_, i) => `@${i + 1}`).join(", ")
|
||||
const results = await connection.query(
|
||||
`
|
||||
DECLARE @question AS VECTOR (1998) = @0;
|
||||
SELECT TOP (3) dc.*, VECTOR_DISTANCE('cosine', @question, embedding) AS distance, d.fileName as "documentName"
|
||||
FROM document_chunks dc
|
||||
LEFT JOIN documents d ON dc.documentId = d.id
|
||||
WHERE documentId IN (${docIdParams})
|
||||
ORDER BY VECTOR_DISTANCE('cosine', @question, embedding)
|
||||
`,
|
||||
[JSON.stringify(queryEmbedding), ...documentIds],
|
||||
)
|
||||
|
||||
console.log("Search results (top 3 most similar chunks):")
|
||||
results.forEach((result: any, index: number) => {
|
||||
console.log(`\n${index + 1}. Distance: ${result.distance}`)
|
||||
console.log(` Content: ${result.content.substring(0, 80)}...`)
|
||||
console.log(` Document: ${result.documentName}`)
|
||||
})
|
||||
|
||||
await connection.destroy()
|
||||
})
|
||||
.catch((error) => console.log(error))
|
||||
19
sample/sample37-vector-sqlserver/entity/Document.ts
Normal file
19
sample/sample37-vector-sqlserver/entity/Document.ts
Normal file
@ -0,0 +1,19 @@
|
||||
import {
|
||||
Entity,
|
||||
PrimaryGeneratedColumn,
|
||||
Column,
|
||||
OneToMany,
|
||||
} from "../../../src/index"
|
||||
import { DocumentChunk } from "./DocumentChunk"
|
||||
|
||||
@Entity("documents")
|
||||
export class Document {
|
||||
@PrimaryGeneratedColumn("uuid")
|
||||
id: string
|
||||
|
||||
@Column("varchar")
|
||||
fileName: string
|
||||
|
||||
@OneToMany(() => DocumentChunk, (chunk) => chunk.document)
|
||||
chunks: DocumentChunk[]
|
||||
}
|
||||
27
sample/sample37-vector-sqlserver/entity/DocumentChunk.ts
Normal file
27
sample/sample37-vector-sqlserver/entity/DocumentChunk.ts
Normal file
@ -0,0 +1,27 @@
|
||||
import {
|
||||
Entity,
|
||||
PrimaryGeneratedColumn,
|
||||
Column,
|
||||
ManyToOne,
|
||||
JoinColumn,
|
||||
} from "../../../src/index"
|
||||
import { Document } from "./Document"
|
||||
|
||||
@Entity("document_chunks")
|
||||
export class DocumentChunk {
|
||||
@PrimaryGeneratedColumn("uuid")
|
||||
id: string
|
||||
|
||||
@Column("varchar", { length: "MAX" })
|
||||
content: string
|
||||
|
||||
@Column("vector", { length: 1998 })
|
||||
embedding: number[]
|
||||
|
||||
@Column("uuid")
|
||||
documentId: string
|
||||
|
||||
@ManyToOne(() => Document, (document) => document.chunks)
|
||||
@JoinColumn({ name: "documentId" })
|
||||
document: Document
|
||||
}
|
||||
19
sample/sample37-vector-sqlserver/package.json
Normal file
19
sample/sample37-vector-sqlserver/package.json
Normal file
@ -0,0 +1,19 @@
|
||||
{
|
||||
"name": "sample37-vector-sqlserver",
|
||||
"version": "0.0.1",
|
||||
"description": "Sample demonstrating vector type support in SQL Server with TypeORM",
|
||||
"main": "app.ts",
|
||||
"scripts": {
|
||||
"start": "ts-node app.ts"
|
||||
},
|
||||
"dependencies": {
|
||||
"typeorm": "latest",
|
||||
"mssql": "^11.0.0",
|
||||
"reflect-metadata": "^0.2.2"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/node": "^22.0.0",
|
||||
"ts-node": "^10.9.2",
|
||||
"typescript": "^5.5.4"
|
||||
}
|
||||
}
|
||||
Some files were not shown because too many files have changed in this diff Show More
Loading…
x
Reference in New Issue
Block a user