feat(sap): add support for REAL_VECTOR and HALF_VECTOR data types in SAP HANA Cloud (#11526)

* feat(sap): add support for REAL_VECTOR data type

* feat(sap): add support for HALF_VECTOR data type
Lucian Mocanu 2025-06-22 01:43:10 +02:00 committed by GitHub
parent f2d2236218
commit abf8863a53
13 changed files with 540 additions and 361 deletions


@ -401,9 +401,14 @@ or
### Column types for `sap`
`tinyint`, `smallint`, `integer`, `bigint`, `smalldecimal`, `decimal`, `real`, `double`, `date`, `time`, `seconddate`, `timestamp`, `boolean`, `char`, `nchar`, `varchar`, `nvarchar`, `text`, `alphanum`, `shorttext`, `array`, `varbinary`, `blob`, `clob`, `nclob`, `st_geometry`, `st_point`.
SAP HANA 2.0 and SAP HANA Cloud support slightly different data types. Check the SAP Help pages for more information:
> Note: SAP HANA Cloud deprecated or removed some of these data types. TypeORM will convert them to the closest available alternative when connected to a Cloud version.
- [SAP HANA 2.0 Data Types](https://help.sap.com/docs/SAP_HANA_PLATFORM/4fe29514fd584807ac9f2a04f6754767/20a1569875191014b507cf392724b7eb.html?locale=en-US)
- [SAP HANA Cloud Data Types](https://help.sap.com/docs/hana-cloud-database/sap-hana-cloud-sap-hana-database-sql-reference-guide/data-types)
TypeORM's `SapDriver` supports `tinyint`, `smallint`, `integer`, `bigint`, `smalldecimal`, `decimal`, `real`, `double`, `date`, `time`, `seconddate`, `timestamp`, `boolean`, `char`, `nchar`, `varchar`, `nvarchar`, `text`, `alphanum`, `shorttext`, `array`, `varbinary`, `blob`, `clob`, `nclob`, `st_geometry`, `st_point`, `real_vector` and `half_vector`. Some of these data types have been deprecated or removed in SAP HANA Cloud, and will be converted to the closest available alternative when connected to a Cloud database.
The `real_vector` and `half_vector` data types were introduced in SAP HANA Cloud (2024Q1 and 2025Q2 respectively), and require a supported version of `@sap/hana-client` as well. By default, the client will return a `Buffer` in the `fvecs`/`hvecs` format, which is more efficient. It is possible to let the driver convert the values to a `number[]` by adding `{ extra: { vectorOutputType: "Array" } }` to the connection options. Check the SAP HANA Client documentation for more information about [REAL_VECTOR](https://help.sap.com/docs/SAP_HANA_CLIENT/f1b440ded6144a54ada97ff95dac7adf/0d197e4389c64e6b9cf90f6f698f62fe.html) or [HALF_VECTOR](https://help.sap.com/docs/SAP_HANA_CLIENT/f1b440ded6144a54ada97ff95dac7adf/8bb854b4ce4a4299bed27c365b717e91.html).
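For example, a minimal sketch of a vector column and the matching connection options (the `Embedding` entity and its property names are illustrative, not part of the driver API):

```typescript
import { Column, DataSource, Entity, PrimaryColumn } from "typeorm"

@Entity()
export class Embedding {
    @PrimaryColumn()
    id: number

    // Fixed-length vector of 1536 floats, created as REAL_VECTOR(1536).
    // Omit `length` to create a variable-length REAL_VECTOR instead.
    @Column("real_vector", { length: 1536 })
    vector: number[]
}

export const dataSource = new DataSource({
    type: "sap",
    // ...host, port, and credentials...
    entities: [Embedding],
    extra: {
        // Ask @sap/hana-client to return number[] instead of fvecs Buffers.
        vectorOutputType: "Array",
    },
})
```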
### Column types for `spanner`

package-lock.json (generated)

@ -31,7 +31,7 @@
},
"devDependencies": {
"@eslint/js": "^9.28.0",
"@sap/hana-client": "^2.24.21",
"@sap/hana-client": "^2.25.22",
"@tsconfig/node16": "^16.1.4",
"@types/chai": "^4.3.20",
"@types/chai-as-promised": "^7.1.8",
@ -2491,9 +2491,9 @@
}
},
"node_modules/@sap/hana-client": {
"version": "2.24.21",
"resolved": "https://registry.npmjs.org/@sap/hana-client/-/hana-client-2.24.21.tgz",
"integrity": "sha512-Mat/LwhvboZqbBKXxLDJz+Xtjf+Hht/SDaVRmzCoNDrbWNKiWIGZ5b9xQVBAXZFfU6GTzFQPcP1wGLBb71BRRg==",
"version": "2.25.22",
"resolved": "https://registry.npmjs.org/@sap/hana-client/-/hana-client-2.25.22.tgz",
"integrity": "sha512-85JtWAEM3HXsXl6/9+Ppmm7aQ8QmHQTzBJsHTv8hn4Ju4WkLcrjQtSHR0qYe8hiA5PMYd2RGtgGCoAjLi63+5g==",
"dev": true,
"hasInstallScript": true,
"hasShrinkwrap": true,


@ -109,7 +109,7 @@
},
"devDependencies": {
"@eslint/js": "^9.28.0",
"@sap/hana-client": "^2.24.21",
"@sap/hana-client": "^2.25.22",
"@tsconfig/node16": "^16.1.4",
"@types/chai": "^4.3.20",
"@types/chai-as-promised": "^7.1.8",


@ -109,36 +109,39 @@ export class SapDriver implements Driver {
* @see https://help.sap.com/docs/hana-cloud-database/sap-hana-cloud-sap-hana-database-sql-reference-guide/data-types
*/
supportedDataTypes: ColumnType[] = [
"tinyint",
"smallint",
"int", // typeorm alias for "integer"
"integer",
"alphanum", // removed in SAP HANA Cloud
"array",
"bigint",
"smalldecimal",
"decimal",
"dec", // typeorm alias for "decimal"
"real",
"double",
"float", // database alias for "real" / "double"
"date",
"time",
"seconddate",
"timestamp",
"binary",
"blob",
"boolean",
"char", // not officially supported, in SAP HANA Cloud: alias for "nchar"
"nchar", // not officially supported
"varchar", // in SAP HANA Cloud: alias for "nvarchar"
"nvarchar",
"text", // removed in SAP HANA Cloud
"alphanum", // removed in SAP HANA Cloud
"shorttext", // removed in SAP HANA Cloud
"array",
"varbinary",
"blob",
"clob", // in SAP HANA Cloud: alias for "nclob"
"date",
"dec", // typeorm alias for "decimal"
"decimal",
"double",
"float", // database alias for "real" / "double"
"half_vector", // only supported in SAP HANA Cloud, not in SAP HANA 2.0
"int", // typeorm alias for "integer"
"integer",
"nchar", // not officially supported
"nclob",
"nvarchar",
"real_vector", // only supported in SAP HANA Cloud, not in SAP HANA 2.0
"real",
"seconddate",
"shorttext", // removed in SAP HANA Cloud
"smalldecimal",
"smallint",
"st_geometry",
"st_point",
"text", // removed in SAP HANA Cloud
"time",
"timestamp",
"tinyint",
"varbinary",
"varchar", // in SAP HANA Cloud: alias for "nvarchar"
]
/**
@ -155,11 +158,14 @@ export class SapDriver implements Driver {
* Gets list of column data types that support length by a driver.
*/
withLengthColumnTypes: ColumnType[] = [
"varchar",
"nvarchar",
"alphanum",
"binary",
"half_vector",
"nvarchar",
"real_vector",
"shorttext",
"varbinary",
"varchar",
]
/**
@ -170,7 +176,7 @@ export class SapDriver implements Driver {
/**
* Gets list of column data types that support precision by a driver.
*/
withPrecisionColumnTypes: ColumnType[] = ["decimal"]
withPrecisionColumnTypes: ColumnType[] = ["decimal", "timestamp"]
/**
* Orm has special columns and we need to know what database column types should be for those types.
@ -207,13 +213,14 @@ export class SapDriver implements Driver {
* Used in the cases when length/precision/scale is not specified by user.
*/
dataTypeDefaults: DataTypeDefaults = {
binary: { length: 1 },
char: { length: 1 },
decimal: { precision: 18, scale: 0 },
nchar: { length: 1 },
varchar: { length: 255 },
nvarchar: { length: 255 },
shorttext: { length: 255 },
varbinary: { length: 255 },
decimal: { precision: 18, scale: 0 },
varchar: { length: 255 },
}
/**
@ -493,9 +500,7 @@ export class SapDriver implements Driver {
if (value === null || value === undefined) return value
if (columnMetadata.type === Boolean) {
return value === true ? 1 : 0
} else if (columnMetadata.type === "date") {
if (columnMetadata.type === "date") {
return DateUtils.mixedDateToDateString(value)
} else if (columnMetadata.type === "time") {
return DateUtils.mixedDateToTimeString(value)
@ -531,9 +536,7 @@ export class SapDriver implements Driver {
)
: value
if (columnMetadata.type === Boolean) {
value = value ? true : false
} else if (
if (
columnMetadata.type === "timestamp" ||
columnMetadata.type === "seconddate" ||
columnMetadata.type === Date
@ -549,9 +552,6 @@ export class SapDriver implements Driver {
value = DateUtils.stringToSimpleJson(value)
} else if (columnMetadata.type === "simple-enum") {
value = DateUtils.stringToSimpleEnum(value, columnMetadata)
} else if (columnMetadata.type === Number) {
// convert to number if number
value = !isNaN(+value) ? parseInt(value) : value
}
if (columnMetadata.transformer)
@ -620,6 +620,13 @@ export class SapDriver implements Driver {
} else if (column.type === "char") {
return "nchar"
}
} else {
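// SAP HANA 2.0: REAL_VECTOR and HALF_VECTOR only exist in SAP HANA
// Cloud, so fall back to a plain binary column here.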
if (
column.type === "real_vector" ||
column.type === "half_vector"
) {
return "varbinary"
}
}
return (column.type as string) || ""


@ -47,7 +47,7 @@ export type WithPrecisionColumnType =
| "time" // mysql, postgres, mssql, cockroachdb
| "time with time zone" // postgres, cockroachdb
| "time without time zone" // postgres
| "timestamp" // mysql, postgres, mssql, oracle, cockroachdb, spanner
| "timestamp" // mysql, postgres, mssql, oracle, cockroachdb, sap, spanner
| "timestamp without time zone" // postgres, cockroachdb
| "timestamp with time zone" // postgres, oracle, cockroachdb
| "timestamp with local time zone" // oracle
@ -75,6 +75,8 @@ export type WithLengthColumnType =
| "binary" // mssql
| "varbinary" // mssql, sap
| "string" // cockroachdb, spanner
| "half_vector" // sap
| "real_vector" // sap
export type WithWidthColumnType =
| "tinyint" // mysql


@ -129,10 +129,7 @@ export class EntityMetadataValidator {
const normalizedColumn = driver.normalizeType(
column,
) as ColumnType
if (
driver.supportedDataTypes.indexOf(normalizedColumn) ===
-1
)
if (!driver.supportedDataTypes.includes(normalizedColumn))
throw new DataTypeNotSupportedError(
column,
normalizedColumn,
@ -140,9 +137,7 @@ export class EntityMetadataValidator {
)
if (
column.length &&
driver.withLengthColumnTypes.indexOf(
normalizedColumn,
) === -1
!driver.withLengthColumnTypes.includes(normalizedColumn)
)
throw new TypeORMError(
`Column ${column.propertyName} of Entity ${entityMetadata.name} does not support length property.`,
@ -316,8 +311,6 @@ export class EntityMetadataValidator {
`This may lead to unexpected circular removals. Please set cascade remove only from one side of relationship.`,
)
}) // todo: maybe better just deny removal from one to one relation without join column?
entityMetadata.eagerRelations.forEach((relation) => {})
}
/**


@ -1,8 +1,8 @@
import "reflect-metadata"
import { DataSource } from "../../../../../src"
import { expect } from "chai"
import { DataSource, DeepPartial } from "../../../../../src"
import { DriverUtils } from "../../../../../src/driver/DriverUtils"
import { DateUtils } from "../../../../../src/util/DateUtils"
import {
closeTestingConnections,
createTestingConnections,
@ -13,347 +13,290 @@ import { PostWithOptions } from "./entity/PostWithOptions"
import { PostWithoutTypes } from "./entity/PostWithoutTypes"
describe("database schema > column types > sap", () => {
let connections: DataSource[]
let dataSources: DataSource[]
before(async () => {
connections = await createTestingConnections({
dataSources = await createTestingConnections({
entities: [__dirname + "/entity/*{.js,.ts}"],
enabledDrivers: ["sap"],
})
})
beforeEach(() => reloadTestingDatabases(connections))
after(() => closeTestingConnections(connections))
beforeEach(() => reloadTestingDatabases(dataSources))
after(() => closeTestingConnections(dataSources))
it("all types should work correctly - persist and hydrate", () =>
Promise.all(
connections.map(async (connection) => {
// this test contains data types that are available only in SAP HANA 2.0 and that have been removed in SAP HANA Cloud
const postRepository = connection.getRepository(Post)
const queryRunner = connection.createQueryRunner()
const table = await queryRunner.getTable("post")
await queryRunner.release()
dataSources.map(async (dataSource) => {
const plainPost = {
id: 1,
name: "Post",
int: 2147483647,
integer: 2147483647,
tinyint: 250,
smallint: 32767,
bigint: "8223372036854775807",
decimal: "8223372036854775807",
dec: "8223372036854775807",
smalldecimal: "8223372036854775",
real: 10.5,
double: 10.53,
float: 10.53,
char: "A",
nchar: "A",
varchar: "This is varchar",
nvarchar: "This is nvarchar",
alphanum: "This is alphanum",
text: "This is text",
shorttext: "This is shorttext",
date: "2017-06-21",
time: "13:27:05",
timestamp: new Date(),
seconddate: (() => {
const d = new Date()
d.setMilliseconds(0)
return d
})(),
blob: Buffer.from("This is blob"),
clob: "This is clob",
nclob: "This is nclob",
boolean: true,
// array: ["A", "B", "C"]; // TODO
varbinary: Buffer.from("This is varbinary"),
simpleArray: ["A", "B", "C"],
} satisfies DeepPartial<Post>
const post = new Post()
post.id = 1
post.name = "Post"
post.int = 2147483647
post.integer = 2147483647
post.tinyint = 250
post.smallint = 32767
post.bigint = "8223372036854775807"
post.decimal = "8223372036854775807"
post.dec = "8223372036854775807"
post.smalldecimal = "8223372036854775"
post.real = 10.5
post.double = 10.53
post.float = 10.53
post.char = "A"
post.nchar = "A"
post.varchar = "This is varchar"
post.nvarchar = "This is nvarchar"
post.alphanum = "This is alphanum"
post.text = "This is text"
post.shorttext = "This is shorttext"
post.dateObj = new Date()
post.date = "2017-06-21"
post.timeObj = new Date()
post.time = "13:27:05"
post.timestamp = new Date()
post.timestamp.setMilliseconds(0)
post.seconddate = new Date()
post.seconddate.setMilliseconds(0)
post.blob = Buffer.from("This is blob")
post.clob = "This is clob"
post.nclob = "This is nclob"
post.boolean = true
// post.array = ["A", "B", "C"]; // TODO
post.varbinary = Buffer.from("This is varbinary")
post.simpleArray = ["A", "B", "C"]
const postRepository = dataSource.getRepository(Post)
const post = postRepository.create(plainPost)
await postRepository.save(post)
const loadedPost = (await postRepository.findOneBy({
const loadedPost = await postRepository.findOneBy({
id: 1,
}))!
loadedPost.id.should.be.equal(post.id)
loadedPost.name.should.be.equal(post.name)
loadedPost.int.should.be.equal(post.int)
loadedPost.integer.should.be.equal(post.integer)
loadedPost.tinyint.should.be.equal(post.tinyint)
loadedPost.smallint.should.be.equal(post.smallint)
loadedPost.bigint.should.be.equal(post.bigint)
loadedPost.decimal.should.be.equal(post.decimal)
loadedPost.dec.should.be.equal(post.dec)
loadedPost.smalldecimal.should.be.equal(post.smalldecimal)
loadedPost.real.should.be.equal(post.real)
loadedPost.double.should.be.equal(post.double)
loadedPost.float.should.be.equal(post.float)
loadedPost.char.should.be.equal(post.char)
loadedPost.nchar.should.be.equal(post.nchar)
loadedPost.varchar.should.be.equal(post.varchar)
loadedPost.nvarchar.should.be.equal(post.nvarchar)
loadedPost.alphanum.should.be.equal(post.alphanum)
loadedPost.text.should.be.equal(post.text)
loadedPost.shorttext.should.be.equal(post.shorttext)
loadedPost.dateObj.should.be.equal(
DateUtils.mixedDateToDateString(post.dateObj),
)
loadedPost.date.should.be.equal(post.date)
loadedPost.timeObj
.valueOf()
.should.be.equal(DateUtils.mixedTimeToString(post.timeObj))
loadedPost.time.should.be.equal(post.time)
loadedPost.timestamp
.valueOf()
.should.be.equal(post.timestamp.valueOf())
loadedPost.seconddate
.valueOf()
.should.be.equal(post.seconddate.valueOf())
loadedPost.blob.toString().should.be.equal(post.blob.toString())
loadedPost.clob.toString().should.be.equal(post.clob.toString())
loadedPost.nclob
.toString()
.should.be.equal(post.nclob.toString())
loadedPost.boolean.should.be.equal(post.boolean)
loadedPost.varbinary
.toString()
.should.be.equal(post.varbinary.toString())
loadedPost.simpleArray[0].should.be.equal(post.simpleArray[0])
loadedPost.simpleArray[1].should.be.equal(post.simpleArray[1])
loadedPost.simpleArray[2].should.be.equal(post.simpleArray[2])
})
expect(loadedPost).to.deep.equal(plainPost)
table!.findColumnByName("id")!.type.should.be.equal("integer")
table!
.findColumnByName("name")!
.type.should.be.equal("nvarchar")
table!.findColumnByName("int")!.type.should.be.equal("integer")
table!
.findColumnByName("integer")!
.type.should.be.equal("integer")
table!
.findColumnByName("tinyint")!
.type.should.be.equal("tinyint")
table!
.findColumnByName("smallint")!
.type.should.be.equal("smallint")
table!
.findColumnByName("bigint")!
.type.should.be.equal("bigint")
table!
.findColumnByName("decimal")!
.type.should.be.equal("decimal")
table!.findColumnByName("dec")!.type.should.be.equal("decimal")
table!.findColumnByName("real")!.type.should.be.equal("real")
table!
.findColumnByName("double")!
.type.should.be.equal("double")
table!.findColumnByName("float")!.type.should.be.equal("double")
table!.findColumnByName("nchar")!.type.should.be.equal("nchar")
table!
.findColumnByName("nvarchar")!
.type.should.be.equal("nvarchar")
table!.findColumnByName("dateObj")!.type.should.be.equal("date")
table!.findColumnByName("date")!.type.should.be.equal("date")
table!.findColumnByName("timeObj")!.type.should.be.equal("time")
table!.findColumnByName("time")!.type.should.be.equal("time")
table!
.findColumnByName("timestamp")!
.type.should.be.equal("timestamp")
table!
.findColumnByName("seconddate")!
.type.should.be.equal("seconddate")
table!.findColumnByName("blob")!.type.should.be.equal("blob")
table!.findColumnByName("nclob")!.type.should.be.equal("nclob")
table!
.findColumnByName("boolean")!
.type.should.be.equal("boolean")
table!
.findColumnByName("varbinary")!
.type.should.be.equal("varbinary")
table!
.findColumnByName("simpleArray")!
.type.should.be.equal("nclob")
// Verify column metadata
const queryRunner = dataSource.createQueryRunner()
const table = (await queryRunner.getTable(
dataSource.getMetadata(Post).tableName,
))!
await queryRunner.release()
expect(table.findColumnByName("id")!.type).to.equal("integer")
expect(table.findColumnByName("name")!.type).to.equal(
"nvarchar",
)
expect(table.findColumnByName("int")!.type).to.equal("integer")
expect(table.findColumnByName("integer")!.type).to.equal(
"integer",
)
expect(table.findColumnByName("tinyint")!.type).to.equal(
"tinyint",
)
expect(table.findColumnByName("smallint")!.type).to.equal(
"smallint",
)
expect(table.findColumnByName("bigint")!.type).to.equal(
"bigint",
)
expect(table.findColumnByName("decimal")!.type).to.equal(
"decimal",
)
expect(table.findColumnByName("dec")!.type).to.equal("decimal")
expect(table.findColumnByName("real")!.type).to.equal("real")
expect(table.findColumnByName("double")!.type).to.equal(
"double",
)
expect(table.findColumnByName("float")!.type).to.equal("double")
expect(table.findColumnByName("nchar")!.type).to.equal("nchar")
expect(table.findColumnByName("nvarchar")!.type).to.equal(
"nvarchar",
)
expect(table.findColumnByName("date")!.type).to.equal("date")
expect(table.findColumnByName("time")!.type).to.equal("time")
expect(table.findColumnByName("timestamp")!.type).to.equal(
"timestamp",
)
expect(table.findColumnByName("seconddate")!.type).to.equal(
"seconddate",
)
expect(table.findColumnByName("blob")!.type).to.equal("blob")
expect(table.findColumnByName("nclob")!.type).to.equal("nclob")
expect(table.findColumnByName("boolean")!.type).to.equal(
"boolean",
)
expect(table.findColumnByName("varbinary")!.type).to.equal(
"varbinary",
)
expect(table.findColumnByName("simpleArray")!.type).to.equal(
"nclob",
)
// Deprecated column types that have a different behavior in SAP HANA Cloud
if (
DriverUtils.isReleaseVersionOrGreater(
connection.driver,
dataSource.driver,
"4.0",
)
) {
table!
.findColumnByName("char")!
.type.should.be.equal("nchar")
table!
.findColumnByName("varchar")!
.type.should.be.equal("nvarchar")
table!
.findColumnByName("alphanum")!
.type.should.be.equal("nvarchar")
table!
.findColumnByName("shorttext")!
.type.should.be.equal("nvarchar")
table!
.findColumnByName("text")!
.type.should.be.equal("nclob")
table!
.findColumnByName("clob")!
.type.should.be.equal("nclob")
expect(table.findColumnByName("char")!.type).to.equal(
"nchar",
)
expect(table.findColumnByName("varchar")!.type).to.equal(
"nvarchar",
)
expect(table.findColumnByName("alphanum")!.type).to.equal(
"nvarchar",
)
expect(table.findColumnByName("shorttext")!.type).to.equal(
"nvarchar",
)
expect(table.findColumnByName("text")!.type).to.equal(
"nclob",
)
expect(table.findColumnByName("clob")!.type).to.equal(
"nclob",
)
} else {
table!
.findColumnByName("char")!
.type.should.be.equal("char")
table!
.findColumnByName("varchar")!
.type.should.be.equal("varchar")
table!
.findColumnByName("alphanum")!
.type.should.be.equal("alphanum")
table!
.findColumnByName("shorttext")!
.type.should.be.equal("shorttext")
table!
.findColumnByName("text")!
.type.should.be.equal("text")
table!
.findColumnByName("clob")!
.type.should.be.equal("clob")
expect(table.findColumnByName("char")!.type).to.equal(
"char",
)
expect(table.findColumnByName("varchar")!.type).to.equal(
"varchar",
)
expect(table.findColumnByName("alphanum")!.type).to.equal(
"alphanum",
)
expect(table.findColumnByName("shorttext")!.type).to.equal(
"shorttext",
)
expect(table.findColumnByName("text")!.type).to.equal(
"text",
)
expect(table.findColumnByName("clob")!.type).to.equal(
"clob",
)
}
}),
))
it("all types should work correctly - persist and hydrate when options are specified on columns", () =>
Promise.all(
connections.map(async (connection) => {
const postRepository = connection.getRepository(PostWithOptions)
const queryRunner = connection.createQueryRunner()
const table = await queryRunner.getTable("post_with_options")
await queryRunner.release()
dataSources.map(async (dataSource) => {
const plainPost = {
id: 1,
dec: "60.00",
decimal: "70.000",
varchar: "This is varchar",
nvarchar: "This is nvarchar",
alphanum: "This is alphanum",
shorttext: "This is shorttext",
} satisfies DeepPartial<PostWithOptions>
const post = new PostWithOptions()
post.id = 1
post.dec = "60.00"
post.decimal = "70.000"
post.varchar = "This is varchar"
post.nvarchar = "This is nvarchar"
post.alphanum = "This is alphanum"
post.shorttext = "This is shorttext"
const postRepository = dataSource.getRepository(PostWithOptions)
const post = postRepository.create(plainPost)
await postRepository.save(post)
const loadedPost = (await postRepository.findOneBy({
const loadedPost = await postRepository.findOneBy({
id: 1,
}))!
loadedPost.id.should.be.equal(post.id)
loadedPost.dec.should.be.equal(post.dec)
loadedPost.decimal.should.be.equal(post.decimal)
loadedPost.varchar.should.be.equal(post.varchar)
loadedPost.nvarchar.should.be.equal(post.nvarchar)
loadedPost.alphanum.should.be.equal(post.alphanum)
loadedPost.shorttext.should.be.equal(post.shorttext)
})
expect(loadedPost).to.deep.equal(plainPost)
table!.findColumnByName("id")!.type.should.be.equal("integer")
table!.findColumnByName("dec")!.type.should.be.equal("decimal")
table!.findColumnByName("dec")!.precision!.should.be.equal(10)
table!.findColumnByName("dec")!.scale!.should.be.equal(2)
table!
.findColumnByName("decimal")!
.type.should.be.equal("decimal")
table!
.findColumnByName("decimal")!
.precision!.should.be.equal(10)
table!.findColumnByName("decimal")!.scale!.should.be.equal(3)
table!
.findColumnByName("nvarchar")!
.type.should.be.equal("nvarchar")
table!
.findColumnByName("nvarchar")!
.length!.should.be.equal("50")
// Verify column metadata
const queryRunner = dataSource.createQueryRunner()
const table = (await queryRunner.getTable(
dataSource.getMetadata(PostWithOptions).tableName,
))!
await queryRunner.release()
expect(table.findColumnByName("id")!.type).to.equal("integer")
expect(table.findColumnByName("dec")).to.include({
type: "decimal",
precision: 10,
scale: 2,
})
expect(table.findColumnByName("decimal")).to.include({
type: "decimal",
precision: 10,
scale: 3,
})
expect(table.findColumnByName("nvarchar")).to.include({
type: "nvarchar",
length: "50",
})
// Deprecated column types that have a different behavior in SAP HANA Cloud
if (
DriverUtils.isReleaseVersionOrGreater(
connection.driver,
dataSource.driver,
"4.0",
)
) {
table!
.findColumnByName("varchar")!
.type.should.be.equal("nvarchar")
table!
.findColumnByName("varchar")!
.length!.should.be.equal("50")
table!
.findColumnByName("alphanum")!
.type.should.be.equal("nvarchar")
table!
.findColumnByName("alphanum")!
.length!.should.be.equal("50")
table!
.findColumnByName("shorttext")!
.type.should.be.equal("nvarchar")
table!
.findColumnByName("shorttext")!
.length!.should.be.equal("50")
expect(table.findColumnByName("varchar")).to.include({
type: "nvarchar",
length: "50",
})
expect(table.findColumnByName("alphanum")).to.include({
type: "nvarchar",
length: "50",
})
expect(table.findColumnByName("shorttext")).to.include({
type: "nvarchar",
length: "50",
})
} else {
table!
.findColumnByName("varchar")!
.type.should.be.equal("varchar")
table!
.findColumnByName("varchar")!
.length!.should.be.equal("50")
table!
.findColumnByName("alphanum")!
.type.should.be.equal("alphanum")
table!
.findColumnByName("alphanum")!
.length!.should.be.equal("50")
table!
.findColumnByName("shorttext")!
.type.should.be.equal("shorttext")
table!
.findColumnByName("shorttext")!
.length!.should.be.equal("50")
expect(table.findColumnByName("varchar")).to.include({
type: "varchar",
length: "50",
})
expect(table.findColumnByName("alphanum")).to.include({
type: "alphanum",
length: "50",
})
expect(table.findColumnByName("shorttext")).to.include({
type: "shorttext",
length: "50",
})
}
}),
))
it("all types should work correctly - persist and hydrate when types are not specified on columns", () =>
Promise.all(
connections.map(async (connection) => {
const postRepository =
connection.getRepository(PostWithoutTypes)
const queryRunner = connection.createQueryRunner()
const table = await queryRunner.getTable("post_without_types")
await queryRunner.release()
dataSources.map(async (dataSource) => {
const plainPost = {
id: 1,
name: "Post",
boolean: true,
blob: Buffer.from("This is blob"),
timestamp: new Date(),
} satisfies DeepPartial<PostWithoutTypes>
const post = new PostWithoutTypes()
post.id = 1
post.name = "Post"
post.boolean = true
post.blob = Buffer.from("This is blob")
post.timestamp = new Date()
const postRepository =
dataSource.getRepository(PostWithoutTypes)
const post = postRepository.create(plainPost)
await postRepository.save(post)
const loadedPost = (await postRepository.findOneBy({
const loadedPost = await postRepository.findOneBy({
id: 1,
}))!
loadedPost.id.should.be.equal(post.id)
loadedPost.name.should.be.equal(post.name)
loadedPost.boolean.should.be.equal(post.boolean)
loadedPost.blob.toString().should.be.equal(post.blob.toString())
loadedPost.timestamp
.valueOf()
.should.be.equal(post.timestamp.valueOf())
})
expect(loadedPost).to.deep.equal(plainPost)
table!.findColumnByName("id")!.type.should.be.equal("integer")
table!
.findColumnByName("name")!
.type.should.be.equal("nvarchar")
table!
.findColumnByName("boolean")!
.type.should.be.equal("boolean")
table!.findColumnByName("blob")!.type.should.be.equal("blob")
table!
.findColumnByName("timestamp")!
.type.should.be.equal("timestamp")
// Verify column metadata
const queryRunner = dataSource.createQueryRunner()
const table = (await queryRunner.getTable(
dataSource.getMetadata(PostWithoutTypes).tableName,
))!
await queryRunner.release()
expect(table.findColumnByName("id")!.type).to.equal("integer")
expect(table.findColumnByName("name")!.type).to.equal(
"nvarchar",
)
expect(table.findColumnByName("boolean")!.type).to.equal(
"boolean",
)
expect(table.findColumnByName("blob")!.type).to.equal("blob")
expect(table.findColumnByName("timestamp")!.type).to.equal(
"timestamp",
)
}),
))
})


@ -1,6 +1,4 @@
import { Entity } from "../../../../../../src"
import { PrimaryColumn } from "../../../../../../src"
import { Column } from "../../../../../../src"
import { Column, Entity, PrimaryColumn } from "../../../../../../src"
@Entity()
export class Post {
@ -76,15 +74,9 @@ export class Post {
// Date Types
// -------------------------------------------------------------------------
@Column("date")
dateObj: Date
@Column("date")
date: string
@Column("time")
timeObj: Date
@Column("time")
time: string


@ -1,6 +1,4 @@
import { Entity } from "../../../../../../src"
import { PrimaryColumn } from "../../../../../../src"
import { Column } from "../../../../../../src"
import { Column, Entity, PrimaryColumn } from "../../../../../../src"
@Entity()
export class PostWithOptions {


@ -1,6 +1,4 @@
import { Entity } from "../../../../../../src"
import { PrimaryColumn } from "../../../../../../src"
import { Column } from "../../../../../../src"
import { Column, Entity, PrimaryColumn } from "../../../../../../src"
@Entity()
export class PostWithoutTypes {


@ -0,0 +1,27 @@
import { Column, Entity, PrimaryColumn } from "../../../../../../src"
@Entity()
export class ArrayEmbedding {
@PrimaryColumn()
id: number
@Column("nclob")
content: string
@Column("nclob")
metadata: string
@Column("real_vector", {
length: 16,
})
smallVector: number[]
@Column("real_vector", {
length: 1536,
nullable: true,
})
largeVector: number[] | null
@Column("real_vector")
variableVector: number[]
}


@ -0,0 +1,16 @@
import { Column, Entity, PrimaryColumn } from "../../../../../../src"
@Entity()
export class BufferEmbedding {
@PrimaryColumn()
id: number
@Column("nclob")
content: string
@Column("nclob")
metadata: string
@Column("real_vector")
realVector: Buffer
}


@ -0,0 +1,198 @@
import { expect } from "chai"
import { DataSource, DeepPartial } from "../../../../../src"
import { DriverUtils } from "../../../../../src/driver/DriverUtils"
import {
closeTestingConnections,
createTestingConnections,
} from "../../../../utils/test-utils"
import { ArrayEmbedding } from "./entity/ArrayEmbedding"
import { BufferEmbedding } from "./entity/BufferEmbedding"
describe("database-schema > vectors > sap", () => {
describe("with vector output type Array", () => {
let dataSources: DataSource[]
before(async () => {
dataSources = await createTestingConnections({
entities: [ArrayEmbedding],
enabledDrivers: ["sap"],
driverSpecific: {
extra: {
vectorOutputType: "Array",
},
synchronize: false,
},
})
})
after(() => closeTestingConnections(dataSources))
it("should work correctly - create, persist and hydrate", () =>
Promise.all(
dataSources.map(async (dataSource) => {
if (
!DriverUtils.isReleaseVersionOrGreater(
dataSource.driver,
"4.0",
)
) {
return
}
await dataSource.synchronize()
// Verify column metadata
const queryRunner = dataSource.createQueryRunner()
const table = (await queryRunner.getTable(
dataSource.getMetadata(ArrayEmbedding).tableName,
))!
await queryRunner.release()
expect(table.findColumnByName("smallVector")).to.contain({
type: "real_vector",
length: "16",
})
expect(table.findColumnByName("largeVector")).to.contain({
type: "real_vector",
length: "1536",
isNullable: true,
})
expect(table.findColumnByName("variableVector")).to.contain(
{
type: "real_vector",
length: "",
},
)
const smallVector = [
0.004318627528846264, -0.008295782841742039,
0.011462775990366936, -0.03171011060476303,
-0.003404685528948903, 0.018827877938747406,
0.010692788287997246, 0.014154385775327682,
-0.026206370443105698, -0.03977154940366745,
-0.008630559779703617, 0.040039367973804474,
0.0019048830727115273, 0.01347813569009304,
-0.02147931419312954, -0.004211498890072107,
]
const variableVector = [
-0.0015692687593400478, -0.013364311307668686,
0.013545091263949871, 0.034843627363443375,
0.02682236023247242, -0.011710511520504951,
0.0019400346791371703, -0.003324338933452964,
0.004094745498150587, -0.01127530075609684,
-0.020943669602274895, -0.018211888149380684,
-0.00585190812125802, 0.01311657577753067,
-0.011121302843093872, 0.003078277688473463,
]
const plainEmbedding = {
id: 1,
content:
"This is a sample text to be analyzed by SAP Joule AI",
metadata: `{"client":"typeorm"}`,
smallVector,
largeVector: null,
variableVector,
} satisfies DeepPartial<ArrayEmbedding>
const embeddingRepository =
dataSource.getRepository(ArrayEmbedding)
const embedding = embeddingRepository.create(plainEmbedding)
await embeddingRepository.save(embedding)
const loadedEmbedding = await embeddingRepository.findOneBy(
{ id: 1 },
)
expect(loadedEmbedding).to.deep.equal(plainEmbedding)
}),
))
})
describe("with vector output type Buffer", () => {
let dataSources: DataSource[]
before(async () => {
dataSources = await createTestingConnections({
entities: [BufferEmbedding],
enabledDrivers: ["sap"],
driverSpecific: {
synchronize: false,
},
})
})
after(() => closeTestingConnections(dataSources))
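// The helpers below implement the fvecs wire format that
// @sap/hana-client uses for REAL_VECTOR values: a little-endian
// uint32 element count followed by that many little-endian
// float32 components.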
function deserializeFvecs(buffer: Buffer) {
const dataView = new DataView(
buffer.buffer,
buffer.byteOffset,
buffer.byteLength,
)
const length = dataView.getUint32(0, true)
const array = new Array<number>(length)
for (let index = 0; index < length; index++) {
array[index] = dataView.getFloat32(4 + index * 4, true)
}
return array
}
function serializeFvecs(array: number[]) {
const length = array.length
const arrayBuffer = new ArrayBuffer(4 + length * 4)
const dataView = new DataView(arrayBuffer)
dataView.setUint32(0, length, true)
for (let index = 0; index < length; index++) {
dataView.setFloat32(4 + index * 4, array[index], true)
}
return Buffer.from(arrayBuffer)
}
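// Round-trip sanity check (illustrative): small integers are exactly
// representable in float32, so
// deserializeFvecs(serializeFvecs([1, 2, 3])) yields [1, 2, 3].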
it("should work correctly - persist and hydrate ", () =>
Promise.all(
dataSources.map(async (dataSource) => {
if (
!DriverUtils.isReleaseVersionOrGreater(
dataSource.driver,
"4.0",
)
) {
return
}
await dataSource.synchronize()
const plainVector = [
-0.0015692687593400478, -0.013364311307668686,
0.013545091263949871, 0.034843627363443375,
0.02682236023247242, -0.011710511520504951,
0.0019400346791371703, -0.003324338933452964,
0.004094745498150587, -0.01127530075609684,
-0.020943669602274895, -0.018211888149380684,
-0.00585190812125802, 0.01311657577753067,
-0.011121302843093872, 0.003078277688473463,
]
const plainEmbedding = {
id: 1,
content:
"This is a sample text to be analyzed by SAP Joule AI",
metadata: `{"client":"typeorm"}`,
realVector: serializeFvecs(plainVector),
} satisfies DeepPartial<BufferEmbedding>
const embeddingRepository =
dataSource.getRepository(BufferEmbedding)
const embedding = embeddingRepository.create(plainEmbedding)
await embeddingRepository.save(embedding)
const loadedEmbedding = await embeddingRepository.findOneBy(
{ id: 1 },
)
const loadedVector = deserializeFvecs(
loadedEmbedding!.realVector,
)
expect(loadedVector).to.deep.equal(plainVector)
}),
))
})
})