fix: type generation for shared libs (#2684)

liuyi 2025-06-03 19:19:29 +08:00 committed by GitHub
parent d5a16483c5
commit d874a46647
18 changed files with 206 additions and 361 deletions
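
The core of the change: instead of every napi crate appending to one shared `TYPE_DEF_TMP_PATH` file and marking itself done with a `{"done": true}` marker line, the macro now writes one intermediate file per crate into a `TYPE_DEF_TMP_FOLDER` directory, and the CLI concatenates whatever files it finds there. Below is a rough TypeScript sketch of that consumer side, simplified from the `generateTypeDef` changes further down; `renderTypeDef` and `EXTERNAL_OBJECT_HEADER` are illustrative stand-ins (the real code uses `processTypeDef` and inlines the header), and error handling plus the dts-header logic are omitted.

```ts
import { readdir } from 'node:fs/promises'
import { join } from 'node:path'

// Illustrative constant: the helper type that build.ts now injects itself
// once any definition references ExternalObject<...>.
const EXTERNAL_OBJECT_HEADER = `
export declare class ExternalObject<T> {
  readonly '': {
    readonly '': unique symbol
    [K: symbol]: T
  }
}
`

// Each napi crate writes one newline-delimited JSON file, named after its
// CARGO_PKG_NAME, into TYPE_DEF_TMP_FOLDER; the CLI walks the folder and
// concatenates the rendered output of every file it finds.
async function collectTypeDefs(
  typeDefDir: string,
  renderTypeDef: (file: string) => Promise<{ dts: string; exports: string[] }>,
) {
  let dts = ''
  const exports: string[] = []
  for (const entry of await readdir(typeDefDir, { withFileTypes: true })) {
    if (!entry.isFile()) {
      continue
    }
    const rendered = await renderTypeDef(join(typeDefDir, entry.name))
    dts += rendered.dts
    exports.push(...rendered.exports)
  }
  if (dts.includes('ExternalObject<')) {
    dts = EXTERNAL_OBJECT_HEADER + dts
  }
  return { dts, exports }
}
```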

View File

@@ -1,18 +1,11 @@
import { spawn } from 'node:child_process'
import { createHash } from 'node:crypto'
import {
existsSync,
mkdirSync,
unlinkSync,
readFileSync,
writeFileSync,
} from 'node:fs'
import { existsSync, mkdirSync, rmSync } from 'node:fs'
import { createRequire } from 'node:module'
import { tmpdir, homedir } from 'node:os'
import { homedir } from 'node:os'
import { parse, join, resolve } from 'node:path'
import * as colors from 'colorette'
import { groupBy, split } from 'lodash-es'
import { include as setjmpInclude, lib as setjmpLib } from 'wasm-sjlj'
import { BuildOptions as RawBuildOptions } from '../def/build.js'
@@ -37,6 +30,8 @@ import {
tryInstallCargoBinary,
unlinkAsync,
writeFileAsync,
dirExistsAsync,
readdirAsync,
} from '../utils/index.js'
import { createCjsBinding, createEsmBinding } from './templates/index.js'
@@ -53,22 +48,14 @@ const debug = debugFactory('build')
const require = createRequire(import.meta.url)
type OutputKind = 'js' | 'dts' | 'node' | 'exe' | 'wasm'
type Output = {
kind: OutputKind
path: string
}
type Output = { kind: OutputKind; path: string }
type BuildOptions = RawBuildOptions & {
cargoOptions?: string[]
}
type BuildOptions = RawBuildOptions & { cargoOptions?: string[] }
export async function buildProject(options: BuildOptions) {
debug('napi build command receive options: %O', options)
options = {
dtsCache: true,
...options,
}
options = { dtsCache: true, ...options }
const cwd = options.cwd ?? process.cwd()
@@ -92,13 +79,6 @@ export async function buildProject(options: BuildOptions) {
)
}
const depsNapiPackages = pkg.dependencies
.map((p) => metadata.packages.find((c) => c.name === p.name))
.filter(
(c): c is Crate =>
c != null && c.dependencies.some((d) => d.name == 'napi-derive'),
)
const crateDir = parse(pkg.manifest_path).dir
const builder = new Builder(
@@ -121,7 +101,6 @@ export async function buildProject(options: BuildOptions) {
),
options.configPath ? resolvePath(options.configPath) : undefined,
),
depsNapiPackages,
)
return builder.build()
@@ -141,7 +120,6 @@ class Builder {
private readonly outputDir: string,
private readonly targetDir: string,
private readonly config: NapiConfig,
private readonly depsNapiPackages: Crate[],
) {}
get cdyLibName() {
@@ -323,10 +301,7 @@ class Builder {
const command =
process.env.CARGO ?? (this.options.useCross ? 'cross' : 'cargo')
const buildProcess = spawn(command, this.args, {
env: {
...process.env,
...this.envs,
},
env: { ...process.env, ...this.envs },
stdio: watch ? ['inherit', 'inherit', 'pipe'] : 'inherit',
cwd: this.cwd,
signal: controller.signal,
@@ -342,11 +317,7 @@ class Builder {
})
buildProcess.once('error', (e) => {
reject(
new Error(`Build failed with error: ${e.message}`, {
cause: e,
}),
)
reject(new Error(`Build failed with error: ${e.message}`, { cause: e }))
})
// watch mode only, they are piped through stderr
@@ -477,76 +448,8 @@ class Builder {
}
private setEnvs() {
// type definition intermediate file
this.envs.TYPE_DEF_TMP_PATH = this.getIntermediateTypeFile()
// Validate the packages and dependencies
if (existsSync(this.envs.TYPE_DEF_TMP_PATH)) {
const { depsNapiPackages, crate } = this
let shouldInvalidateSelf = false
const content = readFileSync(this.envs.TYPE_DEF_TMP_PATH, 'utf-8')
const typedefRaw = content
.split('\n')
.filter((line) => line.trim().length)
.map((line) => {
const [pkgName, ...jsonContents] = split(line, ':')
return {
pkgName,
jsonContent: jsonContents.join(':'),
}
})
const groupedTypedefRaw = groupBy(typedefRaw, ({ pkgName }) => pkgName)
const invalidPackages = new Set<string>()
for (const [pkgName, value] of Object.entries(groupedTypedefRaw)) {
const packageInvalidKey = `NAPI_PACKAGE_${pkgName.toUpperCase().replaceAll('-', '_')}_INVALID`
let done = false
for (const { jsonContent } of value) {
try {
// package_name: { "done": true } was written by napi-build in project build.rs
// if it exists, the package was successfully built and typedef was written
const json = JSON.parse(jsonContent)
if (json.done === true) {
done = true
break
}
} catch {
done = false
break
}
}
if (!done) {
process.env[packageInvalidKey] = `${Date.now()}`
shouldInvalidateSelf = true
invalidPackages.add(pkgName)
}
}
const typedefPackages = new Set(Object.keys(groupedTypedefRaw))
for (const crate of depsNapiPackages) {
if (!typedefPackages.has(crate.name)) {
debug('Set invalid package typedef: %i', crate.name)
shouldInvalidateSelf = true
process.env[
`NAPI_PACKAGE_${crate.name.toUpperCase().replaceAll('-', '_')}_INVALID`
] = `${Date.now()}`
}
}
if (shouldInvalidateSelf) {
debug('Set invalid package typedef: %i', crate.name)
process.env[
`NAPI_PACKAGE_${crate.name.toUpperCase().replaceAll('-', '_')}_INVALID`
] = `${Date.now()}`
}
// clean invalid packages typedef
if (invalidPackages.size) {
debug('Clean invalid packages typedef: %O', invalidPackages)
writeFileSync(
this.envs.TYPE_DEF_TMP_PATH,
typedefRaw
.filter(({ pkgName }) => !invalidPackages.has(pkgName))
.map(({ pkgName, jsonContent }) => `${pkgName}:${jsonContent}`)
.join('\n'),
)
}
}
// folder for intermediate type definition files
this.envs.TYPE_DEF_TMP_FOLDER = this.generateIntermediateTypeDefFolder()
// RUSTFLAGS
let rustflags =
@@ -725,22 +628,25 @@ class Builder {
return this
}
private getIntermediateTypeFile() {
const dtsPath = join(
tmpdir(),
private generateIntermediateTypeDefFolder() {
let folder = join(
this.targetDir,
'napi-rs',
`${this.crate.name}-${createHash('sha256')
.update(this.crate.manifest_path)
.update(CLI_VERSION)
.digest('hex')
.substring(0, 8)}.napi_type_def`,
.substring(0, 8)}`,
)
if (!this.options.dtsCache) {
try {
unlinkSync(dtsPath)
} catch {}
return `${dtsPath}_${Date.now()}.tmp`
rmSync(folder, { recursive: true, force: true })
folder += `_${Date.now()}`
}
return `${dtsPath}.tmp`
mkdirAsync(folder, { recursive: true })
return folder
}
private async postBuild() {
@@ -838,9 +744,7 @@ class Builder {
})
return wasmBinaryName ? join(this.outputDir, wasmBinaryName) : null
} catch (e) {
throw new Error('Failed to copy artifact', {
cause: e,
})
throw new Error('Failed to copy artifact', { cause: e })
}
}
@@ -889,41 +793,76 @@ class Builder {
}
private async generateTypeDef() {
if (!(await fileExists(this.envs.TYPE_DEF_TMP_PATH))) {
const typeDefDir = this.envs.TYPE_DEF_TMP_FOLDER
if (!(await dirExistsAsync(typeDefDir))) {
return []
}
const dest = join(this.outputDir, this.options.dts ?? 'index.d.ts')
const { dts, exports } = await processTypeDef(
this.envs.TYPE_DEF_TMP_PATH,
this.options.constEnum ?? this.config.constEnum ?? true,
!this.options.noDtsHeader
? (this.options.dtsHeader ??
(this.config.dtsHeaderFile
? await readFileAsync(
join(this.cwd, this.config.dtsHeaderFile),
'utf-8',
).catch(() => {
debug.warn(
`Failed to read dts header file ${this.config.dtsHeaderFile}`,
)
return null
})
: null) ??
this.config.dtsHeader ??
DEFAULT_TYPE_DEF_HEADER)
: '',
)
let header = ''
let dts = ''
let exports: string[] = []
if (!this.options.noDtsHeader) {
if (this.options.dtsHeader) {
header = this.options.dtsHeader
} else if (this.config.dtsHeaderFile) {
try {
header = await readFileAsync(
join(this.cwd, this.config.dtsHeaderFile),
'utf-8',
)
} catch (e) {
debug.warn(
`Failed to read dts header file ${this.config.dtsHeaderFile}`,
e,
)
}
} else {
header = DEFAULT_TYPE_DEF_HEADER
}
}
const files = await readdirAsync(typeDefDir, { withFileTypes: true })
if (!files.length) {
debug('No type def files found. Skip generating dts file.')
return []
}
for (const file of files) {
if (!file.isFile()) {
continue
}
const { dts: fileDts, exports: fileExports } = await processTypeDef(
join(typeDefDir, file.name),
this.options.constEnum ?? this.config.constEnum ?? true,
)
dts += fileDts
exports.push(...fileExports)
}
if (dts.indexOf('ExternalObject<') > -1) {
header += `
export declare class ExternalObject<T> {
readonly '': {
readonly '': unique symbol
[K: symbol]: T
}
}
`
}
dts = header + dts
try {
debug('Writing type def to:')
debug(' %i', dest)
await writeFileAsync(dest, dts, 'utf-8')
this.outputs.push({
kind: 'dts',
path: dest,
})
this.outputs.push({ kind: 'dts', path: dest })
} catch (e) {
debug.error('Failed to write type def file')
debug.error(e as Error)
@@ -956,10 +895,7 @@ class Builder {
debug('Writing js binding to:')
debug(' %i', dest)
await writeFileAsync(dest, binding, 'utf-8')
return {
kind: 'js',
path: dest,
} satisfies Output
return { kind: 'js', path: dest } satisfies Output
} catch (e) {
throw new Error('Failed to write js binding file', { cause: e })
}
@@ -1029,26 +965,11 @@ class Builder {
`export * from '${this.config.packageName}-wasm32-wasi'\n`,
)
return [
{
kind: 'js',
path: bindingPath,
},
{
kind: 'js',
path: browserBindingPath,
},
{
kind: 'js',
path: workerPath,
},
{
kind: 'js',
path: browserWorkerPath,
},
{
kind: 'js',
path: browserEntryPath,
},
{ kind: 'js', path: bindingPath },
{ kind: 'js', path: browserBindingPath },
{ kind: 'js', path: workerPath },
{ kind: 'js', path: browserWorkerPath },
{ kind: 'js', path: browserEntryPath },
] satisfies Output[]
}
return []

View File

@@ -66,14 +66,7 @@ Generated by [AVA](https://avajs.dev).
> Snapshot 1
`␊
export declare class ExternalObject<T> {␊
readonly '': {␊
readonly '': unique symbol␊
[K: symbol]: T␊
}␊
}␊
/**␊
`/**␊
* \`constructor\` option for \`struct\` requires all fields to be public,␊
* otherwise tag impl fn as constructor␊
* #[napi(constructor)]␊
@@ -620,14 +613,7 @@ Generated by [AVA](https://avajs.dev).
> Snapshot 1
`␊
export declare class ExternalObject<T> {␊
readonly '': {␊
readonly '': unique symbol␊
[K: symbol]: T␊
}␊
}␊
/**␊
`/**␊
* \`constructor\` option for \`struct\` requires all fields to be public,␊
* otherwise tag impl fn as constructor␊
* #[napi(constructor)]␊

View File

@@ -7,19 +7,11 @@ import {
stat,
readdir,
} from 'node:fs'
import { createRequire } from 'node:module'
import { promisify } from 'node:util'
import pkgJson from '../../package.json' with { type: 'json' }
import { debug } from './log.js'
const require = createRequire(import.meta.url)
// NOTE:
// import pkgJson from '@napi-rs/cli/package.json' assert { type: 'json' }
// is experimental feature now, avoid using it.
// see: https://nodejs.org/api/esm.html#import-assertions
// eslint-disable-next-line import/no-extraneous-dependencies
const pkgJson = require('@napi-rs/cli/package.json')
export const readFileAsync = promisify(readFile)
export const writeFileAsync = promisify(writeFile)
export const unlinkAsync = promisify(unlink)
@@ -35,6 +27,15 @@ export async function fileExists(path: string) {
return exists
}
export async function dirExistsAsync(path: string) {
try {
const stats = await statAsync(path)
return stats.isDirectory()
} catch {
return false
}
}
export function pick<O, K extends keyof O>(o: O, ...keys: K[]): Pick<O, K> {
return keys.reduce((acc, key) => {
acc[key] = o[key]

View File

@@ -1,4 +1,4 @@
import { sortBy, unionWith, isEqual } from 'lodash-es'
import { sortBy } from 'lodash-es'
import { readFileAsync } from './misc.js'
@@ -85,14 +85,11 @@ function exportDeclare(ambient: boolean): string {
export async function processTypeDef(
intermediateTypeFile: string,
constEnum: boolean,
header?: string,
) {
const exports: string[] = []
const defs = await readIntermediateTypeFile(intermediateTypeFile)
const groupedDefs = preprocessTypeDef(defs)
header = header ?? ''
const dts =
sortBy(Array.from(groupedDefs), ([namespace]) => namespace)
.map(([namespace, defs]) => {
@@ -130,41 +127,22 @@ export async function processTypeDef(
})
.join('\n\n') + '\n'
if (dts.indexOf('ExternalObject<') > -1) {
header += `
export declare class ExternalObject<T> {
readonly '': {
readonly '': unique symbol
[K: symbol]: T
}
}
`
}
return {
dts: header + dts,
dts,
exports,
}
}
async function readIntermediateTypeFile(file: string) {
const content = await readFileAsync(file, 'utf8')
const defs = unionWith(
content
.split('\n')
.filter(Boolean)
.map((line) => {
line = line.trim()
if (!line.startsWith('{')) {
// crateName:{ "def": "", ... }
const start = line.indexOf(':') + 1
line = line.slice(start)
}
return JSON.parse(line) as TypeDefLine
})
.filter((def) => !!def.kind),
(a, b) => isEqual(a, b),
)
const defs = content
.split('\n')
.filter(Boolean)
.map((line) => {
line = line.trim()
return JSON.parse(line) as TypeDefLine
})
// move all `struct` def to the very top
// and order the rest alphabetically.
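Since the Rust side no longer prefixes each line with `crate-name:`, a per-crate intermediate file is now plain newline-delimited JSON, and the `unionWith`/`isEqual` dedup is gone. A hedged sketch of the parsing plus the ordering rule described in the comment above — `TypeDefLine` here is a stand-in for the CLI's own type, and the real code additionally groups definitions by namespace and may not alphabetize the structs themselves:

```ts
import { readFile } from 'node:fs/promises'

// Stand-in for the CLI's TypeDefLine: the fields written by the Rust
// `Display for TypeDef` impl, minus the old `crate-name:` prefix.
// (Other optional fields omitted.)
interface TypeDefLine {
  kind: string // e.g. 'struct', 'fn', ...
  name: string
  js_doc: string
  def: string
  js_mod?: string
}

async function readIntermediateTypeFile(file: string): Promise<TypeDefLine[]> {
  const content = await readFile(file, 'utf8')
  const defs = content
    .split('\n')
    .filter(Boolean)
    .map((line) => JSON.parse(line.trim()) as TypeDefLine)
  // structs move to the very top, everything else is ordered alphabetically
  return defs.sort((a, b) => {
    if (a.kind === 'struct' && b.kind !== 'struct') return -1
    if (a.kind !== 'struct' && b.kind === 'struct') return 1
    return a.name.localeCompare(b.name)
  })
}
```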

View File

@@ -103,7 +103,6 @@ fn escape_json(src: &str) -> String {
impl Display for TypeDef {
fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
let pkg_name = std::env::var("CARGO_PKG_NAME").expect("CARGO_PKG_NAME is not set");
let js_mod = if let Some(js_mod) = &self.js_mod {
format!(", \"js_mod\": \"{}\"", js_mod)
} else {
@@ -115,11 +114,9 @@ impl Display for TypeDef {
"".to_string()
};
let prefix = format!("{}:", pkg_name);
write!(
f,
r#"{}{{"kind": "{}", "name": "{}", "js_doc": "{}", "def": "{}"{}{}}}"#,
prefix,
r#"{{"kind": "{}", "name": "{}", "js_doc": "{}", "def": "{}"{}{}}}"#,
self.kind,
self.name,
escape_json(&self.js_doc),

View File

@@ -23,7 +23,7 @@ type-def = ["napi-derive-backend/type-def", "ctor"]
[dependencies]
convert_case = "0.8"
ctor = { version = "0.4", features = ["dtor"], optional = true }
ctor = { version = "0.4", optional = true }
napi-derive-backend = { version = "2.0.0-beta.3", path = "../backend" }
proc-macro2 = "1"
quote = "1"

View File

@@ -1,3 +1,5 @@
mod typedef;
#[cfg(feature = "noop")]
mod noop;
#[cfg(feature = "noop")]

View File

@@ -1,18 +1,9 @@
#[cfg(feature = "type-def")]
use std::env;
#[cfg(feature = "type-def")]
use std::fs;
#[cfg(feature = "type-def")]
use std::io::{BufWriter, Write};
#[cfg(feature = "type-def")]
use std::sync::atomic::{AtomicBool, Ordering};
use super::typedef;
use crate::parser::{attrs::BindgenAttrs, ParseNapi};
use napi_derive_backend::{BindgenResult, TryToTokens};
#[cfg(feature = "type-def")]
use napi_derive_backend::{Napi, ToTypeDef};
use proc_macro2::TokenStream;
use quote::ToTokens;
use std::sync::atomic::{AtomicBool, Ordering};
use syn::{Attribute, Item};
/// a flag indicating whether at least one `napi` macro has been expanded.
@@ -24,52 +15,17 @@ use syn::{Attribute, Item};
/// }
///
/// ```
#[cfg(feature = "type-def")]
static BUILT_FLAG: AtomicBool = AtomicBool::new(false);
#[cfg(feature = "type-def")]
#[ctor::dtor]
fn dtor() {
if let Ok(ref type_def_file) = env::var("TYPE_DEF_TMP_PATH") {
let package_name = std::env::var("CARGO_PKG_NAME").expect("CARGO_PKG_NAME is not set");
if let Ok(f) = fs::OpenOptions::new()
.read(true)
.append(true)
.open(type_def_file)
{
let mut writer = BufWriter::<fs::File>::new(f);
if let Err(err) = writer
.write_all(format!("{package_name}:{{\"done\": true}}\n").as_bytes())
.and_then(|_| writer.flush())
{
eprintln!(
"Failed to write type def file for `{package_name}`: {:?}",
err
);
}
}
}
}
pub fn expand(attr: TokenStream, input: TokenStream) -> BindgenResult<TokenStream> {
#[cfg(feature = "type-def")]
if BUILT_FLAG
.compare_exchange(false, true, Ordering::Acquire, Ordering::Relaxed)
.is_ok()
// logic on first macro expansion
if let Ok(built) = BUILT_FLAG.compare_exchange(false, true, Ordering::Acquire, Ordering::Relaxed)
{
// logic on first macro expansion
prepare_type_def_file();
if let Ok(wasi_register_file) = env::var("WASI_REGISTER_TMP_PATH") {
if let Err(_e) = remove_existed_def_file(&wasi_register_file) {
#[cfg(debug_assertions)]
{
println!("Failed to manipulate wasi register file: {:?}", _e);
}
}
if !built {
typedef::prepare_type_def_file();
}
}
let mut item = syn::parse2::<Item>(input)?;
let opts: BindgenAttrs = syn::parse2(attr)?;
let mut tokens = proc_macro2::TokenStream::new();
@@ -110,11 +66,7 @@ pub fn expand(attr: TokenStream, input: TokenStream) -> BindgenResult<TokenStrea
let napi = item.parse_napi(&mut tokens, &item_opts)?;
item_opts.check_used()?;
napi.try_to_tokens(&mut tokens)?;
#[cfg(feature = "type-def")]
{
output_type_def(&napi);
}
typedef::output_type_def(&napi);
} else {
item.to_tokens(&mut tokens);
};
@@ -136,36 +88,11 @@ pub fn expand(attr: TokenStream, input: TokenStream) -> BindgenResult<TokenStrea
let napi = item.parse_napi(&mut tokens, &opts)?;
opts.check_used()?;
napi.try_to_tokens(&mut tokens)?;
#[cfg(feature = "type-def")]
{
output_type_def(&napi);
}
typedef::output_type_def(&napi);
Ok(tokens)
}
}
#[cfg(feature = "type-def")]
fn output_type_def(napi: &Napi) {
if let Ok(type_def_file) = env::var("TYPE_DEF_TMP_PATH") {
if let Some(type_def) = napi.to_type_def() {
fs::OpenOptions::new()
.append(true)
.create(true)
.open(type_def_file)
.and_then(|file| {
let mut writer = BufWriter::<fs::File>::new(file);
writer.write_all(type_def.to_string().as_bytes())?;
writer.write_all("\n".as_bytes())?;
writer.flush()
})
.unwrap_or_else(|e| {
println!("Failed to write type def file: {:?}", e);
});
}
}
}
fn replace_napi_attr_in_mod(
js_namespace: String,
attrs: &mut Vec<syn::Attribute>,
@@ -198,43 +125,3 @@ fn replace_napi_attr_in_mod(
None
}
}
#[cfg(feature = "type-def")]
fn prepare_type_def_file() {
if let Ok(ref type_def_file) = env::var("TYPE_DEF_TMP_PATH") {
if let Err(_e) = remove_existed_def_file(type_def_file) {
#[cfg(debug_assertions)]
{
println!("Failed to manipulate type def file: {:?}", _e);
}
}
}
}
#[cfg(feature = "type-def")]
fn remove_existed_def_file(def_file: &str) -> std::io::Result<()> {
use std::io::{BufRead, BufReader};
let pkg_name = std::env::var("CARGO_PKG_NAME").expect("CARGO_PKG_NAME is not set");
if let Ok(content) = std::fs::File::open(def_file) {
let reader = BufReader::new(content);
let cleaned_content = reader
.lines()
.filter_map(|line| {
if let Ok(line) = line {
if let Some((package_name, _)) = line.split_once(':') {
if pkg_name == package_name {
return None;
}
}
Some(line)
} else {
None
}
})
.collect::<Vec<String>>()
.join("\n");
std::fs::write(def_file, format!("{cleaned_content}\n"))?;
}
Ok(())
}

View File

@@ -0,0 +1,9 @@
#[cfg(feature = "type-def")]
pub mod typedef;
#[cfg(feature = "type-def")]
pub use self::typedef::*;
#[cfg(not(feature = "type-def"))]
pub mod noop;
#[cfg(not(feature = "type-def"))]
pub use self::noop::*;

View File

@@ -0,0 +1,4 @@
use napi_derive_backend::Napi;
pub fn prepare_type_def_file() {}
pub fn output_type_def(_napi: &Napi) {}

View File

@@ -0,0 +1,62 @@
use std::cell::LazyCell;
use std::env;
use std::env::VarError;
use std::fs;
use std::io::{BufWriter, Write};
use std::path::PathBuf;
use napi_derive_backend::{Napi, ToTypeDef};
const PKG_NAME: LazyCell<String> =
LazyCell::new(|| env::var("CARGO_PKG_NAME").expect("Expected `CARGO_PKG_NAME` to be set"));
const TYPE_DEF_FOLDER: LazyCell<Result<String, VarError>> =
LazyCell::new(|| env::var("TYPE_DEF_TMP_FOLDER"));
fn get_type_def_file() -> Option<PathBuf> {
if let Ok(folder) = TYPE_DEF_FOLDER.as_deref() {
let file = PathBuf::from(folder).join(&*PKG_NAME);
Some(file)
} else {
if let Ok(_) = env::var("TYPE_DEF_TMP_PATH") {
panic!("Expected `TYPE_DEF_TMP_FOLDER` to be set. It may caused by an older version of '@napi-rs/cli' used. Please upgrade to the latest version.");
}
None
}
}
pub fn prepare_type_def_file() {
remove_existed_def_file();
}
fn remove_existed_def_file() {
if let Some(file) = get_type_def_file() {
if file.exists() {
if let Err(_e) = fs::remove_file(&file) {
#[cfg(debug_assertions)]
{
println!("Failed to manipulate type def file {:?}: {:?}", file, _e);
}
}
}
}
}
pub fn output_type_def(napi: &Napi) {
if let Some(file) = get_type_def_file() {
if let Some(type_def) = napi.to_type_def() {
fs::OpenOptions::new()
.append(true)
.create(true)
.open(&file)
.and_then(|file| {
let mut writer = BufWriter::<fs::File>::new(file);
writer.write_all(type_def.to_string().as_bytes())?;
writer.write_all("\n".as_bytes())?;
writer.flush()
})
.unwrap_or_else(|e| {
println!("Failed to write type def file: {:?}", e);
});
}
}
}

View File

@@ -824,10 +824,6 @@ Generated by [AVA](https://avajs.dev).
export declare function setSymbolInObj(symbol: symbol): object␊
export interface Shared {␊
value: number␊
}␊
export declare function shorterEscapableScope(createString: () => string | null): string␊
export declare function shorterScope(arr: unknown[]): Array<number>
@@ -1022,4 +1018,7 @@ Generated by [AVA](https://avajs.dev).
export function xxh128(input: Buffer): bigint␊
export function xxh3_64(input: Buffer): bigint␊
}␊
export interface Shared {␊
value: number␊
}␊
`

View File

@@ -786,10 +786,6 @@ export declare function setNullByteProperty(obj: object): void
export declare function setSymbolInObj(symbol: symbol): object
export interface Shared {
value: number
}
export declare function shorterEscapableScope(createString: () => string | null): string
export declare function shorterScope(arr: unknown[]): Array<number>
@@ -984,3 +980,6 @@ export declare namespace xxh3 {
export function xxh128(input: Buffer): bigint
export function xxh3_64(input: Buffer): bigint
}
export interface Shared {
value: number
}