Mirror of https://github.com/serverless/serverless.git (synced 2026-01-18 14:58:43 +00:00)

docs: add scripts package.json

This commit is contained in:
parent 60142a2d10
commit 58d00d03d7
2  .github/workflows/sync-docs.yml  (vendored)
@@ -5,6 +5,8 @@ name: Sync Docs
on:
  push:
    branches: [main]
    paths:
      - 'docs/**'
  pull_request:
    paths:
      - 'docs/**'
@@ -96,7 +96,6 @@
    "@serverless/test": "^11.1.1",
    "@serverlessinc/standards": "*",
    "adm-zip": "^0.5.10",
    "algoliasearch": "^4.23.3",
    "aws4": "^1.12.0",
    "chai": "^4.3.7",
    "chai-as-promised": "^7.1.1",
@@ -104,13 +103,11 @@
    "eslint": "^8.57.0",
    "git-list-updated": "^1.2.1",
    "github-release-from-cc-changelog": "^2.3.0",
    "gray-matter": "^4.0.3",
    "husky": "^4.3.8",
    "jszip": "^3.10.1",
    "lint-staged": "^13.2.2",
    "log": "^6.3.1",
    "log-node": "^8.0.3",
    "marked": "^13.0.0",
    "mocha": "^9.2.2",
    "mock-require": "^3.0.3",
    "ncjsm": "^4.3.2",
@@ -120,7 +117,6 @@
    "sinon": "^13.0.2",
    "sinon-chai": "^3.7.0",
    "standard-version": "^9.5.0",
    "striptags": "^3.2.0",
    "tsx": "^4.15.6",
    "xml2js": "^0.4.23"
  },
1  scripts/node_modules/.bin/esparse  (generated, vendored, symbolic link)
@@ -0,0 +1 @@
../esprima/bin/esparse.js
1  scripts/node_modules/.bin/esvalidate  (generated, vendored, symbolic link)
@@ -0,0 +1 @@
../esprima/bin/esvalidate.js
1  scripts/node_modules/.bin/js-yaml  (generated, vendored, symbolic link)
@@ -0,0 +1 @@
../js-yaml/bin/js-yaml.js
1  scripts/node_modules/.bin/marked  (generated, vendored, symbolic link)
@@ -0,0 +1 @@
../marked/bin/marked.js
305  scripts/node_modules/.package-lock.json  (generated, vendored, normal file)
@@ -0,0 +1,305 @@
{
|
||||
"name": "scripts",
|
||||
"version": "1.0.0",
|
||||
"lockfileVersion": 3,
|
||||
"requires": true,
|
||||
"packages": {
|
||||
"node_modules/@algolia/cache-browser-local-storage": {
|
||||
"version": "4.24.0",
|
||||
"resolved": "https://registry.npmjs.org/@algolia/cache-browser-local-storage/-/cache-browser-local-storage-4.24.0.tgz",
|
||||
"integrity": "sha512-t63W9BnoXVrGy9iYHBgObNXqYXM3tYXCjDSHeNwnsc324r4o5UiVKUiAB4THQ5z9U5hTj6qUvwg/Ez43ZD85ww==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"@algolia/cache-common": "4.24.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@algolia/cache-common": {
|
||||
"version": "4.24.0",
|
||||
"resolved": "https://registry.npmjs.org/@algolia/cache-common/-/cache-common-4.24.0.tgz",
|
||||
"integrity": "sha512-emi+v+DmVLpMGhp0V9q9h5CdkURsNmFC+cOS6uK9ndeJm9J4TiqSvPYVu+THUP8P/S08rxf5x2P+p3CfID0Y4g==",
|
||||
"dev": true
|
||||
},
|
||||
"node_modules/@algolia/cache-in-memory": {
|
||||
"version": "4.24.0",
|
||||
"resolved": "https://registry.npmjs.org/@algolia/cache-in-memory/-/cache-in-memory-4.24.0.tgz",
|
||||
"integrity": "sha512-gDrt2so19jW26jY3/MkFg5mEypFIPbPoXsQGQWAi6TrCPsNOSEYepBMPlucqWigsmEy/prp5ug2jy/N3PVG/8w==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"@algolia/cache-common": "4.24.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@algolia/client-account": {
|
||||
"version": "4.24.0",
|
||||
"resolved": "https://registry.npmjs.org/@algolia/client-account/-/client-account-4.24.0.tgz",
|
||||
"integrity": "sha512-adcvyJ3KjPZFDybxlqnf+5KgxJtBjwTPTeyG2aOyoJvx0Y8dUQAEOEVOJ/GBxX0WWNbmaSrhDURMhc+QeevDsA==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"@algolia/client-common": "4.24.0",
|
||||
"@algolia/client-search": "4.24.0",
|
||||
"@algolia/transporter": "4.24.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@algolia/client-analytics": {
|
||||
"version": "4.24.0",
|
||||
"resolved": "https://registry.npmjs.org/@algolia/client-analytics/-/client-analytics-4.24.0.tgz",
|
||||
"integrity": "sha512-y8jOZt1OjwWU4N2qr8G4AxXAzaa8DBvyHTWlHzX/7Me1LX8OayfgHexqrsL4vSBcoMmVw2XnVW9MhL+Y2ZDJXg==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"@algolia/client-common": "4.24.0",
|
||||
"@algolia/client-search": "4.24.0",
|
||||
"@algolia/requester-common": "4.24.0",
|
||||
"@algolia/transporter": "4.24.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@algolia/client-common": {
|
||||
"version": "4.24.0",
|
||||
"resolved": "https://registry.npmjs.org/@algolia/client-common/-/client-common-4.24.0.tgz",
|
||||
"integrity": "sha512-bc2ROsNL6w6rqpl5jj/UywlIYC21TwSSoFHKl01lYirGMW+9Eek6r02Tocg4gZ8HAw3iBvu6XQiM3BEbmEMoiA==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"@algolia/requester-common": "4.24.0",
|
||||
"@algolia/transporter": "4.24.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@algolia/client-personalization": {
|
||||
"version": "4.24.0",
|
||||
"resolved": "https://registry.npmjs.org/@algolia/client-personalization/-/client-personalization-4.24.0.tgz",
|
||||
"integrity": "sha512-l5FRFm/yngztweU0HdUzz1rC4yoWCFo3IF+dVIVTfEPg906eZg5BOd1k0K6rZx5JzyyoP4LdmOikfkfGsKVE9w==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"@algolia/client-common": "4.24.0",
|
||||
"@algolia/requester-common": "4.24.0",
|
||||
"@algolia/transporter": "4.24.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@algolia/client-search": {
|
||||
"version": "4.24.0",
|
||||
"resolved": "https://registry.npmjs.org/@algolia/client-search/-/client-search-4.24.0.tgz",
|
||||
"integrity": "sha512-uRW6EpNapmLAD0mW47OXqTP8eiIx5F6qN9/x/7HHO6owL3N1IXqydGwW5nhDFBrV+ldouro2W1VX3XlcUXEFCA==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"@algolia/client-common": "4.24.0",
|
||||
"@algolia/requester-common": "4.24.0",
|
||||
"@algolia/transporter": "4.24.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@algolia/logger-common": {
|
||||
"version": "4.24.0",
|
||||
"resolved": "https://registry.npmjs.org/@algolia/logger-common/-/logger-common-4.24.0.tgz",
|
||||
"integrity": "sha512-LLUNjkahj9KtKYrQhFKCzMx0BY3RnNP4FEtO+sBybCjJ73E8jNdaKJ/Dd8A/VA4imVHP5tADZ8pn5B8Ga/wTMA==",
|
||||
"dev": true
|
||||
},
|
||||
"node_modules/@algolia/logger-console": {
|
||||
"version": "4.24.0",
|
||||
"resolved": "https://registry.npmjs.org/@algolia/logger-console/-/logger-console-4.24.0.tgz",
|
||||
"integrity": "sha512-X4C8IoHgHfiUROfoRCV+lzSy+LHMgkoEEU1BbKcsfnV0i0S20zyy0NLww9dwVHUWNfPPxdMU+/wKmLGYf96yTg==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"@algolia/logger-common": "4.24.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@algolia/recommend": {
|
||||
"version": "4.24.0",
|
||||
"resolved": "https://registry.npmjs.org/@algolia/recommend/-/recommend-4.24.0.tgz",
|
||||
"integrity": "sha512-P9kcgerfVBpfYHDfVZDvvdJv0lEoCvzNlOy2nykyt5bK8TyieYyiD0lguIJdRZZYGre03WIAFf14pgE+V+IBlw==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"@algolia/cache-browser-local-storage": "4.24.0",
|
||||
"@algolia/cache-common": "4.24.0",
|
||||
"@algolia/cache-in-memory": "4.24.0",
|
||||
"@algolia/client-common": "4.24.0",
|
||||
"@algolia/client-search": "4.24.0",
|
||||
"@algolia/logger-common": "4.24.0",
|
||||
"@algolia/logger-console": "4.24.0",
|
||||
"@algolia/requester-browser-xhr": "4.24.0",
|
||||
"@algolia/requester-common": "4.24.0",
|
||||
"@algolia/requester-node-http": "4.24.0",
|
||||
"@algolia/transporter": "4.24.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@algolia/requester-browser-xhr": {
|
||||
"version": "4.24.0",
|
||||
"resolved": "https://registry.npmjs.org/@algolia/requester-browser-xhr/-/requester-browser-xhr-4.24.0.tgz",
|
||||
"integrity": "sha512-Z2NxZMb6+nVXSjF13YpjYTdvV3032YTBSGm2vnYvYPA6mMxzM3v5rsCiSspndn9rzIW4Qp1lPHBvuoKJV6jnAA==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"@algolia/requester-common": "4.24.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@algolia/requester-common": {
|
||||
"version": "4.24.0",
|
||||
"resolved": "https://registry.npmjs.org/@algolia/requester-common/-/requester-common-4.24.0.tgz",
|
||||
"integrity": "sha512-k3CXJ2OVnvgE3HMwcojpvY6d9kgKMPRxs/kVohrwF5WMr2fnqojnycZkxPoEg+bXm8fi5BBfFmOqgYztRtHsQA==",
|
||||
"dev": true
|
||||
},
|
||||
"node_modules/@algolia/requester-node-http": {
|
||||
"version": "4.24.0",
|
||||
"resolved": "https://registry.npmjs.org/@algolia/requester-node-http/-/requester-node-http-4.24.0.tgz",
|
||||
"integrity": "sha512-JF18yTjNOVYvU/L3UosRcvbPMGT9B+/GQWNWnenIImglzNVGpyzChkXLnrSf6uxwVNO6ESGu6oN8MqcGQcjQJw==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"@algolia/requester-common": "4.24.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@algolia/transporter": {
|
||||
"version": "4.24.0",
|
||||
"resolved": "https://registry.npmjs.org/@algolia/transporter/-/transporter-4.24.0.tgz",
|
||||
"integrity": "sha512-86nI7w6NzWxd1Zp9q3413dRshDqAzSbsQjhcDhPIatEFiZrL1/TjnHL8S7jVKFePlIMzDsZWXAXwXzcok9c5oA==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"@algolia/cache-common": "4.24.0",
|
||||
"@algolia/logger-common": "4.24.0",
|
||||
"@algolia/requester-common": "4.24.0"
|
||||
}
|
||||
},
|
||||
"node_modules/algoliasearch": {
|
||||
"version": "4.24.0",
|
||||
"resolved": "https://registry.npmjs.org/algoliasearch/-/algoliasearch-4.24.0.tgz",
|
||||
"integrity": "sha512-bf0QV/9jVejssFBmz2HQLxUadxk574t4iwjCKp5E7NBzwKkrDEhKPISIIjAU/p6K5qDx3qoeh4+26zWN1jmw3g==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"@algolia/cache-browser-local-storage": "4.24.0",
|
||||
"@algolia/cache-common": "4.24.0",
|
||||
"@algolia/cache-in-memory": "4.24.0",
|
||||
"@algolia/client-account": "4.24.0",
|
||||
"@algolia/client-analytics": "4.24.0",
|
||||
"@algolia/client-common": "4.24.0",
|
||||
"@algolia/client-personalization": "4.24.0",
|
||||
"@algolia/client-search": "4.24.0",
|
||||
"@algolia/logger-common": "4.24.0",
|
||||
"@algolia/logger-console": "4.24.0",
|
||||
"@algolia/recommend": "4.24.0",
|
||||
"@algolia/requester-browser-xhr": "4.24.0",
|
||||
"@algolia/requester-common": "4.24.0",
|
||||
"@algolia/requester-node-http": "4.24.0",
|
||||
"@algolia/transporter": "4.24.0"
|
||||
}
|
||||
},
|
||||
"node_modules/argparse": {
|
||||
"version": "1.0.10",
|
||||
"resolved": "https://registry.npmjs.org/argparse/-/argparse-1.0.10.tgz",
|
||||
"integrity": "sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"sprintf-js": "~1.0.2"
|
||||
}
|
||||
},
|
||||
"node_modules/esprima": {
|
||||
"version": "4.0.1",
|
||||
"resolved": "https://registry.npmjs.org/esprima/-/esprima-4.0.1.tgz",
|
||||
"integrity": "sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A==",
|
||||
"dev": true,
|
||||
"bin": {
|
||||
"esparse": "bin/esparse.js",
|
||||
"esvalidate": "bin/esvalidate.js"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=4"
|
||||
}
|
||||
},
|
||||
"node_modules/extend-shallow": {
|
||||
"version": "2.0.1",
|
||||
"resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz",
|
||||
"integrity": "sha512-zCnTtlxNoAiDc3gqY2aYAWFx7XWWiasuF2K8Me5WbN8otHKTUKBwjPtNpRs/rbUZm7KxWAaNj7P1a/p52GbVug==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"is-extendable": "^0.1.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=0.10.0"
|
||||
}
|
||||
},
|
||||
"node_modules/gray-matter": {
|
||||
"version": "4.0.3",
|
||||
"resolved": "https://registry.npmjs.org/gray-matter/-/gray-matter-4.0.3.tgz",
|
||||
"integrity": "sha512-5v6yZd4JK3eMI3FqqCouswVqwugaA9r4dNZB1wwcmrD02QkV5H0y7XBQW8QwQqEaZY1pM9aqORSORhJRdNK44Q==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"js-yaml": "^3.13.1",
|
||||
"kind-of": "^6.0.2",
|
||||
"section-matter": "^1.0.0",
|
||||
"strip-bom-string": "^1.0.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=6.0"
|
||||
}
|
||||
},
|
||||
"node_modules/is-extendable": {
|
||||
"version": "0.1.1",
|
||||
"resolved": "https://registry.npmjs.org/is-extendable/-/is-extendable-0.1.1.tgz",
|
||||
"integrity": "sha512-5BMULNob1vgFX6EjQw5izWDxrecWK9AM72rugNr0TFldMOi0fj6Jk+zeKIt0xGj4cEfQIJth4w3OKWOJ4f+AFw==",
|
||||
"dev": true,
|
||||
"engines": {
|
||||
"node": ">=0.10.0"
|
||||
}
|
||||
},
|
||||
"node_modules/js-yaml": {
|
||||
"version": "3.14.1",
|
||||
"resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-3.14.1.tgz",
|
||||
"integrity": "sha512-okMH7OXXJ7YrN9Ok3/SXrnu4iX9yOk+25nqX4imS2npuvTYDmo/QEZoqwZkYaIDk3jVvBOTOIEgEhaLOynBS9g==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"argparse": "^1.0.7",
|
||||
"esprima": "^4.0.0"
|
||||
},
|
||||
"bin": {
|
||||
"js-yaml": "bin/js-yaml.js"
|
||||
}
|
||||
},
|
||||
"node_modules/kind-of": {
|
||||
"version": "6.0.3",
|
||||
"resolved": "https://registry.npmjs.org/kind-of/-/kind-of-6.0.3.tgz",
|
||||
"integrity": "sha512-dcS1ul+9tmeD95T+x28/ehLgd9mENa3LsvDTtzm3vyBEO7RPptvAD+t44WVXaUjTBRcrpFeFlC8WCruUR456hw==",
|
||||
"dev": true,
|
||||
"engines": {
|
||||
"node": ">=0.10.0"
|
||||
}
|
||||
},
|
||||
"node_modules/marked": {
|
||||
"version": "13.0.1",
|
||||
"resolved": "https://registry.npmjs.org/marked/-/marked-13.0.1.tgz",
|
||||
"integrity": "sha512-7kBohS6GrZKvCsNXZyVVXSW7/hGBHe49ng99YPkDCckSUrrG7MSFLCexsRxptzOmyW2eT5dySh4Md1V6my52fA==",
|
||||
"dev": true,
|
||||
"bin": {
|
||||
"marked": "bin/marked.js"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">= 18"
|
||||
}
|
||||
},
|
||||
"node_modules/section-matter": {
|
||||
"version": "1.0.0",
|
||||
"resolved": "https://registry.npmjs.org/section-matter/-/section-matter-1.0.0.tgz",
|
||||
"integrity": "sha512-vfD3pmTzGpufjScBh50YHKzEu2lxBWhVEHsNGoEXmCmn2hKGfeNLYMzCJpe8cD7gqX7TJluOVpBkAequ6dgMmA==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"extend-shallow": "^2.0.1",
|
||||
"kind-of": "^6.0.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=4"
|
||||
}
|
||||
},
|
||||
"node_modules/sprintf-js": {
|
||||
"version": "1.0.3",
|
||||
"resolved": "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.0.3.tgz",
|
||||
"integrity": "sha512-D9cPgkvLlV3t3IzL0D0YLvGA9Ahk4PcvVwUbN0dSGr1aP0Nrt4AEnTUbuGvquEC0mA64Gqt1fzirlRs5ibXx8g==",
|
||||
"dev": true
|
||||
},
|
||||
"node_modules/strip-bom-string": {
|
||||
"version": "1.0.0",
|
||||
"resolved": "https://registry.npmjs.org/strip-bom-string/-/strip-bom-string-1.0.0.tgz",
|
||||
"integrity": "sha512-uCC2VHvQRYu+lMh4My/sFNmF2klFymLX1wHJeXnbEJERpV/ZsVuonzerjfrGpIGF7LBVa1O7i9kjiWvJiFck8g==",
|
||||
"dev": true,
|
||||
"engines": {
|
||||
"node": ">=0.10.0"
|
||||
}
|
||||
},
|
||||
"node_modules/striptags": {
|
||||
"version": "3.2.0",
|
||||
"resolved": "https://registry.npmjs.org/striptags/-/striptags-3.2.0.tgz",
|
||||
"integrity": "sha512-g45ZOGzHDMe2bdYMdIvdAfCQkCTDMGBazSw1ypMowwGIee7ZQ5dU0rBJ8Jqgl+jAKIv4dbeE1jscZq9wid1Tkw==",
|
||||
"dev": true
|
||||
}
|
||||
}
|
||||
}
|
||||
83  scripts/node_modules/@algolia/cache-browser-local-storage/dist/cache-browser-local-storage.cjs.js  (generated, vendored, normal file)
@@ -0,0 +1,83 @@
'use strict';
|
||||
|
||||
Object.defineProperty(exports, '__esModule', { value: true });
|
||||
|
||||
function createBrowserLocalStorageCache(options) {
|
||||
const namespaceKey = `algoliasearch-client-js-${options.key}`;
|
||||
// eslint-disable-next-line functional/no-let
|
||||
let storage;
|
||||
const getStorage = () => {
|
||||
if (storage === undefined) {
|
||||
storage = options.localStorage || window.localStorage;
|
||||
}
|
||||
return storage;
|
||||
};
|
||||
const getNamespace = () => {
|
||||
return JSON.parse(getStorage().getItem(namespaceKey) || '{}');
|
||||
};
|
||||
const setNamespace = (namespace) => {
|
||||
getStorage().setItem(namespaceKey, JSON.stringify(namespace));
|
||||
};
|
||||
const removeOutdatedCacheItems = () => {
|
||||
const timeToLive = options.timeToLive ? options.timeToLive * 1000 : null;
|
||||
const namespace = getNamespace();
|
||||
const filteredNamespaceWithoutOldFormattedCacheItems = Object.fromEntries(Object.entries(namespace).filter(([, cacheItem]) => {
|
||||
return cacheItem.timestamp !== undefined;
|
||||
}));
|
||||
setNamespace(filteredNamespaceWithoutOldFormattedCacheItems);
|
||||
if (!timeToLive)
|
||||
return;
|
||||
const filteredNamespaceWithoutExpiredItems = Object.fromEntries(Object.entries(filteredNamespaceWithoutOldFormattedCacheItems).filter(([, cacheItem]) => {
|
||||
const currentTimestamp = new Date().getTime();
|
||||
const isExpired = cacheItem.timestamp + timeToLive < currentTimestamp;
|
||||
return !isExpired;
|
||||
}));
|
||||
setNamespace(filteredNamespaceWithoutExpiredItems);
|
||||
};
|
||||
return {
|
||||
get(key, defaultValue, events = {
|
||||
miss: () => Promise.resolve(),
|
||||
}) {
|
||||
return Promise.resolve()
|
||||
.then(() => {
|
||||
removeOutdatedCacheItems();
|
||||
const keyAsString = JSON.stringify(key);
|
||||
return getNamespace()[keyAsString];
|
||||
})
|
||||
.then(value => {
|
||||
return Promise.all([value ? value.value : defaultValue(), value !== undefined]);
|
||||
})
|
||||
.then(([value, exists]) => {
|
||||
return Promise.all([value, exists || events.miss(value)]);
|
||||
})
|
||||
.then(([value]) => value);
|
||||
},
|
||||
set(key, value) {
|
||||
return Promise.resolve().then(() => {
|
||||
const namespace = getNamespace();
|
||||
// eslint-disable-next-line functional/immutable-data
|
||||
namespace[JSON.stringify(key)] = {
|
||||
timestamp: new Date().getTime(),
|
||||
value,
|
||||
};
|
||||
getStorage().setItem(namespaceKey, JSON.stringify(namespace));
|
||||
return value;
|
||||
});
|
||||
},
|
||||
delete(key) {
|
||||
return Promise.resolve().then(() => {
|
||||
const namespace = getNamespace();
|
||||
// eslint-disable-next-line functional/immutable-data
|
||||
delete namespace[JSON.stringify(key)];
|
||||
getStorage().setItem(namespaceKey, JSON.stringify(namespace));
|
||||
});
|
||||
},
|
||||
clear() {
|
||||
return Promise.resolve().then(() => {
|
||||
getStorage().removeItem(namespaceKey);
|
||||
});
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
exports.createBrowserLocalStorageCache = createBrowserLocalStorageCache;
|
||||
31  scripts/node_modules/@algolia/cache-browser-local-storage/dist/cache-browser-local-storage.d.ts  (generated, vendored, normal file)
@@ -0,0 +1,31 @@
import { Cache as Cache_2 } from '@algolia/cache-common';

export declare type BrowserLocalStorageCacheItem = {
    /**
     * The cache item creation timestamp.
     */
    readonly timestamp: number;
    /**
     * The cache item value
     */
    readonly value: any;
};

export declare type BrowserLocalStorageOptions = {
    /**
     * The cache key.
     */
    readonly key: string;
    /**
     * The time to live for each cached item in seconds.
     */
    readonly timeToLive?: number;
    /**
     * The native local storage implementation.
     */
    readonly localStorage?: Storage;
};

export declare function createBrowserLocalStorageCache(options: BrowserLocalStorageOptions): Cache_2;

export { }
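Note (not part of the diff): a minimal usage sketch of the cache API declared above, based only on these type declarations and the vendored implementation in this commit. The cache key, TTL, and the cachedSearch helper are illustrative assumptions.

import { createBrowserLocalStorageCache } from '@algolia/cache-browser-local-storage';

// Entries are stored under the `algoliasearch-client-js-<key>` namespace in
// window.localStorage and are discarded once `timeToLive` seconds have passed.
const cache = createBrowserLocalStorageCache({
  key: 'docs-search',   // assumed cache key; any string works
  timeToLive: 600,      // optional TTL, in seconds
});

async function cachedSearch(query: string) {
  // `get` takes a default-value factory that is invoked (and reported through
  // the optional `miss` event) when the key is absent or expired.
  return cache.get(
    { query },
    () => Promise.resolve({ hits: [] as string[] }),
    { miss: () => Promise.resolve() }
  );
}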
79  scripts/node_modules/@algolia/cache-browser-local-storage/dist/cache-browser-local-storage.esm.js  (generated, vendored, normal file)
@@ -0,0 +1,79 @@
function createBrowserLocalStorageCache(options) {
|
||||
const namespaceKey = `algoliasearch-client-js-${options.key}`;
|
||||
// eslint-disable-next-line functional/no-let
|
||||
let storage;
|
||||
const getStorage = () => {
|
||||
if (storage === undefined) {
|
||||
storage = options.localStorage || window.localStorage;
|
||||
}
|
||||
return storage;
|
||||
};
|
||||
const getNamespace = () => {
|
||||
return JSON.parse(getStorage().getItem(namespaceKey) || '{}');
|
||||
};
|
||||
const setNamespace = (namespace) => {
|
||||
getStorage().setItem(namespaceKey, JSON.stringify(namespace));
|
||||
};
|
||||
const removeOutdatedCacheItems = () => {
|
||||
const timeToLive = options.timeToLive ? options.timeToLive * 1000 : null;
|
||||
const namespace = getNamespace();
|
||||
const filteredNamespaceWithoutOldFormattedCacheItems = Object.fromEntries(Object.entries(namespace).filter(([, cacheItem]) => {
|
||||
return cacheItem.timestamp !== undefined;
|
||||
}));
|
||||
setNamespace(filteredNamespaceWithoutOldFormattedCacheItems);
|
||||
if (!timeToLive)
|
||||
return;
|
||||
const filteredNamespaceWithoutExpiredItems = Object.fromEntries(Object.entries(filteredNamespaceWithoutOldFormattedCacheItems).filter(([, cacheItem]) => {
|
||||
const currentTimestamp = new Date().getTime();
|
||||
const isExpired = cacheItem.timestamp + timeToLive < currentTimestamp;
|
||||
return !isExpired;
|
||||
}));
|
||||
setNamespace(filteredNamespaceWithoutExpiredItems);
|
||||
};
|
||||
return {
|
||||
get(key, defaultValue, events = {
|
||||
miss: () => Promise.resolve(),
|
||||
}) {
|
||||
return Promise.resolve()
|
||||
.then(() => {
|
||||
removeOutdatedCacheItems();
|
||||
const keyAsString = JSON.stringify(key);
|
||||
return getNamespace()[keyAsString];
|
||||
})
|
||||
.then(value => {
|
||||
return Promise.all([value ? value.value : defaultValue(), value !== undefined]);
|
||||
})
|
||||
.then(([value, exists]) => {
|
||||
return Promise.all([value, exists || events.miss(value)]);
|
||||
})
|
||||
.then(([value]) => value);
|
||||
},
|
||||
set(key, value) {
|
||||
return Promise.resolve().then(() => {
|
||||
const namespace = getNamespace();
|
||||
// eslint-disable-next-line functional/immutable-data
|
||||
namespace[JSON.stringify(key)] = {
|
||||
timestamp: new Date().getTime(),
|
||||
value,
|
||||
};
|
||||
getStorage().setItem(namespaceKey, JSON.stringify(namespace));
|
||||
return value;
|
||||
});
|
||||
},
|
||||
delete(key) {
|
||||
return Promise.resolve().then(() => {
|
||||
const namespace = getNamespace();
|
||||
// eslint-disable-next-line functional/immutable-data
|
||||
delete namespace[JSON.stringify(key)];
|
||||
getStorage().setItem(namespaceKey, JSON.stringify(namespace));
|
||||
});
|
||||
},
|
||||
clear() {
|
||||
return Promise.resolve().then(() => {
|
||||
getStorage().removeItem(namespaceKey);
|
||||
});
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
export { createBrowserLocalStorageCache };
|
||||
2  scripts/node_modules/@algolia/cache-browser-local-storage/index.js  (generated, vendored, normal file)
@@ -0,0 +1,2 @@
// eslint-disable-next-line functional/immutable-data, import/no-commonjs
module.exports = require('./dist/cache-browser-local-storage.cjs.js');
22  scripts/node_modules/@algolia/cache-browser-local-storage/package.json  (generated, vendored, normal file)
@@ -0,0 +1,22 @@
{
|
||||
"name": "@algolia/cache-browser-local-storage",
|
||||
"version": "4.24.0",
|
||||
"private": false,
|
||||
"description": "Promise-based cache library for browser using local storage.",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "git://github.com/algolia/algoliasearch-client-javascript.git"
|
||||
},
|
||||
"license": "MIT",
|
||||
"sideEffects": false,
|
||||
"main": "index.js",
|
||||
"module": "dist/cache-browser-local-storage.esm.js",
|
||||
"types": "dist/cache-browser-local-storage.d.ts",
|
||||
"files": [
|
||||
"index.js",
|
||||
"dist"
|
||||
],
|
||||
"dependencies": {
|
||||
"@algolia/cache-common": "4.24.0"
|
||||
}
|
||||
}
|
||||
61  scripts/node_modules/@algolia/cache-common/dist/cache-common.cjs.js  (generated, vendored, normal file)
@@ -0,0 +1,61 @@
'use strict';
|
||||
|
||||
Object.defineProperty(exports, '__esModule', { value: true });
|
||||
|
||||
// @todo Add logger on options to debug when caches go wrong.
|
||||
function createFallbackableCache(options) {
|
||||
const caches = [...options.caches];
|
||||
const current = caches.shift(); // eslint-disable-line functional/immutable-data
|
||||
if (current === undefined) {
|
||||
return createNullCache();
|
||||
}
|
||||
return {
|
||||
get(key, defaultValue, events = {
|
||||
miss: () => Promise.resolve(),
|
||||
}) {
|
||||
return current.get(key, defaultValue, events).catch(() => {
|
||||
return createFallbackableCache({ caches }).get(key, defaultValue, events);
|
||||
});
|
||||
},
|
||||
set(key, value) {
|
||||
return current.set(key, value).catch(() => {
|
||||
return createFallbackableCache({ caches }).set(key, value);
|
||||
});
|
||||
},
|
||||
delete(key) {
|
||||
return current.delete(key).catch(() => {
|
||||
return createFallbackableCache({ caches }).delete(key);
|
||||
});
|
||||
},
|
||||
clear() {
|
||||
return current.clear().catch(() => {
|
||||
return createFallbackableCache({ caches }).clear();
|
||||
});
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
function createNullCache() {
|
||||
return {
|
||||
get(_key, defaultValue, events = {
|
||||
miss: () => Promise.resolve(),
|
||||
}) {
|
||||
const value = defaultValue();
|
||||
return value
|
||||
.then(result => Promise.all([result, events.miss(result)]))
|
||||
.then(([result]) => result);
|
||||
},
|
||||
set(_key, value) {
|
||||
return Promise.resolve(value);
|
||||
},
|
||||
delete(_key) {
|
||||
return Promise.resolve();
|
||||
},
|
||||
clear() {
|
||||
return Promise.resolve();
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
exports.createFallbackableCache = createFallbackableCache;
|
||||
exports.createNullCache = createNullCache;
|
||||
41  scripts/node_modules/@algolia/cache-common/dist/cache-common.d.ts  (generated, vendored, normal file)
@@ -0,0 +1,41 @@
import { Cache as Cache_3 } from '@algolia/cache-common';

declare type Cache_2 = {
    /**
     * Gets the value of the given `key`.
     */
    readonly get: <TValue>(key: object | string, defaultValue: () => Readonly<Promise<TValue>>, events?: CacheEvents<TValue>) => Readonly<Promise<TValue>>;
    /**
     * Sets the given value with the given `key`.
     */
    readonly set: <TValue>(key: object | string, value: TValue) => Readonly<Promise<TValue>>;
    /**
     * Deletes the given `key`.
     */
    readonly delete: (key: object | string) => Readonly<Promise<void>>;
    /**
     * Clears the cache.
     */
    readonly clear: () => Readonly<Promise<void>>;
};
export { Cache_2 as Cache }

export declare type CacheEvents<TValue> = {
    /**
     * The callback when the given `key` is missing from the cache.
     */
    readonly miss: (value: TValue) => Readonly<Promise<any>>;
};

export declare function createFallbackableCache(options: FallbackableCacheOptions): Cache_2;

export declare function createNullCache(): Cache_2;

export declare type FallbackableCacheOptions = {
    /**
     * List of caches order by priority.
     */
    readonly caches: readonly Cache_3[];
};

export { }
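Note (not part of the diff): a minimal sketch of how the fallbackable cache declared above can be composed, using only functions whose signatures appear elsewhere in this commit. The cache ordering and the 'v1' key are illustrative assumptions.

import { createFallbackableCache, createNullCache } from '@algolia/cache-common';
import { createInMemoryCache } from '@algolia/cache-in-memory';
import { createBrowserLocalStorageCache } from '@algolia/cache-browser-local-storage';

// Caches are tried in order; when one rejects (e.g. localStorage is blocked),
// the fallbackable wrapper retries the remaining caches and finally falls
// back to the no-op null cache.
const responsesCache = createFallbackableCache({
  caches: [
    createBrowserLocalStorageCache({ key: 'v1' }),  // key value is an assumption
    createInMemoryCache({ serializable: true }),
    createNullCache(),
  ],
});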
56  scripts/node_modules/@algolia/cache-common/dist/cache-common.esm.js  (generated, vendored, normal file)
@@ -0,0 +1,56 @@
// @todo Add logger on options to debug when caches go wrong.
|
||||
function createFallbackableCache(options) {
|
||||
const caches = [...options.caches];
|
||||
const current = caches.shift(); // eslint-disable-line functional/immutable-data
|
||||
if (current === undefined) {
|
||||
return createNullCache();
|
||||
}
|
||||
return {
|
||||
get(key, defaultValue, events = {
|
||||
miss: () => Promise.resolve(),
|
||||
}) {
|
||||
return current.get(key, defaultValue, events).catch(() => {
|
||||
return createFallbackableCache({ caches }).get(key, defaultValue, events);
|
||||
});
|
||||
},
|
||||
set(key, value) {
|
||||
return current.set(key, value).catch(() => {
|
||||
return createFallbackableCache({ caches }).set(key, value);
|
||||
});
|
||||
},
|
||||
delete(key) {
|
||||
return current.delete(key).catch(() => {
|
||||
return createFallbackableCache({ caches }).delete(key);
|
||||
});
|
||||
},
|
||||
clear() {
|
||||
return current.clear().catch(() => {
|
||||
return createFallbackableCache({ caches }).clear();
|
||||
});
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
function createNullCache() {
|
||||
return {
|
||||
get(_key, defaultValue, events = {
|
||||
miss: () => Promise.resolve(),
|
||||
}) {
|
||||
const value = defaultValue();
|
||||
return value
|
||||
.then(result => Promise.all([result, events.miss(result)]))
|
||||
.then(([result]) => result);
|
||||
},
|
||||
set(_key, value) {
|
||||
return Promise.resolve(value);
|
||||
},
|
||||
delete(_key) {
|
||||
return Promise.resolve();
|
||||
},
|
||||
clear() {
|
||||
return Promise.resolve();
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
export { createFallbackableCache, createNullCache };
|
||||
2  scripts/node_modules/@algolia/cache-common/index.js  (generated, vendored, normal file)
@@ -0,0 +1,2 @@
// eslint-disable-next-line functional/immutable-data, import/no-commonjs
module.exports = require('./dist/cache-common.cjs.js');
19  scripts/node_modules/@algolia/cache-common/package.json  (generated, vendored, normal file)
@@ -0,0 +1,19 @@
{
|
||||
"name": "@algolia/cache-common",
|
||||
"version": "4.24.0",
|
||||
"private": false,
|
||||
"description": "Common interfaces for promise-based caching libraries",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "git://github.com/algolia/algoliasearch-client-js.git"
|
||||
},
|
||||
"license": "MIT",
|
||||
"sideEffects": false,
|
||||
"main": "index.js",
|
||||
"module": "dist/cache-common.esm.js",
|
||||
"types": "dist/cache-common.d.ts",
|
||||
"files": [
|
||||
"index.js",
|
||||
"dist"
|
||||
]
|
||||
}
|
||||
37  scripts/node_modules/@algolia/cache-in-memory/dist/cache-in-memory.cjs.js  (generated, vendored, normal file)
@@ -0,0 +1,37 @@
'use strict';
|
||||
|
||||
Object.defineProperty(exports, '__esModule', { value: true });
|
||||
|
||||
function createInMemoryCache(options = { serializable: true }) {
|
||||
// eslint-disable-next-line functional/no-let
|
||||
let cache = {};
|
||||
return {
|
||||
get(key, defaultValue, events = {
|
||||
miss: () => Promise.resolve(),
|
||||
}) {
|
||||
const keyAsString = JSON.stringify(key);
|
||||
if (keyAsString in cache) {
|
||||
return Promise.resolve(options.serializable ? JSON.parse(cache[keyAsString]) : cache[keyAsString]);
|
||||
}
|
||||
const promise = defaultValue();
|
||||
const miss = (events && events.miss) || (() => Promise.resolve());
|
||||
return promise.then((value) => miss(value)).then(() => promise);
|
||||
},
|
||||
set(key, value) {
|
||||
// eslint-disable-next-line functional/immutable-data
|
||||
cache[JSON.stringify(key)] = options.serializable ? JSON.stringify(value) : value;
|
||||
return Promise.resolve(value);
|
||||
},
|
||||
delete(key) {
|
||||
// eslint-disable-next-line functional/immutable-data
|
||||
delete cache[JSON.stringify(key)];
|
||||
return Promise.resolve();
|
||||
},
|
||||
clear() {
|
||||
cache = {};
|
||||
return Promise.resolve();
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
exports.createInMemoryCache = createInMemoryCache;
|
||||
12  scripts/node_modules/@algolia/cache-in-memory/dist/cache-in-memory.d.ts  (generated, vendored, normal file)
@@ -0,0 +1,12 @@
import { Cache as Cache_2 } from '@algolia/cache-common';

export declare function createInMemoryCache(options?: InMemoryCacheOptions): Cache_2;

export declare type InMemoryCacheOptions = {
    /**
     * If keys and values should be serialized using `JSON.stringify`.
     */
    readonly serializable?: boolean;
};

export { }
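Note (not part of the diff): a short sketch of the in-memory cache declared above. The `serializable: false` choice and the demo key are illustrative assumptions shown only to contrast with the default behaviour.

import { createInMemoryCache } from '@algolia/cache-in-memory';

// With the default `serializable: true`, values are stored as JSON strings
// and parsed again on `get`; with `false`, the original object is kept as-is.
const cache = createInMemoryCache({ serializable: false });

async function demo() {
  const key = { indexName: 'docs' };          // keys may be objects or strings
  await cache.set(key, { hits: 3 });
  const hit = await cache.get(key, () => Promise.resolve({ hits: 0 }));
  await cache.delete(key);
  return hit;                                 // -> { hits: 3 }
}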
33  scripts/node_modules/@algolia/cache-in-memory/dist/cache-in-memory.esm.js  (generated, vendored, normal file)
@@ -0,0 +1,33 @@
function createInMemoryCache(options = { serializable: true }) {
|
||||
// eslint-disable-next-line functional/no-let
|
||||
let cache = {};
|
||||
return {
|
||||
get(key, defaultValue, events = {
|
||||
miss: () => Promise.resolve(),
|
||||
}) {
|
||||
const keyAsString = JSON.stringify(key);
|
||||
if (keyAsString in cache) {
|
||||
return Promise.resolve(options.serializable ? JSON.parse(cache[keyAsString]) : cache[keyAsString]);
|
||||
}
|
||||
const promise = defaultValue();
|
||||
const miss = (events && events.miss) || (() => Promise.resolve());
|
||||
return promise.then((value) => miss(value)).then(() => promise);
|
||||
},
|
||||
set(key, value) {
|
||||
// eslint-disable-next-line functional/immutable-data
|
||||
cache[JSON.stringify(key)] = options.serializable ? JSON.stringify(value) : value;
|
||||
return Promise.resolve(value);
|
||||
},
|
||||
delete(key) {
|
||||
// eslint-disable-next-line functional/immutable-data
|
||||
delete cache[JSON.stringify(key)];
|
||||
return Promise.resolve();
|
||||
},
|
||||
clear() {
|
||||
cache = {};
|
||||
return Promise.resolve();
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
export { createInMemoryCache };
|
||||
2  scripts/node_modules/@algolia/cache-in-memory/index.js  (generated, vendored, normal file)
@@ -0,0 +1,2 @@
// eslint-disable-next-line functional/immutable-data, import/no-commonjs
module.exports = require('./dist/cache-in-memory.cjs.js');
22  scripts/node_modules/@algolia/cache-in-memory/package.json  (generated, vendored, normal file)
@@ -0,0 +1,22 @@
{
|
||||
"name": "@algolia/cache-in-memory",
|
||||
"version": "4.24.0",
|
||||
"private": false,
|
||||
"description": "Promise-based cache library using memory.",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "git://github.com/algolia/algoliasearch-client-javascript.git"
|
||||
},
|
||||
"license": "MIT",
|
||||
"sideEffects": false,
|
||||
"main": "index.js",
|
||||
"module": "dist/cache-in-memory.esm.js",
|
||||
"types": "dist/cache-in-memory.d.ts",
|
||||
"files": [
|
||||
"index.js",
|
||||
"dist"
|
||||
],
|
||||
"dependencies": {
|
||||
"@algolia/cache-common": "4.24.0"
|
||||
}
|
||||
}
|
||||
69  scripts/node_modules/@algolia/client-account/dist/client-account.cjs.js  (generated, vendored, normal file)
@@ -0,0 +1,69 @@
'use strict';
|
||||
|
||||
Object.defineProperty(exports, '__esModule', { value: true });
|
||||
|
||||
var clientCommon = require('@algolia/client-common');
|
||||
var clientSearch = require('@algolia/client-search');
|
||||
|
||||
function createDestinationIndiceExistsError() {
|
||||
return {
|
||||
name: 'DestinationIndiceAlreadyExistsError',
|
||||
message: 'Destination indice already exists.',
|
||||
};
|
||||
}
|
||||
|
||||
function createIndicesInSameAppError(appId) {
|
||||
return {
|
||||
name: 'IndicesInTheSameAppError',
|
||||
message: 'Indices are in the same application. Use SearchClient.copyIndex instead.',
|
||||
appId,
|
||||
};
|
||||
}
|
||||
|
||||
const accountCopyIndex = (source, destination, requestOptions) => {
|
||||
// eslint-disable-next-line functional/prefer-readonly-type
|
||||
const responses = [];
|
||||
const promise = clientSearch.exists(destination)()
|
||||
.then(res => {
|
||||
if (source.appId === destination.appId) {
|
||||
throw createIndicesInSameAppError(source.appId);
|
||||
}
|
||||
if (res) {
|
||||
throw createDestinationIndiceExistsError();
|
||||
}
|
||||
})
|
||||
.then(() => clientSearch.getSettings(source)())
|
||||
.then(settings =>
|
||||
// eslint-disable-next-line functional/immutable-data
|
||||
responses.push(clientSearch.setSettings(destination)(settings, requestOptions)))
|
||||
.then(() => clientSearch.browseRules(source)({
|
||||
// eslint-disable-next-line functional/immutable-data
|
||||
batch: rules => responses.push(clientSearch.saveRules(destination)(rules, requestOptions)),
|
||||
}))
|
||||
.then(() => clientSearch.browseSynonyms(source)({
|
||||
// eslint-disable-next-line functional/immutable-data
|
||||
batch: synonyms => responses.push(clientSearch.saveSynonyms(destination)(synonyms, requestOptions)),
|
||||
}))
|
||||
.then(() => clientSearch.browseObjects(source)({
|
||||
// eslint-disable-next-line functional/immutable-data
|
||||
batch: objects => responses.push(clientSearch.saveObjects(destination)(objects, requestOptions)),
|
||||
}));
|
||||
return clientCommon.createWaitablePromise(
|
||||
/**
|
||||
* The original promise will return an array of async responses, now
|
||||
* we need to resolve that array of async responses using a
|
||||
* `Promise.all`, and then resolve `void` for the end-user.
|
||||
*/
|
||||
promise.then(() => Promise.all(responses)).then(() => undefined),
|
||||
/**
|
||||
* Next, if the end-user calls the `wait` method, we need to also call
|
||||
* the `wait` method on each element of of async responses.
|
||||
*/
|
||||
(_response, waitRequestOptions) => {
|
||||
return Promise.all(responses.map(response => response.wait(waitRequestOptions)));
|
||||
});
|
||||
};
|
||||
|
||||
exports.accountCopyIndex = accountCopyIndex;
|
||||
exports.createDestinationIndiceExistsError = createDestinationIndiceExistsError;
|
||||
exports.createIndicesInSameAppError = createIndicesInSameAppError;
|
||||
18  scripts/node_modules/@algolia/client-account/dist/client-account.d.ts  (generated, vendored, normal file)
@@ -0,0 +1,18 @@
import { RequestOptions } from '@algolia/transporter';
import { SearchIndex } from '@algolia/client-search';
import { WaitablePromise } from '@algolia/client-common';

export declare const accountCopyIndex: (source: SearchIndex, destination: SearchIndex, requestOptions?: RequestOptions | undefined) => WaitablePromise<void>;

export declare function createDestinationIndiceExistsError(): Error;

export declare function createIndicesInSameAppError(appId: string): IndicesInSameAppError;

export declare type IndicesInSameAppError = Error & {
    /**
     * The app id.
     */
    readonly appId: string;
};

export { }
63  scripts/node_modules/@algolia/client-account/dist/client-account.esm.js  (generated, vendored, normal file)
@@ -0,0 +1,63 @@
import { createWaitablePromise } from '@algolia/client-common';
|
||||
import { exists, getSettings, setSettings, browseRules, saveRules, browseSynonyms, saveSynonyms, browseObjects, saveObjects } from '@algolia/client-search';
|
||||
|
||||
function createDestinationIndiceExistsError() {
|
||||
return {
|
||||
name: 'DestinationIndiceAlreadyExistsError',
|
||||
message: 'Destination indice already exists.',
|
||||
};
|
||||
}
|
||||
|
||||
function createIndicesInSameAppError(appId) {
|
||||
return {
|
||||
name: 'IndicesInTheSameAppError',
|
||||
message: 'Indices are in the same application. Use SearchClient.copyIndex instead.',
|
||||
appId,
|
||||
};
|
||||
}
|
||||
|
||||
const accountCopyIndex = (source, destination, requestOptions) => {
|
||||
// eslint-disable-next-line functional/prefer-readonly-type
|
||||
const responses = [];
|
||||
const promise = exists(destination)()
|
||||
.then(res => {
|
||||
if (source.appId === destination.appId) {
|
||||
throw createIndicesInSameAppError(source.appId);
|
||||
}
|
||||
if (res) {
|
||||
throw createDestinationIndiceExistsError();
|
||||
}
|
||||
})
|
||||
.then(() => getSettings(source)())
|
||||
.then(settings =>
|
||||
// eslint-disable-next-line functional/immutable-data
|
||||
responses.push(setSettings(destination)(settings, requestOptions)))
|
||||
.then(() => browseRules(source)({
|
||||
// eslint-disable-next-line functional/immutable-data
|
||||
batch: rules => responses.push(saveRules(destination)(rules, requestOptions)),
|
||||
}))
|
||||
.then(() => browseSynonyms(source)({
|
||||
// eslint-disable-next-line functional/immutable-data
|
||||
batch: synonyms => responses.push(saveSynonyms(destination)(synonyms, requestOptions)),
|
||||
}))
|
||||
.then(() => browseObjects(source)({
|
||||
// eslint-disable-next-line functional/immutable-data
|
||||
batch: objects => responses.push(saveObjects(destination)(objects, requestOptions)),
|
||||
}));
|
||||
return createWaitablePromise(
|
||||
/**
|
||||
* The original promise will return an array of async responses, now
|
||||
* we need to resolve that array of async responses using a
|
||||
* `Promise.all`, and then resolve `void` for the end-user.
|
||||
*/
|
||||
promise.then(() => Promise.all(responses)).then(() => undefined),
|
||||
/**
|
||||
* Next, if the end-user calls the `wait` method, we need to also call
|
||||
* the `wait` method on each element of of async responses.
|
||||
*/
|
||||
(_response, waitRequestOptions) => {
|
||||
return Promise.all(responses.map(response => response.wait(waitRequestOptions)));
|
||||
});
|
||||
};
|
||||
|
||||
export { accountCopyIndex, createDestinationIndiceExistsError, createIndicesInSameAppError };
|
||||
2  scripts/node_modules/@algolia/client-account/index.js  (generated, vendored, normal file)
@@ -0,0 +1,2 @@
// eslint-disable-next-line functional/immutable-data, import/no-commonjs
module.exports = require('./dist/client-account.cjs.js');
23  scripts/node_modules/@algolia/client-account/package.json  (generated, vendored, normal file)
@@ -0,0 +1,23 @@
{
|
||||
"name": "@algolia/client-account",
|
||||
"version": "4.24.0",
|
||||
"private": false,
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "git://github.com/algolia/algoliasearch-client-javascript.git"
|
||||
},
|
||||
"license": "MIT",
|
||||
"sideEffects": false,
|
||||
"main": "index.js",
|
||||
"module": "dist/client-account.esm.js",
|
||||
"types": "dist/client-account.d.ts",
|
||||
"files": [
|
||||
"index.js",
|
||||
"dist"
|
||||
],
|
||||
"dependencies": {
|
||||
"@algolia/client-common": "4.24.0",
|
||||
"@algolia/client-search": "4.24.0",
|
||||
"@algolia/transporter": "4.24.0"
|
||||
}
|
||||
}
|
||||
80  scripts/node_modules/@algolia/client-analytics/dist/client-analytics.cjs.js  (generated, vendored, normal file)
@@ -0,0 +1,80 @@
'use strict';
|
||||
|
||||
Object.defineProperty(exports, '__esModule', { value: true });
|
||||
|
||||
var clientCommon = require('@algolia/client-common');
|
||||
var transporter = require('@algolia/transporter');
|
||||
var requesterCommon = require('@algolia/requester-common');
|
||||
|
||||
const createAnalyticsClient = options => {
|
||||
const region = options.region || 'us';
|
||||
const auth = clientCommon.createAuth(clientCommon.AuthMode.WithinHeaders, options.appId, options.apiKey);
|
||||
const transporter$1 = transporter.createTransporter({
|
||||
hosts: [{ url: `analytics.${region}.algolia.com` }],
|
||||
...options,
|
||||
headers: {
|
||||
...auth.headers(),
|
||||
...{ 'content-type': 'application/json' },
|
||||
...options.headers,
|
||||
},
|
||||
queryParameters: {
|
||||
...auth.queryParameters(),
|
||||
...options.queryParameters,
|
||||
},
|
||||
});
|
||||
const appId = options.appId;
|
||||
return clientCommon.addMethods({ appId, transporter: transporter$1 }, options.methods);
|
||||
};
|
||||
|
||||
const addABTest = (base) => {
|
||||
return (abTest, requestOptions) => {
|
||||
return base.transporter.write({
|
||||
method: requesterCommon.MethodEnum.Post,
|
||||
path: '2/abtests',
|
||||
data: abTest,
|
||||
}, requestOptions);
|
||||
};
|
||||
};
|
||||
|
||||
const deleteABTest = (base) => {
|
||||
return (abTestID, requestOptions) => {
|
||||
return base.transporter.write({
|
||||
method: requesterCommon.MethodEnum.Delete,
|
||||
path: clientCommon.encode('2/abtests/%s', abTestID),
|
||||
}, requestOptions);
|
||||
};
|
||||
};
|
||||
|
||||
const getABTest = (base) => {
|
||||
return (abTestID, requestOptions) => {
|
||||
return base.transporter.read({
|
||||
method: requesterCommon.MethodEnum.Get,
|
||||
path: clientCommon.encode('2/abtests/%s', abTestID),
|
||||
}, requestOptions);
|
||||
};
|
||||
};
|
||||
|
||||
const getABTests = (base) => {
|
||||
return (requestOptions) => {
|
||||
return base.transporter.read({
|
||||
method: requesterCommon.MethodEnum.Get,
|
||||
path: '2/abtests',
|
||||
}, requestOptions);
|
||||
};
|
||||
};
|
||||
|
||||
const stopABTest = (base) => {
|
||||
return (abTestID, requestOptions) => {
|
||||
return base.transporter.write({
|
||||
method: requesterCommon.MethodEnum.Post,
|
||||
path: clientCommon.encode('2/abtests/%s/stop', abTestID),
|
||||
}, requestOptions);
|
||||
};
|
||||
};
|
||||
|
||||
exports.addABTest = addABTest;
|
||||
exports.createAnalyticsClient = createAnalyticsClient;
|
||||
exports.deleteABTest = deleteABTest;
|
||||
exports.getABTest = getABTest;
|
||||
exports.getABTests = getABTests;
|
||||
exports.stopABTest = stopABTest;
|
||||
251  scripts/node_modules/@algolia/client-analytics/dist/client-analytics.d.ts  (generated, vendored, normal file)
@@ -0,0 +1,251 @@
import { ClientTransporterOptions } from '@algolia/client-common';
|
||||
import { CreateClient } from '@algolia/client-common';
|
||||
import { RequestOptions } from '@algolia/transporter';
|
||||
import { SearchOptions } from '@algolia/client-search';
|
||||
import { Transporter } from '@algolia/transporter';
|
||||
|
||||
export declare type ABTest = {
|
||||
/**
|
||||
* The ab test name.
|
||||
*/
|
||||
readonly name: string;
|
||||
/**
|
||||
* The ab test list of variants.
|
||||
*/
|
||||
readonly variants: readonly Variant[];
|
||||
/**
|
||||
* The ab test end date, if any.
|
||||
*/
|
||||
readonly endAt: string;
|
||||
};
|
||||
|
||||
export declare const addABTest: (base: AnalyticsClient) => (abTest: ABTest, requestOptions?: RequestOptions | undefined) => Readonly<Promise<AddABTestResponse>>;
|
||||
|
||||
export declare type AddABTestResponse = {
|
||||
/**
|
||||
* The ab test unique identifier.
|
||||
*/
|
||||
abTestID: number;
|
||||
/**
|
||||
* The operation task id. May be used to perform a wait task.
|
||||
*/
|
||||
taskID: number;
|
||||
/**
|
||||
* The index name where the ab test is attached to.
|
||||
*/
|
||||
index: string;
|
||||
};
|
||||
|
||||
export declare type AnalyticsClient = {
|
||||
/**
|
||||
* The application id.
|
||||
*/
|
||||
readonly appId: string;
|
||||
/**
|
||||
* The underlying transporter.
|
||||
*/
|
||||
readonly transporter: Transporter;
|
||||
};
|
||||
|
||||
export declare type AnalyticsClientOptions = {
|
||||
/**
|
||||
* The application id.
|
||||
*/
|
||||
readonly appId: string;
|
||||
/**
|
||||
* The api key.
|
||||
*/
|
||||
readonly apiKey: string;
|
||||
/**
|
||||
* The prefered region.
|
||||
*/
|
||||
readonly region?: 'de' | 'us';
|
||||
};
|
||||
|
||||
export declare const createAnalyticsClient: CreateClient<AnalyticsClient, AnalyticsClientOptions & ClientTransporterOptions>;
|
||||
|
||||
export declare const deleteABTest: (base: AnalyticsClient) => (abTestID: number, requestOptions?: RequestOptions | undefined) => Readonly<Promise<DeleteABTestResponse>>;
|
||||
|
||||
export declare type DeleteABTestResponse = {
|
||||
/**
|
||||
* The ab test unique identifier.
|
||||
*/
|
||||
abTestID: number;
|
||||
/**
|
||||
* The operation task id. May be used to perform a wait task.
|
||||
*/
|
||||
taskID: number;
|
||||
/**
|
||||
* The index name where the ab test was attached to.
|
||||
*/
|
||||
index: string;
|
||||
};
|
||||
|
||||
export declare const getABTest: (base: AnalyticsClient) => (abTestID: number, requestOptions?: RequestOptions | undefined) => Readonly<Promise<GetABTestResponse>>;
|
||||
|
||||
export declare type GetABTestResponse = {
|
||||
/**
|
||||
* The ab test name.
|
||||
*/
|
||||
name: string;
|
||||
/**
|
||||
* The ab test status.
|
||||
*/
|
||||
status: string;
|
||||
/**
|
||||
* The ab test list of variants.
|
||||
*/
|
||||
variants: VariantResponse[];
|
||||
/**
|
||||
* The ab test end date, if any.
|
||||
*/
|
||||
endAt: string;
|
||||
/**
|
||||
* The ab test created date, if any.
|
||||
*/
|
||||
createdAt: string;
|
||||
/**
|
||||
* The ab test updated date.
|
||||
*/
|
||||
updatedAt: string;
|
||||
/**
|
||||
* The ab test unique identifier.
|
||||
*/
|
||||
abTestID: number;
|
||||
/**
|
||||
* The ab test significance based on click data. Should be higher than 0.95 to be considered significant - no matter which variant is winning.
|
||||
*/
|
||||
clickSignificance: number;
|
||||
/**
|
||||
*
|
||||
* The ab test significance based on conversion data. Should be higher than 0.95 to be considered significant - no matter which variant is winning.
|
||||
*/
|
||||
conversionSignificance: number;
|
||||
};
|
||||
|
||||
export declare const getABTests: (base: AnalyticsClient) => (requestOptions?: (RequestOptions & GetABTestsOptions) | undefined) => Readonly<Promise<GetABTestsResponse>>;
|
||||
|
||||
export declare type GetABTestsOptions = {
|
||||
/**
|
||||
* The number of ab tests to skip from the biginning of the list.
|
||||
*/
|
||||
readonly offset?: number;
|
||||
/**
|
||||
* The limit of the number of ab tests returned.
|
||||
*/
|
||||
readonly limit?: number;
|
||||
/**
|
||||
* Filters the returned ab tests by any indices starting with the
|
||||
* provided prefix that are assigned to either variant of an ab test.
|
||||
*/
|
||||
readonly indexPrefix?: string;
|
||||
/**
|
||||
* Filters the returned ab tests by any indices ending with the
|
||||
* provided suffix that are assigned to either variant of an ab test.
|
||||
*/
|
||||
readonly indexSuffix?: string;
|
||||
};
|
||||
|
||||
export declare type GetABTestsResponse = {
|
||||
/**
|
||||
* The number of ab tests within this response.
|
||||
*/
|
||||
count: number;
|
||||
/**
|
||||
* The total of ab tests.
|
||||
*/
|
||||
total: number;
|
||||
/**
|
||||
* The list of ab tests.
|
||||
*/
|
||||
abtests: GetABTestResponse[] | null;
|
||||
};
|
||||
|
||||
export declare const stopABTest: (base: AnalyticsClient) => (abTestID: number, requestOptions?: RequestOptions | undefined) => Readonly<Promise<StopABTestResponse>>;
|
||||
|
||||
export declare type StopABTestResponse = {
|
||||
/**
|
||||
* The ab test unique identifier.
|
||||
*/
|
||||
abTestID: number;
|
||||
/**
|
||||
* The operation task id. May be used to perform a wait task.
|
||||
*/
|
||||
taskID: number;
|
||||
/**
|
||||
* The index name where the ab test is attached to.
|
||||
*/
|
||||
index: string;
|
||||
};
|
||||
|
||||
export declare type Variant = {
|
||||
/**
|
||||
* The index name.
|
||||
*/
|
||||
readonly index: string;
|
||||
/**
|
||||
* Description of the variant. Useful when seing the results in the dashboard or via the API.
|
||||
*/
|
||||
readonly description?: string;
|
||||
/**
|
||||
* Percentage of the traffic that should be going to the variant. The sum of the percentage should be equal to 100.
|
||||
*/
|
||||
readonly trafficPercentage: number;
|
||||
/**
|
||||
* The search parameters.
|
||||
*/
|
||||
readonly customSearchParameters?: SearchOptions;
|
||||
};
|
||||
|
||||
export declare type VariantResponse = Variant & {
|
||||
/**
|
||||
* Average click position for the variant.
|
||||
*/
|
||||
averageClickPosition?: number;
|
||||
/**
|
||||
* Distinct click count for the variant.
|
||||
*/
|
||||
clickCount?: number;
|
||||
/**
|
||||
* Click through rate for the variant.
|
||||
*/
|
||||
clickThroughRate?: number;
|
||||
/**
|
||||
* Click through rate for the variant.
|
||||
*/
|
||||
conversionCount?: number;
|
||||
/**
|
||||
* Distinct conversion count for the variant.
|
||||
*/
|
||||
conversionRate?: number;
|
||||
/**
|
||||
* No result count.
|
||||
*/
|
||||
noResultCount?: number;
|
||||
/**
|
||||
* Tracked search count.
|
||||
*/
|
||||
trackedSearchCount?: number;
|
||||
/**
|
||||
* Search count.
|
||||
*/
|
||||
searchCount?: number;
|
||||
/**
|
||||
* User count.
|
||||
*/
|
||||
userCount?: number;
|
||||
/**
|
||||
* Count of the tracked searches attributed to outlier traffic that were removed from the A/B test.
|
||||
*/
|
||||
outlierTrackedSearchesCount?: number;
|
||||
/**
|
||||
* Count of users attributed to outlier traffic that were removed from the A/B test.
|
||||
*/
|
||||
outlierUsersCount?: number;
|
||||
/**
|
||||
* The search parameters.
|
||||
*/
|
||||
customSearchParameters?: SearchOptions;
|
||||
};
|
||||
|
||||
export { }
|
||||
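Note (not part of the diff): a minimal sketch of the A/B-test API declared in the .d.ts above, assuming an AnalyticsClient instance has already been created elsewhere. The test name, index names, traffic split, and the runDocsSearchTest helper are illustrative assumptions.

import { addABTest, getABTest, stopABTest } from '@algolia/client-analytics';
import type { AnalyticsClient } from '@algolia/client-analytics';

// `addABTest(client)` returns a function that posts to `2/abtests`; the
// variants' trafficPercentage values must sum to 100.
async function runDocsSearchTest(analytics: AnalyticsClient) {
  const { abTestID } = await addABTest(analytics)({
    name: 'docs-search-ranking',                   // assumed test name
    variants: [
      { index: 'docs', trafficPercentage: 50 },
      { index: 'docs_alt_ranking', trafficPercentage: 50 },
    ],
    endAt: new Date(Date.now() + 7 * 24 * 3600 * 1000).toISOString(),
  });

  const status = await getABTest(analytics)(abTestID);
  if (status.clickSignificance > 0.95) {
    await stopABTest(analytics)(abTestID);         // stop once significant
  }
  return status;
}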
71  scripts/node_modules/@algolia/client-analytics/dist/client-analytics.esm.js  (generated, vendored, normal file)
@@ -0,0 +1,71 @@
import { createAuth, AuthMode, addMethods, encode } from '@algolia/client-common';
|
||||
import { createTransporter } from '@algolia/transporter';
|
||||
import { MethodEnum } from '@algolia/requester-common';
|
||||
|
||||
const createAnalyticsClient = options => {
|
||||
const region = options.region || 'us';
|
||||
const auth = createAuth(AuthMode.WithinHeaders, options.appId, options.apiKey);
|
||||
const transporter = createTransporter({
|
||||
hosts: [{ url: `analytics.${region}.algolia.com` }],
|
||||
...options,
|
||||
headers: {
|
||||
...auth.headers(),
|
||||
...{ 'content-type': 'application/json' },
|
||||
...options.headers,
|
||||
},
|
||||
queryParameters: {
|
||||
...auth.queryParameters(),
|
||||
...options.queryParameters,
|
||||
},
|
||||
});
|
||||
const appId = options.appId;
|
||||
return addMethods({ appId, transporter }, options.methods);
|
||||
};
|
||||
|
||||
const addABTest = (base) => {
|
||||
return (abTest, requestOptions) => {
|
||||
return base.transporter.write({
|
||||
method: MethodEnum.Post,
|
||||
path: '2/abtests',
|
||||
data: abTest,
|
||||
}, requestOptions);
|
||||
};
|
||||
};
|
||||
|
||||
const deleteABTest = (base) => {
|
||||
return (abTestID, requestOptions) => {
|
||||
return base.transporter.write({
|
||||
method: MethodEnum.Delete,
|
||||
path: encode('2/abtests/%s', abTestID),
|
||||
}, requestOptions);
|
||||
};
|
||||
};
|
||||
|
||||
const getABTest = (base) => {
|
||||
return (abTestID, requestOptions) => {
|
||||
return base.transporter.read({
|
||||
method: MethodEnum.Get,
|
||||
path: encode('2/abtests/%s', abTestID),
|
||||
}, requestOptions);
|
||||
};
|
||||
};
|
||||
|
||||
const getABTests = (base) => {
|
||||
return (requestOptions) => {
|
||||
return base.transporter.read({
|
||||
method: MethodEnum.Get,
|
||||
path: '2/abtests',
|
||||
}, requestOptions);
|
||||
};
|
||||
};
|
||||
|
||||
const stopABTest = (base) => {
|
||||
return (abTestID, requestOptions) => {
|
||||
return base.transporter.write({
|
||||
method: MethodEnum.Post,
|
||||
path: encode('2/abtests/%s/stop', abTestID),
|
||||
}, requestOptions);
|
||||
};
|
||||
};
|
||||
|
||||
export { addABTest, createAnalyticsClient, deleteABTest, getABTest, getABTests, stopABTest };
|
||||
2  scripts/node_modules/@algolia/client-analytics/index.js  (generated, vendored, normal file)
@@ -0,0 +1,2 @@
// eslint-disable-next-line functional/immutable-data, import/no-commonjs
module.exports = require('./dist/client-analytics.cjs.js');
24  scripts/node_modules/@algolia/client-analytics/package.json  generated vendored  Normal file
@@ -0,0 +1,24 @@
{
  "name": "@algolia/client-analytics",
  "version": "4.24.0",
  "private": false,
  "repository": {
    "type": "git",
    "url": "git://github.com/algolia/algoliasearch-client-javascript.git"
  },
  "license": "MIT",
  "sideEffects": false,
  "main": "index.js",
  "module": "dist/client-analytics.esm.js",
  "types": "dist/client-analytics.d.ts",
  "files": [
    "index.js",
    "dist"
  ],
  "dependencies": {
    "@algolia/client-common": "4.24.0",
    "@algolia/client-search": "4.24.0",
    "@algolia/requester-common": "4.24.0",
    "@algolia/transporter": "4.24.0"
  }
}
101  scripts/node_modules/@algolia/client-common/dist/client-common.cjs.js  generated vendored  Normal file
@@ -0,0 +1,101 @@
'use strict';

Object.defineProperty(exports, '__esModule', { value: true });

function createAuth(authMode, appId, apiKey) {
    const credentials = {
        'x-algolia-api-key': apiKey,
        'x-algolia-application-id': appId,
    };
    return {
        headers() {
            return authMode === AuthMode.WithinHeaders ? credentials : {};
        },
        queryParameters() {
            return authMode === AuthMode.WithinQueryParameters ? credentials : {};
        },
    };
}

function createRetryablePromise(callback) {
    let retriesCount = 0; // eslint-disable-line functional/no-let
    const retry = () => {
        retriesCount++;
        return new Promise((resolve) => {
            setTimeout(() => {
                resolve(callback(retry));
            }, Math.min(100 * retriesCount, 1000));
        });
    };
    return callback(retry);
}

function createWaitablePromise(promise, wait = (_response, _requestOptions) => {
    return Promise.resolve();
}) {
    // eslint-disable-next-line functional/immutable-data
    return Object.assign(promise, {
        wait(requestOptions) {
            return createWaitablePromise(promise
                .then(response => Promise.all([wait(response, requestOptions), response]))
                .then(promiseResults => promiseResults[1]));
        },
    });
}

// eslint-disable-next-line functional/prefer-readonly-type
function shuffle(array) {
    let c = array.length - 1; // eslint-disable-line functional/no-let
    // eslint-disable-next-line functional/no-loop-statement
    for (c; c > 0; c--) {
        const b = Math.floor(Math.random() * (c + 1));
        const a = array[c];
        array[c] = array[b]; // eslint-disable-line functional/immutable-data, no-param-reassign
        array[b] = a; // eslint-disable-line functional/immutable-data, no-param-reassign
    }
    return array;
}
function addMethods(base, methods) {
    if (!methods) {
        return base;
    }
    Object.keys(methods).forEach(key => {
        // eslint-disable-next-line functional/immutable-data, no-param-reassign
        base[key] = methods[key](base);
    });
    return base;
}
function encode(format, ...args) {
    // eslint-disable-next-line functional/no-let
    let i = 0;
    return format.replace(/%s/g, () => encodeURIComponent(args[i++]));
}

const version = '4.24.0';

const destroy = (base) => {
    return () => {
        return base.transporter.requester.destroy();
    };
};

const AuthMode = {
    /**
     * If auth credentials should be in query parameters.
     */
    WithinQueryParameters: 0,
    /**
     * If auth credentials should be in headers.
     */
    WithinHeaders: 1,
};

exports.AuthMode = AuthMode;
exports.addMethods = addMethods;
exports.createAuth = createAuth;
exports.createRetryablePromise = createRetryablePromise;
exports.createWaitablePromise = createWaitablePromise;
exports.destroy = destroy;
exports.encode = encode;
exports.shuffle = shuffle;
exports.version = version;
102  scripts/node_modules/@algolia/client-common/dist/client-common.d.ts  generated vendored  Normal file
@@ -0,0 +1,102 @@
import { Headers as Headers_2 } from '@algolia/transporter';
import { HostOptions } from '@algolia/transporter';
import { QueryParameters } from '@algolia/transporter';
import { RequestOptions } from '@algolia/transporter';
import { Transporter } from '@algolia/transporter';
import { TransporterOptions } from '@algolia/transporter';

declare type AddedMethods<TBase, TMethods extends Methods<TBase>> = TBase & {
    [TKey in keyof TMethods extends string ? keyof TMethods : never]: ReturnType<TMethods[TKey]>;
};

export declare function addMethods<TBase extends {}, TMethods extends Methods<TBase>>(base: TBase, methods?: TMethods): AddedMethods<TBase, TMethods>;

export declare type Auth = {
    /**
     * Returns the headers related to auth. Should be
     * merged to the transporter headers.
     */
    readonly headers: () => Readonly<Record<string, string>>;
    /**
     * Returns the query parameters related to auth. Should be
     * merged to the query parameters headers.
     */
    readonly queryParameters: () => Readonly<Record<string, string>>;
};

export declare const AuthMode: Readonly<Record<string, AuthModeType>>;

export declare type AuthModeType = 0 | 1;

export declare type ClientTransporterOptions = Pick<TransporterOptions, Exclude<keyof TransporterOptions, 'headers'> & Exclude<keyof TransporterOptions, 'queryParameters'> & Exclude<keyof TransporterOptions, 'hosts'>> & {
    /**
     * The hosts used by the requester.
     */
    readonly hosts?: readonly HostOptions[];
    /**
     * The headers used by the requester. The transporter
     * layer may add some extra headers during the request
     * for the user agent, and others.
     */
    readonly headers?: Headers_2;
    /**
     * The query parameters used by the requester. The transporter
     * layer may add some extra headers during the request
     * for the user agent, and others.
     */
    readonly queryParameters?: QueryParameters;
};

export declare function createAuth(authMode: AuthModeType, appId: string, apiKey: string): Auth;

export declare type CreateClient<TClient, TOptions> = <TMethods extends {
    readonly [key: string]: (base: TClient) => (...args: any) => any;
}>(options: TOptions & {
    readonly methods?: TMethods;
}) => TClient & {
    [key in keyof TMethods extends string ? keyof TMethods : never]: ReturnType<TMethods[key]>;
};

export declare function createRetryablePromise<TResponse>(callback: (retry: () => Promise<TResponse>) => Promise<TResponse>): Promise<TResponse>;

export declare function createWaitablePromise<TResponse>(promise: Readonly<Promise<TResponse>>, wait?: Wait<TResponse>): Readonly<WaitablePromise<TResponse>>;

export declare const destroy: (base: {
    readonly transporter: Transporter;
}) => () => Readonly<Promise<void>>;

export declare function encode(format: string, ...args: readonly any[]): string;

declare type Methods<TBase> = {
    readonly [key: string]: (base: TBase) => (...args: any[]) => any;
};

export declare function shuffle<TData>(array: TData[]): TData[];

export declare const version = "4.24.0";

export declare type Wait<TResponse> = (
/**
 * The original response.
 */
response: TResponse,
/**
 * The custom request options.
 */
requestOptions?: RequestOptions) => Readonly<Promise<any>>;

export declare type WaitablePromise<TResponse> = Readonly<Promise<TResponse>> & {
    /**
     * Wait for a task to complete before executing the next line of code, to synchronize index updates.
     *
     * All write operations in Algolia are asynchronous by design. It means that when you add or
     * update an object to your index, our servers will reply to your request with a taskID as
     * soon as they understood the write operation. The actual insert and indexing will be
     * done after replying to your code.
     *
     * You can wait for a task to complete by using this method.
     */
    readonly wait: (requestOptions?: RequestOptions) => Readonly<WaitablePromise<TResponse>>;
};

export { }
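The `WaitablePromise` doc comment above describes the wait-for-task pattern but never shows it in use. A minimal sketch, assuming the vendored `@algolia/client-common` build above, of how `createWaitablePromise` and `createRetryablePromise` compose; `fakeSaveObject` and `fakeWaitTask` are hypothetical stand-ins for a real write call and a real task-status poll:

```js
const { createWaitablePromise, createRetryablePromise } = require('@algolia/client-common');

// Hypothetical write operation that returns a taskID, standing in for a
// transporter.write(...) call made by a real Algolia client method.
const fakeSaveObject = () => Promise.resolve({ objectID: 'abc', taskID: 123 });

// Hypothetical task poll; createRetryablePromise re-invokes the callback
// (with increasing delays) whenever retry() is called.
const fakeWaitTask = (response) =>
  createRetryablePromise((retry) =>
    Promise.resolve({ status: 'published' }).then((task) =>
      task.status === 'published' ? Promise.resolve() : retry()
    )
  );

// Wrap the write promise so callers can opt into synchronization via .wait().
const saved = createWaitablePromise(fakeSaveObject(), fakeWaitTask);

saved.wait().then((response) => {
  // Resolves only after fakeWaitTask reports the (simulated) task as done.
  console.log('indexed', response.objectID);
});
```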
89  scripts/node_modules/@algolia/client-common/dist/client-common.esm.js  generated vendored  Normal file
@@ -0,0 +1,89 @@
function createAuth(authMode, appId, apiKey) {
    const credentials = {
        'x-algolia-api-key': apiKey,
        'x-algolia-application-id': appId,
    };
    return {
        headers() {
            return authMode === AuthMode.WithinHeaders ? credentials : {};
        },
        queryParameters() {
            return authMode === AuthMode.WithinQueryParameters ? credentials : {};
        },
    };
}

function createRetryablePromise(callback) {
    let retriesCount = 0; // eslint-disable-line functional/no-let
    const retry = () => {
        retriesCount++;
        return new Promise((resolve) => {
            setTimeout(() => {
                resolve(callback(retry));
            }, Math.min(100 * retriesCount, 1000));
        });
    };
    return callback(retry);
}

function createWaitablePromise(promise, wait = (_response, _requestOptions) => {
    return Promise.resolve();
}) {
    // eslint-disable-next-line functional/immutable-data
    return Object.assign(promise, {
        wait(requestOptions) {
            return createWaitablePromise(promise
                .then(response => Promise.all([wait(response, requestOptions), response]))
                .then(promiseResults => promiseResults[1]));
        },
    });
}

// eslint-disable-next-line functional/prefer-readonly-type
function shuffle(array) {
    let c = array.length - 1; // eslint-disable-line functional/no-let
    // eslint-disable-next-line functional/no-loop-statement
    for (c; c > 0; c--) {
        const b = Math.floor(Math.random() * (c + 1));
        const a = array[c];
        array[c] = array[b]; // eslint-disable-line functional/immutable-data, no-param-reassign
        array[b] = a; // eslint-disable-line functional/immutable-data, no-param-reassign
    }
    return array;
}
function addMethods(base, methods) {
    if (!methods) {
        return base;
    }
    Object.keys(methods).forEach(key => {
        // eslint-disable-next-line functional/immutable-data, no-param-reassign
        base[key] = methods[key](base);
    });
    return base;
}
function encode(format, ...args) {
    // eslint-disable-next-line functional/no-let
    let i = 0;
    return format.replace(/%s/g, () => encodeURIComponent(args[i++]));
}

const version = '4.24.0';

const destroy = (base) => {
    return () => {
        return base.transporter.requester.destroy();
    };
};

const AuthMode = {
    /**
     * If auth credentials should be in query parameters.
     */
    WithinQueryParameters: 0,
    /**
     * If auth credentials should be in headers.
     */
    WithinHeaders: 1,
};

export { AuthMode, addMethods, createAuth, createRetryablePromise, createWaitablePromise, destroy, encode, shuffle, version };
2  scripts/node_modules/@algolia/client-common/index.js  generated vendored  Normal file
@@ -0,0 +1,2 @@
// eslint-disable-next-line functional/immutable-data, import/no-commonjs
module.exports = require('./dist/client-common.cjs.js');
22  scripts/node_modules/@algolia/client-common/package.json  generated vendored  Normal file
@@ -0,0 +1,22 @@
{
  "name": "@algolia/client-common",
  "version": "4.24.0",
  "private": false,
  "repository": {
    "type": "git",
    "url": "git://github.com/algolia/algoliasearch-client-javascript.git"
  },
  "license": "MIT",
  "sideEffects": false,
  "main": "index.js",
  "module": "dist/client-common.esm.js",
  "types": "dist/client-common.d.ts",
  "files": [
    "index.js",
    "dist"
  ],
  "dependencies": {
    "@algolia/requester-common": "4.24.0",
    "@algolia/transporter": "4.24.0"
  }
}
49  scripts/node_modules/@algolia/client-personalization/dist/client-personalization.cjs.js  generated vendored  Normal file
@@ -0,0 +1,49 @@
'use strict';

Object.defineProperty(exports, '__esModule', { value: true });

var clientCommon = require('@algolia/client-common');
var transporter = require('@algolia/transporter');
var requesterCommon = require('@algolia/requester-common');

const createPersonalizationClient = options => {
    const region = options.region || 'us';
    const auth = clientCommon.createAuth(clientCommon.AuthMode.WithinHeaders, options.appId, options.apiKey);
    const transporter$1 = transporter.createTransporter({
        hosts: [{ url: `personalization.${region}.algolia.com` }],
        ...options,
        headers: {
            ...auth.headers(),
            ...{ 'content-type': 'application/json' },
            ...options.headers,
        },
        queryParameters: {
            ...auth.queryParameters(),
            ...options.queryParameters,
        },
    });
    return clientCommon.addMethods({ appId: options.appId, transporter: transporter$1 }, options.methods);
};

const getPersonalizationStrategy = (base) => {
    return (requestOptions) => {
        return base.transporter.read({
            method: requesterCommon.MethodEnum.Get,
            path: '1/strategies/personalization',
        }, requestOptions);
    };
};

const setPersonalizationStrategy = (base) => {
    return (personalizationStrategy, requestOptions) => {
        return base.transporter.write({
            method: requesterCommon.MethodEnum.Post,
            path: '1/strategies/personalization',
            data: personalizationStrategy,
        }, requestOptions);
    };
};

exports.createPersonalizationClient = createPersonalizationClient;
exports.getPersonalizationStrategy = getPersonalizationStrategy;
exports.setPersonalizationStrategy = setPersonalizationStrategy;
93  scripts/node_modules/@algolia/client-personalization/dist/client-personalization.d.ts  generated vendored  Normal file
@@ -0,0 +1,93 @@
import { ClientTransporterOptions } from '@algolia/client-common';
import { CreateClient } from '@algolia/client-common';
import { RequestOptions } from '@algolia/transporter';
import { Transporter } from '@algolia/transporter';

export declare const createPersonalizationClient: CreateClient<PersonalizationClient, PersonalizationClientOptions & ClientTransporterOptions>;

export declare const getPersonalizationStrategy: (base: PersonalizationClient) => (requestOptions?: RequestOptions | undefined) => Readonly<Promise<GetPersonalizationStrategyResponse>>;

export declare type GetPersonalizationStrategyResponse = {
    /**
     * Events scoring
     */
    eventsScoring: Array<{
        eventName: string;
        eventType: string;
        score: number;
    }>;
    /**
     * Facets scoring
     */
    facetsScoring: Array<{
        facetName: string;
        score: number;
    }>;
    /**
     * Personalization impact
     */
    personalizationImpact: number;
};

export declare type PersonalizationClient = {
    /**
     * The application id.
     */
    readonly appId: string;
    /**
     * The underlying transporter.
     */
    readonly transporter: Transporter;
};

export declare type PersonalizationClientOptions = {
    /**
     * The application id.
     */
    readonly appId: string;
    /**
     * The api key.
     */
    readonly apiKey: string;
    /**
     * The prefered region.
     */
    readonly region?: string;
};

export declare type PersonalizationStrategy = {
    /**
     * Events scoring
     */
    readonly eventsScoring: ReadonlyArray<{
        readonly eventName: string;
        readonly eventType: string;
        readonly score: number;
    }>;
    /**
     * Facets scoring
     */
    readonly facetsScoring: ReadonlyArray<{
        readonly facetName: string;
        readonly score: number;
    }>;
    /**
     * Personalization impact
     */
    readonly personalizationImpact: number;
};

export declare const setPersonalizationStrategy: (base: PersonalizationClient) => (personalizationStrategy: PersonalizationStrategy, requestOptions?: RequestOptions | undefined) => Readonly<Promise<SetPersonalizationStrategyResponse>>;

export declare type SetPersonalizationStrategyResponse = {
    /**
     * The status code.
     */
    status?: number;
    /**
     * The message.
     */
    message: string;
};

export { }
43  scripts/node_modules/@algolia/client-personalization/dist/client-personalization.esm.js  generated vendored  Normal file
@@ -0,0 +1,43 @@
import { createAuth, AuthMode, addMethods } from '@algolia/client-common';
import { createTransporter } from '@algolia/transporter';
import { MethodEnum } from '@algolia/requester-common';

const createPersonalizationClient = options => {
    const region = options.region || 'us';
    const auth = createAuth(AuthMode.WithinHeaders, options.appId, options.apiKey);
    const transporter = createTransporter({
        hosts: [{ url: `personalization.${region}.algolia.com` }],
        ...options,
        headers: {
            ...auth.headers(),
            ...{ 'content-type': 'application/json' },
            ...options.headers,
        },
        queryParameters: {
            ...auth.queryParameters(),
            ...options.queryParameters,
        },
    });
    return addMethods({ appId: options.appId, transporter }, options.methods);
};

const getPersonalizationStrategy = (base) => {
    return (requestOptions) => {
        return base.transporter.read({
            method: MethodEnum.Get,
            path: '1/strategies/personalization',
        }, requestOptions);
    };
};

const setPersonalizationStrategy = (base) => {
    return (personalizationStrategy, requestOptions) => {
        return base.transporter.write({
            method: MethodEnum.Post,
            path: '1/strategies/personalization',
            data: personalizationStrategy,
        }, requestOptions);
    };
};

export { createPersonalizationClient, getPersonalizationStrategy, setPersonalizationStrategy };
2  scripts/node_modules/@algolia/client-personalization/index.js  generated vendored  Normal file
@@ -0,0 +1,2 @@
// eslint-disable-next-line functional/immutable-data, import/no-commonjs
module.exports = require('./dist/client-personalization.cjs.js');
23  scripts/node_modules/@algolia/client-personalization/package.json  generated vendored  Normal file
@@ -0,0 +1,23 @@
{
  "name": "@algolia/client-personalization",
  "version": "4.24.0",
  "private": false,
  "repository": {
    "type": "git",
    "url": "git://github.com/algolia/algoliasearch-client-javascript.git"
  },
  "license": "MIT",
  "sideEffects": false,
  "main": "index.js",
  "module": "dist/client-personalization.esm.js",
  "types": "dist/client-personalization.d.ts",
  "files": [
    "index.js",
    "dist"
  ],
  "dependencies": {
    "@algolia/client-common": "4.24.0",
    "@algolia/requester-common": "4.24.0",
    "@algolia/transporter": "4.24.0"
  }
}
1380  scripts/node_modules/@algolia/client-search/dist/client-search.cjs.js  generated vendored  Normal file
File diff suppressed because it is too large

2597  scripts/node_modules/@algolia/client-search/dist/client-search.d.ts  generated vendored  Normal file
File diff suppressed because it is too large

1287  scripts/node_modules/@algolia/client-search/dist/client-search.esm.js  generated vendored  Normal file
File diff suppressed because it is too large
2  scripts/node_modules/@algolia/client-search/index.js  generated vendored  Normal file
@@ -0,0 +1,2 @@
// eslint-disable-next-line functional/immutable-data, import/no-commonjs
module.exports = require('./dist/client-search.cjs.js');
23  scripts/node_modules/@algolia/client-search/package.json  generated vendored  Normal file
@@ -0,0 +1,23 @@
{
  "name": "@algolia/client-search",
  "version": "4.24.0",
  "private": false,
  "repository": {
    "type": "git",
    "url": "git://github.com/algolia/algoliasearch-client-javascript.git"
  },
  "license": "MIT",
  "sideEffects": false,
  "main": "index.js",
  "module": "dist/client-search.esm.js",
  "types": "dist/client-search.d.ts",
  "files": [
    "index.js",
    "dist"
  ],
  "dependencies": {
    "@algolia/client-common": "4.24.0",
    "@algolia/requester-common": "4.24.0",
    "@algolia/transporter": "4.24.0"
  }
}
26  scripts/node_modules/@algolia/logger-common/dist/logger-common.cjs.js  generated vendored  Normal file
@@ -0,0 +1,26 @@
'use strict';

Object.defineProperty(exports, '__esModule', { value: true });

function createNullLogger() {
    return {
        debug(_message, _args) {
            return Promise.resolve();
        },
        info(_message, _args) {
            return Promise.resolve();
        },
        error(_message, _args) {
            return Promise.resolve();
        },
    };
}

const LogLevelEnum = {
    Debug: 1,
    Info: 2,
    Error: 3,
};

exports.LogLevelEnum = LogLevelEnum;
exports.createNullLogger = createNullLogger;
22  scripts/node_modules/@algolia/logger-common/dist/logger-common.d.ts  generated vendored  Normal file
@@ -0,0 +1,22 @@
export declare function createNullLogger(): Logger;

export declare type Logger = {
    /**
     * Logs debug messages.
     */
    readonly debug: (message: string, args?: any) => Readonly<Promise<void>>;
    /**
     * Logs info messages.
     */
    readonly info: (message: string, args?: any) => Readonly<Promise<void>>;
    /**
     * Logs error messages.
     */
    readonly error: (message: string, args?: any) => Readonly<Promise<void>>;
};

export declare const LogLevelEnum: Readonly<Record<string, LogLevelType>>;

export declare type LogLevelType = 1 | 2 | 3;

export { }
21  scripts/node_modules/@algolia/logger-common/dist/logger-common.esm.js  generated vendored  Normal file
@@ -0,0 +1,21 @@
function createNullLogger() {
    return {
        debug(_message, _args) {
            return Promise.resolve();
        },
        info(_message, _args) {
            return Promise.resolve();
        },
        error(_message, _args) {
            return Promise.resolve();
        },
    };
}

const LogLevelEnum = {
    Debug: 1,
    Info: 2,
    Error: 3,
};

export { LogLevelEnum, createNullLogger };
2  scripts/node_modules/@algolia/logger-common/index.js  generated vendored  Normal file
@@ -0,0 +1,2 @@
// eslint-disable-next-line functional/immutable-data, import/no-commonjs
module.exports = require('./dist/logger-common.cjs.js');
19  scripts/node_modules/@algolia/logger-common/package.json  generated vendored  Normal file
@@ -0,0 +1,19 @@
{
  "name": "@algolia/logger-common",
  "version": "4.24.0",
  "private": false,
  "description": "Common interfaces for promise-based log libraries",
  "repository": {
    "type": "git",
    "url": "git://github.com/algolia/algoliasearch-client-js.git"
  },
  "license": "MIT",
  "sideEffects": false,
  "main": "index.js",
  "module": "dist/logger-common.esm.js",
  "types": "dist/logger-common.d.ts",
  "files": [
    "index.js",
    "dist"
  ]
}
29  scripts/node_modules/@algolia/logger-console/dist/logger-console.cjs.js  generated vendored  Normal file
@@ -0,0 +1,29 @@
'use strict';

Object.defineProperty(exports, '__esModule', { value: true });

var loggerCommon = require('@algolia/logger-common');

/* eslint no-console: 0 */
function createConsoleLogger(logLevel) {
    return {
        debug(message, args) {
            if (loggerCommon.LogLevelEnum.Debug >= logLevel) {
                console.debug(message, args);
            }
            return Promise.resolve();
        },
        info(message, args) {
            if (loggerCommon.LogLevelEnum.Info >= logLevel) {
                console.info(message, args);
            }
            return Promise.resolve();
        },
        error(message, args) {
            console.error(message, args);
            return Promise.resolve();
        },
    };
}

exports.createConsoleLogger = createConsoleLogger;
6  scripts/node_modules/@algolia/logger-console/dist/logger-console.d.ts  generated vendored  Normal file
@@ -0,0 +1,6 @@
import { Logger } from '@algolia/logger-common';
import { LogLevelType } from '@algolia/logger-common';

export declare function createConsoleLogger(logLevel: LogLevelType): Logger;

export { }
25  scripts/node_modules/@algolia/logger-console/dist/logger-console.esm.js  generated vendored  Normal file
@@ -0,0 +1,25 @@
import { LogLevelEnum } from '@algolia/logger-common';

/* eslint no-console: 0 */
function createConsoleLogger(logLevel) {
    return {
        debug(message, args) {
            if (LogLevelEnum.Debug >= logLevel) {
                console.debug(message, args);
            }
            return Promise.resolve();
        },
        info(message, args) {
            if (LogLevelEnum.Info >= logLevel) {
                console.info(message, args);
            }
            return Promise.resolve();
        },
        error(message, args) {
            console.error(message, args);
            return Promise.resolve();
        },
    };
}

export { createConsoleLogger };
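A small sketch, not taken from the vendored sources, of what the log-level gating above means in practice; the messages and the idea of passing this logger as a client's `logger` transporter option (as `recommend.cjs.js` does with `createNullLogger` later in this diff) are illustrative assumptions:

```js
const { createConsoleLogger } = require('@algolia/logger-console');
const { LogLevelEnum } = require('@algolia/logger-common');

// Prints info and error messages, silently drops debug messages:
// LogLevelEnum.Debug (1) is below the configured level LogLevelEnum.Info (2).
const logger = createConsoleLogger(LogLevelEnum.Info);

logger.debug('host state', { host: 'analytics.us.algolia.com' }); // dropped
logger.info('request sent', { path: '2/abtests' });               // printed
logger.error('request failed', { status: 500 });                  // printed
```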
2  scripts/node_modules/@algolia/logger-console/index.js  generated vendored  Normal file
@@ -0,0 +1,2 @@
// eslint-disable-next-line functional/immutable-data, import/no-commonjs
module.exports = require('./dist/logger-console.cjs.js');
22  scripts/node_modules/@algolia/logger-console/package.json  generated vendored  Normal file
@@ -0,0 +1,22 @@
{
  "name": "@algolia/logger-console",
  "version": "4.24.0",
  "private": false,
  "description": "Promise-based log library using console log.",
  "repository": {
    "type": "git",
    "url": "git://github.com/algolia/algoliasearch-client-javascript.git"
  },
  "license": "MIT",
  "sideEffects": false,
  "main": "index.js",
  "module": "dist/logger-console.esm.js",
  "types": "dist/logger-console.d.ts",
  "files": [
    "index.js",
    "dist"
  ],
  "dependencies": {
    "@algolia/logger-common": "4.24.0"
  }
}
76  scripts/node_modules/@algolia/recommend/README.md  generated vendored  Normal file
@@ -0,0 +1,76 @@
<p align="center">
  <h1>Algolia Recommend</h1>

  <h4>The perfect starting point to integrate <a href="https://www.algolia.com/products/recommendations" target="_blank">Algolia Recommend</a> within your JavaScript project</h4>

  <p align="center">
    <a href="https://npmjs.org/package/@algolia/recommend"><img src="https://img.shields.io/npm/v/@algolia/recommend.svg?style=flat-square" alt="NPM version"></img></a>
    <a href="LICENSE.md"><img src="https://img.shields.io/badge/license-MIT-green.svg?style=flat-square" alt="License"></a>
  </p>
</p>

<p align="center">
  <a href="https://www.algolia.com/doc/api-client/methods/recommend/" target="_blank">Documentation</a> •
  <a href="https://www.algolia.com/doc/ui-libraries/recommend/introduction/what-is-recommend/" target="_blank">UI library</a> •
  <a href="https://discourse.algolia.com" target="_blank">Community Forum</a> •
  <a href="http://stackoverflow.com/questions/tagged/algolia" target="_blank">Stack Overflow</a> •
  <a href="https://github.com/algolia/algoliasearch-client-javascript/issues" target="_blank">Report a bug</a> •
  <a href="https://www.algolia.com/support" target="_blank">Support</a>
</p>

## ✨ Features

- Thin & **minimal low-level HTTP client** to interact with Algolia's Recommend API
- Works both on the **browser** and **node.js**
- **UMD compatible**, you can use it with any module loader
- Built with TypeScript

## 💡 Getting Started

First, install Algolia Recommend API Client via the [npm](https://www.npmjs.com/get-npm) package manager:

```bash
npm install @algolia/recommend
```

Then, let's retrieve recommendations:

```js
const algoliarecommend = require('@algolia/recommend');

const client = algoliarecommend('YourApplicationID', 'YourAdminAPIKey');

client
  .getFrequentlyBoughtTogether([
    {
      indexName: 'your_index_name',
      objectID: 'your_object_id',
    },
  ])
  .then(({ results }) => {
    console.log(results);
  })
  .catch(err => {
    console.log(err);
  });

client
  .getRelatedProducts([
    {
      indexName: 'your_index_name',
      objectID: 'your_object_id',
    },
  ])
  .then(({ results }) => {
    console.log(results);
  })
  .catch(err => {
    console.log(err);
  });
```

For full documentation, visit the **[online documentation](https://www.algolia.com/doc/api-client/methods/recommend/)**.

## 📄 License

Algolia Recommend API Client is an open-sourced software licensed under the [MIT license](LICENSE.md).
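The README above only shows the frequently-bought-together and related-products helpers, while the dist build in the next file also exports trending helpers. A hedged sketch of calling them, using placeholder index and facet names; the `facetName` field for trending facets follows the upstream query type as an assumption, since that part of the type definitions is not shown here:

```js
// Sketch only: trending models exposed by the vendored build below.
const algoliarecommend = require('@algolia/recommend');

const client = algoliarecommend('YourApplicationID', 'YourAdminAPIKey');

// Trending items for an index; threshold defaults to 0 inside the client.
client
  .getTrendingItems([{ indexName: 'your_index_name' }])
  .then(({ results }) => console.log(results))
  .catch((err) => console.log(err));

// Trending facet values; 'category' is a placeholder facet name.
client
  .getTrendingFacets([{ indexName: 'your_index_name', facetName: 'category' }])
  .then(({ results }) => console.log(results))
  .catch((err) => console.log(err));
```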
201  scripts/node_modules/@algolia/recommend/dist/recommend.cjs.js  generated vendored  Normal file
@@ -0,0 +1,201 @@
'use strict';

var cacheCommon = require('@algolia/cache-common');
var cacheInMemory = require('@algolia/cache-in-memory');
var clientCommon = require('@algolia/client-common');
var loggerCommon = require('@algolia/logger-common');
var requesterNodeHttp = require('@algolia/requester-node-http');
var transporter = require('@algolia/transporter');
var requesterCommon = require('@algolia/requester-common');

const createRecommendClient = options => {
    const appId = options.appId;
    const auth = clientCommon.createAuth(options.authMode !== undefined ? options.authMode : clientCommon.AuthMode.WithinHeaders, appId, options.apiKey);
    const transporter$1 = transporter.createTransporter({
        hosts: [
            { url: `${appId}-dsn.algolia.net`, accept: transporter.CallEnum.Read },
            { url: `${appId}.algolia.net`, accept: transporter.CallEnum.Write },
        ].concat(clientCommon.shuffle([
            { url: `${appId}-1.algolianet.com` },
            { url: `${appId}-2.algolianet.com` },
            { url: `${appId}-3.algolianet.com` },
        ])),
        ...options,
        headers: {
            ...auth.headers(),
            ...{ 'content-type': 'application/x-www-form-urlencoded' },
            ...options.headers,
        },
        queryParameters: {
            ...auth.queryParameters(),
            ...options.queryParameters,
        },
    });
    const base = {
        transporter: transporter$1,
        appId,
        addAlgoliaAgent(segment, version) {
            transporter$1.userAgent.add({ segment, version });
        },
        clearCache() {
            return Promise.all([
                transporter$1.requestsCache.clear(),
                transporter$1.responsesCache.clear(),
            ]).then(() => undefined);
        },
    };
    return clientCommon.addMethods(base, options.methods);
};

const getRecommendations = base => {
    return (queries, requestOptions) => {
        const requests = queries.map(query => ({
            ...query,
            // The `threshold` param is required by the endpoint to make it easier
            // to provide a default value later, so we default it in the client
            // so that users don't have to provide a value.
            threshold: query.threshold || 0,
        }));
        return base.transporter.read({
            method: requesterCommon.MethodEnum.Post,
            path: '1/indexes/*/recommendations',
            data: {
                requests,
            },
            cacheable: true,
        }, requestOptions);
    };
};

const getFrequentlyBoughtTogether = base => {
    return (queries, requestOptions) => {
        return getRecommendations(base)(queries.map(query => ({
            ...query,
            fallbackParameters: {},
            model: 'bought-together',
        })), requestOptions);
    };
};

const getRelatedProducts = base => {
    return (queries, requestOptions) => {
        return getRecommendations(base)(queries.map(query => ({
            ...query,
            model: 'related-products',
        })), requestOptions);
    };
};

const getTrendingFacets = base => {
    return (queries, requestOptions) => {
        const requests = queries.map(query => ({
            ...query,
            model: 'trending-facets',
            // The `threshold` param is required by the endpoint to make it easier
            // to provide a default value later, so we default it in the client
            // so that users don't have to provide a value.
            threshold: query.threshold || 0,
        }));
        return base.transporter.read({
            method: requesterCommon.MethodEnum.Post,
            path: '1/indexes/*/recommendations',
            data: {
                requests,
            },
            cacheable: true,
        }, requestOptions);
    };
};

const getTrendingItems = base => {
    return (queries, requestOptions) => {
        const requests = queries.map(query => ({
            ...query,
            model: 'trending-items',
            // The `threshold` param is required by the endpoint to make it easier
            // to provide a default value later, so we default it in the client
            // so that users don't have to provide a value.
            threshold: query.threshold || 0,
        }));
        return base.transporter.read({
            method: requesterCommon.MethodEnum.Post,
            path: '1/indexes/*/recommendations',
            data: {
                requests,
            },
            cacheable: true,
        }, requestOptions);
    };
};

const getLookingSimilar = base => {
    return (queries, requestOptions) => {
        return getRecommendations(base)(queries.map(query => ({
            ...query,
            model: 'looking-similar',
        })), requestOptions);
    };
};

const getRecommendedForYou = base => {
    return (queries, requestOptions) => {
        const requests = queries.map(query => ({
            ...query,
            model: 'recommended-for-you',
            threshold: query.threshold || 0,
        }));
        return base.transporter.read({
            method: requesterCommon.MethodEnum.Post,
            path: '1/indexes/*/recommendations',
            data: {
                requests,
            },
            cacheable: true,
        }, requestOptions);
    };
};

function recommend(appId, apiKey, options) {
    const commonOptions = {
        appId,
        apiKey,
        timeouts: {
            connect: 2,
            read: 5,
            write: 30,
        },
        requester: requesterNodeHttp.createNodeHttpRequester(),
        logger: loggerCommon.createNullLogger(),
        responsesCache: cacheCommon.createNullCache(),
        requestsCache: cacheCommon.createNullCache(),
        hostsCache: cacheInMemory.createInMemoryCache(),
        userAgent: transporter.createUserAgent(clientCommon.version)
            .add({ segment: 'Recommend', version: clientCommon.version })
            .add({ segment: 'Node.js', version: process.versions.node }),
    };
    return createRecommendClient({
        ...commonOptions,
        ...options,
        methods: {
            destroy: clientCommon.destroy,
            getFrequentlyBoughtTogether,
            getRecommendations,
            getRelatedProducts,
            getTrendingFacets,
            getTrendingItems,
            getLookingSimilar,
            getRecommendedForYou,
        },
    });
}
/* eslint-disable functional/immutable-data */
recommend.version = clientCommon.version;
recommend.getFrequentlyBoughtTogether = getFrequentlyBoughtTogether;
recommend.getRecommendations = getRecommendations;
recommend.getRelatedProducts = getRelatedProducts;
recommend.getTrendingFacets = getTrendingFacets;
recommend.getTrendingItems = getTrendingItems;
recommend.getLookingSimilar = getLookingSimilar;
recommend.getRecommendedForYou = getRecommendedForYou;

module.exports = recommend;
587  scripts/node_modules/@algolia/recommend/dist/recommend.d.ts  generated vendored  Normal file
@@ -0,0 +1,587 @@
import { AuthModeType } from '@algolia/client-common';
|
||||
import { ClientTransporterOptions } from '@algolia/client-common';
|
||||
import { Destroyable } from '@algolia/requester-common';
|
||||
import { RecommendSearchOptions as RecommendSearchOptions_2 } from '@algolia/recommend';
|
||||
import { RequestOptions } from '@algolia/transporter';
|
||||
import { SearchOptions } from '@algolia/client-search';
|
||||
import { SearchResponse } from '@algolia/client-search';
|
||||
import { Transporter } from '@algolia/transporter';
|
||||
|
||||
export declare type BaseRecommendClient = {
|
||||
/**
|
||||
* The application id.
|
||||
*/
|
||||
readonly appId: string;
|
||||
/**
|
||||
* The underlying transporter.
|
||||
*/
|
||||
readonly transporter: Transporter;
|
||||
/**
|
||||
* Mutates the transporter, adding the given user agent.
|
||||
*/
|
||||
readonly addAlgoliaAgent: (segment: string, version?: string) => void;
|
||||
/**
|
||||
* Clears both requests and responses caches.
|
||||
*/
|
||||
readonly clearCache: () => Readonly<Promise<void>>;
|
||||
};
|
||||
|
||||
export declare type FrequentlyBoughtTogetherQuery = Omit<RecommendationsQuery, 'model' | 'fallbackParameters'>;
|
||||
|
||||
export declare type LookingSimilarQuery = Omit<RecommendationsQuery, 'model'>;
|
||||
|
||||
declare function recommend(appId: string, apiKey: string, options?: RecommendOptions): RecommendClient;
|
||||
|
||||
declare namespace recommend {
|
||||
var version: string;
|
||||
var getFrequentlyBoughtTogether: (base: BaseRecommendClient) => <TObject>(queries: readonly Pick<RecommendationsQuery, "queryParameters" | "objectID" | "indexName" | "threshold" | "maxRecommendations">[], requestOptions?: (RequestOptions_2 & SearchOptions_2) | undefined) => Readonly<Promise<RecommendQueriesResponse<TObject>>>;
|
||||
var getRecommendations: (base: BaseRecommendClient) => <TObject>(queries: readonly (RecommendationsQuery | (TrendingItemsQuery & {
|
||||
readonly model: TrendingModel;
|
||||
}) | (TrendingFacetsQuery & {
|
||||
readonly model: TrendingModel;
|
||||
}) | RecommendedForYouQuery)[], requestOptions?: (RequestOptions_2 & SearchOptions_2) | undefined) => Readonly<Promise<RecommendQueriesResponse<TObject>>>;
|
||||
var getRelatedProducts: (base: BaseRecommendClient) => <TObject>(queries: readonly Pick<RecommendationsQuery, "queryParameters" | "objectID" | "indexName" | "threshold" | "maxRecommendations" | "fallbackParameters">[], requestOptions?: (RequestOptions_2 & SearchOptions_2) | undefined) => Readonly<Promise<RecommendQueriesResponse<TObject>>>;
|
||||
var getTrendingFacets: (base: BaseRecommendClient) => <TObject>(queries: readonly TrendingFacetsQuery[], requestOptions?: (RequestOptions_2 & SearchOptions_2) | undefined) => Readonly<Promise<RecommendTrendingFacetsQueriesResponse>>;
|
||||
var getTrendingItems: (base: BaseRecommendClient) => <TObject>(queries: readonly TrendingItemsQuery[], requestOptions?: (RequestOptions_2 & SearchOptions_2) | undefined) => Readonly<Promise<RecommendQueriesResponse<TObject>>>;
|
||||
var getLookingSimilar: (base: BaseRecommendClient) => <TObject>(queries: readonly Pick<RecommendationsQuery, "queryParameters" | "objectID" | "indexName" | "threshold" | "maxRecommendations" | "fallbackParameters">[], requestOptions?: (RequestOptions_2 & SearchOptions_2) | undefined) => Readonly<Promise<RecommendQueriesResponse<TObject>>>;
|
||||
var getRecommendedForYou: (base: BaseRecommendClient) => <TObject>(queries: readonly Pick<RecommendedForYouQuery, "queryParameters" | "indexName" | "threshold" | "maxRecommendations" | "fallbackParameters">[], requestOptions?: (RequestOptions_2 & SearchOptions_2) | undefined) => Readonly<Promise<RecommendQueriesResponse<TObject>>>;
|
||||
}
|
||||
export default recommend;
|
||||
|
||||
export declare type RecommendationsQuery = {
|
||||
/**
|
||||
* The name of the target index.
|
||||
*/
|
||||
readonly indexName: string;
|
||||
/**
|
||||
* The name of the Recommendation model to use.
|
||||
*/
|
||||
readonly model: 'related-products' | 'bought-together' | 'looking-similar';
|
||||
/**
|
||||
* The `objectID` of the item to get recommendations for.
|
||||
*/
|
||||
readonly objectID: string;
|
||||
/**
|
||||
* Threshold for the recommendations confidence score (between 0 and 100). Only recommendations with a greater score are returned.
|
||||
*/
|
||||
readonly threshold?: number;
|
||||
/**
|
||||
* How many recommendations to retrieve.
|
||||
*/
|
||||
readonly maxRecommendations?: number;
|
||||
/**
|
||||
* List of [search parameters](https://www.algolia.com/doc/api-reference/search-api-parameters/) to send.
|
||||
*/
|
||||
readonly queryParameters?: RecommendSearchOptions;
|
||||
/**
|
||||
* List of [search parameters](https://www.algolia.com/doc/api-reference/search-api-parameters/) to send.
|
||||
*
|
||||
* Additional filters to use as fallback when there aren’t enough recommendations.
|
||||
*/
|
||||
readonly fallbackParameters?: RecommendSearchOptions;
|
||||
};
|
||||
|
||||
export declare type RecommendClient = WithRecommendMethods<BaseRecommendClient> & Destroyable;
|
||||
|
||||
export declare type RecommendClientOptions = {
|
||||
/**
|
||||
* The application id.
|
||||
*/
|
||||
readonly appId: string;
|
||||
/**
|
||||
* The api key.
|
||||
*/
|
||||
readonly apiKey: string;
|
||||
/**
|
||||
* The auth mode type. In browser environments credentials may
|
||||
* be passed within the headers.
|
||||
*/
|
||||
readonly authMode?: AuthModeType;
|
||||
};
|
||||
|
||||
/**
|
||||
* The parameters used for `getRecommendedForYou` method.
|
||||
*/
|
||||
export declare type RecommendedForYouParams = Omit<RecommendedForYouQuery, 'model'>;
|
||||
|
||||
export declare type RecommendedForYouQuery = Omit<RecommendationsQuery, 'model' | 'objectID' | 'queryParameters'> & {
|
||||
readonly model: 'recommended-for-you';
|
||||
/**
|
||||
* List of [search parameters](https://www.algolia.com/doc/api-reference/search-api-parameters/) to send.
|
||||
*/
|
||||
readonly queryParameters: Omit<RecommendSearchOptions, 'userToken'> & {
|
||||
/**
|
||||
* A user identifier.
|
||||
* Format: alpha numeric string [a-zA-Z0-9_-]
|
||||
* Length: between 1 and 64 characters.
|
||||
*/
|
||||
readonly userToken: string;
|
||||
};
|
||||
};
|
||||
|
||||
export declare type RecommendModel = 'related-products' | 'bought-together' | 'looking-similar' | 'recommended-for-you' | TrendingModel;
|
||||
|
||||
export declare type RecommendOptions = Partial<ClientTransporterOptions>;
|
||||
|
||||
export declare type RecommendQueriesResponse<TObject> = {
|
||||
/**
|
||||
* The list of results.
|
||||
*/
|
||||
readonly results: ReadonlyArray<SearchResponse<TObject>>;
|
||||
};
|
||||
|
||||
export declare type RecommendSearchOptions = Omit<SearchOptions, 'page' | 'hitsPerPage' | 'offset' | 'length'>;
|
||||
|
||||
export declare type RecommendTrendingFacetsQueriesResponse = {
|
||||
/**
|
||||
* The list of results.
|
||||
*/
|
||||
readonly results: readonly TrendingFacetsResponse[];
|
||||
};
|
||||
|
||||
export declare type RelatedProductsQuery = Omit<RecommendationsQuery, 'model'>;
|
||||
|
||||
declare type RequestOptions_2 = {
|
||||
/**
|
||||
* If the given request should persist on the cache. Keep in mind,
|
||||
* that some methods may have this option enabled by default.
|
||||
*/
|
||||
readonly cacheable?: boolean;
|
||||
/**
|
||||
* Custom timeout for the request. Note that, in normal situacions
|
||||
* the given timeout will be applied. But the transporter layer may
|
||||
* increase this timeout if there is need for it.
|
||||
*/
|
||||
readonly timeout?: number;
|
||||
/**
|
||||
* Custom headers for the request. This headers are
|
||||
* going to be merged the transporter headers.
|
||||
*/
|
||||
readonly headers?: Readonly<Record<string, string>>;
|
||||
/**
|
||||
* Custom query parameters for the request. This query parameters are
|
||||
* going to be merged the transporter query parameters.
|
||||
*/
|
||||
readonly queryParameters?: Record<string, any>;
|
||||
/**
|
||||
* Custom data for the request. This data are
|
||||
* going to be merged the transporter data.
|
||||
*/
|
||||
readonly data?: Record<string, any>;
|
||||
/**
|
||||
* Additional request body values. It's only taken in
|
||||
* consideration in `POST` and `PUT` requests.
|
||||
*/
|
||||
[key: string]: any;
|
||||
};
|
||||
|
||||
declare type SearchOptions_2 = {
|
||||
/**
|
||||
* Create a new query with an empty search query.
|
||||
*/
|
||||
readonly query?: string;
|
||||
/**
|
||||
* Allows a search for similar objects, but the query has to be constructed on your end and included alongside an empty query.
|
||||
*
|
||||
* The similarQuery should be made from the tags and keywords of the relevant object.
|
||||
*/
|
||||
readonly similarQuery?: string;
|
||||
/**
|
||||
* Filter hits by facet value.
|
||||
*/
|
||||
readonly facetFilters?: string | readonly string[] | ReadonlyArray<readonly string[] | string>;
|
||||
/**
|
||||
* Create filters for ranking purposes, where records that match the filter are ranked highest.
|
||||
*/
|
||||
readonly optionalFilters?: string | readonly string[] | ReadonlyArray<readonly string[] | string>;
|
||||
/**
|
||||
* Filter on numeric attributes.
|
||||
*/
|
||||
readonly numericFilters?: string | readonly string[] | ReadonlyArray<readonly string[] | string>;
|
||||
/**
|
||||
* Filter hits by tags. tagFilters is a different way of filtering, which relies on the _tags
|
||||
* attribute. It uses a simpler syntax than filters. You can use it when you want to do
|
||||
* simple filtering based on tags.
|
||||
*/
|
||||
readonly tagFilters?: string | readonly string[] | ReadonlyArray<readonly string[] | string>;
|
||||
/**
|
||||
* Determines how to calculate the total score for filtering.
|
||||
*/
|
||||
readonly sumOrFiltersScores?: boolean;
|
||||
/**
|
||||
* Filter the query with numeric, facet and/or tag filters.
|
||||
*/
|
||||
readonly filters?: string;
|
||||
/**
|
||||
* Specify the page to retrieve.
|
||||
*/
|
||||
readonly page?: number;
|
||||
/**
|
||||
* Set the number of hits per page.
|
||||
*/
|
||||
readonly hitsPerPage?: number;
|
||||
/**
|
||||
* Specify the offset of the first hit to return.
|
||||
*/
|
||||
readonly offset?: number;
|
||||
/**
|
||||
* Set the number of hits to retrieve (used only with offset).
|
||||
*/
|
||||
readonly length?: number;
|
||||
/**
|
||||
* List of attributes to highlight.
|
||||
*/
|
||||
readonly attributesToHighlight?: readonly string[];
|
||||
/**
|
||||
* List of attributes to snippet, with an optional maximum number of words to snippet.
|
||||
*/
|
||||
readonly attributesToSnippet?: readonly string[];
|
||||
/**
|
||||
* Gives control over which attributes to retrieve and which not to retrieve.
|
||||
*/
|
||||
readonly attributesToRetrieve?: readonly string[];
|
||||
/**
|
||||
* The HTML string to insert before the highlighted parts in all highlight and snippet results.
|
||||
*/
|
||||
readonly highlightPreTag?: string;
|
||||
/**
|
||||
* The HTML string to insert after the highlighted parts in all highlight and snippet results
|
||||
*/
|
||||
readonly highlightPostTag?: string;
|
||||
/**
|
||||
* String used as an ellipsis indicator when a snippet is truncated.
|
||||
*/
|
||||
readonly snippetEllipsisText?: string;
|
||||
/**
|
||||
* Restrict highlighting and snippeting to items that matched the query.
|
||||
*/
|
||||
readonly restrictHighlightAndSnippetArrays?: boolean;
|
||||
/**
|
||||
* Facets to retrieve.
|
||||
*/
|
||||
readonly facets?: readonly string[];
|
||||
/**
|
||||
* Maximum number of facet values to return for each facet during a regular search.
|
||||
*/
|
||||
readonly maxValuesPerFacet?: number;
|
||||
/**
|
||||
* Force faceting to be applied after de-duplication (via the Distinct setting).
|
||||
*/
|
||||
readonly facetingAfterDistinct?: boolean;
|
||||
/**
|
||||
* Minimum number of characters a word in the query string must contain to accept matches with 1 typo
|
||||
*/
|
||||
readonly minWordSizefor1Typo?: number;
|
||||
/**
|
||||
* Minimum number of characters a word in the query string must contain to accept matches with 2 typos.
|
||||
*/
|
||||
readonly minWordSizefor2Typos?: number;
|
||||
/**
|
||||
* Whether to allow typos on numbers (“numeric tokens”) in the query string.
|
||||
*/
|
||||
readonly allowTyposOnNumericTokens?: boolean;
|
||||
/**
|
||||
* List of attributes on which you want to disable typo tolerance.
|
||||
*/
|
||||
readonly disableTypoToleranceOnAttributes?: readonly string[];
|
||||
/**
|
||||
* Controls if and how query words are interpreted as prefixes.
|
||||
*/
|
||||
readonly queryType?: 'prefixLast' | 'prefixAll' | 'prefixNone';
|
||||
/**
|
||||
* Selects a strategy to remove words from the query when it doesn’t match any hits.
|
||||
*/
|
||||
readonly removeWordsIfNoResults?: 'none' | 'lastWords' | 'firstWords' | 'allOptional';
|
||||
/**
|
||||
* Enables the advanced query syntax.
|
||||
*/
|
||||
readonly advancedSyntax?: boolean;
|
||||
/**
|
||||
* AdvancedSyntaxFeatures can be exactPhrase or excludeWords
|
||||
*/
|
||||
readonly advancedSyntaxFeatures?: ReadonlyArray<'exactPhrase' | 'excludeWords'>;
|
||||
/**
|
||||
* A list of words that should be considered as optional when found in the query.
|
||||
*/
|
||||
readonly optionalWords?: string | readonly string[];
|
||||
/**
|
||||
* List of attributes on which you want to disable the exact ranking criterion.
|
||||
*/
|
||||
readonly disableExactOnAttributes?: readonly string[];
|
||||
/**
|
||||
* Controls how the exact ranking criterion is computed when the query contains only one word.
|
||||
*/
|
||||
readonly exactOnSingleWordQuery?: 'attribute' | 'none' | 'word';
|
||||
/**
|
||||
* List of alternatives that should be considered an exact match by the exact ranking criterion.
|
||||
*/
|
||||
readonly alternativesAsExact?: ReadonlyArray<'ignorePlurals' | 'singleWordSynonym' | 'multiWordsSynonym'>;
|
||||
/**
|
||||
* Whether rules should be globally enabled.
|
||||
*/
|
||||
readonly enableRules?: boolean;
|
||||
/**
|
||||
* Enables contextual rules.
|
||||
*/
|
||||
readonly ruleContexts?: readonly string[];
|
||||
/**
|
||||
* Enables de-duplication or grouping of results.
|
||||
*/
|
||||
readonly distinct?: boolean | number;
|
||||
/**
|
||||
* Whether the current query will be taken into account in the Analytics
|
||||
*/
|
||||
readonly analytics?: boolean;
|
||||
/**
|
||||
* List of tags to apply to the query in the analytics.
|
||||
*/
|
||||
readonly analyticsTags?: readonly string[];
|
||||
/**
|
||||
* Whether to take into account an index’s synonyms for a particular search.
|
||||
*/
|
||||
readonly synonyms?: boolean;
|
||||
/**
|
||||
* Whether to highlight and snippet the original word that matches the synonym or the synonym itself.
|
||||
*/
|
||||
readonly replaceSynonymsInHighlight?: boolean;
|
||||
/**
|
||||
* Precision of the proximity ranking criterion.
|
||||
*/
|
||||
readonly minProximity?: number;
|
||||
/**
|
||||
* Choose which fields the response will contain. Applies to search and browse queries.
|
||||
*/
|
||||
readonly responseFields?: readonly string[];
|
||||
/**
|
||||
* Maximum number of facet hits to return during a search for facet values.
|
||||
*/
|
||||
readonly maxFacetHits?: number;
|
||||
/**
|
||||
* Whether to include or exclude a query from the processing-time percentile computation.
|
||||
*/
|
||||
readonly percentileComputation?: boolean;
|
||||
/**
|
||||
* Enable the Click Analytics feature.
|
||||
*/
|
||||
readonly clickAnalytics?: boolean;
|
||||
/**
|
||||
* The `personalizationImpact` parameter sets the percentage of the impact that personalization has on ranking records. The
|
||||
* value must be between 0 and 100 (inclusive). This parameter will not be taken into account if `enablePersonalization`
|
||||
* is **false**.
|
||||
*/
|
||||
readonly personalizationImpact?: number;
|
||||
/**
|
||||
* Enable personalization for the query
|
||||
*/
|
||||
readonly enablePersonalization?: boolean;
|
||||
/**
|
||||
* Restricts a given query to look in only a subset of your searchable attributes.
|
||||
*/
|
||||
readonly restrictSearchableAttributes?: readonly string[];
|
||||
/**
|
||||
* Controls how facet values are sorted.
|
||||
*/
|
||||
readonly sortFacetValuesBy?: 'count' | 'alpha';
|
||||
/**
|
||||
* Controls whether typo tolerance is enabled and how it is applied.
|
||||
*/
|
||||
readonly typoTolerance?: boolean | 'min' | 'strict';
|
||||
/**
|
||||
* Search for entries around a central geolocation, enabling a geo search within a circular area.
|
||||
*/
|
||||
readonly aroundLatLng?: string;
|
||||
/**
|
||||
* Search for entries around a given location automatically computed from the requester’s IP address.
|
||||
*/
|
||||
readonly aroundLatLngViaIP?: boolean;
|
||||
/**
|
||||
* Define the maximum radius for a geo search (in meters).
|
||||
*/
|
||||
readonly aroundRadius?: number | 'all';
|
||||
/**
|
||||
* Precision of geo search (in meters), to add grouping by geo location to the ranking formula.
|
||||
*/
|
||||
readonly aroundPrecision?: number | ReadonlyArray<{
|
||||
readonly from: number;
|
||||
readonly value: number;
|
||||
}>;
|
||||
/**
|
||||
* Minimum radius (in meters) used for a geo search when aroundRadius is not set.
|
||||
*/
|
||||
readonly minimumAroundRadius?: number;
|
||||
/**
|
||||
* Search inside a rectangular area (in geo coordinates).
|
||||
*/
|
||||
readonly insideBoundingBox?: ReadonlyArray<readonly number[]> | string;
|
||||
/**
|
||||
* Search inside a polygon (in geo coordinates).
|
||||
*/
|
||||
readonly insidePolygon?: ReadonlyArray<readonly number[]>;
|
||||
/**
|
||||
* Treats singular, plurals, and other forms of declensions as matching terms.
|
||||
*/
|
||||
readonly ignorePlurals?: boolean | readonly string[];
|
||||
/**
|
||||
* Removes stop (common) words from the query before executing it.
|
||||
*/
|
||||
readonly removeStopWords?: boolean | readonly string[];
|
||||
/**
|
||||
* List of supported languages with their associated language ISO code.
|
||||
*
|
||||
* Apply a set of natural language best practices such as ignorePlurals,
|
||||
* removeStopWords, removeWordsIfNoResults, analyticsTags and ruleContexts.
|
||||
*/
|
||||
readonly naturalLanguages?: readonly string[];
|
||||
/**
|
||||
* When true, each hit in the response contains an additional _rankingInfo object.
|
||||
*/
|
||||
readonly getRankingInfo?: boolean;
|
||||
/**
|
||||
* A user identifier.
|
||||
* Format: alphanumeric string [a-zA-Z0-9_-]
|
||||
* Length: between 1 and 64 characters.
|
||||
*/
|
||||
readonly userToken?: string;
|
||||
/**
|
||||
* Can be used to enable or disable A/B tests at query time.
|
||||
* Engine's default: true
|
||||
*/
|
||||
readonly enableABTest?: boolean;
|
||||
/**
|
||||
* Enable word segmentation (also called decompounding) at query time for
|
||||
* compatible languages. For example, this turns the Dutch query
|
||||
* "spaanplaatbehang" into "spaan plaat behang" to retrieve more relevant
|
||||
* results.
|
||||
*/
|
||||
readonly decompoundQuery?: boolean;
|
||||
/**
|
||||
* The relevancy threshold to apply to searches in a virtual index [0-100]. A bigger
|
||||
* value means fewer but more relevant results; a smaller value means more but
|
||||
* less relevant results.
|
||||
*/
|
||||
readonly relevancyStrictness?: number;
|
||||
/**
|
||||
* Whether this search should use Dynamic Re-Ranking.
|
||||
* @link https://www.algolia.com/doc/guides/algolia-ai/re-ranking/
|
||||
*
|
||||
* Note: You need to turn on Dynamic Re-Ranking on your index for it to have an effect on
|
||||
* your search results. You can do this through the Re-Ranking page on the dashboard.
|
||||
* This parameter is only used to turn off Dynamic Re-Ranking (with false) at search time.
|
||||
*/
|
||||
readonly enableReRanking?: boolean;
|
||||
/**
|
||||
* When Dynamic Re-Ranking is enabled, only records that match these filters will be impacted by Dynamic Re-Ranking.
|
||||
*/
|
||||
readonly reRankingApplyFilter?: string | readonly string[] | ReadonlyArray<readonly string[] | string> | null;
|
||||
/**
|
||||
* Sets the languages to be used by language-specific settings and functionalities such as ignorePlurals, removeStopWords, and CJK word-detection.
|
||||
*/
|
||||
readonly queryLanguages?: readonly string[];
|
||||
/**
|
||||
* Enriches the API’s response with meta-information as to how the query was processed.
|
||||
*/
|
||||
readonly explain?: readonly string[];
|
||||
};
|
||||
|
||||
export declare type TrendingFacetHit = {
|
||||
readonly _score: number;
|
||||
readonly facetName: string;
|
||||
readonly facetValue: string;
|
||||
};
|
||||
|
||||
export declare type TrendingFacetsQuery = {
|
||||
/**
|
||||
* The name of the target index.
|
||||
*/
|
||||
readonly indexName: string;
|
||||
/**
|
||||
* Threshold for the recommendations confidence score (between 0 and 100). Only recommendations with a greater score are returned.
|
||||
*/
|
||||
readonly threshold?: number;
|
||||
/**
|
||||
* How many recommendations to retrieve.
|
||||
*/
|
||||
readonly maxRecommendations?: number;
|
||||
/**
|
||||
* The facet attribute to get recommendations for.
|
||||
*/
|
||||
readonly facetName: string;
|
||||
};
|
||||
|
||||
export declare type TrendingFacetsResponse = Omit<SearchResponse, 'hits'> & {
|
||||
readonly hits: readonly TrendingFacetHit[];
|
||||
};
|
||||
|
||||
export declare type TrendingItemsQuery = {
|
||||
/**
|
||||
* The name of the target index.
|
||||
*/
|
||||
readonly indexName: string;
|
||||
/**
|
||||
* Threshold for the recommendations confidence score (between 0 and 100). Only recommendations with a greater score are returned.
|
||||
*/
|
||||
readonly threshold?: number;
|
||||
/**
|
||||
* How many recommendations to retrieve.
|
||||
*/
|
||||
readonly maxRecommendations?: number;
|
||||
/**
|
||||
* List of [search parameters](https://www.algolia.com/doc/api-reference/search-api-parameters/) to send.
|
||||
*/
|
||||
readonly queryParameters?: RecommendSearchOptions_2;
|
||||
/**
|
||||
* List of [search parameters](https://www.algolia.com/doc/api-reference/search-api-parameters/) to send.
|
||||
*
|
||||
* Additional filters to use as fallback when there aren’t enough recommendations.
|
||||
*/
|
||||
readonly fallbackParameters?: RecommendSearchOptions_2;
|
||||
/**
|
||||
* The facet attribute to get recommendations for.
|
||||
*/
|
||||
readonly facetName?: string;
|
||||
/**
|
||||
* The value of the target facet.
|
||||
*/
|
||||
readonly facetValue?: string;
|
||||
};
|
||||
|
||||
export declare type TrendingModel = 'trending-items' | 'trending-facets';
|
||||
|
||||
export declare type TrendingQuery = (TrendingItemsQuery & {
|
||||
readonly model: TrendingModel;
|
||||
}) | (TrendingFacetsQuery & {
|
||||
readonly model: TrendingModel;
|
||||
});
|
||||
|
||||
export declare type WithRecommendMethods<TType> = TType & {
|
||||
/**
|
||||
* Returns recommendations.
|
||||
*/
|
||||
readonly getRecommendations: <TObject>(queries: ReadonlyArray<RecommendationsQuery | TrendingQuery | RecommendedForYouQuery>, requestOptions?: RequestOptions & SearchOptions) => Readonly<Promise<RecommendQueriesResponse<TObject>>>;
|
||||
/**
|
||||
* Returns [Related Products](https://algolia.com/doc/guides/algolia-ai/recommend/#related-products).
|
||||
*/
|
||||
readonly getRelatedProducts: <TObject>(queries: readonly RelatedProductsQuery[], requestOptions?: RequestOptions & SearchOptions) => Readonly<Promise<RecommendQueriesResponse<TObject>>>;
|
||||
/**
|
||||
* Returns [Frequently Bought Together](https://algolia.com/doc/guides/algolia-ai/recommend/#frequently-bought-together) products.
|
||||
*/
|
||||
readonly getFrequentlyBoughtTogether: <TObject>(queries: readonly FrequentlyBoughtTogetherQuery[], requestOptions?: RequestOptions & SearchOptions) => Readonly<Promise<RecommendQueriesResponse<TObject>>>;
|
||||
/**
|
||||
* Returns trending items
|
||||
*/
|
||||
readonly getTrendingItems: <TObject>(queries: readonly TrendingItemsQuery[], requestOptions?: RequestOptions & SearchOptions) => Readonly<Promise<RecommendQueriesResponse<TObject>>>;
|
||||
/**
|
||||
* Returns trending items per facet
|
||||
*/
|
||||
readonly getTrendingFacets: <TObject>(queries: readonly TrendingFacetsQuery[], requestOptions?: RequestOptions & SearchOptions) => Readonly<Promise<RecommendTrendingFacetsQueriesResponse>>;
|
||||
/**
|
||||
* Returns Looking Similar
|
||||
*/
|
||||
readonly getLookingSimilar: <TObject>(queries: readonly LookingSimilarQuery[], requestOptions?: RequestOptions & SearchOptions) => Readonly<Promise<RecommendQueriesResponse<TObject>>>;
|
||||
/**
|
||||
* Returns Recommended for you
|
||||
*/
|
||||
readonly getRecommendedForYou: <TObject>(queries: readonly RecommendedForYouParams[], requestOptions?: RequestOptions & SearchOptions) => Readonly<Promise<RecommendQueriesResponse<TObject>>>;
|
||||
};
|
||||
|
||||
export { }
|
||||
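For orientation, a minimal usage sketch in JavaScript against the method shapes declared above; the application ID, API key, index name 'products' and facet name 'brand' are placeholder values, not taken from this repository.

// Sketch only: placeholder credentials and index/facet names.
import recommend from '@algolia/recommend';

const client = recommend('YourApplicationID', 'YourSearchOnlyAPIKey');

client
  .getTrendingFacets([{ indexName: 'products', facetName: 'brand', maxRecommendations: 5 }])
  .then((response) => console.log(response))
  .catch((err) => console.error(err));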
958
scripts/node_modules/@algolia/recommend/dist/recommend.esm.browser.js
generated
vendored
Normal file
@ -0,0 +1,958 @@
|
||||
function createBrowserLocalStorageCache(options) {
|
||||
const namespaceKey = `algoliasearch-client-js-${options.key}`;
|
||||
// eslint-disable-next-line functional/no-let
|
||||
let storage;
|
||||
const getStorage = () => {
|
||||
if (storage === undefined) {
|
||||
storage = options.localStorage || window.localStorage;
|
||||
}
|
||||
return storage;
|
||||
};
|
||||
const getNamespace = () => {
|
||||
return JSON.parse(getStorage().getItem(namespaceKey) || '{}');
|
||||
};
|
||||
const setNamespace = (namespace) => {
|
||||
getStorage().setItem(namespaceKey, JSON.stringify(namespace));
|
||||
};
|
||||
const removeOutdatedCacheItems = () => {
|
||||
const timeToLive = options.timeToLive ? options.timeToLive * 1000 : null;
|
||||
const namespace = getNamespace();
|
||||
const filteredNamespaceWithoutOldFormattedCacheItems = Object.fromEntries(Object.entries(namespace).filter(([, cacheItem]) => {
|
||||
return cacheItem.timestamp !== undefined;
|
||||
}));
|
||||
setNamespace(filteredNamespaceWithoutOldFormattedCacheItems);
|
||||
if (!timeToLive)
|
||||
return;
|
||||
const filteredNamespaceWithoutExpiredItems = Object.fromEntries(Object.entries(filteredNamespaceWithoutOldFormattedCacheItems).filter(([, cacheItem]) => {
|
||||
const currentTimestamp = new Date().getTime();
|
||||
const isExpired = cacheItem.timestamp + timeToLive < currentTimestamp;
|
||||
return !isExpired;
|
||||
}));
|
||||
setNamespace(filteredNamespaceWithoutExpiredItems);
|
||||
};
|
||||
return {
|
||||
get(key, defaultValue, events = {
|
||||
miss: () => Promise.resolve(),
|
||||
}) {
|
||||
return Promise.resolve()
|
||||
.then(() => {
|
||||
removeOutdatedCacheItems();
|
||||
const keyAsString = JSON.stringify(key);
|
||||
return getNamespace()[keyAsString];
|
||||
})
|
||||
.then(value => {
|
||||
return Promise.all([value ? value.value : defaultValue(), value !== undefined]);
|
||||
})
|
||||
.then(([value, exists]) => {
|
||||
return Promise.all([value, exists || events.miss(value)]);
|
||||
})
|
||||
.then(([value]) => value);
|
||||
},
|
||||
set(key, value) {
|
||||
return Promise.resolve().then(() => {
|
||||
const namespace = getNamespace();
|
||||
// eslint-disable-next-line functional/immutable-data
|
||||
namespace[JSON.stringify(key)] = {
|
||||
timestamp: new Date().getTime(),
|
||||
value,
|
||||
};
|
||||
getStorage().setItem(namespaceKey, JSON.stringify(namespace));
|
||||
return value;
|
||||
});
|
||||
},
|
||||
delete(key) {
|
||||
return Promise.resolve().then(() => {
|
||||
const namespace = getNamespace();
|
||||
// eslint-disable-next-line functional/immutable-data
|
||||
delete namespace[JSON.stringify(key)];
|
||||
getStorage().setItem(namespaceKey, JSON.stringify(namespace));
|
||||
});
|
||||
},
|
||||
clear() {
|
||||
return Promise.resolve().then(() => {
|
||||
getStorage().removeItem(namespaceKey);
|
||||
});
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
// @todo Add logger on options to debug when caches go wrong.
|
||||
function createFallbackableCache(options) {
|
||||
const caches = [...options.caches];
|
||||
const current = caches.shift(); // eslint-disable-line functional/immutable-data
|
||||
if (current === undefined) {
|
||||
return createNullCache();
|
||||
}
|
||||
return {
|
||||
get(key, defaultValue, events = {
|
||||
miss: () => Promise.resolve(),
|
||||
}) {
|
||||
return current.get(key, defaultValue, events).catch(() => {
|
||||
return createFallbackableCache({ caches }).get(key, defaultValue, events);
|
||||
});
|
||||
},
|
||||
set(key, value) {
|
||||
return current.set(key, value).catch(() => {
|
||||
return createFallbackableCache({ caches }).set(key, value);
|
||||
});
|
||||
},
|
||||
delete(key) {
|
||||
return current.delete(key).catch(() => {
|
||||
return createFallbackableCache({ caches }).delete(key);
|
||||
});
|
||||
},
|
||||
clear() {
|
||||
return current.clear().catch(() => {
|
||||
return createFallbackableCache({ caches }).clear();
|
||||
});
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
function createNullCache() {
|
||||
return {
|
||||
get(_key, defaultValue, events = {
|
||||
miss: () => Promise.resolve(),
|
||||
}) {
|
||||
const value = defaultValue();
|
||||
return value
|
||||
.then(result => Promise.all([result, events.miss(result)]))
|
||||
.then(([result]) => result);
|
||||
},
|
||||
set(_key, value) {
|
||||
return Promise.resolve(value);
|
||||
},
|
||||
delete(_key) {
|
||||
return Promise.resolve();
|
||||
},
|
||||
clear() {
|
||||
return Promise.resolve();
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
function createInMemoryCache(options = { serializable: true }) {
|
||||
// eslint-disable-next-line functional/no-let
|
||||
let cache = {};
|
||||
return {
|
||||
get(key, defaultValue, events = {
|
||||
miss: () => Promise.resolve(),
|
||||
}) {
|
||||
const keyAsString = JSON.stringify(key);
|
||||
if (keyAsString in cache) {
|
||||
return Promise.resolve(options.serializable ? JSON.parse(cache[keyAsString]) : cache[keyAsString]);
|
||||
}
|
||||
const promise = defaultValue();
|
||||
const miss = (events && events.miss) || (() => Promise.resolve());
|
||||
return promise.then((value) => miss(value)).then(() => promise);
|
||||
},
|
||||
set(key, value) {
|
||||
// eslint-disable-next-line functional/immutable-data
|
||||
cache[JSON.stringify(key)] = options.serializable ? JSON.stringify(value) : value;
|
||||
return Promise.resolve(value);
|
||||
},
|
||||
delete(key) {
|
||||
// eslint-disable-next-line functional/immutable-data
|
||||
delete cache[JSON.stringify(key)];
|
||||
return Promise.resolve();
|
||||
},
|
||||
clear() {
|
||||
cache = {};
|
||||
return Promise.resolve();
|
||||
},
|
||||
};
|
||||
}
|
||||
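A small sketch of the cache contract implemented by createInMemoryCache above: get() resolves from the cache when the key is present, otherwise it runs defaultValue() and invokes the miss handler, which is where callers (such as the transporter further down) typically store the value. The key and value here are made up.

// Sketch only: illustrative key and value.
const cache = createInMemoryCache();
const key = { q: 'hello' };

cache
  .get(key, () => Promise.resolve({ hits: [] }), {
    miss: (value) => cache.set(key, value),
  })
  .then((value) => console.log(value)); // logs { hits: [] }; an identical second call is served from the cache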
|
||||
function createAuth(authMode, appId, apiKey) {
|
||||
const credentials = {
|
||||
'x-algolia-api-key': apiKey,
|
||||
'x-algolia-application-id': appId,
|
||||
};
|
||||
return {
|
||||
headers() {
|
||||
return authMode === AuthMode.WithinHeaders ? credentials : {};
|
||||
},
|
||||
queryParameters() {
|
||||
return authMode === AuthMode.WithinQueryParameters ? credentials : {};
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
// eslint-disable-next-line functional/prefer-readonly-type
|
||||
function shuffle(array) {
|
||||
let c = array.length - 1; // eslint-disable-line functional/no-let
|
||||
// eslint-disable-next-line functional/no-loop-statement
|
||||
for (c; c > 0; c--) {
|
||||
const b = Math.floor(Math.random() * (c + 1));
|
||||
const a = array[c];
|
||||
array[c] = array[b]; // eslint-disable-line functional/immutable-data, no-param-reassign
|
||||
array[b] = a; // eslint-disable-line functional/immutable-data, no-param-reassign
|
||||
}
|
||||
return array;
|
||||
}
|
||||
function addMethods(base, methods) {
|
||||
if (!methods) {
|
||||
return base;
|
||||
}
|
||||
Object.keys(methods).forEach(key => {
|
||||
// eslint-disable-next-line functional/immutable-data, no-param-reassign
|
||||
base[key] = methods[key](base);
|
||||
});
|
||||
return base;
|
||||
}
|
||||
function encode(format, ...args) {
|
||||
// eslint-disable-next-line functional/no-let
|
||||
let i = 0;
|
||||
return format.replace(/%s/g, () => encodeURIComponent(args[i++]));
|
||||
}
|
||||
|
||||
const version = '4.24.0';
|
||||
|
||||
const AuthMode = {
|
||||
/**
|
||||
* If auth credentials should be in query parameters.
|
||||
*/
|
||||
WithinQueryParameters: 0,
|
||||
/**
|
||||
* If auth credentials should be in headers.
|
||||
*/
|
||||
WithinHeaders: 1,
|
||||
};
|
||||
|
||||
const LogLevelEnum = {
|
||||
Debug: 1,
|
||||
Info: 2,
|
||||
Error: 3,
|
||||
};
|
||||
|
||||
/* eslint no-console: 0 */
|
||||
function createConsoleLogger(logLevel) {
|
||||
return {
|
||||
debug(message, args) {
|
||||
if (LogLevelEnum.Debug >= logLevel) {
|
||||
console.debug(message, args);
|
||||
}
|
||||
return Promise.resolve();
|
||||
},
|
||||
info(message, args) {
|
||||
if (LogLevelEnum.Info >= logLevel) {
|
||||
console.info(message, args);
|
||||
}
|
||||
return Promise.resolve();
|
||||
},
|
||||
error(message, args) {
|
||||
console.error(message, args);
|
||||
return Promise.resolve();
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
function createBrowserXhrRequester() {
|
||||
return {
|
||||
send(request) {
|
||||
return new Promise((resolve) => {
|
||||
const baseRequester = new XMLHttpRequest();
|
||||
baseRequester.open(request.method, request.url, true);
|
||||
Object.keys(request.headers).forEach(key => baseRequester.setRequestHeader(key, request.headers[key]));
|
||||
const createTimeout = (timeout, content) => {
|
||||
return setTimeout(() => {
|
||||
baseRequester.abort();
|
||||
resolve({
|
||||
status: 0,
|
||||
content,
|
||||
isTimedOut: true,
|
||||
});
|
||||
}, timeout * 1000);
|
||||
};
|
||||
const connectTimeout = createTimeout(request.connectTimeout, 'Connection timeout');
|
||||
// eslint-disable-next-line functional/no-let
|
||||
let responseTimeout;
|
||||
// eslint-disable-next-line functional/immutable-data
|
||||
baseRequester.onreadystatechange = () => {
|
||||
if (baseRequester.readyState > baseRequester.OPENED && responseTimeout === undefined) {
|
||||
clearTimeout(connectTimeout);
|
||||
responseTimeout = createTimeout(request.responseTimeout, 'Socket timeout');
|
||||
}
|
||||
};
|
||||
// eslint-disable-next-line functional/immutable-data
|
||||
baseRequester.onerror = () => {
|
||||
// istanbul ignore next
|
||||
if (baseRequester.status === 0) {
|
||||
clearTimeout(connectTimeout);
|
||||
clearTimeout(responseTimeout);
|
||||
resolve({
|
||||
content: baseRequester.responseText || 'Network request failed',
|
||||
status: baseRequester.status,
|
||||
isTimedOut: false,
|
||||
});
|
||||
}
|
||||
};
|
||||
// eslint-disable-next-line functional/immutable-data
|
||||
baseRequester.onload = () => {
|
||||
clearTimeout(connectTimeout);
|
||||
clearTimeout(responseTimeout);
|
||||
resolve({
|
||||
content: baseRequester.responseText,
|
||||
status: baseRequester.status,
|
||||
isTimedOut: false,
|
||||
});
|
||||
};
|
||||
baseRequester.send(request.data);
|
||||
});
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
function createMappedRequestOptions(requestOptions, timeout) {
|
||||
const options = requestOptions || {};
|
||||
const data = options.data || {};
|
||||
Object.keys(options).forEach(key => {
|
||||
if (['timeout', 'headers', 'queryParameters', 'data', 'cacheable'].indexOf(key) === -1) {
|
||||
data[key] = options[key]; // eslint-disable-line functional/immutable-data
|
||||
}
|
||||
});
|
||||
return {
|
||||
data: Object.entries(data).length > 0 ? data : undefined,
|
||||
timeout: options.timeout || timeout,
|
||||
headers: options.headers || {},
|
||||
queryParameters: options.queryParameters || {},
|
||||
cacheable: options.cacheable,
|
||||
};
|
||||
}
|
||||
|
||||
const CallEnum = {
|
||||
/**
|
||||
* If the host is read only.
|
||||
*/
|
||||
Read: 1,
|
||||
/**
|
||||
* If the host is write only.
|
||||
*/
|
||||
Write: 2,
|
||||
/**
|
||||
* If the host is both read and write.
|
||||
*/
|
||||
Any: 3,
|
||||
};
|
||||
|
||||
const HostStatusEnum = {
|
||||
Up: 1,
|
||||
Down: 2,
|
||||
Timeouted: 3,
|
||||
};
|
||||
|
||||
// By default, API clients at Algolia have an expiration delay
|
||||
// of 5 mins. In the JavaScript client, we have 2 mins.
|
||||
const EXPIRATION_DELAY = 2 * 60 * 1000;
|
||||
function createStatefulHost(host, status = HostStatusEnum.Up) {
|
||||
return {
|
||||
...host,
|
||||
status,
|
||||
lastUpdate: Date.now(),
|
||||
};
|
||||
}
|
||||
function isStatefulHostUp(host) {
|
||||
return host.status === HostStatusEnum.Up || Date.now() - host.lastUpdate > EXPIRATION_DELAY;
|
||||
}
|
||||
function isStatefulHostTimeouted(host) {
|
||||
return (host.status === HostStatusEnum.Timeouted && Date.now() - host.lastUpdate <= EXPIRATION_DELAY);
|
||||
}
|
||||
|
||||
function createStatelessHost(options) {
|
||||
if (typeof options === 'string') {
|
||||
return {
|
||||
protocol: 'https',
|
||||
url: options,
|
||||
accept: CallEnum.Any,
|
||||
};
|
||||
}
|
||||
return {
|
||||
protocol: options.protocol || 'https',
|
||||
url: options.url,
|
||||
accept: options.accept || CallEnum.Any,
|
||||
};
|
||||
}
|
||||
|
||||
const MethodEnum = {
|
||||
Delete: 'DELETE',
|
||||
Get: 'GET',
|
||||
Post: 'POST',
|
||||
Put: 'PUT',
|
||||
};
|
||||
|
||||
function createRetryableOptions(hostsCache, statelessHosts) {
|
||||
return Promise.all(statelessHosts.map(statelessHost => {
|
||||
return hostsCache.get(statelessHost, () => {
|
||||
return Promise.resolve(createStatefulHost(statelessHost));
|
||||
});
|
||||
})).then(statefulHosts => {
|
||||
const hostsUp = statefulHosts.filter(host => isStatefulHostUp(host));
|
||||
const hostsTimeouted = statefulHosts.filter(host => isStatefulHostTimeouted(host));
|
||||
/**
|
||||
* Note: we put the hosts that previously timed out at the end of the list.
|
||||
*/
|
||||
const hostsAvailable = [...hostsUp, ...hostsTimeouted];
|
||||
const statelessHostsAvailable = hostsAvailable.length > 0
|
||||
? hostsAvailable.map(host => createStatelessHost(host))
|
||||
: statelessHosts;
|
||||
return {
|
||||
getTimeout(timeoutsCount, baseTimeout) {
|
||||
/**
|
||||
* Imagine that you have 4 hosts; the timeouts will increase
|
||||
* in the following way: 1 (timed out) > 4 (timed out) > 5 (200)
|
||||
*
|
||||
* Note that, on the very next request, we start from the previous timeout
|
||||
*
|
||||
* 5 (timeouted) > 6 (timeouted) > 7 ...
|
||||
*
|
||||
* This strategy may need to be reviewed, but it is the strategy of our
|
||||
* current v3 version.
|
||||
*/
|
||||
const timeoutMultiplier = hostsTimeouted.length === 0 && timeoutsCount === 0
|
||||
? 1
|
||||
: hostsTimeouted.length + 3 + timeoutsCount;
|
||||
return timeoutMultiplier * baseTimeout;
|
||||
},
|
||||
statelessHosts: statelessHostsAvailable,
|
||||
};
|
||||
});
|
||||
}
|
||||
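To make the getTimeout() comment above concrete, a worked example with illustrative numbers: one previously timed-out host plus two timeouts already observed during the current request turns a 2-second base read timeout into (1 + 3 + 2) * 2 = 12 seconds.

// Sketch only: mirrors the multiplier formula above with made-up inputs.
const hostsTimeoutedLength = 1; // one host previously timed out
const timeoutsCount = 2;        // timeouts already seen during this request
const baseTimeout = 2;          // default read timeout, in seconds
const multiplier =
  hostsTimeoutedLength === 0 && timeoutsCount === 0
    ? 1
    : hostsTimeoutedLength + 3 + timeoutsCount; // 1 + 3 + 2 = 6
console.log(multiplier * baseTimeout);          // 12 (seconds)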
|
||||
const isNetworkError = ({ isTimedOut, status }) => {
|
||||
return !isTimedOut && ~~status === 0;
|
||||
};
|
||||
const isRetryable = (response) => {
|
||||
const status = response.status;
|
||||
const isTimedOut = response.isTimedOut;
|
||||
return (isTimedOut || isNetworkError(response) || (~~(status / 100) !== 2 && ~~(status / 100) !== 4));
|
||||
};
|
||||
const isSuccess = ({ status }) => {
|
||||
return ~~(status / 100) === 2;
|
||||
};
|
||||
const retryDecision = (response, outcomes) => {
|
||||
if (isRetryable(response)) {
|
||||
return outcomes.onRetry(response);
|
||||
}
|
||||
if (isSuccess(response)) {
|
||||
return outcomes.onSuccess(response);
|
||||
}
|
||||
return outcomes.onFail(response);
|
||||
};
|
||||
|
||||
function retryableRequest(transporter, statelessHosts, request, requestOptions) {
|
||||
const stackTrace = []; // eslint-disable-line functional/prefer-readonly-type
|
||||
/**
|
||||
* First we prepare the payload that does not depend on the hosts.
|
||||
*/
|
||||
const data = serializeData(request, requestOptions);
|
||||
const headers = serializeHeaders(transporter, requestOptions);
|
||||
const method = request.method;
|
||||
// On `GET`, the data is proxied to query parameters.
|
||||
const dataQueryParameters = request.method !== MethodEnum.Get
|
||||
? {}
|
||||
: {
|
||||
...request.data,
|
||||
...requestOptions.data,
|
||||
};
|
||||
const queryParameters = {
|
||||
'x-algolia-agent': transporter.userAgent.value,
|
||||
...transporter.queryParameters,
|
||||
...dataQueryParameters,
|
||||
...requestOptions.queryParameters,
|
||||
};
|
||||
let timeoutsCount = 0; // eslint-disable-line functional/no-let
|
||||
const retry = (hosts, // eslint-disable-line functional/prefer-readonly-type
|
||||
getTimeout) => {
|
||||
/**
|
||||
* We iterate over each host until there is no host left.
|
||||
*/
|
||||
const host = hosts.pop(); // eslint-disable-line functional/immutable-data
|
||||
if (host === undefined) {
|
||||
throw createRetryError(stackTraceWithoutCredentials(stackTrace));
|
||||
}
|
||||
const payload = {
|
||||
data,
|
||||
headers,
|
||||
method,
|
||||
url: serializeUrl(host, request.path, queryParameters),
|
||||
connectTimeout: getTimeout(timeoutsCount, transporter.timeouts.connect),
|
||||
responseTimeout: getTimeout(timeoutsCount, requestOptions.timeout),
|
||||
};
|
||||
/**
|
||||
* The stackFrame is pushed to the stackTrace so we
|
||||
* can have information about onRetry and onFailure
|
||||
* decisions.
|
||||
*/
|
||||
const pushToStackTrace = (response) => {
|
||||
const stackFrame = {
|
||||
request: payload,
|
||||
response,
|
||||
host,
|
||||
triesLeft: hosts.length,
|
||||
};
|
||||
// eslint-disable-next-line functional/immutable-data
|
||||
stackTrace.push(stackFrame);
|
||||
return stackFrame;
|
||||
};
|
||||
const decisions = {
|
||||
onSuccess: response => deserializeSuccess(response),
|
||||
onRetry(response) {
|
||||
const stackFrame = pushToStackTrace(response);
|
||||
/**
|
||||
* If the response is a timeout, we increase the number of
|
||||
* timeouts so we can increase the timeout later.
|
||||
*/
|
||||
if (response.isTimedOut) {
|
||||
timeoutsCount++;
|
||||
}
|
||||
return Promise.all([
|
||||
/**
|
||||
* Failures are individually sent to the logger, allowing
|
||||
* the end user to debug / store stack frames even
|
||||
* when a retry error does not happen.
|
||||
*/
|
||||
transporter.logger.info('Retryable failure', stackFrameWithoutCredentials(stackFrame)),
|
||||
/**
|
||||
* We also store the state of the host in failure cases. If the host is
|
||||
* down it will remain down for the next 2 minutes. In a timeout situation,
|
||||
* this host will be added to the end of the list of hosts on the next request.
|
||||
*/
|
||||
transporter.hostsCache.set(host, createStatefulHost(host, response.isTimedOut ? HostStatusEnum.Timeouted : HostStatusEnum.Down)),
|
||||
]).then(() => retry(hosts, getTimeout));
|
||||
},
|
||||
onFail(response) {
|
||||
pushToStackTrace(response);
|
||||
throw deserializeFailure(response, stackTraceWithoutCredentials(stackTrace));
|
||||
},
|
||||
};
|
||||
return transporter.requester.send(payload).then(response => {
|
||||
return retryDecision(response, decisions);
|
||||
});
|
||||
};
|
||||
/**
|
||||
* Finally, for each retryable host, perform the request until we get a non-
|
||||
* retryable response. Some notes here:
|
||||
*
|
||||
* 1. The list is reversed here so we can use `pop` later on, which is more performant.
|
||||
* 2. We also get from the retryable options a timeout multiplier that is tailored
|
||||
* for the current context.
|
||||
*/
|
||||
return createRetryableOptions(transporter.hostsCache, statelessHosts).then(options => {
|
||||
return retry([...options.statelessHosts].reverse(), options.getTimeout);
|
||||
});
|
||||
}
|
||||
|
||||
function createTransporter(options) {
|
||||
const { hostsCache, logger, requester, requestsCache, responsesCache, timeouts, userAgent, hosts, queryParameters, headers, } = options;
|
||||
const transporter = {
|
||||
hostsCache,
|
||||
logger,
|
||||
requester,
|
||||
requestsCache,
|
||||
responsesCache,
|
||||
timeouts,
|
||||
userAgent,
|
||||
headers,
|
||||
queryParameters,
|
||||
hosts: hosts.map(host => createStatelessHost(host)),
|
||||
read(request, requestOptions) {
|
||||
/**
|
||||
* First, we compute the user request options. Now, keep in mind
|
||||
* that using request options the user is able to modify the entire
|
||||
* payload of the request, such as headers, query parameters, and others.
|
||||
*/
|
||||
const mappedRequestOptions = createMappedRequestOptions(requestOptions, transporter.timeouts.read);
|
||||
const createRetryableRequest = () => {
|
||||
/**
|
||||
* Then, we prepare a function factory that contains the construction of
|
||||
* the retryable request. At this point, we do *not* yet perform the actual
|
||||
* request, but we want to have the function factory ready.
|
||||
*/
|
||||
return retryableRequest(transporter, transporter.hosts.filter(host => (host.accept & CallEnum.Read) !== 0), request, mappedRequestOptions);
|
||||
};
|
||||
/**
|
||||
* Once we have the function factory ready, we need to determine whether the
|
||||
* request is "cacheable", i.e. whether it should be cached. Note that, once again,
|
||||
* the user can force this option.
|
||||
*/
|
||||
const cacheable = mappedRequestOptions.cacheable !== undefined
|
||||
? mappedRequestOptions.cacheable
|
||||
: request.cacheable;
|
||||
/**
|
||||
* If it is not "cacheable", we immediately trigger the retryable request, with no
|
||||
* need to check cache implementations.
|
||||
*/
|
||||
if (cacheable !== true) {
|
||||
return createRetryableRequest();
|
||||
}
|
||||
/**
|
||||
* If the request is "cacheable", we need to first compute the key to ask
|
||||
* the cache implementations if this request is in progress or if the
|
||||
* response already exists in the cache.
|
||||
*/
|
||||
const key = {
|
||||
request,
|
||||
mappedRequestOptions,
|
||||
transporter: {
|
||||
queryParameters: transporter.queryParameters,
|
||||
headers: transporter.headers,
|
||||
},
|
||||
};
|
||||
/**
|
||||
* With the computed key, we first ask the responses cache
|
||||
* implementation if this request has been resolved before.
|
||||
*/
|
||||
return transporter.responsesCache.get(key, () => {
|
||||
/**
|
||||
* If the request has never been resolved before, we actually ask if there
|
||||
* is a current request with the same key in progress.
|
||||
*/
|
||||
return transporter.requestsCache.get(key, () => {
|
||||
return (transporter.requestsCache
|
||||
/**
|
||||
* Finally, if there is no request in progress with the same key,
|
||||
* this `createRetryableRequest()` will actually trigger the
|
||||
* retryable request.
|
||||
*/
|
||||
.set(key, createRetryableRequest())
|
||||
.then(response => Promise.all([transporter.requestsCache.delete(key), response]), err => Promise.all([transporter.requestsCache.delete(key), Promise.reject(err)]))
|
||||
.then(([_, response]) => response));
|
||||
});
|
||||
}, {
|
||||
/**
|
||||
* Of course, once we get this response back from the server, we
|
||||
* tell the responses cache to actually store the received response
|
||||
* to be used later.
|
||||
*/
|
||||
miss: response => transporter.responsesCache.set(key, response),
|
||||
});
|
||||
},
|
||||
write(request, requestOptions) {
|
||||
/**
|
||||
* On write requests, no cache mechanisms are applied, and we
|
||||
* proxy the request immediately to the requester.
|
||||
*/
|
||||
return retryableRequest(transporter, transporter.hosts.filter(host => (host.accept & CallEnum.Write) !== 0), request, createMappedRequestOptions(requestOptions, transporter.timeouts.write));
|
||||
},
|
||||
};
|
||||
return transporter;
|
||||
}
|
||||
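To illustrate the read-path caching described in the comments above, a hedged sketch of how a cacheable read flows through the two caches; the transporter instance and the request literal are assumptions for illustration (the recommendations endpoint further down does set cacheable: true).

// Sketch only: assumes `transporter` was built with createTransporter() as above.
transporter.read(
  { method: 'POST', path: '1/indexes/*/recommendations', data: { requests: [] }, cacheable: true },
  {},
);
// 1st call: responsesCache miss -> requestsCache miss -> network request
// identical call while in flight: deduplicated through requestsCache
// later identical calls: served from responsesCache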
|
||||
function createUserAgent(version) {
|
||||
const userAgent = {
|
||||
value: `Algolia for JavaScript (${version})`,
|
||||
add(options) {
|
||||
const addedUserAgent = `; ${options.segment}${options.version !== undefined ? ` (${options.version})` : ''}`;
|
||||
if (userAgent.value.indexOf(addedUserAgent) === -1) {
|
||||
// eslint-disable-next-line functional/immutable-data
|
||||
userAgent.value = `${userAgent.value}${addedUserAgent}`;
|
||||
}
|
||||
return userAgent;
|
||||
},
|
||||
};
|
||||
return userAgent;
|
||||
}
|
||||
|
||||
function deserializeSuccess(response) {
|
||||
// eslint-disable-next-line functional/no-try-statement
|
||||
try {
|
||||
return JSON.parse(response.content);
|
||||
}
|
||||
catch (e) {
|
||||
throw createDeserializationError(e.message, response);
|
||||
}
|
||||
}
|
||||
function deserializeFailure({ content, status }, stackFrame) {
|
||||
// eslint-disable-next-line functional/no-let
|
||||
let message = content;
|
||||
// eslint-disable-next-line functional/no-try-statement
|
||||
try {
|
||||
message = JSON.parse(content).message;
|
||||
}
|
||||
catch (e) {
|
||||
// ..
|
||||
}
|
||||
return createApiError(message, status, stackFrame);
|
||||
}
|
||||
|
||||
function serializeUrl(host, path, queryParameters) {
|
||||
const queryParametersAsString = serializeQueryParameters(queryParameters);
|
||||
// eslint-disable-next-line functional/no-let
|
||||
let url = `${host.protocol}://${host.url}/${path.charAt(0) === '/' ? path.substr(1) : path}`;
|
||||
if (queryParametersAsString.length) {
|
||||
url += `?${queryParametersAsString}`;
|
||||
}
|
||||
return url;
|
||||
}
|
||||
function serializeQueryParameters(parameters) {
|
||||
const isObjectOrArray = (value) => Object.prototype.toString.call(value) === '[object Object]' ||
|
||||
Object.prototype.toString.call(value) === '[object Array]';
|
||||
return Object.keys(parameters)
|
||||
.map(key => encode('%s=%s', key, isObjectOrArray(parameters[key]) ? JSON.stringify(parameters[key]) : parameters[key]))
|
||||
.join('&');
|
||||
}
|
||||
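For clarity, the expected output of the two helpers above for a couple of made-up parameters; object and array values are JSON-stringified before being URI-encoded.

// Sketch only: illustrative parameter names and values.
serializeQueryParameters({ query: 'foo bar', facets: ['brand'] });
// => 'query=foo%20bar&facets=%5B%22brand%22%5D'

encode('%s=%s', 'x-algolia-agent', 'Algolia for JavaScript (4.24.0)');
// => 'x-algolia-agent=Algolia%20for%20JavaScript%20(4.24.0)'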
function serializeData(request, requestOptions) {
|
||||
if (request.method === MethodEnum.Get ||
|
||||
(request.data === undefined && requestOptions.data === undefined)) {
|
||||
return undefined;
|
||||
}
|
||||
const data = Array.isArray(request.data)
|
||||
? request.data
|
||||
: { ...request.data, ...requestOptions.data };
|
||||
return JSON.stringify(data);
|
||||
}
|
||||
function serializeHeaders(transporter, requestOptions) {
|
||||
const headers = {
|
||||
...transporter.headers,
|
||||
...requestOptions.headers,
|
||||
};
|
||||
const serializedHeaders = {};
|
||||
Object.keys(headers).forEach(header => {
|
||||
const value = headers[header];
|
||||
// @ts-ignore
|
||||
// eslint-disable-next-line functional/immutable-data
|
||||
serializedHeaders[header.toLowerCase()] = value;
|
||||
});
|
||||
return serializedHeaders;
|
||||
}
|
||||
|
||||
function stackTraceWithoutCredentials(stackTrace) {
|
||||
return stackTrace.map(stackFrame => stackFrameWithoutCredentials(stackFrame));
|
||||
}
|
||||
function stackFrameWithoutCredentials(stackFrame) {
|
||||
const modifiedHeaders = stackFrame.request.headers['x-algolia-api-key']
|
||||
? { 'x-algolia-api-key': '*****' }
|
||||
: {};
|
||||
return {
|
||||
...stackFrame,
|
||||
request: {
|
||||
...stackFrame.request,
|
||||
headers: {
|
||||
...stackFrame.request.headers,
|
||||
...modifiedHeaders,
|
||||
},
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
function createApiError(message, status, transporterStackTrace) {
|
||||
return {
|
||||
name: 'ApiError',
|
||||
message,
|
||||
status,
|
||||
transporterStackTrace,
|
||||
};
|
||||
}
|
||||
|
||||
function createDeserializationError(message, response) {
|
||||
return {
|
||||
name: 'DeserializationError',
|
||||
message,
|
||||
response,
|
||||
};
|
||||
}
|
||||
|
||||
function createRetryError(transporterStackTrace) {
|
||||
return {
|
||||
name: 'RetryError',
|
||||
message: 'Unreachable hosts - your application id may be incorrect. If the error persists, please reach out to the Algolia Support team: https://alg.li/support .',
|
||||
transporterStackTrace,
|
||||
};
|
||||
}
|
||||
|
||||
const createRecommendClient = options => {
|
||||
const appId = options.appId;
|
||||
const auth = createAuth(options.authMode !== undefined ? options.authMode : AuthMode.WithinHeaders, appId, options.apiKey);
|
||||
const transporter = createTransporter({
|
||||
hosts: [
|
||||
{ url: `${appId}-dsn.algolia.net`, accept: CallEnum.Read },
|
||||
{ url: `${appId}.algolia.net`, accept: CallEnum.Write },
|
||||
].concat(shuffle([
|
||||
{ url: `${appId}-1.algolianet.com` },
|
||||
{ url: `${appId}-2.algolianet.com` },
|
||||
{ url: `${appId}-3.algolianet.com` },
|
||||
])),
|
||||
...options,
|
||||
headers: {
|
||||
...auth.headers(),
|
||||
...{ 'content-type': 'application/x-www-form-urlencoded' },
|
||||
...options.headers,
|
||||
},
|
||||
queryParameters: {
|
||||
...auth.queryParameters(),
|
||||
...options.queryParameters,
|
||||
},
|
||||
});
|
||||
const base = {
|
||||
transporter,
|
||||
appId,
|
||||
addAlgoliaAgent(segment, version) {
|
||||
transporter.userAgent.add({ segment, version });
|
||||
},
|
||||
clearCache() {
|
||||
return Promise.all([
|
||||
transporter.requestsCache.clear(),
|
||||
transporter.responsesCache.clear(),
|
||||
]).then(() => undefined);
|
||||
},
|
||||
};
|
||||
return addMethods(base, options.methods);
|
||||
};
|
||||
|
||||
const getRecommendations = base => {
|
||||
return (queries, requestOptions) => {
|
||||
const requests = queries.map(query => ({
|
||||
...query,
|
||||
// The `threshold` param is required by the endpoint to make it easier
|
||||
// to provide a default value later, so we default it in the client
|
||||
// so that users don't have to provide a value.
|
||||
threshold: query.threshold || 0,
|
||||
}));
|
||||
return base.transporter.read({
|
||||
method: MethodEnum.Post,
|
||||
path: '1/indexes/*/recommendations',
|
||||
data: {
|
||||
requests,
|
||||
},
|
||||
cacheable: true,
|
||||
}, requestOptions);
|
||||
};
|
||||
};
|
||||
|
||||
const getFrequentlyBoughtTogether = base => {
|
||||
return (queries, requestOptions) => {
|
||||
return getRecommendations(base)(queries.map(query => ({
|
||||
...query,
|
||||
fallbackParameters: {},
|
||||
model: 'bought-together',
|
||||
})), requestOptions);
|
||||
};
|
||||
};
|
||||
|
||||
const getRelatedProducts = base => {
|
||||
return (queries, requestOptions) => {
|
||||
return getRecommendations(base)(queries.map(query => ({
|
||||
...query,
|
||||
model: 'related-products',
|
||||
})), requestOptions);
|
||||
};
|
||||
};
|
||||
|
||||
const getTrendingFacets = base => {
|
||||
return (queries, requestOptions) => {
|
||||
const requests = queries.map(query => ({
|
||||
...query,
|
||||
model: 'trending-facets',
|
||||
// The `threshold` param is required by the endpoint to make it easier
|
||||
// to provide a default value later, so we default it in the client
|
||||
// so that users don't have to provide a value.
|
||||
threshold: query.threshold || 0,
|
||||
}));
|
||||
return base.transporter.read({
|
||||
method: MethodEnum.Post,
|
||||
path: '1/indexes/*/recommendations',
|
||||
data: {
|
||||
requests,
|
||||
},
|
||||
cacheable: true,
|
||||
}, requestOptions);
|
||||
};
|
||||
};
|
||||
|
||||
const getTrendingItems = base => {
|
||||
return (queries, requestOptions) => {
|
||||
const requests = queries.map(query => ({
|
||||
...query,
|
||||
model: 'trending-items',
|
||||
// The `threshold` param is required by the endpoint to make it easier
|
||||
// to provide a default value later, so we default it in the client
|
||||
// so that users don't have to provide a value.
|
||||
threshold: query.threshold || 0,
|
||||
}));
|
||||
return base.transporter.read({
|
||||
method: MethodEnum.Post,
|
||||
path: '1/indexes/*/recommendations',
|
||||
data: {
|
||||
requests,
|
||||
},
|
||||
cacheable: true,
|
||||
}, requestOptions);
|
||||
};
|
||||
};
|
||||
|
||||
const getLookingSimilar = base => {
|
||||
return (queries, requestOptions) => {
|
||||
return getRecommendations(base)(queries.map(query => ({
|
||||
...query,
|
||||
model: 'looking-similar',
|
||||
})), requestOptions);
|
||||
};
|
||||
};
|
||||
|
||||
const getRecommendedForYou = base => {
|
||||
return (queries, requestOptions) => {
|
||||
const requests = queries.map(query => ({
|
||||
...query,
|
||||
model: 'recommended-for-you',
|
||||
threshold: query.threshold || 0,
|
||||
}));
|
||||
return base.transporter.read({
|
||||
method: MethodEnum.Post,
|
||||
path: '1/indexes/*/recommendations',
|
||||
data: {
|
||||
requests,
|
||||
},
|
||||
cacheable: true,
|
||||
}, requestOptions);
|
||||
};
|
||||
};
|
||||
|
||||
function recommend(appId, apiKey, options) {
|
||||
const commonOptions = {
|
||||
appId,
|
||||
apiKey,
|
||||
timeouts: {
|
||||
connect: 1,
|
||||
read: 2,
|
||||
write: 30,
|
||||
},
|
||||
requester: createBrowserXhrRequester(),
|
||||
logger: createConsoleLogger(LogLevelEnum.Error),
|
||||
responsesCache: createInMemoryCache(),
|
||||
requestsCache: createInMemoryCache({ serializable: false }),
|
||||
hostsCache: createFallbackableCache({
|
||||
caches: [
|
||||
createBrowserLocalStorageCache({ key: `${version}-${appId}` }),
|
||||
createInMemoryCache(),
|
||||
],
|
||||
}),
|
||||
userAgent: createUserAgent(version)
|
||||
.add({ segment: 'Recommend', version })
|
||||
.add({ segment: 'Browser' }),
|
||||
authMode: AuthMode.WithinQueryParameters,
|
||||
};
|
||||
return createRecommendClient({
|
||||
...commonOptions,
|
||||
...options,
|
||||
methods: {
|
||||
getFrequentlyBoughtTogether,
|
||||
getRecommendations,
|
||||
getRelatedProducts,
|
||||
getTrendingFacets,
|
||||
getTrendingItems,
|
||||
getLookingSimilar,
|
||||
getRecommendedForYou,
|
||||
},
|
||||
});
|
||||
}
|
||||
/* eslint-disable functional/immutable-data */
|
||||
recommend.version = version;
|
||||
recommend.getFrequentlyBoughtTogether = getFrequentlyBoughtTogether;
|
||||
recommend.getRecommendations = getRecommendations;
|
||||
recommend.getRelatedProducts = getRelatedProducts;
|
||||
recommend.getTrendingFacets = getTrendingFacets;
|
||||
recommend.getTrendingItems = getTrendingItems;
|
||||
recommend.getLookingSimilar = getLookingSimilar;
|
||||
recommend.getRecommendedForYou = getRecommendedForYou;
|
||||
|
||||
export default recommend;
|
||||
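Because the options argument is shallow-merged over the defaults inside recommend() above, individual settings can be overridden at construction time. A minimal sketch with placeholder credentials and illustrative values; note that timeouts is replaced as a whole object, so all three keys are given.

// Sketch only: placeholder credentials, illustrative timeout values (in seconds).
import recommend from '@algolia/recommend';

const client = recommend('YourApplicationID', 'YourSearchOnlyAPIKey', {
  timeouts: { connect: 2, read: 5, write: 30 },
});

client.addAlgoliaAgent('my-integration', '1.0.0'); // appended to the x-algolia-agent value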
2
scripts/node_modules/@algolia/recommend/dist/recommend.umd.js
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
3
scripts/node_modules/@algolia/recommend/index.d.ts
generated
vendored
Normal file
@ -0,0 +1,3 @@
|
||||
/* eslint-disable import/no-unresolved*/
|
||||
export * from './dist/recommend';
|
||||
export { default } from './dist/recommend';
|
||||
15
scripts/node_modules/@algolia/recommend/index.js
generated
vendored
Normal file
@ -0,0 +1,15 @@
|
||||
/* eslint-disable functional/immutable-data, import/no-commonjs */
|
||||
const recommend = require('./dist/recommend.cjs.js');
|
||||
|
||||
/**
|
||||
* The CommonJS build is the default entry point for the Node environment. Keep
|
||||
* in mind that for the browser environment, we hint the bundler to use the UMD
|
||||
* build instead, as specified in the `browser` key of our `package.json` file.
|
||||
*/
|
||||
module.exports = recommend;
|
||||
|
||||
/**
|
||||
* In addition, we also explicitly set the default export below, making
|
||||
* this CommonJS module compliant with the ES6 module specification.
|
||||
*/
|
||||
module.exports.default = recommend;
|
||||
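A short sketch of consuming this CommonJS entry point from Node, as the comments above describe; the credentials are placeholders.

// Sketch only: placeholder credentials.
const recommend = require('@algolia/recommend');

const client = recommend('YourApplicationID', 'YourSearchOnlyAPIKey');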
40
scripts/node_modules/@algolia/recommend/package.json
generated
vendored
Normal file
@ -0,0 +1,40 @@
|
||||
{
|
||||
"name": "@algolia/recommend",
|
||||
"version": "4.24.0",
|
||||
"private": false,
|
||||
"description": "The perfect starting point to integrate Algolia Recommend within your JavaScript project.",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "git://github.com/algolia/algoliasearch-client-javascript.git"
|
||||
},
|
||||
"license": "MIT",
|
||||
"sideEffects": false,
|
||||
"main": "index.js",
|
||||
"jsdelivr": "./dist/recommend.umd.js",
|
||||
"unpkg": "./dist/recommend.umd.js",
|
||||
"browser": {
|
||||
"./index.js": "./dist/recommend.umd.js"
|
||||
},
|
||||
"types": "index.d.ts",
|
||||
"files": [
|
||||
"dist",
|
||||
"index.js",
|
||||
"index.d.ts"
|
||||
],
|
||||
"scripts": {
|
||||
"test:exports": "node --experimental-modules test/module/is-es-module.mjs && node test/module/is-cjs-module.cjs"
|
||||
},
|
||||
"dependencies": {
|
||||
"@algolia/cache-browser-local-storage": "4.24.0",
|
||||
"@algolia/cache-common": "4.24.0",
|
||||
"@algolia/cache-in-memory": "4.24.0",
|
||||
"@algolia/client-common": "4.24.0",
|
||||
"@algolia/client-search": "4.24.0",
|
||||
"@algolia/logger-common": "4.24.0",
|
||||
"@algolia/logger-console": "4.24.0",
|
||||
"@algolia/requester-browser-xhr": "4.24.0",
|
||||
"@algolia/requester-common": "4.24.0",
|
||||
"@algolia/requester-node-http": "4.24.0",
|
||||
"@algolia/transporter": "4.24.0"
|
||||
}
|
||||
}
|
||||
61
scripts/node_modules/@algolia/requester-browser-xhr/dist/requester-browser-xhr.cjs.js
generated
vendored
Normal file
@ -0,0 +1,61 @@
|
||||
'use strict';
|
||||
|
||||
Object.defineProperty(exports, '__esModule', { value: true });
|
||||
|
||||
function createBrowserXhrRequester() {
|
||||
return {
|
||||
send(request) {
|
||||
return new Promise((resolve) => {
|
||||
const baseRequester = new XMLHttpRequest();
|
||||
baseRequester.open(request.method, request.url, true);
|
||||
Object.keys(request.headers).forEach(key => baseRequester.setRequestHeader(key, request.headers[key]));
|
||||
const createTimeout = (timeout, content) => {
|
||||
return setTimeout(() => {
|
||||
baseRequester.abort();
|
||||
resolve({
|
||||
status: 0,
|
||||
content,
|
||||
isTimedOut: true,
|
||||
});
|
||||
}, timeout * 1000);
|
||||
};
|
||||
const connectTimeout = createTimeout(request.connectTimeout, 'Connection timeout');
|
||||
// eslint-disable-next-line functional/no-let
|
||||
let responseTimeout;
|
||||
// eslint-disable-next-line functional/immutable-data
|
||||
baseRequester.onreadystatechange = () => {
|
||||
if (baseRequester.readyState > baseRequester.OPENED && responseTimeout === undefined) {
|
||||
clearTimeout(connectTimeout);
|
||||
responseTimeout = createTimeout(request.responseTimeout, 'Socket timeout');
|
||||
}
|
||||
};
|
||||
// eslint-disable-next-line functional/immutable-data
|
||||
baseRequester.onerror = () => {
|
||||
// istanbul ignore next
|
||||
if (baseRequester.status === 0) {
|
||||
clearTimeout(connectTimeout);
|
||||
clearTimeout(responseTimeout);
|
||||
resolve({
|
||||
content: baseRequester.responseText || 'Network request failed',
|
||||
status: baseRequester.status,
|
||||
isTimedOut: false,
|
||||
});
|
||||
}
|
||||
};
|
||||
// eslint-disable-next-line functional/immutable-data
|
||||
baseRequester.onload = () => {
|
||||
clearTimeout(connectTimeout);
|
||||
clearTimeout(responseTimeout);
|
||||
resolve({
|
||||
content: baseRequester.responseText,
|
||||
status: baseRequester.status,
|
||||
isTimedOut: false,
|
||||
});
|
||||
};
|
||||
baseRequester.send(request.data);
|
||||
});
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
exports.createBrowserXhrRequester = createBrowserXhrRequester;
|
||||
5
scripts/node_modules/@algolia/requester-browser-xhr/dist/requester-browser-xhr.d.ts
generated
vendored
Normal file
@ -0,0 +1,5 @@
|
||||
import { Requester } from '@algolia/requester-common';
|
||||
|
||||
export declare function createBrowserXhrRequester(): Requester;
|
||||
|
||||
export { }
|
||||
57
scripts/node_modules/@algolia/requester-browser-xhr/dist/requester-browser-xhr.esm.js
generated
vendored
Normal file
@ -0,0 +1,57 @@
|
||||
function createBrowserXhrRequester() {
|
||||
return {
|
||||
send(request) {
|
||||
return new Promise((resolve) => {
|
||||
const baseRequester = new XMLHttpRequest();
|
||||
baseRequester.open(request.method, request.url, true);
|
||||
Object.keys(request.headers).forEach(key => baseRequester.setRequestHeader(key, request.headers[key]));
|
||||
const createTimeout = (timeout, content) => {
|
||||
return setTimeout(() => {
|
||||
baseRequester.abort();
|
||||
resolve({
|
||||
status: 0,
|
||||
content,
|
||||
isTimedOut: true,
|
||||
});
|
||||
}, timeout * 1000);
|
||||
};
|
||||
const connectTimeout = createTimeout(request.connectTimeout, 'Connection timeout');
|
||||
// eslint-disable-next-line functional/no-let
|
||||
let responseTimeout;
|
||||
// eslint-disable-next-line functional/immutable-data
|
||||
baseRequester.onreadystatechange = () => {
|
||||
if (baseRequester.readyState > baseRequester.OPENED && responseTimeout === undefined) {
|
||||
clearTimeout(connectTimeout);
|
||||
responseTimeout = createTimeout(request.responseTimeout, 'Socket timeout');
|
||||
}
|
||||
};
|
||||
// eslint-disable-next-line functional/immutable-data
|
||||
baseRequester.onerror = () => {
|
||||
// istanbul ignore next
|
||||
if (baseRequester.status === 0) {
|
||||
clearTimeout(connectTimeout);
|
||||
clearTimeout(responseTimeout);
|
||||
resolve({
|
||||
content: baseRequester.responseText || 'Network request failed',
|
||||
status: baseRequester.status,
|
||||
isTimedOut: false,
|
||||
});
|
||||
}
|
||||
};
|
||||
// eslint-disable-next-line functional/immutable-data
|
||||
baseRequester.onload = () => {
|
||||
clearTimeout(connectTimeout);
|
||||
clearTimeout(responseTimeout);
|
||||
resolve({
|
||||
content: baseRequester.responseText,
|
||||
status: baseRequester.status,
|
||||
isTimedOut: false,
|
||||
});
|
||||
};
|
||||
baseRequester.send(request.data);
|
||||
});
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
export { createBrowserXhrRequester };
|
||||
2
scripts/node_modules/@algolia/requester-browser-xhr/index.js
generated
vendored
Normal file
@ -0,0 +1,2 @@
|
||||
// eslint-disable-next-line functional/immutable-data, import/no-commonjs
|
||||
module.exports = require('./dist/requester-browser-xhr.cjs.js');
|
||||
22
scripts/node_modules/@algolia/requester-browser-xhr/package.json
generated
vendored
Normal file
@ -0,0 +1,22 @@
|
||||
{
|
||||
"name": "@algolia/requester-browser-xhr",
|
||||
"version": "4.24.0",
|
||||
"private": false,
|
||||
"description": "Promise-based request library for browser using xhr.",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "git://github.com/algolia/algoliasearch-client-javascript.git"
|
||||
},
|
||||
"license": "MIT",
|
||||
"sideEffects": false,
|
||||
"main": "index.js",
|
||||
"module": "dist/requester-browser-xhr.esm.js",
|
||||
"types": "dist/requester-browser-xhr.d.ts",
|
||||
"files": [
|
||||
"index.js",
|
||||
"dist"
|
||||
],
|
||||
"dependencies": {
|
||||
"@algolia/requester-common": "4.24.0"
|
||||
}
|
||||
}
|
||||
12
scripts/node_modules/@algolia/requester-common/dist/requester-common.cjs.js
generated
vendored
Normal file
@ -0,0 +1,12 @@
|
||||
'use strict';
|
||||
|
||||
Object.defineProperty(exports, '__esModule', { value: true });
|
||||
|
||||
const MethodEnum = {
|
||||
Delete: 'DELETE',
|
||||
Get: 'GET',
|
||||
Post: 'POST',
|
||||
Put: 'PUT',
|
||||
};
|
||||
|
||||
exports.MethodEnum = MethodEnum;
|
||||
67
scripts/node_modules/@algolia/requester-common/dist/requester-common.d.ts
generated
vendored
Normal file
@ -0,0 +1,67 @@
|
||||
export declare type Destroyable = {
|
||||
/**
|
||||
* Destroy any sockets that are currently in use by the agent.
|
||||
*
|
||||
* It is usually not necessary to do this. However, if using an agent with keepAlive enabled, then
|
||||
* it is best to explicitly shut down the agent when it will no longer be used. Otherwise, sockets
|
||||
* may hang open for quite a long time before the server terminates them.
|
||||
*/
|
||||
readonly destroy: () => Readonly<Promise<void>>;
|
||||
};
|
||||
|
||||
export declare const MethodEnum: Readonly<Record<string, MethodType>>;
|
||||
|
||||
export declare type MethodType = 'DELETE' | 'GET' | 'POST' | 'PUT';
|
||||
|
||||
declare type Request_2 = {
|
||||
/**
|
||||
* The headers of the request.
|
||||
*/
|
||||
readonly headers: Readonly<Record<string, string>>;
|
||||
/**
|
||||
* The method of the request. `GET`, etc.
|
||||
*/
|
||||
readonly method: MethodType;
|
||||
/**
|
||||
* The complete url of the request, with the protocol.
|
||||
*/
|
||||
readonly url: string;
|
||||
/**
|
||||
* The timeout to establish a connection with the server.
|
||||
*/
|
||||
readonly connectTimeout: number;
|
||||
/**
|
||||
* The timeout to receive the response.
|
||||
*/
|
||||
readonly responseTimeout: number;
|
||||
/**
|
||||
* The data to be transferred to the server.
|
||||
*/
|
||||
readonly data: string | undefined;
|
||||
};
|
||||
export { Request_2 as Request }
|
||||
|
||||
export declare type Requester = {
|
||||
/**
|
||||
* Sends the given `request` to the server.
|
||||
*/
|
||||
readonly send: (request: Request_2) => Readonly<Promise<Response_2>>;
|
||||
};
|
||||
|
||||
declare type Response_2 = {
|
||||
/**
|
||||
* The raw response from the server.
|
||||
*/
|
||||
content: string;
|
||||
/**
|
||||
* Whether the request timed out.
|
||||
*/
|
||||
isTimedOut: boolean;
|
||||
/**
|
||||
* The http status code.
|
||||
*/
|
||||
status: number;
|
||||
};
|
||||
export { Response_2 as Response }
|
||||
|
||||
export { }
|
||||
8
scripts/node_modules/@algolia/requester-common/dist/requester-common.esm.js
generated
vendored
Normal file
@ -0,0 +1,8 @@
|
||||
const MethodEnum = {
|
||||
Delete: 'DELETE',
|
||||
Get: 'GET',
|
||||
Post: 'POST',
|
||||
Put: 'PUT',
|
||||
};
|
||||
|
||||
export { MethodEnum };
|
||||
2
scripts/node_modules/@algolia/requester-common/index.js
generated
vendored
Normal file
@ -0,0 +1,2 @@
|
||||
// eslint-disable-next-line functional/immutable-data, import/no-commonjs
|
||||
module.exports = require('./dist/requester-common.cjs.js');
|
||||
19
scripts/node_modules/@algolia/requester-common/package.json
generated
vendored
Normal file
@ -0,0 +1,19 @@
|
||||
{
|
||||
"name": "@algolia/requester-common",
|
||||
"version": "4.24.0",
|
||||
"private": false,
|
||||
"description": "Common interfaces for promise-based request libraries",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "git://github.com/algolia/algoliasearch-client-js.git"
|
||||
},
|
||||
"license": "MIT",
|
||||
"sideEffects": false,
|
||||
"main": "index.js",
|
||||
"module": "dist/requester-common.esm.js",
|
||||
"types": "dist/requester-common.d.ts",
|
||||
"files": [
|
||||
"index.js",
|
||||
"dist"
|
||||
]
|
||||
}
|
||||
87
scripts/node_modules/@algolia/requester-node-http/dist/requester-node-http.cjs.js
generated
vendored
Normal file
@ -0,0 +1,87 @@
|
||||
'use strict';

Object.defineProperty(exports, '__esModule', { value: true });

var http = require('http');
var https = require('https');
var URL = require('url');

/* eslint functional/prefer-readonly-type: 0 */
const agentOptions = { keepAlive: true };
const defaultHttpAgent = new http.Agent(agentOptions);
const defaultHttpsAgent = new https.Agent(agentOptions);
function createNodeHttpRequester({ agent: userGlobalAgent, httpAgent: userHttpAgent, httpsAgent: userHttpsAgent, requesterOptions = {}, } = {}) {
    const httpAgent = userHttpAgent || userGlobalAgent || defaultHttpAgent;
    const httpsAgent = userHttpsAgent || userGlobalAgent || defaultHttpsAgent;
    return {
        send(request) {
            return new Promise(resolve => {
                const url = URL.parse(request.url);
                const path = url.query === null ? url.pathname : `${url.pathname}?${url.query}`;
                const options = {
                    ...requesterOptions,
                    agent: url.protocol === 'https:' ? httpsAgent : httpAgent,
                    hostname: url.hostname,
                    path,
                    method: request.method,
                    headers: {
                        ...(requesterOptions && requesterOptions.headers ? requesterOptions.headers : {}),
                        ...request.headers,
                    },
                    ...(url.port !== undefined ? { port: url.port || '' } : {}),
                };
                const req = (url.protocol === 'https:' ? https : http).request(options, response => {
                    // eslint-disable-next-line functional/no-let
                    let contentBuffers = [];
                    response.on('data', chunk => {
                        contentBuffers = contentBuffers.concat(chunk);
                    });
                    response.on('end', () => {
                        // eslint-disable-next-line @typescript-eslint/no-use-before-define
                        clearTimeout(connectTimeout);
                        // eslint-disable-next-line @typescript-eslint/no-use-before-define
                        clearTimeout(responseTimeout);
                        resolve({
                            status: response.statusCode || 0,
                            content: Buffer.concat(contentBuffers).toString(),
                            isTimedOut: false,
                        });
                    });
                });
                const createTimeout = (timeout, content) => {
                    return setTimeout(() => {
                        req.abort();
                        resolve({
                            status: 0,
                            content,
                            isTimedOut: true,
                        });
                    }, timeout * 1000);
                };
                const connectTimeout = createTimeout(request.connectTimeout, 'Connection timeout');
                // eslint-disable-next-line functional/no-let
                let responseTimeout;
                req.on('error', error => {
                    clearTimeout(connectTimeout);
                    clearTimeout(responseTimeout);
                    resolve({ status: 0, content: error.message, isTimedOut: false });
                });
                req.once('response', () => {
                    clearTimeout(connectTimeout);
                    responseTimeout = createTimeout(request.responseTimeout, 'Socket timeout');
                });
                if (request.data !== undefined) {
                    req.write(request.data);
                }
                req.end();
            });
        },
        destroy() {
            httpAgent.destroy();
            httpsAgent.destroy();
            return Promise.resolve();
        },
    };
}

exports.createNodeHttpRequester = createNodeHttpRequester;
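For reference, the requester above always resolves (it never rejects) with the `{ content, isTimedOut, status }` shape, and both timeouts are given in seconds before being multiplied by 1000 in `createTimeout`. A minimal usage sketch, assuming the vendored packages resolve by name from `scripts/` and using a placeholder endpoint:

// Illustrative only; the endpoint, headers, and payload are placeholders.
const { createNodeHttpRequester } = require('@algolia/requester-node-http');
const { MethodEnum } = require('@algolia/requester-common');

const requester = createNodeHttpRequester();

requester
  .send({
    url: 'https://example.com/1/indexes/docs/query', // placeholder endpoint
    method: MethodEnum.Post,
    headers: { 'content-type': 'application/json' },
    data: JSON.stringify({ query: 'serverless' }),
    connectTimeout: 2, // seconds; multiplied by 1000 inside createTimeout
    responseTimeout: 5,
  })
  .then(({ status, isTimedOut, content }) => {
    console.log(status, isTimedOut, content.slice(0, 80));
  })
  .finally(() => requester.destroy()); // releases the keep-alive agents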
17 scripts/node_modules/@algolia/requester-node-http/dist/requester-node-http.d.ts generated vendored Normal file
@@ -0,0 +1,17 @@
/// <reference types="node" />

import { Destroyable } from '@algolia/requester-common';
import * as http from 'http';
import * as https from 'https';
import { Requester } from '@algolia/requester-common';

export declare function createNodeHttpRequester({ agent: userGlobalAgent, httpAgent: userHttpAgent, httpsAgent: userHttpsAgent, requesterOptions, }?: NodeHttpRequesterOptions): Requester & Destroyable;

export declare type NodeHttpRequesterOptions = {
    agent?: https.Agent | http.Agent;
    httpAgent?: http.Agent;
    httpsAgent?: https.Agent;
    requesterOptions?: https.RequestOptions;
};

export { }
85 scripts/node_modules/@algolia/requester-node-http/dist/requester-node-http.esm.js generated vendored Normal file
@@ -0,0 +1,85 @@
import * as http from 'http';
import { Agent } from 'http';
import * as https from 'https';
import { Agent as Agent$1 } from 'https';
import { parse } from 'url';

/* eslint functional/prefer-readonly-type: 0 */
const agentOptions = { keepAlive: true };
const defaultHttpAgent = new Agent(agentOptions);
const defaultHttpsAgent = new Agent$1(agentOptions);
function createNodeHttpRequester({ agent: userGlobalAgent, httpAgent: userHttpAgent, httpsAgent: userHttpsAgent, requesterOptions = {}, } = {}) {
    const httpAgent = userHttpAgent || userGlobalAgent || defaultHttpAgent;
    const httpsAgent = userHttpsAgent || userGlobalAgent || defaultHttpsAgent;
    return {
        send(request) {
            return new Promise(resolve => {
                const url = parse(request.url);
                const path = url.query === null ? url.pathname : `${url.pathname}?${url.query}`;
                const options = {
                    ...requesterOptions,
                    agent: url.protocol === 'https:' ? httpsAgent : httpAgent,
                    hostname: url.hostname,
                    path,
                    method: request.method,
                    headers: {
                        ...(requesterOptions && requesterOptions.headers ? requesterOptions.headers : {}),
                        ...request.headers,
                    },
                    ...(url.port !== undefined ? { port: url.port || '' } : {}),
                };
                const req = (url.protocol === 'https:' ? https : http).request(options, response => {
                    // eslint-disable-next-line functional/no-let
                    let contentBuffers = [];
                    response.on('data', chunk => {
                        contentBuffers = contentBuffers.concat(chunk);
                    });
                    response.on('end', () => {
                        // eslint-disable-next-line @typescript-eslint/no-use-before-define
                        clearTimeout(connectTimeout);
                        // eslint-disable-next-line @typescript-eslint/no-use-before-define
                        clearTimeout(responseTimeout);
                        resolve({
                            status: response.statusCode || 0,
                            content: Buffer.concat(contentBuffers).toString(),
                            isTimedOut: false,
                        });
                    });
                });
                const createTimeout = (timeout, content) => {
                    return setTimeout(() => {
                        req.abort();
                        resolve({
                            status: 0,
                            content,
                            isTimedOut: true,
                        });
                    }, timeout * 1000);
                };
                const connectTimeout = createTimeout(request.connectTimeout, 'Connection timeout');
                // eslint-disable-next-line functional/no-let
                let responseTimeout;
                req.on('error', error => {
                    clearTimeout(connectTimeout);
                    clearTimeout(responseTimeout);
                    resolve({ status: 0, content: error.message, isTimedOut: false });
                });
                req.once('response', () => {
                    clearTimeout(connectTimeout);
                    responseTimeout = createTimeout(request.responseTimeout, 'Socket timeout');
                });
                if (request.data !== undefined) {
                    req.write(request.data);
                }
                req.end();
            });
        },
        destroy() {
            httpAgent.destroy();
            httpsAgent.destroy();
            return Promise.resolve();
        },
    };
}

export { createNodeHttpRequester };
2 scripts/node_modules/@algolia/requester-node-http/index.js generated vendored Normal file
@@ -0,0 +1,2 @@
// eslint-disable-next-line functional/immutable-data, import/no-commonjs
module.exports = require('./dist/requester-node-http.cjs.js');
22 scripts/node_modules/@algolia/requester-node-http/package.json generated vendored Normal file
@@ -0,0 +1,22 @@
{
  "name": "@algolia/requester-node-http",
  "version": "4.24.0",
  "private": false,
  "description": "Promise-based request library for node using the native http module.",
  "repository": {
    "type": "git",
    "url": "git://github.com/algolia/algoliasearch-client-javascript.git"
  },
  "license": "MIT",
  "sideEffects": false,
  "main": "index.js",
  "module": "dist/requester-node-http.esm.js",
  "types": "dist/requester-node-http.d.ts",
  "files": [
    "index.js",
    "dist"
  ],
  "dependencies": {
    "@algolia/requester-common": "4.24.0"
  }
}
483 scripts/node_modules/@algolia/transporter/dist/transporter.cjs.js generated vendored Normal file
@@ -0,0 +1,483 @@
'use strict';
|
||||
|
||||
Object.defineProperty(exports, '__esModule', { value: true });
|
||||
|
||||
var requesterCommon = require('@algolia/requester-common');
|
||||
|
||||
function createMappedRequestOptions(requestOptions, timeout) {
|
||||
const options = requestOptions || {};
|
||||
const data = options.data || {};
|
||||
Object.keys(options).forEach(key => {
|
||||
if (['timeout', 'headers', 'queryParameters', 'data', 'cacheable'].indexOf(key) === -1) {
|
||||
data[key] = options[key]; // eslint-disable-line functional/immutable-data
|
||||
}
|
||||
});
|
||||
return {
|
||||
data: Object.entries(data).length > 0 ? data : undefined,
|
||||
timeout: options.timeout || timeout,
|
||||
headers: options.headers || {},
|
||||
queryParameters: options.queryParameters || {},
|
||||
cacheable: options.cacheable,
|
||||
};
|
||||
}
|
||||
|
||||
const CallEnum = {
|
||||
/**
|
||||
* If the host is read only.
|
||||
*/
|
||||
Read: 1,
|
||||
/**
|
||||
* If the host is write only.
|
||||
*/
|
||||
Write: 2,
|
||||
/**
|
||||
* If the host is both read and write.
|
||||
*/
|
||||
Any: 3,
|
||||
};
|
||||
|
||||
const HostStatusEnum = {
|
||||
Up: 1,
|
||||
Down: 2,
|
||||
Timeouted: 3,
|
||||
};
|
||||
|
||||
// By default, API Clients at Algolia have expiration delay
|
||||
// of 5 mins. In the JavaScript client, we have 2 mins.
|
||||
const EXPIRATION_DELAY = 2 * 60 * 1000;
|
||||
function createStatefulHost(host, status = HostStatusEnum.Up) {
|
||||
return {
|
||||
...host,
|
||||
status,
|
||||
lastUpdate: Date.now(),
|
||||
};
|
||||
}
|
||||
function isStatefulHostUp(host) {
|
||||
return host.status === HostStatusEnum.Up || Date.now() - host.lastUpdate > EXPIRATION_DELAY;
|
||||
}
|
||||
function isStatefulHostTimeouted(host) {
|
||||
return (host.status === HostStatusEnum.Timeouted && Date.now() - host.lastUpdate <= EXPIRATION_DELAY);
|
||||
}
|
||||
|
||||
function createStatelessHost(options) {
|
||||
if (typeof options === 'string') {
|
||||
return {
|
||||
protocol: 'https',
|
||||
url: options,
|
||||
accept: CallEnum.Any,
|
||||
};
|
||||
}
|
||||
return {
|
||||
protocol: options.protocol || 'https',
|
||||
url: options.url,
|
||||
accept: options.accept || CallEnum.Any,
|
||||
};
|
||||
}
|
||||
|
||||
function createRetryableOptions(hostsCache, statelessHosts) {
|
||||
return Promise.all(statelessHosts.map(statelessHost => {
|
||||
return hostsCache.get(statelessHost, () => {
|
||||
return Promise.resolve(createStatefulHost(statelessHost));
|
||||
});
|
||||
})).then(statefulHosts => {
|
||||
const hostsUp = statefulHosts.filter(host => isStatefulHostUp(host));
|
||||
const hostsTimeouted = statefulHosts.filter(host => isStatefulHostTimeouted(host));
|
||||
/**
|
||||
* Note, we put the hosts that previously timeouted on the end of the list.
|
||||
*/
|
||||
const hostsAvailable = [...hostsUp, ...hostsTimeouted];
|
||||
const statelessHostsAvailable = hostsAvailable.length > 0
|
||||
? hostsAvailable.map(host => createStatelessHost(host))
|
||||
: statelessHosts;
|
||||
return {
|
||||
getTimeout(timeoutsCount, baseTimeout) {
|
||||
/**
|
||||
* Imagine that you have 4 hosts, if timeouts will increase
|
||||
* on the following way: 1 (timeouted) > 4 (timeouted) > 5 (200)
|
||||
*
|
||||
* Note that, the very next request, we start from the previous timeout
|
||||
*
|
||||
* 5 (timeouted) > 6 (timeouted) > 7 ...
|
||||
*
|
||||
* This strategy may need to be reviewed, but is the strategy on the our
|
||||
* current v3 version.
|
||||
*/
|
||||
const timeoutMultiplier = hostsTimeouted.length === 0 && timeoutsCount === 0
|
||||
? 1
|
||||
: hostsTimeouted.length + 3 + timeoutsCount;
|
||||
return timeoutMultiplier * baseTimeout;
|
||||
},
|
||||
statelessHosts: statelessHostsAvailable,
|
||||
};
|
||||
});
|
||||
}
|
||||
|
||||
const isNetworkError = ({ isTimedOut, status }) => {
|
||||
return !isTimedOut && ~~status === 0;
|
||||
};
|
||||
const isRetryable = (response) => {
|
||||
const status = response.status;
|
||||
const isTimedOut = response.isTimedOut;
|
||||
return (isTimedOut || isNetworkError(response) || (~~(status / 100) !== 2 && ~~(status / 100) !== 4));
|
||||
};
|
||||
const isSuccess = ({ status }) => {
|
||||
return ~~(status / 100) === 2;
|
||||
};
|
||||
const retryDecision = (response, outcomes) => {
|
||||
if (isRetryable(response)) {
|
||||
return outcomes.onRetry(response);
|
||||
}
|
||||
if (isSuccess(response)) {
|
||||
return outcomes.onSuccess(response);
|
||||
}
|
||||
return outcomes.onFail(response);
|
||||
};
|
||||
|
||||
function retryableRequest(transporter, statelessHosts, request, requestOptions) {
|
||||
const stackTrace = []; // eslint-disable-line functional/prefer-readonly-type
|
||||
/**
|
||||
* First we prepare the payload that do not depend from hosts.
|
||||
*/
|
||||
const data = serializeData(request, requestOptions);
|
||||
const headers = serializeHeaders(transporter, requestOptions);
|
||||
const method = request.method;
|
||||
// On `GET`, the data is proxied to query parameters.
|
||||
const dataQueryParameters = request.method !== requesterCommon.MethodEnum.Get
|
||||
? {}
|
||||
: {
|
||||
...request.data,
|
||||
...requestOptions.data,
|
||||
};
|
||||
const queryParameters = {
|
||||
'x-algolia-agent': transporter.userAgent.value,
|
||||
...transporter.queryParameters,
|
||||
...dataQueryParameters,
|
||||
...requestOptions.queryParameters,
|
||||
};
|
||||
let timeoutsCount = 0; // eslint-disable-line functional/no-let
|
||||
const retry = (hosts, // eslint-disable-line functional/prefer-readonly-type
|
||||
getTimeout) => {
|
||||
/**
|
||||
* We iterate on each host, until there is no host left.
|
||||
*/
|
||||
const host = hosts.pop(); // eslint-disable-line functional/immutable-data
|
||||
if (host === undefined) {
|
||||
throw createRetryError(stackTraceWithoutCredentials(stackTrace));
|
||||
}
|
||||
const payload = {
|
||||
data,
|
||||
headers,
|
||||
method,
|
||||
url: serializeUrl(host, request.path, queryParameters),
|
||||
connectTimeout: getTimeout(timeoutsCount, transporter.timeouts.connect),
|
||||
responseTimeout: getTimeout(timeoutsCount, requestOptions.timeout),
|
||||
};
|
||||
/**
|
||||
* The stackFrame is pushed to the stackTrace so we
|
||||
* can have information about onRetry and onFailure
|
||||
* decisions.
|
||||
*/
|
||||
const pushToStackTrace = (response) => {
|
||||
const stackFrame = {
|
||||
request: payload,
|
||||
response,
|
||||
host,
|
||||
triesLeft: hosts.length,
|
||||
};
|
||||
// eslint-disable-next-line functional/immutable-data
|
||||
stackTrace.push(stackFrame);
|
||||
return stackFrame;
|
||||
};
|
||||
const decisions = {
|
||||
onSuccess: response => deserializeSuccess(response),
|
||||
onRetry(response) {
|
||||
const stackFrame = pushToStackTrace(response);
|
||||
/**
|
||||
* If response is a timeout, we increaset the number of
|
||||
* timeouts so we can increase the timeout later.
|
||||
*/
|
||||
if (response.isTimedOut) {
|
||||
timeoutsCount++;
|
||||
}
|
||||
return Promise.all([
|
||||
/**
|
||||
* Failures are individually send the logger, allowing
|
||||
* the end user to debug / store stack frames even
|
||||
* when a retry error does not happen.
|
||||
*/
|
||||
transporter.logger.info('Retryable failure', stackFrameWithoutCredentials(stackFrame)),
|
||||
/**
|
||||
* We also store the state of the host in failure cases. If the host, is
|
||||
* down it will remain down for the next 2 minutes. In a timeout situation,
|
||||
* this host will be added end of the list of hosts on the next request.
|
||||
*/
|
||||
transporter.hostsCache.set(host, createStatefulHost(host, response.isTimedOut ? HostStatusEnum.Timeouted : HostStatusEnum.Down)),
|
||||
]).then(() => retry(hosts, getTimeout));
|
||||
},
|
||||
onFail(response) {
|
||||
pushToStackTrace(response);
|
||||
throw deserializeFailure(response, stackTraceWithoutCredentials(stackTrace));
|
||||
},
|
||||
};
|
||||
return transporter.requester.send(payload).then(response => {
|
||||
return retryDecision(response, decisions);
|
||||
});
|
||||
};
|
||||
/**
|
||||
* Finally, for each retryable host perform request until we got a non
|
||||
* retryable response. Some notes here:
|
||||
*
|
||||
* 1. The reverse here is applied so we can apply a `pop` later on => more performant.
|
||||
* 2. We also get from the retryable options a timeout multiplier that is tailored
|
||||
* for the current context.
|
||||
*/
|
||||
return createRetryableOptions(transporter.hostsCache, statelessHosts).then(options => {
|
||||
return retry([...options.statelessHosts].reverse(), options.getTimeout);
|
||||
});
|
||||
}
|
||||
|
||||
function createTransporter(options) {
|
||||
const { hostsCache, logger, requester, requestsCache, responsesCache, timeouts, userAgent, hosts, queryParameters, headers, } = options;
|
||||
const transporter = {
|
||||
hostsCache,
|
||||
logger,
|
||||
requester,
|
||||
requestsCache,
|
||||
responsesCache,
|
||||
timeouts,
|
||||
userAgent,
|
||||
headers,
|
||||
queryParameters,
|
||||
hosts: hosts.map(host => createStatelessHost(host)),
|
||||
read(request, requestOptions) {
|
||||
/**
|
||||
* First, we compute the user request options. Now, keep in mind,
|
||||
* that using request options the user is able to modified the intire
|
||||
* payload of the request. Such as headers, query parameters, and others.
|
||||
*/
|
||||
const mappedRequestOptions = createMappedRequestOptions(requestOptions, transporter.timeouts.read);
|
||||
const createRetryableRequest = () => {
|
||||
/**
|
||||
* Then, we prepare a function factory that contains the construction of
|
||||
* the retryable request. At this point, we may *not* perform the actual
|
||||
* request. But we want to have the function factory ready.
|
||||
*/
|
||||
return retryableRequest(transporter, transporter.hosts.filter(host => (host.accept & CallEnum.Read) !== 0), request, mappedRequestOptions);
|
||||
};
|
||||
/**
|
||||
* Once we have the function factory ready, we need to determine of the
|
||||
* request is "cacheable" - should be cached. Note that, once again,
|
||||
* the user can force this option.
|
||||
*/
|
||||
const cacheable = mappedRequestOptions.cacheable !== undefined
|
||||
? mappedRequestOptions.cacheable
|
||||
: request.cacheable;
|
||||
/**
|
||||
* If is not "cacheable", we immediatly trigger the retryable request, no
|
||||
* need to check cache implementations.
|
||||
*/
|
||||
if (cacheable !== true) {
|
||||
return createRetryableRequest();
|
||||
}
|
||||
/**
|
||||
* If the request is "cacheable", we need to first compute the key to ask
|
||||
* the cache implementations if this request is on progress or if the
|
||||
* response already exists on the cache.
|
||||
*/
|
||||
const key = {
|
||||
request,
|
||||
mappedRequestOptions,
|
||||
transporter: {
|
||||
queryParameters: transporter.queryParameters,
|
||||
headers: transporter.headers,
|
||||
},
|
||||
};
|
||||
/**
|
||||
* With the computed key, we first ask the responses cache
|
||||
* implemention if this request was been resolved before.
|
||||
*/
|
||||
return transporter.responsesCache.get(key, () => {
|
||||
/**
|
||||
* If the request has never resolved before, we actually ask if there
|
||||
* is a current request with the same key on progress.
|
||||
*/
|
||||
return transporter.requestsCache.get(key, () => {
|
||||
return (transporter.requestsCache
|
||||
/**
|
||||
* Finally, if there is no request in progress with the same key,
|
||||
* this `createRetryableRequest()` will actually trigger the
|
||||
* retryable request.
|
||||
*/
|
||||
.set(key, createRetryableRequest())
|
||||
.then(response => Promise.all([transporter.requestsCache.delete(key), response]), err => Promise.all([transporter.requestsCache.delete(key), Promise.reject(err)]))
|
||||
.then(([_, response]) => response));
|
||||
});
|
||||
}, {
|
||||
/**
|
||||
* Of course, once we get this response back from the server, we
|
||||
* tell response cache to actually store the received response
|
||||
* to be used later.
|
||||
*/
|
||||
miss: response => transporter.responsesCache.set(key, response),
|
||||
});
|
||||
},
|
||||
write(request, requestOptions) {
|
||||
/**
|
||||
* On write requests, no cache mechanisms are applied, and we
|
||||
* proxy the request immediately to the requester.
|
||||
*/
|
||||
return retryableRequest(transporter, transporter.hosts.filter(host => (host.accept & CallEnum.Write) !== 0), request, createMappedRequestOptions(requestOptions, transporter.timeouts.write));
|
||||
},
|
||||
};
|
||||
return transporter;
|
||||
}
|
||||
|
||||
function createUserAgent(version) {
|
||||
const userAgent = {
|
||||
value: `Algolia for JavaScript (${version})`,
|
||||
add(options) {
|
||||
const addedUserAgent = `; ${options.segment}${options.version !== undefined ? ` (${options.version})` : ''}`;
|
||||
if (userAgent.value.indexOf(addedUserAgent) === -1) {
|
||||
// eslint-disable-next-line functional/immutable-data
|
||||
userAgent.value = `${userAgent.value}${addedUserAgent}`;
|
||||
}
|
||||
return userAgent;
|
||||
},
|
||||
};
|
||||
return userAgent;
|
||||
}
|
||||
|
||||
function deserializeSuccess(response) {
|
||||
// eslint-disable-next-line functional/no-try-statement
|
||||
try {
|
||||
return JSON.parse(response.content);
|
||||
}
|
||||
catch (e) {
|
||||
throw createDeserializationError(e.message, response);
|
||||
}
|
||||
}
|
||||
function deserializeFailure({ content, status }, stackFrame) {
|
||||
// eslint-disable-next-line functional/no-let
|
||||
let message = content;
|
||||
// eslint-disable-next-line functional/no-try-statement
|
||||
try {
|
||||
message = JSON.parse(content).message;
|
||||
}
|
||||
catch (e) {
|
||||
// ..
|
||||
}
|
||||
return createApiError(message, status, stackFrame);
|
||||
}
|
||||
|
||||
// eslint-disable-next-line functional/prefer-readonly-type
|
||||
function encode(format, ...args) {
|
||||
// eslint-disable-next-line functional/no-let
|
||||
let i = 0;
|
||||
return format.replace(/%s/g, () => encodeURIComponent(args[i++]));
|
||||
}
|
||||
|
||||
function serializeUrl(host, path, queryParameters) {
|
||||
const queryParametersAsString = serializeQueryParameters(queryParameters);
|
||||
// eslint-disable-next-line functional/no-let
|
||||
let url = `${host.protocol}://${host.url}/${path.charAt(0) === '/' ? path.substr(1) : path}`;
|
||||
if (queryParametersAsString.length) {
|
||||
url += `?${queryParametersAsString}`;
|
||||
}
|
||||
return url;
|
||||
}
|
||||
function serializeQueryParameters(parameters) {
|
||||
const isObjectOrArray = (value) => Object.prototype.toString.call(value) === '[object Object]' ||
|
||||
Object.prototype.toString.call(value) === '[object Array]';
|
||||
return Object.keys(parameters)
|
||||
.map(key => encode('%s=%s', key, isObjectOrArray(parameters[key]) ? JSON.stringify(parameters[key]) : parameters[key]))
|
||||
.join('&');
|
||||
}
|
||||
function serializeData(request, requestOptions) {
|
||||
if (request.method === requesterCommon.MethodEnum.Get ||
|
||||
(request.data === undefined && requestOptions.data === undefined)) {
|
||||
return undefined;
|
||||
}
|
||||
const data = Array.isArray(request.data)
|
||||
? request.data
|
||||
: { ...request.data, ...requestOptions.data };
|
||||
return JSON.stringify(data);
|
||||
}
|
||||
function serializeHeaders(transporter, requestOptions) {
|
||||
const headers = {
|
||||
...transporter.headers,
|
||||
...requestOptions.headers,
|
||||
};
|
||||
const serializedHeaders = {};
|
||||
Object.keys(headers).forEach(header => {
|
||||
const value = headers[header];
|
||||
// @ts-ignore
|
||||
// eslint-disable-next-line functional/immutable-data
|
||||
serializedHeaders[header.toLowerCase()] = value;
|
||||
});
|
||||
return serializedHeaders;
|
||||
}
|
||||
|
||||
function stackTraceWithoutCredentials(stackTrace) {
|
||||
return stackTrace.map(stackFrame => stackFrameWithoutCredentials(stackFrame));
|
||||
}
|
||||
function stackFrameWithoutCredentials(stackFrame) {
|
||||
const modifiedHeaders = stackFrame.request.headers['x-algolia-api-key']
|
||||
? { 'x-algolia-api-key': '*****' }
|
||||
: {};
|
||||
return {
|
||||
...stackFrame,
|
||||
request: {
|
||||
...stackFrame.request,
|
||||
headers: {
|
||||
...stackFrame.request.headers,
|
||||
...modifiedHeaders,
|
||||
},
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
function createApiError(message, status, transporterStackTrace) {
|
||||
return {
|
||||
name: 'ApiError',
|
||||
message,
|
||||
status,
|
||||
transporterStackTrace,
|
||||
};
|
||||
}
|
||||
|
||||
function createDeserializationError(message, response) {
|
||||
return {
|
||||
name: 'DeserializationError',
|
||||
message,
|
||||
response,
|
||||
};
|
||||
}
|
||||
|
||||
function createRetryError(transporterStackTrace) {
|
||||
return {
|
||||
name: 'RetryError',
|
||||
message: 'Unreachable hosts - your application id may be incorrect. If the error persists, please reach out to the Algolia Support team: https://alg.li/support .',
|
||||
transporterStackTrace,
|
||||
};
|
||||
}
|
||||
|
||||
exports.CallEnum = CallEnum;
|
||||
exports.HostStatusEnum = HostStatusEnum;
|
||||
exports.createApiError = createApiError;
|
||||
exports.createDeserializationError = createDeserializationError;
|
||||
exports.createMappedRequestOptions = createMappedRequestOptions;
|
||||
exports.createRetryError = createRetryError;
|
||||
exports.createStatefulHost = createStatefulHost;
|
||||
exports.createStatelessHost = createStatelessHost;
|
||||
exports.createTransporter = createTransporter;
|
||||
exports.createUserAgent = createUserAgent;
|
||||
exports.deserializeFailure = deserializeFailure;
|
||||
exports.deserializeSuccess = deserializeSuccess;
|
||||
exports.isStatefulHostTimeouted = isStatefulHostTimeouted;
|
||||
exports.isStatefulHostUp = isStatefulHostUp;
|
||||
exports.serializeData = serializeData;
|
||||
exports.serializeHeaders = serializeHeaders;
|
||||
exports.serializeQueryParameters = serializeQueryParameters;
|
||||
exports.serializeUrl = serializeUrl;
|
||||
exports.stackFrameWithoutCredentials = stackFrameWithoutCredentials;
|
||||
exports.stackTraceWithoutCredentials = stackTraceWithoutCredentials;
|
||||
383 scripts/node_modules/@algolia/transporter/dist/transporter.d.ts generated vendored Normal file
@@ -0,0 +1,383 @@
import { Cache as Cache_2 } from '@algolia/cache-common';
|
||||
import { Logger } from '@algolia/logger-common';
|
||||
import { MethodType } from '@algolia/requester-common';
|
||||
import { Request as Request_3 } from '@algolia/requester-common';
|
||||
import { Requester } from '@algolia/requester-common';
|
||||
import { Response as Response_2 } from '@algolia/requester-common';
|
||||
|
||||
export declare type ApiError = Error & {
|
||||
/**
|
||||
* The http status code.
|
||||
*/
|
||||
readonly status: number;
|
||||
/**
|
||||
* Contains report of stack frames of the
|
||||
* execution of a certain request.
|
||||
*/
|
||||
readonly transporterStackTrace: readonly StackFrame[];
|
||||
};
|
||||
|
||||
export declare const CallEnum: Readonly<Record<string, CallType>>;
|
||||
|
||||
export declare type CallType = 1 | 2 | 3;
|
||||
|
||||
export declare function createApiError(message: string, status: number, transporterStackTrace: readonly StackFrame[]): ApiError;
|
||||
|
||||
export declare function createDeserializationError(message: string, response: Response_2): DeserializationError;
|
||||
|
||||
export declare function createMappedRequestOptions(requestOptions?: RequestOptions, timeout?: number): MappedRequestOptions;
|
||||
|
||||
export declare function createRetryError(transporterStackTrace: readonly StackFrame[]): RetryError;
|
||||
|
||||
export declare function createStatefulHost(host: StatelessHost, status?: HostStatusType): StatefulHost;
|
||||
|
||||
export declare function createStatelessHost(options: HostOptions): StatelessHost;
|
||||
|
||||
export declare function createTransporter(options: TransporterOptions): Transporter;
|
||||
|
||||
export declare function createUserAgent(version: string): UserAgent;
|
||||
|
||||
export declare type DeserializationError = Error & {
|
||||
/**
|
||||
* The raw response from the server.
|
||||
*/
|
||||
readonly response: Response_2;
|
||||
};
|
||||
|
||||
export declare function deserializeFailure({ content, status }: Response_2, stackFrame: readonly StackFrame[]): Error;
|
||||
|
||||
export declare function deserializeSuccess<TObject>(response: Response_2): TObject;
|
||||
|
||||
declare type Headers_2 = Readonly<Record<string, string>>;
|
||||
export { Headers_2 as Headers }
|
||||
|
||||
export declare type HostOptions = string | {
|
||||
/**
|
||||
* The url of the server, without the protocol.
|
||||
*/
|
||||
readonly url: string;
|
||||
/**
|
||||
* The type of host. Defaults to `Any`.
|
||||
*/
|
||||
readonly accept?: CallType;
|
||||
/**
|
||||
* The protocol. Defaults to `https`.
|
||||
*/
|
||||
readonly protocol?: string;
|
||||
};
|
||||
|
||||
export declare const HostStatusEnum: Readonly<Record<string, HostStatusType>>;
|
||||
|
||||
export declare type HostStatusType = 1 | 2 | 3;
|
||||
|
||||
export declare function isStatefulHostTimeouted(host: StatefulHost): boolean;
|
||||
|
||||
export declare function isStatefulHostUp(host: StatefulHost): boolean;
|
||||
|
||||
export declare type MappedRequestOptions = {
|
||||
/**
|
||||
* If the request should be cached.
|
||||
*/
|
||||
readonly cacheable: boolean | undefined;
|
||||
/**
|
||||
* The `read` or `write` timeout of the request.
|
||||
*/
|
||||
readonly timeout: number | undefined;
|
||||
/**
|
||||
* The headers of the request.
|
||||
*/
|
||||
readonly headers: Record<string, string>;
|
||||
/**
|
||||
* The query parameters of the request.
|
||||
*/
|
||||
readonly queryParameters: Record<string, any>;
|
||||
/**
|
||||
* The data to be transfered to the server.
|
||||
*/
|
||||
readonly data?: Record<string, string>;
|
||||
};
|
||||
|
||||
export declare type QueryParameters = Readonly<Record<string, string>>;
|
||||
|
||||
declare type Request_2 = {
|
||||
/**
|
||||
* The method of the request. `GET`, etc.
|
||||
*/
|
||||
readonly method: MethodType;
|
||||
/**
|
||||
* The path of the request. i.e: `/1/indexes`.
|
||||
*/
|
||||
readonly path: string;
|
||||
/**
|
||||
* The data to transfer to the server.
|
||||
*/
|
||||
readonly data?: Record<string, any> | ReadonlyArray<Record<string, any>>;
|
||||
/**
|
||||
* If the response should persist on cache.
|
||||
*/
|
||||
readonly cacheable?: boolean;
|
||||
};
|
||||
export { Request_2 as Request }
|
||||
|
||||
export declare type RequestOptions = {
|
||||
/**
|
||||
* If the given request should persist on the cache. Keep in mind,
|
||||
* that some methods may have this option enabled by default.
|
||||
*/
|
||||
readonly cacheable?: boolean;
|
||||
/**
|
||||
* Custom timeout for the request. Note that, in normal situacions
|
||||
* the given timeout will be applied. But the transporter layer may
|
||||
* increase this timeout if there is need for it.
|
||||
*/
|
||||
readonly timeout?: number;
|
||||
/**
|
||||
* Custom headers for the request. This headers are
|
||||
* going to be merged the transporter headers.
|
||||
*/
|
||||
readonly headers?: Readonly<Record<string, string>>;
|
||||
/**
|
||||
* Custom query parameters for the request. This query parameters are
|
||||
* going to be merged the transporter query parameters.
|
||||
*/
|
||||
readonly queryParameters?: Record<string, any>;
|
||||
/**
|
||||
* Custom data for the request. This data are
|
||||
* going to be merged the transporter data.
|
||||
*/
|
||||
readonly data?: Record<string, any>;
|
||||
/**
|
||||
* Additional request body values. It's only taken in
|
||||
* consideration in `POST` and `PUT` requests.
|
||||
*/
|
||||
[key: string]: any;
|
||||
};
|
||||
|
||||
export declare type RetryError = Error & {
|
||||
/**
|
||||
* Contains report of stack frames of the
|
||||
* execution of a certain request.
|
||||
*/
|
||||
readonly transporterStackTrace: readonly StackFrame[];
|
||||
};
|
||||
|
||||
export declare function serializeData(request: Request_2, requestOptions: RequestOptions): string | undefined;
|
||||
|
||||
export declare function serializeHeaders(transporter: Transporter, requestOptions: RequestOptions): Headers_2;
|
||||
|
||||
export declare function serializeQueryParameters(parameters: Readonly<Record<string, any>>): string;
|
||||
|
||||
export declare function serializeUrl(host: StatelessHost, path: string, queryParameters: Readonly<Record<string, string>>): string;
|
||||
|
||||
export declare type StackFrame = {
|
||||
/**
|
||||
* The request made.
|
||||
*/
|
||||
readonly request: Request_3;
|
||||
/**
|
||||
* The received response.
|
||||
*/
|
||||
readonly response: Response_2;
|
||||
/**
|
||||
* The host associated with the `request` and the `response`.
|
||||
*/
|
||||
readonly host: StatelessHost;
|
||||
/**
|
||||
* The number of tries left.
|
||||
*/
|
||||
readonly triesLeft: number;
|
||||
};
|
||||
|
||||
export declare function stackFrameWithoutCredentials(stackFrame: StackFrame): StackFrame;
|
||||
|
||||
export declare function stackTraceWithoutCredentials(stackTrace: readonly StackFrame[]): readonly StackFrame[];
|
||||
|
||||
export declare type StatefulHost = StatelessHost & {
|
||||
/**
|
||||
* Holds the last time this host failed in milliseconds elapsed
|
||||
* since the UNIX epoch. This failure can be because of an
|
||||
* timeout error or a because the host is not available.
|
||||
*/
|
||||
readonly lastUpdate: number;
|
||||
/**
|
||||
* Holds the host status. Note that, depending of the `lastUpdate`
|
||||
* an host may be considered as `Up` on the transporter layer.
|
||||
*/
|
||||
readonly status: HostStatusType;
|
||||
};
|
||||
|
||||
export declare type StatelessHost = {
|
||||
/**
|
||||
* The protocol of the stateless host. Between `http` and `https`.
|
||||
*/
|
||||
readonly protocol: string;
|
||||
/**
|
||||
* The url, without protocol.
|
||||
*/
|
||||
readonly url: string;
|
||||
/**
|
||||
* The type of the host.
|
||||
*/
|
||||
readonly accept: CallType;
|
||||
};
|
||||
|
||||
export declare type Timeouts = {
|
||||
/**
|
||||
* The timeout to stablish a connection with the server.
|
||||
*/
|
||||
readonly connect: number;
|
||||
/**
|
||||
* The timeout to receive the response on read requests.
|
||||
*/
|
||||
readonly read: number;
|
||||
/**
|
||||
* The timeout to receive the response on write requests.
|
||||
*/
|
||||
readonly write: number;
|
||||
};
|
||||
|
||||
export declare type Transporter = {
|
||||
/**
|
||||
* The cache of the hosts. Usually used to persist
|
||||
* the state of the host when its down.
|
||||
*/
|
||||
readonly hostsCache: Cache_2;
|
||||
/**
|
||||
* The logger instance to send events of the transporter.
|
||||
*/
|
||||
readonly logger: Logger;
|
||||
/**
|
||||
* The underlying requester used. Should differ
|
||||
* depending of the enviroment where the client
|
||||
* will be used.
|
||||
*/
|
||||
readonly requester: Requester;
|
||||
/**
|
||||
* The cache of the requests. When requests are
|
||||
* `cacheable`, the returned promised persists
|
||||
* in this cache to shared in similar resquests
|
||||
* before being resolved.
|
||||
*/
|
||||
readonly requestsCache: Cache_2;
|
||||
/**
|
||||
* The cache of the responses. When requests are
|
||||
* `cacheable`, the returned responses persists
|
||||
* in this cache to shared in similar resquests.
|
||||
*/
|
||||
readonly responsesCache: Cache_2;
|
||||
/**
|
||||
* The timeouts used by the requester. The transporter
|
||||
* layer may increase this timeouts as defined on the
|
||||
* retry strategy.
|
||||
*/
|
||||
readonly timeouts: Timeouts;
|
||||
/**
|
||||
* The user agent used. Sent on query parameters.
|
||||
*/
|
||||
readonly userAgent: UserAgent;
|
||||
/**
|
||||
* The headers used on each request.
|
||||
*/
|
||||
readonly headers: Headers_2;
|
||||
/**
|
||||
* The query parameters used on each request.
|
||||
*/
|
||||
readonly queryParameters: QueryParameters;
|
||||
/**
|
||||
* The hosts used by the retry strategy.
|
||||
*
|
||||
* @readonly
|
||||
*/
|
||||
hosts: readonly StatelessHost[];
|
||||
/**
|
||||
* Performs a read request using read hosts.
|
||||
*/
|
||||
readonly read: <TResponse>(request: Request_2, requestOptions?: RequestOptions) => Readonly<Promise<TResponse>>;
|
||||
/**
|
||||
* Performs a write request using write hosts.
|
||||
*/
|
||||
readonly write: <TResponse>(request: Request_2, requestOptions?: RequestOptions) => Readonly<Promise<TResponse>>;
|
||||
};
|
||||
|
||||
export declare type TransporterOptions = {
|
||||
/**
|
||||
* The cache of the hosts. Usually used to persist
|
||||
* the state of the host when its down.
|
||||
*/
|
||||
readonly hostsCache: Cache_2;
|
||||
/**
|
||||
* The logger instance to send events of the transporter.
|
||||
*/
|
||||
readonly logger: Logger;
|
||||
/**
|
||||
* The underlying requester used. Should differ
|
||||
* depending of the enviroment where the client
|
||||
* will be used.
|
||||
*/
|
||||
readonly requester: Requester;
|
||||
/**
|
||||
* The cache of the requests. When requests are
|
||||
* `cacheable`, the returned promised persists
|
||||
* in this cache to shared in similar resquests
|
||||
* before being resolved.
|
||||
*/
|
||||
readonly requestsCache: Cache_2;
|
||||
/**
|
||||
* The cache of the responses. When requests are
|
||||
* `cacheable`, the returned responses persists
|
||||
* in this cache to shared in similar resquests.
|
||||
*/
|
||||
readonly responsesCache: Cache_2;
|
||||
/**
|
||||
* The timeouts used by the requester. The transporter
|
||||
* layer may increase this timeouts as defined on the
|
||||
* retry strategy.
|
||||
*/
|
||||
readonly timeouts: Timeouts;
|
||||
/**
|
||||
* The hosts used by the requester.
|
||||
*/
|
||||
readonly hosts: readonly HostOptions[];
|
||||
/**
|
||||
* The headers used by the requester. The transporter
|
||||
* layer may add some extra headers during the request
|
||||
* for the user agent, and others.
|
||||
*/
|
||||
readonly headers: Headers_2;
|
||||
/**
|
||||
* The query parameters used by the requester. The transporter
|
||||
* layer may add some extra headers during the request
|
||||
* for the user agent, and others.
|
||||
*/
|
||||
readonly queryParameters: QueryParameters;
|
||||
/**
|
||||
* The user agent used. Sent on query parameters.
|
||||
*/
|
||||
readonly userAgent: UserAgent;
|
||||
};
|
||||
|
||||
export declare type UserAgent = {
|
||||
/**
|
||||
* The raw value of the user agent.
|
||||
*
|
||||
* @readonly
|
||||
*/
|
||||
value: string;
|
||||
/**
|
||||
* Mutates the current user agent ading the given user agent options.
|
||||
*/
|
||||
readonly add: (options: UserAgentOptions) => UserAgent;
|
||||
};
|
||||
|
||||
export declare type UserAgentOptions = {
|
||||
/**
|
||||
* The segment. Usually the integration name.
|
||||
*/
|
||||
readonly segment: string;
|
||||
/**
|
||||
* The version. Usually the integration version.
|
||||
*/
|
||||
readonly version?: string;
|
||||
};
|
||||
|
||||
export { }
|
||||
460 scripts/node_modules/@algolia/transporter/dist/transporter.esm.js generated vendored Normal file
@@ -0,0 +1,460 @@
import { MethodEnum } from '@algolia/requester-common';
|
||||
|
||||
function createMappedRequestOptions(requestOptions, timeout) {
|
||||
const options = requestOptions || {};
|
||||
const data = options.data || {};
|
||||
Object.keys(options).forEach(key => {
|
||||
if (['timeout', 'headers', 'queryParameters', 'data', 'cacheable'].indexOf(key) === -1) {
|
||||
data[key] = options[key]; // eslint-disable-line functional/immutable-data
|
||||
}
|
||||
});
|
||||
return {
|
||||
data: Object.entries(data).length > 0 ? data : undefined,
|
||||
timeout: options.timeout || timeout,
|
||||
headers: options.headers || {},
|
||||
queryParameters: options.queryParameters || {},
|
||||
cacheable: options.cacheable,
|
||||
};
|
||||
}
|
||||
|
||||
const CallEnum = {
|
||||
/**
|
||||
* If the host is read only.
|
||||
*/
|
||||
Read: 1,
|
||||
/**
|
||||
* If the host is write only.
|
||||
*/
|
||||
Write: 2,
|
||||
/**
|
||||
* If the host is both read and write.
|
||||
*/
|
||||
Any: 3,
|
||||
};
|
||||
|
||||
const HostStatusEnum = {
|
||||
Up: 1,
|
||||
Down: 2,
|
||||
Timeouted: 3,
|
||||
};
|
||||
|
||||
// By default, API Clients at Algolia have expiration delay
|
||||
// of 5 mins. In the JavaScript client, we have 2 mins.
|
||||
const EXPIRATION_DELAY = 2 * 60 * 1000;
|
||||
function createStatefulHost(host, status = HostStatusEnum.Up) {
|
||||
return {
|
||||
...host,
|
||||
status,
|
||||
lastUpdate: Date.now(),
|
||||
};
|
||||
}
|
||||
function isStatefulHostUp(host) {
|
||||
return host.status === HostStatusEnum.Up || Date.now() - host.lastUpdate > EXPIRATION_DELAY;
|
||||
}
|
||||
function isStatefulHostTimeouted(host) {
|
||||
return (host.status === HostStatusEnum.Timeouted && Date.now() - host.lastUpdate <= EXPIRATION_DELAY);
|
||||
}
|
||||
|
||||
function createStatelessHost(options) {
|
||||
if (typeof options === 'string') {
|
||||
return {
|
||||
protocol: 'https',
|
||||
url: options,
|
||||
accept: CallEnum.Any,
|
||||
};
|
||||
}
|
||||
return {
|
||||
protocol: options.protocol || 'https',
|
||||
url: options.url,
|
||||
accept: options.accept || CallEnum.Any,
|
||||
};
|
||||
}
|
||||
|
||||
function createRetryableOptions(hostsCache, statelessHosts) {
|
||||
return Promise.all(statelessHosts.map(statelessHost => {
|
||||
return hostsCache.get(statelessHost, () => {
|
||||
return Promise.resolve(createStatefulHost(statelessHost));
|
||||
});
|
||||
})).then(statefulHosts => {
|
||||
const hostsUp = statefulHosts.filter(host => isStatefulHostUp(host));
|
||||
const hostsTimeouted = statefulHosts.filter(host => isStatefulHostTimeouted(host));
|
||||
/**
|
||||
* Note, we put the hosts that previously timeouted on the end of the list.
|
||||
*/
|
||||
const hostsAvailable = [...hostsUp, ...hostsTimeouted];
|
||||
const statelessHostsAvailable = hostsAvailable.length > 0
|
||||
? hostsAvailable.map(host => createStatelessHost(host))
|
||||
: statelessHosts;
|
||||
return {
|
||||
getTimeout(timeoutsCount, baseTimeout) {
|
||||
/**
|
||||
* Imagine that you have 4 hosts, if timeouts will increase
|
||||
* on the following way: 1 (timeouted) > 4 (timeouted) > 5 (200)
|
||||
*
|
||||
* Note that, the very next request, we start from the previous timeout
|
||||
*
|
||||
* 5 (timeouted) > 6 (timeouted) > 7 ...
|
||||
*
|
||||
* This strategy may need to be reviewed, but is the strategy on the our
|
||||
* current v3 version.
|
||||
*/
|
||||
const timeoutMultiplier = hostsTimeouted.length === 0 && timeoutsCount === 0
|
||||
? 1
|
||||
: hostsTimeouted.length + 3 + timeoutsCount;
|
||||
return timeoutMultiplier * baseTimeout;
|
||||
},
|
||||
statelessHosts: statelessHostsAvailable,
|
||||
};
|
||||
});
|
||||
}
|
||||
|
||||
const isNetworkError = ({ isTimedOut, status }) => {
|
||||
return !isTimedOut && ~~status === 0;
|
||||
};
|
||||
const isRetryable = (response) => {
|
||||
const status = response.status;
|
||||
const isTimedOut = response.isTimedOut;
|
||||
return (isTimedOut || isNetworkError(response) || (~~(status / 100) !== 2 && ~~(status / 100) !== 4));
|
||||
};
|
||||
const isSuccess = ({ status }) => {
|
||||
return ~~(status / 100) === 2;
|
||||
};
|
||||
const retryDecision = (response, outcomes) => {
|
||||
if (isRetryable(response)) {
|
||||
return outcomes.onRetry(response);
|
||||
}
|
||||
if (isSuccess(response)) {
|
||||
return outcomes.onSuccess(response);
|
||||
}
|
||||
return outcomes.onFail(response);
|
||||
};
|
||||
|
||||
function retryableRequest(transporter, statelessHosts, request, requestOptions) {
|
||||
const stackTrace = []; // eslint-disable-line functional/prefer-readonly-type
|
||||
/**
|
||||
* First we prepare the payload that do not depend from hosts.
|
||||
*/
|
||||
const data = serializeData(request, requestOptions);
|
||||
const headers = serializeHeaders(transporter, requestOptions);
|
||||
const method = request.method;
|
||||
// On `GET`, the data is proxied to query parameters.
|
||||
const dataQueryParameters = request.method !== MethodEnum.Get
|
||||
? {}
|
||||
: {
|
||||
...request.data,
|
||||
...requestOptions.data,
|
||||
};
|
||||
const queryParameters = {
|
||||
'x-algolia-agent': transporter.userAgent.value,
|
||||
...transporter.queryParameters,
|
||||
...dataQueryParameters,
|
||||
...requestOptions.queryParameters,
|
||||
};
|
||||
let timeoutsCount = 0; // eslint-disable-line functional/no-let
|
||||
const retry = (hosts, // eslint-disable-line functional/prefer-readonly-type
|
||||
getTimeout) => {
|
||||
/**
|
||||
* We iterate on each host, until there is no host left.
|
||||
*/
|
||||
const host = hosts.pop(); // eslint-disable-line functional/immutable-data
|
||||
if (host === undefined) {
|
||||
throw createRetryError(stackTraceWithoutCredentials(stackTrace));
|
||||
}
|
||||
const payload = {
|
||||
data,
|
||||
headers,
|
||||
method,
|
||||
url: serializeUrl(host, request.path, queryParameters),
|
||||
connectTimeout: getTimeout(timeoutsCount, transporter.timeouts.connect),
|
||||
responseTimeout: getTimeout(timeoutsCount, requestOptions.timeout),
|
||||
};
|
||||
/**
|
||||
* The stackFrame is pushed to the stackTrace so we
|
||||
* can have information about onRetry and onFailure
|
||||
* decisions.
|
||||
*/
|
||||
const pushToStackTrace = (response) => {
|
||||
const stackFrame = {
|
||||
request: payload,
|
||||
response,
|
||||
host,
|
||||
triesLeft: hosts.length,
|
||||
};
|
||||
// eslint-disable-next-line functional/immutable-data
|
||||
stackTrace.push(stackFrame);
|
||||
return stackFrame;
|
||||
};
|
||||
const decisions = {
|
||||
onSuccess: response => deserializeSuccess(response),
|
||||
onRetry(response) {
|
||||
const stackFrame = pushToStackTrace(response);
|
||||
/**
|
||||
* If response is a timeout, we increaset the number of
|
||||
* timeouts so we can increase the timeout later.
|
||||
*/
|
||||
if (response.isTimedOut) {
|
||||
timeoutsCount++;
|
||||
}
|
||||
return Promise.all([
|
||||
/**
|
||||
* Failures are individually send the logger, allowing
|
||||
* the end user to debug / store stack frames even
|
||||
* when a retry error does not happen.
|
||||
*/
|
||||
transporter.logger.info('Retryable failure', stackFrameWithoutCredentials(stackFrame)),
|
||||
/**
|
||||
* We also store the state of the host in failure cases. If the host, is
|
||||
* down it will remain down for the next 2 minutes. In a timeout situation,
|
||||
* this host will be added end of the list of hosts on the next request.
|
||||
*/
|
||||
transporter.hostsCache.set(host, createStatefulHost(host, response.isTimedOut ? HostStatusEnum.Timeouted : HostStatusEnum.Down)),
|
||||
]).then(() => retry(hosts, getTimeout));
|
||||
},
|
||||
onFail(response) {
|
||||
pushToStackTrace(response);
|
||||
throw deserializeFailure(response, stackTraceWithoutCredentials(stackTrace));
|
||||
},
|
||||
};
|
||||
return transporter.requester.send(payload).then(response => {
|
||||
return retryDecision(response, decisions);
|
||||
});
|
||||
};
|
||||
/**
|
||||
* Finally, for each retryable host perform request until we got a non
|
||||
* retryable response. Some notes here:
|
||||
*
|
||||
* 1. The reverse here is applied so we can apply a `pop` later on => more performant.
|
||||
* 2. We also get from the retryable options a timeout multiplier that is tailored
|
||||
* for the current context.
|
||||
*/
|
||||
return createRetryableOptions(transporter.hostsCache, statelessHosts).then(options => {
|
||||
return retry([...options.statelessHosts].reverse(), options.getTimeout);
|
||||
});
|
||||
}
|
||||
|
||||
function createTransporter(options) {
|
||||
const { hostsCache, logger, requester, requestsCache, responsesCache, timeouts, userAgent, hosts, queryParameters, headers, } = options;
|
||||
const transporter = {
|
||||
hostsCache,
|
||||
logger,
|
||||
requester,
|
||||
requestsCache,
|
||||
responsesCache,
|
||||
timeouts,
|
||||
userAgent,
|
||||
headers,
|
||||
queryParameters,
|
||||
hosts: hosts.map(host => createStatelessHost(host)),
|
||||
read(request, requestOptions) {
|
||||
/**
|
||||
* First, we compute the user request options. Now, keep in mind,
|
||||
* that using request options the user is able to modified the intire
|
||||
* payload of the request. Such as headers, query parameters, and others.
|
||||
*/
|
||||
const mappedRequestOptions = createMappedRequestOptions(requestOptions, transporter.timeouts.read);
|
||||
const createRetryableRequest = () => {
|
||||
/**
|
||||
* Then, we prepare a function factory that contains the construction of
|
||||
* the retryable request. At this point, we may *not* perform the actual
|
||||
* request. But we want to have the function factory ready.
|
||||
*/
|
||||
return retryableRequest(transporter, transporter.hosts.filter(host => (host.accept & CallEnum.Read) !== 0), request, mappedRequestOptions);
|
||||
};
|
||||
/**
|
||||
* Once we have the function factory ready, we need to determine of the
|
||||
* request is "cacheable" - should be cached. Note that, once again,
|
||||
* the user can force this option.
|
||||
*/
|
||||
const cacheable = mappedRequestOptions.cacheable !== undefined
|
||||
? mappedRequestOptions.cacheable
|
||||
: request.cacheable;
|
||||
/**
|
||||
* If is not "cacheable", we immediatly trigger the retryable request, no
|
||||
* need to check cache implementations.
|
||||
*/
|
||||
if (cacheable !== true) {
|
||||
return createRetryableRequest();
|
||||
}
|
||||
/**
|
||||
* If the request is "cacheable", we need to first compute the key to ask
|
||||
* the cache implementations if this request is on progress or if the
|
||||
* response already exists on the cache.
|
||||
*/
|
||||
const key = {
|
||||
request,
|
||||
mappedRequestOptions,
|
||||
transporter: {
|
||||
queryParameters: transporter.queryParameters,
|
||||
headers: transporter.headers,
|
||||
},
|
||||
};
|
||||
/**
|
||||
* With the computed key, we first ask the responses cache
|
||||
* implemention if this request was been resolved before.
|
||||
*/
|
||||
return transporter.responsesCache.get(key, () => {
|
||||
/**
|
||||
* If the request has never resolved before, we actually ask if there
|
||||
* is a current request with the same key on progress.
|
||||
*/
|
||||
return transporter.requestsCache.get(key, () => {
|
||||
return (transporter.requestsCache
|
||||
/**
|
||||
* Finally, if there is no request in progress with the same key,
|
||||
* this `createRetryableRequest()` will actually trigger the
|
||||
* retryable request.
|
||||
*/
|
||||
.set(key, createRetryableRequest())
|
||||
.then(response => Promise.all([transporter.requestsCache.delete(key), response]), err => Promise.all([transporter.requestsCache.delete(key), Promise.reject(err)]))
|
||||
.then(([_, response]) => response));
|
||||
});
|
||||
}, {
|
||||
/**
|
||||
* Of course, once we get this response back from the server, we
|
||||
* tell response cache to actually store the received response
|
||||
* to be used later.
|
||||
*/
|
||||
miss: response => transporter.responsesCache.set(key, response),
|
||||
});
|
||||
},
|
||||
write(request, requestOptions) {
|
||||
/**
|
||||
* On write requests, no cache mechanisms are applied, and we
|
||||
* proxy the request immediately to the requester.
|
||||
*/
|
||||
return retryableRequest(transporter, transporter.hosts.filter(host => (host.accept & CallEnum.Write) !== 0), request, createMappedRequestOptions(requestOptions, transporter.timeouts.write));
|
||||
},
|
||||
};
|
||||
return transporter;
|
||||
}
|
||||
|
||||
function createUserAgent(version) {
|
||||
const userAgent = {
|
||||
value: `Algolia for JavaScript (${version})`,
|
||||
add(options) {
|
||||
const addedUserAgent = `; ${options.segment}${options.version !== undefined ? ` (${options.version})` : ''}`;
|
||||
if (userAgent.value.indexOf(addedUserAgent) === -1) {
|
||||
// eslint-disable-next-line functional/immutable-data
|
||||
userAgent.value = `${userAgent.value}${addedUserAgent}`;
|
||||
}
|
||||
return userAgent;
|
||||
},
|
||||
};
|
||||
return userAgent;
|
||||
}
|
||||
|
||||
function deserializeSuccess(response) {
|
||||
// eslint-disable-next-line functional/no-try-statement
|
||||
try {
|
||||
return JSON.parse(response.content);
|
||||
}
|
||||
catch (e) {
|
||||
throw createDeserializationError(e.message, response);
|
||||
}
|
||||
}
|
||||
function deserializeFailure({ content, status }, stackFrame) {
|
||||
// eslint-disable-next-line functional/no-let
|
||||
let message = content;
|
||||
// eslint-disable-next-line functional/no-try-statement
|
||||
try {
|
||||
message = JSON.parse(content).message;
|
||||
}
|
||||
catch (e) {
|
||||
// ..
|
||||
}
|
||||
return createApiError(message, status, stackFrame);
|
||||
}
|
||||
|
||||
// eslint-disable-next-line functional/prefer-readonly-type
|
||||
function encode(format, ...args) {
|
||||
// eslint-disable-next-line functional/no-let
|
||||
let i = 0;
|
||||
return format.replace(/%s/g, () => encodeURIComponent(args[i++]));
|
||||
}
|
||||
|
||||
function serializeUrl(host, path, queryParameters) {
|
||||
const queryParametersAsString = serializeQueryParameters(queryParameters);
|
||||
// eslint-disable-next-line functional/no-let
|
||||
let url = `${host.protocol}://${host.url}/${path.charAt(0) === '/' ? path.substr(1) : path}`;
|
||||
if (queryParametersAsString.length) {
|
||||
url += `?${queryParametersAsString}`;
|
||||
}
|
||||
return url;
|
||||
}
|
||||
function serializeQueryParameters(parameters) {
|
||||
const isObjectOrArray = (value) => Object.prototype.toString.call(value) === '[object Object]' ||
|
||||
Object.prototype.toString.call(value) === '[object Array]';
|
||||
return Object.keys(parameters)
|
||||
.map(key => encode('%s=%s', key, isObjectOrArray(parameters[key]) ? JSON.stringify(parameters[key]) : parameters[key]))
|
||||
.join('&');
|
||||
}
|
||||
function serializeData(request, requestOptions) {
|
||||
if (request.method === MethodEnum.Get ||
|
||||
(request.data === undefined && requestOptions.data === undefined)) {
|
||||
return undefined;
|
||||
}
|
||||
const data = Array.isArray(request.data)
|
||||
? request.data
|
||||
: { ...request.data, ...requestOptions.data };
|
||||
return JSON.stringify(data);
|
||||
}
|
||||
function serializeHeaders(transporter, requestOptions) {
|
||||
const headers = {
|
||||
...transporter.headers,
|
||||
...requestOptions.headers,
|
||||
};
|
||||
const serializedHeaders = {};
|
||||
Object.keys(headers).forEach(header => {
|
||||
const value = headers[header];
|
||||
// @ts-ignore
|
||||
// eslint-disable-next-line functional/immutable-data
|
||||
serializedHeaders[header.toLowerCase()] = value;
|
||||
});
|
||||
return serializedHeaders;
|
||||
}
|
||||
|
||||
function stackTraceWithoutCredentials(stackTrace) {
|
||||
return stackTrace.map(stackFrame => stackFrameWithoutCredentials(stackFrame));
|
||||
}
|
||||
function stackFrameWithoutCredentials(stackFrame) {
|
||||
const modifiedHeaders = stackFrame.request.headers['x-algolia-api-key']
|
||||
? { 'x-algolia-api-key': '*****' }
|
||||
: {};
|
||||
return {
|
||||
...stackFrame,
|
||||
request: {
|
||||
...stackFrame.request,
|
||||
headers: {
|
||||
...stackFrame.request.headers,
|
||||
...modifiedHeaders,
|
||||
},
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
function createApiError(message, status, transporterStackTrace) {
|
||||
return {
|
||||
name: 'ApiError',
|
||||
message,
|
||||
status,
|
||||
transporterStackTrace,
|
||||
};
|
||||
}
|
||||
|
||||
function createDeserializationError(message, response) {
|
||||
return {
|
||||
name: 'DeserializationError',
|
||||
message,
|
||||
response,
|
||||
};
|
||||
}
|
||||
|
||||
function createRetryError(transporterStackTrace) {
|
||||
return {
|
||||
name: 'RetryError',
|
||||
message: 'Unreachable hosts - your application id may be incorrect. If the error persists, please reach out to the Algolia Support team: https://alg.li/support .',
|
||||
transporterStackTrace,
|
||||
};
|
||||
}
|
||||
|
||||
export { CallEnum, HostStatusEnum, createApiError, createDeserializationError, createMappedRequestOptions, createRetryError, createStatefulHost, createStatelessHost, createTransporter, createUserAgent, deserializeFailure, deserializeSuccess, isStatefulHostTimeouted, isStatefulHostUp, serializeData, serializeHeaders, serializeQueryParameters, serializeUrl, stackFrameWithoutCredentials, stackTraceWithoutCredentials };
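The exported helpers above can be exercised on their own. A minimal sketch (illustrative values, and assuming the package's CommonJS entry re-exports the same functions as this ESM build):

```js
const { serializeQueryParameters, serializeUrl } = require('@algolia/transporter');

// Object and array values are JSON-stringified before URL encoding.
serializeQueryParameters({ query: 'phone', facetFilters: [['brand:Apple']] });
// -> 'query=phone&facetFilters=%5B%5B%22brand%3AApple%22%5D%5D'

// serializeUrl joins protocol, host and path, appending the query string when present.
serializeUrl(
  { protocol: 'https', url: 'APPID-dsn.algolia.net', accept: 3 },
  '1/indexes/products/query',
  { query: 'phone' }
);
// -> 'https://APPID-dsn.algolia.net/1/indexes/products/query?query=phone'
```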
2
scripts/node_modules/@algolia/transporter/index.js
generated
vendored
Normal file
@ -0,0 +1,2 @@
// eslint-disable-next-line functional/immutable-data, import/no-commonjs
|
||||
module.exports = require('./dist/transporter.cjs.js');
24
scripts/node_modules/@algolia/transporter/package.json
generated
vendored
Normal file
@ -0,0 +1,24 @@
{
|
||||
"name": "@algolia/transporter",
|
||||
"version": "4.24.0",
|
||||
"private": false,
|
||||
"description": "Promise-based transporter layer with embedded retry strategy.",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "git://github.com/algolia/algoliasearch-client-javascript.git"
|
||||
},
|
||||
"license": "MIT",
|
||||
"sideEffects": false,
|
||||
"main": "index.js",
|
||||
"module": "dist/transporter.esm.js",
|
||||
"types": "dist/transporter.d.ts",
|
||||
"files": [
|
||||
"index.js",
|
||||
"dist"
|
||||
],
|
||||
"dependencies": {
|
||||
"@algolia/cache-common": "4.24.0",
|
||||
"@algolia/logger-common": "4.24.0",
|
||||
"@algolia/requester-common": "4.24.0"
|
||||
}
|
||||
}
82
scripts/node_modules/algoliasearch/README.md
generated
vendored
Normal file
@ -0,0 +1,82 @@
<p align="center">
  <a href="https://www.algolia.com">
    <img alt="Algolia for JavaScript" src="https://raw.githubusercontent.com/algolia/algoliasearch-client-common/master/banners/javascript.png" >
  </a>

  <h4 align="center">The perfect starting point to integrate <a href="https://algolia.com" target="_blank">Algolia</a> within your JavaScript project</h4>

  <p align="center">
    <a href="https://npmjs.org/package/algoliasearch"><img src="https://img.shields.io/npm/v/algoliasearch.svg?style=flat-square" alt="NPM version"></img></a>
    <a href="http://npm-stat.com/charts.html?package=algoliasearch"><img src="https://img.shields.io/npm/dm/algoliasearch.svg?style=flat-square" alt="NPM downloads"></a>
    <a href="https://www.jsdelivr.com/package/npm/algoliasearch"><img src="https://data.jsdelivr.com/v1/package/npm/algoliasearch/badge" alt="jsDelivr Downloads"></img></a>
    <a href="LICENSE.md"><img src="https://img.shields.io/badge/license-MIT-green.svg?style=flat-square" alt="License"></a>
  </p>
</p>

<p align="center">
  <a href="https://www.algolia.com/doc/api-client/getting-started/install/javascript/" target="_blank">Documentation</a> •
  <a href="https://www.algolia.com/doc/guides/building-search-ui/what-is-instantsearch/js/" target="_blank">InstantSearch</a> •
  <a href="https://discourse.algolia.com" target="_blank">Community Forum</a> •
  <a href="http://stackoverflow.com/questions/tagged/algolia" target="_blank">Stack Overflow</a> •
  <a href="https://github.com/algolia/algoliasearch-client-javascript/issues" target="_blank">Report a bug</a> •
  <a href="https://www.algolia.com/support" target="_blank">Support</a>
</p>

## ✨ Features

- Thin & **minimal low-level HTTP client** to interact with Algolia's API
- Works both in the **browser** and in **Node.js**
- **UMD compatible**, so you can use it with any module loader
- Built with TypeScript

## 💡 Getting Started

First, install the Algolia JavaScript API Client via the [npm](https://www.npmjs.com/get-npm) package manager:

```bash
npm install algoliasearch
```

Then, create objects on your index:

```js
const algoliasearch = require("algoliasearch");

const client = algoliasearch("YourApplicationID", "YourAdminAPIKey");
const index = client.initIndex("your_index_name");

const objects = [
  {
    objectID: 1,
    name: "Foo"
  }
];

index
  .saveObjects(objects)
  .then(({ objectIDs }) => {
    console.log(objectIDs);
  })
  .catch(err => {
    console.log(err);
  });
```
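
Indexing is asynchronous: `saveObjects` resolves when the API has accepted the job, not when the records are searchable. If a script needs to block until indexing finishes, the returned promise can typically be chained with `.wait()` (see `WaitablePromise` in the bundled type definitions); a hedged sketch reusing `index` and `objects` from the snippet above:

```js
index
  .saveObjects(objects)
  .wait() // resolves once the indexing task has completed
  .then(({ objectIDs }) => {
    console.log("indexed:", objectIDs);
  })
  .catch(err => {
    console.log(err);
  });
```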

Finally, let's actually search using the `search` method:

```js
index
  .search("Fo")
  .then(({ hits }) => {
    console.log(hits);
  })
  .catch(err => {
    console.log(err);
  });
```

For full documentation, visit the **[online documentation](https://www.algolia.com/doc/api-client/getting-started/install/javascript/)**.

## 📄 License

The Algolia JavaScript API Client is open-source software licensed under the [MIT license](LICENSE.md).
51
scripts/node_modules/algoliasearch/dist/algoliasearch-lite.d.ts
generated
vendored
Normal file
@ -0,0 +1,51 @@
import { ClientTransporterOptions } from '@algolia/client-common';
|
||||
import { FindAnswersOptions } from '@algolia/client-search';
|
||||
import { FindAnswersResponse } from '@algolia/client-search';
|
||||
import { MultipleQueriesOptions } from '@algolia/client-search';
|
||||
import { MultipleQueriesQuery } from '@algolia/client-search';
|
||||
import { MultipleQueriesResponse } from '@algolia/client-search';
|
||||
import { Request as Request_2 } from '@algolia/transporter';
|
||||
import { RequestOptions } from '@algolia/transporter';
|
||||
import { SearchClient as SearchClient_2 } from '@algolia/client-search';
|
||||
import { SearchClientOptions } from '@algolia/client-search';
|
||||
import { SearchForFacetValuesQueryParams } from '@algolia/client-search';
|
||||
import { SearchForFacetValuesResponse } from '@algolia/client-search';
|
||||
import { SearchIndex as SearchIndex_2 } from '@algolia/client-search';
|
||||
import { SearchOptions } from '@algolia/client-search';
|
||||
import { SearchResponse } from '@algolia/client-search';
|
||||
import { WithRecommendMethods } from '@algolia/recommend';
|
||||
|
||||
declare function algoliasearch(appId: string, apiKey: string, options?: AlgoliaSearchOptions): SearchClient;
|
||||
|
||||
declare namespace algoliasearch {
|
||||
var version: string;
|
||||
}
|
||||
export default algoliasearch;
|
||||
|
||||
export declare type AlgoliaSearchOptions = Partial<ClientTransporterOptions> & WithoutCredentials<SearchClientOptions>;
|
||||
|
||||
declare type Credentials = {
|
||||
readonly appId: string;
|
||||
readonly apiKey: string;
|
||||
};
|
||||
|
||||
export declare type SearchClient = SearchClient_2 & {
|
||||
readonly initIndex: (indexName: string) => SearchIndex;
|
||||
readonly search: <TObject>(queries: readonly MultipleQueriesQuery[], requestOptions?: RequestOptions & MultipleQueriesOptions) => Readonly<Promise<MultipleQueriesResponse<TObject>>>;
|
||||
readonly searchForFacetValues: (queries: ReadonlyArray<{
|
||||
readonly indexName: string;
|
||||
readonly params: SearchForFacetValuesQueryParams & SearchOptions;
|
||||
}>, requestOptions?: RequestOptions) => Readonly<Promise<readonly SearchForFacetValuesResponse[]>>;
|
||||
readonly customRequest: <TResponse>(request: Request_2, requestOptions?: RequestOptions) => Readonly<Promise<TResponse>>;
|
||||
readonly getRecommendations: WithRecommendMethods<SearchClient_2>['getRecommendations'];
|
||||
};
|
||||
|
||||
export declare type SearchIndex = SearchIndex_2 & {
|
||||
readonly search: <TObject>(query: string, requestOptions?: RequestOptions & SearchOptions) => Readonly<Promise<SearchResponse<TObject>>>;
|
||||
readonly searchForFacetValues: (facetName: string, facetQuery: string, requestOptions?: RequestOptions & SearchOptions) => Readonly<Promise<SearchForFacetValuesResponse>>;
|
||||
readonly findAnswers: <TObject>(query: string, queryLanguages: readonly string[], requestOptions?: RequestOptions & FindAnswersOptions) => Readonly<Promise<FindAnswersResponse<TObject>>>;
|
||||
};
|
||||
|
||||
export declare type WithoutCredentials<TClientOptions extends Credentials> = Omit<TClientOptions, keyof Credentials>;
|
||||
|
||||
export { }
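
As the `SearchClient` type above shows, the lite client's top-level `search` accepts an array of `MultipleQueriesQuery` objects. A hedged sketch (placeholder credentials and index names, assuming the `algoliasearch/lite` entry point):

```js
const algoliasearch = require('algoliasearch/lite');

const client = algoliasearch('YourApplicationID', 'YourSearchOnlyAPIKey');

client
  .search([
    { indexName: 'products', query: 'phone', params: { hitsPerPage: 5 } },
    { indexName: 'brands', query: 'apple' },
  ])
  .then(({ results }) => console.log(results))
  .catch(err => console.log(err));
```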
961
scripts/node_modules/algoliasearch/dist/algoliasearch-lite.esm.browser.js
generated
vendored
Normal file
@ -0,0 +1,961 @@
function createBrowserLocalStorageCache(options) {
|
||||
const namespaceKey = `algoliasearch-client-js-${options.key}`;
|
||||
// eslint-disable-next-line functional/no-let
|
||||
let storage;
|
||||
const getStorage = () => {
|
||||
if (storage === undefined) {
|
||||
storage = options.localStorage || window.localStorage;
|
||||
}
|
||||
return storage;
|
||||
};
|
||||
const getNamespace = () => {
|
||||
return JSON.parse(getStorage().getItem(namespaceKey) || '{}');
|
||||
};
|
||||
const setNamespace = (namespace) => {
|
||||
getStorage().setItem(namespaceKey, JSON.stringify(namespace));
|
||||
};
|
||||
const removeOutdatedCacheItems = () => {
|
||||
const timeToLive = options.timeToLive ? options.timeToLive * 1000 : null;
|
||||
const namespace = getNamespace();
|
||||
const filteredNamespaceWithoutOldFormattedCacheItems = Object.fromEntries(Object.entries(namespace).filter(([, cacheItem]) => {
|
||||
return cacheItem.timestamp !== undefined;
|
||||
}));
|
||||
setNamespace(filteredNamespaceWithoutOldFormattedCacheItems);
|
||||
if (!timeToLive)
|
||||
return;
|
||||
const filteredNamespaceWithoutExpiredItems = Object.fromEntries(Object.entries(filteredNamespaceWithoutOldFormattedCacheItems).filter(([, cacheItem]) => {
|
||||
const currentTimestamp = new Date().getTime();
|
||||
const isExpired = cacheItem.timestamp + timeToLive < currentTimestamp;
|
||||
return !isExpired;
|
||||
}));
|
||||
setNamespace(filteredNamespaceWithoutExpiredItems);
|
||||
};
|
||||
return {
|
||||
get(key, defaultValue, events = {
|
||||
miss: () => Promise.resolve(),
|
||||
}) {
|
||||
return Promise.resolve()
|
||||
.then(() => {
|
||||
removeOutdatedCacheItems();
|
||||
const keyAsString = JSON.stringify(key);
|
||||
return getNamespace()[keyAsString];
|
||||
})
|
||||
.then(value => {
|
||||
return Promise.all([value ? value.value : defaultValue(), value !== undefined]);
|
||||
})
|
||||
.then(([value, exists]) => {
|
||||
return Promise.all([value, exists || events.miss(value)]);
|
||||
})
|
||||
.then(([value]) => value);
|
||||
},
|
||||
set(key, value) {
|
||||
return Promise.resolve().then(() => {
|
||||
const namespace = getNamespace();
|
||||
// eslint-disable-next-line functional/immutable-data
|
||||
namespace[JSON.stringify(key)] = {
|
||||
timestamp: new Date().getTime(),
|
||||
value,
|
||||
};
|
||||
getStorage().setItem(namespaceKey, JSON.stringify(namespace));
|
||||
return value;
|
||||
});
|
||||
},
|
||||
delete(key) {
|
||||
return Promise.resolve().then(() => {
|
||||
const namespace = getNamespace();
|
||||
// eslint-disable-next-line functional/immutable-data
|
||||
delete namespace[JSON.stringify(key)];
|
||||
getStorage().setItem(namespaceKey, JSON.stringify(namespace));
|
||||
});
|
||||
},
|
||||
clear() {
|
||||
return Promise.resolve().then(() => {
|
||||
getStorage().removeItem(namespaceKey);
|
||||
});
|
||||
},
|
||||
};
|
||||
}
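// Editorial note (not part of the upstream bundle): a hedged usage sketch for the
// localStorage cache above. Keys are JSON-stringified under the
// `algoliasearch-client-js-<key>` namespace and `timeToLive` is expressed in seconds.
//
//   const cache = createBrowserLocalStorageCache({ key: 'example', timeToLive: 60 });
//   cache.set({ query: 'phone' }, { hits: [] });                        // stored with a timestamp
//   cache.get({ query: 'phone' }, () => Promise.resolve({ hits: [] })); // resolves with the cached value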
|
||||
|
||||
// @todo Add logger on options to debug when caches go wrong.
|
||||
function createFallbackableCache(options) {
|
||||
const caches = [...options.caches];
|
||||
const current = caches.shift(); // eslint-disable-line functional/immutable-data
|
||||
if (current === undefined) {
|
||||
return createNullCache();
|
||||
}
|
||||
return {
|
||||
get(key, defaultValue, events = {
|
||||
miss: () => Promise.resolve(),
|
||||
}) {
|
||||
return current.get(key, defaultValue, events).catch(() => {
|
||||
return createFallbackableCache({ caches }).get(key, defaultValue, events);
|
||||
});
|
||||
},
|
||||
set(key, value) {
|
||||
return current.set(key, value).catch(() => {
|
||||
return createFallbackableCache({ caches }).set(key, value);
|
||||
});
|
||||
},
|
||||
delete(key) {
|
||||
return current.delete(key).catch(() => {
|
||||
return createFallbackableCache({ caches }).delete(key);
|
||||
});
|
||||
},
|
||||
clear() {
|
||||
return current.clear().catch(() => {
|
||||
return createFallbackableCache({ caches }).clear();
|
||||
});
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
function createNullCache() {
|
||||
return {
|
||||
get(_key, defaultValue, events = {
|
||||
miss: () => Promise.resolve(),
|
||||
}) {
|
||||
const value = defaultValue();
|
||||
return value
|
||||
.then(result => Promise.all([result, events.miss(result)]))
|
||||
.then(([result]) => result);
|
||||
},
|
||||
set(_key, value) {
|
||||
return Promise.resolve(value);
|
||||
},
|
||||
delete(_key) {
|
||||
return Promise.resolve();
|
||||
},
|
||||
clear() {
|
||||
return Promise.resolve();
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
function createInMemoryCache(options = { serializable: true }) {
|
||||
// eslint-disable-next-line functional/no-let
|
||||
let cache = {};
|
||||
return {
|
||||
get(key, defaultValue, events = {
|
||||
miss: () => Promise.resolve(),
|
||||
}) {
|
||||
const keyAsString = JSON.stringify(key);
|
||||
if (keyAsString in cache) {
|
||||
return Promise.resolve(options.serializable ? JSON.parse(cache[keyAsString]) : cache[keyAsString]);
|
||||
}
|
||||
const promise = defaultValue();
|
||||
const miss = (events && events.miss) || (() => Promise.resolve());
|
||||
return promise.then((value) => miss(value)).then(() => promise);
|
||||
},
|
||||
set(key, value) {
|
||||
// eslint-disable-next-line functional/immutable-data
|
||||
cache[JSON.stringify(key)] = options.serializable ? JSON.stringify(value) : value;
|
||||
return Promise.resolve(value);
|
||||
},
|
||||
delete(key) {
|
||||
// eslint-disable-next-line functional/immutable-data
|
||||
delete cache[JSON.stringify(key)];
|
||||
return Promise.resolve();
|
||||
},
|
||||
clear() {
|
||||
cache = {};
|
||||
return Promise.resolve();
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
function createAuth(authMode, appId, apiKey) {
|
||||
const credentials = {
|
||||
'x-algolia-api-key': apiKey,
|
||||
'x-algolia-application-id': appId,
|
||||
};
|
||||
return {
|
||||
headers() {
|
||||
return authMode === AuthMode.WithinHeaders ? credentials : {};
|
||||
},
|
||||
queryParameters() {
|
||||
return authMode === AuthMode.WithinQueryParameters ? credentials : {};
|
||||
},
|
||||
};
|
||||
}
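// Editorial note (not part of the upstream bundle): createAuth routes the same
// credential pair to either headers or query parameters, depending on the auth mode.
//
//   const auth = createAuth(AuthMode.WithinHeaders, 'APP_ID', 'API_KEY');
//   auth.headers();         // -> { 'x-algolia-api-key': 'API_KEY', 'x-algolia-application-id': 'APP_ID' }
//   auth.queryParameters(); // -> {}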
|
||||
|
||||
// eslint-disable-next-line functional/prefer-readonly-type
|
||||
function shuffle(array) {
|
||||
let c = array.length - 1; // eslint-disable-line functional/no-let
|
||||
// eslint-disable-next-line functional/no-loop-statement
|
||||
for (c; c > 0; c--) {
|
||||
const b = Math.floor(Math.random() * (c + 1));
|
||||
const a = array[c];
|
||||
array[c] = array[b]; // eslint-disable-line functional/immutable-data, no-param-reassign
|
||||
array[b] = a; // eslint-disable-line functional/immutable-data, no-param-reassign
|
||||
}
|
||||
return array;
|
||||
}
|
||||
function addMethods(base, methods) {
|
||||
if (!methods) {
|
||||
return base;
|
||||
}
|
||||
Object.keys(methods).forEach(key => {
|
||||
// eslint-disable-next-line functional/immutable-data, no-param-reassign
|
||||
base[key] = methods[key](base);
|
||||
});
|
||||
return base;
|
||||
}
|
||||
function encode(format, ...args) {
|
||||
// eslint-disable-next-line functional/no-let
|
||||
let i = 0;
|
||||
return format.replace(/%s/g, () => encodeURIComponent(args[i++]));
|
||||
}
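// Editorial note (not part of the upstream bundle): `encode` fills each `%s`
// placeholder with the URI-encoded form of the corresponding argument.
//
//   encode('1/indexes/%s/query', 'my index'); // -> '1/indexes/my%20index/query'
//   encode('%s=%s', 'filters', 'price>10');   // -> 'filters=price%3E10'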
|
||||
|
||||
const version = '4.24.0';
|
||||
|
||||
const AuthMode = {
|
||||
/**
|
||||
* If auth credentials should be in query parameters.
|
||||
*/
|
||||
WithinQueryParameters: 0,
|
||||
/**
|
||||
* If auth credentials should be in headers.
|
||||
*/
|
||||
WithinHeaders: 1,
|
||||
};
|
||||
|
||||
function createMappedRequestOptions(requestOptions, timeout) {
|
||||
const options = requestOptions || {};
|
||||
const data = options.data || {};
|
||||
Object.keys(options).forEach(key => {
|
||||
if (['timeout', 'headers', 'queryParameters', 'data', 'cacheable'].indexOf(key) === -1) {
|
||||
data[key] = options[key]; // eslint-disable-line functional/immutable-data
|
||||
}
|
||||
});
|
||||
return {
|
||||
data: Object.entries(data).length > 0 ? data : undefined,
|
||||
timeout: options.timeout || timeout,
|
||||
headers: options.headers || {},
|
||||
queryParameters: options.queryParameters || {},
|
||||
cacheable: options.cacheable,
|
||||
};
|
||||
}
|
||||
|
||||
const CallEnum = {
|
||||
/**
|
||||
* If the host is read only.
|
||||
*/
|
||||
Read: 1,
|
||||
/**
|
||||
* If the host is write only.
|
||||
*/
|
||||
Write: 2,
|
||||
/**
|
||||
* If the host is both read and write.
|
||||
*/
|
||||
Any: 3,
|
||||
};
|
||||
|
||||
const HostStatusEnum = {
|
||||
Up: 1,
|
||||
Down: 2,
|
||||
Timeouted: 3,
|
||||
};
|
||||
|
||||
// By default, Algolia API clients have an expiration delay
// of 5 minutes. In the JavaScript client, we use 2 minutes.
const EXPIRATION_DELAY = 2 * 60 * 1000;
|
||||
function createStatefulHost(host, status = HostStatusEnum.Up) {
|
||||
return {
|
||||
...host,
|
||||
status,
|
||||
lastUpdate: Date.now(),
|
||||
};
|
||||
}
|
||||
function isStatefulHostUp(host) {
|
||||
return host.status === HostStatusEnum.Up || Date.now() - host.lastUpdate > EXPIRATION_DELAY;
|
||||
}
|
||||
function isStatefulHostTimeouted(host) {
|
||||
return (host.status === HostStatusEnum.Timeouted && Date.now() - host.lastUpdate <= EXPIRATION_DELAY);
|
||||
}
|
||||
|
||||
function createStatelessHost(options) {
|
||||
if (typeof options === 'string') {
|
||||
return {
|
||||
protocol: 'https',
|
||||
url: options,
|
||||
accept: CallEnum.Any,
|
||||
};
|
||||
}
|
||||
return {
|
||||
protocol: options.protocol || 'https',
|
||||
url: options.url,
|
||||
accept: options.accept || CallEnum.Any,
|
||||
};
|
||||
}
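// Editorial note (not part of the upstream bundle): createStatelessHost accepts
// either a bare host string or a partial host object and normalizes both.
//
//   createStatelessHost('APPID-dsn.algolia.net');
//   // -> { protocol: 'https', url: 'APPID-dsn.algolia.net', accept: CallEnum.Any }
//   createStatelessHost({ url: 'APPID.algolia.net', accept: CallEnum.Write });
//   // -> { protocol: 'https', url: 'APPID.algolia.net', accept: CallEnum.Write }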
|
||||
|
||||
const MethodEnum = {
|
||||
Delete: 'DELETE',
|
||||
Get: 'GET',
|
||||
Post: 'POST',
|
||||
Put: 'PUT',
|
||||
};
|
||||
|
||||
function createRetryableOptions(hostsCache, statelessHosts) {
|
||||
return Promise.all(statelessHosts.map(statelessHost => {
|
||||
return hostsCache.get(statelessHost, () => {
|
||||
return Promise.resolve(createStatefulHost(statelessHost));
|
||||
});
|
||||
})).then(statefulHosts => {
|
||||
const hostsUp = statefulHosts.filter(host => isStatefulHostUp(host));
|
||||
const hostsTimeouted = statefulHosts.filter(host => isStatefulHostTimeouted(host));
|
||||
/**
* Note that hosts which previously timed out are put at the end of the list.
*/
|
||||
const hostsAvailable = [...hostsUp, ...hostsTimeouted];
|
||||
const statelessHostsAvailable = hostsAvailable.length > 0
|
||||
? hostsAvailable.map(host => createStatelessHost(host))
|
||||
: statelessHosts;
|
||||
return {
|
||||
getTimeout(timeoutsCount, baseTimeout) {
|
||||
/**
* Imagine that you have 4 hosts: timeouts increase in the following way:
* 1 (timed out) > 4 (timed out) > 5 (200).
*
* Note that the very next request starts from the previous timeout:
*
* 5 (timed out) > 6 (timed out) > 7 ...
*
* This strategy may need to be reviewed, but it is the strategy used by the
* current v3 version.
*/
|
||||
const timeoutMultiplier = hostsTimeouted.length === 0 && timeoutsCount === 0
|
||||
? 1
|
||||
: hostsTimeouted.length + 3 + timeoutsCount;
|
||||
return timeoutMultiplier * baseTimeout;
|
||||
},
|
||||
statelessHosts: statelessHostsAvailable,
|
||||
};
|
||||
});
|
||||
}
|
||||
|
||||
const isNetworkError = ({ isTimedOut, status }) => {
|
||||
return !isTimedOut && ~~status === 0;
|
||||
};
|
||||
const isRetryable = (response) => {
|
||||
const status = response.status;
|
||||
const isTimedOut = response.isTimedOut;
|
||||
return (isTimedOut || isNetworkError(response) || (~~(status / 100) !== 2 && ~~(status / 100) !== 4));
|
||||
};
|
||||
const isSuccess = ({ status }) => {
|
||||
return ~~(status / 100) === 2;
|
||||
};
|
||||
const retryDecision = (response, outcomes) => {
|
||||
if (isRetryable(response)) {
|
||||
return outcomes.onRetry(response);
|
||||
}
|
||||
if (isSuccess(response)) {
|
||||
return outcomes.onSuccess(response);
|
||||
}
|
||||
return outcomes.onFail(response);
|
||||
};
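// Editorial note (not part of the upstream bundle): how retryDecision classifies
// responses, given the predicates above.
//
//   { status: 200, isTimedOut: false } -> outcomes.onSuccess  (2xx)
//   { status: 404, isTimedOut: false } -> outcomes.onFail     (4xx is not retried)
//   { status: 500, isTimedOut: false } -> outcomes.onRetry    (other status codes)
//   { status: 0,   isTimedOut: true  } -> outcomes.onRetry    (timeout or network error)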
|
||||
|
||||
function retryableRequest(transporter, statelessHosts, request, requestOptions) {
|
||||
const stackTrace = []; // eslint-disable-line functional/prefer-readonly-type
|
||||
/**
* First we prepare the parts of the payload that do not depend on the host.
*/
|
||||
const data = serializeData(request, requestOptions);
|
||||
const headers = serializeHeaders(transporter, requestOptions);
|
||||
const method = request.method;
|
||||
// On `GET`, the data is proxied to query parameters.
|
||||
const dataQueryParameters = request.method !== MethodEnum.Get
|
||||
? {}
|
||||
: {
|
||||
...request.data,
|
||||
...requestOptions.data,
|
||||
};
|
||||
const queryParameters = {
|
||||
'x-algolia-agent': transporter.userAgent.value,
|
||||
...transporter.queryParameters,
|
||||
...dataQueryParameters,
|
||||
...requestOptions.queryParameters,
|
||||
};
|
||||
let timeoutsCount = 0; // eslint-disable-line functional/no-let
|
||||
const retry = (hosts, // eslint-disable-line functional/prefer-readonly-type
|
||||
getTimeout) => {
|
||||
/**
|
||||
* We iterate on each host, until there is no host left.
|
||||
*/
|
||||
const host = hosts.pop(); // eslint-disable-line functional/immutable-data
|
||||
if (host === undefined) {
|
||||
throw createRetryError(stackTraceWithoutCredentials(stackTrace));
|
||||
}
|
||||
const payload = {
|
||||
data,
|
||||
headers,
|
||||
method,
|
||||
url: serializeUrl(host, request.path, queryParameters),
|
||||
connectTimeout: getTimeout(timeoutsCount, transporter.timeouts.connect),
|
||||
responseTimeout: getTimeout(timeoutsCount, requestOptions.timeout),
|
||||
};
|
||||
/**
|
||||
* The stackFrame is pushed to the stackTrace so we
|
||||
* can have information about onRetry and onFailure
|
||||
* decisions.
|
||||
*/
|
||||
const pushToStackTrace = (response) => {
|
||||
const stackFrame = {
|
||||
request: payload,
|
||||
response,
|
||||
host,
|
||||
triesLeft: hosts.length,
|
||||
};
|
||||
// eslint-disable-next-line functional/immutable-data
|
||||
stackTrace.push(stackFrame);
|
||||
return stackFrame;
|
||||
};
|
||||
const decisions = {
|
||||
onSuccess: response => deserializeSuccess(response),
|
||||
onRetry(response) {
|
||||
const stackFrame = pushToStackTrace(response);
|
||||
/**
* If the response is a timeout, we increment the number of
* timeouts so we can increase the timeout later.
*/
|
||||
if (response.isTimedOut) {
|
||||
timeoutsCount++;
|
||||
}
|
||||
return Promise.all([
|
||||
/**
* Failures are individually sent to the logger, allowing
* the end user to debug / store stack frames even
* when a retry error does not happen.
*/
|
||||
transporter.logger.info('Retryable failure', stackFrameWithoutCredentials(stackFrame)),
|
||||
/**
* We also store the state of the host in failure cases. If the host is
* down, it will remain down for the next 2 minutes. In a timeout situation,
* this host will be added to the end of the list of hosts on the next request.
*/
|
||||
transporter.hostsCache.set(host, createStatefulHost(host, response.isTimedOut ? HostStatusEnum.Timeouted : HostStatusEnum.Down)),
|
||||
]).then(() => retry(hosts, getTimeout));
|
||||
},
|
||||
onFail(response) {
|
||||
pushToStackTrace(response);
|
||||
throw deserializeFailure(response, stackTraceWithoutCredentials(stackTrace));
|
||||
},
|
||||
};
|
||||
return transporter.requester.send(payload).then(response => {
|
||||
return retryDecision(response, decisions);
|
||||
});
|
||||
};
|
||||
/**
* Finally, for each retryable host, perform the request until we get a
* non-retryable response. Some notes here:
*
* 1. The hosts are reversed so we can use `pop` later on => more performant.
* 2. We also get from the retryable options a timeout multiplier that is
* tailored for the current context.
*/
|
||||
return createRetryableOptions(transporter.hostsCache, statelessHosts).then(options => {
|
||||
return retry([...options.statelessHosts].reverse(), options.getTimeout);
|
||||
});
|
||||
}
|
||||
|
||||
function createTransporter(options) {
|
||||
const { hostsCache, logger, requester, requestsCache, responsesCache, timeouts, userAgent, hosts, queryParameters, headers, } = options;
|
||||
const transporter = {
|
||||
hostsCache,
|
||||
logger,
|
||||
requester,
|
||||
requestsCache,
|
||||
responsesCache,
|
||||
timeouts,
|
||||
userAgent,
|
||||
headers,
|
||||
queryParameters,
|
||||
hosts: hosts.map(host => createStatelessHost(host)),
|
||||
read(request, requestOptions) {
|
||||
/**
* First, we compute the user request options. Keep in mind that,
* using request options, the user is able to modify the entire
* payload of the request, such as headers, query parameters, and others.
*/
|
||||
const mappedRequestOptions = createMappedRequestOptions(requestOptions, transporter.timeouts.read);
|
||||
const createRetryableRequest = () => {
|
||||
/**
|
||||
* Then, we prepare a function factory that contains the construction of
|
||||
* the retryable request. At this point, we may *not* perform the actual
|
||||
* request. But we want to have the function factory ready.
|
||||
*/
|
||||
return retryableRequest(transporter, transporter.hosts.filter(host => (host.accept & CallEnum.Read) !== 0), request, mappedRequestOptions);
|
||||
};
|
||||
/**
* Once we have the function factory ready, we need to determine if the
* request is "cacheable" - i.e. whether it should be cached. Note that, once again,
* the user can force this option.
*/
|
||||
const cacheable = mappedRequestOptions.cacheable !== undefined
|
||||
? mappedRequestOptions.cacheable
|
||||
: request.cacheable;
|
||||
/**
* If it is not "cacheable", we immediately trigger the retryable request; no
* need to check cache implementations.
*/
|
||||
if (cacheable !== true) {
|
||||
return createRetryableRequest();
|
||||
}
|
||||
/**
|
||||
* If the request is "cacheable", we need to first compute the key to ask
|
||||
* the cache implementations if this request is on progress or if the
|
||||
* response already exists on the cache.
|
||||
*/
|
||||
const key = {
|
||||
request,
|
||||
mappedRequestOptions,
|
||||
transporter: {
|
||||
queryParameters: transporter.queryParameters,
|
||||
headers: transporter.headers,
|
||||
},
|
||||
};
|
||||
/**
* With the computed key, we first ask the responses cache
* implementation if this request has been resolved before.
*/
|
||||
return transporter.responsesCache.get(key, () => {
|
||||
/**
* If the request has never been resolved before, we ask if there
* is a request with the same key currently in progress.
*/
|
||||
return transporter.requestsCache.get(key, () => {
|
||||
return (transporter.requestsCache
|
||||
/**
|
||||
* Finally, if there is no request in progress with the same key,
|
||||
* this `createRetryableRequest()` will actually trigger the
|
||||
* retryable request.
|
||||
*/
|
||||
.set(key, createRetryableRequest())
|
||||
.then(response => Promise.all([transporter.requestsCache.delete(key), response]), err => Promise.all([transporter.requestsCache.delete(key), Promise.reject(err)]))
|
||||
.then(([_, response]) => response));
|
||||
});
|
||||
}, {
|
||||
/**
* Of course, once we get this response back from the server, we
* tell the response cache to actually store the received response
* to be used later.
*/
|
||||
miss: response => transporter.responsesCache.set(key, response),
|
||||
});
|
||||
},
|
||||
write(request, requestOptions) {
|
||||
/**
|
||||
* On write requests, no cache mechanisms are applied, and we
|
||||
* proxy the request immediately to the requester.
|
||||
*/
|
||||
return retryableRequest(transporter, transporter.hosts.filter(host => (host.accept & CallEnum.Write) !== 0), request, createMappedRequestOptions(requestOptions, transporter.timeouts.write));
|
||||
},
|
||||
};
|
||||
return transporter;
|
||||
}
|
||||
|
||||
function createUserAgent(version) {
|
||||
const userAgent = {
|
||||
value: `Algolia for JavaScript (${version})`,
|
||||
add(options) {
|
||||
const addedUserAgent = `; ${options.segment}${options.version !== undefined ? ` (${options.version})` : ''}`;
|
||||
if (userAgent.value.indexOf(addedUserAgent) === -1) {
|
||||
// eslint-disable-next-line functional/immutable-data
|
||||
userAgent.value = `${userAgent.value}${addedUserAgent}`;
|
||||
}
|
||||
return userAgent;
|
||||
},
|
||||
};
|
||||
return userAgent;
|
||||
}
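// Editorial note (not part of the upstream bundle): segments are appended only once,
// and `add` returns the same object so calls can be chained.
//
//   const ua = createUserAgent('4.24.0');
//   ua.add({ segment: 'Browser', version: 'lite' }).add({ segment: 'Browser', version: 'lite' });
//   ua.value; // -> 'Algolia for JavaScript (4.24.0); Browser (lite)'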
|
||||
|
||||
function deserializeSuccess(response) {
|
||||
// eslint-disable-next-line functional/no-try-statement
|
||||
try {
|
||||
return JSON.parse(response.content);
|
||||
}
|
||||
catch (e) {
|
||||
throw createDeserializationError(e.message, response);
|
||||
}
|
||||
}
|
||||
function deserializeFailure({ content, status }, stackFrame) {
|
||||
// eslint-disable-next-line functional/no-let
|
||||
let message = content;
|
||||
// eslint-disable-next-line functional/no-try-statement
|
||||
try {
|
||||
message = JSON.parse(content).message;
|
||||
}
|
||||
catch (e) {
|
||||
// ..
|
||||
}
|
||||
return createApiError(message, status, stackFrame);
|
||||
}
|
||||
|
||||
function serializeUrl(host, path, queryParameters) {
|
||||
const queryParametersAsString = serializeQueryParameters(queryParameters);
|
||||
// eslint-disable-next-line functional/no-let
|
||||
let url = `${host.protocol}://${host.url}/${path.charAt(0) === '/' ? path.substr(1) : path}`;
|
||||
if (queryParametersAsString.length) {
|
||||
url += `?${queryParametersAsString}`;
|
||||
}
|
||||
return url;
|
||||
}
|
||||
function serializeQueryParameters(parameters) {
|
||||
const isObjectOrArray = (value) => Object.prototype.toString.call(value) === '[object Object]' ||
|
||||
Object.prototype.toString.call(value) === '[object Array]';
|
||||
return Object.keys(parameters)
|
||||
.map(key => encode('%s=%s', key, isObjectOrArray(parameters[key]) ? JSON.stringify(parameters[key]) : parameters[key]))
|
||||
.join('&');
|
||||
}
|
||||
function serializeData(request, requestOptions) {
|
||||
if (request.method === MethodEnum.Get ||
|
||||
(request.data === undefined && requestOptions.data === undefined)) {
|
||||
return undefined;
|
||||
}
|
||||
const data = Array.isArray(request.data)
|
||||
? request.data
|
||||
: { ...request.data, ...requestOptions.data };
|
||||
return JSON.stringify(data);
|
||||
}
|
||||
function serializeHeaders(transporter, requestOptions) {
|
||||
const headers = {
|
||||
...transporter.headers,
|
||||
...requestOptions.headers,
|
||||
};
|
||||
const serializedHeaders = {};
|
||||
Object.keys(headers).forEach(header => {
|
||||
const value = headers[header];
|
||||
// @ts-ignore
|
||||
// eslint-disable-next-line functional/immutable-data
|
||||
serializedHeaders[header.toLowerCase()] = value;
|
||||
});
|
||||
return serializedHeaders;
|
||||
}
|
||||
|
||||
function stackTraceWithoutCredentials(stackTrace) {
|
||||
return stackTrace.map(stackFrame => stackFrameWithoutCredentials(stackFrame));
|
||||
}
|
||||
function stackFrameWithoutCredentials(stackFrame) {
|
||||
const modifiedHeaders = stackFrame.request.headers['x-algolia-api-key']
|
||||
? { 'x-algolia-api-key': '*****' }
|
||||
: {};
|
||||
return {
|
||||
...stackFrame,
|
||||
request: {
|
||||
...stackFrame.request,
|
||||
headers: {
|
||||
...stackFrame.request.headers,
|
||||
...modifiedHeaders,
|
||||
},
|
||||
},
|
||||
};
|
||||
}
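// Editorial note (not part of the upstream bundle): only the API key header is masked;
// the rest of the stack frame is preserved as-is.
//
//   stackFrameWithoutCredentials({
//     request: { headers: { 'x-algolia-api-key': 'secret', accept: 'application/json' } },
//   }).request.headers;
//   // -> { 'x-algolia-api-key': '*****', accept: 'application/json' }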
|
||||
|
||||
function createApiError(message, status, transporterStackTrace) {
|
||||
return {
|
||||
name: 'ApiError',
|
||||
message,
|
||||
status,
|
||||
transporterStackTrace,
|
||||
};
|
||||
}
|
||||
|
||||
function createDeserializationError(message, response) {
|
||||
return {
|
||||
name: 'DeserializationError',
|
||||
message,
|
||||
response,
|
||||
};
|
||||
}
|
||||
|
||||
function createRetryError(transporterStackTrace) {
|
||||
return {
|
||||
name: 'RetryError',
|
||||
message: 'Unreachable hosts - your application id may be incorrect. If the error persists, please reach out to the Algolia Support team: https://alg.li/support .',
|
||||
transporterStackTrace,
|
||||
};
|
||||
}
|
||||
|
||||
const createSearchClient = options => {
|
||||
const appId = options.appId;
|
||||
const auth = createAuth(options.authMode !== undefined ? options.authMode : AuthMode.WithinHeaders, appId, options.apiKey);
|
||||
const transporter = createTransporter({
|
||||
hosts: [
|
||||
{ url: `${appId}-dsn.algolia.net`, accept: CallEnum.Read },
|
||||
{ url: `${appId}.algolia.net`, accept: CallEnum.Write },
|
||||
].concat(shuffle([
|
||||
{ url: `${appId}-1.algolianet.com` },
|
||||
{ url: `${appId}-2.algolianet.com` },
|
||||
{ url: `${appId}-3.algolianet.com` },
|
||||
])),
|
||||
...options,
|
||||
headers: {
|
||||
...auth.headers(),
|
||||
...{ 'content-type': 'application/x-www-form-urlencoded' },
|
||||
...options.headers,
|
||||
},
|
||||
queryParameters: {
|
||||
...auth.queryParameters(),
|
||||
...options.queryParameters,
|
||||
},
|
||||
});
|
||||
const base = {
|
||||
transporter,
|
||||
appId,
|
||||
addAlgoliaAgent(segment, version) {
|
||||
transporter.userAgent.add({ segment, version });
|
||||
},
|
||||
clearCache() {
|
||||
return Promise.all([
|
||||
transporter.requestsCache.clear(),
|
||||
transporter.responsesCache.clear(),
|
||||
]).then(() => undefined);
|
||||
},
|
||||
};
|
||||
return addMethods(base, options.methods);
|
||||
};
|
||||
|
||||
const customRequest = (base) => {
|
||||
return (request, requestOptions) => {
|
||||
if (request.method === MethodEnum.Get) {
|
||||
return base.transporter.read(request, requestOptions);
|
||||
}
|
||||
return base.transporter.write(request, requestOptions);
|
||||
};
|
||||
};
|
||||
|
||||
const initIndex = (base) => {
|
||||
return (indexName, options = {}) => {
|
||||
const searchIndex = {
|
||||
transporter: base.transporter,
|
||||
appId: base.appId,
|
||||
indexName,
|
||||
};
|
||||
return addMethods(searchIndex, options.methods);
|
||||
};
|
||||
};
|
||||
|
||||
const multipleQueries = (base) => {
|
||||
return (queries, requestOptions) => {
|
||||
const requests = queries.map(query => {
|
||||
return {
|
||||
...query,
|
||||
params: serializeQueryParameters(query.params || {}),
|
||||
};
|
||||
});
|
||||
return base.transporter.read({
|
||||
method: MethodEnum.Post,
|
||||
path: '1/indexes/*/queries',
|
||||
data: {
|
||||
requests,
|
||||
},
|
||||
cacheable: true,
|
||||
}, requestOptions);
|
||||
};
|
||||
};
|
||||
|
||||
const multipleSearchForFacetValues = (base) => {
|
||||
return (queries, requestOptions) => {
|
||||
return Promise.all(queries.map(query => {
|
||||
const { facetName, facetQuery, ...params } = query.params;
|
||||
return initIndex(base)(query.indexName, {
|
||||
methods: { searchForFacetValues },
|
||||
}).searchForFacetValues(facetName, facetQuery, {
|
||||
...requestOptions,
|
||||
...params,
|
||||
});
|
||||
}));
|
||||
};
|
||||
};
|
||||
|
||||
const findAnswers = (base) => {
|
||||
return (query, queryLanguages, requestOptions) => {
|
||||
return base.transporter.read({
|
||||
method: MethodEnum.Post,
|
||||
path: encode('1/answers/%s/prediction', base.indexName),
|
||||
data: {
|
||||
query,
|
||||
queryLanguages,
|
||||
},
|
||||
cacheable: true,
|
||||
}, requestOptions);
|
||||
};
|
||||
};
|
||||
|
||||
const search = (base) => {
|
||||
return (query, requestOptions) => {
|
||||
return base.transporter.read({
|
||||
method: MethodEnum.Post,
|
||||
path: encode('1/indexes/%s/query', base.indexName),
|
||||
data: {
|
||||
query,
|
||||
},
|
||||
cacheable: true,
|
||||
}, requestOptions);
|
||||
};
|
||||
};
|
||||
|
||||
const searchForFacetValues = (base) => {
|
||||
return (facetName, facetQuery, requestOptions) => {
|
||||
return base.transporter.read({
|
||||
method: MethodEnum.Post,
|
||||
path: encode('1/indexes/%s/facets/%s/query', base.indexName, facetName),
|
||||
data: {
|
||||
facetQuery,
|
||||
},
|
||||
cacheable: true,
|
||||
}, requestOptions);
|
||||
};
|
||||
};
|
||||
|
||||
const LogLevelEnum = {
|
||||
Debug: 1,
|
||||
Info: 2,
|
||||
Error: 3,
|
||||
};
|
||||
|
||||
/* eslint no-console: 0 */
|
||||
function createConsoleLogger(logLevel) {
|
||||
return {
|
||||
debug(message, args) {
|
||||
if (LogLevelEnum.Debug >= logLevel) {
|
||||
console.debug(message, args);
|
||||
}
|
||||
return Promise.resolve();
|
||||
},
|
||||
info(message, args) {
|
||||
if (LogLevelEnum.Info >= logLevel) {
|
||||
console.info(message, args);
|
||||
}
|
||||
return Promise.resolve();
|
||||
},
|
||||
error(message, args) {
|
||||
console.error(message, args);
|
||||
return Promise.resolve();
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
const getRecommendations = base => {
|
||||
return (queries, requestOptions) => {
|
||||
const requests = queries.map(query => ({
|
||||
...query,
|
||||
// The `threshold` param is required by the endpoint to make it easier
|
||||
// to provide a default value later, so we default it in the client
|
||||
// so that users don't have to provide a value.
|
||||
threshold: query.threshold || 0,
|
||||
}));
|
||||
return base.transporter.read({
|
||||
method: MethodEnum.Post,
|
||||
path: '1/indexes/*/recommendations',
|
||||
data: {
|
||||
requests,
|
||||
},
|
||||
cacheable: true,
|
||||
}, requestOptions);
|
||||
};
|
||||
};
|
||||
|
||||
function createBrowserXhrRequester() {
|
||||
return {
|
||||
send(request) {
|
||||
return new Promise((resolve) => {
|
||||
const baseRequester = new XMLHttpRequest();
|
||||
baseRequester.open(request.method, request.url, true);
|
||||
Object.keys(request.headers).forEach(key => baseRequester.setRequestHeader(key, request.headers[key]));
|
||||
const createTimeout = (timeout, content) => {
|
||||
return setTimeout(() => {
|
||||
baseRequester.abort();
|
||||
resolve({
|
||||
status: 0,
|
||||
content,
|
||||
isTimedOut: true,
|
||||
});
|
||||
}, timeout * 1000);
|
||||
};
|
||||
const connectTimeout = createTimeout(request.connectTimeout, 'Connection timeout');
|
||||
// eslint-disable-next-line functional/no-let
|
||||
let responseTimeout;
|
||||
// eslint-disable-next-line functional/immutable-data
|
||||
baseRequester.onreadystatechange = () => {
|
||||
if (baseRequester.readyState > baseRequester.OPENED && responseTimeout === undefined) {
|
||||
clearTimeout(connectTimeout);
|
||||
responseTimeout = createTimeout(request.responseTimeout, 'Socket timeout');
|
||||
}
|
||||
};
|
||||
// eslint-disable-next-line functional/immutable-data
|
||||
baseRequester.onerror = () => {
|
||||
// istanbul ignore next
|
||||
if (baseRequester.status === 0) {
|
||||
clearTimeout(connectTimeout);
|
||||
clearTimeout(responseTimeout);
|
||||
resolve({
|
||||
content: baseRequester.responseText || 'Network request failed',
|
||||
status: baseRequester.status,
|
||||
isTimedOut: false,
|
||||
});
|
||||
}
|
||||
};
|
||||
// eslint-disable-next-line functional/immutable-data
|
||||
baseRequester.onload = () => {
|
||||
clearTimeout(connectTimeout);
|
||||
clearTimeout(responseTimeout);
|
||||
resolve({
|
||||
content: baseRequester.responseText,
|
||||
status: baseRequester.status,
|
||||
isTimedOut: false,
|
||||
});
|
||||
};
|
||||
baseRequester.send(request.data);
|
||||
});
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
function algoliasearch(appId, apiKey, options) {
|
||||
const commonOptions = {
|
||||
appId,
|
||||
apiKey,
|
||||
timeouts: {
|
||||
connect: 1,
|
||||
read: 2,
|
||||
write: 30,
|
||||
},
|
||||
requester: createBrowserXhrRequester(),
|
||||
logger: createConsoleLogger(LogLevelEnum.Error),
|
||||
responsesCache: createInMemoryCache(),
|
||||
requestsCache: createInMemoryCache({ serializable: false }),
|
||||
hostsCache: createFallbackableCache({
|
||||
caches: [
|
||||
createBrowserLocalStorageCache({ key: `${version}-${appId}` }),
|
||||
createInMemoryCache(),
|
||||
],
|
||||
}),
|
||||
userAgent: createUserAgent(version).add({
|
||||
segment: 'Browser',
|
||||
version: 'lite',
|
||||
}),
|
||||
authMode: AuthMode.WithinQueryParameters,
|
||||
};
|
||||
return createSearchClient({
|
||||
...commonOptions,
|
||||
...options,
|
||||
methods: {
|
||||
search: multipleQueries,
|
||||
searchForFacetValues: multipleSearchForFacetValues,
|
||||
multipleQueries,
|
||||
multipleSearchForFacetValues,
|
||||
customRequest,
|
||||
initIndex: base => (indexName) => {
|
||||
return initIndex(base)(indexName, {
|
||||
methods: { search, searchForFacetValues, findAnswers },
|
||||
});
|
||||
},
|
||||
getRecommendations,
|
||||
},
|
||||
});
|
||||
}
|
||||
// eslint-disable-next-line functional/immutable-data
|
||||
algoliasearch.version = version;
|
||||
|
||||
export default algoliasearch;
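
Taken together, the bundle above wires the browser XHR requester, in-memory request/response caches and a localStorage-backed hosts cache into a search-only client. A hedged usage sketch (placeholder credentials and index name, assuming the `algoliasearch/lite` entry point):

```js
const algoliasearch = require('algoliasearch/lite');

const client = algoliasearch('YourApplicationID', 'YourSearchOnlyAPIKey');
const index = client.initIndex('products');

// The lite initIndex only exposes search, searchForFacetValues and findAnswers.
index
  .search('phone', { hitsPerPage: 10 })
  .then(({ hits }) => console.log(hits))
  .catch(err => console.log(err));
```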
2
scripts/node_modules/algoliasearch/dist/algoliasearch-lite.umd.js
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
164
scripts/node_modules/algoliasearch/dist/algoliasearch.cjs.js
generated
vendored
Normal file
@ -0,0 +1,164 @@
'use strict';
|
||||
|
||||
var cacheCommon = require('@algolia/cache-common');
|
||||
var cacheInMemory = require('@algolia/cache-in-memory');
|
||||
var clientAnalytics = require('@algolia/client-analytics');
|
||||
var clientCommon = require('@algolia/client-common');
|
||||
var clientPersonalization = require('@algolia/client-personalization');
|
||||
var clientSearch = require('@algolia/client-search');
|
||||
var loggerCommon = require('@algolia/logger-common');
|
||||
var recommend = require('@algolia/recommend');
|
||||
var requesterNodeHttp = require('@algolia/requester-node-http');
|
||||
var transporter = require('@algolia/transporter');
|
||||
|
||||
function algoliasearch(appId, apiKey, options) {
|
||||
const commonOptions = {
|
||||
appId,
|
||||
apiKey,
|
||||
timeouts: {
|
||||
connect: 2,
|
||||
read: 5,
|
||||
write: 30,
|
||||
},
|
||||
requester: requesterNodeHttp.createNodeHttpRequester(),
|
||||
logger: loggerCommon.createNullLogger(),
|
||||
responsesCache: cacheCommon.createNullCache(),
|
||||
requestsCache: cacheCommon.createNullCache(),
|
||||
hostsCache: cacheInMemory.createInMemoryCache(),
|
||||
userAgent: transporter.createUserAgent(clientCommon.version).add({
|
||||
segment: 'Node.js',
|
||||
version: process.versions.node,
|
||||
}),
|
||||
};
|
||||
const searchClientOptions = { ...commonOptions, ...options };
|
||||
const initPersonalization = () => (clientOptions) => {
|
||||
return clientPersonalization.createPersonalizationClient({
|
||||
...commonOptions,
|
||||
...clientOptions,
|
||||
methods: {
|
||||
getPersonalizationStrategy: clientPersonalization.getPersonalizationStrategy,
|
||||
setPersonalizationStrategy: clientPersonalization.setPersonalizationStrategy,
|
||||
},
|
||||
});
|
||||
};
|
||||
return clientSearch.createSearchClient({
|
||||
...searchClientOptions,
|
||||
methods: {
|
||||
search: clientSearch.multipleQueries,
|
||||
searchForFacetValues: clientSearch.multipleSearchForFacetValues,
|
||||
multipleBatch: clientSearch.multipleBatch,
|
||||
multipleGetObjects: clientSearch.multipleGetObjects,
|
||||
multipleQueries: clientSearch.multipleQueries,
|
||||
copyIndex: clientSearch.copyIndex,
|
||||
copySettings: clientSearch.copySettings,
|
||||
copyRules: clientSearch.copyRules,
|
||||
copySynonyms: clientSearch.copySynonyms,
|
||||
moveIndex: clientSearch.moveIndex,
|
||||
listIndices: clientSearch.listIndices,
|
||||
getLogs: clientSearch.getLogs,
|
||||
listClusters: clientSearch.listClusters,
|
||||
multipleSearchForFacetValues: clientSearch.multipleSearchForFacetValues,
|
||||
getApiKey: clientSearch.getApiKey,
|
||||
addApiKey: clientSearch.addApiKey,
|
||||
listApiKeys: clientSearch.listApiKeys,
|
||||
updateApiKey: clientSearch.updateApiKey,
|
||||
deleteApiKey: clientSearch.deleteApiKey,
|
||||
restoreApiKey: clientSearch.restoreApiKey,
|
||||
assignUserID: clientSearch.assignUserID,
|
||||
assignUserIDs: clientSearch.assignUserIDs,
|
||||
getUserID: clientSearch.getUserID,
|
||||
searchUserIDs: clientSearch.searchUserIDs,
|
||||
listUserIDs: clientSearch.listUserIDs,
|
||||
getTopUserIDs: clientSearch.getTopUserIDs,
|
||||
removeUserID: clientSearch.removeUserID,
|
||||
hasPendingMappings: clientSearch.hasPendingMappings,
|
||||
generateSecuredApiKey: clientSearch.generateSecuredApiKey,
|
||||
getSecuredApiKeyRemainingValidity: clientSearch.getSecuredApiKeyRemainingValidity,
|
||||
destroy: clientCommon.destroy,
|
||||
clearDictionaryEntries: clientSearch.clearDictionaryEntries,
|
||||
deleteDictionaryEntries: clientSearch.deleteDictionaryEntries,
|
||||
getDictionarySettings: clientSearch.getDictionarySettings,
|
||||
getAppTask: clientSearch.getAppTask,
|
||||
replaceDictionaryEntries: clientSearch.replaceDictionaryEntries,
|
||||
saveDictionaryEntries: clientSearch.saveDictionaryEntries,
|
||||
searchDictionaryEntries: clientSearch.searchDictionaryEntries,
|
||||
setDictionarySettings: clientSearch.setDictionarySettings,
|
||||
waitAppTask: clientSearch.waitAppTask,
|
||||
customRequest: clientSearch.customRequest,
|
||||
initIndex: base => (indexName) => {
|
||||
return clientSearch.initIndex(base)(indexName, {
|
||||
methods: {
|
||||
batch: clientSearch.batch,
|
||||
delete: clientSearch.deleteIndex,
|
||||
findAnswers: clientSearch.findAnswers,
|
||||
getObject: clientSearch.getObject,
|
||||
getObjects: clientSearch.getObjects,
|
||||
saveObject: clientSearch.saveObject,
|
||||
saveObjects: clientSearch.saveObjects,
|
||||
search: clientSearch.search,
|
||||
searchForFacetValues: clientSearch.searchForFacetValues,
|
||||
waitTask: clientSearch.waitTask,
|
||||
setSettings: clientSearch.setSettings,
|
||||
getSettings: clientSearch.getSettings,
|
||||
partialUpdateObject: clientSearch.partialUpdateObject,
|
||||
partialUpdateObjects: clientSearch.partialUpdateObjects,
|
||||
deleteObject: clientSearch.deleteObject,
|
||||
deleteObjects: clientSearch.deleteObjects,
|
||||
deleteBy: clientSearch.deleteBy,
|
||||
clearObjects: clientSearch.clearObjects,
|
||||
browseObjects: clientSearch.browseObjects,
|
||||
getObjectPosition: clientSearch.getObjectPosition,
|
||||
findObject: clientSearch.findObject,
|
||||
exists: clientSearch.exists,
|
||||
saveSynonym: clientSearch.saveSynonym,
|
||||
saveSynonyms: clientSearch.saveSynonyms,
|
||||
getSynonym: clientSearch.getSynonym,
|
||||
searchSynonyms: clientSearch.searchSynonyms,
|
||||
browseSynonyms: clientSearch.browseSynonyms,
|
||||
deleteSynonym: clientSearch.deleteSynonym,
|
||||
clearSynonyms: clientSearch.clearSynonyms,
|
||||
replaceAllObjects: clientSearch.replaceAllObjects,
|
||||
replaceAllSynonyms: clientSearch.replaceAllSynonyms,
|
||||
searchRules: clientSearch.searchRules,
|
||||
getRule: clientSearch.getRule,
|
||||
deleteRule: clientSearch.deleteRule,
|
||||
saveRule: clientSearch.saveRule,
|
||||
saveRules: clientSearch.saveRules,
|
||||
replaceAllRules: clientSearch.replaceAllRules,
|
||||
browseRules: clientSearch.browseRules,
|
||||
clearRules: clientSearch.clearRules,
|
||||
},
|
||||
});
|
||||
},
|
||||
initAnalytics: () => (clientOptions) => {
|
||||
return clientAnalytics.createAnalyticsClient({
|
||||
...commonOptions,
|
||||
...clientOptions,
|
||||
methods: {
|
||||
addABTest: clientAnalytics.addABTest,
|
||||
getABTest: clientAnalytics.getABTest,
|
||||
getABTests: clientAnalytics.getABTests,
|
||||
stopABTest: clientAnalytics.stopABTest,
|
||||
deleteABTest: clientAnalytics.deleteABTest,
|
||||
},
|
||||
});
|
||||
},
|
||||
initPersonalization,
|
||||
initRecommendation: () => (clientOptions) => {
|
||||
searchClientOptions.logger.info('The `initRecommendation` method is deprecated. Use `initPersonalization` instead.');
|
||||
return initPersonalization()(clientOptions);
|
||||
},
|
||||
getRecommendations: recommend.getRecommendations,
|
||||
getFrequentlyBoughtTogether: recommend.getFrequentlyBoughtTogether,
|
||||
getLookingSimilar: recommend.getLookingSimilar,
|
||||
getRecommendedForYou: recommend.getRecommendedForYou,
|
||||
getRelatedProducts: recommend.getRelatedProducts,
|
||||
getTrendingFacets: recommend.getTrendingFacets,
|
||||
getTrendingItems: recommend.getTrendingItems,
|
||||
},
|
||||
});
|
||||
}
|
||||
// eslint-disable-next-line functional/immutable-data
|
||||
algoliasearch.version = clientCommon.version;
|
||||
|
||||
module.exports = algoliasearch;
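
The Node entry point above defaults to the node-http requester, a null logger and null request/response caches, and exposes the full set of index and admin methods wired into `initIndex`. A hedged end-to-end sketch (placeholder credentials and index name; `.wait()` availability assumed per `WaitablePromise` in the type definitions):

```js
const algoliasearch = require('algoliasearch');

const client = algoliasearch('YourApplicationID', 'YourAdminAPIKey');
const index = client.initIndex('products');

index
  .saveObject({ objectID: '1', name: 'iPhone' })
  .wait() // block until the indexing task completes
  .then(() => index.search('iphone'))
  .then(({ hits }) => console.log(hits))
  .catch(err => console.error(err));
```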
255
scripts/node_modules/algoliasearch/dist/algoliasearch.d.ts
generated
vendored
Normal file
@ -0,0 +1,255 @@
import { ABTest } from '@algolia/client-analytics';
|
||||
import { AddABTestResponse } from '@algolia/client-analytics';
|
||||
import { AddApiKeyOptions } from '@algolia/client-search';
|
||||
import { AddApiKeyResponse } from '@algolia/client-search';
|
||||
import { AnalyticsClient as AnalyticsClient_2 } from '@algolia/client-analytics';
|
||||
import { AnalyticsClientOptions } from '@algolia/client-analytics';
|
||||
import { ApiKeyACLType } from '@algolia/client-search';
|
||||
import { AssignUserIDResponse } from '@algolia/client-search';
|
||||
import { AssignUserIDsResponse } from '@algolia/client-search';
|
||||
import { BatchRequest } from '@algolia/client-search';
|
||||
import { BatchResponse } from '@algolia/client-search';
|
||||
import { BrowseOptions } from '@algolia/client-search';
|
||||
import { ChunkedBatchResponse } from '@algolia/client-search';
|
||||
import { ChunkOptions } from '@algolia/client-search';
|
||||
import { ClearRulesOptions } from '@algolia/client-search';
|
||||
import { ClearSynonymsOptions } from '@algolia/client-search';
|
||||
import { ClientTransporterOptions } from '@algolia/client-common';
|
||||
import { CopyIndexOptions } from '@algolia/client-search';
|
||||
import { DeleteABTestResponse } from '@algolia/client-analytics';
|
||||
import { DeleteApiKeyResponse } from '@algolia/client-search';
|
||||
import { DeleteByFiltersOptions } from '@algolia/client-search';
|
||||
import { DeleteResponse } from '@algolia/client-search';
|
||||
import { DeleteSynonymOptions } from '@algolia/client-search';
|
||||
import { Destroyable } from '@algolia/requester-common';
|
||||
import { DictionaryEntriesOptions } from '@algolia/client-search';
|
||||
import { DictionaryEntriesResponse } from '@algolia/client-search';
|
||||
import { DictionaryEntry } from '@algolia/client-search';
|
||||
import { DictionaryName } from '@algolia/client-search';
|
||||
import { DictionarySettings } from '@algolia/client-search';
|
||||
import { FindAnswersOptions } from '@algolia/client-search';
|
||||
import { FindAnswersResponse } from '@algolia/client-search';
|
||||
import { FindObjectOptions } from '@algolia/client-search';
|
||||
import { FindObjectResponse } from '@algolia/client-search';
|
||||
import { GetABTestResponse } from '@algolia/client-analytics';
|
||||
import { GetABTestsOptions } from '@algolia/client-analytics';
|
||||
import { GetABTestsResponse } from '@algolia/client-analytics';
|
||||
import { GetApiKeyResponse } from '@algolia/client-search';
|
||||
import { GetDictionarySettingsResponse } from '@algolia/client-search';
|
||||
import { GetLogsResponse } from '@algolia/client-search';
|
||||
import { GetObjectOptions } from '@algolia/client-search';
|
||||
import { GetObjectsOptions } from '@algolia/client-search';
|
||||
import { GetObjectsResponse } from '@algolia/client-search';
|
||||
import { GetPersonalizationStrategyResponse } from '@algolia/client-personalization';
|
||||
import { GetTopUserIDsResponse } from '@algolia/client-search';
|
||||
import { HasPendingMappingsOptions } from '@algolia/client-search';
import { HasPendingMappingsResponse } from '@algolia/client-search';
import { IndexOperationResponse } from '@algolia/client-search';
import { ListApiKeysResponse } from '@algolia/client-search';
import { ListClustersResponse } from '@algolia/client-search';
import { ListIndicesResponse } from '@algolia/client-search';
import { ListUserIDsOptions } from '@algolia/client-search';
import { ListUserIDsResponse } from '@algolia/client-search';
import { MultipleBatchRequest } from '@algolia/client-search';
import { MultipleBatchResponse } from '@algolia/client-search';
import { MultipleGetObject } from '@algolia/client-search';
import { MultipleGetObjectsResponse } from '@algolia/client-search';
import { MultipleQueriesOptions } from '@algolia/client-search';
import { MultipleQueriesQuery } from '@algolia/client-search';
import { MultipleQueriesResponse } from '@algolia/client-search';
import { ObjectWithObjectID } from '@algolia/client-search';
import { PartialUpdateObjectResponse } from '@algolia/client-search';
import { PartialUpdateObjectsOptions } from '@algolia/client-search';
import { PersonalizationClient as PersonalizationClient_2 } from '@algolia/client-personalization';
import { PersonalizationClientOptions } from '@algolia/client-personalization';
import { PersonalizationStrategy } from '@algolia/client-personalization';
import { RemoveUserIDResponse } from '@algolia/client-search';
import { ReplaceAllObjectsOptions } from '@algolia/client-search';
import { Request as Request_2 } from '@algolia/transporter';
import { RequestOptions } from '@algolia/transporter';
import { RestoreApiKeyResponse } from '@algolia/client-search';
import { Rule } from '@algolia/client-search';
import { SaveObjectResponse } from '@algolia/client-search';
import { SaveObjectsOptions } from '@algolia/client-search';
import { SaveRuleResponse } from '@algolia/client-search';
import { SaveRulesOptions } from '@algolia/client-search';
import { SaveRulesResponse } from '@algolia/client-search';
import { SaveSynonymResponse } from '@algolia/client-search';
import { SaveSynonymsOptions } from '@algolia/client-search';
import { SaveSynonymsResponse } from '@algolia/client-search';
import { SearchClient as SearchClient_2 } from '@algolia/client-search';
import { SearchClientOptions } from '@algolia/client-search';
import { SearchDictionaryEntriesResponse } from '@algolia/client-search';
import { SearchForFacetValuesQueryParams } from '@algolia/client-search';
import { SearchForFacetValuesResponse } from '@algolia/client-search';
import { SearchIndex as SearchIndex_2 } from '@algolia/client-search';
import { SearchOptions } from '@algolia/client-search';
import { SearchResponse } from '@algolia/client-search';
import { SearchRulesOptions } from '@algolia/client-search';
import { SearchSynonymsOptions } from '@algolia/client-search';
import { SearchSynonymsResponse } from '@algolia/client-search';
import { SearchUserIDsOptions } from '@algolia/client-search';
import { SearchUserIDsResponse } from '@algolia/client-search';
import { SecuredApiKeyRestrictions } from '@algolia/client-search';
import { SetPersonalizationStrategyResponse } from '@algolia/client-personalization';
import { SetSettingsResponse } from '@algolia/client-search';
import { Settings } from '@algolia/client-search';
import { StopABTestResponse } from '@algolia/client-analytics';
import { Synonym } from '@algolia/client-search';
import { TaskStatusResponse } from '@algolia/client-search';
import { UpdateApiKeyOptions } from '@algolia/client-search';
import { UpdateApiKeyResponse } from '@algolia/client-search';
import { UserIDResponse } from '@algolia/client-search';
import { WaitablePromise } from '@algolia/client-common';
import { WithRecommendMethods } from '@algolia/recommend';

declare function algoliasearch(appId: string, apiKey: string, options?: AlgoliaSearchOptions): SearchClient;

declare namespace algoliasearch {
    var version: string;
}

export default algoliasearch;

export declare type AlgoliaSearchOptions = Partial<ClientTransporterOptions> & WithoutCredentials<SearchClientOptions>;

export declare type AnalyticsClient = AnalyticsClient_2 & {
    readonly addABTest: (abTest: ABTest, requestOptions?: RequestOptions) => Readonly<Promise<AddABTestResponse>>;
    readonly getABTest: (abTestID: number, requestOptions?: RequestOptions) => Readonly<Promise<GetABTestResponse>>;
    readonly getABTests: (requestOptions?: RequestOptions & GetABTestsOptions) => Readonly<Promise<GetABTestsResponse>>;
    readonly stopABTest: (abTestID: number, requestOptions?: RequestOptions) => Readonly<Promise<StopABTestResponse>>;
    readonly deleteABTest: (abTestID: number, requestOptions?: RequestOptions) => Readonly<Promise<DeleteABTestResponse>>;
};

declare type Credentials = {
    readonly appId: string;
    readonly apiKey: string;
};

export declare type InitAnalyticsOptions = Partial<ClientTransporterOptions> & OptionalCredentials<AnalyticsClientOptions>;

export declare type InitPersonalizationOptions = Partial<ClientTransporterOptions> & OptionalCredentials<PersonalizationClientOptions>;

/**
 * @deprecated Use `InitPersonalizationOptions` instead.
 */
export declare type InitRecommendationOptions = InitPersonalizationOptions;

export declare type OptionalCredentials<TClientOptions extends Credentials> = Omit<TClientOptions, keyof Credentials> & Pick<Partial<TClientOptions>, keyof Credentials>;

export declare type PersonalizationClient = PersonalizationClient_2 & {
    readonly getPersonalizationStrategy: (requestOptions?: RequestOptions) => Readonly<Promise<GetPersonalizationStrategyResponse>>;
    readonly setPersonalizationStrategy: (personalizationStrategy: PersonalizationStrategy, requestOptions?: RequestOptions) => Readonly<Promise<SetPersonalizationStrategyResponse>>;
};

/**
 * @deprecated Use `PersonalizationClient` instead.
 */
export declare type RecommendationClient = PersonalizationClient;

export declare type SearchClient = SearchClient_2 & {
    readonly initIndex: (indexName: string) => SearchIndex;
    readonly search: <TObject>(queries: readonly MultipleQueriesQuery[], requestOptions?: RequestOptions & MultipleQueriesOptions) => Readonly<Promise<MultipleQueriesResponse<TObject>>>;
    readonly searchForFacetValues: (queries: ReadonlyArray<{
        readonly indexName: string;
        readonly params: SearchForFacetValuesQueryParams & SearchOptions;
    }>, requestOptions?: RequestOptions) => Readonly<Promise<readonly SearchForFacetValuesResponse[]>>;
    readonly multipleBatch: (requests: readonly MultipleBatchRequest[], requestOptions?: RequestOptions) => Readonly<WaitablePromise<MultipleBatchResponse>>;
    readonly multipleGetObjects: <TObject>(requests: readonly MultipleGetObject[], requestOptions?: RequestOptions) => Readonly<Promise<MultipleGetObjectsResponse<TObject>>>;
    readonly multipleQueries: <TObject>(queries: readonly MultipleQueriesQuery[], requestOptions?: RequestOptions & MultipleQueriesOptions) => Readonly<Promise<MultipleQueriesResponse<TObject>>>;
    readonly copyIndex: (from: string, to: string, requestOptions?: CopyIndexOptions & RequestOptions) => Readonly<WaitablePromise<IndexOperationResponse>>;
    readonly copySettings: (from: string, to: string, requestOptions?: RequestOptions) => Readonly<WaitablePromise<IndexOperationResponse>>;
    readonly copyRules: (from: string, to: string, requestOptions?: RequestOptions) => Readonly<WaitablePromise<IndexOperationResponse>>;
    readonly copySynonyms: (from: string, to: string, requestOptions?: RequestOptions) => Readonly<WaitablePromise<IndexOperationResponse>>;
    readonly moveIndex: (from: string, to: string, requestOptions?: RequestOptions) => Readonly<WaitablePromise<IndexOperationResponse>>;
    readonly listIndices: (requestOptions?: RequestOptions) => Readonly<Promise<ListIndicesResponse>>;
    readonly getLogs: (requestOptions?: RequestOptions) => Readonly<Promise<GetLogsResponse>>;
    readonly listClusters: (requestOptions?: RequestOptions) => Readonly<Promise<ListClustersResponse>>;
    readonly multipleSearchForFacetValues: (queries: ReadonlyArray<{
        readonly indexName: string;
        readonly params: SearchForFacetValuesQueryParams & SearchOptions;
    }>, requestOptions?: RequestOptions) => Readonly<Promise<readonly SearchForFacetValuesResponse[]>>;
    readonly getApiKey: (apiKey: string, requestOptions?: RequestOptions) => Readonly<Promise<GetApiKeyResponse>>;
    readonly addApiKey: (acl: readonly ApiKeyACLType[], requestOptions?: AddApiKeyOptions & Pick<RequestOptions, Exclude<keyof RequestOptions, 'queryParameters'>>) => Readonly<WaitablePromise<AddApiKeyResponse>>;
    readonly listApiKeys: (requestOptions?: RequestOptions) => Readonly<Promise<ListApiKeysResponse>>;
    readonly updateApiKey: (apiKey: string, requestOptions?: UpdateApiKeyOptions & Pick<RequestOptions, Exclude<keyof RequestOptions, 'queryParameters'>>) => Readonly<WaitablePromise<UpdateApiKeyResponse>>;
    readonly deleteApiKey: (apiKey: string, requestOptions?: RequestOptions) => Readonly<WaitablePromise<DeleteApiKeyResponse>>;
    readonly restoreApiKey: (apiKey: string, requestOptions?: RequestOptions) => Readonly<WaitablePromise<RestoreApiKeyResponse>>;
    readonly assignUserID: (userID: string, clusterName: string, requestOptions?: RequestOptions) => Readonly<Promise<AssignUserIDResponse>>;
    readonly assignUserIDs: (userIDs: readonly string[], clusterName: string, requestOptions?: RequestOptions) => Readonly<Promise<AssignUserIDsResponse>>;
    readonly getUserID: (userID: string, requestOptions?: RequestOptions) => Readonly<Promise<UserIDResponse>>;
    readonly searchUserIDs: (query: string, requestOptions?: SearchUserIDsOptions & RequestOptions) => Readonly<Promise<SearchUserIDsResponse>>;
    readonly listUserIDs: (requestOptions?: ListUserIDsOptions & RequestOptions) => Readonly<Promise<ListUserIDsResponse>>;
    readonly getTopUserIDs: (requestOptions?: RequestOptions) => Readonly<Promise<GetTopUserIDsResponse>>;
    readonly removeUserID: (userID: string, requestOptions?: RequestOptions) => Readonly<Promise<RemoveUserIDResponse>>;
    readonly hasPendingMappings: (requestOptions?: HasPendingMappingsOptions & RequestOptions) => Readonly<Promise<HasPendingMappingsResponse>>;
    readonly generateSecuredApiKey: (parentApiKey: string, restrictions: SecuredApiKeyRestrictions) => string;
    readonly getSecuredApiKeyRemainingValidity: (securedApiKey: string) => number;
    readonly clearDictionaryEntries: (dictionary: DictionaryName, requestOptions?: RequestOptions & DictionaryEntriesOptions) => Readonly<WaitablePromise<DictionaryEntriesResponse>>;
    readonly deleteDictionaryEntries: (dictionary: DictionaryName, objectIDs: readonly string[], requestOptions?: RequestOptions & DictionaryEntriesOptions) => Readonly<WaitablePromise<DictionaryEntriesResponse>>;
    readonly replaceDictionaryEntries: (dictionary: DictionaryName, entries: readonly DictionaryEntry[], requestOptions?: RequestOptions & DictionaryEntriesOptions) => Readonly<WaitablePromise<DictionaryEntriesResponse>>;
    readonly saveDictionaryEntries: (dictionary: DictionaryName, entries: readonly DictionaryEntry[], requestOptions?: RequestOptions & DictionaryEntriesOptions) => Readonly<WaitablePromise<DictionaryEntriesResponse>>;
    readonly searchDictionaryEntries: (dictionary: DictionaryName, query: string, requestOptions?: RequestOptions) => Readonly<Promise<SearchDictionaryEntriesResponse>>;
    readonly getDictionarySettings: (requestOptions?: RequestOptions) => Readonly<Promise<GetDictionarySettingsResponse>>;
    readonly setDictionarySettings: (settings: DictionarySettings, requestOptions?: RequestOptions) => Readonly<WaitablePromise<DictionaryEntriesResponse>>;
    readonly getAppTask: (taskID: number, requestOptions?: RequestOptions) => Readonly<Promise<TaskStatusResponse>>;
    readonly customRequest: <TResponse>(request: Request_2, requestOptions?: RequestOptions) => Readonly<Promise<TResponse>>;
    readonly initAnalytics: (options?: InitAnalyticsOptions) => AnalyticsClient;
    readonly initPersonalization: (options?: InitPersonalizationOptions) => PersonalizationClient;
    /**
     * @deprecated Use `initPersonalization` instead.
     */
    readonly initRecommendation: (options?: InitPersonalizationOptions) => PersonalizationClient;
    readonly getRecommendations: WithRecommendMethods<SearchClient_2>['getRecommendations'];
    readonly getFrequentlyBoughtTogether: WithRecommendMethods<SearchClient_2>['getFrequentlyBoughtTogether'];
    readonly getLookingSimilar: WithRecommendMethods<SearchClient_2>['getLookingSimilar'];
    readonly getRecommendedForYou: WithRecommendMethods<SearchClient_2>['getRecommendedForYou'];
    readonly getRelatedProducts: WithRecommendMethods<SearchClient_2>['getRelatedProducts'];
    readonly getTrendingFacets: WithRecommendMethods<SearchClient_2>['getTrendingFacets'];
    readonly getTrendingItems: WithRecommendMethods<SearchClient_2>['getTrendingItems'];
} & Destroyable;

export declare type SearchIndex = SearchIndex_2 & {
    readonly search: <TObject>(query: string, requestOptions?: RequestOptions & SearchOptions) => Readonly<Promise<SearchResponse<TObject>>>;
    readonly searchForFacetValues: (facetName: string, facetQuery: string, requestOptions?: RequestOptions & SearchOptions) => Readonly<Promise<SearchForFacetValuesResponse>>;
    readonly findAnswers: <TObject>(query: string, queryLanguages: readonly string[], requestOptions?: RequestOptions & FindAnswersOptions) => Readonly<Promise<FindAnswersResponse<TObject>>>;
    readonly batch: (requests: readonly BatchRequest[], requestOptions?: RequestOptions) => Readonly<WaitablePromise<BatchResponse>>;
    readonly delete: (requestOptions?: RequestOptions) => Readonly<WaitablePromise<DeleteResponse>>;
    readonly getObject: <TObject>(objectID: string, requestOptions?: RequestOptions & GetObjectOptions) => Readonly<Promise<TObject & ObjectWithObjectID>>;
    readonly getObjects: <TObject>(objectIDs: readonly string[], requestOptions?: RequestOptions & GetObjectsOptions) => Readonly<Promise<GetObjectsResponse<TObject>>>;
    readonly saveObject: (object: Readonly<Record<string, any>>, requestOptions?: RequestOptions & ChunkOptions & SaveObjectsOptions) => Readonly<WaitablePromise<SaveObjectResponse>>;
    readonly saveObjects: (objects: ReadonlyArray<Readonly<Record<string, any>>>, requestOptions?: RequestOptions & ChunkOptions & SaveObjectsOptions) => Readonly<WaitablePromise<ChunkedBatchResponse>>;
    readonly waitTask: (taskID: number, requestOptions?: RequestOptions) => Readonly<Promise<void>>;
    readonly setSettings: (settings: Settings, requestOptions?: RequestOptions) => Readonly<WaitablePromise<SetSettingsResponse>>;
    readonly getSettings: (requestOptions?: RequestOptions) => Readonly<Promise<Settings>>;
    readonly partialUpdateObject: (object: Record<string, any>, requestOptions?: RequestOptions & ChunkOptions & PartialUpdateObjectsOptions) => Readonly<WaitablePromise<PartialUpdateObjectResponse>>;
    readonly partialUpdateObjects: (objects: ReadonlyArray<Record<string, any>>, requestOptions?: RequestOptions & ChunkOptions & PartialUpdateObjectsOptions) => Readonly<WaitablePromise<ChunkedBatchResponse>>;
    readonly deleteObject: (objectID: string, requestOptions?: RequestOptions) => Readonly<WaitablePromise<DeleteResponse>>;
    readonly deleteObjects: (objectIDs: readonly string[], requestOptions?: RequestOptions & ChunkOptions) => Readonly<WaitablePromise<ChunkedBatchResponse>>;
    readonly deleteBy: (filters: DeleteByFiltersOptions, requestOptions?: RequestOptions) => Readonly<WaitablePromise<DeleteResponse>>;
    readonly clearObjects: (requestOptions?: RequestOptions) => Readonly<WaitablePromise<DeleteResponse>>;
    readonly browseObjects: <TObject>(requestOptions?: SearchOptions & BrowseOptions<TObject> & RequestOptions) => Readonly<Promise<void>>;
    readonly getObjectPosition: (searchResponse: SearchResponse<{}>, objectID: string) => number;
    readonly findObject: <TObject>(callback: (object: TObject & ObjectWithObjectID) => boolean, requestOptions?: FindObjectOptions & RequestOptions) => Readonly<Promise<FindObjectResponse<TObject>>>;
    readonly exists: (requestOptions?: RequestOptions) => Readonly<Promise<boolean>>;
    readonly saveSynonym: (synonym: Synonym, requestOptions?: RequestOptions & SaveSynonymsOptions) => Readonly<WaitablePromise<SaveSynonymResponse>>;
    readonly saveSynonyms: (synonyms: readonly Synonym[], requestOptions?: SaveSynonymsOptions & RequestOptions) => Readonly<WaitablePromise<SaveSynonymsResponse>>;
    readonly getSynonym: (objectID: string, requestOptions?: RequestOptions) => Readonly<Promise<Synonym>>;
    readonly searchSynonyms: (query: string, requestOptions?: SearchSynonymsOptions & RequestOptions) => Readonly<Promise<SearchSynonymsResponse>>;
    readonly browseSynonyms: (requestOptions?: SearchSynonymsOptions & BrowseOptions<Synonym> & RequestOptions) => Readonly<Promise<void>>;
    readonly deleteSynonym: (objectID: string, requestOptions?: DeleteSynonymOptions & RequestOptions) => Readonly<WaitablePromise<DeleteResponse>>;
    readonly clearSynonyms: (requestOptions?: ClearSynonymsOptions & RequestOptions) => Readonly<WaitablePromise<DeleteResponse>>;
    readonly replaceAllObjects: (objects: ReadonlyArray<Readonly<Record<string, any>>>, requestOptions?: ReplaceAllObjectsOptions & ChunkOptions & SaveObjectsOptions & RequestOptions) => Readonly<WaitablePromise<ChunkedBatchResponse>>;
    readonly replaceAllSynonyms: (synonyms: readonly Synonym[], requestOptions?: RequestOptions & Pick<SaveSynonymsOptions, Exclude<keyof SaveSynonymsOptions, 'clearExistingSynonyms' | 'replaceExistingSynonyms'>>) => Readonly<WaitablePromise<SaveSynonymsResponse>>;
    readonly searchRules: (query: string, requestOptions?: RequestOptions & SearchRulesOptions) => Readonly<Promise<SearchResponse<Rule>>>;
    readonly getRule: (objectID: string, requestOptions?: RequestOptions) => Readonly<Promise<Rule>>;
    readonly deleteRule: (objectID: string, requestOptions?: RequestOptions) => Readonly<WaitablePromise<DeleteResponse>>;
    readonly saveRule: (rule: Rule, requestOptions?: RequestOptions & SaveRulesOptions) => Readonly<WaitablePromise<SaveRuleResponse>>;
    readonly saveRules: (rules: readonly Rule[], requestOptions?: RequestOptions & SaveRulesOptions) => Readonly<WaitablePromise<SaveRulesResponse>>;
    readonly replaceAllRules: (rules: readonly Rule[], requestOptions?: RequestOptions & SaveRulesOptions) => Readonly<WaitablePromise<SaveRulesResponse>>;
    readonly browseRules: (requestOptions?: SearchRulesOptions & BrowseOptions<Rule> & RequestOptions) => Readonly<Promise<void>>;
    readonly clearRules: (requestOptions?: RequestOptions & ClearRulesOptions) => Readonly<WaitablePromise<DeleteResponse>>;
};

export declare type WithoutCredentials<TClientOptions extends Credentials> = Omit<TClientOptions, keyof Credentials>;

export { }
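The declaration file above describes the public surface of the vendored client: an `algoliasearch(appId, apiKey, options?)` factory returning a `SearchClient`, whose `initIndex` yields a typed `SearchIndex`. The following TypeScript sketch is illustrative only and not part of the diff; the app ID, API key, and index name are placeholders.

```ts
// Illustrative sketch, not part of the vendored diff. Placeholder credentials.
import algoliasearch from 'algoliasearch';

// algoliasearch() returns the SearchClient declared above; initIndex() returns a SearchIndex.
const client = algoliasearch('YOUR_APP_ID', 'YOUR_ADMIN_API_KEY');
const index = client.initIndex('docs');

async function demo(): Promise<void> {
  // saveObjects() resolves to a waitable ChunkedBatchResponse, so .wait() pauses
  // until the indexing task has completed.
  await index.saveObjects([{ objectID: '1', title: 'Hello world' }]).wait();

  // search<TObject>() resolves to SearchResponse<TObject>; hits carry objectID plus TObject.
  const { hits } = await index.search<{ title: string }>('hello');
  console.log(hits.map((hit) => hit.title));
}

demo().catch(console.error);
```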
2371
scripts/node_modules/algoliasearch/dist/algoliasearch.esm.browser.js
generated
vendored
Normal file
File diff suppressed because it is too large

2
scripts/node_modules/algoliasearch/dist/algoliasearch.umd.js
generated
vendored
Normal file
File diff suppressed because one or more lines are too long

3
scripts/node_modules/algoliasearch/index.d.ts
generated
vendored
Normal file
@ -0,0 +1,3 @@
/* eslint-disable import/no-unresolved*/
export * from './dist/algoliasearch';
export { default } from './dist/algoliasearch';
15
scripts/node_modules/algoliasearch/index.js
generated
vendored
Normal file
@ -0,0 +1,15 @@
/* eslint-disable functional/immutable-data, import/no-commonjs */
const algoliasearch = require('./dist/algoliasearch.cjs.js');

/**
 * The CommonJS build is the default entry point for the Node environment. Keep
 * in mind that for the browser environment, we hint the bundler to use the UMD
 * build instead, as specified under the `browser` key of our `package.json` file.
 */
module.exports = algoliasearch;

/**
 * In addition, we also set the default export explicitly below, making this
 * CommonJS module compliant with the ES modules specification.
 */
module.exports.default = algoliasearch;
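Because `index.js` assigns the factory to both `module.exports` and `module.exports.default`, the same entry works for CommonJS `require` and for default-import style consumers. A minimal sketch (not part of the diff; credentials are placeholders):

```ts
// Minimal sketch, not part of the vendored diff. Placeholder credentials.
// ES-module style default import resolves through module.exports.default:
import algoliasearch from 'algoliasearch';

const client = algoliasearch('YOUR_APP_ID', 'YOUR_API_KEY');
console.log(typeof client.initIndex); // "function"

// Equivalent CommonJS usage in Node resolves through module.exports:
//   const algoliasearch = require('algoliasearch');
//   const client = algoliasearch('YOUR_APP_ID', 'YOUR_API_KEY');
```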
3
scripts/node_modules/algoliasearch/lite.d.ts
generated
vendored
Normal file
@ -0,0 +1,3 @@
/* eslint-disable import/no-unresolved*/
export * from './dist/algoliasearch-lite';
export { default } from './dist/algoliasearch-lite';

2
scripts/node_modules/algoliasearch/lite.js
generated
vendored
Normal file
@ -0,0 +1,2 @@
// eslint-disable-next-line functional/immutable-data, import/no-commonjs
module.exports = require('./index');
47
scripts/node_modules/algoliasearch/package.json
generated
vendored
Normal file
@ -0,0 +1,47 @@
{
  "name": "algoliasearch",
  "version": "4.24.0",
  "private": false,
  "description": "A fully-featured and blazing-fast JavaScript API client to interact with Algolia API.",
  "repository": {
    "type": "git",
    "url": "git://github.com/algolia/algoliasearch-client-javascript.git"
  },
  "license": "MIT",
  "sideEffects": false,
  "main": "index.js",
  "jsdelivr": "./dist/algoliasearch.umd.js",
  "unpkg": "./dist/algoliasearch.umd.js",
  "browser": {
    "./index.js": "./dist/algoliasearch.umd.js",
    "./lite.js": "./dist/algoliasearch-lite.umd.js"
  },
  "types": "index.d.ts",
  "files": [
    "dist",
    "index.js",
    "index.d.ts",
    "lite.js",
    "lite.d.ts"
  ],
  "scripts": {
    "test:exports": "node --experimental-modules test/module/is-es-module.mjs && node test/module/is-cjs-module.cjs"
  },
  "dependencies": {
    "@algolia/cache-browser-local-storage": "4.24.0",
    "@algolia/cache-common": "4.24.0",
    "@algolia/cache-in-memory": "4.24.0",
    "@algolia/client-account": "4.24.0",
    "@algolia/client-analytics": "4.24.0",
    "@algolia/client-common": "4.24.0",
    "@algolia/client-personalization": "4.24.0",
    "@algolia/client-search": "4.24.0",
    "@algolia/logger-common": "4.24.0",
    "@algolia/logger-console": "4.24.0",
    "@algolia/recommend": "4.24.0",
    "@algolia/requester-browser-xhr": "4.24.0",
    "@algolia/requester-common": "4.24.0",
    "@algolia/requester-node-http": "4.24.0",
    "@algolia/transporter": "4.24.0"
  }
}
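The `main`, `browser`, `jsdelivr`, and `unpkg` fields above steer resolvers to the right bundle: Node gets the CommonJS entry, while bundlers and CDNs are pointed at the UMD builds, with `./lite.js` mapped to the search-only lite UMD bundle. A hedged sketch of the lite entry (not part of the diff; credentials are placeholders):

```ts
// Hedged sketch, not part of the vendored diff. Placeholder credentials.
// `algoliasearch/lite` is the search-only client; in browser bundles the
// `browser` map above substitutes ./dist/algoliasearch-lite.umd.js for ./lite.js.
import algoliasearch from 'algoliasearch/lite';

const searchClient = algoliasearch('YOUR_APP_ID', 'YOUR_SEARCH_ONLY_API_KEY');

searchClient
  .search([{ indexName: 'docs', query: 'serverless' }])
  .then(({ results }) => console.log(results.length))
  .catch(console.error);
```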
185
scripts/node_modules/argparse/CHANGELOG.md
generated
vendored
Normal file
@ -0,0 +1,185 @@
1.0.10 / 2018-02-15
------------------

- Use .concat instead of + for arrays, #122.


1.0.9 / 2016-09-29
------------------

- Rerelease after 1.0.8 - deps cleanup.


1.0.8 / 2016-09-29
------------------

- Maintenance (deps bump, fix node 6.5+ tests, coverage report).


1.0.7 / 2016-03-17
------------------

- Teach `addArgument` to accept string arg names. #97, @tomxtobin.


1.0.6 / 2016-02-06
------------------

- Maintenance: moved to eslint & updated CS.


1.0.5 / 2016-02-05
------------------

- Removed lodash dependency to significantly reduce install size.
  Thanks to @mourner.


1.0.4 / 2016-01-17
------------------

- Maintenance: lodash update to 4.0.0.


1.0.3 / 2015-10-27
------------------

- Fix parse `=` in args: `--examplepath="C:\myfolder\env=x64"`. #84, @CatWithApple.


1.0.2 / 2015-03-22
------------------

- Relaxed lodash version dependency.


1.0.1 / 2015-02-20
------------------

- Changed dependencies to be compatible with ancient nodejs.


1.0.0 / 2015-02-19
------------------

- Maintenance release.
- Replaced `underscore` with `lodash`.
- Bumped version to 1.0.0 to better reflect semver meaning.
- HISTORY.md -> CHANGELOG.md


0.1.16 / 2013-12-01
-------------------

- Maintenance release. Updated dependencies and docs.


0.1.15 / 2013-05-13
-------------------

- Fixed #55, @trebor89


0.1.14 / 2013-05-12
-------------------

- Fixed #62, @maxtaco


0.1.13 / 2013-04-08
-------------------

- Added `.npmignore` to reduce package size


0.1.12 / 2013-02-10
-------------------

- Fixed conflictHandler (#46), @hpaulj


0.1.11 / 2013-02-07
-------------------

- Multiple bugfixes, @hpaulj
- Added 70+ tests (ported from python), @hpaulj
- Added conflictHandler, @applepicke
- Added fromfilePrefixChar, @hpaulj


0.1.10 / 2012-12-30
-------------------

- Added [mutual exclusion](http://docs.python.org/dev/library/argparse.html#mutual-exclusion)
  support, thanks to @hpaulj
- Fixed options check for `storeConst` & `appendConst` actions, thanks to @hpaulj


0.1.9 / 2012-12-27
------------------

- Fixed option dest interference with other options (issue #23), thanks to @hpaulj
- Fixed default value behavior with `*` positionals, thanks to @hpaulj
- Improve `getDefault()` behavior, thanks to @hpaulj
- Improve negative argument parsing, thanks to @hpaulj


0.1.8 / 2012-12-01
------------------

- Fixed parser parents (issue #19), thanks to @hpaulj
- Fixed negative argument parse (issue #20), thanks to @hpaulj


0.1.7 / 2012-10-14
------------------

- Fixed 'choices' argument parse (issue #16)
- Fixed stderr output (issue #15)


0.1.6 / 2012-09-09
------------------

- Fixed check for conflict of options (thanks to @tomxtobin)


0.1.5 / 2012-09-03
------------------

- Fix parser #setDefaults method (thanks to @tomxtobin)


0.1.4 / 2012-07-30
------------------

- Fixed pseudo-argument support (thanks to @CGamesPlay)
- Fixed addHelp default (should be true), if not set (thanks to @benblank)


0.1.3 / 2012-06-27
------------------

- Fixed formatter api name: Formatter -> HelpFormatter


0.1.2 / 2012-05-29
------------------

- Added basic tests
- Removed excess whitespace in help
- Fixed error reporting when a parser with subcommands
  is called with empty arguments


0.1.1 / 2012-05-23
------------------

- Fixed line wrapping in help formatter
- Added better error reporting on invalid arguments


0.1.0 / 2012-05-16
------------------

- First release.
21
scripts/node_modules/argparse/LICENSE
generated
vendored
Normal file
@ -0,0 +1,21 @@
(The MIT License)

Copyright (C) 2012 by Vitaly Puzrin

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
Some files were not shown because too many files have changed in this diff